List of usage examples for java.lang.IllegalArgumentException.getStackTrace()
public StackTraceElement[] getStackTrace()
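getStackTrace() is inherited from java.lang.Throwable; it returns a copy of the recorded stack frames as a StackTraceElement[] array. A recurring mistake in the examples below is concatenating that array directly into a log message, which prints only the array's identity (for example [Ljava.lang.StackTraceElement;@1b6d3586) rather than the frames. A minimal sketch of the difference (hypothetical message text, for illustration only):

import java.util.Arrays;
import java.util.stream.Collectors;

public class GetStackTraceDemo {
    public static void main(String[] args) {
        try {
            Integer.parseInt("not a number"); // NumberFormatException is an IllegalArgumentException
        } catch (IllegalArgumentException e) {
            // Misuse: prints the array reference, e.g. [Ljava.lang.StackTraceElement;@1b6d3586
            System.err.println("error: " + e.getStackTrace());
            // Correct: render each StackTraceElement, one frame per line
            String trace = Arrays.stream(e.getStackTrace())
                    .map(StackTraceElement::toString)
                    .collect(Collectors.joining("\n\tat ", "\tat ", ""));
            System.err.println(e + "\n" + trace);
        }
    }
}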
From source file:com.daro.persistence.generic.dao.GenericDaoImpl.java
/**
 * Persist new entity p.
 *
 * @param p Data type of entity.
 * @throws PersistenceException
 */
@Override
public void add(T p) throws PersistenceException {
    Session session = this.getCurrentSession();
    try {
        session.persist(p);
    } catch (java.lang.IllegalArgumentException illegalArgumentEx) {
        logger.error("Persistence layer error: " + illegalArgumentEx.getStackTrace());
        PersistenceError error;
        if (p == null) {
            error = PersistenceError.ENTITY_NULL;
        } else {
            error = PersistenceError.PERSISTENCE_INTERNAL_ERROR;
        }
        throw new PersistenceException(error, illegalArgumentEx.getCause());
    }
    if (loggerInfoEnabled)
        logger.debug("Persistence layer info: " + "Saved successfully, Details=" + p);
}
From source file:com.daro.persistence.generic.dao.GenericDaoImpl.java
/**
 * Update entity p. Persists existing entity p.
 *
 * @param p T Data type of entity
 * @throws PersistenceException
 */
@Override
public void update(T p) throws PersistenceException {
    Session session = this.getCurrentSession();
    try {
        //session.update(p);
        session.merge(p);
    } catch (java.lang.IllegalArgumentException illegalArgumentEx) {
        logger.error("Persistence layer error: " + illegalArgumentEx.getStackTrace());
        PersistenceError error;
        if (p == null) {
            error = PersistenceError.ENTITY_NULL;
        } else {
            error = PersistenceError.PERSISTENCE_INTERNAL_ERROR;
        }
        throw new PersistenceException(error, illegalArgumentEx.getCause());
    }
    if (loggerInfoEnabled)
        logger.debug("Persistence layer info: " + "Person updated successfully, Person Details=" + p);
}
From source file:com.datastax.loader.CqlDelimLoad.java
private boolean setup() throws IOException, KeyStoreException, NoSuchAlgorithmException, KeyManagementException,
        CertificateException, UnrecoverableKeyException {
    // Connect to Cassandra
    Session tsession = null;
    try {
        PoolingOptions pOpts = new PoolingOptions();
        pOpts.setMaxConnectionsPerHost(HostDistance.LOCAL, 8);
        pOpts.setCoreConnectionsPerHost(HostDistance.LOCAL, 8);
        Cluster.Builder clusterBuilder = Cluster.builder().addContactPoint(host).withPort(port)
                //.withCompression(ProtocolOptions.Compression.LZ4)
                .withPoolingOptions(pOpts)
                .withLoadBalancingPolicy(new TokenAwarePolicy(DCAwareRoundRobinPolicy.builder().build()));
        if (null != username)
            clusterBuilder = clusterBuilder.withCredentials(username, password);
        if (null != truststorePath)
            clusterBuilder = clusterBuilder.withSSL(createSSLOptions());
        cluster = clusterBuilder.build();
        if (null == cluster) {
            throw new IOException("Could not create cluster");
        }
        tsession = cluster.connect();
    } catch (IllegalArgumentException e) {
        System.err.println("Could not connect to the cluster, check your hosts");
        //e.printStackTrace();
        return false;
    } catch (Exception e) {
        System.err.println(e.getStackTrace());
        e.printStackTrace();
        return false;
    }
    if ((0 > cluster.getConfiguration().getProtocolOptions().getProtocolVersion().compareTo(ProtocolVersion.V4))
            && nullsUnset) {
        System.err.println("Cannot use nullsUnset with ProtocolVersion less than V4 (prior to Cassandra 3.0)");
        cleanup();
        return false;
    }
    if (null != rateFile) {
        if (STDERR.equalsIgnoreCase(rateFile)) {
            rateStream = System.err;
        } else {
            rateStream = new PrintStream(new BufferedOutputStream(new FileOutputStream(rateFile)), true);
        }
    }
    Metrics metrics = cluster.getMetrics();
    com.codahale.metrics.Timer timer = metrics.getRequestsTimer();
    rateLimiter = new RateLimiter(rate, progressRate, timer, rateStream);
    //rateLimiter = new Latency999RateLimiter(rate, progressRate, 3000, 200, 10, 0.5, 0.1, cluster, false);
    session = new RateLimitedSession(tsession, rateLimiter);
    return true;
}
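In the catch-all branch above, System.err.println(e.getStackTrace()) prints only the array reference; the e.printStackTrace() call on the next line is what actually emits the trace, making the println redundant. A minimal sketch of the distinction (hypothetical message text):

import java.io.PrintStream;

public class PrintTraceDemo {
    public static void main(String[] args) {
        try {
            throw new IllegalArgumentException("could not build cluster"); // hypothetical failure
        } catch (Exception e) {
            PrintStream err = System.err;
            err.println(e.getStackTrace()); // prints e.g. [Ljava.lang.StackTraceElement;@7d4991ad
            e.printStackTrace(err);         // prints the exception, every frame, and any cause chain
        }
    }
}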
From source file:org.openregistry.core.service.DefaultPersonService.java
protected Person recalculatePersonBiodemInfo(final Person person, final SorPerson sorPerson,
        final RecalculationType recalculationType, boolean mistake) {
    final List<SorPerson> sorPersons = this.personRepository.getSoRRecordsForPerson(person);
    logger.info("recalculatePersonBiodemInfo: start");
    if (recalculationType == RecalculationType.ADD
            || (recalculationType == RecalculationType.DELETE && !mistake)) {
        sorPersons.add(sorPerson);
    }
    copySorNamesToPerson(person, sorPersons);
    final Date birthDate = this.birthDateFieldElector.elect(sorPerson, sorPersons,
            recalculationType == RecalculationType.DELETE);
    final String gender = this.genderFieldElector.elect(sorPerson, sorPersons,
            recalculationType == RecalculationType.DELETE);
    final SorName preferredName = this.preferredNameFieldElector.elect(sorPerson, sorPersons,
            recalculationType == RecalculationType.DELETE);
    final SorName officialName = this.officialNameFieldElector.elect(sorPerson, sorPersons,
            recalculationType == RecalculationType.DELETE);
    final EmailAddress emailAddress = this.preferredContactEmailAddressFieldElector.elect(sorPerson, sorPersons,
            recalculationType == RecalculationType.DELETE);
    final Phone phone = this.preferredContactPhoneNumberFieldElector.elect(sorPerson, sorPersons,
            recalculationType == RecalculationType.DELETE);
    final Map<String, String> attributes = this.attributesElector.elect(sorPerson, sorPersons,
            recalculationType == RecalculationType.DELETE);
    final SorDisclosureSettings disclosure = this.disclosureFieldElector.elect(sorPerson, sorPersons,
            recalculationType == RecalculationType.DELETE);
    final String ssn = this.ssnFieldElector.elect(sorPerson, sorPersons,
            recalculationType == RecalculationType.DELETE);
    Identifier primarySSN = person.getPrimaryIdentifiersByType().get("SSN");
    // check if the elector elected some ssn and the person does have a previous ssn assigned to it
    if (!org.apache.commons.lang.StringUtils.isEmpty(ssn) && primarySSN != null) {
        try {
            this.identifierChangeService.change(person.getPrimaryIdentifiersByType().get("SSN"), ssn);
        } catch (IllegalArgumentException e) {
            logger.debug(e.getStackTrace().toString());
        }
        // all other exceptions should be propagated
    }
    person.setDateOfBirth(birthDate);
    person.setGender(gender);
    person.getPreferredContactEmailAddress().update(emailAddress);
    person.getPreferredContactPhoneNumber().update(phone);
    person.calculateDisclosureSettings(disclosure);
    person.setAttributes(attributes);
    String affiliation = "";
    Type affiliationType = null;
    if (disclosure != null) {
        logger.info("after person.calculateDisclosureSettings, disclosure code: "
                + disclosure.getDisclosureCode());
    } else {
        logger.info("Disclosure is null");
    }
    List<SorRole> sorroles = sorPerson.getRoles();
    for (SorRole role : sorroles) {
        if (role != null) {
            logger.info("Role = " + role.getTitle());
            if (role.getAffiliationType() != null) {
                logger.info("Role desc= " + role.getAffiliationType().getDescription());
                affiliation = role.getAffiliationType().getDescription();
                if (person.getDisclosureSettings() != null) {
                    logger.info("recalculating disclosure setting 1...");
                    //person.getDisclosureSettings().recalculate(this.strategyRepository.getDisclosureRecalculationStrategy());
                    person.getDisclosureSettings().recalculate(
                            this.strategyRepository.getDisclosureRecalculationStrategy(), affiliation,
                            referenceRepository);
                }
            }
        }
    }
    // SSN election is happening in the ssn identifier assigner.
    boolean preferred = false;
    boolean official = false;
    for (final Name name : person.getNames()) {
        if (!preferred && name.sameAs(preferredName)) {
            name.setPreferredName(true);
            preferred = true;
        }
        if (!official && name.sameAs(officialName)) {
            name.setOfficialName(true);
            official = true;
        }
        if (official && preferred) {
            break;
        }
    }
    logger.info("recalculatePersonBiodemInfo: end");
    // return this.personRepository.savePerson(person);
    return person;
}
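logger.debug(e.getStackTrace().toString()) above has the same problem: toString() on the array yields its identity, not the frames. Since this class already uses org.apache.commons.lang.StringUtils, commons-lang's ExceptionUtils is presumably on the classpath and returns the full trace as a String; a sketch under that assumption:

import org.apache.commons.lang.exception.ExceptionUtils;

public class TraceToStringDemo {
    public static void main(String[] args) {
        try {
            throw new IllegalArgumentException("identifier change rejected"); // hypothetical failure
        } catch (IllegalArgumentException e) {
            System.out.println(e.getStackTrace().toString());    // array identity only
            System.out.println(ExceptionUtils.getStackTrace(e)); // full formatted trace
        }
    }
}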
From source file:org.metis.sql.SqlStmnt.java
/**
 * Called by the Controller bean (RDB or PDB) to execute this SQL statement
 * with the given params.
 *
 * @param params
 * @throws SQLException
 */
public SqlResult execute(List<Map<String, String>> params) {
    if (params == null) {
        params = new ArrayList<Map<String, String>>();
    }
    LOG.debug("execute: executing this statement: " + getOriginal());
    LOG.debug("execute: ... with this number of param maps = " + params.size());

    // first, do some light validation work
    if (params.size() == 0 && (isPrepared() || isCallable())) {
        // if it is callable and it requires an IN param
        if (isCallable() && getInTokens().size() > 0) {
            LOG.error("execute: ERROR, IN params were not provided "
                    + "for this callable statement that requires IN params: " + getPrepared());
            return null;
        }
        // all prepared statements that are not callable require an input param
        else {
            LOG.error("execute: ERROR, params were not provided "
                    + "for this prepared statement: " + getPrepared());
            return null;
        }
    } else if (params.size() > 0 && !isPrepared()) {
        LOG.error("execute: ERROR, params were provided "
                + "for this static or non-prepared statement that does not require params: " + getOriginal());
        return null;
    }

    // make sure given params match
    if (params.size() > 0) {
        for (Map<String, String> pMap : params) {
            if (!isMatch(pMap.keySet())) {
                LOG.error("execute: ERROR, given key:value set does not match "
                        + "this statement's key:value set\n" + getKeyTokens().toString() + " vs. "
                        + params.toString());
                return null;
            }
        }
    }

    // if trace is on, dump params if any
    if (params.size() > 0 && LOG.isTraceEnabled()) {
        for (Map<String, String> pMap : params) {
            LOG.trace("execute: valid param set = " + pMap.toString());
        }
    }

    // A list that essentially represents the result set returned by the DB for queries.
    List<Map<String, Object>> listOfMaps = new ArrayList<Map<String, Object>>();
    // dequeue a sqlResult object from the SqlResult cache
    SqlResult sqlResult = SqlResult.dequeue();

    try {
        // if this statement is call'able, then execute its stored procedure
        // object. Note that we don't support batching calls to stored
        // procedures and functions. Maybe that can be a future enhancement...
        if (isCallable()) {
            LOG.debug("execute: invoking this stored procedure or function: " + getStoredProcName());
            Map<String, Object> kvMap = new HashMap<String, Object>();
            // first prepare the IN params (if any)
            if (params.size() > 0) {
                for (KeyValueObject kvObj : getPreparedObjects(params.get(0))) {
                    kvMap.put(kvObj.getKey(), kvObj.getObj());
                }
            }
            // now execute the function or stored proc
            // Note from Spring docs: The execute() method returns a
            // map with an entry for each declared output parameter,
            // using the parameter name as the key.
            kvMap = getStoredProcedure().execute(kvMap);
            // now that the execute has completed, fetch the OUT params
            // from the kvMap. i suppose it is possible for a stored proc
            // not to have any OUT params.
            // need to transfer each key:value that is associated with
            // the OUT param as a map to listOfMaps. However, those
            // keys that pertain to cursors or sets point to a List of Maps!!
            for (SqlToken sqlToken : getSortedKeyTokens()) {
                // skip IN only params; we're only looking for OUT params
                if (sqlToken.isIn()) {
                    continue;
                }
                Object outObj = kvMap.remove(sqlToken.getKey());
                if (outObj == null) {
                    LOG.error("execute: object was not returned for this out param: " + sqlToken.getKey());
                    continue;
                }
                if (sqlToken.isCursor() || sqlToken.isRset()) {
                    if (outObj instanceof List) {
                        List<Map<String, Object>> mList = (List<Map<String, Object>>) outObj;
                        for (Map<String, Object> map : mList) {
                            listOfMaps.add(map);
                        }
                    } else {
                        LOG.error("execute: this OUT result set param did not return a type of List: "
                                + sqlToken.getKey());
                        LOG.error("execute: got this type/class instead: " + outObj.getClass().getName());
                    }
                } else {
                    Map<String, Object> map = new HashMap<String, Object>();
                    map.put(sqlToken.getKey(), outObj);
                    listOfMaps.add(map);
                }
            }
            /*
             * Any undeclared results returned are added to the output map
             * with generated names like "#result-set-1", "#result-set-2",
             * etc. You can change this by setting 'skipUndeclaredResults'
             * to true, and then these undeclared result sets will be
             * skipped. TODO: look into the update count
             */
            if (!kvMap.isEmpty()) {
                LOG.debug("execute: looking for result sets");
                for (Object kvObj : kvMap.values()) {
                    if (kvObj instanceof List) {
                        for (Map<String, Object> map : (List<Map<String, Object>>) kvObj) {
                            listOfMaps.add(map);
                        }
                    } else {
                        LOG.debug("execute: unknown object returned from execute: "
                                + kvObj.getClass().getName());
                        LOG.debug("execute: unknown object's toString value: " + kvObj.toString());
                    }
                }
            }
            sqlResult.setResultSet(listOfMaps);
            return sqlResult;
        } // if (isCallable()...

        // key:value type objects used for binding the input params to prepared statements
        List<KeyValueObject> kvObjs = null;
        Object bindObjs[] = null;

        // is this a query; i.e., select statement?
        if (getSqlStmntType() == SqlStmntType.SELECT) {
            if (isPrepared()) {
                LOG.debug("execute: executing this prepared SELECT statement: " + getPrepared());
                kvObjs = getPreparedObjects(params.get(0));
                bindObjs = new Object[kvObjs.size()];
                for (int i = 0; i < bindObjs.length; i++) {
                    bindObjs[i] = kvObjs.get(i).getObj();
                }
                listOfMaps = getJdbcTemplate().query(getPrepared(), bindObjs, this);
            } else {
                LOG.trace("execute: executing this SELECT statement: " + getOriginal());
                listOfMaps = getJdbcTemplate().query(getOriginal(), this);
            }
            if (listOfMaps != null && listOfMaps.size() > 0) {
                LOG.trace("execute: dumping first map - " + listOfMaps.get(0).toString());
            }
            sqlResult.setResultSet(listOfMaps);
            return sqlResult;
        }

        // ok, this statement is neither call'able nor a query so it must be
        // an update of some kind; i.e., insert, update or delete
        // note that keyHolders are only used for INSERT statements!
        if (!isPrepared()) {
            PreparedStmntCreator creatorSetter = new PreparedStmntCreator(this, bindObjs);
            // i guess it is possible to have a non prepared update of some sort
            if (getSqlStmntType() == SqlStmntType.INSERT) {
                GeneratedKeyHolder keyHolder = new GeneratedKeyHolder();
                sqlResult.setNumRows(getJdbcTemplate().update(creatorSetter, keyHolder));
                sqlResult.setKeyHolder(keyHolder);
            } else {
                sqlResult.setNumRows(getJdbcTemplate().update(getOriginal(), creatorSetter));
            }
        }
        // we have a prepared update; is the client requesting a batch update?
        else if (params.size() > 1) {
            LOG.debug("execute: invoking batch update for this statement: " + getPrepared());
            // create the list of objects for the batch update
            List<Object[]> batchArgs = new ArrayList<Object[]>();
            for (Map<String, String> map : params) {
                // prepare the bind objects for the prepared statement
                kvObjs = getPreparedObjects(map);
                bindObjs = new Object[kvObjs.size()];
                for (int i = 0; i < bindObjs.length; i++) {
                    bindObjs[i] = kvObjs.get(i).getObj();
                }
                batchArgs.add(bindObjs);
            }
            sqlResult.setBatchNumRows(getJdbcTemplate().batchUpdate(getPrepared(), batchArgs));
            // note that a key holder is not possible with a batch update
        }
        // we have a prepared update, but it is not a batch update
        else if (params.size() == 1) {
            LOG.debug("execute: invoking prepared update for this statement: " + getPrepared());
            kvObjs = getPreparedObjects(params.get(0));
            bindObjs = new Object[kvObjs.size()];
            for (int i = 0; i < bindObjs.length; i++) {
                bindObjs[i] = kvObjs.get(i).getObj();
            }
            // note that PreparedStmntCreator is both a creator and setter
            PreparedStmntCreator creatorSetter = new PreparedStmntCreator(this, bindObjs);
            if (getSqlStmntType() == SqlStmntType.INSERT) {
                LOG.trace("execute: executing prepared INSERT statement");
                GeneratedKeyHolder keyHolder = new GeneratedKeyHolder();
                int numRows = getJdbcTemplate().update(creatorSetter, keyHolder);
                sqlResult.setNumRows(numRows);
                sqlResult.setKeyHolder(keyHolder);
            } else {
                LOG.trace("execute: executing UPDATE statement");
                int numRows = getJdbcTemplate().update(getPrepared(), creatorSetter);
                sqlResult.setNumRows(numRows);
            }
        }
    } catch (IllegalArgumentException exc) {
        LOG.error("execute: ERROR, caught this IllegalArgumentException while executing sql: "
                + exc.toString());
        LOG.error("execute: exception stack trace follows:");
        dumpStackTrace(exc.getStackTrace());
        if (exc.getCause() != null) {
            LOG.error("execute: Caused by " + exc.getCause().toString());
            LOG.error("execute: causing exception stack trace follows:");
            dumpStackTrace(exc.getCause().getStackTrace());
        }
        if (sqlResult != null) {
            SqlResult.enqueue(sqlResult);
        }
        sqlResult = null;
    } catch (DataAccessException exc) {
        LOG.error("execute: ERROR, caught this DataAccessException while executing sql: " + exc.toString());
        LOG.error("execute: exception stack trace follows:");
        dumpStackTrace(exc.getStackTrace());
        LOG.error("execute: Most Specific Cause = " + exc.getMostSpecificCause().toString());
        LOG.error("execute: MSC exception stack trace follows:");
        dumpStackTrace(exc.getMostSpecificCause().getStackTrace());
        if (sqlResult != null) {
            SqlResult.enqueue(sqlResult);
        }
        sqlResult = null;
    }
    return sqlResult;
}
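Unlike most examples here, this one hands the StackTraceElement[] to a helper, dumpStackTrace(...), whose body the listing does not show. A minimal sketch of what such a helper might look like (hypothetical; the real method presumably writes through the class's LOG rather than System.err):

public final class StackTraceDumper {
    // Hypothetical stand-in for SqlStmnt's private dumpStackTrace(...) helper.
    static void dumpStackTrace(StackTraceElement[] elements) {
        if (elements == null) {
            return;
        }
        for (StackTraceElement element : elements) {
            // StackTraceElement.toString() renders "class.method(File.java:line)"
            System.err.println("\tat " + element);
        }
    }

    public static void main(String[] args) {
        try {
            throw new IllegalArgumentException("bad bind parameter"); // hypothetical failure
        } catch (IllegalArgumentException exc) {
            dumpStackTrace(exc.getStackTrace());
        }
    }
}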
From source file:com.opengamma.masterdb.batch.DbBatchWriter.java
@SuppressWarnings("unchecked") public synchronized void addJobResultsInTransaction(TransactionStatus transactionStatus, ObjectId runId, ViewComputationResultModel resultModel) { ArgumentChecker.notNull(runId, "runId"); ArgumentChecker.notNull(resultModel, "resultModel"); final long riskRunId = extractOid(runId); ArgumentChecker.notNull(riskRunId, "riskRunId"); Map<ComputeFailureKey, ComputeFailure> computeFailureCache = _computeFailureCacheByRunId.get(riskRunId); Map<Pair<Long, Long>, StatusEntry> statusCache = _statusCacheByRunId.get(riskRunId); Map<ValueSpecification, BatchResultWriterFailure> errorCache = populateErrorCache(computeFailureCache, resultModel.getAllResults()); RiskRun run = _riskRunsByIds.get(riskRunId); if (run.getSnapshotMode().equals(SnapshotMode.WRITE_THROUGH)) { addComputedValuesToMarketDataInTransaction(run.getMarketData().getObjectId(), resultModel.getAllMarketData()); }/*from ww w . j ava 2 s .co m*/ for (String calcConfigName : resultModel.getCalculationConfigurationNames()) { ViewCalculationResultModel viewCalculationResultModel = resultModel .getCalculationResult(calcConfigName); final Set<ComputationTargetSpecification> successfulTargets = newHashSet(); final Set<ComputationTargetSpecification> failedTargets = newHashSet(); List<SqlParameterSource> targetProperties = newArrayList(); List<SqlParameterSource> successes = newArrayList(); List<SqlParameterSource> failures = newArrayList(); List<SqlParameterSource> failureReasons = newArrayList(); Instant evalInstant = Instant.now(); long calcConfId = _calculationConfigurations.get(calcConfigName); for (final ComputationTargetSpecification targetSpec : viewCalculationResultModel.getAllTargets()) { final long computationTargetId = _computationTargets.get(targetSpec); boolean specFailures = false; for (final ComputedValueResult computedValue : viewCalculationResultModel .getAllValues(targetSpec)) { ResultConverter<Object> resultConverter = null; if (!(computedValue.getValue() instanceof MissingValue)) { try { resultConverter = (ResultConverter<Object>) _resultConverterCache .getConverter(computedValue.getValue()); } catch (IllegalArgumentException e) { s_logger.info("No converter for value of type " + computedValue.getValue().getClass() + " for " + computedValue.getSpecification()); } } final ValueSpecification specification = computedValue.getSpecification(); if (!_riskValueSpecifications.containsKey(specification)) { s_logger.error("Unexpected result specification " + specification + ". Result cannot be written. 
Result value was " + computedValue.getValue()); continue; } final long valueSpecificationId = _riskValueSpecifications.get(specification); final long functionUniqueId = getFunctionUniqueIdInTransaction( specification.getFunctionUniqueId()).getId(); final long computeNodeId = getOrCreateComputeNode(computedValue.getComputeNodeId()).getId(); if (resultConverter != null && computedValue.getInvocationResult() == InvocationResult.SUCCESS) { s_logger.debug("Writing value {} for value spec {}", computedValue.getValue(), specification); Map<String, Double> valueAsDoublesMap = resultConverter .convert(computedValue.getSpecification().getValueName(), computedValue.getValue()); for (Map.Entry<String, Double> valueEntry : valueAsDoublesMap.entrySet()) { final String valueName = valueEntry.getKey(); final Double doubleValue = ensureDatabasePrecision(valueEntry.getValue()); final long successId = nextId(RSK_SEQUENCE_NAME); successes.add(getSuccessArgs(successId, riskRunId, evalInstant, calcConfId, computationTargetId, valueSpecificationId, functionUniqueId, computeNodeId, valueName, doubleValue)); } } else { s_logger.info("Writing failure for {} with invocation result {}, {} ", newArray(computedValue.getSpecification(), computedValue.getInvocationResult(), computedValue.getAggregatedExecutionLog())); specFailures = true; final long failureId = nextId(RSK_SEQUENCE_NAME); failures.add(getFailureArgs(failureId, riskRunId, evalInstant, calcConfId, computationTargetId, valueSpecificationId, functionUniqueId, computeNodeId, specification.getValueName())); BatchResultWriterFailure cachedFailure = errorCache.get(specification); if (cachedFailure != null) { for (long computeFailureId : cachedFailure.getComputeFailureIds()) { ArgumentChecker.notNull(computeFailureId, "computeFailureId"); final long failureReasonId = nextId(RSK_SEQUENCE_NAME); failureReasons .add(getFailureReasonArgs(failureReasonId, failureId, computeFailureId)); } } } } StatusEntry.Status status = getStatus(statusCache, calcConfigName, targetSpec); if (specFailures || status == StatusEntry.Status.FAILURE) { successfulTargets.remove(targetSpec); failedTargets.add(targetSpec); } else { successfulTargets.add(targetSpec); } // storing target data ComputationTarget computationTarget = _computationTargetResolver.resolve(targetSpec, VersionCorrection.LATEST); Object targetValue = computationTarget.getValue(); if (targetValue instanceof Bean) { Bean bean = (Bean) targetValue; for (String propertyName : bean.propertyNames()) { Property<Object> property = bean.property(propertyName); final long targetPropertyId = nextId(RSK_SEQUENCE_NAME); targetProperties.add(getTargetPropertyArgs(targetPropertyId, computationTargetId, propertyName, property.get() == null ? "NULL" : property.get().toString())); } } } if (successes.isEmpty() && failures.isEmpty() && failureReasons.isEmpty() && successfulTargets.isEmpty() && failedTargets.isEmpty()) { s_logger.debug("Nothing to write to DB for {}", resultModel); return; } Object preSuccessSavepoint = transactionStatus.createSavepoint(); try { getJdbcTemplate().batchUpdate(getElSqlBundle().getSql("InsertRiskSuccess"), successes.toArray(new DbMapSqlParameterSource[successes.size()])); } catch (Exception e) { s_logger.error("Failed to write successful calculations to batch database. 
Converting to failures.", e); transactionStatus.rollbackToSavepoint(preSuccessSavepoint); if (!successes.isEmpty()) { String exceptionClass = e.getClass().getName(); String exceptionMsg = e.getMessage(); final StringBuilder buffer = new StringBuilder(); for (StackTraceElement element : e.getStackTrace()) { buffer.append(element.toString()).append("\n"); } final String stackTrace = buffer.toString(); for (SqlParameterSource success : successes) { failures.add(convertSuccessToFailure(success)); final long failureId = getId(success); final long functionId = getFunctionId(success); ComputeFailureKey computeFailureKey = new ComputeFailureKey(String.valueOf(functionId), exceptionClass, exceptionMsg, stackTrace); ComputeFailure computeFailure = getComputeFailureFromDb(computeFailureCache, computeFailureKey); final long failureReasonId = nextId(RSK_SEQUENCE_NAME); failureReasons .add(getFailureReasonArgs(failureReasonId, failureId, computeFailure.getId())); } failedTargets.addAll(successfulTargets); successes.clear(); successfulTargets.clear(); targetProperties.clear(); } } Object preTargetPropertiesFailureSavepoint = transactionStatus.createSavepoint(); try { getJdbcTemplate().batchUpdate(getElSqlBundle().getSql("InsertTargetProperties"), targetProperties.toArray(new DbMapSqlParameterSource[targetProperties.size()])); } catch (Exception e) { s_logger.error("Failed to write target properties to batch database", e); transactionStatus.rollbackToSavepoint(preTargetPropertiesFailureSavepoint); } Object preFailureSavepoint = transactionStatus.createSavepoint(); try { getJdbcTemplate().batchUpdate(getElSqlBundle().getSql("InsertRiskFailure"), failures.toArray(new DbMapSqlParameterSource[failures.size()])); getJdbcTemplate().batchUpdate(getElSqlBundle().getSql("InsertRiskFailureReason"), failureReasons.toArray(new DbMapSqlParameterSource[failureReasons.size()])); } catch (Exception e) { s_logger.error("Failed to write failures to batch database", e); transactionStatus.rollbackToSavepoint(preFailureSavepoint); } updateStatusEntries(riskRunId, statusCache, calcConfigName, StatusEntry.Status.SUCCESS, successfulTargets); updateStatusEntries(riskRunId, statusCache, calcConfigName, StatusEntry.Status.FAILURE, failedTargets); } }
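The StringBuilder loop above is the intended use of getStackTrace(): iterate the elements and append each frame, here to persist the trace text alongside the failure record. The same pattern extracted as a standalone helper looks like this (a sketch, not OpenGamma API):

public final class Traces {
    // Same pattern as the loop above: one frame per line, newline-separated.
    public static String toText(Throwable t) {
        final StringBuilder buffer = new StringBuilder();
        for (StackTraceElement element : t.getStackTrace()) {
            buffer.append(element.toString()).append("\n");
        }
        return buffer.toString();
    }

    public static void main(String[] args) {
        System.out.print(toText(new IllegalArgumentException("demo")));
    }
}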
From source file:org.mskcc.cbio.portal.servlet.CrossCancerJSON.java
/**
 * Gets all Genomic Data.
 */
private ProfileDataSummary getGenomicData(String cancerStudyId,
        HashMap<String, GeneticProfile> defaultGeneticProfileSet, SampleList defaultSampleSet,
        String geneListStr, ArrayList<SampleList> sampleList, HttpServletRequest request,
        HttpServletResponse response) throws IOException, ServletException, DaoException {
    // parse geneList, written in the OncoPrintSpec language (except for changes by XSS clean)
    double zScore = ZScoreUtil.getZScore(new HashSet<String>(defaultGeneticProfileSet.keySet()),
            new ArrayList<GeneticProfile>(defaultGeneticProfileSet.values()), request);
    double rppaScore = ZScoreUtil.getRPPAScore(request);
    ParserOutput theOncoPrintSpecParserOutput = OncoPrintSpecificationDriver.callOncoPrintSpecParserDriver(
            geneListStr, new HashSet<String>(defaultGeneticProfileSet.keySet()),
            new ArrayList<GeneticProfile>(defaultGeneticProfileSet.values()), zScore, rppaScore);
    ArrayList<String> geneList = new ArrayList<String>();
    geneList.addAll(theOncoPrintSpecParserOutput.getTheOncoPrintSpecification().listOfGenes());
    ArrayList<ProfileData> profileDataList = new ArrayList<ProfileData>();
    Set<String> warningUnion = new HashSet<String>();
    for (GeneticProfile profile : defaultGeneticProfileSet.values()) {
        try {
            GetProfileData remoteCall = new GetProfileData(profile, geneList,
                    StringUtils.join(defaultSampleSet.getSampleList(), " "));
            ProfileData pData = remoteCall.getProfileData();
            warningUnion.addAll(remoteCall.getWarnings());
            profileDataList.add(pData);
        } catch (IllegalArgumentException e) {
            e.getStackTrace();
        }
    }
    ProfileMerger merger = new ProfileMerger(profileDataList);
    ProfileData mergedProfile = merger.getMergedProfile();
    ProfileDataSummary dataSummary = new ProfileDataSummary(mergedProfile,
            theOncoPrintSpecParserOutput.getTheOncoPrintSpecification(), zScore, rppaScore);
    return dataSummary;
}
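Here e.getStackTrace() is called and its return value discarded. The method has no side effects, so the statement does nothing; the catch block silently drops any profile whose GetProfileData call rejects its arguments. If the intent is "ignore but record", logging the exception is the usual alternative; a sketch using java.util.logging (this class's actual logging setup is not shown in the listing):

import java.util.logging.Level;
import java.util.logging.Logger;

public class IgnoredExceptionDemo {
    private static final Logger LOG = Logger.getLogger(IgnoredExceptionDemo.class.getName());

    public static void main(String[] args) {
        try {
            throw new IllegalArgumentException("unsupported profile"); // hypothetical failure
        } catch (IllegalArgumentException e) {
            // e.getStackTrace(); // has no side effects; on its own this line records nothing
            LOG.log(Level.FINE, "profile skipped", e); // keeps the full trace, at debug level
        }
    }
}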
From source file:org.openecomp.sdc.validation.utils.ValidationConfigurationManager.java
private static Validator validatorInit(ValidatorConfiguration validatorConf) {
    Validator validator = null;
    try {
        validator = CommonMethods.newInstance(validatorConf.getImplementationClass(), Validator.class);
    } catch (IllegalArgumentException iae) {
        logger.error("Validator:" + validatorConf.getName() + " Class:"
                + validatorConf.getImplementationClass() + " failed in initialization. error:"
                + iae.toString() + " trace:" + Arrays.toString(iae.getStackTrace()));
    }
    return validator;
}
From source file:org.sonar.plugins.spcaf.SpcafSensor.java
private void parseReport(SpcafReportParser parser, File reportFile) {
    LOG.info("SPCAF: Parsing report: " + reportFile);
    List<SpcafIssue> parse = parser.parse(reportFile);
    LOG.info("SPCAF: Notifications:" + parse.size());
    allFiles.clear();
    addTree(fileSystem.baseDir().toPath(), allFiles);
    LOG.info("");
    LOG.info("--------------------");
    LOG.info("SPCAF: Input files");
    LOG.info("--------------------");
    for (InputFile file : fileSystem.inputFiles(fileSystem.predicates().all())) {
        LOG.info(file.absolutePath());
    }
    LOG.info("--------------------");
    List<String> allowedExtensions;
    switch (SpcafConf.repositoryKey().toLowerCase()) {
    case "spcaf-aspx":
        allowedExtensions = Arrays.asList(spcafPlugin.ASPX_SUFFIXES_DEFAULT_VALUE.split(","));
        break;
    case "spcaf-cs":
        allowedExtensions = Arrays.asList(new String[] { ".cs", ".dll" });
        break;
    case "spcaf-css":
        allowedExtensions = Arrays.asList(spcafPlugin.CSS_SUFFIXES_DEFAULT_VALUE.split(","));
        break;
    case "spcaf-ps1":
        allowedExtensions = Arrays.asList(spcafPlugin.PS1_SUFFIXES_DEFAULT_VALUE.split(","));
        break;
    case "spcaf-js":
        allowedExtensions = Arrays.asList(spcafPlugin.JS_SUFFIXES_DEFAULT_VALUE.split(","));
        break;
    case "spcaf-xml":
        allowedExtensions = Arrays.asList(spcafPlugin.XML_SUFFIXES_DEFAULT_VALUE.split(","));
        break;
    default:
        return;
    }
    LOG.info("Allowed Extensions:" + StringUtils.join(allowedExtensions.toArray(), ","));
    for (SpcafIssue issue : parse) {
        try {
            Path p = Paths.get(issue.filePath());
            String fileName = p.getFileName().toString();
            String ext = FilenameUtils.getExtension(fileName);
            // Skip files that we aren't checking under the current repo
            if (!allowedExtensions.contains("." + ext)) {
                continue;
            }
            LOG.info("SPCAF: Issue[" + issue.ruleKey() + "] " + issue.message());
            LOG.info("SPCAF: Issue Filename:" + fileName);
            File file = null;
            // Simplify .cs and .webpart files
            if (ext.equals("cs")) {
                file = p.toFile();
            } else if (ext.equals("webpart")) {
                for (File inputFile : fileSystem.files(fileSystem.predicates().all())) {
                    if (inputFile.getName().equals(fileName)) {
                        file = inputFile;
                        break;
                    }
                }
            } else if (ext.equals("dll")) {
                for (File inputFile : fileSystem.files(fileSystem.predicates().all())) {
                    if (inputFile.getName().equals("AssemblyInfo.cs")) {
                        file = inputFile;
                        break;
                    }
                }
            } else {
                file = getCorrectFile(issue.md5(), fileName);
            }
            if (file == null) {
                logSkippedIssue(issue, "\"" + fileName + "\" is not in SonarQube Repository.");
                continue;
            }
            String correctedPath = file.getAbsolutePath().replace('\\', '/');
            LOG.info("SPCAF: Physical file located: " + correctedPath);
            InputFile inputFile = null;
            for (InputFile currentInputFile : fileSystem.inputFiles(fileSystem.predicates().all())) {
                if (currentInputFile.absolutePath().equals(correctedPath)
                        && currentInputFile.type().equals(InputFile.Type.MAIN)) {
                    inputFile = currentInputFile;
                    break;
                }
            }
            // the original code checked inputFile for null twice, with a bare
            // "continue" first, which made the logged skip below unreachable
            if (inputFile == null) {
                logSkippedIssue(issue, "\"" + fileName + "\" is not in SonarQube Input Files.");
                continue;
            }
            LOG.info("SPCAF: Input file path:" + inputFile.absolutePath());
            LOG.info("Repository file located");
            Issuable issuable = perspectives.as(Issuable.class, inputFile);
            if (issuable == null) {
                logSkippedIssue(issue, "\"" + fileName + "\" is not issuable in SonarQube.");
            } else {
                issuable.addIssue(issuable.newIssueBuilder()
                        .ruleKey(RuleKey.of(SpcafConf.repositoryKey(), issue.ruleKey())).line(issue.line())
                        .message(issue.message()).build());
                LOG.info("SPCAF: Issue added for " + issue.ruleKey() + " on " + fileName);
            }
        } catch (IllegalArgumentException ex) {
            LOG.warn("SPCAF: Illegal Argument Exception in injecting issues has occurred");
            LOG.warn("SPCAF: Current issue: " + issue.toString());
            LOG.warn("SPCAF: Exception type: " + (ex.getMessage() == null ? "null" : ex.getMessage()));
            LOG.warn("SPCAF: StackTrace type: " + (ex.getStackTrace() == null ? "null" : ex.getStackTrace()));
        } catch (NullPointerException ex) {
            LOG.warn("SPCAF: Null Pointer Exception in injecting issues has occurred");
            LOG.warn("SPCAF: Current issue:" + issue.toString());
            LOG.warn("SPCAF: Exception type: " + (ex.getMessage() == null ? "null" : ex.getMessage()));
            LOG.warn("SPCAF: StackTrace type: " + (ex.getStackTrace() == null ? "null" : ex.getStackTrace()));
        } catch (MessageException ex) {
            LOG.warn("SPCAF: Message Exception in injecting issues has occurred");
            LOG.warn("SPCAF: Current issue: " + issue.toString());
            LOG.warn("SPCAF: Exception type: " + (ex.getMessage() == null ? "null" : ex.getMessage()));
            LOG.warn("SPCAF: StackTrace type: " + (ex.getStackTrace() == null ? "null" : ex.getStackTrace()));
        } catch (Exception ex) {
            LOG.warn("SPCAF: General Exception in injecting issues has occurred");
            LOG.warn("SPCAF: Current issue: " + issue.toString());
            LOG.warn("SPCAF: Exception type: " + ex.getClass().getName());
        }
    }
}
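Two details in the catch blocks above are worth noting: Throwable.getStackTrace() never returns null (at worst it returns an empty array), so the null guards are dead code, and concatenating the array into the message again logs only its identity. A compact sketch of the presumably intended output (hypothetical message text):

import java.util.Arrays;
import java.util.stream.Collectors;

public class WarnTraceDemo {
    public static void main(String[] args) {
        try {
            throw new IllegalArgumentException("issue injection failed"); // hypothetical failure
        } catch (IllegalArgumentException ex) {
            // no null check needed: getStackTrace() always returns an array
            String trace = Arrays.stream(ex.getStackTrace())
                    .map(StackTraceElement::toString)
                    .collect(Collectors.joining(System.lineSeparator()));
            System.err.println("SPCAF: StackTrace: " + trace);
        }
    }
}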