List of usage examples for the java.lang.InternalError constructor
public InternalError(Throwable cause)
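None of the snippets listed below actually passes a Throwable to this constructor; they all use the String overload. For reference, here is a minimal sketch of the cause-taking constructor, which has been available since Java 8. The class name and the failing lookup are illustrative assumptions, not taken from any of the source files below.

public class InternalErrorCauseExample {
    public static void main(String[] args) {
        try {
            // Contrived trigger: any checked exception could stand in here.
            Class.forName("com.example.DoesNotExist");
        } catch (ClassNotFoundException e) {
            // Wrapping the exception itself preserves its stack trace as the cause,
            // unlike new InternalError(e.toString()).
            throw new InternalError(e);
        }
    }
}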
From source file:com.cloud.hypervisor.xenserver.resource.Xenserver625StorageProcessorTest.java
@Test
@PrepareForTest({ Host.class, SR.class })
public void createNewFileSrTestThrowingDbUniqueException() throws XenAPIException, XmlRpcException {
    String uuid = "hostUuid";
    Mockito.when(citrixResourceBase._host.getUuid()).thenReturn(uuid);

    SR srMock = Mockito.mock(SR.class);
    Mockito.doReturn(srMock).when(xenserver625StorageProcessor)
            .retrieveAlreadyConfiguredSrWithoutException(connectionMock, pathMock);

    String srUuid = UUID.nameUUIDFromBytes(pathMock.getBytes()).toString();
    Host hostMock = Mockito.mock(Host.class);
    PowerMockito.mockStatic(Host.class);
    PowerMockito.when(Host.getByUuid(connectionMock, uuid)).thenReturn(hostMock);
    PowerMockito.mockStatic(SR.class);

    InternalError dbUniquenessException = new InternalError(
            "message: Db_exn.Uniqueness_constraint_violation(\"SR\", \"uuid\", \"fd3edbcf-f142-83d1-3fcb-029ca2446b68\")");
    PowerMockito.when(SR.introduce(Mockito.eq(connectionMock), Mockito.eq(srUuid), Mockito.eq(pathMock),
            Mockito.eq(pathMock), Mockito.eq("file"), Mockito.eq("file"), Mockito.eq(false),
            Mockito.anyMapOf(String.class, String.class))).thenThrow(dbUniquenessException);
    Mockito.doNothing().when(xenserver625StorageProcessor).removeSrAndPbdIfPossible(Mockito.eq(connectionMock),
            Mockito.any(SR.class), Mockito.any(PBD.class));

    SR sr = xenserver625StorageProcessor.createNewFileSr(connectionMock, pathMock);

    Assert.assertEquals(srMock, sr);
    Mockito.verify(xenserver625StorageProcessor, times(0)).removeSrAndPbdIfPossible(Mockito.eq(connectionMock),
            Mockito.any(SR.class), Mockito.any(PBD.class));
    Mockito.verify(xenserver625StorageProcessor).retrieveAlreadyConfiguredSrWithoutException(connectionMock,
            pathMock);
}
From source file:org.example.Serializer.java
private void compare(AstNode deserialize) {
    if (!AstComparer.compare(original, deserialize, true, true))
        throw new InternalError("Deserialized AST differs from original AST!");
}
From source file:org.texai.torrent.domainEntity.MetaInfo.java
/**
 * Returns the hex-encoded digest of the piece hashes.
 *
 * @return the hex-encoded digest of the piece hashes
 */
public String getDigestedPieceHashes() {
    try {
        final MessageDigest digest = MessageDigest.getInstance("SHA");
        return hexEncode(digest.digest(pieceHashes));
    } catch (final NoSuchAlgorithmException nsa) {
        throw new InternalError(nsa.toString()); // NOPMD
    }
}
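This snippet predates the cause-taking constructor documented at the top of this page; it flattens the exception into a message string. On Java 8 and later the catch block could instead throw new InternalError(nsa), which keeps the NoSuchAlgorithmException attached as the cause.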
From source file:com.clustercontrol.accesscontrol.dialog.RoleSettingDialog.java
/**
 * Sets the dialog input data and splits all users into role members and non-members.
 *
 * @param managerName the manager to query
 * @param roleInfo the role to edit
 */
protected void setInputData(String managerName, RoleInfo roleInfo) {
    this.inputData = roleInfo;

    // Fetch the full user list from the manager.
    java.util.List<UserInfo> allUserList = null;
    try {
        AccessEndpointWrapper wrapper = AccessEndpointWrapper.getWrapper(managerName);
        allUserList = wrapper.getUserInfoList();
        // Sort users by user ID.
        Collections.sort(allUserList, new Comparator<UserInfo>() {
            @Override
            public int compare(UserInfo o1, UserInfo o2) {
                return o1.getUserId().compareTo(o2.getUserId());
            }
        });
    } catch (InvalidRole_Exception e) {
        // The current user lacks the required role.
        MessageDialog.openInformation(null, Messages.getString("message"),
                Messages.getString("message.accesscontrol.16"));
    } catch (Exception e) {
        // Unexpected failure.
        m_log.warn("getOwnUserList(), " + HinemosMessage.replace(e.getMessage()), e);
        MessageDialog.openError(null, Messages.getString("failed"),
                Messages.getString("message.hinemos.failure.unexpected") + ", "
                        + HinemosMessage.replace(e.getMessage()));
    }
    if (allUserList == null)
        throw new InternalError("allUserList is null");

    // Split users into role members and non-members, keyed by a "name(id)" display label.
    String listName = null;
    for (UserInfo userInfo : allUserList) {
        listName = userInfo.getUserName() + "(" + userInfo.getUserId() + ")";
        if (roleInfo.getUserList().contains(userInfo.getUserId())) {
            this.listRoleUser.add(listName);
            this.mapRoleUser.put(listName, userInfo.getUserId());
        } else {
            this.listNotRoleUser.add(listName);
            this.mapNotRoleUser.put(listName, userInfo.getUserId());
        }
    }
}
From source file:edu.cornell.med.icb.goby.modes.FalseDiscoveryRateMode.java
private void combineVCF(String[] inputFiles, DifferentialExpressionResults data,
        DifferentialExpressionCalculator deCalculator, ObjectList<String> columnIdList, Writer writer)
        throws IOException {
    Columns columns = new Columns();
    ObjectArrayList<String> sampleIdList = new ObjectArrayList();
    for (String filename : inputFiles) {
        VCFParser parser = new VCFParser(filename);
        try {
            try {
                parser.readHeader();
            } catch (VCFParser.SyntaxException e) {
                throw new InternalError("this syntax error should have been caught in the first pass.");
            }
            Columns fileColumns = parser.getColumns();
            for (ColumnInfo col : fileColumns) {
                if (columns.find(col.getColumnName()) == null) {
                    columns.add(col);
                    if (col.useFormat) {
                        final String sampleId = col.getColumnName();
                        if (!sampleIdList.contains(sampleId)) {
                            sampleIdList.add(sampleId);
                        }
                    }
                }
            }
        } finally {
            parser.close();
        }
    }
    VCFWriter vcfWriter = new VCFWriter(writer);
    vcfWriter.defineSchema(columns);
    vcfWriter.defineSamples(sampleIdList.toArray(new String[sampleIdList.size()]));
    Int2IntMap statIndexToInfoFieldIndex = new Int2IntOpenHashMap();
    // add adjusted columns:
    ColumnInfo infoColumn = columns.find("INFO");
    int statIndex = 0;
    for (String fieldName : adjustedColumnIds) {
        vcfWriter.defineField("INFO", fieldName, 1, ColumnType.Float,
                String.format("Benjamini Hochberg FDR adjusted for column %s.", fieldName));
        statIndexToInfoFieldIndex.put(statIndex++, vcfWriter.getNumInfoFields() - 1);
    }
    vcfWriter.writeHeader();
    int elementIndex = 0;
    for (String filename : inputFiles) {
        System.out.printf("Combining %s%n", filename);
        VCFParser parser = new VCFParser(filename);
        try {
            try {
                parser.readHeader();
            } catch (VCFParser.SyntaxException e) {
                throw new InternalError("this syntax error should have been caught in the first pass.");
            }
            final int chromosomeFieldIndex = columns.find("CHROM").getField("VALUE").globalFieldIndex;
            final int positionFieldIndex = columns.find("POS").getField("VALUE").globalFieldIndex;
            final int idFieldIndex = columns.find("ID").getField("VALUE").globalFieldIndex;
            final int refFieldIndex = columns.find("REF").getField("VALUE").globalFieldIndex;
            final int altFieldIndex = columns.find("ALT").getField("VALUE").globalFieldIndex;
            final int qualFieldIndex = columns.find("QUAL").getField("VALUE").globalFieldIndex;
            final int filterFieldIndex = columns.find("FILTER").getField("VALUE").globalFieldIndex;
            final IntSet infoFieldGlobalIndices = new IntArraySet();
            for (ColumnField infoField : parser.getColumns().find("INFO").fields) {
                infoFieldGlobalIndices.add(infoField.globalFieldIndex);
            }
            final IntSet formatFieldGlobalIndices = new IntArraySet();
            Int2IntMap globalIndexToSampleIndex = new Int2IntOpenHashMap();
            int sampleIndex = 0;
            for (ColumnInfo col : columns) {
                if (col.useFormat) {
                    for (ColumnField field : col.fields) {
                        globalIndexToSampleIndex.put(field.globalFieldIndex, sampleIndex++);
                        formatFieldGlobalIndices.add(field.globalFieldIndex);
                    }
                }
            }
            int infoFieldIndex = 0;
            int formatFieldCount = 0;
            int previousSampleIndex = -1;
            while (parser.hasNextDataLine()) {
                final String elementId = Integer.toString(elementIndex);
                boolean keepThisLine = false;
                if (elementPermutation.containsKey(elementIndex)) {
                    final int permutedIndex = elementPermutation.get(elementIndex);
                    final DifferentialExpressionInfo differentialExpressionInfo = data.get(permutedIndex);
                    assert Integer.parseInt(differentialExpressionInfo.getElementId().toString())
                            == elementIndex : " element index must match with data element retrieved";
                    for (final String adjustedColumn : adjustedColumnIds) {
                        final int adjustedColumnIndex = data.getStatisticIndex(adjustedColumn);
                        final DoubleArrayList list = differentialExpressionInfo.statistics();
                        final double adjustedPValue = list.get(adjustedColumnIndex);
                        keepThisLine = determineKeepThisLine(keepThisLine, adjustedPValue);
                        if (topHitNum != 0 && topHitsElementIndices.isEmpty()) {
                            // early stop: there are no q-values below the threshold and we have seen enough top hits already.
                            break;
                        }
                        if (topHitNum != 0 && topHitsElementIndices.contains(elementIndex)) {
                            // the q-value is not good enough for the threshold, but we want to include up to top hits in the results:
                            keepThisLine = true;
                            topHitsElementIndices.remove(elementIndex);
                        }
                    }
                    if (adjustedColumnIds.size() == 0) {
                        // no selection column, keep all lines.
                        keepThisLine = true;
                    }
                    if (keepThisLine) {
                        final DifferentialExpressionInfo info = differentialExpressionInfo;
                        assert info.getElementId().equals(elementId) : " elementId must match";
                        // transfer previous columns and fields:
                        infoFieldIndex = 0;
                        sampleIndex = 0;
                        formatFieldCount = 0;
                        previousSampleIndex = -1;
                        String format = parser.getStringColumnValue(columns.find("FORMAT").columnIndex);
                        final String[] formatTokens = format.split(":");
                        int numFormatFields = columns.find("FORMAT").fields.size();
                        sampleIndex = 0;
                        int formatFieldIndex = 0;
                        for (int globalFieldIndex = 0; globalFieldIndex < parser.countAllFields(); globalFieldIndex++) {
                            String value = parser.getStringFieldValue(globalFieldIndex);
                            if (globalFieldIndex == chromosomeFieldIndex) {
                                vcfWriter.setChromosome(value);
                            } else if (globalFieldIndex == positionFieldIndex) {
                                vcfWriter.setPosition(Integer.parseInt(value));
                            } else if (globalFieldIndex == idFieldIndex) {
                                vcfWriter.setId(value);
                            } else if (globalFieldIndex == refFieldIndex) {
                                vcfWriter.setReferenceAllele(value);
                            } else if (globalFieldIndex == altFieldIndex) {
                                vcfWriter.setAlternateAllele(value);
                            } else if (globalFieldIndex == qualFieldIndex) {
                                vcfWriter.setQual(value);
                            } else if (globalFieldIndex == filterFieldIndex) {
                                vcfWriter.setFilter(value);
                            }
                            if (infoFieldGlobalIndices.contains(globalFieldIndex)) {
                                vcfWriter.setInfo(infoFieldIndex++, value);
                            }
                            if (formatFieldGlobalIndices.contains(globalFieldIndex)) {
                                /*
                                System.out.printf("Set sampleValue formatIndex: %d sampleIndex: %d value: %s%n",
                                        formatFieldCount, sampleIndex, value);
                                System.out.flush();
                                */
                                if (formatFieldIndex < formatTokens.length) {
                                    if (!"".equals(formatTokens[formatFieldIndex])) {
                                        vcfWriter.setSampleValue(formatTokens[formatFieldIndex], sampleIndex, value);
                                    }
                                }
                                formatFieldCount++;
                                if (value.length() != 0) {
                                    formatFieldIndex++;
                                }
                                if (formatFieldCount == numFormatFields) {
                                    formatFieldIndex = 0;
                                    formatFieldCount = 0;
                                    sampleIndex++;
                                }
                            }
                        }
                        // add new INFO field values (the adjusted p-values):
                        statIndex = 0;
                        for (String adjustedColumn : adjustedColumnIds) {
                            int adjustedColumnIndex = data.getStatisticIndex(adjustedColumn);
                            final DoubleArrayList list = differentialExpressionInfo.statistics();
                            double newColValue = list.get(adjustedColumnIndex);
                            vcfWriter.setInfo(statIndexToInfoFieldIndex.get(statIndex), Double.toString(newColValue));
                            statIndex++;
                        }
                        // This is a line we keep, write it:
                        vcfWriter.writeRecord();
                    }
                }
                elementIndex++;
                parser.next();
            }
        } finally {
            parser.close();
        }
    }
}
From source file:com.swordlord.jalapeno.datacontainer.DataContainer.java
/** * Gets the table.//from w w w.j a va2 s . c o m * * @param clazz the clazz * * @return the table */ public DataTableBase getTable(Class<? extends DataTableBase> clazz) { if (clazz == null) { throw new InternalError("Table is null (clazz)"); } String strTableId = null; try { Constructor<? extends DataTableBase> ct = clazz.getConstructor(DataContainer.class); DataTableBase tab = ct.newInstance(this); strTableId = tab.getAbsoluteTableName(); } catch (InstantiationException ex) { LOG.error(ex); } catch (IllegalAccessException ex) { LOG.error(ex); } catch (NoSuchMethodException ex) { LOG.error(ex); } catch (InvocationTargetException ex) { LOG.error(ex); } return getTableByAbsoluteName(strTableId); }
From source file:com.clust4j.algo.HierarchicalAgglomerative.java
static int[] hcCut(final int n_clusters, final double[][] children, final int n_leaves) {
    /*
     * Leave children as a double[][] despite it
     * being ints. This will allow VecUtils to operate
     */
    if (n_clusters > n_leaves)
        throw new InternalError(n_clusters + " > " + n_leaves);

    // Init nodes
    SimpleHeap<Integer> nodes = new SimpleHeap<>(-((int) VecUtils.max(children[children.length - 1]) + 1));

    for (int i = 0; i < n_clusters - 1; i++) {
        int inner_idx = -nodes.get(0) - n_leaves;
        if (inner_idx < 0)
            inner_idx = children.length + inner_idx;

        double[] these_children = children[inner_idx];
        nodes.push(-((int) these_children[0]));
        nodes.pushPop(-((int) these_children[1]));
    }

    int i = 0;
    final int[] labels = new int[n_leaves];
    for (Integer node : nodes) {
        Integer[] descendants = hcGetDescendents(-node, children, n_leaves);

        for (Integer desc : descendants)
            labels[desc] = i;

        i++;
    }

    return labels;
}
From source file:com.swordlord.jalapeno.datacontainer.DataContainer.java
/**
 * Gets the table by absolute name.
 *
 * @param strId the str id
 *
 * @return the table by absolute name
 */
@SuppressWarnings("unchecked")
public DataTableBase getTableByAbsoluteName(String strId) {
    if (strId == null) {
        throw new InternalError("Tablename is null (strId)");
    }

    if (!_dataTables.contains(strId)) {
        // if not exists, create!
        try {
            Class<DataTableBase> clazz = (Class<DataTableBase>) Class.forName(strId);
            Constructor<DataTableBase> ct = clazz.getConstructor(DataContainer.class);
            DataTableBase tab = ct.newInstance(this);
            if (tab != null) {
                addTable(tab);
            }
        } catch (ClassNotFoundException ex) {
            LOG.error(ex);
        } catch (InstantiationException ex) {
            LOG.error(ex);
        } catch (IllegalAccessException ex) {
            LOG.error(ex);
        } catch (NoSuchMethodException ex) {
            LOG.error(ex);
        } catch (InvocationTargetException ex) {
            LOG.error(ex);
        }
    }

    return _dataTables.get(strId);
}
From source file:org.projectforge.core.ConfigXml.java
/**
 * Copies only not null values of the configuration.
 */
private static void copyDeclaredFields(final String prefix, final Class<?> srcClazz, final Object src,
        final Object dest, final String... ignoreFields) {
    final Field[] fields = srcClazz.getDeclaredFields();
    AccessibleObject.setAccessible(fields, true);
    for (final Field field : fields) {
        if (ignoreFields != null && ArrayUtils.contains(ignoreFields, field.getName()) == false && accept(field)) {
            try {
                final Object srcFieldValue = field.get(src);
                if (srcFieldValue == null) {
                    // Do nothing
                } else if (srcFieldValue instanceof ConfigurationData) {
                    final Object destFieldValue = field.get(dest);
                    Validate.notNull(destFieldValue);
                    final StringBuffer buf = new StringBuffer();
                    if (prefix != null) {
                        buf.append(prefix);
                    }
                    String alias = null;
                    if (field.isAnnotationPresent(XmlField.class)) {
                        final XmlField xmlFieldAnn = field.getAnnotation(XmlField.class);
                        if (xmlFieldAnn != null) {
                            alias = xmlFieldAnn.alias();
                        }
                    }
                    if (alias != null) {
                        buf.append(alias);
                    } else {
                        buf.append(field.getClass().getName());
                    }
                    buf.append(".");
                    copyDeclaredFields(buf.toString(), srcFieldValue.getClass(), srcFieldValue, destFieldValue,
                            ignoreFields);
                } else if (PLUGIN_CONFIGS_FIELD_NAME.equals(field.getName()) == true) {
                    // Do nothing.
                } else {
                    field.set(dest, srcFieldValue);
                    if (StringHelper.isIn(field.getName(), "receiveSmsKey", "phoneLookupKey") == true) {
                        log.info(StringUtils.defaultString(prefix) + field.getName() + " = ****");
                    } else {
                        log.info(StringUtils.defaultString(prefix) + field.getName() + " = " + srcFieldValue);
                    }
                }
            } catch (final IllegalAccessException ex) {
                throw new InternalError("Unexpected IllegalAccessException: " + ex.getMessage());
            }
        }
    }
    final Class<?> superClazz = srcClazz.getSuperclass();
    if (superClazz != null) {
        copyDeclaredFields(prefix, superClazz, src, dest, ignoreFields);
    }
}
From source file:org.bdval.DiscoverWithGeneticAlgorithm.java
@Override
public void process(final DAVOptions options) {
    super.process(options);
    // final Table inputTable = options.inputTable;
    System.out.println("Optimizing " + (this.optimizeMeasureName));
    for (final ClassificationTask task : options.classificationTasks) {
        for (final GeneList geneList : options.geneLists) {
            int reducedNumProbeset;
            int iteration = 1;
            System.out.println("Processing gene list: " + geneList.toString());
            try {
                int numKept;
                GeneList activeGeneList = geneList;
                IntSet sourceSet = new IntArraySet();
                IntSet previousSubset = null;
                IntSet fitestSubSet = null;
                double[] fitestParamValues = ArrayUtils.EMPTY_DOUBLE_ARRAY;
                options.trainingPlatform = new GEOPlatformIndexed();
                Table processedTable = processTable(activeGeneList, options.inputTable, options,
                        MicroarrayTrainEvaluate.calculateLabelValueGroups(task));
                // try to free some memory (we cannot call processTable anymore after this:)
                options.inputTable = null;
                System.gc();
                do {
                    System.out.println("Discover markers with GA wrapper for " + task);
                    final EvaluationMeasure evalMeasure;
                    if (iteration == 1) {
                        // FIRST ITERATION
                        final ClassificationHelper helper = getClassifier(processedTable,
                                MicroarrayTrainEvaluate.calculateLabelValueGroups(task));
                        final RandomEngine randomEngine = new MersenneTwister(options.randomSeed);
                        final CrossValidation CV = new CrossValidation(helper.classifier, helper.problem,
                                randomEngine);
                        final boolean useR = optimizationRequiresR();
                        CV.useRServer(useR); // use R server only when required.
                        CV.setRepeatNumber(cvRepeatNumber);
                        CV.setScalerClass(options.scalerClass);
                        if (useRServer) {
                            CV.evaluateMeasure(optimizeMeasureName);
                        }
                        // CV.setScalerClass(PercentileScalingRowProcessor.class);
                        evalMeasure = CV.crossValidation(foldNumber);
                        System.out.println("Initial " + getPerformanceMeasureName() + " measure: "
                                + getOptimizationMeasure(evalMeasure));
                        numKept = options.trainingPlatform.getProbeIds().size();
                        for (int i = 0; i < numKept; i++) {
                            sourceSet.add(i);
                        }
                        inputIdentifiers = options.trainingPlatform;
                        if (numKept <= numProbesets) {
                            LOG.error("Cannot remove probesets, already below the target number.");
                            fitestSubSet = sourceSet;
                            break;
                        }
                    } else {
                        numKept = previousSubset.size();
                        sourceSet = previousSubset;
                    }
                    localOptions = options;
                    System.out.println("Num probeset as input : " + numKept);
                    final SubSetFitnessFunction convergenceCriterion;
                    final Table processedTableConstant = processedTable;
                    convergenceCriterion = new AbstractSubSetFitnessFunction() {
                        @Override
                        public double evaluate(final IntSet subset, final double[] paramValues) {
                            try {
                                GeneList geneListFromSubset = convertSubsetToGeneList(subset);
                                Table filteredTable = filterTable(options, processedTableConstant,
                                        geneListFromSubset);
                                ClassificationHelper helper = getClassifier(filteredTable,
                                        MicroarrayTrainEvaluate.calculateLabelValueGroups(task));
                                int paramIndex = 0;
                                for (final String parameterName : getParameterNames(discreteParameters)) {
                                    helper.parameters.setParameter(parameterName, paramValues[paramIndex++]);
                                }
                                helper.classifier.setParameters(helper.parameters);
                                RandomEngine randomEngine = new MersenneTwister(options.randomSeed);
                                CrossValidation CV = new CrossValidation(helper.classifier, helper.problem,
                                        randomEngine);
                                final boolean useR = optimizationRequiresR();
                                CV.useRServer(useR);
                                CV.setRepeatNumber(cvRepeatNumber);
                                CV.setScalerClass(options.scalerClass);
                                CV.evaluateMeasure(optimizeMeasureName);
                                final EvaluationMeasure eMeasure = CV.crossValidation(foldNumber);
                                geneListFromSubset = null;
                                filteredTable = null;
                                helper = null;
                                randomEngine = null;
                                CV = null;
                                final double measure = getOptimizationMeasure(eMeasure);
                                LOG.info(task.getExperimentDataFilename() + " evaluated "
                                        + getPerformanceMeasureName() + " " + measure);
                                // double std = getOptimizationMeasureStd(eMeasure);
                                // return measure - std;
                                return measure;
                            } catch (InvalidColumnException e) {
                                LOG.error(e);
                                System.exit(10);
                            } catch (TypeMismatchException e) {
                                LOG.error(e);
                                System.exit(10);
                            }
                            throw new InternalError("This statement should never be reached.");
                        }
                    };
                    reducedNumProbeset = Math.max(numProbesets, (int) (numKept * ratio));
                    if (reducedNumProbeset == numKept) {
                        System.out.println("Previous step optimized the same number of probesets already.");
                        break;
                    } else {
                        System.out.println("Optimizing to keep " + reducedNumProbeset + " probesets.");
                    }
                    final OptimizeSubSet optimizationEngine = new OptimizeSubSet(sourceSet, reducedNumProbeset,
                            convergenceCriterion, populationSize, discreteParameters);
                    optimizationEngine.setModuloProgressReport(2);
                    optimizationEngine.optimize(numOptimizationStepsPerIteration, .001);
                    fitestSubSet = optimizationEngine.getFitestSubset();
                    fitestParamValues = optimizationEngine.getFitestParameterValues();
                    previousSubset = fitestSubSet;
                    // use as gene list for the next iteration: the restriction of the previous gene list
                    // to those probesets that were found to have the best F-1 measure in this iteration.
                    activeGeneList = convertSubsetToGeneList(fitestSubSet);
                    System.out.println("numKept: " + numKept + " reducedNumber: " + reducedNumProbeset);
                    if (!writeGeneListFormat) {
                        // print intermediate result for this iteration.
                        printFeatures(options, iteration, fitestSubSet,
                                convergenceCriterion.evaluate(fitestSubSet, fitestParamValues));
                    }
                    iteration++;
                    final GeneList geneListFromSubset = convertSubsetToGeneList(fitestSubSet);
                    final Table fitestTable = filterTable(options, processedTable, geneListFromSubset);
                    processedTable = fitestTable;
                    // hint to the JVM that this would be a good time to garbage collect.
                    System.gc();
                } while (numKept > numProbesets);
                printFeatures(options, iteration, fitestSubSet, 0);
                printFitestParamValues(options, fitestParamValues, getParameterNames(discreteParameters));
            } catch (Exception e) {
                LOG.error(e);
                e.printStackTrace();
                System.exit(10);
            }
        }
    }
}