List of usage examples for java.lang Double compare
public static int compare(double d1, double d2)
From source file:edu.harvard.iq.dataverse.ingest.IngestServiceBean.java
private void processDatasetMetadata(FileMetadataIngest fileMetadataIngest, DatasetVersion editVersion) throws IOException { for (MetadataBlock mdb : editVersion.getDataset().getOwner().getMetadataBlocks()) { if (mdb.getName().equals(fileMetadataIngest.getMetadataBlockName())) { logger.fine("Ingest Service: dataset version has " + mdb.getName() + " metadata block enabled."); editVersion.setDatasetFields(editVersion.initDatasetFields()); Map<String, Set<String>> fileMetadataMap = fileMetadataIngest.getMetadataMap(); for (DatasetFieldType dsft : mdb.getDatasetFieldTypes()) { if (dsft.isPrimitive()) { if (!dsft.isHasParent()) { String dsfName = dsft.getName(); // See if the plugin has found anything for this field: if (fileMetadataMap.get(dsfName) != null && !fileMetadataMap.get(dsfName).isEmpty()) { logger.fine("Ingest Service: found extracted metadata for field " + dsfName); // go through the existing fields: for (DatasetField dsf : editVersion.getFlatDatasetFields()) { if (dsf.getDatasetFieldType().equals(dsft)) { // yep, this is our field! // let's go through the values that the ingest // plugin found in the file for this field: Set<String> mValues = fileMetadataMap.get(dsfName); // Special rules apply to aggregation of values for // some specific fields - namely, the resolution.* // fields from the Astronomy Metadata block. // TODO: rather than hard-coded, this needs to be // programmatically defined. -- L.A. 4.0 if (dsfName.equals("resolution.Temporal") || dsfName.equals("resolution.Spatial") || dsfName.equals("resolution.Spectral")) { // For these values, we aggregate the minimum-maximum // pair, for the entire set. // So first, we need to go through the values found by // the plugin and select the min. and max. values of // these: // (note that we are assuming that they all must // validate as doubles!) 
Double minValue = null; Double maxValue = null; for (String fValue : mValues) { try { double thisValue = Double.parseDouble(fValue); if (minValue == null || Double.compare(thisValue, minValue) < 0) { minValue = thisValue; } if (maxValue == null || Double.compare(thisValue, maxValue) > 0) { maxValue = thisValue; } } catch (NumberFormatException e) { } } // Now let's see what aggregated values we // have stored already: // (all of these resolution.* fields have allowedMultiple set to FALSE, // so there can be only one!) //logger.fine("Min value: "+minValue+", Max value: "+maxValue); if (minValue != null && maxValue != null) { Double storedMinValue = null; Double storedMaxValue = null; String storedValue = ""; if (dsf.getDatasetFieldValues() != null && dsf.getDatasetFieldValues().get(0) != null) { storedValue = dsf.getDatasetFieldValues().get(0).getValue(); if (storedValue != null && !storedValue.equals("")) { try { if (storedValue.indexOf(" - ") > -1) { storedMinValue = Double.parseDouble(storedValue .substring(0, storedValue.indexOf(" - "))); storedMaxValue = Double.parseDouble(storedValue .substring(storedValue.indexOf(" - ") + 3)); } else { storedMinValue = Double.parseDouble(storedValue); storedMaxValue = storedMinValue; } if (storedMinValue != null && storedMinValue.compareTo(minValue) < 0) { minValue = storedMinValue; } if (storedMaxValue != null && storedMaxValue.compareTo(maxValue) > 0) { maxValue = storedMaxValue; } } catch (NumberFormatException e) { } } else { storedValue = ""; } } //logger.fine("Stored min value: "+storedMinValue+", Stored max value: "+storedMaxValue); String newAggregateValue = ""; if (minValue.equals(maxValue)) { newAggregateValue = minValue.toString(); } else { newAggregateValue = minValue.toString() + " - " + maxValue.toString(); } // finally, compare it to the value we have now: if (!storedValue.equals(newAggregateValue)) { if (dsf.getDatasetFieldValues() == null) { dsf.setDatasetFieldValues( new ArrayList<DatasetFieldValue>()); } if 
(dsf.getDatasetFieldValues().get(0) == null) { DatasetFieldValue newDsfv = new DatasetFieldValue(dsf); dsf.getDatasetFieldValues().add(newDsfv); } dsf.getDatasetFieldValues().get(0).setValue(newAggregateValue); } } // Ouch. } else { // Other fields are aggregated simply by // collecting a list of *unique* values encountered // for this Field throughout the dataset. // This means we need to only add the values *not yet present*. // (the implementation below may be inefficient - ?) for (String fValue : mValues) { if (!dsft.isControlledVocabulary()) { Iterator<DatasetFieldValue> dsfvIt = dsf.getDatasetFieldValues() .iterator(); boolean valueExists = false; while (dsfvIt.hasNext()) { DatasetFieldValue dsfv = dsfvIt.next(); if (fValue.equals(dsfv.getValue())) { logger.fine("Value " + fValue + " already exists for field " + dsfName); valueExists = true; break; } } if (!valueExists) { logger.fine("Creating a new value for field " + dsfName + ": " + fValue); DatasetFieldValue newDsfv = new DatasetFieldValue(dsf); newDsfv.setValue(fValue); dsf.getDatasetFieldValues().add(newDsfv); } } else { // A controlled vocabulary entry: // first, let's see if it's a legit control vocab. entry: ControlledVocabularyValue legitControlledVocabularyValue = null; Collection<ControlledVocabularyValue> definedVocabularyValues = dsft .getControlledVocabularyValues(); if (definedVocabularyValues != null) { for (ControlledVocabularyValue definedVocabValue : definedVocabularyValues) { if (fValue.equals(definedVocabValue.getStrValue())) { logger.fine("Yes, " + fValue + " is a valid controlled vocabulary value for the field " + dsfName); legitControlledVocabularyValue = definedVocabValue; break; } } } if (legitControlledVocabularyValue != null) { // Only need to add the value if it is new, // i.e. 
if it does not exist yet: boolean valueExists = false; List<ControlledVocabularyValue> existingControlledVocabValues = dsf .getControlledVocabularyValues(); if (existingControlledVocabValues != null) { Iterator<ControlledVocabularyValue> cvvIt = existingControlledVocabValues .iterator(); while (cvvIt.hasNext()) { ControlledVocabularyValue cvv = cvvIt.next(); if (fValue.equals(cvv.getStrValue())) { // or should I use if (legitControlledVocabularyValue.equals(cvv)) ? logger.fine("Controlled vocab. value " + fValue + " already exists for field " + dsfName); valueExists = true; break; } } } if (!valueExists) { logger.fine("Adding controlled vocabulary value " + fValue + " to field " + dsfName); dsf.getControlledVocabularyValues() .add(legitControlledVocabularyValue); } } } } }// w w w . ja va 2 s . c o m } } } } } else { // A compound field: // See if the plugin has found anything for the fields that // make up this compound field; if we find at least one // of the child values in the map of extracted values, we'll // create a new compound field value and its child // DatasetFieldCompoundValue compoundDsfv = new DatasetFieldCompoundValue(); int nonEmptyFields = 0; for (DatasetFieldType cdsft : dsft.getChildDatasetFieldTypes()) { String dsfName = cdsft.getName(); if (fileMetadataMap.get(dsfName) != null && !fileMetadataMap.get(dsfName).isEmpty()) { logger.fine("Ingest Service: found extracted metadata for field " + dsfName + ", part of the compound field " + dsft.getName()); if (cdsft.isPrimitive()) { // probably an unnecessary check - child fields // of compound fields are always primitive... // but maybe it'll change in the future. if (!cdsft.isControlledVocabulary()) { // TODO: can we have controlled vocabulary // sub-fields inside compound fields? 
DatasetField childDsf = new DatasetField(); childDsf.setDatasetFieldType(cdsft); DatasetFieldValue newDsfv = new DatasetFieldValue(childDsf); newDsfv.setValue((String) fileMetadataMap.get(dsfName).toArray()[0]); childDsf.getDatasetFieldValues().add(newDsfv); childDsf.setParentDatasetFieldCompoundValue(compoundDsfv); compoundDsfv.getChildDatasetFields().add(childDsf); nonEmptyFields++; } } } } if (nonEmptyFields > 0) { // let's go through this dataset's fields and find the // actual parent for this sub-field: for (DatasetField dsf : editVersion.getFlatDatasetFields()) { if (dsf.getDatasetFieldType().equals(dsft)) { // Now let's check that the dataset version doesn't already have // this compound value - we are only interested in aggregating // unique values. Note that we need to compare compound values // as sets! -- i.e. all the sub fields in 2 compound fields // must match in order for these 2 compounds to be recognized // as "the same": boolean alreadyExists = false; for (DatasetFieldCompoundValue dsfcv : dsf.getDatasetFieldCompoundValues()) { int matches = 0; for (DatasetField cdsf : dsfcv.getChildDatasetFields()) { String cdsfName = cdsf.getDatasetFieldType().getName(); String cdsfValue = cdsf.getDatasetFieldValues().get(0).getValue(); if (cdsfValue != null && !cdsfValue.equals("")) { String extractedValue = (String) fileMetadataMap.get(cdsfName) .toArray()[0]; logger.fine("values: existing: " + cdsfValue + ", extracted: " + extractedValue); if (cdsfValue.equals(extractedValue)) { matches++; } } } if (matches == nonEmptyFields) { alreadyExists = true; break; } } if (!alreadyExists) { // save this compound value, by attaching it to the // version for proper cascading: compoundDsfv.setParentDatasetField(dsf); dsf.getDatasetFieldCompoundValues().add(compoundDsfv); } } } } } } } } }
From source file:org.esa.s1tbx.calibration.gpf.calibrators.ERSCalibrator.java
/** * Compute distance from satellite to the Earth center using satellite corrodinate in Metadata. * * @return The distance.// w w w. j a v a 2s. c o m */ private double getSatelliteToEarthCenterDistanceForENVISAT() { final MetadataElement orbit_state_vectors = absRoot.getElement(AbstractMetadata.orbit_state_vectors); final MetadataElement orbit_vector = orbit_state_vectors.getElement(AbstractMetadata.orbit_vector + 3); final float xpos = (float) orbit_vector.getAttributeDouble("x_pos"); final float ypos = (float) orbit_vector.getAttributeDouble("y_pos"); final float zpos = (float) orbit_vector.getAttributeDouble("z_pos"); final double rSat = Math.sqrt(xpos * xpos + ypos * ypos + zpos * zpos); // in m if (Double.compare(rSat, 0.0) == 0) { throw new OperatorException("x, y and z positions in orbit_state_vectors are all zeros"); } return rSat; }
From source file:org.esa.nest.gpf.ERSCalibrator.java
/** * Compute distance from satellite to the Earth center using satellite corrodinate in Metadata. * * @return The distance./* w w w .j ava 2 s . co m*/ */ private double getSatelliteToEarthCenterDistanceForENVISAT() { final MetadataElement mppAds = origMetadataRoot.getElement("MAIN_PROCESSING_PARAMS_ADS"); if (mppAds == null) { throw new OperatorException("MAIN_PROCESSING_PARAMS_ADS not found"); } MetadataElement ads; if (numMPPRecords == 1) { ads = mppAds; } else { ads = mppAds.getElement("MAIN_PROCESSING_PARAMS_ADS." + 1); } final MetadataAttribute xPositionAttr = ads.getAttribute("ASAR_Main_ADSR.sd/orbit_state_vectors.3.x_pos_1"); if (xPositionAttr == null) { throw new OperatorException("x_pos_1 not found"); } final float x_pos = xPositionAttr.getData().getElemInt() / 100.0f; // divide 100 to convert unit from 10^-2 m to m //System.out.println("x position is " + x_pos); final MetadataAttribute yPositionAttr = ads.getAttribute("ASAR_Main_ADSR.sd/orbit_state_vectors.3.y_pos_1"); if (yPositionAttr == null) { throw new OperatorException("y_pos_1 not found"); } final float y_pos = yPositionAttr.getData().getElemInt() / 100.0f; // divide 100 to convert unit from 10^-2 m to m //System.out.println("y position is " + y_pos); final MetadataAttribute zPositionAttr = ads.getAttribute("ASAR_Main_ADSR.sd/orbit_state_vectors.3.z_pos_1"); if (zPositionAttr == null) { throw new OperatorException("z_pos_1 not found"); } final float z_pos = zPositionAttr.getData().getElemInt() / 100.0f; // divide 100 to convert unit from 10^-2 m to m //System.out.println("z position is " + z_pos); final double rSat = Math.sqrt(x_pos * x_pos + y_pos * y_pos + z_pos * z_pos); // in m if (Double.compare(rSat, 0.0) == 0) { throw new OperatorException("x, y and z positions in orbit_state_vectors are all zeros"); } return rSat; }
From source file:org.sakaiproject.tool.assessment.services.GradingService.java
public void updateAssessmentGradingScore(AssessmentGradingData adata, PublishedAssessmentIfc pub) { try {//from www.j a v a2 s .c o m Set itemGradingSet = adata.getItemGradingSet(); Iterator iter = itemGradingSet.iterator(); double totalAutoScore = 0; double totalOverrideScore = adata.getTotalOverrideScore().doubleValue(); while (iter.hasNext()) { ItemGradingData i = (ItemGradingData) iter.next(); if (i.getAutoScore() != null) totalAutoScore += i.getAutoScore().doubleValue(); } double oldAutoScore = adata.getTotalAutoScore().doubleValue(); double scoreDifference = totalAutoScore - oldAutoScore; adata.setTotalAutoScore(Double.valueOf(totalAutoScore)); if (Double.compare((totalAutoScore + totalOverrideScore), Double.valueOf("0").doubleValue()) < 0) { adata.setFinalScore(Double.valueOf("0")); } else { adata.setFinalScore(Double.valueOf(totalAutoScore + totalOverrideScore)); } saveOrUpdateAssessmentGrading(adata); if (scoreDifference != 0) { notifyGradebookByScoringType(adata, pub); } } catch (GradebookServiceException ge) { ge.printStackTrace(); throw ge; } catch (Exception e) { e.printStackTrace(); throw new RuntimeException(e); } }
From source file:blusunrize.immersiveengineering.client.ClientProxy.java
public static void handleMineralManual() { if (ManualHelper.getManual() != null) { ArrayList<IManualPage> pages = new ArrayList(); pages.add(new ManualPages.Text(ManualHelper.getManual(), "minerals0")); pages.add(new ManualPages.Crafting(ManualHelper.getManual(), "minerals1", new ItemStack(IEContent.blockMetalDevice1, 1, BlockTypes_MetalDevice1.SAMPLE_DRILL.getMeta()))); pages.add(new ManualPages.Text(ManualHelper.getManual(), "minerals2")); final ExcavatorHandler.MineralMix[] minerals = ExcavatorHandler.mineralList.keySet() .toArray(new ExcavatorHandler.MineralMix[0]); ArrayList<Integer> mineralIndices = new ArrayList(); for (int i = 0; i < minerals.length; i++) if (minerals[i].isValid()) mineralIndices.add(i);/* w ww. ja v a 2 s. c o m*/ Collections.sort(mineralIndices, new Comparator<Integer>() { @Override public int compare(Integer paramT1, Integer paramT2) { String name1 = Lib.DESC_INFO + "mineral." + minerals[paramT1].name; String localizedName1 = I18n.format(name1); if (localizedName1 == name1) localizedName1 = minerals[paramT1].name; String name2 = Lib.DESC_INFO + "mineral." + minerals[paramT2].name; String localizedName2 = I18n.format(name2); if (localizedName2 == name2) localizedName2 = minerals[paramT2].name; return localizedName1.compareToIgnoreCase(localizedName2); } }); for (int i : mineralIndices) { String name = Lib.DESC_INFO + "mineral." + minerals[i].name; String localizedName = I18n.format(name); if (localizedName.equalsIgnoreCase(name)) localizedName = minerals[i].name; String s0 = ""; if (minerals[i].dimensionWhitelist != null && minerals[i].dimensionWhitelist.length > 0) { String validDims = ""; for (int dim : minerals[i].dimensionWhitelist) validDims += (!validDims.isEmpty() ? 
", " : "") + "<dim;" + dim + ">"; s0 = I18n.format("ie.manual.entry.mineralsDimValid", localizedName, validDims); } else if (minerals[i].dimensionBlacklist != null && minerals[i].dimensionBlacklist.length > 0) { String invalidDims = ""; for (int dim : minerals[i].dimensionBlacklist) invalidDims += (!invalidDims.isEmpty() ? ", " : "") + "<dim;" + dim + ">"; s0 = I18n.format("ie.manual.entry.mineralsDimInvalid", localizedName, invalidDims); } else s0 = I18n.format("ie.manual.entry.mineralsDimAny", localizedName); ArrayList<Integer> formattedOutputs = new ArrayList<Integer>(); for (int j = 0; j < minerals[i].oreOutput.size(); j++) formattedOutputs.add(j); final int fi = i; Collections.sort(formattedOutputs, new Comparator<Integer>() { @Override public int compare(Integer paramT1, Integer paramT2) { return -Double.compare(minerals[fi].recalculatedChances[paramT1], minerals[fi].recalculatedChances[paramT2]); } }); String s1 = ""; NonNullList<ItemStack> sortedOres = NonNullList.withSize(minerals[i].oreOutput.size(), ItemStack.EMPTY); for (int j = 0; j < formattedOutputs.size(); j++) if (!minerals[i].oreOutput.get(j).isEmpty()) { int sorted = formattedOutputs.get(j); s1 += "<br>" + new DecimalFormat("00.00").format(minerals[i].recalculatedChances[sorted] * 100) .replaceAll("\\G0", " ") + "% " + minerals[i].oreOutput.get(sorted).getDisplayName(); sortedOres.set(j, minerals[i].oreOutput.get(sorted)); } String s2 = I18n.format("ie.manual.entry.minerals3", s0, s1); pages.add(new ManualPages.ItemDisplay(ManualHelper.getManual(), s2, sortedOres)); } // String[][][] multiTables = formatToTable_ExcavatorMinerals(); // for(String[][] minTable : multiTables) // pages.add(new ManualPages.Table(ManualHelper.getManual(), "", minTable,true)); if (mineralEntry != null) mineralEntry.setPages(pages.toArray(new IManualPage[pages.size()])); else { ManualHelper.addEntry("minerals", ManualHelper.CAT_GENERAL, pages.toArray(new IManualPage[pages.size()])); mineralEntry = 
ManualHelper.getManual().getEntry("minerals"); } } }
From source file:com.joptimizer.optimizers.LPPresolver.java
/**
 * Tells whether the given lower bound is the sentinel "unbounded" value.
 *
 * @param lb the lower bound to test
 * @return true if {@code lb} equals {@code unboundedLBValue}
 */
public boolean isLBUnbounded(double lb) {
    // Bit-wise equality is equivalent to Double.compare(...) == 0
    // (NaN matches NaN; -0.0 and +0.0 stay distinct).
    return Double.doubleToLongBits(lb) == Double.doubleToLongBits(unboundedLBValue);
}
From source file:com.joptimizer.optimizers.LPPresolver.java
/**
 * Tells whether the given upper bound is the sentinel "unbounded" value.
 *
 * @param ub the upper bound to test
 * @return true if {@code ub} equals {@code unboundedUBValue}
 */
public boolean isUBUnbounded(double ub) {
    // Bit-wise equality is equivalent to Double.compare(...) == 0
    // (NaN matches NaN; -0.0 and +0.0 stay distinct).
    return Double.doubleToLongBits(ub) == Double.doubleToLongBits(unboundedUBValue);
}
From source file:com.joptimizer.optimizers.LPPresolver.java
/** * This method is just for testing scope. *//* w w w . j av a 2 s .c om*/ private void checkProgress(DoubleMatrix1D c, DoubleMatrix2D A, DoubleMatrix1D b, DoubleMatrix1D lb, DoubleMatrix1D ub, DoubleMatrix1D ylb, DoubleMatrix1D yub, DoubleMatrix1D zlb, DoubleMatrix1D zub) { if (this.expectedSolution == null) { return; } if (Double.isNaN(this.expectedTolerance)) { //for this to work properly, this method must be called at least one time before presolving operations start RealVector X = MatrixUtils.createRealVector(expectedSolution); RealMatrix AMatrix = MatrixUtils.createRealMatrix(A.toArray()); RealVector Bvector = MatrixUtils.createRealVector(b.toArray()); RealVector Axb = AMatrix.operate(X).subtract(Bvector); double norm = Axb.getNorm(); this.expectedTolerance = Math.max(1.e-7, 1.5 * norm); } double tolerance = this.expectedTolerance; log.debug("tolerance: " + tolerance); RealVector X = MatrixUtils.createRealVector(expectedSolution); RealMatrix AMatrix = MatrixUtils.createRealMatrix(A.toArray()); RealVector Bvector = MatrixUtils.createRealVector(b.toArray()); //logger.debug("A.X-b: " + ArrayUtils.toString(originalA.operate(X).subtract(originalB))); //nz rows for (int i = 0; i < vRowPositions.length; i++) { short[] vRowPositionsI = vRowPositions[i]; for (short nzJ : vRowPositionsI) { if (Double.compare(A.getQuick(i, nzJ), 0.) == 0) { log.debug("entry " + i + "," + nzJ + " est zero: " + A.getQuick(i, nzJ)); throw new IllegalStateException(); } } } //nz columns for (int j = 0; j < vColPositions.length; j++) { short[] vColPositionsJ = vColPositions[j]; for (short nzI : vColPositionsJ) { if (Double.compare(A.getQuick(nzI, j), 0.) == 0) { log.debug("entry (" + nzI + "," + j + ") est zero: " + A.getQuick(nzI, j)); throw new IllegalStateException(); } } } //nz Aij for (int i = 0; i < A.rows(); i++) { short[] vRowPositionsI = vRowPositions[i]; for (int j = 0; j < A.columns(); j++) { if (Double.compare(Math.abs(A.getQuick(i, j)), 0.) 
!= 0) { if (!ArrayUtils.contains(vRowPositionsI, (short) j)) { log.debug("entry " + i + "," + j + " est non-zero: " + A.getQuick(i, j)); throw new IllegalStateException(); } if (!ArrayUtils.contains(vColPositions[j], (short) i)) { log.debug("entry " + i + "," + j + " est non-zero: " + A.getQuick(i, j)); throw new IllegalStateException(); } } } } // //boolean deepCheckA = true; // boolean deepCheckA = false; // if(deepCheckA){ // //check for 0-rows // List<Integer> zeroRows = new ArrayList<Integer>(); // for(int i=0; i<A.rows(); i++){ // boolean isNotZero = false; // for(int j=0;!isNotZero && j<A.columns(); j++){ // isNotZero = Double.compare(0., A.getQuick(i, j))!=0; // } // if(!isNotZero){ // zeroRows.add(zeroRows.size(), i); // } // } // if(!zeroRows.isEmpty()){ // log.debug("All 0 entries in rows " + ArrayUtils.toString(zeroRows)); // //log.debug(ArrayUtils.toString(A.toArray())); // throw new IllegalStateException(); // } // // //check for 0-columns // List<Integer> zeroCols = new ArrayList<Integer>(); // for(int j=0; j<A.columns(); j++){ // boolean isNotZero = false; // for(int i=0;!isNotZero && i<A.rows(); i++){ // isNotZero = Double.compare(0., A.getQuick(i, j))!=0; // } // if(!isNotZero){ // zeroCols.add(zeroCols.size(), j); // } // } // if(!zeroCols.isEmpty()){ // log.debug("All 0 entries in columns " + ArrayUtils.toString(zeroCols)); // //log.debug(ArrayUtils.toString(A.toArray())); // throw new IllegalStateException(); // } // // // check rank(A): must be A pXn with rank(A)=p < n // QRSparseFactorization qr = null; // boolean factOK = true; // try{ // qr = new QRSparseFactorization((SparseDoubleMatrix2D)A); // qr.factorize(); // }catch(Exception e){ // factOK = false; // log.warn("Warning", e); // } // if(factOK){ // log.debug("p : " + AMatrix.getRowDimension()); // log.debug("n : " + AMatrix.getColumnDimension()); // log.debug("full rank: " + qr.hasFullRank()); // if(!(A.rows() < A.columns())){ // log.debug("!( p < n )"); // throw new 
IllegalStateException(); // } // if(!qr.hasFullRank()){ // log.debug("not full rank A matrix"); // throw new IllegalStateException(); // } // } // } //A.x = b RealVector Axb = AMatrix.operate(X).subtract(Bvector); double norm = Axb.getNorm(); log.debug("|| A.x-b ||: " + norm); if (norm > tolerance) { //where is the error? for (int i = 0; i < Axb.getDimension(); i++) { if (Math.abs(Axb.getEntry(i)) > tolerance) { log.debug("entry " + i + ": " + Axb.getEntry(i)); throw new IllegalStateException(); } } throw new IllegalStateException(); } //upper e lower for (int i = 0; i < X.getDimension(); i++) { if (X.getEntry(i) + tolerance < lb.getQuick(i)) { log.debug("lower bound " + i + " not respected: lb=" + lb.getQuick(i) + ", value=" + X.getEntry(i)); throw new IllegalStateException(); } if (X.getEntry(i) > ub.getQuick(i) + tolerance) { log.debug("upper bound " + i + " not respected: ub=" + ub.getQuick(i) + ", value=" + X.getEntry(i)); throw new IllegalStateException(); } } }
From source file:org.sakaiproject.tool.assessment.services.GradingService.java
public int compareTo(EMIScore o) { //we want the correct higher scores first if (correct == o.correct) { int c = Double.compare(o.score, score); if (c == 0) { if (itemId != o.itemId) { return (int) (itemId - o.itemId); }/*from w w w .ja v a 2 s . c o m*/ if (itemTextId != o.itemTextId) { return (int) (itemTextId - o.itemTextId); } if (answerId != o.answerId) { return (int) (answerId - o.answerId); } return hashCode() - o.hashCode(); } else { return c; } } else { return correct ? -1 : 1; } }
From source file:org.openecomp.sdc.be.components.impl.ArtifactsBusinessLogic.java
/**
 * Resolves the unique id of the latest artifact (highest version, then latest
 * update date) with the given UUID on the given parent component.
 *
 * @param artifactUUID  UUID shared by all versions of the artifact
 * @param errorWrapper  receives a ResponseFormat on failure; empty on success
 * @param parentId      unique id of the parent resource/service
 * @param componentType determines whether the parent is a Resource or a Service
 * @return the latest artifact's unique id, or null if an error was recorded
 */
private String getLatestParentArtifactDataIdByArtifactUUID(String artifactUUID,
        Wrapper<ResponseFormat> errorWrapper, String parentId, ComponentTypeEnum componentType) {
    String artifactId = null;
    ActionStatus actionStatus = ActionStatus.ARTIFACT_NOT_FOUND;
    StorageOperationStatus storageStatus;
    ArtifactDefinition latestArtifact = null;
    List<ArtifactDefinition> artifacts = null;
    NodeTypeEnum parentType;
    if (componentType.equals(ComponentTypeEnum.RESOURCE)) {
        parentType = NodeTypeEnum.Resource;
    } else {
        parentType = NodeTypeEnum.Service;
    }
    Either<Map<String, ArtifactDefinition>, StorageOperationStatus> getArtifactsRes = artifactOperation
            .getArtifacts(parentId, parentType, false);
    if (getArtifactsRes.isRight()) {
        storageStatus = getArtifactsRes.right().value();
        log.debug("Couldn't fetch artifacts data for parent component {} with uid {}, error: {}",
                componentType.name(), parentId, storageStatus);
        if (!storageStatus.equals(StorageOperationStatus.NOT_FOUND)) {
            actionStatus = componentsUtils.convertFromStorageResponse(storageStatus);
        }
        errorWrapper.setInnerElement(componentsUtils.getResponseFormat(actionStatus, artifactUUID));
    }
    if (errorWrapper.isEmpty()) {
        artifacts = getArtifactsRes.left().value().values().stream()
                .filter(a -> a.getArtifactUUID() != null && a.getArtifactUUID().equals(artifactUUID))
                .collect(Collectors.toList());
        if (artifacts == null || artifacts.isEmpty()) {
            log.debug("Couldn't fetch artifact with UUID {} data for parent component {} with uid {}, error: {}",
                    artifactUUID, componentType.name(), parentId, actionStatus);
            errorWrapper.setInnerElement(componentsUtils.getResponseFormat(actionStatus, artifactUUID));
        }
    }
    if (errorWrapper.isEmpty()) {
        // Latest = highest numeric version; ties broken by last update date
        // (a null date counts as 0).
        // BUGFIX: the original called Optional.get(), which never returns null,
        // making the null check below dead code; orElse(null) keeps the guard live.
        latestArtifact = artifacts.stream().max((a1, a2) -> {
            int compareRes = Double.compare(Double.parseDouble(a1.getArtifactVersion()),
                    Double.parseDouble(a2.getArtifactVersion()));
            if (compareRes == 0) {
                compareRes = Long.compare(a1.getLastUpdateDate() == null ? 0 : a1.getLastUpdateDate(),
                        a2.getLastUpdateDate() == null ? 0 : a2.getLastUpdateDate());
            }
            return compareRes;
        }).orElse(null);
        if (latestArtifact == null) {
            log.debug(
                    "Couldn't fetch latest artifact with UUID {} data for parent component {} with uid {}, error: {}",
                    artifactUUID, componentType.name(), parentId, actionStatus);
            errorWrapper.setInnerElement(componentsUtils.getResponseFormat(actionStatus, artifactUUID));
        }
    }
    if (errorWrapper.isEmpty()) {
        artifactId = latestArtifact.getUniqueId();
    }
    return artifactId;
}