List of usage examples for java.lang.Float.isNaN
public static boolean isNaN(float v)
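Float.isNaN returns true if its argument is NaN and false otherwise. The method exists because NaN is the one float value that is not equal to itself, so a plain equality test cannot detect it. A minimal demonstration of that semantics (class name is ours, for illustration):

    // Minimal demonstration of Float.isNaN semantics.
    public class IsNaNDemo {
        public static void main(String[] args) {
            float nan = 0f / 0f;                      // float division by zero yields NaN
            System.out.println(nan == nan);           // false: NaN is never == anything, itself included
            System.out.println(Float.isNaN(nan));     // true
            System.out.println(Float.isNaN(1.0f));    // false
        }
    }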
From source file:routines.system.BigDataParserUtils.java
    public static Float parseTo_Float(float input) {
        // Map NaN to null so downstream consumers can treat it as a missing value.
        if (Float.isNaN(input)) {
            return null;
        }
        // Autoboxing converts the primitive float to a Float.
        return input;
    }
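A small usage sketch of the adapter above (class wrapper and values are ours, for illustration): the method turns NaN into null, which is handy when writing floats into nullable schema fields.

    // Hypothetical usage of the parseTo_Float adapter above.
    public class ParseDemo {
        public static void main(String[] args) {
            System.out.println(BigDataParserUtils.parseTo_Float(3.5f));    // 3.5
            System.out.println(BigDataParserUtils.parseTo_Float(0f / 0f)); // null, since 0f/0f is NaN
        }
    }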
From source file:juicebox.data.MatrixZoomData.java
    /**
     * Compute the Pearson's correlation matrix. Read in the observed counts, calculate O/E from
     * the expected value function, subtract the row means, then compute the Pearson's correlation
     * on that matrix.
     *
     * @param df expected value function
     * @return Pearson's correlation matrix
     */
    private BasicMatrix computePearsons(ExpectedValueFunction df) {
        if (chr1 != chr2) {
            throw new RuntimeException("Cannot compute pearsons for non-diagonal matrices");
        }

        // # of columns. We could let the data itself define this.
        int dim;
        if (zoom.getUnit() == HiC.Unit.BP) {
            dim = chr1.getLength() / zoom.getBinSize() + 1;
        } else {
            dim = ((DatasetReaderV2) reader).getFragCount(chr1) / zoom.getBinSize() + 1;
        }

        // Compute O/E column vectors
        double[][] vectors = new double[dim][];

        // Loop through all contact records, skipping NaN counts.
        Iterator<ContactRecord> iter = contactRecordIterator();
        while (iter.hasNext()) {
            ContactRecord record = iter.next();
            int i = record.getBinX();
            int j = record.getBinY();
            float counts = record.getCounts();
            if (Float.isNaN(counts))
                continue;

            int dist = Math.abs(i - j);
            double expected = df.getExpectedValue(chr1.getIndex(), dist);
            double oeValue = counts / expected;

            double[] vi = vectors[i];
            if (vi == null) {
                vi = new double[dim];
                vectors[i] = vi;
            }
            vi[j] = oeValue;

            double[] vj = vectors[j];
            if (vj == null) {
                vj = new double[dim];
                vectors[j] = vj;
            }
            vj[i] = oeValue;
        }

        // Subtract row means
        double[] rowMeans = new double[dim];
        for (int i = 0; i < dim; i++) {
            double[] row = vectors[i];
            rowMeans[i] = row == null ? 0 : getVectorMean(row);
        }

        for (int j = 0; j < dim; j++) {
            double[] column = vectors[j];
            if (column == null)
                continue;
            for (int i = 0; i < dim; i++) {
                column[i] -= rowMeans[i];
            }
        }

        BasicMatrix pearsons = Pearsons.computePearsons(vectors, dim);
        pearsonsMap.put(df.getNormalizationType(), pearsons);
        return pearsons;
    }
From source file:MSUmpire.PeptidePeakClusterDetection.PDHandlerBase.java
    protected void ReadPepIsoMS1PatternMap() throws FileNotFoundException, IOException {
        InputStream is = this.getClass().getClassLoader()
                .getResourceAsStream("resource/IsotopicPatternRange.csv");
        BufferedReader reader = new BufferedReader(new InputStreamReader(is));

        IsotopePatternMap = new TreeMap[Math.max(2, LCMSPeakBase.MaxNoPeakCluster - 1)];
        for (int i = 0; i < IsotopePatternMap.length; i++) {
            IsotopePatternMap[i] = new TreeMap<>();
        }

        String line;
        while ((line = reader.readLine()) != null) {
            // Split each CSV line once instead of once per field.
            String[] tokens = line.split(",");
            float MW = Float.parseFloat(tokens[0]);
            for (int i = 0; i < IsotopePatternMap.length; i++) {
                float Mean = Float.parseFloat(tokens[1 + (i * 2)]);
                float SD = Float.parseFloat(tokens[2 + (i * 2)]);
                // Skip entries whose mean is missing (parsed as NaN).
                if (!Float.isNaN(Mean)) {
                    IsotopePatternMap[i].put(MW, new XYData(Mean + 3.3f * SD, Mean - 3.3f * SD));
                }
            }
        }
        reader.close();
    }
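One detail worth noting, as general Java behavior rather than anything specific to this project: Float.parseFloat accepts the literal string "NaN", so a CSV cell written that way parses without an exception and is caught by the isNaN guard instead.

    // Float.parseFloat understands the literal "NaN", so missing CSV cells
    // encoded that way parse cleanly and can be filtered with Float.isNaN.
    public class ParseNaNDemo {
        public static void main(String[] args) {
            float mean = Float.parseFloat("NaN");
            System.out.println(Float.isNaN(mean)); // true
        }
    }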
From source file:fr.amap.lidar.amapvox.chart.VoxelsToChart.java
    private XYSeries createVegetationProfileSerie(VoxelFileReader reader, String key, int indiceMin,
            int indiceMax, LayerReference reference, float maxPAD) {

        float resolution = reader.getVoxelSpaceInfos().getResolution();
        int layersNumber = (int) (reader.getVoxelSpaceInfos().getSplit().z);

        float[] padMeanByLayer = new float[layersNumber];
        int[] valuesNumberByLayer = new int[layersNumber];

        // Compute the ground or canopy layer
        Iterator<Voxel> iterator;
        int[][] canopeeArray = null;
        int[][] groundArray = null;

        if (reference == LayerReference.FROM_BELOW_CANOPEE) {
            canopeeArray = new int[reader.getVoxelSpaceInfos().getSplit().x][reader.getVoxelSpaceInfos().getSplit().y];
            iterator = reader.iterator();
            while (iterator.hasNext()) {
                Voxel voxel = iterator.next();
                if (voxel.nbSampling > 0 && voxel.nbEchos > 0) {
                    if (voxel.$k > canopeeArray[voxel.$i][voxel.$j]) {
                        canopeeArray[voxel.$i][voxel.$j] = voxel.$k;
                    }
                }
            }
        } else if (reference == LayerReference.FROM_ABOVE_GROUND) {
            groundArray = new int[reader.getVoxelSpaceInfos().getSplit().x][reader.getVoxelSpaceInfos().getSplit().y];
            for (int i = 0; i < groundArray.length; i++) {
                for (int j = 0; j < groundArray[0].length; j++) {
                    groundArray[i][j] = reader.getVoxelSpaceInfos().getSplit().z - 1;
                }
            }
            iterator = reader.iterator();
            while (iterator.hasNext()) {
                Voxel voxel = iterator.next();
                if (voxel.ground_distance > 0) {
                    if (voxel.$k < groundArray[voxel.$i][voxel.$j]) {
                        groundArray[voxel.$i][voxel.$j] = voxel.$k;
                    }
                }
            }
        }

        iterator = reader.iterator();
        while (iterator.hasNext()) {
            Voxel voxel = iterator.next();
            float pad = voxel.PadBVTotal;
            if (pad > maxPAD) {
                pad = maxPAD;
            }
            if (!Float.isNaN(pad)) {
                if (!doQuadratFiltering(voxel, indiceMin, indiceMax)) {
                    int layerIndex;
                    if (reference == LayerReference.FROM_BELOW_CANOPEE) {
                        layerIndex = canopeeArray[voxel.$i][voxel.$j] - (int) ((voxel.$k) / resolution);
                    } else {
                        layerIndex = (int) (voxel.ground_distance / resolution);
                    }
                    if (layerIndex >= 0 && layerIndex < padMeanByLayer.length) {
                        padMeanByLayer[layerIndex] += pad;
                        valuesNumberByLayer[layerIndex]++;
                    }
                }
            }
        }

        final XYSeries serie = new XYSeries(key, false);
        float lai = 0;

        int maxHeight = layersNumber - 1;
        for (int i = layersNumber - 1; i >= 0; i--) {
            if (padMeanByLayer[i] != 0) {
                maxHeight = i;
                break;
            }
        }

        for (int i = 0; i < layersNumber; i++) {
            // Dividing by a zero count yields NaN; filtered below.
            padMeanByLayer[i] = padMeanByLayer[i] / valuesNumberByLayer[i];
            if (i <= maxHeight && i > 0) {
                // Don't show the first layer because it is biased.
                serie.add(padMeanByLayer[i], i * resolution);
            }
            if (!Float.isNaN(padMeanByLayer[i])) {
                lai += padMeanByLayer[i];
            }
        }
        lai *= resolution;

        serie.setKey(key + '\n' + "PAI = " + (Math.round(lai * 10)) / 10.0);
        return serie;
    }
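The isNaN guard at the end of this method works because of how Java handles float division by zero: an empty layer accumulates a sum of 0f and a count of 0, and 0f divided by an int 0 promotes the denominator to float and yields NaN rather than throwing. A one-line illustration:

    // Why the isNaN guard works: for an empty layer the sum is 0f and the
    // count is 0; float division 0f / 0 yields NaN, not an
    // ArithmeticException as integer division would.
    public class EmptyLayerDemo {
        public static void main(String[] args) {
            float emptyLayerMean = 0f / 0;
            System.out.println(Float.isNaN(emptyLayerMean)); // true
        }
    }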
From source file:org.apache.hadoop.yarn.server.resourcemanager.ApplicationMasterService.java
    @Override
    public AllocateResponse allocate(AllocateRequest request) throws YarnException, IOException {

        AMRMTokenIdentifier amrmTokenIdentifier = YarnServerSecurityUtils.authorizeRequest();
        ApplicationAttemptId appAttemptId = amrmTokenIdentifier.getApplicationAttemptId();
        ApplicationId applicationId = appAttemptId.getApplicationId();

        this.amLivelinessMonitor.receivedPing(appAttemptId);

        /* check if its in cache */
        AllocateResponseLock lock = responseMap.get(appAttemptId);
        if (lock == null) {
            String message = "Application attempt " + appAttemptId
                    + " doesn't exist in ApplicationMasterService cache.";
            LOG.error(message);
            throw new ApplicationAttemptNotFoundException(message);
        }
        synchronized (lock) {
            AllocateResponse lastResponse = lock.getAllocateResponse();
            if (!hasApplicationMasterRegistered(appAttemptId)) {
                String message = "AM is not registered for known application attempt: " + appAttemptId
                        + " or RM had restarted after AM registered. AM should re-register.";
                LOG.info(message);
                RMAuditLogger.logFailure(
                        this.rmContext.getRMApps().get(appAttemptId.getApplicationId()).getUser(),
                        AuditConstants.AM_ALLOCATE, "", "ApplicationMasterService", message,
                        applicationId, appAttemptId);
                throw new ApplicationMasterNotRegisteredException(message);
            }

            if ((request.getResponseId() + 1) == lastResponse.getResponseId()) {
                /* old heartbeat */
                return lastResponse;
            } else if (request.getResponseId() + 1 < lastResponse.getResponseId()) {
                String message = "Invalid responseId in AllocateRequest from application attempt: "
                        + appAttemptId + ", expect responseId to be "
                        + (lastResponse.getResponseId() + 1);
                throw new InvalidApplicationMasterRequestException(message);
            }

            // Filter illegal progress values: NaN, infinities, and out-of-range
            // values are clamped into [0, 1].
            float filteredProgress = request.getProgress();
            if (Float.isNaN(filteredProgress) || filteredProgress == Float.NEGATIVE_INFINITY
                    || filteredProgress < 0) {
                request.setProgress(0);
            } else if (filteredProgress > 1 || filteredProgress == Float.POSITIVE_INFINITY) {
                request.setProgress(1);
            }

            // Send the status update to the appAttempt.
            this.rmContext.getDispatcher().getEventHandler()
                    .handle(new RMAppAttemptStatusupdateEvent(appAttemptId, request.getProgress()));

            List<ResourceRequest> ask = request.getAskList();
            List<ContainerId> release = request.getReleaseList();

            ResourceBlacklistRequest blacklistRequest = request.getResourceBlacklistRequest();
            List<String> blacklistAdditions = (blacklistRequest != null)
                    ? blacklistRequest.getBlacklistAdditions() : Collections.EMPTY_LIST;
            List<String> blacklistRemovals = (blacklistRequest != null)
                    ? blacklistRequest.getBlacklistRemovals() : Collections.EMPTY_LIST;

            RMApp app = this.rmContext.getRMApps().get(applicationId);

            // set label expression for Resource Requests if resourceName=ANY
            ApplicationSubmissionContext asc = app.getApplicationSubmissionContext();
            for (ResourceRequest req : ask) {
                if (null == req.getNodeLabelExpression()
                        && ResourceRequest.ANY.equals(req.getResourceName())) {
                    req.setNodeLabelExpression(asc.getNodeLabelExpression());
                }
            }

            Resource maximumCapacity = rScheduler.getMaximumResourceCapability();

            // sanity check
            try {
                RMServerUtils.normalizeAndValidateRequests(ask, maximumCapacity, app.getQueue(),
                        rScheduler, rmContext);
            } catch (InvalidResourceRequestException e) {
                LOG.warn("Invalid resource ask by application " + appAttemptId, e);
                throw e;
            }

            try {
                RMServerUtils.validateBlacklistRequest(blacklistRequest);
            } catch (InvalidResourceBlacklistRequestException e) {
                LOG.warn("Invalid blacklist request by application " + appAttemptId, e);
                throw e;
            }

            // In the case of work-preserving AM restart, it's possible for the
            // AM to release containers from the earlier attempt.
            if (!app.getApplicationSubmissionContext().getKeepContainersAcrossApplicationAttempts()) {
                try {
                    RMServerUtils.validateContainerReleaseRequest(release, appAttemptId);
                } catch (InvalidContainerReleaseException e) {
                    LOG.warn("Invalid container release by application " + appAttemptId, e);
                    throw e;
                }
            }

            // Split Update Resource Requests into increase and decrease.
            // No Exceptions are thrown here. All update errors are aggregated
            // and returned to the AM.
            List<UpdateContainerRequest> increaseResourceReqs = new ArrayList<>();
            List<UpdateContainerRequest> decreaseResourceReqs = new ArrayList<>();
            List<UpdateContainerError> updateContainerErrors =
                    RMServerUtils.validateAndSplitUpdateResourceRequests(rmContext, request,
                            maximumCapacity, increaseResourceReqs, decreaseResourceReqs);

            // Send new requests to appAttempt.
            Allocation allocation;
            RMAppAttemptState state = app.getRMAppAttempt(appAttemptId).getAppAttemptState();
            if (state.equals(RMAppAttemptState.FINAL_SAVING) || state.equals(RMAppAttemptState.FINISHING)
                    || app.isAppFinalStateStored()) {
                LOG.warn(appAttemptId + " is in " + state + " state, ignore container allocate request.");
                allocation = EMPTY_ALLOCATION;
            } else {
                allocation = this.rScheduler.allocate(appAttemptId, ask, release, blacklistAdditions,
                        blacklistRemovals, increaseResourceReqs, decreaseResourceReqs);
            }

            if (!blacklistAdditions.isEmpty() || !blacklistRemovals.isEmpty()) {
                LOG.info("blacklist are updated in Scheduler." + "blacklistAdditions: "
                        + blacklistAdditions + ", " + "blacklistRemovals: " + blacklistRemovals);
            }
            RMAppAttempt appAttempt = app.getRMAppAttempt(appAttemptId);
            AllocateResponse allocateResponse = recordFactory.newRecordInstance(AllocateResponse.class);
            if (!allocation.getContainers().isEmpty()) {
                allocateResponse.setNMTokens(allocation.getNMTokens());
            }

            // Notify the AM of container update errors
            if (!updateContainerErrors.isEmpty()) {
                allocateResponse.setUpdateErrors(updateContainerErrors);
            }

            // update the response with the deltas of node status changes
            List<RMNode> updatedNodes = new ArrayList<RMNode>();
            if (app.pullRMNodeUpdates(updatedNodes) > 0) {
                List<NodeReport> updatedNodeReports = new ArrayList<NodeReport>();
                for (RMNode rmNode : updatedNodes) {
                    SchedulerNodeReport schedulerNodeReport = rScheduler.getNodeReport(rmNode.getNodeID());
                    Resource used = BuilderUtils.newResource(0, 0);
                    int numContainers = 0;
                    if (schedulerNodeReport != null) {
                        used = schedulerNodeReport.getUsedResource();
                        numContainers = schedulerNodeReport.getNumContainers();
                    }
                    NodeId nodeId = rmNode.getNodeID();
                    NodeReport report = BuilderUtils.newNodeReport(nodeId, rmNode.getState(),
                            rmNode.getHttpAddress(), rmNode.getRackName(), used,
                            rmNode.getTotalCapability(), numContainers, rmNode.getHealthReport(),
                            rmNode.getLastHealthReportTime(), rmNode.getNodeLabels());

                    updatedNodeReports.add(report);
                }
                allocateResponse.setUpdatedNodes(updatedNodeReports);
            }

            allocateResponse.setAllocatedContainers(allocation.getContainers());
            allocateResponse.setCompletedContainersStatuses(appAttempt.pullJustFinishedContainers());
            allocateResponse.setResponseId(lastResponse.getResponseId() + 1);
            allocateResponse.setAvailableResources(allocation.getResourceLimit());

            // Handling increased/decreased containers
            List<UpdatedContainer> updatedContainers = new ArrayList<>();
            if (allocation.getIncreasedContainers() != null) {
                for (Container c : allocation.getIncreasedContainers()) {
                    updatedContainers.add(
                            UpdatedContainer.newInstance(ContainerUpdateType.INCREASE_RESOURCE, c));
                }
            }
            if (allocation.getDecreasedContainers() != null) {
                for (Container c : allocation.getDecreasedContainers()) {
                    updatedContainers.add(
                            UpdatedContainer.newInstance(ContainerUpdateType.DECREASE_RESOURCE, c));
                }
            }
            allocateResponse.setUpdatedContainers(updatedContainers);

            allocateResponse.setNumClusterNodes(this.rScheduler.getNumClusterNodes());

            // add preemption to the allocateResponse message (if any)
            allocateResponse.setPreemptionMessage(generatePreemptionMessage(allocation));

            // Set application priority
            allocateResponse.setApplicationPriority(app.getApplicationSubmissionContext().getPriority());

            // update AMRMToken if the token is rolled-up
            MasterKeyData nextMasterKey = this.rmContext.getAMRMTokenSecretManager().getNextMasterKeyData();

            if (nextMasterKey != null
                    && nextMasterKey.getMasterKey().getKeyId() != amrmTokenIdentifier.getKeyId()) {
                RMAppAttemptImpl appAttemptImpl = (RMAppAttemptImpl) appAttempt;
                Token<AMRMTokenIdentifier> amrmToken = appAttempt.getAMRMToken();
                if (nextMasterKey.getMasterKey().getKeyId() != appAttemptImpl.getAMRMTokenKeyId()) {
                    LOG.info("The AMRMToken has been rolled-over. Send new AMRMToken back"
                            + " to application: " + applicationId);
                    amrmToken = rmContext.getAMRMTokenSecretManager().createAndGetAMRMToken(appAttemptId);
                    appAttemptImpl.setAMRMToken(amrmToken);
                }
                allocateResponse.setAMRMToken(org.apache.hadoop.yarn.api.records.Token.newInstance(
                        amrmToken.getIdentifier(), amrmToken.getKind().toString(),
                        amrmToken.getPassword(), amrmToken.getService().toString()));
            }

            /*
             * As we are updating the response inside the lock object so we don't
             * need to worry about unregister call occurring in between (which
             * removes the lock object).
             */
            lock.setAllocateResponse(allocateResponse);
            return allocateResponse;
        }
    }
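The progress-filtering step is the part of this method that uses Float.isNaN: an application master reports its progress as a float, and a buggy AM could send NaN, an infinity, or an out-of-range value. A standalone sketch of the same clamping logic, distilled from the snippet above (the class and method names are ours, not YARN's):

    // Hypothetical helper distilled from the YARN snippet above: clamp an
    // AM-reported progress value into [0, 1], treating NaN as "no progress".
    class ProgressUtil {
        static float clampProgress(float progress) {
            if (Float.isNaN(progress) || progress == Float.NEGATIVE_INFINITY || progress < 0) {
                return 0f;
            }
            if (progress > 1 || progress == Float.POSITIVE_INFINITY) {
                return 1f;
            }
            return progress;
        }
    }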
From source file:com.openddal.test.BaseTestCase.java
    /**
     * Check if two values are equal, and if not throw an exception.
     *
     * @param expected the expected value
     * @param actual the actual value
     * @throws AssertionError if the values are not equal
     */
    protected void assertEquals(float expected, float actual) {
        if (expected != actual) {
            if (Float.isNaN(expected) && Float.isNaN(actual)) {
                // if both are NaN, then there is no error
            } else {
                fail("Expected: " + expected + " actual: " + actual);
            }
        }
    }
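The extra branch is needed because NaN == NaN evaluates to false in Java, so a plain equality check would report a spurious failure when both sides are NaN. A shorter near-equivalent, sketched below with a name of our choosing, uses Float.compare, which orders NaN as equal to itself. Note it is not an exact drop-in: Float.compare also distinguishes 0.0f from -0.0f, which the == test above treats as equal.

    // Sketch of a near-equivalent check: Float.compare treats two NaNs as
    // equal, so one test covers both the ordinary and the NaN case.
    // Caveat: unlike ==, Float.compare reports 0.0f and -0.0f as different.
    protected void assertEqualsViaCompare(float expected, float actual) {
        if (Float.compare(expected, actual) != 0) {
            fail("Expected: " + expected + " actual: " + actual);
        }
    }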
From source file:org.fhcrc.cpl.viewer.commandline.modules.SpreadsheetMergeCLM.java
    /**
     * Do the actual work.
     */
    public void execute() throws CommandLineModuleExecutionException {
        PrintWriter outPW = null;
        try {
            if (outFile != null)
                outPW = new PrintWriter(outFile);

            List<String> combinedColumns = new ArrayList<String>();
            combinedColumns.add(mergeColumnName);
            StringBuffer headerLine = new StringBuffer(mergeColumnName);
            TabLoader[] tabLoaders = new TabLoader[inFiles.length];
            List<TabLoader.ColumnDescriptor>[] columnsAllFiles = new List[inFiles.length];
            for (int i = 0; i < inFiles.length; i++) {
                File inFile = inFiles[i];
                System.err.println("Loading file " + inFile.getName());
                // Force reading the first file line as a header line, since
                // we're dependent on column names.
                TabLoader loader = new TabLoader(new FileReader(inFile), true);
                List<TabLoader.ColumnDescriptor> columnsThisFile = new ArrayList<TabLoader.ColumnDescriptor>();
                if (i == 1 && file2ColumnName != null) {
                    for (TabLoader.ColumnDescriptor column : loader.getColumns()) {
                        if (column.name.equals(file2ColumnName))
                            columnsThisFile.add(column);
                    }
                    headerLine.append("\t" + file2ColumnName);
                } else if (i == 1 && newColumnName != null) {
                    headerLine.append("\t" + newColumnName);
                    columnsThisFile.add(new TabLoader.ColumnDescriptor(newColumnName, String.class));
                } else {
                    for (TabLoader.ColumnDescriptor column : loader.getColumns()) {
                        columnsThisFile.add(column);
                        if (!mergeColumnName.equals(column.name))
                            headerLine.append("\t" + column.name);
                    }
                }
                List<String> columnNamesThisFile = new ArrayList<String>();
                for (TabLoader.ColumnDescriptor column : columnsThisFile)
                    columnNamesThisFile.add(column.name);
                if (!columnNamesThisFile.contains(mergeColumnName))
                    throw new CommandLineModuleExecutionException("File " + inFile.getAbsolutePath()
                            + " does not contain column " + mergeColumnName);
                columnsAllFiles[i] = columnsThisFile;
                tabLoaders[i] = loader;
            }

            if (outFile != null) {
                outPW.println(headerLine.toString());
                outPW.flush();
            }

            Map<String, Map>[] rowMaps = new Map[inFiles.length];
            for (int i = 0; i < inFiles.length; i++) {
                rowMaps[i] = mapRowsByMergeCol((Map[]) new TabLoader(inFiles[i]).load());
                // Replace map with presence annotations if that's what we're doing
                if (i == 1 && newColumnName != null) {
                    Map<String, Map> annotRowMap = new HashMap<String, Map>();
                    for (String key : rowMaps[i].keySet()) {
                        Map<String, String> keyMap = new HashMap<String, String>();
                        keyMap.put(newColumnName, presenceAnnotation);
                        annotRowMap.put(key, keyMap);
                    }
                    rowMaps[i] = annotRowMap;
                }
                ApplicationContext.infoMessage("Loaded " + rowMaps[i].size() + " rows from file " + (i + 1));
            }

            Set<String> keysInAllFiles = new HashSet<String>();
            for (String key : rowMaps[0].keySet()) {
                boolean notFoundSomewhere = false;
                for (int i = 1; i < rowMaps.length; i++) {
                    if (!rowMaps[i].containsKey(key)) {
                        notFoundSomewhere = true;
                        break;
                    }
                }
                if (!notFoundSomewhere)
                    keysInAllFiles.add(key);
            }
            ApplicationContext.infoMessage("Rows in common: " + keysInAllFiles.size());

            Set<String> keysToWrite = new HashSet<String>();
            if (keepAllValuesAllFiles) {
                for (Map<String, Map> rowMap : rowMaps)
                    keysToWrite.addAll(rowMap.keySet());
            } else if (keepAllFile1Values) {
                keysToWrite = rowMaps[0].keySet();
            } else {
                keysToWrite = keysInAllFiles;
            }

            for (String key : keysToWrite) {
                Map[] mapsAllFiles = new Map[rowMaps.length];
                for (int j = 0; j < rowMaps.length; j++) {
                    mapsAllFiles[j] = rowMaps[j].get(key);
                    // If we don't find it, and we're supposed to split up keys
                    // by ";", do so.
                    if (mapsAllFiles[j] == null && j > 0 && multipleMergeColumnValuesFirstFile
                            && key.contains(";")) {
                        for (String partKey : key.split(";")) {
                            if (rowMaps[j].containsKey(partKey)) {
                                mapsAllFiles[j] = rowMaps[j].get(partKey);
                                ApplicationContext.infoMessage("Split up multi-key " + key
                                        + ", found match for " + partKey);
                                break;
                            }
                        }
                    }
                }
                String line = createFileLine(key, columnsAllFiles, mapsAllFiles);
                if (outFile != null) {
                    outPW.println(line);
                    outPW.flush();
                }
            }

            if (outUnique2File != null) {
                PrintWriter unique2OutWriter = new PrintWriter(outUnique2File);
                StringBuffer headerLineBuf = new StringBuffer(mergeColumnName);
                for (TabLoader.ColumnDescriptor column : columnsAllFiles[1]) {
                    if (!mergeColumnName.equals(column.name))
                        headerLineBuf.append("\t" + column.name);
                }
                unique2OutWriter.println(headerLineBuf);
                List<Float> plotColumnUnique2Values = new ArrayList<Float>();
                for (String key : rowMaps[1].keySet()) {
                    if (keysInAllFiles.contains(key))
                        continue;
                    List<TabLoader.ColumnDescriptor>[] colArray = new List[] { columnsAllFiles[1] };
                    Map[] colMap = new Map[] { rowMaps[1].get(key) };
                    String line = createFileLine(key, colArray, colMap);
                    if (outUnique2File != null) {
                        unique2OutWriter.println(line);
                        unique2OutWriter.flush();
                    }
                    if (plotColumnName != null && colMap[0].get(plotColumnName) != null) {
                        try {
                            plotColumnUnique2Values.add(columnValueAsFloat(colMap[0].get(plotColumnNameFile2)));
                        } catch (ClassCastException e) {
                            // non-numeric value; skip
                        }
                    }
                }
                if (plotColumnName != null && !plotColumnUnique2Values.isEmpty()) {
                    PanelWithHistogram pwh = new PanelWithHistogram(plotColumnUnique2Values,
                            "Values unique to 2");
                    pwh.displayInTab();
                }
                unique2OutWriter.close();
                ApplicationContext.infoMessage("Wrote lines unique to file 2 in "
                        + outUnique2File.getAbsolutePath());
            }

            // first two files only
            if (plotColumnName != null) {
                List<Float> values1 = new ArrayList<Float>();
                List<Float> values2 = new ArrayList<Float>();
                List<String> commonKeys = new ArrayList<String>();
                List<Float> trackValues1 = new ArrayList<Float>();
                List<Float> trackValues2 = new ArrayList<Float>();

                Map<String, Map> rowMaps1 = rowMaps[0];
                Map<String, Map> rowMaps2 = rowMaps[1];

                for (String key : rowMaps2.keySet()) {
                    if (!rowMaps1.containsKey(key)) {
                        Object o2 = rowMaps2.get(key).get(plotColumnNameFile2);
                        if (valuesToTrack != null && valuesToTrack.contains(key)) {
                            System.err.println(key + "\tNA\t" + o2);
                        }
                    }
                }

                for (String key : rowMaps1.keySet()) {
                    Object o1 = rowMaps1.get(key).get(plotColumnName);
                    if (rowMaps2.containsKey(key)) {
                        Object o2 = rowMaps2.get(key).get(plotColumnNameFile2);
                        if (o1 == null || o2 == null)
                            continue;
                        try {
                            float value1 = columnValueAsFloat(o1);
                            float value2 = columnValueAsFloat(o2);

                            float displayValue1 = value1;
                            float displayValue2 = value2;
                            if (plotLog) {
                                // Nudge zeros so the log transform stays finite.
                                if (displayValue1 == 0)
                                    displayValue1 += 0.000001;
                                if (displayValue2 == 0)
                                    displayValue2 += 0.000001;
                                displayValue1 = (float) Math.log(displayValue1);
                                displayValue2 = (float) Math.log(displayValue2);
                            }
                            // Keep only values that are plottable: finite and not NaN.
                            if (!Float.isInfinite(displayValue1) && !Float.isInfinite(displayValue2)
                                    && !Float.isNaN(displayValue1) && !Float.isNaN(displayValue2)) {
                                values1.add(displayValue1);
                                values2.add(displayValue2);
                                commonKeys.add(key);
                                if (valuesToTrack != null && valuesToTrack.contains(key)) {
                                    trackValues1.add(displayValue1);
                                    trackValues2.add(displayValue2);
                                }
                            }
                        } catch (ClassCastException e) {
                            ApplicationContext.infoMessage("Crap! Can't process value "
                                    + rowMaps1.get(key).get(plotColumnName) + " or "
                                    + rowMaps2.get(key).get(plotColumnName));
                        }
                    }
                }
                ApplicationContext.infoMessage("Rows in common and plottable: " + values1.size());
                ApplicationContext.infoMessage("Correlation coefficient: "
                        + BasicStatistics.correlationCoefficient(values1, values2));

                PanelWithScatterPlot pwsp = new PanelWithScatterPlot(values1, values2, plotColumnName);
                pwsp.setAxisLabels("File 1", "File 2");
                pwsp.displayInTab();

                List<Float> differences = new ArrayList<Float>();
                for (int i = 0; i < values1.size(); i++)
                    differences.add(values2.get(i) - values1.get(i));
                new PanelWithHistogram(differences, "Differences").displayInTab();

                if (valuesToTrack != null && trackValues1.size() > 0) {
                    PanelWithScatterPlot pwsp2 = new PanelWithScatterPlot(trackValues1, trackValues2,
                            plotColumnName + "_track");
                    pwsp2.setAxisLabels("File 1", "File 2");
                    pwsp2.displayInTab();
                }

                if (compareOutFile != null) {
                    PrintWriter compareOutWriter = new PrintWriter(compareOutFile);
                    compareOutWriter.println(mergeColumnName + "\t" + plotColumnName + "_1\t"
                            + plotColumnName + "_2");
                    for (int i = 0; i < values1.size(); i++) {
                        compareOutWriter.println(commonKeys.get(i) + "\t" + values1.get(i) + "\t"
                                + values2.get(i));
                        compareOutWriter.flush();
                    }
                    compareOutWriter.close();
                }
            }
        } catch (Exception e) {
            throw new CommandLineModuleExecutionException(e);
        } finally {
            if (outPW != null)
                outPW.close();
        }
    }
From source file:papaya.Rank.java
    /**
     * Returns a list of indexes where <code>ranks</code> is <code>NaN</code>.
     *
     * @param ranks array to search for <code>NaNs</code>
     * @return list of indexes i such that <code>ranks[i] = NaN</code>
     */
    private static List<Integer> getNanPositions(IntFloatPair[] ranks) {
        ArrayList<Integer> out = new ArrayList<Integer>();
        for (int i = 0; i < ranks.length; i++) {
            if (Float.isNaN(ranks[i].getValue())) {
                out.add(Integer.valueOf(i));
            }
        }
        return out;
    }
From source file:routines.system.BigDataParserUtils.java
    public static BigDecimal parseTo_BigDecimal(float input) {
        // NaN has no BigDecimal representation; treat it as a missing value.
        if (Float.isNaN(input)) {
            return null;
        }
        return BigDecimal.valueOf(input);
    }
From source file:model.scenario.CompetitiveScenarioTest.java
    @Test
    public void tooManyLearnedCompetitors() {
        int competitors = 20;

        Class<? extends AskPricingStrategy> strategies[] = new Class[2];
        strategies[1] = InventoryBufferSalesControl.class;
        strategies[0] = SalesControlWithFixedInventoryAndPID.class;

        for (Class<? extends AskPricingStrategy> strategy : strategies) {
            System.out.println(strategy.getSimpleName());

            for (int i = 0; i < 5; i++) {
                final MacroII macroII = new MacroII(System.currentTimeMillis());
                final TripolistScenario scenario1 = new TripolistScenario(macroII);
                scenario1.setSalesDepartmentType(SalesDepartmentOneAtATime.class);
                scenario1.setAskPricingStrategy(strategy);
                scenario1.setControlType(
                        MonopolistScenario.MonopolistScenarioIntegratedControlEnum.MARGINAL_PLANT_CONTROL);
                scenario1.setAdditionalCompetitors(competitors);
                scenario1.setWorkersToBeRehiredEveryDay(true);
                scenario1.setDemandIntercept(102);

                // assign scenario
                macroII.setScenario(scenario1);
                macroII.start();
                macroII.schedule.step(macroII);

                for (Firm firm : scenario1.getCompetitors()) {
                    for (HumanResources hr : firm.getHRs())
                        hr.setPredictor(new FixedIncreasePurchasesPredictor(0));
                    firm.getSalesDepartment(UndifferentiatedGoodType.GENERIC)
                            .setPredictorStrategy(new FixedDecreaseSalesPredictor(0));
                }

                while (macroII.schedule.getTime() < 15000) {
                    macroII.schedule.step(macroII);
                }

                float averagePrice = 0;
                float averageQ = 0;
                float averageInventory = 0;
                float workers = 0;
                for (int j = 0; j < 500; j++) {
                    macroII.schedule.step(macroII);
                    for (Firm f : scenario1.getCompetitors())
                        workers += f.getHRs().iterator().next().getWorkerTarget();
                    // A single NaN observation would silently corrupt the running sums.
                    assert !Float.isNaN(macroII.getMarket(UndifferentiatedGoodType.GENERIC).getTodayAveragePrice());
                    averagePrice += macroII.getMarket(UndifferentiatedGoodType.GENERIC).getTodayAveragePrice();
                    averageQ += macroII.getMarket(UndifferentiatedGoodType.GENERIC)
                            .countTodayProductionByRegisteredSellers();
                    averageInventory += macroII.getMarket(UndifferentiatedGoodType.GENERIC)
                            .getLatestObservation(MarketDataType.SELLERS_INVENTORY);
                }
                averagePrice = averagePrice / 500f;
                workers /= 500f;
                averageQ = averageQ / 500f;
                averageInventory = averageInventory / 500f;
                averageInventory = averageInventory / (competitors + 1);

                System.out.println(averagePrice + "," + averageQ);
                System.out.println((competitors + 1) + "," + averagePrice + "," + averageQ + ","
                        + averageInventory + "," + workers);
                for (Firm f : scenario1.getCompetitors())
                    System.out.print(f.getHRs().iterator().next().getWorkerTarget() + ",");
                System.out.println();

                assertEquals(averagePrice, 57, 5);
                assertEquals(averageQ, 44, 5);
            }
        }
    }
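The assert inside the sampling loop exists because NaN propagates through addition: one NaN observation would turn the whole running average into NaN. A small hedged sketch of a defensive alternative (class and method names are ours) that skips NaN samples instead of asserting:

    // Hypothetical NaN-tolerant accumulator: any NaN added to a running sum
    // makes the final average NaN, so skip such samples explicitly.
    class NanSafeStats {
        static float nanSafeAverage(float[] samples) {
            float sum = 0f;
            int count = 0;
            for (float s : samples) {
                if (!Float.isNaN(s)) {
                    sum += s;
                    count++;
                }
            }
            return count == 0 ? Float.NaN : sum / count;
        }
    }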