Usage examples for java.lang.Float.compare
public static int compare(float f1, float f2)
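Float.compare(f1, f2) returns a negative value, zero, or a positive value, imposing the same total order as Float.compareTo: unlike the == operator, NaN compares equal to itself and greater than every other float, and 0.0f compares greater than -0.0f. A minimal demo of those edge cases (class name is illustrative):

public class FloatCompareDemo {
    public static void main(String[] args) {
        System.out.println(Float.compare(1.0f, 2.0f));           // negative: 1.0f < 2.0f
        System.out.println(Float.compare(2.0f, 2.0f));           // 0: equal
        System.out.println(Float.compare(0.0f, -0.0f));          // positive, although 0.0f == -0.0f is true
        System.out.println(Float.compare(Float.NaN, Float.NaN)); // 0, although Float.NaN == Float.NaN is false
        System.out.println(Float.compare(Float.NaN, Float.POSITIVE_INFINITY)); // positive: NaN sorts last
    }
}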
From source file: org.apache.maven.plugin.javadoc.JavadocReportTest.java
/**
 * Test the javadoc resources.
 *
 * @throws Exception if any
 */
public void testJavadocResources() throws Exception {
    File testPom = new File(unit, "resources-test/resources-test-plugin-config.xml");
    JavadocReport mojo = (JavadocReport) lookupMojo("javadoc", testPom);
    mojo.execute();

    File apidocs = new File(getBasedir(), "target/test/unit/resources-test/target/site/apidocs/");

    File app = new File(apidocs, "resources/test/App.html");
    assertTrue(app.exists());
    String content = readFile(app);
    assertTrue(content.contains("<img src=\"doc-files/maven-feather.png\" alt=\"Maven\">"));
    assertTrue(new File(apidocs, "resources/test/doc-files/maven-feather.png").exists());

    File app2 = new File(apidocs, "resources/test2/App2.html");
    assertTrue(app2.exists());
    content = readFile(app2);
    assertTrue(content.contains("<img src=\"doc-files/maven-feather.png\" alt=\"Maven\">"));
    assertFalse(new File(apidocs, "resources/test2/doc-files/maven-feather.png").exists());

    // with excludes
    testPom = new File(unit, "resources-with-excludes-test/resources-with-excludes-test-plugin-config.xml");
    mojo = (JavadocReport) lookupMojo("javadoc", testPom);
    mojo.execute();

    apidocs = new File(getBasedir(), "target/test/unit/resources-with-excludes-test/target/site/apidocs");

    app = new File(apidocs, "resources/test/App.html");
    assertTrue(app.exists());
    content = readFile(app);
    assertTrue(content.contains("<img src=\"doc-files/maven-feather.png\" alt=\"Maven\">"));

    float javadocVersion = (Float) getVariableValueFromObject(mojo, "fJavadocVersion");
    if (Float.compare(1.8f, javadocVersion) == 0) {
        // https://bugs.openjdk.java.net/browse/JDK-8032205
        assertTrue("This bug appeared in JDK8 and was planned to be fixed in JDK9, see JDK-8032205",
                new File(apidocs, "resources/test/doc-files/maven-feather.png").exists());
    } else {
        assertFalse(new File(apidocs, "resources/test/doc-files/maven-feather.png").exists());
    }

    app2 = new File(apidocs, "resources/test2/App2.html");
    assertTrue(app2.exists());
    content = readFile(app2);
    assertTrue(content.contains("<img src=\"doc-files/maven-feather.png\" alt=\"Maven\">"));
    assertTrue(new File(apidocs, "resources/test2/doc-files/maven-feather.png").exists());
}
From source file: com.moviejukebox.tools.OverrideTools.java
public static boolean checkOverwriteFPS(Movie movie, String source) {
    if (skipCheck(movie, OverrideFlag.FPS, source)) {
        // skip the check
        return Boolean.FALSE;
    } else if (Float.compare(movie.getFps(), 60f) == 0) {
        // assume 60 as default value
        return Boolean.TRUE;
    }
    return checkOverwrite(movie, OverrideFlag.FPS, source);
}
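A note on the idiom above: for ordinary values, Float.compare(movie.getFps(), 60f) == 0 is equivalent to movie.getFps() == 60f; the two only disagree on NaN and signed zero. A minimal sketch (helper name is illustrative):

static boolean sameByCompare(float a, float b) {
    return Float.compare(a, b) == 0;
}
// sameByCompare(Float.NaN, Float.NaN) -> true,  while Float.NaN == Float.NaN -> false
// sameByCompare(0.0f, -0.0f)          -> false, while 0.0f == -0.0f          -> true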
From source file: pipeline.misc_util.Utils.java
public static void main(String[] args) throws IOException {
    // findStepsInProfile
    File profileFile = new File(args[0]);
    int columnIndex = Integer.parseInt(args[1]);
    int xColumnIndex = Integer.parseInt(args[2]);
    int nColumns = Integer.parseInt(args[3]);
    float windowLength = Float.parseFloat(args[4]);
    // FIXME Code duplicated in RegisterVideoLabelTimes plugin. Create a parsing class.
    try (Reader r = new BufferedReader(new FileReader(profileFile))) {
        ReaderTokenizer stok = new ReaderTokenizer(r);
        stok.parseNumbers();
        int currentColumn = 0;
        // Skip column headers
        for (int i = 0; i < nColumns; i++) {
            stok.nextToken();
        }
        stok.nextToken();
        // FloatList profileList = new ArrayFloatList(100000); // Time in seconds
        // FloatList xPositionList = new ArrayFloatList(100000); // Time in seconds
        ArrayList<float[]> readings = new ArrayList<>();
        int currentIndex = 0;
        readings.add(new float[2]);
        while (stok.ttype != ReaderTokenizer.TT_EOF) {
            if (stok.ttype == ReaderTokenizer.TT_NUMBER) {
                float f = (float) stok.nval;
                if (currentColumn == columnIndex) {
                    readings.get(currentIndex)[1] = f;
                    if (f < 0) {
                        Utils.log("Read negative profile", LogLevel.ERROR);
                    }
                } else if (currentColumn == xColumnIndex) {
                    readings.get(currentIndex)[0] = f;
                    if (f < 0) {
                        Utils.log("Read negative profile", LogLevel.ERROR);
                    }
                }
            } else
                throw new RuntimeException(
                        "Unexpected read from detected cell file " + stok.sval + " of type " + stok.ttype);
            stok.nextToken();
            currentColumn++;
            if (currentColumn == nColumns) {
                currentColumn = 0;
                currentIndex++;
                readings.add(new float[2]);
            }
        }
        readings.remove(readings.size() - 1);
        float[][] readingsArray = readings.toArray(new float[][] { { 1, 2 } });
        // sort readings by their x position
        Arrays.sort(readingsArray, (o1, o2) -> Float.compare(o1[0], o2[0]));
        float[] steps = minMaxSlidingWindow(readingsArray, windowLength);
        for (int i = 0; i < steps.length; i++) {
            System.out.println((readingsArray[i][0] + windowLength * 0.5) + "\t" + steps[i]);
        }
    } catch (FileNotFoundException e) {
        throw new RuntimeException("Could not find output file to read times from", e);
    }
}
From source file: org.shaman.terrain.polygonal.PolygonalMapGenerator.java
private void createBiomes() {
    if (graph == null) {
        return;
    }
    // assign temperatures
    for (Graph.Corner c : graph.corners) {
        c.temperature = c.elevation;
        c.temperature *= c.temperature;
        c.temperature = 1 - c.temperature;
    }
    assignCenterTemperature();
    // create random rivers
    Random rand = new Random(seed * 3);
    for (Graph.Corner c : graph.corners) {
        c.river = 0;
    }
    float riverProb = 0.2f;
    float riverStartHeight = 0.7f;
    int riverCounter = 0;
    corner: for (Graph.Corner c : graph.corners) {
        if (c.water || c.elevation < riverStartHeight) {
            continue;
        }
        if (rand.nextFloat() > riverProb) {
            continue;
        }
        if (c.river > 0)
            continue;
        for (Graph.Corner c2 : c.adjacent) {
            if (c2.river > 0) {
                continue corner;
            }
            for (Graph.Corner c3 : c2.adjacent) {
                if (c3.river > 0) {
                    continue corner;
                }
            }
        }
        // start new river from here
        Graph.Corner current = c;
        current.river = Math.max(current.river, 1);
        while (!current.ocean && !current.coast) {
            float minH = current.elevation;
            Graph.Corner minC = null;
            for (Graph.Corner c2 : current.adjacent) {
                if (c2.river > 0 && c2.elevation < current.elevation) {
                    minC = c2; // force closing of rivers
                    break;
                }
                if (c2.elevation < minH) {
                    minC = c2;
                    minH = c2.elevation;
                }
            }
            if (minC == null) {
                LOG.warning("river stuck in a local minima without reaching the ocean");
                break;
            }
            minC.river = Math.max(minC.river, current.river + 1);
            current = minC;
        }
        riverCounter++;
    }
    LOG.info("count of created rivers: " + riverCounter);
    showRivers = true;
    // assign moisture
    Queue<Graph.Corner> queue = new ArrayDeque<>();
    for (Graph.Corner q : graph.corners) {
        if ((q.water || q.river > 0) && !q.ocean) {
            q.moisture = q.river > 0 ? Math.min(3.0f, (0.4f * q.river)) : 1;
            queue.add(q);
        } else {
            q.moisture = 0;
        }
    }
    while (!queue.isEmpty()) {
        Graph.Corner q = queue.poll();
        for (Graph.Corner r : q.adjacent) {
            float newMoisture = q.moisture * 0.8f;
            if (newMoisture > r.moisture) {
                r.moisture = newMoisture;
                queue.add(r);
            }
        }
    }
    for (Graph.Corner q : graph.corners) {
        if (q.ocean || q.coast) {
            q.moisture = 1;
        }
    }
    // redistribute moisture
    ArrayList<Graph.Corner> corners = new ArrayList<>();
    for (Graph.Corner q : graph.corners) {
        if (!q.ocean && !q.coast) {
            corners.add(q);
        }
    }
    Collections.sort(corners, new Comparator<Graph.Corner>() {
        @Override
        public int compare(Graph.Corner o1, Graph.Corner o2) {
            return Float.compare(o1.moisture, o2.moisture);
        }
    });
    for (int i = 0; i < corners.size(); i++) {
        corners.get(i).moisture = i / (float) (corners.size() - 1);
    }
    assignCenterMoisture();
    assignBiomes();
    // update mesh
    updateTemperatureGeometry();
    updateMoistureGeometry();
    updateBiomesGeometry();
}
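The anonymous Comparator in the moisture sort above can be written more compactly on Java 8+. Float.compare (or Comparator.comparingDouble, which widens float to double exactly) is the safe choice; the common subtraction idiom is not. A sketch, assuming the same corners list:

Collections.sort(corners, (o1, o2) -> Float.compare(o1.moisture, o2.moisture));        // safe, equivalent
Collections.sort(corners, Comparator.comparingDouble((Graph.Corner c) -> c.moisture)); // also safe
// Collections.sort(corners, (o1, o2) -> (int) (o1.moisture - o2.moisture));
// broken: the cast truncates fractional differences to 0 and NaN yields an inconsistent order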
From source file: ml.shifu.shifu.core.dtrain.dt.DTWorker.java
@Override
public DTWorkerParams doCompute(WorkerContext<DTMasterParams, DTWorkerParams> context) {
    if (context.isFirstIteration()) {
        return new DTWorkerParams();
    }
    DTMasterParams lastMasterResult = context.getLastMasterResult();
    final List<TreeNode> trees = lastMasterResult.getTrees();
    final Map<Integer, TreeNode> todoNodes = lastMasterResult.getTodoNodes();
    if (todoNodes == null) {
        return new DTWorkerParams();
    }
    LOG.info("Start to work: todoNodes size is {}", todoNodes.size());
    Map<Integer, NodeStats> statistics = initTodoNodeStats(todoNodes);
    double trainError = 0d, validationError = 0d;
    double weightedTrainCount = 0d, weightedValidationCount = 0d;
    // renew random seed
    if (this.isGBDT && !this.gbdtSampleWithReplacement && lastMasterResult.isSwitchToNextTree()) {
        this.baggingRandomMap = new HashMap<Integer, Random>();
    }
    long start = System.nanoTime();
    for (Data data : this.trainingData) {
        if (this.isRF) {
            for (TreeNode treeNode : trees) {
                if (treeNode.getNode().getId() == Node.INVALID_INDEX) {
                    continue;
                }
                Node predictNode = predictNodeIndex(treeNode.getNode(), data, true);
                if (predictNode.getPredict() != null) {
                    // only update when not in first node; for treeNode, no predict statistics at that time
                    float weight = data.subsampleWeights[treeNode.getTreeId()];
                    if (Float.compare(weight, 0f) == 0) {
                        // oob data, no need to do weighting
                        validationError += data.significance
                                * loss.computeError((float) (predictNode.getPredict().getPredict()), data.label);
                        weightedValidationCount += data.significance;
                    } else {
                        trainError += weight * data.significance
                                * loss.computeError((float) (predictNode.getPredict().getPredict()), data.label);
                        weightedTrainCount += weight * data.significance;
                    }
                }
            }
        }
        if (this.isGBDT) {
            if (this.isContinuousEnabled && lastMasterResult.isContinuousRunningStart()) {
                recoverGBTData(context, data.output, data.predict, data, false);
                trainError += data.significance * loss.computeError(data.predict, data.label);
                weightedTrainCount += data.significance;
            } else {
                if (isNeedRecoverGBDTPredict) {
                    if (this.recoverTrees == null) {
                        this.recoverTrees = recoverCurrentTrees();
                    }
                    // recover gbdt data for fail over
                    recoverGBTData(context, data.output, data.predict, data, true);
                }
                int currTreeIndex = trees.size() - 1;
                if (lastMasterResult.isSwitchToNextTree()) {
                    if (currTreeIndex >= 1) {
                        Node node = trees.get(currTreeIndex - 1).getNode();
                        Node predictNode = predictNodeIndex(node, data, false);
                        if (predictNode.getPredict() != null) {
                            double predict = predictNode.getPredict().getPredict();
                            // first tree logic: master must set it to first tree even if second tree with ROOT is sending
                            if (context.getLastMasterResult().isFirstTree()) {
                                data.predict = (float) predict;
                            } else {
                                // random drop
                                boolean drop = (this.dropOutRate > 0.0
                                        && dropOutRandom.nextDouble() < this.dropOutRate);
                                if (!drop) {
                                    data.predict += (float) (this.learningRate * predict);
                                }
                            }
                            data.output = -1f * loss.computeGradient(data.predict, data.label);
                        }
                        // if not sampling with replacement in gbdt, renew bagging sample rate in next tree
                        if (!this.gbdtSampleWithReplacement) {
                            Random random = null;
                            int classValue = (int) (data.label + 0.01f);
                            if (this.isStratifiedSampling) {
                                random = baggingRandomMap.get(classValue);
                                if (random == null) {
                                    random = DTrainUtils.generateRandomBySampleSeed(
                                            modelConfig.getTrain().getBaggingSampleSeed(),
                                            CommonConstants.NOT_CONFIGURED_BAGGING_SEED);
                                    baggingRandomMap.put(classValue, random);
                                }
                            } else {
                                random = baggingRandomMap.get(0);
                                if (random == null) {
                                    random = DTrainUtils.generateRandomBySampleSeed(
                                            modelConfig.getTrain().getBaggingSampleSeed(),
                                            CommonConstants.NOT_CONFIGURED_BAGGING_SEED);
                                    baggingRandomMap.put(0, random);
                                }
                            }
                            if (random.nextDouble() <= modelConfig.getTrain().getBaggingSampleRate()) {
                                data.subsampleWeights[currTreeIndex % data.subsampleWeights.length] = 1f;
                            } else {
                                data.subsampleWeights[currTreeIndex % data.subsampleWeights.length] = 0f;
                            }
                        }
                    }
                }
                if (context.getLastMasterResult().isFirstTree() && !lastMasterResult.isSwitchToNextTree()) {
                    Node currTree = trees.get(currTreeIndex).getNode();
                    Node predictNode = predictNodeIndex(currTree, data, true);
                    if (predictNode.getPredict() != null) {
                        trainError += data.significance
                                * loss.computeError((float) (predictNode.getPredict().getPredict()), data.label);
                        weightedTrainCount += data.significance;
                    }
                } else {
                    trainError += data.significance * loss.computeError(data.predict, data.label);
                    weightedTrainCount += data.significance;
                }
            }
        }
    }
    LOG.debug("Compute train error time is {}ms", TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - start));
    if (validationData != null) {
        start = System.nanoTime();
        for (Data data : this.validationData) {
            if (this.isRF) {
                for (TreeNode treeNode : trees) {
                    if (treeNode.getNode().getId() == Node.INVALID_INDEX) {
                        continue;
                    }
                    Node predictNode = predictNodeIndex(treeNode.getNode(), data, true);
                    if (predictNode.getPredict() != null) {
                        // only update when not in first node; for treeNode, no predict statistics at that time
                        validationError += data.significance
                                * loss.computeError((float) (predictNode.getPredict().getPredict()), data.label);
                        weightedValidationCount += data.significance;
                    }
                }
            }
            if (this.isGBDT) {
                if (this.isContinuousEnabled && lastMasterResult.isContinuousRunningStart()) {
                    recoverGBTData(context, data.output, data.predict, data, false);
                    validationError += data.significance * loss.computeError(data.predict, data.label);
                    weightedValidationCount += data.significance;
                } else {
                    if (isNeedRecoverGBDTPredict) {
                        if (this.recoverTrees == null) {
                            this.recoverTrees = recoverCurrentTrees();
                        }
                        // recover gbdt data for fail over
                        recoverGBTData(context, data.output, data.predict, data, true);
                    }
                    int currTreeIndex = trees.size() - 1;
                    if (lastMasterResult.isSwitchToNextTree()) {
                        if (currTreeIndex >= 1) {
                            Node node = trees.get(currTreeIndex - 1).getNode();
                            Node predictNode = predictNodeIndex(node, data, false);
                            if (predictNode.getPredict() != null) {
                                double predict = predictNode.getPredict().getPredict();
                                if (context.getLastMasterResult().isFirstTree()) {
                                    data.predict = (float) predict;
                                } else {
                                    data.predict += (float) (this.learningRate * predict);
                                }
                                data.output = -1f * loss.computeGradient(data.predict, data.label);
                            }
                        }
                    }
                    if (context.getLastMasterResult().isFirstTree() && !lastMasterResult.isSwitchToNextTree()) {
                        Node predictNode = predictNodeIndex(trees.get(currTreeIndex).getNode(), data, true);
                        if (predictNode.getPredict() != null) {
                            validationError += data.significance
                                    * loss.computeError((float) (predictNode.getPredict().getPredict()),
                                            data.label);
                            weightedValidationCount += data.significance;
                        }
                    } else {
                        validationError += data.significance * loss.computeError(data.predict, data.label);
                        weightedValidationCount += data.significance;
                    }
                }
            }
        }
        LOG.debug("Compute val error time is {}ms", TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - start));
    }
    if (this.isGBDT) {
        // reset trees to null to save memory
        this.recoverTrees = null;
        if (this.isNeedRecoverGBDTPredict) {
            // no need to recover again
            this.isNeedRecoverGBDTPredict = false;
        }
    }
    start = System.nanoTime();
    CompletionService<Map<Integer, NodeStats>> completionService =
            new ExecutorCompletionService<Map<Integer, NodeStats>>(this.threadPool);
    int realThreadCount = 0;
    LOG.debug("while todo size {}", todoNodes.size());
    int realRecords = this.trainingData.size();
    int realThreads = this.workerThreadCount > realRecords ? realRecords : this.workerThreadCount;
    int[] trainLows = new int[realThreads];
    int[] trainHighs = new int[realThreads];
    int stepCount = realRecords / realThreads;
    if (realRecords % realThreads != 0) {
        // bump step count to spread the remainder, so the last thread does not get up to 2 * stepCount - 1 records
        stepCount += (realRecords % realThreads) / stepCount;
    }
    for (int i = 0; i < realThreads; i++) {
        trainLows[i] = i * stepCount;
        if (i != realThreads - 1) {
            trainHighs[i] = trainLows[i] + stepCount - 1;
        } else {
            trainHighs[i] = realRecords - 1;
        }
    }
    for (int i = 0; i < realThreads; i++) {
        final Map<Integer, TreeNode> localTodoNodes = new HashMap<Integer, TreeNode>(todoNodes);
        final Map<Integer, NodeStats> localStatistics = initTodoNodeStats(todoNodes);
        final int startIndex = trainLows[i];
        final int endIndex = trainHighs[i];
        LOG.info("Thread {} todo size {} stats size {} start index {} end index {}", i, localTodoNodes.size(),
                localStatistics.size(), startIndex, endIndex);
        if (localTodoNodes.size() == 0) {
            continue;
        }
        realThreadCount += 1;
        completionService.submit(new Callable<Map<Integer, NodeStats>>() {
            @Override
            public Map<Integer, NodeStats> call() throws Exception {
                long start = System.nanoTime();
                List<Integer> nodeIndexes = new ArrayList<Integer>(trees.size());
                for (int j = startIndex; j <= endIndex; j++) {
                    Data data = DTWorker.this.trainingData.get(j);
                    nodeIndexes.clear();
                    if (DTWorker.this.isRF) {
                        for (TreeNode treeNode : trees) {
                            if (treeNode.getNode().getId() == Node.INVALID_INDEX) {
                                nodeIndexes.add(Node.INVALID_INDEX);
                            } else {
                                Node predictNode = predictNodeIndex(treeNode.getNode(), data, false);
                                nodeIndexes.add(predictNode.getId());
                            }
                        }
                    }
                    if (DTWorker.this.isGBDT) {
                        int currTreeIndex = trees.size() - 1;
                        Node predictNode = predictNodeIndex(trees.get(currTreeIndex).getNode(), data, false);
                        // update node index
                        nodeIndexes.add(predictNode.getId());
                    }
                    for (Map.Entry<Integer, TreeNode> entry : localTodoNodes.entrySet()) {
                        // only do statistics on effective data
                        Node todoNode = entry.getValue().getNode();
                        int treeId = entry.getValue().getTreeId();
                        int currPredictIndex = 0;
                        if (DTWorker.this.isRF) {
                            currPredictIndex = nodeIndexes.get(entry.getValue().getTreeId());
                        }
                        if (DTWorker.this.isGBDT) {
                            currPredictIndex = nodeIndexes.get(0);
                        }
                        if (todoNode.getId() == currPredictIndex) {
                            List<Integer> features = entry.getValue().getFeatures();
                            if (features.isEmpty()) {
                                features = getAllValidFeatures();
                            }
                            for (Integer columnNum : features) {
                                double[] featureStatistic = localStatistics.get(entry.getKey())
                                        .getFeatureStatistics().get(columnNum);
                                float weight = data.subsampleWeights[treeId % data.subsampleWeights.length];
                                if (Float.compare(weight, 0f) != 0) {
                                    // only compute when weight is not 0
                                    short binIndex = data.inputs[DTWorker.this.inputIndexMap.get(columnNum)];
                                    DTWorker.this.impurity.featureUpdate(featureStatistic, binIndex, data.output,
                                            data.significance, weight);
                                }
                            }
                        }
                    }
                }
                LOG.debug("Thread computing stats time is {}ms in thread {}",
                        TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - start),
                        Thread.currentThread().getName());
                return localStatistics;
            }
        });
    }
    int rCnt = 0;
    while (rCnt < realThreadCount) {
        try {
            Map<Integer, NodeStats> currNodeStatsMap = completionService.take().get();
            if (rCnt == 0) {
                statistics = currNodeStatsMap;
            } else {
                for (Entry<Integer, NodeStats> entry : statistics.entrySet()) {
                    NodeStats resultNodeStats = entry.getValue();
                    mergeNodeStats(resultNodeStats, currNodeStatsMap.get(entry.getKey()));
                }
            }
        } catch (ExecutionException e) {
            throw new RuntimeException(e);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
        rCnt += 1;
    }
    LOG.debug("Compute stats time is {}ms", TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - start));
    LOG.info("worker count is {}, error is {}, and stats size is {}. weightedTrainCount {}, "
            + "weightedValidationCount {}, trainError {}, validationError {}", count, trainError,
            statistics.size(), weightedTrainCount, weightedValidationCount, trainError, validationError);
    return new DTWorkerParams(weightedTrainCount, weightedValidationCount, trainError, validationError,
            statistics);
}
From source file: org.apache.hadoop.yarn.client.api.impl.TimelineClientImpl.java
@Override
public TimelinePutResponse putEntities(ApplicationAttemptId appAttemptId, TimelineEntityGroupId groupId,
        TimelineEntity... entities) throws IOException, YarnException {
    if (Float.compare(this.timelineServiceVersion, 1.5f) != 0) {
        throw new YarnException(
                "This API is not supported under current Timeline Service Version: " + timelineServiceVersion);
    }
    return timelineWriter.putEntities(appAttemptId, groupId, entities);
}
From source file: org.apache.hadoop.yarn.client.api.impl.TimelineClientImpl.java
@Override
public void putDomain(ApplicationAttemptId appAttemptId, TimelineDomain domain)
        throws IOException, YarnException {
    if (Float.compare(this.timelineServiceVersion, 1.5f) != 0) {
        throw new YarnException(
                "This API is not supported under current Timeline Service Version: " + timelineServiceVersion);
    }
    timelineWriter.putDomain(appAttemptId, domain);
}
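Both TimelineClientImpl overloads gate on an exact version check. That is reliable here because the literal 1.5f and a version string such as "1.5" parsed by Float.parseFloat yield the same nearest float, so Float.compare returns 0 for a matching configured version. A minimal sketch:

float configured = Float.parseFloat("1.5"); // e.g. a version read from configuration
System.out.println(Float.compare(configured, 1.5f) == 0); // true: both sides are the same float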
From source file: org.ejbca.core.model.ca.caadmin.CVCCA.java
/** Implementation of UpgradableDataHashMap function upgrade. */
public void upgrade() {
    if (Float.compare(LATEST_VERSION, getVersion()) != 0) {
        // New version of the class, upgrade
        log.info("Upgrading CVCCA with version " + getVersion());
        // Put upgrade code here...
        // v1->v2 is only an upgrade in order to upgrade CA token
        // v2->v3 is an upgrade of X509CA that has to be adjusted here too, due to the common heritage
        if (data.get(CRLPERIOD) instanceof Integer) {
            setCRLPeriod(0L);
        }
        if (data.get(CRLISSUEINTERVAL) instanceof Integer) {
            setCRLIssueInterval(0L);
        }
        if (data.get(CRLOVERLAPTIME) instanceof Integer) {
            setCRLOverlapTime(0L);
        }
        if (data.get(DELTACRLPERIOD) instanceof Integer) {
            setDeltaCRLPeriod(0L);
        }
        data.put(VERSION, new Float(LATEST_VERSION));
    }
}
From source file: us.parr.animl.data.DataTable.java
public void sortBy(int colIndex) {
    switch (colTypes[colIndex]) {
    case CATEGORICAL_INT:
    case NUMERICAL_INT:
    case CATEGORICAL_STRING: // strings are encoded as ints
    case TARGET_CATEGORICAL_STRING:
    case TARGET_CATEGORICAL_INT:
    case UNUSED_INT:
    case UNUSED_STRING:
        Collections.sort(rows, (ra, rb) -> Integer.compare(ra[colIndex], rb[colIndex]));
        break;
    case NUMERICAL_FLOAT:
    case UNUSED_FLOAT:
        Collections.sort(rows, (ra, rb) -> Float.compare(Float.intBitsToFloat(ra[colIndex]),
                Float.intBitsToFloat(rb[colIndex])));
        break;
    }
}
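sortBy works because the table stores every cell as an int, with float columns holding the raw IEEE 754 bits. Float.floatToIntBits and Float.intBitsToFloat round-trip exactly, so decoding before Float.compare recovers the true numeric order; sorting the raw int bits directly would misorder negative floats. A minimal sketch of that round trip:

float value = 3.14f;
int bits = Float.floatToIntBits(value);   // encode: what a float cell in the int[] row holds
float back = Float.intBitsToFloat(bits);  // decode: what the comparator does before comparing
System.out.println(Float.compare(value, back) == 0); // true: the round trip is exact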
From source file: com.waz.zclient.pages.main.drawing.DrawingFragment.java
private boolean isShowingKeyboard() {
    return sketchEditTextView.getVisibility() == View.VISIBLE
            && Float.compare(sketchEditTextView.getAlpha(), TEXT_ALPHA_VISIBLE) == 0
            && KeyboardUtils.isKeyboardVisible(getContext());
}