List of usage examples for java.lang.Float.isNaN
public static boolean isNaN(float v)
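Before the project samples, a minimal standalone sketch (hypothetical, not taken from any of the projects below) of why Float.isNaN exists at all: NaN is the one float value that is not equal to anything, including itself, so an == test can never detect it.

public class IsNaNDemo {
    public static void main(String[] args) {
        float nan = 0.0f / 0.0f;                       // arithmetic that produces NaN
        System.out.println(nan == Float.NaN);          // false: NaN is never == anything, itself included
        System.out.println(Float.isNaN(nan));          // true: the reliable check
        System.out.println(Float.isNaN(1.0f / 0.0f));  // false: positive infinity is not NaN
    }
}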
From source file:at.pagu.soldockr.core.query.Criteria.java
private String processCriteriaEntry(String key, Object value) {
    if (value == null) {
        return null;
    }
    // do not filter expressions
    if (StringUtils.equals(OperationKey.EXPRESSION.getKey(), key)) {
        return value.toString();
    }
    if (StringUtils.equals(OperationKey.BETWEEN.getKey(), key)) {
        Object[] args = (Object[]) value;
        String rangeFragment = "[";
        rangeFragment += args[0] != null ? filterCriteriaValue(args[0]) : WILDCARD;
        rangeFragment += RANGE_OPERATOR;
        rangeFragment += args[1] != null ? filterCriteriaValue(args[1]) : WILDCARD;
        rangeFragment += "]";
        return rangeFragment;
    }
    Object filteredValue = filterCriteriaValue(value);
    if (StringUtils.equals(OperationKey.CONTAINS.getKey(), key)) {
        return WILDCARD + filteredValue + WILDCARD;
    }
    if (StringUtils.equals(OperationKey.STARTS_WITH.getKey(), key)) {
        return filteredValue + WILDCARD;
    }
    if (StringUtils.equals(OperationKey.ENDS_WITH.getKey(), key)) {
        return WILDCARD + filteredValue;
    }
    if (StringUtils.equals(OperationKey.IS_NOT.getKey(), key)) {
        return "-" + filteredValue;
    }
    if (StringUtils.startsWith(key, "$fuzzy")) {
        String sDistance = StringUtils.substringAfter(key, "$fuzzy#");
        float distance = Float.NaN;
        if (StringUtils.isNotBlank(sDistance)) {
            distance = Float.parseFloat(sDistance);
        }
        return filteredValue + "~" + (Float.isNaN(distance) ? "" : sDistance);
    }
    return filteredValue.toString();
}
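Here Float.NaN acts as a "no distance supplied" sentinel: if the $fuzzy key carries no numeric suffix, distance stays NaN and the fragment ends with a bare ~, leaving the fuzziness at the search engine's default.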
From source file:com.google.android.car.kitchensink.sensor.SensorsTestFragment.java
private String getGyroscopeString(CarSensorEvent event) {
    String x = mNaString;
    String y = mNaString;
    String z = mNaString;
    if (event != null) {
        CarSensorEvent.GyroscopeData gyro = event.getGyroscopeData();
        x = Float.isNaN(gyro.x) ? x : String.valueOf(gyro.x);
        y = Float.isNaN(gyro.y) ? y : String.valueOf(gyro.y);
        z = Float.isNaN(gyro.z) ? z : String.valueOf(gyro.z);
    }
    return getContext().getString(R.string.sensor_gyroscope, getTimestamp(event), x, y, z);
}
From source file:com.google.android.car.kitchensink.sensor.SensorsTestFragment.java
private String getAccelerometerString(CarSensorEvent event) {
    String x = mNaString;
    String y = mNaString;
    String z = mNaString;
    if (event != null) {
        CarSensorEvent.AccelerometerData accel = event.getAccelerometerData();
        x = Float.isNaN(accel.x) ? x : String.valueOf(accel.x);
        y = Float.isNaN(accel.y) ? y : String.valueOf(accel.y);
        z = Float.isNaN(accel.z) ? z : String.valueOf(accel.z);
    }
    return getContext().getString(R.string.sensor_accelerometer, getTimestamp(event), x, y, z);
}
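In both sensor helpers (gyroscope above, accelerometer here), NaN stands for an axis the sensor did not report; Float.isNaN keeps the N/A placeholder mNaString instead of rendering the NaN itself.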
From source file:cn.edu.bjtu.cit.recommender.Recommender.java
@SuppressWarnings("unchecked")
public int run(String[] args) throws Exception {
    if (args.length < 2) {
        System.err.println();
        System.err.println("Usage: " + this.getClass().getName()
                + " [generic options] input output [profiling] [estimation] [clustersize]");
        System.err.println();
        printUsage();
        GenericOptionsParser.printGenericCommandUsage(System.err);
        return 1;
    }
    OptionParser parser = new OptionParser(args);
    Pipeline pipeline = new MRPipeline(Recommender.class, getConf());
    if (parser.hasOption(CLUSTER_SIZE)) {
        pipeline.getConfiguration().setInt(ClusterOracle.CLUSTER_SIZE,
                Integer.parseInt(parser.getOption(CLUSTER_SIZE).getValue()));
    }
    if (parser.hasOption(PROFILING)) {
        pipeline.getConfiguration().setBoolean(Profiler.IS_PROFILE, true);
        this.profileFilePath = parser.getOption(PROFILING).getValue();
    }
    if (parser.hasOption(ESTIMATION)) {
        estFile = parser.getOption(ESTIMATION).getValue();
        est = new Estimator(estFile, clusterSize);
    }
    if (parser.hasOption(OPT_REDUCE)) {
        pipeline.getConfiguration().setBoolean(OPT_REDUCE, true);
    }
    if (parser.hasOption(OPT_MSCR)) {
        pipeline.getConfiguration().setBoolean(OPT_MSCR, true);
    }
    if (parser.hasOption(ACTIVE_THRESHOLD)) {
        threshold = Integer.parseInt(parser.getOption("at").getValue());
    }
    if (parser.hasOption(TOP)) {
        top = Integer.parseInt(parser.getOption("top").getValue());
    }
    profiler = new Profiler(pipeline);

    /*
     * input node
     */
    PCollection<String> lines = pipeline.readTextFile(args[0]);
    if (profiler.isProfiling() && lines.getSize() > 10 * 1024 * 1024) {
        lines = lines.sample(0.1);
    }

    /*
     * S0 + GBK
     */
    PGroupedTable<Long, Long> userWithPrefs = lines.parallelDo(new MapFn<String, Pair<Long, Long>>() {
        @Override
        public Pair<Long, Long> map(String input) {
            String[] split = input.split(Estimator.DELM);
            long userID = Long.parseLong(split[0]);
            long itemID = Long.parseLong(split[1]);
            return Pair.of(userID, itemID);
        }

        @Override
        public float scaleFactor() {
            return est.getScaleFactor("S0").sizeFactor;
        }

        @Override
        public float scaleFactorByRecord() {
            return est.getScaleFactor("S0").recsFactor;
        }
    }, Writables.tableOf(Writables.longs(), Writables.longs())).groupByKey(est.getClusterSize());

    /*
     * S1
     */
    PTable<Long, Vector> userVector = userWithPrefs
            .parallelDo(new MapFn<Pair<Long, Iterable<Long>>, Pair<Long, Vector>>() {
                @Override
                public Pair<Long, Vector> map(Pair<Long, Iterable<Long>> input) {
                    Vector userVector = new RandomAccessSparseVector(Integer.MAX_VALUE, 100);
                    for (long itemPref : input.second()) {
                        userVector.set((int) itemPref, 1.0f);
                    }
                    return Pair.of(input.first(), userVector);
                }

                @Override
                public float scaleFactor() {
                    return est.getScaleFactor("S1").sizeFactor;
                }

                @Override
                public float scaleFactorByRecord() {
                    return est.getScaleFactor("S1").recsFactor;
                }
            }, Writables.tableOf(Writables.longs(), Writables.vectors()));
    userVector = profiler.profile("S0-S1", pipeline, userVector, ProfileConverter.long_vector(),
            Writables.tableOf(Writables.longs(), Writables.vectors()));

    /*
     * S2
     */
    PTable<Long, Vector> filteredUserVector = userVector
            .parallelDo(new DoFn<Pair<Long, Vector>, Pair<Long, Vector>>() {
                @Override
                public void process(Pair<Long, Vector> input, Emitter<Pair<Long, Vector>> emitter) {
                    if (input.second().getNumNondefaultElements() > threshold) {
                        emitter.emit(input);
                    }
                }

                @Override
                public float scaleFactor() {
                    return est.getScaleFactor("S2").sizeFactor;
                }

                @Override
                public float scaleFactorByRecord() {
                    return est.getScaleFactor("S2").recsFactor;
                }
            }, Writables.tableOf(Writables.longs(), Writables.vectors()));
    filteredUserVector = profiler.profile("S2", pipeline, filteredUserVector, ProfileConverter.long_vector(),
            Writables.tableOf(Writables.longs(), Writables.vectors()));

    /*
     * S3 + GBK
     */
    PGroupedTable<Integer, Integer> coOccurencePairs = filteredUserVector
            .parallelDo(new DoFn<Pair<Long, Vector>, Pair<Integer, Integer>>() {
                @Override
                public void process(Pair<Long, Vector> input, Emitter<Pair<Integer, Integer>> emitter) {
                    Iterator<Vector.Element> it = input.second().iterateNonZero();
                    while (it.hasNext()) {
                        int index1 = it.next().index();
                        Iterator<Vector.Element> it2 = input.second().iterateNonZero();
                        while (it2.hasNext()) {
                            int index2 = it2.next().index();
                            emitter.emit(Pair.of(index1, index2));
                        }
                    }
                }

                @Override
                public float scaleFactor() {
                    float size = est.getScaleFactor("S3").sizeFactor;
                    return size;
                }

                @Override
                public float scaleFactorByRecord() {
                    float recs = est.getScaleFactor("S3").recsFactor;
                    return recs;
                }
            }, Writables.tableOf(Writables.ints(), Writables.ints())).groupByKey(est.getClusterSize());

    /*
     * S4
     */
    PTable<Integer, Vector> coOccurenceVector = coOccurencePairs
            .parallelDo(new MapFn<Pair<Integer, Iterable<Integer>>, Pair<Integer, Vector>>() {
                @Override
                public Pair<Integer, Vector> map(Pair<Integer, Iterable<Integer>> input) {
                    Vector cooccurrenceRow = new RandomAccessSparseVector(Integer.MAX_VALUE, 100);
                    for (int itemIndex2 : input.second()) {
                        cooccurrenceRow.set(itemIndex2, cooccurrenceRow.get(itemIndex2) + 1.0);
                    }
                    return Pair.of(input.first(), cooccurrenceRow);
                }

                @Override
                public float scaleFactor() {
                    return est.getScaleFactor("S4").sizeFactor;
                }

                @Override
                public float scaleFactorByRecord() {
                    return est.getScaleFactor("S4").recsFactor;
                }
            }, Writables.tableOf(Writables.ints(), Writables.vectors()));
    coOccurenceVector = profiler.profile("S3-S4", pipeline, coOccurenceVector, ProfileConverter.int_vector(),
            Writables.tableOf(Writables.ints(), Writables.vectors()));

    /*
     * S5 Wrapping co-occurrence columns
     */
    PTable<Integer, VectorOrPref> wrappedCooccurrence = coOccurenceVector
            .parallelDo(new MapFn<Pair<Integer, Vector>, Pair<Integer, VectorOrPref>>() {
                @Override
                public Pair<Integer, VectorOrPref> map(Pair<Integer, Vector> input) {
                    return Pair.of(input.first(), new VectorOrPref(input.second()));
                }

                @Override
                public float scaleFactor() {
                    return est.getScaleFactor("S5").sizeFactor;
                }

                @Override
                public float scaleFactorByRecord() {
                    return est.getScaleFactor("S5").recsFactor;
                }
            }, Writables.tableOf(Writables.ints(), VectorOrPref.vectorOrPrefs()));
    wrappedCooccurrence = profiler.profile("S5", pipeline, wrappedCooccurrence, ProfileConverter.int_vopv(),
            Writables.tableOf(Writables.ints(), VectorOrPref.vectorOrPrefs()));

    /*
     * S6 Splitting user vectors
     */
    PTable<Integer, VectorOrPref> userVectorSplit = filteredUserVector
            .parallelDo(new DoFn<Pair<Long, Vector>, Pair<Integer, VectorOrPref>>() {
                @Override
                public void process(Pair<Long, Vector> input, Emitter<Pair<Integer, VectorOrPref>> emitter) {
                    long userID = input.first();
                    Vector userVector = input.second();
                    Iterator<Vector.Element> it = userVector.iterateNonZero();
                    while (it.hasNext()) {
                        Vector.Element e = it.next();
                        int itemIndex = e.index();
                        float preferenceValue = (float) e.get();
                        emitter.emit(Pair.of(itemIndex, new VectorOrPref(userID, preferenceValue)));
                    }
                }

                @Override
                public float scaleFactor() {
                    return est.getScaleFactor("S6").sizeFactor;
                }

                @Override
                public float scaleFactorByRecord() {
                    return est.getScaleFactor("S6").recsFactor;
                }
            }, Writables.tableOf(Writables.ints(), VectorOrPref.vectorOrPrefs()));
    userVectorSplit = profiler.profile("S6", pipeline, userVectorSplit, ProfileConverter.int_vopp(),
            Writables.tableOf(Writables.ints(), VectorOrPref.vectorOrPrefs()));

    /*
     * S7 Combine VectorOrPrefs
     */
    PTable<Integer, VectorAndPrefs> combinedVectorOrPref = wrappedCooccurrence.union(userVectorSplit)
            .groupByKey(est.getClusterSize())
            .parallelDo(new DoFn<Pair<Integer, Iterable<VectorOrPref>>, Pair<Integer, VectorAndPrefs>>() {
                @Override
                public void process(Pair<Integer, Iterable<VectorOrPref>> input,
                        Emitter<Pair<Integer, VectorAndPrefs>> emitter) {
                    Vector vector = null;
                    List<Long> userIDs = Lists.newArrayList();
                    List<Float> values = Lists.newArrayList();
                    for (VectorOrPref vop : input.second()) {
                        if (vector == null) {
                            vector = vop.getVector();
                        }
                        long userID = vop.getUserID();
                        if (userID != Long.MIN_VALUE) {
                            userIDs.add(vop.getUserID());
                        }
                        float value = vop.getValue();
                        if (!Float.isNaN(value)) {
                            values.add(vop.getValue());
                        }
                    }
                    emitter.emit(Pair.of(input.first(), new VectorAndPrefs(vector, userIDs, values)));
                }

                @Override
                public float scaleFactor() {
                    return est.getScaleFactor("S7").sizeFactor;
                }

                @Override
                public float scaleFactorByRecord() {
                    return est.getScaleFactor("S7").recsFactor;
                }
            }, Writables.tableOf(Writables.ints(), VectorAndPrefs.vectorAndPrefs()));
    combinedVectorOrPref = profiler.profile("S5+S6-S7", pipeline, combinedVectorOrPref,
            ProfileConverter.int_vap(), Writables.tableOf(Writables.ints(), VectorAndPrefs.vectorAndPrefs()));

    /*
     * S8 Computing partial recommendation vectors
     */
    PTable<Long, Vector> partialMultiply = combinedVectorOrPref
            .parallelDo(new DoFn<Pair<Integer, VectorAndPrefs>, Pair<Long, Vector>>() {
                @Override
                public void process(Pair<Integer, VectorAndPrefs> input, Emitter<Pair<Long, Vector>> emitter) {
                    Vector cooccurrenceColumn = input.second().getVector();
                    List<Long> userIDs = input.second().getUserIDs();
                    List<Float> prefValues = input.second().getValues();
                    for (int i = 0; i < userIDs.size(); i++) {
                        long userID = userIDs.get(i);
                        if (userID != Long.MIN_VALUE) {
                            float prefValue = prefValues.get(i);
                            Vector partialProduct = cooccurrenceColumn.times(prefValue);
                            emitter.emit(Pair.of(userID, partialProduct));
                        }
                    }
                }

                @Override
                public float scaleFactor() {
                    return est.getScaleFactor("S8").sizeFactor;
                }

                @Override
                public float scaleFactorByRecord() {
                    return est.getScaleFactor("S8").recsFactor;
                }
            }, Writables.tableOf(Writables.longs(), Writables.vectors())).groupByKey(est.getClusterSize())
            .combineValues(new CombineFn<Long, Vector>() {
                @Override
                public void process(Pair<Long, Iterable<Vector>> input, Emitter<Pair<Long, Vector>> emitter) {
                    Vector partial = null;
                    for (Vector vector : input.second()) {
                        partial = partial == null ? vector : partial.plus(vector);
                    }
                    emitter.emit(Pair.of(input.first(), partial));
                }

                @Override
                public float scaleFactor() {
                    return est.getScaleFactor("combine").sizeFactor;
                }

                @Override
                public float scaleFactorByRecord() {
                    return est.getScaleFactor("combine").recsFactor;
                }
            });
    partialMultiply = profiler.profile("S8-combine", pipeline, partialMultiply, ProfileConverter.long_vector(),
            Writables.tableOf(Writables.longs(), Writables.vectors()));

    /*
     * S9 Producing recommendations from vectors
     */
    PTable<Long, RecommendedItems> recommendedItems = partialMultiply
            .parallelDo(new DoFn<Pair<Long, Vector>, Pair<Long, RecommendedItems>>() {
                @Override
                public void process(Pair<Long, Vector> input, Emitter<Pair<Long, RecommendedItems>> emitter) {
                    Queue<RecommendedItem> topItems = new PriorityQueue<RecommendedItem>(11,
                            Collections.reverseOrder(BY_PREFERENCE_VALUE));
                    Iterator<Vector.Element> recommendationVectorIterator = input.second().iterateNonZero();
                    while (recommendationVectorIterator.hasNext()) {
                        Vector.Element element = recommendationVectorIterator.next();
                        int index = element.index();
                        float value = (float) element.get();
                        if (topItems.size() < top) {
                            topItems.add(new GenericRecommendedItem(index, value));
                        } else if (value > topItems.peek().getValue()) {
                            topItems.add(new GenericRecommendedItem(index, value));
                            topItems.poll();
                        }
                    }
                    List<RecommendedItem> recommendations = new ArrayList<RecommendedItem>(topItems.size());
                    recommendations.addAll(topItems);
                    Collections.sort(recommendations, BY_PREFERENCE_VALUE);
                    emitter.emit(Pair.of(input.first(), new RecommendedItems(recommendations)));
                }

                @Override
                public float scaleFactor() {
                    return est.getScaleFactor("S9").sizeFactor;
                }

                @Override
                public float scaleFactorByRecord() {
                    return est.getScaleFactor("S9").recsFactor;
                }
            }, Writables.tableOf(Writables.longs(), RecommendedItems.recommendedItems()));
    recommendedItems = profiler.profile("S9", pipeline, recommendedItems, ProfileConverter.long_ri(),
            Writables.tableOf(Writables.longs(), RecommendedItems.recommendedItems()));

    /*
     * Profiling
     */
    if (profiler.isProfiling()) {
        profiler.writeResultToFile(profileFilePath);
        profiler.cleanup(pipeline.getConfiguration());
        return 0;
    }

    /*
     * asText
     */
    pipeline.writeTextFile(recommendedItems, args[1]);
    PipelineResult result = pipeline.done();
    return result.succeeded() ? 0 : 1;
}
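In stage S7, where the wrapped co-occurrence columns and the split user preferences are unioned, Float.isNaN tells the two record kinds apart: a VectorOrPref wrapping a vector carries NaN as its preference value (and Long.MIN_VALUE as its user ID), while a genuine preference carries a finite value.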
From source file:routines.system.BigDataParserUtils.java
public static byte parseTo_byte(float input) {
    if (Float.isNaN(input)) {
        return defaultValueByte;
    }
    return ((Float) input).byteValue();
}
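The guard matters because Java's narrowing conversion turns NaN into 0, which would be indistinguishable from a real zero; checking isNaN first lets the parser return an explicit default instead.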
From source file:jp.co.acroquest.jsonic.Formatter.java
public boolean format(final JSON json, final Context context, final Object src, final Object o,
        final OutputSource out) throws Exception {
    NumberFormat f = context.getNumberFormat();
    float[] array = (float[]) o;
    out.append('[');
    for (int i = 0; i < array.length; i++) {
        if (Float.isNaN(array[i]) || Float.isInfinite(array[i])) {
            if (context.getMode() != Mode.SCRIPT) {
                out.append('"');
                out.append(Float.toString(array[i]));
                out.append('"');
            } else if (Double.isNaN(array[i])) {
                out.append("Number.NaN");
            } else {
                out.append("Number.");
                out.append((array[i] > 0) ? "POSITIVE" : "NEGATIVE");
                out.append("_INFINITY");
            }
        } else if (f != null) {
            StringFormatter.serialize(context, f.format(array[i]), out);
        } else {
            out.append(String.valueOf(array[i]));
        }
        if (i != array.length - 1) {
            out.append(',');
            if (context.isPrettyPrint())
                out.append(' ');
        }
    }
    out.append(']');
    return true;
}
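Plain JSON has no literal for NaN or the infinities, so outside SCRIPT mode the formatter emits them as quoted strings; in SCRIPT mode it can use JavaScript's Number.NaN and Number.POSITIVE_INFINITY/NEGATIVE_INFINITY. Note that the Double.isNaN check on a float element is safe, since a float NaN widens to a double NaN.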
From source file:org.caleydo.view.bicluster.elem.GLRootElement.java
/**
 * @param dimension
 * @param thresholds
 *            bicluster id x threshold
 */
public void setThresholds(EDimension dimension, Map<Integer, Float> thresholds) {
    float thresh = Float.NEGATIVE_INFINITY;
    for (NormalClusterElement elem : allNormalClusters()) {
        int number = elem.getBiClusterNumber();
        if (thresholds.containsKey(number)) {
            float t = thresholds.get(number);
            if (Float.isInfinite(thresh))
                thresh = t;
            if (t != thresh)
                thresh = Float.NaN;
            elem.setThreshold(dimension, t, MyUnboundSpinner.UNBOUND, EThresholdMode.ABS);
        }
    }
    if (!Float.isNaN(thresh) && !Float.isInfinite(thresh)) {
        // all clusters share the same threshold: reflect it in the parameter toolbar
        this.toolbarParam.setThreshold(dimension, thresh);
    }
    updateAllEdges();
}
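The method runs a small three-state accumulator: NEGATIVE_INFINITY means no threshold seen yet, a finite value means all thresholds so far agree, and NaN means they differ; only when the loop ends in the agreeing state is the shared value pushed to the parameter toolbar.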
From source file:papaya.Rank.java
/**
 * Returns an array that is a copy of the input array with IntFloatPairs
 * having NaN values removed.
 *
 * @param ranks input array
 * @return array with NaN-valued entries removed
 */
private static IntFloatPair[] removeNaNs(IntFloatPair[] ranks) {
    if (!containsNaNs(ranks)) {
        return ranks;
    }
    IntFloatPair[] outRanks = new IntFloatPair[ranks.length];
    int j = 0;
    for (int i = 0; i < ranks.length; i++) {
        if (Float.isNaN(ranks[i].getValue())) {
            // drop, but adjust original ranks of later elements
            for (int k = i + 1; k < ranks.length; k++) {
                ranks[k] = new IntFloatPair(ranks[k].getValue(), ranks[k].getPosition() - 1);
            }
        } else {
            outRanks[j] = new IntFloatPair(ranks[i].getValue(), ranks[i].getPosition());
            j++;
        }
    }
    IntFloatPair[] returnRanks = new IntFloatPair[j];
    System.arraycopy(outRanks, 0, returnRanks, 0, j);
    return returnRanks;
}
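Float.isNaN is the only dependable way to find the entries to drop (NaN compares unequal even to itself); each removal also decrements the stored positions of all later pairs so the surviving ranks stay contiguous.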
From source file:com.bazaarvoice.jackson.rison.RisonGenerator.java
@Override
public void writeNumber(float f) throws IOException, JsonGenerationException {
    if (_cfgNumbersAsStrings || // [JACKSON-139]
            ((Float.isNaN(f) || Float.isInfinite(f))
                    && isEnabled(JsonGenerator.Feature.QUOTE_NON_NUMERIC_NUMBERS))) {
        writeString(Float.toString(f));
        return;
    }
    // What is the max length for floats?
    _verifyValueWrite("write number");
    _writeRaw(formatFloat(f));
}
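isNaN and isInfinite route non-finite floats through writeString when QUOTE_NON_NUMERIC_NUMBERS is enabled, presumably because the Rison output grammar, like JSON's, has no token for NaN or Infinity.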
From source file:org.caleydo.view.parcoords.v2.ParallelCoordinateElement.java
private List<Vec2f> asPoints(Integer recordID, Iterable<AAxisElement> it) {
    final Table table = getTablePerspective().getDataDomain().getTable();
    List<Vec2f> points = new ArrayList<>(this.size());
    for (AAxisElement axis : it) {
        float raw = table.getNormalizedValue(axis.getId(), recordID);
        if (!axis.apply(raw))
            return Collections.emptyList();
        if (Float.isNaN(raw)) {
            raw = NAN_VALUE;
        }
        points.add(new Vec2f(axis.getX(), 1 - raw));
    }
    return points;
}
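Rather than discarding a record with a missing value, the view pins NaN to the constant NAN_VALUE so the polyline still gets a drawable y-coordinate on that axis.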