List of usage examples for java.lang Float NaN
Field: public static final float NaN, a constant holding a Not-a-Number (NaN) value of type float.
The examples below show how Float.NaN is used in real-world source files.
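Before the listings, a minimal standalone sketch (not taken from any of the files below) of the two properties every example relies on: NaN never compares equal to anything, including itself, so Float.isNaN is the only reliable test, and NaN propagates through arithmetic.

// Minimal sketch, not from the listings below.
public class FloatNaNDemo {
    public static void main(String[] args) {
        float unset = Float.NaN;                 // common "no value" sentinel
        System.out.println(unset == Float.NaN);  // false: NaN is not equal to anything, even itself
        System.out.println(Float.isNaN(unset));  // true: the correct way to test for NaN
        System.out.println(unset + 1.0f);        // NaN: NaN propagates through arithmetic
    }
}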
From source file:tufts.vue.RichTextBox.java
RichTextBox(LWComponent lwc, String text) {
    if (DEBUG.TEXT && DEBUG.LAYOUT)
        tufts.Util.printClassTrace("tufts.vue.", "NEW RichTextBox, txt=" + text);
    if (TestDebug || DEBUG.TEXT)
        out("NEW [" + text + "] " + lwc);

    // vueHighlighter = new RichTextHighlighter(this);
    // this.setHighlighter(vueHighlighter);

    SHTMLEditorKit kit = new SHTMLEditorKit(/* renderMode */);
    //kit.resetStyleSheet();
    setEditorKit(kit);

    this.lwc = lwc;
    setDragEnabled(false);
    setBorder(null);
    if (text != null)
        setText(text);
    setMargin(null);
    setOpaque(false); // don't bother to paint background
    setVisible(true);

    addMouseListener(this);
    addKeyListener(this);
    addFocusListener(this);
    getDocument().addDocumentListener(this);

    if (VueUtil.isWindowsPlatform() && SelectionColor != null)
        setSelectionColor(SelectionColor);
    if (VueUtil.isWindowsPlatform() && SelectionColor != null)
        setSelectedTextColor(Color.black);

    mBounds.x = Float.NaN; // mark as uninitialized
    mBounds.y = Float.NaN; // mark as uninitialized

    if (TestDebug || DEBUG.TEXT)
        out("constructed " + getSize());
}
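The mBounds assignments above use Float.NaN as an "uninitialized" marker. A minimal sketch of that pattern with a hypothetical Bounds class (not VUE's actual API): initialize to NaN, and have readers check Float.isNaN before trusting the value.

// Minimal sketch of the NaN-as-uninitialized pattern; Bounds is hypothetical.
class Bounds {
    float x = Float.NaN; // NaN marks "not laid out yet"
    float y = Float.NaN;

    boolean isInitialized() {
        return !Float.isNaN(x) && !Float.isNaN(y);
    }

    void set(float newX, float newY) {
        x = newX;
        y = newY;
    }
}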
From source file:dk.dma.ais.abnormal.analyzer.analysis.CloseEncounterAnalysis.java
void analyseCloseEncounter(Track track1, Track track2) {
    if (track1.getSpeedOverGround() > sogMin && !isTrackPairAnalyzed(track1, track2)) {

        final long t = max(track1.getTimeOfLastPositionReport(), track2.getTimeOfLastPositionReport());

        if (t > track1.getTimeOfLastPositionReport()) {
            track1.predict(t);
        }
        if (t > track2.getTimeOfLastPositionReport()) {
            track2.predict(t);
        }

        if (isLastAisTrackingReportTooOld(track1, t)) {
            LOG.debug("Skipping analysis: MMSI " + track1.getMmsi() + " was predicted for too long.");
            return;
        }
        if (isLastAisTrackingReportTooOld(track2, t)) {
            LOG.debug("Skipping analysis: MMSI " + track2.getMmsi() + " was predicted for too long.");
            return;
        }

        boolean allValuesPresent = false;
        float track1Cog = Float.NaN, track1Sog = Float.NaN, track2Hdg = Float.NaN;
        int track1Loa = -1, track1Beam = -1, track1Stern = -1, track1Starboard = -1,
            track2Loa = -1, track2Beam = -1, track2Stern = -1, track2Starboard = -1;

        try {
            track1Cog = track1.getCourseOverGround();
            track1Sog = track1.getSpeedOverGround();
            track1Loa = track1.getVesselLength();
            track1Beam = track1.getVesselBeam();
            track1Stern = track1.getShipDimensionStern();
            track1Starboard = track1.getShipDimensionStarboard();
            track2Hdg = track2.getTrueHeading();
            track2Loa = track2.getVesselLength();
            track2Beam = track2.getVesselBeam();
            track2Stern = track2.getShipDimensionStern();
            track2Starboard = track2.getShipDimensionStarboard();
            allValuesPresent = true;
        } catch (NullPointerException e) {
        }

        if (allValuesPresent && !Float.isNaN(track1Cog) && !Float.isNaN(track2Hdg)) {
            Ellipse safetyEllipseTrack1 = safetyZone(track1.getPosition(), track1.getPosition(), track1Cog,
                    track1Sog, track1Loa, track1Beam, track1Stern, track1Starboard);
            Ellipse extentTrack2 = vesselExtent(track1.getPosition(), track2.getPosition(), track2Hdg,
                    track2Loa, track2Beam, track2Stern, track2Starboard);

            if (safetyEllipseTrack1 != null && extentTrack2 != null
                    && safetyEllipseTrack1.intersects(extentTrack2)) {
                track1.setProperty(Track.SAFETY_ZONE, safetyEllipseTrack1);
                track2.setProperty(Track.EXTENT, extentTrack2);
                raiseOrMaintainAbnormalEvent(CloseEncounterEvent.class, track1, track2);
            } else {
                lowerExistingAbnormalEventIfExists(CloseEncounterEvent.class, track1);
            }
        }
        markTrackPairAnalyzed(track1, track2);
    } else {
        LOG.debug("PREVIOUSLY COMPARED " + track1.getMmsi() + " AGAINST " + track2.getMmsi());
    }
}
From source file:uk.ac.babraham.SeqMonk.Pipelines.IntronRegressionPipeline.java
protected void startPipeline() {

    // We first need to generate probes over all of the features listed in
    // the feature types. The probes should cover the whole area of the
    // feature regardless of where it splices.

    Vector<Probe> probes = new Vector<Probe>();

    int minDensity = optionsPanel.minDensity();
    int minLength = optionsPanel.minLength();
    double maxPValue = optionsPanel.maxPValue();
    int binSize = optionsPanel.measurementBinSize();

    QuantitationStrandType readFilter = optionsPanel.readFilter();

    Chromosome[] chrs = collection().genome().getAllChromosomes();

    for (int c = 0; c < chrs.length; c++) {
        if (cancel) {
            progressCancelled();
            return;
        }

        Vector<Probe> probesForThisChromosome = new Vector<Probe>();

        progressUpdated("Making probes", c, chrs.length);

        Feature[] features = getValidFeatures(chrs[c]);

        for (int f = 0; f < features.length; f++) {
            if (cancel) {
                progressCancelled();
                return;
            }

            // Now we can iterate through the introns in this feature
            if (!(features[f].location() instanceof SplitLocation))
                continue; // There are no introns here

            Location[] subLocations = ((SplitLocation) features[f].location()).subLocations();

            // TODO: Reverse the subLocations if its a reverse feature

            for (int intron = 1; intron < subLocations.length; intron++) {

                int start = subLocations[intron - 1].end();
                int end = subLocations[intron].start();

                if ((end - start) + 1 < minLength) {
                    continue; // This intron is too short.
                }

                // TODO: We could throw away any probes which didn't have enough reads in any feature

                Probe p = new Probe(chrs[c], start, end, features[f].location().strand(),
                        features[f].name() + "_" + intron);
                probesForThisChromosome.add(p);
            }
        }

        // Now we can deduplicate the probes for this chromosome and add them to the main collection
        Probe[] dupProbes = probesForThisChromosome.toArray(new Probe[0]);
        Arrays.sort(dupProbes);

        for (int p = 0; p < dupProbes.length; p++) {
            if (p > 0 && dupProbes[p].packedPosition() == dupProbes[p - 1].packedPosition())
                continue;
            probes.add(dupProbes[p]);
        }
    }

    Probe[] allProbes = probes.toArray(new Probe[0]);

    collection().setProbeSet(new ProbeSet("Features over " + optionsPanel.getSelectedFeatureType(), allProbes));

    // Now we go back through the probes and quantitate them
    for (int p = 0; p < allProbes.length; p++) {
        if (cancel) {
            progressCancelled();
            return;
        }

        if (p % 1000 == 0) {
            progressUpdated("Quantitated " + p + " out of " + allProbes.length + " probes", p, allProbes.length);
        }

        for (int d = 0; d < data.length; d++) {
            long[] reads = data[d].getReadsForProbe(allProbes[p]);

            int[] countsPerSite = new int[allProbes[p].length()];

            int usableCounts = 0;

            for (int r = 0; r < reads.length; r++) {
                if (readFilter.useRead(allProbes[p], reads[r])) {
                    ++usableCounts;
                    for (int pos = Math.max(0, SequenceRead.start(reads[r]) - allProbes[p].start());
                            pos <= Math.min(countsPerSite.length - 1,
                                    SequenceRead.end(reads[r]) - allProbes[p].start());
                            pos++) {
                        ++countsPerSite[pos];
                    }
                }
            }

            if (usableCounts / (allProbes[p].length() / 1000d) >= minDensity) {

                // We're going to do a linear regression rather than a correlation

                // We're analysing in bins so we'll work out the bin counts and
                // add them dynamically to the regression.

                SimpleRegression regression = new SimpleRegression();

                int binCount = 0;
                for (int i = 0; i < countsPerSite.length; i++) {
                    if (i > 0 && i % binSize == 0) {
                        regression.addData(i, binCount);
                        binCount = 0;
                    }
                    binCount += countsPerSite[i];
                }

                float slope = (float) (regression.getSlope() * 1000000);
                double pValue = regression.getSignificance();

                if (allProbes[p].strand() == Location.REVERSE) {
                    slope = 0 - slope;
                }

                if (pValue <= maxPValue) {
                    data[d].setValueForProbe(allProbes[p], slope);
                } else {
                    data[d].setValueForProbe(allProbes[p], Float.NaN);
                }
            } else {
                data[d].setValueForProbe(allProbes[p], Float.NaN);
            }
        }
    }

    StringBuffer quantitationDescription = new StringBuffer();
    quantitationDescription.append("Intron regression pipeline quantitation ");
    quantitationDescription.append(". Directionality was ");
    quantitationDescription.append(optionsPanel.libraryTypeBox.getSelectedItem());
    quantitationDescription.append(". Min intron length was ");
    quantitationDescription.append(minLength);
    quantitationDescription.append(". Min read density was ");
    quantitationDescription.append(minDensity);
    quantitationDescription.append(". Max slope p-value was ");
    quantitationDescription.append(maxPValue);

    collection().probeSet().setCurrentQuantitation(quantitationDescription.toString());

    quantitatonComplete();
}
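In the pipeline above, probes that fail the read-density or p-value checks are stored as Float.NaN rather than 0, so "no measurement" stays distinguishable from "a slope of zero". A hypothetical helper (not part of SeqMonk) showing the kind of NaN-aware averaging downstream code then needs:

// Hypothetical helper: average probe values while skipping NaN placeholders.
static float meanIgnoringNaN(float[] values) {
    double sum = 0;
    int count = 0;
    for (float v : values) {
        if (!Float.isNaN(v)) {
            sum += v;
            count++;
        }
    }
    return count == 0 ? Float.NaN : (float) (sum / count);
}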
From source file:org.caleydo.core.util.impute.KNNImpute.java
public static void main(String[] args) throws IOException {
    ImmutableList.Builder<Gene> b = ImmutableList.builder();
    List<String> lines = CharStreams
            .readLines(new InputStreamReader(KNNImpute.class.getResourceAsStream("khan.csv")));
    lines = lines.subList(1, lines.size());

    int j = 0;
    for (String line : lines) {
        String[] l = line.split(";");
        float[] d = new float[l.length];
        int nans = 0;
        for (int i = 0; i < l.length; ++i) {
            if ("NA".equals(l[i])) {
                nans++;
                d[i] = Float.NaN;
            } else {
                d[i] = Float.parseFloat(l[i]);
            }
        }
        b.add(new Gene(j++, nans, d));
    }

    final KNNImputeDescription desc2 = new KNNImputeDescription();
    desc2.setMaxp(100000);
    KNNImpute r = new KNNImpute(desc2, b.build());

    ForkJoinPool p = new ForkJoinPool();
    p.invoke(r);

    try (PrintWriter w = new PrintWriter("khan.imputed.csv")) {
        w.println(StringUtils.repeat("sample", ";", r.samples));
        for (Gene g : r.genes) {
            float[] d = g.data;
            int nan = 0;
            w.print(Float.isNaN(d[0]) ? g.nanReplacements[nan++] : d[0]);
            for (int i = 1; i < d.length; ++i)
                w.append(';').append(String.valueOf(Float.isNaN(d[i]) ? g.nanReplacements[nan++] : d[i]));
            w.println();
        }
    }
}
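The parsing loop above maps the textual missing-value marker "NA" to Float.NaN so the imputation step can later locate the gaps with Float.isNaN. The same idiom as a small standalone helper (hypothetical, not part of Caleydo):

// Hypothetical helper: parse a CSV token, mapping the "NA" marker to NaN.
static float parseOrNaN(String token) {
    return "NA".equals(token) ? Float.NaN : Float.parseFloat(token);
}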
From source file:ExposedFloat.java
public boolean action(Event evt, Object arg) {
    if (evt.target instanceof Button) {
        String bname = (String) arg;
        if (bname.equals(incrementButtonString)) {
            ++value;
        } else if (bname.equals(decrementButtonString)) {
            --value;
        } else if (bname.equals(multByZeroButtonString)) {
            value *= (float) 0.0;
        } else if (bname.equals(piButtonString)) {
            value = (float) Math.PI;
        } else if (bname.equals(positiveInfinityButtonString)) {
            value = Float.POSITIVE_INFINITY;
        } else if (bname.equals(negativeInfinityButtonString)) {
            value = Float.NEGATIVE_INFINITY;
        } else if (bname.equals(maximumButtonString)) {
            value = Float.MAX_VALUE;
        } else if (bname.equals(minimumButtonString)) {
            value = Float.MIN_VALUE;
        } else if (bname.equals(notANumberButtonString)) {
            value = Float.NaN;
        } else if (bname.equals(changeSignButtonString)) {
            value *= -1.0;
        } else if (bname.equals(doubleButtonString)) {
            value *= 2.0;
        } else if (bname.equals(halveButtonString)) {
            value /= 2.0;
        }
        updateNumberFields();

        enableDisableButton(maximumButton, Float.MAX_VALUE);
        enableDisableButton(minimumButton, Float.MIN_VALUE);
        enableDisableButton(positiveInfinityButton, Float.POSITIVE_INFINITY);
        enableDisableButton(negativeInfinityButton, Float.NEGATIVE_INFINITY);
        enableDisableButton(piButton, (float) Math.PI);
        enableDisableButton(notANumberButton, Float.NaN);

        // The NaN button gets its own Float.isNaN check below, presumably because
        // a value == Float.NaN comparison is always false, so a generic equality
        // test cannot detect the NaN state.
        if (!notANumberButton.isEnabled()) {
            if (!Float.isNaN(value)) {
                notANumberButton.enable();
            }
        } else if (Float.isNaN(value)) {
            notANumberButton.disable();
        }
    }
    return true;
}
From source file:org.caleydo.core.util.impute.KNNImpute.java
/**
 * Split the neighborhood into two groups using 2-means (k-means with k = 2).
 *
 * @param neighborhood
 * @return
 */
private Pair<List<Gene>, List<Gene>> twoMeanClusterSplit(List<Gene> neighborhood) {
    final int n = neighborhood.size();

    final int maxit = desc.getMaxit();
    final double eps = desc.getEps();

    int a_start = r.nextInt(n);
    int b_start = r.nextInt(n);
    Gene a_center = new Gene(1, -1, Arrays.copyOf(neighborhood.get(a_start).data, samples));
    Gene b_center = new Gene(1, -1, Arrays.copyOf(neighborhood.get(b_start).data, samples));
    float[] a_center_pong = new float[samples];
    Arrays.fill(a_center_pong, Float.NaN);
    float[] b_center_pong = new float[samples];
    Arrays.fill(b_center_pong, Float.NaN);

    float[] tmp;

    BitSet partOf_a = new BitSet(n);

    double d_old = 0;
    for (int i = 0; i < maxit; ++i) {
        int j = 0;
        int changed = 0;
        double d_new = 0;
        for (Gene gene : neighborhood) {
            final double a_distance = distance(a_center, gene);
            final double b_distance = distance(b_center, gene);
            final boolean in_a = a_distance < b_distance;
            if (partOf_a.get(j) != in_a) {
                changed++;
                partOf_a.set(j, in_a);
            }
            d_new += in_a ? a_distance : b_distance;

            tmp = in_a ? a_center_pong : b_center_pong;
            // shift new center
            for (int k = 0; k < samples; ++k) {
                if (!gene.isNaN(k)) {
                    if (Float.isNaN(tmp[k]))
                        tmp[k] = gene.get(k);
                    else
                        tmp[k] += gene.get(k);
                }
            }
            j++;
        }
        if (changed == 0 || d_new == 0)
            break;
        final double ratio = Math.abs(d_new - d_old) / d_old;
        if (i > 0 && ratio < eps)
            break;
        d_old = d_new;

        int a_n = partOf_a.cardinality();
        int b_n = n - a_n;
        if (a_n == 0 || b_n == 0) {
            // FIXME
        }
        updateCenter(a_center, a_center_pong, a_n);
        updateCenter(b_center, b_center_pong, b_n);
    }

    return split(neighborhood, partOf_a);
}
From source file:org.mrgeo.image.MrsImagePyramidMetadata.java
@JsonIgnore
public float getDefaultValueFloat(final int band) {
    if (band < getBands()) {
        return Double.valueOf(defaultValues[band]).floatValue();
    }
    return Float.NaN;
}
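Since getDefaultValueFloat falls back to Float.NaN for an out-of-range band, callers cannot test the result with ==; they need a Float.isNaN guard. A hypothetical caller sketch:

// Hypothetical caller: fall back to 0 when no default value exists for the band.
static float defaultOrZero(MrsImagePyramidMetadata metadata, int band) {
    float value = metadata.getDefaultValueFloat(band);
    return Float.isNaN(value) ? 0f : value;
}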
From source file:org.apache.hadoop.yarn.server.resourcemanager.TestApplicationMasterService.java
@Test(timeout = 1200000)
public void testProgressFilter() throws Exception {
    MockRM rm = new MockRM(conf);
    rm.start();

    // Register node1
    MockNM nm1 = rm.registerNode("127.0.0.1:1234", 6 * GB);

    // Submit an application
    RMApp app1 = rm.submitApp(2048);

    nm1.nodeHeartbeat(true);
    RMAppAttempt attempt1 = app1.getCurrentAppAttempt();
    MockAM am1 = rm.sendAMLaunched(attempt1.getAppAttemptId());
    am1.registerAppAttempt();

    AllocateRequestPBImpl allocateRequest = new AllocateRequestPBImpl();
    List<ContainerId> release = new ArrayList<ContainerId>();
    List<ResourceRequest> ask = new ArrayList<ResourceRequest>();
    allocateRequest.setReleaseList(release);
    allocateRequest.setAskList(ask);

    allocateRequest.setProgress(Float.POSITIVE_INFINITY);
    am1.allocate(allocateRequest);
    while (attempt1.getProgress() != 1) {
        LOG.info("Waiting for allocate event to be handled ...");
        sleep(100);
    }

    allocateRequest.setProgress(Float.NaN);
    am1.allocate(allocateRequest);
    while (attempt1.getProgress() != 0) {
        LOG.info("Waiting for allocate event to be handled ...");
        sleep(100);
    }

    allocateRequest.setProgress((float) 9);
    am1.allocate(allocateRequest);
    while (attempt1.getProgress() != 1) {
        LOG.info("Waiting for allocate event to be handled ...");
        sleep(100);
    }

    allocateRequest.setProgress(Float.NEGATIVE_INFINITY);
    am1.allocate(allocateRequest);
    while (attempt1.getProgress() != 0) {
        LOG.info("Waiting for allocate event to be handled ...");
        sleep(100);
    }

    allocateRequest.setProgress((float) 0.5);
    am1.allocate(allocateRequest);
    while (attempt1.getProgress() != 0.5) {
        LOG.info("Waiting for allocate event to be handled ...");
        sleep(100);
    }

    allocateRequest.setProgress((float) -1);
    am1.allocate(allocateRequest);
    while (attempt1.getProgress() != 0) {
        LOG.info("Waiting for allocate event to be handled ...");
        sleep(100);
    }
}
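The test above expects the reported progress to be sanitised: NaN, negative values, and negative infinity become 0, while values above 1 and positive infinity are capped at 1. A minimal sketch of a filter with that behaviour (not YARN's actual implementation):

// Sketch of the progress sanitisation behaviour exercised by the test above.
static float cleanProgress(float progress) {
    if (Float.isNaN(progress) || progress < 0f) {
        return 0f; // NaN, negative values, and -Infinity map to 0
    }
    return Math.min(progress, 1f); // values above 1 and +Infinity map to 1
}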
From source file:io.anserini.search.SearchCollection.java
public <K> ScoredDocuments searchTweets(IndexSearcher searcher, K qid, String queryString, long t)
        throws IOException {
    Query keywordQuery = AnalyzerUtils.buildBagOfWordsQuery(FIELD_BODY, analyzer, queryString);
    List<String> queryTokens = AnalyzerUtils.tokenize(analyzer, queryString);

    // Do not consider the tweets with tweet ids that are beyond the queryTweetTime
    // <querytweettime> tag contains the timestamp of the query in terms of the
    // chronologically nearest tweet id within the corpus
    Query filter = LongPoint.newRangeQuery(TweetGenerator.StatusField.ID_LONG.name, 0L, t);

    BooleanQuery.Builder builder = new BooleanQuery.Builder();
    builder.add(filter, BooleanClause.Occur.FILTER);
    builder.add(keywordQuery, BooleanClause.Occur.MUST);
    Query compositeQuery = builder.build();

    TopDocs rs = new TopDocs(0, new ScoreDoc[] {}, Float.NaN);

    if (!(isRerank && args.rerankcutoff <= 0)) {
        // Figure out how to break the scoring ties.
        if (args.arbitraryScoreTieBreak) {
            rs = searcher.search(compositeQuery, isRerank ? args.rerankcutoff : args.hits);
        } else {
            rs = searcher.search(compositeQuery, isRerank ? args.rerankcutoff : args.hits,
                    BREAK_SCORE_TIES_BY_TWEETID, true, true);
        }
    }

    RerankerContext context = new RerankerContext<>(searcher, qid, keywordQuery, queryString, queryTokens,
            filter, args);

    return cascade.run(ScoredDocuments.fromTopDocs(rs, searcher), context);
}
From source file:org.janusgraph.core.attribute.Geoshape.java
/**
 * Constructs a circle from a given center point and a radius in kilometers
 * @param latitude
 * @param longitude
 * @param radiusInKM
 * @return
 */
public static final Geoshape circle(final float latitude, final float longitude, final float radiusInKM) {
    Preconditions.checkArgument(isValidCoordinate(latitude, longitude), "Invalid coordinate provided");
    Preconditions.checkArgument(radiusInKM > 0, "Invalid radius provided [%s]", radiusInKM);
    return new Geoshape(
            new float[][] { new float[] { latitude, Float.NaN }, new float[] { longitude, radiusInKM } });
}
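The two-point array above encodes the circle as {latitude, Float.NaN} and {longitude, radiusInKM}; the NaN in the second slot of the first pair appears to be what marks the shape as a circle rather than a box. A short usage sketch with illustrative coordinates:

// Usage sketch: a 10 km circle around an illustrative coordinate.
Geoshape area = Geoshape.circle(37.97f, 23.72f, 10.0f);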