List of usage examples for java.lang.Integer.MIN_VALUE
public static final int MIN_VALUE: the constant holding the minimum value an int can have, -2^31 (-2,147,483,648).
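Before the examples, a short self-contained sketch of what the constant is and of its best-known pitfall: the int range is asymmetric, so negating or taking the absolute value of MIN_VALUE overflows back to MIN_VALUE (class name is illustrative):

public class MinValueDemo {
    public static void main(String[] args) {
        System.out.println(Integer.MIN_VALUE);             // -2147483648
        System.out.println(Integer.MIN_VALUE - 1);         // 2147483647: subtraction wraps around to MAX_VALUE
        System.out.println(Math.abs(Integer.MIN_VALUE));   // -2147483648: no positive counterpart fits in an int
        System.out.println(-Integer.MIN_VALUE);            // -2147483648: negation overflows the same way
    }
}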
From source file:gov.nih.nci.cabig.caaers.accesscontrol.query.impl.AbstractIdFetcher.java
protected Integer getOrganizationAllSiteAccessRoles(String loginId) {
    AbstractQuery query = new HQLQuery(
            "select role from OrganizationIndex oi where loginId=:LOGIN_ID and organization.id = "
                    + Integer.MIN_VALUE);
    query.getParameterMap().put("LOGIN_ID", loginId);
    List<Integer> ids = (List<Integer>) search(query);
    if (!ids.isEmpty())
        return ids.get(0);
    return 0;
}
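A hedged sketch of the convention the query above appears to rely on: a row whose organization id is Integer.MIN_VALUE acts as the catch-all "all sites" record, so the sentinel never collides with a real database id. The constant and class names here are illustrative, not taken from the caAERS codebase:

public final class OrganizationIds {
    // Sentinel id reserved for the synthetic "all sites" organization row.
    public static final int ALL_SITES_ORGANIZATION_ID = Integer.MIN_VALUE;

    public static boolean isAllSites(int organizationId) {
        return organizationId == ALL_SITES_ORGANIZATION_ID;
    }
}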
From source file:edu.stanford.slac.archiverappliance.PB.data.BoundaryConditionsSimulationValueGenerator.java
/**
 * Get a value based on the DBR type.
 * We should check for boundary conditions here and make sure PB does not throw exceptions
 * when we come close to MIN_ and MAX_ values.
 * @param type
 * @param secondsIntoYear
 * @return
 */
public SampleValue getSampleValue(ArchDBRTypes type, int secondsIntoYear) {
    switch (type) {
    case DBR_SCALAR_STRING:
        return new ScalarStringSampleValue(Integer.toString(secondsIntoYear));
    case DBR_SCALAR_SHORT:
        if (0 <= secondsIntoYear && secondsIntoYear < 1000) {
            // Check for some numbers around the minimum value
            return new ScalarValue<Short>((short) (Short.MIN_VALUE + secondsIntoYear));
        } else if (1000 <= secondsIntoYear && secondsIntoYear < 2000) {
            // Check for some numbers around the maximum value
            return new ScalarValue<Short>((short) (Short.MAX_VALUE - (secondsIntoYear - 1000)));
        } else {
            // Check for some numbers around 0
            return new ScalarValue<Short>((short) (secondsIntoYear - 2000));
        }
    case DBR_SCALAR_FLOAT:
        if (0 <= secondsIntoYear && secondsIntoYear < 1000) {
            // Check for some numbers around the minimum value
            return new ScalarValue<Float>(Float.MIN_VALUE + secondsIntoYear);
        } else if (1000 <= secondsIntoYear && secondsIntoYear < 2000) {
            // Check for some numbers around the maximum value
            return new ScalarValue<Float>(Float.MAX_VALUE - (secondsIntoYear - 1000));
        } else {
            // Check for some numbers around 0.
            // Divide by a large number to make sure we cater to the number of precision digits
            return new ScalarValue<Float>((secondsIntoYear - 2000.0f) / secondsIntoYear);
        }
    case DBR_SCALAR_ENUM:
        return new ScalarValue<Short>((short) secondsIntoYear);
    case DBR_SCALAR_BYTE:
        return new ScalarValue<Byte>(((byte) (secondsIntoYear % 255)));
    case DBR_SCALAR_INT:
        if (0 <= secondsIntoYear && secondsIntoYear < 1000) {
            // Check for some numbers around the minimum value
            return new ScalarValue<Integer>(Integer.MIN_VALUE + secondsIntoYear);
        } else if (1000 <= secondsIntoYear && secondsIntoYear < 2000) {
            // Check for some numbers around the maximum value
            return new ScalarValue<Integer>(Integer.MAX_VALUE - (secondsIntoYear - 1000));
        } else {
            // Check for some numbers around 0
            return new ScalarValue<Integer>(secondsIntoYear - 2000);
        }
    case DBR_SCALAR_DOUBLE:
        if (0 <= secondsIntoYear && secondsIntoYear < 1000) {
            // Check for some numbers around the minimum value
            return new ScalarValue<Double>(Double.MIN_VALUE + secondsIntoYear);
        } else if (1000 <= secondsIntoYear && secondsIntoYear < 2000) {
            // Check for some numbers around the maximum value
            return new ScalarValue<Double>(Double.MAX_VALUE - (secondsIntoYear - 1000));
        } else {
            // Check for some numbers around 0.
            // Divide by a large number to make sure we cater to the number of precision digits
            return new ScalarValue<Double>((secondsIntoYear - 2000.0) / (secondsIntoYear * 1000000));
        }
    case DBR_WAVEFORM_STRING:
        // Varying number of copies of a typical value
        return new VectorStringSampleValue(
                Collections.nCopies(secondsIntoYear, Integer.toString(secondsIntoYear)));
    case DBR_WAVEFORM_SHORT:
        return new VectorValue<Short>(Collections.nCopies(1, (short) secondsIntoYear));
    case DBR_WAVEFORM_FLOAT:
        // Varying number of copies of a typical value
        return new VectorValue<Float>(
                Collections.nCopies(secondsIntoYear, (float) Math.cos(secondsIntoYear * Math.PI / 3600)));
    case DBR_WAVEFORM_ENUM:
        return new VectorValue<Short>(Collections.nCopies(1024, (short) secondsIntoYear));
    case DBR_WAVEFORM_BYTE:
        // Large number of elements in the array
        return new VectorValue<Byte>(
                Collections.nCopies(65536 * secondsIntoYear, ((byte) (secondsIntoYear % 255))));
    case DBR_WAVEFORM_INT:
        // Varying number of copies of a typical value
        return new VectorValue<Integer>(
                Collections.nCopies(secondsIntoYear, secondsIntoYear * secondsIntoYear));
    case DBR_WAVEFORM_DOUBLE:
        // Varying number of copies of a typical value
        return new VectorValue<Double>(
                Collections.nCopies(secondsIntoYear, Math.sin(secondsIntoYear * Math.PI / 3600)));
    case DBR_V4_GENERIC_BYTES:
        // Varying number of copies of a typical value
        ByteBuffer buf = ByteBuffer.allocate(1024 * 10);
        buf.put(Integer.toString(secondsIntoYear).getBytes());
        buf.flip();
        return new ByteBufSampleValue(buf);
    default:
        throw new RuntimeException("We seemed to have missed a DBR type when generating sample data");
    }
}
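Worth noting when reading the generator above: Integer.MIN_VALUE and Short.MIN_VALUE are the most negative values of their types, but Float.MIN_VALUE and Double.MIN_VALUE are the smallest positive values, so the floating-point branches do not actually probe the negative boundary. A minimal standalone sketch of the difference (class name is illustrative):

public class MinValueBoundaries {
    public static void main(String[] args) {
        // Integer.MIN_VALUE is the most negative int, so MIN_VALUE + 5 is still hugely negative.
        System.out.println(Integer.MIN_VALUE + 5);   // -2147483643

        // Double.MIN_VALUE (and Float.MIN_VALUE) are the smallest POSITIVE values,
        // so Double.MIN_VALUE + 5 is effectively just 5.0.
        System.out.println(Double.MIN_VALUE);        // 4.9E-324
        System.out.println(Double.MIN_VALUE + 5);    // 5.0

        // The most negative finite double is -Double.MAX_VALUE.
        System.out.println(-Double.MAX_VALUE);       // -1.7976931348623157E308
    }
}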
From source file:com.gbcom.system.domain.base.BaseSysRegistration.java
/**
 * Set the unique identifier of this class
 *
 * @param id the new ID
 * @deprecated
 */
public void setId(java.lang.Long id) {
    this.id = id;
    // Integer.MIN_VALUE apparently marks the cached hash code as stale so it is recomputed on the next hashCode() call.
    this.hashCode = Integer.MIN_VALUE;
}
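The Integer.MIN_VALUE assignment above is the common "cached hash code not yet computed" sentinel used by generated Hibernate-style base classes. A hedged sketch of what such a class typically looks like; the class and field names are illustrative assumptions, not copied from BaseSysRegistration:

public abstract class BaseEntity {
    private Long id;
    private int hashCode = Integer.MIN_VALUE;   // sentinel: hash not computed yet

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
        this.hashCode = Integer.MIN_VALUE;      // force recomputation after the id changes
    }

    @Override
    public int hashCode() {
        if (Integer.MIN_VALUE == this.hashCode) {
            String hashStr = getClass().getName() + ":" + getId();
            this.hashCode = hashStr.hashCode();
        }
        return this.hashCode;
    }
}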
From source file:org.openremote.controller.protocol.http.HttpGetCommand.java
@Override
public String read(EnumSensorType sensorType, Map<String, String> stateMap) {
    String rawResult = requestURL();
    if (sensorType == null) {
        return rawResult;
    }
    if ("".equals(rawResult)) {
        return UNKNOWN_STATUS;
    }
    switch (sensorType) {
    case SWITCH:
        rawResult = rawResult.trim();
        if (rawResult.equalsIgnoreCase("on"))
            return "on";
        else if (rawResult.equalsIgnoreCase("off"))
            return "off";
        else
            return UNKNOWN_STATUS;
    case RANGE:
        try {
            int rangeMin = Integer.MIN_VALUE;
            int rangeMax = Integer.MAX_VALUE;
            int result = resolveResultAsInteger(rawResult);
            if (stateMap != null) {
                rangeMin = resolveRangeMinimum(stateMap.get(Sensor.RANGE_MIN_STATE));
                rangeMax = resolveRangeMaximum(stateMap.get(Sensor.RANGE_MAX_STATE));
            }
            return resolveToRangeSensorValue(result, rangeMin, rangeMax);
        } catch (ConversionException e) {
            return UNKNOWN_STATUS;
        }
    case LEVEL:
        try {
            return resolveToLevelSensorValue(resolveResultAsInteger(rawResult));
        } catch (ConversionException e) {
            return UNKNOWN_STATUS;
        }
    default:
        // NOTE: if sensor type is RANGE, this map only contains min/max states.
        for (String state : stateMap.keySet()) {
            if (rawResult.equals(stateMap.get(state))) {
                return state;
            }
        }
        break;
    }
    return rawResult;
}
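In the RANGE branch, Integer.MIN_VALUE and Integer.MAX_VALUE act as "widest possible range" defaults until real limits are read from the state map. A minimal illustration of that default-bounds idea; the clamp() helper and names are illustrative, not from the OpenRemote codebase:

public class RangeDefaults {
    static int clamp(int value, int rangeMin, int rangeMax) {
        // MIN_VALUE / MAX_VALUE defaults mean "no restriction" until real limits are configured.
        return Math.max(rangeMin, Math.min(rangeMax, value));
    }

    public static void main(String[] args) {
        System.out.println(clamp(123456, Integer.MIN_VALUE, Integer.MAX_VALUE)); // 123456, nothing clipped
        System.out.println(clamp(123456, 0, 100));                               // 100 once real limits exist
    }
}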
From source file:mt.LengthDistribution.java
public static void GetLengthDistributionArray(ArrayList<File> AllMovies, double[] calibration) {
    ArrayList<Double> maxlist = new ArrayList<Double>();
    for (int i = 0; i < AllMovies.size(); ++i) {
        double maxlength = LengthDistribution.Lengthdistro(AllMovies.get(i));
        // Double.isNaN() is used here; comparing with != Double.NaN is always true.
        if (!Double.isNaN(maxlength) && maxlength > 0)
            maxlist.add(maxlength);
    }
    Collections.sort(maxlist);
    int min = 0;
    int max = (int) Math.round(maxlist.get(maxlist.size() - 1)) + 1;
    XYSeries counterseries = new XYSeries("MT length distribution");
    XYSeries Logcounterseries = new XYSeries("MT Log length distribution");
    final ArrayList<Point> points = new ArrayList<Point>();
    for (int length = 0; length < max; ++length) {
        HashMap<Integer, Integer> frameseed = new HashMap<Integer, Integer>();
        int count = 0;
        for (int i = 0; i < AllMovies.size(); ++i) {
            File file = AllMovies.get(i);
            double currentlength = LengthDistribution.Lengthdistro(file);
            ArrayList<FLSobject> currentobject = Tracking.loadMTStat(file);
            if (currentlength > length) {
                for (int index = 0; index < currentobject.size(); ++index) {
                    ArrayList<Integer> seedlist = new ArrayList<Integer>();
                    if (currentobject.get(index).length >= length) {
                        seedlist.add(currentobject.get(index).seedID);
                        if (frameseed.get(currentobject.get(index).Framenumber) != null) {
                            int currentcount = frameseed.get(currentobject.get(index).Framenumber);
                            frameseed.put(currentobject.get(index).Framenumber, seedlist.size() + currentcount);
                        } else if (currentobject.get(index) != null)
                            frameseed.put(currentobject.get(index).Framenumber, seedlist.size());
                    }
                }
            }
        }
        // Get maximum count across frames; Integer.MIN_VALUE is the "no count seen yet" sentinel.
        int maxvalue = Integer.MIN_VALUE;
        for (int key : frameseed.keySet()) {
            int Count = frameseed.get(key);
            if (Count >= maxvalue)
                maxvalue = Count;
        }
        if (maxvalue != Integer.MIN_VALUE) {
            counterseries.add(length, maxvalue);
            if (maxvalue > 0) {
                Logcounterseries.add((length), Math.log(maxvalue));
                points.add(new Point(new double[] { length, Math.log(maxvalue) }));
            }
        }
    }
    final XYSeriesCollection dataset = new XYSeriesCollection();
    final XYSeriesCollection nofitdataset = new XYSeriesCollection();
    dataset.addSeries(counterseries);
    nofitdataset.addSeries(counterseries);
    final XYSeriesCollection Logdataset = new XYSeriesCollection();
    Logdataset.addSeries(Logcounterseries);
    final JFreeChart chart = ChartFactory.createScatterPlot("MT length distribution", "Number of MT",
            "Length (micrometer)", dataset);
    final JFreeChart nofitchart = ChartFactory.createScatterPlot("MT length distribution", "Number of MT",
            "Length (micrometer)", nofitdataset);
    // Fitting line to log of the length distribution
    interpolation.Polynomial poly = new interpolation.Polynomial(1);
    try {
        poly.fitFunction(points);
    } catch (NotEnoughDataPointsException e) {
        e.printStackTrace();
    }
    DisplayPoints.display(nofitchart, new Dimension(800, 500));
    dataset.addSeries(Tracking.drawexpFunction(poly, counterseries.getMinX(), counterseries.getMaxX(), 0.5,
            "Exponential fit"));
    NumberFormat nf = NumberFormat.getInstance(Locale.ENGLISH);
    nf.setMaximumFractionDigits(3);
    TextTitle legendText = new TextTitle("Mean Length" + " : " + nf.format(-1.0 / poly.getCoefficients(1))
            + " " + "Standard Deviation" + " : " + nf.format(poly.SSE));
    legendText.setPosition(RectangleEdge.RIGHT);
    DisplayPoints.display(chart, new Dimension(800, 500));
    chart.addSubtitle(legendText);
    final JFreeChart logchart = ChartFactory.createScatterPlot("MT Log length distribution", "Number of MT",
            "Length (micrometer)", Logdataset);
    // DisplayPoints.display(logchart, new Dimension(800, 500));
    for (int i = 1; i >= 0; --i)
        System.out.println(poly.getCoefficients(i) + " " + "x" + " X to the power of " + i);
    // Logdataset.addSeries(Tracking.drawFunction(poly, counterseries.getMinX(), counterseries.getMaxX(), 0.5, "Straight line fit"));
    WriteLengthdistroFile(AllMovies, counterseries, 0);
}
From source file:org.elasticsearch.client.sniff.ElasticsearchNodesSnifferTests.java
public void testConstructorValidation() throws IOException {
    try {
        new ElasticsearchNodesSniffer(null, 1, ElasticsearchNodesSniffer.Scheme.HTTP);
        fail("should have failed");
    } catch (NullPointerException e) {
        assertEquals("restClient cannot be null", e.getMessage());
    }
    HttpHost httpHost = new HttpHost(httpServer.getAddress().getHostString(),
            httpServer.getAddress().getPort());
    try (RestClient restClient = RestClient.builder(httpHost).build()) {
        try {
            new ElasticsearchNodesSniffer(restClient, 1, null);
            fail("should have failed");
        } catch (NullPointerException e) {
            assertEquals(e.getMessage(), "scheme cannot be null");
        }
        try {
            new ElasticsearchNodesSniffer(restClient,
                    RandomNumbers.randomIntBetween(getRandom(), Integer.MIN_VALUE, 0),
                    ElasticsearchNodesSniffer.Scheme.HTTP);
            fail("should have failed");
        } catch (IllegalArgumentException e) {
            assertEquals(e.getMessage(), "sniffRequestTimeoutMillis must be greater than 0");
        }
    }
}
From source file:edu.cornell.med.icb.goby.alignments.Merge.java
public void merge(final List<File> inputFiles, final String outputFile) throws IOException {
    // we will store one target index permutation for each input file in the following list:
    referenceIndexPermutation = new ObjectArrayList<int[]>();
    int maxNumberOfReads = Integer.MIN_VALUE;
    if (verbose) {
        System.out.println("Finding max number of reads...");
        System.out.flush();
    }
    int mergedReferenceIndex = 0;
    final IndexedIdentifier mergedTargetIdentifiers = new IndexedIdentifier();
    final Int2IntMap mergedTargetLengths = new Int2IntOpenHashMap();
    final IntSet numberOfReadsSet = new IntArraySet();
    int minQueryIndex = Integer.MAX_VALUE;
    for (final File inputFile : inputFiles) {
        final AlignmentReaderImpl reader = new AlignmentReaderImpl(inputFile.toString());
        reader.readHeader();
        message("Found input file with " + reader.getNumberOfTargets() + " target(s)");
        mergedReferenceIndex = constructTargetIndexPermutations(mergedReferenceIndex, mergedTargetIdentifiers,
                mergedTargetLengths, reader);
        minQueryIndex = Math.min(minQueryIndex, reader.getSmallestSplitQueryIndex());
        numberOfReadsSet.add(reader.getNumberOfQueries());
        reader.close();
        final AlignmentTooManyHitsReader tmhReader = new AlignmentTooManyHitsReader(inputFile.toString());
        for (final int queryIndex : tmhReader.getQueryIndices()) {
            minQueryIndex = Math.min(minQueryIndex, queryIndex);
        }
    }
    if (numberOfReadsSet.size() != 1) {
        message("Aborting: the input alignments must have exactly the same number of reads, "
                + "we found different number of reads in " + numberOfReadsSet + " input files:");
        return;
    }
    maxNumberOfReads = numberOfReadsSet.iterator().nextInt();
    message("... max number of reads was " + maxNumberOfReads);
    final AbstractAlignmentEntryFilter entryFilter = getFilter(maxNumberOfReads, minQueryIndex);
    entryFilter.setTargetIdentifiers(mergedTargetIdentifiers);
    ProgressLogger progress = new ProgressLogger(LOG);
    progress.expectedUpdates = inputFiles.size();
    progress.start();
    float totalNumberOfLogicalEntries = 0;
    message("First pass: determine which reads should be kept in the merged alignment.");
    int totalNumberOfEntries = 0;
    for (final File inputFile : inputFiles) {
        message("Scanning " + inputFile.getName());
        final AlignmentReaderImpl reader = new AlignmentReaderImpl(inputFile.toString());
        reader.readHeader();
        entryFilter.setTargetIdentifiers(reader.getTargetIdentifiers());
        while (reader.hasNext()) {
            final Alignments.AlignmentEntry entry = reader.next();
            entryFilter.inspectEntry(entry);
            ++totalNumberOfEntries;
            totalNumberOfLogicalEntries += entry.getMultiplicity();
        }
        progress.update();
        reader.close();
    }
    progress.stop();
    entryFilter.postProcessing();
    message(String.format("Found %d logical alignment entries.", (int) totalNumberOfLogicalEntries));
    message("Prepare merged too many hits information.");
    prepareMergedTooManyHits(outputFile, maxNumberOfReads, minQueryIndex,
            inputFiles.toArray(new File[inputFiles.size()]));
    message("Second pass: writing the merged alignment.");
    int wrote = 0;
    int skipped = 0;
    int skippedTooManyHits = 0;
    int skippedNotBestScore = 0;
    final AlignmentWriterImpl writer = new AlignmentWriterImpl(outputFile);
    progress = new ProgressLogger(LOG);
    progress.expectedUpdates = totalNumberOfEntries;
    progress.start();
    if (mergedTargetIdentifiers.size() > 0) {
        // set merged target info in the merged header:
        writer.setTargetIdentifiers(mergedTargetIdentifiers);
    }
    int inputFileIndex = 0;
    // use the merged too many hits info:
    final AlignmentTooManyHitsReader tmhReader = new AlignmentTooManyHitsReader(outputFile);
    final IntSet queriesIndicesAligned = new IntOpenHashSet();
    for (final File inputFile : inputFiles) {
        final String basename = inputFile.toString();
        final AlignmentReaderImpl reader = new AlignmentReaderImpl(basename);
        reader.readHeader();
        entryFilter.setTargetIdentifiers(reader.getTargetIdentifiers());
        final AlignmentTooManyHitsReader specificTmhReader = new AlignmentTooManyHitsReader(basename);
        while (reader.hasNext()) {
            final Alignments.AlignmentEntry entry = reader.next();
            progress.lightUpdate();
            // System.out.println("Processing queryIndex " + entry.getQueryIndex());
            if (entryFilter.shouldRetainEntry(entry)) {
                final int queryIndex = entry.getQueryIndex();
                final int matchLength = specificTmhReader.getLengthOfMatch(queryIndex);
                if (!tmhReader.isQueryAmbiguous(queryIndex, k, matchLength)) {
                    // switch in the mergedTargetIndex and append to writer:
                    final int newTargetIndex = referenceIndexPermutation.get(inputFileIndex)[entry.getTargetIndex()];
                    Alignments.AlignmentEntry entry1 = entry;
                    entry1 = entry1.newBuilderForType().mergeFrom(entry1).setTargetIndex(newTargetIndex).build();
                    writer.appendEntry(entry1);
                    wrote += entry.getMultiplicity();
                    queriesIndicesAligned.add(entry.getQueryIndex());
                } else {
                    skipped += entry.getMultiplicity();
                    skippedTooManyHits += entry.getMultiplicity();
                    // skipped since the query hits too many locations in the reference sequences.
                }
            } else {
                skipped += entry.getMultiplicity();
                skippedNotBestScore += entry.getMultiplicity();
                // skipped since this alignment does not have the best score of the reference locations aligned
                // with this specific query/read.
            }
            if (((wrote + skipped) % 1000000) == 0) {
                printStatus(wrote + skipped, wrote, skipped, skippedTooManyHits, skippedNotBestScore);
            }
        }
        reader.close();
        inputFileIndex++;
    }
    progress.stop();
    writer.setNumTargets(mergedReferenceIndex);
    final int[] targetLengths = new int[mergedReferenceIndex];
    for (int i = 0; i < mergedReferenceIndex; i++) {
        targetLengths[i] = mergedTargetLengths.get(i);
    }
    writer.setTargetLengths(targetLengths);
    printStatus((int) totalNumberOfLogicalEntries, wrote, skipped, skippedTooManyHits, skippedNotBestScore);
    if (verbose) {
        writer.printStats(System.out);
    }
    entryFilter.printStats();
    final float numQuerySequences = maxNumberOfReads;
    final float percentWritten = ((float) wrote) * 100f / totalNumberOfLogicalEntries;
    final float skippedPercent = ((float) skipped * 100f / totalNumberOfLogicalEntries);
    final float skippedTooManyHitsPercent = ((float) skippedTooManyHits) * 100f / totalNumberOfLogicalEntries;
    final float skippedNotBestScorePercent = ((float) skippedNotBestScore * 100f / totalNumberOfLogicalEntries);
    float percentAligned = queriesIndicesAligned.size();
    percentAligned /= numQuerySequences;
    percentAligned *= 100f;
    final double percentEntriesRetained = ((double) wrote) / numQuerySequences * 100d;
    writer.putStatistic("entries.written.number", wrote);
    writer.putStatistic("entries.written.percent", percentWritten);
    writer.putStatistic("entries.input.logical.number", totalNumberOfLogicalEntries);
    writer.putStatistic("entries.input.number", totalNumberOfEntries);
    writer.putStatistic("skipped.Total.percent", skippedPercent);
    writer.putStatistic("skipped.TooManyHits.percent", skippedTooManyHitsPercent);
    writer.putStatistic("skipped.TooManyHits.number", skippedTooManyHits);
    writer.putStatistic("skipped.NotBestScore.percent", skippedNotBestScorePercent);
    writer.putStatistic("skipped.NotBestScore.number", skippedNotBestScore);
    writer.putStatistic("entries.retained.percent", percentEntriesRetained);
    writer.putStatistic("number.Query", maxNumberOfReads);
    writer.putStatistic("number.Target", mergedTargetIdentifiers.size());
    writer.putStatistic("reads.aligned.number", queriesIndicesAligned.size());
    writer.putStatistic("reads.aligned.percent", percentAligned);
    writer.close();
    message("Percent aligned: " + percentAligned);
}
From source file:com.github.veqryn.net.Cidr4.java
/**
 * Constructor that takes a CIDR-notation string, e.g. "192.168.0.1/16"
 *
 * @param cidrNotation A CIDR-notation string, e.g. "192.168.0.1/16"
 * @throws IllegalArgumentException if the parameter is invalid,
 *         i.e. does not match n.n.n.n/m where n=1-3 decimal digits,
 *         m = 1-3 decimal digits in range 1-32
 */
public Cidr4(final String cidrNotation) {
    final Matcher matcher = cidrPattern.matcher(cidrNotation);
    if (matcher.matches()) {
        final int address = toInteger(matchAddress(matcher), true);
        final int netmask = getNetMask(rangeCheck(Integer.parseInt(matcher.group(5)), 0, NBITS));
        final int network = getLowestBinaryWithNetmask(address, netmask);
        // XOR with Integer.MIN_VALUE flips the sign bit so that unsigned IP ordering
        // matches ordinary signed int comparison.
        this.low = network ^ Integer.MIN_VALUE;
        this.high = getHighestBinaryWithNetmask(network, netmask) ^ Integer.MIN_VALUE;
    } else {
        throw new IllegalArgumentException("Could not parse [" + cidrNotation + "]");
    }
}
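The two XORs with Integer.MIN_VALUE above are the classic sign-bit flip: an IPv4 address is really an unsigned 32-bit value, and flipping the top bit makes signed comparison agree with unsigned ordering. A small standalone sketch of the trick (the class name is illustrative; Integer.compareUnsigned is shown only for comparison):

public class UnsignedOrder {
    static int flip(int x) {
        return x ^ Integer.MIN_VALUE;   // flip the sign bit
    }

    public static void main(String[] args) {
        int a = 0x00000001;             // 1 signed, 1 unsigned
        int b = 0xFFFFFFFF;             // -1 signed, 4294967295 unsigned

        System.out.println(Integer.compare(a, b));               // 1  -> wrong order for addresses
        System.out.println(Integer.compare(flip(a), flip(b)));   // -1 -> correct unsigned order
        System.out.println(Integer.compareUnsigned(a, b));       // -1 -> same result via the JDK 8+ helper
    }
}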
From source file:org.exoplatform.mongo.factory.MongoFactoryBean.java
private void attemptMongoConnection(String host, int port) throws Exception {
    try {
        // Integer.MIN_VALUE apparently serves as the "no port configured" marker,
        // in which case the driver's default port is used.
        if (port == Integer.MIN_VALUE) {
            replicaSetSeeds.add(new ServerAddress(host));
        } else {
            replicaSetSeeds.add(new ServerAddress(host, port));
        }
    } catch (UnknownHostException unknownHost) {
        throw unknownHost;
    }
}
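A hedged sketch of the "unset primitive" convention the factory above appears to rely on: an int field cannot be null, so a sentinel value is reserved to mean "not configured". The class and field names here are illustrative assumptions:

public class SeedConfig {
    private static final int UNSET = Integer.MIN_VALUE;   // primitive fields cannot be null, so reserve a sentinel

    private String host;
    private int port = UNSET;

    public void setHost(String host) { this.host = host; }
    public void setPort(int port) { this.port = port; }

    public String seedAddress() {
        // Only append the port when one was explicitly configured.
        return port == UNSET ? host : host + ":" + port;
    }
}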
From source file:LongVector.java
/**
 * Removes the first occurrence of the argument from this vector. If the
 * value is found in this vector, each component in the vector with an
 * index greater or equal to the value's index is shifted downward to have
 * an index one smaller than the value it had previously.
 *
 * @return True if the value was removed, false if it was not found
 */
public final boolean removeElement(long s) {
    for (int i = 0; i < _size; i++) {
        if (_data[i] == s) {
            if ((i + 1) < _size)
                // Shift everything after position i down by one slot.
                System.arraycopy(_data, i + 1, _data, i, _size - i - 1);
            else
                // Removing the last element: blank its slot with a sentinel value.
                _data[i] = java.lang.Integer.MIN_VALUE;
            _size--;
            return true;
        }
    }
    return false;
}