List of usage examples for java.util Collections max
public static <T extends Object & Comparable<? super T>> T max(Collection<? extends T> coll)
From source file:loci.formats.in.MetamorphReader.java
/**
 * Initializes the reader from either a Metamorph {@code .nd} dataset file or a
 * single {@code .stk} plane file.
 *
 * When given an {@code .nd} file, an associated STK file is located in the same
 * directory and used for the low-level TIFF initialization; when given an STK
 * file (and file grouping is allowed), the parent directory is searched for a
 * matching {@code .nd} file. If an {@code .nd} file is available, its key/value
 * pairs are parsed to work out Z/C/T dimensions, stage positions and wavelength
 * names, the full list of STK files is reconstructed, per-series core metadata
 * is populated, and finally OME metadata (plate/well layout for HCS data,
 * instrument, channels, timestamps, exposure times and plane positions) is
 * written to the metadata store.
 *
 * @param id path of the file being opened ({@code .nd} or {@code .stk})
 * @throws FormatException if no STK file can be located or no STK file is valid
 * @throws IOException on read errors
 */
@Override protected void initFile(String id) throws FormatException, IOException {
    if (checkSuffix(id, ND_SUFFIX)) {
        LOGGER.info("Initializing " + id);
        // find an associated STK file
        String stkFile = id.substring(0, id.lastIndexOf("."));
        if (stkFile.indexOf(File.separatorChar) != -1) {
            stkFile = stkFile.substring(stkFile.lastIndexOf(File.separator) + 1);
        }
        Location parent = new Location(id).getAbsoluteFile().getParentFile();
        LOGGER.info("Looking for STK file in {}", parent.getAbsolutePath());
        String[] dirList = parent.list(true);
        // Sort so the first (alphabetically smallest) matching STK wins.
        Arrays.sort(dirList);
        for (String f : dirList) {
            // The candidate's prefix is everything before the first '_' (or '.',
            // or the whole name if neither is present).
            int underscore = f.indexOf('_');
            if (underscore < 0) underscore = f.indexOf('.');
            if (underscore < 0) underscore = f.length();
            String prefix = f.substring(0, underscore);
            if ((f.equals(stkFile) || stkFile.startsWith(prefix)) && checkSuffix(f, STK_SUFFIX)) {
                stkFile = new Location(parent.getAbsolutePath(), f).getAbsolutePath();
                break;
            }
        }
        if (!checkSuffix(stkFile, STK_SUFFIX)) {
            throw new FormatException("STK file not found in " + parent.getAbsolutePath() + ".");
        }
        super.initFile(stkFile);
    } else super.initFile(id);
    Location ndfile = null;
    if (checkSuffix(id, ND_SUFFIX)) ndfile = new Location(id);
    else if (canLookForND && isGroupFiles()) {
        // an STK file was passed to initFile
        // let's check the parent directory for an .nd file
        Location stk = new Location(id).getAbsoluteFile();
        String stkName = stk.getName();
        String stkPrefix = stkName;
        if (stkPrefix.indexOf('_') >= 0) {
            stkPrefix = stkPrefix.substring(0, stkPrefix.indexOf('_') + 1);
        }
        Location parent = stk.getParentFile();
        String[] list = parent.list(true);
        // Pick the .nd file sharing the longest common character prefix with
        // the STK name; ties are broken in favor of a candidate whose name
        // diverges exactly at its '.' extension separator.
        int matchingChars = 0;
        for (String f : list) {
            if (checkSuffix(f, ND_SUFFIX)) {
                String prefix = f.substring(0, f.lastIndexOf("."));
                if (prefix.indexOf('_') >= 0) {
                    prefix = prefix.substring(0, prefix.indexOf('_') + 1);
                }
                if (stkName.startsWith(prefix) || prefix.equals(stkPrefix)) {
                    int charCount = 0;
                    for (int i = 0; i < f.length(); i++) {
                        if (i >= stkName.length()) { break; }
                        if (f.charAt(i) == stkName.charAt(i)) { charCount++; } else { break; }
                    }
                    // NOTE(review): f.charAt(charCount) can throw if the whole
                    // candidate name matched (charCount == f.length()) — confirm
                    // inputs make that impossible.
                    if (charCount > matchingChars || (charCount == matchingChars && f.charAt(charCount) == '.')) {
                        ndfile = new Location(parent, f).getAbsoluteFile();
                        matchingChars = charCount;
                    }
                }
            }
        }
    }
    String creationTime = null;
    if (ndfile != null && ndfile.exists() && (fileGroupOption(id) == FormatTools.MUST_GROUP || isGroupFiles())) {
        // parse key/value pairs from .nd file
        int zc = getSizeZ(), cc = getSizeC(), tc = getSizeT();
        int nstages = 0;
        String z = null, c = null, t = null;
        final List<Boolean> hasZ = new ArrayList<Boolean>();
        waveNames = new ArrayList<String>();
        stageNames = new ArrayList<String>();
        boolean useWaveNames = true;
        ndFilename = ndfile.getAbsolutePath();
        String[] lines = DataTools.readFile(ndFilename).split("\n");
        boolean globalDoZ = true;
        boolean doTimelapse = false;
        // The parse runs one line behind: each key's value is accumulated into
        // currentValue (multi-line values are joined with '\n') and only
        // interpreted when the NEXT "key,value" line is reached.
        StringBuilder currentValue = new StringBuilder();
        String key = "";
        for (String line : lines) {
            int comma = line.indexOf(',');
            if (comma <= 0) {
                // continuation of the previous value
                currentValue.append("\n");
                currentValue.append(line);
                continue;
            }
            // Flush the previously accumulated key/value pair.
            String value = currentValue.toString();
            addGlobalMeta(key, value);
            if (key.equals("NZSteps")) z = value;
            else if (key.equals("DoTimelapse")) { doTimelapse = Boolean.parseBoolean(value); }
            else if (key.equals("NWavelengths")) c = value;
            else if (key.equals("NTimePoints")) t = value;
            else if (key.startsWith("WaveDoZ")) { hasZ.add(Boolean.parseBoolean(value)); }
            else if (key.startsWith("WaveName")) {
                // strip the surrounding quote characters
                String waveName = value.substring(1, value.length() - 1);
                if (waveName.equals("Both lasers") || waveName.startsWith("DUAL")) {
                    bizarreMultichannelAcquisition = true;
                }
                waveNames.add(waveName);
            }
            else if (key.startsWith("Stage")) { stageNames.add(value); }
            else if (key.startsWith("StartTime")) { creationTime = value; }
            else if (key.equals("ZStepSize")) {
                // tolerate a decimal comma before parsing
                value = value.replace(',', '.');
                stepSize = Double.parseDouble(value);
            }
            else if (key.equals("NStagePositions")) { nstages = Integer.parseInt(value); }
            else if (key.equals("WaveInFileName")) { useWaveNames = Boolean.parseBoolean(value); }
            else if (key.equals("DoZSeries")) { globalDoZ = Boolean.parseBoolean(value); }
            // Start accumulating the new pair; substring(1, comma - 1) drops the
            // quote characters around the key.
            key = line.substring(1, comma - 1).trim();
            currentValue.delete(0, currentValue.length());
            currentValue.append(line.substring(comma + 1).trim());
        }
        if (!globalDoZ) {
            // DoZSeries=false overrides every per-wavelength WaveDoZ flag
            for (int i = 0; i < hasZ.size(); i++) { hasZ.set(i, false); }
        }
        // figure out how many files we need
        if (z != null) zc = Integer.parseInt(z);
        if (c != null) cc = Integer.parseInt(c);
        if (t != null) tc = Integer.parseInt(t);
        else if (!doTimelapse) { tc = 1; }
        if (cc == 0) cc = 1;
        if (cc == 1 && bizarreMultichannelAcquisition) { cc = 2; }
        if (tc == 0) { tc = 1; }
        int numFiles = cc * tc;
        if (nstages > 0) numFiles *= nstages;
        // determine series count
        // If wavelengths disagree on whether they have a Z dimension, each stage
        // position is split into two series (Z-enabled vs. flat channels).
        int stagesCount = nstages == 0 ? 1 : nstages;
        int seriesCount = stagesCount;
        firstSeriesChannels = new boolean[cc];
        Arrays.fill(firstSeriesChannels, true);
        boolean differentZs = false;
        for (int i = 0; i < cc; i++) {
            boolean hasZ1 = i < hasZ.size() && hasZ.get(i);
            boolean hasZ2 = i != 0 && (i - 1 < hasZ.size()) && hasZ.get(i - 1);
            if (i > 0 && hasZ1 != hasZ2 && globalDoZ) {
                if (!differentZs) seriesCount *= 2;
                differentZs = true;
            }
        }
        int channelsInFirstSeries = cc;
        if (differentZs) {
            channelsInFirstSeries = 0;
            for (int i = 0; i < cc; i++) {
                if ((!hasZ.get(0) && i == 0) || (hasZ.get(0) && hasZ.get(i))) { channelsInFirstSeries++; }
                else firstSeriesChannels[i] = false;
            }
        }
        stks = new String[seriesCount][];
        if (seriesCount == 1) stks[0] = new String[numFiles];
        else if (differentZs) {
            for (int i = 0; i < stagesCount; i++) {
                stks[i * 2] = new String[channelsInFirstSeries * tc];
                stks[i * 2 + 1] = new String[(cc - channelsInFirstSeries) * tc];
            }
        }
        else {
            for (int i = 0; i < stks.length; i++) { stks[i] = new String[numFiles / stks.length]; }
        }
        String prefix = ndfile.getPath();
        prefix = prefix.substring(prefix.lastIndexOf(File.separator) + 1, prefix.lastIndexOf("."));
        // build list of STK files
        // Names follow the pattern <prefix>[_w<N>[<waveName>]][_s<N>][_t<N>].STK
        boolean anyZ = hasZ.contains(Boolean.TRUE);
        int[] pt = new int[seriesCount];
        for (int i = 0; i < tc; i++) {
            for (int s = 0; s < stagesCount; s++) {
                for (int j = 0; j < cc; j++) {
                    boolean validZ = j >= hasZ.size() || hasZ.get(j);
                    int seriesNdx = s * (seriesCount / stagesCount);
                    if ((seriesCount != 1 && (!validZ || (hasZ.size() > 0 && !hasZ.get(0)))) || (nstages == 0 && ((!validZ && cc > 1) || seriesCount > 1))) {
                        if (anyZ && j > 0 && seriesNdx < seriesCount - 1 && (!validZ || !hasZ.get(0))) { seriesNdx++; }
                    }
                    if (seriesNdx >= stks.length || seriesNdx >= pt.length || pt[seriesNdx] >= stks[seriesNdx].length) { continue; }
                    stks[seriesNdx][pt[seriesNdx]] = prefix;
                    if (j < waveNames.size() && waveNames.get(j) != null) {
                        stks[seriesNdx][pt[seriesNdx]] += "_w" + (j + 1);
                        if (useWaveNames) {
                            String waveName = waveNames.get(j);
                            // If there are underscores in the wavelength name, translate
                            // them to hyphens. (See #558)
                            waveName = waveName.replace('_', '-');
                            // If there are slashes (forward or backward) in the wavelength
                            // name, translate them to hyphens. (See #5922)
                            waveName = waveName.replace('/', '-');
                            waveName = waveName.replace('\\', '-');
                            waveName = waveName.replace('(', '-');
                            waveName = waveName.replace(')', '-');
                            stks[seriesNdx][pt[seriesNdx]] += waveName;
                        }
                    }
                    if (nstages > 0) { stks[seriesNdx][pt[seriesNdx]] += "_s" + (s + 1); }
                    if (tc > 1 || doTimelapse) { stks[seriesNdx][pt[seriesNdx]] += "_t" + (i + 1) + ".STK"; }
                    else stks[seriesNdx][pt[seriesNdx]] += ".STK";
                    pt[seriesNdx]++;
                }
            }
        }
        ndfile = ndfile.getAbsoluteFile();
        // check that each STK file exists
        for (int s = 0; s < stks.length; s++) {
            for (int f = 0; f < stks[s].length; f++) {
                Location l = new Location(ndfile.getParent(), stks[s][f]);
                stks[s][f] = getRealSTKFile(l);
            }
        }
        String file = locateFirstValidFile();
        if (file == null) {
            throw new FormatException("Unable to locate at least one valid STK file!");
        }
        // Read core dimensions from the first valid STK's first IFD.
        RandomAccessInputStream s = new RandomAccessInputStream(file, 16);
        TiffParser tp = new TiffParser(s);
        IFD ifd = tp.getFirstIFD();
        CoreMetadata ms0 = core.get(0);
        s.close();
        ms0.sizeX = (int) ifd.getImageWidth();
        ms0.sizeY = (int) ifd.getImageLength();
        if (bizarreMultichannelAcquisition) { ms0.sizeX /= 2; }
        ms0.sizeZ = hasZ.size() > 0 && !hasZ.get(0) ? 1 : zc;
        ms0.sizeC = cc;
        ms0.sizeT = tc;
        ms0.imageCount = getSizeZ() * getSizeC() * getSizeT();
        if (isRGB()) { ms0.sizeC *= 3; }
        ms0.dimensionOrder = "XYZCT";
        if (stks != null && stks.length > 1) {
            // Note that core can't be replaced with newCore until the end of this block.
            ArrayList<CoreMetadata> newCore = new ArrayList<CoreMetadata>();
            for (int i = 0; i < stks.length; i++) {
                CoreMetadata ms = new CoreMetadata();
                newCore.add(ms);
                ms.sizeX = getSizeX();
                ms.sizeY = getSizeY();
                ms.sizeZ = getSizeZ();
                ms.sizeC = getSizeC();
                ms.sizeT = getSizeT();
                ms.pixelType = getPixelType();
                ms.imageCount = getImageCount();
                ms.dimensionOrder = getDimensionOrder();
                ms.rgb = isRGB();
                ms.littleEndian = isLittleEndian();
                ms.interleaved = isInterleaved();
                ms.orderCertain = true;
            }
            if (stks.length > nstages) {
                // Series were split in (even, odd) pairs per stage position;
                // recompute C/Z/imageCount for each half of the pair.
                for (int j = 0; j < stagesCount; j++) {
                    int idx = j * 2 + 1;
                    CoreMetadata midx = newCore.get(idx);
                    CoreMetadata pmidx = newCore.get(j * 2);
                    pmidx.sizeC = stks[j * 2].length / getSizeT();
                    midx.sizeC = stks[idx].length / midx.sizeT;
                    midx.sizeZ = hasZ.size() > 1 && hasZ.get(1) && core.get(0).sizeZ == 1 ? zc : 1;
                    pmidx.imageCount = pmidx.sizeC * pmidx.sizeT * pmidx.sizeZ;
                    midx.imageCount = midx.sizeC * midx.sizeT * midx.sizeZ;
                }
            }
            core = newCore;
        }
    }
    // One sub-reader per STK file; metadata is only fully parsed for the first
    // file of each series (the rest run at MINIMUM level).
    if (stks == null) {
        stkReaders = new MetamorphReader[1][1];
        stkReaders[0][0] = new MetamorphReader();
        stkReaders[0][0].setCanLookForND(false);
    }
    else {
        stkReaders = new MetamorphReader[stks.length][];
        for (int i = 0; i < stks.length; i++) {
            stkReaders[i] = new MetamorphReader[stks[i].length];
            for (int j = 0; j < stkReaders[i].length; j++) {
                stkReaders[i][j] = new MetamorphReader();
                stkReaders[i][j].setCanLookForND(false);
                if (j > 0) {
                    stkReaders[i][j].setMetadataOptions(new DynamicMetadataOptions(MetadataLevel.MINIMUM));
                }
            }
        }
    }
    // check stage labels for plate data
    // The data is treated as HCS (plate) only if every stage label maps to a
    // well coordinate and all wells are distinct.
    int rows = 0;
    int cols = 0;
    Map<String, Integer> rowMap = null;
    Map<String, Integer> colMap = null;
    isHCS = true;
    if (null == stageLabels) { isHCS = false; }
    else {
        Set<Map.Entry<Integer, Integer>> uniqueWells = new HashSet<Map.Entry<Integer, Integer>>();
        rowMap = new HashMap<String, Integer>();
        colMap = new HashMap<String, Integer>();
        for (String label : stageLabels) {
            if (null == label) { isHCS = false; break; }
            Map.Entry<Integer, Integer> wellCoords = getWellCoords(label);
            if (null == wellCoords) { isHCS = false; break; }
            uniqueWells.add(wellCoords);
            rowMap.put(label, wellCoords.getKey());
            colMap.put(label, wellCoords.getValue());
        }
        if (uniqueWells.size() != stageLabels.length) { isHCS = false; }
        else {
            rows = Collections.max(rowMap.values());
            cols = Collections.max(colMap.values());
            // Re-shape core: one single-plane series per well.
            CoreMetadata c = core.get(0);
            core.clear();
            c.sizeZ = 1;
            c.sizeT = 1;
            c.imageCount = 1;
            for (int s = 0; s < uniqueWells.size(); s++) {
                CoreMetadata toAdd = new CoreMetadata(c);
                if (s > 0) { toAdd.seriesMetadata.clear(); }
                core.add(toAdd);
            }
            seriesToIFD = true;
        }
    }
    // ---- populate OME metadata store ----
    List<String> timestamps = null;
    MetamorphHandler handler = null;
    MetadataStore store = makeFilterMetadata();
    MetadataTools.populatePixels(store, this, true);
    if (isHCS) {
        store.setPlateID(MetadataTools.createLSID("Plate", 0), 0);
        store.setPlateRows(new PositiveInteger(rows), 0);
        store.setPlateColumns(new PositiveInteger(cols), 0);
        store.setPlateRowNamingConvention(NamingConvention.LETTER, 0);
        store.setPlateColumnNamingConvention(NamingConvention.NUMBER, 0);
    }
    int nextObjective = 0;
    String instrumentID = MetadataTools.createLSID("Instrument", 0);
    String detectorID = MetadataTools.createLSID("Detector", 0, 0);
    store.setInstrumentID(instrumentID, 0);
    store.setDetectorID(detectorID, 0, 0);
    store.setDetectorType(getDetectorType("Other"), 0, 0);
    for (int i = 0; i < getSeriesCount(); i++) {
        setSeries(i);
        // do not reparse the same XML for every well
        if (i == 0 || !isHCS) { handler = new MetamorphHandler(getSeriesMetadata()); }
        if (isHCS) {
            String label = stageLabels[i];
            String wellID = MetadataTools.createLSID("Well", 0, i);
            store.setWellID(wellID, 0, i);
            store.setWellColumn(new NonNegativeInteger(colMap.get(label)), 0, i);
            store.setWellRow(new NonNegativeInteger(rowMap.get(label)), 0, i);
            store.setWellSampleID(MetadataTools.createLSID("WellSample", 0, i, 0), 0, i, 0);
            store.setWellSampleImageRef(MetadataTools.createLSID("Image", i), 0, i, 0);
            store.setWellSampleIndex(new NonNegativeInteger(i), 0, i, 0);
        }
        store.setImageInstrumentRef(instrumentID, i);
        String comment = getFirstComment(i);
        if (i == 0 || !isHCS) {
            if (comment != null && comment.startsWith("<MetaData>")) {
                try { XMLTools.parseXML(XMLTools.sanitizeXML(comment), handler); } catch (IOException e) { }
            }
        }
        if (creationTime != null) {
            String date = DateTools.formatDate(creationTime, SHORT_DATE_FORMAT, ".");
            // NOTE(review): acquisition date is always written to image 0, not
            // image i — confirm this is intentional.
            if (date != null) { store.setImageAcquisitionDate(new Timestamp(date), 0); }
        }
        store.setImageName(makeImageName(i).trim(), i);
        if (getMetadataOptions().getMetadataLevel() == MetadataLevel.MINIMUM) { continue; }
        store.setImageDescription("", i);
        store.setImagingEnvironmentTemperature(new Temperature(handler.getTemperature(), UNITS.CELSIUS), i);
        if (sizeX == null) sizeX = handler.getPixelSizeX();
        if (sizeY == null) sizeY = handler.getPixelSizeY();
        Length physicalSizeX = FormatTools.getPhysicalSizeX(sizeX);
        Length physicalSizeY = FormatTools.getPhysicalSizeY(sizeY);
        if (physicalSizeX != null) { store.setPixelsPhysicalSizeX(physicalSizeX, i); }
        if (physicalSizeY != null) { store.setPixelsPhysicalSizeY(physicalSizeY, i); }
        // Z step size: prefer explicit zDistances; otherwise derive it from the
        // unique per-plane z-positions embedded in the IFD XML comments.
        if (zDistances != null) { stepSize = zDistances[0]; }
        else {
            List<Double> zPositions = new ArrayList<Double>();
            final List<Double> uniqueZ = new ArrayList<Double>();
            for (IFD ifd : ifds) {
                MetamorphHandler zPlaneHandler = new MetamorphHandler();
                String zComment = ifd.getComment();
                if (zComment != null && zComment.startsWith("<MetaData>")) {
                    try { XMLTools.parseXML(XMLTools.sanitizeXML(zComment), zPlaneHandler); } catch (IOException e) { }
                }
                zPositions = zPlaneHandler.getZPositions();
                for (Double z : zPositions) { if (!uniqueZ.contains(z)) uniqueZ.add(z); }
            }
            if (uniqueZ.size() > 1 && uniqueZ.size() == getSizeZ()) {
                // average step = |lastZ - firstZ| / (sizeZ - 1)
                BigDecimal lastZ = BigDecimal.valueOf(uniqueZ.get(uniqueZ.size() - 1));
                BigDecimal firstZ = BigDecimal.valueOf(uniqueZ.get(0));
                BigDecimal zRange = (lastZ.subtract(firstZ)).abs();
                BigDecimal zSize = BigDecimal.valueOf((double) (getSizeZ() - 1));
                MathContext mc = new MathContext(10, RoundingMode.HALF_UP);
                stepSize = zRange.divide(zSize, mc).doubleValue();
            }
        }
        Length physicalSizeZ = FormatTools.getPhysicalSizeZ(stepSize);
        if (physicalSizeZ != null) { store.setPixelsPhysicalSizeZ(physicalSizeZ, i); }
        if (handler.getLensNA() != 0 || handler.getLensRI() != 0) {
            String objectiveID = MetadataTools.createLSID("Objective", 0, nextObjective);
            store.setObjectiveID(objectiveID, 0, nextObjective);
            if (handler.getLensNA() != 0) { store.setObjectiveLensNA(handler.getLensNA(), 0, nextObjective); }
            store.setObjectiveSettingsID(objectiveID, i);
            if (handler.getLensRI() != 0) { store.setObjectiveSettingsRefractiveIndex(handler.getLensRI(), i); }
            nextObjective++;
        }
        // Per-channel metadata: wavelength names, binning, readout rate, gain,
        // detector link and (optionally) a laser light source.
        int waveIndex = 0;
        for (int c = 0; c < getEffectiveSizeC(); c++) {
            if (firstSeriesChannels == null || (stageNames != null && stageNames.size() == getSeriesCount())) { waveIndex = c; }
            else if (firstSeriesChannels != null) {
                int s = i % 2;
                // NOTE(review): the array access precedes the bounds check in
                // this condition, so waveIndex could run past the array —
                // confirm inputs make that impossible.
                while (firstSeriesChannels[waveIndex] == (s == 1) && waveIndex < firstSeriesChannels.length) { waveIndex++; }
            }
            if (waveNames != null && waveIndex < waveNames.size()) { store.setChannelName(waveNames.get(waveIndex).trim(), i, c); }
            if (handler.getBinning() != null) binning = handler.getBinning();
            if (binning != null) { store.setDetectorSettingsBinning(getBinning(binning), i, c); }
            if (handler.getReadOutRate() != 0) { store.setDetectorSettingsReadOutRate(new Frequency(handler.getReadOutRate(), UNITS.HERTZ), i, c); }
            if (gain == null) { gain = handler.getGain(); }
            if (gain != null) { store.setDetectorSettingsGain(gain, i, c); }
            store.setDetectorSettingsID(detectorID, i, c);
            if (wave != null && waveIndex < wave.length) {
                Length wavelength = FormatTools.getWavelength(wave[waveIndex]);
                if ((int) wave[waveIndex] >= 1) {
                    // link LightSource to Image
                    int laserIndex = i * getEffectiveSizeC() + c;
                    String lightSourceID = MetadataTools.createLSID("LightSource", 0, laserIndex);
                    store.setLaserID(lightSourceID, 0, laserIndex);
                    store.setChannelLightSourceSettingsID(lightSourceID, i, c);
                    store.setLaserType(getLaserType("Other"), 0, laserIndex);
                    store.setLaserLaserMedium(getLaserMedium("Other"), 0, laserIndex);
                    if (wavelength != null) { store.setChannelLightSourceSettingsWavelength(wavelength, i, c); }
                }
            }
            waveIndex++;
        }
        timestamps = handler.getTimestamps();
        for (int t = 0; t < timestamps.size(); t++) {
            String date = DateTools.convertDate(DateTools.getTime(timestamps.get(t), SHORT_DATE_FORMAT, "."), DateTools.UNIX, SHORT_DATE_FORMAT + ".SSS");
            addSeriesMetaList("timestamp", date);
        }
        long startDate = 0;
        if (timestamps.size() > 0) { startDate = DateTools.getTime(timestamps.get(0), SHORT_DATE_FORMAT, "."); }
        final Length positionX = handler.getStagePositionX();
        final Length positionY = handler.getStagePositionY();
        final List<Double> exposureTimes = handler.getExposures();
        if (exposureTimes.size() == 0) {
            for (int p = 0; p < getImageCount(); p++) { exposureTimes.add(exposureTime); }
        }
        else if (exposureTimes.size() == 1 && exposureTimes.size() < getEffectiveSizeC()) {
            // One global exposure but several channels: read each remaining
            // channel's exposure from its own XML comment.
            for (int c = 1; c < getEffectiveSizeC(); c++) {
                MetamorphHandler channelHandler = new MetamorphHandler();
                String channelComment = getComment(i, c);
                if (channelComment != null && channelComment.startsWith("<MetaData>")) {
                    try { XMLTools.parseXML(XMLTools.sanitizeXML(channelComment), channelHandler); } catch (IOException e) { }
                }
                final List<Double> channelExpTime = channelHandler.getExposures();
                exposureTimes.add(channelExpTime.get(0));
            }
        }
        // Per-plane metadata: deltaT, exposure, stage X/Y/Z positions. The STK
        // file for each plane is opened lazily and kept open while consecutive
        // planes come from the same file.
        int lastFile = -1;
        IFDList lastIFDs = null;
        IFD lastIFD = null;
        double distance = zStart;
        TiffParser tp = null;
        RandomAccessInputStream stream = null;
        for (int p = 0; p < getImageCount(); p++) {
            int[] coords = getZCTCoords(p);
            Double deltaT = 0d;
            Double expTime = exposureTime;
            Double xmlZPosition = null;
            int fileIndex = getIndex(0, coords[1], coords[2]) / getSizeZ();
            if (fileIndex >= 0) {
                String file = stks == null ? currentId : stks[i][fileIndex];
                if (file != null) {
                    if (fileIndex != lastFile) {
                        if (stream != null) { stream.close(); }
                        stream = new RandomAccessInputStream(file, 16);
                        tp = new TiffParser(stream);
                        tp.checkHeader();
                        IFDList f = tp.getMainIFDs();
                        if (f.size() > 0) { lastFile = fileIndex; lastIFDs = f; }
                        else {
                            // unreadable STK; drop it from the file list
                            file = null;
                            stks[i][fileIndex] = null;
                        }
                    }
                }
                if (file != null) {
                    lastIFD = lastIFDs.get(p % lastIFDs.size());
                    Object commentEntry = lastIFD.get(IFD.IMAGE_DESCRIPTION);
                    if (commentEntry != null) {
                        if (commentEntry instanceof String) { comment = (String) commentEntry; }
                        else if (commentEntry instanceof TiffIFDEntry) { comment = tp.getIFDValue((TiffIFDEntry) commentEntry).toString(); }
                    }
                    if (comment != null) comment = comment.trim();
                    if (comment != null && comment.startsWith("<MetaData>")) {
                        // Extract z-position and acquisition timestamps from the
                        // plane's <prop> elements.
                        String[] lines = comment.split("\n");
                        timestamps = new ArrayList<String>();
                        for (String line : lines) {
                            line = line.trim();
                            if (line.startsWith("<prop")) {
                                int firstQuote = line.indexOf("\"") + 1;
                                int lastQuote = line.lastIndexOf("\"");
                                String key = line.substring(firstQuote, line.indexOf("\"", firstQuote));
                                String value = line.substring(line.lastIndexOf("\"", lastQuote - 1) + 1, lastQuote);
                                if (key.equals("z-position")) { xmlZPosition = new Double(value); }
                                else if (key.equals("acquisition-time-local")) { timestamps.add(value); }
                            }
                        }
                    }
                }
            }
            int index = 0;
            if (timestamps.size() > 0) {
                if (coords[2] < timestamps.size()) index = coords[2];
                String stamp = timestamps.get(index);
                long ms = DateTools.getTime(stamp, SHORT_DATE_FORMAT, ".");
                deltaT = (ms - startDate) / 1000.0;
            }
            else if (internalStamps != null && p < internalStamps.length) {
                long delta = internalStamps[p] - internalStamps[0];
                deltaT = delta / 1000.0;
                if (coords[2] < exposureTimes.size()) index = coords[2];
            }
            if (index == 0 && p > 0 && exposureTimes.size() > 0) { index = coords[1] % exposureTimes.size(); }
            if (index < exposureTimes.size()) { expTime = exposureTimes.get(index); }
            if (deltaT != null) { store.setPlaneDeltaT(new Time(deltaT, UNITS.SECOND), i, p); }
            if (expTime != null) { store.setPlaneExposureTime(new Time(expTime, UNITS.SECOND), i, p); }
            if (stageX != null && p < stageX.length) { store.setPlanePositionX(stageX[p], i, p); }
            else if (positionX != null) { store.setPlanePositionX(positionX, i, p); }
            if (stageY != null && p < stageY.length) { store.setPlanePositionY(stageY[p], i, p); }
            else if (positionY != null) { store.setPlanePositionY(positionY, i, p); }
            if (zDistances != null && p < zDistances.length) {
                if (p > 0) {
                    if (zDistances[p] != 0d) distance += zDistances[p];
                    else distance += zDistances[0];
                }
                final Length zPos = new Length(distance, UNITS.REFERENCEFRAME);
                store.setPlanePositionZ(zPos, i, p);
            }
            else if (xmlZPosition != null) {
                final Length zPos = new Length(xmlZPosition, UNITS.REFERENCEFRAME);
                store.setPlanePositionZ(zPos, i, p);
            }
        }
        if (stream != null) { stream.close(); }
    }
    setSeries(0);
}
From source file:org.cds06.speleograph.data.Series.java
/**
 * Recomputes the cached series extrema ({@code seriesMaxValue} /
 * {@code seriesMinValue}) from the current data items.
 *
 * If there are no items, the previous extrema are left untouched instead of
 * letting {@link Collections#max}/{@link Collections#min} throw
 * {@code NoSuchElementException} on an empty collection.
 */
private void setMinMaxValue() {
    if (items == null || items.isEmpty()) {
        // nothing to measure — keep the previously cached values
        return;
    }
    seriesMaxValue = Collections.max(items).getValue();
    seriesMinValue = Collections.min(items).getValue();
}
From source file:com.android.tools.lint.checks.GradleDetector.java
/** Returns the latest build tools installed for the given major version. * We just cache this once; we don't need to be accurate in the sense that if the * user opens the SDK manager and installs a more recent version, we capture this in * the same IDE session./*from ww w . ja v a 2s . c o m*/ * * @param client the associated client * @param major the major version of build tools to look up (e.g. typically 18, 19, ...) * @return the corresponding highest known revision */ @Nullable private static PreciseRevision getLatestBuildTools(@NonNull LintClient client, int major) { if (major != sMajorBuildTools) { sMajorBuildTools = major; List<PreciseRevision> revisions = Lists.newArrayList(); if (major == 21) { revisions.add(new PreciseRevision(21, 1, 2)); } else if (major == 20) { revisions.add(new PreciseRevision(20)); } else if (major == 19) { revisions.add(new PreciseRevision(19, 1)); } else if (major == 18) { revisions.add(new PreciseRevision(18, 1, 1)); } // The above versions can go stale. // Check if a more recent one is installed. (The above are still useful for // people who haven't updated with the SDK manager recently.) File sdkHome = client.getSdkHome(); if (sdkHome != null) { File[] dirs = new File(sdkHome, FD_BUILD_TOOLS).listFiles(); if (dirs != null) { for (File dir : dirs) { String name = dir.getName(); if (!dir.isDirectory() || !Character.isDigit(name.charAt(0))) { continue; } PreciseRevision v = parseRevisionSilently(name); if (v != null && v.getMajor() == major) { revisions.add(v); } } } } if (!revisions.isEmpty()) { sLatestBuildTools = Collections.max(revisions); } } return sLatestBuildTools; }
From source file:org.linagora.linshare.core.service.impl.UploadRequestServiceImpl.java
/**
 * Appends a history entry describing the request's current status.
 *
 * The new entry's "status changed" flag is true only when there is at least one
 * existing history entry and the most recent one (by natural ordering) carries
 * a different status than the request currently has.
 *
 * Fixes over the previous version: the {@code Lists.newArrayList(x) != null}
 * guard was always true (a fresh list is never null), and the history
 * collection was defensively copied three times per call; {@code
 * Collections.max} accepts any {@code Collection} directly.
 *
 * @param req the upload request to annotate; its history collection is mutated
 * @return the same request instance, for chaining
 */
private UploadRequest setHistory(UploadRequest req) {
    boolean statusChanged = false;
    if (req.getUploadRequestHistory() != null && !req.getUploadRequestHistory().isEmpty()) {
        // most recent entry by the history's natural (Comparable) ordering
        UploadRequestHistory last = Collections.max(req.getUploadRequestHistory());
        statusChanged = !last.getStatus().equals(req.getStatus());
    }
    UploadRequestHistory hist = new UploadRequestHistory(req,
            UploadRequestHistoryEventType.fromStatus(req.getStatus()), statusChanged);
    req.getUploadRequestHistory().add(hist);
    return req;
}
From source file:io.plaidapp.ui.HomeActivity.java
/** * Highlight the new source(s) by:/* www. j a va2s .com*/ * 1. opening the drawer * 2. scrolling new source(s) into view * 3. flashing new source(s) background * 4. closing the drawer (if user hasn't interacted with it) */ private void highlightNewSources(final Source... sources) { final Runnable closeDrawerRunnable = new Runnable() { @Override public void run() { drawer.closeDrawer(GravityCompat.END); } }; drawer.addDrawerListener(new DrawerLayout.SimpleDrawerListener() { // if the user interacts with the filters while it's open then don't auto-close private final View.OnTouchListener filtersTouch = new View.OnTouchListener() { @Override public boolean onTouch(View v, MotionEvent event) { drawer.removeCallbacks(closeDrawerRunnable); return false; } }; @Override public void onDrawerOpened(View drawerView) { // scroll to the new item(s) and highlight them List<Integer> filterPositions = new ArrayList<>(sources.length); for (Source source : sources) { if (source != null) { filterPositions.add(filtersAdapter.getFilterPosition(source)); } } int scrollTo = Collections.max(filterPositions); filtersList.smoothScrollToPosition(scrollTo); for (int position : filterPositions) { filtersAdapter.highlightFilter(position); } filtersList.setOnTouchListener(filtersTouch); } @Override public void onDrawerClosed(View drawerView) { // reset filtersList.setOnTouchListener(null); drawer.removeDrawerListener(this); } @Override public void onDrawerStateChanged(int newState) { // if the user interacts with the drawer manually then don't auto-close if (newState == DrawerLayout.STATE_DRAGGING) { drawer.removeCallbacks(closeDrawerRunnable); } } }); drawer.openDrawer(GravityCompat.END); drawer.postDelayed(closeDrawerRunnable, 2000L); }
From source file:uk.ac.diamond.scisoft.analysis.rcp.editors.CompareFilesEditor.java
/** * Generate new index values from current list of included files and expressions * @return New index equal to the maximum index incremented by one *///ww w .j a v a 2 s . c om private int getNewIndex() { List<Integer> idxList = new ArrayList<Integer>(); for (SelectedObject obj : fileList) { idxList.add(obj.getIndex()); } for (SelectedObject obj : expressionList) { idxList.add(obj.getIndex()); } int idxNew = Collections.max(idxList) + 1; return idxNew; }
From source file:io.hummer.util.test.GenericTestResult.java
/**
 * Loads the most recent result file matching a filename pattern.
 *
 * The pattern contains a placeholder token standing in for a numeric ID (e.g.
 * a timestamp). All files in the pattern's directory are matched against the
 * pattern with the placeholder replaced by a capture group; the largest
 * parseable ID wins and the corresponding file is loaded.
 *
 * Fixes over the previous version: {@code File.list()} can return null (missing
 * or non-directory path), which caused an NPE in the for-each; the regex is now
 * compiled once outside the loop instead of per directory entry.
 *
 * @param fileNamePattern path pattern, e.g. {@code "results/run-<ID>.xml"}
 * @param placeHolder     the literal placeholder inside the pattern, e.g. {@code "<ID>"}
 * @return the loaded result (with an empty ID substituted when no file matched)
 * @throws Exception if loading the selected file fails
 */
public static GenericTestResult loadLast(String fileNamePattern, String placeHolder) throws Exception {
    String dir = fileNamePattern.contains("/")
            ? fileNamePattern.substring(0, fileNamePattern.lastIndexOf("/"))
            : "./";
    String file = fileNamePattern.contains("/")
            ? fileNamePattern.substring(fileNamePattern.lastIndexOf("/") + 1)
            : fileNamePattern;
    List<Long> ids = new LinkedList<Long>();
    // list() returns null when dir does not exist or is not a directory
    String[] entries = new File(dir).list();
    if (entries != null) {
        // Loop-invariant: compile the pattern once. NOTE(review): the filename
        // around the placeholder is treated as a regex (e.g. '.' matches any
        // char) — preserved from the original behavior.
        Pattern p = Pattern.compile(file.replace(placeHolder, "(.+)"));
        for (String f : entries) {
            Matcher m = p.matcher(f);
            if (m.matches()) {
                String id = m.group(1);
                try {
                    ids.add(Long.parseLong(id));
                } catch (Exception e) {
                    logger.warn("Ignoring ID '" + id + "', which cannot be parsed as Long.");
                }
            }
        }
    }
    String id = ids.isEmpty() ? "" : ("" + Collections.max(ids));
    return load(fileNamePattern.replace(placeHolder, id));
}
From source file:com.thoughtworks.go.server.dao.PipelineSqlMapDao.java
/**
 * Loads pipeline history rows for the given pipeline, restricted to the id
 * window spanned by the requested ids, and resolves each row's build cause.
 *
 * @param pipelineName name of the pipeline whose history is wanted
 * @param ids          instance ids of interest; only their min/max bound the query
 * @return the loaded models, or an empty collection when no ids were given
 */
private PipelineInstanceModels loadHistory(String pipelineName, List<Long> ids) {
    // No ids requested — nothing to query.
    if (ids.isEmpty()) {
        return PipelineInstanceModels.createPipelineInstanceModels();
    }
    // Query a contiguous window bounded by the smallest and largest ids.
    Long lowestId = Collections.min(ids);
    Long highestId = Collections.max(ids);
    Map<String, Object> queryArgs = arguments("pipelineName", pipelineName)
            .and("from", lowestId)
            .and("to", highestId)
            .asMap();
    List<PipelineInstanceModel> rows = (List<PipelineInstanceModel>) getSqlMapClientTemplate()
            .queryForList("getPipelineHistoryByName", queryArgs);
    PipelineInstanceModels models = PipelineInstanceModels.createPipelineInstanceModels(rows);
    for (PipelineInstanceModel model : models) {
        loadPipelineHistoryBuildCause(model);
    }
    return models;
}
From source file:com.vgi.mafscaling.ClosedLoop.java
/**
 * Configure chart axis labels, ranges, series visibility, and dataset
 * descriptions according to which display checkboxes are currently selected.
 * Exactly one display mode wins, tested in priority order: dv/dt, IAT,
 * throttle/RPM, mean/mode, raw run data, smoothing, MAF curves; otherwise
 * both axes fall back to auto-range.
 */
private void setRanges() {
    double paddingX;
    double paddingY;
    // Reset descriptions and axis/series state to the defaults before applying a mode.
    currMafData.setDescription(currentDataName);
    corrMafData.setDescription(correctedDataName);
    smoothMafData.setDescription(smoothedDataName);
    XYPlot plot = mafChartPanel.getChartPanel().getChart().getXYPlot();
    plot.getDomainAxis(0).setLabel(XAxisName);
    plot.getRangeAxis(0).setLabel(Y1AxisName);
    plot.getRangeAxis(1).setLabel(Y2AxisName);
    plot.getRangeAxis(0).setVisible(true);
    plot.getRenderer(0).setSeriesVisible(0, true);
    plot.getRenderer(0).setSeriesVisible(1, true);
    plot.getRenderer(0).setSeriesVisible(2, true);
    if (checkBoxDvdtData.isSelected() && checkBoxDvdtData.isEnabled()) {
        // dv/dt vs time: hide the MAF series and primary axis, show run data on axis 1.
        paddingX = runData.getMaxX() * 0.05;
        paddingY = runData.getMaxY() * 0.05;
        plot.getDomainAxis(0).setRange(runData.getMinX() - paddingX, runData.getMaxX() + paddingX);
        plot.getRangeAxis(1).setRange(runData.getMinY() - paddingY, runData.getMaxY() + paddingY);
        plot.getRangeAxis(0).setVisible(false);
        plot.getRangeAxis(1).setLabel(dvdtAxisName);
        plot.getDomainAxis(0).setLabel(timeAxisName);
        plot.getRenderer(0).setSeriesVisible(0, false);
        plot.getRenderer(0).setSeriesVisible(1, false);
        plot.getRenderer(0).setSeriesVisible(2, false);
    } else if (checkBoxIatData.isSelected() && checkBoxIatData.isEnabled()) {
        // IAT vs time: same layout as dv/dt but with the IAT axis label.
        paddingX = runData.getMaxX() * 0.05;
        paddingY = runData.getMaxY() * 0.05;
        plot.getDomainAxis(0).setRange(runData.getMinX() - paddingX, runData.getMaxX() + paddingX);
        plot.getRangeAxis(1).setRange(runData.getMinY() - paddingY, runData.getMaxY() + paddingY);
        plot.getRangeAxis(0).setVisible(false);
        plot.getRangeAxis(1).setLabel(iatAxisName);
        plot.getDomainAxis(0).setLabel(timeAxisName);
        plot.getRenderer(0).setSeriesVisible(0, false);
        plot.getRenderer(0).setSeriesVisible(1, false);
        plot.getRenderer(0).setSeriesVisible(2, false);
    } else if (checkBoxTrpmData.isSelected() && checkBoxTrpmData.isEnabled()) {
        // Throttle/RPM trend view.
        paddingX = runData.getMaxX() * 0.05;
        paddingY = runData.getMaxY() * 0.05;
        plot.getDomainAxis(0).setRange(runData.getMinX() - paddingX, runData.getMaxX() + paddingX);
        plot.getRangeAxis(1).setRange(runData.getMinY() - paddingY, runData.getMaxY() + paddingY);
        plot.getRangeAxis(0).setRange(runData.getMinY() - paddingY, runData.getMaxY() + paddingY);
        plot.getRangeAxis(0).setVisible(false);
        plot.getRangeAxis(1).setLabel(trpmAxisName);
        plot.getDomainAxis(0).setLabel(rpmAxisName);
        plot.getRenderer(0).setSeriesVisible(0, false);
        plot.getRenderer(0).setSeriesVisible(1, false);
        currMafData.setDescription("Trend");
    } else if (checkBoxMnmdData.isSelected() && checkBoxMnmdData.isEnabled()) {
        // Mean/mode view: both range axes carry run-data bounds.
        paddingX = runData.getMaxX() * 0.05;
        paddingY = runData.getMaxY() * 0.05;
        plot.getDomainAxis(0).setRange(runData.getMinX() - paddingX, runData.getMaxX() + paddingX);
        plot.getRangeAxis(1).setRange(runData.getMinY() - paddingY, runData.getMaxY() + paddingY);
        plot.getRangeAxis(0).setRange(runData.getMinY() - paddingY, runData.getMaxY() + paddingY);
        plot.getRangeAxis(0).setLabel(mnmdAxisName);
        plot.getRangeAxis(1).setLabel(mnmd2AxisName);
        plot.getDomainAxis(0).setLabel(XAxisName);
        plot.getRenderer(0).setSeriesVisible(0, false);
        currMafData.setDescription("Mean");
        corrMafData.setDescription("Mode");
    } else if (checkBoxRunData.isSelected() && checkBoxRunData.isEnabled() && !checkBoxCurrentMaf.isSelected()
            && !checkBoxCorrectedMaf.isSelected() && !checkBoxSmoothedMaf.isSelected()) {
        // Raw run data only.
        paddingX = runData.getMaxX() * 0.05;
        paddingY = runData.getMaxY() * 0.05;
        plot.getDomainAxis(0).setRange(runData.getMinX() - paddingX, runData.getMaxX() + paddingX);
        plot.getRangeAxis(1).setRange(runData.getMinY() - paddingY, runData.getMaxY() + paddingY);
    } else if (checkBoxSmoothing.isSelected()) {
        // Smoothing view: the primary axis must enclose all three series.
        double maxY = Collections.max(Arrays.asList(
                currMafData.getMaxY(), smoothMafData.getMaxY(), corrMafData.getMaxY()));
        // BUG FIX: the lower bound must be the MINIMUM of the series minimums;
        // the original used Collections.max here (copy-paste from the maxY line),
        // which could clip the series with the smallest minimum.
        double minY = Collections.min(Arrays.asList(
                currMafData.getMinY(), smoothMafData.getMinY(), corrMafData.getMinY()));
        paddingX = smoothMafData.getMaxX() * 0.05;
        paddingY = maxY * 0.05;
        plot.getDomainAxis(0).setRange(smoothMafData.getMinX() - paddingX, smoothMafData.getMaxX() + paddingX);
        plot.getRangeAxis(0).setRange(minY - paddingY, maxY + paddingY);
        corrMafData.setDescription(mafCurveDataName);
        currMafData.setDescription(currentSlopeDataName);
        smoothMafData.setDescription(smoothedSlopeDataName);
    } else if ((checkBoxCurrentMaf.isSelected() && checkBoxCurrentMaf.isEnabled())
            || (checkBoxCorrectedMaf.isSelected() && checkBoxCorrectedMaf.isEnabled())
            || (checkBoxSmoothedMaf.isSelected() && checkBoxSmoothedMaf.isEnabled())) {
        // MAF curve view: bounds come from the voltage/corrected-gs tables,
        // which are assumed sorted ascending — TODO confirm with the caller.
        paddingX = voltArray.get(voltArray.size() - 1) * 0.05;
        paddingY = gsCorrected.get(gsCorrected.size() - 1) * 0.05;
        plot.getDomainAxis(0).setRange(voltArray.get(0) - paddingX,
                voltArray.get(voltArray.size() - 1) + paddingX);
        plot.getRangeAxis(0).setRange(gsCorrected.get(0) - paddingY,
                gsCorrected.get(gsCorrected.size() - 1) + paddingY);
        if (checkBoxRunData.isSelected()) {
            paddingX = runData.getMaxX() * 0.05;
            paddingY = runData.getMaxY() * 0.05;
            plot.getRangeAxis(1).setRange(runData.getMinY() - paddingY, runData.getMaxY() + paddingY);
        }
    } else {
        // No recognized mode selected: let JFreeChart pick the ranges.
        plot.getRangeAxis(0).setAutoRange(true);
        plot.getDomainAxis(0).setAutoRange(true);
    }
}
From source file:com.hortonworks.historian.nifi.reporter.HistorianDeanReporter.java
private void indexLateData(List<String> dataSourceExclusions) { String nifiControllersUrl = nifiUrl + "/nifi-api/flow/process-groups/root/controller-services"; try {//from www.j ava 2 s . c om JSONArray controllers = getJSONFromUrl(nifiControllersUrl, basicAuth) .getJSONArray("controllerServices"); getLogger().info("********** Getting List of Druid Tranquility Controllers..."); for (int i = 0; i < controllers.length(); i++) { JSONObject currentController = controllers.getJSONObject(i).getJSONObject("component"); String currentControllerType = currentController.getString("type"); if (currentControllerType .equalsIgnoreCase("com.hortonworks.nifi.controller.DruidTranquilityController")) { String lateDataPath = lateDataRoot + "/" + currentController.getJSONObject("properties") .getString("query_granularity").toLowerCase() + "/"; getLogger().info("********** Checking for Late Arriving Data at HDFS Path: " + lateDataPath); if (fs.exists(new Path(lateDataPath))) { FileStatus[] fileStatus = fs.listStatus(new Path(lateDataPath)); List<Date> dates = new ArrayList<Date>(); List<String> sourceData = new ArrayList<String>(); for (FileStatus status : fileStatus) { String[] address = status.getPath().toString().split("/"); String currentBin = address[address.length - 1]; Date binDate = new SimpleDateFormat("yyyy-MM-dd-HH-mm").parse(currentBin); sourceData.add(lateDataPath + currentBin); dates.add(binDate); } ((Collection<?>) sourceData).removeAll(dataSourceExclusions); getLogger().info("********** Detected " + sourceData.size() + " bins of relevant late data, initiating Delta Indexing task..."); if (fileStatus.length > 0 && sourceData.size() > 0) { String intervalStart = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'") .format(Collections.min(dates)); String intervalEnd = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'") .format(Collections.max(dates)); String bins = String.join(",", sourceData); JSONArray dimensionsList = new JSONArray(Arrays.asList(currentController 
.getJSONObject("properties").getString("dimensions_list").split(","))); String ingestSpec = "{" + " \"type\" : \"index_hadoop\"," + " \"spec\" : {" + " \"dataSchema\" : {" + " \"dataSource\": \"" + currentController.getJSONObject("properties").getString("data_source") + "\"," + " \"parser\" : {" + " \"type\" : \"hadoopyString\"," + " \"parseSpec\" : {" + " \"format\" : \"json\"," + " \"timestampSpec\" : {" + " \"column\" : \"" + currentController.getJSONObject("properties").getString("timestamp_field") + "\"," + " \"format\" : \"auto\"" + " }," + " \"dimensionsSpec\" : {" + " \"dimensions\": " + dimensionsList + " }" + " }" + " }," + " \"metricsSpec\" : " + currentController.getJSONObject("properties") .getString("aggregators_descriptor") + "," + " \"granularitySpec\" : {" + " \"type\" : \"uniform\"," + " \"segmentGranularity\" : \"" + currentController.getJSONObject("properties").getString("segment_granularity") + "\"," + " \"queryGranularity\" : \"" + currentController.getJSONObject("properties").getString("query_granularity") + "\"," + " \"intervals\": [\"" + intervalStart + "/" + intervalEnd + "\"]" + " }" + " }," + " \"ioConfig\" : {" + " \"type\" : \"hadoop\"," + " \"inputSpec\" : {" + " \"type\" : \"multi\"," + " \"children\": [" + " {" + " \"type\" : \"dataSource\"," + " \"ingestionSpec\" : {" + " \"dataSource\": \"" + currentController.getJSONObject("properties").getString("data_source") + "\"," + " \"intervals\": [\"" + intervalStart + "/" + intervalEnd + "\"]" + " }" + " }," + " {" + " \"type\" : \"static\"," + " \"paths\": \"" + bins + "\"" + " }" + " ]" + " }" + " }," + " \"tuningConfig\" : {" + " \"type\": \"hadoop\"" + " }" + " }" + "}"; getLogger().info("********** Delta Ingestion Spec: " + ingestSpec); String indexTaskId = createDruidIndexingTask(ingestSpec); getLogger().info("********** Created Indexing Task " + indexTaskId); Map<String, Object> currentTaskMetaData = new HashMap<String, Object>(); currentTaskMetaData.put("ingestSpec", ingestSpec); 
currentTaskMetaData.put("sourceData", sourceData); deltaIndexTasks.put(indexTaskId, currentTaskMetaData); String currentTaskDirPath = lateDataTasksPath + "/" + indexTaskId.replace(":", "__"); getLogger().info("********** Persisting Record of Task: " + currentTaskDirPath); currentTaskDirPath = createHDFSDirectory(currentTaskDirPath); writeHDFSFile(currentTaskDirPath + "/ingestSpec", ingestSpec); writeHDFSFile(currentTaskDirPath + "/sourceData", bins); } else { getLogger().info("********** " + lateDataPath + " does not contain any data..."); } } else { getLogger().info("********** There is a Druid Controller mapped to " + lateDataPath + ", however, the path does not yet exist..."); } } } } catch (IOException e) { e.printStackTrace(); } catch (ParseException e) { e.printStackTrace(); } catch (JSONException e) { e.printStackTrace(); } }