Example usage for org.apache.commons.lang3.tuple Pair getLeft

List of usage examples for org.apache.commons.lang3.tuple Pair getLeft

Introduction

On this page you can find example usages of org.apache.commons.lang3.tuple Pair getLeft.

Prototype

public abstract L getLeft();

Document

Gets the left element from this pair.

When treated as a key-value pair, this is the key.
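
Before the project examples below, here is a minimal, self-contained sketch (not drawn from any of the projects listed here; class and variable names are illustrative only) of how getLeft() and getRight() are typically read back from a Pair built with Pair.of:

import org.apache.commons.lang3.tuple.Pair;

public class PairGetLeftExample {
    public static void main(String[] args) {
        // Pair.of creates an immutable pair; getLeft() returns the first element,
        // which acts as the key when the pair is treated as a key-value entry.
        Pair<String, Integer> entry = Pair.of("answer", 42);

        String key = entry.getLeft();     // "answer"
        Integer value = entry.getRight(); // 42

        System.out.println(key + " = " + value);
    }
}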

Usage

From source file:net.community.chest.gitcloud.facade.AbstractEnvironmentInitializer.java

protected void contextInitialized(ServletContext context) {
    PlaceholderResolver contextResolver = ServletUtils.toPlaceholderResolver(context);
    Pair<File, Boolean> result = ConfigUtils.resolveGitcloudBase(new AggregatedExtendedPlaceholderResolver(
            contextResolver, ExtendedPlaceholderResolverUtils.SYSPROPS_RESOLVER,
            ExtendedPlaceholderResolverUtils.ENVIRON_RESOLVER));
    File rootDir = result.getLeft();
    Boolean baseExists = result.getRight();
    if (!baseExists.booleanValue()) {
        System.setProperty(ConfigUtils.GITCLOUD_BASE_PROP, rootDir.getAbsolutePath());
        logger.info("contextInitialized(" + context.getContextPath() + ") - added "
                + ConfigUtils.GITCLOUD_BASE_PROP + ": " + ExtendedFileUtils.toString(rootDir));
    } else {
        logger.info("contextInitialized(" + context.getContextPath() + ") using "
                + ConfigUtils.GITCLOUD_BASE_PROP + ": " + ExtendedFileUtils.toString(rootDir));
    }

    extractConfigFiles(new File(rootDir, ConfigUtils.CONF_DIR_NAME));
}

From source file:com.formkiq.core.util.StringsTest.java

/**
 * Test Base64 Image to Bytes and back.
 * @throws IOException IOException
 */
@Test
public void testBase64StringToImg01() throws IOException {
    // given
    String sig = getResourceAsString("/signature.txt");

    // when
    Pair<byte[], String> result = Strings.base64StringToImg(sig);

    // then
    final int len = 4541;
    assertEquals(len, result.getLeft().length);
    assertEquals("png", result.getRight());

    // when
    String imgsrc = Strings.bytesToImg(result.getLeft(), result.getRight());

    // then
    String expect = "data:image/png;base64," + "iVBORw0KGgoAAAANSUhEUgAAASwAAACWCAYAAABkW7XSAAA";
    assertTrue(sig.startsWith(expect));
    assertTrue(imgsrc.startsWith(expect));
}

From source file:com.yahoo.elide.Elide.java

protected ElideResponse buildResponse(Pair<Integer, JsonNode> response) {
    try {
        JsonNode responseNode = response.getRight();
        Integer responseCode = response.getLeft();
        String body = mapper.writeJsonApiDocument(responseNode);
        return new ElideResponse(responseCode, body);
    } catch (JsonProcessingException e) {
        return new ElideResponse(HttpStatus.SC_INTERNAL_SERVER_ERROR, e.toString());
    }
}

From source file:com.sludev.commons.vfs2.provider.s3.SS3FileObject.java

/**
 * Callback for checking the type of the current FileObject.  Typically can
 * be of type...
 * FILE for regular remote files
 * FOLDER for regular remote containers
 * IMAGINARY for a path that does not exist remotely.
 * 
 * @return the FileType of this object: FILE, FOLDER or IMAGINARY
 * @throws Exception if the remote existence check fails
 */
@Override
protected FileType doGetType() throws Exception {
    FileType res;

    Pair<String, String> path = getContainerAndPath();

    if (objectExists(path.getLeft(), path.getRight())) {
        res = FileType.FILE;
    } else {
        // Blob Service does not have folders.  Just files with path separators in
        // their names.

        // Here's the trick for folders.
        //
        // Do a listing on that prefix. If it returns anything even though the
        // object itself does not exist, then it's a folder.
        String prefix = path.getRight();
        if (!prefix.endsWith("/")) {
            // We need folders ( prefixes ) to end with a slash
            prefix += "/";
        }

        ObjectListing blobs = null;
        if (prefix.equals("/")) {
            // Special root path case. List the root blobs with no prefix
            blobs = fileSystem.getClient().listObjects(path.getLeft());
        } else {
            blobs = fileSystem.getClient().listObjects(path.getLeft(), prefix);
        }

        if (blobs.getObjectSummaries().isEmpty()) {
            res = FileType.IMAGINARY;
        } else {
            res = FileType.FOLDER;
        }
    }

    return res;
}

From source file:functionaltests.job.scheduling.TestJobSchedulingStarvationAndPriority.java

public void testJobPriority(boolean addNewNodes) throws Exception {

    Scheduler scheduler = schedulerHelper.getSchedulerInterface();

    int nbNewNodes = 15;

    int nbRunsHigh = RMTHelper.DEFAULT_NODES_NUMBER * 2 + (addNewNodes ? nbNewNodes * 3 : 0);
    int nbRunsLow = RMTHelper.DEFAULT_NODES_NUMBER * 2 + (addNewNodes ? nbNewNodes * 2 : 0);

    String jobDescriptorPath = new File(jobDescriptor.toURI()).getAbsolutePath();

    Job jobHigh = JobFactory.getFactory().createJob(jobDescriptorPath,
            ImmutableMap.of("RUNS", "" + nbRunsHigh));
    jobHigh.setPriority(JobPriority.HIGH);
    JobId jobIdHigh = schedulerHelper.submitJob(jobHigh);
    schedulerHelper.waitForEventTaskRunning(jobIdHigh, REPLICATE_TASK_NAME);
    Job jobLow = JobFactory.getFactory().createJob(jobDescriptorPath, ImmutableMap.of("RUNS", "" + nbRunsLow));
    jobLow.setPriority(JobPriority.LOW);
    JobId jobIdLow = schedulerHelper.submitJob(jobLow);

    if (addNewNodes) {
        schedulerHelper.addExtraNodes(nbNewNodes);
    }

    schedulerHelper.waitForEventJobFinished(jobIdHigh);
    schedulerHelper.waitForEventJobFinished(jobIdLow);

    Pair<Long, Long> minMaxHigh = computeMinMaxStartingTime(scheduler, jobIdHigh, REPLICATE_TASK_NAME_FILTER);
    Pair<Long, Long> minMaxLow = computeMinMaxStartingTime(scheduler, jobIdLow, REPLICATE_TASK_NAME_FILTER);

    Assert.assertTrue("Low Priority tasks min start time : " + minMaxLow.getLeft()
            + " should be greater than the max start time of high priority jobs : " + minMaxHigh.getRight(),
            minMaxLow.getLeft() > minMaxHigh.getRight());
}

From source file:com.sludev.commons.vfs2.provider.s3.SS3FileObject.java

/**
 * Upload a local file to Amazon S3.
 * 
 * @param f File object from the local file-system to be uploaded to Amazon S3
 */
public void putObject(File f) {
    Pair<String, String> path = getContainerAndPath();

    fileSystem.getClient().putObject(new PutObjectRequest(path.getLeft(), path.getRight(), f));
}

From source file:com.act.lcms.db.model.LCMSWell.java

public List<LCMSWell> insertFromPlateComposition(DB db, PlateCompositionParser parser, Plate p)
        throws SQLException, IOException {
    Map<Pair<String, String>, String> msids = parser.getCompositionTables().get("msid");
    List<Pair<String, String>> sortedCoordinates = new ArrayList<>(msids.keySet());
    Collections.sort(sortedCoordinates, new Comparator<Pair<String, String>>() {
        // TODO: parse the values of these pairs as we read them so we don't need this silly comparator.
        @Override
        public int compare(Pair<String, String> o1, Pair<String, String> o2) {
            if (o1.getKey().equals(o2.getKey())) {
                return Integer.valueOf(Integer.parseInt(o1.getValue()))
                        .compareTo(Integer.parseInt(o2.getValue()));
            }
            return o1.getKey().compareTo(o2.getKey());
        }
    });

    List<LCMSWell> results = new ArrayList<>();
    for (Pair<String, String> coords : sortedCoordinates) {
        String msid = msids.get(coords);
        if (msid == null || msid.isEmpty()) {
            continue;
        }
        String composition = parser.getCompositionTables().get("composition").get(coords);
        String chemical = parser.getCompositionTables().get("chemical").get(coords);
        String note = null;
        if (parser.getCompositionTables().get("note") != null) {
            note = parser.getCompositionTables().get("note").get(coords);
        }
        Pair<Integer, Integer> index = parser.getCoordinatesToIndices().get(coords);
        LCMSWell s = INSTANCE.insert(db, p.getId(), index.getLeft(), index.getRight(), msid, composition,
                chemical, note);

        results.add(s);
    }

    return results;
}

From source file:com.act.lcms.plotter.WriteAndPlotMS1Results.java

public void plotFeedings(List<Pair<Double, MS1ScanForWellAndMassCharge>> feedings, String ion, String outPrefix,
        String fmt, String gnuplotFile) throws IOException {
    String outSpectraImg = outPrefix + "." + fmt;
    String outSpectraData = outPrefix + ".data";
    String outFeedingImg = outPrefix + ".fed." + fmt;
    String outFeedingData = outPrefix + ".fed.data";
    String feedingGnuplotFile = gnuplotFile + ".fed";

    boolean useMaxPeak = true;

    // maps that hold the values across different concentrations
    List<Pair<Double, List<XZ>>> concSpectra = new ArrayList<>();
    List<Pair<Double, Double>> concAreaUnderSpectra = new ArrayList<>();
    List<Pair<Double, Double>> concMaxPeak = new ArrayList<>();

    // we will compute a running max of the intensity in the plot, and integral
    Double maxIntensity = 0.0d, maxAreaUnder = 0.0d;

    // now compute the maps { conc -> spectra } and { conc -> area under spectra }
    for (Pair<Double, MS1ScanForWellAndMassCharge> feedExpr : feedings) {
        Double concentration = feedExpr.getLeft();
        MS1ScanForWellAndMassCharge scan = feedExpr.getRight();

        // get the ms1 spectra for the selected ion, and the max for it as well
        List<XZ> ms1 = scan.getIonsToSpectra().get(ion);
        Double maxInThisSpectra = scan.getMaxIntensityForIon(ion);
        Double areaUnderSpectra = scan.getIntegralForIon(ion);

        // update the max intensity over all different spectra
        maxIntensity = Math.max(maxIntensity, maxInThisSpectra);
        maxAreaUnder = Math.max(maxAreaUnder, areaUnderSpectra);

        // install this concentration and spectra in map, to be dumped to file later
        concSpectra.add(Pair.of(concentration, ms1));
        concAreaUnderSpectra.add(Pair.of(concentration, areaUnderSpectra));
        concMaxPeak.add(Pair.of(concentration, maxInThisSpectra));
    }

    // Write data output to outfiles
    List<String> plotID = null;
    try (FileOutputStream outSpectra = new FileOutputStream(outSpectraData)) {
        plotID = writeFeedMS1Values(concSpectra, maxIntensity, outSpectra);
    }

    try (FileOutputStream outFeeding = new FileOutputStream(outFeedingData)) {
        writeFeedMS1Values(useMaxPeak ? concMaxPeak : concAreaUnderSpectra, outFeeding);
    }

    // render outDATA to outPDF using gnuplot
    Gnuplotter gp = new Gnuplotter();
    String[] plotNames = plotID.toArray(new String[plotID.size()]);
    gp.plotOverlayed2D(outSpectraData, outSpectraImg, plotNames, "time", maxIntensity, "intensity", fmt,
            gnuplotFile);
    gp.plot2D(outFeedingData, outFeedingImg, new String[] { "feeding ramp" }, "concentration",
            useMaxPeak ? maxIntensity : maxAreaUnder, "integrated area under spectra", fmt, null, null, null,
            feedingGnuplotFile);
}

From source file:com.splicemachine.db.impl.ast.CorrelatedPushDown.java

public boolean pushdownPredWithColumn(ResultSetNode rsn, Predicate pred, ValueNode colRef)
        throws StandardException {
    try {
        ResultColumn rc = RSUtils.refToRC.apply(colRef);
        List<Pair<Integer, Integer>> chain = ColumnUtils.rsnChain(rc);
        Pair<Integer, Integer> lastLink = chain.get(chain.size() - 1);
        List<ResultSetNode> subTree = RSUtils.getSelfAndDescendants(rsn);
        Map<Integer, ResultSetNode> nodeMap = zipMap(Iterables.transform(subTree, RSUtils.rsNum), subTree);
        ResultSetNode targetRSN = nodeMap.get(lastLink.getLeft());
        rc.setResultSetNumber(lastLink.getLeft());
        rc.setVirtualColumnId(lastLink.getRight());
        ((Optimizable) targetRSN).pushOptPredicate(pred);
    } catch (StandardException e) {
        LOG.warn("Exception pushing down topmost subquery predicate:", e);
        return false;
    }
    return true;
}

From source file:com.epam.catgenome.manager.TrackHelper.java

/**
 * Sets fixed bounds for the track
 * @param track Track to fix bounds
 * @param bounds Pair of bounds to set
 */
public void setBounds(final Track track, final Pair<Integer, Integer> bounds) {
    if (bounds == null) {
        return;
    }

    if (track.getStartIndex() < bounds.getLeft()) {
        track.setStartIndex(bounds.getLeft());
    }
    if (track.getEndIndex() > bounds.getRight()) {
        track.setEndIndex(bounds.getRight());
    }
}