Example usage for org.apache.commons.lang3.tuple Pair getLeft

List of usage examples for org.apache.commons.lang3.tuple Pair getLeft

Introduction

On this page you can find example usage for org.apache.commons.lang3.tuple Pair getLeft.

Prototype

public abstract L getLeft();

Document

Gets the left element from this pair.

When treated as a key-value pair, this is the key.
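
A minimal, self-contained sketch of the accessor (assuming only commons-lang3 on the classpath); Pair also implements Map.Entry, so getKey() is an equivalent accessor:

import org.apache.commons.lang3.tuple.Pair;

public class PairGetLeftExample {
    public static void main(String[] args) {
        // Pair.of creates an immutable pair; getLeft returns the first element.
        Pair<String, Integer> entry = Pair.of("answer", 42);
        System.out.println(entry.getLeft()); // prints: answer
        // Treated as a key-value pair, the left element is the key.
        System.out.println(entry.getKey());  // prints: answer
    }
}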

Usage

From source file:com.vmware.identity.openidconnect.server.LoginTest.java

private static void assertErrorResponse(String loginString, String authzHeader, String expectedError,
        String expectedAuthzResponseHeader, String expectedAuthenticateHeader, CasIdmClient idmClient)
        throws Exception {
    Pair<ModelAndView, MockHttpServletResponse> result = doRequest(loginString, authzHeader,
            null /* sessionCookie */, idmClient);
    ModelAndView modelView = result.getLeft();
    MockHttpServletResponse response = result.getRight();
    Assert.assertNull("modelView", modelView);
    Assert.assertNull("sessionCookie", response.getCookie(SESSION_COOKIE_NAME));
    Assert.assertEquals("status", 401, response.getStatus());
    Object errorResponseHeader = response.getHeader("CastleError");
    Assert.assertNotNull("errorResponseHeader", errorResponseHeader);
    Assert.assertEquals("errorMessage", expectedError, response.getErrorMessage());

    if (expectedAuthzResponseHeader != null) {
        Object authzResponseHeader = response.getHeader("CastleAuthorization");
        Assert.assertNotNull("authzResponseHeader", authzResponseHeader);
        Assert.assertEquals("expectedAuthzResponseHeader", expectedAuthzResponseHeader,
                authzResponseHeader.toString());
    }

    if (expectedAuthenticateHeader != null) {
        Object wwwAuthenticateHeader = response.getHeader("WWW-Authenticate");
        Assert.assertNotNull("wwwAuthenticateHeader", wwwAuthenticateHeader);
        Assert.assertEquals("expectedAuthenticateHeader", expectedAuthenticateHeader,
                wwwAuthenticateHeader.toString());
    }
}

From source file:controllers.oer.Application.java

private static Status response(JsonNode json) {
    /* JSONP callback support for remote server calls with JavaScript: */
    final String[] callback = request() == null || request().queryString() == null ? null
            : request().queryString().get("callback");
    Pair<String, Lang> negotiatedContent = negotiateContent(json);
    final Status notAcceptable = status(406, "Not acceptable: unsupported content type requested\n");
    if (invalidAcceptHeader() || negotiatedContent == null)
        return notAcceptable;
    if (callback != null)
        return ok(String.format("%s(%s)", callback[0], negotiatedContent.getLeft()));
    if (negotiatedContent.getRight().equals(Lang.JSONLD))
        return ok(Json.parse(negotiatedContent.getLeft()));
    return ok(negotiatedContent.getLeft());
}

From source file:com.twitter.graphjet.bipartite.edgepool.EdgePoolConcurrentTestHelper.java

/**
 * This helper method sets up a concurrent read-write situation with a single writer and multiple
 * readers that access the same underlying edgePool, and tests for correct edge access after
 * every single edge write via latches. This helps test write flushing after every edge insertion.
 *
 * @param edgePool    is the underlying {@link com.twitter.graphjet.bipartite.edgepool.EdgePool}
 * @param edgesToAdd  is a list of edges to add in the graph
 * @return the readers that store the state that they saw so that the state can be tested. There
 *         is a reader for every edge insertion.
 */
public static List<EdgePoolReader> runConcurrentReadWriteThreads(EdgePool edgePool,
        List<Pair<Integer, Integer>> edgesToAdd) {
    int numReaders = edgesToAdd.size(); // start reading after first edge is written
    ExecutorService executor = Executors.newFixedThreadPool(numReaders + 1); // single writer

    List<CountDownLatch> readerStartLatches = Lists.newArrayListWithCapacity(numReaders);
    List<CountDownLatch> readerDoneLatches = Lists.newArrayListWithCapacity(numReaders);
    List<EdgePoolReader> readers = Lists.newArrayListWithCapacity(numReaders);

    for (Pair<Integer, Integer> edge : edgesToAdd) {
        CountDownLatch startLatch = new CountDownLatch(1);
        CountDownLatch doneLatch = new CountDownLatch(1);
        // Each time, get edges for the node added in the previous step
        EdgePoolReader edgePoolReader = new EdgePoolReader(edgePool, startLatch, doneLatch, edge.getLeft(), 0);
        readers.add(edgePoolReader);
        executor.submit(edgePoolReader);
        readerStartLatches.add(startLatch);
        readerDoneLatches.add(doneLatch);
    }

    /**
     * The start/done latches achieve the following execution order: writer, then reader 1, then
     * writer, then reader 2, and so on. As a concrete example, suppose we have two readers and a
     * writer, then the start/done latches are used as follows:
     * Initial latches state:
     * s1 = 1, d1 = 1
     * s2 = 1, d2 = 1
     * Execution steps:
     * - writer writes edge 1, sets s1 = 0 and waits on d1
     * - reader 1 reads since s1 == 0 and sets d1 = 0
     * - writer writes edge 2, sets s2 = 0 and waits on d2
     * - reader 2 reads since s2 == 0 and sets d2 = 0
     */
    List<WriterInfo> writerInfo = Lists.newArrayListWithCapacity(edgesToAdd.size());
    for (int i = 0; i < numReaders; i++) {
        // Start writing immediately at first, but then write an edge once the reader finishes reading
        // the previous edge
        CountDownLatch startLatch = (i > 0) ? readerDoneLatches.get(i - 1) : new CountDownLatch(0);
        // Release the next reader
        CountDownLatch doneLatch = readerStartLatches.get(i);
        writerInfo.add(new WriterInfo(edgesToAdd.get(i).getLeft(), edgesToAdd.get(i).getRight(), startLatch,
                doneLatch));
    }

    executor.submit(new EdgePoolWriter(edgePool, writerInfo));

    // Wait for all the threads to finish and then confirm that they worked as expected
    try {
        readerDoneLatches.get(numReaders - 1).await();
    } catch (InterruptedException e) {
        throw new RuntimeException("Execution for last reader was interrupted: ", e);
    }
    return readers;
}
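
The start/done latch hand-off described in the comment above reduces to a small, self-contained sketch; this is an illustrative reduction for a single writer and a single reader, not part of the graphjet sources:

import java.util.concurrent.CountDownLatch;

public class LatchHandOffSketch {
    public static void main(String[] args) {
        CountDownLatch start = new CountDownLatch(1); // s1
        CountDownLatch done = new CountDownLatch(1);  // d1

        Thread writer = new Thread(() -> {
            System.out.println("writer: writes edge 1");
            start.countDown();  // s1 = 0: release the reader
            try {
                done.await();   // block until the reader has read (d1 = 0)
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
            System.out.println("writer: writes edge 2");
        });

        Thread reader = new Thread(() -> {
            try {
                start.await();  // runs only once s1 == 0
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
            System.out.println("reader: reads edge 1");
            done.countDown();   // d1 = 0: unblock the writer
        });

        writer.start();
        reader.start();
    }
}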

From source file:com.act.lcms.db.analysis.WaveformAnalysis.java

public static Map<String, Pair<XZ, Double>> performSNRAnalysisAndReturnMetlinIonsRankOrderedBySNRForWells(
        ChemicalToMapOfMetlinIonsToIntensityTimeValues ionToIntensityDataPos,
        List<ChemicalToMapOfMetlinIonsToIntensityTimeValues> ionToIntensityDataNegList,
        Set<Pair<String, Double>> searchMZs) {

    Map<String, Pair<XZ, Double>> result = new HashMap<>();

    for (Pair<String, Double> mz : searchMZs) {

        String chemicalName = mz.getLeft();

        // Compress the input intensity time graph to solve sparse data issues (multiple retention times where intensity
        // is zero). However, we make sure to preserve what the maximum intensity was in that time window in the function
        // called below.
        Pair<List<XZ>, Map<Double, Double>> positiveXZValuesAndMaxIntensity = compressIntensityAndTimeGraphsAndFindMaxIntensityInEveryTimeWindow(
                ionToIntensityDataPos.getMetlinIonsOfChemical(
                        AnalysisHelper.constructChemicalAndScanTypeName(chemicalName, ScanData.KIND.POS_SAMPLE))
                        .get(chemicalName),
                COMPRESSION_CONSTANT);

        List<XZ> positiveIntensityTimeValues = positiveXZValuesAndMaxIntensity.getLeft();
        Map<Double, Double> positiveTimeToMaxPeak = positiveXZValuesAndMaxIntensity.getRight();

        // Next, we detect peaks within the compressed data.
        List<XZ> positiveIntensityTime = detectPeaksInIntensityTimeWaveform(positiveIntensityTimeValues,
                PEAK_DETECTION_THRESHOLD);

        // Get the compressed results for the negative control data.
        List<List<XZ>> negativeIntensityTimes = new ArrayList<>();
        for (ChemicalToMapOfMetlinIonsToIntensityTimeValues neg : ionToIntensityDataNegList) {
            List<XZ> negativeIntensityTimeValues = compressIntensityAndTimeGraphsAndFindMaxIntensityInEveryTimeWindow(
                    neg.getMetlinIonsOfChemical(AnalysisHelper.constructChemicalAndScanTypeName(chemicalName,
                            ScanData.KIND.NEG_CONTROL)).get(chemicalName),
                    COMPRESSION_CONSTANT).getLeft();

            negativeIntensityTimes.add(negativeIntensityTimeValues);
        }

        // Get the RMS of the negative intensity times
        List<XZ> rmsOfNegativeValues = rmsOfIntensityTimeGraphs(negativeIntensityTimes);

        Double maxSNR = 0.0;
        Double maxTime = 0.0;
        Double peakIntensity = 0.0;

        // For each of the peaks detected in the positive control, find the spectral intensity values from the negative
        // controls and calculate SNR based on that.
        for (XZ positivePosition : positiveIntensityTime) {

            Double time = positivePosition.getTime();

            XZ negativeControlPosition = null;
            for (XZ position : rmsOfNegativeValues) {
                if (position.getTime() > time - POSITION_TIME_WINDOW_IN_SECONDS
                        && position.getTime() < time + POSITION_TIME_WINDOW_IN_SECONDS) {
                    negativeControlPosition = position;
                    break;
                }
            }

            Double snr;
            if (negativeControlPosition == null) {
                LOGGER.error(
                        "There is no intensity value at this time range for the negative control, which is not expected");
                snr = 0.0;
            } else {
                snr = Math.pow(positivePosition.getIntensity() / negativeControlPosition.getIntensity(), 2);
            }

            if (snr > maxSNR) {
                maxSNR = snr;
                maxTime = time;
                peakIntensity = positiveTimeToMaxPeak.get(positivePosition.getTime());
            }
        }

        result.put(chemicalName, Pair.of(new XZ(maxTime, peakIntensity), maxSNR));
    }

    return result;
}
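
As a worked example of the SNR computation above: with a positive peak intensity of 1000 and a negative-control RMS intensity of 100 in the same time window, snr = (1000 / 100)^2 = 100; the peak's time and maximum intensity are recorded whenever its snr exceeds the running maxSNR.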

From source file:com.act.lcms.db.analysis.IonSearchAnalysis.java

public static void produceLCMSSearchPlots(File lcmsDir, String outData, String outImg,
        Pair<List<ScanData<StandardWell>>, Double> allStandardScans,
        Pair<List<ScanData<LCMSWell>>, Double> allPositiveScans,
        Pair<List<ScanData<LCMSWell>>, Double> allNegativeScans, Double fontScale, boolean useFineGrainedMZ,
        boolean makeHeatmaps, boolean useSNR) throws Exception {
    List<ScanData> allScanData = new ArrayList<ScanData>() {
        {
            addAll(allStandardScans.getLeft());
            addAll(allPositiveScans.getLeft());
            addAll(allNegativeScans.getLeft());
        }
    };
    // Get the global maximum intensity across all scans.
    Double maxIntensity = Math.max(allStandardScans.getRight(),
            Math.max(allPositiveScans.getRight(), allNegativeScans.getRight()));
    System.out.format("Processing LCMS scans for graphing:\n");
    for (ScanData scanData : allScanData) {
        System.out.format("  %s\n", scanData.toString());
    }

    String fmt = "pdf";
    System.err.format("Writing combined scan data to %s and graphs to %s\n", outData, outImg);

    // Generate the data file and graphs.
    try (FileOutputStream fos = new FileOutputStream(outData)) {
        // Write all the scan data out to a single data file.
        List<String> graphLabels = new ArrayList<>();
        for (ScanData scanData : allScanData) {
            graphLabels.addAll(
                    AnalysisHelper.writeScanData(fos, lcmsDir, maxIntensity, scanData, makeHeatmaps, true));
        }

        Gnuplotter plotter = fontScale == null ? new Gnuplotter() : new Gnuplotter(fontScale);
        if (makeHeatmaps) {
            plotter.plotHeatmap(outData, outImg, graphLabels.toArray(new String[graphLabels.size()]),
                    maxIntensity, fmt);
        } else {
            plotter.plot2D(outData, outImg, graphLabels.toArray(new String[graphLabels.size()]), "time",
                    maxIntensity, "intensity", fmt);
        }
    }
}

From source file:com.act.lcms.db.analysis.AnalysisHelper.java

private static <A, B> Pair<List<A>, List<B>> split(List<Pair<A, B>> lpairs) {
    List<A> a = new ArrayList<>();
    List<B> b = new ArrayList<>();
    for (Pair<A, B> p : lpairs) {
        a.add(p.getLeft());
        b.add(p.getRight());
    }
    return Pair.of(a, b);
}
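
Since split is private to AnalysisHelper, the following is a conceptual illustration of its behavior rather than an external call: it unzips a list of pairs into a pair of lists.

List<Pair<Integer, String>> pairs = Arrays.asList(Pair.of(1, "a"), Pair.of(2, "b"));
Pair<List<Integer>, List<String>> lists = split(pairs);
System.out.println(lists.getLeft());  // [1, 2]
System.out.println(lists.getRight()); // [a, b]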

From source file:com.samsung.sjs.ExplanationsTest.java

private static String explainErrors(JSEnvironment env, String sourceCode) {
    AstRoot root = new Parser().parse(sourceCode, "", 1);
    SatSolver sat = new Sat4J();
    SJSTypeTheory theory = new SJSTypeTheory(env, null, root);
    List<Integer> hard = new ArrayList<>();
    List<Integer> soft = new ArrayList<>();
    List<ITypeConstraint> constraints = theory.getConstraints();
    for (int i = 0; i < constraints.size(); ++i) {
        (theory.hackyGenerator().hasExplanation(constraints.get(i)) ? soft : hard).add(i);
    }
    Pair<TypeAssignment, Collection<Integer>> result = TheorySolver.solve(theory, new SatFixingSetFinder<>(sat),
            hard, soft);
    ConstraintGenerator g = theory.hackyGenerator();
    StringBuilder buf = new StringBuilder();
    for (int broken : result.getRight()) {
        ITypeConstraint c = theory.hackyConstraintAccess().get(broken);
        ByteArrayOutputStream stream = new ByteArrayOutputStream();
        g.explainFailure(c, result.getLeft()).prettyprint(new PrintStream(stream));
        buf.append(stream.toString());
    }
    return buf.toString();
}

From source file:com.twitter.graphjet.bipartite.GraphConcurrentTestHelper.java

/**
 * This helper method sets up a concurrent read-write situation with a single writer and multiple
 * readers that access the same underlying bipartiteGraph, and tests for correct edge access after
 * every single edge write via latches. This helps test write flushing after every edge insertion.
 *
 * @param graph           is the underlying
 *                        {@link BipartiteGraph}
 * @param edgesToAdd      is a list of edges to add in the graph
 */
public static <T extends BipartiteGraph & DynamicBipartiteGraph> void testConcurrentReadWriteThreads(T graph,
        List<Pair<Long, Long>> edgesToAdd) {
    int numReaders = edgesToAdd.size(); // start reading after first edge is written
    ExecutorService executor = Executors.newFixedThreadPool(2 * (2 * numReaders) + 1);

    List<CountDownLatch> readerStartLatches = Lists.newArrayListWithCapacity(numReaders);
    List<CountDownLatch> readerDoneLatches = Lists.newArrayListWithCapacity(numReaders);
    List<BipartiteGraphReader> leftReaders = Lists.newArrayListWithCapacity(numReaders);
    List<BipartiteGraphReader> rightReaders = Lists.newArrayListWithCapacity(numReaders);

    for (Pair<Long, Long> edge : edgesToAdd) {
        CountDownLatch startLatch = new CountDownLatch(1);
        CountDownLatch doneLatch = new CountDownLatch(2);
        // Each time, get edges for the node added in the previous step
        BipartiteGraphReader leftReader = new BipartiteGraphReader(graph, startLatch, doneLatch, edge.getLeft(),
                true, 0);
        BipartiteGraphReader rightReader = new BipartiteGraphReader(graph, startLatch, doneLatch,
                edge.getRight(), false, 0);
        leftReaders.add(leftReader);
        executor.submit(leftReader);
        rightReaders.add(rightReader);
        executor.submit(rightReader);
        readerStartLatches.add(startLatch);
        readerDoneLatches.add(doneLatch);
    }

    /**
     * The start/done latches achieve the following execution order: writer, then reader 1, then
     * writer, then reader 2, and so on. As a concrete example, suppose we have two readers and a
     * writer, then the start/done latches are used as follows:
     * Initial latches state:
     * s1 = 1, d1 = 1
     * s2 = 1, d2 = 1
     * Execution steps:
     * - writer writes edge 1, sets s1 = 0 and waits on d1
     * - reader 1 reads since s1 == 0 and sets d1 = 0
     * - writer writes edge 2, sets s2 = 0 and waits on d2
     * - reader 2 reads since s2 == 0 and sets d2 = 0
     *
     * One detail to note is that here we have two readers (one for left, one for right) so the done
     * latches are initialized to value 2 so that both readers complete the read before moving on.
     */
    List<WriterInfo> writerInfo = Lists.newArrayListWithCapacity(numReaders);
    for (int i = 0; i < numReaders; i++) {
        // Start writing immediately at first, but then write an edge once the reader finishes reading
        // the previous edge
        CountDownLatch startLatch = (i > 0) ? readerDoneLatches.get(i - 1) : new CountDownLatch(0);
        // Release the next reader
        CountDownLatch doneLatch = readerStartLatches.get(i);
        writerInfo.add(new WriterInfo(edgesToAdd.get(i).getLeft(), edgesToAdd.get(i).getRight(), startLatch,
                doneLatch));
    }

    executor.submit(new BipartiteGraphWriter(graph, writerInfo));

    // Wait for all the threads to finish and then confirm that they worked as expected
    try {
        readerDoneLatches.get(numReaders - 1).await();
    } catch (InterruptedException e) {
        throw new RuntimeException("Execution for last reader was interrupted: ", e);
    }

    // Now we test the readers
    Long2ObjectMap<LongArrayList> leftSideGraph = new Long2ObjectOpenHashMap<LongArrayList>(numReaders);
    Long2ObjectMap<LongArrayList> rightSideGraph = new Long2ObjectOpenHashMap<LongArrayList>(numReaders);
    for (int i = 0; i < numReaders; i++) {
        long leftNode = edgesToAdd.get(i).getLeft();
        long rightNode = edgesToAdd.get(i).getRight();
        // Add edges to the graph
        if (!leftSideGraph.containsKey(leftNode)) {
            leftSideGraph.put(leftNode, new LongArrayList(new long[] { rightNode }));
        } else {
            leftSideGraph.get(leftNode).add(rightNode);
        }
        if (!rightSideGraph.containsKey(rightNode)) {
            rightSideGraph.put(rightNode, new LongArrayList(new long[] { leftNode }));
        } else {
            rightSideGraph.get(rightNode).add(leftNode);
        }
        // Check the read info
        assertEquals(leftSideGraph.get(leftNode).size(), leftReaders.get(i).getQueryNodeDegree());
        assertEquals(leftSideGraph.get(leftNode), leftReaders.get(i).getQueryNodeEdges());
        assertEquals(rightSideGraph.get(rightNode).size(), rightReaders.get(i).getQueryNodeDegree());
        assertEquals(rightSideGraph.get(rightNode), rightReaders.get(i).getQueryNodeEdges());
    }
}

From source file:com.stratelia.webactiv.util.DBUtil.java

/**
 * Update query executor.
 * @param con the database connection on which to run the updates
 * @param updateQueries pairs of SQL statements (left) and their parameter lists (right)
 * @return the total number of rows updated
 * @throws SQLException if any of the updates fails
 */
public static <O> long executeUpdate(Connection con, List<Pair<String, List<O>>> updateQueries)
        throws SQLException {
    long nbUpdate = 0;
    for (Pair<String, List<O>> updateQuery : updateQueries) {
        nbUpdate += executeUpdate(con, updateQuery.getLeft(), updateQuery.getRight());
    }
    return nbUpdate;
}
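
A hypothetical invocation (the connection, table, and column names are invented for illustration); each pair carries an SQL statement on the left and its parameter list on the right:

List<Pair<String, List<Object>>> updateQueries = Arrays.asList(
        Pair.of("UPDATE person SET name = ? WHERE id = ?", Arrays.<Object>asList("Ada", 1)),
        Pair.of("DELETE FROM person WHERE id = ?", Arrays.<Object>asList(2)));
long nbUpdate = DBUtil.executeUpdate(con, updateQueries);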

From source file:com.act.lcms.db.model.ScanFile.java

public static List<Pair<Integer, DB.OPERATION_PERFORMED>> insertOrUpdateScanFilesInDirectory(DB db,
        File directory) throws SQLException, IOException {
    if (directory == null || !directory.isDirectory()) {
        throw new RuntimeException(String.format("Scan files directory at %s is not a directory",
                directory == null ? null : directory.getAbsolutePath()));
    }

    List<Pair<Integer, DB.OPERATION_PERFORMED>> results = new ArrayList<>();

    File[] contentsArr = directory.listFiles();

    if (contentsArr == null || contentsArr.length == 0) {
        System.err.format("WARNING: no files found in directory %s", directory.getAbsolutePath());
        return null;
    }
    List<File> contents = Arrays.asList(contentsArr);
    Collections.sort(contents, new Comparator<File>() {
        @Override
        public int compare(File o1, File o2) {
            return o1.getName().compareTo(o2.getName());
        }
    });

    for (File f : contents) {
        for (Pair<Pattern, Map<SCAN_NAME_COMPONENT, Integer>> scan : NAME_EXTRACTION_PATTERNS) {
            Pattern p = scan.getLeft();
            Map<SCAN_NAME_COMPONENT, Integer> groupMap = scan.getRight();
            Matcher m = p.matcher(f.getName());
            if (m.matches()) {
                if (groupMap.containsKey(SCAN_NAME_COMPONENT.SCAN_PART)) {
                    String scanPartStr = m.group(groupMap.get(SCAN_NAME_COMPONENT.SCAN_PART));
                    if (scanPartStr != null && !scanPartStr.isEmpty()) {
                        Integer scanPart = Integer.parseInt(scanPartStr);
                        if (!LCMS_MAIN_SCAN_PART.equals(scanPart)) {
                            break;
                        }
                    }
                }

                Plate plate;
                Integer plateId = null;

                if (f.getName().startsWith("STD_MEOH")) {
                    // The toffeeStandards plate doesn't follow the usual naming convention, so we fake it here.
                    plate = Plate.getPlateByBarcode(db, "toffeeStandards");
                } else if (groupMap.containsKey(SCAN_NAME_COMPONENT.PLATE_BARCODE)) {
                    plate = Plate.getPlateByBarcode(db,
                            m.group((groupMap.get(SCAN_NAME_COMPONENT.PLATE_BARCODE))));
                } else if (groupMap.containsKey(SCAN_NAME_COMPONENT.PLATE_NAME)) {
                    plate = Plate.getPlateByName(db, m.group((groupMap.get(SCAN_NAME_COMPONENT.PLATE_NAME))));
                } else {
                    // The occurrence of this exception represents a developer oversight.
                    throw new RuntimeException(
                            String.format("No plate identifier available for pattern %s", p));
                }
                if (plate == null) {
                    System.err.format("WARNING: unable to find plate for scan file %s\n", f.getName());
                } else {
                    plateId = plate.getId();
                }

                Integer plateRow = null, plateColumn = null;
                if (groupMap.containsKey(SCAN_NAME_COMPONENT.ROW)) {
                    String plateRowStr = m.group(groupMap.get(SCAN_NAME_COMPONENT.ROW));
                    if (plateRowStr != null && !plateRowStr.isEmpty()) {
                        if (plateRowStr.length() > 1) {
                            // TODO: handle larger plates?
                            throw new RuntimeException(
                                    String.format("Unable to handle multi-character plate row %s for scan %s",
                                            plateRowStr, f.getName()));
                        }
                        plateRow = plateRowStr.charAt(0) - 'A';
                    }
                }

                if (groupMap.containsKey(SCAN_NAME_COMPONENT.COLUMN)) {
                    String plateColumnStr = m.group(groupMap.get(SCAN_NAME_COMPONENT.COLUMN));
                    if (plateColumnStr != null && !plateColumnStr.isEmpty()) {
                        plateColumn = Integer.parseInt(plateColumnStr) - 1; // Wells are one-indexed.
                    }
                }

                SCAN_MODE scanMode = SCAN_MODE.POS; // Assume positive scans by default.
                if (groupMap.containsKey(SCAN_NAME_COMPONENT.MODE)) {
                    String scanModeStr = m.group(groupMap.get(SCAN_NAME_COMPONENT.MODE));
                    if (scanModeStr != null && !scanModeStr.isEmpty()) {
                        scanMode = SCAN_MODE.valueOf(scanModeStr.toUpperCase());
                    }
                }

                SCAN_FILE_TYPE fileType = null;
                if (groupMap.containsKey(SCAN_NAME_COMPONENT.FILE_TYPE)) {
                    String fileTypeStr = m.group(groupMap.get(SCAN_NAME_COMPONENT.FILE_TYPE));
                    if (fileTypeStr != null && !fileTypeStr.isEmpty()) {
                        fileType = SCAN_FILE_TYPE.valueOf(fileTypeStr.toUpperCase());
                    }
                }

                ScanFile scanFile = getScanFileByFilename(db, f.getName());
                DB.OPERATION_PERFORMED op;
                if (scanFile == null) {
                    scanFile = insertScanFile(db, f.getName(), scanMode, fileType, plateId, plateRow,
                            plateColumn);
                    op = DB.OPERATION_PERFORMED.CREATE;
                } else {
                    scanFile.setFilename(f.getName());
                    scanFile.setMode(scanMode);
                    scanFile.setFileType(fileType);
                    scanFile.setPlateId(plateId);
                    scanFile.setPlateRow(plateRow);
                    scanFile.setPlateColumn(plateColumn);
                    updateScanFile(db, scanFile);
                    op = DB.OPERATION_PERFORMED.UPDATE;
                }

                // Should only be null if we can't insert the scanFile into the DB for some reason.
                if (scanFile == null) {
                    results.add(Pair.of((Integer) null, op));
                } else {
                    results.add(Pair.of(scanFile.getId(), op));
                }
                break;
            }
        }
    }
    return results;
}