Example usage for org.apache.commons.lang3.tuple Pair getValue

Introduction

On this page you can find example usages of org.apache.commons.lang3.tuple.Pair#getValue, collected from open-source projects.

Prototype

@Override
public R getValue() 

Document

Gets the value from this pair.

This method implements the Map.Entry interface, returning the right element as the value.
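
A minimal sketch of the behaviour described above (the key and value used here are arbitrary):

import java.util.Map;

import org.apache.commons.lang3.tuple.Pair;

public class PairGetValueExample {
    public static void main(String[] args) {
        // "host" is the left/key element, "localhost" is the right/value element
        Pair<String, String> pair = Pair.of("host", "localhost");

        // getValue() returns the right element and is equivalent to getRight()
        String value = pair.getValue(); // "localhost"
        System.out.println(value.equals(pair.getRight())); // true

        // Because Pair implements Map.Entry, it can be used wherever an entry is expected
        Map.Entry<String, String> entry = pair;
        System.out.println(entry.getKey() + " = " + entry.getValue()); // host = localhost
    }
}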

Usage

From source file: org.lenskit.cli.util.ScriptEnvironment.java

public ScriptEnvironment(Namespace ns) {
    properties = new Properties();
    List<Pair<String, String>> props = ns.getList("properties");
    if (props != null) {
        for (Pair<String, String> arg : props) {
            properties.setProperty(arg.getKey(), arg.getValue());
        }
    }

    List<String> cp = ns.getList("classpath");
    if (cp != null) {
        classpath = cp;
    } else {
        classpath = Collections.emptyList();
    }
}
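
A minimal sketch of how such a pair list feeds the Properties object (the property names and values below are assumed purely for illustration):

List<Pair<String, String>> props = Arrays.asList(
        Pair.of("threadCount", "4"),
        Pair.of("outputDir", "results"));

Properties properties = new Properties();
for (Pair<String, String> arg : props) {
    // the pair's key becomes the property name, its value (getValue()) the property value
    properties.setProperty(arg.getKey(), arg.getValue());
}
// properties.getProperty("threadCount") now returns "4"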

From source file: org.lightjason.agentspeak.agent.TestCAgent.java

/**
 * asl parsing test
 */
@Test
public final void testASLManual() {
    ASL.forEach((i, j) -> {
        final Pair<Boolean, String> l_result = testAgentManual(i, j);
        assertTrue(l_result.getRight(), l_result.getLeft());
        System.out.println(l_result.getValue());
    });
}
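
Because Pair implements Map.Entry, getValue() in the line above is interchangeable with getRight(), just as getKey() is with getLeft(); a minimal sketch (the result values are illustrative, and a static import of Assert.assertTrue is assumed as in the test):

Pair<Boolean, String> l_result = Pair.of(Boolean.TRUE, "all plans executed");
// the same assertion as in the test, written via the Map.Entry view of the pair
assertTrue(l_result.getValue(), l_result.getKey());
System.out.println(l_result.getValue()); // prints the right element: "all plans executed"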

From source file: org.matsim.contrib.drt.optimizer.rebalancing.mincostflow.TransportProblem.java

public List<Triple<P, C, Integer>> solve(List<Pair<P, Integer>> supply, List<Pair<C, Integer>> demand) {
    final int P = supply.size();
    final int C = demand.size();
    final int N = P + C + 2;

    // N nodes, whose indices are:
    // 0 - source
    // 1..P - producers 1..P
    // P+1..P+C - consumers 1..C
    // P+C+1 - sink

    @SuppressWarnings("unchecked")
    List<Edge>[] graph = Stream.generate(ArrayList::new).limit(N).toArray(List[]::new);

    // source -> producers
    int totalSupply = 0;
    for (int i = 0; i < P; i++) {
        int supplyValue = supply.get(i).getValue();
        MinCostFlow.addEdge(graph, 0, 1 + i, supplyValue, 0);
        totalSupply += supplyValue;
    }

    // producers --> consumers
    for (int i = 0; i < P; i++) {
        Pair<P, Integer> producer = supply.get(i);
        for (int j = 0; j < C; j++) {
            Pair<C, Integer> consumer = demand.get(j);
            int capacity = Math.min(producer.getValue(), consumer.getValue());
            int cost = costFunction.applyAsInt(producer.getKey(), consumer.getKey());
            MinCostFlow.addEdge(graph, 1 + i, 1 + P + j, capacity, cost);
        }
    }

    // consumers -> sink
    int totalDemand = 0;
    for (int j = 0; j < C; j++) {
        int demandValue = demand.get(j).getValue();
        MinCostFlow.addEdge(graph, 1 + P + j, N - 1, demandValue, 0);
        totalDemand += demandValue;
    }

    // solve min cost flow problem
    int[] result = MinCostFlow.minCostFlow(graph, 0, N - 1, Math.min(totalSupply, totalDemand), false);
    if (result[0] == 0) {
        return Collections.emptyList();
    }

    // extract flows
    List<Triple<P, C, Integer>> flows = new ArrayList<>();
    for (int i = 0; i < P; i++) {
        P from = supply.get(i).getKey();
        for (Edge e : graph[1 + i]) {
            int flow = e.getFlow();
            if (flow > 0) {
                int j = e.getTo() - (1 + P);
                C to = demand.get(j).getKey();
                flows.add(Triple.of(from, to, flow));
            }
        }
    }
    return flows;
}
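
Since the supply and demand arguments are plain Pair lists, the inputs to solve(...) can be assembled with Pair.of; a minimal sketch assuming String identifiers for producers and consumers and an already-constructed TransportProblem<String, String> named transportProblem:

// each pair carries an identifier (the key) and a quantity (read back via getValue())
List<Pair<String, Integer>> supply = Arrays.asList(
        Pair.of("depotA", 5),
        Pair.of("depotB", 3));
List<Pair<String, Integer>> demand = Arrays.asList(
        Pair.of("zone1", 4),
        Pair.of("zone2", 4));

List<Triple<String, String, Integer>> flows = transportProblem.solve(supply, demand);
for (Triple<String, String, Integer> flow : flows) {
    // producer, consumer and transported amount for each flow
    System.out.println(flow.getLeft() + " -> " + flow.getMiddle() + ": " + flow.getRight());
}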

From source file: org.metaborg.intellij.idea.sdks.MetaborgSdkType.java

/**
 * Adds the Metaborg SDK paths.
 *
 * @param sdkModificator The SDK modificator.
 * @param sdkHomePath The SDK home path.
 */
private void addMetaborgSdkPaths(final SdkModificator sdkModificator, @Nullable final String sdkHomePath) {
    if (sdkHomePath == null) {
        // Anything else we need to do?
        return;
    }

    // The added SDK files must be in the jar:// file system.
    // Adding normal file:// files works when creating the SDK,
    // but they are lost when the SDK is reloaded (e.g. after restart).
    for (final Pair<String, VirtualFile> pair : getSdkJars(sdkHomePath)) {
        String filename = pair.getKey();
        @Nullable
        VirtualFile file = pair.getValue();
        if (file == null) {
            this.logger.error("SDK file not found: {}", filename);
        } else {
            if (!file.exists()) {
                this.logger.warn("SDK file may not exist: {}", filename);
            }
            sdkModificator.addRoot(file, OrderRootType.CLASSES);
        }
    }
}

From source file: org.omg.bpmn.miwg.util.xml.diff.AbstractXmlDifferenceListener.java

private void parseAttributes(List<String> attrs, Map<Node, Set<String>> map) {
    List<Node> tmpNodeList;
    for (String attrXpath : attrs) {
        // split attribute name and XPath for node
        Pair<String, String> nodeAndAttribute = XPathUtil.splitXPathIntoNodeAndAttribute(attrXpath);
        tmpNodeList = helper.getAllMatchingNodesFromBothDocuments(nodeAndAttribute.getKey());
        for (Node attrNode : tmpNodeList) {
            getAttributeSetForNode(attrNode, map).add(nodeAndAttribute.getValue());
        }
    }
}

From source file: org.opencb.opencga.storage.core.variant.VariantStoragePipeline.java

/**
 * Transform raw variant files into biodata model.
 *
 * @param inputUri Input file. Accepted formats: *.vcf, *.vcf.gz
 * @param pedigreeUri Pedigree input file. Accepted formats: *.ped
 * @param outputUri The destination folder
 * @throws StorageEngineException if any I/O problem occurs
 */
@Override
public URI transform(URI inputUri, URI pedigreeUri, URI outputUri) throws StorageEngineException {
    // input: VcfReader
    // output: JsonWriter

    Path input = Paths.get(inputUri.getPath());
    Path pedigree = pedigreeUri == null ? null : Paths.get(pedigreeUri.getPath());
    Path output = Paths.get(outputUri.getPath());

    //        boolean includeSamples = options.getBoolean(Options.INCLUDE_GENOTYPES.key(), false);
    boolean includeStats = options.getBoolean(Options.INCLUDE_STATS.key(), false);
    //        boolean includeSrc = options.getBoolean(Options.INCLUDE_SRC.key(), Options.INCLUDE_SRC.defaultValue());
    boolean includeSrc = false;
    boolean failOnError = options.getBoolean(Options.TRANSFORM_FAIL_ON_MALFORMED_VARIANT.key(),
            Options.TRANSFORM_FAIL_ON_MALFORMED_VARIANT.defaultValue());
    String format = options.getString(Options.TRANSFORM_FORMAT.key(), Options.TRANSFORM_FORMAT.defaultValue());
    String parser = options.getString("transform.parser", HTSJDK_PARSER);

    VariantSource source = buildVariantSource(input);
    String fileName = source.getFileName();
    boolean generateReferenceBlocks = options.getBoolean(Options.GVCF.key(), false);

    int batchSize = options.getInt(Options.TRANSFORM_BATCH_SIZE.key(),
            Options.TRANSFORM_BATCH_SIZE.defaultValue());

    String compression = options.getString(Options.COMPRESS_METHOD.key(),
            Options.COMPRESS_METHOD.defaultValue());
    String extension = "";
    int numTasks = options.getInt(Options.TRANSFORM_THREADS.key(), Options.TRANSFORM_THREADS.defaultValue());
    int capacity = options.getInt("blockingQueueCapacity", numTasks * 2);

    if ("gzip".equalsIgnoreCase(compression) || "gz".equalsIgnoreCase(compression)) {
        extension = ".gz";
    } else if ("snappy".equalsIgnoreCase(compression) || "snz".equalsIgnoreCase(compression)) {
        extension = ".snappy";
    } else if (!compression.isEmpty()) {
        throw new IllegalArgumentException("Unknown compression method " + compression);
    }

    Path outputMalformedVariants = output.resolve(fileName + "." + VariantReaderUtils.MALFORMED_FILE + ".txt");
    Path outputVariantsFile = output
            .resolve(fileName + "." + VariantReaderUtils.VARIANTS_FILE + "." + format + extension);
    Path outputMetaFile = VariantReaderUtils.getMetaFromTransformedFile(outputVariantsFile);

    // Close at the end!
    final MalformedVariantHandler malformedHandler;
    try {
        malformedHandler = new MalformedVariantHandler(outputMalformedVariants);
    } catch (IOException e) {
        throw new StorageEngineException(e.getMessage(), e);
    }

    ParallelTaskRunner.Config config = ParallelTaskRunner.Config.builder().setNumTasks(numTasks)
            .setBatchSize(batchSize).setCapacity(capacity).setSorted(true).build();

    logger.info("Transforming variants using {} into {} ...", parser, format);
    long start, end;
    if (numTasks == 1 && "json".equals(format)) { //Run transformation with a SingleThread runner. The legacy way
        if (!".gz".equals(extension)) { //FIXME: Add compatibility with snappy compression
            logger.warn("Force using gzip compression");
            extension = ".gz";
            outputVariantsFile = output.resolve(fileName + ".variants.json" + extension);
        }

        //Ped Reader
        PedigreeReader pedReader = null;
        if (pedigree != null && pedigree.toFile().exists()) { //FIXME Add "endsWith(".ped") ??
            pedReader = new PedigreePedReader(pedigree.toString());
        }

        //Reader
        VariantReader reader = new VariantVcfReader(source, input.toAbsolutePath().toString());

        //Writers
        VariantJsonWriter jsonWriter = new VariantJsonWriter(source, output);
        jsonWriter.includeStats(includeStats);

        List<VariantWriter> writers = Collections.<VariantWriter>singletonList(jsonWriter);

        //Runner
        VariantRunner vr = new VariantRunner(source, reader, pedReader, writers,
                Collections.<Task<Variant>>singletonList(new VariantGlobalStatsCalculator(source)), batchSize);

        logger.info("Single thread transform...");
        start = System.currentTimeMillis();
        try {
            vr.run();
        } catch (IOException e) {
            throw new StorageEngineException("Fail runner execution", e);
        }
        end = System.currentTimeMillis();

    } else if ("avro".equals(format)) {
        //Read VariantSource
        source = VariantReaderUtils.readVariantSource(input, source);

        //Reader
        StringDataReader dataReader = new StringDataReader(input);
        long fileSize = 0;
        try {
            fileSize = dataReader.getFileSize();
        } catch (IOException e) {
            throw new StorageEngineException("Error reading file " + input, e);
        }
        ProgressLogger progressLogger = new ProgressLogger("Transforming file:", fileSize, 200);
        dataReader.setReadBytesListener((totalRead, delta) -> progressLogger.increment(delta, "Bytes"));

        //Writer
        DataWriter<ByteBuffer> dataWriter;
        try {
            dataWriter = new AvroFileWriter<>(VariantAvro.getClassSchema(), compression,
                    new FileOutputStream(outputVariantsFile.toFile()));
        } catch (FileNotFoundException e) {
            throw new StorageEngineException("Fail init writer", e);
        }
        Supplier<VariantTransformTask<ByteBuffer>> taskSupplier;

        if (parser.equalsIgnoreCase(HTSJDK_PARSER)) {
            logger.info("Using HTSJDK to read variants.");
            FullVcfCodec codec = new FullVcfCodec();
            final VariantSource finalSource = source;
            Pair<VCFHeader, VCFHeaderVersion> header = readHtsHeader(input);
            VariantGlobalStatsCalculator statsCalculator = new VariantGlobalStatsCalculator(source);
            taskSupplier = () -> new VariantAvroTransformTask(header.getKey(), header.getValue(), finalSource,
                    outputMetaFile, statsCalculator, includeSrc, generateReferenceBlocks)
                            .setFailOnError(failOnError).addMalformedErrorHandler(malformedHandler);
        } else {
            // TODO Create a utility to determine which extensions are variants files
            final VariantVcfFactory factory = createVariantVcfFactory(source, fileName);
            logger.info("Using Biodata to read variants.");
            final VariantSource finalSource = source;
            VariantGlobalStatsCalculator statsCalculator = new VariantGlobalStatsCalculator(source);
            taskSupplier = () -> new VariantAvroTransformTask(factory, finalSource, outputMetaFile,
                    statsCalculator, includeSrc).setFailOnError(failOnError)
                            .addMalformedErrorHandler(malformedHandler);
        }

        logger.info("Generating output file {}", outputVariantsFile);

        ParallelTaskRunner<String, ByteBuffer> ptr;
        try {
            ptr = new ParallelTaskRunner<>(dataReader, taskSupplier, dataWriter, config);
        } catch (Exception e) {
            throw new StorageEngineException("Error while creating ParallelTaskRunner", e);
        }
        logger.info("Multi thread transform... [1 reading, {} transforming, 1 writing]", numTasks);
        start = System.currentTimeMillis();
        try {
            ptr.run();
        } catch (ExecutionException e) {
            throw new StorageEngineException("Error while executing TransformVariants in ParallelTaskRunner",
                    e);
        }
        end = System.currentTimeMillis();
    } else if ("json".equals(format)) {
        //Read VariantSource
        source = VariantReaderUtils.readVariantSource(input, source);

        //Reader
        StringDataReader dataReader = new StringDataReader(input);
        long fileSize = 0;
        try {
            fileSize = dataReader.getFileSize();
        } catch (IOException e) {
            throw new StorageEngineException("Error reading file " + input, e);
        }
        ProgressLogger progressLogger = new ProgressLogger("Transforming file:", fileSize, 200);
        dataReader.setReadBytesListener((totalRead, delta) -> progressLogger.increment(delta, "Bytes"));

        //Writers
        StringDataWriter dataWriter = new StringDataWriter(outputVariantsFile, true);

        final VariantSource finalSource = source;
        ParallelTaskRunner<String, String> ptr;

        Supplier<VariantTransformTask<String>> taskSupplier;
        if (parser.equalsIgnoreCase(HTSJDK_PARSER)) {
            logger.info("Using HTSJDK to read variants.");
            Pair<VCFHeader, VCFHeaderVersion> header = readHtsHeader(input);
            VariantGlobalStatsCalculator statsCalculator = new VariantGlobalStatsCalculator(finalSource);
            taskSupplier = () -> new VariantJsonTransformTask(header.getKey(), header.getValue(), finalSource,
                    outputMetaFile, statsCalculator, includeSrc, generateReferenceBlocks)
                            .setFailOnError(failOnError).addMalformedErrorHandler(malformedHandler);
        } else {
            // TODO Create a utility to determine which extensions are variants files
            final VariantVcfFactory factory = createVariantVcfFactory(source, fileName);
            logger.info("Using Biodata to read variants.");
            VariantGlobalStatsCalculator statsCalculator = new VariantGlobalStatsCalculator(source);
            taskSupplier = () -> new VariantJsonTransformTask(factory, finalSource, outputMetaFile,
                    statsCalculator, includeSrc).setFailOnError(failOnError)
                            .addMalformedErrorHandler(malformedHandler);
        }

        logger.info("Generating output file {}", outputVariantsFile);

        try {
            ptr = new ParallelTaskRunner<>(dataReader, taskSupplier, dataWriter, config);
        } catch (Exception e) {
            throw new StorageEngineException("Error while creating ParallelTaskRunner", e);
        }

        logger.info("Multi thread transform... [1 reading, {} transforming, 1 writing]", numTasks);
        start = System.currentTimeMillis();
        try {
            ptr.run();
        } catch (ExecutionException e) {
            throw new StorageEngineException("Error while executing TransformVariants in ParallelTaskRunner",
                    e);
        }
        end = System.currentTimeMillis();
    } else if ("proto".equals(format)) {
        //Read VariantSource
        source = VariantReaderUtils.readVariantSource(input, source);
        Pair<Long, Long> times = processProto(input, fileName, output, source, outputVariantsFile,
                outputMetaFile, includeSrc, parser, generateReferenceBlocks, batchSize, extension, compression,
                malformedHandler, failOnError);
        start = times.getKey();
        end = times.getValue();
    } else {
        throw new IllegalArgumentException("Unknown format " + format);
    }
    logger.info("end - start = " + (end - start) / 1000.0 + "s");
    logger.info("Variants transformed!");

    // Close the malformed variant handler
    malformedHandler.close();
    if (malformedHandler.getMalformedLines() > 0) {
        getTransformStats().put("malformed lines", malformedHandler.getMalformedLines());
    }

    return outputUri.resolve(outputVariantsFile.getFileName().toString());
}

From source file: org.opendaylight.netvirt.federation.plugin.FederationPluginIngress.java

private <T extends DataObject, S extends DataObject> void processModification(String listenerKey,
        S modification, ModificationType modificationType, WriteTransaction tx, int generationNumber)
        throws FederationCorruptedStateException {
    FederationPluginCounters.ingress_process_modification.inc();
    LogicalDatastoreType datastoreType = FederationPluginUtils.getListenerDatastoreType(listenerKey);
    if (datastoreType == null) {
        logger.error("Failed to get datastore type for {}", listenerKey);
        return;
    }
    if (!applyFilter(listenerKey, modification, modificationType)) {
        logger.trace("listener {} {} filtered out", listenerKey, modification);
        return;
    }

    Pair<InstanceIdentifier<T>, T> transformedModification = FederationPluginUtils.applyIngressTransformation(
            listenerKey, modification, modificationType, generationNumber, remoteIp);
    if (transformedModification == null) {
        logger.error("Failed to apply ingress transformation for {} {}", listenerKey, modification);
        return;
    }
    if (ModificationType.DELETE.equals(modificationType)) {
        logger.trace("Delete modification listener {} identifier {}", listenerKey,
                transformedModification.getKey());
        deleteModification(datastoreType, transformedModification.getKey(), MAX_TRANSACTION_SUBMIT_RETRIES);
        return;
    }

    logger.trace("Write modification type {} listener {} data {}", modificationType, listenerKey,
            transformedModification);
    if (tx == null) {
        writeModification(datastoreType, transformedModification.getKey(), transformedModification.getValue(),
                MAX_TRANSACTION_SUBMIT_RETRIES);
    } else {
        writeModification(listenerKey, datastoreType, transformedModification.getKey(),
                transformedModification.getValue(), tx);
    }
}

From source file: org.openhim.mediator.fhir.FhirProxyHandler.java

private String determineClientContentType() {
    // first check for Accept header
    String accept = request.getHeaders().get("Accept");
    if (accept != null && !"*/*".equals(accept)) {
        return accept;
    }

    // secondly, check for the _format param
    for (Pair<String, String> param : request.getParams()) {
        if (param.getKey().equals("_format")) {
            return param.getValue();
        }
    }

    // thirdly check for the format the client sent content with
    String contentType = request.getHeaders().get("Content-Type");
    if (contentType != null) {
        return contentType.contains("json") ? Constants.FHIR_MIME_JSON : Constants.FHIR_MIME_XML;
    }

    // else use JSON as a default
    return Constants.FHIR_MIME_JSON;
}
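
Since the request parameters are exposed as Pair<String, String> instances, the _format lookup above is a key match followed by getValue(); a minimal sketch with an illustrative parameter list:

List<Pair<String, String>> params = Arrays.asList(
        Pair.of("_format", "application/fhir+json"),
        Pair.of("_pretty", "true"));

String format = params.stream()
        .filter(p -> "_format".equals(p.getKey()))
        .map(Pair::getValue) // the parameter value is the pair's right element
        .findFirst()
        .orElse(Constants.FHIR_MIME_JSON); // fall back to the JSON default, as above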

From source file: org.openrepose.core.services.ratelimit.cache.UserRateLimit.java

@Override
public UserRateLimit applyPatch(Patch patch) {
    HashMap<String, CachedRateLimit> returnLimits = new HashMap<String, CachedRateLimit>();
    Pair<ConfiguredRatelimit, CachedRateLimit> lowestLimit = null;

    for (Pair<String, ConfiguredRatelimit> limitEntry : patch.getLimitMap()) {
        CachedRateLimit rateLimit = adjustLimit(limitEntry);
        returnLimits.put(limitEntry.getKey(), rateLimit);
        if (lowestLimit == null || (rateLimit.maxAmount()
                - rateLimit.amount() < lowestLimit.getValue().maxAmount() - lowestLimit.getValue().amount())) {
            lowestLimit = Pair.of(limitEntry.getValue(), rateLimit);
        }
        if (rateLimit.amount() > rateLimit.maxAmount())
            break;
    }

    return new UserRateLimit(returnLimits, lowestLimit);
}

From source file: org.openrepose.core.services.ratelimit.cache.UserRateLimit.java

private CachedRateLimit adjustLimit(Pair<String, ConfiguredRatelimit> limitEntry) {
    CachedRateLimit returnRateLimit;

    while (true) {
        CachedRateLimit newRateLimit = new CachedRateLimit(limitEntry.getValue(), 1);
        CachedRateLimit oldRateLimit = limitMap.putIfAbsent(limitEntry.getKey(), newRateLimit);

        if (oldRateLimit == null) {
            return newRateLimit;
        }

        if ((System.currentTimeMillis() - oldRateLimit.timestamp()) > oldRateLimit.unit()) {
            returnRateLimit = newRateLimit;
        } else {
            returnRateLimit = new CachedRateLimit(limitEntry.getValue(), oldRateLimit.amount() + 1,
                    oldRateLimit.timestamp());
        }

        if (limitMap.replace(limitEntry.getKey(), oldRateLimit, returnRateLimit)) {
            return returnRateLimit;
        }
    }
}