Example usage for java.util LinkedList pop

List of usage examples for java.util LinkedList pop

Introduction

On this page you can find example usage for java.util.LinkedList.pop().

Prototype

public E pop() 

Document

Pops an element from the stack represented by this list.
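
As a quick, self-contained illustration (not taken from any of the projects below): LinkedList implements Deque, so push() inserts at the head of the list and pop() removes and returns the head, giving LIFO stack behaviour. Popping an empty list throws a NoSuchElementException.

import java.util.LinkedList;

public class PopDemo {
    public static void main(String[] args) {
        LinkedList<String> stack = new LinkedList<String>();
        stack.push("first");             // push() inserts at the head of the list
        stack.push("second");
        System.out.println(stack.pop()); // prints "second" -- last pushed, first popped
        System.out.println(stack.pop()); // prints "first"
        // stack.pop();                  // popping an empty list throws NoSuchElementException
    }
}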

Usage

From source file:com.nhncorp.lucy.security.xss.XssSaxFilter.java

/**
 * @param stackForObjectTag
 * @param stackForAllowNetworkingValue
 * @param element
 */
private void doObjectParamStartTagProcess(LinkedList<Element> stackForObjectTag,
        LinkedList<String> stackForAllowNetworkingValue, Element element) {

    if ("object".equalsIgnoreCase(element.getName())) {
        stackForObjectTag.push(element);
        boolean isDataWhiteUrl = false;

        Attribute dataUrl = element.getAttribute("data");

        if (dataUrl != null) { // the data attribute is present
            String dataUrlStr = dataUrl.getValue();
            isDataWhiteUrl = this.isWhiteUrl(dataUrlStr);

            // check the URL and MIME type for vulnerabilities
            boolean isVulnerable = SecurityUtils.checkVulnerable(element, dataUrlStr, isDataWhiteUrl);

            if (isVulnerable) {
                element.setEnabled(false);
                return;
            }
        }

        if (isDataWhiteUrl) {
            stackForAllowNetworkingValue.push("\"all\""); // the data URL is a white URL, so allowNetworking is set to all
        } else {
            stackForAllowNetworkingValue.push("\"internal\""); // otherwise allowNetworking is set to internal
        }
    } else if (stackForObjectTag.size() > 0 && "param".equalsIgnoreCase(element.getName())) {
        Attribute nameAttr = element.getAttribute("name");
        Attribute valueAttr = element.getAttribute("value");

        if (nameAttr != null && valueAttr != null) {
            stackForObjectTag.push(element);
            if (containsURLName(nameAttr.getValue())) {
                stackForAllowNetworkingValue.pop();
                boolean whiteUrl = isWhiteUrl(valueAttr.getValue());

                if (whiteUrl) {
                    stackForAllowNetworkingValue.push("\"all\""); // white URL: allowNetworking is set to all
                } else {
                    stackForAllowNetworkingValue.push("\"internal\""); // not a white URL: allowNetworking is set to internal
                }
            }
        }
    }
}
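
The method above pairs pop() with an immediate push() to overwrite the value currently sitting on top of stackForAllowNetworkingValue. A minimal sketch of that replace-the-top idiom (the variable name below is illustrative, not part of the original class):

import java.util.LinkedList;

public class ReplaceTopDemo {
    public static void main(String[] args) {
        // illustrative stand-in for stackForAllowNetworkingValue
        LinkedList<String> allowNetworking = new LinkedList<String>();
        allowNetworking.push("\"internal\"");
        // later, when a white URL is found, replace the value currently on top:
        allowNetworking.pop();            // discard the old top element
        allowNetworking.push("\"all\"");  // push the replacement onto the head
        System.out.println(allowNetworking.peek()); // prints "all" (including the quote characters)
    }
}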

From source file:org.jembi.rhea.impl.ApelonServiceImpl.java

/**
 * Export a specified namespace as a CSV string. Traversal is done depth-first.
 */
public String exportNamespace(int namespaceId) throws TerminologyService.TSException {
    StringBuilder res = new StringBuilder();

    try {
        LinkedList<_TSTreeNode> cStack = new LinkedList<_TSTreeNode>();
        List<TSTerm> breadth = getRootTerms(namespaceId);
        List<TSProperty> props = getAllPropertyTypes(namespaceId);
        int i = 0;
        ThesaurusConceptQuery cQuery = ThesaurusConceptQuery.createInstance(getConn());

        res.append("\"Code\",\"Name\"");
        for (TSProperty prop : props)
            res.append(",\"" + prop.getName() + "\"");
        res.append("\n");

        while (i < breadth.size()) {
            ApelonTerm term = (ApelonTerm) breadth.get(i);
            //properties aren't being fetched for sub-concepts, so look up the term to fetch its properties
            DTSProperty[] termProps = cQuery.findConceptById(term.concept.getId(), namespaceId, asd)
                    .getFetchedProperties();

            res.append("\"" + term.getCode() + "\",\"" + term.getName() + "\"");
            for (TSProperty prop : props) {
                boolean addedProp = false;

                for (DTSProperty termProp : termProps) {
                    if (termProp.getName().equals(prop.getName())) {
                        res.append(",\"" + termProp.getValue() + "\"");
                        addedProp = true;
                    }
                }

                if (!addedProp)
                    res.append(",\"\"");
            }
            res.append("\n");

            if (term.getHasSubConcepts()) {
                cStack.push(new _TSTreeNode(breadth, i));
                breadth = term.getSubConcepts();
                i = 0;
                continue;
            }

            while (i + 1 == breadth.size() && !cStack.isEmpty()) {
                _TSTreeNode node = cStack.pop();
                breadth = node.breadth;
                i = node.i;
            }

            i++;
        }
    } catch (Exception ex) {
        throw new TerminologyService.TSException(ex);
    }

    return res.toString();
}
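
The export above walks the concept tree depth-first with an explicit stack: push() records where to resume before descending into sub-concepts, and pop() restores that position once a branch is exhausted. A minimal sketch of the same pattern over a hypothetical Node type (not the Apelon API):

import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;

public class DepthFirstDemo {
    static class Node { // hypothetical tree node, for illustration only
        final String name;
        final List<Node> children = new ArrayList<Node>();
        Node(String name) { this.name = name; }
    }

    static void depthFirst(Node root) {
        LinkedList<Node> stack = new LinkedList<Node>();
        stack.push(root);
        while (!stack.isEmpty()) {
            Node current = stack.pop();      // the most recently pushed node comes off first
            System.out.println(current.name);
            for (Node child : current.children) {
                stack.push(child);           // children are visited before remaining siblings
            }
        }
    }

    public static void main(String[] args) {
        Node root = new Node("root");
        Node a = new Node("a");
        Node b = new Node("b");
        root.children.add(a);
        root.children.add(b);
        a.children.add(new Node("a1"));
        depthFirst(root); // prints root, b, a, a1
    }
}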

From source file:org.epics.archiverappliance.etl.DataReductionPostProcessorsTest.java

/**
 * 1) Set up the raw and reduced PV's
 * 2) Generate data in STS
 * 3) Run ETL
 * 4) Compare
 */
private void testPostProcessor(String reduceDataUsing) throws Exception {
    cleanDataFolders();

    ConfigServiceForTests configService = new ConfigServiceForTests(new File("./bin"), 1);
    // Set up the raw and reduced PV's
    PlainPBStoragePlugin etlSTS = (PlainPBStoragePlugin) StoragePluginURLParser
            .parseStoragePlugin("pb://localhost?name=STS&rootFolder=" + shortTermFolderName
                    + "/&partitionGranularity=PARTITION_HOUR", configService);
    PlainPBStoragePlugin etlMTS = (PlainPBStoragePlugin) StoragePluginURLParser
            .parseStoragePlugin("pb://localhost?name=MTS&rootFolder=" + mediumTermFolderName
                    + "/&partitionGranularity=PARTITION_DAY", configService);
    PlainPBStoragePlugin etlLTSRaw = (PlainPBStoragePlugin) StoragePluginURLParser
            .parseStoragePlugin("pb://localhost?name=LTS&rootFolder=" + longTermFolderName
                    + "/&partitionGranularity=PARTITION_YEAR", configService);
    PlainPBStoragePlugin etlLTSReduced = (PlainPBStoragePlugin) StoragePluginURLParser
            .parseStoragePlugin(
                    "pb://localhost?name=LTS&rootFolder=" + longTermFolderName
                            + "/&partitionGranularity=PARTITION_YEAR&reducedata=" + reduceDataUsing,
                    configService);
    {
        PVTypeInfo typeInfo = new PVTypeInfo(rawPVName, ArchDBRTypes.DBR_SCALAR_DOUBLE, true, 1);
        String[] dataStores = new String[] { etlSTS.getURLRepresentation(), etlMTS.getURLRepresentation(),
                etlLTSRaw.getURLRepresentation() };
        typeInfo.setDataStores(dataStores);
        typeInfo.setPaused(true);
        configService.updateTypeInfoForPV(rawPVName, typeInfo);
        configService.registerPVToAppliance(rawPVName, configService.getMyApplianceInfo());
    }
    {
        PVTypeInfo typeInfo = new PVTypeInfo(reducedPVName, ArchDBRTypes.DBR_SCALAR_DOUBLE, true, 1);
        String[] dataStores = new String[] { etlSTS.getURLRepresentation(), etlMTS.getURLRepresentation(),
                etlLTSReduced.getURLRepresentation() };
        typeInfo.setDataStores(dataStores);
        typeInfo.setPaused(true);
        configService.updateTypeInfoForPV(reducedPVName, typeInfo);
        configService.registerPVToAppliance(reducedPVName, configService.getMyApplianceInfo());
    }
    // Control ETL manually
    configService.getETLLookup().manualControlForUnitTests();

    short currentYear = TimeUtils.getCurrentYear();

    logger.info("Testing data reduction for postprocessor " + reduceDataUsing);

    for (int day = 0; day < 40; day++) {
        // Generate data into the STS on a daily basis
        ArrayListEventStream genDataRaw = new ArrayListEventStream(86400,
                new RemotableEventStreamDesc(ArchDBRTypes.DBR_SCALAR_DOUBLE, rawPVName, currentYear));
        ArrayListEventStream genDataReduced = new ArrayListEventStream(86400,
                new RemotableEventStreamDesc(ArchDBRTypes.DBR_SCALAR_DOUBLE, reducedPVName, currentYear));
        for (int second = 0; second < 86400; second++) {
            YearSecondTimestamp ysts = new YearSecondTimestamp(currentYear, day * 86400 + second, 0);
            Timestamp ts = TimeUtils.convertFromYearSecondTimestamp(ysts);
            genDataRaw.add(new POJOEvent(ArchDBRTypes.DBR_SCALAR_DOUBLE, ts,
                    new ScalarValue<Double>(second * 1.0), 0, 0));
            genDataReduced.add(new POJOEvent(ArchDBRTypes.DBR_SCALAR_DOUBLE, ts,
                    new ScalarValue<Double>(second * 1.0), 0, 0));
        }

        try (BasicContext context = new BasicContext()) {
            etlSTS.appendData(context, rawPVName, genDataRaw);
            etlSTS.appendData(context, reducedPVName, genDataReduced);
        }
        logger.debug(
                "For postprocessor " + reduceDataUsing + " done generating data into the STS for day " + day);

        // Run ETL at the end of the day
        Timestamp timeETLruns = TimeUtils
                .convertFromYearSecondTimestamp(new YearSecondTimestamp(currentYear, day * 86400 + 86399, 0));
        ETLExecutor.runETLs(configService, timeETLruns);
        logger.debug("For postprocessor " + reduceDataUsing + " done performing ETL as though today is "
                + TimeUtils.convertToHumanReadableString(timeETLruns));

        // Compare data for raw+postprocessor and reduced PV's.
        PostProcessor postProcessor = PostProcessors.findPostProcessor(reduceDataUsing);
        postProcessor.initialize(reduceDataUsing, rawPVName);

        int rawWithPPCount = 0;
        int reducedCount = 0;

        try (BasicContext context = new BasicContext()) {
            Timestamp startTime = TimeUtils.minusDays(TimeUtils.now(), 10 * 366);
            Timestamp endTime = TimeUtils.plusDays(TimeUtils.now(), 10 * 366);
            LinkedList<Timestamp> rawTimestamps = new LinkedList<Timestamp>();
            LinkedList<Timestamp> reducedTimestamps = new LinkedList<Timestamp>();
            if (postProcessor instanceof PostProcessorWithConsolidatedEventStream) {
                List<Callable<EventStream>> callables = etlLTSRaw.getDataForPV(context, rawPVName, startTime,
                        endTime, postProcessor);
                for (Callable<EventStream> callable : callables) {
                    callable.call();
                }
                for (Event e : ((PostProcessorWithConsolidatedEventStream) postProcessor)
                        .getConsolidatedEventStream()) {
                    rawTimestamps.add(e.getEventTimeStamp());
                    rawWithPPCount++;
                }
            } else {
                try (EventStream rawWithPP = new CurrentThreadWorkerEventStream(rawPVName,
                        etlLTSRaw.getDataForPV(context, rawPVName, startTime, endTime, postProcessor))) {
                    for (Event e : rawWithPP) {
                        rawTimestamps.add(e.getEventTimeStamp());
                        rawWithPPCount++;
                    }
                }
            }
            try (EventStream reduced = new CurrentThreadWorkerEventStream(reducedPVName,
                    etlLTSReduced.getDataForPV(context, reducedPVName, startTime, endTime))) {
                for (Event e : reduced) {
                    reducedTimestamps.add(e.getEventTimeStamp());
                    reducedCount++;
                }
            }

            logger.debug(
                    "For postprocessor " + reduceDataUsing + " for day " + day + " we have " + rawWithPPCount
                            + " raw with postprocessor events and " + reducedCount + " reduced events");
            if (rawTimestamps.size() != reducedTimestamps.size()) {
                while (!rawTimestamps.isEmpty() || !reducedTimestamps.isEmpty()) {
                    if (!rawTimestamps.isEmpty())
                        logger.info("Raw/PP " + TimeUtils.convertToHumanReadableString(rawTimestamps.pop()));
                    if (!reducedTimestamps.isEmpty())
                        logger.info(
                                "Reduced" + TimeUtils.convertToHumanReadableString(reducedTimestamps.pop()));
                }
            }
            assertTrue(
                    "For postprocessor " + reduceDataUsing + " for day " + day + " we have " + rawWithPPCount
                            + " rawWithPP events and " + reducedCount + " reduced events",
                    rawWithPPCount == reducedCount);
        }
        if (day > 2) {
            assertTrue("For postprocessor " + reduceDataUsing + " for day " + day
                    + ", seems like no events were moved by ETL into LTS for " + rawPVName + " Count = "
                    + rawWithPPCount, (rawWithPPCount != 0));
            assertTrue("For postprocessor " + reduceDataUsing + " for day " + day
                    + ", seems like no events were moved by ETL into LTS for " + reducedPVName + " Count = "
                    + reducedCount, (reducedCount != 0));
        }
    }

    configService.shutdownNow();
}
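
When the event counts differ, the test above drains both timestamp lists with pop() purely for side-by-side logging, which empties the lists in the process. A tiny sketch of that drain-and-log pattern with illustrative data:

import java.util.Arrays;
import java.util.LinkedList;

public class DrainCompareDemo {
    public static void main(String[] args) {
        // illustrative timestamps standing in for the raw and reduced event times
        LinkedList<String> rawTimes = new LinkedList<String>(Arrays.asList("10:00", "10:01"));
        LinkedList<String> reducedTimes = new LinkedList<String>(Arrays.asList("10:00"));
        while (!rawTimes.isEmpty() || !reducedTimes.isEmpty()) {
            if (!rawTimes.isEmpty())
                System.out.println("Raw/PP  " + rawTimes.pop());
            if (!reducedTimes.isEmpty())
                System.out.println("Reduced " + reducedTimes.pop());
        }
    }
}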

From source file:org.epics.archiverappliance.retrieval.DataRetrievalServlet.java

/**
 * Parse the timeranges parameter and generate a list of TimeSpans.
 * @param resp
 * @param pvName
 * @param requestTimes - list of timespans that we add the valid times to.
 * @param timeRangesStr
 * @return true if the time ranges were parsed successfully; false if an error response has already been sent
 * @throws IOException
 */
private boolean parseTimeRanges(HttpServletResponse resp, String pvName, LinkedList<TimeSpan> requestTimes,
        String timeRangesStr) throws IOException {
    String[] timeRangesStrList = timeRangesStr.split(",");
    if (timeRangesStrList.length % 2 != 0) {
        String msg = "Need to specify an even number of times in timeranges for pv " + pvName + ". We have "
                + timeRangesStrList.length + " times";
        logger.error(msg);
        resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
        resp.sendError(HttpServletResponse.SC_BAD_REQUEST, msg);
        return false;
    }

    LinkedList<Timestamp> timeRangesList = new LinkedList<Timestamp>();
    for (String timeRangesStrItem : timeRangesStrList) {
        try {
            Timestamp ts = TimeUtils.convertFromISO8601String(timeRangesStrItem);
            timeRangesList.add(ts);
        } catch (IllegalArgumentException ex) {
            try {
                Timestamp ts = TimeUtils.convertFromDateTimeStringWithOffset(timeRangesStrItem);
                timeRangesList.add(ts);
            } catch (IllegalArgumentException ex2) {
                String msg = "Cannot parse time " + timeRangesStrItem;
                logger.warn(msg, ex2);
                resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
                resp.sendError(HttpServletResponse.SC_BAD_REQUEST, msg);
                return false;
            }
        }
    }

    assert (timeRangesList.size() % 2 == 0);
    Timestamp prevEnd = null;
    while (!timeRangesList.isEmpty()) {
        Timestamp t0 = timeRangesList.pop();
        Timestamp t1 = timeRangesList.pop();

        if (t1.before(t0)) {
            String msg = "For request, end " + t1.toString() + " is before start " + t0.toString() + " for pv "
                    + pvName;
            logger.error(msg);
            resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
            resp.sendError(HttpServletResponse.SC_BAD_REQUEST, msg);
            return false;
        }

        if (prevEnd != null) {
            if (t0.before(prevEnd)) {
                String msg = "For request, start time " + t0.toString() + " is before previous end time "
                        + prevEnd.toString() + " for pv " + pvName;
                logger.error(msg);
                resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
                resp.sendError(HttpServletResponse.SC_BAD_REQUEST, msg);
                return false;
            }
        }
        prevEnd = t1;
        requestTimes.add(new TimeSpan(t0, t1));
    }
    return true;
}
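
Because the list has been validated to contain an even number of timestamps, the loop above can safely pop() two elements per iteration, consuming the list as (start, end) pairs. A stripped-down sketch of that pairwise consumption with illustrative data:

import java.util.Arrays;
import java.util.LinkedList;

public class TimeRangePairsDemo {
    public static void main(String[] args) {
        // an even-length list of boundaries: the pairs (1, 5) and (10, 20)
        LinkedList<Integer> boundaries = new LinkedList<Integer>(Arrays.asList(1, 5, 10, 20));
        while (!boundaries.isEmpty()) {
            int start = boundaries.pop(); // first element of the pair
            int end = boundaries.pop();   // second element of the pair
            System.out.println("[" + start + ", " + end + "]");
        }
    }
}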

From source file:org.commoncrawl.service.parser.server.ParseWorker.java

@Override
public Document buildDocument(InstructionsPool instructionsPool, FileOutputStream optionalOutputStream)
        throws IOException {

    //LOG.info("Build Document Called");
    List<Integer> operations = instructionsPool.operations;
    List<String> arguments = instructionsPool.arguments;
    LinkedList<Integer> nodeStack = new LinkedList<Integer>();
    LinkedList<BlockObjectInContext> blockStack = new LinkedList<BlockObjectInContext>();
    HTMLMeta meta = null;

    for (int i = 0; i < operations.size(); i++) {
        int domOperation = operations.get(i);
        String domArgument = arguments.get(i);
        //System.out.println("Operation :" + ParserInstruction.getOperationString(domOperation)+" Arg:~" + domArgument+"~");
        switch (domOperation) {
        // Open node :
        case ParserInstruction.OpenNode:
        case ParserInstruction.AddLeaf: {
            activeLink = null;
            blockInConstruction = null;
            String nodeName = domArgument.toLowerCase();

            // append a new-line at the start of a block-level tag ...
            if (domOperation == ParserInstruction.OpenNode && blockLevelHTMLTags.contains(nodeName)) {
                if (textAccumulator.length() != 0
                        && textAccumulator.charAt(textAccumulator.length() - 1) != '\n')
                    textAccumulator.append("\n");
            }

            if (nodeName.equals("meta")) {
                meta = new HTMLMeta();
            } else if (linkTypeToSrcMap.containsKey(nodeName)) {
                //LOG.info("Node:" + nodeName + " is of type Link. Adding to LinksUnderConst");
                activeLink = new LinkUnderConstruction(nodeName, blockStack.peek());
                linksUnderConstruction.push(activeLink);
            } else if (nodeName.equals("head")) {
                inHeadTag++;
            } else if (nodeName.equals("base")) {
                if (inHeadTag != 0) {
                    inBase++;
                }
            } else if (nodeName.equals("table") || nodeName.equals("div")) {
                blockInConstruction = new BlockObjectInContext(blockStack.peek(), nodeName, ++blockId);
                blockStack.push(blockInConstruction);
            } else if (nodeName.equals("tr") || nodeName.equals("th")) {
                BlockObjectInContext table = blockStack.peek();
                if (table != null) {
                    table.rowNumber++;
                    table.cellNumber = -1;
                }
            } else if (nodeName.equals("td")) {
                BlockObjectInContext table = blockStack.peek();
                if (table != null) {
                    table.cellNumber++;
                }
            }
            nodeStack.push(i);
        }
            break;
        // Close node :
        case ParserInstruction.CloseNode:
        case ParserInstruction.CloseLeaf: {
            int argumentPos = nodeStack.pop();
            String nodeName = arguments.get(argumentPos).toLowerCase();

            // append a new-line when closing a block-level tag ...
            if (domOperation == ParserInstruction.CloseNode && blockLevelHTMLTags.contains(nodeName)) {
                if (textAccumulator.length() != 0
                        && textAccumulator.charAt(textAccumulator.length() - 1) != '\n')
                    textAccumulator.append("\n");
            }

            //LOG.info("Close Node Called on Node:" + nodeName);
            if (nodeName.equals("head")) {
                inHeadTag--;
            } else if (nodeName.equals("base")) {
                if (inHeadTag != 0) {
                    inBase--;
                }
            } else if (linkTypeToSrcMap.containsKey(nodeName)) {
                //LOG.info("Node:" + nodeName + " is a Link Type");
                LinkUnderConstruction linkPartial = linksUnderConstruction.pop();
                if (linkPartial != null) {
                    //LOG.info("POPed a partial LinkObject of type:" + linkPartial.type);
                    Link link = linkPartial.buildLink();
                    if (link != null) {
                        activeParseResult.getExtractedLinks().add(link);
                    }
                }
            } else if (nodeName.equals("table") || nodeName.equals("div")) {
                blockStack.pop();
            } else if (nodeName.equals("meta")) {
                if (meta != null) {
                    activeParseResult.getMetaTags().add(meta);
                    meta = null;
                }
            }
            if (textAccumulator.length() != 0
                    && !Character.isWhitespace(textAccumulator.charAt(textAccumulator.length() - 1))) {
                textAccumulator.append(" ");
            }

        }
            break;
        case ParserInstruction.AddText: {
            Integer argumentPos = nodeStack.peek();
            String nodeName = (argumentPos != null) ? arguments.get(argumentPos).toLowerCase() : null;
            LinkUnderConstruction link = linksUnderConstruction.peek();

            if (link != null) {
                if (link.linkText.length() != 0)
                    link.linkText += " ";
                link.linkText += domArgument.trim();
            }
            if (nodeName == null || !ignoreTextTagSet.contains(nodeName.toLowerCase())) {
                textAccumulator.append(domArgument);
            }

        }
            break;
        //        case ParserInstruction.AddContent:
        //          System.out.println("AddContent:"+domArgument);
        //          break;

        case ParserInstruction.WriteAttributeKey: {

            // grab key name .. 
            String key = domArgument.toLowerCase();

            // and lookahead one to grab attribute value ... 
            i++;

            if (i < operations.size() && operations.get(i) == ParserInstruction.WriteAttributeValue) {
                // grab value ... 
                String value = arguments.get(i);

                // if metatag capture key/value ... 
                if (meta != null) {
                    // create a new attribute object  
                    HTMLMetaAttribute attribute = new HTMLMetaAttribute();

                    attribute.setName(key);
                    attribute.setValue(value);

                    // append to meta tag 
                    meta.getAttributes().add(attribute);
                } else {
                    if (key.equals("href") && inBase != 0) {
                        if (value.length() != 0) {
                            try {
                                baseURL = new URL(value);
                            } catch (Exception e) {
                                LOG.error(CCStringUtils.stringifyException(e));
                                throw new IOException(e);
                            }
                        }
                    } else if (activeLink != null) {
                        if (linkTypeToSrcMap.get(activeLink.type).equalsIgnoreCase(key)) {
                            activeLink.linkURL = value;
                        } else {
                            activeLink.jsonObject.addProperty(key, value);
                        }
                    } else if (blockInConstruction != null) {
                        if (key.equals("class")) {
                            blockInConstruction.classId = value;
                        } else if (key.equals("id")) {
                            blockInConstruction.htmlId = value;
                        }
                    }
                }
            } else {
                // rewind and let outer control block deal with it 
                --i;
            }
        }
            break;

        case ParserInstruction.SetTitle: {
            activeParseResult.setTitle(domArgument);
        }
            break;
        //        case ParserInstruction.AddEntity:
        //          System.out.println("AddEntity:" + domArgument);
        //            break;
        //        case ParserInstruction.AddComment:
        //          System.out.println("AddComment:" + domArgument); 
        //            break;        case ParserInstruction.SetTitle:
        //          System.out.println("SetTitle:" + domArgument);
        //            break;
        //        }
        }
    }
    return null;
}

From source file:org.opencb.opencga.storage.hadoop.variant.HadoopVariantStorageEngine.java

@Override
public List<StoragePipelineResult> index(List<URI> inputFiles, URI outdirUri, boolean doExtract,
        boolean doTransform, boolean doLoad) throws StorageEngineException {

    if (inputFiles.size() == 1 || !doLoad) {
        return super.index(inputFiles, outdirUri, doExtract, doTransform, doLoad);
    }

    final boolean doArchive;
    final boolean doMerge;

    if (!getOptions().containsKey(HADOOP_LOAD_ARCHIVE) && !getOptions().containsKey(HADOOP_LOAD_VARIANT)) {
        doArchive = true;
        doMerge = true;
    } else {
        doArchive = getOptions().getBoolean(HADOOP_LOAD_ARCHIVE, false);
        doMerge = getOptions().getBoolean(HADOOP_LOAD_VARIANT, false);
    }

    if (!doArchive && !doMerge) {
        return Collections.emptyList();
    }

    final int nThreadArchive = getOptions().getInt(HADOOP_LOAD_ARCHIVE_BATCH_SIZE, 2);
    ObjectMap extraOptions = new ObjectMap().append(HADOOP_LOAD_ARCHIVE, true).append(HADOOP_LOAD_VARIANT,
            false);

    final List<StoragePipelineResult> concurrResult = new CopyOnWriteArrayList<>();
    List<VariantStoragePipeline> etlList = new ArrayList<>();
    ExecutorService executorService = Executors.newFixedThreadPool(nThreadArchive, r -> {
        Thread t = new Thread(r);
        t.setDaemon(true);
        return t;
    }); // Set Daemon for quick shutdown !!!
    LinkedList<Future<StoragePipelineResult>> futures = new LinkedList<>();
    List<Integer> indexedFiles = new CopyOnWriteArrayList<>();
    for (URI inputFile : inputFiles) {
        //Provide a connected storageETL if load is required.

        VariantStoragePipeline storageETL = newStorageETL(doLoad, new ObjectMap(extraOptions));
        futures.add(executorService.submit(() -> {
            try {
                Thread.currentThread().setName(Paths.get(inputFile).getFileName().toString());
                StoragePipelineResult storagePipelineResult = new StoragePipelineResult(inputFile);
                URI nextUri = inputFile;
                boolean error = false;
                if (doTransform) {
                    try {
                        nextUri = transformFile(storageETL, storagePipelineResult, concurrResult, nextUri,
                                outdirUri);

                    } catch (StoragePipelineException ignore) {
                        //Ignore here. Errors are stored in the ETLResult
                        error = true;
                    }
                }

                if (doLoad && doArchive && !error) {
                    try {
                        loadFile(storageETL, storagePipelineResult, concurrResult, nextUri, outdirUri);
                    } catch (StoragePipelineException ignore) {
                        //Ignore here. Errors are stored in the ETLResult
                        error = true;
                    }
                }
                if (doLoad && !error) {
                    // Read the VariantSource to get the original fileName (it may be different from the
                    // nextUri.getFileName if this is the transformed file)
                    String fileName = storageETL.readVariantSource(nextUri, null).getFileName();
                    // Get latest study configuration from DB, might have been changed since
                    StudyConfiguration studyConfiguration = storageETL.getStudyConfiguration();
                    // Get file ID for the provided file name
                    Integer fileId = studyConfiguration.getFileIds().get(fileName);
                    indexedFiles.add(fileId);
                }
                return storagePipelineResult;
            } finally {
                try {
                    storageETL.close();
                } catch (StorageEngineException e) {
                    logger.error("Issue closing DB connection ", e);
                }
            }
        }));
    }

    executorService.shutdown();

    int errors = 0;
    try {
        while (!futures.isEmpty()) {
            executorService.awaitTermination(1, TimeUnit.MINUTES);
            // Check values
            if (futures.peek().isDone() || futures.peek().isCancelled()) {
                Future<StoragePipelineResult> first = futures.pop();
                StoragePipelineResult result = first.get(1, TimeUnit.MINUTES);
                if (result.getTransformError() != null) {
                    //TODO: Handle errors. Retry?
                    errors++;
                    result.getTransformError().printStackTrace();
                } else if (result.getLoadError() != null) {
                    //TODO: Handle errors. Retry?
                    errors++;
                    result.getLoadError().printStackTrace();
                }
                concurrResult.add(result);
            }
        }
        if (errors > 0) {
            throw new StoragePipelineException("Errors found", concurrResult);
        }

        if (doLoad && doMerge) {
            int batchMergeSize = getOptions().getInt(HADOOP_LOAD_VARIANT_BATCH_SIZE, 10);
            // Overwrite default ID list with user provided IDs
            List<Integer> pendingFiles = indexedFiles;
            if (getOptions().containsKey(HADOOP_LOAD_VARIANT_PENDING_FILES)) {
                List<Integer> idList = getOptions().getAsIntegerList(HADOOP_LOAD_VARIANT_PENDING_FILES);
                if (!idList.isEmpty()) {
                    // only if the list is not empty
                    pendingFiles = idList;
                }
            }

            List<Integer> filesToMerge = new ArrayList<>(batchMergeSize);
            int i = 0;
            for (Iterator<Integer> iterator = pendingFiles.iterator(); iterator.hasNext(); i++) {
                Integer indexedFile = iterator.next();
                filesToMerge.add(indexedFile);
                if (filesToMerge.size() == batchMergeSize || !iterator.hasNext()) {
                    extraOptions = new ObjectMap().append(HADOOP_LOAD_ARCHIVE, false)
                            .append(HADOOP_LOAD_VARIANT, true)
                            .append(HADOOP_LOAD_VARIANT_PENDING_FILES, filesToMerge);

                    AbstractHadoopVariantStoragePipeline localEtl = newStorageETL(doLoad, extraOptions);

                    int studyId = getOptions().getInt(Options.STUDY_ID.key());
                    localEtl.preLoad(inputFiles.get(i), outdirUri);
                    localEtl.merge(studyId, filesToMerge);
                    localEtl.postLoad(inputFiles.get(i), outdirUri);
                    filesToMerge.clear();
                }
            }

            annotateLoadedFiles(outdirUri, inputFiles, concurrResult, getOptions());
            calculateStatsForLoadedFiles(outdirUri, inputFiles, concurrResult, getOptions());

        }
    } catch (InterruptedException e) {
        Thread.interrupted();
        throw new StoragePipelineException("Interrupted!", e, concurrResult);
    } catch (ExecutionException e) {
        throw new StoragePipelineException("Execution exception!", e, concurrResult);
    } catch (TimeoutException e) {
        throw new StoragePipelineException("Timeout Exception", e, concurrResult);
    } finally {
        if (!executorService.isShutdown()) {
            try {
                executorService.shutdownNow();
            } catch (Exception e) {
                logger.error("Problems shutting executer service down", e);
            }
        }
    }
    return concurrResult;
}

From source file:hudson.plugins.project_inheritance.projects.InheritanceProject.java

public Graph<SVGNode> getSVGRelationGraph() {
    Graph<SVGNode> out = new Graph<SVGNode>();

    LinkedList<InheritanceProject> open = new LinkedList<InheritanceProject>();
    HashSet<InheritanceProject> visited = new HashSet<InheritanceProject>();

    open.add(this);
    while (!open.isEmpty()) {
        InheritanceProject ip = open.pop();
        if (visited.contains(ip)) {
            continue;
        } else {
            visited.add(ip);
        }
        out.addNode(ip);

        for (InheritanceProject parent : ip.getParentProjects()) {
            open.add(parent);
            out.addNode(ip, parent);
        }
    }

    return out;
}
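
One detail worth noting in the method above: nodes are enqueued with add(), which appends to the tail, while pop() removes from the head, so despite the stack-like look the graph is actually explored in FIFO (breadth-first) order; using push() instead of add() would turn the same loop into a depth-first walk. The same add()-then-pop() combination appears again in getAllVersionsFromCurrentState below. A small sketch of the difference:

import java.util.LinkedList;

public class HeadTailDemo {
    public static void main(String[] args) {
        LinkedList<String> worklist = new LinkedList<String>();
        worklist.add("a");                  // add() appends to the tail
        worklist.add("b");
        System.out.println(worklist.pop()); // prints "a" -- pop() takes from the head, so FIFO

        worklist.push("c");                 // push() inserts at the head
        System.out.println(worklist.pop()); // prints "c" -- now LIFO, like a stack
    }
}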

From source file:hudson.plugins.project_inheritance.projects.InheritanceProject.java

/**
 * This method returns the versions selected for this project and its
 * parents.
 * 
 * @return a map from project name to the version selected for that project
 */
public Map<String, Long> getAllVersionsFromCurrentState() {
    LinkedList<InheritanceProject> open = new LinkedList<InheritanceProject>();
    Set<String> closed = new HashSet<String>();
    Map<String, Long> out = new HashMap<String, Long>();

    //Adding ourselves as the first node
    open.add(this);

    while (!open.isEmpty()) {
        InheritanceProject ip = open.pop();
        //Fetching the user-requested version for the open node
        Long v = ip.getUserDesiredVersion();
        out.put(ip.getName(), v);
        //Then, adding this node to the closed set
        closed.add(ip.getName());
        //And adding the parent nodes to the open list
        for (AbstractProjectReference apr : ip.getParentReferences()) {
            if (closed.contains(apr.getName())) {
                continue;
            }
            InheritanceProject next = apr.getProject();
            if (next == null) {
                continue;
            }
            open.addLast(next);
        }
    }
    return out;
}

From source file:hudson.plugins.project_inheritance.projects.InheritanceProject.java

public Map<InheritanceProject, Relationship> getRelationships() {
    Object obj = onInheritChangeBuffer.get(this, "getRelationships");
    if (obj != null && obj instanceof Map) {
        return (Map) obj;
    }

    //Creating the returned map and pre-filling it with empty lists
    Map<InheritanceProject, Relationship> map = new HashMap<InheritanceProject, Relationship>();

    //Preparing the set of projects that were already explored
    HashSet<String> seenProjects = new HashSet<String>();

    //Fetching the map of all projects and their connections
    Map<String, ProjectGraphNode> connGraph = getConnectionGraph();

    //Fetching the node for the current (this) project
    ProjectGraphNode node = connGraph.get(this.getName());
    if (node == null) {
        return map;
    }

    //Mates can be filled quite easily
    for (String mate : node.mates) {
        InheritanceProject p = InheritanceProject.getProjectByName(mate);
        ProjectGraphNode mateNode = connGraph.get(mate);
        boolean isLeaf = (mateNode == null) ? true : mateNode.children.isEmpty();
        if (p == null) {
            continue;
        }
        //Checking if we've seen this mate already
        if (!seenProjects.contains(p.getName())) {
            map.put(p, new Relationship(Relationship.Type.MATE, 0, isLeaf));
            seenProjects.add(p.getName());
        }
    }

    //Exploring parents
    int distance = 1;
    seenProjects.clear();
    LinkedList<InheritanceProject> cOpen = new LinkedList<InheritanceProject>();
    LinkedList<InheritanceProject> nOpen = new LinkedList<InheritanceProject>();
    cOpen.add(this);
    while (!cOpen.isEmpty()) {
        InheritanceProject ip = cOpen.pop();
        if (ip == null || seenProjects.contains(ip.getName())) {
            continue;
        }
        seenProjects.add(ip.getName());

        node = connGraph.get(ip.getName());
        if (ip == null || node == null) {
            continue;
        }
        //Adding all parents
        for (String parent : node.parents) {
            InheritanceProject par = InheritanceProject.getProjectByName(parent);
            if (par == null || seenProjects.contains(parent)) {
                continue;
            }
            map.put(par, new Relationship(Relationship.Type.PARENT, distance, false));
            nOpen.push(par);
        }
        if (cOpen.isEmpty() && !nOpen.isEmpty()) {
            cOpen = nOpen;
            nOpen = new LinkedList<InheritanceProject>();
            distance++;
        }
    }

    //Exploring children
    distance = 1;
    seenProjects.clear();
    cOpen.clear();
    nOpen.clear();
    cOpen.add(this);
    while (!cOpen.isEmpty()) {
        InheritanceProject ip = cOpen.pop();
        if (ip == null || seenProjects.contains(ip.getName())) {
            continue;
        }
        seenProjects.add(ip.getName());

        node = connGraph.get(ip.getName());
        if (ip == null || node == null) {
            continue;
        }
        //Adding all children
        for (String child : node.children) {
            InheritanceProject cProj = InheritanceProject.getProjectByName(child);
            if (cProj == null || seenProjects.contains(child)) {
                continue;
            }
            ProjectGraphNode childNode = connGraph.get(child);
            boolean isLeaf = (childNode == null) ? true : childNode.children.isEmpty();
            map.put(cProj, new Relationship(Relationship.Type.CHILD, distance, isLeaf));
            nOpen.push(cProj);
        }
        if (cOpen.isEmpty() && !nOpen.isEmpty()) {
            cOpen = nOpen;
            nOpen = new LinkedList<InheritanceProject>();
            distance++;
        }
    }

    onInheritChangeBuffer.set(this, "getRelationships", map);
    return map;
}

From source file:hudson.plugins.project_inheritance.projects.InheritanceProject.java

/**
 * Tests if this project's configuration leads to a cyclic, diamond or
 * multiple dependency.<br/>
 * <br/>
 * See <a href="http://en.wikipedia.org/wiki/Cycle_detection">cycle detection</a> and
 * <a href="http://en.wikipedia.org/wiki/Diamond_problem">diamond problem</a>.
 * 
 * @return true, if there is a cyclic, diamond or repeated dependency among
 * this project's parents.
 */
public final boolean hasCyclicDependency(String... whenTheseProjectsAdded) {
    /* TODO: While this method runs reasonably fast, it is run very often
     * As such, find a way to buffer the result across all projects and
     * only rebuild if necessary.
     */

    /* TODO: Further more, this method is not space-optimal
     * See: http://en.wikipedia.org/wiki/Cycle_detection
     * But do note that any replacement algorithm also, by contract, needs
     * to detect multiple inheritance and its special case of diamond
     * inheritance.
     */

    //Preparing the set of project names that were seen at least once
    HashSet<String> closed = new HashSet<String>();

    //Creating the list of parent projects to still explore
    LinkedList<InheritanceProject> open = new LinkedList<InheritanceProject>();
    //And scheduling ourselves as the first to evaluate
    open.push(this);

    //And finally, creating a list of additional references the caller
    //wishes to eventually add to THIS project
    LinkedList<String> additionalRefs = new LinkedList<String>();
    for (String pName : whenTheseProjectsAdded) {
        //We need to ignore those, that we already refer to as parents
        //Do note that this makes direct multiple inheritance impossible
        //to detect in advance, but such errors should be obvious anyway
        boolean isAlreadyReferenced = false;
        for (AbstractProjectReference par : this.getParentReferences()) {
            if (par.getName().equals(pName)) {
                isAlreadyReferenced = true;
                break;
            }
        }
        if (!isAlreadyReferenced) {
            additionalRefs.add(pName);
        }
    }

    //Processing the open stack, checking if we're already met that parent
    //and if not, adding its parent to our open stack
    while (!open.isEmpty()) {
        //Popping the first element
        InheritanceProject p = open.pop();
        //Checking if we've seen that parent already
        if (closed.contains(p.name)) {
            //Detected a cyclic dependency
            return true;
        }
        // Otherwise, we add all its parents to our open set
        for (AbstractProjectReference ref : p.getParentReferences()) {
            InheritanceProject refP = ref.getProject();
            if (refP != null) {
                open.push(refP);
            }
        }
        //And if the current object is active, we also need to check the
        //new future refs
        if (p == this && !additionalRefs.isEmpty()) {
            for (String ref : additionalRefs) {
                InheritanceProject ip = InheritanceProject.getProjectByName(ref);
                if (ip != null) {
                    open.push(ip);
                }
            }
        }
        closed.add(p.name);
    }
    // If we reach this spot, there is no such dependency
    return false;
}
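
The loop above is the classic worklist pattern for cycle and duplicate detection: push() the unexplored parents, pop() the next candidate, and report a problem as soon as a name turns up in the closed set a second time. A compact sketch of the same idea over a plain parent map (hypothetical data, not the Jenkins project model):

import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;

public class CycleCheckDemo {
    /** Returns true if any project is reachable more than once from 'start' via its parents. */
    static boolean hasRepeatedAncestor(String start, Map<String, List<String>> parents) {
        Set<String> closed = new HashSet<String>();
        LinkedList<String> open = new LinkedList<String>();
        open.push(start);
        while (!open.isEmpty()) {
            String current = open.pop();
            if (!closed.add(current)) {
                return true; // reached twice: cyclic, diamond or repeated dependency
            }
            for (String parent : parents.getOrDefault(current, Collections.<String>emptyList())) {
                open.push(parent);
            }
        }
        return false;
    }

    public static void main(String[] args) {
        // illustrative parent map, not real project data
        Map<String, List<String>> parents = new HashMap<String, List<String>>();
        parents.put("child", Arrays.asList("base", "mixin"));
        parents.put("base", Collections.singletonList("mixin")); // "mixin" is reachable twice: a diamond
        System.out.println(hasRepeatedAncestor("child", parents)); // prints true
    }
}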