Example usage for java.util LinkedList clear

List of usage examples for java.util LinkedList clear

Introduction

On this page you can find example usage for java.util.LinkedList.clear().

Prototype

public void clear() 

Document

Removes all of the elements from this list.
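
A minimal, self-contained sketch of the method's effect, using hypothetical values: after clear() returns, the list is empty and ready for reuse.

import java.util.LinkedList;

public class ClearDemo {
    public static void main(String[] args) {
        LinkedList<String> list = new LinkedList<String>();
        list.add("a");
        list.add("b");

        // Removes all of the elements from this list.
        list.clear();

        System.out.println(list.size());    // prints 0
        System.out.println(list.isEmpty()); // prints true
    }
}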

Usage

From source file: com.att.nsa.cambria.service.impl.MMServiceImpl.java

private void pushEventsWithTransaction(DMaaPContext ctx, InputStream inputStream, final String topic,
        final String partitionKey, final String requestTime, final boolean chunked, final String mediaType)
        throws ConfigDbException, AccessDeniedException, TopicExistsException, IOException,
        CambriaApiException {

    final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics();

    // setup the event set
    final CambriaEventSet events = new CambriaEventSet(mediaType, inputStream, chunked, partitionKey);

    // start processing, building a batch to push to the backend
    final long startMs = System.currentTimeMillis();
    long count = 0;
    long maxEventBatch = 1024 * 16;
    String evenlen = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, BATCH_LENGTH);
    if (null != evenlen)
        maxEventBatch = Long.parseLong(evenlen);

    final LinkedList<Publisher.message> batch = new LinkedList<Publisher.message>();
    final ArrayList<KeyedMessage<String, String>> kms = new ArrayList<KeyedMessage<String, String>>();

    Publisher.message m = null;
    int messageSequence = 1;
    Long batchId = 1L;
    final boolean transactionEnabled = true;
    int publishBatchCount = 0;
    SimpleDateFormat sdf = new SimpleDateFormat("dd/MM/yyyy HH:mm:ss.SS");

    // LOG.warn("Batch Start Id: " +
    // Utils.getFromattedBatchSequenceId(batchId));
    try {
        // for each message...
        batchId = DMaaPContext.getBatchID();

        String responseTransactionId = null;

        while ((m = events.next()) != null) {

            // LOG.warn("Batch Start Id: " +
            // Utils.getFromattedBatchSequenceId(batchId));

            addTransactionDetailsToMessage(m, topic, ctx.getRequest(), requestTime, messageSequence, batchId,
                    transactionEnabled);
            messageSequence++;

            // add the message to the batch
            batch.add(m);

            responseTransactionId = m.getLogDetails().getTransactionId();

            JSONObject jsonObject = new JSONObject();
            jsonObject.put("message", m.getMessage());
            jsonObject.put("transactionId", responseTransactionId);
            final KeyedMessage<String, String> data = new KeyedMessage<String, String>(topic, m.getKey(),
                    jsonObject.toString());
            kms.add(data);

            // check if the batch is full
            final int sizeNow = batch.size();
            if (sizeNow >= maxEventBatch) {
                String startTime = sdf.format(new Date());
                LOG.info("Batch Start Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch Start Id="
                        + batchId + "]");
                try {
                    ctx.getConfigReader().getfPublisher().sendBatchMessage(topic, kms);
                    // transactionLogs(batch);
                    for (message msg : batch) {
                        LogDetails logDetails = msg.getLogDetails();
                        LOG.info("Publisher Log Details : " + logDetails.getPublisherLogDetails());
                    }
                } catch (Exception excp) {

                    int status = HttpStatus.SC_NOT_FOUND;
                    String errorMsg = null;
                    if (excp instanceof CambriaApiException) {
                        status = ((CambriaApiException) excp).getStatus();
                        JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
                        JSONObject errObject = new JSONObject(jsonTokener);
                        errorMsg = (String) errObject.get("message");
                    }
                    ErrorResponse errRes = new ErrorResponse(status,
                            DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(),
                            "Transaction-" + errorMessages.getPublishMsgError() + ":" + topic + "."
                                    + errorMessages.getPublishMsgCount() + count + "." + errorMsg,
                            null, Utils.getFormattedDate(new Date()), topic,
                            Utils.getUserApiKey(ctx.getRequest()), ctx.getRequest().getRemoteHost(), null,
                            null);
                    LOG.info(errRes.toString());
                    throw new CambriaApiException(errRes);
                }
                kms.clear();
                batch.clear();
                metricsSet.publishTick(sizeNow);
                publishBatchCount = sizeNow;
                count += sizeNow;
                // batchId++;
                String endTime = sdf.format(new Date());
                LOG.info("Batch End Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch End Id="
                        + batchId + ",Batch Total=" + publishBatchCount + ",Batch Start Time=" + startTime
                        + ",Batch End Time=" + endTime + "]");
                batchId = DMaaPContext.getBatchID();
            }
        }

        // send the pending batch
        final int sizeNow = batch.size();
        if (sizeNow > 0) {
            String startTime = sdf.format(new Date());
            LOG.info("Batch Start Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch Start Id="
                    + batchId + "]");
            try {
                ctx.getConfigReader().getfPublisher().sendBatchMessage(topic, kms);
                // transactionLogs(batch);
                for (message msg : batch) {
                    LogDetails logDetails = msg.getLogDetails();
                    LOG.info("Publisher Log Details : " + logDetails.getPublisherLogDetails());
                }
            } catch (Exception excp) {
                int status = HttpStatus.SC_NOT_FOUND;
                String errorMsg = null;
                if (excp instanceof CambriaApiException) {
                    status = ((CambriaApiException) excp).getStatus();
                    JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
                    JSONObject errObject = new JSONObject(jsonTokener);
                    errorMsg = (String) errObject.get("message");
                }

                ErrorResponse errRes = new ErrorResponse(status,
                        DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(),
                        "Transaction-" + errorMessages.getPublishMsgError() + ":" + topic + "."
                                + errorMessages.getPublishMsgCount() + count + "." + errorMsg,
                        null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()),
                        ctx.getRequest().getRemoteHost(), null, null);
                LOG.info(errRes.toString());
                throw new CambriaApiException(errRes);
            }
            kms.clear();
            metricsSet.publishTick(sizeNow);
            count += sizeNow;
            // batchId++;
            String endTime = sdf.format(new Date());
            publishBatchCount = sizeNow;
            LOG.info("Batch End Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch End Id="
                    + batchId + ",Batch Total=" + publishBatchCount + ",Batch Start Time=" + startTime
                    + ",Batch End Time=" + endTime + "]");
        }

        final long endMs = System.currentTimeMillis();
        final long totalMs = endMs - startMs;

        LOG.info("Published " + count + " msgs in " + totalMs + "ms for topic " + topic);

        // build a response
        final JSONObject response = new JSONObject();
        response.put("count", count);
        response.put("serverTimeMs", totalMs);

    } catch (Exception excp) {
        int status = HttpStatus.SC_NOT_FOUND;
        String errorMsg = null;
        if (excp instanceof CambriaApiException) {
            status = ((CambriaApiException) excp).getStatus();
            JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
            JSONObject errObject = new JSONObject(jsonTokener);
            errorMsg = (String) errObject.get("message");
        }

        ErrorResponse errRes = new ErrorResponse(status,
                DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(),
                "Transaction-" + errorMessages.getPublishMsgError() + ":" + topic + "."
                        + errorMessages.getPublishMsgCount() + count + "." + errorMsg,
                null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()),
                ctx.getRequest().getRemoteHost(), null, null);
        LOG.info(errRes.toString());
        throw new CambriaApiException(errRes);
    }
}

From source file: com.att.nsa.cambria.service.impl.EventsServiceImpl.java

/**
 *
 * @param ctx
 * @param inputStream
 * @param topic
 * @param partitionKey
 * @param requestTime
 * @param chunked
 * @param mediaType
 * @throws ConfigDbException
 * @throws AccessDeniedException
 * @throws TopicExistsException
 * @throws IOException
 * @throws CambriaApiException
 */
private void pushEventsWithTransaction(DMaaPContext ctx, InputStream inputStream, final String topic,
        final String partitionKey, final String requestTime, final boolean chunked, final String mediaType)
        throws ConfigDbException, AccessDeniedException, TopicExistsException, IOException,
        CambriaApiException {

    final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics();

    // setup the event set
    final CambriaEventSet events = new CambriaEventSet(mediaType, inputStream, chunked, partitionKey);

    // start processing, building a batch to push to the backend
    final long startMs = System.currentTimeMillis();
    long count = 0;
    long maxEventBatch = 1024 * 16;
    String evenlen = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, BATCH_LENGTH);
    if (null != evenlen)
        maxEventBatch = Long.parseLong(evenlen);
    //final long maxEventBatch = ctx.getConfigReader().getSettings().getLong(BATCH_LENGTH, 1024 * 16);
    final LinkedList<Publisher.message> batch = new LinkedList<Publisher.message>();
    final ArrayList<KeyedMessage<String, String>> kms = new ArrayList<KeyedMessage<String, String>>();

    Publisher.message m = null;
    int messageSequence = 1;
    Long batchId = 1L;
    final boolean transactionEnabled = true;
    int publishBatchCount = 0;
    SimpleDateFormat sdf = new SimpleDateFormat("dd/MM/yyyy HH:mm:ss.SS");

    //LOG.warn("Batch Start Id: " + Utils.getFromattedBatchSequenceId(batchId));
    try {
        // for each message...
        batchId = DMaaPContext.getBatchID();

        String responseTransactionId = null;

        while ((m = events.next()) != null) {

            //LOG.warn("Batch Start Id: " + Utils.getFromattedBatchSequenceId(batchId));

            addTransactionDetailsToMessage(m, topic, ctx.getRequest(), requestTime, messageSequence, batchId,
                    transactionEnabled);
            messageSequence++;

            // add the message to the batch
            batch.add(m);

            responseTransactionId = m.getLogDetails().getTransactionId();

            JSONObject jsonObject = new JSONObject();
            jsonObject.put("message", m.getMessage());
            jsonObject.put("transactionId", responseTransactionId);
            final KeyedMessage<String, String> data = new KeyedMessage<String, String>(topic, m.getKey(),
                    jsonObject.toString());
            kms.add(data);

            // check if the batch is full
            final int sizeNow = batch.size();
            if (sizeNow >= maxEventBatch) {
                String startTime = sdf.format(new Date());
                LOG.info("Batch Start Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch Start Id="
                        + batchId + "]");
                try {
                    ctx.getConfigReader().getfPublisher().sendBatchMessage(topic, kms);
                    //transactionLogs(batch);
                    for (message msg : batch) {
                        LogDetails logDetails = msg.getLogDetails();
                        LOG.info("Publisher Log Details : " + logDetails.getPublisherLogDetails());
                    }
                } catch (Exception excp) {

                    int status = HttpStatus.SC_NOT_FOUND;
                    String errorMsg = null;
                    if (excp instanceof CambriaApiException) {
                        status = ((CambriaApiException) excp).getStatus();
                        JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
                        JSONObject errObject = new JSONObject(jsonTokener);
                        errorMsg = (String) errObject.get("message");
                    }
                    ErrorResponse errRes = new ErrorResponse(status,
                            DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(),
                            "Transaction-" + errorMessages.getPublishMsgError() + ":" + topic + "."
                                    + errorMessages.getPublishMsgCount() + count + "." + errorMsg,
                            null, Utils.getFormattedDate(new Date()), topic,
                            Utils.getUserApiKey(ctx.getRequest()), ctx.getRequest().getRemoteHost(), null,
                            null);
                    LOG.info(errRes.toString());
                    throw new CambriaApiException(errRes);
                }
                kms.clear();
                batch.clear();
                metricsSet.publishTick(sizeNow);
                publishBatchCount = sizeNow;
                count += sizeNow;
                //batchId++;
                String endTime = sdf.format(new Date());
                LOG.info("Batch End Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch End Id="
                        + batchId + ",Batch Total=" + publishBatchCount + ",Batch Start Time=" + startTime
                        + ",Batch End Time=" + endTime + "]");
                batchId = DMaaPContext.getBatchID();
            }
        }

        // send the pending batch
        final int sizeNow = batch.size();
        if (sizeNow > 0) {
            String startTime = sdf.format(new Date());
            LOG.info("Batch Start Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch Start Id="
                    + batchId + "]");
            try {
                ctx.getConfigReader().getfPublisher().sendBatchMessage(topic, kms);
                //transactionLogs(batch);
                for (message msg : batch) {
                    LogDetails logDetails = msg.getLogDetails();
                    LOG.info("Publisher Log Details : " + logDetails.getPublisherLogDetails());
                }
            } catch (Exception excp) {
                int status = HttpStatus.SC_NOT_FOUND;
                String errorMsg = null;
                if (excp instanceof CambriaApiException) {
                    status = ((CambriaApiException) excp).getStatus();
                    JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
                    JSONObject errObject = new JSONObject(jsonTokener);
                    errorMsg = (String) errObject.get("message");
                }

                ErrorResponse errRes = new ErrorResponse(status,
                        DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(),
                        "Transaction-" + errorMessages.getPublishMsgError() + ":" + topic + "."
                                + errorMessages.getPublishMsgCount() + count + "." + errorMsg,
                        null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()),
                        ctx.getRequest().getRemoteHost(), null, null);
                LOG.info(errRes.toString());
                throw new CambriaApiException(errRes);
            }
            kms.clear();
            metricsSet.publishTick(sizeNow);
            count += sizeNow;
            //batchId++;
            String endTime = sdf.format(new Date());
            publishBatchCount = sizeNow;
            LOG.info("Batch End Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch End Id="
                    + batchId + ",Batch Total=" + publishBatchCount + ",Batch Start Time=" + startTime
                    + ",Batch End Time=" + endTime + "]");
        }

        final long endMs = System.currentTimeMillis();
        final long totalMs = endMs - startMs;

        LOG.info("Published " + count + " msgs in " + totalMs + "ms for topic " + topic);

        if (null != responseTransactionId) {
            ctx.getResponse().setHeader("transactionId", Utils.getResponseTransactionId(responseTransactionId));
        }

        // build a response
        final JSONObject response = new JSONObject();
        response.put("count", count);
        response.put("serverTimeMs", totalMs);
        DMaaPResponseBuilder.respondOk(ctx, response);

    } catch (Exception excp) {
        int status = HttpStatus.SC_NOT_FOUND;
        String errorMsg = null;
        if (excp instanceof CambriaApiException) {
            status = ((CambriaApiException) excp).getStatus();
            JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
            JSONObject errObject = new JSONObject(jsonTokener);
            errorMsg = (String) errObject.get("message");
        }

        ErrorResponse errRes = new ErrorResponse(status,
                DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(),
                "Transaction-" + errorMessages.getPublishMsgError() + ":" + topic + "."
                        + errorMessages.getPublishMsgCount() + count + "." + errorMsg,
                null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()),
                ctx.getRequest().getRemoteHost(), null, null);
        LOG.info(errRes.toString());
        throw new CambriaApiException(errRes);
    }
}
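
Both DMaaP examples above accumulate messages in a LinkedList, flush them to the backend once the batch is full, and then call clear() so the same list instance can be reused for the next batch. A minimal sketch of that accumulate-flush-clear pattern, with a hypothetical send() standing in for the real publisher, might look like this:

import java.util.LinkedList;

class BatchPublisher {
    private static final int MAX_BATCH = 16 * 1024;
    private final LinkedList<String> batch = new LinkedList<String>();

    void publish(String message) {
        batch.add(message);
        if (batch.size() >= MAX_BATCH) {
            flush();
        }
    }

    void flush() {
        if (!batch.isEmpty()) {
            send(batch);   // hypothetical backend call
            batch.clear(); // reuse the same list for the next batch
        }
    }

    private void send(LinkedList<String> messages) {
        // placeholder for the real transport
    }
}

Clearing the list instead of allocating a new one keeps the reference stable, which is why both examples can declare batch and kms as final locals.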

From source file: au.edu.ausstage.networks.LookupManager.java

/**
 * A method to lookup the key collaborators for a contributor
 *
 * @param id         the unique id of the contributor
 * @param formatType the required format of the data
 * @param sortType   the required way in which the data is to be sorted
 *
 * @return           the results of the lookup
 */
public String getKeyCollaborators(String id, String formatType, String sortType) {

    // check on the parameters
    if (InputUtils.isValidInt(id) == false || InputUtils.isValid(formatType) == false
            || InputUtils.isValid(sortType) == false) {
        throw new IllegalArgumentException("All parameters to this method are required");
    }

    // define a linked list to store the results
    java.util.LinkedList<Collaborator> collaborators = new java.util.LinkedList<Collaborator>();

    // define other helper variables
    QuerySolution row = null;
    Collaborator collaborator = null;

    // define the base sparql query
    String sparqlQuery = "PREFIX foaf:       <" + FOAF.NS + ">" + "PREFIX ausestage:  <" + AuseStage.NS + "> "
            + "SELECT ?collaborator ?collabGivenName ?collabFamilyName ?function ?firstDate ?lastDate ?collabCount "
            + "WHERE {  " + "       @ a foaf:Person ; "
            + "                      ausestage:hasCollaboration ?collaboration. "
            + "       ?collaboration ausestage:collaborator ?collaborator; "
            + "                      ausestage:collaborationFirstDate ?firstDate; "
            + "                      ausestage:collaborationLastDate ?lastDate; "
            + "                      ausestage:collaborationCount ?collabCount. "
            + "       ?collaborator  foaf:givenName ?collabGivenName; "
            + "                      foaf:familyName ?collabFamilyName; "
            + "                      ausestage:function ?function. " + "       FILTER (?collaborator != @) "
            + "}";

    // do we need to sort by name?
    if (sortType.equals("count") == true) {
        sparqlQuery += " ORDER BY DESC(?collabCount)";
    } else if (sortType.equals("name") == true) {
        sparqlQuery += " ORDER BY ?collabFamilyName ?collabGivenName";
    }

    // build a URI from the id
    id = AusStageURI.getContributorURI(id);

    // add the contributor URI to the query
    sparqlQuery = sparqlQuery.replaceAll("@", "<" + id + ">");

    // execute the query
    ResultSet results = rdf.executeSparqlQuery(sparqlQuery);

    // build the dataset
    // use a numeric sort order
    while (results.hasNext()) {
        // loop through the result set
        // get a new row of data
        row = results.nextSolution();

        // instantiate a collaborator object
        collaborator = new Collaborator(AusStageURI.getId(row.get("collaborator").toString()));

        // check to see if the list contains this collaborator
        if (collaborators.indexOf(collaborator) != -1) {
            // collaborator is already in the list
            collaborator = collaborators.get(collaborators.indexOf(collaborator));

            // update the function
            collaborator.setFunction(row.get("function").toString());

        } else {
            // collaborator is not on the list

            // get the name
            collaborator.setGivenName(row.get("collabGivenName").toString());
            collaborator.setFamilyName(row.get("collabFamilyName").toString(), true);

            // get the dates
            collaborator.setFirstDate(row.get("firstDate").toString());
            collaborator.setLastDate(row.get("lastDate").toString());

            // get the collaboration count
            collaborator.setCollaborations(Integer.toString(row.get("collabCount").asLiteral().getInt()));

            // add the url
            collaborator.setUrl(AusStageURI.getURL(row.get("collaborator").toString()));

            // add the function
            collaborator.setFunction(row.get("function").toString());

            collaborators.add(collaborator);
        }
    }

    // play nice and tidy up
    rdf.tidyUp();

    // sort by the id
    if (sortType.equals("id") == true) {
        TreeMap<Integer, Collaborator> collaboratorsToSort = new TreeMap<Integer, Collaborator>();

        for (int i = 0; i < collaborators.size(); i++) {
            collaborator = collaborators.get(i);

            collaboratorsToSort.put(Integer.parseInt(collaborator.getId()), collaborator);
        }

        // empty the list
        collaborators.clear();

        // add the collaborators back to the list
        Collection values = collaboratorsToSort.values();
        Iterator iterator = values.iterator();

        while (iterator.hasNext()) {
            // get the collaborator
            collaborator = (Collaborator) iterator.next();

            collaborators.add(collaborator);
        }

        collaboratorsToSort = null;
    }

    // define a variable to store the data
    String dataString = null;

    if (formatType.equals("html") == true) {
        dataString = createHTMLOutput(collaborators);
    } else if (formatType.equals("xml") == true) {
        dataString = createXMLOutput(collaborators);
    } else if (formatType.equals("json") == true) {
        dataString = createJSONOutput(collaborators);
    }

    // return the data
    return dataString;
}
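
In getKeyCollaborators() above, clear() empties the list so it can be repopulated in id order from the sorted TreeMap. A condensed sketch of that clear-and-refill re-sort idiom, assuming distinct numeric ids, is:

import java.util.LinkedList;
import java.util.TreeMap;

class ResortDemo {
    // Re-sorts the list in place by numeric id, mirroring the
    // clear-and-refill idiom from getKeyCollaborators().
    static void sortById(LinkedList<String> ids) {
        TreeMap<Integer, String> sorted = new TreeMap<Integer, String>();
        for (String id : ids) {
            sorted.put(Integer.parseInt(id), id);
        }
        ids.clear();                 // empty the list...
        ids.addAll(sorted.values()); // ...and add the entries back in key order
    }
}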

From source file: gedi.riboseq.inference.orf.OrfFinder.java

/**
 * Coordinates are in codonsRegion space!
 * @param index
 * @param sequence
 * @param sg
 * @param codonsRegion
 * @return
 */
public ArrayList<OrfWithCodons> findOrfs(int index, String sequence, SpliceGraph sg,
        ImmutableReferenceGenomicRegion<IntervalTreeSet<Codon>> codonsRegion) {
    SimpleDirectedGraph<Codon> fg = new SimpleDirectedGraph<Codon>("Codongraph");

    //      if (!codonsRegion.getReference().toString().equals("chr4+") || !codonsRegion.getRegion().contains(140_283_087))
    //         return 0;

    LeftMostInFrameAndClearList buff = new LeftMostInFrameAndClearList();

    IntervalTreeSet<Codon> codons = codonsRegion.getData();
    codons.removeIf(c -> c.getTotalActivity() < minCodonActivity);
    if (codons.size() == 0)
        return new ArrayList<OrfWithCodons>();

    // add stop codons for easy orf inference
    HashSet<Codon> stopCodons = new HashSet<Codon>();
    Trie<String> stop = new Trie<String>();
    stop.put("TAG", "TAG");
    stop.put("TGA", "TGA");
    stop.put("TAA", "TAA");
    stop.iterateAhoCorasick(sequence)
            .map(r -> new Codon(new ArrayGenomicRegion(r.getStart(), r.getEnd()), r.getValue()))
            .toCollection(stopCodons);

    for (Intron intr : sg.iterateIntrons().loop()) {
        ArrayGenomicRegion reg = new ArrayGenomicRegion(intr.getStart() - 2, intr.getStart(), intr.getEnd(),
                intr.getEnd() + 1);
        String cod = stop.get(SequenceUtils.extractSequence(reg, sequence));
        if (cod != null)
            stopCodons.add(new Codon(reg, cod));

        reg = new ArrayGenomicRegion(intr.getStart() - 1, intr.getStart(), intr.getEnd(), intr.getEnd() + 2);
        cod = stop.get(SequenceUtils.extractSequence(reg, sequence));
        if (cod != null)
            stopCodons.add(new Codon(reg, cod));
    }
    stopCodons.removeAll(codons);
    codons.addAll(stopCodons);

    ArrayList<OrfWithCodons> re = new ArrayList<OrfWithCodons>();
    HashSet<Codon> usedForAnno = new HashSet<Codon>();

    if (assembleAnnotationFirst) {
        // new: first use annotated transcripts in a greedy fashion
        ArrayList<ImmutableReferenceGenomicRegion<Transcript>> transcripts = annotation.ei(codonsRegion)
                .filter(t -> t.getData().isCoding()).map(t -> codonsRegion.induce(t, "T")).list();

        int acount = 0;
        LinkedList<OrfWithCodons> orfs = new LinkedList<OrfWithCodons>();
        GenomicRegion best;
        HashSet<Codon> aremoved = new HashSet<Codon>();

        do {
            best = null;
            double bestSum = 0;
            for (ImmutableReferenceGenomicRegion<Transcript> tr : transcripts) {
                double[] a = new double[tr.getRegion().getTotalLength()];
                for (Codon c : codons) {
                    if (tr.getRegion().containsUnspliced(c)) {
                        int p = tr.induce(c.getStart());

                        assert a[p] == 0;

                        if (!aremoved.contains(c))
                            a[p] = c.totalActivity;
                        if (c.isStop())
                            a[p] = -1;
                    }
                }
                for (int f = 0; f < 3; f++) {
                    int s = -1;
                    double sum = 0;

                    for (int p = f; p < a.length; p += 3) {
                        if (a[p] == -1) {//stop
                            if (sum > bestSum) {
                                bestSum = sum;
                                best = tr.getRegion().map(new ArrayGenomicRegion(s, p + 3));
                            }
                            s = -1;
                            sum = 0;
                        } else
                            sum += a[p];

                        if (a[p] > 0 && s == -1)
                            s = p;
                    }
                }
            }
            if (best != null) {
                ArrayList<Codon> cods = new ArrayList<>();
                int uniqueCodons = 0;
                double uniqueActivity = 0;
                double totalActivity = 0;

                for (Codon c : codons) {
                    if (best.containsUnspliced(c) && best.induce(c.getStart()) % 3 == 0) {
                        if (aremoved.add(c)) {
                            uniqueActivity += c.totalActivity;
                            uniqueCodons++;
                        }
                        totalActivity += c.totalActivity;
                        if (c.totalActivity > 0)
                            cods.add(c);
                    }
                }
                //                  System.out.println(codonsRegion.map(best));
                if ((uniqueCodons >= minUniqueCodons || uniqueCodons == cods.size())
                        && uniqueActivity > minUniqueActivity && totalActivity > minOrfTotalActivity) {

                    Collections.sort(cods);
                    usedForAnno.addAll(cods);

                    OrfWithCodons orf = new OrfWithCodons(index, 0, acount++, best.toArrayGenomicRegion(), cods,
                            true);
                    orfs.add(orf);
                }

            }
        } while (best != null);

        if (orfs.size() > 1) {

            // they are not necessarily connected!
            LinkedList<OrfWithCodons>[] connected = findConnectedOrfs(orfs);
            orfs.clear();

            for (LinkedList<OrfWithCodons> corfs : connected) {
                for (boolean changed = true; changed && corfs.size() > 1;) {
                    changed = false;

                    if (useEM)
                        inferOverlappingOrfActivitiesEM(corfs);
                    else
                        overlapUniqueCoverage(corfs);

                    Iterator<OrfWithCodons> it = corfs.iterator();
                    while (it.hasNext()) {
                        OrfWithCodons orf = it.next();
                        if (orf.getEstimatedTotalActivity() < minOrfTotalActivity) {
                            it.remove();
                            changed = true;
                        }
                    }
                }

                if (corfs.size() > 1)
                    distributeCodons(corfs);
                orfs.addAll(corfs);
            }
        }
        re.addAll(orfs);
    }

    // as edges only are represented in the splice graph, singleton codons are discarded (which does make sense anyway)
    for (Codon c : codons) {
        if (!c.isStop()) {
            // find unspliced successors (can be more than one, when the successor codon itself is spliced! all of them have the same start!)
            int max = c.getEnd() + maxAminoDist * 3;
            for (Codon n : codons
                    .getIntervalsIntersecting(c.getEnd(), c.getEnd() + maxAminoDist * 3, buff.startAndClear(c))
                    .get()) {
                if (!containsInframeStop(sequence.substring(c.getEnd(), n.getStart())))
                    fg.addInteraction(c, n);
                max = n.getStart() + 2;
            }

            // find all spliced successors for each splice junction that comes before n or maxAminoDist
            sg.forEachIntronStartingBetween(c.getEnd(), max + 1, intron -> {
                for (Codon n : codons.getIntervalsIntersecting(intron.getEnd(),
                        intron.getEnd() + maxAminoDist * 3 - (intron.getStart() - c.getEnd()),
                        buff.startAndClear(c, intron)).get())
                    if (!containsInframeStop(SequenceUtils.extractSequence(new ArrayGenomicRegion(c.getStart(),
                            intron.getStart(), intron.getEnd(), n.getStart()), sequence)))
                        fg.addInteraction(c, n, intron);
            });
        }
    }

    int cc = 1;
    for (SimpleDirectedGraph<Codon> g : fg.getWeaklyConnectedComponents()) {
        if (EI.wrap(g.getSources()).mapToDouble(c -> c.getTotalActivity()).sum() == 0)
            continue;

        // iterate longest paths in g
        LinkedList<Codon> topo = g.getTopologicalOrder();
        HashSet<Codon> remInTopo = new HashSet<Codon>(topo);
        remInTopo.removeIf(c -> !stopCodons.contains(c) && !usedForAnno.contains(c));
        HashSet<Codon> removed = new HashSet<Codon>(remInTopo);

        //         double maxPathScore = 0;

        LinkedList<OrfWithCodons> orfs = new LinkedList<OrfWithCodons>();

        int count = 0;
        while (removed.size() < topo.size()) {
            HashMap<Codon, MutablePair<GenomicRegion, Double>> longestPrefixes = new HashMap<Codon, MutablePair<GenomicRegion, Double>>();
            for (Codon c : topo)
                longestPrefixes.put(c, new MutablePair<GenomicRegion, Double>(c,
                        removed.contains(c) ? 0 : (c.getTotalActivity())));

            Codon longestEnd = null;
            HashMap<Codon, Codon> backtracking = new HashMap<Codon, Codon>();

            for (Codon c : topo) {
                //               if (codonsRegion.map(c).getStart()==100_466_118)
                //                  System.out.println(c);
                //               
                //               if (codonsRegion.map(c).getStart()==100_465_842)
                //                  System.out.println(c);

                double len = longestPrefixes.get(c).Item2;
                for (AdjacencyNode<Codon> n = g.getTargets(c); n != null; n = n.next) {
                    MutablePair<GenomicRegion, Double> pref = longestPrefixes.get(n.node);

                    double nnact = removed.contains(n.node) ? 0 : (n.node.getTotalActivity());
                    if (pref.Item2 <= len + nnact) {
                        pref.set(extendFullPath(longestPrefixes.get(c).Item1, c, n.node, n.getLabel()),
                                len + nnact);
                        backtracking.put(n.node, c);
                    }
                }
                if (longestEnd == null || longestPrefixes.get(longestEnd).Item2 <= len)
                    longestEnd = c;

            }

            // determine longest path by backtracking and mark all codons on the path as removed
            ArrayList<Codon> orfCodons = new ArrayList<Codon>();
            double totalActivity = 0;
            double uniqueActivity = 0;
            int uniqueCodons = 0;
            for (Codon c = longestEnd; c != null; c = backtracking.get(c)) {
                if (removed.add(c) && c.getTotalActivity() > 0) {
                    uniqueCodons++;
                    uniqueActivity += c.getTotalActivity();
                }

                if (c.getTotalActivity() > 0) // to remove dummy stop codons
                    orfCodons.add(c);
                totalActivity += c.getTotalActivity();
            }

            //            System.out.println(codonsRegion.map(longestPrefixes.get(longestEnd).Item1));

            if ((uniqueCodons >= minUniqueCodons || uniqueCodons == orfCodons.size())
                    && uniqueActivity > minUniqueActivity && totalActivity > minOrfTotalActivity) {
                Collections.reverse(orfCodons);

                MutablePair<GenomicRegion, Double> triple = longestPrefixes.get(longestEnd);
                ArrayGenomicRegion region = triple.Item1.toArrayGenomicRegion();
                String lastCodon = SequenceUtils.extractSequence(
                        region.map(
                                new ArrayGenomicRegion(region.getTotalLength() - 3, region.getTotalLength())),
                        sequence);

                OrfWithCodons orf = new OrfWithCodons(index, cc, count++, region, orfCodons,
                        stop.containsKey(lastCodon));
                orfs.add(orf);
            }

            //            maxPathScore = Math.max(maxPathScore,totalActivity);
        }

        if (orfs.size() > 1) {

            // they are not necessarily connected!

            LinkedList<OrfWithCodons>[] connected = findConnectedOrfs(orfs);
            orfs.clear();

            for (LinkedList<OrfWithCodons> corfs : connected) {
                for (boolean changed = true; changed && corfs.size() > 1;) {
                    changed = false;

                    if (useEM)
                        inferOverlappingOrfActivitiesEM(corfs);
                    else
                        overlapUniqueCoverage(corfs);

                    Iterator<OrfWithCodons> it = corfs.iterator();
                    while (it.hasNext()) {
                        OrfWithCodons orf = it.next();
                        if (orf.getEstimatedTotalActivity() < minOrfTotalActivity) {
                            it.remove();
                            changed = true;
                        }
                    }
                }

                if (corfs.size() > 1)
                    distributeCodons(corfs);
                orfs.addAll(corfs);
            }

        }

        re.addAll(orfs);

        cc++;
    }

    return re;

}

From source file: com.ikanow.aleph2.enrichment.utils.services.TestJsScriptEngineService.java

public void test_end2end(final String js_name) throws IOException {
    final ObjectMapper mapper = BeanTemplateUtils.configureMapper(Optional.empty());

    final String user_script = Resources.toString(Resources.getResource(js_name), Charsets.UTF_8);

    final JsScriptEngineService service_under_test = new JsScriptEngineService();

    final DataBucketBean bucket = Mockito.mock(DataBucketBean.class);
    //final IEnrichmentModuleContext context = Mockito.mock(IEnrichmentModuleContext.class);

    final LinkedList<ObjectNode> emitted = new LinkedList<>();
    final LinkedList<JsonNode> grouped = new LinkedList<>();
    final LinkedList<JsonNode> externally_emitted = new LinkedList<>();

    final IEnrichmentModuleContext context = Mockito.mock(IEnrichmentModuleContext.class, new Answer<Void>() {
        @SuppressWarnings("unchecked")
        public Void answer(InvocationOnMock invocation) {
            try {
                Object[] args = invocation.getArguments();
                assertTrue("Unexpected call to context object during test: " + invocation.getMethod().getName(),
                        invocation.getMethod().getName().equals("emitMutableObject")
                                || invocation.getMethod().getName().equals("externalEmit")
                                || invocation.getMethod().getName().equals("getLogger"));
                if (invocation.getMethod().getName().equals("emitMutableObject")) {
                    final Optional<JsonNode> grouping = (Optional<JsonNode>) args[3];
                    if (grouping.isPresent()) {
                        grouped.add(grouping.get());
                    }
                    emitted.add((ObjectNode) args[1]);
                } else if (invocation.getMethod().getName().equals("externalEmit")) {
                    final DataBucketBean to = (DataBucketBean) args[0];
                    final Either<JsonNode, Map<String, Object>> out = (Either<JsonNode, Map<String, Object>>) args[1];
                    externally_emitted.add(((ObjectNode) out.left().value()).put("bucket", to.full_name()));
                }
            } catch (Exception e) {
                fail(e.getMessage());
            }
            return null;
        }
    });

    final EnrichmentControlMetadataBean control = BeanTemplateUtils.build(EnrichmentControlMetadataBean.class)
            .with(EnrichmentControlMetadataBean::config,
                    new LinkedHashMap<String, Object>(ImmutableMap.<String, Object>builder()
                            .put("script", user_script)
                            .put("config", ImmutableMap.<String, Object>builder().put("test", "config").build())
                            .put("imports", Arrays.asList("underscore-min.js")).build()))
            .done().get();

    service_under_test.onStageInitialize(context, bucket, control,
            Tuples._2T(ProcessingStage.batch, ProcessingStage.grouping),
            Optional.of(Arrays.asList("test1", "test2")));

    final List<Tuple2<Long, IBatchRecord>> batch = Arrays
            .asList(new BatchRecord(mapper.readTree("{\"test\":\"1\"}")),
                    new BatchRecord(mapper.readTree("{\"test\":\"2\"}")),
                    new BatchRecord(mapper.readTree("{\"test\":\"3\"}")),
                    new BatchRecord(mapper.readTree("{\"test\":\"4\"}")),
                    new BatchRecord(mapper.readTree("{\"test\":\"5\"}")))
            .stream().<Tuple2<Long, IBatchRecord>>map(br -> Tuples._2T(0L, br)).collect(Collectors.toList());

    service_under_test.onObjectBatch(batch.stream(), Optional.of(5),
            Optional.of(mapper.readTree("{\"key\":\"static\"}")));
    assertEquals(20, emitted.size());
    emitted.stream().forEach(on -> {
        if (on.has("len"))
            assertEquals(5, on.get("len").asInt());
        else if (on.has("grouping_key"))
            assertEquals("{\"key\":\"static\"}", on.get("grouping_key").toString());
        else if (on.has("prev")) {
            assertEquals("batch", on.get("prev").asText());
            assertEquals("grouping", on.get("next").asText());
            assertEquals("{\"test\":\"config\"}", on.get("config").toString());
            assertEquals(2, on.get("groups").size());
            //DEBUG
            //System.out.println(on.toString());
        } else {
            fail("missing field" + on.toString());
        }
    });

    assertEquals(5, grouped.size());
    assertTrue(grouped.stream().map(j -> j.toString()).allMatch(s -> s.equals("{\"key\":\"static\"}")));
    assertEquals(5, externally_emitted.size());

    // Finally, check cloning

    final IEnrichmentBatchModule service_under_test_2 = service_under_test.cloneForNewGrouping();

    final List<Tuple2<Long, IBatchRecord>> batch2 = Arrays
            .asList(new BatchRecord(mapper.readTree("{\"test\":\"1\"}")),
                    new BatchRecord(mapper.readTree("{\"test\":\"2\"}")),
                    new BatchRecord(mapper.readTree("{\"test\":\"3\"}")),
                    new BatchRecord(mapper.readTree("{\"test\":\"4\"}")),
                    new BatchRecord(mapper.readTree("{\"test\":\"5\"}")))
            .stream().<Tuple2<Long, IBatchRecord>>map(br -> Tuples._2T(0L, br)).collect(Collectors.toList());

    emitted.clear();
    assertEquals(0, emitted.size());
    service_under_test_2.onObjectBatch(batch2.stream(), Optional.empty(), Optional.empty());
    assertEquals(20, emitted.size());
    emitted.stream().forEach(on -> {
        //DEBUG
        //System.out.println(on.toString());

        assertFalse("Wrong format: " + on.toString(), on.has("len"));
        assertFalse("Wrong format: " + on.toString(), on.has("grouping_key"));
        if (on.has("prev")) {
            assertEquals("batch", on.get("prev").asText());
            assertEquals("grouping", on.get("next").asText());
            assertEquals("{\"test\":\"config\"}", on.get("config").toString());
            assertEquals(2, on.get("groups").size());
        }
    });

}
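
In the test above, emitted.clear() resets the capture list between the first run and the cloned service's run, so the second set of assertions starts from an empty list. A stripped-down illustration of that reset-between-phases use of clear(), assuming JUnit 4 as in the example:

import static org.junit.Assert.assertEquals;

import java.util.LinkedList;

import org.junit.Test;

public class CaptureListTest {
    @Test
    public void reusesCaptureListAcrossPhases() {
        LinkedList<String> captured = new LinkedList<String>();

        captured.add("first-phase-event");
        assertEquals(1, captured.size());

        captured.clear(); // reset before the second phase
        assertEquals(0, captured.size());

        captured.add("second-phase-event");
        assertEquals(1, captured.size());
    }
}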

From source file: hudson.plugins.project_inheritance.projects.InheritanceProject.java

public Map<InheritanceProject, Relationship> getRelationships() {
    Object obj = onInheritChangeBuffer.get(this, "getRelationships");
    if (obj != null && obj instanceof Map) {
        return (Map) obj;
    }

    //Creating the returned map and pre-filling it with empty lists
    Map<InheritanceProject, Relationship> map = new HashMap<InheritanceProject, Relationship>();

    //Preparing the set of projects that were already explored
    HashSet<String> seenProjects = new HashSet<String>();

    //Fetching the map of all projects and their connections
    Map<String, ProjectGraphNode> connGraph = getConnectionGraph();

    //Fetching the node for the current (this) project
    ProjectGraphNode node = connGraph.get(this.getName());
    if (node == null) {
        return map;
    }

    //Mates can be filled quite easily
    for (String mate : node.mates) {
        InheritanceProject p = InheritanceProject.getProjectByName(mate);
        ProjectGraphNode mateNode = connGraph.get(mate);
        boolean isLeaf = (mateNode == null) ? true : mateNode.children.isEmpty();
        if (p == null) {
            continue;
        }
        //Checking if we've seen this mate already
        if (!seenProjects.contains(p.getName())) {
            map.put(p, new Relationship(Relationship.Type.MATE, 0, isLeaf));
            seenProjects.add(p.getName());
        }
    }

    //Exploring parents
    int distance = 1;
    seenProjects.clear();
    LinkedList<InheritanceProject> cOpen = new LinkedList<InheritanceProject>();
    LinkedList<InheritanceProject> nOpen = new LinkedList<InheritanceProject>();
    cOpen.add(this);
    while (!cOpen.isEmpty()) {
        InheritanceProject ip = cOpen.pop();
        if (ip == null || seenProjects.contains(ip.getName())) {
            continue;
        }
        seenProjects.add(ip.getName());

        node = connGraph.get(ip.getName());
        if (ip == null || node == null) {
            continue;
        }
        //Adding all parents
        for (String parent : node.parents) {
            InheritanceProject par = InheritanceProject.getProjectByName(parent);
            if (par == null || seenProjects.contains(parent)) {
                continue;
            }
            map.put(par, new Relationship(Relationship.Type.PARENT, distance, false));
            nOpen.push(par);
        }
        if (cOpen.isEmpty() && !nOpen.isEmpty()) {
            cOpen = nOpen;
            nOpen = new LinkedList<InheritanceProject>();
            distance++;
        }
    }

    //Exploring children
    distance = 1;
    seenProjects.clear();
    cOpen.clear();
    nOpen.clear();
    cOpen.add(this);
    while (!cOpen.isEmpty()) {
        InheritanceProject ip = cOpen.pop();
        if (ip == null || seenProjects.contains(ip.getName())) {
            continue;
        }
        seenProjects.add(ip.getName());

        node = connGraph.get(ip.getName());
        if (ip == null || node == null) {
            continue;
        }
        //Adding all children
        for (String child : node.children) {
            InheritanceProject cProj = InheritanceProject.getProjectByName(child);
            if (cProj == null || seenProjects.contains(child)) {
                continue;
            }
            ProjectGraphNode childNode = connGraph.get(child);
            boolean isLeaf = (childNode == null) ? true : childNode.children.isEmpty();
            map.put(cProj, new Relationship(Relationship.Type.CHILD, distance, isLeaf));
            nOpen.push(cProj);
        }
        if (cOpen.isEmpty() && !nOpen.isEmpty()) {
            cOpen = nOpen;
            nOpen = new LinkedList<InheritanceProject>();
            distance++;
        }
    }

    onInheritChangeBuffer.set(this, "getRelationships", map);
    return map;
}
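
getRelationships() above uses two LinkedLists as breadth-first search frontiers and calls clear() to reset them before exploring in the other direction (children after parents). A stripped-down sketch of that frontier pattern, with a hypothetical graph map standing in for the connection graph, is:

import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;

class FrontierDemo {
    // Counts how many levels of the graph are reachable from 'start'.
    // 'current' holds this level's frontier; 'next' collects the following one.
    static int countLevels(Map<String, List<String>> graph, String start) {
        HashSet<String> seen = new HashSet<String>();
        LinkedList<String> current = new LinkedList<String>();
        LinkedList<String> next = new LinkedList<String>();
        int levels = 0;

        current.add(start);
        while (!current.isEmpty()) {
            String node = current.pop();
            if (!seen.add(node)) {
                continue;
            }
            for (String n : graph.getOrDefault(node, Collections.<String>emptyList())) {
                if (!seen.contains(n)) {
                    next.push(n);
                }
            }
            if (current.isEmpty() && !next.isEmpty()) {
                current.addAll(next);
                next.clear(); // reuse the same list as the frontier for the next level
                levels++;
            }
        }
        return levels;
    }
}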

From source file: org.pdfsam.plugin.coverfooter.listeners.RunButtonActionListener.java

public void actionPerformed(ActionEvent e) {

    if (WorkExecutor.getInstance().getRunningThreads() > 0 || panel.getSelectionPanel().isAdding()) {
        DialogUtility.showWarningAddingDocument(panel);
        return;
    }
    PdfSelectionTableItem[] items = panel.getSelectionPanel().getTableRows();
    if (ArrayUtils.isEmpty(items)) {
        DialogUtility.showWarningNoDocsSelected(panel, DialogUtility.AT_LEAST_ONE_DOC);
        return;
    }

    LinkedList<String> args = new LinkedList<String>();
    LinkedList<String> args1 = new LinkedList<String>();
    LinkedList<String> argsFooter = new LinkedList<String>();
    // validation and permission check are demanded
    try {
        if (panel.getOutputCompressedCheck().isSelected()) {
            args.add("-" + ConcatParsedCommand.COMPRESSED_ARG);
        }
        if (panel.getMergeTypeCheck().isSelected()) {
            args.add("-" + ConcatParsedCommand.COPYFIELDS_ARG);
        }

        args.add("-" + ConcatParsedCommand.PDFVERSION_ARG);
        args.add(((StringItem) panel.getVersionCombo().getSelectedItem()).getId());

        PdfSelectionTableItem[] coveritems = panel.getCoverSelectionPanel().getTableRows();
        PdfSelectionTableItem[] footeritems = panel.getFooterSelectionPanel().getTableRows();
        String coverSelectionString = "";
        // manage cover
        if ((coveritems == null || coveritems.length != 1)
                && (footeritems == null || footeritems.length != 1)) {
            JOptionPane.showMessageDialog(panel,
                    GettextResource.gettext(Configuration.getInstance().getI18nResourceBundle(),
                            "Select at least one cover or one footer"),
                    GettextResource.gettext(Configuration.getInstance().getI18nResourceBundle(), "Warning"),
                    JOptionPane.WARNING_MESSAGE);
        } else {

            // overwrite confirmation
            if (panel.getOverwriteCheckbox().isSelected()
                    && Configuration.getInstance().isAskOverwriteConfirmation()) {
                int dialogRet = DialogUtility.askForOverwriteConfirmation(panel);
                if (JOptionPane.NO_OPTION == dialogRet) {
                    panel.getOverwriteCheckbox().setSelected(false);
                } else if (JOptionPane.CANCEL_OPTION == dialogRet) {
                    return;
                }
            }
            if (panel.getOverwriteCheckbox().isSelected()) {
                args.add("-" + ConcatParsedCommand.OVERWRITE_ARG);
            }

            if ((coveritems != null && coveritems.length == 1)) {
                PdfSelectionTableItem coveritem = coveritems[0];
                String coverSelection = (coveritem.getPageSelection() != null
                        && coveritem.getPageSelection().length() > 0) ? coveritem.getPageSelection()
                                : CoverFooterMainGUI.ALL_STRING;
                if (coverSelection.trim().length() > 0 && coverSelection.indexOf(",") != 0) {
                    String[] selectionsArray = coverSelection.split(",");
                    for (int j = 0; j < selectionsArray.length; j++) {
                        String tmpString = selectionsArray[j].trim();
                        if ((tmpString != null) && (!tmpString.equals(""))) {
                            args.add("-" + ConcatParsedCommand.F_ARG);
                            String f = coveritem.getInputFile().getAbsolutePath();
                            if ((coveritem.getPassword()) != null && (coveritem.getPassword()).length() > 0) {
                                log.debug(GettextResource.gettext(
                                        Configuration.getInstance().getI18nResourceBundle(),
                                        "Found a password for input file."));
                                f += ":" + coveritem.getPassword();
                            }
                            args.add(f);
                            coverSelectionString += (tmpString.matches("[\\d]+"))
                                    ? tmpString + "-" + tmpString + ":"
                                    : tmpString + ":";
                        }
                    }

                } else {
                    args.add("-" + ConcatParsedCommand.F_ARG);
                    String f = coveritem.getInputFile().getAbsolutePath();
                    if ((coveritem.getPassword()) != null && (coveritem.getPassword()).length() > 0) {
                        log.debug(GettextResource.gettext(Configuration.getInstance().getI18nResourceBundle(),
                                "Found a password for input file."));
                        f += ":" + coveritem.getPassword();
                    }
                    args.add(f);
                    coverSelectionString += (coverSelection.matches("[\\d]+"))
                            ? coverSelection + "-" + coverSelection + ":"
                            : coverSelection + ":";
                }
            }
            String footerSelectionString = "";
            // manage footer
            if ((footeritems != null && footeritems.length == 1)) {
                PdfSelectionTableItem footeritem = footeritems[0];
                String footerSelection = (footeritem.getPageSelection() != null
                        && footeritem.getPageSelection().length() > 0) ? footeritem.getPageSelection()
                                : CoverFooterMainGUI.ALL_STRING;
                if (footerSelection.trim().length() > 0 && footerSelection.indexOf(",") != 0) {
                    String[] selectionsArray = footerSelection.split(",");
                    for (int j = 0; j < selectionsArray.length; j++) {
                        String tmpString = selectionsArray[j].trim();
                        if ((tmpString != null) && (!tmpString.equals(""))) {
                            argsFooter.add("-" + ConcatParsedCommand.F_ARG);
                            String footerItem = footeritem.getInputFile().getAbsolutePath();
                            if ((footeritem.getPassword()) != null && (footeritem.getPassword()).length() > 0) {
                                log.debug(GettextResource.gettext(
                                        Configuration.getInstance().getI18nResourceBundle(),
                                        "Found a password for input file."));
                                footerItem += ":" + footeritem.getPassword();
                            }
                            argsFooter.add(footerItem);
                            footerSelectionString += (tmpString.matches("[\\d]+"))
                                    ? tmpString + "-" + tmpString + ":"
                                    : tmpString + ":";
                        }
                    }

                } else {
                    argsFooter.add("-" + ConcatParsedCommand.F_ARG);
                    String footerItem = footeritem.getInputFile().getAbsolutePath();
                    if ((footeritem.getPassword()) != null && (footeritem.getPassword()).length() > 0) {
                        log.debug(GettextResource.gettext(Configuration.getInstance().getI18nResourceBundle(),
                                "Found a password for input file."));
                        footerItem += ":" + footeritem.getPassword();
                    }
                    argsFooter.add(footerItem);
                    footerSelectionString += (footerSelection.matches("[\\d]+"))
                            ? footerSelection + "-" + footerSelection + ":"
                            : footerSelection + ":";
                }
            }
            // selection page
            PdfSelectionTableItem item = null;
            for (int i = 0; i < items.length; i++) {
                String pageSelectionString = coverSelectionString;
                try {
                    args1.clear();
                    args1.addAll(args);

                    item = items[i];
                    String pageSelection = (item.getPageSelection() != null
                            && item.getPageSelection().length() > 0) ? item.getPageSelection()
                                    : CoverFooterMainGUI.ALL_STRING;
                    if (pageSelection.trim().length() > 0 && pageSelection.indexOf(",") != 0) {
                        String[] selectionsArray = pageSelection.split(",");
                        for (int j = 0; j < selectionsArray.length; j++) {
                            String tmpString = selectionsArray[j].trim();
                            if ((tmpString != null) && (!tmpString.equals(""))) {
                                args1.add("-" + ConcatParsedCommand.F_ARG);
                                String f = item.getInputFile().getAbsolutePath();
                                if ((item.getPassword()) != null && (item.getPassword()).length() > 0) {
                                    log.debug(GettextResource.gettext(
                                            Configuration.getInstance().getI18nResourceBundle(),
                                            "Found a password for input file."));
                                    f += ":" + item.getPassword();
                                }
                                args1.add(f);
                                pageSelectionString += (tmpString.matches("[\\d]+"))
                                        ? tmpString + "-" + tmpString + ":"
                                        : tmpString + ":";
                            }
                        }

                    } else {
                        args1.add("-" + ConcatParsedCommand.F_ARG);
                        String f = item.getInputFile().getAbsolutePath();
                        if ((item.getPassword()) != null && (item.getPassword()).length() > 0) {
                            log.debug(
                                    GettextResource.gettext(Configuration.getInstance().getI18nResourceBundle(),
                                            "Found a password for input file."));
                            f += ":" + item.getPassword();
                        }
                        args1.add(f);
                        pageSelectionString += (pageSelection.matches("[\\d]+"))
                                ? pageSelection + "-" + pageSelection + ":"
                                : pageSelection + ":";
                    }

                    args1.addAll(argsFooter);
                    args1.add("-" + ConcatParsedCommand.U_ARG);
                    args1.add(pageSelectionString + footerSelectionString);

                    // manage output destination option
                    args1.add("-" + ConcatParsedCommand.O_ARG);
                    if (StringUtils.isEmpty(panel.getDestinationTextField().getText())) {
                        String suggestedDir = getSuggestedDestinationDirectory(items[items.length - 1]);
                        int chosenOpt = DialogUtility.showConfirmOuputLocationDialog(panel, suggestedDir);
                        if (JOptionPane.YES_OPTION == chosenOpt) {
                            panel.getDestinationTextField().setText(suggestedDir);
                        } else if (JOptionPane.CANCEL_OPTION == chosenOpt) {
                            return;
                        }
                    }
                    if (panel.getDestinationTextField().getText().length() > 0) {
                        args1.add(panel.getDestinationTextField().getText() + File.separator
                                + item.getInputFile().getName());
                    }

                    args1.add(AbstractParsedCommand.COMMAND_CONCAT);

                    WorkExecutor.getInstance().execute(new WorkThread(args1.toArray(new String[args1.size()])));
                } catch (Exception ex) {
                    log.error(GettextResource.gettext(Configuration.getInstance().getI18nResourceBundle(),
                            "Error: "), ex);
                }
            }
        }
    } catch (Exception ex) {
        log.error(GettextResource.gettext(Configuration.getInstance().getI18nResourceBundle(), "Error: "), ex);
        SoundPlayer.getInstance().playErrorSound();
    }

}
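
The handler above reuses a single LinkedList as a per-iteration argument buffer: each pass through the loop calls args1.clear(), re-seeds the list with addAll(args), and then appends the item-specific options before launching the work thread. A minimal, self-contained sketch of that clear-and-refill pattern (all names below are illustrative, not taken from the source):

import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;

public class ArgBufferSketch {
    public static void main(String[] args) {
        // shared base arguments, built once outside the loop
        List<String> baseArgs = Arrays.asList("-f", "input.pdf");
        LinkedList<String> buffer = new LinkedList<String>();
        for (String extra : new String[] { "one.pdf", "two.pdf" }) {
            buffer.clear();           // drop the previous iteration's contents
            buffer.addAll(baseArgs);  // start again from the shared base arguments
            buffer.add(extra);        // then add the per-item option
            System.out.println(buffer);
        }
    }
}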

From source file:com.peterbochs.PeterBochsDebugger.java

private void jSaveBreakpointButtonActionPerformed(ActionEvent evt) {
    jSaveBreakpointButton.setEnabled(false);
    LinkedList<Breakpoint> v = Setting.getInstance().getBreakpoint();
    v.clear();

    for (int x = 0; x < this.breakpointTable.getRowCount(); x++) {
        Breakpoint h = new Breakpoint();
        h.setNo(x);
        h.setType(this.breakpointTable.getValueAt(x, 0).toString());
        h.setEnable(this.breakpointTable.getValueAt(x, 1).toString());
        h.setAddress(this.breakpointTable.getValueAt(x, 2).toString());
        h.setHit(Integer.parseInt(this.breakpointTable.getValueAt(x, 3).toString()));
        v.add(h);
    }
    Setting.getInstance().save();
    jSaveBreakpointButton.setEnabled(true);
}
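
Note that clear() empties the shared breakpoint list in place rather than swapping in a new list, so anything else holding the same LinkedList reference returned by Setting.getInstance() sees the refreshed contents. A hedged sketch of that in-place refresh, using plain strings as stand-ins for the Breakpoint objects:

import java.util.LinkedList;

public class InPlaceRefreshSketch {
    public static void main(String[] args) {
        LinkedList<String> shared = new LinkedList<String>();
        shared.add("stale-breakpoint");

        LinkedList<String> sameList = shared; // another holder of the same reference

        shared.clear();                 // empty the list in place
        shared.add("fresh-breakpoint"); // repopulate from the current table state

        System.out.println(sameList);   // prints [fresh-breakpoint]: both references see it
    }
}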

From source file:org.commoncrawl.service.crawler.CrawlLog.java

private void flushLog(final FlushCompletionCallback completionCallback) {
    if (Environment.detailLogEnabled())
        LOG.info("LOG_FLUSH:Collecting Entries....");
    // set flush in progress indicator ...
    setFlushInProgress(true);
    // and collect buffers in async thread context (thus not requiring
    // synchronization)
    final LinkedList<CrawlSegmentLog.LogItemBuffer> collector = new LinkedList<CrawlSegmentLog.LogItemBuffer>();
    // flush robots log
    _robotsSegment.flushLog(collector);
    // walk segments collecting log items ....
    for (CrawlSegmentLog logger : _loggers.values()) {
        // flush any log items into the collector
        logger.flushLog(collector);
    }
    if (Environment.detailLogEnabled())
        LOG.info("LOG_FLUSH:Collection Returned " + collector.size() + " Buffers");

    // walk collector list identifying the list of unique segment ids
    final Set<Long> packedSegmentIdSet = new HashSet<Long>();

    int urlItemCount = 0;

    for (CrawlSegmentLog.LogItemBuffer buffer : collector) {
        if (buffer.getListId() != -1 && buffer.getSegmentId() != -1) {
            packedSegmentIdSet.add(makeSegmentLogId(buffer.getListId(), buffer.getSegmentId()));
        }
        urlItemCount += buffer.getItemCount();
    }

    if (Environment.detailLogEnabled())
        LOG.info("LOG_FLUSH:There are  " + urlItemCount + " Items in Flush Buffer Associated With "
                + packedSegmentIdSet.size() + " Segments");

    final File crawlLogFile = getActivePath(_rootDirectory);

    // now check to see if there is anything to do ...
    if (collector.size() != 0) {
        if (Environment.detailLogEnabled())
            LOG.info("LOG_FLUSH: Collector Size is NOT Zero... Starting Log Flusher Thread");
        // ok ... time to spawn a thread to do the blocking flush io
        _threadPool.submit(new ConcurrentTask<Boolean>(_eventLoop,

                new Callable<Boolean>() {

                    public Boolean call() throws Exception {

                        if (Environment.detailLogEnabled())
                            LOG.info("LOG_FLUSH: Log Flusher Thread Started");
                        long startTime = System.currentTimeMillis();

                        Map<Long, DataOutputStream> streamsMapByPackedId = new HashMap<Long, DataOutputStream>();
                        Map<Long, Integer> recordCountsByPackedId = new HashMap<Long, Integer>();

                        long crawlLogRecordCount = 0;

                        // open the actual crawler log file ...
                        final DataOutputStream crawlLogStream = new DataOutputStream(
                                new FileOutputStream(crawlLogFile, true));

                        try {
                            if (Environment.detailLogEnabled())
                                LOG.info(
                                        "LOG_FLUSH: Log Flusher Thread Opening Streams for Segments in Buffer");
                            // now open a set of file descriptors related to the identified
                            // segments
                            for (long packedSegmentId : packedSegmentIdSet) {
                                // construct the unique filename for the given log file...
                                File activeSegmentLog = CrawlSegmentLog.buildActivePath(_rootDirectory,
                                        getListIdFromLogId(packedSegmentId),
                                        getSegmentIdFromLogId(packedSegmentId));
                                // initialize the segment log ...
                                CrawlSegmentLog.initializeLogFile(activeSegmentLog);
                                // initialize record counts per stream ...
                                recordCountsByPackedId.put(packedSegmentId,
                                        CrawlSegmentLog.readerHeader(activeSegmentLog));
                                // and open an output stream for the specified log file ...
                                streamsMapByPackedId.put(packedSegmentId,
                                        new DataOutputStream(new FileOutputStream(activeSegmentLog, true)));
                            }

                            if (Environment.detailLogEnabled())
                                LOG.info("LOG_FLUSH: Log Flusher Thread Walking Items in Buffer");

                            // initialize a total item count variable
                            int totalItemCount = 0;

                            // crawl history stream
                            DataOutputBuffer historyStream = new DataOutputBuffer();

                            // and now walk log buffers ...
                            for (CrawlSegmentLog.LogItemBuffer buffer : collector) {
                                if (Environment.detailLogEnabled())
                                    LOG.info("LOG_FLUSH: Log Flusher Thread Writing " + buffer.getItemCount()
                                            + " Entries for Segment:" + buffer.getSegmentId());

                                // output stream
                                DataOutputStream segmentLogStream = null;

                                if (buffer.getListId() != -1 && buffer.getSegmentId() != -1) {
                                    // update segment count first ...
                                    recordCountsByPackedId.put(
                                            makeSegmentLogId(buffer.getListId(), buffer.getSegmentId()),
                                            recordCountsByPackedId.get(
                                                    makeSegmentLogId(buffer.getListId(), buffer.getSegmentId()))
                                                    + buffer.getItemCount());
                                    // get output stream associated with segment id
                                    segmentLogStream = streamsMapByPackedId
                                            .get(makeSegmentLogId(buffer.getListId(), buffer.getSegmentId()));
                                }

                                // and our local record counter ...
                                crawlLogRecordCount += buffer.getItemCount();

                                // and next do the actual disk flush ...
                                totalItemCount += buffer.flushToDisk(totalItemCount,

                                        new CrawlSegmentLog.LogItemBuffer.CrawlURLWriter() {

                                            SyncedCrawlURLLogWriter syncedLogWriter = new SyncedCrawlURLLogWriter();

                                            public void writeItem(CrawlURL url) throws IOException {
                                                // log it
                                                logCrawlLogWrite(url, url.getContentSize());
                                                // write it
                                                syncedLogWriter.writeItem(crawlLogStream, url);
                                            }

                                            public void writeItemCount(int entryCount) throws IOException {
                                            }

                                        }, segmentLogStream, historyStream);
                            }

                            if (Environment.detailLogEnabled())
                                LOG.info("LOG_FLUSH: Log Flusher Finished Writing Entries To Disk");
                            collector.clear();

                        } catch (IOException e) {
                            LOG.error("Critical Exception during Crawl Log Flush:"
                                    + CCStringUtils.stringifyException(e));
                            throw e;
                        } finally {
                            if (crawlLogStream != null) {
                                crawlLogStream.flush();
                                crawlLogStream.close();
                            }

                            for (DataOutputStream stream : streamsMapByPackedId.values()) {
                                if (stream != null)
                                    stream.flush();
                                stream.close();
                            }
                        }
                        // at this point... update the crawl log header ...
                        try {
                            if (Environment.detailLogEnabled())
                                LOG.info("LOG_FLUSH: Updating Log File Headers");
                            // update the log file header
                            updateLogFileHeader(crawlLogFile, _header, crawlLogRecordCount);
                            // and update each completion log header ...
                            for (long packedSegmentId : recordCountsByPackedId.keySet()) {
                                File activeSegmentLogPath = CrawlSegmentLog.buildActivePath(_rootDirectory,
                                        getListIdFromLogId(packedSegmentId),
                                        getSegmentIdFromLogId(packedSegmentId));
                                CrawlSegmentLog.writeHeader(activeSegmentLogPath,
                                        recordCountsByPackedId.get(packedSegmentId));
                            }
                        } catch (IOException e) {
                            LOG.error("Criticial Exception during Crawl Log Fluhs:"
                                    + CCStringUtils.stringifyException(e));
                            throw e;
                        }

                        long endTime = System.currentTimeMillis();

                        _flushTimeAVG.addSample((double) endTime - startTime);
                        _flushTimeSmoothed.addSample((double) endTime - startTime);
                        _lastFlushTime = endTime - startTime;

                        LOG.info("LOG_FLUSH: Log Flusher Flushed Successfully");
                        return true;
                    }
                },

                new CompletionCallback<Boolean>() {

                    public void taskComplete(Boolean updateResult) {
                        setFlushInProgress(false);
                        if (completionCallback != null) {
                            completionCallback.flushComplete();
                        }
                    }

                    public void taskFailed(Exception e) {

                        setFlushInProgress(false);

                        if (completionCallback != null) {
                            completionCallback.flushFailed(e);
                        }

                        // all failures are critical in this particular task ...
                        LOG.fatal("Crawl Log FLUSH Threw Exception:" + CCStringUtils.stringifyException(e));

                        // no matter ... it is time to CORE the server ...
                        throw new RuntimeException("CRITICAL FAILURE: Crawl Log FLUSH Threw Exception:"
                                + CCStringUtils.stringifyException(e));

                    }
                }));
    } else {
        setFlushInProgress(false);
        if (completionCallback != null) {
            completionCallback.flushComplete();
        }
    }
}
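
flushLog gathers per-segment log buffers into a LinkedList, hands the (effectively final) list to a background flush task, and calls collector.clear() once every buffer has been written, so the flushed data no longer stays reachable and can be garbage collected. A simplified sketch of that collect, flush, then clear life cycle, with placeholder types in place of the crawler's log buffers:

import java.util.LinkedList;

public class CollectFlushClearSketch {
    public static void main(String[] args) {
        final LinkedList<byte[]> collector = new LinkedList<byte[]>();
        collector.add(new byte[1024]);
        collector.add(new byte[2048]);

        // only do work if something was collected, mirroring the size check above
        if (!collector.isEmpty()) {
            for (byte[] buffer : collector) {
                // stand-in for the real disk write
                System.out.println("flushing " + buffer.length + " bytes");
            }
            collector.clear(); // release the flushed buffers for garbage collection
        }
    }
}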

From source file:com.primovision.lutransport.service.ImportMainSheetServiceImpl.java

@Override
public List<LinkedList<Object>> importVendorSpecificFuelLog(InputStream is,
        LinkedHashMap<String, String> vendorSpecificColumns, Long vendor,
        HashMap<String, Object> additionalVendorData) throws Exception {
    List<LinkedList<Object>> data = new ArrayList<LinkedList<Object>>();
    try {
        POIFSFileSystem fs = new POIFSFileSystem(is);

        HSSFWorkbook wb = new HSSFWorkbook(fs);
        Sheet sheet = wb.getSheetAt(0);
        Row titleRow = sheet.getRow(sheet.getFirstRowNum());

        LinkedHashMap<String, Integer> orderedColIndexes = getOrderedColumnIndexes(titleRow,
                vendorSpecificColumns);
        Set<Entry<String, Integer>> keySet = orderedColIndexes.entrySet();

        System.out.println("Physical number of rows in Excel = " + sheet.getPhysicalNumberOfRows());
        System.out.println("While reading values from vendor specific Excel Sheet: ");

        Map<String, Object> criterias = new HashMap<String, Object>();
        criterias.put("id", vendor);
        FuelVendor fuelVendor = genericDAO.findByCriteria(FuelVendor.class, criterias, "name", false).get(0);

        boolean stopParsing = false;
        for (int i = titleRow.getRowNum() + 1; !stopParsing && i <= sheet.getPhysicalNumberOfRows() - 1; i++) {
            LinkedList<Object> rowObjects = new LinkedList<Object>();

            rowObjects.add(fuelVendor.getName());
            rowObjects.add(fuelVendor.getCompany().getName());

            Row row = sheet.getRow(i);

            Iterator<Entry<String, Integer>> iterator = keySet.iterator();
            while (iterator.hasNext()) {
                Entry<String, Integer> entry = iterator.next();

                // corresponding column not found in actual column list, find in additionalVendorData
                if (entry.getValue() == -1) {
                    System.out.println("Additional vendor data = " + additionalVendorData);
                    System.out.println("Column " + entry.getKey()
                            + " not found in Vendor Excel, checking in additionalVendorData");
                    Object cellValueObj = additionalVendorData.get(entry.getKey());
                    if (cellValueObj != null) {
                        rowObjects.add(cellValueObj);
                    } else {
                        rowObjects.add(StringUtils.EMPTY);
                    }
                    continue;
                }

                Object cellValueObj = getCellValue((HSSFCell) row.getCell(entry.getValue()), true);
                if (cellValueObj != null && cellValueObj.toString().equalsIgnoreCase("END_OF_DATA")) {
                    System.out.println("Received END_OF_DATA");
                    stopParsing = true;
                    rowObjects.clear();
                    break;
                }
                rowObjects.add(cellValueObj);
            }

            if (!stopParsing) {
                data.add(rowObjects);
            }
        }

    } catch (IOException e) {
        // log the parse failure and return whatever rows were read so far
        e.printStackTrace();
    }
    return data;
}
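
The importer discards a partially built row by calling rowObjects.clear() when it hits the END_OF_DATA sentinel, so the incomplete row never reaches the result list. A compact sketch of that discard-on-sentinel use of clear(); the sentinel string matches the source above, while the cell values are made up:

import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;

public class DiscardOnSentinelSketch {
    public static void main(String[] args) {
        List<LinkedList<Object>> data = new ArrayList<LinkedList<Object>>();
        String[][] rows = { { "a", "b" }, { "c", "END_OF_DATA" } };

        boolean stopParsing = false;
        for (String[] cells : rows) {
            LinkedList<Object> rowObjects = new LinkedList<Object>();
            for (String cell : cells) {
                if ("END_OF_DATA".equalsIgnoreCase(cell)) {
                    stopParsing = true;
                    rowObjects.clear(); // drop the partially built row
                    break;
                }
                rowObjects.add(cell);
            }
            if (stopParsing) {
                break;
            }
            data.add(rowObjects);
        }
        System.out.println(data); // prints [[a, b]]: the sentinel row was discarded
    }
}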