Example usage for java.util LinkedList size


Introduction

This page collects real-world usage examples of java.util.LinkedList size(), drawn from open-source projects.

Prototype

public int size()

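Before the full examples, here is a minimal, self-contained sketch (class name and values are illustrative, not from any of the projects below) of what size() reports as a LinkedList grows and shrinks:

import java.util.LinkedList;

public class LinkedListSizeDemo {
    public static void main(String[] args) {
        LinkedList<String> items = new LinkedList<>();
        System.out.println(items.size()); // 0: a new list is empty

        items.add("a");
        items.addLast("b");
        System.out.println(items.size()); // 2: size() counts every element

        items.removeFirst();
        System.out.println(items.size()); // 1: removals are reflected immediately
    }
}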

Usage

From source file:com.google.cloud.dns.testing.LocalDnsHelper.java

/**
 * Lists record sets for a zone. Next page token is the ID of the last record listed.
 */
@VisibleForTesting
Response listDnsRecords(String projectId, String zoneName, String query) {
    Map<String, Object> options = OptionParsers.parseListDnsRecordsOptions(query);
    Response response = checkListOptions(options);
    if (response != null) {
        return response;
    }
    ZoneContainer zoneContainer = findZone(projectId, zoneName);
    if (zoneContainer == null) {
        return Error.NOT_FOUND.response(
                String.format("The 'parameters.managedZone' resource named '%s' does not exist.", zoneName));
    }
    ImmutableSortedMap<String, ResourceRecordSet> dnsRecords = zoneContainer.dnsRecords().get();
    String[] fields = (String[]) options.get("fields");
    String name = (String) options.get("name");
    String type = (String) options.get("type");
    String pageToken = (String) options.get("pageToken");
    ImmutableSortedMap<String, ResourceRecordSet> fragment = pageToken != null
            ? dnsRecords.tailMap(pageToken, false)
            : dnsRecords;
    Integer maxResults = options.get("maxResults") == null ? null
            : Integer.valueOf((String) options.get("maxResults"));
    boolean sizeReached = false;
    boolean hasMorePages = false;
    LinkedList<String> serializedRrsets = new LinkedList<>();
    String lastRecordId = null;
    for (String recordSetId : fragment.keySet()) {
        ResourceRecordSet recordSet = fragment.get(recordSetId);
        if (matchesCriteria(recordSet, name, type)) {
            if (sizeReached) {
                // we do not add this, just note that there would be more and there should be a token
                hasMorePages = true;
                break;
            } else {
                lastRecordId = recordSetId;
                try {
                    serializedRrsets
                            .addLast(jsonFactory.toString(OptionParsers.extractFields(recordSet, fields)));
                } catch (IOException e) {
                    return Error.INTERNAL_ERROR.response(String.format(
                            "Error when serializing resource record set in managed zone %s in project %s",
                            zoneName, projectId));
                }
            }
        }
        sizeReached = maxResults != null && maxResults.equals(serializedRrsets.size());
    }
    boolean includePageToken = hasMorePages
            && (fields == null || Arrays.asList(fields).contains("nextPageToken"));
    return toListResponse(serializedRrsets, "rrsets", lastRecordId, includePageToken);
}
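
The page-capping idiom above is the part relevant to size(): the loop keeps appending serialized records until the LinkedList's size() equals maxResults, and one further matching record only flips hasMorePages instead of being added. A minimal, self-contained sketch of that pattern (names and data are illustrative, not from the Cloud DNS code):

import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;

public class SizeCappedPage {
    public static void main(String[] args) {
        List<String> records = Arrays.asList("a", "b", "c", "d", "e");
        int maxResults = 3;

        LinkedList<String> page = new LinkedList<>();
        boolean sizeReached = false;
        boolean hasMorePages = false;
        for (String record : records) {
            if (sizeReached) {
                hasMorePages = true; // one more match exists, so a next-page token is warranted
                break;
            }
            page.addLast(record);
            sizeReached = page.size() == maxResults; // size() marks the page boundary
        }
        System.out.println(page + " hasMorePages=" + hasMorePages); // [a, b, c] hasMorePages=true
    }
}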

From source file:at.ac.tuwien.inso.subcat.ui.widgets.TrendView.java

public void addConfiguration(TrendChartConfigData config) {
    assert (config != null);

    LinkedList<Combo> combos = new LinkedList<Combo>();

    // Title Row:
    Label lblGrpTitle = new Label(optionComposite, SWT.NONE);
    lblGrpTitle.setLayoutData(new GridData(SWT.FILL, SWT.CENTER, true, false, 2, 1));

    Helper.setLabelStyle(lblGrpTitle, SWT.BOLD);
    lblGrpTitle.setText(config.getName());

    Composite topOptions = new Composite(optionComposite, SWT.NONE);
    topOptions.setLayoutData(new GridData(SWT.FILL, SWT.CENTER, false, false, 1, 1));
    topOptions.setLayout(new GridLayout(config.getDropDowns().size(), true));

    for (DropDownData dropData : config.getDropDowns()) {
        Combo comboDropDown = new Combo(topOptions, SWT.DROP_DOWN | SWT.BORDER | SWT.READ_ONLY);
        comboDropDown.setLayoutData(new GridData(SWT.CENTER, SWT.CENTER, true, false, 1, 1));
        comboDropDown.setData(dropData);
        combos.add(comboDropDown);

        for (DropDownData.Pair data : dropData.getData()) {
            comboDropDown.add(data.name);
        }

        comboDropDown.select(0);

        comboDropDown.addSelectionListener(this.comboListener);
    }

    // Separator:
    Helper.separator(optionComposite, 3);

    // Left Option Labels:
    new Label(optionComposite, SWT.NONE);

    Composite leftOptions = new Composite(optionComposite, SWT.NONE);
    leftOptions.setLayoutData(new GridData(SWT.FILL, SWT.CENTER, false, false, 1, 1));

    leftOptions.setLayout(new GridLayout(1, true));
    for (OptionListConfigData.Pair pair : config.getOptionList().getData()) {
        Label lblOpt = new Label(leftOptions, SWT.NONE);
        lblOpt.setText(pair.name);
    }

    // Check Boxes:
    Composite selectionComposite = new Composite(optionComposite, SWT.NONE);
    selectionComposite.setLayoutData(new GridData(SWT.FILL, SWT.CENTER, false, false, 1, 1));
    selectionComposite.setLayout(new GridLayout(combos.size(), true));

    OptionListConfig leftConfig = config.getOptionList().getConfig();
    int x = 0;

    for (Combo combo : combos) {
        TrendChartPlotConfig topConfig = (TrendChartPlotConfig) config.getDropDowns().get(x).getConfig();

        for (OptionListConfigData.Pair pair : config.getOptionList().getData()) {
            Button button = new Button(selectionComposite, SWT.CHECK);
            button.setLayoutData(new GridData(SWT.CENTER, SWT.CENTER, true, false, 1, 1));
            button.setData(new ChartIdentifier(topConfig, combo, leftConfig, pair.id, boxWeight++));
            button.addSelectionListener(boxListener);
        }

        x++;
    }

    // Scrolling area size update:
    scrolledComposite.setMinSize(optionComposite.computeSize(SWT.DEFAULT, SWT.DEFAULT));
}

From source file:eu.stratosphere.nephele.multicast.MulticastManager.java

/**
 * Returns a list of (physical) nodes (= hosts) within the multicast tree. Each node contains the local
 * ChannelIDs to which records must be forwarded. The first node in the list is the only multicast sender.
 * 
 * @param source the connection info of the instance the multicast sender runs on
 * @param jobID the ID of the job the multicast belongs to
 * @param sourceChannelID the ID of the sender's output channel
 * @param randomize whether to shuffle the receiver nodes (the sender always stays first)
 * @return the list of tree nodes, with the sender at the head
 */
private LinkedList<TreeNode> extractTreeNodes(final InstanceConnectionInfo source, final JobID jobID,
        final ChannelID sourceChannelID, final boolean randomize) {

    final ExecutionGraph eg = this.scheduler.getExecutionGraphByID(jobID);

    final ExecutionEdge outputChannel = eg.getEdgeByID(sourceChannelID);

    final ExecutionGate broadcastGate = outputChannel.getOutputGate();

    final LinkedList<ExecutionEdge> outputChannels = new LinkedList<ExecutionEdge>();

    // Get all broadcast output channels
    final int numberOfOutputChannels = broadcastGate.getNumberOfEdges();
    for (int i = 0; i < numberOfOutputChannels; ++i) {
        final ExecutionEdge c = broadcastGate.getEdge(i);

        if (c.isBroadcast()) {
            outputChannels.add(c);
        }
    }

    final LinkedList<TreeNode> treeNodes = new LinkedList<TreeNode>();

    LinkedList<ChannelID> actualLocalTargets = new LinkedList<ChannelID>();

    int firstConnectionID = 0;
    // search for local targets for the tree node
    for (Iterator<ExecutionEdge> iter = outputChannels.iterator(); iter.hasNext();) {

        final ExecutionEdge actualOutputChannel = iter.next();

        // the connection ID should not be needed for the root node (as it is not set as remote receiver)
        // but in order to maintain consistency, it also gets the connectionID of the first channel pointing to it
        firstConnectionID = actualOutputChannel.getConnectionID();

        final ExecutionVertex targetVertex = actualOutputChannel.getInputGate().getVertex();

        // is the target vertex running on the same instance?
        if (targetVertex.getAllocatedResource().getInstance().getInstanceConnectionInfo().equals(source)) {

            actualLocalTargets.add(actualOutputChannel.getInputChannelID());
            iter.remove();
        }

    }

    // create sender node (root) with source instance
    TreeNode actualNode = new TreeNode(
            eg.getVertexByChannelID(sourceChannelID).getAllocatedResource().getInstance(), source,
            firstConnectionID, actualLocalTargets);

    treeNodes.add(actualNode);

    // now we have the root node; let's extract all other nodes

    LinkedList<TreeNode> receiverNodes = new LinkedList<TreeNode>();

    while (outputChannels.size() > 0) {

        final ExecutionEdge firstChannel = outputChannels.pollFirst();

        // each receiver node's endpoint is associated with the connection ID
        // of the first channel pointing to this node.
        final int connectionID = firstChannel.getConnectionID();

        final ExecutionVertex firstTarget = firstChannel.getInputGate().getVertex();

        final InstanceConnectionInfo actualInstance = firstTarget.getAllocatedResource().getInstance()
                .getInstanceConnectionInfo();

        actualLocalTargets = new LinkedList<ChannelID>();

        // add first local target
        actualLocalTargets.add(firstChannel.getInputChannelID());

        // now we iterate through the remaining channels to find other local targets...
        for (Iterator<ExecutionEdge> iter = outputChannels.iterator(); iter.hasNext();) {

            final ExecutionEdge actualOutputChannel = iter.next();

            final ExecutionVertex actualTarget = actualOutputChannel.getInputGate().getVertex();

            // is the target vertex running on the same instance?
            if (actualTarget.getAllocatedResource().getInstance().getInstanceConnectionInfo()
                    .equals(actualInstance)) {
                actualLocalTargets.add(actualOutputChannel.getInputChannelID());

                iter.remove();

            }

        } // end for

        // create tree node for current instance
        actualNode = new TreeNode(firstTarget.getAllocatedResource().getInstance(), actualInstance,
                connectionID, actualLocalTargets);

        receiverNodes.add(actualNode);

    } // end while

    // Do we want to shuffle the receiver nodes?
    // Only randomize the receivers, as the sender (the first one) has to stay the same
    if (randomize) {
        Collections.shuffle(receiverNodes);
    } else {
        // sort tree nodes according to host name
        Collections.sort(receiverNodes);
    }

    treeNodes.addAll(receiverNodes);

    return treeNodes;

}
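
The grouping idiom in the method above, driven by outputChannels.size() > 0, generalizes nicely: poll the first element, then sweep the remainder with an Iterator, removing everything that belongs to the same group, until size() hits zero. A standalone sketch of that pattern with plain strings (grouping by first character, purely illustrative):

import java.util.Iterator;
import java.util.LinkedList;

public class DrainAndGroup {
    public static void main(String[] args) {
        LinkedList<String> items = new LinkedList<>();
        items.add("apple");
        items.add("banana");
        items.add("avocado");
        items.add("blueberry");

        while (items.size() > 0) { // size() shrinks as whole groups are drained
            String first = items.pollFirst();
            LinkedList<String> group = new LinkedList<>();
            group.add(first);
            for (Iterator<String> it = items.iterator(); it.hasNext();) {
                String candidate = it.next();
                if (candidate.charAt(0) == first.charAt(0)) {
                    group.add(candidate);
                    it.remove(); // safe structural removal during iteration
                }
            }
            System.out.println(group); // [apple, avocado] then [banana, blueberry]
        }
    }
}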

From source file:com.att.nsa.cambria.service.impl.EventsServiceImpl.java

/**
 * Reads events from the input stream, stamps each message with transaction details, and publishes
 * the messages to the given topic in batches.
 *
 * @param ctx the DMaaP request context
 * @param inputStream the stream of incoming events
 * @param topic the topic to publish to
 * @param partitionKey the partition key for the events
 * @param requestTime the time the request was received
 * @param chunked whether the input stream is chunked
 * @param mediaType the media type of the request body
 * @throws ConfigDbException
 * @throws AccessDeniedException
 * @throws TopicExistsException
 * @throws IOException
 * @throws CambriaApiException
 */
private void pushEventsWithTransaction(DMaaPContext ctx, InputStream inputStream, final String topic,
        final String partitionKey, final String requestTime, final boolean chunked, final String mediaType)
        throws ConfigDbException, AccessDeniedException, TopicExistsException, IOException,
        CambriaApiException {

    final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics();

    // setup the event set
    final CambriaEventSet events = new CambriaEventSet(mediaType, inputStream, chunked, partitionKey);

    // start processing, building a batch to push to the backend
    final long startMs = System.currentTimeMillis();
    long count = 0;
    long maxEventBatch = 1024 * 16;
    String eventLen = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, BATCH_LENGTH);
    if (null != eventLen)
        maxEventBatch = Long.parseLong(eventLen);
    //final long maxEventBatch = ctx.getConfigReader().getSettings().getLong(BATCH_LENGTH, 1024 * 16);
    final LinkedList<Publisher.message> batch = new LinkedList<Publisher.message>();
    final ArrayList<KeyedMessage<String, String>> kms = new ArrayList<KeyedMessage<String, String>>();

    Publisher.message m = null;
    int messageSequence = 1;
    Long batchId = 1L;
    final boolean transactionEnabled = true;
    int publishBatchCount = 0;
    SimpleDateFormat sdf = new SimpleDateFormat("dd/MM/yyyy HH:mm:ss.SS");

    //LOG.warn("Batch Start Id: " + Utils.getFromattedBatchSequenceId(batchId));
    try {
        // for each message...
        batchId = DMaaPContext.getBatchID();

        String responseTransactionId = null;

        while ((m = events.next()) != null) {

            //LOG.warn("Batch Start Id: " + Utils.getFromattedBatchSequenceId(batchId));

            addTransactionDetailsToMessage(m, topic, ctx.getRequest(), requestTime, messageSequence, batchId,
                    transactionEnabled);
            messageSequence++;

            // add the message to the batch
            batch.add(m);

            responseTransactionId = m.getLogDetails().getTransactionId();

            JSONObject jsonObject = new JSONObject();
            jsonObject.put("message", m.getMessage());
            jsonObject.put("transactionId", responseTransactionId);
            final KeyedMessage<String, String> data = new KeyedMessage<String, String>(topic, m.getKey(),
                    jsonObject.toString());
            kms.add(data);

            // check if the batch is full
            final int sizeNow = batch.size();
            if (sizeNow >= maxEventBatch) {
                String startTime = sdf.format(new Date());
                LOG.info("Batch Start Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch Start Id="
                        + batchId + "]");
                try {
                    ctx.getConfigReader().getfPublisher().sendBatchMessage(topic, kms);
                    //transactionLogs(batch);
                    for (message msg : batch) {
                        LogDetails logDetails = msg.getLogDetails();
                        LOG.info("Publisher Log Details : " + logDetails.getPublisherLogDetails());
                    }
                } catch (Exception excp) {

                    int status = HttpStatus.SC_NOT_FOUND;
                    String errorMsg = null;
                    if (excp instanceof CambriaApiException) {
                        status = ((CambriaApiException) excp).getStatus();
                        JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
                        JSONObject errObject = new JSONObject(jsonTokener);
                        errorMsg = (String) errObject.get("message");
                    }
                    ErrorResponse errRes = new ErrorResponse(status,
                            DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(),
                            "Transaction-" + errorMessages.getPublishMsgError() + ":" + topic + "."
                                    + errorMessages.getPublishMsgCount() + count + "." + errorMsg,
                            null, Utils.getFormattedDate(new Date()), topic,
                            Utils.getUserApiKey(ctx.getRequest()), ctx.getRequest().getRemoteHost(), null,
                            null);
                    LOG.info(errRes.toString());
                    throw new CambriaApiException(errRes);
                }
                kms.clear();
                batch.clear();
                metricsSet.publishTick(sizeNow);
                publishBatchCount = sizeNow;
                count += sizeNow;
                //batchId++;
                String endTime = sdf.format(new Date());
                LOG.info("Batch End Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch End Id="
                        + batchId + ",Batch Total=" + publishBatchCount + ",Batch Start Time=" + startTime
                        + ",Batch End Time=" + endTime + "]");
                batchId = DMaaPContext.getBatchID();
            }
        }

        // send the pending batch
        final int sizeNow = batch.size();
        if (sizeNow > 0) {
            String startTime = sdf.format(new Date());
            LOG.info("Batch Start Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch Start Id="
                    + batchId + "]");
            try {
                ctx.getConfigReader().getfPublisher().sendBatchMessage(topic, kms);
                //transactionLogs(batch);
                for (message msg : batch) {
                    LogDetails logDetails = msg.getLogDetails();
                    LOG.info("Publisher Log Details : " + logDetails.getPublisherLogDetails());
                }
            } catch (Exception excp) {
                int status = HttpStatus.SC_NOT_FOUND;
                String errorMsg = null;
                if (excp instanceof CambriaApiException) {
                    status = ((CambriaApiException) excp).getStatus();
                    JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
                    JSONObject errObject = new JSONObject(jsonTokener);
                    errorMsg = (String) errObject.get("message");
                }

                ErrorResponse errRes = new ErrorResponse(status,
                        DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(),
                        "Transaction-" + errorMessages.getPublishMsgError() + ":" + topic + "."
                                + errorMessages.getPublishMsgCount() + count + "." + errorMsg,
                        null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()),
                        ctx.getRequest().getRemoteHost(), null, null);
                LOG.info(errRes.toString());
                throw new CambriaApiException(errRes);
            }
            kms.clear();
            metricsSet.publishTick(sizeNow);
            count += sizeNow;
            //batchId++;
            String endTime = sdf.format(new Date());
            publishBatchCount = sizeNow;
            LOG.info("Batch End Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch End Id="
                    + batchId + ",Batch Total=" + publishBatchCount + ",Batch Start Time=" + startTime
                    + ",Batch End Time=" + endTime + "]");
        }

        final long endMs = System.currentTimeMillis();
        final long totalMs = endMs - startMs;

        LOG.info("Published " + count + " msgs in " + totalMs + "ms for topic " + topic);

        if (null != responseTransactionId) {
            ctx.getResponse().setHeader("transactionId", Utils.getResponseTransactionId(responseTransactionId));
        }

        // build a response
        final JSONObject response = new JSONObject();
        response.put("count", count);
        response.put("serverTimeMs", totalMs);
        DMaaPResponseBuilder.respondOk(ctx, response);

    } catch (Exception excp) {
        int status = HttpStatus.SC_NOT_FOUND;
        String errorMsg = null;
        if (excp instanceof CambriaApiException) {
            status = ((CambriaApiException) excp).getStatus();
            JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
            JSONObject errObject = new JSONObject(jsonTokener);
            errorMsg = (String) errObject.get("message");
        }

        ErrorResponse errRes = new ErrorResponse(status,
                DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(),
                "Transaction-" + errorMessages.getPublishMsgError() + ":" + topic + "."
                        + errorMessages.getPublishMsgCount() + count + "." + errorMsg,
                null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()),
                ctx.getRequest().getRemoteHost(), null, null);
        LOG.info(errRes.toString());
        throw new CambriaApiException(errRes);
    }
}
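
Stripped of the transaction and logging machinery, the batching skeleton above is simply: accumulate messages into a LinkedList, flush when size() crosses the threshold, then flush whatever remains at the end. A compact sketch of just that skeleton (send() stands in for the real publisher call and is hypothetical):

import java.util.LinkedList;
import java.util.List;

public class BatchedPublisher {
    static final int MAX_BATCH = 3;

    public static void main(String[] args) {
        LinkedList<String> batch = new LinkedList<>();
        for (String msg : new String[] { "m1", "m2", "m3", "m4", "m5" }) {
            batch.add(msg);
            if (batch.size() >= MAX_BATCH) { // size() triggers the flush
                send(batch);
                batch.clear();
            }
        }
        if (batch.size() > 0) { // flush the pending partial batch
            send(batch);
        }
    }

    static void send(List<String> batch) {
        System.out.println("sending " + batch.size() + " messages: " + batch);
    }
}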

From source file:com.google.cloud.dns.testing.LocalDnsHelper.java

/**
 * Lists zones. The next page token is the last listed zone name and is returned only if there is
 * more to list and the user does not exclude nextPageToken from the field options.
 */
@VisibleForTesting
Response listZones(String projectId, String query) {
    Map<String, Object> options = OptionParsers.parseListZonesOptions(query);
    Response response = checkListOptions(options);
    if (response != null) {
        return response;
    }
    ConcurrentSkipListMap<String, ZoneContainer> containers = findProject(projectId).zones();
    String[] fields = (String[]) options.get("fields");
    String dnsName = (String) options.get("dnsName");
    String pageToken = (String) options.get("pageToken");
    Integer maxResults = options.get("maxResults") == null ? null
            : Integer.valueOf((String) options.get("maxResults"));
    boolean sizeReached = false;
    boolean hasMorePages = false;
    LinkedList<String> serializedZones = new LinkedList<>();
    String lastZoneName = null;
    ConcurrentNavigableMap<String, ZoneContainer> fragment = pageToken != null
            ? containers.tailMap(pageToken, false)
            : containers;
    for (ZoneContainer zoneContainer : fragment.values()) {
        ManagedZone zone = zoneContainer.zone();
        if (dnsName == null || zone.getDnsName().equals(dnsName)) {
            if (sizeReached) {
                // we do not add this, just note that there would be more and there should be a token
                hasMorePages = true;
                break;
            } else {
                try {
                    lastZoneName = zone.getName();
                    serializedZones.addLast(jsonFactory.toString(OptionParsers.extractFields(zone, fields)));
                } catch (IOException e) {
                    return Error.INTERNAL_ERROR.response(String.format(
                            "Error when serializing managed zone %s in project %s", lastZoneName, projectId));
                }
            }
        }
        sizeReached = maxResults != null && maxResults.equals(serializedZones.size());
    }
    boolean includePageToken = hasMorePages
            && (fields == null || Arrays.asList(fields).contains("nextPageToken"));
    return toListResponse(serializedZones, "managedZones", lastZoneName, includePageToken);
}

From source file:org.epics.archiverappliance.retrieval.DataRetrievalServlet.java

/**
 * Parses the timeranges parameter and generates a list of TimeSpans.
 *
 * @param resp the response used to report parse errors to the client
 * @param pvName the name of the PV this request is for
 * @param requestTimes list of timespans that the valid times are added to
 * @param timeRangesStr comma-separated list of alternating start/end times
 * @return true if all ranges parsed and validated; false if an error response was sent
 * @throws IOException
 */
private boolean parseTimeRanges(HttpServletResponse resp, String pvName, LinkedList<TimeSpan> requestTimes,
        String timeRangesStr) throws IOException {
    String[] timeRangesStrList = timeRangesStr.split(",");
    if (timeRangesStrList.length % 2 != 0) {
        String msg = "Need to specify an even number of times in timeranges for pv " + pvName + ". We have "
                + timeRangesStrList.length + " times";
        logger.error(msg);
        resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
        resp.sendError(HttpServletResponse.SC_BAD_REQUEST, msg);
        return false;
    }

    LinkedList<Timestamp> timeRangesList = new LinkedList<Timestamp>();
    for (String timeRangesStrItem : timeRangesStrList) {
        try {
            Timestamp ts = TimeUtils.convertFromISO8601String(timeRangesStrItem);
            timeRangesList.add(ts);
        } catch (IllegalArgumentException ex) {
            try {
                Timestamp ts = TimeUtils.convertFromDateTimeStringWithOffset(timeRangesStrItem);
                timeRangesList.add(ts);
            } catch (IllegalArgumentException ex2) {
                String msg = "Cannot parse time " + timeRangesStrItem;
                logger.warn(msg, ex2);
                resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
                resp.sendError(HttpServletResponse.SC_BAD_REQUEST, msg);
                return false;
            }
        }
    }

    assert (timeRangesList.size() % 2 == 0);
    Timestamp prevEnd = null;
    while (!timeRangesList.isEmpty()) {
        Timestamp t0 = timeRangesList.pop();
        Timestamp t1 = timeRangesList.pop();

        if (t1.before(t0)) {
            String msg = "For request, end " + t1.toString() + " is before start " + t0.toString() + " for pv "
                    + pvName;
            logger.error(msg);
            resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
            resp.sendError(HttpServletResponse.SC_BAD_REQUEST, msg);
            return false;
        }

        if (prevEnd != null) {
            if (t0.before(prevEnd)) {
                String msg = "For request, start time " + t0.toString() + " is before previous end time "
                        + prevEnd.toString() + " for pv " + pvName;
                logger.error(msg);
                resp.addHeader(MimeResponse.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
                resp.sendError(HttpServletResponse.SC_BAD_REQUEST, msg);
                return false;
            }
        }
        prevEnd = t1;
        requestTimes.add(new TimeSpan(t0, t1));
    }
    return true;
}
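
The pairing logic above relies on the earlier size() % 2 check: once the list is known to hold an even number of timestamps, pop() can safely be called twice per iteration. A minimal sketch of that consume-in-pairs pattern with integers (values are arbitrary):

import java.util.LinkedList;

public class ConsumeInPairs {
    public static void main(String[] args) {
        LinkedList<Integer> times = new LinkedList<>();
        for (int t : new int[] { 1, 5, 10, 12 }) {
            times.add(t);
        }
        assert times.size() % 2 == 0 : "need an even number of endpoints";

        while (!times.isEmpty()) {
            int start = times.pop(); // pop() removes from the head,
            int end = times.pop();   // so each iteration consumes one [start, end] pair
            System.out.println("[" + start + ", " + end + "]");
        }
    }
}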

From source file:es.bsc.servicess.ide.editors.ImplementationFormPage.java

/**
 * Gets the labels of a list of service elements.
 *
 * @param selectedList the selected service elements
 * @return array of element labels
 */
protected String[] getElementsNames(LinkedList<ServiceElement> selectedList) {
    String[] elements = new String[selectedList.size()];
    for (int i = 0; i < selectedList.size(); i++) {
        elements[i] = selectedList.get(i).getLabel();
    }
    return elements;
}
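
One caveat worth noting: selectedList.get(i) walks a LinkedList from the head on every call, so the loop above is quadratic in the list length. A self-contained sketch (plain strings standing in for ServiceElement labels) of the usual linear alternative, still using size() to allocate the array:

import java.util.Arrays;
import java.util.LinkedList;

public class ElementNamesSketch {
    public static void main(String[] args) {
        LinkedList<String> labels = new LinkedList<>();
        labels.add("Service A");
        labels.add("Service B");
        labels.add("Service C");

        // size() fixes the result array's length up front...
        String[] elements = new String[labels.size()];
        int i = 0;
        // ...and a single traversal fills it, avoiding repeated get(i),
        // which restarts from the head of a LinkedList on every call
        for (String label : labels) {
            elements[i++] = label;
        }
        System.out.println(Arrays.toString(elements));
    }
}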

From source file:elh.eus.absa.Features.java

/**
 * Extracts word form n-grams up to a certain length from a KAF/NAF document.
 *
 * @param length which 'n' to use for the n-grams
 * @param kafDoc POS-tagged KAF document to extract n-grams from
 * @param save whether to save the n-grams to a file or not
 * @return 0 if length is 0, 1 otherwise
 */
private int extractWfNgramsKAF(int length, KAFDocument kafDoc, boolean save) {
    //System.err.println("ngram extraction: _"+length+"_");
    if (length == 0) {
        return 0;
    }

    for (List<WF> sent : kafDoc.getSentences()) {
        LinkedList<String> ngrams = new LinkedList<String>();
        for (WF wf : sent) {
            if (ngrams.size() >= length) {
                ngrams.removeFirst();
            }
            ngrams.add(wf.getForm());
            //ngrams.add(normalize(wf.getForm(), params.getProperty("normalization", "none")));

            // add ngrams to the feature list
            for (int i = 0; i < ngrams.size(); i++) {
                String ng = featureFromArray(ngrams.subList(0, i + 1), "wf");
                addNgram("wf", ng);
            }
        }
        //empty ngram list and add remaining ngrams to the feature list
        while (!ngrams.isEmpty()) {
            String ng = featureFromArray(ngrams, "wf");
            addNgram("wf", ng);
            ngrams.removeFirst();
        }
    }
    return 1;
}
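
The n-gram loop above keeps the LinkedList as a sliding window: size() is compared against the window length, removeFirst() evicts the oldest token, and every prefix of the window becomes a feature. A self-contained sketch of just the window mechanics over a token array (tokens are illustrative):

import java.util.LinkedList;

public class SlidingNgrams {
    public static void main(String[] args) {
        String[] tokens = { "the", "quick", "brown", "fox" };
        int length = 2; // bigrams

        LinkedList<String> window = new LinkedList<>();
        for (String token : tokens) {
            if (window.size() >= length) {
                window.removeFirst(); // keep the window at most 'length' wide
            }
            window.add(token);
            for (int i = 0; i < window.size(); i++) {
                System.out.println(window.subList(0, i + 1)); // every prefix is an n-gram
            }
        }
    }
}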

From source file:gedi.riboseq.inference.orf.OrfFinder.java

private void overlapUniqueCoverage(List<OrfWithCodons> orfs) {

    HashMap<Codon, HashSet<OrfWithCodons>> cod2Orf = new HashMap<Codon, HashSet<OrfWithCodons>>();
    int numCond = -1;
    for (OrfWithCodons orf : orfs)
        for (Codon c : orf.getCodons()) {
            cod2Orf.computeIfAbsent(c, x -> new HashSet<>()).add(orf);
            numCond = c.getActivity().length;
        }

    // now equivalence classes: gives you all codons that are consistent with a specific combination of orfs
    HashMap<HashSet<OrfWithCodons>, HashSet<Codon>> equi = new HashMap<HashSet<OrfWithCodons>, HashSet<Codon>>();
    for (Codon c : cod2Orf.keySet()) {
        equi.computeIfAbsent(cod2Orf.get(c), x -> new HashSet<>()).add(c);
    }

    // compute the length (in codons) of each equivalence region
    HashMap<HashSet<OrfWithCodons>, Integer> equiLengths = new HashMap<HashSet<OrfWithCodons>, Integer>();
    for (HashSet<OrfWithCodons> e : equi.keySet()) {
        LinkedList<ArrayGenomicRegion> equiCodons = null;
        for (OrfWithCodons orf : e) {
            if (equiCodons == null) {
                equiCodons = new LinkedList<ArrayGenomicRegion>();
                for (int i = 0; i < orf.getRegion().getTotalLength(); i += 3)
                    equiCodons.add(orf.getRegion().map(new ArrayGenomicRegion(i, i + 3)));
            } else {
                Iterator<ArrayGenomicRegion> it = equiCodons.iterator();
                while (it.hasNext()) {
                    ArrayGenomicRegion cod = it.next();
                    if (!orf.getRegion().containsUnspliced(cod)
                            || orf.getRegion().induce(cod.getStart()) % 3 != 0)
                        it.remove();
                }
            }
        }
        for (OrfWithCodons orf : orfs) {
            if (!e.contains(orf)) {

                Iterator<ArrayGenomicRegion> it = equiCodons.iterator();
                while (it.hasNext()) {
                    ArrayGenomicRegion cod = it.next();
                    if (orf.getRegion().containsUnspliced(cod)
                            && orf.getRegion().induce(cod.getStart()) % 3 == 0)
                        it.remove();
                }
            }

        }
        equiLengths.put(e, equiCodons.size());
    }

    HashMap<OrfWithCodons, double[]> total = estimateByCoverage(equi, equiLengths, c -> c.getTotalActivity());
    double sum = EI.wrap(total.values()).mapToDouble(a -> a[0]).sum();
    for (OrfWithCodons orf : total.keySet())
        orf.setEstimatedTotalActivity(total.get(orf)[0], total.get(orf)[0] / sum);

    for (int i = 0; i < numCond; i++) {
        int ei = i;
        total = estimateByCoverage(equi, equiLengths, c -> c.getActivity()[ei]);
        sum = EI.wrap(total.values()).mapToDouble(a -> a[0]).sum();
        for (OrfWithCodons orf : total.keySet())
            orf.setEstimatedTotalActivity(i, total.get(orf)[0], total.get(orf)[0] / sum);
    }

}

From source file:com.quix.aia.cn.imo.mapper.UserMaintenance.java

/**
 * <p>This method retrieves all users and wraps them in a pager.</p>
 *
 * @param req the current HTTP request
 * @return pager over the user listing rows
 */
public Pager getAllUserListing(HttpServletRequest req) {
    LinkedList item = new LinkedList();
    User user = null;
    ArrayList vecAllRes = getAllUser(req);

    if (vecAllRes.size() != 0) {
        for (int i = 0; i < vecAllRes.size(); i++) {
            user = (User) vecAllRes.get(i);

            if (user.getCreatedUserSscCode() > 0)
                user.setCreatedUserSscLevel(true);
            else if (user.getCreatedUserCityCode() > 0)
                user.setCreatedUserCityLevel(true);
            else if (user.getCreatedUserDistCode() > 0)
                user.setCreatedUserDistLevel(true);
            else if (user.getCreatedUserBuCode() > 0)
                user.setCreatedUserBuLevel(true);
            item.add(user.getGetResListingTableRow(i, req, user));
        }
    }
    Pager pager = new Pager();
    pager.setActualSize(item.size());
    pager.setCurrentPageNumber(0);
    pager.setMaxIndexPages(10);
    pager.setMaxPageItems(10);
    // pad the listing with empty rows so size() is a multiple of the page size
    for (; item.size() % 10 != 0; item.add("<tr></tr>"))
        ;
    pager.setItems(item);
    return pager;

}
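
The padding loop near the end is the page-relevant detail: rows are appended until size() is a multiple of the page size, so the last page renders full-height. As a standalone sketch (page size and filler are illustrative):

import java.util.LinkedList;

public class PadToPageSize {
    public static void main(String[] args) {
        int pageSize = 10;
        LinkedList<String> rows = new LinkedList<>();
        for (int i = 0; i < 13; i++) {
            rows.add("<tr>row " + i + "</tr>");
        }

        // append empty rows until size() is a multiple of the page size
        while (rows.size() % pageSize != 0) {
            rows.add("<tr></tr>");
        }
        System.out.println(rows.size()); // 20
    }
}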