Example usage for java.util TreeMap entrySet

List of usage examples for java.util TreeMap entrySet

Introduction

This page collects example usages of java.util.TreeMap's entrySet method, drawn from open-source projects.

Prototype

public Set<Map.Entry<K, V>> entrySet()


Documentation

Returns a Set view of the mappings contained in this map; the set's iterator returns the entries in ascending key order. Internally, a field is initialized to contain an instance of the entry set view the first time the view is requested; views are stateless, so there is no reason to create more than one.
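A minimal, self-contained sketch of the call before the longer examples below (the class name and map contents are hypothetical, chosen only to illustrate the API): entrySet() visits the mappings in ascending key order, and each Map.Entry exposes getKey() and getValue().

import java.util.Map;
import java.util.TreeMap;

public class TreeMapEntrySetExample {
    public static void main(String[] args) {
        TreeMap<Long, Integer> offsets = new TreeMap<Long, Integer>();
        offsets.put(42L, 100);
        offsets.put(7L, 0);
        offsets.put(19L, 50);

        // Entries are visited in ascending key order: 7, 19, 42.
        for (Map.Entry<Long, Integer> entry : offsets.entrySet()) {
            System.out.println(entry.getKey() + " -> " + entry.getValue());
        }
    }
}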

Usage

From source file: org.commoncrawl.service.listcrawler.CrawlList.java

void loadSubDomainMetadataFromDisk() throws IOException {
    LOG.info("*** LIST:" + getListId() + " LOAD SUBDOMAIN METADATA FROM DISK ...  ");
    if (_subDomainMetadataFile.exists()) {

        LOG.info("*** LIST:" + getListId() + " FILE EXISTS LOADING SUBDOMAIN DATA FROM DISK.");

        RandomAccessFile file = new RandomAccessFile(_subDomainMetadataFile, "rw");
        DataInputBuffer inputBuffer = new DataInputBuffer();
        byte fixedDataBlock[] = new byte[CrawlListMetadata.Constants.FixedDataSize];

        try {
            // skip version 
            file.read();
            // read item count 
            int itemCount = file.readInt();

            LOG.info("*** LIST:" + getListId() + " SUBDOMAIN ITEM COUNT:" + itemCount);

            CrawlListMetadata newMetadata = new CrawlListMetadata();

            TreeMap<Long, Integer> idToOffsetMap = new TreeMap<Long, Integer>();
            for (int i = 0; i < itemCount; ++i) {

                long originalPos = file.getFilePointer();
                file.readFully(fixedDataBlock, 0, fixedDataBlock.length);
                inputBuffer.reset(fixedDataBlock, fixedDataBlock.length);
                try {
                    newMetadata.deserialize(inputBuffer, new BinaryProtocol());
                } catch (Exception e) {
                    LOG.error("-----Failed to Deserialize Metadata at Index:" + i + " Exception:"
                            + CCStringUtils.stringifyException(e));
                }
                idToOffsetMap.put(newMetadata.getDomainHash(), (int) originalPos);
            }

            // write lookup table 
            _offsetLookupTable = new DataOutputBuffer(idToOffsetMap.size() * OFFSET_TABLE_ENTRY_SIZE);
            for (Map.Entry<Long, Integer> entry : idToOffsetMap.entrySet()) {
                _offsetLookupTable.writeLong(entry.getKey());
                _offsetLookupTable.writeInt(entry.getValue());
            }
        } finally {
            file.close();
        }
        LOG.info("*** LIST:" + getListId() + " DONE LOADING SUBDOMAIN DATA FROM DISK");
    } else {

        LOG.info("*** LIST:" + getListId() + " SUBDOMAIN METADATA DOES NOT EXIST! LOADING FROM SCRATCH");

        RandomAccessFile fixedDataReader = new RandomAccessFile(_fixedDataFile, "rw");
        RandomAccessFile stringDataReader = new RandomAccessFile(_variableDataFile, "rw");

        try {

            //ok rebuild top level metadata as well 
            _metadata.clear();

            OnDiskCrawlHistoryItem item = new OnDiskCrawlHistoryItem();

            int processedCount = 0;
            while (fixedDataReader.getFilePointer() != fixedDataReader.length()) {

                long position = fixedDataReader.getFilePointer();

                // store offset in item 
                item._fileOffset = position;
                // load from disk 
                item.deserialize(fixedDataReader);
                try {
                    // seek to string data 
                    stringDataReader.seek(item._stringsOffset);
                    // and skip buffer length 
                    WritableUtils.readVInt(stringDataReader);
                    // and read primary string 
                    String url = stringDataReader.readUTF();

                    // get metadata object for subdomain 
                    CrawlListMetadata subDomainMetadata = getTransientSubDomainMetadata(url);

                    // increment url count 
                    subDomainMetadata.setUrlCount(subDomainMetadata.getUrlCount() + 1);

                    // increment top level metadata count 
                    _metadata.setUrlCount(_metadata.getUrlCount() + 1);

                    // update top level metadata ..
                    updateMetadata(item, _metadata, 0);

                    // update sub-domain metadata object  from item data
                    updateMetadata(item, subDomainMetadata, 0);

                    ++processedCount;
                } catch (IOException e) {
                    LOG.error("Exception Reading String Data For Item:" + (processedCount + 1));
                    LOG.error("Exception:" + CCStringUtils.stringifyException(e));
                    LOG.error("File Position:" + fixedDataReader.getFilePointer() + " StringsPointer:"
                            + stringDataReader.getFilePointer());
                }

                if (processedCount % 10000 == 0) {
                    LOG.info("*** LIST:" + getListId() + " Processed:" + processedCount + " Items");
                }
            }

            // ok commit top level metadata to disk as well 
            writeMetadataToDisk();

        } catch (IOException e) {
            LOG.error("Encountered Exception Queueing Items for List:" + _listId + " Exception:"
                    + CCStringUtils.stringifyException(e));
            LOG.error("File Position:" + fixedDataReader.getFilePointer() + " StringsPointer:"
                    + stringDataReader.getFilePointer());
            _queueState = QueueState.QUEUED;
        } finally {
            fixedDataReader.close();
            stringDataReader.close();
        }
        LOG.info("*** LIST:" + getListId() + " SUBDOMAIN METADATA REBUILT FROM LIST DATA . WRITING TO DISK");

        // write metadata to disk 
        writeInitialSubDomainMetadataToDisk();

        LOG.info("*** LIST:" + getListId() + " SUBDOMAIN METADATA REBUILT FROM LIST DATA . WRITE COMPLETE");
    }
}

From source file: org.apache.hadoop.hive.ql.exec.persistence.BytesBytesMultiHashMap.java

/** Writes the debug dump of the table into logs. Not thread-safe. */
public void debugDumpTable() {
    StringBuilder dump = new StringBuilder(keysAssigned + " keys\n");
    TreeMap<Long, Integer> byteIntervals = new TreeMap<Long, Integer>();
    int examined = 0;
    for (int slot = 0; slot < refs.length; ++slot) {
        long ref = refs[slot];
        if (ref == 0) {
            continue;
        }
        ++examined;
        long recOffset = getFirstRecordLengthsOffset(ref, null);
        long tailOffset = Ref.getOffset(ref);
        writeBuffers.setUnsafeReadPoint(recOffset);
        int valueLength = (int) writeBuffers.unsafeReadVLong(),
                keyLength = (int) writeBuffers.unsafeReadVLong();
        long ptrOffset = writeBuffers.getUnsafeReadPoint();
        if (Ref.hasList(ref)) {
            byteIntervals.put(recOffset, (int) (ptrOffset + 5 - recOffset));
        }
        long keyOffset = tailOffset - valueLength - keyLength;
        byte[] key = new byte[keyLength];
        WriteBuffers.ByteSegmentRef fakeRef = new WriteBuffers.ByteSegmentRef(keyOffset, keyLength);
        byteIntervals.put(keyOffset - 4, keyLength + 4);
        writeBuffers.populateValue(fakeRef);
        System.arraycopy(fakeRef.getBytes(), (int) fakeRef.getOffset(), key, 0, keyLength);
        dump.append(Utils.toStringBinary(key, 0, key.length)).append(" ref [").append(dumpRef(ref))
                .append("]: ");
        Result hashMapResult = new Result();
        getValueResult(key, 0, key.length, hashMapResult);
        List<WriteBuffers.ByteSegmentRef> results = new ArrayList<WriteBuffers.ByteSegmentRef>();
        WriteBuffers.ByteSegmentRef byteSegmentRef = hashMapResult.first();
        while (byteSegmentRef != null) {
            results.add(byteSegmentRef);
            byteSegmentRef = hashMapResult.next();
        }
        dump.append(results.size()).append(" rows\n");
        for (int i = 0; i < results.size(); ++i) {
            WriteBuffers.ByteSegmentRef segment = results.get(i);
            byteIntervals.put(segment.getOffset(), segment.getLength() + ((i == 0) ? 1 : 0)); // state byte in the first record
        }
    }
    if (examined != keysAssigned) {
        dump.append("Found " + examined + " keys!\n");
    }
    // Report suspicious gaps in writeBuffers
    long currentOffset = 0;
    for (Map.Entry<Long, Integer> e : byteIntervals.entrySet()) {
        long start = e.getKey(), len = e.getValue();
        if (start - currentOffset > 4) {
            dump.append("Gap! [" + currentOffset + ", " + start + ")\n");
        }
        currentOffset = start + len;
    }
    LOG.info("Hashtable dump:\n " + dump.toString());
}
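The gap report above works only because entrySet() yields the recorded byte intervals sorted by start offset. A stripped-down sketch of that pattern, with made-up interval data:

import java.util.Map;
import java.util.TreeMap;

public class GapFinder {
    public static void main(String[] args) {
        // Interval start offset -> interval length, kept sorted by start offset.
        TreeMap<Long, Integer> intervals = new TreeMap<Long, Integer>();
        intervals.put(0L, 16);
        intervals.put(16L, 8);
        intervals.put(64L, 32); // leaves a gap at [24, 64)

        long currentOffset = 0;
        for (Map.Entry<Long, Integer> e : intervals.entrySet()) {
            long start = e.getKey(), len = e.getValue();
            if (start - currentOffset > 4) {
                System.out.println("Gap! [" + currentOffset + ", " + start + ")");
            }
            currentOffset = start + len;
        }
    }
}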

From source file: org.commoncrawl.service.listcrawler.CrawlList.java

void writeInitialSubDomainMetadataToDisk() throws IOException {

    RandomAccessFile file = new RandomAccessFile(_subDomainMetadataFile, "rw");

    try {

        file.writeByte(0); // version
        file.writeInt(_transientSubDomainStats.size());

        ArrayList<CrawlListMetadata> sortedMetadata = new ArrayList<CrawlListMetadata>();
        sortedMetadata.addAll(_transientSubDomainStats.values());
        _transientSubDomainStats = null;
        CrawlListMetadata metadataArray[] = sortedMetadata.toArray(new CrawlListMetadata[0]);
        Arrays.sort(metadataArray, new Comparator<CrawlListMetadata>() {

            @Override
            public int compare(CrawlListMetadata o1, CrawlListMetadata o2) {
                int result = ((Integer) o2.getUrlCount()).compareTo(o1.getUrlCount());
                if (result == 0) {
                    result = o1.getDomainName().compareTo(o2.getDomainName());
                }
                return result;
            }
        });

        DataOutputBuffer outputBuffer = new DataOutputBuffer(CrawlListMetadata.Constants.FixedDataSize);

        TreeMap<Long, Integer> idToOffsetMap = new TreeMap<Long, Integer>();

        for (CrawlListMetadata entry : metadataArray) {
            // reset output buffer 
            outputBuffer.reset();
            // write item to disk 
            entry.serialize(outputBuffer, new BinaryProtocol());

            if (outputBuffer.getLength() > CrawlListMetadata.Constants.FixedDataSize) {
                LOG.fatal("Metadata Serialization for List:" + getListId() + " SubDomain:"
                        + entry.getDomainName());
                System.out.println("Metadata Serialization for List:" + getListId() + " SubDomain:"
                        + entry.getDomainName());
            }
            // save offset 
            idToOffsetMap.put(entry.getDomainHash(), (int) file.getFilePointer());
            // write out fixed data size 
            file.write(outputBuffer.getData(), 0, CrawlListMetadata.Constants.FixedDataSize);
        }

        // write lookup table 
        _offsetLookupTable = new DataOutputBuffer(idToOffsetMap.size() * OFFSET_TABLE_ENTRY_SIZE);

        for (Map.Entry<Long, Integer> entry : idToOffsetMap.entrySet()) {
            _offsetLookupTable.writeLong(entry.getKey());
            _offsetLookupTable.writeInt(entry.getValue());
        }
    } finally {
        file.close();
    }
    _transientSubDomainStats = null;
}

From source file: gda.scan.ConcurrentScanChild.java

/**
 * Moves to the next step unless start is true, then moves to the start of the current (possibly child) scan.
 * @throws Exception
 */
protected void acquirePoint(boolean start, boolean collectDetectors) throws Exception {

    TreeMap<Integer, Scannable[]> devicesToMoveByLevel;
    if (collectDetectors) {
        devicesToMoveByLevel = generateDevicesToMoveByLevel(scannableLevels, allDetectors);
    } else {
        devicesToMoveByLevel = scannableLevels;
    }

    for (Integer thisLevel : devicesToMoveByLevel.keySet()) {

        Scannable[] scannablesAtThisLevel = devicesToMoveByLevel.get(thisLevel);

        // If there is a detector at this level then wait for detector readout thread to complete
        for (Scannable scannable : scannablesAtThisLevel) {
            if (scannable instanceof Detector) {
                waitForDetectorReadoutAndPublishCompletion();
                break;
            }
        }
        checkThreadInterrupted();

        // trigger at level start on all Scannables
        for (Scannable scannable : scannablesAtThisLevel) {
            scannable.atLevelStart();
        }

        // trigger at level move start on all Scannables
        for (Scannable scannable : scannablesAtThisLevel) {
            if (isScannableToBeMoved(scannable) != null) {
                if (isScannableToBeMoved(scannable).hasStart()) {
                    scannable.atLevelMoveStart();
                }
            }
        }

        // on detectors (technically scannables) that implement DetectorWithReadout call waitForReadoutComplete
        for (Scannable scannable : scannablesAtThisLevel) {
            if (scannable instanceof DetectorWithReadout) {
                if (!detectorWithReadoutDeprecationWarningGiven) {
                    logger.warn(
                            "The DetectorWithReadout interface is deprecated. Set gda.scan.concurrentScan.readoutConcurrently to true instead (after reading the 8.24 release note");
                    detectorWithReadoutDeprecationWarningGiven = true;
                }
                ((DetectorWithReadout) scannable).waitForReadoutCompletion();
            }
        }

        for (Scannable device : scannablesAtThisLevel) {
            if (!(device instanceof Detector)) {
                // does this scan (in a hierarchy of nested scans) operate this scannable?
                ScanObject scanObject = isScannableToBeMoved(device);
                if (scanObject != null) {
                    if (start) {
                        checkThreadInterrupted();
                        scanObject.moveToStart();
                    } else {
                        checkThreadInterrupted();
                        scanObject.moveStep();
                    }
                }
            } else {
                if (callCollectDataOnDetectors) {
                    checkThreadInterrupted();
                    ((Detector) device).collectData();
                }
            }
        }

        // pause here until all the scannables at this level have finished moving
        for (Entry<Integer, Scannable[]> entriesByLevel : devicesToMoveByLevel.entrySet()) {
            Scannable[] scannablesAtLevel = entriesByLevel.getValue();
            for (int i = 0; i < scannablesAtLevel.length; i++) {
                Scannable scn = scannablesAtLevel[i];
                scn.waitWhileBusy();
            }
        }
        for (Scannable scannable : scannablesAtThisLevel) {
            scannable.atLevelEnd();
        }
    }

}

From source file: com.adobe.cq.dialogconversion.datasources.DialogsDataSource.java

private void setDataSource(Resource resource, String path, ResourceResolver resourceResolver,
        SlingHttpServletRequest request, String itemResourceType) throws RepositoryException {
    List<Resource> resources = new ArrayList<Resource>();

    if (StringUtils.isNotEmpty(path)) {
        Session session = request.getResourceResolver().adaptTo(Session.class);
        TreeMap<String, Node> nodeMap = new TreeMap<String, Node>();

        // sanitize path
        path = path.trim();
        if (!path.startsWith("/")) {
            path = "/" + path;
        }

        // First check if the supplied path is a dialog node itself
        if (session.nodeExists(path)) {
            Node node = session.getNode(path);
            DialogType type = DialogRewriteUtils.getDialogType(node);

            if (type != DialogType.UNKNOWN && type != DialogType.CORAL_3) {
                nodeMap.put(node.getPath(), node);
            }
        }

        // If the path does not point to a dialog node, we query for dialog nodes
        if (nodeMap.isEmpty()) {
            String encodedPath = "/".equals(path) ? "" : ISO9075.encodePath(path);
            if (encodedPath.length() > 1 && encodedPath.endsWith("/")) {
                encodedPath = encodedPath.substring(0, encodedPath.length() - 1);
            }
            String classicStatement = "SELECT * FROM [" + NT_DIALOG + "] AS s WHERE ISDESCENDANTNODE(s, '"
                    + encodedPath + "') " + "AND NAME() IN ('" + NameConstants.NN_DIALOG + "', '"
                    + NameConstants.NN_DESIGN_DIALOG + "')";
            String coral2Statement = "SELECT parent.* FROM [nt:unstructured] AS parent INNER JOIN [nt:unstructured] "
                    + "AS child on ISCHILDNODE(child, parent) WHERE ISDESCENDANTNODE(parent, '" + encodedPath
                    + "') " + "AND NAME(parent) IN ('" + NN_CQ_DIALOG + "', '" + NN_CQ_DIALOG
                    + CORAL_2_BACKUP_SUFFIX + "', '" + NN_CQ_DESIGN_DIALOG + "', '" + NN_CQ_DESIGN_DIALOG
                    + CORAL_2_BACKUP_SUFFIX + "') "
                    + "AND NAME(child) = 'content' AND child.[sling:resourceType] NOT LIKE '"
                    + DIALOG_CONTENT_RESOURCETYPE_PREFIX_CORAL3 + "%'";

            QueryManager queryManager = session.getWorkspace().getQueryManager();
            List<Query> queries = new ArrayList<Query>();
            queries.add(queryManager.createQuery(classicStatement, Query.JCR_SQL2));
            queries.add(queryManager.createQuery(coral2Statement, Query.JCR_SQL2));

            for (Query query : queries) {
                NodeIterator iterator = query.execute().getNodes();
                while (iterator.hasNext()) {
                    Node node = iterator.nextNode();
                    Node parent = node.getParent();
                    if (parent != null) {
                        // put design dialogs at a relative key
                        String key = (DialogRewriteUtils.isDesignDialog(node))
                                ? parent.getPath() + "/" + NameConstants.NN_DESIGN_DIALOG
                                : parent.getPath();

                        // backup Coral 2 dialogs shouldn't override non-backup ones
                        if (node.getName().endsWith(CORAL_2_BACKUP_SUFFIX) && nodeMap.get(key) != null) {
                            continue;
                        }

                        nodeMap.put(key, node);
                    }
                }
            }
        }

        int index = 0;
        Iterator<Map.Entry<String, Node>> iterator = nodeMap.entrySet().iterator();

        while (iterator.hasNext()) {
            Map.Entry<String, Node> entry = iterator.next();
            Node dialog = entry.getValue();

            if (dialog == null) {
                continue;
            }

            Node parent = dialog.getParent();

            if (parent == null) {
                continue;
            }

            DialogType dialogType = DialogRewriteUtils.getDialogType(dialog);

            String dialogPath = dialog.getPath();
            String type = dialogType.getString();
            String href = externalizer.relativeLink(request, dialogPath) + ".html";
            String crxHref = externalizer.relativeLink(request, CRX_LITE_PATH) + ".jsp#" + dialogPath;
            boolean isDesignDialog = DialogRewriteUtils.isDesignDialog(dialog);

            // only allow Coral 2 backup dialogs in the result if there's a replacement
            if (dialogType == DialogType.CORAL_2 && dialog.getName().endsWith(CORAL_2_BACKUP_SUFFIX)) {
                if ((!isDesignDialog && !parent.hasNode(NN_CQ_DIALOG))
                        || (isDesignDialog && !parent.hasNode(NN_CQ_DESIGN_DIALOG))) {
                    continue;
                }
            }

            boolean converted = false;
            if (dialogType == DialogType.CLASSIC) {
                converted = isDesignDialog ? parent.hasNode(NN_CQ_DESIGN_DIALOG) : parent.hasNode(NN_CQ_DIALOG);
            } else if (dialogType == DialogType.CORAL_2) {
                converted = dialog.getName().endsWith(CORAL_2_BACKUP_SUFFIX);
            }

            Map<String, Object> map = new HashMap<String, Object>();
            map.put("dialogPath", dialogPath);
            map.put("type", type);
            map.put("href", href);
            map.put("converted", converted);
            map.put("crxHref", crxHref);

            if (converted) {
                Node convertedNode = (isDesignDialog) ? parent.getNode(NN_CQ_DESIGN_DIALOG)
                        : parent.getNode(NN_CQ_DIALOG);
                String touchHref = externalizer.relativeLink(request, convertedNode.getPath()) + ".html";
                String touchCrxHref = externalizer.relativeLink(request, CRX_LITE_PATH) + ".jsp#"
                        + convertedNode.getPath().replaceAll(":", "%3A");
                map.put("touchHref", touchHref);
                map.put("touchCrxHref", touchCrxHref);
            }

            resources.add(new ValueMapResource(resourceResolver, resource.getPath() + "/dialog_" + index,
                    itemResourceType, new ValueMapDecorator(map)));
            index++;
        }
    }

    DataSource ds = new SimpleDataSource(resources.iterator());

    request.setAttribute(DataSource.class.getName(), ds);
}

From source file: com.act.lcms.db.analysis.WaveformAnalysis.java

/**
 * This function takes in a standard molecule's intensity vs. time data and a collection of negative-control data
 * and plots the SNR value at each time period, assuming the time jitter effects are negligible (more info on this
 * is here: https://github.com/20n/act/issues/136). Based on the SNR values, it rank-orders the Metlin ions of the
 * molecule.
 * @param ionToIntensityData A map of chemical to intensity/time data
 * @param standardChemical The chemical that is the standard of analysis
 * @return A sorted linked hash map of Metlin ion to (intensity, time) pairs from highest intensity to lowest
 */
public static LinkedHashMap<String, XZ> performSNRAnalysisAndReturnMetlinIonsRankOrderedBySNR(
        ChemicalToMapOfMetlinIonsToIntensityTimeValues ionToIntensityData, String standardChemical,
        Map<String, List<Double>> restrictedTimeWindows) {

    TreeMap<Double, List<String>> sortedIntensityToIon = new TreeMap<>(Collections.reverseOrder());
    Map<String, XZ> ionToSNR = new HashMap<>();

    for (String ion : ionToIntensityData.getMetlinIonsOfChemical(standardChemical).keySet()) {

        // We first compress the ion spectra by 5 seconds (this number was gotten from trial and error on labelled
        // spectra). Then, we do feature detection of peaks in the compressed data.
        List<XZ> standardIntensityTime = detectPeaksInIntensityTimeWaveform(
                compressIntensityAndTimeGraphsAndFindMaxIntensityInEveryTimeWindow(
                        ionToIntensityData.getMetlinIonsOfChemical(standardChemical).get(ion),
                        COMPRESSION_CONSTANT).getLeft(),
                PEAK_DETECTION_THRESHOLD);

        List<List<XZ>> negativeIntensityTimes = new ArrayList<>();
        for (String chemical : ionToIntensityData.getIonList()) {
            if (!chemical.equals(standardChemical)) {
                negativeIntensityTimes.add(compressIntensityAndTimeGraphsAndFindMaxIntensityInEveryTimeWindow(
                        ionToIntensityData.getMetlinIonsOfChemical(chemical).get(ion), COMPRESSION_CONSTANT)
                                .getLeft());
            }
        }

        List<XZ> rmsOfNegativeValues = rmsOfIntensityTimeGraphs(negativeIntensityTimes);

        List<Double> listOfTimeWindows = new ArrayList<>();
        if (restrictedTimeWindows != null && restrictedTimeWindows.get(ion) != null) {
            listOfTimeWindows.addAll(restrictedTimeWindows.get(ion));
        }

        Boolean canUpdateMaxSNRAndTime = true;
        Boolean useRestrictedTimeWindowAnalysis = false;

        // If there are restricted time windows, set the default to not update SNR until certain conditions are met.
        if (listOfTimeWindows.size() > 0) {
            useRestrictedTimeWindowAnalysis = true;
            canUpdateMaxSNRAndTime = false;
        }

        Double maxSNR = 0.0;
        Double maxTime = 0.0;

        // For each of the peaks detected in the positive control, find the spectral intensity values from the negative
        // controls and calculate SNR based on that.
        for (XZ positivePosition : standardIntensityTime) {

            Double time = positivePosition.getTime();

            XZ negativeControlPosition = null;
            for (XZ position : rmsOfNegativeValues) {
                if (position.getTime() > time - POSITION_TIME_WINDOW_IN_SECONDS
                        && position.getTime() < time + POSITION_TIME_WINDOW_IN_SECONDS) {
                    negativeControlPosition = position;
                    break;
                }
            }

            // Skip peaks with no matching negative-control position to avoid a NullPointerException.
            if (negativeControlPosition == null) {
                continue;
            }

            Double snr = Math.pow(positivePosition.getIntensity() / negativeControlPosition.getIntensity(), 2);

            // If the given time point overlaps with one of the restricted time windows, we can update the snr calculations.
            for (Double restrictedTimeWindow : listOfTimeWindows) {
                if ((time > restrictedTimeWindow - RESTRICTED_RETENTION_TIME_WINDOW_IN_SECONDS)
                        && (time < restrictedTimeWindow + RESTRICTED_RETENTION_TIME_WINDOW_IN_SECONDS)) {
                    canUpdateMaxSNRAndTime = true;
                    break;
                }
            }

            if (canUpdateMaxSNRAndTime) {
                maxSNR = Math.max(maxSNR, snr);
                maxTime = Math.max(maxTime, time);
            }

            if (useRestrictedTimeWindowAnalysis) {
                canUpdateMaxSNRAndTime = false;
            }
        }

        ionToSNR.put(ion, new XZ(maxTime, maxSNR));

        List<String> ionValues = sortedIntensityToIon.get(maxSNR);
        if (ionValues == null) {
            ionValues = new ArrayList<>();
            sortedIntensityToIon.put(maxSNR, ionValues);
        }

        ionValues.add(ion);
    }

    LinkedHashMap<String, XZ> result = new LinkedHashMap<>(sortedIntensityToIon.size());
    for (Map.Entry<Double, List<String>> entry : sortedIntensityToIon.entrySet()) {
        List<String> ions = entry.getValue();
        for (String ion : ions) {
            result.put(ion, ionToSNR.get(ion));
        }
    }

    return result;
}
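The ranking above relies on constructing the TreeMap with Collections.reverseOrder(), so entrySet() yields entries from the highest key (best SNR) to the lowest. A minimal sketch of that idiom (the scores and ion names are invented):

import java.util.Collections;
import java.util.Map;
import java.util.TreeMap;

public class ReverseOrderExample {
    public static void main(String[] args) {
        TreeMap<Double, String> scoreToIon = new TreeMap<Double, String>(Collections.reverseOrder());
        scoreToIon.put(0.9, "M+H");
        scoreToIon.put(0.3, "M+Na");
        scoreToIon.put(0.7, "M+K");

        // Iterates 0.9, 0.7, 0.3 because of the reverse-order comparator.
        for (Map.Entry<Double, String> e : scoreToIon.entrySet()) {
            System.out.println(e.getValue() + ": " + e.getKey());
        }
    }
}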

From source file: org.apache.hadoop.mapred.HFSPScheduler.java

/**
 * Preempt missingSlots number of slots from bigger jobs.
 * 
 * @param jip
 *          job that claims slots
 * @param allJobs
 *          all the size-based jobs in the cluster
 * @param localJobs
 *          size-based jobs that can be immediately suspended
 * @param missingSlots
 *          number of slots to claim
 * @param numToSkip
 *          number of slots on non-local
 * 
 * @return number of tasks preempted in the cluster for jip. The first element
 *         of the tuple is the number of tasks preempted for new tasks, the
 *         second is the number of tasks preempted for tasks to be resumed
 */
private ClaimedSlots claimSlots(HelperForType helper, final Phase phase, final JobInProgress jip,
        int missingNewSlots, int missingResumableSlots, int numToSkip,
        TreeMap<JobDurationInfo, JobInProgress> allJobs, TreeMap<JobDurationInfo, TaskStatuses> localJobs) {

    assert phase == Phase.SIZE_BASED || missingResumableSlots == 0;

    final TaskType type = helper.taskType;
    JobDurationInfo jdi = this.getDuration(jip.getJobID(), type);

    /* #size-based tasks that occupy train slots in the cluster (suspendable) */
    int numTasksToPreempt = 0;
    if (phase == Phase.TRAIN) {
        /** num of size-based tasks that can be suspended for training */
        int numOverflowSizeBasedTasks = helper.maxSizeBasedSlots > helper.runningSizeBasedTasks ? 0
                : helper.runningSizeBasedTasks - helper.maxSizeBasedSlots;

        /* num of size-based tasks to preempt for the training of jip */
        numTasksToPreempt = Math.min(missingNewSlots, numOverflowSizeBasedTasks);
        if (LOG.isDebugEnabled()) {
            LOG.debug(phase.toString() + "(" + jip.getJobID() + ":" + type + "):"
                    + " numOverflowSizeBasedTasks: " + numOverflowSizeBasedTasks + " numTasksToPreempt: "
                    + numTasksToPreempt + " missingNewSlots: " + missingNewSlots + " numTrainTasksForJob: "
                    + helper.numTrainTasksForJob + " canAssignTrain: " + helper.canAssignTrain()
                    + " numToSkip: " + numToSkip);
        }
    } else {
        numTasksToPreempt = missingNewSlots;
        if (LOG.isDebugEnabled()) {
            LOG.debug(phase.toString() + "(" + jip.getJobID() + ":" + type + "):" + " missingNewSlots: "
                    + missingNewSlots + " missingResumableSlots: " + missingResumableSlots
                    + " numTrainTasksForJob: " + helper.numTrainTasksForJob + " canAssignTrain: "
                    + helper.canAssignTrain() + " numToSkip: " + numToSkip);
        }
    }
    final int startingNumTasksToPreemptForNew = numTasksToPreempt;
    final int startingResumableSlots = missingResumableSlots;

    // try to free pendingTasks number of slots among running on this TT
    Iterator<Entry<JobDurationInfo, JobInProgress>> sizeBasedJobsDescIter = allJobs.descendingMap().entrySet()
            .iterator();
    Iterator<Entry<JobDurationInfo, TaskStatuses>> sizeBasedJobsDescIterOnTT = localJobs.entrySet().iterator();

    Entry<JobDurationInfo, TaskStatuses> biggerOnTT = sizeBasedJobsDescIterOnTT.hasNext()
            ? sizeBasedJobsDescIterOnTT.next()
            : null;
    while (this.preemptionStrategy.isPreemptionActive()
            && (numTasksToPreempt > 0 || missingResumableSlots > 0)) {
        if (!sizeBasedJobsDescIter.hasNext()) {
            if (LOG.isDebugEnabled()) {
                LOG.debug(phase.toString() + "(" + jip.getJobID() + ":" + type + "):" + " should preempt "
                        + numTasksToPreempt + " for new tasks and " + missingResumableSlots + " for resumable "
                        + "tasks but no sizeBasedJob is running");
            }
            break;
        }

        Entry<JobDurationInfo, JobInProgress> nextSBJ = sizeBasedJobsDescIter.next();

        JobInProgress jipToPreempt = nextSBJ.getValue();

        /* don't try to suspend if jip is bigger than any other jip */
        if (jdi != null) {

            if (jipToPreempt.getJobID().equals(jip.getJobID())) {
                return new ClaimedSlots(startingNumTasksToPreemptForNew - numTasksToPreempt,
                        startingResumableSlots - missingResumableSlots);
            }

            if (JOB_DURATION_COMPARATOR.compare(nextSBJ.getKey(), jdi) <= 0) {
                if (LOG.isDebugEnabled()) {
                    LOG.debug(phase.toString() + "(" + jip.getJobID() + ":" + type + "):" + " should preempt "
                            + numTasksToPreempt + ", but bigger job avail is " + jip.getJobID() + ".len: "
                            + jdi.getPhaseDuration() + " > " + nextSBJ.getValue().getJobID() + ".len: "
                            + nextSBJ.getKey().getPhaseDuration());
                }
                return new ClaimedSlots(startingNumTasksToPreemptForNew - numTasksToPreempt,
                        startingResumableSlots - missingResumableSlots);
            }
        }

        if (jipToPreempt.getJobID().equals(jip.getJobID())) {
            continue;
        }

        /*
         * don't try to claim slots from a job in training
         * 
         * FIXME: ideally a job can claim slots from a training job until this job
         * has enough tasks for training
         */
        if (!this.isTrained(jipToPreempt, type)) {
            LOG.debug(phase.toString() + "(" + jip.getJobID() + ":" + type + "):" + " ignoring "
                    + jipToPreempt.getJobID() + " because in training");
            continue;
        }

        int numSuspendedOnThisTT = 0;

        /* if jipToPreempt has tasks on this TT, then suspend them */
        if (biggerOnTT != null // && type == TaskType.REDUCE
                && biggerOnTT.getKey().getJobID().equals(nextSBJ.getKey().getJobID())) {

            TreeMap<TaskAttemptID, TaskStatus> preemptableTAIDS = biggerOnTT.getValue().taskStatuses;
            int numPreemptions = Math.min(preemptableTAIDS.size(), missingResumableSlots + numTasksToPreempt);
            for (int i = 0; i < numPreemptions; i++) {
                TaskAttemptID pTAID = preemptableTAIDS.firstKey();
                TaskStatus pTS = preemptableTAIDS.remove(pTAID);
                JobInProgress pJIP = this.taskTrackerManager.getJob(pTAID.getJobID());
                TaskInProgress pTIP = pJIP.getTaskInProgress(pTAID.getTaskID());

                if (type == TaskType.REDUCE) {
                    // if (this.eagerPreemption == PreemptionType.KILL
                    // && pTIP.killTask(pTAID, false)) {
                    // if (missingResumableSlots > 0)
                    // missingResumableSlots -= 1;
                    // else
                    // numTasksToPreempt -= 1;
                    // numSuspendedOnThisTT += 1;
                    // if (jdi == null) {
                    // taskHelper.kill(pTAID, jip.getJobID(), phase);
                    // } else {
                    // taskHelper.kill(pTAID, jip.getJobID(), phase, nextSBJ.getKey(),
                    // jdi);
                    // }
                    // } else if (this.preemptionStrategy.isPreemptionActive()
                    // && this.canBeSuspended(pTS) && pTIP.suspendTaskAttempt(pTAID)) {
                    if (this.preemptionStrategy.isPreemptionActive()
                            && this.preemptionStrategy.canBePreempted(pTS)
                            && this.preemptionStrategy.preempt(pTIP, pTS)) {
                        if (missingResumableSlots > 0)
                            missingResumableSlots -= 1;
                        else
                            numTasksToPreempt -= 1;
                        numSuspendedOnThisTT += 1;
                        if (jdi == null) {
                            taskHelper.suspend(pTAID, jip.getJobID(), phase);
                        } else {
                            taskHelper.suspend(pTAID, jip.getJobID(), phase, nextSBJ.getKey(), jdi);
                        }
                    } else {
                        LOG.debug(phase.toString() + "(" + jip.getJobID() + ":" + type + "): cannot suspend "
                                + pTAID + " for " + jip);
                    }
                }
            }

            if (preemptableTAIDS.size() - numPreemptions <= 0) {
                biggerOnTT = sizeBasedJobsDescIterOnTT.hasNext() ? sizeBasedJobsDescIterOnTT.next() : null;
            }
        }

        /* #tasks that can be preempted */
        int numPreemptibleRunTasks = this.getNumRunningTasks(jipToPreempt, type) - numSuspendedOnThisTT;

        /*
         * Two cases: numToSkip is bigger than the number of preemptible tasks, or it
         * is not. If it is bigger, we skip this preemptible jip; if it is not,
         * numToSkip is set to 0 and we do the real preemption.
         */
        if (numPreemptibleRunTasks <= numToSkip) {
            numToSkip -= numPreemptibleRunTasks;
        } else {
            /* #tasks that can be preempted by jip */
            int numPreemptibleByJIPRunTasks = numPreemptibleRunTasks - numToSkip;

            numToSkip = 0;

            /* #tasks that will be preempted by jip on other TTs */
            int numRunTasksEventuallyPreemptedByJIP = Math.min(numTasksToPreempt, numPreemptibleByJIPRunTasks);

            numTasksToPreempt -= numRunTasksEventuallyPreemptedByJIP;
        }
    }

    return new ClaimedSlots(startingNumTasksToPreemptForNew - numTasksToPreempt,
            startingResumableSlots - missingResumableSlots);
}
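The scheduler above walks jobs from biggest to smallest via allJobs.descendingMap().entrySet(). A minimal sketch of that descending-iteration idiom (job names and durations are invented for illustration):

import java.util.Map;
import java.util.TreeMap;

public class DescendingEntrySetExample {
    public static void main(String[] args) {
        // Key: job duration in seconds; value: job name. TreeMap keeps keys ascending.
        TreeMap<Long, String> jobsByDuration = new TreeMap<Long, String>();
        jobsByDuration.put(120L, "job-a");
        jobsByDuration.put(45L, "job-b");
        jobsByDuration.put(300L, "job-c");

        // descendingMap() is a reverse-order view; entrySet() on it iterates
        // from the longest job (300) down to the shortest (45).
        for (Map.Entry<Long, String> e : jobsByDuration.descendingMap().entrySet()) {
            System.out.println(e.getValue() + " runs for " + e.getKey() + "s");
        }
    }
}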

From source file: com.wasteofplastic.askyblock.commands.IslandCmd.java

/**
 * Shows available languages to the player
 * @param player
 */
private void displayLocales(Player player) {
    TreeMap<Integer, String> langs = new TreeMap<Integer, String>();
    for (ASLocale locale : plugin.getAvailableLocales().values()) {
        if (!locale.getLocaleName().equalsIgnoreCase("locale")) {
            langs.put(locale.getIndex(), locale.getLanguageName() + " (" + locale.getCountryName() + ")");
        }
    }
    for (Entry<Integer, String> entry : langs.entrySet()) {
        player.sendMessage(entry.getKey() + ": " + entry.getValue());
    }
}

From source file: org.fife.ui.rsyntaxtextarea.EOLPreservingRSyntaxDocument.java

private InsertStringResult insertStringImpl(int offset, String str, AttributeSet a)
        throws BadLocationException {
    if (StringUtils.isEmpty(str)) {
        return new InsertStringResult(str, null);
    }

    PeekReader reader = null;
    CompoundEdit edit = new CompoundEdit();

    try {
        reader = new PeekReader(new StringReader(str));
        StringBuilder builder = new StringBuilder();
        TreeMap<Integer, char[]> tempMap = new TreeMap<Integer, char[]>();
        char[] buff = new char[1024];
        int nch;
        int cOffset = offset;
        boolean wasCR = false;

        while ((nch = reader.read(buff, 0, buff.length)) != -1) {
            for (int i = 0; i < nch; i++) {
                char c = buff[i];

                if (c == '\r') {
                    boolean updated = false;
                    if (i == nch - 1 && !reader.peek() && Arrays.equals(eolMap.get(offset), LF)) {
                        edit.addEdit(new ChangeEOLEdit(offset, CRLF));
                        eolMap.put(offset, CRLF);
                        updated = true;
                    }

                    if (!updated && (wasCR || i == nch - 1 && !reader.peek())) {
                        // Insert CR
                        tempMap.put(cOffset++, CR);
                        builder.append(LF);
                    }

                    wasCR = true;
                } else if (c == '\n') {
                    boolean updated = false;
                    if (cOffset == offset) {
                        if (Arrays.equals(eolMap.get(offset - 1), CR)) {
                            edit.addEdit(new ChangeEOLEdit(offset - 1, CRLF));
                            eolMap.put(offset - 1, CRLF);
                            updated = true;
                        }
                    }

                    if (!updated) {
                        if (wasCR) {
                            // Insert CRLF
                            tempMap.put(cOffset++, CRLF);
                            builder.append(LF);
                        } else {
                            // Insert LF
                            tempMap.put(cOffset++, LF);
                            builder.append(LF);
                        }
                    }

                    wasCR = false;
                } else if (replaceControlCharacters && c != '\t' && (c < ' ' || c == 0x7F)) {
                    if (wasCR) {
                        // Insert previous CR
                        tempMap.put(cOffset++, CR);
                        builder.append(LF);
                    }

                    // Insert control character
                    cOffset++;
                    builder.append((char) (c == 0x7F ? '\u2421' : '\u2400' + c));
                    wasCR = false;
                } else {
                    if (wasCR) {
                        // Insert previous CR
                        tempMap.put(cOffset++, CR);
                        builder.append(LF);
                    }

                    // Insert regular character
                    cOffset++;
                    builder.append(c);
                    wasCR = false;
                }
            }
        }

        str = builder.toString();

        Integer key = eolMap.isEmpty() ? null : eolMap.lastKey();
        while (key != null && key >= offset) {
            edit.addEdit(new ChangeEOLEdit(key, null));
            char[] eol = eolMap.remove(key);

            int newKey = key + str.length();
            edit.addEdit(new ChangeEOLEdit(newKey, eol));
            eolMap.put(newKey, eol);

            key = eolMap.lowerKey(key);
        }

        for (Entry<Integer, char[]> entry : tempMap.entrySet()) {
            edit.addEdit(new ChangeEOLEdit(entry.getKey(), entry.getValue()));
        }
        eolMap.putAll(tempMap);
    } catch (IOException e) {
        // Only using a StringReader, so should not happen
    } finally {
        if (reader != null) {
            try {
                reader.close();
            } catch (IOException e) {
                // ignore; closing a StringReader-backed reader cannot meaningfully fail
            }
        }
    }

    edit.end();
    return new InsertStringResult(str, edit);
}
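The loop above that shifts EOL markers walks the map backwards using lastKey() and lowerKey(), two of TreeMap's navigation methods. A small sketch of that backwards walk (the offsets and markers are invented):

import java.util.TreeMap;

public class BackwardWalkExample {
    public static void main(String[] args) {
        TreeMap<Integer, String> eolMap = new TreeMap<Integer, String>();
        eolMap.put(5, "LF");
        eolMap.put(12, "CRLF");
        eolMap.put(20, "CR");

        // Visit keys in descending order, starting from the largest.
        Integer key = eolMap.isEmpty() ? null : eolMap.lastKey();
        while (key != null) {
            System.out.println(key + " -> " + eolMap.get(key));
            key = eolMap.lowerKey(key); // strictly smaller key, or null when done
        }
    }
}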

From source file: com.edgenius.wiki.render.handler.PageIndexHandler.java

public List<RenderPiece> handle(RenderContext renderContext, Map<String, String> values)
        throws RenderHandlerException {
    if (indexer == null) {
        log.warn("Unable to find valid page for index");
        throw new RenderHandlerException("Unable to find valid page for index");
    }

    List<String> filters = getFilterList(values != null ? StringUtils.trimToNull(values.get("filter")) : null);
    List<String> filtersOut = getFilterList(
            values != null ? StringUtils.trimToNull(values.get("filterout")) : null);

    //temporary cache for the indexer after filtering
    TreeMap<Character, Integer> indexerFiltered = new TreeMap<Character, Integer>();
    List<RenderPiece> listPieces = new ArrayList<RenderPiece>();
    //render each character list
    Character indexKey = null;
    boolean requireEndOfMacroPageIndexList = false;
    for (Entry<String, LinkModel> entry : indexMap.entrySet()) {
        String title = entry.getKey();

        if (filters.size() > 0) {
            boolean out = false;
            for (String filter : filters) {
                if (!FilenameUtils.wildcardMatch(title.toLowerCase(), filter.toLowerCase())) {
                    out = true;
                    break;
                }
            }
            if (out)
                continue;
        }

        if (filtersOut.size() > 0) {
            boolean out = false;
            for (String filterOut : filtersOut) {
                if (FilenameUtils.wildcardMatch(title.toLowerCase(), filterOut.toLowerCase())) {
                    out = true;
                    break;
                }
            }
            if (out)
                continue;
        }

        Character first = Character.toUpperCase(title.charAt(0));
        if (!first.equals(indexKey)) {
            if (requireEndOfMacroPageIndexList) {
                listPieces.add(new TextModel("</div>")); //macroPageIndexList
            }
            Integer anchorIdx = indexer.get(first);
            indexKey = first;
            if (anchorIdx != null) {
                indexerFiltered.put(first, anchorIdx);
                listPieces.add(new TextModel(new StringBuilder().append(
                        "<div class=\"macroPageIndexList\"><div class=\"macroPageIndexKey\" id=\"pageindexanchor-")
                        .append(anchorIdx).append("\">").append(first).toString()));
                requireEndOfMacroPageIndexList = true;
                //up image line to return top
                if (RenderContext.RENDER_TARGET_PAGE.equals(renderContext.getRenderTarget())) {
                    LinkModel back = new LinkModel();
                    back.setAnchor("pageindexanchor-0");
                    back.setAid("Go back index character list");
                    back.setView(renderContext.buildSkinImageTag("render/link/up.png", NameConstants.AID,
                            SharedConstants.NO_RENDER_TAG));
                    listPieces.add(back);
                }

                listPieces.add(new TextModel("</div>"));//macroPageIndexKey
            } else {
                log.error("Unable to page indexer for {}", indexKey);
            }
        }
        listPieces.add(new TextModel("<div class=\"macroPageIndexLink\">"));
        LinkModel link = entry.getValue();
        link.setLinkTagStr(renderContext.buildURL(link));
        listPieces.add(link);
        listPieces.add(new TextModel("</div>"));//macroPageIndexLink

    }
    if (requireEndOfMacroPageIndexList) {
        listPieces.add(new TextModel("</div>")); //macroPageIndexList
    }

    //render the summary of characters - although it displays before the page list, filters may hide
    //some characters, so it must be rendered after the other pieces
    List<RenderPiece> pieces = new ArrayList<RenderPiece>();

    StringBuffer sbuf = new StringBuffer("<div aid=\"pageindex\" class=\"macroPageIndex ")
            .append(WikiConstants.mceNonEditable).append("\"");
    if (values != null && values.size() > 0) {
        sbuf.append(" wajax=\"")
                .append(RichTagUtil.buildWajaxAttributeString(this.getClass().getName(), values)).append("\" ");
    }
    sbuf.append("><div id=\"pageindexanchor-0\" class=\"macroPageIndexKeys\">");

    pieces.add(new TextModel(sbuf.toString()));
    for (Entry<Character, Integer> entry : indexerFiltered.entrySet()) {
        LinkModel anchor = new LinkModel();
        anchor.setView(entry.getKey().toString());
        anchor.setAnchor("pageindexanchor-" + entry.getValue());
        anchor.setLinkTagStr(renderContext.buildURL(anchor));
        pieces.add(anchor);
    }
    pieces.add(new TextModel("</div>")); //macroPageIndexKeys
    pieces.addAll(listPieces);
    pieces.add(new TextModel("</div>")); //macroPageIndex

    return pieces;

}