Example usage for java.util SortedMap get

Introduction

This page collects example usages of java.util SortedMap.get, drawn from open-source projects.

Prototype

V get(Object key);

Document

Returns the value to which the specified key is mapped, or null if this map contains no mapping for the key.
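
A minimal sketch of this contract, using TreeMap (the standard SortedMap implementation): get returns the mapped value for a present key and null for an absent one.

import java.util.SortedMap;
import java.util.TreeMap;

public class SortedMapGetDemo {
    public static void main(String[] args) {
        SortedMap<String, Integer> map = new TreeMap<String, Integer>();
        map.put("a", 1);
        map.put("b", 2);

        System.out.println(map.get("a")); // 1
        System.out.println(map.get("c")); // null: no mapping for "c"
    }
}

Note that a null result cannot distinguish an absent key from a key explicitly mapped to null; containsKey makes that distinction.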

Usage

From source file:com.aurel.track.exchange.track.importer.ImporterDropdownParser.java

private void addLabelBean(SortedMap<String, List<ISerializableLabelBean>> dropDowns,
        Map<String, String> attributesMap) {
    String strExternalFieldID = attributesMap.get(ExchangeFieldNames.FIELDID);
    String strParameterCode = attributesMap.get(ExchangeFieldNames.PARAMETERCODE);
    Integer parameterCode = null;
    if (strParameterCode != null) {
        parameterCode = new Integer(strParameterCode);
    }
    Integer internalFieldID;
    Integer externalFieldID = new Integer(strExternalFieldID);
    if (fieldMatcher.get(externalFieldID) != null) {
        internalFieldID = fieldMatcher.get(externalFieldID);
    } else {
        LOGGER.warn("No matcher field found for externalFieldID " + externalFieldID);
        internalFieldID = externalFieldID;
    }
    IFieldTypeRT fieldTypeRT = FieldTypeManager.getFieldTypeRT(internalFieldID, parameterCode);
    if (fieldTypeRT == null) {
        missingFields.add(externalFieldID);
        return;
    }
    ILookup lookup = (ILookup) fieldTypeRT;
    ISerializableLabelBean labelBean = lookup.deserializeBean(attributesMap);
    if (labelBean == null) {
        LOGGER.warn("Deserialized labelBean is null for field " + strExternalFieldID);
    } else {
        String mergeKey = MergeUtil.mergeKey(strExternalFieldID, strParameterCode);
        List<ISerializableLabelBean> labelBeanList = dropDowns.get(mergeKey);
        if (labelBeanList == null) {
            labelBeanList = new ArrayList<ISerializableLabelBean>();
            dropDowns.put(mergeKey, labelBeanList);
        }
        labelBeanList.add(labelBean);
    }
}
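
In the method above, the null returned by dropDowns.get(mergeKey) doubles as an existence check before a fresh list is created and put back. On Java 8 and later, the same get-or-create step can be collapsed with Map.computeIfAbsent; a minimal equivalent sketch, reusing the names from the method above:

        // creates and stores the list only when get would have returned null
        List<ISerializableLabelBean> labelBeanList =
                dropDowns.computeIfAbsent(mergeKey, k -> new ArrayList<>());
        labelBeanList.add(labelBean);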

From source file:rrlFramework.RRLExperiment.java

/**
 * Compiles the temporary per-run performance files together into a single
 * file, detailing the average, min and max performances.
 * 
 * @param performanceFile
 *            The performance file to combine the runs into.
 * @param runEnd
 *            The last run.
 * @param experimentStart
 *            The time at which the experiment started.
 * @return The average run time over all runs.
 */
private long combineTempFiles(File performanceFile, int runEnd, long experimentStart) throws Exception {
    List<List<Float[]>> performances = new ArrayList<List<Float[]>>();
    float min = Float.MAX_VALUE;
    int minRun = -1;
    float max = -Float.MAX_VALUE;
    int maxRun = -1;
    double[] episodeLengths = new double[runEnd];
    double[] numSlots = new double[runEnd];
    long averageRunTime = 0;

    File combinedPerfFile = performanceFile;
    if (Config.getInstance().getGeneratorFile() != null) {
        combinedPerfFile = new File(performanceFile.getAbsolutePath() + "greedy");
        ProgramArgument.PERFORMANCE_EPISODE_GAP
                .setDoubleValue(ProgramArgument.PERFORMANCE_TESTING_SIZE.intValue()
                        * ProgramArgument.POLICY_REPEATS.intValue());
    }
    if (!combinedPerfFile.exists())
        combinedPerfFile.createNewFile();
    // For every performance file
    for (int i = 0; i < runEnd; i++) {
        File tempPerf = new File(Config.TEMP_FOLDER + "/" + performanceFile + i);
        if (!Performance.readRawPerformanceFile(tempPerf, true)) {
            System.err.println("Error reading performance file.");
            return 0;
        }

        List<Float[]> thisRunPerformances = new ArrayList<Float[]>();
        performances.add(thisRunPerformances);

        // Run through the performances and place them in the matrix
        SortedMap<Integer, Float[]> runPerformances = Performance.getPerformanceArray();
        averageRunTime += Performance.getRunTime();
        Iterator<Integer> iter = runPerformances.keySet().iterator();
        Integer current = iter.next();
        Integer previous = null;
        int currentKeyframeEpisode = ProgramArgument.PERFORMANCE_EPISODE_GAP.intValue();
        // Run through the performances, using linear interpolation to
        // get estimates of the performance at a given interval.
        do {
            // If the current segment is further along than the current
            // value, advance to the next value.
            while (currentKeyframeEpisode > current) {
                previous = current;
                if (iter.hasNext())
                    current = iter.next();
                else
                    break;
            }

            // If the keyframe isn't up to the first episode, just use
            // the current value
            Float[] episodePerformance = runPerformances.get(current);
            if (previous == null) {
                // No previous value yet, so use the current value as-is.
                thisRunPerformances.add(episodePerformance);
            } else {
                // Interpolate from the previous value to the current
                // one.
                Float[] interpolatedPerformance = new Float[episodePerformance.length];
                if (previous == current) {
                    interpolatedPerformance = episodePerformance;
                } else {
                    Float[] prevPerformance = runPerformances.get(previous);

                    for (int j = 0; j < episodePerformance.length; j++) {
                        Float currPerf = episodePerformance[j];
                        Float prevPerf = prevPerformance[j];
                        // Adjust for null elites
                        if (j == PerformanceDetails.ELITEMAX.ordinal()
                                || j == PerformanceDetails.ELITEMEAN.ordinal()) {
                            if (currPerf == null)
                                currPerf = episodePerformance[PerformanceDetails.MEAN.ordinal()];
                            if (prevPerf == null)
                                prevPerf = prevPerformance[PerformanceDetails.MEAN.ordinal()];
                        }

                        if (currPerf == null || prevPerf == null)
                            interpolatedPerformance[j] = null;
                        else
                            interpolatedPerformance[j] = (currPerf - prevPerf)
                                    * (1f * (currentKeyframeEpisode - previous) / (current - previous))
                                    + prevPerf;
                    }
                }

                // Add to the performances
                thisRunPerformances.add(interpolatedPerformance);
            }

            // To the next increment
            currentKeyframeEpisode += ProgramArgument.PERFORMANCE_EPISODE_GAP.intValue();
        } while (currentKeyframeEpisode <= runPerformances.lastKey());
        Float[] lastPerf = runPerformances.get(runPerformances.lastKey());
        thisRunPerformances.add(lastPerf);
        System.out.println(runPerformances.get(runPerformances.lastKey())[PerformanceDetails.MEAN.ordinal()]);

        // Find min or max runs
        float runVal = runPerformances.get(runPerformances.lastKey())[PerformanceDetails.MEAN.ordinal()];
        if (runVal < min) {
            min = runVal;
            minRun = i;
        }
        if (runVal > max) {
            max = runVal;
            maxRun = i;
        }
        episodeLengths[i] = runPerformances.lastKey();
    }

    // Calculate the average and print out the stats
    FileWriter writer = new FileWriter(combinedPerfFile);
    BufferedWriter buf = new BufferedWriter(writer);
    Config.writeFileHeader(buf, Config.getInstance().getGoal());

    buf.write(
            "Episode\tAverage\tSD\tMin\tMax\tElite-Average\tElite-SD\tNumSlots\tSlots-SD\tNumRules\tRules-SD\n");
    boolean moreEpisodes = true;
    int index = 0;
    Mean mean = new Mean();
    StandardDeviation sd = new StandardDeviation();
    while (moreEpisodes) {
        moreEpisodes = false;
        // Compile the array of performances for the given index
        double[][] performanceArray = new double[PerformanceDetails.values().length][performances.size()];
        double maxVal = 0;
        double minVal = 0;
        for (int run = 0; run < performances.size(); run++) {
            List<Float[]> runPerformanceList = performances.get(run);
            int thisIndex = Math.min(index, runPerformanceList.size() - 1);
            if (index < runPerformanceList.size() - 1)
                moreEpisodes = true;
            Float[] performanceDetails = runPerformanceList.get(thisIndex);
            for (int j = 0; j < performanceDetails.length; j++) {
                if (performanceDetails[j] != null)
                    performanceArray[j][run] = performanceDetails[j];
            }

            // Max and min
            if (run == minRun)
                minVal = performanceArray[PerformanceDetails.MEAN.ordinal()][run];
            if (run == maxRun)
                maxVal = performanceArray[PerformanceDetails.MEAN.ordinal()][run];
        }

        // Find the statistics
        int episodeNum = (index + 1) * ProgramArgument.PERFORMANCE_EPISODE_GAP.intValue();
        buf.write(episodeNum + "\t" + mean.evaluate(performanceArray[PerformanceDetails.MEAN.ordinal()]) + "\t"
                + sd.evaluate(performanceArray[PerformanceDetails.MEAN.ordinal()]) + "\t" + minVal + "\t"
                + maxVal + "\t" + mean.evaluate(performanceArray[PerformanceDetails.ELITEMEAN.ordinal()]) + "\t"
                + sd.evaluate(performanceArray[PerformanceDetails.ELITEMEAN.ordinal()]) + "\t"
                + mean.evaluate(performanceArray[PerformanceDetails.NUMSLOTS.ordinal()]) + "\t"
                + sd.evaluate(performanceArray[PerformanceDetails.NUMSLOTS.ordinal()]) + "\t"
                + mean.evaluate(performanceArray[PerformanceDetails.NUMRULES.ordinal()]) + "\t"
                + sd.evaluate(performanceArray[PerformanceDetails.NUMRULES.ordinal()]) + "\n");
        index++;
    }

    averageRunTime /= runEnd;
    buf.write("Average Run Time: " + toTimeFormat(averageRunTime) + "\n");

    // Write the average episode length
    buf.write("\nAverage episode length: " + mean.evaluate(episodeLengths) + " +- "
            + sd.evaluate(episodeLengths) + "\n");
    buf.write("\nAverage num slots: " + mean.evaluate(numSlots) + " +- " + sd.evaluate(numSlots) + "\n");

    buf.close();
    writer.close();
    return averageRunTime;
}

From source file:com.aurel.track.report.dashboard.StatusOverTimeGraph.java

/**
 * Computes the hierarchical data for status changes.
 *
 * @return a map of year to period to statusID to status change count
 */
public static SortedMap<Integer, SortedMap<Integer, Map<Integer, Integer>>> calculateTotalInStatus(
        int[] workItemIDs, Date dateFrom, Date dateTo, List<Integer> statusIDs, int selectedTimeInterval,
        Locale locale) {
    SortedMap<Integer, SortedMap<Integer, Map<Integer, Integer>>> yearToPeriodToStatusIDToStatusNumbersMap = new TreeMap<Integer, SortedMap<Integer, Map<Integer, Integer>>>();

    if (statusIDs != null && statusIDs.isEmpty()) {
        LOGGER.warn("No status specified");
        return yearToPeriodToStatusIDToStatusNumbersMap;
    }
    Set<Integer> statusIDsSet = GeneralUtils.createIntegerSetFromIntegerList(statusIDs);
    if (workItemIDs == null || workItemIDs.length == 0) {
        // LOGGER.warn("No issues satisfy the filtering condition (read right revoked, project/release deleted?)");
        return yearToPeriodToStatusIDToStatusNumbersMap;
    }

    Map<Integer, Integer> statusForWorkItems = new HashMap<Integer, Integer>();
    List<HistorySelectValues> historySelectValuesBefore = null;
    if (dateFrom != null) {
        //get all status changes till the beginning of the reporting period
        //include all statuses (not just the selected ones)
        //because we are interested only in the status at the end of each period
        historySelectValuesBefore = HistoryTransactionBL.getByWorkItemsFieldNewValuesDates(workItemIDs,
                SystemFields.INTEGER_STATE, null, null, dateFrom);
    }
    //get all status changes for the reporting period
    //include all statuses (not just the selected ones)
    //because we are interested only in the status at the end of each period
    List<HistorySelectValues> historySelectValuesReportingPeriod = HistoryTransactionBL
            .getByWorkItemsFieldNewValuesDates(workItemIDs, SystemFields.INTEGER_STATE, null, dateFrom, dateTo);
    SortedMap<Integer, SortedMap<Integer, List<HistorySelectValues>>> periodStatusChangesReportingPeriod = getStatusChangesMap(
            historySelectValuesReportingPeriod, selectedTimeInterval, true/*, statusIDs*/);

    Integer year = null;
    Integer period = null;
    Iterator yearIterator;

    //calculate the values for the beginning of the first reporting period
    if (historySelectValuesBefore != null) {
        //get the first year and period
        if (dateFrom != null) {
            //explicit dateFrom specified by user
            Calendar calendar = Calendar.getInstance();
            calendar.setTime(dateFrom);
            year = Integer.valueOf(calendar.get(Calendar.YEAR));
            int calendarInterval = getCalendarInterval(selectedTimeInterval);
            period = Integer.valueOf(calendar.get(calendarInterval));
        } else {
            //no explicit dateFrom specified by the user, get the first found entry in the history
            yearIterator = periodStatusChangesReportingPeriod.keySet().iterator();
            if (yearIterator.hasNext()) {
                year = (Integer) yearIterator.next();
                SortedMap<Integer, List<HistorySelectValues>> intervalToStatusChangeBeans = periodStatusChangesReportingPeriod
                        .get(year);
                Iterator<Integer> periodIterator = intervalToStatusChangeBeans.keySet().iterator();
                period = periodIterator.next();
            }
        }

        if (year == null || period == null) {
            //nothing found
            return yearToPeriodToStatusIDToStatusNumbersMap;
        }

        Iterator<HistorySelectValues> iterator = historySelectValuesBefore.iterator();
        while (iterator.hasNext()) {
            //count the workItems in status till the beginning of the reporting period
            HistorySelectValues historySelectValues = iterator.next();
            Integer workItemID = historySelectValues.getWorkItemID();
            Integer statusID = historySelectValues.getNewValue();
            if (statusForWorkItems.get(workItemID) == null) {
                //take into account only the last stateChange for the workItem
                statusForWorkItems.put(workItemID, statusID);
                if (statusIDsSet.contains(statusID)) {
                    //count only if selected status
                    setCount(yearToPeriodToStatusIDToStatusNumbersMap, year, period, statusID, 1);
                }
            }
        }
    }
    yearIterator = periodStatusChangesReportingPeriod.keySet().iterator();
    while (yearIterator.hasNext()) {
        year = (Integer) yearIterator.next();
        SortedMap intervalToStatusChangeBeans = periodStatusChangesReportingPeriod.get(year);
        Iterator<Integer> periodIterator = intervalToStatusChangeBeans.keySet().iterator();
        while (periodIterator.hasNext()) {
            period = periodIterator.next();
            List statusChangeBeansForInterval = (List) intervalToStatusChangeBeans.get(period);
            if (statusChangeBeansForInterval != null) {
                Iterator statusChangeBeansIterator = statusChangeBeansForInterval.iterator();
                while (statusChangeBeansIterator.hasNext()) {
                    HistorySelectValues historySelectValues = (HistorySelectValues) statusChangeBeansIterator
                            .next();
                    Integer workItemID = historySelectValues.getWorkItemID();
                    Integer nextStatusID = historySelectValues.getNewValue();
                    Integer previousStatus = statusForWorkItems.get(workItemID);
                    if (previousStatus == null) {
                        //probably the item was created in the actual period
                        statusForWorkItems.put(workItemID, nextStatusID);
                        if (statusIDsSet.contains(nextStatusID)) {
                            setCount(yearToPeriodToStatusIDToStatusNumbersMap, year, period, nextStatusID, 1);
                        }
                    } else {
                        if (!previousStatus.equals(nextStatusID)) {
                            statusForWorkItems.put(workItemID, nextStatusID);
                            //add as new status
                            if (statusIDsSet.contains(nextStatusID)) {
                                setCount(yearToPeriodToStatusIDToStatusNumbersMap, year, period, nextStatusID,
                                        1);
                            }
                            //decrement the count for the previous status
                            if (statusIDsSet.contains(previousStatus)) {
                                setCount(yearToPeriodToStatusIDToStatusNumbersMap, year, period, previousStatus,
                                        -1);
                            }
                        }
                    }
                }
            }
        }
    }
    addZerosForEmptyIntervals(dateFrom, dateTo, selectedTimeInterval, yearToPeriodToStatusIDToStatusNumbersMap,
            statusIDs);
    //addTimeSeries(timeSeriesCollection, yearToPeriodToStatusIDToStatusNumbersMap, statusMap, selectedTimeInterval, true);
    return yearToPeriodToStatusIDToStatusNumbersMap;
}
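
Reading a single count back out of the returned structure takes a chain of get calls with a null check at each level, since any year, period or status may be absent; a minimal sketch (the year, period and statusID values here are hypothetical):

    SortedMap<Integer, SortedMap<Integer, Map<Integer, Integer>>> result = calculateTotalInStatus(
            workItemIDs, dateFrom, dateTo, statusIDs, selectedTimeInterval, locale);
    SortedMap<Integer, Map<Integer, Integer>> periodMap = result.get(Integer.valueOf(2015));
    if (periodMap != null) {
        Map<Integer, Integer> statusCounts = periodMap.get(Integer.valueOf(3));
        if (statusCounts != null) {
            Integer count = statusCounts.get(statusID); // null if this status was never counted
        }
    }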

From source file:org.jahia.services.render.scripting.bundle.BundleScriptResolver.java

/**
 * Method for registering a new resource view for a bundle.
 *
 * @param bundle the bundle to register views for
 * @param path   the path of the view to register
 */
public void addBundleScript(Bundle bundle, String path) {
    if (path.split("/").length != 4) {
        return;
    }
    ViewResourceInfo scriptResource = new ViewResourceInfo(path);
    final String symbolicName = bundle.getSymbolicName();
    SortedMap<String, ViewResourceInfo> existingBundleScripts = availableScripts.get(symbolicName);
    if (existingBundleScripts == null) {
        existingBundleScripts = new TreeMap<>();
        availableScripts.put(symbolicName, existingBundleScripts);
        existingBundleScripts.put(scriptResource.path, scriptResource);
    } else if (!existingBundleScripts.containsKey(scriptResource.path)) {
        existingBundleScripts.put(scriptResource.path, scriptResource);
    } else {
        // if we already have a script resource available, retrieve it to make sure we update it with new properties
        // this is required because the properties file may not be found when the view is first processed,
        // due to the file ordering in the ModulesDataSource.start.process method.
        scriptResource = existingBundleScripts.get(scriptResource.path);
    }

    String properties = StringUtils.substringBeforeLast(path, ".") + ".properties";
    final URL propertiesResource = bundle.getResource(properties);
    if (propertiesResource != null) {
        Properties p = new Properties();
        try {
            p.load(propertiesResource.openStream());
        } catch (IOException e) {
            logger.error("Cannot read properties", e);
        }
        scriptResource.setProperties(p);
    } else {
        scriptResource.setProperties(new Properties());
    }
    clearCaches();
}

From source file:com.alibaba.cobar.client.CobarSqlMapClientTemplate.java

@Override
public int delete(final String statementName, final Object parameterObject) throws DataAccessException {
    auditSqlIfNecessary(statementName, parameterObject);

    long startTimestamp = System.currentTimeMillis();
    try {
        if (isPartitioningBehaviorEnabled()) {
            SortedMap<String, DataSource> dsMap = lookupDataSourcesByRouter(statementName, parameterObject);
            if (!MapUtils.isEmpty(dsMap)) {

                SqlMapClientCallback action = new SqlMapClientCallback() {
                    public Object doInSqlMapClient(SqlMapExecutor executor) throws SQLException {
                        return executor.delete(statementName, parameterObject);
                    }
                };

                if (dsMap.size() == 1) {
                    DataSource dataSource = dsMap.get(dsMap.firstKey());
                    return (Integer) executeWith(dataSource, action);
                } else {
                    List<Object> results = executeInConcurrency(action, dsMap);
                    Integer rowsAffected = 0;
                    for (Object item : results) {
                        rowsAffected += (Integer) item;
                    }
                    return rowsAffected;
                }
            }
        } // end if for partitioning status checking
        return super.delete(statementName, parameterObject);
    } finally {
        if (isProfileLongTimeRunningSql()) {
            long interval = System.currentTimeMillis() - startTimestamp;
            if (interval > getLongTimeRunningSqlIntervalThreshold()) {
                logger.warn(
                        "SQL Statement [{}] with parameter object [{}] ran out of the normal time range, it consumed [{}] milliseconds.",
                        new Object[] { statementName, parameterObject, interval });
            }
        }
    }
}
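
When the router resolves to a single shard, the method above reads the lone entry with dsMap.get(dsMap.firstKey()). A minimal standalone sketch of that idiom (firstKey throws NoSuchElementException on an empty map, which the MapUtils.isEmpty check above rules out):

    SortedMap<String, String> shards = new TreeMap<String, String>();
    shards.put("partition2", "ds2");
    shards.put("partition1", "ds1");

    // keys are kept sorted, so firstKey() is "partition1"
    String first = shards.get(shards.firstKey()); // "ds1"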

From source file:org.apache.hadoop.hbase.regionserver.transactional.THLogRecoveryManager.java

/**
 * Go through the WAL, and look for transactions that were started, but never
 * completed. If the transaction was committed, then those edits will need to
 * be applied.
 * 
 * @param reconstructionLog
 * @param maxSeqID
 * @param reporter
 * @return map of batch updates
 * @throws UnsupportedEncodingException
 * @throws IOException
 */
public Map<Long, List<KeyValue>> getCommitsFromLog(final Path reconstructionLog, final long maxSeqID,
        final Progressable reporter) throws UnsupportedEncodingException, IOException {
    if (reconstructionLog == null || !fileSystem.exists(reconstructionLog)) {
        // Nothing to do.
        return null;
    }
    // Check it's not empty.
    FileStatus[] stats = fileSystem.listStatus(reconstructionLog);
    if (stats == null || stats.length == 0) {
        LOG.warn("Passed reconstruction log " + reconstructionLog + " is zero-length");
        return null;
    }

    SortedMap<Long, List<KeyValue>> pendingTransactionsById = new TreeMap<Long, List<KeyValue>>();
    Set<Long> committedTransactions = new HashSet<Long>();
    Set<Long> abortedTransactions = new HashSet<Long>();

    SequenceFile.Reader logReader = new SequenceFile.Reader(fileSystem, reconstructionLog, conf);

    try {
        THLogKey key = new THLogKey();
        KeyValue val = new KeyValue();
        long skippedEdits = 0;
        long totalEdits = 0;
        long startCount = 0;
        long writeCount = 0;
        long abortCount = 0;
        long commitCount = 0;
        // How many edits to apply before we send a progress report.
        int reportInterval = conf.getInt("hbase.hstore.report.interval.edits", 2000);

        while (logReader.next(key, val)) {
            if (LOG.isTraceEnabled()) {
                LOG.trace("Processing edit: key: " + key.toString() + " val: " + val.toString());
            }
            if (key.getLogSeqNum() < maxSeqID) {
                skippedEdits++;
                continue;
            }

            if (key.getTrxOp() == null || !Bytes.equals(key.getRegionName(), regionInfo.getRegionName())) {
                continue;
            }
            long transactionId = key.getTransactionId();

            List<KeyValue> updates = pendingTransactionsById.get(transactionId);
            switch (key.getTrxOp()) {

            case OP:
                if (updates == null) {
                    updates = new ArrayList<KeyValue>();
                    pendingTransactionsById.put(transactionId, updates);
                    startCount++;
                }

                updates.add(val);
                val = new KeyValue();
                writeCount++;
                break;

            case ABORT:
                if (updates == null) {
                    LOG.error("Processing abort for transaction: " + transactionId
                            + ", but have not seen start message");
                    throw new IOException("Corrupted transaction log");
                }
                abortedTransactions.add(transactionId);
                pendingTransactionsById.remove(transactionId);
                abortCount++;
                break;

            case COMMIT:
                if (updates == null) {
                    LOG.error("Processing commit for transaction: " + transactionId
                            + ", but have not seen start message");
                    throw new IOException("Corrupted transaction log");
                }
                if (abortedTransactions.contains(transactionId)) {
                    LOG.error("Processing commit for transaction: " + transactionId
                            + ", but also have abort message");
                    throw new IOException("Corrupted transaction log");
                }
                if (committedTransactions.contains(transactionId)) {
                    LOG.error("Processing commit for transaction: " + transactionId
                            + ", but have already committed transaction with that id");
                    throw new IOException("Corrupted transaction log");
                }
                pendingTransactionsById.remove(transactionId);
                committedTransactions.add(transactionId);
                commitCount++;
                break;
            default:
                throw new IllegalStateException("Unexpected log entry type");
            }
            totalEdits++;

            if (reporter != null && (totalEdits % reportInterval) == 0) {
                reporter.progress();
            }
        }
        if (LOG.isDebugEnabled()) {
            LOG.debug("Read " + totalEdits + " tranasctional operations (skipped " + skippedEdits
                    + " because sequence id <= " + maxSeqID + "): " + startCount + " starts, " + writeCount
                    + " writes, " + abortCount + " aborts, and " + commitCount + " commits.");
        }
    } finally {
        logReader.close();
    }

    if (pendingTransactionsById.size() > 0) {
        return resolvePendingTransaction(pendingTransactionsById);
    }

    return null;
}

From source file:eu.domibus.submission.webService.impl.BackendWebServiceImpl.java

private void enrichMessageWithSMPData(Messaging ebMSHeaderInfo) {
    String endpointAddress = "";

    final SortedMap<String, Object> metadata = new TreeMap<String, Object>();
    //Service@Type is ignored because as of now it is not supported by the SMP datamodel
    metadata.put(Metadata.PROCESS_ID,
            ebMSHeaderInfo.getUserMessage().getCollaborationInfo().getService().getValue());
    metadata.put(Metadata.DOCUMENT_OR_ACTION_ID,
            ebMSHeaderInfo.getUserMessage().getCollaborationInfo().getAction());

    for (Property property : ebMSHeaderInfo.getUserMessage().getMessageProperties().getProperty()) {
        switch (property.getName()) {
        case "originalSender":
            metadata.put(Metadata.SENDING_END_ENTITY_ID, property.getValue());
            break;
        case "finalRecipient":
            metadata.put(Metadata.RECEIVING_END_ENTITY_ID, property.getValue());
            break;
        }
    }

    //should be configurable
    metadata.put(Metadata.COMMUNITY, "civil-law");
    metadata.put(Metadata.ENVIRONMENT, "test");

    metadata.put(Metadata.TRANSPORT_PROFILE_ID, "ebms3-as4");
    metadata.put(Metadata.SUFFIX, "community.eu");

    //!!!
    metadata.put(Metadata.COUNTRY_CODE_OR_EU, "*");
    metadata.put(Metadata.NAMING_SCHEME, new ECodexNamingScheme());

    try {
        discoveryClient.resolveMetadata(metadata);
        endpointAddress = (String) metadata.get(Metadata.ENDPOINT_ADDRESS);

    } catch (DiscoveryException e) {
        LOG.error("", e);
    }

    Collection<Identifier> identifiers = partyIndentifierResolverService.resolveByEndpoint(endpointAddress);
    for (Identifier identifier : identifiers) {
        // create a fresh PartyId per identifier; reusing a single instance
        // would add the same (last-populated) object for every endpoint
        PartyId partyId = new PartyId();
        partyId.setType(identifier.getPartyIdType().getValue());
        partyId.setValue(identifier.getPartyId());
        ebMSHeaderInfo.getUserMessage().getPartyInfo().getTo().getPartyId().add(partyId);
    }

}

From source file:annis.visualizers.htmlvis.HTMLVis.java

private String createHTML(SDocumentGraph graph, VisualizationDefinition[] definitions) {
    SortedMap<Long, SortedSet<OutputItem>> outputStartTags = new TreeMap<Long, SortedSet<OutputItem>>();
    SortedMap<Long, SortedSet<OutputItem>> outputEndTags = new TreeMap<Long, SortedSet<OutputItem>>();
    StringBuilder sb = new StringBuilder();

    EList<SToken> token = graph.getSortedSTokenByText();

    for (SToken t : token) {

        for (VisualizationDefinition vis : definitions) {
            String matched = vis.getMatcher().matchedAnnotation(t);
            if (matched != null) {
                vis.getOutputter().outputHTML(t, matched, outputStartTags, outputEndTags);
            }
        }
    }

    List<SSpan> spans = graph.getSSpans();
    for (VisualizationDefinition vis : definitions) {
        for (SSpan span : spans) {
            String matched = vis.getMatcher().matchedAnnotation(span);
            if (matched != null) {
                vis.getOutputter().outputHTML(span, matched, outputStartTags, outputEndTags);
            }
        }
    }

    // get all used indexes
    Set<Long> indexes = new TreeSet<Long>();
    indexes.addAll(outputStartTags.keySet());
    indexes.addAll(outputEndTags.keySet());

    for (Long i : indexes) {
        // output all strings belonging to this token position

        // first the start tags for this position
        SortedSet<OutputItem> itemsStart = outputStartTags.get(i);
        if (itemsStart != null) {
            Iterator<OutputItem> it = itemsStart.iterator();
            boolean first = true;
            while (it.hasNext()) {
                OutputItem s = it.next();
                if (!first) {
                    sb.append("-->");
                }
                first = false;
                sb.append(s.getOutputString());
                if (it.hasNext()) {
                    sb.append("<!--\n");
                }
            }
        }
        // then the end tags for this position, but inverse their order
        SortedSet<OutputItem> itemsEnd = outputEndTags.get(i);
        if (itemsEnd != null) {
            List<OutputItem> itemsEndReverse = new LinkedList<OutputItem>(itemsEnd);
            Collections.reverse(itemsEndReverse);
            for (OutputItem s : itemsEndReverse) {
                sb.append(s.getOutputString());
            }
        }

    }

    return sb.toString();
}

From source file:hudson.model.Job.java

/**
 * Gets the youngest build #m that satisfies <tt>n&lt;=m</tt>.
 *
 * This is useful when you'd like to fetch a build but the exact build might
 * be already gone (deleted, rotated, etc.)
 * @see LazyBuildMixIn#getNearestBuild
 */
public RunT getNearestBuild(int n) {
    SortedMap<Integer, ? extends RunT> m = _getRuns().headMap(n - 1); // the map should
                                                                      // include n, so n-1
    if (m.isEmpty())
        return null;
    return m.get(m.lastKey());
}
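
Hudson's _getRuns() map is sorted by descending build number, which is why headMap(n - 1) yields the builds numbered n and above, and m.lastKey() picks the one nearest to n. With an ordinary ascending TreeMap, the mirror-image lookup (the newest key not exceeding n) uses the same headMap/lastKey/get combination; a minimal sketch:

    SortedMap<Integer, String> builds = new TreeMap<Integer, String>();
    builds.put(10, "build #10");
    builds.put(20, "build #20");
    builds.put(30, "build #30");

    int n = 25;
    SortedMap<Integer, String> head = builds.headMap(n + 1); // keys <= 25
    String nearest = head.isEmpty() ? null : head.get(head.lastKey()); // "build #20"

On Java 6 and later, TreeMap also implements NavigableMap, whose floorEntry(n) performs this lookup in a single call.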

From source file:org.apache.hadoop.hbase.regionserver.ccindex.Checker.java

private boolean recoveryDataForCCIT(byte[] row, HTable org, SortedMap<byte[], byte[]> columnValues) {
    boolean success = false;
    System.out.println("indexed table lost data");
    SimpleIndexKeyGenerator sg = (SimpleIndexKeyGenerator) this.indexOrg.getKeyGenerator();

    byte[] rowBase = sg.getOrgRowKey(row);
    Result recovery = null;
    try {
        if (this.baseTable != null)
            recovery = this.baseTable.get(new Get(rowBase));
    } catch (Throwable e1) {
        // log the failure; recovery stays null and the index tables are tried instead
        e1.printStackTrace();
        System.out.println("get from:base error");

    }
    if (!this.emptyRow(recovery)) {
        SortedMap<byte[], byte[]> columns = IndexedRegion.convertToValueMap(recovery);
        Put update = IndexMaintenanceUtils.recoverPut(row, columns, false, this.des);
        try {
            org.put(update);
            success = true;
        } catch (Throwable e) {
            // log the failure; success stays false, so recovery from the index tables is attempted
            e.printStackTrace();
        }
    }
    if (!success) {
        for (IndexSpecification ind : this.CCITS.keySet()) {
            if (Bytes.equals(ind.getCCITName(), org.getTableName()))
                continue;
            try {
                byte[] indexRow = ind.getKeyGenerator().createIndexKey(rowBase,
                        columnValues.get(ind.getIndexedColumn()));

                Result recovery2 = this.CCITS.get(ind).get(new Get(indexRow));
                if (!this.emptyRow(recovery2)) {

                    SortedMap<byte[], byte[]> columns = IndexedRegion.convertToValueMap(recovery2);
                    Put update = IndexMaintenanceUtils.recoverPutSimple(row, columns);
                    org.put(update);
                    success = true;
                    break;
                }
            } catch (NotServingRegionException e1) {
                continue;
            } catch (Throwable e) {
                // log the failure and move on to the next index
                e.printStackTrace();
                System.out.println("get from:" + ind.getIndexId() + " error");
                continue;
            }

        }
    }
    return success;
}