Example usage for java.util SortedMap keySet

Introduction

On this page you can find usage examples for java.util.SortedMap.keySet().

Prototype

Set<K> keySet();

Document

Returns a Set view of the keys contained in this map.
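
Before the real-world examples below, here is a minimal, self-contained sketch of the method's behavior (the class name KeySetDemo and the sample data are illustrative, not from any of the projects below). For a SortedMap, the returned Set view iterates in the map's key order, and the view is backed by the map, so removals through the view remove entries from the map.

import java.util.Set;
import java.util.SortedMap;
import java.util.TreeMap;

public class KeySetDemo {
    public static void main(String[] args) {
        SortedMap<String, Integer> map = new TreeMap<>();
        map.put("banana", 2);
        map.put("apple", 1);
        map.put("cherry", 3);

        // Iteration order follows the map's sort order: apple, banana, cherry
        Set<String> keys = map.keySet();
        for (String key : keys) {
            System.out.println(key + " -> " + map.get(key));
        }

        // The view is backed by the map: removing a key through the
        // view also removes the corresponding entry from the map.
        keys.remove("banana");
        System.out.println(map.containsKey("banana")); // prints: false
    }
}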

Usage

From source file:com.kixeye.chassis.support.metrics.aws.MetricsCloudWatchReporter.java
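
This example iterates the keySet() of a SortedMap of Codahale Histograms and publishes each histogram's count, min, max, mean, standard deviation, and percentiles as CloudWatch metric data.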

private void addHistograms(SortedMap<String, Histogram> histograms, LinkedList<PutMetricDataRequest> requests,
        Date timestamp) {
    logger.debug("Adding Histograms...");
    for (String name : histograms.keySet()) {
        Histogram histogram = histograms.get(name);
        Snapshot snapshot = histogram.getSnapshot();
        checkAndAddDatum(MetricFilter.Stat.COUNT, name, histogram.getCount(), requests, timestamp);
        checkAndAddDatum(MetricFilter.Stat.MIN, name, snapshot.getMin(), requests, timestamp);
        checkAndAddDatum(MetricFilter.Stat.MAX, name, snapshot.getMax(), requests, timestamp);
        checkAndAddDatum(MetricFilter.Stat.MEAN, name, snapshot.getMean(), requests, timestamp);
        checkAndAddDatum(MetricFilter.Stat.STDDEV, name, snapshot.getStdDev(), requests, timestamp);
        checkAndAddDatum(MetricFilter.Stat.PERCENTILE_75, name, snapshot.get75thPercentile(), requests,
                timestamp);
        checkAndAddDatum(MetricFilter.Stat.PERCENTILE_95, name, snapshot.get95thPercentile(), requests,
                timestamp);
        checkAndAddDatum(MetricFilter.Stat.PERCENTILE_98, name, snapshot.get98thPercentile(), requests,
                timestamp);
        checkAndAddDatum(MetricFilter.Stat.PERCENTILE_99, name, snapshot.get99thPercentile(), requests,
                timestamp);
        checkAndAddDatum(MetricFilter.Stat.PERCENTILE_999, name, snapshot.get999thPercentile(), requests,
                timestamp);
    }
}

From source file:de.ingrid.admin.controller.GeneralController.java
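
This example walks the keySet() of a SortedMap of provider lists to build a JSON string by hand, comparing each key against lastKey() to decide whether a separating comma is needed.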

private String toJsonMap(final SortedMap<String, List<Provider>> map) {
    final StringBuilder result = new StringBuilder("{");
    for (final String key : map.keySet()) {
        result.append("'" + key + "':[");
        final Iterator<Provider> it = map.get(key).iterator();
        while (it.hasNext()) {
            final Provider p = it.next();
            result.append(
                    "{'shortName':'" + p.getShortName() + "','displayName':'" + p.getDisplayName() + "'}");
            if (it.hasNext()) {
                result.append(",");
            }
        }
        result.append("]");
        if (!key.equals(map.lastKey())) { // equals(), not !=: compare key values, not references
            result.append(",");
        }
    }
    result.append("}");
    return result.toString();
}

From source file:org.gcaldaemon.core.mailterm.MailTerminal.java
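
In this constructor, keySet() is called on the SortedMap returned by Charset.availableCharsets() to list all supported console encodings when the configured charset is invalid.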

public MailTerminal(ThreadGroup mainGroup, Configurator configurator) throws Exception {
    super(mainGroup, "Mail terminal");
    this.configurator = configurator;

    // Get inbox polling time
    long timeout = configurator.getConfigProperty(Configurator.MAILTERM_POLLING_GOOGLE, 10000L);
    if (timeout < 60000L) {
        log.warn("The fastest Gmail inbox polling period is '1 min'!");
        timeout = 60000L;
    }
    pollingTimeout = timeout;

    // Get Gmail user
    username = configurator.getConfigProperty(Configurator.MAILTERM_GOOGLE_USERNAME, null);
    if (username == null) {
        throw new NullPointerException("Missing username (" + Configurator.MAILTERM_GOOGLE_USERNAME + ")!");
    }

    // Get Gmail password
    password = configurator.getPasswordProperty(Configurator.MAILTERM_GOOGLE_PASSWORD);

    // Get subject of the command mails
    subject = configurator.getPasswordProperty(Configurator.MAILTERM_MAIL_SUBJECT);

    // Get script directory
    String path = configurator.getConfigProperty(Configurator.MAILTERM_DIR_PATH, "/scripts");
    scriptDir = new File(path);
    if (!scriptDir.isDirectory()) {
        scriptDir.mkdirs();
        if (!scriptDir.isDirectory()) {
            throw new Exception("Unable to read script directory (" + path + ")! Permission denied!");
        }
    }

    // Get native console encoding
    String consoleEncoding = configurator.getConfigProperty(Configurator.MAILTERM_CONSOLE_ENCODING,
            StringUtils.US_ASCII);
    try {
        StringUtils.US_ASCII.getBytes(consoleEncoding);
    } catch (Exception unsupportedEncoding) {

        // Dump supported encodings
        SortedMap<String, Charset> map = Charset.availableCharsets();
        if (map != null) {
            Set<String> set = map.keySet();
            if (set != null) {
                String[] array = set.toArray(new String[0]);
                QuickWriter writer = new QuickWriter();
                writer.write("Invalid charset (");
                writer.write(consoleEncoding);
                writer.write(")! Supported console encodings:\r\n");
                for (int i = 0; i < array.length; i++) {
                    writer.write(array[i]);
                    if (i < array.length - 1) {
                        writer.write(", ");
                    }
                    if (i % 6 == 5) {
                        writer.write("\r\n");
                    }
                }
                log.warn(writer.toString().trim());
            }
        }
        consoleEncoding = StringUtils.US_ASCII;
    }
    encoding = consoleEncoding;

    // Get acceptable e-mail addresses
    addresses = configurator.getFilterProperty(Configurator.MAILTERM_ALLOWED_ADDRESSES, true);

    // Start listener
    log.info("Mailterm service started successfully.");
    start();
}

From source file:org.web4thejob.web.util.ToolbarRenderer.java
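
This renderer iterates the keySet() of a SortedMap of commands in their natural order, using firstKey() and lastKey() to decide where toolbar separators belong.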

@Override
public void render() {
    final boolean isEmpty = isEmpty();
    if (toolbar != null && isEmpty) {
        reset();
        return;
    } else if (toolbar != null || isEmpty) {
        return;
    }

    toolbar = new Toolbar();
    toolbar.setAlign(align);
    container.insertBefore(toolbar, container.getFirstChild());

    if (!HtmlViewPanel.class.isInstance(getPrimaryOwner())) {
        toolbar.setStyle("border-width: 0;");
    }

    SortedMap<CommandEnum, List<Command>> map = mergeCommands();

    for (final CommandEnum id : map.keySet()) {
        CommandDecorator commandDecorator = null;
        if (map.get(id).size() == 1) {
            commandDecorator = getDecorator(map.get(id).get(0));
        } else {
            for (Command command : map.get(id)) {
                if (commandDecorator == null) {
                    commandDecorator = new DefaultDropdownCommandDecorator(command);
                } else {
                    ((DropdownCommandDecorator) commandDecorator).add(command);
                }
            }
        }

        if (id.isRequiresStartSeparator() && id != map.firstKey() && !isPreviousSeparator()) {
            addSeparator();
        }

        if (commandDecorator != null) {
            commandDecorator.attach(toolbar);
            commandDecorator.addMessageListener(this);
            commandDecorator.render();
        }

        if (id.isRequiresEndSeparator() && id != map.lastKey()) {
            addSeparator();
        }

        Space space = new Space();
        space.setSpacing("8px");
        space.setParent(toolbar);
    }
}

From source file:org.kairosdb.datastore.remote.RemoteDatastore.java
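
While flushing buffered data points to JSON, this example iterates the keySet() of each data point key's tag map to write out the tag names and values.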

private void flushMap() {
    Multimap<DataPointKey, DataPoint> flushMap = createNewMap();

    synchronized (m_dataFileLock) {
        try {
            try {
                for (DataPointKey dataPointKey : flushMap.keySet()) {
                    //We have to reset the writer every time or it gets confused
                    //because we are only writing partial json each time.
                    JSONWriter writer = new JSONWriter(m_dataWriter);

                    if (!m_firstDataPoint) {
                        m_dataWriter.write(",\n");
                    }
                    m_firstDataPoint = false;

                    writer.object();

                    writer.key("name").value(dataPointKey.getName());
                    writer.key("skip_validate").value(true);
                    writer.key("tags").object();
                    SortedMap<String, String> tags = dataPointKey.getTags();
                    for (String tag : tags.keySet()) {
                        writer.key(tag).value(tags.get(tag));
                    }
                    writer.endObject();

                    writer.key("datapoints").array();
                    for (DataPoint dataPoint : flushMap.get(dataPointKey)) {
                        m_dataPointCounter++;
                        writer.array();
                        writer.value(dataPoint.getTimestamp());
                        dataPoint.writeValueToJson(writer);
                        writer.value(dataPoint.getApiDataType());
                        /*if (dataPoint.isLong())
                           writer.value(dataPoint.getLongValue());
                        else
                           writer.value(dataPoint.getDoubleValue());*/
                        writer.endArray();
                    }
                    writer.endArray();

                    writer.endObject();
                }
            } catch (JSONException e) {
                logger.error("Unable to write datapoints to file", e);
            }

            m_dataWriter.flush();
        } catch (IOException e) {
            logger.error("Unable to write datapoints to file", e);
        }
    }
}

From source file:org.apache.nifi.processors.standard.HashAttribute.java
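
This NiFi processor compares the keySet() of the matched-attribute map against the expected pattern keys to report which FlowFile attributes are missing before hashing.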

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }

    final Map<String, Pattern> patterns = regexMapRef.get();
    final ComponentLog logger = getLogger();

    final SortedMap<String, String> attributes = getRelevantAttributes(flowFile, patterns);
    if (attributes.size() != patterns.size()) {
        final Set<String> wantedKeys = patterns.keySet();
        final Set<String> foundKeys = attributes.keySet();
        final StringBuilder missingKeys = new StringBuilder();
        for (final String wantedKey : wantedKeys) {
            if (!foundKeys.contains(wantedKey)) {
                missingKeys.append(wantedKey).append(" ");
            }
        }

        logger.error("routing {} to 'failure' because of missing attributes: {}",
                new Object[] { flowFile, missingKeys.toString() });
        session.transfer(flowFile, REL_FAILURE);
    } else {
        // create single string of attribute key/value pairs to use for group ID hash
        final StringBuilder hashableValue = new StringBuilder();
        for (final Map.Entry<String, String> entry : attributes.entrySet()) {
            hashableValue.append(entry.getKey());
            if (StringUtils.isBlank(entry.getValue())) {
                hashableValue.append("EMPTY");
            } else {
                hashableValue.append(entry.getValue());
            }
        }

        // create group ID
        final String hashValue = DigestUtils.md5Hex(hashableValue.toString());

        logger.info("adding Hash Value {} to attributes for {} and routing to success",
                new Object[] { hashValue, flowFile });
        flowFile = session.putAttribute(flowFile, context.getProperty(HASH_VALUE_ATTRIBUTE).getValue(),
                hashValue);
        session.getProvenanceReporter().modifyAttributes(flowFile);
        session.transfer(flowFile, REL_SUCCESS);
    }
}

From source file:org.opennaas.extensions.genericnetwork.capability.circuitstatistics.CircuitStatisticsCapability.java
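
This example merges freshly parsed circuit statistics into the model map, iterating the keySet() of the parsed map and testing key membership to decide between appending and inserting.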

@Override
public void reportStatistics(String csvStatistics) throws CapabilityException {

    log.info("Circuit Statistics report received.");

    try {
        SortedMap<Long, List<CircuitStatistics>> circuitStatistics = parseCSV(csvStatistics);
        GenericNetworkModel model = (GenericNetworkModel) resource.getModel();

        SortedMap<Long, List<CircuitStatistics>> modelStatistics = model.getCircuitStatistics();
        for (Long timestamp : circuitStatistics.keySet()) {
            if (modelStatistics.containsKey(timestamp))
                modelStatistics.get(timestamp).addAll(circuitStatistics.get(timestamp));
            else
                modelStatistics.put(timestamp, circuitStatistics.get(timestamp));
        }

    } catch (Exception e) {
        log.info("Error parsing received CSV", e);
        throw new CapabilityException(e);
    }

    log.info("Circuits statistics stored.");

}

From source file:eu.stratosphere.pact.runtime.hash.HashFunctionCollisionBenchmark.java
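
This test iterates the keySet() of a per-level bucket-size map to check how many hash buckets fall outside the expected size boundaries.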

private void checkBoundaries(BucketBoundaries[] boundaries) {
    for (int level = 0; level < boundaries.length; level++) {
        int lowerBound = boundaries[level].getLowerBound();
        int upperBound = boundaries[level].getUpperBound();
        int bucketCountInLevel = 0;
        int bucketCountOutOfRange = 0;

        SortedMap<Integer, Integer> levelMap = bucketSizesPerLevel.get(level);
        Iterator<Integer> bucketSizeIterator = levelMap.keySet().iterator();

        while (bucketSizeIterator.hasNext()) {
            int bucketSize = bucketSizeIterator.next();
            if (bucketSize != 0) {
                int countForBucketSize = levelMap.get(bucketSize);
                bucketCountInLevel += countForBucketSize;
                if (lowerBound > bucketSize || upperBound < bucketSize) {
                    bucketCountOutOfRange += countForBucketSize;
                }

            }
        }
        double bucketsOutOfRange = (double) bucketCountOutOfRange / (double) bucketCountInLevel;
        double maxBucketsOutOfRange = boundaries[level].getPercentOutOfRange();
        Assert.assertTrue(
                "More than " + (maxBucketsOutOfRange * 100) + "% of buckets out of range in level " + level,
                bucketsOutOfRange <= maxBucketsOutOfRange);

        int maxEmpty = boundaries[level].getMaxEmpty();
        Assert.assertTrue("More than " + maxEmpty + " empty buckets in level " + level,
                (maxEmpty == BucketBoundaries.MAX_EMPTY_UNBOUNDED)
                        || (levelMap.get(0) <= boundaries[level].getMaxEmpty()));
    }
}

From source file:com.aurel.track.report.dashboard.StatusOverTimeGraph.java
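
This example iterates nested SortedMaps (year, then period) through their keySet() iterators to accumulate status-change counts per time interval.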

/**
 * Computes the hierarchical data for status changes
 * @return a map keyed by year, then period, then status ID, holding status-change counts
 */
public static SortedMap<Integer, SortedMap<Integer, Map<Integer, Integer>>> calculateStatus(int[] workItemIDs,
        Date dateFrom, Date dateTo, List<Integer> statusIDs, int selectedTimeInterval, Locale locale) {
    SortedMap<Integer, SortedMap<Integer, Map<Integer, Integer>>> yearToPeriodToStatusIDToStatusNumbersMap = new TreeMap<Integer, SortedMap<Integer, Map<Integer, Integer>>>();

    if (statusIDs != null && statusIDs.isEmpty()) {
        LOGGER.debug("No status specified");
        return yearToPeriodToStatusIDToStatusNumbersMap;
    }
    if (workItemIDs == null || workItemIDs.length == 0) {
        // LOGGER.warn("No issues satisfy the filtering condition (read right revoked, project/release deleted?)");
        return yearToPeriodToStatusIDToStatusNumbersMap;
    }
    List<HistorySelectValues> historySelectValuesList = HistoryTransactionBL.getByWorkItemsFieldNewValuesDates(
            workItemIDs, SystemFields.INTEGER_STATE, statusIDs, dateFrom, dateTo);
    SortedMap<Integer, SortedMap<Integer, List<HistorySelectValues>>> periodStatusChanges = getStatusChangesMap(
            historySelectValuesList, selectedTimeInterval, false/*, statusIDs*/);
    Iterator<Integer> yearIterator = periodStatusChanges.keySet().iterator();
    while (yearIterator.hasNext()) {
        Integer year = yearIterator.next();
        SortedMap<Integer, List<HistorySelectValues>> intervalToStatusChangeBeans = periodStatusChanges
                .get(year);
        Iterator<Integer> periodIterator = intervalToStatusChangeBeans.keySet().iterator();
        while (periodIterator.hasNext()) {
            Integer period = periodIterator.next();
            List<HistorySelectValues> statusChangeBeansForInterval = intervalToStatusChangeBeans.get(period);
            if (statusChangeBeansForInterval != null) {
                Iterator<HistorySelectValues> statusChangeBeansIterator = statusChangeBeansForInterval.iterator();
                while (statusChangeBeansIterator.hasNext()) {
                    HistorySelectValues stateChangeBean = statusChangeBeansIterator.next();
                    Integer statusID = stateChangeBean.getNewValue();
                    setCount(yearToPeriodToStatusIDToStatusNumbersMap, year, period, statusID, 1);
                }
            }
        }
    }
    addZerosForEmptyIntervals(dateFrom, dateTo, selectedTimeInterval, yearToPeriodToStatusIDToStatusNumbersMap,
            statusIDs);
    //addTimeSeries(timeSeriesCollection, yearToPeriodToStatusIDToStatusNumbersMap, statusMap, selectedTimeInterval, accumulated);
    return yearToPeriodToStatusIDToStatusNumbersMap;
}

From source file:com.streamsets.pipeline.lib.jdbc.JdbcLoadRecordWriter.java
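
Here the keySet() of the column-to-parameter map supplies the column names for a MySQL LOAD DATA LOCAL INFILE statement that streams records through a piped CSV writer.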

@Override
public List<OnRecordErrorException> writeBatch(Iterator<Record> recordIterator) throws StageException {
    final List<OnRecordErrorException> errorRecords = new LinkedList<>();
    if (!recordIterator.hasNext()) {
        return errorRecords;
    }

    // Assume all records have the same columns.
    final Record first = recordIterator.next();
    SortedMap<String, String> columnsToParameters = recordReader.getColumnsToParameters(first,
            OperationType.LOAD_CODE, getColumnsToParameters(), getColumnsToFields());
    if (columnsToParameters.isEmpty()) {
        throw new StageException(JdbcErrors.JDBC_22);
    }

    final Set<String> columnNames = columnsToParameters.keySet();
    final String loadSql = "LOAD DATA LOCAL INFILE '' " + duplicateKeyAction.getKeyword() + " INTO TABLE "
            + getTableName() + " (" + Joiner.on(", ").join(columnNames) + ")";
    try (Connection connection = getDataSource().getConnection()) {
        Connection conn = connection.unwrap(Connection.class);
        try (PreparedStatement statement = conn.prepareStatement(loadSql)) {
            PipedInputStream is = new PipedInputStream();
            PipedOutputStream os = new PipedOutputStream(is);
            statement.getClass().getMethod("setLocalInfileInputStream", InputStream.class).invoke(statement,
                    is);

            Future<?> future = loadOutputExecutor.submit(() -> {
                try (OutputStreamWriter writer = new OutputStreamWriter(os)) {
                    CSVPrinter printer = new CSVPrinter(writer, CSVFormat.MYSQL);
                    Record record = first;
                    while (record != null) {
                        int opCode = getOperationCode(record, errorRecords);
                        if (opCode == OperationType.LOAD_CODE) {
                            for (String column : columnNames) {
                                Field field = record.get(getColumnsToFields().get(column));
                                printer.print(field.getValue());
                            }
                            printer.println();
                        } else if (opCode > 0) {
                            LOG.debug("Sending record to error due to unsupported operation {}", opCode);
                            errorRecords.add(new OnRecordErrorException(record, JdbcErrors.JDBC_70, opCode));
                        } else {
                            // It should be added to the error records.
                        }
                        record = recordIterator.hasNext() ? recordIterator.next() : null;
                    }
                } catch (IOException e) {
                    throw new RuntimeException(e);
                }
            });

            if (LOG.isDebugEnabled()) {
                LOG.debug("Executing query: {}", statement.toString());
            }
            statement.execute();
            future.get();
        }
        connection.commit();
    } catch (SQLException e) {
        handleSqlException(e);
    } catch (Exception e) {
        throw new StageException(JdbcErrors.JDBC_14, e.getMessage(), e);
    }
    return errorRecords;
}