Example usage for java.util TreeMap keySet

List of usage examples for java.util TreeMap keySet

Introduction

On this page you can find usage examples for java.util.TreeMap.keySet().

Prototype

public Set<K> keySet() 

Document

Returns a Set view of the keys contained in this map.
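
Before the project examples, here is a minimal, self-contained sketch of the method's behavior. The KeySetDemo class and the sample keys are illustrative only and do not come from the sources below.

import java.util.Set;
import java.util.TreeMap;

public class KeySetDemo {
    public static void main(String[] args) {
        TreeMap<String, Integer> counts = new TreeMap<>();
        counts.put("banana", 1);
        counts.put("apple", 3);
        counts.put("cherry", 5);

        // For a TreeMap, keySet() iterates in ascending key order: apple, banana, cherry
        Set<String> keys = counts.keySet();
        for (String key : keys) {
            System.out.println(key + " -> " + counts.get(key));
        }

        // The returned set is a live view backed by the map:
        // removing a key from the view also removes its entry from the map.
        keys.remove("banana");
        System.out.println(counts); // prints {apple=3, cherry=5}
    }
}

Most of the examples below pair keySet() iteration with map.get(key); when both the key and the value are needed, entrySet() avoids the extra lookup, but the keySet() form is kept here because it is what the sources use.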

Usage

From source file:edu.indiana.soic.ts.mapreduce.VectorCalculator.java

public void submitJob() {
    try {
        Configuration config = HBaseConfiguration.create();
        config.set("mapreduce.output.textoutputformat.separator", ",");
        TreeMap<String, List<Date>> genDates = TableUtils.genDates(TableUtils.getDate(startDate),
                TableUtils.getDate(endDate), this.window, TimeUnit.DAYS, this.headShift, this.tailShift,
                TimeUnit.DAYS);
        LOG.info("Start Date : {} End Date : {}, Gen dates size: {}", startDate, endDate, genDates.size());
        for (String id : genDates.keySet()) {
            LOG.info("Vector calculation for: {}", id);
            Scan scan = new Scan();
            scan.setCaching(500); // 1 is the default in Scan, which will be bad for MapReduce jobs
            scan.setCacheBlocks(false); // don't set to true for MR jobs
            List<Date> dates = genDates.get(id);
            String start = TableUtils.convertDateToString(dates.get(0));
            String end = TableUtils.convertDateToString(dates.get(1));
            List<String> suitableDateList = TableUtils.getDates(start, end);
            config.set(Constants.Job.NO_OF_DAYS, String.valueOf(suitableDateList.size()));
            LOG.info("Vector calculator for start: {}, end: {} time window: {}, shift: {}, days: {}", startDate,
                    endDate, window, headShift, suitableDateList.size());
            for (String date : suitableDateList) {
                scan.addColumn(Constants.STOCK_TABLE_CF_BYTES, date.getBytes());
            }
            Job job = new Job(config, "Vector calculation: " + id);
            job.setJarByClass(VectorCalculator.class);
            TableMapReduceUtil.initTableMapperJob(Constants.STOCK_TABLE_NAME, // input HBase table name
                    scan, // Scan instance to control CF and attribute selection
                    VectorCalculatorMapper.class, // mapper
                    IntWritable.class, // mapper output key
                    Text.class, // mapper output value
                    job);
            // adjust directories as required
            String outPutDir = tsConfiguration.getInterMediateVectorDir() + "/" + id;
            FileOutputFormat.setOutputPath(job, new Path(outPutDir));
            boolean b = job.waitForCompletion(true);
            if (!b) {
                LOG.error("Error with job for vector calculation");
                throw new RuntimeException("Error with job for vector calculation");
            }
            Utils.concatOutput(config, id, outPutDir, tsConfiguration.getVectorDir());
        }
    } catch (ParseException e) {
        LOG.error("Error while parsing date", e);
        throw new RuntimeException("Error while parsing date", e);
    } catch (InterruptedException | ClassNotFoundException | IOException e) {
        LOG.error("Error while creating the job", e);
        throw new RuntimeException("Error while creating the job", e);
    }
}

From source file:org.apache.hadoop.hbase.TestScanner2.java

private void checkRegexingScanner(final HTable table, final String regexColumnname) throws IOException {
    Text[] regexCol = new Text[] { new Text(regexColumnname) };
    HScannerInterface scanner = table.obtainScanner(regexCol, HConstants.EMPTY_START_ROW);
    HStoreKey key = new HStoreKey();
    TreeMap<Text, byte[]> results = new TreeMap<Text, byte[]>();
    int count = 0;
    while (scanner.next(key, results)) {
        for (Text c : results.keySet()) {
            assertTrue(c.toString().matches(regexColumnname));
            count++;
        }
    }
    assertTrue(count == 1);
    scanner.close();
}

From source file:dreamboxdataservice.DreamboxDataService.java

/**
 * @return All channels available in the dreambox
 */
public ArrayList<Channel> getChannels() {
    try {
        ArrayList<Channel> allChannels = new ArrayList<Channel>();

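        // Fetch the list of bouquets first, then build a Channel for every service found in each bouquet.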
        TreeMap<String, String> bouquets = getServiceDataBonquets(URLEncoder.encode(BOUQUETLIST, "UTF8"));

        if (bouquets != null) {
            for (String key : bouquets.keySet()) {
                TreeMap<String, String> map = getServiceData(URLEncoder.encode(key, "UTF8"));

                for (String mkey : map.keySet()) {
                    Channel ch = new Channel(this, map.get(mkey), "DREAM" + StringUtils.replace(mkey, ":", "_"),
                            TimeZone.getTimeZone("GMT+1:00"), "de", "Imported from Dreambox", "", mChannelGroup,
                            null, Channel.CATEGORY_TV);
                    allChannels.add(ch);
                }
            }
        }

        return allChannels;
    } catch (UnsupportedEncodingException e) {
        e.printStackTrace();
    }

    return new ArrayList<Channel>();
}

From source file:monasca.thresh.infrastructure.persistence.AlarmDAOImpl.java

private byte[] calculateDimensionSHA1(final Map<String, String> dimensions) {
    // Calculate dimensions sha1 hash id.
    final StringBuilder dimensionIdStringToHash = new StringBuilder("");
    if (dimensions != null) {
        // Sort the dimensions on name and value.
        TreeMap<String, String> dimensionTreeMap = new TreeMap<>(dimensions);
        for (String dimensionName : dimensionTreeMap.keySet()) {
            if (dimensionName != null && !dimensionName.isEmpty()) {
                String dimensionValue = dimensionTreeMap.get(dimensionName);
                if (dimensionValue != null && !dimensionValue.isEmpty()) {
                    dimensionIdStringToHash.append(trunc(dimensionName, MAX_COLUMN_LENGTH));
                    dimensionIdStringToHash.append(trunc(dimensionValue, MAX_COLUMN_LENGTH));
                }
            }
        }
    }

    final byte[] dimensionIdSha1Hash = DigestUtils.sha(dimensionIdStringToHash.toString());
    return dimensionIdSha1Hash;
}

From source file:org.apache.hadoop.hive.ql.exec.ComputationBalancerReducer.java

void flushHistogram(FSDataOutputStream out) throws Exception {
    out.writeBytes(HistogramOperator.HISTOGRAMTABLE + "\n");
    for (String _s : mcvList.keySet()) {
        out.writeBytes(_s + "\n");
        ToolBox _tb = new ToolBox();
        TreeMap<String, Integer> _tsi = mcvList.get(_s);
        for (String _s_inner_ : _tsi.keySet()) {
            _tb.push(_s_inner_, _tsi.get(_s_inner_));
        }
        ToolBox _copyBox = HistogramOperator.binning(_tb, 10);
        String _curString = null;
        String _preString = _copyBox.getStringAtIdx(0);
        int idx;
        for (idx = 1; idx < _copyBox.getCapacity(); idx++) {
            _curString = _copyBox.getStringAtIdx(idx);
            if (_curString.equals(_preString)) {
                continue;
            } else {
                out.writeBytes(_copyBox.getIntegeAtIdx(idx - 1) + ToolBox.hiveDelimiter + _s
                        + ToolBox.hiveDelimiter + _copyBox.getStringAtIdx(idx - 1) + "\n");

                _preString = _curString;
            }
        }

        out.writeBytes(_copyBox.getIntegeAtIdx(idx - 1) + ToolBox.hiveDelimiter + _s + ToolBox.hiveDelimiter
                + _copyBox.getStringAtIdx(idx - 1) + "\n");

    }

}

From source file:org.opennms.netmgt.config.WmiPeerFactory.java

/**
 * Combine specific and range elements so that WMIPeerFactory has to spend
 * less time iterating all these elements.
 * TODO This really should be pulled up into PeerFactory somehow, but I'm not sure how (given that "Definition" is different for both
 * SNMP and WMI). Maybe some sort of visitor methodology would work. The basic logic should be fine as it's all IP address manipulation.
 *
 * @throws UnknownHostException
 */
static void optimize() throws UnknownHostException {

    // First pass: Remove empty definition elements
    for (Iterator<Definition> definitionsIterator = m_config.getDefinitionCollection()
            .iterator(); definitionsIterator.hasNext();) {
        Definition definition = definitionsIterator.next();
        if (definition.getSpecificCount() == 0 && definition.getRangeCount() == 0) {

            LOG.debug("optimize: Removing empty definition element");
            definitionsIterator.remove();
        }
    }

    // Second pass: Replace single IP range elements with specific elements
    for (Definition definition : m_config.getDefinitionCollection()) {
        synchronized (definition) {
            for (Iterator<Range> rangesIterator = definition.getRangeCollection().iterator(); rangesIterator
                    .hasNext();) {
                Range range = rangesIterator.next();
                if (range.getBegin().equals(range.getEnd())) {
                    definition.addSpecific(range.getBegin());
                    rangesIterator.remove();
                }
            }
        }
    }

    // Third pass: Sort specific and range elements for improved XML
    // readability and then combine them into fewer elements where possible
    for (Iterator<Definition> defIterator = m_config.getDefinitionCollection().iterator(); defIterator
            .hasNext();) {
        Definition definition = defIterator.next();

        // Sort specifics
        final TreeMap<InetAddress, String> specificsMap = new TreeMap<InetAddress, String>(
                new InetAddressComparator());
        for (String specific : definition.getSpecificCollection()) {
            specificsMap.put(InetAddressUtils.getInetAddress(specific), specific.trim());
        }

        // Sort ranges
        final TreeMap<InetAddress, Range> rangesMap = new TreeMap<InetAddress, Range>(
                new InetAddressComparator());
        for (Range range : definition.getRangeCollection()) {
            rangesMap.put(InetAddressUtils.getInetAddress(range.getBegin()), range);
        }

        // Combine consecutive specifics into ranges
        InetAddress priorSpecific = null;
        Range addedRange = null;
        for (final InetAddress specific : specificsMap.keySet()) {
            if (priorSpecific == null) {
                priorSpecific = specific;
                continue;
            }

            if (BigInteger.ONE.equals(InetAddressUtils.difference(specific, priorSpecific))
                    && InetAddressUtils.inSameScope(specific, priorSpecific)) {
                if (addedRange == null) {
                    addedRange = new Range();
                    addedRange.setBegin(InetAddressUtils.toIpAddrString(priorSpecific));
                    rangesMap.put(priorSpecific, addedRange);
                    specificsMap.remove(priorSpecific);
                }

                addedRange.setEnd(InetAddressUtils.toIpAddrString(specific));
                specificsMap.remove(specific);
            } else {
                addedRange = null;
            }

            priorSpecific = specific;
        }

        // Move specifics to ranges
        for (final InetAddress specific : new ArrayList<InetAddress>(specificsMap.keySet())) {
            for (final InetAddress begin : new ArrayList<InetAddress>(rangesMap.keySet())) {
                if (!InetAddressUtils.inSameScope(begin, specific)) {
                    continue;
                }

                if (InetAddressUtils.toInteger(begin).subtract(BigInteger.ONE)
                        .compareTo(InetAddressUtils.toInteger(specific)) > 0) {
                    continue;
                }

                Range range = rangesMap.get(begin);

                final InetAddress end = InetAddressUtils.getInetAddress(range.getEnd());

                if (InetAddressUtils.toInteger(end).add(BigInteger.ONE)
                        .compareTo(InetAddressUtils.toInteger(specific)) < 0) {
                    continue;
                }

                if (InetAddressUtils.toInteger(specific).compareTo(InetAddressUtils.toInteger(begin)) >= 0
                        && InetAddressUtils.toInteger(specific)
                                .compareTo(InetAddressUtils.toInteger(end)) <= 0) {
                    specificsMap.remove(specific);
                    break;
                }

                if (InetAddressUtils.toInteger(begin).subtract(BigInteger.ONE)
                        .equals(InetAddressUtils.toInteger(specific))) {
                    rangesMap.remove(begin);
                    rangesMap.put(specific, range);
                    range.setBegin(InetAddressUtils.toIpAddrString(specific));
                    specificsMap.remove(specific);
                    break;
                }

                if (InetAddressUtils.toInteger(end).add(BigInteger.ONE)
                        .equals(InetAddressUtils.toInteger(specific))) {
                    range.setEnd(InetAddressUtils.toIpAddrString(specific));
                    specificsMap.remove(specific);
                    break;
                }
            }
        }

        // Combine consecutive ranges
        Range priorRange = null;
        InetAddress priorBegin = null;
        InetAddress priorEnd = null;
        for (final Iterator<InetAddress> rangesIterator = rangesMap.keySet().iterator(); rangesIterator
                .hasNext();) {
            final InetAddress beginAddress = rangesIterator.next();
            final Range range = rangesMap.get(beginAddress);
            final InetAddress endAddress = InetAddressUtils.getInetAddress(range.getEnd());

            if (priorRange != null) {
                if (InetAddressUtils.inSameScope(beginAddress, priorEnd)
                        && InetAddressUtils.difference(beginAddress, priorEnd).compareTo(BigInteger.ONE) <= 0) {
                    priorBegin = new InetAddressComparator().compare(priorBegin, beginAddress) < 0 ? priorBegin
                            : beginAddress;
                    priorRange.setBegin(InetAddressUtils.toIpAddrString(priorBegin));
                    priorEnd = new InetAddressComparator().compare(priorEnd, endAddress) > 0 ? priorEnd
                            : endAddress;
                    priorRange.setEnd(InetAddressUtils.toIpAddrString(priorEnd));

                    rangesIterator.remove();
                    continue;
                }
            }

            priorRange = range;
            priorBegin = beginAddress;
            priorEnd = endAddress;
        }

        // Update changes made to sorted maps
        definition.setSpecific(specificsMap.values().toArray(new String[0]));
        definition.setRange(rangesMap.values().toArray(new Range[0]));
    }
}

From source file:com.deliciousdroid.client.DeliciousApi.java

/**
 * Performs an api call to Delicious's http based api methods.
 *
 * @param url URL of the api method to call.
 * @param params Extra parameters included in the api call, as specified by different methods.
 * @param account The account being synced.
 * @param context The current application context.
 * @return A String containing the response from the server.
 * @throws IOException If a server error was encountered.
 * @throws AuthenticationException If an authentication error was encountered.
 */
private static InputStream DeliciousApiCall(String url, TreeMap<String, String> params, Account account,
        Context context) throws IOException, AuthenticationException {

    final AccountManager am = AccountManager.get(context);

    if (account == null)
        throw new AuthenticationException();

    final String username = account.name;
    String authtoken = null;

    try {
        authtoken = am.blockingGetAuthToken(account, Constants.AUTHTOKEN_TYPE, false);
    } catch (OperationCanceledException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    } catch (AuthenticatorException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }

    Uri.Builder builder = new Uri.Builder();
    builder.scheme(SCHEME);
    builder.authority(DELICIOUS_AUTHORITY);
    builder.appendEncodedPath(url);
    for (String key : params.keySet()) {
        builder.appendQueryParameter(key, params.get(key));
    }

    String apiCallUrl = builder.build().toString();

    Log.d("apiCallUrl", apiCallUrl);
    final HttpGet post = new HttpGet(apiCallUrl);

    post.setHeader("User-Agent", "DeliciousDroid");
    post.setHeader("Accept-Encoding", "gzip");

    DefaultHttpClient client = (DefaultHttpClient) HttpClientFactory.getThreadSafeClient();
    CredentialsProvider provider = client.getCredentialsProvider();
    Credentials credentials = new UsernamePasswordCredentials(username, authtoken);
    provider.setCredentials(SCOPE, credentials);

    client.addRequestInterceptor(new PreemptiveAuthInterceptor(), 0);

    final HttpResponse resp = client.execute(post);

    final int statusCode = resp.getStatusLine().getStatusCode();

    if (statusCode == HttpStatus.SC_OK) {

        final HttpEntity entity = resp.getEntity();

        InputStream instream = entity.getContent();

        final Header encoding = entity.getContentEncoding();

        if (encoding != null && encoding.getValue().equalsIgnoreCase("gzip")) {
            instream = new GZIPInputStream(instream);
        }

        return instream;
    } else if (statusCode == HttpStatus.SC_UNAUTHORIZED) {
        throw new AuthenticationException();
    } else {
        throw new IOException();
    }
}

From source file:net.spfbl.core.Peer.java

public static void dropExpired() {
    String origin = null;
    for (Peer peer : getSet()) {
        long time = System.currentTimeMillis();
        if (peer.isExpired7()) {
            if (peer.drop()) {
                Server.log(time, Core.Level.INFO, "PEERH", origin, peer.getAddress(), "EXPIRED");
            }
        } else {
            try {
                peer.refreshReputationMax();
                TreeMap<String, Binomial> reputationMap = peer.getReputationMap();
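                // Drop reputation entries whose binomial counters have expired.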
                for (String key : reputationMap.keySet()) {
                    time = System.currentTimeMillis();
                    Binomial binomial = reputationMap.get(key);
                    if (binomial.isExpired3()) {
                        binomial = peer.dropReputation(key);
                        if (binomial != null) {
                            Server.log(time, Core.Level.INFO, "PEERR", peer.getAddress(), key, "EXPIRED");
                        }
                    }
                }
            } catch (Exception ex) {
                Server.logError(ex);
            }
        }
    }
}

From source file:org.apache.storm.metricstore.rocksdb.RocksDbMetricsWriter.java

private void processBatchInsert(TreeMap<RocksDbKey, RocksDbValue> batchMap) throws MetricException {
    try (WriteBatch writeBatch = new WriteBatch()) {
        // take the batched metric data and write to the database
        for (RocksDbKey k : batchMap.keySet()) {
            RocksDbValue v = batchMap.get(k);
            writeBatch.put(k.getRaw(), v.getRaw());
        }
        store.db.write(writeOpts, writeBatch);
    } catch (Exception e) {
        String message = "Failed to store data to RocksDB";
        LOG.error(message, e);
        throw new MetricException(message, e);
    }
}

From source file:com.sfs.whichdoctor.export.writer.AgedDebtorsAnalysisWriter.java

/**
 * Gets the formatted period breakdown field.
 *
 * @param periods the periods
 * @param breakdown the breakdown
 * @param format the format
 * @return the formatted period breakdown field
 */
private String getFormattedPeriodBreakdownField(final TreeMap<Integer, AgedDebtorsPeriod> periods,
        final AgedDebtorsBreakdown breakdown, final String format) {

    StringBuffer field = new StringBuffer();

    int i = 1;
    for (int id : periods.keySet()) {
        AgedDebtorsPeriod period = periods.get(id);
        AgedDebtorsPeriod bPeriod = breakdown.getPeriodBreakdown(period);

        if (StringUtils.equalsIgnoreCase(format, "html")) {
            field.append("<div style=\"text-align: right\">");
        }
        field.append(Formatter.toCurrency(bPeriod.getTotal(), "$"));
        if (StringUtils.equalsIgnoreCase(format, "html")) {
            field.append("</div>");
        }
        if (i < periods.size()) {
            field.append(this.getKeys().getString("ITEM_SUFFIX"));
            field.append(this.getKeys().getString("ITEM_DIVIDER"));
            field.append(this.getKeys().getString("ITEM_PREFIX"));
        }
        i++;
    }
    return field.toString();
}