Example usage for java.util TreeMap put

List of usage examples for java.util TreeMap put

Introduction

On this page you can find example usage of java.util.TreeMap.put.

Prototype

public V put(K key, V value) 

Document

Associates the specified value with the specified key in this map.
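
As a quick orientation before the usage listings, here is a minimal, self-contained sketch of put in isolation: the method returns the value previously associated with the key (or null if there was none), and the map keeps its keys in sorted order.

import java.util.TreeMap;

public class TreeMapPutExample {
    public static void main(String[] args) {
        TreeMap<String, Integer> map = new TreeMap<String, Integer>();
        map.put("b", 2);
        map.put("a", 1);
        Integer previous = map.put("a", 10); // replaces the old mapping and returns it
        System.out.println(previous); // 1
        System.out.println(map); // {a=10, b=2} -- keys iterate in sorted order
    }
}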

Usage

From source file:org.apache.ambari.server.controller.metrics.timeline.cache.TimelineMetricCacheTest.java

@Test
public void testTimelineMetricCacheProviderGets() throws Exception {
    Configuration configuration = createNiceMock(Configuration.class);
    expect(configuration.getMetricCacheTTLSeconds()).andReturn(3600);
    expect(configuration.getMetricCacheIdleSeconds()).andReturn(100);
    expect(configuration.getMetricsCacheManagerHeapPercent()).andReturn("10%").anyTimes();

    replay(configuration);

    final long now = System.currentTimeMillis();

    TimelineMetrics metrics = new TimelineMetrics();

    TimelineMetric timelineMetric = new TimelineMetric();
    timelineMetric.setMetricName("cpu_user");
    timelineMetric.setAppId("app1");
    TreeMap<Long, Double> metricValues = new TreeMap<Long, Double>();
    metricValues.put(now + 100, 1.0);
    metricValues.put(now + 200, 2.0);
    metricValues.put(now + 300, 3.0);
    timelineMetric.setMetricValues(metricValues);

    metrics.getMetrics().add(timelineMetric);

    TimelineMetricCacheEntryFactory cacheEntryFactory = createMock(TimelineMetricCacheEntryFactory.class);

    TimelineAppMetricCacheKey queryKey = new TimelineAppMetricCacheKey(Collections.singleton("cpu_user"),
            "app1", new TemporalInfoImpl(now, now + 1000, 1));
    TimelineMetricsCacheValue value = new TimelineMetricsCacheValue(now, now + 1000, metrics, null);
    TimelineAppMetricCacheKey testKey = new TimelineAppMetricCacheKey(Collections.singleton("cpu_user"), "app1",
            new TemporalInfoImpl(now, now + 2000, 1));

    expect(cacheEntryFactory.createEntry(anyObject())).andReturn(value);
    cacheEntryFactory.updateEntryValue(testKey, value);
    expectLastCall().once();

    replay(cacheEntryFactory);

    TimelineMetricCacheProvider cacheProvider = createMockBuilder(TimelineMetricCacheProvider.class)
            .addMockedMethod("createCacheConfiguration").withConstructor(configuration, cacheEntryFactory)
            .createNiceMock();

    expect(cacheProvider.createCacheConfiguration()).andReturn(createTestCacheConfiguration(configuration))
            .anyTimes();
    replay(cacheProvider);

    TimelineMetricCache cache = cacheProvider.getTimelineMetricsCache();

    // call to get
    metrics = cache.getAppTimelineMetricsFromCache(queryKey);
    List<TimelineMetric> metricsList = metrics.getMetrics();
    Assert.assertEquals(1, metricsList.size());
    TimelineMetric metric = metricsList.iterator().next();
    Assert.assertEquals("cpu_user", metric.getMetricName());
    Assert.assertEquals("app1", metric.getAppId());
    Assert.assertSame(metricValues, metric.getMetricValues());

    // call to update with new key
    metrics = cache.getAppTimelineMetricsFromCache(testKey);
    metricsList = metrics.getMetrics();
    Assert.assertEquals(1, metricsList.size());
    metric = metricsList.iterator().next(); // re-read the metric returned for the updated key
    Assert.assertEquals("cpu_user", metric.getMetricName());
    Assert.assertEquals("app1", metric.getAppId());
    Assert.assertSame(metricValues, metric.getMetricValues());

    verify(configuration, cacheEntryFactory);
}
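
The test above relies on TreeMap keeping its timestamp keys sorted. A standalone sketch of that property (the class and variable names here are illustrative, not part of the Ambari API):

import java.util.TreeMap;

public class TimeSeriesSketch {
    public static void main(String[] args) {
        long now = System.currentTimeMillis();
        TreeMap<Long, Double> series = new TreeMap<Long, Double>();
        series.put(now + 300, 3.0);
        series.put(now + 100, 1.0);
        series.put(now + 200, 2.0);
        // Keys iterate in ascending timestamp order regardless of insertion order.
        System.out.println(series.firstKey() == now + 100); // true
        // A half-open time window can be sliced out without scanning the whole map.
        System.out.println(series.subMap(now + 100, now + 250)); // {now+100=1.0, now+200=2.0}
    }
}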

From source file:fr.cirad.mgdb.exporting.markeroriented.GFFExportHandler.java

@Override
public void exportData(OutputStream outputStream, String sModule, List<SampleId> sampleIDs,
        ProgressIndicator progress, DBCursor markerCursor, Map<Comparable, Comparable> markerSynonyms,
        int nMinimumGenotypeQuality, int nMinimumReadDepth, Map<String, InputStream> readyToExportFiles)
        throws Exception {
    MongoTemplate mongoTemplate = MongoTemplateManager.get(sModule);
    ZipOutputStream zos = new ZipOutputStream(outputStream);

    if (readyToExportFiles != null)
        for (String readyToExportFile : readyToExportFiles.keySet()) {
            zos.putNextEntry(new ZipEntry(readyToExportFile));
            InputStream inputStream = readyToExportFiles.get(readyToExportFile);
            byte[] dataBlock = new byte[1024];
            int count = inputStream.read(dataBlock, 0, 1024);
            while (count != -1) {
                zos.write(dataBlock, 0, count);
                count = inputStream.read(dataBlock, 0, 1024);
            }
        }

    File warningFile = File.createTempFile("export_warnings_", "");
    FileWriter warningFileWriter = new FileWriter(warningFile);

    int markerCount = markerCursor.count();

    List<Individual> individuals = getIndividualsFromSamples(sModule, sampleIDs);
    ArrayList<String> individualList = new ArrayList<String>();
    for (int i = 0; i < sampleIDs.size(); i++) {
        Individual individual = individuals.get(i);
        if (!individualList.contains(individual.getId())) {
            individualList.add(individual.getId());
        }
    }

    String exportName = sModule + "_" + markerCount + "variants_" + individualList.size() + "individuals";
    zos.putNextEntry(new ZipEntry(exportName + ".gff3"));
    String header = "##gff-version 3" + LINE_SEPARATOR;
    zos.write(header.getBytes());

    TreeMap<String, String> typeToOntology = new TreeMap<String, String>();
    typeToOntology.put(Type.SNP.toString(), "SO:0000694");
    typeToOntology.put(Type.INDEL.toString(), "SO:1000032");
    typeToOntology.put(Type.MIXED.toString(), "SO:0001059");
    typeToOntology.put(Type.SYMBOLIC.toString(), "SO:0000109");
    typeToOntology.put(Type.MNP.toString(), "SO:0001059");

    int avgObjSize = (Integer) mongoTemplate
            .getCollection(mongoTemplate.getCollectionName(VariantRunData.class)).getStats().get("avgObjSize");
    int nChunkSize = nMaxChunkSizeInMb * 1024 * 1024 / avgObjSize;
    short nProgress = 0, nPreviousProgress = 0;
    long nLoadedMarkerCount = 0;

    while (markerCursor.hasNext()) {
        int nLoadedMarkerCountInLoop = 0;
        Map<Comparable, String> markerChromosomalPositions = new LinkedHashMap<Comparable, String>();
        boolean fStartingNewChunk = true;
        markerCursor.batchSize(nChunkSize);
        while (markerCursor.hasNext() && (fStartingNewChunk || nLoadedMarkerCountInLoop % nChunkSize != 0)) {
            DBObject exportVariant = markerCursor.next();
            DBObject refPos = (DBObject) exportVariant.get(VariantData.FIELDNAME_REFERENCE_POSITION);
            markerChromosomalPositions.put((Comparable) exportVariant.get("_id"),
                    refPos.get(ReferencePosition.FIELDNAME_SEQUENCE) + ":"
                            + refPos.get(ReferencePosition.FIELDNAME_START_SITE));
            nLoadedMarkerCountInLoop++;
            fStartingNewChunk = false;
        }

        List<Comparable> currentMarkers = new ArrayList<Comparable>(markerChromosomalPositions.keySet());
        LinkedHashMap<VariantData, Collection<VariantRunData>> variantsAndRuns = MgdbDao.getSampleGenotypes(
                mongoTemplate, sampleIDs, currentMarkers, true,
                null /*new Sort(VariantData.FIELDNAME_REFERENCE_POSITION + "." + ChromosomalPosition.FIELDNAME_SEQUENCE).and(new Sort(VariantData.FIELDNAME_REFERENCE_POSITION + "." + ChromosomalPosition.FIELDNAME_START_SITE))*/); // query mongo db for matching genotypes
        for (VariantData variant : variantsAndRuns.keySet()) // read data and write results into temporary files (one per sample)
        {
            Comparable variantId = variant.getId();
            List<String> variantDataOrigin = new ArrayList<String>();

            Map<String, Integer> gqValueForSampleId = new LinkedHashMap<String, Integer>();
            Map<String, Integer> dpValueForSampleId = new LinkedHashMap<String, Integer>();
            Map<String, List<String>> individualGenotypes = new LinkedHashMap<String, List<String>>();
            List<String> chromAndPos = Helper.split(markerChromosomalPositions.get(variantId), ":");
            if (chromAndPos.size() == 0)
                LOG.warn("Chromosomal position not found for marker " + variantId);
            // LOG.debug(marker + "\t" + (chromAndPos.length == 0 ? "0" : chromAndPos[0]) + "\t" + 0 + "\t" + (chromAndPos.length == 0 ? 0l : Long.parseLong(chromAndPos[1])) + LINE_SEPARATOR);
            if (markerSynonyms != null) {
                Comparable syn = markerSynonyms.get(variantId);
                if (syn != null)
                    variantId = syn;
            }

            Collection<VariantRunData> runs = variantsAndRuns.get(variant);
            if (runs != null)
                for (VariantRunData run : runs)
                    for (Integer sampleIndex : run.getSampleGenotypes().keySet()) {
                        SampleGenotype sampleGenotype = run.getSampleGenotypes().get(sampleIndex);
                        String individualId = individuals
                                .get(sampleIDs.indexOf(new SampleId(run.getId().getProjectId(), sampleIndex)))
                                .getId();

                        Integer gq = null;
                        try {
                            gq = (Integer) sampleGenotype.getAdditionalInfo().get(VariantData.GT_FIELD_GQ);
                        } catch (Exception ignored) {
                        }
                        if (gq != null && gq < nMinimumGenotypeQuality)
                            continue;

                        Integer dp = null;
                        try {
                            dp = (Integer) sampleGenotype.getAdditionalInfo().get(VariantData.GT_FIELD_DP);
                        } catch (Exception ignored) {
                        }
                        if (dp != null && dp < nMinimumReadDepth)
                            continue;

                        String gtCode = sampleGenotype.getCode();
                        List<String> storedIndividualGenotypes = individualGenotypes.get(individualId);
                        if (storedIndividualGenotypes == null) {
                            storedIndividualGenotypes = new ArrayList<String>();
                            individualGenotypes.put(individualId, storedIndividualGenotypes);
                        }
                        storedIndividualGenotypes.add(gtCode);
                    }

            zos.write((chromAndPos.get(0) + "\t" + StringUtils.join(variantDataOrigin, ";") /*source*/ + "\t"
                    + typeToOntology.get(variant.getType()) + "\t" + Long.parseLong(chromAndPos.get(1)) + "\t"
                    + Long.parseLong(chromAndPos.get(1)) + "\t" + "." + "\t" + "+" + "\t" + "." + "\t")
                            .getBytes());
            Comparable syn = markerSynonyms == null ? null : markerSynonyms.get(variant.getId());
            zos.write(("ID=" + variant.getId() + ";" + (syn != null ? "Name=" + syn + ";" : "") + "alleles="
                    + StringUtils.join(variant.getKnownAlleleList(), "/") + ";" + "refallele="
                    + variant.getKnownAlleleList().get(0) + ";").getBytes());

            /* we use individualList because it has the proper ordering */
            for (int j = 0; j < individualList.size(); j++) {

                NumberFormat nf = NumberFormat.getInstance(Locale.US);
                nf.setMaximumFractionDigits(4);
                HashMap<String, Integer> compt1 = new HashMap<String, Integer>();
                int highestGenotypeCount = 0;
                int sum = 0;

                String individualId = individualList.get(j);
                List<String> genotypes = individualGenotypes.get(individualId);
                HashMap<Object, Integer> genotypeCounts = new HashMap<Object, Integer>(); // will help us to keep track of missing genotypes

                String mostFrequentGenotype = null;
                if (genotypes != null)
                    for (String genotype : genotypes) {
                        if (genotype.length() == 0)
                            continue; /* skip missing genotypes */

                        int count = 0;
                        for (String t : variant.getAllelesFromGenotypeCode(genotype)) {
                            for (String t1 : variant.getKnownAlleleList()) {
                                if (t.equals(t1) && !(compt1.containsKey(t1))) {
                                    count++;
                                    compt1.put(t1, count);
                                } else if (t.equals(t1) && compt1.containsKey(t1)) {
                                    if (compt1.get(t1) != 0) {
                                        count++;
                                        compt1.put(t1, count);
                                    } else
                                        compt1.put(t1, count);
                                } else if (!(compt1.containsKey(t1))) {
                                    compt1.put(t1, 0);
                                }
                            }
                        }
                        for (int countValue : compt1.values()) {
                            sum += countValue;
                        }

                        int gtCount = 1 + MgdbDao.getCountForKey(genotypeCounts, genotype);
                        if (gtCount > highestGenotypeCount) {
                            highestGenotypeCount = gtCount;
                            mostFrequentGenotype = genotype;
                        }
                        genotypeCounts.put(genotype, gtCount);
                    }

                List<String> alleles = mostFrequentGenotype == null ? new ArrayList<String>()
                        : variant.getAllelesFromGenotypeCode(mostFrequentGenotype);

                if (alleles.size() != 0) {
                    zos.write(("acounts=" + individualId + ":").getBytes());

                    for (String knowAllelesCompt : compt1.keySet()) {
                        zos.write(
                                (knowAllelesCompt + " " + nf.format(compt1.get(knowAllelesCompt) / (float) sum)
                                        + " " + compt1.get(knowAllelesCompt) + " ").getBytes());
                    }
                    zos.write((alleles.size() + ";").getBytes());
                }
                if (genotypeCounts.size() > 1) {
                    Comparable sVariantId = markerSynonyms != null ? markerSynonyms.get(variant.getId())
                            : variant.getId();
                    warningFileWriter.write("- Dissimilar genotypes found for variant "
                            + (sVariantId == null ? variant.getId() : sVariantId) + ", individual "
                            + individualId + ". Exporting most frequent: " + StringUtils.join(alleles, ",")
                            + "\n");
                }
            }
            zos.write((LINE_SEPARATOR).getBytes());
        }

        if (progress.hasAborted())
            return;

        nLoadedMarkerCount += nLoadedMarkerCountInLoop;
        nProgress = (short) (nLoadedMarkerCount * 100 / markerCount);
        if (nProgress > nPreviousProgress) {
            //            if (nProgress%5 == 0)
            //               LOG.info("========================= exportData: " + nProgress + "% =========================" + (System.currentTimeMillis() - before)/1000 + "s");
            progress.setCurrentStepProgress(nProgress);
            nPreviousProgress = nProgress;
        }
    }

    warningFileWriter.close();
    if (warningFile.length() > 0) {
        zos.putNextEntry(new ZipEntry(exportName + "-REMARKS.txt"));
        int nWarningCount = 0;
        BufferedReader in = new BufferedReader(new FileReader(warningFile));
        String sLine;
        while ((sLine = in.readLine()) != null) { // each warning occupies a single line
            zos.write((sLine + "\n").getBytes());
            nWarningCount++;
        }
        LOG.info("Number of Warnings for export (" + exportName + "): " + nWarningCount);
        in.close();
    }
    warningFile.delete();

    zos.close();
    progress.setCurrentStepProgress((short) 100);
}

From source file:com.almende.dht.Bucket.java

/**
 * Gets the nodes closest to the given key.
 *
 * @param near
 *            the key to measure distance from
 * @param limit
 *            the maximum number of nodes to return
 * @param filter
 *            keys to exclude from the result (may be null)
 * @return the closest nodes, nearest first
 */
public List<Node> getClosestNodes(final Key near, final int limit, final Collection<Key> filter) {
    synchronized (nodes) {
        final TreeMap<Key, Node> distMap = new TreeMap<Key, Node>();
        final Iterator<Entry<Key, Node>> iter = nodes.entrySet().iterator();
        while (iter.hasNext()) {
            Entry<Key, Node> entry = iter.next();
            if (filter != null && filter.contains(entry.getKey())) {
                continue;
            }
            distMap.put(near.dist(entry.getKey()), entry.getValue());
        }
        final Node[] values = distMap.values().toArray(new Node[0]);
        return Arrays.asList(Arrays.copyOf(values, Math.min(limit, distMap.size())));
    }
}
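
The example above keys a TreeMap by distance so that iterating over values() visits nodes nearest-first. Here is the same pattern in isolation, as a hypothetical sketch that assumes integer keys and the XOR metric this DHT code suggests (note that, as in the original, two entries at equal distance collapse into one):

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;

public class ClosestSketch {
    // Hypothetical helper: returns up to `limit` values whose integer keys
    // are closest to `target` under the XOR metric.
    static List<String> closest(Map<Integer, String> nodes, int target, int limit) {
        TreeMap<Integer, String> byDistance = new TreeMap<Integer, String>();
        for (Map.Entry<Integer, String> e : nodes.entrySet()) {
            byDistance.put(e.getKey() ^ target, e.getValue()); // smaller XOR = closer
        }
        List<String> result = new ArrayList<String>(byDistance.values());
        return result.subList(0, Math.min(limit, result.size()));
    }
}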

From source file:de.tudarmstadt.ukp.dkpro.wsd.wsi.io.AMBIENTReader.java

protected Map<Integer, String> readTopics(String file) throws IOException {
    final TreeMap<Integer, String> topics = new TreeMap<Integer, String>();
    final BufferedReader br = new BufferedReader(new FileReader(file));
    String line = br.readLine();
    while (line != null) {
        final String[] tok = line.split("\t");
        try {
            topics.put(Integer.parseInt(tok[0]), tok[1].replaceAll("_", " "));
        } catch (final NumberFormatException e) {
            // skip lines whose first column is not an integer topic id
        }
        line = br.readLine();
    }
    br.close();
    return topics;
}

From source file:com.samples.platform.core.security.UserProvider.java

/**
 * Read the user definitions out of the properties and put them into the map
 * {@link UserProvider#userMap}.
 */
public void createUsers() {
    this.logger.trace("+createUsers");
    /* Create a _sorted_ map containing all user definition properties. */
    TreeMap<String, String> userDefinitionProperties = new TreeMap<String, String>();
    for (Map.Entry<String, String> entry : this.properties.getProperties().entrySet()) {
        if (entry.getKey() != null && entry.getKey().startsWith(BUS_PROPERTY_NAME_START)) {
            userDefinitionProperties.put(entry.getKey(), entry.getValue());
        }
    }

    /* Create a map of UserDefinitions parsed out of the properties. */
    HashMap<String, UserDefinition> parsedUserDefinitions = new HashMap<String, UserProvider.UserDefinition>();
    UserDefinition userDefinition = null;
    String userKey = null;
    for (Map.Entry<String, String> userDefinitionProperty : userDefinitionProperties.entrySet()) {
        /* Get the user key out of the property name. */
        userKey = extractUser(userDefinitionProperty.getKey());
        if (userDefinition == null || !userDefinition.getKey().equals(userKey)) {
            /* New user key extracted out of the property name. */
            if (userDefinition != null) {
                /* Previous UserDefinition finished. Put it into the map. */
                parsedUserDefinitions.put(userDefinition.getName(), userDefinition);
            }
            /* Create the actual UserDefinition with the user key. */
            userDefinition = new UserDefinition(userKey);
        }
        /* Setup the content of the property into the UserDefinition. */
        if (userDefinitionProperty.getKey().endsWith("userName")) {
            userDefinition.setName(userDefinitionProperty.getValue());
        } else if (userDefinitionProperty.getKey().endsWith("password")) {
            userDefinition.setPassword(userDefinitionProperty.getValue());
        } else if (userDefinitionProperty.getKey().contains(".role.")) {
            if (userDefinitionProperty.getValue() != null
                    && !userDefinitionProperty.getValue().equals(ROLE_ANONYMOUS)) {
                userDefinition.addRolename(userDefinitionProperty.getValue());
            }
        }
    }
    /* Put the last UserDefinition to the map of parsedUserDefinitions. */
    if (userDefinition != null && userDefinition.getName() != null) {
        parsedUserDefinitions.put(userDefinition.getName(), userDefinition);
    }

    /*
     * Update the userMap. If the userName of the userMap is not part of the
     * parsedUserDefinitions any more, the user is removed out of the
     * userMap.
     */
    List<String> keyList = new ArrayList<String>(this.userMap.size());
    Collections.addAll(keyList, this.userMap.keySet().toArray(new String[this.userMap.keySet().size()]));
    for (String userName : keyList) {
        if (!parsedUserDefinitions.containsKey(userName)) {
            this.userMap.remove(userName);
        }
    }
    /*
     * All UserDefinitions out of the parsedUserDefinitions are mapped to a
     * User and put into the userMap.
     */
    for (UserDefinition ud : parsedUserDefinitions.values()) {
        this.userMap.put(ud.getName(), this.getUser(ud));
    }
    this.logger.trace("-createUsers");
}
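
The sorted map is what makes the single grouping pass above work: all properties sharing a user-key prefix arrive consecutively during iteration. A minimal sketch of that behavior (the property names are invented for illustration):

import java.util.Map;
import java.util.TreeMap;

public class SortedGroupingSketch {
    public static void main(String[] args) {
        TreeMap<String, String> props = new TreeMap<String, String>();
        props.put("bus.user.2.userName", "bob");
        props.put("bus.user.1.password", "secret");
        props.put("bus.user.1.userName", "alice");
        // Sorted iteration groups user 1's entries before user 2's:
        for (Map.Entry<String, String> e : props.entrySet()) {
            System.out.println(e.getKey() + " = " + e.getValue());
        }
    }
}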

From source file:cn.edu.sdnu.i.util.xauth.Xauth.java

private void doAccessToken() {

    // Use a sorted TreeMap for the parameters, so the signed request URL is built in a deterministic key order.
    TreeMap<String, String> parameters = new TreeMap<String, String>();

    // 1. The consumer key identifying this app.
    parameters.put(Constants.ConsumerKeyParameter, consumerKey);
    Log.d("test", "1.consumerKey:" + consumerKey);

    // 2. A random UUID used as the request nonce.
    /*
     * A GUID is a globally unique identifier, formatted as
     * FFFFFFFF-FFFF-FFFF-FFFF-FFFFFFFFFFFF.
     */

    UUID uuid = UUID.randomUUID();

    // String form of the UUID, used as the nonce.
    String guid = uuid.toString();

    // nonce
    parameters.put(Constants.NonceParameter, guid);
    Log.d("test", "2.guid:" + guid);

    // 3. Signature method: HMAC-SHA1.
    parameters.put(Constants.SignatureMethodParameter, HMACSHA1SignatureType);
    Log.d("test", "3.SignatureMethodParameter:" + HMACSHA1SignatureType);

    // 4. The timestamp.
    /*
     * Unix time counts the seconds elapsed since 1970-01-01 00:00:00 UTC.
     * For example, 2014/1/29 16:56:58 UTC+8 is 2014/1/29 08:56:58 UTC,
     * i.e. the Unix timestamp 1390985818.
     */
    parameters.put(Constants.TimestampParameter, Long.toString(System.currentTimeMillis() / 1000));
    Log.d("test", "4.TimestampParameter:" + Long.toString(System.currentTimeMillis() / 1000));

    // 5. The OAuth version used by XAuth: 1.0.
    parameters.put(Constants.VersionParameter, OAuthVersion);
    Log.d("test", "5.VersionParameter:" + OAuthVersion);

    // 6."client_auth"
    parameters.put(Constants.AuthModeParameter, XAuthMode);
    Log.d("test", "6.AuthModeParameter:" + XAuthMode);

    // 7. and 8. The user's password and username.
    parameters.put(Constants.AuthPasswordParameter, this.xauthPassword);
    parameters.put(Constants.AuthUsernameParameter, this.xauthUsername);
    Log.d("test", "7.AuthPasswordParameter:" + xauthPassword);
    Log.d("test", "8.AuthUsernameParameter:" + xauthUsername);

    // Buffer to collect the HTTP response body (built as a StringBuffer, read out as a String below).
    StringBuffer resultBuffer = new StringBuffer("");

    // Perform the HTTP request; status reports success and resultBuffer receives the body.

    Boolean status = TryRequestRemote(BASE_URL + AUTH_URL, "", parameters, resultBuffer);

    // Convert the response buffer to a String.
    /*
     * Example response: oauth_token=88cd54bbf33a4ec79fe297cdd3ac5315 &
     * oauth_token_secret=a825b4ba2689a77b5cf5b3ee85b371141d3f77c2 &
     * user_id=111116 & user_type=2 & expires_in=604800
     */

    String result = resultBuffer.toString();
    Log.d("test", "17.result:" + result);

    if (status) {
        // HashTable pairs = new HashTable();
        Map<String, String> pairs = new HashMap<String, String>();
        // Split the response into "key=value" pairs (e.g. oauth_token=...) and load them into a Map.
        String[] arr = result.split("&");
        for (int i = 0; i < arr.length; i++) {
            // Split each pair at '='.
            String[] parts = arr[i].split("=");

            if (parts.length == 2) {
                pairs.put(parts[0], parts[1]);
            }
        }

        // Read the token key and token secret out of the response map.
        String tokenKey1 = pairs.get(Constants.TokenParameter);
        String tokenSecret1 = pairs.get(Constants.TokenSecretParameter);

        if ((tokenKey1 != null) && (tokenSecret1 != null)) {
            // Store the token key and token secret.
            token = tokenKey1;
            tokenSecret = tokenSecret1;
            Log.d("test", "18.tokenKey:" + token);
            Log.d("test", "19.tokenSecret:" + tokenSecret);
        }
    }
}

From source file:models.Indexer.java

private void performIndexing(String cur_file_name) throws IOException {
    int cur_docId;
    if (!docPaths_to_ids_map.containsKey(cur_file_name)) {
        cur_docId = num_of_docs_indexed;
        docPaths_to_ids_map.put(cur_file_name, num_of_docs_indexed);
        num_of_docs_indexed++;
    } else {
        cur_docId = docPaths_to_ids_map.get(cur_file_name);
    }

    if (cur_file_name.endsWith("pdf")) {
        System.out.println("reading: " + cur_file_name + " file");
        String text = null;
        PDFdoc cur_doc = new PDFdoc();
        try {
            text = cur_doc.extractPDF(installation_directory_path + "\\downloads\\" + cur_file_name);

        } catch (Exception e) {
            System.out.println("can not extract pdf");
            System.err.println();
        }
        String[] tokens_arr;
        if (text == null) {
            System.out.println("problem to read pdf");
            return;
        }
        tokens_arr = text.toLowerCase().split("[^a-z]");
        int total_words = tokens_arr.length;
        for (String tokens_arr1 : tokens_arr) {
            String term = tokens_arr1;
            if (inverted_index.containsKey(term)) // is this term present in Index?
            {
                TreeMap<Integer, Integer> Doc_hashtable;
                Doc_hashtable = inverted_index.get(term);

                if (Doc_hashtable.containsKey(cur_docId)) // has this document already been counted for this term?
                {
                    Integer TF_of_the_term_in_this_doc = (Integer) inverted_index.get(term).get(cur_docId);
                    TF_of_the_term_in_this_doc++;
                    Doc_hashtable.put(cur_docId, TF_of_the_term_in_this_doc);
                    inverted_index.put(term, Doc_hashtable);
                } else // term is in the index, but not yet for this document
                {
                    Integer TF_of_this_term_in_this_doc = 1;
                    Doc_hashtable.put(cur_docId, TF_of_this_term_in_this_doc);
                    inverted_index.put(term, Doc_hashtable);

                }
            } else // Index does not contain this term
            {
                Integer TF = 1;
                TreeMap<Integer, Integer> doc_hashtable = new TreeMap<>();
                doc_hashtable.put(cur_docId, TF);
                inverted_index.put(term, doc_hashtable);
            }
        }
        //            System.out.println(inverted_index);
        documents_len_table.put(cur_docId, total_words);

    }

}
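
The read-modify-write put pattern above is the classic way to build a frequency table; on Java 8 and later the same accumulation can be written more compactly with TreeMap.merge. A minimal sketch with illustrative names (not the Indexer's actual API):

import java.util.TreeMap;

public class TermFrequencySketch {
    public static void main(String[] args) {
        TreeMap<Integer, Integer> tfByDocId = new TreeMap<Integer, Integer>();
        int docId = 7;
        tfByDocId.merge(docId, 1, Integer::sum); // puts 1 on first sight
        tfByDocId.merge(docId, 1, Integer::sum); // increments afterwards
        System.out.println(tfByDocId); // {7=2}
    }
}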

From source file:org.apache.ambari.server.controller.metrics.timeline.cache.TimelineMetricCacheTest.java

@Test
public void testTimelineMetricCacheTimeseriesUpdates() throws Exception {
    Configuration configuration = createNiceMock(Configuration.class);
    expect(configuration.getMetricsRequestConnectTimeoutMillis()).andReturn(10000);
    expect(configuration.getMetricsRequestReadTimeoutMillis()).andReturn(10000);
    expect(configuration.getMetricsRequestIntervalReadTimeoutMillis()).andReturn(10000);
    // Disable buffer fudge factor
    expect(configuration.getMetricRequestBufferTimeCatchupInterval()).andReturn(0L);

    replay(configuration);

    TimelineMetricCacheEntryFactory factory = createMockBuilder(TimelineMetricCacheEntryFactory.class)
            .withConstructor(configuration).createMock();

    replay(factory);

    long now = System.currentTimeMillis();

    // Existing values

    final TimelineMetric timelineMetric1 = new TimelineMetric();
    timelineMetric1.setMetricName("cpu_user");
    timelineMetric1.setAppId("app1");
    TreeMap<Long, Double> metricValues = new TreeMap<Long, Double>();
    metricValues.put(now - 100, 1.0);
    metricValues.put(now - 200, 2.0);
    metricValues.put(now - 300, 3.0);
    timelineMetric1.setMetricValues(metricValues);
    final TimelineMetric timelineMetric2 = new TimelineMetric();
    timelineMetric2.setMetricName("cpu_nice");
    timelineMetric2.setAppId("app1");
    metricValues = new TreeMap<Long, Double>();
    metricValues.put(now + 400, 1.0);
    metricValues.put(now + 500, 2.0);
    metricValues.put(now + 600, 3.0);
    timelineMetric2.setMetricValues(metricValues);

    TimelineMetrics existingMetrics = new TimelineMetrics();
    existingMetrics.getMetrics().add(timelineMetric1);
    existingMetrics.getMetrics().add(timelineMetric2);

    TimelineMetricsCacheValue existingMetricValue = new TimelineMetricsCacheValue(now - 1000, now + 1000,
            existingMetrics, null);

    // New values
    TimelineMetrics newMetrics = new TimelineMetrics();
    TimelineMetric timelineMetric3 = new TimelineMetric();
    timelineMetric3.setMetricName("cpu_user");
    timelineMetric3.setAppId("app1");
    metricValues = new TreeMap<Long, Double>();
    metricValues.put(now + 1400, 1.0);
    metricValues.put(now + 1500, 2.0);
    metricValues.put(now + 1600, 3.0);
    timelineMetric3.setMetricValues(metricValues);
    newMetrics.getMetrics().add(timelineMetric3);

    factory.updateTimelineMetricsInCache(newMetrics, existingMetricValue, now, now + 2000, false);

    Assert.assertEquals(2, existingMetricValue.getTimelineMetrics().getMetrics().size());

    TimelineMetric newMetric1 = null;
    TimelineMetric newMetric2 = null;

    for (TimelineMetric metric : existingMetricValue.getTimelineMetrics().getMetrics()) {
        if (metric.getMetricName().equals("cpu_user")) {
            newMetric1 = metric;
        }
        if (metric.getMetricName().equals("cpu_nice")) {
            newMetric2 = metric;
        }
    }

    Assert.assertNotNull(newMetric1);
    Assert.assertNotNull(newMetric2);
    Assert.assertEquals(3, newMetric1.getMetricValues().size());
    Assert.assertEquals(3, newMetric2.getMetricValues().size());
    Map<Long, Double> newMetricsMap = newMetric1.getMetricValues();
    Iterator<Long> metricKeyIterator = newMetricsMap.keySet().iterator();
    Assert.assertEquals(now + 1400, metricKeyIterator.next().longValue());
    Assert.assertEquals(now + 1500, metricKeyIterator.next().longValue());
    Assert.assertEquals(now + 1600, metricKeyIterator.next().longValue());

    verify(configuration, factory);
}

From source file:com.kaikoda.cah.TestDeck.java

@Test
public void testDeckTransform_nullParamValue()
        throws SAXException, IOException, TransformerException, ParserConfigurationException {

    // Retrieve the control card data for the Dutch edition of CAH (so we can
    // assume all pre-processing is complete).
    Document xml = this.getDocument("/data/control/cards/netherlands.xml");

    Deck customDeck = new Deck(xml);
    customDeck.setErrorListener(new ProgressReporter());

    // Create a container to hold the result of the transformation
    DocumentBuilder documentBuilder = Deck.newDocumentBuilder();
    Document document = documentBuilder.newDocument();
    DOMResult result = new DOMResult(document);

    // Build the parameter list
    TreeMap<String, String> params = new TreeMap<String, String>();
    params.put("path-to-dictionary", TestDeck.DICTIONARY_DATA_ENGLISH.getAbsolutePath());
    params.put("output-language", null);

    customDeck.transform(new DOMSource(xml), Deck.getXsl(Deck.PATH_TO_TRANSLATION_XSL), result, params);

    // Retrieve the control card data for American CAH.
    Document expected = xml;

    // Check that the result is the same cards, translated into American
    // English (which is the default output language).
    assertXMLEqual(expected, document);

}

From source file:com.github.dozermapper.core.functional_tests.MapTypeTest.java

@Test
public void testTreeMap() {
    TreeMap map = new TreeMap();
    map.put("a", "b");

    TreeMap result = mapper.map(map, TreeMap.class);

    assertNotNull(result);
    assertEquals(1, result.size());
}