Example usage for java.util TreeMap lastKey

List of usage examples for java.util TreeMap lastKey

Introduction

This page lists example usages of java.util.TreeMap.lastKey(). The method returns the last (highest) key currently in the map and throws NoSuchElementException if the map is empty.

Prototype

public K lastKey() 
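
For orientation, a minimal self-contained sketch of the call; the class name and sample values here are illustrative and not taken from the examples below.

import java.util.NoSuchElementException;
import java.util.TreeMap;

public class LastKeyExample {

    public static void main(String[] args) {
        TreeMap<Long, Double> metrics = new TreeMap<Long, Double>();
        metrics.put(1000L, 0.5);
        metrics.put(2000L, 0.7);
        metrics.put(3000L, 0.9);

        // lastKey() returns the highest key in the map's sort order
        Long endTime = metrics.lastKey();     // 3000
        Long startTime = metrics.firstKey();  // 1000
        System.out.println("startTime = " + startTime + ", endTime = " + endTime);

        // On an empty map, lastKey() throws NoSuchElementException
        try {
            new TreeMap<Long, Double>().lastKey();
        } catch (NoSuchElementException e) {
            System.out.println("Empty map: " + e);
        }
    }
}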

Usage

From source file:org.apache.ambari.server.controller.metrics.timeline.cache.TimelineMetricCacheEntryFactory.java

/**
 * Update cache with new timeseries data
 */
protected void updateTimelineMetricsInCache(TimelineMetrics newMetrics,
        TimelineMetricsCacheValue timelineMetricsCacheValue, Long requestedStartTime, Long requestedEndTime,
        boolean removeAll) {

    TimelineMetrics existingTimelineMetrics = timelineMetricsCacheValue.getTimelineMetrics();

    // Remove values that do not fit before adding new data
    updateExistingMetricValues(existingTimelineMetrics, requestedStartTime, requestedEndTime, removeAll);

    if (newMetrics != null && !newMetrics.getMetrics().isEmpty()) {
        for (TimelineMetric timelineMetric : newMetrics.getMetrics()) {
            if (LOG.isTraceEnabled()) {
                TreeMap<Long, Double> sortedMetrics = new TreeMap<Long, Double>(
                        timelineMetric.getMetricValues());

                LOG.trace("New metric: " + timelineMetric.getMetricName() + " # "
                        + timelineMetric.getMetricValues().size() + ", startTime = " + sortedMetrics.firstKey()
                        + ", endTime = " + sortedMetrics.lastKey());
            }

            TimelineMetric existingMetric = null;

            for (TimelineMetric metric : existingTimelineMetrics.getMetrics()) {
                if (metric.equalsExceptTime(timelineMetric)) {
                    existingMetric = metric;
                }
            }

            if (existingMetric != null) {
                // Add new ones
                existingMetric.getMetricValues().putAll(timelineMetric.getMetricValues());

                if (LOG.isTraceEnabled()) {
                    TreeMap<Long, Double> sortedMetrics = new TreeMap<Long, Double>(
                            existingMetric.getMetricValues());
                    LOG.trace("Merged metric: " + timelineMetric.getMetricName() + ", " + "Final size: "
                            + existingMetric.getMetricValues().size() + ", startTime = "
                            + sortedMetrics.firstKey() + ", endTime = " + sortedMetrics.lastKey());
                }
            } else {
                existingTimelineMetrics.getMetrics().add(timelineMetric);
            }
        }
    }
}

From source file:chatbot.Chatbot.java

/*****************************************************************************************************
 *
 * @param input
 * @return
 */
public String matchBestInput(String input) {

    ArrayList<String> result = new ArrayList<>();
    TreeMap<Float, ArrayList<Integer>> sortedSim = matchInputFull(input);
    if (sortedSim == null || sortedSim.keySet().size() < 1 || sortedSim.lastKey() < .1) {
        return "I don't know";
    }
    Object[] floats = sortedSim.keySet().toArray();
    int numClusters = 3;
    if (floats.length < numClusters)
        numClusters = floats.length;
    float[] floatarray = new float[floats.length];
    for (int i = 0; i < floats.length; i++)
        floatarray[i] = (float) floats[i];
    ArrayList<ArrayList<Float>> res = KMeans.run(floatarray.length, floatarray, numClusters);
    ArrayList<Float> topCluster = res.get(res.size() - 2);
    while (res.get(res.size() - 2).size() > 3 && numClusters < floats.length) {
        numClusters++;
        res = KMeans.run(floatarray.length, floatarray, numClusters);
        topCluster = res.get(res.size() - 2);
        //System.out.println("Info in TFIDF.matchBestInput(): " + res);
        //System.out.println("Info in TFIDF.matchBestInput(): " + topCluster);
    }
    for (int i = 0; i < topCluster.size(); i++) {
        ArrayList<Integer> temp = sortedSim.get(topCluster.get(i));
        for (int j = 0; j < temp.size(); j++)
            result.add(lines.get(temp.get(j).intValue()));
    }

    ArrayList<String> resultNoProfanity = profanityFilter(result);

    ArrayList<String> rankedResponses = rankResponses(resultNoProfanity, input);

    return chooseBestResponse(rankedResponses);
}

From source file:com.datatorrent.contrib.hdht.PurgeTest.java

/**
 * Purge data from start of the file.
 */
@Test
public void testMultiplePurgeFromDataFiles() throws IOException {
    File file = new File(testInfo.getDir());
    FileUtils.deleteDirectory(file);

    FileAccessFSImpl fa = new MockFileAccess();
    fa.setBasePath(file.getAbsolutePath());
    HDHTWriter hds = new HDHTWriter();
    hds.setFileStore(fa);
    hds.setFlushSize(0); // flush after every key
    hds.setFlushIntervalCount(0);
    hds.setup(new OperatorContextTestHelper.TestIdOperatorContext(0, new DefaultAttributeMap()));

    hds.writeExecutor = MoreExecutors.sameThreadExecutor(); // synchronous flush
    hds.beginWindow(1);
    for (int i = 100; i < 1000; i++) {
        hds.put(1, newSlice(i), newData(i));
    }
    hds.endWindow();
    hds.checkpointed(1);
    hds.committed(1);

    HDHTReader.BucketMeta meta = hds.loadBucketMeta(1);
    HDHTReader.BucketFileMeta fmeta = meta.files.firstEntry().getValue();

    hds.beginWindow(2);
    hds.purge(1, newSlice(0), newSlice(150));
    hds.endWindow();
    hds.checkpointed(2);
    hds.committed(2);

    meta = hds.loadBucketMeta(1);
    fmeta = meta.files.firstEntry().getValue();
    TreeMap<Slice, Slice> data = getData(fa, 1, fmeta.name);
    int startKey = sliceToInt(data.firstKey());
    Assert.assertEquals("The start key in new file", 151, startKey);
    int endKey = sliceToInt(data.lastKey());
    Assert.assertEquals("The end key in neww file", 999, endKey);
}

From source file:com.datatorrent.contrib.hdht.PurgeTest.java

/**
 * Purge data from start, middle and end of the file.
 */
@Test
public void purgeDataFromMiddleOfFile() throws IOException {
    File file = new File(testInfo.getDir());
    FileUtils.deleteDirectory(file);

    FileAccessFSImpl fa = new MockFileAccess();
    fa.setBasePath(file.getAbsolutePath());
    HDHTWriter hds = new HDHTWriter();
    hds.setFileStore(fa);
    hds.setFlushSize(0); // flush after every key
    hds.setFlushIntervalCount(0);
    hds.setup(new OperatorContextTestHelper.TestIdOperatorContext(0, new DefaultAttributeMap()));
    hds.writeExecutor = MoreExecutors.sameThreadExecutor(); // synchronous flush

    hds.beginWindow(1);
    for (int i = 100; i < 1000; i++) {
        hds.put(1, newSlice(i), newData(i));
    }
    hds.endWindow();
    hds.checkpointed(1);
    hds.committed(1);

    hds.beginWindow(2);
    hds.purge(1, newSlice(150), newSlice(250));
    hds.purge(1, newSlice(200), newSlice(400));
    hds.purge(1, newSlice(450), newSlice(700));
    hds.purge(1, newSlice(950), newSlice(1500));
    hds.endWindow();
    hds.checkpointed(2);
    hds.committed(2);

    HDHTReader.BucketFileMeta fmeta = hds.loadBucketMeta(1).files.firstEntry().getValue();
    TreeMap<Slice, Slice> data = getData(fa, 1, fmeta.name);
    int startKey = sliceToInt(data.firstKey());
    Assert.assertEquals("The start key in new file", 100, startKey);
    int endKey = sliceToInt(data.lastKey());

    Assert.assertArrayEquals("Key 149 is present in file ", newData(149),
            data.get(newSlice(149)).toByteArray());
    Assert.assertEquals("Key 150 is removed from file ", null, data.get(newSlice(150)));
    Assert.assertEquals("Key 160 is removed from file ", null, data.get(newSlice(160)));
    Assert.assertEquals("Key 220 is removed from file ", null, data.get(newSlice(220)));
    Assert.assertEquals("Key 400 is removed from file ", null, data.get(newSlice(400)));
    Assert.assertArrayEquals("Key 401 is present in file ", newData(401),
            data.get(newSlice(401)).toByteArray());

    Assert.assertArrayEquals("Key 449 is present in file ", newData(449),
            data.get(newSlice(449)).toByteArray());
    Assert.assertEquals("Key 450 is removed from file ", null, data.get(newSlice(450)));
    Assert.assertEquals("Key 500 is removed from file ", null, data.get(newSlice(500)));
    Assert.assertEquals("Key 700 is removed from file ", null, data.get(newSlice(700)));
    Assert.assertArrayEquals("Key 701 is present in file ", newData(701),
            data.get(newSlice(701)).toByteArray());

    Assert.assertArrayEquals("Key 949 is present in file ", newData(949),
            data.get(newSlice(949)).toByteArray());
    Assert.assertEquals("Key 950 is removed from file ", null, data.get(newSlice(950)));
    Assert.assertEquals("Key 999 is removed from file ", null, data.get(newSlice(999)));

    Assert.assertEquals("The end key in new file", 949, endKey);
}

From source file:edu.utexas.cs.tactex.tariffoptimization.TariffOptimizerBinaryOneShot.java

private TreeMap<Double, TariffSpecification> binarySearchOptimize(List<TariffSpecification> suggestedSpecs,
        HashMap<TariffSpecification, HashMap<CustomerInfo, Integer>> tariffSubscriptions,
        List<TariffSpecification> competingTariffs, CostCurvesPredictor costCurvesPredictor,
        int currentTimeslot, Broker me,
        HashMap<CustomerInfo, HashMap<TariffSpecification, ShiftedEnergyData>> customer2ShiftedEnergy,
        HashMap<CustomerInfo, ArrayRealVector> customer2NonShiftedEnergy,
        HashMap<CustomerInfo, HashMap<TariffSpecification, Double>> customer2RelevantTariffCharges) {

    TreeMap<Double, TariffSpecification> result = new TreeMap<Double, TariffSpecification>();

    // a value of null means no-op
    ArrayList<TariffSpecification> consideredTariffActions = new ArrayList<TariffSpecification>();
    consideredTariffActions.add(null);
    TreeMap<Double, TariffSpecification> sortedTariffs = utilityEstimator.estimateUtilities(
            consideredTariffActions, tariffSubscriptions, competingTariffs, customer2RelevantTariffCharges,
            customer2ShiftedEnergy, customer2NonShiftedEnergy, marketPredictionManager, costCurvesPredictor,
            currentTimeslot, me);
    result.putAll(sortedTariffs);

    // here do the binary search
    //
    // initialize with edges and middle
    TreeMap<Double, Integer> utilToIndex = new TreeMap<Double, Integer>();
    int numTariffs = suggestedSpecs.size();
    int[] initialIndexes = { 0, numTariffs / 2, numTariffs - 1 };
    for (int index : initialIndexes) {
        evaluateAndRecord(index, utilToIndex, result, suggestedSpecs, consideredTariffActions,
                tariffSubscriptions, competingTariffs, costCurvesPredictor, currentTimeslot, me,
                customer2ShiftedEnergy, customer2NonShiftedEnergy, customer2RelevantTariffCharges);
    }
    int bestIndex = utilToIndex.lastEntry().getValue();
    int secondBestIndex = utilToIndex.lowerEntry(utilToIndex.lastKey()).getValue();
    //
    // binary search
    while (Math.abs(secondBestIndex - bestIndex) >= 2) {
        //log.info("evaluating, bestIndex=" + bestIndex + ", secondBestIndex=" + secondBestIndex);
        int midIndex = (secondBestIndex + bestIndex) / 2;
        evaluateAndRecord(midIndex, utilToIndex, result, suggestedSpecs, consideredTariffActions,
                tariffSubscriptions, competingTariffs, costCurvesPredictor, currentTimeslot, me,
                customer2ShiftedEnergy, customer2NonShiftedEnergy, customer2RelevantTariffCharges);
        bestIndex = utilToIndex.lastEntry().getValue();
        secondBestIndex = utilToIndex.lowerEntry(utilToIndex.lastKey()).getValue();

        // TODO: handle the non-convex case (why does this happen?)
        if (midIndex != bestIndex && midIndex != secondBestIndex) {
            log.warn("non-convex utility values found during binary search. breaking...");
            break;
        }
    }
    //log.info("evaluating, bestIndex=" + bestIndex + ", secondBestIndex=" + secondBestIndex);

    return result;
}

From source file:org.jenkins_ci.update_center.Main.java

private void checkLatestDate(MavenRepository repository, Collection<HPI> artifacts, HPI latestByVersion) {
    try {
        TreeMap<Long, HPI> artifactsByDate = new TreeMap<Long, HPI>();
        for (HPI h : artifacts) {
            h.file = repository.resolve(h.artifact);
            artifactsByDate.put(h.getTimestamp(), h);
        }
        HPI latestByDate = artifactsByDate.get(artifactsByDate.lastKey());
        if (latestByDate != latestByVersion) {
            System.out.println("** Latest-by-version (" + latestByVersion.version + ','
                    + latestByVersion.getTimestampAsString() + ") doesn't match latest-by-date ("
                    + latestByDate.version + ',' + latestByDate.getTimestampAsString() + ')');
        }
    } catch (IOException e) {
        System.out.println("Unable to check for the latest plugin version by date of '"
                + latestByVersion.artifact.artifactId + "': " + e.getMessage());
    }
}

From source file:de.suse.swamp.core.container.WorkflowManager.java

/**
 * Workflow templates from results without errors
 * will be added to the list of available templates, and their
 * doc and image files will be installed.
 */
public void installValidTemplates(List results) {
    for (Iterator it = results.iterator(); it.hasNext();) {
        WorkflowReadResult result = (WorkflowReadResult) it.next();
        if (result.getErrors().size() == 0) {
            WorkflowTemplate wfTemp = result.getTemplate();
            TreeMap templateVersions = new TreeMap();
            if (workflowTempls.keySet().contains(wfTemp.getName())) {
                templateVersions = (TreeMap) workflowTempls.get(wfTemp.getName());
            } else {
                workflowTempls.put(wfTemp.getName(), templateVersions);
            }
            // only install files from the latest version:
            if (templateVersions.isEmpty()
                    || ((String) templateVersions.lastKey()).compareTo(result.getWfVersion()) < 0) {
                try {
                    installWorkflowFiles(wfTemp.getName(), wfTemp.getVersion());
                } catch (Exception e) {
                    Logger.ERROR("Installing files from template: " + wfTemp.getName() + " failed. "
                            + e.getMessage());
                }
            }
            templateVersions.put(result.getWfVersion(), wfTemp);
            clearWorkflowCache(result.getWfName(), result.getWfVersion());
            Logger.DEBUG("Successfully added " + result.getWfName() + "-" + result.getWfVersion()
                    + " to TemplateList");
        }
    }
}

From source file:my.mavenproject10.FileuploadController.java

@RequestMapping(method = RequestMethod.POST)
ModelAndView upload(HttpServletRequest request, HttpServletResponse response) {

    boolean isMultipart = ServletFileUpload.isMultipartContent(request);
    String fileName = "";
    int size = 0;
    ArrayList<String> result = new ArrayList<String>();
    if (isMultipart) {
        FileItemFactory factory = new DiskFileItemFactory();
        ServletFileUpload upload = new ServletFileUpload(factory);

        try {
            List items = upload.parseRequest(request);
            Iterator iterator = items.iterator();
            while (iterator.hasNext()) {
                FileItem item = (FileItem) iterator.next();
                fileName = item.getName();
                System.out.println("file name " + item.getName());
                JAXBContext jc = JAXBContext.newInstance(CustomersType.class);
                SAXParserFactory spf = SAXParserFactory.newInstance();
                XMLReader xmlReader = spf.newSAXParser().getXMLReader();
                InputSource inputSource = new InputSource(
                        new InputStreamReader(item.getInputStream(), "UTF-8"));
                SAXSource source = new SAXSource(xmlReader, inputSource);
                Unmarshaller unmarshaller = jc.createUnmarshaller();
                CustomersType data2 = (CustomersType) unmarshaller.unmarshal(source);
                //System.out.println("size " + data2.getCustomer().size());
                size = data2.getCustomer().size();
                for (CustomerType customer : data2.getCustomer()) {
                    System.out.println(customer.toString());
                }
                // total sum and per-order sums
                double summ = 0.0;
                HashMap<Integer, Float> ordersMap = new HashMap<Integer, Float>();
                for (CustomerType customer : data2.getCustomer()) {
                    for (OrderType orderType : customer.getOrders().getOrder()) {
                        Float summPerOrder = 0.0f;
                        //System.out.println(orderType);
                        for (PositionType positionType : orderType.getPositions().getPosition()) {
                            //System.out.println(positionType);
                            summPerOrder += positionType.getCount() * positionType.getPrice();
                            summ += positionType.getCount() * positionType.getPrice();
                        }
                        ordersMap.put(orderType.getId(), summPerOrder);
                    }
                }
                summ = new BigDecimal(summ).setScale(2, RoundingMode.UP).doubleValue();
                System.out.println("   " + summ);
                result.add("   " + summ);

                // per-customer totals
                HashMap<Integer, Float> customersMap = new HashMap<Integer, Float>();
                for (CustomerType customer : data2.getCustomer()) {
                    Float summPerCust = 0.0f;
                    customersMap.put(customer.getId(), summPerCust);
                    for (OrderType orderType : customer.getOrders().getOrder()) {
                        for (PositionType positionType : orderType.getPositions().getPosition()) {
                            summPerCust += positionType.getCount() * positionType.getPrice();
                        }
                    }
                    //System.out.println(customer.getId() + " orders " + summPerCust);
                    customersMap.put(customer.getId(), summPerCust);
                }
                TreeMap sortedMap = sortByValue(customersMap);
                System.out.println(" " + sortedMap.keySet().toArray()[0]
                        + "    : " + sortedMap.get(sortedMap.firstKey()));
                result.add(" " + sortedMap.keySet().toArray()[0] + "    : "
                        + sortedMap.get(sortedMap.firstKey()));

                // first entry of the value-sorted order map
                TreeMap sortedMapOrders = sortByValue(ordersMap);
                System.out.println("   " + sortedMapOrders.keySet().toArray()[0]
                        + " : " + sortedMapOrders.get(sortedMapOrders.firstKey()));
                result.add("   " + sortedMapOrders.keySet().toArray()[0] + " : "
                        + sortedMapOrders.get(sortedMapOrders.firstKey()));

                // last entry of the value-sorted order map
                System.out.println("   "
                        + sortedMapOrders.keySet().toArray()[sortedMapOrders.keySet().toArray().length - 1]
                        + " : " + sortedMapOrders.get(sortedMapOrders.lastKey()));
                result.add("   "
                        + sortedMapOrders.keySet().toArray()[sortedMapOrders.keySet().toArray().length - 1]
                        + " : " + sortedMapOrders.get(sortedMapOrders.lastKey()));

                // number of orders
                System.out.println("  " + sortedMapOrders.size());
                result.add("  " + sortedMapOrders.size());

                // average order sum
                ArrayList<Float> floats = new ArrayList<Float>(sortedMapOrders.values());
                Float summAvg = 0.0f;
                Float avg = 0.0f;
                for (Float f : floats) {
                    summAvg += f;
                }
                avg = new BigDecimal(summAvg / floats.size()).setScale(2, RoundingMode.UP).floatValue();
                System.out.println("   " + avg);
                result.add("   " + avg);

            }
        } catch (FileUploadException e) {
            System.out.println("FileUploadException:- " + e.getMessage());
        } catch (JAXBException ex) {
            //Logger.getLogger(FileuploadController.class.getName()).log(Level.SEVERE, null, ex);
        } catch (UnsupportedEncodingException ex) {
            Logger.getLogger(FileuploadController.class.getName()).log(Level.SEVERE, null, ex);
        } catch (IOException ex) {
            Logger.getLogger(FileuploadController.class.getName()).log(Level.SEVERE, null, ex);
        } catch (ParserConfigurationException ex) {
            Logger.getLogger(FileuploadController.class.getName()).log(Level.SEVERE, null, ex);
        } catch (SAXException ex) {
            Logger.getLogger(FileuploadController.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    ModelAndView modelAndView = new ModelAndView("fileuploadsuccess");
    modelAndView.addObject("files", result);
    modelAndView.addObject("name", fileName);
    modelAndView.addObject("size", size);
    return modelAndView;

}

From source file:org.apache.camel.dataformat.bindy.BindyCsvFactory.java

private List<List> product(Map<Integer, List> values) {

    TreeMap<Integer, List> sortValues = new TreeMap<Integer, List>(values);

    List<List> product = new ArrayList<List>();
    Map<Integer, Integer> index = new HashMap<Integer, Integer>();

    boolean cont = true;
    int idx = 0;
    int idxSize;

    do {

        idxSize = 0;
        List v = new ArrayList();

        for (int ii = 1; ii <= sortValues.lastKey(); ii++) {

            List l = values.get(ii);

            if (l == null) {
                v.add("");
                ++idxSize;
                continue;
            }

            if (l.size() >= idx + 1) {
                v.add(l.get(idx));
                index.put(ii, idx);
                if (LOG.isDebugEnabled()) {
                    LOG.debug("Value : " + l.get(idx) + ", pos : " + ii + ", at :" + idx);
                }

            } else {
                v.add(l.get(0));
                index.put(ii, 0);
                ++idxSize;
                if (LOG.isDebugEnabled()) {
                    LOG.debug("Value : " + l.get(0) + ", pos : " + ii + ", at index : " + 0);
                }
            }

        }

        if (idxSize != sortValues.lastKey()) {
            product.add(v);
        }
        ++idx;

    } while (idxSize != sortValues.lastKey());

    return product;
}

From source file:cc.slda.DisplayTopic.java

@SuppressWarnings("unchecked")
public int run(String[] args) throws Exception {
    Options options = new Options();

    options.addOption(Settings.HELP_OPTION, false, "print the help message");
    options.addOption(OptionBuilder.withArgName(Settings.PATH_INDICATOR).hasArg()
            .withDescription("input beta file").create(Settings.INPUT_OPTION));
    options.addOption(OptionBuilder.withArgName(Settings.PATH_INDICATOR).hasArg()
            .withDescription("term index file").create(ParseCorpus.INDEX));
    options.addOption(OptionBuilder.withArgName(Settings.INTEGER_INDICATOR).hasArg()
            .withDescription("display top terms only (default - 10)").create(TOP_DISPLAY_OPTION));

    String betaString = null;
    String indexString = null;
    int topDisplay = TOP_DISPLAY;

    CommandLineParser parser = new GnuParser();
    HelpFormatter formatter = new HelpFormatter();
    try {
        CommandLine line = parser.parse(options, args);

        if (line.hasOption(Settings.HELP_OPTION)) {
            formatter.printHelp(ParseCorpus.class.getName(), options);
            System.exit(0);
        }

        if (line.hasOption(Settings.INPUT_OPTION)) {
            betaString = line.getOptionValue(Settings.INPUT_OPTION);
        } else {
            throw new ParseException("Parsing failed due to " + Settings.INPUT_OPTION + " not initialized...");
        }

        if (line.hasOption(ParseCorpus.INDEX)) {
            indexString = line.getOptionValue(ParseCorpus.INDEX);
        } else {
            throw new ParseException("Parsing failed due to " + ParseCorpus.INDEX + " not initialized...");
        }

        if (line.hasOption(TOP_DISPLAY_OPTION)) {
            topDisplay = Integer.parseInt(line.getOptionValue(TOP_DISPLAY_OPTION));
        }
    } catch (ParseException pe) {
        System.err.println(pe.getMessage());
        formatter.printHelp(ParseCorpus.class.getName(), options);
        System.exit(0);
    } catch (NumberFormatException nfe) {
        System.err.println(nfe.getMessage());
        System.exit(0);
    }

    JobConf conf = new JobConf(DisplayTopic.class);
    FileSystem fs = FileSystem.get(conf);

    Path indexPath = new Path(indexString);
    Preconditions.checkArgument(fs.exists(indexPath) && fs.isFile(indexPath), "Invalid index path...");

    Path betaPath = new Path(betaString);
    Preconditions.checkArgument(fs.exists(betaPath) && fs.isFile(betaPath), "Invalid beta path...");

    SequenceFile.Reader sequenceFileReader = null;
    try {
        IntWritable intWritable = new IntWritable();
        Text text = new Text();
        Map<Integer, String> termIndex = new HashMap<Integer, String>();
        sequenceFileReader = new SequenceFile.Reader(fs, indexPath, conf);
        while (sequenceFileReader.next(intWritable, text)) {
            termIndex.put(intWritable.get(), text.toString());
        }

        PairOfIntFloat pairOfIntFloat = new PairOfIntFloat();
        // HMapIFW hmap = new HMapIFW();
        HMapIDW hmap = new HMapIDW();
        TreeMap<Double, Integer> treeMap = new TreeMap<Double, Integer>();
        sequenceFileReader = new SequenceFile.Reader(fs, betaPath, conf);
        while (sequenceFileReader.next(pairOfIntFloat, hmap)) {
            treeMap.clear();

            System.out.println("==============================");
            System.out.println(
                    "Top ranked " + topDisplay + " terms for Topic " + pairOfIntFloat.getLeftElement());
            System.out.println("==============================");

            Iterator<Integer> itr1 = hmap.keySet().iterator();
            int temp1 = 0;
            while (itr1.hasNext()) {
                temp1 = itr1.next();
                treeMap.put(-hmap.get(temp1), temp1);
                if (treeMap.size() > topDisplay) {
                    treeMap.remove(treeMap.lastKey());
                }
            }

            Iterator<Double> itr2 = treeMap.keySet().iterator();
            double temp2 = 0;
            while (itr2.hasNext()) {
                temp2 = itr2.next();
                if (termIndex.containsKey(treeMap.get(temp2))) {
                    System.out.println(termIndex.get(treeMap.get(temp2)) + "\t\t" + -temp2);
                } else {
                    System.out.println("How embarrassing! Term index not found...");
                }
            }
        }
    } finally {
        IOUtils.closeStream(sequenceFileReader);
    }

    return 0;
}