List of usage examples for the java.util.TreeMap no-argument constructor TreeMap()
public TreeMap()
From source file:de.tudarmstadt.ukp.experiments.argumentation.clustering.ClusterCentroidsMain.java
public static TreeMap<Integer, Vector> computeClusterCentroids(String inputVectorsPath, String clusterOutputPath) throws IOException { TreeMap<Integer, Vector> result = new TreeMap<>(); Map<Integer, Integer> counts = new TreeMap<>(); // input for cluto File inputVectors = new File(inputVectorsPath); // resulting clusters File clutoClustersOutput = new File(clusterOutputPath); LineIterator clustersIterator = IOUtils.lineIterator(new FileInputStream(clutoClustersOutput), "utf-8"); LineIterator vectorsIterator = IOUtils.lineIterator(new FileInputStream(inputVectors), "utf-8"); // skip first line (number of clusters and vector size vectorsIterator.next();/*from w ww . j ava2 s . co m*/ while (clustersIterator.hasNext()) { String clusterString = clustersIterator.next(); String vectorString = vectorsIterator.next(); int clusterNumber = Integer.valueOf(clusterString); // now parse the vector DenseVector vector = ClusteringUtils.parseVector(vectorString); // if there is no resulting vector for the particular cluster, add this one if (!result.containsKey(clusterNumber)) { result.put(clusterNumber, vector); } else { // otherwise add this one to the previous one result.put(clusterNumber, result.get(clusterNumber).add(vector)); } // and update counts if (!counts.containsKey(clusterNumber)) { counts.put(clusterNumber, 0); } counts.put(clusterNumber, counts.get(clusterNumber) + 1); } // now compute average for each vector for (Map.Entry<Integer, Vector> entry : result.entrySet()) { // cluster number int clusterNumber = entry.getKey(); // get counts int count = counts.get(clusterNumber); // divide by count of vectors for each cluster (averaging) for (VectorEntry vectorEntry : entry.getValue()) { vectorEntry.set(vectorEntry.get() / (double) count); } } return result; }
From source file:com.webcohesion.ofx4j.io.AggregateMarshaller.java
/** * Marshal the specified aggregate object. * * @param aggregate The aggregate to marshal. * @param writer The writer./*w w w .j av a 2 s .c o m*/ */ public void marshal(Object aggregate, OFXWriter writer) throws IOException { AggregateInfo aggregateInfo = AggregateIntrospector.getAggregateInfo(aggregate.getClass()); if (aggregateInfo == null) { throw new IllegalArgumentException( String.format("Unable to marshal object of type %s (no aggregate metadata found).", aggregate.getClass().getName())); } if (aggregateInfo.hasHeaders()) { Map<String, Object> headerValues = aggregateInfo.getHeaders(aggregate); Map<String, String> convertedValues = new TreeMap<String, String>(); for (String header : headerValues.keySet()) { convertedValues.put(header, getConversion().toString(headerValues.get(header))); } writer.writeHeaders(convertedValues); } writer.writeStartAggregate(aggregateInfo.getName()); SortedSet<AggregateAttribute> AggregateAttributes = aggregateInfo.getAttributes(); writeAggregateAttributes(aggregate, writer, AggregateAttributes); writer.writeEndAggregate(aggregateInfo.getName()); }
From source file:de.hybris.platform.acceleratorcms.services.impl.RankingCMSRestrictionService.java
@Override public Collection<AbstractPageModel> evaluatePages(final Collection<AbstractPageModel> pages, final RestrictionData data) { final NavigableMap<Integer, List<AbstractPageModel>> allowedPages = new TreeMap<>(); final Collection<AbstractPageModel> defaultPages = getDefaultPages(pages); for (final AbstractPageModel page : pages) { if (defaultPages.contains(page)) { continue; }//from ww w . j av a 2s.c o m final List<AbstractRestrictionModel> restrictions = page.getRestrictions(); if (restrictions == null || restrictions.isEmpty()) { LOG.debug("Page [" + page.getName() + "] is not default page and contains no restrictions. Skipping this page."); } else { LOG.debug("Evaluating restrictions for page [" + page.getName() + "]."); final boolean onlyOneRestrictionMustApply = page.isOnlyOneRestrictionMustApply(); final boolean allowed = evaluate(restrictions, data, onlyOneRestrictionMustApply); if (allowed) { LOG.debug("Adding page [" + page.getName() + "] to allowed pages"); final Integer countOfMatchingRestrictions = Integer .valueOf(onlyOneRestrictionMustApply ? 1 : restrictions.size()); if (allowedPages.containsKey(countOfMatchingRestrictions)) { // Add to existing list allowedPages.get(countOfMatchingRestrictions).add(page); } else { // Add a new entry final List<AbstractPageModel> list = new ArrayList<>(); list.add(page); allowedPages.put(countOfMatchingRestrictions, list); } } } } final List<AbstractPageModel> result = new ArrayList<>(); if (MapUtils.isNotEmpty(allowedPages)) { // Take the highest match count result.addAll(allowedPages.lastEntry().getValue()); } else { if (defaultPages.size() > 1) { LOG.warn(createMoreThanOneDefaultPageWarning(defaultPages)); } if (CollectionUtils.isNotEmpty(defaultPages)) { LOG.debug("Returning default page"); result.add(defaultPages.iterator().next()); } } return result; }
From source file:net.anthonypoon.ngram.rollingregression.RollingRegressionReducer.java
/**
 * Reduces per-year counts for one n-gram into rolling-regression slopes.
 * Input values are "year\tcount" pairs. If any count exceeds the threshold,
 * the log-count series is gap-filled (carrying the previous value forward,
 * 0.0 at the start) and a windowed linear regression of width 2*range+1 is
 * fitted at every interior year; each defined slope is emitted as
 * "year\tslope" (optionally only positive slopes).
 */
@Override
protected void reduce(Text key, Iterable<Text> values, Context context)
        throws IOException, InterruptedException {
    // year (as string) -> log(count); TreeMap keeps the series ordered by key
    TreeMap<String, Double> currElement = new TreeMap<>();
    boolean pastThreshold = false;
    for (Text val : values) {
        String[] strArray = val.toString().split("\t");
        double count = Double.parseDouble(strArray[1]); // parse once, not twice
        if (count > threshold) {
            pastThreshold = true;
        }
        currElement.put(strArray[0], Math.log(count));
    }
    if (!pastThreshold) {
        return; // series never crossed the threshold: emit nothing
    }
    // Fill gaps: carry the previous year's value forward, or 0.0 at the start.
    for (int i = 0; i <= upbound - lowbound; i++) {
        String year = String.valueOf(lowbound + i);
        if (!currElement.containsKey(year)) {
            currElement.put(year, i == 0 ? 0.0 : currElement.get(String.valueOf(lowbound + i - 1)));
        }
    }
    TreeMap<String, Double> result = new TreeMap<>();
    for (int i = range; i <= upbound - lowbound - range; i++) {
        // fit a symmetric window of 2*range+1 points centered on year lowbound+i
        SimpleRegression regression = new SimpleRegression();
        for (int l = -range; l <= range; l++) {
            regression.addData(l, currElement.get(String.valueOf(i + lowbound + l)));
        }
        double slope = regression.getSlope();
        if (!Double.isNaN(slope) && (!positiveOnly || slope > 0)) {
            result.put(String.valueOf(lowbound + i), slope);
        }
    }
    for (Map.Entry<String, Double> pair : result.entrySet()) {
        context.write(key, new Text(pair.getKey() + "\t" + String.format("%.5f", pair.getValue())));
    }
}
From source file:web.diva.server.model.PCAGenerator.java
/**
 * Builds the scatter-plot dataset for the PCA view, one XYSeries per group color.
 *
 * Four modes, chosen from the arguments:
 *  1. no zoom, no selection, no sub-selection: plot every point;
 *  2. zoom: plot only the points in subSelectionData, highlighting selected genes;
 *  3. sub-selection without zoom: like 2 but keyed over subSelectionData as map keys;
 *  4. selection without zoom: plot every point, highlighting selected genes.
 * Unselected points go to the "unGrouped" series; selected points with no group
 * color fall back to the "#000000" series.
 *
 * NOTE(review): in modes 1 and 2 the key that is CHECKED (point.getColor()) differs
 * from the key that is FETCHED (divaDataset.getGeneColorArr()[point.getGeneIndex()]);
 * if those two values can ever disagree, the get() returns null and add() throws an
 * NPE — confirm they are always identical (modes 3 and 4 check and fetch the same key).
 *
 * @param points PCA points keyed by an integer id
 * @param subSelectionData point keys/ids restricting the plot (may be null)
 * @param selection selected gene indices (may be null or empty)
 * @param zoom whether the view is zoomed to the sub-selection
 * @param divaDataset source of row groups and the per-gene color array
 * @return dataset with one series per color plus the "#000000" and "unGrouped" series
 */
private XYDataset createDataset(TreeMap<Integer, PCAPoint> points, int[] subSelectionData, int[] selection,
        boolean zoom, DivaDataset divaDataset) {
    final XYSeriesCollection dataset = new XYSeriesCollection();
    // color hex string -> series; seeded with the fallback and ungrouped series
    seriesList = new TreeMap<String, XYSeries>();
    seriesList.put("#000000", new XYSeries("#000000"));
    seriesList.put("unGrouped", new XYSeries("LIGHT_GRAY"));
    // one series per active user-defined group (the implicit "all" group is skipped)
    for (Group g : divaDataset.getRowGroups()) {
        if (g.isActive() && !g.getName().equalsIgnoreCase("all")) {
            seriesList.put(g.getHashColor(), new XYSeries(g.getHashColor()));
        }
    }
    if (!zoom && (selection == null || selection.length == 0) && subSelectionData == null) {
        // mode 1: plot everything, colored by group where known
        for (int key : points.keySet()) {
            PCAPoint point = points.get(key);
            if (seriesList.containsKey(point.getColor())) {
                seriesList.get(divaDataset.getGeneColorArr()[point.getGeneIndex()]).add(point.getX(),
                        point.getY());
            } else {
                seriesList.get("unGrouped").add(point.getX(), point.getY());
            }
        }
    } else if (zoom) {
        // mode 2: only sub-selected points; selected genes keep their color
        selectionSet.clear();
        for (int i : selection) {
            selectionSet.add(i);
        }
        for (int x : subSelectionData) {
            PCAPoint point = points.get(x);
            if (selectionSet.contains(point.getGeneIndex())) {
                if (seriesList.containsKey(point.getColor())) {
                    seriesList.get(divaDataset.getGeneColorArr()[point.getGeneIndex()]).add(point.getX(),
                            point.getY());
                } else {
                    seriesList.get("#000000").add(point.getX(), point.getY());
                }
            } else {
                seriesList.get("unGrouped").add(point.getX(), point.getY());
            }
        }
    } else if (subSelectionData != null) {
        // mode 3: sub-selection without zoom
        selectionSet.clear();
        for (int i : selection) {
            selectionSet.add(i);
        }
        for (int key : subSelectionData) {
            PCAPoint point = points.get(key);
            if (selectionSet.contains(point.getGeneIndex())) {
                if (seriesList.containsKey(divaDataset.getGeneColorArr()[point.getGeneIndex()])) {
                    seriesList.get(divaDataset.getGeneColorArr()[point.getGeneIndex()]).add(point.getX(),
                            point.getY());
                } else {
                    seriesList.get("#000000").add(point.getX(), point.getY());
                }
            } else {
                seriesList.get("unGrouped").add(point.getX(), point.getY());
            }
        }
    } else { // mode 4: selection without zoom
        selectionSet.clear();
        for (int i : selection) {
            selectionSet.add(i);
        }
        for (int key : points.keySet()) {
            PCAPoint point = points.get(key);
            if (selectionSet.contains(point.getGeneIndex())) {
                if (seriesList.containsKey(divaDataset.getGeneColorArr()[point.getGeneIndex()])) {
                    seriesList.get(divaDataset.getGeneColorArr()[point.getGeneIndex()]).add(point.getX(),
                            point.getY());
                } else {
                    seriesList.get("#000000").add(point.getX(), point.getY());
                }
            } else {
                seriesList.get("unGrouped").add(point.getX(), point.getY());
            }
        }
    }
    for (XYSeries ser : seriesList.values()) {
        dataset.addSeries(ser);
    }
    return dataset;
}
From source file:com.mycompany.task1.Chart.java
public void makeGroupedDataChart(int kolumna) { SortedSet sortedSet = calculation.getClassTypeCollection(); DefaultCategoryDataset dataSet = new DefaultCategoryDataset(); TreeMap<Double, Integer> groupedMap; groupedMap = new TreeMap<Double, Integer>(); // mapa zawiera dane jednej kolumny dla jednej cechy nominalnej for (Object object : sortedSet) { Map<Double, Integer> mapa = calculation.histogramData(kolumna, object); prepareRangeMap(groupedMap, mapa); group(groupedMap, mapa);//from w w w . j ava 2 s . co m loadData(groupedMap, object, dataSet); } try { JFreeChart chart = displayChart(dataSet); ChartUtilities.saveChartAsJPEG(new File("GroupedData.jpg"), chart, 1000, 700); } catch (IOException ex) { Logger.getLogger(Chart.class.getName()).log(Level.SEVERE, null, ex); } }
From source file:ANNFileDetect.EncogTestClass.java
private void createReport(TreeMap<Double, Integer> ht, String file) throws IOException { TreeMap<Integer, ArrayList<Double>> tm = new TreeMap<Integer, ArrayList<Double>>(); for (Map.Entry<Double, Integer> entry : ht.entrySet()) { if (tm.containsKey(entry.getValue())) { ArrayList<Double> al = (ArrayList<Double>) tm.get(entry.getValue()); al.add(entry.getKey());//from w ww. j ava 2 s . c o m tm.put(entry.getValue(), al); } else { ArrayList<Double> al = new ArrayList<Double>(); al.add(entry.getKey()); tm.put(entry.getValue(), al); } } String[] tmpfl = file.split("/"); if (tmpfl.length < 2) tmpfl = file.split("\\\\"); String crp = tmpfl[tmpfl.length - 1]; String[] actfl = crp.split("\\."); FileWriter fstream = new FileWriter("tempTrainingFiles/" + actfl[1].toUpperCase() + actfl[0] + ".txt"); BufferedWriter fileto = new BufferedWriter(fstream); int size = tm.size(); int cnt = 0; for (Map.Entry<Integer, ArrayList<Double>> entry : tm.entrySet()) { if (cnt > (size - 10) && entry.getKey() > 2 && entry.getValue().size() < 20) { double tmpval = ((double) entry.getKey()) / filebytes; fileto.write("Times: " + tmpval + " Values: "); for (Double dbl : entry.getValue()) { fileto.write(dbl + " "); } fileto.write("\n"); } cnt++; } fileto.close(); }
From source file:com.itemanalysis.jmetrik.graph.nicc.NonparametricCurvePanel.java
/**
 * Creates the panel for the given command, then immediately processes the
 * command and lays out the resulting graphs.
 *
 * @param command the command describing the nonparametric curves to render
 */
public NonparametricCurvePanel(NonparametricCurveCommand command) {
    this.command = command;
    // charts kept in a TreeMap so iteration is in sorted key order
    // (presumably keyed by item/chart name — confirm against setGraphs())
    charts = new TreeMap<String, JFreeChart>();
    processCommand();
    setGraphs();
}
From source file:io.fabric8.devops.ProjectConfig.java
/**
 * Registers (or replaces) a named link, lazily creating the sorted backing
 * map on first use so an instance with no links carries no map.
 *
 * @param name the link name, used as the map key
 * @param url the link target URL
 */
public void addLink(String name, String url) {
    if (links == null) {
        // first link added: allocate the map on demand
        links = new TreeMap<>();
    }
    links.put(name, url);
}
From source file:com.espertech.esper.filter.FilterParamIndexCompareString.java
/**
 * Creates a string-comparison filter index for relational operators.
 * Only GREATER, GREATER_OR_EQUAL, LESS and LESS_OR_EQUAL are supported;
 * any other operator is rejected up front.
 *
 * @param lookupable the event property to index
 * @param filterOperator the relational operator this index evaluates
 * @throws IllegalArgumentException if the operator is not a relational comparison
 */
public FilterParamIndexCompareString(FilterSpecLookupable lookupable, FilterOperator filterOperator) {
    super(filterOperator, lookupable);
    // TreeMap keeps constants sorted — presumably so range evaluation can
    // traverse in key order; confirm against the evaluator implementation
    constantsMap = new TreeMap<Object, EventEvaluator>();
    constantsMapRWLock = new ReentrantReadWriteLock();
    // reject any operator this comparison index cannot evaluate
    if ((filterOperator != FilterOperator.GREATER) && (filterOperator != FilterOperator.GREATER_OR_EQUAL)
            && (filterOperator != FilterOperator.LESS) && (filterOperator != FilterOperator.LESS_OR_EQUAL)) {
        throw new IllegalArgumentException("Invalid filter operator for index of " + filterOperator);
    }
}