List of usage examples for com.google.common.collect Multimap.size()
int size();
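Multimap.size() returns the number of key-value pairs in the multimap, not the number of distinct keys; the examples below use it to log entry counts, presize collections, and test for a single match. A minimal standalone sketch of that behavior (the class name, keys, and values here are purely illustrative, not taken from any example on this page):

import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap;

public class MultimapSizeDemo {
    public static void main(String[] args) {
        Multimap<String, Integer> scores = ArrayListMultimap.create();
        scores.put("alice", 1);
        scores.put("alice", 2);   // same key, second value
        scores.put("bob", 3);

        // size() counts key-value pairs, not distinct keys
        System.out.println(scores.size());          // 3
        System.out.println(scores.keySet().size()); // 2 distinct keys
        System.out.println(scores.keys().size());   // 3 (keys() is a multiset with one entry per pair)
    }
}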
From source file:net.myrrix.online.eval.AbstractEvaluator.java
private Multimap<Long, RecommendedItem> split(File dataDir, File trainingFile, double trainPercentage,
        double evaluationPercentage, RescorerProvider provider) throws IOException {
    DataFileContents dataFileContents = readDataFile(dataDir, evaluationPercentage, provider);
    Multimap<Long, RecommendedItem> data = dataFileContents.getData();
    log.info("Read data for {} users from input; splitting...", data.size());
    Multimap<Long, RecommendedItem> testData = ArrayListMultimap.create();
    Writer trainingOut = IOUtils.buildGZIPWriter(trainingFile);
    try {
        Iterator<Map.Entry<Long, Collection<RecommendedItem>>> it = data.asMap().entrySet().iterator();
        while (it.hasNext()) {
            Map.Entry<Long, Collection<RecommendedItem>> entry = it.next();
            long userID = entry.getKey();
            List<RecommendedItem> userPrefs = Lists.newArrayList(entry.getValue());
            it.remove();
            if (isSplitTestByPrefValue()) {
                // Sort low to high, leaving high values at end for testing as "relevant" items
                Collections.sort(userPrefs, ByValueAscComparator.INSTANCE);
            }
            // else leave sorted in time order
            int numTraining = FastMath.max(1, (int) (trainPercentage * userPrefs.size()));
            for (RecommendedItem rec : userPrefs.subList(0, numTraining)) {
                trainingOut.write(Long.toString(userID));
                trainingOut.write(DELIMITER);
                trainingOut.write(Long.toString(rec.getItemID()));
                trainingOut.write(DELIMITER);
                trainingOut.write(Float.toString(rec.getValue()));
                trainingOut.write('\n');
            }
            for (RecommendedItem rec : userPrefs.subList(numTraining, userPrefs.size())) {
                testData.put(userID, rec);
            }
        }
        // All tags go in training data
        for (Map.Entry<String, RecommendedItem> entry : dataFileContents.getItemTags().entries()) {
            trainingOut.write(entry.getKey());
            trainingOut.write(DELIMITER);
            trainingOut.write(Long.toString(entry.getValue().getItemID()));
            trainingOut.write(DELIMITER);
            trainingOut.write(Float.toString(entry.getValue().getValue()));
            trainingOut.write('\n');
        }
        for (Map.Entry<String, RecommendedItem> entry : dataFileContents.getUserTags().entries()) {
            trainingOut.write(Long.toString(entry.getValue().getItemID()));
            trainingOut.write(DELIMITER);
            trainingOut.write(entry.getKey());
            trainingOut.write(DELIMITER);
            trainingOut.write(Float.toString(entry.getValue().getValue()));
            trainingOut.write('\n');
        }
    } finally {
        trainingOut.close(); // Want to know if output stream close failed -- maybe failed to write
    }
    log.info("{} users in test data", testData.size());
    return testData;
}
From source file:com.android.tools.idea.navigator.nodes.AndroidResFolderTypeNode.java
@NotNull
@Override
public Collection<? extends AbstractTreeNode> getChildren() {
    // all resource folders of a given folder type
    List<PsiDirectory> folders = getValue();
    Multimap<String, PsiFile> multimap = HashMultimap.create();
    for (PsiDirectory res : folders) {
        for (PsiFile file : res.getFiles()) {
            String resName = file.getName();
            multimap.put(resName, file);
        }
    }

    List<AbstractTreeNode> children = Lists.newArrayListWithExpectedSize(multimap.size());
    for (String resName : multimap.keySet()) {
        List<PsiFile> files = Lists.newArrayList(multimap.get(resName));
        if (files.size() > 1) {
            children.add(new AndroidResGroupNode(myProject, myFacet, files, resName, getSettings()));
        } else {
            children.add(new AndroidResFileNode(myProject, files.get(0), getSettings(), myFacet));
        }
    }
    return children;
}
From source file:forestry.core.items.ItemElectronTube.java
@SuppressWarnings({ "rawtypes", "unchecked" })
@Override
public void addInformation(ItemStack itemstack, EntityPlayer player, List list, boolean flag) {
    Multimap<ICircuitLayout, ICircuit> circuits = ArrayListMultimap.create();
    for (ICircuitLayout circuitLayout : ChipsetManager.circuitRegistry.getRegisteredLayouts().values()) {
        ICircuit circuit = ItemSolderingIron.SolderManager.getCircuit(circuitLayout, itemstack);
        if (circuit != null) {
            circuits.put(circuitLayout, circuit);
        }
    }

    if (circuits.size() > 0) {
        if (Proxies.common.isShiftDown()) {
            for (ICircuitLayout circuitLayout : circuits.keys()) {
                String circuitLayoutName = circuitLayout.getUsage();
                list.add(EnumChatFormatting.WHITE.toString() + EnumChatFormatting.UNDERLINE + circuitLayoutName);
                for (ICircuit circuit : circuits.get(circuitLayout)) {
                    circuit.addTooltip(list);
                }
            }
        } else {
            list.add(EnumChatFormatting.ITALIC + "<" + StringUtil.localize("gui.tooltip.tmi") + ">");
        }
    } else {
        list.add("<" + StringUtil.localize("gui.noeffect") + ">");
    }
}
From source file:org.apache.phoenix.hbase.index.write.RecoveryIndexWriter.java
/**
 * Convert the passed index updates to {@link HTableInterfaceReference}s.
 *
 * @param indexUpdates from the index builder
 * @return pairs that can then be written by a {@link RecoveryIndexWriter}.
 */
@Override
protected Multimap<HTableInterfaceReference, Mutation> resolveTableReferences(
        Collection<Pair<Mutation, byte[]>> indexUpdates) {
    Multimap<HTableInterfaceReference, Mutation> updates = ArrayListMultimap
            .<HTableInterfaceReference, Mutation>create();
    // simple map to make lookups easy while we build the map of tables to create
    Map<ImmutableBytesPtr, HTableInterfaceReference> tables =
            new HashMap<ImmutableBytesPtr, HTableInterfaceReference>(updates.size());
    for (Pair<Mutation, byte[]> entry : indexUpdates) {
        byte[] tableName = entry.getSecond();
        ImmutableBytesPtr ptr = new ImmutableBytesPtr(tableName);
        HTableInterfaceReference table = tables.get(ptr);
        if (nonExistingTablesList.contains(table)) {
            LOG.debug("Edits found for non existing table: " + table.getTableName() + " so skipping it!!");
            continue;
        }
        if (table == null) {
            table = new HTableInterfaceReference(ptr);
            tables.put(ptr, table);
        }
        updates.put(table, entry.getFirst());
    }
    return updates;
}
From source file:com.facebook.presto.split.NativeSplitManager.java
@Override
public List<Partition> getPartitions(TableHandle tableHandle, Map<ColumnHandle, Object> bindings) {
    Stopwatch partitionTimer = new Stopwatch();
    partitionTimer.start();

    checkArgument(tableHandle instanceof NativeTableHandle, "Table must be a native table");

    TableMetadata tableMetadata = metadata.getTableMetadata(tableHandle);

    checkState(tableMetadata != null, "no metadata for %s found", tableHandle);

    Set<TablePartition> tablePartitions = shardManager.getPartitions(tableHandle);

    log.debug("Partition retrieval, native table %s (%d partitions): %dms", tableHandle, tablePartitions.size(),
            partitionTimer.elapsed(TimeUnit.MILLISECONDS));

    Multimap<String, ? extends PartitionKey> allPartitionKeys = shardManager.getAllPartitionKeys(tableHandle);
    Map<String, ColumnHandle> columnHandles = metadata.getColumnHandles(tableHandle);

    log.debug("Partition key retrieval, native table %s (%d keys): %dms", tableHandle, allPartitionKeys.size(),
            partitionTimer.elapsed(TimeUnit.MILLISECONDS));

    List<Partition> partitions = ImmutableList.copyOf(
            Collections2.transform(tablePartitions, new PartitionFunction(columnHandles, allPartitionKeys)));

    log.debug("Partition generation, native table %s (%d partitions): %dms", tableHandle, partitions.size(),
            partitionTimer.elapsed(TimeUnit.MILLISECONDS));

    return partitions;
}
From source file:org.mitreid.multiparty.service.InMemoryResourceService.java
@Override
public Resource getById(final String rsId) {
    Multimap<String, Resource> filtered = Multimaps.filterValues(resources, new Predicate<Resource>() {
        @Override
        public boolean apply(Resource input) {
            if (input.getId().equals(rsId)) {
                return true;
            } else {
                return false;
            }
        }
    });

    if (filtered.size() == 1) {
        return Iterators.getOnlyElement(filtered.values().iterator());
    } else {
        return null;
    }
}
From source file:io.datakernel.logfs.LogToCubeRunner.java
private void processLog_gotPositions(Map<String, LogPosition> positions, final CompletionCallback callback) {
    logger.trace("processLog_gotPositions called. Positions: {}", positions);
    final Stopwatch sw = Stopwatch.createStarted();
    final AggregatorSplitter<T> aggregator = aggregatorSplitterFactory.create(eventloop);

    LogCommitTransaction<T> logCommitTransaction = new LogCommitTransaction<>(eventloop, logManager, log,
            positions, new ForwardingLogCommitCallback(callback) {
                @Override
                public void onCommit(String log, Map<String, LogPosition> oldPositions,
                        Map<String, LogPosition> newPositions,
                        Multimap<AggregationMetadata, AggregationChunk.NewChunk> newChunks) {
                    processLog_doCommit(log, oldPositions, newPositions, newChunks, callback);
                    sw.stop();
                    logger.info("Aggregated {} objects from log '{}' into {} chunks in {} (~{} objects/second)",
                            aggregator.getItems(), log, newChunks.size(), sw,
                            (int) (aggregator.getItems() / ((double) sw.elapsed(TimeUnit.NANOSECONDS) / 1E9)));
                }
            });

    final StreamUnion<T> streamUnion = new StreamUnion<>(eventloop);
    for (String logPartition : positions.keySet()) {
        LogPosition logPosition = positions.get(logPartition);
        if (logPosition == null) {
            logPosition = new LogPosition();
        }
        logCommitTransaction.logProducer(logPartition, logPosition).streamTo(streamUnion.newInput());
        logger.info("Started reading log '{}' for partition '{}' from position {}", log, logPartition,
                logPosition);
    }

    streamUnion.getOutput().streamTo(aggregator.getInput());
    aggregator.streamTo(cube, logCommitTransaction);
}
From source file:com.arpnetworking.tsdcore.sinks.MonitordSink.java
/**
 * {@inheritDoc}
 */
@Override
protected Collection<byte[]> serialize(final PeriodicData periodicData) {
    final Period period = periodicData.getPeriod();
    final Multimap<String, AggregatedData> indexedData = prepareData(periodicData);
    final Multimap<String, Condition> indexedConditions = prepareConditions(periodicData.getConditions());

    // Serialize
    final List<byte[]> serializedData = Lists.newArrayListWithCapacity(indexedData.size());
    final StringBuilder stringBuilder = new StringBuilder();
    for (final String key : indexedData.keySet()) {
        final Collection<AggregatedData> namedData = indexedData.get(key);
        if (!namedData.isEmpty()) {
            stringBuilder.setLength(0);

            final AggregatedData first = Iterables.getFirst(namedData, null);
            final String name = new StringBuilder().append(first.getFQDSN().getService()).append("_")
                    .append(period.toString(ISOPeriodFormat.standard())).append("_")
                    .append(first.getFQDSN().getMetric()).toString();

            int maxStatus = 0;
            final StringBuilder dataBuilder = new StringBuilder();
            for (final AggregatedData datum : namedData) {
                if (!datum.isSpecified()) {
                    continue;
                }

                dataBuilder.append(datum.getFQDSN().getStatistic().getName()).append("%3D")
                        .append(datum.getValue().getValue()).append("%3B");

                final String conditionKey = datum.getFQDSN().getService() + "_" + datum.getFQDSN().getMetric()
                        + "_" + datum.getFQDSN().getCluster() + "_" + datum.getFQDSN().getStatistic();
                for (final Condition condition : indexedConditions.get(conditionKey)) {
                    dataBuilder.append(datum.getFQDSN().getStatistic().getName()).append("_")
                            .append(condition.getName()).append("%3D")
                            .append(condition.getThreshold().getValue()).append("%3B");

                    if (condition.isTriggered().isPresent() && condition.isTriggered().get()) {
                        // Collect the status of this metric
                        final Object severity = condition.getExtensions().get("severity");
                        int status = _unknownSeverityStatus;
                        if (severity != null && _severityToStatus.containsKey(severity)) {
                            status = _severityToStatus.get(severity);
                        }
                        maxStatus = Math.max(status, maxStatus);
                    }
                }
            }

            // Don't send an empty payload
            if (dataBuilder.length() == 0) {
                continue;
            }

            stringBuilder.append("run_every=").append(period.toStandardSeconds().getSeconds()).append("&path=")
                    .append(first.getFQDSN().getCluster()).append("%2f")
                    .append(periodicData.getDimensions().get("host")).append("&monitor=").append(name)
                    .append("&status=").append(maxStatus).append("&timestamp=")
                    .append((int) Unit.SECOND.convert(periodicData.getStart().getMillis(), Unit.MILLISECOND))
                    .append("&output=").append(name).append("%7C").append(dataBuilder.toString());

            stringBuilder.setLength(stringBuilder.length() - 3);

            serializedData.add(stringBuilder.toString().getBytes(Charset.forName("UTF-8")));
        }
    }
    return serializedData;
}
From source file:eumetsat.pn.solr.webapp.SolrApp.java
@Override
protected Map<String, Object> search(String searchTerms, String filterString, int from, int size) {
    Map<String, Object> data = new HashMap<>();

    // put "session" parameters here right away so they can be used in the template even for an empty result
    data.put("search_terms", searchTerms == null ? "*:*" : searchTerms);
    data.put("filter_terms", filterString == null ? "" : filterString);

    Stopwatch stopwatch = Stopwatch.createStarted();
    try {
        SolrQuery query = new SolrQuery();
        query.setQuery(searchTerms);
        query.setStart(from == -1 ? 0 : from);
        query.setRows(size);
        query.setFields("id", "title", "description", "thumbnail_s", "status_s", "score"); // "exclude" xmldoc
        query.setParam("qt", "edismax"); // probably default already

        // boosting
        query.setParam("qf", "title^10 description status^2 keywords");

        // set highlight, see also https://cwiki.apache.org/confluence/display/solr/Standard+Highlighter
        query.setHighlight(true).setHighlightSnippets(17).setHighlightFragsize(0);
        // http://wiki.apache.org/solr/HighlightingParameters
        query.setParam("hl.preserveMulti", "true"); // preserve non-matching keywords
        query.setParam("hl.fl", "id", "title", "description", "keywords"); // "*"); // select fields to highlight
        // override defaults:
        query.setParam("hl.simple.pre", "<em><strong>");
        query.setParam("hl.simple.post", "</strong></em>");

        // configure faceting, see also http://wiki.apache.org/solr/SolrFacetingOverview and
        // http://wiki.apache.org/solr/Solrj and https://wiki.apache.org/solr/SimpleFacetParameters
        query.setFacet(true).setFacetLimit(4).setFacetMissing(true);
        // not in API, probably normally set in schema.xml:
        query.setParam("facet.field", "satellite_s", "instrument_s", "category", "societalBenefitArea_ss",
                "distribution_ss");

        // filtering
        Set<String> hiddenFacets = new HashSet<>(); // hiding no facets yet
        if (filterString != null && !filterString.isEmpty()) {
            Multimap<String, String> filterTermsMap = parseFiltersTerms(filterString);
            if (filterTermsMap.size() > 0) {
                for (Map.Entry<String, String> entry : filterTermsMap.entries()) {
                    String filter = " +" + entry.getKey() + ":" + entry.getValue();
                    query.addFilterQuery(filter);
                    hiddenFacets.add(entry.getKey() + ":" + entry.getValue());
                }
            }
        }
        data.put("tohide", hiddenFacets);

        log.debug("Solr query: {}", query);
        QueryResponse response = solr.query(query);

        if (response == null) {
            log.error("Response from {} is null!", this.name);
            data.put("total_hits", 0);
            data = addMessage(data, MessageLevel.danger, "Response is null from " + this.name);
        } else {
            log.trace("Got response: {}", response);

            if (response.getStatus() == 0) {
                List<Map<String, Object>> resHits = new ArrayList<>();
                SolrDocumentList results = response.getResults();
                Map<String, Map<String, List<String>>> highlights = response.getHighlighting();

                data.put("total_hits", results.getNumFound());
                if (results.getNumFound() < 1) {
                    addMessage(data, MessageLevel.info, "No results found!");
                }
                data.put("max_score", results.getMaxScore());

                Map<String, Object> pagination = computePaginationParams(
                        ((Long) (data.get("total_hits"))).intValue(), from);
                data.put("pagination", pagination);

                for (SolrDocument result : results) {
                    HashMap<String, Object> resHit = new HashMap<>();

                    String currentId = (String) result.getFieldValue("id");
                    Map<String, List<String>> currentHighlights = highlights.get(currentId);

                    resHit.put("id", currentId);
                    resHit.put("score", String.format("%.4g", result.getFieldValue("score")));
                    resHit.put("abstract", hightlightIfGiven(result, currentHighlights, "description"));
                    resHit.put("title", hightlightIfGiven(result, currentHighlights, "title"));
                    resHit.put("keywords", Joiner.on(", ").join(
                            (Collection<String>) hightlightIfGiven(result, currentHighlights, "keywords")));
                    resHit.put("satellite", result.get("satellite_s"));
                    resHit.put("thumbnail", result.get("thumbnail_s"));
                    resHit.put("status", result.get("status_s"));
                    resHit.put("distribution", result.get("distribution_ss"));

                    resHits.add(resHit);
                }
                data.put("hits", resHits);

                // faceting information:
                List<FacetField> facets = response.getFacetFields();
                log.trace("Facets ({}): {}", facets.size(), facets);
                // jsObj.get("facets").get("categories").get("terms") - then term and count
                // convert to format of Elasticsearch:
                Map<String, Object> facetsJson = new HashMap<>();
                for (FacetField facet : facets) {
                    Map<String, Object> facetMap = new HashMap<>();
                    facetMap.put("total", facet.getValueCount());
                    List<Map<String, Object>> terms = new ArrayList<>();
                    for (Count count : facet.getValues()) {
                        if (count.getCount() > 0) {
                            Map<String, Object> termMap = new HashMap<>();
                            termMap.put("count", count.getCount());
                            termMap.put("term", count.getName() == null ? "N/A" : count.getName());
                            terms.add(termMap);
                        }
                    }
                    facetMap.put("terms", terms);
                    facetsJson.put(facet.getName(), facetMap);
                }
                data.put("facets", facetsJson);
            } else {
                // non-OK response
                log.error("Received non-200 response: {}", response);
                data = addMessage(data, MessageLevel.danger, "Non 200 response: " + response.toString());
            }
        }

        data.put("elapsed", (double) (stopwatch.elapsed(TimeUnit.MILLISECONDS)) / (double) 1000);
        log.trace("Prepared data for template: {}", data);
    } catch (SolrServerException e) {
        log.error("Error querying Solr", e);
        addMessage(data, MessageLevel.danger, "Error during search: " + e.getMessage()); // errorResponse(e);
    }
    stopwatch.stop();

    return data;
}
From source file:org.terasology.crafting.ui.workstation.StationAvailableRecipesWidget.java
private void appendCategory(Multimap<String, String> categoryRelationships,
        Map<String, Multimap<String, CraftingStationRecipe.CraftingStationResult>> categoryRecipesMap,
        String category, int level) {
    Multimap<String, CraftingStationRecipe.CraftingStationResult> directRecipes = categoryRecipesMap
            .get(category);
    Collection<String> childCategories = categoryRelationships.get(category);

    int count = 0;
    if (directRecipes != null) {
        count += directRecipes.size();
    }
    count += childCategories.size();

    boolean isOpen = openCategories.contains(category);

    RecipeCategoryWidget categoryWidget = new RecipeCategoryWidget(isOpen, 25 * level,
            getCategoryName(category), count, new CategoryToggleCallbackImpl(category));

    layout.addWidget(categoryWidget);

    if (isOpen) {
        for (String childCategory : childCategories) {
            appendCategory(categoryRelationships, categoryRecipesMap, childCategory, level + 1);
        }
        if (directRecipes != null) {
            appendRecipes(level + 1, directRecipes.entries());
        }
    }
}