List of usage examples for com.google.common.collect Multimap size
int size();
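
Multimap.size() returns the number of key-value pairs in the multimap, not the number of distinct keys; for the latter, use keySet().size(). A minimal runnable sketch of the distinction (class name and sample values are illustrative only):

import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;

public class MultimapSizeExample {
    public static void main(String[] args) {
        Multimap<String, String> multimap = HashMultimap.create();
        multimap.put("fruit", "apple");
        multimap.put("fruit", "banana");
        multimap.put("vegetable", "carrot");

        // size() counts key-value pairs, not distinct keys
        System.out.println(multimap.size());          // 3
        System.out.println(multimap.keySet().size()); // 2
        System.out.println(multimap.isEmpty());       // false
    }
}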
From source file:org.apache.accumulo.examples.wikisearch.logic.AbstractQueryLogic.java
public Results runQuery(Connector connector, List<String> authorizations, String query, Date beginDate,
        Date endDate, Set<String> types) {
    if (StringUtils.isEmpty(query)) {
        throw new IllegalArgumentException(
                "NULL QueryNode reference passed to " + this.getClass().getSimpleName());
    }
    Set<Range> ranges = new HashSet<Range>();
    Set<String> typeFilter = types;
    String[] array = authorizations.toArray(new String[0]);
    Authorizations auths = new Authorizations(array);
    Results results = new Results();

    // Get the query string
    String queryString = query;

    StopWatch abstractQueryLogic = new StopWatch();
    StopWatch optimizedQuery = new StopWatch();
    StopWatch queryGlobalIndex = new StopWatch();
    StopWatch optimizedEventQuery = new StopWatch();
    StopWatch fullScanQuery = new StopWatch();
    StopWatch processResults = new StopWatch();

    abstractQueryLogic.start();

    StopWatch parseQuery = new StopWatch();
    parseQuery.start();
    QueryParser parser;
    try {
        if (log.isDebugEnabled()) {
            log.debug("ShardQueryLogic calling QueryParser.execute");
        }
        parser = new QueryParser();
        parser.execute(queryString);
    } catch (org.apache.commons.jexl2.parser.ParseException e1) {
        throw new IllegalArgumentException("Error parsing query", e1);
    }
    int hash = parser.getHashValue();
    parseQuery.stop();
    if (log.isDebugEnabled()) {
        log.debug(hash + " Query: " + queryString);
    }

    Set<String> fields = new HashSet<String>();
    for (String f : parser.getQueryIdentifiers()) {
        fields.add(f);
    }
    if (log.isDebugEnabled()) {
        log.debug("getQueryIdentifiers: " + parser.getQueryIdentifiers().toString());
    }
    // Remove any negated fields from the fields list; we don't want to look up negated fields
    // in the index.
    fields.removeAll(parser.getNegatedTermsForOptimizer());
    if (log.isDebugEnabled()) {
        log.debug("getQueryIdentifiers: " + parser.getQueryIdentifiers().toString());
    }
    // Get the mapping of field name to QueryTerm object from the query. The query term object
    // contains the operator, whether it's negated or not, and the literal to test against.
    Multimap<String, QueryTerm> terms = parser.getQueryTerms();

    // Find out which terms are indexed
    // TODO: Should we cache indexed terms or does that not make sense since we are always
    // loading data.
    StopWatch queryMetadata = new StopWatch();
    queryMetadata.start();
    Map<String, Multimap<String, Class<? extends Normalizer>>> metadataResults;
    try {
        metadataResults = findIndexedTerms(connector, auths, fields, typeFilter);
    } catch (Exception e1) {
        throw new RuntimeException("Error in metadata lookup", e1);
    }

    // Create a map of indexed term to set of normalizers for it
    Multimap<String, Normalizer> indexedTerms = HashMultimap.create();
    for (Entry<String, Multimap<String, Class<? extends Normalizer>>> entry : metadataResults.entrySet()) {
        // Get the normalizer from the normalizer cache
        for (Class<? extends Normalizer> clazz : entry.getValue().values()) {
            indexedTerms.put(entry.getKey(), normalizerCacheMap.get(clazz));
        }
    }
    queryMetadata.stop();
    if (log.isDebugEnabled()) {
        log.debug(hash + " Indexed Terms: " + indexedTerms.toString());
    }

    Set<String> orTerms = parser.getOrTermsForOptimizer();

    // Iterate over the query terms to get the operators specified in the query.
    ArrayList<String> unevaluatedExpressions = new ArrayList<String>();
    boolean unsupportedOperatorSpecified = false;
    for (Entry<String, QueryTerm> entry : terms.entries()) {
        if (null == entry.getValue()) {
            continue;
        }
        if (null != this.unevaluatedFields && this.unevaluatedFields.contains(entry.getKey().trim())) {
            unevaluatedExpressions.add(entry.getKey().trim() + " " + entry.getValue().getOperator() + " "
                    + entry.getValue().getValue());
        }
        int operator = JexlOperatorConstants.getJJTNodeType(entry.getValue().getOperator());
        if (!(operator == ParserTreeConstants.JJTEQNODE || operator == ParserTreeConstants.JJTNENODE
                || operator == ParserTreeConstants.JJTLENODE || operator == ParserTreeConstants.JJTLTNODE
                || operator == ParserTreeConstants.JJTGENODE || operator == ParserTreeConstants.JJTGTNODE
                || operator == ParserTreeConstants.JJTERNODE)) {
            unsupportedOperatorSpecified = true;
            break;
        }
    }
    if (null != unevaluatedExpressions)
        unevaluatedExpressions.trimToSize();
    if (log.isDebugEnabled()) {
        log.debug(hash + " unsupportedOperators: " + unsupportedOperatorSpecified + " indexedTerms: "
                + indexedTerms.toString() + " orTerms: " + orTerms.toString() + " unevaluatedExpressions: "
                + unevaluatedExpressions.toString());
    }

    // We can use the intersecting iterator over the field index as an optimization under the
    // following conditions:
    //
    // 1. No unsupported operators in the query.
    // 2. No 'or' operators and at least one term indexed
    // or
    // 1. No unsupported operators in the query.
    // 2. and all terms indexed
    // or
    // 1. All or'd terms are indexed. NOTE: this will potentially skip some queries and push to a full table scan
    //
    // We should look into finding a better way to handle whether we do an optimized query or not.
    boolean optimizationSucceeded = false;
    boolean orsAllIndexed = false;
    if (orTerms.isEmpty()) {
        orsAllIndexed = false;
    } else {
        orsAllIndexed = indexedTerms.keySet().containsAll(orTerms);
    }

    if (log.isDebugEnabled()) {
        log.debug("All or terms are indexed");
    }

    if (!unsupportedOperatorSpecified
            && (((null == orTerms || orTerms.isEmpty()) && indexedTerms.size() > 0)
                    || (fields.size() > 0 && indexedTerms.size() == fields.size()) || orsAllIndexed)) {
        optimizedQuery.start();
        // Set up intersecting iterator over field index.

        // Get information from the global index for the indexed terms. The results object will contain the term
        // mapped to an object that contains the total count, and partitions where this term is located.

        // TODO: Should we cache indexed term information or does that not make sense since we are always loading data
        queryGlobalIndex.start();
        IndexRanges termIndexInfo;
        try {
            // If fields is null or zero, then it's probably the case that the user entered a value
            // to search for with no fields. Check for the value in index.
            if (fields.isEmpty()) {
                termIndexInfo = this.getTermIndexInformation(connector, auths, queryString, typeFilter);
                if (null != termIndexInfo && termIndexInfo.getRanges().isEmpty()) {
                    // Then we didn't find anything in the index for this query. This may happen for an indexed
                    // term that has wildcards in unhandled locations.
                    // Break out of here by throwing a named exception and do full scan
                    throw new DoNotPerformOptimizedQueryException();
                }
                // We need to rewrite the query string here so that it's valid.
                if (termIndexInfo instanceof UnionIndexRanges) {
                    UnionIndexRanges union = (UnionIndexRanges) termIndexInfo;
                    StringBuilder buf = new StringBuilder();
                    String sep = "";
                    for (String fieldName : union.getFieldNamesAndValues().keySet()) {
                        buf.append(sep).append(fieldName).append(" == ");
                        if (!(queryString.startsWith("'") && queryString.endsWith("'"))) {
                            buf.append("'").append(queryString).append("'");
                        } else {
                            buf.append(queryString);
                        }
                        sep = " or ";
                    }
                    if (log.isDebugEnabled()) {
                        log.debug("Rewrote query for non-fielded single term query: " + queryString + " to "
                                + buf.toString());
                    }
                    queryString = buf.toString();
                } else {
                    throw new RuntimeException("Unexpected IndexRanges implementation");
                }
            } else {
                RangeCalculator calc = this.getTermIndexInformation(connector, auths, indexedTerms, terms,
                        this.getIndexTableName(), this.getReverseIndexTableName(), queryString,
                        this.queryThreads, typeFilter);
                if (null == calc.getResult() || calc.getResult().isEmpty()) {
                    // Then we didn't find anything in the index for this query. This may happen for an indexed
                    // term that has wildcards in unhandled locations.
                    // Break out of here by throwing a named exception and do full scan
                    throw new DoNotPerformOptimizedQueryException();
                }
                termIndexInfo = new UnionIndexRanges();
                termIndexInfo.setIndexValuesToOriginalValues(calc.getIndexValues());
                termIndexInfo.setFieldNamesAndValues(calc.getIndexEntries());
                termIndexInfo.getTermCardinality().putAll(calc.getTermCardinalities());
                for (Range r : calc.getResult()) {
                    // foo is a placeholder and is ignored.
                    termIndexInfo.add("foo", r);
                }
            }
        } catch (TableNotFoundException e) {
            log.error(this.getIndexTableName() + " not found", e);
            throw new RuntimeException(this.getIndexTableName() + " not found", e);
        } catch (org.apache.commons.jexl2.parser.ParseException e) {
            throw new RuntimeException("Error determining ranges for query: " + queryString, e);
        } catch (DoNotPerformOptimizedQueryException e) {
            log.info("Indexed fields not found in index, performing full scan");
            termIndexInfo = null;
        }
        queryGlobalIndex.stop();

        // Determine if we should proceed with optimized query based on results from the global index
        boolean proceed = false;
        if (null == termIndexInfo || termIndexInfo.getFieldNamesAndValues().values().size() == 0) {
            proceed = false;
        } else if (null != orTerms && orTerms.size() > 0
                && (termIndexInfo.getFieldNamesAndValues().values().size() == indexedTerms.size())) {
            proceed = true;
        } else if (termIndexInfo.getFieldNamesAndValues().values().size() > 0) {
            proceed = true;
        } else if (orsAllIndexed) {
            proceed = true;
        } else {
            proceed = false;
        }
        if (log.isDebugEnabled()) {
            log.debug("Proceed with optimized query: " + proceed);
            if (null != termIndexInfo)
                log.debug("termIndexInfo.getTermsFound().size(): "
                        + termIndexInfo.getFieldNamesAndValues().values().size() + " indexedTerms.size: "
                        + indexedTerms.size() + " fields.size: " + fields.size());
        }
        if (proceed) {
            if (log.isDebugEnabled()) {
                log.debug(hash + " Performing optimized query");
            }
            // Use the scan ranges from the GlobalIndexRanges object as the ranges for the batch scanner
            ranges = termIndexInfo.getRanges();
            if (log.isDebugEnabled()) {
                log.info(hash + " Ranges: count: " + ranges.size() + ", " + ranges.toString());
            }

            // Create BatchScanner, set the ranges, and set up the iterators.
            optimizedEventQuery.start();
            BatchScanner bs = null;
            try {
                bs = connector.createBatchScanner(this.getTableName(), auths, queryThreads);
                bs.setRanges(ranges);
                IteratorSetting si = new IteratorSetting(21, "eval", OptimizedQueryIterator.class);

                if (log.isDebugEnabled()) {
                    log.debug("Setting scan option: " + EvaluatingIterator.QUERY_OPTION + " to " + queryString);
                }
                // Set the query option
                si.addOption(EvaluatingIterator.QUERY_OPTION, queryString);
                // Set the Indexed Terms List option. This is the field name and normalized field value pair
                // separated by a comma.
                StringBuilder buf = new StringBuilder();
                String sep = "";
                for (Entry<String, String> entry : termIndexInfo.getFieldNamesAndValues().entries()) {
                    buf.append(sep);
                    buf.append(entry.getKey());
                    buf.append(":");
                    buf.append(termIndexInfo.getIndexValuesToOriginalValues().get(entry.getValue()));
                    buf.append(":");
                    buf.append(entry.getValue());
                    if (sep.equals("")) {
                        sep = ";";
                    }
                }
                if (log.isDebugEnabled()) {
                    log.debug("Setting scan option: " + FieldIndexQueryReWriter.INDEXED_TERMS_LIST + " to "
                            + buf.toString());
                }
                FieldIndexQueryReWriter rewriter = new FieldIndexQueryReWriter();
                String q = "";
                try {
                    q = queryString;
                    q = rewriter.applyCaseSensitivity(q, true, false); // Set upper/lower case for fieldname/fieldvalue
                    Map<String, String> opts = new HashMap<String, String>();
                    opts.put(FieldIndexQueryReWriter.INDEXED_TERMS_LIST, buf.toString());
                    q = rewriter.removeNonIndexedTermsAndInvalidRanges(q, opts);
                    q = rewriter.applyNormalizedTerms(q, opts);
                    if (log.isDebugEnabled()) {
                        log.debug("runServerQuery, FieldIndex Query: " + q);
                    }
                } catch (org.apache.commons.jexl2.parser.ParseException ex) {
                    log.error("Could not parse query, Jexl ParseException: " + ex);
                } catch (Exception ex) {
                    log.error("Problem rewriting query, Exception: " + ex.getMessage());
                }
                si.addOption(BooleanLogicIterator.FIELD_INDEX_QUERY, q);

                // Set the term cardinality option
                sep = "";
                buf.delete(0, buf.length());
                for (Entry<String, Long> entry : termIndexInfo.getTermCardinality().entrySet()) {
                    buf.append(sep);
                    buf.append(entry.getKey());
                    buf.append(":");
                    buf.append(entry.getValue());
                    sep = ",";
                }
                if (log.isDebugEnabled())
                    log.debug("Setting scan option: " + BooleanLogicIterator.TERM_CARDINALITIES + " to "
                            + buf.toString());
                si.addOption(BooleanLogicIterator.TERM_CARDINALITIES, buf.toString());
                if (this.useReadAheadIterator) {
                    if (log.isDebugEnabled()) {
                        log.debug("Enabling read ahead iterator with queue size: " + this.readAheadQueueSize
                                + " and timeout: " + this.readAheadTimeOut);
                    }
                    si.addOption(ReadAheadIterator.QUEUE_SIZE, this.readAheadQueueSize);
                    si.addOption(ReadAheadIterator.TIMEOUT, this.readAheadTimeOut);
                }

                if (null != unevaluatedExpressions) {
                    StringBuilder unevaluatedExpressionList = new StringBuilder();
                    String sep2 = "";
                    for (String exp : unevaluatedExpressions) {
                        unevaluatedExpressionList.append(sep2).append(exp);
                        sep2 = ",";
                    }
                    if (log.isDebugEnabled())
                        log.debug("Setting scan option: " + EvaluatingIterator.UNEVALUTED_EXPRESSIONS + " to "
                                + unevaluatedExpressionList.toString());
                    si.addOption(EvaluatingIterator.UNEVALUTED_EXPRESSIONS,
                            unevaluatedExpressionList.toString());
                }

                bs.addScanIterator(si);

                processResults.start();
                processResults.suspend();
                long count = 0;
                for (Entry<Key, Value> entry : bs) {
                    count++;
                    // The key that is returned by the EvaluatingIterator is not the same key that is in
                    // the table. The value that is returned by the EvaluatingIterator is a kryo
                    // serialized EventFields object.
                    processResults.resume();
                    Document d = this.createDocument(entry.getKey(), entry.getValue());
                    results.getResults().add(d);
                    processResults.suspend();
                }
                log.info(count + " matching entries found in optimized query.");
                optimizationSucceeded = true;
                processResults.stop();
            } catch (TableNotFoundException e) {
                log.error(this.getTableName() + " not found", e);
                throw new RuntimeException(this.getIndexTableName() + " not found", e);
            } finally {
                if (bs != null) {
                    bs.close();
                }
            }
            optimizedEventQuery.stop();
        }
        optimizedQuery.stop();
    }

    // We should look into finding a better way to handle whether we do an optimized query or not.
    // We are not setting up an else condition here because we may have aborted the logic early in the if statement.
    if (!optimizationSucceeded || ((null != orTerms && orTerms.size() > 0)
            && (indexedTerms.size() != fields.size()) && !orsAllIndexed)) {
        // if (!optimizationSucceeded || ((null != orTerms && orTerms.size() > 0) && (indexedTerms.size() != fields.size()))) {
        fullScanQuery.start();
        if (log.isDebugEnabled()) {
            log.debug(hash + " Performing full scan query");
        }

        // Set up a full scan using the date ranges from the query
        // Create BatchScanner, set the ranges, and set up the iterators.
        BatchScanner bs = null;
        try {
            // The ranges are the start and end dates
            Collection<Range> r = getFullScanRange(beginDate, endDate, terms);
            ranges.addAll(r);

            if (log.isDebugEnabled()) {
                log.debug(hash + " Ranges: count: " + ranges.size() + ", " + ranges.toString());
            }

            bs = connector.createBatchScanner(this.getTableName(), auths, queryThreads);
            bs.setRanges(ranges);
            IteratorSetting si = new IteratorSetting(22, "eval", EvaluatingIterator.class);
            // Create datatype regex if needed
            if (null != typeFilter) {
                StringBuilder buf = new StringBuilder();
                String s = "";
                for (String type : typeFilter) {
                    buf.append(s).append(type).append(".*");
                    s = "|";
                }
                if (log.isDebugEnabled())
                    log.debug("Setting colf regex iterator to: " + buf.toString());
                IteratorSetting ri = new IteratorSetting(21, "typeFilter", RegExFilter.class);
                RegExFilter.setRegexs(ri, null, buf.toString(), null, null, false);
                bs.addScanIterator(ri);
            }
            if (log.isDebugEnabled()) {
                log.debug("Setting scan option: " + EvaluatingIterator.QUERY_OPTION + " to " + queryString);
            }
            si.addOption(EvaluatingIterator.QUERY_OPTION, queryString);
            if (null != unevaluatedExpressions) {
                StringBuilder unevaluatedExpressionList = new StringBuilder();
                String sep2 = "";
                for (String exp : unevaluatedExpressions) {
                    unevaluatedExpressionList.append(sep2).append(exp);
                    sep2 = ",";
                }
                if (log.isDebugEnabled())
                    log.debug("Setting scan option: " + EvaluatingIterator.UNEVALUTED_EXPRESSIONS + " to "
                            + unevaluatedExpressionList.toString());
                si.addOption(EvaluatingIterator.UNEVALUTED_EXPRESSIONS, unevaluatedExpressionList.toString());
            }
            bs.addScanIterator(si);
            long count = 0;
            processResults.start();
            processResults.suspend();
            for (Entry<Key, Value> entry : bs) {
                count++;
                // The key that is returned by the EvaluatingIterator is not the same key that is in
                // the partition table. The value that is returned by the EvaluatingIterator is a kryo
                // serialized EventFields object.
                processResults.resume();
                Document d = this.createDocument(entry.getKey(), entry.getValue());
                results.getResults().add(d);
                processResults.suspend();
            }
            processResults.stop();
            log.info(count + " matching entries found in full scan query.");
        } catch (TableNotFoundException e) {
            log.error(this.getTableName() + " not found", e);
        } finally {
            if (bs != null) {
                bs.close();
            }
        }
        fullScanQuery.stop();
    }

    log.info("AbstractQueryLogic: " + queryString + " " + timeString(abstractQueryLogic.getTime()));
    log.info(" 1) parse query " + timeString(parseQuery.getTime()));
    log.info(" 2) query metadata " + timeString(queryMetadata.getTime()));
    log.info(" 3) full scan query " + timeString(fullScanQuery.getTime()));
    log.info(" 3) optimized query " + timeString(optimizedQuery.getTime()));
    log.info(" 1) process results " + timeString(processResults.getTime()));
    log.info(" 1) query global index " + timeString(queryGlobalIndex.getTime()));
    log.info(hash + " Query completed.");

    return results;
}
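
The Multimap work in this example centers on indexedTerms, a HashMultimap<String, Normalizer> built from the metadata lookup, whose size() (the total number of field/normalizer pairs) is compared against fields.size() to choose between the optimized scan and the full scan. A reduced, hedged sketch of that decision logic, with normalizers stubbed as Strings and the surrounding query machinery omitted:

import com.google.common.collect.HashMultimap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Multimap;
import java.util.Set;

public class OptimizationCheck {
    // Mirrors the structure of the optimization test above; size() counts
    // every field/normalizer pair, so a field with two normalizers counts twice.
    static boolean canOptimize(Multimap<String, String> indexedTerms, Set<String> fields,
            Set<String> orTerms, boolean unsupportedOperator) {
        boolean orsAllIndexed = !orTerms.isEmpty() && indexedTerms.keySet().containsAll(orTerms);
        return !unsupportedOperator
                && ((orTerms.isEmpty() && indexedTerms.size() > 0)
                        || (!fields.isEmpty() && indexedTerms.size() == fields.size())
                        || orsAllIndexed);
    }

    public static void main(String[] args) {
        Multimap<String, String> indexedTerms = HashMultimap.create();
        indexedTerms.put("AUTHOR", "LcNoDiacriticsNormalizer"); // stub values
        indexedTerms.put("TITLE", "LcNoDiacriticsNormalizer");
        Set<String> fields = ImmutableSet.of("AUTHOR", "TITLE");
        // two fields, one normalizer each: size() == fields.size() == 2
        System.out.println(canOptimize(indexedTerms, fields, ImmutableSet.<String>of(), false)); // true
    }
}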
From source file:com.yahoo.pulsar.broker.loadbalance.impl.SimpleLoadManagerImpl.java
private Multimap<Long, ResourceUnit> getFinalCandidates(ServiceUnitId serviceUnit,
        Map<Long, Set<ResourceUnit>> availableBrokers) {
    // need multimap or at least set of RUs
    Multimap<Long, ResourceUnit> matchedPrimaries = TreeMultimap.create();
    Multimap<Long, ResourceUnit> matchedShared = TreeMultimap.create();

    NamespaceName namespace = serviceUnit.getNamespaceObject();
    boolean isIsolationPoliciesPresent = policies.IsIsolationPoliciesPresent(namespace);
    if (isIsolationPoliciesPresent) {
        log.debug("Isolation Policies Present for namespace - [{}]", namespace.toString());
    }
    for (Map.Entry<Long, Set<ResourceUnit>> entry : availableBrokers.entrySet()) {
        for (ResourceUnit ru : entry.getValue()) {
            log.debug("Considering Resource Unit [{}] with Rank [{}] for serviceUnit [{}]", ru.getResourceId(),
                    entry.getKey(), serviceUnit);
            URL brokerUrl = null;
            try {
                brokerUrl = new URL(String.format(ru.getResourceId()));
            } catch (MalformedURLException e) {
                log.error("Unable to parse brokerUrl from ResourceUnitId - [{}]", e);
                continue;
            }
            // todo: in future check if the resource unit has resources to take the namespace
            if (isIsolationPoliciesPresent) {
                // note: serviceUnitID is namespace name and ResourceID is brokerName
                if (policies.isPrimaryBroker(namespace, brokerUrl.getHost())) {
                    matchedPrimaries.put(entry.getKey(), ru);
                    if (log.isDebugEnabled()) {
                        log.debug("Added Primary Broker - [{}] as possible Candidates for"
                                + " namespace - [{}] with policies", brokerUrl.getHost(), namespace.toString());
                    }
                } else if (policies.isSharedBroker(brokerUrl.getHost())) {
                    matchedShared.put(entry.getKey(), ru);
                    if (log.isDebugEnabled()) {
                        log.debug("Added Shared Broker - [{}] as possible "
                                + "Candidates for namespace - [{}] with policies", brokerUrl.getHost(),
                                namespace.toString());
                    }
                } else {
                    if (log.isDebugEnabled()) {
                        log.debug("Skipping Broker - [{}] not primary broker and not shared"
                                + " for namespace - [{}] ", brokerUrl.getHost(), namespace.toString());
                    }
                }
            } else {
                if (policies.isSharedBroker(brokerUrl.getHost())) {
                    matchedShared.put(entry.getKey(), ru);
                    log.debug("Added Shared Broker - [{}] as possible Candidates for namespace - [{}]",
                            brokerUrl.getHost(), namespace.toString());
                }
            }
        }
    }
    if (isIsolationPoliciesPresent) {
        return getFinalCandidatesWithPolicy(namespace, matchedPrimaries, matchedShared);
    } else {
        log.debug("Policies not present for namespace - [{}] so only "
                + "considering shared [{}] brokers for possible owner", namespace.toString(),
                matchedShared.size());
        return getFinalCandidatesNoPolicy(matchedShared);
    }
}
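
This example relies on TreeMultimap so that candidates come back ordered by rank, and on matchedShared.size() to report how many (rank, broker) pairs were collected. A minimal sketch of the pattern, with brokers stubbed as hostname strings:

import com.google.common.collect.Multimap;
import com.google.common.collect.TreeMultimap;

public class RankedCandidates {
    public static void main(String[] args) {
        // TreeMultimap keeps both keys (ranks) and values sorted; a natural
        // fit for "all candidate brokers grouped by load rank".
        Multimap<Long, String> matchedShared = TreeMultimap.create();
        matchedShared.put(20L, "broker-2.example.com");
        matchedShared.put(10L, "broker-1.example.com");
        matchedShared.put(10L, "broker-3.example.com");

        System.out.println(matchedShared.size());   // 3 (rank, broker) pairs
        System.out.println(matchedShared.keySet()); // [10, 20], sorted ranks
        System.out.println(matchedShared.get(10L)); // best-ranked candidates first
    }
}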
From source file:org.onebusaway.nyc.vehicle_tracking.impl.inference.BlockStateService.java
/**
 * Build the maps and STR tree for look-up.
 */
private void buildShapeSpatialIndex() throws IOException, ClassNotFoundException {
    try {
        _linesToTripInfoByAgencyId = new HashMap<String, Multimap<LocationIndexedLine, TripInfo>>();
        final Multimap<AgencyAndId, AgencyAndId> allUniqueShapePointsToBlockId = HashMultimap.create();
        final Multimap<AgencyAndId, BlockTripIndex> blockTripIndicesByShapeId = HashMultimap.create();
        final Multimap<AgencyAndId, BlockLayoverIndex> blockLayoverIndicesByShapeId = HashMultimap.create();
        final Multimap<AgencyAndId, FrequencyBlockTripIndex> frequencyBlockTripIndicesByShapeId = HashMultimap
                .create();
        final Multimap<Envelope, LocationIndexedLine> envToLines = HashMultimap.create();

        _log.info("generating shapeId & blockConfig to block trips map...");
        for (final BlockEntry blockEntry : _transitGraphDao.getAllBlocks()) {
            for (final BlockConfigurationEntry blockConfig : blockEntry.getConfigurations()) {
                for (final BlockTripEntry blockTrip : blockConfig.getTrips()) {
                    final TripEntry trip = blockTrip.getTrip();
                    final AgencyAndId shapeId = trip.getShapeId();
                    final AgencyAndId blockId = blockEntry.getId();
                    if (shapeId != null) {
                        allUniqueShapePointsToBlockId.put(shapeId, blockId);
                        blockTripIndicesByShapeId.putAll(shapeId,
                                _blockIndexService.getBlockTripIndicesForBlock(blockId));
                        blockLayoverIndicesByShapeId.putAll(shapeId,
                                _blockIndexService.getBlockLayoverIndicesForBlock(blockId));
                        frequencyBlockTripIndicesByShapeId.putAll(shapeId,
                                _blockIndexService.getFrequencyBlockTripIndicesForBlock(blockId));
                    }
                }
            }
        }

        _log.info("\tshapePoints=" + allUniqueShapePointsToBlockId.keySet().size());
        for (final Entry<AgencyAndId, Collection<AgencyAndId>> shapePointsEntry : allUniqueShapePointsToBlockId
                .asMap().entrySet()) {
            final AgencyAndId shapeId = shapePointsEntry.getKey();
            final ShapePoints shapePoints = _shapePointService.getShapePointsForShapeId(shapeId);
            if (shapePoints == null || shapePoints.isEmpty()) {
                _log.warn("blocks with no shapes: " + shapePointsEntry.getValue());
                continue;
            }
            final Collection<BlockTripIndex> indices = blockTripIndicesByShapeId.get(shapeId);
            final Collection<BlockLayoverIndex> layoverIndices = blockLayoverIndicesByShapeId.get(shapeId);
            final Collection<FrequencyBlockTripIndex> frequencyIndices = frequencyBlockTripIndicesByShapeId
                    .get(shapeId);
            final Collection<AgencyAndId> blockIds = shapePointsEntry.getValue();
            if (blockIds.isEmpty())
                continue;

            // The shape agency ID cannot be used when the bundle builder is configured for agency ID remapping,
            // so we use the agency ID from the block ID; since shapes are not shared across agencies, they
            // should all be the same.
            final String agencyId = blockIds.iterator().next().getAgencyId();

            Multimap<LocationIndexedLine, TripInfo> linesToTripInfoForThisAgencyId = _linesToTripInfoByAgencyId
                    .get(agencyId);
            if (linesToTripInfoForThisAgencyId == null) {
                linesToTripInfoForThisAgencyId = HashMultimap.create();
            }

            for (int i = 0; i < shapePoints.getSize() - 1; ++i) {
                final CoordinatePoint from = shapePoints.getPointForIndex(i);
                final CoordinatePoint to = shapePoints.getPointForIndex(i + 1);

                final Coordinate fromJts = new Coordinate(from.getLon(), from.getLat());
                final Coordinate toJts = new Coordinate(to.getLon(), to.getLat());

                final Geometry lineGeo = _geometryFactory.createLineString(new Coordinate[] { fromJts, toJts });
                final LocationIndexedLine line = new LocationIndexedLine(lineGeo);
                final Envelope env = lineGeo.getEnvelopeInternal();

                final double distanceFrom = shapePoints.getDistTraveledForIndex(i);
                final double distanceTo = shapePoints.getDistTraveledForIndex(i + 1);
                linesToTripInfoForThisAgencyId.put(line,
                        new TripInfo(distanceFrom, distanceTo, indices, layoverIndices, frequencyIndices));

                envToLines.put(env, line);
            }
            _linesToTripInfoByAgencyId.put(agencyId, linesToTripInfoForThisAgencyId);
            addShapeToDetourGeometryMap(shapePoints);
        }

        if (envToLines.size() > 0) {
            _log.info("\ttree size=" + envToLines.keySet().size());
            _index = new STRtree(envToLines.keySet().size());
            for (final Entry<Envelope, Collection<LocationIndexedLine>> envLines : envToLines.asMap()
                    .entrySet()) {
                _index.insert(envLines.getKey(), envLines.getValue());
            }
        }
    } catch (final Exception ex) {
        ex.printStackTrace();
    }
    _log.info("done.");
}
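
Two different counts appear at the end of this example: envToLines.size() > 0 asks whether any (envelope, line) pair exists at all, while envToLines.keySet().size() counts distinct envelopes to size the STRtree. A small sketch of that asMap() grouping pattern, with plain strings standing in for JTS envelopes and lines:

import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;
import java.util.Collection;
import java.util.Map;

public class AsMapGrouping {
    public static void main(String[] args) {
        Multimap<String, String> envToLines = HashMultimap.create();
        envToLines.put("env-A", "line-1");
        envToLines.put("env-A", "line-2");
        envToLines.put("env-B", "line-3");

        if (envToLines.size() > 0) { // any (key, value) pair at all?
            System.out.println("distinct keys: " + envToLines.keySet().size()); // 2
            // asMap() exposes the multimap as Map<K, Collection<V>>,
            // handy for bulk-inserting one entry per key into an index
            for (Map.Entry<String, Collection<String>> e : envToLines.asMap().entrySet()) {
                System.out.println(e.getKey() + " -> " + e.getValue());
            }
        }
    }
}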
From source file:org.tomahawk.libtomahawk.infosystem.hatchet.HatchetInfoPlugin.java
/**
 * Build a query URL for the given parameters, with which we can request the result JSON from
 * the Hatchet API.
 *
 * @return the built query url
 */
private static String buildQuery(int type, Multimap<String, String> paramsIn)
        throws UnsupportedEncodingException {
    Multimap<String, String> params = null;
    if (paramsIn != null) {
        params = LinkedListMultimap.create(paramsIn);
    }
    String queryString = null;
    java.util.Collection<String> paramStrings;
    Iterator<String> iterator;
    switch (type) {
    case InfoRequestData.INFOREQUESTDATA_TYPE_USERS:
        queryString = HATCHET_BASE_URL + "/" + HATCHET_VERSION + "/" + HATCHET_USERS + "/";
        break;
    case InfoRequestData.INFOREQUESTDATA_TYPE_USERS_PLAYLISTS:
        paramStrings = params.get(HATCHET_PARAM_ID);
        iterator = paramStrings.iterator();
        queryString = HATCHET_BASE_URL + "/" + HATCHET_VERSION + "/" + HATCHET_USERS + "/" + iterator.next()
                + "/" + HATCHET_PLAYLISTS;
        params.removeAll(HATCHET_PARAM_ID);
        break;
    case InfoRequestData.INFOREQUESTDATA_TYPE_USERS_LOVEDITEMS:
        paramStrings = params.get(HATCHET_PARAM_ID);
        iterator = paramStrings.iterator();
        queryString = HATCHET_BASE_URL + "/" + HATCHET_VERSION + "/" + HATCHET_USERS + "/" + iterator.next()
                + "/" + HATCHET_LOVEDITEMS;
        params.removeAll(HATCHET_PARAM_ID);
        break;
    case InfoRequestData.INFOREQUESTDATA_TYPE_USERS_SOCIALACTIONS:
        paramStrings = params.get(HATCHET_PARAM_ID);
        iterator = paramStrings.iterator();
        queryString = HATCHET_BASE_URL + "/" + HATCHET_VERSION + "/" + HATCHET_USERS + "/" + iterator.next()
                + "/" + HATCHET_SOCIALACTIONS;
        params.removeAll(HATCHET_PARAM_ID);
        break;
    case InfoRequestData.INFOREQUESTDATA_TYPE_USERS_FRIENDSFEED:
        paramStrings = params.get(HATCHET_PARAM_ID);
        iterator = paramStrings.iterator();
        queryString = HATCHET_BASE_URL + "/" + HATCHET_VERSION + "/" + HATCHET_USERS + "/" + iterator.next()
                + "/" + HATCHET_FRIENDSFEED;
        params.removeAll(HATCHET_PARAM_ID);
        break;
    case InfoRequestData.INFOREQUESTDATA_TYPE_USERS_PLAYBACKLOG:
        paramStrings = params.get(HATCHET_PARAM_ID);
        iterator = paramStrings.iterator();
        queryString = HATCHET_BASE_URL + "/" + HATCHET_VERSION + "/" + HATCHET_USERS + "/" + iterator.next()
                + "/" + HATCHET_PLAYBACKLOG;
        params.removeAll(HATCHET_PARAM_ID);
        break;
    case InfoRequestData.INFOREQUESTDATA_TYPE_PLAYLISTS_ENTRIES:
        paramStrings = params.get(HATCHET_PARAM_ID);
        iterator = paramStrings.iterator();
        queryString = HATCHET_BASE_URL + "/" + HATCHET_VERSION + "/" + HATCHET_PLAYLISTS + "/" + iterator.next()
                + "/" + HATCHET_PLAYLISTS_ENTRIES;
        params.removeAll(HATCHET_PARAM_ID);
        break;
    case InfoRequestData.INFOREQUESTDATA_TYPE_ARTISTS:
        queryString = HATCHET_BASE_URL + "/" + HATCHET_VERSION + "/" + HATCHET_ARTISTS + "/";
        break;
    case InfoRequestData.INFOREQUESTDATA_TYPE_ARTISTS_ALBUMS:
        paramStrings = params.get(HATCHET_PARAM_ID);
        iterator = paramStrings.iterator();
        queryString = HATCHET_BASE_URL + "/" + HATCHET_VERSION + "/" + HATCHET_ARTISTS + "/" + iterator.next()
                + "/" + HATCHET_ALBUMS + "/";
        params.removeAll(HATCHET_PARAM_ID);
        break;
    case InfoRequestData.INFOREQUESTDATA_TYPE_ARTISTS_TOPHITS:
        paramStrings = params.get(HATCHET_PARAM_ID);
        iterator = paramStrings.iterator();
        queryString = HATCHET_BASE_URL + "/" + HATCHET_VERSION + "/" + HATCHET_ARTISTS + "/" + iterator.next()
                + "/" + HATCHET_ARTISTS_TOPHITS + "/";
        params.removeAll(HATCHET_PARAM_ID);
        break;
    case InfoRequestData.INFOREQUESTDATA_TYPE_TRACKS:
        queryString = HATCHET_BASE_URL + "/" + HATCHET_VERSION + "/" + HATCHET_TRACKS + "/";
        break;
    case InfoRequestData.INFOREQUESTDATA_TYPE_ALBUMS:
        queryString = HATCHET_BASE_URL + "/" + HATCHET_VERSION + "/" + HATCHET_ALBUMS + "/";
        break;
    case InfoRequestData.INFOREQUESTDATA_TYPE_SEARCHES:
        queryString = HATCHET_BASE_URL + "/" + HATCHET_VERSION + "/" + HATCHET_SEARCHES + "/";
        break;
    case InfoRequestData.INFOREQUESTDATA_TYPE_PLAYBACKLOGENTRIES:
        queryString = HATCHET_BASE_URL + "/" + HATCHET_VERSION + "/" + HATCHET_PLAYBACKLOGENTRIES + "/";
        break;
    case InfoRequestData.INFOREQUESTDATA_TYPE_PLAYBACKLOGENTRIES_NOWPLAYING:
        queryString = HATCHET_BASE_URL + "/" + HATCHET_VERSION + "/" + HATCHET_PLAYBACKLOGENTRIES + "/"
                + HATCHET_PLAYBACKLOGENTRIES_NOWPLAYING + "/";
        break;
    case InfoRequestData.INFOREQUESTDATA_TYPE_SOCIALACTIONS:
        queryString = HATCHET_BASE_URL + "/" + HATCHET_VERSION + "/" + HATCHET_SOCIALACTIONS + "/";
        break;
    case InfoRequestData.INFOREQUESTDATA_TYPE_RELATIONSHIPS:
        queryString = HATCHET_BASE_URL + "/" + HATCHET_VERSION + "/" + HATCHET_RELATIONSHIPS + "/";
        break;
    }
    // append every parameter we didn't use
    if (params != null && params.size() > 0) {
        queryString += "?" + TomahawkUtils.paramsListToString(params);
    }
    return queryString;
}
From source file:org.apache.rya.indexing.external.tupleSet.AccumuloIndexSet.java
/**
 * Core evaluation method used during query evaluation - given a collection
 * of binding set constraints, this method finds common binding labels
 * between the constraints and table, uses those to build a prefix scan of
 * the Accumulo table, and creates a solution binding set by iterating over
 * the scan results.
 *
 * @param bindingset - collection of {@link BindingSet}s to be joined with PCJ
 * @return - CloseableIteration over joined results
 */
@Override
public CloseableIteration<BindingSet, QueryEvaluationException> evaluate(
        final Collection<BindingSet> bindingset) throws QueryEvaluationException {

    if (bindingset.isEmpty()) {
        return new IteratorWrapper<BindingSet, QueryEvaluationException>(new HashSet<BindingSet>().iterator());
    }

    final List<BindingSet> crossProductBs = new ArrayList<>();
    final Map<String, org.openrdf.model.Value> constantConstraints = new HashMap<>();
    final Set<Range> hashJoinRanges = new HashSet<>();
    final Range EMPTY_RANGE = new Range("", true, "~", false);
    Range crossProductRange = EMPTY_RANGE;
    String localityGroupOrder = varOrder.get(0);
    int maxPrefixLen = Integer.MIN_VALUE;
    int prefixLen = 0;
    int oldPrefixLen = 0;
    final Multimap<String, BindingSet> bindingSetHashMap = HashMultimap.create();
    HashJoinType joinType = HashJoinType.CONSTANT_JOIN_VAR;
    final Set<String> unAssuredVariables = Sets.difference(getTupleExpr().getBindingNames(),
            getTupleExpr().getAssuredBindingNames());
    boolean useColumnScan = false;
    boolean isCrossProd = false;
    boolean containsConstantConstraints = false;
    final BindingSet constants = getConstantConstraints();
    containsConstantConstraints = constants.size() > 0;

    try {
        for (final BindingSet bs : bindingset) {
            if (bindingset.size() == 1 && bs.size() == 0) {
                // in this case, only single, empty bindingset, pcj node is
                // first node in query plan - use full Range scan with
                // column family set
                useColumnScan = true;
            }
            // get common vars for PCJ - only use variables associated
            // with assured Bindings
            final QueryBindingSet commonVars = new QueryBindingSet();
            for (final String b : getTupleExpr().getAssuredBindingNames()) {
                final Binding v = bs.getBinding(b);
                if (v != null) {
                    commonVars.addBinding(v);
                }
            }
            // no common vars implies cross product
            if (commonVars.size() == 0 && bs.size() != 0) {
                crossProductBs.add(bs);
                isCrossProd = true;
            }
            // get a varOrder from orders in PCJ table - use at least
            // one common variable
            final BindingSetVariableOrder varOrder = getVarOrder(commonVars.getBindingNames(),
                    constants.getBindingNames());

            // update constant constraints not used in varOrder and
            // update Bindings used to form range by removing unused
            // variables
            commonVars.addAll(constants);
            if (commonVars.size() > varOrder.varOrderLen) {
                final Map<String, Value> valMap = getConstantValueMap();
                for (final String s : new HashSet<String>(varOrder.unusedVars)) {
                    if (valMap.containsKey(s) && !constantConstraints.containsKey(s)) {
                        constantConstraints.put(s, valMap.get(s));
                    }
                    commonVars.removeBinding(s);
                }
            }

            if (containsConstantConstraints && (useColumnScan || isCrossProd)) {
                // only one range required in event of a cross product or
                // empty BindingSet
                // Range will either be full table Range or determined by
                // constant constraints
                if (crossProductRange == EMPTY_RANGE) {
                    crossProductRange = getRange(varOrder.varOrder, commonVars);
                    localityGroupOrder = prefixToOrder(varOrder.varOrder);
                }
            } else if (!useColumnScan && !isCrossProd) {
                // update ranges and add BindingSet to HashJoinMap if not a
                // cross product
                hashJoinRanges.add(getRange(varOrder.varOrder, commonVars));
                prefixLen = varOrder.varOrderLen;
                // check if common Variable Orders are changing between
                // BindingSets (happens in the case of Optional). If the common
                // variable set length changes from BindingSet to BindingSet,
                // update the HashJoinType to be VARIABLE_JOIN_VAR.
                if (oldPrefixLen == 0) {
                    oldPrefixLen = prefixLen;
                } else {
                    if (oldPrefixLen != prefixLen && joinType == HashJoinType.CONSTANT_JOIN_VAR) {
                        joinType = HashJoinType.VARIABLE_JOIN_VAR;
                    }
                    oldPrefixLen = prefixLen;
                }
                // update max prefix len
                if (prefixLen > maxPrefixLen) {
                    maxPrefixLen = prefixLen;
                }
                final String key = getHashJoinKey(varOrder.varOrder, commonVars);
                bindingSetHashMap.put(key, bs);
            }
            isCrossProd = false;
        }

        // create full Range scan iterator and set column family if empty
        // collection or if cross product BindingSet exists and no hash join
        // BindingSets
        if ((useColumnScan || crossProductBs.size() > 0) && bindingSetHashMap.size() == 0) {
            final Scanner scanner = accCon.createScanner(tablename, auths);
            // cross product with no cross product constraints here
            scanner.setRange(crossProductRange);
            scanner.fetchColumnFamily(new Text(localityGroupOrder));
            return new PCJKeyToCrossProductBindingSetIterator(scanner, crossProductBs, constantConstraints,
                    unAssuredVariables, getTableVarMap());
        } else if ((useColumnScan || crossProductBs.size() > 0) && bindingSetHashMap.size() > 0) {
            // in this case, both hash join BindingSets and cross product
            // BindingSets exist
            // create an iterator to evaluate cross product and an iterator
            // for hash join, then combine
            final List<CloseableIteration<BindingSet, QueryEvaluationException>> iteratorList = new ArrayList<>();

            // create cross product iterator
            final Scanner scanner1 = accCon.createScanner(tablename, auths);
            scanner1.setRange(crossProductRange);
            scanner1.fetchColumnFamily(new Text(localityGroupOrder));
            iteratorList.add(new PCJKeyToCrossProductBindingSetIterator(scanner1, crossProductBs,
                    constantConstraints, unAssuredVariables, getTableVarMap()));

            // create hash join iterator
            final BatchScanner scanner2 = accCon.createBatchScanner(tablename, auths, 10);
            scanner2.setRanges(hashJoinRanges);
            final PCJKeyToJoinBindingSetIterator iterator = new PCJKeyToJoinBindingSetIterator(scanner2,
                    getTableVarMap(), maxPrefixLen);
            iteratorList.add(
                    new BindingSetHashJoinIterator(bindingSetHashMap, iterator, unAssuredVariables, joinType));

            // combine iterators
            return new IteratorCombiner(iteratorList);
        } else {
            // only hash join BindingSets exist
            final BatchScanner scanner = accCon.createBatchScanner(tablename, auths, 10);
            // only need to create hash join iterator
            scanner.setRanges(hashJoinRanges);
            final PCJKeyToJoinBindingSetIterator iterator = new PCJKeyToJoinBindingSetIterator(scanner,
                    getTableVarMap(), maxPrefixLen);
            return new BindingSetHashJoinIterator(bindingSetHashMap, iterator, unAssuredVariables, joinType);
        }
    } catch (final Exception e) {
        throw new QueryEvaluationException(e);
    }
}
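
Here HashMultimap<String, BindingSet> serves as the build side of a hash join: each BindingSet is indexed under its join-key string, and bindingSetHashMap.size() == 0 separates the pure cross-product path from the hash-join paths. A minimal sketch of that indexing pattern, with binding sets stubbed as strings:

import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;

public class HashJoinIndex {
    public static void main(String[] args) {
        // Build side of a hash join: group rows (stubbed as strings) by join key.
        Multimap<String, String> joinIndex = HashMultimap.create();
        joinIndex.put("subject=alice", "bindingSet-1");
        joinIndex.put("subject=alice", "bindingSet-2");
        joinIndex.put("subject=bob", "bindingSet-3");

        // size() == 0 would mean no hash-join inputs at all (cross-product only);
        // here there are 3 key-value pairs under 2 distinct join keys.
        if (joinIndex.size() == 0) {
            System.out.println("no hash join inputs - fall back to cross product");
        } else {
            // probe side: look up all binding sets sharing a join key
            System.out.println(joinIndex.get("subject=alice")); // [bindingSet-1, bindingSet-2]
        }
    }
}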