List of usage examples for com.google.common.collect Multimap entries
Collection<Map.Entry<K, V>> entries();
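Before the full examples below, a minimal self-contained sketch (class and values are illustrative, not from any of the sources) of what entries() returns: a live collection view with one Map.Entry per key-value mapping, so duplicate keys appear once per mapping, and removals through the view update the multimap.

import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap;
import java.util.Map;

public class MultimapEntriesDemo {
    public static void main(String[] args) {
        Multimap<String, Integer> scores = ArrayListMultimap.create();
        scores.put("alice", 10);
        scores.put("alice", 12); // duplicate key is kept; entries() yields both mappings
        scores.put("bob", 7);

        // entries() returns one Map.Entry<K, V> per mapping, not per key
        for (Map.Entry<String, Integer> entry : scores.entries()) {
            System.out.println(entry.getKey() + " -> " + entry.getValue());
        }

        // The view is live: removing from it removes the mapping from the multimap
        scores.entries().removeIf(e -> e.getValue() < 10);
        System.out.println(scores); // {alice=[10, 12]}
    }
}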
From source file:org.apache.accumulo.examples.wikisearch.logic.AbstractQueryLogic.java
public Results runQuery(Connector connector, List<String> authorizations, String query, Date beginDate,
        Date endDate, Set<String> types) {
    if (StringUtils.isEmpty(query)) {
        throw new IllegalArgumentException(
                "NULL QueryNode reference passed to " + this.getClass().getSimpleName());
    }
    Set<Range> ranges = new HashSet<Range>();
    Set<String> typeFilter = types;
    String array[] = authorizations.toArray(new String[0]);
    Authorizations auths = new Authorizations(array);
    Results results = new Results();

    // Get the query string
    String queryString = query;

    StopWatch abstractQueryLogic = new StopWatch();
    StopWatch optimizedQuery = new StopWatch();
    StopWatch queryGlobalIndex = new StopWatch();
    StopWatch optimizedEventQuery = new StopWatch();
    StopWatch fullScanQuery = new StopWatch();
    StopWatch processResults = new StopWatch();

    abstractQueryLogic.start();

    StopWatch parseQuery = new StopWatch();
    parseQuery.start();

    QueryParser parser;
    try {
        if (log.isDebugEnabled()) {
            log.debug("ShardQueryLogic calling QueryParser.execute");
        }
        parser = new QueryParser();
        parser.execute(queryString);
    } catch (org.apache.commons.jexl2.parser.ParseException e1) {
        throw new IllegalArgumentException("Error parsing query", e1);
    }
    int hash = parser.getHashValue();
    parseQuery.stop();
    if (log.isDebugEnabled()) {
        log.debug(hash + " Query: " + queryString);
    }

    Set<String> fields = new HashSet<String>();
    for (String f : parser.getQueryIdentifiers()) {
        fields.add(f);
    }
    if (log.isDebugEnabled()) {
        log.debug("getQueryIdentifiers: " + parser.getQueryIdentifiers().toString());
    }
    // Remove any negated fields from the fields list; we don't want to look up negated fields
    // in the index.
    fields.removeAll(parser.getNegatedTermsForOptimizer());

    if (log.isDebugEnabled()) {
        log.debug("getQueryIdentifiers: " + parser.getQueryIdentifiers().toString());
    }

    // Get the mapping of field name to QueryTerm object from the query. The query term object
    // contains the operator, whether it's negated or not, and the literal to test against.
    Multimap<String, QueryTerm> terms = parser.getQueryTerms();

    // Find out which terms are indexed
    // TODO: Should we cache indexed terms or does that not make sense since we are always
    // loading data.
    StopWatch queryMetadata = new StopWatch();
    queryMetadata.start();
    Map<String, Multimap<String, Class<? extends Normalizer>>> metadataResults;
    try {
        metadataResults = findIndexedTerms(connector, auths, fields, typeFilter);
    } catch (Exception e1) {
        throw new RuntimeException("Error in metadata lookup", e1);
    }

    // Create a map of indexed term to set of normalizers for it
    Multimap<String, Normalizer> indexedTerms = HashMultimap.create();
    for (Entry<String, Multimap<String, Class<? extends Normalizer>>> entry : metadataResults.entrySet()) {
        // Get the normalizer from the normalizer cache
        for (Class<? extends Normalizer> clazz : entry.getValue().values()) {
            indexedTerms.put(entry.getKey(), normalizerCacheMap.get(clazz));
        }
    }
    queryMetadata.stop();
    if (log.isDebugEnabled()) {
        log.debug(hash + " Indexed Terms: " + indexedTerms.toString());
    }

    Set<String> orTerms = parser.getOrTermsForOptimizer();

    // Iterate over the query terms to get the operators specified in the query.
    ArrayList<String> unevaluatedExpressions = new ArrayList<String>();
    boolean unsupportedOperatorSpecified = false;
    for (Entry<String, QueryTerm> entry : terms.entries()) {
        if (null == entry.getValue()) {
            continue;
        }
        if (null != this.unevaluatedFields && this.unevaluatedFields.contains(entry.getKey().trim())) {
            unevaluatedExpressions.add(entry.getKey().trim() + " " + entry.getValue().getOperator() + " "
                    + entry.getValue().getValue());
        }
        int operator = JexlOperatorConstants.getJJTNodeType(entry.getValue().getOperator());
        if (!(operator == ParserTreeConstants.JJTEQNODE || operator == ParserTreeConstants.JJTNENODE
                || operator == ParserTreeConstants.JJTLENODE || operator == ParserTreeConstants.JJTLTNODE
                || operator == ParserTreeConstants.JJTGENODE || operator == ParserTreeConstants.JJTGTNODE
                || operator == ParserTreeConstants.JJTERNODE)) {
            unsupportedOperatorSpecified = true;
            break;
        }
    }
    if (null != unevaluatedExpressions)
        unevaluatedExpressions.trimToSize();
    if (log.isDebugEnabled()) {
        log.debug(hash + " unsupportedOperators: " + unsupportedOperatorSpecified + " indexedTerms: "
                + indexedTerms.toString() + " orTerms: " + orTerms.toString() + " unevaluatedExpressions: "
                + unevaluatedExpressions.toString());
    }

    // We can use the intersecting iterator over the field index as an optimization under the
    // following conditions
    //
    // 1. No unsupported operators in the query.
    // 2. No 'or' operators and at least one term indexed
    // or
    // 1. No unsupported operators in the query.
    // 2. All terms indexed
    // or
    // 1. All or'd terms are indexed. NOTE, this will potentially skip some queries and push to a full table scan
    //
    // We should look into finding a better way to handle whether we do an optimized query or not.
    boolean optimizationSucceeded = false;
    boolean orsAllIndexed = false;
    if (orTerms.isEmpty()) {
        orsAllIndexed = false;
    } else {
        orsAllIndexed = indexedTerms.keySet().containsAll(orTerms);
    }

    if (log.isDebugEnabled()) {
        log.debug("All or terms are indexed");
    }

    if (!unsupportedOperatorSpecified
            && (((null == orTerms || orTerms.isEmpty()) && indexedTerms.size() > 0)
                    || (fields.size() > 0 && indexedTerms.size() == fields.size()) || orsAllIndexed)) {
        optimizedQuery.start();
        // Set up intersecting iterator over field index.

        // Get information from the global index for the indexed terms. The results object will contain the term
        // mapped to an object that contains the total count, and partitions where this term is located.

        // TODO: Should we cache indexed term information or does that not make sense since we are always loading data
        queryGlobalIndex.start();
        IndexRanges termIndexInfo;
        try {
            // If fields is null or zero, then it's probably the case that the user entered a value
            // to search for with no fields. Check for the value in index.
            if (fields.isEmpty()) {
                termIndexInfo = this.getTermIndexInformation(connector, auths, queryString, typeFilter);
                if (null != termIndexInfo && termIndexInfo.getRanges().isEmpty()) {
                    // Then we didn't find anything in the index for this query. This may happen for an indexed
                    // term that has wildcards in unhandled locations.
                    // Break out of here by throwing a named exception and do full scan
                    throw new DoNotPerformOptimizedQueryException();
                }
                // We need to rewrite the query string here so that it's valid.
                if (termIndexInfo instanceof UnionIndexRanges) {
                    UnionIndexRanges union = (UnionIndexRanges) termIndexInfo;
                    StringBuilder buf = new StringBuilder();
                    String sep = "";
                    for (String fieldName : union.getFieldNamesAndValues().keySet()) {
                        buf.append(sep).append(fieldName).append(" == ");
                        if (!(queryString.startsWith("'") && queryString.endsWith("'"))) {
                            buf.append("'").append(queryString).append("'");
                        } else {
                            buf.append(queryString);
                        }
                        sep = " or ";
                    }
                    if (log.isDebugEnabled()) {
                        log.debug("Rewrote query for non-fielded single term query: " + queryString + " to "
                                + buf.toString());
                    }
                    queryString = buf.toString();
                } else {
                    throw new RuntimeException("Unexpected IndexRanges implementation");
                }
            } else {
                RangeCalculator calc = this.getTermIndexInformation(connector, auths, indexedTerms, terms,
                        this.getIndexTableName(), this.getReverseIndexTableName(), queryString,
                        this.queryThreads, typeFilter);
                if (null == calc.getResult() || calc.getResult().isEmpty()) {
                    // Then we didn't find anything in the index for this query. This may happen for an indexed
                    // term that has wildcards in unhandled locations.
                    // Break out of here by throwing a named exception and do full scan
                    throw new DoNotPerformOptimizedQueryException();
                }
                termIndexInfo = new UnionIndexRanges();
                termIndexInfo.setIndexValuesToOriginalValues(calc.getIndexValues());
                termIndexInfo.setFieldNamesAndValues(calc.getIndexEntries());
                termIndexInfo.getTermCardinality().putAll(calc.getTermCardinalities());
                for (Range r : calc.getResult()) {
                    // foo is a placeholder and is ignored.
                    termIndexInfo.add("foo", r);
                }
            }
        } catch (TableNotFoundException e) {
            log.error(this.getIndexTableName() + " not found", e);
            throw new RuntimeException(this.getIndexTableName() + " not found", e);
        } catch (org.apache.commons.jexl2.parser.ParseException e) {
            throw new RuntimeException("Error determining ranges for query: " + queryString, e);
        } catch (DoNotPerformOptimizedQueryException e) {
            log.info("Indexed fields not found in index, performing full scan");
            termIndexInfo = null;
        }
        queryGlobalIndex.stop();

        // Determine if we should proceed with optimized query based on results from the global index
        boolean proceed = false;
        if (null == termIndexInfo || termIndexInfo.getFieldNamesAndValues().values().size() == 0) {
            proceed = false;
        } else if (null != orTerms && orTerms.size() > 0
                && (termIndexInfo.getFieldNamesAndValues().values().size() == indexedTerms.size())) {
            proceed = true;
        } else if (termIndexInfo.getFieldNamesAndValues().values().size() > 0) {
            proceed = true;
        } else if (orsAllIndexed) {
            proceed = true;
        } else {
            proceed = false;
        }
        if (log.isDebugEnabled()) {
            log.debug("Proceed with optimized query: " + proceed);
            if (null != termIndexInfo)
                log.debug("termIndexInfo.getTermsFound().size(): "
                        + termIndexInfo.getFieldNamesAndValues().values().size() + " indexedTerms.size: "
                        + indexedTerms.size() + " fields.size: " + fields.size());
        }
        if (proceed) {
            if (log.isDebugEnabled()) {
                log.debug(hash + " Performing optimized query");
            }
            // Use the scan ranges from the GlobalIndexRanges object as the ranges for the batch scanner
            ranges = termIndexInfo.getRanges();
            if (log.isDebugEnabled()) {
                log.info(hash + " Ranges: count: " + ranges.size() + ", " + ranges.toString());
            }

            // Create BatchScanner, set the ranges, and setup the iterators.
            optimizedEventQuery.start();
            BatchScanner bs = null;
            try {
                bs = connector.createBatchScanner(this.getTableName(), auths, queryThreads);
                bs.setRanges(ranges);
                IteratorSetting si = new IteratorSetting(21, "eval", OptimizedQueryIterator.class);

                if (log.isDebugEnabled()) {
                    log.debug("Setting scan option: " + EvaluatingIterator.QUERY_OPTION + " to " + queryString);
                }
                // Set the query option
                si.addOption(EvaluatingIterator.QUERY_OPTION, queryString);
                // Set the Indexed Terms List option. This is the field name and normalized field value pair
                // separated by a comma.
                StringBuilder buf = new StringBuilder();
                String sep = "";
                for (Entry<String, String> entry : termIndexInfo.getFieldNamesAndValues().entries()) {
                    buf.append(sep);
                    buf.append(entry.getKey());
                    buf.append(":");
                    buf.append(termIndexInfo.getIndexValuesToOriginalValues().get(entry.getValue()));
                    buf.append(":");
                    buf.append(entry.getValue());
                    if (sep.equals("")) {
                        sep = ";";
                    }
                }
                if (log.isDebugEnabled()) {
                    log.debug("Setting scan option: " + FieldIndexQueryReWriter.INDEXED_TERMS_LIST + " to "
                            + buf.toString());
                }
                FieldIndexQueryReWriter rewriter = new FieldIndexQueryReWriter();
                String q = "";
                try {
                    q = queryString;
                    q = rewriter.applyCaseSensitivity(q, true, false); // Set upper/lower case for fieldname/fieldvalue
                    Map<String, String> opts = new HashMap<String, String>();
                    opts.put(FieldIndexQueryReWriter.INDEXED_TERMS_LIST, buf.toString());
                    q = rewriter.removeNonIndexedTermsAndInvalidRanges(q, opts);
                    q = rewriter.applyNormalizedTerms(q, opts);
                    if (log.isDebugEnabled()) {
                        log.debug("runServerQuery, FieldIndex Query: " + q);
                    }
                } catch (org.apache.commons.jexl2.parser.ParseException ex) {
                    log.error("Could not parse query, Jexl ParseException: " + ex);
                } catch (Exception ex) {
                    log.error("Problem rewriting query, Exception: " + ex.getMessage());
                }
                si.addOption(BooleanLogicIterator.FIELD_INDEX_QUERY, q);

                // Set the term cardinality option
                sep = "";
                buf.delete(0, buf.length());
                for (Entry<String, Long> entry : termIndexInfo.getTermCardinality().entrySet()) {
                    buf.append(sep);
                    buf.append(entry.getKey());
                    buf.append(":");
                    buf.append(entry.getValue());
                    sep = ",";
                }
                if (log.isDebugEnabled())
                    log.debug("Setting scan option: " + BooleanLogicIterator.TERM_CARDINALITIES + " to "
                            + buf.toString());
                si.addOption(BooleanLogicIterator.TERM_CARDINALITIES, buf.toString());
                if (this.useReadAheadIterator) {
                    if (log.isDebugEnabled()) {
                        log.debug("Enabling read ahead iterator with queue size: " + this.readAheadQueueSize
                                + " and timeout: " + this.readAheadTimeOut);
                    }
                    si.addOption(ReadAheadIterator.QUEUE_SIZE, this.readAheadQueueSize);
                    si.addOption(ReadAheadIterator.TIMEOUT, this.readAheadTimeOut);
                }

                if (null != unevaluatedExpressions) {
                    StringBuilder unevaluatedExpressionList = new StringBuilder();
                    String sep2 = "";
                    for (String exp : unevaluatedExpressions) {
                        unevaluatedExpressionList.append(sep2).append(exp);
                        sep2 = ",";
                    }
                    if (log.isDebugEnabled())
                        log.debug("Setting scan option: " + EvaluatingIterator.UNEVALUTED_EXPRESSIONS + " to "
                                + unevaluatedExpressionList.toString());
                    si.addOption(EvaluatingIterator.UNEVALUTED_EXPRESSIONS,
                            unevaluatedExpressionList.toString());
                }

                bs.addScanIterator(si);

                processResults.start();
                processResults.suspend();
                long count = 0;
                for (Entry<Key, Value> entry : bs) {
                    count++;
                    // The key that is returned by the EvaluatingIterator is not the same key that is in
                    // the table. The value that is returned by the EvaluatingIterator is a kryo
                    // serialized EventFields object.
                    processResults.resume();
                    Document d = this.createDocument(entry.getKey(), entry.getValue());
                    results.getResults().add(d);
                    processResults.suspend();
                }
                log.info(count + " matching entries found in optimized query.");
                optimizationSucceeded = true;
                processResults.stop();
            } catch (TableNotFoundException e) {
                log.error(this.getTableName() + " not found", e);
                throw new RuntimeException(this.getIndexTableName() + " not found", e);
            } finally {
                if (bs != null) {
                    bs.close();
                }
            }
            optimizedEventQuery.stop();
        }
        optimizedQuery.stop();
    }

    // We should look into finding a better way to handle whether we do an optimized query or not.
    // We are not setting up an else condition here because we may have aborted the logic early in the if statement.
    if (!optimizationSucceeded || ((null != orTerms && orTerms.size() > 0)
            && (indexedTerms.size() != fields.size()) && !orsAllIndexed)) {
        // if (!optimizationSucceeded || ((null != orTerms && orTerms.size() > 0) && (indexedTerms.size() != fields.size()))) {
        fullScanQuery.start();
        if (log.isDebugEnabled()) {
            log.debug(hash + " Performing full scan query");
        }

        // Set up a full scan using the date ranges from the query
        // Create BatchScanner, set the ranges, and setup the iterators.
        BatchScanner bs = null;
        try {
            // The ranges are the start and end dates
            Collection<Range> r = getFullScanRange(beginDate, endDate, terms);
            ranges.addAll(r);

            if (log.isDebugEnabled()) {
                log.debug(hash + " Ranges: count: " + ranges.size() + ", " + ranges.toString());
            }

            bs = connector.createBatchScanner(this.getTableName(), auths, queryThreads);
            bs.setRanges(ranges);
            IteratorSetting si = new IteratorSetting(22, "eval", EvaluatingIterator.class);
            // Create datatype regex if needed
            if (null != typeFilter) {
                StringBuilder buf = new StringBuilder();
                String s = "";
                for (String type : typeFilter) {
                    buf.append(s).append(type).append(".*");
                    s = "|";
                }
                if (log.isDebugEnabled())
                    log.debug("Setting colf regex iterator to: " + buf.toString());
                IteratorSetting ri = new IteratorSetting(21, "typeFilter", RegExFilter.class);
                RegExFilter.setRegexs(ri, null, buf.toString(), null, null, false);
                bs.addScanIterator(ri);
            }
            if (log.isDebugEnabled()) {
                log.debug("Setting scan option: " + EvaluatingIterator.QUERY_OPTION + " to " + queryString);
            }
            si.addOption(EvaluatingIterator.QUERY_OPTION, queryString);
            if (null != unevaluatedExpressions) {
                StringBuilder unevaluatedExpressionList = new StringBuilder();
                String sep2 = "";
                for (String exp : unevaluatedExpressions) {
                    unevaluatedExpressionList.append(sep2).append(exp);
                    sep2 = ",";
                }
                if (log.isDebugEnabled())
                    log.debug("Setting scan option: " + EvaluatingIterator.UNEVALUTED_EXPRESSIONS + " to "
                            + unevaluatedExpressionList.toString());
                si.addOption(EvaluatingIterator.UNEVALUTED_EXPRESSIONS, unevaluatedExpressionList.toString());
            }
            bs.addScanIterator(si);
            long count = 0;
            processResults.start();
            processResults.suspend();
            for (Entry<Key, Value> entry : bs) {
                count++;
                // The key that is returned by the EvaluatingIterator is not the same key that is in
                // the partition table. The value that is returned by the EvaluatingIterator is a kryo
                // serialized EventFields object.
                processResults.resume();
                Document d = this.createDocument(entry.getKey(), entry.getValue());
                results.getResults().add(d);
                processResults.suspend();
            }
            processResults.stop();
            log.info(count + " matching entries found in full scan query.");
        } catch (TableNotFoundException e) {
            log.error(this.getTableName() + " not found", e);
        } finally {
            if (bs != null) {
                bs.close();
            }
        }
        fullScanQuery.stop();
    }

    log.info("AbstractQueryLogic: " + queryString + " " + timeString(abstractQueryLogic.getTime()));
    log.info(" 1) parse query " + timeString(parseQuery.getTime()));
    log.info(" 2) query metadata " + timeString(queryMetadata.getTime()));
    log.info(" 3) full scan query " + timeString(fullScanQuery.getTime()));
    log.info(" 3) optimized query " + timeString(optimizedQuery.getTime()));
    log.info(" 1) process results " + timeString(processResults.getTime()));
    log.info(" 1) query global index " + timeString(queryGlobalIndex.getTime()));
    log.info(hash + " Query completed.");

    return results;
}
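The example above hand-rolls separator-delimited key:value strings from entries() with a StringBuilder. For the simple two-part case, Guava's Joiner.MapJoiner accepts any Iterable of Map.Entry, so it can consume Multimap.entries() directly; a minimal sketch with made-up field names (not from the Accumulo source):

import com.google.common.base.Joiner;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;

public class EntriesJoinDemo {
    public static void main(String[] args) {
        Multimap<String, String> fieldValues = HashMultimap.create();
        fieldValues.put("COLOR", "red");
        fieldValues.put("COLOR", "blue");
        fieldValues.put("SHAPE", "circle");

        // MapJoiner.join accepts an Iterable of Map.Entry, so entries() works directly
        String optionValue = Joiner.on(';').withKeyValueSeparator(":").join(fieldValues.entries());
        System.out.println(optionValue); // e.g. COLOR:red;COLOR:blue;SHAPE:circle (HashMultimap order is unspecified)
    }
}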
From source file:org.apache.pulsar.broker.loadbalance.impl.SimpleLoadManagerImpl.java
/**
 * Assign owner for specified ServiceUnit from the given candidates, following these principles:
 * 1) Optimum distribution: fill up one broker till its load reaches optimum level (defined by underload
 *    threshold) before pulling another idle broker in;
 * 2) Even distribution: once all brokers' load are above optimum level, maintain all brokers to have even load;
 * 3) Set the underload threshold to a small value (like 1) for pure even distribution, and a high value (like 80)
 *    for pure optimum distribution.
 *
 * Strategy to select broker:
 * 1) The first choice is the least loaded broker which is underloaded but not idle;
 * 2) The second choice is an idle broker (if there is any);
 * 3) Otherwise simply select the least loaded broker if it is NOT overloaded;
 * 4) If all brokers are overloaded, select the broker with maximum available capacity (considering brokers could
 *    have different hardware configurations, this usually means selecting the broker with more hardware resources).
 *
 * Broker's load level:
 * 1) Load ranking (triggered by LoadReport update) estimates the load level according to the resource usage and
 *    namespace bundles already loaded by each broker;
 * 2) When the leader broker decides the owner for a new namespace bundle, it may take time for the real owner to
 *    actually load the bundle and refresh its LoadReport, so the leader broker stores the bundle in a list called
 *    preAllocatedBundles, stores the quota of all preAllocatedBundles in preAllocatedQuotas, and re-estimates the
 *    broker's load level by putting the preAllocatedQuota into the calculation;
 * 3) Everything (preAllocatedBundles and preAllocatedQuotas) gets reset during load ranking.
 */
private synchronized ResourceUnit findBrokerForPlacement(Multimap<Long, ResourceUnit> candidates,
        ServiceUnitId serviceUnit) {
    long underloadThreshold = this.getLoadBalancerBrokerUnderloadedThresholdPercentage();
    long overloadThreshold = this.getLoadBalancerBrokerOverloadedThresholdPercentage();
    ResourceQuota defaultQuota = pulsar.getLocalZkCacheService().getResourceQuotaCache().getDefaultQuota();

    double minLoadPercentage = 101.0;
    long maxAvailability = -1;
    ResourceUnit idleRU = null;
    ResourceUnit maxAvailableRU = null;
    ResourceUnit randomRU = null;

    ResourceUnit selectedRU = null;
    ResourceUnitRanking selectedRanking = null;
    String serviceUnitId = serviceUnit.toString();

    // If the ranking is expected to be in the range [0,100] (which is the case for LOADBALANCER_STRATEGY_LLS),
    // the ranks are bounded. Otherwise (as is the case for LOADBALANCER_STRATEGY_LEAST_MSG), the ranks are simply
    // the total message rate, which is in the range [0,Infinity), so they are unbounded. The
    // "boundedness" affects how two ranks are compared to see which one is better.
    boolean unboundedRanks = getLoadBalancerPlacementStrategy().equals(LOADBALANCER_STRATEGY_LEAST_MSG);
    long randomBrokerIndex = (candidates.size() > 0) ? (this.brokerRotationCursor % candidates.size()) : 0;

    // find the least loaded & not-idle broker
    for (Map.Entry<Long, ResourceUnit> candidateOwner : candidates.entries()) {
        ResourceUnit candidate = candidateOwner.getValue();
        randomBrokerIndex--;

        // skip broker which is not ranked; this should never happen except in unit tests
        if (!resourceUnitRankings.containsKey(candidate)) {
            continue;
        }
        String resourceUnitId = candidate.getResourceId();
        ResourceUnitRanking ranking = resourceUnitRankings.get(candidate);

        // check if this ServiceUnit is already loaded
        if (ranking.isServiceUnitLoaded(serviceUnitId)) {
            ranking.removeLoadedServiceUnit(serviceUnitId, this.getResourceQuota(serviceUnitId));
        }

        // record a random broker
        if (randomBrokerIndex < 0 && randomRU == null) {
            randomRU = candidate;
        }

        // check the available capacity
        double loadPercentage = ranking.getEstimatedLoadPercentage();
        double availablePercentage = Math.max(0, (100 - loadPercentage) / 100);
        long availability = (long) (ranking.estimateMaxCapacity(defaultQuota) * availablePercentage);
        if (availability > maxAvailability) {
            maxAvailability = availability;
            maxAvailableRU = candidate;
        }

        // check the load percentage
        if (ranking.isIdle()) {
            if (idleRU == null) {
                idleRU = candidate;
            }
        } else {
            if (selectedRU == null) {
                selectedRU = candidate;
                selectedRanking = ranking;
                minLoadPercentage = loadPercentage;
            } else {
                if ((unboundedRanks ? ranking.compareMessageRateTo(selectedRanking)
                        : ranking.compareTo(selectedRanking)) < 0) {
                    minLoadPercentage = loadPercentage;
                    selectedRU = candidate;
                    selectedRanking = ranking;
                }
            }
        }
    }

    if ((minLoadPercentage > underloadThreshold && idleRU != null) || selectedRU == null) {
        // assign to an idle broker if the least loaded broker already has optimum load (which means NOT
        // underloaded), or all brokers are idle
        selectedRU = idleRU;
    } else if (minLoadPercentage >= 100.0 && randomRU != null && !unboundedRanks) {
        // all brokers are full, assign to a random one
        selectedRU = randomRU;
    } else if (minLoadPercentage > overloadThreshold && !unboundedRanks) {
        // assign to the broker with maximum available capacity if all brokers are overloaded
        selectedRU = maxAvailableRU;
    }

    // re-calculate load level for selected broker
    if (selectedRU != null) {
        this.brokerRotationCursor = (this.brokerRotationCursor + 1) % 1000000;
        ResourceUnitRanking ranking = resourceUnitRankings.get(selectedRU);
        String loadPercentageDesc = ranking.getEstimatedLoadPercentageString();
        log.info("Assign {} to {} with ({}).", serviceUnitId, selectedRU.getResourceId(), loadPercentageDesc);
        if (!ranking.isServiceUnitPreAllocated(serviceUnitId)) {
            final String namespaceName = LoadManagerShared.getNamespaceNameFromBundleName(serviceUnitId);
            final String bundleRange = LoadManagerShared.getBundleRangeFromBundleName(serviceUnitId);
            ResourceQuota quota = this.getResourceQuota(serviceUnitId);
            // Add preallocated bundle range so incoming bundles from the same namespace are not assigned to the
            // same broker.
            brokerToNamespaceToBundleRange
                    .computeIfAbsent(selectedRU.getResourceId().replace("http://", ""), k -> new HashMap<>())
                    .computeIfAbsent(namespaceName, k -> new HashSet<>()).add(bundleRange);
            ranking.addPreAllocatedServiceUnit(serviceUnitId, quota);
            resourceUnitRankings.put(selectedRU, ranking);
        }
    }
    return selectedRU;
}
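Here the Long key of the candidates multimap is a rank, and entries() walks the candidates grouped under each rank. With a sorted implementation such as TreeMultimap, entries() additionally visits keys in ascending order; a minimal sketch with invented rank values (the Pulsar source does not show which implementation it passes in):

import com.google.common.collect.TreeMultimap;
import java.util.Map;

public class RankedCandidatesDemo {
    public static void main(String[] args) {
        // keys are sorted ascending, so lower (better) ranks come first in entries()
        TreeMultimap<Long, String> candidates = TreeMultimap.create();
        candidates.put(10L, "broker-2");
        candidates.put(10L, "broker-3"); // tied candidates share a key
        candidates.put(75L, "broker-1");

        for (Map.Entry<Long, String> e : candidates.entries()) {
            System.out.println("rank " + e.getKey() + ": " + e.getValue());
        }
        // rank 10: broker-2 / rank 10: broker-3 / rank 75: broker-1
    }
}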
From source file:gr.forth.ics.swkm.model2.importer.AbstractStore.java
private void storeImpl(Model model) throws SQLException {
    final Multimap<Resource, Triple> triples = HashMultimap.create();
    TripleListener listener = new TripleListener() {
        public void onTripleAddition(Resource namedGraph, Triple triple) {
        }

        public void onTripleDeletion(Resource namedGraph, Triple triple) {
            //System.out.println(triple);
            triples.put(namedGraph, triple);
        }
    };
    model.addTripleListener(listener);
    try {
        Inference.reduce(model);
        try {
            DbSynchronizer synchronizer = DbSynchronizer.forModel(model);
            KnownDbState dbState = synchronizer.synchronize(RdfSuite.IMPORTER_SIDE_EFFECTS);
            System.out.println("MODEL INSIDE STORE: " + model);
            Validator.defaultValidator().validateAndFailOnFirstError(model);
            UpdatedLabels labels = dbState.recalculateLabels();
            storeModel(model, labels);
        } catch (Throwable t) {
            throw new RuntimeException(t);
        } finally {
            for (Triple t : model.triples().g(RdfSuite.IMPORTER_SIDE_EFFECTS).fetch()) {
                t.delete();
            }
        }
    } finally {
        model.removeTripleListener(listener);
        for (Entry<Resource, Triple> entry : triples.entries()) {
            Triple t = entry.getValue();
            model.add(entry.getKey(), t.subject(), t.predicate(), t.object());
        }
    }
}
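The finally block above replays every captured (namedGraph, triple) mapping back into the model. The same capture-and-replay idiom in isolation, with types simplified to strings purely for illustration, might look like:

import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;
import java.util.Map;

public class CaptureAndReplayDemo {
    public static void main(String[] args) {
        // capture: record which value was removed under which key
        Multimap<String, String> removed = HashMultimap.create();
        removed.put("graph1", "tripleA");
        removed.put("graph1", "tripleB");
        removed.put("graph2", "tripleC");

        // replay: entries() preserves the key alongside every captured value,
        // so each item can be restored to the right bucket
        Multimap<String, String> store = HashMultimap.create();
        for (Map.Entry<String, String> entry : removed.entries()) {
            store.put(entry.getKey(), entry.getValue());
        }
        System.out.println(store.get("graph1")); // [tripleA, tripleB]
    }
}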
From source file:com.dpbymqn.fsm.manager.FsmManager.java
private String transitAtomic(StatefulObject st, String toState) {
    String currentState = getState(st);
    String nextState = toState;

    // do pre-callbacks
    Collection<TransitionCallback> preCallbacks = new ArrayList<TransitionCallback>();
    synchronized (classPreTrCallbackMap) {
        for (Class<?> clz : lazySuper.get(st.getClass())) {
            final Map<String, Multimap<String, TransitionCallback>> clzPreStateFromMap = classPreTrCallbackMap
                    .get(clz);
            if (clzPreStateFromMap != null) {
                if (clzPreStateFromMap.get(null) != null || clzPreStateFromMap.get(currentState) != null) {
                    Multimap<String, TransitionCallback> clzStateToMap = LinkedHashMultimap.create();
                    Multimap<String, TransitionCallback> clzStateToMap1 = clzPreStateFromMap.get(currentState);
                    Multimap<String, TransitionCallback> clzStateToMap0 = clzPreStateFromMap.get(null);
                    if (clzStateToMap1 != null) {
                        clzStateToMap.putAll(clzStateToMap1);
                    }
                    if (clzStateToMap0 != null) {
                        clzStateToMap.putAll(clzStateToMap0);
                    }
                    if (clzStateToMap.containsKey(nextState) && clzStateToMap.get(nextState) != null) {
                        preCallbacks.addAll(clzStateToMap.get(nextState));
                    }
                    if (clzStateToMap.get(null) != null) {
                        preCallbacks.addAll(clzStateToMap.get(null));
                    }
                }
            }
        }
    }
    synchronized (instancePreTrCallbackMap) {
        if (instancePreTrCallbackMap.containsKey(st)) {
            for (final Map<String, Multimap<String, TransitionCallback>> instPreStateFromMap : instancePreTrCallbackMap
                    .get(st).values()) {
                if (instPreStateFromMap != null) {
                    if (instPreStateFromMap.get(null) != null
                            || instPreStateFromMap.get(currentState) != null) {
                        Multimap<String, TransitionCallback> instStateToMap = LinkedHashMultimap.create();
                        Multimap<String, TransitionCallback> instStateToMap1 = instPreStateFromMap
                                .get(currentState);
                        Multimap<String, TransitionCallback> instStateToMap0 = instPreStateFromMap.get(null);
                        if (instStateToMap1 != null) {
                            instStateToMap.putAll(instStateToMap1);
                        }
                        if (instStateToMap0 != null) {
                            instStateToMap.putAll(instStateToMap0);
                        }
                        if (instStateToMap.containsKey(nextState) && instStateToMap.get(nextState) != null) {
                            preCallbacks.addAll(instStateToMap.get(nextState));
                        }
                        if (instStateToMap.get(null) != null) {
                            preCallbacks.addAll(instStateToMap.get(null));
                        }
                    }
                }
            }
        }
    }
    // - call pre callbacks
    // this "if" would prevent calling Pre methods when the transition is triggered by the newly registered listener(s)
    // if (currentState == null || !currentState.equals(nextState)) {
    for (TransitionCallback trCallback : preCallbacks) {
        trCallback.onTransition(st, currentState, nextState);
    }
    // }
    // move to next state
    setState(st, nextState);

    // post-callbacks
    // - collect post callbacks
    Collection<TransitionCallback> postCallbacks = new ArrayList<TransitionCallback>();
    synchronized (classPostTrCallbackMap) {
        for (Class<?> clz : lazySuper.get(st.getClass())) {
            final Map<String, Multimap<String, TransitionCallback>> clzPostStateFromMap = classPostTrCallbackMap
                    .get(clz);
            if (clzPostStateFromMap != null) {
                if (clzPostStateFromMap.get(null) != null || clzPostStateFromMap.get(currentState) != null) {
                    Multimap<String, TransitionCallback> clzStateToMap = LinkedHashMultimap.create();
                    Multimap<String, TransitionCallback> clzStateToMap1 = clzPostStateFromMap.get(currentState);
                    Multimap<String, TransitionCallback> clzStateToMap0 = clzPostStateFromMap.get(null);
                    if (clzStateToMap1 != null) {
                        clzStateToMap.putAll(clzStateToMap1);
                    }
                    if (clzStateToMap0 != null) {
                        clzStateToMap.putAll(clzStateToMap0);
                    }
                    if (clzStateToMap.containsKey(nextState) && clzStateToMap.get(nextState) != null) {
                        postCallbacks.addAll(clzStateToMap.get(nextState));
                    }
                    if (clzStateToMap.get(null) != null) {
                        postCallbacks.addAll(clzStateToMap.get(null));
                    }
                }
            }
        }
    }
    synchronized (instancePostTrCallbackMap) {
        if (instancePostTrCallbackMap.containsKey(st)) {
            for (final Map<String, Multimap<String, TransitionCallback>> instPostStateFromMap : instancePostTrCallbackMap
                    .get(st).values()) {
                if (instPostStateFromMap != null) {
                    if (instPostStateFromMap.get(null) != null
                            || instPostStateFromMap.get(currentState) != null) {
                        Multimap<String, TransitionCallback> instStateToMap = LinkedHashMultimap.create();
                        Multimap<String, TransitionCallback> instStateToMap1 = instPostStateFromMap
                                .get(currentState);
                        Multimap<String, TransitionCallback> instStateToMap0 = instPostStateFromMap.get(null);
                        if (instStateToMap1 != null) {
                            instStateToMap.putAll(instStateToMap1);
                        }
                        if (instStateToMap0 != null) {
                            instStateToMap.putAll(instStateToMap0);
                        }
                        if (instStateToMap.containsKey(nextState) && instStateToMap.get(nextState) != null) {
                            postCallbacks.addAll(instStateToMap.get(nextState));
                        }
                        if (instStateToMap.get(null) != null) {
                            postCallbacks.addAll(instStateToMap.get(null));
                        }
                    }
                }
            }
        }
    }
    // - call post callbacks
    for (TransitionCallback trCallback : postCallbacks) {
        trCallback.onTransition(st, currentState, nextState);
    }

    // check decisions
    Set<String> decisions = new HashSet<String>();
    // collect decision callbacks
    Multimap<String, DecisionCallback> possibleStates = HashMultimap.create();
    synchronized (classDecCallbackMap) {
        for (Class<?> clz : lazySuper.get(st.getClass())) {
            final Map<String, Multimap<String, DecisionCallback>> clzDecStateFromMap = classDecCallbackMap
                    .get(clz);
            if (clzDecStateFromMap != null) {
                if (clzDecStateFromMap.get(null) != null || clzDecStateFromMap.get(nextState) != null) {
                    Multimap<String, DecisionCallback> clzStateToMap1 = clzDecStateFromMap.get(nextState);
                    Multimap<String, DecisionCallback> clzStateToMap0 = clzDecStateFromMap.get(null);
                    if (clzStateToMap1 != null) {
                        possibleStates.putAll(clzStateToMap1);
                    }
                    if (clzStateToMap0 != null) {
                        possibleStates.putAll(clzStateToMap0);
                    }
                }
            }
        }
    }
    synchronized (instanceDecCallbackMap) {
        if (instanceDecCallbackMap.containsKey(st)) {
            for (final Map<String, Multimap<String, DecisionCallback>> instDecStateFromMap : instanceDecCallbackMap
                    .get(st).values()) {
                if (instDecStateFromMap != null) {
                    if (instDecStateFromMap.get(null) != null || instDecStateFromMap.get(nextState) != null) {
                        Multimap<String, DecisionCallback> instStateToMap1 = instDecStateFromMap.get(nextState);
                        Multimap<String, DecisionCallback> instStateToMap0 = instDecStateFromMap.get(null);
                        if (instStateToMap1 != null) {
                            possibleStates.putAll(instStateToMap1);
                        }
                        if (instStateToMap0 != null) {
                            possibleStates.putAll(instStateToMap0);
                        }
                    }
                }
            }
        }
    }
    // - evaluate decision callbacks
    for (Map.Entry<String, DecisionCallback> e : possibleStates.entries()) {
        DecisionCallback decCallback = e.getValue();
        String intoState = e.getKey();
        String suggest = decCallback.query(st, nextState);
        if (suggest != null) {
            decisions.add(suggest);
        } else {
            if (intoState != null && !intoState.equals(nextState)) {
                final Boolean query = decCallback.query(st, nextState, intoState);
                if (query != null && query) {
                    decisions.add(intoState);
                }
            }
        }
    }
    if (decisions.size() == 1) {
        return decisions.iterator().next();
    }
    return null;
}
From source file:org.openmicroscopy.shoola.agents.measurement.view.MeasurementViewerComponent.java
/**
 * Implemented as specified by the {@link MeasurementViewer} interface.
 * @see MeasurementViewer#tagSelectedFigures()
 */
public void tagSelectedFigures(List<AnnotationData> tags) {
    Collection<Figure> figures = view.getSelectedFiguresFromTables();
    if (CollectionUtils.isEmpty(figures)) {
        return;
    }
    List<ROIShape> shapes = new ArrayList<ROIShape>();
    Iterator<Figure> kk = figures.iterator();
    ROIFigure fig;
    while (kk.hasNext()) {
        fig = (ROIFigure) kk.next();
        shapes.add(fig.getROIShape());
    }
    if (CollectionUtils.isEmpty(shapes))
        return;
    Multimap<Long, AnnotationData> m = ArrayListMultimap.create();
    Iterator<AnnotationData> j = tags.iterator();
    AnnotationData an;
    while (j.hasNext()) {
        an = j.next();
        m.put(an.getId(), an);
    }
    Iterator<ROIShape> i = shapes.iterator();
    ROIShape shape;
    StructuredDataResults data;
    List<DataObject> objects = new ArrayList<DataObject>();
    ShapeData d;
    Map<Long, AnnotationData> mo = new HashMap<Long, AnnotationData>();
    while (i.hasNext()) {
        shape = i.next();
        d = shape.getData();
        if (d != null && d.getId() > 0) {
            objects.add(d);
            data = (StructuredDataResults) shape.getFigure().getAttribute(AnnotationKeys.TAG);
            if (data != null && CollectionUtils.isNotEmpty(data.getTags())) {
                Collection<TagAnnotationData> t = data.getTags();
                Iterator<TagAnnotationData> tt = t.iterator();
                while (tt.hasNext()) {
                    TagAnnotationData tag = tt.next();
                    if (!mo.containsKey(tag.getId())) {
                        mo.put(tag.getId(), tag);
                    }
                }
            }
        }
    }
    if (objects.isEmpty()) {
        UserNotifier un = MeasurementAgent.getRegistry().getUserNotifier();
        un.notifyInfo("ROI Annotations", "You must save the ROI before annotating it.");
        return;
    }
    // Now we prepare the list of annotations to add or remove
    List<AnnotationData> toAdd = new ArrayList<AnnotationData>();
    List<Object> toRemove = new ArrayList<Object>();
    if (CollectionUtils.isNotEmpty(m.get(-1L))) {
        toAdd.addAll(m.removeAll(-1L));
    }
    Iterator<Entry<Long, AnnotationData>> k = m.entries().iterator();
    Entry<Long, AnnotationData> e;
    while (k.hasNext()) {
        e = k.next();
        Long id = e.getKey();
        if (!mo.containsKey(id)) {
            toAdd.add(e.getValue());
        }
    }
    k = mo.entrySet().iterator();
    while (k.hasNext()) {
        e = k.next();
        Long id = e.getKey();
        if (!m.containsKey(id)) {
            toRemove.add(e.getValue());
        }
    }
    model.fireAnnotationSaving(objects, toAdd, toRemove);
}
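The method above leans on two Multimap operations worth isolating: removeAll(key) removes a key and hands back all of its values in one call, and entries() then iterates only what remains. A small sketch with invented tag data (the -1 sentinel mirrors the usage above, where it marks not-yet-persisted annotations):

import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap;
import java.util.Collection;
import java.util.Map;

public class RemoveAllThenEntriesDemo {
    public static void main(String[] args) {
        Multimap<Long, String> tagsById = ArrayListMultimap.create();
        tagsById.put(-1L, "newTag1"); // -1 marks tags that do not exist yet
        tagsById.put(-1L, "newTag2");
        tagsById.put(42L, "existingTag");

        // removeAll() both removes the key and returns its values
        Collection<String> toCreate = tagsById.removeAll(-1L);
        System.out.println(toCreate); // [newTag1, newTag2]

        // entries() now only sees the remaining mappings
        for (Map.Entry<Long, String> e : tagsById.entries()) {
            System.out.println(e.getKey() + " -> " + e.getValue()); // 42 -> existingTag
        }
    }
}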
From source file:com.b2international.snowowl.snomed.datastore.request.SnomedAssociationTargetUpdateRequest.java
private void updateAssociationTargets(final TransactionContext context, final Inactivatable component) {
    final List<SnomedAssociationRefSetMember> existingMembers = Lists
            .newArrayList(component.getAssociationRefSetMembers());
    final Multimap<AssociationType, String> newAssociationTargetsToCreate = HashMultimap
            .create(newAssociationTargets);

    final Iterator<SnomedAssociationRefSetMember> memberIterator = existingMembers.iterator();
    while (memberIterator.hasNext()) {
        final SnomedAssociationRefSetMember existingMember = memberIterator.next();
        final AssociationType associationType = AssociationType
                .getByConceptId(existingMember.getRefSetIdentifierId());
        if (null == associationType) {
            continue;
        }

        final String existingTargetId = existingMember.getTargetComponentId();
        if (newAssociationTargetsToCreate.remove(associationType, existingTargetId)) {
            // Exact match, just make sure that the member is active and remove it from the working list
            if (ensureMemberActive(context, existingMember)) {
                updateEffectiveTime(context, getLatestReleaseBranch(context), existingMember);
            }
            memberIterator.remove();
        }
    }

    for (final SnomedAssociationRefSetMember existingMember : existingMembers) {
        final AssociationType associationType = AssociationType
                .getByConceptId(existingMember.getRefSetIdentifierId());
        if (null == associationType) {
            continue;
        }

        if (newAssociationTargetsToCreate.containsKey(associationType)) {
            // We can re-use the member by changing the target component identifier, and checking that it is active
            final Iterator<String> targetIterator = newAssociationTargetsToCreate.get(associationType)
                    .iterator();
            final String newTargetId = targetIterator.next();
            targetIterator.remove();

            if (LOG.isDebugEnabled()) {
                LOG.debug(
                        "Changing association member {} with type {} and target component identifier from {} to {}.",
                        existingMember.getUuid(), associationType, existingMember.getTargetComponentId(),
                        newTargetId);
            }

            ensureMemberActive(context, existingMember);
            existingMember.setTargetComponentId(newTargetId);
            // Always check; we know that targetComponentId has changed
            updateEffectiveTime(context, getLatestReleaseBranch(context), existingMember);
        } else {
            // We have no use for this member -- remove or inactivate if already released
            removeOrDeactivate(context, existingMember);
        }
    }

    // With all existing members processed, any remaining entries in the multimap will need to be added as members
    for (final Entry<AssociationType, String> newAssociationEntry : newAssociationTargetsToCreate.entries()) {
        final SnomedAssociationRefSetMember member = SnomedComponents.newAssociationMember()
                .withRefSet(newAssociationEntry.getKey().getConceptId())
                .withTargetComponentId(newAssociationEntry.getValue())
                .withReferencedComponent(((Component) component).getId())
                .withModule(((Component) component).getModule().getId())
                .addTo(context);
        component.getAssociationRefSetMembers().add(member);
    }
}
From source file:no.ssb.jsonstat.v2.deser.DatasetDeserializer.java
@Override
public DatasetBuildable deserialize(JsonParser p, DeserializationContext ctxt) throws IOException {
    if (p.getCurrentToken() == JsonToken.START_OBJECT) {
        p.nextToken();
    }

    Set<String> ids = Collections.emptySet();
    List<Integer> sizes = Collections.emptyList();
    Multimap<String, String> roles = ArrayListMultimap.create();
    Map<String, Dimension.Builder> dims = Collections.emptyMap();
    List<Number> values = Collections.emptyList();

    DatasetBuilder builder = Dataset.create();
    Optional<String> version = Optional.empty();
    Optional<String> clazz = Optional.empty();
    Optional<ObjectNode> extension = Optional.empty();

    while (p.nextValue() != JsonToken.END_OBJECT) {
        switch (p.getCurrentName()) {
        case "label":
            builder.withLabel(_parseString(p, ctxt));
            break;
        case "source":
            builder.withSource(_parseString(p, ctxt));
            break;
        case "href":
            break;
        case "updated":
            Instant updated = parseEcmaDate(_parseString(p, ctxt));
            builder.updatedAt(updated);
            break;
        case "value":
            values = parseValues(p, ctxt);
            break;
        case "dimension":
            if (!version.orElse("1.x").equals("2.0")) {
                dims = Maps.newHashMap();
                // Deal with the id, size and role inside dimension.
                while (p.nextValue() != JsonToken.END_OBJECT) {
                    switch (p.getCurrentName()) {
                    case "id":
                        ids = p.readValueAs(ID_SET);
                        break;
                    case "size":
                        sizes = p.readValueAs(SIZE_LIST);
                        break;
                    case "role":
                        roles = p.readValueAs(ROLE_MULTIMAP);
                        break;
                    default:
                        dims.put(p.getCurrentName(), ctxt.readValue(p, Dimension.Builder.class));
                    }
                }
            } else {
                dims = p.readValueAs(DIMENSION_MAP);
            }
            break;
        case "id":
            ids = p.readValueAs(ID_SET);
            break;
        case "size":
            sizes = p.readValueAs(SIZE_LIST);
            break;
        case "role":
            roles = p.readValueAs(ROLE_MULTIMAP);
            break;
        case "extension":
            extension = Optional.of(ctxt.readValue(p, ObjectNode.class));
            break;
        case "link":
        case "status":
            // TODO
            p.skipChildren();
            break;
        case "version":
            version = Optional.of(_parseString(p, ctxt));
            break;
        case "class":
            // TODO
            clazz = Optional.of(_parseString(p, ctxt));
            break;
        default:
            boolean handled = ctxt.handleUnknownProperty(p, this, Dimension.Builder.class,
                    p.getCurrentName());
            if (!handled)
                p.skipChildren();
            break;
        }
    }

    // Setup roles
    for (Map.Entry<String, String> dimRole : roles.entries()) {
        Dimension.Roles role = Dimension.Roles.valueOf(dimRole.getKey().toUpperCase());
        Dimension.Builder dimension = checkNotNull(dims.get(dimRole.getValue()),
                "could not assign the role %s to the dimension %s. The dimension did not exist", role,
                dimRole.getValue());
        dimension.withRole(role);
    }

    List<Dimension.Builder> orderedDimensions = Lists.newArrayList();
    for (String dimensionName : ids) {
        orderedDimensions.add(dims.get(dimensionName));
    }

    // TODO: Check size?
    // Check ids and add to the data set.
    checkArgument(ids.size() == dims.size(), "dimension and size did not match");

    if (extension.isPresent()) {
        builder.withExtension(extension.get());
    }

    return builder.withDimensions(orderedDimensions).withValues(values);
}
From source file:com.b2international.snowowl.snomed.datastore.SnomedEditingContext.java
/**
 * This function deletes the objects in the deletion plan from the database.
 * For the sake of acceptable execution speed, the <code>remove(int index)</code> function is used
 * instead of <code>remove(Object)</code>. This way, CDO will not iterate through the large
 * number of data in the resources.
 */
private void delete() {
    // organize elements regarding their index
    final Multimap<Integer, EObject> itemMap = ArrayListMultimap.create();

    for (CDOObject item : deletionPlan.getDeletedItems()) {
        // Set bogus value here instead of letting it pass when trying to use it (it would silently remove
        // the first member of a list)
        int index = -1;
        if (item instanceof Concept) {
            // from unordered concept wrapper
            index = getIndexFromDatabase(item, (Concepts) item.eContainer(), "SNOMED_CONCEPTS_CONCEPTS_LIST");
        } else if (item instanceof SnomedRefSet) {
            // from cdoRootResource
            index = getIndexFromDatabase(item, item.cdoResource(), "ERESOURCE_CDORESOURCE_CONTENTS_LIST");
        } else if (item instanceof AttributeConstraint) {
            // from mrcm concept model
            index = getIndexFromDatabase(item, (ConceptModel) item.eContainer(),
                    "MRCM_CONCEPTMODEL_CONSTRAINTS_LIST");
        } else if (item instanceof SnomedRefSetMember) {
            // from the refset list
            final SnomedRefSetMember member = (SnomedRefSetMember) item;
            if (null == member.eContainer()) {
                // the reference set member has been detached from its container
                continue;
            }
            final SnomedRefSet refSet = member.getRefSet();
            if (!(refSet instanceof SnomedStructuralRefSet)) { // XXX: also includes the previous null check for refSet
                if (refSet instanceof SnomedMappingRefSet) {
                    index = getIndexFromDatabase(item, ((SnomedRefSetMember) item).getRefSet(),
                            "SNOMEDREFSET_SNOMEDMAPPINGREFSET_MEMBERS_LIST");
                } else if (refSet instanceof SnomedRegularRefSet) {
                    index = getIndexFromDatabase(item, ((SnomedRefSetMember) item).getRefSet(),
                            "SNOMEDREFSET_SNOMEDREGULARREFSET_MEMBERS_LIST");
                } else {
                    throw new RuntimeException("Unknown reference set type");
                }
            }
        }
        itemMap.put(index, item);
    }

    // iterate through the elements in reverse order
    for (Entry<Integer, EObject> toDelete : Ordering.from(new Comparator<Entry<Integer, EObject>>() {
        @Override
        public int compare(Entry<Integer, EObject> o1, Entry<Integer, EObject> o2) {
            return o1.getKey() - o2.getKey();
        }
    }).reverse().sortedCopy(itemMap.entries())) {
        final EObject eObject = toDelete.getValue();
        final int index = toDelete.getKey();
        if (eObject instanceof Concept) {
            final Concepts concepts = (Concepts) eObject.eContainer();
            concepts.getConcepts().remove(index);
        } else if (eObject instanceof SnomedRefSet) {
            refSetEditingContext.getContents().remove(index);
        } else if (eObject instanceof AttributeConstraint) {
            getConstraints().remove(index);
        } else if (eObject instanceof SnomedRefSetMember) {
            // get the refset and remove the member from its list
            SnomedRefSetMember member = (SnomedRefSetMember) eObject;
            SnomedRefSet refSet = member.getRefSet();
            if (refSet != null) {
                if (refSet instanceof SnomedStructuralRefSet) {
                    EcoreUtil.remove(member);
                } else if (refSet instanceof SnomedRegularRefSet) {
                    ((SnomedRegularRefSet) refSet).getMembers().remove(index);
                } else {
                    throw new IllegalStateException("Don't know how to remove member from reference set class '"
                            + refSet.eClass().getName() + "'.");
                }
            }
            // in case of relationship or description an index lookup is not necessary
        } else if (eObject instanceof Relationship) {
            Relationship relationship = (Relationship) eObject;
            relationship.setSource(null);
            relationship.setDestination(null);
        } else if (eObject instanceof Description) {
            Description description = (Description) eObject;
            // maybe description was already removed before save, so the delete is reflected on the ui
            if (description.getConcept() != null) {
                description.setConcept(null);
            }
        } else {
            throw new IllegalArgumentException("Don't know how to delete " + eObject.eClass());
        }
    }
}
From source file:org.jclouds.rest.internal.RestAnnotationProcessor.java
@Override
public GeneratedHttpRequest apply(Invocation invocation) {
    checkNotNull(invocation, "invocation");
    inputParamValidator.validateMethodParametersOrThrow(invocation);

    Optional<URI> endpoint = Optional.absent();
    HttpRequest r = findOrNull(invocation.getArgs(), HttpRequest.class);
    if (r != null) {
        endpoint = Optional.fromNullable(r.getEndpoint());
        if (endpoint.isPresent())
            logger.trace("using endpoint %s from invocation.getArgs() for %s", endpoint, invocation);
    } else if (caller != null) {
        endpoint = getEndpointFor(caller);
        if (endpoint.isPresent())
            logger.trace("using endpoint %s from caller %s for %s", endpoint, caller, invocation);
        else
            endpoint = findEndpoint(invocation);
    } else {
        endpoint = findEndpoint(invocation);
    }

    if (!endpoint.isPresent())
        throw new NoSuchElementException(format("no endpoint found for %s", invocation));

    GeneratedHttpRequest.Builder requestBuilder = GeneratedHttpRequest.builder().invocation(invocation)
            .caller(caller);
    String requestMethod = null;
    if (r != null) {
        requestMethod = r.getMethod();
        requestBuilder.fromHttpRequest(r);
    } else {
        requestMethod = tryFindHttpMethod(invocation.getInvokable()).get();
        requestBuilder.method(requestMethod);
    }
    requestBuilder.filters(getFiltersIfAnnotated(invocation));

    Multimap<String, Object> tokenValues = LinkedHashMultimap.create();
    tokenValues.put(Constants.PROPERTY_API_VERSION, apiVersion);
    tokenValues.put(Constants.PROPERTY_BUILD_VERSION, buildVersion);

    // URI template in rfc6570 form
    UriBuilder uriBuilder = uriBuilder(endpoint.get().toString());

    overridePathEncoding(uriBuilder, invocation);

    if (caller != null)
        tokenValues.putAll(addPathAndGetTokens(caller, uriBuilder));
    tokenValues.putAll(addPathAndGetTokens(invocation, uriBuilder));

    Multimap<String, Object> formParams;
    if (caller != null) {
        formParams = addFormParams(tokenValues, caller);
        formParams.putAll(addFormParams(tokenValues, invocation));
    } else {
        formParams = addFormParams(tokenValues, invocation);
    }

    Multimap<String, Object> queryParams = addQueryParams(tokenValues, invocation);
    Multimap<String, String> headers = buildHeaders(tokenValues, invocation);

    if (r != null)
        headers.putAll(r.getHeaders());

    if (shouldAddHostHeader(invocation)) {
        StringBuilder hostHeader = new StringBuilder(endpoint.get().getHost());
        if (endpoint.get().getPort() != -1)
            hostHeader.append(":").append(endpoint.get().getPort());
        headers.put(HOST, hostHeader.toString());
    }

    Payload payload = null;
    for (HttpRequestOptions options : findOptionsIn(invocation)) {
        injector.injectMembers(options); // TODO test case
        for (Entry<String, String> header : options.buildRequestHeaders().entries()) {
            headers.put(header.getKey(), replaceTokens(header.getValue(), tokenValues));
        }
        for (Entry<String, String> query : options.buildQueryParameters().entries()) {
            queryParams.put(query.getKey(), replaceTokens(query.getValue(), tokenValues));
        }
        for (Entry<String, String> form : options.buildFormParameters().entries()) {
            formParams.put(form.getKey(), replaceTokens(form.getValue(), tokenValues));
        }
        String pathSuffix = options.buildPathSuffix();
        if (pathSuffix != null) {
            uriBuilder.appendPath(pathSuffix);
        }
        String stringPayload = options.buildStringPayload();
        if (stringPayload != null)
            payload = Payloads.newStringPayload(stringPayload);
    }

    if (queryParams.size() > 0) {
        uriBuilder.query(queryParams);
    }

    requestBuilder.headers(filterOutContentHeaders(headers));
    requestBuilder.endpoint(uriBuilder.build(convertUnsafe(tokenValues)));

    if (payload == null) {
        PayloadEnclosing payloadEnclosing = findOrNull(invocation.getArgs(), PayloadEnclosing.class);
        payload = (payloadEnclosing != null) ? payloadEnclosing.getPayload()
                : findOrNull(invocation.getArgs(), Payload.class);
    }

    List<? extends Part> parts = getParts(invocation,
            ImmutableMultimap.<String, Object>builder().putAll(tokenValues).putAll(formParams).build());

    if (parts.size() > 0) {
        if (formParams.size() > 0) {
            parts = newLinkedList(concat(transform(formParams.entries(), ENTRY_TO_PART), parts));
        }
        payload = new MultipartForm(MultipartForm.BOUNDARY, parts);
    } else if (formParams.size() > 0) {
        payload = Payloads
                .newUrlEncodedFormPayload(transformValues(formParams, NullableToStringFunction.INSTANCE));
    } else if (headers.containsKey(CONTENT_TYPE) && !HttpRequest.NON_PAYLOAD_METHODS.contains(requestMethod)) {
        if (payload == null)
            payload = Payloads.newByteArrayPayload(new byte[] {});
        payload.getContentMetadata().setContentType(get(headers.get(CONTENT_TYPE), 0));
    }

    if (payload != null) {
        requestBuilder.payload(payload);
    }

    GeneratedHttpRequest request = requestBuilder.build();

    org.jclouds.rest.MapBinder mapBinder = getMapPayloadBinderOrNull(invocation);
    if (mapBinder != null) {
        Map<String, Object> mapParams;
        if (caller != null) {
            mapParams = buildPayloadParams(caller);
            mapParams.putAll(buildPayloadParams(invocation));
        } else {
            mapParams = buildPayloadParams(invocation);
        }
        if (invocation.getInvokable().isAnnotationPresent(PayloadParams.class)) {
            PayloadParams params = invocation.getInvokable().getAnnotation(PayloadParams.class);
            addMapPayload(mapParams, params, headers);
        }
        request = mapBinder.bindToRequest(request, mapParams);
    } else {
        request = decorateRequest(request);
    }

    if (request.getPayload() != null) {
        contentMetadataCodec.fromHeaders(request.getPayload().getContentMetadata(), headers);
    }
    utils.checkRequestHasRequiredProperties(request);
    return request;
}
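The request builder above accumulates token values, headers, query, and form parameters in LinkedHashMultimap instances, so entries() later replays repeated parameter names in insertion order. A reduced sketch of that idea with invented parameter names (not the jclouds API itself):

import com.google.common.collect.LinkedHashMultimap;
import com.google.common.collect.Multimap;
import java.util.Map;

public class OrderedParamsDemo {
    public static void main(String[] args) {
        Multimap<String, String> queryParams = LinkedHashMultimap.create();
        queryParams.put("api-version", "2.1");
        queryParams.put("tag", "alpha");
        queryParams.put("tag", "beta"); // repeated parameter names are common in query strings

        // entries() yields every (name, value) pair, duplicates included, in insertion order
        StringBuilder query = new StringBuilder();
        String sep = "";
        for (Map.Entry<String, String> e : queryParams.entries()) {
            query.append(sep).append(e.getKey()).append('=').append(e.getValue());
            sep = "&";
        }
        System.out.println(query); // api-version=2.1&tag=alpha&tag=beta
    }
}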
From source file:ai.grakn.graql.internal.reasoner.atom.binary.Relation.java
private Set<Pair<RelationPlayer, RelationPlayer>> getRelationPlayerMappings(Relation parentAtom) {
    Set<Pair<RelationPlayer, RelationPlayer>> rolePlayerMappings = new HashSet<>();

    // establish compatible castings for each parent casting
    Multimap<RelationPlayer, RelationPlayer> compatibleMappings = HashMultimap.create();
    parentAtom.getRoleRelationPlayerMap();
    Multimap<RoleType, RelationPlayer> childRoleRPMap = getRoleRelationPlayerMap();
    Map<Var, Type> parentVarTypeMap = parentAtom.getParentQuery().getVarTypeMap();
    Map<Var, Type> childVarTypeMap = this.getParentQuery().getVarTypeMap();

    Set<RoleType> relationRoles = new HashSet<>(getType().asRelationType().relates());
    Set<RoleType> childRoles = new HashSet<>(childRoleRPMap.keySet());

    parentAtom.getRelationPlayers().stream().filter(prp -> prp.getRoleType().isPresent()).forEach(prp -> {
        VarPatternAdmin parentRoleTypeVar = prp.getRoleType().orElse(null);
        TypeLabel parentRoleTypeLabel = parentRoleTypeVar.getTypeLabel().orElse(null);

        // TODO take into account indirect roles
        RoleType parentRole = parentRoleTypeLabel != null ? graph().getType(parentRoleTypeLabel) : null;

        if (parentRole != null) {
            boolean isMetaRole = Schema.MetaSchema.isMetaLabel(parentRole.getLabel());
            Var parentRolePlayer = prp.getRolePlayer().getVarName();
            Type parentType = parentVarTypeMap.get(parentRolePlayer);

            Set<RoleType> compatibleChildRoles = isMetaRole ? childRoles
                    : Sets.intersection(new HashSet<>(parentRole.subTypes()), childRoles);

            if (parentType != null) {
                boolean isMetaType = Schema.MetaSchema.isMetaLabel(parentType.getLabel());
                Set<RoleType> typeRoles = isMetaType ? childRoles : new HashSet<>(parentType.plays());

                // incompatible type
                if (Sets.intersection(relationRoles, typeRoles).isEmpty())
                    compatibleChildRoles = new HashSet<>();
                else {
                    compatibleChildRoles = compatibleChildRoles.stream()
                            .filter(rc -> Schema.MetaSchema.isMetaLabel(rc.getLabel())
                                    || typeRoles.contains(rc))
                            .collect(toSet());
                }
            }

            compatibleChildRoles.stream().filter(childRoleRPMap::containsKey).forEach(r -> {
                Collection<RelationPlayer> childRPs = parentType != null
                        ? childRoleRPMap.get(r).stream().filter(rp -> {
                            Var childRolePlayer = rp.getRolePlayer().getVarName();
                            Type childType = childVarTypeMap.get(childRolePlayer);
                            return childType == null || !checkTypesDisjoint(parentType, childType);
                        }).collect(toSet())
                        : childRoleRPMap.get(r);
                childRPs.forEach(rp -> compatibleMappings.put(prp, rp));
            });
        }
    });

    // self-consistent procedure until no non-empty mappings present
    while (compatibleMappings.asMap().values().stream().filter(s -> !s.isEmpty()).count() > 0) {
        Map.Entry<RelationPlayer, RelationPlayer> entry = compatibleMappings.entries().stream()
                // prioritise mappings with equivalent types and unambiguous mappings
                .sorted(Comparator.comparing(e -> {
                    Type parentType = parentVarTypeMap.get(e.getKey().getRolePlayer().getVarName());
                    Type childType = childVarTypeMap.get(e.getValue().getRolePlayer().getVarName());
                    return !(parentType != null && childType != null && parentType.equals(childType));
                }))
                // prioritise mappings with same var substitution (id predicates)
                .sorted(Comparator.comparing(e -> {
                    IdPredicate parentId = parentAtom.getIdPredicates().stream()
                            .filter(p -> p.getVarName().equals(e.getKey().getRolePlayer().getVarName()))
                            .findFirst().orElse(null);
                    IdPredicate childId = getIdPredicates().stream()
                            .filter(p -> p.getVarName().equals(e.getValue().getRolePlayer().getVarName()))
                            .findFirst().orElse(null);
                    return !(parentId != null && childId != null
                            && parentId.getPredicate().equals(childId.getPredicate()));
                }))
                .sorted(Comparator.comparing(e -> compatibleMappings.get(e.getKey()).size()))
                .findFirst().orElse(null);

        RelationPlayer parentCasting = entry.getKey();
        RelationPlayer childCasting = entry.getValue();

        rolePlayerMappings.add(new Pair<>(childCasting, parentCasting));
        compatibleMappings.removeAll(parentCasting);
        compatibleMappings.values().remove(childCasting);
    }
    return rolePlayerMappings;
}
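The loop above repeatedly picks the best (parent, child) pair out of compatibleMappings via entries() and then prunes both sides of the match. That pruning idiom, reduced to its Multimap essentials with illustrative names:

import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;

public class PruneMappingDemo {
    public static void main(String[] args) {
        Multimap<String, String> compatible = HashMultimap.create();
        compatible.put("parentA", "child1");
        compatible.put("parentA", "child2");
        compatible.put("parentB", "child1");

        // Fix the pair (parentA, child1), then prune:
        compatible.removeAll("parentA");      // parentA is matched; drop all of its remaining candidates
        compatible.values().remove("child1"); // child1 is taken; remove one occurrence from any other key

        System.out.println(compatible); // {} -- parentB lost its only remaining candidate
    }
}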