List of usage examples for java.util.Set.retainAll
boolean retainAll(Collection<?> c);
// Retains only the elements in this set that are contained in the specified
// collection (optional operation); returns true if this set changed as a result.
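Before the project examples below, a minimal self-contained sketch of the contract: retainAll removes every element of the set that is not contained in the argument collection, turning the receiver into the intersection of the two collections, and returns true if the set was modified.

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class RetainAllDemo {
    public static void main(String[] args) {
        Set<String> colors = new HashSet<>(Arrays.asList("red", "green", "blue"));
        // keep only the elements also present in the argument collection
        boolean changed = colors.retainAll(Arrays.asList("green", "blue", "yellow"));
        System.out.println(changed); // true: "red" was removed
        System.out.println(colors);  // [green, blue] in some order (HashSet is unordered)
    }
}

Because retainAll is an optional operation, it throws UnsupportedOperationException on unmodifiable sets (e.g. those from Collections.unmodifiableSet), which is why several of the examples below copy into a fresh HashSet, LinkedHashSet, or TreeSet before intersecting.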
From source file:org.dllearner.algorithms.qtl.experiments.PRConvergenceExperiment.java
public void run(int maxNrOfProcessedQueries, int maxTreeDepth, int[] exampleInterval, double[] noiseInterval, HeuristicType[] measures) throws Exception { this.maxTreeDepth = maxTreeDepth; queryTreeFactory.setMaxDepth(maxTreeDepth); if (exampleInterval != null) { nrOfExamplesIntervals = exampleInterval; }/*from ww w . j a va 2s. c o m*/ if (noiseInterval != null) { this.noiseIntervals = noiseInterval; } if (measures != null) { this.measures = measures; } boolean noiseEnabled = noiseIntervals.length > 1 || noiseInterval[0] > 0; boolean posOnly = noiseEnabled ? false : true; logger.info("Started QTL evaluation..."); long t1 = System.currentTimeMillis(); List<String> queries = dataset.getSparqlQueries().values().stream().map(q -> q.toString()) .collect(Collectors.toList()); logger.info("#loaded queries: " + queries.size()); // filter for debugging purposes queries = queries.stream().filter(q -> queriesToProcessTokens.stream().noneMatch(t -> !q.contains(t))) .collect(Collectors.toList()); queries = queries.stream().filter(q -> queriesToOmitTokens.stream().noneMatch(t -> q.contains(t))) .collect(Collectors.toList()); if (maxNrOfProcessedQueries == -1) { maxNrOfProcessedQueries = queries.size(); } // queries = filter(queries, (int) Math.ceil((double) maxNrOfProcessedQueries / maxTreeDepth)); // queries = queries.subList(0, Math.min(queries.size(), maxNrOfProcessedQueries)); logger.info("#queries to process: " + queries.size()); // generate examples for each query logger.info("precomputing pos. and neg. examples..."); for (String query : queries) {//if(!(query.contains("Borough_(New_York_City)")))continue; query2Examples.put(query, generateExamples(query, posOnly, noiseEnabled)); } logger.info("precomputing pos. and neg. examples finished."); // check for queries that do not return any result (should not happen, but we never know) Set<String> emptyQueries = query2Examples.entrySet().stream() .filter(e -> e.getValue().correctPosExampleCandidates.isEmpty()).map(e -> e.getKey()) .collect(Collectors.toSet()); logger.info("got {} empty queries.", emptyQueries.size()); queries.removeAll(emptyQueries); // min. pos examples int min = 3; Set<String> lowNrOfExamplesQueries = query2Examples.entrySet().stream() .filter(e -> e.getValue().correctPosExampleCandidates.size() < min).map(e -> e.getKey()) .collect(Collectors.toSet()); logger.info("got {} queries with < {} pos. 
examples.", emptyQueries.size(), min); queries.removeAll(lowNrOfExamplesQueries); queries = queries.subList(0, Math.min(80, queries.size())); final int totalNrOfQTLRuns = heuristics.length * this.measures.length * nrOfExamplesIntervals.length * noiseIntervals.length * queries.size(); logger.info("#QTL runs: " + totalNrOfQTLRuns); final AtomicInteger currentNrOfFinishedRuns = new AtomicInteger(0); // loop over heuristics for (final QueryTreeHeuristic heuristic : heuristics) { final String heuristicName = heuristic.getClass().getAnnotation(ComponentAnn.class).shortName(); // loop over heuristics measures for (HeuristicType measure : this.measures) { final String measureName = measure.toString(); heuristic.setHeuristicType(measure); double[][] data = new double[nrOfExamplesIntervals.length][noiseIntervals.length]; // loop over number of positive examples for (int i = 0; i < nrOfExamplesIntervals.length; i++) { final int nrOfExamples = nrOfExamplesIntervals[i]; // loop over noise value for (int j = 0; j < noiseIntervals.length; j++) { final double noise = noiseIntervals[j]; // check if not already processed File logFile = new File(benchmarkDirectory, "qtl2-" + nrOfExamples + "-" + noise + "-" + heuristicName + "-" + measureName + ".log"); File statsFile = new File(benchmarkDirectory, "qtl2-" + nrOfExamples + "-" + noise + "-" + heuristicName + "-" + measureName + ".stats"); if (!override && logFile.exists() && statsFile.exists()) { logger.info( "Eval config already processed. For re-running please remove corresponding output files."); continue; } FileAppender appender = null; try { appender = new FileAppender(new SimpleLayout(), logFile.getPath(), false); Logger.getRootLogger().addAppender(appender); } catch (IOException e) { e.printStackTrace(); } logger.info("#examples: " + nrOfExamples + " noise: " + noise); final DescriptiveStatistics nrOfReturnedSolutionsStats = new SynchronizedDescriptiveStatistics(); final DescriptiveStatistics baselinePrecisionStats = new SynchronizedDescriptiveStatistics(); final DescriptiveStatistics baselineRecallStats = new SynchronizedDescriptiveStatistics(); final DescriptiveStatistics baselineFMeasureStats = new SynchronizedDescriptiveStatistics(); final DescriptiveStatistics baselinePredAccStats = new SynchronizedDescriptiveStatistics(); final DescriptiveStatistics baselineMathCorrStats = new SynchronizedDescriptiveStatistics(); final DescriptiveStatistics bestReturnedSolutionPrecisionStats = new SynchronizedDescriptiveStatistics(); final DescriptiveStatistics bestReturnedSolutionRecallStats = new SynchronizedDescriptiveStatistics(); final DescriptiveStatistics bestReturnedSolutionFMeasureStats = new SynchronizedDescriptiveStatistics(); final DescriptiveStatistics bestReturnedSolutionPredAccStats = new SynchronizedDescriptiveStatistics(); final DescriptiveStatistics bestReturnedSolutionMathCorrStats = new SynchronizedDescriptiveStatistics(); final DescriptiveStatistics bestReturnedSolutionRuntimeStats = new SynchronizedDescriptiveStatistics(); final DescriptiveStatistics bestSolutionPrecisionStats = new SynchronizedDescriptiveStatistics(); final DescriptiveStatistics bestSolutionRecallStats = new SynchronizedDescriptiveStatistics(); final DescriptiveStatistics bestSolutionFMeasureStats = new SynchronizedDescriptiveStatistics(); final DescriptiveStatistics bestSolutionPredAccStats = new SynchronizedDescriptiveStatistics(); final DescriptiveStatistics bestSolutionMathCorrStats = new SynchronizedDescriptiveStatistics(); final DescriptiveStatistics 
bestSolutionPositionStats = new SynchronizedDescriptiveStatistics(); MonitorFactory.getTimeMonitor(TimeMonitors.CBD_RETRIEVAL.name()).reset(); MonitorFactory.getTimeMonitor(TimeMonitors.TREE_GENERATION.name()).reset(); ExecutorService tp = Executors.newFixedThreadPool(nrOfThreads); // indicates if the execution for some of the queries failed final AtomicBoolean failed = new AtomicBoolean(false); Set<String> queriesToProcess = new TreeSet<>(queries); queriesToProcess.retainAll(query2Examples.entrySet().stream() .filter(e -> e.getValue().correctPosExampleCandidates.size() >= nrOfExamples) .map(e -> e.getKey()).collect(Collectors.toSet())); // loop over SPARQL queries for (final String sparqlQuery : queriesToProcess) { CBDStructureTree cbdStructure = cbdStructureTree != null ? cbdStructureTree : QueryUtils.getOptimalCBDStructure(QueryFactory.create(sparqlQuery)); tp.submit(() -> { logger.info("CBD tree:" + cbdStructure.toStringVerbose()); // update max tree depth this.maxTreeDepth = QueryTreeUtils.getDepth(cbdStructure); logger.info("##############################################################"); logger.info("Processing query\n" + sparqlQuery); // we repeat it n times with different permutations of examples int nrOfPermutations = 1; if (nrOfExamples >= query2Examples.get(sparqlQuery).correctPosExampleCandidates .size()) { nrOfPermutations = 1; } for (int perm = 1; perm <= nrOfPermutations; perm++) { logger.info("Run {}/{}", perm, nrOfPermutations); try { ExamplesWrapper examples = getExamples(sparqlQuery, nrOfExamples, nrOfExamples, noise, cbdStructure); logger.info("pos. examples:\n" + Joiner.on("\n").join(examples.correctPosExamples)); logger.info("neg. examples:\n" + Joiner.on("\n").join(examples.correctNegExamples)); // write examples to disk File dir = new File(benchmarkDirectory, "data/" + hash(sparqlQuery)); dir.mkdirs(); Files.write(Joiner.on("\n").join(examples.correctPosExamples), new File(dir, "examples" + perm + "_" + nrOfExamples + "_" + noise + ".tp"), Charsets.UTF_8); Files.write(Joiner.on("\n").join(examples.correctNegExamples), new File(dir, "examples" + perm + "_" + nrOfExamples + "_" + noise + ".tn"), Charsets.UTF_8); Files.write(Joiner.on("\n").join(examples.falsePosExamples), new File(dir, "examples" + perm + "_" + nrOfExamples + "_" + noise + ".fp"), Charsets.UTF_8); // compute baseline RDFResourceTree baselineSolution = applyBaseLine(examples, Baseline.MOST_INFORMATIVE_EDGE_IN_EXAMPLES); logger.info("Evaluating baseline..."); Score baselineScore = computeScore(sparqlQuery, baselineSolution, noise); logger.info("Baseline score:\n" + baselineScore); String baseLineQuery = QueryTreeUtils.toSPARQLQueryString(baselineSolution, dataset.getBaseIRI(), dataset.getPrefixMapping()); baselinePrecisionStats.addValue(baselineScore.precision); baselineRecallStats.addValue(baselineScore.recall); baselineFMeasureStats.addValue(baselineScore.fmeasure); baselinePredAccStats.addValue(baselineScore.predAcc); baselineMathCorrStats.addValue(baselineScore.mathCorr); // run QTL PosNegLPStandard lp = new PosNegLPStandard(); lp.setPositiveExamples(examples.posExamplesMapping.keySet()); lp.setNegativeExamples(examples.negExamplesMapping.keySet()); // QTL2Disjunctive la = new QTL2Disjunctive(lp, qef); QTL2DisjunctiveMultiThreaded la = new QTL2DisjunctiveMultiThreaded(lp, qef); la.setRenderer(new org.dllearner.utilities.owl.DLSyntaxObjectRenderer()); la.setReasoner(dataset.getReasoner()); la.setEntailment(Entailment.SIMPLE); la.setTreeFactory(queryTreeFactory); 
la.setPositiveExampleTrees(examples.posExamplesMapping); la.setNegativeExampleTrees(examples.negExamplesMapping); la.setNoise(noise); la.setHeuristic(heuristic); la.setMaxExecutionTimeInSeconds(maxExecutionTimeInSeconds); la.setMaxTreeComputationTimeInSeconds(maxExecutionTimeInSeconds); la.init(); la.start(); List<EvaluatedRDFResourceTree> solutions = new ArrayList<>( la.getSolutions()); // List<EvaluatedRDFResourceTree> solutions = generateSolutions(examples, noise, heuristic); nrOfReturnedSolutionsStats.addValue(solutions.size()); // the best returned solution by QTL EvaluatedRDFResourceTree bestSolution = solutions.get(0); logger.info("Got " + solutions.size() + " query trees."); // logger.info("Best computed solution:\n" + render(bestSolution.asEvaluatedDescription())); logger.info("QTL Score:\n" + bestSolution.getTreeScore()); long runtimeBestSolution = la.getTimeBestSolutionFound(); bestReturnedSolutionRuntimeStats.addValue(runtimeBestSolution); // convert to SPARQL query RDFResourceTree tree = bestSolution.getTree(); tree = filter.apply(tree); String learnedSPARQLQuery = QueryTreeUtils.toSPARQLQueryString(tree, dataset.getBaseIRI(), dataset.getPrefixMapping()); // compute score Score score = computeScore(sparqlQuery, tree, noise); bestReturnedSolutionPrecisionStats.addValue(score.precision); bestReturnedSolutionRecallStats.addValue(score.recall); bestReturnedSolutionFMeasureStats.addValue(score.fmeasure); bestReturnedSolutionPredAccStats.addValue(score.predAcc); bestReturnedSolutionMathCorrStats.addValue(score.mathCorr); logger.info(score.toString()); // find the extensionally best matching tree in the list Pair<EvaluatedRDFResourceTree, Score> bestMatchingTreeWithScore = findBestMatchingTreeFast( solutions, sparqlQuery, noise, examples); EvaluatedRDFResourceTree bestMatchingTree = bestMatchingTreeWithScore .getFirst(); Score bestMatchingScore = bestMatchingTreeWithScore.getSecond(); // position of best tree in list of solutions int positionBestScore = solutions.indexOf(bestMatchingTree); bestSolutionPositionStats.addValue(positionBestScore); Score bestScore = score; if (positionBestScore > 0) { logger.info( "Position of best covering tree in list: " + positionBestScore); logger.info("Best covering solution:\n" + render(bestMatchingTree.asEvaluatedDescription())); logger.info("Tree score: " + bestMatchingTree.getTreeScore()); bestScore = bestMatchingScore; logger.info(bestMatchingScore.toString()); } else { logger.info( "Best returned solution was also the best covering solution."); } bestSolutionRecallStats.addValue(bestScore.recall); bestSolutionPrecisionStats.addValue(bestScore.precision); bestSolutionFMeasureStats.addValue(bestScore.fmeasure); bestSolutionPredAccStats.addValue(bestScore.predAcc); bestSolutionMathCorrStats.addValue(bestScore.mathCorr); for (RDFResourceTree negTree : examples.negExamplesMapping.values()) { if (QueryTreeUtils.isSubsumedBy(negTree, bestMatchingTree.getTree())) { Files.append(sparqlQuery + "\n", new File("/tmp/negCovered.txt"), Charsets.UTF_8); break; } } String bestQuery = QueryFactory .create(QueryTreeUtils.toSPARQLQueryString( filter.apply(bestMatchingTree.getTree()), dataset.getBaseIRI(), dataset.getPrefixMapping())) .toString(); if (write2DB) { write2DB(sparqlQuery, nrOfExamples, examples, noise, baseLineQuery, baselineScore, heuristicName, measureName, QueryFactory.create(learnedSPARQLQuery).toString(), score, runtimeBestSolution, bestQuery, positionBestScore, bestScore); } } catch (Exception e) { failed.set(true); logger.error("Error occured 
for query\n" + sparqlQuery, e); try { StringWriter sw = new StringWriter(); PrintWriter pw = new PrintWriter(sw); e.printStackTrace(pw); Files.append(sparqlQuery + "\n" + sw.toString(), new File(benchmarkDirectory, "failed-" + nrOfExamples + "-" + noise + "-" + heuristicName + "-" + measureName + ".txt"), Charsets.UTF_8); } catch (IOException e1) { e1.printStackTrace(); } } finally { int cnt = currentNrOfFinishedRuns.incrementAndGet(); logger.info("***********Evaluation Progress:" + NumberFormat.getPercentInstance() .format((double) cnt / totalNrOfQTLRuns) + "(" + cnt + "/" + totalNrOfQTLRuns + ")" + "***********"); } } }); } tp.shutdown(); tp.awaitTermination(12, TimeUnit.HOURS); Logger.getRootLogger().removeAppender(appender); if (!failed.get()) { String result = ""; result += "\nBaseline Precision:\n" + baselinePrecisionStats; result += "\nBaseline Recall:\n" + baselineRecallStats; result += "\nBaseline F-measure:\n" + baselineFMeasureStats; result += "\nBaseline PredAcc:\n" + baselinePredAccStats; result += "\nBaseline MathCorr:\n" + baselineMathCorrStats; result += "#Returned solutions:\n" + nrOfReturnedSolutionsStats; result += "\nOverall Precision:\n" + bestReturnedSolutionPrecisionStats; result += "\nOverall Recall:\n" + bestReturnedSolutionRecallStats; result += "\nOverall F-measure:\n" + bestReturnedSolutionFMeasureStats; result += "\nOverall PredAcc:\n" + bestReturnedSolutionPredAccStats; result += "\nOverall MathCorr:\n" + bestReturnedSolutionMathCorrStats; result += "\nTime until best returned solution found:\n" + bestReturnedSolutionRuntimeStats; result += "\nPositions of best solution:\n" + Arrays.toString(bestSolutionPositionStats.getValues()); result += "\nPosition of best solution stats:\n" + bestSolutionPositionStats; result += "\nOverall Precision of best solution:\n" + bestSolutionPrecisionStats; result += "\nOverall Recall of best solution:\n" + bestSolutionRecallStats; result += "\nOverall F-measure of best solution:\n" + bestSolutionFMeasureStats; result += "\nCBD generation time(total):\t" + MonitorFactory.getTimeMonitor(TimeMonitors.CBD_RETRIEVAL.name()).getTotal() + "\n"; result += "CBD generation time(avg):\t" + MonitorFactory.getTimeMonitor(TimeMonitors.CBD_RETRIEVAL.name()).getAvg() + "\n"; result += "Tree generation time(total):\t" + MonitorFactory.getTimeMonitor(TimeMonitors.TREE_GENERATION.name()).getTotal() + "\n"; result += "Tree generation time(avg):\t" + MonitorFactory.getTimeMonitor(TimeMonitors.TREE_GENERATION.name()).getAvg() + "\n"; result += "Tree size(avg):\t" + treeSizeStats.getMean() + "\n"; logger.info(result); try { Files.write(result, statsFile, Charsets.UTF_8); } catch (IOException e) { e.printStackTrace(); } data[i][j] = bestReturnedSolutionFMeasureStats.getMean(); if (write2DB) { write2DB(heuristicName, measureName, nrOfExamples, noise, bestReturnedSolutionFMeasureStats.getMean(), bestReturnedSolutionPrecisionStats.getMean(), bestReturnedSolutionRecallStats.getMean(), bestReturnedSolutionPredAccStats.getMean(), bestReturnedSolutionMathCorrStats.getMean(), bestSolutionPositionStats.getMean(), bestSolutionFMeasureStats.getMean(), bestSolutionPrecisionStats.getMean(), bestSolutionRecallStats.getMean(), bestSolutionPredAccStats.getMean(), bestSolutionMathCorrStats.getMean(), baselineFMeasureStats.getMean(), baselinePrecisionStats.getMean(), baselineRecallStats.getMean(), baselinePredAccStats.getMean(), baselineMathCorrStats.getMean(), bestReturnedSolutionRuntimeStats.getMean()); } } } } String content = "###"; String separator = "\t"; for 
(double noiseInterval1 : noiseIntervals) { content += separator + noiseInterval1; } content += "\n"; for (int i = 0; i < nrOfExamplesIntervals.length; i++) { content += nrOfExamplesIntervals[i]; for (int j = 0; j < noiseIntervals.length; j++) { content += separator + data[i][j]; } content += "\n"; } File examplesVsNoise = new File(benchmarkDirectory, "examplesVsNoise-" + heuristicName + "-" + measureName + ".tsv"); try { Files.write(content, examplesVsNoise, Charsets.UTF_8); } catch (IOException e) { logger.error("failed to write stats to file", e); } } } if (write2DB) { conn.close(); } if (useEmailNotification) { sendFinishedMail(); } long t2 = System.currentTimeMillis(); long duration = t2 - t1; logger.info("QTL evaluation finished in " + DurationFormatUtils.formatDurationHMS(duration) + "ms."); }
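The retainAll call in the middle of this method is what trims the work queue: queriesToProcess keeps only the queries whose precomputed positive-example candidates are numerous enough for the current interval. A minimal sketch of that idiom, with an assumed query2PosExamples map standing in for the experiment's query2Examples structure:

int nrOfExamples = 5;
Map<String, List<String>> query2PosExamples = loadExamples(); // assumed helper: query -> pos. example candidates
Set<String> queriesToProcess = new TreeSet<>(queries); // TreeSet gives a stable iteration order
queriesToProcess.retainAll(query2PosExamples.entrySet().stream()
        .filter(e -> e.getValue().size() >= nrOfExamples)
        .map(Map.Entry::getKey)
        .collect(Collectors.toSet()));

Collecting the qualifying keys into a Set first matters for performance: retainAll calls contains() once per element of the receiver, so the argument should offer O(1) lookup.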
From source file:org.lilyproject.indexer.engine.IndexUpdater.java
private void updateDenormalizedData(RecordId recordId, RecordEvent event, Map<Scope, Set<FieldType>> updatedFieldsByScope, Map<Long, Set<String>> vtagsByVersion) { // This algorithm is designed to first collect all the reindex-work, and then to perform it. // Otherwise the same document would be indexed multiple times if it would become invalid // because of different reasons (= different indexFields). ///*from ww w . j av a2 s. c o m*/ // Collect all the relevant IndexFields, and for each the relevant vtags // // This map will contain all the IndexFields we need to treat, and for each one the vtags to be considered Map<IndexField, Set<String>> indexFieldsVTags = new IdentityHashMap<IndexField, Set<String>>() { @Override public Set<String> get(Object key) { if (!this.containsKey(key) && key instanceof IndexField) { this.put((IndexField) key, new HashSet<String>()); } return super.get(key); } }; // There are two cases when denormalized data needs updating: // 1. when the content of a (vtagged) record changes // 2. when vtags change (are added, removed or point to a different version) // We now handle these 2 cases. // === Case 1 === updates in response to changes to this record long version = event.getVersionCreated() == -1 ? event.getVersionUpdated() : event.getVersionCreated(); // Determine the relevant index fields List<IndexField> indexFields; if (event.getType() == RecordEvent.Type.DELETE) { indexFields = indexer.getConf().getDerefIndexFields(); } else { indexFields = new ArrayList<IndexField>(); collectDerefIndexFields(updatedFieldsByScope.get(Scope.NON_VERSIONED), indexFields); if (version != -1 && vtagsByVersion.get(version) != null) { collectDerefIndexFields(updatedFieldsByScope.get(Scope.VERSIONED), indexFields); collectDerefIndexFields(updatedFieldsByScope.get(Scope.VERSIONED_MUTABLE), indexFields); } } // For each indexField, determine the vtags of the referrer that we should consider. // In the context of this algorithm, a referrer is each record whose index might contain // denormalized data from the record of which we are now processing the change event. nextIndexField: for (IndexField indexField : indexFields) { DerefValue derefValue = (DerefValue) indexField.getValue(); FieldType fieldType = derefValue.getTargetField(); // // Determine the vtags of the referrer that we should consider // Set<String> referrerVtags = indexFieldsVTags.get(indexField); // we do not know if the referrer has any versions at all, so always add the versionless tag referrerVtags.add(VersionTag.VERSIONLESS_TAG); if (fieldType.getScope() == Scope.NON_VERSIONED || event.getType() == RecordEvent.Type.DELETE) { // If it is a non-versioned field, then all vtags should be considered. // If it is a delete event, we do not know what vtags existed for the record, so consider them all. 
referrerVtags.addAll(indexer.getConf().getVtags()); } else { // Otherwise only the vtags of the created/updated version, if any if (version != -1) { Set<String> vtags = vtagsByVersion.get(version); if (vtags != null) referrerVtags.addAll(vtags); } } } // === Case 2 === handle updated/added/removed vtags Set<String> changedVTagFields = VersionTag.filterVTagFields(event.getUpdatedFields(), typeManager); if (!changedVTagFields.isEmpty()) { // In this case, the IndexFields which we need to handle are those that use fields from: // - the previous version to which the vtag pointed (if it is not a new vtag) // - the new version to which the vtag points (if it is not a deleted vtag) // But rather than calculating all that (consider the need to retrieve the versions), // for now we simply consider all IndexFields. // TODO could optimize this to exclude deref fields that use only non-versioned fields? for (IndexField indexField : indexer.getConf().getDerefIndexFields()) { indexFieldsVTags.get(indexField).addAll(changedVTagFields); } } // // Now search the referrers, that is: for each link field, find out which records point to the current record // in a certain versioned view (= a certain vtag) // // This map holds the referrer records to reindex, and for which versions (vtags) they need to be reindexed. Map<RecordId, Set<String>> referrersVTags = new HashMap<RecordId, Set<String>>() { @Override public Set<String> get(Object key) { if (!containsKey(key) && key instanceof RecordId) { put((RecordId) key, new HashSet<String>()); } return super.get(key); } }; int searchedFollowCount = 0; // Run over the IndexFields nextIndexField: for (Map.Entry<IndexField, Set<String>> entry : indexFieldsVTags.entrySet()) { IndexField indexField = entry.getKey(); Set<String> referrerVTags = entry.getValue(); DerefValue derefValue = (DerefValue) indexField.getValue(); // Run over the version tags for (String referrerVtag : referrerVTags) { List<DerefValue.Follow> follows = derefValue.getFollows(); Set<RecordId> referrers = new HashSet<RecordId>(); referrers.add(recordId); for (int i = follows.size() - 1; i >= 0; i--) { searchedFollowCount++; DerefValue.Follow follow = follows.get(i); Set<RecordId> newReferrers = new HashSet<RecordId>(); if (follow instanceof DerefValue.FieldFollow) { String fieldId = ((DerefValue.FieldFollow) follow).getFieldId(); for (RecordId referrer : referrers) { try { Set<RecordId> linkReferrers = linkIndex.getReferrers(referrer, referrerVtag, fieldId); newReferrers.addAll(linkReferrers); } catch (IOException e) { // TODO e.printStackTrace(); } } } else if (follow instanceof DerefValue.VariantFollow) { DerefValue.VariantFollow varFollow = (DerefValue.VariantFollow) follow; Set<String> dimensions = varFollow.getDimensions(); // We need to find out the variants of the current set of referrers which have the // same variant properties as the referrer (= same key/value pairs) and additionally // have the extra dimensions defined in the VariantFollow. 
nextReferrer: for (RecordId referrer : referrers) { Map<String, String> refprops = referrer.getVariantProperties(); // If the referrer already has one of the dimensions, then skip it for (String dimension : dimensions) { if (refprops.containsKey(dimension)) continue nextReferrer; } // Set<RecordId> variants; try { variants = repository.getVariants(referrer); } catch (Exception e) { // TODO we should probably throw this higher up and let it be handled there throw new RuntimeException(e); } nextVariant: for (RecordId variant : variants) { Map<String, String> varprops = variant.getVariantProperties(); // Check it has each of the variant properties of the current referrer record for (Map.Entry<String, String> refprop : refprops.entrySet()) { if (!ObjectUtils.safeEquals(varprops.get(refprop.getKey()), refprop.getValue())) { // skip this variant continue nextVariant; } } // Check it has the additional dimensions for (String dimension : dimensions) { if (!varprops.containsKey(dimension)) continue nextVariant; } // We have a hit newReferrers.add(variant); } } } else if (follow instanceof DerefValue.MasterFollow) { for (RecordId referrer : referrers) { // A MasterFollow can only point to masters if (referrer.isMaster()) { Set<RecordId> variants; try { variants = repository.getVariants(referrer); } catch (RepositoryException e) { // TODO we should probably throw this higher up and let it be handled there throw new RuntimeException(e); } catch (InterruptedException e) { // TODO we should probably throw this higher up and let it be handled there Thread.currentThread().interrupt(); throw new RuntimeException(e); } variants.remove(referrer); newReferrers.addAll(variants); } } } else { throw new RuntimeException( "Unexpected implementation of DerefValue.Follow: " + follow.getClass().getName()); } referrers = newReferrers; } for (RecordId referrer : referrers) { referrersVTags.get(referrer).add(referrerVtag); } } } if (log.isDebugEnabled()) { log.debug(String.format( "Record %1$s: found %2$s records (times vtags) to be updated because they " + "might contain outdated denormalized data. 
Checked %3$s follow instances.", recordId, referrersVTags.size(), searchedFollowCount)); } // // Now re-index all the found referrers // nextReferrer: for (Map.Entry<RecordId, Set<String>> entry : referrersVTags.entrySet()) { RecordId referrer = entry.getKey(); Set<String> vtagsToIndex = entry.getValue(); boolean lockObtained = false; try { indexLocker.lock(referrer); lockObtained = true; IdRecord record = null; try { // TODO optimize this: we are only interested to know the vtags and to know if the record has versions record = repository.readWithIds(referrer, null, null); } catch (Exception e) { // TODO handle this // One case to be expected here is that the record has been deleted since we read the list of referrers e.printStackTrace(); } IndexCase indexCase = indexer.getConf().getIndexCase(record.getRecordTypeName(), record.getId().getVariantProperties()); if (indexCase == null) { continue nextReferrer; } try { if (record.getVersion() == null) { if (indexCase.getIndexVersionless() && vtagsToIndex.contains(VersionTag.VERSIONLESS_TAG)) { indexer.index(record, Collections.singleton(VersionTag.VERSIONLESS_TAG)); } } else { Map<String, Long> recordVTags = VersionTag.getTagsById(record, typeManager); vtagsToIndex.retainAll(indexCase.getVersionTags()); // Only keep vtags which exist on the record vtagsToIndex.retainAll(recordVTags.keySet()); indexer.indexRecord(record.getId(), vtagsToIndex, recordVTags); } } catch (Exception e) { // TODO handle this e.printStackTrace(); } } catch (IndexLockException e) { // TODO handle this e.printStackTrace(); } finally { if (lockObtained) { indexLocker.unlockLogFailure(referrer); } } } }
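The two consecutive retainAll calls near the end are the core of this method: vtagsToIndex is first narrowed to the vtags the index case is configured for, then to the vtags that actually exist on the record, i.e. one working set is intersected with two constraint sets in place. The idiom in isolation (set names illustrative):

Set<String> vtagsToIndex = new HashSet<>(requestedVtags);  // working copy
vtagsToIndex.retainAll(configuredVersionTags);             // constraint 1: tags the index case covers
vtagsToIndex.retainAll(recordVTags.keySet());              // constraint 2: tags present on the record
// vtagsToIndex is now the three-way intersection

Because set intersection is commutative, the order of the two calls does not affect the final result, only the intermediate states of the working set.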
From source file:org.springframework.security.oauth2.provider.approval.ApprovalStoreUserApprovalHandler.java
public AuthorizationRequest checkForPreApproval(AuthorizationRequest authorizationRequest,
        Authentication userAuthentication) {
    String clientId = authorizationRequest.getClientId();
    Collection<String> requestedScopes = authorizationRequest.getScope();
    Set<String> approvedScopes = new HashSet<String>();
    Set<String> validUserApprovedScopes = new HashSet<String>();
    if (clientDetailsService != null) {
        try {
            ClientDetails client = clientDetailsService.loadClientByClientId(clientId);
            for (String scope : requestedScopes) {
                if (client.isAutoApprove(scope) || client.isAutoApprove("all")) {
                    approvedScopes.add(scope);
                }
            }
            if (approvedScopes.containsAll(requestedScopes)) {
                authorizationRequest.setApproved(true);
                return authorizationRequest;
            }
        } catch (ClientRegistrationException e) {
            logger.warn("Client registration problem prevented auto-approval check for client=" + clientId);
        }
    }
    if (logger.isDebugEnabled()) {
        StringBuilder builder = new StringBuilder("Looking up user approved authorizations for ");
        builder.append("client_id=" + clientId);
        builder.append(" and username=" + userAuthentication.getName());
        logger.debug(builder.toString());
    }
    // Find the stored approvals for that user and client
    Collection<Approval> userApprovals = approvalStore.getApprovals(userAuthentication.getName(), clientId);
    // Look at the scopes and see if they have expired
    Date today = new Date();
    for (Approval approval : userApprovals) {
        if (approval.getExpiresAt().after(today)) {
            validUserApprovedScopes.add(approval.getScope());
            if (approval.getStatus() == ApprovalStatus.APPROVED) {
                approvedScopes.add(approval.getScope());
            }
        }
    }
    if (logger.isDebugEnabled()) {
        logger.debug("Valid user approved/denied scopes are " + validUserApprovedScopes);
    }
    // If the requested scopes have already been acted upon by the user,
    // this request is approved
    if (validUserApprovedScopes.containsAll(requestedScopes)) {
        approvedScopes.retainAll(requestedScopes);
        // Set only the scopes that have been approved by the user
        authorizationRequest.setScope(approvedScopes);
        authorizationRequest.setApproved(true);
    }
    return authorizationRequest;
}
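Here retainAll enforces least privilege: after confirming the user has acted on every requested scope, approvedScopes is cut down to exactly the requested scopes, so approvals stored for other requests never widen the current grant. The check-then-trim pair in isolation (names illustrative):

if (validUserApprovedScopes.containsAll(requestedScopes)) {
    approvedScopes.retainAll(requestedScopes); // never grant more than was requested
}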
From source file:org.nuxeo.ecm.core.storage.sql.NXQLQueryMaker.java
public Query buildQuery(SQLInfo sqlInfo, Model model, Session session, String query, QueryFilter queryFilter, Object... params) throws StorageException { this.sqlInfo = sqlInfo; database = sqlInfo.database;/*from ww w .j a va 2 s .c om*/ dialect = sqlInfo.dialect; this.model = model; this.session = session; // transform the query according to the transformers defined by the // security policies SQLQuery sqlQuery = SQLQueryParser.parse(query); for (SQLQuery.Transformer transformer : queryFilter.getQueryTransformers()) { sqlQuery = transformer.transform(queryFilter.getPrincipal(), sqlQuery); } /* * Find all relevant types and keys for the criteria. */ QueryAnalyzer info = new QueryAnalyzer(); try { info.visitQuery(sqlQuery); } catch (QueryCannotMatchException e) { // query cannot match return null; } catch (QueryMakerException e) { throw new StorageException(e.getMessage(), e); } /* * Find all the types to take into account (all concrete types being a * subtype of the passed types) based on the FROM list. */ Set<String> types = new HashSet<String>(); for (String typeName : info.fromTypes) { if ("document".equals(typeName)) { typeName = "Document"; } Set<String> subTypes = model.getDocumentSubTypes(typeName); if (subTypes == null) { throw new StorageException("Unknown type: " + typeName); } types.addAll(subTypes); } types.remove(model.ROOT_TYPE); /* * Restrict types based on toplevel ecm:primaryType and ecm:mixinType * predicates. */ types.removeAll(info.typesExcluded); if (!info.typesAnyRequired.isEmpty()) { types.retainAll(info.typesAnyRequired); } if (types.isEmpty()) { // conflicting types requirement, query cannot match return null; } /* * Merge facet filter into mixin clauses and immutable flag. */ FacetFilter facetFilter = queryFilter.getFacetFilter(); if (facetFilter == null) { facetFilter = FacetFilter.ALLOW; } info.mixinsExcluded.addAll(facetFilter.excluded); if (info.mixinsExcluded.remove(FacetNames.IMMUTABLE)) { if (info.immutableClause == Boolean.TRUE) { // conflict on immutable condition, query cannot match return null; } info.immutableClause = Boolean.FALSE; } info.mixinsAllRequired.addAll(facetFilter.required); if (info.mixinsAllRequired.remove(FacetNames.IMMUTABLE)) { if (info.immutableClause == Boolean.FALSE) { // conflict on immutable condition, query cannot match return null; } info.immutableClause = Boolean.TRUE; } /* * Find the relevant tables to join with. */ Set<String> fragmentNames = new HashSet<String>(); for (String prop : info.props) { PropertyInfo propertyInfo = model.getPropertyInfo(prop); if (propertyInfo == null) { throw new StorageException("Unknown field: " + prop); } fragmentNames.add(propertyInfo.fragmentName); } fragmentNames.remove(model.hierTableName); // Do we need to add the versions table too? if (info.needsVersionsTable || info.immutableClause != null) { fragmentNames.add(model.VERSION_TABLE_NAME); } /* * Build the FROM / JOIN criteria for each select. 
*/ DocKind[] docKinds; if (info.proxyClause == Boolean.TRUE) { if (info.immutableClause == Boolean.FALSE) { // proxy but not immutable: query cannot match return null; } docKinds = new DocKind[] { DocKind.PROXY }; } else if (info.proxyClause == Boolean.FALSE || info.immutableClause == Boolean.FALSE) { docKinds = new DocKind[] { DocKind.DIRECT }; } else { docKinds = new DocKind[] { DocKind.DIRECT, DocKind.PROXY }; } Table hier = database.getTable(model.hierTableName); boolean aliasColumns = docKinds.length > 1; Select select = null; String orderBy = null; List<String> statements = new ArrayList<String>(2); List<Serializable> selectParams = new LinkedList<Serializable>(); for (DocKind docKind : docKinds) { // The hierarchy table, which may be an alias table. Table hierTable; // Quoted id in the hierarchy. This is the id returned by the query. String hierId; // Quoted name in the hierarchy. This is the id returned by the query. String hierName; // The hierarchy table of the data. Table dataHierTable; // Quoted id attached to the data that matches. String dataHierId; List<String> joins = new LinkedList<String>(); LinkedList<String> leftJoins = new LinkedList<String>(); List<Serializable> leftJoinsParams = new LinkedList<Serializable>(); LinkedList<String> implicitJoins = new LinkedList<String>(); List<Serializable> implicitJoinsParams = new LinkedList<Serializable>(); List<String> whereClauses = new LinkedList<String>(); List<Serializable> whereParams = new LinkedList<Serializable>(); switch (docKind) { case DIRECT: hierTable = hier; hierId = hierTable.getColumn(model.MAIN_KEY).getFullQuotedName(); hierName = hierTable.getColumn(model.HIER_CHILD_NAME_KEY).getFullQuotedName(); dataHierTable = hierTable; dataHierId = hierId; joins.add(hierTable.getQuotedName()); break; case PROXY: hierTable = new TableAlias(hier, TABLE_HIER_ALIAS); String hierFrom = hier.getQuotedName() + " " + hierTable.getQuotedName(); // TODO use dialect hierId = hierTable.getColumn(model.MAIN_KEY).getFullQuotedName(); hierName = hierTable.getColumn(model.HIER_CHILD_NAME_KEY).getFullQuotedName(); // joined (data) dataHierTable = hier; dataHierId = hier.getColumn(model.MAIN_KEY).getFullQuotedName(); // proxies Table proxies = database.getTable(model.PROXY_TABLE_NAME); String proxiesid = proxies.getColumn(model.MAIN_KEY).getFullQuotedName(); String proxiestargetid = proxies.getColumn(model.PROXY_TARGET_KEY).getFullQuotedName(); // join all that joins.add(hierFrom); joins.add(String.format(JOIN_ON, proxies.getQuotedName(), hierId, proxiesid)); joins.add(String.format(JOIN_ON, dataHierTable.getQuotedName(), dataHierId, proxiestargetid)); break; default: throw new AssertionError(docKind); } // main data joins for (String fragmentName : fragmentNames) { Table table = database.getTable(fragmentName); // the versions table joins on the real hier table boolean useHier = model.VERSION_TABLE_NAME.equals(fragmentName); leftJoins.add(String.format(JOIN_ON, table.getQuotedName(), useHier ? hierId : dataHierId, table.getColumn(model.MAIN_KEY).getFullQuotedName())); } /* * Filter on facets and mixin types, and create the structural WHERE * clauses for the type. 
*/ List<String> typeStrings = new ArrayList<String>(types.size()); NEXT_TYPE: for (String type : types) { Set<String> facets = model.getDocumentTypeFacets(type); for (String facet : info.mixinsExcluded) { if (facets.contains(facet)) { continue NEXT_TYPE; } } for (String facet : info.mixinsAllRequired) { if (!facets.contains(facet)) { continue NEXT_TYPE; } } if (!info.mixinsAnyRequired.isEmpty()) { Set<String> intersection = new HashSet<String>(info.mixinsAnyRequired); intersection.retainAll(facets); if (intersection.isEmpty()) { continue NEXT_TYPE; } } // this type is good typeStrings.add("?"); whereParams.add(type); } if (typeStrings.isEmpty()) { return null; // mixins excluded all types, no match possible } whereClauses.add(String.format("%s IN (%s)", dataHierTable.getColumn(model.MAIN_PRIMARY_TYPE_KEY).getFullQuotedName(), StringUtils.join(typeStrings, ", "))); /* * Add clause for immutable match. */ if (docKind == DocKind.DIRECT && info.immutableClause != null) { String where = String.format("%s IS %s", database.getTable(model.VERSION_TABLE_NAME).getColumn(model.MAIN_KEY).getFullQuotedName(), info.immutableClause.booleanValue() ? "NOT NULL" : "NULL"); whereClauses.add(where); } /* * Parse the WHERE clause from the original query, and deduce from * it actual WHERE clauses and potential JOINs. */ WhereBuilder whereBuilder; try { whereBuilder = new WhereBuilder(database, session, hierTable, hierId, dataHierTable, dataHierId, docKind == DocKind.PROXY, aliasColumns); } catch (QueryMakerException e) { throw new StorageException(e.getMessage(), e); } if (info.wherePredicate != null) { info.wherePredicate.accept(whereBuilder); // JOINs added by fulltext queries leftJoins.addAll(whereBuilder.leftJoins); leftJoinsParams.addAll(whereBuilder.leftJoinsParams); implicitJoins.addAll(whereBuilder.implicitJoins); implicitJoinsParams.addAll(whereBuilder.implicitJoinsParams); // WHERE clause String where = whereBuilder.buf.toString(); if (where.length() != 0) { whereClauses.add(where); whereParams.addAll(whereBuilder.whereParams); } } /* * Security check. */ if (queryFilter.getPrincipals() != null) { Serializable principals = queryFilter.getPrincipals(); Serializable permissions = queryFilter.getPermissions(); if (!dialect.supportsArrays()) { principals = StringUtils.join((String[]) principals, '|'); permissions = StringUtils.join((String[]) permissions, '|'); } if (dialect.supportsReadAcl()) { /* optimized read acl */ whereClauses.add(dialect.getReadAclsCheckSql("r.acl_id")); whereParams.add(principals); joins.add(String.format("%s AS r ON %s = r.id", model.HIER_READ_ACL_TABLE_NAME, hierId)); } else { whereClauses.add(dialect.getSecurityCheckSql(hierId)); whereParams.add(principals); whereParams.add(permissions); } } /* * Columns on which to do ordering. */ String selectWhat = hierId; // always add the name, it will be used for intalio crm selectWhat += ", " + hierName; if (aliasColumns) { // UNION, so we need all orderable columns, aliased int n = 0; for (String key : info.orderKeys) { Column column = whereBuilder.findColumn(key, false, true); String qname = column.getFullQuotedName(); selectWhat += ", " + qname + " AS " + dialect.openQuote() + COL_ORDER_ALIAS_PREFIX + ++n + dialect.closeQuote(); } } /* * Order by. Compute it just once. May use just aliases. */ if (orderBy == null && sqlQuery.orderBy != null) { whereBuilder.buf.setLength(0); sqlQuery.orderBy.accept(whereBuilder); orderBy = whereBuilder.buf.toString(); } /* * Resulting select. 
*/ select = new Select(null); select.setWhat(selectWhat); leftJoins.addFirst(StringUtils.join(joins, " JOIN ")); String from = StringUtils.join(leftJoins, " LEFT JOIN "); if (!implicitJoins.isEmpty()) { implicitJoins.addFirst(from); from = StringUtils.join(implicitJoins, ", "); } select.setFrom(from); select.setWhere(StringUtils.join(whereClauses, " AND ")); selectParams.addAll(leftJoinsParams); selectParams.addAll(implicitJoinsParams); selectParams.addAll(whereParams); statements.add(select.getStatement()); } /* * Create the whole select. */ if (statements.size() > 1) { select = new Select(null); String selectWhat = hier.getColumn(model.MAIN_KEY).getQuotedName(); selectWhat = selectWhat + ", " + hier.getColumn(model.HIER_CHILD_NAME_KEY).getQuotedName(); select.setWhat(selectWhat); // note that Derby has bizarre restrictions on parentheses placement // around UNION, see http://issues.apache.org/jira/browse/DERBY-2374 String from = '(' + StringUtils.join(statements, " UNION ALL ") + ')'; if (dialect.needsAliasForDerivedTable()) { from += " AS " + dialect.openQuote() + UNION_ALIAS + dialect.closeQuote(); } select.setFrom(from); } select.setOrderBy(orderBy); List<Column> whatColumns = Collections.singletonList(hier.getColumn(model.MAIN_KEY)); Query q = new Query(); q.selectInfo = new SQLInfoSelect(select.getStatement(), whatColumns, null, null); q.selectParams = selectParams; return q; }
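This query builder uses retainAll in two ways: destructively, with types.retainAll(info.typesAnyRequired) narrowing the candidate document types in place, and non-destructively, copying mixinsAnyRequired into a fresh set before intersecting it with each type's facets, so the required-mixin set survives for the next type in the loop. The copy-then-retain idiom in isolation:

// Non-destructive "any overlap?" test: copy first so mixinsAnyRequired stays intact.
Set<String> intersection = new HashSet<String>(mixinsAnyRequired);
intersection.retainAll(facets);
boolean anyRequiredMixinPresent = !intersection.isEmpty();

When only the boolean is needed, Collections.disjoint(mixinsAnyRequired, facets) gives the same answer without allocating the copy.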
From source file:org.eclipse.internal.xtend.type.baseimpl.types.CollectionTypeImpl.java
@Override public Feature[] getContributedFeatures() { return new Feature[] { new OperationImpl(this, "toList", getTypeSystem().getListType(getInnerType())) { @Override/* w w w .j a v a2 s.com*/ public String getDocumentation() { return "converts this collection to List"; } @Override public Object evaluateInternal(final Object target, final Object[] params) { if (target == null) { LOG.warn("toList called with Null argument. Will return an empty list."); return new ArrayList<Object>(0); } return new ArrayList<Object>(((Collection<?>) target)); } @Override public Type getReturnType(final Type targetType, final Type[] paramTypes) { if (!(targetType instanceof ParameterizedType)) return getReturnType(); final TypeSystem ts = getTypeSystem(); return ts.getListType(((ParameterizedType) targetType).getInnerType()); } }, new OperationImpl(this, "toSet", getTypeSystem().getSetType(getInnerType())) { @Override public String getDocumentation() { return "converts this collection to Set"; } @Override public Object evaluateInternal(final Object target, final Object[] params) { if (target == null) { LOG.warn("toSet called with Null argument. Will return an empty set."); return new LinkedHashSet<Object>(0); } return new LinkedHashSet<Object>((Collection<?>) target); } @Override public Type getReturnType(final Type targetType, final Type[] paramTypes) { if (!(targetType instanceof ParameterizedType)) return getReturnType(); final TypeSystem ts = getTypeSystem(); return ts.getSetType(((ParameterizedType) targetType).getInnerType()); } }, new OperationImpl(this, "toString", getTypeSystem().getStringType(), getTypeSystem().getStringType()) { @Override public String getDocumentation() { return "concatenates each contained element (using toString()), separated by the specified String."; } @Override public Object evaluateInternal(final Object target, final Object[] params) { final StringBuffer buff = new StringBuffer(); for (final Iterator<?> iter = ((Collection<?>) target).iterator(); iter.hasNext();) { buff.append(iter.next().toString()); if (iter.hasNext()) { buff.append(params[0].toString()); } } return buff.toString(); } }, new PropertyImpl(this, "size", getTypeSystem().getIntegerType()) { @Override public String getDocumentation() { return "returns the size of this Collection"; } public Object get(final Object target) { return new Long(((Collection<?>) target).size()); } }, new PropertyImpl(this, "isEmpty", getTypeSystem().getBooleanType()) { @Override public String getDocumentation() { return "returns true if this Collection is empty"; } public Object get(final Object target) { if (target == null) { LOG.warn("isEmpty called with Null argument. Will return true."); return Boolean.TRUE; } return new Boolean(((Collection<?>) target).size() == 0); } }, new OperationImpl(this, "add", this, getInnerType()) { @Override public String getDocumentation() { return "adds an element to the Collection (modifies it!). returns this Collection."; } @Override public Object evaluateInternal(final Object target, final Object[] params) { ((Collection<Object>) target).add(params[0]); return target; } }, new OperationImpl(this, "addAll", this, getTypeSystem().getCollectionType(getInnerType())) { @Override public String getDocumentation() { return "adds all elements to the Collection (modifies it!). 
returns this Collection."; } @Override public Object evaluateInternal(final Object target, final Object[] params) { ((Collection<?>) target).addAll((Collection) params[0]); return target; } }, new OperationImpl(this, "contains", getTypeSystem().getBooleanType(), getTypeSystem().getObjectType()) { @Override public String getDocumentation() { return "returns true if this collection contains the specified object. otherwise false. returns this Collection."; } @Override public Object evaluateInternal(final Object target, final Object[] params) { return Boolean.valueOf(((Collection) target).contains(params[0])); } }, new OperationImpl(this, "containsAll", getTypeSystem().getBooleanType(), getTypeSystem().getCollectionType(getTypeSystem().getObjectType())) { @Override public String getDocumentation() { return "returns true if this collection contains each element contained in the specified collection. otherwise false. returns this Collection."; } @Override public Object evaluateInternal(final Object target, final Object[] params) { return Boolean.valueOf(((Collection) target).containsAll((Collection) params[0])); } }, new OperationImpl(this, "remove", this, getTypeSystem().getObjectType()) { @Override public String getDocumentation() { return "removes the specified element from this Collection if contained (modifies it!). returns this Collection."; } @Override public Object evaluateInternal(final Object target, final Object[] params) { ((Collection) target).remove(params[0]); return target; } }, new OperationImpl(this, "removeAll", this, getTypeSystem().getCollectionType(getTypeSystem().getObjectType())) { @Override public String getDocumentation() { return "removes all elements contained in the specified collection from this Collection if contained (modifies it!). returns this Collection."; } @Override public Object evaluateInternal(final Object target, final Object[] params) { ((Collection) target).removeAll((Collection) params[0]); return target; } }, new OperationImpl(this, "union", getTypeSystem().getSetType(getInnerType()), getTypeSystem().getCollectionType(getTypeSystem().getObjectType())) { @Override public String getDocumentation() { return "returns a new Set, containing all elements from this and the specified Collection"; } @Override public Object evaluateInternal(final Object target, final Object[] params) { final Set r = new LinkedHashSet((Collection) target); if (params != null && params[0] != null) { r.addAll((Collection) params[0]); } else { LOG.warn("Invoking union() with Null as argument. 
Will return the source collection."); } return r; } }, new OperationImpl(this, "without", getTypeSystem().getSetType(getInnerType()), getTypeSystem().getCollectionType(getTypeSystem().getObjectType())) { @Override public String getDocumentation() { return "returns a new Set, containing all elements from this Collection without the elements from specified Collection"; } @Override public Object evaluateInternal(final Object target, final Object[] params) { final Set r = new LinkedHashSet((Collection) target); r.removeAll((Collection) params[0]); return r; } }, new OperationImpl(this, "intersect", getTypeSystem().getSetType(getInnerType()), getTypeSystem().getCollectionType(getTypeSystem().getObjectType())) { @Override public String getDocumentation() { return "returns a new Set, containing only the elements contained in this and the specified Collection"; } @Override public Object evaluateInternal(final Object target, final Object[] params) { final Set r = new LinkedHashSet((Collection) target); r.retainAll((Collection) params[0]); return r; } }, new OperationImpl(this, "flatten", getTypeSystem().getListType(getInnerTypeRec(CollectionTypeImpl.this)), new Type[0]) { @Override public String getDocumentation() { return "returns a flattened List."; } @Override public Object evaluateInternal(final Object target, final Object[] params) { final List<Object> result = new ArrayList<Object>(); flattenRec(result, (Collection) target); return result; } public void flattenRec(final List<Object> result, final Collection col) { for (final Object element : col) { if (element instanceof Collection) { flattenRec(result, (Collection) element); } else { result.add(element); } } } } }; }
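The "intersect" operation above is the standard way to expose retainAll as a non-mutating set operation: copy the receiver into a new LinkedHashSet (preserving encounter order), retain against the argument, and return the copy. Stripped of the Xtend type-system plumbing, it reduces to:

static <T> Set<T> intersect(Collection<? extends T> a, Collection<?> b) {
    Set<T> result = new LinkedHashSet<>(a); // copy, so neither input is modified
    result.retainAll(b);
    return result;
}

The sibling operations follow the same shape: union is copy + addAll, and without is copy + removeAll.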
From source file:org.jactr.modules.pm.aural.audicon.map.OnsetFeatureMap.java
/**
 * @see org.jactr.modules.pm.common.memory.map.IFeatureMap#getCandidateRealObjects(ChunkTypeRequest, Set)
 */
public void getCandidateRealObjects(ChunkTypeRequest request, Set<IIdentifier> container) {
    Set<IIdentifier> identifiers = new HashSet<IIdentifier>();
    boolean firstIteration = true;
    for (IConditionalSlot cSlot : request.getConditionalSlots())
        if (cSlot.getName().equalsIgnoreCase(IAuralModule.ONSET_SLOT)) {
            Object value = cSlot.getValue();
            if (_module.getLowestChunk().equals(value))
                value = _onsetMap.firstKey();
            else if (_module.getHighestChunk().equals(value))
                value = _onsetMap.lastKey();
            Number val = (Number) value;
            Collection<IIdentifier> eval = new HashSet<IIdentifier>();
            if (val == null) {
                if (IConditionalSlot.NOT_EQUALS == cSlot.getCondition())
                    eval.addAll(all());
            } else
                switch (cSlot.getCondition()) {
                case IConditionalSlot.EQUALS:
                    eval.addAll(equal(val.doubleValue()));
                    break;
                case IConditionalSlot.NOT_EQUALS:
                    eval.addAll(not(val.doubleValue()));
                    break;
                case IConditionalSlot.GREATER_THAN_EQUALS:
                    eval.addAll(equal(val.doubleValue()));
                    // deliberate fall-through: >= is "greater than" plus "equal"
                case IConditionalSlot.GREATER_THAN:
                    eval.addAll(greaterThan(val.doubleValue()));
                    break;
                case IConditionalSlot.LESS_THAN_EQUALS:
                    eval.addAll(equal(val.doubleValue()));
                    // deliberate fall-through: <= is "less than" plus "equal"
                case IConditionalSlot.LESS_THAN:
                    eval.addAll(lessThan(val.doubleValue()));
                    break;
                default:
                    if (LOGGER.isWarnEnabled())
                        LOGGER.warn(getClass().getSimpleName() + " can only handle =,!=,<,<=,>,>=");
                    break;
                }
            if (eval.size() == 0)
                break; // this slot matched nothing, stop narrowing
            if (firstIteration) {
                identifiers.addAll(eval); // first matching slot seeds the candidate set
                firstIteration = false;
            } else
                identifiers.retainAll(eval); // each further slot narrows it by intersection
        }
    // note: as excerpted, the intersection is accumulated in the local
    // identifiers set; the container parameter is not modified here
}
From source file:org.jactr.modules.pm.aural.audicon.map.OffsetFeatureMap.java
/**
 * @see org.jactr.modules.pm.common.memory.map.IFeatureMap#getCandidateRealObjects(ChunkTypeRequest, Set)
 */
public void getCandidateRealObjects(ChunkTypeRequest request, Set<IIdentifier> container) {
    Set<IIdentifier> identifiers = new HashSet<IIdentifier>();
    boolean firstIteration = true;
    for (IConditionalSlot cSlot : request.getConditionalSlots())
        if (cSlot.getName().equalsIgnoreCase(IAuralModule.OFFSET_SLOT)) {
            Object value = cSlot.getValue();
            if (_module.getLowestChunk().equals(value))
                value = _offsetMap.firstKey();
            else if (_module.getHighestChunk().equals(value))
                value = _offsetMap.lastKey();
            Number val = (Number) value;
            Collection<IIdentifier> eval = new HashSet<IIdentifier>();
            if (val == null) {
                if (IConditionalSlot.NOT_EQUALS == cSlot.getCondition())
                    eval.addAll(all());
            } else
                switch (cSlot.getCondition()) {
                case IConditionalSlot.EQUALS:
                    eval.addAll(equal(val.doubleValue()));
                    break;
                case IConditionalSlot.NOT_EQUALS:
                    eval.addAll(not(val.doubleValue()));
                    break;
                case IConditionalSlot.GREATER_THAN_EQUALS:
                    eval.addAll(equal(val.doubleValue()));
                    // deliberate fall-through: >= is "greater than" plus "equal"
                case IConditionalSlot.GREATER_THAN:
                    eval.addAll(greaterThan(val.doubleValue()));
                    break;
                case IConditionalSlot.LESS_THAN_EQUALS:
                    eval.addAll(equal(val.doubleValue()));
                    // deliberate fall-through: <= is "less than" plus "equal"
                case IConditionalSlot.LESS_THAN:
                    eval.addAll(lessThan(val.doubleValue()));
                    break;
                default:
                    if (LOGGER.isWarnEnabled())
                        LOGGER.warn(getClass().getSimpleName() + " can only handle =,!=,<,<=,>,>=");
                    break;
                }
            if (eval.size() == 0)
                break; // this slot matched nothing, stop narrowing
            if (firstIteration) {
                identifiers.addAll(eval); // first matching slot seeds the candidate set
                firstIteration = false;
            } else
                identifiers.retainAll(eval); // each further slot narrows it by intersection
        }
    // note: as excerpted, the intersection is accumulated in the local
    // identifiers set; the container parameter is not modified here
}
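Both the onset and the offset feature map use the same accumulate-then-intersect loop: the first matching slot seeds the candidate set via addAll, every later slot narrows it via retainAll, and the loop breaks as soon as a slot yields no candidates. Note one subtlety of that break: on a later iteration it leaves the partial intersection from the earlier slots in place rather than emptying the result, which mirrors the behavior of the code above. The skeleton of the idiom:

Set<Long> candidates = new HashSet<>();
boolean first = true;
for (Set<Long> matches : perConstraintMatches) { // assumed: one match set per conditional slot
    if (matches.isEmpty())
        break;                         // the code above stops narrowing here
    if (first) {
        candidates.addAll(matches);    // first constraint seeds the set
        first = false;
    } else {
        candidates.retainAll(matches); // each further constraint narrows it
    }
}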
From source file:org.dllearner.algorithms.qtl.qald.QALDExperiment.java
/** * Split the SPARQL query and join the result set of each split. This * allows for the execution of more complex queries. * @param sparqlQuery//from w w w. j av a 2 s .com * @return */ private List<String> getResultSplitted(String sparqlQuery) { Query query = QueryFactory.create(sparqlQuery); logger.trace("Getting result set for\n" + query); QueryUtils queryUtils = new QueryUtils(); Set<Triple> triplePatterns = queryUtils.extractTriplePattern(query); // remove triple patterns with unbound object vars if (triplePatterns.size() > 10) { query = removeUnboundObjectVarTriples(query); triplePatterns = queryUtils.extractTriplePattern(query); } // Virtuoso bug workaround with literals of type xsd:float and xsd:double for (Iterator<Triple> iterator = triplePatterns.iterator(); iterator.hasNext();) { Node object = iterator.next().getObject(); if (object.isLiteral() && object.getLiteralDatatype() != null && (object.getLiteralDatatype().equals(XSDDatatype.XSDfloat) || object.getLiteralDatatype().equals(XSDDatatype.XSDdouble))) { iterator.remove(); } } Var targetVar = query.getProjectVars().get(0); // should be ?x0 Multimap<Var, Triple> var2TriplePatterns = HashMultimap.create(); for (Triple tp : triplePatterns) { var2TriplePatterns.put(Var.alloc(tp.getSubject()), tp); } // we keep only the most specific types for each var filterOutGeneralTypes(var2TriplePatterns); // 1. get the outgoing triple patterns of the target var that do not have // outgoing triple patterns Set<Triple> fixedTriplePatterns = new HashSet<>(); Set<Set<Triple>> clusters = new HashSet<>(); Collection<Triple> targetVarTriplePatterns = var2TriplePatterns.get(targetVar); boolean useSplitting = false; for (Triple tp : targetVarTriplePatterns) { Node object = tp.getObject(); if (object.isConcrete() || !var2TriplePatterns.containsKey(Var.alloc(object))) { fixedTriplePatterns.add(tp); } else { Set<Triple> cluster = new TreeSet<>((Comparator<Triple>) (o1, o2) -> { return ComparisonChain.start().compare(o1.getSubject().toString(), o2.getSubject().toString()) .compare(o1.getPredicate().toString(), o2.getPredicate().toString()) .compare(o1.getObject().toString(), o2.getObject().toString()).result(); }); cluster.add(tp); clusters.add(cluster); useSplitting = true; } } if (!useSplitting) { clusters.add(Sets.newHashSet(fixedTriplePatterns)); } else { logger.trace("Query too complex. Splitting..."); // 2. 
build clusters for other for (Set<Triple> cluster : clusters) { Triple representative = cluster.iterator().next(); cluster.addAll(var2TriplePatterns.get(Var.alloc(representative.getObject()))); cluster.addAll(fixedTriplePatterns); } } // again split clusters to have only a maximum number of triple patterns int maxNrOfTriplePatternsPerQuery = 20;// number of outgoing triple patterns form the target var in each executed query Set<Set<Triple>> newClusters = new HashSet<>(); for (Set<Triple> cluster : clusters) { int cnt = 0; for (Triple triple : cluster) { if (triple.getSubject().matches(targetVar)) { cnt++; } } if (cnt > maxNrOfTriplePatternsPerQuery) { Set<Triple> newCluster = new HashSet<>(); for (Triple triple : cluster) { if (triple.getSubject().matches(targetVar)) { newCluster.add(triple); } if (newCluster.size() == maxNrOfTriplePatternsPerQuery) { newClusters.add(newCluster); newCluster = new HashSet<>(); } } if (!newCluster.isEmpty()) { newClusters.add(newCluster); } } } for (Set<Triple> cluster : newClusters) { Set<Triple> additionalTriples = new HashSet<>(); for (Triple triple : cluster) { if (triple.getObject().isVariable()) { additionalTriples.addAll(var2TriplePatterns.get(Var.alloc(triple.getObject()))); } } cluster.addAll(additionalTriples); } // clusters = newClusters; Set<String> resources = null; // 3. run query for each cluster for (Set<Triple> cluster : clusters) { Query q = new Query(); q.addProjectVars(Collections.singleton(targetVar)); ElementTriplesBlock el = new ElementTriplesBlock(); for (Triple triple : cluster) { el.addTriple(triple); } q.setQuerySelectType(); q.setDistinct(true); q.setQueryPattern(el); q = rewriteForVirtuosoDateLiteralBug(q); // q = rewriteForVirtuosoFloatingPointIssue(q); logger.trace(q); // sparqlQuery = getPrefixedQuery(sparqlQuery); System.out.println(q); List<String> partialResult = getResult(q.toString()); Set<String> resourcesTmp = new HashSet<>(partialResult); if (resourcesTmp.isEmpty()) { System.err.println("Empty query result"); System.err.println(q); // System.exit(0); return Collections.EMPTY_LIST; } if (resources == null) { resources = resourcesTmp; } else { resources.retainAll(resourcesTmp); } } return new ArrayList<>(resources); }
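The final join in getResultSplitted is an incremental intersection with a null sentinel: resources == null marks "no cluster processed yet", the first cluster's result seeds the set, and each subsequent cluster's result is intersected in with retainAll, so only resources returned by every sub-query survive. Reduced to its core (runQuery is an assumed helper):

Set<String> resources = null; // null = no partial result yet
for (Query q : clusterQueries) {
    Set<String> partial = new HashSet<>(runQuery(q));
    if (resources == null) {
        resources = partial;          // first sub-query seeds the result
    } else {
        resources.retainAll(partial); // keep only resources every sub-query returned
    }
}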
From source file:org.wso2.carbon.device.mgt.oauth.extensions.validators.RoleBasedScopeValidator.java
@Override public boolean validateScope(AccessTokenDO accessTokenDO, String resourceScope) throws IdentityOAuth2Exception { //Get the list of scopes associated with the access token String[] scopes = accessTokenDO.getScope(); //If no scopes are associated with the token if (scopes == null || scopes.length == 0) { return true; }// w w w .ja v a2 s . c o m OAuthScopeDAOImpl scopeDAO = new OAuthScopeDAOImpl(); List<String> scopeList = new ArrayList<>(Arrays.asList(scopes)); //If the access token does not bear the scope required for accessing the Resource. if (!scopeList.contains(resourceScope)) { if (log.isDebugEnabled() && IdentityUtil.isTokenLoggable(IdentityConstants.IdentityTokens.ACCESS_TOKEN)) { log.debug("Access token '" + accessTokenDO.getAccessToken() + "' does not bear the scope '" + resourceScope + "'"); } return false; } try { User authzUser = accessTokenDO.getAuthzUser(); RealmService realmService = OAuthExtensionsDataHolder.getInstance().getRealmService(); int tenantId = realmService.getTenantManager().getTenantId(authzUser.getTenantDomain()); if (tenantId == 0 || tenantId == -1) { tenantId = IdentityTenantUtil.getTenantIdOfUser(authzUser.getUserName()); } //Get the roles associated with the scope, if any Set<String> rolesOfScope = scopeDAO.getBindingsOfScopeByScopeName(resourceScope, tenantId); //If the scope doesn't have any roles associated with it. if (rolesOfScope == null || rolesOfScope.isEmpty()) { if (log.isDebugEnabled()) { log.debug("Did not find any roles associated to the scope " + resourceScope); } return true; } if (log.isDebugEnabled()) { StringBuilder logMessage = new StringBuilder("Found roles of scope '" + resourceScope + "' "); for (String role : rolesOfScope) { logMessage.append(role); logMessage.append(", "); } log.debug(logMessage.toString()); } UserStoreManager userStoreManager; String[] userRoles; boolean tenantFlowStarted = false; try { //If this is a tenant user if (tenantId != MultitenantConstants.SUPER_TENANT_ID) { PrivilegedCarbonContext.startTenantFlow(); PrivilegedCarbonContext.getThreadLocalCarbonContext() .setTenantDomain(realmService.getTenantManager().getDomain(tenantId), true); tenantFlowStarted = true; } userStoreManager = realmService.getTenantUserRealm(tenantId).getUserStoreManager(); userRoles = userStoreManager .getRoleListOfUser(MultitenantUtils.getTenantAwareUsername(authzUser.getUserName())); } finally { if (tenantFlowStarted) { PrivilegedCarbonContext.endTenantFlow(); } } if (userRoles != null && userRoles.length > 0) { if (log.isDebugEnabled()) { StringBuilder logMessage = new StringBuilder("Found roles of user "); logMessage.append(authzUser.getUserName()); logMessage.append(" "); for (String role : userRoles) { logMessage.append(role); logMessage.append(", "); } log.debug(logMessage.toString()); } //Check if the user still has a valid role for this scope. rolesOfScope.retainAll(Arrays.asList(userRoles)); return !rolesOfScope.isEmpty(); } else { if (log.isDebugEnabled()) { log.debug("No roles associated for the user " + authzUser.getUserName()); } return false; } } catch (UserStoreException e) { //Log and return since we do not want to stop issuing the token in case of scope validation failures. log.error("Error when getting the tenant's UserStoreManager or when getting roles of user ", e); return false; } }
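The actual authorization decision in this validator is a single intersection: rolesOfScope.retainAll(Arrays.asList(userRoles)) keeps only the scope's roles that the user holds, and a non-empty remainder means at least one matching role, hence access. The check in isolation, which also shows that retainAll accepts any Collection argument, not just a Set:

Set<String> rolesOfScope = new HashSet<>(scopeRoleBindings); // roles bound to the scope (illustrative name)
rolesOfScope.retainAll(Arrays.asList(userRoles));            // intersect with the user's roles
boolean authorized = !rolesOfScope.isEmpty();                // any overlap grants the scope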
From source file:com.aurel.track.accessControl.AccessBeans.java
/**
 * Returns a set of personIDs which have one of the specified rights in all
 * projects.
 *
 * @param projects
 * @param arrRights
 *            an array of rights, null means any right
 * @return
 */
public static Set<Integer> getPersonSetWithRightInAllOfTheProjects(Integer[] projects, int[] arrRights) {
    Set<Integer> personIDs = new HashSet<Integer>();
    if (projects == null || projects.length == 0) {
        return personIDs;
    }
    // get the results for first project
    personIDs = getPersonsFromAcList(AccessControlBL.loadByProjectsAndRights(
            GeneralUtils.createIntegerArrFromCollection(ProjectBL.getAncestorProjects(projects[0])),
            arrRights));
    if (personIDs == null || personIDs.isEmpty()) {
        return personIDs;
    }
    // verify whether all the persons found for the first project appear
    // also in the next projects
    // if not remove it from the result
    for (int i = 1; i < projects.length; i++) {
        Set<Integer> personIDsForProject = getPersonsFromAcList(AccessControlBL.loadByProjectsAndRights(
                GeneralUtils.createIntegerArrFromCollection(ProjectBL.getAncestorProjects(projects[i])),
                arrRights));
        if (personIDsForProject == null || personIDsForProject.isEmpty()) {
            return new HashSet<Integer>();
        }
        personIDs.retainAll(personIDsForProject);
    }
    return personIDs;
}
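This last example is the same incremental-intersection pattern applied across projects: the first project's person set seeds the result, each remaining project narrows it, and the method returns an empty set early when a project contributes nobody. One further refinement, shown as a sketch (personsForProject is a hypothetical stand-in for the AccessControlBL lookup): testing the intersection itself after each retainAll allows bailing out as soon as it becomes empty, which the original only does for the per-project set.

for (int i = 1; i < projects.length; i++) {
    Set<Integer> personIDsForProject = personsForProject(projects[i]); // assumed helper
    if (personIDsForProject == null || personIDsForProject.isEmpty()) {
        return new HashSet<Integer>(); // empty operand -> empty intersection
    }
    personIDs.retainAll(personIDsForProject);
    if (personIDs.isEmpty()) {
        return personIDs; // intersection already empty; later projects cannot add members
    }
}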