List of usage examples for java.util.Set.retainAll
boolean retainAll(Collection<?> c);
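retainAll keeps only those elements of the receiving set that are also contained in the argument collection, turning the set into the intersection of the two; it mutates the set in place and returns true if the set changed. A minimal, self-contained sketch of the basic semantics:

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class RetainAllDemo {
    public static void main(String[] args) {
        Set<String> colors = new HashSet<>(Arrays.asList("red", "green", "blue"));
        // retainAll keeps only the elements also present in the argument collection
        boolean changed = colors.retainAll(Arrays.asList("green", "blue", "yellow"));
        System.out.println(changed); // true (the set was modified)
        System.out.println(colors);  // [green, blue] in some order
    }
}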
From source file: org.bdval.DAVMode.java
/**
 * Filter a task to keep only samples that match a split plan, split id and split type.
 *
 * @param task
 * @param splitPlan
 * @param splitId
 * @param splitType
 * @return
 */
private ClassificationTask filterBySplitPlan(final ClassificationTask task, final SplitPlan splitPlan,
        final int splitId, final String splitType) {
    ConditionIdentifiers conditionsIdentifiers = task.getConditionsIdentifiers();

    final Set<String> samplesForClass0 = conditionsIdentifiers.getLabelGroup(task.getFirstConditionName());
    ObjectSet<String> splitPlanSamples = splitPlan.getSampleIds(splitId, splitType);
    samplesForClass0.retainAll(splitPlanSamples);
    if (samplesForClass0.size() == 0) {
        throw new IllegalArgumentException(
                "Condition 0 (" + task.getFirstConditionName() + ") must have some samples.");
    }

    final ConditionIdentifiers cids = new ConditionIdentifiers();
    for (final String negativeSample : samplesForClass0) {
        cids.addIdentifier(task.getFirstConditionName().intern(), negativeSample);
    }

    final Set<String> samplesForClass1 = conditionsIdentifiers.getLabelGroup(task.getSecondConditionName());
    samplesForClass1.retainAll(splitPlanSamples);
    if (samplesForClass1.size() == 0) {
        throw new IllegalArgumentException(
                "Condition 1 (" + task.getSecondConditionName() + ") must have some samples.");
    }

    for (final String positiveSample : samplesForClass1) {
        cids.addIdentifier(task.getSecondConditionName().intern(), positiveSample);
    }

    task.setConditionsIdentifiers(cids);
    task.setNumberSamplesFirstCondition(samplesForClass0.size());
    task.setNumberSamplesSecondCondition(samplesForClass1.size());
    return task;
}
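One detail worth noting in the example above: retainAll modifies the set it is called on, so the set returned by getLabelGroup(...) is filtered in place. When the original collection must be preserved, copy it into a fresh HashSet first and intersect the copy. A minimal sketch of that defensive-copy pattern (the variable names are illustrative, not taken from DAVMode):

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class DefensiveCopyIntersection {
    public static void main(String[] args) {
        Set<String> allSamples = new HashSet<>(Arrays.asList("s1", "s2", "s3"));
        Set<String> splitSamples = new HashSet<>(Arrays.asList("s2", "s3", "s4"));
        // copy first, then retainAll, so allSamples is left untouched
        Set<String> retained = new HashSet<>(allSamples);
        retained.retainAll(splitSamples);
        System.out.println(retained);   // [s2, s3] in some order
        System.out.println(allSamples); // still [s1, s2, s3]
    }
}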
From source file: org.apache.hms.controller.CommandHandler.java
/**
 * Check all clusters for duplicated nodes in use.
 * @param nm
 * @return true if a node is already used by another cluster.
 * @throws InterruptedException
 * @throws KeeperException
 * @throws IOException
 */
private boolean checkNodesInUse(NodesManifest nm) throws KeeperException, InterruptedException {
    Set<String> hosts = convertRolesToHosts(nm, null);
    List<String> children = zk.getChildren(CommonConfigurationKeys.ZOOKEEPER_CLUSTER_ROOT_DEFAULT, null);
    Stat stat = new Stat();
    boolean result = false;
    for (String cluster : children) {
        try {
            LOG.info("Check " + cluster);
            String path = ZookeeperUtil.getClusterPath(cluster);
            byte[] data = zk.getData(path, false, stat);
            ClusterHistory ch = JAXBUtil.read(data, ClusterHistory.class);
            int index = ch.getHistory().size() - 1;
            ClusterManifest cm = ch.getHistory().get(index);
            Set<String> test = convertRolesToHosts(cm.getNodes(), null);
            hosts.retainAll(test);
            if (!hosts.isEmpty()) {
                result = true;
                break;
            }
        } catch (Exception e) {
            LOG.error(ExceptionUtil.getStackTrace(e));
        }
    }
    return result;
}
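The example above detects overlap by intersecting hosts with each cluster's host set and testing isEmpty(). When only a yes/no overlap answer is needed and the receiving set should not be modified, java.util.Collections.disjoint is a non-mutating alternative; a small sketch of that general JDK idiom (not how the HMS code above is written):

import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;

public class OverlapCheck {
    public static void main(String[] args) {
        Set<String> hostsInUse = new HashSet<>(Arrays.asList("host1", "host2"));
        Set<String> clusterHosts = new HashSet<>(Arrays.asList("host2", "host3"));
        // true when the two collections share at least one element; neither set is modified
        boolean overlap = !Collections.disjoint(hostsInUse, clusterHosts);
        System.out.println(overlap); // true
    }
}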
From source file: org.chromium.chrome.browser.payments.PaymentRequestImpl.java
/**
 * Called after retrieving the list of payment instruments in an app.
 */
@Override
public void onInstrumentsReady(PaymentApp app, List<PaymentInstrument> instruments) {
    if (mClient == null) return;
    mPendingApps.remove(app);

    // Place the instruments into either "autofill" or "non-autofill" list to be displayed when
    // all apps have responded.
    if (instruments != null) {
        List<PaymentInstrument> nonAutofillInstruments = new ArrayList<>();
        for (int i = 0; i < instruments.size(); i++) {
            PaymentInstrument instrument = instruments.get(i);
            Set<String> instrumentMethodNames = new HashSet<>(instrument.getInstrumentMethodNames());
            instrumentMethodNames.retainAll(mMethodData.keySet());
            if (!instrumentMethodNames.isEmpty()) {
                if (instrument instanceof AutofillPaymentInstrument) {
                    mPendingAutofillInstruments.add(instrument);
                } else {
                    nonAutofillInstruments.add(instrument);
                }
            } else {
                instrument.dismissInstrument();
            }
        }
        if (!nonAutofillInstruments.isEmpty()) {
            Collections.sort(nonAutofillInstruments, INSTRUMENT_FRECENCY_COMPARATOR);
            mPendingInstruments.add(nonAutofillInstruments);
        }
    }

    // Some payment apps still have not responded. Continue waiting for them.
    if (!mPendingApps.isEmpty()) return;

    if (disconnectIfNoPaymentMethodsSupported()) return;

    // Load the validation rules for each unique region code in the credit card billing
    // addresses and check for validity.
    Set<String> uniqueCountryCodes = new HashSet<>();
    for (int i = 0; i < mPendingAutofillInstruments.size(); ++i) {
        assert mPendingAutofillInstruments.get(i) instanceof AutofillPaymentInstrument;
        AutofillPaymentInstrument creditCard = (AutofillPaymentInstrument) mPendingAutofillInstruments.get(i);

        String countryCode = AutofillAddress.getCountryCode(creditCard.getBillingAddress());
        if (!uniqueCountryCodes.contains(countryCode)) {
            uniqueCountryCodes.add(countryCode);
            PersonalDataManager.getInstance().loadRulesForRegion(countryCode);
        }

        // If there's a card on file with a valid number and a name, then
        // PaymentRequest.canMakePayment() returns true.
        mCanMakePayment |= creditCard.isValidCard();
    }

    // List order:
    // > Non-autofill instruments.
    // > Complete autofill instruments.
    // > Incomplete autofill instruments.
    Collections.sort(mPendingAutofillInstruments, COMPLETENESS_COMPARATOR);
    Collections.sort(mPendingInstruments, APP_FRECENCY_COMPARATOR);
    if (!mPendingAutofillInstruments.isEmpty()) {
        mPendingInstruments.add(mPendingAutofillInstruments);
    }

    // Log the number of suggested credit cards.
    mJourneyLogger.setNumberOfSuggestionsShown(PaymentRequestJourneyLogger.SECTION_CREDIT_CARDS,
            mPendingAutofillInstruments.size());

    // Possibly pre-select the first instrument on the list.
    int selection = SectionInformation.NO_SELECTION;
    if (!mPendingInstruments.isEmpty()) {
        PaymentInstrument first = mPendingInstruments.get(0).get(0);
        if (first instanceof AutofillPaymentInstrument) {
            AutofillPaymentInstrument creditCard = (AutofillPaymentInstrument) first;
            if (creditCard.isComplete()) selection = 0;
        } else {
            // If a payment app is available, then PaymentRequest.canMakePayment() returns true.
            mCanMakePayment = true;
            selection = 0;
        }
    }

    CanMakePaymentQuery query = sCanMakePaymentQueries.get(mOrigin);
    if (query != null) query.setResponse(mCanMakePayment);

    // The list of payment instruments is ready to display.
    List<PaymentInstrument> sortedInstruments = new ArrayList<>();
    for (List<PaymentInstrument> a : mPendingInstruments) {
        sortedInstruments.addAll(a);
    }
    mPaymentMethodsSection = new SectionInformation(PaymentRequestUI.TYPE_PAYMENT_METHODS, selection,
            sortedInstruments);

    mPendingInstruments.clear();

    updateInstrumentModifiedTotals();

    // UI has requested the full list of payment instruments. Provide it now.
    if (mPaymentInformationCallback != null) providePaymentInformation();

    triggerPaymentAppUiSkipIfApplicable();
}
From source file: pt.ist.fenixedu.integration.api.FenixAPIv1.java
/**
 * Written evaluations for students
 *
 * @summary Evaluations
 * @return enrolled and not enrolled student's evaluations
 * @servicetag EVALUATIONS_SCOPE
 */
@OAuthEndpoint(EVALUATIONS_SCOPE)
@GET
@Path("person/evaluations")
@Produces(JSON_UTF8)
public List<FenixCourseEvaluation.WrittenEvaluation> evaluations(@Context HttpServletResponse response,
        @Context HttpServletRequest request, @Context ServletContext context) {

    Person person = getPerson();
    final Student student = person.getStudent();
    if (!new ActiveStudentsGroup().isMember(person.getUser()) || student == null) {
        return new ArrayList<FenixCourseEvaluation.WrittenEvaluation>();
    }

    List<FenixCourseEvaluation.WrittenEvaluation> evaluations = new ArrayList<>();

    ExecutionYear executionYear = ExecutionYear.readCurrentExecutionYear();
    for (Registration registration : student.getRegistrationsSet()) {
        for (ExecutionSemester executionSemester : executionYear.getExecutionPeriodsSet()) {

            List<ExecutionCourse> studentExecutionCourses = registration
                    .getAttendingExecutionCoursesFor(executionSemester);

            List<Evaluation> unenroledEvaluation = new ArrayList<Evaluation>();
            unenroledEvaluation.addAll(registration.getUnenroledExams(executionSemester));
            unenroledEvaluation.addAll(registration.getUnenroledWrittenTests(executionSemester));

            List<Evaluation> enroledEvaluation = new ArrayList<Evaluation>();
            enroledEvaluation.addAll(registration.getEnroledExams(executionSemester));
            enroledEvaluation.addAll(registration.getEnroledWrittenTests(executionSemester));

            for (Evaluation evaluation : unenroledEvaluation) {
                Set<ExecutionCourse> examExecutionCourses = new HashSet<ExecutionCourse>(
                        evaluation.getAssociatedExecutionCoursesSet());
                examExecutionCourses.retainAll(studentExecutionCourses);
                evaluations.addAll(
                        processEvaluation(evaluation, examExecutionCourses, false, student, executionSemester));
            }

            for (Evaluation evaluation : enroledEvaluation) {
                Set<ExecutionCourse> examExecutionCourses = new HashSet<ExecutionCourse>(
                        evaluation.getAssociatedExecutionCoursesSet());
                examExecutionCourses.retainAll(studentExecutionCourses);
                evaluations.addAll(
                        processEvaluation(evaluation, examExecutionCourses, true, student, executionSemester));
            }
        }
    }
    return evaluations;
}
From source file: org.archiviststoolkit.mydomain.DomainAccessObjectImpl.java
/**
 * Find a collection of domain objects by direct hql query.
 *
 * @param editor an AT query editor
 * @return the collection of domain objects
 */
public final Collection findByQueryEditor(final QueryEditor editor, InfiniteProgressPanel progressPanel) {
    boolean includeComponents = false;
    if (persistentClass == Resources.class && editor.getIncludeComponents()) {
        includeComponents = true;
    }

    if (editor.getAlternateQuery()) {
        return findByQueryEditorAlt(editor, progressPanel);
    } else if (!includeComponents) {
        Session session = SessionFactory.getInstance().openSession(null, getPersistentClass(), true);
        Criteria criteria = processQueryEditorCriteria(session, editor.getClazz(), editor);

        // if searching digital object then need to see if to only search for parent digital objects
        if (persistentClass == DigitalObjects.class && !editor.getIncludeComponents()) {
            criteria.add(Restrictions.isNull("parent"));
        }

        Collection collection = criteria.list();
        SessionFactory.getInstance().closeSession(session);
        return collection;
    } else {
        Collection<ResourcesComponentsSearchResult> resourcesAndComponetsResults = new ArrayList<ResourcesComponentsSearchResult>();
        HashMap<DomainObject, String> contextMap = new HashMap<DomainObject, String>();
        HashMap<ResourcesComponents, Resources> componentParentResourceMap = new HashMap<ResourcesComponents, Resources>();

        Session session = SessionFactory.getInstance().openSession(null, getPersistentClass(), true);

        ATSearchCriterion comparison1 = editor.getCriterion1();
        ATSearchCriterion comparison2 = editor.getCriterion2();

        Criteria criteria = session.createCriteria(editor.getClazz());
        criteria.add(comparison1.getCiterion());
        Collection collection = criteria.list();

        if (comparison2.getCiterion() == null) {
            addContextInfo(contextMap, collection, comparison1.getContext());
            addResourcesCommonToComponetResultSet(collection, resourcesAndComponetsResults, contextMap);
            humanReadableSearchString = comparison1.getSearchString();
        } else { // we have a boolean search
            Set returnCollection = new HashSet(collection);

            criteria = session.createCriteria(editor.getClazz());
            criteria.add(comparison2.getCiterion());
            Collection collection2 = criteria.list();

            if (editor.getChosenBoolean1().equalsIgnoreCase("and")) {
                returnCollection.retainAll(collection2);
                humanReadableSearchString = comparison1.getSearchString() + " and " + comparison2.getSearchString();
            } else {
                returnCollection.addAll(collection2);
                humanReadableSearchString = comparison1.getSearchString() + " or " + comparison2.getSearchString();
            }

            addContextInfo(contextMap, collection, comparison1.getContext());
            addContextInfo(contextMap, collection2, comparison2.getContext());
            addResourcesCommonToComponetResultSet(returnCollection, resourcesAndComponetsResults, contextMap);
        }
        SessionFactory.getInstance().closeSession(session);

        Resources resource;
        // addResourcesCommonToComponetResultSet(collection, resourcesAndComponetsResults, null);
        ResourcesDAO resourceDao = new ResourcesDAO();

        progressPanel.setTextLine("Searching for components that match the criteria", 2);
        session = SessionFactory.getInstance().openSession(ResourcesComponents.class);
        criteria = session.createCriteria(ResourcesComponents.class);
        criteria.add(comparison1.getCiterion());
        Collection components = criteria.list();
        addContextInfo(contextMap, components, comparison1.getContext());

        ResourcesComponents component;
        if (comparison2.getCiterion() == null) {
            int numberOfComponents = components.size();
            int count = 1;
            for (Object object : components) {
                component = (ResourcesComponents) object;
                progressPanel.setTextLine(
                        "Gathering resources by component matches " + count++ + " of " + numberOfComponents, 2);
                resource = resourceDao.findResourceByComponent(component);
                if (doesUserHaveAccessRightsToResource(resource)) {
                    resourcesAndComponetsResults.add(
                            new ResourcesComponentsSearchResult(resource, component, comparison1.getContext()));
                }
            }
            SessionFactory.getInstance().closeSession(session);
            return resourcesAndComponetsResults;
        } else {
            criteria = session.createCriteria(ResourcesComponents.class);
            criteria.add(comparison2.getCiterion());
            Collection components2 = criteria.list();
            addContextInfo(contextMap, components2, comparison2.getContext());

            Set returnCollection = new HashSet(components);
            if (editor.getChosenBoolean1().equalsIgnoreCase("and")) {
                returnCollection.retainAll(components2);
            } else {
                returnCollection.addAll(components2);
            }

            // find the parents for all the components
            for (Object object : returnCollection) {
                int numberOfComponents = components.size();
                int count = 1;
                component = (ResourcesComponents) object;
                progressPanel.setTextLine(
                        "Gathering resources by component matches " + count++ + " of " + numberOfComponents, 2);
                resource = resourceDao.findResourceByComponent(component);
                componentParentResourceMap.put(component, resource);
            }
            addResourcesCommonToComponetResultSet(returnCollection, resourcesAndComponetsResults, contextMap,
                    componentParentResourceMap);
            return resourcesAndComponetsResults;
        }

        // criteria = processQueryEditorCriteria(session, ResourcesComponents.class, editor);
        // // Collection components = criteria.list();
        // // ResourcesComponents component;
        // int numberOfComponents = components.size();
        // int count = 1;
        // for (Object object : components) {
        //     component = (ResourcesComponents) object;
        //     progressPanel.setTextLine("Gathering resources by component matches " + count++ + " of " + numberOfComponents, 2);
        //     resource = resourceDao.findResourceByComponent(component);
        //     resourcesAndComponetsResults.add(new ResourcesComponentsSearchResult(resource, component, "dummy string"));
        // }
        // SessionFactory.getInstance().closeSession(session);
        // return resourcesAndComponetsResults;
    }
}
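In the boolean-search branches above, the same pair of calls implements AND and OR: retainAll(collection2) narrows the first result set to the intersection, while addAll(collection2) widens it to the union. A reduced, standalone sketch of that toggle (names are hypothetical, not from the ArchivistsToolkit API):

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class BooleanSearchCombine {
    // AND keeps only records matched by both criteria; OR keeps records matched by either
    static <T> Set<T> combine(Set<T> firstResults, Set<T> secondResults, boolean and) {
        Set<T> combined = new HashSet<>(firstResults); // copy so the inputs stay intact
        if (and) {
            combined.retainAll(secondResults);
        } else {
            combined.addAll(secondResults);
        }
        return combined;
    }

    public static void main(String[] args) {
        Set<String> a = new HashSet<>(Arrays.asList("r1", "r2"));
        Set<String> b = new HashSet<>(Arrays.asList("r2", "r3"));
        System.out.println(combine(a, b, true));  // [r2]
        System.out.println(combine(a, b, false)); // [r1, r2, r3] in some order
    }
}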
From source file: org.apache.solr.client.solrj.impl.CloudSolrClient.java
private NamedList<Object> directUpdate(AbstractUpdateRequest request, String collection,
        ClusterState clusterState) throws SolrServerException {
    UpdateRequest updateRequest = (UpdateRequest) request;
    ModifiableSolrParams params = (ModifiableSolrParams) request.getParams();
    ModifiableSolrParams routableParams = new ModifiableSolrParams();
    ModifiableSolrParams nonRoutableParams = new ModifiableSolrParams();

    if (params != null) {
        nonRoutableParams.add(params);
        routableParams.add(params);
        for (String param : NON_ROUTABLE_PARAMS) {
            routableParams.remove(param);
        }
    }

    if (collection == null) {
        throw new SolrServerException(
                "No collection param specified on request and no default collection has been set.");
    }

    // Check to see if the collection is an alias.
    Aliases aliases = zkStateReader.getAliases();
    if (aliases != null) {
        Map<String, String> collectionAliases = aliases.getCollectionAliasMap();
        if (collectionAliases != null && collectionAliases.containsKey(collection)) {
            collection = collectionAliases.get(collection);
        }
    }

    DocCollection col = getDocCollection(clusterState, collection, null);

    DocRouter router = col.getRouter();

    if (router instanceof ImplicitDocRouter) {
        // short circuit as optimization
        return null;
    }

    // Create the URL map, which is keyed on slice name.
    // The value is a list of URLs for each replica in the slice.
    // The first value in the list is the leader for the slice.
    final Map<String, List<String>> urlMap = buildUrlMap(col);
    final Map<String, LBHttpSolrClient.Req> routes = (urlMap == null ? null
            : updateRequest.getRoutes(router, col, urlMap, routableParams, this.idField));
    if (routes == null) {
        if (directUpdatesToLeadersOnly && hasInfoToFindLeaders(updateRequest, idField)) {
            // we have info (documents with ids and/or ids to delete) with
            // which to find the leaders but we could not find (all of) them
            throw new SolrException(ErrorCode.SERVICE_UNAVAILABLE,
                    "directUpdatesToLeadersOnly==true but could not find leader(s)");
        } else {
            // we could not find a leader or routes yet - use unoptimized general path
            return null;
        }
    }

    final NamedList<Throwable> exceptions = new NamedList<>();
    final NamedList<NamedList> shardResponses = new NamedList<>(routes.size() + 1); // +1 for deleteQuery

    long start = System.nanoTime();

    if (parallelUpdates) {
        final Map<String, Future<NamedList<?>>> responseFutures = new HashMap<>(routes.size());
        for (final Map.Entry<String, LBHttpSolrClient.Req> entry : routes.entrySet()) {
            final String url = entry.getKey();
            final LBHttpSolrClient.Req lbRequest = entry.getValue();
            try {
                MDC.put("CloudSolrClient.url", url);
                responseFutures.put(url, threadPool.submit(() -> lbClient.request(lbRequest).getResponse()));
            } finally {
                MDC.remove("CloudSolrClient.url");
            }
        }

        for (final Map.Entry<String, Future<NamedList<?>>> entry : responseFutures.entrySet()) {
            final String url = entry.getKey();
            final Future<NamedList<?>> responseFuture = entry.getValue();
            try {
                shardResponses.add(url, responseFuture.get());
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
                throw new RuntimeException(e);
            } catch (ExecutionException e) {
                exceptions.add(url, e.getCause());
            }
        }

        if (exceptions.size() > 0) {
            Throwable firstException = exceptions.getVal(0);
            if (firstException instanceof SolrException) {
                SolrException e = (SolrException) firstException;
                throw new RouteException(ErrorCode.getErrorCode(e.code()), exceptions, routes);
            } else {
                throw new RouteException(ErrorCode.SERVER_ERROR, exceptions, routes);
            }
        }
    } else {
        for (Map.Entry<String, LBHttpSolrClient.Req> entry : routes.entrySet()) {
            String url = entry.getKey();
            LBHttpSolrClient.Req lbRequest = entry.getValue();
            try {
                NamedList<Object> rsp = lbClient.request(lbRequest).getResponse();
                shardResponses.add(url, rsp);
            } catch (Exception e) {
                if (e instanceof SolrException) {
                    throw (SolrException) e;
                } else {
                    throw new SolrServerException(e);
                }
            }
        }
    }

    UpdateRequest nonRoutableRequest = null;
    List<String> deleteQuery = updateRequest.getDeleteQuery();
    if (deleteQuery != null && deleteQuery.size() > 0) {
        UpdateRequest deleteQueryRequest = new UpdateRequest();
        deleteQueryRequest.setDeleteQuery(deleteQuery);
        nonRoutableRequest = deleteQueryRequest;
    }

    Set<String> paramNames = nonRoutableParams.getParameterNames();

    Set<String> intersection = new HashSet<>(paramNames);
    intersection.retainAll(NON_ROUTABLE_PARAMS);

    if (nonRoutableRequest != null || intersection.size() > 0) {
        if (nonRoutableRequest == null) {
            nonRoutableRequest = new UpdateRequest();
        }
        nonRoutableRequest.setParams(nonRoutableParams);
        List<String> urlList = new ArrayList<>();
        urlList.addAll(routes.keySet());
        Collections.shuffle(urlList, rand);
        LBHttpSolrClient.Req req = new LBHttpSolrClient.Req(nonRoutableRequest, urlList);
        try {
            LBHttpSolrClient.Rsp rsp = lbClient.request(req);
            shardResponses.add(urlList.get(0), rsp.getResponse());
        } catch (Exception e) {
            throw new SolrException(ErrorCode.SERVER_ERROR, urlList.get(0), e);
        }
    }

    long end = System.nanoTime();

    RouteResponse rr = condenseResponse(shardResponses,
            (int) TimeUnit.MILLISECONDS.convert(end - start, TimeUnit.NANOSECONDS));
    rr.setRouteResponses(shardResponses);
    rr.setRoutes(routes);
    return rr;
}
From source file: edu.stanford.muse.util.Util.java
/**
 * Returns an intersection as Set
 */
public static <E> Set<E> setIntersection(Collection<E> set1, Collection<E> set2) {
    // see
    // http://stackoverflow.com/questions/7574311/efficiently-compute-intersection-of-two-sets-in-java
    boolean set1IsLarger = set1.size() > set2.size();
    Set<E> cloneSet = new HashSet<E>(set1IsLarger ? set2 : set1);
    cloneSet.retainAll(set1IsLarger ? set1 : set2);
    return cloneSet;
    // if (s1 == null || s2 == null) return null; // let's trigger exception
    // as caller may want null to represent "all"
    // return Sets.intersection(castOrCloneAsSet(s1), castOrCloneAsSet(s2));
}
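The helper above deliberately copies the smaller of the two collections and retains against the larger, so retainAll iterates over as few elements as possible and neither input is modified. A hypothetical call site (not taken from the edu.stanford.muse sources):

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class SetIntersectionUsage {
    public static void main(String[] args) {
        // assumes the Util.setIntersection helper shown above is on the classpath
        Set<String> common = Util.setIntersection(
                Arrays.asList("alice", "bob", "carol"),
                new HashSet<>(Arrays.asList("bob", "dave")));
        System.out.println(common); // [bob]; neither input collection is modified
    }
}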
From source file: netdecoder.NetDecoder.java
public void applyEdgeAnalysis2RandomNetworks() throws Exception { //CONTINUE LATER...
    String dir = cmd.getOptionValue("out");
    //String d = cmd.getOptionValue("d");
    String filename = cmd.getOptionValue("f");
    String nc = cmd.getOptionValue("nc");
    String ncp = cmd.getOptionValue("ncp");
    String control = cmd.getOptionValue("control");
    String condition = cmd.getOptionValue("condition");
    Double corThreshold = Double.valueOf(cmd.getOptionValue("corThreshold"));
    Double ratioThreshold = Double.valueOf(cmd.getOptionValue("ratioThreshold"));
    Double ratioSink = Double.valueOf(cmd.getOptionValue("ratioSink"));
    Double ratioHidden = Double.valueOf(cmd.getOptionValue("ratioHidden"));
    Integer top = Integer.valueOf(cmd.getOptionValue("top"));

    RJava rJava = new RJava();
    Map<String, Node> controlNetwork = Serialization.deserialize(nc, Map.class);
    //List<String> controlNetworkPaths = Serialization.deserialize(ncp, List.class);

    //NEED TO FIGURE OUT HOW TO ITERATE THROUGH ALL NETWORKS/PATHS FROM THE FOLDERS...
    String ref = "RandomNetwork_ERnegative_Sources";
    for (int i = 0; i < 50; i++) { //50 randomizations
        String path = dir + condition + "/random/random_" + i + "/";
        String open = path + ref;
        System.out.println("Opening file in " + open);

        Map<String, Node> diseaseNetwork = Serialization.deserialize(open + "_subnet.ser", Map.class);
        //List<String> diseaseNetworkPaths = Serialization.deserialize(open + "_paths.ser", List.class);

        Map<String, Map<String, Double>> flowInNetworks = getFlowInNetworks(controlNetwork, diseaseNetwork);
        Set<Edge> cEdges = getAllEdges(controlNetwork);
        Set<Edge> dEdges = getAllEdges(diseaseNetwork);

        Map<String, Map<String, Double>> flowMatrix = new LinkedHashMap();
        if (!cmd.hasOption("overlap")) { //use only edges in both networks to infer key edges?
            dEdges.addAll(cEdges);
        } else {
            dEdges.retainAll(cEdges);
        }

        Map<String, Double> xxControl = new LinkedHashMap();
        Map<String, Double> xxDisease = new LinkedHashMap();
        for (Edge e : dEdges) {
            Edge eControl = NetDecoderUtils.getEdge(controlNetwork, e);
            Edge eDisease = NetDecoderUtils.getEdge(diseaseNetwork, e);
            if (eControl != null && eDisease != null) {
                xxControl.put(e.toString(), eControl.getFlow());
                xxDisease.put(e.toString(), eDisease.getFlow());
            } else if (eControl != null) {
                xxControl.put(e.toString(), eControl.getFlow());
                xxDisease.put(e.toString(), 0.0);
            } else if (eDisease != null) {
                xxControl.put(e.toString(), 0.0);
                xxDisease.put(e.toString(), eDisease.getFlow());
            }
        }
        flowMatrix.put(control, xxControl);
        flowMatrix.put(condition, xxDisease);

        Map<String, Map<String, Double>> aux = changeMapping(flowMatrix);
        String name = path + filename + "_flowMatrix";
        saveFlowMatrix(aux, control, condition, name + ".txt");
        rJava.plotBarplot(path, name, condition, corThreshold, ratioThreshold, filename);
    }
    countFeatures(dir, filename, condition);
}
From source file: org.structr.core.property.EntityNotionProperty.java
@Override
public SearchAttribute getSearchAttribute(SecurityContext securityContext, Occur occur, T searchValue,
        boolean exactMatch, final Query query) {

    final Predicate<GraphObject> predicate = query != null ? query.toPredicate() : null;
    final SourceSearchAttribute attr = new SourceSearchAttribute(occur);
    final Set<GraphObject> intersectionResult = new LinkedHashSet<>();
    boolean alreadyAdded = false;

    try {
        if (searchValue != null && !StringUtils.isBlank(searchValue.toString())) {

            final App app = StructrApp.getInstance(securityContext);
            final PropertyKey key = notion.getPrimaryPropertyKey();
            final PropertyConverter inputConverter = key != null ? key.inputConverter(securityContext) : null;

            // transform search values using input convert of notion property
            final Object transformedValue = inputConverter != null ? inputConverter.convert(searchValue) : searchValue;

            if (exactMatch) {

                Result<AbstractNode> result = app.nodeQuery(entityProperty.relatedType())
                        .and(key, transformedValue).getResult();

                for (AbstractNode node : result.getResults()) {

                    switch (occur) {

                        case MUST:
                            if (!alreadyAdded) {

                                // the first result is the basis of all subsequent intersections
                                intersectionResult.addAll(entityProperty.getRelatedNodesReverse(securityContext,
                                        node, declaringClass, predicate));

                                // the next additions are intersected with this one
                                alreadyAdded = true;

                            } else {

                                intersectionResult.retainAll(entityProperty.getRelatedNodesReverse(securityContext,
                                        node, declaringClass, predicate));
                            }
                            break;

                        case SHOULD:
                            intersectionResult.addAll(entityProperty.getRelatedNodesReverse(securityContext,
                                    node, declaringClass, predicate));
                            break;

                        case MUST_NOT:
                            break;
                    }
                }

            } else {

                Result<AbstractNode> result = app.nodeQuery(entityProperty.relatedType(), false)
                        .and(key, transformedValue, false).getResult();

                // loose search behaves differently, all results must be combined
                for (AbstractNode node : result.getResults()) {

                    intersectionResult.addAll(entityProperty.getRelatedNodesReverse(securityContext, node,
                            declaringClass, predicate));
                }
            }

            attr.setResult(intersectionResult);

        } else {

            // experimental filter attribute that
            // removes entities with a non-empty
            // value in the given field
            return new EmptySearchAttribute(this, null);
        }

    } catch (FrameworkException fex) {
        fex.printStackTrace();
    }

    return attr;
}
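The MUST branch above builds an AND across several related-node lookups: the first result seeds intersectionResult via addAll, and every subsequent result narrows it via retainAll. A standalone sketch of that progressive-intersection pattern (generic names, not the Structr API):

import java.util.Arrays;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

public class ProgressiveIntersection {
    // AND-combine several result sets: seed with the first, then retainAll each later one
    static <T> Set<T> intersectAll(List<Set<T>> resultSets) {
        Set<T> combined = new LinkedHashSet<>();
        boolean seeded = false;
        for (Set<T> partial : resultSets) {
            if (!seeded) {
                combined.addAll(partial); // the first result is the basis of all subsequent intersections
                seeded = true;
            } else {
                combined.retainAll(partial); // drop anything not present in this result as well
            }
        }
        return combined;
    }

    public static void main(String[] args) {
        List<Set<String>> results = Arrays.asList(
                new HashSet<>(Arrays.asList("n1", "n2", "n3")),
                new HashSet<>(Arrays.asList("n2", "n3")),
                new HashSet<>(Arrays.asList("n3", "n4")));
        System.out.println(intersectAll(results)); // [n3]
    }
}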