List of usage examples for java.util.Set retainAll
boolean retainAll(Collection<?> c);
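Set.retainAll keeps only the elements of the set that are also contained in the given collection (an in-place intersection) and returns true if the set changed as a result of the call; it may throw UnsupportedOperationException if the set is unmodifiable. The snippet below is a minimal, self-contained sketch of that behavior; it is not taken from any of the source files listed on this page.

import java.util.HashSet;
import java.util.Set;

public class RetainAllExample {
    public static void main(String[] args) {
        Set<String> fields = new HashSet<>(Set.of("id", "first-name", "headline"));
        Set<String> allowed = Set.of("id", "first-name", "last-name");

        // Keep only the elements also present in 'allowed';
        // returns true because "headline" is removed.
        boolean changed = fields.retainAll(allowed);

        System.out.println(changed); // true
        System.out.println(fields);  // e.g. [id, first-name] (iteration order not guaranteed)
    }
}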
From source file:com.google.code.linkedinapi.client.impl.BaseLinkedInApiClient.java
/**
 * {@inheritDoc}
 */
@Override
public Connections getConnectionsById(String id, Set<ProfileField> profileFields, Date modificationDate,
        ConnectionModificationType modificationType) {
    assertNotNullOrEmpty("id", id);
    assertNotNull("profile fields", profileFields);
    assertNotNull("modification date", modificationDate);
    assertNotNull("modification type", modificationType);
    profileFields.retainAll(CONNECTION_FIELDS);
    LinkedInApiUrlBuilder builder = createLinkedInApiUrlBuilder(LinkedInApiUrls.GET_CONNECTIONS_BY_ID);
    String apiUrl = builder.withField(ParameterNames.ID, id)
            .withFieldEnumSet(ParameterNames.FIELD_SELECTORS, profileFields)
            .withParameter(ParameterNames.MODIFIED_SINCE, String.valueOf(modificationDate.getTime()))
            .withParameterEnum(ParameterNames.MODIFICATION, modificationType).buildUrl();
    return readResponse(Connections.class, callApiMethod(apiUrl));
}
From source file:com.google.code.linkedinapi.client.impl.BaseLinkedInApiClient.java
/**
 * {@inheritDoc}
 */
@Override
public Connections getConnectionsByUrl(String url, Set<ProfileField> profileFields, Date modificationDate,
        ConnectionModificationType modificationType) {
    assertNotNullOrEmpty("url", url);
    assertNotNull("profile fields", profileFields);
    assertNotNull("modification date", modificationDate);
    assertNotNull("modification type", modificationType);
    profileFields.retainAll(CONNECTION_FIELDS);
    LinkedInApiUrlBuilder builder = createLinkedInApiUrlBuilder(LinkedInApiUrls.GET_CONNECTIONS_BY_URL);
    String apiUrl = builder.withField(ParameterNames.URL, url, true)
            .withFieldEnumSet(ParameterNames.FIELD_SELECTORS, profileFields)
            .withParameter(ParameterNames.MODIFIED_SINCE, String.valueOf(modificationDate.getTime()))
            .withParameterEnum(ParameterNames.MODIFICATION, modificationType).buildUrl();
    return readResponse(Connections.class, callApiMethod(apiUrl));
}
From source file:org.apache.pig.backend.hadoop.executionengine.tez.plan.optimizer.MultiQueryOptimizerTez.java
@Override
public void visitTezOp(TezOperator tezOp) throws VisitorException {
    try {
        if (!tezOp.isSplitter()) {
            return;
        }
        List<TezOperator> splittees = new ArrayList<TezOperator>();
        Set<TezOperator> mergedNonPackageInputSuccessors = new HashSet<TezOperator>();
        List<TezOperator> successors = getPlan().getSuccessors(tezOp);
        for (TezOperator successor : successors) {
            List<TezOperator> predecessors = new ArrayList<TezOperator>(getPlan().getPredecessors(successor));
            predecessors.remove(tezOp);
            if (!predecessors.isEmpty()) {
                // If has other dependency that conflicts with other splittees, don't merge into split
                // For eg: self replicate join/skewed join
                // But if replicate input is from a different operator allow it, but ensure
                // that we don't have more than one input coming from that operator into the split
                // Check if other splittees or its predecessors (till the root) are not present in
                // the predecessors (till the root) of this splittee.
                // Need to check the whole predecessors hierarchy till root as the conflict
                // could be multiple levels up
                for (TezOperator predecessor : getPlan().getPredecessors(successor)) {
                    if (predecessor != tezOp) {
                        predecessors.add(predecessor);
                        addAllPredecessors(predecessor, predecessors);
                    }
                }
                List<TezOperator> toMergeSuccPredecessors = new ArrayList<TezOperator>(successors);
                toMergeSuccPredecessors.remove(successor);
                for (TezOperator splittee : splittees) {
                    for (TezOperator spliteePred : getPlan().getPredecessors(splittee)) {
                        if (spliteePred != tezOp) {
                            toMergeSuccPredecessors.add(spliteePred);
                            addAllPredecessors(spliteePred, toMergeSuccPredecessors);
                        }
                    }
                }
                if (predecessors.removeAll(toMergeSuccPredecessors)) {
                    continue;
                }
            }
            // Split contains right input of different skewed joins
            if (successor.getSampleOperator() != null && tezOp.getSampleOperator() != null
                    && !successor.getSampleOperator().equals(tezOp.getSampleOperator())) {
                continue;
            }
            // Detect diamond shape into successor operator, we cannot merge it into split,
            // since Tez does not handle double edge between vertexes
            // Successor could be
            //  - union operator (if no union optimizer changing it to vertex group which supports multiple edges)
            //  - self replicate join, self skewed join or scalar
            //  - POPackage (Self hash joins can write to same output edge and is handled by POShuffleTezLoad)
            Set<TezOperator> mergedSuccessors = new HashSet<TezOperator>();
            // These successors should not be merged due to diamond shape
            Set<TezOperator> toNotMergeSuccessors = new HashSet<TezOperator>();
            // These successors can be merged
            Set<TezOperator> toMergeSuccessors = new HashSet<TezOperator>();
            // These successors (Scalar, POFRJoinTez) can be merged if they are the only input.
            // Only in case of POPackage(POShuffleTezLoad) multiple inputs can be handled from a Split
            Set<TezOperator> nonPackageInputSuccessors = new HashSet<TezOperator>();
            boolean canMerge = true;
            mergedSuccessors.addAll(successors);
            for (TezOperator splittee : splittees) {
                if (getPlan().getSuccessors(splittee) != null) {
                    mergedSuccessors.addAll(getPlan().getSuccessors(splittee));
                }
            }
            if (getPlan().getSuccessors(successor) != null) {
                for (TezOperator succSuccessor : getPlan().getSuccessors(successor)) {
                    if (succSuccessor.isUnion()) {
                        if (!(unionOptimizerOn && UnionOptimizer.isOptimizable(succSuccessor,
                                unionUnsupportedStoreFuncs))) {
                            toNotMergeSuccessors.add(succSuccessor);
                        } else {
                            toMergeSuccessors.add(succSuccessor);
                            List<TezOperator> unionSuccessors = getPlan().getSuccessors(succSuccessor);
                            if (unionSuccessors != null) {
                                for (TezOperator unionSuccessor : unionSuccessors) {
                                    if (TezCompilerUtil.isNonPackageInput(
                                            succSuccessor.getOperatorKey().toString(), unionSuccessor)) {
                                        canMerge = canMerge ? nonPackageInputSuccessors.add(unionSuccessor) : false;
                                    } else {
                                        toMergeSuccessors.add(unionSuccessor);
                                    }
                                }
                            }
                        }
                    } else if (TezCompilerUtil.isNonPackageInput(successor.getOperatorKey().toString(),
                            succSuccessor)) {
                        // Output goes to scalar or POFRJoinTez instead of POPackage
                        // POPackage/POShuffleTezLoad can handle multiple inputs from a Split.
                        // But if input is sent to any other operator like
                        // scalar, POFRJoinTez then we need to ensure it is the only one.
                        canMerge = canMerge ? nonPackageInputSuccessors.add(succSuccessor) : false;
                    } else {
                        toMergeSuccessors.add(succSuccessor);
                    }
                }
            }
            if (canMerge) {
                if (!nonPackageInputSuccessors.isEmpty() || !mergedNonPackageInputSuccessors.isEmpty()) {
                    // If a non-POPackage input successor is already merged or
                    // if there is a POPackage and non-POPackage to be merged,
                    // then skip as it will become diamond shape
                    // For eg: POFRJoinTez+Scalar, POFRJoinTez/Scalar+POPackage
                    if (nonPackageInputSuccessors.removeAll(mergedSuccessors)
                            || toMergeSuccessors.removeAll(mergedNonPackageInputSuccessors)
                            || toMergeSuccessors.removeAll(nonPackageInputSuccessors)) {
                        continue;
                    }
                }
            } else {
                continue;
            }
            mergedSuccessors.retainAll(toNotMergeSuccessors);
            if (mergedSuccessors.isEmpty()) {
                // no shared edge after merge
                splittees.add(successor);
                mergedNonPackageInputSuccessors.addAll(nonPackageInputSuccessors);
            }
        }
        if (splittees.size() == 0) {
            return;
        }
        if (splittees.size() == 1 && successors.size() == 1) {
            // We don't need a POSplit here, we can merge the splittee into spliter
            PhysicalOperator firstNodeLeaf = tezOp.plan.getLeaves().get(0);
            PhysicalOperator firstNodeLeafPred = tezOp.plan.getPredecessors(firstNodeLeaf).get(0);
            TezOperator singleSplitee = splittees.get(0);
            PhysicalOperator secondNodeRoot = singleSplitee.plan.getRoots().get(0);
            PhysicalOperator secondNodeSucc = singleSplitee.plan.getSuccessors(secondNodeRoot).get(0);
            tezOp.plan.remove(firstNodeLeaf);
            singleSplitee.plan.remove(secondNodeRoot);
            tezOp.plan.merge(singleSplitee.plan);
            tezOp.plan.connect(firstNodeLeafPred, secondNodeSucc);
            addSubPlanPropertiesToParent(tezOp, singleSplitee);
            removeSplittee(getPlan(), tezOp, singleSplitee);
        } else {
            POValueOutputTez valueOutput = (POValueOutputTez) tezOp.plan.getLeaves().get(0);
            POSplit split = new POSplit(OperatorKey.genOpKey(valueOutput.getOperatorKey().getScope()));
            split.copyAliasFrom(valueOutput);
            for (TezOperator splitee : splittees) {
                PhysicalOperator spliteeRoot = splitee.plan.getRoots().get(0);
                splitee.plan.remove(spliteeRoot);
                split.addPlan(splitee.plan);
                addSubPlanPropertiesToParent(tezOp, splitee);
                removeSplittee(getPlan(), tezOp, splitee);
                valueOutput.removeOutputKey(splitee.getOperatorKey().toString());
            }
            if (valueOutput.getTezOutputs().length > 0) {
                // We still need valueOutput
                PhysicalPlan phyPlan = new PhysicalPlan();
                phyPlan.addAsLeaf(valueOutput);
                split.addPlan(phyPlan);
            }
            PhysicalOperator pred = tezOp.plan.getPredecessors(valueOutput).get(0);
            tezOp.plan.disconnect(pred, valueOutput);
            tezOp.plan.remove(valueOutput);
            tezOp.plan.add(split);
            tezOp.plan.connect(pred, split);
        }
    } catch (PlanException e) {
        throw new VisitorException(e);
    }
}
From source file:com.google.code.linkedinapi.client.impl.BaseLinkedInApiClient.java
/**
 * {@inheritDoc}
 */
@Override
public Connections getConnectionsForCurrentUser(Set<ProfileField> profileFields, int start, int count,
        Date modificationDate, ConnectionModificationType modificationType) {
    assertPositiveNumber("start", start);
    assertPositiveNumber("count", count);
    assertNotNull("profile fields", profileFields);
    assertNotNull("modification date", modificationDate);
    assertNotNull("modification type", modificationType);
    profileFields.retainAll(CONNECTION_FIELDS);
    LinkedInApiUrlBuilder builder = createLinkedInApiUrlBuilder(
            LinkedInApiUrls.GET_CONNECTIONS_FOR_CURRENT_USER);
    String apiUrl = builder.withFieldEnumSet(ParameterNames.FIELD_SELECTORS, profileFields)
            .withParameter(ParameterNames.START, String.valueOf(start))
            .withParameter(ParameterNames.COUNT, String.valueOf(count))
            .withParameter(ParameterNames.MODIFIED_SINCE, String.valueOf(modificationDate.getTime()))
            .withParameterEnum(ParameterNames.MODIFICATION, modificationType).buildUrl();
    return readResponse(Connections.class, callApiMethod(apiUrl));
}
From source file:gate.creole.ontology.impl.sesame.OntologyServiceImplSesame.java
/**
 * This method returns a set of all properties where the current resource has
 * been specified as one of the range resources. Please note that this method
 * is different from the getAllSetProperties() method which returns a set of
 * properties set on the resource. For each property in the ontology, this
 * method checks if the current resource is a valid range. If so, the property
 * is said to be applicable, and otherwise not.
 *
 * @return
 */
public Property[] getPropertiesWithResourceAsRange(String theResourceURI) {
    List<Property> list = new ArrayList<Property>();
    HashSet<String> toCheck = new HashSet<String>();
    try {
        if (repositoryConnection.hasStatement(getResource(theResourceURI), makeSesameURI(RDF.TYPE.toString()),
                getResource(OWL.CLASS.toString()), true)) {
            String queryRep = string2Turtle(theResourceURI);
            String query = "Select distinct SUPER FROM {" + queryRep + "} rdfs:subClassOf {SUPER}"
                    + " WHERE SUPER!=" + queryRep + " AND SUPER != ALL ( "
                    + " select distinct B FROM {B} owl:equivalentClass {" + queryRep + "} )";
            addSerqlQueryResultToCollection(query, toCheck);
            toCheck.add(theResourceURI);
        } else if (repositoryConnection.hasStatement(getResource(theResourceURI),
                makeSesameURI(RDF.TYPE.toString()), getResource(RDF.PROPERTY.toString()), true)) {
            String queryRep = string2Turtle(theResourceURI);
            String query = "Select distinct SUPER FROM {" + queryRep + "} rdfs:subPropertyOf {SUPER}"
                    + " WHERE SUPER!=" + queryRep + " AND SUPER != ALL ( "
                    + " select distinct B FROM {B} owl:equivalentProperty {" + queryRep + "})";
            addSerqlQueryResultToCollection(query, toCheck);
            toCheck.add(theResourceURI);
        } else {
            // it is an instance
            String query = "Select DISTINCT B from {X} rdf:type {B} WHERE X=<" + theResourceURI + ">";
            addSerqlQueryResultToCollection(query, toCheck, true);
        }
    } catch (Exception e) {
        throw new GateOntologyException("Could not get statements", e);
    }
    String query = "Select distinct X FROM {X} rdf:type {<" + OWL.ANNOTATIONPROPERTY + ">}";
    UtilTupleQueryIterator result = performSerqlQuery(query);
    while (result.hasNext()) {
        String anAnnProp = result.nextFirstAsString();
        list.add(new Property(OConstants.ANNOTATION_PROPERTY, anAnnProp));
    }
    result.close();
    boolean allowSystemStatements = this.returnSystemStatements;
    this.returnSystemStatements = true;
    Property[] props = listToPropertyArray(list);
    this.returnSystemStatements = allowSystemStatements;
    // now we obtain all datatype properties
    list = new ArrayList<Property>();
    query = "Select X FROM {X} rdf:type {<" + OWL.OBJECTPROPERTY + ">}";
    result = performSerqlQuery(query);
    while (result.hasNext()) {
        String anAnnProp = result.nextFirstAsString();
        OURI annOURI = ontology.createOURI(anAnnProp);
        // for each property we obtain its domain and search for the
        // resourceURI in it
        query = "select distinct Y from {<" + anAnnProp + ">} rdfs:range {Y}";
        Set<String> set = new HashSet<String>();
        addSerqlQueryResultToCollection(query, set, true);
        set = new HashSet<String>(reduceToMostSpecificClasses(set));
        byte type = OConstants.OBJECT_PROPERTY;
        if (set.isEmpty()) {
            if (isSymmetricProperty(annOURI)) {
                type = OConstants.SYMMETRIC_PROPERTY;
            } else if (isTransitiveProperty(annOURI)) {
                type = OConstants.TRANSITIVE_PROPERTY;
            }
            list.add(new Property(type, anAnnProp.toString()));
        }
        set.retainAll(toCheck);
        if (!set.isEmpty()) {
            if (isSymmetricProperty(annOURI)) {
                type = OConstants.SYMMETRIC_PROPERTY;
            } else if (isTransitiveProperty(annOURI)) {
                type = OConstants.TRANSITIVE_PROPERTY;
            }
            list.add(new Property(type, anAnnProp));
        }
    }
    result.close();
    Property[] props1 = listToPropertyArray(list);
    Property[] toProps = new Property[props.length + props1.length];
    for (int i = 0; i < props.length; i++) {
        toProps[i] = props[i];
    }
    for (int i = 0; i < props1.length; i++) {
        toProps[props.length + i] = props1[i];
    }
    return toProps;
}
From source file:com.google.code.linkedinapi.client.impl.BaseLinkedInApiClient.java
/**
 * {@inheritDoc}
 */
@Override
public Connections getConnectionsById(String id, Set<ProfileField> profileFields, int start, int count,
        Date modificationDate, ConnectionModificationType modificationType) {
    assertNotNullOrEmpty("id", id);
    assertPositiveNumber("start", start);
    assertPositiveNumber("count", count);
    assertNotNull("profile fields", profileFields);
    assertNotNull("modification date", modificationDate);
    assertNotNull("modification type", modificationType);
    profileFields.retainAll(CONNECTION_FIELDS);
    LinkedInApiUrlBuilder builder = createLinkedInApiUrlBuilder(LinkedInApiUrls.GET_CONNECTIONS_BY_ID);
    String apiUrl = builder.withField(ParameterNames.ID, id)
            .withFieldEnumSet(ParameterNames.FIELD_SELECTORS, profileFields)
            .withParameter(ParameterNames.START, String.valueOf(start))
            .withParameter(ParameterNames.COUNT, String.valueOf(count))
            .withParameter(ParameterNames.MODIFIED_SINCE, String.valueOf(modificationDate.getTime()))
            .withParameterEnum(ParameterNames.MODIFICATION, modificationType).buildUrl();
    return readResponse(Connections.class, callApiMethod(apiUrl));
}
From source file:com.google.code.linkedinapi.client.impl.BaseLinkedInApiClient.java
/**
 * {@inheritDoc}
 */
@Override
public Connections getConnectionsByUrl(String url, Set<ProfileField> profileFields, int start, int count,
        Date modificationDate, ConnectionModificationType modificationType) {
    assertNotNullOrEmpty("url", url);
    assertPositiveNumber("start", start);
    assertPositiveNumber("count", count);
    assertNotNull("profile fields", profileFields);
    assertNotNull("modification date", modificationDate);
    assertNotNull("modification type", modificationType);
    profileFields.retainAll(CONNECTION_FIELDS);
    LinkedInApiUrlBuilder builder = createLinkedInApiUrlBuilder(LinkedInApiUrls.GET_CONNECTIONS_BY_URL);
    String apiUrl = builder.withField(ParameterNames.URL, url, true)
            .withFieldEnumSet(ParameterNames.FIELD_SELECTORS, profileFields)
            .withParameter(ParameterNames.START, String.valueOf(start))
            .withParameter(ParameterNames.COUNT, String.valueOf(count))
            .withParameter(ParameterNames.MODIFIED_SINCE, String.valueOf(modificationDate.getTime()))
            .withParameterEnum(ParameterNames.MODIFICATION, modificationType).buildUrl();
    return readResponse(Connections.class, callApiMethod(apiUrl));
}
From source file:edu.cmu.tetrad.search.TestIndTestConditionalCorrelation.java
public void test14c6() {
    try {
        String graphDir = "/Users/josephramsey/Documents/LAB_NOTEBOOK.2012.04.20/2013.11.23/test14/";
        File _graphDir = new File(graphDir);
        if (!_graphDir.exists())
            _graphDir.mkdir();
        double penalty = 1.0;
        for (int minGraphs1 = 1; minGraphs1 <= 6; minGraphs1++) {
            File dir1 = new File(graphDir, "partition.consistent/partition.consistent.images");
            String filename1 = "graph.partition." + "images" + "." + penalty + "." + minGraphs1
                    + ".consistent.txt";
            File file1 = new File(dir1, filename1);
            Graph graph1 = GraphUtils.loadGraphTxt(file1);
            for (int minGraphs2 = 1; minGraphs2 <= 6; minGraphs2++) {
                String filename2 = "partition.consistent/partition.consistent.r3/" + "graph.partition.R3."
                        + penalty + "." + minGraphs1 + ".consistent.txt";
                File file2 = new File(graphDir, filename2);
                Graph graph2 = GraphUtils.loadGraphTxt(file2);
                graph2 = GraphUtils.replaceNodes(graph2, graph1.getNodes());
                Set<Edge> edges1 = new HashSet<Edge>(graph1.getEdges());
                Set<Edge> edges2 = new HashSet<Edge>(graph2.getEdges());
                Set<Edge> both = new HashSet<Edge>(edges1);
                both.retainAll(edges2);
                Graph intersection = new EdgeListGraph(graph1.getNodes());
                for (Edge edge : both) {
                    intersection.addEdge(edge);
                }
                writeFiveGraphFormats(graphDir, intersection,
                        "intersection." + minGraphs1 + "." + minGraphs2 + ".txt");
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}
From source file:com.google.gwt.emultest.java.util.TreeMapTest.java
public void testNavigableKeySet() {
    K[] keys = getSortedKeys();
    V[] values = getSortedValues();
    NavigableMap<K, V> map = createNavigableMap();
    map.put(keys[0], values[0]);
    Set<K> keySet = map.navigableKeySet();
    _assertEquals(keySet, map.navigableKeySet());
    map.put(keys[1], values[1]);
    map.put(keys[2], values[2]);
    _assertEquals(map.navigableKeySet(), keySet);
    _assertEquals(keySet, keySet);
    try {
        keySet.add(keys[3]);
        fail("should throw UnsupportedOperationException");
    } catch (UnsupportedOperationException expected) {
    }
    try {
        keySet.add(null);
        fail("should throw UnsupportedOperationException");
    } catch (UnsupportedOperationException expected) {
    }
    try {
        keySet.addAll(null);
        fail("should throw NullPointerException");
    } catch (NullPointerException expected) {
    }
    Collection<K> collection = new ArrayList<K>();
    keySet.addAll(collection);
    try {
        collection.add(keys[3]);
        keySet.addAll(collection);
        fail("should throw UnsupportedOperationException");
    } catch (UnsupportedOperationException expected) {
    }
    Iterator<K> iter = keySet.iterator();
    iter.next();
    iter.remove();
    assertFalse(map.containsKey(keys[0]));
    collection = new ArrayList<K>();
    collection.add(keys[2]);
    keySet.retainAll(collection);
    assertEquals(1, map.size());
    assertTrue(keySet.contains(keys[2]));
    keySet.removeAll(collection);
    _assertEmpty(map);
    map.put(keys[0], values[0]);
    assertEquals(1, map.size());
    assertTrue(keySet.contains(keys[0]));
    keySet.clear();
    _assertEmpty(map);
}
From source file:org.wso2.carbon.apimgt.impl.APIProviderImpl.java
/**
 * When publishing to external API stores is enabled, get only the published external API store details that are
 * stored in the db.
 *
 * @param apiId the API identifier that needs to be updated in the db
 * @throws org.wso2.carbon.apimgt.api.APIManagementException if failed to update the subscription status
 */
@Override
public Set<APIStore> getPublishedExternalAPIStores(APIIdentifier apiId) throws APIManagementException {
    Set<APIStore> storesSet;
    SortedSet<APIStore> configuredAPIStores = new TreeSet<APIStore>(new APIStoreNameComparator());
    configuredAPIStores.addAll(APIUtil.getExternalStores(tenantId));
    if (APIUtil.isAPIsPublishToExternalAPIStores(tenantId)) {
        storesSet = apiMgtDAO.getExternalAPIStoresDetails(apiId);
        // Retain only the stores that are contained in the configuration
        storesSet.retainAll(configuredAPIStores);
        return storesSet;
    } else {
        return null;
    }
}