List of usage examples for java.util.Set.clear()
void clear();
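clear() removes all of the elements from the set; the set will be empty after the call returns, but remains usable. Before the real-world usages below, a minimal standalone sketch (class name and values are illustrative only):

import java.util.HashSet;
import java.util.Set;

public class SetClearDemo {
    public static void main(String[] args) {
        Set<String> names = new HashSet<>();
        names.add("alice");
        names.add("bob");
        System.out.println(names.size());    // 2

        names.clear();                        // removes every element in one call
        System.out.println(names.isEmpty()); // true

        names.add("carol");                   // the set can be refilled after clearing
        System.out.println(names.size());    // 1
    }
}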
From source file: io.ecarf.core.cloud.task.processor.reason.phase0.DoReasonTask4.java

/**
 * @param term
 * @param select
 * @param schemaTriples
 * @param rows
 * @param table
 * @param writer
 * @return
 * @throws IOException
 */
private int inferAndSaveTriplesToFile(Term term, List<String> select, Set<Triple> schemaTriples,
        BigInteger rows, String table, PrintWriter writer) throws IOException {
    int inferredTriples = 0;
    int failedTriples = 0;
    // loop through the instance triples probably stored in a file and generate all the triples matching the schema triples set
    try (BufferedReader r = new BufferedReader(new FileReader(term.getFilename()), Constants.GZIP_BUF_SIZE)) {
        Iterable<CSVRecord> records = CSVFormat.DEFAULT.parse(r);
        // records will contain lots of duplicates
        Set<String> inferredAlready = new HashSet<String>();
        try {
            for (CSVRecord record : records) {
                String values = ((select.size() == 1) ? record.get(0)
                        : StringUtils.join(record.values(), ','));
                if (!inferredAlready.contains(values)) {
                    inferredAlready.add(values);
                    NTriple instanceTriple = new NTriple();
                    if (select.size() == 1) {
                        instanceTriple.set(select.get(0), record.get(0));
                    } else {
                        instanceTriple.set(select, record.values());
                    }
                    for (Triple schemaTriple : schemaTriples) {
                        Rule rule = GenericRule.getRule(schemaTriple);
                        Triple inferredTriple = rule.head(schemaTriple, instanceTriple);
                        writer.println(inferredTriple.toCsv());
                        inferredTriples++;
                    }
                    // this is just to avoid any memory issues
                    if (inferredAlready.size() > MAX_CACHE) {
                        inferredAlready.clear();
                        log.info("Cleared cache of inferred terms");
                    }
                } else {
                    this.duplicates++;
                }
            }
        } catch (Exception e) {
            log.error("Failed to parse selected terms", e);
            failedTriples++;
        }
    }

    //inferredFiles.add(inferredTriplesFile);
    log.info("\nSelect Triples: " + rows + ", Inferred: " + inferredTriples + ", Triples for term: " + term
            + ", Failed Triples: " + failedTriples);
    return inferredTriples;
}
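The task above bounds memory by calling clear() on its dedup set once it grows past MAX_CACHE, accepting that later repeats may slip through. A stripped-down sketch of that bounded-cache pattern (class name, threshold, and input data are invented for illustration):

import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class BoundedDedupDemo {
    private static final int MAX_CACHE = 3; // tiny threshold just for demonstration

    public static void main(String[] args) {
        Set<String> seen = new HashSet<>();
        List<String> input = List.of("a", "b", "a", "c", "d", "e", "d");
        int duplicates = 0;
        for (String value : input) {
            if (!seen.add(value)) {            // add() returns false for a duplicate
                duplicates++;
                continue;
            }
            // ... process the first occurrence of value ...
            if (seen.size() > MAX_CACHE) {     // bound memory: drop the cache and start over
                seen.clear();                  // the final "d" is not caught, as in the task above
            }
        }
        System.out.println("duplicates skipped: " + duplicates); // 1
    }
}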
From source file: org.apache.hadoop.hbase.master.balancer.FavoredNodeAssignmentHelper.java

private ServerName[] multiRackCaseWithRestrictions(Map<ServerName, Set<HRegionInfo>> serverToPrimaries,
        Map<HRegionInfo, ServerName[]> secondaryAndTertiaryMap, String primaryRack, ServerName primaryRS,
        HRegionInfo regionInfo) throws IOException {
    // Random to choose the secondary and tertiary region server
    // from another rack to place the secondary and tertiary
    // Random to choose one rack except for the current rack
    Set<String> rackSkipSet = new HashSet<String>();
    rackSkipSet.add(primaryRack);
    String secondaryRack = getOneRandomRack(rackSkipSet);
    List<ServerName> serverList = getServersFromRack(secondaryRack);
    Set<ServerName> serverSet = new HashSet<ServerName>();
    serverSet.addAll(serverList);
    ServerName[] favoredNodes;
    if (serverList.size() >= 2) {
        // Randomly pick up two servers from this secondary rack
        // Skip the secondary for the tertiary placement
        // skip the servers which share the primary already
        Set<HRegionInfo> primaries = serverToPrimaries.get(primaryRS);
        Set<ServerName> skipServerSet = new HashSet<ServerName>();
        while (true) {
            ServerName[] secondaryAndTertiary = null;
            if (primaries.size() > 1) {
                // check where his tertiary and secondary are
                for (HRegionInfo primary : primaries) {
                    secondaryAndTertiary = secondaryAndTertiaryMap.get(primary);
                    if (secondaryAndTertiary != null) {
                        if (regionServerToRackMap.get(secondaryAndTertiary[0]).equals(secondaryRack)) {
                            skipServerSet.add(secondaryAndTertiary[0]);
                        }
                        if (regionServerToRackMap.get(secondaryAndTertiary[1]).equals(secondaryRack)) {
                            skipServerSet.add(secondaryAndTertiary[1]);
                        }
                    }
                }
            }
            if (skipServerSet.size() + 2 <= serverSet.size())
                break;
            skipServerSet.clear();
            rackSkipSet.add(secondaryRack);
            // we used all racks
            if (rackSkipSet.size() == getTotalNumberOfRacks()) {
                // remove the last two added and break
                skipServerSet.remove(secondaryAndTertiary[0]);
                skipServerSet.remove(secondaryAndTertiary[1]);
                break;
            }
            secondaryRack = getOneRandomRack(rackSkipSet);
            serverList = getServersFromRack(secondaryRack);
            serverSet = new HashSet<ServerName>();
            serverSet.addAll(serverList);
        }
        // Place the secondary RS
        ServerName secondaryRS = getOneRandomServer(secondaryRack, skipServerSet);
        skipServerSet.add(secondaryRS);
        // Place the tertiary RS
        ServerName tertiaryRS = getOneRandomServer(secondaryRack, skipServerSet);
        if (secondaryRS == null || tertiaryRS == null) {
            LOG.error("Cannot place the secondary and tertiary" + " region server for region "
                    + regionInfo.getRegionNameAsString());
        }
        // Create the secondary and tertiary pair
        favoredNodes = new ServerName[2];
        favoredNodes[0] = secondaryRS;
        favoredNodes[1] = tertiaryRS;
    } else {
        // Pick the secondary rs from this secondary rack
        // and pick the tertiary from another random rack
        favoredNodes = new ServerName[2];
        ServerName secondary = getOneRandomServer(secondaryRack);
        favoredNodes[0] = secondary;
        // Pick the tertiary
        if (getTotalNumberOfRacks() == 2) {
            // Pick the tertiary from the same rack of the primary RS
            Set<ServerName> serverSkipSet = new HashSet<ServerName>();
            serverSkipSet.add(primaryRS);
            favoredNodes[1] = getOneRandomServer(primaryRack, serverSkipSet);
        } else {
            // Pick the tertiary from another rack
            rackSkipSet.add(secondaryRack);
            String tertiaryRandomRack = getOneRandomRack(rackSkipSet);
            favoredNodes[1] = getOneRandomServer(tertiaryRandomRack);
        }
    }
    return favoredNodes;
}
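The balancer retries placement rack by rack, calling skipServerSet.clear() to discard per-attempt exclusions before moving on. That reset-between-attempts idiom in isolation (class name and candidate data are invented; the break condition mirrors skipServerSet.size() + 2 <= serverSet.size() from the source):

import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class RetryWithResetDemo {
    public static void main(String[] args) {
        // each inner list stands in for the server list of one candidate rack
        List<List<String>> racks = List.of(
                List.of("s1", "s2"),        // attempt 1: too small once exclusions apply
                List.of("s3", "s4", "s5")); // attempt 2: large enough
        Set<String> skipServers = new HashSet<>();
        for (List<String> rackServers : racks) {
            skipServers.add(rackServers.get(0)); // pretend one server was ruled out this attempt
            if (skipServers.size() + 2 <= rackServers.size()) {
                System.out.println("placing two servers from " + rackServers);
                break;
            }
            skipServers.clear(); // drop this attempt's exclusions before trying the next rack
        }
    }
}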
From source file: gaffer.accumulostore.operation.spark.handler.GetRDDOfElementsHandlerTest.java

@Test
public void checkGetCorrectElementsInRDDForEdgeSeed() throws OperationException, IOException {
    final Graph graph1 = new Graph.Builder()
            .addSchema(getClass().getResourceAsStream("/schema/dataSchema.json"))
            .addSchema(getClass().getResourceAsStream("/schema/dataTypes.json"))
            .addSchema(getClass().getResourceAsStream("/schema/storeTypes.json"))
            .storeProperties(getClass().getResourceAsStream("/store.properties")).build();
    final List<Element> elements = new ArrayList<>();
    for (int i = 0; i < 10; i++) {
        final Entity entity = new Entity(ENTITY_GROUP);
        entity.setVertex("" + i);
        final Edge edge1 = new Edge(EDGE_GROUP);
        edge1.setSource("" + i);
        edge1.setDestination("B");
        edge1.setDirected(false);
        edge1.putProperty("count", 2);
        final Edge edge2 = new Edge(EDGE_GROUP);
        edge2.setSource("" + i);
        edge2.setDestination("C");
        edge2.setDirected(false);
        edge2.putProperty("count", 4);
        elements.add(edge1);
        elements.add(edge2);
        elements.add(entity);
    }
    final User user = new User();
    graph1.execute(new AddElements(elements), user);
    final SparkConf sparkConf = new SparkConf().setMaster("local")
            .setAppName("testCheckGetCorrectElementsInRDDForEdgeSeed")
            .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
            .set("spark.kryo.registrator", "gaffer.serialisation.kryo.Registrator")
            .set("spark.driver.allowMultipleContexts", "true");
    final SparkContext sparkContext = new SparkContext(sparkConf);
    // Create Hadoop configuration and serialise to a string
    final Configuration configuration = new Configuration();
    final ByteArrayOutputStream baos = new ByteArrayOutputStream();
    configuration.write(new DataOutputStream(baos));
    final String configurationString = new String(baos.toByteArray(), CommonConstants.UTF_8);
    // Check get correct edges for EdgeSeed 1 -> B
    GetRDDOfElements<EdgeSeed> rddQuery = new GetRDDOfElements.Builder<EdgeSeed>().sparkContext(sparkContext)
            .seeds(Collections.singleton(new EdgeSeed("1", "B", false)))
            .setIncludeEdges(GetOperation.IncludeEdgeType.ALL).setIncludeEntities(false).build();
    rddQuery.addOption(AbstractGetRDDOperationHandler.HADOOP_CONFIGURATION_KEY, configurationString);
    RDD<Element> rdd = graph1.execute(rddQuery, user);
    if (rdd == null) {
        fail("No RDD returned");
    }
    Set<Element> results = new HashSet<>();
    // NB: IDE suggests the cast in the following line is unnecessary but compilation fails without it
    Element[] returnedElements = (Element[]) rdd.collect();
    for (int i = 0; i < returnedElements.length; i++) {
        results.add(returnedElements[i]);
    }
    final Set<Element> expectedElements = new HashSet<>();
    final Edge edge1B = new Edge(EDGE_GROUP);
    edge1B.setSource("1");
    edge1B.setDestination("B");
    edge1B.setDirected(false);
    edge1B.putProperty("count", 2);
    expectedElements.add(edge1B);
    assertEquals(expectedElements, results);
    // Check get entity for 1 when query for 1 -> B and specify entities only
    rddQuery = new GetRDDOfElements.Builder<EdgeSeed>().sparkContext(sparkContext)
            .seeds(Collections.singleton(new EdgeSeed("1", "B", false))).setIncludeEntities(true)
            .setIncludeEdges(GetOperation.IncludeEdgeType.NONE).build();
    rddQuery.addOption(AbstractGetRDDOperationHandler.HADOOP_CONFIGURATION_KEY, configurationString);
    rdd = graph1.execute(rddQuery, user);
    if (rdd == null) {
        fail("No RDD returned");
    }
    results.clear();
    returnedElements = (Element[]) rdd.collect();
    for (int i = 0; i < returnedElements.length; i++) {
        results.add(returnedElements[i]);
    }
    expectedElements.clear();
    final Entity entity1 = new Entity(ENTITY_GROUP);
    entity1.setVertex("1");
    expectedElements.add(entity1);
    assertEquals(expectedElements, results);
    // Check get correct edges for 1 -> B when specify edges only
    rddQuery = new GetRDDOfElements.Builder<EdgeSeed>().sparkContext(sparkContext)
            .seeds(Collections.singleton(new EdgeSeed("1", "B", false)))
            .view(new View.Builder().edge(EDGE_GROUP).build()).setIncludeEntities(false)
            .setIncludeEdges(GetOperation.IncludeEdgeType.ALL).build();
    rddQuery.addOption(AbstractGetRDDOperationHandler.HADOOP_CONFIGURATION_KEY, configurationString);
    rdd = graph1.execute(rddQuery, user);
    if (rdd == null) {
        fail("No RDD returned");
    }
    results.clear();
    returnedElements = (Element[]) rdd.collect();
    for (int i = 0; i < returnedElements.length; i++) {
        results.add(returnedElements[i]);
    }
    expectedElements.clear();
    expectedElements.add(edge1B);
    assertEquals(expectedElements, results);
    // Check get correct edges for 1 -> B and 5 -> C
    Set<EdgeSeed> seeds = new HashSet<>();
    seeds.add(new EdgeSeed("1", "B", false));
    seeds.add(new EdgeSeed("5", "C", false));
    rddQuery = new GetRDDOfElements.Builder<EdgeSeed>().sparkContext(sparkContext).setIncludeEntities(false)
            .seeds(seeds).build();
    rddQuery.addOption(AbstractGetRDDOperationHandler.HADOOP_CONFIGURATION_KEY, configurationString);
    rdd = graph1.execute(rddQuery, user);
    if (rdd == null) {
        fail("No RDD returned");
    }
    results.clear();
    returnedElements = (Element[]) rdd.collect();
    for (int i = 0; i < returnedElements.length; i++) {
        results.add(returnedElements[i]);
    }
    final Edge edge5C = new Edge(EDGE_GROUP);
    edge5C.setSource("5");
    edge5C.setDestination("C");
    edge5C.setDirected(false);
    edge5C.putProperty("count", 4);
    expectedElements.clear();
    expectedElements.add(edge1B);
    expectedElements.add(edge5C);
    assertEquals(expectedElements, results);
    sparkContext.stop();
}
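Note the test's rhythm: collect into results, assert, then results.clear() and expectedElements.clear() before the next query, so the same two sets are reused across all four checks. The same pattern in miniature (class name and data are invented; assertions are replaced with prints so the sketch runs standalone):

import java.util.HashSet;
import java.util.Set;

public class ReuseAssertionSetsDemo {
    public static void main(String[] args) {
        Set<String> results = new HashSet<>();
        Set<String> expected = new HashSet<>();

        // first "query"
        results.add("edge:1->B");
        expected.add("edge:1->B");
        System.out.println(expected.equals(results)); // true

        // reset both sets instead of allocating new ones for the next check
        results.clear();
        expected.clear();

        // second "query"
        results.add("entity:1");
        expected.add("entity:1");
        System.out.println(expected.equals(results)); // true
    }
}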
From source file: com.geniusgithub.contact.common.ContactPhotoManager.java

/**
 * Populates an array of photo IDs that need to be loaded. Also decodes bitmaps that we have
 * already loaded.
 */
private void obtainPhotoIdsAndUrisToLoad(Set<Long> photoIds, Set<String> photoIdsAsStrings,
        Set<Request> uris) {
    photoIds.clear();
    photoIdsAsStrings.clear();
    uris.clear();
    boolean jpegsDecoded = false;
    /*
     * Since the call is made from the loader thread, the map could be
     * changing during the iteration. That's not really a problem:
     * ConcurrentHashMap will allow those changes to happen without throwing
     * exceptions. Since we may miss some requests in the situation of
     * concurrent change, we will need to check the map again once loading
     * is complete.
     */
    Iterator<Request> iterator = mPendingRequests.values().iterator();
    while (iterator.hasNext()) {
        Request request = iterator.next();
        final BitmapHolder holder = mBitmapHolderCache.get(request.getKey());
        if (holder != null && holder.bytes != null && holder.fresh
                && (holder.bitmapRef == null || holder.bitmapRef.get() == null)) {
            // This was previously loaded but we don't currently have the inflated Bitmap
            inflateBitmap(holder, request.getRequestedExtent());
            jpegsDecoded = true;
        } else {
            if (holder == null || !holder.fresh) {
                if (request.isUriRequest()) {
                    uris.add(request);
                } else {
                    photoIds.add(request.getId());
                    photoIdsAsStrings.add(String.valueOf(request.mId));
                }
            }
        }
    }
    if (jpegsDecoded)
        mMainThreadHandler.sendEmptyMessage(MESSAGE_PHOTOS_LOADED);
}
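This method treats its Set parameters as out-parameters: the first thing it does is clear() each one, so the caller can safely reuse the same collections on every invocation. A hedged sketch of that contract (class name, method, and stand-in data are invented):

import java.util.HashSet;
import java.util.Set;

public class OutParameterClearDemo {
    // Fills the caller-supplied sets; clearing first makes repeated calls safe.
    static void collectPending(Set<Long> ids, Set<String> idsAsStrings) {
        ids.clear();
        idsAsStrings.clear();
        for (long id : new long[] { 7L, 11L }) { // stand-in for pending requests
            ids.add(id);
            idsAsStrings.add(String.valueOf(id));
        }
    }

    public static void main(String[] args) {
        Set<Long> ids = new HashSet<>();
        Set<String> strings = new HashSet<>();
        collectPending(ids, strings); // first call
        collectPending(ids, strings); // safe to call again with the same sets
        System.out.println(ids + " " + strings);
    }
}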
From source file: me.piebridge.prevent.ui.PreventActivity.java

private boolean onClick(int id) {
    int position = mPager.getCurrentItem();
    Set<String> selections = mPageSelections.get(position);
    if (id == R.id.prevent || id == R.string.prevent) {
        PreventUtils.update(this, selections.toArray(new String[selections.size()]), true);
        for (String packageName : selections) {
            preventPackages.put(packageName, !running.containsKey(packageName));
        }
        savePackages();
    } else if (id == R.id.remove || id == R.string.remove) {
        PreventUtils.update(this, selections.toArray(new String[selections.size()]), false);
        for (String packageName : selections) {
            preventPackages.remove(packageName);
        }
        savePackages();
    } else if (id == R.string.advanced_settings) {
        startActivity(new Intent(this, AdvancedSettingsActivity.class));
    } else if (id == R.string.user_guide) {
        startActivity(new Intent(this, UserGuideActivity.class));
    }
    selections.clear();
    checkSelection();
    return true;
}
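Whichever branch runs, the handler finishes with selections.clear(), so the current page's check-marks never leak into the next action. A minimal sketch of the same act-then-reset flow (class name, package names, and the "processing" step are placeholders):

import java.util.HashSet;
import java.util.Set;

public class ClearSelectionDemo {
    public static void main(String[] args) {
        Set<String> selections = new HashSet<>();
        selections.add("com.example.app");
        selections.add("com.example.other");

        // act on the selection as one batch, e.g. prevent/remove the packages
        String[] batch = selections.toArray(new String[selections.size()]);
        System.out.println("processed " + batch.length + " packages");

        selections.clear(); // reset the UI selection once the batch is handled
        System.out.println("selection now empty: " + selections.isEmpty());
    }
}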
From source file: org.apache.wiki.WikiSession.java

/**
 * Listens for WikiEvents generated by source objects such as the
 * GroupManager. This method adds Principals to the private Subject managed
 * by the WikiSession.
 * @see org.apache.wiki.event.WikiEventListener#actionPerformed(org.apache.wiki.event.WikiEvent)
 */
public final void actionPerformed(WikiEvent event) {
    if (event instanceof WikiSecurityEvent) {
        WikiSecurityEvent e = (WikiSecurityEvent) event;
        if (e.getTarget() != null) {
            switch (e.getType()) {
            case WikiSecurityEvent.GROUP_ADD: {
                Group group = (Group) e.getTarget();
                if (isInGroup(group)) {
                    m_subject.getPrincipals().add(group.getPrincipal());
                }
                break;
            }
            case WikiSecurityEvent.GROUP_REMOVE: {
                Group group = (Group) e.getTarget();
                if (m_subject.getPrincipals().contains(group.getPrincipal())) {
                    m_subject.getPrincipals().remove(group.getPrincipal());
                }
                break;
            }
            case WikiSecurityEvent.GROUP_CLEAR_GROUPS: {
                m_subject.getPrincipals().removeAll(m_subject.getPrincipals(GroupPrincipal.class));
                break;
            }
            case WikiSecurityEvent.LOGIN_INITIATED: {
                // Do nothing
            }
            case WikiSecurityEvent.PRINCIPAL_ADD: {
                WikiSession target = (WikiSession) e.getTarget();
                if (this.equals(target) && m_status == AUTHENTICATED) {
                    Set<Principal> principals = m_subject.getPrincipals();
                    principals.add((Principal) e.getPrincipal());
                }
                break;
            }
            case WikiSecurityEvent.LOGIN_ANONYMOUS: {
                WikiSession target = (WikiSession) e.getTarget();
                if (this.equals(target)) {
                    m_status = ANONYMOUS;
                    // Set the login/user principals and login status
                    Set<Principal> principals = m_subject.getPrincipals();
                    m_loginPrincipal = (Principal) e.getPrincipal();
                    m_userPrincipal = m_loginPrincipal;
                    // Add the login principal to the Subject, and set the built-in roles
                    principals.clear();
                    principals.add(m_loginPrincipal);
                    principals.add(Role.ALL);
                    principals.add(Role.ANONYMOUS);
                }
                break;
            }
            case WikiSecurityEvent.LOGIN_ASSERTED: {
                WikiSession target = (WikiSession) e.getTarget();
                if (this.equals(target)) {
                    m_status = ASSERTED;
                    // Set the login/user principals and login status
                    Set<Principal> principals = m_subject.getPrincipals();
                    m_loginPrincipal = (Principal) e.getPrincipal();
                    m_userPrincipal = m_loginPrincipal;
                    // Add the login principal to the Subject, and set the built-in roles
                    principals.clear();
                    principals.add(m_loginPrincipal);
                    principals.add(Role.ALL);
                    principals.add(Role.ASSERTED);
                }
                break;
            }
            case WikiSecurityEvent.LOGIN_AUTHENTICATED: {
                WikiSession target = (WikiSession) e.getTarget();
                if (this.equals(target)) {
                    m_status = AUTHENTICATED;
                    // Set the login/user principals and login status
                    Set<Principal> principals = m_subject.getPrincipals();
                    m_loginPrincipal = (Principal) e.getPrincipal();
                    m_userPrincipal = m_loginPrincipal;
                    // Add the login principal to the Subject, and set the built-in roles
                    principals.clear();
                    principals.add(m_loginPrincipal);
                    principals.add(Role.ALL);
                    principals.add(Role.AUTHENTICATED);
                    // Add the user and group principals
                    injectUserProfilePrincipals(); // Add principals for the user profile
                    injectGroupPrincipals(); // Inject group principals
                }
                break;
            }
            case WikiSecurityEvent.PROFILE_SAVE: {
                WikiSession source = e.getSrc();
                if (this.equals(source)) {
                    injectUserProfilePrincipals(); // Add principals for the user profile
                    injectGroupPrincipals(); // Inject group principals
                }
                break;
            }
            case WikiSecurityEvent.PROFILE_NAME_CHANGED: {
                // Refresh user principals based on new user profile
                WikiSession source = e.getSrc();
                if (this.equals(source) && m_status == AUTHENTICATED) {
                    // To prepare for refresh, set the new full name as the primary principal
                    UserProfile[] profiles = (UserProfile[]) e.getTarget();
                    UserProfile newProfile = profiles[1];
                    if (newProfile.getFullname() == null) {
                        throw new IllegalStateException("User profile FullName cannot be null.");
                    }
                    Set<Principal> principals = m_subject.getPrincipals();
                    m_loginPrincipal = new WikiPrincipal(newProfile.getLoginName());
                    // Add the login principal to the Subject, and set the built-in roles
                    principals.clear();
                    principals.add(m_loginPrincipal);
                    principals.add(Role.ALL);
                    principals.add(Role.AUTHENTICATED);
                    // Add the user and group principals
                    injectUserProfilePrincipals(); // Add principals for the user profile
                    injectGroupPrincipals(); // Inject group principals
                }
                break;
            }
            //
            // No action, if the event is not recognized.
            //
            default:
                break;
            }
        }
    }
}
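Every login transition above follows the same reset-then-rebuild recipe: principals.clear(), then re-add the login principal plus the built-in roles for the new state. A distilled sketch (principals shown as plain strings to keep it self-contained; the role names come from the listing above):

import java.util.HashSet;
import java.util.Set;

public class RebuildPrincipalsDemo {
    public static void main(String[] args) {
        Set<String> principals = new HashSet<>();
        principals.add("WikiPrincipal:guest");
        principals.add("Role.ALL");
        principals.add("Role.ANONYMOUS");

        // the user authenticates: wipe the old state, then rebuild it for the new status
        principals.clear();
        principals.add("WikiPrincipal:alice"); // the new login principal
        principals.add("Role.ALL");
        principals.add("Role.AUTHENTICATED");

        System.out.println(principals);
    }
}

Clearing first guarantees no stale role (such as Role.ANONYMOUS) survives the transition.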
From source file: gaffer.accumulostore.operation.spark.handler.GetJavaRDDOfElementsHandlerTest.java

@Test
public void checkGetCorrectElementsInJavaRDDForEntitySeed() throws OperationException, IOException {
    final Graph graph1 = new Graph.Builder()
            .addSchema(getClass().getResourceAsStream("/schema/dataSchema.json"))
            .addSchema(getClass().getResourceAsStream("/schema/dataTypes.json"))
            .addSchema(getClass().getResourceAsStream("/schema/storeTypes.json"))
            .storeProperties(getClass().getResourceAsStream("/store.properties")).build();
    final List<Element> elements = new ArrayList<>();
    for (int i = 0; i < 10; i++) {
        final Entity entity = new Entity(ENTITY_GROUP);
        entity.setVertex("" + i);
        final Edge edge1 = new Edge(EDGE_GROUP);
        edge1.setSource("" + i);
        edge1.setDestination("B");
        edge1.setDirected(false);
        edge1.putProperty("count", 2);
        final Edge edge2 = new Edge(EDGE_GROUP);
        edge2.setSource("" + i);
        edge2.setDestination("C");
        edge2.setDirected(false);
        edge2.putProperty("count", 4);
        elements.add(edge1);
        elements.add(edge2);
        elements.add(entity);
    }
    final User user = new User();
    graph1.execute(new AddElements(elements), user);
    final SparkConf sparkConf = new SparkConf().setMaster("local")
            .setAppName("testCheckGetCorrectElementsInJavaRDDForEntitySeed")
            .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
            .set("spark.kryo.registrator", "gaffer.serialisation.kryo.Registrator")
            .set("spark.driver.allowMultipleContexts", "true");
    final JavaSparkContext sparkContext = new JavaSparkContext(sparkConf);
    // Create Hadoop configuration and serialise to a string
    final Configuration configuration = new Configuration();
    final ByteArrayOutputStream baos = new ByteArrayOutputStream();
    configuration.write(new DataOutputStream(baos));
    final String configurationString = new String(baos.toByteArray(), CommonConstants.UTF_8);
    // Check get correct edges for "1"
    GetJavaRDDOfElements<EntitySeed> rddQuery = new GetJavaRDDOfElements.Builder<EntitySeed>()
            .javaSparkContext(sparkContext).seeds(Collections.singleton(new EntitySeed("1"))).build();
    rddQuery.addOption(AbstractGetRDDOperationHandler.HADOOP_CONFIGURATION_KEY, configurationString);
    JavaRDD<Element> rdd = graph1.execute(rddQuery, user);
    if (rdd == null) {
        fail("No RDD returned");
    }
    final Set<Element> results = new HashSet<>(rdd.collect());
    final Set<Element> expectedElements = new HashSet<>();
    final Entity entity1 = new Entity(ENTITY_GROUP);
    entity1.setVertex("1");
    final Edge edge1B = new Edge(EDGE_GROUP);
    edge1B.setSource("1");
    edge1B.setDestination("B");
    edge1B.setDirected(false);
    edge1B.putProperty("count", 2);
    final Edge edge1C = new Edge(EDGE_GROUP);
    edge1C.setSource("1");
    edge1C.setDestination("C");
    edge1C.setDirected(false);
    edge1C.putProperty("count", 4);
    expectedElements.add(entity1);
    expectedElements.add(edge1B);
    expectedElements.add(edge1C);
    assertEquals(expectedElements, results);
    // Check get correct edges for "1" when specify entities only
    rddQuery = new GetJavaRDDOfElements.Builder<EntitySeed>().javaSparkContext(sparkContext)
            .seeds(Collections.singleton(new EntitySeed("1")))
            .view(new View.Builder().entity(ENTITY_GROUP).build()).build();
    rddQuery.addOption(AbstractGetRDDOperationHandler.HADOOP_CONFIGURATION_KEY, configurationString);
    rdd = graph1.execute(rddQuery, user);
    if (rdd == null) {
        fail("No RDD returned");
    }
    results.clear();
    results.addAll(rdd.collect());
    expectedElements.clear();
    expectedElements.add(entity1);
    assertEquals(expectedElements, results);
    // Check get correct edges for "1" when specify edges only
    rddQuery = new GetJavaRDDOfElements.Builder<EntitySeed>().javaSparkContext(sparkContext)
            .seeds(Collections.singleton(new EntitySeed("1")))
            .view(new View.Builder().edge(EDGE_GROUP).build()).build();
    rddQuery.addOption(AbstractGetRDDOperationHandler.HADOOP_CONFIGURATION_KEY, configurationString);
    rdd = graph1.execute(rddQuery, user);
    if (rdd == null) {
        fail("No RDD returned");
    }
    results.clear();
    results.addAll(rdd.collect());
    expectedElements.clear();
    expectedElements.add(edge1B);
    expectedElements.add(edge1C);
    assertEquals(expectedElements, results);
    // Check get correct edges for "1" and "5"
    Set<EntitySeed> seeds = new HashSet<>();
    seeds.add(new EntitySeed("1"));
    seeds.add(new EntitySeed("5"));
    rddQuery = new GetJavaRDDOfElements.Builder<EntitySeed>().javaSparkContext(sparkContext).seeds(seeds)
            .build();
    rddQuery.addOption(AbstractGetRDDOperationHandler.HADOOP_CONFIGURATION_KEY, configurationString);
    rdd = graph1.execute(rddQuery, user);
    if (rdd == null) {
        fail("No RDD returned");
    }
    results.clear();
    results.addAll(rdd.collect());
    final Entity entity5 = new Entity(ENTITY_GROUP);
    entity5.setVertex("5");
    final Edge edge5B = new Edge(EDGE_GROUP);
    edge5B.setSource("5");
    edge5B.setDestination("B");
    edge5B.setDirected(false);
    edge5B.putProperty("count", 2);
    final Edge edge5C = new Edge(EDGE_GROUP);
    edge5C.setSource("5");
    edge5C.setDestination("C");
    edge5C.setDirected(false);
    edge5C.putProperty("count", 4);
    expectedElements.clear();
    expectedElements.add(entity1);
    expectedElements.add(edge1B);
    expectedElements.add(edge1C);
    expectedElements.add(entity5);
    expectedElements.add(edge5B);
    expectedElements.add(edge5C);
    assertEquals(expectedElements, results);
    sparkContext.stop();
}
From source file: eu.stratosphere.nephele.executiongraph.ExecutionGraph.java

public void repairInstanceSharing() {
    final Set<AllocatedResource> availableResources = new LinkedHashSet<AllocatedResource>();
    final Iterator<ExecutionGroupVertex> it = new ExecutionGroupVertexIterator(this, true, -1);
    while (it.hasNext()) {
        final ExecutionGroupVertex groupVertex = it.next();
        if (groupVertex.getVertexToShareInstancesWith() == null) {
            availableResources.clear();
            groupVertex.repairInstanceSharing(availableResources);
        }
    }
}
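Here availableResources is allocated once and cleared at the start of each sharing group, instead of constructing a fresh LinkedHashSet per iteration. The same allocate-once, clear-per-iteration idiom in isolation (class name and group data are invented):

import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

public class ClearPerIterationDemo {
    public static void main(String[] args) {
        List<List<String>> groups = List.of(
                List.of("r1", "r2"),
                List.of("r3"));
        final Set<String> available = new LinkedHashSet<>(); // allocated once
        for (List<String> group : groups) {
            available.clear();      // reset instead of re-allocating per iteration
            available.addAll(group);
            System.out.println("repairing with " + available);
        }
    }
}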
From source file: com.kalessil.phpStorm.phpInspectionsEA.inspectors.forEach.DisconnectedForeachInstructionInspector.java

@Override
@NotNull
public PsiElementVisitor buildVisitor(@NotNull final ProblemsHolder holder, final boolean isOnTheFly) {
    return new BasePhpElementVisitor() {
        @Override
        public void visitPhpForeach(@NotNull ForeachStatement foreach) {
            final GroupStatement foreachBody = ExpressionSemanticUtil.getGroupStatement(foreach);
            /* ensure foreach structure is ready for inspection */
            if (foreachBody != null) {
                final PsiElement[] statements = foreachBody.getChildren();
                if (statements.length > 0
                        && Stream.of(statements).anyMatch(s -> OpenapiTypesUtil.is(s, PhpElementTypes.HTML))) {
                    return;
                }
                /* pre-collect introduced and internally used variables */
                final Set<String> allModifiedVariables = this.collectCurrentAndOuterLoopVariables(foreach);
                final Map<PsiElement, Set<String>> instructionDependencies = new HashMap<>();
                /* iteration 1 - investigate what are dependencies and influence */
                for (final PsiElement oneInstruction : statements) {
                    if (oneInstruction instanceof PhpPsiElement && !(oneInstruction instanceof PsiComment)) {
                        final Set<String> individualDependencies = new HashSet<>();
                        instructionDependencies.put(oneInstruction, individualDependencies);
                        investigateInfluence((PhpPsiElement) oneInstruction, individualDependencies,
                                allModifiedVariables);
                    }
                }
                /* iteration 2 - analyse dependencies */
                for (final PsiElement oneInstruction : statements) {
                    if (oneInstruction instanceof PhpPsiElement && !(oneInstruction instanceof PsiComment)) {
                        boolean isDependOnModified = false;
                        /* check if any dependency is overridden */
                        final Set<String> individualDependencies = instructionDependencies.get(oneInstruction);
                        if (individualDependencies != null && !individualDependencies.isEmpty()) {
                            isDependOnModified = individualDependencies.stream()
                                    .anyMatch(allModifiedVariables::contains);
                            individualDependencies.clear();
                        }
                        /* verify and report if violation detected */
                        if (!isDependOnModified) {
                            final ExpressionType target = getExpressionType(oneInstruction);
                            if (ExpressionType.NEW != target && ExpressionType.ASSIGNMENT != target
                                    && ExpressionType.CLONE != target && ExpressionType.INCREMENT != target
                                    && ExpressionType.DECREMENT != target
                                    && ExpressionType.DOM_ELEMENT_CREATE != target
                                    && ExpressionType.ACCUMULATE_IN_ARRAY != target
                                    && ExpressionType.CONTROL_STATEMENTS != target) {
                                /* loops, ifs, switches, try's needs to be reported on keyword, others - complete */
                                final PsiElement reportingTarget = oneInstruction instanceof ControlStatement
                                        || oneInstruction instanceof Try || oneInstruction instanceof PhpSwitch
                                                ? oneInstruction.getFirstChild()
                                                : oneInstruction;
                                /* secure exceptions with '<?= ?>' constructions, false-positives with html */
                                if (!OpenapiTypesUtil.isPhpExpressionImpl(oneInstruction)
                                        && oneInstruction.getTextLength() > 0) {
                                    /* inner looping termination/continuation should be taken into account */
                                    final PsiElement loopInterrupter = PsiTreeUtil.findChildOfAnyType(
                                            oneInstruction, true, PhpBreak.class, PhpContinue.class,
                                            PhpThrow.class, PhpReturn.class);
                                    /* operating with variables should be taken into account */
                                    final boolean isVariablesUsed = PsiTreeUtil.findChildOfAnyType(
                                            oneInstruction, true, (Class) Variable.class) != null;
                                    if (null == loopInterrupter && isVariablesUsed) {
                                        holder.registerProblem(reportingTarget, messageDisconnected);
                                    }
                                }
                            }
                            if (SUGGEST_USING_CLONE && (ExpressionType.DOM_ELEMENT_CREATE == target
                                    || ExpressionType.NEW == target)) {
                                holder.registerProblem(oneInstruction, messageUseClone);
                            }
                        }
                    }
                }
                /* release containers content */
                allModifiedVariables.clear();
                instructionDependencies.values().forEach(Set::clear);
                instructionDependencies.clear();
            }
        }

        private Set<String> collectCurrentAndOuterLoopVariables(@NotNull ForeachStatement foreach) {
            final Set<String> variables = new HashSet<>();
            PsiElement current = foreach;
            while (current != null && !(current instanceof Function) && !(current instanceof PsiFile)) {
                if (current instanceof ForeachStatement) {
                    ((ForeachStatement) current).getVariables().forEach(v -> variables.add(v.getName()));
                }
                current = current.getParent();
            }
            return variables;
        }

        private void investigateInfluence(@Nullable PhpPsiElement oneInstruction,
                @NotNull Set<String> individualDependencies, @NotNull Set<String> allModifiedVariables) {
            for (final PsiElement variable : PsiTreeUtil.findChildrenOfType(oneInstruction, Variable.class)) {
                final String variableName = ((Variable) variable).getName();
                PsiElement valueContainer = variable;
                PsiElement parent = variable.getParent();
                while (parent instanceof FieldReference) {
                    valueContainer = parent;
                    parent = parent.getParent();
                }
                /* a special case: `[] = ` and `array() = ` unboxing */
                if (OpenapiTypesUtil.is(parent, PhpElementTypes.ARRAY_VALUE)) {
                    parent = parent.getParent().getParent();
                }
                final PsiElement grandParent = parent.getParent();
                /* writing into variable */
                if (parent instanceof AssignmentExpression) {
                    /* php-specific `list(...) =` , `[...] =` construction */
                    if (parent instanceof MultiassignmentExpression) {
                        final MultiassignmentExpression assignment = (MultiassignmentExpression) parent;
                        if (assignment.getValue() != variable) {
                            allModifiedVariables.add(variableName);
                            individualDependencies.add(variableName);
                            continue;
                        }
                    } else {
                        final AssignmentExpression assignment = (AssignmentExpression) parent;
                        if (assignment.getVariable() == valueContainer) {
                            /* we are modifying the variable */
                            allModifiedVariables.add(variableName);
                            /* self-assignment and field assignment counted as the variable dependent on itself */
                            if (assignment instanceof SelfAssignmentExpression
                                    || valueContainer instanceof FieldReference) {
                                individualDependencies.add(variableName);
                            }
                            /* assignments as call arguments counted as the variable dependent on itself */
                            if (grandParent instanceof ParameterList) {
                                individualDependencies.add(variableName);
                            }
                            continue;
                        }
                    }
                }
                /* adding into an arrays; we both depend and modify the container */
                if (parent instanceof ArrayAccessExpression
                        && valueContainer == ((ArrayAccessExpression) parent).getValue()) {
                    allModifiedVariables.add(variableName);
                    individualDependencies.add(variableName);
                }
                if (parent instanceof ParameterList) {
                    if (grandParent instanceof MethodReference) {
                        /* an object consumes the variable, perhaps modification takes place */
                        final MethodReference reference = (MethodReference) grandParent;
                        final PsiElement referenceOperator = OpenapiPsiSearchUtil
                                .findResolutionOperator(reference);
                        if (OpenapiTypesUtil.is(referenceOperator, PhpTokenTypes.ARROW)) {
                            final PsiElement variableCandidate = reference.getFirstPsiChild();
                            if (variableCandidate instanceof Variable) {
                                allModifiedVariables.add(((Variable) variableCandidate).getName());
                                continue;
                            }
                        }
                    } else if (OpenapiTypesUtil.isFunctionReference(grandParent)) {
                        /* php will create variable, if it is by reference */
                        final FunctionReference reference = (FunctionReference) grandParent;
                        final int position = ArrayUtils.indexOf(reference.getParameters(), variable);
                        if (position != -1) {
                            final PsiElement resolved = OpenapiResolveUtil.resolveReference(reference);
                            if (resolved instanceof Function) {
                                final Parameter[] parameters = ((Function) resolved).getParameters();
                                if (parameters.length > position && parameters[position].isPassByRef()) {
                                    allModifiedVariables.add(variableName);
                                    individualDependencies.add(variableName);
                                    continue;
                                }
                            }
                        }
                    }
                }
                /* increment/decrement are also write operations */
                final ExpressionType type = this.getExpressionType(parent);
                if (ExpressionType.INCREMENT == type || ExpressionType.DECREMENT == type) {
                    allModifiedVariables.add(variableName);
                    individualDependencies.add(variableName);
                    continue;
                }
                /* TODO: lookup for array access and property access */
                individualDependencies.add(variableName);
            }
            /* handle compact function usage */
            for (final FunctionReference reference : PsiTreeUtil.findChildrenOfType(oneInstruction,
                    FunctionReference.class)) {
                if (OpenapiTypesUtil.isFunctionReference(reference)) {
                    final String functionName = reference.getName();
                    if (functionName != null && functionName.equals("compact")) {
                        for (final PsiElement argument : reference.getParameters()) {
                            if (argument instanceof StringLiteralExpression) {
                                final String compactedVariableName = ((StringLiteralExpression) argument)
                                        .getContents();
                                if (!compactedVariableName.isEmpty()) {
                                    individualDependencies.add(compactedVariableName);
                                }
                            }
                        }
                    }
                }
            }
        }

        @NotNull
        private ExpressionType getExpressionType(@Nullable PsiElement expression) {
            if (expression instanceof PhpBreak || expression instanceof PhpContinue
                    || expression instanceof PhpReturn) {
                return ExpressionType.CONTROL_STATEMENTS;
            }
            /* regular '...;' statements */
            if (OpenapiTypesUtil.isStatementImpl(expression)) {
                return getExpressionType(((Statement) expression).getFirstPsiChild());
            }
            /* unary operations */
            if (expression instanceof UnaryExpression) {
                final PsiElement operation = ((UnaryExpression) expression).getOperation();
                if (OpenapiTypesUtil.is(operation, PhpTokenTypes.opINCREMENT)) {
                    return ExpressionType.INCREMENT;
                }
                if (OpenapiTypesUtil.is(operation, PhpTokenTypes.opDECREMENT)) {
                    return ExpressionType.DECREMENT;
                }
            }
            /* different types of assignments */
            if (expression instanceof AssignmentExpression) {
                final AssignmentExpression assignment = (AssignmentExpression) expression;
                final PsiElement variable = assignment.getVariable();
                if (variable instanceof Variable) {
                    final PsiElement value = assignment.getValue();
                    if (value instanceof NewExpression) {
                        return ExpressionType.NEW;
                    } else if (value instanceof UnaryExpression) {
                        if (OpenapiTypesUtil.is(((UnaryExpression) value).getOperation(),
                                PhpTokenTypes.kwCLONE)) {
                            return ExpressionType.CLONE;
                        }
                    } else if (value instanceof MethodReference) {
                        final MethodReference call = (MethodReference) value;
                        final String methodName = call.getName();
                        if (methodName != null && methodName.equals("createElement")) {
                            final PsiElement resolved = OpenapiResolveUtil.resolveReference(call);
                            if (resolved instanceof Method
                                    && ((Method) resolved).getFQN().equals("\\DOMDocument.createElement")) {
                                return ExpressionType.DOM_ELEMENT_CREATE;
                            }
                        }
                    }
                    /* allow all assignations afterwards */
                    return ExpressionType.ASSIGNMENT;
                }
                /* accumulating something in external container */
                if (variable instanceof ArrayAccessExpression) {
                    final ArrayAccessExpression storage = (ArrayAccessExpression) variable;
                    if (null == storage.getIndex() || null == storage.getIndex().getValue()) {
                        return ExpressionType.ACCUMULATE_IN_ARRAY;
                    }
                }
            }
            return ExpressionType.OTHER;
        }
    };
}
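At the end of the visitor the inspector releases its scratch state with a nested clear: instructionDependencies.values().forEach(Set::clear) empties each value set, then the map itself is cleared. That idiom in isolation (class name and map contents are invented):

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

public class NestedClearDemo {
    public static void main(String[] args) {
        Map<String, Set<String>> deps = new HashMap<>();
        deps.put("stmt1", new HashSet<>(Set.of("a", "b")));
        deps.put("stmt2", new HashSet<>(Set.of("c")));

        deps.values().forEach(Set::clear); // empty each inner set first
        deps.clear();                      // then drop the map entries themselves

        System.out.println(deps.isEmpty()); // true
    }
}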
From source file: de.uni_potsdam.hpi.asg.logictool.mapping.SequenceBasedAndGateDecomposer.java

private void removeCandidates(SortedSet<IOBehaviour> sequencesFront, SortedSet<IOBehaviour> sequencesBack,
        Set<IOBehaviour> newSequences, Set<IOBehaviour> rmSequences) {
    removeSubSequences(sequencesFront, sequencesBack, newSequences, rmSequences); //new->front,back ; set rm
    sequencesBack.removeAll(rmSequences);
    sequencesFront.removeAll(rmSequences);
    newSequences.removeAll(rmSequences);
    if (rmSequences.size() > 0) {
        rmSub += rmSequences.size();
        logger.debug("rmSub removed " + rmSequences.size() + " candidates");
    }

    // checkFalling(newSequences, rmSequences, term, relevant, partitions); //set rm
    // sequencesBack.removeAll(rmSequences);
    // sequencesFront.removeAll(rmSequences);
    newSequences.clear();
    // if(rmSequences.size() > 0) {
    //     rmFall += rmSequences.size();
    //     logger.debug("chkFall removed " + rmSequences.size() + " candidates");
    // }
}