List of usage examples for java.util.HashSet.addAll
boolean addAll(Collection<? extends E> c);
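Before the real-world examples below, here is a minimal, self-contained sketch of the call itself (illustrative only; the class and variable names are made up):

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class AddAllDemo {
    public static void main(String[] args) {
        // Start with one set and merge a second collection into it.
        Set<String> colors = new HashSet<>(Arrays.asList("red", "green"));
        boolean changed = colors.addAll(Arrays.asList("green", "blue"));

        // addAll returns true because "blue" was not already present;
        // the duplicate "green" is silently ignored by the set.
        System.out.println(changed); // true
        System.out.println(colors);  // e.g. [red, green, blue]
    }
}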
From source file:iristk.util.Record.java
public synchronized Set<String> getFields() {
    HashSet<String> fields = new HashSet<>();
    fields.addAll(dynamicFields.keySet());
    RecordInfo info = getRecordInfo();
    fields.addAll(info.classFields.keySet());
    fields.addAll(info.getMethodFields.keySet());
    return fields;
}
From source file:org.apache.zeppelin.rest.NotebookRestApi.java
/**
 * Search for notes with permissions
 */
@GET
@Path("search")
@ZeppelinApi
public Response search(@QueryParam("q") String queryTerm) {
    LOG.info("Searching notes for: {}", queryTerm);
    String principal = SecurityUtils.getPrincipal();
    HashSet<String> roles = SecurityUtils.getRoles();
    HashSet<String> userAndRoles = new HashSet<>();
    userAndRoles.add(principal);
    userAndRoles.addAll(roles);
    List<Map<String, String>> notesFound = noteSearchService.query(queryTerm);
    for (int i = 0; i < notesFound.size(); i++) {
        String[] Id = notesFound.get(i).get("id").split("/", 2);
        String noteId = Id[0];
        if (!notebookAuthorization.isOwner(noteId, userAndRoles)
                && !notebookAuthorization.isReader(noteId, userAndRoles)
                && !notebookAuthorization.isWriter(noteId, userAndRoles)) {
            notesFound.remove(i);
            i--;
        }
    }
    LOG.info("{} notes found", notesFound.size());
    return new JsonResponse<>(Status.OK, notesFound).build();
}
From source file:iristk.util.Record.java
public synchronized Set<String> getPersistentFields() {
    HashSet<String> fields = new HashSet<>();
    fields.addAll(dynamicFields.keySet());
    RecordInfo info = getRecordInfo();
    fields.addAll(info.classFields.keySet());
    for (String f : info.getMethodFields.keySet()) {
        if (info.setMethodFields.containsKey(f))
            fields.add(f);
    }
    return fields;
}
From source file:org.apache.hadoop.hbase.backup.impl.BackupAdminImpl.java
@Override
public int deleteBackups(String[] backupIds) throws IOException {
    int totalDeleted = 0;
    Map<String, HashSet<TableName>> allTablesMap = new HashMap<>();
    boolean deleteSessionStarted;
    boolean snapshotDone;
    try (final BackupSystemTable sysTable = new BackupSystemTable(conn)) {
        // Step 1: Make sure there is no active session
        // running by using the startBackupSession API.
        // If there is an active session in progress, an exception will be thrown.
        try {
            sysTable.startBackupExclusiveOperation();
            deleteSessionStarted = true;
        } catch (IOException e) {
            LOG.warn("You can not run delete command while active backup session is in progress. \n"
                    + "If there is no active backup session running, run backup repair utility to "
                    + "restore \nbackup system integrity.");
            return -1;
        }
        // Step 2: Make sure there is no failed session
        List<BackupInfo> list = sysTable.getBackupInfos(BackupState.RUNNING);
        if (list.size() != 0) {
            // Failed sessions found
            LOG.warn("Failed backup session found. Run backup repair tool first.");
            return -1;
        }
        // Step 3: Record delete session
        sysTable.startDeleteOperation(backupIds);
        // Step 4: Snapshot backup system table
        if (!BackupSystemTable.snapshotExists(conn)) {
            BackupSystemTable.snapshot(conn);
        } else {
            LOG.warn("Backup system table snapshot exists");
        }
        snapshotDone = true;
        try {
            for (int i = 0; i < backupIds.length; i++) {
                BackupInfo info = sysTable.readBackupInfo(backupIds[i]);
                if (info != null) {
                    String rootDir = info.getBackupRootDir();
                    HashSet<TableName> allTables = allTablesMap.get(rootDir);
                    if (allTables == null) {
                        allTables = new HashSet<>();
                        allTablesMap.put(rootDir, allTables);
                    }
                    allTables.addAll(info.getTableNames());
                    totalDeleted += deleteBackup(backupIds[i], sysTable);
                }
            }
            finalizeDelete(allTablesMap, sysTable);
            // Finish
            sysTable.finishDeleteOperation();
            // delete snapshot
            BackupSystemTable.deleteSnapshot(conn);
        } catch (IOException e) {
            // Fail delete operation
            // Step 1
            if (snapshotDone) {
                if (BackupSystemTable.snapshotExists(conn)) {
                    BackupSystemTable.restoreFromSnapshot(conn);
                    // delete snapshot
                    BackupSystemTable.deleteSnapshot(conn);
                    // We still have a record with an unfinished delete operation
                    LOG.error("Delete operation failed, please run backup repair utility to restore "
                            + "backup system integrity", e);
                    throw e;
                } else {
                    LOG.warn("Delete operation succeeded, there were some errors: ", e);
                }
            }
        } finally {
            if (deleteSessionStarted) {
                sysTable.finishBackupExclusiveOperation();
            }
        }
    }
    return totalDeleted;
}
From source file:com.msopentech.odatajclient.testservice.utils.AbstractUtilities.java
public void putLinksInMemory(final String basePath, final String entitySetName, final String entityKey,
        final String linkName, final Collection<String> links) throws IOException {
    final HashSet<String> uris = new HashSet<String>();
    if (Commons.linkInfo.get(version).isFeed(entitySetName, linkName)) {
        try {
            final Map.Entry<String, List<String>> currents = JSONUtilities.extractLinkURIs(
                    readLinks(entitySetName, entityKey, linkName, Accept.JSON_FULLMETA).getLinks());
            uris.addAll(currents.getValue());
        } catch (Exception ignore) {
        }
    }
    uris.addAll(links);
    putLinksInMemory(basePath, entitySetName, linkName, uris);
}
From source file:au.com.redboxresearchdata.fascinator.harvester.BaseJsonHarvester.java
/**
 * Gets a list of digital object IDs successfully harvested.
 *
 * If there are no objects, this method should return an empty list, not
 * null.
 *
 * @return a list of object IDs, possibly empty
 * @throws HarvesterException
 *             if there was an error retrieving the objects
 */
public Set<String> getObjectIdList() throws HarvesterException {
    HashSet<String> oidSet = new HashSet<String>();
    if (data != null && successOidList != null) {
        oidSet.addAll(successOidList);
    }
    return oidSet;
}
From source file:com.lynnlyc.web.taintanalysis.JSTaintAnalysis.java
private HashSet<String> getReachingTags(CGNode node, SSAInstruction inst) {
    HashSet<String> tags = new HashSet<>();
    if (node == null || inst == null)
        return tags;
    // DefUse du = node.getDU();
    if (inst instanceof JavaScriptInvoke) {
        JavaScriptInvoke invoke_inst = (JavaScriptInvoke) inst;
        tags.addAll(getReachingTags(node, invoke_inst.getFunction()));
    } else if (inst instanceof SSANewInstruction) {
        SSANewInstruction new_inst = (SSANewInstruction) inst;
        tags.add(new_inst.getNewSite().toString());
    } else if (inst instanceof SSAGetInstruction) {
        SSAGetInstruction get_inst = (SSAGetInstruction) inst;
        FieldReference field = get_inst.getDeclaredField();
        tags.add(field.getSignature());
    } else if (inst instanceof AstLexicalRead) {
        for (AstLexicalAccess.Access access : ((AstLexicalRead) inst).getAccesses()) {
            tags.add(access.variableName);
        }
    }
    for (int j = 0; j < inst.getNumberOfUses(); j++) {
        tags.addAll(getReachingTags(node, inst.getUse(j)));
    }
    return tags;
}
From source file:it.iit.genomics.cru.igb.bundles.mi.business.MIResult.java
public float getScore() {
    if (score > -1) {
        return score;
    }
    HashSet<AAPosition> residuesOnProteins = new HashSet<>();
    HashSet<AAPosition> residuesOnContacts = new HashSet<>();
    residuesOnProteins.addAll(queryResiduesA);
    residuesOnProteins.addAll(queryResiduesB);
    residuesOnContacts.addAll(structureMapper.getInterfaceAAPositionsA());
    residuesOnContacts.addAll(structureMapper.getInterfaceAAPositionsB());
    if (residuesOnProteins.isEmpty()) {
        return 0;
    }
    float residueScore = ((float) residuesOnContacts.size()) / residuesOnProteins.size();
    this.score = residueScore;
    return residueScore;
}
From source file:org.commonjava.maven.cartographer.agg.DefaultGraphAggregator.java
@Override
public void connectIncomplete(final RelationshipGraph graph, final AggregationOptions config)
        throws CartoDataException {
    if (graph != null && config.isDiscoveryEnabled()) {
        final Set<ProjectVersionRef> missing = new HashSet<ProjectVersionRef>();
        logger.debug("Loading existing cycle participants...");
        // final Set<ProjectVersionRef> cycleParticipants = loadExistingCycleParticipants( net );
        final Set<ProjectVersionRef> seen = new HashSet<ProjectVersionRef>();
        logger.debug("Loading initial set of GAVs to be resolved...");
        final List<DiscoveryTodo> pending = loadInitialPending(graph, seen);
        final HashSet<DiscoveryTodo> done = new HashSet<DiscoveryTodo>();
        int pass = 0;
        while (!pending.isEmpty()) {
            // final HashSet<DiscoveryTodo> current = new HashSet<DiscoveryTodo>( pending );
            // pending.clear();
            final HashSet<DiscoveryTodo> current = new HashSet<DiscoveryTodo>(MAX_BATCHSIZE);
            while (!pending.isEmpty() && current.size() < MAX_BATCHSIZE) {
                current.add(pending.remove(0));
            }
            done.addAll(current);
            logger.debug("{}. {} in next batch of TODOs:\n  {}", pass, current.size(),
                    new JoinString("\n  ", current));
            final Set<DiscoveryTodo> newTodos =
                    discover(current, config, /*cycleParticipants,*/ missing, seen, pass);
            if (newTodos != null) {
                logger.debug("{}. Uncovered new batch of TODOs:\n  {}", pass,
                        new JoinString("\n  ", newTodos));
                for (final DiscoveryTodo todo : newTodos) {
                    if (!done.contains(todo) && !pending.contains(todo)) {
                        logger.debug("+= {}", todo);
                        pending.add(todo);
                    }
                }
            }
            pass++;
        }
        logger.info("Discovery complete. {} seen, {} missing in {} passes.", seen.size(), missing.size(),
                pass - 1);
    }
}
From source file:gr.demokritos.iit.cru.creativity.reasoning.diagrammatic.DiagrammaticComputationalTools.java
public HashSet<String> FindConcepts(String concept, int difficulty, String category)
        throws InstantiationException, IllegalAccessException, SQLException, ClassNotFoundException, Exception {
    HashSet<String> concepts = new HashSet<String>();
    HashSet<String> TempConcepts = new HashSet<String>();
    // ---------translate
    concept = Translator.bingTranslate(concept, this.language, "en", "general");
    if (wn.getCommonPos(concept) == null) {
        // new addition: if the word cannot be translated to English
        return concepts;
    }
    if (category.equalsIgnoreCase("equivalence")) {
        // to be done
    } else if (category.equalsIgnoreCase("subsumption")) {
        for (int i = 0; i < difficulty; i++) { // for the given difficulty, abstract that many times
            TempConcepts = ConceptGraphPolymerismEngine(concept); // polymerism doesn't return many
            if (TempConcepts.isEmpty()) {
                break;
            }
            concepts = TempConcepts; // keep the temporary concepts as the new concepts in case the next step is empty
            // take one concept at random from the abstraction, and make it the new concept
            int pointer = new Random().nextInt(concepts.size());
            int c = 0;
            for (String k : concepts) {
                if (c == pointer) {
                    concept = k;
                }
                c = c + 1;
            }
        }
    } else if (category.equalsIgnoreCase("supersumption")) {
        for (int i = 0; i < difficulty; i++) {
            TempConcepts = ConceptGraphAbstractionEngine(concept);
            if (TempConcepts.isEmpty()) {
                break;
            }
            concepts = TempConcepts; // keep the temporary concepts as the new concepts in case the next step is empty
            // take one concept at random from the abstraction, and make it the new concept
            int pointer = new Random().nextInt(concepts.size());
            int c = 0;
            for (String k : concepts) {
                if (c == pointer) {
                    concept = k;
                }
                c = c + 1;
            }
        }
    } else {
        String type = "subject";
        // take alternately the object of the subject and the subject of the object until the difficulty is reached
        for (int i = 0; i < difficulty; i++) {
            TempConcepts = FactRetriever(concept, type);
            if (TempConcepts.isEmpty()) {
                break;
            }
            concepts = TempConcepts;
            int pointer = new Random().nextInt(concepts.size());
            int c = 0;
            for (String k : concepts) {
                if (c == pointer) {
                    if (type.equalsIgnoreCase("subject")) {
                        if (!k.split("---")[1].isEmpty()) {
                            concept = k.split("---")[1]; // take the object
                            type = "object";
                        }
                    } else {
                        if (!k.split("---")[0].isEmpty()) {
                            concept = k.split("---")[0]; // take the subject
                            type = "subject";
                        }
                    }
                }
                c = c + 1;
            }
        }
    }
    HashSet<String> finCon = new HashSet<String>();
    // ---------translate back
    if (!this.language.equalsIgnoreCase("en")) {
        for (String s : concepts) {
            String n = Translator.bingTranslate(s, "en", this.language, "general");
            if (wn.getCommonPos(n) == null) { // if the word is not English
                finCon.add(n);
            }
        }
    } else {
        finCon.addAll(concepts);
    }
    return finCon;
}