List of usage examples for java.util HashSet addAll
boolean addAll(Collection<? extends E> c);
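HashSet.addAll copies every element of the given collection into the set, silently dropping duplicates, and returns true if the set changed as a result. Before the project examples below, a minimal self-contained sketch (not taken from any of the listed projects):

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;

public class AddAllDemo {
    public static void main(String[] args) {
        List<String> tags = Arrays.asList("rock", "jazz", "rock", "pop");

        // addAll ignores duplicates and reports whether the set changed
        HashSet<String> unique = new HashSet<String>();
        boolean changed = unique.addAll(tags);

        System.out.println(changed);             // true
        System.out.println(unique);              // e.g. [rock, pop, jazz] (iteration order not guaranteed)
        System.out.println(unique.addAll(tags)); // false: nothing new to add
    }
}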
From source file:org.madsonic.ajax.PlayQueueService.java
public PlayQueueInfo playPandoraRadio(String[] artist, String[] albums, String[] genres, String[] tags)
        throws Exception {
    HttpServletRequest request = WebContextFactory.get().getHttpServletRequest();
    HttpServletResponse response = WebContextFactory.get().getHttpServletResponse();
    int userGroupId = securityService.getCurrentUserGroupId(request);
    Player player = getCurrentPlayer(request, response);
    List<MediaFile> result = new ArrayList<MediaFile>();

    // RESULT: ALBUM
    MultiSearchCriteria criteria1 = new MultiSearchCriteria(settingsService.getPandoraResultAlbum(), artist,
            albums, null, null, null, null, null, userGroupId);
    result.addAll(searchService.getRandomSongs(criteria1));
    if ("TEST".contains(settingsService.getLogfileLevel())) {
        System.out.println("-=Pandora-R1=-");
        for (MediaFile m : searchService.getRandomSongs(criteria1)) {
            System.out.println(m.getPath());
        }
    }

    // RESULT: ARTIST
    MultiSearchCriteria criteria2 = new MultiSearchCriteria(settingsService.getPandoraResultArtist(), artist,
            null, null, null, null, null, null, userGroupId);
    result.addAll(searchService.getRandomSongs(criteria2));
    if ("TEST".contains(settingsService.getLogfileLevel())) {
        System.out.println("-=Pandora-R2=-");
        for (MediaFile m : searchService.getRandomSongs(criteria2)) {
            System.out.println(m.getPath());
        }
    }

    // RESULT: GENRE
    GenreSearchCriteria criteria3 = new GenreSearchCriteria(settingsService.getPandoraResultGenre(), null,
            genres, null, null, null, userGroupId);
    result.addAll(searchService.getRandomSongs(criteria3));
    if ("TEST".contains(settingsService.getLogfileLevel())) {
        System.out.println("-=Pandora-R3=-");
        for (MediaFile m : searchService.getRandomSongs(criteria3)) {
            System.out.println(m.getPath());
        }
    }

    // RESULT: MOODS
    MoodsSearchCriteria criteria4 = new MoodsSearchCriteria(settingsService.getPandoraResultMood(), null, tags,
            null, null, null, userGroupId);
    result.addAll(searchService.getRandomSongs(criteria4));
    if ("TEST".contains(settingsService.getLogfileLevel())) {
        System.out.println("-=Pandora-R4=-");
        for (MediaFile m : searchService.getRandomSongs(criteria4)) {
            System.out.println(m.getPath());
        }
    }

    // RESULT: OTHER
    List<String> allArtists = new ArrayList<String>();
    List<String> similar = new ArrayList<String>();
    for (String _artist : artist) {
        similar = lastFMService.getSimilarArtist(_artist);
        allArtists.addAll(similar);
        if (allArtists.size() > 10) {
            break;
        }
    }
    String[] array = allArtists.toArray(new String[allArtists.size()]);

    // RESULT: RANDOM
    MultiSearchCriteria criteria5 = new MultiSearchCriteria(settingsService.getPandoraResultSimilar(), array,
            null, null, null, null, null, null, userGroupId);
    result.addAll(searchService.getRandomSongs(criteria5));
    if ("TEST".contains(settingsService.getLogfileLevel())) {
        System.out.println("-=Pandora-R5=-");
        for (MediaFile m : searchService.getRandomSongs(criteria5)) {
            System.out.println(m.getPath());
        }
    }

    // RESULT: ARTIST TOPTRACKS
    List<MediaFile> resultTopTrack = new ArrayList<MediaFile>();
    if (mediaFileDao.getTopTracks(artist[0], userGroupId).size() == 0) {
        resultTopTrack = lastFMService.getTopTrack(artist[0], 20, userGroupId);
    }
    if (artist.length > 1) {
        for (String _artist : artist) {
            // resultTopTrack = lastFMService.getTopTrack(_artist, 20, userGroupId);
            resultTopTrack = mediaFileDao.getTopTracks(artist[0], userGroupId);
            for (int i = 0; i < settingsService.getPandoraResultArtistTopTrack(); i++) {
                Random myRandomizer = new Random();
                MediaFile randomTopTrack = resultTopTrack.get(myRandomizer.nextInt(resultTopTrack.size()));
                result.add(randomTopTrack);
            }
        }
    }
    if (artist.length == 1) {
        // resultTopTrack = lastFMService.getTopTrack(artist[0], settingsService.getPandoraResultArtistTopTrack(), userGroupId);
        resultTopTrack = mediaFileDao.getTopTracks(artist[0], userGroupId);
        for (int i = 0; i < settingsService.getPandoraResultArtistTopTrack(); i++) {
            Random myRandomizer = new Random();
            MediaFile randomTopTrack = resultTopTrack.get(myRandomizer.nextInt(resultTopTrack.size()));
            result.add(randomTopTrack);
        }
    }

    // MoodsSearchCriteria criteria6 = new MoodsSearchCriteria(settingsService.getPandoraResultSimilar(), null, tags, null, null, null, userGroupId);
    // result.addAll(searchService.getRandomSongs(criteria6));
    // if ("TEST".contains(settingsService.getLogfileLevel())) {
    //     System.out.println("-=Pandora-R6=-");
    //     for (MediaFile m : searchService.getRandomSongs(criteria6)) { System.out.println(m.getPath()); }
    // }

    // Filter out duplicates
    HashSet<MediaFile> hs = new HashSet<MediaFile>();
    hs.addAll(result);
    result.clear();
    result.addAll(hs);

    player.getPlayQueue().addFiles(false, result);
    player.getPlayQueue().setRandomSearchCriteria(null);
    player.getPlayQueue().shuffle();
    return convert(request, player, true);
}
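The "Filter out duplicates" step at the end of the method above is a common addAll idiom: pour the list into a HashSet, then pour it back. A minimal sketch of just that step, with a generic placeholder type rather than the Madsonic MediaFile class; it only de-duplicates correctly if the element type implements equals and hashCode consistently:

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;

final class DedupInPlace {
    // Remove duplicates from a list in place; element order is not preserved
    static <T> void dedup(List<T> list) {
        HashSet<T> seen = new HashSet<T>();
        seen.addAll(list);   // duplicates collapse here
        list.clear();
        list.addAll(seen);   // copy the unique elements back
    }

    public static void main(String[] args) {
        List<String> songs = new ArrayList<String>(List.of("a", "b", "a", "c"));
        dedup(songs);
        System.out.println(songs); // three unique entries, arbitrary order
    }
}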
From source file:org.apache.accumulo.server.tabletserver.MemKeyComparator.java
public void delete(long waitTime) {
    synchronized (this) {
        if (deleted)
            throw new IllegalStateException("Double delete");
        deleted = true;
    }

    long t1 = System.currentTimeMillis();

    while (activeIters.size() > 0 && System.currentTimeMillis() - t1 < waitTime) {
        UtilWaitThread.sleep(50);
    }

    if (activeIters.size() > 0) {
        // dump memmap exactly as is to a tmp file on disk, and switch scans to that temp file
        try {
            Configuration conf = CachedConfiguration.getInstance();
            FileSystem fs = TraceFileSystem.wrap(FileSystem.getLocal(conf));

            String tmpFile = memDumpDir + "/memDump" + UUID.randomUUID() + "." + RFile.EXTENSION;

            Configuration newConf = new Configuration(conf);
            newConf.setInt("io.seqfile.compress.blocksize", 100000);

            FileSKVWriter out = new RFileOperations().openWriter(tmpFile, fs, newConf,
                    ServerConfiguration.getSiteConfiguration());

            InterruptibleIterator iter = map.skvIterator();

            HashSet<ByteSequence> allfams = new HashSet<ByteSequence>();

            for (Entry<String, Set<ByteSequence>> entry : lggroups.entrySet()) {
                allfams.addAll(entry.getValue());
                out.startNewLocalityGroup(entry.getKey(), entry.getValue());
                iter.seek(new Range(), entry.getValue(), true);
                dumpLocalityGroup(out, iter);
            }

            out.startDefaultLocalityGroup();
            iter.seek(new Range(), allfams, false);
            dumpLocalityGroup(out, iter);

            out.close();

            log.debug("Created mem dump file " + tmpFile);

            memDumpFile = tmpFile;

            synchronized (activeIters) {
                for (MemoryIterator mi : activeIters) {
                    mi.switchNow();
                }
            }

            // rely on unix behavior that file will be deleted when last
            // reader closes it
            fs.delete(new Path(memDumpFile), true);

        } catch (IOException ioe) {
            log.error("Failed to create mem dump file ", ioe);

            while (activeIters.size() > 0) {
                UtilWaitThread.sleep(100);
            }
        }
    }

    SimpleMap tmpMap = map;

    synchronized (this) {
        map = null;
    }

    tmpMap.delete();
}
From source file:org.acmsl.queryj.customsql.xml.SqlXmlParserImpl.java
/**
 * Post-processes given items.
 * @param items the items.
 * @param <T> the type of the items.
 * @return the processed items.
 */
@NotNull
protected <T extends IdentifiableElement<String>> List<T> postProcess(@NotNull final List<T> items) {
    final List<T> result;

    final HashSet<T> aux = new HashSet<>(items.size());
    aux.addAll(items);

    result = new ArrayList<>(aux.size());
    result.addAll(aux);

    return result;
}
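Because HashSet makes no ordering guarantees, the postProcess above returns the de-duplicated items in arbitrary order. When the original ordering matters, the same addAll pattern works with a LinkedHashSet, which keeps insertion order. A standalone sketch, not part of QueryJ:

import java.util.ArrayList;
import java.util.LinkedHashSet;
import java.util.List;

final class OrderedDedup {
    // Keep the first occurrence of each item in its original position
    static <T> List<T> dedupKeepOrder(List<T> items) {
        LinkedHashSet<T> aux = new LinkedHashSet<T>(items.size());
        aux.addAll(items);
        return new ArrayList<T>(aux);
    }
}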
From source file:io.hops.hopsworks.api.zeppelin.socket.NotebookServer.java
@OnMessage
public void onMessage(String msg, Session conn) {
    Notebook notebook = impl.notebook();
    try {
        Message messagereceived = deserializeMessage(msg);
        LOG.log(Level.FINE, "RECEIVE << {0}", messagereceived.op);
        LOG.log(Level.FINE, "RECEIVE PRINCIPAL << {0}", messagereceived.principal);
        LOG.log(Level.FINE, "RECEIVE TICKET << {0}", messagereceived.ticket);
        LOG.log(Level.FINE, "RECEIVE ROLES << {0}", messagereceived.roles);

        String ticket = TicketContainer.instance.getTicket(messagereceived.principal);
        Users user = userBean.findByEmail(this.sender);

        if (ticket != null && (messagereceived.ticket == null || !ticket.equals(messagereceived.ticket))) {
            /*
             * not to pollute logs, log instead of exception
             */
            if (StringUtils.isEmpty(messagereceived.ticket)) {
                LOG.log(Level.INFO, "{0} message: invalid ticket {1} != {2}",
                        new Object[] { messagereceived.op, messagereceived.ticket, ticket });
            } else if (!messagereceived.op.equals(OP.PING)) {
                impl.sendMsg(conn, serializeMessage(new Message(OP.SESSION_LOGOUT).put("info",
                        "Your ticket is invalid possibly due to server restart. Please login again.")));
            }
            try {
                session.close(new CloseReason(CloseReason.CloseCodes.NORMAL_CLOSURE,
                        "Invalid ticket " + messagereceived.ticket + " != " + ticket));
            } catch (IOException ex) {
                LOG.log(Level.SEVERE, null, ex);
            }
        }

        boolean allowAnonymous = impl.getConf().getConf().isAnonymousAllowed();
        if (!allowAnonymous && messagereceived.principal.equals("anonymous")) {
            throw new Exception("Anonymous access not allowed ");
        }

        messagereceived.principal = this.project.getProjectGenericUser();
        HashSet<String> userAndRoles = new HashSet<>();
        userAndRoles.add(messagereceived.principal);
        if (!messagereceived.roles.equals("")) {
            HashSet<String> roles = gson.fromJson(messagereceived.roles,
                    new TypeToken<HashSet<String>>() {
                    }.getType());
            if (roles != null) {
                userAndRoles.addAll(roles);
            }
        }

        AuthenticationInfo subject = new AuthenticationInfo(messagereceived.principal, messagereceived.roles,
                messagereceived.ticket);

        /**
         * Lets be elegant here
         */
        switch (messagereceived.op) {
        case LIST_NOTES: impl.unicastNoteList(conn, subject, userAndRoles); break;
        case RELOAD_NOTES_FROM_REPO: impl.broadcastReloadedNoteList(subject, userAndRoles); break;
        case GET_HOME_NOTE: impl.sendHomeNote(conn, userAndRoles, notebook, messagereceived, user); break;
        case GET_NOTE: impl.sendNote(conn, userAndRoles, notebook, messagereceived, user, this.hdfsUsername); break;
        case NEW_NOTE: impl.createNote(conn, userAndRoles, notebook, messagereceived); break;
        case DEL_NOTE: impl.removeNote(conn, userAndRoles, notebook, messagereceived, user); break;
        case REMOVE_FOLDER: impl.removeFolder(conn, userAndRoles, notebook, messagereceived, user); break;
        case MOVE_NOTE_TO_TRASH: impl.moveNoteToTrash(conn, userAndRoles, notebook, messagereceived, user); break;
        case MOVE_FOLDER_TO_TRASH: impl.moveFolderToTrash(conn, userAndRoles, notebook, messagereceived, user); break;
        case EMPTY_TRASH: impl.emptyTrash(conn, userAndRoles, notebook, messagereceived, user); break;
        case RESTORE_FOLDER: impl.restoreFolder(conn, userAndRoles, notebook, messagereceived, user); break;
        case RESTORE_NOTE: impl.restoreNote(conn, userAndRoles, notebook, messagereceived, user); break;
        case RESTORE_ALL: impl.restoreAll(conn, userAndRoles, notebook, messagereceived, user); break;
        case CLONE_NOTE: impl.cloneNote(conn, userAndRoles, notebook, messagereceived); break;
        case IMPORT_NOTE: impl.importNote(conn, userAndRoles, notebook, messagereceived); break;
        case COMMIT_PARAGRAPH: impl.updateParagraph(conn, userAndRoles, notebook, messagereceived, user); break;
        case RUN_PARAGRAPH: impl.runParagraph(conn, userAndRoles, notebook, messagereceived, user, certificateMaterializer, settings, dfsService); break;
        case PARAGRAPH_EXECUTED_BY_SPELL: impl.broadcastSpellExecution(conn, userAndRoles, notebook, messagereceived, user); break;
        case RUN_ALL_PARAGRAPHS: impl.runAllParagraphs(conn, userAndRoles, notebook, messagereceived, user, certificateMaterializer, settings, dfsService); break;
        case CANCEL_PARAGRAPH: impl.cancelParagraph(conn, userAndRoles, notebook, messagereceived, user); break;
        case MOVE_PARAGRAPH: impl.moveParagraph(conn, userAndRoles, notebook, messagereceived, user); break;
        case INSERT_PARAGRAPH: impl.insertParagraph(conn, userAndRoles, notebook, messagereceived, user); break;
        case COPY_PARAGRAPH: impl.copyParagraph(conn, userAndRoles, notebook, messagereceived, user); break;
        case PARAGRAPH_REMOVE: impl.removeParagraph(conn, userAndRoles, notebook, messagereceived, user); break;
        case PARAGRAPH_CLEAR_OUTPUT: impl.clearParagraphOutput(conn, userAndRoles, notebook, messagereceived, user, this.hdfsUsername); break;
        case PARAGRAPH_CLEAR_ALL_OUTPUT: impl.clearAllParagraphOutput(conn, userAndRoles, notebook, messagereceived, user); break;
        case NOTE_UPDATE: impl.updateNote(conn, userAndRoles, notebook, messagereceived, user); break;
        case NOTE_RENAME: impl.renameNote(conn, userAndRoles, notebook, messagereceived, user); break;
        case FOLDER_RENAME: impl.renameFolder(conn, userAndRoles, notebook, messagereceived, user); break;
        case UPDATE_PERSONALIZED_MODE: impl.updatePersonalizedMode(conn, userAndRoles, notebook, messagereceived, user); break;
        case COMPLETION: impl.completion(conn, userAndRoles, notebook, messagereceived); break;
        case PING: break; // do nothing
        case ANGULAR_OBJECT_UPDATED: impl.angularObjectUpdated(conn, userAndRoles, notebook, messagereceived); break;
        case ANGULAR_OBJECT_CLIENT_BIND: impl.angularObjectClientBind(conn, userAndRoles, notebook, messagereceived); break;
        case ANGULAR_OBJECT_CLIENT_UNBIND: impl.angularObjectClientUnbind(conn, userAndRoles, notebook, messagereceived); break;
        case LIST_CONFIGURATIONS: impl.sendAllConfigurations(conn, userAndRoles, notebook); break;
        case CHECKPOINT_NOTE: impl.checkpointNote(conn, notebook, messagereceived); break;
        case LIST_REVISION_HISTORY: impl.listRevisionHistory(conn, notebook, messagereceived); break;
        case SET_NOTE_REVISION: impl.setNoteRevision(conn, userAndRoles, notebook, messagereceived, user); break;
        case NOTE_REVISION: impl.getNoteByRevision(conn, notebook, messagereceived); break;
        case LIST_NOTE_JOBS: impl.unicastNoteJobInfo(conn, messagereceived); break;
        case UNSUBSCRIBE_UPDATE_NOTE_JOBS: impl.unsubscribeNoteJobInfo(conn); break;
        case GET_INTERPRETER_BINDINGS: impl.getInterpreterBindings(conn, messagereceived); break;
        case SAVE_INTERPRETER_BINDINGS: impl.saveInterpreterBindings(conn, messagereceived, zeppelinResource); break;
        case EDITOR_SETTING: impl.getEditorSetting(conn, messagereceived); break;
        case GET_INTERPRETER_SETTINGS: impl.getInterpreterSettings(conn, subject); break;
        case WATCHER: impl.switchConnectionToWatcher(conn, messagereceived, this.hdfsUsername, notebookServerImplFactory); break;
        default: break;
        }
    } catch (Exception e) {
        Level logLevel = Level.SEVERE;
        if (e.getMessage().contains("is not allowed to empty the Trash")) {
            logLevel = Level.INFO;
        }
        LOG.log(logLevel, "Can't handle message", e);
    }
}
From source file:com.vmware.demo.sgf.lucene.impl.LuceneGemFireRepositoryImpl.java
@Override
public Iterable findAll(Iterable ids) {
    HashSet set = new HashSet();

    for (Object id : ids) {
        if (!(id instanceof SearcheCriteria))
            throw new IllegalArgumentException("Require SearcheCriteria parameter.");

        List r = (List) searchStoreForMultipleItems((SearcheCriteria) id, false);

        // Concatenate results in to a unique set
        if (!r.isEmpty())
            set.addAll(r);
    }
    return Arrays.asList(set);
}
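A note on the example above: Arrays.asList(set) wraps the entire HashSet as the single element of a one-item List, so callers iterate over one set object rather than over the merged results. If the intent is an Iterable of the merged, de-duplicated items, returning the set itself (or copying it into a list) does that directly. A sketch of the merge step with generic placeholder types, not the VMware classes:

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;

final class MergeResults {
    // Merge several result lists into one de-duplicated Iterable
    static <T> Iterable<T> merge(Iterable<List<T>> resultLists) {
        HashSet<T> set = new HashSet<T>();
        for (List<T> r : resultLists) {
            set.addAll(r); // union of all results, duplicates dropped
        }
        return new ArrayList<T>(set); // or simply return set
    }
}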
From source file:edu.tum.cs.conqat.quamoco.qiesl.QIESLEngine.java
/** Returns the referenced variables in the AST */
private HashSet<String> getReferencedVariables(Node node) throws QIESLException {
    HashSet<String> identifiers = new HashSet<String>();

    if (node instanceof ASTSizeFunction) {
        throw new QIESLException("Function size does not exist. Use extent instead");
    }

    if (node instanceof ASTIdentifier) {
        if (node.jjtGetParent() instanceof ASTMethodNode) {
            // TODO(FD): Validate functions
        } else if (!isLeftHandSideOfAssignment((ASTIdentifier) node)) {
            identifiers.add(((ASTIdentifier) node).image);
        }
    }

    for (int i = 0; i < node.jjtGetNumChildren(); i++) {
        identifiers.addAll(getReferencedVariables(node.jjtGetChild(i)));
    }
    return identifiers;
}
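The recursion above uses addAll to fold each child's result set into the parent's. The same shape applies to any tree walk that accumulates values into a set; a generic sketch with a hypothetical Node type for illustration only (not the AST classes used above):

import java.util.HashSet;
import java.util.List;

final class TreeCollect {
    // Hypothetical tree node, only for this sketch
    interface Node {
        String value();
        List<Node> children();
    }

    static HashSet<String> collect(Node node) {
        HashSet<String> values = new HashSet<String>();
        values.add(node.value());
        for (Node child : node.children()) {
            values.addAll(collect(child)); // merge the subtree's values
        }
        return values;
    }
}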
From source file:org.hyperic.hq.authz.server.session.ResourceGroupManagerImpl.java
/**
 * Get the resource groups with the specified ids
 * @param ids the resource group ids
 * @param pc Paging information for the request
 */
@Transactional(readOnly = true)
public PageList<ResourceGroupValue> getResourceGroupsById(AuthzSubject whoami, Integer[] ids, PageControl pc)
        throws PermissionException {
    if (ids.length == 0) {
        return new PageList<ResourceGroupValue>();
    }

    PageControl allPc = new PageControl();
    // get all roles, sorted but not paged
    allPc.setSortattribute(pc.getSortattribute());
    allPc.setSortorder(pc.getSortorder());

    Collection<ResourceGroup> all = getAllResourceGroups(whoami, false);

    // build an index of ids
    HashSet<Integer> index = new HashSet<Integer>();
    index.addAll(Arrays.asList(ids));
    int numToFind = index.size();

    // find the requested roles
    List<ResourceGroup> groups = new ArrayList<ResourceGroup>(numToFind);
    Iterator<ResourceGroup> i = all.iterator();
    while (i.hasNext() && (groups.size() < numToFind)) {
        ResourceGroup g = i.next();
        if (index.contains(g.getId())) {
            groups.add(g);
        }
    }

    // TODO: G
    PageList<ResourceGroupValue> plist = _groupPager.seek(groups, pc.getPagenum(), pc.getPagesize());
    plist.setTotalSize(groups.size());

    return plist;
}
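The two-step index construction above (new HashSet followed by addAll over Arrays.asList(ids)) builds a constant-time membership index for the array; the HashSet copy constructor does effectively the same addAll in one step. A standalone sketch:

import java.util.Arrays;
import java.util.HashSet;

final class IdIndex {
    static HashSet<Integer> indexOf(Integer[] ids) {
        // Equivalent to: HashSet<Integer> index = new HashSet<Integer>(); index.addAll(Arrays.asList(ids));
        return new HashSet<Integer>(Arrays.asList(ids));
    }
}

Either way, index.contains(id) then answers membership in roughly constant time, independent of the length of the ids array.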
From source file:org.apache.ambari.server.orm.dao.HostConfigMappingDAO.java
@RequiresSession
public Set<HostConfigMapping> findSelectedByHosts(long clusterId, Collection<String> hostNames) {
    populateCache();

    if (hostNames == null || hostNames.isEmpty()) {
        return Collections.emptySet();
    }

    HashSet<HostConfigMapping> result = new HashSet<HostConfigMapping>();

    for (final String hostName : hostNames) {
        if (!hostConfigMappingByHost.containsKey(hostName))
            continue;

        Set<HostConfigMapping> set = new HashSet<HostConfigMapping>(hostConfigMappingByHost.get(hostName));

        CollectionUtils.filter(set, new Predicate() {
            @Override
            public boolean evaluate(Object arg0) {
                return ((HostConfigMapping) arg0).getHostName().equals(hostName)
                        && ((HostConfigMapping) arg0).getSelected() > 0;
            }
        });

        result.addAll(set);
    }

    return result;
}
From source file:com.polyvi.xface.extension.contact.XContactsExt.java
/**
 * Converts a JSONArray of contact field names into a HashSet.
 *
 * @param fields
 *            the field names passed in from JavaScript
 */
protected HashSet<String> getContactFieldSet(JSONArray fields) {
    HashSet<String> requiredFieldsSet = new HashSet<String>();
    try {
        // fields is ["*"]: all contact fields are requested
        if (fields.length() == 1 && fields.getString(0).equals("*")) {
            // add every logical contact field name
            requiredFieldsSet.addAll(XContactAccessor.LOGIC_CONTACT_FIELDS);
        } else {
            int len = fields.length();
            for (int i = 0; i < len; i++) {
                // pass the field names requested by JS through to the impl layer
                requiredFieldsSet.add(fields.getString(i));
            }
        }
    } catch (JSONException e) {
        e.printStackTrace();
        XLog.e(CLASS_NAME, e.getMessage(), e);
    }
    return requiredFieldsSet;
}
From source file:com.redhat.victims.database.VictimsSqlDB.java
public HashSet<String> getVulnerabilities(HashMap<String, String> props) throws VictimsException {
    try {
        HashSet<String> cves = new HashSet<String>();

        int requiredMinCount = props.size();
        ResultSet rs;
        PreparedStatement ps;
        Connection connection = getConnection();
        try {
            ps = setObjects(connection, Query.PROPERTY_MATCH, props.keySet().toArray(),
                    props.values().toArray());
            rs = ps.executeQuery();
            while (rs.next()) {
                Integer id = rs.getInt("record");
                Integer count = rs.getInt("count");
                if (count == requiredMinCount) {
                    cves.addAll(getVulnerabilities(id));
                }
            }
            rs.close();
            ps.close();
        } finally {
            connection.close();
        }
        return cves;
    } catch (SQLException e) {
        throw new VictimsException("Failed to search on properties", e);
    }
}