List of usage examples for java.util.HashSet.addAll
boolean addAll(Collection<? extends E> c);
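Before the real-world examples below, a minimal self-contained sketch of the contract (the class name AddAllDemo is illustrative, not from any of the sources): addAll copies every element of the argument collection into the set, skips elements already present, and returns true only if the set actually changed.

    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.List;

    public class AddAllDemo {
        public static void main(String[] args) {
            HashSet<String> set = new HashSet<String>(Arrays.asList("a", "b"));
            List<String> more = Arrays.asList("b", "c");
            boolean changed = set.addAll(more); // true: "c" was not yet present
            // Iteration order of a HashSet is not guaranteed.
            System.out.println(changed + " " + set); // e.g. true [a, b, c]
        }
    }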
From source file:org.epics.archiverappliance.config.DefaultConfigService.java
@Override
public Set<String> getPVsForApplianceMatchingRegex(String nameToMatch) {
    logger.debug("Finding matching names for " + nameToMatch);
    LinkedList<String> fixedStringParts = new LinkedList<String>();
    String[] parts = this.pvName2KeyConverter.breakIntoParts(nameToMatch);
    Pattern fixedStringPattern = Pattern.compile("[a-zA-Z_0-9-]+");
    for (String part : parts) {
        if (fixedStringPattern.matcher(part).matches()) {
            logger.debug("Fixed string part " + part);
            fixedStringParts.add(part);
        } else {
            logger.debug("Regex string part " + part);
        }
    }

    if (fixedStringParts.size() > 0) {
        HashSet<String> ret = new HashSet<String>();
        HashSet<String> namesSubset = new HashSet<String>();
        // This reverse is probably specific to SLAC's namespace rules, but it does make a big difference.
        // Perhaps we can use a more intelligent way of choosing the specific path through the trie.
        Collections.reverse(fixedStringParts);
        for (String fixedStringPart : fixedStringParts) {
            ConcurrentSkipListSet<String> pvNamesForPart = parts2PVNamesForThisAppliance.get(fixedStringPart);
            if (pvNamesForPart != null) {
                if (namesSubset.isEmpty()) {
                    namesSubset.addAll(pvNamesForPart);
                } else {
                    namesSubset.retainAll(pvNamesForPart);
                }
            }
        }
        logger.debug("Using fixed string path matching against names " + namesSubset.size());
        Pattern pattern = Pattern.compile(nameToMatch);
        for (String pvName : namesSubset) {
            if (pattern.matcher(pvName).matches()) {
                ret.add(pvName);
            }
        }
        return ret;
    } else {
        // The user's pattern did not have any fixed elements at all.
        // In this case we do brute-force matching, which takes longer.
        // This is also not optimal, but we probably don't want yet another list of PVs.
        Pattern pattern = Pattern.compile(nameToMatch);
        HashSet<String> allNames = new HashSet<String>();
        HashSet<String> ret = new HashSet<String>();
        logger.debug("Using brute force pattern matching against names");
        for (ConcurrentSkipListSet<String> pvNamesForPart : parts2PVNamesForThisAppliance.values()) {
            allNames.addAll(pvNamesForPart);
        }
        for (String pvName : allNames) {
            if (pattern.matcher(pvName).matches()) {
                ret.add(pvName);
            }
        }
        return ret;
    }
}
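The fixed-string branch above uses an idiom worth isolating: seed the result with addAll on the first lookup, then narrow it with retainAll on each subsequent one, computing an intersection incrementally. A minimal sketch of just that idiom, with illustrative names that are not from the source:

    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.List;

    public class IntersectionDemo {
        public static void main(String[] args) {
            List<List<String>> groups = Arrays.asList(
                    Arrays.asList("a", "b", "c"),
                    Arrays.asList("b", "c", "d"),
                    Arrays.asList("c", "b"));
            HashSet<String> subset = new HashSet<String>();
            for (List<String> group : groups) {
                if (subset.isEmpty()) {
                    subset.addAll(group);   // seed with the first group
                } else {
                    subset.retainAll(group); // keep only elements present everywhere
                }
            }
            // Note: like the original, this reseeds if the intersection ever becomes empty.
            System.out.println(subset); // [b, c] (order not guaranteed)
        }
    }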
From source file:edu.ku.brc.dbsupport.ImportExportDB.java
@SuppressWarnings("unchecked") private List removeDuplicates(List list) { HashSet<List> set = new HashSet<List>(); set.addAll(list); list.clear();//from www.java2 s.co m list.addAll(set); return list; }
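As an aside, the example above relies on raw types and an unordered HashSet, so the elements' order is lost. A type-safe, order-preserving variant of the same dedup idea (class and method names here are illustrative, not from the source) could look like:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.LinkedHashSet;
    import java.util.List;

    public class DedupDemo {
        // LinkedHashSet keeps the first-seen order of the elements.
        static <T> List<T> removeDuplicates(List<T> list) {
            return new ArrayList<T>(new LinkedHashSet<T>(list));
        }

        public static void main(String[] args) {
            System.out.println(removeDuplicates(Arrays.asList(1, 2, 2, 3, 1))); // [1, 2, 3]
        }
    }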
From source file:nl.umcg.westrah.binarymetaanalyzer.BinaryMetaAnalysis.java
private void createProbeIndex(String outdir) throws IOException {
    HashSet<String> confineToTheseProbes = null;
    HashSet<String> probePreselection = null;

    if (settings.getSNPProbeSelection() != null) {
        System.out.println("Getting Probes from SNP/Probe selection file: " + settings.getSNPProbeSelection());
        probePreselection = new HashSet<String>();
        TextFile tf = new TextFile(settings.getSNPProbeSelection(), TextFile.R);
        String[] elems = tf.readLineElems(TextFile.tab);
        while (elems != null) {
            if (elems.length >= 2) {
                String probe = elems[1];
                probePreselection.add(probe);
            }
            elems = tf.readLineElems(TextFile.tab);
        }
        tf.close();
        System.out.println("Found " + probePreselection.size() + " unique probes in SNP/Probe selection file.");
        if (probePreselection.isEmpty()) {
            System.err.println("Error: SNP/Probe selection file defined, but no Probes found.");
            System.exit(-1);
        }
    }

    if (settings.getProbeselection() != null) {
        System.out.println("Selecting Probes from file: " + settings.getProbeselection());
        confineToTheseProbes = new HashSet<String>();
        TextFile tf = new TextFile(settings.getProbeselection(), TextFile.R);
        if (probePreselection == null) {
            confineToTheseProbes.addAll(tf.readAsArrayList());
        } else {
            ArrayList<String> confineTMP = tf.readAsArrayList();
            for (String p : confineTMP) {
                if (probePreselection.contains(p)) {
                    confineToTheseProbes.add(p);
                }
            }
        }
        tf.close();
        System.out.println(confineToTheseProbes.size() + " Probes loaded.");
    } else if (probePreselection != null) {
        confineToTheseProbes = probePreselection;
    }

    System.out.println("");

    // TODO: write probe list of probes that we didn't find in the annotation
    probeIndex = new Integer[traitList.length][datasets.length];
    for (int d = 0; d < datasets.length; d++) {
        String[] probes = datasets[d].getProbeList();
        int platformId = probeAnnotation.getPlatformId(datasets[d].getPlatform());
        HashMap<String, MetaQTL4MetaTrait> traitHashForPlatform = probeAnnotation
                .getTraitHashForPlatform(platformId);
        for (int p = 0; p < probes.length; p++) {
            MetaQTL4MetaTrait t = traitHashForPlatform.get(probes[p]);
            if (t != null) {
                int index = traitMap.get(t);
                // if (confineToTheseProbes == null || confineToTheseProbes.contains(probes[p]) || confineToTheseProbes.contains(t.getMetaTraitName())) {
                if (confineToTheseProbes == null || confineToTheseProbes.contains(t.getMetaTraitName())) {
                    // TODO: was there a reason we selected specific platform probes/identifiers?
                    probeIndex[index][d] = p;
                }
            }
        }
    }

    System.out.println("");

    TextFile out = new TextFile(outdir + "probeindex.txt", TextFile.W);
    String header = "metaID";
    for (int d = 0; d < datasets.length; d++) {
        header += "\t" + datasets[d].getName() + "-pid\t" + datasets[d].getName() + "-probename";
    }
    out.writeln(header);
    for (int p = 0; p < probeIndex.length; p++) {
        String lnout = "" + traitList[p].getMetaTraitId();
        for (int d = 0; d < datasets.length; d++) {
            Integer pid = probeIndex[p][d];
            String probeName = null;
            if (pid != null) {
                probeName = datasets[d].getProbeList()[pid];
            }
            lnout += "\t" + pid + "\t" + probeName;
        }
        out.writeln(lnout);
    }
    out.close();
}
From source file:org.lol.reddit.reddit.api.RedditAPIIndividualSubredditListRequester.java
private void doSubredditListRequest(final RedditSubredditManager.SubredditListType type,
        final RequestResponseHandler<WritableHashSet, SubredditRequestFailure> handler, final String after) {

    URI uri;
    switch (type) {
        case SUBSCRIBED:
            uri = Constants.Reddit.getUri(Constants.Reddit.PATH_SUBREDDITS_MINE_SUBSCRIBER);
            break;
        case MODERATED:
            uri = Constants.Reddit.getUri(Constants.Reddit.PATH_SUBREDDITS_MINE_MODERATOR);
            break;
        case MOST_POPULAR:
            uri = Constants.Reddit.getUri(Constants.Reddit.PATH_SUBREDDITS_POPULAR);
            break;
        default:
            throw new UnexpectedInternalStateException(type.name());
    }

    if (after != null) {
        // TODO move this logic to General?
        final Uri.Builder builder = Uri.parse(uri.toString()).buildUpon();
        builder.appendQueryParameter("after", after);
        uri = General.uriFromString(builder.toString());
    }

    final CacheRequest aboutSubredditCacheRequest = new CacheRequest(uri, user, null,
            Constants.Priority.API_SUBREDDIT_INVIDIVUAL, 0, CacheRequest.DownloadType.FORCE,
            Constants.FileType.SUBREDDIT_LIST, true, true, false, context) {

        @Override
        protected void onCallbackException(Throwable t) {
            handler.onRequestFailed(
                    new SubredditRequestFailure(RequestFailureType.PARSE, t, null, "Internal error", url));
        }

        @Override
        protected void onDownloadNecessary() {
        }

        @Override
        protected void onDownloadStarted() {
        }

        @Override
        protected void onProgress(long bytesRead, long totalBytes) {
        }

        @Override
        protected void onFailure(RequestFailureType type, Throwable t, StatusLine status,
                String readableMessage) {
            handler.onRequestFailed(
                    new SubredditRequestFailure(type, t, status, readableMessage, url.toString()));
        }

        @Override
        protected void onSuccess(CacheManager.ReadableCacheFile cacheFile, long timestamp, UUID session,
                boolean fromCache, String mimetype) {
        }

        @Override
        public void onJsonParseStarted(JsonValue result, long timestamp, UUID session, boolean fromCache) {

            try {
                final HashSet<String> output = new HashSet<String>();
                final ArrayList<RedditSubreddit> toWrite = new ArrayList<RedditSubreddit>();

                final JsonBufferedObject redditListing = result.asObject().getObject("data");
                final JsonBufferedArray subreddits = redditListing.getArray("children");
                final JsonBuffered.Status joinStatus = subreddits.join();

                if (joinStatus == JsonBuffered.Status.FAILED) {
                    handler.onRequestFailed(new SubredditRequestFailure(RequestFailureType.PARSE, null, null,
                            "Unknown parse error", url.toString()));
                    return;
                }

                if (type == RedditSubredditManager.SubredditListType.SUBSCRIBED
                        && subreddits.getCurrentItemCount() == 0 && after == null) {
                    // No subscriptions found: fall back to the "most popular" list.
                    doSubredditListRequest(RedditSubredditManager.SubredditListType.MOST_POPULAR, handler, null);
                    return;
                }

                for (final JsonValue v : subreddits) {
                    final RedditThing thing = v.asObject(RedditThing.class);
                    final RedditSubreddit subreddit = thing.asSubreddit();
                    subreddit.downloadTime = timestamp;
                    toWrite.add(subreddit);
                    output.add(subreddit.getCanonicalName());
                }

                RedditSubredditManager.getInstance(context, user).offerRawSubredditData(toWrite, timestamp);
                final String receivedAfter = redditListing.getString("after");

                if (receivedAfter != null && type != RedditSubredditManager.SubredditListType.MOST_POPULAR) {

                    // More pages to fetch: recurse, then merge the later pages into this page's output.
                    doSubredditListRequest(type,
                            new RequestResponseHandler<WritableHashSet, SubredditRequestFailure>() {

                                public void onRequestFailed(SubredditRequestFailure failureReason) {
                                    handler.onRequestFailed(failureReason);
                                }

                                public void onRequestSuccess(WritableHashSet result, long timeCached) {
                                    output.addAll(result.toHashset());
                                    handler.onRequestSuccess(
                                            new WritableHashSet(output, timeCached, type.name()), timeCached);

                                    if (after == null) {
                                        Log.i("SubredditListRequester",
                                                "Got " + output.size() + " subreddits in multiple requests");
                                    }
                                }
                            }, receivedAfter);

                } else {
                    handler.onRequestSuccess(new WritableHashSet(output, timestamp, type.name()), timestamp);

                    if (after == null) {
                        Log.i("SubredditListRequester", "Got " + output.size() + " subreddits in 1 request");
                    }
                }

            } catch (Exception e) {
                handler.onRequestFailed(new SubredditRequestFailure(RequestFailureType.PARSE, e, null,
                        "Parse error", url.toString()));
            }
        }
    };

    CacheManager.getInstance(context).makeRequest(aboutSubredditCacheRequest);
}
From source file:org.meresco.lucene.Lucene.java
private List<Hit> topDocsResponse(QueryData q, Collectors collectors) throws Exception {
    int totalHits = collectors.topCollector.getTotalHits();
    DeDupFilterSuperCollector dedupCollector = collectors.dedupCollector;
    GroupSuperCollector groupingCollector = collectors.groupingCollector;

    HashSet<String> seenIds = new HashSet<>();
    int count = q.start;
    List<LuceneResponse.Hit> hits = new ArrayList<>();
    // TODO: temp fix for start/stop = 0
    for (ScoreDoc scoreDoc : collectors.topCollector.topDocs(q.stop == 0 ? 1 : q.start).scoreDocs) {
        if (count >= q.stop)
            break;
        if (dedupCollector != null) {
            DeDupFilterSuperCollector.Key keyForDocId = dedupCollector.keyForDocId(scoreDoc.doc);
            int newDocId = keyForDocId == null ? scoreDoc.doc : keyForDocId.getDocId();
            DedupHit hit = new DedupHit(getDocument(newDocId).get(ID_FIELD), scoreDoc.score);
            hit.duplicateField = dedupCollector.getKeyName();
            hit.duplicateCount = 1;
            if (keyForDocId != null)
                hit.duplicateCount = keyForDocId.getCount();
            hit.score = scoreDoc.score;
            hits.add(hit);
        } else if (groupingCollector != null) {
            GroupingHit hit = new GroupingHit(getDocument(scoreDoc.doc).get(ID_FIELD), scoreDoc.score);
            if (seenIds.contains(hit.id))
                continue;
            List<String> duplicateIds = new ArrayList<>();
            duplicateIds.add(hit.id);
            if (totalHits > (q.stop - q.start)) {
                List<Integer> groupedDocIds = groupingCollector.group(scoreDoc.doc);
                if (groupedDocIds != null)
                    for (int docId : groupedDocIds) {
                        String id = getDocument(docId).get(ID_FIELD);
                        if (!id.equals(hit.id))
                            duplicateIds.add(id);
                    }
            }
            seenIds.addAll(duplicateIds);
            hit.groupingField = groupingCollector.getKeyName();
            hit.duplicates = duplicateIds;
            hit.score = scoreDoc.score;
            hits.add(hit);
        } else {
            Hit hit = new Hit(getDocument(scoreDoc.doc).get(ID_FIELD), scoreDoc.score);
            hits.add(hit);
        }
        count++;
    }
    return hits;
}
From source file:org.apache.zeppelin.socket.NotebookServer.java
@Override
public void onMessage(NotebookSocket conn, String msg) {
    Notebook notebook = notebook();
    try {
        Message messagereceived = deserializeMessage(msg);
        LOG.debug("RECEIVE << " + messagereceived.op);
        LOG.debug("RECEIVE PRINCIPAL << " + messagereceived.principal);
        LOG.debug("RECEIVE TICKET << " + messagereceived.ticket);
        LOG.debug("RECEIVE ROLES << " + messagereceived.roles);

        if (LOG.isTraceEnabled()) {
            LOG.trace("RECEIVE MSG = " + messagereceived);
        }

        String ticket = TicketContainer.instance.getTicket(messagereceived.principal);
        if (ticket != null && !ticket.equals(messagereceived.ticket)) {
            /* not to pollute logs, log instead of exception */
            if (StringUtils.isEmpty(messagereceived.ticket)) {
                LOG.debug("{} message: invalid ticket {} != {}", messagereceived.op,
                        messagereceived.ticket, ticket);
            } else {
                if (!messagereceived.op.equals(OP.PING)) {
                    conn.send(serializeMessage(new Message(OP.ERROR_INFO).put("info",
                            "Your ticket is invalid possibly due to server restart. "
                                    + "Please refresh the page and login again.")));
                }
            }
            return;
        }

        ZeppelinConfiguration conf = ZeppelinConfiguration.create();
        boolean allowAnonymous = conf.isAnonymousAllowed();
        if (!allowAnonymous && messagereceived.principal.equals("anonymous")) {
            throw new Exception("Anonymous access not allowed ");
        }

        HashSet<String> userAndRoles = new HashSet<>();
        userAndRoles.add(messagereceived.principal);
        if (!messagereceived.roles.equals("")) {
            HashSet<String> roles = gson.fromJson(messagereceived.roles,
                    new TypeToken<HashSet<String>>() {
                    }.getType());
            if (roles != null) {
                userAndRoles.addAll(roles);
            }
        }
        if (StringUtils.isEmpty(conn.getUser())) {
            addUserConnection(messagereceived.principal, conn);
        }
        AuthenticationInfo subject = new AuthenticationInfo(messagereceived.principal, messagereceived.ticket);

        /** Let's be elegant here */
        switch (messagereceived.op) {
            case LIST_NOTES: unicastNoteList(conn, subject, userAndRoles); break;
            case RELOAD_NOTES_FROM_REPO: broadcastReloadedNoteList(subject, userAndRoles); break;
            case GET_HOME_NOTE: sendHomeNote(conn, userAndRoles, notebook, messagereceived); break;
            case GET_NOTE: sendNote(conn, userAndRoles, notebook, messagereceived); break;
            case NEW_NOTE: createNote(conn, userAndRoles, notebook, messagereceived); break;
            case DEL_NOTE: removeNote(conn, userAndRoles, notebook, messagereceived); break;
            case REMOVE_FOLDER: removeFolder(conn, userAndRoles, notebook, messagereceived); break;
            case MOVE_NOTE_TO_TRASH: moveNoteToTrash(conn, userAndRoles, notebook, messagereceived); break;
            case MOVE_FOLDER_TO_TRASH: moveFolderToTrash(conn, userAndRoles, notebook, messagereceived); break;
            case EMPTY_TRASH: emptyTrash(conn, userAndRoles, notebook, messagereceived); break;
            case RESTORE_FOLDER: restoreFolder(conn, userAndRoles, notebook, messagereceived); break;
            case RESTORE_NOTE: restoreNote(conn, userAndRoles, notebook, messagereceived); break;
            case RESTORE_ALL: restoreAll(conn, userAndRoles, notebook, messagereceived); break;
            case CLONE_NOTE: cloneNote(conn, userAndRoles, notebook, messagereceived); break;
            case IMPORT_NOTE: importNote(conn, userAndRoles, notebook, messagereceived); break;
            case COMMIT_PARAGRAPH: updateParagraph(conn, userAndRoles, notebook, messagereceived); break;
            case RUN_PARAGRAPH: runParagraph(conn, userAndRoles, notebook, messagereceived); break;
            case PARAGRAPH_EXECUTED_BY_SPELL: broadcastSpellExecution(conn, userAndRoles, notebook, messagereceived); break;
            case RUN_ALL_PARAGRAPHS: runAllParagraphs(conn, userAndRoles, notebook, messagereceived); break;
            case CANCEL_PARAGRAPH: cancelParagraph(conn, userAndRoles, notebook, messagereceived); break;
            case MOVE_PARAGRAPH: moveParagraph(conn, userAndRoles, notebook, messagereceived); break;
            case INSERT_PARAGRAPH: insertParagraph(conn, userAndRoles, notebook, messagereceived); break;
            case COPY_PARAGRAPH: copyParagraph(conn, userAndRoles, notebook, messagereceived); break;
            case PARAGRAPH_REMOVE: removeParagraph(conn, userAndRoles, notebook, messagereceived); break;
            case PARAGRAPH_CLEAR_OUTPUT: clearParagraphOutput(conn, userAndRoles, notebook, messagereceived); break;
            case PARAGRAPH_CLEAR_ALL_OUTPUT: clearAllParagraphOutput(conn, userAndRoles, notebook, messagereceived); break;
            case NOTE_UPDATE: updateNote(conn, userAndRoles, notebook, messagereceived); break;
            case NOTE_RENAME: renameNote(conn, userAndRoles, notebook, messagereceived); break;
            case FOLDER_RENAME: renameFolder(conn, userAndRoles, notebook, messagereceived); break;
            case UPDATE_PERSONALIZED_MODE: updatePersonalizedMode(conn, userAndRoles, notebook, messagereceived); break;
            case COMPLETION: completion(conn, userAndRoles, notebook, messagereceived); break;
            case PING: break; // do nothing
            case ANGULAR_OBJECT_UPDATED: angularObjectUpdated(conn, userAndRoles, notebook, messagereceived); break;
            case ANGULAR_OBJECT_CLIENT_BIND: angularObjectClientBind(conn, userAndRoles, notebook, messagereceived); break;
            case ANGULAR_OBJECT_CLIENT_UNBIND: angularObjectClientUnbind(conn, userAndRoles, notebook, messagereceived); break;
            case LIST_CONFIGURATIONS: sendAllConfigurations(conn, userAndRoles, notebook); break;
            case CHECKPOINT_NOTE: checkpointNote(conn, notebook, messagereceived); break;
            case LIST_REVISION_HISTORY: listRevisionHistory(conn, notebook, messagereceived); break;
            case SET_NOTE_REVISION: setNoteRevision(conn, userAndRoles, notebook, messagereceived); break;
            case NOTE_REVISION: getNoteByRevision(conn, notebook, messagereceived); break;
            case LIST_NOTE_JOBS: unicastNoteJobInfo(conn, messagereceived); break;
            case UNSUBSCRIBE_UPDATE_NOTE_JOBS: unsubscribeNoteJobInfo(conn); break;
            case GET_INTERPRETER_BINDINGS: getInterpreterBindings(conn, messagereceived); break;
            case SAVE_INTERPRETER_BINDINGS: saveInterpreterBindings(conn, messagereceived); break;
            case EDITOR_SETTING: getEditorSetting(conn, messagereceived); break;
            case GET_INTERPRETER_SETTINGS: getInterpreterSettings(conn, subject); break;
            case WATCHER: switchConnectionToWatcher(conn, messagereceived); break;
            default: break;
        }
    } catch (Exception e) {
        LOG.error("Can't handle message", e);
    }
}
From source file:org.madsonic.controller.RESTController.java
public void getPandoraSongs(HttpServletRequest request, HttpServletResponse response) throws Exception {
    request = wrapRequest(request);
    Player player = playerService.getPlayer(request, response);
    String username = securityService.getCurrentUsername(request);

    List<String> _artist = new ArrayList<String>();
    List<String> _albums = new ArrayList<String>();
    List<String> _genres = new ArrayList<String>();
    List<String> _mood = new ArrayList<String>();

    int userGroupId = securityService.getCurrentUserGroupId(request);

    XMLBuilder builder = createXMLBuilder(request, response, true);
    builder.add("pandoraSongs", false);

    List<MediaFile> resultList = new ArrayList<MediaFile>();
    List<MediaFile> mediaFiles = null;
    try {
        mediaFiles = getMediaFiles(request, userGroupId);
    } catch (SecurityException x) {
        // Ignored
    }

    for (MediaFile media : mediaFiles) {
        MediaFile mediaFile = mediaFileService.getMediaFile(media.getId());
        if (mediaFile != null) {
            _artist.add(media.getArtist());
            _albums.add(media.getAlbumName());
            _genres.add(media.getGenre());
            _mood.add(media.getMood());
        }
    }

    String[] artist = _artist.toArray(new String[_artist.size()]);
    String[] albums = _albums.toArray(new String[_albums.size()]);
    String[] genres = _genres.toArray(new String[_genres.size()]);
    String[] mood = _mood.toArray(new String[_mood.size()]);

    if (mediaFiles.size() > 0) {

        // RESULT: ALBUM
        MultiSearchCriteria criteria1 = new MultiSearchCriteria(settingsService.getPandoraResultAlbum(),
                artist, albums, null, null, null, null, null, userGroupId);
        resultList.addAll(searchService.getRandomSongs(criteria1));
        if ("TEST".contains(SettingsService.getLogfileLevel())) {
            System.out.println("-=Pandora-R1=-");
            for (MediaFile m : searchService.getRandomSongs(criteria1)) {
                System.out.println(m.getPath());
            }
        }

        // RESULT: ARTIST
        MultiSearchCriteria criteria2 = new MultiSearchCriteria(settingsService.getPandoraResultArtist(),
                artist, null, null, null, null, null, null, userGroupId);
        resultList.addAll(searchService.getRandomSongs(criteria2));
        if ("TEST".contains(SettingsService.getLogfileLevel())) {
            System.out.println("-=Pandora-R2=-");
            for (MediaFile m : searchService.getRandomSongs(criteria2)) {
                System.out.println(m.getPath());
            }
        }

        // RESULT: GENRE
        GenreSearchCriteria criteria3 = new GenreSearchCriteria(settingsService.getPandoraResultGenre(),
                null, genres, null, null, null, userGroupId);
        resultList.addAll(searchService.getRandomSongs(criteria3));
        if ("TEST".contains(SettingsService.getLogfileLevel())) {
            System.out.println("-=Pandora-R3=-");
            for (MediaFile m : searchService.getRandomSongs(criteria3)) {
                System.out.println(m.getPath());
            }
        }

        // RESULT: MOODS
        MoodsSearchCriteria criteria4 = new MoodsSearchCriteria(settingsService.getPandoraResultMood(),
                null, mood, null, null, null, userGroupId);
        resultList.addAll(searchService.getRandomSongs(criteria4));
        if ("TEST".contains(SettingsService.getLogfileLevel())) {
            System.out.println("-=Pandora-R4=-");
            for (MediaFile m : searchService.getRandomSongs(criteria4)) {
                System.out.println(m.getPath());
            }
        }

        // RESULT: OTHER
        List<String> allArtists = new ArrayList<String>();
        List<String> similar = new ArrayList<String>();
        for (String __artist : artist) {
            similar = lastFMService.getSimilarArtist(__artist);
            allArtists.addAll(similar);
            if (allArtists.size() > 10) {
                break;
            }
        }
        String[] array = allArtists.toArray(new String[allArtists.size()]);

        // RESULT: RANDOM
        MultiSearchCriteria criteria5 = new MultiSearchCriteria(settingsService.getPandoraResultSimilar(),
                array, null, null, null, null, null, null, userGroupId);
        resultList.addAll(searchService.getRandomSongs(criteria5));
        if ("TEST".contains(SettingsService.getLogfileLevel())) {
            System.out.println("-=Pandora-R5=-");
            for (MediaFile m : searchService.getRandomSongs(criteria5)) {
                System.out.println(m.getPath());
            }
        }

        // RESULT: ARTIST TOPTRACKS
        List<MediaFile> resultTopTrack = new ArrayList<MediaFile>();
        if (mediaFileDao.getTopTracks(artist[0], userGroupId).size() == 0) {
            resultTopTrack = lastFMService.getTopTrack(artist[0], 20, userGroupId);
        }
        if (artist.length > 1) {
            for (String __artist : artist) {
                // resultTopTrack = lastFMService.getTopTrack(_artist, 20, userGroupId);
                resultTopTrack = mediaFileDao.getTopTracks(__artist, userGroupId);
                if (resultTopTrack.size() > 0) {
                    for (int i = 0; i < settingsService.getPandoraResultArtistTopTrack(); i++) {
                        Random myRandomizer = new Random();
                        MediaFile randomTopTrack = resultTopTrack
                                .get(myRandomizer.nextInt(resultTopTrack.size()));
                        resultList.add(randomTopTrack);
                    }
                    if ("TEST".contains(SettingsService.getLogfileLevel())) {
                        System.out.println("-=Pandora-R6=-");
                        for (MediaFile m : resultTopTrack) {
                            System.out.println(m.getPath());
                        }
                    }
                }
            }
        }
        if (artist.length == 1) {
            // resultTopTrack = lastFMService.getTopTrack(artist[0], settingsService.getPandoraResultArtistTopTrack(), userGroupId);
            resultTopTrack = mediaFileDao.getTopTracks(artist[0], userGroupId);
            if (resultTopTrack.size() > 0) {
                for (int i = 0; i < settingsService.getPandoraResultArtistTopTrack(); i++) {
                    Random myRandomizer = new Random();
                    MediaFile randomTopTrack = resultTopTrack.get(myRandomizer.nextInt(resultTopTrack.size()));
                    resultList.add(randomTopTrack);
                }
                if ("TEST".contains(SettingsService.getLogfileLevel())) {
                    System.out.println("-=Pandora-R7=-");
                    for (MediaFile m : resultTopTrack) {
                        System.out.println(m.getPath());
                    }
                }
            }
        }
    }

    // Filter out duplicates
    HashSet<MediaFile> hs = new HashSet<MediaFile>();
    hs.addAll(resultList);
    resultList.clear();
    resultList.addAll(hs);

    for (MediaFile mediaFile : resultList) {
        AttributeSet attributes = createAttributesForMediaFile(player, mediaFile, username);
        builder.add("song", attributes, true);
    }
    builder.endAll();
    response.getWriter().print(builder);
}
From source file:amie.keys.CSAKey.java
public void discoverConditionalKeysForComplexConditions(Graph graph, HashSet<Node> candidateKeys,
        Rule conditionRule, Set<Rule> output) {
    HashSet<Node> childrenCandidateKeys = new HashSet<>();
    for (Node candidateKey : candidateKeys) {
        if (candidateKey.toExplore) {
            List<String> properties = candidateKey.mapToString(id2Property);
            Rule amieRule = buildAMIERule(properties, conditionRule);
            boolean isConditionalKey = isConditionaKey(amieRule);
            if (amieRule.getSupport() < support || isConditionalKey) {
                candidateKey.toExplore = false;
                flagChildren(graph, candidateKey);
            }
            // If the rule is a conditional key above the support threshold
            // and no simpler key has already been discovered, output it.
            if (isConditionalKey && amieRule.getSupport() >= support
                    && !isSubsumedByKey(amieRule, conditionRule, conditions2Keys)) {
                synchronized (output) {
                    output.add(amieRule);
                }
                System.out.println(Utilities.formatKey(amieRule));
                conditions2Keys.put(conditionRule, amieRule);
            }
            if (candidateKey.toExplore) {
                if (graph.graph.containsKey(candidateKey)) {
                    childrenCandidateKeys.addAll(graph.graph.get(candidateKey));
                }
            }
        } else {
            flagChildren(graph, candidateKey);
        }
    }
    if (!childrenCandidateKeys.isEmpty()) {
        discoverConditionalKeysForComplexConditions(graph, childrenCandidateKeys, conditionRule, output);
    }
}
From source file:org.alfresco.solr.SolrInformationServer.java
private void doUpdateDescendantDocs(NodeMetaData parentNodeMetaData, boolean overwrite,
        SolrQueryRequest request, UpdateRequestProcessor processor, LinkedHashSet<Long> stack)
        throws AuthenticationException, IOException, JSONException {
    if ((skipDescendantDocsForSpecificTypes
            && typesForSkippingDescendantDocs.contains(parentNodeMetaData.getType()))
            || (skipDescendantDocsForSpecificAspects
                    && shouldBeIgnoredByAnyAspect(parentNodeMetaData.getAspects()))) {
        return;
    }

    HashSet<Long> childIds = new HashSet<Long>();

    if (parentNodeMetaData.getChildIds() != null) {
        childIds.addAll(parentNodeMetaData.getChildIds());
    }

    String query = FIELD_PARENT + ":\"" + parentNodeMetaData.getNodeRef() + "\"";
    ModifiableSolrParams params = new ModifiableSolrParams(request.getParams());
    params.set("q", query).set("fl", FIELD_SOLR4_ID);
    if (skippingDocsQueryString != null && !skippingDocsQueryString.isEmpty()) {
        params.set("fq", "NOT ( " + skippingDocsQueryString + " )");
    }
    SolrDocumentList docs = cloud.getSolrDocumentList(nativeRequestHandler, request, params);
    for (SolrDocument doc : docs) {
        String id = getFieldValueString(doc, FIELD_SOLR4_ID);
        TenantAclIdDbId ids = AlfrescoSolrDataModel.decodeNodeDocumentId(id);
        childIds.add(ids.dbId);
    }

    for (Long childId : childIds) {
        NodeMetaDataParameters nmdp = new NodeMetaDataParameters();
        nmdp.setFromNodeId(childId);
        nmdp.setToNodeId(childId);
        nmdp.setIncludeAclId(false);
        nmdp.setIncludeAspects(false);
        nmdp.setIncludeChildAssociations(false);
        nmdp.setIncludeChildIds(true);
        nmdp.setIncludeNodeRef(false);
        nmdp.setIncludeOwner(false);
        nmdp.setIncludeParentAssociations(false);
        // We only care about the path and ancestors (which is included) for this case
        nmdp.setIncludePaths(true);
        nmdp.setIncludeProperties(false);
        nmdp.setIncludeType(false);
        nmdp.setIncludeTxnId(false);
        // Gets only one
        List<NodeMetaData> nodeMetaDatas = repositoryClient.getNodesMetaData(nmdp, 1);
        if (!nodeMetaDatas.isEmpty()) {
            NodeMetaData nodeMetaData = nodeMetaDatas.get(0);
            if (mayHaveChildren(nodeMetaData)) {
                updateDescendantDocs(nodeMetaData, overwrite, request, processor, stack);
            }

            if (log.isDebugEnabled()) {
                log.debug("... cascade update child doc " + childId);
            }
            // Gets the document that we have from the content store and updates it
            String fixedTenantDomain = AlfrescoSolrDataModel.getTenantId(nodeMetaData.getTenantDomain());
            SolrInputDocument cachedDoc = retrieveDocFromSolrContentStore(fixedTenantDomain, nodeMetaData.getId());

            if (cachedDoc != null) {
                updatePathRelatedFields(nodeMetaData, cachedDoc);
                updateNamePathRelatedFields(nodeMetaData, cachedDoc);
                updateAncestorRelatedFields(nodeMetaData, cachedDoc);

                AddUpdateCommand addDocCmd = new AddUpdateCommand(request);
                addDocCmd.overwrite = overwrite;
                addDocCmd.solrDoc = cachedDoc;

                processor.processAdd(addDocCmd);
                storeDocOnSolrContentStore(fixedTenantDomain, nodeMetaData.getId(), cachedDoc);
            } else if (log.isDebugEnabled()) {
                log.debug("... no child doc found to update " + childId);
            }
        }
    }
}
From source file:GUI.MainWindow.java
private void handleCVELookup(File save_file) {
    final File sf = save_file;
    // Best to do this as a background task; it'll take time.
    Runnable r = new Runnable() {
        public void run() {
            HashSet cves = new HashSet();
            // Find all selected vulns in the tree.
            TreePath[] paths = VulnTree.getSelectionPaths();
            for (int i = 0; i < paths.length; i++) {
                // Loop through them and merge all CVEs into the cves HashSet
                TreePath path = paths[i];
                DefaultMutableTreeNode node = (DefaultMutableTreeNode) path.getLastPathComponent();
                Object obj = node.getUserObject();
                if (obj instanceof Vulnerability) {
                    Vulnerability vuln = (Vulnerability) obj;
                    // add these references to the HashSet
                    cves.addAll(vuln.getCVEReferences());
                }
            }

            // Get the answers from our local CSV file
            CVEUtils cveu = new CVEUtils();
            Vector answers = cveu.getCVEs(cves);

            try {
                String[] headerrow = { "CVE ID", "Risk Score", "Summary" };
                // Write header column to file
                writeCSVLine(sf, headerrow);

                // Now get all the details and make a CSV for the user.
                Enumeration enums = answers.elements();
                while (enums.hasMoreElements()) {
                    CVE c = (CVE) enums.nextElement();
                    System.out.println(c.getCveId() + ":" + c.getRiskScore());
                    writeCSVLine(sf, c.toStringArray());
                }

                // Open file in user's default programme
                Desktop.getDesktop().open(sf);
            } catch (Exception ex) {
                JOptionPane.showMessageDialog(null, ex.getMessage());
            }
        }
    };
    new Thread(r).start();
}