List of usage examples for java.util TreeSet add
public boolean add(E e)
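Adds the specified element to the set if it is not already present, and returns true if the set changed as a result. A minimal standalone sketch of that contract (not taken from the projects below):

import java.util.TreeSet;

public class TreeSetAddDemo {
    public static void main(String[] args) {
        TreeSet<String> set = new TreeSet<String>();
        System.out.println(set.add("banana")); // true: the element was absent
        System.out.println(set.add("apple"));  // true
        System.out.println(set.add("banana")); // false: duplicates are rejected
        System.out.println(set);               // [apple, banana] - kept in sorted order
    }
}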
From source file:com.basho.riak.client.http.util.logging.ConcurrentLoggingTest.java
/**
 * Test method for
 * {@link com.basho.riak.client.http.util.logging.LogNoHttpResponseRetryHandler#retryMethod(org.apache.commons.httpclient.HttpMethod, java.io.IOException, int)}.
 *
 * @throws InterruptedException
 */
@Test
public void retry_concurrentLogAndDump() throws InterruptedException {
    // create a bunch of threads
    // each must log 10 statements and call flush
    // ALL the statements must be present BUT ONCE in
    // the mock delegate appender (order does not matter)
    final int numThreads = 10;
    final LogNoHttpResponseRetryHandler handler = new LogNoHttpResponseRetryHandler();
    ExecutorService es = Executors.newFixedThreadPool(numThreads);
    List<Callable<Void>> tasks = new ArrayList<Callable<Void>>(numThreads);
    final CountDownLatch startLatch = new CountDownLatch(1);
    final CountDownLatch dumpLatch = new CountDownLatch(10);

    for (int i = 0; i < numThreads; i++) {
        final int threadCounter = i;
        tasks.add(new Callable<Void>() {
            @Override
            public Void call() {
                Logger logger = Logger.getLogger("httpclient.wire");
                try {
                    startLatch.await();
                    for (int j = 0; j < 10; j++) {
                        logger.debug(String.format(MESSAGE, new Object[] { threadCounter, j }));
                    }
                    dumpLatch.countDown();
                    dumpLatch.await();
                    handler.retryMethod(new GetMethod(), new NoHttpResponseException(), 0);
                    return null;
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                    throw new RuntimeException(e);
                }
            }
        });
    }

    startLatch.countDown();
    es.invokeAll(tasks);

    verify(mockLogger, times(100)).callAppenders(logEventCaptor.capture());

    TreeSet<Integer> check = new TreeSet<Integer>();
    for (LoggingEvent le : logEventCaptor.getAllValues()) {
        // verify that each Thread:Iter combination is present for threads 0-9 and iterations 0-9
        int loc = Integer.parseInt(le.getMessage().toString());
        check.add(loc);
    }
    assertEquals(100, check.size());
    assertEquals(0, (int) check.first());
    assertEquals(99, (int) check.last());
}
From source file:com.qpark.maven.plugin.springintegration.SpringIntegrationConfigGenerator.java
public void generate() {
    this.log.debug("+generate");
    final TreeSet<String> serviceIds = new TreeSet<String>();
    for (final ElementType element : this.config.getElementTypes()) {
        if (element.isRequest()) {
            serviceIds.add(element.getServiceId());
        }
    }
    for (final String serviceId : serviceIds) {
        this.generateService(serviceId);
    }
    this.log.debug("-generate");
}
From source file:de.ks.file.FileViewController.java
@FXML
void openFolder(ActionEvent event) {
    TreeSet<File> files = new TreeSet<>();
    ObservableList<File> items = fileList.getSelectionModel().getSelectedItems();
    for (File item : items) {
        if (item.isDirectory()) {
            files.add(item);
        } else {
            files.add(item.getParentFile());
        }
    }
    for (File file : files) {
        executor.submit(() -> {
            try {
                log.info("Opening {}", file);
                desktop.open(file);
            } catch (IOException e) {
                log.error("Could not open {}", file, e);
            }
        });
    }
}
From source file:com.qpark.eip.core.spring.security.EipRoleVoter.java
public String getRequiredRoles(final Collection<ConfigAttribute> attributes) {
    /* Handle logging of required roles. */
    TreeSet<String> ts = new TreeSet<String>();
    for (ConfigAttribute attribute : attributes) {
        if (this.supports(attribute)) {
            ts.add(attribute.toString());
        }
    }
    StringBuffer sb = new StringBuffer(1024);
    for (String string : ts) {
        if (sb.length() > 0) {
            sb.append(", ");
        }
        sb.append(string);
    }
    return sb.toString();
}
From source file:com.att.nsa.mr.client.impl.MRBaseClient.java
protected Set<String> jsonArrayToSet(JSONArray a) {
    if (a == null)
        return null;

    final TreeSet<String> set = new TreeSet<String>();
    for (int i = 0; i < a.length(); i++) {
        set.add(a.getString(i));
    }
    return set;
}
From source file:ldbc.snb.datagen.generator.PostGenerator.java
public long createPosts(RandomGeneratorFarm randomFarm, final Forum forum,
        final ArrayList<ForumMembership> memberships, long numPosts, long startId,
        PersonActivityExporter exporter) throws IOException {
    long postId = startId;
    Properties prop = new Properties();
    prop.setProperty("type", "post");
    ArrayList<Post> result = new ArrayList<Post>();
    for (ForumMembership member : memberships) {
        double numPostsMember = numPosts / (double) memberships.size();
        if (numPostsMember < 1.0) {
            double prob = randomFarm.get(RandomGeneratorFarm.Aspect.NUM_POST).nextDouble();
            if (prob < numPostsMember)
                numPostsMember = 1.0;
        } else {
            numPostsMember = Math.ceil(numPostsMember);
        }
        for (int i = 0; i < (int) (numPostsMember); ++i) {
            PostInfo postInfo = generatePostInfo(randomFarm.get(RandomGeneratorFarm.Aspect.TAG),
                    randomFarm.get(RandomGeneratorFarm.Aspect.DATE), forum, member);
            if (postInfo != null) {
                String content = ""; // create a properties class to pass
                content = this.generator_.generateText(member.person(), postInfo.tags, prop);
                post_.initialize(SN.formId(SN.composeId(postId++, postInfo.date)), postInfo.date,
                        member.person(), forum.id(), content, postInfo.tags,
                        Dictionaries.ips.getIP(randomFarm.get(RandomGeneratorFarm.Aspect.IP),
                                randomFarm.get(RandomGeneratorFarm.Aspect.DIFF_IP),
                                randomFarm.get(RandomGeneratorFarm.Aspect.DIFF_IP_FOR_TRAVELER),
                                member.person().ipAddress(), postInfo.date),
                        Dictionaries.browsers.getPostBrowserId(
                                randomFarm.get(RandomGeneratorFarm.Aspect.DIFF_BROWSER),
                                randomFarm.get(RandomGeneratorFarm.Aspect.BROWSER),
                                member.person().browserId()),
                        forum.language());
                if (richRdf) {
                    post_.richRdf(true);
                    if (randomFarm.get(RandomGeneratorFarm.Aspect.POST_MENTIONED).nextDouble() > 0.6) {
                        // The user mentions one or more (up to 4) members of the forum
                        TreeSet<Long> t = new TreeSet<Long>();
                        t.add(memberships
                                .get(randomFarm.get(RandomGeneratorFarm.Aspect.MEMBERSHIP_INDEX_POST_MENTIONED)
                                        .nextInt(memberships.size()))
                                .person().accountId());
                        double probabilityForNumberOfMentions = randomFarm
                                .get(RandomGeneratorFarm.Aspect.POST_MENTIONED_NUM).nextDouble();
                        if (probabilityForNumberOfMentions > 0.5)
                            t.add(memberships
                                    .get(randomFarm.get(RandomGeneratorFarm.Aspect.MEMBERSHIP_INDEX_POST_MENTIONED)
                                            .nextInt(memberships.size()))
                                    .person().accountId());
                        if (probabilityForNumberOfMentions > 0.75)
                            t.add(memberships
                                    .get(randomFarm.get(RandomGeneratorFarm.Aspect.MEMBERSHIP_INDEX_POST_MENTIONED)
                                            .nextInt(memberships.size()))
                                    .person().accountId());
                        if (probabilityForNumberOfMentions > 0.95)
                            t.add(memberships
                                    .get(randomFarm.get(RandomGeneratorFarm.Aspect.MEMBERSHIP_INDEX_POST_MENTIONED)
                                            .nextInt(memberships.size()))
                                    .person().accountId());
                        post_.mentioned(t);
                    }
                    if (randomFarm.get(RandomGeneratorFarm.Aspect.POST_VISIBILITY).nextDouble() > 0.95) {
                        if (post_.mentioned() == null || randomFarm
                                .get(RandomGeneratorFarm.Aspect.POST_VISIBILITY_TF).nextDouble() > 0.5)
                            post_.setPublic(true);
                        else
                            post_.setPublic(false);
                    }
                    if (randomFarm.get(RandomGeneratorFarm.Aspect.POST_LINK).nextDouble() > 0.57) {
                        post_.link("http://ld.bc/" + RandomStringUtils.random(6, true, false));
                    }
                }
                if (richRdf && randomFarm.get(RandomGeneratorFarm.Aspect.POST_COUNTRY).nextDouble() > 0.02)
                    post_.countryKnown(false);
                exporter.export(post_);
                if (randomFarm.get(RandomGeneratorFarm.Aspect.NUM_LIKE).nextDouble() <= 0.1) {
                    likeGenerator_.generateLikes(randomFarm.get(RandomGeneratorFarm.Aspect.NUM_LIKE),
                            forum, post_, Like.LikeType.POST, exporter);
                }
                // generate comments
                int numComments = randomFarm.get(RandomGeneratorFarm.Aspect.NUM_COMMENT)
                        .nextInt(DatagenParams.maxNumComments + 1);
                postId = commentGenerator_.createComments(randomFarm, forum, post_, numComments,
                        postId, exporter);
            }
        }
    }
    return postId;
}
From source file:edu.mbl.jif.imaging.mmtiff.FileSet.java
/**
 * Completes the current time point of an aborted acquisition with blank images, so that it can
 * be opened correctly by ImageJ/BioFormats.
 */
private void completeFrameWithBlankImages(int frame) throws JSONException, MMScriptException {
    int numFrames = MDUtils.getNumFrames(mpTiff_.summaryMetadata_);
    int numSlices = MDUtils.getNumSlices(mpTiff_.summaryMetadata_);
    int numChannels = MDUtils.getNumChannels(mpTiff_.summaryMetadata_);
    if (numFrames > frame + 1) {
        TreeSet<String> writtenImages = new TreeSet<String>();
        for (MultipageTiffWriter w : tiffWriters_) {
            writtenImages.addAll(w.getIndexMap().keySet());
            w.setAbortedNumFrames(frame + 1);
        }
        int positionIndex = MDUtils.getIndices(writtenImages.first())[3];
        if (mpTiff_.omeTiff_) {
            mpTiff_.omeMetadata_.setNumFrames(positionIndex, frame + 1);
        }
        TreeSet<String> lastFrameLabels = new TreeSet<String>();
        for (int c = 0; c < numChannels; c++) {
            for (int z = 0; z < numSlices; z++) {
                lastFrameLabels.add(MDUtils.generateLabel(c, z, frame, positionIndex));
            }
        }
        lastFrameLabels.removeAll(writtenImages);
        try {
            for (String label : lastFrameLabels) {
                tiffWriters_.getLast().writeBlankImage(label);
                if (mpTiff_.omeTiff_) {
                    JSONObject dummyTags = new JSONObject();
                    int channel = Integer.parseInt(label.split("_")[0]);
                    int slice = Integer.parseInt(label.split("_")[1]);
                    MDUtils.setChannelIndex(dummyTags, channel);
                    MDUtils.setFrameIndex(dummyTags, frame);
                    MDUtils.setSliceIndex(dummyTags, slice);
                    mpTiff_.omeMetadata_.addImageTagsToOME(dummyTags, ifdCount_, baseFilename_,
                            currentTiffFilename_);
                }
            }
        } catch (IOException ex) {
            ReportingUtils.logError("problem writing dummy image");
        }
    }
}
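The example above leans on plain set algebra: it builds the full label set for the frame, then subtracts the labels already written to find the blanks still to emit. A minimal standalone sketch of that removeAll pattern (the labels here are illustrative, not Micro-Manager's):

import java.util.TreeSet;

public class SetDifferenceDemo {
    public static void main(String[] args) {
        TreeSet<String> expected = new TreeSet<String>();
        for (int c = 0; c < 2; c++)
            for (int z = 0; z < 2; z++)
                expected.add(c + "_" + z);   // hypothetical channel_slice labels

        TreeSet<String> written = new TreeSet<String>();
        written.add("0_0");
        written.add("1_0");

        expected.removeAll(written);         // set difference: labels not yet written
        System.out.println(expected);        // [0_1, 1_1]
    }
}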
From source file:gui.WikiCorpusClusteringView.java
public Graph<Number, Number> getGraph() {
    UndirectedGraph<Number, Number> g1 = new UndirectedSparseMultigraph<Number, Number>();
    // core node
    TreeSet<Number> s0 = new TreeSet<Number>();
    s0.add(0);
    colorCluster(s0, Color.BLACK);
    linksHASH.put(0, centerPage);
    g1.addVertex(0);
    int i = 1;

    // add directly linked pages
    TreeSet<Number> s1 = new TreeSet<Number>();
    for (String l : links) {
        WikiNode n = new WikiNode(centerPage.wiki, l);
        linksHASH.put(i, n);
        linksHASH_INV.put(n, i);
        s0.add(0);
        g1.addEdge(i, 0, i);
        g1.addVertex(i);
        i++;
    }
    colorCluster(s1, Color.BLUE);

    // add interwiki-link (IWL) pages
    TreeSet<Number> s2 = new TreeSet<Number>();
    for (String key : iwlinks.keySet()) {
        String value = iwlinks.get(key);
        System.out.println("val=" + key + "_" + value);
        WikiNode n = new WikiNode(key, value);
        linksHASH.put(i, n);
        linksHASH_INV.put(n, i);
        s2.add(i);
        g1.addVertex(i);
        if (!linksHASH_INV.containsKey(value)) {
            g1.addEdge(i, 0, i);
        }
        i++;
    }
    colorCluster(s2, Color.RED);

    // add category members
    TreeSet<Number> s3 = new TreeSet<Number>();
    for (String l : catMembs) {
        WikiNode n = new WikiNode(centerPage.wiki, l);
        g1.addVertex(i);
        if (linksHASH_INV.containsKey(l)) {
            g1.addEdge(i, 0, i);
            linksHASH.put(i, n);
        }
        i++;
        s3.add(i);
    }
    colorCluster(s3, Color.GREEN);

    return g1;
}
From source file:com.cloudera.recordbreaker.analyzer.FSCrawler.java
/**
 * <code>getStartNonblockingCrawl</code> traverses a given filesystem. It returns immediately
 * and does not wait for the crawl to complete.
 * If the crawl is created or is already ongoing, it returns true.
 * If the crawl is not currently running and cannot be started, it returns false.
 */
public synchronized boolean getStartNonblockingCrawl(final URI fsURI) {
    try {
        final int subdirDepth = INFINITE_CRAWL_DEPTH;
        long fsId = analyzer.getCreateFilesystem(fsURI, true);
        if (fsId < 0) {
            return false;
        }
        LOG.info("Grabbing filesystem: " + fsURI);
        final FileSystem fs = FileSystem.get(fsURI, new Configuration());
        final Path startDir = fs.makeQualified(new Path(fsURI.getPath()));
        final long crawlid = analyzer.getCreatePendingCrawl(fsId, true);
        Thread pendingThread = pendingCrawls.get(crawlid);
        if (pendingThread == null) {
            Thread t = new Thread() {
                public void run() {
                    try {
                        synchronized (pendingCrawls) {
                            pendingCrawls.put(crawlid, this);
                        }
                        synchronized (crawlStatusInfo) {
                            crawlStatusInfo.put(crawlid, new CrawlRuntimeStatus("Initializing crawl"));
                        }
                        // Build the file and dir-level todo lists
                        List<Path> todoFileList = new ArrayList<Path>();
                        List<Path> todoDirList = new ArrayList<Path>();
                        recursiveCrawlBuildList(fs, startDir, subdirDepth, crawlid, todoFileList, todoDirList);

                        // Get the files to process
                        TreeSet<String> observedFilenames = new TreeSet<String>();
                        for (Path p : analyzer.getFilesForCrawl(crawlid)) {
                            observedFilenames.add(p.toString());
                        }
                        for (Iterator<Path> it = todoFileList.iterator(); it.hasNext();) {
                            Path p = it.next();
                            if (observedFilenames.contains(p.toString())) {
                                it.remove();
                            }
                        }

                        // Get the dirs to process
                        TreeSet<String> observedDirnames = new TreeSet<String>();
                        for (Path p : analyzer.getDirsForCrawl(crawlid)) {
                            observedDirnames.add(p.toString());
                        }
                        for (Iterator<Path> it = todoDirList.iterator(); it.hasNext();) {
                            Path p = it.next();
                            if (observedDirnames.contains(p.toString())) {
                                it.remove();
                            }
                        }

                        synchronized (crawlStatusInfo) {
                            CrawlRuntimeStatus cstatus = crawlStatusInfo.get(crawlid);
                            cstatus.setMessage("Processing files");
                            cstatus.setNumToProcess(todoFileList.size());
                            cstatus.setNumDone(0);
                        }

                        int numDone = 0;
                        for (Path p : todoDirList) {
                            try {
                                analyzer.addSingleFile(fs, p, crawlid);
                            } catch (IOException iex) {
                                iex.printStackTrace();
                            }
                        }
                        for (Path p : todoFileList) {
                            synchronized (crawlStatusInfo) {
                                CrawlRuntimeStatus cstatus = crawlStatusInfo.get(crawlid);
                                cstatus.setMessage("Processing file " + p.toString());
                            }
                            try {
                                analyzer.addSingleFile(fs, p, crawlid);
                            } catch (Exception iex) {
                                iex.printStackTrace();
                            }
                            numDone++;
                            synchronized (crawlStatusInfo) {
                                CrawlRuntimeStatus cstatus = crawlStatusInfo.get(crawlid);
                                cstatus.setNumDone(numDone);
                                if (cstatus.shouldFinish()) {
                                    break;
                                }
                            }
                        }
                    } catch (IOException iex) {
                        iex.printStackTrace();
                    } finally {
                        try {
                            synchronized (pendingCrawls) {
                                pendingCrawls.remove(crawlid);
                                analyzer.completeCrawl(crawlid);
                            }
                        } catch (SQLiteException sle) {
                        }
                    }
                }
            };
            t.start();
        }
        return true;
    } catch (Exception iex) {
        iex.printStackTrace();
    }
    return false;
}
From source file:edu.clemson.lph.utils.CSVParserWrapper.java
/**
 * Sort everything including the top row.
 * @param iCol column to sort (see LabeledCSVParser.getLabelIdx( ColumnLabel ))
 * @param bNumericCompare set to true to parse the column value to a number before comparing
 */
protected void sort(int iCol, boolean bNumericCompare, boolean bUniqueSort) {
    // final ArrayList<List<String>> aRowHold = (ArrayList<List<String>>) aRows.clone();
    final int iSortCol = iCol;
    final boolean bUnique = bUniqueSort;
    final boolean bNumeric = bNumericCompare;
    if (aRows == null || iRows <= 0 || iCol < 0 || iCol >= aRows.get(0).size())
        return;
    Comparator<List<String>> compRows = new Comparator<List<String>>() {
        // Compare Strings in the indicated column. The only weird part is
        // numeric comparison. Try parsing to Double. If both fail they are equal;
        // if one fails it is GREATER than the other so it sorts later in the list.
        @Override
        public int compare(List<String> arg0, List<String> arg1) {
            int iRet = 0;
            String s0 = arg0.get(iSortCol);
            String s1 = arg1.get(iSortCol);
            if (bNumeric) {
                Double d0 = null;
                Double d1 = null;
                try {
                    d0 = Double.parseDouble(s0);
                } catch (NumberFormatException e) {
                }
                try {
                    d1 = Double.parseDouble(s1);
                } catch (NumberFormatException e) {
                }
                if (d0 != null && d1 != null)
                    iRet = Double.compare(d0, d1);
                else if (d0 != null && d1 == null)
                    iRet = -1;
                else if (d0 == null && d1 != null)
                    iRet = 1;
                else
                    iRet = 0;
            } else {
                iRet = s0.compareTo(s1);
            }
            // If the compared column values are equal, find SOMETHING different or the set logic
            // will only include the first row with that value.
            if (!bUnique && iRet == 0) {
                for (int i = arg0.size() - 1; i >= 0; i--) {
                    if (i != iSortCol) {
                        String s0a = arg0.get(i);
                        String s1a = arg1.get(i);
                        iRet = s0a.compareTo(s1a);
                        if (iRet != 0) {
                            break;
                        }
                    }
                }
            }
            return iRet;
        }
    };
    TreeSet<List<String>> setRows = new TreeSet<List<String>>(compRows);
    for (List<String> lRow : aRows)
        setRows.add(lRow);
    aRows.clear();
    for (List<String> lRow : setRows) {
        aRows.add(lRow);
    }
    iRows = aRows.size();
}
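One detail worth calling out in the example above: a TreeSet defines duplicates by its comparator, not by equals, so any two rows for which the comparator returns 0 collapse into a single entry. That is why the non-unique sort path keeps scanning other columns until it finds a nonzero result. A minimal standalone sketch of the pitfall, using hypothetical data:

import java.util.Comparator;
import java.util.TreeSet;

public class ComparatorEqualityDemo {
    public static void main(String[] args) {
        // Compare only the first character, so "apple" and "avocado" count as "equal"
        TreeSet<String> set = new TreeSet<String>(Comparator.comparing((String s) -> s.charAt(0)));
        System.out.println(set.add("apple"));   // true
        System.out.println(set.add("avocado")); // false: comparator-equal, silently dropped
        System.out.println(set);                // [apple]
    }
}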