List of usage examples for java.util.Set.clear()
void clear();
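Set.clear() removes every element from the set in place; the set remains valid and reusable afterward. A minimal, self-contained sketch (class and variable names are illustrative):

import java.util.HashSet;
import java.util.Set;

public class SetClearExample {
    public static void main(String[] args) {
        Set<String> names = new HashSet<>();
        names.add("alice");
        names.add("bob");
        System.out.println(names.size());    // 2

        names.clear();                       // removes all elements in place
        System.out.println(names.isEmpty()); // true

        names.add("carol");                  // the set is still usable after clear()
        System.out.println(names.size());    // 1
    }
}

The examples below, collected from open-source projects, show the same call used to reset accumulators between loop iterations, batches, and test phases.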
From source file:de.ingrid.interfaces.csw.harvest.impl.AbstractHarvester.java
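Here clear() is the final step of a harvest run: once deprecated records have been removed from the cache and duplicates counted, both record-id collections are emptied.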
@Override
public void run(Date lastExecutionDate) throws Exception {
    log.info("Running harvester " + this.getId());
    if (this.cache == null) {
        throw new RuntimeException("Harvester is not configured properly: cache not set.");
    }
    // get cached record ids (for later removal of records that do not exist anymore)
    Set<Serializable> cachedRecordIds = this.cache.getCachedIds();
    // delegate execution to specialized method
    List<Serializable> allRecordIds = new ArrayList<Serializable>();
    try {
        allRecordIds = this.fetchRecords(lastExecutionDate);
    } catch (Exception e) {
        log.error("Error fetching records from harvester: " + this.getId(), e);
    }
    // remove deprecated records
    for (Serializable cachedRecordId : cachedRecordIds) {
        if (!allRecordIds.contains(cachedRecordId))
            this.cache.remove(cachedRecordId);
    }
    // duplicates are filtered out automatically by the cache, so there is
    // no need for action here
    int duplicates = allRecordIds.size() - new HashSet<Serializable>(allRecordIds).size();
    log.info("Fetched " + allRecordIds.size() + " records. Duplicates: " + duplicates);
    if (duplicates > 0) {
        statusProvider.addState(this.getId() + "_duplicates", "Remove " + duplicates + " duplicates.");
    }
    allRecordIds.clear();
    cachedRecordIds.clear();
}
From source file:org.drftpd.protocol.speedtest.net.slave.SpeedTestHandler.java
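The callables set is cleared at the top of each upload round so it can be refilled with the upload tasks for the next batch.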
private float getUploadSpeed(String url) {
    long totalTime = 0L;
    long totalBytes = 0L;
    long startTime = System.currentTimeMillis();
    RequestConfig requestConfig = RequestConfig.custom().setSocketTimeout(60000).setConnectTimeout(5000)
            .setConnectionRequestTimeout(5000).build();
    HttpPost httpPost = new HttpPost(url);
    httpPost.setHeader("content-type", "application/x-www-form-urlencoded");
    httpPost.setConfig(requestConfig);
    String payload = _payload; // Initial payload
    StopWatch watch = new StopWatch();
    SpeedTestCallable[] speedTestCallables = new SpeedTestCallable[_upThreads];
    for (int i = 0; i < _upThreads; i++) {
        speedTestCallables[i] = new SpeedTestCallable();
    }
    ExecutorService executor = Executors.newFixedThreadPool(_upThreads);
    List<Future<Long>> threadList;
    Set<Callable<Long>> callables = new HashSet<Callable<Long>>();
    boolean limitReached = false;
    int i = 2;
    while (true) {
        if ((System.currentTimeMillis() - startTime) > _upTime) {
            break;
        }
        List<NameValuePair> nameValuePairs = new ArrayList<NameValuePair>();
        nameValuePairs.add(new BasicNameValuePair("content1", payload));
        try {
            httpPost.setEntity(new UrlEncodedFormEntity(nameValuePairs));
        } catch (UnsupportedEncodingException e) {
            logger.error("Unsupported encoding of payload for speedtest upload: " + e.getMessage());
            close(executor, callables);
            return 0;
        }
        callables.clear();
        for (int k = 0; k < _upThreads; k++) {
            speedTestCallables[k].setHttpPost(httpPost);
            callables.add(speedTestCallables[k]);
        }
        for (int j = 0; j < _payloadLoop; j++) {
            try {
                watch.reset();
                Thread.sleep(_sleep);
                watch.start();
                threadList = executor.invokeAll(callables);
                for (Future<Long> fut : threadList) {
                    Long bytes = fut.get();
                    totalBytes += bytes;
                }
                watch.stop();
                totalTime += watch.getTime();
            } catch (InterruptedException e) {
                logger.error(e.getMessage());
                close(executor, callables);
                return 0;
            } catch (ExecutionException e) {
                if (e.getMessage().contains("Error code 413")) {
                    limitReached = true;
                    payload = StringUtils.repeat(_payload, i - 2);
                } else {
                    logger.error(e.getMessage());
                    close(executor, callables);
                    return 0;
                }
            }
            if ((System.currentTimeMillis() - startTime) > _upTime) {
                break;
            }
        }
        if (!limitReached) {
            // Increase payload size if not too big
            payload = StringUtils.repeat(_payload, i);
            i++;
        }
    }
    if (totalBytes == 0L || totalTime == 0L) {
        close(executor, callables);
        return 0;
    }
    close(executor, callables);
    return (float) (((totalBytes * 8) / totalTime) * 1000) / 1000000;
}
From source file:edu.uci.ics.pregelix.example.util.TestExecutor.java
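While comparing rows field by field, the two bag-element sets are cleared each time a bag (delimited by {{ and }}) has been fully compared, ready for the next bag.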
private boolean equalStrings(String s1, String s2) {
    String[] rowsOne = s1.split("\n");
    String[] rowsTwo = s2.split("\n");
    for (int i = 0; i < rowsOne.length; i++) {
        String row1 = rowsOne[i];
        String row2 = rowsTwo[i];
        if (row1.equals(row2))
            continue;
        String[] fields1 = row1.split(" ");
        String[] fields2 = row2.split(" ");
        boolean bagEncountered = false;
        Set<String> bagElements1 = new HashSet<String>();
        Set<String> bagElements2 = new HashSet<String>();
        for (int j = 0; j < fields1.length; j++) {
            if (j >= fields2.length) {
                return false;
            } else if (fields1[j].equals(fields2[j])) {
                if (fields1[j].equals("{{"))
                    bagEncountered = true;
                if (fields1[j].startsWith("}}")) {
                    if (!bagElements1.equals(bagElements2))
                        return false;
                    bagEncountered = false;
                    bagElements1.clear();
                    bagElements2.clear();
                }
                continue;
            } else if (fields1[j].indexOf('.') < 0) {
                if (bagEncountered) {
                    bagElements1.add(fields1[j].replaceAll(",$", ""));
                    bagElements2.add(fields2[j].replaceAll(",$", ""));
                    continue;
                }
                return false;
            } else {
                // If the fields are floating-point numbers, test them
                // for equality safely
                fields1[j] = fields1[j].split(",")[0];
                fields2[j] = fields2[j].split(",")[0];
                try {
                    Double double1 = Double.parseDouble(fields1[j]);
                    Double double2 = Double.parseDouble(fields2[j]);
                    float float1 = (float) double1.doubleValue();
                    float float2 = (float) double2.doubleValue();
                    if (Math.abs(float1 - float2) == 0)
                        continue;
                    else {
                        return false;
                    }
                } catch (NumberFormatException ignored) {
                    // Guess they weren't numbers - must simply not be equal
                    return false;
                }
            }
        }
    }
    return true;
}
From source file:fr.landel.utils.assertor.predicate.PredicateAssertorIterableTest.java
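The test seeds a set with one element, verifies isNotEmpty() passes, then calls clear() so the same assertion is expected to throw.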
/**
 * Test method for {@link AssertorIterable#isNotEmpty}.
 *
 * @throws IOException
 *             On empty iterable
 */
@Test
public void testIsNotEmpty() throws IOException {
    final String el = "element";

    final Set<String> set = new HashSet<>();
    set.add(el);

    Assertor.<Set<String>, String>ofIterable().isNotEmpty().that(set).orElseThrow();

    assertException(() -> {
        Assertor.<Set<String>, String>ofIterable().not().isNotEmpty().that(set)
                .orElseThrow("iterable is not empty");
        fail(ERROR);
    }, IllegalArgumentException.class, "iterable is not empty");

    set.clear();

    assertException(() -> {
        Assertor.<Set<String>, String>ofIterable().isNotEmpty().that(set).orElseThrow();
        fail(ERROR);
    }, IllegalArgumentException.class);

    assertException(() -> {
        Assertor.<Set<String>, String>ofIterable().isNotEmpty().that(set).orElseThrow("iterable is empty");
        fail(ERROR);
    }, IllegalArgumentException.class, "iterable is empty");

    assertException(() -> {
        Assertor.<Set<String>, String>ofIterable().isNotEmpty().that(set).orElseThrow(new IOException(), true);
        fail(ERROR);
    }, IOException.class);

    assertException(() -> {
        Assertor.<Iterable<String>, String>ofIterable().isNotEmpty().that((Iterable<String>) null)
                .orElseThrow();
        fail();
    }, IllegalArgumentException.class, "the iterable 'null' should be NOT empty and NOT null");
}
From source file:edu.toronto.cs.ontools.taxonomy.AbstractTaxonomy.java
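A level-by-level walk of the taxonomy: after each level is printed, crt is cleared and refilled from next, which is then cleared for the following level.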
protected void display(PrintStream out) {
    Map<String, Boolean> visited = new HashMap<String, Boolean>();
    Set<String> crt = new TreeSet<String>();
    Set<String> next = new TreeSet<String>();
    crt.add(this.getRootId());
    CounterMap<Integer> h = new CounterMap<Integer>();
    int min = this.size(), max = 0;
    double avg = 0;
    while (!crt.isEmpty()) {
        for (String id : crt) {
            if (Boolean.TRUE.equals(visited.get(id))) {
                continue;
            }
            TaxonomyTerm term = this.getTerm(id);
            int p = term.getParents().size();
            h.addTo(p);
            if (min > p) {
                min = p;
            }
            if (max < p) {
                max = p;
            }
            avg += p;
            out.println(term);
            visited.put(id, true);
            next.addAll(term.getChildren());
        }
        crt.clear();
        crt.addAll(next);
        next.clear();
    }
    avg /= this.size();
    out.println(h);
    out.println();
    out.println("SIZE " + this.size());
    out.println("MIN: " + min);
    out.println("MAX: " + max);
    out.println("AVG: " + avg);
}
From source file:com.meltmedia.cadmium.servlets.guice.CadmiumListener.java
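On context shutdown, the set tracking which Closeable classes have been closed is cleared once both groups of singletons have been processed.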
@Override
public void contextDestroyed(ServletContextEvent event) {
    Set<Class<? extends Closeable>> closed = new HashSet<Class<? extends Closeable>>();
    Injector injector = this.injector;
    if (jsr250Executor != null) {
        jsr250Executor.preDestroy();
        jsr250Executor = null;
    }
    Set<Object> singletons = Jsr250Utils.findInstancesInScopes(injector, Singleton.class);
    Set<Object> otherSingletons = Jsr250Utils.findInstancesInScopes(injector, Scopes.SINGLETON);
    closeAll(closed, singletons);
    closeAll(closed, otherSingletons);
    closed.clear();
    if (executor != null) {
        try {
            executor.shutdown();
        } catch (Throwable t) {
        }
        try {
            if (!executor.awaitTermination(10, TimeUnit.SECONDS)) {
                log.warn("Thread pool executor did not terminate after 10 seconds, forcing shutdown.");
                for (Runnable terminated : executor.shutdownNow()) {
                    log.warn("Terminated task of type {}.", terminated.getClass().getName());
                }
            }
        } catch (Throwable t) {
            log.warn("Throwable thrown while terminating thread pool executor.", t);
        }
    }
    injector = null;
    executor = null;
    members.clear();
    members = null;
    configManager = null;
    context = null;
    reflections = null;
    super.contextDestroyed(event);
}
From source file:com.intuit.tank.project.UsersAndTimes.java
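In standalone mode, if no existing region can be reused, clear() empties the configured job regions so the single standalone region replaces them.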
public List<JobRegion> getJobRegions() {
    if (jobRegions == null) {
        Set<JobRegion> regions = projectBean.getJobConfiguration().getJobRegions();
        Set<VMRegion> configuredRegions = new HashSet<VMRegion>(
                tankConfig.getVmManagerConfig().getConfiguredRegions());
        if (tankConfig.getStandalone()) {
            JobRegion standaloneRegion = new JobRegion(VMRegion.STANDALONE, "0");
            boolean found = false;
            for (JobRegion region : regions) {
                if (region.getRegion() == VMRegion.US_EAST || region.getRegion() == VMRegion.STANDALONE) {
                    standaloneRegion = region;
                    standaloneRegion.setRegion(VMRegion.STANDALONE);
                    found = true;
                    break;
                }
            }
            if (!found) {
                regions.clear();
                regions.add(standaloneRegion);
            }
        } else {
            for (JobRegion region : regions) {
                configuredRegions.remove(region.getRegion());
            }
            for (VMRegion region : configuredRegions) {
                if (tankConfig.getStandalone()) {
                    // getStandalone() is false in this branch, so this check is effectively dead code
                } else {
                    regions.add(new JobRegion(region, "0"));
                }
            }
        }
        jobRegions = new ArrayList<JobRegion>(regions);
        Collections.sort(jobRegions);
    }
    return jobRegions;
}
From source file:org.apache.ctakes.ytex.kernel.metric.ConceptSimilarityServiceImpl.java
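The caller-supplied lcses set is cleared before being populated, so it only contains the least common subsumers computed by this call.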
public int getLCS(String concept1, String concept2, Set<String> lcses, List<LCSPath> lcsPaths) {
    int lcsDist = 0;
    ConcRel cr1 = getConceptGraph().getConceptMap().get(concept1);
    ConcRel cr2 = getConceptGraph().getConceptMap().get(concept2);
    if (cr1 != null && cr2 != null) {
        lcses.clear();
        if (lcsPaths == null) {
            // no need to get paths which we don't cache - look in the cache
            lcsDist = getLCSFromCache(cr1, cr2, lcses);
        } else {
            lcsPaths.clear();
            // need to get paths - compute the lcses and their paths
            lcsDist = lcs(concept1, concept2, lcsPaths);
            for (LCSPath lcsPath : lcsPaths) {
                lcses.add(lcsPath.getLcs());
            }
        }
    } else {
        if (log.isDebugEnabled()) {
            if (cr1 == null)
                log.debug("could not find concept:" + concept1);
            if (cr2 == null)
                log.debug("could not find concept:" + concept2);
        }
    }
    return lcsDist;
}
From source file:org.broadleafcommerce.common.extensibility.context.merge.AbstractMergeBeanPostProcessor.java
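In the Set branch of this Spring merge processor, the merge is staged in a temporary list (sets have no positional insert), after which clear() empties the source set and the merged elements are copied back.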
@Override
public Object postProcessBeforeInitialization(Object bean, String beanName) throws BeansException {
    if (statusProvider != null && !statusProvider.isProcessingEnabled(bean, beanName, applicationContext)) {
        if (LOG.isTraceEnabled()) {
            LOG.trace(String.format(
                    "Not performing post-processing on targetRef [%s] because the registered "
                            + "status provider [%s] returned false",
                    targetRef, statusProvider.getClass().getSimpleName()));
        }
        return bean;
    }
    if (beanName.equals(targetRef)) {
        Object mergeCollection = applicationContext.getBean(collectionRef);
        if (bean instanceof ListFactoryBean || bean instanceof List) {
            try {
                List mergeList = (List) mergeCollection;
                List sourceList;
                if (bean instanceof ListFactoryBean) {
                    Field field = ListFactoryBean.class.getDeclaredField("sourceList");
                    field.setAccessible(true);
                    sourceList = (List) field.get(bean);
                } else {
                    sourceList = (List) bean;
                }
                switch (placement) {
                case APPEND:
                    sourceList.addAll(mergeList);
                    break;
                case PREPEND:
                    sourceList.addAll(0, mergeList);
                    break;
                case SPECIFIC:
                    sourceList.addAll(position, mergeList);
                    break;
                }
            } catch (Exception e) {
                throw new BeanCreationException(e.getMessage());
            }
        } else if (bean instanceof SetFactoryBean || bean instanceof Set) {
            try {
                Set mergeSet = (Set) mergeCollection;
                Set sourceSet;
                if (bean instanceof SetFactoryBean) {
                    Field field = SetFactoryBean.class.getDeclaredField("sourceSet");
                    field.setAccessible(true);
                    sourceSet = (Set) field.get(bean);
                } else {
                    sourceSet = (Set) bean;
                }
                List tempList = new ArrayList(sourceSet);
                switch (placement) {
                case APPEND:
                    tempList.addAll(mergeSet);
                    break;
                case PREPEND:
                    tempList.addAll(0, mergeSet);
                    break;
                case SPECIFIC:
                    tempList.addAll(position, mergeSet);
                    break;
                }
                sourceSet.clear();
                sourceSet.addAll(tempList);
            } catch (Exception e) {
                throw new BeanCreationException(e.getMessage());
            }
        } else if (bean instanceof MapFactoryBean || bean instanceof Map) {
            try {
                Map mergeMap = (Map) mergeCollection;
                Map sourceMap;
                if (bean instanceof MapFactoryBean) {
                    Field field = MapFactoryBean.class.getDeclaredField("sourceMap");
                    field.setAccessible(true);
                    sourceMap = (Map) field.get(bean);
                } else {
                    sourceMap = (Map) bean;
                }
                LinkedHashMap tempMap = new LinkedHashMap();
                switch (placement) {
                case APPEND:
                    tempMap.putAll(sourceMap);
                    tempMap.putAll(mergeMap);
                    break;
                case PREPEND:
                    tempMap.putAll(mergeMap);
                    tempMap.putAll(sourceMap);
                    break;
                case SPECIFIC:
                    boolean added = false;
                    int j = 0;
                    for (Object key : sourceMap.keySet()) {
                        if (j == position) {
                            tempMap.putAll(mergeMap);
                            added = true;
                        }
                        tempMap.put(key, sourceMap.get(key));
                        j++;
                    }
                    if (!added) {
                        tempMap.putAll(mergeMap);
                    }
                    break;
                }
                sourceMap.clear();
                sourceMap.putAll(tempMap);
            } catch (Exception e) {
                throw new BeanCreationException(e.getMessage());
            }
        } else {
            throw new IllegalArgumentException("Bean (" + beanName + ") is specified as a merge target, "
                    + "but is not of type ListFactoryBean, SetFactoryBean or MapFactoryBean");
        }
    }
    return bean;
}
From source file:gov.nih.nci.ncicb.tcga.dcc.dam.dao.DAMQueriesCGCCLevelTwoAndThree.java
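The experiment-id and barcode sets are cleared after each batch is queried, keeping the SQL IN clause under maxInClauseSize.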
private void makeDataFilesForBaseName(final List<DataFile> dataFiles,
        final List<DataSetLevelTwoThree> dataSets, final boolean consolidateFiles) {
    // will hold all experiment ids for this basename
    final Set<Integer> experimentIds = new HashSet<Integer>();
    final Set<String> barcodes = new HashSet<String>();
    final Map<String, DataFileLevelTwoThree> dataFilesBySourceFileType = new HashMap<String, DataFileLevelTwoThree>();
    for (final DataSetLevelTwoThree dataSet : dataSets) {
        if ((experimentIds.size() + barcodes.size() + 1 + dataSet.getBarcodes().size()) > maxInClauseSize) {
            // need to run the query now, before the number of items in the in clause gets too big!
            makeOneBatchOfDataFilesForBaseName(dataFiles, dataSets, consolidateFiles, experimentIds, barcodes,
                    dataFilesBySourceFileType);
            experimentIds.clear();
            barcodes.clear();
        }
        experimentIds.add(dataSet.getExperimentID());
        barcodes.addAll(dataSet.getBarcodes());
    }
    // run the last batch (or only one, if the total number never approached the max in clause size)
    makeOneBatchOfDataFilesForBaseName(dataFiles, dataSets, consolidateFiles, experimentIds, barcodes,
            dataFilesBySourceFileType);
}