List of usage examples for java.lang ThreadLocal get
public T get()
From source file:org.apache.sling.resourceresolver.impl.CommonResourceResolverFactoryImpl.java
/** * Inform about a closed resource resolver. * Make sure to remove it from the current thread context. * @param resourceResolverImpl The resource resolver * @param ctrl The resource resolver control *///from w w w . j a va2 s .c o m public void unregister(final ResourceResolver resourceResolverImpl, final ResourceResolverControl ctrl) { // close the context ctrl.close(); // remove it from the set of weak references. refs.remove(ctrl.hashCode()); // on shutdown, the factory might already be closed before the resolvers close // therefore we have to check for null final ThreadLocal<Stack<WeakReference<ResourceResolver>>> tl = resolverStackHolder; if (tl != null) { final Stack<WeakReference<ResourceResolver>> resolverStack = tl.get(); if (resolverStack != null) { final Iterator<WeakReference<ResourceResolver>> i = resolverStack.iterator(); while (i.hasNext()) { final WeakReference<ResourceResolver> ref = i.next(); if (ref.get() == null || ref.get() == resourceResolverImpl) { i.remove(); } } if (resolverStack.isEmpty()) { tl.remove(); } } } }
From source file:org.codice.alliance.transformer.nitf.NitfPreStoragePlugin.java
private BufferedImage renderImage(ContentItem contentItem) throws IOException, ParseException, NitfFormatException { final ThreadLocal<BufferedImage> bufferedImage = new ThreadLocal<>(); if (contentItem != null && contentItem.getInputStream() != null) { NitfRenderer renderer = new NitfRenderer(); new NitfParserInputFlow().inputStream(contentItem.getInputStream()).allData() .forEachImageSegment(segment -> { if (bufferedImage.get() == null) { try { bufferedImage.set(renderer.render(segment)); } catch (IOException e) { LOGGER.error(e.getMessage(), e); }//from w w w . jav a2 s . c o m } }); } return bufferedImage.get(); }
From source file:org.apache.jmeter.protocol.http.control.TestCacheManager.java
/**
 * Test helper: reaches into {@code CacheManager}'s private {@code threadCache}
 * field via reflection and returns the cache map for the current thread.
 *
 * @return the current thread's cache-entry map
 * @throws Exception if reflection fails
 */
private Map<String, CacheManager.CacheEntry> getThreadCache() throws Exception {
    Field cacheField = CacheManager.class.getDeclaredField("threadCache");
    cacheField.setAccessible(true);
    @SuppressWarnings("unchecked")
    final ThreadLocal<Map<String, CacheManager.CacheEntry>> perThreadCache =
            (ThreadLocal<Map<String, CacheManager.CacheEntry>>) cacheField.get(this.cacheManager);
    return perThreadCache.get();
}
From source file:org.geowebcache.layer.TileLayer.java
protected ByteArrayResource getImageBuffer(ThreadLocal<ByteArrayResource> tl) { ByteArrayResource buffer = tl.get(); if (buffer == null) { buffer = new ByteArrayResource(16 * 1024); tl.set(buffer);//from w w w . ja v a2 s .co m } buffer.truncate(); return buffer; }
From source file:org.codice.alliance.plugin.nitf.NitfPostIngestPlugin.java
private BufferedImage render(InputStream inputStream, Function<Pair<ImageSegment, NitfRenderer>, BufferedImage> imageSegmentFunction) throws NitfFormatException { final ThreadLocal<BufferedImage> bufferedImage = new ThreadLocal<>(); if (inputStream != null) { NitfRenderer renderer = getNitfRenderer(); nitfParserService.parseNitf(inputStream, true).forEachImageSegment(segment -> { if (bufferedImage.get() == null) { BufferedImage bi = imageSegmentFunction.apply(new ImmutablePair<>(segment, renderer)); if (bi != null) { bufferedImage.set(bi); }// w w w . ja va 2 s .c o m } }).end(); } return bufferedImage.get(); }
From source file:org.codice.alliance.plugin.nitf.NitfPreStoragePlugin.java
/**
 * Parses the content item's NITF stream and applies
 * {@code imageSegmentFunction} to its image segments, keeping the first
 * non-null result. The input stream is always closed.
 *
 * @param contentItem the stored content item; may be {@code null}
 * @param imageSegmentFunction renders a (segment, renderer) pair to an image
 * @return the first non-null rendered image, or {@code null}
 * @throws IOException on stream failure
 * @throws ParseException on NITF parse failure
 * @throws NitfFormatException on malformed NITF data
 */
private BufferedImage render(ContentItem contentItem,
        Function<Pair<ImageSegment, NitfRenderer>, BufferedImage> imageSegmentFunction)
        throws IOException, ParseException, NitfFormatException {
    // One-element array as a mutable holder for the lambda; replaces the
    // original's local ThreadLocal, which is overkill for call-confined state
    // and was never remove()d.
    final BufferedImage[] firstImage = new BufferedImage[1];
    if (contentItem != null) {
        InputStream inputStream = contentItem.getInputStream();
        if (inputStream != null) {
            try {
                NitfRenderer renderer = getNitfRenderer();
                new NitfParserInputFlowImpl().inputStream(inputStream).allData()
                        .forEachImageSegment(segment -> {
                            // Only the first successfully produced image is retained.
                            if (firstImage[0] == null) {
                                BufferedImage bi = imageSegmentFunction
                                        .apply(new ImmutablePair<>(segment, renderer));
                                if (bi != null) {
                                    firstImage[0] = bi;
                                }
                            }
                        }).end();
            } finally {
                IOUtils.closeQuietly(inputStream);
            }
        }
    }
    return firstImage[0];
}
From source file:org.codice.alliance.plugin.nitf.NitfPostProcessPlugin.java
/**
 * Parses the NITF stream under a semaphore-bounded concurrency limit and
 * applies {@code imageSegmentFunction} to its image segments, keeping the
 * first non-null result. The input stream is always closed.
 *
 * @param inputStream NITF data; may be {@code null}
 * @param imageSegmentFunction renders a (segment, renderer) pair to an image
 * @return the first non-null rendered image, or {@code null}
 * @throws InterruptedException if interrupted while waiting for a permit
 * @throws NitfFormatException on malformed NITF data
 */
private BufferedImage render(
        InputStream inputStream,
        Function<Pair<ImageSegment, NitfRenderer>, BufferedImage> imageSegmentFunction)
        throws InterruptedException, NitfFormatException {
    // Plain holder replaces the original's local ThreadLocal (call-confined
    // state does not need thread-local storage).
    final BufferedImage[] firstImage = new BufferedImage[1];
    if (inputStream != null) {
        try {
            // BUG FIX: acquire() must complete BEFORE entering the block whose
            // finally calls release(). In the original, an interrupted acquire()
            // still hit the finally, releasing a permit that was never acquired
            // and silently growing the semaphore over time.
            available.acquire();
            try {
                NitfRenderer renderer = nitfRendererSupplier.get();
                NitfParserInputFlow parserInputFlow = nitfParserSupplier.get();
                parserInputFlow.inputStream(inputStream).allData().forEachImageSegment(segment -> {
                    // Only the first successfully produced image is retained.
                    if (firstImage[0] == null) {
                        BufferedImage bi = imageSegmentFunction.apply(new ImmutablePair<>(segment, renderer));
                        if (bi != null) {
                            firstImage[0] = bi;
                        }
                    }
                }).end();
            } finally {
                available.release();
            }
        } finally {
            // Close the stream whether parsing ran or the acquire was interrupted.
            IOUtils.closeQuietly(inputStream);
        }
    }
    return firstImage[0];
}
From source file:com.strategicgains.docussandra.controller.perf.remote.parent.PerfTestParent.java
/**
 * Loads the test documents into the database via worker threads and reports
 * throughput. Two strategies: if all documents fit in memory, they are
 * partitioned into per-worker queues; otherwise ({@code UnsupportedOperationException}
 * from {@link #getDocumentsFromFS()}) each worker streams documents from the
 * filesystem in random-sized chunks.
 *
 * @throws IOException if the document source cannot be read
 * @throws ParseException if a document cannot be parsed
 * @throws InterruptedException if interrupted while waiting for workers
 */
public void loadData() throws IOException, ParseException, InterruptedException {
    logger.info("------------Loading Data into: " + this.getDb().name() + " with Docussandra!------------");
    ArrayList<Thread> workers = new ArrayList<>(NUM_WORKERS + 1);
    int numDocs = getNumDocuments();
    int docsPerWorker = numDocs / NUM_WORKERS;
    try {
        List<Document> docs = getDocumentsFromFS();
        ArrayList<List<Document>> documentQueues = new ArrayList<>(NUM_WORKERS + 1);
        int numDocsAssigned = 0;
        // Partition the in-memory document list into contiguous per-worker slices.
        // NOTE(review): suspected off-by-one — the loop condition stops when one
        // document remains unassigned, and "end = numDocs - 1" excludes the final
        // document from the last slice (subList's end index is exclusive). Verify
        // whether the last document is intentionally skipped.
        while ((numDocsAssigned + 1) < numDocs) {
            int start = numDocsAssigned;
            int end = numDocsAssigned + docsPerWorker;
            if (end > numDocs) {
                end = numDocs - 1;
            }
            documentQueues.add(new ArrayList(docs.subList(start, end)));
            numDocsAssigned = end;
        }
        // One worker thread per slice; each posts its queue sequentially.
        for (final List<Document> queue : documentQueues) {
            workers.add(new Thread() {
                @Override
                public void run() {
                    for (Document d : queue) {
                        postDocument(getDb(), getTb(), d);
                    }
                    logger.info("Thread " + Thread.currentThread().getName() + " is done.");
                }
            });
        }
    } catch (UnsupportedOperationException e) //we can't read everything in at once
    {
        // Fallback: each worker independently streams chunks from the filesystem.
        for (int i = 0; i < NUM_WORKERS; i++) {
            workers.add(new Thread() {
                // Random chunk size so workers don't hit the FS in lock-step
                // and create a bottleneck.
                private final int chunk = (int) (Math.random() * 100) + 150;

                @Override
                public void run() {
                    // NOTE(review): a ThreadLocal is unnecessary here — "counter"
                    // is already a local variable confined to this thread; a plain
                    // int would do. Also, new Integer(0) is a deprecated boxing form.
                    ThreadLocal<Integer> counter = new ThreadLocal<>();
                    counter.set(new Integer(0));
                    try {
                        List<Document> docs = getDocumentsFromFS(chunk); //grab a handful of documents
                        while (docs.size() > 0) {
                            for (Document d : docs) //process the documents we grabbed
                            {
                                postDocument(getDb(), getTb(), d); //post them up
                                counter.set(counter.get() + 1);
                            }
                            docs = getDocumentsFromFS(chunk); //grab another handful of documents
                        }
                        logger.info("Thread " + Thread.currentThread().getName()
                                + " is done. It processed " + counter.get() + " documents.");
                    } catch (IOException | ParseException e) {
                        logger.error("Couldn't read from document", e);
                    }
                }
            });
        }
    }
    StopWatch sw = new StopWatch();
    sw.start();
    // Start all workers, then poll until none are alive.
    for (Thread t : workers) {
        t.start();
    }
    logger.info("All threads started, waiting for completion.");
    boolean allDone = false;
    boolean first = true;
    // NOTE(review): polling with isAlive()/sleep works but Thread.join() (or an
    // ExecutorService) would be the conventional way to await completion.
    while (!allDone || first) {
        first = false;
        boolean done = true;
        for (Thread t : workers) {
            if (t.isAlive()) {
                done = false;
                logger.info("Thread " + t.getName() + " is still running.");
                break;
            }
        }
        if (done) {
            allDone = true;
            sw.stop();
        } else {
            logger.info("We still have workers running...");
            Thread.sleep(5000);
        }
    }
    // Report aggregate throughput figures.
    long miliseconds = sw.getTime();
    double seconds = (double) miliseconds / 1000d;
    output.info("Doc: Done loading data using: " + NUM_WORKERS + " and URL: " + BASE_URI + ". Took: "
            + seconds + " seconds");
    double tpms = (double) numDocs / (double) miliseconds;
    double tps = tpms * 1000;
    double transactionTime = (double) miliseconds / (double) numDocs;
    output.info(this.getDb().name() + " Doc: Average Transactions Per Second: " + tps);
    output.info(this.getDb().name() + " Doc: Average Transactions Time (in miliseconds): " + transactionTime);
    Thread.sleep(100000); //sleep a bit to let the DB digest that before trying anything else
}
From source file:org.kaaproject.kaa.server.appenders.cassandra.appender.CassandraLogEventDao.java
/**
 * Formats a timestamp column value. If {@code tsValue} is already set it is
 * returned unchanged; otherwise the current time is formatted with the
 * element's pattern (or returned as raw millis when no pattern is configured).
 * SimpleDateFormat is not thread-safe, so formatters are cached one-per-thread
 * via ThreadLocals keyed by pattern.
 *
 * @param tsValue existing value, or {@code null} to generate one
 * @param element column mapping supplying the optional date pattern
 * @return the formatted (or passed-through) timestamp value
 */
private String formatTs(String tsValue, ColumnMappingElement element) {
    if (tsValue == null) {
        long ts = System.currentTimeMillis();
        final String pattern = element.getValue();
        if (pattern == null || pattern.isEmpty()) {
            // No pattern configured: emit raw epoch millis.
            tsValue = ts + "";
        } else {
            ThreadLocal<SimpleDateFormat> formatterTL = dateFormatMap.get(pattern);
            if (formatterTL == null) {
                ThreadLocal<SimpleDateFormat> created = new ThreadLocal<SimpleDateFormat>() {
                    @Override
                    protected SimpleDateFormat initialValue() {
                        return new SimpleDateFormat(pattern);
                    }
                };
                // BUG FIX: the original discarded putIfAbsent's return value, so
                // a thread losing the race kept using its own private ThreadLocal
                // instead of the map's winning one. Adopt the existing entry when
                // another thread registered first.
                ThreadLocal<SimpleDateFormat> existing = dateFormatMap.putIfAbsent(pattern, created);
                formatterTL = (existing != null) ? existing : created;
            }
            // initialValue() guarantees a non-null formatter, so the original's
            // defensive null check was unreachable and has been removed.
            SimpleDateFormat formatter = formatterTL.get();
            tsValue = formatter.format(new Date(ts));
        }
    }
    return tsValue;
}
From source file:pl.psnc.ep.rt.web.servlets.CollXMLServlet.java
/**
 * Parses every "module" file referenced by {@code content} in parallel,
 * extracts the earliest creation and latest revision dates, and writes them
 * into the corresponding elements of {@code metadata}.
 *
 * @param metadata element whose created/revised children are updated in place
 * @param content element listing the modules (each carries a File in user data)
 * @throws RemoteException on dLibra service failure
 * @throws DLibraException on dLibra service failure
 * @throws InterruptedException if interrupted while awaiting parse tasks
 * @throws ExecutionException if a parse task fails fatally
 */
private static void addDateTimes(Element metadata, Element content)
        throws RemoteException, DLibraException, InterruptedException, ExecutionException {
    final ContentServer cs = ServicesManager.getInstance().getContetServer();
    // DocumentBuilder is not thread-safe; give each pool thread its own,
    // created lazily on first use.
    // NOTE(review): the factory does not disable DTDs/external entities — if
    // module content can come from untrusted sources this is XXE-prone; confirm.
    final ThreadLocal<DocumentBuilder> documentBuilderTL = new ThreadLocal<DocumentBuilder>() {
        @Override
        protected DocumentBuilder initialValue() {
            try {
                DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
                factory.setNamespaceAware(true);
                return factory.newDocumentBuilder();
            } catch (ParserConfigurationException e) {
                throw new RuntimeException("Default parser configuration failed?", e);
            }
        }
    };
    Date created = null, revised = null;
    NodeList modules = content.getElementsByTagNameNS(Namespace.COL.URI, "module");
    List<Future<Document>> parsedModules = new ArrayList<Future<Document>>();
    // Submit one parse task per module; each reads the first version's stream.
    for (int i = 0; i < modules.getLength(); i++) {
        final File file = (File) modules.item(i).getUserData(KEY_FILE);
        final VersionId versionId = file.getVersionIds().get(0);
        parsedModules.add(threadPool.submit(new Callable<Document>() {
            @SuppressWarnings("resource")
            @Override
            public Document call() throws Exception {
                InputStream inputStream = cs.getVersionInputStream(versionId);
                Document module = null;
                try {
                    try {
                        module = documentBuilderTL.get().parse(inputStream);
                    } finally {
                        inputStream.close();
                    }
                } catch (Exception e) {
                    // Best-effort: an unparseable module yields null and is
                    // skipped by findDate below.
                    logger.warn("Could not parse file version " + versionId, e);
                } finally {
                    // Always release the element back to the content server.
                    cs.releaseElement(versionId);
                }
                return module;
            }
        }));
    }
    // NOTE(review): SimpleDateFormat is not thread-safe, but df is confined to
    // this thread here, so its use is safe.
    SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm z");
    // Collect results in submission order and fold into the two dates.
    for (Future<Document> futureModule : parsedModules) {
        Document module = futureModule.get();
        created = findDate(created, df, module, TAG_CREATED, false);
        revised = findDate(revised, df, module, TAG_REVISED, true);
    }
    // Write back only the dates that were actually found.
    if (created != null) {
        NodeList nodeList = metadata.getElementsByTagNameNS(Namespace.MD.URI, TAG_CREATED);
        nodeList.item(0).setTextContent(df.format(created));
    }
    if (revised != null) {
        NodeList nodeList = metadata.getElementsByTagNameNS(Namespace.MD.URI, TAG_REVISED);
        nodeList.item(0).setTextContent(df.format(revised));
    }
}