List of usage examples for java.util.concurrent.Executors.newSingleThreadExecutor()
public static ExecutorService newSingleThreadExecutor()
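Before the real-world examples below, here is a minimal, self-contained sketch of typical usage (illustrative only, not taken from any of the listed source files): submit a task to the single worker thread, read the result, and shut the executor down so its thread does not keep the JVM alive.

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;

public class SingleThreadExecutorExample {
    public static void main(String[] args) throws Exception {
        // Tasks submitted to this executor run sequentially on one worker thread.
        ExecutorService executor = Executors.newSingleThreadExecutor();

        Future<Integer> result = executor.submit(() -> 21 + 21);
        System.out.println("Result: " + result.get());

        // Shut the executor down; otherwise its worker thread keeps the JVM alive.
        executor.shutdown();
        executor.awaitTermination(10, TimeUnit.SECONDS);
    }
}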
From source file:or.tango.android.activity.PicActivity.java
@Override
protected void setLayout() {
    setContentView(R.layout.activity_pic);
    // single background thread, used later when uploading the selected picture
    singleThread = Executors.newSingleThreadExecutor();
    String picPath = getIntent().getStringExtra(CameraActivity.PHOTO_PATH_KEY);
    uploadFile = new File(picPath);
}
From source file:de.dfki.iui.mmds.scxml.engine.SCXMLEngineActivator.java
public static void postScxmlOnTransitionEvent(final String id, final TransitionTarget from,
        final TransitionTarget to, final Transition transition) {
    if (getEventAdmin() == null)
        return;
    // post the event asynchronously so the caller is not blocked
    Executors.newSingleThreadExecutor().execute(new Runnable() {
        @Override
        public void run() {
            getEventAdmin().postEvent(new SCXMLOnTransitionEvent(id, from, to, transition));
        }
    });
}
From source file:net.openid.appauthdemo.TokenActivity.java
@Override
protected void onStart() {
    super.onStart();

    if (mExecutor.isShutdown()) {
        mExecutor = Executors.newSingleThreadExecutor();
    }

    if (mStateManager.getCurrent().isAuthorized()) {
        displayAuthorized();
        return;
    }

    // the stored AuthState is incomplete, so check if we are currently receiving the result of
    // the authorization flow from the browser.
    AuthorizationResponse response = AuthorizationResponse.fromIntent(getIntent());
    AuthorizationException ex = AuthorizationException.fromIntent(getIntent());

    if (response != null || ex != null) {
        mStateManager.updateAfterAuthorization(response, ex);
    }

    if (response != null && response.authorizationCode != null) {
        // authorization code exchange is required
        mStateManager.updateAfterAuthorization(response, ex);
        exchangeAuthorizationCode(response);
    } else if (ex != null) {
        displayNotAuthorized("Authorization flow failed: " + ex.getMessage());
    } else {
        displayNotAuthorized("No authorization state retained - reauthorization required");
    }
}
From source file:com.netflix.curator.framework.recipes.cache.TestNodeCache.java
@Test
public void testRebuildAgainstOtherProcesses() throws Exception {
    NodeCache cache = null;
    final CuratorFramework client = CuratorFrameworkFactory.newClient(server.getConnectString(),
            new RetryOneTime(1));
    client.start();
    try {
        client.create().forPath("/test");
        client.create().forPath("/test/snafu", "original".getBytes());

        final CountDownLatch latch = new CountDownLatch(1);
        cache = new NodeCache(client, "/test/snafu");
        cache.getListenable().addListener(new NodeCacheListener() {
            @Override
            public void nodeChanged() throws Exception {
                latch.countDown();
            }
        });
        cache.rebuildTestExchanger = new Exchanger<Object>();

        ExecutorService service = Executors.newSingleThreadExecutor();
        final NodeCache finalCache = cache;
        Future<Object> future = service.submit(new Callable<Object>() {
            @Override
            public Object call() throws Exception {
                finalCache.rebuildTestExchanger.exchange(new Object(), 10, TimeUnit.SECONDS);

                // simulate another process updating the node while we're rebuilding
                client.setData().forPath("/test/snafu", "other".getBytes());

                ChildData currentData = finalCache.getCurrentData();
                Assert.assertNotNull(currentData);

                finalCache.rebuildTestExchanger.exchange(new Object(), 10, TimeUnit.SECONDS);
                return null;
            }
        });

        cache.start(false);
        future.get();

        Assert.assertTrue(latch.await(10, TimeUnit.SECONDS));
        Assert.assertNotNull(cache.getCurrentData());
        Assert.assertEquals(cache.getCurrentData().getData(), "other".getBytes());
    } finally {
        IOUtils.closeQuietly(cache);
        IOUtils.closeQuietly(client);
    }
}
From source file:org.rdv.viz.spectrum.SpectrumAnalyzerPanel.java
/**
 * Creates a spectrum analyzer panel.
 */
public SpectrumAnalyzerPanel() {
    super(null, true);

    sampleRate = 256;
    numberOfSamples = 256;
    windowFunction = WindowFunction.Hamming;
    segmentSize = 256;
    overlap = 0;

    inputData = new double[numberOfSamples];
    oldestDataIndex = 0;

    createWindow();

    fft = new DoubleFFT_1D(segmentSize);
    // dedicated worker thread so FFT computations run sequentially off the calling thread
    fftExecutor = Executors.newSingleThreadExecutor();

    initChart();
}
From source file:oz.hadoop.yarn.api.core.LocalApplicationLaunchTests.java
@Test(timeout = 60000)
public void validateLongLivedJavaContainerLaunchWithGracefullShutdown() throws Exception {
    ExecutorService executor = Executors.newSingleThreadExecutor();
    YarnApplication<DataProcessor> yarnApplication = YarnAssembly
            .forApplicationContainer(SimpleRandomDelayContainer.class).containerCount(4)
            .withApplicationMaster().build("sample-yarn-application");
    final DataProcessor dataProcessor = yarnApplication.launch();
    assertEquals(4, dataProcessor.containers());
    executor.execute(new Runnable() {
        @Override
        public void run() {
            for (int i = 0; i < 1000000; i++) {
                for (int j = 0; j < dataProcessor.containers(); j++) {
                    try {
                        dataProcessor.process(ByteBuffer.wrap(("Hello Yarn!-" + i).getBytes()));
                    } catch (Exception e) {
                        e.printStackTrace();
                        throw new IllegalStateException("Failed to submit data for processing", e);
                    }
                }
            }
        }
    });
    assertTrue(yarnApplication.isRunning());
    Thread.sleep(new Random().nextInt(5000));
    yarnApplication.shutDown();
    assertFalse(yarnApplication.isRunning());
}
From source file:com.hurence.logisland.connect.opc.CommonOpcSourceTask.java
@Override
public void start(Map<String, String> props) {
    setConfigurationProperties(props);
    transferQueue = new LinkedTransferQueue<>();
    opcOperations = new SmartOpcOperations<>(createOpcOperations());
    ConnectionProfile connectionProfile = createConnectionProfile();
    host = connectionProfile.getConnectionUri().getHost();
    tagInfoMap = CommonUtils.parseTagsFromProperties(props).stream()
            .collect(Collectors.toMap(TagInfo::getTagId, Function.identity()));
    minWaitTime = Math.min(10, tagInfoMap.values().stream().map(TagInfo::getSamplingInterval)
            .mapToLong(Duration::toMillis).min().getAsLong());
    opcOperations.connect(connectionProfile);
    if (!opcOperations.awaitConnected()) {
        throw new ConnectException("Unable to connect");
    }

    //set up polling source emission
    pollingScheduler = Executors.newSingleThreadScheduledExecutor();
    streamingThread = Executors.newSingleThreadExecutor();
    Map<Duration, List<TagInfo>> pollingMap = tagInfoMap.values().stream()
            .filter(tagInfo -> StreamingMode.POLL.equals(tagInfo.getStreamingMode()))
            .collect(Collectors.groupingBy(TagInfo::getSamplingInterval));
    final Map<String, OpcData> lastValues = Collections.synchronizedMap(new HashMap<>());
    pollingMap.forEach((k, v) -> pollingScheduler.scheduleAtFixedRate(() -> {
        final Instant now = Instant.now();
        v.stream().map(TagInfo::getTagId).map(lastValues::get).filter(Functions.not(Objects::isNull))
                .map(data -> Pair.of(now, data)).forEach(transferQueue::add);
    }, 0, k.toNanos(), TimeUnit.NANOSECONDS));

    //then subscribe for all
    final SubscriptionConfiguration subscriptionConfiguration = new SubscriptionConfiguration()
            .withDefaultSamplingInterval(Duration.ofMillis(10_000));
    tagInfoMap.values().forEach(tagInfo -> subscriptionConfiguration
            .withTagSamplingIntervalForTag(tagInfo.getTagId(), tagInfo.getSamplingInterval()));
    running.set(true);
    streamingThread.submit(() -> {
        while (running.get()) {
            try {
                createSessionIfNeeded();
                if (session == null) {
                    return;
                }

                session.stream(subscriptionConfiguration,
                        tagInfoMap.keySet().toArray(new String[tagInfoMap.size()])).forEach(opcData -> {
                            if (tagInfoMap.get(opcData.getTag()).getStreamingMode()
                                    .equals(StreamingMode.SUBSCRIBE)) {
                                transferQueue.add(Pair.of(
                                        hasServerSideSampling() ? opcData.getTimestamp() : Instant.now(),
                                        opcData));
                            } else {
                                lastValues.put(opcData.getTag(), opcData);
                            }
                        });
            } catch (Exception e) {
                if (running.get()) {
                    logger.warn("Stream interrupted while reading from " + host, e);
                    safeCloseSession();
                    lastValues.clear();
                }
            }
        }
    });
}
From source file:com.linemetrics.monk.api.ApiClient.java
public List<DataItem> getRangeOptimized(final Number dataStreamId, long time_from, long time_to, TDB tdb,
        TimeZone tz) throws ApiException {
    try {
        long timeDiff = time_to - time_from;
        long maxTimeRange = tdb.getQueryLimit();
        long queryRange = tdb.getQueryRange();

        if (timeDiff < maxTimeRange) {
            return this.getRange(dataStreamId, time_from, time_to, tdb, tz);
        }

        long millis = System.currentTimeMillis();

        ExecutorService executorService = Executors.newSingleThreadExecutor();
        Set<Future<List<DataItem>>> callables = new HashSet<Future<List<DataItem>>>();

        long queryStart = time_from;
        long queryEnd = time_from + queryRange;
        while (queryStart < time_to) {
            callables.add(executorService
                    .submit(new CallableRangeQuery(dataStreamId, queryStart, queryEnd, tdb, tz)));
            queryStart += queryRange;
            queryEnd += queryRange;
        }
        executorService.shutdown();

        List<DataItem> list = new ArrayList<>();
        for (Future<List<DataItem>> future : callables) {
            List<DataItem> slice = future.get();
            if (slice == null) {
                throw new ApiException("Error while retrieving slice :(");
            } else {
                list.addAll(slice);
            }
        }
        executorService.awaitTermination(60 * 60 * 1000L, TimeUnit.MILLISECONDS);

        System.out.print("Optimized Range Query takes: " + (System.currentTimeMillis() - millis) + "ms ");

        // System.out.println(list.size());
        // Collections.sort(list, DataItemComparator.getInstance());

        DataItem prevItem = null, currItem;
        DataItem beginSlice = null;

        Iterator<DataItem> itemIterator = list.iterator();
        while (itemIterator.hasNext()) {
            currItem = itemIterator.next();

            if (prevItem != null) {
                if (prevItem.getTimestamp().equals(currItem.getTimestamp())) {
                    itemIterator.remove();
                    continue;
                }

                if (beginSlice == null) {
                    if (currItem.getTimestamp() - prevItem.getTimestamp() > tdb.getMilliseconds()) {
                        beginSlice = prevItem;
                    }
                } else {
                    if (currItem.getTimestamp() - prevItem.getTimestamp() == tdb.getMilliseconds()) {
                        System.out.println("TimeRange " + beginSlice.getTimestamp() + " - "
                                + prevItem.getTimestamp() + " "
                                + (prevItem.getTimestamp() - beginSlice.getTimestamp()) + " ms missing!");
                        beginSlice = null;
                    }
                }
            }
            prevItem = currItem;
        }

        if (beginSlice != null) {
            System.out.println("TimeRange " + beginSlice.getTimestamp() + " - " + prevItem.getTimestamp()
                    + " " + (prevItem.getTimestamp() - beginSlice.getTimestamp()) + " ms missing!");
        }

        long expectedItems = ((time_to - time_from) / tdb.getMilliseconds()) - 1;
        System.out.println(" (" + (list.size() - expectedItems) + ")");

        return list;
    } catch (Exception e) {
        throw new ApiException(e.getMessage());
    }
}
From source file:tech.sirwellington.alchemy.http.AlchemyHttpBuilder.java
public AlchemyHttpBuilder enableAsyncCallbacks() {
    return usingExecutorService(Executors.newSingleThreadExecutor());
}