List of usage examples for the java.util.concurrent.ThreadPoolExecutor constructor
public ThreadPoolExecutor(int corePoolSize, int maximumPoolSize, long keepAliveTime, TimeUnit unit, BlockingQueue<Runnable> workQueue)
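Before the project-specific examples below, here is a minimal, self-contained sketch (class and variable names are illustrative, not taken from any of the projects) showing what each constructor argument controls:

import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

public class MinimalThreadPoolExample {
    public static void main(String[] args) throws InterruptedException {
        // Holds submitted tasks while all core threads are busy.
        BlockingQueue<Runnable> workQueue = new LinkedBlockingQueue<Runnable>();

        // 2 core threads, at most 4 threads total, idle non-core threads are reclaimed after 30 seconds.
        ThreadPoolExecutor pool = new ThreadPoolExecutor(2, 4, 30L, TimeUnit.SECONDS, workQueue);

        for (int i = 0; i < 10; i++) {
            final int taskId = i;
            pool.execute(() -> System.out.println("task " + taskId + " ran on " + Thread.currentThread().getName()));
        }

        pool.shutdown();                              // stop accepting new tasks
        pool.awaitTermination(1, TimeUnit.MINUTES);   // wait for queued tasks to drain
    }
}

Note that with an unbounded queue such as LinkedBlockingQueue the pool never grows beyond corePoolSize; maximumPoolSize only takes effect with a bounded queue or a SynchronousQueue, a distinction several of the examples below rely on.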
From source file:ca.ualberta.cmput301w14t08.geochan.managers.ThreadManager.java
/**
 * Private constructor due to singleton pattern.
 */
private ThreadManager() {
    commentListCache = new LruCache<String, CommentList>(MAXIMUM_CACHE_SIZE);
    getImageCache = new LruCache<String, Bitmap>(MAXIMUM_CACHE_SIZE);
    getPOICache = new LruCache<String, String>(MAXIMUM_CACHE_SIZE);

    getCommentListRunnableQueue = new LinkedBlockingQueue<Runnable>();
    getCommentsRunnableQueue = new LinkedBlockingQueue<Runnable>();
    postImageRunnableQueue = new LinkedBlockingQueue<Runnable>();
    postRunnableQueue = new LinkedBlockingQueue<Runnable>();
    updateRunnableQueue = new LinkedBlockingQueue<Runnable>();
    getImageRunnableQueue = new LinkedBlockingQueue<Runnable>();
    getThreadCommentsRunnableQueue = new LinkedBlockingQueue<Runnable>();
    getPOIRunnableQueue = new LinkedBlockingQueue<Runnable>();

    getCommentsTaskQueue = new LinkedBlockingQueue<GetCommentsTask>();
    postTaskQueue = new LinkedBlockingQueue<PostTask>();
    getImageTaskQueue = new LinkedBlockingQueue<GetImageTask>();
    getThreadCommentsTaskQueue = new LinkedBlockingQueue<GetThreadCommentsTask>();
    getPOITaskQueue = new LinkedBlockingQueue<GetPOITask>();

    // One dedicated pool per task type, each fed by its own unbounded queue.
    getCommentListPool = new ThreadPoolExecutor(CORE_POOL_SIZE, MAXIMUM_POOL_SIZE, KEEP_ALIVE_TIME,
            KEEP_ALIVE_TIME_UNIT, getCommentListRunnableQueue);
    getCommentsPool = new ThreadPoolExecutor(CORE_POOL_SIZE, MAXIMUM_POOL_SIZE, KEEP_ALIVE_TIME,
            KEEP_ALIVE_TIME_UNIT, getCommentsRunnableQueue);
    postImagePool = new ThreadPoolExecutor(CORE_POOL_SIZE, MAXIMUM_POOL_SIZE, KEEP_ALIVE_TIME,
            KEEP_ALIVE_TIME_UNIT, postImageRunnableQueue);
    postPool = new ThreadPoolExecutor(CORE_POOL_SIZE, MAXIMUM_POOL_SIZE, KEEP_ALIVE_TIME,
            KEEP_ALIVE_TIME_UNIT, postRunnableQueue);
    updatePool = new ThreadPoolExecutor(CORE_POOL_SIZE, MAXIMUM_POOL_SIZE, KEEP_ALIVE_TIME,
            KEEP_ALIVE_TIME_UNIT, updateRunnableQueue);
    getImagePool = new ThreadPoolExecutor(CORE_POOL_SIZE, MAXIMUM_POOL_SIZE, KEEP_ALIVE_TIME,
            KEEP_ALIVE_TIME_UNIT, getImageRunnableQueue);
    getThreadCommentsPool = new ThreadPoolExecutor(CORE_POOL_SIZE, MAXIMUM_POOL_SIZE, KEEP_ALIVE_TIME,
            KEEP_ALIVE_TIME_UNIT, getThreadCommentsRunnableQueue);
    getPOIPool = new ThreadPoolExecutor(CORE_POOL_SIZE, MAXIMUM_POOL_SIZE, KEEP_ALIVE_TIME,
            KEEP_ALIVE_TIME_UNIT, getPOIRunnableQueue);

    // Handler bound to the main looper: task results are delivered back to the UI thread.
    handler = new Handler(Looper.getMainLooper()) {
        @Override
        public void handleMessage(Message inputMessage) {
            switch (inputMessage.what) {
            case POST_TASK_COMPLETE:
                PostTask postTaskComplete = (PostTask) inputMessage.obj;
                if (postTaskComplete.getDialog() != null) {
                    postTaskComplete.getDialog().dismiss();
                }
                ThreadComment threadComment = postTaskComplete.getThreadComment();
                if (threadComment != null) {
                    if (!postTaskComplete.isEdit()) {
                        // Update the model and sort accordingly
                        ThreadList.addThread(threadComment);
                        SortUtil.sortThreads(PreferencesManager.getInstance().getThreadSort(),
                                ThreadList.getThreads());
                    }
                    FragmentActivity activity = (FragmentActivity) context;
                    ThreadListFragment fragment = (ThreadListFragment) activity.getSupportFragmentManager()
                            .findFragmentByTag("threadListFrag");
                    if (fragment != null) {
                        fragment.finishReload();
                    }
                }
                break;
            case GET_THREADS_COMPLETE:
                GetThreadCommentsTask threadTask = (GetThreadCommentsTask) inputMessage.obj;
                threadTask.getFragment().finishReload();
                recycleGetThreadCommentsTask(threadTask);
                break;
            case GET_THREADS_FAILED:
                GetThreadCommentsTask threadTaskFail = (GetThreadCommentsTask) inputMessage.obj;
                threadTaskFail.getFragment().finishReload();
                recycleGetThreadCommentsTask(threadTaskFail);
                break;
            case GET_COMMENTS_COMPLETE:
                GetCommentsTask task = (GetCommentsTask) inputMessage.obj;
                task.getFragment().finishReload();
                recycleCommentsTask(task);
                break;
            case GET_COMMENTS_FAILED:
                GetCommentsTask taskFail = (GetCommentsTask) inputMessage.obj;
                taskFail.getFragment().finishReload();
                recycleCommentsTask(taskFail);
                break;
            case GET_COMMENT_LIST_RUNNING:
                break;
            case GET_COMMENT_LIST_FAILED:
                GetCommentsTask taskListFail = (GetCommentsTask) inputMessage.obj;
                taskListFail.getFragment().finishReload();
                recycleCommentsTask(taskListFail);
                break;
            case GET_IMAGE_RUNNING:
                GetImageTask imageTask = (GetImageTask) inputMessage.obj;
                if (imageTask.getDialog() != null) {
                    imageTask.getDialog().show();
                }
                break;
            case GET_IMAGE_FAILED:
                GetImageTask imageTaskFail = (GetImageTask) inputMessage.obj;
                if (imageTaskFail.getDialog() != null) {
                    imageTaskFail.getDialog().dismiss();
                }
                recycleGetImageTask(imageTaskFail);
                break;
            case GET_IMAGE_COMPLETE:
                GetImageTask imageTaskComplete = (GetImageTask) inputMessage.obj;
                if (imageTaskComplete.getDialog() != null) {
                    imageTaskComplete.getDialog().dismiss();
                }
                Bitmap bitmap = imageTaskComplete.getImageCache();
                String id = imageTaskComplete.getId();
                ImageView view = imageTaskComplete.getmImageWeakRef().get();
                if (view != null) {
                    view.setImageBitmap(bitmap);
                }
                CacheManager.getInstance().serializeImage(bitmap, id);
                recycleGetImageTask(imageTaskComplete);
                break;
            case GET_POI_RUNNING:
                GetPOITask poiTaskRunning = (GetPOITask) inputMessage.obj;
                if (poiTaskRunning.getDialog() != null) {
                    poiTaskRunning.getDialog().show();
                }
                break;
            case GET_POI_COMPLETE:
                GetPOITask poiTaskComplete = (GetPOITask) inputMessage.obj;
                if (poiTaskComplete.getDialog() != null) {
                    poiTaskComplete.getDialog().dismiss();
                }
                if (poiTaskComplete.getMarker() != null) {
                    poiTaskComplete.getMarker().setSubDescription(poiTaskComplete.getPOICache());
                    poiTaskComplete.getMarker().showInfoWindow();
                }
                poiTaskComplete.getLocation().setLocationDescription(poiTaskComplete.getPOICache());
                recycleGetPOITask(poiTaskComplete);
                break;
            case GET_POI_FAILED:
                GetPOITask poiTaskFailed = (GetPOITask) inputMessage.obj;
                if (poiTaskFailed.getDialog() != null) {
                    poiTaskFailed.getDialog().dismiss();
                }
                if (poiTaskFailed.getMarker() != null) {
                    poiTaskFailed.getMarker().setSubDescription("Unknown Location");
                    poiTaskFailed.getMarker().showInfoWindow();
                }
                poiTaskFailed.getLocation().setLocationDescription("Unknown Location");
                recycleGetPOITask(poiTaskFailed);
                break;
            case POST_GET_POI_RUNNING:
                PostTask postPoiTaskRunning = (PostTask) inputMessage.obj;
                if (postPoiTaskRunning.getDialog() != null) {
                    postPoiTaskRunning.getDialog().show();
                }
                break;
            case POST_GET_POI_COMPLETE:
                PostTask postPoiTaskComplete = (PostTask) inputMessage.obj;
                if (postPoiTaskComplete.getDialog() != null) {
                    postPoiTaskComplete.getDialog().setMessage("Posting to Server");
                }
                break;
            case POST_GET_POI_FAILED:
                PostTask postPoiTaskFailed = (PostTask) inputMessage.obj;
                if (postPoiTaskFailed.getDialog() != null) {
                    postPoiTaskFailed.getDialog().dismiss();
                }
                break;
            case UPDATE_FAILED:
                PostTask postTaskUpdateFailed = (PostTask) inputMessage.obj;
                if (postTaskUpdateFailed.getDialog() != null) {
                    postTaskUpdateFailed.getDialog().dismiss();
                }
                break;
            case POST_FAILED:
                PostTask postTaskFailed = (PostTask) inputMessage.obj;
                if (postTaskFailed.getDialog() != null) {
                    postTaskFailed.getDialog().dismiss();
                }
                break;
            case POST_RUNNING:
                PostTask postTaskRun = (PostTask) inputMessage.obj;
                if (postTaskRun.getDialog() != null && !postTaskRun.getDialog().isShowing()) {
                    postTaskRun.getDialog().show();
                }
                break;
            case POST_IMAGE_FAILED:
                PostTask postTaskImageFailed = (PostTask) inputMessage.obj;
                if (postTaskImageFailed.getDialog() != null) {
                    postTaskImageFailed.getDialog().dismiss();
                }
                break;
            default:
                super.handleMessage(inputMessage);
                break;
            }
        }
    };
}
From source file:org.apache.kylin.job.impl.threadpool.DefaultScheduler.java
@Override
public synchronized void init(JobEngineConfig jobEngineConfig) throws SchedulerException {
    if (!initialized) {
        initialized = true;
    } else {
        return;
    }

    String ZKConnectString = getZKConnectString(jobEngineConfig);
    if (StringUtils.isEmpty(ZKConnectString)) {
        throw new IllegalArgumentException("ZOOKEEPER_QUORUM is empty!");
    }

    this.jobEngineConfig = jobEngineConfig;
    RetryPolicy retryPolicy = new ExponentialBackoffRetry(1000, 3);
    this.zkClient = CuratorFrameworkFactory.newClient(ZKConnectString, retryPolicy);
    this.zkClient.start();
    this.sharedLock = new InterProcessMutex(zkClient, schedulerId());
    boolean hasLock = false;
    try {
        hasLock = sharedLock.acquire(3, TimeUnit.SECONDS);
    } catch (Exception e) {
        logger.warn("error acquire lock", e);
    }
    if (!hasLock) {
        logger.warn("fail to acquire lock, scheduler has not been started");
        zkClient.close();
        return;
    }

    executableManager = ExecutableManager.getInstance(jobEngineConfig.getConfig());
    //load all executable, set them to a consistent status
    fetcherPool = Executors.newScheduledThreadPool(1);
    int corePoolSize = jobEngineConfig.getMaxConcurrentJobLimit();
    // Fixed-size job pool with a SynchronousQueue: a job is accepted only when a worker thread is free.
    jobPool = new ThreadPoolExecutor(corePoolSize, corePoolSize, Long.MAX_VALUE, TimeUnit.DAYS,
            new SynchronousQueue<Runnable>());
    context = new DefaultContext(Maps.<String, Executable>newConcurrentMap(), jobEngineConfig.getConfig());

    for (AbstractExecutable executable : executableManager.getAllExecutables()) {
        if (executable.getStatus() == ExecutableState.READY) {
            executableManager.updateJobOutput(executable.getId(), ExecutableState.ERROR, null,
                    "scheduler initializing work to reset job to ERROR status");
        }
    }
    executableManager.updateAllRunningJobsToError();

    Runtime.getRuntime().addShutdownHook(new Thread() {
        public void run() {
            logger.debug("Closing zk connection");
            try {
                shutdown();
            } catch (SchedulerException e) {
                logger.error("error shutdown scheduler", e);
            }
        }
    });

    fetcherPool.scheduleAtFixedRate(new FetcherRunner(), 10,
            ExecutableConstants.DEFAULT_SCHEDULER_INTERVAL_SECONDS, TimeUnit.SECONDS);
    hasStarted = true;
}
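The interesting detail in Kylin's jobPool is the combination of corePoolSize == maximumPoolSize with a SynchronousQueue: tasks are handed directly to a free worker and nothing is queued, so once every worker is busy, further submissions are rejected. A minimal sketch of that behaviour (pool size and task are illustrative, not Kylin's values):

import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.SynchronousQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

public class DirectHandoffExample {
    public static void main(String[] args) {
        // Two workers, no task queue: a submission succeeds only if a thread is free.
        ThreadPoolExecutor jobPool = new ThreadPoolExecutor(2, 2, Long.MAX_VALUE, TimeUnit.DAYS,
                new SynchronousQueue<Runnable>());

        Runnable longJob = () -> {
            try {
                Thread.sleep(5_000);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
        };

        jobPool.execute(longJob);
        jobPool.execute(longJob);
        try {
            jobPool.execute(longJob);   // both workers busy -> rejected by the default AbortPolicy
        } catch (RejectedExecutionException e) {
            System.out.println("third job rejected");
        }

        jobPool.shutdown();
    }
}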
From source file:com.mirth.connect.connectors.tcp.TcpReceiver.java
@Override
public void onStart() throws ConnectorTaskException {
    disposing.set(false);
    results.clear();
    clientReaders.clear();

    if (connectorProperties.isServerMode()) {
        // If we're in server mode, use the max connections property to initialize the thread pool
        executor = new ThreadPoolExecutor(0, maxConnections, 60L, TimeUnit.SECONDS,
                new SynchronousQueue<Runnable>());
    } else {
        // If we're in client mode, only a single thread is needed
        executor = Executors.newSingleThreadExecutor();
    }

    if (connectorProperties.isServerMode()) {
        try {
            createServerSocket();
        } catch (IOException e) {
            throw new ConnectorTaskException("Failed to create server socket (" + connectorProperties.getName()
                    + " \"Source\" on channel " + getChannelId() + ").", e);
        }
    }

    // Create the acceptor thread
    thread = new Thread("TCP Receiver Server Acceptor Thread on " + getChannel().getName() + " ("
            + getChannelId() + ")") {
        @Override
        public void run() {
            while (getCurrentState() == DeployedState.STARTED) {
                Socket socket = null;

                if (connectorProperties.isServerMode()) {
                    // Server mode; wait to accept a client socket on the ServerSocket
                    try {
                        logger.debug("Waiting for new client socket (" + connectorProperties.getName()
                                + " \"Source\" on channel " + getChannelId() + ").");
                        socket = serverSocket.accept();
                        logger.trace("Accepted new socket: " + socket.getRemoteSocketAddress().toString()
                                + " -> " + socket.getLocalSocketAddress());
                    } catch (java.io.InterruptedIOException e) {
                        logger.debug("Interruption during server socket accept operation ("
                                + connectorProperties.getName() + " \"Source\" on channel " + getChannelId()
                                + ").", e);
                    } catch (Exception e) {
                        logger.debug("Error accepting new socket (" + connectorProperties.getName()
                                + " \"Source\" on channel " + getChannelId() + ").", e);
                    }
                } else {
                    // Client mode, manually initiate a client socket
                    try {
                        logger.debug("Initiating for new client socket (" + connectorProperties.getName()
                                + " \"Source\" on channel " + getChannelId() + ").");
                        if (connectorProperties.isOverrideLocalBinding()) {
                            socket = SocketUtil.createSocket(configuration, getLocalAddress(), getLocalPort());
                        } else {
                            socket = SocketUtil.createSocket(configuration);
                        }
                        clientSocket = socket;
                        SocketUtil.connectSocket(socket, getRemoteAddress(), getRemotePort(), timeout);
                    } catch (Exception e) {
                        logger.error("Error initiating new socket (" + connectorProperties.getName()
                                + " \"Source\" on channel " + getChannelId() + ").", e);
                        closeSocketQuietly(socket);
                        socket = null;
                        clientSocket = null;
                    }
                }

                try {
                    ThreadUtils.checkInterruptedStatus();

                    if (socket != null) {
                        synchronized (clientReaders) {
                            TcpReader reader = null;
                            try {
                                // Only allow worker threads to be submitted if we're not currently trying to stop the connector
                                if (disposing.get()) {
                                    return;
                                }
                                reader = new TcpReader(socket);
                                clientReaders.add(reader);
                                results.add(executor.submit(reader));
                            } catch (RejectedExecutionException | SocketException e) {
                                if (e instanceof RejectedExecutionException) {
                                    logger.debug("Executor rejected new task (" + connectorProperties.getName()
                                            + " \"Source\" on channel " + getChannelId() + ").", e);
                                } else {
                                    logger.debug("Error initializing socket (" + connectorProperties.getName()
                                            + " \"Source\" on channel " + getChannelId() + ").", e);
                                }
                                clientReaders.remove(reader);
                                closeSocketQuietly(socket);
                            }
                        }
                    }

                    if (connectorProperties.isServerMode()) {
                        // Remove any completed tasks from the list, but don't try to retrieve currently running tasks
                        cleanup(false, false, true);
                    } else {
                        // Wait until the TcpReader is done
                        cleanup(true, false, true);

                        String info = "Client socket finished, waiting "
                                + connectorProperties.getReconnectInterval() + " ms...";
                        eventController.dispatchEvent(new ConnectionStatusEvent(getChannelId(), getMetaDataId(),
                                getSourceName(), ConnectionStatusEventType.INFO, info));

                        // Use the reconnect interval to determine how long to wait until creating another socket
                        sleep(reconnectInterval);
                    }
                } catch (InterruptedException e) {
                    return;
                }
            }
        }
    };

    thread.start();
}
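Mirth's server-mode executor (0 core threads, a maximum of maxConnections, 60-second keep-alive, SynchronousQueue) behaves like a cached thread pool with a hard upper bound: one thread is created per accepted connection up to the limit, and idle threads are reclaimed after a minute. A stripped-down sketch of just that configuration (the maxConnections value and the task body are illustrative):

import java.util.concurrent.ExecutorService;
import java.util.concurrent.SynchronousQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

public class BoundedCachedPoolExample {
    public static void main(String[] args) {
        int maxConnections = 10;  // illustrative; Mirth reads this from the connector properties

        // No core threads; grow on demand up to maxConnections; idle threads die after 60 s.
        ExecutorService executor = new ThreadPoolExecutor(0, maxConnections, 60L, TimeUnit.SECONDS,
                new SynchronousQueue<Runnable>());

        for (int i = 0; i < 5; i++) {
            final int connectionId = i;
            executor.submit(() -> System.out.println("handling connection " + connectionId));
        }

        executor.shutdown();
    }
}

Submissions beyond maxConnections while every thread is busy are rejected, which is why the acceptor loop above catches RejectedExecutionException and closes the offending socket.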
From source file:it.geosolutions.geobatch.flow.file.FileBasedFlowManager.java
/**
 * @param flowCfg
 * @throws IOException
 */
private void initialize(FileBasedFlowConfiguration flowCfg, File geoBatchConfigDir, File geoBatchTempDir)
        throws Exception, NullPointerException {
    this.initialized = false;
    this.name = flowCfg.getName();
    this.description = flowCfg.getDescription();
    flowConfigDir = initConfigDir(flowCfg, geoBatchConfigDir);
    flowTempDir = initTempDir(flowCfg, geoBatchTempDir);

    // get global config
    final GBSettingsCatalog settingsCatalog = CatalogHolder.getSettingsCatalog();
    final GBSettings settings;
    final FlowSettings fs;
    settings = settingsCatalog.find("FLOW");
    if ((settings != null) && (settings instanceof FlowSettings)) {
        fs = (FlowSettings) settings;
    } else {
        fs = new FlowSettings();
        // store the file for further flow loads
        settingsCatalog.save(fs);
    }

    this.keepConsumers = flowCfg.isKeepConsumers();
    if (fs.isKeepConsumers() && keepConsumers == null)
        this.keepConsumers = true;
    else
        this.keepConsumers = false;

    this.maxStoredConsumers = flowCfg.getMaxStoredConsumers();
    if (maxStoredConsumers == null || maxStoredConsumers < 1) {
        this.maxStoredConsumers = fs.getMaxStoredConsumers();
    }

    final int queueSize = (flowCfg.getWorkQueueSize() > 0) ? flowCfg.getWorkQueueSize() : fs.getWorkQueueSize();
    final int corePoolSize = (flowCfg.getCorePoolSize() > 0) ? flowCfg.getCorePoolSize() : fs.getCorePoolSize();
    final int maximumPoolSize = (flowCfg.getMaximumPoolSize() > 0) ? flowCfg.getMaximumPoolSize()
            : fs.getMaximumPoolSize();
    final long keepAlive = (flowCfg.getKeepAliveTime() > 0) ? flowCfg.getKeepAliveTime() : fs.getKeepAliveTime(); // seconds

    final BlockingQueue<Runnable> queue = new ArrayBlockingQueue<Runnable>(queueSize);
    this.executor = new ThreadPoolExecutor(corePoolSize, maximumPoolSize, keepAlive, TimeUnit.SECONDS, queue);

    this.paused = false;
    this.terminationRequest = false;

    this.autorun = flowCfg.isAutorun();
    if (this.autorun) {
        if (LOGGER.isInfoEnabled()) {
            LOGGER.info("Automatic Flow Startup for '" + getId() + "'");
        }
        this.resume();
    }
}
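GeoBatch sizes both the ArrayBlockingQueue and the pool from configuration. With a bounded queue, the pool only grows past corePoolSize once the queue is full, and a submission is rejected only when the queue is full and maximumPoolSize threads are already running. A small sketch illustrating that progression (the numbers are illustrative, not GeoBatch defaults):

import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

public class BoundedQueueGrowthExample {
    public static void main(String[] args) throws InterruptedException {
        BlockingQueue<Runnable> queue = new ArrayBlockingQueue<Runnable>(2);
        ThreadPoolExecutor executor = new ThreadPoolExecutor(1, 3, 30L, TimeUnit.SECONDS, queue);

        Runnable slowTask = () -> {
            try {
                Thread.sleep(2_000);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
        };

        // 1st task: runs on the single core thread.
        // 2nd and 3rd: wait in the bounded queue.
        // 4th and 5th: queue is full, so extra threads are created up to maximumPoolSize (3).
        // A 6th task submitted at this point would be rejected.
        for (int i = 0; i < 5; i++) {
            executor.execute(slowTask);
        }
        System.out.println("threads in pool: " + executor.getPoolSize()); // prints 3

        executor.shutdown();
        executor.awaitTermination(1, TimeUnit.MINUTES);
    }
}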
From source file:de.tu_dortmund.ub.data.dswarm.TaskProcessingUnit.java
private static String executeTPUTask(final String[] watchFolderFiles, final String resourceWatchFolder,
        final Optional<String> optionalOutputDataModelID, final Optional<String> optionalExportMimeType,
        final Optional<String> optionalExportFileExtension, final Integer engineThreads, final String serviceName,
        final Properties config) throws Exception {

    // create job list
    final LinkedList<Callable<String>> transforms = new LinkedList<>();

    int cnt = 1;
    for (final String watchFolderFile : watchFolderFiles) {
        LOG.info("[{}][{}] do TPU task execution '{}' for file '{}'", serviceName, cnt, cnt, watchFolderFile);

        transforms.add(new TPUTask(config, watchFolderFile, resourceWatchFolder, optionalOutputDataModelID,
                optionalExportMimeType, optionalExportFileExtension, serviceName, cnt));
        cnt++;
    }

    // work on jobs
    final ThreadPoolExecutor pool = new ThreadPoolExecutor(engineThreads, engineThreads, 0L, TimeUnit.SECONDS,
            new LinkedBlockingQueue<>());

    try {
        final List<Future<String>> futureList = pool.invokeAll(transforms);

        final StringBuilder resultSB = new StringBuilder();
        for (final Future<String> f : futureList) {
            final String message = f.get();
            LOG.info(message);
            resultSB.append(message).append("\n");
        }

        return resultSB.toString();
    } catch (final Exception e) {
        LOG.error("something went wrong", e);
        throw e;
    } finally {
        pool.shutdown();
    }
}
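The pattern here is a fixed-size pool (engineThreads for both core and maximum, zero keep-alive, unbounded queue) driven through invokeAll, which blocks until every Callable has completed and returns the Futures in submission order. A condensed, self-contained version of the same idea (the task contents are illustrative):

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.Future;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

public class InvokeAllExample {
    public static void main(String[] args) throws Exception {
        int engineThreads = 4;
        ThreadPoolExecutor pool = new ThreadPoolExecutor(engineThreads, engineThreads, 0L, TimeUnit.SECONDS,
                new LinkedBlockingQueue<Runnable>());

        List<Callable<String>> jobs = new ArrayList<>();
        for (int i = 0; i < 10; i++) {
            final int jobId = i;
            jobs.add(() -> "job " + jobId + " done");
        }

        try {
            // Blocks until all jobs have finished (or failed).
            List<Future<String>> results = pool.invokeAll(jobs);
            for (Future<String> result : results) {
                System.out.println(result.get()); // get() rethrows any exception the job threw
            }
        } finally {
            pool.shutdown(); // always release the worker threads
        }
    }
}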
From source file:com.pinterest.rocksplicator.controller.DispatcherTest.java
@Test
public void testRetryTask() throws Exception {
    final String errorMsg = "Boom!!!";
    TaskBase task = new ThrowingTask(errorMsg).retry(3).getEntity();
    final CountDownLatch latch = new CountDownLatch(3);
    FIFOTaskQueue tq = new FIFOTaskQueue(10) {
        @Override
        public boolean finishTask(final long id, final String output) {
            latch.countDown();
            return super.finishTask(id, output);
        }

        @Override
        public long finishTaskAndEnqueueRunningTask(final long id, final String output, final TaskBase newTask,
                final String worker) {
            latch.countDown();
            return super.finishTaskAndEnqueueRunningTask(id, output, newTask, worker);
        }
    };
    tq.enqueueTask(task, Integer.toString(++nameCounter), 0);

    Semaphore idleWorkersSemaphore = new Semaphore(2);
    ThreadPoolExecutor threadPoolExecutor = new ThreadPoolExecutor(2, 2, 0, TimeUnit.SECONDS,
            new ArrayBlockingQueue<Runnable>(2));
    WorkerPool workerPool = new WorkerPool(threadPoolExecutor, idleWorkersSemaphore, tq);
    TaskDispatcher dispatcher = new TaskDispatcher(2, idleWorkersSemaphore, workerPool, tq);
    dispatcher.start();

    Assert.assertTrue(latch.await(30, TimeUnit.SECONDS));
    Assert.assertEquals(tq.getResult(0), errorMsg);
    Assert.assertEquals(tq.getResult(1), errorMsg);
    Assert.assertEquals(tq.getResult(2), errorMsg);
    dispatcher.stop();
}
From source file:com.kenshoo.freemarker.services.FreeMarkerService.java
@PostConstruct
public void postConstruct() {
    int actualMaxQueueLength = maxQueueLength != null ? maxQueueLength
            : Math.max(MIN_DEFAULT_MAX_QUEUE_LENGTH,
                    (int) (MAX_DEFAULT_MAX_QUEUE_LENGTH_MILLISECONDS / maxTemplateExecutionTime));

    ThreadPoolExecutor threadPoolExecutor = new ThreadPoolExecutor(maxThreads, maxThreads,
            THREAD_KEEP_ALIVE_TIME, TimeUnit.MILLISECONDS,
            new BlockingArrayQueue<Runnable>(actualMaxQueueLength));
    threadPoolExecutor.allowCoreThreadTimeOut(true);
    templateExecutor = threadPoolExecutor;
}
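The notable call here is allowCoreThreadTimeOut(true): because corePoolSize and maximumPoolSize are both maxThreads, the pool would otherwise keep every thread alive indefinitely, but this flag lets even core threads terminate after the keep-alive period when the service is idle. (BlockingArrayQueue is not a JDK class; it appears to come from Jetty's utility library. The sketch below substitutes a plain java.util.concurrent.ArrayBlockingQueue and uses illustrative sizes.)

import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

public class CoreThreadTimeoutExample {
    public static void main(String[] args) throws InterruptedException {
        int maxThreads = 8;
        int maxQueueLength = 100;

        ThreadPoolExecutor executor = new ThreadPoolExecutor(maxThreads, maxThreads, 5_000L,
                TimeUnit.MILLISECONDS, new ArrayBlockingQueue<Runnable>(maxQueueLength));

        // Without this, a fixed-size pool keeps idle core threads alive indefinitely.
        executor.allowCoreThreadTimeOut(true);

        executor.execute(() -> System.out.println("rendered a template"));

        Thread.sleep(6_000);
        System.out.println("threads left after idle period: " + executor.getPoolSize()); // 0
        executor.shutdown();
    }
}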
From source file:nl.uva.sne.disambiguators.Wikidata.java
private Map<String, List<String>> getbroaderIDS(Set<Term> terms)
        throws MalformedURLException, InterruptedException, ExecutionException {
    Map<String, List<String>> map = new HashMap<>();
    if (terms.size() > 0) {
        int maxT = 2;
        BlockingQueue<Runnable> workQueue = new ArrayBlockingQueue(maxT);
        ExecutorService pool = new ThreadPoolExecutor(maxT, maxT, 500L, TimeUnit.MICROSECONDS, workQueue);
        //        ExecutorService pool = new ThreadPoolExecutor(maxT, maxT,
        //                5000L, TimeUnit.MILLISECONDS,
        //                new ArrayBlockingQueue<>(maxT, true), new ThreadPoolExecutor.CallerRunsPolicy());

        Set<Future<Map<String, List<String>>>> set1 = new HashSet<>();
        String prop = "P31";
        for (Term t : terms) {
            URL url = new URL(page + "?action=wbgetclaims&format=json&props=&property=" + prop + "&entity="
                    + t.getUID());
            System.err.println(url);
            WikiRequestor req = new WikiRequestor(url, t.getUID(), 1);
            Future<Map<String, List<String>>> future = pool.submit(req);
            set1.add(future);
        }
        pool.shutdown();

        for (Future<Map<String, List<String>>> future : set1) {
            while (!future.isDone()) {
                //                Logger.getLogger(Wikipedia.class.getName()).log(Level.INFO, "Task is not completed yet....");
                Thread.currentThread().sleep(10);
            }
            Map<String, List<String>> c = future.get();
            if (c != null) {
                map.putAll(c);
            }
        }
    }
    return map;
}
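Two details are worth noting in this example: the very short keep-alive (500 microseconds) paired with an ArrayBlockingQueue sized to the number of worker threads, and the busy-wait loop that polls Future.isDone() before calling get(). Since Future.get() already blocks until the result is available, the polling loop is not strictly necessary; a simpler equivalent of the gather phase looks like the sketch below (the Callable stands in for WikiRequestor, which is specific to this project):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

public class FutureGatherExample {
    public static void main(String[] args) throws Exception {
        int maxT = 2;
        ExecutorService pool = new ThreadPoolExecutor(maxT, maxT, 500L, TimeUnit.MICROSECONDS,
                new ArrayBlockingQueue<Runnable>(maxT));

        List<Future<Map<String, List<String>>>> futures = new ArrayList<>();
        for (int i = 0; i < maxT; i++) {
            final String entityId = "Q" + i; // stand-in for a Wikidata entity id
            Callable<Map<String, List<String>>> request = () -> {
                Map<String, List<String>> result = new HashMap<>();
                result.put(entityId, Arrays.asList("P31"));
                return result;
            };
            futures.add(pool.submit(request));
        }
        pool.shutdown();

        Map<String, List<String>> merged = new HashMap<>();
        for (Future<Map<String, List<String>>> future : futures) {
            merged.putAll(future.get()); // blocks until this task finishes; no isDone() polling needed
        }
        System.out.println(merged);
    }
}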
From source file:org.apache.hive.jdbc.TestJdbcWithMiniHS2.java
private static void startConcurrencyTest(Connection conn, String tableName, int numTasks) {
    // Start concurrent testing
    int POOL_SIZE = 100;
    int TASK_COUNT = numTasks;

    SynchronousQueue<Runnable> executorQueue = new SynchronousQueue<Runnable>();
    ExecutorService workers = new ThreadPoolExecutor(1, POOL_SIZE, 20, TimeUnit.SECONDS, executorQueue);
    List<Future<Boolean>> list = new ArrayList<Future<Boolean>>();
    int i = 0;
    while (i < TASK_COUNT) {
        try {
            Future<Boolean> future = workers.submit(new JDBCTask(conn, i, tableName));
            list.add(future);
            i++;
        } catch (RejectedExecutionException ree) {
            try {
                TimeUnit.MILLISECONDS.sleep(100);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
        }
    }

    for (Future<Boolean> future : list) {
        try {
            Boolean result = future.get(30, TimeUnit.SECONDS);
            assertTrue(result);
        } catch (ExecutionException ee) {
            fail("Concurrent Statement failed: " + ee.getCause());
        } catch (TimeoutException te) {
            System.out.println("Task was timeout after 30 second: " + te);
        } catch (CancellationException ce) {
            System.out.println("Task was interrupted: " + ce);
        } catch (InterruptedException ie) {
            System.out.println("Thread was interrupted: " + ie);
        }
    }
    workers.shutdown();
}
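This test uses the rejection behaviour of a SynchronousQueue as back-pressure: the pool grows from 1 thread up to POOL_SIZE, and once every thread is busy, submit() throws RejectedExecutionException, which the loop treats as a signal to sleep briefly and retry the same task. A minimal sketch of that submit-with-retry loop (the sizes and task body are illustrative):

import java.util.concurrent.ExecutorService;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.SynchronousQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

public class SubmitWithRetryExample {
    public static void main(String[] args) throws InterruptedException {
        ExecutorService workers = new ThreadPoolExecutor(1, 4, 20, TimeUnit.SECONDS,
                new SynchronousQueue<Runnable>());

        int submitted = 0;
        while (submitted < 20) {
            try {
                final int taskId = submitted;
                workers.submit(() -> {
                    try {
                        Thread.sleep(100);  // simulate a short unit of work
                    } catch (InterruptedException e) {
                        Thread.currentThread().interrupt();
                    }
                    System.out.println("task " + taskId + " finished");
                });
                submitted++;
            } catch (RejectedExecutionException ree) {
                // All 4 workers are busy; back off and try the same task again.
                TimeUnit.MILLISECONDS.sleep(100);
            }
        }

        workers.shutdown();
        workers.awaitTermination(1, TimeUnit.MINUTES);
    }
}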
From source file:com.lucidtechnics.blackboard.Blackboard.java
private void init() {
    java.io.File file = new java.io.File(getAppsHome());

    if (file.exists() == false) {
        file.mkdirs();
    }

    java.io.File appsDirectory = new java.io.File(getAppsHome());

    if (appsDirectory.isDirectory() != true) {
        throw new RuntimeException(
                "Directory: " + getAppsHome() + " as set in blackboard.apps.home is not a directory");
    }

    java.io.File[] directoryFiles = appsDirectory.listFiles();

    for (int i = 0; i < directoryFiles.length; i++) {
        if (directoryFiles[i].isDirectory() == true) {
            String appName = directoryFiles[i].getName();

            if (logger.isInfoEnabled() == true) {
                logger.info("Configuring app: " + appName);
            }

            java.io.File[] workspaceDirectoryFiles = directoryFiles[i].listFiles();

            for (int j = 0; j < workspaceDirectoryFiles.length; j++) {
                if (workspaceDirectoryFiles[j].isDirectory() == true) {
                    String workspaceName = workspaceDirectoryFiles[j].getName();

                    if (logger.isInfoEnabled() == true) {
                        logger.info("Processing workspace: " + workspaceName);
                    }

                    java.io.File[] eventDirectoryFiles = workspaceDirectoryFiles[j].listFiles();

                    WorkspaceConfiguration workspaceConfiguration = configureWorkspace(appName, workspaceName,
                            workspaceDirectoryFiles[j]);

                    for (int k = 0; k < eventDirectoryFiles.length; k++) {
                        if (eventDirectoryFiles[k].isDirectory() == true) {
                            processEventPlans(appName, workspaceName, workspaceConfiguration,
                                    eventDirectoryFiles[k]);
                        }
                    }
                }
            }
        }
    }

    if (logger.isInfoEnabled() == true) {
        logger.info("Loaded event configurations: " + getEventToWorkspaceMap());
    }

    setBlackboardExecutor(new ThreadPoolExecutor(1, 1, 100, TimeUnit.SECONDS, new LinkedBlockingQueue()));
    setScheduledBlackboardExecutor(new ScheduledThreadPoolExecutor(getMaxScheduledBlackboardThread()));

    for (int i = 0; i <= getMaxWorkspaceThread(); i++) {
        getWorkspaceExecutorMap().put(i,
                new ThreadPoolExecutor(1, 1, 100, TimeUnit.SECONDS, new LinkedBlockingQueue()));
    }

    setPersistenceExecutor(new ThreadPoolExecutor(getMaxPersistenceThread(), getMaxPersistenceThread(), 100,
            TimeUnit.SECONDS, new LinkedBlockingQueue()));

    if (logger.isInfoEnabled() == true) {
        logger.info("Blackboard Workspace Server Initialization Inception.");
        logger.info("Apache 2.0 Open Source License.");
        logger.info("Copyright Owner - Lucid Technics, LLC.");
        logger.info("Authors - Bediako Ntodi George and David Yuctan Hodge.");
        logger.info("Initialization was successful.");
    }

    org.apache.jcs.JCS.setConfigFilename("/blackboard.ccf");
}
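Blackboard creates several pools with corePoolSize and maximumPoolSize of 1 and an unbounded LinkedBlockingQueue, which makes each one a serial executor: tasks submitted to a given workspace executor run one at a time in submission order, while different workspaces proceed in parallel on their own executors. A small sketch of that per-key serialization idea (the map, keys, and tasks are illustrative, not Blackboard's):

import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

public class SerialExecutorPerKeyExample {
    public static void main(String[] args) throws InterruptedException {
        Map<Integer, ExecutorService> workspaceExecutorMap = new HashMap<>();

        // One single-threaded executor per workspace slot: work within a slot is serialized.
        for (int i = 0; i < 3; i++) {
            workspaceExecutorMap.put(i,
                    new ThreadPoolExecutor(1, 1, 100, TimeUnit.SECONDS, new LinkedBlockingQueue<Runnable>()));
        }

        for (int slot = 0; slot < 3; slot++) {
            final int workspace = slot;
            for (int n = 0; n < 2; n++) {
                final int step = n;
                workspaceExecutorMap.get(slot).execute(
                        () -> System.out.println("workspace " + workspace + " step " + step));
            }
        }

        for (ExecutorService executor : workspaceExecutorMap.values()) {
            executor.shutdown();
            executor.awaitTermination(10, TimeUnit.SECONDS);
        }
    }
}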