List of usage examples for java.lang.Thread.setPriority
public final void setPriority(int newPriority)
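Before the project examples, here is a minimal self-contained sketch (not taken from any of the sources below; the worker Runnable is a placeholder). The new priority must lie between Thread.MIN_PRIORITY (1) and Thread.MAX_PRIORITY (10), otherwise setPriority throws IllegalArgumentException.

public class SetPriorityDemo {
    public static void main(String[] args) throws InterruptedException {
        // placeholder worker used only for illustration
        Thread worker = new Thread(() -> System.out
                .println("running at priority " + Thread.currentThread().getPriority()), "background-worker");

        // valid values range from Thread.MIN_PRIORITY (1) to Thread.MAX_PRIORITY (10);
        // NORM_PRIORITY (5) is the default inherited from the creating thread
        worker.setPriority(Thread.MIN_PRIORITY);
        worker.start();
        worker.join();
    }
}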
From source file: org.geotools.utils.imagemosaic.MosaicIndexBuilder.java

/**
 * Entry point for the index builder.
 *
 * @param args
 */
public static void main(String[] args) {
    final MosaicIndexBuilder mosaicIndexBuilder = new MosaicIndexBuilder();
    mosaicIndexBuilder.addProcessingEventListener(mosaicIndexBuilder);
    if (mosaicIndexBuilder.parseArgs(args)) {
        final Thread t = new Thread(mosaicIndexBuilder, "MosaicIndexBuilder");
        t.setPriority(mosaicIndexBuilder.getPriority());
        t.start();
        try {
            t.join();
        } catch (InterruptedException e) {
            LOGGER.log(Level.SEVERE, e.getLocalizedMessage(), e);
        }
    } else
        LOGGER.fine("Exiting...");
}
From source file: org.geotools.utils.imageoverviews.OverviewsEmbedder.java

/**
 * This tool is designed to be used from the command line via this main
 * class, but it can also be used from a GUI by using the setters and
 * getters.
 *
 * @param args
 * @throws IOException
 * @throws IllegalArgumentException
 * @throws InterruptedException
 */
public static void main(String[] args) throws IllegalArgumentException, IOException, InterruptedException {
    // creating an overviews embedder
    final OverviewsEmbedder overviewsEmbedder = new OverviewsEmbedder();
    // adding the embedder itself as a listener
    overviewsEmbedder.addProcessingEventListener(overviewsEmbedder);
    // parsing input argumentBuilder
    if (overviewsEmbedder.parseArgs(args)) {
        // creating a thread to execute the request process, with the
        // provided priority
        final Thread t = new Thread(overviewsEmbedder, "OverviewsEmbedder");
        t.setPriority(overviewsEmbedder.getPriority());
        t.start();
        try {
            t.join();
        } catch (InterruptedException e) {
            LOGGER.log(Level.SEVERE, e.getLocalizedMessage(), e);
        }
    } else if (LOGGER.isLoggable(Level.FINE))
        LOGGER.fine("Unable to parse command line argumentBuilder, exiting...");
}
From source file: com.moss.bdbadmin.client.ui.BdbAdminClient.java

public static void main(String[] args) throws Exception {
    Thread t = new Thread() {
        public void run() {
            try {
                Magic.initialize();
            } catch (Exception ex) {
                ex.printStackTrace();
            }
        }
    };
    t.setPriority(Thread.MIN_PRIORITY);
    t.start();

    UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName());

    HttpClient httpClient = new HttpClient();
    BdbClient.init();

    JFrame frame = new JFrame();
    ProxyFactory proxyFactory = VeracityProxyFactory.create();
    BdbAdminClient client = new BdbAdminClient(httpClient, frame, new File("config.xml"), proxyFactory);

    frame.setTitle("Bdb Network Explorer");
    frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
    frame.getContentPane().add(client);
    frame.setSize(1024, 768);
    frame.setLocationRelativeTo(null);
    frame.setVisible(true);
}
From source file: Main.java

public static Thread getLowPriorityBackgroundThread(Runnable runnable) {
    Thread thread = getBackgroundThread(runnable);
    thread.setPriority(Thread.MIN_PRIORITY + 1); // keep other stuff running smoothly
    return thread;
}
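The snippet above calls a getBackgroundThread helper that is not part of the excerpt. A plausible definition, assumed here purely so the example is self-contained (hypothetical, not taken from the original Main.java), would wrap the Runnable in a daemon thread:

// hypothetical helper, not shown in the original source: wraps the runnable
// in a daemon thread so it cannot keep the JVM alive on its own
public static Thread getBackgroundThread(Runnable runnable) {
    Thread thread = new Thread(runnable, "background");
    thread.setDaemon(true);
    return thread;
}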
From source file: Main.java

/**
 * Start all given threads and wait on each of them until all are done.
 * From Stephan Preibisch's Multithreading.java class. See:
 * http://repo.or.cz/w/trakem2.git?a=blob;f=mpi/fruitfly/general/MultiThreading.java;hb=HEAD
 * @param threads
 */
public static void startAndJoin(Thread[] threads) {
    for (Thread thread : threads) {
        thread.setPriority(Thread.NORM_PRIORITY);
        thread.start();
    }
    try {
        for (Thread thread : threads) {
            thread.join();
        }
    } catch (InterruptedException ie) {
        throw new RuntimeException(ie);
    }
}
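A typical call site, sketched for illustration (the worker Runnable and thread count are stand-ins, not from the original class), builds the thread array first and then hands it to startAndJoin:

// illustrative usage: one worker thread per available processor
Runnable work = () -> {
    // per-thread work goes here
};
Thread[] threads = new Thread[Runtime.getRuntime().availableProcessors()];
for (int i = 0; i < threads.length; i++) {
    threads[i] = new Thread(work, "worker-" + i);
}
startAndJoin(threads); // resets each thread to NORM_PRIORITY, starts it, and waits for all to finish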
From source file: Main.java

/**
 * Starts the given {@link Runnable} tasks as daemons
 *
 * @param tasks
 */
public static void startDaemon(Runnable... tasks) {
    for (Runnable task : tasks) {
        Thread thread = new Thread(task);
        thread.setDaemon(true);
        thread.setPriority(Thread.NORM_PRIORITY);
        thread.start();
    }
}
From source file: org.polymap.core.runtime.UnboundPoolExecutor.java

public static ExecutorService newInstance() {
    final int procs = Runtime.getRuntime().availableProcessors();
    final int maxThreads = procs * MAX_THREADS_PER_PROC;

    // thread factory
    ThreadFactory threadFactory = new ThreadFactory() {
        volatile int threadNumber = 0;

        public Thread newThread(Runnable r) {
            String prefix = "polymap-";
            Thread t = new Thread(r, prefix + threadNumber++);
            t.setDaemon(false);
            t.setPriority(DEFAULT_THREAD_PRIORITY);
            return t;
        }
    };

    // thread pool
    ThreadPoolExecutor executor = new ThreadPoolExecutor(procs, maxThreads, 180L, TimeUnit.SECONDS,
            new SynchronousQueue<Runnable>(), threadFactory);

    // rejected? -> wait and try again
    executor.setRejectedExecutionHandler(new RejectedExecutionHandler() {
        Random rand = new Random();

        public void rejectedExecution(Runnable r, ThreadPoolExecutor _executor) {
            do {
                try {
                    Thread.sleep(rand.nextInt(1000) + 100);
                } catch (InterruptedException e) {
                }
            } while (_executor.getActiveCount() >= maxThreads);
            _executor.execute(r);
        }
    });

    //executor.allowCoreThreadTimeOut( true );

    return executor;
}
From source file: com.lightbox.android.bitmap.BitmapFileCleanerTask.java

private static ExecutorService getExecutor() {
    if (sBitmapFileCleanerExecutor == null) {
        sBitmapFileCleanerExecutor = Executors.newSingleThreadExecutor(new ThreadFactory() {
            @Override
            public Thread newThread(Runnable r) {
                Thread thread = new Thread(r);
                thread.setName(TAG + " | " + thread.getName());
                thread.setPriority(Thread.MIN_PRIORITY);
                return thread;
            }
        });
    }
    return sBitmapFileCleanerExecutor;
}
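For context, an illustrative usage sketch (the submitted task below is hypothetical, not part of the library): because the factory hands every task to a single MIN_PRIORITY thread, cleanup work queues up and yields CPU time to more important threads.

// illustrative only: the Runnable runs on the single low-priority thread created above
ExecutorService executor = getExecutor();
executor.execute(() -> {
    // e.g. delete stale cached bitmap files without competing with UI or network threads
});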
From source file: org.mwc.cmap.media.views.images.ImageLoader.java

public synchronized static ImageLoader getInstance() {
    if (instance == null) {
        instance = new ImageLoader();
        Thread thread = new Thread(instance);
        thread.setDaemon(true);
        thread.setPriority(Thread.NORM_PRIORITY - 2);
        thread.start();
    }
    return instance;
}
From source file: at.ac.ait.ubicity.fileloader.FileLoader.java

/**
 * @param _fileInfo A FileInformation object representing usage information on the file we are supposed to load:
 *                  line count already ingested, last usage time...
 * @param _keySpace Cassandra key space into which to ingest
 * @param _host Cassandra host / server
 * @param _batchSize MutationBatch size
 * @throws Exception Shouldn't happen, although the Disruptor may throw an Exception under duress
 */
@SuppressWarnings("unchecked")
public final static void load(final FileInformation _fileInfo, final String _keySpace, final String _host,
        final int _batchSize) throws Exception {
    if (!cassandraInitialized) {
        keySpace = AstyanaxInitializer.doInit("Test Cluster", _host, _keySpace);
        cassandraInitialized = true;
    }

    LongTimeStampSorter tsSorter = new LongTimeStampSorter();
    Thread tTSSorter = new Thread(tsSorter);
    tTSSorter.setPriority(Thread.MAX_PRIORITY - 1);
    tTSSorter.setName("long timestamp sorter ");
    tTSSorter.start();

    //get the log id from the file's URI
    final String log_id = _fileInfo.getURI().toString();

    final MutationBatch batch = keySpace.prepareMutationBatch();
    logger.info("got keyspace " + keySpace.getKeyspaceName() + " from Astyanax initializer");

    final LineIterator onLines = FileUtils.lineIterator(new File(_fileInfo.getURI()));
    final ExecutorService exec = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors() * 2);

    ColumnFamily crawl_stats = null;
    AggregationJob aggregationJob = new AggregationJob(keySpace, crawl_stats);
    Thread tAggJob = new Thread(aggregationJob);
    tAggJob.setName("Monitrix loader / aggregation job ");
    tAggJob.setPriority(Thread.MIN_PRIORITY + 1);
    tAggJob.start();
    logger.info("[FILELOADER] started aggregation job, ring buffer running");

    final Disruptor<SingleLogLineAsString> disruptor = new Disruptor(SingleLogLineAsString.EVENT_FACTORY,
            (int) Math.pow(TWO, 17), exec);
    SingleLogLineAsStringEventHandler.batch = batch;
    SingleLogLineAsStringEventHandler.keySpace = keySpace;
    SingleLogLineAsStringEventHandler.batchSize = _batchSize;
    SingleLogLineAsStringEventHandler.LOG_ID = log_id;
    SingleLogLineAsStringEventHandler.tsSorter = tsSorter;
    SingleLogLineAsStringEventHandler.aggregationJob = aggregationJob;

    //The EventHandler contains the actual logic for ingesting
    final EventHandler<SingleLogLineAsString> handler = new SingleLogLineAsStringEventHandler();
    disruptor.handleEventsWith(handler);

    //get our Aggregate job in place
    //we are almost ready to start
    final RingBuffer<SingleLogLineAsString> rb = disruptor.start();

    int _lineCount = 0;
    long _start, _lapse;
    _start = System.nanoTime();

    int _linesAlreadyProcessed = _fileInfo.getLineCount();

    //cycle through the lines already processed
    while (_lineCount < _linesAlreadyProcessed) {
        onLines.nextLine();
        _lineCount++;
    }

    //now get down to the work we actually must do, and fill the ring buffer
    logger.info("begin proccessing of file " + _fileInfo.getURI() + " @line #" + _lineCount);
    while (onLines.hasNext()) {
        final long _seq = rb.next();
        final SingleLogLineAsString event = rb.get(_seq);
        event.setValue(onLines.nextLine());
        rb.publish(_seq);
        _lineCount++;
    }
    _lapse = System.nanoTime() - _start;
    logger.info("ended proccessing of file " + _fileInfo.getURI() + " @line #" + _lineCount);

    //stop, waiting for last threads still busy to finish their work
    disruptor.shutdown();

    //update the file info, this will land in the cache
    _fileInfo.setLineCount(_lineCount);
    _fileInfo.setLastAccess(System.currentTimeMillis());
    int _usageCount = _fileInfo.getUsageCount();
    _fileInfo.setUsageCount(_usageCount++);

    //make sure we release resources
    onLines.close();

    logger.info("handled " + (_lineCount - _linesAlreadyProcessed) + " log lines in " + _lapse + " nanoseconds");

    //now go to aggregation step
    SortedSet<Long> timeStamps = new TreeSet(tsSorter.timeStamps);
    long _minTs = timeStamps.first();
    long _maxTs = timeStamps.last();
    logger.info("**** min TimeStamp = " + _minTs);
    logger.info("**** max TimeStamp = " + _maxTs);

    StatsTableActualizer.update(_fileInfo.getURI().toString(), _minTs, _maxTs, _lineCount);

    //        AggregationJob aggJob = new AggregationJob( keySpace, _host, _batchSize );
    //        Thread tAgg = new Thread( aggJob );
    //        tAgg.setName( "aggregation job " );
    //        tAgg.setPriority( Thread.MAX_PRIORITY - 1 );
    //        tAgg.start();
}