List of usage examples for java.util.concurrent TimeUnit DAYS
TimeUnit DAYS
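Before the project examples, a minimal, self-contained sketch of what TimeUnit.DAYS actually does: it is the fixed 24-hour unit of java.util.concurrent.TimeUnit, and its value lies in converting day counts to and from finer units without hand-written arithmetic. The class name and numbers below are illustrative only and not taken from any project listed here.

import java.util.concurrent.TimeUnit;

// Illustrative demo; not taken from any of the projects listed below.
public class TimeUnitDaysDemo {
    public static void main(String[] args) {
        long millisPerDay = TimeUnit.DAYS.toMillis(1);        // 86400000
        long hoursInWeek = TimeUnit.DAYS.toHours(7);          // 168
        long secondsIn30Days = TimeUnit.DAYS.toSeconds(30);   // 2592000

        // Going the other way truncates: 25 hours is only 1 whole day.
        long wholeDays = TimeUnit.MILLISECONDS.toDays(90000000L);

        // convert(duration, sourceUnit) is the general form of the same conversion.
        long sameWholeDays = TimeUnit.DAYS.convert(90000000L, TimeUnit.MILLISECONDS);

        System.out.println(millisPerDay + " " + hoursInWeek + " " + secondsIn30Days
                + " " + wholeDays + " " + sameWholeDays);
    }
}

The project snippets below use these conversions (toMillis, convert) and also pass DAYS as the timeout unit for ExecutorService.awaitTermination.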
From source file:com.netflix.aegisthus.tools.DirectoryWalker.java
private void awaitTermination() {
    if (threaded && !es.isShutdown()) {
        es.shutdown();
        try {
            es.awaitTermination(Long.MAX_VALUE, TimeUnit.DAYS);
        } catch (InterruptedException e) {
        }
    }
}
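The DirectoryWalker snippet passes Long.MAX_VALUE with TimeUnit.DAYS so that awaitTermination effectively never times out. A stand-alone sketch of the same shutdown idiom, assuming a throwaway executor and task made up purely for illustration:

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class ShutdownDemo {
    public static void main(String[] args) {
        ExecutorService es = Executors.newFixedThreadPool(2);
        es.submit(() -> System.out.println("work"));
        es.shutdown(); // stop accepting new tasks
        try {
            // Long.MAX_VALUE days never elapses in practice, so this blocks
            // until every submitted task has finished.
            es.awaitTermination(Long.MAX_VALUE, TimeUnit.DAYS);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // preserve the interrupt, unlike the snippet above
        }
    }
}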
From source file:com.marklogic.contentpump.LocalJobRunner.java
/**
 * Run the job. Get the input splits, create map tasks and submit them to
 * the thread pool if there is one; otherwise, run the tasks one by one.
 *
 * @param <INKEY>
 * @param <INVALUE>
 * @param <OUTKEY>
 * @param <OUTVALUE>
 * @throws Exception
 */
@SuppressWarnings("unchecked")
public <INKEY, INVALUE, OUTKEY, OUTVALUE, T extends org.apache.hadoop.mapreduce.InputSplit> void run()
        throws Exception {
    Configuration conf = job.getConfiguration();
    InputFormat<INKEY, INVALUE> inputFormat = (InputFormat<INKEY, INVALUE>) ReflectionUtils
            .newInstance(job.getInputFormatClass(), conf);
    List<InputSplit> splits = inputFormat.getSplits(job);
    T[] array = (T[]) splits.toArray(new org.apache.hadoop.mapreduce.InputSplit[splits.size()]);

    // sort the splits into order based on size, so that the biggest
    // goes first
    Arrays.sort(array, new SplitLengthComparator());

    OutputFormat<OUTKEY, OUTVALUE> outputFormat = (OutputFormat<OUTKEY, OUTVALUE>) ReflectionUtils
            .newInstance(job.getOutputFormatClass(), conf);
    Class<? extends Mapper<?, ?, ?, ?>> mapperClass = job.getMapperClass();
    Mapper<INKEY, INVALUE, OUTKEY, OUTVALUE> mapper = (Mapper<INKEY, INVALUE, OUTKEY, OUTVALUE>) ReflectionUtils
            .newInstance(mapperClass, conf);
    try {
        outputFormat.checkOutputSpecs(job);
    } catch (Exception ex) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("Error checking output specification: ", ex);
        } else {
            LOG.error("Error checking output specification: ");
            LOG.error(ex.getMessage());
        }
        return;
    }
    conf = job.getConfiguration();
    progress = new AtomicInteger[splits.size()];
    for (int i = 0; i < splits.size(); i++) {
        progress[i] = new AtomicInteger();
    }
    Monitor monitor = new Monitor();
    monitor.start();
    reporter = new ContentPumpReporter();
    List<Future<Object>> taskList = new ArrayList<Future<Object>>();
    for (int i = 0; i < array.length; i++) {
        InputSplit split = array[i];
        if (pool != null) {
            LocalMapTask<INKEY, INVALUE, OUTKEY, OUTVALUE> task = new LocalMapTask<INKEY, INVALUE, OUTKEY, OUTVALUE>(
                    inputFormat, outputFormat, conf, i, split, reporter, progress[i]);
            availableThreads = assignThreads(i, array.length);
            Class<? extends Mapper<?, ?, ?, ?>> runtimeMapperClass = job.getMapperClass();
            if (availableThreads > 1 && availableThreads != threadsPerSplit) {
                // possible runtime adjustment
                if (runtimeMapperClass != (Class) MultithreadedMapper.class) {
                    runtimeMapperClass = (Class<? extends Mapper<INKEY, INVALUE, OUTKEY, OUTVALUE>>) cmd
                            .getRuntimeMapperClass(job, mapperClass, threadsPerSplit, availableThreads);
                }
                if (runtimeMapperClass != mapperClass) {
                    task.setMapperClass(runtimeMapperClass);
                }
                if (runtimeMapperClass == (Class) MultithreadedMapper.class) {
                    task.setThreadCount(availableThreads);
                    if (LOG.isDebugEnabled()) {
                        LOG.debug("Thread Count for Split#" + i + " : " + availableThreads);
                    }
                }
            }
            if (runtimeMapperClass == (Class) MultithreadedMapper.class) {
                synchronized (pool) {
                    taskList.add(pool.submit(task));
                    pool.wait();
                }
            } else {
                pool.submit(task);
            }
        } else { // single-threaded
            JobID jid = new JobID();
            TaskID taskId = new TaskID(jid.getJtIdentifier(), jid.getId(), TaskType.MAP, i);
            TaskAttemptID taskAttemptId = new TaskAttemptID(taskId, 0);
            TaskAttemptContext context = ReflectionUtil.createTaskAttemptContext(conf, taskAttemptId);
            RecordReader<INKEY, INVALUE> reader = inputFormat.createRecordReader(split, context);
            RecordWriter<OUTKEY, OUTVALUE> writer = outputFormat.getRecordWriter(context);
            OutputCommitter committer = outputFormat.getOutputCommitter(context);
            TrackingRecordReader trackingReader = new TrackingRecordReader(reader, progress[i]);

            Mapper.Context mapperContext = ReflectionUtil.createMapperContext(mapper, conf, taskAttemptId,
                    trackingReader, writer, committer, reporter, split);

            trackingReader.initialize(split, mapperContext);

            // no thread pool (only 1 thread specified)
            Class<? extends Mapper<?, ?, ?, ?>> mapClass = job.getMapperClass();
            mapperContext.getConfiguration().setClass(CONF_MAPREDUCE_JOB_MAP_CLASS, mapClass, Mapper.class);
            mapper = (Mapper<INKEY, INVALUE, OUTKEY, OUTVALUE>) ReflectionUtils.newInstance(mapClass,
                    mapperContext.getConfiguration());
            mapper.run(mapperContext);
            trackingReader.close();
            writer.close(mapperContext);
            committer.commitTask(context);
        }
    }
    // wait till all tasks are done
    if (pool != null) {
        for (Future<Object> f : taskList) {
            f.get();
        }
        pool.shutdown();
        // poll (at most once per day) until the pool has fully terminated
        while (!pool.awaitTermination(1, TimeUnit.DAYS));
        jobComplete.set(true);
    }
    monitor.interrupt();
    monitor.join(1000);

    // report counters
    Iterator<CounterGroup> groupIt = reporter.counters.iterator();
    while (groupIt.hasNext()) {
        CounterGroup group = groupIt.next();
        LOG.info(group.getDisplayName() + ": ");
        Iterator<Counter> counterIt = group.iterator();
        while (counterIt.hasNext()) {
            Counter counter = counterIt.next();
            LOG.info(counter.getDisplayName() + ": " + counter.getValue());
        }
    }
    LOG.info("Total execution time: " + (System.currentTimeMillis() - startTime) / 1000 + " sec");
}
From source file:com.adithya321.sharesanalysis.fragments.DetailFragment.java
private void setShareSales(View view) {
    TextView totalSharesPurchasedTV = (TextView) view.findViewById(R.id.detail_total_shares_sold);
    TextView totalValueTV = (TextView) view.findViewById(R.id.detail_total_value_sold);
    TextView targetSalePriceTV = (TextView) view.findViewById(R.id.detail_target);
    TextView differenceTV = (TextView) view.findViewById(detail_difference);

    int totalSharesSold = 0;
    int totalSharesPurchased = 0;
    double totalValueSold = 0;
    double totalValuePurchased = 0;
    double averageShareValue = 0;
    double targetSalePrice = 0;
    double difference = 0;

    RealmList<Purchase> purchases = share.getPurchases();
    for (Purchase purchase : purchases) {
        if (purchase.getType().equals("sell")) {
            totalSharesSold += purchase.getQuantity();
            totalValueSold += (purchase.getQuantity() * purchase.getPrice());
        } else if (purchase.getType().equals("buy")) {
            totalSharesPurchased += purchase.getQuantity();
            totalValuePurchased += (purchase.getQuantity() * purchase.getPrice());
        }
    }
    if (totalSharesPurchased != 0)
        averageShareValue = totalValuePurchased / totalSharesPurchased;

    Date today = new Date();
    Date start = share.getDateOfInitialPurchase();
    long noOfDays = DateUtils.getDateDiff(start, today, TimeUnit.DAYS);

    SharedPreferences sharedPreferences = getActivity().getSharedPreferences("prefs", 0);
    double target = sharedPreferences.getFloat("target", 0);
    targetSalePrice = averageShareValue * Math.pow((1 + (target / 100)), ((double) noOfDays / 365));
    difference = share.getCurrentShareValue() - targetSalePrice;

    if (difference < 0)
        differenceTV.setTextColor(getResources().getColor((android.R.color.holo_red_dark)));
    else
        differenceTV.setTextColor(getResources().getColor((R.color.colorPrimary)));

    totalSharesPurchasedTV.setText(String.valueOf(totalSharesSold));
    totalValueTV.setText(String.valueOf(NumberUtils.round(totalValueSold, 2)));
    targetSalePriceTV.setText(String.valueOf(NumberUtils.round(targetSalePrice, 2)));
    differenceTV.setText(String.valueOf(NumberUtils.round(difference, 2)));
}
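The fragment above relies on a project helper, DateUtils.getDateDiff(start, today, TimeUnit.DAYS), to get the number of days between the first purchase and today. A plausible implementation of such a helper, assuming it simply delegates to TimeUnit conversion (the project's real DateUtils may differ):

import java.util.Date;
import java.util.concurrent.TimeUnit;

// Hypothetical helper; shown only to illustrate how TimeUnit.DAYS fits in.
public final class DateUtils {

    private DateUtils() {
    }

    /** Difference between two dates in the requested unit, truncated toward zero. */
    public static long getDateDiff(Date from, Date to, TimeUnit unit) {
        long diffMillis = to.getTime() - from.getTime();
        return unit.convert(diffMillis, TimeUnit.MILLISECONDS);
    }
}

Called with TimeUnit.DAYS, the whole number of elapsed days comes back and feeds the compounded target-price calculation (an annual target percentage scaled by noOfDays / 365).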
From source file:com.linkedin.pinot.core.indexsegment.generator.SegmentGeneratorConfig.java
public TimeUnit getSegmentTimeUnit() {
    if (_segmentTimeUnit != null) {
        return _segmentTimeUnit;
    } else {
        if (_schema.getTimeFieldSpec() != null) {
            if (_schema.getTimeFieldSpec().getOutgoingGranularitySpec() != null) {
                return _schema.getTimeFieldSpec().getOutgoingGranularitySpec().getTimeType();
            }
            if (_schema.getTimeFieldSpec().getIncomingGranularitySpec() != null) {
                return _schema.getTimeFieldSpec().getIncomingGranularitySpec().getTimeType();
            }
        }
        return TimeUnit.DAYS;
    }
}
From source file:CV.java
public CV(AbstractCELA la, AbstractLearningProblem lp, final AbstractReasonerComponent rs, int folds,
        boolean leaveOneOut) {

    // console rendering of class expressions
    ManchesterOWLSyntaxOWLObjectRendererImpl renderer = new ManchesterOWLSyntaxOWLObjectRendererImpl();
    ToStringRenderer.getInstance().setRenderer(renderer);
    ToStringRenderer.getInstance().setShortFormProvider(new SimpleShortFormProvider());

    // the training and test sets used later on
    List<Set<OWLIndividual>> trainingSetsPos = new LinkedList<Set<OWLIndividual>>();
    List<Set<OWLIndividual>> trainingSetsNeg = new LinkedList<Set<OWLIndividual>>();
    List<Set<OWLIndividual>> testSetsPos = new LinkedList<Set<OWLIndividual>>();
    List<Set<OWLIndividual>> testSetsNeg = new LinkedList<Set<OWLIndividual>>();

    // get examples and shuffle them too
    Set<OWLIndividual> posExamples;
    Set<OWLIndividual> negExamples;
    if (lp instanceof PosNegLP) {
        posExamples = OWLAPIConverter.getOWLAPIIndividuals(((PosNegLP) lp).getPositiveExamples());
        negExamples = OWLAPIConverter.getOWLAPIIndividuals(((PosNegLP) lp).getNegativeExamples());
    } else if (lp instanceof PosOnlyLP) {
        posExamples = OWLAPIConverter.getOWLAPIIndividuals(((PosOnlyLP) lp).getPositiveExamples());
        negExamples = new HashSet<OWLIndividual>();
    } else {
        throw new IllegalArgumentException("Only PosNeg and PosOnly learning problems are supported");
    }
    List<OWLIndividual> posExamplesList = new LinkedList<OWLIndividual>(posExamples);
    List<OWLIndividual> negExamplesList = new LinkedList<OWLIndividual>(negExamples);
    Collections.shuffle(posExamplesList, new Random(1));
    Collections.shuffle(negExamplesList, new Random(2));

    // sanity check whether nr. of folds makes sense for this benchmark
    if (!leaveOneOut && (posExamples.size() < folds && negExamples.size() < folds)) {
        System.out.println("The number of folds is higher than the number of "
                + "positive/negative examples. This can result in empty test sets. Exiting.");
        System.exit(0);
    }

    if (leaveOneOut) {
        // note that leave-one-out is not identical to k-fold with
        // k = nr. of examples in the current implementation, because
        // with n folds and n examples there is no guarantee that a fold
        // is never empty (this is an implementation issue)
        int nrOfExamples = posExamples.size() + negExamples.size();
        for (int i = 0; i < nrOfExamples; i++) {
            // ...
        }
        System.out.println("Leave-one-out not supported yet.");
        System.exit(1);
    } else {
        // calculating where to split the sets; note that we split
        // positive and negative examples separately such that the
        // distribution of positive and negative examples remains similar
        // (note that there are better but more complex ways to implement this,
        // which guarantee that the sum of the elements of a fold for pos
        // and neg differs by at most 1 - it can differ by 2 in our implementation,
        // e.g. with 3 folds, 4 pos. examples, 4 neg. examples)
        int[] splitsPos = calculateSplits(posExamples.size(), folds);
        int[] splitsNeg = calculateSplits(negExamples.size(), folds);
        // System.out.println(splitsPos[0]);
        // System.out.println(splitsNeg[0]);

        // calculating training and test sets
        for (int i = 0; i < folds; i++) {
            Set<OWLIndividual> testPos = getTestingSet(posExamplesList, splitsPos, i);
            Set<OWLIndividual> testNeg = getTestingSet(negExamplesList, splitsNeg, i);
            testSetsPos.add(i, testPos);
            testSetsNeg.add(i, testNeg);
            trainingSetsPos.add(i, getTrainingSet(posExamples, testPos));
            trainingSetsNeg.add(i, getTrainingSet(negExamples, testNeg));
        }
    }

    // run the algorithm
    if (multiThreaded && lp instanceof Cloneable && la instanceof Cloneable) {
        ExecutorService es = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors() - 1);
        for (int currFold = 0; currFold < folds; currFold++) {
            try {
                final AbstractLearningProblem lpClone = (AbstractLearningProblem) lp.getClass()
                        .getMethod("clone").invoke(lp);
                final Set<OWLIndividual> trainPos = trainingSetsPos.get(currFold);
                final Set<OWLIndividual> trainNeg = trainingSetsNeg.get(currFold);
                final Set<OWLIndividual> testPos = testSetsPos.get(currFold);
                final Set<OWLIndividual> testNeg = testSetsNeg.get(currFold);
                if (lp instanceof PosNegLP) {
                    ((PosNegLP) lpClone).setPositiveExamples(OWLAPIConverter.convertIndividuals(trainPos));
                    ((PosNegLP) lpClone).setNegativeExamples(OWLAPIConverter.convertIndividuals(trainNeg));
                } else if (lp instanceof PosOnlyLP) {
                    ((PosOnlyLP) lpClone).setPositiveExamples(
                            new TreeSet<Individual>(OWLAPIConverter.convertIndividuals(trainPos)));
                }
                final AbstractCELA laClone = (AbstractCELA) la.getClass().getMethod("clone").invoke(la);
                final int i = currFold;
                es.submit(new Runnable() {
                    @Override
                    public void run() {
                        try {
                            validate(laClone, lpClone, rs, i, trainPos, trainNeg, testPos, testNeg);
                        } catch (Exception e) {
                            e.printStackTrace();
                        }
                    }
                });
            } catch (IllegalAccessException e) {
                e.printStackTrace();
            } catch (IllegalArgumentException e) {
                e.printStackTrace();
            } catch (InvocationTargetException e) {
                e.printStackTrace();
            } catch (NoSuchMethodException e) {
                e.printStackTrace();
            } catch (SecurityException e) {
                e.printStackTrace();
            }
        }
        es.shutdown();
        try {
            es.awaitTermination(1, TimeUnit.DAYS);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    } else {
        for (int currFold = 0; currFold < folds; currFold++) {
            final Set<OWLIndividual> trainPos = trainingSetsPos.get(currFold);
            final Set<OWLIndividual> trainNeg = trainingSetsNeg.get(currFold);
            final Set<OWLIndividual> testPos = testSetsPos.get(currFold);
            final Set<OWLIndividual> testNeg = testSetsNeg.get(currFold);
            if (lp instanceof PosNegLP) {
                ((PosNegLP) lp).setPositiveExamples(OWLAPIConverter.convertIndividuals(trainPos));
                ((PosNegLP) lp).setNegativeExamples(OWLAPIConverter.convertIndividuals(trainNeg));
            } else if (lp instanceof PosOnlyLP) {
                Set<Individual> convertIndividuals = OWLAPIConverter.convertIndividuals(trainPos);
                ((PosOnlyLP) lp).setPositiveExamples(new TreeSet<Individual>(convertIndividuals));
            }
            validate(la, lp, rs, currFold, trainPos, trainNeg, testPos, testNeg);
        }
    }

    outputWriter("");
    outputWriter("Finished " + folds + "-folds cross-validation.");
    outputWriter("runtime: " + statOutput(df, runtime, "s"));
    outputWriter("length: " + statOutput(df, length, ""));
    outputWriter("F-Measure on training set: " + statOutput(df, fMeasureTraining, "%"));
    outputWriter("F-Measure: " + statOutput(df, fMeasure, "%"));
    outputWriter("predictive accuracy on training set: " + statOutput(df, accuracyTraining, "%"));
    outputWriter("predictive accuracy: " + statOutput(df, accuracy, "%"));
}
From source file:org.apache.flume.sink.elasticsearch.ElasticSearchSink.java
@Override
public void configure(Context context) {
    if (!isLocal) {
        String[] hostNames = null;
        if (StringUtils.isNotBlank(context.getString(HOSTNAMES))) {
            hostNames = context.getString(HOSTNAMES).split(",");
        }
        Preconditions.checkState(hostNames != null && hostNames.length > 0, "Missing Param:" + HOSTNAMES);

        serverAddresses = new InetSocketTransportAddress[hostNames.length];
        for (int i = 0; i < hostNames.length; i++) {
            String[] hostPort = hostNames[i].split(":");
            String host = hostPort[0];
            int port = hostPort.length == 2 ? Integer.parseInt(hostPort[1]) : DEFAULT_PORT;
            serverAddresses[i] = new InetSocketTransportAddress(host, port);
        }

        Preconditions.checkState(serverAddresses != null && serverAddresses.length > 0,
                "Missing Param:" + HOSTNAMES);
    }

    if (StringUtils.isNotBlank(context.getString(INDEX_NAME))) {
        this.indexName = context.getString(INDEX_NAME);
    }

    if (StringUtils.isNotBlank(context.getString(INDEX_TYPE))) {
        this.indexType = context.getString(INDEX_TYPE);
    }

    if (StringUtils.isNotBlank(context.getString(CLUSTER_NAME))) {
        this.clusterName = context.getString(CLUSTER_NAME);
    }

    if (StringUtils.isNotBlank(context.getString(BATCH_SIZE))) {
        this.batchSize = Integer.parseInt(context.getString(BATCH_SIZE));
    }

    if (StringUtils.isNotBlank(context.getString(TTL))) {
        this.ttlMs = TimeUnit.DAYS.toMillis(Integer.parseInt(context.getString(TTL)));
        Preconditions.checkState(ttlMs > 0, TTL + " must be greater than 0 or not set.");
    }

    String serializerClazz = "org.apache.flume.sink.elasticsearch.ElasticSearchLogStashEventSerializer";
    if (StringUtils.isNotBlank(context.getString(SERIALIZER))) {
        serializerClazz = context.getString(SERIALIZER);
    }

    Context serializerContext = new Context();
    serializerContext.putAll(context.getSubProperties(SERIALIZER_PREFIX));

    try {
        @SuppressWarnings("unchecked")
        Class<? extends Configurable> clazz = (Class<? extends Configurable>) Class.forName(serializerClazz);
        Configurable serializer = clazz.newInstance();
        if (serializer instanceof ElasticSearchIndexRequestBuilderFactory) {
            indexRequestFactory = (ElasticSearchIndexRequestBuilderFactory) serializer;
        } else if (serializer instanceof ElasticSearchEventSerializer) {
            indexRequestFactory = new EventSerializerIndexRequestBuilderFactory(
                    (ElasticSearchEventSerializer) serializer);
        } else {
            throw new IllegalArgumentException(serializerClazz + " is neither an ElasticSearchEventSerializer"
                    + " nor an ElasticSearchIndexRequestBuilderFactory.");
        }
        indexRequestFactory.configure(serializerContext);
    } catch (Exception e) {
        logger.error("Could not instantiate event serializer.", e);
        Throwables.propagate(e);
    }

    if (sinkCounter == null) {
        sinkCounter = new SinkCounter(getName());
    }

    Preconditions.checkState(StringUtils.isNotBlank(indexName), "Missing Param:" + INDEX_NAME);
    Preconditions.checkState(StringUtils.isNotBlank(indexType), "Missing Param:" + INDEX_TYPE);
    Preconditions.checkState(StringUtils.isNotBlank(clusterName), "Missing Param:" + CLUSTER_NAME);
    Preconditions.checkState(batchSize >= 1, BATCH_SIZE + " must be greater than 0");
}
From source file:org.codice.ddf.commands.catalog.DuplicateCommands.java
protected long getFilterStartTime(long now) {
    long startTime = 0;
    if (lastHours > 0) {
        startTime = now - TimeUnit.HOURS.toMillis(lastHours);
    } else if (lastDays > 0) {
        startTime = now - TimeUnit.DAYS.toMillis(lastDays);
    } else if (lastWeeks > 0) {
        Calendar weeks = GregorianCalendar.getInstance();
        weeks.setTimeInMillis(now);
        weeks.add(Calendar.WEEK_OF_YEAR, -1 * lastWeeks);
        startTime = weeks.getTimeInMillis();
    } else if (lastMonths > 0) {
        Calendar months = GregorianCalendar.getInstance();
        months.setTimeInMillis(now);
        months.add(Calendar.MONTH, -1 * lastMonths);
        startTime = months.getTimeInMillis();
    }
    return startTime;
}
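getFilterStartTime maps a relative "last N hours/days/weeks/months" option onto an absolute epoch-millisecond start time: hours and days are subtracted directly with TimeUnit because they are fixed-length, while weeks and months are handled through Calendar. A minimal sketch of the fixed-length case, with made-up values:

import java.util.concurrent.TimeUnit;

// Illustrative only: back-dating a timestamp by a whole number of days.
public class FilterWindowDemo {
    public static void main(String[] args) {
        long now = System.currentTimeMillis();
        int lastDays = 7; // illustrative value
        long startTime = now - TimeUnit.DAYS.toMillis(lastDays);
        System.out.println("Query window starts at epoch millis " + startTime);
    }
}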
From source file:org.apps8os.motivator.ui.MainActivity.java
@Override
public void onResume() {
    super.onResume();
    mCurrentSprint = mSprintDataHandler.getCurrentSprint();

    // Check if there is a sprint ongoing
    if (mCurrentSprint == null) {
        mCurrentSprint = mSprintDataHandler.getLatestEndedSprint();
        if (mCurrentSprint == null) {
            Intent intent = new Intent(this, StartGuideActivity.class);
            startActivity(intent);
            finish();
        } else {
            mActionBar.setTitle(getString(R.string.app_name));
            mActionBar.setSubtitle(getString(R.string.no_active_sprint));
            EventDataHandler eventHandler = new EventDataHandler(this);
            ArrayList<MotivatorEvent> plannedEvents = eventHandler
                    .getUncheckedEventsBetween(mCurrentSprint.getStartTime(), mCurrentSprint.getEndTime());
            int succeededPlans = 0;
            for (MotivatorEvent event : plannedEvents) {
                MotivatorEvent checkedEvent = eventHandler.getCheckedEvent(event.getId());
                if (checkedEvent == null) {
                    succeededPlans += 1;
                } else {
                    if (event.getPlannedDrinks() >= checkedEvent.getPlannedDrinks()) {
                        succeededPlans += 1;
                    }
                }
            }
            mSucceededPlans = "" + succeededPlans + "/" + plannedEvents.size();
        }
    } else {
        mActionBar.setSubtitle(mCurrentSprint.getSprintTitle());
        mActionBar.setTitle(getString(R.string.day) + " " + mCurrentSprint.getCurrentDayOfTheSprint() + "/"
                + mCurrentSprint.getDaysInSprint());
    }

    final int eventAdded = mPrefs.getInt(AddEventActivity.EVENT_ADDED, -1);
    // Move to appropriate page after adding an event.
    if (eventAdded == MotivatorEvent.TODAY) {
        mViewPager.setCurrentItem(1);
        Editor editor = mPrefs.edit();
        editor.putInt(AddEventActivity.EVENT_ADDED, -1);
        editor.commit();
    } else if (eventAdded == MotivatorEvent.PLAN) {
        mViewPager.setCurrentItem(2);
        Editor editor = mPrefs.edit();
        editor.putInt(AddEventActivity.EVENT_ADDED, -1);
        editor.commit();
    }

    DayDataHandler dataHandler = new DayDataHandler(this);
    // "Yesterday" is the current time minus one day's worth of milliseconds.
    DayInHistory yesterday = dataHandler
            .getDayInHistory(System.currentTimeMillis() - TimeUnit.MILLISECONDS.convert(1, TimeUnit.DAYS));
    yesterday.setEvents();

    final Context context = this;
    final ArrayList<MotivatorEvent> yesterdayEvents = yesterday.getUncheckedEvents(this);
    // Set up the alert dialog for checking yesterday's events
    if (!yesterdayEvents.isEmpty()) {
        AlertDialog.Builder builder = new AlertDialog.Builder(this);
        builder.setTitle(getString(R.string.you_had_an_event_yesterday))
                .setPositiveButton(getString(R.string.yes), new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialog, int which) {
                        Intent resultIntent = new Intent(context, MoodQuestionActivity.class);
                        resultIntent.putExtra(MotivatorEvent.YESTERDAYS_EVENTS, yesterdayEvents);
                        resultIntent.putExtra(EventDataHandler.EVENTS_TO_CHECK, true);
                        startActivity(resultIntent);
                    }
                }).setNegativeButton(getString(R.string.no), new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialog, int which) {
                    }
                }).setMessage(getString(R.string.check_the_event));
        Dialog dialog = builder.create();
        dialog.show();
    }
}
From source file:org.eclipse.skalli.core.rest.admin.StatisticsQueryTest.java
@Test
public void testFromPeriodQuery() throws Exception {
    assertFromPeriodQuery("3m", 3, TimeUnit.MINUTES);
    assertFromPeriodQuery("2d", 1, TimeUnit.DAYS); // to must not be in the future
    assertFromPeriodQuery("-3m", 3, TimeUnit.MINUTES);
}
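The test drives a period parameter of the form "<amount><unit suffix>" and expects 'm' to select TimeUnit.MINUTES and 'd' to select TimeUnit.DAYS. As a rough illustration only (this is not Skalli's actual parser, and it does not reproduce the clamping behaviour the "2d" case appears to exercise), a suffix-to-unit mapping might look like this:

import java.util.concurrent.TimeUnit;

// Hypothetical parser for suffixes such as "3m" or "2d".
public final class PeriodParser {

    private PeriodParser() {
    }

    public static TimeUnit unitOf(String period) {
        char suffix = period.charAt(period.length() - 1);
        switch (suffix) {
        case 'm':
            return TimeUnit.MINUTES;
        case 'h':
            return TimeUnit.HOURS;
        case 'd':
            return TimeUnit.DAYS;
        default:
            throw new IllegalArgumentException("Unknown period suffix: " + suffix);
        }
    }

    public static long amountOf(String period) {
        // Math.abs tolerates a leading '-' as in the "-3m" case above.
        return Math.abs(Long.parseLong(period.substring(0, period.length() - 1)));
    }
}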