Example usage for java.util.concurrent TimeUnit DAYS

List of usage examples for java.util.concurrent TimeUnit DAYS

Introduction

On this page you can find example usages of java.util.concurrent.TimeUnit.DAYS drawn from open-source projects.

Prototype

TimeUnit DAYS

Document

Time unit representing twenty four hours.
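
Quick Example

The project snippets below mostly use TimeUnit.DAYS in one of two ways: converting a day count to another unit (for timestamps, timer periods, or cookie lifetimes), or passing DAYS as the unit argument of a blocking wait such as ExecutorService.awaitTermination. The following minimal sketch is not taken from any of the listed projects; the class and variable names are illustrative only.

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class TimeUnitDaysExample {

    public static void main(String[] args) throws InterruptedException {
        // Conversion: one day expressed in milliseconds (86,400,000) and thirty days in hours (720)
        long oneDayMillis = TimeUnit.DAYS.toMillis(1);
        long hoursInThirtyDays = TimeUnit.DAYS.toHours(30);
        System.out.println(oneDayMillis + " ms, " + hoursInThirtyDays + " h");

        // Unit argument: wait "effectively forever" for submitted tasks to finish,
        // as several of the examples below do when shutting down an executor
        ExecutorService executor = Executors.newFixedThreadPool(2);
        executor.submit(() -> System.out.println("task done"));
        executor.shutdown();
        if (!executor.awaitTermination(1, TimeUnit.DAYS)) {
            System.err.println("Tasks did not finish within one day");
        }
    }
}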

Usage

From source file: org.opencastproject.staticfiles.impl.StaticFileServiceImpl.java

/**
 * Deletes all files found in the temporary storage section of all known organizations.
 *
 * @throws IOException
 *           if there was an error while deleting the files.
 */
void purgeTemporaryStorageSection() throws IOException {
    logger.info("Start purging temporary storage section of all known organizations");
    for (Organization org : orgDirectory.getOrganizations()) {
        purgeTemporaryStorageSection(org.getId(), TimeUnit.DAYS.toMillis(1));
    }
}

From source file: pt.ua.tm.neji.train.batch.TrainBatchExecutor.java

private int processMultipleFiles(final String inputFolderPath, final int numThreads, TrainContext context,
        final Class<? extends Processor> processorCls, Object... args) {

    int filesProcessed = 0;

    // Getting folder files
    File inputFolder = new File(inputFolderPath);
    File[] files = inputFolder.listFiles();
    File[] annotations = null;
    boolean hasAnnotationFiles = false;

    // Determine if format is A1, to separate .txt and .a1 files, before processing
    if (context.getConfiguration().getInputFormat().equals(InputFormat.A1)) {
        A1Pairs a1Pairs = A1Utils.separateTextAnnotations(files);
        files = a1Pairs.getFiles();
        annotations = a1Pairs.getAnnotations();
        hasAnnotationFiles = true;
    }

    // Multi-threading 
    try {
        logger.info("Installing multi-threading support...");
        context.addMultiThreadingSupport(numThreads);
    } catch (NejiException ex) {
        String m = "There was a problem installing multi-threading support.";
        logger.error(m, ex);
        throw new RuntimeException(m, ex);
    }

    // Start thread pool
    logger.info("Starting thread pool with support for {} threads...", numThreads);
    ExecutorService executor = Executors.newFixedThreadPool(numThreads);

    LinkedList<Future> futures = new LinkedList<>();

    // Iterate over files
    for (int i = 0; i < files.length; i++) {

        // Make corpus, output file
        Corpus corpus = new Corpus(LabelFormat.BIO, context.getEntity());

        // By default, the corpus identifier is the file name
        corpus.setIdentifier(FilenameUtils.getBaseName(files[i].getName()));

        // Make in/out corpus wrappers
        InputFile inputSentencesFile = new InputFile(corpus, files[i], false);
        InputFile inputAnnotationsFile = null;

        // Check whether annotation files are present
        if (hasAnnotationFiles) {
            inputAnnotationsFile = new InputFile(corpus, annotations[i], false);
        }

        Processor processor;
        try {
            if (!hasAnnotationFiles)
                processor = newProcessor(processorCls, context, inputSentencesFile, args);
            else
                processor = newProcessor(processorCls, context, inputSentencesFile, inputAnnotationsFile, args);
        } catch (NejiException ex) {
            String m = "There was a problem creating the processor of the file: " + files[i].getAbsolutePath();
            logger.error(m, ex);
            throw new RuntimeException(m, ex);
        }

        Future submit = executor.submit(processor);
        futures.add(submit);
    }

    logger.info("");
    logger.info("{} file(s) to process.", futures.size());
    logger.info("Started processing...");

    Iterator<Future> it = futures.iterator();
    while (it.hasNext()) {
        Future future = it.next();
        try {
            Object o = future.get();
            future = null;
            it.remove();
            filesProcessed++;
        } catch (ExecutionException | InterruptedException ex) {
            String m = "There was a problem running the processor.";
            logger.error(m, ex);
        }
    }

    executor.shutdown();
    try {
        executor.awaitTermination(Long.MAX_VALUE, TimeUnit.DAYS);
    } catch (InterruptedException ex) {
        String m = "There was a problem executing the processing tasks.";
        logger.error(m, ex);
        throw new RuntimeException(m, ex);
    }

    return filesProcessed;
}

From source file: org.deri.iris.performance.IRISPerformanceTest.java

/**
 * Executes a set of datalog queries using the given configuration
 * @param queries The set of Datalog queries
 * @param config The configuration for the test suite
 * @return a list of IRISTestCase objects with the result of the test campaign
 */
public List<IRISTestCase> executeTests(final List<String> queries, final TestConfiguration config) {

    // Get the logger
    LOGGER = Logger.getLogger(IRISPerformanceTest.class.getName());

    // Construct a valid IRIS+- program using the queries and the configuration file
    String program = "";

    // add the query and its IRIS execution command to the program
    program += "/// Query ///\n";
    for (final String s : queries) {
        program += s + "\n";
        program += "?-" + s.substring(0, s.indexOf(":-")) + ".\n";
    }
    program += "\n";

    // If reasoning is enabled, add the TBOX to the program
    program += "/// TBox ///\n";
    if (config.getReasoning()) {
        String tboxPath = config.getTestHomePath() + "/" + config.getDataset() + "/tbox";
        if (config.getExpressiveness().compareTo("RDFS") == 0) {
            tboxPath += "/rdfs";
        }
        if (config.getExpressiveness().compareTo("OWL-QL") == 0) {
            tboxPath += "/owlql";
        }
        final String tbox = loadFile(tboxPath + "/" + config.getDataset() + ".dtg");
        program += tbox + "\n";
    } else {
        program += "/// EMPTY ///\n";
    }

    // Add the SBox
    program += "/// SBox ///\n";
    String sboxPath = config.getTestHomePath() + "/" + config.getDataset() + "/sbox";
    if (config.getExpressiveness().compareTo("RDFS") == 0) {
        sboxPath += "/rdfs";
    }
    if (config.getExpressiveness().compareTo("OWL-QL") == 0) {
        sboxPath += "/owlql";
    }
    final String sbox = loadFile(sboxPath + "/" + config.getDataset() + ".dtg");
    program += sbox + "\n\n";

    LOGGER.debug(program);

    // Get the parser
    final Parser parser = new Parser();

    // Parse the program
    try {
        parser.parse(program);
    } catch (final ParserException e) {
        e.printStackTrace();
    }

    // Get the TGDs from the set of rules
    final List<IRule> tgds = RewritingUtils.getTGDs(parser.getRules(), parser.getQueries());

    // Get the query bodies
    final List<IRule> bodies = new ArrayList<IRule>(parser.getRules());
    final List<IRule> datalogQueries = RewritingUtils.getQueries(bodies, parser.getQueries());

    // Get the constraints from the set of rules
    final Set<IRule> constraints = RewritingUtils.getConstraints(parser.getRules(), parser.getQueries());

    // Get the SBox rules from the set of rules
    final List<IRule> storageRules = RewritingUtils.getSBoxRules(parser.getRules(), parser.getQueries());

    // Check that the TBox is FO-reducible
    IRuleSafetyProcessor ruleProc = new LinearReducibleRuleSafetyProcessor();
    try {
        ruleProc.process(tgds);
    } catch (final RuleUnsafeException e) {
        e.printStackTrace();
    }

    // Check that the SBox rules are Safe Datalog
    ruleProc = new StandardRuleSafetyProcessor();
    try {
        ruleProc.process(storageRules);
    } catch (final RuleUnsafeException e) {
        e.printStackTrace();
    }

    // Connect to the storage
    StorageManager.getInstance();
    try {
        StorageManager.connect(config.getDBVendor(), config.getDBProtocol(), config.getDBHost(),
                config.getDBPort(), config.getDBName(), config.getSchemaName(), config.getDBUsername(),
                config.getDBPassword());
    } catch (final SQLException e) {
        e.printStackTrace();
    }

    // Evaluate the queries
    final List<IRISTestCase> output = new LinkedList<IRISTestCase>();
    for (final IQuery q : parser.getQueries()) {
        // Generate a new test-case
        final IRISTestCase currentTest = new IRISTestCase();
        int nTask = -10;

        // Get the Factories
        final IRelationFactory rf = new RelationFactory();

        // Get the Rewriter Engine
        final ParallelRewriter rewriter = new ParallelRewriter(DecompositionStrategy.DECOMPOSE,
                RewritingLanguage.UCQ, SubCheckStrategy.TAIL, NCCheck.TAIL);

        // Get and log the rule corresponding to the query
        final IRule ruleQuery = getRuleQuery(q, datalogQueries);
        currentTest.setQuery(ruleQuery);

        final Map<Pair<IPosition, IPosition>, Set<List<IRule>>> deps = DepGraphUtils
                .computePositionDependencyGraph(tgds);

        final Set<Expressivity> exprs = RewritingUtils.getExpressivity(tgds);

        // Compute and log the FO-Rewriting
        LOGGER.info("Computing TBox Rewriting");
        float duration = -System.nanoTime();
        final Set<IRule> rewriting = rewriter.getRewriting(ruleQuery, tgds, constraints, deps, exprs);
        duration = ((duration + System.nanoTime()) / 1000000);
        currentTest.getTasks()
                .add(new Task(nTask++, "TBox Rewriting", duration, 0, 0, "ms", rewriting.toString()));
        LOGGER.info("done.");
        int count = 0;
        for (final IRule r : rewriting) {
            LOGGER.debug("(Qr" + ++count + ")" + r);
        }

        // Produce the rewriting according to the Nyaya Data Model
        final IQueryRewriter ndmRewriter = new NDMRewriter(storageRules);

        // Create a buffer for the output
        final IRelation outRelation = rf.createRelation();

        // Get the SBox rewriting
        try {
            LOGGER.info("Computing SBox Rewriting");
            final Set<IRule> sboxRewriting = new LinkedHashSet<IRule>();
            duration = -System.nanoTime();
            for (final IRule pr : rewriting) {
                sboxRewriting.addAll(ndmRewriter.getRewriting(pr));
            }
            duration = ((duration + System.nanoTime()) / 1000000);
            currentTest.getTasks()
                    .add(new Task(nTask++, "SBox Rewriting", duration, 0, 0, "ms", sboxRewriting.toString()));
            LOGGER.info("done.");
            count = 0;
            for (final IRule n : sboxRewriting) {
                LOGGER.debug("(Qn" + ++count + ")" + n);
            }

            // Produce the SQL rewriting for each query in the program
            final SQLRewriter sqlRewriter = new SQLRewriter(sboxRewriting);

            // Get the SQL rewriting as Union of Conjunctive Queries (UCQ)
            LOGGER.info("Computing SQL Rewriting");
            duration = -System.nanoTime();
            final List<String> ucqSQLRewriting = new LinkedList<String>();
            ucqSQLRewriting.add(sqlRewriter.getUCQSQLRewriting("", 10000, 0));
            duration = ((duration + System.nanoTime()) / 1000000);
            currentTest.getTasks()
                    .add(new Task(nTask++, "SQL Rewriting", duration, 0, 0, "ms", ucqSQLRewriting.toString()));
            LOGGER.info("done.");
            count = 0;
            for (final String s : ucqSQLRewriting) {
                LOGGER.debug("(Qs" + ++count + ") " + s);
            }

            // Execute the UCQ
            LOGGER.info("Executing SQL");

            // float ansConstructOverall = 0;

            // The synchronized structure to store the output tuples
            final Set<ITuple> result = Collections.synchronizedSet(new HashSet<ITuple>());

            /*
             * Prepare a set of runnable objects representing each partial rewriting to be executed in parallel
             */
            final List<RunnableQuery> rql = new LinkedList<RunnableQuery>();
            for (final String cq : ucqSQLRewriting) {
                // Construct a Runnable Query
                rql.add(new RunnableQuery(cq, result, currentTest.getTasks()));
            }

            // Get an executor; a single-threaded executor is used here, although the
            // commented-out variant would allow as many parallel threads as available processors
            // ExecutorService queryExecutor =
            // Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors() * 5);
            final ExecutorService queryExecutor = Executors.newSingleThreadScheduledExecutor();

            // Execute all the partial rewritings in parallel
            float ucqExecOverall = -System.nanoTime();
            for (final RunnableQuery rq : rql) {
                queryExecutor.execute(rq);
            }
            queryExecutor.shutdown();
            if (queryExecutor.awaitTermination(1, TimeUnit.DAYS)) {
                LOGGER.info("done.");
            } else
                throw new InterruptedException("Timeout Occured");
            ucqExecOverall = ((ucqExecOverall + System.nanoTime()) / 1000000);
            StorageManager.disconnect();

            // begin added block
            float minTime = System.nanoTime();
            float maxTime = 0;
            float avgTime = 0;
            int n = 0;
            for (final Task t : currentTest.getTasks()) {
                if (t.getName().contains("Execution")) {
                    avgTime += (t.getFinalTime() - t.getInitTime()) / 1000000;
                    n++;
                    if (t.getFinalTime() > maxTime) {
                        maxTime = t.getFinalTime();
                    }
                    if (t.getInitTime() < minTime) {
                        minTime = t.getInitTime();
                    }
                }
            }
            ucqExecOverall = (maxTime - minTime) / 1000000;
            // end added block

            currentTest.getTasks()
                    .add(new Task(nTask++, "UCQ Overall Execution Time", ucqExecOverall, 0, 0, "ms"));

            // begin added block
            avgTime = (avgTime / n);
            System.out.println(n);
            currentTest.getTasks().add(new Task(nTask++, "UCQ Average Execution Time", avgTime, 0, 0, "ms"));
            Collections.sort(currentTest.getTasks());
            // end added block

            for (final ITuple t : result) {
                outRelation.add(t);
            }

        } catch (final SQLException e) {
            e.printStackTrace();
        } catch (final EvaluationException e) {
            e.printStackTrace();
        } catch (final InterruptedException e) {
            e.printStackTrace();
        }
        currentTest.setAnswer(outRelation);
        output.add(currentTest);
    }
    return (output);
}

From source file: org.kontalk.xmppserver.KontalkIqRegister.java

@Override
public void init(Map<String, Object> settings) throws TigaseDBException {
    requests = new HashMap<>();
    throttlingRequests = new HashMap<>();

    // registration providers
    providers = new LinkedHashMap<>();
    String[] providersList = (String[]) settings.get("providers");
    if (providersList == null || providersList.length == 0)
        throw new TigaseDBException("No providers configured");

    String defaultProviderName = (String) settings.get("default-provider");
    String fallbackProviderName = (String) settings.get("fallback-provider");

    for (String providerStr : providersList) {
        String[] providerDef = providerStr.split("=");
        if (providerDef.length != 2)
            throw new TigaseDBException("Bad provider definition: " + providerStr);

        String providerName = providerDef[0];
        String providerClassName = providerDef[1];

        try {
            @SuppressWarnings("unchecked")
            Class<? extends PhoneNumberVerificationProvider> providerClass = (Class<? extends PhoneNumberVerificationProvider>) Class
                    .forName(providerClassName);
            PhoneNumberVerificationProvider provider = providerClass.newInstance();
            provider.init(getPrefixedSettings(settings, providerName + "-"));
            // init was successful
            providers.put(providerName, provider);

            if (defaultProviderName != null) {
                // this is the default provider
                if (defaultProviderName.equals(providerName))
                    defaultProvider = provider;
            } else if (defaultProvider == null) {
                // no default provider defined, use the first one found
                defaultProvider = provider;
            }

            if (fallbackProviderName != null) {
                // this is the fallback provider
                if (fallbackProviderName.equals(providerName))
                    fallbackProvider = provider;
            } else if (fallbackProvider == null && defaultProvider != null) {
                // no fallback provider defined and default provider already set
                // use the second provider found
                fallbackProvider = provider;
            }
        } catch (ClassNotFoundException e) {
            throw new TigaseDBException("Provider class not found: " + providerClassName);
        } catch (InstantiationException | IllegalAccessException e) {
            throw new TigaseDBException("Unable to create provider instance for " + providerClassName);
        } catch (ConfigurationException e) {
            throw new TigaseDBException("configuration error", e);
        }
    }

    // user repository for periodical purge of old users
    String uri = (String) settings.get("db-uri");
    userRepository.initRepository(uri, null);

    // delete expired users once a day
    long timeout = TimeUnit.DAYS.toMillis(1);
    Timer taskTimer = new Timer(ID + " tasks", true);
    taskTimer.scheduleAtFixedRate(new TimerTask() {
        @Override
        public void run() {
            try {
                if (log.isLoggable(Level.FINEST)) {
                    log.finest("Purging expired users.");
                }
                // TODO seconds should be in configuration
                List<BareJID> users = userRepository.getExpiredUsers(DEF_EXPIRE_SECONDS);
                for (BareJID user : users) {
                    removeUser(user);
                }
            } catch (TigaseDBException e) {
                log.log(Level.WARNING, "error purging expired users", e);
            }
        }
    }, timeout, timeout);
}

From source file: com.linkedin.pinot.queries.FastHllQueriesTest.java

private void buildAndLoadSegment(boolean hasPreGeneratedHllColumns) throws Exception {
    FileUtils.deleteQuietly(INDEX_DIR);

    // Get resource file path
    URL resource;
    if (hasPreGeneratedHllColumns) {
        resource = getClass().getClassLoader().getResource(AVRO_DATA_WITH_PRE_GENERATED_HLL_COLUMNS);
    } else {
        resource = getClass().getClassLoader().getResource(AVRO_DATA_WITHOUT_PRE_GENERATED_HLL_COLUMNS);
    }
    Assert.assertNotNull(resource);
    String filePath = resource.getFile();

    // Build the segment schema
    Schema.SchemaBuilder schemaBuilder = new Schema.SchemaBuilder().setSchemaName("testTable")
            .addMetric("column1", FieldSpec.DataType.INT).addMetric("column3", FieldSpec.DataType.INT)
            .addSingleValueDimension("column5", FieldSpec.DataType.STRING)
            .addSingleValueDimension("column6", FieldSpec.DataType.INT)
            .addSingleValueDimension("column7", FieldSpec.DataType.INT)
            .addSingleValueDimension("column9", FieldSpec.DataType.INT)
            .addSingleValueDimension("column11", FieldSpec.DataType.STRING)
            .addSingleValueDimension("column12", FieldSpec.DataType.STRING)
            .addMetric("column17", FieldSpec.DataType.INT).addMetric("column18", FieldSpec.DataType.INT)
            .addTime("daysSinceEpoch", TimeUnit.DAYS, FieldSpec.DataType.INT);
    if (hasPreGeneratedHllColumns) {
        schemaBuilder.addSingleValueDimension("column17_HLL", FieldSpec.DataType.STRING)
                .addSingleValueDimension("column18_HLL", FieldSpec.DataType.STRING);
    }

    // Create the segment generator config
    SegmentGeneratorConfig segmentGeneratorConfig = new SegmentGeneratorConfig(schemaBuilder.build());
    segmentGeneratorConfig.setInputFilePath(filePath);
    segmentGeneratorConfig.setTableName("testTable");
    segmentGeneratorConfig.setOutDir(INDEX_DIR.getAbsolutePath());
    segmentGeneratorConfig.setInvertedIndexCreationColumns(
            Arrays.asList("column6", "column7", "column11", "column17", "column18"));
    if (hasPreGeneratedHllColumns) {
        segmentGeneratorConfig.setHllConfig(new HllConfig(HLL_LOG2M));
    } else {
        segmentGeneratorConfig.enableStarTreeIndex(null);
        // Intentionally use the non-default suffix
        segmentGeneratorConfig.setHllConfig(
                new HllConfig(HLL_LOG2M, new HashSet<>(Arrays.asList("column17", "column18")), "_HLL"));
    }

    // Build the index segment
    SegmentIndexCreationDriver driver = new SegmentIndexCreationDriverImpl();
    driver.init(segmentGeneratorConfig);
    driver.build();

    _indexSegment = ColumnarSegmentLoader.load(new File(INDEX_DIR, SEGMENT_NAME), ReadMode.heap);
    _segmentDataManagers = Arrays.<SegmentDataManager>asList(new OfflineSegmentDataManager(_indexSegment),
            new OfflineSegmentDataManager(_indexSegment));
}

From source file: se.chalmers.watchme.ui.MovieListFragment.java

/**
 * Set up the adapter and attach it to the list.
 */
private void setUpAdapter() {

    // Bind columns from the table Movies to items in the rows.
    String[] from = new String[] { MoviesTable.COLUMN_TITLE, MoviesTable.COLUMN_RATING, MoviesTable.COLUMN_DATE,
            MoviesTable.COLUMN_POSTER_SMALL };

    int[] to = new int[] { R.id.title, R.id.raiting, R.id.date, R.id.poster };

    getActivity().getSupportLoaderManager().initLoader(LOADER_ID, null, this);
    setAdapter(new SimpleCursorAdapter(getActivity(), R.layout.list_item_movie, null, from, to, 0));

    /**
     * Manipulate the shown date in list
     */
    getAdapter().setViewBinder(new ViewBinder() {

        public boolean setViewValue(View view, Cursor cursor, int columnIndex) {

            if (columnIndex == cursor.getColumnIndexOrThrow(MoviesTable.COLUMN_DATE)) {

                String dateString = cursor.getString(columnIndex);
                TextView textView = (TextView) view;
                Calendar date = Calendar.getInstance();
                date.setTimeInMillis(Long.parseLong(dateString));

                /*
                 * If the movie's release date is within a given threshold (fetched 
                 * from resource file), change the text color of the field. 
                 */
                int threshold = Integer.parseInt(getString(R.string.days_threshold));

                if (DateTimeUtils.isDateInInterval(date, threshold, TimeUnit.DAYS)) {
                    String color = getString(R.string.color_threshold);
                    textView.setTextColor(Color.parseColor(color));
                }
                /*
                 * Set to original color if not in threshold
                 */
                else {
                    textView.setTextColor(R.string.list_date_color);
                }

                // Format the date to relative form ("two days left")
                String formattedDate = DateTimeUtils.toHumanDate(date);
                textView.setText(formattedDate);

                return true;
            }

            /*
             * Handle rating bar conversion
             */
            else if (columnIndex == cursor.getColumnIndexOrThrow(MoviesTable.COLUMN_RATING)) {
                int rating = cursor.getInt(columnIndex);
                RatingBar bar = (RatingBar) view;
                bar.setRating(rating);

                return true;
            }

            /*
             * Handle poster images
             */

            else if (columnIndex == cursor.getColumnIndexOrThrow(MoviesTable.COLUMN_POSTER_SMALL)) {
                String smallImageUrl = cursor.getString(columnIndex);
                final ImageView imageView = (ImageView) view;

                if (smallImageUrl != null && !smallImageUrl.isEmpty()) {

                    // Fetch the image in an async task
                    imageTask = new ImageDownloadTask(new ImageDownloadTask.TaskActions() {

                        // When task is finished, set the resulting
                        // image on the poster view
                        public void onFinished(Bitmap image) {
                            if (image != null) {
                                imageView.setImageBitmap(image);
                            }
                        }
                    });

                    imageTask.execute(new String[] { smallImageUrl });
                }

                return true;
            }

            return false;
        }
    });
}

From source file: com.adithya321.sharesanalysis.fragments.DetailFragment.java

private void setShareHoldings(View view) {
    TextView percentageChangeTV = (TextView) view.findViewById(R.id.detail__percent_change);
    TextView noOfDaysTV = (TextView) view.findViewById(R.id.detail_no_of_days);
    TextView totalProfitTV = (TextView) view.findViewById(R.id.detail_total_profit);
    TextView currentNoOfSharesTV = (TextView) view.findViewById(R.id.detail_currents_no_of_shares);
    TextView currentStockValueTV = (TextView) view.findViewById(R.id.detail_current_value);
    TextView targetTotalProfitTV = (TextView) view.findViewById(R.id.detail_target_total_profit);
    TextView rewardTV = (TextView) view.findViewById(detail_reward);

    int totalSharesPurchased = 0;
    int totalSharesSold = 0;
    double totalValuePurchased = 0;
    double totalValueSold = 0;
    double averageShareValue = 0;
    double percentageChange = 0;
    double totalProfit = 0;
    double targetTotalProfit = 0;
    double reward = 0;
    double currentStockValue = 0;

    RealmList<Purchase> purchases = share.getPurchases();
    for (Purchase purchase : purchases) {
        if (purchase.getType().equals("buy")) {
            totalSharesPurchased += purchase.getQuantity();
            totalValuePurchased += (purchase.getQuantity() * purchase.getPrice());
        } else if (purchase.getType().equals("sell")) {
            totalSharesSold += purchase.getQuantity();
            totalValueSold += (purchase.getQuantity() * purchase.getPrice());
        }
    }
    if (totalSharesPurchased != 0)
        averageShareValue = totalValuePurchased / totalSharesPurchased;

    if (averageShareValue != 0)
        percentageChange = ((share.getCurrentShareValue() - averageShareValue) / averageShareValue) * 100;
    Date today = new Date();
    Date start = share.getDateOfInitialPurchase();
    long noOfDays = DateUtils.getDateDiff(start, today, TimeUnit.DAYS);

    SharedPreferences sharedPreferences = getActivity().getSharedPreferences("prefs", 0);

    int currentNoOfShares = totalSharesPurchased - totalSharesSold;
    totalProfit = totalValueSold - totalValuePurchased;
    currentStockValue = currentNoOfShares * share.getCurrentShareValue();
    double target = sharedPreferences.getFloat("target", 0);
    targetTotalProfit = (target / 100) * totalValuePurchased * ((double) noOfDays / 365);
    reward = totalProfit - targetTotalProfit;
    if (reward < 0)
        rewardTV.setTextColor(getResources().getColor((android.R.color.holo_red_dark)));
    else
        rewardTV.setTextColor(getResources().getColor((R.color.colorPrimary)));

    currentNoOfSharesTV.setText(String.valueOf(currentNoOfShares));
    percentageChangeTV.setText(String.valueOf(NumberUtils.round(percentageChange, 2)));
    noOfDaysTV.setText(String.valueOf(noOfDays));
    totalProfitTV.setText(String.valueOf(NumberUtils.round(totalProfit, 2)));
    currentStockValueTV.setText(String.valueOf(NumberUtils.round(currentStockValue, 2)));
    targetTotalProfitTV.setText(String.valueOf(NumberUtils.round(targetTotalProfit, 2)));
    rewardTV.setText(String.valueOf(NumberUtils.round(reward, 2)));
}

From source file: org.dcache.util.collector.pools.PoolInfoCollectorUtils.java

/**
 * <p>Generates a histogram model which tracks
 * the average, maximum, minimum or standard deviation of lifetime
 * values over a fixed window of two months.</p>
 *
 * <p>An empty list of values is used to initialize the bins.</p>
 *
 * @param type       MAX, MIN, AVG, STDDEV
 * @param identifier for the histogram
 * @return the corresponding histogram model
 */
public static TimeseriesHistogram newLifetimeTimeSeriesHistogram(String type, String identifier) {
    double highestBin = (double) TimeFrame.computeHighTimeFromNow(BinType.DAY).getTimeInMillis();
    double unit = (double) TimeUnit.DAYS.toMillis(1);
    TimeseriesHistogram histogram = new TimeseriesHistogram();
    histogram.setData(Collections.EMPTY_LIST);
    histogram.setBinUnit(unit);
    histogram.setBinUnitLabel("DATE");
    histogram.setDataUnitLabel(type);
    histogram.setBinCount(61);
    histogram.setHighestBin(highestBin);
    histogram.setIdentifier(identifier);
    histogram.configure();
    return histogram;
}

From source file: com.sonicle.webtop.core.app.OTPManager.java

public boolean isThisDeviceTrusted(UserProfileId pid, TrustedDeviceCookie tdc) {
    if (tdc == null)
        return false;
    CoreServiceSettings css = new CoreServiceSettings(CoreManifest.ID, pid.getDomainId());

    // Checks (if enabled) cookie duration
    int duration = css.getOTPDeviceTrustDuration();
    if (duration > 0) {
        long now = new Date().getTime();
        long expires = tdc.timestamp + TimeUnit.DAYS.toMillis(duration);
        if (now > expires) {
            logger.trace("Device cookie expired [{}days, {} > {}]", duration, now, expires);
            return false;
        }
    }

    // Checks if device is registered
    JsTrustedDevice td = getTrustedDevice(pid, tdc.deviceId);
    if (td == null) {
        logger.trace("Device ID not registered before [{}]", tdc.deviceId);
        return false;
    }

    // Checks account match
    if (!td.account.equals(tdc.account)) {
        logger.trace("Device ID not bound to the right account [{} != {}]", tdc.account, td.account);
        return false;
    }
    return true;
}

From source file: org.jvnet.hudson.update_center.Main.java

/**
 * Loads a certificate chain and makes sure it's valid.
 */
protected List<X509Certificate> getCertificateChain() throws IOException, GeneralSecurityException {
    CertificateFactory cf = CertificateFactory.getInstance("X509");
    List<X509Certificate> certs = new ArrayList<X509Certificate>();
    for (File f : certificates) {
        X509Certificate c = loadCertificate(cf, f);
        c.checkValidity(new Date(System.currentTimeMillis() + TimeUnit.DAYS.toMillis(30)));
        certs.add(c);
    }

    Set<TrustAnchor> rootCAs = CertificateUtil.getDefaultRootCAs();
    rootCAs.add(new TrustAnchor(
            (X509Certificate) cf.generateCertificate(getClass().getResourceAsStream("/hudson-community.cert")),
            null));
    for (File f : rootCA) {
        rootCAs.add(new TrustAnchor(loadCertificate(cf, f), null));
    }

    try {
        CertificateUtil.validatePath(certs, rootCAs);
    } catch (GeneralSecurityException e) {
        e.printStackTrace();
    }
    return certs;
}