Example usage for java.util Date toInstant

List of usage examples for java.util Date toInstant

Introduction

On this page you can find example usage for java.util Date toInstant.

Prototype

public Instant toInstant() 

Document

Converts this Date object to an Instant.
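
For instance, a minimal round trip between the legacy and modern date-time APIs:

import java.time.Instant;
import java.util.Date;

public class ToInstantDemo {
    public static void main(String[] args) {
        Date legacy = new Date();
        // Instant carries the same point on the time line, with nanosecond precision
        Instant instant = legacy.toInstant();
        // Date.from(Instant) is the inverse conversion
        Date roundTrip = Date.from(instant);
        System.out.println(instant.equals(roundTrip.toInstant())); // true
    }
}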

Usage

From source file:IndexImportMediawiki_p.java

/**
 * Run conditions:
 * - no MediaWiki import thread is running: allows starting a new import by filling the "file" parameter
 * - a MediaWiki import thread is running: returns monitoring information.
 * @param header servlet request header
 * @param post request parameters. Supported keys:
 *            <ul>
 *            <li>file : a dump URL or file path on this YaCy server's local file system</li>
 *            <li>iffresh : when set to true, the dump file is imported only if its last modified date is unknown or later than the last import attempt on this same file.</li>
 *            <li>report : when set, display monitoring information for the currently running thread, or the last import report when none is running.
 *            Ignored when no import thread is known.</li>
 *            </ul>
 * @param env server environment
 * @return the servlet answer object
 */
public static serverObjects respond(final RequestHeader header, final serverObjects post,
        final serverSwitch env) {
    final serverObjects prop = new serverObjects();
    final Switchboard sb = (Switchboard) env;

    if (MediawikiImporter.job != null
            && (MediawikiImporter.job.isAlive() || (post != null && post.containsKey("report")))) {
        /* An import is running, or a report was explicitly requested: do not offer to start a new import */
        prop.put("import", 1);
        /* Only refresh automatically when the job is running */
        prop.put("refresh", MediawikiImporter.job.isAlive() ? 1 : 0);
        final String jobErrorMessage = MediawikiImporter.job.status();
        if (jobErrorMessage != null && !jobErrorMessage.isEmpty()) {
            prop.put("import_status", 1);
            prop.put("import_status_message", jobErrorMessage);
        }
        prop.put("import_thread", MediawikiImporter.job.isAlive() ? 2 : 0);
        prop.put("import_dump", MediawikiImporter.job.source());
        prop.put("import_count", MediawikiImporter.job.count());
        prop.put("import_speed", MediawikiImporter.job.speed());
        prop.put("import_runningHours", (MediawikiImporter.job.runningTime() / 60) / 60);
        prop.put("import_runningMinutes", (MediawikiImporter.job.runningTime() / 60) % 60);
        prop.put("import_remainingHours", (MediawikiImporter.job.remainingTime() / 60) / 60);
        prop.put("import_remainingMinutes", (MediawikiImporter.job.remainingTime() / 60) % 60);
    } else {
        prop.put("import", 0);
        prop.put("refresh", 0);
        prop.put("import_prevReport", MediawikiImporter.job != null ? 1 : 0);
        if (post == null) {
            prop.put("import_status", 0);

            /* Acquire a transaction token for the next POST form submission */
            final String token = TransactionManager.getTransactionToken(header);
            prop.put(TransactionManager.TRANSACTION_TOKEN_PARAM, token);
            prop.put("import_" + TransactionManager.TRANSACTION_TOKEN_PARAM, token);

        } else {
            if (post.containsKey("file")) {
                /* Check the transaction is valid */
                TransactionManager.checkPostTransaction(header, post);

                String file = post.get("file");
                MultiProtocolURL sourceURL = null;
                int status = 0;
                String sourceFilePath = "";
                final Row lastExecutedCall = WorkTables.selectLastExecutedApiCall("IndexImportMediawiki_p.html",
                        post, sb);
                Date lastExecutionDate = null;
                if (lastExecutedCall != null) {
                    lastExecutionDate = lastExecutedCall.get(WorkTables.TABLE_API_COL_DATE_LAST_EXEC,
                            (Date) null);
                }
                try {
                    sourceURL = new MultiProtocolURL(file);
                    if (sourceURL.isFile()) {
                        final File sourcefile = sourceURL.getFSFile();
                        sourceFilePath = sourcefile.getAbsolutePath();
                        if (!sourcefile.exists()) {
                            status = 2;
                        } else if (!sourcefile.canRead()) {
                            status = 3;
                        } else if (sourcefile.isDirectory()) {
                            status = 4;
                        }
                    }

                    if (status == 0 && post.getBoolean("iffresh")) {
                        long lastModified = getLastModified(sourceURL);
                        if (lastExecutionDate != null && lastModified != 0L
                                && Instant.ofEpochMilli(lastModified).isBefore(lastExecutionDate.toInstant())) {
                            status = 5;
                            prop.put("import_status_lastImportDate", GenericFormatter.formatSafely(
                                    lastExecutionDate.toInstant(), GenericFormatter.FORMAT_SIMPLE));

                            /* the import is not performed, but we increase here the api call count */
                            if (sb.tables != null) {
                                byte[] lastExecutedCallPk = lastExecutedCall.getPK();
                                if (lastExecutedCallPk != null
                                        && !post.containsKey(WorkTables.TABLE_API_COL_APICALL_PK)) {
                                    post.add(WorkTables.TABLE_API_COL_APICALL_PK,
                                            UTF8.String(lastExecutedCallPk));
                                }
                                sb.tables.recordAPICall(post, "IndexImportMediawiki_p.html",
                                        WorkTables.TABLE_API_TYPE_DUMP,
                                        "MediaWiki Dump Import for " + sourceURL);
                            }
                        }
                    }
                } catch (MalformedURLException e) {
                    status = 1;
                }
                if (status == 0) {
                    /* store this call as an api call */
                    if (sb.tables != null) {
                        /* We avoid creating a duplicate of any already recorded API call with the same parameters */
                        if (lastExecutedCall != null
                                && !post.containsKey(WorkTables.TABLE_API_COL_APICALL_PK)) {
                            byte[] lastExecutedCallPk = lastExecutedCall.getPK();
                            if (lastExecutedCallPk != null) {
                                post.add(WorkTables.TABLE_API_COL_APICALL_PK, UTF8.String(lastExecutedCallPk));
                            }
                        }
                        sb.tables.recordAPICall(post, "IndexImportMediawiki_p.html",
                                WorkTables.TABLE_API_TYPE_DUMP, "MediaWiki Dump Import for " + sourceURL);
                    }

                    MediawikiImporter.job = new MediawikiImporter(sourceURL, sb.surrogatesInPath);
                    MediawikiImporter.job.start();
                    prop.put("import_dump", MediawikiImporter.job.source());
                    prop.put("import_thread", 1);
                    prop.put("import", 1);
                    prop.put("refresh", 1);
                } else {
                    prop.put("import_status", status);
                    prop.put("import_status_sourceFile", sourceFilePath);

                    /* Acquire a transaction token for the next POST form submission */
                    final String token = TransactionManager.getTransactionToken(header);
                    prop.put(TransactionManager.TRANSACTION_TOKEN_PARAM, token);
                    prop.put("import_" + TransactionManager.TRANSACTION_TOKEN_PARAM, token);
                }
                prop.put("import_count", 0);
                prop.put("import_speed", 0);
                prop.put("import_runningHours", 0);
                prop.put("import_runningMinutes", 0);
                prop.put("import_remainingHours", 0);
                prop.put("import_remainingMinutes", 0);
            }
        }
    }
    return prop;
}
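
The "iffresh" handling above reduces to comparing the dump's last-modified time (epoch milliseconds) with the recorded date of the previous import attempt. A minimal sketch of that comparison, with a hypothetical helper name:

import java.time.Instant;
import java.util.Date;

public class FreshnessCheck {
    // True when the dump was modified on or after the last import attempt,
    // or when there is no usable history (mirrors the status = 5 branch above)
    static boolean isFresh(long lastModifiedMillis, Date lastImportDate) {
        if (lastImportDate == null || lastModifiedMillis == 0L) {
            return true;
        }
        return !Instant.ofEpochMilli(lastModifiedMillis).isBefore(lastImportDate.toInstant());
    }
}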

From source file:me.childintime.childintime.ui.window.tool.BmiToolDialog.java

/**
 * Updates the chart dataset with the selected student's body state measurements.
 */
private void updateChartDataset() {
    // Clear the data set
    this.dataset.removeAllSeries();

    try {
        // Create the data series
        final XYSeries lengthSeries = new XYSeries("Length (cm)");
        final XYSeries weightSeries = new XYSeries("Weight (kg)");
        final XYSeries bmiSeries = new XYSeries("BMI");

        // Get the student
        Student student = (Student) this.studentList.getSelectedItem();

        // Make sure a student is selected
        if (student == null)
            return;

        // Get the student birthdate
        Date birthdate = (Date) student.getField(StudentFields.BIRTHDAY);

        // Age
        final long age = ChronoUnit.YEARS
                .between(birthdate.toInstant().atZone(ZoneId.systemDefault()).toLocalDate(), LocalDate.now());

        // Loop through the list of body states
        for (AbstractEntity abstractEntity : Core.getInstance().getBodyStateManager().getEntities()) {
            // Cast the entity to a body state
            BodyState bodyState = (BodyState) abstractEntity;

            // Make sure the student owns this body state
            try {
                if (!bodyState.getField(BodyStateFields.STUDENT_ID).equals(student))
                    continue;

            } catch (Exception e) {
                e.printStackTrace();
            }

            // Get the measurement date
            final Date measurementDate = (Date) bodyState.getField(BodyStateFields.DATE);

            // Age
            final long measurementAge = ChronoUnit.YEARS.between(
                    measurementDate.toInstant().atZone(ZoneId.systemDefault()).toLocalDate(), LocalDate.now());

            // Get the length and weight
            final int length = (int) bodyState.getField(BodyStateFields.LENGTH);
            final double weight = ((int) bodyState.getField(BodyStateFields.WEIGHT)) / 1000.0;

            // Calculate the BMI
            final double bmi = weight / Math.pow((length / 100.0), 2);

            // Add the data to the sets
            lengthSeries.add(age - measurementAge, length);
            weightSeries.add(age - measurementAge, weight);
            bmiSeries.add(age - measurementAge, bmi);
        }

        // Add the data series to the set
        this.dataset.addSeries(bmiSeries);
        this.dataset.addSeries(lengthSeries);
        this.dataset.addSeries(weightSeries);

    } catch (Exception e) {
        e.printStackTrace();
    }

    // Reset the dataset on the plot
    this.chart.getXYPlot().setDataset(this.dataset);
}
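
The age computation used twice above is the standard bridge from a legacy Date to a LocalDate: toInstant(), then atZone(), then toLocalDate(). Extracted into a helper (a sketch assuming the system default zone, as the dialog does):

import java.time.LocalDate;
import java.time.ZoneId;
import java.time.temporal.ChronoUnit;
import java.util.Date;

public class AgeHelper {
    // Whole years between the given date and today, in the system default zone
    static long yearsSince(Date date) {
        LocalDate then = date.toInstant().atZone(ZoneId.systemDefault()).toLocalDate();
        return ChronoUnit.YEARS.between(then, LocalDate.now());
    }
}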

From source file:org.haiku.haikudepotserver.job.controller.JobController.java

/**
 * <p>This is helper-code that can be used to check to see if the data is stale and
 * will then enqueue the job, run it and then redirect the user to the data
 * download.</p>
 * @param response is the HTTP response to send the redirect to.
 * @param jobService is the job service used to enqueue and query the job.
 * @param ifModifiedSinceHeader is the inbound header from the client.
 * @param lastModifyTimestamp is the actual last modified date for the data.
 * @param jobSpecification is the job that would be run if the data is newer than in the
 *                         inbound header.
 */

public static void handleRedirectToJobData(HttpServletResponse response, JobService jobService,
        String ifModifiedSinceHeader, Date lastModifyTimestamp, JobSpecification jobSpecification)
        throws IOException {

    if (!Strings.isNullOrEmpty(ifModifiedSinceHeader)) {
        try {
            Date requestModifyTimestamp = new Date(Instant
                    .from(DateTimeFormatter.RFC_1123_DATE_TIME.parse(ifModifiedSinceHeader)).toEpochMilli());

            if (requestModifyTimestamp.getTime() >= lastModifyTimestamp.getTime()) {
                response.setStatus(HttpStatus.NOT_MODIFIED.value());
                return;
            }
        } catch (DateTimeParseException dtpe) {
            LOGGER.warn("bad [{}] header on request; [{}] -- will ignore", HttpHeaders.IF_MODIFIED_SINCE,
                    StringUtils.abbreviate(ifModifiedSinceHeader, 128));
        }
    }

    // get the report; if it is too old, delete it and try again

    JobSnapshot jobSnapshot = getJobSnapshotStartedAfter(jobService, lastModifyTimestamp, jobSpecification);
    Set<String> jobDataGuids = jobSnapshot.getDataGuids();

    if (1 != jobDataGuids.size()) {
        throw new IllegalStateException("found [" + jobDataGuids.size()
                + "] job data guids related to the job [" + jobSnapshot.getGuid() + "] - was expecting 1");
    }

    String lastModifiedValue = DateTimeFormatter.RFC_1123_DATE_TIME
            .format(ZonedDateTime.ofInstant(lastModifyTimestamp.toInstant(), ZoneOffset.UTC));
    String destinationLocationUrl = UriComponentsBuilder.newInstance()
            .pathSegment(AuthenticationFilter.SEGMENT_SECURED).pathSegment(JobController.SEGMENT_JOBDATA)
            .pathSegment(jobDataGuids.iterator().next()).pathSegment(JobController.SEGMENT_DOWNLOAD)
            .toUriString();

    response.addHeader(HttpHeaders.LAST_MODIFIED, lastModifiedValue);
    response.sendRedirect(destinationLocationUrl);
}
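
Both directions of the header handling above go through java.time: If-Modified-Since is parsed with RFC_1123_DATE_TIME, and Last-Modified is formatted from a Date via toInstant(). The two conversions in isolation:

import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.util.Date;

public class HttpDateBridge {
    // Format a legacy Date as an RFC 1123 header value, e.g. "Tue, 03 Jun 2008 11:05:30 GMT"
    static String toHeader(Date date) {
        return DateTimeFormatter.RFC_1123_DATE_TIME
                .format(ZonedDateTime.ofInstant(date.toInstant(), ZoneOffset.UTC));
    }

    // Parse a header value back into a legacy Date
    static Date fromHeader(String value) {
        return Date.from(ZonedDateTime.parse(value, DateTimeFormatter.RFC_1123_DATE_TIME).toInstant());
    }
}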

From source file:com.intuit.wasabi.repository.cassandra.impl.CassandraAssignmentsRepository.java

List<Date> getUserAssignmentPartitions(Date fromTime, Date toTime) {
    final LocalDateTime startTime = LocalDateTime.ofInstant(fromTime.toInstant(), ZoneId.systemDefault())
            .withMinute(0).withSecond(0).withNano(0);
    final LocalDateTime endTime = LocalDateTime.ofInstant(toTime.toInstant(), ZoneId.systemDefault())
            .withMinute(0).withSecond(0).withNano(0);
    final long hours = Duration.between(startTime, endTime).toHours();
    return LongStream.rangeClosed(0, hours).mapToObj(startTime::plusHours)
            .map(t -> Date.from(t.atZone(ZoneId.systemDefault()).toInstant())).collect(Collectors.toList());
}
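
For example, a window of two and a half hours yields one hour-aligned Date per hour boundary it touches; the same pattern exercised in isolation:

import java.time.Duration;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.util.Date;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.LongStream;

public class HourPartitionsDemo {
    public static void main(String[] args) {
        Date from = new Date();
        Date to = new Date(from.getTime() + 9_000_000L); // 2.5 hours later
        // Truncate both endpoints to the hour in the system default zone
        LocalDateTime start = LocalDateTime.ofInstant(from.toInstant(), ZoneId.systemDefault())
                .withMinute(0).withSecond(0).withNano(0);
        LocalDateTime end = LocalDateTime.ofInstant(to.toInstant(), ZoneId.systemDefault())
                .withMinute(0).withSecond(0).withNano(0);
        long hours = Duration.between(start, end).toHours();
        // One partition per hour boundary between the truncated endpoints, inclusive
        List<Date> partitions = LongStream.rangeClosed(0, hours).mapToObj(start::plusHours)
                .map(t -> Date.from(t.atZone(ZoneId.systemDefault()).toInstant()))
                .collect(Collectors.toList());
        partitions.forEach(System.out::println);
    }
}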

From source file:org.apache.nifi.avro.AvroTypeUtil.java

@SuppressWarnings("unchecked")
private static Object convertToAvroObject(final Object rawValue, final Schema fieldSchema,
        final String fieldName, final Charset charset) {
    if (rawValue == null) {
        return null;
    }

    switch (fieldSchema.getType()) {
    case INT: {
        final LogicalType logicalType = fieldSchema.getLogicalType();
        if (logicalType == null) {
            return DataTypeUtils.toInteger(rawValue, fieldName);
        }

        if (LOGICAL_TYPE_DATE.equals(logicalType.getName())) {
            final String format = AvroTypeUtil.determineDataType(fieldSchema).getFormat();
            final Date date = DataTypeUtils.toDate(rawValue, () -> DataTypeUtils.getDateFormat(format),
                    fieldName);
            final Duration duration = Duration.between(new Date(0L).toInstant(),
                    new Date(date.getTime()).toInstant());
            final long days = duration.toDays();
            return (int) days;
        } else if (LOGICAL_TYPE_TIME_MILLIS.equals(logicalType.getName())) {
            final String format = AvroTypeUtil.determineDataType(fieldSchema).getFormat();
            final Time time = DataTypeUtils.toTime(rawValue, () -> DataTypeUtils.getDateFormat(format),
                    fieldName);
            final Date date = new Date(time.getTime());
            final Duration duration = Duration.between(date.toInstant().truncatedTo(ChronoUnit.DAYS),
                    date.toInstant());
            final long millisSinceMidnight = duration.toMillis();
            return (int) millisSinceMidnight;
        }

        return DataTypeUtils.toInteger(rawValue, fieldName);
    }
    case LONG: {
        final LogicalType logicalType = fieldSchema.getLogicalType();
        if (logicalType == null) {
            return DataTypeUtils.toLong(rawValue, fieldName);
        }

        if (LOGICAL_TYPE_TIME_MICROS.equals(logicalType.getName())) {
            final long longValue = getLongFromTimestamp(rawValue, fieldSchema, fieldName);
            final Date date = new Date(longValue);
            final Duration duration = Duration.between(date.toInstant().truncatedTo(ChronoUnit.DAYS),
                    date.toInstant());
            return duration.toMillis() * 1000L;
        } else if (LOGICAL_TYPE_TIMESTAMP_MILLIS.equals(logicalType.getName())) {
            return getLongFromTimestamp(rawValue, fieldSchema, fieldName);
        } else if (LOGICAL_TYPE_TIMESTAMP_MICROS.equals(logicalType.getName())) {
            return getLongFromTimestamp(rawValue, fieldSchema, fieldName) * 1000L;
        }

        return DataTypeUtils.toLong(rawValue, fieldName);
    }
    case BYTES:
    case FIXED:
        final LogicalType logicalType = fieldSchema.getLogicalType();
        if (logicalType != null && LOGICAL_TYPE_DECIMAL.equals(logicalType.getName())) {
            final LogicalTypes.Decimal decimalType = (LogicalTypes.Decimal) logicalType;
            final BigDecimal rawDecimal;
            if (rawValue instanceof BigDecimal) {
                rawDecimal = (BigDecimal) rawValue;

            } else if (rawValue instanceof Double) {
                rawDecimal = BigDecimal.valueOf((Double) rawValue);

            } else if (rawValue instanceof String) {
                rawDecimal = new BigDecimal((String) rawValue);

            } else if (rawValue instanceof Integer) {
                rawDecimal = new BigDecimal((Integer) rawValue);

            } else if (rawValue instanceof Long) {
                rawDecimal = new BigDecimal((Long) rawValue);

            } else {
                throw new IllegalTypeConversionException("Cannot convert value " + rawValue + " of type "
                        + rawValue.getClass() + " to a logical decimal");
            }
            // If the desired scale differs from this value's scale, coerce it
            final int desiredScale = decimalType.getScale();
            final BigDecimal decimal = rawDecimal.scale() == desiredScale ? rawDecimal
                    : rawDecimal.setScale(desiredScale, BigDecimal.ROUND_HALF_UP);
            return new Conversions.DecimalConversion().toBytes(decimal, fieldSchema, logicalType);
        }
        if (rawValue instanceof byte[]) {
            return ByteBuffer.wrap((byte[]) rawValue);
        }
        if (rawValue instanceof String) {
            return ByteBuffer.wrap(((String) rawValue).getBytes(charset));
        }
        if (rawValue instanceof Object[]) {
            return AvroTypeUtil.convertByteArray((Object[]) rawValue);
        } else {
            throw new IllegalTypeConversionException("Cannot convert value " + rawValue + " of type "
                    + rawValue.getClass() + " to a ByteBuffer");
        }
    case MAP:
        if (rawValue instanceof Record) {
            final Record recordValue = (Record) rawValue;
            final Map<String, Object> map = new HashMap<>();
            for (final RecordField recordField : recordValue.getSchema().getFields()) {
                final Object v = recordValue.getValue(recordField);
                if (v != null) {
                    map.put(recordField.getFieldName(), v);
                }
            }

            return map;
        } else if (rawValue instanceof Map) {
            final Map<String, Object> objectMap = (Map<String, Object>) rawValue;
            final Map<String, Object> map = new HashMap<>(objectMap.size());
            for (final String s : objectMap.keySet()) {
                final Object converted = convertToAvroObject(objectMap.get(s), fieldSchema.getValueType(),
                        fieldName + "[" + s + "]", charset);
                map.put(s, converted);
            }
            return map;
        } else {
            throw new IllegalTypeConversionException(
                    "Cannot convert value " + rawValue + " of type " + rawValue.getClass() + " to a Map");
        }
    case RECORD:
        final GenericData.Record avroRecord = new GenericData.Record(fieldSchema);

        final Record record = (Record) rawValue;
        for (final RecordField recordField : record.getSchema().getFields()) {
            final Object recordFieldValue = record.getValue(recordField);
            final String recordFieldName = recordField.getFieldName();

            final Field field = fieldSchema.getField(recordFieldName);
            if (field == null) {
                continue;
            }

            final Object converted = convertToAvroObject(recordFieldValue, field.schema(),
                    fieldName + "/" + recordFieldName, charset);
            avroRecord.put(recordFieldName, converted);
        }
        return avroRecord;
    case UNION:
        return convertUnionFieldValue(rawValue, fieldSchema,
                schema -> convertToAvroObject(rawValue, schema, fieldName, charset), fieldName);
    case ARRAY:
        final Object[] objectArray = (Object[]) rawValue;
        final List<Object> list = new ArrayList<>(objectArray.length);
        int i = 0;
        for (final Object o : objectArray) {
            final Object converted = convertToAvroObject(o, fieldSchema.getElementType(),
                    fieldName + "[" + i + "]", charset);
            list.add(converted);
            i++;
        }
        return list;
    case BOOLEAN:
        return DataTypeUtils.toBoolean(rawValue, fieldName);
    case DOUBLE:
        return DataTypeUtils.toDouble(rawValue, fieldName);
    case FLOAT:
        return DataTypeUtils.toFloat(rawValue, fieldName);
    case NULL:
        return null;
    case ENUM:
        return new GenericData.EnumSymbol(fieldSchema, rawValue);
    case STRING:
        return DataTypeUtils.toString(rawValue, (String) null, charset);
    }

    return rawValue;
}
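
The date and time-millis branches above both lean on Duration between two Instants: days since the Unix epoch for LOGICAL_TYPE_DATE, and milliseconds since the start of the day for LOGICAL_TYPE_TIME_MILLIS. The core arithmetic in isolation (note that truncatedTo(ChronoUnit.DAYS) on an Instant truncates against UTC):

import java.time.Duration;
import java.time.temporal.ChronoUnit;
import java.util.Date;

public class AvroTemporalMath {
    // Avro "date" logical type: whole days between the epoch and the given Date
    static int daysSinceEpoch(Date date) {
        return (int) Duration.between(new Date(0L).toInstant(), date.toInstant()).toDays();
    }

    // Avro "time-millis" logical type: milliseconds since the start of the (UTC) day
    static int millisSinceMidnight(Date date) {
        return (int) Duration.between(date.toInstant().truncatedTo(ChronoUnit.DAYS), date.toInstant())
                .toMillis();
    }
}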

From source file:eu.clarin.cmdi.vlo.importer.MetadataImporter.java

/**
 * Update "days since last import" field for all Solr records of dataRoot.
 * Notice that it will not touch records that have a "last seen" value newer
 * than today. Therefore this should be called <em>after</em> normal 
 * processing of the data root!
 *
 * @param dataRoot the data root whose records should be updated
 * @throws SolrServerException
 * @throws IOException
 */
private void updateDaysSinceLastImport(DataRoot dataRoot) throws SolrServerException, IOException {
    LOG.info("Updating \"days since last import\" in Solr for: {}", dataRoot.getOriginName());

    SolrQuery query = new SolrQuery();
    query.setQuery(
            //we're going to process all records in the current data root...
            FacetConstants.FIELD_DATA_PROVIDER + ":" + ClientUtils.escapeQueryChars(dataRoot.getOriginName())
                    + " AND "
                    // ...that have a "last seen" value _older_ than today (on update/initialisation all records get 0 so we can skip the rest)
                    + FacetConstants.FIELD_LAST_SEEN + ":[* TO NOW-1DAY]");
    query.setFields(FacetConstants.FIELD_ID, FacetConstants.FIELD_LAST_SEEN);
    int fetchSize = 1000;
    query.setRows(fetchSize);
    QueryResponse rsp = solrServer.query(query);

    final long totalResults = rsp.getResults().getNumFound();
    final LocalDate nowDate = LocalDate.now();

    final int docsListSize = config.getMaxDocsInList();
    List<SolrInputDocument> updateDocs = new ArrayList<>(docsListSize);

    boolean updatedDocs = false;
    int offset = 0;

    while (offset < totalResults) {
        query.setStart(offset);
        query.setRows(fetchSize);

        for (SolrDocument doc : solrServer.query(query).getResults()) {
            updatedDocs = true;

            String recordId = (String) doc.getFieldValue(FacetConstants.FIELD_ID);
            Date lastImportDate = (Date) doc.getFieldValue(FacetConstants.FIELD_LAST_SEEN);
            LocalDate oldDate = lastImportDate.toInstant().atZone(ZoneId.systemDefault()).toLocalDate();
            long daysSinceLastSeen = DAYS.between(oldDate, nowDate);

            SolrInputDocument updateDoc = new SolrInputDocument();
            updateDoc.setField(FacetConstants.FIELD_ID, recordId);

            Map<String, Long> partialUpdateMap = new HashMap<>();
            partialUpdateMap.put("set", daysSinceLastSeen);
            updateDoc.setField(FacetConstants.FIELD_DAYS_SINCE_LAST_SEEN, partialUpdateMap);

            updateDocs.add(updateDoc);

            if (updateDocs.size() == docsListSize) {
                solrServer.add(updateDocs);
                if (serverError != null) {
                    throw new SolrServerException(serverError);
                }
                updateDocs = new ArrayList<>(docsListSize);
            }
        }
        offset += fetchSize;
        LOG.info("Updating \"days since last import\": {} out of {} records updated", offset, totalResults);
    }

    if (!updateDocs.isEmpty()) {
        solrServer.add(updateDocs);
        if (serverError != null) {
            throw new SolrServerException(serverError);
        }
    }

    if (updatedDocs) {
        solrServer.commit();
    }

    LOG.info("Updating \"days since last import\" done.");
}
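
Per record, the arithmetic is again the toInstant() bridge to LocalDate, this time with day granularity; in isolation (system default zone assumed, as in the importer):

import static java.time.temporal.ChronoUnit.DAYS;

import java.time.LocalDate;
import java.time.ZoneId;
import java.util.Date;

public class LastSeenMath {
    // Whole days between the stored "last seen" Date and today
    static long daysSince(Date lastSeen) {
        LocalDate seen = lastSeen.toInstant().atZone(ZoneId.systemDefault()).toLocalDate();
        return DAYS.between(seen, LocalDate.now());
    }
}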

From source file:net.di2e.ecdr.describe.generator.DescribeGeneratorImpl.java

protected void setRecordRate(MetricsType metrics, Map<String, TemporalCoverageHolder> timeMap) {
    TemporalCoverageHolder tc = timeMap.containsKey("modified")
            ? (TemporalCoverageHolder) timeMap.get("modified")
            : (timeMap.containsKey("effective") ? (TemporalCoverageHolder) timeMap.get("effective")
                    : (TemporalCoverageHolder) timeMap.get("created"));
    try {
        if (tc != null) {
            Date startDate = tc.getStartDate();
            if (startDate != null) {
                long totalHits = metrics.getCount();
                LocalDateTime start = LocalDateTime.ofInstant(startDate.toInstant(), ZoneId.systemDefault());
                Duration duration = Duration.between(start, LocalDateTime.now());
                RecordRateType rate = new RecordRateType();
                metrics.setRecordRate(rate);
                long dur = totalHits / duration.toHours();
                if (dur < 15L) {
                    dur = totalHits / duration.toDays();
                    if (dur < 4L) {
                        dur = totalHits * 30L / duration.toDays();
                        if (dur < 10L) {
                            dur = totalHits * 365L / duration.toDays();
                            rate.setFrequency("Yearly");
                        } else {
                            rate.setFrequency("Monthly");
                        }
                    } else {
                        rate.setFrequency("Daily");
                    }
                } else if (totalHits > 1000L) {
                    dur = totalHits / duration.toMinutes();
                    if (dur > 15L) {
                        dur = totalHits / (duration.toMillis() / 1000L);
                        rate.setFrequency("Second");
                    } else {
                        rate.setFrequency("Minute");
                    }
                } else {
                    rate.setFrequency("Hourly");
                }

                rate.setValue((int) dur);
            }
        }
    } catch (Exception e) {
        LOGGER.warn("Could not set record rate: {}", e.getMessage(), e);
    }
}

From source file:org.sakaiproject.portal.service.BullhornServiceImpl.java

private void doSocialInsert(String from, String to, String event, String ref, Date eventDate, String url) {

    TransactionTemplate transactionTemplate = new TransactionTemplate(transactionManager);

    transactionTemplate.execute(new TransactionCallbackWithoutResult() {

        protected void doInTransactionWithoutResult(TransactionStatus status) {

            BullhornAlert ba = new BullhornAlert();
            ba.setAlertType(SOCIAL);
            ba.setFromUser(from);
            ba.setToUser(to);
            ba.setEvent(event);
            ba.setRef(ref);
            ba.setTitle("");
            ba.setSiteId("");
            ba.setEventDate(eventDate.toInstant());
            ba.setUrl(url);
            ba.setDeferred(false);

            sessionFactory.getCurrentSession().persist(ba);
        }
    });
}

From source file:org.sakaiproject.portal.service.BullhornServiceImpl.java

private void doAcademicInsert(String from, String to, String event, String ref, String title, String siteId,
        Date eventDate, String url) {

    TransactionTemplate transactionTemplate = new TransactionTemplate(transactionManager);

    transactionTemplate.execute(new TransactionCallbackWithoutResult() {

        protected void doInTransactionWithoutResult(TransactionStatus status) {

            BullhornAlert ba = new BullhornAlert();
            ba.setAlertType(ACADEMIC);
            ba.setFromUser(from);
            ba.setToUser(to);
            ba.setEvent(event);
            ba.setRef(ref);
            ba.setTitle(title);
            ba.setSiteId(siteId);
            ba.setEventDate(eventDate.toInstant());
            ba.setUrl(url);
            try {
                ba.setDeferred(!siteService.getSite(siteId).isPublished());
            } catch (IdUnusedException iue) {
                log.warn("Failed to find site with id {} while setting deferred to published", siteId);
            }

            sessionFactory.getCurrentSession().persist(ba);
        }
    });
}

From source file:com.example.app.support.service.AppUtil.java

/**
 * Convert the given Date to a ZonedDateTime at the given TimeZone
 *
 * @param date the Date
 * @param zone the TimeZone to convert to
 *
 * @return a ZonedDateTime that represents the same instant as the Date, at the TimeZone specified.
 */
@Nullable
public ZonedDateTime toZonedDateTime(@Nullable Date date, @Nullable TimeZone zone) {
    if (date == null || zone == null)
        return null;
    return ZonedDateTime.ofInstant(date.toInstant(), zone.toZoneId());
}
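
A usage sketch (the call site and zone are illustrative, not from the original source):

import java.time.ZonedDateTime;
import java.util.Date;
import java.util.TimeZone;

public class ToZonedDateTimeDemo {
    public static void main(String[] args) {
        // Inline equivalent of the helper above
        Date date = new Date();
        TimeZone zone = TimeZone.getTimeZone("America/New_York");
        ZonedDateTime zdt = ZonedDateTime.ofInstant(date.toInstant(), zone.toZoneId());
        System.out.println(zdt); // same instant, rendered in the requested zone
    }
}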