Example usage for org.joda.time DateTime plusDays

List of usage examples for org.joda.time DateTime plusDays

Introduction

On this page you can find example usages of org.joda.time DateTime plusDays.

Prototype

public DateTime plusDays(int days) 

Source Link

Document

Returns a copy of this datetime plus the specified number of days.

Usage

From source file:loci.formats.in.BaseZeissReader.java

License:Open Source License

/**
 * Parse timestamp from string.  Note this may be ZVI-specific
 * due to the use of locale-specific date formats in the TIFF XML.
 * @param s//  ww  w  .j a  va2  s.  co  m
 * @return a timestamp
 */
private long parseTimestamp(String s) {
    try {
        double dstamp = Double.parseDouble(s); // Time in days since the ZVI epoch
        DateTime epoch = new DateTime(1900, 1, 1, 0, 0, DateTimeZone.UTC); // 1900-01-01 00:00:00

        long millisInDay = 24L * 60L * 60L * 1000L;
        int days = (int) Math.floor(dstamp);
        int millis = (int) ((dstamp - days) * millisInDay + 0.5);

        days -= 1; // We start on day 1
        if (days > 60) { // Date prior to 1900-03-01
            days -= 1; // 1900-02-29 is considered a valid date by Excel; correct for this.
        }

        DateTime dtstamp = epoch.plusDays(days);
        dtstamp = dtstamp.plusMillis(millis);

        return dtstamp.getMillis();
    } catch (NumberFormatException e) {
        // Not a Double; one file (BY4741NQ2.zvi) contains a string in
        // place of the float64 defined in the spec, so try parsing
        // using US date format.  May be a historical AxioVision bug.
        String us_format = "MM/dd/yyyy hh:mm:ss aa";
        return DateTools.getTime(s, us_format, null);
    }
}

From source file:main.RandomHelper.java

License:Apache License

public static DateTime GenerateDate(DateTime startDate, DateTime endDate) {

    int difference = Days.daysBetween(endDate, startDate).getDays();

    int newDays = GenerateIntegerFromRange(0, difference);

    return startDate.plusDays(newDays);
}

From source file:mbsystem.controller.BookDetailsController.java

/**
 * Borrow the current book//from ww w .jav a 2s  .  c om
 */
public void borrowBook() {
    // Find current book
    currentBook = findCurrentBook();
    // Find current patron
    currentPatron = findCurrentPatron();
    // If the current patron is able to borrow the current book
    if (currentBook != null && currentPatron != null) {
        // Calculate current date and due date    
        DateTimeFormatter dtf = DateTimeFormat.forPattern("dd/MM/yyyy hh:mm:ss");
        DateTime now = new DateTime();
        DateTime added = now.plusDays(14);
        String currentDate = now.toString(dtf);
        String dueDate = added.toString(dtf);
        // Setup a new loan
        Loan newLoan = new Loan();
        newLoan.setPatron(currentPatron);
        newLoan.setBook(currentBook);
        newLoan.setLoanDate(currentDate);
        newLoan.setDueDate(dueDate);
        // Write to database
        try {
            lb.create(newLoan);
            currentBook.setAvailable(false);
            currentBook.getLoanList().add(newLoan);
            currentPatron.getLoanList().add(newLoan);
            bb.update(currentBook);
            ub.update(currentPatron);
            JsfUtil.addSuccessMessage("Loan Created");
        } catch (Exception e) {
            JsfUtil.addErrorMessage(e, "Persistence Error Occured");
        }
    }
}

From source file:net.chrissearle.flickrvote.web.admin.ChallengeAction.java

License:Apache License

@Override
public String execute() throws Exception {
    DateTime start = new DateTime(challenge.getStartDate());

    Date startDate = start.plusHours(FlickrVoteWebConstants.START_CHALLENGE_TIME).toDate();
    Date voteDate = start.plusDays(7).plusHours(FlickrVoteWebConstants.START_VOTE_TIME).toDate();
    Date endDate = start.plusDays(9).plusHours(FlickrVoteWebConstants.END_CHALLENGE_TIME).toDate();

    challengeService.saveChallenge(challenge.getTag(), challenge.getTitle(), challenge.getNotes(), startDate,
            voteDate, endDate);//  w w w. j av  a2s  .co m

    return SUCCESS;
}

From source file:net.ftlines.wicket.fullcalendar.callback.AbstractShiftedEventParam.java

License:Apache License

private DateTime shift(DateTime start) {
    return start.plusDays(daysDelta).plusMinutes(minutesDelta);
}

From source file:net.link.util.test.pkix.PkiTestUtils.java

License:Open Source License

public static X509Certificate generateTestSelfSignedCert(@Nullable URI ocspUri) throws NoSuchAlgorithmException,
        InvalidAlgorithmParameterException, IOException, OperatorCreationException, CertificateException {

    KeyPair keyPair = generateKeyPair();
    DateTime now = new DateTime();
    DateTime notBefore = now.minusDays(1);
    DateTime notAfter = now.plusDays(1);
    return generateCertificate(keyPair.getPublic(), "CN=Test", keyPair.getPrivate(), null, notBefore, notAfter,
            null, true, true, false, ocspUri);
}

From source file:net.longfalcon.newsj.Releases.java

License:Open Source License

public void processReleases() {
    String startDateString = DateUtil.displayDateFormatter.print(System.currentTimeMillis());
    _log.info(String.format("Starting release update process (%s)", startDateString));

    // get site config TODO: use config service
    Site site = siteDAO.getDefaultSite();

    int retcount = 0;

    Directory nzbBaseDir = fileSystemService.getDirectory("/nzbs");

    checkRegexesUptoDate(site.getLatestRegexUrl(), site.getLatestRegexRevision());

    // Stage 0/*from   w  w w.j a  v a  2s. c  o  m*/

    // this is a hack - tx is not working ATM
    TransactionStatus transaction = transactionManager
            .getTransaction(new DefaultTransactionDefinition(TransactionDefinition.PROPAGATION_REQUIRED));

    //
    // Get all regexes for all groups which are to be applied to new binaries
    // in order of how they should be applied
    //
    List<ReleaseRegex> releaseRegexList = releaseRegexDAO.getRegexes(true, "-1", false);
    for (ReleaseRegex releaseRegex : releaseRegexList) {

        String releaseRegexGroupName = releaseRegex.getGroupName();
        _log.info(String.format("Applying regex %d for group %s", releaseRegex.getId(),
                ValidatorUtil.isNull(releaseRegexGroupName) ? "all" : releaseRegexGroupName));

        // compile the regex early, to test them
        String regex = releaseRegex.getRegex();
        Pattern pattern = Pattern.compile(fixRegex(regex), Pattern.CASE_INSENSITIVE); // remove '/' and '/i'

        HashSet<Long> groupMatch = new LinkedHashSet<>();

        //
        // Groups ending in * need to be like matched when getting out binaries for groups and children
        //
        Matcher matcher = _wildcardPattern.matcher(releaseRegexGroupName);
        if (matcher.matches()) {
            releaseRegexGroupName = releaseRegexGroupName.substring(0, releaseRegexGroupName.length() - 1);
            List<Group> groups = groupDAO.findGroupsByName(releaseRegexGroupName);
            for (Group group : groups) {
                groupMatch.add(group.getId());
            }
        } else if (!ValidatorUtil.isNull(releaseRegexGroupName)) {
            Group group = groupDAO.getGroupByName(releaseRegexGroupName);
            if (group != null) {
                groupMatch.add(group.getId());
            }
        }

        List<Binary> binaries = new ArrayList<>();
        if (groupMatch.size() > 0) {
            // Get out all binaries of STAGE0 for current group
            binaries = binaryDAO.findByGroupIdsAndProcStat(groupMatch, Defaults.PROCSTAT_NEW);
        }

        Map<String, String> arrNoPartBinaries = new LinkedHashMap<>();
        DateTime fiveHoursAgo = DateTime.now().minusHours(5);

        // this for loop should probably be a single transaction
        for (Binary binary : binaries) {
            String testMessage = "Test run - Binary Name " + binary.getName();

            Matcher groupRegexMatcher = pattern.matcher(binary.getName());
            if (groupRegexMatcher.find()) {
                String reqIdGroup = null;
                try {
                    reqIdGroup = groupRegexMatcher.group("reqid");
                } catch (IllegalArgumentException e) {
                    _log.debug(e.toString());
                }
                String partsGroup = null;
                try {
                    partsGroup = groupRegexMatcher.group("parts");
                } catch (IllegalArgumentException e) {
                    _log.debug(e.toString());
                }
                String nameGroup = null;
                try {
                    nameGroup = groupRegexMatcher.group("name");
                } catch (Exception e) {
                    _log.debug(e.toString());
                }
                _log.debug(testMessage + " matches with: \n reqId = " + reqIdGroup + " parts = " + partsGroup
                        + " and name = " + nameGroup);

                if ((ValidatorUtil.isNotNull(reqIdGroup) && ValidatorUtil.isNumeric(reqIdGroup))
                        && ValidatorUtil.isNull(nameGroup)) {
                    nameGroup = reqIdGroup;
                }

                if (ValidatorUtil.isNull(nameGroup)) {
                    _log.warn(String.format(
                            "regex applied which didnt return right number of capture groups - %s", regex));
                    _log.warn(String.format("regex matched: reqId = %s parts = %s and name = %s", reqIdGroup,
                            partsGroup, nameGroup));
                    continue;
                }

                // If theres no number of files data in the subject, put it into a release if it was posted to usenet longer than five hours ago.
                if ((ValidatorUtil.isNull(partsGroup) && fiveHoursAgo.isAfter(binary.getDate().getTime()))) {
                    //
                    // Take a copy of the name of this no-part release found. This can be used
                    // next time round the loop to find parts of this set, but which have not yet reached 3 hours.
                    //
                    arrNoPartBinaries.put(nameGroup, "1");
                    partsGroup = "01/01";
                }

                if (ValidatorUtil.isNotNull(nameGroup) && ValidatorUtil.isNotNull(partsGroup)) {

                    if (partsGroup.indexOf('/') == -1) {
                        partsGroup = partsGroup.replaceFirst("(-)|(~)|(\\sof\\s)", "/"); // replace weird parts delimiters
                    }

                    Integer regexCategoryId = releaseRegex.getCategoryId();
                    Integer reqId = null;
                    if (ValidatorUtil.isNotNull(reqIdGroup) && ValidatorUtil.isNumeric(reqIdGroup)) {
                        reqId = Integer.parseInt(reqIdGroup);
                    }

                    //check if post is repost
                    Pattern repostPattern = Pattern.compile("(repost\\d?|re\\-?up)", Pattern.CASE_INSENSITIVE);
                    Matcher binaryNameRepostMatcher = repostPattern.matcher(binary.getName());

                    if (binaryNameRepostMatcher.find()
                            && !nameGroup.toLowerCase().matches("^[\\s\\S]+(repost\\d?|re\\-?up)")) {
                        nameGroup = nameGroup + (" " + binaryNameRepostMatcher.group(1));
                    }

                    String partsStrings[] = partsGroup.split("/");
                    int relpart = Integer.parseInt(partsStrings[0]);
                    int relTotalPart = Integer.parseInt(partsStrings[1]);

                    binary.setRelName(nameGroup.replace("_", " "));
                    binary.setRelPart(relpart);
                    binary.setRelTotalPart(relTotalPart);
                    binary.setProcStat(Defaults.PROCSTAT_TITLEMATCHED);
                    binary.setCategoryId(regexCategoryId);
                    binary.setRegexId(releaseRegex.getId());
                    binary.setReqId(reqId);
                    binaryDAO.updateBinary(binary);

                }
            }
        }

    }

    transactionManager.commit(transaction);

    // this is a hack - tx is not working ATM
    transaction = transactionManager
            .getTransaction(new DefaultTransactionDefinition(TransactionDefinition.PROPAGATION_REQUIRED));
    //
    // Move all binaries from releases which have the correct number of files on to the next stage.
    //
    _log.info("Stage 2");
    List<MatchedReleaseQuery> matchedReleaseQueries = binaryDAO
            .findBinariesByProcStatAndTotalParts(Defaults.PROCSTAT_TITLEMATCHED);
    matchedReleaseQueries = combineMatchedQueries(matchedReleaseQueries);

    int siteMinFilestoFormRelease = site.getMinFilesToFormRelease();

    for (MatchedReleaseQuery matchedReleaseQuery : matchedReleaseQueries) {
        retcount++;

        //
        // Less than the site permitted number of files in a release. Dont discard it, as it may
        // be part of a set being uploaded.
        //
        int minFiles = siteMinFilestoFormRelease;
        String releaseName = matchedReleaseQuery.getReleaseName();
        long matchedReleaseQueryGroup = matchedReleaseQuery.getGroup();
        Long matchedReleaseQueryNumberOfBinaries = matchedReleaseQuery.getNumberOfBinaries();
        int matchecReleaseTotalParts = matchedReleaseQuery.getReleaseTotalParts();
        String fromName = matchedReleaseQuery.getFromName();
        Integer reqId = matchedReleaseQuery.getReqId();

        Group group = groupDAO.findGroupByGroupId(matchedReleaseQueryGroup);
        if (group != null && group.getMinFilesToFormRelease() != null) {
            minFiles = group.getMinFilesToFormRelease();
        }

        if (matchedReleaseQueryNumberOfBinaries < minFiles) {

            _log.warn(String.format("Number of files in release %s less than site/group setting (%s/%s)",
                    releaseName, matchedReleaseQueryNumberOfBinaries, minFiles));

            binaryDAO.updateBinaryIncrementProcAttempts(releaseName, Defaults.PROCSTAT_TITLEMATCHED,
                    matchedReleaseQueryGroup, fromName);
        } else if (matchedReleaseQueryNumberOfBinaries >= matchecReleaseTotalParts) {
            // Check that the binary is complete
            List<Binary> releaseBinaryList = binaryDAO.findBinariesByReleaseNameProcStatGroupIdFromName(
                    releaseName, Defaults.PROCSTAT_TITLEMATCHED, matchedReleaseQueryGroup, fromName);

            boolean incomplete = false;
            for (Binary binary : releaseBinaryList) {
                long partsCount = partDAO.countPartsByBinaryId(binary.getId());
                if (partsCount < binary.getTotalParts()) {
                    float percentComplete = ((float) partsCount / (float) binary.getTotalParts()) * 100;
                    _log.warn(String.format("binary %s from %s has missing parts = %s/%s (%s%% complete)",
                            binary.getId(), releaseName, partsCount, binary.getTotalParts(), percentComplete));

                    // Allow to binary to release if posted to usenet longer than four hours ago and we still don't have all the parts
                    DateTime fourHoursAgo = DateTime.now().minusHours(4);
                    if (fourHoursAgo.isAfter(new DateTime(binary.getDate()))) {
                        _log.info("allowing incomplete binary " + binary.getId());
                    } else {
                        incomplete = true;
                    }
                }
            }

            if (incomplete) {
                _log.warn(String.format("Incorrect number of parts %s-%s-%s", releaseName,
                        matchedReleaseQueryNumberOfBinaries, matchecReleaseTotalParts));
                binaryDAO.updateBinaryIncrementProcAttempts(releaseName, Defaults.PROCSTAT_TITLEMATCHED,
                        matchedReleaseQueryGroup, fromName);
            }

            //
            // Right number of files, but see if the binary is a allfilled/reqid post, in which case it needs its name looked up
            // TODO: Does this even work anymore?
            else if (ValidatorUtil.isNotNull(site.getReqIdUrl()) && ValidatorUtil.isNotNull(reqId)) {

                //
                // Try and get the name using the group
                //
                _log.info("Looking up " + reqId + " in " + group.getName() + "...");
                String newTitle = getReleaseNameForReqId(site.getReqIdUrl(), group, reqId, true);

                //
                // if the feed/group wasnt supported by the scraper, then just use the release name as the title.
                //
                if (ValidatorUtil.isNull(newTitle) || newTitle.equals("no feed")) {
                    newTitle = releaseName;
                    _log.warn("Group not supported");
                }

                //
                // Valid release with right number of files and title now, so move it on
                //
                if (ValidatorUtil.isNotNull(newTitle)) {
                    binaryDAO.updateBinaryNameAndStatus(newTitle, Defaults.PROCSTAT_READYTORELEASE, releaseName,
                            Defaults.PROCSTAT_TITLEMATCHED, matchedReleaseQueryGroup, fromName);
                } else {
                    //
                    // Item not found, if the binary was added to the index yages ago, then give up.
                    //
                    Timestamp timestamp = binaryDAO.findMaxDateAddedBinaryByReleaseNameProcStatGroupIdFromName(
                            releaseName, Defaults.PROCSTAT_TITLEMATCHED, matchedReleaseQueryGroup, fromName);
                    DateTime maxAddedDate = new DateTime(timestamp);
                    DateTime twoDaysAgo = DateTime.now().minusDays(2);

                    if (maxAddedDate.isBefore(twoDaysAgo)) {
                        binaryDAO.updateBinaryNameAndStatus(releaseName,
                                Defaults.PROCSTAT_NOREQIDNAMELOOKUPFOUND, releaseName,
                                Defaults.PROCSTAT_TITLEMATCHED, matchedReleaseQueryGroup, fromName);
                        _log.warn("Not found in 48 hours");
                    }
                }
            } else {
                binaryDAO.updateBinaryNameAndStatus(releaseName, Defaults.PROCSTAT_READYTORELEASE, releaseName,
                        Defaults.PROCSTAT_TITLEMATCHED, matchedReleaseQueryGroup, fromName);

            }
        } else {
            //
            // Theres less than the expected number of files, so update the attempts and move on.
            //

            _log.info(String.format("Incorrect number of files for %s (%d/%d)", releaseName,
                    matchedReleaseQueryNumberOfBinaries, matchecReleaseTotalParts));
            binaryDAO.updateBinaryIncrementProcAttempts(releaseName, Defaults.PROCSTAT_TITLEMATCHED,
                    matchedReleaseQueryGroup, fromName);
        }

        if (retcount % 10 == 0) {
            _log.info(String.format("-processed %d binaries stage two", retcount));
        }

    }
    transactionManager.commit(transaction);

    retcount = 0;
    int nfoCount = 0;

    // this is a hack - tx is not working ATM
    transaction = transactionManager
            .getTransaction(new DefaultTransactionDefinition(TransactionDefinition.PROPAGATION_REQUIRED));
    //
    // Get out all distinct relname, group from binaries of STAGE2
    //
    _log.info("Stage 3");
    List<MatchedReleaseQuery> readyReleaseQueries = binaryDAO
            .findBinariesByProcStatAndTotalParts(Defaults.PROCSTAT_READYTORELEASE);
    readyReleaseQueries = combineMatchedQueries(readyReleaseQueries);
    for (MatchedReleaseQuery readyReleaseQuery : readyReleaseQueries) {
        retcount++;

        String releaseName = readyReleaseQuery.getReleaseName();
        int numParts = readyReleaseQuery.getReleaseTotalParts();
        long binaryCount = readyReleaseQuery.getNumberOfBinaries();
        long groupId = readyReleaseQuery.getGroup();
        //
        // Get the last post date and the poster name from the binary
        //
        String fromName = readyReleaseQuery.getFromName();
        Timestamp timestamp = binaryDAO.findMaxDateAddedBinaryByReleaseNameProcStatGroupIdFromName(releaseName,
                Defaults.PROCSTAT_READYTORELEASE, groupId, fromName);
        DateTime addedDate = new DateTime(timestamp);

        //
        // Get all releases with the same name with a usenet posted date in a +1-1 date range.
        //
        Date oneDayBefore = addedDate.minusDays(1).toDate();
        Date oneDayAfter = addedDate.plusDays(1).toDate();
        List<Release> relDupes = releaseDAO.findReleasesByNameAndDateRange(releaseName, oneDayBefore,
                oneDayAfter);

        if (!relDupes.isEmpty()) {
            binaryDAO.updateBinaryNameAndStatus(releaseName, Defaults.PROCSTAT_DUPLICATE, releaseName,
                    Defaults.PROCSTAT_READYTORELEASE, groupId, fromName);
            continue;
        }

        //
        // Get total size of this release
        // Done in a big OR statement, not an IN as the mysql binaryID index on parts table
        // was not being used.
        //

        // SM: TODO this should be revisited, using hb mappings

        long totalSize = 0;
        int regexAppliedCategoryId = 0;
        long regexIdUsed = 0;
        int reqIdUsed = 0;
        int relTotalParts = 0;
        float relCompletion;
        List<Binary> binariesForSize = binaryDAO.findBinariesByReleaseNameProcStatGroupIdFromName(releaseName,
                Defaults.PROCSTAT_READYTORELEASE, groupId, fromName);

        long relParts = 0;
        for (Binary binary : binariesForSize) {
            if (ValidatorUtil.isNotNull(binary.getCategoryId()) && regexAppliedCategoryId == 0) {
                regexAppliedCategoryId = binary.getCategoryId();
            }

            if (ValidatorUtil.isNotNull(binary.getRegexId()) && regexIdUsed == 0) {
                regexIdUsed = binary.getRegexId();
            }

            if (ValidatorUtil.isNotNull(binary.getReqId()) && reqIdUsed == 0) {
                reqIdUsed = binary.getReqId();
            }

            relTotalParts += binary.getTotalParts();
            relParts += partDAO.countPartsByBinaryId(binary.getId());
            totalSize += partDAO.sumPartsSizeByBinaryId(binary.getId());
        }
        relCompletion = ((float) relParts / (float) relTotalParts) * 100f;

        //
        // Insert the release
        //

        String releaseGuid = UUID.randomUUID().toString();
        int categoryId;
        Category category = null;
        Long regexId;
        Integer reqId;
        if (regexAppliedCategoryId == 0) {
            categoryId = categoryService.determineCategory(groupId, releaseName);
        } else {
            categoryId = regexAppliedCategoryId;
        }
        if (categoryId > 0) {
            category = categoryService.getCategory(categoryId);
        }

        if (regexIdUsed == 0) {
            regexId = null;
        } else {
            regexId = regexIdUsed;
        }

        if (reqIdUsed == 0) {
            reqId = null;
        } else {
            reqId = reqIdUsed;
        }

        //Clean release name of '#', '@', '$', '%', '^', '', '', '', ''
        String cleanReleaseName = releaseName.replaceAll("[^A-Za-z0-9-_\\ \\.]+", "");
        Release release = new Release();
        release.setName(cleanReleaseName);
        release.setSearchName(cleanReleaseName);
        release.setTotalpart(numParts);
        release.setGroupId(groupId);
        release.setAddDate(new Date());
        release.setGuid(releaseGuid);
        release.setCategory(category);
        release.setRegexId(regexId);
        release.setRageId((long) -1);
        release.setPostDate(addedDate.toDate());
        release.setFromName(fromName);
        release.setSize(totalSize);
        release.setReqId(reqId);
        release.setPasswordStatus(site.getCheckPasswordedRar() == 1 ? -1 : 0); // magic constants
        release.setCompletion(relCompletion);
        releaseDAO.updateRelease(release);
        long releaseId = release.getId();
        _log.info("Added release " + cleanReleaseName);

        //
        // Tag every binary for this release with its parent release id
        // remove the release name from the binary as its no longer required
        //
        binaryDAO.updateBinaryNameStatusReleaseID("", Defaults.PROCSTAT_RELEASED, releaseId, releaseName,
                Defaults.PROCSTAT_READYTORELEASE, groupId, fromName);

        //
        // Find an .nfo in the release
        //
        ReleaseNfo releaseNfo = nfo.determineReleaseNfo(release);
        if (releaseNfo != null) {
            nfo.addReleaseNfo(releaseNfo);
            nfoCount++;
        }

        //
        // Write the nzb to disk
        //
        nzb.writeNZBforReleaseId(release, nzbBaseDir, true);

        if (retcount % 5 == 0) {
            _log.info("-processed " + retcount + " releases stage three");
        }

    }

    _log.info("Found " + nfoCount + " nfos in " + retcount + " releases");

    //
    // Process nfo files
    //
    if (site.getLookupNfo() != 1) {
        _log.info("Site config (site.lookupnfo) prevented retrieving nfos");
    } else {
        nfo.processNfoFiles(site.getLookupImdb(), site.getLookupTvRage());
    }

    //
    // Lookup imdb if enabled
    //
    if (site.getLookupImdb() == 1) {
        movieService.processMovieReleases();
    }

    //
    // Lookup music if enabled
    //
    if (site.getLookupMusic() == 1) {
        musicService.processMusicReleases();
    }

    //
    // Lookup games if enabled
    //
    if (site.getLookupGames() == 1) {
        gameService.processConsoleReleases();
    }

    //
    // Check for passworded releases
    //
    if (site.getCheckPasswordedRar() != 1) {
        _log.info("Site config (site.checkpasswordedrar) prevented checking releases are passworded");
    } else {
        processPasswordedReleases(true);
    }

    //
    // Process all TV related releases which will assign their series/episode/rage data
    //
    tvRageService.processTvReleases(site.getLookupTvRage() == 1);

    //
    // Get the current datetime again, as using now() in the housekeeping queries prevents the index being used.
    //
    DateTime now = new DateTime();

    //
    // Tidy away any binaries which have been attempted to be grouped into
    // a release more than x times (SM: or is it days?)
    //
    int attemtpGroupBinDays = site.getAttemtpGroupBinDays();
    _log.info(String.format("Tidying away binaries which cant be grouped after %s days", attemtpGroupBinDays));

    DateTime maxGroupBinDays = now.minusDays(attemtpGroupBinDays);
    binaryDAO.updateProcStatByProcStatAndDate(Defaults.PROCSTAT_WRONGPARTS, Defaults.PROCSTAT_NEW,
            maxGroupBinDays.toDate());

    //
    // Delete any parts and binaries which are older than the site's retention days
    //
    int maxRetentionDays = site.getRawRetentionDays();
    DateTime maxRetentionDate = now.minusDays(maxRetentionDays);
    _log.info(String.format("Deleting parts which are older than %d days", maxRetentionDays));
    partDAO.deletePartByDate(maxRetentionDate.toDate());

    _log.info(String.format("Deleting binaries which are older than %d days", maxRetentionDays));
    binaryDAO.deleteBinaryByDate(maxRetentionDate.toDate());

    //
    // Delete any releases which are older than site's release retention days
    //
    int releaseretentiondays = site.getReleaseRetentionDays();
    if (releaseretentiondays != 0) {
        _log.info("Determining any releases past retention to be deleted.");

        DateTime maxReleaseRetentionDate = DateTime.now().minusDays(releaseretentiondays);
        List<Release> releasesToDelete = releaseDAO.findReleasesBeforeDate(maxReleaseRetentionDate.toDate());
        for (Iterator<Release> iterator = releasesToDelete.iterator(); iterator.hasNext();) {
            Release release = iterator.next();
            releaseDAO.deleteRelease(release);
        }
    }
    transaction.flush(); // may be unneeded
    transactionManager.commit(transaction);

    _log.info(String.format("Processed %d releases", retcount));
    if (!transaction.isCompleted()) {
        throw new IllegalStateException("Transaction is not completed or rolled back.");
    }
    //return retcount;
}

From source file:net.lshift.diffa.adapter.scanning.TimeRangeConstraint.java

License:Apache License

private static DateTime maybeParse(String dateStr, boolean isEnd) {
    if (dateStr == null) {
        return null;
    } else {/* w w  w. j a  v  a2 s.c  o  m*/
        try {
            // Attempt to parse a yyyy-MM-dd format and widen
            DateTime date = dateParser.parseDateTime(dateStr);
            if (isEnd)
                return date.plusDays(1).minusMillis(1);
            else
                return date;
        } catch (IllegalArgumentException e) {
            // The format is not yyyy-MM-dd, so don't widen
            return dateTimeParser.parseDateTime(dateStr);
        }
    }
}

From source file:net.ripe.rpki.commons.crypto.cms.manifest.ManifestCms.java

License:BSD License

private void checkManifestValidityTimes(ValidationOptions options, ValidationResult result) {
    DateTime now = new DateTime();
    DateTime nextUpdateTime = getNextUpdateTime();

    if (now.isAfter(nextUpdateTime)) {
        if (nextUpdateTime.plusDays(options.getMaxStaleDays()).isAfter(now)) {
            result.warnIfTrue(true, ValidationString.MANIFEST_PAST_NEXT_UPDATE_TIME);
        } else {/*from   ww w .  j av  a 2  s.c  o m*/
            result.rejectIfTrue(true, ValidationString.MANIFEST_PAST_NEXT_UPDATE_TIME);
        }
    }
}

From source file:net.ripe.rpki.commons.crypto.crl.X509CrlValidator.java

License:BSD License

private void checkNextUpdate(X509Crl crl) {
    DateTime now = new DateTime();
    DateTime nextUpdateTime = crl.getNextUpdateTime();

    if (now.isAfter(nextUpdateTime)) {
        if (nextUpdateTime.plusDays(options.getMaxStaleDays()).isAfter(now)) {
            result.warnIfTrue(true, ValidationString.CRL_NEXT_UPDATE_BEFORE_NOW, nextUpdateTime.toString());
        } else {//w w  w.  ja va 2 s  . c  om
            result.rejectIfTrue(true, ValidationString.CRL_NEXT_UPDATE_BEFORE_NOW, nextUpdateTime.toString());
        }
    }

}