Example usage for org.hibernate.criterion Restrictions lt

List of usage examples for org.hibernate.criterion Restrictions lt

Introduction

In this page you can find the example usage for org.hibernate.criterion Restrictions lt.

Prototype

public static SimpleExpression lt(String propertyName, Object value) 

Source Link

Document

Apply a "less than" constraint to the named property.

Usage

From source file:com.denimgroup.threadfix.data.dao.hibernate.VulnerabilitySearchCriteriaConstructor.java

License:Mozilla Public License

/**
 * Adds an age restriction on finding open time to the query criteria.
 *
 * Only applies when the request supplies a "Less" or "More" modifier together
 * with a positive day count. "More" keeps findings opened before the cutoff
 * date (i.e. older than N days); "Less" keeps findings opened after it.
 */
private void addAgeRestrictions() {
    // Limit scanner if present
    if (parameters.getDaysOldModifier() != null
            && (parameters.getDaysOldModifier().equals("Less")
                    || parameters.getDaysOldModifier().equals("More"))
            && parameters.getDaysOld() != null && parameters.getDaysOld() > 0) {

        // Cutoff date: N days before now.
        Calendar targetDate = Calendar.getInstance();
        targetDate.add(Calendar.DAY_OF_YEAR, -parameters.getDaysOld());

        if (parameters.getDaysOldModifier().equals("More")) {
            // Older than N days: opened strictly before the cutoff.
            criteria.add(Restrictions.lt("openTime", targetDate));
            LOG.debug("Set age restriction to before " + parameters.getDaysOld() + " days ago.");
        } else if (parameters.getDaysOldModifier().equals("Less")) {
            // Newer than N days: opened strictly after the cutoff.
            criteria.add(Restrictions.gt("openTime", targetDate));
            LOG.debug("Set age restriction to after " + parameters.getDaysOld() + " days ago.");
        }
    }
}

From source file:com.eharmony.matching.seeking.translator.hibernate.HibernateQueryTranslator.java

License:Apache License

/**
 * Translates a "less than" comparison on the given field into a Hibernate
 * {@link Criterion}.
 *
 * @param fieldName the property to compare
 * @param value the upper (exclusive) bound
 * @return a criterion matching entities where fieldName &lt; value
 */
@Override
public Criterion lt(String fieldName, Object value) {
    final Criterion lessThan = Restrictions.lt(fieldName, value);
    return lessThan;
}

From source file:com.eryansky.common.orm.core.hibernate.restriction.support.LtRestriction.java

License:Apache License

/**
 * Builds a "less than" restriction for the named property.
 *
 * @param propertyName the entity property to restrict
 * @param value the exclusive upper bound
 * @return a criterion matching propertyName &lt; value
 */
public Criterion build(String propertyName, Object value) {
    final Criterion restriction = Restrictions.lt(propertyName, value);
    return restriction;
}

From source file:com.eucalyptus.autoscaling.activities.PersistenceScalingActivities.java

License:Open Source License

/**
 * Deletes scaling activities for the given owner created before the supplied
 * timestamp, returning how many were removed.
 *
 * @param ownerFullName the owner to match, or null
 * @param createdBefore epoch milliseconds; activities created earlier are deleted
 * @return the number of deleted activities
 * @throws AutoScalingMetadataException on persistence failure
 */
@Override
public int deleteByCreatedAge(@Nullable final OwnerFullName ownerFullName, final long createdBefore)
        throws AutoScalingMetadataException {
    // Match rows whose creation timestamp is strictly before the cutoff.
    final Criterion createdBeforeCriterion = Restrictions.lt("creationTimestamp", new Date(createdBefore));
    return persistenceSupport
            .deleteByExample(ScalingActivity.withOwner(ownerFullName), createdBeforeCriterion,
                    Collections.<String, String>emptyMap())
            .size();
}

From source file:com.eucalyptus.blockstorage.async.SnapshotCreator.java

License:Open Source License

/**
 * Finds the most recent previous snapshot of this snapshot's volume that is
 * eligible to serve as the parent of a snapshot delta (incremental snapshot).
 *
 * Returns null whenever the current snapshot should be uploaded as a full
 * snapshot instead: no eligible parent exists, the chain since the last full
 * snapshot already holds {@code maxDeltas} deltas, the chain is inconsistent,
 * or the lookup itself failed (failure is logged and swallowed deliberately).
 *
 * @param maxDeltas maximum number of deltas permitted since the last full snapshot
 * @return the parent snapshot to base a delta on, or null to force a full snapshot
 * @throws Exception if the current snapshot record cannot be loaded
 */
private SnapshotInfo fetchPreviousSnapshot(int maxDeltas) throws Exception {

    SnapshotInfo prevSnapToAssign = null;
    SnapshotInfo currSnap = Transactions.find(new SnapshotInfo(snapshotId));

    try (TransactionResource tr = Entities.transactionFor(SnapshotInfo.class)) {

        // Find the most recent snapshot that is not in one of the states that
        // is ineligible to use for creating a snap delta.
        SnapshotInfo prevEligibleSnapSearch = new SnapshotInfo();
        prevEligibleSnapSearch.setVolumeId(currSnap.getVolumeId());
        Criteria search = Entities.createCriteria(SnapshotInfo.class);
        search.add(Example.create(prevEligibleSnapSearch).enableLike(MatchMode.EXACT));
        // Only snapshots started strictly before the current one qualify.
        search.add(Restrictions.and(StorageProperties.SNAPSHOT_DELTA_GENERATION_CRITERION,
                Restrictions.lt("startTime", currSnap.getStartTime())));
        search.addOrder(Order.desc("startTime"));
        search.setReadOnly(true);
        search.setMaxResults(1); // only return the latest one

        List<SnapshotInfo> prevEligibleSnapList = (List<SnapshotInfo>) search.list();

        // Tracks whether the transaction was already committed on the early path
        // below, so we don't commit twice.
        boolean committed = false;

        if (prevEligibleSnapList != null && prevEligibleSnapList.size() > 0
                && (prevSnapToAssign = prevEligibleSnapList.get(0)) != null) {
            // Found an eligible previous snapshot to use as a parent for this 
            // snapshot, if we make it a delta.
            if (prevSnapToAssign.getSnapshotLocation() != null && prevSnapToAssign.getIsOrigin() != null) {
                LOG.info(this.volumeId
                        + " has been snapshotted and uploaded before. Most recent such snapshot is "
                        + prevSnapToAssign.getSnapshotId());

                // Get all the restorable snapshots for this volume, earlier than the current snapshot
                SnapshotInfo prevRestorableSnapsSearch = new SnapshotInfo();
                prevRestorableSnapsSearch.setVolumeId(currSnap.getVolumeId());
                search = Entities.createCriteria(SnapshotInfo.class);
                search.add(Example.create(prevRestorableSnapsSearch).enableLike(MatchMode.EXACT));
                search.add(Restrictions.and(StorageProperties.SNAPSHOT_DELTA_RESTORATION_CRITERION,
                        Restrictions.lt("startTime", currSnap.getStartTime())));
                search.addOrder(Order.desc("startTime"));
                search.setReadOnly(true);
                List<SnapshotInfo> prevRestorableSnapsList = (List<SnapshotInfo>) search.list();
                // Commit before the potentially slow chain walk below.
                tr.commit();
                committed = true;

                // Get the snap chain ending with the previous snapshot (not the current)
                List<SnapshotInfo> snapChain = blockStorageUtilSvc.getSnapshotChain(prevRestorableSnapsList,
                        prevSnapToAssign.getSnapshotId());
                int numDeltas = 0;
                if (snapChain == null || snapChain.size() == 0) {
                    // This should never happen. The chain should always include the 
                    // parent (previous) snapshot we already found. But create it as a 
                    // full snapshot instead of failing, to account for the unknown case
                    // that might not prevent an OK full snapshot.
                    LOG.error("Did not find the current snapshot's previous snapshot "
                            + prevSnapToAssign.getSnapshotId() + " in the restorable snapshots list. "
                            + "The current snapshot " + currSnap.getSnapshotId()
                            + " will be created as a full snapshot.");
                } else if (snapChain.get(0).getPreviousSnapshotId() != null) {
                    // This should never happen. The first snapshot in the chain
                    // should always be a full snapshot. But create it as a 
                    // full snapshot instead of failing, to account for the unknown case
                    // that might not prevent an OK full snapshot.
                    LOG.error("First snapshot " + snapChain.get(0).getSnapshotId() + " in the chain of "
                            + snapChain.size() + " snapshots is not a full snapshot. The current snapshot "
                            + currSnap.getSnapshotId() + " will be created as a full snapshot.");
                } else {
                    // Chain is [full, delta, delta, ...]; everything after the head is a delta.
                    numDeltas = snapChain.size() - 1;
                    LOG.info(this.volumeId + " has " + numDeltas
                            + " delta(s) since the last full checkpoint. Max limit is " + maxDeltas);
                    if (numDeltas < maxDeltas) {
                        return prevSnapToAssign;
                    }
                }
            } else {
                LOG.info(this.volumeId
                        + " has not been snapshotted and/or uploaded after the support for incremental snapshots was added");
            }
        } else {
            LOG.info(this.volumeId + " has no prior active snapshots in the system");
        }
        if (!committed) {
            tr.commit();
        }
    } catch (Exception e) {
        LOG.warn("Failed to look up previous snapshots for " + this.volumeId, e); // return null on exception, forces entire snapshot to get uploaded
    }
    return null;
}

From source file:com.eucalyptus.blockstorage.util.BlockStorageUtil.java

License:Open Source License

/**
 * Builds a criterion matching resources whose {@code deletionTime} is older
 * than the given expiration window, i.e. deleted resources that have expired.
 *
 * NOTE(review): the method name contains a typo ("Expried" for "Expired");
 * it is kept as-is for compatibility with existing callers.
 *
 * @param deletedResourceExpiration expiration window in minutes (must be non-null;
 *                                  a null value throws NullPointerException on unboxing)
 * @return a restriction selecting entities deleted more than the window ago
 */
public static final Criterion getExpriedCriterion(Integer deletedResourceExpiration) {
    // MINUTES.toMillis is the direct form of MILLISECONDS.convert(x, MINUTES)
    // and avoids the easy-to-swap two-argument convert call.
    final long expirationMillis = TimeUnit.MINUTES.toMillis(deletedResourceExpiration);
    return Restrictions.lt("deletionTime", new Date(System.currentTimeMillis() - expirationMillis));
}

From source file:com.eucalyptus.cloudwatch.common.internal.domain.metricdata.MetricManager.java

License:Open Source License

/**
 * Batched variant of metric-statistics retrieval: answers many
 * {@link GetMetricStatisticsParams} requests with one criteria query per
 * backing metric entity class.
 *
 * Requests are grouped by entity class; each group is queried with a single
 * disjunction of per-request conjunctions, additionally narrowed by the
 * overall [minDate, maxDate) timestamp window, then rows are scrolled and
 * aggregated into per-request result maps.
 *
 * @param getMetricStatisticsParamses the requests to answer; neither the list
 *                                    nor any element may be null
 * @return one result collection per request, in the same order as the input
 *         (empty collection where a request matched nothing)
 * @throws IllegalArgumentException if the list or any element is null, or an
 *                                  element fails validation
 */
public static List<Collection<MetricStatistics>> getManyMetricStatistics(
        List<GetMetricStatisticsParams> getMetricStatisticsParamses) {
    if (getMetricStatisticsParamses == null)
        throw new IllegalArgumentException("getMetricStatisticsParamses can not be null");
    Date now = new Date();
    Map<GetMetricStatisticsParams, Collection<MetricStatistics>> resultMap = Maps.newHashMap();
    // Group requests by the entity class that stores their metric type/hash.
    Multimap<Class, GetMetricStatisticsParams> hashGroupMap = LinkedListMultimap.create();
    for (GetMetricStatisticsParams getMetricStatisticsParams : getMetricStatisticsParamses) {
        if (getMetricStatisticsParams == null)
            throw new IllegalArgumentException("getMetricStatisticsParams can not be null");
        getMetricStatisticsParams.validate(now);
        Class metricEntityClass = MetricEntityFactory.getClassForEntitiesGet(
                getMetricStatisticsParams.getMetricType(), getMetricStatisticsParams.getDimensionHash());
        hashGroupMap.put(metricEntityClass, getMetricStatisticsParams);
    }
    for (Class metricEntityClass : hashGroupMap.keySet()) {
        try (final TransactionResource db = Entities.transactionFor(metricEntityClass)) {
            // set some global criteria to start (for narrowing?)
            Date minDate = null;
            Date maxDate = null;
            // One OR-branch per request in this group.
            Junction disjunction = Restrictions.disjunction();
            Map<GetMetricStatisticsParams, TreeMap<GetMetricStatisticsAggregationKey, MetricStatistics>> multiAggregationMap = Maps
                    .newHashMap();
            for (GetMetricStatisticsParams getMetricStatisticsParams : hashGroupMap.get(metricEntityClass)) {
                multiAggregationMap.put(getMetricStatisticsParams,
                        new TreeMap<GetMetricStatisticsAggregationKey, MetricStatistics>(
                                GetMetricStatisticsAggregationKey.COMPARATOR_WITH_NULLS.INSTANCE));
                // Per-request filter: [startTime, endTime) window plus identity fields.
                Junction conjunction = Restrictions.conjunction();
                conjunction = conjunction
                        .add(Restrictions.lt("timestamp", getMetricStatisticsParams.getEndTime()));
                conjunction = conjunction
                        .add(Restrictions.ge("timestamp", getMetricStatisticsParams.getStartTime()));
                conjunction = conjunction
                        .add(Restrictions.eq("accountId", getMetricStatisticsParams.getAccountId()));
                conjunction = conjunction
                        .add(Restrictions.eq("metricName", getMetricStatisticsParams.getMetricName()));
                conjunction = conjunction
                        .add(Restrictions.eq("namespace", getMetricStatisticsParams.getNamespace()));
                conjunction = conjunction.add(
                        Restrictions.eq("dimensionHash", hash(getMetricStatisticsParams.getDimensionMap())));
                if (getMetricStatisticsParams.getUnits() != null) {
                    conjunction = conjunction
                            .add(Restrictions.eq("units", getMetricStatisticsParams.getUnits()));
                }
                disjunction = disjunction.add(conjunction);
                // Track the widest time window across the group for the outer narrowing below.
                if (minDate == null || getMetricStatisticsParams.getStartTime().before(minDate)) {
                    minDate = getMetricStatisticsParams.getStartTime();
                }
                if (maxDate == null || getMetricStatisticsParams.getEndTime().after(maxDate)) {
                    maxDate = getMetricStatisticsParams.getEndTime();
                }
            }
            Criteria criteria = Entities.createCriteria(metricEntityClass);
            criteria = criteria.add(Restrictions.lt("timestamp", maxDate));
            criteria = criteria.add(Restrictions.ge("timestamp", minDate));
            criteria = criteria.add(disjunction);

            // Aggregate in the database: min/max/sum per grouped key.
            ProjectionList projectionList = Projections.projectionList();
            projectionList.add(Projections.max("sampleMax"));
            projectionList.add(Projections.min("sampleMin"));
            projectionList.add(Projections.sum("sampleSize"));
            projectionList.add(Projections.sum("sampleSum"));
            projectionList.add(Projections.groupProperty("units"));
            projectionList.add(Projections.groupProperty("timestamp"));
            projectionList.add(Projections.groupProperty("accountId"));
            projectionList.add(Projections.groupProperty("metricName"));
            projectionList.add(Projections.groupProperty("metricType"));
            projectionList.add(Projections.groupProperty("namespace"));
            projectionList.add(Projections.groupProperty("dimensionHash"));
            criteria.setProjection(projectionList);
            criteria.addOrder(Order.asc("timestamp"));

            // Scroll to keep memory bounded; a row may match several requests.
            ScrollableResults results = criteria.setCacheMode(CacheMode.IGNORE).scroll(ScrollMode.FORWARD_ONLY);
            while (results.next()) {
                MetricEntity me = getMetricEntity(results);
                for (GetMetricStatisticsParams getMetricStatisticsParams : hashGroupMap
                        .get(metricEntityClass)) {
                    if (metricDataMatches(getMetricStatisticsParams, me)) {
                        Map<GetMetricStatisticsAggregationKey, MetricStatistics> aggregationMap = multiAggregationMap
                                .get(getMetricStatisticsParams);
                        GetMetricStatisticsAggregationKey key = new GetMetricStatisticsAggregationKey(me,
                                getMetricStatisticsParams.getStartTime(), getMetricStatisticsParams.getPeriod(),
                                getMetricStatisticsParams.getDimensionHash());
                        MetricStatistics item = new MetricStatistics(me,
                                getMetricStatisticsParams.getStartTime(), getMetricStatisticsParams.getPeriod(),
                                getMetricStatisticsParams.getDimensions());
                        if (!aggregationMap.containsKey(key)) {
                            aggregationMap.put(key, item);
                        } else {
                            // Merge samples that fall into the same aggregation key.
                            MetricStatistics totalSoFar = aggregationMap.get(key);
                            totalSoFar.setSampleMax(Math.max(item.getSampleMax(), totalSoFar.getSampleMax()));
                            totalSoFar.setSampleMin(Math.min(item.getSampleMin(), totalSoFar.getSampleMin()));
                            totalSoFar.setSampleSize(totalSoFar.getSampleSize() + item.getSampleSize());
                            totalSoFar.setSampleSum(totalSoFar.getSampleSum() + item.getSampleSum());
                        }
                    }
                }
            }
            for (GetMetricStatisticsParams getMetricStatisticsParams : multiAggregationMap.keySet()) {
                resultMap.put(getMetricStatisticsParams,
                        multiAggregationMap.get(getMetricStatisticsParams).values());
            }
        }
    }
    // Preserve input order; substitute an empty list for unmatched requests.
    List<Collection<MetricStatistics>> resultList = Lists.newArrayList();
    for (GetMetricStatisticsParams getMetricStatisticsParams : getMetricStatisticsParamses) {
        if (resultMap.get(getMetricStatisticsParams) == null) {
            resultList.add(new ArrayList<MetricStatistics>());
        } else {
            resultList.add(resultMap.get(getMetricStatisticsParams));
        }
    }
    return resultList;
}

From source file:com.eucalyptus.cloudwatch.common.internal.domain.metricdata.MetricManager.java

License:Open Source License

/**
 * Retrieves aggregated metric statistics for a single request.
 *
 * Queries the entity class backing the request's metric type/dimension hash
 * over the [startTime, endTime) window, lets the database aggregate
 * min/max/sum grouped by units and timestamp, then folds the scrolled rows
 * into one {@link MetricStatistics} per aggregation key.
 *
 * @param getMetricStatisticsParams the validated request; must not be null
 * @return the aggregated statistics (possibly empty)
 * @throws IllegalArgumentException if the request is null or fails validation
 */
public static Collection<MetricStatistics> getMetricStatistics(
        GetMetricStatisticsParams getMetricStatisticsParams) {
    if (getMetricStatisticsParams == null)
        throw new IllegalArgumentException("getMetricStatisticsParams can not be null");
    Date now = new Date();
    getMetricStatisticsParams.validate(now);
    Class metricEntityClass = MetricEntityFactory.getClassForEntitiesGet(
            getMetricStatisticsParams.getMetricType(), getMetricStatisticsParams.getDimensionHash());
    Map<GetMetricStatisticsAggregationKey, MetricStatistics> aggregationMap = new TreeMap<GetMetricStatisticsAggregationKey, MetricStatistics>(
            GetMetricStatisticsAggregationKey.COMPARATOR_WITH_NULLS.INSTANCE);
    try (final TransactionResource db = Entities.transactionFor(metricEntityClass)) {
        // Identity filters plus the half-open [startTime, endTime) window.
        Criteria criteria = Entities.createCriteria(metricEntityClass);
        criteria = criteria.add(Restrictions.eq("accountId", getMetricStatisticsParams.getAccountId()));
        criteria = criteria.add(Restrictions.eq("metricName", getMetricStatisticsParams.getMetricName()));
        criteria = criteria.add(Restrictions.eq("namespace", getMetricStatisticsParams.getNamespace()));
        criteria = criteria.add(Restrictions.lt("timestamp", getMetricStatisticsParams.getEndTime()));
        criteria = criteria.add(Restrictions.ge("timestamp", getMetricStatisticsParams.getStartTime()));
        criteria = criteria.add(Restrictions.eq("dimensionHash", getMetricStatisticsParams.getDimensionHash()));
        if (getMetricStatisticsParams.getUnits() != null) {
            criteria = criteria.add(Restrictions.eq("units", getMetricStatisticsParams.getUnits()));
        }

        // Aggregate in the database: min/max/sum grouped by units and timestamp.
        ProjectionList projectionList = Projections.projectionList();
        projectionList.add(Projections.max("sampleMax"));
        projectionList.add(Projections.min("sampleMin"));
        projectionList.add(Projections.sum("sampleSize"));
        projectionList.add(Projections.sum("sampleSum"));
        projectionList.add(Projections.groupProperty("units"));
        projectionList.add(Projections.groupProperty("timestamp"));
        criteria.setProjection(projectionList);
        criteria.addOrder(Order.asc("timestamp"));
        // Scroll to keep memory bounded for large windows.
        ScrollableResults results = criteria.setCacheMode(CacheMode.IGNORE).scroll(ScrollMode.FORWARD_ONLY);
        while (results.next()) {
            MetricEntity me = getMetricEntity(getMetricStatisticsParams.getAccountId(),
                    getMetricStatisticsParams.getMetricName(), getMetricStatisticsParams.getNamespace(),
                    getMetricStatisticsParams.getMetricType(), getMetricStatisticsParams.getDimensionHash(),
                    results);
            GetMetricStatisticsAggregationKey key = new GetMetricStatisticsAggregationKey(me,
                    getMetricStatisticsParams.getStartTime(), getMetricStatisticsParams.getPeriod(),
                    getMetricStatisticsParams.getDimensionHash());
            MetricStatistics item = new MetricStatistics(me, getMetricStatisticsParams.getStartTime(),
                    getMetricStatisticsParams.getPeriod(), getMetricStatisticsParams.getDimensions());
            if (!aggregationMap.containsKey(key)) {
                aggregationMap.put(key, item);
            } else {
                // Merge samples that share an aggregation key.
                MetricStatistics totalSoFar = aggregationMap.get(key);
                totalSoFar.setSampleMax(Math.max(item.getSampleMax(), totalSoFar.getSampleMax()));
                totalSoFar.setSampleMin(Math.min(item.getSampleMin(), totalSoFar.getSampleMin()));
                totalSoFar.setSampleSize(totalSoFar.getSampleSize() + item.getSampleSize());
                totalSoFar.setSampleSum(totalSoFar.getSampleSum() + item.getSampleSum());
            }
        }
    }
    return Lists.newArrayList(aggregationMap.values());
}

From source file:com.eucalyptus.cloudwatch.domain.metricdata.MetricManager.java

License:Open Source License

/**
 * Retrieves aggregated metric statistics for a single metric over a time window.
 *
 * Defaults: endTime = now, startTime = one hour before now, period = 60s;
 * start/end are truncated to the minute before querying. Rows within
 * [startTime, endTime) are folded into one {@link MetricStatistics} per
 * aggregation key (min/max merged, size/sum accumulated).
 *
 * @param accountId    the owning account; must not be null
 * @param metricName   the metric name; must not be null
 * @param namespace    the metric namespace; must not be null
 * @param dimensionMap dimensions identifying the series; null treated as empty,
 *                     at most {@code MetricEntity.MAX_DIM_NUM} entries
 * @param metricType   the metric type; must not be null
 * @param units        optional units filter; null means all units
 * @param startTime    inclusive window start, or null for one hour ago
 * @param endTime      exclusive window end, or null for now
 * @param period       aggregation period in seconds; null defaults to 60,
 *                     must be a positive multiple of 60
 * @return the aggregated statistics (possibly empty)
 * @throws IllegalArgumentException on any invalid argument
 */
public static Collection<MetricStatistics> getMetricStatistics(String accountId, String metricName,
        String namespace, Map<String, String> dimensionMap, MetricType metricType, Units units, Date startTime,
        Date endTime, Integer period) {
    if (dimensionMap == null) {
        dimensionMap = new HashMap<String, String>();
    } else if (dimensionMap.size() > MetricEntity.MAX_DIM_NUM) {
        throw new IllegalArgumentException("Too many dimensions for metric, " + dimensionMap.size());
    }
    TreeSet<DimensionEntity> dimensions = new TreeSet<DimensionEntity>();
    for (Map.Entry<String, String> entry : dimensionMap.entrySet()) {
        DimensionEntity d = new DimensionEntity();
        d.setName(entry.getKey());
        d.setValue(entry.getValue());
        dimensions.add(d);
    }
    Date now = new Date();
    if (endTime == null)
        endTime = now;
    if (startTime == null)
        startTime = new Date(now.getTime() - 60 * 60 * 1000L);
    startTime = stripSeconds(startTime);
    endTime = stripSeconds(endTime);
    if (startTime.after(endTime)) {
        // BUG FIX: the message previously stated the opposite of the constraint
        // ("Start time must be after end time"); the guard rejects start > end.
        throw new IllegalArgumentException("Start time must be before end time");
    }
    if (period == null) {
        period = 60;
    }
    // Negative multiples of 60 pass the modulus check, so the sign checks below
    // are still required.
    if (period % 60 != 0) {
        throw new IllegalArgumentException("Period must be a multiple of 60");
    }
    if (period < 0) {
        throw new IllegalArgumentException("Period must be greater than 0");
    }
    if (period == 0) {
        throw new IllegalArgumentException("Period must not equal 0");
    }
    if (metricType == null) {
        throw new IllegalArgumentException("metricType must not be null");
    }
    if (accountId == null) {
        throw new IllegalArgumentException("accountId must not be null");
    }
    if (metricName == null) {
        throw new IllegalArgumentException("metricName must not be null");
    }
    if (namespace == null) {
        throw new IllegalArgumentException("namespace must not be null");
    }
    String hash = hash(dimensions);
    Class metricEntityClass = MetricEntityFactory.getClassForEntitiesGet(metricType, hash);
    Map<GetMetricStatisticsAggregationKey, MetricStatistics> aggregationMap = new TreeMap<GetMetricStatisticsAggregationKey, MetricStatistics>(
            GetMetricStatisticsAggregationKey.COMPARATOR_WITH_NULLS.INSTANCE);
    EntityTransaction db = Entities.get(metricEntityClass);
    try {
        // Identity filters plus the half-open [startTime, endTime) window.
        Criteria criteria = Entities.createCriteria(metricEntityClass);
        criteria = criteria.add(Restrictions.eq("accountId", accountId));
        criteria = criteria.add(Restrictions.eq("metricName", metricName));
        criteria = criteria.add(Restrictions.eq("namespace", namespace));
        criteria = criteria.add(Restrictions.lt("timestamp", endTime));
        criteria = criteria.add(Restrictions.ge("timestamp", startTime));
        criteria = criteria.add(Restrictions.eq("dimensionHash", hash));
        if (units != null) {
            criteria = criteria.add(Restrictions.eq("units", units));
        }
        criteria = criteria.addOrder(Order.asc("creationTimestamp"));
        criteria = criteria.addOrder(Order.asc("naturalId"));
        Collection results = criteria.list();
        for (Object o : results) {
            MetricEntity me = (MetricEntity) o;
            // Note: dimensions from metric entity are the actual dimensions for the point.  dimensions passed in are from the
            // hash (used for aggregation).  The hash dimensions are what we want.
            GetMetricStatisticsAggregationKey key = new GetMetricStatisticsAggregationKey(me, startTime, period,
                    hash);
            MetricStatistics item = new MetricStatistics(me, startTime, period, dimensions);
            if (!aggregationMap.containsKey(key)) {
                aggregationMap.put(key, item);
            } else {
                // Merge samples that share an aggregation key.
                MetricStatistics totalSoFar = aggregationMap.get(key);
                totalSoFar.setSampleMax(Math.max(item.getSampleMax(), totalSoFar.getSampleMax()));
                totalSoFar.setSampleMin(Math.min(item.getSampleMin(), totalSoFar.getSampleMin()));
                totalSoFar.setSampleSize(totalSoFar.getSampleSize() + item.getSampleSize());
                totalSoFar.setSampleSum(totalSoFar.getSampleSum() + item.getSampleSum());
            }
        }
        db.commit();
    } catch (RuntimeException ex) {
        Logs.extreme().error(ex, ex);
        throw ex;
    } finally {
        // Roll back if commit was never reached (e.g. an exception above).
        if (db.isActive())
            db.rollback();
    }
    return Lists.newArrayList(aggregationMap.values());
}

From source file:com.eucalyptus.compute.vpc.VpcWorkflow.java

License:Open Source License

/**
 * Deletes NAT gateways that have remained in a terminal state (failed or
 * deleted) longer than {@code NatGateways.EXPIRY_AGE}.
 *
 * Runs in two phases: first lists the timed-out gateway IDs in a read
 * transaction, then deletes each one in its own transaction so a failure on
 * one gateway does not block the others.
 */
private void natGatewayTimeout() {
    List<String> timedOutNatGateways = Collections.emptyList();
    try (final TransactionResource tx = Entities.transactionFor(NatGateway.class)) {
        // Terminal state (failed OR deleted) AND last updated before the expiry cutoff.
        timedOutNatGateways = natGateways.list(null,
                Restrictions.and(
                        Restrictions.or(Example.create(NatGateway.exampleWithState(NatGateway.State.failed)),
                                Example.create(NatGateway.exampleWithState(NatGateway.State.deleted))),
                        Restrictions.lt("lastUpdateTimestamp",
                                new Date(System.currentTimeMillis() - NatGateways.EXPIRY_AGE))),
                Collections.<String, String>emptyMap(), Predicates.<NatGateway>alwaysTrue(),
                CloudMetadatas.<NatGateway>toDisplayName());
    } catch (final Exception e) {
        // Listing failure is non-fatal; nothing gets deleted this round.
        logger.error("Error listing timed out NAT gateways", e);
    }

    for (final String natGatewayId : timedOutNatGateways) {
        try (final TransactionResource tx = Entities.transactionFor(NatGateway.class)) {
            final NatGateway natGateway = natGateways.lookupByName(null, natGatewayId,
                    Functions.<NatGateway>identity());
            logger.info("Deleting NAT gateway " + natGateway.getDisplayName() + " with state "
                    + natGateway.getState());
            natGateways.delete(natGateway);
            tx.commit();
        } catch (final VpcMetadataNotFoundException e) {
            // Already gone (e.g. deleted concurrently) — informational only.
            logger.info("NAT gateway " + natGatewayId + " not found for deletion");
        } catch (final Exception e) {
            logger.error("Error deleting timed out NAT gateway " + natGatewayId, e);
        }
    }
}