List of usage examples for org.hibernate ScrollMode FORWARD_ONLY
ScrollMode FORWARD_ONLY
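All of the snippets below follow the same pattern: build a Hibernate Query or Criteria, open a ScrollableResults cursor with ScrollMode.FORWARD_ONLY, and walk the rows one at a time so only the current row needs to be held in memory. As a minimal sketch of that pattern (the Person entity, the countPeople method name, the fetch size, and the session.clear() interval are illustrative assumptions, not taken from any snippet below):

    import org.hibernate.CacheMode;
    import org.hibernate.Query;
    import org.hibernate.ScrollMode;
    import org.hibernate.ScrollableResults;
    import org.hibernate.Session;

    public class ForwardOnlyScrollSketch {

        // Streams every Person row through a forward-only cursor instead of
        // materialising the whole result list. "Person" and the clear()
        // interval are assumptions for illustration only.
        public static long countPeople(Session session) {
            Query query = session.createQuery("from Person");
            query.setReadOnly(true);               // streamed rows are not dirty-checked
            query.setCacheMode(CacheMode.IGNORE);  // don't fill the second-level cache
            query.setFetchSize(100);               // JDBC hint; behaviour is driver-specific

            ScrollableResults results = query.scroll(ScrollMode.FORWARD_ONLY);
            long count = 0;
            try {
                while (results.next()) {
                    Object row = results.get(0);   // first (and only) projection column
                    count++;
                    if (count % 1000 == 0) {
                        session.clear();           // keep the persistence context small
                    }
                }
            } finally {
                results.close();                   // cursors must be closed explicitly
            }
            return count;
        }
    }

The examples that follow show the same idea applied to SQL queries, HQL queries, and Criteria queries in real projects.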
From source file:at.treedb.db.Iterator.java
License:Open Source License
/**
 * <p>
 * Returns the next single object of an entity. This method is used to
 * enumerate large (binary) objects of an entity set. Single object fetching
 * should avoid running into OutOfMemory exceptions.
 * </p>
 * <p>
 * <b>Implementation details:</b>
 * <ol>
 * <li>Hibernate: Stateless session</li>
 * <li>JPA/EclipseLink: <a href=
 * "http://wiki.eclipse.org/Using_Advanced_Query_API_%28ELUG%29#Example_107-12">
 * ReadAllQuery/CursoredStream</a> (streaming data) wasn't really working -
 * every time the whole entity data set was loaded by the first access!
 * Actually a native SQL statement is used to pre-load all object IDs. This
 * list is used to retrieve all objects.</li>
 * <li>JPA/ObjectDB: Slow query with setting first position/max data set
 * size.</li>
 * </ol>
 *
 * @return entity object
 * @throws Exception
 */
@SuppressWarnings("unchecked")
public List<Object> nextObject() throws Exception {
    if (!hasNext) {
        return null;
    }
    int size = 1;
    List<Object> list = null;
    // Hibernate environment
    if (dao.isHibernate() || dao.getJPAimpl() == DAO.JPA_IMPL.HIBERNATEJPA) {
        if (sresult == null) {
            Query query = ((DAOhibernate) dao).createQuery(queryString, map);
            query.setReadOnly(true);
            // MIN_VALUE gives hint to JDBC driver to stream results - but
            // this magic is not working for every DB!
            if (dao.getDB() != DAO.DB.H2) {
                query.setFetchSize(Integer.MIN_VALUE);
            }
            sresult = query.scroll(ScrollMode.FORWARD_ONLY);
        }
        if (sresult.next()) {
            list = new ArrayList<Object>();
            list.add(sresult.get(0));
        }
    } else {
        if (dao.getJPAimpl() != DAO.JPA_IMPL.OBJECTDB) {
            if (idList == null) {
                idList = (List<Integer>) dao.nativeQuery(nativeQueryString);
                if (idList.size() == 0) {
                    return null;
                }
            }
            if (listIndex < idList.size()) {
                list = new ArrayList<Object>();
                Object o = Base.load(dao, (Class<? extends Base>) clazz, idList.get(listIndex));
                if (o == null) {
                    throw new Exception("Iterator.nextObject(): loading JPA object for ID "
                            + idList.get(listIndex) + " failed");
                }
                list.add(o);
                ++listIndex;
            }
        } else {
            // TODO: fallback for ObjectDB - working, but slow, very slow
            list = (List<Object>) dao.query(queryString, index, size, map);
        }
    }
    index += size;
    toRead -= size;
    if (toRead == 0) {
        hasNext = false;
    }
    return list;
}
From source file:br.com.gvt.eng.vod.dao.AssetDAO.java
public Map<String, Integer> retrieveSearchTerms(String where) {
    // Run the query against the database
    ScrollableResults scrollableResults = getSession()
            .createSQLQuery("SELECT title, original_title, description, director, actors from IPVOD_ASSET a "
                    + "left join IPVOD_VISUAL_MENU_ASSET_INDEX ma on (ma.ASSET_ID = A.ASSET_ID) " + where)
            .setFetchSize(2000).setCacheable(false).setReadOnly(true).scroll(ScrollMode.FORWARD_ONLY);
    List<String> list = new ArrayList<String>();
    Map<String, Integer> map = new HashMap<String, Integer>();
    // Read the result set
    while (scrollableResults.next()) {
        for (int i = 0; i < 3; i++) {
            if (scrollableResults.get(i) != null && !"".equals(scrollableResults.get(i).toString())
                    && !"null".equals(scrollableResults.get(i).toString())) {
                list.add(scrollableResults.get(i).toString());
                map.put(scrollableResults.get(i).toString(), 0);
            }
        }
    }
    Map<String, Integer> mp = null;
    mp = uniqueTermsCounter(list);
    ValueComparator bvc = new ValueComparator(mp);
    TreeMap<String, Integer> sorted_map = new TreeMap<String, Integer>(bvc);
    sorted_map.putAll(mp);
    return sorted_map;
}
From source file:ca.ualberta.physics.cssdp.catalogue.dao.UrlDataProductDao.java
License:Apache License
public void process(UrlDataProductUpdateMap urlDataProductUpdateMap) {
    if (urlDataProductUpdateMap.getUrls().size() == 0) {
        return;
    }
    /*
     * The size of scannedUrlDataProducts should be <= jdbc batch size
     * configured.
     */
    // we have to resort to hibernate directly because JPA does not have
    // scrolling capability
    Session session = emp.get().unwrap(Session.class).getSessionFactory().openSession();
    Transaction tx = session.beginTransaction();

    // "in" clause limit is 2^16 on Postgresql, it might be different on
    // other dbs
    String hqlString = "from UrlDataProduct urldp where urldp.url in (:urls)";

    // the fastest way to scroll through the existing data
    Query q = session.createQuery(hqlString);
    q.setParameterList("urls", urlDataProductUpdateMap.getUrls());
    q.setCacheMode(CacheMode.IGNORE);
    ScrollableResults existingData = q.scroll(ScrollMode.FORWARD_ONLY);

    while (existingData.next()) {
        UrlDataProduct existing = (UrlDataProduct) existingData.get(0);
        UrlDataProduct updated = urlDataProductUpdateMap.get(existing.getUrl());

        if (updated != null) {
            /*
             * Only bother to update the record if it's actually changed.
             * Note that the scan timestamp is ignored in the check because
             * that isn't something the provider changed. A change can also
             * mean the url was deleted, and now it's back.
             */
            if (existing.hasChanged(updated)) {
                // existing.setDataProduct(updated.getDataProduct());
                existing.setUrl(updated.getUrl());
                existing.setStartTimestamp(updated.getStartTimestamp());
                existing.setEndTimestamp(updated.getEndTimestamp());
                existing.setScanTimestamp(updated.getScanTimestamp());
                existing.setDeleted(false);
                urlDataProductUpdateMap.remove(updated.getUrl());
                session.update(existing);
            } else {
                // remove it so it's not duplicated
                urlDataProductUpdateMap.remove(existing.getUrl());
            }
        } else {
            // if we get here it means the existing url has been removed
            // from the server, so "delete" it from the catalogue
            existing.setDeleted(true);
            existing.setScanTimestamp(new LocalDateTime());
        }
    }

    // persist the new url mappings
    for (String newUrl : urlDataProductUpdateMap.getUrls()) {
        UrlDataProduct newUrlDataProduct = urlDataProductUpdateMap.get(newUrl);
        session.save(newUrlDataProduct);
        logger.debug("saved a mapping: " + newUrlDataProduct.getUrl());
    }

    session.flush();
    session.clear();
    tx.commit();
    session.close();
}
From source file:com.amalto.core.storage.hibernate.FullTextQueryHandler.java
License:Open Source License
@Override
public StorageResults visit(Select select) {
    // TMDM-4654: Checks if entity has a composite PK.
    Set<ComplexTypeMetadata> compositeKeyTypes = new HashSet<ComplexTypeMetadata>();
    // TMDM-7496: Search should include references to reused types
    Collection<ComplexTypeMetadata> types = new HashSet<ComplexTypeMetadata>(
            select.accept(new SearchTransitiveClosure(storage)));
    for (ComplexTypeMetadata type : types) {
        if (type.getKeyFields().size() > 1) {
            compositeKeyTypes.add(type);
        }
    }
    if (!compositeKeyTypes.isEmpty()) {
        StringBuilder message = new StringBuilder();
        Iterator it = compositeKeyTypes.iterator();
        while (it.hasNext()) {
            ComplexTypeMetadata compositeKeyType = (ComplexTypeMetadata) it.next();
            message.append(compositeKeyType.getName());
            if (it.hasNext()) {
                message.append(',');
            }
        }
        throw new FullTextQueryCompositeKeyException(message.toString());
    }
    // Removes Joins and joined fields.
    List<Join> joins = select.getJoins();
    if (!joins.isEmpty()) {
        Set<ComplexTypeMetadata> joinedTypes = new HashSet<ComplexTypeMetadata>();
        for (Join join : joins) {
            joinedTypes.add(join.getRightField().getFieldMetadata().getContainingType());
        }
        for (ComplexTypeMetadata joinedType : joinedTypes) {
            types.remove(joinedType);
        }
        List<TypedExpression> filteredFields = new LinkedList<TypedExpression>();
        for (TypedExpression expression : select.getSelectedFields()) {
            if (expression instanceof Field) {
                FieldMetadata fieldMetadata = ((Field) expression).getFieldMetadata();
                if (joinedTypes.contains(fieldMetadata.getContainingType())) {
                    TypeMapping mapping = mappings.getMappingFromDatabase(fieldMetadata.getContainingType());
                    filteredFields.add(new Alias(new StringConstant(StringUtils.EMPTY),
                            mapping.getUser(fieldMetadata).getName()));
                } else {
                    filteredFields.add(expression);
                }
            } else {
                filteredFields.add(expression);
            }
        }
        selectedFields.clear();
        selectedFields.addAll(filteredFields);
    }
    // Handle condition
    Condition condition = select.getCondition();
    if (condition == null) {
        throw new IllegalArgumentException("Expected a condition in select clause but got 0.");
    }
    // Create Lucene query (concatenates all sub queries together).
    FullTextSession fullTextSession = Search.getFullTextSession(session);
    Query parsedQuery = select.getCondition().accept(new LuceneQueryGenerator(types));
    // Create Hibernate Search query
    Set<Class> classes = new HashSet<Class>();
    for (ComplexTypeMetadata type : types) {
        String className = ClassCreator.getClassName(type.getName());
        try {
            ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
            classes.add(contextClassLoader.loadClass(className));
        } catch (ClassNotFoundException e) {
            throw new RuntimeException("Could not find class '" + className + "'.", e);
        }
    }
    FullTextQuery fullTextQuery = fullTextSession.createFullTextQuery(parsedQuery,
            classes.toArray(new Class<?>[classes.size()]));
    // Very important to leave this null (would disable ability to search across different types)
    fullTextQuery.setCriteriaQuery(null);
    fullTextQuery.setSort(Sort.RELEVANCE); // Default sort (if no order by specified).
    // ensures only MDM entity objects are returned.
    query = EntityFinder.wrap(fullTextQuery, (HibernateStorage) storage, session, select.getTypes());
    // Order by
    for (OrderBy current : select.getOrderBy()) {
        current.accept(this);
    }
    // Paging
    Paging paging = select.getPaging();
    paging.accept(this);
    pageSize = paging.getLimit();
    boolean hasPaging = pageSize < Integer.MAX_VALUE;
    if (!hasPaging) {
        return createResults(query.scroll(ScrollMode.FORWARD_ONLY));
    } else {
        return createResults(query.list());
    }
}
From source file:com.amalto.core.storage.hibernate.StandardQueryHandler.java
License:Open Source License
@SuppressWarnings("rawtypes") protected StorageResults createResults(Select select) { Paging paging = select.getPaging();// ww w. j a v a 2 s. c o m int pageSize = paging.getLimit(); boolean hasPaging = pageSize < Integer.MAX_VALUE; // Results if (!hasPaging) { if (storage instanceof HibernateStorage) { RDBMSDataSource dataSource = (RDBMSDataSource) storage.getDataSource(); if (dataSource.getDialectName() == RDBMSDataSource.DataSourceDialect.DB2) { // TMDM-7701: DB2 doesn't like use of SCROLL_INSENSITIVE for projections including a CLOB. if (select.isProjection()) { return createResults(criteria.scroll(ScrollMode.FORWARD_ONLY), true); } else { return createResults(criteria.scroll(ScrollMode.SCROLL_INSENSITIVE), false); } } else { return createResults(criteria.scroll(ScrollMode.SCROLL_INSENSITIVE), select.isProjection()); } } return createResults(criteria.scroll(ScrollMode.SCROLL_INSENSITIVE), select.isProjection()); } else { List list = criteria.list(); return createResults(list, select.isProjection()); } }
From source file:com.aw.core.dao.DAOHbm.java
License:Open Source License
public List listAbortable(SQLQuery sqlQuery) {
    ScrollableResults ss = sqlQuery.scroll(ScrollMode.FORWARD_ONLY);
    AWQueryAbortable queryAbortable = AWQueryAbortable.instance();
    List results = new ArrayList(AWQueryAbortable.DEF_LIST_SIZE);
    queryAbortable.resetRowCount();
    while (ss.next()) {
        if (queryAbortable.isAborted())
            break;
        results.add(ss.get());
        queryAbortable.incRowCount();
    }
    return results;
}
From source file:com.eharmony.matching.seeking.executor.hibernate.IterativeHibernateQueryExecutor.java
License:Apache License
@Override
public <T, R> Iterable<R> find(Query<T, R> query) {
    return getMapper().mapResults(getCriteria(query).scroll(ScrollMode.FORWARD_ONLY), query);
}
From source file:com.eucalyptus.cloudwatch.common.internal.domain.metricdata.MetricManager.java
License:Open Source License
public static List<Collection<MetricStatistics>> getManyMetricStatistics(
        List<GetMetricStatisticsParams> getMetricStatisticsParamses) {
    if (getMetricStatisticsParamses == null)
        throw new IllegalArgumentException("getMetricStatisticsParamses can not be null");
    Date now = new Date();
    Map<GetMetricStatisticsParams, Collection<MetricStatistics>> resultMap = Maps.newHashMap();
    Multimap<Class, GetMetricStatisticsParams> hashGroupMap = LinkedListMultimap.create();
    for (GetMetricStatisticsParams getMetricStatisticsParams : getMetricStatisticsParamses) {
        if (getMetricStatisticsParams == null)
            throw new IllegalArgumentException("getMetricStatisticsParams can not be null");
        getMetricStatisticsParams.validate(now);
        Class metricEntityClass = MetricEntityFactory.getClassForEntitiesGet(
                getMetricStatisticsParams.getMetricType(), getMetricStatisticsParams.getDimensionHash());
        hashGroupMap.put(metricEntityClass, getMetricStatisticsParams);
    }
    for (Class metricEntityClass : hashGroupMap.keySet()) {
        try (final TransactionResource db = Entities.transactionFor(metricEntityClass)) {
            // set some global criteria to start (for narrowing?)
            Date minDate = null;
            Date maxDate = null;
            Junction disjunction = Restrictions.disjunction();
            Map<GetMetricStatisticsParams, TreeMap<GetMetricStatisticsAggregationKey, MetricStatistics>> multiAggregationMap = Maps.newHashMap();
            for (GetMetricStatisticsParams getMetricStatisticsParams : hashGroupMap.get(metricEntityClass)) {
                multiAggregationMap.put(getMetricStatisticsParams,
                        new TreeMap<GetMetricStatisticsAggregationKey, MetricStatistics>(
                                GetMetricStatisticsAggregationKey.COMPARATOR_WITH_NULLS.INSTANCE));
                Junction conjunction = Restrictions.conjunction();
                conjunction = conjunction.add(Restrictions.lt("timestamp", getMetricStatisticsParams.getEndTime()));
                conjunction = conjunction.add(Restrictions.ge("timestamp", getMetricStatisticsParams.getStartTime()));
                conjunction = conjunction.add(Restrictions.eq("accountId", getMetricStatisticsParams.getAccountId()));
                conjunction = conjunction.add(Restrictions.eq("metricName", getMetricStatisticsParams.getMetricName()));
                conjunction = conjunction.add(Restrictions.eq("namespace", getMetricStatisticsParams.getNamespace()));
                conjunction = conjunction.add(
                        Restrictions.eq("dimensionHash", hash(getMetricStatisticsParams.getDimensionMap())));
                if (getMetricStatisticsParams.getUnits() != null) {
                    conjunction = conjunction.add(Restrictions.eq("units", getMetricStatisticsParams.getUnits()));
                }
                disjunction = disjunction.add(conjunction);
                if (minDate == null || getMetricStatisticsParams.getStartTime().before(minDate)) {
                    minDate = getMetricStatisticsParams.getStartTime();
                }
                if (maxDate == null || getMetricStatisticsParams.getEndTime().after(maxDate)) {
                    maxDate = getMetricStatisticsParams.getEndTime();
                }
            }
            Criteria criteria = Entities.createCriteria(metricEntityClass);
            criteria = criteria.add(Restrictions.lt("timestamp", maxDate));
            criteria = criteria.add(Restrictions.ge("timestamp", minDate));
            criteria = criteria.add(disjunction);
            ProjectionList projectionList = Projections.projectionList();
            projectionList.add(Projections.max("sampleMax"));
            projectionList.add(Projections.min("sampleMin"));
            projectionList.add(Projections.sum("sampleSize"));
            projectionList.add(Projections.sum("sampleSum"));
            projectionList.add(Projections.groupProperty("units"));
            projectionList.add(Projections.groupProperty("timestamp"));
            projectionList.add(Projections.groupProperty("accountId"));
            projectionList.add(Projections.groupProperty("metricName"));
            projectionList.add(Projections.groupProperty("metricType"));
            projectionList.add(Projections.groupProperty("namespace"));
            projectionList.add(Projections.groupProperty("dimensionHash"));
            criteria.setProjection(projectionList);
            criteria.addOrder(Order.asc("timestamp"));
            ScrollableResults results = criteria.setCacheMode(CacheMode.IGNORE).scroll(ScrollMode.FORWARD_ONLY);
            while (results.next()) {
                MetricEntity me = getMetricEntity(results);
                for (GetMetricStatisticsParams getMetricStatisticsParams : hashGroupMap.get(metricEntityClass)) {
                    if (metricDataMatches(getMetricStatisticsParams, me)) {
                        Map<GetMetricStatisticsAggregationKey, MetricStatistics> aggregationMap = multiAggregationMap
                                .get(getMetricStatisticsParams);
                        GetMetricStatisticsAggregationKey key = new GetMetricStatisticsAggregationKey(me,
                                getMetricStatisticsParams.getStartTime(), getMetricStatisticsParams.getPeriod(),
                                getMetricStatisticsParams.getDimensionHash());
                        MetricStatistics item = new MetricStatistics(me, getMetricStatisticsParams.getStartTime(),
                                getMetricStatisticsParams.getPeriod(), getMetricStatisticsParams.getDimensions());
                        if (!aggregationMap.containsKey(key)) {
                            aggregationMap.put(key, item);
                        } else {
                            MetricStatistics totalSoFar = aggregationMap.get(key);
                            totalSoFar.setSampleMax(Math.max(item.getSampleMax(), totalSoFar.getSampleMax()));
                            totalSoFar.setSampleMin(Math.min(item.getSampleMin(), totalSoFar.getSampleMin()));
                            totalSoFar.setSampleSize(totalSoFar.getSampleSize() + item.getSampleSize());
                            totalSoFar.setSampleSum(totalSoFar.getSampleSum() + item.getSampleSum());
                        }
                    }
                }
            }
            for (GetMetricStatisticsParams getMetricStatisticsParams : multiAggregationMap.keySet()) {
                resultMap.put(getMetricStatisticsParams, multiAggregationMap.get(getMetricStatisticsParams).values());
            }
        }
    }
    List<Collection<MetricStatistics>> resultList = Lists.newArrayList();
    for (GetMetricStatisticsParams getMetricStatisticsParams : getMetricStatisticsParamses) {
        if (resultMap.get(getMetricStatisticsParams) == null) {
            resultList.add(new ArrayList<MetricStatistics>());
        } else {
            resultList.add(resultMap.get(getMetricStatisticsParams));
        }
    }
    return resultList;
}
From source file:com.eucalyptus.cloudwatch.common.internal.domain.metricdata.MetricManager.java
License:Open Source License
public static Collection<MetricStatistics> getMetricStatistics(
        GetMetricStatisticsParams getMetricStatisticsParams) {
    if (getMetricStatisticsParams == null)
        throw new IllegalArgumentException("getMetricStatisticsParams can not be null");
    Date now = new Date();
    getMetricStatisticsParams.validate(now);
    Class metricEntityClass = MetricEntityFactory.getClassForEntitiesGet(
            getMetricStatisticsParams.getMetricType(), getMetricStatisticsParams.getDimensionHash());
    Map<GetMetricStatisticsAggregationKey, MetricStatistics> aggregationMap = new TreeMap<GetMetricStatisticsAggregationKey, MetricStatistics>(
            GetMetricStatisticsAggregationKey.COMPARATOR_WITH_NULLS.INSTANCE);
    try (final TransactionResource db = Entities.transactionFor(metricEntityClass)) {
        Criteria criteria = Entities.createCriteria(metricEntityClass);
        criteria = criteria.add(Restrictions.eq("accountId", getMetricStatisticsParams.getAccountId()));
        criteria = criteria.add(Restrictions.eq("metricName", getMetricStatisticsParams.getMetricName()));
        criteria = criteria.add(Restrictions.eq("namespace", getMetricStatisticsParams.getNamespace()));
        criteria = criteria.add(Restrictions.lt("timestamp", getMetricStatisticsParams.getEndTime()));
        criteria = criteria.add(Restrictions.ge("timestamp", getMetricStatisticsParams.getStartTime()));
        criteria = criteria.add(Restrictions.eq("dimensionHash", getMetricStatisticsParams.getDimensionHash()));
        if (getMetricStatisticsParams.getUnits() != null) {
            criteria = criteria.add(Restrictions.eq("units", getMetricStatisticsParams.getUnits()));
        }
        ProjectionList projectionList = Projections.projectionList();
        projectionList.add(Projections.max("sampleMax"));
        projectionList.add(Projections.min("sampleMin"));
        projectionList.add(Projections.sum("sampleSize"));
        projectionList.add(Projections.sum("sampleSum"));
        projectionList.add(Projections.groupProperty("units"));
        projectionList.add(Projections.groupProperty("timestamp"));
        criteria.setProjection(projectionList);
        criteria.addOrder(Order.asc("timestamp"));
        ScrollableResults results = criteria.setCacheMode(CacheMode.IGNORE).scroll(ScrollMode.FORWARD_ONLY);
        while (results.next()) {
            MetricEntity me = getMetricEntity(getMetricStatisticsParams.getAccountId(),
                    getMetricStatisticsParams.getMetricName(), getMetricStatisticsParams.getNamespace(),
                    getMetricStatisticsParams.getMetricType(), getMetricStatisticsParams.getDimensionHash(),
                    results);
            GetMetricStatisticsAggregationKey key = new GetMetricStatisticsAggregationKey(me,
                    getMetricStatisticsParams.getStartTime(), getMetricStatisticsParams.getPeriod(),
                    getMetricStatisticsParams.getDimensionHash());
            MetricStatistics item = new MetricStatistics(me, getMetricStatisticsParams.getStartTime(),
                    getMetricStatisticsParams.getPeriod(), getMetricStatisticsParams.getDimensions());
            if (!aggregationMap.containsKey(key)) {
                aggregationMap.put(key, item);
            } else {
                MetricStatistics totalSoFar = aggregationMap.get(key);
                totalSoFar.setSampleMax(Math.max(item.getSampleMax(), totalSoFar.getSampleMax()));
                totalSoFar.setSampleMin(Math.min(item.getSampleMin(), totalSoFar.getSampleMin()));
                totalSoFar.setSampleSize(totalSoFar.getSampleSize() + item.getSampleSize());
                totalSoFar.setSampleSum(totalSoFar.getSampleSum() + item.getSampleSum());
            }
        }
    }
    return Lists.newArrayList(aggregationMap.values());
}
From source file:com.eucalyptus.cluster.callback.reporting.FullTableScanAbsoluteMetricConverter.java
License:Open Source License
protected static List<AbsoluteMetricQueueItem> dealWithAbsoluteMetrics(
        Iterable<AbsoluteMetricQueueItem> dataBatch) {
    List<AbsoluteMetricQueueItem> regularMetrics = Lists.newArrayList();
    List<SimpleAbsoluteMetricHistory> absoluteMetricsToInsert = Lists.newArrayList();
    SortedAbsoluteMetrics sortedAbsoluteMetrics = sortAbsoluteMetrics(dataBatch);
    regularMetrics.addAll(sortedAbsoluteMetrics.getRegularMetrics());
    AbsoluteMetricMap absoluteMetricMap = sortedAbsoluteMetrics.getAbsoluteMetricMap();
    try (final TransactionResource db = Entities.transactionFor(AbsoluteMetricHistory.class)) {
        int count = 0;
        Criteria criteria = Entities.createCriteria(AbsoluteMetricHistory.class);
        ScrollableResults absoluteMetrics = criteria.setCacheMode(CacheMode.IGNORE)
                .scroll(ScrollMode.FORWARD_ONLY);
        while (absoluteMetrics.next()) {
            AbsoluteMetricHistory absoluteMetricHistory = (AbsoluteMetricHistory) absoluteMetrics.get(0);
            if (absoluteMetricMap.containsKey(absoluteMetricHistory.getNamespace(),
                    absoluteMetricHistory.getMetricName(), absoluteMetricHistory.getDimensionName(),
                    absoluteMetricHistory.getDimensionValue())) {
                MetricsAndOtherFields metricsAndOtherFields = absoluteMetricMap.getMetricsAndOtherFields(
                        absoluteMetricHistory.getNamespace(), absoluteMetricHistory.getMetricName(),
                        absoluteMetricHistory.getDimensionName(), absoluteMetricHistory.getDimensionValue());
                Map<TimestampAndMetricValue, MetricDatum> metricDatumMap = metricsAndOtherFields.getMetricDatumMap();
                SequentialMetrics sequentialMetrics = calculateSequentialMetrics(absoluteMetricHistory,
                        metricDatumMap, metricsAndOtherFields.getAccountId(),
                        metricsAndOtherFields.getRelativeMetricName());
                absoluteMetricMap.removeEntries(absoluteMetricHistory.getNamespace(),
                        absoluteMetricHistory.getMetricName(), absoluteMetricHistory.getDimensionName(),
                        absoluteMetricHistory.getDimensionValue());
                for (AbsoluteMetricQueueItem regularMetric : sequentialMetrics.getRegularMetrics()) {
                    if (AbsoluteMetricHelper.AWS_EBS_NAMESPACE.equals(regularMetric.getNamespace())) {
                        if (AbsoluteMetricHelper.VOLUME_READ_OPS_METRIC_NAME
                                .equals(regularMetric.getMetricDatum().getMetricName())) { // special case
                            regularMetrics.add(AbsoluteMetricHelper.createVolumeThroughputMetric(
                                    regularMetric.getAccountId(), regularMetric.getNamespace(),
                                    regularMetric.getMetricDatum()));
                        } else if (AbsoluteMetricHelper.VOLUME_TOTAL_READ_WRITE_TIME_METRIC_NAME
                                .equals(regularMetric.getMetricDatum().getMetricName())) {
                            AbsoluteMetricHelper.convertVolumeTotalReadWriteTimeToVolumeIdleTime(
                                    regularMetric.getMetricDatum());
                        }
                    }
                    regularMetrics.add(regularMetric);
                }
                absoluteMetricHistory.setTimestamp(sequentialMetrics.getUpdateTimestamp());
                absoluteMetricHistory.setLastMetricValue(sequentialMetrics.getUpdateValue());
                if (++count % AbsoluteMetricQueue.ABSOLUTE_METRIC_NUM_DB_OPERATIONS_UNTIL_SESSION_FLUSH == 0) {
                    Entities.flushSession(AbsoluteMetricHistory.class);
                    Entities.clearSession(AbsoluteMetricHistory.class);
                }
            }
        }
        db.commit();
    }
    // Now parse entries only in the map...
    for (AbsoluteMetricMap.NamespaceMetricNameAndDimension namespaceMetricNameAndDimension : absoluteMetricMap.keySet()) {
        AbsoluteMetricHistory absoluteMetricHistory = new AbsoluteMetricHistory();
        absoluteMetricHistory.setNamespace(namespaceMetricNameAndDimension.getNamespace());
        absoluteMetricHistory.setMetricName(namespaceMetricNameAndDimension.getMetricName());
        absoluteMetricHistory.setDimensionName(namespaceMetricNameAndDimension.getDimensionName());
        absoluteMetricHistory.setDimensionValue(namespaceMetricNameAndDimension.getDimensionValue());
        MetricsAndOtherFields metricsAndOtherFields = absoluteMetricMap.get(namespaceMetricNameAndDimension);
        Map<TimestampAndMetricValue, MetricDatum> metricDataMap = metricsAndOtherFields.getMetricDatumMap();
        if (metricDataMap.size() == 0)
            continue;
        TimestampAndMetricValue firstValue = metricDataMap.keySet().iterator().next();
        metricDataMap.remove(firstValue);
        absoluteMetricHistory.setLastMetricValue(firstValue.getMetricValue());
        absoluteMetricHistory.setTimestamp(firstValue.getTimestamp());
        if (metricDataMap.size() != 0) {
            SequentialMetrics sequentialMetrics = calculateSequentialMetrics(absoluteMetricHistory,
                    metricDataMap, metricsAndOtherFields.getAccountId(),
                    metricsAndOtherFields.getRelativeMetricName());
            for (AbsoluteMetricQueueItem regularMetric : sequentialMetrics.getRegularMetrics()) {
                if (AbsoluteMetricHelper.AWS_EBS_NAMESPACE.equals(regularMetric.getNamespace())) {
                    if (AbsoluteMetricHelper.VOLUME_READ_OPS_METRIC_NAME
                            .equals(regularMetric.getMetricDatum().getMetricName())) { // special case
                        regularMetrics.add(AbsoluteMetricHelper.createVolumeThroughputMetric(
                                regularMetric.getAccountId(), regularMetric.getNamespace(),
                                regularMetric.getMetricDatum()));
                    } else if (AbsoluteMetricHelper.VOLUME_TOTAL_READ_WRITE_TIME_METRIC_NAME
                            .equals(regularMetric.getMetricDatum().getMetricName())) {
                        AbsoluteMetricHelper.convertVolumeTotalReadWriteTimeToVolumeIdleTime(
                                regularMetric.getMetricDatum());
                    }
                }
                regularMetrics.add(regularMetric);
            }
            absoluteMetricHistory.setTimestamp(sequentialMetrics.getUpdateTimestamp());
            absoluteMetricHistory.setLastMetricValue(sequentialMetrics.getUpdateValue());
        }
        absoluteMetricsToInsert.add(convertToSimpleAbsoluteMetricHistory(absoluteMetricHistory));
    }
    // insert all new points
    try (final TransactionResource db = Entities.transactionFor(AbsoluteMetricHistory.class)) {
        int count = 0;
        for (SimpleAbsoluteMetricHistory simpleAbsoluteMetricHistory : absoluteMetricsToInsert) {
            Entities.persist(convertToAbsoluteMetricHistory(simpleAbsoluteMetricHistory));
            if (++count % AbsoluteMetricQueue.ABSOLUTE_METRIC_NUM_DB_OPERATIONS_UNTIL_SESSION_FLUSH == 0) {
                Entities.flushSession(AbsoluteMetricHistory.class);
                Entities.clearSession(AbsoluteMetricHistory.class);
            }
        }
        db.commit();
    }
    return regularMetrics;
}