List of usage examples for java.util.ArrayList.isEmpty()
public boolean isEmpty()
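Returns true if this list contains no elements. The examples below all use the same guard: collect candidates into an ArrayList, then act only when the list is non-empty. A minimal, self-contained sketch of that pattern (the class and variable names here are illustrative, not taken from any example below):

import java.util.ArrayList;

public class IsEmptyExample {
    public static void main(String[] args) {
        ArrayList<String> names = new ArrayList<String>();

        // A freshly created list contains no elements, so isEmpty() is true.
        System.out.println(names.isEmpty()); // true

        names.add("alice");
        System.out.println(names.isEmpty()); // false

        // Common guard pattern: only do work when something was collected,
        // e.g. converting to an array as several examples below do.
        if (!names.isEmpty()) {
            String[] array = names.toArray(new String[names.size()]);
            System.out.println(array.length); // 1
        }
    }
}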
From source file:com.mtgi.analytics.servlet.BehaviorTrackingListener.java
private synchronized void checkInit(ServletRequestEvent event) {
    if (!initialized) {
        ServletContext context = event.getServletContext();
        boolean hasFilter = BehaviorTrackingFilter.isFiltered(context);
        ArrayList<ServletRequestBehaviorTrackingAdapter> beans = new ArrayList<ServletRequestBehaviorTrackingAdapter>();

        //find registered request adapters in all mvc servlet contexts.
        for (Enumeration<?> atts = context.getAttributeNames(); atts.hasMoreElements();) {
            String name = (String) atts.nextElement();
            if (name.startsWith(FrameworkServlet.SERVLET_CONTEXT_PREFIX)) {
                Object value = context.getAttribute(name);
                if (value instanceof ListableBeanFactory)
                    addRequestAdapters(beans, (ListableBeanFactory) value, hasFilter);
            }
        }

        //look for shared application context, loaded by ContextLoaderListener.
        ListableBeanFactory parent = WebApplicationContextUtils.getWebApplicationContext(context);
        if (parent != null)
            addRequestAdapters(beans, parent, hasFilter);

        if (!beans.isEmpty()) {
            adapters = beans.toArray(new ServletRequestBehaviorTrackingAdapter[beans.size()]);
            log.info("BehaviorTracking for HTTP servlet requests started");
        }
        initialized = true;
    }
}
From source file:edu.isi.misd.scanner.network.registry.data.service.RegistryServiceImpl.java
/**
 * Updates a single Study, altering UserRole references (where required).
 * @param study
 */
@Override
@Transactional
public void updateStudy(Study study) {
    ArrayList<StudyRole> defaultStudyRolesToUserRoles = new ArrayList<StudyRole>();
    List<StudyRole> studyRoles = studyRoleRepository.findByStudyStudyId(study.getStudyId());
    /*
     * Find the set of default study roles that need to have corresponding
     * user roles automatically created.
     */
    for (StandardRole standardRole : standardRoleRepository.findAll()) {
        if (standardRole.getAddToUserRoleByDefault()) {
            for (StudyRole studyRole : studyRoles) {
                if (studyRole.getRoleWithinStudy().equalsIgnoreCase(standardRole.getStandardRoleName()))
                    defaultStudyRolesToUserRoles.add(studyRole);
            }
        }
    }
    /*
     * Create default user roles for the (potentially) new owner of the
     * study, if those roles do not exist already. If the owner has not
     * changed, then the roles will exist already and no UserRole update
     * will take place.
     */
    ArrayList<UserRole> newUserRoles = new ArrayList<UserRole>();
    List<StudyRole> studyRolesForUser = studyRoleRepository
        .findByStudyStudyIdAndScannerUsersUserName(study.getStudyId(), study.getStudyOwner().getUserName());
    for (StudyRole studyRole : defaultStudyRolesToUserRoles) {
        if (studyRolesForUser.contains(studyRole)) {
            continue;
        }
        UserRole userRole = new UserRole();
        userRole.setUser(study.getStudyOwner());
        userRole.setStudyRole(studyRole);
        newUserRoles.add(userRole);
    }
    if (!newUserRoles.isEmpty()) {
        userRoleRepository.save(newUserRoles);
    }
    studyRepository.save(study);
}
From source file:net.sf.taverna.t2.security.credentialmanager.impl.CredentialManagerImplTest.java
/**
 * Test method for {@link net.sf.taverna.t2.security.credentialmanager.impl.CredentialManagerImpl#getAliases(net.sf.taverna.t2.security.credentialmanager.CredentialManager.KeystoreType)}.
 * @throws CMException
 */
@Test
public void testGetAliases() throws CMException {
    ArrayList<String> keystoreAliases = credentialManager.getAliases(KeystoreType.KEYSTORE);
    ArrayList<String> truststoreAliases = credentialManager.getAliases(KeystoreType.TRUSTSTORE);

    // Initially the Keystore/Truststore is empty
    assertTrue(keystoreAliases.isEmpty());

    String aliasPassword = credentialManager.addUsernameAndPasswordForService(usernamePassword, serviceURI);
    String aliasKeyPair = credentialManager.addKeyPair(privateKey, privateKeyCertChain);
    String aliasTrustedCert = credentialManager.addTrustedCertificate(trustedCertficate);

    keystoreAliases = credentialManager.getAliases(KeystoreType.KEYSTORE);
    truststoreAliases = credentialManager.getAliases(KeystoreType.TRUSTSTORE);

    assertTrue(keystoreAliases.size() == 2);
    // we at least have the one we inserted, but there could be more copied from Java's default truststore
    assertTrue(truststoreAliases.size() >= 1);
    assertTrue(keystoreAliases.contains(aliasPassword));
    assertTrue(keystoreAliases.contains(aliasKeyPair));
    assertTrue(truststoreAliases.contains(aliasTrustedCert));
}
From source file:com.telefonica.iot.cygnus.sinks.NGSICartoDBSink.java
private void persistDistanceEvent(NGSIEvent event) throws CygnusBadConfiguration, CygnusPersistenceError {
    // Get some values
    String schema = buildSchemaName(event.getServiceForNaming(enableNameMappings));
    String service = event.getServiceForData();
    String servicePath = event.getServicePathForData();
    String entityId = event.getContextElement().getId();
    String entityType = event.getContextElement().getType();

    // Iterate on all this context element attributes, if there are attributes
    ArrayList<ContextAttribute> contextAttributes = event.getContextElement().getAttributes();

    if (contextAttributes == null || contextAttributes.isEmpty()) {
        return;
    } // if

    ((CartoDBBackendImpl) backends.get(schema)).startTransaction();

    for (ContextAttribute contextAttribute : contextAttributes) {
        long recvTimeTs = event.getRecvTimeTs();
        String attrType = contextAttribute.getType();
        String attrValue = contextAttribute.getContextValue(false);
        String attrMetadata = contextAttribute.getContextMetadata();
        ImmutablePair<String, Boolean> location = NGSIUtils.getGeometry(attrValue, attrType, attrMetadata,
                swapCoordinates);
        String tableName = buildTableName(event.getServicePathForNaming(enableGrouping, enableNameMappings),
                event.getEntityForNaming(enableGrouping, enableNameMappings, enableEncoding),
                event.getAttributeForNaming(enableNameMappings)) + CommonConstants.CONCATENATOR + "distance";

        if (location.getRight()) {
            // Try creating the table... the cost of checking if it exists and creating it is higher than directly
            // attempting to create it. If existing, nothing will be re-created and the new values will be inserted
            try {
                String typedFields = "(recvTimeMs bigint, fiwareServicePath text, entityId text, entityType text, "
                        + "stageDistance float, stageTime float, stageSpeed float, sumDistance float, "
                        + "sumTime float, sumSpeed float, sum2Distance float, sum2Time float, sum2Speed float, "
                        + "maxDistance float, minDistance float, maxTime float, minTime float, maxSpeed float, "
                        + "minSpeed float, numSamples bigint)";
                backends.get(schema).createTable(schema, tableName, typedFields);

                // Once created, insert the first row
                String withs = "";
                String fields = "(recvTimeMs, fiwareServicePath, entityId, entityType, the_geom, stageDistance,"
                        + "stageTime, stageSpeed, sumDistance, sumTime, sumSpeed, sum2Distance, sum2Time,"
                        + "sum2Speed, maxDistance, minDistance, maxTime, mintime, maxSpeed, minSpeed, numSamples)";
                String rows = "(" + recvTimeTs + ",'" + servicePath + "','" + entityId + "','" + entityType
                        + "'," + location.getLeft() + ",0,0,0,0,0,0,0,0,0," + Float.MIN_VALUE + ","
                        + Float.MAX_VALUE + "," + Float.MIN_VALUE + "," + Float.MAX_VALUE + ","
                        + Float.MIN_VALUE + "," + Float.MAX_VALUE + ",1)";
                LOGGER.info("[" + this.getName() + "] Persisting data at NGSICartoDBSink. Schema (" + schema
                        + "), Table (" + tableName + "), Data (" + rows + ")");
                backends.get(schema).insert(schema, tableName, withs, fields, rows);
            } catch (Exception e1) {
                String withs = ""
                        + "WITH geom AS ("
                        + " SELECT " + location.getLeft() + " AS point"
                        + "), calcs AS ("
                        + " SELECT"
                        + " cartodb_id,"
                        + " ST_Distance(the_geom::geography, geom.point::geography) AS stage_distance,"
                        + " (" + recvTimeTs + " - recvTimeMs) AS stage_time"
                        + " FROM " + tableName + ", geom"
                        + " ORDER BY cartodb_id DESC"
                        + " LIMIT 1"
                        + "), speed AS ("
                        + " SELECT"
                        + " (calcs.stage_distance / NULLIF(calcs.stage_time, 0)) AS stage_speed"
                        + " FROM calcs"
                        + "), inserts AS ("
                        + " SELECT"
                        + " (-1 * ((-1 * t1.sumDistance) - calcs.stage_distance)) AS sum_dist,"
                        + " (-1 * ((-1 * t1.sumTime) - calcs.stage_time)) AS sum_time,"
                        + " (-1 * ((-1 * t1.sumSpeed) - speed.stage_speed)) AS sum_speed,"
                        + " (-1 * ((-1 * t1.sumDistance) - calcs.stage_distance)) "
                        + " * (-1 * ((-1 * t1.sumDistance) - calcs.stage_distance)) AS sum2_dist,"
                        + " (-1 * ((-1 * t1.sumTime) - calcs.stage_time)) "
                        + " * (-1 * ((-1 * t1.sumTime) - calcs.stage_time)) AS sum2_time,"
                        + " (-1 * ((-1 * t1.sumSpeed) - speed.stage_speed)) "
                        + " * (-1 * ((-1 * t1.sumSpeed) - speed.stage_speed)) AS sum2_speed,"
                        + " t1.max_distance,"
                        + " t1.min_distance,"
                        + " t1.max_time,"
                        + " t1.min_time,"
                        + " t1.max_speed,"
                        + " t1.min_speed,"
                        + " t2.num_samples"
                        + " FROM"
                        + " ("
                        + " SELECT"
                        + " GREATEST(calcs.stage_distance, maxDistance) AS max_distance,"
                        + " LEAST(calcs.stage_distance, minDistance) AS min_distance,"
                        + " GREATEST(calcs.stage_time, maxTime) AS max_time,"
                        + " LEAST(calcs.stage_time, minTime) AS min_time,"
                        + " GREATEST(speed.stage_speed, maxSpeed) AS max_speed,"
                        + " LEAST(speed.stage_speed, minSpeed) AS min_speed,"
                        + " sumDistance,"
                        + " sumTime,"
                        + " sumSpeed"
                        + " FROM " + tableName + ", speed, calcs"
                        + " ORDER BY " + tableName + ".cartodb_id DESC"
                        + " LIMIT 1"
                        + " ) AS t1,"
                        + " ("
                        + " SELECT (-1 * ((-1 * COUNT(*)) - 1)) AS num_samples"
                        + " FROM " + tableName
                        + " ) AS t2,"
                        + " speed,"
                        + " calcs"
                        + ")";
                String fields = "(recvTimeMs, fiwareServicePath, entityId, entityType, the_geom, stageDistance,"
                        + "stageTime, stageSpeed, sumDistance, sumTime, sumSpeed, sum2Distance, sum2Time,"
                        + "sum2Speed, maxDistance, minDistance, maxTime, mintime, maxSpeed, minSpeed, numSamples)";
                String rows = "(" + recvTimeTs + ",'" + servicePath + "','" + entityId + "','" + entityType
                        + "',"
                        + "(SELECT point FROM geom),(SELECT stage_distance FROM calcs),"
                        + "(SELECT stage_time FROM calcs),(SELECT stage_speed FROM speed),"
                        + "(SELECT sum_dist FROM inserts),(SELECT sum_time FROM inserts),"
                        + "(SELECT sum_speed FROM inserts),(SELECT sum2_dist FROM inserts),"
                        + "(SELECT sum2_time FROM inserts),(SELECT sum2_speed FROM inserts),"
                        + "(SELECT max_distance FROM inserts),(SELECT min_distance FROM inserts),"
                        + "(SELECT max_time FROM inserts),(SELECT min_time FROM inserts),"
                        + "(SELECT max_speed FROM inserts),(SELECT min_speed FROM inserts),"
                        + "(SELECT num_samples FROM inserts))";
                LOGGER.info("[" + this.getName() + "] Persisting data at NGSICartoDBSink. Schema (" + schema
                        + "), Table (" + tableName + "), Data (" + rows + ")");

                try {
                    backends.get(schema).insert(schema, tableName, withs, fields, rows);
                } catch (Exception e2) {
                    ImmutablePair<Long, Long> bytes = ((CartoDBBackendImpl) backends.get(schema))
                            .finishTransaction();
                    serviceMetrics.add(service, servicePath, 0, 0, 0, 0, 0, 0, bytes.left, bytes.right, 0);
                    throw new CygnusPersistenceError("-, " + e2.getMessage());
                } // try catch
            } // try catch
        } // if
    } // for

    ImmutablePair<Long, Long> bytes = ((CartoDBBackendImpl) backends.get(schema)).finishTransaction();
    serviceMetrics.add(service, servicePath, 0, 0, 0, 0, 0, 0, bytes.left, bytes.right, 0);
}
From source file:net.ontopia.topicmaps.utils.MergeUtils.java
private static boolean equals(TMObjectIF obj1, TMObjectIF obj2) {
    // can't be topics, or we wouldn't be here
    if (obj1 instanceof AssociationIF && obj2 instanceof AssociationIF) {
        AssociationIF a1 = (AssociationIF) obj1;
        AssociationIF a2 = (AssociationIF) obj2;
        if (a1.getType() == a2.getType() && a1.getRoles().size() == a2.getRoles().size()
                && a1.getScope().equals(a2.getScope())) {
            ArrayList<AssociationRoleIF> roles2 = new ArrayList<AssociationRoleIF>(a2.getRoles());
            Iterator<AssociationRoleIF> it1 = a1.getRoles().iterator();
            while (it1.hasNext()) {
                AssociationRoleIF role1 = it1.next();
                Iterator<AssociationRoleIF> it2 = roles2.iterator();
                boolean found = false;
                while (it2.hasNext()) {
                    AssociationRoleIF role2 = it2.next();
                    if (role2.getPlayer() == role1.getPlayer() && role1.getType() == role2.getType()) {
                        roles2.remove(role2);
                        found = true;
                        break;
                    }
                }
                if (!found)
                    break;
            }
            return roles2.isEmpty();
        }
    } else if (obj1 instanceof TopicNameIF && obj2 instanceof TopicNameIF) {
        TopicNameIF bn1 = (TopicNameIF) obj1;
        TopicNameIF bn2 = (TopicNameIF) obj2;
        return (bn1.getTopic().equals(bn2.getTopic()) && sameAs(bn1.getValue(), bn2.getValue())
                && sameAs(bn1.getType(), bn2.getType()) && sameAs(bn1.getScope(), bn2.getScope()));
    } else if (obj1 instanceof OccurrenceIF && obj2 instanceof OccurrenceIF) {
        OccurrenceIF occ1 = (OccurrenceIF) obj1;
        OccurrenceIF occ2 = (OccurrenceIF) obj2;
        return (occ1.getTopic().equals(occ2.getTopic()) && sameAs(occ1.getValue(), occ2.getValue())
                && sameAs(occ1.getDataType(), occ2.getDataType()) && sameAs(occ1.getType(), occ2.getType())
                && sameAs(occ1.getScope(), occ2.getScope()));
    }
    return false;
}
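The MergeUtils example above drains a scratch copy of the second role list as matches are found, so roles2.isEmpty() at the end means every role was matched exactly once. A generic sketch of that multiset-equality idiom, with hypothetical names (MultisetMatch, sameElements); it relies on List.remove(Object), which removes a single matching element:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class MultisetMatch {

    // Returns true if the two lists contain the same elements with the same
    // multiplicities. The scratch copy is drained as matches are found, so an
    // empty list at the end means "all elements matched one-to-one".
    static <T> boolean sameElements(List<T> expected, List<T> actual) {
        if (expected.size() != actual.size()) {
            return false;
        }
        ArrayList<T> remaining = new ArrayList<T>(actual);
        for (T item : expected) {
            if (!remaining.remove(item)) { // no unmatched element left for this item
                return false;
            }
        }
        return remaining.isEmpty();
    }

    public static void main(String[] args) {
        List<Integer> a = Arrays.asList(1, 2, 2);
        List<Integer> b = Arrays.asList(2, 1, 2);
        System.out.println(sameElements(a, b)); // true
        System.out.println(sameElements(a, Arrays.asList(1, 2, 3))); // false
    }
}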
From source file:com.erudika.para.search.ElasticSearch.java
/**
 * Processes the results of searchQueryRaw() and fetches the results from the data store (can be disabled).
 * @param <P> type of object
 * @param appid name of the {@link com.erudika.para.core.App}
 * @param hits the search results from a query
 * @return the list of objects found
 */
private <P extends ParaObject> List<P> searchQuery(String appid, SearchHits hits) {
    if (hits == null) {
        return Collections.emptyList();
    }
    ArrayList<P> results = new ArrayList<P>(hits.getHits().length);
    ArrayList<String> keys = new ArrayList<String>(hits.getHits().length);
    try {
        for (SearchHit hit : hits) {
            keys.add(hit.getId());
            if (Config.READ_FROM_INDEX) {
                P pobj = ParaObjectUtils.setAnnotatedFields(hit.getSource());
                results.add(pobj);
            }
        }
        if (!Config.READ_FROM_INDEX && !keys.isEmpty()) {
            Map<String, P> fromDB = dao.readAll(appid, keys, true);
            if (!fromDB.isEmpty()) {
                results.addAll(fromDB.values());
            }
        }
        int sizeBefore = results.size();
        results.removeAll(Collections.singleton(null));
        int sizeAfter = results.size();
        if (sizeBefore > (sizeAfter + 1)) {
            ArrayList<String> nullz = new ArrayList<String>();
            for (int i = 0; i < results.size(); i++) {
                P obj = results.get(i);
                if (obj == null) {
                    nullz.add(keys.get(i));
                }
            }
            logger.warn("Found {} objects that are indexed but no longer exist in the database. Ids: {}",
                    sizeBefore - sizeAfter, nullz);
        }
        logger.debug("Search.searchQuery() {}", results.size());
    } catch (Exception e) {
        logger.warn(null, e);
    }
    return results;
}
From source file:com.ibm.bi.dml.runtime.matrix.mapred.MapperBase.java
public void configure(JobConf job) {
    super.configure(job);

    //get the indexes that this matrix file represents,
    //since one matrix file can occur multiple times in a statement
    try {
        representativeMatrixes = MRJobConfiguration.getInputMatrixIndexesInMapper(job);
    } catch (IOException e) {
        throw new RuntimeException(e);
    }

    //get input converter information
    inputConverter = MRJobConfiguration.getInputConverter(job, representativeMatrixes.get(0));

    DataGenMRInstruction[] allDataGenIns;
    MRInstruction[] allMapperIns;
    ReblockInstruction[] allReblockIns;
    CSVReblockInstruction[] allCSVReblockIns;

    try {
        allDataGenIns = MRJobConfiguration.getDataGenInstructions(job);

        //parse the instructions on the matrices that this file represents
        allMapperIns = MRJobConfiguration.getInstructionsInMapper(job);

        //parse the reblock instructions on the matrices that this file represents
        allReblockIns = MRJobConfiguration.getReblockInstructions(job);
        allCSVReblockIns = MRJobConfiguration.getCSVReblockInstructions(job);
    } catch (DMLUnsupportedOperationException e) {
        throw new RuntimeException(e);
    } catch (DMLRuntimeException e) {
        throw new RuntimeException(e);
    }

    //get all the output indexes
    byte[] outputs = MRJobConfiguration.getOutputIndexesInMapper(job);

    //get the dimension of all the representative matrices
    rlens = new long[representativeMatrixes.size()];
    clens = new long[representativeMatrixes.size()];
    for (int i = 0; i < representativeMatrixes.size(); i++) {
        rlens[i] = MRJobConfiguration.getNumRows(job, representativeMatrixes.get(i));
        clens[i] = MRJobConfiguration.getNumColumns(job, representativeMatrixes.get(i));
        // System.out.println("get dimension for "+representativeMatrixes.get(i)+": "+rlens[i]+", "+clens[i]);
    }

    //get the block sizes of the representative matrices
    brlens = new int[representativeMatrixes.size()];
    bclens = new int[representativeMatrixes.size()];
    for (int i = 0; i < representativeMatrixes.size(); i++) {
        brlens[i] = MRJobConfiguration.getNumRowsPerBlock(job, representativeMatrixes.get(i));
        bclens[i] = MRJobConfiguration.getNumColumnsPerBlock(job, representativeMatrixes.get(i));
        // System.out.println("get blocksize for "+representativeMatrixes.get(i)+": "+brlens[i]+", "+bclens[i]);
    }

    rbounds = new long[representativeMatrixes.size()];
    cbounds = new long[representativeMatrixes.size()];
    lastblockrlens = new int[representativeMatrixes.size()];
    lastblockclens = new int[representativeMatrixes.size()];

    //calculate upper boundaries for key value pairs
    if (valueClass.equals(MatrixBlock.class)) {
        for (int i = 0; i < representativeMatrixes.size(); i++) {
            rbounds[i] = (long) Math.ceil((double) rlens[i] / (double) brlens[i]);
            cbounds[i] = (long) Math.ceil((double) clens[i] / (double) bclens[i]);
            lastblockrlens[i] = (int) (rlens[i] % brlens[i]);
            lastblockclens[i] = (int) (clens[i] % bclens[i]);
            if (lastblockrlens[i] == 0)
                lastblockrlens[i] = brlens[i];
            if (lastblockclens[i] == 0)
                lastblockclens[i] = bclens[i];
            /*
             * what is this for????
             * // DRB: the row indexes need to be fixed
             * rbounds[i] = rlens[i];
             */
        }
    } else {
        for (int i = 0; i < representativeMatrixes.size(); i++) {
            rbounds[i] = rlens[i];
            cbounds[i] = clens[i];
            lastblockrlens[i] = 1;
            lastblockclens[i] = 1;
            // System.out.println("get bound for "+representativeMatrixes.get(i)+": "+rbounds[i]+", "+cbounds[i]);
        }
    }

    //load data from distributed cache (if required, reuse if jvm_reuse)
    try {
        setupDistCacheFiles(job);
    } catch (IOException ex) {
        throw new RuntimeException(ex);
    }

    //collect unary instructions for each representative matrix
    HashSet<Byte> set = new HashSet<Byte>();
    for (int i = 0; i < representativeMatrixes.size(); i++) {
        set.clear();
        set.add(representativeMatrixes.get(i));

        //collect the relevant datagen instructions for this representative matrix
        ArrayList<DataGenMRInstruction> dataGensForThisMatrix = new ArrayList<DataGenMRInstruction>();
        if (allDataGenIns != null) {
            for (DataGenMRInstruction ins : allDataGenIns) {
                if (set.contains(ins.getInput())) {
                    dataGensForThisMatrix.add(ins);
                    set.add(ins.output);
                }
            }
        }
        if (dataGensForThisMatrix.size() > 1)
            throw new RuntimeException("only expects at most one rand instruction per input");
        if (dataGensForThisMatrix.isEmpty())
            dataGen_instructions.add(null);
        else
            dataGen_instructions.add(dataGensForThisMatrix.get(0));

        //collect the relevant instructions for this representative matrix
        ArrayList<MRInstruction> opsForThisMatrix = new ArrayList<MRInstruction>();
        if (allMapperIns != null) {
            for (MRInstruction ins : allMapperIns) {
                try {
                    /*
                    boolean toAdd = true;
                    for (byte input : ins.getInputIndexes())
                        if (!set.contains(input)) {
                            toAdd = false;
                            break;
                        }
                    */
                    boolean toAdd = false;
                    for (byte input : ins.getInputIndexes())
                        if (set.contains(input)) {
                            toAdd = true;
                            break;
                        }
                    if (toAdd) {
                        opsForThisMatrix.add(ins);
                        set.add(ins.output);
                    }
                } catch (DMLRuntimeException e) {
                    throw new RuntimeException(e);
                }
            }
        }
        mapper_instructions.add(opsForThisMatrix);

        //collect the relevant reblock instructions for this representative matrix
        ArrayList<ReblockInstruction> reblocksForThisMatrix = new ArrayList<ReblockInstruction>();
        if (allReblockIns != null) {
            for (ReblockInstruction ins : allReblockIns) {
                if (set.contains(ins.input)) {
                    reblocksForThisMatrix.add(ins);
                    set.add(ins.output);
                }
            }
        }
        reblock_instructions.add(reblocksForThisMatrix);

        //collect the relevant CSV reblock instructions for this representative matrix
        ArrayList<CSVReblockInstruction> csvReblocksForThisMatrix = new ArrayList<CSVReblockInstruction>();
        if (allCSVReblockIns != null) {
            for (CSVReblockInstruction ins : allCSVReblockIns) {
                if (set.contains(ins.input)) {
                    csvReblocksForThisMatrix.add(ins);
                    set.add(ins.output);
                }
            }
        }
        csv_reblock_instructions.add(csvReblocksForThisMatrix);

        //collect the output indexes for this representative matrix
        ArrayList<Byte> outsForThisMatrix = new ArrayList<Byte>();
        for (byte output : outputs) {
            if (set.contains(output))
                outsForThisMatrix.add(output);
        }
        outputIndexes.add(outsForThisMatrix);
    }
}
From source file:android.framework.util.jar.JarVerifier.java
/**
 * Invoked for each new JAR entry read operation from the input
 * stream. This method constructs and returns a new {@link VerifierEntry}
 * which contains the certificates used to sign the entry and its hash value
 * as specified in the JAR MANIFEST format.
 *
 * @param name
 *            the name of an entry in a JAR file which is <b>not</b> in the
 *            {@code META-INF} directory.
 * @return a new instance of {@link VerifierEntry} which can be used by
 *         callers as an {@link OutputStream}.
 */
VerifierEntry initEntry(String name) {
    // If no manifest is present by the time an entry is found,
    // verification cannot occur. If no signature files have
    // been found, do not verify.
    if (man == null || signatures.size() == 0) {
        return null;
    }

    Attributes attributes = man.getAttributes(name);
    // entry has no digest
    if (attributes == null) {
        return null;
    }

    ArrayList<Certificate> certs = new ArrayList<Certificate>();
    Iterator<Map.Entry<String, HashMap<String, Attributes>>> it = signatures.entrySet().iterator();
    while (it.hasNext()) {
        Map.Entry<String, HashMap<String, Attributes>> entry = it.next();
        HashMap<String, Attributes> hm = entry.getValue();
        if (hm.get(name) != null) {
            // Found an entry for entry name in .SF file
            String signatureFile = entry.getKey();
            certs.addAll(getSignerCertificates(signatureFile, certificates));
        }
    }

    // entry is not signed
    if (certs.isEmpty()) {
        return null;
    }
    Certificate[] certificatesArray = certs.toArray(new Certificate[certs.size()]);

    String algorithms = attributes.getValue("Digest-Algorithms");
    if (algorithms == null) {
        algorithms = "SHA SHA1";
    }
    StringTokenizer tokens = new StringTokenizer(algorithms);
    while (tokens.hasMoreTokens()) {
        String algorithm = tokens.nextToken();
        String hash = attributes.getValue(algorithm + "-Digest");
        if (hash == null) {
            continue;
        }

        byte[] hashBytes = hash.getBytes(Charsets.ISO_8859_1);
        try {
            return new VerifierEntry(name, MessageDigest.getInstance(algorithm), hashBytes, certificatesArray);
        } catch (NoSuchAlgorithmException e) {
            // ignored
        }
    }
    return null;
}
From source file:com.datatorrent.apps.logstream.DimensionOperator.java
@Override
public void endWindow() {
    if (outTimeBuckets == null || outTimeBuckets.isEmpty()) {
        return;
    }

    long time = LogstreamUtil.extractTime(currentWindowId, windowWidth);
    // get time buckets for current window id
    List<String> timeBucketList = getTimeBucketList(time);

    // get the list of time buckets to be emitted and replace them in outTimeBuckets
    // with the next time bucket to be emitted
    ArrayList<String> emitTimeBucketList = new ArrayList<String>();
    for (int i = 0; i < timeBucketList.size(); i++) {
        String timeBucket = timeBucketList.get(i);
        if (!timeBucket.equals(outTimeBuckets.get(i))) {
            emitTimeBucketList.add(outTimeBuckets.get(i));
            outTimeBuckets.set(i, timeBucket);
        }
    }

    // emit the computations for each time bucket in emitTimeBucketList and remove those
    // buckets from the cache since they are now already processed
    if (!emitTimeBucketList.isEmpty()) {
        ArrayList<String> obsoleteKeys = new ArrayList<String>();
        for (String outTimeStr : emitTimeBucketList) {
            HashMap<String, DimensionObject<String>> outputAggregationsObject;
            for (Entry<String, Map<String, Map<AggregateOperation, Number>>> keys : cacheObject.entrySet()) {
                String key = keys.getKey();
                if (key.startsWith(outTimeStr)) {
                    Map<String, Map<AggregateOperation, Number>> dimValues = keys.getValue();
                    for (Entry<String, Map<AggregateOperation, Number>> dimValue : dimValues.entrySet()) {
                        String dimValueName = dimValue.getKey();
                        Map<AggregateOperation, Number> operations = dimValue.getValue();
                        outputAggregationsObject = new HashMap<String, DimensionObject<String>>();
                        for (Entry<AggregateOperation, Number> operation : operations.entrySet()) {
                            AggregateOperation aggrOperationType = operation.getKey();
                            Number aggr = operation.getValue();
                            String outKey = key + "." + aggrOperationType.name();
                            DimensionObject<String> outDimObj = new DimensionObject<String>(
                                    (MutableDouble) aggr, dimValueName);
                            outputAggregationsObject.put(outKey, outDimObj);
                        }
                        aggregationsOutput.emit(outputAggregationsObject);
                    }
                    // remove emitted key
                    obsoleteKeys.add(key);
                }
            }
            for (String key : obsoleteKeys) {
                cacheObject.remove(key);
            }
        }
    }
}