List of usage examples for java.util.LinkedList.isEmpty()
boolean isEmpty();
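isEmpty() returns true when the list contains no elements. It is the usual guard before calls such as getFirst() or getLast(), which throw NoSuchElementException on an empty list. A minimal, self-contained sketch (class name and sample values are illustrative, not taken from any of the projects below):

import java.util.LinkedList;

public class IsEmptyDemo {
    public static void main(String[] args) {
        LinkedList<String> tasks = new LinkedList<>();
        System.out.println(tasks.isEmpty()); // true: nothing added yet

        tasks.add("compile");
        tasks.add("test");
        System.out.println(tasks.isEmpty()); // false: two elements present

        // Typical guard: getLast() throws NoSuchElementException on an empty
        // list, so check isEmpty() first, as most of the examples below do.
        String last = tasks.isEmpty() ? null : tasks.getLast();
        System.out.println(last); // prints "test"
    }
}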
From source file:com.healthcit.cacure.businessdelegates.GeneratedModuleDataManager.java
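In this example, isEmpty() guards a getLast() call: the last previously generated unique key is only read when the list of previous answers is non-empty.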
@SuppressWarnings({ "unchecked" })
private JSONObject generateRandomAnswerValueForUniqueKey(Map question, Map uniqueKey,
        Map<String, JSONObject> lastUniqueKey, Map<Object, List<Object>> previousAnswers, Object groupId) {
    JSONObject randomAnswerValueObject;
    // Get the question's UUID
    String questionUUID = (String) question.get(UUID_VALUE);
    // If a random answer has already been generated for this question
    // (for example, if this is a unique-per-all-modules question that is also
    // a unique-per-entity question, and the unique-per-entity question was processed first),
    // then simply copy the previously generated random answer for this unique key
    if (uniqueKey.containsKey(questionUUID)) {
        randomAnswerValueObject = (JSONObject) uniqueKey.get(questionUUID);
        return randomAnswerValueObject;
    } else {
        // Get the previously generated unique keys as an ordered list
        LinkedList<Map.Entry> previousUniqueKeys = new LinkedList(((LinkedHashMap) previousAnswers).entrySet());
        // Get the value of the field which identifies the last unique group which was processed
        Map.Entry last = (previousUniqueKeys.isEmpty() ? null : previousUniqueKeys.getLast());
        List list = (last == null ? null : (List) last.getValue());
        Object lastGroupId = list == null ? null : Collections.synchronizedList(list).get(list.size() - 1);
        // If the current groupId matches the lastGroupId,
        // then the same answer value should be used for the current unique key
        // (since uniqueness is determined by the group)
        if (groupId.equals(lastGroupId)) {
            randomAnswerValueObject = new JSONObject();
            randomAnswerValueObject.putAll(lastUniqueKey.get(questionUUID));
            return randomAnswerValueObject;
        } else {
            // Otherwise, a new group is being processed, so generate a new random answer value.
            // Determine the last generated random value for this question.
            JSONObject lastRandomAnswerValueObject = lastUniqueKey.get(questionUUID);
            Object lastRandomAnswerValue = lastRandomAnswerValueObject == null ? null
                    : getAnswerValue(lastRandomAnswerValueObject);
            return generateRandomAnswerValue(question, lastRandomAnswerValue,
                    (lastRandomAnswerValue == null ? Algorithm.PSEUDORANDOM : Algorithm.EVEN));
        }
    }
}
From source file:org.trnltk.experiment.morphology.ambiguity.DataDiffUtil.java
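This generic port of diff-match-patch's semantic cleanup bails out immediately when the diff list is empty, avoiding any iterator work on an empty LinkedList.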
/**
 * Reduce the number of edits by eliminating semantically trivial equalities.
 *
 * @param diffs LinkedList of Diff objects.
 */
public void diff_cleanupSemantic(LinkedList<Diff<T>> diffs) {
    if (diffs.isEmpty()) {
        return;
    }
    boolean changes = false;
    Stack<Diff<T>> equalities = new Stack<Diff<T>>(); // Stack of equalities.
    List<T> lastequality = null; // Always equal to equalities.lastElement().text
    ListIterator<Diff<T>> pointer = diffs.listIterator();
    // Number of characters that changed prior to the equality.
    int length_insertions1 = 0;
    int length_deletions1 = 0;
    // Number of characters that changed after the equality.
    int length_insertions2 = 0;
    int length_deletions2 = 0;
    Diff<T> thisDiff = pointer.next();
    while (thisDiff != null) {
        if (thisDiff.operation == Operation.EQUAL) {
            // Equality found.
            equalities.push(thisDiff);
            length_insertions1 = length_insertions2;
            length_deletions1 = length_deletions2;
            length_insertions2 = 0;
            length_deletions2 = 0;
            lastequality = thisDiff.text;
        } else {
            // An insertion or deletion.
            if (thisDiff.operation == Operation.INSERT) {
                length_insertions2 += thisDiff.text.size();
            } else {
                length_deletions2 += thisDiff.text.size();
            }
            // Eliminate an equality that is smaller or equal to the edits on both
            // sides of it.
            if (lastequality != null
                    && (lastequality.size() <= Math.max(length_insertions1, length_deletions1))
                    && (lastequality.size() <= Math.max(length_insertions2, length_deletions2))) {
                // Walk back to the offending equality.
                while (thisDiff != equalities.lastElement()) {
                    thisDiff = pointer.previous();
                }
                pointer.next();
                // Replace the equality with a delete.
                pointer.set(new Diff<T>(Operation.DELETE, lastequality));
                // Insert a corresponding insert.
                pointer.add(new Diff<T>(Operation.INSERT, lastequality));
                equalities.pop(); // Throw away the equality we just deleted.
                if (!equalities.empty()) {
                    // Throw away the previous equality (it needs to be reevaluated).
                    equalities.pop();
                }
                if (equalities.empty()) {
                    // There are no previous equalities; walk back to the start.
                    while (pointer.hasPrevious()) {
                        pointer.previous();
                    }
                } else {
                    // There is a safe equality we can fall back to.
                    thisDiff = equalities.lastElement();
                    while (thisDiff != pointer.previous()) {
                        // Intentionally empty loop.
                    }
                }
                // Reset the counters.
                length_insertions1 = 0;
                length_insertions2 = 0;
                length_deletions1 = 0;
                length_deletions2 = 0;
                lastequality = null;
                changes = true;
            }
        }
        thisDiff = pointer.hasNext() ? pointer.next() : null;
    }

    // Normalize the diff.
    if (changes) {
        diff_cleanupMerge(diffs);
    }
    diff_cleanupSemanticLossless(diffs);

    // Find any overlaps between deletions and insertions.
    // e.g: <del>abcxxx</del><ins>xxxdef</ins>
    //   -> <del>abc</del>xxx<ins>def</ins>
    // e.g: <del>xxxabc</del><ins>defxxx</ins>
    //   -> <ins>def</ins>xxx<del>abc</del>
    // Only extract an overlap if it is as big as the edit ahead or behind it.
    pointer = diffs.listIterator();
    Diff<T> prevDiff = null;
    thisDiff = null;
    if (pointer.hasNext()) {
        prevDiff = pointer.next();
        if (pointer.hasNext()) {
            thisDiff = pointer.next();
        }
    }
    while (thisDiff != null) {
        if (prevDiff.operation == Operation.DELETE && thisDiff.operation == Operation.INSERT) {
            List<T> deletion = prevDiff.text;
            List<T> insertion = thisDiff.text;
            int overlap_length1 = this.diff_commonOverlap(deletion, insertion);
            int overlap_length2 = this.diff_commonOverlap(insertion, deletion);
            if (overlap_length1 >= overlap_length2) {
                if (overlap_length1 >= deletion.size() / 2.0 || overlap_length1 >= insertion.size() / 2.0) {
                    // Overlap found. Insert an equality and trim the surrounding edits.
                    pointer.previous();
                    pointer.add(new Diff<T>(Operation.EQUAL, insertion.subList(0, overlap_length1)));
                    prevDiff.text = deletion.subList(0, deletion.size() - overlap_length1);
                    thisDiff.text = insertion.subList(overlap_length1, insertion.size());
                    // pointer.add inserts the element before the cursor, so there is
                    // no need to step past the new element.
                }
            } else {
                if (overlap_length2 >= deletion.size() / 2.0 || overlap_length2 >= insertion.size() / 2.0) {
                    // Reverse overlap found.
                    // Insert an equality and swap and trim the surrounding edits.
                    pointer.previous();
                    pointer.add(new Diff<T>(Operation.EQUAL, deletion.subList(0, overlap_length2)));
                    prevDiff.operation = Operation.INSERT;
                    prevDiff.text = insertion.subList(0, insertion.size() - overlap_length2);
                    thisDiff.operation = Operation.DELETE;
                    thisDiff.text = deletion.subList(overlap_length2, deletion.size());
                    // pointer.add inserts the element before the cursor, so there is
                    // no need to step past the new element.
                }
            }
            thisDiff = pointer.hasNext() ? pointer.next() : null;
        }
        prevDiff = thisDiff;
        thisDiff = pointer.hasNext() ? pointer.next() : null;
    }
}
From source file:com.android.talkback.menurules.RuleViewPager.java
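Here isEmpty() short-circuits the method once it is known that neither a previous-page nor a next-page item was added, so no click listener is attached.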
@Override
public List<ContextMenuItem> getMenuItemsForNode(TalkBackService service,
        ContextMenuItemBuilder menuItemBuilder, AccessibilityNodeInfoCompat node) {
    final LinkedList<ContextMenuItem> items = new LinkedList<>();
    AccessibilityNodeInfoCompat pagerNode = null;
    try {
        pagerNode = AccessibilityNodeInfoUtils.getSelfOrMatchingAncestor(node, FILTER_PAGED);
        if (pagerNode == null) {
            return items;
        }
        if (AccessibilityNodeInfoUtils.supportsAnyAction(pagerNode,
                AccessibilityNodeInfoCompat.ACTION_SCROLL_BACKWARD)) {
            final ContextMenuItem prevPage = menuItemBuilder.createMenuItem(service, Menu.NONE,
                    R.id.viewpager_breakout_prev_page, Menu.NONE,
                    service.getString(R.string.title_viewpager_breakout_prev_page));
            items.add(prevPage);
        }
        if (AccessibilityNodeInfoUtils.supportsAnyAction(pagerNode,
                AccessibilityNodeInfoCompat.ACTION_SCROLL_FORWARD)) {
            final ContextMenuItem nextPage = menuItemBuilder.createMenuItem(service, Menu.NONE,
                    R.id.viewpager_breakout_next_page, Menu.NONE,
                    service.getString(R.string.title_viewpager_breakout_next_page));
            items.add(nextPage);
        }
        if (items.isEmpty()) {
            return items;
        }
        final AccessibilityNodeInfoCompat pagerNodeClone = AccessibilityNodeInfoCompat.obtain(pagerNode);
        final ViewPagerItemClickListener itemClickListener = new ViewPagerItemClickListener(pagerNodeClone);
        for (ContextMenuItem item : items) {
            item.setOnMenuItemClickListener(itemClickListener);
        }
        return items;
    } finally {
        AccessibilityNodeInfoUtils.recycleNodes(pagerNode);
    }
}
From source file:org.mycore.common.content.transformer.MCRXSLTransformer.java
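Each new TransformerHandler is chained to the previous step's Result; isEmpty() distinguishes the first step, which has no predecessor to connect to.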
protected LinkedList<TransformerHandler> getTransformHandlerList(MCRParameterCollector parameterCollector)
        throws TransformerConfigurationException, SAXException {
    checkTemplateUptodate();
    LinkedList<TransformerHandler> xslSteps = new LinkedList<TransformerHandler>();
    // every transform handler shares the same ErrorListener instance
    MCRErrorListener errorListener = MCRErrorListener.getInstance();
    for (Templates template : templates) {
        TransformerHandler handler = tFactory.newTransformerHandler(template);
        parameterCollector.setParametersTo(handler.getTransformer());
        handler.getTransformer().setErrorListener(errorListener);
        if (TRACE_LISTENER_ENABLED) {
            TransformerImpl transformer = (TransformerImpl) handler.getTransformer();
            TraceManager traceManager = transformer.getTraceManager();
            try {
                traceManager.addTraceListener(TRACE_LISTENER);
            } catch (TooManyListenersException e) {
                LOGGER.warn("Could not add MCRTraceListener.", e);
            }
        }
        if (!xslSteps.isEmpty()) {
            Result result = new SAXResult(handler);
            xslSteps.getLast().setResult(result);
        }
        xslSteps.add(handler);
    }
    return xslSteps;
}
From source file:com.erudika.para.persistence.MongoDBDAO.java
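The pager's running count is only updated when the page of results is non-empty.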
@Override
public <P extends ParaObject> List<P> readPage(String appid, Pager pager) {
    LinkedList<P> results = new LinkedList<P>();
    if (StringUtils.isBlank(appid)) {
        return results;
    }
    if (pager == null) {
        pager = new Pager();
    }
    try {
        String lastKey = pager.getLastKey();
        MongoCursor<Document> cursor;
        Bson filter = Filters.gt(_OBJECT_ID, lastKey);
        if (lastKey == null) {
            cursor = getTable(appid).find().batchSize(pager.getLimit()).limit(pager.getLimit()).iterator();
        } else {
            cursor = getTable(appid).find(filter).batchSize(pager.getLimit()).limit(pager.getLimit())
                    .iterator();
        }
        while (cursor.hasNext()) {
            Map<String, Object> row = documentToMap(cursor.next());
            P obj = fromRow(row);
            if (obj != null) {
                results.add(obj);
                pager.setLastKey((String) row.get(_OBJECT_ID));
            }
        }
        if (!results.isEmpty()) {
            pager.setCount(pager.getCount() + results.size());
        }
    } catch (Exception e) {
        logger.error(null, e);
    }
    logger.debug("readPage() page: {}, results: {}", pager.getPage(), results.size());
    return results;
}
From source file:com.ikanow.aleph2.logging.service.LoggingService.java
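Validation errors accumulate in a LinkedList; isEmpty() then selects between the success tuple and the error tuple in the return statement.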
@Override
public Tuple2<String, List<BasicMessageBean>> validateSchema(final LoggingSchemaBean schema,
        final DataBucketBean bucket) {
    final LinkedList<BasicMessageBean> errors = new LinkedList<>();
    // check the fields are actually log4j levels and kick back any errors
    if (schema.log_level() != null) {
        try {
            Level.valueOf(schema.log_level());
        } catch (Exception ex) {
            errors.add(ErrorUtils.buildErrorMessage(this.getClass().getSimpleName(),
                    "log_level '" + schema.log_level() + "' is not a log4j.Levels enum value",
                    ex.getMessage()));
        }
    }
    if (schema.log_level_overrides() != null) {
        schema.log_level_overrides().forEach((k, v) -> {
            try {
                Level.valueOf(v);
            } catch (Exception ex) {
                errors.add(ErrorUtils.buildErrorMessage(this.getClass().getSimpleName(),
                        "log_level_override [" + k + "] '" + v + "' is not a log4j.Levels enum value",
                        ex.getMessage()));
            }
        });
    }
    return errors.isEmpty()
            ? Tuples._2T(
                    BucketUtils.getUniqueSignature(
                            BucketUtils.convertDataBucketBeanToLogging(bucket).full_name(), Optional.empty()),
                    Collections.emptyList())
            : Tuples._2T("", errors);
}
From source file:com.redhat.persistence.oql.QFrame.java
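isEmpty() is used on several lists here: on joins to detect the first non-empty frame and to catch unresolved variables, on m_used to tell when a condition's variables are fully defined, and on skipped to drain deferred frames.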
private void render(LinkedList joins, List where, QFrame oroot, QFrame root, Set emitted) {
    // If the first non-empty frame is outer we treat it as inner.
    if (m_outer && !joins.isEmpty()) {
        oroot = this;
    }
    Code table = null;
    if (m_table != null && m_duplicate == null) {
        table = new Code(m_table).add(" ").add(alias());
    } else if (m_tableExpr != null && m_duplicate == null) {
        table = m_tableExpr.emit(m_generator).add(" ").add(alias());
    }
    if (table != null) {
        joins.addFirst(JFrame.leaf(table, this, oroot));
    }

    List children = getChildren();
    for (int i = 0; i < children.size(); i++) {
        QFrame child = (QFrame) children.get(i);
        child.render(joins, where, oroot, root, emitted);
    }

    if (m_condition != null) {
        Code c = m_condition.emit(m_generator);
        if (!c.isTrue() && !emitted.contains(c)) {
            m_used.clear();
            frames(m_condition, m_used);
            boolean join = false;
            for (Iterator it = joins.iterator(); it.hasNext();) {
                JFrame frame = (JFrame) it.next();
                boolean modified = m_used.removeAll(frame.defined);
                if (m_used.isEmpty()) {
                    // We default to putting things in the where clause here
                    // because oracle won't resolve external variable references
                    // correctly when they appear in join conditions.
                    if (oroot.equals(root)) {
                        where.add(c);
                    } else if (frame.froot != null && oroot.equals(frame.froot)) {
                        frame.join = frame.join.add(" and ").add(c);
                    } else {
                        throw new IllegalStateException(
                                "unable to place condition: " + m_condition + " " + c + trace(joins));
                    }
                } else if (modified) {
                    join = true;
                    break;
                }
            }
            if (join) {
                JFrame right = (JFrame) joins.removeFirst();
                if (joins.isEmpty()) {
                    throw new IllegalStateException(
                            "unresolved variable in condition: " + m_condition + " " + c + trace(joins));
                }
                LinkedList skipped = null;
                JFrame left = (JFrame) joins.removeFirst();
                while (true) {
                    m_used.clear();
                    frames(m_condition, m_used);
                    m_used.removeAll(right.defined);
                    boolean cross = m_used.removeAll(left.defined);
                    if (m_used.isEmpty()) {
                        joins.addFirst(JFrame.join(left, right, c));
                        break;
                    } else if (joins.isEmpty()) {
                        throw new IllegalStateException(
                                "unresolved variable in condition: " + m_condition + " " + c + trace(joins));
                    } else if (cross) {
                        JFrame lefter = (JFrame) joins.removeFirst();
                        left = JFrame.cross(lefter, left);
                    } else {
                        if (skipped == null) {
                            skipped = new LinkedList();
                        }
                        skipped.addLast(left);
                        left = (JFrame) joins.removeFirst();
                    }
                }
                if (skipped != null) {
                    while (!skipped.isEmpty()) {
                        joins.addFirst(skipped.removeLast());
                    }
                }
            }
            emitted.add(c);
        }
    }
}
From source file:com.caricah.iotracah.datastore.ignitecache.internal.impl.SubscriptionFilterHandler.java
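When building the subscription-filter tree, an empty growingParentIds list marks a root-level filter, which gets the parent id 0; otherwise the last accumulated id becomes the parent.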
public Observable<IotSubscriptionFilter> createTree(String partitionId, List<String> topicFilterTreeRoute) {
    return Observable.create(observer -> {
        try {
            List<String> growingTitles = new ArrayList<>();
            LinkedList<Long> growingParentIds = new LinkedList<>();

            ListIterator<String> pathIterator = topicFilterTreeRoute.listIterator();
            while (pathIterator.hasNext()) {
                growingTitles.add(pathIterator.next());
                IotSubscriptionFilterKey iotSubscriptionFilterKey = keyFromList(partitionId, growingTitles);
                Observable<IotSubscriptionFilter> filterObservable = getByKeyWithDefault(
                        iotSubscriptionFilterKey, null);
                filterObservable.subscribe(internalSubscriptionFilter -> {
                    if (null == internalSubscriptionFilter) {
                        internalSubscriptionFilter = new IotSubscriptionFilter();
                        internalSubscriptionFilter.setPartitionId(partitionId);
                        internalSubscriptionFilter.setName(iotSubscriptionFilterKey.getName());
                        internalSubscriptionFilter.setId(getIdSequence().incrementAndGet());
                        if (growingParentIds.isEmpty()) {
                            internalSubscriptionFilter.setParentId(0L);
                        } else {
                            internalSubscriptionFilter.setParentId(growingParentIds.getLast());
                        }
                        save(iotSubscriptionFilterKey, internalSubscriptionFilter);
                    }
                    growingParentIds.add(internalSubscriptionFilter.getId());
                    if (growingTitles.size() == topicFilterTreeRoute.size()) {
                        observer.onNext(internalSubscriptionFilter);
                    }
                }, throwable -> {
                }, () -> {
                    if (!pathIterator.hasNext()) {
                        observer.onCompleted();
                    }
                });
            }
        } catch (Exception e) {
            observer.onError(e);
        }
    });
}
From source file:org.myrian.persistence.oql.QFrame.java
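A close variant of the previous QFrame.render: the only substantive difference is that a condition that cannot be placed falls back to the where clause instead of throwing an IllegalStateException.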
private void render(LinkedList joins, List where, QFrame oroot, QFrame root, Set emitted) {
    // If the first non-empty frame is outer we treat it as inner.
    if (m_outer && !joins.isEmpty()) {
        oroot = this;
    }
    Code table = null;
    if (m_table != null && m_duplicate == null) {
        table = new Code(m_table).add(" ").add(alias());
    } else if (m_tableExpr != null && m_duplicate == null) {
        table = m_tableExpr.emit(m_generator).add(" ").add(alias());
    }
    if (table != null) {
        joins.addFirst(JFrame.leaf(table, this, oroot));
    }

    List children = getChildren();
    for (int i = 0; i < children.size(); i++) {
        QFrame child = (QFrame) children.get(i);
        child.render(joins, where, oroot, root, emitted);
    }

    if (m_condition != null) {
        Code c = m_condition.emit(m_generator);
        if (!c.isTrue() && !emitted.contains(c)) {
            m_used.clear();
            frames(m_condition, m_used);
            boolean join = false;
            for (Iterator it = joins.iterator(); it.hasNext();) {
                JFrame frame = (JFrame) it.next();
                boolean modified = m_used.removeAll(frame.defined);
                if (m_used.isEmpty()) {
                    // We default to putting things in the where clause here
                    // because oracle won't resolve external variable references
                    // correctly when they appear in join conditions.
                    if (oroot.equals(root)) {
                        where.add(c);
                    } else if (frame.froot != null && oroot.equals(frame.froot)) {
                        frame.join = frame.join.add(" and ").add(c);
                    } else {
                        /*
                         * XXX rhs needs to make sure this is
                         * the right thing to do
                         */
                        where.add(c);
                    }
                } else if (modified) {
                    join = true;
                    break;
                }
            }
            if (join) {
                JFrame right = (JFrame) joins.removeFirst();
                if (joins.isEmpty()) {
                    throw new IllegalStateException(
                            "unresolved variable in condition: " + m_condition + " " + c + trace(joins));
                }
                LinkedList skipped = null;
                JFrame left = (JFrame) joins.removeFirst();
                while (true) {
                    m_used.clear();
                    frames(m_condition, m_used);
                    m_used.removeAll(right.defined);
                    boolean cross = m_used.removeAll(left.defined);
                    if (m_used.isEmpty()) {
                        joins.addFirst(JFrame.join(left, right, c));
                        break;
                    } else if (joins.isEmpty()) {
                        throw new IllegalStateException(
                                "unresolved variable in condition: " + m_condition + " " + c + trace(joins));
                    } else if (cross) {
                        JFrame lefter = (JFrame) joins.removeFirst();
                        left = JFrame.cross(lefter, left);
                    } else {
                        if (skipped == null) {
                            skipped = new LinkedList();
                        }
                        skipped.addLast(left);
                        left = (JFrame) joins.removeFirst();
                    }
                }
                if (skipped != null) {
                    while (!skipped.isEmpty()) {
                        joins.addFirst(skipped.removeLast());
                    }
                }
            }
            emitted.add(c);
        }
    }
}
From source file:org.epics.archiverappliance.etl.DataReductionDailyETLTest.java
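When the raw-with-postprocessor and reduced event counts disagree, two timestamp lists are drained side by side, with isEmpty() guarding each pop().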
/**
 * 1) Set up the raw and reduced PVs
 * 2) Generate data in STS
 * 3) Run ETL
 * 4) Compare
 */
@Test
public void testReducedETL() throws Exception {
    // Set up the raw and reduced PVs
    PlainPBStoragePlugin etlSTS = (PlainPBStoragePlugin) StoragePluginURLParser
            .parseStoragePlugin("pb://localhost?name=STS&rootFolder=" + shortTermFolderName
                    + "/&partitionGranularity=PARTITION_HOUR", configService);
    PlainPBStoragePlugin etlMTS = (PlainPBStoragePlugin) StoragePluginURLParser
            .parseStoragePlugin("pb://localhost?name=MTS&rootFolder=" + mediumTermFolderName
                    + "/&partitionGranularity=PARTITION_DAY", configService);
    PlainPBStoragePlugin etlLTS = (PlainPBStoragePlugin) StoragePluginURLParser
            .parseStoragePlugin("pb://localhost?name=LTS&rootFolder=" + longTermFolderName
                    + "/&partitionGranularity=PARTITION_YEAR&reducedata=" + reduceDataUsing, configService);
    {
        PVTypeInfo typeInfo = new PVTypeInfo(rawPVName, ArchDBRTypes.DBR_SCALAR_DOUBLE, true, 1);
        String[] dataStores = new String[] { etlSTS.getURLRepresentation(), etlMTS.getURLRepresentation(),
                etlLTS.getURLRepresentation() };
        typeInfo.setDataStores(dataStores);
        configService.updateTypeInfoForPV(rawPVName, typeInfo);
        configService.registerPVToAppliance(rawPVName, configService.getMyApplianceInfo());
    }
    {
        PVTypeInfo typeInfo = new PVTypeInfo(reducedPVName, ArchDBRTypes.DBR_SCALAR_DOUBLE, true, 1);
        String[] dataStores = new String[] { etlSTS.getURLRepresentation(), etlMTS.getURLRepresentation(),
                etlLTS.getURLRepresentation() };
        typeInfo.setDataStores(dataStores);
        configService.updateTypeInfoForPV(reducedPVName, typeInfo);
        configService.registerPVToAppliance(reducedPVName, configService.getMyApplianceInfo());
    }
    // Control ETL manually
    configService.getETLLookup().manualControlForUnitTests();

    short currentYear = TimeUtils.getCurrentYear();
    for (int day = 0; day < 365; day++) {
        // Generate data into the STS on a daily basis
        ArrayListEventStream genDataRaw = new ArrayListEventStream(86400,
                new RemotableEventStreamDesc(ArchDBRTypes.DBR_SCALAR_DOUBLE, rawPVName, currentYear));
        ArrayListEventStream genDataReduced = new ArrayListEventStream(86400,
                new RemotableEventStreamDesc(ArchDBRTypes.DBR_SCALAR_DOUBLE, reducedPVName, currentYear));
        for (int second = 0; second < 86400; second++) {
            YearSecondTimestamp ysts = new YearSecondTimestamp(currentYear, day * 86400 + second, 0);
            Timestamp ts = TimeUtils.convertFromYearSecondTimestamp(ysts);
            genDataRaw.add(new POJOEvent(ArchDBRTypes.DBR_SCALAR_DOUBLE, ts,
                    new ScalarValue<Double>(second * 1.0), 0, 0));
            genDataReduced.add(new POJOEvent(ArchDBRTypes.DBR_SCALAR_DOUBLE, ts,
                    new ScalarValue<Double>(second * 1.0), 0, 0));
        }
        try (BasicContext context = new BasicContext()) {
            etlSTS.appendData(context, rawPVName, genDataRaw);
            etlSTS.appendData(context, reducedPVName, genDataReduced);
        }
        logger.debug("Done generating data into the STS for day " + day);

        // Run ETL at the end of the day
        Timestamp timeETLruns = TimeUtils
                .convertFromYearSecondTimestamp(new YearSecondTimestamp(currentYear, day * 86400 + 86399, 0));
        ETLExecutor.runETLs(configService, timeETLruns);
        logger.debug("Done performing ETL as though today is "
                + TimeUtils.convertToHumanReadableString(timeETLruns));

        // Compare data for the raw+postprocessor and reduced PVs.
        PostProcessor postProcessor = PostProcessors.findPostProcessor(reduceDataUsing);
        postProcessor.initialize(reduceDataUsing, rawPVName);
        int rawWithPPCount = 0;
        int reducedCount = 0;
        try (BasicContext context = new BasicContext()) {
            Timestamp startTime = TimeUtils.minusDays(TimeUtils.now(), 10 * 366);
            Timestamp endTime = TimeUtils.plusDays(TimeUtils.now(), 10 * 366);
            LinkedList<Timestamp> rawTimestamps = new LinkedList<Timestamp>();
            LinkedList<Timestamp> reducedTimestamps = new LinkedList<Timestamp>();
            try (EventStream rawWithPP = new CurrentThreadWorkerEventStream(rawPVName,
                    etlLTS.getDataForPV(context, rawPVName, startTime, endTime, postProcessor))) {
                for (Event e : rawWithPP) {
                    rawTimestamps.add(e.getEventTimeStamp());
                    rawWithPPCount++;
                }
            }
            try (EventStream reduced = new CurrentThreadWorkerEventStream(reducedPVName,
                    etlLTS.getDataForPV(context, reducedPVName, startTime, endTime))) {
                for (Event e : reduced) {
                    reducedTimestamps.add(e.getEventTimeStamp());
                    reducedCount++;
                }
            }
            logger.debug("For day " + day + " we have " + rawWithPPCount + " rawWithPP events and "
                    + reducedCount + " reduced events");
            if (rawTimestamps.size() != reducedTimestamps.size()) {
                while (!rawTimestamps.isEmpty() || !reducedTimestamps.isEmpty()) {
                    if (!rawTimestamps.isEmpty())
                        logger.info("Raw/PP " + TimeUtils.convertToHumanReadableString(rawTimestamps.pop()));
                    if (!reducedTimestamps.isEmpty())
                        logger.info("Reduced " + TimeUtils.convertToHumanReadableString(reducedTimestamps.pop()));
                }
            }
            assertTrue("For day " + day + " we have " + rawWithPPCount + " rawWithPP events and "
                    + reducedCount + " reduced events", rawWithPPCount == reducedCount);
        }
        if (day > 2) {
            assertTrue("For day " + day + ", seems like no events were moved by ETL into LTS for " + rawPVName
                    + " Count = " + rawWithPPCount, (rawWithPPCount != 0));
            assertTrue("For day " + day + ", seems like no events were moved by ETL into LTS for "
                    + reducedPVName + " Count = " + reducedCount, (reducedCount != 0));
        }
    }
}