List of usage examples for java.util LinkedList pop
public E pop()
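pop() removes and returns the first element of this list; it is equivalent to removeFirst() and throws NoSuchElementException if the list is empty. Because push() and pop() both operate on the head, a LinkedList can be used directly as a LIFO stack. Before the longer real-world examples below, here is a minimal self-contained sketch (the class name PopDemo is ours, purely for illustration):

import java.util.LinkedList;

public class PopDemo {
    public static void main(String[] args) {
        LinkedList<String> stack = new LinkedList<>();
        stack.push("a"); // stack: [a]
        stack.push("b"); // stack: [b, a] -- push() inserts at the head
        System.out.println(stack.pop()); // prints "b": pop() removes and returns the head
        System.out.println(stack.pop()); // prints "a"; the list is now empty
        // Calling stack.pop() again would throw java.util.NoSuchElementException.
    }
}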
From source file:org.graphipedia.wikipedia.parser.SimpleStaxParser.java
/**
 * Parses the elements in the XML file.
 * @param reader The XML stream.
 * @throws XMLStreamException when something goes wrong while parsing the XML file.
 */
private void parseElements(XMLStreamReader reader) throws XMLStreamException {
    LinkedList<String> elementStack = new LinkedList<String>();
    StringBuilder textBuffer = new StringBuilder();
    List<String> attributeValues = new ArrayList<String>();
    while (reader.hasNext()) {
        switch (reader.next()) {
        case XMLEvent.START_ELEMENT:
            String startElement = reader.getName().getLocalPart();
            elementStack.push(startElement);
            attributeValues = new ArrayList<String>();
            if (isInterestingWithAttributes(startElement)) {
                int noAttributes = reader.getAttributeCount();
                for (int i = 0; i < noAttributes; i += 1)
                    attributeValues.add(reader.getAttributeValue(i));
            }
            textBuffer.setLength(0);
            break;
        case XMLEvent.END_ELEMENT:
            String element = elementStack.pop();
            if (isInterestingWithAttributes(element)) {
                if (!handleElement(element, textBuffer.toString().trim(), attributeValues))
                    return;
            } else if (isInteresting(element)) {
                if (!handleElement(element, textBuffer.toString().trim()))
                    return;
            }
            break;
        case XMLEvent.CHARACTERS:
            if (isInteresting(elementStack.peek())) {
                textBuffer.append(reader.getText());
            }
            break;
        }
    }
}
From source file:org.pircbotx.ReplayServer.java
public static void replay(Configuration.Builder config, InputStream input, String title) throws Exception {
    log.info("---Replaying {}---", title);
    StopWatch timer = new StopWatch();
    timer.start();

    //Wrap listener manager with ours that siphons off events
    final Queue<Event> eventQueue = Lists.newLinkedList();
    WrapperListenerManager newManager = new WrapperListenerManager(config.getListenerManager(), eventQueue);
    config.setListenerManager(newManager);
    config.addListener(new ReplayListener());

    final LinkedList<String> outputQueue = Lists.newLinkedList();
    ReplayPircBotX bot = new ReplayPircBotX(config.buildConfiguration(), outputQueue);

    BufferedReader fileInput = new BufferedReader(new InputStreamReader(input));
    boolean skippedHeader = false;
    while (true) {
        String lineRaw = fileInput.readLine();
        if (bot.isClosed() && StringUtils.isNotBlank(lineRaw)) {
            throw new RuntimeException("bot is closed but file still has line " + lineRaw);
        } else if (!bot.isClosed() && StringUtils.isBlank(lineRaw)) {
            throw new RuntimeException("bot is not closed but file doesn't have any more lines");
        } else if (bot.isClosed() && StringUtils.isBlank(lineRaw)) {
            log.debug("(done) Bot is closed and file doesn't have any more lines");
            break;
        }
        log.debug("(line) " + lineRaw);
        String[] lineParts = StringUtils.split(lineRaw, " ", 2);
        String command = lineParts[0];
        String line = lineParts[1];

        //For now skip the info lines PircBotX is supposed to send on connect
        //They are only sent when connect() is called which requires multithreading
        if (!skippedHeader) {
            if (command.equals("pircbotx.output"))
                continue;
            else if (command.equals("pircbotx.input")) {
                log.debug("Finished skipping header");
                skippedHeader = true;
            } else
                throw new RuntimeException("Unknown line " + lineRaw);
        }

        if (command.equals("pircbotx.input")) {
            bot.getInputParser().handleLine(line);
        } else if (command.equals("pircbotx.output")) {
            String lastOutput = outputQueue.isEmpty() ? null : outputQueue.pop();
            if (StringUtils.startsWith(line, "JOIN")) {
                log.debug("Skipping JOIN output, server should send its own JOIN");
            } else if (StringUtils.startsWith(line, "QUIT")) {
                log.debug("Skipping QUIT output, server should send its own QUIT");
            } else if (!line.equals(lastOutput)) {
                log.error("Expected last output: " + line);
                log.error("Given last output: " + lastOutput);
                for (String curOutput : outputQueue) {
                    log.error("Queued output: " + curOutput);
                }
                throw new RuntimeException("Failed to verify output (see log)");
            }
        } else {
            throw new RuntimeException("Unknown line " + lineRaw);
        }

        for (Event curEvent : Iterables.consumingIterable(eventQueue))
            log.debug("(events) " + curEvent);
        log.debug("");
    }
    timer.stop();
    log.debug("---Replay successful in {}---",
            DurationFormatUtils.formatDuration(timer.getTime(), "mm'min'ss'sec'SSS'ms'"));
}
From source file:com.act.lcms.v2.TraceIndexExtractor.java
/**
 * Initiate a data feast of all traces within some window allocation. OM NOM NOM.
 * @param iter An iterator over an LCMS data file.
 * @return The windows, time points, and per-window traces.
 */
private IndexedTraces runSweepLine(List<Double> targetMZs, Iterator<LCMSSpectrum> iter)
        throws RocksDBException, IOException {
    // Create windows for sweep-linin'.
    List<MZWindow> windows = new ArrayList<MZWindow>() {
        {
            int i = 0;
            for (Double targetMZ : targetMZs) {
                add(new MZWindow(i, targetMZ));
                i++;
            }
        }
    };
    /* We *must* ensure the windows are sorted in m/z order for the sweep line to work. However, we don't know
     * anything about the input targetMZs list, which may be immutable or may be in some order the client wants to
     * preserve. Rather than mess with that array, we'll sort the windows in our internal array and leave the
     * client's targets untouched. */
    Collections.sort(windows, (a, b) -> a.getTargetMZ().compareTo(b.getTargetMZ()));

    List<Double> times = new ArrayList<>();
    List<List<Double>> allTraces = new ArrayList<List<Double>>(windows.size()) {
        {
            for (int i = 0; i < windows.size(); i++) {
                add(new ArrayList<>());
            }
        }
    };

    // Keep an array of accumulators around to reduce the overhead of accessing the trace matrix for accumulation.
    double[] sumIntensitiesInEachWindow = new double[windows.size()];

    int timepointCounter = 0;
    while (iter.hasNext()) {
        LCMSSpectrum spectrum = iter.next();
        Double time = spectrum.getTimeVal();

        // Store one list of the time values so we can knit times and intensity sums later to form XZs.
        times.add(time);

        for (int i = 0; i < sumIntensitiesInEachWindow.length; i++) {
            sumIntensitiesInEachWindow[i] = 0.0;
        }

        timepointCounter++;

        if (timepointCounter % 100 == 0) {
            LOGGER.info("Extracted %d timepoints (now at %.3fs)", timepointCounter, time);
        }

        /* We use a sweep-line approach to scanning through the m/z windows so that we can aggregate all
         * intensities in one pass over the current LCMSSpectrum (this saves us one inner loop in our extraction
         * process). The m/z values in the LCMSSpectrum become our "critical" or "interesting points" over which
         * we sweep our m/z ranges. The next window in m/z order is guaranteed to be the next one we want to
         * consider since we address the points in m/z order as well. As soon as we've passed out of the range of
         * one of our windows, we discard it. It is valid for a window to be added to and discarded from the
         * working queue in one application of the work loop. */
        LinkedList<MZWindow> workingQueue = new LinkedList<>();
        // TODO: can we reuse these instead of creating fresh?
        LinkedList<MZWindow> tbdQueue = new LinkedList<>(windows);

        // Assumption: these arrive in m/z order.
        for (Pair<Double, Double> mzIntensity : spectrum.getIntensities()) {
            Double mz = mzIntensity.getLeft();
            Double intensity = mzIntensity.getRight();

            // First, shift any applicable ranges onto the working queue based on their minimum mz.
            while (!tbdQueue.isEmpty() && tbdQueue.peekFirst().getMin() <= mz) {
                workingQueue.add(tbdQueue.pop());
            }

            // Next, remove any ranges we've passed.
            while (!workingQueue.isEmpty() && workingQueue.peekFirst().getMax() < mz) {
                workingQueue.pop();
            }

            if (workingQueue.isEmpty()) {
                if (tbdQueue.isEmpty()) {
                    // If both queues are empty, there are no more windows to consider at all. On to the next timepoint!
                    break;
                }
                // If there's nothing that happens to fit in this range, skip it!
                continue;
            }

            // The working queue should now hold only ranges that include this m/z value. Sweep line swept!

            /* Now add this intensity to the accumulator value for each of the items in the working queue.
             * By the end of the outer loop, trace(t) = Sum(intensity) | win_min <= m/z <= win_max @ time point t */
            for (MZWindow window : workingQueue) {
                // TODO: count the number of times we add intensities to each window's accumulator for MS1-style warnings.
                sumIntensitiesInEachWindow[window.getIndex()] += intensity;
            }
        }

        /* Extend allTraces to add a row of accumulated intensity values for this time point. We build this
         * incrementally because the LCMSSpectrum iterator doesn't tell us how many time points to expect up front. */
        for (int i = 0; i < sumIntensitiesInEachWindow.length; i++) {
            allTraces.get(i).add(sumIntensitiesInEachWindow[i]);
        }
    }

    // Trace data has been devoured. Might want to loosen the belt at this point...
    LOGGER.info("Done extracting %d traces", allTraces.size());

    return new IndexedTraces(windows, times, allTraces);
}
From source file:org.eclipse.che.api.vfs.server.VirtualFileSystemImpl.java
@Path("replace/{path:.*}")
@Override
public void replace(@PathParam("path") String path, List<ReplacementSet> replacements,
        @QueryParam("lockToken") String lockToken)
        throws NotFoundException, ForbiddenException, ConflictException, ServerException {
    VirtualFile projectRoot = mountPoint.getVirtualFile(path);
    if (!projectRoot.isFolder()) {
        throw new ConflictException("Given path must be a project root folder.");
    }
    final Map<String, ReplacementContainer> changesPerFile = new HashMap<>();
    // fill changes matrix first
    for (final ReplacementSet replacement : replacements) {
        for (final String regex : replacement.getFiles()) {
            Pattern pattern = Pattern.compile(regex);
            ItemNode rootNode = getTree(projectRoot.getId(), -1, false, PropertyFilter.ALL_FILTER);
            LinkedList<ItemNode> q = new LinkedList<>();
            q.add(rootNode);
            while (!q.isEmpty()) {
                ItemNode node = q.pop();
                Item item = node.getItem();
                if (item.getItemType().equals(ItemType.FOLDER)) {
                    q.addAll(node.getChildren());
                } else if (item.getItemType().equals(ItemType.FILE)) {
                    // for cases like: src/main/java/(.*)
                    String itemInternalPath = item.getPath().substring(projectRoot.getPath().length() + 1);
                    if (pattern.matcher(item.getName()).matches()
                            || pattern.matcher(itemInternalPath).matches()) {
                        ReplacementContainer container = (changesPerFile.get(item.getPath()) != null)
                                ? changesPerFile.get(item.getPath())
                                : new ReplacementContainer();
                        for (Variable variable : replacement.getEntries()) {
                            String replaceMode = variable.getReplacemode();
                            if (replaceMode == null || "variable_singlepass".equals(replaceMode)) {
                                container.getVariableProps().put(variable.getFind(), variable.getReplace());
                            } else if ("text_multipass".equals(replaceMode)) {
                                container.getTextProps().put(variable.getFind(), variable.getReplace());
                            }
                        }
                        changesPerFile.put(item.getPath(), container);
                    }
                }
            }
        }
    }
    //now apply changes matrix
    for (Map.Entry<String, ReplacementContainer> entry : changesPerFile.entrySet()) {
        try {
            if (entry.getValue().hasReplacements()) {
                ContentStream cs = mountPoint.getVirtualFile(entry.getKey()).getContent();
                String content = IoUtil.readAndCloseQuietly(cs.getStream());
                String modified = Deserializer.resolveVariables(content, entry.getValue().getVariableProps(),
                        false);
                for (Map.Entry<String, String> replacement : entry.getValue().getTextProps().entrySet()) {
                    if (modified.contains(replacement.getKey())) {
                        modified = modified.replace(replacement.getKey(), replacement.getValue());
                    }
                }
                //better to compare big strings by hash codes first
                if (!(content.hashCode() == modified.hashCode()) || !content.equals(modified)) {
                    mountPoint.getVirtualFile(entry.getKey()).updateContent(
                            new ByteArrayInputStream(modified.getBytes(StandardCharsets.UTF_8)), lockToken);
                }
            }
        } catch (IOException e) {
            LOG.warn(e.getMessage(), e);
        }
    }
}
From source file:com.nhncorp.lucy.security.xss.XssSaxFilter.java
/**
 * @param writer
 * @param neloLogWriter
 * @param stackForObjectTag
 * @param stackForAllowNetworkingValue
 * @throws IOException
 */
private boolean doObjectEndTagProcess(Writer writer, StringWriter neloLogWriter,
        LinkedList<Element> stackForObjectTag, LinkedList<String> stackForAllowNetworkingValue)
        throws IOException {
    List<String> paramNameList = new ArrayList<String>();
    Element item = null;
    while (stackForObjectTag.size() > 0) {
        item = stackForObjectTag.pop();
        if ("object".equalsIgnoreCase(item.getName())) {
            break;
        } else {
            Attribute nameAttr = item.getAttribute("name");
            if (nameAttr != null) {
                paramNameList.add(nameAttr.getValue());
            }
        }
    }

    if (item == null || !"object".equalsIgnoreCase(item.getName())) {
        return false;
    } else if (item != null && item.isDisabled()) {
        return true;
    }

    // For each pattern in PARAMLIST, check whether a matching <param> was collected from the object tag
    // (paramNameList); if none was found, emit a safe default <param>.
    // (The original comment here was mis-encoded and has been paraphrased.)
    for (int index = 0; index < PARAMLIST.length; index++) {
        Pattern pattern = PARAMLIST[index];
        boolean exist = false;
        for (String paramName : paramNameList) {
            if (pattern.matcher(paramName).matches()) {
                exist = true;
                break;
            }
        }

        if (!exist) {
            // The param is missing; insert a secure default.
            switch (index) {
            // <param name="invokeURLs" value="false" />
            case 0:
                Element invokeURLs = new Element("param");
                invokeURLs.putAttribute("name", "\"invokeURLs\"");
                invokeURLs.putAttribute("value", "\"false\"");
                this.serialize(writer, invokeURLs, neloLogWriter);
                break;
            // <param name="autostart" value="false" />
            case 1:
                Element autostart = new Element("param");
                autostart.putAttribute("name", "\"autostart\"");
                autostart.putAttribute("value", "\"false\"");
                this.serialize(writer, autostart, neloLogWriter);
                break;
            // <param name="allowScriptAccess" value="never" />
            case 2:
                Element allowScriptAccess = new Element("param");
                allowScriptAccess.putAttribute("name", "\"allowScriptAccess\"");
                allowScriptAccess.putAttribute("value", "\"never\"");
                this.serialize(writer, allowScriptAccess, neloLogWriter);
                break;
            // <param name="allowNetworking" value="all|internal" />
            case 3:
                Element allowNetworking = new Element("param");
                allowNetworking.putAttribute("name", "\"allowNetworking\"");
                allowNetworking.putAttribute("value",
                        stackForAllowNetworkingValue.size() == 0 ? "\"internal\""
                                : stackForAllowNetworkingValue.pop());
                this.serialize(writer, allowNetworking, neloLogWriter);
                break;
            // <param name="autoplay" value="false" />
            case 4:
                Element autoplay = new Element("param");
                autoplay.putAttribute("name", "\"autoplay\"");
                autoplay.putAttribute("value", "\"false\"");
                this.serialize(writer, autoplay, neloLogWriter);
                break;
            // <param name="enablehref" value="false" />
            case 5:
                Element enablehref = new Element("param");
                enablehref.putAttribute("name", "\"enablehref\"");
                enablehref.putAttribute("value", "\"false\"");
                this.serialize(writer, enablehref, neloLogWriter);
                break;
            // <param name="enablejavascript" value="false" />
            case 6:
                Element enablejavascript = new Element("param");
                enablejavascript.putAttribute("name", "\"enablejavascript\"");
                enablejavascript.putAttribute("value", "\"false\"");
                this.serialize(writer, enablejavascript, neloLogWriter);
                break;
            // <param name="nojava" value="true" />
            case 7:
                Element nojava = new Element("param");
                nojava.putAttribute("name", "\"nojava\"");
                nojava.putAttribute("value", "\"true\"");
                this.serialize(writer, nojava, neloLogWriter);
                break;
            // <param name="AllowHtmlPopupwindow" value="false" />
            case 8:
                Element allowHtmlPopupwindow = new Element("param");
                allowHtmlPopupwindow.putAttribute("name", "\"AllowHtmlPopupwindow\"");
                allowHtmlPopupwindow.putAttribute("value", "\"false\"");
                this.serialize(writer, allowHtmlPopupwindow, neloLogWriter);
                break;
            // <param name="enableHtmlAccess" value="false" />
            case 9:
                Element enableHtmlAccess = new Element("param");
                enableHtmlAccess.putAttribute("name", "\"enableHtmlAccess\"");
                enableHtmlAccess.putAttribute("value", "\"false\"");
                this.serialize(writer, enableHtmlAccess, neloLogWriter);
                break;
            default:
                // Original message was mis-encoded; it flags an unexpected PARAMLIST index.
                System.out.println("Unexpected param index.");
            }
        }
    }
    return false;
}
From source file:org.bimserver.charting.Containers.TreeNode.java
public void padTreeSoThatLeafNodesAreAllTheSameDepth() {
    int maximumLeafDepth = maximumLeafDepth();
    //
    LinkedList<TreeNode> majorBranches = new LinkedList<>();
    Iterator<TreeNode> leaves = iterateLeafNodes();
    while (leaves.hasNext()) {
        TreeNode node = leaves.next();
        int depth = node.depth();
        boolean nodeMustBePadded = depth != maximumLeafDepth;
        if (nodeMustBePadded) {
            TreeNode branch = node.getMajorBranch();
            if (!majorBranches.contains(branch))
                majorBranches.add(branch);
        }
    }
    //
    while (majorBranches.size() > 0) {
        // Get branch.
        TreeNode branch = majorBranches.pop();
        LinkedList<TreeNode> nodesToConsider = new LinkedList<>(Arrays.asList(branch));
        TreeNode thisNode = null;
        do {
            thisNode = nodesToConsider.pop();
            if (thisNode.isLeaf()) {
                // This node is the only relevant one. Push it to the edge of the tree.
                int maximumDepthOfBranch = thisNode.depth();
                int delta = maximumLeafDepth - maximumDepthOfBranch;
                padParentWithXNodesThatCollapseIntoThisNode(thisNode, delta);
            } else if (thisNode.leavesAreAtSameDepth()) {
                // This node and all its children are the only relevant ones. Push them all to the edge of the tree.
                int maximumDepthOfBranch = thisNode.maximumLeafDepth();
                int delta = maximumLeafDepth - maximumDepthOfBranch;
                padParentWithXNodesThatCollapseIntoThisNode(thisNode, delta);
            } else {
                // Prepare to move on in the next iteration of the do-while loop. Only add children whose
                // set of leaves does not yet reach the maximum leaf depth of the tree.
                for (TreeNode child : thisNode.Children) {
                    int minimumLeafDepth = child.minimumLeafDepth();
                    if (minimumLeafDepth < maximumLeafDepth)
                        nodesToConsider.add(child);
                }
            }
        } while (!branch.leavesAreAtSameDepth());
    }
}
From source file:org.jdto.util.expression.Expression.java
/**
 * Parse the expression into something easily evaluable.
 * @param expression
 * @return
 */
private synchronized ExpressionTerm parseExpression(String expression) {
    position = 0;
    LinkedList<String> precedenceStack = new LinkedList<String>();
    //add the first imaginary parenthesis.
    precedenceStack.push("(");
    //append a closing parenthesis to the expression.
    expression = expression + ")";
    //the previous token.
    String token = null;
    StringBuilder postFix = new StringBuilder();
    /* Go through the expression. */
    while (!precedenceStack.isEmpty() && position < expression.length()) {
        //use the token from the previous iteration
        token = readToken(token, expression);
        //if it is a left parenthesis
        if ("(".equals(token)) {
            precedenceStack.push(token);
            postFix.append(" "); //a separation
            continue;
        }
        //check if it is an operator
        Operator operator = Operator.getOperaorByString(token);
        if (operator != null) {
            postFix.append(" "); //add a separator char to the result.
            while (operator.precedence(precedenceStack.peek())) {
                postFix.append(precedenceStack.pop());
                postFix.append(" ");
            }
            precedenceStack.push(token);
            continue;
        }
        //check if it is a right parenthesis
        if (")".equals(token)) {
            postFix.append(" "); //add a separator to the result.
            while (!"(".equals(precedenceStack.peek())) {
                String stackElement = precedenceStack.pop();
                if (isOperator(stackElement)) {
                    postFix.append(stackElement);
                    postFix.append(" ");
                }
            }
            //remove the extra parenthesis
            precedenceStack.pop();
            continue;
        }
        //if everything else fails, just add the token to the postfix expr
        postFix.append(token);
        //and we're done with the loop here
    }
    //at this point we need to convert the postfix expression into terms.
    if (!precedenceStack.isEmpty()) {
        throw new IllegalArgumentException("Could not parse expression!");
    }
    return parsePostfixExpr(postFix.toString());
}
From source file:org.squashtest.tm.service.internal.batchimport.Model.java
/**
 * Returns all parameters available to a test case. This includes every
 * ParameterTarget from the test cases called directly or indirectly
 * by this test case, not just the ones owned by the test case itself (unlike
 * getOwnParameters). Parameters from downstream test cases are included
 * only if they are inherited in some way.
 */
public Collection<ParameterTarget> getAllParameters(TestCaseTarget testCase) {
    if (!callGraph.knowsNode(testCase)) {
        initCallGraph(testCase);
    }
    Collection<ParameterTarget> result = new HashSet<>();
    LinkedList<Node> processing = new LinkedList<>();
    Set<Node> processed = new HashSet<>();
    processing.add(callGraph.getNode(testCase));
    while (!processing.isEmpty()) {
        Node current = processing.pop();
        result.addAll(getOwnParameters(current.getKey()));
        // modification pattern
        for (Node child : current.getOutbounds()) {
            List<InternalStepModel> steps = testCaseStepsByTarget.get(current.getKey());
            extractParametersFromSteps(processing, processed, child, steps);
            processed.add(current);
        }
    }
    return result;
}
From source file:org.bimserver.charting.SupportFunctions.java
public static ArrayList<LinkedHashMap<String, Object>> getIfcByClassificationReferenceWithTreeStructure(
        String structureKeyword, IfcModelInterface model, Chart chart, boolean includeClassificationSystem) {
    ArrayList<LinkedHashMap<String, Object>> rawData = new ArrayList<>();
    // Prepare for static iteration.
    LinkedHashMap<IfcRelAssociatesClassification, Integer> ifcClassificationWithCounts = new LinkedHashMap<>();
    // Iterate only the products.
    for (IfcRelAssociatesClassification ifcRelAssociatesClassification : model
            .getAllWithSubTypes(IfcRelAssociatesClassification.class)) {
        IfcRelAssociatesClassification key = ifcRelAssociatesClassification;
        Integer value = 0;
        if (ifcClassificationWithCounts.containsKey(key))
            value = ifcClassificationWithCounts.get(key);
        // Count.
        EList<IfcRoot> a = ifcRelAssociatesClassification.getRelatedObjects();
        ifcClassificationWithCounts.put(key, value + a.size());
    }
    // Derive the column names.
    ArrayList<String> hierarchyColumnNames = new ArrayList<>();
    int extraColumns = (includeClassificationSystem) ? 1 : 0;
    String leafColumnName = String.format("%s%d", structureKeyword, extraColumns + 1);
    for (int i = 0; i < extraColumns + 1; i++)
        hierarchyColumnNames.add(String.format("%s%d", structureKeyword, i + 1));
    // Update the chart configuration.
    chart.setDimensionLookupKeys(structureKeyword, hierarchyColumnNames);
    chart.setDimensionLookupKey("size", "size");
    chart.setDimensionLookupKey("label", "label");
    chart.setDimensionLookupKey("color", "size");
    // Add each entry.
    for (Entry<IfcRelAssociatesClassification, Integer> countedEntry : ifcClassificationWithCounts.entrySet()) {
        //
        Integer count = countedEntry.getValue();
        IfcRelAssociatesClassification ifcRelAssociatesClassification = countedEntry.getKey();
        //
        LinkedList<String> itemReferenceNames = new LinkedList<>();
        String classificationSystem = (ifcRelAssociatesClassification.isSetName())
                ? ifcRelAssociatesClassification.getName()
                : "(no name)";
        //
        IfcClassificationNotationSelect notationOrReference = ifcRelAssociatesClassification
                .getRelatingClassification();
        if (notationOrReference instanceof IfcClassificationNotation) {
            // Get notation.
            IfcClassificationNotation notation = (IfcClassificationNotation) notationOrReference;
            // Go through the facets of this notation.
            for (IfcClassificationNotationFacet facet : notation.getNotationFacets()) {
                String notationValue = facet.getNotationValue();
                itemReferenceNames.add(notationValue);
            }
            // TODO: Look up notation in classification. No inverse lookup is available.
        } else if (notationOrReference instanceof IfcClassificationReference) {
            // Get reference.
            IfcClassificationReference reference = (IfcClassificationReference) notationOrReference;
            // Get the reference name.
            String itemReferenceName = reference.getItemReference();
            itemReferenceNames.add(itemReferenceName);
            // Get the classification the reference links out to.
            IfcClassification classification = reference.getReferencedSource();
            // Use it.
            if (classification != null)
                classificationSystem = classification.getName();
        }
        //
        while (itemReferenceNames.size() > 0) {
            String itemReferenceName = itemReferenceNames.pop();
            // Prepare to store this raw data entry.
            LinkedHashMap<String, Object> dataEntry = new LinkedHashMap<>();
            // Name the group.
            String name = String.format("%s (%s)", itemReferenceName, count);
            dataEntry.put(leafColumnName, name);
            if (includeClassificationSystem)
                dataEntry.put(hierarchyColumnNames.get(0), classificationSystem);
            dataEntry.put("size", count);
            dataEntry.put("label", name);
            // Push the entry into the data pool.
            rawData.add(dataEntry);
        }
    }
    // Send it all back.
    return rawData;
}
From source file:org.epics.archiverappliance.etl.DataReductionDailyETLTest.java
/**
 * 1) Set up the raw and reduced PV's
 * 2) Generate data in STS
 * 3) Run ETL
 * 4) Compare
 */
@Test
public void testReducedETL() throws Exception {
    // Set up the raw and reduced PV's
    PlainPBStoragePlugin etlSTS = (PlainPBStoragePlugin) StoragePluginURLParser
            .parseStoragePlugin("pb://localhost?name=STS&rootFolder=" + shortTermFolderName
                    + "/&partitionGranularity=PARTITION_HOUR", configService);
    PlainPBStoragePlugin etlMTS = (PlainPBStoragePlugin) StoragePluginURLParser
            .parseStoragePlugin("pb://localhost?name=MTS&rootFolder=" + mediumTermFolderName
                    + "/&partitionGranularity=PARTITION_DAY", configService);
    PlainPBStoragePlugin etlLTS = (PlainPBStoragePlugin) StoragePluginURLParser
            .parseStoragePlugin("pb://localhost?name=LTS&rootFolder=" + longTermFolderName
                    + "/&partitionGranularity=PARTITION_YEAR&reducedata=" + reduceDataUsing, configService);
    {
        PVTypeInfo typeInfo = new PVTypeInfo(rawPVName, ArchDBRTypes.DBR_SCALAR_DOUBLE, true, 1);
        String[] dataStores = new String[] { etlSTS.getURLRepresentation(), etlMTS.getURLRepresentation(),
                etlLTS.getURLRepresentation() };
        typeInfo.setDataStores(dataStores);
        configService.updateTypeInfoForPV(rawPVName, typeInfo);
        configService.registerPVToAppliance(rawPVName, configService.getMyApplianceInfo());
    }
    {
        PVTypeInfo typeInfo = new PVTypeInfo(reducedPVName, ArchDBRTypes.DBR_SCALAR_DOUBLE, true, 1);
        String[] dataStores = new String[] { etlSTS.getURLRepresentation(), etlMTS.getURLRepresentation(),
                etlLTS.getURLRepresentation() };
        typeInfo.setDataStores(dataStores);
        configService.updateTypeInfoForPV(reducedPVName, typeInfo);
        configService.registerPVToAppliance(reducedPVName, configService.getMyApplianceInfo());
    }

    // Control ETL manually
    configService.getETLLookup().manualControlForUnitTests();

    short currentYear = TimeUtils.getCurrentYear();

    for (int day = 0; day < 365; day++) {
        // Generate data into the STS on a daily basis
        ArrayListEventStream genDataRaw = new ArrayListEventStream(86400,
                new RemotableEventStreamDesc(ArchDBRTypes.DBR_SCALAR_DOUBLE, rawPVName, currentYear));
        ArrayListEventStream genDataReduced = new ArrayListEventStream(86400,
                new RemotableEventStreamDesc(ArchDBRTypes.DBR_SCALAR_DOUBLE, reducedPVName, currentYear));
        for (int second = 0; second < 86400; second++) {
            YearSecondTimestamp ysts = new YearSecondTimestamp(currentYear, day * 86400 + second, 0);
            Timestamp ts = TimeUtils.convertFromYearSecondTimestamp(ysts);
            genDataRaw.add(new POJOEvent(ArchDBRTypes.DBR_SCALAR_DOUBLE, ts,
                    new ScalarValue<Double>(second * 1.0), 0, 0));
            genDataReduced.add(new POJOEvent(ArchDBRTypes.DBR_SCALAR_DOUBLE, ts,
                    new ScalarValue<Double>(second * 1.0), 0, 0));
        }
        try (BasicContext context = new BasicContext()) {
            etlSTS.appendData(context, rawPVName, genDataRaw);
            etlSTS.appendData(context, reducedPVName, genDataReduced);
        }
        logger.debug("Done generating data into the STS for day " + day);

        // Run ETL at the end of the day
        Timestamp timeETLruns = TimeUtils
                .convertFromYearSecondTimestamp(new YearSecondTimestamp(currentYear, day * 86400 + 86399, 0));
        ETLExecutor.runETLs(configService, timeETLruns);
        logger.debug("Done performing ETL as though today is "
                + TimeUtils.convertToHumanReadableString(timeETLruns));

        // Compare data for raw+postprocessor and reduced PV's.
        PostProcessor postProcessor = PostProcessors.findPostProcessor(reduceDataUsing);
        postProcessor.initialize(reduceDataUsing, rawPVName);
        int rawWithPPCount = 0;
        int reducedCount = 0;
        try (BasicContext context = new BasicContext()) {
            Timestamp startTime = TimeUtils.minusDays(TimeUtils.now(), 10 * 366);
            Timestamp endTime = TimeUtils.plusDays(TimeUtils.now(), 10 * 366);
            LinkedList<Timestamp> rawTimestamps = new LinkedList<Timestamp>();
            LinkedList<Timestamp> reducedTimestamps = new LinkedList<Timestamp>();
            try (EventStream rawWithPP = new CurrentThreadWorkerEventStream(rawPVName,
                    etlLTS.getDataForPV(context, rawPVName, startTime, endTime, postProcessor))) {
                for (Event e : rawWithPP) {
                    rawTimestamps.add(e.getEventTimeStamp());
                    rawWithPPCount++;
                }
            }
            try (EventStream reduced = new CurrentThreadWorkerEventStream(reducedPVName,
                    etlLTS.getDataForPV(context, reducedPVName, startTime, endTime))) {
                for (Event e : reduced) {
                    reducedTimestamps.add(e.getEventTimeStamp());
                    reducedCount++;
                }
            }
            logger.debug("For day " + day + " we have " + rawWithPPCount + " rawWithPP events and "
                    + reducedCount + " reduced events");
            if (rawTimestamps.size() != reducedTimestamps.size()) {
                while (!rawTimestamps.isEmpty() || !reducedTimestamps.isEmpty()) {
                    if (!rawTimestamps.isEmpty())
                        logger.info("Raw/PP " + TimeUtils.convertToHumanReadableString(rawTimestamps.pop()));
                    if (!reducedTimestamps.isEmpty())
                        logger.info("Reduced " + TimeUtils.convertToHumanReadableString(reducedTimestamps.pop()));
                }
            }
            assertTrue("For day " + day + " we have " + rawWithPPCount + " rawWithPP events and "
                    + reducedCount + " reduced events", rawWithPPCount == reducedCount);
        }

        if (day > 2) {
            assertTrue("For day " + day + ", seems like no events were moved by ETL into LTS for " + rawPVName
                    + " Count = " + rawWithPPCount, (rawWithPPCount != 0));
            assertTrue("For day " + day + ", seems like no events were moved by ETL into LTS for "
                    + reducedPVName + " Count = " + reducedCount, (reducedCount != 0));
        }
    }
}