Usage examples for java.util.LinkedList#peekLast()
Signature: public E peekLast() — retrieves, but does not remove, the last element of the list (returns null if the list is empty)
From source file:Main.java
public static void main(String[] args) { // create a LinkedList LinkedList<String> list = new LinkedList<String>(); // add some elements list.add("Hello"); list.add("from java2s.com"); list.add("10"); // print the list System.out.println("LinkedList:" + list); // peek at the last element System.out.println("Last element of the list:" + list.peekLast()); }
From source file:com.mohawk.webcrawler.ScriptCompiler.java
/**
 * Recursively compiles a flat token stream into a nested scope tree.
 *
 * Consumes tokens until the stream is empty or an end-of-scope keyword
 * ("end"/"else"/"elseif") is hit; keywords "if" and "while" open child
 * scopes compiled by recursive calls. Remaining tokens are classified as
 * verbs, literals, operators, or (by default) variables.
 *
 * @param tokens      remaining source tokens, consumed as compilation proceeds
 * @param parentScope scope receiving the compiled tokens
 * @throws CompilationException if a verb/literal/operator token cannot be constructed
 */
private static void addScope(Queue<String> tokens, Queue<? super BaseToken> parentScope)
        throws CompilationException {
    while (!tokens.isEmpty()) {
        String token = tokens.poll();
        if ("end".equals(token) || "else".equals(token) || "elseif".equals(token)) {
            // Scope terminator: record it for the caller to inspect, then stop.
            parentScope.add(new BaseEndScope(token));
            break;
        } else if ("if".equals(token)) {
            // "if" is followed by its condition expression token.
            String expression = tokens.poll();
            If_Verb ifVerb = new If_Verb();
            ifVerb.setExpression(expression);
            parentScope.add(ifVerb);
            // Compile the if-body into its own scope.
            addScope(tokens, ifVerb.createScope());
            // check if elseif or else is defined: the recursive call leaves the
            // terminating BaseEndScope ("end"/"else"/"elseif") at the scope tail.
            LinkedList<BaseToken> ifScope = ifVerb.getScope();
            Object elseToken = ifScope.peekLast();
            if (elseToken instanceof BaseEndScope) {
                // remove elseif or else from if scope
                ifScope.pollLast();
                // Chain through any number of elseif branches until a plain "end".
                while (elseToken instanceof BaseEndScope) {
                    String elseStr = ((BaseEndScope) elseToken).getName();
                    if ("end".equals(elseStr))
                        break;
                    else if ("elseif".equals(elseStr)) {
                        String exp = tokens.poll();
                        ElseIf_Verb elseIfVerb = new ElseIf_Verb();
                        elseIfVerb.setExpression(exp);
                        ifVerb.addElseIf(elseIfVerb);
                        addScope(tokens, elseIfVerb.createScope());
                        // Pull this branch's terminator to decide whether to continue chaining.
                        elseToken = elseIfVerb.getScope().pollLast();
                    } else if ("else".equals(elseStr)) {
                        Else_Verb elseVerb = new Else_Verb();
                        ifVerb.setElse(elseVerb);
                        addScope(tokens, elseVerb.createScope());
                        elseToken = elseVerb.getScope().pollLast();
                    }
                }
            }
        } else if ("while".equals(token)) {
            // "while" is followed by its loop-condition expression token.
            String evaluation = tokens.poll();
            While_Verb whileVerb = new While_Verb();
            whileVerb.setExpression(evaluation);
            parentScope.add(whileVerb);
            addScope(tokens, whileVerb.createScope());
        } else if (LangCore.isVerb(token)) { // verb
            try {
                parentScope.add(LangCore.createVerbToken((String) token));
            } catch (Exception e) {
                e.printStackTrace();
                throw new CompilationException(e.getLocalizedMessage());
            }
        } else if (LangCore.isLiteral(token)) { // literal
            try {
                parentScope.add(new BaseLiteral(LangCore.createLiteralObject(token)));
            } catch (LanguageException e) {
                throw new CompilationException(e.getLocalizedMessage());
            }
        } else if (LangCore.isOperator(token)) { // operator
            try {
                parentScope.add(LangCore.createOperatorToken(token));
            } catch (LanguageException e) {
                throw new CompilationException(e.getLocalizedMessage());
            }
        } else // default to variable
            parentScope.add(new BaseVariable(token));
    }
}
From source file:eulermind.importer.LineNode.java
private static LinkedList<LineNode> popSameBlankLineNodes(LinkedList<LineNode> stack) { int lastBlankLines = stack.peekLast().m_blankLines; LinkedList<LineNode> lastSameLineNodes = new LinkedList<LineNode>(); while (!stack.isEmpty() && stack.peekLast().m_blankLines == lastBlankLines) { //pollLast? addFirst?? lastSameLineNodes.addFirst(stack.pollLast()); }// w ww . j av a 2 s . c o m return lastSameLineNodes; }
From source file:com.ikanow.aleph2.analytics.services.DeduplicationService.java
/** The heart of the dedup logic
 * (everything gets ordered in the correct order except if policy is custom, in which case the ordering is a
 * bit arbitrary anyway)
 * @param config the document schema carrying the deduplication policy
 * @param custom_handler optional custom enrichment module + context, used for custom policies
 * @param timestamp_field field used to decide whether the new record supersedes the old
 * @param new_records incoming records grouped under this key (last element is the most recent)
 * @param old_records previously stored records matching this key
 * @param key the deduplication key shared by old and new records
 * @param mutable_obj_map mutable map of key -> new records; entries are removed to drop records
 * @return a stream of object ids to delete efficiently (empty when nothing is deleted)
 */
protected static Stream<JsonNode> handleDuplicateRecord(final DocumentSchemaBean config,
        Optional<Tuple2<IEnrichmentBatchModule, DeduplicationEnrichmentContext>> custom_handler,
        final String timestamp_field, final LinkedList<Tuple3<Long, IBatchRecord, ObjectNode>> new_records,
        final List<JsonNode> old_records, final JsonNode key,
        final Map<JsonNode, LinkedList<Tuple3<Long, IBatchRecord, ObjectNode>>> mutable_obj_map) {
    // Dispatch on the configured policy; each branch returns the ids to delete.
    return Patterns.match(config.deduplication_policy()).<Stream<JsonNode>>andReturn()
            .when(p -> p == DeduplicationPolicy.leave, __ -> {
                // "leave": keep the stored record untouched.
                mutable_obj_map.remove(key); //(drop new record)
                return Stream.empty();
            }).when(p -> p == DeduplicationPolicy.update, __ -> {
                // "update": newest incoming record wins only if its timestamp supersedes the old one.
                final Tuple3<Long, IBatchRecord, ObjectNode> last_record = new_records.peekLast();
                final JsonNode old_record = old_records.stream().findFirst().get();
                if (newRecordUpdatesOld(timestamp_field, last_record._2().getJson(), old_record)) {
                    // Reuse the stored _id so the write overwrites in place.
                    last_record._3().set(AnnotationBean._ID, old_record.get(AnnotationBean._ID));
                    return config.delete_unhandled_duplicates() ? deleteOtherDuplicates(old_records.stream())
                            : Stream.empty();
                } else {
                    mutable_obj_map.remove(key); //(drop new record)
                    return Stream.empty();
                }
            }).when(p -> p == DeduplicationPolicy.overwrite, __ -> {
                // "overwrite": always replace, regardless of timestamps.
                final Tuple3<Long, IBatchRecord, ObjectNode> last_record = new_records.peekLast();
                // Just update the new record's "_id" field
                final JsonNode old_record = old_records.stream().findFirst().get();
                last_record._3().set(AnnotationBean._ID, old_record.get(AnnotationBean._ID));
                return config.delete_unhandled_duplicates() ? deleteOtherDuplicates(old_records.stream())
                        : Stream.empty();
            }).when(p -> p == DeduplicationPolicy.custom_update, __ -> {
                // "custom_update": defer to the custom handler, but only if the new record supersedes.
                final Tuple3<Long, IBatchRecord, ObjectNode> last_record = new_records.peekLast();
                final JsonNode old_record = old_records.stream().findFirst().get();
                if (newRecordUpdatesOld(timestamp_field, last_record._2().getJson(), old_record)) {
                    mutable_obj_map.remove(key); // (since the "final step" logic is responsible for calling the update code)
                    return handleCustomDeduplication(custom_handler, new_records, old_records, key);
                } else {
                    mutable_obj_map.remove(key); //(drop new record)
                    return Stream.empty();
                }
            }).otherwise(__ -> {
                // Any other policy (i.e. fully custom): always hand off to the custom handler.
                mutable_obj_map.remove(key); // (since the "final step" logic is responsible for calling the update code)
                return handleCustomDeduplication(custom_handler, new_records, old_records, key);
            });
}
From source file:eulermind.importer.LineNode.java
/**
 * Reduces a flat list of lines into a chapter tree, using the number of
 * blank lines before each line as its hierarchy level (more blank lines =
 * higher level). Gap levels are filled with synthetic nodes so the final
 * stack reduction always collapses to a single root.
 *
 * NOTE(review): original comments were garbled non-English text; intent
 * below is reconstructed from the code itself.
 */
private static LineNode reduceToChapterTreeByBlankLine(List<LineNode> lineNodes) {
    LinkedList<LineNode> newlineNodes = new LinkedList<LineNode>();
    // Clamp blank-line counts so the synthetic-node fill loops stay bounded.
    for (LineNode lineNode : lineNodes) {
        if (lineNode.m_blankLines > 200) {
            lineNode.m_blankLines = 200;
        }
    }
    Iterator<LineNode> iterator = lineNodes.iterator();
    // Seed with the first line node (assumes lineNodes is non-empty).
    newlineNodes.add(iterator.next());
    // Copy lineNodes into newlineNodes, inserting a synthetic LineNode for every
    // skipped blank-line level so consecutive entries never jump more than one level.
    {
        int maxBlankLines = 0;
        while (iterator.hasNext()) {
            LineNode lineNode = iterator.next();
            maxBlankLines = Math.max(maxBlankLines, lineNode.m_blankLines);
            // Fill intermediate levels between the previous tail and this node.
            for (int i = newlineNodes.peekLast().m_blankLines + 1; i < lineNode.m_blankLines; i++) {
                newlineNodes.add(new LineNode(i));
            }
            newlineNodes.add(lineNode);
        }
        // Append trailing levels up to maxBlankLines + 1 so the final reduction
        // step folds everything under a single top-level node.
        for (int i = newlineNodes.peekLast().m_blankLines + 1; i <= maxBlankLines + 1; i++) {
            newlineNodes.add(new LineNode(i));
        }
    }
    // Stack-based reduction: when a higher-level node arrives, pop the run of
    // equal-level nodes below it and attach them as its children.
    LinkedList<LineNode> stack = new LinkedList<LineNode>();
    for (LineNode newLineNode : newlineNodes) {
        if (!stack.isEmpty() && stack.peekLast().m_blankLines < newLineNode.m_blankLines) {
            List<LineNode> reducedLineNodes = popSameBlankLineNodes(stack);
            for (LineNode reducedLineNode : reducedLineNodes) {
                newLineNode.add(reducedLineNode);
            }
        }
        stack.add(newLineNode);
    }
    // The synthetic top-level sentinel guarantees everything reduced to one root.
    assert stack.size() == 1;
    return stack.peekFirst();
}
From source file:bamboo.trove.full.FullReindexWarcManager.java
private boolean checkWorkComplete() throws InterruptedException, IOException { if (currentBatch == null || currentBatch.isEmpty()) { // Get a new batch LinkedList<ToIndex> newBatch = getNextBatchWithRetry(); if (newBatch == null || newBatch.isEmpty()) { log.info("Retrieved empty batch from Bamboo. Work completed."); return true; }/* w ww . j a v a 2s . co m*/ // Including a separate reference just to ensure we know when to persist back to the DB LinkedList<ToIndex> persistTracking = new LinkedList<>(); for (ToIndex w : newBatch) { persistTracking.add(w); } allBatches.add(persistTracking); // Update state endOfBatchId = newBatch.peekLast().getId(); currentBatch = newBatch; return false; } else { // We are still in this batch return false; } }
From source file:dk.dma.ais.decode.DecodeTest.java
/**
 * Decode all messages in a file. Tries to handle proprietary messages.
 *
 * Demonstrates and tests the process of decoding lines into Vdm messages, and the decoding into AIS messages
 *
 * @throws IOException if the example stream cannot be read
 */
@Test
public void readLoopTest() throws IOException {
    // Make a list of proprietary handlers
    // Open file
    URL url = ClassLoader.getSystemResource("stream_example.txt");
    Assert.assertNotNull(url);
    try (BufferedReader in = new BufferedReader(new InputStreamReader(url.openStream()))) {
        Assert.assertNotNull(in);
        String line;
        // Prepare message classes
        AisMessage message;
        Vdm vdm = new Vdm();
        // Proprietary tags seen since the last complete message; attached to it on success.
        LinkedList<IProprietaryTag> tags = new LinkedList<>();
        while ((line = in.readLine()) != null) {
            // Ignore everything else than sentences
            if (!line.startsWith("$") && !line.startsWith("!")) {
                continue;
            }
            // Check if proprietary line
            if (ProprietaryFactory.isProprietaryTag(line)) {
                // Try to parse with one the registered factories in
                // META-INF/services/dk.dma.ais.proprietary.ProprietaryFactory
                IProprietaryTag tag = ProprietaryFactory.parseTag(new SentenceLine(line));
                if (tag != null) {
                    tags.add(tag);
                }
                continue;
            }
            // Handle VDM/VDO line
            try {
                int result = vdm.parse(new SentenceLine(line));
                // LOG.info("result = " + result);
                if (result == 0) {
                    // Complete message assembled: decode and attach accumulated tags.
                    message = AisMessage.getInstance(vdm);
                    Assert.assertNotNull(message);
                    if (tags.size() > 0) {
                        message.setTags(tags);
                    }
                    // Message ready for handling
                } else if (result == 1) {
                    // Wait for more data (multi-sentence message still incomplete).
                    continue;
                } else {
                    LOG.error("Failed to parse line: " + line + " result = " + result);
                    Assert.assertTrue(false);
                }
            } catch (Exception e) {
                // peekLast: report the most recent proprietary tag for context.
                LOG.info("VDM failed: " + e.getMessage() + " line: " + line + " tag: "
                        + (tags.size() > 0 ? tags.peekLast() : "null"));
                Assert.assertTrue(false);
            }
            // Create new VDM
            vdm = new Vdm();
            tags.clear();
        }
    }
}
From source file:org.broadinstitute.gatk.utils.MathUtils.java
/**
 * Returns a series of integer values between start and stop, inclusive,
 * exponentially distributed between the two. That is, if there are
 * ten values between 0-10 there will be 10 between 10-100.
 *
 * WARNING -- BADLY TESTED
 *
 * @param start range start (inclusive)
 * @param stop  range end (inclusive; always emitted as the final value)
 * @param eps   step in log10 space between successive candidate values
 * @return the exponentially spaced values, consecutive duplicates collapsed
 */
public static List<Integer> log10LinearRange(final int start, final int stop, final double eps) {
    final LinkedList<Integer> result = new LinkedList<>();
    final double spanLog10 = Math.log10(stop - start);
    if (start == 0) {
        result.add(0);
    }
    // Walk log10 space in eps-sized steps; rounding can repeat an index,
    // so each candidate is compared against the current tail before adding.
    for (double exponent = 0.0; exponent <= spanLog10; exponent += eps) {
        final int candidate = start + (int) Math.round(Math.pow(10, exponent));
        final Integer tail = result.peekLast();
        if (candidate < stop && (tail == null || tail != candidate)) {
            result.add(candidate);
        }
    }
    // Always terminate the series with stop itself (unless already the tail).
    final Integer tail = result.peekLast();
    if (tail == null || tail != stop) {
        result.add(stop);
    }
    return result;
}
From source file:org.cvrgrid.hl7.fileparse.PicuDataLoader.java
/**
 * Loads PICU HL7 vitals messages into OpenTSDB and maintains a patient
 * id-match spreadsheet. Pipeline: load measurement-name mappings from an
 * Excel sheet, load previously-known patients, parse every new HL7 file,
 * push each observation to OpenTSDB, then rewrite the id-match workbook
 * and the done.txt progress file.
 */
public static void main(String[] args) throws Exception {
    PicuDataLoader picuDataLoader = new PicuDataLoader();
    // HL7 timestamps arrive as yyyyMMddHHmmss; reformatted for the spreadsheet.
    SimpleDateFormat fromUser = new SimpleDateFormat("yyyyMMddHHmmss");
    SimpleDateFormat myFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
    OpenTSDBConfiguration openTSDBConfiguration = picuDataLoader.getOpenTSDBConfiguration();
    String urlString = openTSDBConfiguration.getOpenTSDBUrl();
    HL7Measurements hl7Measurements = new HL7Measurements();
    HashMap<String, String> measurementNames = hl7Measurements.getMeasurementNames();
    // Merge additional measurement-name mappings from the supported-params workbook.
    // NOTE(review): row bound 280 is hard-coded — presumably the sheet size; confirm.
    XSSFWorkbook wb = readFile(openTSDBConfiguration.getAwareSupportedParams());
    XSSFSheet sheet = wb.getSheetAt(0);
    for (int r = 1; r < 280; r++) {
        XSSFRow row = sheet.getRow(r);
        if (row == null) {
            continue;
        }
        String key = row.getCell(2).getStringCellValue();
        String value = row.getCell(1).getStringCellValue();
        value = value.replaceAll(":", "/");
        measurementNames.put(key, value);
    }
    // Load previously matched patients (keyed by hash) if the workbook exists.
    HashMap<String, PatientInfo> idMatch = new HashMap<String, PatientInfo>();
    File f = new File(openTSDBConfiguration.getIdMatch());
    if (f.exists()) {
        wb = readFile(openTSDBConfiguration.getIdMatch());
        sheet = wb.getSheetAt(0);
        for (int r = 1; r < sheet.getLastRowNum() + 1; r++) {
            XSSFRow row = sheet.getRow(r);
            PatientInfo patInfo = new PatientInfo();
            patInfo.setPicuSubject(row.getCell(1).getBooleanCellValue());
            patInfo.setFirstName(row.getCell(3).getStringCellValue());
            patInfo.setLastName(row.getCell(4).getStringCellValue());
            patInfo.setBirthDateTime(row.getCell(5).getStringCellValue());
            patInfo.setGender(row.getCell(6).getStringCellValue());
            patInfo.setBirthplace(row.getCell(7).getStringCellValue());
            patInfo.setEarliestDataPoint(row.getCell(8).getStringCellValue());
            // Locations/variables were stored as List.toString(); strip brackets and split.
            LinkedList<String> locations = new LinkedList<String>();
            String lSet = row.getCell(10).getStringCellValue();
            lSet = lSet.replaceAll("\\[", "");
            lSet = lSet.replaceAll("\\]", "");
            String[] locationSet = lSet.split(",");
            for (String location : locationSet) {
                locations.add(location.trim());
            }
            patInfo.setLocations(locations);
            LinkedList<String> variables = new LinkedList<String>();
            String vSet = row.getCell(12).getStringCellValue();
            vSet = vSet.replaceAll("\\[", "");
            vSet = vSet.replaceAll("\\]", "");
            String[] variableSet = vSet.split(",");
            for (String variable : variableSet) {
                variables.add(variable.trim());
            }
            patInfo.setVariables(variables);
            idMatch.put(patInfo.getHash(), patInfo);
        }
    }
    System.out.println("Existing Subject Count: " + idMatch.size());
    // Determine which HL7 message files are new (not listed in the processed file).
    String processedFile = openTSDBConfiguration.getProcessedFile();
    String rootDir = openTSDBConfiguration.getRootDir();
    ArrayList<String> processedFiles = new ArrayList<String>();
    File processedFileContents = new File(processedFile);
    getProcessedFiles(processedFileContents, processedFiles);
    ArrayList<String> messageFiles = new ArrayList<String>();
    File rootDirContents = new File(rootDir);
    getDirectoryContents(rootDirContents, processedFiles, messageFiles);
    // Reuse the existing workbook when prior runs exist, otherwise start fresh.
    XSSFWorkbook workbook;
    XSSFSheet sheetOut, sheetOut2;
    if (processedFiles.size() > 1) {
        workbook = readFile(openTSDBConfiguration.getIdMatch());
        sheetOut = workbook.getSheetAt(0);
        sheetOut2 = workbook.getSheetAt(1);
    } else {
        workbook = new XSSFWorkbook();
        sheetOut = workbook.createSheet("idMatch");
        sheetOut2 = workbook.createSheet(openTSDBConfiguration.getIdMatchSheet());
    }
    for (String filePath : messageFiles) {
        System.out.println(" File: " + filePath);
        FileReader reader = new FileReader(filePath);
        Hl7InputStreamMessageIterator iter = new Hl7InputStreamMessageIterator(reader);
        while (iter.hasNext()) {
            HashMap<String, String> tags = new HashMap<String, String>();
            Message next = iter.next();
            // Re-parse the raw message as an ORU_R01 (observation result) structure.
            ORU_R01 oru = new ORU_R01();
            oru.parse(next.encode());
            // Pull patient demographics from PID; each field is optional.
            PatientInfo patInfo = new PatientInfo();
            if (Terser.get(oru.getRESPONSE().getPATIENT().getPID(), 5, 0, 2, 1) != null)
                patInfo.setFirstName(Terser.get(oru.getRESPONSE().getPATIENT().getPID(), 5, 0, 2, 1).trim());
            if (Terser.get(oru.getRESPONSE().getPATIENT().getPID(), 5, 0, 1, 1) != null)
                patInfo.setLastName(Terser.get(oru.getRESPONSE().getPATIENT().getPID(), 5, 0, 1, 1).trim());
            if (Terser.get(oru.getRESPONSE().getPATIENT().getPID(), 7, 0, 1, 1) != null)
                patInfo.setBirthDateTime(
                        Terser.get(oru.getRESPONSE().getPATIENT().getPID(), 7, 0, 1, 1).trim());
            if (Terser.get(oru.getRESPONSE().getPATIENT().getPID(), 8, 0, 1, 1) != null)
                patInfo.setGender(Terser.get(oru.getRESPONSE().getPATIENT().getPID(), 8, 0, 1, 1).trim());
            if (Terser.get(oru.getRESPONSE().getPATIENT().getPID(), 23, 0, 1, 1) != null)
                patInfo.setBirthplace(Terser.get(oru.getRESPONSE().getPATIENT().getPID(), 23, 0, 1, 1).trim());
            // If this patient was seen before (same demographic hash), continue its record.
            LinkedList<String> locations = new LinkedList<String>();
            LinkedList<String> variables = new LinkedList<String>();
            if (idMatch.get(patInfo.getHash()) != null) {
                patInfo = idMatch.get(patInfo.getHash());
                locations = patInfo.getLocations();
                variables = patInfo.getVariables();
            }
            // Track newly seen locations; "ZB04"-prefixed wards mark PICU subjects.
            if (!locations
                    .contains(Terser.get(oru.getRESPONSE().getPATIENT().getVISIT().getPV1(), 3, 0, 1, 1))) {
                locations.add(Terser.get(oru.getRESPONSE().getPATIENT().getVISIT().getPV1(), 3, 0, 1, 1));
                if (locations.peekLast().startsWith("ZB04"))
                    patInfo.setPicuSubject(true);
            }
            tags.put("subjectId", patInfo.getHash());
            // Observation timestamp from OBR-7; first one seen becomes the earliest data point.
            String time = Terser.get(oru.getRESPONSE().getORDER_OBSERVATION().getOBR(), 7, 0, 1, 1);
            Date timepoint = fromUser.parse(time);
            String reformattedTime = myFormat.format(timepoint);
            if (patInfo.getEarliestDataPoint().equalsIgnoreCase("")) {
                patInfo.setEarliestDataPoint(reformattedTime);
            }
            List<ORU_R01_OBSERVATION> observations = oru.getRESPONSE().getORDER_OBSERVATION()
                    .getOBSERVATIONAll();
            for (ORU_R01_OBSERVATION observation : observations) {
                // Map the OBX-3 identifier to a friendly series name; fall back to
                // a digit-wildcarded lookup (e.g. "ABC1" -> "ABC#").
                String seriesName = Terser.get(observation.getOBX(), 3, 0, 1, 1);
                if (measurementNames.get(seriesName) != null) {
                    seriesName = measurementNames.get(seriesName);
                } else {
                    seriesName = seriesName.replaceFirst("\\d", "#");
                    seriesName = measurementNames.get(seriesName);
                }
                // CamelCase the series name, dropping parentheses.
                StringBuffer buff = new StringBuffer();
                String[] tokens = seriesName.split(" ");
                for (String i : tokens) {
                    i = i.replaceAll("\\(", "");
                    i = i.replaceAll("\\)", "");
                    buff.append(StringUtils.capitalize(i));
                }
                String measurementValue = Terser.get(observation.getOBX(), 5, 0, 1, 1);
                // Normalize OBX-6 units into identifier-safe names.
                // NOTE(review): replacements are order-sensitive (e.g. "l"->"liters"
                // runs before "mliters"->"milliliters"); do not reorder.
                String units = Terser.get(observation.getOBX(), 6, 0, 1, 1);
                if (units != null) {
                    units = units.replaceAll(":", "");
                    units = units.replaceAll("cm_h2o", "cmH2O");
                    units = units.replaceAll("\\(min/m2\\)", "MinPerMeterSquared");
                    units = units.replaceAll("l", "liters");
                    units = units.replaceAll("mliters", "milliliters");
                    units = units.replaceAll("g.m", "gramMeters");
                    units = units.replaceAll("dyn.sec.cm-5", "dyneSecondsPerQuinticCentimeter");
                    units = units.replaceAll("dyneSecondsPerQuinticCentimeter.m2",
                            "dyneSecondsPerQuinticCentimeterPerMeterSquared");
                    units = units.replaceAll("m2", "MeterSquared");
                    units = units.replaceAll("min", "Min");
                    units = units.replaceAll("/", "Per");
                    units = units.replaceAll("%", "percent");
                    units = units.replaceAll("#", "Count");
                    units = units.replaceAll("celiters", "Celsius");
                    units = units.replaceAll("mm\\(hg\\)", "mmHg");
                } else {
                    units = "percent";
                }
                // Final series id: vitals.<units>.<camelCasedMeasurement>
                seriesName = "vitals." + StringUtils.uncapitalize(units);
                seriesName += "." + StringUtils.uncapitalize(buff.toString());
                seriesName = seriesName.trim();
                if (!variables.contains(StringUtils.uncapitalize(buff.toString())))
                    variables.add(StringUtils.uncapitalize(buff.toString()));
                // Push the data point to OpenTSDB.
                IncomingDataPoint dataPoint = new IncomingDataPoint(seriesName, timepoint.getTime(),
                        measurementValue, tags);
                TimeSeriesStorer.storeTimePoint(urlString, dataPoint);
            }
            patInfo.setLocations(locations);
            patInfo.setVariables(variables);
            idMatch.put(patInfo.getHash(), patInfo);
        }
        System.out.println(" Subject Count: " + idMatch.size());
        // Rewrite both sheets: sheet 1 = all subjects, sheet 2 = PICU subjects only.
        int rowNum = 0;
        Set<String> keys = idMatch.keySet();
        TreeSet<String> sortedKeys = new TreeSet<String>(keys);
        for (String key : sortedKeys) {
            XSSFRow row = sheetOut.createRow(rowNum);
            XSSFRow row2 = sheetOut2.createRow(rowNum);
            XSSFCell cell, cell2;
            if (rowNum == 0) {
                // Header row for both sheets.
                cell = row.createCell(0);
                cell.setCellValue("Count");
                cell = row.createCell(1);
                cell.setCellValue("PICU Subject?");
                cell = row.createCell(2);
                cell.setCellValue("Hash");
                cell = row.createCell(3);
                cell.setCellValue("First Name");
                cell = row.createCell(4);
                cell.setCellValue("Last Name");
                cell = row.createCell(5);
                cell.setCellValue("Birth Date/Time");
                cell = row.createCell(6);
                cell.setCellValue("Gender");
                cell = row.createCell(7);
                cell.setCellValue("Birthplace");
                cell = row.createCell(8);
                cell.setCellValue("First Time Point");
                cell = row.createCell(9);
                cell.setCellValue("Location Count");
                cell = row.createCell(10);
                cell.setCellValue("Locations");
                cell = row.createCell(11);
                cell.setCellValue("Variable Count");
                cell = row.createCell(12);
                cell.setCellValue("Variables");
                cell2 = row2.createCell(0);
                cell2.setCellValue("Count");
                cell2 = row2.createCell(1);
                cell2.setCellValue("PICU Subject?");
                cell2 = row2.createCell(2);
                cell2.setCellValue("Hash");
                cell2 = row2.createCell(3);
                cell2.setCellValue("First Name");
                cell2 = row2.createCell(4);
                cell2.setCellValue("Last Name");
                cell2 = row2.createCell(5);
                cell2.setCellValue("Birth Date/Time");
                cell2 = row2.createCell(6);
                cell2.setCellValue("Gender");
                cell2 = row2.createCell(7);
                cell2.setCellValue("Birthplace");
                cell2 = row2.createCell(8);
                cell2.setCellValue("First Time Point");
                cell2 = row2.createCell(9);
                cell2.setCellValue("Location Count");
                cell2 = row2.createCell(10);
                cell2.setCellValue("Locations");
                cell2 = row2.createCell(11);
                cell2.setCellValue("Variable Count");
                cell2 = row2.createCell(12);
                cell2.setCellValue("Variables");
            } else {
                // Data row: all subjects on sheet 1.
                cell = row.createCell(0);
                cell.setCellValue(rowNum);
                cell = row.createCell(1);
                cell.setCellValue(idMatch.get(key).isPicuSubject());
                cell = row.createCell(2);
                cell.setCellValue(key);
                cell = row.createCell(3);
                cell.setCellValue(idMatch.get(key).getFirstName());
                cell = row.createCell(4);
                cell.setCellValue(idMatch.get(key).getLastName());
                cell = row.createCell(5);
                cell.setCellValue(idMatch.get(key).getBirthDateTime());
                cell = row.createCell(6);
                cell.setCellValue(idMatch.get(key).getGender());
                cell = row.createCell(7);
                cell.setCellValue(idMatch.get(key).getBirthplace());
                cell = row.createCell(8);
                cell.setCellValue(idMatch.get(key).getEarliestDataPoint());
                cell = row.createCell(9);
                cell.setCellValue(idMatch.get(key).getLocations().size());
                cell = row.createCell(10);
                cell.setCellValue(idMatch.get(key).getLocations().toString());
                cell = row.createCell(11);
                cell.setCellValue(idMatch.get(key).getVariables().size());
                cell = row.createCell(12);
                cell.setCellValue(idMatch.get(key).getVariables().toString());
                // Sheet 2 only receives PICU subjects.
                if (idMatch.get(key).isPicuSubject()) {
                    cell2 = row2.createCell(0);
                    cell2.setCellValue(rowNum);
                    cell2 = row2.createCell(1);
                    cell2.setCellValue(idMatch.get(key).isPicuSubject());
                    cell2 = row2.createCell(2);
                    cell2.setCellValue(key);
                    cell2 = row2.createCell(3);
                    cell2.setCellValue(idMatch.get(key).getFirstName());
                    cell2 = row2.createCell(4);
                    cell2.setCellValue(idMatch.get(key).getLastName());
                    cell2 = row2.createCell(5);
                    cell2.setCellValue(idMatch.get(key).getBirthDateTime());
                    cell2 = row2.createCell(6);
                    cell2.setCellValue(idMatch.get(key).getGender());
                    cell2 = row2.createCell(7);
                    cell2.setCellValue(idMatch.get(key).getBirthplace());
                    cell2 = row2.createCell(8);
                    cell2.setCellValue(idMatch.get(key).getEarliestDataPoint());
                    cell2 = row2.createCell(9);
                    cell2.setCellValue(idMatch.get(key).getLocations().size());
                    cell2 = row2.createCell(10);
                    cell2.setCellValue(idMatch.get(key).getLocations().toString());
                    cell2 = row2.createCell(11);
                    cell2.setCellValue(idMatch.get(key).getVariables().size());
                    cell2 = row2.createCell(12);
                    cell2.setCellValue(idMatch.get(key).getVariables().toString());
                }
            }
            rowNum++;
        }
    }
    // Persist the workbook and append all handled files to done.txt.
    if (messageFiles.size() > 0) {
        try {
            FileOutputStream out = new FileOutputStream(new File(openTSDBConfiguration.getIdMatch()));
            workbook.write(out);
            out.close();
            System.out.println("Excel written successfully...");
            PrintWriter writer = new PrintWriter(rootDir + "done.txt", "UTF-8");
            for (String filePath : processedFiles) {
                writer.println(filePath);
            }
            for (String filePath : messageFiles) {
                writer.println(filePath);
            }
            writer.close();
            System.out.println("done.txt written successfully...");
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        }
    } else {
        System.out.println("Nothing new to process...");
    }
}
From source file:syndeticlogic.memento.LfuStrategy.java
@Override public void removeLeastValuableNode() { LinkedList lfu = getLowestNonEmptyLru(); LinkedListNode lln = lfu.peekLast(); if (lln != null) { Cache.CacheNode node = (Cache.CacheNode) lln.getValue(); delete(node);//from w ww. j a v a 2 s.c o m node = null; } }