List of usage examples for java.util.LinkedList.clear()
public void clear()
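LinkedList.clear() removes every element from the list, leaving it empty (size() == 0); the list object itself stays usable and can be refilled. Before the real-world examples below, here is a minimal illustrative sketch of that behavior (the class and variable names are only for demonstration):

import java.util.LinkedList;

public class ClearDemo {
    public static void main(String[] args) {
        LinkedList<String> names = new LinkedList<String>();
        names.add("alice");
        names.add("bob");
        System.out.println(names.size()); // prints 2

        // clear() removes all elements but keeps the list instance reusable
        names.clear();
        System.out.println(names.isEmpty()); // prints true

        names.add("carol"); // the same list can be refilled after clear()
        System.out.println(names); // prints [carol]
    }
}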
From source file:org.apache.hadoop.hdfs.server.namenode.FSImageTransactionalStorageInspector.java
/**
 * @return the image files that have the most recent associated
 *         transaction IDs. If there are multiple storage directories which
 *         contain equal images, we'll return them all.
 * @throws FileNotFoundException if no images are found.
 */
@Override
List<FSImageFile> getLatestImages() throws IOException {
    LinkedList<FSImageFile> ret = new LinkedList<FSImageFile>();
    for (FSImageFile img : foundImages) {
        if (ret.isEmpty()) {
            ret.add(img);
        } else {
            FSImageFile cur = ret.getFirst();
            if (cur.txId == img.txId) {
                ret.add(img);
            } else if (cur.txId < img.txId) {
                // a newer image was found, so discard everything collected so far
                ret.clear();
                ret.add(img);
            }
        }
    }
    if (ret.isEmpty()) {
        throw new FileNotFoundException("No valid image files found");
    }
    return ret;
}
From source file:com.amazonaws.services.logs.connectors.kinesis.KinesisTransformer.java
@Override
public Collection<Record> fromClasses(Collection<CloudWatchLogsEvent> events) throws IOException {
    LinkedList<Record> records = new LinkedList<Record>();
    LinkedList<CloudWatchLogsEvent> buffer = new LinkedList<CloudWatchLogsEvent>();
    String key = null;
    Iterator<CloudWatchLogsEvent> iter = events.iterator();
    while (iter.hasNext()) {
        CloudWatchLogsEvent event = iter.next();
        String nextKey = getPartitionKey(event);
        // when the partition key changes, flush the buffered events into records
        // and clear the buffer before starting the next group
        if (key != null && !key.equals(nextKey)) {
            records.addAll(createRecords(buffer));
            buffer.clear();
        }
        key = nextKey; // track the current partition key (missing from the flattened snippet; without it the flush never triggers)
        buffer.add(event);
    }
    if (buffer.size() > 0)
        records.addAll(createRecords(buffer));
    return records;
}
From source file:com.commander4j.util.JUtility.java
public static String getTimeStampStringFormat(Timestamp ts, String fmt) {
    String result = "";
    LinkedList<String> fmtList = new LinkedList<String>();
    LinkedList<String> valList = new LinkedList<String>();
    fmtList.clear();
    valList.clear();
    result = ts.toString();
    fmtList.add("yyyy");
    valList.add(result.substring(0, 4));
    fmtList.add("yy");
    valList.add(result.substring(2, 4));
    fmtList.add("mm");
    valList.add(result.substring(5, 7));
    fmtList.add("dd");
    valList.add(result.substring(8, 10));
    fmtList.add("hh");
    valList.add(result.substring(11, 13));
    fmtList.add("mi");
    valList.add(result.substring(14, 16));
    fmtList.add("ss");
    valList.add(result.substring(17, 19));
    fmtList.add("yymmdd");
    valList.add(result.substring(2, 4) + result.substring(5, 7) + result.substring(8, 10));
    int pos = fmtList.indexOf(fmt);
    if (pos >= 0) {
        result = valList.get(pos);
    } else {
        result = "";
    }
    return result;
}
From source file:org.apache.hadoop.hdfs.server.namenode.JournalSet.java
public static void chainAndMakeRedundantStreams(Collection<EditLogInputStream> outStreams,
        PriorityQueue<EditLogInputStream> allStreams, long fromTxId) {
    // We want to group together all the streams that start on the same start
    // transaction ID. To do this, we maintain an accumulator (acc) of all
    // the streams we've seen at a given start transaction ID. When we see a
    // higher start transaction ID, we select a stream from the accumulator and
    // clear it. Then we begin accumulating streams with the new, higher start
    // transaction ID.
    LinkedList<EditLogInputStream> acc = new LinkedList<EditLogInputStream>();
    EditLogInputStream elis;
    while ((elis = allStreams.poll()) != null) {
        if (acc.isEmpty()) {
            acc.add(elis);
        } else {
            EditLogInputStream accFirst = acc.get(0);
            long accFirstTxId = accFirst.getFirstTxId();
            if (accFirstTxId == elis.getFirstTxId()) {
                // if we have a finalized log segment available at this txid,
                // we should throw out all in-progress segments at this txid
                if (elis.isInProgress()) {
                    if (accFirst.isInProgress()) {
                        acc.add(elis);
                    }
                } else {
                    if (accFirst.isInProgress()) {
                        acc.clear();
                    }
                    acc.add(elis);
                }
            } else if (accFirstTxId < elis.getFirstTxId()) {
                // try to read from the local logs first since the throughput should
                // be higher
                Collections.sort(acc, LOCAL_LOG_PREFERENCE_COMPARATOR);
                outStreams.add(new RedundantEditLogInputStream(acc, fromTxId));
                acc.clear();
                acc.add(elis);
            } else if (accFirstTxId > elis.getFirstTxId()) {
                throw new RuntimeException("sorted set invariants violated! "
                        + "Got stream with first txid " + elis.getFirstTxId()
                        + ", but the last firstTxId was " + accFirstTxId);
            }
        }
    }
    if (!acc.isEmpty()) {
        Collections.sort(acc, LOCAL_LOG_PREFERENCE_COMPARATOR);
        outStreams.add(new RedundantEditLogInputStream(acc, fromTxId));
        acc.clear();
    }
}
From source file:com.manydesigns.portofino.navigation.Navigation.java
private void buildTree() {
    int rootPageIndex = dispatch.getClosestSubtreeRootIndex();
    PageInstance[] pageInstances = dispatch.getPageInstancePath(rootPageIndex);
    if (pageInstances == null || pageInstances.length == 0) {
        return;
    }
    PageInstance rootPageInstance = pageInstances[0];
    String prefix = "";
    if (rootPageIndex > 0) {
        prefix += rootPageInstance.getParent().getPath() + "/" + rootPageInstance.getName();
    }
    boolean rootSelected = pageInstances.length == 1;
    Page rootPage = rootPageInstance.getPage();
    boolean rootGhost = rootPage.getActualNavigationRoot() == NavigationRoot.GHOST_ROOT;
    rootNavigationItem = new NavigationItem(rootPageInstance.getTitle(), rootPageInstance.getDescription(),
            prefix, true, rootSelected, rootGhost);
    LinkedList<Page> pages = new LinkedList<Page>();
    PageInstance[] allInstances = dispatch.getPageInstancePath();
    for (int i = 0; i <= rootPageIndex; i++) {
        pages.add(allInstances[i].getPage());
    }
    Permissions basePermissions = SecurityLogic.calculateActualPermissions(new Permissions(), pages);
    pages.clear();
    List<ChildPage> childPages;
    NavigationItem currentNavigationItem = rootNavigationItem;
    for (int i = 0, pageInstancesLength = pageInstances.length; i < pageInstancesLength; i++) {
        PageInstance current = pageInstances[i];
        PageInstance next;
        if (i < pageInstancesLength - 1) {
            next = pageInstances[i + 1];
        } else {
            next = null;
        }
        Layout layout = current.getLayout();
        if (layout != null) {
            childPages = layout.getChildPages();
        } else {
            childPages = new ArrayList<ChildPage>();
        }
        List<NavigationItem> currentChildNavigationItems = currentNavigationItem.getChildNavigationItems();
        prefix = currentNavigationItem.getPath() + "/";
        for (String param : current.getParameters()) {
            prefix += param + "/";
        }
        currentNavigationItem = null;
        for (ChildPage childPage : childPages) {
            File pageDir = current.getChildPageDirectory(childPage.getName());
            Page page;
            try {
                page = DispatcherLogic.getPage(pageDir);
            } catch (Exception e) {
                logger.warn("Nonexisting child page: " + pageDir, e);
                continue;
            }
            String path = prefix + childPage.getName();
            boolean inPath = false;
            boolean selected = false;
            String description = page.getDescription();
            if (next != null) {
                if (next.getName().equals(childPage.getName())) {
                    inPath = true;
                    selected = (i == pageInstancesLength - 2);
                    description = next.getDescription();
                }
            }
            pages.add(page);
            if (!skipPermissions) {
                Permissions permissions = SecurityLogic.calculateActualPermissions(basePermissions, pages);
                if (!SecurityLogic.hasPermissions(configuration, permissions, subject, AccessLevel.VIEW)) {
                    pages.removeLast();
                    continue;
                }
            }
            pages.removeLast();
            if (!childPage.isShowInNavigation() && !inPath) {
                continue;
            }
            NavigationItem childNavigationItem = new NavigationItem(page.getTitle(), description, path,
                    inPath, selected, false);
            currentChildNavigationItems.add(childNavigationItem);
            if (inPath) {
                currentNavigationItem = childNavigationItem;
            }
        }
        if (currentNavigationItem == null && next != null) {
            boolean selected = (i == pageInstancesLength - 2);
            String path = prefix + next.getName();
            currentNavigationItem = new NavigationItem(next.getTitle(), next.getDescription(), path, true,
                    selected, false);
            currentChildNavigationItems.add(currentNavigationItem);
        }
        if (next != null) {
            pages.add(next.getPage());
        }
    }
}
From source file:edu.uci.ics.hyracks.algebricks.rewriter.rules.PushProjectDownRule.java
private static Pair<Boolean, Boolean> pushThroughOp(HashSet<LogicalVariable> toPush,
        Mutable<ILogicalOperator> opRef2, ILogicalOperator initialOp, IOptimizationContext context)
        throws AlgebricksException {
    List<LogicalVariable> initProjectList = new ArrayList<LogicalVariable>(toPush);
    AbstractLogicalOperator op2 = (AbstractLogicalOperator) opRef2.getValue();
    do {
        if (op2.getOperatorTag() == LogicalOperatorTag.EMPTYTUPLESOURCE
                || op2.getOperatorTag() == LogicalOperatorTag.NESTEDTUPLESOURCE
                || op2.getOperatorTag() == LogicalOperatorTag.PROJECT
                || op2.getOperatorTag() == LogicalOperatorTag.REPLICATE
                || op2.getOperatorTag() == LogicalOperatorTag.UNIONALL) {
            return new Pair<Boolean, Boolean>(false, false);
        }
        if (!op2.isMap()) {
            break;
        }
        LinkedList<LogicalVariable> usedVars = new LinkedList<LogicalVariable>();
        VariableUtilities.getUsedVariables(op2, usedVars);
        toPush.addAll(usedVars);
        LinkedList<LogicalVariable> producedVars = new LinkedList<LogicalVariable>();
        VariableUtilities.getProducedVariables(op2, producedVars);
        toPush.removeAll(producedVars);
        // we assume pipelineable ops. have only one input
        opRef2 = op2.getInputs().get(0);
        op2 = (AbstractLogicalOperator) opRef2.getValue();
    } while (true);
    LinkedList<LogicalVariable> produced2 = new LinkedList<LogicalVariable>();
    VariableUtilities.getProducedVariables(op2, produced2);
    LinkedList<LogicalVariable> used2 = new LinkedList<LogicalVariable>();
    VariableUtilities.getUsedVariables(op2, used2);
    boolean canCommuteProjection = initProjectList.containsAll(toPush)
            && initProjectList.containsAll(produced2) && initProjectList.containsAll(used2);
    // if true, we can get rid of the initial projection
    // get rid of useless decor vars.
    if (!canCommuteProjection && op2.getOperatorTag() == LogicalOperatorTag.GROUP) {
        boolean gbyChanged = false;
        GroupByOperator gby = (GroupByOperator) op2;
        List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> newDecorList = new ArrayList<Pair<LogicalVariable, Mutable<ILogicalExpression>>>();
        for (Pair<LogicalVariable, Mutable<ILogicalExpression>> p : gby.getDecorList()) {
            LogicalVariable decorVar = GroupByOperator.getDecorVariable(p);
            if (!toPush.contains(decorVar)) {
                used2.remove(decorVar);
                gbyChanged = true;
            } else {
                newDecorList.add(p);
            }
        }
        gby.getDecorList().clear();
        gby.getDecorList().addAll(newDecorList);
        if (gbyChanged) {
            context.computeAndSetTypeEnvironmentForOperator(gby);
        }
    }
    used2.clear();
    VariableUtilities.getUsedVariables(op2, used2);
    toPush.addAll(used2); // remember that toPush is a Set
    toPush.removeAll(produced2);
    if (toPush.isEmpty()) {
        return new Pair<Boolean, Boolean>(false, false);
    }
    boolean smthWasPushed = false;
    for (Mutable<ILogicalOperator> c : op2.getInputs()) {
        if (pushNeededProjections(toPush, c, context, initialOp)) {
            smthWasPushed = true;
        }
    }
    if (op2.hasNestedPlans()) {
        AbstractOperatorWithNestedPlans n = (AbstractOperatorWithNestedPlans) op2;
        for (ILogicalPlan p : n.getNestedPlans()) {
            for (Mutable<ILogicalOperator> r : p.getRoots()) {
                if (pushNeededProjections(toPush, r, context, initialOp)) {
                    smthWasPushed = true;
                }
            }
        }
    }
    return new Pair<Boolean, Boolean>(smthWasPushed, canCommuteProjection);
}
From source file:com.codenjoy.dojo.services.dao.PlayerGameSaverTest.java
@Test
public void shouldWorks_saveLoadChat() {
    ChatServiceImpl chat = new ChatServiceImpl();
    LinkedList<ChatMessage> messages = new LinkedList<ChatMessage>();
    chat.setMessages(messages);
    setTime(0);
    chat.chat("apofig", "message1");
    chat.chat("apofig", "message2");
    chat.chat("apofig", "message3");
    chat.chat("apofig", "message4");
    chat.chat("apofig", "message5");
    chat.chat("apofig", "message6");
    chat.chat("apofig", "message7");
    saver.saveChat(chat.getMessages());
    messages.clear();
    List<ChatMessage> chatMessages = saver.loadChat();
    chat.setMessages(chatMessages);
    assertEquals("apofig, ? ?\n"
            + "[15:03] apofig: message6\n"
            + "[15:03] apofig: message5\n"
            + "[15:03] apofig: message4\n"
            + "[15:03] apofig: message3\n"
            + "[15:03] apofig: message2\n"
            + "[15:03] apofig: message1\n",
            StringEscapeUtils.unescapeJava(chat.getChatLog()));
}
From source file:net.sourceforge.sqlexplorer.EDriverName.java
/**
 * DOC qzhang Comment method "getJars".
 *
 * @return
 */
public LinkedList<String> getJars() {
    // find jdbc jar path from 'lib/java'; if not found, find it from "librariesIndex.xml"
    LinkedList<String> linkedList = new LinkedList<String>();
    if (this.getLibManagerServic() != null) {
        boolean jarNotFound = false;
        for (String jarName : jars) {
            String libPath = libManagerServic.getJarPath(jarName);
            if (libPath == null) {
                jarNotFound = true;
                break;
            }
            linkedList.add(libPath);
        }
        if (jarNotFound) {
            linkedList.clear();
        }
    }
    return linkedList;
}
From source file:com.frostwire.platform.FileSystemWalkTest.java
@Test
public void testDir() throws IOException {
    File f1 = File.createTempFile("aaa", null);
    File d1 = f1.getParentFile();
    final File d2 = new File(d1, "d2");
    if (d2.exists()) {
        FileUtils.deleteDirectory(d2);
    }
    assertTrue(d2.mkdir());
    File f2 = new File(d2, "bbb");
    assertTrue(f2.createNewFile());

    final LinkedList<File> l = new LinkedList<>();
    fs.walk(d1, new FileFilter() {
        @Override
        public boolean accept(File file) {
            return true;
        }

        @Override
        public void file(File file) {
            l.add(file);
        }
    });
    Set<File> set = new LinkedHashSet<>(l);
    assertEquals(set.size(), l.size());
    assertFalse(l.contains(d1));
    assertTrue(l.contains(f1));
    assertTrue(l.contains(d2));
    assertTrue(l.contains(f2));

    l.clear();
    fs.walk(d1, new FileFilter() {
        @Override
        public boolean accept(File file) {
            return !file.equals(d2);
        }

        @Override
        public void file(File file) {
            l.add(file);
        }
    });
    assertFalse(l.contains(d1));
    assertTrue(l.contains(f1));
    assertFalse(l.contains(d2));
    assertFalse(l.contains(f2));

    assertTrue(f2.delete());
    assertTrue(d2.delete());
    assertTrue(f1.delete());
}
From source file:org.paxle.data.db.impl.CommandDBTest.java
public void _testVeryLargeURLSet() throws MalformedURLException, InterruptedException {
    final int MAX = 1000000;
    final int chunkSize = 1000;

    System.setProperty("derby.storage.pageCacheSize", "2000"); // default 1000
    //System.setProperty("derby.storage.pageSize", "32768"); // default 4096 bytes

    // setup DB
    // this.setupDB(POSTGRESQL_CONFIG_FILE, String.format(POSTGRESQL_CONNECTION_URL, "192.168.10.201"));
    // this.setupDB(H2_CONFIG_FILE, H2_CONNECTION_URL, "sa", "");
    this.setupDB(DERBY_CONFIG_FILE, DERBY_CONNECTION_URL, null, null);

    // command-tracker must be called MAX times
    checking(new Expectations() {
        {
            exactly(MAX).of(cmdTracker).commandCreated(with(equal("org.paxle.data.db.ICommandDB")),
                    with(any(ICommand.class)));
        }
    });

    final Semaphore s = new Semaphore(-MAX + 1);
    new Thread() {
        public void run() {
            try {
                Thread.sleep(10000);
            } catch (InterruptedException e) {
            }
            // create a dummy data-sink
            cmdDB.setDataSink(new DummyDataSink(s));
        };
    }.start();

    // store new commands
    long start = System.currentTimeMillis();
    LinkedList<URI> testURI = new LinkedList<URI>();
    for (int i = 1; i <= MAX; i++) {
        URI nextCommand = URI.create("http://test.paxle.net/" + i);
        testURI.add(nextCommand);
        if (i % chunkSize == 0 || i == MAX) {
            int known = this.cmdDB.storeUnknownLocations(0, 1, testURI);
            assertEquals(0, known);
            testURI.clear();
        }
    }

    // wait for all commands to be enqueued
    s.acquire();
    System.out.println(String.format("Storing and loading %d URL took %d ms", Integer.valueOf(MAX),
            Long.valueOf(System.currentTimeMillis() - start)));
}
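Several of the examples above (KinesisTransformer, JournalSet, CommandDBTest) use the same batching idiom: append items to a LinkedList buffer, flush the buffer when a group boundary or chunk size is reached, then call clear() so the same list instance collects the next batch. Below is a stripped-down sketch of that idiom; the names flush and CHUNK_SIZE are hypothetical and do not come from any of the projects above:

import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;

public class BatchingIdiom {

    private static final int CHUNK_SIZE = 3; // hypothetical batch size

    public static void main(String[] args) {
        LinkedList<String> buffer = new LinkedList<String>();
        for (int i = 1; i <= 10; i++) {
            buffer.add("item" + i);
            if (buffer.size() == CHUNK_SIZE) {
                flush(buffer);
                buffer.clear(); // reuse the same list for the next chunk
            }
        }
        if (!buffer.isEmpty()) {
            flush(buffer); // flush the trailing partial chunk
        }
    }

    // stand-in for whatever consumes a completed batch
    private static void flush(List<String> batch) {
        // copy before handing off, because clear() will empty the shared instance
        System.out.println("flushing " + new ArrayList<String>(batch));
    }
}

Note the copy inside flush: if the consumer keeps a reference to the batch rather than processing it immediately, it must copy the contents before the caller invokes clear(), since clear() empties the one shared list.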