List of usage examples for the java.util.ArrayDeque no-argument constructor:
public ArrayDeque()
From source file:com.google.gwt.emultest.java.util.ArrayDequeTest.java
/** Verifies FIFO {@code poll()}: null on empty, removes the head, leaves the deque empty at the end. */
public void testPoll() {
    Object first = new Object();
    Object second = new Object();
    ArrayDeque<Object> deque = new ArrayDeque<>();

    // poll() on an empty deque returns null rather than throwing.
    assertNull(deque.poll());

    // Single element: poll() returns it and empties the deque.
    deque.add(first);
    assertEquals(first, deque.poll());
    assertTrue(deque.isEmpty());

    // Two elements: poll() removes from the head in insertion order.
    deque.add(first);
    deque.add(second);
    assertEquals(first, deque.poll());
    checkDequeSizeAndContent(deque, second);
    assertEquals(second, deque.poll());
    assertTrue(deque.isEmpty());
    assertNull(deque.poll());
}
From source file:org.apache.hadoop.hbase.tool.LoadIncrementalHFiles.java
/**
 * Perform a bulk load of the given directory into the given pre-existing table. This method is
 * not threadsafe.
 * @param hfofDir the directory that was provided as the output path of a job using
 *          HFileOutputFormat
 * @param admin the Admin
 * @param table the table to load into
 * @param regionLocator region locator
 * @param silence true to ignore unmatched column families
 * @param copyFile always copy hfiles if true
 * @return the result of {@link #performBulkLoad}, or an empty map when no files were found
 * @throws TableNotFoundException if table does not yet exist
 */
public Map<LoadQueueItem, ByteBuffer> doBulkLoad(Path hfofDir, final Admin admin, Table table,
        RegionLocator regionLocator, boolean silence, boolean copyFile)
        throws TableNotFoundException, IOException {
    // Fail fast: bulk load requires the target table to already exist and be available.
    if (!admin.isTableAvailable(regionLocator.getName())) {
        throw new TableNotFoundException("Table " + table.getName() + " is not currently available.");
    }
    /*
     * Checking hfile format is a time-consuming operation, we should have an option to skip this
     * step when bulkloading millions of HFiles. See HBASE-13985.
     */
    boolean validateHFile = getConf().getBoolean("hbase.loadincremental.validate.hfile", true);
    if (!validateHFile) {
        LOG.warn("You are skipping HFiles validation, it might cause some data loss if files "
                + "are not correct. If you fail to read data from your table after using this "
                + "option, consider removing the files and bulkload again without this option. "
                + "See HBASE-13985");
    }
    // LQI queue does not need to be threadsafe -- all operations on this queue
    // happen in this thread
    Deque<LoadQueueItem> queue = new ArrayDeque<>();
    ExecutorService pool = null;
    SecureBulkLoadClient secureClient = null;
    try {
        // Discover HFiles under hfofDir and enqueue one load-queue item per file.
        prepareHFileQueue(hfofDir, table, queue, validateHFile, silence);
        if (queue.isEmpty()) {
            LOG.warn(
                    "Bulk load operation did not find any files to load in directory {}. "
                            + "Does it contain files in subdirectories that correspond to column family names?",
                    (hfofDir != null ? hfofDir.toUri().toString() : ""));
            return Collections.emptyMap();
        }
        pool = createExecutorService();
        secureClient = new SecureBulkLoadClient(table.getConfiguration(), table);
        return performBulkLoad(admin, table, regionLocator, queue, pool, secureClient, copyFile);
    } finally {
        // Always release the executor pool and secure-client resources, even on failure.
        cleanup(admin, queue, pool, secureClient);
    }
}
From source file:org.shaman.terrain.polygonal.PolygonalMapGenerator.java
private void findOceans() { for (Graph.Center c : graph.centers) { c.ocean = false;//from w w w . j a v a2s . com c.water = false; } for (Graph.Corner c : graph.corners) { c.ocean = false; } //set water parameter of centers float LAKE_THRESHOLD = 0.3f; Queue<Graph.Center> queue = new ArrayDeque<>(); for (Graph.Center p : graph.centers) { int numWater = 0; for (Graph.Corner c : p.corners) { if (c.border || c.ocean) { p.border = true; p.water = true; p.ocean = true; queue.add(p); break; } if (c.water) { numWater++; } } p.water = (p.ocean || numWater >= p.corners.size() * LAKE_THRESHOLD); } LOG.info("border cells: " + queue.size()); //float fill borders to distinguish between ocean and likes while (!queue.isEmpty()) { Graph.Center c = queue.poll(); for (Graph.Center r : c.neighbors) { if (r.water && !r.ocean) { r.ocean = true; queue.add(r); } } } //assign coast tag for (Graph.Corner q : graph.corners) { q.coast = false; } for (Graph.Center c : graph.centers) { if (c.ocean) { for (Graph.Corner q : c.corners) { if (!q.water) { q.coast = true; } else { q.ocean = true; } } } } //assign basic biomes int oceanCount = 0; int lakeCount = 0; int landCount = 0; for (Graph.Center c : graph.centers) { if (c.ocean) { c.biome = Biome.OCEAN; oceanCount++; } else if (c.water) { c.biome = Biome.LAKE; lakeCount++; } else { c.biome = Biome.BEACH; lakeCount++; } } LOG.log(Level.INFO, "ocean cells: {0}, lake cells: {1}, land cells: {2}", new Object[] { oceanCount, lakeCount, landCount }); }
From source file:com.espertech.esper.core.EPRuntimeIsolatedImpl.java
/**
 * Executes the schedule callbacks collected for the current timer evaluation.
 * A single handle is dispatched directly; multiple handles are first grouped per
 * statement handle (single callback, or an ArrayDeque of callbacks when a statement
 * occurs more than once) and then dispatched per statement. When prioritized
 * execution is enabled, dispatching stops after a preemptive statement.
 */
private void processScheduleHandles(ArrayBackedCollection<ScheduleHandle> handles) {
    if (ThreadLogUtil.ENABLED_TRACE) {
        ThreadLogUtil.trace("Found schedules for", handles.size());
    }
    if (handles.size() == 0) {
        return;
    }
    // handle 1 result separatly for performance reasons
    if (handles.size() == 1) {
        Object[] handleArray = handles.getArray();
        EPStatementHandleCallback handle = (EPStatementHandleCallback) handleArray[0];
        EPRuntimeImpl.processStatementScheduleSingle(handle, unisolatedServices, isolatedTimeEvalContext);
        handles.clear();
        return;
    }
    // Snapshot the backing array; entryCount bounds the valid entries within it.
    Object[] matchArray = handles.getArray();
    int entryCount = handles.size();
    // sort multiple matches for the event into statements
    // (thread-local map reused across invocations; cleared before and consumed below)
    Map<EPStatementHandle, Object> stmtCallbacks = schedulePerStmtThreadLocal.get();
    stmtCallbacks.clear();
    for (int i = 0; i < entryCount; i++) // need to use the size of the collection
    {
        EPStatementHandleCallback handleCallback = (EPStatementHandleCallback) matchArray[i];
        EPStatementHandle handle = handleCallback.getEpStatementHandle();
        ScheduleHandleCallback callback = handleCallback.getScheduleCallback();
        Object entry = stmtCallbacks.get(handle);
        // This statement has not been encountered before
        if (entry == null) {
            stmtCallbacks.put(handle, callback);
            continue;
        }
        // This statement has been encountered once before:
        // promote the single callback to a deque holding both.
        if (entry instanceof ScheduleHandleCallback) {
            ScheduleHandleCallback existingCallback = (ScheduleHandleCallback) entry;
            ArrayDeque<ScheduleHandleCallback> entries = new ArrayDeque<ScheduleHandleCallback>();
            entries.add(existingCallback);
            entries.add(callback);
            stmtCallbacks.put(handle, entries);
            continue;
        }
        // This statement has been encountered more then once before
        ArrayDeque<ScheduleHandleCallback> entries = (ArrayDeque<ScheduleHandleCallback>) entry;
        entries.add(callback);
    }
    handles.clear();
    // Dispatch per statement; the value is either a single callback or a deque of callbacks.
    for (Map.Entry<EPStatementHandle, Object> entry : stmtCallbacks.entrySet()) {
        EPStatementHandle handle = entry.getKey();
        Object callbackObject = entry.getValue();
        EPRuntimeImpl.processStatementScheduleMultiple(handle, callbackObject, unisolatedServices, isolatedTimeEvalContext);
        // Preemptive statements stop further processing when prioritized execution is on.
        if ((isPrioritized) && (handle.isPreemptive())) {
            break;
        }
    }
}
From source file:com.google.gwt.emultest.java.util.ArrayDequeTest.java
/** Verifies {@code pollFirst()}: null on empty, removes from the head in insertion order. */
public void testPollFirst() {
    Object first = new Object();
    Object second = new Object();
    ArrayDeque<Object> deque = new ArrayDeque<>();

    // pollFirst() on an empty deque returns null rather than throwing.
    assertNull(deque.pollFirst());
    assertTrue(deque.isEmpty());

    // Single element: pollFirst() returns it and empties the deque.
    deque.add(first);
    assertEquals(first, deque.pollFirst());
    assertTrue(deque.isEmpty());
    assertNull(deque.pollFirst());

    // Two elements: pollFirst() removes from the head in insertion order.
    deque.add(first);
    deque.add(second);
    assertEquals(first, deque.pollFirst());
    checkDequeSizeAndContent(deque, second);
    assertEquals(second, deque.pollFirst());
    assertTrue(deque.isEmpty());
    assertNull(deque.pollFirst());
}
From source file:nl.knaw.huc.di.tag.tagml.importer.TAGMLListener.java
private void closeTextVariationMarkup(final String extendedMarkupName, final Set<String> layers) { removeFromMarkupStack2(extendedMarkupName, state.allOpenMarkup); TAGMarkup markup;// w ww . j a v a 2 s .co m for (String l : layers) { state.openMarkup.putIfAbsent(l, new ArrayDeque<>()); Deque<TAGMarkup> markupStack = state.openMarkup.get(l); markup = removeFromMarkupStack2(extendedMarkupName, markupStack); document.closeMarkupInLayer(markup, l); } }
From source file:com.vgi.mafscaling.VECalc.java
/**
 * Prompts the user for one or more CSV log files, resolves the column indices and
 * filter settings from each file's header, then parses the data rows and fills the
 * log data table with rows that pass the configured throttle/AFR/RPM/FFB/IAT filters.
 * Aborts the load if the user cancels the column/filter selection dialog, or on a
 * number-parse error (after showing an error dialog).
 */
protected void loadLogFile() {
    fileChooser.setMultiSelectionEnabled(true);
    if (JFileChooser.APPROVE_OPTION != fileChooser.showOpenDialog(this))
        return;
    File[] files = fileChooser.getSelectedFiles();
    for (File file : files) {
        BufferedReader br = null;
        // Sliding window of parsed rows: newest row is at the front, the row being
        // processed is removed from the back, so AFR values are read afrRowOffset
        // rows ahead of the data row.
        ArrayDeque<String[]> buffer = new ArrayDeque<String[]>();
        try {
            br = new BufferedReader(new FileReader(file.getAbsoluteFile()));
            String line = br.readLine();
            if (line != null) {
                // Header row: resolve column indices and filter settings from the names.
                String[] elements = line.split("(\\s*)?,(\\s*)?", -1);
                getColumnsFilters(elements);
                boolean resetColumns = false;
                // If any expected column was found, offer to reset column names / filters.
                if (logThrottleAngleColIdx >= 0 || logFfbColIdx >= 0 || logSdColIdx >= 0
                        || (logWbAfrColIdx >= 0 && isOl) || (logStockAfrColIdx >= 0 && !isOl)
                        || (logAfLearningColIdx >= 0 && !isOl) || (logAfCorrectionColIdx >= 0 && !isOl)
                        || logRpmColIdx >= 0 || logMafColIdx >= 0 || logIatColIdx >= 0 || logMpColIdx >= 0) {
                    if (JOptionPane.YES_OPTION == JOptionPane.showConfirmDialog(null,
                            "Would you like to reset column names or filter values?", "Columns/Filters Reset",
                            JOptionPane.YES_NO_OPTION, JOptionPane.PLAIN_MESSAGE))
                        resetColumns = true;
                }
                // If a reset was requested or any required column is missing, show the
                // column/filter selection dialog; abort the load if the user cancels.
                if (resetColumns || logThrottleAngleColIdx < 0 || logFfbColIdx < 0 || logSdColIdx < 0
                        || (logWbAfrColIdx < 0 && isOl) || (logStockAfrColIdx < 0 && !isOl)
                        || (logAfLearningColIdx < 0 && !isOl) || (logAfCorrectionColIdx < 0 && !isOl)
                        || logRpmColIdx < 0 || logMafColIdx < 0 || logIatColIdx < 0 || logMpColIdx < 0) {
                    ColumnsFiltersSelection selectionWindow = new VEColumnsFiltersSelection(false);
                    if (!selectionWindow.getUserSettings(elements) || !getColumnsFilters(elements))
                        return;
                }
                // No CL/OL status column in this log: disable the CL/OL status filter.
                if (logClOlStatusColIdx == -1)
                    clValue = -1;
                String[] flds;
                String[] afrflds;
                boolean removed = false;
                int i = 2; // current file line number, used in the parse-error message
                int clol = -1;
                int row = getLogTableEmptyRow();
                // Secondary throttle-change limit, 1.5x the configured maximum.
                double thrtlMaxChange2 = thrtlMaxChange + thrtlMaxChange / 2.0;
                double throttle = 0;
                double pThrottle = 0;
                double ppThrottle = 0;
                double afr = 0;
                double rpm;
                double ffb;
                double iat;
                clearRunTables();
                setCursor(new Cursor(Cursor.WAIT_CURSOR));
                // Pre-fill the look-ahead buffer with afrRowOffset + 1 rows.
                for (int k = 0; k <= afrRowOffset && line != null; ++k) {
                    line = br.readLine();
                    if (line != null)
                        buffer.addFirst(line.split(",", -1));
                }
                try {
                    while (line != null && buffer.size() > afrRowOffset) {
                        afrflds = buffer.getFirst();   // newest row: AFR read ahead of the data row
                        flds = buffer.removeLast();    // oldest row: the data row being processed
                        line = br.readLine();
                        if (line != null)
                            buffer.addFirst(line.split(",", -1));
                        // Track throttle over the last three rows to filter transients.
                        ppThrottle = pThrottle;
                        pThrottle = throttle;
                        throttle = Double.valueOf(flds[logThrottleAngleColIdx]);
                        try {
                            if (row > 0 && Math.abs(pThrottle - throttle) > thrtlMaxChange) {
                                // Throttle moved too fast: drop the previously accepted row once.
                                if (!removed)
                                    Utils.removeRow(row--, logDataTable);
                                removed = true;
                            } else if (row <= 0 || Math.abs(ppThrottle - throttle) <= thrtlMaxChange2) {
                                // Filters
                                afr = (isOl ? Double.valueOf(afrflds[logWbAfrColIdx])
                                        : Double.valueOf(afrflds[logStockAfrColIdx]));
                                rpm = Double.valueOf(flds[logRpmColIdx]);
                                ffb = Double.valueOf(flds[logFfbColIdx]);
                                iat = Double.valueOf(flds[logIatColIdx]);
                                if (clValue != -1)
                                    clol = Integer.valueOf(flds[logClOlStatusColIdx]);
                                boolean flag = isOl ? ((afr <= afrMax || throttle >= thrtlMin) && afr <= afrMax)
                                        : (afrMin <= afr);
                                if (flag && clol == clValue && rpmMin <= rpm && ffbMin <= ffb && ffb <= ffbMax
                                        && iat <= iatMax) {
                                    removed = false;
                                    // Closed-loop mode: accumulate combined fuel-trim values.
                                    if (!isOl)
                                        trims.add(Double.valueOf(flds[logAfLearningColIdx])
                                                + Double.valueOf(flds[logAfCorrectionColIdx]));
                                    Utils.ensureRowCount(row + 1, logDataTable);
                                    logDataTable.setValueAt(rpm, row, 0);
                                    logDataTable.setValueAt(iat, row, 1);
                                    logDataTable.setValueAt(Double.valueOf(flds[logMpColIdx]), row, 2);
                                    logDataTable.setValueAt(ffb, row, 3);
                                    logDataTable.setValueAt(afr, row, 4);
                                    logDataTable.setValueAt(Double.valueOf(flds[logMafColIdx]), row, 5);
                                    logDataTable.setValueAt(Double.valueOf(flds[logSdColIdx]), row, 6);
                                    row += 1;
                                } else
                                    removed = true;
                            } else
                                removed = true;
                        } catch (NumberFormatException e) {
                            logger.error(e);
                            JOptionPane.showMessageDialog(null, "Error parsing number at " + file.getName()
                                    + " line " + i + ": " + e, "Error processing file", JOptionPane.ERROR_MESSAGE);
                            return;
                        }
                        i += 1;
                    }
                } finally {
                    // Always restore the cursor, even when parsing aborts.
                    setCursor(new Cursor(Cursor.DEFAULT_CURSOR));
                }
            }
        } catch (Exception e) {
            logger.error(e);
            JOptionPane.showMessageDialog(null, e, "Error opening file", JOptionPane.ERROR_MESSAGE);
        } finally {
            if (br != null) {
                try {
                    br.close();
                } catch (IOException e) {
                    logger.error(e);
                }
            }
        }
    }
}
From source file:com.britesnow.snow.web.RequestContext.java
/**
 * Replaces the frame-paths stack with the given paths, in array order.
 * A null argument is ignored and leaves the current stack untouched.
 */
public void setFramePaths(String[] framePaths) {
    if (framePaths == null) {
        return;
    }
    // ArrayDeque's collection constructor appends in iteration order, matching
    // the original addAll over the array's list view.
    framePathsStack = new ArrayDeque<String>(Arrays.asList(framePaths));
}
From source file:com.google.gwt.emultest.java.util.ArrayDequeTest.java
public void testPollLast() { Object o1 = new Object(); Object o2 = new Object(); ArrayDeque<Object> deque = new ArrayDeque<>(); assertNull(deque.pollLast());// ww w. j a v a 2 s .c o m assertTrue(deque.isEmpty()); deque.add(o1); assertEquals(o1, deque.pollLast()); assertTrue(deque.isEmpty()); assertNull(deque.pollFirst()); deque.add(o1); deque.add(o2); assertEquals(o2, deque.pollLast()); checkDequeSizeAndContent(deque, o1); assertEquals(o1, deque.pollLast()); assertTrue(deque.isEmpty()); assertNull(deque.pollLast()); }
From source file:org.apache.pig.newplan.BaseOperatorPlan.java
/**
 * Move everything below a given operator to the new operator plan. The specified operator will
 * be moved and will be the root of the new operator plan
 * @param root Operator to move everything after
 * @param newPlan new operator plan to move things into
 * @throws FrontendException
 */
public void moveTree(Operator root, BaseOperatorPlan newPlan) throws FrontendException {
    // Breadth-first walk starting at root, re-homing each visited operator into newPlan
    // and recreating the successor edges there.
    Deque<Operator> queue = new ArrayDeque<Operator>();
    newPlan.add(root);
    root.setPlan(newPlan);
    queue.addLast(root);
    while (!queue.isEmpty()) {
        Operator node = queue.poll();
        if (getSuccessors(node) != null) {
            for (Operator succ : getSuccessors(node)) {
                // Only move/connect successors not already waiting in the queue.
                // NOTE(review): this guards against nodes *currently queued*, not against
                // nodes already processed. If a successor is reachable through two
                // predecessors while still queued, the second connect() is skipped; a node
                // could also be re-added after being polled. Presumably the plans handled
                // here are tree-shaped -- verify before relying on this for DAG plans.
                if (!queue.contains(succ)) {
                    queue.addLast(succ);
                    newPlan.add(succ);
                    succ.setPlan(newPlan);
                    newPlan.connect(node, succ);
                }
            }
        }
    }
    // Detach the moved subtree from this plan.
    trimBelow(root);
}