List of usage examples for java.util.ArrayDeque.size()

public int size()
Returns the number of elements in this deque.
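Before the project examples below, a minimal self-contained sketch (the class name SizeDemo is hypothetical) showing that size() reflects additions and removals at either end of the deque:

import java.util.ArrayDeque;

public class SizeDemo {
    public static void main(String[] args) {
        ArrayDeque<String> deque = new ArrayDeque<>();
        System.out.println(deque.size()); // 0: a new deque is empty
        deque.add("a");                   // appends at the tail
        deque.addFirst("b");              // prepends at the head
        System.out.println(deque.size()); // 2
        deque.pop();                      // removes the head ("b")
        System.out.println(deque.size()); // 1
    }
}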
From source file:com.espertech.esper.core.start.EPPreparedExecuteIUDSingleStream.java
/**
 * Executes the prepared query.
 * @return query results
 */
public EPPreparedQueryResult execute(ContextPartitionSelector[] contextPartitionSelectors) {
    try {
        if (contextPartitionSelectors != null && contextPartitionSelectors.length != 1) {
            throw new IllegalArgumentException("Number of context partition selectors must be one");
        }
        ContextPartitionSelector optionalSingleSelector = contextPartitionSelectors != null
                && contextPartitionSelectors.length > 0 ? contextPartitionSelectors[0] : null;

        // validate context
        if (processor.getContextName() != null && statementSpec.getOptionalContextName() != null
                && !processor.getContextName().equals(statementSpec.getOptionalContextName())) {
            throw new EPException("Context for named window is '" + processor.getContextName()
                    + "' and query specifies context '" + statementSpec.getOptionalContextName() + "'");
        }

        // handle non-specified context
        if (statementSpec.getOptionalContextName() == null) {
            FireAndForgetInstance processorInstance = processor.getProcessorInstanceNoContext();
            if (processorInstance != null) {
                EventBean[] rows = executor.execute(processorInstance);
                if (rows != null && rows.length > 0) {
                    dispatch();
                }
                return new EPPreparedQueryResult(processor.getEventTypePublic(), rows);
            }
        }

        // context partition runtime query
        Collection<Integer> agentInstanceIds = EPPreparedExecuteMethodHelper.getAgentInstanceIds(processor,
                optionalSingleSelector, services.getContextManagementService(), processor.getContextName());

        // collect events and agent instances
        if (agentInstanceIds.isEmpty()) {
            return new EPPreparedQueryResult(processor.getEventTypeResultSetProcessor(),
                    CollectionUtil.EVENTBEANARRAY_EMPTY);
        }

        if (agentInstanceIds.size() == 1) {
            int agentInstanceId = agentInstanceIds.iterator().next();
            FireAndForgetInstance processorInstance = processor.getProcessorInstanceContextById(agentInstanceId);
            EventBean[] rows = executor.execute(processorInstance);
            if (rows.length > 0) {
                dispatch();
            }
            return new EPPreparedQueryResult(processor.getEventTypeResultSetProcessor(), rows);
        }

        ArrayDeque<EventBean> allRows = new ArrayDeque<EventBean>();
        for (int agentInstanceId : agentInstanceIds) {
            FireAndForgetInstance processorInstance = processor.getProcessorInstanceContextById(agentInstanceId);
            if (processorInstance != null) {
                EventBean[] rows = executor.execute(processorInstance);
                allRows.addAll(Arrays.asList(rows));
            }
        }
        if (allRows.size() > 0) {
            dispatch();
        }
        return new EPPreparedQueryResult(processor.getEventTypeResultSetProcessor(),
                allRows.toArray(new EventBean[allRows.size()]));
    } finally {
        if (hasTableAccess) {
            services.getTableService().getTableExprEvaluatorContext().releaseAcquiredLocks();
        }
    }
}
From source file:androidx.navigation.NavDestination.java
/**
 * Build an array containing the hierarchy from the root down to this destination.
 *
 * @return An array containing all of the ids from the root to this destination
 */
@NonNull
int[] buildDeepLinkIds() {
    ArrayDeque<NavDestination> hierarchy = new ArrayDeque<>();
    NavDestination current = this;
    do {
        NavGraph parent = current.getParent();
        if (parent == null || parent.getStartDestination() != current.getId()) {
            hierarchy.addFirst(current);
        }
        current = parent;
    } while (current != null);
    int[] deepLinkIds = new int[hierarchy.size()];
    int index = 0;
    for (NavDestination destination : hierarchy) {
        deepLinkIds[index++] = destination.getId();
    }
    return deepLinkIds;
}
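NavDestination builds the hierarchy root-first by calling addFirst while walking up the graph, then uses size() to allocate the result array in a single pass. Here is the same idiom reduced to plain strings; the class name and path are hypothetical, a sketch rather than the androidx code:

import java.util.ArrayDeque;

public class PathBuilder {
    public static void main(String[] args) {
        // Walk "up" a path and rebuild it root-first via addFirst.
        ArrayDeque<String> hierarchy = new ArrayDeque<>();
        String current = "root/a/b/leaf";
        while (!current.isEmpty()) {
            int slash = current.lastIndexOf('/');
            hierarchy.addFirst(current.substring(slash + 1)); // prepend, so the root ends up first
            current = slash >= 0 ? current.substring(0, slash) : "";
        }
        // size() fixes the array length up front, so one fill pass suffices.
        String[] segments = new String[hierarchy.size()];
        int i = 0;
        for (String s : hierarchy) {
            segments[i++] = s;
        }
        System.out.println(String.join(" -> ", segments)); // root -> a -> b -> leaf
    }
}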
From source file:edu.oregonstate.eecs.mcplan.domains.planetwars.PwSimulator.java
@Override
public void untakeLastAction() {
    final ArrayDeque<PwEvent> frame = event_history.pop();
    final int frame_size = frame.size();
    while (!frame.isEmpty()) {
        final PwEvent e = frame.pop();
        e.undoAction(s);
    }
    s.t -= 1;
    depth_ -= frame_size;
}
From source file:edu.oregonstate.eecs.mcplan.domains.planetwars.PwSimulator.java
@Override
public void takeAction(final JointAction<PwEvent> a) {
    final ArrayDeque<PwEvent> frame = new ArrayDeque<PwEvent>();
    event_history.push(frame);
    for (final PwEvent e : a) {
        applyEvent(e);
    }
    advance();
    s.t += 1;
    depth_ += frame.size();
}
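Together, takeAction and untakeLastAction pair each action with a frame of events and keep a depth counter in sync via frame.size(). A stripped-down sketch of that bookkeeping, assuming a plain String event type; the names (FrameHistory, depth) are hypothetical:

import java.util.ArrayDeque;

public class FrameHistory {
    private final ArrayDeque<ArrayDeque<String>> history = new ArrayDeque<>();
    private int depth = 0;

    public void takeAction(Iterable<String> events) {
        ArrayDeque<String> frame = new ArrayDeque<>();
        for (String e : events) {
            frame.push(e); // record each applied event in the frame
        }
        history.push(frame);
        depth += frame.size(); // depth grows by the number of recorded events
    }

    public void untakeLastAction() {
        ArrayDeque<String> frame = history.pop();
        depth -= frame.size(); // capture the size before draining the frame
        while (!frame.isEmpty()) {
            frame.pop(); // undo in reverse order of application
        }
    }

    public int depth() {
        return depth;
    }
}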
From source file:oracle.kv.hadoop.hive.table.TableStorageHandlerBase.java
/**
 * Method required by the HiveStoragePredicateHandler interface.
 * <p>
 * This method validates the components of the given predicate and
 * ultimately produces the following artifacts:
 *
 * <ul>
 * <li>a Hive object representing the predicate that will be pushed to
 * the backend for server side filtering
 * <li>the String form of the computed predicate to push; which can be
 * passed to the server via the ONSQL query mechanism
 * <li>a Hive object consisting of the remaining components of the
 * original predicate input to this method -- referred to as the
 * 'residual' predicate; which represents the criteria the Hive
 * infrastructure will apply (on the client side) to the results
 * returned after server side filtering has been performed
 * </ul>
 *
 * The predicate analysis model that Hive employs is basically a two
 * step process. First, an instance of the Hive IndexPredicateAnalyzer
 * class is created and its analyzePredicate method is invoked, which
 * returns a Hive class representing the residual predicate, and also
 * populates a Collection whose contents depend on the particular
 * implementation of IndexPredicateAnalyzer that is used. After
 * analyzePredicate is invoked, the analyzer's translateSearchConditions
 * method is invoked to convert the contents of the populated Collection
 * to a Hive object representing the predicate that can be pushed to
 * the server side. Finally, the object that is returned is an instance
 * of the Hive DecomposedPredicate class; which contains the computed
 * predicate to push and the residual predicate.
 * <p>
 * Note that because the Hive built-in IndexPredicateAnalyzer produces
 * only predicates that consist of 'AND' statements, and which correspond
 * to PrimaryKey based or IndexKey based predicates, if the Hive built-in
 * analyzer does not produce a predicate to push, then a custom analyzer
 * that extends the capabilities of the Hive built-in analyzer is
 * employed. This extended analyzer handles statements that the built-in
 * analyzer does not handle. Additionally, whereas the built-in analyzer
 * populates a List of Hive IndexSearchConditions corresponding to the
 * filtering criteria of the predicate to push, the extended analyzer
 * populates an ArrayDeque in which the top (first element) of the
 * Deque is a Hive object consisting of all the components of the original
 * input predicate, but with 'invalid' operators replaced with 'valid'
 * operators; for example, with 'IN <list>' replaced with 'OR' statements.
 * <p>
 * In each case, translateSearchConditions constructs the appropriate
 * Hive predicate to push from the contents of the given Collection;
 * either a List of IndexSearchCondition, or an ArrayDeque.
 */
@Override
@SuppressWarnings("deprecation")
public DecomposedPredicate decomposePredicate(JobConf jobConfig,
        org.apache.hadoop.hive.serde2.Deserializer deserializer, ExprNodeDesc predicate) {

    /* Reset query state to default values. */
    TableHiveInputFormat.resetQueryInfo();
    DecomposedPredicate decomposedPredicate = null;

    /*
     * Try the Hive built-in analyzer first; which will validate the
     * components of the given predicate and separate them into two
     * disjoint sets: a set of search conditions that correspond to
     * either a valid PrimaryKey or IndexKey (and optional FieldRange)
     * that can be scanned (in the backend KVStore server) using one
     * of the TableIterators; and a set containing the remaining components
     * of the predicate (the 'residual' predicate), which Hive will
     * apply to the results returned after the search conditions have
     * first been applied on (pushed to) the backend.
     */
    final IndexPredicateAnalyzer analyzer =
        TableHiveInputFormat.sargablePredicateAnalyzer(predicate, (TableSerDe) deserializer);

    if (analyzer != null) { /* Use TableScan or IndexScan */

        /* Decompose predicate into search conditions and residual. */
        final List<IndexSearchCondition> searchConditions = new ArrayList<IndexSearchCondition>();
        final ExprNodeGenericFuncDesc residualPredicate =
            (ExprNodeGenericFuncDesc) analyzer.analyzePredicate(predicate, searchConditions);

        decomposedPredicate = new DecomposedPredicate();
        decomposedPredicate.pushedPredicate = analyzer.translateSearchConditions(searchConditions);
        decomposedPredicate.residualPredicate = residualPredicate;

        /*
         * Valid search conditions and residual have been obtained.
         * Determine whether the search conditions are index based or
         * based on the table's primary key. If index based, then tell
         * the InputFormat to build splits and scan (iterate) based on
         * shards; otherwise, tell the InputFormat to base the iterator
         * on partition sets.
         */
        final StringBuilder whereBuf = new StringBuilder();
        TableHiveInputFormat.buildPushPredicate(decomposedPredicate.pushedPredicate, whereBuf);
        final String whereStr = whereBuf.toString();

        TableHiveInputFormat.setQueryInfo(searchConditions, (TableSerDe) deserializer, whereStr);

        if (LOG.isDebugEnabled()) {
            LOG.debug("-----------------------------");
            LOG.debug("residual = " + decomposedPredicate.residualPredicate);
            LOG.debug("predicate = " + decomposedPredicate.pushedPredicate);
            LOG.debug("search conditions = " + searchConditions);

            switch (TableHiveInputFormat.getQueryBy()) {
            case TableInputSplit.QUERY_BY_INDEX:
                LOG.debug("push predicate to secondary index [" + "WHERE " + whereStr + "]");
                break;
            case TableInputSplit.QUERY_BY_PRIMARY_ALL_PARTITIONS:
            case TableInputSplit.QUERY_BY_PRIMARY_SINGLE_PARTITION:
                LOG.debug("push predicate to primary index [" + "WHERE " + whereStr + "]");
                break;
            default:
                break;
            }
            LOG.debug("-----------------------------");
        }

    } else { /* IndexPredicateAnalyzer == null ==> Use native query */

        /*
         * The given predicate does not consist of search conditions that
         * correspond to either a valid PrimaryKey or IndexKey (or
         * FieldRange). Thus, employ the extended analyzer to handle
         * statements the built-in analyzer cannot handle.
         */
        final TableHiveInputFormat.ExtendedPredicateAnalyzer extendedAnalyzer =
            TableHiveInputFormat.createPredicateAnalyzerForOnql((TableSerDe) deserializer);

        if (extendedAnalyzer == null) {
            LOG.debug("extended predicate analyzer = null ... NO PREDICATE PUSHDOWN");
            return null;
        }

        final ArrayDeque<ExprNodeDesc> pushPredicateDeque = new ArrayDeque<ExprNodeDesc>();
        final ExprNodeGenericFuncDesc residualPredicate =
            (ExprNodeGenericFuncDesc) extendedAnalyzer.analyzePredicate(predicate, pushPredicateDeque);

        if (LOG.isTraceEnabled()) {
            final ExprNodeDesc[] qElements =
                pushPredicateDeque.toArray(new ExprNodeDesc[pushPredicateDeque.size()]);
            LOG.trace("-----------------------------");
            LOG.trace("push predicate queue elements:");
            for (int i = 0; i < qElements.length; i++) {
                LOG.trace("element[" + i + "] = " + qElements[i]);
            }
            LOG.trace("-----------------------------");
        }

        decomposedPredicate = new DecomposedPredicate();
        final StringBuilder whereBuf = new StringBuilder();

        decomposedPredicate.residualPredicate = residualPredicate;
        decomposedPredicate.pushedPredicate =
            extendedAnalyzer.translateSearchConditions(pushPredicateDeque, whereBuf);

        if (decomposedPredicate.pushedPredicate != null) {
            if (LOG.isTraceEnabled()) {
                TableHiveInputFormat.ExtendedPredicateAnalyzer
                    .displayNodeTree(decomposedPredicate.pushedPredicate);
            }

            final String whereStr = whereBuf.toString();

            if (LOG.isDebugEnabled()) {
                LOG.debug("-----------------------------");
                LOG.debug("residual = " + decomposedPredicate.residualPredicate);
                LOG.debug("predicate = " + decomposedPredicate.pushedPredicate);
                LOG.debug("push predicate via native query [" + "WHERE " + whereStr + "]");
                LOG.debug("-----------------------------");
            }
            TableHiveInputFormat.setQueryInfo((TableSerDe) deserializer, whereStr);

        } else {
            LOG.debug("Extended predicate analyzer found no predicate "
                    + "to push. Will use all of residual for filtering.");
        }
    } /* endif: IndexPredicateAnalyzer != null or == null */

    return decomposedPredicate;
}
From source file:stroom.util.StreamGrepTool.java
private void processFile(final StreamStore streamStore, final long streamId, final String match) {
    try {
        final StreamSource streamSource = streamStore.openStreamSource(streamId);

        if (streamSource != null) {
            final InputStream inputStream = streamSource.getInputStream();

            // Build up 2 buffers so we can output the content either side of
            // the matching line
            final ArrayDeque<String> preBuffer = new ArrayDeque<>();
            final ArrayDeque<String> postBuffer = new ArrayDeque<>();

            final LineNumberReader lineNumberReader = new LineNumberReader(
                    new InputStreamReader(inputStream, StreamUtil.DEFAULT_CHARSET));

            String aline = null;
            while ((aline = lineNumberReader.readLine()) != null) {
                String[] lines = new String[] { aline };
                if (addLineBreak != null) {
                    lines = aline.split(addLineBreak);
                }

                for (final String line : lines) {
                    if (match == null) {
                        System.out.println(lineNumberReader.getLineNumber() + ":" + line);
                    } else {
                        postBuffer.add(lineNumberReader.getLineNumber() + ":" + line);

                        if (postBuffer.size() > 5) {
                            final String searchLine = postBuffer.pop();
                            checkMatch(match, preBuffer, postBuffer, searchLine);
                            preBuffer.add(searchLine);
                            if (preBuffer.size() > 5) {
                                preBuffer.pop();
                            }
                        }
                    }
                }
            }

            // Look at the end
            while (postBuffer.size() > 0) {
                final String searchLine = postBuffer.pop();
                checkMatch(match, preBuffer, postBuffer, searchLine);
                preBuffer.add(searchLine);
                if (preBuffer.size() > 5) {
                    preBuffer.pop();
                }
            }

            inputStream.close();
            streamStore.closeStreamSource(streamSource);
        }
    } catch (final Exception ex) {
        ex.printStackTrace();
    }
}
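StreamGrepTool caps both context buffers at five lines by checking size() after each add, which is the standard way to keep a bounded sliding window on an ArrayDeque. The same idiom in isolation, with a hypothetical ContextWindow class and a window of three lines:

import java.util.ArrayDeque;
import java.util.List;

public class ContextWindow {
    public static void main(String[] args) {
        ArrayDeque<String> pre = new ArrayDeque<>();
        for (String line : List.of("a", "b", "MATCH", "c", "d", "e", "f")) {
            if (line.contains("MATCH")) {
                // pre holds at most the three lines seen before this one
                System.out.println("context: " + pre + " -> " + line);
            }
            pre.add(line);
            if (pre.size() > 3) {
                pre.pop(); // evict the oldest line to keep the window bounded
            }
        }
    }
}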
From source file:com.espertech.esper.epl.expression.ExprTimePeriodImpl.java
public void validate(ExprValidationContext validationContext) throws ExprValidationException {
    evaluators = ExprNodeUtility.getEvaluators(this.getChildNodes());
    for (ExprNode childNode : this.getChildNodes()) {
        validate(childNode);
    }

    ArrayDeque<TimePeriodAdder> list = new ArrayDeque<TimePeriodAdder>();
    if (hasYear) {
        list.add(new TimePeriodAdderYear());
    }
    if (hasMonth) {
        list.add(new TimePeriodAdderMonth());
    }
    if (hasWeek) {
        list.add(new TimePeriodAdderWeek());
    }
    if (hasDay) {
        list.add(new TimePeriodAdderDay());
    }
    if (hasHour) {
        list.add(new TimePeriodAdderHour());
    }
    if (hasMinute) {
        list.add(new TimePeriodAdderMinute());
    }
    if (hasSecond) {
        list.add(new TimePeriodAdderSecond());
    }
    if (hasMillisecond) {
        list.add(new TimePeriodAdderMSec());
    }
    adders = list.toArray(new TimePeriodAdder[list.size()]);
}
From source file:com.google.gwt.emultest.java.util.ArrayDequeTest.java
public void testRemoveLast() {
    Object o1 = new Object();
    Object o2 = new Object();
    ArrayDeque<Object> deque = new ArrayDeque<>();

    try {
        deque.removeLast();
        fail();
    } catch (NoSuchElementException expected) {
    }

    deque.add(o1);
    assertEquals(o1, deque.removeLast());
    assertTrue(deque.isEmpty());

    deque.add(o1);
    deque.add(o2);
    assertEquals(o2, deque.removeLast());
    checkDequeSizeAndContent(deque, o1);
    assertEquals(o1, deque.removeLast());
    assertEquals(0, deque.size());

    try {
        deque.removeLast();
        fail();
    } catch (NoSuchElementException expected) {
    }
}
From source file:com.comcast.oscar.dictionary.DictionaryTLV.java
/**
 * @param adqsTypeHierarchyStack
 * @param sValue
 * @param jaTlvDictionary JSONArray
 */
private static void searchAndUpdateDictionaryValue(ArrayDeque<String> adqsTypeHierarchyStack,
        JSONArray jaTlvDictionary, final String sValue) {

    boolean localDebug = Boolean.FALSE;

    // Create a local copy
    ArrayDeque<String> adqsTypeHierarchyStackLocal = adqsTypeHierarchyStack.clone();

    // Cycle thru the JSON Array and inspect each JSON Object
    for (int iJsonArrayIndex = 0; iJsonArrayIndex < jaTlvDictionary.length(); iJsonArrayIndex++) {

        if (debug | localDebug)
            System.out.println("DictionaryTLV.searchAndUpdateDictionaryValue(ad,ja,s) +-----INDEX: "
                    + iJsonArrayIndex + "-----+");

        JSONObject joTlvDictionary = null;

        try {
            joTlvDictionary = jaTlvDictionary.getJSONObject(iJsonArrayIndex);
        } catch (JSONException e1) {
            e1.printStackTrace();
        }

        try {
            // Check to see if this object has SubTypes; if so, go into the SubType Array
            if ((joTlvDictionary.getBoolean(Dictionary.ARE_SUBTYPES)) && (joTlvDictionary
                    .getString(Dictionary.TLV_NAME).equalsIgnoreCase(adqsTypeHierarchyStackLocal.peekLast()))) {

                if (debug | localDebug) {
                    System.out.println("DictionaryTLV.searchAndUpdateDictionaryValue(ad,ja,s) SUB-TYPE-ARRAY-TRUE: "
                            + joTlvDictionary);
                    System.out.println("DictionaryTLV.searchAndUpdateDictionaryValue(ad,ja,s) -> TLV-SUB-NAME: -> "
                            + adqsTypeHierarchyStackLocal.peekLast());
                }

                adqsTypeHierarchyStackLocal.removeLast();

                searchAndUpdateDictionaryValue(adqsTypeHierarchyStackLocal,
                        joTlvDictionary.getJSONArray(Dictionary.SUBTYPE_ARRAY), sValue);

            } else if (adqsTypeHierarchyStackLocal.size() == 1) {

                if (debug | localDebug)
                    System.out.println("DictionaryTLV.searchAndUpdateDictionaryValue(ad,ja,s) SUB-TYPE-ARRAY-FALSE: "
                            + joTlvDictionary);

                searchAndUpdateDictionaryValue(adqsTypeHierarchyStackLocal, joTlvDictionary, sValue);
            }
        } catch (JSONException e) {
            e.printStackTrace();
        }
    }
}
From source file:com.espertech.esper.core.service.EPRuntimeImpl.java
/**
 * Processing multiple filter matches for a statement.
 * @param handle statement handle
 * @param callbackList object containing callbacks
 * @param theEvent to process
 * @param version filter version
 */
public void processStatementFilterMultiple(EPStatementAgentInstanceHandle handle, Object callbackList,
        EventBean theEvent, long version) {
    handle.getStatementAgentInstanceLock().acquireWriteLock(services.getStatementLockFactory());
    try {
        if (handle.isHasVariables()) {
            services.getVariableService().setLocalVersion();
        }
        if (!handle.isCurrentFilter(version)) {
            if (handle.getFilterFaultHandler() != null) {
                handle.getFilterFaultHandler().handleFilterFault(theEvent, version);
            }
            ArrayDeque<FilterHandle> callbackListNew = getCallbackList(theEvent, handle.getStatementId());
            if (callbackListNew.isEmpty()) {
                callbackList = Collections.emptyList();
            } else if (callbackListNew.size() == 1) {
                callbackList = ((EPStatementHandleCallback) callbackListNew.getFirst()).getFilterCallback();
            } else {
                ArrayDeque<FilterHandleCallback> q =
                    new ArrayDeque<FilterHandleCallback>(callbackListNew.size());
                callbackList = q;
                for (FilterHandle callback : callbackListNew) {
                    EPStatementHandleCallback handleCallbackFilter = (EPStatementHandleCallback) callback;
                    q.add(handleCallbackFilter.getFilterCallback());
                }
            }
        }

        if (callbackList instanceof Collection) {
            Collection<FilterHandleCallback> callbackColl = (Collection<FilterHandleCallback>) callbackList;
            if (isSubselectPreeval) {
                // sub-selects always go first
                for (FilterHandleCallback callback : callbackColl) {
                    if (callback.isSubSelect()) {
                        callback.matchFound(theEvent, callbackColl);
                    }
                }
                for (FilterHandleCallback callback : callbackColl) {
                    if (!callback.isSubSelect()) {
                        callback.matchFound(theEvent, callbackColl);
                    }
                }
            } else {
                // sub-selects always go last
                for (FilterHandleCallback callback : callbackColl) {
                    if (!callback.isSubSelect()) {
                        callback.matchFound(theEvent, callbackColl);
                    }
                }
                for (FilterHandleCallback callback : callbackColl) {
                    if (callback.isSubSelect()) {
                        callback.matchFound(theEvent, callbackColl);
                    }
                }
            }
        } else {
            FilterHandleCallback single = (FilterHandleCallback) callbackList;
            single.matchFound(theEvent, null);
        }

        // internal join processing, if applicable
        handle.internalDispatch(this.engineFilterAndDispatchTimeContext);
    } catch (RuntimeException ex) {
        services.getExceptionHandlingService().handleException(ex, handle);
    } finally {
        handle.getStatementAgentInstanceLock().releaseWriteLock(services.getStatementLockFactory());
    }
}
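EPRuntimeImpl branches on size() so that a single matching callback is dispatched directly, without allocating a wrapper collection. A generic sketch of that fast path, assuming Runnable stands in for the handler type; all names here are hypothetical:

import java.util.ArrayDeque;
import java.util.Collection;
import java.util.Collections;

public final class CallbackDispatch {
    // Collapse a deque of handlers into the cheapest shape for later dispatch.
    static Object collapse(ArrayDeque<Runnable> callbacks) {
        if (callbacks.isEmpty()) {
            return Collections.emptyList();  // nothing matched
        } else if (callbacks.size() == 1) {
            return callbacks.getFirst();     // single handler: no wrapper allocation
        }
        return new ArrayDeque<>(callbacks);  // multiple handlers: defensive copy
    }

    @SuppressWarnings("unchecked")
    static void dispatch(Object collapsed) {
        if (collapsed instanceof Collection) {
            for (Runnable r : (Collection<Runnable>) collapsed) {
                r.run();
            }
        } else {
            ((Runnable) collapsed).run();    // the size()==1 fast path
        }
    }
}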