List of usage examples for java.util Deque push
void push(E e);
From source file:org.finra.herd.service.impl.JobServiceImpl.java
@Override public Job deleteJob(String jobId, JobDeleteRequest jobDeleteRequest) throws Exception { Assert.hasText(jobId, "jobId must be specified"); Assert.notNull(jobDeleteRequest, "jobDeleteRequest must be specified"); Assert.hasText(jobDeleteRequest.getDeleteReason(), "deleteReason must be specified"); // Trim input parameters. String localJobId = jobId.trim(); ProcessInstance mainProcessInstance = activitiService.getProcessInstanceById(localJobId); if (mainProcessInstance != null) { checkPermissions(mainProcessInstance.getProcessDefinitionKey(), new NamespacePermissionEnum[] { NamespacePermissionEnum.EXECUTE }); // Load all processes (main process and sub-processes) into a deque to be later deleted. Deque<String> processInstanceIds = new ArrayDeque<>(); processInstanceIds.push(mainProcessInstance.getProcessInstanceId()); Deque<String> superProcessInstanceIds = new ArrayDeque<>(); superProcessInstanceIds.push(mainProcessInstance.getProcessInstanceId()); while (!superProcessInstanceIds.isEmpty()) { String superProcessInstanceId = superProcessInstanceIds.pop(); // Get all executions with the parent id equal to the super process instance id. for (Execution execution : activitiRuntimeService.createExecutionQuery() .parentId(superProcessInstanceId).list()) { processInstanceIds.push(execution.getId()); }// w w w.ja v a2s.com // Get all active sub-processes for the super process instance id. for (ProcessInstance subProcessInstance : activitiRuntimeService.createProcessInstanceQuery() .superProcessInstanceId(superProcessInstanceId).active().list()) { processInstanceIds.push(subProcessInstance.getId()); superProcessInstanceIds.push(subProcessInstance.getId()); } } // Delete all processes individually in LIFO order. 
while (!processInstanceIds.isEmpty()) { activitiService.deleteProcessInstance(processInstanceIds.pop(), jobDeleteRequest.getDeleteReason()); } } else { throw new ObjectNotFoundException( String.format("Job with ID \"%s\" does not exist or is already completed.", localJobId)); } return getJob(localJobId, false, false); }
From source file:com.puppycrawl.tools.checkstyle.checks.javadoc.JavadocStyleCheck.java
/** * Checks to see if there are any unclosed tags on the stack. The token * represents a html tag that has been closed and has a corresponding open * tag on the stack. Any tags, except single tags, that were opened * (pushed on the stack) after the token are missing a close. * * @param htmlStack the stack of opened HTML tags. * @param token the current HTML tag name that has been closed. *//*w w w .j av a 2 s .c om*/ private void checkUnclosedTags(Deque<HtmlTag> htmlStack, String token) { final Deque<HtmlTag> unclosedTags = new ArrayDeque<>(); HtmlTag lastOpenTag = htmlStack.pop(); while (!token.equalsIgnoreCase(lastOpenTag.getId())) { // Find unclosed elements. Put them on a stack so the // output order won't be back-to-front. if (isSingleTag(lastOpenTag)) { lastOpenTag = htmlStack.pop(); } else { unclosedTags.push(lastOpenTag); lastOpenTag = htmlStack.pop(); } } // Output the unterminated tags, if any // Skip multiples, like <b>..<b> String lastFound = ""; for (final HtmlTag htag : unclosedTags) { lastOpenTag = htag; if (lastOpenTag.getId().equals(lastFound)) { continue; } lastFound = lastOpenTag.getId(); log(lastOpenTag.getLineNo(), lastOpenTag.getPosition(), UNCLOSED_HTML, lastOpenTag); } }
From source file:org.apache.xml.security.stax.impl.processor.input.AbstractDecryptInputProcessor.java
/**
 * Reads the XML events that make up an {@code xenc:EncryptedData} structure from the
 * processor chain, buffers them, and unmarshals them into an {@link EncryptedDataType}
 * via JAXB.
 *
 * @param isSecurityHeaderEvent whether to pull events via processHeaderEvent() rather
 *        than processEvent()
 * @param xmlSecEvent the already-consumed start event of the EncryptedData element
 * @param subInputProcessorChain the chain to pull the remaining events from
 * @return the unmarshalled EncryptedData structure
 * @throws XMLSecurityException if the structure exceeds the configured event limit or
 *         JAXB unmarshalling fails
 */
private EncryptedDataType parseEncryptedDataStructure(boolean isSecurityHeaderEvent, XMLSecEvent xmlSecEvent,
        InputProcessorChain subInputProcessorChain) throws XMLStreamException, XMLSecurityException {
    // Events are collected here and later replayed through an XMLSecurityEventReader.
    Deque<XMLSecEvent> xmlSecEvents = new ArrayDeque<XMLSecEvent>();
    xmlSecEvents.push(xmlSecEvent);
    XMLSecEvent encryptedDataXMLSecEvent;
    int count = 0;
    int keyInfoCount = 0;
    do {
        subInputProcessorChain.reset();
        if (isSecurityHeaderEvent) {
            encryptedDataXMLSecEvent = subInputProcessorChain.processHeaderEvent();
        } else {
            encryptedDataXMLSecEvent = subInputProcessorChain.processEvent();
        }
        xmlSecEvents.push(encryptedDataXMLSecEvent);
        // Guard against unbounded structures (denial-of-service protection).
        if (++count >= maximumAllowedEncryptedDataEvents) {
            throw new XMLSecurityException("stax.xmlStructureSizeExceeded",
                    new Object[] { maximumAllowedEncryptedDataEvents });
        }
        // The keyInfoCount is necessary to prevent early while-loop abort when the
        // KeyInfo also contains a CipherValue.
        if (encryptedDataXMLSecEvent.getEventType() == XMLStreamConstants.START_ELEMENT
                && encryptedDataXMLSecEvent.asStartElement().getName()
                        .equals(XMLSecurityConstants.TAG_dsig_KeyInfo)) {
            keyInfoCount++;
        } else if (encryptedDataXMLSecEvent.getEventType() == XMLStreamConstants.END_ELEMENT
                && encryptedDataXMLSecEvent.asEndElement().getName()
                        .equals(XMLSecurityConstants.TAG_dsig_KeyInfo)) {
            keyInfoCount--;
        }
        // Stop once we are outside any KeyInfo and have reached either the start of the
        // CipherValue or the end of the whole EncryptedData element.
    } while (!((encryptedDataXMLSecEvent.getEventType() == XMLStreamConstants.START_ELEMENT
            && encryptedDataXMLSecEvent.asStartElement().getName()
                    .equals(XMLSecurityConstants.TAG_xenc_CipherValue)
            || encryptedDataXMLSecEvent.getEventType() == XMLStreamConstants.END_ELEMENT
                    && encryptedDataXMLSecEvent.asEndElement().getName()
                            .equals(XMLSecurityConstants.TAG_xenc_EncryptedData))
            && keyInfoCount == 0));

    // Synthesize the closing events for CipherValue/CipherData/EncryptedData so the
    // buffered sequence forms a complete EncryptedData fragment for JAXB.
    // NOTE(review): these are pushed unconditionally, even when the loop stopped on the
    // EncryptedData end element — presumably the unmarshaller tolerates that; confirm.
    xmlSecEvents.push(XMLSecEventFactory.createXmlSecEndElement(XMLSecurityConstants.TAG_xenc_CipherValue));
    xmlSecEvents.push(XMLSecEventFactory.createXmlSecEndElement(XMLSecurityConstants.TAG_xenc_CipherData));
    xmlSecEvents.push(XMLSecEventFactory.createXmlSecEndElement(XMLSecurityConstants.TAG_xenc_EncryptedData));

    EncryptedDataType encryptedDataType;
    try {
        Unmarshaller unmarshaller = XMLSecurityConstants
                .getJaxbUnmarshaller(getSecurityProperties().isDisableSchemaValidation());
        @SuppressWarnings("unchecked")
        JAXBElement<EncryptedDataType> encryptedDataTypeJAXBElement = (JAXBElement<EncryptedDataType>) unmarshaller
                .unmarshal(new XMLSecurityEventReader(xmlSecEvents, 0));
        encryptedDataType = encryptedDataTypeJAXBElement.getValue();
    } catch (JAXBException e) {
        // Preserve the JAXB failure as the cause.
        throw new XMLSecurityException(e);
    }
    return encryptedDataType;
}
From source file:org.apache.asterix.om.typecomputer.impl.RecordRemoveFieldsTypeComputer.java
private ARecordType deepCheckAndCopy(Deque<String> fieldPath, ARecordType srcRecType, List<List<String>> pathList, boolean isOpen) throws AlgebricksException { // Make sure the current path is valid before going further if (isRemovePath(fieldPath, pathList)) { return null; }//from www. j a v a 2 s. c o m String srcFieldNames[] = srcRecType.getFieldNames(); IAType srcFieldTypes[] = srcRecType.getFieldTypes(); List<IAType> destFieldTypes = new ArrayList<>(); List<String> destFieldNames = new ArrayList<>(); for (int i = 0; i < srcFieldNames.length; i++) { fieldPath.push(srcFieldNames[i]); if (!isRemovePath(fieldPath, pathList)) { if (srcFieldTypes[i].getTypeTag() == ATypeTag.RECORD) { ARecordType subRecord = (ARecordType) srcFieldTypes[i]; subRecord = deepCheckAndCopy(fieldPath, subRecord, pathList, isOpen); if (subRecord != null) { destFieldNames.add(srcFieldNames[i]); destFieldTypes.add(subRecord); } } else { destFieldNames.add(srcFieldNames[i]); destFieldTypes.add(srcFieldTypes[i]); } } fieldPath.pop(); } int n = destFieldNames.size(); if (n == 0) { return null; } return new ARecordType(srcRecType.getTypeName(), destFieldNames.toArray(new String[n]), destFieldTypes.toArray(new IAType[n]), isOpen); }
From source file:org.teavm.flavour.templates.parsing.Parser.java
private void pushVar(String name, ValueType type) { Deque<ValueType> stack = variables.get(name); if (stack == null) { stack = new ArrayDeque<>(); variables.put(name, stack);/*from w w w . j a v a 2s.c o m*/ } stack.push(type); }
From source file:de.escalon.hypermedia.hydra.serialize.JacksonHydraSerializer.java
private void serializeContext(Object bean, JsonGenerator jgen, SerializerProvider serializerProvider, Deque<String> deque) throws IOException { try {/*from ww w . jav a 2 s. c o m*/ // TODO use serializerProvider.getAttributes to hold a stack of contexts // and check if we need to write a context for the current bean at all // If it is in the same vocab: no context // If the terms are already defined in the context: no context SerializationConfig config = serializerProvider.getConfig(); final Class<?> mixInClass = config.findMixInClassFor(bean.getClass()); String vocab = getVocab(bean, mixInClass); Map<String, Object> terms = getTerms(bean, mixInClass); final String currentVocab = deque.peek(); deque.push(vocab); boolean mustWriteContext; if (currentVocab == null || !vocab.equals(currentVocab)) { mustWriteContext = true; } else { // only write if bean has terms if (terms.isEmpty()) { mustWriteContext = false; } else { // TODO actually, need not repeat vocab in context if same mustWriteContext = true; } } if (mustWriteContext) { // begin context // default context: schema.org vocab or vocab package annotation jgen.writeObjectFieldStart("@context"); // TODO do not repeat vocab if already defined in current context if (currentVocab == null || !vocab.equals(currentVocab)) { jgen.writeStringField(AT_VOCAB, vocab); } for (Map.Entry<String, Object> termEntry : terms.entrySet()) { if (termEntry.getValue() instanceof String) { jgen.writeStringField(termEntry.getKey(), termEntry.getValue().toString()); } else { jgen.writeObjectField(termEntry.getKey(), termEntry.getValue()); } } jgen.writeEndObject(); } // end context // TODO build the context from @Vocab and @Term and @Expose and write it as local or external context with // TODO jsonld extension (using apt?) // TODO also allow manually created jsonld contexts // TODO how to define a context containing several context objects? 
@context is then an array of // TODO external context strings pointing to json-ld, and json objects containing terms // TODO another option: create custom vocabulary without reference to public vocabs // TODO support additionalType from goodrelations } catch (Exception e) { throw new RuntimeException(e); } }
From source file:org.apache.pig.builtin.Utf8StorageConverter.java
private Map<String, Object> consumeMap(PushbackInputStream in, ResourceFieldSchema fieldSchema) throws IOException { int buf;// www . ja v a 2 s.c o m boolean emptyMap = true; while ((buf = in.read()) != '[') { if (buf == -1) { throw new IOException("Unexpect end of map"); } } HashMap<String, Object> m = new HashMap<String, Object>(); ByteArrayOutputStream mOut = new ByteArrayOutputStream(BUFFER_SIZE); while (true) { // Read key (assume key can not contains special character such as #, (, [, {, }, ], ) while ((buf = in.read()) != '#') { // end of map if (emptyMap && buf == ']') { return m; } if (buf == -1) { throw new IOException("Unexpect end of map"); } emptyMap = false; mOut.write(buf); } String key = bytesToCharArray(mOut.toByteArray()); if (key.length() == 0) throw new IOException("Map key can not be null"); // Read value mOut.reset(); Deque<Character> level = new LinkedList<Character>(); // keep track of nested tuple/bag/map. We do not interpret, save them as bytearray while (true) { buf = in.read(); if (buf == -1) { throw new IOException("Unexpect end of map"); } if (buf == '[' || buf == '{' || buf == '(') { level.push((char) buf); } else if (buf == ']' && level.isEmpty()) // End of map break; else if (buf == ']' || buf == '}' || buf == ')') { if (level.isEmpty()) throw new IOException("Malformed map"); if (level.peek() == findStartChar((char) buf)) level.pop(); } else if (buf == ',' && level.isEmpty()) { // Current map item complete break; } mOut.write(buf); } Object value = null; if (fieldSchema != null && fieldSchema.getSchema() != null && mOut.size() > 0) { value = bytesToObject(mOut.toByteArray(), fieldSchema.getSchema().getFields()[0]); } else if (mOut.size() > 0) { // untyped map value = new DataByteArray(mOut.toByteArray()); } m.put(key, value); mOut.reset(); if (buf == ']') break; } return m; }
From source file:com.grepcurl.random.ObjectGenerator.java
protected <T> T generate(Class<T> klass, SetterOverrides setterOverrides, Deque<Object> objectStack, Object... constructorArgs) { Validate.notNull(klass);//from w ww.ja v a2s . co m Validate.notNull(constructorArgs); if (verbose) { log(String.format("generating object of type: %s, with args: %s, with overrides: %s", klass, Arrays.toString(constructorArgs), setterOverrides)); } try { Class[] constructorTypes = _toClasses(constructorArgs); T t; if (klass.isEnum()) { int randomOrdinal = randomInt(0, klass.getEnumConstants().length - 1); t = klass.getEnumConstants()[randomOrdinal]; } else { t = klass.getConstructor(constructorTypes).newInstance(constructorArgs); } objectStack.push(t); Method[] methods = klass.getMethods(); for (Method method : methods) { _processMethod(method, setterOverrides, t, objectStack); } objectStack.pop(); return t; } catch (Exception e) { e.printStackTrace(); throw new FailedRandomObjectGenerationException(e); } }
From source file:org.apache.pig.builtin.Utf8StorageConverter.java
private Tuple consumeTuple(PushbackInputStream in, ResourceFieldSchema fieldSchema) throws IOException { if (fieldSchema == null) { throw new IOException("Schema is null"); }/* www. jav a2s .com*/ int buf; ByteArrayOutputStream mOut; while ((buf = in.read()) != '(' || buf == '}') { if (buf == -1) { throw new IOException("Unexpect end of tuple"); } if (buf == '}') { in.unread(buf); return null; } } Tuple t = TupleFactory.getInstance().newTuple(); if (fieldSchema.getSchema() != null && fieldSchema.getSchema().getFields().length != 0) { ResourceFieldSchema[] fss = fieldSchema.getSchema().getFields(); // Interpret item inside tuple one by one based on the inner schema for (int i = 0; i < fss.length; i++) { Object field; ResourceFieldSchema fs = fss[i]; int delimit = ','; if (i == fss.length - 1) delimit = ')'; if (DataType.isComplex(fs.getType())) { field = consumeComplexType(in, fs); while ((buf = in.read()) != delimit) { if (buf == -1) { throw new IOException("Unexpect end of tuple"); } } } else { mOut = new ByteArrayOutputStream(BUFFER_SIZE); while ((buf = in.read()) != delimit) { if (buf == -1) { throw new IOException("Unexpect end of tuple"); } if (buf == delimit) break; mOut.write(buf); } field = parseSimpleType(mOut.toByteArray(), fs); } t.append(field); } } else { // No inner schema, treat everything inside tuple as bytearray Deque<Character> level = new LinkedList<Character>(); // keep track of nested tuple/bag/map. 
We do not interpret, save them as bytearray mOut = new ByteArrayOutputStream(BUFFER_SIZE); while (true) { buf = in.read(); if (buf == -1) { throw new IOException("Unexpect end of tuple"); } if (buf == '[' || buf == '{' || buf == '(') { level.push((char) buf); mOut.write(buf); } else if (buf == ')' && level.isEmpty()) // End of tuple { DataByteArray value = new DataByteArray(mOut.toByteArray()); t.append(value); break; } else if (buf == ',' && level.isEmpty()) { DataByteArray value = new DataByteArray(mOut.toByteArray()); t.append(value); mOut.reset(); } else if (buf == ']' || buf == '}' || buf == ')') { if (level.peek() == findStartChar((char) buf)) level.pop(); else throw new IOException("Malformed tuple"); mOut.write(buf); } else mOut.write(buf); } } return t; }