List of usage examples for java.util.Deque pop()
E pop();
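pop() removes and returns the head of the deque (equivalent to removeFirst()), treating the deque as a LIFO stack. Before the project examples below, here is a minimal, self-contained sketch of the basic contract, using ArrayDeque (any Deque implementation behaves the same way); note that pop() throws NoSuchElementException on an empty deque, while poll() returns null instead:

import java.util.ArrayDeque;
import java.util.Deque;
import java.util.NoSuchElementException;

public class DequePopDemo {
    public static void main(String[] args) {
        Deque<String> stack = new ArrayDeque<>();
        stack.push("first");
        stack.push("second");

        // pop() is LIFO: it returns the most recently pushed element.
        System.out.println(stack.pop()); // prints "second"
        System.out.println(stack.pop()); // prints "first"

        // On an empty deque, pop() throws; poll() would return null instead.
        try {
            stack.pop();
        } catch (NoSuchElementException e) {
            System.out.println("pop() on an empty deque throws NoSuchElementException");
        }
    }
}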
From source file:org.apache.openmeetings.web.room.wb.WbPanel.java
private UndoObject getUndo(Long wbId) {
    if (wbId == null || !undoList.containsKey(wbId)) {
        return null;
    }
    Deque<UndoObject> deq = undoList.get(wbId);
    return deq.isEmpty() ? null : deq.pop();
}
From source file:org.apache.phoenix.hive.HiveTestUtil.java
/**
 * Given the current configurations (e.g., hadoop version and execution mode), return
 * the correct file name to compare with the current test run output.
 *
 * @param outDir The directory where the reference log files are stored.
 * @param testName The test file name (terminated by ".out").
 * @return The file name appended with the configuration values if it exists.
 */
public String outPath(String outDir, String testName) {
    String ret = (new File(outDir, testName)).getPath();
    // List of configurations. Currently the list consists of hadoop version and execution
    // mode only.
    List<String> configs = new ArrayList<String>();
    configs.add(this.hadoopVer);

    Deque<String> stack = new LinkedList<String>();
    StringBuilder sb = new StringBuilder();
    sb.append(testName);
    stack.push(sb.toString());

    // Example file names are input1.q.out_0.20.0_minimr or input2.q.out_0.17.
    for (String s : configs) {
        sb.append('_');
        sb.append(s);
        stack.push(sb.toString());
    }
    while (stack.size() > 0) {
        String fileName = stack.pop();
        File f = new File(outDir, fileName);
        if (f.exists()) {
            ret = f.getPath();
            break;
        }
    }
    return ret;
}
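The stack here exploits pop()'s LIFO order: the most specific candidate name (testName plus all configuration suffixes) is pushed last, so it is checked first, with a fallback to the bare testName. A minimal sketch of that lookup order, with a hypothetical set of file names standing in for the File.exists() check:

import java.util.ArrayDeque;
import java.util.Deque;
import java.util.Set;

public class MostSpecificFirstDemo {
    public static void main(String[] args) {
        // Hypothetical reference files, standing in for the file system.
        Set<String> existingFiles = Set.of("input1.q.out", "input1.q.out_0.20.0");

        Deque<String> stack = new ArrayDeque<>();
        StringBuilder sb = new StringBuilder("input1.q.out");
        stack.push(sb.toString());                   // Least specific, pushed first.
        stack.push(sb.append("_0.20.0").toString()); // More specific.
        stack.push(sb.append("_minimr").toString()); // Most specific, pushed last.

        // pop() yields the most specific candidate first.
        while (!stack.isEmpty()) {
            String candidate = stack.pop();
            if (existingFiles.contains(candidate)) {
                System.out.println("using " + candidate); // Prints "using input1.q.out_0.20.0".
                break;
            }
        }
    }
}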
From source file:org.apache.pig.builtin.Utf8StorageConverter.java
private Tuple consumeTuple(PushbackInputStream in, ResourceFieldSchema fieldSchema) throws IOException {
    if (fieldSchema == null) {
        throw new IOException("Schema is null");
    }
    int buf;
    ByteArrayOutputStream mOut;
    while ((buf = in.read()) != '(' || buf == '}') {
        if (buf == -1) {
            throw new IOException("Unexpect end of tuple");
        }
        if (buf == '}') {
            in.unread(buf);
            return null;
        }
    }
    Tuple t = TupleFactory.getInstance().newTuple();
    if (fieldSchema.getSchema() != null && fieldSchema.getSchema().getFields().length != 0) {
        ResourceFieldSchema[] fss = fieldSchema.getSchema().getFields();
        // Interpret items inside the tuple one by one based on the inner schema.
        for (int i = 0; i < fss.length; i++) {
            Object field;
            ResourceFieldSchema fs = fss[i];
            int delimit = ',';
            if (i == fss.length - 1)
                delimit = ')';

            if (DataType.isComplex(fs.getType())) {
                field = consumeComplexType(in, fs);
                while ((buf = in.read()) != delimit) {
                    if (buf == -1) {
                        throw new IOException("Unexpect end of tuple");
                    }
                }
            } else {
                mOut = new ByteArrayOutputStream(BUFFER_SIZE);
                while ((buf = in.read()) != delimit) {
                    if (buf == -1) {
                        throw new IOException("Unexpect end of tuple");
                    }
                    if (buf == delimit)
                        break;
                    mOut.write(buf);
                }
                field = parseSimpleType(mOut.toByteArray(), fs);
            }
            t.append(field);
        }
    } else {
        // No inner schema, treat everything inside the tuple as bytearray.
        // The deque keeps track of nested tuple/bag/map brackets; we do not
        // interpret nested values, we save them as bytearray.
        Deque<Character> level = new LinkedList<Character>();
        mOut = new ByteArrayOutputStream(BUFFER_SIZE);
        while (true) {
            buf = in.read();
            if (buf == -1) {
                throw new IOException("Unexpect end of tuple");
            }
            if (buf == '[' || buf == '{' || buf == '(') {
                level.push((char) buf);
                mOut.write(buf);
            } else if (buf == ')' && level.isEmpty()) { // End of tuple.
                DataByteArray value = new DataByteArray(mOut.toByteArray());
                t.append(value);
                break;
            } else if (buf == ',' && level.isEmpty()) {
                DataByteArray value = new DataByteArray(mOut.toByteArray());
                t.append(value);
                mOut.reset();
            } else if (buf == ']' || buf == '}' || buf == ')') {
                if (level.peek() == findStartChar((char) buf))
                    level.pop();
                else
                    throw new IOException("Malformed tuple");
                mOut.write(buf);
            } else
                mOut.write(buf);
        }
    }
    return t;
}
From source file:org.apache.pig.builtin.Utf8StorageConverter.java
private Map<String, Object> consumeMap(PushbackInputStream in, ResourceFieldSchema fieldSchema)
        throws IOException {
    int buf;
    boolean emptyMap = true;
    while ((buf = in.read()) != '[') {
        if (buf == -1) {
            throw new IOException("Unexpect end of map");
        }
    }
    HashMap<String, Object> m = new HashMap<String, Object>();
    ByteArrayOutputStream mOut = new ByteArrayOutputStream(BUFFER_SIZE);
    while (true) {
        // Read the key (assume a key cannot contain a special character such as #, (, [, {, }, ], )).
        while ((buf = in.read()) != '#') {
            // End of map.
            if (emptyMap && buf == ']') {
                return m;
            }
            if (buf == -1) {
                throw new IOException("Unexpect end of map");
            }
            emptyMap = false;
            mOut.write(buf);
        }
        String key = bytesToCharArray(mOut.toByteArray());
        if (key.length() == 0)
            throw new IOException("Map key can not be null");

        // Read the value. The deque keeps track of nested tuple/bag/map brackets;
        // we do not interpret nested values, we save them as bytearray.
        mOut.reset();
        Deque<Character> level = new LinkedList<Character>();
        while (true) {
            buf = in.read();
            if (buf == -1) {
                throw new IOException("Unexpect end of map");
            }
            if (buf == '[' || buf == '{' || buf == '(') {
                level.push((char) buf);
            } else if (buf == ']' && level.isEmpty()) { // End of map.
                break;
            } else if (buf == ']' || buf == '}' || buf == ')') {
                if (level.isEmpty())
                    throw new IOException("Malformed map");
                if (level.peek() == findStartChar((char) buf))
                    level.pop();
            } else if (buf == ',' && level.isEmpty()) {
                // Current map item complete.
                break;
            }
            mOut.write(buf);
        }
        Object value = null;
        if (fieldSchema != null && fieldSchema.getSchema() != null && mOut.size() > 0) {
            value = bytesToObject(mOut.toByteArray(), fieldSchema.getSchema().getFields()[0]);
        } else if (mOut.size() > 0) { // Untyped map.
            value = new DataByteArray(mOut.toByteArray());
        }
        m.put(key, value);
        mOut.reset();
        if (buf == ']')
            break;
    }
    return m;
}
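Both Pig examples above use the same idiom: a Deque<Character> as a stack of open brackets, push on an opener, peek()/pop() against the matching closer. A minimal, self-contained sketch of that pattern in isolation (the class and helper names here are illustrative, not part of Pig; openerFor plays the role of Pig's findStartChar):

import java.util.ArrayDeque;
import java.util.Deque;

public class BracketTracker {
    // Returns true if every ), ], } in the input closes the matching (, [, {.
    static boolean balanced(String input) {
        Deque<Character> level = new ArrayDeque<>();
        for (char c : input.toCharArray()) {
            if (c == '(' || c == '[' || c == '{') {
                level.push(c);
            } else if (c == ')' || c == ']' || c == '}') {
                // Malformed if nothing is open or the opener does not match.
                if (level.isEmpty() || level.pop() != openerFor(c)) {
                    return false;
                }
            }
        }
        return level.isEmpty();
    }

    static char openerFor(char close) {
        switch (close) {
            case ')': return '(';
            case ']': return '[';
            default:  return '{';
        }
    }

    public static void main(String[] args) {
        System.out.println(balanced("(1,[a#(2,3)])")); // true
        System.out.println(balanced("(1,[a#(2,3))"));  // false
    }
}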
From source file:org.apache.tez.dag.api.DAG.java
@Private
public synchronized DAGPlan createDag(Configuration tezConf, Credentials extraCredentials,
        Map<String, LocalResource> tezJarResources, LocalResource binaryConfig, boolean tezLrsAsArchive,
        ServicePluginsDescriptor servicePluginsDescriptor, JavaOptsChecker javaOptsChecker) {
    Deque<String> topologicalVertexStack = verify(true);
    verifyLocalResources(tezConf);

    DAGPlan.Builder dagBuilder = DAGPlan.newBuilder();
    dagBuilder.setName(this.name);

    if (this.callerContext != null) {
        dagBuilder.setCallerContext(DagTypeConverters.convertCallerContextToProto(callerContext));
    }
    if (this.dagInfo != null && !this.dagInfo.isEmpty()) {
        dagBuilder.setDagInfo(this.dagInfo);
    }

    // Setup default execution context.
    VertexExecutionContext defaultContext = getDefaultExecutionContext();
    verifyExecutionContext(defaultContext, servicePluginsDescriptor, "DAGDefault");
    if (defaultContext != null) {
        DAGProtos.VertexExecutionContextProto contextProto = DagTypeConverters.convertToProto(defaultContext);
        dagBuilder.setDefaultExecutionContext(contextProto);
    }

    if (!vertexGroups.isEmpty()) {
        for (VertexGroup av : vertexGroups) {
            GroupInfo groupInfo = av.getGroupInfo();
            PlanVertexGroupInfo.Builder groupBuilder = PlanVertexGroupInfo.newBuilder();
            groupBuilder.setGroupName(groupInfo.getGroupName());
            for (Vertex v : groupInfo.getMembers()) {
                groupBuilder.addGroupMembers(v.getName());
            }
            groupBuilder.addAllOutputs(groupInfo.outputs);
            for (Map.Entry<String, InputDescriptor> entry : groupInfo.edgeMergedInputs.entrySet()) {
                groupBuilder.addEdgeMergedInputs(
                        PlanGroupInputEdgeInfo.newBuilder().setDestVertexName(entry.getKey())
                                .setMergedInput(DagTypeConverters.convertToDAGPlan(entry.getValue())));
            }
            dagBuilder.addVertexGroups(groupBuilder);
        }
    }

    Credentials dagCredentials = new Credentials();
    if (extraCredentials != null) {
        dagCredentials.mergeAll(extraCredentials);
    }
    dagCredentials.mergeAll(credentials);

    if (!commonTaskLocalFiles.isEmpty()) {
        dagBuilder.addAllLocalResource(DagTypeConverters.convertToDAGPlan(commonTaskLocalFiles));
    }

    Preconditions.checkArgument(topologicalVertexStack.size() == vertices.size(),
            "size of topologicalVertexStack is:" + topologicalVertexStack.size()
                    + " while size of vertices is:" + vertices.size()
                    + ", make sure they are the same in order to sort the vertices");
    while (!topologicalVertexStack.isEmpty()) {
        Vertex vertex = vertices.get(topologicalVertexStack.pop());
        // Infer credentials, resources and parallelism from the data source.
        Resource vertexTaskResource = vertex.getTaskResource();
        if (vertexTaskResource == null) {
            vertexTaskResource = Resource.newInstance(
                    tezConf.getInt(TezConfiguration.TEZ_TASK_RESOURCE_MEMORY_MB,
                            TezConfiguration.TEZ_TASK_RESOURCE_MEMORY_MB_DEFAULT),
                    tezConf.getInt(TezConfiguration.TEZ_TASK_RESOURCE_CPU_VCORES,
                            TezConfiguration.TEZ_TASK_RESOURCE_CPU_VCORES_DEFAULT));
        }
        Map<String, LocalResource> vertexLRs = Maps.newHashMap();
        vertexLRs.putAll(vertex.getTaskLocalFiles());
        List<DataSourceDescriptor> dataSources = vertex.getDataSources();
        for (DataSourceDescriptor dataSource : dataSources) {
            if (dataSource.getCredentials() != null) {
                dagCredentials.addAll(dataSource.getCredentials());
            }
            if (dataSource.getAdditionalLocalFiles() != null) {
                TezCommonUtils.addAdditionalLocalResources(dataSource.getAdditionalLocalFiles(), vertexLRs,
                        "Vertex " + vertex.getName());
            }
        }
        if (tezJarResources != null) {
            TezCommonUtils.addAdditionalLocalResources(tezJarResources, vertexLRs,
                    "Vertex " + vertex.getName());
        }
        if (binaryConfig != null) {
            vertexLRs.put(TezConstants.TEZ_PB_BINARY_CONF_NAME, binaryConfig);
        }
        int vertexParallelism = vertex.getParallelism();
        VertexLocationHint vertexLocationHint = vertex.getLocationHint();
        if (dataSources.size() == 1) {
            DataSourceDescriptor dataSource = dataSources.get(0);
            if (vertexParallelism == -1 && dataSource.getNumberOfShards() > -1) {
                vertexParallelism = dataSource.getNumberOfShards();
            }
            if (vertexLocationHint == null && dataSource.getLocationHint() != null) {
                vertexLocationHint = dataSource.getLocationHint();
            }
        }
        if (vertexParallelism == -1) {
            Preconditions.checkState(vertexLocationHint == null,
                    "Cannot specify vertex location hint without specifying vertex parallelism. Vertex: "
                            + vertex.getName());
        } else if (vertexLocationHint != null) {
            Preconditions.checkState(vertexParallelism == vertexLocationHint.getTaskLocationHints().size(),
                    "vertex task location hint must equal vertex parallelism. Vertex: " + vertex.getName());
        }
        for (DataSinkDescriptor dataSink : vertex.getDataSinks()) {
            if (dataSink.getCredentials() != null) {
                dagCredentials.addAll(dataSink.getCredentials());
            }
        }

        VertexPlan.Builder vertexBuilder = VertexPlan.newBuilder();
        vertexBuilder.setName(vertex.getName());
        vertexBuilder.setType(PlanVertexType.NORMAL); // Vertex type is implicitly NORMAL until TEZ-46.
        vertexBuilder.setProcessorDescriptor(
                DagTypeConverters.convertToDAGPlan(vertex.getProcessorDescriptor()));

        // Vertex ExecutionContext setup.
        VertexExecutionContext execContext = vertex.getVertexExecutionContext();
        verifyExecutionContext(execContext, servicePluginsDescriptor, vertex.getName());
        if (execContext != null) {
            DAGProtos.VertexExecutionContextProto contextProto = DagTypeConverters.convertToProto(execContext);
            vertexBuilder.setExecutionContext(contextProto);
        }
        // End of VertexExecutionContext setup.

        if (vertex.getInputs().size() > 0) {
            for (RootInputLeafOutput<InputDescriptor, InputInitializerDescriptor> input : vertex.getInputs()) {
                vertexBuilder.addInputs(DagTypeConverters.convertToDAGPlan(input));
            }
        }
        if (vertex.getOutputs().size() > 0) {
            for (RootInputLeafOutput<OutputDescriptor, OutputCommitterDescriptor> output : vertex.getOutputs()) {
                vertexBuilder.addOutputs(DagTypeConverters.convertToDAGPlan(output));
            }
        }
        if (vertex.getConf() != null && vertex.getConf().size() > 0) {
            ConfigurationProto.Builder confBuilder = ConfigurationProto.newBuilder();
            TezUtils.populateConfProtoFromEntries(vertex.getConf().entrySet(), confBuilder);
            vertexBuilder.setVertexConf(confBuilder);
        }

        // Task config.
        PlanTaskConfiguration.Builder taskConfigBuilder = PlanTaskConfiguration.newBuilder();
        taskConfigBuilder.setNumTasks(vertexParallelism);
        taskConfigBuilder.setMemoryMb(vertexTaskResource.getMemory());
        taskConfigBuilder.setVirtualCores(vertexTaskResource.getVirtualCores());
        try {
            taskConfigBuilder.setJavaOpts(TezClientUtils
                    .addDefaultsToTaskLaunchCmdOpts(vertex.getTaskLaunchCmdOpts(), tezConf, javaOptsChecker));
        } catch (TezException e) {
            throw new TezUncheckedException(
                    "Invalid TaskLaunchCmdOpts defined for Vertex " + vertex.getName() + " : " + e.getMessage(),
                    e);
        }
        taskConfigBuilder.setTaskModule(vertex.getName());
        if (!vertexLRs.isEmpty()) {
            taskConfigBuilder.addAllLocalResource(DagTypeConverters.convertToDAGPlan(vertexLRs));
        }

        Map<String, String> taskEnv = Maps.newHashMap(vertex.getTaskEnvironment());
        TezYARNUtils.setupDefaultEnv(taskEnv, tezConf, TezConfiguration.TEZ_TASK_LAUNCH_ENV,
                TezConfiguration.TEZ_TASK_LAUNCH_ENV_DEFAULT,
                TezConfiguration.TEZ_TASK_LAUNCH_CLUSTER_DEFAULT_ENV,
                TezConfiguration.TEZ_TASK_LAUNCH_CLUSTER_DEFAULT_ENV_DEFAULT, tezLrsAsArchive);
        for (Map.Entry<String, String> entry : taskEnv.entrySet()) {
            PlanKeyValuePair.Builder envSettingBuilder = PlanKeyValuePair.newBuilder();
            envSettingBuilder.setKey(entry.getKey());
            envSettingBuilder.setValue(entry.getValue());
            taskConfigBuilder.addEnvironmentSetting(envSettingBuilder);
        }

        if (vertexLocationHint != null) {
            if (vertexLocationHint.getTaskLocationHints() != null) {
                for (TaskLocationHint hint : vertexLocationHint.getTaskLocationHints()) {
                    PlanTaskLocationHint.Builder taskLocationHintBuilder = PlanTaskLocationHint.newBuilder();
                    // We can allow this later on if needed.
                    if (hint.getAffinitizedTask() != null) {
                        throw new TezUncheckedException(
                                "Task based affinity may not be specified via the DAG API");
                    }
                    if (hint.getHosts() != null) {
                        taskLocationHintBuilder.addAllHost(hint.getHosts());
                    }
                    if (hint.getRacks() != null) {
                        taskLocationHintBuilder.addAllRack(hint.getRacks());
                    }
                    vertexBuilder.addTaskLocationHint(taskLocationHintBuilder);
                }
            }
        }
        if (vertex.getVertexManagerPlugin() != null) {
            vertexBuilder.setVertexManagerPlugin(
                    DagTypeConverters.convertToDAGPlan(vertex.getVertexManagerPlugin()));
        }
        for (Edge inEdge : vertex.getInputEdges()) {
            vertexBuilder.addInEdgeId(inEdge.getId());
        }
        for (Edge outEdge : vertex.getOutputEdges()) {
            vertexBuilder.addOutEdgeId(outEdge.getId());
        }
        vertexBuilder.setTaskConfig(taskConfigBuilder);
        dagBuilder.addVertex(vertexBuilder);
    }

    for (Edge edge : edges) {
        EdgePlan.Builder edgeBuilder = EdgePlan.newBuilder();
        edgeBuilder.setId(edge.getId());
        edgeBuilder.setInputVertexName(edge.getInputVertex().getName());
        edgeBuilder.setOutputVertexName(edge.getOutputVertex().getName());
        edgeBuilder.setDataMovementType(
                DagTypeConverters.convertToDAGPlan(edge.getEdgeProperty().getDataMovementType()));
        edgeBuilder.setDataSourceType(
                DagTypeConverters.convertToDAGPlan(edge.getEdgeProperty().getDataSourceType()));
        edgeBuilder.setSchedulingType(
                DagTypeConverters.convertToDAGPlan(edge.getEdgeProperty().getSchedulingType()));
        edgeBuilder.setEdgeSource(DagTypeConverters.convertToDAGPlan(edge.getEdgeProperty().getEdgeSource()));
        edgeBuilder.setEdgeDestination(
                DagTypeConverters.convertToDAGPlan(edge.getEdgeProperty().getEdgeDestination()));
        if (edge.getEdgeProperty().getDataMovementType() == DataMovementType.CUSTOM) {
            if (edge.getEdgeProperty().getEdgeManagerDescriptor() != null) {
                edgeBuilder.setEdgeManager(
                        DagTypeConverters.convertToDAGPlan(edge.getEdgeProperty().getEdgeManagerDescriptor()));
            } // Else the AM will deal with this.
        }
        dagBuilder.addEdge(edgeBuilder);
    }

    if (dagAccessControls != null) {
        dagBuilder.setAclInfo(DagTypeConverters.convertDAGAccessControlsToProto(dagAccessControls));
    }

    ConfigurationProto.Builder confProtoBuilder = ConfigurationProto.newBuilder();
    if (!this.dagConf.isEmpty()) {
        TezUtils.populateConfProtoFromEntries(this.dagConf.entrySet(), confProtoBuilder);
    }
    // Copy historyLogLevel from tezConf into dagConf if it is not overridden in dagConf.
    String logLevel = this.dagConf.get(TezConfiguration.TEZ_HISTORY_LOGGING_LOGLEVEL);
    if (logLevel != null) {
        // The config is from dagConf and we have already added it to the proto above;
        // just check that the value is valid.
        if (!HistoryLogLevel.validateLogLevel(logLevel)) {
            throw new IllegalArgumentException("Config: " + TezConfiguration.TEZ_HISTORY_LOGGING_LOGLEVEL
                    + " is set to invalid value: " + logLevel);
        }
    } else {
        // Validate and set the value from tezConf.
        logLevel = tezConf.get(TezConfiguration.TEZ_HISTORY_LOGGING_LOGLEVEL);
        if (logLevel != null) {
            if (!HistoryLogLevel.validateLogLevel(logLevel)) {
                throw new IllegalArgumentException("Config: " + TezConfiguration.TEZ_HISTORY_LOGGING_LOGLEVEL
                        + " is set to invalid value: " + logLevel);
            }
            PlanKeyValuePair.Builder kvp = PlanKeyValuePair.newBuilder();
            kvp.setKey(TezConfiguration.TEZ_HISTORY_LOGGING_LOGLEVEL);
            kvp.setValue(logLevel);
            confProtoBuilder.addConfKeyValues(kvp);
        }
    }
    dagBuilder.setDagConf(confProtoBuilder);

    if (dagCredentials != null) {
        dagBuilder.setCredentialsBinary(DagTypeConverters.convertCredentialsToProto(dagCredentials));
        TezCommonUtils.logCredentials(LOG, dagCredentials, "dag");
    }
    return dagBuilder.build();
}
From source file:org.decojer.cavaj.utils.SwitchTypes.java
/**
 * Is used for string-switches. Execute switch case BB to create the case value map: string to
 * BB.
 *
 * @param caseBb case BB
 * @param stringReg string register
 * @param hash hash for string
 * @param defaultCase default case
 * @param string2bb case value map: string to BB
 * @return {@code true} - success
 */
private static boolean executeBbStringHashCond(final BB caseBb, final int stringReg, final int hash,
        final BB defaultCase, final Map<String, BB> string2bb) {
    final Deque<Object> stack = Queues.newArrayDeque();
    String str = null;
    for (int i = 0; i < caseBb.getOps(); ++i) {
        final Op op = caseBb.getOp(i);
        switch (op.getOptype()) {
        case LOAD:
            stack.push(((LOAD) op).getReg());
            break;
        case PUSH:
            stack.push(((PUSH) op).getValue());
            break;
        case INVOKE:
            final M m = ((INVOKE) op).getM();
            if (!"equals".equals(m.getName()) || !"(Ljava/lang/Object;)Z".equals(m.getDescriptor())) {
                return false;
            }
            final Object value = stack.pop();
            if (!(value instanceof String)) {
                return false;
            }
            if (value.hashCode() != hash) {
                return false;
            }
            final Object reg = stack.pop();
            if ((Integer) reg != stringReg) {
                return false;
            }
            stack.push(true);
            str = (String) value;
            break;
        case JCND:
            final Object equalsResult = stack.pop();
            if (!(equalsResult instanceof Boolean)) {
                return false;
            }
            boolean dir = ((Boolean) equalsResult).booleanValue();
            if (((JCND) op).getCmpType() == CmpType.T_EQ) {
                dir = !dir;
            }
            string2bb.put(str, dir ? caseBb.getTrueSucc() : caseBb.getFalseSucc());
            final E out = dir ? caseBb.getFalseOut() : caseBb.getTrueOut();
            if (out == null) {
                assert false;
                return false;
            }
            if (out.getRelevantEnd() == defaultCase) {
                return true;
            }
            return executeBbStringHashCond(out.getEnd(), stringReg, hash, defaultCase, string2bb);
        default:
            return false;
        }
    }
    return false;
}
From source file:org.decojer.cavaj.utils.SwitchTypes.java
/**
 * Is used for JDK-Bytecode mode string-switches. Execute switch case BB to create the case
 * value map: index to string.
 *
 * @param caseBb case BB
 * @param indexReg index register
 * @param str string
 * @param defaultCase default case
 * @param index2string case value map: index to string
 * @return {@code true} - success
 */
private static boolean executeBbStringIndex(@Nonnull final BB caseBb, final int indexReg,
        @Nonnull final String str, @Nonnull final BB defaultCase,
        @Nonnull final Map<Integer, String> index2string) {
    assert defaultCase != null; // Prevent warning for now, later check more.

    final Deque<Object> stack = Queues.newArrayDeque();
    for (int i = 0; i < caseBb.getOps(); ++i) {
        final Op op = caseBb.getOp(i);
        switch (op.getOptype()) {
        case PUSH:
            stack.push(((PUSH) op).getValue());
            break;
        case STORE:
            if (((STORE) op).getReg() != indexReg) {
                return false;
            }
            final Object index = stack.pop();
            if (!(index instanceof Integer)) {
                return false;
            }
            index2string.put((Integer) index, str);
            return true;
        default:
            return false;
        }
    }
    return false;
}
From source file:org.deeplearning4j.text.corpora.treeparser.BinarizeTreeTransformer.java
@Override
public Tree transform(Tree t) {
    if (t == null)
        return null;
    Deque<Pair<Tree, String>> stack = new ArrayDeque<>();
    stack.add(new Pair<>(t, t.label()));
    String originalLabel = t.label();
    while (!stack.isEmpty()) {
        Pair<Tree, String> curr = stack.pop();
        Tree node = curr.getFirst();

        for (Tree child : node.children())
            stack.add(new Pair<>(child, curr.getSecond()));

        if (node.children().size() > 2) {
            List<String> children = new ArrayList<>();
            for (int i = 0; i < node.children().size(); i++)
                children.add(node.children().get(i).label());

            Tree copy = node.clone();
            // Clear out children.
            node.children().clear();
            Tree currNode = node;
            for (int i = 1; i < children.size() - 1; i++) {
                if (factor.equals("right")) {
                    Tree newNode = new Tree(currNode);
                    List<String> subChildren = children.subList(i,
                            Math.min(i + horizontonalMarkov, children.size()));
                    newNode.setLabel(originalLabel + "-" + "(" + StringUtils.join(subChildren, "-"));
                    newNode.setParent(currNode);
                    currNode.children().add(copy.children().remove(0));
                    currNode.firstChild().setParent(currNode);
                    currNode.children().add(newNode);
                    currNode = newNode;
                } else {
                    Tree newNode = new Tree(currNode);
                    newNode.setParent(copy.firstChild());
                    List<String> childLabels = children
                            .subList(Math.max(children.size() - i - horizontonalMarkov, 0), i);
                    Collections.reverse(childLabels);
                    newNode.setLabel(originalLabel + "-" + "(" + StringUtils.join(childLabels, "-"));
                    currNode.children().add(newNode);
                    currNode.firstChild().setParent(currNode);
                    currNode.children().add(copy.children().remove(copy.children().size() - 1));
                    currNode.lastChild().setParent(currNode);
                    currNode = newNode;
                }
            }
            currNode.children().addAll(new ArrayList<>(copy.children()));
        }
    }
    addPreTerminal(t);
    return t;
}
From source file:org.finra.herd.service.impl.JobServiceImpl.java
@Override
public Job deleteJob(String jobId, JobDeleteRequest jobDeleteRequest) throws Exception {
    Assert.hasText(jobId, "jobId must be specified");
    Assert.notNull(jobDeleteRequest, "jobDeleteRequest must be specified");
    Assert.hasText(jobDeleteRequest.getDeleteReason(), "deleteReason must be specified");

    // Trim input parameters.
    String localJobId = jobId.trim();

    ProcessInstance mainProcessInstance = activitiService.getProcessInstanceById(localJobId);
    if (mainProcessInstance != null) {
        checkPermissions(mainProcessInstance.getProcessDefinitionKey(),
                new NamespacePermissionEnum[] { NamespacePermissionEnum.EXECUTE });

        // Load all processes (main process and sub-processes) into a deque to be later deleted.
        Deque<String> processInstanceIds = new ArrayDeque<>();
        processInstanceIds.push(mainProcessInstance.getProcessInstanceId());
        Deque<String> superProcessInstanceIds = new ArrayDeque<>();
        superProcessInstanceIds.push(mainProcessInstance.getProcessInstanceId());
        while (!superProcessInstanceIds.isEmpty()) {
            String superProcessInstanceId = superProcessInstanceIds.pop();

            // Get all executions with the parent id equal to the super process instance id.
            for (Execution execution : activitiRuntimeService.createExecutionQuery()
                    .parentId(superProcessInstanceId).list()) {
                processInstanceIds.push(execution.getId());
            }

            // Get all active sub-processes for the super process instance id.
            for (ProcessInstance subProcessInstance : activitiRuntimeService.createProcessInstanceQuery()
                    .superProcessInstanceId(superProcessInstanceId).active().list()) {
                processInstanceIds.push(subProcessInstance.getId());
                superProcessInstanceIds.push(subProcessInstance.getId());
            }
        }

        // Delete all processes individually in LIFO order.
        while (!processInstanceIds.isEmpty()) {
            activitiService.deleteProcessInstance(processInstanceIds.pop(), jobDeleteRequest.getDeleteReason());
        }
    } else {
        throw new ObjectNotFoundException(
                String.format("Job with ID \"%s\" does not exist or is already completed.", localJobId));
    }
    return getJob(localJobId, false, false);
}
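This example relies on push() plus pop() giving LIFO order, so sub-processes are always deleted before their parents. A minimal, self-contained sketch of that traversal-then-delete pattern (the Node type and deleteDepthFirst method are hypothetical stand-ins, not part of herd or Activiti):

import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Deque;
import java.util.List;

public class LifoTraversalDemo {
    // A hypothetical node with children, standing in for Activiti process instances.
    static class Node {
        final String id;
        final List<Node> children = new ArrayList<>();
        Node(String id) { this.id = id; }
    }

    // Collects ids so that every child precedes its parent, then "deletes" in that order.
    static void deleteDepthFirst(Node root) {
        Deque<Node> work = new ArrayDeque<>();
        Deque<String> toDelete = new ArrayDeque<>();
        work.push(root);
        while (!work.isEmpty()) {
            Node current = work.pop();
            toDelete.push(current.id); // Parents pushed first end up popped last.
            for (Node child : current.children) {
                work.push(child);
            }
        }
        while (!toDelete.isEmpty()) {
            System.out.println("deleting " + toDelete.pop());
        }
    }

    public static void main(String[] args) {
        Node root = new Node("main");
        Node sub = new Node("sub");
        sub.children.add(new Node("sub-sub"));
        root.children.add(sub);
        deleteDepthFirst(root); // Prints sub-sub, then sub, then main: children before parents.
    }
}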
From source file:org.graphwalker.io.factory.yed.YEdContextFactory.java
private Vertex addVertices(Model model, Context context, GraphmlDocument document, Map<String, Vertex> elements)
        throws XmlException {
    Vertex startVertex = null;
    Deque<XmlObject> workQueue = new ArrayDeque<>();
    workQueue.addAll(Arrays.asList(document.selectPath(NAMESPACE + "$this/xq:graphml/xq:graph/xq:node")));
    while (!workQueue.isEmpty()) {
        XmlObject object = workQueue.pop();
        if (object instanceof NodeType) {
            NodeType node = (NodeType) object;
            if (0 < node.getGraphArray().length) {
                for (GraphType subgraph : node.getGraphArray()) {
                    workQueue.addAll(Arrays.asList(subgraph.getNodeArray()));
                }
            } else {
                String description = "";
                for (DataType data : node.getDataArray()) {
                    if (0 < data.getDomNode().getChildNodes().getLength()) {
                        if (data.getKey().equals("d5")) {
                            description = ((DataTypeImpl) data).getStringValue();
                        }
                        if (isSupportedNode(data.xmlText())) {
                            StringBuilder label = new StringBuilder();
                            for (NodeLabelType nodeLabel : getSupportedNode(data.xmlText())
                                    .getNodeLabelArray()) {
                                label.append(((NodeLabelTypeImpl) nodeLabel).getStringValue());
                            }
                            YEdVertexParser parser = new YEdVertexParser(getTokenStream(label.toString()));
                            parser.removeErrorListeners();
                            parser.addErrorListener(YEdDescriptiveErrorListener.INSTANCE);
                            YEdVertexParser.ParseContext parseContext = parser.parse();
                            Vertex vertex = new Vertex();
                            if (!description.isEmpty()) {
                                vertex.setProperty("description", description);
                            }
                            vertex.setProperty("x", getSupportedNode(data.xmlText()).getGeometry().getX());
                            vertex.setProperty("y", getSupportedNode(data.xmlText()).getGeometry().getY());
                            boolean blocked = false;
                            if (null != parseContext.start()) {
                                elements.put(node.getId(), vertex);
                                vertex.setId(node.getId());
                                startVertex = vertex;
                            } else {
                                for (YEdVertexParser.FieldContext field : parseContext.field()) {
                                    if (null != field.names()) {
                                        vertex.setName(field.names().getText());
                                    }
                                    if (null != field.shared() && null != field.shared().Identifier()) {
                                        vertex.setSharedState(field.shared().Identifier().getText());
                                    }
                                    if (null != field.reqtags()) {
                                        vertex.setRequirements(
                                                convertVertexRequirement(field.reqtags().reqtagList().reqtag()));
                                    }
                                    if (null != field.actions()) {
                                        model.addActions(convertVertexAction(field.actions().action()));
                                    }
                                    if (null != field.blocked()) {
                                        blocked = true;
                                    }
                                }
                                if (!blocked) {
                                    elements.put(node.getId(), vertex);
                                    vertex.setId(node.getId());
                                    model.addVertex(vertex);
                                }
                            }
                        }
                    }
                }
            }
        }
    }
    return startVertex;
}