Usage examples for java.util.LinkedList#push, collected from open-source projects.
Method signature: public void push(E e) — pushes an element onto the head of the list (stack semantics).
From source file:org.bimserver.charting.Containers.TreeNode.java
/**
 * Returns an iterator over this node and all of its descendants, visited in
 * depth-first pre-order (this node first, then each subtree in child order).
 *
 * @return iterator yielding the root first, then every descendant
 */
public Iterator<TreeNode> iterateFromRootToLeafNodes() {
    // Work stack seeded with this node; popping from the head yields DFS order.
    LinkedList<TreeNode> pending = new LinkedList<TreeNode>(Arrays.asList(this));
    LinkedList<TreeNode> visited = new LinkedList<TreeNode>();
    while (!pending.isEmpty()) {
        TreeNode current = pending.pop();
        visited.add(current);
        // Push children in reverse so the first child is processed next.
        for (int c = current.Children.length - 1; c >= 0; c--) {
            pending.push(current.Children[c]);
        }
    }
    return visited.iterator();
}
From source file:ca.uhn.fhir.context.BaseRuntimeElementCompositeDefinition.java
/**
 * Builds the composite element definition by reflectively scanning the
 * implementing class AND all of its IBase superclasses for child fields.
 * Superclasses are scanned first: classes are pushed onto a stack while
 * walking up the hierarchy, so iteration order is most-super-class first.
 *
 * @param theName                    element name
 * @param theImplementingClass       concrete class whose fields are scanned
 * @param theStandardType            passed through to the superclass constructor
 * @param theContext                 owning FHIR context
 * @param theClassToElementDefinitions shared class-to-definition map, stored by reference
 */
@SuppressWarnings("unchecked")
public BaseRuntimeElementCompositeDefinition(String theName, Class<? extends T> theImplementingClass,
        boolean theStandardType, FhirContext theContext,
        Map<Class<? extends IBase>, BaseRuntimeElementDefinition<?>> theClassToElementDefinitions) {
    super(theName, theImplementingClass, theStandardType);
    myContext = theContext;
    myClassToElementDefinitions = theClassToElementDefinitions;
    /*
     * We scan classes for annotated fields in the class but also all of its superclasses
     */
    Class<? extends IBase> current = theImplementingClass;
    LinkedList<Class<? extends IBase>> classes = new LinkedList<Class<? extends IBase>>();
    do {
        // Only the first @ChildOrder found (most-derived class wins) sets the forced order.
        if (forcedOrder == null) {
            ChildOrder childOrder = current.getAnnotation(ChildOrder.class);
            if (childOrder != null) {
                forcedOrder = new HashMap<String, Integer>();
                for (int i = 0; i < childOrder.names().length; i++) {
                    forcedOrder.put(childOrder.names()[i], i);
                }
            }
        }
        // push() prepends, so after the walk the list runs superclass -> subclass.
        classes.push(current);
        if (IBase.class.isAssignableFrom(current.getSuperclass())) {
            current = (Class<? extends IBase>) current.getSuperclass();
        } else {
            current = null; // reached the top of the IBase hierarchy
        }
    } while (current != null);
    // Collect declared fields, deduplicated across the hierarchy; the Set guards
    // against scanning the same Field twice.
    Set<Field> fields = new HashSet<Field>();
    for (Class<? extends IBase> nextClass : classes) {
        int fieldIndexInClass = 0;
        for (Field next : nextClass.getDeclaredFields()) {
            if (fields.add(next)) {
                // Third arg marks the first annotated field encountered in this class.
                ScannedField scannedField = new ScannedField(next, theImplementingClass, fieldIndexInClass == 0);
                if (scannedField.getChildAnnotation() != null) {
                    myScannedFields.add(scannedField);
                    fieldIndexInClass++;
                }
            }
        }
    }
}
From source file:act.installer.reachablesexplorer.FreemarkerRenderer.java
/**
 * Builds the Freemarker template model for one reaction pathway page.
 *
 * @param p       the pathway whose nodes (reactions and chemicals) are rendered
 * @param designs DNA design table rows to attach to the page, may be empty
 * @return pair of (template model, page title); the title joins chemical names with " <- "
 * @throws IOException if looking up reachables or design metadata fails
 */
private Pair<Object, String> buildPathModel(ReactionPath p, List<DnaDesignTableProperties> designs)
        throws IOException {
    Map<String, Object> model = new HashMap<>();
    LinkedList<Object> pathwayItems = new LinkedList<>();
    List<String> chemicalNames = new ArrayList<>();
    model.put("pathwayitems", pathwayItems);
    // Pathway nodes start at the target and work back, so reverse them to make page
    // order go from start to finish. push() prepends, which does the reversal.
    for (Cascade.NodeInformation i : p.getPath()) {
        Map<String, Object> nodeModel = new HashMap<>();
        pathwayItems.push(nodeModel);
        nodeModel.put("isreaction", i.getIsReaction());
        if (i.getIsReaction()) {
            // Reaction node: the label holds '&'-separated EC numbers.
            String label = i.getLabel();
            label = label.replaceAll("&+", " ");
            List<String> ecNums = Arrays.stream(label.split("\\s")).filter(s -> !s.isEmpty())
                    .collect(Collectors.toList());
            nodeModel.put("ecnums", ecNums);
            // TODO: clean up this title to make mediawiki happy with it.
            List<String> organisms = new ArrayList<>(i.getOrganisms());
            Collections.sort(organisms);
            nodeModel.put("organisms", organisms);
            List<String> pmids = new ArrayList<>(i.getPmids());
            Collections.sort(pmids);
            nodeModel.put("pmids", pmids);
        } else {
            // Chemical node: resolve it in the reachables DB for name/link/structure.
            Reachable r = getReachable(i.getId());
            if (r == null) {
                // NOTE(review): %d is a printf placeholder — confirm LOGGER uses
                // format-string style and not SLF4J-style {} substitution.
                LOGGER.error("Unable to locate pathway chemical %d in reachables db", i.getId());
                nodeModel.put("name", "(unknown)");
            } else {
                nodeModel.put("link", r.getInchiKey());
                // TODO: we really need a way of picking a good name for each molecule.
                // If the page name is the InChI, we reduce it to the formula for the
                // purpose of pathway visualisation.
                String name = r.getPageName().startsWith("InChI") ?
                        r.getPageName().split("/")[1] : r.getPageName();
                nodeModel.put("name", name);
                chemicalNames.add(name);
                if (r.getStructureFilename() != null) {
                    nodeModel.put("structureRendering", r.getStructureFilename());
                } else {
                    LOGGER.warn("No structure filename for %s", r.getPageName());
                }
            }
        }
    }
    // Title reads back-to-front because chemicalNames was filled in path order
    // (target first) before the list reversal done by push() above.
    String pageTitle = StringUtils.join(chemicalNames, " <- ");
    model.put("pageTitle", pageTitle);
    // One table row per DNA design, numbered from 1.
    List<Map<String, Object>> dna = new ArrayList<>();
    int i = 1;
    for (DnaDesignTableProperties design : designs) {
        final int num = i; // Sigh, must be final to use in this initialization block.
        dna.add(new HashMap<String, Object>() {
            {
                put("file", design.getDnaConstructFileName());
                put("sample", design.getDnaSeqShortName());
                put("num", Integer.valueOf(num).toString());
                put("proteinFile", design.getProteinConstructFileName());
                put("org_ec", renderDNADesignMetadata(design.getDnaSeq()));
            }
        });
        i++;
    }
    // Omit the "dna" key entirely when there are no designs so the template can
    // test for its presence.
    if (dna.size() > 0) {
        model.put("dna", dna);
    }
    return Pair.of(model, pageTitle);
}
From source file:org.eclipse.che.vfs.impl.fs.LocalFileSystemTest.java
protected List<String> flattenDirectory(String vfsPath) { java.io.File directory = getIoFile(vfsPath); assertTrue("Not a directory ", directory.isDirectory()); final int splitIndex = directory.getAbsolutePath().length() + 1; List<String> files = new ArrayList<>(); LinkedList<java.io.File> q = new LinkedList<>(); q.add(directory);// w w w . j a v a 2 s. c o m while (!q.isEmpty()) { java.io.File current = q.pop(); java.io.File[] list = current.listFiles(SERVICE_DIR_FILTER); if (list != null) { for (java.io.File f : list) { files.add(f.getAbsolutePath().substring(splitIndex)); if (f.isDirectory()) { q.push(f); } } } } if (!files.isEmpty()) { java.util.Collections.sort(files); } return files; }
From source file:de.dfki.madm.anomalydetection.evaluator.cluster_based.CMGOSEvaluator.java
/**
 * Parallel initialization step of the FAST-MCD style covariance estimation:
 * each worker thread repeatedly draws random (p+1)-subsets, builds a
 * covariance matrix, grows the subset until the determinant is non-zero,
 * applies two C-steps, and keeps the 10 best results. The per-thread top-10
 * maps are finally merged into a single top-10 map.
 *
 * @param data       full data matrix (rows are observations)
 * @param indexArray maps logical row indices to rows of {@code data}
 * @param h          subset size used by the C-step
 * @param n          number of observations
 * @param p          dimensionality (subset seeds have p+1 points)
 * @return map from determinant-based score to the best covariance matrices
 */
private HashMap<Double, LinkedList<CovarianceMatrix>> getInit10(double[][] data, int[] indexArray, int h,
        int n, int p) {
    class Worker extends Thread {
        private double[][] data;
        private int[] indexArray;
        private int h;
        private int n;
        private int p;
        private int runs;
        @SuppressWarnings("unused")
        private int id;
        // Per-thread result map; only touched by this worker, read after join().
        private HashMap<Double, LinkedList<CovarianceMatrix>> map = new HashMap<Double, LinkedList<CovarianceMatrix>>();

        public HashMap<Double, LinkedList<CovarianceMatrix>> getMap() {
            return this.map;
        }

        public Worker(double[][] data, int[] indexArray, int h, int n, int p, int runs, int id) {
            this.data = data;
            this.indexArray = indexArray;
            this.h = h;
            this.n = n;
            this.p = p;
            this.runs = runs;
            this.id = id;
        }

        public void run() {
            boolean zero = hasZeroVariance(data, indexArray);
            // repeat (say) 500 times:
            for (int run = 0; run < this.runs; run++) {
                LinkedList<double[]> list = new LinkedList<double[]>();
                boolean[] taken = new boolean[n];
                int count = 0;
                // Draw a random (p + 1)-subset J, and then compute To
                // := ave(J) and So := cov(J).
                // NOTE(review): generator is presumably a shared random source —
                // confirm it is thread-safe, since every worker calls it concurrently.
                while (count < (p + 1)) {
                    for (int index : generator.nextIntSetWithRange(0, n, (p + 1))) {
                        if (!taken[index]) {
                            list.push(data[indexArray[index]]);
                            taken[index] = true;
                            count++;
                        }
                    }
                }
                CovarianceMatrix ret = new CovarianceMatrix(list, 1);
                Matrix mat = new Matrix(ret.getCovMat());
                if (zero) {
                    // Degenerate data: regularize so the determinant can become non-zero.
                    ret.addMinimum();
                    mat = new Matrix(ret.getCovMat());
                }
                // If det(S_0) = 0, then extend J by adding another
                // random observation, and continue adding observations
                // until det(S_0) > 0.
                while (mat.det() == 0) {
                    int index;
                    do {
                        index = generator.nextInt(n);
                    } while (taken[index]);
                    taken[index] = true;
                    boolean b = true;
                    for (boolean t : taken)
                        b &= t;
                    list.push(data[indexArray[index]]);
                    ret = new CovarianceMatrix(list, 1);
                    if (b) {
                        // all Points are taken — no more rows to add, so regularize instead.
                        ret.addMinimum();
                    }
                    mat = new Matrix(ret.getCovMat());
                }
                list = null;
                // carry out two C-steps
                for (int rep = 0; rep < 2; rep++) {
                    ret = Cstep(ret, data, indexArray, h);
                }
                // Keep only the 10 best candidates for this thread.
                map = getSorted(map, ret, 10);
            }
        }
    }
    Worker[] wa = new Worker[this.numberOfThreads];
    // Integer division: up to (numberOfThreads - 1) iterations may be dropped.
    int runs = (int) (this.initIteration / this.numberOfThreads);
    for (int i = 0; i < this.numberOfThreads; i++) {
        Worker w = new Worker(data, indexArray, h, n, p, runs, i);
        w.start();
        wa[i] = w;
    }
    for (int i = 0; i < this.numberOfThreads; i++) {
        try {
            wa[i].join();
        } catch (InterruptedException e) {
            // NOTE(review): interrupt is swallowed; consider re-interrupting the
            // thread (Thread.currentThread().interrupt()) instead of just printing.
            e.printStackTrace();
        }
    }
    // Merge the per-thread top-10 maps into one global top-10.
    HashMap<Double, LinkedList<CovarianceMatrix>> map = new HashMap<Double, LinkedList<CovarianceMatrix>>();
    for (int i = 0; i < this.numberOfThreads; i++) {
        for (Double k : wa[i].getMap().keySet()) {
            for (CovarianceMatrix mat : wa[i].getMap().get(k))
                map = getSorted(map, mat, 10);
        }
        wa[i] = null; // release worker (and its map) eagerly
    }
    wa = null;
    return map;
}
From source file:org.nd4j.linalg.util.ArrayUtil.java
/** Convert an arbitrary-dimensional rectangular double array to flat vector.<br> * Can pass double[], double[][], double[][][], etc. *///from ww w . j a v a2 s . c o m public static double[] flattenDoubleArray(Object doubleArray) { if (doubleArray instanceof double[]) return (double[]) doubleArray; LinkedList<Object> stack = new LinkedList<>(); stack.push(doubleArray); int[] shape = arrayShape(doubleArray); int length = ArrayUtil.prod(shape); double[] flat = new double[length]; int count = 0; while (!stack.isEmpty()) { Object current = stack.pop(); if (current instanceof double[]) { double[] arr = (double[]) current; for (int i = 0; i < arr.length; i++) flat[count++] = arr[i]; } else if (current instanceof Object[]) { Object[] o = (Object[]) current; for (int i = o.length - 1; i >= 0; i--) stack.push(o[i]); } else throw new IllegalArgumentException("Base array is not double[]"); } if (count != flat.length) throw new IllegalArgumentException("Fewer elements than expected. Array is ragged?"); return flat; }
From source file:org.nd4j.linalg.util.ArrayUtil.java
/**
 * Convert an arbitrary-dimensional rectangular float array to a flat vector.<br>
 * Can pass float[], float[][], float[][][], etc.
 *
 * @param floatArray nested rectangular array whose leaves are float[]
 * @return flat float[] with all elements in row-major order
 * @throws IllegalArgumentException if a leaf is not float[] or the array is ragged
 */
public static float[] flattenFloatArray(Object floatArray) {
    // Already one-dimensional: return as-is.
    if (floatArray instanceof float[])
        return (float[]) floatArray;

    // Explicit stack for an iterative depth-first walk over the nesting.
    LinkedList<Object> work = new LinkedList<>();
    work.push(floatArray);
    int[] shape = arrayShape(floatArray);
    int total = ArrayUtil.prod(shape);
    float[] flat = new float[total];
    int written = 0;
    while (!work.isEmpty()) {
        Object node = work.pop();
        if (node instanceof float[]) {
            // Leaf: copy its elements into the output.
            for (float v : (float[]) node)
                flat[written++] = v;
        } else if (node instanceof Object[]) {
            // Interior level: push children in reverse so they pop in row-major order.
            Object[] children = (Object[]) node;
            for (int c = children.length - 1; c >= 0; c--)
                work.push(children[c]);
        } else {
            throw new IllegalArgumentException("Base array is not float[]");
        }
    }
    if (written != flat.length)
        throw new IllegalArgumentException("Fewer elements than expected. Array is ragged?");
    return flat;
}
From source file:org.bimserver.charting.Containers.TreeNode.java
/**
 * Walks the tree rooted at this node and collapses every non-root node whose
 * name equals the literal string "null": its children (if any) are reparented
 * to its parent, and the node itself is removed. Reparented children are
 * re-queued so nested "null" nodes collapse too.
 */
public void collapseAllNodesWithNullNames() {
    // Copied from: walkNodesFromRootToLeaves.
    LinkedList<TreeNode> nodes = new LinkedList<TreeNode>(Arrays.asList(this));
    TreeNode thisNode = null;
    while (nodes.size() > 0) {
        thisNode = nodes.pop();
        // If not root and name is string "null", this node will be collapsed.
        // Otherwise, keep iterating tree.
        if (!thisNode.isRoot() && thisNode.Name.equals("null")) {
            // Collapse non-leaf node: reparent children to parent of collapsing node.
            if (thisNode.Children.length > 0) {
                TreeNode newParent = thisNode.Parent;
                // Iterate children from last to first so remove(n) stays in range
                // while Children shrinks.
                int n = thisNode.Children.length;
                while (--n >= 0) {
                    // Remove child node from the node that's getting collapsed.
                    TreeNode childNodeToBeReparented = thisNode.remove(n);
                    // Reparent the child.
                    newParent.add(childNodeToBeReparented);
                    // Add the child to be processed.
                    nodes.push(childNodeToBeReparented);
                }
                // Remove the node that's being collapsed.
                thisNode.removeFromParent();
            }
            // Collapse leaf node: remove this node.
            else {
                thisNode.removeFromParent();
            }
        } else {
            // Normal node: schedule its children (reverse push keeps DFS pre-order).
            if (thisNode.Children.length > 0) {
                int n = thisNode.Children.length;
                while (--n >= 0)
                    nodes.push(thisNode.Children[n]);
            }
        }
    }
}
From source file:org.eclipse.che.vfs.impl.fs.LocalFileSystemTest.java
/**
 * Asserts that two directory trees are identical: same structure, same entry
 * names, and byte-identical file contents.
 *
 * @param a                first directory root
 * @param b                second directory root
 * @param checkServiceDirs when {@code true}, service directories are included
 *                         in the comparison; otherwise they are filtered out
 * @throws IOException if reading a file from either tree fails
 */
protected void compareDirectories(java.io.File a, java.io.File b, boolean checkServiceDirs) throws IOException {
    if (!a.isDirectory() || !b.isDirectory()) {
        fail();
    }
    // Depth-first walk over paired directories.
    LinkedList<Pair<java.io.File, java.io.File>> pending = new LinkedList<>();
    pending.add(new Pair<>(a, b));
    while (!pending.isEmpty()) {
        Pair<java.io.File, java.io.File> pair = pending.pop();
        java.io.File[] leftEntries = pair.first.listFiles(checkServiceDirs ? null : SERVICE_DIR_FILTER);
        java.io.File[] rightEntries = pair.second.listFiles(checkServiceDirs ? null : SERVICE_DIR_FILTER);
        if (leftEntries == null || rightEntries == null || leftEntries.length != rightEntries.length) {
            fail();
        }
        // Sort both sides so entries line up positionally by name.
        Arrays.sort(leftEntries);
        Arrays.sort(rightEntries);
        for (int i = 0; i < leftEntries.length; i++) {
            java.io.File left = leftEntries[i];
            java.io.File right = rightEntries[i];
            if (!left.getName().equals(right.getName())) {
                fail();
            }
            if (left.isFile()) {
                // Regular files: compare contents stream-to-stream.
                try (FileInputStream in1 = new FileInputStream(left);
                        FileInputStream in2 = new FileInputStream(right)) {
                    compareStreams(in1, in2);
                }
            } else {
                // Subdirectories: descend later.
                pending.push(new Pair<>(left, right));
            }
        }
    }
}
From source file:org.jembi.rhea.impl.ApelonServiceImpl.java
/**
 * Export a specified namespace as a CSV string. Traversal is done depth-first.
 *
 * The traversal is iterative: {@code breadth} is the current sibling list,
 * {@code i} the index within it, and {@code cStack} saves (breadth, i) frames
 * so the walk can resume a parent level after finishing a subtree.
 *
 * @param namespaceId the Apelon namespace to export
 * @return CSV text: header row "Code","Name",&lt;property names...&gt;, then one row per term
 * @throws TerminologyService.TSException wrapping any failure during the export
 */
public String exportNamespace(int namespaceId) throws TerminologyService.TSException {
    StringBuilder res = new StringBuilder();
    try {
        LinkedList<_TSTreeNode> cStack = new LinkedList<_TSTreeNode>();
        List<TSTerm> breadth = getRootTerms(namespaceId);
        List<TSProperty> props = getAllPropertyTypes(namespaceId);
        int i = 0;
        ThesaurusConceptQuery cQuery = ThesaurusConceptQuery.createInstance(getConn());
        // Header row: Code, Name, then one column per namespace property type.
        // NOTE(review): values are wrapped in quotes but embedded quotes are not
        // escaped — confirm term names/values can never contain '"'.
        res.append("\"Code\",\"Name\"");
        for (TSProperty prop : props)
            res.append(",\"" + prop.getName() + "\"");
        res.append("\n");
        while (i < breadth.size()) {
            ApelonTerm term = (ApelonTerm) breadth.get(i);
            // properties aren't being fetched for sub-concepts, so look up the term to fetch its properties
            DTSProperty[] termProps = cQuery.findConceptById(term.concept.getId(), namespaceId, asd)
                    .getFetchedProperties();
            res.append("\"" + term.getCode() + "\",\"" + term.getName() + "\"");
            // Emit one cell per property type, empty ("") when the term lacks it.
            for (TSProperty prop : props) {
                boolean addedProp = false;
                for (DTSProperty termProp : termProps) {
                    if (termProp.getName().equals(prop.getName())) {
                        res.append(",\"" + termProp.getValue() + "\"");
                        addedProp = true;
                    }
                }
                if (!addedProp)
                    res.append(",\"\"");
            }
            res.append("\n");
            // Descend: save the current level/index and switch to the children.
            if (term.getHasSubConcepts()) {
                cStack.push(new _TSTreeNode(breadth, i));
                breadth = term.getSubConcepts();
                i = 0;
                continue;
            }
            // Ascend: while the current level is exhausted, pop back to the
            // saved parent frame (i++ below then advances past the parent).
            while (i + 1 == breadth.size() && !cStack.isEmpty()) {
                _TSTreeNode node = cStack.pop();
                breadth = node.breadth;
                i = node.i;
            }
            i++;
        }
    } catch (Exception ex) {
        // Wrap everything in the service's checked exception, preserving the cause.
        throw new TerminologyService.TSException(ex);
    }
    return res.toString();
}