List of usage examples for java.util.Deque#addFirst(E), collected from open-source projects.
void addFirst(E e);
From source file:cc.kave.commons.pointsto.analysis.unification.UnificationAnalysisVisitorContext.java
/** * Reruns the unification until all lazily added locations have propagated * and no more changes are detected./*from ww w . jav a 2s. co m*/ * * {@link LocationIdentifier} are added lazily to * {@link ExtendedReferenceLocation} instances. If a location is added to an * already unified {@link ExtendedReferenceLocation}, the unification has to * be applied again to ensure correctness of the result. */ private void finalizePendingUnifications() { Deque<Pair<ReferenceLocation, ReferenceLocation>> worklist = new ArrayDeque<>(); for (Map.Entry<ReferenceLocation, ReferenceLocation> locations : pendingUnifications.entries()) { ReferenceLocation refLoc1 = locations.getKey(); ReferenceLocation refLoc2 = locations.getValue(); int loc1Identifiers = refLoc1.getIdentifiers().size(); int loc2Identifiers = refLoc2.getIdentifiers().size(); if (loc1Identifiers != loc2Identifiers) { worklist.addFirst(ImmutablePair.of(refLoc1, refLoc2)); } } while (!worklist.isEmpty()) { Pair<ReferenceLocation, ReferenceLocation> locations = worklist.removeFirst(); ReferenceLocation loc1 = locations.getLeft(); ReferenceLocation loc2 = locations.getRight(); int previousIdentifiersLoc1 = loc1.getIdentifiers().size(); int previousIdentifiersLoc2 = loc2.getIdentifiers().size(); unify(loc1, loc2); updateUnificationWorklist(worklist, previousIdentifiersLoc1, loc1, loc2); updateUnificationWorklist(worklist, previousIdentifiersLoc2, loc2, loc1); } }
From source file:de.interactive_instruments.ShapeChange.Target.ArcGISWorkspace.ArcGISWorkspace.java
private int establishEAPackageHierarchy(ClassInfo ci, int mainWorkspaceSubPkgId) throws EAException { // get path up to but not including the application schema package Deque<PackageInfo> pathToAppSchemaAsStack = new ArrayDeque<PackageInfo>(); if (ci.pkg() != this.appSchemaPkg) { PackageInfo pkg = ci.pkg();// w w w . j av a 2 s. c o m while (pkg != null && pkg != this.appSchemaPkg) { pathToAppSchemaAsStack.addFirst(pkg); pkg = pkg.owner(); } } if (pathToAppSchemaAsStack.isEmpty()) { // class is situated in app schema package and thus shall be created // in main workspace sub-package return mainWorkspaceSubPkgId; } else { // walk down the path, create packages as needed Map<PackageInfo, Integer> eaPkgIdByModelPkg = eaPkgIdByModelPkg_byWorkspaceSubPkgId .get(mainWorkspaceSubPkgId); Integer eaParentPkgId = mainWorkspaceSubPkgId; Integer eaPkgId = null; while (!pathToAppSchemaAsStack.isEmpty()) { PackageInfo pi = pathToAppSchemaAsStack.removeFirst(); if (eaPkgIdByModelPkg.containsKey(pi)) { eaPkgId = eaPkgIdByModelPkg.get(pi); } else { // create the EA package eaPkgId = EAModelUtil.createEAPackage(rep, pi, eaParentPkgId); eaPkgIdByModelPkg.put(pi, eaPkgId); } eaParentPkgId = eaPkgId; } return eaPkgId; } }
From source file:ict.ocrabase.main.java.client.bulkload.LoadHFiles.java
/**
 * Attempt to load the given load queue item into its target region server.
 * If the hfile boundary no longer fits into a region, physically splits
 * the hfile such that the new bottom half will fit, and adds the two
 * resultant hfiles back into the load queue.
 *
 * @param item   queue entry naming the hfile (and its column family) to load
 * @param conn   connection used to locate and talk to the region server
 * @param table  name of the destination table
 * @param queue  load queue; split halves are pushed back onto its front
 * @param config configuration used to open and read the hfile
 * @throws IOException if the hfile cannot be read
 */
private void tryLoad(final LoadQueueItem item, HConnection conn, final byte[] table,
        final Deque<LoadQueueItem> queue, Configuration config) throws IOException {
    final Path hfilePath = item.hfilePath;
    final FileSystem fs = hfilePath.getFileSystem(getConf());
    //FSDataInputStreamWrapper fsdis = new FSDataInputStreamWrapper(fs,hfilePath);
    //long fileSize = fs.getFileStatus(hfilePath).getLen();
    //FixedFileTrailer trailer =
    //     FixedFileTrailer.readFromStream(fsdis.getStream(false), fileSize);
    // Open the hfile only to read its first/last row keys, so the target
    // region can be located.
    HFile.Reader hfr = HFile.createReader(fs, hfilePath, new CacheConfig(config), config);
    //new HFileReaderV3(hfilePath, trailer, fsdis, 0, new CacheConfig(config), new HFileSystem(fs), config);
    //HFileReaderV3 hfr = new HFileReaderV3(hfilePath, null,
    //null, 0, new CacheConfig(config), new HFileSystem(fs), config);
    //HFileReaderV3(fs, hfilePath, null, false);
    final byte[] first, last;
    try {
        hfr.loadFileInfo();
        first = hfr.getFirstRowKey();
        last = hfr.getLastRowKey();
        System.out.println("hfr first row key:" + first);
        System.out.println("hfr last row key:" + last);
    } finally {
        // Always release the reader, even if loadFileInfo() throws.
        hfr.close();
    }
    LOG.info("Trying to load hfile=" + hfilePath + " first=" + Bytes.toStringBinary(first) + " last="
            + Bytes.toStringBinary(last));
    if (first == null || last == null) {
        // A missing first key implies a missing last key: the file is empty.
        assert first == null && last == null;
        LOG.info("hfile " + hfilePath + " has no entries, skipping");
        return;
    }
    // We use a '_' prefix which is ignored when walking directory trees
    // above.
    final Path tmpDir = new Path(item.hfilePath.getParent(), "_tmp");
    //RpcRetryingCallerFactory rpc_caller_factory =
    //     RpcRetryingCallerFactory.instantiate(conn.getConfiguration(),null);
    RegionServerCallable<Void> callable = // conn.getRegionServerWithRetries(
            new RegionServerCallable<Void>(conn, TableName.valueOf(Bytes.toString(table)), first) {
                public Void call(int arg) throws Exception {
                    LOG.debug("Going to connect to server " + location + "for row "
                            + Bytes.toStringBinary(row));
                    HRegionInfo hri = location.getRegionInfo();
                    if (!hri.containsRange(first, last)) {
                        // The region no longer covers the hfile's key range
                        // (presumably it split since the hfile was written —
                        // TODO confirm); split the hfile at the region end key
                        // and requeue both halves.
                        LOG.info("HFile at " + hfilePath + " no longer fits inside a single "
                                + "region. Splitting...");
                        HColumnDescriptor familyDesc = ((HConnection) this.connection)
                                .getHTableDescriptor(hri.getTable()).getFamily(item.family);
                        Path botOut = new Path(tmpDir, hri.getEncodedName() + ".bottom");
                        Path topOut = new Path(tmpDir, hri.getEncodedName() + ".top");
                        splitStoreFile(getConf(), hfilePath, familyDesc, hri.getEndKey(), botOut, topOut);
                        // Add these back at the *front* of the queue, so there's a lower
                        // chance that the region will just split again before we get there.
                        queue.addFirst(new LoadQueueItem(item.family, botOut));
                        queue.addFirst(new LoadQueueItem(item.family, topOut));
                        LOG.info("Successfully split into new HFiles " + botOut + " and " + topOut);
                        return null;
                    }
                    byte[] regionName = location.getRegionInfo().getRegionName();
                    // server.bulkLoadHFile(hfilePath.toString(), regionName, item.family);
                    final List<Pair<byte[], String>> famPaths = new ArrayList<Pair<byte[], String>>();
                    famPaths.add(Pair.newPair(item.family, hfilePath.toString()));
                    BulkLoadHFileRequest request = RequestConverter.buildBulkLoadHFileRequest(famPaths,
                            regionName, false);
                    this.getStub().bulkLoadHFile(null, request);
                    return null;
                }
            };
    // Run the callable with (effectively unbounded) retries.
    Void succcess = RpcRetryingCallerFactory.instantiate(conn.getConfiguration(), null).<Void>newCaller()
            .callWithRetries(callable, Integer.MAX_VALUE);
    System.out.println("loadHfiles- line:295");
    //rpc_caller_factory.<Void> newCaller().callWithRetries(callable);
}
From source file:ocr.sapphire.image.EdgeBasedImagePreprocessor.java
/**
 * Extracts edge paths (connected components of BLACK pixels) from the given
 * edge image and returns them as point arrays, normalized by the bounding box
 * of the biggest component.
 *
 * Stages: (1) trace paths starting at "bridge" pixels, (2) drop components
 * whose bounds lie inside another component's bounds, (3) repeatedly join
 * components with nearby endpoints, (4) keep the biggest components plus at
 * most one small one, (5) convert to normalized coordinate arrays.
 *
 * @param edgeData pixel buffer of size width*height with BLACK/WHITE values;
 *                 modified in place (traced pixels are cleared to WHITE)
 * @return at most componentCount point arrays, coordinates scaled relative to
 *         the biggest component's bounding box
 */
private Point[][] findEdgePoints(int[] edgeData) {
    List<Deque<Point>> components = new ArrayList<Deque<Point>>();
    // find close paths: scan for BLACK "bridge" pixels and trace the
    // component(s) on either side of them
    for (int y = 0; y < height; y++) {
        for (int x = 0; x < width; x++) {
            if (edgeData[x + y * width] == BLACK && isBridge(edgeData, x, y)) {
                // clear the bridge pixel itself, then trace its neighbors
                edgeData[x + y * width] = WHITE;
                Deque<Point> firstPart = null, secondPart = null;
                for (int k = 0; k < DX.length; k++) {
                    int x2 = x + DX[k];
                    int y2 = y + DY[k];
                    if (x2 < 0 || x2 >= width || y2 < 0 || y2 >= height) {
                        continue;
                    }
                    if (edgeData[x2 + y2 * width] == BLACK) {
                        Deque<Point> points = findConnectedComponent(edgeData, x2, y2);
                        if (firstPart == null) {
                            firstPart = points;
                        } else {
                            secondPart = points;
                        }
                    }
                }
                // NOTE(review): assumes a bridge pixel always has at least one
                // BLACK neighbor, i.e. firstPart != null here — TODO confirm
                // isBridge guarantees this, otherwise this is an NPE.
                firstPart.addFirst(new Point(x, y));
                if (secondPart != null) { // the path is not closed
                    join(firstPart, true, secondPart, true);
                }
                components.add(firstPart);
            }
        }
    }
    // remove contained components (by bounding-box containment)
    for (int i = 0; i < components.size() - 1; i++) {
        Rectangle r1 = getBounds(components.get(i));
        for (int j = i + 1; j < components.size();) {
            Rectangle r2 = getBounds(components.get(j));
            if (r1.contains(r2)) {
                components.remove(j);
            } else if (r2.contains(r1)) {
                // keep the containing component in slot i
                components.set(i, components.get(j));
                components.remove(j);
            } else {
                j++;
            }
        }
    }
    // try to connect some paths whose endpoints are close; repeat until a full
    // pass makes no more joins
    int connectedCount;
    do {
        connectedCount = 0;
        for (int i = 0; i < components.size() - 1; i++) {
            for (int j = i + 1; j < components.size(); j++) {
                Deque<Point> a = components.get(i);
                Deque<Point> b = components.get(j);
                // d0: sum of each path's own end-to-end gap; d1/d2: combined
                // gaps for the two join orientations; d3..d6: the four
                // individual endpoint-pair distances
                double d0 = d(a.getFirst(), a.getLast()) + d(b.getFirst(), b.getLast());
                double d1 = d(a.getFirst(), b.getFirst()) + d(a.getLast(), b.getLast());
                double d2 = d(a.getFirst(), b.getLast()) + d(a.getLast(), b.getFirst());
                double d3 = d(a.getFirst(), b.getFirst());
                double d4 = d(a.getFirst(), b.getLast());
                double d5 = d(a.getLast(), b.getFirst());
                double d6 = d(a.getLast(), b.getLast());
                if (d3 <= CLOSE_THRESHOLD && d3 <= d4) {
                    join(a, true, b, true);
                    components.remove(j);
                    connectedCount++;
                } else if (d4 <= CLOSE_THRESHOLD && d4 <= d3) {
                    join(a, true, b, false);
                    components.remove(j);
                    connectedCount++;
                } else if (d5 <= CLOSE_THRESHOLD && d5 <= d6) {
                    join(a, false, b, true);
                    components.remove(j);
                    connectedCount++;
                } else if (d6 <= CLOSE_THRESHOLD && d6 <= d5) {
                    join(a, false, b, false);
                    components.remove(j);
                    connectedCount++;
                } else if (d1 <= d0 && d1 <= d2) {
                    if (d3 < d6) {
                        join(a, true, b, true);
                    } else {
                        join(a, false, b, false);
                    }
                    components.remove(j);
                    connectedCount++;
                } else if (d2 <= d0 && d2 <= d1) {
                    if (d4 < d5) {
                        join(a, true, b, false);
                    } else {
                        join(a, false, b, true);
                    }
                    components.remove(j);
                    connectedCount++;
                }
                // NOTE(review): after a remove(j) the element that shifted into
                // slot j is skipped this pass; the enclosing do-while re-scans,
                // so joins are still found eventually — TODO confirm intended.
            } // end of for j
        } // end of for i
    } while (connectedCount > 0);
    // choose (componentCount) biggest components: keyed by negated size so the
    // TreeMap iterates biggest-first.
    // NOTE(review): two components of equal size share a key and the later one
    // silently replaces the earlier — TODO confirm this is acceptable.
    SortedMap<Integer, Deque<Point>> componentMap = new TreeMap<Integer, Deque<Point>>();
    for (Deque<Point> c : components) {
        componentMap.put(-c.size(), c);
    }
    // remove noise: drop tiny components (bounds <= 10x10), keeping only the
    // first tiny one encountered
    boolean firstPoint = true;
    for (Iterator<Entry<Integer, Deque<Point>>> iterator = componentMap.entrySet().iterator(); iterator
            .hasNext();) {
        Entry<Integer, Deque<Point>> entry = iterator.next();
        Rectangle r = getBounds(entry.getValue());
        if (r.width <= 10 && r.height <= 10) {
            if (firstPoint) {
                firstPoint = false;
            } else {
                iterator.remove();
            }
        }
    }
    // convert components: normalize points, to array; every component is
    // scaled by the bounds of the biggest (first-key) component
    int foundComponentCount = Math.min(componentCount, componentMap.size());
    componentArr = new Point[foundComponentCount][];
    Rectangle r = getBounds(componentMap.get(componentMap.firstKey()));
    for (int c = 0; c < foundComponentCount; c++) {
        int key = componentMap.firstKey();
        componentArr[c] = new Point[componentMap.get(key).size()];
        normalize(componentMap.get(key)).toArray(componentArr[c]);
        componentMap.remove(key);
        for (int i = 0; i < componentArr[c].length; i++) {
            componentArr[c][i].x = (componentArr[c][i].x - r.x) / r.width;
            componentArr[c][i].y = (componentArr[c][i].y - r.y) / r.height;
        }
    }
    return componentArr;
}
From source file:ocr.sapphire.image.EdgeBasedImagePreprocessor.java
/** * Join b to a, return a.// ww w .j av a 2 s . c om * @param a * @param afirst join at the first point of a? * @param b * @param bfirst join at the first point of b? * @return */ private static Deque<Point> join(Deque<Point> a, boolean afirst, Deque<Point> b, boolean bfirst) { if (!bfirst) { Collections.reverse((List<Point>) b); } // don't reverse a, may confuse the network if (afirst) { for (Point p : b) { a.addFirst(p); } } else { for (Point p : b) { a.addLast(p); } } return a; }
From source file:org.anarres.cpp.Main.java
static List<TokenS> replay(Deque<TokenS> input, List<Action> actions) { List<TokenS> result = new ArrayList<TokenS>(); for (Action action : actions) { // System.out.println("Action:" + action + " rest:" + input); if (action instanceof Skip) { TokenS actual = ((Skip) action).token; TokenS expected = input.removeFirst(); if (!expected.equals(actual)) { throw new RuntimeException("Skipping " + actual + ", found " + expected + " input " + input); }//from w w w.j a va 2 s . c o m result.add(actual); } else { Replace replace = (Replace) action; for (TokenS actual : replace.original) { TokenS expected = input.removeFirst(); if (!expected.equals(actual)) { System.err.println("At " + expected.token.getFile()); throw new RuntimeException( "Expected " + expected + " old " + actual + " instead\n" + replace.toJson()); } } List<TokenS> replSeq = new ArrayList<TokenS>(); for (MapSeg mapSeg : replace.mapping) { if (mapSeg instanceof New) { for (Token token : ((New) mapSeg).tokens) { replSeq.add(new TokenS(token, Empty.bag())); } } else { Sub sub = (Sub) mapSeg; Deque<TokenS> subInput = new LinkedList<>(); for (int i : sub.indicies) { subInput.add(replace.original.get(i)); } for (TokenS tokenS : replay(subInput, sub.actions)) { replSeq.add(tokenS); } } } for (int i = replSeq.size() - 1; i >= 0; i--) { TokenS tokenS = replSeq.get(i); input.addFirst(new TokenS(tokenS.token, tokenS.disables.plusAll(replace.disables))); } } } return result; }
From source file:org.apache.sling.etcd.testing.tree.Node.java
@Nonnull private Deque<String> path(@Nonnull Node node, @Nonnull Deque<String> acc) { Node parent = node.parent();//from w ww.j av a 2 s . c o m if (parent != null) { acc.addFirst(node.name()); return path(parent, acc); } else { return acc; } }
From source file:org.gdms.source.DefaultSourceManager.java
private void removeFromSchema(String name) { if (name.isEmpty()) { throw new IllegalArgumentException("Empty table name!"); }//from www. j a va 2 s. c o m // split on the dots '.' into // schema1.schema2.schema3.table1 String[] l = DOT.split(name); if (l.length <= 1) { // just a table, we remove it from the root schema schema.removeTable(name); } else { Deque<Schema> path = new ArrayDeque<Schema>(); path.add(schema); // we get down // to the last schema before the table for (int i = 0; i < l.length - 1; i++) { final Schema n = path.getFirst().getSubSchemaByName(l[i]); path.addFirst(n); } boolean stop = false; while (!path.isEmpty() && !stop) { // take the last schema in the path (top of the pile) final Schema n = path.pollFirst(); n.removeTable(l[l.length - 1]); if (n.getTableCount() != 0 || n.getSubSchemaNames().length != 0) { // the schema is still needed, we must not remove it stop = true; } else { Schema p = n.getParentSchema(); if (p != null) { p.removeSubSchema(n.getName()); } else { // we have reached root, it stays were it is... stop = true; } } } } }
From source file:org.jaffa.qm.util.PropertyFilter.java
private static void getFieldList(Class clazz, List<String> fieldList, String prefix, Deque<Class> classStack) throws IntrospectionException { //To avoid recursion, bail out if the input Class has already been introspected if (classStack.contains(clazz)) { if (log.isDebugEnabled()) log.debug("Fields from " + clazz + " prefixed by " + prefix + " will be ignored, since the class has already been introspected as per the stack " + classStack);/* w ww . j av a 2 s .c o m*/ return; } else classStack.addFirst(clazz); //Introspect the input Class BeanInfo beanInfo = Introspector.getBeanInfo(clazz); if (beanInfo != null) { PropertyDescriptor[] pds = beanInfo.getPropertyDescriptors(); if (pds != null) { for (PropertyDescriptor pd : pds) { if (pd.getReadMethod() != null && pd.getWriteMethod() != null) { String name = pd.getName(); String qualifieldName = prefix == null || prefix.length() == 0 ? name : prefix + '.' + name; Class type = pd.getPropertyType(); if (type.isArray()) type = type.getComponentType(); if (type == String.class || type == Boolean.class || Number.class.isAssignableFrom(type) || IDateBase.class.isAssignableFrom(type) || Currency.class.isAssignableFrom(type) || type.isPrimitive() || type.isEnum()) fieldList.add(qualifieldName); else getFieldList(type, fieldList, qualifieldName, classStack); } } } } classStack.removeFirst(); }
From source file:org.opensingular.form.wicket.util.WicketFormUtils.java
@SuppressWarnings("unchecked") private static void addTitle(Deque<String> titles, String title, SInstance instance, SInstance lastInstance) { if ((lastInstance != null) && (instance instanceof SIList<?>)) { int pos = findPos((SIList<SInstance>) instance, lastInstance); if (pos != -1) { titles.addFirst(title + " [" + pos + "]"); }// www . j a v a 2 s . c om } else { titles.addFirst(title); } }