List of usage examples for java.util.ArrayList.ensureCapacity
public void ensureCapacity(int minCapacity)
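ensureCapacity pre-sizes the list's backing array so that a known number of upcoming add calls does not trigger repeated incremental reallocations; it never changes the list's contents or its size(). A minimal sketch (the element count of 1000 is arbitrary):

import java.util.ArrayList;

public class EnsureCapacityDemo {
    public static void main(String[] args) {
        ArrayList<Integer> list = new ArrayList<>();
        list.ensureCapacity(1000); // grow the backing array once, up front
        for (int i = 0; i < 1000; i++) {
            list.add(i);
        }
        System.out.println(list.size()); // 1000
    }
}

Note that ensureCapacity is declared on ArrayList itself, not on the List interface, so the variable must be typed as ArrayList (as in every example below) for the call to compile.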
From source file:eu.stratosphere.pact.generic.io.FileInputFormat.java
protected FileBaseStatistics getFileStats(FileBaseStatistics cachedStats, Path filePath, FileSystem fs,
        ArrayList<FileStatus> files) throws IOException {
    // get the file info and check whether the cached statistics are still valid.
    final FileStatus file = fs.getFileStatus(filePath);
    long latestModTime = file.getModificationTime();

    // enumerate all files and check their modification time stamp.
    if (file.isDir()) {
        FileStatus[] fss = fs.listStatus(filePath);
        files.ensureCapacity(fss.length);

        for (FileStatus s : fss) {
            if (!s.isDir()) {
                files.add(s);
                latestModTime = Math.max(s.getModificationTime(), latestModTime);
            }
        }
    } else {
        files.add(file);
    }

    // check whether the cached statistics are still valid, if we have any
    if (cachedStats != null && latestModTime <= cachedStats.getLastModificationTime()) {
        return cachedStats;
    }

    // calculate the whole length
    long len = 0;
    for (FileStatus s : files) {
        len += s.getLen();
    }

    // sanity check
    if (len <= 0) {
        len = BaseStatistics.SIZE_UNKNOWN;
    }

    return new FileBaseStatistics(latestModTime, len, BaseStatistics.AVG_RECORD_BYTES_UNKNOWN);
}
From source file:com.chap.memo.memoNodes.MemoNode.java
/**
 * Remove this node (i.e. set its value to null and remove all arcs). This
 * method will delete entire subgraphs. Removing large subgraphs can be an
 * expensive operation.
 */
public void delete() {
    MemoNode current = this;
    ArrayList<UUID> todo = new ArrayList<UUID>(20);
    while (current != null) {
        UUID[] children = current.children.getNodesIds();
        UUID[] parents = current.parents.getNodesIds();
        current.update((byte[]) null);
        if (children.length > 0) {
            todo.ensureCapacity(todo.size() + children.length);
            todo.addAll(Arrays.asList(children));
            current.children.clear();
        }
        if (parents.length > 0) {
            current.parents.clear();
        }
        if (todo.size() > 0) {
            current = new MemoNode(todo.remove(0));
        } else {
            break;
        }
    }
}
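The worklist traversal above reserves todo.size() + children.length before the bulk addAll, the standard way to pre-size an append onto a list that already holds elements: the argument is the required total capacity, not the increment. A minimal sketch of just that step (names and values are illustrative):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.UUID;

public class ReserveForAppendDemo {
    public static void main(String[] args) {
        ArrayList<UUID> todo = new ArrayList<>(20);
        todo.add(UUID.randomUUID()); // the worklist already holds an element
        UUID[] children = { UUID.randomUUID(), UUID.randomUUID() };
        // reserve for the existing elements plus the batch about to be appended
        todo.ensureCapacity(todo.size() + children.length);
        todo.addAll(Arrays.asList(children));
        System.out.println(todo.size()); // 3
    }
}

Since ArrayList.addAll already grows the backing array in one step based on the incoming collection's size, the explicit ensureCapacity here is belt-and-braces rather than strictly required.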
From source file:eu.stratosphere.api.common.io.FileInputFormat.java
protected FileBaseStatistics getFileStats(FileBaseStatistics cachedStats, Path filePath, FileSystem fs,
        ArrayList<FileStatus> files) throws IOException {
    // get the file info and check whether the cached statistics are still valid.
    final FileStatus file = fs.getFileStatus(filePath);
    long latestModTime = file.getModificationTime();

    // enumerate all files and check their modification time stamp.
    if (file.isDir()) {
        FileStatus[] fss = fs.listStatus(filePath);
        files.ensureCapacity(fss.length);

        for (FileStatus s : fss) {
            if (!s.isDir()) {
                files.add(s);
                latestModTime = Math.max(s.getModificationTime(), latestModTime);
                testForUnsplittable(s);
            }
        }
    } else {
        files.add(file);
        testForUnsplittable(file);
    }

    // check whether the cached statistics are still valid, if we have any
    if (cachedStats != null && latestModTime <= cachedStats.getLastModificationTime()) {
        return cachedStats;
    }

    // calculate the whole length
    long len = 0;
    for (FileStatus s : files) {
        len += s.getLen();
    }

    // sanity check
    if (len <= 0) {
        len = BaseStatistics.SIZE_UNKNOWN;
    }

    return new FileBaseStatistics(latestModTime, len, BaseStatistics.AVG_RECORD_BYTES_UNKNOWN);
}
From source file:FastArrayList.java
/**
 * Increase the capacity of this <code>ArrayList</code> instance, if
 * necessary, to ensure that it can hold at least the number of elements
 * specified by the minimum capacity argument.
 *
 * @param capacity The new minimum capacity
 */
public void ensureCapacity(int capacity) {
    if (fast) {
        // copy-on-write: clone, resize the clone, then swap the reference,
        // so concurrent readers never observe the list mid-resize
        synchronized (this) {
            ArrayList temp = (ArrayList) list.clone();
            temp.ensureCapacity(capacity);
            list = temp;
        }
    } else {
        // slow mode: plain synchronized delegation
        synchronized (list) {
            list.ensureCapacity(capacity);
        }
    }
}
From source file:eulermind.MindDB.java
public ArrayList<EdgeVertex> getChildrenAndReferents(Vertex parent) {
    List<EdgeVertexId> outEdgeIdPairs = getOutEdgeVertexIds(parent);

    ArrayList<EdgeVertex> children = new ArrayList<EdgeVertex>();
    children.ensureCapacity(outEdgeIdPairs.size());

    for (EdgeVertexId edgeVertexId : outEdgeIdPairs) {
        children.add(edgeVertexId.getEdgeVertex());
    }
    return children;
}
From source file:ca.uhn.fhir.parser.JsonParser.java
private void addToHeldIds(int theValueIdx, ArrayList<String> theListToAddTo, String theId) {
    theListToAddTo.ensureCapacity(theValueIdx);
    while (theListToAddTo.size() <= theValueIdx) {
        theListToAddTo.add(null);
    }
    if (theListToAddTo.get(theValueIdx) == null) {
        theListToAddTo.set(theValueIdx, theId);
    }
}
From source file:ca.uhn.fhir.parser.JsonParser.java
private boolean addToHeldComments(int valueIdx, List<String> theCommentsToAdd,
        ArrayList<ArrayList<String>> theListToAddTo) {
    if (theCommentsToAdd.size() > 0) {
        theListToAddTo.ensureCapacity(valueIdx);
        while (theListToAddTo.size() <= valueIdx) {
            theListToAddTo.add(null);
        }
        if (theListToAddTo.get(valueIdx) == null) {
            theListToAddTo.set(valueIdx, new ArrayList<String>());
        }
        theListToAddTo.get(valueIdx).addAll(theCommentsToAdd);
        return true;
    } else {
        return false;
    }
}
From source file:org.alfresco.repo.web.scripts.solr.NodesMetaDataGet.java
@Override
protected Map<String, Object> executeImpl(WebScriptRequest req, Status status) {
    try {
        Content content = req.getContent();
        if (content == null) {
            throw new WebScriptException("Failed to convert request to String");
        }

        JSONObject o = new JSONObject(content.getContent());

        List<Long> nodeIds = null;
        if (o.has("nodeIds")) {
            JSONArray jsonNodeIds = o.getJSONArray("nodeIds");
            nodeIds = new ArrayList<Long>(jsonNodeIds.length());
            for (int i = 0; i < jsonNodeIds.length(); i++) {
                Long nodeId = jsonNodeIds.getLong(i);
                nodeIds.add(nodeId);
            }
        }

        Long fromNodeId = o.has("fromNodeId") ? o.getLong("fromNodeId") : null;
        Long toNodeId = o.has("toNodeId") ? o.getLong("toNodeId") : null;

        // 0 or Integer.MAX_VALUE => ignore
        int maxResults = o.has("maxResults") ? o.getInt("maxResults") : 0;

        int size = 0;
        if (maxResults != 0 && maxResults != Integer.MAX_VALUE) {
            size = maxResults;
        } else if (nodeIds != null) {
            size = nodeIds.size();
        } else if (fromNodeId != null && toNodeId != null) {
            if ((toNodeId.longValue() - fromNodeId.longValue()) > Integer.MAX_VALUE) {
                throw new WebScriptException("Too many nodes expected, try changing the criteria");
            }
            size = (int) (toNodeId - fromNodeId);
        }
        final boolean noSizeCalculated = (size == 0);

        // filters, defaults are 'true'
        MetaDataResultsFilter filter = new MetaDataResultsFilter();
        if (o.has("includeAclId")) {
            filter.setIncludeAclId(o.getBoolean("includeAclId"));
        }
        if (o.has("includeAspects")) {
            filter.setIncludeAspects(o.getBoolean("includeAspects"));
        }
        if (o.has("includeNodeRef")) {
            filter.setIncludeNodeRef(o.getBoolean("includeNodeRef"));
        }
        if (o.has("includeOwner")) {
            filter.setIncludeOwner(o.getBoolean("includeOwner"));
        }
        if (o.has("includeProperties")) {
            filter.setIncludeProperties(o.getBoolean("includeProperties"));
        }
        if (o.has("includePaths")) {
            filter.setIncludePaths(o.getBoolean("includePaths"));
        }
        if (o.has("includeType")) {
            filter.setIncludeType(o.getBoolean("includeType"));
        }
        if (o.has("includeParentAssociations")) {
            filter.setIncludeParentAssociations(o.getBoolean("includeParentAssociations"));
        }
        if (o.has("includeChildIds")) {
            filter.setIncludeChildIds(o.getBoolean("includeChildIds"));
        }
        if (o.has("includeTxnId")) {
            filter.setIncludeTxnId(o.getBoolean("includeTxnId"));
        }

        final ArrayList<FreemarkerNodeMetaData> nodesMetaData = new ArrayList<FreemarkerNodeMetaData>(
                size > 0 ? size : INITIAL_DEFAULT_SIZE);

        NodeMetaDataParameters params = new NodeMetaDataParameters();
        params.setNodeIds(nodeIds);
        params.setFromNodeId(fromNodeId);
        params.setToNodeId(toNodeId);
        params.setMaxResults(maxResults);

        solrTrackingComponent.getNodesMetadata(params, filter, new NodeMetaDataQueryCallback() {
            private int counter = BATCH_SIZE;
            private int numBatches = 0;

            @Override
            public boolean handleNodeMetaData(NodeMetaData nodeMetaData) {
                // need to perform data structure conversions that are compatible with Freemarker,
                // e.g. Serializable -> String, QName -> String (because map keys must be string, number)
                try {
                    FreemarkerNodeMetaData fNodeMetaData = new FreemarkerNodeMetaData(solrSerializer,
                            nodeMetaData);
                    nodesMetaData.add(fNodeMetaData);
                } catch (Exception e) {
                    throw new AlfrescoRuntimeException("Problem converting to Freemarker using node "
                            + nodeMetaData.getNodeRef().toString(), e);
                }

                if (noSizeCalculated && --counter == 0) {
                    counter = BATCH_SIZE;
                    nodesMetaData.ensureCapacity(++numBatches * BATCH_SIZE);
                }

                return true;
            }
        });

        Map<String, Object> model = new HashMap<String, Object>(1, 1.0f);
        model.put("nodes", nodesMetaData);
        model.put("filter", filter);

        if (logger.isDebugEnabled()) {
            logger.debug("Result: \n\tRequest: " + req + "\n\tModel: " + model);
        }

        return model;
    } catch (IOException e) {
        throw new WebScriptException("IO exception parsing request", e);
    } catch (JSONException e) {
        throw new WebScriptException("Invalid JSON", e);
    }
}
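When the total result count is unknown up front (the noSizeCalculated case above), the callback grows the list's capacity one fixed-size batch at a time instead of leaving every resize to ArrayList's internal growth policy. A minimal standalone sketch of that batching idiom, with an illustrative BATCH_SIZE and a plain loop standing in for the Solr callback:

import java.util.ArrayList;

public class BatchGrowthDemo {
    private static final int BATCH_SIZE = 10;

    public static void main(String[] args) {
        ArrayList<Integer> results = new ArrayList<>(BATCH_SIZE);
        int counter = BATCH_SIZE;
        int numBatches = 0;
        for (int i = 0; i < 37; i++) { // 37 stands in for a result stream of unknown length
            results.add(i);
            if (--counter == 0) { // the current batch is full
                counter = BATCH_SIZE;
                numBatches++;
                // reserve room for the next batch in one step
                results.ensureCapacity((numBatches + 1) * BATCH_SIZE);
            }
        }
        System.out.println(results.size()); // 37
    }
}

Because ensureCapacity is a no-op whenever the backing array is already large enough, calling it periodically like this is safe; treat it as an optimization for very large result sets rather than a correctness requirement.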
From source file:ca.uhn.fhir.parser.JsonParser.java
private boolean addToHeldExtensions(int valueIdx, List<? extends IBaseExtension<?, ?>> ext,
        ArrayList<ArrayList<HeldExtension>> list, boolean theIsModifier, CompositeChildElement theChildElem) {
    if (ext.size() > 0) {
        list.ensureCapacity(valueIdx);
        while (list.size() <= valueIdx) {
            list.add(null);
        }
        if (list.get(valueIdx) == null) {
            list.set(valueIdx, new ArrayList<JsonParser.HeldExtension>());
        }
        for (IBaseExtension<?, ?> next : ext) {
            list.get(valueIdx).add(new HeldExtension(next, theIsModifier, theChildElem));
        }
        return true;
    } else {
        return false;
    }
}
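The three JsonParser helpers shown above (addToHeldIds, addToHeldComments, addToHeldExtensions) share one idiom: treat the ArrayList as a sparse, index-addressable array by reserving capacity for the target index and padding with nulls up to it. A generic sketch of that idiom; the setSparse name is ours, not HAPI FHIR's, and we reserve index + 1 slots since an element at position index needs one slot more than index:

import java.util.ArrayList;

public class SparseListDemo {

    // Set list[index] = value, growing the list with nulls as needed.
    static <T> void setSparse(ArrayList<T> list, int index, T value) {
        list.ensureCapacity(index + 1); // one reallocation instead of several while padding
        while (list.size() <= index) {
            list.add(null);
        }
        list.set(index, value);
    }

    public static void main(String[] args) {
        ArrayList<String> ids = new ArrayList<>();
        setSparse(ids, 3, "id-3");
        System.out.println(ids); // [null, null, null, id-3]
    }
}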
From source file:geogebra.kernel.implicit.GeoImplicitPoly.java
private void startPath(int w, int h, double x, double y, GeoLocus locus) {
    double sx = x;
    double sy = y;
    double lx = Double.NaN; // no previous point
    double ly = Double.NaN;
    boolean first = true;

    double stepSize = START_STEP_SIZE * Math.max(scaleX, scaleY);
    double startX = x;
    double startY = y;

    ArrayList<MyPoint> firstDirPoints = new ArrayList<MyPoint>();
    firstDirPoints.add(new MyPoint(x, y, true));

    int s = 0;
    int lastW = w;
    int lastH = h;
    int startW = w;
    int startH = h;
    int stepCount = 0;
    boolean nearSing = false;
    double lastGradX = Double.POSITIVE_INFINITY;
    double lastGradY = Double.POSITIVE_INFINITY;

    while (true) {
        s++;
        boolean reachedSingularity = false;
        boolean reachedEnd = false;
        if (!Double.isNaN(lx) && !Double.isNaN(ly)) {
            if ((scaledNormSquared(startX - sx, startY - sy) < MAX_STEP_SIZE * MAX_STEP_SIZE)
                    && (scaledNormSquared(startX - sx, startY - sy) < scaledNormSquared(startX - lx,
                            startY - ly))) {
                /* loop found */
                if (firstDirPoints != null) {
                    MyPoint firstPoint = firstDirPoints.get(0);
                    firstPoint.lineTo = false;
                    locus.getPoints().addAll(firstDirPoints);
                }
                locus.insertPoint(x, y, true);
                return;
            }
        }
        while (sx < grid[w][h].x) {
            if (w > 0)
                w--;
            else {
                reachedEnd = true;
                break;
            }
        }
        while (sx > grid[w][h].x + grid[w][h].width) {
            if (w < grid.length - 1)
                w++;
            else {
                reachedEnd = true;
                break;
            }
        }
        while (sy < grid[w][h].y) {
            if (h > 0)
                h--;
            else {
                reachedEnd = true;
                break;
            }
        }
        while (sy > grid[w][h].y + grid[w][h].height) {
            if (h < grid[w].length - 1)
                h++;
            else {
                reachedEnd = true;
                break;
            }
        }
        if (reachedEnd) { // we reached the boundary
            boundaryIntersectCollection.add(new double[] { sx, sy });
        }
        if (lastW != w || lastH != h) {
            int dw = (int) Math.signum(lastW - w);
            int dh = (int) Math.signum(lastH - h);
            for (int i = 0; i <= Math.abs(lastW - w); i++) {
                for (int j = 0; j <= Math.abs(lastH - h); j++) {
                    remember[lastW - dw * i][lastH - dh * j] = false;
                }
            }
        }
        lastW = w;
        lastH = h;

        double gradX = 0;
        double gradY = 0;
        if (!reachedEnd) {
            gradX = evalDiffXPolyAt(sx, sy, true);
            gradY = evalDiffYPolyAt(sx, sy, true);

            /*
             * Dealing with singularities: tries to reach the singularity but stops there.
             * Assuming that the singularity is on or at least near the curve. (Since first
             * derivative is zero this can be assumed for 'nice' 2nd derivative)
             */
            if (nearSing || (Math.abs(gradX) < NEAR_SING && Math.abs(gradY) < NEAR_SING)) {
                for (double[] pair : singularitiesCollection) { // check if this singularity is already known
                    if ((scaledNormSquared(pair[0] - sx, pair[1] - sy) < SING_RADIUS * SING_RADIUS)) {
                        sx = pair[0];
                        sy = pair[1];
                        reachedSingularity = true;
                        reachedEnd = true;
                        break;
                    }
                }
                if (!reachedEnd) {
                    if (gradX * gradX + gradY * gradY > lastGradX * lastGradX + lastGradY * lastGradY) {
                        // going away from the singularity, stop here
                        singularitiesCollection.add(new double[] { sx, sy });
                        reachedEnd = true;
                        reachedSingularity = true;
                    } else if (Math.abs(gradX) < MIN_GRAD && Math.abs(gradY) < MIN_GRAD) {
                        // singularity
                        singularitiesCollection.add(new double[] { sx, sy });
                        reachedEnd = true;
                        reachedSingularity = true;
                    }
                    lastGradX = gradX;
                    lastGradY = gradY;
                    nearSing = true;
                }
            }
        }
        double a = 0, nX = 0, nY = 0;
        if (!reachedEnd) {
            a = 1 / (Math.abs(gradX) + Math.abs(gradY)); // trying to increase numerical stability
            gradX = a * gradX;
            gradY = a * gradY;
            a = Math.sqrt(gradX * gradX + gradY * gradY);
            gradX = gradX / a; // scale vector
            gradY = gradY / a;
            nX = -gradY;
            nY = gradX;
            if (!Double.isNaN(lx) && !Double.isNaN(ly)) {
                double c = (lx - sx) * nX + (ly - sy) * nY;
                if (c > 0) {
                    nX = -nX;
                    nY = -nY;
                }
            } else {
                if (!first) { // other direction now
                    nX = -nX;
                    nY = -nY;
                }
            }
            lx = sx;
            ly = sy;
        }
        while (!reachedEnd) {
            sx = lx + nX * stepSize; // go in "best" direction
            sy = ly + nY * stepSize;
            int e = epsSignum(evalPolyAt(sx, sy, true));
            if (e == 0) {
                if (stepSize * 2 <= MAX_STEP_SIZE * Math.max(scaleX, scaleY))
                    stepSize *= 2;
                break;
            } else {
                gradX = evalDiffXPolyAt(sx, sy, true);
                gradY = evalDiffYPolyAt(sx, sy, true);
                if (Math.abs(gradX) < MIN_GRAD && Math.abs(gradY) < MIN_GRAD) { // singularity
                    stepSize /= 2;
                    if (stepSize > MIN_STEP_SIZE * Math.max(scaleX, scaleY))
                        continue;
                    else {
                        singularitiesCollection.add(new double[] { sx, sy });
                        reachedEnd = true;
                        break;
                    }
                }
                a = Math.sqrt(gradX * gradX + gradY * gradY);
                gradX *= stepSize / a;
                gradY *= stepSize / a;
                if (e > 0) {
                    gradX = -gradX;
                    gradY = -gradY;
                }
                int e1 = epsSignum(evalPolyAt(sx + gradX, sy + gradY, true));
                if (e1 == 0) {
                    sx = sx + gradX;
                    sy = sy + gradY;
                    break;
                }
                if (e1 != e) {
                    a = bisec(sx, sy, sx + gradX, sy + gradY);
                    sx += a * gradX;
                    sy += a * gradY;
                    break;
                } else {
                    stepSize /= 2;
                    if (stepSize > MIN_STEP_SIZE * Math.max(scaleX, scaleY))
                        continue;
                    else {
                        reachedEnd = true;
                        break;
                    }
                }
            }
        }
        if (!reachedEnd || reachedSingularity) {
            if (reachedSingularity || ((lx - sx) * (lx - sx) + (ly - sy) * (ly - sy) > minGap * minGap)) {
                if (firstDirPoints != null) {
                    firstDirPoints.add(new MyPoint(sx, sy, true));
                } else {
                    locus.insertPoint(sx, sy, true);
                }
                stepCount++;
            }
        }
        if (reachedEnd) {
            if (!first) {
                return; // reached the end two times
            }
            lastGradX = Double.POSITIVE_INFINITY;
            lastGradY = Double.POSITIVE_INFINITY;

            /* we reached the end for the first time and now save the points into the locus */
            ArrayList<MyPoint> pointList = locus.getMyPointList();
            if (firstDirPoints.size() > 0) {
                MyPoint lastPoint = firstDirPoints.get(firstDirPoints.size() - 1);
                lastPoint.lineTo = false;
                pointList.ensureCapacity(pointList.size() + firstDirPoints.size());
                for (int i = firstDirPoints.size() - 1; i >= 0; i--) {
                    pointList.add(firstDirPoints.get(i));
                }
            }
            firstDirPoints = null;
            sx = startX;
            sy = startY;
            lx = Double.NaN;
            ly = Double.NaN;
            w = startW;
            h = startH;
            lastW = w;
            lastH = h;
            first = false; // start again with the other direction
            reachedEnd = false;
            reachedSingularity = false;
            nearSing = false;
        }
    }
}