List of usage examples for java.util LinkedList push
public void push(E e)
From source file:de.dfki.madm.anomalydetection.evaluator.cluster_based.CMGOSEvaluator.java
/**
 * Inserts {@code ret} into {@code map} keyed by the determinant of its covariance
 * matrix, then evicts the largest-determinant entry once more than {@code count}
 * distinct determinants are present, so the map tracks the {@code count} smallest.
 *
 * @param map   buckets of covariance matrices keyed by determinant (mutated in place)
 * @param ret   the covariance matrix to insert
 * @param count maximum number of distinct determinant keys to keep
 * @return the same (mutated) map, for call chaining
 */
private HashMap<Double, LinkedList<CovarianceMatrix>> getSorted(HashMap<Double, LinkedList<CovarianceMatrix>> map,
        CovarianceMatrix ret, int count) {
    Matrix mh = new Matrix(ret.getCovMat());
    double det = mh.det();
    // Matrices with an identical determinant share one bucket. The original code
    // re-put() an already-present list and duplicated both branches; a single
    // lookup is enough because the list is mutated in place.
    LinkedList<CovarianceMatrix> bucket = map.get(det);
    if (bucket == null) {
        bucket = new LinkedList<CovarianceMatrix>();
        map.put(det, bucket);
    }
    bucket.push(ret);
    // Keep only the 'count' smallest determinants: drop the largest key.
    // NOTE(review): only one key is evicted per call — assumes this method is
    // invoked once per insertion, so the map never exceeds count+1 keys.
    if (map.keySet().size() > count) {
        ArrayList<Double> sortedKeys = new ArrayList<Double>(map.keySet());
        Collections.sort(sortedKeys);
        map.remove(sortedKeys.get(sortedKeys.size() - 1));
    }
    return map;
}
From source file:org.eclipse.che.api.builder.internal.SourcesManagerImpl.java
/**
 * Synchronizes the local directory {@code downloadTo} with the sources available
 * at {@code downloadUrl}.
 * <p>
 * First walks the existing local tree and collects an MD5 checksum per file; if
 * any checksums exist they are POSTed to the server, which may answer with a
 * multipart response containing only the changed files ("updates", a zip) and a
 * JSON list of "removed-paths". Otherwise (or for a non-multipart response) the
 * whole payload is treated as a zip and unpacked over {@code downloadTo}.
 *
 * @param downloadUrl remote endpoint serving the sources
 * @param downloadTo  local target directory (existing content is diffed/updated)
 * @throws IOException on transport errors, an unexpected HTTP status, a
 *         malformed multipart/JSON response, or failure to delete a removed file
 */
private void download(String downloadUrl, java.io.File downloadTo) throws IOException {
    HttpURLConnection conn = null;
    try {
        // Collect an MD5 checksum for every file currently on disk, keyed by its
        // path relative to downloadTo, using an explicit stack for the tree walk.
        final LinkedList<java.io.File> q = new LinkedList<>();
        q.add(downloadTo);
        final long start = System.currentTimeMillis();
        final List<Pair<String, String>> md5sums = new LinkedList<>();
        while (!q.isEmpty()) {
            java.io.File current = q.pop();
            java.io.File[] list = current.listFiles();
            if (list != null) {
                for (java.io.File f : list) {
                    if (f.isDirectory()) {
                        q.push(f);
                    } else {
                        md5sums.add(Pair.of(com.google.common.io.Files.hash(f, Hashing.md5()).toString(),
                                downloadTo.toPath().relativize(f.toPath()).toString().replace("\\", "/")));
                        // Replacing "\" with "/" is needed for Windows support.
                    }
                }
            }
        }
        final long end = System.currentTimeMillis();
        if (md5sums.size() > 0) {
            LOG.debug("count md5sums of {} files, time: {}ms", md5sums.size(), (end - start));
        }
        conn = (HttpURLConnection) new URL(downloadUrl).openConnection();
        conn.setConnectTimeout(CONNECT_TIMEOUT);
        conn.setReadTimeout(READ_TIMEOUT);
        // Propagate the current user's auth token, when one is available.
        final EnvironmentContext context = EnvironmentContext.getCurrent();
        if (context.getUser() != null && context.getUser().getToken() != null) {
            conn.setRequestProperty(HttpHeaders.AUTHORIZATION, context.getUser().getToken());
        }
        if (!md5sums.isEmpty()) {
            // POST the "<md5> <relative-path>" lines so the server can compute a diff.
            conn.setRequestMethod(HttpMethod.POST);
            conn.setRequestProperty("Content-type", MediaType.TEXT_PLAIN);
            conn.setRequestProperty(HttpHeaders.ACCEPT, MediaType.MULTIPART_FORM_DATA);
            conn.setDoOutput(true);
            try (OutputStream output = conn.getOutputStream(); Writer writer = new OutputStreamWriter(output)) {
                for (Pair<String, String> pair : md5sums) {
                    writer.write(pair.first);
                    writer.write(' ');
                    writer.write(pair.second);
                    writer.write('\n');
                }
            }
        }
        final int responseCode = conn.getResponseCode();
        if (responseCode == HttpURLConnection.HTTP_OK) {
            final String contentType = conn.getHeaderField("content-type");
            if (contentType.startsWith(MediaType.MULTIPART_FORM_DATA)) {
                // Incremental answer: parts named "updates" (zip of changed files)
                // and "removed-paths" (JSON array of files to delete).
                final HeaderParameterParser headerParameterParser = new HeaderParameterParser();
                final String boundary = headerParameterParser.parse(contentType).get("boundary");
                try (InputStream in = conn.getInputStream()) {
                    MultipartStream multipart = new MultipartStream(in, boundary.getBytes());
                    boolean hasMore = multipart.skipPreamble();
                    while (hasMore) {
                        final Map<String, List<String>> headers = parseChunkHeader(
                                CharStreams.readLines(new StringReader(multipart.readHeaders())));
                        final List<String> contentDisposition = headers.get("content-disposition");
                        final String name = headerParameterParser.parse(contentDisposition.get(0)).get("name");
                        if ("updates".equals(name)) {
                            int length = -1;
                            List<String> contentLengthHeader = headers.get("content-length");
                            if (contentLengthHeader != null && !contentLengthHeader.isEmpty()) {
                                length = Integer.parseInt(contentLengthHeader.get(0));
                            }
                            // Large (or unknown-size) bodies are spooled to a temp
                            // file; small ones (<= 200 KiB) are unzipped from memory.
                            if (length < 0 || length > 204800) {
                                java.io.File tmp = java.io.File.createTempFile("tmp", ".zip", directory);
                                try {
                                    try (FileOutputStream fOut = new FileOutputStream(tmp)) {
                                        multipart.readBodyData(fOut);
                                    }
                                    ZipUtils.unzip(tmp, downloadTo);
                                } finally {
                                    if (tmp.exists()) {
                                        tmp.delete();
                                    }
                                }
                            } else {
                                final ByteArrayOutputStream bOut = new ByteArrayOutputStream(length);
                                multipart.readBodyData(bOut);
                                ZipUtils.unzip(new ByteArrayInputStream(bOut.toByteArray()), downloadTo);
                            }
                        } else if ("removed-paths".equals(name)) {
                            final ByteArrayOutputStream bOut = new ByteArrayOutputStream();
                            multipart.readBodyData(bOut);
                            final String[] removed = JsonHelper.fromJson(
                                    new ByteArrayInputStream(bOut.toByteArray()), String[].class, null);
                            for (String path : removed) {
                                java.io.File f = new java.io.File(downloadTo, path);
                                if (!f.delete()) {
                                    throw new IOException(String.format("Unable delete %s", path));
                                }
                            }
                        } else {
                            // To /dev/null :)
                            multipart.readBodyData(DEV_NULL);
                        }
                        hasMore = multipart.readBoundary();
                    }
                }
            } else {
                // Plain response: the whole body is a zip of the sources.
                try (InputStream in = conn.getInputStream()) {
                    ZipUtils.unzip(in, downloadTo);
                }
            }
        } else if (responseCode != HttpURLConnection.HTTP_NO_CONTENT) {
            // HTTP 204 means "already up to date"; anything else is an error.
            throw new IOException(
                    String.format("Invalid response status %d from remote server. ", responseCode));
        }
    } catch (ParseException | JsonParseException e) {
        throw new IOException(e.getMessage(), e);
    } finally {
        if (conn != null) {
            conn.disconnect();
        }
    }
}
From source file:de.dfki.madm.anomalydetection.evaluator.cluster_based.CMGOSEvaluator.java
public CovarianceMatrix Cstep(CovarianceMatrix covMat, double[][] data, int[] indexArray, int h) { HashMap<Double, LinkedList<Integer>> map = new HashMap<Double, LinkedList<Integer>>(); double[][] newMat = new double[h][]; Matrix mh = new Matrix(covMat.getCovMat()); if (mh.det() == 0) { covMat.addMinimum();/* ww w . ja v a 2 s .c o m*/ mh = new Matrix(covMat.getCovMat()); } mh = mh.inverse(); // Compute the distances d_old(i) for i = 1, ... , n. for (int index = 0; index < indexArray.length; index++) { double d = this.mahalanobisDistance(data[indexArray[index]], mh); if (map.containsKey(d)) { LinkedList<Integer> hilf = map.get(d); hilf.push(index); map.put(d, hilf); } else { LinkedList<Integer> hilf = new LinkedList<Integer>(); hilf.push(index); map.put(d, hilf); } } // Sort these distances ArrayList<Double> sortedList = new ArrayList<Double>(); sortedList.addAll(map.keySet()); Collections.sort(sortedList); // take the h smallest int count = 0; Iterator<Double> iter = sortedList.iterator(); while (iter.hasNext()) { Double key = iter.next(); for (Integer i : map.get(key)) { newMat[count] = data[indexArray[i]]; count++; if (count >= h) break; } if (count >= h) break; } return new CovarianceMatrix(newMat, this.numberOfThreads); }
From source file:org.knowrob.vis.model.util.algorithm.ACCUM.java
/**
 * Diffuses a vector field around one vertex, weighted by a Gaussian of width
 * {@code 1/sqrt(invsigma2)}. Ported from trimesh2 (2.12) (Szymon Rusinkiewicz,
 * Princeton University).
 *
 * @see <a href="https://github.com/fcole/qrtsc/tree/master/trimesh2">trimesh2</a>
 *
 * @param m
 *            CAD model analyzed
 * @param curvatures
 *            HashMap with model vertices as keys and their curvatures as values
 * @param flags
 *            map of vertex to long value, used as a "visited" marker for the
 *            current diffusion pass
 * @param flag_curr
 *            atomic counter; incremented once per call so each pass gets a
 *            fresh "visited" marker value
 * @param accum
 *            accumulator applied per contributing vertex
 * @param v
 *            index (into the model's vertex list) of the vertex to diffuse around
 * @param invsigma2
 *            inverse squared spread of the Gaussian used in weighting
 * @param flt
 *            3D vector field diffused based on the curvature (output, scaled by
 *            the accumulated weight at the end)
 */
@SuppressWarnings("javadoc")
private static void diffuse_vert_field(final Model m, HashMap<Vertex, Curvature> curvatures,
        Map<Vertex, Long> flags, AtomicLong flag_curr, final ACCUM accum, int v, float invsigma2, Vertex flt) {
    Vertex vert = m.getVertices().get(v);
    // Isolated vertex: no neighborhood to diffuse over, accumulate only itself.
    if (vert.getNeighbors().size() == 0) {
        // flt.set(0, 0, 0);
        accum.a(m, curvatures, vert, flt, .5f, vert);
        return;
    }
    // Seed the accumulator with the center vertex, weighted by its point area.
    // flt.set(0, 0, 0);
    accum.a(m, curvatures, vert, flt, vert.getPointarea(), vert);
    float sum_w = vert.getPointarea();
    final Vector3f nv = vert.getNormalVector();
    // Fresh marker value for this pass; avoids clearing the whole flags map.
    long flag_curr_val = flag_curr.incrementAndGet();
    flags.put(vert, flag_curr_val);
    // Breadth-first-style expansion over the neighborhood, pruned by weight.
    LinkedList<Vertex> boundary = new LinkedList<Vertex>();
    boundary.addAll(vert.getNeighbors());
    while (boundary.size() > 0) {
        Vertex n = boundary.pop();
        if (flags.get(n) != null && flags.get(n) == flag_curr_val)
            continue;
        flags.put(n, flag_curr_val);
        // Skip vertices facing away from the center vertex.
        if (nv.dot(n.getNormalVector()) <= 0.0f)
            continue;
        // Gaussian weight
        float w = wt(n, vert, invsigma2);
        if (w == 0.0f)
            continue;
        // Downweight things pointing in different directions
        w *= nv.dot(n.getNormalVector());
        // Surface area "belonging" to each point
        w *= n.getPointarea();
        // Accumulate weight times field at neighbor
        accum.a(m, curvatures, vert, flt, w, n);
        sum_w += w;
        // Expand the frontier with unvisited neighbors of this neighbor.
        for (Vertex nn : n.getNeighbors()) {
            if (flags.get(nn) != null && flags.get(nn) == flag_curr_val)
                continue;
            boundary.push(nn);
        }
    }
    // Normalize by the total accumulated weight.
    flt.scale(1 / sum_w);
}
From source file:de.dfki.madm.anomalydetection.evaluator.cluster_based.CMGOSEvaluator.java
/**
 * FAST-MCD (Minimum Covariance Determinant) estimator after Rousseeuw and
 * Van Driessen: finds the covariance matrix of the h-observation subset with
 * the (approximately) smallest determinant.
 *
 * @param data data set, one row per observation
 * @param h    subset size; clamped to the default [(n + p + 1)/2] when outside
 *             the valid range [(n + p + 1)/2, n]
 * @return the covariance matrix with the lowest determinant found
 */
private CovarianceMatrix fastMDC(double[][] data, int h) {
    CovarianceMatrix ret = null;
    int n = data.length;
    // If n is small (say, n <= 600) the exhaustive small-sample branch is used.
    int small = this.fastMCDPoints;
    int p = data[0].length;
    int low = (n + p + 1) / 2;
    // The default h is [(n + p + 1)/2], but the user may choose any integer
    // h with [(n + p + 1)/2] <= h <= n
    if (h < low || h > n) {
        h = low;
    }
    // If h = n, then the MCD location estimate T is the average of the
    // whole dataset, and the MCD scatter estimate S is its covariance
    // matrix. Report these and stop.
    if (h == n) {
        ret = new CovarianceMatrix(data, this.numberOfThreads);
    } else {
        // If p = 1 (univariate data), compute the MCD estimate (T, S) by
        // the exact algorithm of Rousseeuw and Leroy (1987, pp. 171-172) in
        // O(n log n) time; then stop. (Univariate shortcut not implemented:)
        // if (p == 1) {
        //     ret = new CovarianceMatrix(data, 1);
        // } else {
        if (n <= small) {
            int[] indexArray = new int[data.length];
            for (int i = 0; i < data.length; i++) {
                indexArray[i] = i;
            }
            HashMap<Double, LinkedList<CovarianceMatrix>> map = getInit10(data, indexArray, h, n, p);
            HashMap<Double, LinkedList<CovarianceMatrix>> map2 = new HashMap<Double, LinkedList<CovarianceMatrix>>();
            // Iterate the 10 candidate results with lowest det(S_3) to convergence,
            // bucketing the converged estimates by their determinant.
            for (LinkedList<CovarianceMatrix> covlist : map.values()) {
                for (CovarianceMatrix covmat : covlist) {
                    CovarianceMatrix pre = convergence(data, indexArray, covmat, h);
                    Matrix pre_mat = new Matrix(pre.getCovMat());
                    double pre_det = pre_mat.det();
                    if (map2.containsKey(pre_det)) {
                        LinkedList<CovarianceMatrix> hilf = map2.get(pre_det);
                        hilf.push(pre);
                        map2.put(pre_det, hilf);
                    } else {
                        LinkedList<CovarianceMatrix> hilf = new LinkedList<CovarianceMatrix>();
                        hilf.push(pre);
                        map2.put(pre_det, hilf);
                    }
                }
            }
            // report the solution (T, S) with lowest det(S)
            ArrayList<Double> sortedList = new ArrayList<Double>();
            sortedList.addAll(map2.keySet());
            Collections.sort(sortedList);
            ret = map2.get(sortedList.get(0)).getFirst();
        } else {
            // Large-sample branch: subsampled FAST-MCD, then pick lowest det(S).
            HashMap<Double, LinkedList<CovarianceMatrix>> map = fast(data, h, n, p);
            ArrayList<Double> sortedList = new ArrayList<Double>();
            sortedList.addAll(map.keySet());
            Collections.sort(sortedList);
            ret = map.get(sortedList.get(0)).getFirst();
        }
    }
    return ret;
}
From source file:org.metaservice.core.deb.util.GitCache.java
public void runDiscovery() { HashSet<String> parsed = new HashSet<>(); LinkedList<String> toParse = new LinkedList<>(); HashSet<String> dists = new HashSet<>(); toParse.add(startString);// w ww. jav a 2 s . c o m while (toParse.size() > 0) { String uri = toParse.pop(); try { String s = clientMetaservice.get(uri); if (s == null) { LOGGER.error("Couldn't load " + uri + " skipping."); continue; } Document document = Jsoup.parse(s, uri); parsed.add(uri); for (Element e : document.select("a:contains(next change)")) { String href = e.attr("abs:href"); if (!parsed.contains(href) && !toParse.contains(href)) { LOGGER.info("adding (next) ", href); toParse.push(href); } } for (Element e : document.select("a[href$=/]")) { String absHref = e.attr("abs:href"); String href = e.attr("href"); if (!dists.contains(href) && !href.startsWith("/") && !href.startsWith(".") /* &&!toParse.contains (href) */) { if (uri.endsWith("dists/") /*&& !href.contains("sid") && !href.contains("experimental")*/) { dists.add(href); LOGGER.info(href); for (String license : licenses) { String url = absHref + license + "/"; LOGGER.info("adding (lic) {}", url); toParse.add(url); } } for (String license : licenses) { if (uri.endsWith(license + "/")) { if (href.startsWith("binary-")) { for (String arch : archs) { if (href.contains(arch)) { LOGGER.info("adding (archdir) {}", absHref); toParse.add(absHref); } } } if (href.startsWith("source")) { LOGGER.info("adding (archdir) {}", absHref); toParse.add(absHref); } } } } } for (Element e : document.select("a[abs:href$=Packages.gz] , a[abs:href$=Sources.gz]")) { String href = e.attr("abs:href"); //only if this seems to be a non duplicate if (document.select("a:contains(prev change)").size() == 0 || document.select("a:contains(prev change)").get(0).attr("abs:href").equals(document .select("a:contains(prev):not(:contains(change))").get(0).attr("abs:href"))) { LOGGER.info("RESULT processing ... 
{} {} ", i++, href); processFileToParse(href); } } } catch (RuntimeException exception) { LOGGER.error("RUNTIME EXCEPTION ", exception); throw exception; } } }
From source file:org.bimserver.charting.Containers.TreeNode.java
public int getLeafNodeCount() { int leafCount = 0; LinkedList<TreeNode> nodes = new LinkedList<TreeNode>(Arrays.asList(this)); TreeNode thisNode = null;//from www .j a va 2 s . c o m while (nodes.size() > 0) { thisNode = nodes.pop(); if (thisNode.Children.length > 0) { int n = thisNode.Children.length; while (--n >= 0) nodes.push(thisNode.Children[n]); } else leafCount++; } return leafCount; }
From source file:hr.fer.spocc.regex.AbstractRegularExpression.java
/**
 * Builds the parse tree for a regular expression given as a flat element list,
 * using a shunting-yard style algorithm with two stacks: one for already-parsed
 * subexpressions and one for pending operators. The whole input is wrapped in a
 * pair of artificial "sentinel" parentheses so the final reduction happens
 * uniformly in the right-parenthesis branch.
 *
 * @param elements the regular expression as a sequence of symbols, operators
 *                 and parentheses
 * @return the root of the resulting parse tree
 * @throws IllegalArgumentException if an operator is missing an operand
 */
protected RegularExpression<T> createParseTree(List<RegularExpressionElement> elements) {
    /**
     * Stack which contains parts of regular expression
     * which are not yet used
     * by the operator. In addition, <code>null</code> values
     * can be pushed onto this stack to indicate that
     * the symbols to the right are grouped by the parenthesis.
     */
    LinkedList<RegularExpression<T>> symbolStack = new LinkedList<RegularExpression<T>>();
    /**
     * Operator stack
     */
    LinkedList<RegularExpressionOperator> opStack = new LinkedList<RegularExpressionOperator>();
    boolean sentinelParentheses = false;
    // Open the artificial sentinel parenthesis: a null on each stack.
    sentinelParentheses = true;
    symbolStack.push(null);
    opStack.push(null);
    int ind = -1;
    Iterator<RegularExpressionElement> iter = elements.iterator();
    while (iter.hasNext() || sentinelParentheses) {
        ++ind;
        RegularExpressionElement e;
        if (iter.hasNext()) {
            e = iter.next();
        } else {
            // Ensure one extra iteration for the artificial trailing ')'.
            e = RegularExpressionElements.RIGHT_PARENTHESIS;
            sentinelParentheses = false;
        }
        switch (e.getElementType()) {
        case SYMBOL:
            // Plain symbol: becomes a trivial one-element subexpression.
            symbolStack.push(createTrivial(elements.subList(ind, ind + 1)));
            break;
        default:
            RegularExpressionOperator curOp = (e.getElementType() == RegularExpressionElementType.OPERATOR
                    ? (RegularExpressionOperator) e : null);
            // Parentheses get priority -1 so every pending operator reduces first.
            int priority = (curOp != null ? curOp.getPriority() : -1);
            if (e.getElementType() != RegularExpressionElementType.LEFT_PARENTHESIS) {
                // Reduce all pending operators of higher-or-equal priority, but
                // never across a '(' marker (null on either stack).
                while (!opStack.isEmpty() && opStack.getFirst() != null
                        && opStack.getFirst().getPriority() >= priority && symbolStack.getFirst() != null) {
                    RegularExpressionOperator op = opStack.pop();
                    int arity = op.getArity();
                    int elementCount = 0;
                    @SuppressWarnings("unchecked")
                    RegularExpression<T>[] operands = new RegularExpression[arity];
                    // Pop the operator's operands (rightmost operand is on top).
                    for (int i = arity - 1; i >= 0; --i) {
                        if (symbolStack.isEmpty()) {
                            throw new IllegalArgumentException("Missing ( after");
                        } else if (symbolStack.getFirst() == null) {
                            throw new IllegalArgumentException("Missing operand #" + (arity - i)
                                    + " for the operator " + op + " before index " + ind);
                        }
                        operands[i] = symbolStack.pop();
                        elementCount += operands[i].size();
                    }
                    RegularExpression<T> regex = createComposite(elements.subList(ind - elementCount - 1, ind),
                            op, operands);
                    symbolStack.push(regex);
                }
            }
            if (curOp != null) {
                opStack.push(curOp);
            } else {
                switch (e.getElementType()) {
                case LEFT_PARENTHESIS:
                    // Open a group: null marker on both stacks.
                    symbolStack.push(null);
                    opStack.push(null);
                    break;
                default: // i.e. a ')'
                    Validate.isTrue(symbolStack.size() >= 2, "Exactly one expression is expected "
                            + "inside parentheses before index " + ind);
                    // pop left bracket (null) from the operator stack
                    Object nullValue = opStack.pop();
                    Validate.isTrue(nullValue == null);
                    // pop left bracket (null) from the symbol stack
                    RegularExpression<T> regex = symbolStack.pop();
                    nullValue = symbolStack.pop();
                    // check if left bracket was removed indeed
                    // Validate.isTrue(nullValue == null,
                    //         "Expected ( at index " + (ind - regex.size() - 1));
                    // Expand the expression if the parentheses are not the
                    // artificial sentinel ones.
                    if (sentinelParentheses) { // XXX a dedicated flag would be clearer
                        regex = createExpanded(regex, elements.subList(ind - regex.size() - 1, ind + 1));
                    }
                    // and put back the expression inside parentheses
                    symbolStack.push(regex);
                }
            }
        } // end of switch
    }
    // After the sentinel ')' reduction exactly one expression remains.
    return symbolStack.pop();
}
From source file:com.nhncorp.lucy.security.xss.XssSaxFilter.java
/**
 * Processes an {@code <object>} or {@code <param>} start tag, tracking open
 * elements so a safe {@code allowNetworking} value can be associated with each
 * object.
 *
 * @param stackForObjectTag
 *            stack of currently open object/param elements
 * @param stackForAllowNetworkingValue
 *            stack of allowNetworking values (one per open object); the top is
 *            replaced when a URL-carrying param is seen
 * @param element
 *            the start-tag element being processed
 */
private void doObjectParamStartTagProcess(LinkedList<Element> stackForObjectTag,
        LinkedList<String> stackForAllowNetworkingValue, Element element) {
    if ("object".equalsIgnoreCase(element.getName())) {
        stackForObjectTag.push(element);
        boolean isDataWhiteUrl = false;
        Attribute dataUrl = element.getAttribute("data");
        if (dataUrl != null) {
            // Check the object's "data" URL against the white list.
            String dataUrlStr = dataUrl.getValue();
            isDataWhiteUrl = this.isWhiteUrl(dataUrlStr);
            // Disable the element entirely when the URL/MIME combination is
            // judged vulnerable.
            boolean isVulnerable = SecurityUtils.checkVulnerable(element, dataUrlStr, isDataWhiteUrl);
            if (isVulnerable) {
                element.setEnabled(false);
                return;
            }
        }
        if (isDataWhiteUrl) {
            stackForAllowNetworkingValue.push("\"all\""); // whitelisted data URL -> allowNetworking "all"
        } else {
            stackForAllowNetworkingValue.push("\"internal\""); // otherwise restrict to "internal"
        }
    } else if (stackForObjectTag.size() > 0 && "param".equalsIgnoreCase(element.getName())) {
        Attribute nameAttr = element.getAttribute("name");
        Attribute valueAttr = element.getAttribute("value");
        if (nameAttr != null && valueAttr != null) {
            stackForObjectTag.push(element);
            if (containsURLName(nameAttr.getValue())) {
                // A URL-carrying param overrides the pending allowNetworking
                // value for the enclosing object.
                stackForAllowNetworkingValue.pop();
                boolean whiteUrl = isWhiteUrl(valueAttr.getValue());
                if (whiteUrl) {
                    stackForAllowNetworkingValue.push("\"all\""); // whitelisted param URL -> "all"
                } else {
                    stackForAllowNetworkingValue.push("\"internal\""); // otherwise "internal"
                }
            }
        }
    }
}
From source file:org.bimserver.charting.Containers.TreeNode.java
public Iterator<TreeNode> iterateLeafNodes() { LinkedList<TreeNode> nodes = new LinkedList<TreeNode>(Arrays.asList(this)); LinkedList<TreeNode> leafNodes = new LinkedList<>(); TreeNode thisNode = null;//from www .j a v a 2 s . c o m while (nodes.size() > 0) { thisNode = nodes.pop(); if (thisNode.Children.length > 0) { int i = -1; int n = thisNode.Children.length; while (++i < n) nodes.push(thisNode.Children[i]); } else leafNodes.push(thisNode); } return leafNodes.iterator(); }