List of usage examples for java.util LinkedList addAll
public boolean addAll(Collection<? extends E> c)
From source file:ubic.gemma.core.datastructure.matrix.ExpressionDataMatrixColumnSort.java
/** * Sort biomaterials according to a list of ordered factors * * @param start biomaterials to sort//from w w w . ja v a 2 s. com * @param factors sorted list of factors to define sort order for biomaterials, cannot be null */ private static List<BioMaterial> orderBiomaterialsBySortedFactors(List<BioMaterial> start, List<ExperimentalFactor> factors) { if (start.size() == 1) { return start; } if (start.size() == 0) { throw new IllegalArgumentException("Must provide some biomaterials"); } if (factors == null) { throw new IllegalArgumentException("Must provide sorted factors, or at least an empty list"); } if (factors.isEmpty()) { // we're done. return start; } ExperimentalFactor simplest = factors.get(0); if (simplest == null) { // we're done. return start; } /* * Order this chunk by the selected factor */ Map<FactorValue, List<BioMaterial>> fv2bms = ExpressionDataMatrixColumnSort.buildFv2BmMap(start); List<BioMaterial> ordered = ExpressionDataMatrixColumnSort.orderByFactor(simplest, fv2bms, start); // Abort ordering, so we are ordered only by the first continuous factor. if (ExperimentalDesignUtils.isContinuous(simplest)) { assert ordered != null; return ordered; } LinkedList<ExperimentalFactor> factorsStillToDo = new LinkedList<>(); factorsStillToDo.addAll(factors); factorsStillToDo.remove(simplest); if (factorsStillToDo.size() == 0) { /* * No more ordering is necessary. */ return ordered; } ExpressionDataMatrixColumnSort.log.debug("Factors: " + factors.size()); /* * Recurse in and order each chunk. First split it up, but retaining the order we just made. */ LinkedHashMap<FactorValue, List<BioMaterial>> chunks = ExpressionDataMatrixColumnSort .chunkOnFactor(simplest, ordered); if (chunks == null) { // this means we should bail, gracefully. return start; } /* * Process each chunk. 
*/ List<BioMaterial> result = new ArrayList<>(); for (FactorValue fv : chunks.keySet()) { List<BioMaterial> chunk = chunks.get(fv); if (chunk.size() < 2) { result.addAll(chunk); } else { List<BioMaterial> orderedChunk = ExpressionDataMatrixColumnSort .orderBiomaterialsBySortedFactors(chunk, factorsStillToDo); if (orderedChunk != null) { result.addAll(orderedChunk); } } } return result; }
From source file:org.marketcetera.util.ws.types.TypeTest.java
private static <V> LinkedList<V> toLinkedList(List<V> in) { LinkedList<V> out = new LinkedList<V>(); out.addAll(in); return out;//from ww w. jav a 2s . com }
From source file:org.apache.hadoop.hive.ql.exec.SerializationUtilities.java
/**
 * Clones an operator tree by round-tripping it through
 * {@code serializePlan}/{@code deserializePlan}. Do not use unless necessary.
 *
 * @param roots the root operators of the tree to clone
 * @return the cloned tree, with the original compilation context re-applied
 *         to every cloned operator
 */
public static List<Operator<?>> cloneOperatorTree(List<Operator<?>> roots) {
    ByteArrayOutputStream baos = new ByteArrayOutputStream(4096);
    // Grab the shared context from the first root before serializing; it is
    // re-applied to every clone below (presumably it does not survive the
    // serialization round trip — the code re-sets it on every node).
    CompilationOpContext ctx = roots.isEmpty() ? null : roots.get(0).getCompilationOpContext();
    serializePlan(roots, baos, true);
    @SuppressWarnings("unchecked")
    List<Operator<?>> result = deserializePlan(new ByteArrayInputStream(baos.toByteArray()),
            roots.getClass(), true);
    // Restore the context: breadth-first walk (poll from head, append children
    // to tail) over the cloned tree, setting the context on each operator.
    LinkedList<Operator<?>> newOps = new LinkedList<>(result);
    while (!newOps.isEmpty()) {
        Operator<?> newOp = newOps.poll();
        newOp.setCompilationOpContext(ctx);
        List<Operator<?>> children = newOp.getChildOperators();
        if (children != null) {
            newOps.addAll(children);
        }
    }
    return result;
}
From source file:org.knowrob.vis.model.util.algorithm.ACCUM.java
/**
 * Diffuses a vector field around one vertex, weighted by a Gaussian of width
 * {@code 1/sqrt(invsigma2)}. Ported from trimesh2 (2.12) (Szymon Rusinkiewicz,
 * Princeton University).
 *
 * Starting from vertex {@code v}, neighbors are visited via a stack-based
 * flood fill; each visited neighbor contributes to the accumulator with a
 * weight combining the Gaussian falloff, normal alignment, and point area.
 * The result in {@code flt} is normalized by the total weight at the end.
 *
 * @see <a href="https://github.com/fcole/qrtsc/tree/master/trimesh2">trimesh2</a>
 *
 * @param m          CAD model analyzed
 * @param curvatures HashMap with model vertices as keys and their curvatures as values
 * @param flags      map of vertex to visit-marker value (default initialized with 0
 *                   and of the same length as the model's vertex list)
 * @param flag_curr  atomic counter; incremented once per call to obtain a fresh
 *                   visit marker, so {@code flags} need not be cleared between calls
 * @param accum      accumulator applied to each contributing vertex
 * @param v          index of the vertex around which to diffuse
 * @param invsigma2  spread of the Gaussian used in weighting
 * @param flt        3D vector field diffused based on the curvature (output)
 */
@SuppressWarnings("javadoc")
private static void diffuse_vert_field(final Model m, HashMap<Vertex, Curvature> curvatures,
        Map<Vertex, Long> flags, AtomicLong flag_curr, final ACCUM accum, int v, float invsigma2,
        Vertex flt) {
    Vertex vert = m.getVertices().get(v);
    if (vert.getNeighbors().size() == 0) {
        // Isolated vertex: accumulate only its own contribution with a fixed weight.
        // flt.set(0, 0, 0);
        accum.a(m, curvatures, vert, flt, .5f, vert);
        return;
    }
    // Seed the accumulator with the center vertex, weighted by its point area.
    // flt.set(0, 0, 0);
    accum.a(m, curvatures, vert, flt, vert.getPointarea(), vert);
    float sum_w = vert.getPointarea();
    final Vector3f nv = vert.getNormalVector();
    // Fresh marker value for this call; vertices carrying it have been visited.
    long flag_curr_val = flag_curr.incrementAndGet();
    flags.put(vert, flag_curr_val);
    // Flood-fill frontier, seeded with the center vertex's neighbors.
    LinkedList<Vertex> boundary = new LinkedList<Vertex>();
    boundary.addAll(vert.getNeighbors());
    while (boundary.size() > 0) {
        Vertex n = boundary.pop();
        if (flags.get(n) != null && flags.get(n) == flag_curr_val)
            continue; // already visited in this call
        flags.put(n, flag_curr_val);
        // Skip vertices facing away from the center vertex.
        if (nv.dot(n.getNormalVector()) <= 0.0f)
            continue;
        // Gaussian weight
        float w = wt(n, vert, invsigma2);
        if (w == 0.0f)
            continue; // beyond the Gaussian's support; do not expand further from here
        // Downweight things pointing in different directions
        w *= nv.dot(n.getNormalVector());
        // Surface area "belonging" to each point
        w *= n.getPointarea();
        // Accumulate weight times field at neighbor
        accum.a(m, curvatures, vert, flt, w, n);
        sum_w += w;
        // Expand the frontier to this neighbor's unvisited neighbors.
        for (Vertex nn : n.getNeighbors()) {
            if (flags.get(nn) != null && flags.get(nn) == flag_curr_val)
                continue;
            boundary.push(nn);
        }
    }
    // Normalize by the total accumulated weight.
    flt.scale(1 / sum_w);
}
From source file:org.apache.hadoop.hive.ql.exec.SerializationUtilities.java
/**
 * Clones an operator tree by round-tripping it through
 * {@code serializePlan}/{@code deserializePlan}, additionally tagging every
 * cloned operator with the given Tez union index.
 *
 * @param roots            the root operators of the tree to clone
 * @param indexForTezUnion index set on every cloned operator via
 *                         {@code setIndexForTezUnion}
 * @return the cloned tree, with the original compilation context re-applied
 *         to every cloned operator
 */
public static List<Operator<?>> cloneOperatorTree(List<Operator<?>> roots, int indexForTezUnion) {
    ByteArrayOutputStream baos = new ByteArrayOutputStream(4096);
    // Grab the shared context from the first root before serializing; it is
    // re-applied to every clone below.
    CompilationOpContext ctx = roots.isEmpty() ? null : roots.get(0).getCompilationOpContext();
    serializePlan(roots, baos, true);
    @SuppressWarnings("unchecked")
    List<Operator<?>> result = deserializePlan(new ByteArrayInputStream(baos.toByteArray()),
            roots.getClass(), true);
    // Restore the context: breadth-first walk (poll from head, append children
    // to tail) over the cloned tree, fixing up each operator.
    LinkedList<Operator<?>> newOps = new LinkedList<>(result);
    while (!newOps.isEmpty()) {
        Operator<?> newOp = newOps.poll();
        newOp.setIndexForTezUnion(indexForTezUnion);
        newOp.setCompilationOpContext(ctx);
        List<Operator<?>> children = newOp.getChildOperators();
        if (children != null) {
            newOps.addAll(children);
        }
    }
    return result;
}
From source file:com.newlandframework.avatarmq.core.MessageCache.java
/**
 * Snapshots the queued messages, clears the cache, and dispatches the batch.
 *
 * @param messages queue of pending messages to dispatch; drained into a local
 *                 snapshot before the cache is cleared
 */
private void commitMessage(ConcurrentLinkedQueue<T> messages) {
    // Snapshot via copy constructor (replaces create-then-addAll).
    LinkedList<T> list = new LinkedList<T>(messages);
    cache.clear();
    // The original also checked `list != null`, which is impossible for a
    // freshly constructed list — dead check removed.
    if (!list.isEmpty()) {
        parallelDispatch(list);
        list.clear();
    }
}
From source file:org.georchestra.console.ws.backoffice.users.UserRule.java
public List<String> getListUidProtected() { if (this.listOfprotectedUsers.isEmpty()) { UserRule.LOG.warn("There isn't any protected user configured"); }// w w w .j a v a 2 s . co m LinkedList<String> res = new LinkedList<String>(); res.addAll(this.listOfprotectedUsers); return res; }
From source file:org.psidnell.omnifocus.integrationtest.IntegrationTest.java
/**
 * Imports the previously exported data file via {@code Main}, writing output to
 * a temp file under {@code tmpDataDir}, optionally diffing it against the
 * expected data under {@code src/test/data}.
 *
 * @param name      output file name, also the expected-data file name
 * @param extraArgs additional command-line arguments appended after the
 *                  import/output arguments
 * @param doDiff    whether to diff the output against the expected data
 * @throws Exception if Main or the diff fails
 */
private void runMainAndDiff(final String name, String[] extraArgs, boolean doDiff) throws Exception {
    // NOTE: the redundant `, IOException` in the throws clause was removed;
    // IOException is already covered by Exception.
    File tmp = new File(tmpDataDir, name);
    String[] args = { "-import", PREVIOUSLY_EXPORTED_DATA_FILE.getPath(), "-o", tmp.getPath() };
    // Seed via copy constructor, then append the caller-supplied extras.
    LinkedList<String> combinedArgs = new LinkedList<>(Arrays.asList(args));
    combinedArgs.addAll(Arrays.asList(extraArgs));
    Main.main(combinedArgs.toArray(new String[0]));
    if (doDiff) {
        Diff.diff(new File("src/test/data/" + name), tmp);
    }
}
From source file:net.sourceforge.fenixedu.domain.reports.FlunkedReportFile.java
/**
 * Writes one spreadsheet row per registration (of degrees matching the report's
 * degree type) whose most recent registration state in the configured execution
 * year is FLUNKED. Each row holds the student number, the cycle of studies, and
 * the degree cells.
 *
 * NOTE(review): the header "nmero aluno" looks mis-encoded (likely "número
 * aluno"); left untouched here because it is runtime output — confirm and fix
 * separately.
 */
@Override
public void renderReport(Spreadsheet spreadsheet) {
    spreadsheet.setHeader("nmero aluno");
    spreadsheet.setHeader("ciclo estudos");
    setDegreeHeaders(spreadsheet);
    for (final Degree degree : Degree.readNotEmptyDegrees()) {
        if (checkDegreeType(getDegreeType(), degree)) {
            for (final Registration registration : degree.getRegistrationsSet()) {
                // Copy the registration's states, then keep only those belonging
                // to the report's execution year.
                LinkedList<RegistrationState> states = new LinkedList<RegistrationState>();
                states.addAll(registration.getRegistrationStatesSet());
                CollectionUtils.filter(states, new Predicate() {
                    @Override
                    public boolean evaluate(Object item) {
                        return ((RegistrationState) item).getExecutionYear() != null
                                && ((RegistrationState) item).getExecutionYear().equals(getExecutionYear());
                    }
                });
                // Sort by date so getLast() yields the most recent state of the year.
                Collections.sort(states, RegistrationState.DATE_COMPARATOR);
                if (!states.isEmpty() && states.getLast().getStateType().equals(RegistrationStateType.FLUNKED)) {
                    final Row row = spreadsheet.addRow();
                    row.setCell(registration.getNumber());
                    CycleType cycleType = registration.getCycleType(states.getLast().getExecutionYear());
                    // Cycle may be absent; emit an empty cell in that case.
                    row.setCell(cycleType != null ? cycleType.toString() : "");
                    setDegreeCells(row, degree);
                }
            }
        }
    }
}
From source file:es.emergya.ui.gis.popups.ConsultaHistoricos.java
/**
 * Returns a snapshot of the currently displayed resources. The copy is taken
 * while holding the lock on {@code recursosMostrados}, so the snapshot is
 * consistent; callers may freely mutate the returned list.
 *
 * @return a new list containing the resources shown at the time of the call
 */
public static List<Object> getCurrentRecursos() {
    synchronized (recursosMostrados) {
        // Copy constructor replaces the create-then-addAll pattern.
        return new LinkedList<Object>(recursosMostrados);
    }
}