Example usage for java.util LinkedList addAll

Introduction

On this page you can find usage examples for java.util.LinkedList.addAll, collected from open source projects.

Prototype

public boolean addAll(Collection<? extends E> c) 

Document

Appends all of the elements in the specified collection to the end of this list, in the order that they are returned by the specified collection's iterator. Returns true if this list changed as a result of the call.
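
For reference, a minimal self-contained illustration of this behavior (the class name AddAllDemo is ours, not from any of the projects below):

import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;

public class AddAllDemo {
    public static void main(String[] args) {
        // Start with a list containing two elements.
        LinkedList<String> list = new LinkedList<String>(Arrays.asList("a", "b"));
        List<String> more = Arrays.asList("c", "d");

        // addAll appends "c" and "d" to the end of the list,
        // in the order returned by the collection's iterator.
        boolean changed = list.addAll(more);

        System.out.println(changed); // true (the list was modified)
        System.out.println(list);    // [a, b, c, d]
    }
}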

Usage

From source file: org.fusesource.mop.MOP.java

protected void warCommand(LinkedList<String> argList) throws Exception {
    assertNotEmpty(argList);
    defaultType = "war";
    artifactIds = parseArtifactList(argList);
    reminingArgs = argList;

    // let's default the artifact to WAR and then find all the files and pass them in as command line arguments
    repository.setTransitive(false); // We just need the wars.. not the transitive deps.
    List<File> files = repository.resolveFiles(artifactIds);
    // We will need transitive deps to load up jetty
    repository.setTransitive(true);

    LOG.debug("Running war with files: " + files);

    LinkedList<String> newArgs = new LinkedList<String>();
    newArgs.add("jar");
    newArgs.add("org.mortbay.jetty:jetty-runner:RELEASE");
    newArgs.addAll(argList);
    for (File file : files) {
        newArgs.add(file.toString());
    }

    LOG.debug("About to run: " + newArgs);
    executeCommand(newArgs);
}

From source file: se.sics.kompics.p2p.experiment.dsl.SimulationScenario.java

/**
 * Gets the resources from a directory.
 * 
 * @param directory
 *            the directory
 * @param pack
 *            the pack
 * 
 * @return the resources from directory
 */
private static LinkedList<String> getResourcesFromDirectory(File directory, String pack) {
    String[] files = directory.list();

    LinkedList<String> list = new LinkedList<String>();
    for (String string : files) {
        File f = new File(directory + System.getProperty("file.separator") + string);

        if (f.isFile() && !f.getName().endsWith(".class")) {
            String resourceName = f.getName();
            list.add(pack + resourceName);
        }

        if (f.isDirectory()) {
            LinkedList<String> resources = getResourcesFromDirectory(f, pack + f.getName() + "/");
            list.addAll(resources);
        }
    }
    return list;
}

From source file: org.openanzo.glitter.query.Projection.java

private void initialize() throws UnnamedProjectionException {
    // determine if this is an aggregate
    if (!this.groupByVars.isEmpty()) {
        this.isAggregate = true;
    } else {
        // see if any aggregate functions are involved, which means we're grouping in one big (happy) group
        LinkedList<Expression> expressions = new LinkedList<Expression>(this.projectedExpressions);
        while (!expressions.isEmpty()) {
            Expression e = expressions.removeFirst();
            if (e instanceof FunctionCall) {
                FunctionCall fc = (FunctionCall) e;
                if (fc.getFunction() instanceof AggregateFunction) {
                    this.isAggregate = true;
                    break;
                }
                expressions.addAll(fc.getArguments());
            }
        }
    }
    // check that we have an output name for every projected expression
    if (this.projectedAs.size() < this.projectedExpressions.size())
        throw new UnnamedProjectionException(this.projectedExpressions.get(this.projectedAs.size()));
    if (this.projectedAs.size() > this.projectedExpressions.size())
        throw new GlitterRuntimeException(ExceptionConstants.GLITTER.MORE_NAMES);
    int i;
    if ((i = this.projectedAs.indexOf(null)) != -1)
        throw new UnnamedProjectionException(this.projectedExpressions.get(i));
    for (i = 0; i < this.projectedAs.size(); i++)
        this.aliasMap.put(this.projectedAs.get(i), this.projectedExpressions.get(i));
}

From source file: org.apache.hadoop.yarn.util.ProcfsBasedProcessTree.java

/**
 * Update process-tree with latest state. If the root-process is not alive,
 * the tree will be empty.
 *
 */
@Override
public void updateProcessTree() {
    if (!pid.equals(deadPid)) {
        // Get the list of processes
        List<String> processList = getProcessList();

        Map<String, ProcessInfo> allProcessInfo = new HashMap<String, ProcessInfo>();

        // cache the processTree to get the age for processes
        Map<String, ProcessInfo> oldProcs = new HashMap<String, ProcessInfo>(processTree);
        processTree.clear();

        ProcessInfo me = null;
        for (String proc : processList) {
            // Get information for each process
            ProcessInfo pInfo = new ProcessInfo(proc);
            if (constructProcessInfo(pInfo, procfsDir) != null) {
                allProcessInfo.put(proc, pInfo);
                if (proc.equals(this.pid)) {
                    me = pInfo; // cache 'me'
                    processTree.put(proc, pInfo);
                }
            }
        }

        if (me == null) {
            return;
        }

        // Add each process to its parent.
        for (Map.Entry<String, ProcessInfo> entry : allProcessInfo.entrySet()) {
            String pID = entry.getKey();
            if (!pID.equals("1")) {
                ProcessInfo pInfo = entry.getValue();
                String ppid = pInfo.getPpid();
                // If parent is init and process is not session leader,
                // attach to sessionID
                if (ppid.equals("1")) {
                    String sid = pInfo.getSessionId().toString();
                    if (!pID.equals(sid)) {
                        ppid = sid;
                    }
                }
                ProcessInfo parentPInfo = allProcessInfo.get(ppid);
                if (parentPInfo != null) {
                    parentPInfo.addChild(pInfo);
                }
            }
        }

        // now start constructing the process-tree
        LinkedList<ProcessInfo> pInfoQueue = new LinkedList<ProcessInfo>();
        pInfoQueue.addAll(me.getChildren());
        while (!pInfoQueue.isEmpty()) {
            ProcessInfo pInfo = pInfoQueue.remove();
            if (!processTree.containsKey(pInfo.getPid())) {
                processTree.put(pInfo.getPid(), pInfo);
            }
            pInfoQueue.addAll(pInfo.getChildren());
        }

        // update age values and compute the number of jiffies since last update
        for (Map.Entry<String, ProcessInfo> procs : processTree.entrySet()) {
            ProcessInfo oldInfo = oldProcs.get(procs.getKey());
            if (procs.getValue() != null) {
                procs.getValue().updateJiffy(oldInfo);
                if (oldInfo != null) {
                    procs.getValue().updateAge(oldInfo);
                }
            }
        }

        if (LOG.isDebugEnabled()) {
            // Log.debug the ProcfsBasedProcessTree
            LOG.debug(this.toString());
        }
        if (smapsEnabled) {
            //Update smaps info
            processSMAPTree.clear();
            for (ProcessInfo p : processTree.values()) {
                if (p != null) {
                    // Get information for each process
                    ProcessTreeSmapMemInfo memInfo = new ProcessTreeSmapMemInfo(p.getPid());
                    constructProcessSMAPInfo(memInfo, procfsDir);
                    processSMAPTree.put(p.getPid(), memInfo);
                }
            }
        }
    }
}

From source file: com.lenovo.tensorhusky.common.utils.ProcfsBasedProcessTree.java

/**
 * Update process-tree with latest state. If the root-process is not alive,
 * the tree will be empty.
 */
@Override
public void updateProcessTree() {
    if (!pid.equals(deadPid)) {
        // Get the list of processes
        List<String> processList = getProcessList();

        Map<String, ProcessInfo> allProcessInfo = new HashMap<String, ProcessInfo>();

        // cache the processTree to get the age for processes
        Map<String, ProcessInfo> oldProcs = new HashMap<String, ProcessInfo>(processTree);
        processTree.clear();

        ProcessInfo me = null;
        for (String proc : processList) {
            // Get information for each process
            ProcessInfo pInfo = new ProcessInfo(proc);
            if (constructProcessInfo(pInfo, procfsDir) != null) {
                allProcessInfo.put(proc, pInfo);
                if (proc.equals(this.pid)) {
                    me = pInfo; // cache 'me'
                    processTree.put(proc, pInfo);
                }
            }
        }

        if (me == null) {
            return;
        }

        // Add each process to its parent.
        for (Map.Entry<String, ProcessInfo> entry : allProcessInfo.entrySet()) {
            String pID = entry.getKey();
            if (!pID.equals("1")) {
                ProcessInfo pInfo = entry.getValue();
                ProcessInfo parentPInfo = allProcessInfo.get(pInfo.getPpid());
                if (parentPInfo != null) {
                    parentPInfo.addChild(pInfo);
                }
            }
        }

        // now start constructing the process-tree
        LinkedList<ProcessInfo> pInfoQueue = new LinkedList<ProcessInfo>();
        pInfoQueue.addAll(me.getChildren());
        while (!pInfoQueue.isEmpty()) {
            ProcessInfo pInfo = pInfoQueue.remove();
            if (!processTree.containsKey(pInfo.getPid())) {
                processTree.put(pInfo.getPid(), pInfo);
            }
            pInfoQueue.addAll(pInfo.getChildren());
        }

        // update age values and compute the number of jiffies since last
        // update
        for (Map.Entry<String, ProcessInfo> procs : processTree.entrySet()) {
            ProcessInfo oldInfo = oldProcs.get(procs.getKey());
            if (procs.getValue() != null) {
                procs.getValue().updateJiffy(oldInfo);
                if (oldInfo != null) {
                    procs.getValue().updateAge(oldInfo);
                }
            }
        }

        if (LOG.isDebugEnabled()) {
            // Log.debug the ProcfsBasedProcessTree
            LOG.debug(this.toString());
        }
        if (smapsEnabled) {
            // Update smaps info
            processSMAPTree.clear();
            for (ProcessInfo p : processTree.values()) {
                if (p != null) {
                    // Get information for each process
                    ProcessTreeSmapMemInfo memInfo = new ProcessTreeSmapMemInfo(p.getPid());
                    constructProcessSMAPInfo(memInfo, procfsDir);
                    processSMAPTree.put(p.getPid(), memInfo);
                }
            }
        }
    }
}

From source file: se.sics.kompics.p2p.experiment.dsl.SimulationScenario.java

/**
 * Gets the classes from a directory.
 * 
 * @param directory
 *            the directory
 * @param pack
 *            the pack
 * 
 * @return the classes from directory
 */
private static LinkedList<String> getClassesFromDirectory(File directory, String pack) {
    String[] files = directory.list();

    LinkedList<String> list = new LinkedList<String>();
    for (String string : files) {
        File f = new File(directory + System.getProperty("file.separator") + string);

        if (f.isFile() && f.getName().endsWith(".class")) {
            String className = f.getName().substring(0, f.getName().lastIndexOf('.'));
            list.add(pack + className);
        }

        if (f.isDirectory()) {
            LinkedList<String> classes = getClassesFromDirectory(f, pack + f.getName() + ".");
            list.addAll(classes);
        }
    }
    return list;
}

From source file: de.dfki.madm.anomalydetection.evaluator.cluster_based.CMGOSEvaluator.java

private HashMap<Double, LinkedList<CovarianceMatrix>> fast(double[][] data, int h, int n, int p) {

    class StepWorker extends Thread {
        private int id;
        private HashMap<Integer, HashMap<Double, LinkedList<CovarianceMatrix>>> map2;
        private double[][] data;
        private int[] indexArray;
        private int h_sub;
        private HashMap<Double, LinkedList<CovarianceMatrix>> retMap;
        private int anz;

        public StepWorker(int anz, HashMap<Integer, HashMap<Double, LinkedList<CovarianceMatrix>>> map, int id,
                double[][] data, int[] indexArray, int h_sub) {
            this.id = id;
            this.map2 = map;
            this.data = data;
            this.indexArray = indexArray;
            this.h_sub = h_sub;
            this.anz = anz;
            this.retMap = new HashMap<Double, LinkedList<CovarianceMatrix>>();
        }

        public HashMap<Double, LinkedList<CovarianceMatrix>> getMap() {
            return this.retMap;
        }

        public void run() {
            for (int id = (this.id * anz); id <= ((this.id * anz) + anz); id++) {
                if (map2.containsKey(id)) {
                    HashMap<Double, LinkedList<CovarianceMatrix>> map = map2.get(id);
                    for (double d : map.keySet()) {
                        LinkedList<CovarianceMatrix> l = map.get(d);
                        for (CovarianceMatrix c : l) {
                            CovarianceMatrix ret = c;
                            for (int rep = 0; rep < 2; rep++)
                                ret = Cstep(ret, data, indexArray, h_sub);
                            retMap = getSorted(retMap, ret, 10);
                        }
                    }
                }
            }
        }
    }

    // construct up to five disjoint random subsets of size nsub according
    // to Section 3.3 (say, five subsets of size nsub = 300);
    double anz_subset = this.numberOfSubsets;
    double anz_points = Math.floor(data.length / anz_subset);
    boolean[] taken = new boolean[data.length];
    int merge_id = 0;

    // keep the 10 best results (Tsub, Ssub);
    HashMap<Integer, HashMap<Double, LinkedList<CovarianceMatrix>>> map2 = new HashMap<Integer, HashMap<Double, LinkedList<CovarianceMatrix>>>();
    for (int i = 0; i < anz_subset; i++) {
        int dim = (int) anz_points;
        int[] indexArray = new int[dim];
        // create sub-dataset
        for (int j = 0; j < dim; j++) {
            int index;
            do {
                index = generator.nextInt(n);
            } while (taken[index]);
            taken[index] = true;
            indexArray[j] = index;
        }
        double h_sub = Math.ceil((dim * (h / (n * 1.0))));
        HashMap<Double, LinkedList<CovarianceMatrix>> map = getInit10(data, indexArray, (int) h_sub, dim, p);

        if (!map2.containsKey(merge_id))
            map2.put(merge_id, map);
        else {
            HashMap<Double, LinkedList<CovarianceMatrix>> hilf = map2.get(merge_id);
            for (double k : map.keySet()) {
                if (!hilf.containsKey(k))
                    hilf.put(k, map.get(k));
                else {
                    LinkedList<CovarianceMatrix> h1 = hilf.get(k);
                    h1.addAll(map.get(k));
                    hilf.put(k, h1);
                }
            }
            map2.put(merge_id, hilf);
        }
        if ((i % 5) == 0 && i != 0) {
            merge_id++;
        }
    }

    // pool the subsets, yielding the merged set (say, of size nmerged =
    // 1,500);

    anz_subset = Math.floor(data.length / 1500.0);
    if (anz_subset <= 0)
        anz_subset = 1;
    anz_points = Math.floor(data.length / anz_subset);
    taken = new boolean[data.length];
    double h_sub = Math.ceil((anz_points * (h / (n * 1.0))));
    int dim = (int) anz_points;

    int[] indexArray = new int[dim];
    for (int j = 0; j < dim; j++) {
        int index;
        do {
            index = generator.nextInt(n);
        } while (taken[index]);
        taken[index] = true;
        indexArray[j] = index;
    }

    int anz = map2.keySet().size() % this.numberOfThreads;
    StepWorker[] wa = new StepWorker[this.numberOfThreads];
    for (int i = 0; i < this.numberOfThreads; i++) {
        wa[i] = new StepWorker(anz, map2, i, data, indexArray, (int) h_sub);
        wa[i].start();
    }
    for (int i = 0; i < this.numberOfThreads; i++) {
        try {
            wa[i].join();
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }
    map2 = null;
    HashMap<Double, LinkedList<CovarianceMatrix>> map3 = new HashMap<Double, LinkedList<CovarianceMatrix>>();
    for (int i = 0; i < this.numberOfThreads; i++) {
        for (Double k : wa[i].getMap().keySet()) {
            for (CovarianceMatrix mat : wa[i].getMap().get(k))
                map3 = getSorted(map3, mat, 10);
        }
    }

    // in the full dataset, repeat for the m_full best results:
    HashMap<Double, LinkedList<CovarianceMatrix>> map4 = new HashMap<Double, LinkedList<CovarianceMatrix>>();

    indexArray = new int[data.length];
    for (int i = 0; i < data.length; i++) {
        indexArray[i] = i;
    }
    for (double d : map3.keySet()) {
        LinkedList<CovarianceMatrix> l = map3.get(d);
        for (CovarianceMatrix c : l) {
            map4 = getSorted(map4, convergence(data, indexArray, c, h), 10);
        }
    }
    map3 = null;
    return map4;
}

From source file: net.cliseau.composer.javatarget.PointcutParseException.java

/**
 * Update the manifest of a given JAR file to include CliSeAu's dependencies in the classpath list.
 *
 * This method modifies the "Class-Path" entry of the given JAR file's
 * manifest to include the paths of all runtime dependencies that are caused
 * by the instrumentation with the CliSeAu unit.
 *
 * @param targetJARFile The JAR file whose manifest to update.
 * @exception IOException Thrown when reading or writing the JAR file fails.
 * @todo Check whether this update is possible also with the JarFile API alone.
 */
private void updateTargetManifest(final File targetJARFile) throws IOException, InvalidConfigurationException {
    // Step 1: Obtain the existing class path list from the target JAR file
    JarFile targetJAR = new JarFile(targetJARFile);
    Manifest targetManifest = targetJAR.getManifest();
    LinkedList<String> classPathEntries;
    if (targetManifest != null) {
        String targetClassPath = targetManifest.getMainAttributes().getValue(Attributes.Name.CLASS_PATH);
        if (targetClassPath == null) {
            targetClassPath = "";
        }
        classPathEntries = new LinkedList<String>(
                Arrays.asList(targetClassPath.split(manifestClassPathSeparator)));
    } else {
        classPathEntries = new LinkedList<String>();
    }
    // close the object again (this shall ensure that the command in
    // Step 4 can safely work on the file again)
    targetJAR.close();

    // Step 2: Add all newly introduced runtime dependencies of CliSeAu
    classPathEntries.addAll(getInlinedDependencies());

    // Step 3: Create a new manifest file with *only* the updated class path directive
    File manifestUpdate = File.createTempFile("MANIFEST", ".MF");
    PrintWriter muWriter = new PrintWriter(manifestUpdate);
    muWriter.print("Class-path:");
    muWriter.print(StringUtils.join(classPathEntries, manifestClassPathSeparator));
    muWriter.println();
    muWriter.close();

    // Step 4: Run "jar" to update the JAR file with the new manifest; this
    // does not replace the JAR file's manifest with the new one, but
    // *update* *only* those entries in the JAR file's manifest which are
    // present in the new manifest. That is, only the class path settings are
    // updated and everything else remains intact.
    CommandRunner.exec(new String[] { aspectjConfig.getJarExecutable(), "umf", // update manifest
            manifestUpdate.getPath(), targetJARFile.getPath() });

    // Step 5: cleanup
    manifestUpdate.delete();
}

From source file: org.gluu.site.ldap.persistence.LdapEntryManager.java

private <T> List<T> createEntitiesVirtualListView(Class<T> entryClass,
        List<PropertyAnnotation> propertiesAnnotations, SearchResultEntry... searchResultEntries) {

    List<T> result = new LinkedList<T>();
    Map<String, List<AttributeData>> entriesAttributes = new LinkedHashMap<String, List<AttributeData>>(100);

    int count = 0;
    for (int i = 0; i < searchResultEntries.length; i++) {

        count++;

        SearchResultEntry entry = searchResultEntries[i];

        LinkedList<AttributeData> attributeDataLinkedList = new LinkedList<AttributeData>();
        attributeDataLinkedList.addAll(getAttributeDataList(entry));
        entriesAttributes.put(entry.getDN(), attributeDataLinkedList);

        // Remove reference to allow java clean up object
        searchResultEntries[i] = null;

        // Allow java to clean up temporary objects
        if (count >= 100) {

            List<T> currentResult = new LinkedList<T>();
            currentResult.addAll(createEntities(entryClass, propertiesAnnotations, entriesAttributes, false));
            result.addAll(currentResult);

            entriesAttributes = new LinkedHashMap<String, List<AttributeData>>(100);
            count = 0;
        }
    }

    List<T> currentResult = createEntities(entryClass, propertiesAnnotations, entriesAttributes, false);
    result.addAll(currentResult);

    return result;
}

From source file: org.guzz.builder.GuzzConfigFileBuilder.java

public List listDBGroups() {
    /*
     <tran>
       <dbgroup name="default" masterDBConfigName="masterDB" slaveDBConfigName="slaveDB" dialectName="mysql5dialect" />
       <dbgroup name="activeLog" masterDBConfigName="masterLogDB" defaultDialect="h2dialect" />
            
       <virtualdbgroup name="log" dialectName="h2dialect" shadow="xxx.VirtualDBGroupView">
          <dbgroup name="log.old.1" masterDBConfigName="masterLogDB2" />
          <dbgroup name="log.old.2" masterDBConfigName="masterLogDB3" />
          <dbgroup name="log.old.3" masterDBConfigName="masterLogDB4" />
       </virtualdbgroup>
     </tran>
    */

    LinkedList dbGroups = new LinkedList();

    List rootDBGroups = parseForPhysicsDBGroup(this.rootDoc.selectNodes("tran/dbgroup"), "default");
    if (rootDBGroups != null) {
        dbGroups.addAll(rootDBGroups);
    }

    //Load virtual dbGroup
    List vss = this.rootDoc.selectNodes("tran/virtualdbgroup");

    if (vss != null && !vss.isEmpty()) {
        for (int i = 0; i < vss.size(); i++) {
            Element e = (Element) vss.get(i);

            VirtualDBGroup db = new VirtualDBGroup();
            String groupName = e.attributeValue("name");
            String dialectName = e.attributeValue("dialectName");
            String shadow = e.attributeValue("shadow");

            if (StringUtil.isEmpty(groupName)) {
                db.setGroupName("default");
            } else {
                db.setGroupName(groupName);
            }

            if (StringUtil.isEmpty(dialectName)) {
                dialectName = "default";
            }

            Dialect dt = this.gf.getDialect(dialectName);
            if (dt == null) {
                throw new InvalidConfigurationException(
                        "dialect:[" + dialectName + "] not found for dbgroup:[" + e.asXML() + "]");
            }

            db.setDialect(dt);

            //shadow
            if (StringUtil.isEmpty(shadow)) {
                throw new InvalidConfigurationException(
                        "missing attribute [shadow] in virtualdbgroup:[" + e.asXML() + "]");
            }

            Object vv = BeanCreator.newBeanInstance(shadow);

            if (vv instanceof VirtualDBView) {
                VirtualDBView vdv = (VirtualDBView) vv;
                vdv.setConfiguredVirtualDBGroup(db);

                this.gf.registerVirtualDBView(vdv);

                db.setVirtualDBGroupView(vdv);
            } else {
                throw new InvalidConfigurationException("attribute [shadow] must be a subclass of "
                        + VirtualDBView.class.getName() + " for virtualdbgroup:[" + e.asXML() + "]");
            }

            dbGroups.addLast(db);

            //Load virtualdbgroup's sub dbgroup.
            List subDBGroups = parseForPhysicsDBGroup(e.selectNodes("dbgroup"), dialectName);
            if (subDBGroups != null) {
                dbGroups.addAll(subDBGroups);
            }
        }
    }

    return dbGroups;
}