Example usage for java.util LinkedList isEmpty

List of usage examples for java.util LinkedList isEmpty

Introduction

On this page you can find example usages of java.util.LinkedList.isEmpty().

Prototype

boolean isEmpty();

Source Link

Document

Returns true if this list contains no elements.

Usage

From source file:org.apache.hadoop.yarn.util.SmapsBasedProcessTree.java

/**
 * Update process-tree with latest state. If the root-process is not alive,
 * tree will be empty.
 *
 */
@Override
public void updateProcessTree() {
    if (!pid.equals(deadPid)) {
        // Get the current list of process ids from the procfs directory.
        List<String> processList = getProcessList();

        Map<String, ProcessInfo> allProcessInfo = new HashMap<String, ProcessInfo>();

        // cache the processTree to get the age for processes
        Map<String, ProcessInfo> oldProcs = new HashMap<String, ProcessInfo>(processTree);
        processTree.clear();

        ProcessInfo me = null;
        for (String proc : processList) {
            // Get information for each process; a null result means the
            // process exited between listing and reading its procfs entry.
            ProcessInfo pInfo = new ProcessInfo(proc);
            if (constructProcessInfo(pInfo, procfsDir) != null) {
                allProcessInfo.put(proc, pInfo);
                if (proc.equals(this.pid)) {
                    me = pInfo; // cache 'me'
                    processTree.put(proc, pInfo);
                }
            }
        }

        // Root process is gone: leave processTree empty.
        if (me == null) {
            return;
        }

        // Add each process to its parent (pid "1"/init is never a child).
        for (Map.Entry<String, ProcessInfo> entry : allProcessInfo.entrySet()) {
            String pID = entry.getKey();
            if (!pID.equals("1")) {
                ProcessInfo pInfo = entry.getValue();
                ProcessInfo parentPInfo = allProcessInfo.get(pInfo.getPpid());
                if (parentPInfo != null) {
                    parentPInfo.addChild(pInfo);
                }
            }
        }

        // now start constructing the process-tree: breadth-first walk over
        // 'me' and all of its descendants, collecting them into processTree.
        LinkedList<ProcessInfo> pInfoQueue = new LinkedList<ProcessInfo>();
        pInfoQueue.addAll(me.getChildren());
        while (!pInfoQueue.isEmpty()) {
            ProcessInfo pInfo = pInfoQueue.remove();
            if (!processTree.containsKey(pInfo.getPid())) {
                processTree.put(pInfo.getPid(), pInfo);
            }
            pInfoQueue.addAll(pInfo.getChildren());
        }

        // update age values and compute the number of jiffies since last update
        for (Map.Entry<String, ProcessInfo> procs : processTree.entrySet()) {
            ProcessInfo oldInfo = oldProcs.get(procs.getKey());
            if (procs.getValue() != null) {
                procs.getValue().updateJiffy(oldInfo);
                if (oldInfo != null) {
                    procs.getValue().updateAge(oldInfo);
                }
            }
        }

        if (LOG.isDebugEnabled()) {
            // Log.debug the ProcfsBasedProcessTree
            LOG.debug(this.toString());
        }
    }

    // Update PSS related information from each live process's smaps data.
    // NOTE(review): processSMAPTree is not cleared first, so entries for
    // processes that have since exited may linger — confirm intended.
    for (ProcessInfo p : processTree.values()) {
        if (p != null) {
            // Get information for each process
            ProcessMemInfo memInfo = new ProcessMemInfo(p.getPid());
            constructProcessSMAPInfo(memInfo, procfsDir);
            processSMAPTree.put(p.getPid(), memInfo);
        }
    }
}

From source file:org.sonar.plugins.javascript.lcov.LCOVCoverageSensor.java

/**
 * Collects the configured LCOV report files, parses them, and saves the
 * resulting coverage measures for every main-language input file. Files
 * absent from the reports get explicit zero coverage.
 */
protected void saveMeasureFromLCOVFile(SensorContext context) {
    final LinkedList<File> reportFiles = new LinkedList<>();

    // Resolve every configured report path to an existing file on disk.
    for (String reportPath : reportPaths) {
        final String providedPath = settings.getString(reportPath);
        if (StringUtils.isBlank(providedPath)) {
            continue;
        }
        final File candidate = getIOFile(fileSystem.baseDir(), providedPath);
        if (!candidate.isFile()) {
            LOG.warn(
                    "No coverage information will be saved because LCOV file cannot be found. Provided LCOV file path: {}",
                    providedPath);
            LOG.warn("Provided LCOV file path: {}. Seek file with path: {}", providedPath,
                    candidate.getAbsolutePath());
            continue;
        }
        reportFiles.add(candidate);
    }

    if (reportFiles.isEmpty()) {
        LOG.warn("No coverage information will be saved because all LCOV files cannot be found.");
        return;
    }

    LOG.info("Analysing {}", reportFiles);

    final LCOVParser parser = LCOVParser.create(fileSystem, reportFiles.toArray(new File[0]));
    final Map<InputFile, CoverageMeasuresBuilder> coverageByInputFile = parser.coverageByFile();

    for (InputFile inputFile : fileSystem.inputFiles(mainFilePredicate)) {
        try {
            final org.sonar.api.resources.File resource = org.sonar.api.resources.File
                    .create(inputFile.relativePath());
            final CoverageMeasuresBuilder fileCoverage = coverageByInputFile.get(inputFile);
            if (fileCoverage == null) {
                // colour all lines as not executed
                LOG.debug("Default value of zero will be saved for file: {}", resource.getPath());
                LOG.debug(
                        "Because: either was not present in LCOV report either was not able to retrieve associated SonarQube resource");
                saveZeroValueForResource(resource, context);
            } else {
                for (Measure measure : fileCoverage.createMeasures()) {
                    context.saveMeasure(resource, convertMeasure(measure));
                }
            }
        } catch (Exception e) {
            LOG.error("Problem while calculating coverage for " + inputFile.absolutePath(), e);
        }
    }

    // Report paths mentioned in the LCOV data that could not be matched.
    final List<String> unresolvedPaths = parser.unresolvedPaths();
    if (!unresolvedPaths.isEmpty()) {
        LOG.warn(String.format("Could not resolve %d file paths in %s, first unresolved path: %s",
                unresolvedPaths.size(), reportFiles, unresolvedPaths.get(0)));
    }
}

From source file:org.commonjava.emb.project.ProjectLoader.java

/**
 * Registers each project's POM parentage chain in the session's dependency
 * graph. For every project the chain of POM artifacts from the top-most
 * ancestor down to the project itself is built, and each parent POM is
 * recorded as a dependency of its direct child POM (a parent POM must be
 * resolved before its child can be built).
 *
 * @param session  tools session holding the dependency graph to populate
 * @param projects projects whose POM parentage should be added
 */
private void addProjects(final ProjectToolsSession session, final List<MavenProject> projects) {
    final DependencyGraph depGraph = session.getDependencyGraph();
    for (final MavenProject project : projects) {
        // Build the ancestry list: [top-most ancestor, ..., project].
        final LinkedList<Artifact> parentage = new LinkedList<Artifact>();
        MavenProject parent = project;
        while (parent != null) {
            // BUGFIX: use the coordinates of the project currently being
            // walked ('parent'), not always the leaf 'project' — otherwise
            // the list contains N identical copies of the leaf's POM.
            final org.apache.maven.artifact.Artifact pomArtifact = mavenRepositorySystem
                    .createArtifact(parent.getGroupId(), parent.getArtifactId(), parent.getVersion(), "pom");

            final Artifact aetherPomArtifact = RepositoryUtils.toArtifact(pomArtifact);

            parentage.addFirst(aetherPomArtifact);

            parent = parent.getParent();
        }

        // parentage always holds at least the project itself.
        Artifact current = parentage.removeFirst();
        while (!parentage.isEmpty()) {
            final Artifact next = parentage.getFirst();

            // This is WEIRD, but the parent POM is actually a dependency of the current one,
            // since it's required in order to build the current project...
            if (LOGGER.isDebugEnabled()) {
                LOGGER.debug("Marking parent POM: " + current + " as dependency of POM: " + next);
            }
            depGraph.addDependency(next, current, true, true);

            // Loop condition guarantees a next element, so the original
            // redundant isEmpty() re-check before removeFirst() is dropped.
            current = parentage.removeFirst();
        }
    }
}

From source file:edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.FDsAndEquivClassesVisitor.java

/**
 * Computes variable equivalence classes and functional dependencies (FDs)
 * for a GROUP BY operator and registers both with the optimization context.
 * Also normalizes the group-by list: grouping expressions that the inherited
 * equivalence classes/FDs prove redundant are demoted to the decor list.
 *
 * @param op  the group-by operator being visited
 * @param ctx optimization context in which the computed properties are stored
 * @return always {@code null}
 * @throws AlgebricksException propagated from nested property computation
 */
@Override
public Void visitGroupByOperator(GroupByOperator op, IOptimizationContext ctx) throws AlgebricksException {
    Map<LogicalVariable, EquivalenceClass> equivalenceClasses = new HashMap<LogicalVariable, EquivalenceClass>();
    List<FunctionalDependency> functionalDependencies = new ArrayList<FunctionalDependency>();
    ctx.putEquivalenceClassMap(op, equivalenceClasses);
    ctx.putFDList(op, functionalDependencies);

    // Inherit equivalence classes and FDs from the roots of nested plans.
    List<FunctionalDependency> inheritedFDs = new ArrayList<FunctionalDependency>();
    for (ILogicalPlan p : op.getNestedPlans()) {
        for (Mutable<ILogicalOperator> r : p.getRoots()) {
            ILogicalOperator op2 = r.getValue();
            equivalenceClasses.putAll(getOrComputeEqClasses(op2, ctx));
            inheritedFDs.addAll(getOrComputeFDs(op2, ctx));
        }
    }

    // ...and from the operator's (single) input.
    ILogicalOperator op0 = op.getInputs().get(0).getValue();
    inheritedFDs.addAll(getOrComputeFDs(op0, ctx));
    Map<LogicalVariable, EquivalenceClass> inheritedEcs = getOrComputeEqClasses(op0, ctx);
    // Rewrite each inherited FD into the group-by's output variables; the FD
    // is kept only if its whole head is covered by gby/decor variables.
    for (FunctionalDependency inherited : inheritedFDs) {
        boolean isCoveredByGbyOrDecorVars = true;
        List<LogicalVariable> newHead = new ArrayList<LogicalVariable>(inherited.getHead().size());
        for (LogicalVariable v : inherited.getHead()) {
            LogicalVariable vnew = getNewGbyVar(op, v);
            if (vnew == null) {
                vnew = getNewDecorVar(op, v);
                if (vnew == null) {
                    isCoveredByGbyOrDecorVars = false;
                }
                // NOTE(review): this break also fires when a decor var WAS
                // found, skipping the remaining head vars — confirm intended.
                break;
            }
            newHead.add(vnew);
        }

        if (isCoveredByGbyOrDecorVars) {
            // Map the tail; variables without a gby image are dropped.
            List<LogicalVariable> newTail = new ArrayList<LogicalVariable>();
            for (LogicalVariable v2 : inherited.getTail()) {
                LogicalVariable v3 = getNewGbyVar(op, v2);
                if (v3 != null) {
                    newTail.add(v3);
                }
            }
            if (!newTail.isEmpty()) {
                FunctionalDependency newFd = new FunctionalDependency(newHead, newTail);
                functionalDependencies.add(newFd);
            }
        }
    }

    // The group-by variables functionally determine the decor variables.
    List<LogicalVariable> premiseGby = new LinkedList<LogicalVariable>();
    List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> gByList = op.getGroupByList();
    for (Pair<LogicalVariable, Mutable<ILogicalExpression>> p : gByList) {
        premiseGby.add(p.first);
    }

    List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> decorList = op.getDecorList();

    LinkedList<LogicalVariable> conclDecor = new LinkedList<LogicalVariable>();
    for (Pair<LogicalVariable, Mutable<ILogicalExpression>> p : decorList) {
        conclDecor.add(GroupByOperator.getDecorVariable(p));
    }
    if (!conclDecor.isEmpty()) {
        functionalDependencies.add(new FunctionalDependency(premiseGby, conclDecor));
    }

    // Collect grouping columns that are plain variable references...
    Set<LogicalVariable> gbySet = new HashSet<LogicalVariable>();
    for (Pair<LogicalVariable, Mutable<ILogicalExpression>> p : gByList) {
        ILogicalExpression expr = p.second.getValue();
        if (expr.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
            VariableReferenceExpression v = (VariableReferenceExpression) expr;
            gbySet.add(v.getVariableReference());
        }
    }
    // ...and normalize them against the inherited eq. classes and FDs.
    LocalGroupingProperty lgp = new LocalGroupingProperty(gbySet);
    lgp.normalizeGroupingColumns(inheritedEcs, inheritedFDs);
    Set<LogicalVariable> normSet = lgp.getColumnSet();
    // Keep only grouping pairs whose representative survived normalization;
    // redundant ones are moved to the decor list.
    List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> newGbyList = new ArrayList<Pair<LogicalVariable, Mutable<ILogicalExpression>>>();
    boolean changed = false;
    for (Pair<LogicalVariable, Mutable<ILogicalExpression>> p : gByList) {
        ILogicalExpression expr = p.second.getValue();
        if (expr.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
            VariableReferenceExpression varRef = (VariableReferenceExpression) expr;
            LogicalVariable v2 = varRef.getVariableReference();
            EquivalenceClass ec2 = inheritedEcs.get(v2);
            LogicalVariable v3;
            if (ec2 != null && !ec2.representativeIsConst()) {
                v3 = ec2.getVariableRepresentative();
            } else {
                v3 = v2;
            }
            if (normSet.contains(v3)) {
                newGbyList.add(p);
            } else {
                changed = true;
                decorList.add(p);
            }
        } else {
            // Non-variable grouping expressions are always kept.
            newGbyList.add(p);
        }
    }
    if (changed) {
        AlgebricksConfig.ALGEBRICKS_LOGGER
                .fine(">>>> Group-by list changed from " + GroupByOperator.veListToString(gByList) + " to "
                        + GroupByOperator.veListToString(newGbyList) + ".\n");
    }
    gByList.clear();
    gByList.addAll(newGbyList);
    return null;
}

From source file:com.facebook.react.views.textinput.ReactTextInputManager.java

/**
 * Sets (or clears) the "maxLength" prop by installing an
 * {@code InputFilter.LengthFilter} on the edit text, while preserving all
 * other filters already present.
 *
 * @param view      the target edit text
 * @param maxLength maximum number of characters, or null to remove the limit
 */
@ReactProp(name = "maxLength")
public void setMaxLength(ReactEditText view, @Nullable Integer maxLength) {
    InputFilter[] currentFilters = view.getFilters();
    InputFilter[] newFilters = EMPTY_FILTERS;

    if (maxLength == null) {
        // Remove any existing LengthFilter; keep every other filter.
        if (currentFilters.length > 0) {
            LinkedList<InputFilter> list = new LinkedList<>();
            for (int i = 0; i < currentFilters.length; i++) {
                if (!(currentFilters[i] instanceof InputFilter.LengthFilter)) {
                    list.add(currentFilters[i]);
                }
            }
            if (!list.isEmpty()) {
                newFilters = (InputFilter[]) list.toArray(new InputFilter[list.size()]);
            }
        }
    } else {
        if (currentFilters.length > 0) {
            // Replace any existing LengthFilter(s) in place.
            newFilters = currentFilters;
            boolean replaced = false;
            for (int i = 0; i < currentFilters.length; i++) {
                if (currentFilters[i] instanceof InputFilter.LengthFilter) {
                    currentFilters[i] = new InputFilter.LengthFilter(maxLength);
                    replaced = true;
                }
            }
            if (!replaced) {
                // No LengthFilter present: append one to an enlarged copy.
                newFilters = new InputFilter[currentFilters.length + 1];
                System.arraycopy(currentFilters, 0, newFilters, 0, currentFilters.length);
                // BUGFIX: write into newFilters. The original wrote to
                // currentFilters[currentFilters.length], which is one past
                // the end and throws ArrayIndexOutOfBoundsException.
                newFilters[currentFilters.length] = new InputFilter.LengthFilter(maxLength);
            }
        } else {
            newFilters = new InputFilter[1];
            newFilters[0] = new InputFilter.LengthFilter(maxLength);
        }
    }

    view.setFilters(newFilters);
}

From source file:ORG.oclc.os.SRW.SRWDatabase.java

/**
 * Fetches a pooled SRWDatabase instance for {@code dbname}, creating one
 * if none is currently available.
 *
 * @return a database instance, or null if the database was previously
 *         marked bad or cannot be created
 */
public static SRWDatabase getDB(String dbname, Properties properties, String servletContext,
        HttpServletRequest request) {
    log.debug("enter SRWDatabase.getDB");
    if (badDbs.get(dbname) != null) // we've seen this one before
        return null;

    LinkedList<SRWDatabase> queue = dbs.get(dbname);
    SRWDatabase db = null;
    try {
        if (queue == null)
            log.info("No databases created yet for database " + dbname);
        else {
            log.debug("about to synchronize #1 on queue");
            // Pop an idle instance from the pool, if there is one.
            synchronized (queue) {
                if (queue.isEmpty())
                    log.info("No databases available for database " + dbname);
                else {
                    db = queue.removeFirst();
                    if (db == null)
                        log.debug("popped a null database off the queue for database " + dbname);
                }
            }
            log.debug("done synchronize #1 on queue");
        }
        if (db == null) {
            log.info("creating a database for " + dbname);
            try {
                // Keep creating until we manage to pop an instance; another
                // thread may grab the one we just created first.
                while (db == null) {
                    createDB(dbname, properties, servletContext, request);
                    queue = dbs.get(dbname);
                    log.debug("about to synchronize #2 on queue");
                    synchronized (queue) {
                        if (!queue.isEmpty()) // crap, someone got to it before us
                            db = queue.removeFirst();
                    }
                }
                log.debug("done synchronize #2 on queue");
            } catch (Exception e) { // database not available
                // Remember the failure so future calls fail fast.
                badDbs.put(dbname, dbname);
                log.error(e, e);
                return null;
            }
        }
    } catch (Exception e) {
        log.error(e, e);
        log.error("shoot!");
    }
    if (log.isDebugEnabled())
        log.debug("getDB: db=" + db);
    log.debug("exit SRWDatabase.getDB");
    return db;
}

From source file:de.hasait.genesis.base.freemarker.FreemarkerModelWriter.java

/**
 * Renders {@code pModel} with the Freemarker template matching its type.
 * The template is looked up by a breadth-first walk of the model's type
 * hierarchy (interfaces first, then superclass) until a template named
 * {@code <SimpleName>.ftl} is found. Hits are memoized in
 * {@code TEMPLATE_CACHE} for the matched type and all types on the search
 * path that led to it.
 *
 * @throws TemplateNotFoundException if no template exists for any type in
 *                                   the hierarchy (the first failure seen
 *                                   is rethrown)
 * @throws IOException               propagated from template loading/writing
 * @throws TemplateException         propagated from template processing
 */
static void write(final Configuration pConfiguration, final Writer pWriter, final Object pModel,
        final Map pParams) throws IOException, TemplateException {
    final Map<Class<?>, Template> templateCache = TEMPLATE_CACHE.get();

    Class<?> currentType = pModel.getClass();
    Template template = templateCache.get(currentType);
    if (template == null) {
        // BFS over the type hierarchy; each node remembers its parent so
        // the successful search path can be cached afterwards.
        final LinkedList<TypeNode> queue = new LinkedList<TypeNode>();
        queue.add(new TypeNode(null, currentType));

        TemplateNotFoundException firstE = null;

        do {
            // take first from queue
            TypeNode current = queue.removeFirst();
            currentType = current._type;

            // determine template
            template = templateCache.get(currentType);
            if (template == null) {
                final String templateName = currentType.getSimpleName() + ".ftl";
                try {
                    template = pConfiguration.getTemplate(templateName);
                } catch (final TemplateNotFoundException e) {
                    if (firstE == null) {
                        firstE = e;
                    }
                }
            }

            if (template != null) {
                // Fill the cache for this type and every ancestor on the
                // path. (The original had an extra put() here before the
                // loop; it was redundant — the first iteration covers it.)
                while (true) {
                    templateCache.put(currentType, template);
                    current = current._parent;
                    if (current == null) {
                        break;
                    }
                    currentType = current._type;
                }
            } else {
                // fill queue with next nodes
                for (final Class<?> interfaceType : currentType.getInterfaces()) {
                    queue.add(new TypeNode(current, interfaceType));
                }
                final Class<?> superclassType = currentType.getSuperclass();
                if (superclassType != null) {
                    queue.add(new TypeNode(current, superclassType));
                }
            }
        } while (template == null && !queue.isEmpty());

        if (template == null) {
            // Every candidate was missing; surface the first lookup failure.
            throw firstE;
        }
    }

    write(pConfiguration, pWriter, template, pModel, pParams);
}

From source file:org.apache.hyracks.algebricks.core.algebra.operators.logical.visitors.FDsAndEquivClassesVisitor.java

/**
 * Computes variable equivalence classes and functional dependencies (FDs)
 * for a GROUP BY operator and registers both with the optimization context.
 * Also normalizes the group-by list: grouping expressions that the inherited
 * equivalence classes/FDs prove redundant are demoted to the decor list.
 *
 * @param op  the group-by operator being visited
 * @param ctx optimization context in which the computed properties are stored
 * @return always {@code null}
 * @throws AlgebricksException propagated from nested property computation
 */
@Override
public Void visitGroupByOperator(GroupByOperator op, IOptimizationContext ctx) throws AlgebricksException {
    Map<LogicalVariable, EquivalenceClass> equivalenceClasses = new HashMap<LogicalVariable, EquivalenceClass>();
    List<FunctionalDependency> functionalDependencies = new ArrayList<FunctionalDependency>();
    ctx.putEquivalenceClassMap(op, equivalenceClasses);
    ctx.putFDList(op, functionalDependencies);

    // Inherit equivalence classes and FDs from the roots of nested plans.
    List<FunctionalDependency> inheritedFDs = new ArrayList<FunctionalDependency>();
    for (ILogicalPlan p : op.getNestedPlans()) {
        for (Mutable<ILogicalOperator> r : p.getRoots()) {
            ILogicalOperator op2 = r.getValue();
            equivalenceClasses.putAll(getOrComputeEqClasses(op2, ctx));
            inheritedFDs.addAll(getOrComputeFDs(op2, ctx));
        }
    }

    // ...and from the operator's (single) input.
    ILogicalOperator op0 = op.getInputs().get(0).getValue();
    inheritedFDs.addAll(getOrComputeFDs(op0, ctx));
    Map<LogicalVariable, EquivalenceClass> inheritedEcs = getOrComputeEqClasses(op0, ctx);
    // Rewrite each inherited FD into the group-by's output variables; the FD
    // is kept only if its whole head is covered by gby/decor variables.
    for (FunctionalDependency inherited : inheritedFDs) {
        boolean isCoveredByGbyOrDecorVars = true;
        List<LogicalVariable> newHead = new ArrayList<LogicalVariable>(inherited.getHead().size());
        for (LogicalVariable v : inherited.getHead()) {
            LogicalVariable vnew = getNewGbyVar(op, v);
            if (vnew == null) {
                vnew = getNewDecorVar(op, v);
                if (vnew == null) {
                    isCoveredByGbyOrDecorVars = false;
                }
                // NOTE(review): this break also fires when a decor var WAS
                // found, skipping the remaining head vars — confirm intended.
                break;
            }
            newHead.add(vnew);
        }

        if (isCoveredByGbyOrDecorVars) {
            // Map the tail; variables without a gby image are dropped.
            List<LogicalVariable> newTail = new ArrayList<LogicalVariable>();
            for (LogicalVariable v2 : inherited.getTail()) {
                LogicalVariable v3 = getNewGbyVar(op, v2);
                if (v3 != null) {
                    newTail.add(v3);
                }
            }
            if (!newTail.isEmpty()) {
                FunctionalDependency newFd = new FunctionalDependency(newHead, newTail);
                functionalDependencies.add(newFd);
            }
        }
    }

    // The group-by variables functionally determine the decor variables.
    List<LogicalVariable> premiseGby = new LinkedList<LogicalVariable>();
    List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> gByList = op.getGroupByList();
    for (Pair<LogicalVariable, Mutable<ILogicalExpression>> p : gByList) {
        premiseGby.add(p.first);
    }

    List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> decorList = op.getDecorList();

    LinkedList<LogicalVariable> conclDecor = new LinkedList<LogicalVariable>();
    for (Pair<LogicalVariable, Mutable<ILogicalExpression>> p : decorList) {
        conclDecor.add(GroupByOperator.getDecorVariable(p));
    }
    if (!conclDecor.isEmpty()) {
        functionalDependencies.add(new FunctionalDependency(premiseGby, conclDecor));
    }

    // Collect grouping columns that are plain variable references...
    Set<LogicalVariable> gbySet = new HashSet<LogicalVariable>();
    for (Pair<LogicalVariable, Mutable<ILogicalExpression>> p : gByList) {
        ILogicalExpression expr = p.second.getValue();
        if (expr.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
            VariableReferenceExpression v = (VariableReferenceExpression) expr;
            gbySet.add(v.getVariableReference());
        }
    }
    // ...and normalize them against the inherited eq. classes and FDs.
    LocalGroupingProperty lgp = new LocalGroupingProperty(gbySet);
    ILocalStructuralProperty normalizedLgp = lgp.normalize(inheritedEcs, inheritedFDs);
    Set<LogicalVariable> normSet = new ListSet<>();
    normalizedLgp.getColumns(normSet);
    // Keep only grouping pairs whose representative survived normalization;
    // redundant ones are moved to the decor list.
    List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> newGbyList = new ArrayList<>();
    boolean changed = false;
    for (Pair<LogicalVariable, Mutable<ILogicalExpression>> p : gByList) {
        ILogicalExpression expr = p.second.getValue();
        if (expr.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
            VariableReferenceExpression varRef = (VariableReferenceExpression) expr;
            LogicalVariable v2 = varRef.getVariableReference();
            EquivalenceClass ec2 = inheritedEcs.get(v2);
            LogicalVariable v3;
            if (ec2 != null && !ec2.representativeIsConst()) {
                v3 = ec2.getVariableRepresentative();
            } else {
                v3 = v2;
            }
            if (normSet.contains(v3)) {
                newGbyList.add(p);
            } else {
                changed = true;
                decorList.add(p);
            }
        } else {
            // Non-variable grouping expressions are always kept.
            newGbyList.add(p);
        }
    }
    if (changed) {
        AlgebricksConfig.ALGEBRICKS_LOGGER
                .fine(">>>> Group-by list changed from " + GroupByOperator.veListToString(gByList) + " to "
                        + GroupByOperator.veListToString(newGbyList) + ".\n");
    }
    gByList.clear();
    gByList.addAll(newGbyList);
    return null;
}

From source file:org.apache.hadoop.yarn.util.ProcfsBasedProcessTree.java

/**
 * Update process-tree with latest state. If the root-process is not alive,
 * tree will be empty.
 *
 */
@Override
public void updateProcessTree() {
    if (!pid.equals(deadPid)) {
        // Get the current list of process ids from the procfs directory.
        List<String> processList = getProcessList();

        Map<String, ProcessInfo> allProcessInfo = new HashMap<String, ProcessInfo>();

        // cache the processTree to get the age for processes
        Map<String, ProcessInfo> oldProcs = new HashMap<String, ProcessInfo>(processTree);
        processTree.clear();

        ProcessInfo me = null;
        for (String proc : processList) {
            // Get information for each process; a null result means the
            // process exited between listing and reading its procfs entry.
            ProcessInfo pInfo = new ProcessInfo(proc);
            if (constructProcessInfo(pInfo, procfsDir) != null) {
                allProcessInfo.put(proc, pInfo);
                if (proc.equals(this.pid)) {
                    me = pInfo; // cache 'me'
                    processTree.put(proc, pInfo);
                }
            }
        }

        // Root process is gone: leave the tree empty.
        if (me == null) {
            return;
        }

        // Add each process to its parent.
        for (Map.Entry<String, ProcessInfo> entry : allProcessInfo.entrySet()) {
            String pID = entry.getKey();
            if (!pID.equals("1")) {
                ProcessInfo pInfo = entry.getValue();
                String ppid = pInfo.getPpid();
                // If parent is init and process is not session leader,
                // attach to sessionID
                if (ppid.equals("1")) {
                    String sid = pInfo.getSessionId().toString();
                    if (!pID.equals(sid)) {
                        ppid = sid;
                    }
                }
                ProcessInfo parentPInfo = allProcessInfo.get(ppid);
                if (parentPInfo != null) {
                    parentPInfo.addChild(pInfo);
                }
            }
        }

        // now start constructing the process-tree: breadth-first walk over
        // 'me' and all of its descendants, collecting them into processTree.
        LinkedList<ProcessInfo> pInfoQueue = new LinkedList<ProcessInfo>();
        pInfoQueue.addAll(me.getChildren());
        while (!pInfoQueue.isEmpty()) {
            ProcessInfo pInfo = pInfoQueue.remove();
            if (!processTree.containsKey(pInfo.getPid())) {
                processTree.put(pInfo.getPid(), pInfo);
            }
            pInfoQueue.addAll(pInfo.getChildren());
        }

        // update age values and compute the number of jiffies since last update
        for (Map.Entry<String, ProcessInfo> procs : processTree.entrySet()) {
            ProcessInfo oldInfo = oldProcs.get(procs.getKey());
            if (procs.getValue() != null) {
                procs.getValue().updateJiffy(oldInfo);
                if (oldInfo != null) {
                    procs.getValue().updateAge(oldInfo);
                }
            }
        }

        if (LOG.isDebugEnabled()) {
            // Log.debug the ProcfsBasedProcessTree
            LOG.debug(this.toString());
        }
        if (smapsEnabled) {
            // Update smaps info for every live process in the tree.
            processSMAPTree.clear();
            for (ProcessInfo p : processTree.values()) {
                if (p != null) {
                    // Get information for each process
                    ProcessTreeSmapMemInfo memInfo = new ProcessTreeSmapMemInfo(p.getPid());
                    constructProcessSMAPInfo(memInfo, procfsDir);
                    processSMAPTree.put(p.getPid(), memInfo);
                }
            }
        }
    }
}

From source file:com.lenovo.tensorhusky.common.utils.ProcfsBasedProcessTree.java

/**
 * Update process-tree with latest state. If the root-process is not alive,
 * tree will be empty.
 */
@Override
public void updateProcessTree() {
    if (!pid.equals(deadPid)) {
        // Get the current list of process ids from the procfs directory.
        List<String> processList = getProcessList();

        Map<String, ProcessInfo> allProcessInfo = new HashMap<String, ProcessInfo>();

        // cache the processTree to get the age for processes
        Map<String, ProcessInfo> oldProcs = new HashMap<String, ProcessInfo>(processTree);
        processTree.clear();

        ProcessInfo me = null;
        for (String proc : processList) {
            // Get information for each process; a null result means the
            // process exited between listing and reading its procfs entry.
            ProcessInfo pInfo = new ProcessInfo(proc);
            if (constructProcessInfo(pInfo, procfsDir) != null) {
                allProcessInfo.put(proc, pInfo);
                if (proc.equals(this.pid)) {
                    me = pInfo; // cache 'me'
                    processTree.put(proc, pInfo);
                }
            }
        }

        // Root process is gone: leave the tree empty.
        if (me == null) {
            return;
        }

        // Add each process to its parent.
        for (Map.Entry<String, ProcessInfo> entry : allProcessInfo.entrySet()) {
            String pID = entry.getKey();
            if (!pID.equals("1")) {
                ProcessInfo pInfo = entry.getValue();
                ProcessInfo parentPInfo = allProcessInfo.get(pInfo.getPpid());
                if (parentPInfo != null) {
                    parentPInfo.addChild(pInfo);
                }
            }
        }

        // now start constructing the process-tree: breadth-first walk over
        // 'me' and all of its descendants, collecting them into processTree.
        LinkedList<ProcessInfo> pInfoQueue = new LinkedList<ProcessInfo>();
        pInfoQueue.addAll(me.getChildren());
        while (!pInfoQueue.isEmpty()) {
            ProcessInfo pInfo = pInfoQueue.remove();
            if (!processTree.containsKey(pInfo.getPid())) {
                processTree.put(pInfo.getPid(), pInfo);
            }
            pInfoQueue.addAll(pInfo.getChildren());
        }

        // update age values and compute the number of jiffies since last
        // update
        for (Map.Entry<String, ProcessInfo> procs : processTree.entrySet()) {
            ProcessInfo oldInfo = oldProcs.get(procs.getKey());
            if (procs.getValue() != null) {
                procs.getValue().updateJiffy(oldInfo);
                if (oldInfo != null) {
                    procs.getValue().updateAge(oldInfo);
                }
            }
        }

        if (LOG.isDebugEnabled()) {
            // Log.debug the ProcfsBasedProcessTree
            LOG.debug(this.toString());
        }
        if (smapsEnabled) {
            // Update smaps info for every live process in the tree.
            processSMAPTree.clear();
            for (ProcessInfo p : processTree.values()) {
                if (p != null) {
                    // Get information for each process
                    ProcessTreeSmapMemInfo memInfo = new ProcessTreeSmapMemInfo(p.getPid());
                    constructProcessSMAPInfo(memInfo, procfsDir);
                    processSMAPTree.put(p.getPid(), memInfo);
                }
            }
        }
    }
}