Example usage for com.google.common.collect Multimap values

List of usage examples for com.google.common.collect Multimap values

Introduction

On this page you can find example usage for com.google.common.collect Multimap values.

Prototype

Collection<V> values();

Document

Returns a view collection containing the value from each key-value pair contained in this multimap, without collapsing duplicates (so values().size() == size()).
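
A minimal, hypothetical sketch (not taken from any of the sources below; class and variable names are illustrative) showing both properties of values(): duplicate values are kept, and the returned collection is a live view whose removals write through to the multimap.

import java.util.Collection;

import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;

public class MultimapValuesSketch {
    public static void main(String[] args) {
        Multimap<String, Integer> scores = HashMultimap.create();
        scores.put("alice", 1);
        scores.put("alice", 2);
        scores.put("bob", 1); // same value under a different key

        Collection<Integer> values = scores.values();
        // Duplicates are not collapsed: values().size() == size()
        System.out.println(values.size() == scores.size()); // true (3 == 3)

        // The collection is a view: removing an element removes the
        // corresponding key-value pair from the underlying multimap.
        values.remove(1);
        System.out.println(scores.size()); // 2
    }
}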

Usage

From source file:org.jboss.errai.otec.client.atomizer.Atomizer.java

public static AtomizerSession syncWidgetWith(final OTEngine engine, final OTEntity entity,
        final ValueBoxBase widget) {
    logger.info("NEW ATOMIZER SESSION (engine:" + engine.getId() + ", widget=" + widget + ")");

    final Multimap<Object, HandlerRegistration> HANDLER_REGISTRATION_MAP = HashMultimap.create();
    final EntityChangeStreamImpl entityChangeStream = new EntityChangeStreamImpl(engine, entity);

    final EntityStreamRegistration entityStreamRegistration = engine.getPeerState()
            .addEntityStream(entityChangeStream);

    widget.setValue(entity.getState().get());

    HANDLER_REGISTRATION_MAP.put(widget, widget.addKeyDownHandler(new KeyDownHandler() {
        @Override
        public void onKeyDown(final KeyDownEvent event) {
            if (shouldIgnoreKeyPress(event)) {
                return;
            }

            if (widget.getSelectedText().length() > 0) {
                stopEvents();
                entityChangeStream.notifyDelete(widget.getCursorPos(), widget.getSelectedText());
                startEvents();
            } else if (event.getNativeKeyCode() == KeyCodes.KEY_BACKSPACE) {
                stopEvents();
                final int index = widget.getCursorPos() - 1;
                entityChangeStream.notifyDelete(index, " ");
                startEvents();
            } else if (event.getNativeKeyCode() == KeyCodes.KEY_ENTER) {
                stopEvents();
                entityChangeStream.notifyInsert(widget.getCursorPos(), "\n");
                startEvents();
            }
        }
    }));

    HANDLER_REGISTRATION_MAP.put(widget, widget.addKeyPressHandler(new KeyPressHandler() {
        @Override
        public void onKeyPress(final KeyPressEvent event) {
            if (event.getUnicodeCharCode() != 13 && event.getUnicodeCharCode() != 0) {
                stopEvents();
                entityChangeStream.notifyInsert(widget.getCursorPos(), String.valueOf(event.getCharCode()));
                startEvents();
            }
        }
    }));

    DOM.setEventListener(widget.getElement(), new EventListener() {
        @Override
        public void onBrowserEvent(Event event) {
            if (event.getTypeInt() == Event.ONPASTE) {
                final String before = (String) entity.getState().get();
                new Timer() {
                    @Override
                    public void run() {
                        final String after = (String) widget.getValue();
                        final DiffUtil.Delta diff = DiffUtil.diff(before, after);

                        stopEvents();
                        entityChangeStream.notifyInsert(diff.getCursor(), diff.getDeltaText());
                        startEvents();
                    }
                }.schedule(1);
            }
            widget.onBrowserEvent(event);
        }
    });

    attachCutHandler(widget.getElement(), new Runnable() {
        @Override
        public void run() {
            stopEvents();
            entityChangeStream.notifyDelete(widget.getCursorPos(), widget.getSelectedText());
            startEvents();
        }
    });

    attachTextDragHandler(widget.getElement(), new Runnable() {
        @Override
        public void run() {
            stopEvents();
            entityChangeStream.notifyDelete(widget.getCursorPos(), widget.getSelectedText());
            entityChangeStream.flush();
            startEvents();
        }
    }, new Runnable() {
        @Override
        public void run() {
            final String old = (String) entity.getState().get();
            new Timer() {
                @Override
                public void run() {
                    final DiffUtil.Delta diff = DiffUtil.diff(old, (String) widget.getValue());
                    if (diff.getDeltaText().length() > 0) {
                        stopEvents();
                        entityChangeStream.notifyInsert(diff.getCursor(), diff.getDeltaText());
                        startEvents();
                    }
                }
            }.schedule(1);
        }
    });

    final ListenerRegistration listenerRegistration = entity.getState()
            .addStateChangeListener(new StateChangeListener() {
                @Override
                public int getCursorPos() {
                    return widget.getCursorPos();
                }

                @Override
                public void onStateChange(final int newCursorPos, final Object newValue) {
                    if (NO_PROPAGATE_STATE_CHANGE) {
                        return;
                    }

                    widget.setValue(newValue, false);
                    widget.setCursorPos(newCursorPos);
                }
            });

    DOM.sinkEvents(widget.getElement(), DOM.getEventsSunk(widget.getElement()) | Event.ONPASTE);

    final Timer timer = new Timer() {
        @Override
        public void run() {
            entityChangeStream.flush();
        }
    };
    timer.scheduleRepeating(500);

    return new AtomizerSession() {
        @Override
        public void end() {
            entityChangeStream.close();
            timer.cancel();

            logger.info("END ATOMIZER SESSION");
            entityStreamRegistration.remove();
            listenerRegistration.remove();
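            // values() is a flat view of every HandlerRegistration stored under any key,
            // so a single loop detaches all registered handlers.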
            final Collection<HandlerRegistration> values = HANDLER_REGISTRATION_MAP.values();
            for (final HandlerRegistration value : values) {
                value.removeHandler();
            }
        }
    };
}

From source file:org.gradle.model.internal.manage.schema.extract.StructSchemaExtractionStrategySupport.java

private <R> List<ModelPropertyExtractionResult<?>> extractPropertySchemas(
        ModelSchemaExtractionContext<R> extractionContext, Multimap<String, Method> methodsByName) {
    List<ModelPropertyExtractionResult<?>> results = Lists.newArrayList();
    Set<Method> handledMethods = Sets.newHashSet();

    List<String> methodNames = Lists.newArrayList(methodsByName.keySet());
    Collections.sort(methodNames);
    Set<String> skippedMethodNames = Sets.newHashSet();
    for (String methodName : methodNames) {
        if (skippedMethodNames.contains(methodName)) {
            continue;
        }

        Collection<Method> methods = methodsByName.get(methodName);

        List<Method> overloadedMethods = getOverloadedMethods(methods);
        if (overloadedMethods != null) {
            handleOverloadedMethods(extractionContext, overloadedMethods);
            methods = filterGetterMethods(methods);
            if (methods.isEmpty()) {
                continue;
            }
        }

        int getterPrefixLen = getterPrefixLength(methodName);
        if (getterPrefixLen >= 0) {
            Method mostSpecificGetter = ModelSchemaUtils.findMostSpecificMethod(methods);

            char getterPropertyNameFirstChar = methodName.charAt(getterPrefixLen);
            if (!Character.isUpperCase(getterPropertyNameFirstChar)) {
                handleInvalidGetter(extractionContext, mostSpecificGetter,
                        String.format(
                                "the %s character of the getter method name must be an uppercase character",
                                getterPrefixLen == 2 ? "3rd" : "4th"));
                continue;
            }

            String propertyNameCapitalized = methodName.substring(getterPrefixLen);
            String propertyName = StringUtils.uncapitalize(propertyNameCapitalized);
            String setterName = "set" + propertyNameCapitalized;
            Collection<Method> setterMethods = methodsByName.get(setterName);
            PropertyAccessorExtractionContext setterContext = !setterMethods.isEmpty()
                    ? new PropertyAccessorExtractionContext(setterMethods)
                    : null;

            String prefix = methodName.substring(0, getterPrefixLen);
            Iterable<Method> getterMethods = methods;
            if (prefix.equals("get")) {
                String isGetterName = "is" + propertyNameCapitalized;
                Collection<Method> isGetterMethods = methodsByName.get(isGetterName);
                if (!isGetterMethods.isEmpty()) {
                    List<Method> overloadedIsGetterMethods = getOverloadedMethods(isGetterMethods);
                    if (overloadedIsGetterMethods != null) {
                        handleOverloadedMethods(extractionContext, overloadedIsGetterMethods);
                        isGetterMethods = filterGetterMethods(isGetterMethods);
                    }

                    if (!isGetterMethods.isEmpty()) {
                        Method mostSpecificIsGetter = ModelSchemaUtils.findMostSpecificMethod(isGetterMethods);
                        if (mostSpecificGetter.getReturnType() != boolean.class
                                || mostSpecificIsGetter.getReturnType() != boolean.class) {
                            handleInvalidGetter(extractionContext, mostSpecificIsGetter, String.format(
                                    "property '%s' has both '%s()' and '%s()' getters, but they don't both return a boolean",
                                    propertyName, isGetterName, methodName));
                            continue;
                        }
                        getterMethods = Iterables.concat(getterMethods, isGetterMethods);
                        skippedMethodNames.add(isGetterName);
                    }
                }
            }

            PropertyAccessorExtractionContext getterContext = new PropertyAccessorExtractionContext(
                    getterMethods);
            ModelPropertyExtractionResult<?> result = extractPropertySchema(extractionContext, propertyName,
                    getterContext, setterContext, getterPrefixLen);
            if (result != null) {
                results.add(result);
                handledMethods.addAll(getterContext.getDeclaringMethods());
                if (setterContext != null) {
                    handledMethods.addAll(setterContext.getDeclaringMethods());
                }

            }
        }
    }

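    // methodsByName.values() supplies every method in the multimap (one element per
    // key-value pair) so the completeness check can compare against handledMethods.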
    validateAllNecessaryMethodsHandled(extractionContext, methodsByName.values(), handledMethods);
    return results;
}

From source file:ai.grakn.graql.internal.reasoner.atom.binary.Relation.java

private Set<Pair<RelationPlayer, RelationPlayer>> getRelationPlayerMappings(Relation parentAtom) {
    Set<Pair<RelationPlayer, RelationPlayer>> rolePlayerMappings = new HashSet<>();

    //establish compatible castings for each parent casting
    Multimap<RelationPlayer, RelationPlayer> compatibleMappings = HashMultimap.create();
    parentAtom.getRoleRelationPlayerMap();
    Multimap<RoleType, RelationPlayer> childRoleRPMap = getRoleRelationPlayerMap();
    Map<Var, Type> parentVarTypeMap = parentAtom.getParentQuery().getVarTypeMap();
    Map<Var, Type> childVarTypeMap = this.getParentQuery().getVarTypeMap();

    Set<RoleType> relationRoles = new HashSet<>(getType().asRelationType().relates());
    Set<RoleType> childRoles = new HashSet<>(childRoleRPMap.keySet());

    parentAtom.getRelationPlayers().stream().filter(prp -> prp.getRoleType().isPresent()).forEach(prp -> {
        VarPatternAdmin parentRoleTypeVar = prp.getRoleType().orElse(null);
        TypeLabel parentRoleTypeLabel = parentRoleTypeVar.getTypeLabel().orElse(null);

        //TODO take into account indirect roles
        RoleType parentRole = parentRoleTypeLabel != null ? graph().getType(parentRoleTypeLabel) : null;

        if (parentRole != null) {
            boolean isMetaRole = Schema.MetaSchema.isMetaLabel(parentRole.getLabel());
            Var parentRolePlayer = prp.getRolePlayer().getVarName();
            Type parentType = parentVarTypeMap.get(parentRolePlayer);

            Set<RoleType> compatibleChildRoles = isMetaRole ? childRoles
                    : Sets.intersection(new HashSet<>(parentRole.subTypes()), childRoles);

            if (parentType != null) {
                boolean isMetaType = Schema.MetaSchema.isMetaLabel(parentType.getLabel());
                Set<RoleType> typeRoles = isMetaType ? childRoles : new HashSet<>(parentType.plays());

                //incompatible type
                if (Sets.intersection(relationRoles, typeRoles).isEmpty())
                    compatibleChildRoles = new HashSet<>();
                else {
                    compatibleChildRoles = compatibleChildRoles.stream().filter(
                            rc -> Schema.MetaSchema.isMetaLabel(rc.getLabel()) || typeRoles.contains(rc))
                            .collect(toSet());
                }
            }

            compatibleChildRoles.stream().filter(childRoleRPMap::containsKey).forEach(r -> {
                Collection<RelationPlayer> childRPs = parentType != null
                        ? childRoleRPMap.get(r).stream().filter(rp -> {
                            Var childRolePlayer = rp.getRolePlayer().getVarName();
                            Type childType = childVarTypeMap.get(childRolePlayer);
                            return childType == null || !checkTypesDisjoint(parentType, childType);
                        }).collect(toSet())
                        : childRoleRPMap.get(r);

                childRPs.forEach(rp -> compatibleMappings.put(prp, rp));
            });
        }
    });

    //self-consistent procedure: repeat until no non-empty mappings remain
    while (compatibleMappings.asMap().values().stream().filter(s -> !s.isEmpty()).count() > 0) {
        Map.Entry<RelationPlayer, RelationPlayer> entry = compatibleMappings.entries().stream()
                //prioritise mappings with equivalent types and unambiguous mappings
                .sorted(Comparator.comparing(e -> {
                    Type parentType = parentVarTypeMap.get(e.getKey().getRolePlayer().getVarName());
                    Type childType = childVarTypeMap.get(e.getValue().getRolePlayer().getVarName());
                    return !(parentType != null && childType != null && parentType.equals(childType));
                }))
                //prioritise mappings with same var substitution (id predicates)
                .sorted(Comparator.comparing(e -> {
                    IdPredicate parentId = parentAtom.getIdPredicates().stream()
                            .filter(p -> p.getVarName().equals(e.getKey().getRolePlayer().getVarName()))
                            .findFirst().orElse(null);
                    IdPredicate childId = getIdPredicates().stream()
                            .filter(p -> p.getVarName().equals(e.getValue().getRolePlayer().getVarName()))
                            .findFirst().orElse(null);
                    return !(parentId != null && childId != null
                            && parentId.getPredicate().equals(childId.getPredicate()));
                })).sorted(Comparator.comparing(e -> compatibleMappings.get(e.getKey()).size())).findFirst()
                .orElse(null);

        RelationPlayer parentCasting = entry.getKey();
        RelationPlayer childCasting = entry.getValue();

        rolePlayerMappings.add(new Pair<>(childCasting, parentCasting));
        compatibleMappings.removeAll(parentCasting);
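        // values() is a live view: this remove drops a single (key, childCasting)
        // entry from the underlying multimap.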
        compatibleMappings.values().remove(childCasting);

    }
    return rolePlayerMappings;
}

From source file:org.apache.crunch.impl.mr.plan.MSCRPlanner.java

public MRExecutor plan(Class<?> jarClass, Configuration conf) throws IOException {

    DotfileUtil dotfileUtil = new DotfileUtil(jarClass, conf);

    // Generate the debug lineage dotfiles (if configuration is enabled)
    dotfileUtil.buildLineageDotfile(outputs);

    Map<PCollectionImpl<?>, Set<Target>> targetDeps = Maps.newTreeMap(DEPTH_COMPARATOR);
    for (PCollectionImpl<?> pcollect : outputs.keySet()) {
        targetDeps.put(pcollect, pcollect.getTargetDependencies());
    }

    Multimap<Target, JobPrototype> assignments = HashMultimap.create();

    while (!targetDeps.isEmpty()) {
        Set<Target> allTargets = Sets.newHashSet();
        for (PCollectionImpl<?> pcollect : targetDeps.keySet()) {
            allTargets.addAll(outputs.get(pcollect));
        }
        GraphBuilder graphBuilder = new GraphBuilder();

        // Walk the current plan tree and build a graph in which the vertices are
        // sources, targets, and GBK operations.
        Set<PCollectionImpl<?>> currentStage = Sets.newHashSet();
        for (PCollectionImpl<?> output : targetDeps.keySet()) {
            Set<Target> deps = Sets.intersection(allTargets, targetDeps.get(output));
            if (deps.isEmpty()) {
                graphBuilder.visitOutput(output);
                currentStage.add(output);
            }
        }

        Graph baseGraph = graphBuilder.getGraph();
        boolean hasInputs = false;
        for (Vertex v : baseGraph) {
            if (v.isInput()) {
                hasInputs = true;
                break;
            }
        }
        if (!hasInputs) {
            LOG.warn("No input sources for pipeline, nothing to do...");
            return new MRExecutor(conf, jarClass, outputs, toMaterialize, appendedTargets, pipelineCallables);
        }

        // Create a new graph that splits up dependent GBK nodes.
        Graph graph = prepareFinalGraph(baseGraph);

        // Break the graph up into connected components.
        List<List<Vertex>> components = graph.connectedComponents();

        // Generate the debug graph dotfiles (if configuration is enabled)
        dotfileUtil.buildBaseGraphDotfile(outputs, graph);
        dotfileUtil.buildSplitGraphDotfile(outputs, graph, components);

        // For each component, we will create one or more job prototypes,
        // depending on its profile.
        // For dependency handling, we only need to care about which
        // job prototype a particular GBK is assigned to.
        Multimap<Vertex, JobPrototype> newAssignments = HashMultimap.create();
        for (List<Vertex> component : components) {
            newAssignments.putAll(constructJobPrototypes(component));
        }

        // Add in the job dependency information here.
        for (Map.Entry<Vertex, JobPrototype> e : newAssignments.entries()) {
            JobPrototype current = e.getValue();
            for (Vertex parent : graph.getParents(e.getKey())) {
                for (JobPrototype parentJobProto : newAssignments.get(parent)) {
                    current.addDependency(parentJobProto);
                }
            }
        }

        ImmutableMultimap<Target, JobPrototype> previousStages = ImmutableMultimap.copyOf(assignments);
        for (Map.Entry<Vertex, JobPrototype> e : newAssignments.entries()) {
            if (e.getKey().isOutput()) {
                PCollectionImpl<?> pcollect = e.getKey().getPCollection();
                JobPrototype current = e.getValue();

                // Add in implicit dependencies via SourceTargets that are read into memory
                for (Target pt : pcollect.getTargetDependencies()) {
                    for (JobPrototype parentJobProto : assignments.get(pt)) {
                        current.addDependency(parentJobProto);
                    }
                }

                // Add this to the set of output assignments
                for (Target t : outputs.get(pcollect)) {
                    assignments.put(t, e.getValue());
                }
            } else {
                Source source = e.getKey().getSource();
                if (source != null && source instanceof Target) {
                    JobPrototype current = e.getValue();
                    Collection<JobPrototype> parentJobPrototypes = previousStages.get((Target) source);
                    if (parentJobPrototypes != null) {
                        for (JobPrototype parentJobProto : parentJobPrototypes) {
                            current.addDependency(parentJobProto);
                        }
                    }
                }
            }
        }

        // Remove completed outputs and mark materialized output locations
        // for subsequent job processing.
        for (PCollectionImpl<?> output : currentStage) {
            if (toMaterialize.containsKey(output)) {
                MaterializableIterable mi = toMaterialize.get(output);
                if (mi.isSourceTarget()) {
                    output.materializeAt((SourceTarget) mi.getSource());
                }
            }
            targetDeps.remove(output);
        }
    }

    // Finally, construct the jobs from the prototypes and return.
    MRExecutor exec = new MRExecutor(conf, jarClass, outputs, toMaterialize, appendedTargets,
            pipelineCallables);

    // Generate the debug Plan dotfiles
    dotfileUtil.buildPlanDotfile(exec, assignments, pipeline, lastJobID);

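    // assignments.values() keeps duplicates (the same JobPrototype may be assigned to
    // several targets), so copy into a HashSet to add each job exactly once.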
    for (JobPrototype proto : Sets.newHashSet(assignments.values())) {
        exec.addJob(proto.getCrunchJob(jarClass, conf, pipeline, lastJobID));
    }

    // Generate the debug RTNode dotfiles (if configuration is enabled)
    dotfileUtil.buildRTNodesDotfile(exec);

    // Attach the dotfiles to the MRExecutor context
    dotfileUtil.addDotfilesToContext(exec);

    return exec;
}

From source file:org.cloudsmith.geppetto.validation.impl.ValidationServiceImpl.java

/**
 * TODO: Horribly long method that should be refactored into several to get better optimization.
 *
 * @param diagnostics
 * @param root
 * @param options
 * @param examinedFiles
 * @param monitor
 * @return
 */
private BuildResult validateDirectory(Diagnostic diagnostics, File root, ValidationOptions options,
        File[] examinedFiles, IProgressMonitor monitor) {

    if (!(options.getFileType() == FileType.PUPPET_ROOT || options.getFileType() == FileType.MODULE_ROOT))
        throw new IllegalArgumentException("doDir can only process PUPPET_ROOT or MODULE_ROOT");

    // Process request to check layout
    if (options.isCheckLayout()) {
        if (options.getFileType() == FileType.MODULE_ROOT)
            checkModuleLayout(diagnostics, root, root);
        else if (options.getFileType() == FileType.PUPPET_ROOT)
            checkPuppetRootLayout(diagnostics, root, root);
    }

    List<File> ppFiles = findPPFiles(root);
    List<File> rbFiles = findRubyFiles(root);
    Collection<File> mdRoots = forge.findModuleRoots(root, null);
    List<File> rakeFiles = findRakefiles(root);

    final int workload = ppFiles.size() + mdRoots.size() * 3 + rbFiles.size() * 2 //
            + rakeFiles.size() * 2 //
            + 1 // load pptp
            + 1 // "for the pot" (to make sure there is a final tick to report)
    ;

    final SubMonitor ticker = SubMonitor.convert(monitor, workload); // TODO: scaling

    PPDiagnosticsRunner ppRunner = new PPDiagnosticsRunner();
    RubyHelper rubyHelper = new RubyHelper();

    try {
        IValidationAdvisor.ComplianceLevel complianceLevel = options.getComplianceLevel();
        if (complianceLevel == null)
            complianceLevel = IValidationAdvisor.ComplianceLevel.PUPPET_2_7;
        IPotentialProblemsAdvisor problemsAdvisor = options.getProblemsAdvisor();
        if (problemsAdvisor == null)
            problemsAdvisor = new DefaultPotentialProblemsAdvisor();
        ppRunner.setUp(complianceLevel, problemsAdvisor);
        rubyHelper.setUp();
    } catch (Exception e) {
        addExceptionDiagnostic(diagnostics, "Internal Error: Exception while setting up diagnostics.", e);
        return new BuildResult(rubyHelper.isRubyServicesAvailable()); // give up
    }
    ppRunner.configureEncoding(options.getEncodingProvider());
    ppRunner.configureSearchPath(root, options.getSearchPath(), options.getEnvironment());

    // get the configured search path
    final PPSearchPath searchPath = ppRunner.getDefaultSearchPath();

    // Modulefile processing
    // Modulefiles must be processed first in order to figure out containers and container visibility.
    final IPath rootPath = new Path(root.getAbsolutePath());
    final IPath nodeRootPath = rootPath.append(NAME_OF_DIR_WITH_RESTRICTED_SCOPE);

    // collect info in a structure
    Multimap<ModuleName, MetadataInfo> moduleData = ArrayListMultimap.create();
    for (File mdRoot : mdRoots) {
        // load and remember all that loaded ok
        File[] mdProvider = new File[1];
        Metadata m;
        try {
            m = forge.createFromModuleDirectory(mdRoot, true, null, mdProvider, diagnostics);
        } catch (IOException e) {
            addFileError(diagnostics, mdProvider[0], mdRoot, "Can not parse file: " + e.getMessage(),
                    IValidationConstants.ISSUE__MODULEFILE_PARSE_ERROR);
            m = null;
        }
        if (m == null)
            worked(ticker, 1);
        else {
            File f = mdProvider[0];
            ModuleName moduleName = m.getName();
            if (options.isCheckModuleSemantics()
                    && isOnPath(pathToFile(f.getAbsolutePath(), root), searchPath)) {
                // remember the metadata and where it came from
                // and if it represents a NODE as opposed to a regular MODULE
                moduleData.put(moduleName,
                        new MetadataInfo(m, f, nodeRootPath.isPrefixOf(new Path(f.getAbsolutePath()))));
            }
            if (isValidationWanted(examinedFiles, f)) {
                validateModuleMetadata(m, diagnostics, f, root, options, ticker.newChild(1));
            } else
                worked(ticker, 1);
        }
    }

    if (options.isCheckModuleSemantics()) {
        for (ModuleName key : moduleData.keySet()) {
            // check there is only one version of each module
            Collection<MetadataInfo> versions = moduleData.get(key);
            boolean redeclared = versions.size() > 1;

            for (MetadataInfo info : versions) {
                // processed dependencies for one version of a modulefile (in case of errors, there may not be as many ticks as
                // originally requested)
                // this ticks before the fact (but there is "one for the pot" left at the end),
                // as this makes it easier to just do "continue" below.
                worked(ticker, 1);

                // skip checks for unwanted
                final boolean shouldDiagnosticBeReported = isValidationWanted(examinedFiles, info.getFile());
                // if(!) continue;

                if (redeclared && shouldDiagnosticBeReported) {
                    addFileError(diagnostics, info.getFile(), root,
                            "Redefinition - equally named already exists",
                            IValidationConstants.ISSUE__MODULEFILE_REDEFINITION);
                }
                // Resolve all dependencies
                for (Dependency d : info.getMetadata().getDependencies()) {

                    // check dependency name and version requirement
                    final ModuleName requiredName = d.getName();
                    if (requiredName == null) {
                        if (shouldDiagnosticBeReported)
                            addFileError(diagnostics, info.getFile(), root, "Dependency without name",
                                    IValidationConstants.ISSUE__MODULEFILE_DEPENDENCY_ERROR);
                        continue; // not meaningful to resolve this
                                  // dependency
                    }

                    // find the best candidate (ignore the fact that there should just be one version of each
                    // module - there may be several, and one of them may match).
                    // It is allowed to have modules without versions, they can only be matched by
                    // a dependency that does not have a version requirement.
                    //
                    Collection<MetadataInfo> candidates = moduleData.get(requiredName);
                    List<Version> candidateVersions = Lists.newArrayList();
                    List<MetadataInfo> unversioned = Lists.newArrayList();
                    if (candidates != null)
                        for (MetadataInfo mi : candidates) {
                            Version cv = mi.getMetadata().getVersion();
                            if (cv == null) {
                                unversioned.add(mi);
                                continue; // the (possibly) broken version
                                          // is reported elsewhere
                            }
                            candidateVersions.add(cv);
                        }

                    // if the dependency has no version requirement use ">=0"
                    final VersionRange versionRequirement = d.getVersionRequirement();
                    if (versionRequirement == null) {
                        // find best match for >= 0 if there are candidates with versions
                        // the best will always win over unversioned.
                        if (candidateVersions.size() > 0) {
                            Collections.sort(candidateVersions);
                            Version best = candidateVersions.get(candidateVersions.size() - 1);

                            // get the matched MetaDataInfo as the resolution of the dependency
                            // and remember it
                            for (MetadataInfo mi : candidates) {
                                if (mi.getMetadata().getVersion().equals(best))
                                    info.addResolvedDependency(d, mi);
                            }

                        }
                        // or there must be unversioned candidates
                        else if (unversioned.size() == 0)
                            if (shouldDiagnosticBeReported)
                                addFileDiagnostic(diagnostics,
                                        (candidates.size() > 0 ? Diagnostic.WARNING : Diagnostic.ERROR),
                                        info.getFile(), root,
                                        "Unresolved Dependency to: " + d.getName() + " (unversioned).",
                                        IValidationConstants.ISSUE__MODULEFILE_UNSATISFIED_DEPENDENCY);
                            else {
                                // pick the first as resolution
                                // worry about ambiguity elsewhere
                                info.addResolvedDependency(d, unversioned.get(0));
                            }
                    } else {
                        // there was a version requirement, it must match something with a version.
                        Version best = d.getVersionRequirement().findBestMatch(candidateVersions);
                        if (best == null) {
                            info.addUnresolvedDependency(d);
                            if (shouldDiagnosticBeReported)
                                addFileDiagnostic(diagnostics,
                                        (candidates.size() > 0 ? Diagnostic.WARNING : Diagnostic.ERROR),
                                        info.getFile(), root,
                                        "Unresolved Dependency to: " + d.getName() + " version: "
                                                + d.getVersionRequirement(),
                                        IValidationConstants.ISSUE__MODULEFILE_UNSATISFIED_DEPENDENCY);
                        } else {
                            // get the matched MetaDataInfo as the resolution of the dependency
                            // and remember it
                            for (MetadataInfo mi : candidates) {
                                if (mi.getMetadata().getVersion().equals(best))
                                    info.addResolvedDependency(d, mi);
                            }
                        }
                    }
                }
            }
        }
        IPotentialProblemsAdvisor advisor = options.getProblemsAdvisor();
        if (advisor != null && advisor.circularDependencyPreference().isWarningOrError()) {
            ValidationPreference preference = options.getProblemsAdvisor().circularDependencyPreference();
            checkCircularDependencies(moduleData, diagnostics, root);
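            // moduleData.values() iterates every MetadataInfo across all module names,
            // so circularity messages are reported per loaded module instance.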
            for (MetadataInfo mi : moduleData.values()) {
                if (isValidationWanted(examinedFiles, mi.getFile())) {
                    for (String circularity : mi.getCircularityMessages())
                        addFileDiagnostic(diagnostics,
                                preference.isError() ? Diagnostic.ERROR : Diagnostic.WARNING, mi.getFile(),
                                root, circularity, IPPDiagnostics.ISSUE__CIRCULAR_MODULE_DEPENDENCY);
                }
            }
        }
    }
    // TODO: Wasteful to calculate the URLs more than once.
    // Could be done once per pp and rb (to separate the processing), or have all in one pile
    // and let processing look at extension.

    // Calculate containers
    // sets up iterateable over all files including pptp

    boolean useContainers = true;
    URI uri = options.getPlatformURI();
    if (useContainers) {
        List<URI> pptpURIs = Lists.newArrayList(uri != null ? uri : PuppetTarget.getDefault().getPlatformURI());
        ppRunner.configureContainers(root, moduleData.values(), //
                Iterables.concat(
                        Iterables.transform(Iterables.concat(ppFiles, rbFiles), new Function<File, URI>() {
                            @Override
                            public URI apply(File from) {
                                return URI.createFileURI(from.getPath());
                            }
                        }), pptpURIs));
    }
    // Load pptp
    if (options.isCheckReferences()) {
        try {
            URI platformURI = options.getPlatformURI();
            ppRunner.loadResource(
                    platformURI != null ? platformURI : PuppetTarget.getDefault().getPlatformURI());
        } catch (IOException e) {
            addExceptionDiagnostic(diagnostics, "Internal Error: Could not load pptp.", e);
            return new BuildResult(rubyHelper.isRubyServicesAvailable()); // give
            // up
        }
    }
    worked(ticker, 1);

    // Load all ruby
    for (File f : rbFiles) {
        try {
            // Skip "Rakefile.rb" or they will be processed twice (but still tick x2
            // once for validate and once for load - as this is included in work-count)
            if (f.getName().toLowerCase().equals("rakefile.rb")) {
                worked(ticker, 2);
                continue;
            }
            // Syntax check ruby file
            // consumes one rb tick
            if (isValidationWanted(examinedFiles, f))
                validateRubyFile(rubyHelper, diagnostics, f, root, ticker.newChild(1));
            else
                worked(ticker, 1);

            // Load ruby file with pptp contribution
            // consumes one rb tick
            if (options.isCheckReferences()) {
                Resource r = ppRunner.loadResource(new FileInputStream(f), URI.createFileURI(f.getPath()));
                if (r != null)
                    rememberRootInResource(root, r);
            }
            worked(ticker, 1);
        } catch (Exception e) {
            addExceptionDiagnostic(diagnostics,
                    "Internal Error: Exception while processing file: " + f.getName() + ": " + e, e);
            e.printStackTrace();
        }
    }
    RakefileInfo rakefileInfo = new RakefileInfo();
    // System.err.println("Processing Rakefiles count: " + rakeFiles.size());

    for (File f : rakeFiles) {
        // Syntax check ruby file
        // consumes one rakefile tick
        if (isValidationWanted(examinedFiles, f))
            validateRubyFile(rubyHelper, diagnostics, f, root, ticker.newChild(1));
        else
            worked(ticker, 1);

        // parsing adds one rakefile work tick
        rakefileInfo.addRakefile(getRakefileInformation(rubyHelper, f, root, ticker.newChild(1)));

    }
    // Load all pp
    // crosslink and validate all
    Map<File, Resource> ppResources = Maps.newHashMapWithExpectedSize(ppFiles.size());
    for (File f : ppFiles) {
        try {
            ppResources.put(f, ppRunner.loadResource(new FileInputStream(f), URI.createFileURI(f.getPath())));
        } catch (IOException e) {
            addExceptionDiagnostic(diagnostics, "I/O Error: Exception while processing file: " + f.toString(),
                    e);
        } catch (Exception e) {
            addExceptionDiagnostic(diagnostics,
                    "Internal Error: Exception while processing file: " + f.toString(), e);
        }
        // consume one pp tick
        worked(ticker, 1);
    }

    // Must set the root in all resources to allow cross reference error reports to contain
    // relative paths
    for (Resource r : ppResources.values())
        rememberRootInResource(root, r);

    IResourceValidator validator = ppRunner.getPPResourceValidator();
    long maxLinkTime = 0;
    // Turn on for debugging particular files
    // File slowCandidate = new File("/Users/henrik/gitrepos/forge-modules/jeffmccune-mockbuild/manifests/init.pp");

    for (Entry<File, Resource> r : ppResources.entrySet()) {
        File f = r.getKey();
        if (!isValidationWanted(examinedFiles, f))
            continue;
        long beforeTime = System.currentTimeMillis();
        boolean profileThis = false; // /* for debugging slow file */
        // f.equals(slowCandidate);
        if (options.isCheckReferences())
            ppRunner.resolveCrossReferences(r.getValue(), profileThis, ticker);
        long afterTime = System.currentTimeMillis();
        if (afterTime - beforeTime > maxLinkTime) {
            maxLinkTime = afterTime - beforeTime;
        }
        final CancelIndicator cancelMonitor = new CancelIndicator() {
            public boolean isCanceled() {
                return ticker.isCanceled();
            }
        };

        List<Issue> issues = validator.validate(r.getValue(), CheckMode.ALL, cancelMonitor);
        for (Issue issue : issues) {
            addIssueDiagnostic(diagnostics, issue, f, root);
        }
    }
    // // Debug stuff
    // if(slowestFile != null)
    // System.err.printf("Slowest file =%s (%s)\n", slowestFile.getAbsolutePath(), maxLinkTime);

    // // Compute the returned map
    // // Only the restricted modules are wanted (as everything else sees everything)
    // Iterable<File> filteredMdFiles = Iterables.filter(mdFiles, new Predicate<File>() {
    //
    // @Override
    // public boolean apply(File input) {
    // IPath p = new Path(input.getPath());
    // if(p.segmentCount() < 3)
    // return false;
    // p = p.removeLastSegments(2);
    // return NAME_OF_DIR_WITH_RESTRICTED_SCOPE.equals(p.lastSegment());
    // }
    // });
    AllModuleReferences all = ppRunner.getAllModulesState();

    // set the root to allow relative lookup of module exports
    all.setRoot(root);

    // // Debug stuff...
    // for(File f : result.getMap().keySet()) {
    // System.err.println("Exports for file: " + f.toString());
    // for(ExportsPerModule.Export export : result.getMap().get(f)) {
    // System.err.printf(
    // "    %s, %s, %s\n", export.getName(), export.getEClass().getName(),
    // export.getParentName());
    // }
    // }
    ppRunner.tearDown();
    boolean rubyServicesAvailable = rubyHelper.isRubyServicesAvailable();
    rubyHelper.tearDown();
    // make sure everything is consumed
    ticker.setWorkRemaining(0);
    BuildResult buildResult = new BuildResult(rubyServicesAvailable);
    // buildResult.setExportsForNodes(result);
    buildResult.setAllModuleReferences(all);
    buildResult.setModuleData(moduleData);
    buildResult.setRakefileInfo(rakefileInfo);
    return buildResult;
}