Example usage for com.google.common.collect Sets union

Introduction

This page lists example usages of com.google.common.collect.Sets.union, drawn from the source files shown below.

Prototype

public static <E> SetView<E> union(final Set<? extends E> set1, final Set<? extends E> set2) 

Document

Returns an unmodifiable view of the union of two sets. The returned set contains all elements that are contained in either backing set.
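
The snippet below is a minimal, self-contained sketch of that behavior (the class name and set contents are illustrative, not taken from the examples that follow): the returned SetView is an unmodifiable, lazy view, it reflects later changes to the backing sets, and immutableCopy() can be used when an independent snapshot is needed.

import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
import com.google.common.collect.Sets.SetView;

import java.util.HashSet;
import java.util.Set;

public class SetsUnionDemo {
    public static void main(String[] args) {
        Set<String> sources = new HashSet<>(ImmutableSet.of("fileSource", "dbSource"));
        Set<String> sinks = ImmutableSet.of("dbSource", "tableSink");

        // The union is an unmodifiable, lazy view; elements present in both sets appear once.
        SetView<String> all = Sets.union(sources, sinks);
        System.out.println(all.size()); // 3

        // The view reflects later changes to the backing sets.
        sources.add("streamSource");
        System.out.println(all.contains("streamSource")); // true

        // Take an independent snapshot when the view's laziness is not wanted.
        ImmutableSet<String> snapshot = all.immutableCopy();
        System.out.println(snapshot.size()); // 4
    }
}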

Usage

From source file:co.cask.cdap.etl.batch.mapreduce.ETLMapReduce.java

@Override
public void beforeSubmit(MapReduceContext context) throws Exception {
    if (Boolean.valueOf(context.getSpecification().getProperty(Constants.STAGE_LOGGING_ENABLED))) {
        LogStageInjector.start();
    }
    CompositeFinisher.Builder finishers = CompositeFinisher.builder();

    Job job = context.getHadoopJob();
    Configuration hConf = job.getConfiguration();

    // plugin name -> runtime args for that plugin
    Map<String, Map<String, String>> runtimeArgs = new HashMap<>();

    Map<String, String> properties = context.getSpecification().getProperties();
    BatchPhaseSpec phaseSpec = GSON.fromJson(properties.get(Constants.PIPELINEID), BatchPhaseSpec.class);
    PipelinePhase phase = phaseSpec.getPhase();
    PipelinePluginInstantiator pluginInstantiator = new PipelinePluginInstantiator(context, phaseSpec);

    // we checked at configure time that there is exactly one source
    String sourceName = phaseSpec.getPhase().getSources().iterator().next();

    BatchConfigurable<BatchSourceContext> batchSource = pluginInstantiator.newPluginInstance(sourceName);
    batchSource = new LoggedBatchConfigurable<>(sourceName, batchSource);
    BatchSourceContext sourceContext = new MapReduceSourceContext(context, mrMetrics,
            new DatasetContextLookupProvider(context), sourceName, context.getRuntimeArguments());
    batchSource.prepareRun(sourceContext);
    runtimeArgs.put(sourceName, sourceContext.getRuntimeArguments());
    finishers.add(batchSource, sourceContext);

    Map<String, SinkOutput> sinkOutputs = new HashMap<>();

    for (StageInfo stageInfo : Sets.union(phase.getStagesOfType(Constants.CONNECTOR_TYPE),
            phase.getStagesOfType(BatchSink.PLUGIN_TYPE))) {
        String sinkName = stageInfo.getName();
        // todo: add a better way to get info for all sinks
        if (!phase.getSinks().contains(sinkName)) {
            continue;
        }

        BatchConfigurable<BatchSinkContext> batchSink = pluginInstantiator.newPluginInstance(sinkName);
        batchSink = new LoggedBatchConfigurable<>(sinkName, batchSink);
        MapReduceSinkContext sinkContext = new MapReduceSinkContext(context, mrMetrics,
                new DatasetContextLookupProvider(context), sinkName, context.getRuntimeArguments());
        batchSink.prepareRun(sinkContext);
        runtimeArgs.put(sinkName, sinkContext.getRuntimeArguments());
        finishers.add(batchSink, sinkContext);

        sinkOutputs.put(sinkName,
                new SinkOutput(sinkContext.getOutputNames(), stageInfo.getErrorDatasetName()));
    }
    finisher = finishers.build();
    hConf.set(SINK_OUTPUTS_KEY, GSON.toJson(sinkOutputs));

    // setup time partition for each error dataset
    for (StageInfo stageInfo : Sets.union(phase.getStagesOfType(Transform.PLUGIN_TYPE),
            phase.getStagesOfType(BatchSink.PLUGIN_TYPE))) {
        if (stageInfo.getErrorDatasetName() != null) {
            Map<String, String> args = new HashMap<>();
            args.put(FileSetProperties.OUTPUT_PROPERTIES_PREFIX + "avro.schema.output.key",
                    Constants.ERROR_SCHEMA.toString());
            TimePartitionedFileSetArguments.setOutputPartitionTime(args, context.getLogicalStartTime());
            context.addOutput(Output.ofDataset(stageInfo.getErrorDatasetName(), args));
        }
    }

    job.setMapperClass(ETLMapper.class);
    Set<StageInfo> aggregators = phaseSpec.getPhase().getStagesOfType(BatchAggregator.PLUGIN_TYPE);
    if (!aggregators.isEmpty()) {
        job.setReducerClass(ETLReducer.class);
        String aggregatorName = aggregators.iterator().next().getName();
        BatchAggregator aggregator = pluginInstantiator.newPluginInstance(aggregatorName);
        MapReduceAggregatorContext aggregatorContext = new MapReduceAggregatorContext(context, mrMetrics,
                new DatasetContextLookupProvider(context), aggregatorName, context.getRuntimeArguments());
        aggregator.prepareRun(aggregatorContext);
        finishers.add(aggregator, aggregatorContext);

        if (aggregatorContext.getNumPartitions() != null) {
            job.setNumReduceTasks(aggregatorContext.getNumPartitions());
        }
        // if the plugin sets the output key and value class directly, trust them
        Class<?> outputKeyClass = aggregatorContext.getGroupKeyClass();
        Class<?> outputValClass = aggregatorContext.getGroupValueClass();
        // otherwise, derive it from the plugin's parameters
        if (outputKeyClass == null) {
            outputKeyClass = TypeChecker.getGroupKeyClass(aggregator);
        }
        if (outputValClass == null) {
            outputValClass = TypeChecker.getGroupValueClass(aggregator);
        }
        hConf.set(GROUP_KEY_CLASS, outputKeyClass.getName());
        hConf.set(GROUP_VAL_CLASS, outputValClass.getName());
        // in case the classes are not WritableComparable but are some common type we support,
        // for example, a String or a StructuredRecord
        WritableConversion writableConversion = WritableConversions.getConversion(outputKeyClass.getName());
        // if the conversion is null, it means the user is using their own object.
        if (writableConversion != null) {
            outputKeyClass = writableConversion.getWritableClass();
        }
        writableConversion = WritableConversions.getConversion(outputValClass.getName());
        if (writableConversion != null) {
            outputValClass = writableConversion.getWritableClass();
        }
        // check classes here instead of letting mapreduce do it, since mapreduce throws a cryptic error
        if (!WritableComparable.class.isAssignableFrom(outputKeyClass)) {
            throw new IllegalArgumentException(String.format(
                    "Invalid aggregator %s. The group key class %s must implement Hadoop's WritableComparable.",
                    aggregatorName, outputKeyClass));
        }
        if (!Writable.class.isAssignableFrom(outputValClass)) {
            throw new IllegalArgumentException(String.format(
                    "Invalid aggregator %s. The group value class %s must implement Hadoop's Writable.",
                    aggregatorName, outputValClass));
        }

        job.setMapOutputKeyClass(outputKeyClass);
        job.setMapOutputValueClass(outputValClass);
    } else {
        job.setNumReduceTasks(0);
    }

    hConf.set(RUNTIME_ARGS_KEY, GSON.toJson(runtimeArgs));
}

From source file:org.sosy_lab.cpachecker.cpa.policyiteration.PolicyReducer.java

/**
 * Update the meta-information for policies coming from the summary edge.
 *
 * @param inputPath Path formula, which contains {@link SSAMap} equivalent to the summary
 *                  application, and the summary encoded as a formula.
 * @param pParent Previous abstract state, associated with the function call before the
 *                function application.
 * @param summaryAbstraction Abstraction associated with the summary state.
 * @param summarySSA {@link SSAMap} associated with the summary.
 */
private Map<Template, PolicyBound> updateAbstractionForExpanded(PathFormula inputPath,
        PolicyAbstractedState pParent, Map<Template, PolicyBound> summaryAbstraction, SSAMap summarySSA) {

    ImmutableMap.Builder<Template, PolicyBound> newAbstraction = ImmutableMap.builder();
    Set<Template> allTemplates = Sets.union(pParent.getAbstraction().keySet(), summaryAbstraction.keySet());
    for (Template template : allTemplates) {
        PolicyBound pBound = summaryAbstraction.get(template);
        PolicyBound insertedBound = null;
        if (pBound != null) {

            // If bound for this template is present in the summary, add it after the abstraction.
            BooleanFormula policyFormula = stateFormulaConversionManager.templateToConstraint(template, pBound,
                    pfmgr, fmgr, inputPath);
            PathFormula policy = inputPath.updateFormula(policyFormula);
            insertedBound = PolicyBound.of(policy, pBound.getBound(), pParent,

                    // TODO: filter the set of dependent templates, at least
                    pParent.getAbstraction().keySet());
        } else if (template.getUsedVars().allMatch(v -> !(summarySSA.getIndex(v) > STARTING_SSA_IDX))) {

            // Otherwise, use the bound from the parent state.
            insertedBound = pParent.getBound(template).get();
        }

        if (insertedBound != null) {
            newAbstraction.put(template, insertedBound);
        }
    }

    return newAbstraction.build();
}

From source file:org.immutables.value.processor.meta.ValueAttribute.java

private List<CharSequence> extractAnnotationsForElement(ElementType elementType,
        Set<String> additionalAnnotations) {
    List<CharSequence> allAnnotations = Lists.newArrayListWithCapacity(1);

    boolean dontHaveJsonPropertyAnnotationAlready = Annotations.getAnnotationLines(element,
            Collections.singleton(JsonPropertyMirror.qualifiedName()), false, elementType).isEmpty();

    if (dontHaveJsonPropertyAnnotationAlready) {
        allAnnotations.add("@" + JsonPropertyMirror.qualifiedName());
    }

    allAnnotations.addAll(Annotations.getAnnotationLines(element,
            Sets.union(additionalAnnotations,
                    containingType.constitution.protoclass().styles().style().additionalJsonAnnotationsNames()),
            true, elementType));

    return allAnnotations;
}

From source file:org.geogit.api.plumbing.diff.DiffCounter.java

/**
 * Counts the number of differences between two trees that contain {@link RevTree#buckets()
 * buckets} instead of direct {@link RevTree#children() children}
 */
private DiffObjectCount countBucketDiffs(ImmutableSortedMap<Integer, Bucket> leftBuckets,
        ImmutableSortedMap<Integer, Bucket> rightBuckets) {

    DiffObjectCount count = new DiffObjectCount();
    final Set<Integer> bucketIds = Sets.union(leftBuckets.keySet(), rightBuckets.keySet());

    ObjectId leftTreeId;
    ObjectId rightTreeId;

    for (Integer bucketId : bucketIds) {
        @Nullable
        Bucket leftBucket = leftBuckets.get(bucketId);
        @Nullable
        Bucket rightBucket = rightBuckets.get(bucketId);

        leftTreeId = leftBucket == null ? null : leftBucket.id();
        rightTreeId = rightBucket == null ? null : rightBucket.id();

        if (leftTreeId == null || rightTreeId == null) {
            count.add(sizeOfTree(leftTreeId == null ? rightTreeId : leftTreeId));
        } else {
            count.add(countDiffs(leftTreeId, rightTreeId));
        }
    }
    return count;
}

From source file:com.google.cloud.dataflow.sdk.util.ApiSurface.java

/**
 * Returns a path from an exposed class to a root class. There may be many, but this
 * gives only one. It will not return a path that crosses the excluded classes.
 *
 * <p>If there are only cycles or paths through the excluded classes, returns null.
 *
 * <p>If the class is not actually in the exposure map, throws IllegalArgumentException
 */
private List<Class<?>> getAnyExposurePath(Class<?> exposedClass, Set<Class<?>> excluded) {
    List<Class<?>> exposurePath = Lists.newArrayList();
    exposurePath.add(exposedClass);

    Collection<Class<?>> exposers = getExposedToExposers().get(exposedClass);
    if (exposers.isEmpty()) {
        throw new IllegalArgumentException("Class " + exposedClass + " is not exposed.");
    }

    for (Class<?> exposer : exposers) {
        if (excluded.contains(exposer)) {
            continue;
        }

        // A null exposer means this is already a root class.
        if (exposer == null) {
            return exposurePath;
        }

        List<Class<?>> restOfPath = getAnyExposurePath(exposer, Sets.union(excluded, Sets.newHashSet(exposer)));

        if (restOfPath != null) {
            exposurePath.addAll(restOfPath);
            return exposurePath;
        }
    }
    return null;
}

From source file:org.sosy_lab.cpachecker.cfa.ast.c.FileLocationCollectingVisitor.java

@Override
public Set<FileLocation> visit(CReturnStatement pNode) throws RuntimeException {
    Set<FileLocation> result = Collections.singleton(pNode.getFileLocation());
    if (pNode.getReturnValue().isPresent()) {
        result = Sets.union(result, pNode.getReturnValue().get().accept(this));
    }
    return result;
}

From source file:ai.grakn.graql.internal.reasoner.atom.Atom.java

/**
 * @return set of constraints of this atom (predicates + types) that are not selectable
 */
public Set<Atomic> getNonSelectableConstraints() {
    Set<Atom> types = getTypeConstraints().stream().filter(at -> !at.isSelectable())
            .collect(Collectors.toSet());
    return Sets.union(types, getPredicates());
}

From source file:com.cloudant.sync.query.QueryExecutor.java

protected Set<String> executeQueryTree(QueryNode node, SQLDatabase db) {
    if (node instanceof AndQueryNode) {
        Set<String> accumulator = null;

        AndQueryNode andNode = (AndQueryNode) node;
        for (QueryNode qNode : andNode.children) {
            Set<String> childIds = executeQueryTree(qNode, db);
            if (childIds == null) {
                continue;
            }
            if (accumulator == null) {
                accumulator = new HashSet<String>(childIds);
            } else {
                accumulator = Sets.intersection(accumulator, childIds);
            }
        }

        return accumulator;
    }
    if (node instanceof OrQueryNode) {
        Set<String> accumulator = null;

        OrQueryNode orNode = (OrQueryNode) node;
        for (QueryNode qNode : orNode.children) {
            Set<String> childIds = executeQueryTree(qNode, db);
            if (childIds == null) {
                continue;
            }
            if (accumulator == null) {
                accumulator = new HashSet<String>(childIds);
            } else {
                accumulator = Sets.union(accumulator, childIds);
            }
        }

        return accumulator;
    } else if (node instanceof SqlQueryNode) {
        SqlQueryNode sqlNode = (SqlQueryNode) node;
        List<String> docIds;
        if (sqlNode.sql != null) {
            docIds = new ArrayList<String>();
            SqlParts sqlParts = sqlNode.sql;
            Cursor cursor = null;
            try {
                cursor = db.rawQuery(sqlParts.sqlWithPlaceHolders, sqlParts.placeHolderValues);
                while (cursor.moveToNext()) {
                    String docId = cursor.getString(0);
                    docIds.add(docId);
                }
            } catch (SQLException e) {
                logger.log(Level.SEVERE, "Failed to get a list of doc ids.", e);
            } finally {
                DatabaseUtils.closeCursorQuietly(cursor);
            }
        } else {
            // No SQL exists so we are now forced to go directly to the
            // document datastore to retrieve the list of document ids.
            docIds = datastore.getAllDocumentIds();
        }

        return new HashSet<String>(docIds);
    } else {
        return null;
    }
}

From source file:com.google.devtools.build.lib.skyframe.GlobFunction.java

@Override
public SkyValue compute(SkyKey skyKey, Environment env) throws GlobFunctionException, InterruptedException {
    GlobDescriptor glob = (GlobDescriptor) skyKey.argument();

    // Note that the glob's package is assumed to exist which implies that the package's BUILD file
    // exists which implies that the package's directory exists.
    PathFragment globSubdir = glob.getSubdir();
    if (!globSubdir.equals(PathFragment.EMPTY_FRAGMENT)) {
        PackageLookupValue globSubdirPkgLookupValue = (PackageLookupValue) env
                .getValue(PackageLookupValue.key(PackageIdentifier.create(glob.getPackageId().getRepository(),
                        glob.getPackageId().getPackageFragment().getRelative(globSubdir))));
        if (globSubdirPkgLookupValue == null) {
            return null;
        }
        if (globSubdirPkgLookupValue.packageExists()) {
            // We crossed the package boundary, that is, pkg/subdir contains a BUILD file and thus
            // defines another package, so glob expansion should not descend into that subdir.
            return GlobValue.EMPTY;
        }
    }

    String pattern = glob.getPattern();
    // Split off the first path component of the pattern.
    int slashPos = pattern.indexOf('/');
    String patternHead;
    String patternTail;
    if (slashPos == -1) {
        patternHead = pattern;
        patternTail = null;
    } else {
        // Substrings will share the backing array of the original glob string. That should be fine.
        patternHead = pattern.substring(0, slashPos);
        patternTail = pattern.substring(slashPos + 1);
    }

    NestedSetBuilder<PathFragment> matches = NestedSetBuilder.stableOrder();

    boolean globMatchesBareFile = patternTail == null;

    // "**" also matches an empty segment, so try the case where it is not present.
    if ("**".equals(patternHead)) {
        if (globMatchesBareFile) {
            // Recursive globs aren't supposed to match the package's directory.
            if (!glob.excludeDirs() && !globSubdir.equals(PathFragment.EMPTY_FRAGMENT)) {
                matches.add(globSubdir);
            }
        } else {
            SkyKey globKey = GlobValue.internalKey(glob.getPackageId(), glob.getPackageRoot(), globSubdir,
                    patternTail, glob.excludeDirs());
            GlobValue globValue = (GlobValue) env.getValue(globKey);
            if (globValue == null) {
                return null;
            }
            matches.addTransitive(globValue.getMatches());
        }
    }

    PathFragment dirPathFragment = glob.getPackageId().getPackageFragment().getRelative(globSubdir);
    RootedPath dirRootedPath = RootedPath.toRootedPath(glob.getPackageRoot(), dirPathFragment);
    if (alwaysUseDirListing || containsGlobs(patternHead)) {
        String subdirPattern = "**".equals(patternHead) ? glob.getPattern() : patternTail;
        // Pattern contains globs, so a directory listing is required.
        //
        // Note that we have good reason to believe the directory exists: if this is the
        // top-level directory of the package, the package's existence implies the directory's
        // existence; if this is a lower-level directory in the package, then we got here from
        // previous directory listings. Filesystem operations concurrent with build could mean the
        // directory no longer exists, but DirectoryListingFunction handles that gracefully.
        DirectoryListingValue listingValue = (DirectoryListingValue) env
                .getValue(DirectoryListingValue.key(dirRootedPath));
        if (listingValue == null) {
            return null;
        }

        // In order to batch Skyframe requests, we do three passes over the directory:
        // (1) Process every dirent, keeping track of values we need to request if the dirent cannot
        //     be processed with current information (symlink targets and subdirectory globs/package
        //     lookups for some subdirectories).
        // (2) Get those values and process the symlinks, keeping track of subdirectory globs/package
        //     lookups we may need to request in case the symlink's target is a directory.
        // (3) Process the necessary subdirectories.
        int direntsSize = listingValue.getDirents().size();
        Map<SkyKey, Dirent> symlinkFileMap = Maps.newHashMapWithExpectedSize(direntsSize);
        Map<SkyKey, Dirent> subdirMap = Maps.newHashMapWithExpectedSize(direntsSize);
        Map<Dirent, Object> sortedResultMap = Maps.newTreeMap();
        // First pass: do normal files and collect SkyKeys to request for subdirectories and symlinks.
        for (Dirent dirent : listingValue.getDirents()) {
            Type direntType = dirent.getType();
            String fileName = dirent.getName();
            if (!UnixGlob.matches(patternHead, fileName, regexPatternCache)) {
                continue;
            }

            if (direntType == Dirent.Type.SYMLINK) {
                // TODO(bazel-team): Consider extracting the symlink resolution logic.
                // For symlinks, look up the corresponding FileValue. This ensures that if the symlink
                // changes and "switches types" (say, from a file to a directory), this value will be
                // invalidated. We also need the target's type to properly process the symlink.
                symlinkFileMap.put(FileValue.key(
                        RootedPath.toRootedPath(glob.getPackageRoot(), dirPathFragment.getRelative(fileName))),
                        dirent);
                continue;
            }

            if (direntType == Dirent.Type.DIRECTORY) {
                SkyKey keyToRequest = getSkyKeyForSubdir(fileName, glob, subdirPattern);
                if (keyToRequest != null) {
                    subdirMap.put(keyToRequest, dirent);
                }
            } else if (globMatchesBareFile) {
                sortedResultMap.put(dirent, glob.getSubdir().getRelative(fileName));
            }
        }

        Map<SkyKey, SkyValue> subdirAndSymlinksResult = env
                .getValues(Sets.union(subdirMap.keySet(), symlinkFileMap.keySet()));
        if (env.valuesMissing()) {
            return null;
        }
        Map<SkyKey, Dirent> symlinkSubdirMap = Maps.newHashMapWithExpectedSize(symlinkFileMap.size());
        // Second pass: process the symlinks and subdirectories from the first pass, and maybe
        // collect further SkyKeys if fully resolved symlink targets are themselves directories.
        // Also process any known directories.
        for (Map.Entry<SkyKey, SkyValue> lookedUpKeyAndValue : subdirAndSymlinksResult.entrySet()) {
            if (symlinkFileMap.containsKey(lookedUpKeyAndValue.getKey())) {
                FileValue symlinkFileValue = (FileValue) lookedUpKeyAndValue.getValue();
                if (!symlinkFileValue.isSymlink()) {
                    throw new GlobFunctionException(
                            new InconsistentFilesystemException("readdir and stat disagree about whether "
                                    + ((RootedPath) lookedUpKeyAndValue.getKey().argument()).asPath()
                                    + " is a symlink."),
                            Transience.TRANSIENT);
                }
                if (!symlinkFileValue.exists()) {
                    continue;
                }
                Dirent dirent = symlinkFileMap.get(lookedUpKeyAndValue.getKey());
                String fileName = dirent.getName();
                if (symlinkFileValue.isDirectory()) {
                    SkyKey keyToRequest = getSkyKeyForSubdir(fileName, glob, subdirPattern);
                    if (keyToRequest != null) {
                        symlinkSubdirMap.put(keyToRequest, dirent);
                    }
                } else if (globMatchesBareFile) {
                    sortedResultMap.put(dirent, glob.getSubdir().getRelative(fileName));
                }
            } else {
                processSubdir(lookedUpKeyAndValue, subdirMap, glob, sortedResultMap);
            }
        }

        Map<SkyKey, SkyValue> symlinkSubdirResult = env.getValues(symlinkSubdirMap.keySet());
        if (env.valuesMissing()) {
            return null;
        }
        // Third pass: do needed subdirectories of symlinked directories discovered during the second
        // pass.
        for (Map.Entry<SkyKey, SkyValue> lookedUpKeyAndValue : symlinkSubdirResult.entrySet()) {
            processSubdir(lookedUpKeyAndValue, symlinkSubdirMap, glob, sortedResultMap);
        }
        for (Map.Entry<Dirent, Object> fileMatches : sortedResultMap.entrySet()) {
            addToMatches(fileMatches.getValue(), matches);
        }
    } else {
        // Pattern does not contain globs, so a direct stat is enough.
        String fileName = patternHead;
        RootedPath fileRootedPath = RootedPath.toRootedPath(glob.getPackageRoot(),
                dirPathFragment.getRelative(fileName));
        FileValue fileValue = (FileValue) env.getValue(FileValue.key(fileRootedPath));
        if (fileValue == null) {
            return null;
        }
        if (fileValue.exists()) {
            if (fileValue.isDirectory()) {
                SkyKey keyToRequest = getSkyKeyForSubdir(fileName, glob, patternTail);
                if (keyToRequest != null) {
                    SkyValue valueRequested = env.getValue(keyToRequest);
                    if (env.valuesMissing()) {
                        return null;
                    }
                    Object fileMatches = getSubdirMatchesFromSkyValue(fileName, glob, valueRequested);
                    if (fileMatches != null) {
                        addToMatches(fileMatches, matches);
                    }
                }
            } else if (globMatchesBareFile) {
                matches.add(glob.getSubdir().getRelative(fileName));
            }
        }
    }

    Preconditions.checkState(!env.valuesMissing(), skyKey);

    NestedSet<PathFragment> matchesBuilt = matches.build();
    // Use the same value to represent that we did not match anything.
    if (matchesBuilt.isEmpty()) {
        return GlobValue.EMPTY;
    }
    return new GlobValue(matchesBuilt);
}

From source file:com.isotrol.impe3.core.component.ComponentDefinition.java

private ComponentDefinition(Class<T> type) {
    super(type);
    final List<Method> methods = getComponentMethods();
    this.directInjectors = DirectInjectors.direct(type, methods);
    this.configuration = loadConfiguration();
    this.bindingErrorsInjectors = DirectInjectors.bindingErrors(type, methods);
    this.headerInjectors = ParameterInjectors.headers(type, methods);
    this.cookieInjectors = ParameterInjectors.cookies(type, methods);
    this.requestInjectors = ParameterInjectors.request(type, methods);
    this.sessionInjectors = ParameterInjectors.session(type, methods);
    this.localInjectors = ParameterInjectors.local(type, methods);
    this.propertyInjectors = PropertyInjectors.of(type, methods);
    this.baseInjectors = BaseInjectors.of(type, methods);
    this.renderers = Renderers.of(type, methods);
    this.extractors = DirectExtractors.of(type, methods);
    this.localExtractors = LocalExtractors.of(type, methods);
    this.sessionExtractors = SessionExtractors.of(type, methods);
    this.queryExtractors = QueryExtractors.of(type, methods);
    this.headerExtractors = HeaderExtractors.of(type, methods);
    this.actionExtractors = ActionExtractors.of(type, methods);
    this.cookieExtractors = CookieExtractors.of(type, methods);
    this.paginated = type.isAnnotationPresent(Paginated.class);
    final RequiresLink links = type.getAnnotation(RequiresLink.class);
    if (links == null) {
        this.requiredLinks = ImmutableSet.of();
    } else {
        this.requiredLinks = ImmutableSet.copyOf(links.value());
    }
    this.providedLinks = ImmutableSet.copyOf(Sets.union(this.requiredLinks, extractors.typeSet()));
    this.componentType = getComponentType(!renderers.isEmpty());
    // Cache parameters
    this.cacheMode = type.isAnnotationPresent(ComponentCacheMode.class)
            ? type.getAnnotation(ComponentCacheMode.class).value()
            : CacheMode.ON;
    this.cacheScope = type.isAnnotationPresent(ComponentCacheScope.class)
            ? type.getAnnotation(ComponentCacheScope.class).value()
            : CacheScope.PUBLIC;
    if (type.isAnnotationPresent(ComponentETag.class)) {
        this.eTagMode = type.getAnnotation(ComponentETag.class).value();
    } else {
        this.eTagMode = this.extractors.contains(ETag.class) ? ComponentETagMode.DEFAULT
                : ComponentETagMode.DISABLED;
    }
}