Example usage for com.google.common.collect ImmutableMap.Builder putAll

List of usage examples for com.google.common.collect ImmutableMap.Builder putAll

Introduction

On this page you can find example usages of com.google.common.collect ImmutableMap.Builder putAll.

Prototype

public Builder<K, V> putAll(Map<? extends K, ? extends V> map) 

Source Link

Usage

From source file:com.google.devtools.build.lib.analysis.mock.BazelAnalysisMock.java

@Override
public ImmutableMap<SkyFunctionName, SkyFunction> getSkyFunctions() {
    // Start from the parent's sky functions and register the FDO support function on top.
    return ImmutableMap.<SkyFunctionName, SkyFunction>builder()
            .putAll(super.getSkyFunctions())
            .put(FdoSupportValue.SKYFUNCTION, new FdoSupportFunction())
            .build();
}

From source file:com.google.devtools.build.lib.rules.extra.ExtraAction.java

/**
 * The spawn command for ExtraAction needs to be slightly modified from
 * regular SpawnActions:/*from www . ja v  a  2s .  c om*/
 * -the extraActionInfo file needs to be added to the list of inputs.
 * -the extraActionInfo file that is an output file of this task is created
 * before the SpawnAction so should not be listed as one of its outputs.
 */
// TODO(bazel-team): Add more tests that execute this code path!
@Override
public Spawn getSpawn(Map<String, String> clientEnv) {
    final Spawn base = super.getSpawn(clientEnv);
    return new DelegateSpawn(base) {
        @Override
        public ImmutableMap<PathFragment, Artifact> getRunfilesManifests() {
            ImmutableMap.Builder<PathFragment, Artifact> builder = ImmutableMap.builder();
            builder.putAll(super.getRunfilesManifests());
            builder.putAll(runfilesManifests);
            return builder.build();
        }

        @Override
        public String getMnemonic() {
            return ExtraAction.this.getMnemonic();
        }
    };
}

From source file:co.cask.cdap.data2.dataset2.lib.file.FileSetDataset.java

@Override
public Map<String, String> getInputFormatConfiguration(Iterable<? extends Location> inputLocs) {
    // Resolve every input location to its file-system path and join the results into the
    // comma-separated list that FileInputFormat expects.
    String inputPaths = Joiner.on(',').join(Iterables.transform(inputLocs, new Function<Location, String>() {
        @Override
        public String apply(@Nullable Location location) {
            return getFileSystemPath(location);
        }
    }));
    // Dataset-level input properties, runtime-argument input properties, then the input
    // directory entry. Note: ImmutableMap.Builder rejects duplicate keys at build time,
    // so these property sets are presumably disjoint — verify against callers.
    ImmutableMap.Builder<String, String> configBuilder = ImmutableMap.builder();
    configBuilder.putAll(FileSetProperties.getInputProperties(spec.getProperties()));
    configBuilder.putAll(FileSetProperties.getInputProperties(runtimeArguments));
    configBuilder.put(FileInputFormat.INPUT_DIR, inputPaths);
    return configBuilder.build();
}

From source file:co.cask.cdap.data2.dataset2.lib.file.FileSetDataset.java

@Override
public Map<String, String> getOutputFormatConfiguration() {
    // Writing through an external file set is not allowed.
    if (isExternal) {
        throw new UnsupportedOperationException(
                "Output is not supported for external file set '" + spec.getName() + "'");
    }
    ImmutableMap.Builder<String, String> config = ImmutableMap.builder();
    config.putAll(FileSetProperties.getOutputProperties(spec.getProperties()));
    config.putAll(FileSetProperties.getOutputProperties(runtimeArguments));
    // Only point the output format at a directory when one is configured.
    if (outputLocation != null) {
        config.put(FileOutputFormat.OUTDIR, getFileSystemPath(outputLocation));
    }
    return config.build();
}

From source file:se.kth.climate.fast.netcdf.aligner.BlockFitter.java

/**
 * Tries to fit the data described by {@code pvadds} under {@code blockLimit()} by splitting
 * one dimension of the data descriptor into per-block sub-ranges.
 *
 * @param pvadds triplet of (variable assignment, data descriptor, estimated total size in bytes)
 * @return the assignment paired with a {@code VariableFit} built from the split descriptors
 * @throws FittingException if no non-constant dimension exists, the estimate still exceeds
 *         the limit after splitting, or a multi-dimension split would be required
 */
private Pair<VariableAssignment, VariableFit> fitWithDD(
        Triplet<VariableAssignment, DataDescriptor, Long> pvadds) {
    final VariableAssignment va = pvadds.getValue0();
    final DataDescriptor initialDD = pvadds.getValue1();
    final long initialSize = pvadds.getValue2();
    // Fraction of the total data that fits into a single block.
    double blockRatio = (double) blockLimit() / (double) initialSize;
    LOG.debug("block ratio is {}", blockRatio);
    List<String> rdims = rankDimensions(initialDD);

    if (rdims.isEmpty()) {
        throw new FittingException("Ranks are empty!", va, initialDD);
    }
    LOG.debug("Ranked dimensions: {}", rdims);

    // Pick the first ranked dimension whose size exceeds 1; constant (size <= 1)
    // dimensions cannot be split over.
    int curDim = 0;
    DimensionRange dr1 = initialDD.dims.get(rdims.get(curDim));
    while (dr1.getSize() <= 1) {
        curDim++;
        if (rdims.size() > curDim) {
            dr1 = initialDD.dims.get(rdims.get(curDim));
        } else {
            throw new FittingException("No non-constant dimensions to split over!", va, initialDD);
        }
    }
    // Number of slices of the chosen dimension that fit into one block.
    long perBlockSize = (long) Math.floor(blockRatio * ((double) dr1.getSize()));
    LOG.debug("Splitting over {} with {} slices per block", dr1, perBlockSize);
    if (perBlockSize > 0) { // this should fit with at least a single slice per file
        // generate sub ranges
        // Partition [dr1.start, dr1.end] into consecutive chunks of at most perBlockSize.
        // The `endset + 1` step implies endpoints are treated as inclusive — TODO confirm.
        List<DimensionRange> subRanges = new LinkedList<>();
        long offset = dr1.start;
        while (offset < dr1.end) { // NOTE: If there's an off by one issue, it's probably here^^
            long endset = Math.min(offset + perBlockSize, dr1.end);
            subRanges.add(new DimensionRange(dr1.name, offset, endset, dr1.inf));
            offset = endset + 1;
        }
        LOG.debug("Generated subranges:\n{}", subRanges);
        // For each sub-range, build a descriptor that keeps every other dimension intact
        // and substitutes the split dimension's range.
        final Map<String, DimensionRange> otherDRs = new HashMap<>(initialDD.dims);
        otherDRs.remove(dr1.name);
        List<DataDescriptor> newDDs = subRanges.stream().map(splitdr -> {
            ImmutableMap.Builder<String, DimensionRange> drsB = ImmutableMap.builder();
            drsB.putAll(otherDRs);
            drsB.put(splitdr.name, splitdr);
            return new DataDescriptor(initialDD.metaInfo, initialDD.vars, drsB.build(),
                    Optional.of(splitdr.name));
        }).collect(Collectors.toList());
        LOG.debug("Generated new data descritors:\n{}", newDDs);
        // Sanity-check the first block's estimate; all blocks have the same slice count
        // except possibly the last, so the first is presumably the largest — verify.
        long firstSize = newDDs.get(0).estimateSize();
        if (firstSize < blockLimit()) {
            return Pair.with(va, VariableFit.fromDataDescriptors(ImmutableList.copyOf(newDDs)));
        } else {
            // TODO write a more flexible fitter, that tries decrements of ranges until it fits (if possible)
            throw new FittingException("It should have fit, but it decided not to. Estimated size was "
                    + firstSize + "bytes of limit " + blockLimit() + "bytes."
                    + " Complain to the devs to write a better fitting algorithm.", va, newDDs.get(0));
        }
        // TODO write a tigher fitter, that tries increments of ranges to waste less space where dimensions impact multiple variables
    } else { // this won't fit...could split along a different dimension but for now just throw an exception
        throw new FittingException("Splitting over multiple dimensions not yet implemented!", va, initialDD);
    }
}

From source file:org.gradle.model.internal.manage.schema.extract.CandidateMethods.java

/**
 * Returns all candidate methods, indexed by signature equivalence.
 */
public Map<Equivalence.Wrapper<Method>, Collection<Method>> allMethods() {
    ImmutableMap.Builder<Equivalence.Wrapper<Method>, Collection<Method>> allCandidates = ImmutableMap.builder();
    // Flatten the per-name candidate maps into a single map.
    for (Map<Equivalence.Wrapper<Method>, Collection<Method>> candidatesByName : candidates.values()) {
        allCandidates.putAll(candidatesByName);
    }
    return allCandidates.build();
}

From source file:com.google.devtools.build.lib.actions.BaseSpawn.java

@Override
public ImmutableMap<String, String> getEnvironment() {
    PathFragment runfilesRoot = getRunfilesRoot();
    // Without a runfiles root, or when both runfiles variables are already present,
    // the environment can be handed back unchanged.
    if (runfilesRoot == null
            || (environment.containsKey("JAVA_RUNFILES") && environment.containsKey("PYTHON_RUNFILES"))) {
        return environment;
    }
    // TODO(bazel-team): Unify these into a single env variable.
    String runfilesRootString = runfilesRoot.getPathString();
    return ImmutableMap.<String, String>builder()
            .putAll(environment)
            .put("JAVA_RUNFILES", runfilesRootString)
            .put("PYTHON_RUNFILES", runfilesRootString)
            .build();
}

From source file:org.opendaylight.controller.md.sal.dom.broker.impl.ShardedDOMDataTreeProducer.java

@Override
public synchronized DOMDataTreeProducer createProducer(final Collection<DOMDataTreeIdentifier> subtrees) {
    Preconditions.checkState(!closed, "Producer is already closed");
    Preconditions.checkState(openTx == null, "Transaction %s is still open", openTx);

    for (final DOMDataTreeIdentifier subtree : subtrees) {
        // The subtree must have been visible to this producer at some point.
        if (!haveSubtree(subtree)) {
            throw new IllegalArgumentException(
                    String.format("Subtree %s was never available in producer %s", subtree, this));
        }

        // The subtree itself must not already be delegated to a child producer...
        final DOMDataTreeProducer existing = lookupChild(subtree);
        Preconditions.checkArgument(existing == null, "Subtree %s is delegated to child producer %s", subtree,
                existing);

        // ...nor may it be a superset of any already-delegated subtree.
        for (final DOMDataTreeIdentifier delegated : children.keySet()) {
            if (subtree.contains(delegated)) {
                throw new IllegalArgumentException(String.format(
                        "Subtree %s cannot be delegated as it is superset of already-delegated %s", subtree,
                        delegated));
            }
        }
    }

    // Validation passed: create the child producer and record it for every subtree.
    final DOMDataTreeProducer producer = dataTree.createProducer(this, subtrees);
    final ImmutableMap.Builder<DOMDataTreeIdentifier, DOMDataTreeProducer> updatedChildren = ImmutableMap
            .builder();
    updatedChildren.putAll(children);
    for (final DOMDataTreeIdentifier subtree : subtrees) {
        updatedChildren.put(subtree, producer);
    }

    children = updatedChildren.build();
    return producer;
}

From source file:com.microsoft.thrifty.schema.UserElementMixin.java

UserElementMixin(String name, Location location, String documentation,
        @Nullable AnnotationElement annotationElement) {
    this.name = name;
    this.location = location;
    this.documentation = documentation;

    // Annotations are optional; a null element yields an empty immutable map.
    ImmutableMap.Builder<String, String> annotationsBuilder = ImmutableMap.builder();
    if (annotationElement != null) {
        annotationsBuilder.putAll(annotationElement.values());
    }
    this.annotations = annotationsBuilder.build();
}

From source file:org.gradle.api.internal.tasks.scala.AntScalaCompiler.java

/**
 * Compiles the sources described by {@code spec} using the Ant scalac task (or the fsc
 * compile daemon when configured), returning a successful {@link WorkResult}.
 *
 * @param spec the Scala compile specification (sources, classpath, destination, options)
 * @return a {@code SimpleWorkResult} reporting that work was done
 */
public WorkResult execute(final ScalaCompileSpec spec) {
    File destinationDir = spec.getDestinationDir();
    ScalaCompileOptionsInternal scalaCompileOptions = (ScalaCompileOptionsInternal) spec
            .getScalaCompileOptions();

    String backend = chooseBackend(spec);
    // Assemble the Ant task attributes: destination, backend target, user options, and —
    // when forking — the compiler path. Note: ImmutableMap.Builder rejects duplicate keys
    // at build time, so optionMap() presumably never contains these keys — verify.
    ImmutableMap.Builder<String, Object> optionsBuilder = ImmutableMap.builder();
    optionsBuilder.put("destDir", destinationDir);
    optionsBuilder.put("target", backend);
    optionsBuilder.putAll(scalaCompileOptions.optionMap());
    if (scalaCompileOptions.internalIsFork()) {
        optionsBuilder.put("compilerPath", GUtil.asPath(scalaClasspath));
    }
    final ImmutableMap<String, Object> options = optionsBuilder.build();
    // Use the fsc compile daemon when requested, plain scalac otherwise.
    final String taskName = scalaCompileOptions.internalUseCompileDaemon() ? "fsc" : "scalac";
    final Iterable<File> compileClasspath = spec.getClasspath();

    LOGGER.info("Compiling with Ant scalac task.");
    LOGGER.debug("Ant scalac task options: {}", options);

    // Drive the Ant build through a Groovy closure: register the Scala antlib task
    // definitions, then invoke the compile task with the options and a nested closure
    // that contributes the source set and classpath entries.
    antBuilder.withClasspath(scalaClasspath).execute(new Closure<Object>(this) {
        @SuppressWarnings("unused")
        public Object doCall(final AntBuilderDelegate ant) {
            ant.invokeMethod("taskdef", Collections.singletonMap("resource", "scala/tools/ant/antlib.xml"));

            return ant.invokeMethod(taskName, new Object[] { options, new Closure<Void>(this) {
                public void doCall() {
                    spec.getSource().addToAntBuilder(ant, "src", FileCollection.AntType.MatchingTask);
                    for (File file : compileClasspath) {
                        ant.invokeMethod("classpath", Collections.singletonMap("location", file));
                    }
                }
            } });
        }
    });

    return new SimpleWorkResult(true);
}