Example usage for com.google.common.collect Iterables toArray

List of usage examples for com.google.common.collect Iterables toArray

Introduction

On this page you can find example usages of com.google.common.collect Iterables toArray.

Prototype

static <T> T[] toArray(Iterable<? extends T> iterable, T[] array) 

Source Link

Usage

From source file:edu.umn.msi.tropix.persistence.service.impl.RequestServiceImpl.java

/**
 * Fetches the active requests for the given grid id and returns them as a
 * typed array.
 */
public Request[] getActiveRequests(final String gridId) {
    // Materialize the DAO's iterable result directly into a Request[] copy.
    return Iterables.toArray(getTropixObjectDao().getActiveRequests(gridId), Request.class);
}

From source file:com.threerings.admin.web.server.ConfigServlet.java

/**
 * Reflects over the public fields of the config object registered under the
 * given key and converts each into a ConfigField. Returns null when any field
 * has an unsupported type or cannot be read.
 */
protected ConfigurationRecord buildRecord(String key) {
    DObject object = _confReg.getObject(key);
    List<ConfigField> configFields = Lists.newArrayList();
    Field[] fields = object.getClass().getFields();
    for (Field field : fields) {
        // NOTE(review): getModifiers() is a bitmask, so this equality test
        // skips any field that is not *exactly* public — e.g. public static
        // or public final fields. Presumably that is intentional (constants
        // are not editable config), but confirm; Modifier.isPublic would
        // behave differently here.
        if (field.getModifiers() != Modifier.PUBLIC) {
            continue;
        }
        FieldType type = TYPES.get(field.getType());
        if (type == null) {
            // Unsupported field type: abandon the whole record rather than
            // emit a partial one.
            log.warning("Unknown field type", "field", field.getName(), "type", field.getType());
            return null;
        }
        try {
            Object value = field.get(object);
            String valStr = type.toString(value);
            configFields.add(new ConfigField(field.getName(), type, valStr));

        } catch (IllegalAccessException e) {
            log.warning("Failure reflecting on configuration object", "key", key, "object", object, "field",
                    field, e);
            return null;
        }
    }
    ConfigurationRecord record = new ConfigurationRecord();
    record.fields = Iterables.toArray(configFields, ConfigField.class);
    return record;
}

From source file:software.reinvent.dependency.parser.service.CsvWriter.java

/**
 * Creates the csv files/* w  ww.  ja va 2 s. co  m*/
 * <ul>
 * <li>Internal_{date}.csv</li>
 * <li>External_{date}.csv</li>
 * <li>Artifacts_{date}.csv</li>
 * </ul>
 * with all important information's about the {@link Artifact}s and their {@link ArtifactDependency}'s.
 *
 * @param internalGroupId the internal maven group id
 * @param resultDir       the dir where the CSV files will be written
 * @param prefix          any optional prefix for the CSV files
 *
 * @throws IOException
 */
public void writeDependencies(final String internalGroupId, final File resultDir, final String prefix)
        throws IOException {
    final Set<ArtifactDependency> allDependencies = artifacts.stream().map(Artifact::getDependencies)
            .flatMap(Collection::stream).collect(Collectors.toSet());
    final Set<ArtifactDependency> internalDependencies = allDependencies.stream()
            .filter(isInternalPredicate(internalGroupId))
            .sorted(Comparator.comparing(ArtifactDependency::getGroupId)).collect(toSet());
    final Set<ArtifactDependency> externalDependencies = Sets
            .newHashSet(CollectionUtils.subtract(allDependencies, internalDependencies));

    final Multimap<ArtifactDependency, Artifact> dependencyToArtifact = HashMultimap.create();
    allDependencies.forEach(
            dependency -> artifacts.stream().filter(artifact -> artifact.getDependencies().contains(dependency))
                    .forEach(x -> dependencyToArtifact.put(dependency, x)));

    CSVWriter internalWriter = null;
    CSVWriter externalWriter = null;
    CSVWriter artifactWriter = null;
    try {
        resultDir.mkdirs();
        final File internalResultFile = new File(resultDir,
                prefix + "Internal_" + LocalDate.now().toString() + ".csv");
        final File externalResultFile = new File(resultDir,
                prefix + "External_" + LocalDate.now().toString() + ".csv");
        final File artifactResultFile = new File(resultDir,
                prefix + "Artifacts_" + LocalDate.now().toString() + ".csv");
        logger.info("Will write results to {} and {}.", internalResultFile, externalResultFile);
        internalWriter = new CSVWriter(new FileWriter(internalResultFile), separator);
        writeDependencyHeader(internalWriter);
        externalWriter = new CSVWriter(new FileWriter(externalResultFile), separator);
        writeDependencyHeader(externalWriter);
        artifactWriter = new CSVWriter(new FileWriter(artifactResultFile), separator);
        artifactWriter
                .writeNext(("groupId#artifactId#version#package#internalDependencies" + "#externalDependencies")
                        .split("#"));
        final CSVWriter finalInternalWriter = internalWriter;
        final CSVWriter finalExternalWriter = externalWriter;
        dependencyToArtifact.keySet().stream().sorted(Comparator.comparing(ArtifactDependency::getGroupId)
                .thenComparing(ArtifactDependency::getArtifactId)).forEach(dependency -> {
                    final List<String> dependentArtifacts = dependencyToArtifact.get(dependency).stream()
                            .map(Artifact::getArtifactId).sorted().collect(toList());
                    final String artifactLicenses = defaultIfBlank(
                            Joiner.on("\n").join(dependency.getArtifactLicenses()), "n/a in pom");

                    final ArrayList<String> newLine = Lists.newArrayList(dependency.getGroupId(),
                            dependency.getArtifactId(), Joiner.on("\n").join(dependency.getVersions()),
                            artifactLicenses, dependency.getDescription(),
                            Joiner.on("\n").join(dependentArtifacts));
                    final String[] csvLine = Iterables.toArray(newLine, String.class);
                    if (isInternal(internalGroupId, dependency)) {
                        finalInternalWriter.writeNext(csvLine);
                    } else {
                        finalExternalWriter.writeNext(csvLine);
                    }
                });
        final CSVWriter finalArtifactWriter = artifactWriter;
        artifacts.stream()
                .sorted(Comparator.comparing(Artifact::getGroupId).thenComparing(Artifact::getArtifactId))
                .forEachOrdered(artifact -> {
                    final String intDependencies = getDependencyColumn(artifact, internalDependencies,
                            ArtifactDependency::getArtifactId);
                    final String extDependencies = getDependencyColumn(artifact, externalDependencies,
                            ArtifactDependency::toString);
                    final ArrayList<String> newLine = Lists.newArrayList(artifact.getGroupId(),
                            artifact.getArtifactId(), Joiner.on(",").join(artifact.getVersions()),
                            defaultString(artifact.getPackaging()),

                            intDependencies, extDependencies);
                    final String[] csvLine = Iterables.toArray(newLine, String.class);
                    finalArtifactWriter.writeNext(csvLine);
                });
    } catch (IOException e)

    {
        logger.error("Could not write csv.", e);
    } finally

    {
        if (internalWriter != null) {
            internalWriter.close();
        }
        if (externalWriter != null) {
            externalWriter.close();
        }
        if (artifactWriter != null) {
            artifactWriter.close();
        }
    }

    logger.info("Found {} dependencies. {} internal and {} external", allDependencies.size(),
            internalDependencies.size(), externalDependencies.size());
}

From source file:cpw.mods.fml.common.asm.transformers.deobf.FMLDeobfuscatingRemapper.java

/**
 * Loads the LZMA-compressed SRG deobfuscation data from the given resource
 * and populates the class/method/field mapping tables.
 */
public void setup(File mcDir, LaunchClassLoader classLoader, String deobfFileName) {
    this.classLoader = classLoader;
    // BUG FIX: the raw maps were previously assigned *inside* the try block,
    // after readLines(). If reading the resource threw an IOException, they
    // stayed null and newHashMapWithExpectedSize(rawMethodMaps.size()) below
    // threw an NPE after the catch. Initialize them up front instead.
    rawMethodMaps = Maps.newHashMap();
    rawFieldMaps = Maps.newHashMap();
    Builder<String, String> builder = ImmutableBiMap.<String, String>builder();
    try {
        InputStream classData = getClass().getResourceAsStream(deobfFileName);
        LZMAInputSupplier zis = new LZMAInputSupplier(classData);
        InputSupplier<InputStreamReader> srgSupplier = CharStreams.newReaderSupplier(zis, Charsets.UTF_8);
        List<String> srgList = CharStreams.readLines(srgSupplier);
        // SRG records are colon/space separated, e.g. "CL: a/b c/d".
        Splitter splitter = Splitter.on(CharMatcher.anyOf(": ")).omitEmptyStrings().trimResults();
        for (String line : srgList) {
            String[] parts = Iterables.toArray(splitter.split(line), String.class);
            String typ = parts[0];
            if ("CL".equals(typ)) {
                parseClass(builder, parts);
            } else if ("MD".equals(typ)) {
                parseMethod(parts);
            } else if ("FD".equals(typ)) {
                parseField(parts);
            }
        }
    } catch (IOException ioe) {
        FMLRelaunchLog.log(Level.ERROR, ioe, "An error occurred loading the deobfuscation map data");
    }
    // BUG FIX: build the bimap on both the success and failure paths so
    // classNameBiMap is never left null (it is now empty on failure).
    classNameBiMap = builder.build();
    methodNameMaps = Maps.newHashMapWithExpectedSize(rawMethodMaps.size());
    fieldNameMaps = Maps.newHashMapWithExpectedSize(rawFieldMaps.size());
}

From source file:org.polymap.core.data.ui.featuretable.FeatureTableViewer.java

/**
 * Returns the currently selected table rows as a typed array.
 */
public IFeatureTableElement[] getSelectedElements() {
    // Adapt the viewer selection into an iterable, then copy it into an array.
    final SelectionAdapter selectedRows = new SelectionAdapter(getSelection());
    return Iterables.toArray(selectedRows, IFeatureTableElement.class);
}

From source file:net.minecraftforge.gradle.patcher.TaskGenBinPatches.java

/**
 * Reads the SRG mapping file line by line, recording obfuscated-to-SRG
 * class-name pairs plus an outer-class to inner-class index.
 */
private void loadMappings() throws Exception {
    Files.readLines(getSrg(), Charset.defaultCharset(), new LineProcessor<String>() {

        // Splits records on ':' and spaces, dropping empty tokens, so
        // "CL: a/b c/d" becomes ["CL", "a/b", "c/d"].
        Splitter splitter = Splitter.on(CharMatcher.anyOf(": ")).omitEmptyStrings().trimResults();

        @Override
        public boolean processLine(String line) throws IOException {
            // Only class ("CL") records matter here; returning true keeps
            // the LineProcessor iterating over the remaining lines.
            if (!line.startsWith("CL")) {
                return true;
            }

            // parts[1] = obfuscated name, parts[2] = mapped name. Assumes
            // every CL record has at least three tokens — TODO confirm
            // against the SRG file format.
            String[] parts = Iterables.toArray(splitter.split(line), String.class);
            obfMapping.put(parts[1], parts[2]);
            // Strip the package path to get the simple mapped class name.
            String srgName = parts[2].substring(parts[2].lastIndexOf('/') + 1);
            srgMapping.put(srgName, parts[1]);
            // A '$' past position 0 marks an inner class; index it under its
            // outer class name.
            int innerDollar = srgName.lastIndexOf('$');
            if (innerDollar > 0) {
                String outer = srgName.substring(0, innerDollar);
                innerClasses.put(outer, srgName);
            }
            return true;
        }

        @Override
        public String getResult() {
            // No aggregate result needed; the maps populated above are the output.
            return null;
        }
    });
}

From source file:com.android.tools.idea.npw.importing.CreateModuleFromArchiveAction.java

/**
 * Removes references to {@code myArchivePath} from a FILES dependency.
 * Returns null when nothing remains, a single-path dependency when exactly
 * one entry survives, and otherwise a dependency carrying the surviving
 * paths as an array. Non-FILES dependencies are returned unchanged.
 */
@Nullable
private Dependency filterDependencyStatement(Dependency dependency, File moduleRoot) {
    Object raw = dependency.data;
    // Only FILES dependencies with payload need filtering.
    if (dependency.type != Dependency.Type.FILES || raw == null) {
        return dependency;
    }
    final String[] jarPaths;
    if (raw instanceof String[]) {
        jarPaths = (String[]) raw;
    } else {
        jarPaths = new String[] { raw.toString() };
    }
    final ArrayList<String> kept = Lists.newArrayListWithCapacity(jarPaths.length);
    for (String jarPath : jarPaths) {
        // Resolve relative entries against the module root before comparing.
        File candidate = new File(jarPath);
        if (!candidate.isAbsolute()) {
            candidate = new File(moduleRoot, jarPath);
        }
        if (!FileUtil.filesEqual(candidate, myArchivePath)) {
            kept.add(jarPath);
        }
    }
    switch (kept.size()) {
    case 0:
        return null;
    case 1:
        return new Dependency(dependency.scope, dependency.type, kept.get(0));
    default:
        return new Dependency(dependency.scope, dependency.type, Iterables.toArray(kept, String.class));
    }
}

From source file:it.f2informatica.core.services.ConsultantServiceImpl.java

/**
 * Drops every language entry matched by {@code DOES_NOT_HAVE_ITEM_TO_ADD}
 * and returns the remaining entries as a fresh array. The input array is
 * not modified.
 */
private LanguageModel[] removeFurtherEmptyLanguages(LanguageModel[] languageModels) {
    final List<LanguageModel> retained = Lists.newArrayList(languageModels);
    // In-place removal on the copy, then conversion back to a typed array.
    Iterables.removeIf(retained, DOES_NOT_HAVE_ITEM_TO_ADD);
    return Iterables.toArray(retained, LanguageModel.class);
}

From source file:com.android.tools.idea.ui.properties.ListenerManager.java

/**
 * Convenience version of {@link #listenAll(ObservableValue[])} that works when you have a
 * {@link Collection} instead of an array.
 *
 * @param values the observable values to listen to
 * @return the composite listener produced by the array overload
 */
@NotNull
public CompositeListener listenAll(@NotNull Collection<? extends ObservableValue<?>> values) {
    // Copy the collection into an ObservableValue[] so the array overload can
    // be reused; the conversion to the raw element type is unchecked, hence
    // the suppression below.
    //noinspection unchecked
    return listenAll(Iterables.toArray(values, ObservableValue.class));
}

From source file:com.leacox.dagger.servlet.DaggerServletContextListener.java

/**
 * Bootstraps the Dagger object graph for the servlet context: builds the
 * scoped graph, wires the internal providers, runs the subclass's
 * {@code configureServlets()} hook, validates the servlet mappings, and
 * publishes the graph on the ServletContext.
 *
 * @throws IllegalStateException if called re-entrantly or if two servlets
 *         are mapped to the same URI pattern
 */
@Override
public void contextInitialized(ServletContextEvent servletContextEvent) {
    // Guard against the listener being invoked twice.
    checkState(filterDefinitions == null, "Re-entry is not allowed.");
    checkState(servletDefinitions == null, "Re-entry is not allowed.");
    filterDefinitions = Lists.newArrayList();
    servletDefinitions = Lists.newArrayList();
    try {
        ServletContext servletContext = servletContextEvent.getServletContext();

        // Base modules form the unscoped graph; request-scoped modules are
        // layered on top.
        ObjectGraph unscopedGraph = ObjectGraph.create(getBaseModules());
        ObjectGraph scopingObjectGraph = ScopingObjectGraph.create(unscopedGraph)
                .addScopedModules(RequestScoped.class, getRequestScopedModules());

        scopingObjectGraph.get(ServletContextProvider.class).set(servletContext);
        scopingObjectGraph.get(InternalServletModule.ObjectGraphProvider.class).set(scopingObjectGraph);
        Iterable<Object> fullModules = Iterables.concat(Arrays.asList(getBaseModules()),
                Arrays.asList(getRequestScopedModules()));
        scopingObjectGraph.get(InternalServletModule.FullModulesProvider.class)
                .set(Iterables.toArray(fullModules, Object.class));

        // Subclass hook that populates filterDefinitions/servletDefinitions.
        configureServlets();

        scopingObjectGraph.get(InternalServletModule.FilterDefinitionsProvider.class)
                .set(filterDefinitions.toArray(new FilterDefinition[filterDefinitions.size()]));

        // Ensure that servlets are not bound twice to the same pattern.
        // IMPROVED: Set.add returns false when the element is already
        // present, folding the previous contains()+add() pair into a single
        // lookup.
        // TODO: Consider finding all servlet configuration errors and throw one exception with all of them.
        Set<String> servletUris = Sets.newHashSet();
        for (ServletDefinition servletDefinition : servletDefinitions) {
            if (!servletUris.add(servletDefinition.getPattern())) {
                throw new IllegalStateException("More than one servlet was mapped to the same URI pattern: "
                        + servletDefinition.getPattern());
            }
        }
        scopingObjectGraph.get(InternalServletModule.ServletDefinitionsProvider.class)
                .set(servletDefinitions.toArray(new ServletDefinition[servletDefinitions.size()]));

        // Make sure the dagger filter is injected
        DaggerFilter daggerFilter = scopingObjectGraph.get(DaggerFilter.class);
        scopingObjectGraph.inject(daggerFilter);

        objectGraph = scopingObjectGraph;

        servletContext.setAttribute(OBJECT_GRAPH_NAME, scopingObjectGraph);
    } finally {
        // Always reset so a failed initialization can be retried and the
        // re-entry guard above works on the next invocation.
        filterDefinitions = null;
        servletDefinitions = null;
    }
}