Example usage for java.util LinkedHashSet add

List of usage examples for java.util LinkedHashSet add

Introduction

On this page you can find usage examples for java.util.LinkedHashSet#add.

Prototype

boolean add(E e);

Source Link

Document

Adds the specified element to this set if it is not already present (optional operation).

Usage

From source file:org.artifactory.storage.db.build.service.BuildStoreServiceImpl.java

@Override
public Set<BuildRun> getLatestBuildsByName() {
    try {
        // Preserve the name order returned by the DAO via a LinkedHashSet.
        List<String> buildNames = buildsDao.getAllBuildNames();
        LinkedHashSet<BuildRun> latestBuilds = new LinkedHashSet<>(buildNames.size());
        for (String buildName : buildNames) {
            BuildEntity latest = buildsDao.getLatestBuild(buildName);
            if (latest == null) {
                continue;   // no build recorded under this name
            }
            latestBuilds.add(getBuildRun(latest));
        }
        return latestBuilds;
    } catch (SQLException e) {
        throw new StorageException("Could not list all builds by name and latest build date", e);
    }
}

From source file:org.openquark.cal.eclipse.embedded.analyzer.BindingVisitor.java

/**
 * {@inheritDoc}
 * <p>
 * Visits a let expression: first binds the names of all local definitions
 * (let definitions are mutually recursive, so every name must be visible
 * before any defining expression is walked), then delegates to the
 * superclass to traverse the let expression inside the new scope.
 */
public R visit_Expr_Let(Let let, Object arg) {

    enterScope();

    // Let expressions are mutually recursive, so we want to bind the
    // function names before we enter the new scope associated with each local function.

    /**
     * Handles the adding of bindings for local definitions (both local functions and local pattern match declarations).
     * This is done by walking the local definitions and adding a binding for each local function / pattern-bound variable
     * encountered. The synthetic local function generated by the compiler for desugaring a local pattern match declaration
     * is also taken into account.
     * 
     * @author Joseph Wong
     */
    class LocallyDefinedNamesCollector extends LocalBindingsProcessor<LinkedHashSet<String>, R> {

        /**
         * {@inheritDoc}
         * Registers a binding for a named local definition; the pattern-var
         * accumulator argument is not needed here.
         */
        void processLocalDefinitionBinding(final String name, final SourceModel.SourceElement localDefinition,
                final LinkedHashSet<String> arg) {
            addLocalDefinitionBinding(name, localDefinition);
        }

        /**
         * {@inheritDoc}
         * Records a pattern-bound variable name, preserving source order.
         */
        void additionallyProcessPatternVar(final Pattern.Var var, final LinkedHashSet<String> patternVarNames) {
            patternVarNames.add(var.getName());
        }

        /**
         * {@inheritDoc}
         * A punned textual field pattern binds a variable with the field's name.
         */
        void additionallyProcessPunnedTextualFieldPattern(final FieldName.Textual fieldName,
                final SourceRange fieldNameSourceRange, final LinkedHashSet<String> patternVarNames) {
            patternVarNames.add(fieldName.getCalSourceForm());
        }

        /**
         * Adds an additional binding for the synthetic local function which is generated by the compiler to host the defining
         * expression of a local pattern match declaration. This is done to keep the local function identifier generator in
         * sync with what the compiler would do.
         * 
         * @param patternMatchDecl the pattern match declaration.
         * @param patternVarNames the LinkedHashSet of the pattern variable names, in source order.
         */
        private void addBindingForSyntheticLocalDefinition(final LocalDefn.PatternMatch patternMatchDecl,
                final LinkedHashSet<String> patternVarNames) {
            addLocalDefinitionBinding(makeTempVarNameForDesugaredLocalPatternMatchDecl(patternVarNames),
                    patternMatchDecl);
        }

        /**
         * {@inheritDoc}
         * Collects pattern variables from a data-constructor unpacking, then
         * binds the compiler's synthetic local definition for it.
         */
        public R visit_LocalDefn_PatternMatch_UnpackDataCons(
                final LocalDefn.PatternMatch.UnpackDataCons unpackDataCons, final LinkedHashSet<String> arg) {
            // visit only the patterns
            final LinkedHashSet<String> patternVarNames = new LinkedHashSet<String>();
            super.visit_LocalDefn_PatternMatch_UnpackDataCons(unpackDataCons, patternVarNames);
            // add the synthetic definition last
            addBindingForSyntheticLocalDefinition(unpackDataCons, patternVarNames);
            return null;
        }

        /**
         * {@inheritDoc}
         * Collects pattern variables from a list-cons unpacking, then binds
         * the compiler's synthetic local definition for it.
         */
        public R visit_LocalDefn_PatternMatch_UnpackListCons(
                final LocalDefn.PatternMatch.UnpackListCons unpackListCons, final LinkedHashSet<String> arg) {
            // visit only the patterns
            final LinkedHashSet<String> patternVarNames = new LinkedHashSet<String>();
            super.visit_LocalDefn_PatternMatch_UnpackListCons(unpackListCons, patternVarNames);
            // add the synthetic definition last
            addBindingForSyntheticLocalDefinition(unpackListCons, patternVarNames);
            return null;
        }

        /**
         * {@inheritDoc}
         * Collects pattern variables from a record unpacking, then binds the
         * compiler's synthetic local definition for it.
         */
        public R visit_LocalDefn_PatternMatch_UnpackRecord(
                final LocalDefn.PatternMatch.UnpackRecord unpackRecord, final LinkedHashSet<String> arg) {
            // visit only the field patterns (and not the base record pattern - since we do not support them in local pattern match decl)
            final LinkedHashSet<String> patternVarNames = new LinkedHashSet<String>();
            super.visit_LocalDefn_PatternMatch_UnpackRecord(unpackRecord, patternVarNames);
            // add the synthetic definition last
            addBindingForSyntheticLocalDefinition(unpackRecord, patternVarNames);
            return null;
        }

        /**
         * {@inheritDoc}
         * Collects pattern variables from a tuple unpacking, then binds the
         * compiler's synthetic local definition for it.
         */
        public R visit_LocalDefn_PatternMatch_UnpackTuple(final LocalDefn.PatternMatch.UnpackTuple unpackTuple,
                final LinkedHashSet<String> arg) {
            // visit only the patterns
            final LinkedHashSet<String> patternVarNames = new LinkedHashSet<String>();
            super.visit_LocalDefn_PatternMatch_UnpackTuple(unpackTuple, patternVarNames);
            // add the synthetic definition last
            addBindingForSyntheticLocalDefinition(unpackTuple, patternVarNames);
            return null;
        }
    }

    // Use the LocallyDefinedNamesCollector to visit the let definitions
    final LocallyDefinedNamesCollector locallyDefinedNamesCollector = new LocallyDefinedNamesCollector();
    final int nLocalFunctions = let.getNLocalDefinitions();
    for (int i = 0; i < nLocalFunctions; i++) {
        let.getNthLocalDefinition(i).accept(locallyDefinedNamesCollector, null);
    }

    // Now call the superclass implementation to walk through the let expression with the right name bindings
    R ret = super.visit_Expr_Let(let, arg);
    leaveScope();
    return ret;
}

From source file:jef.tools.reflect.ClassEx.java

/**
 * Collects the interfaces directly implemented by the wrapped class and by
 * each of its superclasses (Object excluded), preserving discovery order.
 * Note: superinterfaces of those interfaces are not walked transitively;
 * this mirrors the original behavior.
 *
 * @return the interfaces in declaration order, duplicates removed
 */
public Class<?>[] getAllInterfaces() {
    LinkedHashSet<Class<?>> intf = new LinkedHashSet<Class<?>>();
    // The null guard fixes an NPE in the original: when cls is an interface
    // (or a primitive), getSuperclass() returns null and the old
    // "while (c != Object.class)" loop dereferenced it.
    for (Class<?> c = cls; c != null && c != Object.class; c = c.getSuperclass()) {
        for (Class<?> ic : c.getInterfaces()) {
            intf.add(ic);
        }
    }
    return intf.toArray(new Class<?>[intf.size()]);
}

From source file:com.datatorrent.stram.client.StramAppLauncher.java

/**
 * Submit application to the cluster and return the app id.
 * Sets the context class loader for application dependencies.
 *
 * @param appConfig the factory that builds the application's logical plan
 * @return ApplicationId of the launched application
 * @throws Exception if dependency loading, DFS access, or submission fails
 */
public ApplicationId launchApp(AppFactory appConfig) throws Exception {
    loadDependencies();
    Configuration conf = propertiesBuilder.conf;
    conf.setEnum(StreamingApplication.ENVIRONMENT, StreamingApplication.Environment.CLUSTER);
    LogicalPlan dag = appConfig.createApp(propertiesBuilder);
    // Token max lifetimes: the DT-specific keys take precedence over the
    // generic HDFS/YARN keys, which in turn fall back to defaults.
    long hdfsTokenMaxLifeTime = conf.getLong(StramClientUtils.DT_HDFS_TOKEN_MAX_LIFE_TIME, conf.getLong(
            StramClientUtils.HDFS_TOKEN_MAX_LIFE_TIME, StramClientUtils.DELEGATION_TOKEN_MAX_LIFETIME_DEFAULT));
    dag.setAttribute(LogicalPlan.HDFS_TOKEN_LIFE_TIME, hdfsTokenMaxLifeTime);
    long rmTokenMaxLifeTime = conf.getLong(StramClientUtils.DT_RM_TOKEN_MAX_LIFE_TIME,
            conf.getLong(YarnConfiguration.DELEGATION_TOKEN_MAX_LIFETIME_KEY,
                    YarnConfiguration.DELEGATION_TOKEN_MAX_LIFETIME_DEFAULT));
    dag.setAttribute(LogicalPlan.RM_TOKEN_LIFE_TIME, rmTokenMaxLifeTime);
    // Keytab resolution: an explicitly configured DFS keytab path wins;
    // otherwise copy the local auth keytab into DFS (once) and use that copy.
    if (conf.get(StramClientUtils.KEY_TAB_FILE) != null) {
        dag.setAttribute(LogicalPlan.KEY_TAB_FILE, conf.get(StramClientUtils.KEY_TAB_FILE));
    } else if (conf.get(StramUserLogin.DT_AUTH_KEYTAB) != null) {
        Path localKeyTabPath = new Path(conf.get(StramUserLogin.DT_AUTH_KEYTAB));
        FileSystem fs = StramClientUtils.newFileSystemInstance(conf);
        try {
            Path destPath = new Path(StramClientUtils.getDTDFSRootDir(fs, conf), localKeyTabPath.getName());
            if (!fs.exists(destPath)) {
                fs.copyFromLocalFile(false, false, localKeyTabPath, destPath);
            }
            dag.setAttribute(LogicalPlan.KEY_TAB_FILE, destPath.toString());
        } finally {
            fs.close();
        }
    }
    String tokenRefreshFactor = conf.get(StramClientUtils.TOKEN_ANTICIPATORY_REFRESH_FACTOR);
    if (tokenRefreshFactor != null && tokenRefreshFactor.trim().length() > 0) {
        dag.setAttribute(LogicalPlan.TOKEN_REFRESH_ANTICIPATORY_FACTOR, Double.parseDouble(tokenRefreshFactor));
    }
    StramClient client = new StramClient(conf, dag);
    try {
        client.start();
        // Assemble lib jars: configured CSV list first, then deploy jars;
        // the LinkedHashSet de-duplicates while keeping that order.
        LinkedHashSet<String> libjars = Sets.newLinkedHashSet();
        String libjarsCsv = conf.get(LIBJARS_CONF_KEY_NAME);
        if (libjarsCsv != null) {
            String[] jars = StringUtils.splitByWholeSeparator(libjarsCsv, StramClient.LIB_JARS_SEP);
            libjars.addAll(Arrays.asList(jars));
        }
        if (deployJars != null) {
            for (File deployJar : deployJars) {
                libjars.add(deployJar.getAbsolutePath());
            }
        }

        client.setResources(libjars);
        client.setFiles(conf.get(FILES_CONF_KEY_NAME));
        client.setArchives(conf.get(ARCHIVES_CONF_KEY_NAME));
        client.setOriginalAppId(conf.get(ORIGINAL_APP_ID));
        client.setQueueName(conf.get(QUEUE_NAME));
        client.startApplication();
        return client.getApplicationReport().getApplicationId();
    } finally {
        client.stop();
    }
}

From source file:com.redhat.rhn.taskomatic.task.DailySummary.java

/**
 * DO NOT CALL FROM OUTSIDE THIS CLASS. Renders the actions email message.
 * Builds a column header of action statuses, a tree of errata and
 * non-errata action counts, and a legend mapping advisories to synopses.
 *
 * @param actions list of recent actions
 * @return the actions email message
 */
public String renderActionsMessage(List<ActionMessage> actions) {

    int longestActionLength = HEADER_SPACER;
    int longestStatusLength = 0;
    StringBuilder hdr = new StringBuilder();
    StringBuilder body = new StringBuilder();
    StringBuilder legend = new StringBuilder();
    StringBuilder msg = new StringBuilder();
    // Parameterized collection types replace the former raw types.
    LinkedHashSet<String> statusSet = new LinkedHashSet<String>();
    TreeMap<String, Map<String, Integer>> nonErrataActions = new TreeMap<String, Map<String, Integer>>();
    TreeMap<String, Map<String, Integer>> errataActions = new TreeMap<String, Map<String, Integer>>();
    TreeMap<String, String> errataSynopsis = new TreeMap<String, String>();

    legend.append(LocalizationService.getInstance().getMessage("taskomatic.daily.errata"));
    legend.append("\n\n");

    for (ActionMessage am : actions) {

        // Set.add already reports whether the element was new, so the
        // former contains()+add() pair collapses to a single call.
        if (statusSet.add(am.getStatus())) {
            if (am.getStatus().length() > longestStatusLength) {
                longestStatusLength = am.getStatus().length();
            }
        }

        if (am.getType().equals(ERRATA_UPDATE)) {
            String advisoryKey = ERRATA_INDENTION + am.getAdvisory();

            if (!errataActions.containsKey(advisoryKey)) {
                errataActions.put(advisoryKey, new HashMap<String, Integer>());
                if (advisoryKey.length() + HEADER_SPACER > longestActionLength) {
                    longestActionLength = advisoryKey.length() + HEADER_SPACER;
                }
            }
            Map<String, Integer> counts = errataActions.get(advisoryKey);
            counts.put(am.getStatus(), am.getCount());

            if (am.getAdvisory() != null && !errataSynopsis.containsKey(am.getAdvisory())) {
                errataSynopsis.put(am.getAdvisory(), am.getSynopsis());
            }
        } else {
            if (!nonErrataActions.containsKey(am.getType())) {
                nonErrataActions.put(am.getType(), new HashMap<String, Integer>());
                if (am.getType().length() + HEADER_SPACER > longestActionLength) {
                    longestActionLength = am.getType().length() + HEADER_SPACER;
                }
            }
            Map<String, Integer> counts = nonErrataActions.get(am.getType());
            counts.put(am.getStatus(), am.getCount());
        }

    }

    // Header row: pad each status name so the columns line up.
    hdr.append(StringUtils.repeat(" ", longestActionLength));
    for (String status : statusSet) {
        hdr.append(status + StringUtils.repeat(" ", (longestStatusLength + ERRATA_SPACER) - status.length()));
    }

    if (!errataActions.isEmpty()) {
        body.append(ERRATA_UPDATE + ":" + "\n");
    }
    StringBuffer formattedErrataActions = renderActionTree(longestActionLength, longestStatusLength, statusSet,
            errataActions);
    body.append(formattedErrataActions);

    for (String advisory : errataSynopsis.keySet()) {
        legend.append(ERRATA_INDENTION + advisory + ERRATA_INDENTION + errataSynopsis.get(advisory) + "\n");
    }

    StringBuffer formattedNonErrataActions = renderActionTree(longestActionLength, longestStatusLength,
            statusSet, nonErrataActions);
    body.append(formattedNonErrataActions);

    // finally put all this together
    msg.append(hdr.toString());
    msg.append("\n");
    msg.append(body.toString());
    msg.append("\n\n");
    if (!errataSynopsis.isEmpty()) {
        msg.append(legend.toString());
    }
    return msg.toString();
}

From source file:org.apache.solr.cloud.TestCloudPivotFacet.java

/**
 * Randomized distributed pivot-facet test: indexes at least 500 random
 * documents, then runs five rounds of randomized pivot-facet queries
 * (random fields, limits, offsets, mincount, sort, missing, overrequest,
 * and optional stats) and validates the pivot counts for each.
 */
@Test
public void test() throws Exception {

    sanityCheckAssertNumerics();

    waitForThingsToLevelOut(30000); // TODO: why would we have to wait?
    handle.clear();
    handle.put("QTime", SKIPVAL);
    handle.put("timestamp", SKIPVAL);

    final Set<String> fieldNameSet = new HashSet<>();

    // build up a randomized index
    final int numDocs = atLeast(500);
    log.info("numDocs: {}", numDocs);

    for (int i = 1; i <= numDocs; i++) {
        SolrInputDocument doc = buildRandomDocument(i);

        // not efficient, but it guarantees that even if people change buildRandomDocument
        // we'll always have the full list of fields w/o needing to keep code in sync
        fieldNameSet.addAll(doc.getFieldNames());

        cloudClient.add(doc);
    }
    cloudClient.commit();

    fieldNameSet.remove("id");
    // Set.add returns true only if the field was absent — i.e. no doc uses it.
    assertTrue("WTF, bogus field exists?", fieldNameSet.add("bogus_not_in_any_doc_s"));

    final String[] fieldNames = fieldNameSet.toArray(new String[fieldNameSet.size()]);
    Arrays.sort(fieldNames); // need determinism when picking random fields

    for (int i = 0; i < 5; i++) {

        String q = "*:*";
        if (random().nextBoolean()) {
            q = "id:[* TO " + TestUtil.nextInt(random(), 300, numDocs) + "]";
        }
        ModifiableSolrParams baseP = params("rows", "0", "q", q);

        if (random().nextBoolean()) {
            baseP.add("fq", "id:[* TO " + TestUtil.nextInt(random(), 200, numDocs) + "]");
        }

        final boolean stats = random().nextBoolean();
        if (stats) {
            baseP.add(StatsParams.STATS, "true");

            // if we are doing stats, then always generated the same # of STATS_FIELD
            // params, using multiple tags from a fixed set, but with diff fieldName values.
            // later, each pivot will randomly pick a tag.
            baseP.add(StatsParams.STATS_FIELD, "{!key=sk1 tag=st1,st2}" + pickRandomStatsFields(fieldNames));
            baseP.add(StatsParams.STATS_FIELD, "{!key=sk2 tag=st2,st3}" + pickRandomStatsFields(fieldNames));
            baseP.add(StatsParams.STATS_FIELD, "{!key=sk3 tag=st3,st4}" + pickRandomStatsFields(fieldNames));
            // NOTE: there's a chance that some of those stats field names
            // will be the same, but if so, all the better to test that edge case
        }

        ModifiableSolrParams pivotP = params(FACET, "true");

        // put our FACET_PIVOT params in a set in case we just happen to pick the same one twice
        LinkedHashSet<String> pivotParamValues = new LinkedHashSet<String>();
        pivotParamValues.add(buildPivotParamValue(buildRandomPivot(fieldNames)));

        if (random().nextBoolean()) {
            pivotParamValues.add(buildPivotParamValue(buildRandomPivot(fieldNames)));
        }
        pivotP.set(FACET_PIVOT, pivotParamValues.toArray(new String[pivotParamValues.size()]));

        // keep limit low - lots of unique values, and lots of depth in pivots
        pivotP.add(FACET_LIMIT, "" + TestUtil.nextInt(random(), 1, 17));

        // sometimes use an offset
        if (random().nextBoolean()) {
            pivotP.add(FACET_OFFSET, "" + TestUtil.nextInt(random(), 0, 7));
        }

        if (random().nextBoolean()) {
            String min = "" + TestUtil.nextInt(random(), 0, numDocs + 10);
            pivotP.add(FACET_PIVOT_MINCOUNT, min);
            // trace param for validation
            baseP.add(TRACE_MIN, min);
        }

        if (random().nextBoolean()) {
            pivotP.add(FACET_DISTRIB_MCO, "true");
            // trace param for validation
            baseP.add(TRACE_DISTRIB_MIN, "true");
        }

        if (random().nextBoolean()) {
            String missing = "" + random().nextBoolean();
            pivotP.add(FACET_MISSING, missing);
            // trace param for validation
            baseP.add(TRACE_MISS, missing);
        }

        if (random().nextBoolean()) {
            String sort = random().nextBoolean() ? "index" : "count";
            pivotP.add(FACET_SORT, sort);
            // trace param for validation
            baseP.add(TRACE_SORT, sort);
        }

        // overrequest
        //
        // NOTE: since this test focuses on accuracy of refinement, and doesn't do 
        // control collection comparisons, there isn't a lot of need for excessive
        // overrequesting -- we focus here on trying to exercise the various edge cases
        // involved as different values are used with overrequest
        if (0 == TestUtil.nextInt(random(), 0, 4)) {
            // we want a decent chance of no overrequest at all
            pivotP.add(FACET_OVERREQUEST_COUNT, "0");
            pivotP.add(FACET_OVERREQUEST_RATIO, "0");
        } else {
            if (random().nextBoolean()) {
                pivotP.add(FACET_OVERREQUEST_COUNT, "" + TestUtil.nextInt(random(), 0, 5));
            }
            if (random().nextBoolean()) {
                // sometimes give a ratio less then 1, code should be smart enough to deal
                float ratio = 0.5F + random().nextFloat();
                // sometimes go negative
                if (random().nextBoolean()) {
                    ratio *= -1;
                }
                pivotP.add(FACET_OVERREQUEST_RATIO, "" + ratio);
            }
        }

        assertPivotCountsAreCorrect(baseP, pivotP);
    }
}

From source file:org.pentaho.reporting.ui.datasources.pmd.PmdDataSourceEditor.java

private ScriptEngineFactory[] getScriptEngineLanguages() {
    // The leading null entry represents the "no language" choice; the
    // LinkedHashSet then keeps the engine factories in discovery order,
    // dropping any duplicates.
    final LinkedHashSet<ScriptEngineFactory> factories = new LinkedHashSet<ScriptEngineFactory>();
    factories.add(null);
    factories.addAll(new ScriptEngineManager().getEngineFactories());
    return factories.toArray(new ScriptEngineFactory[factories.size()]);
}

From source file:org.artificer.server.mvn.services.MavenFacadeServlet.java

/**
 * Generates the maven-metadata.xml file dynamically for a given groupId/artifactId pair.  This will
 * list all of the versions available for that groupId+artifactId, along with the latest release and
 * snapshot versions./*from  ww w  .  ja v a 2 s  . c o  m*/
 * @param gavInfo
 */
private String doGenerateArtifactDirMavenMetaData(MavenGavInfo gavInfo) throws Exception {
    PagedResult<ArtifactSummary> artifacts = queryService.query("/s-ramp[@maven.groupId = '"
            + gavInfo.getGroupId() + "' and @maven.artifactId = '" + gavInfo.getArtifactId() + "']",
            "createdTimestamp", true);
    if (artifacts.getTotalSize() == 0) {
        return null;
    }

    String groupId = gavInfo.getGroupId();
    String artifactId = gavInfo.getArtifactId();
    String latest = null;
    String release = null;
    String lastUpdated = null;

    LinkedHashSet<String> versions = new LinkedHashSet<String>();
    SimpleDateFormat format = new SimpleDateFormat("yyyyMMddHHmmss");
    for (ArtifactSummary artifactSummary : artifacts.getResults()) {
        BaseArtifactType artifact = artifactService.getMetaData(artifactSummary.getModel(),
                artifactSummary.getType(), artifactSummary.getUuid());
        String version = ArtificerModelUtils.getCustomProperty(artifact, "maven.version");
        if (versions.add(version)) {
            latest = version;
            if (!version.endsWith("-SNAPSHOT")) {
                release = version;
            }
        }
        lastUpdated = format.format(artifactSummary.getCreatedTimestamp().getTime());
    }

    StringBuilder mavenMetadata = new StringBuilder();
    mavenMetadata.append("<?xml version=\"1.0\" encoding=\"UTF-8\"?>");
    mavenMetadata.append("<metadata>\n");
    mavenMetadata.append("  <groupId>").append(groupId).append("</groupId>\n");
    mavenMetadata.append("  <artifactId>").append(artifactId).append("</artifactId>\n");
    mavenMetadata.append("  <versioning>\n");
    mavenMetadata.append("    <latest>").append(latest).append("</latest>\n");
    mavenMetadata.append("    <release>").append(release).append("</release>\n");
    mavenMetadata.append("    <versions>\n");
    for (String version : versions) {
        mavenMetadata.append("      <version>").append(version).append("</version>\n");
    }
    mavenMetadata.append("    </versions>\n");
    mavenMetadata.append("    <lastUpdated>").append(lastUpdated).append("</lastUpdated>\n");
    mavenMetadata.append("  </versioning>\n");
    mavenMetadata.append("</metadata>\n");

    if (!gavInfo.isHash()) {
        return mavenMetadata.toString();
    } else {
        return generateHash(mavenMetadata.toString(), gavInfo.getHashAlgorithm());
    }
}

From source file:org.springframework.cloud.dataflow.app.launcher.ModuleLauncher.java

/**
 * Launches several modules aggregated inside a single JVM: resolves each
 * module archive, builds a shared class loader over the union of their
 * (de-duplicated) jar URLs, and starts a runner thread that invokes every
 * module's main class with its arguments.
 *
 * @param moduleLaunchRequests the modules to launch, with per-module arguments
 * @param aggregateArgs aggregate-level options (e.g. include/exclude dependency lists)
 * @throws RuntimeException wrapping any failure, with the request list in the message
 */
public void launchAggregatedModules(List<ModuleLaunchRequest> moduleLaunchRequests,
        Map<String, String> aggregateArgs) {
    try {
        List<String> mainClassNames = new ArrayList<>();
        LinkedHashSet<URL> jarURLs = new LinkedHashSet<>();
        // Set membership check replaces the former List.contains scan,
        // turning the per-archive de-duplication from O(n^2) into O(n).
        LinkedHashSet<String> seenArchives = new LinkedHashSet<>();
        final List<String[]> arguments = new ArrayList<>();
        final ClassLoader classLoader;
        if (!(aggregateArgs.containsKey(EXCLUDE_DEPENDENCIES_ARG)
                || aggregateArgs.containsKey(INCLUDE_DEPENDENCIES_ARG))) {
            for (ModuleLaunchRequest moduleLaunchRequest : moduleLaunchRequests) {
                Resource resource = resolveModule(moduleLaunchRequest.getModule());
                JarFileArchive jarFileArchive = new JarFileArchive(resource.getFile());
                jarURLs.add(jarFileArchive.getUrl());
                for (Archive archive : jarFileArchive.getNestedArchives(ArchiveMatchingEntryFilter.FILTER)) {
                    // avoid duplication based on unique JAR names
                    String urlAsString = archive.getUrl().toString();
                    String jarNameWithExtension = urlAsString.substring(0, urlAsString.lastIndexOf("!/"));
                    String jarNameWithoutExtension = jarNameWithExtension
                            .substring(jarNameWithExtension.lastIndexOf("/") + 1);
                    // add() returns true only for names not seen before
                    if (seenArchives.add(jarNameWithoutExtension)) {
                        jarURLs.add(archive.getUrl());
                    }
                }
                mainClassNames.add(jarFileArchive.getMainClass());
                arguments.add(toArgArray(moduleLaunchRequest.getArguments()));
            }
            classLoader = ClassloaderUtils.createModuleClassloader(jarURLs.toArray(new URL[jarURLs.size()]));
        } else {
            // First, resolve modules and extract main classes - while slightly less efficient than just
            // doing the same processing after resolution, this ensures that module artifacts are processed
            // correctly for extracting their main class names. It is not possible in the general case to
            // identify, after resolution, whether a resource represents a module artifact which was part of the
            // original request or not. We will include the first module as root and the next as direct dependencies
            Coordinates root = null;
            ArrayList<Coordinates> includeCoordinates = new ArrayList<>();
            for (ModuleLaunchRequest moduleLaunchRequest : moduleLaunchRequests) {
                Coordinates moduleCoordinates = toCoordinates(moduleLaunchRequest.getModule());
                if (root == null) {
                    root = moduleCoordinates;
                } else {
                    includeCoordinates.add(toCoordinates(moduleLaunchRequest.getModule()));
                }
                Resource moduleResource = resolveModule(moduleLaunchRequest.getModule());
                JarFileArchive moduleArchive = new JarFileArchive(moduleResource.getFile());
                mainClassNames.add(moduleArchive.getMainClass());
                arguments.add(toArgArray(moduleLaunchRequest.getArguments()));
            }
            for (String include : StringUtils
                    .commaDelimitedListToStringArray(aggregateArgs.get(INCLUDE_DEPENDENCIES_ARG))) {
                includeCoordinates.add(toCoordinates(include));
            }
            // Resolve all artifacts - since modules have been specified as direct dependencies, they will take
            // precedence in the resolution order, ensuring that the already resolved artifacts will be returned as
            // part of the response.
            Resource[] libraries = moduleResolver.resolve(root,
                    includeCoordinates.toArray(new Coordinates[includeCoordinates.size()]),
                    StringUtils.commaDelimitedListToStringArray(aggregateArgs.get(EXCLUDE_DEPENDENCIES_ARG)));
            for (Resource library : libraries) {
                jarURLs.add(library.getURL());
            }
            classLoader = new URLClassLoader(jarURLs.toArray(new URL[jarURLs.size()]));
        }

        final List<Class<?>> mainClasses = new ArrayList<>();
        for (String mainClass : mainClassNames) {
            mainClasses.add(ClassUtils.forName(mainClass, classLoader));
        }
        Runnable moduleAggregatorRunner = new ModuleAggregatorRunner(classLoader, mainClasses,
                toArgArray(aggregateArgs), arguments);
        Thread moduleAggregatorRunnerThread = new Thread(moduleAggregatorRunner);
        moduleAggregatorRunnerThread.setContextClassLoader(classLoader);
        moduleAggregatorRunnerThread.setName(MODULE_AGGREGATOR_RUNNER_THREAD_NAME);
        moduleAggregatorRunnerThread.start();
    } catch (Exception e) {
        throw new RuntimeException("failed to start aggregated modules: "
                + StringUtils.collectionToCommaDelimitedString(moduleLaunchRequests), e);
    }
}

From source file:net.sf.maltcms.common.charts.overlay.nodes.OverlayNode.java

/**
 * Assembles the actions for this node by collecting registered actions
 * under "Actions/OverlayNodeActions/&lt;type&gt;" for every interface and
 * superclass of the wrapped bean, then the bean's own class, followed by
 * the default actions and the properties action.
 *
 * @param context unused; the same actions are returned regardless
 * @return the collected actions in insertion order, duplicates removed
 */
@Override
public Action[] getActions(boolean context) {
    List<?> interfaces = getAllInterfaces(getBean().getClass());
    List<?> superClasses = getAllSuperclasses(getBean().getClass());
    LinkedHashSet<Action> containerActions = new LinkedHashSet<>();
    for (Object o : interfaces) {
        Class<?> c = (Class<?>) o; // wildcard cast replaces the former raw Class cast
        containerActions.addAll(actionsForPath("Actions/OverlayNodeActions/" + c.getName()));
        containerActions.addAll(actionsForPath("Actions/OverlayNodeActions/" + c.getSimpleName()));
    }
    for (Object o : superClasses) {
        Class<?> c = (Class<?>) o;
        containerActions.addAll(actionsForPath("Actions/OverlayNodeActions/" + c.getName()));
        containerActions.addAll(actionsForPath("Actions/OverlayNodeActions/" + c.getSimpleName()));
    }
    containerActions.addAll(actionsForPath("Actions/OverlayNodeActions/" + getBean().getClass().getName()));
    containerActions
            .addAll(actionsForPath("Actions/OverlayNodeActions/" + getBean().getClass().getSimpleName()));
    // NOTE(review): the null element is presumably rendered as a menu
    // separator by the action framework — intentional, do not remove.
    containerActions.add(null);
    containerActions.addAll(actionsForPath("Actions/OverlayNodeActions/DefaultActions"));
    containerActions.add(get(PropertiesAction.class));
    return containerActions.toArray(new Action[containerActions.size()]);
}