Example usage for java.net URLClassLoader URLClassLoader

Introduction

On this page you can find example usage for the java.net URLClassLoader constructor URLClassLoader(URL[]).

Prototype

public URLClassLoader(URL[] urls) 

Document

Constructs a new URLClassLoader for the specified URLs using the default delegation parent ClassLoader.
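
For reference, here is a minimal, self-contained sketch of the constructor in use (the JAR path and class name are hypothetical). URLClassLoader implements Closeable since Java 7, so try-with-resources releases the underlying JAR handle when the loader is no longer needed.

import java.net.URL;
import java.net.URLClassLoader;
import java.nio.file.Paths;

public class UrlClassLoaderExample {
    public static void main(String[] args) throws Exception {
        // Hypothetical JAR; going through toUri() escapes spaces and other
        // special characters that a raw file path could contain.
        URL jarUrl = Paths.get("lib", "example.jar").toUri().toURL();

        // The default delegation parent is the system (application) class loader.
        try (URLClassLoader loader = new URLClassLoader(new URL[] { jarUrl })) {
            Class<?> type = Class.forName("com.example.Main", false, loader);
            System.out.println("Loaded " + type.getName());
        }
    }
}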

Usage

From source file:com.codecrate.webstart.GenerateJnlpMojo.java

private boolean artifactContainsMainClass(Artifact artifact) {
    try {
        ClassLoader cl = new URLClassLoader(new URL[] { artifact.getFile().toURI().toURL() });
        Class.forName(mainClass, false, cl);
        return true;
    } catch (Exception e) {
        getLog().debug("Unable to find mainclass " + mainClass + " in artifact " + artifact);
    }

    return false;
}
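
The false argument to Class.forName above asks for the class to be loaded but not initialized, so static initializers inside the artifact do not run during this probe; absence of the class still surfaces as a ClassNotFoundException, which the catch clause turns into a false result. One thing the original does not do is close the loader. A variant of the same probe that also releases the JAR handle (a sketch, assuming Java 7+ and the same surrounding Mojo fields):

private boolean artifactContainsMainClass(Artifact artifact) {
    try (URLClassLoader cl = new URLClassLoader(new URL[] { artifact.getFile().toURI().toURL() })) {
        // initialize = false: resolve the class without running static initializers
        Class.forName(mainClass, false, cl);
        return true;
    } catch (Exception e) {
        getLog().debug("Unable to find mainclass " + mainClass + " in artifact " + artifact);
    }
    return false;
}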

From source file:org.apache.maven.archetype.old.DefaultOldArchetype.java

public void createArchetype(ArchetypeGenerationRequest request, File archetypeFile)
        throws ArchetypeDescriptorException, ArchetypeTemplateProcessingException {
    Map<String, String> parameters = new HashMap<String, String>();

    parameters.put("basedir", request.getOutputDirectory());

    parameters.put(Constants.PACKAGE, request.getPackage());

    parameters.put("packageName", request.getPackage());

    parameters.put(Constants.GROUP_ID, request.getGroupId());

    parameters.put(Constants.ARTIFACT_ID, request.getArtifactId());

    parameters.put(Constants.VERSION, request.getVersion());

    // ---------------------------------------------------------------------
    // Get Logger and display all parameters used
    // ---------------------------------------------------------------------
    if (getLogger().isInfoEnabled()) {
        getLogger().info("----------------------------------------------------------------------------");

        getLogger().info("Using following parameters for creating project from Old (1.x) Archetype: "
                + request.getArchetypeArtifactId() + ":" + request.getArchetypeVersion());

        getLogger().info("----------------------------------------------------------------------------");

        for (Map.Entry<String, String> entry : parameters.entrySet()) {
            String parameterName = entry.getKey();

            String parameterValue = entry.getValue();

            getLogger().info("Parameter: " + parameterName + ", Value: " + parameterValue);
        }
    }

    // ----------------------------------------------------------------------
    // Load the descriptor
    // ----------------------------------------------------------------------

    ArchetypeDescriptorBuilder builder = new ArchetypeDescriptorBuilder();

    ArchetypeDescriptor descriptor;

    URLClassLoader archetypeJarLoader;

    InputStream is = null;

    try {
        URL[] urls = new URL[1];

        urls[0] = archetypeFile.toURI().toURL(); // toURI() avoids the deprecated File.toURL() and escapes special characters

        archetypeJarLoader = new URLClassLoader(urls);

        is = getStream(ARCHETYPE_DESCRIPTOR, archetypeJarLoader);

        if (is == null) {
            is = getStream(ARCHETYPE_OLD_DESCRIPTOR, archetypeJarLoader);
        }

        if (is == null) {
            throw new ArchetypeDescriptorException(
                    "The " + ARCHETYPE_DESCRIPTOR + " descriptor cannot be found.");
        }

        descriptor = builder.build(new XmlStreamReader(is));
    } catch (IOException e) {
        throw new ArchetypeDescriptorException("Error reading the " + ARCHETYPE_DESCRIPTOR + " descriptor.", e);
    } catch (XmlPullParserException e) {
        throw new ArchetypeDescriptorException("Error reading the " + ARCHETYPE_DESCRIPTOR + " descriptor.", e);
    } finally {
        IOUtil.close(is);
    }

    // ----------------------------------------------------------------------
    //
    // ----------------------------------------------------------------------

    String artifactId = request.getArtifactId();

    File parentPomFile = new File(request.getOutputDirectory(), ARCHETYPE_POM);

    File outputDirectoryFile;

    boolean creating;
    File pomFile;
    if (parentPomFile.exists() && descriptor.isAllowPartial() && artifactId == null) {
        outputDirectoryFile = new File(request.getOutputDirectory());
        creating = false;
        pomFile = parentPomFile;
    } else {
        if (artifactId == null) {
            throw new ArchetypeTemplateProcessingException(
                    "Artifact ID must be specified when creating a new project from an archetype.");
        }

        outputDirectoryFile = new File(request.getOutputDirectory(), artifactId);
        creating = true;

        if (outputDirectoryFile.exists()) {
            if (descriptor.isAllowPartial()) {
                creating = false;
            } else {
                throw new ArchetypeTemplateProcessingException("Directory " + outputDirectoryFile.getName()
                        + " already exists - please run from a clean directory");
            }
        }

        pomFile = new File(outputDirectoryFile, ARCHETYPE_POM);
    }

    if (creating) {
        if (request.getGroupId() == null) {
            throw new ArchetypeTemplateProcessingException(
                    "Group ID must be specified when creating a new project from an archetype.");
        }

        if (request.getVersion() == null) {
            throw new ArchetypeTemplateProcessingException(
                    "Version must be specified when creating a new project from an archetype.");
        }
    }

    String outputDirectory = outputDirectoryFile.getAbsolutePath();

    String packageName = request.getPackage();

    // ----------------------------------------------------------------------
    // Set up the Velocity context
    // ----------------------------------------------------------------------

    Context context = new VelocityContext();

    context.put(Constants.PACKAGE, packageName);

    for (Map.Entry<String, String> entry : parameters.entrySet()) {
        context.put(entry.getKey(), entry.getValue());
    }

    // ----------------------------------------------------------------------
    // Process the templates
    // ----------------------------------------------------------------------

    ClassLoader old = Thread.currentThread().getContextClassLoader();

    Thread.currentThread().setContextClassLoader(archetypeJarLoader);

    Model parentModel = null;
    if (creating) {
        if (parentPomFile.exists()) {
            Reader fileReader = null;

            try {
                fileReader = ReaderFactory.newXmlReader(parentPomFile);
                MavenXpp3Reader reader = new MavenXpp3Reader();
                parentModel = reader.read(fileReader);
                if (!"pom".equals(parentModel.getPackaging())) {
                    throw new ArchetypeTemplateProcessingException(
                            "Unable to add module to the current project as it is not of packaging type 'pom'");
                }
            } catch (IOException e) {
                throw new ArchetypeTemplateProcessingException("Unable to read parent POM", e);
            } catch (XmlPullParserException e) {
                throw new ArchetypeTemplateProcessingException("Unable to read parent POM", e);
            } finally {
                IOUtil.close(fileReader);
            }

            parentModel.getModules().add(artifactId);
        }
    }

    try {
        processTemplates(pomFile, outputDirectory, context, descriptor, packageName, parentModel);
    } finally {
        Thread.currentThread().setContextClassLoader(old);
    }

    if (parentModel != null) {
        /*
        // TODO: would be nice to just write out with the xpp3 writer again, except that it loses
        // a bunch of info and reformats, so the module is just baked in as a string instead.
        FileWriter fileWriter = null;

        try
        {
            fileWriter = new FileWriter( parentPomFile );

            MavenXpp3Writer writer = new MavenXpp3Writer();
            writer.write( fileWriter, parentModel );
        }
        catch ( IOException e )
        {
            throw new ArchetypeTemplateProcessingException( "Unable to rewrite parent POM", e );
        }
        finally
        {
            IOUtil.close( fileWriter );
        }
        */
        Reader fileReader = null;
        boolean added;
        StringWriter w = new StringWriter();
        try {
            fileReader = ReaderFactory.newXmlReader(parentPomFile);
            added = addModuleToParentPom(artifactId, fileReader, w);
        } catch (IOException e) {
            throw new ArchetypeTemplateProcessingException("Unable to rewrite parent POM", e);
        } catch (DocumentException e) {
            throw new ArchetypeTemplateProcessingException("Unable to rewrite parent POM", e);
        } finally {
            IOUtil.close(fileReader);
        }

        if (added) {
            Writer out = null;
            try {
                out = WriterFactory.newXmlWriter(parentPomFile);
                IOUtil.copy(w.toString(), out);
            } catch (IOException e) {
                throw new ArchetypeTemplateProcessingException("Unable to rewrite parent POM", e);
            } finally {
                IOUtil.close(out);
            }
        }
    }

    // ----------------------------------------------------------------------
    // Log message on OldArchetype creation
    // ----------------------------------------------------------------------
    if (getLogger().isInfoEnabled()) {
        getLogger().info("project created from Old (1.x) Archetype in dir: " + outputDirectory);
    }

}
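
The detail worth copying from this example is the swap-and-restore of the thread context class loader around processTemplates: template engines such as Velocity typically locate classpath resources through Thread.currentThread().getContextClassLoader(), so the archetype JAR's loader is installed for the duration of the call and restored in a finally block even when processing fails. A stripped-down sketch of that pattern (the JAR path is a placeholder):

import java.io.File;
import java.net.URL;
import java.net.URLClassLoader;

public class ContextClassLoaderSwap {
    public static void main(String[] args) throws Exception {
        ClassLoader original = Thread.currentThread().getContextClassLoader();
        URL archetypeUrl = new File("archetype.jar").toURI().toURL(); // placeholder path
        URLClassLoader archetypeJarLoader = new URLClassLoader(new URL[] { archetypeUrl });
        Thread.currentThread().setContextClassLoader(archetypeJarLoader);
        try {
            // work that resolves resources through the context class loader,
            // e.g. Velocity template lookup
        } finally {
            // restore unconditionally so later code sees the original loader
            Thread.currentThread().setContextClassLoader(original);
        }
    }
}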

From source file:hu.bme.mit.sette.common.model.snippet.SnippetProject.java

/**
 * Creates a class loader for loading snippet project classes. The class
 * loader will have all the specified binary directories and JAR libraries
 * on its path.
 *
 * @throws SetteConfigurationException
 *             if there was a configuration error
 */
private void createClassLoader() throws SetteConfigurationException {
    try {
        // collect all bytecode resources
        List<URL> urls = new ArrayList<>();
        urls.add(settings.getSnippetBinaryDirectory().toURI().toURL());
        urls.add(settings.getInputBinaryDirectory().toURI().toURL());

        if (files != null && files.libraryFiles != null) {
            for (File libraryFile : files.libraryFiles) {
                urls.add(libraryFile.toURI().toURL());
            }
        }

        // instantiate class loader
        classLoader = new URLClassLoader(urls.toArray(new URL[urls.size()]));
    } catch (MalformedURLException e) {
        throw new SetteConfigurationException("At least one directory/file cannot be converted to an URL", e);
    }
}
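
A detail this example relies on: URLClassLoader treats a URL ending in '/' as a directory of classes and any other URL as a JAR file, and File.toURI() appends that trailing slash automatically when the path refers to an existing directory. That is why the same toURI().toURL() chain works for both the binary directories and the library JARs above. A quick illustration (paths are hypothetical):

import java.io.File;
import java.net.URL;

public class UrlKinds {
    public static void main(String[] args) throws Exception {
        // If build/classes exists as a directory when toURI() is called, the
        // resulting URL ends in '/' and URLClassLoader scans it for classes.
        URL dirUrl = new File("build/classes").toURI().toURL();

        // A plain file URL (no trailing slash) is opened as a JAR.
        URL jarUrl = new File("lib/dep.jar").toURI().toURL();

        System.out.println(dirUrl); // e.g. file:/.../build/classes/
        System.out.println(jarUrl); // e.g. file:/.../lib/dep.jar
    }
}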

From source file:SearchlistClassLoader.java

/**
 *  Adds a (non-shared) URL to the searchlist.
 *
 *  Creates a new URLClassLoader and adds it to the searchlist as a
 *  non-shared classloader.
 *
 *  @param url the URL to add to the searchlist.
 */
public void add(URL url) {
    Loader ldr = new Loader(new URLClassLoader(new URL[] { url }), false);

    // store loaders in order in list
    if (list == null)
        list = new Vector(16);
    list.add(ldr);

    // store non-shared loaders after shared loaders in search
    if (search == null)
        search = new Vector(16);
    search.add(ldr);
}

From source file:org.kie.workbench.common.services.backend.compiler.impl.classloader.CompilerClassloaderUtils.java

private static Optional<ClassLoader> buildResult(List<URL> urls) {
    if (urls.isEmpty()) {
        return Optional.empty();
    } else {
        URLClassLoader urlClassLoader = new URLClassLoader(urls.toArray(new URL[urls.size()]));
        return Optional.of(urlClassLoader);
    }
}

From source file:com.opengamma.maven.scripts.ScriptableScriptGeneratorMojo.java

private static ClassLoader getResourceLoader(String resourceArtifact, MavenProject project)
        throws MojoExecutionException, MalformedURLException {
    if (StringUtils.isBlank(resourceArtifact)) {
        return ScriptableScriptGeneratorMojo.class.getClassLoader();
    }
    File artifactFile = MojoUtils.getArtifactFile(resourceArtifact, project);
    return new URLClassLoader(new URL[] { artifactFile.toURI().toURL() });
}

From source file:org.pentaho.reporting.libraries.base.util.ClassQueryTool.java

/**
 * Processes all entries from a given directory, ignoring any subdirectory contents. If the directory contains
 * sub-directories, these are not searched for JAR or ZIP files.
 * <p/>
 * In addition to the directory given as a parameter, the directories and JAR/ZIP files on the classpath are also
 * searched for entries.
 * <p/>
 * If directory is null, only the classpath is searched.
 *
 * @param directory the directory to be searched, or null to just use the classpath.
 * @throws IOException       if an error occurred while loading the resources from the directory.
 * @throws SecurityException if access to the system properties or access to the classloader is restricted.
 * @noinspection AccessOfSystemProperties
 */
public void processDirectory(final File directory) throws IOException {
    final ArrayList<URL> allURLs = new ArrayList<URL>();
    final ArrayList<URL> jarURLs = new ArrayList<URL>();
    final ArrayList<File> directoryURLs = new ArrayList<File>();

    final String classpath = System.getProperty("java.class.path");
    final String pathSeparator = System.getProperty("path.separator");
    final StringTokenizer tokenizer = new StringTokenizer(classpath, pathSeparator);

    while (tokenizer.hasMoreTokens()) {
        final String pathElement = tokenizer.nextToken();

        final File directoryOrJar = new File(pathElement);
        final File file = directoryOrJar.getAbsoluteFile();
        if (file.isDirectory() && file.exists() && file.canRead()) {
            allURLs.add(file.toURI().toURL());
            directoryURLs.add(file);
            continue;
        }

        if (!file.isFile() || (file.exists() == false) || (file.canRead() == false)) {
            continue;
        }

        final String fileName = file.getName();
        if (fileName.endsWith(".jar") || fileName.endsWith(".zip")) {
            allURLs.add(file.toURI().toURL());
            jarURLs.add(file.toURI().toURL());
        }
    }

    if (directory != null && directory.isDirectory()) {
        final File[] driverFiles = directory.listFiles();
        for (int i = 0; i < driverFiles.length; i++) {
            final File file = driverFiles[i];
            if (file.isDirectory() && file.exists() && file.canRead()) {
                allURLs.add(file.toURI().toURL());
                directoryURLs.add(file);
                continue;
            }

            if (!file.isFile() || (file.exists() == false) || (file.canRead() == false)) {
                continue;
            }

            final String fileName = file.getName();
            if (fileName.endsWith(".jar") || fileName.endsWith(".zip")) {
                allURLs.add(file.toURI().toURL());
                jarURLs.add(file.toURI().toURL());
            }
        }
    }

    final URL[] urlsArray = jarURLs.toArray(new URL[jarURLs.size()]);
    final File[] dirsArray = directoryURLs.toArray(new File[directoryURLs.size()]);
    final URL[] allArray = allURLs.toArray(new URL[allURLs.size()]);

    for (int i = 0; i < allArray.length; i++) {
        final URL url = allArray[i];
        logger.debug(url);
    }
    for (int i = 0; i < urlsArray.length; i++) {
        final URL url = urlsArray[i];
        final URLClassLoader classLoader = new URLClassLoader(allArray);
        processJarFile(classLoader, url);
    }
    for (int i = 0; i < dirsArray.length; i++) {
        final File file = dirsArray[i];
        final URLClassLoader classLoader = new URLClassLoader(allArray);
        processDirectory(classLoader, file, "");
    }
}

From source file:edu.uci.ics.asterix.event.service.AsterixEventServiceUtil.java

private static void injectAsterixLogPropertyFile(String asterixInstanceDir, AsterixInstance asterixInstance)
        throws IOException, EventException {
    final String asterixJarPath = asterixJarPath(asterixInstance, asterixInstanceDir);
    File sourceJar1 = new File(asterixJarPath);
    Properties txnLogProperties = new Properties();
    URLClassLoader urlClassLoader = new URLClassLoader(new URL[] { sourceJar1.toURI().toURL() });
    InputStream in = urlClassLoader.getResourceAsStream(TXN_LOG_CONFIGURATION_FILE);
    if (in != null) {
        txnLogProperties.load(in);
    }

    writeAsterixLogConfigurationFile(asterixInstance, txnLogProperties);

    File sourceJar2 = new File(asterixJarPath);
    File replacementFile = new File(asterixInstanceDir + File.separator + "log.properties");
    replaceInJar(sourceJar2, TXN_LOG_CONFIGURATION_FILE, replacementFile);

    new File(asterixInstanceDir + File.separator + "log.properties").delete();
}
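
Here the URLClassLoader is used purely as a way to read a resource packaged inside a single JAR rather than to load classes. A minimal sketch of that idiom, with the loader and stream closed via try-with-resources (JAR path and entry name are hypothetical):

import java.io.File;
import java.io.InputStream;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.Properties;

public class ReadPropertiesFromJar {
    public static void main(String[] args) throws Exception {
        URL jarUrl = new File("asterix-app.jar").toURI().toURL(); // hypothetical
        Properties props = new Properties();
        try (URLClassLoader loader = new URLClassLoader(new URL[] { jarUrl });
                InputStream in = loader.getResourceAsStream("log.properties")) {
            if (in != null) {
                props.load(in);
            }
        }
        System.out.println(props);
    }
}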

From source file:com.streamsets.datacollector.cluster.TestShellClusterProvider.java

@Before
public void setup() throws Exception {
    tempDir = File.createTempFile(getClass().getSimpleName(), "");
    Assert.assertTrue(tempDir.delete());
    Assert.assertTrue(tempDir.mkdir());
    File libexecDir = new File(tempDir, "libexec");
    Assert.assertTrue(libexecDir.mkdir());
    sparkManagerShell = new File(libexecDir, "_cluster-manager");
    providerTemp = new File(tempDir, "provider-temp");
    Assert.assertTrue(providerTemp.mkdir());
    Assert.assertTrue(sparkManagerShell.createNewFile());
    sparkManagerShell.setExecutable(true);
    MockSystemProcess.reset();
    etcDir = new File(tempDir, "etc-src");
    Assert.assertTrue(etcDir.mkdir());
    File sdcProperties = new File(etcDir, "sdc.properties");
    Assert.assertTrue(sdcProperties.createNewFile());
    File log4jPropertyDummyFile = new File(etcDir, SDC_TEST_PREFIX + RuntimeInfo.LOG4J_PROPERTIES);
    Assert.assertTrue(log4jPropertyDummyFile.createNewFile());
    resourcesDir = new File(tempDir, "resources-src");
    Assert.assertTrue(resourcesDir.mkdir());
    Assert.assertTrue((new File(resourcesDir, "dir")).mkdir());
    File resourcesSubDir = new File(resourcesDir, "dir");
    File resourceFile = new File(resourcesSubDir, "core-site.xml");
    resourceFile.createNewFile();
    Assert.assertTrue((new File(resourcesDir, "file")).createNewFile());
    webDir = new File(tempDir, "static-web-dir-src");
    Assert.assertTrue(webDir.mkdir());
    File someWebFile = new File(webDir, "somefile");
    Assert.assertTrue(someWebFile.createNewFile());
    bootstrapLibDir = new File(tempDir, "bootstrap-lib");
    Assert.assertTrue(bootstrapLibDir.mkdir());
    File bootstrapMainLibDir = new File(bootstrapLibDir, "main");
    Assert.assertTrue(bootstrapMainLibDir.mkdirs());
    File bootstrapClusterLibDir = new File(bootstrapLibDir, "cluster");
    Assert.assertTrue(bootstrapClusterLibDir.mkdirs());
    Assert.assertTrue(new File(bootstrapMainLibDir, "streamsets-datacollector-bootstrap-1.7.0.0-SNAPSHOT.jar")
            .createNewFile());
    Assert.assertTrue(
            new File(bootstrapClusterLibDir, "streamsets-datacollector-cluster-bootstrap-1.7.0.0-SNAPSHOT.jar")
                    .createNewFile());
    Assert.assertTrue(new File(bootstrapClusterLibDir,
            "streamsets-datacollector-cluster-bootstrap-api-1.7.0.0-SNAPSHOT.jar").createNewFile());
    Assert.assertTrue(new File(bootstrapClusterLibDir, "streamsets-datacollector-mesos-bootstrap-1.7.0.0.jar")
            .createNewFile());
    Assert.assertTrue(
            new File(bootstrapClusterLibDir, "streamsets-datacollector-mapr-cluster-bootstrap-1.7.0.0.jar")
                    .createNewFile());
    List<Config> configs = new ArrayList<>();
    configs.add(new Config("clusterSlaveMemory", 512));
    configs.add(new Config("clusterSlaveJavaOpts", ""));
    configs.add(new Config("clusterKerberos", false));
    configs.add(new Config("kerberosPrincipal", ""));
    configs.add(new Config("kerberosKeytab", ""));
    configs.add(new Config("executionMode", ExecutionMode.CLUSTER_YARN_STREAMING));
    configs.add(new Config("sparkConfigs", Arrays.asList(new HashMap<String, String>() {
        {
            put("key", "a");
            put("value", "b");
        }
    })));
    pipelineConf = new PipelineConfiguration(PipelineStoreTask.SCHEMA_VERSION, PipelineConfigBean.VERSION,
            "pipelineId", UUID.randomUUID(), "label", null, configs, null,
            ImmutableList.of(MockStages.createSource("s", ImmutableList.of("S"))),
            MockStages.getErrorStageConfig(), MockStages.getStatsAggregatorStageConfig(),
            Collections.emptyList(), Collections.emptyList());
    pipelineConf.setPipelineInfo(new PipelineInfo("name", "label", "desc", null, null, "aaa", null, null, null,
            true, null, null, "x"));
    File sparkKafkaJar = new File(tempDir, "spark-streaming-kafka-1.2.jar");
    File avroJar = new File(tempDir, "avro-1.7.7.jar");
    File avroMapReduceJar = new File(tempDir, "avro-mapred-1.7.7.jar");
    File maprFsJar = new File(tempDir, "maprfs-5.1.0.jar");
    Assert.assertTrue(sparkKafkaJar.createNewFile());
    Assert.assertTrue(avroJar.createNewFile());
    Assert.assertTrue(avroMapReduceJar.createNewFile());
    Assert.assertTrue(maprFsJar.createNewFile());
    classLoader = new URLClassLoader(new URL[] { sparkKafkaJar.toURI().toURL(), avroJar.toURI().toURL(),
            avroMapReduceJar.toURI().toURL(), maprFsJar.toURI().toURL() }) {
        public String getType() {
            return ClusterModeConstants.USER_LIBS;
        }
    };
    stageLibrary = MockStages.createStageLibrary(classLoader);
    env = new HashMap<>();

    pipelineConfigBean = new PipelineConfigBean();
    pipelineConfigBean.clusterLauncherEnv = env;

    sourceInfo = new HashMap<>();
    sourceInfo.put(ClusterModeConstants.NUM_EXECUTORS_KEY, "64");
    URLClassLoader emptyCL = new URLClassLoader(new URL[0]);
    RuntimeInfo runtimeInfo = new StandaloneRuntimeInfo(SDC_TEST_PREFIX, null, Arrays.asList(emptyCL), tempDir);
    File configFile = new File(runtimeInfo.getConfigDir(), SDC_TEST_PREFIX + RuntimeInfo.LOG4J_PROPERTIES);
    File f = new File(runtimeInfo.getConfigDir());
    Assert.assertTrue(f.mkdirs());
    Assert.assertTrue(configFile.createNewFile());
    sparkProvider = Mockito
            .spy(new ShellClusterProvider(runtimeInfo, null, Mockito.mock(Configuration.class), stageLibrary));
    Mockito.doReturn(new MockSystemProcessFactory()).when(sparkProvider).getSystemProcessFactory();

    Mockito.doReturn(ShellClusterProvider.CLUSTER_BOOTSTRAP_API_JAR_PATTERN).when(sparkProvider)
            .findClusterBootstrapJar(Mockito.eq(ExecutionMode.CLUSTER_BATCH),
                    Mockito.any(PipelineConfiguration.class), Mockito.any(StageLibraryTask.class));
    Mockito.doReturn(ShellClusterProvider.CLUSTER_BOOTSTRAP_JAR_PATTERN).when(sparkProvider)
            .findClusterBootstrapJar(Mockito.eq(ExecutionMode.CLUSTER_YARN_STREAMING),
                    Mockito.any(PipelineConfiguration.class), Mockito.any(StageLibraryTask.class));
    Mockito.doReturn(ShellClusterProvider.CLUSTER_BOOTSTRAP_MESOS_JAR_PATTERN).when(sparkProvider)
            .findClusterBootstrapJar(Mockito.eq(ExecutionMode.CLUSTER_MESOS_STREAMING),
                    Mockito.any(PipelineConfiguration.class), Mockito.any(StageLibraryTask.class));
}

From source file:org.kie.workbench.common.services.backend.compiler.impl.classloader.CompilerClassloaderUtils.java

public static Optional<ClassLoader> createClassloaderFromStringDeps(List<String> depsProject) {
    List<URL> deps = readAllDepsAsUrls(depsProject);
    if (deps.isEmpty()) {
        return Optional.empty();
    } else {
        URLClassLoader urlClassLoader = new URLClassLoader(deps.toArray(new URL[deps.size()]));
        return Optional.of(urlClassLoader);
    }
}