Example usage for java.lang ClassLoader getClass

Introduction

On this page you can find example usage for java.lang.ClassLoader.getClass().

Prototype

@HotSpotIntrinsicCandidate
public final native Class<?> getClass();

Document

Returns the runtime class of this Object.
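
A typical pattern in the examples below is to call getClass() on a ClassLoader instance, either to log which concrete loader implementation is in play or to drive reflection against loader-specific methods. A minimal, self-contained sketch of that pattern (the ClassLoaderInspector class name is illustrative and not taken from any of the projects below):

import java.net.URL;
import java.net.URLClassLoader;

public class ClassLoaderInspector {
    public static void main(String[] args) {
        // Walk the class loader chain and print the runtime class of each loader.
        ClassLoader loader = ClassLoaderInspector.class.getClassLoader();
        while (loader != null) {
            System.out.println("Loader class: " + loader.getClass().getName());
            // Pre-Java 9, application loaders are usually URLClassLoaders whose URLs can be listed.
            if (loader instanceof URLClassLoader) {
                for (URL url : ((URLClassLoader) loader).getURLs()) {
                    System.out.println("  repository: " + url);
                }
            }
            loader = loader.getParent();
        }
    }
}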

Usage

From source file:org.apache.catalina.loader.WebappLoader.java

/**
 * Set the appropriate context attribute for our class path.  This
 * is required only because Jasper depends on it.
 */
private void setClassPath() {

    // Validate our current state information
    if (!(container instanceof Context))
        return;
    ServletContext servletContext = ((Context) container).getServletContext();
    if (servletContext == null)
        return;

    if (container instanceof StandardContext) {
        String baseClasspath = ((StandardContext) container).getCompilerClasspath();
        if (baseClasspath != null) {
            servletContext.setAttribute(Globals.CLASS_PATH_ATTR, baseClasspath);
            return;
        }
    }

    StringBuffer classpath = new StringBuffer();

    // Assemble the class path information from our class loader chain
    ClassLoader loader = getClassLoader();
    int layers = 0;
    int n = 0;
    while (loader != null) {
        if (!(loader instanceof URLClassLoader)) {
            String cp = getClasspath(loader);
            if (cp == null) {
                log.info("Unknown loader " + loader + " " + loader.getClass());
                break;
            } else {
                if (n > 0)
                    classpath.append(File.pathSeparator);
                classpath.append(cp);
                n++;
            }
            break;
            //continue;
        }
        URL repositories[] = ((URLClassLoader) loader).getURLs();
        for (int i = 0; i < repositories.length; i++) {
            String repository = repositories[i].toString();
            if (repository.startsWith("file://"))
                repository = repository.substring(7);
            else if (repository.startsWith("file:"))
                repository = repository.substring(5);
            else if (repository.startsWith("jndi:"))
                repository = servletContext.getRealPath(repository.substring(5));
            else
                continue;
            if (repository == null)
                continue;
            if (n > 0)
                classpath.append(File.pathSeparator);
            classpath.append(repository);
            n++;
        }
        loader = loader.getParent();
        layers++;
    }

    this.classpath = classpath.toString();

    // Store the assembled class path as a servlet context attribute
    servletContext.setAttribute(Globals.CLASS_PATH_ATTR, classpath.toString());

}

From source file:org.apache.catalina.loader.WebappLoader.java

private String getClasspath(ClassLoader loader) {
    try {
        Method m = loader.getClass().getMethod("getClasspath", new Class[] {});
        if (log.isTraceEnabled())
            log.trace("getClasspath " + m);
        if (m == null)
            return null;
        Object o = m.invoke(loader, new Object[] {});
        if (log.isDebugEnabled())
            log.debug("gotClasspath " + o);
        if (o instanceof String)
            return (String) o;
        return null;
    } catch (Exception ex) {
        if (log.isDebugEnabled())
            log.debug("getClasspath ", ex);
    }
    return null;
}

From source file:org.apache.metron.stellar.dsl.functions.resolver.ClasspathFunctionResolver.java

/**
 * Returns a set of classes that should undergo further interrogation for resolution
 * (aka discovery) of Stellar functions.
 */
@Override
public Set<Class<? extends StellarFunction>> resolvables() {

    ClassLoader[] cls = null;
    if (this.classLoaders.size() == 0) {
        LOG.warn("Using System classloader");
        cls = new ClassLoader[] { getClass().getClassLoader() };
    } else {
        cls = new ClassLoader[this.classLoaders.size()];
        for (int i = 0; i < this.classLoaders.size(); ++i) {
            ClassLoader cl = this.classLoaders.get(i);
            LOG.debug("Using classloader: " + cl.getClass().getCanonicalName());
            cls[i] = cl;
        }
    }

    FilterBuilder filterBuilder = new FilterBuilder();
    excludes.forEach(excl -> {
        if (excl != null) {
            filterBuilder.exclude(excl);
        }
    });
    includes.forEach(incl -> {
        if (incl != null) {
            filterBuilder.include(incl);
        }
    });
    Set<String> classes = new HashSet<>();
    Set<Class<? extends StellarFunction>> ret = new HashSet<>();
    for (ClassLoader cl : cls) {
        for (Class<?> c : getStellarClasses(cl)) {
            try {
                LOG.debug("{}: Found class: {}", cl.getClass().getCanonicalName(), c.getCanonicalName());
                if (includeClass(c, filterBuilder)) {
                    String className = c.getName();
                    if (!classes.contains(className)) {
                        LOG.debug("{}: Added class: {}", cl.getClass().getCanonicalName(), className);
                        ret.add((Class<? extends StellarFunction>) c);
                        classes.add(className);
                    }
                }
            } catch (Error le) {
                //we have had some error loading a stellar function.  This could mean that
                //the classpath is unstable (e.g. old copies of jars are on the classpath).
                try {
                    LOG.error("Skipping class " + c.getName() + ": " + le.getMessage()
                            + ", please check that there are not old versions of stellar functions on the classpath.",
                            le);
                } catch (Error ie) {
                    //it's possible that getName() will throw an exception if the class is VERY malformed.
                    LOG.error("Skipping class: " + le.getMessage()
                            + ", please check that there are not old versions of stellar functions on the classpath.",
                            le);
                }
            }
        }
    }
    return ret;
}

From source file:org.apache.nifi.controller.AbstractConfiguredComponent.java

/**
 * Adds all of the modules identified by the given module paths to the InstanceClassLoader for this component.
 *
 * @param modulePaths a list of module paths where each entry can be a comma-separated list of multiple module paths
 */
private void processClasspathModifiers(final Set<String> modulePaths) {
    try {
        final URL[] urls = ClassLoaderUtils.getURLsForClasspath(modulePaths, null, true);

        if (logger.isDebugEnabled()) {
            logger.debug("Adding {} resources to the classpath for {}", new Object[] { urls.length, name });
            for (URL url : urls) {
                logger.debug(url.getFile());
            }
        }

        final ClassLoader classLoader = Thread.currentThread().getContextClassLoader();

        if (!(classLoader instanceof InstanceClassLoader)) {
            // Really shouldn't happen, but if we somehow got here and don't have an InstanceClassLoader then log a warning and move on
            final String classLoaderName = classLoader == null ? "null" : classLoader.getClass().getName();
            if (logger.isWarnEnabled()) {
                logger.warn(String.format(
                        "Unable to modify the classpath for %s, expected InstanceClassLoader, but found %s",
                        name, classLoaderName));
            }
            return;
        }

        final InstanceClassLoader instanceClassLoader = (InstanceClassLoader) classLoader;
        instanceClassLoader.setInstanceResources(urls);
    } catch (MalformedURLException e) {
        // Shouldn't get here since we are suppressing errors
        logger.warn("Error processing classpath resources", e);
    }
}

From source file:org.apache.roller.weblogger.util.I18nMessages.java

/**
 * Clear tomcat cache.
 * 
 * @see com.opensymphony.xwork2.util.LocalizedTextUtil
 */
private static void clearTomcatCache() {

    ClassLoader loader = Thread.currentThread().getContextClassLoader();
    // no need for compilation here.
    Class cl = loader.getClass();

    try {
        if ("org.apache.catalina.loader.WebappClassLoader".equals(cl.getName())) {
            clearMap(cl, loader, "resourceEntries");
        } else {
            if (log.isDebugEnabled()) {
                log.debug("class loader " + cl.getName() + " is not tomcat loader.");
            }
        }
    } catch (Exception e) {
        log.warn("couldn't clear tomcat cache", e);
    }
}
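
The clearMap helper invoked above is not part of this excerpt. A hypothetical sketch of what such a helper could look like, assuming it simply clears a Map-typed field on the class loader via reflection (the field handling is an assumption, not the Roller source):

// Hypothetical sketch of a clearMap-style helper: resolve a Map-typed field
// on the class loader by name and clear its contents via reflection.
private static void clearMap(Class cl, ClassLoader loader, String fieldName) throws Exception {
    java.lang.reflect.Field field = cl.getDeclaredField(fieldName);
    field.setAccessible(true);
    Object value = field.get(loader);
    if (value instanceof java.util.Map) {
        ((java.util.Map) value).clear();
    }
}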

From source file:org.apache.sqoop.accumulo.AccumuloTestCase.java

protected static String getClasspath(File confDir) throws URISyntaxException {
    // Mostly lifted from MiniAccumuloConfigImpl#getClasspath
    ArrayList<ClassLoader> classloaders = new ArrayList<ClassLoader>();

    ClassLoader cl = AccumuloTestCase.class.getClassLoader();

    while (cl != null) {
        classloaders.add(cl);
        cl = cl.getParent();
    }

    Collections.reverse(classloaders);

    StringBuilder classpathBuilder = new StringBuilder(64);
    classpathBuilder.append(confDir.getAbsolutePath());

    // assume 0 is the system classloader and skip it
    for (int i = 1; i < classloaders.size(); i++) {
        ClassLoader classLoader = classloaders.get(i);

        if (classLoader instanceof URLClassLoader) {

            for (URL u : ((URLClassLoader) classLoader).getURLs()) {
                append(classpathBuilder, u);
            }
        } else {
            throw new IllegalArgumentException(
                    "Unknown classloader type : " + classLoader.getClass().getName());
        }
    }

    return classpathBuilder.toString();
}
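
The append helper used above is likewise not shown in this excerpt. A plausible sketch, assuming it converts each URL to a filesystem path and joins it with the platform path separator (an illustration, not the Sqoop source):

// Hypothetical sketch of an append-style helper for building the classpath string.
private static void append(StringBuilder classpathBuilder, URL url) throws URISyntaxException {
    File file = new File(url.toURI());
    classpathBuilder.append(File.pathSeparator).append(file.getAbsolutePath());
}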

From source file:org.apache.sysml.utils.lite.BuildLite.java

/**
 * Obtain a list of all the classes that have been loaded by the
 * classloader.
 * 
 * @return a list of all the classes that have been loaded by the
 *         classloader
 * @throws NoSuchFieldException
 *             if NoSuchFieldException occurs
 * @throws SecurityException
 *             if SecurityException occurs
 * @throws IllegalArgumentException
 *             if IllegalArgumentException occurs
 * @throws IllegalAccessException
 *             if IllegalAccessException occurs
 */
private static List<Class<?>> getLoadedClasses()
        throws NoSuchFieldException, SecurityException, IllegalArgumentException, IllegalAccessException {
    ClassLoader cl = BuildLite.class.getClassLoader();
    Class<?> clClazz = cl.getClass();
    while (clClazz != java.lang.ClassLoader.class) {
        clClazz = clClazz.getSuperclass();
    }
    Field f = clClazz.getDeclaredField("classes");
    f.setAccessible(true);
    @SuppressWarnings("unchecked")
    Vector<Class<?>> classes = (Vector<Class<?>>) f.get(cl);
    List<Class<?>> list = new ArrayList<>(classes);

    return list;
}
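
A short usage note: the helper above reaches into the private "classes" field of java.lang.ClassLoader, which works on Java 8 but may be blocked by the stronger encapsulation of JDK internals on newer releases. A minimal, illustrative call site:

// Illustrative call site: report how many classes the current loader has seen so far.
List<Class<?>> loaded = getLoadedClasses();
System.out.println("Classes loaded so far: " + loaded.size());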

From source file:org.apache.zeppelin.spark.dep.DependencyResolver.java

private void updateRuntimeClassPath_1_x(URL[] urls) throws SecurityException, IllegalAccessException,
        IllegalArgumentException, InvocationTargetException, NoSuchMethodException {
    ClassLoader cl = intp.classLoader().getParent();
    Method addURL;
    addURL = cl.getClass().getDeclaredMethod("addURL", new Class[] { URL.class });
    addURL.setAccessible(true);
    for (URL url : urls) {
        addURL.invoke(cl, url);
    }
}

From source file:org.apache.zeppelin.spark.dep.DependencyResolver.java

private void updateRuntimeClassPath_2_x(URL[] urls) throws SecurityException, IllegalAccessException,
        IllegalArgumentException, InvocationTargetException, NoSuchMethodException {
    ClassLoader cl = intp.classLoader().getParent();
    Method addURL;
    addURL = cl.getClass().getDeclaredMethod("addNewUrl", new Class[] { URL.class });
    addURL.setAccessible(true);
    for (URL url : urls) {
        addURL.invoke(cl, url);
    }
}
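
Both helpers above assume the parent loader exposes an addURL-style method; URLClassLoader declares a protected addURL(URL), while Zeppelin's interpreter loader exposes addNewUrl. The same reflective pattern against a plain URLClassLoader, as a hedged Java 8 sketch (newer JDKs restrict this kind of reflective access to JDK classes):

// Illustrative Java 8 sketch: reflectively invoke the protected addURL(URL) on a URLClassLoader.
static void addJarToLoader(URLClassLoader loader, URL jarUrl) throws Exception {
    Method addURL = URLClassLoader.class.getDeclaredMethod("addURL", URL.class);
    addURL.setAccessible(true);
    addURL.invoke(loader, jarUrl);
}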

From source file:org.apache.zeppelin.spark.OldSparkInterpreter.java

@Override
public void open() throws InterpreterException {
    this.enableSupportedVersionCheck = java.lang.Boolean
            .parseBoolean(getProperty("zeppelin.spark.enableSupportedVersionCheck", "true"));

    // set properties and do login before creating any spark stuff for secured cluster
    if (isYarnMode()) {
        System.setProperty("SPARK_YARN_MODE", "true");
    }
    if (getProperties().containsKey("spark.yarn.keytab")
            && getProperties().containsKey("spark.yarn.principal")) {
        try {
            String keytab = getProperties().getProperty("spark.yarn.keytab");
            String principal = getProperties().getProperty("spark.yarn.principal");
            UserGroupInformation.loginUserFromKeytab(principal, keytab);
        } catch (IOException e) {
            throw new RuntimeException("Can not pass kerberos authentication", e);
        }
    }

    conf = new SparkConf();
    URL[] urls = getClassloaderUrls();

    // Very nice discussion about how scala compiler handle classpath
    // https://groups.google.com/forum/#!topic/scala-user/MlVwo2xCCI0

    /*
     * > val env = new nsc.Settings(errLogger) > env.usejavacp.value = true > val p = new
     * Interpreter(env) > p.setContextClassLoader > Alternatively you can set the class path through
     * nsc.Settings.classpath.
     *
     * >> val settings = new Settings() >> settings.usejavacp.value = true >>
     * settings.classpath.value += File.pathSeparator + >> System.getProperty("java.class.path") >>
     * val in = new Interpreter(settings) { >> override protected def parentClassLoader =
     * getClass.getClassLoader >> } >> in.setContextClassLoader()
     */
    Settings settings = new Settings();

    // process args
    String args = getProperty("args");
    if (args == null) {
        args = "";
    }

    String[] argsArray = args.split(" ");
    LinkedList<String> argList = new LinkedList<>();
    for (String arg : argsArray) {
        argList.add(arg);
    }

    DepInterpreter depInterpreter = getParentSparkInterpreter()
            .getInterpreterInTheSameSessionByClassName(DepInterpreter.class, false);
    String depInterpreterClasspath = "";
    if (depInterpreter != null) {
        SparkDependencyContext depc = depInterpreter.getDependencyContext();
        if (depc != null) {
            List<File> files = depc.getFiles();
            if (files != null) {
                for (File f : files) {
                    if (depInterpreterClasspath.length() > 0) {
                        depInterpreterClasspath += File.pathSeparator;
                    }
                    depInterpreterClasspath += f.getAbsolutePath();
                }
            }
        }
    }

    if (Utils.isScala2_10()) {
        scala.collection.immutable.List<String> list = JavaConversions.asScalaBuffer(argList).toList();

        Object sparkCommandLine = Utils.instantiateClass("org.apache.spark.repl.SparkCommandLine",
                new Class[] { scala.collection.immutable.List.class }, new Object[] { list });

        settings = (Settings) Utils.invokeMethod(sparkCommandLine, "settings");
    } else {
        String sparkReplClassDir = getProperty("spark.repl.classdir");
        if (sparkReplClassDir == null) {
            sparkReplClassDir = System.getProperty("spark.repl.classdir");
        }
        if (sparkReplClassDir == null) {
            sparkReplClassDir = System.getProperty("java.io.tmpdir");
        }

        synchronized (sharedInterpreterLock) {
            if (outputDir == null) {
                outputDir = createTempDir(sparkReplClassDir);
            }
        }
        argList.add("-Yrepl-class-based");
        argList.add("-Yrepl-outdir");
        argList.add(outputDir.getAbsolutePath());

        String classpath = "";
        if (conf.contains("spark.jars")) {
            classpath = StringUtils.join(conf.get("spark.jars").split(","), File.separator);
        }

        if (!depInterpreterClasspath.isEmpty()) {
            if (!classpath.isEmpty()) {
                classpath += File.separator;
            }
            classpath += depInterpreterClasspath;
        }

        if (!classpath.isEmpty()) {
            argList.add("-classpath");
            argList.add(classpath);
        }

        scala.collection.immutable.List<String> list = JavaConversions.asScalaBuffer(argList).toList();

        settings.processArguments(list, true);
    }

    // set classpath for scala compiler
    PathSetting pathSettings = settings.classpath();
    String classpath = "";

    List<File> paths = currentClassPath();
    for (File f : paths) {
        if (classpath.length() > 0) {
            classpath += File.pathSeparator;
        }
        classpath += f.getAbsolutePath();
    }

    if (urls != null) {
        for (URL u : urls) {
            if (classpath.length() > 0) {
                classpath += File.pathSeparator;
            }
            classpath += u.getFile();
        }
    }

    // add dependency from DepInterpreter
    if (classpath.length() > 0) {
        classpath += File.pathSeparator;
    }
    classpath += depInterpreterClasspath;

    // add dependency from local repo
    String localRepo = getProperty("zeppelin.interpreter.localRepo");
    if (localRepo != null) {
        File localRepoDir = new File(localRepo);
        if (localRepoDir.exists()) {
            File[] files = localRepoDir.listFiles();
            if (files != null) {
                for (File f : files) {
                    if (classpath.length() > 0) {
                        classpath += File.pathSeparator;
                    }
                    classpath += f.getAbsolutePath();
                }
            }
        }
    }

    pathSettings.v_$eq(classpath);
    settings.scala$tools$nsc$settings$ScalaSettings$_setter_$classpath_$eq(pathSettings);

    // set classloader for scala compiler
    settings.explicitParentLoader_$eq(new Some<>(Thread.currentThread().getContextClassLoader()));
    BooleanSetting b = (BooleanSetting) settings.usejavacp();
    b.v_$eq(true);
    settings.scala$tools$nsc$settings$StandardScalaSettings$_setter_$usejavacp_$eq(b);

    /* Required for scoped mode.
     * In scoped mode multiple scala compiler (repl) generates class in the same directory.
     * Class names is not randomly generated and look like '$line12.$read$$iw$$iw'
     * Therefore it's possible to generated class conflict(overwrite) with other repl generated
     * class.
     *
     * To prevent generated class name conflict,
     * change prefix of generated class name from each scala compiler (repl) instance.
     *
     * In Spark 2.x, REPL generated wrapper class name should compatible with the pattern
     * ^(\$line(?:\d+)\.\$read)(?:\$\$iw)+$
     *
     * As hashCode() can return a negative integer value and the minus character '-' is invalid
     * in a package name we change it to a numeric value '0' which still conforms to the regexp.
     *
     */
    System.setProperty("scala.repl.name.line", ("$line" + this.hashCode()).replace('-', '0'));

    // To prevent 'File name too long' error on some file system.
    MutableSettings.IntSetting numClassFileSetting = settings.maxClassfileName();
    numClassFileSetting.v_$eq(128);
    settings.scala$tools$nsc$settings$ScalaSettings$_setter_$maxClassfileName_$eq(numClassFileSetting);

    synchronized (sharedInterpreterLock) {
        /* create scala repl */
        if (printREPLOutput()) {
            this.interpreter = new SparkILoop((java.io.BufferedReader) null, new PrintWriter(out));
        } else {
            this.interpreter = new SparkILoop((java.io.BufferedReader) null,
                    new PrintWriter(Console.out(), false));
        }

        interpreter.settings_$eq(settings);

        interpreter.createInterpreter();

        intp = Utils.invokeMethod(interpreter, "intp");
        Utils.invokeMethod(intp, "setContextClassLoader");
        Utils.invokeMethod(intp, "initializeSynchronous");

        if (Utils.isScala2_10()) {
            if (classOutputDir == null) {
                classOutputDir = settings.outputDirs().getSingleOutput().get();
            } else {
                // change SparkIMain class output dir
                settings.outputDirs().setSingleOutput(classOutputDir);
                ClassLoader cl = (ClassLoader) Utils.invokeMethod(intp, "classLoader");
                try {
                    Field rootField = cl.getClass().getSuperclass().getDeclaredField("root");
                    rootField.setAccessible(true);
                    rootField.set(cl, classOutputDir);
                } catch (NoSuchFieldException | IllegalAccessException e) {
                    logger.error(e.getMessage(), e);
                }
            }
        }

        if (Utils.findClass("org.apache.spark.repl.SparkJLineCompletion", true) != null) {
            completer = Utils.instantiateClass("org.apache.spark.repl.SparkJLineCompletion",
                    new Class[] { Utils.findClass("org.apache.spark.repl.SparkIMain") }, new Object[] { intp });
        } else if (Utils.findClass("scala.tools.nsc.interpreter.PresentationCompilerCompleter", true) != null) {
            completer = Utils.instantiateClass("scala.tools.nsc.interpreter.PresentationCompilerCompleter",
                    new Class[] { IMain.class }, new Object[] { intp });
        } else if (Utils.findClass("scala.tools.nsc.interpreter.JLineCompletion", true) != null) {
            completer = Utils.instantiateClass("scala.tools.nsc.interpreter.JLineCompletion",
                    new Class[] { IMain.class }, new Object[] { intp });
        }

        if (Utils.isSpark2()) {
            sparkSession = getSparkSession();
        }
        sc = getSparkContext();
        if (sc.getPoolForName("fair").isEmpty()) {
            Value schedulingMode = org.apache.spark.scheduler.SchedulingMode.FAIR();
            int minimumShare = 0;
            int weight = 1;
            Pool pool = new Pool("fair", schedulingMode, minimumShare, weight);
            sc.taskScheduler().rootPool().addSchedulable(pool);
        }

        sparkVersion = SparkVersion.fromVersionString(sc.version());
        sqlc = getSQLContext();
        dep = getDependencyResolver();
        hooks = getInterpreterGroup().getInterpreterHookRegistry();
        sparkUrl = getSparkUIUrl();
        sparkShims = SparkShims.getInstance(sc.version(), getProperties());
        sparkShims.setupSparkListener(sc.master(), sparkUrl, InterpreterContext.get());
        numReferenceOfSparkContext.incrementAndGet();

        z = new SparkZeppelinContext(sc, sparkShims, hooks,
                Integer.parseInt(getProperty("zeppelin.spark.maxResult")));

        interpret("@transient val _binder = new java.util.HashMap[String, Object]()");
        Map<String, Object> binder;
        if (Utils.isScala2_10()) {
            binder = (Map<String, Object>) getValue("_binder");
        } else {
            binder = (Map<String, Object>) getLastObject();
        }
        binder.put("sc", sc);
        binder.put("sqlc", sqlc);
        binder.put("z", z);

        if (Utils.isSpark2()) {
            binder.put("spark", sparkSession);
        }

        interpret("@transient val z = "
                + "_binder.get(\"z\").asInstanceOf[org.apache.zeppelin.spark.SparkZeppelinContext]");
        interpret("@transient val sc = " + "_binder.get(\"sc\").asInstanceOf[org.apache.spark.SparkContext]");
        interpret("@transient val sqlc = "
                + "_binder.get(\"sqlc\").asInstanceOf[org.apache.spark.sql.SQLContext]");
        interpret("@transient val sqlContext = "
                + "_binder.get(\"sqlc\").asInstanceOf[org.apache.spark.sql.SQLContext]");

        if (Utils.isSpark2()) {
            interpret("@transient val spark = "
                    + "_binder.get(\"spark\").asInstanceOf[org.apache.spark.sql.SparkSession]");
        }

        interpret("import org.apache.spark.SparkContext._");

        if (importImplicit()) {
            if (Utils.isSpark2()) {
                interpret("import spark.implicits._");
                interpret("import spark.sql");
                interpret("import org.apache.spark.sql.functions._");
            } else {
                interpret("import sqlContext.implicits._");
                interpret("import sqlContext.sql");
                interpret("import org.apache.spark.sql.functions._");
            }
        }
    }

    /* Temporary disabling DisplayUtils. see https://issues.apache.org/jira/browse/ZEPPELIN-127
     *
    // Utility functions for display
    intp.interpret("import org.apache.zeppelin.spark.utils.DisplayUtils._");
            
    // Scala implicit value for spark.maxResult
    intp.interpret("import org.apache.zeppelin.spark.utils.SparkMaxResult");
    intp.interpret("implicit val sparkMaxResult = new SparkMaxResult(" +
    Integer.parseInt(getProperty("zeppelin.spark.maxResult")) + ")");
     */

    if (Utils.isScala2_10()) {
        try {
            Method loadFiles = this.interpreter.getClass()
                    .getMethod("org$apache$spark$repl$SparkILoop$$loadFiles", Settings.class);
            loadFiles.invoke(this.interpreter, settings);
        } catch (NoSuchMethodException | SecurityException | IllegalAccessException | IllegalArgumentException
                | InvocationTargetException e) {
            throw new InterpreterException(e);
        }
    }

    // add jar from DepInterpreter
    if (depInterpreter != null) {
        SparkDependencyContext depc = depInterpreter.getDependencyContext();
        if (depc != null) {
            List<File> files = depc.getFilesDist();
            if (files != null) {
                for (File f : files) {
                    if (f.getName().toLowerCase().endsWith(".jar")) {
                        sc.addJar(f.getAbsolutePath());
                        logger.info("sc.addJar(" + f.getAbsolutePath() + ")");
                    } else {
                        sc.addFile(f.getAbsolutePath());
                        logger.info("sc.addFile(" + f.getAbsolutePath() + ")");
                    }
                }
            }
        }
    }

    // add jar from local repo
    if (localRepo != null) {
        File localRepoDir = new File(localRepo);
        if (localRepoDir.exists()) {
            File[] files = localRepoDir.listFiles();
            if (files != null) {
                for (File f : files) {
                    if (f.getName().toLowerCase().endsWith(".jar")) {
                        sc.addJar(f.getAbsolutePath());
                        logger.info("sc.addJar(" + f.getAbsolutePath() + ")");
                    } else {
                        sc.addFile(f.getAbsolutePath());
                        logger.info("sc.addFile(" + f.getAbsolutePath() + ")");
                    }
                }
            }
        }
    }

}