Example usage for java.lang ClassLoader getSystemClassLoader

List of usage examples for java.lang ClassLoader getSystemClassLoader

Introduction

On this page you can find example usage for java.lang ClassLoader getSystemClassLoader.

Prototype

@CallerSensitive
public static ClassLoader getSystemClassLoader() 

Document

Returns the system class loader.
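
As a minimal standalone sketch (not taken from the projects below) of how the system class loader is typically used to read a classpath resource; the resource name "config.properties" is only illustrative:

import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;

public class SystemClassLoaderExample {
    public static void main(String[] args) throws IOException {
        // The system (application) class loader, which normally loads classes from the classpath.
        ClassLoader systemCl = ClassLoader.getSystemClassLoader();

        // Read a classpath resource through the system class loader.
        // try-with-resources skips close() if the resource was not found (in == null).
        try (InputStream in = systemCl.getResourceAsStream("config.properties")) {
            if (in == null) {
                System.out.println("config.properties not found on the classpath");
                return;
            }
            Properties props = new Properties();
            props.load(in);
            System.out.println("Loaded " + props.size() + " properties");
        }
    }
}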

Usage

From source file:org.springframework.data.hadoop.mapreduce.ExecutionUtils.java

/**
 * Utility for doing static init for preventing Hadoop leaks during initialization (mainly based on TCCL).
 */
static void preventHadoopLeaks(ClassLoader hadoopCL) {

    ClassLoader cl = Thread.currentThread().getContextClassLoader();
    try {
        // set the sysCL as the TCCL
        Thread.currentThread().setContextClassLoader(ClassLoader.getSystemClassLoader());

        // fix org.apache.hadoop.mapred.Counters#MAX_COUNTER_LIMIT
        // calling constructor since class loading is lazy
        new Counters();
    } finally {
        Thread.currentThread().setContextClassLoader(cl);
    }
}

From source file:org.gcaldaemon.core.GCalUtilitiesV3.java

private static Credential authorize() throws Exception {
    // load client secrets
    GoogleClientSecrets clientSecrets = GoogleClientSecrets.load(JSON_FACTORY, new InputStreamReader(
            ClassLoader.getSystemClassLoader().getResourceAsStream("client_secrets.json")));
    if (clientSecrets.getDetails().getClientId().startsWith("Enter")
            || clientSecrets.getDetails().getClientSecret().startsWith("Enter ")) {
        System.out.println("Enter Client ID and Secret from https://code.google.com/apis/console/?api=calendar "
                + "into calendar-cmdline-sample/src/main/resources/client_secrets.json");
        System.exit(1);
    }
    // set up authorization code flow
    GoogleAuthorizationCodeFlow flow = new GoogleAuthorizationCodeFlow.Builder(httpTransport, JSON_FACTORY,
            clientSecrets, Collections.singleton(CalendarScopes.CALENDAR)).setDataStoreFactory(dataStoreFactory)
                    .build();
    // authorize
    return new AuthorizationCodeInstalledApp(flow, new LocalServerReceiver()).authorize("user");
}

From source file:org.wso2.extension.siddhi.io.jms.source.client.JMSClient.java

public void sendJMSEvents(List<String> messageList, String topicName, String queueName, String format,
        String broker, String providerURL) {
    if (format == null || "map".equals(format)) {
        format = "csv";
    }
    if ("".equalsIgnoreCase(broker)) {
        broker = "activemq";
    }
    Session session = null;
    Properties properties = new Properties();
    if (!"activemq".equalsIgnoreCase(broker) && !"mb".equalsIgnoreCase(broker)
            && !"qpid".equalsIgnoreCase(broker)) {
        log.error("Please enter a valid JMS message broker. (ex: activemq, mb, qpid");
        return;
    }
    try {
        if (topicName != null && !"".equalsIgnoreCase(topicName)) {
            TopicConnection topicConnection;
            TopicConnectionFactory connFactory = null;
            if ("activemq".equalsIgnoreCase(broker)) {
                properties.load(ClassLoader.getSystemClassLoader().getResourceAsStream("activemq.properties"));
                // to provide custom provider urls
                if (providerURL != null) {
                    properties.put(Context.PROVIDER_URL, providerURL);
                }
                Context context = new InitialContext(properties);
                connFactory = (TopicConnectionFactory) context.lookup("ConnectionFactory");
            } else if ("mb".equalsIgnoreCase(broker)) {
                properties.load(ClassLoader.getSystemClassLoader().getResourceAsStream("mb.properties"));
                Context context = new InitialContext(properties);
                connFactory = (TopicConnectionFactory) context.lookup("qpidConnectionFactory");
            } else if ("qpid".equalsIgnoreCase(broker)) {
                properties.load(ClassLoader.getSystemClassLoader().getResourceAsStream("qpid.properties"));
                Context context = new InitialContext(properties);
                connFactory = (TopicConnectionFactory) context.lookup("qpidConnectionFactory");
            }
            if (connFactory != null) {
                topicConnection = connFactory.createTopicConnection();
                topicConnection.start();
                session = topicConnection.createTopicSession(false, Session.AUTO_ACKNOWLEDGE);
                if (session != null) {
                    Topic topic = session.createTopic(topicName);
                    MessageProducer producer = session.createProducer(topic);
                    //List<String> messagesList = JMSClientUtil.readFile(filePath);
                    try {
                        if ("csv".equalsIgnoreCase(format)) {
                            log.info("Sending Map messages on '" + topicName + "' topic");
                            JMSClientUtil.publishMapMessage(producer, session, messageList);

                        } else {
                            log.info("Sending  " + format + " messages on '" + topicName + "' topic");
                            JMSClientUtil.publishTextMessage(producer, session, messageList);
                        }
                        log.info("All Order Messages sent");
                    } catch (JMSException e) {
                        log.error("Cannot subscribe." + e.getMessage(), e);
                    } catch (IOException e) {
                        log.error("Error when reading the data file." + e.getMessage(), e);
                    } finally {
                        producer.close();
                        session.close();
                        topicConnection.stop();
                    }
                }
            } else {
                log.error("Error when creating connection factory. Please check necessary jar files");
            }
        } else if (queueName != null && !queueName.equalsIgnoreCase("")) {
            QueueConnection queueConnection;
            QueueConnectionFactory connFactory = null;
            if ("activemq".equalsIgnoreCase(broker)) {
                properties.load(ClassLoader.getSystemClassLoader().getResourceAsStream("activemq.properties"));
                // to provide custom provider urls
                if (providerURL != null) {
                    properties.put(Context.PROVIDER_URL, providerURL);
                }
                Context context = new InitialContext(properties);
                connFactory = (QueueConnectionFactory) context.lookup("ConnectionFactory");
            } else if ("mb".equalsIgnoreCase(broker)) {
                properties.load(ClassLoader.getSystemClassLoader().getResourceAsStream("mb.properties"));
                Context context = new InitialContext(properties);
                connFactory = (QueueConnectionFactory) context.lookup("qpidConnectionFactory");
            } else if ("qpid".equalsIgnoreCase(broker)) {
                properties.load(ClassLoader.getSystemClassLoader().getResourceAsStream("qpid.properties"));
                Context context = new InitialContext(properties);
                connFactory = (QueueConnectionFactory) context.lookup("qpidConnectionFactory");
            }
            if (connFactory != null) {
                queueConnection = connFactory.createQueueConnection();
                queueConnection.start();
                session = queueConnection.createQueueSession(false, Session.AUTO_ACKNOWLEDGE);
                if (session != null) {
                    Queue queue = session.createQueue(queueName);
                    MessageProducer producer = session.createProducer(queue);
                    //List<String> messagesList = JMSClientUtil.readFile(filePath);
                    try {
                        if ("csv".equalsIgnoreCase(format)) {
                            log.info("Sending Map messages on '" + queueName + "' queue");
                            JMSClientUtil.publishMapMessage(producer, session, messageList);

                        } else {
                            log.info("Sending  " + format + " messages on '" + queueName + "' queue");
                            JMSClientUtil.publishTextMessage(producer, session, messageList);
                        }
                    } catch (JMSException e) {
                        log.error("Cannot subscribe." + e.getMessage(), e);
                    } catch (IOException e) {
                        log.error("Error when reading the data file." + e.getMessage(), e);
                    } finally {
                        producer.close();
                        session.close();
                        queueConnection.stop();
                    }
                }
            } else {
                log.error("Error when creating connection factory. Please check necessary jar files");
            }
        } else {
            log.error("Enter queue name or topic name to be published!");
        }
    } catch (Exception e) {
        log.error("Error when publishing" + e.getMessage(), e);
    }
}

From source file:org.wso2.das.integration.tests.analytics.execution.CompressedEventAnalyticsTestsCase.java

private List<Record> generateCompressedEventsRecords(int tenantId, String tableName, boolean generateRecordIds)
        throws AnalyticsException {
    List<Record> records = new ArrayList<>();
    Map<String, Object> values;
    ClassLoader classLoader = ClassLoader.getSystemClassLoader();
    String[] sampleData = null;
    try {
        sampleData = IOUtils
                .toString(classLoader.getResourceAsStream(
                        "analytics" + File.separator + "sample-data" + File.separator + "CompressedEventData"))
                .split("\n");
    } catch (IOException e) {
        throw new AnalyticsException(e.getMessage());
    }
    long timestamp;
    for (int j = 0; j < sampleData.length; j++) {
        values = new HashMap<>();
        String[] fields = sampleData[j].split(",", 2);
        values.put(COMPRESSED_FLAG_FIELD, Boolean.parseBoolean(fields[0]));
        values.put(FLOW_DATA_FIELD, fields[1]);
        timestamp = System.currentTimeMillis();
        records.add(new Record(generateRecordIds ? GenericUtils.generateRecordID() : null, tenantId, tableName,
                values, timestamp));
    }
    return records;
}

From source file:org.seedstack.seed.core.utils.SeedReflectionUtils.java

/**
 * Find the most complete class loader by trying the current thread context class loader, then the classloader of the
 * given class if any, then the class loader that loaded SEED core, then the system class loader.
 *
 * @param target the class to get the class loader from if no current thread context class loader is present. May be null.
 * @return the most complete class loader it found.
 */
public static ClassLoader findMostCompleteClassLoader(Class<?> target) {
    // Try the most complete class loader we can get
    ClassLoader classLoader = Thread.currentThread().getContextClassLoader();

    // Then fallback to the class loader from a specific class given
    if (classLoader == null && target != null) {
        classLoader = target.getClassLoader();
    }

    // Then fallback to the class loader that loaded SEED core
    if (classLoader == null) {
        classLoader = SeedReflectionUtils.class.getClassLoader();
    }

    // Then fallback to the system class loader
    if (classLoader == null) {
        classLoader = ClassLoader.getSystemClassLoader();
    }

    return classLoader;
}

From source file:com.eucalyptus.simpleworkflow.common.client.WorkflowClientStandalone.java

private void handleClassFile(final File f, final JarEntry j) throws IOException, RuntimeException {
    final String classGuess = j.getName().replaceAll("/", ".").replaceAll("\\.class.{0,1}", "");
    try {
        final Class candidate = ClassLoader.getSystemClassLoader().loadClass(classGuess);
        final Ats ats = Ats.inClassHierarchy(candidate);
        if ((this.allowedClassNames.isEmpty() || this.allowedClassNames.contains(candidate.getName())
                || this.allowedClassNames.contains(candidate.getCanonicalName())
                || this.allowedClassNames.contains(candidate.getSimpleName()))
                && (ats.has(Workflow.class) || ats.has(Activities.class))
                && !Modifier.isAbstract(candidate.getModifiers())
                && !Modifier.isInterface(candidate.getModifiers()) && !candidate.isLocalClass()
                && !candidate.isAnonymousClass()) {
            if (ats.has(Workflow.class)) {
                this.workflowClasses.add(candidate);
                LOG.debug("Discovered workflow implementation class: " + candidate.getName());
            } else {
                this.activityClasses.add(candidate);
                LOG.debug("Discovered activity implementation class: " + candidate.getName());
            }
        }
    } catch (final ClassNotFoundException e) {
        LOG.debug(e, e);
    }
}

From source file:com.alibaba.jstorm.daemon.worker.WorkerData.java

@SuppressWarnings({ "rawtypes", "unchecked" })
public WorkerData(Map conf, IContext context, String topology_id, String supervisor_id, int port,
        String worker_id, String jar_path) throws Exception {

    this.conf = conf;
    this.context = context;
    this.topologyId = topology_id;
    this.supervisorId = supervisor_id;
    this.port = port;
    this.workerId = worker_id;

    this.shutdown = new AtomicBoolean(false);

    this.monitorEnable = new AtomicBoolean(true);
    this.topologyStatus = StatusType.active;

    if (StormConfig.cluster_mode(conf).equals("distributed")) {
        String pidDir = StormConfig.worker_pids_root(conf, worker_id);
        JStormServerUtils.createPid(pidDir);
    }

    // create zk interface
    this.zkClusterstate = ZkTool.mk_distributed_cluster_state(conf);
    this.zkCluster = Cluster.mk_storm_cluster_state(zkClusterstate);

    Map rawConf = StormConfig.read_supervisor_topology_conf(conf, topology_id);
    this.stormConf = new HashMap<Object, Object>();
    this.stormConf.putAll(conf);
    this.stormConf.putAll(rawConf);

    JStormMetrics.setTopologyId(topology_id);
    JStormMetrics.setPort(port);
    JStormMetrics.setDebug(ConfigExtension.isEnableMetricDebug(stormConf));
    JStormMetrics.setEnabled(ConfigExtension.isEnableMetrics(stormConf));
    JStormMetrics.addDebugMetrics(ConfigExtension.getDebugMetricNames(stormConf));
    AsmMetric.setSampleRate(ConfigExtension.getMetricSampleRate(stormConf));

    ConfigExtension.setLocalSupervisorId(stormConf, supervisorId);
    ConfigExtension.setLocalWorkerId(stormConf, workerId);
    ConfigExtension.setLocalWorkerPort(stormConf, port);
    ControlMessage.setPort(port);

    JStormMetrics.registerWorkerTopologyMetric(
            JStormMetrics.workerMetricName(MetricDef.CPU_USED_RATIO, MetricType.GAUGE),
            new AsmGauge(new Gauge<Double>() {
                @Override
                public Double getValue() {
                    return JStormUtils.getCpuUsage();
                }
            }));

    JStormMetrics.registerWorkerTopologyMetric(
            JStormMetrics.workerMetricName(MetricDef.MEMORY_USED, MetricType.GAUGE),
            new AsmGauge(new Gauge<Double>() {
                @Override
                public Double getValue() {
                    return JStormUtils.getMemUsage();
                }
            }));

    JStormMetrics.registerWorkerMetric(JStormMetrics.workerMetricName(MetricDef.DISK_USAGE, MetricType.GAUGE),
            new AsmGauge(new Gauge<Double>() {
                @Override
                public Double getValue() {
                    return JStormUtils.getDiskUsage();
                }
            }));

    LOG.info("Worker Configuration " + stormConf);

    try {
        boolean enableClassloader = ConfigExtension.isEnableTopologyClassLoader(stormConf);
        boolean enableDebugClassloader = ConfigExtension.isEnableClassloaderDebug(stormConf);

        if (jar_path == null && enableClassloader == true
                && !conf.get(Config.STORM_CLUSTER_MODE).equals("local")) {
            LOG.error("enable classloader, but not app jar");
            throw new InvalidParameterException();
        }

        URL[] urlArray = new URL[0];
        if (jar_path != null) {
            String[] paths = jar_path.split(":");
            Set<URL> urls = new HashSet<URL>();
            for (String path : paths) {
                if (StringUtils.isBlank(path))
                    continue;
                URL url = new URL("File:" + path);
                urls.add(url);
            }
            urlArray = urls.toArray(new URL[0]);
        }

        WorkerClassLoader.mkInstance(urlArray, ClassLoader.getSystemClassLoader(),
                ClassLoader.getSystemClassLoader().getParent(), enableClassloader, enableDebugClassloader);
    } catch (Exception e) {
        LOG.error("init jarClassLoader error!", e);
        throw new InvalidParameterException();
    }

    if (this.context == null) {
        this.context = TransportFactory.makeContext(stormConf);
    }

    boolean disruptorUseSleep = ConfigExtension.isDisruptorUseSleep(stormConf);
    DisruptorQueue.setUseSleep(disruptorUseSleep);
    boolean isLimited = ConfigExtension.getTopologyBufferSizeLimited(stormConf);
    DisruptorQueue.setLimited(isLimited);
    LOG.info("Disruptor use sleep:" + disruptorUseSleep + ", limited size:" + isLimited);

    // this.transferQueue = new LinkedBlockingQueue<TransferData>();
    int buffer_size = Utils.getInt(stormConf.get(Config.TOPOLOGY_TRANSFER_BUFFER_SIZE));
    WaitStrategy waitStrategy = (WaitStrategy) JStormUtils.createDisruptorWaitStrategy(stormConf);
    this.transferQueue = DisruptorQueue.mkInstance("TotalTransfer", ProducerType.MULTI, buffer_size,
            waitStrategy);
    this.transferQueue.consumerStarted();
    this.sendingQueue = DisruptorQueue.mkInstance("TotalSending", ProducerType.MULTI, buffer_size,
            waitStrategy);
    this.sendingQueue.consumerStarted();

    this.nodeportSocket = new ConcurrentHashMap<WorkerSlot, IConnection>();
    this.taskNodeport = new ConcurrentHashMap<Integer, WorkerSlot>();
    this.workerToResource = new ConcurrentSkipListSet<ResourceWorkerSlot>();
    this.innerTaskTransfer = new ConcurrentHashMap<Integer, DisruptorQueue>();
    this.deserializeQueues = new ConcurrentHashMap<Integer, DisruptorQueue>();
    this.tasksToComponent = new ConcurrentHashMap<Integer, String>();
    this.componentToSortedTasks = new ConcurrentHashMap<String, List<Integer>>();

    Assignment assignment = zkCluster.assignment_info(topologyId, null);
    if (assignment == null) {
        String errMsg = "Failed to get Assignment of " + topologyId;
        LOG.error(errMsg);
        throw new RuntimeException(errMsg);
    }
    workerToResource.addAll(assignment.getWorkers());

    // get current worker's task list

    this.taskids = assignment.getCurrentWorkerTasks(supervisorId, port);
    if (taskids.size() == 0) {
        throw new RuntimeException("No tasks running current workers");
    }
    LOG.info("Current worker taskList:" + taskids);

    // deserialize topology code from local dir
    rawTopology = StormConfig.read_supervisor_topology_code(conf, topology_id);
    sysTopology = Common.system_topology(stormConf, rawTopology);

    generateMaps();

    contextMaker = new ContextMaker(this);

    outTaskStatus = new ConcurrentHashMap<Integer, Boolean>();

    threadPool = Executors.newScheduledThreadPool(THREAD_POOL_NUM);
    TimerTrigger.setScheduledExecutorService(threadPool);

    if (!StormConfig.local_mode(stormConf)) {
        healthReporterThread = new AsyncLoopThread(new JStormHealthReporter(this));
    }

    try {
        Long tmp = StormConfig.read_supervisor_topology_timestamp(conf, topology_id);
        assignmentTS = (tmp == null ? System.currentTimeMillis() : tmp);
    } catch (FileNotFoundException e) {
        assignmentTS = System.currentTimeMillis();
    }

    outboundTasks = new HashSet<Integer>();

    LOG.info("Successfully create WorkerData");

}

From source file:net.sf.jabref.gui.openoffice.OOBibBase.java

private XDesktop simpleBootstrap(String pathToExecutable) throws Exception {

    ClassLoader loader = ClassLoader.getSystemClassLoader();
    if (loader instanceof URLClassLoader) {
        URLClassLoader cl = (URLClassLoader) loader;
        Class<URLClassLoader> sysclass = URLClassLoader.class;
        try {
            Method method = sysclass.getDeclaredMethod("addURL", URL.class);
            method.setAccessible(true);
            method.invoke(cl, new File(pathToExecutable).toURI().toURL());
        } catch (SecurityException | NoSuchMethodException | MalformedURLException t) {
            LOGGER.error("Error, could not add URL to system classloader", t);
            cl.close();
            throw new IOException("Error, could not add URL to system classloader", t);
        }
    } else {
        LOGGER.error("Error occured, URLClassLoader expected but " + loader.getClass()
                + " received. Could not continue.");
    }

    //Get the office component context:
    XComponentContext xContext = Bootstrap.bootstrap();
    //Get the office service manager:
    XMultiComponentFactory xServiceManager = xContext.getServiceManager();
    //Create the desktop, which is the root frame of the
    //hierarchy of frames that contain viewable components:
    Object desktop = xServiceManager.createInstanceWithContext("com.sun.star.frame.Desktop", xContext);
    XDesktop xD = UnoRuntime.queryInterface(XDesktop.class, desktop);

    UnoRuntime.queryInterface(XComponentLoader.class, desktop);

    return xD;

}

From source file:org.apache.hadoop.security.UserGroupInformation.java

@SuppressWarnings("unchecked")
private static Class<? extends Principal> getOsPrincipalClass() {
    ClassLoader cl = ClassLoader.getSystemClassLoader();
    try {
        if (System.getProperty("java.vendor").contains("IBM")) {
            if (windows) {
                return (Class<? extends Principal>) cl.loadClass("com.ibm.security.auth.UsernamePrincipal");
            } else {
                return (Class<? extends Principal>) (System.getProperty("os.arch").contains("64")
                        ? cl.loadClass("com.ibm.security.auth.UsernamePrincipal")
                        : cl.loadClass("com.ibm.security.auth.LinuxPrincipal"));
            }
        } else {
            return (Class<? extends Principal>) (windows ? cl.loadClass("com.sun.security.auth.NTUserPrincipal")
                    : cl.loadClass("com.sun.security.auth.UnixPrincipal"));
        }
    } catch (ClassNotFoundException e) {
        LOG.error("Unable to find JAAS classes:" + e.getMessage());
    }
    return null;
}

From source file:org.apache.sysml.utils.NativeHelper.java

/**
 * Useful method for debugging.
 * 
 * @return empty string (if !LOG.isDebugEnabled()) or the path from where openblas or mkl is loaded.
 */
private static String getNativeBLASPath() {
    String blasPathAndHint = "";
    if (LOG.isDebugEnabled()) {
        // Only perform the checking of library paths when DEBUG is enabled to avoid runtime overhead.
        try {
            java.lang.reflect.Field loadedLibraryNamesField = ClassLoader.class
                    .getDeclaredField("loadedLibraryNames");
            loadedLibraryNamesField.setAccessible(true);
            @SuppressWarnings("unchecked")
            Vector<String> libraries = (Vector<String>) loadedLibraryNamesField
                    .get(ClassLoader.getSystemClassLoader());
            LOG.debug("List of native libraries loaded:" + libraries);
            for (String library : libraries) {
                if (library.contains("libmkl_rt") || library.contains("libopenblas")) {
                    blasPathAndHint = " from the path " + library;
                    break;
                }
            }
        } catch (NoSuchFieldException | SecurityException | IllegalArgumentException
                | IllegalAccessException e) {
            LOG.debug("Error while finding list of native libraries:" + e.getMessage());
        }
    }
    return blasPathAndHint;
}