Example usage for java.lang.System.getProperties()

List of usage examples for java.lang.System.getProperties()

Introduction

On this page you can find example usage for java.lang.System.getProperties().

Prototype

public static Properties getProperties() 

Document

Determines the current system properties.
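
Before the project examples below, here is a minimal, self-contained sketch of the basic pattern. It is not taken from any of the listed sources, and the property name example.flag is made up for illustration:

import java.util.Properties;

public class SystemPropertiesDemo {
    public static void main(String[] args) {
        // The returned object is the live, JVM-wide property table, not a copy.
        Properties props = System.getProperties();

        // Read well-known standard properties.
        System.out.println("java.version = " + props.getProperty("java.version"));
        System.out.println("os.name      = " + System.getProperty("os.name"));

        // Changes made here are visible everywhere in the JVM.
        props.setProperty("example.flag", "true"); // hypothetical key, for illustration only
        System.out.println("example.flag = " + System.getProperty("example.flag"));
    }
}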

Usage

From source file: de.hypoport.ep2.support.configuration.properties.PropertiesLoader.java

private static void mergePropertiesIntoSystemPropertiesWithoutOverwriting(Properties properties) {
    if (properties == null || properties.size() == 0) {
        return;
    }
    Properties systemProperties = System.getProperties();
    properties.putAll(systemProperties);
    systemProperties.putAll(properties);
}
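
The order of the two putAll calls is what makes this "without overwriting": copying the system properties over the incoming map first lets existing system entries win, so writing the merged map back only introduces keys that were previously absent. A hedged sketch of the effect, using made-up keys:

// Pretend "app.owner" is already a system property and "app.mode" is not.
System.setProperty("app.owner", "ops");

Properties incoming = new Properties();
incoming.setProperty("app.mode", "batch");
incoming.setProperty("app.owner", "someone-else"); // collides with the existing value

Properties systemProperties = System.getProperties();
incoming.putAll(systemProperties);   // existing system entries overwrite the incoming ones
systemProperties.putAll(incoming);   // write back: only previously missing keys are new

System.out.println(System.getProperty("app.mode"));  // "batch" (added)
System.out.println(System.getProperty("app.owner")); // "ops"   (preserved)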

From source file: edu.harvard.iq.dataverse.MailServiceBean.java

public void sendMail(String host, String from, String to, String subject, String messageText) {
    Properties props = System.getProperties();
    props.put("mail.smtp.host", host);
    Session session = Session.getDefaultInstance(props, null);

    try {
        MimeMessage msg = new MimeMessage(session);
        String[] recipientStrings = to.split(",");
        InternetAddress[] recipients = new InternetAddress[recipientStrings.length];
        try {
            msg.setFrom(new InternetAddress(from, charset));
            for (int i = 0; i < recipients.length; i++) {
                recipients[i] = new InternetAddress(recipientStrings[i], "", charset);
            }
        } catch (UnsupportedEncodingException ex) {
            logger.severe(ex.getMessage());
        }
        msg.setRecipients(Message.RecipientType.TO, recipients);
        msg.setSubject(subject, charset);
        msg.setText(messageText, charset);
        Transport.send(msg, recipients);
    } catch (AddressException ae) {
        ae.printStackTrace(System.out);
    } catch (MessagingException me) {
        me.printStackTrace(System.out);
    }
}
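
This example configures JavaMail by mutating the JVM-wide system properties and uses Session.getDefaultInstance, which returns a shared, process-wide session rather than a private one. A hedged alternative sketch that keeps the SMTP host local to one session (host is the same parameter as in the method above):

// Uses java.util.Properties and javax.mail.Session, as in the example above.
Properties mailProps = new Properties(System.getProperties()); // system properties only as defaults
mailProps.put("mail.smtp.host", host);
Session session = Session.getInstance(mailProps);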

From source file: ch.cyberduck.core.cryptomator.S3MultipartUploadServiceTest.java

@Test
public void testUploadSinglePart() throws Exception {
    // 5L * 1024L * 1024L
    final S3Session session = new S3Session(new Host(new S3Protocol(), new S3Protocol().getDefaultHostname(),
            new Credentials(System.getProperties().getProperty("s3.key"),
                    System.getProperties().getProperty("s3.secret")))) {
    };
    session.open(new DisabledHostKeyCallback());
    session.login(new DisabledPasswordStore(), new DisabledLoginCallback(), new DisabledCancelCallback());
    final Path home = new Path("test-us-east-1-cyberduck", EnumSet.of(Path.Type.volume, Path.Type.directory));
    final Path vault = new Path(home, new AlphanumericRandomStringService().random(),
            EnumSet.of(Path.Type.directory));
    final Path test = new Path(vault, new AlphanumericRandomStringService().random(),
            EnumSet.of(Path.Type.file));
    final CryptoVault cryptomator = new CryptoVault(vault, new DisabledPasswordStore());
    cryptomator.create(session, null, new VaultCredentials("test"));
    session.withRegistry(
            new DefaultVaultRegistry(new DisabledPasswordStore(), new DisabledPasswordCallback(), cryptomator));
    final CryptoUploadFeature m = new CryptoUploadFeature<>(session,
            new S3MultipartUploadService(session, new S3WriteFeature(session), 5L * 1024L * 1024L, 5),
            new S3WriteFeature(session), cryptomator);
    final Local local = new Local(System.getProperty("java.io.tmpdir"), UUID.randomUUID().toString());
    final int length = 5242880;
    final byte[] content = RandomUtils.nextBytes(length);
    IOUtils.write(content, local.getOutputStream(false));
    final TransferStatus writeStatus = new TransferStatus();
    final Cryptor cryptor = cryptomator.getCryptor();
    final FileHeader header = cryptor.fileHeaderCryptor().create();
    writeStatus.setHeader(cryptor.fileHeaderCryptor().encryptHeader(header));
    writeStatus.setLength(content.length);
    m.upload(test, local, new BandwidthThrottle(BandwidthThrottle.UNLIMITED), new DisabledStreamListener(),
            writeStatus, null);
    assertEquals((long) content.length, writeStatus.getOffset(), 0L);
    assertTrue(writeStatus.isComplete());
    assertTrue(new CryptoFindFeature(session, new S3FindFeature(session), cryptomator).find(test));
    assertEquals(content.length,
            new CryptoAttributesFeature(session, new S3AttributesFinderFeature(session), cryptomator).find(test)
                    .getSize());
    final ByteArrayOutputStream buffer = new ByteArrayOutputStream(content.length);
    final TransferStatus readStatus = new TransferStatus().length(content.length);
    final InputStream in = new CryptoReadFeature(session, new S3ReadFeature(session), cryptomator).read(test,
            readStatus, new DisabledConnectionCallback());
    new StreamCopier(readStatus, readStatus).transfer(in, buffer);
    assertArrayEquals(content, buffer.toByteArray());
    new CryptoDeleteFeature(session, new S3DefaultDeleteFeature(session), cryptomator)
            .delete(Arrays.asList(test, vault), new DisabledLoginCallback(), new Delete.DisabledCallback());
    local.delete();
    session.close();
}
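
Like several of the Cyberduck tests in this list, this one reads its credentials from system properties (s3.key and s3.secret), which are typically supplied as -D options on the JVM command line. A hedged sketch of that pattern with an explicit check:

String key = System.getProperty("s3.key");
String secret = System.getProperty("s3.secret");
if (key == null || secret == null) {
    throw new IllegalStateException("run with -Ds3.key=... -Ds3.secret=... to enable this test");
}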

From source file: org.tomitribe.tribestream.registryng.bootstrap.Provisioning.java

@PostConstruct
public void init() {
    loginContext.setUsername("system");

    ofNullable(script).ifPresent(s -> {
        final ScriptEngine engine = new ScriptEngineManager().getEngineByExtension("js");
        final Bindings bindings = engine.createBindings();
        bindings.put("props", System.getProperties());

        final File file = new File(s);
        if (file.isFile()) {
            try (final Reader reader = new FileReader(file)) {
                engine.eval(reader, bindings);
            } catch (final IOException | ScriptException e) {
                throw new IllegalArgumentException(e);
            }
        } else {
            try {
                engine.eval(s, bindings);
            } catch (final ScriptException e) {
                throw new IllegalArgumentException(e);
            }
        }
    });
    restore();
}
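
The interesting part here is that the live system properties are handed to a JSR-223 script under the binding name "props". A hedged, self-contained sketch of the same idea; getEngineByExtension("js") assumes a JavaScript engine such as Nashorn is available (bundled with JDK 8 through 14):

import javax.script.Bindings;
import javax.script.ScriptEngine;
import javax.script.ScriptEngineManager;
import javax.script.ScriptException;

static void printJavaVersionViaScript() throws ScriptException {
    ScriptEngine engine = new ScriptEngineManager().getEngineByExtension("js");
    Bindings bindings = engine.createBindings();
    bindings.put("props", System.getProperties());
    // The script sees the live Properties object under the name "props".
    engine.eval("print('java.version = ' + props.getProperty('java.version'))", bindings);
}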

From source file: io.seldon.spark.actions.GroupActionsJob.java

public static void run(CmdLineArgs cmdLineArgs) {
    long unixDays = 0;
    try {
        unixDays = JobUtils.dateToUnixDays(cmdLineArgs.input_date_string);
    } catch (ParseException e) {
        unixDays = 0;
    }
    System.out.println(String.format("--- started GroupActionsJob date[%s] unixDays[%s] ---",
            cmdLineArgs.input_date_string, unixDays));

    System.out.println("Env: " + System.getenv());
    System.out.println("Properties: " + System.getProperties());

    SparkConf sparkConf = new SparkConf().setAppName("GroupActionsJob");

    if (cmdLineArgs.debug_use_local_master) {
        System.out.println("Using 'local' master");
        sparkConf.setMaster("local");
    }

    Tuple2<String, String>[] sparkConfPairs = sparkConf.getAll();
    System.out.println("--- sparkConf ---");
    for (int i = 0; i < sparkConfPairs.length; i++) {
        Tuple2<String, String> kvPair = sparkConfPairs[i];
        System.out.println(String.format("%s:%s", kvPair._1, kvPair._2));
    }
    System.out.println("-----------------");

    JavaSparkContext jsc = new JavaSparkContext(sparkConf);
    { // setup aws access
        Configuration hadoopConf = jsc.hadoopConfiguration();
        hadoopConf.set("fs.s3.impl", "org.apache.hadoop.fs.s3native.NativeS3FileSystem");
        if (cmdLineArgs.aws_access_key_id != null && !"".equals(cmdLineArgs.aws_access_key_id)) {
            hadoopConf.set("fs.s3n.awsAccessKeyId", cmdLineArgs.aws_access_key_id);
            hadoopConf.set("fs.s3n.awsSecretAccessKey", cmdLineArgs.aws_secret_access_key);
        }
    }

    // String output_path_dir = "./out/" + input_date_string + "-" + UUID.randomUUID();

    JavaRDD<String> dataSet = jsc.textFile(
            JobUtils.getSourceDirFromDate(cmdLineArgs.input_path_pattern, cmdLineArgs.input_date_string))
            .repartition(4);

    final ObjectMapper objectMapper = new ObjectMapper();
    objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);

    final String single_client = cmdLineArgs.single_client;
    if (single_client != null) {
        Function<String, Boolean> clientFilter = new Function<String, Boolean>() {

            @Override
            public Boolean call(String t) throws Exception {
                ActionData actionData = JobUtils.getActionDataFromActionLogLine(objectMapper, t);
                return ((actionData.client != null) && (actionData.client.equals(single_client)));
            }
        };
        dataSet = dataSet.filter(clientFilter);
    }

    JavaPairRDD<String, ActionData> pairs = dataSet.mapToPair(new PairFunction<String, String, ActionData>() {

        @Override
        public Tuple2<String, ActionData> call(String t) throws Exception {
            ActionData actionData = JobUtils.getActionDataFromActionLogLine(objectMapper, t);
            // String key = (actionData.userid == 0) ? "__no_userid__" : actionData.client;
            String key = actionData.client;
            return new Tuple2<String, ActionData>(key, actionData);
        }

    }).persist(StorageLevel.MEMORY_AND_DISK());

    List<String> clientList = pairs.keys().distinct().collect();
    Queue<ClientDetail> clientDetailQueue = new PriorityQueue<ClientDetail>(30, new Comparator<ClientDetail>() {

        @Override
        public int compare(ClientDetail o1, ClientDetail o2) {
            if (o1.itemCount > o2.itemCount) {
                return -1;
            } else if (o1.itemCount < o2.itemCount) {
                return 1;
            }
            return 0;
        }
    });
    Queue<ClientDetail> clientDetailZeroQueue = new PriorityQueue<ClientDetail>(30,
            new Comparator<ClientDetail>() {

                @Override
                public int compare(ClientDetail o1, ClientDetail o2) {
                    if (o1.itemCount > o2.itemCount) {
                        return -1;
                    } else if (o1.itemCount < o2.itemCount) {
                        return 1;
                    }
                    return 0;
                }
            });
    System.out.println("Client list " + clientList.toString());
    for (String client : clientList) {
        if (client != null) {
            System.out.println("looking at client " + client);
            final String currentClient = client;

            JavaPairRDD<String, ActionData> filtered_by_client = pairs
                    .filter(new Function<Tuple2<String, ActionData>, Boolean>() {

                        @Override
                        public Boolean call(Tuple2<String, ActionData> v1) throws Exception {
                            if (currentClient.equalsIgnoreCase(v1._1)) {
                                return Boolean.TRUE;
                            } else {
                                return Boolean.FALSE;
                            }
                        }
                    });

            JavaPairRDD<String, ActionData> nonZeroUserIds = filtered_by_client
                    .filter(new Function<Tuple2<String, ActionData>, Boolean>() {

                        @Override
                        public Boolean call(Tuple2<String, ActionData> v1) throws Exception {
                            if (v1._2.userid == 0) {
                                return Boolean.FALSE;
                            } else {
                                return Boolean.TRUE;
                            }
                        }
                    });

            JavaPairRDD<String, Integer> userIdLookupRDD = nonZeroUserIds
                    .mapToPair(new PairFunction<Tuple2<String, ActionData>, String, Integer>() {

                        @Override
                        public Tuple2<String, Integer> call(Tuple2<String, ActionData> t) throws Exception {
                            String key = currentClient + "_" + t._2.client_userid;
                            return new Tuple2<String, Integer>(key, t._2.userid);
                        }
                    });

            Map<String, Integer> userIdLookupMap = userIdLookupRDD.collectAsMap();
            Map<String, Integer> userIdLookupMap_wrapped = new HashMap<String, Integer>(userIdLookupMap);
            final Broadcast<Map<String, Integer>> broadcastVar = jsc.broadcast(userIdLookupMap_wrapped);
            JavaRDD<String> json_only_with_zeros = filtered_by_client
                    .map(new Function<Tuple2<String, ActionData>, String>() {

                        @Override
                        public String call(Tuple2<String, ActionData> v1) throws Exception {
                            Map<String, Integer> m = broadcastVar.getValue();
                            ActionData actionData = v1._2;
                            if (actionData.userid == 0) {
                                String key = currentClient + "_" + actionData.client_userid;
                                if (m.containsKey(key)) {
                                    actionData.userid = m.get(key);
                                } else {
                                    return "";
                                }
                            }
                            String json = JobUtils.getJsonFromActionData(actionData);
                            return json;
                        }
                    });

            JavaRDD<String> json_only = json_only_with_zeros.filter(new Function<String, Boolean>() {

                @Override
                public Boolean call(String v1) throws Exception {
                    return (v1.length() == 0) ? Boolean.FALSE : Boolean.TRUE;
                }
            });

            String outputPath = getOutputPath(cmdLineArgs.output_path_dir, unixDays, client);
            if (cmdLineArgs.gzip_output) {
                json_only.saveAsTextFile(outputPath, org.apache.hadoop.io.compress.GzipCodec.class);
            } else {
                json_only.saveAsTextFile(outputPath);
            }
            long json_only_count = json_only.count();
            clientDetailZeroQueue
                    .add(new ClientDetail(currentClient, json_only_with_zeros.count() - json_only_count));
            clientDetailQueue.add(new ClientDetail(currentClient, json_only_count));
        } else
            System.out.println("Found null client!");
    }

    System.out.println("- Client Action (Zero Userid) Count -");
    while (clientDetailZeroQueue.size() != 0) {
        GroupActionsJob.ClientDetail clientDetail = clientDetailZeroQueue.remove();
        System.out.println(String.format("%s: %d", clientDetail.client, clientDetail.itemCount));
    }

    System.out.println("- Client Action Count -");
    while (clientDetailQueue.size() != 0) {
        GroupActionsJob.ClientDetail clientDetail = clientDetailQueue.remove();
        System.out.println(String.format("%s: %d", clientDetail.client, clientDetail.itemCount));
    }

    jsc.stop();
    System.out.println(String.format("--- finished GroupActionsJob date[%s] unixDays[%s] ---",
            cmdLineArgs.input_date_string, unixDays));

}
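
The diagnostic dump above prints System.getProperties() via its inherited toString(), which produces one long, unordered line. A hedged sketch of a more readable dump, sorted by key (plain JDK, Java 8+):

Properties props = System.getProperties();
props.stringPropertyNames().stream()
        .sorted()
        .forEach(name -> System.out.println(name + "=" + props.getProperty(name)));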

From source file: com.cloudera.lib.service.instrumentation.InstrumentationService.java

@Override
@SuppressWarnings("unchecked")
public void init() throws ServiceException {
    timersSize = getServiceConfig().getInt(CONF_TIMERS_SIZE, 10);
    counterLock = new ReentrantLock();
    timerLock = new ReentrantLock();
    variableLock = new ReentrantLock();
    samplerLock = new ReentrantLock();
    jvmVariables = new ConcurrentHashMap<String, VariableHolder>();
    counters = new ConcurrentHashMap<String, Map<String, AtomicLong>>();
    timers = new ConcurrentHashMap<String, Map<String, Timer>>();
    variables = new ConcurrentHashMap<String, Map<String, VariableHolder>>();
    samplers = new ConcurrentHashMap<String, Map<String, Sampler>>();
    samplersList = new ArrayList<Sampler>();
    all = new LinkedHashMap<String, Map<String, ?>>();
    all.put("os-env", System.getenv());
    all.put("sys-props", (Map<String, ?>) (Map) System.getProperties());
    all.put("jvm", jvmVariables);
    all.put("counters", (Map) counters);
    all.put("timers", (Map) timers);
    all.put("variables", (Map) variables);
    all.put("samplers", (Map) samplers);

    jvmVariables.put("free.memory", new VariableHolder<Long>(new Instrumentation.Variable<Long>() {
        public Long getValue() {
            return Runtime.getRuntime().freeMemory();
        }
    }));
    jvmVariables.put("max.memory", new VariableHolder<Long>(new Instrumentation.Variable<Long>() {
        public Long getValue() {
            return Runtime.getRuntime().maxMemory();
        }
    }));
    jvmVariables.put("total.memory", new VariableHolder<Long>(new Instrumentation.Variable<Long>() {
        public Long getValue() {
            return Runtime.getRuntime().totalMemory();
        }
    }));
}
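
The double cast on the "sys-props" entry is needed because Properties extends Hashtable<Object, Object>, so it cannot be viewed as a Map<String, ?> without going through the raw type (hence the @SuppressWarnings("unchecked") on the method). A hedged, type-safe alternative sketch that copies into a String-keyed map instead, assuming the same "all" field as above:

Map<String, String> sysProps = new LinkedHashMap<>();
for (String name : System.getProperties().stringPropertyNames()) {
    sysProps.put(name, System.getProperty(name));
}
all.put("sys-props", sysProps);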

From source file: it.haefelinger.flaka.util.HttpUpload.java

static private String getp(String p) {
    String s = System.getProperties().getProperty(p);
    if (s == null && !p.equals("httpupload.debug")) {
        System.err.println("error: required property `" + p + "' not set.");
        System.exit(1);
    }
    return s != null ? s.trim() : null;
}
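
Calling System.exit from a helper like this is convenient in a command-line tool but makes the method hard to reuse or test. A hedged variant that reports the missing key by throwing instead:

static String requireProperty(String name) {
    String value = System.getProperty(name);
    if (value == null) {
        throw new IllegalStateException("required property `" + name + "' not set");
    }
    return value.trim();
}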

From source file: com.streamsets.datacollector.bundles.content.SdcInfoContentGenerator.java

@Override
public void generateContent(BundleContext context, BundleWriter writer) throws IOException {
    // Various properties
    writer.write("properties/build.properties", context.getBuildInfo().getInfo());
    writer.write("properties/system.properties", System.getProperties());

    // Interesting directory listings
    listDirectory(context.getRuntimeInfo().getConfigDir(), "conf.txt", writer);
    listDirectory(context.getRuntimeInfo().getResourcesDir(), "resource.txt", writer);
    listDirectory(context.getRuntimeInfo().getDataDir(), "data.txt", writer);
    listDirectory(context.getRuntimeInfo().getLogDir(), "log.txt", writer);
    listDirectory(context.getRuntimeInfo().getLibsExtraDir(), "lib_extra.txt", writer);
    listDirectory(context.getRuntimeInfo().getRuntimeDir() + "/streamsets-libs/", "stagelibs.txt", writer);

    // Interesting files
    String confDir = context.getRuntimeInfo().getConfigDir();
    writer.write("conf", Paths.get(confDir, "sdc.properties"));
    writer.write("conf", Paths.get(confDir, "sdc-log4j.properties"));
    writer.write("conf", Paths.get(confDir, "dpm.properties"));
    writer.write("conf", Paths.get(confDir, "ldap-login.conf"));
    writer.write("conf", Paths.get(confDir, "sdc-security.policy"));
    String libExecDir = context.getRuntimeInfo().getLibexecDir();
    writer.write("libexec", Paths.get(libExecDir, "sdc-env.sh"));
    writer.write("libexec", Paths.get(libExecDir, "sdcd-env.sh"));

    // JMX
    writeJmx(writer);

    // Thread dump
    threadDump(writer);
}
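
Here the entire System.getProperties() table is written into a support bundle under properties/system.properties. For a plain file, the standard Properties.store format does the same job; a hedged sketch (the output path is made up):

import java.io.IOException;
import java.io.Writer;
import java.nio.file.Files;
import java.nio.file.Paths;

static void dumpSystemProperties() throws IOException {
    try (Writer out = Files.newBufferedWriter(Paths.get("/tmp/system.properties"))) { // hypothetical path
        System.getProperties().store(out, "JVM system properties snapshot");
    }
}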

From source file: ch.cyberduck.core.s3.S3ThresholdUploadServiceTest.java

@Test
public void testUploadSinglePartEuCentral() throws Exception {
    final S3Session session = new S3Session(new Host(new S3Protocol(), new S3Protocol().getDefaultHostname(),
            new Credentials(System.getProperties().getProperty("s3.key"),
                    System.getProperties().getProperty("s3.secret"))));
    session.open(new DisabledHostKeyCallback());
    session.login(new DisabledPasswordStore(), new DisabledLoginCallback(), new DisabledCancelCallback());
    final S3ThresholdUploadService service = new S3ThresholdUploadService(session, 5 * 1024L);
    final Path container = new Path("test-eu-central-1-cyberduck",
            EnumSet.of(Path.Type.directory, Path.Type.volume));
    final String name = UUID.randomUUID().toString();
    final Path test = new Path(container, name, EnumSet.of(Path.Type.file));
    final Local local = new Local(System.getProperty("java.io.tmpdir"), name);
    final String random = new RandomStringGenerator.Builder().build().generate(1000);
    IOUtils.write(random, local.getOutputStream(false), Charset.defaultCharset());
    final TransferStatus status = new TransferStatus();
    status.setLength((long) random.getBytes().length);
    status.setMime("text/plain");
    status.setStorageClass(S3Object.STORAGE_CLASS_REDUCED_REDUNDANCY);
    service.upload(test, local, new BandwidthThrottle(BandwidthThrottle.UNLIMITED),
            new DisabledStreamListener(), status, new DisabledLoginCallback());
    assertEquals((long) random.getBytes().length, status.getOffset(), 0L);
    assertTrue(status.isComplete());
    assertTrue(new S3FindFeature(session).find(test));
    final PathAttributes attributes = new S3AttributesFinderFeature(session).find(test);
    assertEquals(random.getBytes().length, attributes.getSize());
    assertEquals(S3Object.STORAGE_CLASS_REDUCED_REDUNDANCY, new S3StorageClassFeature(session).getClass(test));
    final Map<String, String> metadata = new S3MetadataFeature(session, new S3AccessControlListFeature(session))
            .getMetadata(test);
    assertFalse(metadata.isEmpty());
    assertEquals("text/plain", metadata.get("Content-Type"));
    new S3DefaultDeleteFeature(session).delete(Collections.singletonList(test), new DisabledLoginCallback(),
            new Delete.DisabledCallback());
    local.delete();
    session.close();
}

From source file: ch.cyberduck.core.worker.SingleTransferWorkerTest.java

@Test
public void testTransferredSizeRepeat() throws Exception {
    final Local local = new Local(System.getProperty("java.io.tmpdir"), UUID.randomUUID().toString());
    final byte[] content = new byte[62768];
    new Random().nextBytes(content);
    final OutputStream out = local.getOutputStream(false);
    IOUtils.write(content, out);
    out.close();
    final Host host = new Host(new DAVProtocol(), "test.cyberduck.ch",
            new Credentials(System.getProperties().getProperty("webdav.user"),
                    System.getProperties().getProperty("webdav.password")));
    host.setDefaultPath("/dav/basic");
    final AtomicBoolean failed = new AtomicBoolean();
    final DAVSession session = new DAVSession(host) {
        final DAVUploadFeature upload = new DAVUploadFeature(new DAVWriteFeature(this)) {
            @Override
            protected InputStream decorate(final InputStream in, final MessageDigest digest)
                    throws IOException {
                if (failed.get()) {
                    // Second attempt successful
                    return in;
                }
                return new CountingInputStream(in) {
                    @Override
                    protected void beforeRead(final int n) throws IOException {
                        super.beforeRead(n);
                        if (this.getByteCount() >= 32768L) {
                            failed.set(true);
                            throw new SocketTimeoutException();
                        }
                    }
                };
            }
        };

        @Override
        @SuppressWarnings("unchecked")
        public <T> T _getFeature(final Class<T> type) {
            if (type == Upload.class) {
                return (T) upload;
            }
            return super._getFeature(type);
        }
    };
    session.open(new DisabledHostKeyCallback());
    session.login(new DisabledPasswordStore(), new DisabledLoginCallback(), new DisabledCancelCallback());
    final Path test = new Path(new DefaultHomeFinderService(session).find(), UUID.randomUUID().toString(),
            EnumSet.of(Path.Type.file));
    final Transfer t = new UploadTransfer(new Host(new TestProtocol()), test, local);
    final BytecountStreamListener counter = new BytecountStreamListener(new DisabledStreamListener());
    assertTrue(new SingleTransferWorker(session, session, t, new TransferOptions(), new TransferSpeedometer(t),
            new DisabledTransferPrompt() {
                @Override
                public TransferAction prompt(final TransferItem file) {
                    return TransferAction.overwrite;
                }
            }, new DisabledTransferErrorCallback(), new DisabledProgressListener(), counter,
            new DisabledLoginCallback(), new DisabledPasswordCallback(), TransferItemCache.empty()) {

    }.run(session, session));
    local.delete();
    assertEquals(62768L, counter.getSent(), 0L);
    assertEquals(62768L, new DAVAttributesFinderFeature(session).find(test).getSize());
    assertTrue(failed.get());
    new DAVDeleteFeature(session).delete(Collections.singletonList(test), new DisabledLoginCallback(),
            new Delete.DisabledCallback());
}