Usage examples for the java.lang.System.getProperties() method
public static Properties getProperties()
From source file:ch.cyberduck.core.cryptomator.SwiftLargeObjectUploadFeatureTest.java
@Test public void testLargeObjectUpload() throws Exception { // 5L * 1024L * 1024L final Host host = new Host(new SwiftProtocol(), "identity.api.rackspacecloud.com", new Credentials(System.getProperties().getProperty("rackspace.key"), System.getProperties().getProperty("rackspace.secret"))); final SwiftSession session = new SwiftSession(host); session.open(new DisabledHostKeyCallback()); session.login(new DisabledPasswordStore(), new DisabledLoginCallback(), new DisabledCancelCallback()); final Path home = new Path("test.cyberduck.ch", EnumSet.of(Path.Type.volume, Path.Type.directory)); home.attributes().setRegion("DFW"); final Path vault = new Path(home, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory)); final Path test = new Path(vault, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)); final CryptoVault cryptomator = new CryptoVault(vault, new DisabledPasswordStore()); cryptomator.create(session, null, new VaultCredentials("test")); session.withRegistry(// w w w .j a va2 s . 
c om new DefaultVaultRegistry(new DisabledPasswordStore(), new DisabledPasswordCallback(), cryptomator)); final CryptoUploadFeature m = new CryptoUploadFeature<>(session, new SwiftLargeObjectUploadFeature(session, new SwiftRegionService(session), new SwiftWriteFeature(session, new SwiftRegionService(session)), 5242880L, 5), new SwiftWriteFeature(session, new SwiftRegionService(session)), cryptomator); final Local local = new Local(System.getProperty("java.io.tmpdir"), UUID.randomUUID().toString()); final int length = 5242885; final byte[] content = RandomUtils.nextBytes(length); IOUtils.write(content, local.getOutputStream(false)); final TransferStatus writeStatus = new TransferStatus(); final Cryptor cryptor = cryptomator.getCryptor(); final FileHeader header = cryptor.fileHeaderCryptor().create(); writeStatus.setHeader(cryptor.fileHeaderCryptor().encryptHeader(header)); writeStatus.setLength(content.length); m.upload(test, local, new BandwidthThrottle(BandwidthThrottle.UNLIMITED), new DisabledStreamListener(), writeStatus, null); assertEquals((long) content.length, writeStatus.getOffset(), 0L); assertTrue(writeStatus.isComplete()); assertTrue(new CryptoFindFeature(session, new SwiftFindFeature(session), cryptomator).find(test)); assertEquals(content.length, new CryptoListService(session, session, cryptomator) .list(test.getParent(), new DisabledListProgressListener()).get(test).attributes().getSize()); final ByteArrayOutputStream buffer = new ByteArrayOutputStream(content.length); final TransferStatus readStatus = new TransferStatus().length(content.length); final InputStream in = new CryptoReadFeature(session, new SwiftReadFeature(session, new SwiftRegionService(session)), cryptomator).read(test, readStatus, new DisabledConnectionCallback()); new StreamCopier(readStatus, readStatus).transfer(in, buffer); assertArrayEquals(content, buffer.toByteArray()); new CryptoDeleteFeature(session, new SwiftDeleteFeature(session), cryptomator) .delete(Arrays.asList(test, 
vault), new DisabledLoginCallback(), new Delete.DisabledCallback()); local.delete(); session.close(); }
From source file:com.netflix.conductor.server.ConductorConfig.java
@Override public Map<String, Object> getAll() { Map<String, Object> map = new HashMap<>(); Properties props = System.getProperties(); props.entrySet().forEach(entry -> map.put(entry.getKey().toString(), entry.getValue())); return map;/*from ww w .ja va 2 s.c o m*/ }
From source file:com.tacitknowledge.util.migration.DistributedAutoPatchRollbackTest.java
/** * Configures a DistributedAutoPatchService and it's child AutoPatchService * objects to match the "migration.properties" configuration in the * AutoPatch test suite. This let's us reuse the actual functionality checks * that verify the configuration was correct, as the launcher is the same, * it's just the adapter that's different. * //from ww w . j a v a2 s .c om * @see junit.framework.TestCase#setUp() */ protected void setUp() throws Exception { // configure the controlled AutoPatchService, first by calling super super.setUp(); log.debug("setting up " + this.getClass().getName()); // Make sure we load our test launcher factory, which fakes out the data source context System.getProperties().setProperty("migration.factory", "com.tacitknowledge.util.migration.jdbc.TestJdbcMigrationLauncherFactory"); DistributedJdbcMigrationLauncherFactory factory = new TestDistributedJdbcMigrationLauncherFactory(); // Create the launcher (this does configure it as a side-effect) launcher = (DistributedJdbcMigrationLauncher) factory.createMigrationLauncher("orchestration"); // Make sure we get notification of any migrations launcher.getMigrationProcess().addListener(this); context = new TestMigrationContext(); // core sub-system AutoPatchService coreService = new TestAutoPatchService(); coreService.setSystemName("core"); coreService.setDatabaseType("postgres"); coreService.setDataSource(new MockDataSource()); coreService.setPatchPath("patch.core:com.tacitknowledge.util.migration.jdbc.test"); // orders: patch path // patches.orders.com.tacitknowledge.util.migration.tasks.normal AutoPatchService ordersService = new TestAutoPatchService(); ordersService.setSystemName("orders"); ordersService.setDatabaseType("postgres"); ordersService.setDataSource(new MockDataSource()); ordersService.setPatchPath("patch.orders:com.tacitknowledge.util.migration.tasks.rollback"); // catalog: patch path patches.catalog AutoPatchService catalogService = new TestAutoPatchService(); 
catalogService.setPatchPath("patch.catalog"); // make catalog a multi-node patch service TestDataSourceMigrationContext catalogContext1 = new TestDataSourceMigrationContext(); TestDataSourceMigrationContext catalogContext2 = new TestDataSourceMigrationContext(); catalogContext1.setSystemName("catalog"); catalogContext2.setSystemName("catalog"); catalogContext1.setDatabaseType(new DatabaseType("postgres")); catalogContext2.setDatabaseType(new DatabaseType("postgres")); catalogContext1.setDataSource(new MockDataSource()); catalogContext2.setDataSource(new MockDataSource()); catalogService.addContext(catalogContext1); catalogService.addContext(catalogContext2); // configure the DistributedAutoPatchService DistributedAutoPatchService distributedPatchService = new TestDistributedAutoPatchService(); distributedPatchService.setSystemName("orchestration"); distributedPatchService.setDatabaseType("postgres"); distributedPatchService.setReadOnly(false); AutoPatchService[] controlledSystems = new AutoPatchService[3]; controlledSystems[0] = coreService; controlledSystems[1] = ordersService; controlledSystems[2] = catalogService; distributedPatchService.setControlledSystems(controlledSystems); distributedPatchService.setDataSource(new MockDataSource()); // instantiate everything setLauncher(distributedPatchService.getLauncher()); // set ourselves up as a listener for any migrations that run getLauncher().getMigrationProcess().addListener(this); currentPatchInfoStore = MockBuilder.getPatchInfoStore(12); }
From source file:org.ringojs.jsgi.ExtJsgiServlet.java
@Override public void init(ServletConfig config) throws ServletException { super.init(config); _servletContext = config.getServletContext(); _springContext = WebApplicationContextUtils.getRequiredWebApplicationContext(_servletContext); // Hacky way of shoving an object into the System properties LOG.info("Injecting spring context into Java system property [{}]", PROP_SPRING_CONTEXT); System.getProperties().put(PROP_SPRING_CONTEXT, _springContext); // Hacky way of shoving the current servlet instance into System properties LOG.info("Injecting this servlet into Java system property [{}]", PROP_SERVLET); System.getProperties().put(PROP_SERVLET, this); }
From source file:com.vmware.bdd.specpolicy.TemplateClusterSpec.java
private static void init() { String homeDir = System.getProperties().getProperty("serengeti.home.dir"); File templateFile = null;/*from w w w .j a va 2 s . c o m*/ if (homeDir != null && homeDir.length() > 0) { StringBuilder builder = new StringBuilder(); builder.append(homeDir).append(File.separator).append("conf").append(File.separator) .append(TEMPLATE_CLUSTER_SPEC_JSON); templateFile = new File(builder.toString()); } else { URL filePath = ConfigurationUtils.locate(TEMPLATE_CLUSTER_SPEC_JSON); if (filePath != null) { templateFile = ConfigurationUtils.fileFromURL(filePath); } } if (templateFile == null) { logger.error("cluster template spec is not found, using the default cluster value."); loadDefaultValue(); } try { Reader fileReader = new FileReader(templateFile); createTemplate(fileReader); } catch (FileNotFoundException e) { logger.error("cluster template spec is not found, using the default cluster value."); loadDefaultValue(); } }
From source file:com.egt.ejb.toolkit.ToolKitUtils.java
public static String mkLibDir(String root, String project, String subproject) { String sep = System.getProperties().getProperty("file.separator"); String sub = StringUtils.isBlank(subproject) ? "" : sep + subproject.replace("-", sep).replace(".", sep); String dir = StringUtils.chomp(root, sep) + sep + project + sub; mkdirs(dir);//from w ww .j av a 2 s. c om return dir + sep; }
From source file:gestionelectrica.VentanaEjemploGraf.java
private void rellenar(JPanel jp) { // Create a simple XY chart XYSeries series = new XYSeries("EC"); series.add(1, 1);// w w w.j ava 2 s.com series.add(1, 2); series.add(2, 1); series.add(3, 9); series.add(4, 10); series.add(7, 15); series.add(10, 6); // Add the series to your data set XYSeriesCollection dataset = new XYSeriesCollection(); dataset.addSeries(series); // Generate the graph JFreeChart chart = ChartFactory.createXYLineChart("Energa comprada", // Title "hora", // x-axis Label "KWh", // y-axis Label dataset, // Dataset PlotOrientation.VERTICAL, // Plot Orientation true, // Show Legend true, // Use tooltips false // Configure chart to generate URLs? ); JFreeChart pieChart = ChartFactory.createXYLineChart("EJEMPLOOOO", "Category", "Score", createDataset(), PlotOrientation.VERTICAL, true, true, false); System.out.println(System.getProperty("user.home")); System.out.println(System.getProperty("java.io.tmpdir")); System.out.println(System.getProperties()); File f = new File(System.getProperty("java.io.tmpdir") + "\\chart.jpg"); try { ChartUtilities.saveChartAsJPEG(f, chart, jp.getWidth(), jp.getHeight()); } catch (IOException e) { System.err.println("Problem occurred creating chart."); System.err.println(e.getMessage()); } // Adding chart into a chart panel //ChartPanel chartPanel = new ChartPanel(pieChart); // settind default size //chartPanel.setPreferredSize(new java.awt.Dimension(500, 270)); // add to contentPane //setContentPane(chartPanel); //jPanel1.add(chartPanel); ImageIcon ic = new ImageIcon(f.getAbsolutePath()); ImagePanel panel = new ImagePanel(ic.getImage(), jp.getWidth(), jp.getHeight()); jp.add(panel); /*jPanel1.repaint(); jPanel1.updateUI(); jPanel1.validate();*/ if (f.delete()) System.out.println("BORRADO CORRECTAMENTE"); else System.err.println("NO SE PUDO BORRAR"); }
From source file:ch.cyberduck.core.cryptomator.AzureWriteFeatureTest.java
@Test public void testWrite() throws Exception { final Host host = new Host(new AzureProtocol(), "kahy9boj3eib.blob.core.windows.net", new Credentials(System.getProperties().getProperty("azure.account"), System.getProperties().getProperty("azure.key"))); final AzureSession session = new AzureSession(host); session.open(new DisabledHostKeyCallback()); session.login(new DisabledPasswordStore(), new DisabledLoginCallback(), new DisabledCancelCallback()); final TransferStatus status = new TransferStatus(); final byte[] content = RandomUtils.nextBytes(1048576); status.setLength(content.length);// ww w . j a v a 2 s . com final Path home = new Path("cyberduck", EnumSet.of(Path.Type.volume, Path.Type.directory)); final CryptoVault cryptomator = new CryptoVault( new Path(home, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory)), new DisabledPasswordStore()); final Path vault = cryptomator.create(session, null, new VaultCredentials("test")); session.withRegistry( new DefaultVaultRegistry(new DisabledPasswordStore(), new DisabledPasswordCallback(), cryptomator)); final CryptoWriteFeature<Void> writer = new CryptoWriteFeature<>(session, new AzureWriteFeature(session, null), cryptomator); final Cryptor cryptor = cryptomator.getCryptor(); final FileHeader header = cryptor.fileHeaderCryptor().create(); status.setHeader(cryptor.fileHeaderCryptor().encryptHeader(header)); status.setNonces(new RotatingNonceGenerator(cryptomator.numberOfChunks(content.length))); final Path test = new Path(vault, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)); status.setChecksum(writer.checksum(test).compute(new ByteArrayInputStream(content), status)); final OutputStream out = writer.write(test, status, new DisabledConnectionCallback()); assertNotNull(out); new StreamCopier(status, status).transfer(new ByteArrayInputStream(content), out); out.close(); final OperationContext context = new OperationContext(); assertTrue(new 
CryptoFindFeature(session, new AzureFindFeature(session, context), cryptomator).find(test)); assertEquals(content.length, new CryptoListService(session, session, cryptomator) .list(test.getParent(), new DisabledListProgressListener()).get(test).attributes().getSize()); assertEquals(content.length, new CryptoWriteFeature<>(session, new AzureWriteFeature(session, context, new DefaultFindFeature(session), new DefaultAttributesFinderFeature(session)), cryptomator).append(test, status.getLength(), PathCache.empty()).size, 0L); assertEquals(content.length, new CryptoWriteFeature<>(session, new AzureWriteFeature(session, context, new AzureFindFeature(session, context), new AzureAttributesFinderFeature(session, context)), cryptomator).append(test, status.getLength(), PathCache.empty()).size, 0L); final ByteArrayOutputStream buffer = new ByteArrayOutputStream(content.length); final InputStream in = new CryptoReadFeature(session, new AzureReadFeature(session, context), cryptomator) .read(test, new TransferStatus().length(content.length), new DisabledConnectionCallback()); new StreamCopier(status, status).transfer(in, buffer); assertArrayEquals(content, buffer.toByteArray()); new CryptoDeleteFeature(session, new AzureDeleteFeature(session, context), cryptomator) .delete(Arrays.asList(test, vault), new DisabledLoginCallback(), new Delete.DisabledCallback()); session.close(); }
From source file:com.google.enterprise.connector.otex.LivelinkConnectorFactory.java
public static LivelinkConnector getConnector(String prefix) throws RepositoryException { Map<String, String> p = new HashMap<String, String>(); p.putAll(emptyProperties);// www . j av a 2 s .c o m Properties system = System.getProperties(); Enumeration<?> names = system.propertyNames(); boolean prefixFound = false; while (names.hasMoreElements()) { String name = (String) names.nextElement(); if (name.startsWith(prefix)) { prefixFound = true; LOGGER.config("PROPERTY: " + name); p.put(name.substring(prefix.length()), system.getProperty(name)); } } // If there is no connector configured by this name, bail early. if (!prefixFound) { throw new RepositoryException("No javatest." + prefix + "* properties specified for connector."); } return (LivelinkConnector) instance.makeConnector(p); }
From source file:ch.cyberduck.core.shared.DefaultFindFeatureTest.java
@Test public void testFindLargeUpload() throws Exception { final B2Session session = new B2Session(new Host(new B2Protocol(), new B2Protocol().getDefaultHostname(), new Credentials(System.getProperties().getProperty("b2.user"), System.getProperties().getProperty("b2.key")))); final Path bucket = new Path("test-cyberduck", EnumSet.of(Path.Type.directory, Path.Type.volume)); session.open(new DisabledHostKeyCallback()); session.login(new DisabledPasswordStore(), new DisabledLoginCallback(), new DisabledCancelCallback()); final Path file = new Path(bucket, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)); final StatusOutputStream<VersionId> out = new B2LargeUploadWriteFeature(session).write(file, new TransferStatus(), new DisabledConnectionCallback()); IOUtils.copyLarge(new ByteArrayInputStream(RandomUtils.nextBytes(100)), out); out.close();// w ww .j a v a2 s. c o m assertTrue(new DefaultFindFeature(session).find(file)); session.close(); }