List of usage examples for com.google.common.base.Optional.get()
public abstract T get();
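All of the examples below follow the same contract: get() returns the contained value and throws IllegalStateException when the Optional is absent, so every call is preceded by an isPresent() check or an early exit. A minimal, self-contained sketch of that contract (the commitHash value is purely illustrative):

import com.google.common.base.Optional;

public class OptionalGetDemo {
    public static void main(String[] args) {
        Optional<String> commitHash = Optional.of("1a2b3c"); // illustrative value
        if (commitHash.isPresent()) {
            System.out.println("Git commit hash: " + commitHash.get());
        }

        Optional<String> missing = Optional.absent();
        // Calling missing.get() here would throw IllegalStateException,
        // which is why the examples below always guard get() with isPresent().
    }
}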
From source file: org.asoem.greyfish.cli.GreyfishCLIApplication.java

public static void main(final String[] args) {
    final Optional<String> commitHash = getCommitHash(GreyfishCLIApplication.class);
    if (commitHash.isPresent()) {
        logger.debug("Git Commit Hash for current Jar: %s", commitHash.get());
    }

    Runtime.getRuntime().addShutdownHook(new Thread() {
        @Override
        public void run() {
            try {
                closer.close();
            } catch (IOException e) {
                logger.warn("Exception while closing resources", e);
            }
        }
    });

    try {
        final OptionSet optionSet = optionParser.parse(args);

        if (optionSet.has(helpOptionSpec)) {
            printHelp();
            System.exit(0);
        }

        final Module commandLineModule = createCommandLineModule(optionSet);
        final RandomGenerator randomGenerator = RandomGenerators
                .threadLocalGenerator(new Supplier<RandomGenerator>() {
                    @Override
                    public RandomGenerator get() {
                        return new Well19937c();
                    }
                });
        final EventBus eventBus = new EventBus(new SubscriberExceptionHandler() {
            @Override
            public void handleException(final Throwable exception, final SubscriberExceptionContext context) {
                context.getEventBus()
                        .post(new AssertionError("The EventBus could not dispatch event: "
                                + context.getSubscriber() + " to " + context.getSubscriberMethod(),
                                exception.getCause()));
            }
        });
        final Module coreModule = new CoreModule(randomGenerator, eventBus);

        final Injector injector = Guice.createInjector(coreModule, commandLineModule);

        final ExperimentExecutionService experimentExecutionService = injector
                .getInstance(ExperimentExecutionService.class);

        if (!optionSet.has(quietOptionSpec)) {
            final ExperimentMonitorService monitorService = new ExperimentMonitorService(System.out, eventBus);

            monitorService.addListener(new Service.Listener() {
                @Override
                public void starting() {
                }

                @Override
                public void running() {
                }

                @Override
                public void stopping(final Service.State from) {
                }

                @Override
                public void terminated(final Service.State from) {
                }

                @Override
                public void failed(final Service.State from, final Throwable failure) {
                    logger.error("Monitor service failed", failure);
                }
            }, MoreExecutors.sameThreadExecutor());

            experimentExecutionService.addListener(new MonitorServiceController(monitorService),
                    MoreExecutors.sameThreadExecutor());
        }

        // start getSimulation
        experimentExecutionService.startAsync();

        // stop getSimulation on shutdown request (^C)
        Runtime.getRuntime().addShutdownHook(new Thread() {
            @Override
            public void run() {
                if (experimentExecutionService.isRunning()) {
                    experimentExecutionService.stopAsync().awaitTerminated();
                }
            }
        });

        try {
            experimentExecutionService.awaitTerminated();
        } catch (IllegalStateException e) {
            exitWithErrorMessage("Simulation execution failed", e);
        }
    } catch (OptionException e) {
        exitWithErrorMessage("Failed parsing options: ", e, true);
    } catch (Throwable e) {
        exitWithErrorMessage(String.format(
                "Exception during simulation execution: %s\n" + "Check log file for a stack trace.",
                e.getCause() != null ? e.getCause().getMessage() : e.getMessage()), e);
    }

    System.exit(0);
}
From source file: GettingStarted.java

public static void main(String[] args) throws IOException {
    out.println("=============================");

    // # step-1
    String provider = "openstack-nova";
    String identity = "your_project_name_or_id:your_auth_username";
    // NB: Do not check this file into source control with a real password in it!
    String credential = "your_auth_password";
    String authUrl = "http://controller:5000/v2.0/";

    NovaApi conn = ContextBuilder.newBuilder(provider).endpoint(authUrl).credentials(identity, credential)
            .buildApi(NovaApi.class);
    String region = conn.getConfiguredRegions().iterator().next();
    out.println("Running in region: " + region);

    // # step-2
    ImageApi imageApi = conn.getImageApi(region);
    out.println("Images in region:");
    imageApi.list().concat().forEach(image -> out.println(" " + image.getName()));

    // # step-3
    FlavorApi flavorApi = conn.getFlavorApi(region);
    out.println("Flavors in region:");
    flavorApi.list().concat().forEach(flavor -> out.println(" " + flavor.getName()));

    // # step-4
    String imageId = "778e7b2e-4e67-44eb-9565-9c920e236dfd";
    Image retrievedImage = conn.getImageApi(region).get(imageId);
    out.println(retrievedImage.toString());

    // # step-5
    String flavorId = "639b8b2a-a5a6-4aa2-8592-ca765ee7af63";
    Flavor flavor = conn.getFlavorApi(region).get(flavorId);
    out.println(flavor.toString());

    // # step-6
    String testingInstance = "testingInstance";
    ServerCreated testInstance = conn.getServerApi(region).create(testingInstance, imageId, flavorId);
    out.println("Server created. ID: " + testInstance.getId());

    // # step-7
    ServerApi serverApi = conn.getServerApi(region);
    out.println("Instances in region:");
    serverApi.list().concat().forEach(instance -> out.println(" " + instance));

    // # step-8
    if (serverApi.delete(testInstance.getId())) {
        out.println("Server " + testInstance.getId() + " being deleted, please wait.");
        ServerPredicates.awaitStatus(serverApi, Server.Status.DELETED, 600, 5).apply(testInstance.getId());
        serverApi.list().concat().forEach(instance -> out.println(" " + instance));
    } else {
        out.println("Server not deleted.");
    }

    // # step-9
    String pub_key_file = "id_rsa";
    String privateKeyFile = "~/.ssh/" + pub_key_file;

    Optional<? extends KeyPairApi> keyPairApiExtension = conn.getKeyPairApi(region);
    if (keyPairApiExtension.isPresent()) {
        out.println("Checking for existing SSH keypair...");
        KeyPairApi keyPairApi = keyPairApiExtension.get();
        boolean keyPairFound = keyPairApi.get(pub_key_file) != null;
        if (keyPairFound) {
            out.println("Keypair " + pub_key_file + " already exists.");
        } else {
            out.println("Creating keypair.");
            KeyPair keyPair = keyPairApi.create(pub_key_file);
            try {
                Files.write(Paths.get(privateKeyFile), keyPair.getPrivateKey().getBytes());
                out.println("Wrote " + privateKeyFile + ".");
                // set file permissions to 600
                Set<PosixFilePermission> permissions = new HashSet<>();
                permissions.add(PosixFilePermission.OWNER_READ);
                permissions.add(PosixFilePermission.OWNER_WRITE);
                Files.setPosixFilePermissions(Paths.get(privateKeyFile), permissions);
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        out.println("Existing keypairs:");
        keyPairApi.list().forEach(keyPair -> out.println(" " + keyPair));
    } else {
        out.println("No keypair extension present; skipping keypair checks.");
    }

    // # step-10
    String securityGroupName = "all-in-one";

    Optional<? extends SecurityGroupApi> securityGroupApiExtension = conn.getSecurityGroupApi(region);
    if (securityGroupApiExtension.isPresent()) {
        out.println("Checking security groups.");
        SecurityGroupApi securityGroupApi = securityGroupApiExtension.get();
        boolean securityGroupFound = false;
        for (SecurityGroup securityGroup : securityGroupApi.list()) {
            securityGroupFound = securityGroupFound || securityGroup.getName().equals(securityGroupName);
        }
        if (securityGroupFound) {
            out.println("Security group " + securityGroupName + " already exists.");
        } else {
            out.println("Creating " + securityGroupName + "...");
            SecurityGroup securityGroup = securityGroupApi.createWithDescription(securityGroupName,
                    securityGroupName + " network access for all-in-one application.");
            Ingress sshIngress = Ingress.builder().fromPort(22).ipProtocol(IpProtocol.TCP).toPort(22).build();
            Ingress webIngress = Ingress.builder().fromPort(80).ipProtocol(IpProtocol.TCP).toPort(80).build();
            securityGroupApi.createRuleAllowingCidrBlock(securityGroup.getId(), sshIngress, "0.0.0.0/0");
            securityGroupApi.createRuleAllowingCidrBlock(securityGroup.getId(), webIngress, "0.0.0.0/0");
        }
        out.println("Existing Security Groups: ");
        for (SecurityGroup thisSecurityGroup : securityGroupApi.list()) {
            out.println(" " + thisSecurityGroup);
            thisSecurityGroup.getRules().forEach(rule -> out.println("  " + rule));
        }
    } else {
        out.println("No security group extension present; skipping security group checks.");
    }

    // # step-11
    String ex_userdata = "#!/usr/bin/env bash\n"
            + " curl -L -s https://git.openstack.org/cgit/openstack/faafo/plain/contrib/install.sh | bash -s -- \\\n"
            + " -i faafo -i messaging -r api -r worker -r demo\n";

    // # step-12
    out.println("Checking for existing instance...");
    String instanceName = "all-in-one";
    Server allInOneInstance = null;
    for (Server thisInstance : serverApi.listInDetail().concat()) {
        if (thisInstance.getName().equals(instanceName)) {
            allInOneInstance = thisInstance;
        }
    }
    if (allInOneInstance != null) {
        out.println("Instance " + instanceName + " already exists. Skipping creation.");
    } else {
        out.println("Creating instance...");
        CreateServerOptions allInOneOptions = CreateServerOptions.Builder.keyPairName(pub_key_file)
                .securityGroupNames(securityGroupName)
                // If not running in a single-tenant network this is where you add your network...
                // .networks("79e8f822-99e1-436f-a62c-66e8d3706940")
                .userData(ex_userdata.getBytes());
        ServerCreated allInOneInstanceCreated = serverApi.create(instanceName, imageId, flavorId, allInOneOptions);
        ServerPredicates.awaitActive(serverApi).apply(allInOneInstanceCreated.getId());
        allInOneInstance = serverApi.get(allInOneInstanceCreated.getId());
        out.println("Instance created: " + allInOneInstance.getId());
    }
    out.println("Existing instances:");
    serverApi.listInDetail().concat().forEach(instance -> out.println(" " + instance.getName()));

    // # step-13
    out.println("Checking for unused floating IP's...");
    FloatingIP unusedFloatingIP = null;
    if (conn.getFloatingIPApi(region).isPresent()) {
        FloatingIPApi floatingIPApi = conn.getFloatingIPApi(region).get();
        List<FloatingIP> freeIP = floatingIPApi.list().toList().stream()
                .filter(floatingIp -> floatingIp.getInstanceId() == null).collect(Collectors.toList());
        if (freeIP.size() > 0) {
            out.println("The following IPs are available:");
            freeIP.forEach(floatingIP -> out.println(" " + floatingIP.getIp()));
            unusedFloatingIP = freeIP.get(0);
        } else {
            out.println("Creating new floating IP....");
            unusedFloatingIP = floatingIPApi.create();
        }
        if (unusedFloatingIP != null) {
            out.println("Using: " + unusedFloatingIP.getIp());
        }
    } else {
        out.println("No floating ip extension present; skipping floating ip creation.");
    }

    // # step-14
    out.println(allInOneInstance.getAddresses());
    if (allInOneInstance.getAccessIPv4() != null) {
        out.println("Public IP already assigned. Skipping attachment.");
    } else if (unusedFloatingIP != null) {
        out.println("Attaching new IP, please wait...");
        // api must be present if we have managed to allocate a floating IP
        conn.getFloatingIPApi(region).get().addToServer(unusedFloatingIP.getIp(), allInOneInstance.getId());
        // This operation takes some indeterminate amount of time; don't move on until it's done.
        while (allInOneInstance.getAccessIPv4() == null) {
            // Objects are not updated "live" so keep checking to make sure it's been added
            try {
                TimeUnit.SECONDS.sleep(1);
            } catch (InterruptedException ex) {
                out.println("Awakened prematurely.");
            }
            allInOneInstance = serverApi.get(allInOneInstance.getId());
        }
    }

    // # step-15
    out.print("Be patient: all going well, the Fractals app will soon be available at http://"
            + allInOneInstance.getAccessIPv4());

    // # step-16
}
From source file: io.urmia.st.Main.java

public static void main(String[] args) throws Exception {
    final int port;
    final String base;

    boolean autoRegister = ArgumentParseUtil.isAutoRegister(args);
    String zkURL = ArgumentParseUtil.getZooKeeperURL(args);

    log.info("starting with zk at: {}, auto register: {}", zkURL, autoRegister);

    ns = new ZkNamingServiceImpl(zkURL, AZ);

    Optional<ServiceInstance<NodeType>> meOpt = ns.whoAmI(NodeType.ODS, autoRegister);

    if (!meOpt.isPresent()) {
        System.err.println("unable to find my instance. use auto register or cli-admin to add my node");
        System.exit(1);
        return;
    }

    Runtime.getRuntime().addShutdownHook(new ShutdownHook());

    EventLoopGroup bossGroup = new NioEventLoopGroup(/*1*/);
    //EventLoopGroup bossGroup = new EpollEventLoopGroup(1);
    //EventLoopGroup workerGroup = new NioEventLoopGroup();

    try {
        me = meOpt.get();
        log.info("my service instance: {}", me);

        ns.register(me);

        base = me.getUriSpec().getParts().get(0).getValue();
        port = me.getPort();

        if (!(new File(base).isDirectory())) {
            System.err.println("base is not a directory: " + base);
            return;
        }

        int nHeapArena = 1;
        int nDirectArena = 1;
        int pageSize = /*8192*/ 4096;
        int maxOrder = 1;

        // http://normanmaurer.me/presentations/2014-facebook-eng-netty/slides.html#14.0
        ServerBootstrap b = new ServerBootstrap();
        b.group(bossGroup).channel(NioServerSocketChannel.class).childOption(ChannelOption.AUTO_READ, false)
                .childOption(ChannelOption.ALLOCATOR,
                        new PooledByteBufAllocator(true, nHeapArena, nDirectArena, pageSize, maxOrder))
                .childHandler(new HttpUploadServerInitializer(base));

        Channel ch = b.bind(port).sync().channel();

        log.info("object storage Server (ODS) at port: {}", port);
        System.err.println("starting ODS " + me.getId() + " on port: " + port + ", base: " + base);

        ch.closeFuture().sync();
    } finally {
        bossGroup.shutdownGracefully();
        //workerGroup.shutdownGracefully();
    }
}
From source file: io.urmia.api.Main.java

public static void main(String[] args) throws Exception {
    boolean autoRegister = ArgumentParseUtil.isAutoRegister(args);
    String zkURL = ArgumentParseUtil.getZooKeeperURL(args);

    log.info("starting with zk at: {}, auto register: {}", zkURL, autoRegister);

    ns = new ZkNamingServiceImpl(zkURL, AZ);

    Optional<ServiceInstance<NodeType>> meOpt = ns.whoAmI(NodeType.MDS, autoRegister);

    if (!meOpt.isPresent()) {
        System.err.println("unable to find my instance. use auto register or cli-admin to add my node");
        System.exit(1);
        return;
    }

    Runtime.getRuntime().addShutdownHook(new ShutdownHook());

    uuid = new RandomUuidImpl();

    //Properties properties = parseArguments(args);

    EventLoopGroup bossGroup = new NioEventLoopGroup(/*1*/);
    EventLoopGroup workerGroup = new NioEventLoopGroup();

    try {
        me = meOpt.get();
        log.info("my service instance: {}", me);

        BoneCPConfig boneCPConfig = getBoneCPConfig(ns);

        ns.register(me);

        int port = me.getPort();

        JdbcPool pool = new JdbcPool.BoneCPJdbcPool(boneCPConfig);
        MetadataRepository repository = new PsqlMetadataRepositoryImpl(pool);
        mds = new DefaultMetadataServiceImpl(repository);

        // http://normanmaurer.me/presentations/2014-facebook-eng-netty/slides.html#14.0
        ServerBootstrap b = new ServerBootstrap();
        b.group(bossGroup, workerGroup).channel(NioServerSocketChannel.class)
                .childOption(ChannelOption.AUTO_READ, false)
                .childOption(ChannelOption.ALLOCATOR, new PooledByteBufAllocator(true))
                .childHandler(new HttpUploadServerInitializer());

        Channel ch = b.bind(port).sync().channel();

        log.info("object metadata API server (MDS) at port: {}", port);

        ch.closeFuture().sync();
    } finally {
        ns.deregister(me);
        bossGroup.shutdownGracefully();
        workerGroup.shutdownGracefully();
    }
}
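Both urmia Main classes above use the same idiom: validate the Optional once, exit the process if it is absent, and treat every later get() as safe. The fragment below isolates that idiom; ServiceInstance, NodeType, ns, and autoRegister are the types and fields from the examples above, not new definitions.

// Early-exit guard: after this block, meOpt is known to be present.
Optional<ServiceInstance<NodeType>> meOpt = ns.whoAmI(NodeType.ODS, autoRegister);
if (!meOpt.isPresent()) {
    System.err.println("unable to find my instance. use auto register or cli-admin to add my node");
    System.exit(1);
    return; // unreachable after exit, kept for readability
}
ServiceInstance<NodeType> me = meOpt.get(); // cannot throw: absence was handled above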
From source file: benchmarkio.controlcenter.LaunchRocket.java

public static void main(final String[] args) throws Exception {
    // create the parser
    final CommandLineParser parser = new BasicParser();

    // parse the command line arguments
    final CommandLine cmd = parser.parse(options, args);

    if (cmd.hasOption("u")) {
        displayHelp();
    }

    final String host = cmd.getOptionValue("host");
    final int port = Integer.parseInt(cmd.getOptionValue("port"));
    final BrokerType brokerType = BrokerType.valueOf(cmd.getOptionValue("broker-type"));
    final int numConsumers = Integer.parseInt(cmd.getOptionValue("num-consumers"));
    final int numProducers = Integer.parseInt(cmd.getOptionValue("num-producers"));
    final int totalNumberOfMessages = Integer.parseInt(cmd.getOptionValue("total-number-of-messages"));
    final double msgSizeInKB = Double.parseDouble(cmd.getOptionValue("msg-size-in-kb"));

    // Optional options
    final Optional<String> optionalBenchmarkType = Optional.fromNullable(cmd.getOptionValue("benchmark-type"));
    final Optional<String> optionalDurable = Optional.fromNullable(cmd.getOptionValue("durable"));

    // Kafka Specific
    final Optional<String> optionalZookeeper = Optional.fromNullable(cmd.getOptionValue("zookeeper"));
    Optional<String> optionalKafkaProducerType = Optional
            .fromNullable(cmd.getOptionValue("kafka-producer-type"));

    BenchmarkType benchmarkType;
    if (optionalBenchmarkType.isPresent()) {
        benchmarkType = BenchmarkType.valueOf(optionalBenchmarkType.get());
    } else {
        log.info("Benchmark type was not specified, defaulting to: {}", BenchmarkType.PRODUCER_AND_CONSUMER);
        benchmarkType = BenchmarkType.PRODUCER_AND_CONSUMER;
    }

    boolean durable = false;
    if (optionalDurable.isPresent()) {
        durable = Boolean.valueOf(optionalDurable.get());
    } else {
        log.info("Durable parameter was not specified, defaulting to: FALSE");
    }

    if (brokerType == BrokerType.KAFKA) {
        if (!optionalZookeeper.isPresent()) {
            log.error("zookeeper is missing, it is a required property for KAFKA broker");
            System.exit(0);
        }

        if (!optionalKafkaProducerType.isPresent()) {
            log.info("kafka-producer-type is not specified, defaulting to sync");
            optionalKafkaProducerType = Optional.of("sync");
        } else if (!optionalKafkaProducerType.get().equals("sync")
                && !optionalKafkaProducerType.get().equals("async")) {
            log.warn("kafka-producer-type is not one of the accepted sync | async values, defaulting to sync");
            optionalKafkaProducerType = Optional.of("sync");
        }
    }

    log.info("destination (topic or queue): {}", Consts.DESTINATION_NAME);
    log.info("host: {}", host);
    log.info("port: {}", port);
    log.info("broker-type: {}", brokerType);
    log.info("benchmark-type: {}", benchmarkType);
    log.info("durable: {}", durable);
    log.info("num-consumers: {}", numConsumers);
    log.info("num-producers: {}", numProducers);
    log.info("total-number-of-messages: {}", totalNumberOfMessages);
    log.info("msg-size-in-kb: {}", msgSizeInKB);

    if (brokerType == BrokerType.KAFKA) {
        log.info("zookeeper: {}", optionalZookeeper.get());
        log.info("kafka-producer-type: {}", optionalKafkaProducerType.get());
    }

    LaunchRocket.start(brokerType, benchmarkType, durable, host, port, numConsumers, numProducers,
            totalNumberOfMessages, msgSizeInKB, optionalZookeeper, optionalKafkaProducerType);

    System.exit(0);
}
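The isPresent()/get()-else-default blocks above can also be written with Guava's Optional.or(defaultValue), which returns the contained value or the supplied fallback without an explicit guard. A small self-contained sketch of the equivalence; the raw values here are hypothetical stand-ins for cmd.getOptionValue(...) results:

import com.google.common.base.Optional;

public class OptionalDefaultsDemo {
    public static void main(String[] args) {
        // Hypothetical raw CLI values; in LaunchRocket these come from cmd.getOptionValue(...).
        String rawDurable = null;
        String rawProducerType = "async";

        // Equivalent to the isPresent()/get()-else-default blocks above, using or():
        boolean durable = Boolean.valueOf(Optional.fromNullable(rawDurable).or("false"));
        String kafkaProducerType = Optional.fromNullable(rawProducerType).or("sync");

        System.out.println("durable: " + durable + ", kafka-producer-type: " + kafkaProducerType);
    }
}

The explicit branches in LaunchRocket have one advantage over or(): they can log which default was applied.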
From source file: com.github.rinde.jaamas17.PerformExperiment.java

public static void main(String[] args) throws IOException {
    System.out.println(System.getProperty("java.vm.name") + ", " + System.getProperty("java.vm.vendor") + ", "
            + System.getProperty("java.vm.version") + " (runtime version: "
            + System.getProperty("java.runtime.version") + ")");
    System.out.println(System.getProperty("os.name") + " " + System.getProperty("os.version") + " "
            + System.getProperty("os.arch"));
    checkArgument(System.getProperty("java.vm.name").contains("Server"),
            "Experiments should be run in a JVM in server mode.");
    checkArgument(args.length > 2 && args[0].equals("-exp"),
            "The type of experiment that should be run must be specified as follows: "
                    + "\'-exp vanlon15|gendreau|timedeviation\', this option must be the "
                    + "first in the list.");

    final ExperimentType experimentType = ExperimentType.find(args[1]);
    System.out.println(experimentType);
    final List<Configurations> configs = Configurations.parse(args[2]);
    System.out.println(configs);

    final String[] expArgs = new String[args.length - 3];
    System.arraycopy(args, 3, expArgs, 0, args.length - 3);

    final Gendreau06ObjectiveFunction objFunc = experimentType.getObjectiveFunction();

    final OptaplannerSolvers.Builder opFfdFactory = OptaplannerSolvers.builder()
            .withSolverFromBenchmark("com/github/rinde/jaamas17/firstFitDecreasingBenchmark.xml")
            .withObjectiveFunction(objFunc);
    final OptaplannerSolvers.Builder opCiFactory = OptaplannerSolvers.builder()
            .withSolverFromBenchmark("com/github/rinde/jaamas17/cheapestInsertionBenchmark.xml")
            .withObjectiveFunction(objFunc);

    final long time = System.currentTimeMillis();
    final Experiment.Builder experimentBuilder = Experiment.builder().computeLocal().withRandomSeed(123)
            .withThreads((int) Math.floor((Runtime.getRuntime().availableProcessors() - 1) / 2d)).repeat(1)
            .withWarmup(30000).addResultListener(new CommandLineProgress(System.out))
            .usePostProcessor(new JaamasPostProcessor(objFunc));

    experimentType.apply(experimentBuilder);

    for (final Configurations config : configs) {
        switch (config) {
        case OPTAPLANNER_TUNING:
            experimentBuilder.addConfigurations(optaplannerTuningConfigs(opFfdFactory, opCiFactory, objFunc));
            break;
        case OPTAPLANNER_SENSITIVITY:
            experimentBuilder
                    .addConfigurations(optaplannerSensitivityConfigs(opFfdFactory, opCiFactory, objFunc));
            break;
        case MAS_TUNING_B_MS:
            experimentBuilder.addConfigurations(masTuning1BmsConfigs(opFfdFactory, objFunc));
            break;
        case MAS_TUNING_RP_AND_B_MS:
            experimentBuilder.addConfigurations(masTuning2RPandBmsConfigs(opFfdFactory, objFunc));
            break;
        case MAS_TUNING_3_REAUCT:
            experimentBuilder.addConfigurations(masTuning3ReauctConfigs(opFfdFactory, objFunc));
            break;
        case RT_CIH_OPT2_SOLVERS:
            experimentBuilder.addConfigurations(rtCihOpt2Solvers(objFunc));
            break;
        case MAIN_CONFIGS:
            experimentBuilder.addConfigurations(mainConfigs(opFfdFactory, objFunc));
            break;
        case THREADS_TEST:
            experimentBuilder.addConfigurations(threadsConfigs(opFfdFactory, objFunc));
            break;
        }
    }

    experimentBuilder.showGui(View.builder().withAutoPlay().withAutoClose().withSpeedUp(128)
            // .withFullScreen()
            .withTitleAppendix("JAAMAS 2017 Experiment").with(RoadUserRenderer.builder().withToStringLabel())
            .with(RouteRenderer.builder()).with(PDPModelRenderer.builder())
            .with(PlaneRoadModelRenderer.builder()).with(AuctionPanel.builder()).with(RoutePanel.builder())
            .with(TimeLinePanel.builder()).with(RtSolverPanel.builder()).withResolution(1280, 1024));

    final Optional<ExperimentResults> results = experimentBuilder.perform(System.out, expArgs);
    final long duration = System.currentTimeMillis() - time;
    if (!results.isPresent()) {
        return;
    }
    System.out.println("Done, computed " + results.get().getResults().size() + " simulations in "
            + duration / 1000d + "s");
}
From source file: com.arpnetworking.clusteraggregator.Main.java

/**
 * Entry point.
 *
 * @param args command line arguments
 */
public static void main(final String[] args) {
    Thread.setDefaultUncaughtExceptionHandler((thread, throwable) -> {
        LOGGER.error().setMessage("Unhandled exception!").setThrowable(throwable).log();
    });
    Thread.currentThread().setUncaughtExceptionHandler((thread, throwable) -> {
        LOGGER.error().setMessage("Unhandled exception!").setThrowable(throwable).log();
    });

    LOGGER.info().setMessage("Launching cluster-aggregator").log();

    Runtime.getRuntime().addShutdownHook(SHUTDOWN_THREAD);

    if (args.length != 1) {
        throw new RuntimeException("No configuration file specified");
    }

    LOGGER.debug().setMessage("Loading configuration from file").addData("file", args[0]).log();

    Optional<DynamicConfiguration> configuration = Optional.absent();
    Optional<Configurator<Main, ClusterAggregatorConfiguration>> configurator = Optional.absent();
    try {
        final File configurationFile = new File(args[0]);
        configurator = Optional.of(new Configurator<>(Main::new, ClusterAggregatorConfiguration.class));
        final ObjectMapper objectMapper = ClusterAggregatorConfiguration.createObjectMapper();
        configuration = Optional.of(new DynamicConfiguration.Builder().setObjectMapper(objectMapper)
                .addSourceBuilder(
                        new JsonNodeFileSource.Builder().setObjectMapper(objectMapper).setFile(configurationFile))
                .addTrigger(new FileTrigger.Builder().setFile(configurationFile).build())
                .addListener(configurator.get()).build());

        configuration.get().launch();

        // Wait for application shutdown
        SHUTDOWN_SEMAPHORE.acquire();
    } catch (final InterruptedException e) {
        throw Throwables.propagate(e);
    } finally {
        if (configurator.isPresent()) {
            configurator.get().shutdown();
        }
        if (configuration.isPresent()) {
            configuration.get().shutdown();
        }
        // Notify the shutdown that we're done
        SHUTDOWN_SEMAPHORE.release();
    }
}
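The pattern above starts each Optional at Optional.absent(), replaces it with Optional.of(...) once the resource is actually constructed, and then guards cleanup in the finally block with isPresent(). That keeps a failure during construction from triggering a second failure during shutdown. A minimal self-contained sketch of the same idea, with a hypothetical Resource type standing in for Configurator/DynamicConfiguration:

import com.google.common.base.Optional;

public class GuardedCleanupDemo {
    // Hypothetical resource standing in for the Configurator/DynamicConfiguration above.
    static final class Resource {
        void shutdown() { System.out.println("shut down"); }
    }

    public static void main(String[] args) {
        Optional<Resource> resource = Optional.absent();
        try {
            resource = Optional.of(new Resource());
            // ... use resource.get() ...
        } finally {
            // get() is only called when construction actually happened, so a failure
            // before Optional.of(...) cannot cause a second exception here.
            if (resource.isPresent()) {
                resource.get().shutdown();
            }
        }
    }
}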
From source file: bear.core.BearMain.java

/**
 * -VbearMain.appConfigDir=src/main/groovy/examples -VbearMain.buildDir=.bear/classes
 * -VbearMain.script=dumpSampleGrid -VbearMain.projectClass=SecureSocialDemoProject
 * -VbearMain.propertiesFile=.bear/test.properties
 */
public static void main(String[] args) throws Exception {
    int i = ArrayUtils.indexOf(args, "--log-level");
    if (i != -1) {
        LoggingBooter.changeLogLevel(LogManager.ROOT_LOGGER_NAME, Level.toLevel(args[i + 1]));
    }

    i = ArrayUtils.indexOf(args, "-q");
    if (i != -1) {
        LoggingBooter.changeLogLevel(LogManager.ROOT_LOGGER_NAME, Level.WARN);
    }

    GlobalContext global = GlobalContext.getInstance();

    BearMain bearMain = null;

    try {
        bearMain = new BearMain(global, getCompilerManager(), args);
    } catch (Exception e) {
        if (e.getClass().getSimpleName().equals("MissingRequiredOptionException")) {
            System.out.println(e.getMessage());
        } else {
            Throwables.getRootCause(e).printStackTrace();
        }
        System.exit(-1);
    }

    if (bearMain.checkHelpAndVersion()) {
        return;
    }

    AppOptions2 options2 = bearMain.options;

    if (options2.has(AppOptions2.UNPACK_DEMOS)) {
        String filesAsText = ProjectGenerator.readResource("/demoFiles.txt");

        int count = 0;
        for (String resource : filesAsText.split("::")) {
            File dest = new File(BEAR_DIR + resource);
            System.out.printf("copying %s to %s...%n", resource, dest);
            writeStringToFile(dest, ProjectGenerator.readResource(resource));
            count++;
        }

        System.out.printf("extracted %d files%n", count);
        return;
    }

    if (options2.has(AppOptions2.CREATE_NEW)) {
        String dashedTitle = options2.get(AppOptions2.CREATE_NEW);
        String user = options2.get(AppOptions2.USER);
        String pass = options2.get(AppOptions2.PASSWORD);

        List<String> hosts = options2.getList(AppOptions2.HOSTS);

        List<String> template;
        if (options2.has(AppOptions2.TEMPLATE)) {
            template = options2.getList(AppOptions2.TEMPLATE);
        } else {
            template = emptyList();
        }

        ProjectGenerator g = new ProjectGenerator(dashedTitle, user, pass, hosts, template);

        if (options2.has(AppOptions2.ORACLE_USER)) {
            g.oracleUser = options2.get(AppOptions2.ORACLE_USER);
        }
        if (options2.has(AppOptions2.ORACLE_PASSWORD)) {
            g.oraclePassword = options2.get(AppOptions2.ORACLE_PASSWORD);
        }

        File projectFile = new File(BEAR_DIR, g.getProjectTitle() + ".groovy");
        File pomFile = new File(BEAR_DIR, "pom.xml");

        writeStringToFile(projectFile, g.processTemplate("TemplateProject.template"));
        writeStringToFile(new File(BEAR_DIR, dashedTitle + ".properties"),
                g.processTemplate("project-properties.template"));
        writeStringToFile(new File(BEAR_DIR, "demos.properties"),
                g.processTemplate("project-properties.template"));
        writeStringToFile(new File(BEAR_DIR, "bear-fx.properties"),
                g.processTemplate("bear-fx.properties.template"));
        writeStringToFile(pomFile, g.generatePom(dashedTitle));

        System.out.printf("Created project file: %s%n", projectFile.getPath());
        System.out.printf("Created maven pom: %s%n", pomFile.getPath());

        System.out.println("\nProject files have been created. You may now: "
                + "\n a) Run `bear " + g.getShortName() + ".ls` to quick-test your minimal setup"
                + "\n b) Import the project to IDE or run smoke tests, find more details at the project wiki: https://github.com/chaschev/bear/wiki/.");

        return;
    }

    Bear bear = global.bear;

    if (options2.has(AppOptions2.QUIET)) {
        global.put(bear.quiet, true);
        LoggingBooter.changeLogLevel(LogManager.ROOT_LOGGER_NAME, Level.WARN);
    }

    if (options2.has(AppOptions2.USE_UI)) {
        global.put(bear.useUI, true);
    }

    if (options2.has(AppOptions2.NO_UI)) {
        global.put(bear.useUI, false);
    }

    List<?> list = options2.getOptionSet().nonOptionArguments();

    if (list.size() > 1) {
        throw new IllegalArgumentException("too many arguments: " + list + ", "
                + "please specify an invoke line, project.method(arg1, arg2)");
    }

    if (list.isEmpty()) {
        throw new UnsupportedOperationException("todo implement running a single project");
    }

    String invokeLine = (String) list.get(0);

    String projectName;
    String method;

    if (invokeLine.contains(".")) {
        projectName = StringUtils.substringBefore(invokeLine, ".");
        method = StringUtils.substringAfter(invokeLine, ".");
    } else {
        projectName = invokeLine;
        method = null;
    }

    if (method == null || method.isEmpty())
        method = "deploy()";
    if (!method.contains("("))
        method += "()";

    Optional<CompiledEntry<? extends BearProject>> optional = bearMain.compileManager.findProject(projectName);

    if (!optional.isPresent()) {
        throw new IllegalArgumentException("project was not found: " + projectName + ", loaded classes: \n"
                + Joiner.on("\n").join(bearMain.compileManager.findProjects())
                + ", searched in: " + bearMain.compileManager.getSourceDirs() + ", ");
    }

    BearProject project = OpenBean.newInstance(optional.get().aClass).injectMain(bearMain);

    GroovyShell shell = new GroovyShell();
    shell.setVariable("project", project);
    shell.evaluate("project." + method);
}
From source file: org.apache.rya.export.client.MergeDriverClient.java

public static void main(final String[] args) throws ParseException, MergeConfigurationException,
        UnknownHostException, MergerException, java.text.ParseException, SailException, AccumuloException,
        AccumuloSecurityException, InferenceEngineException, RepositoryException, MalformedQueryException,
        UpdateExecutionException {
    final String log4jConfiguration = System.getProperties().getProperty("log4j.configuration");
    if (StringUtils.isNotBlank(log4jConfiguration)) {
        final String parsedConfiguration = StringUtils.removeStart(log4jConfiguration, "file:");
        final File configFile = new File(parsedConfiguration);
        if (configFile.exists()) {
            DOMConfigurator.configure(parsedConfiguration);
        } else {
            BasicConfigurator.configure();
        }
    }

    final MergeConfigurationCLI config = new MergeConfigurationCLI(args);
    try {
        configuration = config.createConfiguration();
    } catch (final MergeConfigurationException e) {
        LOG.error("Configuration failed.", e);
    }

    final boolean useTimeSync = configuration.getUseNtpServer();
    Optional<Long> offset = Optional.absent();
    if (useTimeSync) {
        final String tomcat = configuration.getChildTomcatUrl();
        final String ntpHost = configuration.getNtpServerHost();
        try {
            offset = Optional
                    .<Long>fromNullable(TimeUtils.getNtpServerAndMachineTimeDifference(ntpHost, tomcat));
        } catch (final IOException e) {
            LOG.error("Unable to get time difference between time server: " + ntpHost + " and the server: "
                    + tomcat, e);
        }
    }

    final StatementStoreFactory storeFactory = new StatementStoreFactory(configuration);
    try {
        final RyaStatementStore parentStore = storeFactory.getParentStatementStore();
        final RyaStatementStore childStore = storeFactory.getChildStatementStore();

        LOG.info("Starting Merge Tool");
        if (configuration.getParentDBType() == ACCUMULO && configuration.getChildDBType() == ACCUMULO) {
            final AccumuloRyaStatementStore childAStore = (AccumuloRyaStatementStore) childStore;
            final AccumuloRyaStatementStore parentAStore = (AccumuloRyaStatementStore) parentStore;

            //do map reduce merging.
            //TODO: Run Merger
        } else {
            if (configuration.getMergePolicy() == TIMESTAMP) {
                final TimestampPolicyMergeConfiguration timeConfig = (TimestampPolicyMergeConfiguration) configuration;
                final Long timeOffset;
                if (offset.isPresent()) {
                    timeOffset = offset.get();
                } else {
                    timeOffset = 0L;
                }
                final MemoryTimeMerger merger = new MemoryTimeMerger(parentStore, childStore,
                        new VisibilityStatementMerger(), timeConfig.getToolStartTime(),
                        configuration.getParentRyaInstanceName(), timeOffset);
                merger.runJob();
            }
        }
    } catch (final Exception e) {
        LOG.error("Something went wrong creating a Rya Statement Store connection.", e);
    }

    Thread.setDefaultUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler() {
        @Override
        public void uncaughtException(final Thread thread, final Throwable throwable) {
            LOG.error("Uncaught exception in " + thread.getName(), throwable);
        }
    });

    LOG.info("Finished running Merge Tool");
    System.exit(1);
}
From source file: ua.utility.kfsdbupgrade.App.java

/**
 * Main program entry point. A single argument is expected: the path to the
 * <code>kfsdbupgrade.properties</code> properties file. An optional second
 * argument of "<code>ingestWorkflow</code>" indicates that the ingest workflow
 * code path should be followed instead of the database upgrade code path.
 *
 * @param args
 */
public static void main(final String[] args) {
    try {
        Optional<String> commandLinePropertiesFile = getArg(args, 0);
        Optional<String> workflowIndicator = getArg(args, 1);
        App app = new App(commandLinePropertiesFile);
        if (isIngestWorkflow(workflowIndicator)) {
            app.doWorkflow(commandLinePropertiesFile.get());
        } else {
            app.doUpgrade();
        }
        System.exit(0);
    } catch (Throwable e) {
        e.printStackTrace();
        LOGGER.fatal(e);
        System.exit(1);
    }
}
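One detail worth noting in the example above: along the ingestWorkflow branch, commandLinePropertiesFile.get() is called without an isPresent() check, so if getArg(args, 0) returns an absent Optional (assuming that helper returns absent when the argument is missing), the call surfaces as an IllegalStateException caught by the generic Throwable handler rather than as a usage message. A guarded variant might look like the sketch below; getArg here is a hypothetical stand-in for the class's own helper:

import com.google.common.base.Optional;

public class ArgsDemo {
    // Hypothetical stand-in for App.getArg(args, index): absent when the index is out of range.
    static Optional<String> getArg(String[] args, int index) {
        return index < args.length ? Optional.of(args[index]) : Optional.<String>absent();
    }

    public static void main(String[] args) {
        Optional<String> propertiesFile = getArg(args, 0);
        if (!propertiesFile.isPresent()) {
            System.err.println("usage: App <path-to-kfsdbupgrade.properties> [ingestWorkflow]");
            System.exit(1);
        }
        System.out.println("properties file: " + propertiesFile.get()); // guarded above
    }
}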