List of usage examples for java.util.Arrays.toString
public static String toString(Object[] a)
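Arrays.toString(Object[] a) returns a string listing the array's elements, such as "[a, b, c]", and returns the string "null" for a null array, which is why the examples below use it to log command-line arguments, query results, and other arrays. A minimal, self-contained sketch (the class and variable names are illustrative only):

import java.util.Arrays;

public class ArraysToStringDemo {
    public static void main(String[] args) {
        String[] values = { "alpha", "beta", "gamma" };
        // Prints: [alpha, beta, gamma]
        System.out.println(Arrays.toString(values));

        // A null reference yields the string "null" rather than throwing
        System.out.println(Arrays.toString((Object[]) null));

        // Typical use in the examples below: logging the raw program arguments
        System.out.println("Current arguments: " + Arrays.toString(args));
    }
}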
From source file: fr.inria.atlanmod.kyanos.benchmarks.KyanosGraphQueryThrownExceptionsPerPackage.java

public static void main(String[] args) {
    Options options = new Options();

    Option inputOpt = OptionBuilder.create(IN);
    inputOpt.setArgName("INPUT");
    inputOpt.setDescription("Input Kyanos resource directory");
    inputOpt.setArgs(1);
    inputOpt.setRequired(true);

    Option inClassOpt = OptionBuilder.create(EPACKAGE_CLASS);
    inClassOpt.setArgName("CLASS");
    inClassOpt.setDescription("FQN of EPackage implementation class");
    inClassOpt.setArgs(1);
    inClassOpt.setRequired(true);

    Option optFileOpt = OptionBuilder.create(OPTIONS_FILE);
    optFileOpt.setArgName("FILE");
    optFileOpt.setDescription("Properties file holding the options to be used in the Kyanos Resource");
    optFileOpt.setArgs(1);

    options.addOption(inputOpt);
    options.addOption(inClassOpt);
    options.addOption(optFileOpt);

    CommandLineParser parser = new PosixParser();

    try {
        PersistenceBackendFactoryRegistry.getFactories().put(NeoBlueprintsURI.NEO_GRAPH_SCHEME,
                new BlueprintsPersistenceBackendFactory());

        CommandLine commandLine = parser.parse(options, args);

        URI uri = NeoBlueprintsURI.createNeoGraphURI(new File(commandLine.getOptionValue(IN)));

        Class<?> inClazz = KyanosGraphQueryThrownExceptionsPerPackage.class.getClassLoader()
                .loadClass(commandLine.getOptionValue(EPACKAGE_CLASS));
        inClazz.getMethod("init").invoke(null);

        ResourceSet resourceSet = new ResourceSetImpl();
        resourceSet.getResourceFactoryRegistry().getProtocolToFactoryMap()
                .put(NeoBlueprintsURI.NEO_GRAPH_SCHEME, PersistentResourceFactory.eINSTANCE);

        Resource resource = resourceSet.createResource(uri);

        Map<String, Object> loadOpts = new HashMap<String, Object>();
        if (commandLine.hasOption(OPTIONS_FILE)) {
            Properties properties = new Properties();
            properties.load(new FileInputStream(new File(commandLine.getOptionValue(OPTIONS_FILE))));
            for (final Entry<Object, Object> entry : properties.entrySet()) {
                loadOpts.put((String) entry.getKey(), (String) entry.getValue());
            }
        }
        resource.load(loadOpts);

        {
            LOG.log(Level.INFO, "Start query");
            long begin = System.currentTimeMillis();
            HashMap<String, EList<TypeAccess>> list = JavaQueries.getThrownExceptionsPerPackage(resource);
            long end = System.currentTimeMillis();
            LOG.log(Level.INFO, "End query");
            LOG.log(Level.INFO, MessageFormat.format("Query result contains {0} elements", list.size()));
            LOG.log(Level.INFO, MessageFormat.format("Time spent: {0}", MessageUtil.formatMillis(end - begin)));
        }

        if (resource instanceof PersistentResourceImpl) {
            PersistentResourceImpl.shutdownWithoutUnload((PersistentResourceImpl) resource);
        } else {
            resource.unload();
        }
    } catch (ParseException e) {
        MessageUtil.showError(e.toString());
        MessageUtil.showError("Current arguments: " + Arrays.toString(args));
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("java -jar <this-file.jar>", options, true);
    } catch (Throwable e) {
        MessageUtil.showError(e.toString());
    }
}
From source file: fr.inria.atlanmod.kyanos.benchmarks.KyanosGraphQueryInvisibleMethodDeclarations.java

public static void main(String[] args) {
    Options options = new Options();

    Option inputOpt = OptionBuilder.create(IN);
    inputOpt.setArgName("INPUT");
    inputOpt.setDescription("Input Kyanos resource directory");
    inputOpt.setArgs(1);
    inputOpt.setRequired(true);

    Option inClassOpt = OptionBuilder.create(EPACKAGE_CLASS);
    inClassOpt.setArgName("CLASS");
    inClassOpt.setDescription("FQN of EPackage implementation class");
    inClassOpt.setArgs(1);
    inClassOpt.setRequired(true);

    Option optFileOpt = OptionBuilder.create(OPTIONS_FILE);
    optFileOpt.setArgName("FILE");
    optFileOpt.setDescription("Properties file holding the options to be used in the Kyanos Resource");
    optFileOpt.setArgs(1);

    options.addOption(inputOpt);
    options.addOption(inClassOpt);
    options.addOption(optFileOpt);

    CommandLineParser parser = new PosixParser();

    try {
        PersistenceBackendFactoryRegistry.getFactories().put(NeoBlueprintsURI.NEO_GRAPH_SCHEME,
                new BlueprintsPersistenceBackendFactory());

        CommandLine commandLine = parser.parse(options, args);

        URI uri = NeoBlueprintsURI.createNeoGraphURI(new File(commandLine.getOptionValue(IN)));

        Class<?> inClazz = KyanosGraphQueryInvisibleMethodDeclarations.class.getClassLoader()
                .loadClass(commandLine.getOptionValue(EPACKAGE_CLASS));
        inClazz.getMethod("init").invoke(null);

        ResourceSet resourceSet = new ResourceSetImpl();
        resourceSet.getResourceFactoryRegistry().getProtocolToFactoryMap()
                .put(NeoBlueprintsURI.NEO_GRAPH_SCHEME, PersistentResourceFactory.eINSTANCE);

        Resource resource = resourceSet.createResource(uri);

        Map<String, Object> loadOpts = new HashMap<String, Object>();
        if (commandLine.hasOption(OPTIONS_FILE)) {
            Properties properties = new Properties();
            properties.load(new FileInputStream(new File(commandLine.getOptionValue(OPTIONS_FILE))));
            for (final Entry<Object, Object> entry : properties.entrySet()) {
                loadOpts.put((String) entry.getKey(), (String) entry.getValue());
            }
        }
        resource.load(loadOpts);

        {
            LOG.log(Level.INFO, "Start query");
            long begin = System.currentTimeMillis();
            EList<MethodDeclaration> list = JavaQueries.getInvisibleMethodDeclarations(resource);
            long end = System.currentTimeMillis();
            LOG.log(Level.INFO, "End query");
            LOG.log(Level.INFO, MessageFormat.format("Query result contains {0} elements", list.size()));
            LOG.log(Level.INFO, MessageFormat.format("Time spent: {0}", MessageUtil.formatMillis(end - begin)));
        }

        if (resource instanceof PersistentResourceImpl) {
            PersistentResourceImpl.shutdownWithoutUnload((PersistentResourceImpl) resource);
        } else {
            resource.unload();
        }
    } catch (ParseException e) {
        MessageUtil.showError(e.toString());
        MessageUtil.showError("Current arguments: " + Arrays.toString(args));
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("java -jar <this-file.jar>", options, true);
    } catch (Throwable e) {
        MessageUtil.showError(e.toString());
    }
}
From source file: fr.inria.atlanmod.emf.graphs.Connectedness.java

public static void main(String[] args) {
    Options options = createOptions();

    CommandLineParser parser = new PosixParser();

    try {
        CommandLine commandLine = parser.parse(options, args);

        String inputMetamodel = commandLine.getOptionValue(INPUT_METAMODEL);
        String inputModel = commandLine.getOptionValue(INPUT_MODEL);
        Boolean logUnreachable = commandLine.hasOption(LOG_UNREACHABLE);

        ResourceSet resourceSet = new ResourceSetImpl();
        Resource.Factory.Registry.INSTANCE.getExtensionToFactoryMap()
                .put(Resource.Factory.Registry.DEFAULT_EXTENSION, new XMIResourceFactoryImpl());

        {
            LOG.log(Level.INFO, "Loading input metamodel");
            URI uri = URI.createFileURI(inputMetamodel);
            Resource resource = resourceSet.getResource(uri, true);
            registerEPackages(resource);
        }

        URI uri = URI.createFileURI(inputModel);
        LOG.log(Level.INFO, "Loading input model");
        Resource resource = resourceSet.getResource(uri, true);

        LOG.log(Level.INFO, "Getting input model contents");
        Set<EObject> resourceContents = getResourceContents(resource);
        int totalCount = resourceContents.size();
        LOG.log(Level.INFO, MessageFormat.format("Input model contains {0} elements", totalCount));

        List<EClassifier> candidateEClassifiers = buildCandidateEClassifiers();

        for (Iterator<EObject> it = resource.getAllContents(); it.hasNext();) {
            EObject eObject = it.next();
            if (candidateEClassifiers.contains(eObject.eClass())) {
                Set<EObject> reachableEObjects = getReachableEObjects(eObject);
                int i = reachableEObjects.size();
                LOG.log(Level.INFO, MessageFormat.format("Found {0} reachable objects from {1} (EClass {2})", i,
                        EcoreUtil.getURI(eObject), eObject.eClass().getName()));
                if (logUnreachable) {
                    Set<EObject> unreachableEObjects = new HashSet<>(resourceContents);
                    unreachableEObjects.removeAll(reachableEObjects);
                    LOG.log(Level.INFO,
                            MessageFormat.format("{0} elements are unreachable from {1} (EClass {2})",
                                    unreachableEObjects.size(), EcoreUtil.getURI(eObject),
                                    eObject.eClass().getName()));
                    for (EObject unreachableEObject : unreachableEObjects) {
                        LOG.log(Level.INFO, MessageFormat.format("Unreachable EObject {0} is of type {1}",
                                EcoreUtil.getURI(unreachableEObject), unreachableEObject.eClass()));
                    }
                }
            }
        }
    } catch (ParseException e) {
        LOG.log(Level.SEVERE, e.getLocalizedMessage(), e);
        LOG.log(Level.INFO, "Current arguments: " + Arrays.toString(args));
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("java -jar <this-file.jar>", options, true);
    } catch (Throwable e) {
        LOG.log(Level.SEVERE, e.getLocalizedMessage(), e);
        MessageUtil.showError(e.toString());
    }
}
From source file: fr.inria.atlanmod.kyanos.benchmarks.KyanosGraphQueryClassDeclarationAttributes.java

public static void main(String[] args) {
    Options options = new Options();

    Option inputOpt = OptionBuilder.create(IN);
    inputOpt.setArgName("INPUT");
    inputOpt.setDescription("Input Kyanos resource directory");
    inputOpt.setArgs(1);
    inputOpt.setRequired(true);

    Option inClassOpt = OptionBuilder.create(EPACKAGE_CLASS);
    inClassOpt.setArgName("CLASS");
    inClassOpt.setDescription("FQN of EPackage implementation class");
    inClassOpt.setArgs(1);
    inClassOpt.setRequired(true);

    Option optFileOpt = OptionBuilder.create(OPTIONS_FILE);
    optFileOpt.setArgName("FILE");
    optFileOpt.setDescription("Properties file holding the options to be used in the Kyanos Resource");
    optFileOpt.setArgs(1);

    options.addOption(inputOpt);
    options.addOption(inClassOpt);
    options.addOption(optFileOpt);

    CommandLineParser parser = new PosixParser();

    try {
        PersistenceBackendFactoryRegistry.getFactories().put(NeoBlueprintsURI.NEO_GRAPH_SCHEME,
                new BlueprintsPersistenceBackendFactory());

        CommandLine commandLine = parser.parse(options, args);

        URI uri = NeoBlueprintsURI.createNeoGraphURI(new File(commandLine.getOptionValue(IN)));

        Class<?> inClazz = KyanosGraphQueryClassDeclarationAttributes.class.getClassLoader()
                .loadClass(commandLine.getOptionValue(EPACKAGE_CLASS));
        inClazz.getMethod("init").invoke(null);

        ResourceSet resourceSet = new ResourceSetImpl();
        resourceSet.getResourceFactoryRegistry().getProtocolToFactoryMap()
                .put(NeoBlueprintsURI.NEO_GRAPH_SCHEME, PersistentResourceFactory.eINSTANCE);

        Resource resource = resourceSet.createResource(uri);

        Map<String, Object> loadOpts = new HashMap<String, Object>();
        if (commandLine.hasOption(OPTIONS_FILE)) {
            Properties properties = new Properties();
            properties.load(new FileInputStream(new File(commandLine.getOptionValue(OPTIONS_FILE))));
            for (final Entry<Object, Object> entry : properties.entrySet()) {
                loadOpts.put((String) entry.getKey(), (String) entry.getValue());
            }
        }
        resource.load(loadOpts);

        {
            LOG.log(Level.INFO, "Start query");
            long begin = System.currentTimeMillis();
            HashMap<String, EList<NamedElement>> list = JavaQueries.getClassDeclarationAttributes(resource);
            long end = System.currentTimeMillis();
            LOG.log(Level.INFO, "End query");
            LOG.log(Level.INFO,
                    MessageFormat.format("Query result contains {0} elements", list.entrySet().size()));
            LOG.log(Level.INFO, MessageFormat.format("Time spent: {0}", MessageUtil.formatMillis(end - begin)));
        }

        if (resource instanceof PersistentResourceImpl) {
            PersistentResourceImpl.shutdownWithoutUnload((PersistentResourceImpl) resource);
        } else {
            resource.unload();
        }
    } catch (ParseException e) {
        MessageUtil.showError(e.toString());
        MessageUtil.showError("Current arguments: " + Arrays.toString(args));
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("java -jar <this-file.jar>", options, true);
    } catch (Throwable e) {
        MessageUtil.showError(e.toString());
    }
}
From source file: org.n52.oss.testdata.sml.GeneratorClient.java

/**
 * @param args
 */
public static void main(String[] args) {
    if (sending)
        log.info("Starting insertion of test sensors into " + sirURL);
    else
        log.warn("Starting generation of test sensors, NOT sending!");

    TestSensorFactory factory = new TestSensorFactory();

    // create sensors
    List<TestSensor> sensors = new ArrayList<TestSensor>();
    for (int i = 0; i < nSensorsToGenerate; i++) {
        TestSensor s = factory.createRandomTestSensor();
        sensors.add(s);
        log.info("Added new random sensor: " + s);
    }

    ArrayList<String> insertedSirIds = new ArrayList<String>();

    // insert sensors to service
    int startOfSubList = 0;
    int endOfSubList = nSensorsInOneInsertRequest;
    while (endOfSubList <= sensors.size() + nSensorsInOneInsertRequest) {
        List<TestSensor> currentSensors = sensors.subList(startOfSubList,
                Math.min(endOfSubList, sensors.size()));
        if (currentSensors.isEmpty())
            break;

        try {
            String[] insertedSirIDs = insertSensorsInSIR(currentSensors);
            if (insertedSirIDs == null) {
                log.error("Did not insert dummy sensors.");
            } else {
                insertedSirIds.addAll(Arrays.asList(insertedSirIDs));
            }
        } catch (HttpException e) {
            log.error("Error inserting sensors.", e);
        } catch (IOException e) {
            log.error("Error inserting sensors.", e);
        }

        startOfSubList = Math.min(endOfSubList, sensors.size());
        endOfSubList = endOfSubList + nSensorsInOneInsertRequest;

        if (sending) {
            try {
                if (log.isDebugEnabled())
                    log.debug("Sleeping for " + SLEEP_BETWEEN_REQUESTS + " msecs.");
                Thread.sleep(SLEEP_BETWEEN_REQUESTS);
            } catch (InterruptedException e) {
                log.error("Interrupted!", e);
            }
        }
    }

    log.info("Added sensors (ids in sir): " + Arrays.toString(insertedSirIds.toArray()));
}
From source file: com.vmware.photon.controller.core.Main.java

public static void main(String[] args) throws Throwable {
    try {
        LoggingFactory.bootstrap();
        logger.info("args: " + Arrays.toString(args));

        ArgumentParser parser = ArgumentParsers.newArgumentParser("PhotonControllerCore").defaultHelp(true)
                .description("Photon Controller Core");
        parser.addArgument("config-file").help("photon controller configuration file");
        parser.addArgument("--manual").type(Boolean.class).setDefault(false)
                .help("If true, create default deployment.");

        Namespace namespace = parser.parseArgsOrFail(args);
        PhotonControllerConfig photonControllerConfig = getPhotonControllerConfig(namespace);
        DeployerConfig deployerConfig = photonControllerConfig.getDeployerConfig();

        new LoggingFactory(photonControllerConfig.getLogging(), "photon-controller-core").configure();

        SSLContext sslContext;
        if (deployerConfig.getDeployerContext().isAuthEnabled()) {
            sslContext = SSLContext.getInstance(KeyStoreUtils.THRIFT_PROTOCOL);
            TrustManagerFactory tmf = null;
            tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
            KeyStore keyStore = KeyStore.getInstance("JKS");
            InputStream in = FileUtils
                    .openInputStream(new File(deployerConfig.getDeployerContext().getKeyStorePath()));
            keyStore.load(in, deployerConfig.getDeployerContext().getKeyStorePassword().toCharArray());
            tmf.init(keyStore);
            sslContext.init(null, tmf.getTrustManagers(), null);
        } else {
            KeyStoreUtils.generateKeys("/thrift/");
            sslContext = KeyStoreUtils.acceptAllCerts(KeyStoreUtils.THRIFT_PROTOCOL);
        }

        ThriftModule thriftModule = new ThriftModule(sslContext);

        PhotonControllerXenonHost xenonHost = startXenonHost(photonControllerConfig, thriftModule,
                deployerConfig, sslContext);

        if ((Boolean) namespace.get("manual")) {
            DefaultDeployment.createDefaultDeployment(photonControllerConfig.getXenonConfig().getPeerNodes(),
                    deployerConfig, xenonHost);
        }

        // Creating a temp configuration file for apife with modification to some named sections in
        // photon-controller-config so that it can match the Configuration class of dropwizard.
        File apiFeTempConfig = File.createTempFile("apiFeTempConfig", ".tmp");
        File source = new File(args[0]);
        FileInputStream fis = new FileInputStream(source);
        BufferedReader in = new BufferedReader(new InputStreamReader(fis));

        FileWriter fstream = new FileWriter(apiFeTempConfig, true);
        BufferedWriter out = new BufferedWriter(fstream);

        String aLine = null;
        while ((aLine = in.readLine()) != null) {
            if (aLine.equals("apife:")) {
                aLine = aLine.replace("apife:", "server:");
            }
            out.write(aLine);
            out.newLine();
        }
        in.close();
        out.close();

        // This approach can be simplified once the apife container is gone, but for the time being
        // it expects the first arg to be the string "server".
        String[] apiFeArgs = new String[2];
        apiFeArgs[0] = "server";
        apiFeArgs[1] = apiFeTempConfig.getAbsolutePath();

        ApiFeService.setupApiFeConfigurationForServerCommand(apiFeArgs);
        ApiFeService.addServiceHost(xenonHost);
        ApiFeService.setSSLContext(sslContext);

        ApiFeService apiFeService = new ApiFeService();
        apiFeService.run(apiFeArgs);
        apiFeTempConfig.deleteOnExit();

        LocalApiClient localApiClient = apiFeService.getInjector().getInstance(LocalApiClient.class);
        xenonHost.setApiClient(localApiClient);

        // in the non-auth enabled scenario we need to be able to accept any self-signed certificate
        if (!deployerConfig.getDeployerContext().isAuthEnabled()) {
            KeyStoreUtils.acceptAllCerts(KeyStoreUtils.THRIFT_PROTOCOL);
        }

        Runtime.getRuntime().addShutdownHook(new Thread() {
            @Override
            public void run() {
                logger.info("Shutting down");
                xenonHost.stop();
                logger.info("Done");
                LoggingFactory.detachAndStop();
            }
        });
    } catch (Exception e) {
        logger.error("Failed to start photon controller ", e);
        throw e;
    }
}
From source file: org.opcfoundation.ua.examples.SampleClient.java

public static void main(String[] args) throws Exception {
    if (args.length == 0) {
        System.out.println("Usage: SampleClient [server uri]");
        return;
    }
    String url = args[0];
    System.out.print("SampleClient: Connecting to " + url + " .. ");

    ////////////// CLIENT //////////////
    // Create Client Application
    myApplication = new Application();
    Client myClient = new Client(myApplication);
    myApplication.addLocale(ENGLISH);
    myApplication.setApplicationName(new LocalizedText("Java Sample Client", Locale.ENGLISH));
    myApplication.setProductUri("urn:JavaSampleClient");

    CertificateUtils.setKeySize(1024); // default = 1024
    KeyPair pair = ExampleKeys.getCert("SampleClient");
    myApplication.addApplicationInstanceCertificate(pair);

    // The HTTPS SecurityPolicies are defined separate from the endpoint securities
    myApplication.getHttpsSettings().setHttpsSecurityPolicies(HttpsSecurityPolicy.ALL);

    // Peer verifier
    myApplication.getHttpsSettings().setHostnameVerifier(SSLSocketFactory.ALLOW_ALL_HOSTNAME_VERIFIER);
    myApplication.getHttpsSettings().setCertificateValidator(CertificateValidator.ALLOW_ALL);

    // The certificate to use for HTTPS
    KeyPair myHttpsCertificate = ExampleKeys.getHttpsCert("SampleClient");
    myApplication.getHttpsSettings().setKeyPair(myHttpsCertificate);

    // Connect to the given uri
    SessionChannel mySession = myClient.createSessionChannel(url);
    // mySession.activate("username", "123");
    mySession.activate();
    //////////////////////////////////////

    ///////////// EXECUTE //////////////
    // Browse Root
    BrowseDescription browse = new BrowseDescription();
    browse.setNodeId(Identifiers.RootFolder);
    browse.setBrowseDirection(BrowseDirection.Forward);
    browse.setIncludeSubtypes(true);
    browse.setNodeClassMask(NodeClass.Object, NodeClass.Variable);
    browse.setResultMask(BrowseResultMask.All);
    BrowseResponse res3 = mySession.Browse(null, null, null, browse);
    System.out.println(res3);

    // Read Namespace Array
    ReadResponse res5 = mySession.Read(null, null, TimestampsToReturn.Neither,
            new ReadValueId(Identifiers.Server_NamespaceArray, Attributes.Value, null, null));
    String[] namespaceArray = (String[]) res5.getResults()[0].getValue().getValue();
    System.out.println(Arrays.toString(namespaceArray));

    // Read a variable
    ReadResponse res4 = mySession.Read(null, 500.0, TimestampsToReturn.Source,
            new ReadValueId(new NodeId(6, 1710), Attributes.Value, null, null));
    System.out.println(res4);

    res4 = mySession.Read(null, 500.0, TimestampsToReturn.Source,
            new ReadValueId(new NodeId(6, 1710), Attributes.DataType, null, null));
    System.out.println(res4);

    ///////////// SHUTDOWN /////////////
    mySession.close();
    mySession.closeAsync();
    //////////////////////////////////////
}
From source file: edu.cornell.mannlib.ld4lindexing.Ld4lIndexer.java

public static void main(String[] args) {
    try {
        initializeLogging();
        log.info("Arguments are " + Arrays.toString(args));

        Settings settings = new Settings(args);
        LanguageReference.lookup("bogus");
        TripleStore ts = TripleStoreFactory.instance(settings);
        SolrServer ss = SolrServerFactory.instance(settings);
        Bookmark bookmark = new Bookmark(settings, ss);
        DocumentFactory docFactory = new DocumentFactory(ts);
        Report report = new Report(settings, bookmark, docFactory);
        UriDiscoverer uris = new UriDiscoverer(ts, bookmark, report);
        new SigintHandler(uris);
        new Ld4lIndexer(settings, ss, uris, docFactory, report, bookmark).run();
    } catch (StartupException e) {
        System.out.println();
        System.out.println(e);
        System.out.println();
    }
}
From source file: fr.inria.atlanmod.kyanos.benchmarks.MorsaCreator.java

public static void main(String[] args) {
    Options options = new Options();

    Option inputOpt = OptionBuilder.create(IN);
    inputOpt.setArgName("INPUT");
    inputOpt.setDescription("Input file");
    inputOpt.setArgs(1);
    inputOpt.setRequired(true);

    Option outputOpt = OptionBuilder.create(OUT);
    outputOpt.setArgName("OUTPUT");
    outputOpt.setDescription("Output directory");
    outputOpt.setArgs(1);
    outputOpt.setRequired(true);

    Option inClassOpt = OptionBuilder.create(EPACKAGE_CLASS);
    inClassOpt.setArgName("CLASS");
    inClassOpt.setDescription("FQN of EPackage implementation class");
    inClassOpt.setArgs(1);
    inClassOpt.setRequired(true);

    options.addOption(inputOpt);
    options.addOption(outputOpt);
    options.addOption(inClassOpt);

    CommandLineParser parser = new PosixParser();

    try {
        CommandLine commandLine = parser.parse(options, args);

        URI sourceUri = URI.createFileURI(commandLine.getOptionValue(IN));
        URI targetUri = URI.createURI("morsa://" + commandLine.getOptionValue(OUT));

        Class<?> inClazz = MorsaCreator.class.getClassLoader()
                .loadClass(commandLine.getOptionValue(EPACKAGE_CLASS));
        inClazz.getMethod("init").invoke(null);

        ResourceSet resourceSet = new ResourceSetImpl();
        resourceSet.getResourceFactoryRegistry().getExtensionToFactoryMap().put("xmi",
                new XMIResourceFactoryImpl());
        resourceSet.getResourceFactoryRegistry().getExtensionToFactoryMap().put("zxmi",
                new XMIResourceFactoryImpl());
        resourceSet.getResourceFactoryRegistry().getProtocolToFactoryMap().put("morsa",
                new MorsaResourceFactoryImpl(new MongoDBMorsaBackendFactory()));

        Resource sourceResource = resourceSet.createResource(sourceUri);

        Map<String, Object> loadOpts = new HashMap<String, Object>();
        if ("zxmi".equals(sourceUri.fileExtension())) {
            loadOpts.put(XMIResource.OPTION_ZIP, Boolean.TRUE);
        }

        Runtime.getRuntime().gc();
        long initialUsedMemory = Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory();
        LOG.log(Level.INFO, MessageFormat.format("Used memory before loading: {0}",
                MessageUtil.byteCountToDisplaySize(initialUsedMemory)));
        LOG.log(Level.INFO, "Loading source resource");
        sourceResource.load(loadOpts);
        LOG.log(Level.INFO, "Source resource loaded");
        Runtime.getRuntime().gc();
        long finalUsedMemory = Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory();
        LOG.log(Level.INFO, MessageFormat.format("Used memory after loading: {0}",
                MessageUtil.byteCountToDisplaySize(finalUsedMemory)));
        LOG.log(Level.INFO, MessageFormat.format("Memory use increase: {0}",
                MessageUtil.byteCountToDisplaySize(finalUsedMemory - initialUsedMemory)));

        Resource targetResource = resourceSet.createResource(targetUri);

        Map<String, Object> saveOpts = new HashMap<String, Object>();
        // URI of the MongoDB database (e.g. localhost)
        saveOpts.put(IMorsaResource.OPTION_SERVER_URI, "localhost");
        saveOpts.put(IMorsaResource.OPTION_MAX_SAVE_CACHE_SIZE, 30000);
        saveOpts.put(IMorsaResource.OPTION_DEMAND_LOAD, false);
        targetResource.save(saveOpts);

        LOG.log(Level.INFO, "Start moving elements");
        targetResource.getContents().clear();
        targetResource.getContents().addAll(sourceResource.getContents());
        LOG.log(Level.INFO, "End moving elements");

        LOG.log(Level.INFO, "Start saving");
        targetResource.save(saveOpts);
        LOG.log(Level.INFO, "Saved");

        targetResource.unload();
    } catch (ParseException e) {
        MessageUtil.showError(e.toString());
        MessageUtil.showError("Current arguments: " + Arrays.toString(args));
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("java -jar <this-file.jar>", options, true);
    } catch (Throwable e) {
        MessageUtil.showError(e.toString());
    }
}
From source file: deck36.storm.plan9.nodejs.HighFiveStreamJoinTopology.java

public static void main(String[] args) throws Exception {

    String env = null;

    if (args != null && args.length > 0) {
        env = args[0];
    }

    if (!"dev".equals(env))
        if (!"local".equals(env))
            if (!"prod".equals(env)) {
                System.out.println("Usage: $0 (local|dev|prod)\n");
                System.exit(1);
            }

    // Topology config
    Config conf = new Config();

    // Load parameters and add them to the Config
    Map configMap = YamlLoader.loadYamlFromResource("storm_" + env + ".yml");
    conf.putAll(configMap);

    log.info(JSONValue.toJSONString((conf)));

    // Set topology loglevel to DEBUG
    conf.put(Config.TOPOLOGY_DEBUG, JsonPath.read(conf, "$.deck36_storm.debug"));

    // Create Topology builder
    TopologyBuilder builder = new TopologyBuilder();

    // if there are not special reasons, start with parallelism hint of 1
    // and multiple tasks. By that, you can scale dynamically later on.
    int parallelism_hint = JsonPath.read(conf, "$.deck36_storm.default_parallelism_hint");
    int num_tasks = JsonPath.read(conf, "$.deck36_storm.default_num_tasks");

    // Create Stream from RabbitMQ messages
    // bind new queue with name of the topology
    // to the main plan9 exchange (from properties config)
    // consuming only CBT-related events by using the routing key 'cbt.#'
    String badgeName = HighFiveStreamJoinTopology.class.getSimpleName();

    String rabbitQueueName = badgeName; // use topology class name as name for the queue
    String rabbitExchangeName = JsonPath.read(conf, "$.deck36_storm.HighFiveStreamJoinBolt.rabbitmq.exchange");
    String rabbitRoutingKey = JsonPath.read(conf, "$.deck36_storm.HighFiveStreamJoinBolt.rabbitmq.routing_key");

    // Get JSON deserialization scheme
    Scheme rabbitScheme = new SimpleJSONScheme();

    // Setup a Declarator to configure exchange/queue/routing key
    RabbitMQDeclarator rabbitDeclarator = new RabbitMQDeclarator(rabbitExchangeName, rabbitQueueName,
            rabbitRoutingKey);

    // Create Configuration for the Spout
    ConnectionConfig connectionConfig = new ConnectionConfig(
            (String) JsonPath.read(conf, "$.deck36_storm.rabbitmq.host"),
            (Integer) JsonPath.read(conf, "$.deck36_storm.rabbitmq.port"),
            (String) JsonPath.read(conf, "$.deck36_storm.rabbitmq.user"),
            (String) JsonPath.read(conf, "$.deck36_storm.rabbitmq.pass"),
            (String) JsonPath.read(conf, "$.deck36_storm.rabbitmq.vhost"),
            (Integer) JsonPath.read(conf, "$.deck36_storm.rabbitmq.heartbeat"));

    ConsumerConfig spoutConfig = new ConsumerConfigBuilder().connection(connectionConfig)
            .queue(rabbitQueueName)
            .prefetch((Integer) JsonPath.read(conf, "$.deck36_storm.rabbitmq.prefetch")).requeueOnFail()
            .build();

    // add global parameters to topology config - the RabbitMQSpout will read them from there
    conf.putAll(spoutConfig.asMap());

    // For production, set the spout pending value to the same value as the RabbitMQ pre-fetch
    // see: https://github.com/ppat/storm-rabbitmq/blob/master/README.md
    if ("prod".equals(env)) {
        conf.put(Config.TOPOLOGY_MAX_SPOUT_PENDING,
                (Integer) JsonPath.read(conf, "$.deck36_storm.rabbitmq.prefetch"));
    }

    // Add RabbitMQ spout to topology
    builder.setSpout("incoming", new RabbitMQSpout(rabbitScheme, rabbitDeclarator), parallelism_hint)
            .setNumTasks((Integer) JsonPath.read(conf, "$.deck36_storm.rabbitmq.spout_tasks"));

    // construct command to invoke the external bolt implementation
    ArrayList<String> command = new ArrayList(15);

    // Add main execution program (node, ..) and parameters
    command.add((String) JsonPath.read(conf, "$.deck36_storm.nodejs.executor"));

    // Add main route to be invoked and its parameters
    command.add((String) JsonPath.read(conf, "$.deck36_storm.HighFiveStreamJoinBolt.main"));
    List boltParams = (List<String>) JsonPath.read(conf, "$.deck36_storm.HighFiveStreamJoinBolt.params");
    if (boltParams != null)
        command.addAll(boltParams);

    // Log the final command
    log.info("Command to start bolt for HighFive badge: " + Arrays.toString(command.toArray()));

    // Add constructed external bolt command to topology using MultilangAdapterBolt
    builder.setBolt("badge", new MultilangAdapterBolt(command, "badge"), parallelism_hint)
            .setNumTasks(num_tasks).shuffleGrouping("incoming");

    builder.setBolt("rabbitmq_router",
            new Plan9RabbitMQRouterBolt(
                    (String) JsonPath.read(conf,
                            "$.deck36_storm.HighFiveStreamJoinBolt.rabbitmq.target_exchange"),
                    "HighFive" // RabbitMQ routing key
            ), parallelism_hint).setNumTasks(num_tasks).shuffleGrouping("badge");

    builder.setBolt("rabbitmq_producer", new Plan9RabbitMQPushBolt(), parallelism_hint).setNumTasks(num_tasks)
            .shuffleGrouping("rabbitmq_router");

    if ("dev".equals(env) || "local".equals(env)) {
        LocalCluster cluster = new LocalCluster();
        cluster.submitTopology(badgeName + System.currentTimeMillis(), conf, builder.createTopology());
        Thread.sleep(2000000);
    }

    if ("prod".equals(env)) {
        StormSubmitter.submitTopology(badgeName + "-" + System.currentTimeMillis(), conf,
                builder.createTopology());
    }
}