List of usage examples for java.lang.String.format
public static String format(String format, Object... args)
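Before the real-world samples below, here is a minimal, self-contained sketch of the most common conversions (%s, %d, %.3f); the class name and values are illustrative only and do not come from any of the projects listed here.

public class StringFormatBasics {
    public static void main(String[] args) {
        // %s formats any object via String.valueOf, %d a decimal integer,
        // %.3f a floating-point number rounded to three decimal places.
        String summary = String.format("stream=%s shards=%d latency=%.3f ms",
                "boundary-test-stream", 1, 12.3456);
        System.out.println(summary); // prints: stream=boundary-test-stream shards=1 latency=12.346 ms
    }
}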
From source file:com.boundary.aws.kinesis.Sample.java
public static void main(String[] args) throws Exception {
    init();

    final String myStreamName = "boundary-test-stream";
    final Integer myStreamSize = 1;

    // Create a stream. The number of shards determines the provisioned throughput.
    CreateStreamRequest createStreamRequest = new CreateStreamRequest();
    createStreamRequest.setStreamName(myStreamName);
    createStreamRequest.setShardCount(myStreamSize);
    kinesisClient.createStream(createStreamRequest);

    // The stream is now being created.
    LOG.info("Creating Stream : " + myStreamName);
    waitForStreamToBecomeAvailable(myStreamName);

    // List all of my streams.
    ListStreamsRequest listStreamsRequest = new ListStreamsRequest();
    listStreamsRequest.setLimit(10);
    ListStreamsResult listStreamsResult = kinesisClient.listStreams(listStreamsRequest);
    List<String> streamNames = listStreamsResult.getStreamNames();
    while (listStreamsResult.isHasMoreStreams()) {
        if (streamNames.size() > 0) {
            listStreamsRequest.setExclusiveStartStreamName(streamNames.get(streamNames.size() - 1));
        }
        listStreamsResult = kinesisClient.listStreams(listStreamsRequest);
        streamNames.addAll(listStreamsResult.getStreamNames());
    }

    LOG.info("Printing my list of streams : ");

    // Print all of my streams.
    if (!streamNames.isEmpty()) {
        System.out.println("List of my streams: ");
    }
    for (int i = 0; i < streamNames.size(); i++) {
        System.out.println(streamNames.get(i));
    }

    LOG.info("Putting records in stream : " + myStreamName);
    // Write 100 records to the stream.
    for (int j = 0; j < 100; j++) {
        PutRecordRequest putRecordRequest = new PutRecordRequest();
        putRecordRequest.setStreamName(myStreamName);
        putRecordRequest.setData(ByteBuffer.wrap(String.format("testData-%d", j).getBytes()));
        putRecordRequest.setPartitionKey(String.format("partitionKey-%d", j));
        PutRecordResult putRecordResult = kinesisClient.putRecord(putRecordRequest);
        System.out.println("Successfully putrecord, partition key : " + putRecordRequest.getPartitionKey()
                + ", ShardID : " + putRecordResult.getShardId());
    }

    // Delete the stream.
    LOG.info("Deleting stream : " + myStreamName);
    DeleteStreamRequest deleteStreamRequest = new DeleteStreamRequest();
    deleteStreamRequest.setStreamName(myStreamName);
    kinesisClient.deleteStream(deleteStreamRequest);

    // The stream is now being deleted.
    LOG.info("Stream is now being deleted : " + myStreamName);
}
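The sample above builds each record's payload and partition key with String.format and then converts the payload with getBytes(), which uses the platform default charset. Below is a small sketch of the same pattern with an explicit charset; the class and variable names are illustrative and the AWS client calls are omitted.

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

public class FormattedRecordPayloads {
    public static void main(String[] args) {
        for (int j = 0; j < 3; j++) {
            // Same pattern as the Kinesis sample: one formatted partition key and payload per record.
            String partitionKey = String.format("partitionKey-%d", j);
            // An explicit charset avoids depending on the platform default encoding.
            ByteBuffer data = ByteBuffer.wrap(String.format("testData-%d", j).getBytes(StandardCharsets.UTF_8));
            System.out.println(partitionKey + " -> " + data.remaining() + " bytes");
        }
    }
}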
From source file:com.act.biointerpretation.analytics.ReactionDeletion.java
public static void main(String[] args) throws Exception {
    Options opts = new Options();
    for (Option.Builder b : OPTION_BUILDERS) {
        opts.addOption(b.build());
    }

    CommandLine cl = null;
    try {
        CommandLineParser parser = new DefaultParser();
        cl = parser.parse(opts, args);
    } catch (ParseException e) {
        LOGGER.error(String.format("Argument parsing failed: %s\n", e.getMessage()));
        HELP_FORMATTER.printHelp(ReactionCountProvenance.class.getCanonicalName(), HELP_MESSAGE, opts, null, true);
        System.exit(1);
    }

    if (cl.hasOption("help")) {
        HELP_FORMATTER.printHelp(ReactionCountProvenance.class.getCanonicalName(), HELP_MESSAGE, opts, null, true);
        return;
    }

    if (!cl.hasOption(OPTION_OUTPUT_PATH)) {
        LOGGER.error("Input -o prefix");
        return;
    }

    NoSQLAPI srcApi = new NoSQLAPI(cl.getOptionValue(OPTION_SOURCE_DB), cl.getOptionValue(OPTION_SOURCE_DB));
    NoSQLAPI sinkApi = new NoSQLAPI(cl.getOptionValue(OPTION_SINK_DB), cl.getOptionValue(OPTION_SINK_DB));

    searchForDroppedReactions(srcApi, sinkApi, new File(cl.getOptionValue(OPTION_OUTPUT_PATH)));
}
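The error message above embeds a literal \n in the format string; String.format also understands %n, which expands to the platform line separator. A small sketch of the difference, using a made-up message rather than the project's logger.

public class NewlineSpecifier {
    public static void main(String[] args) {
        String withBackslashN = String.format("Argument parsing failed: %s\n", "missing value for -o");
        String withPercentN = String.format("Argument parsing failed: %s%n", "missing value for -o");
        // "\n" is always a line feed; "%n" is System.lineSeparator() ("\r\n" on Windows).
        System.out.print(withBackslashN);
        System.out.print(withPercentN);
    }
}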
From source file:com.github.fritaly.graphml4j.samples.GradleDependenciesWithGroupsAndBuffering.java
public static void main(String[] args) throws Exception {
    if (args.length != 1) {
        System.out.println(String.format("%s <output-file>",
                GradleDependenciesWithGroupsAndBuffering.class.getSimpleName()));
        System.exit(1);
    }

    final File file = new File(args[0]);

    System.out.println("Writing GraphML file to " + file.getAbsolutePath() + " ...");

    FileWriter fileWriter = null;
    Reader reader = null;
    LineNumberReader lineReader = null;

    try {
        fileWriter = new FileWriter(file);

        final com.github.fritaly.graphml4j.datastructure.Graph graph = new Graph();

        // The dependency graph has been generated by Gradle with the command
        // "gradle dependencies". The output of this command has been saved to a
        // text file which will be parsed to rebuild the dependency graph
        reader = new InputStreamReader(
                GradleDependenciesWithGroupsAndBuffering.class.getResourceAsStream("gradle-dependencies.txt"));
        lineReader = new LineNumberReader(reader);

        String line = null;

        // Stack containing the nodes per depth inside the dependency graph
        // (the topmost dependency is the first one in the stack)
        final Stack<Node> parentNodes = new Stack<Node>();

        while ((line = lineReader.readLine()) != null) {
            // Determine the depth of the current dependency inside the graph.
            // The depth can be inferred from the indentation used by Gradle.
            // Each level of depth adds 5 more characters of indentation
            final int initialLength = line.length();

            // Remove the strings used by Gradle to indent dependencies
            line = StringUtils.replace(line, "+--- ", "");
            line = StringUtils.replace(line, "|    ", "");
            line = StringUtils.replace(line, "\\--- ", "");
            line = StringUtils.replace(line, "     ", "");

            // The depth can easily be inferred now
            final int depth = (initialLength - line.length()) / 5;

            // Remove unnecessary node ids
            while (depth <= parentNodes.size()) {
                parentNodes.pop();
            }

            final Artifact artifact = createArtifact(line);

            Node node = graph.getNodeByData(artifact);

            // Has this dependency already been added to the graph ?
            if (node == null) {
                // No, add the node
                node = graph.addNode(artifact);
            }

            parentNodes.push(node);

            if (parentNodes.size() > 1) {
                // Generate an edge between the current node and its parent
                graph.addEdge("Depends on", parentNodes.get(parentNodes.size() - 2), node);
            }
        }

        // Create the groups after creating the nodes & edges
        for (Node node : graph.getNodes()) {
            final Artifact artifact = (Artifact) node.getData();

            final String groupId = artifact.group;

            Node groupNode = graph.getNodeByData(groupId);

            if (groupNode == null) {
                groupNode = graph.addNode(groupId);
            }

            // Add the node to the group
            node.setParent(groupNode);
        }

        graph.toGraphML(fileWriter, new Renderer() {

            @Override
            public String getNodeLabel(Node node) {
                return node.isGroup() ? node.getData().toString() : ((Artifact) node.getData()).getLabel();
            }

            @Override
            public boolean isGroupOpen(Node node) {
                return true;
            }

            @Override
            public NodeStyle getNodeStyle(Node node) {
                // Customize the rendering of nodes
                final NodeStyle nodeStyle = new NodeStyle();
                nodeStyle.setWidth(250.0f);

                return nodeStyle;
            }

            @Override
            public GroupStyles getGroupStyles(Node node) {
                return new GroupStyles();
            }

            @Override
            public EdgeStyle getEdgeStyle(Edge edge) {
                return new EdgeStyle();
            }
        });

        System.out.println("Done");
    } finally {
        // Closing the writers and readers is necessary to dispose the underlying resources
        fileWriter.close();
        lineReader.close();
        reader.close();
    }
}
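This sample (and the related GradleDependenciesWithGroups sample further down) prints its usage line with String.format("%s <output-file>", SomeClass.getSimpleName()); %s accepts any argument and formats it via String.valueOf. A minimal sketch of that usage-message pattern with a placeholder class name.

public class UsageMessage {
    public static void main(String[] args) {
        if (args.length != 1) {
            // %s works for any object; here it is the simple name of the current class.
            System.out.println(String.format("%s <output-file>", UsageMessage.class.getSimpleName()));
            System.exit(1);
        }
        System.out.println(String.format("Writing output to %s ...", args[0]));
    }
}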
From source file:dfki.sb.rabbitmqjava.RabbitMQObjectStreamClient.java
public static void main(String[] argv) {
    int numMessages = 10000;
    RabbitMQObjectStreamClient rpcClient = null;
    String response = null;
    try {
        rpcClient = new RabbitMQObjectStreamClient();
        // Untimed warm-up run before the measured batch
        sendMessages(20000, rpcClient);
        long startTime = System.currentTimeMillis();
        sendMessages(numMessages, rpcClient);
        long finishTime = System.currentTimeMillis();
        long difference = finishTime - startTime;
        difference = difference * 1000;
        double latency = (double) difference / (numMessages * 2.0);
        rpcClient.sendDesiconnectSignal();
        System.out.println(String.format("\n\nAverage latency in microseconds %.3f\n\n\n", latency));
        System.out.println("Finished");
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        if (rpcClient != null) {
            try {
                rpcClient.close();
            } catch (Exception ignore) {
            }
        }
    }
}
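The latency report above relies on %.3f to round to three decimal places; a width can be added in front of the precision to right-align values in a column. A short sketch with invented latency values.

public class PrecisionAndWidth {
    public static void main(String[] args) {
        double[] latencies = { 3.14159, 12.5, 250.0 / 3.0 };
        for (double latency : latencies) {
            // %10.3f: at least 10 characters wide, 3 digits after the decimal point.
            System.out.println(String.format("Average latency in microseconds %10.3f", latency));
        }
    }
}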
From source file:com.act.biointerpretation.analytics.ReactionCountProvenance.java
public static void main(String[] args) throws Exception {
    Options opts = new Options();
    for (Option.Builder b : OPTION_BUILDERS) {
        opts.addOption(b.build());
    }

    CommandLine cl = null;
    try {
        CommandLineParser parser = new DefaultParser();
        cl = parser.parse(opts, args);
    } catch (ParseException e) {
        LOGGER.error(String.format("Argument parsing failed: %s\n", e.getMessage()));
        HELP_FORMATTER.printHelp(ReactionCountProvenance.class.getCanonicalName(), HELP_MESSAGE, opts, null, true);
        System.exit(1);
    }

    if (cl.hasOption("help")) {
        HELP_FORMATTER.printHelp(ReactionCountProvenance.class.getCanonicalName(), HELP_MESSAGE, opts, null, true);
        return;
    }

    if (!cl.hasOption(OPTION_OUTPUT_PREFIX)) {
        LOGGER.error("Input -o prefix");
        return;
    }

    List<String> dbs = new ArrayList<>(Arrays.asList(cl.getOptionValues(OPTION_ORDERED_LIST_OF_DBS)));

    ReactionCountProvenance reactionCountProvenance = new ReactionCountProvenance(dbs,
            cl.getOptionValue(OPTION_OUTPUT_PREFIX));
    reactionCountProvenance.run();
    reactionCountProvenance.writeToDisk();
}
From source file:fr.inria.atlanmod.kyanos.benchmarks.ReferencesCounter.java
public static void main(String[] args) {
    Options options = new Options();

    Option inputOpt = OptionBuilder.create(IN);
    inputOpt.setArgName("INPUT");
    inputOpt.setDescription("Input file");
    inputOpt.setArgs(1);
    inputOpt.setRequired(true);

    Option outputOpt = OptionBuilder.create(OUT);
    outputOpt.setArgName("OUTPUT");
    outputOpt.setDescription("Output file");
    outputOpt.setArgs(1);
    outputOpt.setRequired(true);

    Option inClassOpt = OptionBuilder.create(IN_EPACKAGE_CLASS);
    inClassOpt.setArgName("CLASS");
    inClassOpt.setDescription("FQN of input EPackage implementation class");
    inClassOpt.setArgs(1);
    inClassOpt.setRequired(true);

    Option labelOpt = OptionBuilder.create(LABEL);
    labelOpt.setArgName("LABEL");
    labelOpt.setDescription("Label for the data set");
    labelOpt.setArgs(1);
    labelOpt.setRequired(true);

    options.addOption(inputOpt);
    options.addOption(outputOpt);
    options.addOption(inClassOpt);
    options.addOption(labelOpt);

    CommandLineParser parser = new PosixParser();

    try {
        CommandLine commandLine = parser.parse(options, args);

        URI sourceUri = URI.createFileURI(commandLine.getOptionValue(IN));

        Class<?> inClazz = ReferencesCounter.class.getClassLoader()
                .loadClass(commandLine.getOptionValue(IN_EPACKAGE_CLASS));
        @SuppressWarnings("unused")
        EPackage inEPackage = (EPackage) inClazz.getMethod("init").invoke(null);

        ResourceSet resourceSet = new ResourceSetImpl();
        resourceSet.getResourceFactoryRegistry().getExtensionToFactoryMap().put("xmi",
                new XMIResourceFactoryImpl());
        resourceSet.getResourceFactoryRegistry().getExtensionToFactoryMap().put("zxmi",
                new XMIResourceFactoryImpl());

        Resource sourceResource = resourceSet.getResource(sourceUri, true);

        FileWriter writer = new FileWriter(new File(commandLine.getOptionValue(OUT)));
        try {
            writer.write(commandLine.getOptionValue(LABEL));
            writer.write("\n");
            for (Iterator<EObject> iterator = sourceResource.getAllContents(); iterator.hasNext();) {
                EObject eObject = iterator.next();
                for (EStructuralFeature feature : eObject.eClass().getEAllStructuralFeatures()) {
                    if (feature.isMany() && eObject.eIsSet(feature)) {
                        EList<?> value = (EList<?>) eObject.eGet(feature);
                        // if (value.size() > 10)
                        writer.write(String.format("%d\n", value.size()));
                    }
                }
            }
        } finally {
            IOUtils.closeQuietly(writer);
        }
    } catch (ParseException e) {
        showError(e.toString());
        showError("Current arguments: " + Arrays.toString(args));
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("java -jar <this-file.jar>", options, true);
    } catch (Throwable e) {
        showError(e.toString());
    }
}
From source file:com.github.fritaly.graphml4j.samples.GradleDependenciesWithGroups.java
public static void main(String[] args) throws Exception {
    if (args.length != 1) {
        System.out.println(String.format("%s <output-file>", GradleDependenciesWithGroups.class.getSimpleName()));
        System.exit(1);
    }

    final File file = new File(args[0]);

    System.out.println("Writing GraphML file to " + file.getAbsolutePath() + " ...");

    FileWriter fileWriter = null;
    GraphMLWriter graphWriter = null;
    Reader reader = null;
    LineNumberReader lineReader = null;

    try {
        fileWriter = new FileWriter(file);
        graphWriter = new GraphMLWriter(fileWriter);

        // Customize the rendering of nodes
        final NodeStyle nodeStyle = graphWriter.getNodeStyle();
        nodeStyle.setWidth(250.0f);
        nodeStyle.setHeight(50.0f);

        graphWriter.setNodeStyle(nodeStyle);

        // The dependency graph has been generated by Gradle with the command
        // "gradle dependencies". The output of this command has been saved to a
        // text file which will be parsed to rebuild the dependency graph
        reader = new InputStreamReader(
                GradleDependenciesWithGroups.class.getResourceAsStream("gradle-dependencies.txt"));
        lineReader = new LineNumberReader(reader);

        String line = null;

        // Stack containing the artifacts per depth inside the dependency graph
        // (the topmost dependency is the first one in the stack)
        final Stack<Artifact> stack = new Stack<Artifact>();

        final Map<String, Set<Artifact>> artifactsByGroup = new HashMap<String, Set<Artifact>>();

        // List of parent/child relationships between artifacts
        final List<Relationship> relationships = new ArrayList<Relationship>();

        while ((line = lineReader.readLine()) != null) {
            // Determine the depth of the current dependency inside the graph.
            // The depth can be inferred from the indentation used by Gradle.
            // Each level of depth adds 5 more characters of indentation
            final int initialLength = line.length();

            // Remove the strings used by Gradle to indent dependencies
            line = StringUtils.replace(line, "+--- ", "");
            line = StringUtils.replace(line, "|    ", "");
            line = StringUtils.replace(line, "\\--- ", "");
            line = StringUtils.replace(line, "     ", "");

            // The depth can easily be inferred now
            final int depth = (initialLength - line.length()) / 5;

            // Remove unnecessary artifacts
            while (depth <= stack.size()) {
                stack.pop();
            }

            // Create an artifact from the dependency (group, artifact, version) tuple
            final Artifact artifact = createArtifact(line);

            stack.push(artifact);

            if (stack.size() > 1) {
                // Store the artifact and its parent
                relationships.add(new Relationship(stack.get(stack.size() - 2), artifact));
            }

            if (!artifactsByGroup.containsKey(artifact.group)) {
                artifactsByGroup.put(artifact.group, new HashSet<Artifact>());
            }

            artifactsByGroup.get(artifact.group).add(artifact);
        }

        // Open the graph
        graphWriter.graph();

        final Map<Artifact, String> nodeIdsByArtifact = new HashMap<Artifact, String>();

        // Loop over the groups and generate the associated nodes
        for (String group : artifactsByGroup.keySet()) {
            graphWriter.group(group, true);

            for (Artifact artifact : artifactsByGroup.get(group)) {
                final String nodeId = graphWriter.node(artifact.getLabel());

                nodeIdsByArtifact.put(artifact, nodeId);
            }

            graphWriter.closeGroup();
        }

        // Generate the edges
        for (Relationship relationship : relationships) {
            final String parentId = nodeIdsByArtifact.get(relationship.parent);
            final String childId = nodeIdsByArtifact.get(relationship.child);

            graphWriter.edge(parentId, childId);
        }

        // Close the graph
        graphWriter.closeGraph();

        System.out.println("Done");
    } finally {
        // Calling GraphMLWriter.close() is necessary to dispose the underlying resources
        graphWriter.close();
        fileWriter.close();
        lineReader.close();
        reader.close();
    }
}
From source file:ee.ria.xroad.common.conf.globalconf.ConfigurationClientMain.java
/**
 * Main entry point of configuration client. Based on the arguments,
 * the client will either:
 * 1) <anchor file> <configuration path> -- download and exit
 * 2) <anchor file> -- download and verify
 * 3) [no args] -- start as daemon
 * @param args the arguments
 * @throws Exception if an error occurs
 */
public static void main(String[] args) throws Exception {
    CommandLine cmd = getCommandLine(args);

    String[] actualArgs = cmd.getArgs();
    if (actualArgs.length == NUM_ARGS_FROM_CONF_PROXY_FULL) {
        // Run configuration client in one-shot mode downloading the specified global configuration version
        System.exit(download(actualArgs[0], actualArgs[1], actualArgs[2]));
    } else if (actualArgs.length == NUM_ARGS_FROM_CONF_PROXY) {
        // Run configuration client in one-shot mode downloading the current global configuration version
        System.exit(download(actualArgs[0], actualArgs[1],
                String.format("%d", SystemProperties.CURRENT_GLOBAL_CONFIGURATION_VERSION)));
    } else if (actualArgs.length == 1) {
        // Run configuration client in validate mode
        System.exit(validate(actualArgs[0], getParamsValidator(cmd)));
    } else {
        // Run configuration client in daemon mode
        startDaemon();
    }
}
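The configuration client above turns an int constant into a String with String.format("%d", ...). In the default English-style locales a bare %d produces the same text as Integer.toString; the format flags are what add value. A small sketch with arbitrary numbers (the SystemProperties constant is not used here).

public class IntegerFormatting {
    public static void main(String[] args) {
        int version = 2;
        // A bare %d: same output as Integer.toString(version) in typical default locales.
        System.out.println(String.format("%d", version));
        // Flags add behaviour that toString does not have:
        System.out.println(String.format("%05d", version)); // zero-padded: 00002
        System.out.println(String.format("%,d", 1234567));  // locale-dependent grouping, e.g. 1,234,567
    }
}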
From source file:com.griddynamics.jagger.JaggerLauncher.java
public static void main(String[] args) throws Exception {
    Thread memoryMonitorThread = new Thread("memory-monitor") {
        @Override
        public void run() {
            for (;;) {
                try {
                    log.info("Memory info: totalMemory={}, freeMemory={}", Runtime.getRuntime().totalMemory(),
                            Runtime.getRuntime().freeMemory());
                    Thread.sleep(60000);
                } catch (InterruptedException e) {
                    throw new RuntimeException(e);
                }
            }
        }
    };
    memoryMonitorThread.setDaemon(true);
    memoryMonitorThread.start();

    String pid = ManagementFactory.getRuntimeMXBean().getName();
    System.out.println(String.format("PID:%s", pid));

    Properties props = System.getProperties();
    for (Map.Entry<Object, Object> prop : props.entrySet()) {
        log.info("{}: '{}'", prop.getKey(), prop.getValue());
    }
    log.info("");

    URL directory = new URL("file:" + System.getProperty("user.dir") + "/");

    loadBootProperties(directory, args[0], environmentProperties);

    log.debug("Bootstrap properties:");
    for (String propName : environmentProperties.stringPropertyNames()) {
        log.debug(" {}={}", propName, environmentProperties.getProperty(propName));
    }

    String[] roles = environmentProperties.getProperty(ROLES).split(",");
    Set<String> rolesSet = Sets.newHashSet(roles);

    if (rolesSet.contains(Role.COORDINATION_SERVER.toString())) {
        launchCoordinationServer(directory);
    }
    if (rolesSet.contains(Role.HTTP_COORDINATION_SERVER.toString())) {
        launchCometdCoordinationServer(directory);
    }
    if (rolesSet.contains(Role.RDB_SERVER.toString())) {
        launchRdbServer(directory);
    }
    if (rolesSet.contains(Role.MASTER.toString())) {
        launchMaster(directory);
    }
    if (rolesSet.contains(Role.KERNEL.toString())) {
        launchKernel(directory);
    }
    if (rolesSet.contains(Role.REPORTER.toString())) {
        launchReporter(directory);
    }

    LaunchManager launchManager = builder.build();

    int result = launchManager.launch();

    System.exit(result);
}
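The launcher formats its PID banner with a single %s. When the same argument has to appear more than once, argument indices (%1$s, %2$s, ...) let it be passed a single time; a brief sketch reusing the runtime name returned by the JMX runtime bean.

import java.lang.management.ManagementFactory;

public class PositionalArguments {
    public static void main(String[] args) {
        String pid = ManagementFactory.getRuntimeMXBean().getName(); // e.g. "12345@hostname"
        // %1$s refers to the first argument both times, so pid is passed only once.
        System.out.println(String.format("PID:%1$s (raw runtime name: %1$s)", pid));
    }
}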
From source file:com.microsoft.azure.servicebus.samples.managingtopicrules.ManagingTopicRules.java
public static void main(String[] args) throws Exception {
    if (!parseCommandLine(args)) {
        return;
    }

    logger.info("Starting TopicSubscriptionWithRuleOperations sample.");

    // Create clients
    logger.info("Create topic client.");
    topicClient = new TopicClient(new ConnectionStringBuilder(connectionString, topicName));

    logger.info("Create subscription client.");
    ISubscriptionClient allMessagessubscriptionClient = new SubscriptionClient(
            new ConnectionStringBuilder(connectionString,
                    topicName + "/subscriptions/" + allMessagesSubscriptionName),
            ReceiveMode.PEEKLOCK);
    ISubscriptionClient sqlFilterOnlySubscriptionClient = new SubscriptionClient(
            new ConnectionStringBuilder(connectionString,
                    topicName + "/subscriptions/" + sqlFilterOnlySubscriptionName),
            ReceiveMode.PEEKLOCK);
    ISubscriptionClient sqlFilterWithActionSubscriptionClient = new SubscriptionClient(
            new ConnectionStringBuilder(connectionString,
                    topicName + "/subscriptions/" + sqlFilterWithActionSubscriptionName),
            ReceiveMode.PEEKLOCK);
    ISubscriptionClient correlationFilterSubscriptionClient = new SubscriptionClient(
            new ConnectionStringBuilder(connectionString,
                    topicName + "/subscriptions/" + correlationFilterSubscriptionName),
            ReceiveMode.PEEKLOCK);

    // Drop existing rules and add a TrueFilter
    for (RuleDescription rd : allMessagessubscriptionClient.getRules()) {
        allMessagessubscriptionClient.removeRule(rd.getName());
    }
    allMessagessubscriptionClient.addRule(new RuleDescription("MatchAll", new TrueFilter()));

    // Drop existing rules and add a SQL filter
    for (RuleDescription rd : sqlFilterOnlySubscriptionClient.getRules()) {
        sqlFilterOnlySubscriptionClient.removeRule(rd.getName());
    }
    sqlFilterOnlySubscriptionClient.addRule(new RuleDescription("RedSqlRule", new SqlFilter("Color = 'Red'")));

    // Drop existing rules and add a SQL filter with a subsequent action
    for (RuleDescription rd : sqlFilterWithActionSubscriptionClient.getRules()) {
        sqlFilterWithActionSubscriptionClient.removeRule(rd.getName());
    }
    RuleDescription sqlRuleWithAction = new RuleDescription("BlueSqlRule", new SqlFilter("Color = 'Blue'"));
    sqlRuleWithAction.setAction(new SqlRuleAction("SET Color = 'BlueProcessed'"));
    sqlFilterWithActionSubscriptionClient.addRule(sqlRuleWithAction);

    // Drop existing rules and add a CorrelationFilter
    logger.info(String.format("SubscriptionName: %s, Removing Default Rule and Adding CorrelationFilter",
            sqlFilterWithActionSubscriptionName));
    for (RuleDescription rd : correlationFilterSubscriptionClient.getRules()) {
        correlationFilterSubscriptionClient.removeRule(rd.getName());
    }
    // This correlation filter matches on correlation id and label
    CorrelationFilter correlationFilter = new CorrelationFilter();
    correlationFilter.setCorrelationId("important");
    correlationFilter.setLabel("Red");
    correlationFilterSubscriptionClient
            .addRule(new RuleDescription("ImportantCorrelationRule", correlationFilter));

    // Get the rules on a subscription, called here only for one subscription as an example
    RuleDescription[] rules = correlationFilterSubscriptionClient.getRules().toArray(new RuleDescription[0]);
    logger.info(String.format("GetRules:: SubscriptionName: %s, CorrelationFilter Name: %s, Rule: %s",
            correlationFilterSubscriptionName, rules[0].getName(), rules[0].getFilter()));

    // Send messages to the topic
    sendMessages();

    // Receive messages from 'allMessagesSubscriptionName'. Should receive all 9 messages
    receiveMessages(allMessagesSubscriptionName);

    // Receive messages from 'sqlFilterOnlySubscriptionName'. Should receive all messages
    // with Color = 'Red', i.e. 3 messages
    receiveMessages(sqlFilterOnlySubscriptionName);

    // Receive messages from 'sqlFilterWithActionSubscriptionClient'. Should receive all messages
    // with Color = 'Blue', i.e. 3 messages, and all of them should have Color set to 'BlueProcessed'
    receiveMessages(sqlFilterWithActionSubscriptionName);

    // Receive messages from 'correlationFilterSubscriptionName'. Should receive all messages
    // with Color = 'Red' and CorrelationId = "important", i.e. 1 message
    receiveMessages(correlationFilterSubscriptionName);

    logger.info("Completed Receiving all messages...");
    logger.info("=========================================================");

    allMessagessubscriptionClient.close();
    sqlFilterOnlySubscriptionClient.close();
    sqlFilterWithActionSubscriptionClient.close();
    correlationFilterSubscriptionClient.close();
    topicClient.close();
}
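The log lines above pass non-String objects such as rules[0].getFilter() straight to %s, which formats them with String.valueOf and therefore their toString(). A tiny sketch with a hypothetical class standing in for the Service Bus rule types (not part of the Azure API).

public class ToStringViaFormat {

    // Hypothetical stand-in for a filter object; not part of the Azure Service Bus API.
    static class ColorFilter {
        private final String color;

        ColorFilter(String color) {
            this.color = color;
        }

        @Override
        public String toString() {
            return "Color = '" + color + "'";
        }
    }

    public static void main(String[] args) {
        ColorFilter filter = new ColorFilter("Red");
        // %s calls String.valueOf(filter), which delegates to filter.toString().
        System.out.println(String.format("SubscriptionName: %s, Rule: %s", "correlation-subscription", filter));
    }
}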