List of usage examples for java.lang.System.currentTimeMillis()
@HotSpotIntrinsicCandidate public static native long currentTimeMillis();
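System.currentTimeMillis() returns the current time in milliseconds since midnight, January 1, 1970 UTC. The examples below all share one pattern: record a start timestamp, do the work, then subtract to get elapsed wall-clock time. Here is a minimal, self-contained sketch of that pattern (the class name and the Thread.sleep stand-in are illustrative, not taken from any example below):

public class ElapsedTimeExample {
    public static void main(String[] args) throws InterruptedException {
        long start = System.currentTimeMillis();
        Thread.sleep(250); // stand-in for the work being timed
        long elapsedMs = System.currentTimeMillis() - start;
        System.out.println("Finished in " + elapsedMs / 1000.0 + " seconds");
    }
}

Because currentTimeMillis() follows the system clock, it can jump if the clock is adjusted; System.nanoTime() is the safer choice for pure duration measurement, but every example on this page uses currentTimeMillis() as shown.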
From source file:com.aliyun.odps.examples.graph.TopologySort.java
public static void main(String[] args) throws IOException {
    if (args.length != 2) {
        System.out.println("Usage : <inputTable> <outputTable>");
        System.exit(-1);
    }
    // Sample input (first column: vertex id, second column: destination vertex ids;
    // -1 means the vertex has no outgoing edge):
    // 0 1;2
    // 1 3
    // 2 3
    // 3 -1
    // Expected output (first column: superstep, second column: vertex id):
    // 1,0
    // 2,1
    // 2,2
    // 3,3
    // TopologySortAggregator checks whether the graph still has active vertices;
    // when no vertex is active any more, the iteration ends.
    GraphJob job = new GraphJob();
    job.setGraphLoaderClass(TopologySortVertexReader.class);
    job.setVertexClass(TopologySortVertex.class);
    job.addInput(TableInfo.builder().tableName(args[0]).build());
    job.addOutput(TableInfo.builder().tableName(args[1]).build());
    job.setCombinerClass(LongSumCombiner.class);
    job.setAggregatorClass(TopologySortAggregator.class);

    long startTime = System.currentTimeMillis();
    job.run();
    System.out.println(
            "Job Finished in " + (System.currentTimeMillis() - startTime) / 1000.0 + " seconds");
}
From source file:deck36.storm.plan9.nodejs.HighFiveStreamJoinTopology.java
public static void main(String[] args) throws Exception {
    String env = null;
    if (args != null && args.length > 0) {
        env = args[0];
    }
    if (!"dev".equals(env) && !"local".equals(env) && !"prod".equals(env)) {
        System.out.println("Usage: $0 (local|dev|prod)\n");
        System.exit(1);
    }

    // Topology config
    Config conf = new Config();

    // Load parameters and add them to the Config
    Map configMap = YamlLoader.loadYamlFromResource("storm_" + env + ".yml");
    conf.putAll(configMap);

    log.info(JSONValue.toJSONString(conf));

    // Set topology loglevel to DEBUG
    conf.put(Config.TOPOLOGY_DEBUG, JsonPath.read(conf, "$.deck36_storm.debug"));

    // Create Topology builder
    TopologyBuilder builder = new TopologyBuilder();

    // If there are no special reasons, start with a parallelism hint of 1
    // and multiple tasks. That way you can scale dynamically later on.
    int parallelism_hint = JsonPath.read(conf, "$.deck36_storm.default_parallelism_hint");
    int num_tasks = JsonPath.read(conf, "$.deck36_storm.default_num_tasks");

    // Create a stream from RabbitMQ messages:
    // bind a new queue, named after the topology,
    // to the main plan9 exchange (from the properties config),
    // consuming only CBT-related events by using the routing key 'cbt.#'
    String badgeName = HighFiveStreamJoinTopology.class.getSimpleName();

    String rabbitQueueName = badgeName; // use topology class name as name for the queue
    String rabbitExchangeName = JsonPath.read(conf,
            "$.deck36_storm.HighFiveStreamJoinBolt.rabbitmq.exchange");
    String rabbitRoutingKey = JsonPath.read(conf,
            "$.deck36_storm.HighFiveStreamJoinBolt.rabbitmq.routing_key");

    // Get JSON deserialization scheme
    Scheme rabbitScheme = new SimpleJSONScheme();

    // Setup a Declarator to configure exchange/queue/routing key
    RabbitMQDeclarator rabbitDeclarator = new RabbitMQDeclarator(rabbitExchangeName, rabbitQueueName,
            rabbitRoutingKey);

    // Create Configuration for the Spout
    ConnectionConfig connectionConfig = new ConnectionConfig(
            (String) JsonPath.read(conf, "$.deck36_storm.rabbitmq.host"),
            (Integer) JsonPath.read(conf, "$.deck36_storm.rabbitmq.port"),
            (String) JsonPath.read(conf, "$.deck36_storm.rabbitmq.user"),
            (String) JsonPath.read(conf, "$.deck36_storm.rabbitmq.pass"),
            (String) JsonPath.read(conf, "$.deck36_storm.rabbitmq.vhost"),
            (Integer) JsonPath.read(conf, "$.deck36_storm.rabbitmq.heartbeat"));

    ConsumerConfig spoutConfig = new ConsumerConfigBuilder().connection(connectionConfig)
            .queue(rabbitQueueName)
            .prefetch((Integer) JsonPath.read(conf, "$.deck36_storm.rabbitmq.prefetch"))
            .requeueOnFail()
            .build();

    // Add global parameters to the topology config - the RabbitMQSpout will read them from there
    conf.putAll(spoutConfig.asMap());

    // For production, set the spout pending value to the same value as the RabbitMQ pre-fetch;
    // see: https://github.com/ppat/storm-rabbitmq/blob/master/README.md
    if ("prod".equals(env)) {
        conf.put(Config.TOPOLOGY_MAX_SPOUT_PENDING,
                (Integer) JsonPath.read(conf, "$.deck36_storm.rabbitmq.prefetch"));
    }

    // Add RabbitMQ spout to topology
    builder.setSpout("incoming", new RabbitMQSpout(rabbitScheme, rabbitDeclarator), parallelism_hint)
            .setNumTasks((Integer) JsonPath.read(conf, "$.deck36_storm.rabbitmq.spout_tasks"));

    // Construct the command to invoke the external bolt implementation
    ArrayList<String> command = new ArrayList<String>(15);

    // Add main execution program (node, ...) and parameters
    command.add((String) JsonPath.read(conf, "$.deck36_storm.nodejs.executor"));

    // Add main route to be invoked and its parameters
    command.add((String) JsonPath.read(conf, "$.deck36_storm.HighFiveStreamJoinBolt.main"));
    List boltParams = (List<String>) JsonPath.read(conf, "$.deck36_storm.HighFiveStreamJoinBolt.params");
    if (boltParams != null)
        command.addAll(boltParams);

    // Log the final command
    log.info("Command to start bolt for HighFive badge: " + Arrays.toString(command.toArray()));

    // Add the constructed external bolt command to the topology using MultilangAdapterBolt
    builder.setBolt("badge", new MultilangAdapterBolt(command, "badge"), parallelism_hint)
            .setNumTasks(num_tasks)
            .shuffleGrouping("incoming");

    builder.setBolt("rabbitmq_router",
            new Plan9RabbitMQRouterBolt(
                    (String) JsonPath.read(conf,
                            "$.deck36_storm.HighFiveStreamJoinBolt.rabbitmq.target_exchange"),
                    "HighFive" // RabbitMQ routing key
            ), parallelism_hint)
            .setNumTasks(num_tasks)
            .shuffleGrouping("badge");

    builder.setBolt("rabbitmq_producer", new Plan9RabbitMQPushBolt(), parallelism_hint)
            .setNumTasks(num_tasks)
            .shuffleGrouping("rabbitmq_router");

    if ("dev".equals(env) || "local".equals(env)) {
        LocalCluster cluster = new LocalCluster();
        cluster.submitTopology(badgeName + System.currentTimeMillis(), conf, builder.createTopology());
        Thread.sleep(2000000);
    }

    if ("prod".equals(env)) {
        StormSubmitter.submitTopology(badgeName + "-" + System.currentTimeMillis(), conf,
                builder.createTopology());
    }
}
From source file:fr.inria.atlanmod.dag.instantiator.Launcher.java
public static void main(String[] args) throws GenerationException, IOException {
    ResourceSetImpl resourceSet = new ResourceSetImpl();
    { // initializing the registry
        resourceSet.getResourceFactoryRegistry().getExtensionToFactoryMap().put(EcorePackage.eNS_PREFIX,
                new EcoreResourceFactoryImpl());
        resourceSet.getResourceFactoryRegistry().getExtensionToFactoryMap()
                .put(Resource.Factory.Registry.DEFAULT_EXTENSION, new XMIResourceFactoryImpl());
        resourceSet.getResourceFactoryRegistry().getProtocolToFactoryMap().put(NeoEMFURI.NEOEMF_HBASE_SCHEME,
                NeoEMFResourceFactory.eINSTANCE);
    }

    Options options = new Options();
    configureOptions(options);
    CommandLineParser parser = new GnuParser();

    try {
        CommandLine commandLine = parser.parse(options, args);

        // String epackage_class = commandLine.getOptionValue(E_PACKAGE_CLASS);
        //
        // LOGGER.info("Start loading the package");
        // Class<?> inClazz = Launcher.class.getClassLoader().loadClass(epackage_class);
        // EPackage _package = (EPackage) inClazz.getMethod("init").invoke(null);
        //
        // Resource metamodelResource = new XMIResourceImpl(URI.createFileURI("dummy"));
        // metamodelResource.getContents().add(_package);
        // LOGGER.info("Finish loading the package");

        int size = Launcher.DEFAULT_AVERAGE_MODEL_SIZE;
        if (commandLine.hasOption(SIZE)) {
            Number number = (Number) commandLine.getParsedOptionValue(SIZE);
            size = (int) Math.min(Integer.MAX_VALUE, number.longValue());
        }

        float variation = Launcher.DEFAULT_DEVIATION;
        if (commandLine.hasOption(VARIATION)) {
            Number number = (Number) commandLine.getParsedOptionValue(VARIATION);
            if (number.floatValue() < 0.0f || number.floatValue() > 1.0f) {
                throw new ParseException(MessageFormat.format("Invalid value for option -{0}: {1}",
                        VARIATION, number.floatValue()));
            }
            variation = number.floatValue();
        }

        float propVariation = Launcher.DEFAULT_DEVIATION;
        if (commandLine.hasOption(PROP_VARIATION)) {
            Number number = (Number) commandLine.getParsedOptionValue(PROP_VARIATION);
            if (number.floatValue() < 0.0f || number.floatValue() > 1.0f) {
                throw new ParseException(MessageFormat.format("Invalid value for option -{0}: {1}",
                        PROP_VARIATION, number.floatValue()));
            }
            propVariation = number.floatValue();
        }

        long seed = System.currentTimeMillis();
        if (commandLine.hasOption(SEED)) {
            seed = ((Number) commandLine.getParsedOptionValue(SEED)).longValue();
        }

        Range<Integer> range = Range.between(Math.round(size * (1 - variation)),
                Math.round(size * (1 + variation)));

        ISpecimenConfiguration config = new DagMetamodelConfig(range, seed);
        IGenerator generator = new DagGenerator(config, config.getSeed());

        GenericMetamodelGenerator modelGen = new GenericMetamodelGenerator(generator);

        if (commandLine.hasOption(OUTPUT_PATH)) {
            String outDir = commandLine.getOptionValue(OUTPUT_PATH);
            // java.net.URI intermediateURI = java.net.URI.create(outDir);
            modelGen.setSamplesPath(outDir);
        }

        int numberOfModels = 1;
        if (commandLine.hasOption(N_MODELS)) {
            numberOfModels = ((Number) commandLine.getParsedOptionValue(N_MODELS)).intValue();
        }

        int valuesSize = GenericMetamodelConfig.DEFAULT_AVERAGE_VALUES_LENGTH;
        if (commandLine.hasOption(VALUES_SIZE)) {
            Number number = (Number) commandLine.getParsedOptionValue(VALUES_SIZE);
            valuesSize = (int) Math.min(Integer.MAX_VALUE, number.longValue());
        }

        int referencesSize = GenericMetamodelConfig.DEFAULT_AVERAGE_REFERENCES_SIZE;
        if (commandLine.hasOption(DEGREE)) {
            Number number = (Number) commandLine.getParsedOptionValue(DEGREE);
            referencesSize = (int) Math.min(Integer.MAX_VALUE, number.longValue());
        }

        config.setValuesRange(Math.round(valuesSize * (1 - propVariation)),
                Math.round(valuesSize * (1 + propVariation)));
        config.setReferencesRange(Math.round(referencesSize * (1 - propVariation)),
                Math.round(referencesSize * (1 + propVariation)));
        config.setPropertiesRange(Math.round(referencesSize * (1 - propVariation)),
                Math.round(referencesSize * (1 + propVariation)));

        long start = System.currentTimeMillis();
        modelGen.runGeneration(resourceSet, numberOfModels, size, variation);
        long end = System.currentTimeMillis();
        LOGGER.info(MessageFormat.format("Generation finished after {0} s",
                Long.toString((end - start) / 1000)));

        // for (Resource rsc : resourceSet.getResources()) {
        //     if (rsc.getContents().get(0) instanceof DAG) {
        //
        //     }
        // }

        if (commandLine.hasOption(DIAGNOSE)) {
            for (Resource resource : resourceSet.getResources()) {
                LOGGER.info(MessageFormat.format("Requested validation for resource ''{0}''",
                        resource.getURI()));
                BasicDiagnostic diagnosticChain = diagnoseResource(resource);
                if (!isFailed(diagnosticChain)) {
                    LOGGER.info(MessageFormat.format("Result of the diagnosis of resource ''{0}'' is ''OK''",
                            resource.getURI()));
                } else {
                    LOGGER.severe(MessageFormat.format("Found ''{0}'' error(s) in the resource ''{1}''",
                            diagnosticChain.getChildren().size(), resource.getURI()));
                    for (Diagnostic diagnostic : diagnosticChain.getChildren()) {
                        LOGGER.fine(diagnostic.getMessage());
                    }
                }
            }
            LOGGER.info("Validation finished");
        }
    } catch (ParseException e) {
        System.err.println(e.getLocalizedMessage());
        HelpFormatter formatter = new HelpFormatter();
        formatter.setOptionComparator(new OptionComarator<Option>());
        try {
            formatter.setWidth(Math.max(Terminal.getTerminal().getTerminalWidth(), 80));
        } catch (Throwable t) {
            LOGGER.warning("Unable to get console information");
        }
        formatter.printHelp("java -jar <this-file.jar>", options, true);
        System.exit(ERROR);
    } catch (Throwable t) {
        System.err.println("ERROR: " + t.getLocalizedMessage());
        StringWriter stringWriter = new StringWriter();
        t.printStackTrace(new PrintWriter(stringWriter));
        System.err.println(t);
        LOGGER.severe(stringWriter.toString());
        System.exit(ERROR);
    }
}
From source file:cc.twittertools.index.IndexStatuses.java
@SuppressWarnings("static-access")
public static void main(String[] args) throws Exception {
    Options options = new Options();

    options.addOption(new Option(HELP_OPTION, "show help"));
    options.addOption(new Option(OPTIMIZE_OPTION, "merge indexes into a single segment"));
    options.addOption(new Option(STORE_TERM_VECTORS_OPTION, "store term vectors"));

    options.addOption(OptionBuilder.withArgName("dir").hasArg().withDescription("source collection directory")
            .create(COLLECTION_OPTION));
    options.addOption(
            OptionBuilder.withArgName("dir").hasArg().withDescription("index location").create(INDEX_OPTION));
    options.addOption(OptionBuilder.withArgName("file").hasArg().withDescription("file with deleted tweetids")
            .create(DELETES_OPTION));
    options.addOption(OptionBuilder.withArgName("id").hasArg().withDescription("max id").create(MAX_ID_OPTION));

    CommandLine cmdline = null;
    CommandLineParser parser = new GnuParser();
    try {
        cmdline = parser.parse(options, args);
    } catch (ParseException exp) {
        System.err.println("Error parsing command line: " + exp.getMessage());
        System.exit(-1);
    }

    if (cmdline.hasOption(HELP_OPTION) || !cmdline.hasOption(COLLECTION_OPTION)
            || !cmdline.hasOption(INDEX_OPTION)) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp(IndexStatuses.class.getName(), options);
        System.exit(-1);
    }

    String collectionPath = cmdline.getOptionValue(COLLECTION_OPTION);
    String indexPath = cmdline.getOptionValue(INDEX_OPTION);

    final FieldType textOptions = new FieldType();
    textOptions.setIndexed(true);
    textOptions.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS);
    textOptions.setStored(true);
    textOptions.setTokenized(true);
    if (cmdline.hasOption(STORE_TERM_VECTORS_OPTION)) {
        textOptions.setStoreTermVectors(true);
    }

    LOG.info("collection: " + collectionPath);
    LOG.info("index: " + indexPath);

    LongOpenHashSet deletes = null;
    if (cmdline.hasOption(DELETES_OPTION)) {
        deletes = new LongOpenHashSet();
        File deletesFile = new File(cmdline.getOptionValue(DELETES_OPTION));
        if (!deletesFile.exists()) {
            System.err.println("Error: " + deletesFile + " does not exist!");
            System.exit(-1);
        }
        LOG.info("Reading deletes from " + deletesFile);

        FileInputStream fin = new FileInputStream(deletesFile);
        byte[] ignoreBytes = new byte[2];
        fin.read(ignoreBytes); // "B", "Z" bytes from commandline tools
        BufferedReader br = new BufferedReader(new InputStreamReader(new CBZip2InputStream(fin)));

        String s;
        while ((s = br.readLine()) != null) {
            if (s.contains("\t")) {
                deletes.add(Long.parseLong(s.split("\t")[0]));
            } else {
                deletes.add(Long.parseLong(s));
            }
        }
        br.close();
        fin.close();
        LOG.info("Read " + deletes.size() + " tweetids from deletes file.");
    }

    long maxId = Long.MAX_VALUE;
    if (cmdline.hasOption(MAX_ID_OPTION)) {
        maxId = Long.parseLong(cmdline.getOptionValue(MAX_ID_OPTION));
        LOG.info("max id: " + maxId);
    }

    long startTime = System.currentTimeMillis();
    File file = new File(collectionPath);
    if (!file.exists()) {
        System.err.println("Error: " + file + " does not exist!");
        System.exit(-1);
    }

    StatusStream stream = new JsonStatusCorpusReader(file);

    Directory dir = FSDirectory.open(new File(indexPath));
    IndexWriterConfig config = new IndexWriterConfig(Version.LUCENE_43, IndexStatuses.ANALYZER);
    config.setOpenMode(OpenMode.CREATE);

    IndexWriter writer = new IndexWriter(dir, config);
    int cnt = 0;
    Status status;
    try {
        while ((status = stream.next()) != null) {
            if (status.getText() == null) {
                continue;
            }

            // Skip deleted tweetids.
            if (deletes != null && deletes.contains(status.getId())) {
                continue;
            }

            if (status.getId() > maxId) {
                continue;
            }

            cnt++;
            Document doc = new Document();
            doc.add(new LongField(StatusField.ID.name, status.getId(), Field.Store.YES));
            doc.add(new LongField(StatusField.EPOCH.name, status.getEpoch(), Field.Store.YES));
            doc.add(new TextField(StatusField.SCREEN_NAME.name, status.getScreenname(), Store.YES));

            doc.add(new Field(StatusField.TEXT.name, status.getText(), textOptions));

            doc.add(new IntField(StatusField.FRIENDS_COUNT.name, status.getFollowersCount(), Store.YES));
            doc.add(new IntField(StatusField.FOLLOWERS_COUNT.name, status.getFriendsCount(), Store.YES));
            doc.add(new IntField(StatusField.STATUSES_COUNT.name, status.getStatusesCount(), Store.YES));

            long inReplyToStatusId = status.getInReplyToStatusId();
            if (inReplyToStatusId > 0) {
                doc.add(new LongField(StatusField.IN_REPLY_TO_STATUS_ID.name, inReplyToStatusId,
                        Field.Store.YES));
                doc.add(new LongField(StatusField.IN_REPLY_TO_USER_ID.name, status.getInReplyToUserId(),
                        Field.Store.YES));
            }

            String lang = status.getLang();
            if (!lang.equals("unknown")) {
                doc.add(new TextField(StatusField.LANG.name, status.getLang(), Store.YES));
            }

            long retweetStatusId = status.getRetweetedStatusId();
            if (retweetStatusId > 0) {
                doc.add(new LongField(StatusField.RETWEETED_STATUS_ID.name, retweetStatusId, Field.Store.YES));
                doc.add(new LongField(StatusField.RETWEETED_USER_ID.name, status.getRetweetedUserId(),
                        Field.Store.YES));
                doc.add(new IntField(StatusField.RETWEET_COUNT.name, status.getRetweetCount(), Store.YES));
                if (status.getRetweetCount() < 0 || status.getRetweetedStatusId() < 0) {
                    LOG.warn("Error parsing retweet fields of " + status.getId());
                }
            }

            writer.addDocument(doc);
            if (cnt % 100000 == 0) {
                LOG.info(cnt + " statuses indexed");
            }
        }

        LOG.info(String.format("Total of %s statuses added", cnt));

        if (cmdline.hasOption(OPTIMIZE_OPTION)) {
            LOG.info("Merging segments...");
            writer.forceMerge(1);
            LOG.info("Done!");
        }

        LOG.info("Total elapsed time: " + (System.currentTimeMillis() - startTime) + "ms");
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        writer.close();
        dir.close();
        stream.close();
    }
}
From source file:com.clustercontrol.HinemosManagerMain.java
/**
 * Main method of the Hinemos Manager.<br/>
 *
 * @param args
 */
public static void main(String args[]) {
    try {
        try {
            if (System.getProperty("systime.iso") != null) {
                String oldVmName = System.getProperty("java.vm.name");
                // pretend to be HotSpot so that the system-time mock class can be loaded
                System.setProperty("java.vm.name", "HotSpot 64-Bit Server VM");
                Class.forName("com.clustercontrol.util.SystemTimeShifter");
                System.setProperty("java.vm.name", oldVmName);
            }
        } catch (Exception e) {
            log.error(e);
        } catch (Error e) {
            log.error(e);
        }

        long bootTime = System.currentTimeMillis();
        log.info("Hinemos Manager is starting." + " (startupMode=" + _startupMode + ", clustered="
                + _isClustered + ", locale=" + Locale.getDefault() + ")");

        // apply the configured offset to the Hinemos internal clock (common.time.offset)
        long offset = HinemosPropertyUtil.getHinemosPropertyNum("common.time.offset", Long.valueOf(0));
        HinemosTime.setTimeOffsetMillis(offset);

        // set the Hinemos time zone offset from UTC (common.timezone)
        int timeZoneOffset = HinemosPropertyUtil
                .getHinemosPropertyNum("common.timezone", Long.valueOf(TimeZone.getDefault().getRawOffset()))
                .intValue();
        HinemosTime.setTimeZoneOffset(timeZoneOffset);

        // create all registered HinemosPlugins
        HinemosPluginService.getInstance().create();

        // activate all registered HinemosPlugins
        HinemosPluginService.getInstance().activate();

        // register a shutdown hook for the Hinemos Manager
        Runtime.getRuntime().addShutdownHook(new Thread() {
            @Override
            public void run() {
                log.info("shutdown hook called.");
                synchronized (shutdownLock) {
                    // notify that the Hinemos Manager is stopping
                    String[] msgArgsShutdown = { _hostname };
                    AplLogger.put(PriorityConstant.TYPE_INFO, HinemosModuleConstant.HINEMOS_MANAGER_MONITOR,
                            MessageConstant.MESSAGE_SYS_002_MNG, msgArgsShutdown);

                    // deactivate all registered HinemosPlugins
                    HinemosPluginService.getInstance().deactivate();

                    // destroy all registered HinemosPlugins
                    HinemosPluginService.getInstance().destroy();

                    log.info("Hinemos Manager is stopped.");
                    shutdown = true;
                    shutdownLock.notify();
                }
            }
        });

        // log the startup time
        long startupTime = System.currentTimeMillis();
        long initializeSec = (startupTime - bootTime) / 1000;
        long initializeMSec = (startupTime - bootTime) % 1000;
        log.info("Hinemos Manager Started in " + initializeSec + "s:" + initializeMSec + "ms");

        // notify that the Hinemos Manager has started
        String[] msgArgsStart = { _hostname };
        AplLogger.put(PriorityConstant.TYPE_INFO, HinemosModuleConstant.HINEMOS_MANAGER_MONITOR,
                MessageConstant.MESSAGE_SYS_001_MNG, msgArgsStart);

        // wait until the Hinemos Manager is shut down
        synchronized (shutdownLock) {
            while (!shutdown) {
                try {
                    shutdownLock.wait();
                } catch (InterruptedException e) {
                    log.warn("shutdown lock interrupted.", e);
                    try {
                        Thread.sleep(1000);
                    } catch (InterruptedException sleepE) {
                    }
                }
            }
        }

        System.exit(0);
    } catch (Throwable e) {
        log.error("unknown error occurred.", e);
    }
}
From source file:com.cloudhopper.httpclient.util.HttpPostMain.java
static public void main(String[] args) throws Exception {
    //
    // target urls
    //
    String strURL = "http://209.226.31.233:9009/SendSmsService/b98183b99a1f473839ce569c78b84dbd";

    // Username: Twitter
    // Password: Twitter123

    TrustManager easyTrustManager = new X509TrustManager() {
        public void checkClientTrusted(java.security.cert.X509Certificate[] arg0, String arg1)
                throws CertificateException {
            // allow all
        }

        public void checkServerTrusted(java.security.cert.X509Certificate[] arg0, String arg1)
                throws CertificateException {
            // allow all
        }

        public java.security.cert.X509Certificate[] getAcceptedIssuers() {
            return null;
        }
    };

    Scheme http = new Scheme("http", PlainSocketFactory.getSocketFactory(), 80);

    SSLContext sslcontext = SSLContext.getInstance("TLS");
    sslcontext.init(null, new TrustManager[] { easyTrustManager }, null);
    SSLSocketFactory sf = new SSLSocketFactory(sslcontext);
    sf.setHostnameVerifier(SSLSocketFactory.ALLOW_ALL_HOSTNAME_VERIFIER);
    Scheme https = new Scheme("https", sf, 443);

    //SchemeRegistry sr = new SchemeRegistry();
    //sr.register(http);
    //sr.register(https);

    // create and initialize scheme registry
    //SchemeRegistry schemeRegistry = new SchemeRegistry();
    //schemeRegistry.register(new Scheme("http", PlainSocketFactory.getSocketFactory(), 80));

    // create an HttpClient with the ThreadSafeClientConnManager.
    // This connection manager must be used if more than one thread will
    // be using the HttpClient.
    //ThreadSafeClientConnManager cm = new ThreadSafeClientConnManager(schemeRegistry);
    //cm.setMaxTotalConnections(1);

    DefaultHttpClient client = new DefaultHttpClient();
    client.getConnectionManager().getSchemeRegistry().register(https);

    // for (int i = 0; i < 1; i++) {

    // create a new ticket id
    //String ticketId = TicketUtil.generate(1, System.currentTimeMillis());

    /**
    StringBuilder string0 = new StringBuilder(200)
        .append("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n")
        .append("<S:Envelope xmlns:S=\"http://schemas.xmlsoap.org/soap/envelope/\">\n")
        .append(" <S:Header>\n")
        .append(" <ns3:TransactionID xmlns:ns4=\"http://vmp.vzw.com/schema\"\n")
        .append("xmlns:ns3=\"http://www.3gpp.org/ftp/Specs/archive/23_series/23.140/schema/REL-6-MM7-1-4\">" + ticketId + "</ns3:TransactionID>\n")
        .append(" </S:Header>\n")
        .append(" <S:Body>\n")
        .append(" <ns2:OptinReq xmlns:ns4=\"http://schemas.xmlsoap.org/soap/envelope/\"\n")
        .append("xmlns:ns3=\"http://www.3gpp.org/ftp/Specs/archive/23_series/23.140/schema/REL-6-MM7-1-4\"\n")
        .append("xmlns:ns2=\"http://vmp.vzw.com/schema\">\n")
        .append(" <ns2:VASPID>twitter</ns2:VASPID>\n")
        .append(" <ns2:VASID>tm33t!</ns2:VASID>\n")
        .append(" <ns2:ShortCode>800080008001</ns2:ShortCode>\n")
        .append(" <ns2:Number>9257089093</ns2:Number>\n")
        .append(" <ns2:Source>provider</ns2:Source>\n")
        .append(" <ns2:Message/>\n")
        .append(" </ns2:OptinReq>\n")
        .append(" </S:Body>\n")
        .append("</S:Envelope>");
    */

    // simple send sms
    StringBuilder string1 = new StringBuilder(200)
            .append("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n")
            .append("<soapenv:Envelope xmlns:soapenv=\"http://schemas.xmlsoap.org/soap/envelope/\" xmlns:loc=\"http://www.csapi.org/schema/parlayx/sms/send/v2_3/local\">\n")
            .append(" <soapenv:Header/>\n")
            .append(" <soapenv:Body>\n")
            .append(" <loc:sendSms>\n")
            .append(" <loc:addresses>tel:+16472260233</loc:addresses>\n")
            .append(" <loc:senderName>6388</loc:senderName>\n")
            .append(" <loc:message>Test Message &</loc:message>\n")
            .append(" </loc:sendSms>\n")
            .append(" </soapenv:Body>\n")
            .append("</soapenv:Envelope>\n");

    // startSmsNotification - place to deliver SMS to

    String req = string1.toString();
    logger.debug("Request XML -> \n" + req);

    HttpPost post = new HttpPost(strURL);
    StringEntity postEntity = new StringEntity(req, "ISO-8859-1");
    postEntity.setContentType("text/xml; charset=\"ISO-8859-1\"");
    post.addHeader("SOAPAction", "\"\"");
    post.setEntity(postEntity);

    long start = System.currentTimeMillis();

    client.getCredentialsProvider().setCredentials(new AuthScope("209.226.31.233", AuthScope.ANY_PORT),
            new UsernamePasswordCredentials("Twitter", "Twitter123"));

    BasicHttpContext localcontext = new BasicHttpContext();

    // Generate BASIC scheme object and stick it to the local execution context
    BasicScheme basicAuth = new BasicScheme();
    localcontext.setAttribute("preemptive-auth", basicAuth);

    // Add as the first request interceptor
    client.addRequestInterceptor(new PreemptiveAuth(), 0);

    HttpResponse httpResponse = client.execute(post, localcontext);
    HttpEntity responseEntity = httpResponse.getEntity();

    //
    // was the request OK?
    //
    if (httpResponse.getStatusLine().getStatusCode() != 200) {
        logger.error("Request failed with StatusCode=" + httpResponse.getStatusLine().getStatusCode());
    }

    // get an input stream
    String responseBody = EntityUtils.toString(responseEntity);

    long stop = System.currentTimeMillis();

    logger.debug("----------------------------------------");
    logger.debug("Response took " + (stop - start) + " ms");
    logger.debug(responseBody);
    logger.debug("----------------------------------------");

    // }

    // When HttpClient instance is no longer needed,
    // shut down the connection manager to ensure
    // immediate deallocation of all system resources
    client.getConnectionManager().shutdown();
}
From source file:org.helios.rindle.metric.MetricSerialization.java
public static void main(String[] args) {
    log("Test Metric Ser");
    byte[] okey = new byte[10];
    Random r = new Random(System.currentTimeMillis());
    r.nextBytes(okey);
    IMetricDefinition[] metrics = { new UnsafeMetricDefinition(77, "MeToo", okey),
            new UnsafeMetricDefinition(54, "FooBar", "FooBar".getBytes()) };
    // SimpleModule module = new SimpleModule();
    // module.addSerializer(IMetricDefinition.class, new MetricSerialization.UnsafeMetricDefinitionSerializer());
    // module.addDeserializer(IMetricDefinition.class, new MetricSerialization.UnsafeMetricDefinitionDeserializer());
    // MAP.registerModule(module);
    try {
        String s = JSON.MAP.writeValueAsString(metrics);
        log(s);
        // String text = "[{\"id\":54,\"ts\":1397644799993,\"n\":\"FooBar\",\"o\":\"Um05dlFtRnk=\"},{\"id\":77,\"ts\":1397644799993,\"n\":\"MeToo\",\"o\":\"V1c5NWJ3PT0A\"}]";
        IMetricDefinition[] deser = JSON.MAP.readValue(s, IMetricDefinition[].class);
        log(Arrays.toString(deser));
        log("========================================");
        s = JSON.MAP.writeValueAsString(deser);
        log(s);
        log("========================================");
        for (int i = 0; i < deser.length; i++) {
            // compare each deserialized metric with the original at the same index
            log("Metric " + i + ": " + deser[i].equals(metrics[i]));
        }
    } catch (Exception x) {
        x.printStackTrace(System.err);
    }
    // MAP.registerModule(mod);
}
From source file:de.uni_koblenz.jgralab.utilities.tgraphbrowser.TGraphBrowserServer.java
/**
 * Runs the server. Needed args: -w --workspace the workspace
 *
 * @param args
 */
public static void main(String[] args) {
    CommandLine comLine = processCommandLineOptions(args);
    assert comLine != null;
    try {
        starttime = System.currentTimeMillis();
        String portnumber = comLine.getOptionValue("p");
        String workspacePath;
        if (comLine.hasOption("r")) {
            TwoDVisualizer.PRINT_ROLE_NAMES = true;
        }
        if (comLine.hasOption("w")) {
            workspacePath = comLine.getOptionValue("w");
        } else {
            File workspace = new File(
                    System.getProperty("java.io.tmpdir") + File.separator + "tgraphbrowser");
            if (!workspace.exists()) {
                if (!workspace.mkdir()) {
                    logger.info("The temp folder " + workspace.getAbsolutePath() + " could not be created.");
                }
            }
            workspace = new File(workspace.getAbsoluteFile() + File.separator + "workspace");
            if (!workspace.exists()) {
                if (!workspace.mkdir()) {
                    logger.info("The default workspace " + workspace.getAbsolutePath()
                            + " could not be created.");
                }
            }
            workspacePath = workspace.getAbsolutePath();
        }
        new TGraphBrowserServer(portnumber == null ? DEFAULT_PORT : Integer.parseInt(portnumber),
                workspacePath, comLine.getOptionValue("m"), comLine.getOptionValue("s")).start();
        if (comLine.getOptionValue("ic") != null) {
            TabularVisualizer.NUMBER_OF_INCIDENCES_PER_PAGE = Math
                    .max(Integer.parseInt(comLine.getOptionValue("ic")), 1);
        }
        if (comLine.hasOption("d")) {
            StateRepository.dot = comLine.getOptionValue("d");
        } else {
            System.out.println("The 2D-Visualization is disabled because parameter -d is not set.");
        }
        String timeout = comLine.getOptionValue("t");
        String checkIntervall = comLine.getOptionValue("i");
        new DeleteUnusedStates(timeout == null ? 600 : Integer.parseInt(timeout),
                checkIntervall == null ? 60 : Integer.parseInt(checkIntervall)).start();
        if (comLine.hasOption("td")) {
            TwoDVisualizer.SECONDS_TO_WAIT_FOR_DOT = Integer.parseInt(comLine.getOptionValue("td"));
        }
    } catch (IOException e) {
        System.out.println(e);
    }
}
From source file:deck36.storm.plan9.php.HighFiveBadgeTopology.java
public static void main(String[] args) throws Exception {
    String env = null;
    if (args != null && args.length > 0) {
        env = args[0];
    }
    if (!"dev".equals(env) && !"prod".equals(env)) {
        System.out.println("Usage: $0 (dev|prod)\n");
        System.exit(1);
    }

    // Topology config
    Config conf = new Config();

    // Load parameters and add them to the Config
    Map configMap = YamlLoader.loadYamlFromResource("config_" + env + ".yml");
    conf.putAll(configMap);

    log.info(JSONValue.toJSONString(conf));

    // Set topology loglevel to DEBUG
    conf.put(Config.TOPOLOGY_DEBUG, JsonPath.read(conf, "$.deck36_storm.debug"));

    // Create Topology builder
    TopologyBuilder builder = new TopologyBuilder();

    // If there are no special reasons, start with a parallelism hint of 1
    // and multiple tasks. That way you can scale dynamically later on.
    int parallelism_hint = JsonPath.read(conf, "$.deck36_storm.default_parallelism_hint");
    int num_tasks = JsonPath.read(conf, "$.deck36_storm.default_num_tasks");

    // Create a stream from RabbitMQ messages:
    // bind a new queue, named after the topology,
    // to the main plan9 exchange (from the properties config),
    // consuming only CBT-related events by using the routing key 'cbt.#'
    String badgeName = HighFiveBadgeTopology.class.getSimpleName();

    String rabbitQueueName = badgeName; // use topology class name as name for the queue
    String rabbitExchangeName = JsonPath.read(conf, "$.deck36_storm.HighFiveBolt.rabbitmq.exchange");
    String rabbitRoutingKey = JsonPath.read(conf, "$.deck36_storm.HighFiveBolt.rabbitmq.routing_key");

    // Get JSON deserialization scheme
    Scheme rabbitScheme = new SimpleJSONScheme();

    // Setup a Declarator to configure exchange/queue/routing key
    RabbitMQDeclarator rabbitDeclarator = new RabbitMQDeclarator(rabbitExchangeName, rabbitQueueName,
            rabbitRoutingKey);

    // Create Configuration for the Spout
    ConnectionConfig connectionConfig = new ConnectionConfig(
            (String) JsonPath.read(conf, "$.deck36_storm.rabbitmq.host"),
            (Integer) JsonPath.read(conf, "$.deck36_storm.rabbitmq.port"),
            (String) JsonPath.read(conf, "$.deck36_storm.rabbitmq.user"),
            (String) JsonPath.read(conf, "$.deck36_storm.rabbitmq.pass"),
            (String) JsonPath.read(conf, "$.deck36_storm.rabbitmq.vhost"),
            (Integer) JsonPath.read(conf, "$.deck36_storm.rabbitmq.heartbeat"));

    ConsumerConfig spoutConfig = new ConsumerConfigBuilder().connection(connectionConfig)
            .queue(rabbitQueueName)
            .prefetch((Integer) JsonPath.read(conf, "$.deck36_storm.rabbitmq.prefetch"))
            .requeueOnFail()
            .build();

    // Add global parameters to the topology config - the RabbitMQSpout will read them from there
    conf.putAll(spoutConfig.asMap());

    // For production, set the spout pending value to the same value as the RabbitMQ pre-fetch;
    // see: https://github.com/ppat/storm-rabbitmq/blob/master/README.md
    if ("prod".equals(env)) {
        conf.put(Config.TOPOLOGY_MAX_SPOUT_PENDING,
                (Integer) JsonPath.read(conf, "$.deck36_storm.rabbitmq.prefetch"));
    }

    // Add RabbitMQ spout to topology
    builder.setSpout("incoming", new RabbitMQSpout(rabbitScheme, rabbitDeclarator), parallelism_hint)
            .setNumTasks((Integer) JsonPath.read(conf, "$.deck36_storm.rabbitmq.spout_tasks"));

    // Construct the command to invoke the external bolt implementation
    ArrayList<String> command = new ArrayList<String>(15);

    // Add main execution program (php, hhvm, zend, ...) and parameters
    command.add((String) JsonPath.read(conf, "$.deck36_storm.php.executor"));
    command.addAll((List<String>) JsonPath.read(conf, "$.deck36_storm.php.executor_params"));

    // Add main command to be executed (app/console, the phar file, etc.)
    // and global context parameters (environment etc.)
    command.add((String) JsonPath.read(conf, "$.deck36_storm.php.main"));
    command.addAll((List<String>) JsonPath.read(conf, "$.deck36_storm.php.main_params"));

    // Add main route to be invoked and its parameters
    command.add((String) JsonPath.read(conf, "$.deck36_storm.HighFiveBolt.main"));
    List boltParams = (List<String>) JsonPath.read(conf, "$.deck36_storm.HighFiveBolt.params");
    if (boltParams != null)
        command.addAll(boltParams);

    // Log the final command
    log.info("Command to start bolt for HighFive badge: " + Arrays.toString(command.toArray()));

    // Add the constructed external bolt command to the topology using MultilangAdapterBolt
    builder.setBolt("badge", new MultilangAdapterBolt(command, "badge"), parallelism_hint)
            .setNumTasks(num_tasks)
            .shuffleGrouping("incoming");

    builder.setBolt("rabbitmq_router",
            new Plan9RabbitMQRouterBolt(
                    (String) JsonPath.read(conf, "$.deck36_storm.HighFiveBolt.rabbitmq.target_exchange"),
                    "HighFive" // RabbitMQ routing key
            ), parallelism_hint)
            .setNumTasks(num_tasks)
            .shuffleGrouping("badge");

    builder.setBolt("rabbitmq_producer", new Plan9RabbitMQPushBolt(), parallelism_hint)
            .setNumTasks(num_tasks)
            .shuffleGrouping("rabbitmq_router");

    if ("dev".equals(env)) {
        LocalCluster cluster = new LocalCluster();
        cluster.submitTopology(badgeName + System.currentTimeMillis(), conf, builder.createTopology());
        Thread.sleep(2000000);
    }

    if ("prod".equals(env)) {
        StormSubmitter.submitTopology(badgeName + "-" + System.currentTimeMillis(), conf,
                builder.createTopology());
    }
}
From source file:edu.monash.merc.system.remote.HttpHpaFileGetter.java
public static void main(String[] args) {
    String fileLocation = "http://www.proteinatlas.org/download/proteinatlas.xml.bz2";
    HttpHpaFileGetter fileGetter = new HttpHpaFileGetter();

    long startTime = System.currentTimeMillis();
    fileGetter.importHPAXMLBZ2(fileLocation, "/opt/tpb/db_download/hpatest.xml");
    HPAWSXmlParser parser = new HPAWSXmlParser();
    List<HPAEntryBean> hpaEntryBeans = parser.parseHPAXml("/opt/tpb/db_download/hpatest.xml",
            WSXmlInputFactory.getInputFactoryConfiguredForXmlConformance());
    long endTime = System.currentTimeMillis();

    System.out.println("===== total entry size : " + hpaEntryBeans.size());
    System.out.println("===== total processing time : " + (endTime - startTime) / 1000 + " seconds.");
}