List of usage examples for java.lang.String format
public static String format(Locale l, String format, Object... args)
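A minimal sketch of this locale-aware overload before the longer examples below; class name, values, and locales are chosen purely for illustration:

import java.util.Locale;

public class FormatDemo {
    public static void main(String[] args) {
        // The Locale argument controls locale-sensitive output such as grouping and decimal separators
        String us = String.format(Locale.US, "total: %,.2f items: %d", 1234.5, 42);
        String de = String.format(Locale.GERMANY, "total: %,.2f items: %d", 1234.5, 42);
        System.out.println(us); // total: 1,234.50 items: 42
        System.out.println(de); // total: 1.234,50 items: 42
    }
}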
From source file:edu.cmu.lti.oaqa.knn4qa.apps.LuceneIndexer.java
public static void main(String[] args) {
    Options options = new Options();

    options.addOption(CommonParams.ROOT_DIR_PARAM, null, true, CommonParams.ROOT_DIR_DESC);
    options.addOption(CommonParams.SUB_DIR_TYPE_PARAM, null, true, CommonParams.SUB_DIR_TYPE_DESC);
    options.addOption(CommonParams.MAX_NUM_REC_PARAM, null, true, CommonParams.MAX_NUM_REC_DESC);
    options.addOption(CommonParams.SOLR_FILE_NAME_PARAM, null, true, CommonParams.SOLR_FILE_NAME_DESC);
    options.addOption(CommonParams.OUT_INDEX_PARAM, null, true, CommonParams.OUT_MINDEX_DESC);

    CommandLineParser parser = new org.apache.commons.cli.GnuParser();

    try {
        CommandLine cmd = parser.parse(options, args);

        String rootDir = null;
        rootDir = cmd.getOptionValue(CommonParams.ROOT_DIR_PARAM);
        if (null == rootDir)
            Usage("Specify: " + CommonParams.ROOT_DIR_DESC, options);

        String outputDirName = cmd.getOptionValue(CommonParams.OUT_INDEX_PARAM);
        if (null == outputDirName)
            Usage("Specify: " + CommonParams.OUT_MINDEX_DESC, options);

        String subDirTypeList = cmd.getOptionValue(CommonParams.SUB_DIR_TYPE_PARAM);
        if (null == subDirTypeList || subDirTypeList.isEmpty())
            Usage("Specify: " + CommonParams.SUB_DIR_TYPE_DESC, options);

        String solrFileName = cmd.getOptionValue(CommonParams.SOLR_FILE_NAME_PARAM);
        if (null == solrFileName)
            Usage("Specify: " + CommonParams.SOLR_FILE_NAME_DESC, options);

        int maxNumRec = Integer.MAX_VALUE;

        String tmp = cmd.getOptionValue(CommonParams.MAX_NUM_REC_PARAM);
        if (tmp != null) {
            try {
                maxNumRec = Integer.parseInt(tmp);
                if (maxNumRec <= 0) {
                    Usage("The maximum number of records should be a positive integer", options);
                }
            } catch (NumberFormatException e) {
                Usage("The maximum number of records should be a positive integer", options);
            }
        }

        File outputDir = new File(outputDirName);
        if (!outputDir.exists()) {
            if (!outputDir.mkdirs()) {
                System.out.println("couldn't create " + outputDir.getAbsolutePath());
                System.exit(1);
            }
        }
        if (!outputDir.isDirectory()) {
            System.out.println(outputDir.getAbsolutePath() + " is not a directory!");
            System.exit(1);
        }
        if (!outputDir.canWrite()) {
            System.out.println("Can't write to " + outputDir.getAbsolutePath());
            System.exit(1);
        }

        String subDirs[] = subDirTypeList.split(",");

        int docNum = 0;

        // No English analyzer here, all language-related processing is done already,
        // here we simply white-space tokenize and index tokens verbatim.
        Analyzer analyzer = new WhitespaceAnalyzer();
        FSDirectory indexDir = FSDirectory.open(outputDir);
        IndexWriterConfig indexConf = new IndexWriterConfig(analyzer.getVersion(), analyzer);

        System.out.println("Creating a new Lucene index, maximum # of docs to process: " + maxNumRec);
        indexConf.setOpenMode(OpenMode.CREATE);
        IndexWriter indexWriter = new IndexWriter(indexDir, indexConf);

        for (int subDirId = 0; subDirId < subDirs.length && docNum < maxNumRec; ++subDirId) {
            String inputFileName = rootDir + "/" + subDirs[subDirId] + "/" + solrFileName;

            System.out.println("Input file name: " + inputFileName);

            BufferedReader inpText = new BufferedReader(
                    new InputStreamReader(CompressUtils.createInputStream(inputFileName)));
            String docText = XmlHelper.readNextXMLIndexEntry(inpText);

            for (; docText != null && docNum < maxNumRec; docText = XmlHelper.readNextXMLIndexEntry(inpText)) {
                ++docNum;
                Map<String, String> docFields = null;

                Document luceneDoc = new Document();

                try {
                    docFields = XmlHelper.parseXMLIndexEntry(docText);
                } catch (Exception e) {
                    System.err.println(String.format("Parsing error, offending DOC #%d:\n%s", docNum, docText));
                    System.exit(1);
                }

                String id = docFields.get(UtilConst.TAG_DOCNO);

                if (id == null) {
                    System.err.println(String.format("No ID tag '%s', offending DOC #%d:\n%s",
                            UtilConst.TAG_DOCNO, docNum, docText));
                }

                luceneDoc.add(new StringField(UtilConst.TAG_DOCNO, id, Field.Store.YES));

                for (Map.Entry<String, String> e : docFields.entrySet())
                    if (!e.getKey().equals(UtilConst.TAG_DOCNO)) {
                        luceneDoc.add(new TextField(e.getKey(), e.getValue(), Field.Store.YES));
                    }

                indexWriter.addDocument(luceneDoc);
                if (docNum % 1000 == 0)
                    System.out.println("Indexed " + docNum + " docs");
            }

            System.out.println("Indexed " + docNum + " docs");
        }

        indexWriter.commit();
        indexWriter.close();
    } catch (ParseException e) {
        Usage("Cannot parse arguments", options);
    } catch (Exception e) {
        System.err.println("Terminating due to an exception: " + e);
        System.exit(1);
    }
}
From source file:com.renren.ntc.sg.util.wxpay.https.ClientCustomSSL.java
public final static void main(String[] args) throws Exception {
    KeyStore keyStore = KeyStore.getInstance("PKCS12");
    FileInputStream instream = new FileInputStream(
            new File("/Users/allenz/Downloads/wx_cert/apiclient_cert.p12"));
    try {
        keyStore.load(instream, Constants.mch_id.toCharArray());
    } finally {
        instream.close();
    }

    // Trust own CA and all self-signed certs
    SSLContext sslcontext = SSLContexts.custom().loadKeyMaterial(keyStore, Constants.mch_id.toCharArray())
            .build();
    // Allow TLSv1 protocol only
    SSLConnectionSocketFactory sslsf = new SSLConnectionSocketFactory(sslcontext, new String[] { "TLSv1" },
            null, SSLConnectionSocketFactory.BROWSER_COMPATIBLE_HOSTNAME_VERIFIER);
    CloseableHttpClient httpclient = HttpClients.custom().setSSLSocketFactory(sslsf).build();
    try {
        HttpPost post = new HttpPost("https://api.mch.weixin.qq.com/mmpaymkttransfers/promotion/transfers");

        System.out.println("executing request" + post.getRequestLine());

        String openid = "oQfDLjmZD7Lgynv6vuoBlWXUY_ic";
        String nonce_str = Sha1Util.getNonceStr();
        String orderId = SUtils.getOrderId();
        String re_user_name = "?";
        String amount = "1";
        String desc = "";
        String spbill_create_ip = "123.56.102.224";

        String txt = TXT.replace("{mch_appid}", Constants.mch_appid);
        txt = txt.replace("{mchid}", Constants.mch_id);
        txt = txt.replace("{openid}", openid);
        txt = txt.replace("{nonce_str}", nonce_str);
        txt = txt.replace("{partner_trade_no}", orderId);
        txt = txt.replace("{check_name}", "FORCE_CHECK");
        txt = txt.replace("{re_user_name}", re_user_name);
        txt = txt.replace("{amount}", amount);
        txt = txt.replace("{desc}", desc);
        txt = txt.replace("{spbill_create_ip}", spbill_create_ip);

        SortedMap<String, String> map = new TreeMap<String, String>();
        map.put("mch_appid", Constants.mch_appid);
        map.put("mchid", Constants.mch_id);
        map.put("openid", openid);
        map.put("nonce_str", nonce_str);
        map.put("partner_trade_no", orderId);
        // FORCE_CHECK | OPTION_CHECK | NO_CHECK
        map.put("check_name", "OPTION_CHECK");
        map.put("re_user_name", re_user_name);
        map.put("amount", amount);
        map.put("desc", desc);
        map.put("spbill_create_ip", spbill_create_ip);

        String sign = SUtils.createSign(map).toUpperCase();
        txt = txt.replace("{sign}", sign);

        post.setEntity(new StringEntity(txt, "utf-8"));

        CloseableHttpResponse response = httpclient.execute(post);
        try {
            HttpEntity entity = response.getEntity();

            System.out.println(response.getStatusLine());
            if (entity != null) {
                System.out.println("Response content length: " + entity.getContentLength());
                BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(entity.getContent()));
                String text;
                StringBuffer sb = new StringBuffer();
                while ((text = bufferedReader.readLine()) != null) {
                    sb.append(text);
                }
                String resp = sb.toString();
                LoggerUtils.getInstance().log(String.format("req %s rec %s", txt, resp));
                if (isOk(resp)) {
                    String payment_no = getValue(resp, "payment_no");
                    LoggerUtils.getInstance()
                            .log(String.format("order %s pay OK payment_no %s", orderId, payment_no));
                }
            }
            EntityUtils.consume(entity);
        } finally {
            response.close();
        }
    } finally {
        httpclient.close();
    }
}
From source file:edu.indiana.d2i.htrc.corpus.analysis.LDAAnalysisDriver.java
public static void main(String[] args) throws Exception {
    GenericOptionsParser parser = new GenericOptionsParser(new Configuration(), args);
    CommandLine commandLine = parser.getCommandLine();
    Option[] options = commandLine.getOptions();

    /**
     * appArgs[0] = <path/to/input/directory> (where sequence files reside)
     * appArgs[1] = <path/to/output/directory/prefix> (where LDA state file should go)
     * appArgs[2] = <path/local/property/file>
     *
     * Note: the passed in <path/to/output/directory/prefix> is only a
     * prefix, we automatically append the iteration number suffix
     */
    String[] appArgs = parser.getRemainingArgs();

    // load property file
    Properties prop = new Properties();
    prop.load(new FileInputStream(appArgs[2]));

    int maxIterationNum = Integer.parseInt(
            prop.getProperty(Constants.LDA_ANALYSIS_MAX_ITER, Constants.LDA_ANALYSIS_DEFAULT_MAX_ITER));

    int iterationCount = 0;

    /**
     * in the first iteration (iteration 0), there is no LDA state
     */
    String[] arguments = generateArgs(options, new String[0], appArgs[0],
            appArgs[1] + "-iter-" + iterationCount);

    /**
     * iterate until convergence or maximum iteration number reached
     */
    while (true) {
        int exitCode = ToolRunner.run(new LDAAnalysisDriver(), arguments);

        System.out.println(String.format("LDA analysis finished iteration %d, with exitCode = %d",
                iterationCount, exitCode));

        /**
         * LDA state is the output (sequence file) from current iteration
         * and is used to initialize the words-topics table and
         * topics-documents table for the next iteration
         */
        String ldaStateFilePath = appArgs[1] + "-iter-" + iterationCount + File.separator + "part-r-00000";

        /**
         * load LDA state to check whether it is converged
         */
        if (isAnalysisConverged(ldaStateFilePath)) {
            System.out.println(String.format("LDA analysis converged at iteration %d", iterationCount));
            break;
        }

        if ((iterationCount + 1) >= maxIterationNum) {
            System.out.println(String.format(
                    "LDA analysis reached the maximum iteration number %d, going to stop", maxIterationNum));
            break;
        }

        String[] otherOps = { "-D", "user.args.lda.state.filepath=" + ldaStateFilePath };

        /**
         * generate arguments for the next iteration and increase iteration count
         */
        arguments = generateArgs(options, otherOps, appArgs[0], appArgs[1] + "-iter-" + ++iterationCount);
    }
}
From source file:AmazonKinesisGet.java
public static void main(String[] args) throws Exception {
    init();

    final String myStreamName = "philsteststream";
    final Integer myStreamSize = 1;

    // list all of my streams
    ListStreamsRequest listStreamsRequest = new ListStreamsRequest();
    listStreamsRequest.setLimit(10);
    ListStreamsResult listStreamsResult = kinesisClient.listStreams(listStreamsRequest);
    List<String> streamNames = listStreamsResult.getStreamNames();
    while (listStreamsResult.isHasMoreStreams()) {
        if (streamNames.size() > 0) {
            listStreamsRequest.setExclusiveStartStreamName(streamNames.get(streamNames.size() - 1));
        }

        listStreamsResult = kinesisClient.listStreams(listStreamsRequest);
        streamNames.addAll(listStreamsResult.getStreamNames());
    }
    LOG.info("Printing my list of streams : ");

    // print all of my streams.
    if (!streamNames.isEmpty()) {
        System.out.println("List of my streams: ");
    }
    for (int i = 0; i < streamNames.size(); i++) {
        System.out.println(streamNames.get(i));
    }

    //System.out.println(streamNames.get(0));
    String myownstream = streamNames.get(0);

    // Retrieve the Shards from a Stream
    DescribeStreamRequest describeStreamRequest = new DescribeStreamRequest();
    describeStreamRequest.setStreamName(myownstream);
    DescribeStreamResult describeStreamResult;
    List<Shard> shards = new ArrayList<>();
    String lastShardId = null;

    do {
        describeStreamRequest.setExclusiveStartShardId(lastShardId);
        describeStreamResult = kinesisClient.describeStream(describeStreamRequest);
        shards.addAll(describeStreamResult.getStreamDescription().getShards());

        if (shards.size() > 0) {
            lastShardId = shards.get(shards.size() - 1).getShardId();
        }
    } while (describeStreamResult.getStreamDescription().getHasMoreShards());

    // Get Data from the Shards in a Stream
    // Hard-coded to use only 1 shard
    String shardIterator;
    GetShardIteratorRequest getShardIteratorRequest = new GetShardIteratorRequest();
    getShardIteratorRequest.setStreamName(myownstream);
    // get(0) shows hardcoded to 1 stream
    getShardIteratorRequest.setShardId(shards.get(0).getShardId());
    // using TRIM_HORIZON but could use alternatives
    getShardIteratorRequest.setShardIteratorType("TRIM_HORIZON");

    GetShardIteratorResult getShardIteratorResult = kinesisClient.getShardIterator(getShardIteratorRequest);
    shardIterator = getShardIteratorResult.getShardIterator();

    // Continuously read data records from shard.
    List<Record> records;

    while (true) {
        // Create new GetRecordsRequest with existing shardIterator.
        // Set maximum records to return to 1000.
        GetRecordsRequest getRecordsRequest = new GetRecordsRequest();
        getRecordsRequest.setShardIterator(shardIterator);
        getRecordsRequest.setLimit(1000);

        GetRecordsResult result = kinesisClient.getRecords(getRecordsRequest);

        // Put result into record list. Result may be empty.
        records = result.getRecords();

        // Print records
        for (Record record : records) {
            ByteBuffer byteBuffer = record.getData();
            System.out.println(String.format("Seq No: %s - %s", record.getSequenceNumber(),
                    new String(byteBuffer.array())));
        }

        try {
            Thread.sleep(1000);
        } catch (InterruptedException exception) {
            throw new RuntimeException(exception);
        }

        shardIterator = result.getNextShardIterator();
    }
}
From source file:apps.LuceneQuery.java
public static void main(String[] args) { Options options = new Options(); options.addOption("d", null, true, "index directory"); options.addOption("i", null, true, "input file"); options.addOption("s", null, true, "stop word file"); options.addOption("n", null, true, "max # of results"); options.addOption("o", null, true, "a TREC-style output file"); options.addOption("r", null, true, "an optional QREL file, if specified," + "we save results only for queries for which we find at least one relevant entry."); options.addOption("prob", null, true, "question sampling probability"); options.addOption("max_query_qty", null, true, "a maximum number of queries to run"); options.addOption("bm25_b", null, true, "BM25 parameter: b"); options.addOption("bm25_k1", null, true, "BM25 parameter: k1"); options.addOption("bm25fixed", null, false, "use the fixed BM25 similarity"); options.addOption("seed", null, true, "random seed"); Joiner commaJoin = Joiner.on(','); Joiner spaceJoin = Joiner.on(' '); options.addOption("source_type", null, true, "query source type: " + commaJoin.join(SourceFactory.getQuerySourceList())); CommandLineParser parser = new org.apache.commons.cli.GnuParser(); QrelReader qrels = null;// w w w .j a v a 2 s.co m try { CommandLine cmd = parser.parse(options, args); String indexDir = null; if (cmd.hasOption("d")) { indexDir = cmd.getOptionValue("d"); } else { Usage("Specify 'index directory'", options); } String inputFileName = null; if (cmd.hasOption("i")) { inputFileName = cmd.getOptionValue("i"); } else { Usage("Specify 'input file'", options); } DictNoComments stopWords = null; if (cmd.hasOption("s")) { String stopWordFileName = cmd.getOptionValue("s"); stopWords = new DictNoComments(new File(stopWordFileName), true /* lowercasing */); System.out.println("Using the stopword file: " + stopWordFileName); } String sourceName = cmd.getOptionValue("source_type"); if (sourceName == null) Usage("Specify document source type", options); int numRet = 100; if (cmd.hasOption("n")) { numRet = Integer.parseInt(cmd.getOptionValue("n")); System.out.println("Retrieving at most " + numRet + " candidate entries."); } String trecOutFileName = null; if (cmd.hasOption("o")) { trecOutFileName = cmd.getOptionValue("o"); } else { Usage("Specify 'a TREC-style output file'", options); } double fProb = 1.0f; if (cmd.hasOption("prob")) { try { fProb = Double.parseDouble(cmd.getOptionValue("prob")); } catch (NumberFormatException e) { Usage("Wrong format for 'question sampling probability'", options); } } if (fProb <= 0 || fProb > 1) { Usage("Question sampling probability should be >0 and <=1", options); } System.out.println("Sample the following fraction of questions: " + fProb); float bm25_k1 = UtilConst.BM25_K1_DEFAULT, bm25_b = UtilConst.BM25_B_DEFAULT; if (cmd.hasOption("bm25_k1")) { try { bm25_k1 = Float.parseFloat(cmd.getOptionValue("bm25_k1")); } catch (NumberFormatException e) { Usage("Wrong format for 'bm25_k1'", options); } } if (cmd.hasOption("bm25_b")) { try { bm25_b = Float.parseFloat(cmd.getOptionValue("bm25_b")); } catch (NumberFormatException e) { Usage("Wrong format for 'bm25_b'", options); } } long seed = 0; String tmpl = cmd.getOptionValue("seed"); if (tmpl != null) seed = Long.parseLong(tmpl); System.out.println("Using seed: " + seed); Random randGen = new Random(seed); System.out.println(String.format("BM25 parameters k1=%f b=%f ", bm25_k1, bm25_b)); boolean useFixedBM25 = cmd.hasOption("bm25fixed"); EnglishAnalyzer analyzer = new EnglishAnalyzer(); Similarity similarity = null; if 
(useFixedBM25) { System.out.println(String.format("Using fixed BM25Simlarity, k1=%f b=%f", bm25_k1, bm25_b)); similarity = new BM25SimilarityFix(bm25_k1, bm25_b); } else { System.out.println(String.format("Using Lucene BM25Similarity, k1=%f b=%f", bm25_k1, bm25_b)); similarity = new BM25Similarity(bm25_k1, bm25_b); } int maxQueryQty = Integer.MAX_VALUE; if (cmd.hasOption("max_query_qty")) { try { maxQueryQty = Integer.parseInt(cmd.getOptionValue("max_query_qty")); } catch (NumberFormatException e) { Usage("Wrong format for 'max_query_qty'", options); } } System.out.println(String.format("Executing at most %d queries", maxQueryQty)); if (cmd.hasOption("r")) { String qrelFile = cmd.getOptionValue("r"); System.out.println("Using the qrel file: '" + qrelFile + "', queries not returning a relevant entry will be ignored."); qrels = new QrelReader(qrelFile); } System.out.println(String.format("Using indexing directory %s", indexDir)); LuceneCandidateProvider candProvider = new LuceneCandidateProvider(indexDir, analyzer, similarity); TextCleaner textCleaner = new TextCleaner(stopWords); QuerySource inpQuerySource = SourceFactory.createQuerySource(sourceName, inputFileName); QueryEntry inpQuery = null; BufferedWriter trecOutFile = new BufferedWriter(new FileWriter(new File(trecOutFileName))); int questNum = 0, questQty = 0; long totalTimeMS = 0; while ((inpQuery = inpQuerySource.next()) != null) { if (questQty >= maxQueryQty) break; ++questNum; String queryID = inpQuery.mQueryId; if (randGen.nextDouble() <= fProb) { ++questQty; String tokQuery = spaceJoin.join(textCleaner.cleanUp(inpQuery.mQueryText)); String query = TextCleaner.luceneSafeCleanUp(tokQuery).trim(); ResEntry[] results = null; if (query.isEmpty()) { results = new ResEntry[0]; System.out.println(String.format("WARNING, empty query id = '%s'", inpQuery.mQueryId)); } else { try { long start = System.currentTimeMillis(); results = candProvider.getCandidates(questNum, query, numRet); long end = System.currentTimeMillis(); long searchTimeMS = end - start; totalTimeMS += searchTimeMS; System.out.println(String.format( "Obtained results for the query # %d (answered %d queries), queryID %s the search took %d ms, we asked for max %d entries got %d", questNum, questQty, queryID, searchTimeMS, numRet, results.length)); } catch (ParseException e) { e.printStackTrace(); System.err.println( "Error parsing query: " + query + " orig question is :" + inpQuery.mQueryText); System.exit(1); } } boolean bSave = true; if (qrels != null) { boolean bOk = false; for (ResEntry r : results) { String label = qrels.get(queryID, r.mDocId); if (candProvider.isRelevLabel(label, 1)) { bOk = true; break; } } if (!bOk) bSave = false; } // System.out.println(String.format("Ranking results the query # %d queryId='%s' save results? %b", // questNum, queryID, bSave)); if (bSave) { saveTrecResults(queryID, results, trecOutFile, TREC_RUN, numRet); } } if (questNum % 1000 == 0) System.out.println(String.format("Proccessed %d questions", questNum)); } System.out.println(String.format("Proccessed %d questions, the search took %f MS on average", questQty, (float) totalTimeMS / questQty)); trecOutFile.close(); } catch (ParseException e) { e.printStackTrace(); Usage("Cannot parse arguments: " + e, options); } catch (Exception e) { System.err.println("Terminating due to an exception: " + e); System.exit(1); } }
From source file:markov.java
/**
 * @param args
 */
public static void main(String[] args) {
    // hack: eclipse don't support IO redirection worth a shit
    // try {
    //     System.setIn(new FileInputStream("./json"));
    // } catch (FileNotFoundException e1) {
    //     // TODO Auto-generated catch block
    //     e1.printStackTrace();
    // }
    boolean graphMode = false;
    boolean jsonMode = false;
    boolean jsonRecoverMode = false;
    boolean endNode = false;
    int count = -1;

    long n = 0;
    long sumOfSqr = 0;
    long sum = 0;

    for (String s : args) {
        if (!s.matches("^-[vegjJh]*(c[0-9]*)?$")) {
            System.out.println("invalid argument");
            return;
        }
        if (s.matches("^-.*h.*")) {
            System.out.println(HELP);
            return;
        }
        if (s.matches("^-.*v.*")) {
            verbose = true;
            log("verbose mode");
        }
        if (s.matches("^-.*g.*")) {
            graphMode = true;
            log("graph mode");
        }
        if (s.matches("^-.*j.*")) {
            jsonMode = true;
            log("json mode");
        }
        if (s.matches("^-.*J.*")) {
            jsonRecoverMode = true;
            log("json recover mode");
        }
        if (s.matches("^-.*e.*")) {
            endNode = true;
            log("include end node");
        }
        if (s.matches("^-.*c[0-9]*$")) {
            log("counted output mode");
            count = Integer.parseInt(s.replaceAll("^-.*c", ""));
        }

        boolean error = (graphMode == true && jsonMode == true);
        if (!error) {
            error = (count > -1) && (graphMode == true || jsonMode == true);
        }
        if (error) {
            System.err.println("[error] switches j, g and, c are mutualy exclusive.");
            return;
        }
    }

    StateTransitionDiagram<Character> std;
    BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
    try {
        if (!jsonRecoverMode) {
            Trainer<Character> trainer = new Trainer<Character>();
            String s = br.readLine();
            while (s != null) {
                trainer.train(string2List(s));
                n++;
                sumOfSqr += s.length() * s.length();
                sum += s.length();
                s = br.readLine();
            }
            if (n == 0) {
                System.err
                        .println("Invalid corpus: At least one sample is required, two to make it interesting");
                return;
            }
            std = trainer.getTransitionDiagram();
        } else {
            std = new StateTransitionDiagram<Character>();
            GsonStub gstub = new Gson().fromJson(br, GsonStub.class);
            n = gstub.meta.n;
            sum = gstub.meta.sum;
            sumOfSqr = gstub.meta.sumOfSqr;

            for (Entry<String, StateStub> entry : gstub.states.entrySet()) {
                State<Character> state;
                if (entry.getKey().equals("null")) {
                    state = std.getGuard();
                } else {
                    state = std.getState(Character.valueOf(entry.getKey().charAt(0)));
                }

                for (Entry<String, Integer> transitions : entry.getValue().transitions.entrySet()) {
                    State<Character> tranny;
                    if (transitions.getKey().equals("null")) {
                        tranny = std.getGuard();
                    } else {
                        tranny = std.getState(Character.valueOf(transitions.getKey().charAt(0)));
                    }
                    state.addTransition(tranny.getValue(), transitions.getValue());
                }
            }
        }

        if (graphMode) {
            if (endNode) {
                System.out.println(std.toString());
            } else {
                System.out.println(std.removeEndGuards().toString());
            }
            return;
        }
        if (jsonMode) {
            Gson gson = new GsonBuilder().excludeFieldsWithoutExposeAnnotation().create();
            String partialJson;
            if (endNode) {
                partialJson = gson.toJson(std);
            } else {
                partialJson = gson.toJson(std.removeEndGuards());
            }
            GsonStub gstub = new Gson().fromJson(partialJson, GsonStub.class);
            gstub.meta = new Meta();
            gstub.meta.n = n;
            gstub.meta.sum = sum;
            gstub.meta.sumOfSqr = sumOfSqr;

            System.out.println(gson.toJson(gstub));
            return;
        }

        Generator<Character> generator;
        if (endNode) {
            generator = new EndTagGenerator<Character>(std);
        } else {
            double sd = ((double) sumOfSqr - (double) (sum * sum) / (double) n) / (double) (n - 1);
            double mean = (double) sum / (double) n;
            log(String.format("mean: %.4f sd: %.4f", mean, sd));
            NormalDistributionImpl dist = new NormalDistributionImpl(mean, sd);
            generator = new NormalizedGenerator<Character>(std.removeEndGuards(), dist);
        }
        if (count >= 0) {
            for (int c = 0; c < count; c++) {
                output(generator);
            }
        } else {
            while (true) {
                output(generator);
            }
        }
    } catch (IOException e) {
        e.printStackTrace();
    }
}
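The generator branch above logs the estimated mean and standard deviation with the %.4f precision specifier. A standalone sketch of the same pattern, with invented values:

double mean = 5.123456;
double sd = 1.987654;
// %.4f rounds each value to four digits after the decimal point
System.out.println(String.format("mean: %.4f sd: %.4f", mean, sd)); // mean: 5.1235 sd: 1.9877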
From source file:com.amazonaws.services.kinesis.leases.impl.LeaseCoordinatorExerciser.java
public static void main(String[] args) throws InterruptedException, DependencyException, InvalidStateException,
        ProvisionedThroughputException, IOException {
    int numCoordinators = 9;
    int numLeases = 73;
    int leaseDurationMillis = 10000;
    int epsilonMillis = 100;

    AWSCredentialsProvider creds = new DefaultAWSCredentialsProviderChain();
    AmazonDynamoDBClient ddb = new AmazonDynamoDBClient(creds);

    ILeaseManager<KinesisClientLease> leaseManager = new KinesisClientLeaseManager("nagl_ShardProgress", ddb);

    if (leaseManager.createLeaseTableIfNotExists(10L, 50L)) {
        LOG.info("Waiting for newly created lease table");
        if (!leaseManager.waitUntilLeaseTableExists(10, 300)) {
            LOG.error("Table was not created in time");
            return;
        }
    }

    CWMetricsFactory metricsFactory = new CWMetricsFactory(creds, "testNamespace", 30 * 1000, 1000);
    final List<LeaseCoordinator<KinesisClientLease>> coordinators =
            new ArrayList<LeaseCoordinator<KinesisClientLease>>();
    for (int i = 0; i < numCoordinators; i++) {
        String workerIdentifier = "worker-" + Integer.toString(i);

        LeaseCoordinator<KinesisClientLease> coord = new LeaseCoordinator<KinesisClientLease>(leaseManager,
                workerIdentifier, leaseDurationMillis, epsilonMillis, metricsFactory);

        coordinators.add(coord);
    }

    leaseManager.deleteAll();

    for (int i = 0; i < numLeases; i++) {
        KinesisClientLease lease = new KinesisClientLease();
        lease.setLeaseKey(Integer.toString(i));
        lease.setCheckpoint(new ExtendedSequenceNumber("checkpoint"));

        leaseManager.createLeaseIfNotExists(lease);
    }

    final JFrame frame = new JFrame("Test Visualizer");
    frame.setPreferredSize(new Dimension(800, 600));
    final JPanel panel = new JPanel(new GridLayout(coordinators.size() + 1, 0));
    final JLabel ticker = new JLabel("tick");
    panel.add(ticker);
    frame.getContentPane().add(panel);

    final Map<String, JLabel> labels = new HashMap<String, JLabel>();
    for (final LeaseCoordinator<KinesisClientLease> coord : coordinators) {
        JPanel coordPanel = new JPanel();
        coordPanel.setLayout(new BoxLayout(coordPanel, BoxLayout.X_AXIS));
        final Button button = new Button("Stop " + coord.getWorkerIdentifier());
        button.setMaximumSize(new Dimension(200, 50));
        button.addActionListener(new ActionListener() {

            @Override
            public void actionPerformed(ActionEvent arg0) {
                if (coord.isRunning()) {
                    coord.stop();
                    button.setLabel("Start " + coord.getWorkerIdentifier());
                } else {
                    try {
                        coord.start();
                    } catch (LeasingException e) {
                        LOG.error(e);
                    }
                    button.setLabel("Stop " + coord.getWorkerIdentifier());
                }
            }

        });
        coordPanel.add(button);

        JLabel label = new JLabel();
        coordPanel.add(label);
        labels.put(coord.getWorkerIdentifier(), label);

        panel.add(coordPanel);
    }

    frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);

    new Thread() {

        // Key is lease key, value is green-ness as a value from 0 to 255.
        // Great variable name, huh?
        private Map<String, Integer> greenNesses = new HashMap<String, Integer>();

        // Key is lease key, value is last owning worker
        private Map<String, String> lastOwners = new HashMap<String, String>();

        @Override
        public void run() {
            while (true) {
                for (LeaseCoordinator<KinesisClientLease> coord : coordinators) {
                    String workerIdentifier = coord.getWorkerIdentifier();

                    JLabel label = labels.get(workerIdentifier);

                    List<KinesisClientLease> asgn = new ArrayList<KinesisClientLease>(coord.getAssignments());
                    Collections.sort(asgn, new Comparator<KinesisClientLease>() {

                        @Override
                        public int compare(KinesisClientLease arg0, KinesisClientLease arg1) {
                            return arg0.getLeaseKey().compareTo(arg1.getLeaseKey());
                        }

                    });

                    StringBuilder builder = new StringBuilder();
                    builder.append("<html>");
                    builder.append(workerIdentifier).append(":").append(asgn.size()).append(" ");

                    for (KinesisClientLease lease : asgn) {
                        String leaseKey = lease.getLeaseKey();
                        String lastOwner = lastOwners.get(leaseKey);

                        // Color things green when they switch owners, decay the green-ness over time.
                        Integer greenNess = greenNesses.get(leaseKey);
                        if (greenNess == null || lastOwner == null
                                || !lastOwner.equals(lease.getLeaseOwner())) {
                            greenNess = 200;
                        } else {
                            greenNess = Math.max(0, greenNess - 20);
                        }
                        greenNesses.put(leaseKey, greenNess);
                        lastOwners.put(leaseKey, lease.getLeaseOwner());

                        builder.append(String.format("<font color=\"%s\">%03d</font>",
                                String.format("#00%02x00", greenNess), Integer.parseInt(leaseKey)))
                                .append(" ");
                    }
                    builder.append("</html>");

                    label.setText(builder.toString());
                    label.revalidate();
                    label.repaint();
                }

                if (ticker.getText().equals("tick")) {
                    ticker.setText("tock");
                } else {
                    ticker.setText("tick");
                }

                try {
                    Thread.sleep(200);
                } catch (InterruptedException e) {
                }
            }
        }

    }.start();

    frame.pack();
    frame.setVisible(true);

    for (LeaseCoordinator<KinesisClientLease> coord : coordinators) {
        coord.start();
    }
}
From source file:edu.upenn.egricelab.AlignerBoost.FilterSAMAlignPE.java
public static void main(String[] args) {
    if (args.length == 0) {
        printUsage();
        return;
    }
    try {
        parseOptions(args);
    } catch (IllegalArgumentException e) {
        System.err.println("Error: " + e.getMessage());
        printUsage();
        return;
    }

    // Read in chrList, if specified
    if (chrFile != null) {
        chrFilter = new HashSet<String>();
        try {
            BufferedReader chrFilterIn = new BufferedReader(new FileReader(chrFile));
            String chr = null;
            while ((chr = chrFilterIn.readLine()) != null)
                chrFilter.add(chr);
            chrFilterIn.close();
            if (verbose > 0)
                System.err.println(
                        "Only looking at alignments on " + chrFilter.size() + " specified chromosomes");
        } catch (IOException e) {
            System.err.println("Error: " + e.getMessage());
            return;
        }
    }

    if (verbose > 0) {
        // Start the processMonitor
        processMonitor = new Timer();
        // Start the ProcessStatusTask
        statusTask = new ProcessStatusTask();
        // Schedule to show the status every 1 second
        processMonitor.scheduleAtFixedRate(statusTask, 0, statusFreq);
    }

    // Read in known SNP file, if specified
    if (knownSnpFile != null) {
        if (verbose > 0)
            System.err.println("Checking known SNPs from user specified VCF file");
        knownVCF = new VCFFileReader(new File(knownSnpFile));
    }

    SamReaderFactory readerFac = SamReaderFactory.makeDefault();
    SAMFileWriterFactory writerFac = new SAMFileWriterFactory();
    if (!isSilent)
        readerFac.validationStringency(ValidationStringency.LENIENT); // use LENIENT stringency
    else
        readerFac.validationStringency(ValidationStringency.SILENT); // use SILENT stringency

    SamReader in = readerFac.open(new File(inFile));
    SAMFileHeader inHeader = in.getFileHeader();
    if (inHeader.getGroupOrder() == GroupOrder.reference && inHeader.getSortOrder() == SortOrder.coordinate)
        System.err.println("Warning: Input file '" + inFile
                + "' might be sorted by coordinate and cannot be correctly processed!");

    SAMFileHeader header = inHeader.clone(); // copy the inFile header as outFile header
    // Add new programHeader
    SAMProgramRecord progRec = new SAMProgramRecord(progName);
    progRec.setProgramName(progName);
    progRec.setProgramVersion(progVer);
    progRec.setCommandLine(StringUtils.join(" ", args));
    header.addProgramRecord(progRec);
    //System.err.println(inFile + " groupOrder: " + in.getFileHeader().getGroupOrder() + " sortOrder: " + in.getFileHeader().getSortOrder());
    // reset the orders
    header.setGroupOrder(groupOrder);
    header.setSortOrder(sortOrder);

    // write SAMHeader
    String prevID = null;
    SAMRecord prevRecord = null;
    List<SAMRecord> alnList = new ArrayList<SAMRecord>();
    List<SAMRecordPair> alnPEList = null;

    // Estimate fragment length distribution by scan one-pass through the alignments
    SAMRecordIterator results = in.iterator();
    if (!NO_ESTIMATE) {
        if (verbose > 0) {
            System.err.println("Estimating insert fragment size distribution ...");
            statusTask.reset();
            statusTask.setInfo("alignments scanned");
        }
        long N = 0;
        double fragL_S = 0; // fragLen sum
        double fragL_SS = 0; // fragLen^2 sum
        while (results.hasNext()) {
            SAMRecord record = results.next();
            if (verbose > 0)
                statusTask.updateStatus();
            if (record.getFirstOfPairFlag() && !record.isSecondaryOrSupplementary()) {
                double fragLen = Math.abs(record.getInferredInsertSize());
                if (fragLen != 0 && fragLen >= MIN_FRAG_LEN && fragLen <= MAX_FRAG_LEN) { // only consider certain alignments
                    N++;
                    fragL_S += fragLen;
                    fragL_SS += fragLen * fragLen;
                }
                // stop estimate if already enough
                if (MAX_ESTIMATE_SCAN > 0 && N >= MAX_ESTIMATE_SCAN)
                    break;
            }
        }
        if (verbose > 0)
            statusTask.finish();
        // estimate fragment size
        if (N >= MIN_ESTIMATE_BASE) { // override command line values
            MEAN_FRAG_LEN = fragL_S / N;
            SD_FRAG_LEN = Math.sqrt((N * fragL_SS - fragL_S * fragL_S) / (N * (N - 1)));
            String estStr = String.format("Estimated fragment size distribution: N(%.1f, %.1f)", MEAN_FRAG_LEN,
                    SD_FRAG_LEN);
            if (verbose > 0)
                System.err.println(estStr);
            // also add the estimation to comment
            header.addComment(estStr);
        } else {
            System.err.println(
                    "Unable to estimate the fragment size distribution due to too few observed alignments");
            System.err.println(
                    "You have to specify the '--mean-frag-len' and '--sd-frag-len' on the command line and re-run this step");
            statusTask.cancel();
            processMonitor.cancel();
            return;
        }
        // Initiate the normal model
        normModel = new NormalDistribution(MEAN_FRAG_LEN, SD_FRAG_LEN);
        // reset the iterator, if necessary
        if (in.type() == SamReader.Type.SAM_TYPE) {
            try {
                in.close();
            } catch (IOException e) {
                System.err.println(e.getMessage());
            }
            in = readerFac.open(new File(inFile));
        }
        results.close();
        results = in.iterator();
    } // end of NO_ESTIMATE

    SAMFileWriter out = OUT_IS_SAM ? writerFac.makeSAMWriter(header, false, new File(outFile))
            : writerFac.makeBAMWriter(header, false, new File(outFile));

    // check each alignment again
    if (verbose > 0) {
        System.err.println("Filtering alignments ...");
        statusTask.reset();
        statusTask.setInfo("alignments processed");
    }
    while (results.hasNext()) {
        SAMRecord record = results.next();
        if (verbose > 0)
            statusTask.updateStatus();
        String ID = record.getReadName();
        // fix read and quality string for this read, if is a secondary hit from multiple hits, used for BWA alignment
        if (ID.equals(prevID) && record.getReadLength() == 0)
            SAMAlignFixer.fixSAMRecordRead(record, prevRecord);
        if (chrFilter != null && !chrFilter.contains(record.getReferenceName())) {
            prevID = ID;
            prevRecord = record;
            continue;
        }

        // fix MD:Z string for certain aligners with invalid format (i.e. seqAlto)
        if (fixMD)
            SAMAlignFixer.fixMisStr(record);

        // fix alignment, ignore if failed (unmapped or empty)
        if (!SAMAlignFixer.fixSAMRecord(record, knownVCF, DO_1DP)) {
            prevID = ID;
            prevRecord = record;
            continue;
        }
        if (!record.getReadPairedFlag()) {
            System.err.println("Error: alignment is not from a paired-end read at\n" + record.getSAMString());
            out.close();
            statusTask.cancel();
            processMonitor.cancel();
            return;
        }

        if (!ID.equals(prevID) && prevID != null || !results.hasNext()) { // a non-first new ID meet, or end of alignments
            // create alnPEList from filtered alnList
            alnPEList = createAlnPEListFromAlnList(alnList);
            //System.err.printf("%d alignments for %s transformed to %d alnPairs%n", alnList.size(), prevID, alnPEList.size());
            int totalPair = alnPEList.size();
            // filter highly unlikely PEhits
            filterPEHits(alnPEList, MIN_ALIGN_RATE, MIN_IDENTITY);
            // calculate posterior mapQ for each pair
            calcPEHitPostP(alnPEList, totalPair, MAX_HIT);
            // filter hits by mapQ
            if (MIN_MAPQ > 0)
                filterPEHits(alnPEList, MIN_MAPQ);

            // sort the list first with an anonymous class of comparator, with DESCREASING order
            Collections.sort(alnPEList, Collections.reverseOrder());
            // control max-best
            if (MAX_BEST != 0 && alnPEList.size() > MAX_BEST) { // potential too much best hits
                int nBestStratum = 0;
                int bestMapQ = alnPEList.get(0).getPEMapQ(); // best mapQ from first PE
                for (SAMRecordPair pr : alnPEList)
                    if (pr.getPEMapQ() == bestMapQ)
                        nBestStratum++;
                    else
                        break; // stop searching for sorted list
                if (nBestStratum > MAX_BEST)
                    alnPEList.clear();
            }
            // filter alignments with auxiliary filters
            if (!MAX_SENSITIVITY)
                filterPEHits(alnPEList, MAX_SEED_MIS, MAX_SEED_INDEL, MAX_ALL_MIS, MAX_ALL_INDEL);

            // report remaining secondary alignments, up-to MAX_REPORT
            for (int i = 0; i < alnPEList.size() && (MAX_REPORT == 0 || i < MAX_REPORT); i++) {
                SAMRecordPair repPair = alnPEList.get(i);
                if (doUpdateBit)
                    repPair.setNotPrimaryAlignmentFlags(i != 0);
                int nReport = MAX_REPORT == 0 ? Math.min(alnPEList.size(), MAX_REPORT) : alnPEList.size();
                int nFiltered = alnPEList.size();
                if (repPair.fwdRecord != null) {
                    repPair.fwdRecord.setAttribute("NH", nReport);
                    repPair.fwdRecord.setAttribute("XN", nFiltered);
                    out.addAlignment(repPair.fwdRecord);
                }
                if (repPair.revRecord != null) {
                    repPair.revRecord.setAttribute("NH", nReport);
                    repPair.revRecord.setAttribute("XN", nFiltered);
                    out.addAlignment(repPair.revRecord);
                }
            }
            // reset list
            alnList.clear();
            alnPEList.clear();
        }
        // update
        if (!ID.equals(prevID)) {
            prevID = ID;
            prevRecord = record;
        }
        alnList.add(record);
    } // end while

    try {
        in.close();
        out.close();
    } catch (IOException e) {
        System.err.println(e.getMessage());
    }
    // Terminate the monitor task and monitor
    if (verbose > 0) {
        statusTask.cancel();
        statusTask.finish();
        processMonitor.cancel();
    }
}
From source file:com.yahoo.pulsar.testclient.PerformanceConsumer.java
public static void main(String[] args) throws Exception {
    final Arguments arguments = new Arguments();
    JCommander jc = new JCommander(arguments);
    jc.setProgramName("pulsar-perf-consumer");

    try {
        jc.parse(args);
    } catch (ParameterException e) {
        System.out.println(e.getMessage());
        jc.usage();
        System.exit(-1);
    }

    if (arguments.help) {
        jc.usage();
        System.exit(-1);
    }

    if (arguments.topic.size() != 1) {
        System.out.println("Only one destination name is allowed");
        jc.usage();
        System.exit(-1);
    }

    if (arguments.confFile != null) {
        Properties prop = new Properties(System.getProperties());
        prop.load(new FileInputStream(arguments.confFile));

        if (arguments.serviceURL == null) {
            arguments.serviceURL = prop.getProperty("brokerServiceUrl");
        }

        if (arguments.serviceURL == null) {
            arguments.serviceURL = prop.getProperty("webServiceUrl");
        }

        // fallback to previous-version serviceUrl property to maintain backward-compatibility
        if (arguments.serviceURL == null) {
            arguments.serviceURL = prop.getProperty("serviceUrl", "http://localhost:8080/");
        }

        if (arguments.authPluginClassName == null) {
            arguments.authPluginClassName = prop.getProperty("authPlugin", null);
        }

        if (arguments.authParams == null) {
            arguments.authParams = prop.getProperty("authParams", null);
        }
    }

    // Dump config variables
    ObjectMapper m = new ObjectMapper();
    ObjectWriter w = m.writerWithDefaultPrettyPrinter();
    log.info("Starting Pulsar performance consumer with config: {}", w.writeValueAsString(arguments));

    final DestinationName prefixDestinationName = DestinationName.get(arguments.topic.get(0));

    final RateLimiter limiter = arguments.rate > 0 ? RateLimiter.create(arguments.rate) : null;

    MessageListener listener = new MessageListener() {
        public void received(Consumer consumer, Message msg) {
            messagesReceived.increment();
            bytesReceived.add(msg.getData().length);

            if (limiter != null) {
                limiter.acquire();
            }

            consumer.acknowledgeAsync(msg);
        }
    };

    EventLoopGroup eventLoopGroup;
    if (SystemUtils.IS_OS_LINUX) {
        eventLoopGroup = new EpollEventLoopGroup(Runtime.getRuntime().availableProcessors() * 2,
                new DefaultThreadFactory("pulsar-perf-consumer"));
    } else {
        eventLoopGroup = new NioEventLoopGroup(Runtime.getRuntime().availableProcessors(),
                new DefaultThreadFactory("pulsar-perf-consumer"));
    }

    ClientConfiguration clientConf = new ClientConfiguration();
    clientConf.setConnectionsPerBroker(arguments.maxConnections);
    clientConf.setStatsInterval(arguments.statsIntervalSeconds, TimeUnit.SECONDS);
    if (isNotBlank(arguments.authPluginClassName)) {
        clientConf.setAuthentication(arguments.authPluginClassName, arguments.authParams);
    }
    PulsarClient pulsarClient = new PulsarClientImpl(arguments.serviceURL, clientConf, eventLoopGroup);

    List<Future<Consumer>> futures = Lists.newArrayList();
    ConsumerConfiguration consumerConfig = new ConsumerConfiguration();
    consumerConfig.setMessageListener(listener);
    consumerConfig.setReceiverQueueSize(arguments.receiverQueueSize);

    for (int i = 0; i < arguments.numDestinations; i++) {
        final DestinationName destinationName = (arguments.numDestinations == 1) ? prefixDestinationName
                : DestinationName.get(String.format("%s-%d", prefixDestinationName, i));
        log.info("Adding {} consumers on destination {}", arguments.numConsumers, destinationName);

        for (int j = 0; j < arguments.numConsumers; j++) {
            String subscriberName;
            if (arguments.numConsumers > 1) {
                subscriberName = String.format("%s-%d", arguments.subscriberName, j);
            } else {
                subscriberName = arguments.subscriberName;
            }

            futures.add(
                    pulsarClient.subscribeAsync(destinationName.toString(), subscriberName, consumerConfig));
        }
    }

    for (Future<Consumer> future : futures) {
        future.get();
    }

    log.info("Start receiving from {} consumers on {} destinations", arguments.numConsumers,
            arguments.numDestinations);

    long oldTime = System.nanoTime();

    while (true) {
        try {
            Thread.sleep(10000);
        } catch (InterruptedException e) {
            break;
        }

        long now = System.nanoTime();
        double elapsed = (now - oldTime) / 1e9;

        double rate = messagesReceived.sumThenReset() / elapsed;
        double throughput = bytesReceived.sumThenReset() / elapsed * 8 / 1024 / 1024;

        log.info("Throughput received: {} msg/s -- {} Mbit/s", dec.format(rate), dec.format(throughput));
        oldTime = now;
    }

    pulsarClient.close();
}
From source file:com.twitter.heron.scheduler.RuntimeManagerMain.java
public static void main(String[] args) throws ClassNotFoundException, IllegalAccessException,
        InstantiationException, IOException, ParseException {
    Options options = constructOptions();
    Options helpOptions = constructHelpOptions();
    CommandLineParser parser = new DefaultParser();

    // parse the help options first.
    CommandLine cmd = parser.parse(helpOptions, args, true);

    if (cmd.hasOption("h")) {
        usage(options);
        return;
    }

    try {
        // Now parse the required options
        cmd = parser.parse(options, args);
    } catch (ParseException e) {
        usage(options);
        throw new RuntimeException("Error parsing command line options: ", e);
    }

    Boolean verbose = false;
    Level logLevel = Level.INFO;
    if (cmd.hasOption("v")) {
        logLevel = Level.ALL;
        verbose = true;
    }

    // init log
    LoggingHelper.loggerInit(logLevel, false);

    String cluster = cmd.getOptionValue("cluster");
    String role = cmd.getOptionValue("role");
    String environ = cmd.getOptionValue("environment");
    String heronHome = cmd.getOptionValue("heron_home");
    String configPath = cmd.getOptionValue("config_path");
    String overrideConfigFile = cmd.getOptionValue("override_config_file");
    String releaseFile = cmd.getOptionValue("release_file");
    String topologyName = cmd.getOptionValue("topology_name");
    String commandOption = cmd.getOptionValue("command");

    // Optional argument in the case of restart
    // TODO(karthik): convert into CLI
    String containerId = Integer.toString(-1);
    if (cmd.hasOption("container_id")) {
        containerId = cmd.getOptionValue("container_id");
    }

    Command command = Command.makeCommand(commandOption);

    // first load the defaults, then the config from files to override it
    Config.Builder defaultsConfig = Config.newBuilder().putAll(ClusterDefaults.getDefaults())
            .putAll(ClusterConfig.loadConfig(heronHome, configPath, releaseFile));

    // add config parameters from the command line
    Config.Builder commandLineConfig = Config.newBuilder().put(Keys.cluster(), cluster).put(Keys.role(), role)
            .put(Keys.environ(), environ).put(Keys.verbose(), verbose)
            .put(Keys.topologyContainerId(), containerId);

    Config.Builder topologyConfig = Config.newBuilder().put(Keys.topologyName(), topologyName);

    Config.Builder overrideConfig = Config.newBuilder()
            .putAll(ClusterConfig.loadOverrideConfig(overrideConfigFile));

    // build the final config by expanding all the variables
    Config config = Config
            .expand(Config.newBuilder().putAll(defaultsConfig.build()).putAll(overrideConfig.build())
                    .putAll(commandLineConfig.build()).putAll(topologyConfig.build()).build());

    LOG.fine("Static config loaded successfully ");
    LOG.fine(config.toString());

    // Create a new instance of RuntimeManagerMain
    RuntimeManagerMain runtimeManagerMain = new RuntimeManagerMain(config, command);
    boolean isSuccessful = runtimeManagerMain.manageTopology();

    // Log the result and exit
    if (!isSuccessful) {
        throw new RuntimeException(String.format("Failed to %s topology %s", command, topologyName));
    } else {
        LOG.log(Level.FINE, "Topology {0} {1} successfully", new Object[] { topologyName, command });
    }
}