List of usage examples for java.lang.System.currentTimeMillis()
@HotSpotIntrinsicCandidate public static native long currentTimeMillis();
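Returns the current time in milliseconds since midnight, January 1, 1970 UTC. As a minimal, self-contained sketch of the elapsed-time pattern used throughout the examples below (the Thread.sleep call is just a stand-in for the work being timed):

public class ElapsedTimeExample {
    public static void main(String[] args) throws InterruptedException {
        long start = System.currentTimeMillis();
        Thread.sleep(250); // placeholder for the work being timed
        long end = System.currentTimeMillis();
        System.out.println("Elapsed: " + (end - start) + " ms");
    }
}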
From source file:com.foudroyantfactotum.mod.fousarchive.midi.generation.MidiImageGeneration.java
public static void main(String[] args)
        throws IOException, InvalidMidiDataException, InterruptedException, ExecutionException {
    try (InputStream is = new FileInputStream("")) {
        final long startTime = System.currentTimeMillis();
        final ImmutablePair<String, BufferedImage> res = new MidiImageGeneration(is, 119, 5000).buildImage();
        final long endTime = System.currentTimeMillis();
        Logger.info(UserLogger.GENERAL, String.format("startTime: %s endTime: %s totalTime: %s file: %s",
                startTime, endTime, endTime - startTime, res.getLeft()));
        ImageIO.write(res.getRight(), "png", new File(String.format("/tmp/%s.png", res.getLeft())));
    }
}
From source file:gov.lanl.adore.djatoka.DjatokaExtract.java
/**
 * Uses Apache Commons CLI to parse input args. Passes parsed
 * parameters to IExtract implementation.
 * @param args command line parameters to define input, output, etc.
 */
public static void main(String[] args) {
    // create the command line parser
    CommandLineParser parser = new PosixParser();
    // create the Options
    Options options = new Options();
    options.addOption("i", "input", true, "Filepath of the input file.");
    options.addOption("o", "output", true, "Filepath of the output file.");
    options.addOption("l", "level", true, "Resolution level to extract.");
    options.addOption("d", "reduce", true, "Resolution levels to subtract from max resolution.");
    options.addOption("r", "region", true, "Format: Y,X,H,W. ");
    options.addOption("c", "cLayer", true, "Compositing Layer Index.");
    options.addOption("s", "scale", true, "Format: Option 1. Define a long-side dimension (e.g. 96); Option 2. Define absolute w,h values (e.g. 1024,768); Option 3. Define a single dimension (e.g. 1024,0) with or without Level Parameter; Option 4. Use a single decimal scaling factor (e.g. 0.854)");
    options.addOption("t", "rotate", true, "Number of degrees to rotate image (i.e. 90, 180, 270).");
    options.addOption("f", "format", true, "Mimetype of the image format to be provided as response. Default: image/jpeg");
    options.addOption("a", "AltImpl", true, "Alternate IExtract Implementation");
    try {
        if (args.length == 0) {
            HelpFormatter formatter = new HelpFormatter();
            formatter.printHelp("gov.lanl.adore.djatoka.DjatokaExtract", options);
            System.exit(0);
        }
        // parse the command line arguments
        CommandLine line = parser.parse(options, args);
        String input = line.getOptionValue("i");
        String output = line.getOptionValue("o");
        DjatokaDecodeParam p = new DjatokaDecodeParam();
        String level = line.getOptionValue("l");
        if (level != null)
            p.setLevel(Integer.parseInt(level));
        String reduce = line.getOptionValue("d");
        if (level == null && reduce != null)
            p.setLevelReductionFactor(Integer.parseInt(reduce));
        String region = line.getOptionValue("r");
        if (region != null)
            p.setRegion(region);
        String cl = line.getOptionValue("c");
        if (cl != null) {
            int clayer = Integer.parseInt(cl);
            if (clayer > 0)
                p.setCompositingLayer(clayer);
        }
        String scale = line.getOptionValue("s");
        if (scale != null) {
            String[] v = scale.split(",");
            if (v.length == 1) {
                if (v[0].contains("."))
                    p.setScalingFactor(Double.parseDouble(v[0]));
                else {
                    int[] dims = new int[] { -1, Integer.parseInt(v[0]) };
                    p.setScalingDimensions(dims);
                }
            } else if (v.length == 2) {
                int[] dims = new int[] { Integer.parseInt(v[0]), Integer.parseInt(v[1]) };
                p.setScalingDimensions(dims);
            }
        }
        String rotate = line.getOptionValue("t");
        if (rotate != null)
            p.setRotationDegree(Integer.parseInt(rotate));
        String format = line.getOptionValue("f");
        if (format == null)
            format = "image/jpeg";
        String alt = line.getOptionValue("a");
        if (output == null)
            output = input + ".jpg";
        long x = System.currentTimeMillis();
        IExtract ex = new KduExtractExe();
        if (alt != null)
            ex = (IExtract) Class.forName(alt).newInstance();
        DjatokaExtractProcessor e = new DjatokaExtractProcessor(ex);
        e.extractImage(input, output, p, format);
        logger.info("Extraction Time: " + ((double) (System.currentTimeMillis() - x) / 1000) + " seconds");
    } catch (ParseException e) {
        logger.error("Parse exception:" + e.getMessage(), e);
    } catch (DjatokaException e) {
        logger.error("djatoka Extraction exception:" + e.getMessage(), e);
    } catch (InstantiationException e) {
        logger.error("Unable to initialize alternate implementation:" + e.getMessage(), e);
    } catch (Exception e) {
        logger.error("Unexpected exception:" + e.getMessage(), e);
    }
}
From source file:DruidResponseTime.java
public static void main(String[] args) throws Exception {
    try (CloseableHttpClient client = HttpClients.createDefault()) {
        HttpPost post = new HttpPost("http://localhost:8082/druid/v2/?pretty");
        post.addHeader("content-type", "application/json");
        CloseableHttpResponse res;
        if (STORE_RESULT) {
            File dir = new File(RESULT_DIR);
            if (!dir.exists()) {
                dir.mkdirs();
            }
        }
        int length;
        // Make sure all segments online
        System.out.println("Test if number of records is " + RECORD_NUMBER);
        post.setEntity(new StringEntity("{" + "\"queryType\":\"timeseries\","
                + "\"dataSource\":\"tpch_lineitem\"," + "\"intervals\":[\"1992-01-01/1999-01-01\"],"
                + "\"granularity\":\"all\","
                + "\"aggregations\":[{\"type\":\"count\",\"name\":\"count\"}]}"));
        while (true) {
            System.out.print('*');
            res = client.execute(post);
            boolean valid;
            try (BufferedInputStream in = new BufferedInputStream(res.getEntity().getContent())) {
                length = in.read(BYTE_BUFFER);
                valid = new String(BYTE_BUFFER, 0, length, "UTF-8").contains("\"count\" : 6001215");
            }
            res.close();
            if (valid) {
                break;
            } else {
                Thread.sleep(5000);
            }
        }
        System.out.println("Number of Records Test Passed");
        for (int i = 0; i < QUERIES.length; i++) {
            System.out.println("--------------------------------------------------------------------------------");
            System.out.println("Start running query: " + QUERIES[i]);
            try (BufferedReader reader = new BufferedReader(
                    new FileReader(QUERY_FILE_DIR + File.separator + i + ".json"))) {
                length = reader.read(CHAR_BUFFER);
                post.setEntity(new StringEntity(new String(CHAR_BUFFER, 0, length)));
            }
            // Warm-up Rounds
            System.out.println("Run " + WARMUP_ROUND + " times to warm up cache...");
            for (int j = 0; j < WARMUP_ROUND; j++) {
                res = client.execute(post);
                res.close();
                System.out.print('*');
            }
            System.out.println();
            // Test Rounds
            int[] time = new int[TEST_ROUND];
            int totalTime = 0;
            System.out.println("Run " + TEST_ROUND + " times to get average time...");
            for (int j = 0; j < TEST_ROUND; j++) {
                long startTime = System.currentTimeMillis();
                res = client.execute(post);
                long endTime = System.currentTimeMillis();
                if (STORE_RESULT && j == 0) {
                    try (BufferedInputStream in = new BufferedInputStream(res.getEntity().getContent());
                            BufferedWriter writer = new BufferedWriter(
                                    new FileWriter(RESULT_DIR + File.separator + i + ".json", false))) {
                        while ((length = in.read(BYTE_BUFFER)) > 0) {
                            writer.write(new String(BYTE_BUFFER, 0, length, "UTF-8"));
                        }
                    }
                }
                res.close();
                time[j] = (int) (endTime - startTime);
                totalTime += time[j];
                System.out.print(time[j] + "ms ");
            }
            System.out.println();
            // Process Results
            double avgTime = (double) totalTime / TEST_ROUND;
            double stdDev = 0;
            for (int temp : time) {
                stdDev += (temp - avgTime) * (temp - avgTime) / TEST_ROUND;
            }
            stdDev = Math.sqrt(stdDev);
            System.out.println("The average response time for the query is: " + avgTime + "ms");
            System.out.println("The standard deviation is: " + stdDev);
        }
    }
}
From source file:com.asakusafw.yaess.bootstrap.Yaess.java
/**
 * Program entry.
 * @param args program arguments
 */
public static void main(String... args) {
    CommandLineUtil.prepareLogContext();
    YSLOG.info("I00000");
    long start = System.currentTimeMillis();
    int status = execute(args);
    long end = System.currentTimeMillis();
    YSLOG.info("I00999", status, end - start);
    System.exit(status);
}
From source file:com.vangent.hieos.empi.pixpdq.loader.PIDFeedRandomLoader.java
/**
 *
 * @param args
 */
public static void main(String[] args) {
    PIDFeedRandomLoader pfl = new PIDFeedRandomLoader();
    long start = System.currentTimeMillis();
    Thread[] threads = new Thread[MAX_THREADS];
    for (int i = 0; i < MAX_THREADS; i++) {
        PIDFeedRunnable pidFeedRunnable = pfl.getPIDFeedRunnable(TEMPLATE, ENTERPRISE_ASSIGNING_AUTHORITY,
                LOCAL_ASSIGNING_AUTHORITY, RUN_COUNT_PER_THREAD, ENDPOINT);
        threads[i] = new Thread(pidFeedRunnable);
        threads[i].start();
    }
    for (int i = 0; i < threads.length; i++) {
        try {
            threads[i].join();
        } catch (InterruptedException ignore) {
        }
    }
    long elapsedTime = System.currentTimeMillis() - start;
    double txnSec = ((double) MAX_THREADS * (double) RUN_COUNT_PER_THREAD) / ((double) elapsedTime / 1000.0);
    System.out.println(" done ... TOTAL TIME = " + elapsedTime + "ms" + ", txn/sec = " + txnSec);
}
From source file:mvm.rya.indexing.external.ExternalIndexMain.java
public static void main(String[] args) throws Exception {
    Preconditions.checkArgument(args.length == 6, "java " + ExternalIndexMain.class.getCanonicalName()
            + " sparqlFile cbinstance cbzk cbuser cbpassword rdfTablePrefix.");
    final String sparqlFile = args[0];
    instStr = args[1];
    zooStr = args[2];
    userStr = args[3];
    passStr = args[4];
    tablePrefix = args[5];
    String queryString = FileUtils.readFileToString(new File(sparqlFile));
    // Look for Extra Indexes
    Instance inst = new ZooKeeperInstance(instStr, zooStr);
    Connector c = inst.getConnector(userStr, passStr.getBytes());
    System.out.println("Searching for Indexes");
    Map<String, String> indexTables = Maps.newLinkedHashMap();
    for (String table : c.tableOperations().list()) {
        if (table.startsWith(tablePrefix + "INDEX_")) {
            Scanner s = c.createScanner(table, new Authorizations());
            s.setRange(Range.exact(new Text("~SPARQL")));
            for (Entry<Key, Value> e : s) {
                indexTables.put(table, e.getValue().toString());
            }
        }
    }
    List<ExternalTupleSet> index = Lists.newArrayList();
    if (indexTables.isEmpty()) {
        System.out.println("No Index found");
    } else {
        for (String table : indexTables.keySet()) {
            String indexSparqlString = indexTables.get(table);
            System.out.println("====================== INDEX FOUND ======================");
            System.out.println(" table : " + table);
            System.out.println(" sparql : ");
            System.out.println(indexSparqlString);
            index.add(new AccumuloIndexSet(indexSparqlString, c, table));
        }
    }
    // Connect to Rya
    Sail s = getRyaSail();
    SailRepository repo = new SailRepository(s);
    repo.initialize();
    // Perform Query
    CountingTupleQueryResultHandler count = new CountingTupleQueryResultHandler();
    SailRepositoryConnection conn;
    if (index.isEmpty()) {
        conn = repo.getConnection();
    } else {
        ExternalProcessor processor = new ExternalProcessor(index);
        Sail processingSail = new ExternalSail(s, processor);
        SailRepository smartSailRepo = new SailRepository(processingSail);
        smartSailRepo.initialize();
        conn = smartSailRepo.getConnection();
    }
    startTime = System.currentTimeMillis();
    lastTime = startTime;
    System.out.println("Query Started");
    conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(count);
    System.out.println("Count of Results found : " + count.i);
    System.out.println("Total query time (s) : " + (System.currentTimeMillis() - startTime) / 1000.);
}
From source file:com.johnson.grab.browser.HttpClientUtil.java
public static void main(String[] args) throws IOException, CrawlException {
    // final String url = "http://192.168.24.248:8080/HbaseDb/youku/";
    // String url = "http://business.sohu.com/20131021/n388557348.shtml?pvid=tc_business&a=&b=%E6%A5%BC%E5%B8%82%E6%B3%A1%E6%B2%AB%E7%A0%B4%E7%81%AD%E5%B0%86%E5%9C%A82015%E5%B9%B4%E5%BA%95%E4%B9%8B%E5%89%8D";
    final String url = "http://www.sohu.com";
    final int threadNum = 20;
    final int loop = 100;
    Thread[] threads = new Thread[threadNum];
    final List<Integer> times = new ArrayList<Integer>();
    final long s = System.currentTimeMillis();
    for (int i = 0; i < threads.length; i++) {
        threads[i] = new Thread() {
            public void run() {
                for (int i = 0; i < loop; i++) {
                    try {
                        getContent(url);
                    } catch (CrawlException e) {
                        e.printStackTrace();
                    } catch (IOException e) {
                        e.printStackTrace();
                    } catch (Throwable t) {
                        t.printStackTrace();
                    }
                    long e = System.currentTimeMillis();
                    times.add((int) (e - s));
                }
            }
        };
        threads[i].start();
    }
    while (times.size() < threadNum * loop) {
        int current = times.size();
        System.out.println("total: " + threadNum * loop + ", current: " + current + ", left: "
                + (threadNum * loop - current));
        try {
            Thread.sleep(1000);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }
    long e = System.currentTimeMillis();
    int totalTime = 0;
    for (Integer time : times) {
        totalTime += time;
    }
    System.out.println("------------------------------------------------------");
    System.out.println("thread num: " + threadNum + ", loop: " + loop);
    System.out.println("totalTime: " + totalTime + ", averTime: " + totalTime / (threadNum * loop));
    System.out.println("finalTime: " + (e - s) + ", throughput: " + (e - s) / (threadNum * loop));
}
From source file:cn.jumper.study.http.ClientFormLogin.java
public static void main(String[] args) throws Exception {
    BasicCookieStore cookieStore = new BasicCookieStore();
    CloseableHttpClient httpclient = HttpClients.custom().setDefaultCookieStore(cookieStore).build();
    HttpHost proxy = new HttpHost("192.168.10.3", 8080, "http");
    RequestConfig config = RequestConfig.custom().setProxy(proxy).build();
    try {
        HttpGet httpget = new HttpGet("http://www.ksf-food.com/admin/Login.asp");
        httpget.setConfig(config);
        CloseableHttpResponse response1 = httpclient.execute(httpget);
        try {
            HttpEntity entity = response1.getEntity();
            System.out.println("Login form get: " + response1.getStatusLine());
            EntityUtils.consume(entity);
            System.out.println("Initial set of cookies:");
            List<Cookie> cookies = cookieStore.getCookies();
            if (cookies.isEmpty()) {
                System.out.println("None");
            } else {
                for (int i = 0; i < cookies.size(); i++) {
                    System.out.println("- " + cookies.get(i).toString());
                }
            }
        } finally {
            response1.close();
        }
        String code = "";
        try {
            HttpUriRequest httpgetCode = RequestBuilder.get()
                    .setUri("http://www.ksf-food.com/admin/inc/checkcode.asp").setConfig(config).build();
            /*
             * HttpGet httpgetCode = new HttpGet(
             *         "http://www.qufuev.com/admin/inc/checkcode.asp");
             * httpgetCode.setConfig(config);
             */
            System.out.println("Executing request " + httpgetCode.getRequestLine());
            System.out.println("========================================================");
            System.out.println("==httpget header ==");
            for (Header header : httpgetCode.getAllHeaders()) {
                System.out.println(header.getName() + ":" + header.getValue());
            }
            System.out.println("==httpget header ==");
            ResponseHandler<String> responseHandler = new ResponseHandler<String>() {
                public String handleResponse(final HttpResponse response)
                        throws ClientProtocolException, IOException {
                    int status = response.getStatusLine().getStatusCode();
                    if (status >= 200 && status < 300) {
                        HttpEntity entity = response.getEntity();
                        System.out.println("==respons header ==");
                        for (Header header : response.getAllHeaders()) {
                            System.out.println(header.getName() + ":" + header.getValue());
                        }
                        System.out.println("==respons header ==");
                        String fileName = System.currentTimeMillis() + "";
                        DataOutputStream dataOutputStream = new DataOutputStream(
                                new FileOutputStream("d://test//e3//" + fileName + ".jpg"));
                        dataOutputStream.write(EntityUtils.toByteArray(entity));
                        dataOutputStream.close();
                        return ImageTest.getAllOcr("d://test//e3//" + fileName + ".jpg");
                    } else {
                        throw new ClientProtocolException("Unexpected response status: " + status);
                    }
                }
            };
            code = httpclient.execute(httpgetCode, responseHandler);
            System.out.println("ClientFormLogin.main()-CheckCode:" + code);
            System.out.println("----------------------------------------");
        } catch (IOException e) {
            e.printStackTrace();
        }
        HttpUriRequest login = RequestBuilder.post()
                .setUri(new URI("http://www.ksf-food.com/admin/Admin_ChkLogin.asp"))
                .addParameter("UserName", "username").addParameter("Password", "password")
                .addParameter("CheckCode", code).setConfig(config).build();
        System.out.println("========================================================");
        System.out.println("==httpget header ==");
        for (Header header : login.getAllHeaders()) {
            System.out.println(header.getName() + ":" + header.getValue());
        }
        CloseableHttpResponse response2 = httpclient.execute(login);
        try {
            HttpEntity entity = response2.getEntity();
            System.out.println("Login form post: " + response2.getStatusLine());
            // EntityUtils.consume(entity);
            System.out.println("ClientFormLogin.main():\\n" + EntityUtils.toString(entity, "GBK"));
            System.out.println("Post logon cookies:");
            List<Cookie> cookies = cookieStore.getCookies();
            if (cookies.isEmpty()) {
                System.out.println("None");
            } else {
                for (int i = 0; i < cookies.size(); i++) {
                    System.out.println("- " + cookies.get(i).toString());
                }
            }
        } finally {
            response2.close();
        }
    } finally {
        httpclient.close();
    }
}
From source file:eu.scape_project.pc.hadoop.TikaIdentifyHadoopJob.java
/**
 * The main entry point.
 */
public static void main(String[] args) throws ParseException {
    Configuration conf = new Configuration();
    GenericOptionsParser gop = new GenericOptionsParser(conf, args);
    HadoopJobCliConfig pc = new HadoopJobCliConfig();
    CommandLineParser cmdParser = new PosixParser();
    CommandLine cmd = cmdParser.parse(HadoopJobOptions.OPTIONS, gop.getRemainingArgs());
    if ((args.length == 0) || (cmd.hasOption(HadoopJobOptions.HELP_OPT))) {
        HadoopJobOptions.exit("Usage", 0);
    } else {
        HadoopJobOptions.initOptions(cmd, pc);
    }
    String dir = pc.getDirStr();
    String name = pc.getHadoopJobName();
    if (name == null || name.equals("")) {
        name = "tika_identification";
    }
    try {
        Job job = new Job(conf, name);
        // local debugging
        // job.getConfiguration().set("mapred.job.tracker", "local");
        // job.getConfiguration().set("fs.default.name", "file:///");
        job.setJarByClass(TikaIdentifyHadoopJob.class);
        job.setMapperClass(TikaIdentifyMapper.class);
        //job.setCombinerClass(TikaIdentifyReducer.class);
        job.setReducerClass(TikaIdentifyReducer.class);
        job.setInputFormatClass(TextInputFormat.class);
        job.setOutputFormatClass(TextOutputFormat.class);
        //SequenceFileOutputFormat.setOutputCompressionType(job, SequenceFile.CompressionType.NONE);
        //conf.setMapOutputKeyClass(Text.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(LongWritable.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(LongWritable.class);
        SequenceFileInputFormat.addInputPath(job, new Path(dir));
        String outpath = "output/" + System.currentTimeMillis() + "tid";
        FileOutputFormat.setOutputPath(job, new Path(outpath));
        job.waitForCompletion(true);
        System.out.print(outpath);
        System.exit(0);
    } catch (Exception e) {
        logger.error("I/O error", e);
    }
}
From source file:edu.msu.cme.rdp.readseq.utils.QualityTrimmer.java
public static void main(String[] args) throws Exception {
    Options options = new Options();
    options.addOption("f", "fastq-out", false, "Write fastq instead of fasta file");
    options.addOption("l", "less-than", false, "Trim at <= instead of strictly =");
    options.addOption("i", "illumina", false, "Illumina trimming mode");
    FastqWriter fastqOut = null;
    FastaWriter fastaOut = null;
    byte qualTrim = -1;
    boolean writeFasta = true;
    boolean trimle = false;
    boolean illumina = false;
    List<SeqReader> readers = new ArrayList();
    List<File> seqFiles = new ArrayList();
    try {
        CommandLine line = new PosixParser().parse(options, args);
        if (line.hasOption("fastq-out")) {
            writeFasta = false;
        }
        if (line.hasOption("less-than")) {
            trimle = true;
        }
        if (line.hasOption("illumina")) {
            illumina = true;
        }
        args = line.getArgs();
        if (args.length < 2) {
            throw new Exception("Unexpected number of arguments");
        }
        if (args[0].length() != 1) {
            throw new Exception("Expected single character quality score");
        }
        qualTrim = FastqCore.Phred33QualFunction.translate(args[0].charAt(0));
        for (int index = 1; index < args.length; index++) {
            File seqFile = new File(args[index]);
            SeqReader reader;
            if (SeqUtils.guessFileFormat(seqFile) == SequenceFormat.FASTA) {
                if (index + 1 == args.length) {
                    throw new Exception("Fasta files must be immediately followed by their quality file");
                }
                File qualFile = new File(args[index + 1]);
                if (SeqUtils.guessFileFormat(qualFile) != SequenceFormat.FASTA) {
                    throw new Exception(seqFile + " was not followed by a fasta quality file");
                }
                reader = new QSeqReader(seqFile, qualFile);
                index++;
            } else {
                if (seqFile.getName().endsWith(".gz")) {
                    reader = new SequenceReader(new GZIPInputStream(new FileInputStream(seqFile)));
                } else {
                    reader = new SequenceReader(seqFile);
                }
            }
            readers.add(reader);
            seqFiles.add(seqFile);
        }
    } catch (Exception e) {
        new HelpFormatter().printHelp("USAGE: QualityTrimmer [options] <ascii_score> <seq_file> [qual_file]",
                options, true);
        System.err.println("Error: " + e.getMessage());
        System.exit(1);
    }
    for (int readerIndex = 0; readerIndex < readers.size(); readerIndex++) {
        File seqFile = seqFiles.get(readerIndex);
        String outStem = "trimmed_" + seqFile.getName().substring(0, seqFile.getName().lastIndexOf("."));
        if (writeFasta) {
            fastaOut = new FastaWriter(outStem + ".fasta");
        } else {
            fastqOut = new FastqWriter(outStem + ".fastq", FastqCore.Phred33QualFunction);
        }
        int[] lengthHisto = new int[200];
        SeqReader reader = readers.get(readerIndex);
        QSequence qseq;
        long totalLength = 0;
        int totalSeqs = 0;
        long trimmedLength = 0;
        int trimmedSeqs = 0;
        int zeroLengthAfterTrimming = 0;
        long startTime = System.currentTimeMillis();
        while ((qseq = (QSequence) reader.readNextSequence()) != null) {
            char[] bases = qseq.getSeqString().toCharArray();
            byte[] qual = qseq.getQuality();
            if (bases.length != qual.length) {
                System.err.println(qseq.getSeqName() + ": Quality length doesn't match seq length for seq");
                continue;
            }
            totalSeqs++;
            totalLength += bases.length;
            int trimIndex = -1;
            if (illumina && qual[bases.length - 1] == qualTrim) {
                trimIndex = bases.length - 1;
                while (trimIndex >= 0 && qual[trimIndex] == qualTrim) {
                    trimIndex--;
                }
                trimIndex++; //Technically we're positioned over the first good base, move back to the last bad base
            } else if (!illumina) {
                for (int index = 0; index < bases.length; index++) {
                    if (qual[index] == qualTrim || (trimle && qual[index] < qualTrim)) {
                        trimIndex = index;
                        break;
                    }
                }
            }
            String outSeq;
            byte[] outQual;
            if (trimIndex == -1) {
                outSeq = qseq.getSeqString();
                outQual = qseq.getQuality();
            } else {
                outSeq = new String(bases, 0, trimIndex);
                outQual = Arrays.copyOfRange(qual, 0, trimIndex);
                trimmedSeqs++;
            }
            int len = outSeq.length();
            trimmedLength += len;
            if (len >= lengthHisto.length) {
                lengthHisto = Arrays.copyOf(lengthHisto, len + 1);
            }
            lengthHisto[len]++;
            if (outSeq.length() == 0) {
                //System.err.println(qseq.getSeqName() + ": length 0 after trimming");
                zeroLengthAfterTrimming++;
                continue;
            }
            if (writeFasta) {
                fastaOut.writeSeq(qseq.getSeqName(), qseq.getDesc(), outSeq);
            } else {
                fastqOut.writeSeq(qseq.getSeqName(), qseq.getDesc(), outSeq, outQual);
            }
        }
        reader.close();
        if (writeFasta) {
            fastaOut.close();
        } else {
            fastqOut.close();
        }
        System.out.println("Processed " + seqFile + " in " + (System.currentTimeMillis() - startTime) / 1000.0 + "s");
        System.out.println("Before trimming:");
        System.out.println("Total Sequences: " + totalSeqs);
        System.out.println("Total Sequence Data: " + totalLength);
        System.out.println("Average sequence length: " + ((float) totalLength / totalSeqs));
        System.out.println();
        System.out.println("After trimming:");
        System.out.println("Total Sequences: " + (totalSeqs - zeroLengthAfterTrimming));
        System.out.println("Sequences Trimmed: " + trimmedSeqs);
        System.out.println("Total Sequence Data: " + trimmedLength);
        System.out.println("Average sequence length: " + ((float) trimmedLength / (totalSeqs - zeroLengthAfterTrimming)));
        System.out.println();
        System.out.println("Length\tCount");
        for (int index = 0; index < lengthHisto.length; index++) {
            if (lengthHisto[index] == 0) {
                continue;
            }
            System.out.println(index + "\t" + lengthHisto[index]);
        }
        System.out.println();
        System.out.println();
        System.out.println();
    }
}