List of usage examples for java.lang.String.length()
public int length()
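Before the longer examples below, here is a minimal, self-contained sketch (not taken from any of the source files listed here) illustrating what length() reports: the number of UTF-16 code units in the string, so a supplementary character stored as a surrogate pair counts as two.

public class StringLengthDemo {
    public static void main(String[] args) {
        String plain = "hello";
        System.out.println(plain.length()); // 5

        String empty = "";
        System.out.println(empty.length()); // 0

        // An emoji outside the BMP is stored as a surrogate pair, so length()
        // reports 2 code units for it while codePointCount() reports 1 code point.
        String emoji = "a\uD83D\uDE00";
        System.out.println(emoji.length());                          // 3
        System.out.println(emoji.codePointCount(0, emoji.length())); // 2
    }
}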
From source file:com.heliosapm.benchmarks.json.JSONUnmarshalling.java
/** @param args */
public static void main(String[] args) {
    final Map<String, ChannelBuffer> bufferMap = DIRECT_DATA_BUFFERS;
    log("JSON Test");
    InputStream is = null;
    for (String sample : DATA) {
        try {
            final String jsonText = bufferMap.get(sample).duplicate().toString(UTF8);
            Person[] p = parseToObject(jsonText, Person[].class);
            log("Parsed STRING [%s] to objects: %s", sample, p.length);
            p = parseToObject(bufferMap.get(sample).duplicate(), Person[].class);
            log("Parsed BUFFER [%s] to objects: %s", sample, p.length);
            String s = serializeToString(p);
            log("Serialized to STRING [%s], size: %s", sample, s.length());
            ChannelBuffer c = serializeToBuffer(heapFactory, p);
            log("Serialized to Heap Buffer [%s], size: %s", sample, c.readableBytes());
            c = serializeToBuffer(directFactory, p);
            log("Serialized to Direct Buffer [%s], size: %s", sample, c.readableBytes());
        } catch (Exception ex) {
            throw new RuntimeException("Failed to process string sample [" + sample + "]", ex);
        } finally {
            if (is != null) try { is.close(); } catch (Exception x) { /* No Op */ }
        }
    }
}
From source file:com.github.ansell.shp.SHPDump.java
public static void main(String... args) throws Exception {
    final OptionParser parser = new OptionParser();
    final OptionSpec<Void> help = parser.accepts("help").forHelp();
    final OptionSpec<File> input = parser.accepts("input").withRequiredArg().ofType(File.class).required()
            .describedAs("The input SHP file");
    final OptionSpec<File> output = parser.accepts("output").withRequiredArg().ofType(File.class).required()
            .describedAs("The output directory to use for debugging files");
    final OptionSpec<String> outputPrefix = parser.accepts("prefix").withRequiredArg().ofType(String.class)
            .defaultsTo("shp-debug").describedAs("The output prefix to use for debugging files");
    final OptionSpec<File> outputMappingTemplate = parser.accepts("output-mapping").withRequiredArg()
            .ofType(File.class).describedAs("The output mapping template file if it needs to be generated.");
    final OptionSpec<Integer> resolution = parser.accepts("resolution").withRequiredArg().ofType(Integer.class)
            .defaultsTo(2048).describedAs("The output image file resolution");
    final OptionSpec<String> format = parser.accepts("format").withRequiredArg().ofType(String.class)
            .defaultsTo("png").describedAs("The output image format");
    final OptionSpec<String> removeIfEmpty = parser.accepts("remove-if-empty").withRequiredArg()
            .ofType(String.class).describedAs(
                    "The name of an attribute to remove if its value is empty before outputting the resulting shapefile. Use multiple times to specify multiple fields to check");

    OptionSet options = null;
    try {
        options = parser.parse(args);
    } catch (final OptionException e) {
        System.out.println(e.getMessage());
        parser.printHelpOn(System.out);
        throw e;
    }

    if (options.has(help)) {
        parser.printHelpOn(System.out);
        return;
    }

    final Path inputPath = input.value(options).toPath();
    if (!Files.exists(inputPath)) {
        throw new FileNotFoundException("Could not find input SHP file: " + inputPath.toString());
    }

    final Path outputPath = output.value(options).toPath();
    if (!Files.exists(outputPath)) {
        throw new FileNotFoundException("Output directory does not exist: " + outputPath.toString());
    }

    final Path outputMappingPath = options.has(outputMappingTemplate)
            ? outputMappingTemplate.value(options).toPath() : null;
    if (options.has(outputMappingTemplate) && Files.exists(outputMappingPath)) {
        throw new FileNotFoundException(
                "Output mapping template file already exists: " + outputMappingPath.toString());
    }

    final Set<String> filterFields = ConcurrentHashMap.newKeySet();
    if (options.has(removeIfEmpty)) {
        for (String nextFilterField : removeIfEmpty.values(options)) {
            System.out.println("Will filter field if empty value found: " + nextFilterField);
            filterFields.add(nextFilterField);
        }
    }
    if (!filterFields.isEmpty()) {
        System.out.println("Full set of filter fields: " + filterFields);
    }

    final String prefix = outputPrefix.value(options);

    FileDataStore store = FileDataStoreFinder.getDataStore(inputPath.toFile());
    if (store == null) {
        throw new RuntimeException("Could not read the given input as an ESRI Shapefile: "
                + inputPath.toAbsolutePath().toString());
    }

    for (String typeName : new LinkedHashSet<>(Arrays.asList(store.getTypeNames()))) {
        System.out.println("");
        System.out.println("Type: " + typeName);
        SimpleFeatureSource featureSource = store.getFeatureSource(typeName);
        SimpleFeatureType schema = featureSource.getSchema();

        Name outputSchemaName = new NameImpl(schema.getName().getNamespaceURI(),
                schema.getName().getLocalPart().replace(" ", "").replace("%20", ""));
        System.out.println("Replacing name on schema: " + schema.getName() + " with " + outputSchemaName);
        SimpleFeatureType outputSchema = SHPUtils.changeSchemaName(schema, outputSchemaName);

        List<String> attributeList = new ArrayList<>();
        for (AttributeDescriptor attribute : schema.getAttributeDescriptors()) {
            System.out.println("Attribute: " + attribute.getName().toString());
            attributeList.add(attribute.getName().toString());
        }
        CsvSchema csvSchema = CSVUtil.buildSchema(attributeList);

        SimpleFeatureCollection collection = featureSource.getFeatures();
        int featureCount = 0;
        Path nextCSVFile = outputPath.resolve(prefix + ".csv");
        Path nextSummaryCSVFile = outputPath.resolve(prefix + "-" + outputSchema.getTypeName() + "-Summary.csv");
        List<SimpleFeature> outputFeatureList = new CopyOnWriteArrayList<>();

        try (SimpleFeatureIterator iterator = collection.features();
                Writer bufferedWriter = Files.newBufferedWriter(nextCSVFile, StandardCharsets.UTF_8,
                        StandardOpenOption.CREATE_NEW);
                SequenceWriter csv = CSVUtil.newCSVWriter(bufferedWriter, csvSchema);) {
            List<String> nextLine = new ArrayList<>();
            while (iterator.hasNext()) {
                SimpleFeature feature = iterator.next();
                featureCount++;
                if (featureCount <= 2) {
                    System.out.println("");
                    System.out.println(feature.getIdentifier());
                } else if (featureCount % 100 == 0) {
                    System.out.print(".");
                }
                boolean filterThisFeature = false;
                for (AttributeDescriptor attribute : schema.getAttributeDescriptors()) {
                    String featureString = Optional.ofNullable(feature.getAttribute(attribute.getName()))
                            .orElse("").toString();
                    nextLine.add(featureString);
                    if (filterFields.contains(attribute.getName().toString()) && featureString.trim().isEmpty()) {
                        filterThisFeature = true;
                    }
                    if (featureString.length() > 100) {
                        featureString = featureString.substring(0, 100) + "...";
                    }
                    if (featureCount <= 2) {
                        System.out.print(attribute.getName() + "=");
                        System.out.println(featureString);
                    }
                }
                if (!filterThisFeature) {
                    outputFeatureList.add(SHPUtils.changeSchemaName(feature, outputSchema));
                    csv.write(nextLine);
                }
                nextLine.clear();
            }
        }

        try (Reader csvReader = Files.newBufferedReader(nextCSVFile, StandardCharsets.UTF_8);
                Writer summaryOutput = Files.newBufferedWriter(nextSummaryCSVFile, StandardCharsets.UTF_8,
                        StandardOpenOption.CREATE_NEW);
                final Writer mappingWriter = options.has(outputMappingTemplate)
                        ? Files.newBufferedWriter(outputMappingPath) : NullWriter.NULL_WRITER) {
            CSVSummariser.runSummarise(csvReader, summaryOutput, mappingWriter,
                    CSVSummariser.DEFAULT_SAMPLE_COUNT, false);
        }

        if (featureCount > 100) {
            System.out.println("");
        }
        System.out.println("");
        System.out.println("Feature count: " + featureCount);

        SimpleFeatureCollection outputCollection = new ListFeatureCollection(outputSchema, outputFeatureList);
        Path outputShapefilePath = outputPath.resolve(prefix + "-" + outputSchema.getTypeName() + "-dump");
        if (!Files.exists(outputShapefilePath)) {
            Files.createDirectory(outputShapefilePath);
        }
        SHPUtils.writeShapefile(outputCollection, outputShapefilePath);

        // Create ZIP file from the contents to keep the subfiles together
        Path outputShapefileZipPath = outputPath.resolve(prefix + "-" + outputSchema.getTypeName() + "-dump.zip");
        try (final OutputStream out = Files.newOutputStream(outputShapefileZipPath, StandardOpenOption.CREATE_NEW);
                final ZipOutputStream zip = new ZipOutputStream(out, StandardCharsets.UTF_8);) {
            Files.list(outputShapefilePath).forEachOrdered(Unchecked.consumer(e -> {
                zip.putNextEntry(new ZipEntry(e.getFileName().toString()));
                Files.copy(e, zip);
                zip.closeEntry();
            }));
        }

        try (final OutputStream outputStream = Files.newOutputStream(
                outputPath.resolve(prefix + "." + format.value(options)), StandardOpenOption.CREATE_NEW);) {
            MapContent map = new MapContent();
            map.setTitle(prefix + "-" + outputSchema.getTypeName());
            Style style = SLD.createSimpleStyle(featureSource.getSchema());
            Layer layer = new FeatureLayer(new CollectionFeatureSource(outputCollection), style);
            map.addLayer(layer);
            SHPUtils.renderImage(map, outputStream, resolution.value(options), format.value(options));
        }
    }
}
From source file:MainClass.java
public static void main(String args[]) throws Exception {
    SSLServerSocketFactory ssf = (SSLServerSocketFactory) SSLServerSocketFactory.getDefault();
    ServerSocket ss = ssf.createServerSocket(443);
    while (true) {
        Socket s = ss.accept();
        PrintStream out = new PrintStream(s.getOutputStream());
        BufferedReader in = new BufferedReader(new InputStreamReader(s.getInputStream()));
        String info = null;
        String request = null;
        String refer = null;
        while ((info = in.readLine()) != null) {
            if (info.startsWith("GET")) {
                request = info;
            }
            if (info.startsWith("Referer:")) {
                refer = info;
            }
            if (info.equals(""))
                break;
        }
        if (request != null) {
            out.println("HTTP/1.0 200 OK\nMIME_version:1.0\nContent_Type:text/html");
            int sp1 = request.indexOf(' ');
            int sp2 = request.indexOf(' ', sp1 + 1);
            String filename = request.substring(sp1 + 2, sp2);
            if (refer != null) {
                sp1 = refer.indexOf(' ');
                refer = refer.substring(sp1 + 1, refer.length());
                if (!refer.endsWith("/")) {
                    refer = refer + "/";
                }
                filename = refer + filename;
            }
            URL con = new URL(filename);
            InputStream gotoin = con.openStream();
            int n = gotoin.available();
            byte buf[] = new byte[1024];
            out.println("HTTP/1.0 200 OK\nMIME_version:1.0\nContent_Type:text/html");
            out.println("Content_Length:" + n + "\n");
            while ((n = gotoin.read(buf)) >= 0) {
                out.write(buf, 0, n);
            }
            out.close();
            s.close();
            in.close();
        }
    }
}
From source file:com.twentyn.chemicalClassifier.Runner.java
public static void main(String[] args) throws Exception {
    BufferedReader reader = new BufferedReader(new FileReader(args[0]));
    BufferedWriter writer = new BufferedWriter(new FileWriter(args[1]));
    try {
        Oscar oscar = new Oscar();
        String line = null;
        /* NOTE: this is exactly the wrong way to write a TSV reader. Caveat emptor.
         * See http://tburette.github.io/blog/2014/05/25/so-you-want-to-write-your-own-CSV-code/
         * and then use org.apache.commons.csv.CSVParser instead. */
        while ((line = reader.readLine()) != null) {
            // TSV means split on tabs! Nothing else will do.
            List<String> fields = Arrays.asList(line.split("\t"));
            // Choke if our invariants aren't satisfied. We expect every line to have a name and an InChI.
            if (fields.size() != 2) {
                throw new RuntimeException(
                        String.format("Found malformed line (all lines must have two fields): %s", line));
            }
            String name = fields.get(1);
            List<ResolvedNamedEntity> entities = oscar.findAndResolveNamedEntities(name);
            System.out.println("**********");
            System.out.println("Name: " + name);
            List<String> outputFields = new ArrayList<>(fields.size() + 1);
            outputFields.addAll(fields);
            if (entities.size() == 0) {
                System.out.println("No match");
                outputFields.add("noMatch");
            } else if (entities.size() == 1) {
                ResolvedNamedEntity entity = entities.get(0);
                NamedEntity ne = entity.getNamedEntity();
                if (ne.getStart() != 0 || ne.getEnd() != name.length()) {
                    System.out.println("Partial match");
                    printEntity(entity);
                    outputFields.add("partialMatch");
                } else {
                    System.out.println("Exact match");
                    printEntity(entity);
                    outputFields.add("exactMatch");
                    List<ChemicalStructure> structures = entity.getChemicalStructures(FormatType.STD_INCHI);
                    for (ChemicalStructure s : structures) {
                        outputFields.add(s.getValue());
                    }
                }
            } else {
                // Multiple matches found!
                System.out.println("Multiple matches");
                for (ResolvedNamedEntity e : entities) {
                    printEntity(e);
                }
                outputFields.add("multipleMatches");
            }
            writer.write(String.join("\t", outputFields));
            writer.newLine();
        }
    } finally {
        writer.flush();
        writer.close();
    }
}
From source file:edu.msu.cme.rdp.probematch.cli.SliceToPrimer.java
public static void main(String[] args) throws Exception {
    //args = "--fedit-dist 4 --redit-dist=4 -k --max-length=400 --min-length=280 -o java_sliced_edit4.fasta TGCGAYCCSAARGCBGACTC ATSGCCATCATYTCRCCGGA /scratch/fishjord/tae_kwon_primer_match/all_genomes.fasta".split(" ");
    PatternBitMask64[] fprimers;
    String[] fprimerStrs, rprimerStrs;
    PatternBitMask64[] rprimers;
    FastaWriter seqOut;
    PrintStream statsOut;
    int fEdit = 3;
    int rEdit = 3;
    int minLength = Integer.MIN_VALUE;
    int maxLength = Integer.MAX_VALUE;
    boolean allowAmbiguities = true;
    boolean keepPrimers = false;
    SequenceReader inSeqs;

    try {
        CommandLine line = new PosixParser().parse(options, args);
        if (line.hasOption("edit-dist")) {
            fEdit = rEdit = Integer.parseInt(line.getOptionValue("edit-dist"));
            if (line.hasOption("redit-dist") || line.hasOption("fedit-dist")) {
                throw new Exception("edit-dist, [fedit-dist, redit-dist] are mutually exclusive");
            }
        }
        if (line.hasOption("fedit-dist")) {
            fEdit = Integer.parseInt(line.getOptionValue("fedit-dist"));
        }
        if (line.hasOption("no-ambiguities")) {
            allowAmbiguities = false;
        }
        if (line.hasOption("keep-primers")) {
            keepPrimers = true;
        }
        if (line.hasOption("redit-dist")) {
            rEdit = Integer.parseInt(line.getOptionValue("redit-dist"));
        }
        if (line.hasOption("seq-out")) {
            seqOut = new FastaWriter(new File(line.getOptionValue("seq-out")));
        } else {
            throw new Exception("Must specify seq-out");
        }
        if (line.hasOption("stats-out")) {
            statsOut = new PrintStream(new File(line.getOptionValue("stats-out")));
        } else {
            statsOut = System.out;
        }
        if (line.hasOption("min-length")) {
            minLength = Integer.parseInt(line.getOptionValue("min-length"));
        }
        if (line.hasOption("max-length")) {
            maxLength = Integer.parseInt(line.getOptionValue("max-length"));
        }

        args = line.getArgs();
        if (args.length != 3) {
            throw new Exception("Unexpected number of command line arguments");
        }

        fprimers = translateStringPrimers(args[0].split(","), allowAmbiguities, false);
        fprimerStrs = args[0].split(",");
        rprimers = translateStringPrimers(args[1].split(","), allowAmbiguities, true);
        rprimerStrs = args[1].split(",");
        inSeqs = new SequenceReader(new File(args[2]));
    } catch (Exception e) {
        new HelpFormatter().printHelp("SliceToPrimer [options] <f,p,r,i,m,e,r> <r,p,r,i,m,e,r> <in_seq_file>", options);
        System.err.println("ERROR: " + e.getMessage());
        return;
    }

    Sequence seq;
    statsOut.println("orig_seqid\tsliced_seqid\tfprimer\tstart\tend\tscore\trprimer\tstart\tend\tscore\tlength");

    ScoringMatrix scoringMatrix = ScoringMatrix.getDefaultNuclMatrix();
    DPMAligner[] faligners = new DPMAligner[fprimers.length];
    for (int index = 0; index < faligners.length; index++) {
        faligners[index] = new DPMAligner(fprimerStrs[index], Integer.MAX_VALUE);
    }

    try {
        while ((seq = inSeqs.readNextSequence()) != null) {
            Set<PrimerMatch> fprimerMatches = new HashSet();
            Set<PrimerMatch> rprimerMatches = new HashSet();

            for (int index = 0; index < fprimers.length; index++) {
                PatternBitMask64 primer = fprimers[index];
                for (BitVector64Match r : BitVector64.process(seq.getSeqString().toCharArray(), primer, fEdit).getResults()) {
                    PrimerMatch match = new PrimerMatch();
                    match.start = r.getPosition() - (primer.getPatternLength() + r.getScore());
                    match.end = r.getPosition();
                    match.score = r.getScore();
                    match.primerIndex = index;
                    fprimerMatches.add(match);
                }
            }

            for (int index = 0; index < rprimers.length; index++) {
                PatternBitMask64 primer = rprimers[index];
                for (BitVector64Match r : BitVector64.process(seq.getSeqString().toCharArray(), primer, rEdit).getResults()) {
                    PrimerMatch match = new PrimerMatch();
                    match.start = r.getPosition() - (primer.getPatternLength() + r.getScore());
                    match.end = r.getPosition();
                    match.score = r.getScore();
                    match.primerIndex = index;
                    rprimerMatches.add(match);
                }
            }

            if (fprimerMatches.isEmpty() || rprimerMatches.isEmpty()) {
                statsOut.println(seq.getSeqName() + "\tEither/or no forward/reverse primer hits");
                continue;
            }

            for (PrimerMatch fmatch : fprimerMatches) {
                PrimerMatch bestReverse = null;
                int bestScore = Integer.MAX_VALUE;
                for (PrimerMatch rmatch : rprimerMatches) {
                    if (rmatch.start > fmatch.end && rmatch.start - fmatch.end < bestScore) {
                        bestReverse = rmatch;
                        bestScore = rmatch.start - fmatch.end;
                    }
                }
                if (bestReverse == null) {
                    statsOut.println(seq.getSeqName() + "\tNo reverse primer before " + fmatch.end);
                    continue;
                }

                String slicedSeq = null;
                if (keepPrimers) {
                    slicedSeq = seq.getSeqString().substring(fmatch.start, bestReverse.end);
                } else {
                    slicedSeq = seq.getSeqString().substring(fmatch.end, bestReverse.start);
                }
                String seqid = seq.getSeqName() + "_" + fmatch.primerIndex + "_" + fmatch.start;

                if (slicedSeq.length() > minLength && slicedSeq.length() < maxLength) {
                    seqOut.writeSeq(seqid, "", slicedSeq);
                }

                DPMAlignment seqs = faligners[fmatch.primerIndex].align(seq.getSeqString().substring(fmatch.start, fmatch.end));
                System.err.println(">" + seqid);
                System.err.println(fprimerStrs[fmatch.primerIndex]);
                System.err.println(seq.getSeqString().substring(fmatch.start, fmatch.end));
                System.err.println();
                System.err.println(seqs.getAlignedMatchFragment());
                System.err.println(seqs.getAlignedProbe());
                System.err.println();

                statsOut.println(seq.getSeqName() + "\t" + seqid + "\t" + fmatch.primerIndex + "\t" + fmatch.start
                        + "\t" + fmatch.end + "\t" + fmatch.score + "\t" + bestReverse.primerIndex + "\t"
                        + bestReverse.start + "\t" + bestReverse.end + "\t" + bestReverse.score + "\t"
                        + slicedSeq.length());
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        statsOut.close();
        seqOut.close();
    }
}
From source file:com.genentech.retrival.SDFExport.SDFSDFExporter.java
public static void main(String[] args) throws ParseException, JDOMException, IOException {
    // create command line Options object
    Options options = new Options();
    Option opt = new Option("sqlFile", true, "sql-xml file");
    opt.setRequired(false);
    options.addOption(opt);

    opt = new Option("sqlName", true, "name of SQL element in xml file, Default sql.xml in 'sdfExport' config");
    opt.setRequired(false);
    options.addOption(opt);

    opt = new Option("selectStatement", true, "select statement to execute");
    opt.setRequired(false);
    options.addOption(opt);

    opt = new Option("paramTypes", true, "'|' separated list of parameter types to pass to statement int,float,string,date");
    opt.setRequired(false);
    options.addOption(opt);

    opt = new Option("o", "out", true, "output file");
    opt.setRequired(false);
    options.addOption(opt);

    opt = new Option("i", "in", true, "input file, oe or .tab each record executes the query once. Use '.tab' to read from stdin");
    opt.setRequired(false);
    options.addOption(opt);

    opt = new Option("queryTags", true, "'|' separated list of tags whose values are passed to the sql.");
    opt.setRequired(true);
    options.addOption(opt);

    opt = new Option("newLineReplacement", true, "If given, newlines in fields will be replaced by this string.");
    options.addOption(opt);

    opt = new Option("filterIfNoRecords", false, "If no rows are returned by the query that record is filtered out.");
    options.addOption(opt);

    CommandLineParser parser = new BasicParser();
    CommandLine cmd = null;
    try {
        cmd = parser.parse(options, args);
    } catch (Exception e) {
        System.err.println(e.getMessage());
        exitWithHelp(options);
    }

    String outFile = cmd.getOptionValue("o");
    String inFile = cmd.getOptionValue("i");
    String sqlFile = cmd.getOptionValue("sqlFile");
    String sqlName = cmd.getOptionValue("sqlName");
    String selStr = cmd.getOptionValue("selectStatement");
    String pTypes = cmd.getOptionValue("paramTypes");
    String newLineReplacement = cmd.getOptionValue("newLineReplacement");
    boolean printIfNoRecord = !cmd.hasOption("filterIfNoRecords");
    String[] tagStr = cmd.getOptionValue("queryTags").trim().split("\\|");

    try {
        SDFSDFExporter exporter = null;
        if ((sqlFile != null && sqlFile.length() > 0) || (sqlName != null && sqlName.length() > 0)) {
            if ((selStr != null && selStr.length() > 0) || (pTypes != null && pTypes.length() > 0)) {
                System.err.println("sqlFile and sqlName may not be used with selectStatement and paramTypes");
                exitWithHelp(options);
            }
            exporter = createFromFile(sqlFile, sqlName, inFile, outFile, tagStr, printIfNoRecord, newLineReplacement);
        } else if (selStr == null || selStr.length() == 0 || pTypes == null || pTypes.length() == 0) {
            System.err.println("sqlFile and sqlName or selectStatement and paramTypes must be given");
            exitWithHelp(options);
        } else {
            exporter = createFromStatementStr(selStr, pTypes, inFile, outFile, tagStr, printIfNoRecord, newLineReplacement);
        }

        exporter.export();
        exporter.close();
    } catch (Exception e) {
        e.printStackTrace();
        System.err.println();
        exitWithHelp(options);
    }
}
From source file:com.glaf.base.modules.sys.service.mybatis.SysTreeServiceImpl.java
public static void main(String[] args) {
    String str1 = "1|2|5|195235|";
    String str2 = "1|2|5|195235|195274|195347|195483|";
    String tmp = str2.substring(str1.length(), str2.length());
    System.out.println(tmp);
    StringTokenizer token = new StringTokenizer(tmp, "|");
    System.out.println(token.countTokens());
}
From source file:com.athena.peacock.agent.Starter.java
/**
 * @param args
 */
@SuppressWarnings("resource")
public static void main(String[] args) {
    int rand = (int) (Math.random() * 100) % 50;
    System.setProperty("random.seconds", Integer.toString(rand));

    String configFile = null;
    try {
        configFile = PropertyUtil.getProperty(PeacockConstant.CONFIG_FILE_KEY);
    } catch (Exception e) {
        // nothing to do.
    } finally {
        if (StringUtils.isEmpty(configFile)) {
            configFile = "/peacock/agent/config/agent.conf";
        }
    }

    /* The configuration file named by ${peacock.agent.config.file.name} must exist, be readable,
     * and provide the server IP and port. */
    String errorMsg = "\n\"" + configFile + "\" file does not exist or cannot read.\n"
            + "Please check \"" + configFile + "\" file exists and can read.";
    Assert.isTrue(AgentConfigUtil.exception == null, errorMsg);
    Assert.notNull(AgentConfigUtil.getConfig(PeacockConstant.SERVER_IP), "ServerIP cannot be empty.");
    Assert.notNull(AgentConfigUtil.getConfig(PeacockConstant.SERVER_PORT), "ServerPort cannot be empty.");

    /* Read the Agent ID from the file named by ${peacock.agent.agent.file.name};
     * if the file is missing or its contents are invalid, generate a new Agent ID and save it. */
    String agentFile = null;
    String agentId = null;
    try {
        agentFile = PropertyUtil.getProperty(PeacockConstant.AGENT_ID_FILE_KEY);
    } catch (Exception e) {
        // nothing to do.
    } finally {
        if (StringUtils.isEmpty(agentFile)) {
            agentFile = "/peacock/agent/.agent";
        }
    }

    File file = new File(agentFile);
    boolean isNew = false;

    if (file.exists()) {
        try {
            agentId = IOUtils.toString(file.toURI());
            // A stored agent ID must be a 36-character UUID; anything else is treated as invalid.
            if (StringUtils.isEmpty(agentId) || agentId.length() != 36) {
                throw new IOException();
            }
        } catch (IOException e) {
            logger.error(agentFile + " file cannot read or saved invalid agent ID.", e);
            agentId = PeacockAgentIDGenerator.generateId();
            isNew = true;
        }
    } else {
        agentId = PeacockAgentIDGenerator.generateId();
        isNew = true;
    }

    if (isNew) {
        logger.info("New Agent-ID({}) has been generated.", agentId);
        try {
            file.setWritable(true);
            OutputStreamWriter output = new OutputStreamWriter(new FileOutputStream(file));
            output.write(agentId);
            file.setReadOnly();
            IOUtils.closeQuietly(output);
        } catch (UnsupportedEncodingException e) {
            logger.error("UnsupportedEncodingException has occurred : ", e);
        } catch (FileNotFoundException e) {
            logger.error("FileNotFoundException has occurred : ", e);
        } catch (IOException e) {
            logger.error("IOException has occurred : ", e);
        }
    }

    // Spring Application Context Loading
    logger.debug("Starting application context...");
    AbstractApplicationContext applicationContext = new ClassPathXmlApplicationContext("classpath:spring/context-*.xml");
    applicationContext.registerShutdownHook();
}
From source file:com.jcraft.weirdx.XDMCP.java
public static void main(String[] args) {
    String usage = "usage: [-query|-broadcast] address -display displayname";
    int op = -1;
    String address = null;
    String displayaddress = null;
    int displaynum = 0;

    if (args.length == 0) {
        System.err.println(usage);
        System.exit(-1);
    }

    for (int i = 0; i < args.length; i++) {
        if (args[i].equals("-query")) {
            op = Query;
            i++;
            address = args[i];
            continue;
        }
        if (args[i].equals("-broadcast")) {
            op = BroadcastQuery;
            i++;
            address = args[i];
            continue;
        }
        if (args[i].equals("-display")) {
            i++;
            displayaddress = args[i].substring(0, args[i].indexOf(":"));
            try {
                String foo = args[i].substring(args[i].indexOf(":") + 1, args[i].length());
                foo = foo.substring(0, (foo.indexOf(".") == -1 ? foo.length() : foo.indexOf(".")));
                displaynum = Integer.parseInt(foo);
            } catch (Exception e) {
            }
        }
    }

    if (op == -1 || address == null || displayaddress == null) {
        System.err.println(usage);
        System.exit(-1);
    }

    XDMCP foo = new XDMCP(op, address, displayaddress, displaynum);
    foo.start();
}
From source file:com.genentech.chemistry.openEye.apps.QTorsionProfileGenerator.java
public static void main(String... args) throws IOException {
    // create command line Options object
    Options options = new Options();
    Option opt = new Option(OPT_INFILE, true, "input file oe-supported Use .sdf|.smi to specify the file type.");
    opt.setRequired(true);
    options.addOption(opt);

    opt = new Option(OPT_SDFFILE, true, "file to write conformers for debugging (oe-supported Use .sdf|.smi to specify the file type).");
    options.addOption(opt);

    opt = new Option(OPT_WORKDIR, true, "Write files into this directory!");
    opt.setRequired(false);
    options.addOption(opt);

    opt = new Option(OPT_OUTPREFIX, true, "Prefix for output file.");
    opt.setRequired(false);
    options.addOption(opt);

    opt = new Option(OPT_OUTNAMETAG, true, "TagName of field containing outFilePrefix. (Use TITLE for mol title).");
    opt.setRequired(false);
    options.addOption(opt);

    opt = new Option(OPT_TEMPLATE, true, "Template file for quantum program containing #XYZ# placeholder line. A #FName# placeholder can be used for chk files and the like.");
    opt.setRequired(true);
    options.addOption(opt);

    opt = new Option(OPT_SPIN_MULTIPLICITY, true, "Spin Multiplicity of the input molecules (default 1)");
    opt.setRequired(false);
    options.addOption(opt);

    opt = new Option(OPT_NCPU, true, "Overwrite nprocshared parameter in gaussian input file if given");
    opt.setRequired(false);
    options.addOption(opt);

    opt = new Option(OPT_MEM, true, "Memory for gaussian default='10GB' replaces #mem# in template");
    opt.setRequired(false);
    options.addOption(opt);

    opt = new Option(OPT_MINIMIZE, false, "minimize conformer at each step using MMFFs");
    opt.setRequired(false);
    options.addOption(opt);

    opt = new Option(OPT_CONSTRIANT, true, "one of strong (90), medium (45), weak (20), none or a floating point number"
            + " specifying the strength of tethered constraints for -doMinimize (def=strong)");
    opt.setRequired(false);
    options.addOption(opt);

    opt = new Option(OPT_BONDFILE, true, "Structure file containing 4 atoms defining the torsion. "
            + "In each input molecule the atoms closest to these atoms are used to define the torsion.");
    opt.setRequired(true);
    options.addOption(opt);

    opt = new Option(OPT_STARTTorsion, true, "The torsion in your inMol will be rotated by this value for the first job");
    opt.setRequired(false);
    options.addOption(opt);

    opt = new Option(OPT_TORSIONIncrement, true, "Increment each subsequent conformation by this step size");
    opt.setRequired(true);
    options.addOption(opt);

    opt = new Option(OPT_NSTEPS, true, "Number of conformations to create");
    opt.setRequired(true);
    options.addOption(opt);

    opt = new Option(OPT_MAXCONFS_PER_STEP, true, "While holding the torsion fixed, maximum number of conformations of free atoms to generate. default=1");
    opt.setRequired(false);
    options.addOption(opt);

    opt = new Option(OPT_COREFILE, true, "Outputfile to store guessed core.");
    opt.setRequired(false);
    options.addOption(opt);

    opt = new Option(OPT_DEBUG, false, "Produce more debug output.");
    opt.setRequired(false);
    options.addOption(opt);

    CommandLineParser parser = new PosixParser();
    CommandLine cmd = null;
    try {
        cmd = parser.parse(options, args);
    } catch (Exception e) {
        System.err.println(e.getMessage());
        exitWithHelp(options);
    }
    args = cmd.getArgs();

    if (args.length != 0) {
        System.err.println("Unknown arguments" + args);
        exitWithHelp(options);
    }

    if (cmd.hasOption("d")) {
        System.err.println("Start debugger and press return:");
        new BufferedReader(new InputStreamReader(System.in)).readLine();
    }

    String inFile = cmd.getOptionValue(OPT_INFILE);
    String sdfFile = cmd.getOptionValue(OPT_SDFFILE);
    String outPrefix = cmd.getOptionValue(OPT_OUTPREFIX);
    String outNameTag = cmd.getOptionValue(OPT_OUTNAMETAG);
    String workDir = cmd.getOptionValue(OPT_WORKDIR);
    String coreFile = cmd.getOptionValue(OPT_COREFILE);
    String template = cmd.getOptionValue(OPT_TEMPLATE);
    String bondFile = cmd.getOptionValue(OPT_BONDFILE);
    String nCPU = cmd.getOptionValue(OPT_NCPU);
    String memStr = cmd.getOptionValue(OPT_MEM);
    String maxConfsStr = cmd.getOptionValue(OPT_MAXCONFS_PER_STEP);
    String spinMult = cmd.getOptionValue(OPT_SPIN_MULTIPLICITY);
    boolean doMinimize = cmd.hasOption(OPT_MINIMIZE);
    String constraintStrength = cmd.getOptionValue(OPT_CONSTRIANT);
    int nStep = Integer.parseInt(cmd.getOptionValue(OPT_NSTEPS));

    if (memStr == null || memStr.length() == 0)
        memStr = "10GB";
    if (spinMult == null || spinMult.length() == 0)
        spinMult = "1";

    if ((outPrefix == null && outNameTag == null) || (outPrefix != null && outNameTag != null)) {
        System.err.println("Exactly one of -outPrefix or outNameTag must be given!");
        exitWithHelp(options);
    }
    if (workDir == null || workDir.trim().length() == 0)
        workDir = ".";

    double startTorsion = Double.NaN;
    if (cmd.hasOption(OPT_STARTTorsion))
        startTorsion = Double.parseDouble(cmd.getOptionValue(OPT_STARTTorsion));
    double torInc = Double.parseDouble(cmd.getOptionValue(OPT_TORSIONIncrement));
    int maxStepConfs = maxConfsStr == null ? 1 : Integer.parseInt(maxConfsStr);

    QTorsionProfileGenerator calculator = new QTorsionProfileGenerator(template, workDir, outPrefix,
            outNameTag, bondFile, spinMult, startTorsion, torInc, nStep, maxStepConfs, doMinimize,
            constraintStrength, cmd.hasOption(OPT_DEBUG));
    calculator.run(inFile, sdfFile, coreFile, nCPU, memStr);
}