List of usage examples for java.lang.String.equals(Object)
public boolean equals(Object anObject)
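Before the longer, real-world examples below, here is a minimal self-contained sketch (the literal values are made up for illustration) of what equals actually checks: character content, not object identity. It also previews the two idioms that recur throughout this page: equalsIgnoreCase for case-insensitive matching, and writing a known literal on the left so that a null argument cannot trigger a NullPointerException.

public class StringEqualsBasics {
    public static void main(String[] args) {
        String a = "xml";
        String b = new String("xml");

        System.out.println(a == b);                     // false: different object references
        System.out.println(a.equals(b));                // true: same character sequence
        System.out.println(a.equalsIgnoreCase("XML"));  // true: case-insensitive variant

        String maybeNull = null;
        // Literal-first comparison is null-safe; maybeNull.equals("xml") would throw NullPointerException
        System.out.println("xml".equals(maybeNull));                     // false
        // java.util.Objects.equals tolerates null on either side
        System.out.println(java.util.Objects.equals(maybeNull, "xml")); // false
    }
}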
From source file:org.silverpeas.dbbuilder.DBBuilder.java
/** * @param args/*w w w . j a v a 2s .c o m*/ * @see */ public static void main(String[] args) { ClassPathXmlApplicationContext springContext = new ClassPathXmlApplicationContext( "classpath:/spring-jdbc-datasource.xml"); try { // Ouverture des traces Date startDate = new Date(); System.out.println( MessageFormat.format(messages.getString("dbbuilder.start"), DBBuilderAppVersion, startDate)); console = new Console(DBBuilder.class); console.printMessage("*************************************************************"); console.printMessage( MessageFormat.format(messages.getString("dbbuilder.start"), DBBuilderAppVersion, startDate)); // Lecture des variables d'environnement partir de dbBuilderSettings dbBuilderResources = FileUtil .loadResource("/org/silverpeas/dbBuilder/settings/dbBuilderSettings.properties"); // Lecture des paramtres d'entre params = new CommandLineParameters(console, args); if (params.isSimulate() && DatabaseType.ORACLE == params.getDbType()) { throw new Exception(messages.getString("oracle.simulate.error")); } console.printMessage(messages.getString("jdbc.connection.configuration")); console.printMessage(ConnectionFactory.getConnectionInfo()); console.printMessage("\tAction : " + params.getAction()); console.printMessage("\tVerbose mode : " + params.isVerbose()); console.printMessage("\tSimulate mode : " + params.isSimulate()); if (Action.ACTION_CONNECT == params.getAction()) { // un petit message et puis c'est tout console.printMessage(messages.getString("connection.success")); System.out.println(messages.getString("connection.success")); } else { // Modules en place sur la BD avant install console.printMessage("DB Status before build :"); List<String> packagesIntoDB = checkDBStatus(); // initialisation d'un vecteur des instructions SQL passer en fin d'upgrade // pour mettre niveau les versions de modules en base MetaInstructions sqlMetaInstructions = new MetaInstructions(); File dirXml = new File(params.getDbType().getDBContributionDir()); DBXmlDocument destXml = loadMasterContribution(dirXml); UninstallInformations processesToCacheIntoDB = new UninstallInformations(); File[] listeFileXml = dirXml.listFiles(); Arrays.sort(listeFileXml); List<DBXmlDocument> listeDBXmlDocument = new ArrayList<DBXmlDocument>(listeFileXml.length); int ignoredFiles = 0; // Ouverture de tous les fichiers de configurations console.printMessage(messages.getString("ignored.contribution")); for (File xmlFile : listeFileXml) { if (xmlFile.isFile() && "xml".equals(FileUtil.getExtension(xmlFile)) && !(FIRST_DBCONTRIBUTION_FILE.equalsIgnoreCase(xmlFile.getName())) && !(MASTER_DBCONTRIBUTION_FILE.equalsIgnoreCase(xmlFile.getName()))) { DBXmlDocument fXml = new DBXmlDocument(dirXml, xmlFile.getName()); fXml.load(); // vrification des dpendances et prise en compte uniquement si dependences OK if (hasUnresolvedRequirements(listeFileXml, fXml)) { console.printMessage( '\t' + xmlFile.getName() + " (because of unresolved requirements)."); ignoredFiles++; } else if (ACTION_ENFORCE_UNINSTALL == params.getAction()) { console.printMessage('\t' + xmlFile.getName() + " (because of " + ACTION_ENFORCE_UNINSTALL + " mode)."); ignoredFiles++; } else { listeDBXmlDocument.add(fXml); } } } if (0 == ignoredFiles) { console.printMessage("\t(none)"); } // prpare une HashMap des modules prsents en fichiers de contribution Map packagesIntoFile = new HashMap(); int j = 0; console.printMessage(messages.getString("merged.contribution")); console.printMessage(params.getAction().toString()); if (ACTION_ENFORCE_UNINSTALL != 
params.getAction()) { console.printMessage('\t' + FIRST_DBCONTRIBUTION_FILE); j++; } for (DBXmlDocument currentDoc : listeDBXmlDocument) { console.printMessage('\t' + currentDoc.getName()); j++; } if (0 == j) { console.printMessage("\t(none)"); } // merge des diffrents fichiers de contribution ligibles : console.printMessage("Build decisions are :"); // d'abord le fichier dbbuilder-contribution ... DBXmlDocument fileXml; if (ACTION_ENFORCE_UNINSTALL != params.getAction()) { try { fileXml = new DBXmlDocument(dirXml, FIRST_DBCONTRIBUTION_FILE); fileXml.load(); } catch (Exception e) { // contribution de dbbuilder non trouve -> on continue, on est certainement en train // de desinstaller la totale fileXml = null; } if (null != fileXml) { DBBuilderFileItem dbbuilderItem = new DBBuilderFileItem(fileXml); packagesIntoFile.put(dbbuilderItem.getModule(), null); mergeActionsToDo(dbbuilderItem, destXml, processesToCacheIntoDB, sqlMetaInstructions); } } // ... puis les autres for (DBXmlDocument currentDoc : listeDBXmlDocument) { DBBuilderFileItem tmpdbbuilderItem = new DBBuilderFileItem(currentDoc); packagesIntoFile.put(tmpdbbuilderItem.getModule(), null); mergeActionsToDo(tmpdbbuilderItem, destXml, processesToCacheIntoDB, sqlMetaInstructions); } // ... et enfin les pices BD dsinstaller // ... attention, l'ordonnancement n'tant pas dispo, on les traite dans // l'ordre inverse pour faire passer busCore a la fin, de nombreuses contraintes // des autres modules referencant les PK de busCore List<String> itemsList = new ArrayList<String>(); boolean foundDBBuilder = false; for (String dbPackage : packagesIntoDB) { if (!packagesIntoFile.containsKey(dbPackage)) { // Package en base et non en contribution -> candidat desinstallation if (DBBUILDER_MODULE.equalsIgnoreCase(dbPackage)) { foundDBBuilder = true; } else if (ACTION_ENFORCE_UNINSTALL == params.getAction()) { if (dbPackage.equals(params.getModuleName())) { itemsList.add(0, dbPackage); } } else { itemsList.add(0, dbPackage); } } } if (foundDBBuilder) { if (ACTION_ENFORCE_UNINSTALL == params.getAction()) { if (DBBUILDER_MODULE.equals(params.getModuleName())) { itemsList.add(itemsList.size(), DBBUILDER_MODULE); } } else { itemsList.add(itemsList.size(), DBBUILDER_MODULE); } } for (String item : itemsList) { console.printMessage("**** Treating " + item + " ****"); DBBuilderDBItem tmpdbbuilderItem = new DBBuilderDBItem(item); mergeActionsToDo(tmpdbbuilderItem, destXml, processesToCacheIntoDB, sqlMetaInstructions); } destXml.setName("res.txt"); destXml.save(); console.printMessage("Build parts are :"); // Traitement des pices slectionnes // remarque : durant cette phase, les erreurs sont traites -> on les catche en // retour sans les retraiter if (ACTION_INSTALL == params.getAction()) { processDB(destXml, processesToCacheIntoDB, sqlMetaInstructions, TAGS_TO_MERGE_4_INSTALL); } else if (ACTION_UNINSTALL == params.getAction() || ACTION_ENFORCE_UNINSTALL == params.getAction()) { processDB(destXml, processesToCacheIntoDB, sqlMetaInstructions, TAGS_TO_MERGE_4_UNINSTALL); } else if (ACTION_OPTIMIZE == params.getAction()) { processDB(destXml, processesToCacheIntoDB, sqlMetaInstructions, TAGS_TO_MERGE_4_OPTIMIZE); } else if (ACTION_ALL == params.getAction()) { processDB(destXml, processesToCacheIntoDB, sqlMetaInstructions, TAGS_TO_MERGE_4_ALL); } // Modules en place sur la BD en final console.printMessage("Finally DB Status :"); checkDBStatus(); } Date endDate = new Date(); console.printMessage(MessageFormat.format(messages.getString("dbbuilder.success"), endDate)); 
System.out.println("*******************************************************************"); System.out.println(MessageFormat.format(messages.getString("dbbuilder.success"), endDate)); } catch (Exception e) { e.printStackTrace(); console.printError(e.getMessage(), e); Date endDate = new Date(); console.printError(MessageFormat.format(messages.getString("dbbuilder.failure"), endDate)); System.out.println("*******************************************************************"); System.out.println(MessageFormat.format(messages.getString("dbbuilder.failure"), endDate)); System.exit(1); } finally { springContext.close(); console.close(); } }
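The file-filtering loop above keeps a contribution file only when "xml".equals(FileUtil.getExtension(xmlFile)) holds and the reserved file names do not match via equalsIgnoreCase. The stripped-down sketch below isolates that pattern; getExtension and the file names are simplified stand-ins for the FileUtil helper and the FIRST_DBCONTRIBUTION_FILE / MASTER_DBCONTRIBUTION_FILE constants used in the example.

import java.io.File;

public class XmlContributionFilter {

    // Simplified stand-in for the FileUtil.getExtension helper used above
    static String getExtension(File f) {
        String name = f.getName();
        int dot = name.lastIndexOf('.');
        return dot < 0 ? null : name.substring(dot + 1);
    }

    public static void main(String[] args) {
        File[] candidates = { new File("businessContribution.xml"),
                new File("dbbuilder-contribution.xml"), new File("readme.txt") };
        for (File f : candidates) {
            // Constant-first equals: safe even if getExtension ever returns null
            boolean isXml = "xml".equals(getExtension(f));
            // equalsIgnoreCase tolerates case differences in the reserved file name
            boolean isReserved = "dbbuilder-contribution.xml".equalsIgnoreCase(f.getName());
            if (isXml && !isReserved) {
                System.out.println("would load: " + f.getName());
            }
        }
    }
}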
From source file:com.sds.acube.ndisc.xadmin.XNDiscAdminUtil.java
public static void main(String args[]) { if (args.length == 0) { XNDiscAdminUtil.printAdminUsage(null, null); // System.exit(0); }// w w w. j ava 2 s . c om XNDiscAdminFile file = new XNDiscAdminFile(printlog, out, logger); XNDiscAdminMedia media = new XNDiscAdminMedia(printlog, out, logger); XNDiscAdminVolume volume = new XNDiscAdminVolume(printlog, out, logger); XNDiscAdminEnDecrypt endecrypt = new XNDiscAdminEnDecrypt(printlog, out, logger); Scanner in = new Scanner(System.in); String input = ""; List<String> options = null; Scanner part = null; do { input = in.nextLine(); part = new Scanner(input).useDelimiter(" "); options = new ArrayList<String>(); while (part.hasNext()) { String val = part.next().trim(); if (StringUtils.isEmpty(val)) { continue; } options.add(val); } if (options.size() < 2 || input.equalsIgnoreCase("q")) { if (input != null && input.trim().equalsIgnoreCase("q")) { System.out.println(LINE_SEPERATOR + "XNDiscAdminUtil quit!!!" + LINE_SEPERATOR); } else { System.out .println(LINE_SEPERATOR + "XNDiscAdminUtil are invalid parameters!!!" + LINE_SEPERATOR); } System.exit(0); } String main_op = (StringUtils.isEmpty(options.get(0))) ? "" : options.get(0); String process_op = (StringUtils.isEmpty(options.get(1))) ? "" : options.get(1); if (main_op.equals("clear")) { if (process_op.equals("screen")) { clearConsoleOutput(); } } else if (main_op.equals("file")) { if (process_op.equals("ls")) { String fileid = ""; if (options.size() > 2) { fileid = options.get(2); } if (StringUtils.isEmpty(fileid)) { file.selectFileList(); } else { file.selectFileById(fileid); } } else if (process_op.equals("reg")) { String host = ""; int port = -1; String filepath = ""; int vol = -1; if (options.size() == 4) { host = XNDiscAdminConfig.getString(XNDiscAdminConfig.HOST); port = XNDiscAdminConfig.getInt(XNDiscAdminConfig.PORT); filepath = options.get(2); if (isInteger(options.get(3))) { vol = Integer.parseInt(options.get(3)); } } else if (options.size() > 5) { host = options.get(2); port = Integer.parseInt(options.get(3)); filepath = options.get(4); if (isInteger(options.get(5))) { vol = Integer.parseInt(options.get(5)); } } if (!StringUtils.isEmpty(host) && !StringUtils.isEmpty(filepath) && port > 0 && vol > 0) { file.regFile(host, port, filepath, vol, "0"); } else { XNDiscAdminUtil.printAdminUsage(main_op, process_op); } } else if (process_op.equals("get")) { String host = ""; int port = -1; String fileid = ""; String filepath = ""; if (options.size() == 4) { host = XNDiscAdminConfig.getString(XNDiscAdminConfig.HOST); port = XNDiscAdminConfig.getInt(XNDiscAdminConfig.PORT); fileid = options.get(2); filepath = options.get(3); } else if (options.size() > 5) { host = options.get(2); if (isInteger(options.get(3))) { port = Integer.parseInt(options.get(3)); } fileid = options.get(4); filepath = options.get(5); } if (!StringUtils.isEmpty(host) && !StringUtils.isEmpty(fileid) && !StringUtils.isEmpty(filepath) && port > 0) { file.getFile(host, port, fileid, filepath); } else { XNDiscAdminUtil.printAdminUsage(main_op, process_op); } } else if (process_op.equals("wh")) { String fileid = ""; if (options.size() > 2) { fileid = options.get(2); } if (!StringUtils.isEmpty(fileid)) { file.getFilePathByFileId(fileid); } else { XNDiscAdminUtil.printAdminUsage(main_op, process_op); } } else if (process_op.equals("rm")) { String host = ""; int port = -1; String fileid = ""; if (options.size() == 3) { host = XNDiscAdminConfig.getString(XNDiscAdminConfig.HOST); port = XNDiscAdminConfig.getInt(XNDiscAdminConfig.PORT); 
fileid = options.get(2); } else if (options.size() > 4) { host = options.get(2); if (isInteger(options.get(3))) { port = Integer.parseInt(options.get(3)); } fileid = options.get(4); } if (!StringUtils.isEmpty(host) && !StringUtils.isEmpty(fileid) && port > 0) { file.removeFile(host, port, fileid); } else { XNDiscAdminUtil.printAdminUsage(main_op, process_op); } } } else if (main_op.equals("media")) { if (process_op.equals("mk")) { String host = ""; int port = -1; String name = ""; int type = 0; String path = ""; String desc = " "; int maxsize = -1; int vol = -1; if (options.size() == 7) { host = XNDiscAdminConfig.getString(XNDiscAdminConfig.HOST); port = XNDiscAdminConfig.getInt(XNDiscAdminConfig.PORT); name = options.get(2); if (isInteger(options.get(3))) { type = Integer.parseInt(options.get(3)); } path = options.get(4); if (isInteger(options.get(5))) { maxsize = Integer.parseInt(options.get(5)); } if (isInteger(options.get(6))) { vol = Integer.parseInt(options.get(6)); } } else if (options.size() == 8) { host = XNDiscAdminConfig.getString(XNDiscAdminConfig.HOST); port = XNDiscAdminConfig.getInt(XNDiscAdminConfig.PORT); name = options.get(2); if (isInteger(options.get(3))) { type = Integer.parseInt(options.get(3)); } path = options.get(4); desc = options.get(5) + " "; if (isInteger(options.get(6))) { maxsize = Integer.parseInt(options.get(6)); } if (isInteger(options.get(7))) { vol = Integer.parseInt(options.get(7)); } } else if (options.size() == 9) { host = options.get(2); if (isInteger(options.get(3))) { port = Integer.parseInt(options.get(3)); } name = options.get(4); if (isInteger(options.get(5))) { type = Integer.parseInt(options.get(5)); } path = options.get(6); if (isInteger(options.get(7))) { maxsize = Integer.parseInt(options.get(7)); } if (isInteger(options.get(8))) { vol = Integer.parseInt(options.get(8)); } } else if (options.size() > 9) { host = options.get(2); if (isInteger(options.get(3))) { port = Integer.parseInt(options.get(3)); } name = options.get(4); if (isInteger(options.get(5))) { type = Integer.parseInt(options.get(5)); } path = options.get(6); desc = options.get(7) + " "; if (isInteger(options.get(8))) { maxsize = Integer.parseInt(options.get(8)); } if (isInteger(options.get(9))) { vol = Integer.parseInt(options.get(9)); } } if (!StringUtils.isEmpty(host) && !StringUtils.isEmpty(name) && !StringUtils.isEmpty(path) && port > 0 && vol > 0 && maxsize > 0) { media.makeMedia(host, port, name, type, path, desc, maxsize, vol); } else { XNDiscAdminUtil.printAdminUsage(main_op, process_op); } } else if (process_op.equals("ls")) { String mediaid = "-1"; if (options.size() > 2) { mediaid = options.get(2); } if (StringUtils.isEmpty(mediaid) || mediaid.equals("-1")) { media.selectMediaList(); } else { int id = -1; if (isInteger(mediaid)) { id = Integer.parseInt(mediaid); } if (id > 0) { media.selectMediaById(id); } else { XNDiscAdminUtil.printAdminUsage(main_op, process_op); } } } else if (process_op.equals("rm")) { String mediaid = "-1"; if (options.size() > 2) { mediaid = options.get(2); } if (!StringUtils.isEmpty(mediaid)) { int id = -1; if (isInteger(mediaid)) { id = Integer.parseInt(options.get(2)); } if (id > 0) { media.removeMedia(id); } else { XNDiscAdminUtil.printAdminUsage(main_op, process_op); } } } else if (process_op.equals("ch")) { int mediaid = -1; String name = ""; int type = 0; String path = ""; String desc = ""; int maxsize = -1; int vol = -1; if (options.size() == 8) { if (isInteger(options.get(2))) { mediaid = Integer.parseInt(options.get(2)); } name = 
options.get(3); if (isInteger(options.get(4))) { type = Integer.parseInt(options.get(4)); } path = options.get(5); if (isInteger(options.get(6))) { maxsize = Integer.parseInt(options.get(6)); } if (isInteger(options.get(7))) { vol = Integer.parseInt(options.get(7)); } } else if (options.size() > 8) { if (isInteger(options.get(2))) { mediaid = Integer.parseInt(options.get(2)); } name = options.get(3); if (isInteger(options.get(4))) { type = Integer.parseInt(options.get(4)); } path = options.get(5); desc = options.get(6); if (isInteger(options.get(7))) { maxsize = Integer.parseInt(options.get(7)); } if (isInteger(options.get(8))) { vol = Integer.parseInt(options.get(8)); } } if (!StringUtils.isEmpty(name) && !StringUtils.isEmpty(path) && mediaid > 0 && maxsize > 0 && vol > 0) { media.changeMedia(mediaid, name, type, path, desc, maxsize, vol); } else { XNDiscAdminUtil.printAdminUsage(main_op, process_op); } } } else if (main_op.equals("vol")) { if (process_op.equals("mk")) { String name = ""; String access = ""; String desc = " "; if (options.size() == 4) { name = options.get(2); access = options.get(3); } else if (options.size() > 4) { name = options.get(2); access = options.get(3); desc = options.get(4) + " "; } if (!StringUtils.isEmpty(name) && !StringUtils.isEmpty(access)) { volume.makeVolume(name, access, desc); } else { XNDiscAdminUtil.printAdminUsage(main_op, process_op); } } else if (process_op.equals("ls")) { String volid = "-1"; if (options.size() > 2) { volid = options.get(2); } if (StringUtils.isEmpty(volid) || volid.equals("-1")) { volume.selectVolumeList(); } else { int volumeid = -1; if (isInteger(volid)) { volumeid = Integer.parseInt(volid); } if (volumeid > 0) { volume.selectVolumeById(volumeid); } else { XNDiscAdminUtil.printAdminUsage(main_op, process_op); } } } else if (process_op.equals("rm")) { String volid = "-1"; if (options.size() > 2) { volid = options.get(2); } if (!StringUtils.isEmpty(volid)) { int volumeid = -1; if (isInteger(volid)) { volumeid = Integer.parseInt(volid); } if (volumeid > 0) { volume.removeVolume(volumeid); } } else { XNDiscAdminUtil.printAdminUsage(main_op, process_op); } } else if (process_op.equals("ch")) { int volumeid = -1; String name = ""; String access = ""; String desc = ""; if (options.size() == 5) { if (isInteger(options.get(2))) { volumeid = Integer.parseInt(options.get(2)); } name = options.get(3); access = options.get(4); } else if (options.size() > 5) { if (isInteger(options.get(2))) { volumeid = Integer.parseInt(options.get(2)); } name = options.get(3); access = options.get(4); desc = options.get(5) + " "; } if (!StringUtils.isEmpty(name) && !StringUtils.isEmpty(access) && volumeid > 0) { volume.changeVolume(volumeid, name, access, desc); } else { XNDiscAdminUtil.printAdminUsage(main_op, process_op); } } } else if (main_op.equals("id")) { String id = ""; if (options.size() > 2) { id = options.get(2); } if (process_op.equals("enc")) { if (!StringUtils.isEmpty(id)) { endecrypt.encrypt(id); } else { XNDiscAdminUtil.printAdminUsage(main_op, process_op); } } else if (process_op.equals("dec")) { if (!StringUtils.isEmpty(id)) { endecrypt.decrypt(id); } else { XNDiscAdminUtil.printAdminUsage(main_op, process_op); } } } part.close(); } while (!input.equalsIgnoreCase("q")); in.close(); }
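The admin console above dispatches on main_op and process_op with long equals chains and quits on a case-insensitive "q". A compact sketch of the same dispatch idea follows (the commands and messages are placeholders); on Java 7+ a switch on a String is an alternative that the compiler resolves with hashCode() plus equals() under the hood.

import java.util.Scanner;

public class EqualsCommandLoop {
    public static void main(String[] args) {
        Scanner in = new Scanner(System.in);
        String input;
        do {
            input = in.nextLine().trim();
            String[] parts = input.split("\\s+");
            if (parts.length < 2) {
                continue; // the do-while condition below still sees "q" and exits
            }
            String mainOp = parts[0];
            String processOp = parts[1];
            // Same dispatch idea as the equals chains above; a switch on a String
            // is compiled into hashCode() checks followed by equals() comparisons
            switch (mainOp) {
            case "file":
                System.out.println("file operation: " + processOp);
                break;
            case "media":
                System.out.println("media operation: " + processOp);
                break;
            default:
                System.out.println("unknown operation: " + mainOp);
            }
        } while (!input.equalsIgnoreCase("q")); // case-insensitive quit, as in the utility above
        in.close();
    }
}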
From source file:edu.upenn.egricelab.AlignerBoost.FilterSAMAlignPE.java
public static void main(String[] args) { if (args.length == 0) { printUsage();/* www . j a v a 2 s .c om*/ return; } try { parseOptions(args); } catch (IllegalArgumentException e) { System.err.println("Error: " + e.getMessage()); printUsage(); return; } // Read in chrList, if specified if (chrFile != null) { chrFilter = new HashSet<String>(); try { BufferedReader chrFilterIn = new BufferedReader(new FileReader(chrFile)); String chr = null; while ((chr = chrFilterIn.readLine()) != null) chrFilter.add(chr); chrFilterIn.close(); if (verbose > 0) System.err.println( "Only looking at alignments on " + chrFilter.size() + " specified chromosomes"); } catch (IOException e) { System.err.println("Error: " + e.getMessage()); return; } } if (verbose > 0) { // Start the processMonitor processMonitor = new Timer(); // Start the ProcessStatusTask statusTask = new ProcessStatusTask(); // Schedule to show the status every 1 second processMonitor.scheduleAtFixedRate(statusTask, 0, statusFreq); } // Read in known SNP file, if specified if (knownSnpFile != null) { if (verbose > 0) System.err.println("Checking known SNPs from user specified VCF file"); knownVCF = new VCFFileReader(new File(knownSnpFile)); } SamReaderFactory readerFac = SamReaderFactory.makeDefault(); SAMFileWriterFactory writerFac = new SAMFileWriterFactory(); if (!isSilent) readerFac.validationStringency(ValidationStringency.LENIENT); // use LENIENT stringency else readerFac.validationStringency(ValidationStringency.SILENT); // use SILENT stringency SamReader in = readerFac.open(new File(inFile)); SAMFileHeader inHeader = in.getFileHeader(); if (inHeader.getGroupOrder() == GroupOrder.reference && inHeader.getSortOrder() == SortOrder.coordinate) System.err.println("Warning: Input file '" + inFile + "' might be sorted by coordinate and cannot be correctly processed!"); SAMFileHeader header = inHeader.clone(); // copy the inFile header as outFile header // Add new programHeader SAMProgramRecord progRec = new SAMProgramRecord(progName); progRec.setProgramName(progName); progRec.setProgramVersion(progVer); progRec.setCommandLine(StringUtils.join(" ", args)); header.addProgramRecord(progRec); //System.err.println(inFile + " groupOrder: " + in.getFileHeader().getGroupOrder() + " sortOrder: " + in.getFileHeader().getSortOrder()); // reset the orders header.setGroupOrder(groupOrder); header.setSortOrder(sortOrder); // write SAMHeader String prevID = null; SAMRecord prevRecord = null; List<SAMRecord> alnList = new ArrayList<SAMRecord>(); List<SAMRecordPair> alnPEList = null; // Estimate fragment length distribution by scan one-pass through the alignments SAMRecordIterator results = in.iterator(); if (!NO_ESTIMATE) { if (verbose > 0) { System.err.println("Estimating insert fragment size distribution ..."); statusTask.reset(); statusTask.setInfo("alignments scanned"); } long N = 0; double fragL_S = 0; // fragLen sum double fragL_SS = 0; // fragLen^2 sum while (results.hasNext()) { SAMRecord record = results.next(); if (verbose > 0) statusTask.updateStatus(); if (record.getFirstOfPairFlag() && !record.isSecondaryOrSupplementary()) { double fragLen = Math.abs(record.getInferredInsertSize()); if (fragLen != 0 && fragLen >= MIN_FRAG_LEN && fragLen <= MAX_FRAG_LEN) { // only consider certain alignments N++; fragL_S += fragLen; fragL_SS += fragLen * fragLen; } // stop estimate if already enough if (MAX_ESTIMATE_SCAN > 0 && N >= MAX_ESTIMATE_SCAN) break; } } if (verbose > 0) statusTask.finish(); // estimate fragment size if (N >= MIN_ESTIMATE_BASE) { // 
override command line values MEAN_FRAG_LEN = fragL_S / N; SD_FRAG_LEN = Math.sqrt((N * fragL_SS - fragL_S * fragL_S) / (N * (N - 1))); String estStr = String.format("Estimated fragment size distribution: N(%.1f, %.1f)", MEAN_FRAG_LEN, SD_FRAG_LEN); if (verbose > 0) System.err.println(estStr); // also add the estimation to comment header.addComment(estStr); } else { System.err.println( "Unable to estimate the fragment size distribution due to too few observed alignments"); System.err.println( "You have to specify the '--mean-frag-len' and '--sd-frag-len' on the command line and re-run this step"); statusTask.cancel(); processMonitor.cancel(); return; } // Initiate the normal model normModel = new NormalDistribution(MEAN_FRAG_LEN, SD_FRAG_LEN); // reset the iterator, if necessary if (in.type() == SamReader.Type.SAM_TYPE) { try { in.close(); } catch (IOException e) { System.err.println(e.getMessage()); } in = readerFac.open(new File(inFile)); } results.close(); results = in.iterator(); } // end of NO_ESTIMATE SAMFileWriter out = OUT_IS_SAM ? writerFac.makeSAMWriter(header, false, new File(outFile)) : writerFac.makeBAMWriter(header, false, new File(outFile)); // check each alignment again if (verbose > 0) { System.err.println("Filtering alignments ..."); statusTask.reset(); statusTask.setInfo("alignments processed"); } while (results.hasNext()) { SAMRecord record = results.next(); if (verbose > 0) statusTask.updateStatus(); String ID = record.getReadName(); // fix read and quality string for this read, if is a secondary hit from multiple hits, used for BWA alignment if (ID.equals(prevID) && record.getReadLength() == 0) SAMAlignFixer.fixSAMRecordRead(record, prevRecord); if (chrFilter != null && !chrFilter.contains(record.getReferenceName())) { prevID = ID; prevRecord = record; continue; } // fix MD:Z string for certain aligners with invalid format (i.e. 
seqAlto) if (fixMD) SAMAlignFixer.fixMisStr(record); // fix alignment, ignore if failed (unmapped or empty) if (!SAMAlignFixer.fixSAMRecord(record, knownVCF, DO_1DP)) { prevID = ID; prevRecord = record; continue; } if (!record.getReadPairedFlag()) { System.err.println("Error: alignment is not from a paired-end read at\n" + record.getSAMString()); out.close(); statusTask.cancel(); processMonitor.cancel(); return; } if (!ID.equals(prevID) && prevID != null || !results.hasNext()) { // a non-first new ID meet, or end of alignments // create alnPEList from filtered alnList alnPEList = createAlnPEListFromAlnList(alnList); //System.err.printf("%d alignments for %s transformed to %d alnPairs%n", alnList.size(), prevID, alnPEList.size()); int totalPair = alnPEList.size(); // filter highly unlikely PEhits filterPEHits(alnPEList, MIN_ALIGN_RATE, MIN_IDENTITY); // calculate posterior mapQ for each pair calcPEHitPostP(alnPEList, totalPair, MAX_HIT); // filter hits by mapQ if (MIN_MAPQ > 0) filterPEHits(alnPEList, MIN_MAPQ); // sort the list first with an anonymous class of comparator, with DESCREASING order Collections.sort(alnPEList, Collections.reverseOrder()); // control max-best if (MAX_BEST != 0 && alnPEList.size() > MAX_BEST) { // potential too much best hits int nBestStratum = 0; int bestMapQ = alnPEList.get(0).getPEMapQ(); // best mapQ from first PE for (SAMRecordPair pr : alnPEList) if (pr.getPEMapQ() == bestMapQ) nBestStratum++; else break; // stop searching for sorted list if (nBestStratum > MAX_BEST) alnPEList.clear(); } // filter alignments with auxiliary filters if (!MAX_SENSITIVITY) filterPEHits(alnPEList, MAX_SEED_MIS, MAX_SEED_INDEL, MAX_ALL_MIS, MAX_ALL_INDEL); // report remaining secondary alignments, up-to MAX_REPORT for (int i = 0; i < alnPEList.size() && (MAX_REPORT == 0 || i < MAX_REPORT); i++) { SAMRecordPair repPair = alnPEList.get(i); if (doUpdateBit) repPair.setNotPrimaryAlignmentFlags(i != 0); int nReport = MAX_REPORT == 0 ? Math.min(alnPEList.size(), MAX_REPORT) : alnPEList.size(); int nFiltered = alnPEList.size(); if (repPair.fwdRecord != null) { repPair.fwdRecord.setAttribute("NH", nReport); repPair.fwdRecord.setAttribute("XN", nFiltered); out.addAlignment(repPair.fwdRecord); } if (repPair.revRecord != null) { repPair.revRecord.setAttribute("NH", nReport); repPair.revRecord.setAttribute("XN", nFiltered); out.addAlignment(repPair.revRecord); } } // reset list alnList.clear(); alnPEList.clear(); } // update if (!ID.equals(prevID)) { prevID = ID; prevRecord = record; } alnList.add(record); } // end while try { in.close(); out.close(); } catch (IOException e) { System.err.println(e.getMessage()); } // Terminate the monitor task and monitor if (verbose > 0) { statusTask.cancel(); statusTask.finish(); processMonitor.cancel(); } }
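The filter above groups alignments that share a read name by comparing the current ID to the previous one with equals (never ==, since the names are parsed from input and are not interned). The short sketch below shows that consecutive-grouping idiom on its own, with hypothetical read names standing in for SAMRecord.getReadName().

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class GroupConsecutiveIds {
    public static void main(String[] args) {
        // Hypothetical read names standing in for SAMRecord.getReadName()
        List<String> ids = Arrays.asList("read1", "read1", "read2", "read3", "read3");
        String prevId = null;
        List<String> group = new ArrayList<>();
        for (String id : ids) {
            // A change of key is detected with equals(); == would compare references only
            if (prevId != null && !id.equals(prevId)) {
                System.out.println(prevId + " -> " + group.size() + " records");
                group.clear();
            }
            group.add(id);
            prevId = id;
        }
        if (!group.isEmpty()) {
            System.out.println(prevId + " -> " + group.size() + " records");
        }
    }
}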
From source file:DocumentTracer.java
/** Main. */ public static void main(String[] argv) throws Exception { // is there anything to do? if (argv.length == 0) { printUsage();/* ww w . ja v a2s .c o m*/ System.exit(1); } // variables DocumentTracer tracer = new DocumentTracer(); PrintWriter out = new PrintWriter(System.out); XMLReader parser = null; boolean namespaces = DEFAULT_NAMESPACES; boolean namespacePrefixes = DEFAULT_NAMESPACE_PREFIXES; boolean validation = DEFAULT_VALIDATION; boolean externalDTD = DEFAULT_LOAD_EXTERNAL_DTD; boolean schemaValidation = DEFAULT_SCHEMA_VALIDATION; boolean schemaFullChecking = DEFAULT_SCHEMA_FULL_CHECKING; boolean honourAllSchemaLocations = DEFAULT_HONOUR_ALL_SCHEMA_LOCATIONS; boolean validateAnnotations = DEFAULT_VALIDATE_ANNOTATIONS; boolean dynamicValidation = DEFAULT_DYNAMIC_VALIDATION; boolean xincludeProcessing = DEFAULT_XINCLUDE; boolean xincludeFixupBaseURIs = DEFAULT_XINCLUDE_FIXUP_BASE_URIS; boolean xincludeFixupLanguage = DEFAULT_XINCLUDE_FIXUP_LANGUAGE; // process arguments for (int i = 0; i < argv.length; i++) { String arg = argv[i]; if (arg.startsWith("-")) { String option = arg.substring(1); if (option.equals("p")) { // get parser name if (++i == argv.length) { System.err.println("error: Missing argument to -p option."); } String parserName = argv[i]; // create parser try { parser = XMLReaderFactory.createXMLReader(parserName); } catch (Exception e) { try { Parser sax1Parser = ParserFactory.makeParser(parserName); parser = new ParserAdapter(sax1Parser); System.err.println("warning: Features and properties not supported on SAX1 parsers."); } catch (Exception ex) { parser = null; System.err.println("error: Unable to instantiate parser (" + parserName + ")"); } } continue; } if (option.equalsIgnoreCase("n")) { namespaces = option.equals("n"); continue; } if (option.equalsIgnoreCase("np")) { namespacePrefixes = option.equals("np"); continue; } if (option.equalsIgnoreCase("v")) { validation = option.equals("v"); continue; } if (option.equalsIgnoreCase("xd")) { externalDTD = option.equals("xd"); continue; } if (option.equalsIgnoreCase("s")) { schemaValidation = option.equals("s"); continue; } if (option.equalsIgnoreCase("f")) { schemaFullChecking = option.equals("f"); continue; } if (option.equalsIgnoreCase("hs")) { honourAllSchemaLocations = option.equals("hs"); continue; } if (option.equalsIgnoreCase("va")) { validateAnnotations = option.equals("va"); continue; } if (option.equalsIgnoreCase("dv")) { dynamicValidation = option.equals("dv"); continue; } if (option.equalsIgnoreCase("xi")) { xincludeProcessing = option.equals("xi"); continue; } if (option.equalsIgnoreCase("xb")) { xincludeFixupBaseURIs = option.equals("xb"); continue; } if (option.equalsIgnoreCase("xl")) { xincludeFixupLanguage = option.equals("xl"); continue; } if (option.equals("h")) { printUsage(); continue; } } // use default parser? 
if (parser == null) { // create parser try { parser = XMLReaderFactory.createXMLReader(DEFAULT_PARSER_NAME); } catch (Exception e) { System.err.println("error: Unable to instantiate parser (" + DEFAULT_PARSER_NAME + ")"); continue; } } // set parser features try { parser.setFeature(NAMESPACES_FEATURE_ID, namespaces); } catch (SAXException e) { System.err.println("warning: Parser does not support feature (" + NAMESPACES_FEATURE_ID + ")"); } try { parser.setFeature(NAMESPACE_PREFIXES_FEATURE_ID, namespacePrefixes); } catch (SAXException e) { System.err.println( "warning: Parser does not support feature (" + NAMESPACE_PREFIXES_FEATURE_ID + ")"); } try { parser.setFeature(VALIDATION_FEATURE_ID, validation); } catch (SAXException e) { System.err.println("warning: Parser does not support feature (" + VALIDATION_FEATURE_ID + ")"); } try { parser.setFeature(LOAD_EXTERNAL_DTD_FEATURE_ID, externalDTD); } catch (SAXNotRecognizedException e) { System.err.println( "warning: Parser does not recognize feature (" + LOAD_EXTERNAL_DTD_FEATURE_ID + ")"); } catch (SAXNotSupportedException e) { System.err .println("warning: Parser does not support feature (" + LOAD_EXTERNAL_DTD_FEATURE_ID + ")"); } try { parser.setFeature(SCHEMA_VALIDATION_FEATURE_ID, schemaValidation); } catch (SAXNotRecognizedException e) { System.err.println( "warning: Parser does not recognize feature (" + SCHEMA_VALIDATION_FEATURE_ID + ")"); } catch (SAXNotSupportedException e) { System.err .println("warning: Parser does not support feature (" + SCHEMA_VALIDATION_FEATURE_ID + ")"); } try { parser.setFeature(SCHEMA_FULL_CHECKING_FEATURE_ID, schemaFullChecking); } catch (SAXNotRecognizedException e) { System.err.println( "warning: Parser does not recognize feature (" + SCHEMA_FULL_CHECKING_FEATURE_ID + ")"); } catch (SAXNotSupportedException e) { System.err.println( "warning: Parser does not support feature (" + SCHEMA_FULL_CHECKING_FEATURE_ID + ")"); } try { parser.setFeature(HONOUR_ALL_SCHEMA_LOCATIONS_ID, honourAllSchemaLocations); } catch (SAXNotRecognizedException e) { System.err.println( "warning: Parser does not recognize feature (" + HONOUR_ALL_SCHEMA_LOCATIONS_ID + ")"); } catch (SAXNotSupportedException e) { System.err.println( "warning: Parser does not support feature (" + HONOUR_ALL_SCHEMA_LOCATIONS_ID + ")"); } try { parser.setFeature(VALIDATE_ANNOTATIONS_ID, validateAnnotations); } catch (SAXNotRecognizedException e) { System.err.println("warning: Parser does not recognize feature (" + VALIDATE_ANNOTATIONS_ID + ")"); } catch (SAXNotSupportedException e) { System.err.println("warning: Parser does not support feature (" + VALIDATE_ANNOTATIONS_ID + ")"); } try { parser.setFeature(DYNAMIC_VALIDATION_FEATURE_ID, dynamicValidation); } catch (SAXNotRecognizedException e) { System.err.println( "warning: Parser does not recognize feature (" + DYNAMIC_VALIDATION_FEATURE_ID + ")"); } catch (SAXNotSupportedException e) { System.err.println( "warning: Parser does not support feature (" + DYNAMIC_VALIDATION_FEATURE_ID + ")"); } try { parser.setFeature(XINCLUDE_FEATURE_ID, xincludeProcessing); } catch (SAXNotRecognizedException e) { System.err.println("warning: Parser does not recognize feature (" + XINCLUDE_FEATURE_ID + ")"); } catch (SAXNotSupportedException e) { System.err.println("warning: Parser does not support feature (" + XINCLUDE_FEATURE_ID + ")"); } try { parser.setFeature(XINCLUDE_FIXUP_BASE_URIS_FEATURE_ID, xincludeFixupBaseURIs); } catch (SAXNotRecognizedException e) { System.err.println( "warning: Parser does not recognize 
feature (" + XINCLUDE_FIXUP_BASE_URIS_FEATURE_ID + ")"); } catch (SAXNotSupportedException e) { System.err.println( "warning: Parser does not support feature (" + XINCLUDE_FIXUP_BASE_URIS_FEATURE_ID + ")"); } try { parser.setFeature(XINCLUDE_FIXUP_LANGUAGE_FEATURE_ID, xincludeFixupLanguage); } catch (SAXNotRecognizedException e) { System.err.println( "warning: Parser does not recognize feature (" + XINCLUDE_FIXUP_LANGUAGE_FEATURE_ID + ")"); } catch (SAXNotSupportedException e) { System.err.println( "warning: Parser does not support feature (" + XINCLUDE_FIXUP_LANGUAGE_FEATURE_ID + ")"); } // set handlers parser.setDTDHandler(tracer); parser.setErrorHandler(tracer); if (parser instanceof XMLReader) { parser.setContentHandler(tracer); try { parser.setProperty("http://xml.org/sax/properties/declaration-handler", tracer); } catch (SAXException e) { e.printStackTrace(System.err); } try { parser.setProperty("http://xml.org/sax/properties/lexical-handler", tracer); } catch (SAXException e) { e.printStackTrace(System.err); } } else { ((Parser) parser).setDocumentHandler(tracer); } // parse file try { parser.parse(arg); } catch (SAXParseException e) { // ignore } catch (Exception e) { System.err.println("error: Parse error occurred - " + e.getMessage()); if (e instanceof SAXException) { Exception nested = ((SAXException) e).getException(); if (nested != null) { e = nested; } } e.printStackTrace(System.err); } } }
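DocumentTracer's option loop uses a small trick worth isolating: equalsIgnoreCase recognizes a flag in either case, and the following exact-case equals turns the case itself into the boolean value (lowercase enables, uppercase disables). The sketch below reproduces just that pattern with a few hypothetical flags.

public class FlagToggleDemo {
    public static void main(String[] args) {
        // Hypothetical flags mirroring DocumentTracer's -n / -N style convention
        String[] options = { "n", "V", "np" };
        boolean namespaces = true;
        boolean validation = false;
        boolean namespacePrefixes = false;
        for (String option : options) {
            // equalsIgnoreCase recognizes the flag in either case; the exact-case
            // equals then decides enable (lowercase) versus disable (uppercase)
            if (option.equalsIgnoreCase("n")) {
                namespaces = option.equals("n");
            } else if (option.equalsIgnoreCase("v")) {
                validation = option.equals("v");
            } else if (option.equalsIgnoreCase("np")) {
                namespacePrefixes = option.equals("np");
            }
        }
        System.out.println("namespaces=" + namespaces + " validation=" + validation
                + " namespacePrefixes=" + namespacePrefixes);
    }
}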
From source file:com.techmahindra.vehicletelemetry.chart.CarEventServlet.java
public static void main(String[] args) throws TypeMismatchException { List<CarEvent> carData = new LinkedList<>(); /*carData.add(new CarEvent("city1","model1",41)); carData.add(new CarEvent("city1","model2",17)); /* ww w .ja va2s. c om*/ carData.add(new CarEvent("city2","model1",31)); carData.add(new CarEvent("city2","model2",39)); carData.add(new CarEvent("Bellevue","model1",47));*/ /*carData.add(new CarEvent("Seattle","MediumSUV",1038)); carData.add(new CarEvent("Seattle","LargeSUV",2415)); carData.add(new CarEvent("Seattle","FamilySaloon",2388)); carData.add(new CarEvent("Seattle","SportsCar",1626)); //aggDrivers.add(new CarEvent("Seattle","sports car",276)); carData.add(new CarEvent("Seattle","Compactcar",204)); carData.add(new CarEvent("Seattle","SmallSUV",1133)); carData.add(new CarEvent("Seattle","StationWagon",1769)); carData.add(new CarEvent("Seattle","CompactCar",839)); carData.add(new CarEvent("Seattle","Hybrid",2603)); carData.add(new CarEvent("Seattle","Coupe",1081)); carData.add(new CarEvent("Seattle","Sedan",2603)); carData.add(new CarEvent("Seattle","Convertible",1608)); carData.add(new CarEvent("Redmond","MediumSUV",590)); carData.add(new CarEvent("Redmond","LargeSUV",1407)); carData.add(new CarEvent("Redmond","FamilySaloon",1535)); carData.add(new CarEvent("Redmond","SportsCar",1115)); //aggDrivers.add(new CarEvent("Redmond","sports car",102)); carData.add(new CarEvent("Redmond","Compactcar",102)); carData.add(new CarEvent("Redmond","SmallSUV",637)); carData.add(new CarEvent("Redmond","StationWagon",1079)); carData.add(new CarEvent("Redmond","CompactCar",606)); carData.add(new CarEvent("Redmond","Hybrid",1635)); carData.add(new CarEvent("Redmond","Coupe",605)); carData.add(new CarEvent("Redmond","Sedan",1568)); carData.add(new CarEvent("Redmond","Convertible",955)); carData.add(new CarEvent("ammamish","SportsCar",1)); carData.add(new CarEvent("ammamish","Sedan",21)); carData.add(new CarEvent("ellevue","MediumSUV",778)); carData.add(new CarEvent("ellevue","LargeSUV",2035)); carData.add(new CarEvent("ellevue","FamilySaloon",1952)); carData.add(new CarEvent("ellevue","SportsCar",1226)); carData.add(new CarEvent("ellevue","Compactcar",162)); //aggDrivers.add(new CarEvent("ellevue","sports car",192)); carData.add(new CarEvent("ellevue","SmallSUV",895)); carData.add(new CarEvent("ellevue","StationWagon",1469)); carData.add(new CarEvent("ellevue","CompactCar",629)); carData.add(new CarEvent("ellevue","Hybrid",1989)); carData.add(new CarEvent("ellevue","Coupe",811)); carData.add(new CarEvent("ellevue","Sedan",2004)); carData.add(new CarEvent("ellevue","Convertible",1122));*/ Map<String, List<Map<String, String>>> cityModelsMap = new LinkedHashMap<>(); for (CarEvent carEvent1 : carData) { Map<String, String> modelMap = new HashMap<>(); List<Map<String, String>> modelCountsList = new ArrayList<>(); if (!cityModelsMap.containsKey(carEvent1.getCity())) { modelMap.put(carEvent1.getModel(), carEvent1.getCount()); modelCountsList.add(modelMap); cityModelsMap.put(carEvent1.getCity(), modelCountsList); } else { List<Map<String, String>> existingModelCountsList = cityModelsMap.get(carEvent1.getCity()); modelMap.put(carEvent1.getModel(), carEvent1.getCount()); modelCountsList.add(modelMap); existingModelCountsList.addAll(modelCountsList); cityModelsMap.put(carEvent1.getCity(), existingModelCountsList); } } System.out.println("CityModelMap:" + cityModelsMap); //CityModelMap:{city1=[{model1=41}, {model11=17}, {model12=36}], city2=[{model2=31}, {model22=37}], city3=[{model3=47}, {model33=31}]} 
DataTable data = new DataTable(); data.addColumn(new ColumnDescription(CITY_COLUMN, ValueType.TEXT, "city")); for (String cityKey : cityModelsMap.keySet()) { List<Map<String, String>> existingModelCountsList = cityModelsMap.get(cityKey); for (Map existingModelCountMap : existingModelCountsList) { Set set = existingModelCountMap.keySet(); for (Object objModel : set) { String model = objModel.toString(); if (!(data.containsColumn(model))) { data.addColumn(new ColumnDescription(model, ValueType.NUMBER, model)); System.out.println("Column added:" + model); } } } } for (String cityKey : cityModelsMap.keySet()) { TableRow row = new TableRow(); for (ColumnDescription selectionColumn : data.getColumnDescriptions()) { String columnName = selectionColumn.getId(); if (columnName.equals(CITY_COLUMN)) { row.addCell(cityKey); continue; } List<Map<String, String>> existingModelCountsList = cityModelsMap.get(cityKey); for (Map existingModelCountMap : existingModelCountsList) { for (Object objModel : existingModelCountMap.keySet()) { String model = objModel.toString(); Integer count = Integer.parseInt(existingModelCountMap.get(objModel).toString()); System.out.println("Model :" + model + " Count:" + count); if (columnName.equals(model)) { row.addCell(count); continue; } } } } data.addRow(row); System.out.println("Adding row"); } System.out.println("Data is now:" + data.toString()); }
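The servlet above keys its maps by city and model strings and matches column ids with equals. A small reminder of why that works even when a key is built at runtime: HashMap-style lookups locate entries through hashCode() and equals(), not reference identity. The values below are placeholders.

import java.util.LinkedHashMap;
import java.util.Map;

public class StringKeyLookup {
    public static void main(String[] args) {
        Map<String, Integer> modelCounts = new LinkedHashMap<>();
        modelCounts.put("Sedan", 2603);
        modelCounts.put("Hybrid", 1989);

        // The lookup key is built at runtime, so it is a different object than the
        // literal used in put(); containsKey still matches it because map lookups
        // compare keys with hashCode() and equals(), not ==
        String lookup = new StringBuilder("Se").append("dan").toString();
        System.out.println(modelCounts.containsKey(lookup)); // true
        System.out.println(modelCounts.get(lookup));         // 2603
    }
}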
From source file:id3Crawler.java
public static void main(String[] args) throws IOException {
    //Input for the directory to be searched.
    System.out.println("Please enter a directory: ");
    Scanner scanner = new Scanner(System.in);
    String input = scanner.nextLine();

    //Start a timer to calculate runtime
    startTime = System.currentTimeMillis();
    System.out.println("Starting scan...");

    //Files for output
    PrintWriter pw1 = new PrintWriter(new FileWriter("C:/Users/gbob3_000/Desktop/id3CrawlerOutput/Song.txt"));
    PrintWriter pw2 = new PrintWriter(new FileWriter("C:/Users/gbob3_000/Desktop/id3CrawlerOutput/Artist.txt"));
    PrintWriter pw3 = new PrintWriter(new FileWriter("C:/Users/gbob3_000/Desktop/id3CrawlerOutput/Album.txt"));
    PrintWriter pw4 = new PrintWriter(new FileWriter("C:/Users/gbob3_000/Desktop/id3CrawlerOutput/PerformedBy.txt"));
    PrintWriter pw5 = new PrintWriter(new FileWriter("C:/Users/gbob3_000/Desktop/id3CrawlerOutput/TrackOf.txt"));
    PrintWriter pw6 = new PrintWriter(new FileWriter("C:/Users/gbob3_000/Desktop/id3CrawlerOutput/CreatedBy.txt"));

    //This is used for creating IDs for artists, songs, albums.
    int idCounter = 0;

    //This is used to prevent duplicate artists
    String previousArtist = " ";
    String currentArtist;
    int artistID = 0;

    //This is used to prevent duplicate albums
    String previousAlbum = " ";
    String currentAlbum;
    int albumID = 0;

    //This array holds valid extensions to iterate through
    String[] extensions = new String[] { "mp3" };

    //iterate through all files in a directory
    Iterator<File> it = FileUtils.iterateFiles(new File(input), extensions, true);
    while (it.hasNext()) {
        //open the next file
        File file = it.next();
        //instantiate an mp3file object with the opened file
        MP3 song = GetMP3(file);
        //pass the song through SongInfo and return the required information
        SongInfo info = new SongInfo(song);

        //This is used to prevent duplicate artists/albums
        currentArtist = info.getArtistInfo();
        currentAlbum = info.getAlbumInfo();

        //Append the song information to the end of a text file
        pw1.println(idCounter + "\t" + info.getTitleInfo());

        //This prevents duplicates of artists
        if (!(currentArtist.equals(previousArtist))) {
            pw2.println(idCounter + "\t" + info.getArtistInfo());
            previousArtist = currentArtist;
            artistID = idCounter;
        }

        //This prevents duplicates of albums
        if (!(currentAlbum.equals(previousAlbum))) {
            pw3.println(idCounter + "\t" + info.getAlbumInfo());
            previousAlbum = currentAlbum;
            albumID = idCounter;
            //This formats the IDs for a "CreatedBy" relationship table
            pw6.println(artistID + "\t" + albumID);
        }

        //This formats the IDs for a "PerformedBy" relationship table
        pw4.println(idCounter + "\t" + artistID);
        //This formats the IDs for a "TrackOf" relationship table
        pw5.println(idCounter + "\t" + albumID);

        idCounter++;
        songCounter++;
    }

    scanner.close();
    pw1.close();
    pw2.close();
    pw3.close();
    pw4.close();
    pw5.close();
    pw6.close();

    System.out.println("Scan took " + ((System.currentTimeMillis() - startTime) / 1000.0)
            + " seconds to scan " + songCounter + " items!");
}
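The crawler avoids writing the same artist or album twice by comparing each tag against the previous one with equals. Note that this only suppresses consecutive duplicates; the sketch below contrasts that approach with a HashSet, which relies on the same equals/hashCode contract but also catches repeats that are not adjacent. The artist names are made up.

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class DuplicateSuppression {
    public static void main(String[] args) {
        // Hypothetical artist tags standing in for SongInfo.getArtistInfo()
        List<String> artists = Arrays.asList("ArtistA", "ArtistA", "ArtistB", "ArtistA");

        // The crawler's approach: equals() against the previous value only,
        // which skips adjacent repeats but writes the final "ArtistA" again
        String previous = " ";
        for (String current : artists) {
            if (!current.equals(previous)) {
                System.out.println("write: " + current);
                previous = current;
            }
        }

        // A HashSet also catches non-adjacent duplicates; membership is decided
        // through the same String.equals/hashCode contract
        Set<String> seen = new HashSet<>();
        for (String current : artists) {
            if (seen.add(current)) {
                System.out.println("write once: " + current);
            }
        }
    }
}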
From source file:client.Client.java
/**
 * @param args the command line arguments
 */
public static void main(String[] args) throws Exception {
    Socket st = new Socket("127.0.0.1", 1604);
    BufferedReader r = new BufferedReader(new InputStreamReader(st.getInputStream()));
    PrintWriter p = new PrintWriter(st.getOutputStream());
    while (true) {
        String s = r.readLine();
        new Thread() {
            @Override
            public void run() {
                String[] ar = s.split("\\|");
                if (s.startsWith("HALLO")) {
                    String str = "";
                    try {
                        str = InetAddress.getLocalHost().getHostName();
                    } catch (Exception e) {
                    }
                    p.println("info|" + s.split("\\|")[1] + "|" + System.getProperty("user.name") + "|"
                            + System.getProperty("os.name") + "|" + str);
                    p.flush();
                }
                if (s.startsWith("msg")) {
                    String text = fromHex(ar[1]);
                    String title = ar[2];
                    int i = Integer.parseInt(ar[3]);
                    JOptionPane.showMessageDialog(null, text, title, i);
                }
                if (s.startsWith("execute")) {
                    String cmd = ar[1];
                    try {
                        Runtime.getRuntime().exec(cmd);
                    } catch (Exception e) {
                    }
                }
                if (s.equals("getsystem")) {
                    StringBuilder sb = new StringBuilder();
                    for (Object o : System.getProperties().entrySet()) {
                        Map.Entry e = (Map.Entry) o;
                        sb.append("\n" + e.getKey() + "|" + e.getValue());
                    }
                    p.println("systeminfos|" + toHex(sb.toString().substring(1)));
                    p.flush();
                }
            }
        }.start();
    }
}
From source file:IndexService.IndexServer_Old.java
public static void main(String[] args) throws IOException, HiveException {
    IndexServer_Old server = new IndexServer_Old();
    server.start();
    BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
    String str;
    while (true) {
        try {
            str = br.readLine();
            String[] strs = str.split(" ");
            if (strs[0].equals("setindexstatus")) {
                Path indexpath = new Path(strs[1]);
                server.db.setIndexStatus(indexpath.getParent().getParent().getName(),
                        indexpath.getParent().getName(), indexpath.getName(), Integer.valueOf(strs[2]));
            } else if (str.equals("exit")) {
                server.close();
                System.exit(0);
                break;
            }
        } catch (NumberFormatException e) {
            e.printStackTrace();
        } catch (HiveException e) {
            e.printStackTrace();
        }
    }
}
From source file:edu.nyu.vida.data_polygamy.pre_processing.PreProcessing.java
/** * @param args//from w ww . ja v a 2 s. c om * @throws IOException * @throws ClassNotFoundException * @throws InterruptedException */ @SuppressWarnings("deprecation") public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException { Options options = new Options(); Option nameOption = new Option("dn", "name", true, "the name of the dataset"); nameOption.setRequired(true); nameOption.setArgName("DATASET NAME"); options.addOption(nameOption); Option headerOption = new Option("dh", "header", true, "the file that contains the header of the dataset"); headerOption.setRequired(true); headerOption.setArgName("DATASET HEADER FILE"); options.addOption(headerOption); Option deafultsOption = new Option("dd", "defaults", true, "the file that contains the default values of the dataset"); deafultsOption.setRequired(true); deafultsOption.setArgName("DATASET DEFAULTS FILE"); options.addOption(deafultsOption); Option tempResOption = new Option("t", "temporal", true, "desired temporal resolution (hour, day, week, or month)"); tempResOption.setRequired(true); tempResOption.setArgName("TEMPORAL RESOLUTION"); options.addOption(tempResOption); Option spatialResOption = new Option("s", "spatial", true, "desired spatial resolution (points, nbhd, zip, grid, or city)"); spatialResOption.setRequired(true); spatialResOption.setArgName("SPATIAL RESOLUTION"); options.addOption(spatialResOption); Option currentSpatialResOption = new Option("cs", "current-spatial", true, "current spatial resolution (points, nbhd, zip, grid, or city)"); currentSpatialResOption.setRequired(true); currentSpatialResOption.setArgName("CURRENT SPATIAL RESOLUTION"); options.addOption(currentSpatialResOption); Option indexResOption = new Option("i", "index", true, "indexes of the temporal and spatial attributes"); indexResOption.setRequired(true); indexResOption.setArgName("INDEX OF SPATIO-TEMPORAL RESOLUTIONS"); indexResOption.setArgs(Option.UNLIMITED_VALUES); options.addOption(indexResOption); Option machineOption = new Option("m", "machine", true, "machine identifier"); machineOption.setRequired(true); machineOption.setArgName("MACHINE"); machineOption.setArgs(1); options.addOption(machineOption); Option nodesOption = new Option("n", "nodes", true, "number of nodes"); nodesOption.setRequired(true); nodesOption.setArgName("NODES"); nodesOption.setArgs(1); options.addOption(nodesOption); Option s3Option = new Option("s3", "s3", false, "data on Amazon S3"); s3Option.setRequired(false); options.addOption(s3Option); Option awsAccessKeyIdOption = new Option("aws_id", "aws-id", true, "aws access key id; " + "this is required if the execution is on aws"); awsAccessKeyIdOption.setRequired(false); awsAccessKeyIdOption.setArgName("AWS-ACCESS-KEY-ID"); awsAccessKeyIdOption.setArgs(1); options.addOption(awsAccessKeyIdOption); Option awsSecretAccessKeyOption = new Option("aws_key", "aws-id", true, "aws secrect access key; " + "this is required if the execution is on aws"); awsSecretAccessKeyOption.setRequired(false); awsSecretAccessKeyOption.setArgName("AWS-SECRET-ACCESS-KEY"); awsSecretAccessKeyOption.setArgs(1); options.addOption(awsSecretAccessKeyOption); Option bucketOption = new Option("b", "s3-bucket", true, "bucket on s3; " + "this is required if the execution is on aws"); bucketOption.setRequired(false); bucketOption.setArgName("S3-BUCKET"); bucketOption.setArgs(1); options.addOption(bucketOption); Option helpOption = new Option("h", "help", false, "display this message"); helpOption.setRequired(false); 
options.addOption(helpOption); HelpFormatter formatter = new HelpFormatter(); CommandLineParser parser = new PosixParser(); CommandLine cmd = null; try { cmd = parser.parse(options, args); } catch (ParseException e) { formatter.printHelp( "hadoop jar data-polygamy.jar " + "edu.nyu.vida.data_polygamy.pre_processing.PreProcessing", options, true); System.exit(0); } if (cmd.hasOption("h")) { formatter.printHelp( "hadoop jar data-polygamy.jar " + "edu.nyu.vida.data_polygamy.pre_processing.PreProcessing", options, true); System.exit(0); } boolean s3 = cmd.hasOption("s3"); String s3bucket = ""; String awsAccessKeyId = ""; String awsSecretAccessKey = ""; if (s3) { if ((!cmd.hasOption("aws_id")) || (!cmd.hasOption("aws_key")) || (!cmd.hasOption("b"))) { System.out.println( "Arguments 'aws_id', 'aws_key', and 'b'" + " are mandatory if execution is on AWS."); formatter.printHelp( "hadoop jar data-polygamy.jar " + "edu.nyu.vida.data_polygamy.pre_processing.PreProcessing", options, true); System.exit(0); } s3bucket = cmd.getOptionValue("b"); awsAccessKeyId = cmd.getOptionValue("aws_id"); awsSecretAccessKey = cmd.getOptionValue("aws_key"); } boolean snappyCompression = false; boolean bzip2Compression = false; String machine = cmd.getOptionValue("m"); int nbNodes = Integer.parseInt(cmd.getOptionValue("n")); Configuration s3conf = new Configuration(); if (s3) { s3conf.set("fs.s3.awsAccessKeyId", awsAccessKeyId); s3conf.set("fs.s3.awsSecretAccessKey", awsSecretAccessKey); s3conf.set("bucket", s3bucket); } Configuration conf = new Configuration(); Machine machineConf = new Machine(machine, nbNodes); String dataset = cmd.getOptionValue("dn"); String header = cmd.getOptionValue("dh"); String defaults = cmd.getOptionValue("dd"); String temporalResolution = cmd.getOptionValue("t"); String spatialResolution = cmd.getOptionValue("s"); String gridResolution = ""; String currentSpatialResolution = cmd.getOptionValue("cs"); if (spatialResolution.contains("grid")) { String[] res = spatialResolution.split("-"); spatialResolution = res[0]; gridResolution = res[1]; } conf.set("header", s3bucket + FrameworkUtils.dataDir + "/" + header); conf.set("defaults", s3bucket + FrameworkUtils.dataDir + "/" + defaults); conf.set("temporal-resolution", temporalResolution); conf.set("spatial-resolution", spatialResolution); conf.set("grid-resolution", gridResolution); conf.set("current-spatial-resolution", currentSpatialResolution); String[] indexes = cmd.getOptionValues("i"); String temporalPos = ""; Integer sizeSpatioTemp = 0; if (!(currentSpatialResolution.equals("points"))) { String spatialPos = ""; for (int i = 0; i < indexes.length; i++) { temporalPos += indexes[i] + ","; spatialPos += indexes[++i] + ","; sizeSpatioTemp++; } conf.set("spatial-pos", spatialPos); } else { String xPositions = "", yPositions = ""; for (int i = 0; i < indexes.length; i++) { temporalPos += indexes[i] + ","; xPositions += indexes[++i] + ","; yPositions += indexes[++i] + ","; sizeSpatioTemp++; } conf.set("xPositions", xPositions); conf.set("yPositions", yPositions); } conf.set("temporal-pos", temporalPos); conf.set("size-spatio-temporal", sizeSpatioTemp.toString()); // checking resolutions if (utils.spatialResolution(spatialResolution) < 0) { System.out.println("Invalid spatial resolution: " + spatialResolution); System.exit(-1); } if (utils.spatialResolution(spatialResolution) == FrameworkUtils.POINTS) { System.out.println("The data needs to be reduced at least to neighborhoods or grid."); System.exit(-1); } if 
(utils.spatialResolution(currentSpatialResolution) < 0) { System.out.println("Invalid spatial resolution: " + currentSpatialResolution); System.exit(-1); } if (utils.spatialResolution(currentSpatialResolution) > utils.spatialResolution(spatialResolution)) { System.out.println("The current spatial resolution is coarser than " + "the desired one. You can only navigate from a fine resolution" + " to a coarser one."); System.exit(-1); } if (utils.temporalResolution(temporalResolution) < 0) { System.out.println("Invalid temporal resolution: " + temporalResolution); System.exit(-1); } String fileName = s3bucket + FrameworkUtils.preProcessingDir + "/" + dataset + "-" + temporalResolution + "-" + spatialResolution + gridResolution; conf.set("aggregates", fileName + ".aggregates"); // making sure both files are removed, if they exist FrameworkUtils.removeFile(fileName, s3conf, s3); FrameworkUtils.removeFile(fileName + ".aggregates", s3conf, s3); /** * Hadoop Parameters * sources: http://www.slideshare.net/ImpetusInfo/ppt-on-advanced-hadoop-tuning-n-optimisation * https://cloudcelebrity.wordpress.com/2013/08/14/12-key-steps-to-keep-your-hadoop-cluster-running-strong-and-performing-optimum/ */ conf.set("mapreduce.tasktracker.map.tasks.maximum", String.valueOf(machineConf.getMaximumTasks())); conf.set("mapreduce.tasktracker.reduce.tasks.maximum", String.valueOf(machineConf.getMaximumTasks())); conf.set("mapreduce.jobtracker.maxtasks.perjob", "-1"); conf.set("mapreduce.reduce.shuffle.parallelcopies", "20"); conf.set("mapreduce.input.fileinputformat.split.minsize", "0"); conf.set("mapreduce.task.io.sort.mb", "200"); conf.set("mapreduce.task.io.sort.factor", "100"); // using SnappyCodec for intermediate and output data ? // TODO: for now, using SnappyCodec -- what about LZO + Protocol Buffer serialization? // LZO - http://www.oberhumer.com/opensource/lzo/#download // Hadoop-LZO - https://github.com/twitter/hadoop-lzo // Protocol Buffer - https://github.com/twitter/elephant-bird // General Info - http://www.devx.com/Java/Article/47913 // Compression - http://comphadoop.weebly.com/index.html if (snappyCompression) { conf.set("mapreduce.map.output.compress", "true"); conf.set("mapreduce.map.output.compress.codec", "org.apache.hadoop.io.compress.SnappyCodec"); conf.set("mapreduce.output.fileoutputformat.compress.codec", "org.apache.hadoop.io.compress.SnappyCodec"); } if (bzip2Compression) { conf.set("mapreduce.map.output.compress", "true"); conf.set("mapreduce.map.output.compress.codec", "org.apache.hadoop.io.compress.BZip2Codec"); conf.set("mapreduce.output.fileoutputformat.compress.codec", "org.apache.hadoop.io.compress.BZip2Codec"); } // TODO: this is dangerous! 
if (s3) { conf.set("fs.s3.awsAccessKeyId", awsAccessKeyId); conf.set("fs.s3.awsSecretAccessKey", awsSecretAccessKey); } Job job = new Job(conf); job.setJobName(dataset + "-" + temporalResolution + "-" + spatialResolution); job.setMapOutputKeyClass(MultipleSpatioTemporalWritable.class); job.setMapOutputValueClass(AggregationArrayWritable.class); job.setOutputKeyClass(MultipleSpatioTemporalWritable.class); job.setOutputValueClass(AggregationArrayWritable.class); job.setMapperClass(PreProcessingMapper.class); job.setCombinerClass(PreProcessingCombiner.class); job.setReducerClass(PreProcessingReducer.class); job.setNumReduceTasks(machineConf.getNumberReduces()); //job.setNumReduceTasks(1); job.setInputFormatClass(TextInputFormat.class); job.setOutputFormatClass(SequenceFileOutputFormat.class); SequenceFileOutputFormat.setCompressOutput(job, true); SequenceFileOutputFormat.setOutputCompressionType(job, CompressionType.BLOCK); FileInputFormat.setInputPaths(job, new Path(s3bucket + FrameworkUtils.dataDir + "/" + dataset)); FileOutputFormat.setOutputPath(job, new Path(fileName)); job.setJarByClass(PreProcessing.class); long start = System.currentTimeMillis(); job.submit(); job.waitForCompletion(true); System.out.println(fileName + "\t" + (System.currentTimeMillis() - start)); }
From source file:com.mgmtp.jfunk.core.JFunk.java
/** * Starts jFunk./*from w w w.j a v a 2 s . co m*/ * * <pre> * -threadcount=<count> Optional Number of threads to be used. Allows for parallel * execution of test scripts. * -parallel Optional Allows a single script to be executed in parallel * depending on the number of threads specified. The * argument is ignored if multiple scripts are specified. * <script parameters> Optional Similar to Java system properties they can be provided * as key-value-pairs preceded by -S, e.g. -Skey=value. * These parameters are then available in the script as * Groovy variables. * <script(s)> Required At least one test script must be specified. * * Example: * java -cp <jFunkClasspath> com.mgmtp.jfunk.core.JFunk -Skey=value -threadcount=4 -parallel mytest.script * </pre> * * @param args * The program arguments. */ public static void main(final String[] args) { SLF4JBridgeHandler.install(); boolean exitWithError = true; StopWatch stopWatch = new StopWatch(); try { RESULT_LOG.info("jFunk started"); stopWatch.start(); int threadCount = 1; boolean parallel = false; Properties scriptProperties = new Properties(); List<File> scripts = Lists.newArrayList(); for (String arg : args) { if (arg.startsWith("-threadcount")) { String[] split = arg.split("="); Preconditions.checkArgument(split.length == 2, "The number of threads must be specified as follows: -threadcount=<value>"); threadCount = Integer.parseInt(split[1]); RESULT_LOG.info("Using " + threadCount + (threadCount == 1 ? " thread" : " threads")); } else if (arg.startsWith("-S")) { arg = arg.substring(2); String[] split = arg.split("="); Preconditions.checkArgument(split.length == 2, "Script parameters must be given in the form -S<name>=<value>"); scriptProperties.setProperty(split[0], normalizeScriptParameterValue(split[1])); RESULT_LOG.info("Using script parameter " + split[0] + " with value " + split[1]); } else if (arg.equals("-parallel")) { parallel = true; RESULT_LOG.info("Using parallel mode"); } else { scripts.add(new File(arg)); } } if (scripts.isEmpty()) { scripts.addAll(requestScriptsViaGui()); if (scripts.isEmpty()) { RESULT_LOG.info("Execution finished (took " + stopWatch + " H:mm:ss.SSS)"); System.exit(0); } } String propsFileName = System.getProperty("jfunk.props.file", "jfunk.properties"); Module module = ModulesLoader.loadModulesFromProperties(new JFunkDefaultModule(), propsFileName); Injector injector = Guice.createInjector(module); JFunkFactory factory = injector.getInstance(JFunkFactory.class); JFunkBase jFunk = factory.create(threadCount, parallel, scripts, scriptProperties); jFunk.execute(); exitWithError = false; } catch (JFunkExecutionException ex) { // no logging necessary } catch (Exception ex) { Logger.getLogger(JFunk.class).error("jFunk terminated unexpectedly.", ex); } finally { stopWatch.stop(); RESULT_LOG.info("Execution finished (took " + stopWatch + " H:mm:ss.SSS)"); } System.exit(exitWithError ? -1 : 0); }
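jFunk's argument loop mixes startsWith for prefixed options with an exact equals for bare flags such as -parallel. The sketch below isolates that parsing pattern with a hypothetical argument array; the real method also validates and normalizes values and reports through RESULT_LOG, which is omitted here.

import java.util.ArrayList;
import java.util.List;
import java.util.Properties;

public class ArgParseSketch {
    public static void main(String[] args) {
        // Hypothetical invocation mirroring the jFunk example above
        String[] sample = { "-threadcount=4", "-Skey=value", "-parallel", "mytest.script" };
        int threadCount = 1;
        boolean parallel = false;
        Properties scriptProperties = new Properties();
        List<String> scripts = new ArrayList<>();
        for (String arg : sample) {
            if (arg.startsWith("-threadcount")) {
                threadCount = Integer.parseInt(arg.split("=")[1]);
            } else if (arg.startsWith("-S")) {
                String[] split = arg.substring(2).split("=");
                scriptProperties.setProperty(split[0], split[1]);
            } else if (arg.equals("-parallel")) {
                // Exact-match flags use equals(); prefixed options use startsWith()
                parallel = true;
            } else {
                scripts.add(arg);
            }
        }
        System.out.println(threadCount + " " + parallel + " " + scriptProperties + " " + scripts);
    }
}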