List of usage examples for java.lang ProcessBuilder redirectErrorStream
boolean redirectErrorStream
To view the source code for java.lang ProcessBuilder redirectErrorStream, click the Source Link below.
Click Source Link
From source file:jeplus.JEPlusFrameMain.java
private void jMenuItemJEPlusEAActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jMenuItemJEPlusEAActionPerformed // Check if JEPlus+EA folder is available if (JEPlusConfig.getDefaultInstance().getJEPlusEADir() == null) { // Select a file to open fc.resetChoosableFileFilters();//from w ww . ja v a2 s. com fc.setFileSelectionMode(JFileChooser.DIRECTORIES_ONLY); fc.setCurrentDirectory(new File("./")); fc.setMultiSelectionEnabled(false); if (fc.showOpenDialog(this) == JFileChooser.APPROVE_OPTION) { String path = fc.getSelectedFile().getPath() + File.separator; JEPlusConfig.getDefaultInstance().setJEPlusEADir(path); fc.setFileSelectionMode(JFileChooser.FILES_ONLY); } else { fc.setFileSelectionMode(JFileChooser.FILES_ONLY); return; } } // Launch JESS Client new Thread(new Runnable() { @Override public void run() { List<String> command = new ArrayList<>(); command.add("java"); command.add("-jar"); command.add("jEPlus+EA.jar"); command.add(getCurrentProjectFile()); ProcessBuilder builder = new ProcessBuilder(command); builder.directory(new File(JEPlusConfig.getDefaultInstance().getJEPlusEADir())); builder.redirectErrorStream(true); try { Process proc = builder.start(); // int ExitValue = proc.waitFor(); try (BufferedReader ins = new BufferedReader(new InputStreamReader(proc.getInputStream()))) { int res = ins.read(); while (res != -1) { res = ins.read(); } } } catch (IOException ex) { logger.error("Cannot run jEPlus+EA.", ex); } } }, "jEPlus+EA").start(); }
From source file:jeplus.JEPlusFrameMain.java
private void jMenuItemJESSClientActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jMenuItemJESSClientActionPerformed // Check if JESS Client folder is available if (JEPlusConfig.getDefaultInstance().getJESSClientDir() == null) { String ori = fc.getDialogTitle(); // Select a file to open fc.setDialogTitle("Choose where JESS Client is located"); fc.resetChoosableFileFilters();/*www . ja v a 2 s. c o m*/ fc.setFileSelectionMode(JFileChooser.DIRECTORIES_ONLY); fc.setCurrentDirectory(new File("./")); fc.setMultiSelectionEnabled(false); if (fc.showOpenDialog(this) == JFileChooser.APPROVE_OPTION) { String path = fc.getSelectedFile().getPath() + File.separator; JEPlusConfig.getDefaultInstance().setJESSClientDir(path); fc.setFileSelectionMode(JFileChooser.FILES_ONLY); fc.setDialogTitle(ori); } else { fc.setFileSelectionMode(JFileChooser.FILES_ONLY); fc.setDialogTitle(ori); return; } } // Launch JESS Client new Thread(new Runnable() { @Override public void run() { List<String> command = new ArrayList<>(); command.add("java"); command.add("-jar"); command.add("jess_client_v3.jar"); command.add(getProject().getBaseDir()); ProcessBuilder builder = new ProcessBuilder(command); builder.directory(new File(JEPlusConfig.getDefaultInstance().getJESSClientDir())); builder.redirectErrorStream(true); try { Process proc = builder.start(); // int ExitValue = proc.waitFor(); try (BufferedReader ins = new BufferedReader(new InputStreamReader(proc.getInputStream()))) { int res = ins.read(); while (res != -1) { res = ins.read(); } } } catch (IOException ex) { logger.error("Cannot run JESS_Client.", ex); } } }, "JESS_Client").start(); }
From source file:org.apache.asterix.aql.translator.AqlTranslator.java
private void handlePregelixStatement(AqlMetadataProvider metadataProvider, Statement stmt, IHyracksClientConnection hcc) throws AsterixException, Exception { RunStatement pregelixStmt = (RunStatement) stmt; boolean bActiveTxn = true; String dataverseNameFrom = getActiveDataverse(pregelixStmt.getDataverseNameFrom()); String dataverseNameTo = getActiveDataverse(pregelixStmt.getDataverseNameTo()); String datasetNameFrom = pregelixStmt.getDatasetNameFrom().getValue(); String datasetNameTo = pregelixStmt.getDatasetNameTo().getValue(); if (dataverseNameFrom != dataverseNameTo) { throw new AlgebricksException("Pregelix statements across different dataverses are not supported."); }/*from w ww. j a va 2 s.co m*/ MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction(); metadataProvider.setMetadataTxnContext(mdTxnCtx); MetadataLockManager.INSTANCE.pregelixBegin(dataverseNameFrom, datasetNameFrom, datasetNameTo); try { // construct input paths Index fromIndex = null; List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseNameFrom, pregelixStmt.getDatasetNameFrom().getValue()); for (Index ind : indexes) { if (ind.isPrimaryIndex()) fromIndex = ind; } if (fromIndex == null) { throw new AlgebricksException("Tried to access non-existing dataset: " + datasetNameFrom); } Dataset datasetFrom = MetadataManager.INSTANCE.getDataset(mdTxnCtx, dataverseNameFrom, datasetNameFrom); IFileSplitProvider fromSplits = metadataProvider.splitProviderAndPartitionConstraintsForDataset( dataverseNameFrom, datasetNameFrom, fromIndex.getIndexName(), datasetFrom.getDatasetDetails().isTemp()).first; StringBuilder fromSplitsPaths = new StringBuilder(); for (FileSplit f : fromSplits.getFileSplits()) { fromSplitsPaths .append("asterix://" + f.getNodeName() + f.getLocalFile().getFile().getAbsolutePath()); fromSplitsPaths.append(","); } fromSplitsPaths.setLength(fromSplitsPaths.length() - 1); // Construct output paths Index toIndex = null; indexes = 
MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseNameTo, pregelixStmt.getDatasetNameTo().getValue()); for (Index ind : indexes) { if (ind.isPrimaryIndex()) toIndex = ind; } if (toIndex == null) { throw new AlgebricksException("Tried to access non-existing dataset: " + datasetNameTo); } Dataset datasetTo = MetadataManager.INSTANCE.getDataset(mdTxnCtx, dataverseNameTo, datasetNameTo); IFileSplitProvider toSplits = metadataProvider.splitProviderAndPartitionConstraintsForDataset( dataverseNameTo, datasetNameTo, toIndex.getIndexName(), datasetTo.getDatasetDetails().isTemp()).first; StringBuilder toSplitsPaths = new StringBuilder(); for (FileSplit f : toSplits.getFileSplits()) { toSplitsPaths.append("asterix://" + f.getNodeName() + f.getLocalFile().getFile().getAbsolutePath()); toSplitsPaths.append(","); } toSplitsPaths.setLength(toSplitsPaths.length() - 1); try { Dataset toDataset = MetadataManager.INSTANCE.getDataset(mdTxnCtx, dataverseNameTo, datasetNameTo); DropStatement dropStmt = new DropStatement(new Identifier(dataverseNameTo), pregelixStmt.getDatasetNameTo(), true); this.handleDatasetDropStatement(metadataProvider, dropStmt, hcc); IDatasetDetailsDecl idd = new InternalDetailsDecl(toIndex.getKeyFieldNames(), false, null, toDataset.getDatasetDetails().isTemp()); DatasetDecl createToDataset = new DatasetDecl(new Identifier(dataverseNameTo), pregelixStmt.getDatasetNameTo(), new Identifier(toDataset.getItemTypeName()), new Identifier(toDataset.getNodeGroupName()), toDataset.getCompactionPolicy(), toDataset.getCompactionPolicyProperties(), toDataset.getHints(), toDataset.getDatasetType(), idd, false); this.handleCreateDatasetStatement(metadataProvider, createToDataset, hcc); } catch (Exception e) { e.printStackTrace(); throw new AlgebricksException("Error cleaning the result dataset. 
This should not happen."); } // Flush source dataset flushDataset(hcc, metadataProvider, mdTxnCtx, dataverseNameFrom, datasetNameFrom, fromIndex.getIndexName()); // call Pregelix String pregelix_home = System.getenv("PREGELIX_HOME"); if (pregelix_home == null) { throw new AlgebricksException("PREGELIX_HOME is not defined!"); } // construct command ArrayList<String> cmd = new ArrayList<String>(); cmd.add("bin/pregelix"); cmd.add(pregelixStmt.getParameters().get(0)); // jar cmd.add(pregelixStmt.getParameters().get(1)); // class for (String s : pregelixStmt.getParameters().get(2).split(" ")) { cmd.add(s); } cmd.add("-inputpaths"); cmd.add(fromSplitsPaths.toString()); cmd.add("-outputpath"); cmd.add(toSplitsPaths.toString()); StringBuilder command = new StringBuilder(); for (String s : cmd) { command.append(s); command.append(" "); } LOGGER.info("Running Pregelix Command: " + command.toString()); ProcessBuilder pb = new ProcessBuilder(cmd); pb.directory(new File(pregelix_home)); pb.redirectErrorStream(true); MetadataManager.INSTANCE.commitTransaction(mdTxnCtx); bActiveTxn = false; Process pr = pb.start(); int resultState = 0; BufferedReader in = new BufferedReader(new InputStreamReader(pr.getInputStream())); String line; while ((line = in.readLine()) != null) { System.out.println(line); if (line.contains("job finished")) { resultState = 1; } if (line.contains("Exception") || line.contains("Error")) { if (line.contains("Connection refused")) { throw new AlgebricksException( "The connection to your Pregelix cluster was refused. Is it running? Is the port in the query correct?"); } if (line.contains("Could not find or load main class")) { throw new AlgebricksException( "The main class of your Pregelix query was not found. Is the path to your .jar file correct?"); } if (line.contains("ClassNotFoundException")) { throw new AlgebricksException( "The vertex class of your Pregelix query was not found. Does it exist? 
Is the spelling correct?"); } if (line.contains("HyracksException")) { throw new AlgebricksException( "Something went wrong executing your Pregelix Job (HyracksException). Check the configuration of STORAGE_BUFFERCACHE_PAGESIZE and STORAGE_MEMORYCOMPONENT_PAGESIZE." + "It must match the one of Asterix. You can use managix describe -admin to find out the right configuration. " + "Check also if your datatypes in Pregelix and Asterix are matching."); } throw new AlgebricksException( "Something went wrong executing your Pregelix Job. Perhaps the Pregelix cluster needs to be restartet. " + "Check the following things: Are the datatypes of Asterix and Pregelix matching? " + "Is the server configuration correct (node names, buffer sizes, framesize)? Check the logfiles for more details."); } } pr.waitFor(); in.close(); if (resultState != 1) { throw new AlgebricksException( "Something went wrong executing your Pregelix Job. Perhaps the Pregelix cluster needs to be restartet. " + "Check the following things: Are the datatypes of Asterix and Pregelix matching? " + "Is the server configuration correct (node names, buffer sizes, framesize)? Check the logfiles for more details."); } } catch (Exception e) { if (bActiveTxn) { abort(e, e, mdTxnCtx); } throw e; } finally { MetadataLockManager.INSTANCE.pregelixEnd(dataverseNameFrom, datasetNameFrom, datasetNameTo); } }
From source file:edu.uci.ics.asterix.aql.translator.AqlTranslator.java
private void handlePregelixStatement(AqlMetadataProvider metadataProvider, Statement stmt, IHyracksClientConnection hcc) throws AsterixException, Exception { RunStatement pregelixStmt = (RunStatement) stmt; boolean bActiveTxn = true; String dataverseNameFrom = getActiveDataverse(pregelixStmt.getDataverseNameFrom()); String dataverseNameTo = getActiveDataverse(pregelixStmt.getDataverseNameTo()); String datasetNameFrom = pregelixStmt.getDatasetNameFrom().getValue(); String datasetNameTo = pregelixStmt.getDatasetNameTo().getValue(); if (dataverseNameFrom != dataverseNameTo) { throw new AlgebricksException("Pregelix statements across different dataverses are not supported."); }//from w w w. java 2s. c o m MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction(); metadataProvider.setMetadataTxnContext(mdTxnCtx); MetadataLockManager.INSTANCE.pregelixBegin(dataverseNameFrom, datasetNameFrom, datasetNameTo); try { // construct input paths Index fromIndex = null; List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseNameFrom, pregelixStmt.getDatasetNameFrom().getValue()); for (Index ind : indexes) { if (ind.isPrimaryIndex()) fromIndex = ind; } if (fromIndex == null) { throw new AlgebricksException("Tried to access non-existing dataset: " + datasetNameFrom); } Dataset datasetFrom = MetadataManager.INSTANCE.getDataset(mdTxnCtx, dataverseNameFrom, datasetNameFrom); IFileSplitProvider fromSplits = metadataProvider.splitProviderAndPartitionConstraintsForDataset( dataverseNameFrom, datasetNameFrom, fromIndex.getIndexName(), datasetFrom.getDatasetDetails().isTemp()).first; StringBuilder fromSplitsPaths = new StringBuilder(); for (FileSplit f : fromSplits.getFileSplits()) { fromSplitsPaths .append("asterix://" + f.getNodeName() + f.getLocalFile().getFile().getAbsolutePath()); fromSplitsPaths.append(","); } fromSplitsPaths.setLength(fromSplitsPaths.length() - 1); // Construct output paths Index toIndex = null; indexes = 
MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseNameTo, pregelixStmt.getDatasetNameTo().getValue()); for (Index ind : indexes) { if (ind.isPrimaryIndex()) toIndex = ind; } if (toIndex == null) { throw new AlgebricksException("Tried to access non-existing dataset: " + datasetNameTo); } Dataset datasetTo = MetadataManager.INSTANCE.getDataset(mdTxnCtx, dataverseNameTo, datasetNameTo); IFileSplitProvider toSplits = metadataProvider.splitProviderAndPartitionConstraintsForDataset( dataverseNameTo, datasetNameTo, toIndex.getIndexName(), datasetTo.getDatasetDetails().isTemp()).first; StringBuilder toSplitsPaths = new StringBuilder(); for (FileSplit f : toSplits.getFileSplits()) { toSplitsPaths.append("asterix://" + f.getNodeName() + f.getLocalFile().getFile().getAbsolutePath()); toSplitsPaths.append(","); } toSplitsPaths.setLength(toSplitsPaths.length() - 1); try { Dataset toDataset = MetadataManager.INSTANCE.getDataset(mdTxnCtx, dataverseNameTo, datasetNameTo); DropStatement dropStmt = new DropStatement(new Identifier(dataverseNameTo), pregelixStmt.getDatasetNameTo(), true); this.handleDatasetDropStatement(metadataProvider, dropStmt, hcc); IDatasetDetailsDecl idd = new InternalDetailsDecl( new Identifier(toDataset.getDatasetDetails().getNodeGroupName()), toIndex.getKeyFieldNames(), false, toDataset.getDatasetDetails().getCompactionPolicy(), toDataset.getDatasetDetails().getCompactionPolicyProperties(), null, toDataset.getDatasetDetails().isTemp()); DatasetDecl createToDataset = new DatasetDecl(new Identifier(dataverseNameTo), pregelixStmt.getDatasetNameTo(), new Identifier(toDataset.getItemTypeName()), toDataset.getHints(), toDataset.getDatasetType(), idd, false); this.handleCreateDatasetStatement(metadataProvider, createToDataset, hcc); } catch (Exception e) { e.printStackTrace(); throw new AlgebricksException("Error cleaning the result dataset. 
This should not happen."); } // Flush source dataset flushDataset(hcc, metadataProvider, mdTxnCtx, dataverseNameFrom, datasetNameFrom, fromIndex.getIndexName()); // call Pregelix String pregelix_home = System.getenv("PREGELIX_HOME"); if (pregelix_home == null) { throw new AlgebricksException("PREGELIX_HOME is not defined!"); } // construct command ArrayList<String> cmd = new ArrayList<String>(); cmd.add("bin/pregelix"); cmd.add(pregelixStmt.getParameters().get(0)); // jar cmd.add(pregelixStmt.getParameters().get(1)); // class for (String s : pregelixStmt.getParameters().get(2).split(" ")) { cmd.add(s); } cmd.add("-inputpaths"); cmd.add(fromSplitsPaths.toString()); cmd.add("-outputpath"); cmd.add(toSplitsPaths.toString()); StringBuilder command = new StringBuilder(); for (String s : cmd) { command.append(s); command.append(" "); } LOGGER.info("Running Pregelix Command: " + command.toString()); ProcessBuilder pb = new ProcessBuilder(cmd); pb.directory(new File(pregelix_home)); pb.redirectErrorStream(true); MetadataManager.INSTANCE.commitTransaction(mdTxnCtx); bActiveTxn = false; Process pr = pb.start(); int resultState = 0; BufferedReader in = new BufferedReader(new InputStreamReader(pr.getInputStream())); String line; while ((line = in.readLine()) != null) { System.out.println(line); if (line.contains("job finished")) { resultState = 1; } if (line.contains("Exception") || line.contains("Error")) { if (line.contains("Connection refused")) { throw new AlgebricksException( "The connection to your Pregelix cluster was refused. Is it running? Is the port in the query correct?"); } if (line.contains("Could not find or load main class")) { throw new AlgebricksException( "The main class of your Pregelix query was not found. Is the path to your .jar file correct?"); } if (line.contains("ClassNotFoundException")) { throw new AlgebricksException( "The vertex class of your Pregelix query was not found. Does it exist? 
Is the spelling correct?"); } if (line.contains("HyracksException")) { throw new AlgebricksException( "Something went wrong executing your Pregelix Job (HyracksException). Check the configuration of STORAGE_BUFFERCACHE_PAGESIZE and STORAGE_MEMORYCOMPONENT_PAGESIZE." + "It must match the one of Asterix. You can use managix describe -admin to find out the right configuration. " + "Check also if your datatypes in Pregelix and Asterix are matching."); } throw new AlgebricksException( "Something went wrong executing your Pregelix Job. Perhaps the Pregelix cluster needs to be restartet. " + "Check the following things: Are the datatypes of Asterix and Pregelix matching? " + "Is the server configuration correct (node names, buffer sizes, framesize)? Check the logfiles for more details."); } } pr.waitFor(); in.close(); if (resultState != 1) { throw new AlgebricksException( "Something went wrong executing your Pregelix Job. Perhaps the Pregelix cluster needs to be restartet. " + "Check the following things: Are the datatypes of Asterix and Pregelix matching? " + "Is the server configuration correct (node names, buffer sizes, framesize)? Check the logfiles for more details."); } } catch (Exception e) { if (bActiveTxn) { abort(e, e, mdTxnCtx); } throw e; } finally { MetadataLockManager.INSTANCE.pregelixEnd(dataverseNameFrom, datasetNameFrom, datasetNameTo); } }
From source file:org.apache.asterix.app.translator.QueryTranslator.java
protected void handlePregelixStatement(AqlMetadataProvider metadataProvider, Statement stmt, IHyracksClientConnection hcc) throws Exception { RunStatement pregelixStmt = (RunStatement) stmt; boolean bActiveTxn = true; String dataverseNameFrom = getActiveDataverse(pregelixStmt.getDataverseNameFrom()); String dataverseNameTo = getActiveDataverse(pregelixStmt.getDataverseNameTo()); String datasetNameFrom = pregelixStmt.getDatasetNameFrom().getValue(); String datasetNameTo = pregelixStmt.getDatasetNameTo().getValue(); MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction(); metadataProvider.setMetadataTxnContext(mdTxnCtx); List<String> readDataverses = new ArrayList<>(); readDataverses.add(dataverseNameFrom); List<String> readDatasets = new ArrayList<>(); readDatasets.add(datasetNameFrom);/*from ww w . j ava 2s . com*/ MetadataLockManager.INSTANCE.insertDeleteUpsertBegin(dataverseNameTo, datasetNameTo, readDataverses, readDatasets); try { prepareRunExternalRuntime(metadataProvider, hcc, pregelixStmt, dataverseNameFrom, dataverseNameTo, datasetNameFrom, datasetNameTo, mdTxnCtx); String pregelixHomeKey = "PREGELIX_HOME"; // Finds PREGELIX_HOME in system environment variables. String pregelixHome = System.getenv(pregelixHomeKey); // Finds PREGELIX_HOME in Java properties. if (pregelixHome == null) { pregelixHome = System.getProperty(pregelixHomeKey); } // Finds PREGELIX_HOME in AsterixDB configuration. if (pregelixHome == null) { // Since there is a default value for PREGELIX_HOME in AsterixCompilerProperties, // pregelixHome can never be null. pregelixHome = AsterixAppContextInfo.INSTANCE.getCompilerProperties().getPregelixHome(); } // Constructs the pregelix command line. 
List<String> cmd = constructPregelixCommand(pregelixStmt, dataverseNameFrom, datasetNameFrom, dataverseNameTo, datasetNameTo); ProcessBuilder pb = new ProcessBuilder(cmd); pb.directory(new File(pregelixHome)); pb.redirectErrorStream(true); MetadataManager.INSTANCE.commitTransaction(mdTxnCtx); bActiveTxn = false; // Executes the Pregelix command. int resultState = executeExternalShellProgram(pb); // Checks the return state of the external Pregelix command. if (resultState != 0) { throw new AlgebricksException( "Something went wrong executing your Pregelix Job. Perhaps the Pregelix cluster " + "needs to be restarted. " + "Check the following things: Are the datatypes of Asterix and Pregelix matching? " + "Is the server configuration correct (node names, buffer sizes, framesize)? " + "Check the logfiles for more details."); } } catch (Exception e) { if (bActiveTxn) { abort(e, e, mdTxnCtx); } throw e; } finally { MetadataLockManager.INSTANCE.insertDeleteUpsertEnd(dataverseNameTo, datasetNameTo, readDataverses, readDatasets); } }
From source file:revaligner.service.FileAligner.java
public void createAlignedXML_auto(String prjid) throws Exception { System.out.println("creating aligned xml with nbAligner...."); this.alignedfile = (this.prjfolder + File.separator + "rev_aligned.xml"); this.reformattedtargetmapfile = (this.prjfolder + File.separator + "target_reformatted" + File.separator + ".mp"); HashMap<String, String> srcidmap = new HashMap(); this.nbalignerfolder = (this.prjfolder + File.separator + "nbaligner"); if (!new File(this.nbalignerfolder).exists()) { new File(this.nbalignerfolder).mkdir(); }/*w w w . j a v a 2 s.c om*/ FileUtils.cleanDirectory(new File(this.nbalignerfolder)); String nbsourcefolder = this.nbalignerfolder + File.separator + this.sourcelanguage; new File(nbsourcefolder).mkdir(); org.dom4j.Document nbsource = DocumentHelper.createDocument(); org.dom4j.Element root_src = nbsource.addElement("txml"); root_src.addAttribute("locale", this.sourcelanguage); root_src.addAttribute("version", "1.0"); root_src.addAttribute("segtype", "sentence"); org.dom4j.Element translatable_src = root_src.addElement("translatable"); translatable_src.addAttribute("blockId", "1"); String nbtargetfolder = this.nbalignerfolder + File.separator + this.targetlanguage; new File(nbtargetfolder).mkdir(); org.dom4j.Document nbtarget = DocumentHelper.createDocument(); org.dom4j.Element root_trg = nbtarget.addElement("txml"); root_trg.addAttribute("locale", this.targetlanguage); root_trg.addAttribute("version", "1.0"); root_trg.addAttribute("segtype", "sentence"); org.dom4j.Element translatable_trg = root_trg.addElement("translatable"); translatable_trg.addAttribute("blockId", "0"); if (new File(this.alignedfile).exists()) { new File(this.alignedfile).delete(); } SegmenterFactory factory = new SegmenterFactory(); Configuration segconfig = createConfigForSegmenter(false, this.sourcelanguage); Segmenter segmenter = factory.getSegmenter("trados", Locale.makeLocale(this.sourcelanguage), segconfig); org.dom4j.Document document = 
DocumentHelper.createDocument(); org.dom4j.Element root = document.addElement("alinger"); org.dom4j.Element head = root.addElement("head"); head.addAttribute("src_lang", this.sourcelanguage); head.addAttribute("trg_lang", this.targetlanguage); head.addAttribute("creator", this.creatorid); org.dom4j.Element aligned = root.addElement("aligned"); org.dom4j.Element orphans = root.addElement("orphans"); org.dom4j.Document document_source_formatted_nonSeg = XmlParser .parseXmlFile(this.reformattedsourcetxlf_nonSeg); org.dom4j.Element root_source_formatted_nonSeg = document_source_formatted_nonSeg.getRootElement(); List list_source_formatted_nonSeg = root_source_formatted_nonSeg.selectNodes("//*[name() = 'trans-unit']"); org.dom4j.Document document_target_nonSeg = XmlParser.parseXmlFile(this.reformattedtargettxlf_nonSeg); org.dom4j.Element root_target_nonSeg = document_target_nonSeg.getRootElement(); List list_target_nonSeg = root_target_nonSeg.selectNodes("//*[name() = 'trans-unit']"); org.dom4j.Document document_target_seg = XmlParser.parseXmlFile(this.reformattedtargettxlf_seg); org.dom4j.Element root_target_seg = document_target_seg.getRootElement(); List list_target_seg = root_target_seg.selectNodes("//*[name() = 'group'][@restype = 'x-paragraph']"); ExtractionSupportImpl extractionSupportImpl_src = new ExtractionSupportImpl( Locale.makeLocale(this.sourcelanguage), Locale.makeLocale(this.sourcelanguage)); Configuration config_src = new BaseConfiguration(); config_src.setProperty("extraction.tokens.extract", "all"); extractionSupportImpl_src.setConfiguration(config_src); ExtractionSupportImpl extractionSupportImpl_trg = new ExtractionSupportImpl( Locale.makeLocale(this.targetlanguage), Locale.makeLocale(this.targetlanguage)); Configuration config_trg = new BaseConfiguration(); config_trg.setProperty("extraction.tokens.extract", "all"); extractionSupportImpl_trg.setConfiguration(config_trg); boolean issrcfirsthf = true; boolean istrgfirsthf = true; int gcount = -1; int 
segmentId = 0; for (int i = 0; i < list_source_formatted_nonSeg.size(); i++) { org.dom4j.Element src_txlf = ((org.dom4j.Element) list_source_formatted_nonSeg.get(i)) .element("source"); String merged_text = getTxlfElementText_withFakeTC(src_txlf); if (extractionSupportImpl_src .isExtractable(merged_text.replace("&paradel;", "").replace("&parains;", ""))) { gcount++; org.dom4j.Element group = aligned.addElement("group"); group.addAttribute("id", Integer.toString(gcount)); merged_text = trimText(merged_text, true)[0]; org.dom4j.Element merged_src_text = group.addElement("text"); merged_src_text.setText(merged_text.replace("&paradel;", "").replace("&parains;", "")); String[] split_merged_text = merged_text.replaceAll("(&paradel;)+", "&paradel;") .replaceAll("^&paradel;", "").replaceAll("&paradel;$", "").split("&paradel;"); List<String> segmentsGroup = segmentStringWithRevs( merged_text.replaceAll("(&paradel;)+", "&paradel;").replace("&parains;", ""), this.sourcelanguage); List<List<String>> resegmentedGroup = new ArrayList(); resegmentedGroup.add(new ArrayList()); int idx = 0; String orgs; String[] newsegs; for (int s = 0; s < segmentsGroup.size(); s++) { orgs = (String) segmentsGroup.get(s); if (orgs.contains("&paradel;")) { newsegs = orgs.split("&paradel;"); for (int ss = 0; ss < newsegs.length; ss++) { String sss = newsegs[ss]; if (!sss.trim().equals("")) { ((List) resegmentedGroup.get(idx)).add(fixMissingTags(sss)); } if ((((List) resegmentedGroup.get(idx)).size() != 0) && (ss != newsegs.length - 1)) { resegmentedGroup.add(new ArrayList()); idx++; } } if (orgs.trim().endsWith("&paradel;")) { resegmentedGroup.add(new ArrayList()); idx++; } } else { ((List) resegmentedGroup.get(idx)).add(fixMissingTags(orgs)); } } if (split_merged_text.length > resegmentedGroup.size()) { System.out.println(i); System.out.println("merged_text: " + merged_text); for (String smt : split_merged_text) { System.out.println("split_merged_text: " + smt); } for (List<String> smts : 
resegmentedGroup) { System.out.println("resegmentedGroup: " + smts); } for (String smtss : segmentsGroup) { System.out.println("segmentedGroup: " + smtss); } } for (int j = 0; j < split_merged_text.length; j++) { if (!split_merged_text[j].replaceAll("<(/)*ins>|<(/)*del>", "").trim().equals("")) { split_merged_text[j] = fixMissingTags(split_merged_text[j]); Element unit = group.addElement("unit"); unit.addAttribute("id", Integer.toString(j)); unit.addAttribute("alignsegs", "false"); Element src = unit.addElement("src_para"); org.dom4j.Element src_text = src.addElement("text"); boolean isAddedPara = split_merged_text[j].contains("&parains;"); src.addAttribute("added", "" + isAddedPara); String[] trim_result = trimText(split_merged_text[j].replace("&parains;", ""), false); src.addAttribute("lefttrim", trim_result[1]); src.addAttribute("righttrim", trim_result[2]); split_merged_text[j] = trim_result[0]; int src_tctype_para = TrackChangeHelper.getTrackChangeType(split_merged_text[j]); src.addAttribute("tctype", TrackChangeType.getName(src_tctype_para)); String rejected_src = split_merged_text[j].replaceAll("(?s)<ins>.*?</ins>", "") .replace("<del>", "").replace("</del>", ""); if (!extractionSupportImpl_src.isExtractable(rejected_src)) { unit.addAttribute("locked", "true"); } else { unit.addAttribute("locked", "false"); } src_text.setText(split_merged_text[j]); org.dom4j.Element src_segs = src.addElement("segments"); List<String> segments = (List) resegmentedGroup.get(j); for (int z = 0; z < segments.size(); z++) { String segment_text = trimText((String) segments.get(z), false)[0]; org.dom4j.Element src_seg = src_segs.addElement("src_seg"); src_seg.addAttribute("id", Integer.toString(z)); src_seg.addAttribute("needreview", "false"); src_seg.addAttribute("ignored", "false"); int tctype_seg = TrackChangeHelper.getTrackChangeType(segment_text); src_seg.addAttribute("tctype", TrackChangeType.getName(tctype_seg)); String accepted_t = 
segment_text.replaceAll("(?s)<del>.*?</del>", "") .replace("<ins>", "").replace("</ins>", ""); src_seg.addAttribute("isExtractable", Boolean.toString(extractionSupportImpl_src.isExtractable(accepted_t))); String rejected_s = segment_text.replaceAll("(?s)<ins>.*?</ins>", "") .replace("<del>", "").replace("</del>", ""); if (!extractionSupportImpl_src.isExtractable(rejected_s)) { src_seg.addAttribute("locked", "true"); } else { src_seg.addAttribute("locked", "false"); org.dom4j.Element segment_src = translatable_src.addElement("segment"); segment_src.addAttribute("segmentId", Integer.toString(segmentId)); srcidmap.put(i + " - " + j + " - " + z, Integer.toString(segmentId)); segmentId++; segment_src.addElement("source").setText(rejected_s); } src_seg.setText(segment_text); } } } } } segmentId = 0; for (int i = 0; i < list_target_nonSeg.size(); i++) { org.dom4j.Element trg_txlf = ((org.dom4j.Element) list_target_nonSeg.get(i)).element("source"); String trg_formatted_text = getTxlfElementText_normal(trg_txlf); List<String> trgsegs = segmentStringWithRevs(trg_formatted_text, this.targetlanguage); for (int j = 0; j < trgsegs.size(); j++) { String trgseg = ((String) trgsegs.get(j)).trim().replaceAll("(\\s)+", " "); if (extractionSupportImpl_trg.isExtractable(trgseg)) { org.dom4j.Element segment_trg = translatable_trg.addElement("segment"); segment_trg.addAttribute("segmentId", Integer.toString(segmentId)); segmentId++; segment_trg.addElement("source").setText(trgseg); } } } OutputStreamWriter writer = new OutputStreamWriter( new BufferedOutputStream( new FileOutputStream(nbsourcefolder + File.separator + this.sourcelanguage + ".txml")), "UTF8"); nbsource.write(writer); writer.close(); writer = new OutputStreamWriter( new BufferedOutputStream( new FileOutputStream(nbtargetfolder + File.separator + this.targetlanguage + ".txml")), "UTF8"); nbtarget.write(writer); writer.close(); String pahtexe = "\\\\10.2.50.190\\AutoAlignerCLI\\AutoAlignerCLI.exe"; ProcessBuilder pb = new 
ProcessBuilder( new String[] { pahtexe, "-i", this.nbalignerfolder, "-o", this.nbalignerfolder, "-lang_pairs", this.sourcelanguage + "_" + this.targetlanguage, "-lang_detect", "normal", "-identicals", "-match_filenames", "-txml_or_xmx_output", "-docnames_output", "-disallow_src_merging" }); pb.redirectErrorStream(true); Process p = pb.start(); InputStreamReader isr = new InputStreamReader(p.getInputStream()); BufferedReader br = new BufferedReader(isr); boolean sentESTTime = false; boolean alignstart = false; String lineRead; while ((lineRead = br.readLine()) != null) { System.out.println(lineRead); if (lineRead.contains("Aligning...")) { alignstart = true; } else { if ((lineRead.contains("Estimated Time to Completion:")) && (alignstart)) { this.estimateNBAlignerCompTime = lineRead.replace("Estimated Time to Completion: ", "") .replace(" Minute(s)", ""); } if ((!this.estimateNBAlignerCompTime.equals("")) && (!sentESTTime)) { sentESTTime = true; try { int minutes = 200 + Integer.parseInt(this.estimateNBAlignerCompTime); setAlignProgress(prjid, minutes); this.estimateNBAlignerCompTime = ""; } catch (Exception ex) { ex.printStackTrace(); } } } } p.waitFor(); for (File file : new File(this.nbalignerfolder).listFiles()) { if (file.getName().endsWith(".zip")) { UnzipFile.UnZipIt(file.getAbsolutePath(), this.nbalignerfolder); } } String alignedtxml = ""; for (File file : new File(this.nbalignerfolder).listFiles()) { if (file.getName().endsWith(".txml")) { alignedtxml = file.getAbsolutePath(); } } if (alignedtxml.equals("")) { throw new Exception("file didn't aligned by nbaligner"); } HashMap<String, String[]> alignedtrgs = new HashMap(); List<String[]> missingtrgs = new ArrayList(); int src_idx = -1; org.dom4j.Document alignedtxmldoc = XmlParser.parseXmlFile(alignedtxml); org.dom4j.Element root_alignedtxmldoc = alignedtxmldoc.getRootElement(); for (int i = 0; i < root_alignedtxmldoc.elements("translatable").size(); i++) { org.dom4j.Element translatable = 
(org.dom4j.Element) root_alignedtxmldoc.elements("translatable") .get(i); for (int j = 0; j < translatable.elements("segment").size(); j++) { org.dom4j.Element segment = (org.dom4j.Element) translatable.elements("segment").get(j); org.dom4j.Element source = segment.element("source"); org.dom4j.Element target = segment.element("target"); if ((source != null) && (!source.getTextTrim().equals(""))) { src_idx++; if ((target != null) && (!target.getTextTrim().equals(""))) { String matchscore = target.attributeValue("score"); int trg_idx = Integer.parseInt(target.attributeValue("sent_no")); if (matchscore.equals("0")) { alignedtrgs.put(Integer.toString(src_idx), new String[] { target.getTextTrim(), "1", Integer.toString(trg_idx) }); } else if (target.attribute("original_segments_count") != null) { int merged_cnt = Integer.parseInt(target.attributeValue("original_segments_count")); String trg_idx_str = Integer.toString(trg_idx) + " - " + Integer.toString(trg_idx + merged_cnt - 1); alignedtrgs.put(Integer.toString(src_idx), new String[] { target.getTextTrim(), matchscore, trg_idx_str }); } else { alignedtrgs.put(Integer.toString(src_idx), new String[] { target.getTextTrim(), matchscore, Integer.toString(trg_idx) }); } } } else if ((target != null) && (!target.getTextTrim().equals(""))) { String matchscore = target.attributeValue("score"); int trg_idx = Integer.parseInt(target.attributeValue("sent_no")); missingtrgs.add(new String[] { target.getTextTrim(), Integer.toString(trg_idx) }); } } } int null_idx = 0; List<org.dom4j.Element> groups = aligned.elements("group"); for (int i = 0; i < groups.size(); i++) { org.dom4j.Element group = (org.dom4j.Element) groups.get(i); List<org.dom4j.Element> units = group.elements("unit"); for (int j = 0; j < units.size(); j++) { org.dom4j.Element unit = (org.dom4j.Element) units.get(j); org.dom4j.Element src_para = unit.element("src_para"); org.dom4j.Element src_para_segs = src_para.element("segments"); org.dom4j.Element trg_para = 
unit.addElement("trg_para"); org.dom4j.Element trg_para_segs = trg_para.addElement("segments"); List<org.dom4j.Element> src_segs = src_para_segs.elements("src_seg"); for (int z = 0; z < src_segs.size(); z++) { org.dom4j.Element src_seg = (org.dom4j.Element) src_segs.get(z); org.dom4j.Element trg_seg = trg_para_segs.addElement("trg_seg"); String mapid = Integer.toString(i) + " - " + Integer.toString(j) + " - " + Integer.toString(z); trg_seg.addAttribute("edited", "false"); String trgsegtext = ""; if (srcidmap.containsKey(mapid)) { String sourceidintxml = (String) srcidmap.get(mapid); if (alignedtrgs.containsKey(sourceidintxml)) { src_seg.addAttribute("locked", "true"); trgsegtext = ((String[]) alignedtrgs.get(sourceidintxml))[0]; String score = ((String[]) alignedtrgs.get(sourceidintxml))[1]; String targetidintxml = ((String[]) alignedtrgs.get(sourceidintxml))[2]; if (Integer.parseInt(score) < needreviewthreshhold) { src_seg.addAttribute("needreview", "true"); } trg_seg.addAttribute("id", targetidintxml); trg_seg.addAttribute("isExtractable", Boolean.toString(extractionSupportImpl_trg.isExtractable(trgsegtext))); } else { trg_seg.addAttribute("id", "n - " + null_idx); null_idx++; trg_seg.addAttribute("isExtractable", "false"); } } else { trg_seg.addAttribute("id", "n - " + null_idx); null_idx++; trg_seg.addAttribute("isExtractable", "false"); } trg_seg.setText(trgsegtext); } } } org.dom4j.Element orp_unit = orphans.addElement("unit"); orp_unit.addAttribute("id", "0"); org.dom4j.Element orp_trg_para = orp_unit.addElement("trg_para"); org.dom4j.Element orp_segments = orp_trg_para.addElement("segments"); for (int i = 0; i < missingtrgs.size(); i++) { String orptrgtext = ((String[]) missingtrgs.get(i))[0]; String orptrgid = ((String[]) missingtrgs.get(i))[1]; org.dom4j.Element orp_trg_seg = orp_segments.addElement("trg_seg"); orp_trg_seg.addAttribute("id", orptrgid); orp_trg_seg.addAttribute("edited", "false"); orp_trg_seg.addAttribute("isExtractable", 
Boolean.toString(extractionSupportImpl_trg.isExtractable(orptrgtext))); orp_trg_seg.setText(orptrgtext); } OutputStreamWriter oswriter = new OutputStreamWriter( new BufferedOutputStream(new FileOutputStream(this.alignedfile)), "UTF8"); document.write(oswriter); oswriter.close(); }
From source file:com.peterbochs.PeterBochsDebugger.java
private void startBochs() { try {//from w ww.j a v a2 s .c o m this.enableAllButtons(true, false); runBochsButton.setText(MyLanguage.getString("Run_bochs")); runBochsButton.setToolTipText("Start emulation"); runBochsButton.setIcon(new ImageIcon(getClass().getClassLoader() .getResource("com/peterbochs/icons/famfam_icons/resultset_next.png"))); if (p != null) { p.destroy(); } ProcessBuilder pb; if (arguments.length == 0) { pb = new ProcessBuilder("bochs", "-q"); } else { pb = new ProcessBuilder(arguments); } pb.redirectErrorStream(true); p = pb.start(); InputStream is = p.getInputStream(); commandReceiver = new CommandReceiver(is, this); new Thread(commandReceiver, "commandReceiver thread").start(); commandOutputStream = new BufferedWriter(new OutputStreamWriter(p.getOutputStream())); // if (isLinux) { // sendCommand("6"); // } Date date1 = new Date(); while (commandReceiver.getLinesLength() < 9) { Thread.currentThread(); Thread.sleep(100); if (new Date().getTime() - date1.getTime() > 4000) { break; } } String versionLines[] = commandReceiver.getCommandResultUntilEnd().split("\n"); for (String line : versionLines) { if (line.contains("Bochs x86 Emulator")) { version = line.trim(); jBochsVersionLabel.setText(version + " "); } if (line.contains("Peter-bochs instrument")) { if (Setting.getInstance().isMemoryProfiling()) { if (Global.debug) { System.out.println("Memory profiling port " + Global.profilingMemoryPort); } MemorySocketServerController.start(Global.profilingMemoryPort, null); } if (Setting.getInstance().isJmpProfiling()) { if (Global.debug) { System.out.println("Jump profiling port " + Global.profilingJmpPort); } JmpSocketServerController.start(Global.profilingJmpPort, jInstrumentPanel.getJmpTableModel()); } if (Setting.getInstance().isInterruptProfiling()) { if (Global.debug) { System.out.println("Interrupt profiling port " + Global.profilingInterruptPort); } InterruptSocketServerController.start(Global.profilingInterruptPort); } } } } catch (Exception ex) { 
JOptionPane.showMessageDialog(this, MyLanguage.getString("Unable_to_start_bochs") + "\n" + MyLanguage.getString("Tips_you_specified_a_wrong_path_of_bochs")); ex.printStackTrace(); System.exit(1); } }
From source file:org.simmi.GeneSetHead.java
License:asdf
/**
 * BLASTs the given FASTA text (protein queries) against the current gene set.
 *
 * Pipeline, as visible in the body: (1) spawns "makeblastdb -dbtype prot"
 * (resolved from /usr/local/bin on mac, from PATH otherwise) and streams the
 * protein sequences of genes without a tag to its stdin, building a temporary
 * database named "tmp"; (2) spawns "blastp" with the query on stdin
 * ("-query -") against that database, using the given e-value cutoff and all
 * available processors; (3) parses the plain-text report — for every hit line
 * starting with "&gt; " it looks the id up in geneset.genemap, selects the
 * matching row in the group table or gene table, and hands the hit's
 * alignment text to {@code rr.run(...)}; finally destroys the process and
 * calls {@code rr.run("close")}.
 *
 * @param fasta     FASTA-formatted query text, written verbatim to blastp stdin
 * @param evaluestr e-value cutoff passed to blastp via "-evalue"
 * @param ids       if true, database sequences are named by gene id only;
 *                  otherwise by gene name plus COG/CAZy decorations and "[id]"
 * @param rr        callback receiving one alignment chunk per hit, then
 *                  "close"; may be null
 * @param x         unused in the current body — the blastx selection that
 *                  would have used it is commented out
 *
 * NOTE(review): the makeblastdb stdout drainer is invoked with .run(), not
 * .start(), so it executes synchronously on the calling thread — presumably
 * intentional (it waits for database construction output); confirm.
 * NOTE(review): exceptions inside the spawned threads are only printed;
 * callers get no failure signal beyond the missing "close".
 */
public void doBlast(final String fasta, final String evaluestr, final boolean ids, final RunnableResult rr, boolean x) { /*File blastn;//from ww w.j av a 2 s. c o m File blastp; File makeblastdb; File blastx = new File( "c:\\\\Program files\\NCBI\\blast-2.2.29+\\bin\\blastx.exe" ); if( !blastx.exists() ) { blastx = new File( "/opt/ncbi-blast-2.2.29+/bin/blastx" ); if( !blastx.exists() ) { blastx = new File( "/usr/local/ncbi/blast/bin/blastx" ); blastn = new File( "/usr/local/ncbi/blast/bin/blastn" ); blastp = new File( "/usr/local/ncbi/blast/bin/blastp" ); makeblastdb = new File( "/usr/local/ncbi/blast/bin/makeblastdb" ); } else { blastn = new File( "/opt/ncbi-blast-2.2.29+/bin/blastn" ); blastp = new File( "/opt/ncbi-blast-2.2.29+/bin/blastp" ); makeblastdb = new File( "/opt/ncbi-blast-2.2.29+/bin/makeblastdb" ); } } else { blastn = new File( "c:\\\\Program files\\NCBI\\blast-2.2.29+\\bin\\blastn.exe" ); blastp = new File( "c:\\\\Program files\\NCBI\\blast-2.2.29+\\bin\\blastp.exe" ); makeblastdb = new File( "c:\\\\Program files\\NCBI\\blast-2.2.29+\\bin\\makeblastdb.exe" ); }*/ String OS = System.getProperty("os.name").toLowerCase(); int procs = Runtime.getRuntime().availableProcessors(); String[] mcmds = { OS.indexOf("mac") >= 0 ?
"/usr/local/bin/makeblastdb" : "makeblastdb", "-dbtype", "prot", "-title", "tmp", "-out", "tmp" }; List<String> lcmd = new ArrayList<String>(Arrays.asList(mcmds)); final ProcessBuilder mpb = new ProcessBuilder(lcmd); mpb.redirectErrorStream(true); try { final Process mp = mpb.start(); new Thread() { public void run() { try { OutputStream pos = mp.getOutputStream(); Writer ow = new OutputStreamWriter(pos); for (Gene g : geneset.genelist) { if (g.getTag() == null || g.getTag().length() == 0) { GeneGroup gg = g.getGeneGroup(); if (gg != null) { String name; if (ids) name = g.id; else { String addstr = ""; Cog cog = gg.getCog(geneset.cogmap); String cazy = gg.getCommonCazy(geneset.cazymap); if (cog != null) addstr += "_" + cog.id; if (cazy != null) { if (addstr.length() > 0) addstr += cazy; addstr += "_" + cazy; } if (addstr.length() > 0) addstr += "_"; name = g.name + addstr + "[" + g.id + "]"; //pos.write( (">" + g.name + addstr + "[" + g.id + "]\n").getBytes() ); } Sequence sb = g.tegeval.getProteinSequence(); sb.setName(name); sb.writeSequence(ow); /*for( int i = 0; i < sb.length(); i+=70 ) { pos.write( sb.substring(i, Math.min( sb.length(), i+70) ).getBytes() ); } pos.write( '\n' );*/ } } } ow.close(); pos.close(); } catch (IOException e) { e.printStackTrace(); } } }.start(); new Thread() { public void run() { try { InputStream pin = mp.getInputStream(); InputStreamReader rdr = new InputStreamReader(pin); //FileReader fr = new FileReader( new File("c:/dot.blastout") ); BufferedReader br = new BufferedReader(rdr); String line = br.readLine(); while (line != null) { System.out.println(line); line = br.readLine(); } pin.close(); } catch (IOException e) { e.printStackTrace(); } } }.run(); //File blastFile = x ? blastx : blastp; //dbType.equals("prot") ? type.equals("prot") ? blastp : blastx : blastn; String[] cmds = { OS.indexOf("mac") >= 0 ?
"/usr/local/bin/blastp" : "blastp", "-query", "-", "-db", "tmp", "-evalue", evaluestr, "-num_threads", Integer.toString(procs) }; lcmd = new ArrayList<String>(Arrays.asList(cmds)); //String[] exts = extrapar.trim().split("[\t ]+"); ProcessBuilder pb = new ProcessBuilder(lcmd); pb.redirectErrorStream(true); final Process p = pb.start(); final Thread t = new Thread() { public void run() { try { OutputStream pos = p.getOutputStream(); pos.write(fasta.getBytes()); pos.close(); } catch (IOException e) { e.printStackTrace(); } } }; t.start(); final Thread t2 = new Thread() { public void run() { try { System.err.println("WHY NOT"); InputStreamReader rdr = new InputStreamReader(p.getInputStream()); //FileReader fr = new FileReader( new File("c:/dot.blastout") ); String res = ""; BufferedReader br = new BufferedReader(rdr); String line = br.readLine(); while (line != null) { //System.err.println( line ); if (line.startsWith("> ")) { int i = line.indexOf(' ', 2); if (i == -1) i = line.length(); String id = line.substring(2, i); Gene g = geneset.genemap.get(id); if (g != null) { if (!isGeneview()) { /*i = geneset.allgenegroups.indexOf( g.getGeneGroup() ); if( i != -1 && i < table.getRowCount() ) { int r = table.convertRowIndexToView( i ); table.addRowSelectionInterval(r, r); }*/ table.getSelectionModel().select(g.getGeneGroup()); } else { /*i = geneset.genelist.indexOf( g ); if( i != -1 && i < table.getRowCount() ) { int r = table.convertRowIndexToView( i ); table.addRowSelectionInterval(r, r); }*/ gtable.getSelectionModel().select(g); } } String stuff = line + "\n"; line = br.readLine(); while (line != null && !line.startsWith("Query=") && !line.startsWith("> ")) { stuff += line + "\n"; line = br.readLine(); } if (rr != null) { rr.run(stuff); //res += line+"\n"; } } else line = br.readLine(); } br.close(); //System.err.println("wn done"); p.destroy(); if (rr != null) rr.run("close"); /*if( rr != null ) { rr.run( res ); }*/ } catch (IOException e) { e.printStackTrace(); } }
}; t2.start(); //fr.close(); } catch (IOException e2) { e2.printStackTrace(); } }
From source file:org.simmi.GeneSetHead.java
License:asdf
/**
 * BLASTs the given FASTA text (nucleotide queries — the parsing below expects
 * query names containing "CRISPR", i.e. spacer sequences) against the contigs
 * of the current gene set, then builds phage/spacer summary tables.
 *
 * Pipeline, as visible in the body: (1) "makeblastdb -dbtype nucl" is fed
 * every contig sequence (named by id or display name per {@code ids});
 * (2) "blastn" runs with the query on stdin — strict settings
 * ("-dust no -perc_identity 99 -word_size 21") when {@code show} is false,
 * defaults when true; (3) the plain-text report is parsed: each subject
 * contig's matched coordinate range is mapped via binary search to the
 * nearest Annotation, hits adjacent to annotations whose type contains
 * "ummer" are discarded, and phage-designated annotations are tallied per
 * species into the tvp/tmr/tph maps; matching gene groups/genes are selected
 * in the UI on the JavaFX thread via Platform.runLater; (4) the tallies are
 * written to an XLSX workbook, opened with Desktop, shown in a Swing
 * JTextArea frame, and also dumped to a text file; {@code rr.run("close")}
 * is invoked at the very end if {@code rr} is non-null.
 *
 * @param fasta     FASTA query text, written verbatim to blastn stdin
 * @param evaluestr e-value cutoff passed to blastn via "-evalue"
 * @param ids       if true contigs enter the database named by id, else by name
 * @param rr        callback; only rr.run("close") is invoked in this method
 * @param show      true selects the relaxed blastn command line, false the
 *                  strict high-identity one
 *
 * NOTE(review): output paths "/Users/sigmar/phage.xlsx" and
 * "/Users/sigmar/file.txt" are hard-coded to a developer machine.
 * NOTE(review): the makeblastdb stdout drainer uses .run() (synchronous on
 * the calling thread), not .start().
 * NOTE(review): the java2s watermark pasted inside the leading commented-out
 * block terminates that block comment early, so this block as pasted does not
 * compile; the code is preserved byte-for-byte below.
 */
public void doBlastn(final String fasta, final String evaluestr, final boolean ids, final RunnableResult rr, boolean show) { /*File blastn;/*from w w w .j av a 2 s. c om*/ File blastp; File makeblastdb; File blastx = new File( "c:\\\\Program files\\NCBI\\blast-2.2.29+\\bin\\blastx.exe" ); if( !blastx.exists() ) { blastx = new File( "/opt/ncbi-blast-2.2.29+/bin/blastx" ); if( !blastx.exists() ) { blastx = new File( "/usr/local/ncbi/blast/bin/blastx" ); blastn = new File( "/usr/local/ncbi/blast/bin/blastn" ); blastp = new File( "/usr/local/ncbi/blast/bin/blastp" ); makeblastdb = new File( "/usr/local/ncbi/blast/bin/makeblastdb" ); } else { blastn = new File( "/opt/ncbi-blast-2.2.29+/bin/blastn" ); blastp = new File( "/opt/ncbi-blast-2.2.29+/bin/blastp" ); makeblastdb = new File( "/opt/ncbi-blast-2.2.29+/bin/makeblastdb" ); } } else { blastn = new File( "c:\\\\Program files\\NCBI\\blast-2.2.29+\\bin\\blastn.exe" ); blastp = new File( "c:\\\\Program files\\NCBI\\blast-2.2.29+\\bin\\blastp.exe" ); makeblastdb = new File( "c:\\\\Program files\\NCBI\\blast-2.2.29+\\bin\\makeblastdb.exe" ); }*/ int procs = Runtime.getRuntime().availableProcessors(); String[] mcmds = { "makeblastdb", "-dbtype", "nucl", "-title", "tmp", "-out", "tmp" }; List<String> lcmd = new ArrayList<String>(Arrays.asList(mcmds)); final ProcessBuilder mpb = new ProcessBuilder(lcmd); mpb.redirectErrorStream(true); try { final Process mp = mpb.start(); new Thread() { public void run() { try { OutputStream pos = mp.getOutputStream(); for (String cname : geneset.contigmap.keySet()) { Sequence c = geneset.contigmap.get(cname); if (ids) pos.write((">" + c.id + "\n").getBytes()); else { pos.write((">" + c.getName() + "\n").getBytes()); } StringBuilder sb = c.getStringBuilder(); for (int i = 0; i < sb.length(); i += 70) { pos.write(sb.substring(i, Math.min(sb.length(), i + 70)).getBytes()); } pos.write('\n'); } pos.close(); } catch (IOException e) { e.printStackTrace(); } } }.start(); new Thread() { public void
run() { try { InputStream pin = mp.getInputStream(); InputStreamReader rdr = new InputStreamReader(pin); //FileReader fr = new FileReader( new File("c:/dot.blastout") ); BufferedReader br = new BufferedReader(rdr); String line = br.readLine(); while (line != null) { System.out.println(line); line = br.readLine(); } pin.close(); } catch (IOException e) { e.printStackTrace(); } } }.run(); //File blastFile = blastn; //dbType.equals("prot") ? type.equals("prot") ? blastp : blastx : blastn; String[] cmds1 = { "blastn", "-dust", "no", "-perc_identity", "99", "-word_size", "21", "-query", "-", "-db", "tmp", "-evalue", evaluestr, "-num_threads", Integer.toString(procs) }; String[] cmds2 = { "blastn", "-query", "-", "-db", "tmp", "-evalue", evaluestr, "-num_threads", Integer.toString(procs) }; String[] cmds = show ? cmds2 : cmds1; lcmd = new ArrayList<String>(Arrays.asList(cmds)); //String[] exts = extrapar.trim().split("[\t ]+"); ProcessBuilder pb = new ProcessBuilder(lcmd); pb.redirectErrorStream(true); final Process p = pb.start(); final Thread t = new Thread() { public void run() { try { OutputStream pos = p.getOutputStream(); pos.write(fasta.getBytes()); pos.close(); } catch (IOException e) { e.printStackTrace(); } } }; t.start(); Map<String, Set<String>> tph = new HashMap<String, Set<String>>(); Map<String, Map<String, String>> tvp = new HashMap<String, Map<String, String>>(); Map<String, Map<String, String>> tmr = new HashMap<String, Map<String, String>>(); Map<String, Integer> specindex = new LinkedHashMap<String, Integer>(); Map<String, Integer> phindex = new LinkedHashMap<String, Integer>(); /*final Thread t2 = new Thread() { public void run() {*/ try { System.err.println("WHY NOT"); InputStreamReader rdr = new InputStreamReader(p.getInputStream()); //FileReader fr = new FileReader( new File("c:/dot.blastout") ); String qspec = null; String query = null; String ctype = null; Annotation at = new Annotation(); int o = 0; StringBuilder res = new StringBuilder();
// --- parse the blastn plain-text report (the raw report text is also accumulated in 'res') ---
BufferedReader br = new BufferedReader(rdr); String line = br.readLine(); res.append(line + "\n"); while (line != null) { if (line.startsWith("Query= ")) { query = line.substring(7, line.length()); int e = query.indexOf("CRISPR") - 1; if (e > 0) { qspec = query.substring(0, e); qspec = Sequence.getSpec(qspec); String rest = query.substring(e + 8); int ri = rest.lastIndexOf('-'); if (ri != -1) ctype = rest.substring(ri + 1); } else { System.err.println(); } line = br.readLine(); res.append(line + "\n"); while (!line.startsWith("Length")) { line = br.readLine(); res.append(line + "\n"); } o = Integer.parseInt(line.substring(7)); } else if (line.startsWith("> ")) { String contname = line.substring(1).trim(); //line = br.readLine(); //res.append( line+"\n" ); //int o = Integer.parseInt( line.substring(7) ); Sequence cont = geneset.contigmap.get(contname); if (cont != null) { int start = -1; int stop = 0; line = br.readLine(); res.append(line + "\n"); String lastmatch = null; while (line != null && !line.startsWith(">") && !line.startsWith("Query=") /*&& !line.contains("Expect =")*/ ) { if (line.startsWith("Sbjct")) { String[] split = line.split("[\t ]+"); int k = Integer.parseInt(split[1]); int m = Integer.parseInt(split[3]); lastmatch = split[2]; if (start == -1) start = k; stop = m; } line = br.readLine(); res.append(line + "\n"); } if (start > stop) { int tmp = start; start = stop; stop = tmp; } at.start = start; at.stop = stop; //if( stop - start < o*2 ) { List<Annotation> lann = cont.getAnnotations(); if (lann != null) { int k = Collections.binarySearch(lann, at); //System.err.println( "kkk " + k + " " + lann.size() ); if (k < 0) k = -(k + 1) - 1; Annotation ann = lann.get(Math.max(0, k)); boolean yes = true; if (ann.type != null && ann.type.contains("ummer")) { yes = false; } int u = k - 1; Annotation nann = null; if (u >= 0 && u < lann.size()) nann = lann.get(u); u = k + 1; Annotation rann = null; if (u >= 0 && u < lann.size()) rann = lann.get(u); if (nann !=
null && nann.type != null && nann.type.contains("ummer")) { yes = false; } if (rann != null && rann.type != null && rann.type.contains("ummer")) { yes = false; } if (!yes) { //System.err.println(); } Gene g = ann.getGene(); String desig = ann.designation; if (yes && g != null) { //ann.stop > at.start && ann.start < at.stop ) { GeneGroup gg = g.getGeneGroup(); if (desig != null && desig.contains("phage")) { if (!phindex.containsKey(desig)) phindex.put(desig, phindex.size()); Map<String, String> tvps; String specname = qspec;//Sequence.nameFix(qspec, true); if (!specindex.containsKey(specname)) specindex.put(specname, specindex.size()); if (tvp.containsKey(specname)) { tvps = tvp.get(specname); } else { tvps = new HashMap<String, String>(); tvp.put(specname, tvps); } tvps.put(desig, ctype); String contspec = cont.getSpec(); System.err.println(query + " asdf " + contspec + " " + lastmatch + " " + at.start + " " + at.stop + " " + ann.start + " " + ann.stop + " rann " + (rann != null ? rann.start + " " + rann.stop : "") + " nann " + (nann != null ?
nann.start + " " + nann.stop : "")); if (qspec.equals(contspec)) { if (tmr.containsKey(specname)) { tvps = tmr.get(specname); } else { tvps = new HashMap<String, String>(); tmr.put(specname, tvps); } tvps.put(desig, ctype); } /*if( specname.contains("brockianus_MAT_338") ) { System.err.println(); }*/ } Platform.runLater(() -> { if (!isGeneview()) { /*int ggindex = geneset.allgenegroups.indexOf( gg ); int i = table.convertRowIndexToView( ggindex ); if( i != -1 ) table.addRowSelectionInterval(i, i);*/ table.getSelectionModel().select(gg); } else { /*int gindex = geneset.genelist.indexOf( g ); int i = table.convertRowIndexToView( gindex ); table.addRowSelectionInterval(i, i);*/ gtable.getSelectionModel().select(g); } }); } /*for( Annotation ann : lann ) { if( ann.stop > start && ann.start < stop ) { Gene g = ann.getGene(); if( g != null ) { if( table.getModel() == groupModel ) { GeneGroup gg = g.getGeneGroup(); int ggindex = allgenegroups.indexOf( gg ); int i = table.convertRowIndexToView( ggindex ); table.addRowSelectionInterval(i, i); } else if( table.getModel() == defaultModel ) { int gindex = geneset.genelist.indexOf( g ); int i = table.convertRowIndexToView( gindex ); table.addRowSelectionInterval(i, i); } } } }*/ } //} continue; } } /*int i = line.indexOf(' ', 2); if( i == -1 ) i = line.length(); String id = line.substring(2, i); Gene g = genemap.get( id ); if( g != null ) { if( table.getModel() == groupModel ) { i = allgenegroups.indexOf( g.getGeneGroup() ); if( i != -1 && i < table.getRowCount() ) { int r = table.convertRowIndexToView( i ); table.addRowSelectionInterval(r, r); } } else { i = geneset.genelist.indexOf( g ); if( i != -1 && i < table.getRowCount() ) { int r = table.convertRowIndexToView( i ); table.addRowSelectionInterval(r, r); } } } String stuff = line+"\n"; line = br.readLine(); while( line != null && !line.startsWith("Query=") && !line.startsWith("> ") ) { stuff += line+"\n"; line = br.readLine(); } if( rr != null ) { rr.run( stuff ); //res +=
line+"\n"; } } //else*/ line = br.readLine(); res.append(line + "\n"); } br.close(); p.destroy(); for (String specname : geneset.speccontigMap.keySet()) { List<Sequence> lseq = geneset.speccontigMap.get(specname); for (Sequence seq : lseq) { List<Annotation> lann = seq.getAnnotations(); if (lann != null) { for (Annotation a : lann) { String desig = a.designation; if (desig != null && desig.contains("phage") && phindex.containsKey(desig)) { if (!specindex.containsKey(specname)) specindex.put(specname, specindex.size()); Set<String> tvps; if (tph.containsKey(specname)) { tvps = tph.get(specname); } else { tvps = new HashSet<String>(); tph.put(specname, tvps); } tvps.add(desig); } } } } } int k = 0; int u = 0; Workbook wb = new XSSFWorkbook(); Sheet sh = wb.createSheet("Phage"); Row rw = sh.createRow(u++); //res = new StringBuilder(); for (String ph : phindex.keySet()) { res.append("\t" + ph); rw.createCell(++k).setCellValue(ph); } res.append("\n"); for (String rspec : specindex.keySet()) { String spec = Sequence.nameFix(rspec, true); rw = sh.createRow(u++); k = 0; rw.createCell(k++).setCellValue(spec); Map<String, String> set = tvp.get(rspec); res.append(spec); if (set != null) { for (String ph : phindex.keySet()) { if (set.containsKey(ph)) { String type = set.get(ph); if (type == null || type.length() == 0) type = "yes"; res.append("\t" + type); rw.createCell(k).setCellValue(type); } else { res.append("\t"); } k++; } } res.append("\n"); } for (String ph : phindex.keySet()) { res.append("\t" + ph); } res.append("\n"); u++; for (String rspec : specindex.keySet()) { String spec = Sequence.nameFix(rspec, true); rw = sh.createRow(u++); k = 0; rw.createCell(k++).setCellValue(spec); Map<String, String> set = tmr.get(rspec); res.append(spec); if (set != null) { for (String ph : phindex.keySet()) { if (set.containsKey(ph)) { String type = set.get(ph); if (type == null || type.length() == 0) type = "yes"; res.append("\t" + type); rw.createCell(k).setCellValue(type); } else
res.append("\t"); k++; } } res.append("\n"); } u++; for (String rspec : specindex.keySet()) { String spec = Sequence.nameFix(rspec, true); rw = sh.createRow(u++); k = 0; rw.createCell(k++).setCellValue(spec); Set<String> set = tph.get(rspec); Map<String, String> setvp = tvp.get(rspec); res.append(spec); if (set != null) { for (String ph : phindex.keySet()) { if (set.contains(ph)) { if (setvp != null && setvp.containsKey(ph)) { res.append("\tyes wspacer"); rw.createCell(k).setCellValue("yes wspacer"); } else { res.append("\tyes"); rw.createCell(k).setCellValue("yes"); } } else res.append("\t"); k++; } } res.append("\n"); } File file = new File("/Users/sigmar/phage.xlsx"); FileOutputStream fos = new FileOutputStream(file); wb.write(fos); fos.close(); Desktop.getDesktop().open(file); //if( !show ) { JFrame frame = new JFrame(); frame.setDefaultCloseOperation(JFrame.DISPOSE_ON_CLOSE); frame.setSize(800, 600); JTextArea ta = new JTextArea(); ta.setFont(new Font("monospaced", Font.PLAIN, 12)); ta.append(res.toString()); JScrollPane sp = new JScrollPane(ta); frame.add(sp); frame.setVisible(true); FileWriter fw = new FileWriter("/Users/sigmar/file.txt"); fw.write(res.toString()); fw.close(); if (rr != null) rr.run("close"); //} /*if( rr != null ) { rr.run( res ); }*/ } catch (IOException e) { e.printStackTrace(); } /* } }; t2.start();*/ //fr.close(); } catch (IOException e2) { e2.printStackTrace(); } }