List of usage examples for java.lang.ProcessBuilder.start()
public Process start() throws IOException
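A minimal sketch of the core pattern before the collected examples: pass the command as discrete arguments, merge stderr into stdout, drain the output, then check the exit code. The `ls -l` command is illustrative only; this is a sketch rather than any one project's implementation.

import java.io.BufferedReader;
import java.io.InputStreamReader;

public class StartExample {
    public static void main(String[] args) throws Exception {
        ProcessBuilder pb = new ProcessBuilder("ls", "-l");
        pb.redirectErrorStream(true); // fold stderr into stdout so one reader sees everything
        Process p = pb.start();       // throws IOException if the program cannot be started
        try (BufferedReader br = new BufferedReader(new InputStreamReader(p.getInputStream()))) {
            String line;
            while ((line = br.readLine()) != null) {
                System.out.println(line);
            }
        }
        System.out.println("exit code: " + p.waitFor()); // 0 conventionally means success
    }
}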
From source file:com.blackberry.logtools.LogTools.java
public String parseDate(String time) throws Exception {
    String parsedTime = time;
    if (!Pattern.matches("[0-9]+", time)) {
        String[] dateCmd = { "/bin/sh", "-c", "date -d '" + time + "' +%s" };
        ProcessBuilder pBuilder = new ProcessBuilder(dateCmd);
        pBuilder.redirectErrorStream(true);
        Process p = pBuilder.start();
        BufferedReader br = new BufferedReader(new InputStreamReader(p.getInputStream()));
        parsedTime = br.readLine() + "000";
        if (parsedTime.contains("invalid date")) {
            logConsole(true, true, error, "Could not parse start time, invalid date.");
            System.exit(1);
        }
    } else if (time.length() != 13) {
        logConsole(true, true, error, "Could not parse start time.");
        logConsole(true, true, error, "Epoch date time must be in milliseconds. (13 digits)");
        System.exit(1);
    }
    return parsedTime;
}
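Note that the example above interpolates `time` into a `sh -c` string, so shell metacharacters in the input reach the shell. A hedged variant that avoids the shell entirely by handing `date` its arguments directly; the class and method names are illustrative, and it assumes GNU date is on the PATH:

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;

public class DateToEpochMillis {
    // Hypothetical helper: no shell involved, so the time string is a single
    // argv element and cannot inject shell syntax.
    static String toEpochMillis(String time) throws IOException, InterruptedException {
        ProcessBuilder pb = new ProcessBuilder("date", "-d", time, "+%s");
        pb.redirectErrorStream(true);
        Process p = pb.start();
        try (BufferedReader br = new BufferedReader(new InputStreamReader(p.getInputStream()))) {
            String line = br.readLine();
            if (p.waitFor() != 0) {
                throw new IOException("date could not parse: " + time);
            }
            return line + "000"; // seconds -> milliseconds, mirroring the example above
        }
    }
}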
From source file:acoli.controller.Controller.java
private void killProcessID(String path, String scriptCommand, String aScriptParam) throws IOException {
    // Run the bash .sh script on the uploaded and unzipped grammar directory.
    // (new ProcessBuilder(...) never returns null, so no null check is needed.)
    ProcessBuilder pb = new ProcessBuilder(scriptCommand, aScriptParam);
    // Point to where the script is located.
    pb.directory(new File(path + "/resources/uploads/"));
    Process p = pb.start();
    BufferedReader output = getOutput(p);
    BufferedReader error = getError(p);
    // Note: draining stdout to completion before touching stderr can deadlock if the
    // child fills its stderr buffer; redirectErrorStream(true) or a reader thread avoids this.
    String line = "";
    while ((line = output.readLine()) != null) {
        System.out.println("out: " + line);
    }
    while ((line = error.readLine()) != null) {
        System.out.println("err: " + line);
    }
}
From source file:com.novartis.opensource.yada.adaptor.SOAPAdaptor.java
/**
 * Constructs and executes a SOAP message. For {@code basic} authentication, YADA uses the
 * java soap api, and the {@link SOAPConnection} object stored in the query object. For
 * NTLM, which was never successful using the java api, YADA calls out to {@link #CURL_EXEC}
 * in {@link #YADA_BIN}.
 * @see com.novartis.opensource.yada.adaptor.Adaptor#execute(com.novartis.opensource.yada.YADAQuery)
 */
@Override
public void execute(YADAQuery yq) throws YADAAdaptorExecutionException {
    String result = "";
    resetCountParameter(yq);
    SOAPConnection connection = (SOAPConnection) yq.getConnection();
    for (int row = 0; row < yq.getData().size(); row++) {
        yq.setResult();
        YADAQueryResult yqr = yq.getResult();
        String soapUrl = yq.getSoap(row);
        try {
            this.endpoint = new URL(soapUrl);
            MessageFactory factory = MessageFactory.newInstance();
            SOAPMessage message = factory.createMessage();
            byte[] authenticationToken = Base64.encodeBase64((this.soapUser + ":" + this.soapPass).getBytes());
            // Assume a SOAP message was built previously
            MimeHeaders mimeHeaders = message.getMimeHeaders();
            if ("basic".equals(this.soapAuth.toLowerCase())) {
                mimeHeaders.addHeader("Authorization", this.soapAuth + " " + new String(authenticationToken));
                mimeHeaders.addHeader("SOAPAction", this.soapAction);
                mimeHeaders.addHeader("Content-Type", "text/xml");
                SOAPHeader header = message.getSOAPHeader();
                SOAPBody body = message.getSOAPBody();
                header.detachNode();
                l.debug("query:\n" + this.queryString);
                try {
                    Document xml = DocumentBuilderFactory.newInstance().newDocumentBuilder()
                            .parse(new ByteArrayInputStream(this.soapData.getBytes()));
                    //SOAPBodyElement docElement = body.addDocument(xml);
                    Authenticator.setDefault(new YadaSoapAuthenticator(this.soapUser, this.soapPass));
                    SOAPMessage response = connection.call(message, this.endpoint);
                    try (ByteArrayOutputStream responseOutputStream = new ByteArrayOutputStream()) {
                        response.writeTo(responseOutputStream);
                        result = responseOutputStream.toString();
                    }
                } catch (IOException e) {
                    String msg = "Unable to process input or output stream for SOAP message with Basic Authentication. "
                            + "This is an I/O problem, not an authentication issue.";
                    throw new YADAAdaptorExecutionException(msg, e);
                }
                l.debug("SOAP Body:\n" + result);
            } else if (AUTH_NTLM.equals(this.soapAuth.toLowerCase())
                    || "negotiate".equals(this.soapAuth.toLowerCase())) {
                ArrayList<String> args = new ArrayList<>();
                args.add(Finder.getEnv(YADA_BIN) + CURL_EXEC);
                args.add("-X");
                args.add("-s");
                args.add(this.soapSource + this.soapPath);
                args.add("-u");
                args.add(this.soapDomain + "\\" + this.soapUser);
                args.add("-p");
                args.add(this.soapPass);
                args.add("-a");
                args.add(this.soapAuth);
                args.add("-q");
                args.add(this.soapData);
                args.add("-t");
                args.add(this.soapAction);
                String[] cmds = args.toArray(new String[0]);
                l.debug("Executing soap request via script: " + Arrays.toString(cmds));
                String s = null;
                try {
                    ProcessBuilder pb = new ProcessBuilder(args);
                    l.debug(pb.environment().toString());
                    pb.redirectErrorStream(true);
                    Process p = pb.start();
                    try (BufferedReader si = new BufferedReader(new InputStreamReader(p.getInputStream()))) {
                        while ((s = si.readLine()) != null) {
                            l.debug(s);
                            if (null == result) {
                                result = "";
                            }
                            result += s;
                        }
                    }
                } catch (IOException e) {
                    String msg = "Unable to execute NTLM-authenticated SOAP call using system call to 'curl'. "
                            + "Make sure the curl executable is still accessible.";
                    throw new YADAAdaptorExecutionException(msg, e);
                }
            }
        } catch (SOAPException e) {
            String msg = "There was a problem creating or executing the SOAP message, or receiving the response.";
            throw new YADAAdaptorExecutionException(msg, e);
        } catch (SAXException e) {
            String msg = "Unable to parse SOAP message body.";
            throw new YADAAdaptorExecutionException(msg, e);
        } catch (ParserConfigurationException e) {
            String msg = "There was a problem creating the xml document for the SOAP message body.";
            throw new YADAAdaptorExecutionException(msg, e);
        } catch (YADAResourceException e) {
            String msg = "Cannot find 'curl' executable at specified JNDI path " + YADA_BIN + CURL_EXEC;
            throw new YADAAdaptorExecutionException(msg, e);
        } catch (MalformedURLException e) {
            String msg = "Can't create URL from provided source and path.";
            throw new YADAAdaptorExecutionException(msg, e);
        } finally {
            try {
                ConnectionFactory.releaseResources(connection, yq.getSoap().get(0));
            } catch (YADAConnectionException e) {
                l.error(e.getMessage());
            }
        }
        yqr.addResult(row, result);
    }
}
From source file:interactivespaces.activity.binary.BaseNativeActivityRunner.java
/**
 * Attempt the run.
 *
 * @return the process that was created
 */
private Process attemptRun() {
    try {
        ProcessBuilder builder = new ProcessBuilder(commands);
        builder.directory(executableFolder);
        spaceEnvironment.getLog().info(
                String.format("Starting up native code in folder %s", executableFolder.getAbsolutePath()));
        return builder.start();
    } catch (Exception e) {
        throw new InteractiveSpacesException("Can't start up activity " + appName, e);
    }
}
From source file:org.yamj.core.service.mediainfo.MediaInfoService.java
private InputStream createInputStream(String movieFilePath) throws IOException {
    // Create the command line.
    List<String> commandMedia = new ArrayList<String>(execMediaInfo);
    commandMedia.add(movieFilePath);

    ProcessBuilder pb = new ProcessBuilder(commandMedia);
    // Set up the working directory.
    pb.directory(MEDIAINFO_PATH);

    Process p = pb.start();
    return p.getInputStream();
}
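One caveat with this helper: it returns stdout without draining stderr, so a chatty mediainfo run can fill the stderr pipe buffer and stall the child. A minimal hedged variant (names are illustrative, not from the original source) merges the two streams before handing the stream back:

import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;

public class MergedStream {
    // Hypothetical variant: merging stderr prevents the child from blocking on a full stderr pipe.
    static InputStream runAndStream(List<String> baseCommand, String filePath) throws IOException {
        List<String> command = new ArrayList<>(baseCommand);
        command.add(filePath);
        ProcessBuilder pb = new ProcessBuilder(command);
        pb.redirectErrorStream(true); // interleave stderr with stdout
        return pb.start().getInputStream();
    }
}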
From source file:io.hops.hopsworks.common.dao.tensorflow.config.TensorBoardProcessMgr.java
/**
 * Start the TensorBoard process.
 * @param project
 * @param user
 * @param hdfsUser
 * @param hdfsLogdir
 * @return
 * @throws IOException
 */
@TransactionAttribute(TransactionAttributeType.NOT_SUPPORTED)
public TensorBoardDTO startTensorBoard(Project project, Users user, HdfsUsers hdfsUser, String hdfsLogdir)
        throws IOException {
    String prog = settings.getHopsworksDomainDir() + "/bin/tensorboard.sh";
    Process process = null;
    Integer port = 0;
    BigInteger pid = null;
    String tbBasePath = settings.getStagingDir() + Settings.TENSORBOARD_DIRS + File.separator;
    String projectUserUniquePath = project.getName() + "_" + hdfsUser.getName();
    String tbPath = tbBasePath + DigestUtils.sha256Hex(projectUserUniquePath);
    String certsPath = "\"\"";

    File tbDir = new File(tbPath);
    if (tbDir.exists()) {
        for (File file : tbDir.listFiles()) {
            if (file.getName().endsWith(".pid")) {
                String pidContents = com.google.common.io.Files.readFirstLine(file, Charset.defaultCharset());
                try {
                    pid = BigInteger.valueOf(Long.parseLong(pidContents));
                    if (pid != null && ping(pid) == 0) {
                        killTensorBoard(pid);
                    }
                } catch (NumberFormatException nfe) {
                    LOGGER.log(Level.WARNING,
                            "Expected number in pidfile " + file.getAbsolutePath() + " got " + pidContents);
                }
            }
        }
        FileUtils.deleteDirectory(tbDir);
    }
    tbDir.mkdirs();

    DistributedFileSystemOps dfso = dfsService.getDfsOps();
    try {
        certsPath = tbBasePath + DigestUtils.sha256Hex(projectUserUniquePath + "_certs");
        File certsDir = new File(certsPath);
        certsDir.mkdirs();
        HopsUtils.materializeCertificatesForUserCustomDir(project.getName(), user.getUsername(),
                settings.getHdfsTmpCertDir(), dfso, certificateMaterializer, settings, certsPath);
    } catch (IOException ioe) {
        LOGGER.log(Level.SEVERE,
                "Failed in materializing certificates for " + hdfsUser + " in directory " + certsPath, ioe);
        HopsUtils.cleanupCertificatesForUserCustomDir(user.getUsername(), project.getName(),
                settings.getHdfsTmpCertDir(), certificateMaterializer, certsPath, settings);
    } finally {
        if (dfso != null) {
            dfsService.closeDfsClient(dfso);
        }
    }

    String anacondaEnvironmentPath = settings.getAnacondaProjectDir(project.getName());
    int retries = 3;
    while (retries > 0) {
        // Note: this guard is unreachable inside the loop as written; exhausting the
        // retries falls through to the cleanup code below instead of throwing.
        if (retries == 0) {
            throw new IOException(
                    "Failed to start TensorBoard for project=" + project.getName() + ", user=" + user.getUid());
        }

        // Use pidfile to kill any running servers.
        port = ThreadLocalRandom.current().nextInt(40000, 59999);

        String[] command = new String[] { "/usr/bin/sudo", prog, "start", hdfsUser.getName(), hdfsLogdir,
                tbPath, port.toString(), anacondaEnvironmentPath, settings.getHadoopVersion(), certsPath,
                settings.getJavaHome() };

        LOGGER.log(Level.INFO, Arrays.toString(command));
        ProcessBuilder pb = new ProcessBuilder(command);
        try {
            // Send both stdout and stderr to the same stream.
            pb.redirectErrorStream(true);
            process = pb.start();
            synchronized (pb) {
                try {
                    // Wait until the launcher bash script has finished.
                    process.waitFor(20L, TimeUnit.SECONDS);
                } catch (InterruptedException ex) {
                    LOGGER.log(Level.SEVERE, "Woken while waiting for the TensorBoard to start: {0}",
                            ex.getMessage());
                }
            }
            int exitValue = process.exitValue();
            String pidPath = tbPath + File.separator + port + ".pid";
            File pidFile = new File(pidPath);
            // Read the pid for the TensorBoard server.
            if (pidFile.exists()) {
                String pidContents = com.google.common.io.Files.readFirstLine(pidFile, Charset.defaultCharset());
                pid = BigInteger.valueOf(Long.parseLong(pidContents));
            }
            if (exitValue == 0 && pid != null) {
                int maxWait = 10;
                String logFilePath = tbPath + File.separator + port + ".log";
                File logFile = new File(logFilePath);
                while (maxWait > 0) {
                    String logFileContents = com.google.common.io.Files.readFirstLine(logFile,
                            Charset.defaultCharset());
                    // It is not possible to have a fixed wait time before showing the TB;
                    // we need to be sure it has started.
                    if (logFile.length() > 0
                            && (logFileContents.contains("Loaded") || logFileContents.contains("Reloader")
                                    || logFileContents.contains("event") || maxWait == 1)) {
                        Thread.sleep(5000);
                        TensorBoardDTO tensorBoardDTO = new TensorBoardDTO();
                        String host = null;
                        try {
                            host = InetAddress.getLocalHost().getHostAddress();
                        } catch (UnknownHostException ex) {
                            Logger.getLogger(TensorBoardProcessMgr.class.getName()).log(Level.SEVERE, null, ex);
                        }
                        tensorBoardDTO.setEndpoint(host + ":" + port);
                        tensorBoardDTO.setPid(pid);
                        return tensorBoardDTO;
                    } else {
                        Thread.sleep(1000);
                        maxWait--;
                    }
                }
                TensorBoardDTO tensorBoardDTO = new TensorBoardDTO();
                tensorBoardDTO.setPid(pid);
                String host = null;
                try {
                    host = InetAddress.getLocalHost().getHostAddress();
                } catch (UnknownHostException ex) {
                    Logger.getLogger(TensorBoardProcessMgr.class.getName()).log(Level.SEVERE, null, ex);
                }
                tensorBoardDTO.setEndpoint(host + ":" + port);
                return tensorBoardDTO;
            } else {
                LOGGER.log(Level.SEVERE,
                        "Failed starting TensorBoard got exitcode " + exitValue + " retrying on new port");
                if (pid != null) {
                    this.killTensorBoard(pid);
                }
                pid = null;
            }
        } catch (Exception ex) {
            LOGGER.log(Level.SEVERE, "Problem starting TensorBoard: {0}", ex);
            if (process != null) {
                process.destroyForcibly();
            }
        } finally {
            retries--;
        }
    }

    // Failed to start TensorBoard; make sure there is no process running for it. (This should not be needed.)
    if (pid != null && this.ping(pid) == 0) {
        this.killTensorBoard(pid);
    }

    // Certificate cleanup in case they were materialized but no TB started successfully.
    dfso = dfsService.getDfsOps();
    certsPath = tbBasePath + DigestUtils.sha256Hex(projectUserUniquePath + "_certs");
    File certsDir = new File(certsPath);
    certsDir.mkdirs();
    try {
        HopsUtils.cleanupCertificatesForUserCustomDir(user.getUsername(), project.getName(),
                settings.getHdfsTmpCertDir(), certificateMaterializer, certsPath, settings);
    } finally {
        if (dfso != null) {
            dfsService.closeDfsClient(dfso);
        }
    }
    return null;
}
From source file:ldbc.snb.datagen.generator.LDBCDatagen.java
public int runGenerateJob(Configuration conf) throws Exception {
    String hadoopPrefix = conf.get("ldbc.snb.datagen.serializer.hadoopDir");
    FileSystem fs = FileSystem.get(conf);
    ArrayList<Float> percentages = new ArrayList<Float>();
    percentages.add(0.45f);
    percentages.add(0.45f);
    percentages.add(0.1f);

    long start = System.currentTimeMillis();

    printProgress("Starting: Person generation");
    long startPerson = System.currentTimeMillis();
    HadoopPersonGenerator personGenerator = new HadoopPersonGenerator(conf);
    personGenerator.run(hadoopPrefix + "/persons", "ldbc.snb.datagen.hadoop.UniversityKeySetter");
    long endPerson = System.currentTimeMillis();

    printProgress("Creating university location correlated edges");
    long startUniversity = System.currentTimeMillis();
    HadoopKnowsGenerator knowsGenerator = new HadoopKnowsGenerator(conf,
            "ldbc.snb.datagen.hadoop.UniversityKeySetter", "ldbc.snb.datagen.hadoop.RandomKeySetter",
            percentages, 0, conf.get("ldbc.snb.datagen.generator.knowsGenerator"));
    knowsGenerator.run(hadoopPrefix + "/persons", hadoopPrefix + "/universityEdges");
    long endUniversity = System.currentTimeMillis();

    printProgress("Creating main interest correlated edges");
    long startInterest = System.currentTimeMillis();
    knowsGenerator = new HadoopKnowsGenerator(conf, "ldbc.snb.datagen.hadoop.InterestKeySetter",
            "ldbc.snb.datagen.hadoop.RandomKeySetter", percentages, 1,
            conf.get("ldbc.snb.datagen.generator.knowsGenerator"));
    knowsGenerator.run(hadoopPrefix + "/persons", hadoopPrefix + "/interestEdges");
    long endInterest = System.currentTimeMillis();

    printProgress("Creating random correlated edges");
    long startRandom = System.currentTimeMillis();
    knowsGenerator = new HadoopKnowsGenerator(conf, "ldbc.snb.datagen.hadoop.RandomKeySetter",
            "ldbc.snb.datagen.hadoop.RandomKeySetter", percentages, 2,
            "ldbc.snb.datagen.generator.RandomKnowsGenerator");
    knowsGenerator.run(hadoopPrefix + "/persons", hadoopPrefix + "/randomEdges");
    long endRandom = System.currentTimeMillis();

    fs.delete(new Path(DatagenParams.hadoopDir + "/persons"), true);

    printProgress("Merging the different edge files");
    ArrayList<String> edgeFileNames = new ArrayList<String>();
    edgeFileNames.add(hadoopPrefix + "/universityEdges");
    edgeFileNames.add(hadoopPrefix + "/interestEdges");
    edgeFileNames.add(hadoopPrefix + "/randomEdges");
    long startMerge = System.currentTimeMillis();
    HadoopMergeFriendshipFiles merger = new HadoopMergeFriendshipFiles(conf,
            "ldbc.snb.datagen.hadoop.RandomKeySetter");
    merger.run(hadoopPrefix + "/mergedPersons", edgeFileNames);
    long endMerge = System.currentTimeMillis();

    printProgress("Serializing persons");
    long startPersonSerializing = System.currentTimeMillis();
    if (!conf.getBoolean("ldbc.snb.datagen.serializer.persons.sort", false)) {
        HadoopPersonSerializer serializer = new HadoopPersonSerializer(conf);
        serializer.run(hadoopPrefix + "/mergedPersons");
    } else {
        HadoopPersonSortAndSerializer serializer = new HadoopPersonSortAndSerializer(conf);
        serializer.run(hadoopPrefix + "/mergedPersons");
    }
    long endPersonSerializing = System.currentTimeMillis();

    long startPersonActivity = System.currentTimeMillis();
    if (conf.getBoolean("ldbc.snb.datagen.generator.activity", true)) {
        printProgress("Generating and serializing person activity");
        HadoopPersonActivityGenerator activityGenerator = new HadoopPersonActivityGenerator(conf);
        activityGenerator.run(hadoopPrefix + "/mergedPersons");

        int numThreads = DatagenParams.numThreads;
        int blockSize = DatagenParams.blockSize;
        int numBlocks = (int) Math.ceil(DatagenParams.numPersons / (double) blockSize);

        for (int i = 0; i < numThreads; ++i) {
            if (i < numBlocks) {
                fs.copyToLocalFile(false, new Path(DatagenParams.hadoopDir + "/m" + i + "personFactors.txt"),
                        new Path("./"));
                fs.copyToLocalFile(false, new Path(DatagenParams.hadoopDir + "/m" + i + "activityFactors.txt"),
                        new Path("./"));
                fs.copyToLocalFile(false, new Path(DatagenParams.hadoopDir + "/m0friendList" + i + ".csv"),
                        new Path("./"));
            }
        }
    }
    long endPersonActivity = System.currentTimeMillis();

    long startSortingUpdateStreams = System.currentTimeMillis();
    if (conf.getBoolean("ldbc.snb.datagen.serializer.updateStreams", false)) {
        printProgress("Sorting update streams");

        List<String> personStreamsFileNames = new ArrayList<String>();
        List<String> forumStreamsFileNames = new ArrayList<String>();
        for (int i = 0; i < DatagenParams.numThreads; ++i) {
            int numPartitions = conf.getInt("ldbc.snb.datagen.serializer.numUpdatePartitions", 1);
            for (int j = 0; j < numPartitions; ++j) {
                personStreamsFileNames.add(DatagenParams.hadoopDir + "/temp_updateStream_person_" + i + "_" + j);
                if (conf.getBoolean("ldbc.snb.datagen.generator.activity", false)) {
                    forumStreamsFileNames.add(DatagenParams.hadoopDir + "/temp_updateStream_forum_" + i + "_" + j);
                }
            }
        }
        HadoopUpdateStreamSorterAndSerializer updateSorterAndSerializer =
                new HadoopUpdateStreamSorterAndSerializer(conf);
        updateSorterAndSerializer.run(personStreamsFileNames, "person");
        updateSorterAndSerializer.run(forumStreamsFileNames, "forum");
        for (String file : personStreamsFileNames) {
            fs.delete(new Path(file), true);
        }
        for (String file : forumStreamsFileNames) {
            fs.delete(new Path(file), true);
        }

        long minDate = Long.MAX_VALUE;
        long maxDate = Long.MIN_VALUE;
        long count = 0;
        for (int i = 0; i < DatagenParams.numThreads; ++i) {
            Path propertiesFile = new Path(
                    DatagenParams.hadoopDir + "/temp_updateStream_person_" + i + ".properties");
            FSDataInputStream file = fs.open(propertiesFile);
            Properties properties = new Properties();
            properties.load(file);
            long aux;
            aux = Long.parseLong(properties.getProperty("ldbc.snb.interactive.min_write_event_start_time"));
            minDate = aux < minDate ? aux : minDate;
            aux = Long.parseLong(properties.getProperty("ldbc.snb.interactive.max_write_event_start_time"));
            maxDate = aux > maxDate ? aux : maxDate;
            aux = Long.parseLong(properties.getProperty("ldbc.snb.interactive.num_events"));
            count += aux;
            file.close();
            fs.delete(propertiesFile, true);

            if (conf.getBoolean("ldbc.snb.datagen.generator.activity", false)) {
                propertiesFile = new Path(
                        DatagenParams.hadoopDir + "/temp_updateStream_forum_" + i + ".properties");
                file = fs.open(propertiesFile);
                properties = new Properties();
                properties.load(file);
                aux = Long.parseLong(properties.getProperty("ldbc.snb.interactive.min_write_event_start_time"));
                minDate = aux < minDate ? aux : minDate;
                aux = Long.parseLong(properties.getProperty("ldbc.snb.interactive.max_write_event_start_time"));
                maxDate = aux > maxDate ? aux : maxDate;
                aux = Long.parseLong(properties.getProperty("ldbc.snb.interactive.num_events"));
                count += aux;
                file.close();
                fs.delete(propertiesFile, true);
            }
        }

        OutputStream output = fs
                .create(new Path(DatagenParams.socialNetworkDir + "/updateStream" + ".properties"), true);
        output.write(new String("ldbc.snb.interactive.gct_delta_duration:" + DatagenParams.deltaTime + "\n")
                .getBytes());
        output.write(new String("ldbc.snb.interactive.min_write_event_start_time:" + minDate + "\n").getBytes());
        output.write(new String("ldbc.snb.interactive.max_write_event_start_time:" + maxDate + "\n").getBytes());
        output.write(new String("ldbc.snb.interactive.update_interleave:" + (maxDate - minDate) / count + "\n")
                .getBytes());
        output.write(new String("ldbc.snb.interactive.num_events:" + count).getBytes());
        output.close();
    }
    long endSortingUpdateStreams = System.currentTimeMillis();

    printProgress("Serializing invariant schema");
    long startInvariantSerializing = System.currentTimeMillis();
    HadoopInvariantSerializer invariantSerializer = new HadoopInvariantSerializer(conf);
    invariantSerializer.run();
    long endInvariantSerializing = System.currentTimeMillis();

    long end = System.currentTimeMillis();

    System.out.println(((end - start) / 1000) + " total seconds");
    System.out.println("Person generation time: " + ((endPerson - startPerson) / 1000));
    System.out.println("University correlated edge generation time: " + ((endUniversity - startUniversity) / 1000));
    System.out.println("Interest correlated edge generation time: " + ((endInterest - startInterest) / 1000));
    System.out.println("Random correlated edge generation time: " + ((endRandom - startRandom) / 1000));
    System.out.println("Edges merge time: " + ((endMerge - startMerge) / 1000));
    System.out.println("Person serialization time: " + ((endPersonSerializing - startPersonSerializing) / 1000));
    System.out.println("Person activity generation and serialization time: "
            + ((endPersonActivity - startPersonActivity) / 1000));
    System.out.println("Sorting update streams time: "
            + ((endSortingUpdateStreams - startSortingUpdateStreams) / 1000));
    System.out.println("Invariant schema serialization time: "
            + ((endInvariantSerializing - startInvariantSerializing) / 1000));
    System.out.println("Total Execution time: " + ((end - start) / 1000));

    if (conf.getBoolean("ldbc.snb.datagen.parametergenerator.parameters", false)
            && conf.getBoolean("ldbc.snb.datagen.generator.activity", false)) {
        System.out.println("Running Parameter Generation");
        System.out.println("Generating Interactive Parameters");
        ProcessBuilder pb = new ProcessBuilder("mkdir", "-p",
                conf.get("ldbc.snb.datagen.serializer.outputDir") + "/substitution_parameters");
        pb.directory(new File("./"));
        Process p = pb.start();
        p.waitFor();

        pb = new ProcessBuilder(conf.get("ldbc.snb.datagen.parametergenerator.python"),
                "paramgenerator/generateparams.py", "./",
                conf.get("ldbc.snb.datagen.serializer.outputDir") + "/substitution_parameters");
        pb.directory(new File("./"));
        File logInteractive = new File("parameters_interactive.log");
        pb.redirectErrorStream(true);
        pb.redirectOutput(ProcessBuilder.Redirect.appendTo(logInteractive));
        p = pb.start();
        p.waitFor();

        System.out.println("Generating BI Parameters");
        pb = new ProcessBuilder(conf.get("ldbc.snb.datagen.parametergenerator.python"),
                "paramgenerator/generateparamsbi.py", "./",
                conf.get("ldbc.snb.datagen.serializer.outputDir") + "/substitution_parameters");
        pb.directory(new File("./"));
        File logBi = new File("parameters_bi.log");
        pb.redirectErrorStream(true);
        pb.redirectOutput(ProcessBuilder.Redirect.appendTo(logBi));
        p = pb.start();
        p.waitFor();
        System.out.println("Finished Parameter Generation");
    }
    return 0;
}
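The parameter-generation steps above append each child's merged output to a log file through ProcessBuilder.Redirect rather than reading it in-process. A minimal sketch of that redirection API; the command and file name are illustrative:

import java.io.File;

public class AppendToLog {
    public static void main(String[] args) throws Exception {
        File log = new File("child.log");
        ProcessBuilder pb = new ProcessBuilder("echo", "hello");
        pb.redirectErrorStream(true);                             // fold stderr into stdout
        pb.redirectOutput(ProcessBuilder.Redirect.appendTo(log)); // append rather than truncate
        pb.start().waitFor();
    }
}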
From source file:jp.co.tis.gsp.tools.dba.dialect.OracleDialect.java
@Override
public void exportSchema(ExportParams params) throws MojoExecutionException {
    BufferedReader reader = null;
    try {
        File dumpFile = params.getDumpFile();
        String user = params.getUser();
        String password = params.getPassword();
        String schema = params.getSchema();
        createDirectory(user, password, dumpFile.getParentFile());
        ProcessBuilder pb = new ProcessBuilder("expdp", user + "/" + password, "directory=exp_dir",
                "dumpfile=" + dumpFile.getName(), "schemas=" + schema, "reuse_dumpfiles=y", "nologfile=y");
        pb.redirectErrorStream(true);
        Process process = pb.start();
        Charset terminalCharset = System.getProperty("os.name").toLowerCase().contains("windows")
                ? Charset.forName("Shift_JIS")
                : Charset.forName("UTF-8");
        reader = new BufferedReader(new InputStreamReader(process.getInputStream(), terminalCharset));
        String line;
        while ((line = reader.readLine()) != null) {
            System.out.println(line);
        }
        process.waitFor();
        if (process.exitValue() != 0) {
            throw new MojoExecutionException("oracle export error");
        }
        process.destroy();
    } catch (Exception e) {
        throw new MojoExecutionException("oracle export", e);
    } finally {
        IOUtils.closeQuietly(reader);
    }
}
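expdp writes console output in the platform's terminal encoding, which is why the example selects Shift_JIS on Windows. The general point when decoding child output: pass an explicit Charset to InputStreamReader instead of relying on the default. A small sketch:

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.nio.charset.Charset;

public class CharsetReader {
    // Sketch: decode a child's output with an explicit, platform-appropriate charset.
    static BufferedReader readerFor(Process p, Charset terminalCharset) {
        return new BufferedReader(new InputStreamReader(p.getInputStream(), terminalCharset));
    }
}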
From source file:acoli.controller.Controller.java
private void runBashScript(String path, String scriptCommand, String aScriptParam, String anotherScriptParam)
        throws IOException {
    // Run the bash .sh script on the uploaded and unzipped grammar directory.
    ProcessBuilder pb = new ProcessBuilder(scriptCommand, aScriptParam, anotherScriptParam);
    // Point to where the script is located.
    pb.directory(new File(path + "/resources/uploads/"));
    Process p = pb.start();
    BufferedReader output = getOutput(p);
    BufferedReader error = getError(p);
    String line = "";
    while ((line = output.readLine()) != null) {
        System.out.println("out: " + line);
    }
    while ((line = error.readLine()) != null) {
        System.out.println("err: " + line);
    }
}
From source file:edu.isi.wings.execution.engine.api.impl.distributed.DistributedExecutionEngine.java
@Override
public ProcessStatus call() throws Exception {
    File tempdir = File.createTempFile(planName + "-", "-" + exeName);
    if (!tempdir.delete() || !tempdir.mkdirs())
        throw new Exception("Cannot create temp directory");

    ProcessStatus status = new ProcessStatus();
    try {
        File codef = new File(this.codeBinary);
        codef.setExecutable(true);

        PrintWriter fout = null;
        if (outfilepath != null) {
            File f = new File(outfilepath);
            f.getParentFile().mkdirs();
            fout = new PrintWriter(f);
        }

        ProcessBuilder pb = new ProcessBuilder(args);
        pb.directory(tempdir);
        pb.redirectErrorStream(true);

        // Set environment variables.
        for (String var : this.environment.keySet())
            pb.environment().put(var, this.environment.get(var));

        this.process = pb.start();

        // Read output stream.
        StreamGobbler outputGobbler = new StreamGobbler(this.process.getInputStream(), fout);
        outputGobbler.start();

        // Wait for the process to exit.
        this.process.waitFor();

        status.setExitValue(this.process.exitValue());
        status.setLog(outputGobbler.getLog());
    } catch (InterruptedException e) {
        if (this.process != null) {
            //System.out.println("Stopping remote process");
            this.process.destroy();
        }
        status.setLog("!! Stopping Remotely !! .. " + exeName);
        status.setExitValue(-1);
    } catch (Exception e) {
        status.setLog(e.getMessage());
        status.setExitValue(-1);
    }
    // Delete temp directory.
    FileUtils.deleteDirectory(tempdir);
    return status;
}
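Several examples above set the child's working directory and environment before start(). A self-contained sketch of those two knobs plus inheritIO() (Java 7+); the variable names and the printenv command are illustrative:

import java.io.File;
import java.util.Map;

public class EnvAndDir {
    public static void main(String[] args) throws Exception {
        ProcessBuilder pb = new ProcessBuilder("printenv", "GREETING");
        pb.directory(new File("/tmp"));              // working directory for the child
        Map<String, String> env = pb.environment();  // starts as a copy of the parent's environment
        env.put("GREETING", "hello");
        pb.inheritIO();                              // child shares the parent's stdin/stdout/stderr
        System.out.println("exit: " + pb.start().waitFor());
    }
}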