List of usage examples for java.lang.ProcessBuilder#directory
File directory
To view the full source code of each java.lang.ProcessBuilder#directory usage example, click its Source Link.
From source file:ldbc.snb.datagen.generator.LDBCDatagen.java
/**
 * Runs the complete LDBC SNB data generation pipeline: person generation,
 * three knows-edge generation passes (university-, interest- and
 * random-correlated), edge merging, person/activity serialization,
 * optional update-stream sorting, invariant-schema serialization and
 * (optionally) substitution-parameter generation via external Python scripts.
 *
 * @param conf Hadoop configuration carrying all "ldbc.snb.datagen.*" settings
 * @return 0 on success (errors propagate as exceptions)
 * @throws Exception if any Hadoop job, file operation or child process fails
 */
public int runGenerateJob(Configuration conf) throws Exception {
    String hadoopPrefix = conf.get("ldbc.snb.datagen.serializer.hadoopDir");
    FileSystem fs = FileSystem.get(conf);

    // Fractions of edges produced by each of the three knows-generation passes.
    ArrayList<Float> percentages = new ArrayList<Float>();
    percentages.add(0.45f);
    percentages.add(0.45f);
    percentages.add(0.1f);

    long start = System.currentTimeMillis();

    printProgress("Starting: Person generation");
    long startPerson = System.currentTimeMillis();
    HadoopPersonGenerator personGenerator = new HadoopPersonGenerator(conf);
    personGenerator.run(hadoopPrefix + "/persons", "ldbc.snb.datagen.hadoop.UniversityKeySetter");
    long endPerson = System.currentTimeMillis();

    printProgress("Creating university location correlated edges");
    long startUniversity = System.currentTimeMillis();
    HadoopKnowsGenerator knowsGenerator = new HadoopKnowsGenerator(conf,
            "ldbc.snb.datagen.hadoop.UniversityKeySetter", "ldbc.snb.datagen.hadoop.RandomKeySetter",
            percentages, 0, conf.get("ldbc.snb.datagen.generator.knowsGenerator"));
    knowsGenerator.run(hadoopPrefix + "/persons", hadoopPrefix + "/universityEdges");
    long endUniversity = System.currentTimeMillis();

    printProgress("Creating main interest correlated edges");
    long startInterest = System.currentTimeMillis();
    knowsGenerator = new HadoopKnowsGenerator(conf, "ldbc.snb.datagen.hadoop.InterestKeySetter",
            "ldbc.snb.datagen.hadoop.RandomKeySetter", percentages, 1,
            conf.get("ldbc.snb.datagen.generator.knowsGenerator"));
    knowsGenerator.run(hadoopPrefix + "/persons", hadoopPrefix + "/interestEdges");
    long endInterest = System.currentTimeMillis();

    printProgress("Creating random correlated edges");
    long startRandom = System.currentTimeMillis();
    knowsGenerator = new HadoopKnowsGenerator(conf, "ldbc.snb.datagen.hadoop.RandomKeySetter",
            "ldbc.snb.datagen.hadoop.RandomKeySetter", percentages, 2,
            "ldbc.snb.datagen.generator.RandomKnowsGenerator");
    knowsGenerator.run(hadoopPrefix + "/persons", hadoopPrefix + "/randomEdges");
    long endRandom = System.currentTimeMillis();

    // Raw person partition is no longer needed once all edge passes are done.
    fs.delete(new Path(DatagenParams.hadoopDir + "/persons"), true);

    printProgress("Merging the different edge files");
    ArrayList<String> edgeFileNames = new ArrayList<String>();
    edgeFileNames.add(hadoopPrefix + "/universityEdges");
    edgeFileNames.add(hadoopPrefix + "/interestEdges");
    edgeFileNames.add(hadoopPrefix + "/randomEdges");
    long startMerge = System.currentTimeMillis();
    HadoopMergeFriendshipFiles merger = new HadoopMergeFriendshipFiles(conf,
            "ldbc.snb.datagen.hadoop.RandomKeySetter");
    merger.run(hadoopPrefix + "/mergedPersons", edgeFileNames);
    long endMerge = System.currentTimeMillis();

    printProgress("Serializing persons");
    long startPersonSerializing = System.currentTimeMillis();
    if (!conf.getBoolean("ldbc.snb.datagen.serializer.persons.sort", false)) {
        HadoopPersonSerializer serializer = new HadoopPersonSerializer(conf);
        serializer.run(hadoopPrefix + "/mergedPersons");
    } else {
        HadoopPersonSortAndSerializer serializer = new HadoopPersonSortAndSerializer(conf);
        serializer.run(hadoopPrefix + "/mergedPersons");
    }
    long endPersonSerializing = System.currentTimeMillis();

    long startPersonActivity = System.currentTimeMillis();
    if (conf.getBoolean("ldbc.snb.datagen.generator.activity", true)) {
        printProgress("Generating and serializing person activity");
        HadoopPersonActivityGenerator activityGenerator = new HadoopPersonActivityGenerator(conf);
        activityGenerator.run(hadoopPrefix + "/mergedPersons");

        // Pull the per-block factor and friend-list files to the local
        // working directory; only the first numBlocks reducers produced any.
        int numThreads = DatagenParams.numThreads;
        int blockSize = DatagenParams.blockSize;
        int numBlocks = (int) Math.ceil(DatagenParams.numPersons / (double) blockSize);
        for (int i = 0; i < numThreads; ++i) {
            if (i < numBlocks) {
                fs.copyToLocalFile(false, new Path(DatagenParams.hadoopDir + "/m" + i + "personFactors.txt"),
                        new Path("./"));
                fs.copyToLocalFile(false, new Path(DatagenParams.hadoopDir + "/m" + i + "activityFactors.txt"),
                        new Path("./"));
                fs.copyToLocalFile(false, new Path(DatagenParams.hadoopDir + "/m0friendList" + i + ".csv"),
                        new Path("./"));
            }
        }
    }
    long endPersonActivity = System.currentTimeMillis();

    long startSortingUpdateStreams = System.currentTimeMillis();
    if (conf.getBoolean("ldbc.snb.datagen.serializer.updateStreams", false)) {
        printProgress("Sorting update streams ");

        // Collect the temporary per-thread/per-partition stream file names.
        List<String> personStreamsFileNames = new ArrayList<String>();
        List<String> forumStreamsFileNames = new ArrayList<String>();
        for (int i = 0; i < DatagenParams.numThreads; ++i) {
            int numPartitions = conf.getInt("ldbc.snb.datagen.serializer.numUpdatePartitions", 1);
            for (int j = 0; j < numPartitions; ++j) {
                personStreamsFileNames.add(DatagenParams.hadoopDir + "/temp_updateStream_person_" + i + "_" + j);
                if (conf.getBoolean("ldbc.snb.datagen.generator.activity", false)) {
                    forumStreamsFileNames.add(DatagenParams.hadoopDir + "/temp_updateStream_forum_" + i + "_" + j);
                }
            }
        }
        HadoopUpdateStreamSorterAndSerializer updateSorterAndSerializer =
                new HadoopUpdateStreamSorterAndSerializer(conf);
        updateSorterAndSerializer.run(personStreamsFileNames, "person");
        updateSorterAndSerializer.run(forumStreamsFileNames, "forum");
        for (String file : personStreamsFileNames) {
            fs.delete(new Path(file), true);
        }
        for (String file : forumStreamsFileNames) {
            fs.delete(new Path(file), true);
        }

        // Aggregate min/max write-event times and the total event count over
        // all per-thread .properties files, deleting each one after reading.
        long minDate = Long.MAX_VALUE;
        long maxDate = Long.MIN_VALUE;
        long count = 0;
        for (int i = 0; i < DatagenParams.numThreads; ++i) {
            long[] stats = readAndDeleteStreamStats(fs,
                    new Path(DatagenParams.hadoopDir + "/temp_updateStream_person_" + i + ".properties"));
            minDate = Math.min(minDate, stats[0]);
            maxDate = Math.max(maxDate, stats[1]);
            count += stats[2];
            if (conf.getBoolean("ldbc.snb.datagen.generator.activity", false)) {
                stats = readAndDeleteStreamStats(fs,
                        new Path(DatagenParams.hadoopDir + "/temp_updateStream_forum_" + i + ".properties"));
                minDate = Math.min(minDate, stats[0]);
                maxDate = Math.max(maxDate, stats[1]);
                count += stats[2];
            }
        }

        // Guard against division by zero when no events were generated.
        long interleave = count > 0 ? (maxDate - minDate) / count : 0;
        // try-with-resources guarantees the output stream is closed even if a
        // write fails (original leaked it on error).
        try (OutputStream output = fs
                .create(new Path(DatagenParams.socialNetworkDir + "/updateStream" + ".properties"), true)) {
            output.write(("ldbc.snb.interactive.gct_delta_duration:" + DatagenParams.deltaTime + "\n").getBytes());
            output.write(("ldbc.snb.interactive.min_write_event_start_time:" + minDate + "\n").getBytes());
            output.write(("ldbc.snb.interactive.max_write_event_start_time:" + maxDate + "\n").getBytes());
            output.write(("ldbc.snb.interactive.update_interleave:" + interleave + "\n").getBytes());
            output.write(("ldbc.snb.interactive.num_events:" + count).getBytes());
        }
    }
    long endSortingUpdateStreams = System.currentTimeMillis();

    printProgress("Serializing invariant schema ");
    long startInvariantSerializing = System.currentTimeMillis();
    HadoopInvariantSerializer invariantSerializer = new HadoopInvariantSerializer(conf);
    invariantSerializer.run();
    long endInvariantSerializing = System.currentTimeMillis();

    long end = System.currentTimeMillis();
    System.out.println(((end - start) / 1000) + " total seconds");
    System.out.println("Person generation time: " + ((endPerson - startPerson) / 1000));
    System.out.println("University correlated edge generation time: " + ((endUniversity - startUniversity) / 1000));
    System.out.println("Interest correlated edge generation time: " + ((endInterest - startInterest) / 1000));
    System.out.println("Random correlated edge generation time: " + ((endRandom - startRandom) / 1000));
    System.out.println("Edges merge time: " + ((endMerge - startMerge) / 1000));
    System.out.println("Person serialization time: " + ((endPersonSerializing - startPersonSerializing) / 1000));
    System.out.println("Person activity generation and serialization time: "
            + ((endPersonActivity - startPersonActivity) / 1000));
    System.out.println("Sorting update streams time: "
            + ((endSortingUpdateStreams - startSortingUpdateStreams) / 1000));
    System.out.println("Invariant schema serialization time: "
            + ((endInvariantSerializing - startInvariantSerializing) / 1000));
    System.out.println("Total Execution time: " + ((end - start) / 1000));

    if (conf.getBoolean("ldbc.snb.datagen.parametergenerator.parameters", false)
            && conf.getBoolean("ldbc.snb.datagen.generator.activity", false)) {
        System.out.println("Running Parameter Generation");
        System.out.println("Generating Interactive Parameters");
        // Create the output directory portably instead of shelling out to
        // "mkdir -p", which is unavailable on Windows.
        new File(conf.get("ldbc.snb.datagen.serializer.outputDir") + "/substitution_parameters").mkdirs();

        ProcessBuilder pb = new ProcessBuilder(conf.get("ldbc.snb.datagen.parametergenerator.python"),
                "paramgenerator/generateparams.py", "./",
                conf.get("ldbc.snb.datagen.serializer.outputDir") + "/substitution_parameters");
        pb.directory(new File("./"));
        File logInteractive = new File("parameters_interactive.log");
        pb.redirectErrorStream(true);
        pb.redirectOutput(ProcessBuilder.Redirect.appendTo(logInteractive));
        Process p = pb.start();
        p.waitFor();

        System.out.println("Generating BI Parameters");
        pb = new ProcessBuilder(conf.get("ldbc.snb.datagen.parametergenerator.python"),
                "paramgenerator/generateparamsbi.py", "./",
                conf.get("ldbc.snb.datagen.serializer.outputDir") + "/substitution_parameters");
        pb.directory(new File("./"));
        File logBi = new File("parameters_bi.log");
        pb.redirectErrorStream(true);
        pb.redirectOutput(ProcessBuilder.Redirect.appendTo(logBi));
        p = pb.start();
        p.waitFor();
        System.out.println("Finished Parameter Generation");
    }
    return 0;
}

/**
 * Loads one temporary update-stream .properties file, deletes it, and
 * returns {min_write_event_start_time, max_write_event_start_time, num_events}.
 *
 * @param propertiesFile HDFS path of the temporary properties file
 * @return a 3-element array: {min event time, max event time, event count}
 * @throws IOException if the file cannot be read or deleted
 */
private static long[] readAndDeleteStreamStats(FileSystem fs, Path propertiesFile) throws IOException {
    Properties properties = new Properties();
    // try-with-resources closes the stream even on parse failure
    // (original leaked it on exception).
    try (FSDataInputStream in = fs.open(propertiesFile)) {
        properties.load(in);
    }
    fs.delete(propertiesFile, true);
    return new long[] {
            Long.parseLong(properties.getProperty("ldbc.snb.interactive.min_write_event_start_time")),
            Long.parseLong(properties.getProperty("ldbc.snb.interactive.max_write_event_start_time")),
            Long.parseLong(properties.getProperty("ldbc.snb.interactive.num_events")) };
}
From source file:org.yamj.core.service.mediainfo.MediaInfoService.java
private InputStream createInputStream(String movieFilePath) throws IOException { // Create the command line List<String> commandMedia = new ArrayList<String>(execMediaInfo); commandMedia.add(movieFilePath);/*from www . j a v a2 s . c o m*/ ProcessBuilder pb = new ProcessBuilder(commandMedia); // set up the working directory. pb.directory(MEDIAINFO_PATH); Process p = pb.start(); return p.getInputStream(); }
From source file:edu.stanford.epad.epadws.handlers.dicom.DSOUtil.java
/**
 * Runs the external EvaluateSegmentation binary to compare two NIfTI DSOs
 * using a fixed battery of similarity metrics, and returns the tool's
 * combined console output (stdout + stderr).
 *
 * @param standardDSO the reference (ground-truth) DSO file
 * @param testDSO     the DSO file to evaluate against the reference
 * @return the tool's console output, one line per "\n"
 * @throws Exception if the process cannot be started, read, or awaited
 */
public static String getNiftiDSOComparison(File standardDSO, File testDSO) throws Exception {
    String command = EPADConfig.getEPADWebServerBaseDir() + "bin/EvaluateSegmentation "
            + standardDSO.getAbsolutePath() + " " + testDSO.getAbsolutePath()
            + " -use DICE,JACRD,AUC,KAPPA,RNDIND,ADJRIND,ICCORR,VOLSMTY,MUTINF,MAHLNBS,VARINFO,GCOERR,PROBDST,SNSVTY,SPCFTY,PRCISON,ACURCY,FALLOUT,HDRFDST@0.96@,FMEASR@0.5@ -xml "
            + EPADConfig.getEPADWebServerBaseDir() + "bin/result.xml";
    log.info(command);
    // NOTE(review): split(" ") breaks if any path contains spaces — kept for
    // compatibility, but building an argument list directly would be safer.
    String[] args = command.split(" ");
    BufferedReader br = null;
    try {
        ProcessBuilder processBuilder = new ProcessBuilder(args);
        processBuilder.directory(new File(EPADConfig.getEPADWebServerBaseDir() + "bin/"));
        processBuilder.redirectErrorStream(true);
        Process process = processBuilder.start();
        br = new BufferedReader(new InputStreamReader(process.getInputStream()));
        String line;
        StringBuilder sb = new StringBuilder();
        while ((line = br.readLine()) != null) {
            sb.append(line).append("\n");
            log.debug("./eval_seg output: " + line);
        }
        int exitValue = process.waitFor();
        log.info("Evaluate Segmentation exit value is: " + exitValue);
        return sb.toString();
    } catch (Exception e) {
        log.warning("Error evaluating dsos", e);
        if (e instanceof InterruptedException) {
            // Restore the interrupt flag so callers can observe the interruption.
            Thread.currentThread().interrupt();
        }
        throw e;
    } finally {
        // Fix: the original never closed the process streams on any path.
        // Closing the BufferedReader closes the underlying reader and stream.
        if (br != null) {
            try {
                br.close();
            } catch (IOException ignored) {
                // best effort — the result has already been captured
            }
        }
    }
}
From source file:org.cosmo.common.util.Util.java
public static Bytes resourceMinify(File file) throws IOException { // creates a temp file by appending .min // NOTE -Xss32M IS the maxsize allowed for dataURI, increase accordingly File minifiedFile = new File(file.getAbsolutePath() + ".min"); String cp = System.getProperty("java.class.path"); ProcessBuilder pb = new ProcessBuilder("java", "-Xss32M", "-Xmx256M", "-classpath", cp, "com.yahoo.platform.yui.compressor.YUICompressor", "-o", minifiedFile.getAbsolutePath(), "--charset", "UTF8", file.getAbsolutePath()); pb.directory(new File(".")); Process p = pb.start();//ww w.j a v a 2s . co m writeProcessOutput(p); System.out.println("minifiying: " + file); return Bytes.load(minifiedFile); }
From source file:org.mxupdate.eclipse.mxadapter.connectors.URLConnector.java
/** * Initializes the connection to the MX server. * * @param _projectPath path to the project temporary folder * @param _bundle bundle of the plug-in to access the class * and required libraries for the server * @param _javaPath path for the Java executable * @param _mxJarPath path of the MX Jar library * @param _url URL of the MX server * @param _user name of the user on the MX server * @param _passwd password of the user on the MX server * @param _updateByFileContent <i>true</i> if update is done by * transmitting the file content; otherwise * <i>false</i> * @throws Exception if connection to the MX server could not be started */// www. ja va 2 s . c o m public URLConnector(final File _projectPath, final Bundle _bundle, final String _javaPath, final String _mxJarPath, final String _url, final String _user, final String _passwd, final boolean _updateByFileContent) throws Exception { super(_updateByFileContent); // copy the required classes and JAR library to temporary project dir. if (!_projectPath.exists()) { _projectPath.mkdirs(); } for (final Class<?> clazz : URLConnector.SERVER_CLASSES) { // the URL must use slashes to be a valid URL (and // File.separatorChar delivers backslashes in Windows) final String clazzFileName = "/" + clazz.getName().replace('.', '/') + ".class"; this.copy(this.getClass().getClassLoader().getResource(clazzFileName), new File(_projectPath, "/bin" + clazzFileName)); } // prepare class path (and always slashes instead of backslashes) final StringBuilder classPath = new StringBuilder().append("bin").append('/').append('.') .append(File.pathSeparatorChar).append(_mxJarPath.replace('\\', '/')); // start process final ProcessBuilder pb = new ProcessBuilder(_javaPath, "-classpath", classPath.toString(), URLConnectorServer.class.getName(), _url, _user, _passwd); pb.directory(_projectPath); this.process = pb.start(); this.out = new OutputStreamWriter(this.process.getOutputStream()); this.inHandler = new 
InputStreamHandler(this.process.getInputStream()); this.errHandler = new ErrorStreamHandler(this.process.getErrorStream()); new Thread(this.inHandler).start(); new Thread(this.errHandler).start(); try { this.testConnection(); this.connected = true; } finally { if (!this.connected) { this.disconnect(); } } }
From source file:org.buildboost.hudson.plugins.boostscm.BuildBoostSCM.java
private void executeNativeBinary(String localPath, List<String> command, IFunction<Boolean, String> readerCallback) { ProcessBuilder pb = new ProcessBuilder(command); if (localPath != null) { pb.directory(new File(localPath)); }//from w w w. j ava2 s . c o m Process process = null; try { logger.info("executing " + command + " in " + localPath); process = pb.start(); InputStream inputStream = process.getInputStream(); BufferedReader isr = new BufferedReader(new InputStreamReader(inputStream)); String line; while ((line = isr.readLine()) != null) { logger.info("output: " + line); if (readerCallback == null) { continue; } boolean continueReading = readerCallback.call(line); if (!continueReading) { break; } } int exitCode = process.waitFor(); logger.info("exitCode " + exitCode); } catch (InterruptedException e) { // TODO Auto-generated catch block e.printStackTrace(); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } finally { closeStreams(process); } }
From source file:de.uni_luebeck.inb.knowarc.usecases.invocation.local.LocalUseCaseInvocation.java
@Override public String setOneInput(ReferenceService referenceService, T2Reference t2Reference, ScriptInput input) throws InvocationException { if (input.getCharsetName() == null) { input.setCharsetName(Charset.defaultCharset().name()); }//from w w w . j a v a 2 s .c om String target = null; String targetSuffix = null; if (input.isFile()) { targetSuffix = input.getTag(); } else if (input.isTempFile()) { targetSuffix = "tempfile." + (nTempFiles++) + ".tmp"; } if (input.isBinary()) { return setOneBinaryInput(referenceService, t2Reference, input, targetSuffix); } logger.info("Target is " + target); if (input.isFile() || input.isTempFile()) { target = tempDir.getAbsolutePath() + "/" + targetSuffix; // Try to get it as a file Reader r; Writer w; FileReference fileRef = getAsFileReference(referenceService, t2Reference); if (fileRef != null) { if (!input.isForceCopy()) { if (linkCommand != null) { String source = fileRef.getFile().getAbsolutePath(); String actualLinkCommand = getActualOsCommand(linkCommand, source, targetSuffix, target); logger.info("Link command is " + actualLinkCommand); String[] splitCmds = actualLinkCommand.split(" "); ProcessBuilder builder = new ProcessBuilder(splitCmds); builder.directory(tempDir); try { int code = builder.start().waitFor(); if (code == 0) { return target; } else { logger.error("Link command gave errorcode: " + code); } } catch (InterruptedException e) { // go through } catch (IOException e) { // go through } } } if (fileRef.getDataNature().equals(ReferencedDataNature.TEXT)) { r = new InputStreamReader(fileRef.openStream(this.getContext()), Charset.forName(fileRef.getCharset())); } else { try { r = new FileReader(fileRef.getFile()); } catch (FileNotFoundException e) { throw new InvocationException(e); } } } else { r = new InputStreamReader(getAsStream(referenceService, t2Reference)); } try { w = new OutputStreamWriter(new FileOutputStream(target), input.getCharsetName()); } catch (UnsupportedEncodingException e) { throw new 
InvocationException(e); } catch (FileNotFoundException e) { throw new InvocationException(e); } try { IOUtils.copyLarge(r, w); } catch (IOException e) { throw new InvocationException(e); } try { r.close(); w.close(); } catch (IOException e) { throw new InvocationException(e); } return target; } else { String value = (String) referenceService.renderIdentifier(t2Reference, String.class, this.getContext()); return value; } }
From source file:org.apache.htrace.util.HTracedProcess.java
public HTracedProcess(final File binPath, final File dataDir, final String host) throws IOException { // Create a notifier socket bound to a random port. ServerSocket listener = new ServerSocket(0); boolean success = false; Process process = null;/* ww w .j a va 2 s . c o m*/ try { // Use a random port for the web address. No 'scheme' yet. String webAddress = host + ":0"; String logPath = new File(dataDir, "log.txt").getAbsolutePath(); // Pass cmdline args to htraced to it uses our test dir for data. ProcessBuilder pb = new ProcessBuilder(binPath.toString(), "-Dlog.level=TRACE", "-Dlog.path=" + logPath, "-Dweb.address=" + webAddress, "-Ddata.store.clear=true", "-Dstartup.notification.address=localhost:" + listener.getLocalPort(), "-Ddata.store.directories=" + dataDir.toString()); pb.redirectErrorStream(true); // Inherit STDERR/STDOUT i/o; dumps on console for now. Can add logs later. pb.inheritIO(); pb.directory(dataDir); //assert pb.redirectInput() == Redirect.PIPE; //assert pb.redirectOutput().file() == dataDir; process = pb.start(); assert process.getInputStream().read() == -1; StartupNotificationData data = readStartupNotification(listener); httpAddr = data.httpAddr; LOG.info("Started htraced process " + data.processId + " with http " + "address " + data.httpAddr + ", logging to " + logPath); success = true; } finally { if (!success) { // Clean up after failure if (process != null) { process.destroy(); process = null; } } delegate = process; listener.close(); } }
From source file:nz.co.fortytwo.signalk.handler.GitHandler.java
private void runNpmInstall(final File output, File destDir) throws Exception { FileUtils.writeStringToFile(output, "\nBeginning npm install", true); ProcessBuilder pb = new ProcessBuilder("npm", "install"); Map<String, String> env = System.getenv(); if (env.containsKey("PATH")) { pb.environment().put("PATH", env.get("PATH")); }//from www. j av a2 s . c o m if (env.containsKey("Path")) { pb.environment().put("Path", env.get("Path")); } if (env.containsKey("path")) { pb.environment().put("path", env.get("path")); } pb.directory(destDir); pb.redirectErrorStream(true); pb.redirectOutput(output); final Process p = pb.start(); Thread t = new Thread() { @Override public void run() { try { p.waitFor(); FileUtils.writeStringToFile(output, "\nDONE: Npm ended sucessfully", true); } catch (Exception e) { try { logger.error(e); FileUtils.writeStringToFile(output, "\nNpm ended badly:" + e.getMessage(), true); FileUtils.writeStringToFile(output, "\n" + e.getStackTrace(), true); } catch (IOException e1) { logger.error(e1); } } } }; t.start(); }
From source file:org.generationcp.ibpworkbench.launcher.Launcher.java
protected void launchMySQLProcess() { File workingDirPath = new File(mysqlBinDir).getAbsoluteFile(); String mysqldPath = "mysqld.exe"; String myIniPath = "../my.ini"; ProcessBuilder pb = new ProcessBuilder(workingDirPath.getAbsolutePath() + File.separator + mysqldPath, "--defaults-file=" + myIniPath); pb.directory(workingDirPath); try {// ww w.ja va 2s. c om mysqlProcess = pb.start(); } catch (IOException e) { LOG.error("IOException", e); } }