List of usage examples for java.lang ProcessBuilder redirectErrorStream
boolean redirectErrorStream
To view the source code for java.lang ProcessBuilder redirectErrorStream, click the Source Link below each example.
From source file:jenkins.plugins.tanaguru.TanaguruRunnerBuilder.java
/** * /*from www . jav a 2 s. c o m*/ * @param tanaguruRunner * @param scenario * @param workspace */ private void linkToTanaguruWebapp(TanaguruRunner tanaguruRunner, String scenarioName, String scenario, File contextDir, String projectName) throws IOException, InterruptedException { File insertProcedureFile = TanaguruRunnerBuilder.createTempFile(contextDir, SQL_PROCEDURE_SCRIPT_NAME, IOUtils.toString(getClass().getResourceAsStream(SQL_PROCEDURE_NAME))); String script = IOUtils.toString(getClass().getResourceAsStream(INSERT_ACT_NAME)) .replace("$host", TanaguruInstallation.get().getDatabaseHost()) .replace("$user", TanaguruInstallation.get().getDatabaseLogin()) .replace("$port", TanaguruInstallation.get().getDatabasePort()) .replace("$passwd", TanaguruInstallation.get().getDatabasePassword()) .replace("$db", TanaguruInstallation.get().getDatabaseName()) .replace("$procedureFileName", TMP_FOLDER_NAME + SQL_PROCEDURE_SCRIPT_NAME); File insertActFile = TanaguruRunnerBuilder.createTempFile(contextDir, INSERT_ACT_SCRIPT_NAME, script); ProcessBuilder pb = new ProcessBuilder(TMP_FOLDER_NAME + INSERT_ACT_SCRIPT_NAME, TanaguruInstallation.get().getTanaguruLogin(), projectName.replaceAll("'", "'\"'\"'"), scenarioName.replaceAll("'", "'\"'\"'"), TanaguruRunnerBuilder.forceVersion1ToScenario(scenario.replaceAll("'", "'\"'\"'")), tanaguruRunner.auditId); pb.directory(contextDir); pb.redirectErrorStream(true); Process p = pb.start(); p.waitFor(); FileUtils.deleteQuietly(insertActFile); FileUtils.deleteQuietly(insertProcedureFile); }
From source file:jenkins.plugins.asqatasun.AsqatasunRunnerBuilder.java
/**
 * Registers the audit produced by an Asqatasun run in the Asqatasun web application,
 * by materializing two SQL scripts in {@code contextDir} and executing the insert
 * script as an external process against the configured database.
 *
 * @param asqatasunRunner runner holding the audit id to link
 * @param scenarioName    display name of the scenario
 * @param scenario        raw scenario content (forced to version 1 before insertion)
 * @param contextDir      working directory where the temporary scripts are created
 * @param printStream     build log sink used when {@code isDebug} is set
 * @param isDebug         when true, echoes the generated procedure file to the log
 * @param projectName     name of the project the audit belongs to
 * @throws IOException          if a script cannot be written, the process cannot
 *                              start, or a temp file cannot be deleted afterwards
 * @throws InterruptedException if interrupted while waiting for the process
 */
private void linkToWebapp(AsqatasunRunner asqatasunRunner, String scenarioName, String scenario,
        File contextDir, PrintStream printStream, boolean isDebug, String projectName)
        throws IOException, InterruptedException {
    File insertProcedureFile = AsqatasunRunnerBuilder.createTempFile(contextDir, SQL_PROCEDURE_SCRIPT_NAME,
            IOUtils.toString(getClass().getResourceAsStream(SQL_PROCEDURE_NAME)));
    if (isDebug) {
        printStream.print("insertProcedureFile created : " + insertProcedureFile.getAbsolutePath());
        printStream.print("with content : " + FileUtils.readFileToString(insertProcedureFile));
    }
    // Template substitution: inject DB connection settings and the procedure path.
    String script = IOUtils.toString(getClass().getResourceAsStream(INSERT_ACT_NAME))
            .replace("$host", AsqatasunInstallation.get().getDatabaseHost())
            .replace("$user", AsqatasunInstallation.get().getDatabaseLogin())
            .replace("$port", AsqatasunInstallation.get().getDatabasePort())
            .replace("$passwd", AsqatasunInstallation.get().getDatabasePassword())
            .replace("$db", AsqatasunInstallation.get().getDatabaseName())
            .replace("$procedureFileName", TMP_FOLDER_NAME + SQL_PROCEDURE_SCRIPT_NAME);
    File insertActFile = AsqatasunRunnerBuilder.createTempFile(contextDir, INSERT_ACT_SCRIPT_NAME, script);
    try {
        ProcessBuilder pb = new ProcessBuilder(TMP_FOLDER_NAME + INSERT_ACT_SCRIPT_NAME,
                AsqatasunInstallation.get().getAsqatasunLogin(),
                projectName.replaceAll("'", QUOTES),
                scenarioName.replaceAll("'", QUOTES),
                AsqatasunRunnerBuilder.forceVersion1ToScenario(scenario.replaceAll("'", QUOTES)),
                asqatasunRunner.getAuditId());
        pb.directory(contextDir);
        pb.redirectErrorStream(true);
        Process p = pb.start();
        // Drain the merged output: a child process can block forever on a full
        // pipe buffer if its output is never consumed (see Process javadoc).
        try (java.io.InputStream is = p.getInputStream()) {
            while (is.read() != -1) {
                // discard
            }
        }
        p.waitFor();
    } finally {
        // Cleanup runs even when starting/waiting failed; forceDelete kept so
        // deletion failures remain visible to the caller, as before.
        FileUtils.forceDelete(insertActFile);
        FileUtils.forceDelete(insertProcedureFile);
    }
}
From source file:org.josso.tooling.gshell.install.installer.VFSInstaller.java
/** * Executes the specified external command and arguments in a separate process. * /*w w w . ja v a2s . co m*/ * @param cmdarray array containing the command to call and its arguments * @return the exit value of the process. By convention, 0 indicates normal termination. * @throws IOException */ public int executeExternalCommand(String[] cmdarray) throws IOException { int exitVal = -1; try { ProcessBuilder pb = new ProcessBuilder(cmdarray); pb.redirectErrorStream(true); Process pr = pb.start(); BufferedReader input = new BufferedReader(new InputStreamReader(pr.getInputStream())); String line = null; while ((line = input.readLine()) != null) { log.debug(line); } exitVal = pr.waitFor(); } catch (Exception e) { exitVal = -1; log.debug(e.getMessage()); } return exitVal; }
From source file:uk.ac.liverpool.narrative.SolutionGraph.java
public void validatePlans() throws IOException, InterruptedException, URISyntaxException { int n = 0;/*from www. ja va 2s . c om*/ for (Solution s : solutions) { n++; File tempPlan = File.createTempFile("plan", ".plan"); FileWriter fw = new FileWriter(tempPlan); for (Action a : s.actions) { fw.write("(" + a + ")\n"); } fw.close(); ProcessBuilder pb; File domain = new File(s.domain); File problem = new File(s.problem); File val = new File("validate");// pb = new ProcessBuilder(val.getAbsolutePath(), domain.getAbsolutePath(), problem.getAbsolutePath(), tempPlan.getAbsolutePath()); pb.redirectErrorStream(true); Process p = pb.start(); InputStream io = p.getInputStream(); StringWriter sw = new StringWriter(); int i; while ((i = io.read()) != -1) { sw.append((char) i); } p.waitFor(); if (!sw.toString().contains("Plan valid")) { System.out.println(sw.toString()); } else { System.out.println("Plan valid " + n); } } }
From source file:hydrograph.ui.graph.job.JobManager.java
/** * Kill remote process./*from w ww.ja va2 s.com*/ * * @param job the job * @param gefCanvas the gef canvas */ private void killRemoteProcess(Job job, DefaultGEFCanvas gefCanvas) { String gradleCommand = getKillJobCommand(job); String[] runCommand = new String[3]; if (OSValidator.isWindows()) { String[] command = { Messages.CMD, "/c", gradleCommand }; runCommand = command; } else if (OSValidator.isMac()) { String[] command = { Messages.SHELL, "-c", gradleCommand }; runCommand = command; } ProcessBuilder processBuilder = new ProcessBuilder(runCommand); processBuilder.directory(new File(job.getJobProjectDirectory())); processBuilder.redirectErrorStream(true); try { Process process = processBuilder.start(); if (gefCanvas != null) logKillProcessLogsAsyncronously(process, job, gefCanvas); } catch (IOException e) { logger.debug("Unable to kill the job", e); } }
From source file:ldbc.snb.datagen.generator.LDBCDatagen.java
/**
 * Runs the full LDBC SNB data generation pipeline on Hadoop: person generation,
 * three rounds of "knows" edge generation (university / interest / random),
 * edge merging, person (activity) serialization, optional update-stream sorting,
 * invariant-schema serialization, and optional substitution-parameter generation
 * via external Python scripts. Prints per-phase timings to stdout.
 *
 * @param conf Hadoop configuration carrying all ldbc.snb.datagen.* settings
 * @return 0 on completion (failures surface as exceptions)
 * @throws Exception propagated from any Hadoop job or external process
 */
public int runGenerateJob(Configuration conf) throws Exception {
    String hadoopPrefix = conf.get("ldbc.snb.datagen.serializer.hadoopDir");
    FileSystem fs = FileSystem.get(conf);
    // Degree-distribution split used by every knows-edge generation round.
    ArrayList<Float> percentages = new ArrayList<Float>();
    percentages.add(0.45f);
    percentages.add(0.45f);
    percentages.add(0.1f);
    long start = System.currentTimeMillis();

    // --- Phase 1: person generation -------------------------------------
    printProgress("Starting: Person generation");
    long startPerson = System.currentTimeMillis();
    HadoopPersonGenerator personGenerator = new HadoopPersonGenerator(conf);
    personGenerator.run(hadoopPrefix + "/persons", "ldbc.snb.datagen.hadoop.UniversityKeySetter");
    long endPerson = System.currentTimeMillis();

    // --- Phase 2a: university-correlated knows edges --------------------
    printProgress("Creating university location correlated edges");
    long startUniversity = System.currentTimeMillis();
    HadoopKnowsGenerator knowsGenerator = new HadoopKnowsGenerator(conf,
            "ldbc.snb.datagen.hadoop.UniversityKeySetter", "ldbc.snb.datagen.hadoop.RandomKeySetter",
            percentages, 0, conf.get("ldbc.snb.datagen.generator.knowsGenerator"));
    knowsGenerator.run(hadoopPrefix + "/persons", hadoopPrefix + "/universityEdges");
    long endUniversity = System.currentTimeMillis();

    // --- Phase 2b: interest-correlated knows edges ----------------------
    printProgress("Creating main interest correlated edges");
    long startInterest = System.currentTimeMillis();
    knowsGenerator = new HadoopKnowsGenerator(conf, "ldbc.snb.datagen.hadoop.InterestKeySetter",
            "ldbc.snb.datagen.hadoop.RandomKeySetter", percentages, 1,
            conf.get("ldbc.snb.datagen.generator.knowsGenerator"));
    knowsGenerator.run(hadoopPrefix + "/persons", hadoopPrefix + "/interestEdges");
    long endInterest = System.currentTimeMillis();

    // --- Phase 2c: random knows edges -----------------------------------
    printProgress("Creating random correlated edges");
    long startRandom = System.currentTimeMillis();
    knowsGenerator = new HadoopKnowsGenerator(conf, "ldbc.snb.datagen.hadoop.RandomKeySetter",
            "ldbc.snb.datagen.hadoop.RandomKeySetter", percentages, 2,
            "ldbc.snb.datagen.generator.RandomKnowsGenerator");
    knowsGenerator.run(hadoopPrefix + "/persons", hadoopPrefix + "/randomEdges");
    long endRandom = System.currentTimeMillis();

    // The raw person files are no longer needed once all edges exist.
    fs.delete(new Path(DatagenParams.hadoopDir + "/persons"), true);

    // --- Phase 3: merge the three edge files ----------------------------
    printProgress("Merging the different edge files");
    ArrayList<String> edgeFileNames = new ArrayList<String>();
    edgeFileNames.add(hadoopPrefix + "/universityEdges");
    edgeFileNames.add(hadoopPrefix + "/interestEdges");
    edgeFileNames.add(hadoopPrefix + "/randomEdges");
    long startMerge = System.currentTimeMillis();
    HadoopMergeFriendshipFiles merger = new HadoopMergeFriendshipFiles(conf,
            "ldbc.snb.datagen.hadoop.RandomKeySetter");
    merger.run(hadoopPrefix + "/mergedPersons", edgeFileNames);
    long endMerge = System.currentTimeMillis();

    // --- Phase 4: serialize persons (optionally sorted) -----------------
    printProgress("Serializing persons");
    long startPersonSerializing = System.currentTimeMillis();
    if (!conf.getBoolean("ldbc.snb.datagen.serializer.persons.sort", false)) {
        HadoopPersonSerializer serializer = new HadoopPersonSerializer(conf);
        serializer.run(hadoopPrefix + "/mergedPersons");
    } else {
        HadoopPersonSortAndSerializer serializer = new HadoopPersonSortAndSerializer(conf);
        serializer.run(hadoopPrefix + "/mergedPersons");
    }
    long endPersonSerializing = System.currentTimeMillis();

    // --- Phase 5: optional person-activity generation -------------------
    long startPersonActivity = System.currentTimeMillis();
    if (conf.getBoolean("ldbc.snb.datagen.generator.activity", true)) {
        printProgress("Generating and serializing person activity");
        HadoopPersonActivityGenerator activityGenerator = new HadoopPersonActivityGenerator(conf);
        activityGenerator.run(hadoopPrefix + "/mergedPersons");
        int numThreads = DatagenParams.numThreads;
        int blockSize = DatagenParams.blockSize;
        int numBlocks = (int) Math.ceil(DatagenParams.numPersons / (double) blockSize);
        // Copy per-reducer factor/friend-list files to the local working dir;
        // only the first numBlocks reducers actually produced output.
        for (int i = 0; i < numThreads; ++i) {
            if (i < numBlocks) {
                fs.copyToLocalFile(false,
                        new Path(DatagenParams.hadoopDir + "/m" + i + "personFactors.txt"), new Path("./"));
                fs.copyToLocalFile(false,
                        new Path(DatagenParams.hadoopDir + "/m" + i + "activityFactors.txt"), new Path("./"));
                fs.copyToLocalFile(false,
                        new Path(DatagenParams.hadoopDir + "/m0friendList" + i + ".csv"), new Path("./"));
            }
        }
    }
    long endPersonActivity = System.currentTimeMillis();

    // --- Phase 6: optional update-stream sorting/serialization ----------
    long startSortingUpdateStreams = System.currentTimeMillis();
    if (conf.getBoolean("ldbc.snb.datagen.serializer.updateStreams", false)) {
        printProgress("Sorting update streams ");
        List<String> personStreamsFileNames = new ArrayList<String>();
        List<String> forumStreamsFileNames = new ArrayList<String>();
        for (int i = 0; i < DatagenParams.numThreads; ++i) {
            int numPartitions = conf.getInt("ldbc.snb.datagen.serializer.numUpdatePartitions", 1);
            for (int j = 0; j < numPartitions; ++j) {
                personStreamsFileNames
                        .add(DatagenParams.hadoopDir + "/temp_updateStream_person_" + i + "_" + j);
                if (conf.getBoolean("ldbc.snb.datagen.generator.activity", false)) {
                    forumStreamsFileNames
                            .add(DatagenParams.hadoopDir + "/temp_updateStream_forum_" + i + "_" + j);
                }
            }
        }
        HadoopUpdateStreamSorterAndSerializer updateSorterAndSerializer = new HadoopUpdateStreamSorterAndSerializer(
                conf);
        updateSorterAndSerializer.run(personStreamsFileNames, "person");
        updateSorterAndSerializer.run(forumStreamsFileNames, "forum");
        for (String file : personStreamsFileNames) {
            fs.delete(new Path(file), true);
        }
        for (String file : forumStreamsFileNames) {
            fs.delete(new Path(file), true);
        }
        // Aggregate min/max event time and event count over all per-thread
        // .properties files (person streams, plus forum streams if activity on).
        long minDate = Long.MAX_VALUE;
        long maxDate = Long.MIN_VALUE;
        long count = 0;
        for (int i = 0; i < DatagenParams.numThreads; ++i) {
            Path propertiesFile = new Path(
                    DatagenParams.hadoopDir + "/temp_updateStream_person_" + i + ".properties");
            FSDataInputStream file = fs.open(propertiesFile);
            Properties properties = new Properties();
            properties.load(file);
            long aux;
            aux = Long.parseLong(properties.getProperty("ldbc.snb.interactive.min_write_event_start_time"));
            minDate = aux < minDate ? aux : minDate;
            aux = Long.parseLong(properties.getProperty("ldbc.snb.interactive.max_write_event_start_time"));
            maxDate = aux > maxDate ? aux : maxDate;
            aux = Long.parseLong(properties.getProperty("ldbc.snb.interactive.num_events"));
            count += aux;
            file.close();
            fs.delete(propertiesFile, true);
            if (conf.getBoolean("ldbc.snb.datagen.generator.activity", false)) {
                propertiesFile = new Path(
                        DatagenParams.hadoopDir + "/temp_updateStream_forum_" + i + ".properties");
                file = fs.open(propertiesFile);
                properties = new Properties();
                properties.load(file);
                aux = Long.parseLong(properties.getProperty("ldbc.snb.interactive.min_write_event_start_time"));
                minDate = aux < minDate ? aux : minDate;
                aux = Long.parseLong(properties.getProperty("ldbc.snb.interactive.max_write_event_start_time"));
                maxDate = aux > maxDate ? aux : maxDate;
                aux = Long.parseLong(properties.getProperty("ldbc.snb.interactive.num_events"));
                count += aux;
                file.close();
                fs.delete(propertiesFile, true);
            }
        }
        // Write the combined updateStream.properties summary.
        // NOTE(review): (maxDate - minDate) / count divides by zero if no
        // events were produced — presumably never the case in practice; verify.
        OutputStream output = fs
                .create(new Path(DatagenParams.socialNetworkDir + "/updateStream" + ".properties"), true);
        output.write(new String("ldbc.snb.interactive.gct_delta_duration:" + DatagenParams.deltaTime + "\n")
                .getBytes());
        output.write(
                new String("ldbc.snb.interactive.min_write_event_start_time:" + minDate + "\n").getBytes());
        output.write(
                new String("ldbc.snb.interactive.max_write_event_start_time:" + maxDate + "\n").getBytes());
        output.write(new String("ldbc.snb.interactive.update_interleave:" + (maxDate - minDate) / count + "\n")
                .getBytes());
        output.write(new String("ldbc.snb.interactive.num_events:" + count).getBytes());
        output.close();
    }
    long endSortingUpdateStreams = System.currentTimeMillis();

    // --- Phase 7: invariant schema --------------------------------------
    printProgress("Serializing invariant schema ");
    long startInvariantSerializing = System.currentTimeMillis();
    HadoopInvariantSerializer invariantSerializer = new HadoopInvariantSerializer(conf);
    invariantSerializer.run();
    long endInvariantSerializing = System.currentTimeMillis();
    long end = System.currentTimeMillis();

    // --- Timing report ---------------------------------------------------
    System.out.println(((end - start) / 1000) + " total seconds");
    System.out.println("Person generation time: " + ((endPerson - startPerson) / 1000));
    System.out.println(
            "University correlated edge generation time: " + ((endUniversity - startUniversity) / 1000));
    System.out.println("Interest correlated edge generation time: " + ((endInterest - startInterest) / 1000));
    System.out.println("Random correlated edge generation time: " + ((endRandom - startRandom) / 1000));
    System.out.println("Edges merge time: " + ((endMerge - startMerge) / 1000));
    System.out
            .println("Person serialization time: " + ((endPersonSerializing - startPersonSerializing) / 1000));
    System.out.println("Person activity generation and serialization time: "
            + ((endPersonActivity - startPersonActivity) / 1000));
    System.out.println(
            "Sorting update streams time: " + ((endSortingUpdateStreams - startSortingUpdateStreams) / 1000));
    System.out.println("Invariant schema serialization time: "
            + ((endInvariantSerializing - startInvariantSerializing) / 1000));
    System.out.println("Total Execution time: " + ((end - start) / 1000));

    // --- Phase 8: optional parameter generation via external python -----
    if (conf.getBoolean("ldbc.snb.datagen.parametergenerator.parameters", false)
            && conf.getBoolean("ldbc.snb.datagen.generator.activity", false)) {
        System.out.println("Running Parameter Generation");
        System.out.println("Generating Interactive Parameters");
        // NOTE(review): exit codes of the spawned processes are not checked,
        // so a failed paramgenerator run goes unnoticed — confirm intended.
        ProcessBuilder pb = new ProcessBuilder("mkdir", "-p",
                conf.get("ldbc.snb.datagen.serializer.outputDir") + "/substitution_parameters");
        pb.directory(new File("./"));
        Process p = pb.start();
        p.waitFor();
        pb = new ProcessBuilder(conf.get("ldbc.snb.datagen.parametergenerator.python"),
                "paramgenerator/generateparams.py", "./",
                conf.get("ldbc.snb.datagen.serializer.outputDir") + "/substitution_parameters");
        pb.directory(new File("./"));
        File logInteractive = new File("parameters_interactive.log");
        pb.redirectErrorStream(true);
        pb.redirectOutput(ProcessBuilder.Redirect.appendTo(logInteractive));
        p = pb.start();
        p.waitFor();
        System.out.println("Generating BI Parameters");
        pb = new ProcessBuilder(conf.get("ldbc.snb.datagen.parametergenerator.python"),
                "paramgenerator/generateparamsbi.py", "./",
                conf.get("ldbc.snb.datagen.serializer.outputDir") + "/substitution_parameters");
        pb.directory(new File("./"));
        File logBi = new File("parameters_bi.log");
        pb.redirectErrorStream(true);
        pb.redirectOutput(ProcessBuilder.Redirect.appendTo(logBi));
        p = pb.start();
        p.waitFor();
        System.out.println("Finished Parameter Generation");
    }
    return 0;
}
From source file:net.doubledoordev.backend.server.Server.java
/** * Downloads and uses specific forge installer *//*from w w w . j a v a2 s. c o m*/ public void installForge(final IMethodCaller methodCaller, final String name) { if (getOnline()) throw new ServerOnlineException(); final String version = Helper.getForgeVersionForName(name); if (version == null) throw new IllegalArgumentException("Forge with ID " + name + " not found."); if (downloading) throw new IllegalStateException("Already downloading something."); if (!isCoOwner(methodCaller.getUser())) throw new AuthenticationException(); final Server instance = this; new Thread(new Runnable() { @Override public void run() { downloading = true; try { // delete old files for (File file : folder.listFiles(ACCEPT_MINECRAFT_SERVER_FILTER)) file.delete(); for (File file : folder.listFiles(ACCEPT_FORGE_FILTER)) file.delete(); // download new files String url = Constants.FORGE_INSTALLER_URL.replace("%ID%", version); String forgeName = url.substring(url.lastIndexOf('/')); File forge = new File(folder, forgeName); FileUtils.copyURLToFile(new URL(url), forge); // run installer List<String> arguments = new ArrayList<>(); arguments.add(Constants.getJavaPath()); arguments.add("-Xmx1G"); arguments.add("-jar"); arguments.add(forge.getName()); arguments.add("--installServer"); ProcessBuilder builder = new ProcessBuilder(arguments); builder.directory(folder); builder.redirectErrorStream(true); final Process process = builder.start(); printLine(arguments.toString()); BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream())); String line; while ((line = reader.readLine()) != null) { methodCaller.sendMessage(line); printLine(line); } try { process.waitFor(); } catch (InterruptedException e) { e.printStackTrace(); } for (String name : folder.list(ACCEPT_FORGE_FILTER)) getJvmData().jarName = name; forge.delete(); methodCaller.sendDone(); printLine("Forge installer done."); instance.update(); } catch (IOException e) { 
printLine("##################################################################"); printLine("Error installing a new forge version (version " + version + ")"); printLine(e.toString()); printLine("##################################################################"); e.printStackTrace(); } downloading = false; } }, getID() + "-forge-installer").start(); }
From source file:com.tencent.gaia.portal.util.Shell.java
/**
 * Run a command.
 *
 * Launches the configured command via ProcessBuilder, drains stderr on a
 * dedicated thread while stdout is parsed on the calling thread, enforces an
 * optional timeout via a Timer, and throws ExitCodeException on a non-zero
 * exit code. The close/teardown ordering in the finally block is deliberately
 * race-aware (see inline comments) — do not reorder it.
 *
 * @throws IOException on launch/read failure, on interruption (wrapped), or
 *         as ExitCodeException when the process exits non-zero
 */
private void runCommand() throws IOException {
    ProcessBuilder builder = new ProcessBuilder(getExecString());
    Timer timeOutTimer = null;
    ShellTimeoutTimerTask timeoutTimerTask = null;
    timedOut = new AtomicBoolean(false);
    completed = new AtomicBoolean(false);
    if (environment != null) {
        builder.environment().putAll(this.environment);
    }
    if (dir != null) {
        builder.directory(this.dir);
    }
    // Whether stderr is merged into stdout is an instance-level setting.
    builder.redirectErrorStream(redirectErrorStream);
    if (Shell.WINDOWS) {
        synchronized (WindowsProcessLaunchLock) {
            // To workaround the race condition issue with child processes
            // inheriting unintended handles during process launch that can
            // lead to hangs on reading output and error streams, we
            // serialize process creation. More info available at:
            // http://support.microsoft.com/kb/315939
            process = builder.start();
        }
    } else {
        process = builder.start();
    }
    if (timeOutInterval > 0) {
        timeOutTimer = new Timer("Shell command timeout");
        timeoutTimerTask = new ShellTimeoutTimerTask(this);
        //One time scheduling.
        timeOutTimer.schedule(timeoutTimerTask, timeOutInterval);
    }
    final BufferedReader errReader = new BufferedReader(new InputStreamReader(process.getErrorStream()));
    BufferedReader inReader = new BufferedReader(new InputStreamReader(process.getInputStream()));
    final StringBuffer errMsg = new StringBuffer();
    // read error and input streams as this would free up the buffers
    // free the error stream buffer
    Thread errThread = new Thread() {
        @Override
        public void run() {
            try {
                String line = errReader.readLine();
                while ((line != null) && !isInterrupted()) {
                    errMsg.append(line);
                    errMsg.append(System.getProperty("line.separator"));
                    line = errReader.readLine();
                }
            } catch (IOException ioe) {
                LOG.warn("Error reading the error stream", ioe);
            }
        }
    };
    try {
        errThread.start();
    } catch (IllegalStateException ise) {
        // Thread.start can only throw if the thread was already started,
        // which cannot happen here; deliberately ignored.
    }
    try {
        parseExecResult(inReader); // parse the output
        // clear the input stream buffer
        String line = inReader.readLine();
        while (line != null) {
            line = inReader.readLine();
        }
        // wait for the process to finish and check the exit code
        exitCode = process.waitFor();
        try {
            // make sure that the error thread exits
            errThread.join();
        } catch (InterruptedException ie) {
            LOG.warn("Interrupted while reading the error stream", ie);
        }
        completed.set(true);
        //the timeout thread handling
        //taken care in finally block
        if (exitCode != 0) {
            throw new ExitCodeException(exitCode, errMsg.toString());
        }
    } catch (InterruptedException ie) {
        throw new IOException(ie.toString());
    } finally {
        if (timeOutTimer != null) {
            timeOutTimer.cancel();
        }
        // close the input stream
        try {
            // JDK 7 tries to automatically drain the input streams for us
            // when the process exits, but since close is not synchronized,
            // it creates a race if we close the stream first and the same
            // fd is recycled. the stream draining thread will attempt to
            // drain that fd!! it may block, OOM, or cause bizarre behavior
            // see: https://bugs.openjdk.java.net/browse/JDK-8024521
            // issue is fixed in build 7u60
            InputStream stdout = process.getInputStream();
            synchronized (stdout) {
                inReader.close();
            }
        } catch (IOException ioe) {
            LOG.warn("Error while closing the input stream", ioe);
        }
        // If we never completed normally, stop the stderr drainer before
        // closing its reader out from under it.
        try {
            if (!completed.get()) {
                errThread.interrupt();
                errThread.join();
            }
        } catch (InterruptedException ie) {
            LOG.warn("Interrupted while joining errThread");
        }
        try {
            InputStream stderr = process.getErrorStream();
            synchronized (stderr) {
                errReader.close();
            }
        } catch (IOException ioe) {
            LOG.warn("Error while closing the error stream", ioe);
        }
        process.destroy();
        lastTime = System.currentTimeMillis();
    }
}
From source file:net.doubledoordev.backend.server.Server.java
/**
 * Start the server in a process controlled by us.
 * Threaded to avoid hanging.
 *
 * Performs synchronous preflight checks (online/downloading state, jar
 * presence, owner RAM quota, blacklisted JVM args, EULA file), then spawns a
 * starter thread that builds the java command line, launches the process, and
 * attaches a stream-eater thread that forwards server output to printLine.
 *
 * @throws ServerOnlineException if the server is already running or starting
 * @throws Exception             for any failed preflight check
 */
public void startServer() throws Exception {
    if (getOnline() || starting) throw new ServerOnlineException();
    if (downloading) throw new Exception("Still downloading something. You can see the progress in the server console.");
    // A ".tmp" suffix marks an in-progress jar download.
    if (new File(folder, getJvmData().jarName + ".tmp").exists()) throw new Exception("Minecraft server jar still downloading...");
    if (!new File(folder, getJvmData().jarName).exists()) throw new FileNotFoundException(getJvmData().jarName + " not found.");
    User user = Settings.getUserByName(getOwner());
    if (user == null) throw new Exception("No owner set??");
    // -1 means "no RAM quota" for this owner.
    if (user.getMaxRamLeft() != -1 && getJvmData().ramMax > user.getMaxRamLeft()) throw new Exception("Out of usable RAM. Lower your max RAM.");
    saveProperties();
    starting = true;
    final Server instance = this;
    // Reject user-supplied JVM options that match the blacklist.
    for (String blocked : SERVER_START_ARGS_BLACKLIST_PATTERNS)
        if (getJvmData().extraJavaParameters.contains(blocked))
            throw new Exception("JVM options contain a blocked option: " + blocked);
    // Pre-accept the Minecraft EULA so startup is not halted by the prompt.
    File eula = new File(getFolder(), "eula.txt");
    if (!eula.exists()) {
        try {
            FileUtils.writeStringToFile(eula,
                    "#The server owner indicated to agree with the EULA when submitting the from that produced this server instance.\n"
                            + "#That means that there is no need for extra halting of the server startup sequence with this stupid file.\n"
                            + "#" + new Date().toString() + "\n" + "eula=true\n");
        } catch (IOException e) {
            printLine("Error making the eula file....");
            e.printStackTrace();
        }
    }
    new Thread(new Runnable() {
        @Override
        public void run() {
            printLine("Starting server ................");
            try {
                /**
                 * Build arguments list.
                 */
                List<String> arguments = new ArrayList<>();
                arguments.add(Constants.getJavaPath());
                arguments.add("-server");
                {
                    // Only add memory flags that are actually configured (> 0).
                    int amount = getJvmData().ramMin;
                    if (amount > 0) arguments.add(String.format("-Xms%dM", amount));
                    amount = getJvmData().ramMax;
                    if (amount > 0) arguments.add(String.format("-Xmx%dM", amount));
                    amount = getJvmData().permGen;
                    if (amount > 0) arguments.add(String.format("-XX:MaxPermSize=%dm", amount));
                }
                if (getJvmData().extraJavaParameters.trim().length() != 0) arguments.add(getJvmData().extraJavaParameters.trim());
                arguments.add("-jar");
                arguments.add(getJvmData().jarName);
                arguments.add("nogui");
                if (getJvmData().extraMCParameters.trim().length() != 0) arguments.add(getJvmData().extraMCParameters.trim());
                // Debug printout
                printLine("Arguments: " + arguments.toString());
                /**
                 * Make ProcessBuilder, set rundir, and make sure the io gets redirected
                 */
                ProcessBuilder pb = new ProcessBuilder(arguments);
                pb.directory(folder);
                pb.redirectErrorStream(true);
                // Jar may have vanished between the preflight check and here.
                if (!new File(folder, getJvmData().jarName).exists()) return; // for reasons of WTF?
                process = pb.start();
                // Stream-eater thread: forwards all server output to the log
                // and signals an update when the process ends.
                new Thread(new Runnable() {
                    @Override
                    public void run() {
                        try {
                            printLine("----=====##### STARTING SERVER #####=====-----");
                            BufferedReader reader = new BufferedReader(
                                    new InputStreamReader(process.getInputStream()));
                            String line;
                            while ((line = reader.readLine()) != null) {
                                printLine(line);
                            }
                            printLine("----=====##### SERVER PROCESS HAS ENDED #####=====-----");
                            instance.update();
                        } catch (IOException e) {
                            error(e);
                        }
                    }
                }, ID.concat("-streamEater")).start();
                instance.update();
            } catch (IOException e) {
                error(e);
            }
            starting = false;
        }
    }, "ServerStarter-" + getID()).start(); // <-- Very important call.
}
From source file:org.pshdl.model.simulation.codegenerator.GoCodeGenerator.java
public IHDLInterpreterFactory<NativeRunner> createInterpreter(final File tempDir) { try {//from ww w . java2s.c o m IHDLInterpreterFactory<NativeRunner> _xblockexpression = null; { final CharSequence dartCode = this.generateMainCode(); final File dutFile = new File(tempDir, "TestUnit.go"); Files.createParentDirs(dutFile); Files.write(dartCode, dutFile, StandardCharsets.UTF_8); final File testRunner = new File(tempDir, "runner.go"); final InputStream runnerStream = CCodeGenerator.class .getResourceAsStream("/org/pshdl/model/simulation/includes/runner.go"); final FileOutputStream fos = new FileOutputStream(testRunner); try { ByteStreams.copy(runnerStream, fos); } finally { fos.close(); } String _absolutePath = testRunner.getAbsolutePath(); String _absolutePath_1 = dutFile.getAbsolutePath(); ProcessBuilder _processBuilder = new ProcessBuilder("/usr/local/go/bin/go", "build", _absolutePath, _absolutePath_1); ProcessBuilder _directory = _processBuilder.directory(tempDir); ProcessBuilder _redirectErrorStream = _directory.redirectErrorStream(true); final ProcessBuilder goBuilder = _redirectErrorStream.inheritIO(); final Process goCompiler = goBuilder.start(); int _waitFor = goCompiler.waitFor(); boolean _notEquals = (_waitFor != 0); if (_notEquals) { throw new RuntimeException("Compilation of Go Program failed"); } _xblockexpression = new IHDLInterpreterFactory<NativeRunner>() { public NativeRunner newInstance() { try { final File runnerExecutable = new File(tempDir, "runner"); String _absolutePath = runnerExecutable.getAbsolutePath(); ProcessBuilder _processBuilder = new ProcessBuilder(_absolutePath); ProcessBuilder _directory = _processBuilder.directory(tempDir); final ProcessBuilder goBuilder = _directory.redirectErrorStream(true); final Process goRunner = goBuilder.start(); InputStream _inputStream = goRunner.getInputStream(); OutputStream _outputStream = goRunner.getOutputStream(); String _absolutePath_1 = runnerExecutable.getAbsolutePath(); return new 
NativeRunner(_inputStream, _outputStream, GoCodeGenerator.this.em, goRunner, 5, _absolutePath_1); } catch (Throwable _e) { throw Exceptions.sneakyThrow(_e); } } }; } return _xblockexpression; } catch (Throwable _e) { throw Exceptions.sneakyThrow(_e); } }