List of usage examples for java.lang.ProcessBuilder.environment()
Method signature: Map<String,String> environment()
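All of the examples below follow the same basic pattern: obtain the map returned by ProcessBuilder.environment(), add or override variables in it, and then start the process. The following minimal, self-contained sketch illustrates that pattern; the command (printenv, available on Unix-like systems) and the variable names are illustrative only and are not taken from any of the examples below.

import java.io.IOException;
import java.util.Map;

public class EnvironmentExample {
    public static void main(String[] args) throws IOException, InterruptedException {
        // The builder starts with a copy of the current process environment;
        // the returned map is modifiable and affects only processes started from this builder.
        ProcessBuilder builder = new ProcessBuilder("printenv", "GREETING");
        Map<String, String> env = builder.environment();
        env.put("GREETING", "hello");   // add or override a variable
        env.remove("TMPDIR");           // drop an inherited variable (illustrative)
        builder.inheritIO();            // forward the child's output to this console
        Process process = builder.start();
        int exitCode = process.waitFor();
        System.out.println("Exit code: " + exitCode);
    }
}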
From source file:org.craftercms.deployer.git.processor.ShellProcessor.java
@Override
public void doProcess(SiteConfiguration siteConfiguration, PublishedChangeSet changeSet)
        throws PublishingException {
    checkConfiguration(siteConfiguration);
    LOGGER.debug("Starting Shell Processor");
    ProcessBuilder builder = new ProcessBuilder();
    builder.directory(getWorkingDir(workingDir, siteConfiguration.getSiteId()));
    LOGGER.debug("Working directory is " + workingDir);
    HashMap<String, String> argumentsMap = buildArgumentsMap(getFileList(changeSet));
    if (asSingleCommand) {
        StrSubstitutor substitutor = new StrSubstitutor(argumentsMap, "%{", "}");
        String execComand = substitutor.replace(command);
        LOGGER.debug("Command to be Executed is " + execComand);
        builder.command("/bin/bash", "-c", execComand);
    } else {
        Set<String> keys = argumentsMap.keySet();
        ArrayList<String> commandAsList = new ArrayList<String>();
        commandAsList.add(command.trim());
        for (String key : keys) {
            if (!key.equalsIgnoreCase(INCLUDE_FILTER_PARAM)) {
                commandAsList.add(argumentsMap.get(key));
            }
        }
        LOGGER.debug("Command to be Executed is " + StringUtils.join(commandAsList, " "));
        builder.command(commandAsList);
    }
    builder.environment().putAll(enviroment);
    builder.redirectErrorStream(true);
    try {
        Process process = builder.start();
        process.waitFor();
        BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream()));
        String str;
        while ((str = reader.readLine()) != null) {
            LOGGER.info("PROCESS OUTPUT :" + str);
        }
        reader.close();
        LOGGER.info("Process Finish with Exit Code " + process.exitValue());
        LOGGER.debug("Process Output ");
    } catch (IOException ex) {
        LOGGER.error("Error ", ex);
    } catch (InterruptedException e) {
        LOGGER.error("Error ", e);
    } finally {
        LOGGER.debug("End of Shell Processor");
    }
}
From source file:com.lenovo.tensorhusky.common.utils.Shell.java
/**
 * Run a command
 */
private void runCommand() throws IOException {
    ProcessBuilder builder = new ProcessBuilder(getExecString());
    Timer timeOutTimer = null;
    ShellTimeoutTimerTask timeoutTimerTask = null;
    timedOut = new AtomicBoolean(false);
    completed = new AtomicBoolean(false);

    if (environment != null) {
        builder.environment().putAll(this.environment);
    }
    if (dir != null) {
        builder.directory(this.dir);
    }
    builder.redirectErrorStream(redirectErrorStream);

    if (Shell.WINDOWS) {
        synchronized (WindowsProcessLaunchLock) {
            // To workaround the race condition issue with child processes
            // inheriting unintended handles during process launch that can
            // lead to hangs on reading output and error streams, we
            // serialize process creation. More info available at:
            // http://support.microsoft.com/kb/315939
            process = builder.start();
        }
    } else {
        process = builder.start();
    }

    if (timeOutInterval > 0) {
        timeOutTimer = new Timer("Shell command timeout");
        timeoutTimerTask = new ShellTimeoutTimerTask(this);
        // One time scheduling.
        timeOutTimer.schedule(timeoutTimerTask, timeOutInterval);
    }
    final BufferedReader errReader = new BufferedReader(
            new InputStreamReader(process.getErrorStream(), Charset.defaultCharset()));
    final BufferedReader inReader = new BufferedReader(
            new InputStreamReader(process.getInputStream(), Charset.defaultCharset()));
    final StringBuffer errMsg = new StringBuffer();

    // read error and input streams as this would free up the buffers
    // free the error stream buffer
    Thread errThread = new Thread() {
        @Override
        public void run() {
            try {
                String line = errReader.readLine();
                while ((line != null) && !isInterrupted()) {
                    errMsg.append(line);
                    errMsg.append(System.getProperty("line.separator"));
                    line = errReader.readLine();
                }
            } catch (IOException ioe) {
                LOG.warn("Error reading the error stream", ioe);
            }
        }
    };
    try {
        errThread.start();
    } catch (IllegalStateException ise) {
    } catch (OutOfMemoryError oe) {
        LOG.error("Caught " + oe + ". One possible reason is that ulimit"
                + " setting of 'max user processes' is too low. If so, do"
                + " 'ulimit -u <largerNum>' and try again.");
        throw oe;
    }
    try {
        parseExecResult(inReader); // parse the output
        // clear the input stream buffer
        String line = inReader.readLine();
        while (line != null) {
            line = inReader.readLine();
        }
        // wait for the process to finish and check the exit code
        exitCode = process.waitFor();
        // make sure that the error thread exits
        joinThread(errThread);
        completed.set(true);
        // the timeout thread handling
        // taken care in finally block
        if (exitCode != 0) {
            throw new ExitCodeException(exitCode, errMsg.toString());
        }
    } catch (InterruptedException ie) {
        throw new IOException(ie.toString());
    } finally {
        if (timeOutTimer != null) {
            timeOutTimer.cancel();
        }
        // close the input stream
        try {
            // JDK 7 tries to automatically drain the input streams for us
            // when the process exits, but since close is not synchronized,
            // it creates a race if we close the stream first and the same
            // fd is recycled. the stream draining thread will attempt to
            // drain that fd!! it may block, OOM, or cause bizarre behavior
            // see: https://bugs.openjdk.java.net/browse/JDK-8024521
            // issue is fixed in build 7u60
            InputStream stdout = process.getInputStream();
            synchronized (stdout) {
                inReader.close();
            }
        } catch (IOException ioe) {
            LOG.warn("Error while closing the input stream", ioe);
        }
        if (!completed.get()) {
            errThread.interrupt();
            joinThread(errThread);
        }
        try {
            InputStream stderr = process.getErrorStream();
            synchronized (stderr) {
                errReader.close();
            }
        } catch (IOException ioe) {
            LOG.warn("Error while closing the error stream", ioe);
        }
        process.destroy();
        lastTime = Time.monotonicNow();
    }
}
From source file:org.craftercms.cstudio.publishing.processor.ShellProcessor.java
@Override
public void doProcess(PublishedChangeSet changeSet, Map<String, String> parameters, PublishingTarget target)
        throws PublishingException {
    checkConfiguration(parameters, target);
    LOGGER.debug("Starting Shell Processor");
    ProcessBuilder builder = new ProcessBuilder();
    builder.directory(getWorkingDir(workingDir, parameters.get(FileUploadServlet.PARAM_SITE)));
    LOGGER.debug("Working directory is " + workingDir);
    HashMap<String, String> argumentsMap = buildArgumentsMap(getFileList(parameters, changeSet));
    if (asSingleCommand) {
        StrSubstitutor substitutor = new StrSubstitutor(argumentsMap, "%{", "}");
        String execComand = substitutor.replace(command);
        LOGGER.debug("Command to be Executed is " + execComand);
        builder.command("/bin/bash", "-c", execComand);
    } else {
        Set<String> keys = argumentsMap.keySet();
        ArrayList<String> commandAsList = new ArrayList<String>();
        commandAsList.add(command.trim());
        for (String key : keys) {
            if (!key.equalsIgnoreCase(INCLUDE_FILTER_PARAM)) {
                commandAsList.add(argumentsMap.get(key));
            }
        }
        LOGGER.debug("Command to be Executed is " + StringUtils.join(commandAsList, " "));
        builder.command(commandAsList);
    }
    builder.environment().putAll(enviroment);
    builder.redirectErrorStream(true);
    try {
        Process process = builder.start();
        process.waitFor();
        BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream()));
        String str;
        while ((str = reader.readLine()) != null) {
            LOGGER.info("PROCESS OUTPUT :" + str);
        }
        reader.close();
        LOGGER.info("Process Finish with Exit Code " + process.exitValue());
        LOGGER.debug("Process Output ");
    } catch (IOException ex) {
        LOGGER.error("Error ", ex);
    } catch (InterruptedException e) {
        LOGGER.error("Error ", e);
    } finally {
        LOGGER.debug("End of Shell Processor");
    }
}
From source file:org.apache.nifi.processors.standard.ExecuteStreamCommand.java
@Override
public void onTrigger(ProcessContext context, final ProcessSession session) throws ProcessException {
    FlowFile inputFlowFile = session.get();
    if (null == inputFlowFile) {
        return;
    }
    final ArrayList<String> args = new ArrayList<>();
    final boolean putToAttribute = context.getProperty(PUT_OUTPUT_IN_ATTRIBUTE).isSet();
    final Integer attributeSize = context.getProperty(PUT_ATTRIBUTE_MAX_LENGTH).asInteger();
    final String attributeName = context.getProperty(PUT_OUTPUT_IN_ATTRIBUTE).getValue();
    final String executeCommand = context.getProperty(EXECUTION_COMMAND)
            .evaluateAttributeExpressions(inputFlowFile).getValue();
    args.add(executeCommand);
    final String commandArguments = context.getProperty(EXECUTION_ARGUMENTS)
            .evaluateAttributeExpressions(inputFlowFile).getValue();
    final boolean ignoreStdin = Boolean.parseBoolean(context.getProperty(IGNORE_STDIN).getValue());
    if (!StringUtils.isBlank(commandArguments)) {
        for (String arg : ArgumentUtils.splitArgs(commandArguments,
                context.getProperty(ARG_DELIMITER).getValue().charAt(0))) {
            args.add(arg);
        }
    }
    final String workingDir = context.getProperty(WORKING_DIR).evaluateAttributeExpressions(inputFlowFile)
            .getValue();
    final ProcessBuilder builder = new ProcessBuilder();
    logger.debug("Executing and waiting for command {} with arguments {}",
            new Object[] { executeCommand, commandArguments });
    File dir = null;
    if (!StringUtils.isBlank(workingDir)) {
        dir = new File(workingDir);
        if (!dir.exists() && !dir.mkdirs()) {
            logger.warn("Failed to create working directory {}, using current working directory {}",
                    new Object[] { workingDir, System.getProperty("user.dir") });
        }
    }
    final Map<String, String> environment = new HashMap<>();
    for (final Map.Entry<PropertyDescriptor, String> entry : context.getProperties().entrySet()) {
        if (entry.getKey().isDynamic()) {
            environment.put(entry.getKey().getName(), entry.getValue());
        }
    }
    builder.environment().putAll(environment);
    builder.command(args);
    builder.directory(dir);
    builder.redirectInput(Redirect.PIPE);
    builder.redirectOutput(Redirect.PIPE);
    final Process process;
    try {
        process = builder.start();
    } catch (IOException e) {
        logger.error("Could not create external process to run command", e);
        throw new ProcessException(e);
    }
    try (final OutputStream pos = process.getOutputStream();
            final InputStream pis = process.getInputStream();
            final InputStream pes = process.getErrorStream();
            final BufferedInputStream bis = new BufferedInputStream(pis);
            final BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(pes))) {
        int exitCode = -1;
        final BufferedOutputStream bos = new BufferedOutputStream(pos);
        FlowFile outputFlowFile = putToAttribute ? inputFlowFile : session.create(inputFlowFile);
        ProcessStreamWriterCallback callback = new ProcessStreamWriterCallback(ignoreStdin, bos, bis, logger,
                attributeName, session, outputFlowFile, process, putToAttribute, attributeSize);
        session.read(inputFlowFile, callback);
        outputFlowFile = callback.outputFlowFile;
        if (putToAttribute) {
            outputFlowFile = session.putAttribute(outputFlowFile, attributeName,
                    new String(callback.outputBuffer, 0, callback.size));
        }
        exitCode = callback.exitCode;
        logger.debug("Execution complete for command: {}. Exited with code: {}",
                new Object[] { executeCommand, exitCode });
        Map<String, String> attributes = new HashMap<>();
        final StringBuilder strBldr = new StringBuilder();
        try {
            String line;
            while ((line = bufferedReader.readLine()) != null) {
                strBldr.append(line).append("\n");
            }
        } catch (IOException e) {
            strBldr.append("Unknown...could not read Process's Std Error");
        }
        int length = strBldr.length() > 4000 ? 4000 : strBldr.length();
        attributes.put("execution.error", strBldr.substring(0, length));
        final Relationship outputFlowFileRelationship = putToAttribute ? ORIGINAL_RELATIONSHIP
                : OUTPUT_STREAM_RELATIONSHIP;
        if (exitCode == 0) {
            logger.info("Transferring flow file {} to {}",
                    new Object[] { outputFlowFile, outputFlowFileRelationship.getName() });
        } else {
            logger.error("Transferring flow file {} to {}. Executable command {} ended in an error: {}",
                    new Object[] { outputFlowFile, outputFlowFileRelationship.getName(), executeCommand,
                            strBldr.toString() });
        }
        attributes.put("execution.status", Integer.toString(exitCode));
        attributes.put("execution.command", executeCommand);
        attributes.put("execution.command.args", commandArguments);
        outputFlowFile = session.putAllAttributes(outputFlowFile, attributes);
        // This transfer sends the FlowFile that received the stream output to its destined relationship.
        // In the event the stream is put into an attribute of the original, it is transferred here.
        session.transfer(outputFlowFile, outputFlowFileRelationship);
        if (!putToAttribute) {
            logger.info("Transferring flow file {} to original", new Object[] { inputFlowFile });
            inputFlowFile = session.putAllAttributes(inputFlowFile, attributes);
            session.transfer(inputFlowFile, ORIGINAL_RELATIONSHIP);
        }
    } catch (final IOException ex) {
        // could not close Process related streams
        logger.warn("Problem terminating Process {}", new Object[] { process }, ex);
    } finally {
        process.destroy(); // last ditch effort to clean up that process.
    }
}
From source file:edu.uci.ics.asterix.test.aql.TestsUtils.java
public static void executeTest(String actualPath, TestCaseContext testCaseCtx, ProcessBuilder pb,
        boolean isDmlRecoveryTest) throws Exception {
    File testFile;
    File expectedResultFile;
    String statement;
    List<TestFileContext> expectedResultFileCtxs;
    List<TestFileContext> testFileCtxs;
    File qbcFile = null;
    File qarFile = null;
    int queryCount = 0;

    List<CompilationUnit> cUnits = testCaseCtx.getTestCase().getCompilationUnit();
    for (CompilationUnit cUnit : cUnits) {
        LOGGER.info("Starting [TEST]: " + testCaseCtx.getTestCase().getFilePath() + "/" + cUnit.getName()
                + " ... ");
        testFileCtxs = testCaseCtx.getTestFiles(cUnit);
        expectedResultFileCtxs = testCaseCtx.getExpectedResultFiles(cUnit);
        for (TestFileContext ctx : testFileCtxs) {
            testFile = ctx.getFile();
            statement = TestsUtils.readTestFile(testFile);
            boolean failed = false;
            try {
                switch (ctx.getType()) {
                    case "ddl":
                        TestsUtils.executeDDL(statement);
                        break;
                    case "update":
                        // isDmlRecoveryTest: set IP address
                        if (isDmlRecoveryTest && statement.contains("nc1://")) {
                            statement = statement.replaceAll("nc1://",
                                    "127.0.0.1://../../../../../../asterix-app/");
                        }
                        TestsUtils.executeUpdate(statement);
                        break;
                    case "query":
                    case "async":
                    case "asyncdefer":
                        // isDmlRecoveryTest: insert Crash and Recovery
                        if (isDmlRecoveryTest) {
                            executeScript(pb, pb.environment().get("SCRIPT_HOME") + File.separator
                                    + "dml_recovery" + File.separator + "kill_cc_and_nc.sh");
                            executeScript(pb, pb.environment().get("SCRIPT_HOME") + File.separator
                                    + "dml_recovery" + File.separator + "stop_and_start.sh");
                        }
                        InputStream resultStream = null;
                        OutputFormat fmt = OutputFormat.forCompilationUnit(cUnit);
                        if (ctx.getType().equalsIgnoreCase("query"))
                            resultStream = executeQuery(statement, fmt);
                        else if (ctx.getType().equalsIgnoreCase("async"))
                            resultStream = executeAnyAQLAsync(statement, false, fmt);
                        else if (ctx.getType().equalsIgnoreCase("asyncdefer"))
                            resultStream = executeAnyAQLAsync(statement, true, fmt);
                        if (queryCount >= expectedResultFileCtxs.size()) {
                            throw new IllegalStateException("no result file for " + testFile.toString());
                        }
                        expectedResultFile = expectedResultFileCtxs.get(queryCount).getFile();
                        File actualResultFile = testCaseCtx.getActualResultFile(cUnit, new File(actualPath));
                        actualResultFile.getParentFile().mkdirs();
                        TestsUtils.writeOutputToFile(actualResultFile, resultStream);
                        TestsUtils.runScriptAndCompareWithResult(testFile, new PrintWriter(System.err),
                                expectedResultFile, actualResultFile);
                        LOGGER.info("[TEST]: " + testCaseCtx.getTestCase().getFilePath() + "/" + cUnit.getName()
                                + " PASSED ");
                        queryCount++;
                        break;
                    case "mgx":
                        executeManagixCommand(statement);
                        break;
                    case "txnqbc": // qbc represents query before crash
                        resultStream = executeQuery(statement, OutputFormat.forCompilationUnit(cUnit));
                        qbcFile = new File(actualPath + File.separator
                                + testCaseCtx.getTestCase().getFilePath().replace(File.separator, "_") + "_"
                                + cUnit.getName() + "_qbc.adm");
                        qbcFile.getParentFile().mkdirs();
                        TestsUtils.writeOutputToFile(qbcFile, resultStream);
                        break;
                    case "txnqar": // qar represents query after recovery
                        resultStream = executeQuery(statement, OutputFormat.forCompilationUnit(cUnit));
                        qarFile = new File(actualPath + File.separator
                                + testCaseCtx.getTestCase().getFilePath().replace(File.separator, "_") + "_"
                                + cUnit.getName() + "_qar.adm");
                        qarFile.getParentFile().mkdirs();
                        TestsUtils.writeOutputToFile(qarFile, resultStream);
                        TestsUtils.runScriptAndCompareWithResult(testFile, new PrintWriter(System.err), qbcFile,
                                qarFile);
                        LOGGER.info("[TEST]: " + testCaseCtx.getTestCase().getFilePath() + "/" + cUnit.getName()
                                + " PASSED ");
                        break;
                    case "txneu": // eu represents erroneous update
                        try {
                            TestsUtils.executeUpdate(statement);
                        } catch (Exception e) {
                            // An exception is expected.
                            failed = true;
                            e.printStackTrace();
                        }
                        if (!failed) {
                            throw new Exception(
                                    "Test \"" + testFile + "\" FAILED!\n An exception" + "is expected.");
                        }
                        System.err.println("...but that was expected.");
                        break;
                    case "script":
                        try {
                            String output = executeScript(pb, getScriptPath(testFile.getAbsolutePath(),
                                    pb.environment().get("SCRIPT_HOME"), statement.trim()));
                            if (output.contains("ERROR")) {
                                throw new Exception(output);
                            }
                        } catch (Exception e) {
                            throw new Exception("Test \"" + testFile + "\" FAILED!\n", e);
                        }
                        break;
                    case "sleep":
                        Thread.sleep(Long.parseLong(statement.trim()));
                        break;
                    case "errddl": // a ddl query that expects error
                        try {
                            TestsUtils.executeDDL(statement);
                        } catch (Exception e) {
                            // expected error happens
                            failed = true;
                            e.printStackTrace();
                        }
                        if (!failed) {
                            throw new Exception(
                                    "Test \"" + testFile + "\" FAILED!\n An exception" + "is expected.");
                        }
                        System.err.println("...but that was expected.");
                        break;
                    default:
                        throw new IllegalArgumentException("No statements of type " + ctx.getType());
                }
            } catch (Exception e) {
                System.err.println("testFile " + testFile.toString() + " raised an exception:");
                e.printStackTrace();
                if (cUnit.getExpectedError().isEmpty()) {
                    System.err.println("...Unexpected!");
                    throw new Exception("Test \"" + testFile + "\" FAILED!", e);
                } else {
                    LOGGER.info("[TEST]: " + testCaseCtx.getTestCase().getFilePath() + "/" + cUnit.getName()
                            + " failed as expected: " + e.getMessage());
                    System.err.println("...but that was expected.");
                }
            }
        }
    }
}
From source file:net.emotivecloud.scheduler.drp4ost.OStackClient.java
public String createVM(String flavorID, String baseImageID, String vmName, String host) {
    // http://docs.openstack.org/essex/openstack-compute/admin/content/specify-host-to-boot-instances-on.html
    // nova boot --image 1 --flavor 2 --key_name test --hint force_hosts=server2 my-first-server
    String ret = null;
    try {
        // execution of the add command
        System.out.println("DRP4OST>HOST: " + host);

        // Create a list with command and its arguments
        ArrayList<String> myCmd = new ArrayList<String>();
        myCmd.add("nova");
        myCmd.add("boot");
        myCmd.add(String.format("--flavor=%s", flavorID));
        myCmd.add(String.format("--image=%s", baseImageID));
        myCmd.add(String.format("--availability_zone=nova:%s", host));
        myCmd.add(String.format("%s", vmName));

        ProcessBuilder pb = new ProcessBuilder(myCmd);

        // Set up the environment to communicate with OpenStack
        Map<String, String> envir = pb.environment();
        envir.put("OS_AUTH_URL", this.OS_AUTH_URL);
        envir.put("OS_TENANT_ID", this.OS_TENANT_ID);
        envir.put("OS_TENANT_NAME", this.OS_TENANT_NAME);
        envir.put("OS_USERNAME", this.OS_USERNAME);
        envir.put("OS_PASSWORD", this.OS_PASSWORD);
        envir.put("mycommand", "nova boot");

        pb.redirectErrorStream(true);
        Process p = pb.start();

        InputStream pis = p.getInputStream();
        String novaOutput = ISToString(pis);
        System.out.println("DRP4OST-createVM > what nova boot returns me" + novaOutput);

        // Read the id line from the nova-client output
        String vmID = getIdFromNova(novaOutput);

        // Get Server details, that will be returned
        ret = getServer(vmID);

        pis.close();
    } catch (Exception e) {
        e.printStackTrace();
        throw new DRPOSTException("Exception creating the image", StatusCodes.BAD_REQUEST);
    }
    return ret;
}
From source file:net.emotivecloud.scheduler.drp4ost.OStackClient.java
public String createImage(String name, String path, String baseImage) {
    String format = null;
    try {
        System.out.println("DRP4ONE-OneExtraFuncs.createImage()> name(" + name + "), path(" + path + ") ");
        if (!existsImage(name)) {
            // If the image does not exist, create it
            if (path.trim().endsWith(".iso")) {
                format = "iso";
            } else if (name.contains(baseImage)) {
                // TODO: create the image, base image does not require volume
                String[] fileSplit = path.split("[.]");
                format = fileSplit[fileSplit.length - 1];
            } else {
                format = this.IMG_DEFAULT_DISK_FORMAT;
            }
            System.out.println("DRP4ONE-OneExtraFuncs.createImage()> creating image=" + name
                    + " with format=" + format);

            // TODO: create the image at the OpenStack repository
            ArrayList<String> myCmd = new ArrayList<String>();
            myCmd.add("glance");
            myCmd.add("add");
            myCmd.add(String.format("id=%s", name));
            myCmd.add(String.format("name=%s", name));
            myCmd.add(String.format("disk_format=%s", format));
            myCmd.add(String.format("container_format=%s", this.IMG_DEFAULT_CONTAINER_FORMAT));
            myCmd.add(String.format("is_public=%s", this.IMG_DEFAULT_IS_PUBLIC));

            ProcessBuilder pb = new ProcessBuilder(myCmd);
            pb.redirectErrorStream(true);

            // Set up the environment to communicate with OpenStack
            Map<String, String> envir = pb.environment();
            envir.put("OS_AUTH_URL", this.OS_AUTH_URL);
            envir.put("OS_TENANT_ID", this.OS_TENANT_ID);
            envir.put("OS_TENANT_NAME", this.OS_TENANT_NAME);
            envir.put("OS_USERNAME", this.OS_USERNAME);
            envir.put("OS_PASSWORD", this.OS_PASSWORD);

            // Execute the command specified with its environment
            Process p = pb.start();
            OutputStream pos = p.getOutputStream();

            InputStream fis = new FileInputStream(new File(path));
            byte[] buffer = new byte[1024];
            int read = 0;
            while ((read = fis.read(buffer)) != -1) {
                pos.write(buffer, 0, read);
            }
            // Close the file stream
            fis.close();
            // Close the process stream. If not, OpenStack keeps the image at "Saving" status
            pos.close();

            // TODO: verify error creating image
            // if (or.isError()) {
            //     // TODO: if error creating image...
            //     System.out.println("DRP4ONE-OneExtraFuncs.createImage()> Error creating image: " + name);
            // } else {
            //     // TODO: if ok creating image
            //     System.out.println("DRP4ONE-OneExtraFuncs.createImage()> OK creating image: " + name);
            //     int imgID = Integer.parseInt(or.getMessage());
            //     // TODO: do not leave while image not ready to be used
            //     while (i.stateString() != "READY") {
            //         String tmpState = i.stateString();
            //         System.out.println("DRP4ONE-OneExtraFuncs.createImage()> STATE(imgID=" + imgID + "): " + tmpState);
            //         Thread.sleep(3000);
            //     }
            // }
        } else {
            // If the image already exists, don't create it
            // TODO: return the identifier
            return name;
        }
    } catch (Exception e) {
        System.out.println("DRP4ONE-OneExtraFuncs.createImage()> name(" + name + "), path(" + path + ") ");
        e.printStackTrace();
    }
    return name;
}
From source file:net.emotivecloud.scheduler.drp4ost.OStackClient.java
public String createImage(OVFDisk disk, String pathLocalBaseImage) {
    // disk_format=qcow2
    // The disk_format field specifies the format of the image file. In this case, the image file format is
    // QCOW2, which can be verified using the file command:
    //
    // $ file stackimages/cirros.img
    // Other valid formats are raw, vhd, vmdk, vdi, iso, aki, ari and ami.
    //
    // container-format=bare
    // The container-format field is required by the glance image-create command but isn't actually used by
    // any of the OpenStack services, so the value specified here has no effect on system behavior. We specify
    // bare to indicate that the image file is not in a file format that contains metadata about the virtual machine.
    //
    // Because the value is not used anywhere, it is safe to always specify bare as the container format,
    // although the command will accept other formats: ovf, aki, ari, ami.
    //
    // glance image-create --name centos63-image --disk-format=qcow2 --container-format=raw --is-public=True < ./centos63.qcow2
    // glance add name=cirros-0.3.0-x86_64 disk_format=qcow2 container_format=bare < stackimages/cirros.img

    System.out.println("DRP4OST-this.createImage()> disk.getHref()=" + disk.getHref() + "disk.getId()" + disk.getId());

    String imagePath = disk.getHref(); // "/home/smendoza/cirros-0.3.0-x86_64-disk.img";
    String name = imagePath.trim().substring(imagePath.lastIndexOf("/") + 1, imagePath.length());
    // String name = disk.getId();
    // String name = disk.getId() + "_" + (new Random()).nextInt(99999999);
    String diskFormat = "qcow2"; // permitted: img, raw, vhd, vmdk, vdi, iso, aki, ari and ami
    boolean download = false;
    String openStackID = null;

    try {
        // execution of the add command
        // Process p = Runtime.getRuntime().exec(cmdAdd, env);
        // Obtained the arguments from 'glance help add'
        ArrayList<String> myCmd = new ArrayList<String>();
        myCmd.add("glance");
        myCmd.add("add");
        // myCmd.add(String.format("id=%s", disk.getId()));
        myCmd.add(String.format("name=%s", name));
        myCmd.add(String.format("disk_format=%s", diskFormat));
        myCmd.add(String.format("container_format=%s", this.IMG_DEFAULT_CONTAINER_FORMAT));
        myCmd.add(String.format("is_public=%s", this.IMG_DEFAULT_IS_PUBLIC));

        if (disk.getHref().startsWith("http://")) {
            // The image has already been downloaded and merged, and is staying at pathLocalBaseImage
            imagePath = pathLocalBaseImage;
            download = false;
        } else {
            // App will add the full path
            download = false;
            if (!disk.getHref().startsWith("/")) {
                // Incomplete path, necessary to add default path (img.default.path parameter)
                imagePath = this.IMG_DEFAULT_PATH + "/" + imagePath;
            } else {
                // Complete path (expected), nothing to add
            }
        }

        ProcessBuilder pb = new ProcessBuilder(myCmd);
        pb.redirectErrorStream(true);

        // Set up the environment to communicate with OpenStack
        Map<String, String> envir = pb.environment();
        envir.put("OS_AUTH_URL", this.OS_AUTH_URL);
        envir.put("OS_TENANT_ID", this.OS_TENANT_ID);
        envir.put("OS_TENANT_NAME", this.OS_TENANT_NAME);
        envir.put("OS_USERNAME", this.OS_USERNAME);
        envir.put("OS_PASSWORD", this.OS_PASSWORD);

        // Execute the command specified with its environment
        Process p = pb.start();
        InputStream pis = p.getInputStream();

        // If the image was not downloaded, it will have to be uploaded
        if (!download) {
            OutputStream pos = p.getOutputStream();
            File imgFile = new File(imagePath);
            System.out.println("DRP4OST-OStackClient.createImage()> START UPLOADING (" + imgFile.getPath()
                    + ") TO OPENSTACK REPOSITORY");
            if (imgFile.exists()) {
                InputStream fis = new FileInputStream(imgFile);
                byte[] buffer = new byte[1024];
                int read = 0;
                while ((read = fis.read(buffer)) != -1) {
                    pos.write(buffer, 0, read);
                }
                // Close the file stream
                fis.close();
                System.out.println("DRP4OST-OStackClient.createImage()> FINISH UPLOADING (" + imgFile.getPath()
                        + ") TO OPENSTACK REPOSITORY");
            } else {
                System.out.println("DRP4OST-OStackClient.createImage()> The file " + imgFile.getPath()
                        + " does not exist! Abort VM creation!");
                throw new Exception("DRP4OST-OStackClient.createImage()> The file " + imgFile.getPath()
                        + " does not exist! Abort VM instance creation!");
            }
            // Close the process stream. If not, OpenStack keeps the image at "Saving" status
            pos.close();
        } else {
            System.out.println(
                    "DRP4OST-OStackClient.createImage()> The image is expected to be downloaded automatically by OpenStack-glance from "
                            + imagePath);
        }

        System.out.println("DRP4OST-OStackClient.createImage()> glance output: " + ImageMerge.ISToString(pis));
        pis.close();

        openStackID = getImageID(name);
        System.out.println("DRP4OST-OStackClient.createImage()> getImageStatus(" + openStackID + ")="
                + getImageStatus(openStackID));
        // Wait while the image is not yet ready to be used
        while (!getImageStatus(openStackID).equals("ACTIVE")) {
            System.out.println("DRP4OST-OStackClient.createImage()> getImageStatus(" + openStackID + ")="
                    + getImageStatus(openStackID));
            Thread.sleep(1000);
        }
    } catch (Exception e) {
        e.printStackTrace();
        throw new DRPOSTException("Exception creating the image", StatusCodes.BAD_REQUEST);
    }
    return openStackID;
}
From source file:org.wso2.andes.test.utils.QpidBrokerTestCase.java
public void startBroker(int port) throws Exception {
    port = getPort(port);

    // Save any configuration changes that have been made
    saveTestConfiguration();
    saveTestVirtualhosts();

    if (_brokers.get(port) != null) {
        throw new IllegalStateException("There is already an existing broker running on port " + port);
    }

    if (_brokerType.equals(BrokerType.INTERNAL) && !existingInternalBroker()) {
        setConfigurationProperty(ServerConfiguration.MGMT_CUSTOM_REGISTRY_SOCKET, String.valueOf(false));
        saveTestConfiguration();

        BrokerOptions options = new BrokerOptions();
        options.setConfigFile(_configFile.getAbsolutePath());
        options.addPort(port);
        addExcludedPorts(port, options);
        options.setJmxPort(getManagementPort(port));

        // Set the log config file, relying on the log4j.configuration system property
        // set on the JVM by the JUnit runner task in module.xml.
        options.setLogConfigFile(new URL(System.getProperty("log4j.configuration")).getFile());

        Broker broker = new Broker();
        _logger.info("starting internal broker (same JVM)");
        broker.startup(options);

        _brokers.put(port, new InternalBrokerHolder(broker));
    } else if (!_brokerType.equals(BrokerType.EXTERNAL)) {
        String cmd = getBrokerCommand(port);
        _logger.info("starting external broker: " + cmd);
        ProcessBuilder pb = new ProcessBuilder(cmd.split("\\s+"));
        pb.redirectErrorStream(true);
        Map<String, String> env = pb.environment();

        String qpidHome = System.getProperty(QPID_HOME);
        env.put(QPID_HOME, qpidHome);

        // Augment Path with bin directory in QPID_HOME.
        env.put("PATH", env.get("PATH").concat(File.pathSeparator + qpidHome + "/bin"));

        // Add the test name to the broker run.
        // DON'T change PNAME, qpid.stop needs this value.
        env.put("QPID_PNAME", "-DPNAME=QPBRKR -DTNAME=\"" + _testName + "\"");

        // Add the port to QPID_WORK to ensure unique working dirs for multi broker tests
        env.put("QPID_WORK", getQpidWork(_brokerType, port));

        // Use the environment variable to set amqj.logging.level for the broker.
        // The value used is a 'server' value in the test configuration to
        // allow a differentiation between the client and broker logging levels.
        if (System.getProperty("amqj.server.logging.level") != null) {
            setBrokerEnvironment("AMQJ_LOGGING_LEVEL", System.getProperty("amqj.server.logging.level"));
        }

        // Add all the environment settings the test requested
        if (!_env.isEmpty()) {
            for (Map.Entry<String, String> entry : _env.entrySet()) {
                env.put(entry.getKey(), entry.getValue());
            }
        }

        // Add default test logging levels that are used by the log4j-test.
        // Use the convenience methods to push the current logging setting
        // in to the external broker's QPID_OPTS string.
        if (System.getProperty("amqj.protocol.logging.level") != null) {
            setSystemProperty("amqj.protocol.logging.level");
        }
        if (System.getProperty("root.logging.level") != null) {
            setSystemProperty("root.logging.level");
        }

        String QPID_OPTS = " ";
        // Add all the specified system properties to QPID_OPTS
        if (!_propertiesSetForBroker.isEmpty()) {
            for (String key : _propertiesSetForBroker.keySet()) {
                QPID_OPTS += "-D" + key + "=" + _propertiesSetForBroker.get(key) + " ";
            }
            if (env.containsKey("QPID_OPTS")) {
                env.put("QPID_OPTS", env.get("QPID_OPTS") + QPID_OPTS);
            } else {
                env.put("QPID_OPTS", QPID_OPTS);
            }
        }

        Process process = pb.start();
        Piper p = new Piper(process.getInputStream(), _brokerOutputStream, System.getProperty(BROKER_READY),
                System.getProperty(BROKER_STOPPED));
        p.start();

        if (!p.await(30, TimeUnit.SECONDS)) {
            _logger.info("broker failed to become ready (" + p.ready + "):" + p.getStopLine());
            // Ensure broker has stopped
            process.destroy();
            cleanBroker();
            throw new RuntimeException("broker failed to become ready:" + p.getStopLine());
        }

        try {
            // test that the broker is still running and hasn't exited unexpectedly
            int exit = process.exitValue();
            _logger.info("broker aborted: " + exit);
            cleanBroker();
            throw new RuntimeException("broker aborted: " + exit);
        } catch (IllegalThreadStateException e) {
            // this is expected if the broker started successfully
        }

        _brokers.put(port, new SpawnedBrokerHolder(process));
    }
}
From source file:org.apache.qpid.test.utils.QpidBrokerTestCase.java
public void startBroker(int port, TestBrokerConfiguration testConfiguration, XMLConfiguration virtualHosts,
        boolean managementMode) throws Exception {
    port = getPort(port);
    String testConfig = saveTestConfiguration(port, testConfiguration);
    String virtualHostsConfig = saveTestVirtualhosts(port, virtualHosts);

    if (_brokers.get(port) != null) {
        throw new IllegalStateException("There is already an existing broker running on port " + port);
    }

    Set<Integer> portsUsedByBroker = guessAllPortsUsedByBroker(port);

    if (_brokerType.equals(BrokerType.INTERNAL) && !existingInternalBroker()) {
        _logger.info("Set test.virtualhosts property to: " + virtualHostsConfig);
        setSystemProperty(TEST_VIRTUALHOSTS, virtualHostsConfig);
        setSystemProperty(BrokerProperties.PROPERTY_USE_CUSTOM_RMI_SOCKET_FACTORY, "false");

        BrokerOptions options = new BrokerOptions();
        options.setConfigurationStoreType(_brokerStoreType);
        options.setConfigurationStoreLocation(testConfig);
        options.setManagementMode(managementMode);

        // Set the log config file, relying on the log4j.configuration system property
        // set on the JVM by the JUnit runner task in module.xml.
        options.setLogConfigFile(_logConfigFile.getAbsolutePath());

        Broker broker = new Broker();
        _logger.info("Starting internal broker (same JVM)");
        broker.startup(options);

        _brokers.put(port, new InternalBrokerHolder(broker, System.getProperty("QPID_WORK"), portsUsedByBroker));
    } else if (!_brokerType.equals(BrokerType.EXTERNAL)) {
        // Add the port to QPID_WORK to ensure unique working dirs for multi broker tests
        final String qpidWork = getQpidWork(_brokerType, port);

        String[] cmd = _brokerCommandHelper.getBrokerCommand(port, testConfig, _brokerStoreType, _logConfigFile);
        if (managementMode) {
            String[] newCmd = new String[cmd.length + 1];
            System.arraycopy(cmd, 0, newCmd, 0, cmd.length);
            newCmd[cmd.length] = "-mm";
            cmd = newCmd;
        }
        _logger.info("Starting spawn broker using command: " + StringUtils.join(cmd, ' '));

        ProcessBuilder pb = new ProcessBuilder(cmd);
        pb.redirectErrorStream(true);
        Map<String, String> processEnv = pb.environment();

        String qpidHome = System.getProperty(QPID_HOME);
        processEnv.put(QPID_HOME, qpidHome);

        // Augment Path with bin directory in QPID_HOME.
        processEnv.put("PATH", processEnv.get("PATH").concat(File.pathSeparator + qpidHome + "/bin"));

        // Add the test name to the broker run.
        // DON'T change PNAME, qpid.stop needs this value.
        processEnv.put("QPID_PNAME", "-DPNAME=QPBRKR -DTNAME=\"" + getTestName() + "\"");
        processEnv.put("QPID_WORK", qpidWork);

        // Use the environment variable to set amqj.logging.level for the broker.
        // The value used is a 'server' value in the test configuration to
        // allow a differentiation between the client and broker logging levels.
        if (System.getProperty("amqj.server.logging.level") != null) {
            setBrokerEnvironment("AMQJ_LOGGING_LEVEL", System.getProperty("amqj.server.logging.level"));
        }

        // Add all the environment settings the test requested
        if (!_env.isEmpty()) {
            for (Map.Entry<String, String> entry : _env.entrySet()) {
                processEnv.put(entry.getKey(), entry.getValue());
            }
        }

        String qpidOpts = "";

        // a synchronized hack to avoid adding into QPID_OPTS the values
        // of JVM properties "test.virtualhosts" and "test.config" set by a concurrent startup process
        synchronized (_propertiesSetForBroker) {
            // Add default test logging levels that are used by the log4j-test.
            // Use the convenience methods to push the current logging setting
            // in to the external broker's QPID_OPTS string.
            setSystemProperty("amqj.protocol.logging.level");
            setSystemProperty("root.logging.level");
            setSystemProperty(BrokerProperties.PROPERTY_BROKER_DEFAULT_AMQP_PROTOCOL_EXCLUDES);
            setSystemProperty(BrokerProperties.PROPERTY_BROKER_DEFAULT_AMQP_PROTOCOL_INCLUDES);
            setSystemProperty(TEST_VIRTUALHOSTS, virtualHostsConfig);

            // Add all the specified system properties to QPID_OPTS
            if (!_propertiesSetForBroker.isEmpty()) {
                for (String key : _propertiesSetForBroker.keySet()) {
                    qpidOpts += " -D" + key + "=" + _propertiesSetForBroker.get(key);
                }
            }
        }
        if (processEnv.containsKey("QPID_OPTS")) {
            qpidOpts = processEnv.get("QPID_OPTS") + qpidOpts;
        }
        processEnv.put("QPID_OPTS", qpidOpts);

        // cpp broker requires that the work directory is created
        createBrokerWork(qpidWork);

        Process process = pb.start();
        Piper p = new Piper(process.getInputStream(), _testcaseOutputStream, System.getProperty(BROKER_READY),
                System.getProperty(BROKER_STOPPED), _interleaveBrokerLog ? _brokerLogPrefix : null);
        p.start();

        SpawnedBrokerHolder holder = new SpawnedBrokerHolder(process, qpidWork, portsUsedByBroker);
        if (!p.await(30, TimeUnit.SECONDS)) {
            _logger.info("broker failed to become ready (" + p.getReady() + "):" + p.getStopLine());
            String threadDump = holder.dumpThreads();
            if (!threadDump.isEmpty()) {
                _logger.info("the result of a try to capture thread dump:" + threadDump);
            }
            // Ensure broker has stopped
            process.destroy();
            cleanBrokerWork(qpidWork);
            throw new RuntimeException("broker failed to become ready:" + p.getStopLine());
        }

        try {
            // test that the broker is still running and hasn't exited unexpectedly
            int exit = process.exitValue();
            _logger.info("broker aborted: " + exit);
            cleanBrokerWork(qpidWork);
            throw new RuntimeException("broker aborted: " + exit);
        } catch (IllegalThreadStateException e) {
            // this is expected if the broker started successfully
        }

        _brokers.put(port, holder);
    }
}