Example usage for java.lang Process getErrorStream

List of usage examples for java.lang Process getErrorStream

Introduction

On this page you can find example usage for java.lang.Process.getErrorStream().

Prototype

public abstract InputStream getErrorStream();

Document

Returns the input stream connected to the error output of the process.
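
Before the full examples below, here is a minimal sketch of the common pattern. The command ("ls" on a non-existent directory) and the class name are illustrative assumptions only: the error stream is drained with a reader so the child process cannot block on a full stderr pipe, and the exit value is read afterwards.

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;

public class ErrorStreamSketch {
    public static void main(String[] args) throws IOException, InterruptedException {
        // Hypothetical command, used only for illustration.
        Process process = new ProcessBuilder("ls", "no-such-directory").start();

        // Drain stderr so the child cannot block on a full pipe buffer.
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(process.getErrorStream(), StandardCharsets.UTF_8))) {
            String line;
            while ((line = reader.readLine()) != null) {
                System.err.println("stderr: " + line);
            }
        }

        System.out.println("exit value: " + process.waitFor());
    }
}

Note that if standard error has been redirected with ProcessBuilder.redirectError or merged via ProcessBuilder.redirectErrorStream(true), getErrorStream() returns a stream that is already at end of file; the error output then arrives on getInputStream() instead.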

Usage

From source file:net.firejack.platform.generate.service.ResourceGeneratorService.java

@ProgressStatus(weight = 2, description = "Copy jars")
public void copyJar(IPackageDescriptor descriptor, InputStream stream, final Structure structure)
        throws Exception {
    ResourceElement[] resources = descriptor.getResources();
    IDomainElement[] domains = descriptor.getConfiguredDomains();

    if (domains == null)
        return;

    final Map<String, String> schemas = new HashMap<String, String>();
    for (IDomainElement domain : domains) {
        if (StringUtils.isNotBlank(domain.getWsdlLocation())) {
            Process exec = Runtime.getRuntime()
                    .exec(new String[] { "wsimport", "-d", structure.getSrc().getPath(), "-p",
                            "wsdl." + StringUtils.normalize(domain.getName()), "-Xnocompile", "-target", "2.1",
                            "-extension", domain.getWsdlLocation() });
            exec.getInputStream().close();
            exec.getErrorStream().close();
            exec.getOutputStream().close();
            exec.waitFor();
        }
    }

    for (ResourceElement resource : resources) {
        if (resource.getName().equals(ReverseEngineeringService.WSDL_SCHEME)) {
            List<FileResourceVersionElement> fileResourceVersionElements = resource
                    .getFileResourceVersionElements();
            FileResourceVersionElement versionElement = fileResourceVersionElements.get(0);
            schemas.put(versionElement.getResourceFilename(), versionElement.getOriginalFilename());
        }
    }

    ArchiveUtils.unzip(stream, new ArchiveUtils.ArchiveCallback() {
        @Override
        public void callback(String dir, String name, InputStream stream) {
            String schemaName = schemas.get(name);
            File file = null;
            if (schemaName != null) {
                file = FileUtils.create(structure.getResource(), "wsdl", schemaName);
            }

            if (file != null) {
                try {
                    FileOutputStream outputStream = FileUtils.openOutputStream(file);
                    IOUtils.copy(stream, outputStream);
                    IOUtils.closeQuietly(outputStream);
                } catch (IOException e) {
                    logger.error(e, e);
                }
            }
        }
    });
}

From source file:org.archive.modules.writer.Kw3WriterProcessor.java

private void chmod(File file, String permissions) {
    Process proc = null;
    try {
        proc = Runtime.getRuntime().exec("chmod " + permissions + " " + file.getAbsolutePath());
        proc.waitFor();
        proc.getInputStream().close();
        proc.getOutputStream().close();
        proc.getErrorStream().close();
    } catch (IOException e) {
        logger.log(Level.WARNING, "chmod failed", e);
    } catch (InterruptedException e) {
        logger.log(Level.WARNING, "chmod failed", e);
    }
}

From source file:er.extensions.ERXExtensions.java

/**
 * Frees all of the resources associated with a given
 * process and then destroys the process.
 * @param p process to destroy
 */
public static void freeProcessResources(Process p) {
    if (p != null) {
        try {
            if (p.getInputStream() != null)
                p.getInputStream().close();
            if (p.getOutputStream() != null)
                p.getOutputStream().close();
            if (p.getErrorStream() != null)
                p.getErrorStream().close();
            p.destroy();
        } catch (IOException e) {
        }
    }
}

From source file:gov.nih.nci.sdk.example.generator.WebServiceGenerator.java

public void runCommand(String _command) {
    try {
        StringBuffer input = new StringBuffer();

        input.setLength(0); // erase input StringBuffer
        String s;

        if (_command != null) {
            Runtime a = Runtime.getRuntime();
            java.lang.Process p = a.exec(_command);

            BufferedReader stdInput = new BufferedReader(new InputStreamReader(p.getInputStream()));

            BufferedReader stdError = new BufferedReader(new InputStreamReader(p.getErrorStream()));

            // read the output from the command

            // System.out.println("Here is the standard output of the command:\n");
            while ((s = stdInput.readLine()) != null) {
                input.append(s);
            }

            // read any errors from the attempted command
            while ((s = stdError.readLine()) != null) {
                getScriptContext().logInfo(s);
            }

        }
    } catch (Throwable t) {
        t.printStackTrace();
        getScriptContext().logError(t);
    }
}

From source file:jeplus.TRNSYSWinTools.java

/**
 * Call TRNSYS executable file to run the simulation
 *
 * @param config TRNSYS configuration instance
 * @param WorkDir the working directory where the input files are stored and the output files are generated
 * @param dckfile the dck file to execute
 * @return the result code representing the state of the execution steps; >= 0 means successful
 */
public static int runTRNSYS(TRNSYSConfig config, String WorkDir, String dckfile) {

    int ExitValue = -99;

    Process EPProc = null;
    try {
        // Run TRNSYS executable
        String CmdLine = config.getResolvedTRNSYSEXEC() + " " + dckfile + " /n /h";
        EPProc = Runtime.getRuntime().exec(CmdLine, null, new File(WorkDir));
        // Console logger
        try (PrintWriter outs = (config.getScreenFile() == null) ? null
                : new PrintWriter(new FileWriter(WorkDir + "/" + config.getScreenFile(), true));) {
            if (outs != null) {
                outs.println();
                outs.println("# Calling TRNexe - " + (new SimpleDateFormat()).format(new Date()));
                outs.println("# Command line: " + WorkDir + ">" + CmdLine);
                outs.flush();
            }
            StreamPrinter p_out = new StreamPrinter(EPProc.getInputStream(), "OUTPUT", outs);
            StreamPrinter p_err = new StreamPrinter(EPProc.getErrorStream(), "ERROR", outs);
            p_out.start();
            p_err.start();
            ExitValue = EPProc.waitFor();
            p_out.join();
            p_err.join();
            if (outs != null) {
                outs.println("# TRNSYS executable returns: " + ExitValue);
                outs.flush();
            }
        }
    } catch (Exception e) {
        logger.error("Error executing TRNSYS executable.", e);
    }

    // Return the TRNSYS exit value
    return ExitValue;
}

From source file:org.openmeetings.app.data.flvrecord.converter.FlvInterviewConverter.java

public HashMap<String, String> processImageWindows(String file1, String file2, String file3) {
    HashMap<String, String> returnMap = new HashMap<String, String>();
    returnMap.put("process", "processImageWindows");
    try {

        // Init variables
        String[] cmd;
        String executable_fileName = "";
        String pathToIMagick = this.getPathToImageMagick();

        Date tnow = new Date();
        String runtimeFile = "interviewMerge" + tnow.getTime() + ".bat";

        // String runtimeFile = "interviewMerge.bat";
        executable_fileName = ScopeApplicationAdapter.batchFileDir + runtimeFile;

        cmd = new String[1];
        cmd[0] = executable_fileName;

        // Create the Content of the Converter Script (.bat or .sh File)
        String fileContent = pathToIMagick + " " + file1 + " " + file2 + " " + "+append" + " " + file3
                + ScopeApplicationAdapter.lineSeperator + "";

        File previous = new File(executable_fileName);
        if (previous.exists()) {
            previous.delete();
        }

        // execute the Script
        FileOutputStream fos = new FileOutputStream(executable_fileName);
        fos.write(fileContent.getBytes());
        fos.close();

        File now = new File(executable_fileName);
        now.setExecutable(true);

        Runtime rt = Runtime.getRuntime();
        returnMap.put("command", cmd.toString());
        Process proc = rt.exec(cmd);

        InputStream stderr = proc.getErrorStream();
        BufferedReader br = new BufferedReader(new InputStreamReader(stderr));
        String line = null;
        String error = "";
        while ((line = br.readLine()) != null) {
            error += line;
        }
        br.close();
        returnMap.put("error", error);
        int exitVal = proc.waitFor();
        returnMap.put("exitValue", "" + exitVal);

        if (now.exists()) {
            now.delete();
        }

        return returnMap;
    } catch (Throwable t) {
        t.printStackTrace();
        returnMap.put("error", t.getMessage());
        returnMap.put("exitValue", "-1");
        return returnMap;
    }
}

From source file:ComputeNode.java

/**
 * Internal method to get load
 */
private Double getCurrentLoad() {
    // can be a simulated load or command output
    // can use taskCount

    Double currLoad = 0d;
    try {
        Process p = Runtime.getRuntime().exec("uptime");

        BufferedReader stdInput = new BufferedReader(new InputStreamReader(p.getInputStream()));

        BufferedReader stdError = new BufferedReader(new InputStreamReader(p.getErrorStream()));

        // read the output from the command

        // 11:57:57 up 29 min,  2 users,  load average: 0.27, 0.12, 0.09

        String s;
        //System.out.println("Here is the standard output of the command:\n");

        s = stdInput.readLine();
        s = s.replace(':', ',');

        String data[] = s.split(",");

        currLoad = Double.parseDouble(data[data.length - 2]);

    } catch (Exception e) {
        lg.log(Level.WARNING,
                "getCurrentLoad: Unable to parse system " + " load information! Using constant,guassian or 0 ");

    }
    if (loadConstant != null) {
        currLoad = loadConstant;
    } else {
        if (loadGaussian != null) {
            currLoad = RandomGaussian.getGaussian(loadGaussian.fst(), loadGaussian.snd());

        } else {
            currLoad = 0.0;
        }
    }

    myNodeStats.setCurrentLoad(currLoad);
    myNodeStats.noOfLoadChecks.incrementAndGet();
    myNodeStats.setTotalLoad(myNodeStats.getTotalLoad() + currLoad);

    lg.log(Level.FINER, " getCurrentLoad: Load " + currLoad);
    return currLoad;
}

From source file:madkitgroupextension.export.Export.java

public static void execExternalProcess(String command, final boolean screen_output,
        final boolean screen_erroutput) throws IOException, InterruptedException {
    Runtime runtime = Runtime.getRuntime();
    final Process process = runtime.exec(command);

    // Consume the external application's standard output in a separate thread
    new Thread() {
        public void run() {
            try {
                BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream()));
                String line = "";
                try {
                    while ((line = reader.readLine()) != null) {
                        if (screen_output) {
                            System.out.println(line);
                        }
                    }
                } finally {
                    reader.close();
                }
            } catch (IOException ioe) {
                ioe.printStackTrace();
            }
        }
    }.start();

    // Consume the external application's error output in a separate thread
    new Thread() {
        public void run() {
            try {
                BufferedReader reader = new BufferedReader(new InputStreamReader(process.getErrorStream()));
                String line = "";
                try {
                    while ((line = reader.readLine()) != null) {
                        if (screen_erroutput) {
                            System.out.println(line);
                        }
                    }
                } finally {
                    reader.close();
                }
            } catch (IOException ioe) {
                ioe.printStackTrace();
            }
        }
    }.start();
    process.waitFor();
}

From source file:de.smartics.maven.plugin.buildmetadata.scm.maven.ScmAccessInfo.java

/**
 * Fetches the version from the remote Git repository. The implementation uses
 * the Git Command Line Utils.
 *
 * @param repository the reference to the repository (currently not used).
 * @param remoteVersion the version to fetch.
 * @return the revision information.
 * @throws ScmException on any problem accessing the remote repository.
 */
public Revision fetchRemoteGitVersion(final ScmRepository repository, final ScmVersion remoteVersion)
        throws ScmException {
    try {
        final Commandline cl = GitCommandLineUtils.getBaseGitCommandLine(rootDirectory, "log");
        cl.createArg().setLine("-n 1");
        cl.createArg().setLine("--pretty=format:\"%H %ct\"");
        cl.createArg().setLine(remoteVersion.getName());
        final Process process = cl.execute();
        try {
            process.waitFor();
            final int exitValue = process.exitValue();
            if (exitValue != 0) {
                throw new ScmException("Cannot fetch remote version from repository (" + exitValue + "): "
                        + IOUtils.toString(process.getErrorStream()));
            }
            final String result = IOUtils.toString(process.getInputStream());
            final Revision revision = createRevision(result);
            return revision;
        } finally {
            process.destroy();
        }
    } catch (final Exception e) {
        throw new ScmException("Cannot fetch remote version from repository.", e);
    }
}

From source file:com.panet.imeta.trans.steps.luciddbbulkloader.LucidDBBulkLoader.java

public boolean execute(LucidDBBulkLoaderMeta meta, boolean wait) throws KettleException {
    Runtime rt = Runtime.getRuntime();

    try {
        String tableName = environmentSubstitute(meta.getTableName());

        // 1) Set up the FIFO folder, create the directory and path to it... 
        //
        String fifoVfsDirectory = environmentSubstitute(meta.getFifoDirectory());
        FileObject directory = KettleVFS.getFileObject(fifoVfsDirectory);
        directory.createFolder();
        String fifoDirectory = KettleVFS.getFilename(directory);

        // 2) Create the FIFO file using the "mkfifo" command...
        //    Make sure to log all the possible output, also from STDERR
        //
        data.fifoFilename = KettleVFS.getFilename(directory) + Const.FILE_SEPARATOR + tableName + ".csv";
        data.bcpFilename = KettleVFS.getFilename(directory) + Const.FILE_SEPARATOR + tableName + ".bcp";

        File fifoFile = new File(data.fifoFilename);
        if (!fifoFile.exists()) {
            String mkFifoCmd = "mkfifo " + data.fifoFilename + "";
            logBasic("Creating FIFO file using this command : " + mkFifoCmd);
            Process mkFifoProcess = rt.exec(mkFifoCmd);
            StreamLogger errorLogger = new StreamLogger(mkFifoProcess.getErrorStream(), "mkFifoError");
            StreamLogger outputLogger = new StreamLogger(mkFifoProcess.getInputStream(), "mkFifoOutput");
            new Thread(errorLogger).start();
            new Thread(outputLogger).start();
            int result = mkFifoProcess.waitFor();
            if (result != 0) {
                throw new Exception("Return code " + result + " received from statement : " + mkFifoCmd);
            }
        }

        // 3) Make a connection to LucidDB for sending SQL commands
        // (Also, we need a clear cache for getting up-to-date target metadata)
        DBCache.getInstance().clear(meta.getDatabaseMeta().getName());

        data.db = new Database(meta.getDatabaseMeta());
        data.db.shareVariablesWith(this);
        // Connect to the database
        if (getTransMeta().isUsingUniqueConnections()) {
            synchronized (getTrans()) {
                data.db.connect(getTrans().getThreadName(), getPartitionID());
            }
        } else {
            data.db.connect(getPartitionID());
        }

        logBasic("Connected to LucidDB");

        // 4) Now we are ready to create the LucidDB FIFO server that will handle the actual bulk loading.
        //
        String fifoServerStatement = "";
        fifoServerStatement += "create or replace server " + meta.getFifoServerName() + Const.CR;
        fifoServerStatement += "foreign data wrapper sys_file_wrapper" + Const.CR;
        fifoServerStatement += "options (" + Const.CR;
        fifoServerStatement += "directory '" + fifoDirectory + "'," + Const.CR;
        fifoServerStatement += "file_extension 'csv'," + Const.CR;
        fifoServerStatement += "with_header 'no'," + Const.CR;
        fifoServerStatement += "num_rows_scan '0'," + Const.CR;
        fifoServerStatement += "lenient 'no');" + Const.CR;

        logBasic("Creating LucidDB fifo_server with the following command: " + fifoServerStatement);
        data.db.execStatements(fifoServerStatement);

        // 5) Set the error limit in the LucidDB session 
        // REVIEW jvs 13-Dec-2008:  is this guaranteed to retain the same
        // connection?
        String errorMaxStatement = "";
        errorMaxStatement += "alter session set \"errorMax\" = " + meta.getMaxErrors() + ";" + Const.CR;
        logBasic("Setting error limit in LucidDB session with the following command: " + errorMaxStatement);
        data.db.execStatements(errorMaxStatement);

        // 6) Now we also need to create a bulk loader file .bcp
        //
        createBulkLoadConfigFile(data.bcpFilename);

        // 7) execute the actual load command!
        //    This will actually block until the load is done in the
        // separate execution thread; see notes in executeLoadCommand
        // on why it's important for this to occur BEFORE
        // opening our end of the FIFO.
        //
        executeLoadCommand(tableName);

        // 8) We have to write rows to the FIFO file later on.
        data.fifoStream = new BufferedOutputStream(new FileOutputStream(fifoFile));
    } catch (Exception ex) {
        throw new KettleException(ex);
    }

    return true;
}