Example usage for java.lang System exit

List of usage examples for java.lang System exit

Introduction

On this page you can find usage examples for java.lang.System.exit.

Prototype

public static void exit(int status) 

Document

Terminates the currently running Java Virtual Machine. The argument serves as a status code; by convention, a nonzero status code indicates abnormal termination.
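Before the per-project examples, here is a minimal self-contained sketch (not taken from any of the source files below; the class name ExitDemo is illustrative) showing the status-code convention and the fact that registered shutdown hooks run when exit is called:

public class ExitDemo {
    public static void main(String[] args) {
        // Shutdown hooks are started when System.exit is called.
        Runtime.getRuntime().addShutdownHook(new Thread(() -> System.out.println("shutdown hook ran")));
        // By convention, zero indicates normal termination and a
        // nonzero status indicates abnormal termination.
        System.exit(args.length > 0 ? 1 : 0);
    }
}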

Usage

From source file:com.eviware.soapui.tools.SoapUIMockAsWarGenerator.java

/**
 * Runs the specified tool in the specified soapUI project file; see the
 * soapUI xdocs for details.
 * 
 * @param args the command-line arguments
 * @throws Exception
 */

public static void main(String[] args) throws Exception {
    System.exit(new SoapUIMockAsWarGenerator().runFromCommandLine(args));
}

From source file:CheckBoxMnemonic.java

public static void main(String[] a) {
    JFrame f = new JFrame();
    f.addWindowListener(new WindowAdapter() {
        public void windowClosing(WindowEvent e) {
            System.exit(0);
        }
    });
    f.getContentPane().add(new CheckBoxMnemonic());
    f.pack();
    f.setSize(new Dimension(300, 200));
    f.setVisible(true); // show() is deprecated; setVisible(true) is the modern equivalent

}

From source file:SenBench.java

public static void main(String[] args) {
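    // Note: encoding, repeat, log, and doWork(...) are members of the SenBench class (not shown here).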
    try {
        if (args.length == 0) {
            System.out.println("usage: java SenBench file [file ..]");
            System.exit(2);
        }

        StringTagger tagger = StringTagger.getInstance(Locale.JAPANESE);

        long processed = 0;
        long nbytes = 0;
        long nchars = 0;

        long start = System.currentTimeMillis();
        for (int a = 0; a < args.length; a++) {
            String text = "";
            try {
                RandomAccessFile raf = new RandomAccessFile(args[a], "r");
                byte[] buf = new byte[(int) raf.length()];
                raf.readFully(buf);
                raf.close();
                text = new String(buf, encoding);
                nbytes += buf.length;
                nchars += text.length();
            } catch (IOException ioe) {
                log.error(ioe);
                continue;
            }

            long s_start = System.currentTimeMillis();
            for (int c = 0; c < repeat; c++)
                doWork(tagger, text);
            long s_end = System.currentTimeMillis();
            processed += (s_end - s_start);
        }
        long end = System.currentTimeMillis();
        System.out.println("number of files: " + args.length);
        System.out.println("number of repeat: " + repeat);
        System.out.println("number of bytes: " + nbytes);
        System.out.println("number of chars: " + nchars);
        System.out.println("total time elapsed: " + (end - start) + " msec.");
        System.out.println("analysis time: " + (processed) + " msec.");
    } catch (Exception e) {
        e.printStackTrace(System.err);
        System.exit(1);
    }
}

From source file:io.fluo.webindex.data.Copy.java

public static void main(String[] args) throws Exception {
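    // Note: log and getFilename(...) are members of the Copy class (not shown here).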

    if (args.length != 3) {
        log.error("Usage: Copy <pathsFile> <range> <dest>");
        System.exit(1);
    }
    final String hadoopConfDir = IndexEnv.getHadoopConfDir();
    final List<String> copyList = IndexEnv.getPathsRange(args[0], args[1]);
    if (copyList.isEmpty()) {
        log.error("No files to copy given {} {}", args[0], args[1]);
        System.exit(1);
    }

    DataConfig dataConfig = DataConfig.load();

    SparkConf sparkConf = new SparkConf().setAppName("webindex-copy");
    try (JavaSparkContext ctx = new JavaSparkContext(sparkConf)) {

        FileSystem hdfs = FileSystem.get(ctx.hadoopConfiguration());
        Path destPath = new Path(args[2]);
        if (!hdfs.exists(destPath)) {
            hdfs.mkdirs(destPath);
        }

        log.info("Copying {} files (Range {} of paths file {}) from AWS to HDFS {}", copyList.size(), args[1],
                args[0], destPath.toString());

        JavaRDD<String> copyRDD = ctx.parallelize(copyList, dataConfig.getNumExecutorInstances());

        final String prefix = DataConfig.CC_URL_PREFIX;
        final String destDir = destPath.toString();

        copyRDD.foreachPartition(iter -> {
            FileSystem fs = IndexEnv.getHDFS(hadoopConfDir);
            iter.forEachRemaining(ccPath -> {
                try {
                    Path dfsPath = new Path(destDir + "/" + getFilename(ccPath));
                    if (fs.exists(dfsPath)) {
                        log.error("File {} exists in HDFS and should have been previously filtered",
                                dfsPath.getName());
                    } else {
                        String urlToCopy = prefix + ccPath;
                        log.info("Starting copy of {} to {}", urlToCopy, destDir);
                        try (OutputStream out = fs.create(dfsPath);
                                BufferedInputStream in = new BufferedInputStream(
                                        new URL(urlToCopy).openStream())) {
                            IOUtils.copy(in, out);
                        }
                        log.info("Created {}", dfsPath.getName());
                    }
                } catch (IOException e) {
                    log.error("Exception while copying {}", ccPath, e);
                }
            });
        });
    }
}

From source file:com.opengamma.integration.tool.marketdata.BloombergTimeSeriesUpdateTool.java

/**
 * Main method to run the tool.
 * 
 * @param args  the arguments
 */
public static void main(String[] args) { // CSIGNORE
    boolean success = new BloombergTimeSeriesUpdateTool().initAndRun(args, IntegrationToolContext.class);
    System.exit(success ? 0 : 1);
}

From source file:async.nio2.Main.java

public static void main(String[] args) throws IOException, InterruptedException, ExecutionException {
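    // Note: PORT, NO_CLIENTS, and NO_SAMPLES are static fields of Main holding defaults (not shown here).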

    if (args.length == 3) {
        PORT = Integer.valueOf(args[0]);
        NO_CLIENTS = Integer.valueOf(args[1]);
        NO_SAMPLES = Integer.valueOf(args[2]);
    }

    if (PORT < 0) {
        System.err.println("Error: port < 0");
        System.exit(1);
    }

    if (NO_CLIENTS < 1) {
        System.err.println("Error: #clients < 1");
        System.exit(1);
    }

    if (NO_SAMPLES < 1) {
        System.err.println("Error: #samples < 1");
        System.exit(1);
    }

    AsynchronousChannelGroup groupServer = AsynchronousChannelGroup
            .withThreadPool(Executors.newFixedThreadPool(1));
    AsynchronousChannelGroup groupClient = AsynchronousChannelGroup
            .withThreadPool(Executors.newFixedThreadPool(1));

    Server server = Server.newInstance(new InetSocketAddress("localhost", PORT), groupServer);
    InetSocketAddress localAddress = server.getLocalAddress();
    String hostname = localAddress.getHostName();
    int port = localAddress.getPort();

    ExecutorService es = Executors.newFixedThreadPool(2);

    System.out.printf("%03d clients on %s:%d, %03d runs each. All times in s.%n", NO_CLIENTS, hostname, port,
            NO_SAMPLES);
    range(0, NO_CLIENTS).unordered().parallel()
            .mapToObj(i -> CompletableFuture.supplyAsync(newClient(localAddress, groupClient), es).join())
            .map(array -> Arrays.stream(array).reduce(new DescriptiveStatistics(), Main::accumulate,
                    Main::combine))
            .map(Main::toEvaluationString).forEach(System.out::println);

    es.shutdown();
    es.awaitTermination(5, TimeUnit.SECONDS);

    groupClient.shutdown();
    groupClient.awaitTermination(5, TimeUnit.SECONDS);

    server.close();
    groupServer.shutdown();
    groupServer.awaitTermination(5, TimeUnit.SECONDS);
}

From source file:com.jeffy.hdfs.compression.FileCompressor.java

/**
 * Compresses the files named on the command line with a Hadoop compression codec.
 * 
 * @param args args[0] is the codec name (e.g. "gzip"); the remaining
 *             arguments are the paths of the files to compress
 * @throws IOException if reading or writing a file fails
 */
public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    // Build a factory that maps codec names and aliases to codec classes.
    CompressionCodecFactory factory = new CompressionCodecFactory(conf);
    // For example, for the 'GzipCodec' codec class the aliases are 'gzip' and 'gzipcodec'.
    CompressionCodec codec = factory.getCodecByName(args[0]);
    if (codec == null) { // no codec is registered under the given name
        System.err.println("Compression codec not found for " + args[0]);
        System.exit(1);
    }
    String ext = codec.getDefaultExtension();
    Compressor compressor = null;
    try {
        // Borrow a Compressor from the CodecPool so it can be reused across files.
        compressor = CodecPool.getCompressor(codec);
        for (int i = 1; i < args.length; i++) {
            String filename = args[i] + ext;
            System.out.println("Compressing the file " + filename);
            try (FileSystem outFs = FileSystem.get(URI.create(filename), conf);
                    FileSystem inFs = FileSystem.get(URI.create(args[i]), conf);
                    InputStream in = inFs.open(new Path(args[i]))) {
                // Wrap the destination in a compressing stream backed by the pooled Compressor.
                CompressionOutputStream out = codec.createOutputStream(outFs.create(new Path(filename)),
                        compressor);
                // Copy the uncompressed input into the compressed output.
                IOUtils.copy(in, out);
                out.finish(); // finish() flushes any remaining compressed data without closing the stream
                compressor.reset(); // reset before reuse, or the next file fails with java.io.IOException: write beyond end of stream
            }
        }
    } finally { // always return the Compressor to the pool
        CodecPool.returnCompressor(compressor);
    }
}

From source file:org.age.node.Bootstrapper.java

public static void main(final String... args) {

    DefaultNodeLifecycleService lifecycleService = null;
    try (final ConfigurableApplicationContext context = new ClassPathXmlApplicationContext("spring-node.xml")) {
        context.registerShutdownHook();
        lifecycleService = context.getBean(DefaultNodeLifecycleService.class);
        lifecycleService.awaitTermination();
    } finally {
        log.info("Finishing.");
        if (lifecycleService != null) { // guard: the bean lookup above may have failed
            lifecycleService.awaitTermination();
        }
    }
    log.info("Exiting.");
    System.exit(0);
}

From source file:Main.java

public static void main(String[] args) throws Exception {
    if (!SystemTray.isSupported()) {
        return;
    }
    SystemTray tray = SystemTray.getSystemTray();

    PropertyChangeListener pcl;
    pcl = new PropertyChangeListener() {
        public void propertyChange(PropertyChangeEvent pce) {
            System.out.println("Property changed = " + pce.getPropertyName());
            TrayIcon[] tia = (TrayIcon[]) pce.getOldValue();
            if (tia != null) {
                for (int i = 0; i < tia.length; i++)
                    System.out.println(tia[i]);
            }

            tia = (TrayIcon[]) pce.getNewValue();
            if (tia != null) {
                for (int i = 0; i < tia.length; i++)
                    System.out.println(tia[i]);
            }
        }
    };
    tray.addPropertyChangeListener("trayIcons", pcl);

    Dimension size = tray.getTrayIconSize();

    TrayIcon[] icons = tray.getTrayIcons();

    BufferedImage bi = new BufferedImage(size.width, size.height, BufferedImage.TYPE_INT_RGB);
    Graphics g = bi.getGraphics();

    g.setColor(Color.blue);
    g.fillRect(0, 0, size.width, size.height);
    TrayIcon icon = new TrayIcon(bi);
    tray.add(icon);

    Thread.sleep(3000);
    tray.remove(icon);

    Thread.sleep(3000);
    System.exit(0);
}

From source file:com.endgame.binarypig.util.BuildSequenceFileFromDir.java

public static void main(String[] args) throws Exception {
    if (args.length < 2) {
        System.err.println("Usage: hadoop jar JAR " + BuildSequenceFileFromDir.class.getName()
                + " <dirOfBinaries> <HDFSOutputDir>");
        System.exit(-1);
    }

    ToolRunner.run(new BuildSequenceFileFromDir(), args);
}