Example usage for java.util Optional ofNullable

List of usage examples for java.util Optional ofNullable

Introduction

On this page you can find example usages of java.util.Optional.ofNullable.

Prototype

@SuppressWarnings("unchecked")
public static <T> Optional<T> ofNullable(T value) 
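
Semantically, ofNullable is just a null check in front of the other two factory methods; a minimal sketch of the behavior using only the public API (not the exact JDK source):

public static <T> Optional<T> ofNullable(T value) {
    // null maps to the shared empty Optional; anything else is wrapped as usual
    return value == null ? Optional.empty() : Optional.of(value);
}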

Document

Returns an Optional describing the given value, if non-null, otherwise returns an empty Optional.
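
Unlike Optional.of, which throws a NullPointerException when given null, ofNullable maps null to an empty Optional. A minimal sketch:

Optional<String> present = Optional.ofNullable("value"); // Optional[value]
Optional<String> absent = Optional.ofNullable(null);     // Optional.empty
// Optional.of(null) would throw NullPointerException here instead.
System.out.println(present.isPresent()); // true
System.out.println(absent.isPresent());  // false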

Usage

From source file:Main.java

public static void main(String... args) {
    List<String> names3 = null;

    Optional.ofNullable(names3).ifPresent(list -> list.sort(Comparator.naturalOrder()));

    System.out.println(names3);

}
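
Since names3 is null, the ifPresent lambda is skipped and the program prints null. The call is a one-line replacement for the classic null guard; a sketch of the pre-Optional equivalent:

if (names3 != null) {
    names3.sort(Comparator.naturalOrder());
}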

From source file:grakn.core.server.Grakn.java

public static void main(String[] args) {
    Thread.setDefaultUncaughtExceptionHandler(
            (Thread t, Throwable e) -> LOG.error(ErrorMessage.UNCAUGHT_EXCEPTION.getMessage(t.getName()), e));

    try {
        String graknPidFileProperty = Optional.ofNullable(SystemProperty.GRAKN_PID_FILE.value()).orElseThrow(
                () -> new RuntimeException(ErrorMessage.GRAKN_PIDFILE_SYSTEM_PROPERTY_UNDEFINED.getMessage()));

        Path pidfile = Paths.get(graknPidFileProperty);
        PIDManager pidManager = new PIDManager(pidfile);
        pidManager.trackGraknPid();

        // Start Server with timer
        Stopwatch timer = Stopwatch.createStarted();
        boolean benchmark = parseBenchmarkArg(args);
        Server server = ServerFactory.createServer(benchmark);
        server.start();

        LOG.info("Grakn started in {}", timer.stop());
    } catch (RuntimeException | IOException e) {
        LOG.error(ErrorMessage.UNCAUGHT_EXCEPTION.getMessage(e.getMessage()), e);
        System.err.println(ErrorMessage.UNCAUGHT_EXCEPTION.getMessage(e.getMessage()));
    }
}
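
Distilled, this is the fail-fast "required value" pattern: ofNullable turns a possibly-null lookup into an Optional, and orElseThrow converts absence into an exception. A minimal sketch (the property key here is hypothetical):

String pidFile = Optional.ofNullable(System.getProperty("grakn.pidfile")) // hypothetical key
        .orElseThrow(() -> new IllegalStateException("grakn.pidfile system property is not defined"));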

From source file:edu.jhu.hlt.concrete.ingesters.gigaword.GigawordGzProcessor.java

public static void main(String... args) {
    Thread.setDefaultUncaughtExceptionHandler(new LoggedUncaughtExceptionHandler());
    if (args.length != 2) {
        LOGGER.info("This program takes 2 arguments.");
        LOGGER.info("First: the path to a .gz file that is part of the English Gigaword v5 corpus.");
        LOGGER.info("Second: the path to the output file (a .tar.gz with communication files).");
        LOGGER.info("Example usage:");
        LOGGER.info("{} {} {}", GigawordGzProcessor.class.getName(), "/path/to/LDC/sgml/.gz",
                "/path/to/out.tar.gz");
        System.exit(1);
    }

    String inPathStr = args[0];
    String outPathStr = args[1];

    Path inPath = Paths.get(inPathStr);
    if (!Files.exists(inPath))
        LOGGER.error("Input path {} does not exist. Try again with the right path.", inPath.toString());

    Path outPath = Paths.get(outPathStr);
    Optional<Path> parent = Optional.ofNullable(outPath.getParent());
    // Lambdas cannot throw checked exceptions, so use isPresent() rather than ifPresent().
    if (parent.isPresent()) {
        if (!Files.exists(parent.get())) {
            LOGGER.info("Attempting to create output directory: {}", parent.get().toString());
            try {
                // Create the missing parent directory (not outPath itself, which is the output file).
                Files.createDirectories(parent.get());
            } catch (IOException e) {
                LOGGER.error("Caught exception creating output directory.", e);
            }
        }
    }

    GigawordDocumentConverter conv = new GigawordDocumentConverter();
    Iterator<Communication> iter = conv.gzToStringIterator(inPath);
    try (OutputStream os = Files.newOutputStream(outPath);
            BufferedOutputStream bos = new BufferedOutputStream(os, 1024 * 8 * 16);
            GzipCompressorOutputStream gout = new GzipCompressorOutputStream(bos);
            TarArchiver archiver = new TarArchiver(gout)) {
        while (iter.hasNext()) {
            Communication c = iter.next();
            LOGGER.info("Adding Communication {} [UUID: {}] to archive.", c.getId(),
                    c.getUuid().getUuidString());
            archiver.addEntry(new ArchivableCommunication(c));
        }
    } catch (IOException e) {
        LOGGER.error("Caught IOException during output.", e);
    }
}
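
Path.getParent() returns null when the path has no parent component (for example a bare file name), which is exactly the case ofNullable guards against here. A minimal sketch:

Path out = Paths.get("out.tar.gz"); // no parent component
Optional<Path> parent = Optional.ofNullable(out.getParent());
System.out.println(parent.isPresent()); // false: no directory to create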

From source file:com.epam.reportportal.auth.AuthServerApplication.java

public static void main(String[] args) {
    Optional.ofNullable(System.getenv("rp.profiles"))
            .ifPresent(p -> System.setProperty("spring.profiles.active", p));
    SpringApplication.run(AuthServerApplication.class, args);
}

From source file:com.ikanow.aleph2.analytics.spark.assets.SparkPassthroughTopology.java

public static void main(String[] args)
        throws InstantiationException, IllegalAccessException, ClassNotFoundException {

    try {
        final Tuple2<IAnalyticsContext, Optional<ProcessingTestSpecBean>> aleph2_tuple = SparkTechnologyUtils
                .initializeAleph2(args);
        final IAnalyticsContext context = aleph2_tuple._1();
        final Optional<ProcessingTestSpecBean> test_spec = aleph2_tuple._2();

        // Optional: make really really sure it exists after the specified timeout
        SparkTechnologyUtils.registerTestTimeout(test_spec, () -> {
            System.exit(0);
        });

        //INFO:
        System.out.println("Starting SparkPassthroughTopology");

        SparkConf spark_context = new SparkConf().setAppName("SparkPassthroughTopology");

        final Optional<Double> sub_sample = test_spec
                .map(__ -> Optional.ofNullable(spark_context.getDouble(SUBSAMPLE_TEST, -1)))
                .orElseGet(() -> Optional.ofNullable(spark_context.getDouble(SUBSAMPLE_NORMAL, -1)))
                .filter(d -> d > 0);

        //INFO:
        sub_sample.ifPresent(d -> System.out.println("OPTIONS: sub_sample = " + d));
        test_spec.ifPresent(spec -> System.out
                .println("OPTIONS: test_spec = " + BeanTemplateUtils.toJson(spec).toString()));

        //DEBUG
        //final boolean test_mode = test_spec.isPresent(); // (serializable thing i can pass into the map)

        try (final JavaSparkContext jsc = new JavaSparkContext(spark_context)) {

            final Multimap<String, JavaPairRDD<Object, Tuple2<Long, IBatchRecord>>> inputs = SparkTechnologyUtils
                    .buildBatchSparkInputs(context, test_spec, jsc, Collections.emptySet());

            final Optional<JavaPairRDD<Object, Tuple2<Long, IBatchRecord>>> input = inputs.values().stream()
                    .reduce((acc1, acc2) -> acc1.union(acc2));

            long written = input.map(in -> in.values())
                    .map(rdd -> sub_sample.map(sample -> rdd.sample(true, sample)).orElse(rdd)).map(rdd -> {
                        return rdd.map(t2 -> {
                            final Validation<BasicMessageBean, JsonNode> ret_val = context.emitObject(
                                    Optional.empty(), context.getJob().get(), Either.left(t2._2().getJson()),
                                    Optional.empty());
                            return ret_val; // (doesn't matter what I return, just want to count it up)
                        })
                                //DEBUG: (print the output JSON on success and the error message on fail)
                                //.map(val -> test_mode ? val.f().bind(f -> Validation.fail("FAIL: " + f.message())) : val)
                                .count();
                    }).orElse(-1L);

            jsc.stop();

            //INFO:
            System.out.println("Wrote: data_objects=" + written);
        }
    } catch (Throwable t) {
        System.out.println(ErrorUtils.getLongForm("ERROR: {0}", t));
    }
}
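
Note that SparkConf.getDouble returns a primitive double, so ofNullable never actually receives null here; it is the -1 default plus filter(d -> d > 0) that produces the empty Optional. The sentinel-to-Optional conversion, distilled (the configuration key is hypothetical):

SparkConf conf = new SparkConf();
Optional<Double> subSample = Optional.of(conf.getDouble("spark.aleph2.subsample", -1)) // hypothetical key
        .filter(d -> d > 0); // the -1 "unset" sentinel is filtered out, leaving an empty Optional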

From source file:com.ikanow.aleph2.analytics.spark.assets.SparkSqlTopology.java

public static void main(String[] args)
        throws InstantiationException, IllegalAccessException, ClassNotFoundException {

    try {
        final Tuple2<IAnalyticsContext, Optional<ProcessingTestSpecBean>> aleph2_tuple = SparkTechnologyUtils
                .initializeAleph2(args);
        final IAnalyticsContext context = aleph2_tuple._1();
        final Optional<ProcessingTestSpecBean> test_spec = aleph2_tuple._2();

        // Optional: make really really sure it exists after the specified timeout
        SparkTechnologyUtils.registerTestTimeout(test_spec, () -> {
            System.exit(0);
        });

        final SparkTopologyConfigBean config = BeanTemplateUtils
                .from(context.getJob().map(job -> job.config()).orElse(Collections.emptyMap()),
                        SparkTopologyConfigBean.class)
                .get();
        final String sql_string = Optional.ofNullable(config.script()).orElse("");

        //INFO:
        System.out.println("Starting SparkSqlTopology");

        SparkConf spark_context = new SparkConf().setAppName("SparkSqlTopology");

        final Optional<Double> sub_sample = test_spec
                .map(__ -> Optional.ofNullable(spark_context.getDouble(SUBSAMPLE_TEST, -1)))
                .orElseGet(() -> Optional.ofNullable(spark_context.getDouble(SUBSAMPLE_NORMAL, -1)))
                .filter(d -> d > 0);

        //INFO:
        sub_sample.ifPresent(d -> System.out.println("OPTIONS: sub_sample = " + d));
        test_spec.ifPresent(spec -> System.out
                .println("OPTIONS: test_spec = " + BeanTemplateUtils.toJson(spec).toString()));
        System.out.println("OPTIONS: sql = " + sql_string);

        //DEBUG
        //final boolean test_mode = test_spec.isPresent(); // (serializable thing i can pass into the map)

        try (final JavaSparkContext jsc = new JavaSparkContext(spark_context)) {

            SQLContext sql_context = new SQLContext(jsc);

            final Multimap<String, DataFrame> inputs = SparkTechnologyUtils.buildBatchSparkSqlInputs(context,
                    test_spec, sql_context, Collections.emptySet());

            //INFO
            System.out.println("Registered tables = " + inputs.keySet().toString());

            final DataFrame filtered_df = sql_context.sql(sql_string);
            final String[] columns = filtered_df.columns(); // (have to do this here because columns() depends on transient code)

            final long written = filtered_df.javaRDD().map(row -> {
                final ObjectNode j = _mapper.createObjectNode(); //.put("message", row.toString()); (Don't think we want this now that we're using the columns)
                for (int ii = 0; ii < row.length(); ++ii) {
                    j.set(columns[ii], _mapper.convertValue(row.get(ii), JsonNode.class));
                }
                return context.emitObject(Optional.empty(), context.getJob().get(), Either.left(j),
                        Optional.empty());
            }).count();

            //INFO:
            System.out.println("Wrote: data_objects=" + written);
        }
    } catch (Throwable t) {
        System.out.println(ErrorUtils.getLongForm("ERROR: {0}", t));
    }
}
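
ofNullable(config.script()).orElse("") is the standard null-to-default idiom. When the fallback is expensive to compute, orElseGet defers it behind a Supplier; a sketch using the config bean from the example above (loadDefaultScript is a hypothetical helper):

String sql = Optional.ofNullable(config.script()).orElse("");  // eager default
String sqlLazy = Optional.ofNullable(config.script())
        .orElseGet(() -> loadDefaultScript());                 // lazy default, hypothetical helper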

From source file:com.ikanow.aleph2.analytics.spark.assets.SparkScalaInterpreterTopology.java

public static void main(String[] args)
        throws InstantiationException, IllegalAccessException, ClassNotFoundException {

    final SetOnce<IBucketLogger> logger = new SetOnce<>();

    try {
        final Tuple2<IAnalyticsContext, Optional<ProcessingTestSpecBean>> aleph2_tuple = SparkTechnologyUtils
                .initializeAleph2(args);
        final IAnalyticsContext context = aleph2_tuple._1();
        final Optional<ProcessingTestSpecBean> test_spec = aleph2_tuple._2();

        logger.set(context.getLogger(context.getBucket()));

        // Optional: make really really sure it exists after the specified timeout
        SparkTechnologyUtils.registerTestTimeout(test_spec, () -> {
            System.exit(0);
        });

        //INFO:
        System.out.println("Starting SparkScalaInterpreterTopology logging=" + logger.optional().isPresent());

        logger.optional().ifPresent(l -> {
            l.inefficientLog(Level.INFO,
                    ErrorUtils.buildSuccessMessage("SparkScalaInterpreterTopology", "main",
                            "Starting SparkScalaInterpreterTopology.{0}",
                            Optionals.of(() -> context.getJob().get().name()).orElse("no_name")));
        });

        final SparkTopologyConfigBean job_config = BeanTemplateUtils
                .from(context.getJob().map(job -> job.config()).orElse(Collections.emptyMap()),
                        SparkTopologyConfigBean.class)
                .get();
        final String scala_script = Optional.ofNullable(job_config.script()).orElse("");

        final String wrapper_script = IOUtils.toString(
                SparkScalaInterpreterTopology.class.getClassLoader().getResourceAsStream("ScriptRunner.scala"),
                "UTF-8");
        final String to_compile = wrapper_script.replace("USER_SCRIPT", scala_script);
        final SparkCompilerService scs = new SparkCompilerService();
        final Tuple2<ClassLoader, Object> o = scs.buildClass(to_compile, "ScriptRunner", logger.optional());

        Thread.currentThread().setContextClassLoader(o._1());

        test_spec.ifPresent(spec -> System.out
                .println("OPTIONS: test_spec = " + BeanTemplateUtils.toJson(spec).toString()));

        SparkConf spark_context = new SparkConf().setAppName("SparkScalaInterpreterTopology");

        final long streaming_batch_interval = (long) spark_context
                .getInt(SparkTopologyConfigBean.STREAMING_BATCH_INTERVAL, 10);

        // MAIN PROCESSING

        final Method m = o._2().getClass().getMethod("runScript", SparkScriptEngine.class);

        //DEBUG
        //final boolean test_mode = test_spec.isPresent(); // (serializable thing i can pass into the map)

        boolean is_streaming = context.getJob().map(j -> j.analytic_type())
                .map(t -> MasterEnrichmentType.streaming == t).orElse(false);
        final Either<JavaSparkContext, JavaStreamingContext> jsc = Lambdas.get(() -> {
            return is_streaming
                    ? Either.<JavaSparkContext, JavaStreamingContext>right(new JavaStreamingContext(
                            spark_context, Durations.seconds(streaming_batch_interval)))
                    : Either.<JavaSparkContext, JavaStreamingContext>left(new JavaSparkContext(spark_context));
        });
        try {
            final JavaSparkContext jsc_batch = jsc.either(l -> l, r -> r.sparkContext());

            final Multimap<String, JavaPairRDD<Object, Tuple2<Long, IBatchRecord>>> inputs = SparkTechnologyUtils
                    .buildBatchSparkInputs(context, test_spec, jsc_batch, Collections.emptySet());

            final Multimap<String, JavaPairDStream<String, Tuple2<Long, IBatchRecord>>> streaming_inputs = jsc
                    .<Multimap<String, JavaPairDStream<String, Tuple2<Long, IBatchRecord>>>>either(
                            l -> HashMultimap
                                    .<String, JavaPairDStream<String, Tuple2<Long, IBatchRecord>>>create(),
                            r -> SparkTechnologyUtils.buildStreamingSparkInputs(context, test_spec, r,
                                    Collections.emptySet()));

            final SparkScriptEngine script_engine_bridge = new SparkScriptEngine(context, inputs,
                    streaming_inputs, test_spec, jsc_batch, jsc.either(l -> null, r -> r), job_config);

            // Add driver and generated JARs to path:
            jsc_batch.addJar(LiveInjector.findPathJar(o._2().getClass()));

            m.invoke(o._2(), script_engine_bridge);

            jsc.either(l -> {
                l.stop();
                return null;
            }, r -> {
                r.stop();
                return null;
            });

            logger.optional().ifPresent(l -> {
                l.inefficientLog(Level.INFO,
                        ErrorUtils.buildSuccessMessage("SparkScalaInterpreterTopology", "main",
                                "Stopping SparkScalaInterpreterTopology.{0}",
                                Optionals.of(() -> context.getJob().get().name()).orElse("no_name")));
            });

            //INFO:
            System.out.println("Finished interpreter");
        } finally {
            jsc.either(l -> {
                l.close();
                return null;
            }, r -> {
                r.close();
                return null;
            });
        }
        logger.optional().ifPresent(Lambdas.wrap_consumer_u(l -> l.flush().get(10, TimeUnit.SECONDS)));
    } catch (Throwable t) {
        logger.optional().ifPresent(l -> {
            l.inefficientLog(Level.ERROR, ErrorUtils.buildSuccessMessage("SparkScalaInterpreterTopology",
                    "main",
                    ErrorUtils.getLongForm("Error executing SparkScalaInterpreterTopology.unknown: {0}", t)));
        });

        System.out.println(ErrorUtils.getLongForm("ERROR: {0}", t));
        logger.optional().ifPresent(Lambdas.wrap_consumer_u(l -> l.flush().get(10, TimeUnit.SECONDS)));
        System.exit(-1);
    }
}

From source file:com.ikanow.aleph2.analytics.spark.assets.SparkJsInterpreterTopology.java

public static void main(String[] args)
        throws InstantiationException, IllegalAccessException, ClassNotFoundException {

    final SetOnce<IBucketLogger> bucket_logger = new SetOnce<>();
    final SetOnce<String> job_name = new SetOnce<>(); // (the string we'll use in logging activities)
    try {
        final Tuple2<IAnalyticsContext, Optional<ProcessingTestSpecBean>> aleph2_tuple = SparkTechnologyUtils
                .initializeAleph2(args);
        final IAnalyticsContext context = aleph2_tuple._1();
        final Optional<ProcessingTestSpecBean> test_spec = aleph2_tuple._2();

        bucket_logger.set(context.getLogger(context.getBucket()));
        job_name.set(context.getJob().map(j -> j.name()).orElse("no_name"));

        // Optional: make really really sure it exists after the specified timeout
        SparkTechnologyUtils.registerTestTimeout(test_spec, () -> {
            System.exit(0);
        });

        final SparkTopologyConfigBean config = BeanTemplateUtils
                .from(context.getJob().map(job -> job.config()).orElse(Collections.emptyMap()),
                        SparkTopologyConfigBean.class)
                .get();

        final String js_script = Optional.ofNullable(config.script()).orElse("");

        //INFO:
        System.out.println("Starting " + job_name.get());

        SparkConf spark_context = new SparkConf().setAppName(job_name.get());

        test_spec.ifPresent(spec -> System.out
                .println("OPTIONS: test_spec = " + BeanTemplateUtils.toJson(spec).toString()));

        try (final JavaSparkContext jsc = new JavaSparkContext(spark_context)) {

            final Multimap<String, JavaPairRDD<Object, Tuple2<Long, IBatchRecord>>> inputs = SparkTechnologyUtils
                    .buildBatchSparkInputs(context, test_spec, jsc, Collections.emptySet());
            final JavaPairRDD<Object, Tuple2<Long, IBatchRecord>> all_inputs = inputs.values().stream()
                    .reduce((acc1, acc2) -> acc1.union(acc2)).orElse(null);

            // Load globals:
            ScriptEngineManager manager = new ScriptEngineManager();
            ScriptEngine engine = manager.getEngineByName("JavaScript");
            engine.put("_a2_global_context", context);
            engine.put("_a2_global_bucket", context.getBucket().get());
            engine.put("_a2_global_job", context.getJob().get());
            engine.put("_a2_global_config",
                    BeanTemplateUtils.configureMapper(Optional.empty()).convertValue(config, JsonNode.class));
            engine.put("_a2_global_mapper", BeanTemplateUtils.configureMapper(Optional.empty()));
            //TODO (until bucket logger is serializable, don't allow anywhere)
            //engine.put("_a2_bucket_logger", bucket_logger.optional().orElse(null));
            engine.put("_a2_enrichment_name", job_name.get());
            engine.put("_a2_spark_inputs", inputs);
            engine.put("_a2_spark_inputs_all", all_inputs);
            engine.put("_a2_spark_context", jsc);

            Stream.concat(config.uploaded_lang_files().stream(),
                    Stream.of("aleph2_sparkjs_globals_before.js", ""))
                    .flatMap(Lambdas.flatWrap_i(import_path -> {
                        try {
                            if (import_path.equals("")) { // also import the user script just before here
                                return js_script;
                            } else
                                return IOUtils.toString(SparkJsInterpreterTopology.class.getClassLoader()
                                        .getResourceAsStream(import_path), "UTF-8");
                        } catch (Throwable e) {
                            bucket_logger.optional()
                                    .ifPresent(l -> l.log(Level.ERROR,
                                            ErrorUtils.lazyBuildMessage(false,
                                                    () -> SparkJsInterpreterTopology.class.getSimpleName(),
                                                    () -> job_name.get() + ".main", () -> null,
                                                    () -> ErrorUtils.get(
                                                            "Error initializing stage {0} (script {1}): {2}",
                                                            job_name.get(), import_path, e.getMessage()),
                                                    () -> ImmutableMap.<String, Object>of("full_error",
                                                            ErrorUtils.getLongForm("{0}", e)))));

                            System.out.println(ErrorUtils.getLongForm("onStageInitialize: {0}", e));
                            throw e; // ignored
                        }
                    })).forEach(Lambdas.wrap_consumer_i(script -> {
                        try {
                            engine.eval(script);
                        } catch (Throwable e) {
                            bucket_logger.optional()
                                    .ifPresent(l -> l.log(Level.ERROR,
                                            ErrorUtils.lazyBuildMessage(false,
                                                    () -> SparkJsInterpreterTopology.class.getSimpleName(),
                                                    () -> job_name.get() + ".main", () -> null,
                                                    () -> ErrorUtils.get(
                                                            "Error initializing stage {0} (main script): {1}",
                                                            job_name.get(), e.getMessage()),
                                                    () -> ImmutableMap.<String, Object>of("full_error",
                                                            ErrorUtils.getLongForm("{0}", e)))));

                            System.out.println(ErrorUtils.getLongForm("onStageInitialize: {0}", e));
                            throw e; // ignored
                        }
                    }));

            jsc.stop();

            //INFO:
            System.out.println("Finished " + job_name.get());
        }
    } catch (Throwable t) {
        System.out.println(ErrorUtils.getLongForm("ERROR: {0}", t));

        bucket_logger.optional().ifPresent(l -> l.log(Level.ERROR, ErrorUtils.lazyBuildMessage(false,
                () -> SparkJsInterpreterTopology.class.getSimpleName()
                        + job_name.optional().map(j -> "." + j).orElse(""),
                () -> job_name.optional().orElse("global") + ".main", () -> null,
                () -> ErrorUtils.get("Error on batch in job {0}: {1}",
                        job_name.optional().orElse("global") + ".main", t.getMessage()),
                () -> ImmutableMap.<String, Object>of("full_error", ErrorUtils.getLongForm("{0}", t)))));
    }
}

From source file:edu.jhu.hlt.concrete.ingesters.simple.CompleteFileIngester.java

/**
 * See usage string.
 *
 * @param args
 */
public static void main(String[] args) {
    if (args.length != 3) {
        System.err.println("This program converts a character-based file to a .concrete file.");
        System.err.println("The text file must contain UTF-8 encoded characters.");
        System.err.println(
                "The .concrete file will share the same name as the input file, including the extension.");
        System.err.println("This program takes 3 arguments.");
        System.err.println("Argument 1: path/to/a/character/based/file");
        System.err.println("Argument 2: type of Communication to generate [e.g., tweet]");
        System.err.println("Argument 3: path/to/output/folder");
        System.err.println("Example usage: " + CompleteFileIngester.class.getName()
                + " /my/text/file story /my/output/folder");
        System.exit(1);
    }

    String inPathStr = args[0];
    Path inPath = Paths.get(inPathStr);
    try {
        ExistingNonDirectoryFile ef = new ExistingNonDirectoryFile(inPath);
        Optional<String> commType = Optional.ofNullable(args[1]);
        Optional<String> outPathStr = Optional.ofNullable(args[2]);

        Path ep = ef.getPath();
        String fn = ef.getName();
        Path outPath = Paths.get(outPathStr.get());
        Path outFile = outPath.resolve(fn + ".concrete");

        // Output directory exists, or it doesn't.
        // Try to create if it does not.
        if (!Files.exists(outPath)) {
            try {
                Files.createDirectories(outPath);
            } catch (IOException e) {
                logger.error("Caught exception when making output directories.", e);
            }

            // if it does, check to make sure it's a directory.
        } else {
            if (!Files.isDirectory(outPath)) {
                logger.error("Output path exists but is not a directory.");
                System.exit(1);
            } else {
                // check to make sure the output file won't be overwritten.
                if (Files.exists(outFile)) {
                    logger.warn("Output file {} exists; not overwriting.", outFile.toString());
                    System.exit(1);
                }
            }
        }

        try {
            UTF8FileIngester ing = new CompleteFileIngester(commType.get());
            Communication comm = ing.fromCharacterBasedFile(ep);
            new WritableCommunication(comm).writeToFile(outFile, false);
        } catch (IngestException e) {
            logger.error("Caught exception during ingest.", e);
            System.exit(1);
        } catch (ConcreteException e) {
            logger.error("Caught exception writing output.", e);
        }

    } catch (NoSuchFileException e) {
        logger.error("Path {} does not exist.", inPathStr);
        System.exit(1);
    } catch (NotFileException e) {
        logger.error("Path {} is a directory.", inPathStr);
        System.exit(1);
    }
}
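
After the length check, args[1] and args[2] can never be null (the JVM launcher never passes null arguments), so ofNullable adds no safety here; it is used only to obtain an Optional handle. A sketch of the stricter alternative:

// Optional.of documents the non-null expectation and fails fast if it is ever violated.
Optional<String> commType = Optional.of(args[1]);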

From source file:edu.jhu.hlt.concrete.ingesters.simple.DoubleLineBreakFileIngester.java

/**
 * See usage string.
 *
 * @param args
 */
public static void main(String[] args) {
    if (args.length != 4) {
        System.err.println("This program converts a character-based file to a .concrete file.");
        System.err.println("The text file must contain UTF-8 encoded characters.");
        System.err.println(
                "If the file contains any double-newlines, the file will be split into sections where those double-newlines occur.");
        System.err.println(
                "The .concrete file will share the same name as the input file, including the extension.");
        System.err.println("This program takes 4 arguments.");
        System.err.println("Argument 1: path/to/a/character/based/file");
        System.err.println("Argument 2: type of Communication to generate [e.g., tweet]");
        System.err.println("Argument 3: type of Sections to generate [e.g., passage]");
        System.err.println("Argument 4: path/to/out/concrete/file");
        System.err.println("Example usage: " + CompleteFileIngester.class.getName()
                + " /my/text/file story passage /my/output/folder");
        System.exit(1);
    }

    String inPathStr = args[0];
    Path inPath = Paths.get(inPathStr);
    try {
        ExistingNonDirectoryFile ef = new ExistingNonDirectoryFile(inPath);
        Optional<String> commType = Optional.ofNullable(args[1]);
        Optional<String> sectionType = Optional.ofNullable(args[2]);
        Optional<String> outPathStr = Optional.ofNullable(args[3]);

        Path ep = ef.getPath();
        String fn = ef.getName();
        Path outPath = Paths.get(outPathStr.get());
        Path outFile = outPath.resolve(fn + ".concrete");

        // Output directory exists, or it doesn't.
        // Try to create if it does not.
        if (!Files.exists(outPath)) {
            try {
                Files.createDirectories(outPath);
            } catch (IOException e) {
                logger.error("Caught exception when making output directories.", e);
            }

            // if it does, check to make sure it's a directory.
        } else {
            if (!Files.isDirectory(outPath)) {
                logger.error("Output path exists but is not a directory.");
                System.exit(1);
            } else {
                // check to make sure the output file won't be overwritten.
                if (Files.exists(outFile)) {
                    logger.warn("Output file {} exists; not overwriting.", outFile.toString());
                    System.exit(1);
                }
            }
        }

        try {
            UTF8FileIngester ing = new DoubleLineBreakFileIngester(commType.get(), sectionType.get());
            Communication comm = ing.fromCharacterBasedFile(ep);
            new WritableCommunication(comm).writeToFile(outFile, false);
        } catch (IngestException e) {
            logger.error("Caught exception during ingest.", e);
            System.exit(1);
        } catch (ConcreteException e) {
            logger.error("Caught exception writing output.", e);
        }

    } catch (NoSuchFileException e) {
        logger.error("Path {} does not exist.", inPathStr);
        System.exit(1);
    } catch (NotFileException e) {
        logger.error("Path {} is a directory.", inPathStr);
        System.exit(1);
    }
}