Example usage for java.io.File.exists()

A list of usage examples for java.io.File.exists().

Introduction

On this page you can find example usages of java.io.File.exists().

Prototype

public boolean exists() 

Document

Tests whether the file or directory denoted by this abstract pathname exists.
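
A minimal, self-contained sketch of the call (the file name is purely illustrative):

public class ExistsDemo {
    public static void main(String[] args) {
        java.io.File f = new java.io.File("settings.conf"); // hypothetical path, for illustration only
        if (f.exists() && !f.isDirectory()) {
            System.out.println("Regular file present: " + f.getAbsolutePath());
        } else {
            System.out.println("Missing, or a directory: " + f.getPath());
        }
    }
}

Note that exists() is only a snapshot: the file can appear or disappear between the check and any subsequent open, so code that can tolerate the race often just attempts the operation and handles the failure instead.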

Usage

From source file:licenseUtil.LicenseUtil.java

public static void main(String[] args) throws IOException, IncompleteLicenseObjectException {
    if (args.length == 0) {
        logger.error("Missing parameters. Use --help to get a list of the possible options.");
    } else if (args[0].equals("--addPomToTsv")) {
        if (args.length < 4)
            logger.error(
                    "Missing arguments for option --addPomToTsv. Please specify <pomFileName> <licenses.stub.tsv> <currentVersion> or use the option --help for further information.");
        String pomFN = args[1];
        String spreadSheetFN = args[2];
        String currentVersion = args[3];

        MavenProject project = null;
        try {
            project = Utils.readPom(new File(pomFN));
        } catch (XmlPullParserException e) {
            logger.error("Could not parse pom file: \"" + pomFN + "\"");
        }
        LicensingList licensingList = new LicensingList();
        File f = new File(spreadSheetFN);
        if (f.exists() && !f.isDirectory()) {
            licensingList.readFromSpreadsheet(spreadSheetFN);
        }

        licensingList.addMavenProject(project, currentVersion);
        licensingList.writeToSpreadsheet(spreadSheetFN);
    } else if (args[0].equals("--writeLicense3rdParty")) {
        if (args.length < 4)
            logger.error(
                    "Missing arguments for option --writeLicense3rdParty. Please provide <licenses.enhanced.tsv> <processModule> <currentVersion> [and <targetDir>] or use the option --help for further information.");
        String spreadSheetFN = args[1];
        String processModule = args[2];
        String currentVersion = args[3];

        HashMap<String, String> targetDirs = new HashMap<>();
        if (args.length > 4) {
            File targetDir = new File(args[4]);
            logger.info("scan pom files in direct subdirectories of \"" + targetDir.getPath()
                    + "\" to obtain target locations for 3rd party license files...");
            File[] subdirs = targetDir.listFiles((FileFilter) DirectoryFileFilter.DIRECTORY);
            for (File subdir : subdirs) {
                File pomFile = new File(subdir.getPath() + File.separator + POM_FN);
                if (!pomFile.exists())
                    continue;
                MavenProject mavenProject;
                try {
                    mavenProject = Utils.readPom(pomFile);
                } catch (Exception e) {
                    logger.warn("Could not read from pom file: \"" + pomFile.getPath() + "\" because of "
                            + e.getMessage());
                    continue;
                }
                targetDirs.put(mavenProject.getModel().getArtifactId(), subdir.getAbsolutePath());
            }
        }

        LicensingList licensingList = new LicensingList();
        licensingList.readFromSpreadsheet(spreadSheetFN);
        if (processModule.equalsIgnoreCase("ALL")) {
            for (String module : licensingList.getNonFixedHeaders()) {
                try {
                    writeLicense3rdPartyFile(module, licensingList, currentVersion, targetDirs.get(module));
                } catch (NoLicenseTemplateSetException e) {
                    logger.error("Could not write file for module \"" + module
                            + "\". There is no template specified for \"" + e.getLicensingObject()
                            + "\". Please add an existing template filename to the column \""
                            + LicensingObject.ColumnHeader.LICENSE_TEMPLATE.value() + "\" of \"" + spreadSheetFN
                            + "\".");
                }
            }
        } else {
            try {
                writeLicense3rdPartyFile(processModule, licensingList, currentVersion,
                        targetDirs.get(processModule));
            } catch (NoLicenseTemplateSetException e) {
                logger.error("Could not write file for module \"" + processModule
                        + "\". There is no template specified for \"" + e.getLicensingObject()
                        + "\". Please add an existing template filename to the column \""
                        + LicensingObject.ColumnHeader.LICENSE_TEMPLATE.value() + "\" of \"" + spreadSheetFN
                        + "\".");
            }
        }
    } else if (args[0].equals("--buildEffectivePom")) {
        Utils.writeEffectivePom(new File(args[1]), (new File(EFFECTIVE_POM_FN)).getAbsolutePath());
    } else if (args[0].equals("--updateTsvWithProjectsInFolder")) {
        if (args.length < 4)
            logger.error(
                    "Missing arguments for option --updateTsvWithProjectsInFolder. Please provide <superDirectory> <licenses.stub.tsv> and <currentVersion> or use the option --help for further information.");
        File directory = new File(args[1]);
        String spreadSheetFN = args[2];
        String currentVersion = args[3];
        LicensingList licensingList = new LicensingList();
        File f = new File(spreadSheetFN);
        if (f.exists() && !f.isDirectory()) {
            licensingList.readFromSpreadsheet(spreadSheetFN);
        }
        licensingList.addAll(processProjectsInFolder(directory, currentVersion, false));
        licensingList.writeToSpreadsheet(spreadSheetFN);

    } else if (args[0].equals("--purgeTsv")) {
        if (args.length < 4)
            logger.error(
                    "Missing arguments for option --purgeTsv. Please provide <spreadSheetIN.tsv>, <spreadSheetOUT.tsv> and <currentVersion> or use the option --help for further information.");
        String spreadSheetIN = args[1];
        String spreadSheetOUT = args[2];
        String currentVersion = args[3];

        LicensingList licensingList = new LicensingList();
        licensingList.readFromSpreadsheet(spreadSheetIN);
        licensingList.purge(currentVersion);
        licensingList.writeToSpreadsheet(spreadSheetOUT);

    } else if (args[0].equals("--help")) {
        InputStream in = LicenseUtil.class.getClassLoader().getResourceAsStream(README_PATH);
        BufferedReader reader = new BufferedReader(new InputStreamReader(in));
        String line;
        while ((line = reader.readLine()) != null) {
            System.out.println(line);
        }
    } else {
        logger.error("Unknown parameter: " + args[0] + ". Use --help to get a list of the possible options.");
    }
}
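
Both the --addPomToTsv and --updateTsvWithProjectsInFolder branches above guard the spreadsheet read with f.exists() && !f.isDirectory(), so a first run simply starts from an empty LicensingList instead of failing on a missing file.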

From source file:ViewImageTest.java

/**
 * Test image(s) (default : JPEG_example_JPG_RIP_100.jpg) are parsed and
 * rendered to an output folder. Results can then be checked with the
 * program of your choice.
 * 
 * @param args
 *            may be empty or contain parameters to override defaults :
 *            <ul>
 *            <li>args[0] : input image file URL or folder containing image
 *            files URL. Default :
 *            viewImageTest/test/JPEG_example_JPG_RIP_100.jpg</li>
 *            <li>args[1] : output format name (for example : "jpg") for
 *            rendered image</li>
 *            <li>args[2] : output folder URL</li>
 *            <li>args[3] : max width (in pixels) for rendered image.
 *            Default : no value.</li>
 *            <li>args[4] : max height (in pixels) for rendered image.
 *            Default : no value.</li>
 *            </ul>
 * @throws IOException
 *             when a read/write error occurred
 */
public static void main(String args[]) throws IOException {
    File inURL = getInputURL(args);
    String ext = getEncodingExt(args);
    File outDir = getOuputDir(args);
    serverObjects post = makePostParams(args);
    outDir.mkdirs();

    File[] inFiles;
    if (inURL.isFile()) {
        inFiles = new File[1];
        inFiles[0] = inURL;
        System.out.println("Testing ViewImage rendering with input file : " + inURL.getAbsolutePath()
                + " encoded To : " + ext);
    } else if (inURL.isDirectory()) {
        FileFilter filter = FileFileFilter.FILE;
        inFiles = inURL.listFiles(filter);
        System.out.println("Testing ViewImage rendering with input files in folder : " + inURL.getAbsolutePath()
                + " encoded To : " + ext);
    } else {
        inFiles = new File[0];
    }
    if (inFiles.length == 0) {
        throw new IllegalArgumentException(inURL.getAbsolutePath() + " is not a valid file or folder url.");
    }
    System.out.println("Rendered images will be written in dir : " + outDir.getAbsolutePath());

    Map<String, Exception> failures = new HashMap<String, Exception>();
    try {
        for (File inFile : inFiles) {
            /* Delete any previous result file */
            File outFile = new File(outDir, inFile.getName() + "." + ext);
            if (outFile.exists()) {
                outFile.delete();
            }

            byte[] resourceb = getBytes(inFile);
            String urlString = inFile.getAbsolutePath();
            EncodedImage img = null;
            Exception error = null;
            try {
                img = ViewImage.parseAndScale(post, true, urlString, ext, false, resourceb);
            } catch (Exception e) {
                error = e;
            }

            if (img == null) {
                failures.put(urlString, error);
            } else {
                FileOutputStream outFileStream = null;
                try {
                    outFileStream = new FileOutputStream(outFile);
                    img.getImage().writeTo(outFileStream);
                } finally {
                    if (outFileStream != null) {
                        outFileStream.close();
                    }
                    img.getImage().close();
                }
            }
        }
        displayResults(inFiles, failures);
    } finally {
        ConcurrentLog.shutdown();
    }

}
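
In this example exists() gates deletion of a stale output file before each render. On Java 7+ the check-then-delete pair can be collapsed into a single call; a minimal sketch, assuming outFile as above:

        // Equivalent to the exists()/delete() pair above, in one call (throws IOException)
        java.nio.file.Files.deleteIfExists(outFile.toPath());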

From source file:eu.fbk.dkm.sectionextractor.WikipediaSectionTitlesExtractor.java

public static void main(String args[]) throws IOException {

    CommandLineWithLogger commandLineWithLogger = new CommandLineWithLogger();
    commandLineWithLogger.addOption(OptionBuilder.withArgName("file").hasArg()
            .withDescription("wikipedia xml dump file").isRequired().withLongOpt("wikipedia-dump").create("d"));
    commandLineWithLogger.addOption(OptionBuilder.withArgName("file").hasArg().withDescription("Filter file")
            .withLongOpt("filter").create("f"));
    commandLineWithLogger.addOption(OptionBuilder.withArgName("dir").hasArg().withDescription("output file")
            .isRequired().withLongOpt("output-file").create("o"));
    commandLineWithLogger.addOption(OptionBuilder.withArgName("int").hasArg()
            .withDescription("max depth (default " + MAX_DEPTH + ")").withLongOpt("max-depth").create("m"));
    commandLineWithLogger.addOption(OptionBuilder.withArgName("int").hasArg()
            .withDescription("max num of sections").withLongOpt("max-num").create("n"));
    commandLineWithLogger.addOption(new Option("l", "print titles"));

    commandLineWithLogger.addOption(OptionBuilder.withArgName("int").hasArg()
            .withDescription(
                    "number of threads (default " + AbstractWikipediaXmlDumpParser.DEFAULT_THREADS_NUMBER + ")")
            .withLongOpt("num-threads").create("t"));
    commandLineWithLogger.addOption(OptionBuilder.withArgName("int").hasArg()
            .withDescription("number of pages to process (default all)").withLongOpt("num-pages").create("p"));
    commandLineWithLogger.addOption(OptionBuilder.withArgName("int").hasArg()
            .withDescription("receive notification every n pages (default "
                    + AbstractWikipediaExtractor.DEFAULT_NOTIFICATION_POINT + ")")
            .withLongOpt("notification-point").create("b"));

    CommandLine commandLine = null;
    try {
        commandLine = commandLineWithLogger.getCommandLine(args);
        PropertyConfigurator.configure(commandLineWithLogger.getLoggerProps());
    } catch (Exception e) {
        System.exit(1);
    }

    int numThreads = Integer.parseInt(commandLine.getOptionValue("num-threads",
            Integer.toString(AbstractWikipediaXmlDumpParser.DEFAULT_THREADS_NUMBER)));
    int numPages = Integer.parseInt(commandLine.getOptionValue("num-pages",
            Integer.toString(AbstractWikipediaExtractor.DEFAULT_NUM_PAGES)));
    int notificationPoint = Integer.parseInt(commandLine.getOptionValue("notification-point",
            Integer.toString(AbstractWikipediaExtractor.DEFAULT_NOTIFICATION_POINT)));

    int configuredDepth = Integer
            .parseInt(commandLine.getOptionValue("max-depth", Integer.toString(MAX_DEPTH)));
    int maxNum = Integer.parseInt(commandLine.getOptionValue("max-num", "0"));
    boolean printTitles = commandLine.hasOption("l");

    HashSet<String> pagesToConsider = null;
    String filterFileName = commandLine.getOptionValue("filter");
    if (filterFileName != null) {
        File filterFile = new File(filterFileName);
        if (filterFile.exists()) {
            pagesToConsider = new HashSet<>();
            List<String> lines = Files.readLines(filterFile, Charsets.UTF_8);
            for (String line : lines) {
                line = line.trim();
                if (line.length() == 0) {
                    continue;
                }

                line = line.replaceAll("\\s+", "_");

                pagesToConsider.add(line);
            }
        }
    }

    File outputFile = new File(commandLine.getOptionValue("output-file"));
    ExtractorParameters extractorParameters = new ExtractorParameters(
            commandLine.getOptionValue("wikipedia-dump"), outputFile.getAbsolutePath());

    WikipediaExtractor wikipediaPageParser = new WikipediaSectionTitlesExtractor(numThreads, numPages,
            extractorParameters.getLocale(), outputFile, configuredDepth, maxNum, printTitles, pagesToConsider);
    wikipediaPageParser.setNotificationPoint(notificationPoint);
    wikipediaPageParser.start(extractorParameters);

    logger.info("extraction ended " + new Date());

}
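
Here the filter file is optional: filterFile.exists() decides whether pagesToConsider gets populated at all, and a null set is evidently treated downstream as "consider every page".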

From source file:com.rtl.http.Upload.java

public static void main(String[] args) throws Exception {

    if (args.length != 4) {
        // Jxj001 20150603 d:\rpt_rtl_0001.txt d:\confDir
        logger.error("??4,dataTypedataVersionfilePathconfDir");
        // System.out.println("??3,dataTypedataVersionfilePath");
        return;//from w ww  . j  a  v a2  s  .  c o m
    } else {
        Upload upload = new Upload();
        upload.readInfo(args[3]);

        Date date = new Date();
        DateFormat format1 = new SimpleDateFormat("yyyyMMdd");
        String todayDir = format1.format(date);

        File dir = new File(logDir + File.separator + todayDir);
        if (!dir.exists()) {
            dir.mkdirs();
        }

        //         SimpleLayout layout = new SimpleLayout();
        PatternLayout layout = new PatternLayout("%d %-5p %c - %m%n");

        FileAppender appender = null;

        try {

            appender = new FileAppender(layout, dir.getPath() + File.separator + todayDir + "client.log", true);

        } catch (Exception e) {
            logger.error("");
        }

        logger.addAppender(appender);
        logger.setLevel(Level.INFO);

        InputStream fileIn;
        dataType = args[0];
        dataVersion = args[1];
        filePath = args[2];

        logger.info("dataType=" + dataType);
        logger.info("dataVersion=" + dataVersion);
        logger.info("filePath=" + filePath);

        try {
            File file = new File(filePath);
            if (!file.exists()) {
                // System.out.println("?:"+filePath);
                logger.error("?:" + filePath);
                return;
            }
            fileIn = new FileInputStream(filePath);
            String responseStr = send(upload.getJson(), fileIn, url);
            if (responseStr != null) {
                String[] values = responseStr.split(",");
                if ("ok".equals(values[0])) {
                    System.out.println("0");
                    logger.info("ok");
                } else {
                    System.out.println("1");
                    logger.info(" A" + values[1]);
                }
            } else {
                System.out.println("1");
                logger.info(" B????");
            }

        } catch (Exception e) {
            System.out.println("1");
            logger.error(" C" + e.getMessage());
        }
    }

    logger.info("??");
    // System.out.println("??");

}
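
This example combines the two most common patterns on this page: !dir.exists() guarding mkdirs() for the dated log directory, and !file.exists() as an early-exit validation before opening the upload stream.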

From source file:com.ikanow.infinit.e.core.CoreMain.java

/**
 * @param args
 * @throws ParseException 
 * @throws IOException 
 * @throws InterruptedException 
 */
public static void main(String[] args) throws ParseException, IOException, InterruptedException {

    CommandLineParser cliParser = new BasicParser();
    Options allOps = new Options();
    // Common
    allOps.addOption("c", "config", true, "Configuration path");
    allOps.addOption("g", "community", true, "Only run on one community");
    // Harvest specific
    allOps.addOption("h", "harvest", false, "Run harvester");
    allOps.addOption("l", "local", false, "(for debug: use dummy index)");
    allOps.addOption("i", "source", true, "(for debug: use a single source)");
    allOps.addOption("r", "reset", false, "Reset bad sources");
    // Sync specific
    allOps.addOption("s", "sync", false, "Run synchronization");
    allOps.addOption("f", "from", true, "String unix time (secs) from when to sync");
    // Custom specifc
    allOps.addOption("p", "custom", false, "Run custom processing server");
    allOps.addOption("d", "dummy", true, "Use to keep temp unwanted options on the command line");
    allOps.addOption("j", "jobtitle", true, "(for debug: run a single job)");

    CommandLine cliOpts = cliParser.parse(allOps, args);

    Globals.setIdentity(com.ikanow.infinit.e.data_model.Globals.Identity.IDENTITY_SERVICE);

    if (cliOpts.hasOption("config")) {
        String configOverride = (String) cliOpts.getOptionValue("config");
        Globals.overrideConfigLocation(configOverride);
    }
    //Set up logging
    java.io.File file = new java.io.File(
            com.ikanow.infinit.e.data_model.Globals.getLogPropertiesLocation() + ".xml");
    if (file.exists()) {
        DOMConfigurator.configure(com.ikanow.infinit.e.data_model.Globals.getLogPropertiesLocation() + ".xml");
    } else {
        PropertyConfigurator.configure(Globals.getLogPropertiesLocation());
    }

    if (cliOpts.hasOption("harvest")) {
        if (SourceUtils.checkDbSyncLock()) {
            Thread.sleep(10000); // (wait 10s and then try again)
            System.exit(0);
        }
        String communityOverride = null;
        String sourceDebug = null;
        if (cliOpts.hasOption("local")) {
            ElasticSearchManager.setLocalMode(true);
        }
        if (cliOpts.hasOption("reset")) {
            SourceUtils.resetBadSources();
        }
        if (cliOpts.hasOption("community")) {
            communityOverride = (String) cliOpts.getOptionValue("community");
        }
        if (cliOpts.hasOption("source")) {
            sourceDebug = (String) cliOpts.getOptionValue("source");
        }
        new HarvestThenProcessController()
                .startService(SourceUtils.getSourcesToWorkOn(communityOverride, sourceDebug, false, true));
    } //TESTED
    else if (cliOpts.hasOption("sync")) {
        if (SourceUtils.checkDbSyncLock()) {
            Thread.sleep(10000); // (wait 10s and then try again)
            System.exit(0);
        }
        // Sync command line options:
        long nTimeOfLastCleanse_secs = 0; // (default)
        if (cliOpts.hasOption("from")) {
            try {
                nTimeOfLastCleanse_secs = Long.parseLong((String) cliOpts.getOptionValue("from"));
            } catch (NumberFormatException e) {
                System.out.println("From date is incorrect");
                System.exit(-1);
            }
        }
        String communityOverride = null;
        String sourceDebug = null;
        if (cliOpts.hasOption("community")) {
            communityOverride = (String) cliOpts.getOptionValue("community");
        } else if (cliOpts.hasOption("source")) {
            sourceDebug = (String) cliOpts.getOptionValue("source");
        }
        SourceUtils.checkSourcesHaveHashes(communityOverride, sourceDebug);
        // (infrequently, i.e. as part of sync, check that all the sources have hashes, which the harvester depends on)

        new SynchronizationController().startService(nTimeOfLastCleanse_secs,
                SourceUtils.getSourcesToWorkOn(communityOverride, sourceDebug, true, true));
    } //TESTED
    else if (cliOpts.hasOption("custom")) {
        String jobOverride = null;
        if (cliOpts.hasOption("jobtitle")) {
            jobOverride = (String) cliOpts.getOptionValue("jobtitle");
        }
        CustomProcessingController customPxController = new CustomProcessingController();
        customPxController.checkScheduledJobs(jobOverride);
        customPxController.checkRunningJobs();
        customPxController.runThroughSavedQueues();
    } else {
        //Test code for distribution:
        //         boolean bSync = true;
        //         LinkedList<SourcePojo> testSources = null;
        //         LinkedList<SourcePojo> batchOfSources = null;
        //         testSources = getSourcesToWorkOn(null, null, bSync, true);
        //         System.out.println("Sources considered = " + testSources.size());
        //         // Grab a batch of sources
        //         batchOfSources = getDistributedSourceList(testSources, null, false);
        //         System.out.println("Sources left = " + testSources.size());
        //         System.out.println("Sources extracted = " + new com.google.gson.Gson().toJson(batchOfSources));

        System.out.println(
                "com.ikanow.infinit.e.core.server [--config <config-dir>] [--harvest [<other options>]|--sync [<other options>]|--custom [<other options>]]");
        System.exit(-1);
    }
    MongoApplicationLock.registerAppShutdown(); // (in case the saved queries are running, for some reason built in startup hook not running)
}
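
A slightly different use: here file.exists() selects between two logging configuration formats, falling back from the XML variant (DOMConfigurator) to the plain properties file (PropertyConfigurator) when the .xml file is absent.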

From source file:com.mvdb.etl.actions.ExtractDBChanges.java

public static void main(String[] args) throws JSONException {

    ActionUtils.setUpInitFileProperty();
    //        boolean success = ActionUtils.markActionChainBroken("Just Testing");        
    //        System.exit(success ? 0 : 1);
    ActionUtils.assertActionChainNotBroken();
    ActionUtils.assertEnvironmentSetupOk();
    ActionUtils.assertFileExists("~/.mvdb", "~/.mvdb missing. Existing.");
    ActionUtils.assertFileExists("~/.mvdb/status.InitCustomerData.complete",
            "300init-customer-data.sh not executed yet. Exiting");
    //This check is not required as data can be modified any number of times
    //ActionUtils.assertFileDoesNotExist("~/.mvdb/status.ModifyCustomerData.complete", "ModifyCustomerData already done. Start with 100init.sh if required. Exiting");

    ActionUtils.createMarkerFile("~/.mvdb/status.ExtractDBChanges.start", true);

    //String schemaDescription = "{ 'root' : [{'table' : 'orders', 'keyColumn' : 'order_id', 'updateTimeColumn' : 'update_time'}]}";

    String customerName = null;
    final CommandLineParser cmdLinePosixParser = new PosixParser();
    final Options posixOptions = constructPosixOptions();
    CommandLine commandLine;
    try {
        commandLine = cmdLinePosixParser.parse(posixOptions, args);
        if (commandLine.hasOption("customer")) {
            customerName = commandLine.getOptionValue("customer");
        }
    } catch (ParseException parseException) // checked exception
    {
        System.err.println(
                "Encountered exception while parsing using PosixParser:\n" + parseException.getMessage());
    }

    if (customerName == null) {
        System.err.println("Could not find customerName. Aborting...");
        System.exit(1);
    }

    ApplicationContext context = Top.getContext();

    final OrderDAO orderDAO = (OrderDAO) context.getBean("orderDAO");
    final ConfigurationDAO configurationDAO = (ConfigurationDAO) context.getBean("configurationDAO");
    final GenericDAO genericDAO = (GenericDAO) context.getBean("genericDAO");
    File snapshotDirectory = getSnapshotDirectory(configurationDAO, customerName);
    try {
        FileUtils.writeStringToFile(new File("/tmp/etl.extractdbchanges.directory.txt"),
                snapshotDirectory.getName(), false);
    } catch (IOException e) {
        e.printStackTrace();
        System.exit(1);
        return;
    }
    long currentTime = new Date().getTime();
    Configuration lastRefreshTimeConf = configurationDAO.find(customerName, "last-refresh-time");
    Configuration schemaDescriptionConf = configurationDAO.find(customerName, "schema-description");
    long lastRefreshTime = Long.parseLong(lastRefreshTimeConf.getValue());
    OrderJsonFileConsumer orderJsonFileConsumer = new OrderJsonFileConsumer(snapshotDirectory);
    Map<String, ColumnMetadata> metadataMap = orderDAO.findMetadata();
    //write file schema-orders.dat in snapshotDirectory
    genericDAO.fetchMetadata("orders", snapshotDirectory);
    //writes files: header-orders.dat, data-orders.dat in snapshotDirectory
    JSONObject json = new JSONObject(schemaDescriptionConf.getValue());
    JSONArray rootArray = json.getJSONArray("root");
    int length = rootArray.length();
    for (int i = 0; i < length; i++) {
        JSONObject jsonObject = rootArray.getJSONObject(i);
        String table = jsonObject.getString("table");
        String keyColumnName = jsonObject.getString("keyColumn");
        String updateTimeColumnName = jsonObject.getString("updateTimeColumn");
        System.out.println("table:" + table + ", keyColumn: " + keyColumnName + ", updateTimeColumn: "
                + updateTimeColumnName);
        genericDAO.fetchAll2(snapshotDirectory, new Timestamp(lastRefreshTime), table, keyColumnName,
                updateTimeColumnName);
    }

    //Unlikely failure
    //But Need to factor this into a separate task so that extraction does not have to be repeated. 
    //Extraction is an expensive task. 
    try {
        String sourceDirectoryAbsolutePath = snapshotDirectory.getAbsolutePath();

        File sourceRelativeDirectoryPath = getRelativeSnapShotDirectory(configurationDAO,
                sourceDirectoryAbsolutePath);
        String hdfsRoot = ActionUtils.getConfigurationValue(ConfigurationKeys.GLOBAL_CUSTOMER,
                ConfigurationKeys.GLOBAL_HDFS_ROOT);
        String targetDirectoryFullPath = hdfsRoot + "/data" + sourceRelativeDirectoryPath;

        ActionUtils.copyLocalDirectoryToHdfsDirectory(sourceDirectoryAbsolutePath, targetDirectoryFullPath);
        String dirName = snapshotDirectory.getName();
        ActionUtils.setConfigurationValue(customerName, ConfigurationKeys.LAST_COPY_TO_HDFS_DIRNAME, dirName);
    } catch (Throwable e) {
        e.printStackTrace();
        logger.error("Objects Extracted from database. But copy of snapshot directory<"
                + snapshotDirectory.getAbsolutePath() + "> to hdfs <" + ""
                + ">failed. Fix the problem and redo extract.", e);
        System.exit(1);
    }

    //Unlikely failure
    //But Need to factor this into a separate task so that extraction does not have to be repeated. 
    //Extraction is an expensive task. 
    String targetZip = null;
    try {
        File targetZipDirectory = new File(snapshotDirectory.getParent(), "archives");
        if (!targetZipDirectory.exists()) {
            boolean success = targetZipDirectory.mkdirs();
            if (!success) {
                logger.error("Objects copied to hdfs. But able to create archive directory <"
                        + targetZipDirectory.getAbsolutePath() + ">. Fix the problem and redo extract.");
                System.exit(1);
            }
        }
        targetZip = new File(targetZipDirectory, snapshotDirectory.getName() + ".zip").getAbsolutePath();
        ActionUtils.zipFullDirectory(snapshotDirectory.getAbsolutePath(), targetZip);
    } catch (Throwable e) {
        e.printStackTrace();
        logger.error("Objects copied to hdfs. But zipping of snapshot directory<"
                + snapshotDirectory.getAbsolutePath() + "> to  <" + targetZip
                + ">failed. Fix the problem and redo extract.", e);
        System.exit(1);
    }

    //orderDAO.findAll(new Timestamp(lastRefreshTime), orderJsonFileConsumer);
    Configuration updateRefreshTimeConf = new Configuration(customerName, "last-refresh-time",
            String.valueOf(currentTime));
    configurationDAO.update(updateRefreshTimeConf, String.valueOf(lastRefreshTimeConf.getValue()));
    ActionUtils.createMarkerFile("~/.mvdb/status.ExtractDBChanges.complete", true);

}
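
Rather than calling File.exists() inline, this example mostly delegates the check to ActionUtils.assertFileExists(...), which fails fast with a message; the one direct use, targetZipDirectory.exists(), guards mkdirs() just like the earlier examples.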

From source file:com.act.lcms.db.io.LoadPlateCompositionIntoDB.java

public static void main(String[] args) throws Exception {
    Options opts = new Options();
    opts.addOption(Option.builder("t").argName("type")
            .desc("The type of plate composition in this file, valid options are: "
                    + StringUtils.join(Arrays.asList(Plate.CONTENT_TYPE.values()), ", "))
            .hasArg().longOpt("plate-type").required().build());
    opts.addOption(Option.builder("i").argName("path").desc("The plate composition file to read").hasArg()
            .longOpt("input-file").required().build());

    // DB connection options.
    opts.addOption(Option.builder().argName("database url")
            .desc("The url to use when connecting to the LCMS db").hasArg().longOpt("db-url").build());
    opts.addOption(Option.builder("u").argName("database user").desc("The LCMS DB user").hasArg()
            .longOpt("db-user").build());
    opts.addOption(Option.builder("p").argName("database password").desc("The LCMS DB password").hasArg()
            .longOpt("db-pass").build());
    opts.addOption(Option.builder("H").argName("database host")
            .desc(String.format("The LCMS DB host (default = %s)", DB.DEFAULT_HOST)).hasArg().longOpt("db-host")
            .build());
    opts.addOption(Option.builder("P").argName("database port")
            .desc(String.format("The LCMS DB port (default = %d)", DB.DEFAULT_PORT)).hasArg().longOpt("db-port")
            .build());
    opts.addOption(Option.builder("N").argName("database name")
            .desc(String.format("The LCMS DB name (default = %s)", DB.DEFAULT_DB_NAME)).hasArg()
            .longOpt("db-name").build());

    // Everybody needs a little help from their friends.
    opts.addOption(
            Option.builder("h").argName("help").desc("Prints this help message").longOpt("help").build());

    CommandLine cl = null;
    try {
        CommandLineParser parser = new DefaultParser();
        cl = parser.parse(opts, args);
    } catch (ParseException e) {
        System.err.format("Argument parsing failed: %s\n", e.getMessage());
        HelpFormatter fmt = new HelpFormatter();
        fmt.printHelp(LoadPlateCompositionIntoDB.class.getCanonicalName(), opts, true);
        System.exit(1);
    }

    if (cl.hasOption("help")) {
        new HelpFormatter().printHelp(LoadPlateCompositionIntoDB.class.getCanonicalName(), opts, true);
        return;
    }

    File inputFile = new File(cl.getOptionValue("input-file"));
    if (!inputFile.exists()) {
        System.err.format("Unable to find input file at %s\n", cl.getOptionValue("input-file"));
        new HelpFormatter().printHelp(LoadPlateCompositionIntoDB.class.getCanonicalName(), opts, true);
        System.exit(1);
    }

    PlateCompositionParser parser = new PlateCompositionParser();
    parser.processFile(inputFile);

    Plate.CONTENT_TYPE contentType = null;
    try {
        contentType = Plate.CONTENT_TYPE.valueOf(cl.getOptionValue("plate-type"));
    } catch (IllegalArgumentException e) {
        System.err.format("Unrecognized plate type '%s'\n", cl.getOptionValue("plate-type"));
        new HelpFormatter().printHelp(LoadPlateCompositionIntoDB.class.getCanonicalName(), opts, true);
        System.exit(1);
    }

    DB db;
    if (cl.hasOption("db-url")) {
        db = new DB().connectToDB(cl.getOptionValue("db-url"));
    } else {
        Integer port = null;
        if (cl.getOptionValue("P") != null) {
            port = Integer.parseInt(cl.getOptionValue("P"));
        }
        db = new DB().connectToDB(cl.getOptionValue("H"), port, cl.getOptionValue("N"), cl.getOptionValue("u"),
                cl.getOptionValue("p"));
    }

    try {
        db.getConn().setAutoCommit(false);

        Plate p = Plate.getOrInsertFromPlateComposition(db, parser, contentType);

        switch (contentType) {
        case LCMS:
            List<LCMSWell> LCMSWells = LCMSWell.getInstance().insertFromPlateComposition(db, parser, p);
            for (LCMSWell LCMSWell : LCMSWells) {
                System.out.format("%d: %d x %d  %s  %s\n", LCMSWell.getId(), LCMSWell.getPlateColumn(),
                        LCMSWell.getPlateRow(), LCMSWell.getMsid(), LCMSWell.getComposition());
            }
            break;
        case STANDARD:
            List<StandardWell> standardWells = StandardWell.getInstance().insertFromPlateComposition(db, parser,
                    p);
            for (StandardWell standardWell : standardWells) {
                System.out.format("%d: %d x %d  %s\n", standardWell.getId(), standardWell.getPlateColumn(),
                        standardWell.getPlateRow(), standardWell.getChemical());
            }
            break;
        case DELIVERED_STRAIN:
            List<DeliveredStrainWell> deliveredStrainWells = DeliveredStrainWell.getInstance()
                    .insertFromPlateComposition(db, parser, p);
            for (DeliveredStrainWell deliveredStrainWell : deliveredStrainWells) {
                System.out.format("%d: %d x %d (%s) %s %s \n", deliveredStrainWell.getId(),
                        deliveredStrainWell.getPlateColumn(), deliveredStrainWell.getPlateRow(),
                        deliveredStrainWell.getWell(), deliveredStrainWell.getMsid(),
                        deliveredStrainWell.getComposition());
            }
            break;
        case INDUCTION:
            List<InductionWell> inductionWells = InductionWell.getInstance().insertFromPlateComposition(db,
                    parser, p);
            for (InductionWell inductionWell : inductionWells) {
                System.out.format("%d: %d x %d %s %s %s %d\n", inductionWell.getId(),
                        inductionWell.getPlateColumn(), inductionWell.getPlateRow(), inductionWell.getMsid(),
                        inductionWell.getComposition(), inductionWell.getChemical(), inductionWell.getGrowth());
            }
            break;
        case PREGROWTH:
            List<PregrowthWell> pregrowthWells = PregrowthWell.getInstance().insertFromPlateComposition(db,
                    parser, p);
            for (PregrowthWell pregrowthWell : pregrowthWells) {
                System.out.format("%d: %d x %d (%s @ %s) %s %s %d\n", pregrowthWell.getId(),
                        pregrowthWell.getPlateColumn(), pregrowthWell.getPlateRow(),
                        pregrowthWell.getSourcePlate(), pregrowthWell.getSourceWell(), pregrowthWell.getMsid(),
                        pregrowthWell.getComposition(), pregrowthWell.getGrowth());
            }
            break;
        case FEEDING_LCMS:
            List<FeedingLCMSWell> feedingLCMSWells = FeedingLCMSWell.getInstance()
                    .insertFromPlateComposition(db, parser, p);
            for (FeedingLCMSWell feedingLCMSWell : feedingLCMSWells) {
                System.out.format("%d: %d x %d (%s @ %s) %s %s %f\n", feedingLCMSWell.getId(),
                        feedingLCMSWell.getPlateColumn(), feedingLCMSWell.getPlateRow(),
                        feedingLCMSWell.getMsid(), feedingLCMSWell.getComposition(),
                        feedingLCMSWell.getExtract(), feedingLCMSWell.getChemical(),
                        feedingLCMSWell.getConcentration());
            }
            break;
        default:
            System.err.format("Unrecognized/unimplemented data type '%s'\n", contentType);
            break;
        }
        // If we didn't encounter an exception, commit the transaction.
        db.getConn().commit();
    } catch (Exception e) {
        System.err.format("Caught exception when trying to load plate composition, rolling back. %s\n",
                e.getMessage());
        db.getConn().rollback();
        throw (e);
    } finally {
        db.getConn().close();
    }

}

From source file:com.sanaldiyar.projects.nanohttpd.nanoinstaller.App.java

public static void main(String[] args) {
    try {
        String executableName = new File(
                App.class.getProtectionDomain().getCodeSource().getLocation().getPath()).getName();
        Options options = new Options();

        Option destination = OptionBuilder.withArgName("folder").withLongOpt("destination").hasArgs(1)
                .withDescription("destionation folder").withType(String.class).create("d");

        Option lrfolder = OptionBuilder.withArgName("folder").withLongOpt("localrepo").hasArgs(1)
                .withDescription("local repository folder").withType(String.class).create("lr");

        Option rmlrfolder = OptionBuilder.withLongOpt("deletelocalrepo").hasArg(false)
                .withDescription("delete local repository after installation").create("dlr");

        Option help = OptionBuilder.withLongOpt("help").withDescription("print this help").create("h");

        options.addOption(destination);
        options.addOption(lrfolder);
        options.addOption(rmlrfolder);
        options.addOption(help);

        HelpFormatter helpFormatter = new HelpFormatter();

        CommandLineParser commandLineParser = new PosixParser();
        CommandLine commands;
        try {
            commands = commandLineParser.parse(options, args);
        } catch (ParseException ex) {
            System.out.println("Error at parsing arguments");
            helpFormatter.printHelp("java -jar " + executableName, options);
            return;
        }

        if (commands.hasOption("h")) {
            helpFormatter.printHelp("java -jar " + executableName, options);
            return;
        }

        String sdest = commands.getOptionValue("d", "./nanosystem");
        System.out.println("The nano system will be installed into " + sdest);
        File dest = new File(sdest);
        if (dest.exists()) {
            FileUtils.deleteDirectory(dest);
        }
        dest.mkdirs();
        File bin = new File(dest, "bin");
        bin.mkdir();
        File bundle = new File(dest, "bundle");
        bundle.mkdir();
        File conf = new File(dest, "conf");
        conf.mkdir();
        File core = new File(dest, "core");
        core.mkdir();
        File logs = new File(dest, "logs");
        logs.mkdir();
        File nanohttpdcore = new File(dest, "nanohttpd-core");
        nanohttpdcore.mkdir();
        File nanohttpdservices = new File(dest, "nanohttpd-services");
        nanohttpdservices.mkdir();
        File temp = new File(dest, "temp");
        temp.mkdir();
        File apps = new File(dest, "apps");
        apps.mkdir();

        File local = new File(commands.getOptionValue("lr", "./local-repository"));
        Collection<RemoteRepository> repositories = Arrays.asList(
                new RemoteRepository("sanaldiyar-snap", "default", "http://maven2.sanaldiyar.com/snap-repo"),
                new RemoteRepository("central", "default", "http://repo1.maven.org/maven2/"));
        Aether aether = new Aether(repositories, local);

        //Copy core felix main
        System.out.println("Downloading Felix main executable");
        List<Artifact> felixmain = aether.resolve(
                new DefaultArtifact("org.apache.felix", "org.apache.felix.main", "jar", "LATEST"), "runtime");
        for (Artifact artifact : felixmain) {
            if (artifact.getArtifactId().equals("org.apache.felix.main")) {
                FileUtils.copyFile(artifact.getFile(), new File(bin, "felix-main.jar"));
                System.out.println(artifact.getArtifactId());
                break;
            }
        }
        System.out.println("OK");

        //Copy core felix bundles
        System.out.println("Downloading Felix core bundles");
        Collection<String> felixcorebundles = Arrays.asList("fileinstall", "bundlerepository", "gogo.runtime",
                "gogo.shell", "gogo.command");
        for (String felixcorebunlde : felixcorebundles) {
            List<Artifact> felixcore = aether.resolve(new DefaultArtifact("org.apache.felix",
                    "org.apache.felix." + felixcorebunlde, "jar", "LATEST"), "runtime");
            for (Artifact artifact : felixcore) {
                if (artifact.getArtifactId().equals("org.apache.felix." + felixcorebunlde)) {
                    FileUtils.copyFileToDirectory(artifact.getFile(), core);
                    System.out.println(artifact.getArtifactId());
                }
            }
        }
        System.out.println("OK");

        //Copy nanohttpd core bundles
        System.out.println("Downloading nanohttpd core bundles and configurations");
        List<Artifact> nanohttpdcorebundle = aether.resolve(
                new DefaultArtifact("com.sanaldiyar.projects.nanohttpd", "nanohttpd", "jar", "LATEST"),
                "runtime");
        for (Artifact artifact : nanohttpdcorebundle) {
            if (!artifact.getArtifactId().equals("org.osgi.core")) {
                FileUtils.copyFileToDirectory(artifact.getFile(), nanohttpdcore);
                System.out.println(artifact.getArtifactId());
            }
        }

        nanohttpdcorebundle = aether.resolve(
                new DefaultArtifact("com.sanaldiyar.projects", "engender", "jar", "LATEST"), "runtime");
        for (Artifact artifact : nanohttpdcorebundle) {
            FileUtils.copyFileToDirectory(artifact.getFile(), nanohttpdcore);
            System.out.println(artifact.getArtifactId());
        }

        nanohttpdcorebundle = aether.resolve(
                new DefaultArtifact("org.codehaus.jackson", "jackson-mapper-asl", "jar", "1.9.5"), "runtime");
        for (Artifact artifact : nanohttpdcorebundle) {
            FileUtils.copyFileToDirectory(artifact.getFile(), nanohttpdcore);
            System.out.println(artifact.getArtifactId());
        }

        nanohttpdcorebundle = aether
                .resolve(new DefaultArtifact("org.mongodb", "mongo-java-driver", "jar", "LATEST"), "runtime");
        for (Artifact artifact : nanohttpdcorebundle) {
            FileUtils.copyFileToDirectory(artifact.getFile(), nanohttpdcore);
            System.out.println(artifact.getArtifactId());
        }

        //Copy nanohttpd conf
        FileUtils.copyInputStreamToFile(App.class.getResourceAsStream("/nanohttpd.conf"),
                new File(dest, "nanohttpd.conf"));
        System.out.println("Configuration: nanohttpd.conf");

        //Copy nanohttpd start script
        File startsh = new File(dest, "start.sh");
        FileUtils.copyInputStreamToFile(App.class.getResourceAsStream("/start.sh"), startsh);
        startsh.setExecutable(true);
        System.out.println("Script: start.sh");

        System.out.println("OK");

        //Copy nanohttpd service bundles
        System.out.println("Downloading nanohttpd service bundles");
        List<Artifact> nanohttpdservicebundle = aether
                .resolve(new DefaultArtifact("com.sanaldiyar.projects.nanohttpd", "mongodbbasedsessionhandler",
                        "jar", "1.0-SNAPSHOT"), "runtime");
        for (Artifact artifact : nanohttpdservicebundle) {
            if (artifact.getArtifactId().equals("mongodbbasedsessionhandler")) {
                FileUtils.copyFileToDirectory(artifact.getFile(), nanohttpdservices);
                System.out.println(artifact.getArtifactId());
                break;
            }
        }

        //Copy nanohttpd mongodbbasedsessionhandler conf
        FileUtils.copyInputStreamToFile(App.class.getResourceAsStream("/mdbbasedsh.conf"),
                new File(dest, "mdbbasedsh.conf"));
        System.out.println("Configuration: mdbbasedsh.conf");

        System.out.println("OK");

        if (commands.hasOption("dlr")) {
            System.out.println("Local repository is deleting");
            FileUtils.deleteDirectory(local);
            System.out.println("OK");
        }

        System.out.println("You can reconfigure nanohttpd and services. To start system run start.sh script");

    } catch (Exception ex) {
        System.out.println("Error at installing.");
    }
}

From source file:com.linkedin.helix.examples.BootstrapProcess.java

public static void main(String[] args) throws Exception {
    String zkConnectString = "localhost:2181";
    String clusterName = "storage-integration-cluster";
    String instanceName = "localhost_8905";
    String file = null;
    String stateModelValue = "MasterSlave";
    int delay = 0;
    boolean skipZeroArgs = true; // false is for dev testing
    if (!skipZeroArgs || args.length > 0) {
        CommandLine cmd = processCommandLineArgs(args);
        zkConnectString = cmd.getOptionValue(zkServer);
        clusterName = cmd.getOptionValue(cluster);

        String host = cmd.getOptionValue(hostAddress);
        String portString = cmd.getOptionValue(hostPort);
        int port = Integer.parseInt(portString);
        instanceName = host + "_" + port;

        file = cmd.getOptionValue(configFile);
        if (file != null) {
            File f = new File(file);
            if (!f.exists()) {
                System.err.println("static config file doesn't exist");
                System.exit(1);
            }
        }

        stateModelValue = cmd.getOptionValue(stateModel);
        if (cmd.hasOption(transDelay)) {
            try {
                delay = Integer.parseInt(cmd.getOptionValue(transDelay));
                if (delay < 0) {
                    throw new Exception("delay must be positive");
                }
            } catch (Exception e) {
                e.printStackTrace();
                delay = 0;
            }
        }
    }
    // Espresso_driver.py will consume this
    System.out.println("Starting Process with ZK:" + zkConnectString);

    BootstrapProcess process = new BootstrapProcess(zkConnectString, clusterName, instanceName, file,
            stateModelValue, delay);

    process.start();
    Thread.currentThread().join();
}
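
As in the Upload example, !f.exists() validates a user-supplied configuration path and exits with a diagnostic up front, rather than letting a FileNotFoundException surface later during bootstrap.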

From source file:com.act.biointerpretation.l2expansion.L2FilteringDriver.java

public static void main(String[] args) throws Exception {

    // Build command line parser.
    Options opts = new Options();
    for (Option.Builder b : OPTION_BUILDERS) {
        opts.addOption(b.build());
    }

    CommandLine cl = null;
    try {
        CommandLineParser parser = new DefaultParser();
        cl = parser.parse(opts, args);
    } catch (ParseException e) {
        LOGGER.error("Argument parsing failed: %s", e.getMessage());
        HELP_FORMATTER.printHelp(L2FilteringDriver.class.getCanonicalName(), HELP_MESSAGE, opts, null, true);
        System.exit(1);
    }

    // Print help.
    if (cl.hasOption(OPTION_HELP)) {
        HELP_FORMATTER.printHelp(L2FilteringDriver.class.getCanonicalName(), HELP_MESSAGE, opts, null, true);
        return;
    }

    checkFilterOptionIsValid(OPTION_CHEMICAL_FILTER, cl);
    checkFilterOptionIsValid(OPTION_REACTION_FILTER, cl);

    // Get corpus files.
    File corpusFile = new File(cl.getOptionValue(OPTION_INPUT_CORPUS));
    if (!corpusFile.exists()) {
        LOGGER.error("Input corpus file does not exist.");
        return;
    }

    File outputFile = new File(cl.getOptionValue(OPTION_OUTPUT_PATH));
    outputFile.createNewFile();
    if (outputFile.isDirectory()) {
        LOGGER.error("Output file is directory.");
        System.exit(1);
    }

    LOGGER.info("Reading corpus from file.");
    L2PredictionCorpus predictionCorpus = L2PredictionCorpus.readPredictionsFromJsonFile(corpusFile);
    LOGGER.info("Read in corpus with %d predictions.", predictionCorpus.getCorpus().size());
    LOGGER.info("Corpus has %d distinct substrates.", predictionCorpus.getUniqueSubstrateInchis().size());

    if (cl.hasOption(OPTION_FILTER_SUBSTRATES)) {
        LOGGER.info("Filtering by substrates.");
        File substratesFile = new File(cl.getOptionValue(OPTION_FILTER_SUBSTRATES));
        L2InchiCorpus inchis = new L2InchiCorpus();
        inchis.loadCorpus(substratesFile);
        Set<String> inchiSet = new HashSet<String>();
        inchiSet.addAll(inchis.getInchiList());

        predictionCorpus = predictionCorpus
                .applyFilter(prediction -> inchiSet.containsAll(prediction.getSubstrateInchis()));

        predictionCorpus.writePredictionsToJsonFile(outputFile);
        LOGGER.info("Done writing filtered corpus to file.");
        return;
    }

    if (cl.hasOption(OPTION_SPLIT_BY_RO)) {
        LOGGER.info("Splitting corpus into distinct corpuses for each ro.");
        Map<String, L2PredictionCorpus> corpusMap = predictionCorpus
                .splitCorpus(prediction -> prediction.getProjectorName());

        for (Map.Entry<String, L2PredictionCorpus> entry : corpusMap.entrySet()) {
            String fileName = cl.getOptionValue(OPTION_OUTPUT_PATH) + "." + entry.getKey();
            File oneOutputFile = new File(fileName);
            entry.getValue().writePredictionsToJsonFile(oneOutputFile);
        }
        LOGGER.info("Done writing split corpuses to file.");
        return;
    }

    predictionCorpus = runDbLookups(cl, predictionCorpus, opts);

    LOGGER.info("Applying filters.");
    predictionCorpus = applyFilter(predictionCorpus, ALL_CHEMICALS_IN_DB, cl, OPTION_CHEMICAL_FILTER);
    predictionCorpus = applyFilter(predictionCorpus, REACTION_MATCHES_DB, cl, OPTION_REACTION_FILTER);
    LOGGER.info("Filtered corpus has %d predictions.", predictionCorpus.getCorpus().size());

    LOGGER.info("Printing final corpus.");
    predictionCorpus.writePredictionsToJsonFile(outputFile);

    LOGGER.info("L2FilteringDriver complete!.");
}