Example usage for java.io.File.createNewFile()

List of usage examples for java.io.File.createNewFile()

Introduction

On this page you can find example usages of java.io.File.createNewFile().

Prototype

public boolean createNewFile() throws IOException 

Source Link

Document

Atomically creates a new, empty file named by this abstract pathname if and only if a file with this name does not yet exist.
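
A minimal, self-contained sketch of the pattern most of the examples below follow (the path is hypothetical and chosen only for illustration): make sure the parent directory exists, then call createNewFile() and use its boolean return value to find out whether the file was actually created or already present.

import java.io.File;
import java.io.IOException;

public class CreateNewFileExample {
    public static void main(String[] args) throws IOException {
        // Hypothetical path, used for illustration only
        File file = new File("output/data/example.txt");

        // Ensure the parent directory exists; mkdirs() does nothing if it already does
        file.getParentFile().mkdirs();

        // createNewFile() checks for existence and creates the file atomically,
        // returning true only if this call actually created it
        if (file.createNewFile()) {
            System.out.println("Created: " + file.getAbsolutePath());
        } else {
            System.out.println("Already exists: " + file.getAbsolutePath());
        }
    }
}

Note that the check-then-act idiom seen in several of the examples below (if (!file.exists()) { file.createNewFile(); }) is redundant: createNewFile() performs the existence check and the creation as a single atomic operation and simply returns false when the file already exists.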

Usage

From source file:com.joliciel.talismane.terminology.Main.java

public static void main(String[] args) throws Exception {
    String termFilePath = null;
    String outFilePath = null;
    Command command = Command.extract;
    int depth = -1;
    String databasePropertiesPath = null;
    String projectCode = null;

    Map<String, String> argMap = TalismaneConfig.convertArgs(args);

    String logConfigPath = argMap.get("logConfigFile");
    if (logConfigPath != null) {
        argMap.remove("logConfigFile");
        Properties props = new Properties();
        props.load(new FileInputStream(logConfigPath));
        PropertyConfigurator.configure(props);
    }

    Map<String, String> innerArgs = new HashMap<String, String>();
    for (Entry<String, String> argEntry : argMap.entrySet()) {
        String argName = argEntry.getKey();
        String argValue = argEntry.getValue();

        if (argName.equals("command"))
            command = Command.valueOf(argValue);
        else if (argName.equals("termFile"))
            termFilePath = argValue;
        else if (argName.equals("outFile"))
            outFilePath = argValue;
        else if (argName.equals("depth"))
            depth = Integer.parseInt(argValue);
        else if (argName.equals("databaseProperties"))
            databasePropertiesPath = argValue;
        else if (argName.equals("projectCode"))
            projectCode = argValue;
        else
            innerArgs.put(argName, argValue);
    }
    if (termFilePath == null && databasePropertiesPath == null)
        throw new TalismaneException("Required argument: termFile or databasePropertiesPath");

    if (termFilePath != null) {
        String currentDirPath = System.getProperty("user.dir");
        File termFileDir = new File(currentDirPath);
        if (termFilePath.lastIndexOf("/") >= 0) {
            String termFileDirPath = termFilePath.substring(0, termFilePath.lastIndexOf("/"));
            termFileDir = new File(termFileDirPath);
            termFileDir.mkdirs();
        }
    }

    long startTime = new Date().getTime();
    try {
        TerminologyServiceLocator terminologyServiceLocator = TerminologyServiceLocator.getInstance();
        TerminologyService terminologyService = terminologyServiceLocator.getTerminologyService();
        TerminologyBase terminologyBase = null;

        if (projectCode == null)
            throw new TalismaneException("Required argument: projectCode");

        File file = new File(databasePropertiesPath);
        FileInputStream fis = new FileInputStream(file);
        Properties dataSourceProperties = new Properties();
        dataSourceProperties.load(fis);
        terminologyBase = terminologyService.getPostGresTerminologyBase(projectCode, dataSourceProperties);

        if (command.equals(Command.analyse) || command.equals(Command.extract)) {
            if (depth < 0)
                throw new TalismaneException("Required argument: depth");

            if (command.equals(Command.analyse)) {
                innerArgs.put("command", "analyse");
            } else {
                innerArgs.put("command", "process");
            }

            TalismaneFrench talismaneFrench = new TalismaneFrench();
            TalismaneConfig config = new TalismaneConfig(innerArgs, talismaneFrench);

            PosTagSet tagSet = TalismaneSession.getPosTagSet();
            Charset outputCharset = config.getOutputCharset();

            TermExtractor termExtractor = terminologyService.getTermExtractor(terminologyBase);
            termExtractor.setMaxDepth(depth);
            termExtractor.setOutFilePath(termFilePath);
            termExtractor.getIncludeChildren().add(tagSet.getPosTag("P"));
            termExtractor.getIncludeChildren().add(tagSet.getPosTag("P+D"));
            termExtractor.getIncludeChildren().add(tagSet.getPosTag("CC"));

            termExtractor.getIncludeWithParent().add(tagSet.getPosTag("DET"));

            if (outFilePath != null) {
                if (outFilePath.lastIndexOf("/") >= 0) {
                    String outFileDirPath = outFilePath.substring(0, outFilePath.lastIndexOf("/"));
                    File outFileDir = new File(outFileDirPath);
                    outFileDir.mkdirs();
                }
                File outFile = new File(outFilePath);
                outFile.delete();
                outFile.createNewFile();

                Writer writer = new BufferedWriter(
                        new OutputStreamWriter(new FileOutputStream(outFilePath), outputCharset));
                TermAnalysisWriter termAnalysisWriter = new TermAnalysisWriter(writer);
                termExtractor.addTermObserver(termAnalysisWriter);
            }

            Talismane talismane = config.getTalismane();
            talismane.setParseConfigurationProcessor(termExtractor);
            talismane.process();
        } else if (command.equals(Command.list)) {

            List<Term> terms = terminologyBase.getTermsByFrequency(2);
            for (Term term : terms) {
                LOG.debug("Term: " + term.getText());
                LOG.debug("Frequency: " + term.getFrequency());
                LOG.debug("Heads: " + term.getHeads());
                LOG.debug("Expansions: " + term.getExpansions());
                LOG.debug("Contexts: " + term.getContexts());
            }
        }
    } finally {
        long endTime = new Date().getTime();
        long totalTime = endTime - startTime;
        LOG.info("Total time: " + totalTime);
    }
}

From source file:com.maxpowered.amazon.advertising.api.app.App.java

public static void main(final String... args)
        throws FileNotFoundException, IOException, JAXBException, XMLStreamException, InterruptedException {
    try (ClassPathXmlApplicationContext ctx = new ClassPathXmlApplicationContext("application-context.xml")) {
        /*
         * Get default options based on spring configs
         */
        final String inputDefault = getOptionDefaultBasedOnSpringProperty(ctx, PROPERTY_APP_INPUT, STD_IN_STR);
        final String processedDefault = inputDefault.equals(STD_IN_STR) ? DEFAULT_PROCESSED_FILE_BASE
                : inputDefault + PROCESSED_EXT;
        final String outputDefault = getOptionDefaultBasedOnSpringProperty(ctx, PROPERTY_APP_OUTPUT,
                STD_OUT_STR);
        int throttleDefault = Integer.valueOf(getOptionDefaultBasedOnSpringProperty(ctx, PROPERTY_APP_THROTTLE,
                String.valueOf(DEFAULT_APP_THROTTLE)));
        // Maximum of 25000 requests per hour
        throttleDefault = Math.min(throttleDefault, MAX_APP_THROTTLE);

        /*
         * Get options from the CLI args
         */
        final Options options = new Options();

        options.addOption("h", false, "Display this help.");
        options.addOption("i", true, "Set the file to read ASINs from. " + DEFAULT_STR + inputDefault);
        options.addOption("p", true, "Set the file to store processed ASINs in. " + DEFAULT_STR
                + processedDefault + " or '" + PROCESSED_EXT + "' appended to the input file name.");
        // Add a note that the output depends on the configured processors. If none are configured, it defaults to a
        // std.out processor
        options.addOption("o", true,
                "Set the file to write fetched info xml to via FileProcessor. " + DEFAULT_STR + outputDefault);
        options.addOption("1", false, "Override output file and always output fetched info xml to std.out.");
        options.addOption("t", true, "Set the requests per hour throttle (max of " + MAX_APP_THROTTLE + "). "
                + DEFAULT_STR + throttleDefault);

        final CommandLineParser parser = new DefaultParser();
        CommandLine cmd = null;
        boolean needsHelp = false;

        try {
            cmd = parser.parse(options, args);
        } catch (final ParseException e) {
            needsHelp = true;
        }

        // Check needsHelp first: cmd is null when argument parsing failed
        if (needsHelp || cmd.hasOption("h")) {
            final HelpFormatter formatter = new HelpFormatter();
            formatter.printHelp("App", options);
            return;
        }

        // Get throttle rate
        final int throttle = Math.min(
                cmd.hasOption("t") ? Integer.valueOf(cmd.getOptionValue("t")) : throttleDefault,
                MAX_APP_THROTTLE);
        LOG.debug("Throttle (default {}) is {} requests per hour", throttleDefault, throttle);
        // We don't want to hit our limit, just under an hour worth of milliseconds
        final int requestWait = 3540000 / throttle;

        // Get input stream
        String input;
        if (cmd.hasOption("i")) {
            input = cmd.getOptionValue("i");
        } else {
            input = inputDefault;
        }
        LOG.debug("Input name (default {}) is {}", inputDefault, input);

        // Get processed file
        String processed;
        if (cmd.hasOption("p")) {
            processed = cmd.getOptionValue("p");
        } else {
            processed = input + PROCESSED_EXT;
        }
        LOG.debug("Processed file name (default {}) is {}", processedDefault, processed);
        final File processedFile = new File(processed);
        processedFile.createNewFile();

        try (final InputStream inputStream = getInputStream(input)) {

            // Get output stream
            String output;
            if (cmd.hasOption("o")) {
                output = cmd.getOptionValue("o");
            } else {
                output = outputDefault;
            }
            if (cmd.hasOption("1")) {
                output = STD_OUT_STR;
            }
            LOG.debug("Output (default {}) name is {}", outputDefault, output);
            // Special logic to set the FileProcessor output
            if (output.equals(STD_OUT_STR)) {
                final FileProcessor fileProcessor = ctx.getBeanFactory().getBean(FileProcessor.class);
                fileProcessor.setOutputStream(System.out);
            } else if (!output.equals(outputDefault)) {
                final FileProcessor fileProcessor = ctx.getBeanFactory().getBean(FileProcessor.class);
                fileProcessor.setOutputFile(output);
            }

            // This could be easily configured through CLI or properties
            final List<String> responseGroups = Lists.newArrayList();
            for (final ResponseGroup responseGroup : new ResponseGroup[] { ResponseGroup.IMAGES,
                    ResponseGroup.ITEM_ATTRIBUTES }) {
                responseGroups.add(responseGroup.getResponseGroupName());
            }
            final String responseGroupString = Joiner.on(",").join(responseGroups);

            // Search the list of remaining ASINs
            final ProductFetcher fetcher = ctx.getBeanFactory().getBean(ProductFetcher.class);
            fetcher.setProcessedFile(processedFile);
            fetcher.setRequestWait(requestWait);
            fetcher.setInputStream(inputStream);
            fetcher.setResponseGroups(responseGroupString);

            // This ensures that statistics of processed ASINs almost always get printed at the end
            Runtime.getRuntime().addShutdownHook(new Thread() {
                @Override
                public void run() {
                    fetcher.logStatistics();
                }
            });

            fetcher.fetchProductInformation();
        }
    }
}

From source file:eu.mrbussy.pdfsplitter.Application.java

/**
 * Start the main program.
 * 
 * @param args
 *            - Arguments passed on to the program
 */
public static void main(String[] args) {

    // Read configurations
    try {
        String configDirname = FilenameUtils.concat(System.getProperty("user.home"),
                String.format(".%1$s%2$s", NAME, IOUtils.DIR_SEPARATOR));
        String filename = FilenameUtils.concat(configDirname, CONFIGURATION_FILE);

        // Check to see if the directory exists and the file can be created/opened
        File configDir = new File(configDirname);
        if (!configDir.exists())
            configDir.mkdir();

        // Check to see if the file exists. If not create it
        File file = new File(filename);
        if (!file.exists()) {
            file.createNewFile();
        }
        Configuration = new PropertiesConfiguration(file);
        // Automatically store the settings that change
        Configuration.setAutoSave(true);

    } catch (ConfigurationException | IOException ex) {
        // Unable to read the file. Probably because it does not exist --> create it.
        ex.printStackTrace();
    }

    // Set locale to a configured language
    Locale.setDefault(
            new Locale(Configuration.getString("language", "nl"), Configuration.getString("country", "NL")));

    // Start by parsing the command line
    ParseCommandline(args);

    // Display the help if required and leave the app
    if (arguments.hasOption("h")) {
        showHelp();
    }

    // Display the app version and leave the app.
    if (arguments.hasOption("v")) {
        showVersion();
    }

    // Not command line so start the app GUI
    if (!arguments.hasOption("c")) {
        try {
            // Change the look and feel
            UIManager.setLookAndFeel(
                    Configuration.getString("LookAndFeel", "com.sun.java.swing.plaf.gtk.GTKLookAndFeel"));
            javax.swing.SwingUtilities.invokeLater(new Runnable() {
                public void run() {
                    (new MainWindow()).setVisible(true);
                }
            });
        } catch (ClassNotFoundException | InstantiationException | IllegalAccessException
                | UnsupportedLookAndFeelException e) {
            // Something went wrong, so show the help
            showHelp();
        }

    }

}

From source file:com.newproject.ApacheHttp.java

public static void main(String[] args) throws Exception {
    CloseableHttpClient httpclient = HttpClients.createDefault();
    String jsonFilePath = "/Users/vikasmohandoss/Documents/Cloud/test.txt";
    String url = "http://www.sentiment140.com/api/bulkClassifyJson&appid=vm2446@columbia.edu";
    JSONParser jsonParser = new JSONParser();
    JSONObject jsonObject = new JSONObject();
    URL obj = new URL(url);
    HttpURLConnection con = (HttpURLConnection) obj.openConnection();
    try {
        FileReader fileReader = new FileReader(jsonFilePath);
        jsonObject = (JSONObject) jsonParser.parse(fileReader);
    } catch (FileNotFoundException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    } catch (ParseException e) {
        e.printStackTrace();
    }
    System.out.println(jsonObject.toString());
    /*try {
    /*HttpGet httpGet = new HttpGet("http://httpbin.org/get");
    CloseableHttpResponse response1 = httpclient.execute(httpGet);
    // The underlying HTTP connection is still held by the response object
    // to allow the response content to be streamed directly from the network socket.
    // In order to ensure correct deallocation of system resources
    // the user MUST call CloseableHttpResponse#close() from a finally clause.
    // Please note that if response content is not fully consumed the underlying
    // connection cannot be safely re-used and will be shut down and discarded
    // by the connection manager.
    try {
        System.out.println(response1.getStatusLine());
        HttpEntity entity1 = response1.getEntity();
        // do something useful with the response body
        // and ensure it is fully consumed
        EntityUtils.consume(entity1);
    } finally {
        response1.close();
    }
    HttpPost httpPost = new HttpPost("http://httpbin.org/post");
    List <NameValuePair> nvps = new ArrayList <NameValuePair>();
    nvps.add(new BasicNameValuePair("username", "vip"));
    nvps.add(new BasicNameValuePair("password", "secret"));
    httpPost.setEntity(new UrlEncodedFormEntity(nvps));
    CloseableHttpResponse response2 = httpclient.execute(httpPost);
            
    try {
        System.out.println(response2.getStatusLine());
        HttpEntity entity2 = response2.getEntity();
        // do something useful with the response body
        // and ensure it is fully consumed
        EntityUtils.consume(entity2);
    } finally {
        response2.close();
    }
    } finally {
    httpclient.close();
    }*/
    try {
        HttpPost request = new HttpPost("http://www.sentiment140.com/api/bulkClassifyJson");
        StringEntity params = new StringEntity(jsonObject.toString());
        request.addHeader("content-type", "application/json");
        request.setEntity(params);
        HttpResponse response = httpclient.execute(request);
        System.out.println(response.toString());
        String result = EntityUtils.toString(response.getEntity());
        System.out.println(result);
        try {
            File file = new File("/Users/vikasmohandoss/Documents/Cloud/sentiment.txt");
            // if the file doesn't exist, create it
            if (!file.exists()) {
                file.createNewFile();
            }
            FileWriter fw = new FileWriter(file.getAbsoluteFile());
            BufferedWriter bw = new BufferedWriter(fw);
            bw.write(result);
            bw.close();
            System.out.println("Done");
        } catch (IOException e) {
            e.printStackTrace();
        }
        // handle response here...
    } catch (Exception ex) {
        // handle exception here
    } finally {
        httpclient.close();
    }
}

From source file:it.sayservice.platform.smartplanner.utils.LegGenerator.java

public static void main(String[] args) throws IOException {

    Mongo m = new Mongo("localhost"); // default port 27017
    DB db = m.getDB("smart-planner-15x");
    DBCollection coll = db.getCollection("stops");

    // read trips.txt(trips,serviceId).
    List<String[]> trips = readFileGetLines("src/main/resources/schedules/17/trips.txt");
    List<String[]> stopTimes = readFileGetLines("src/main/resources/schedules/17/stop_times.txt");
    for (String[] words : trips) {
        try {
            String routeId = words[0].trim();
            String serviceId = words[1].trim();
            String tripId = words[2].trim();
            // fetch schedule for trips.
            for (int i = 0; i < stopTimes.size(); i++) {
                // already ordered by occurrence.
                String[] scheduleLeg = stopTimes.get(i);
                if (scheduleLeg[0].equalsIgnoreCase(tripId)) {
                    // check if next leg belongs to same trip
                    if (stopTimes.get(i + 1)[0].equalsIgnoreCase(tripId)) {

                        String arrivalT = scheduleLeg[1];
                        String departT = scheduleLeg[2];
                        String sourceId = scheduleLeg[3];
                        String destId = stopTimes.get(i + 1)[3];
                        // get coordinates of stops.
                        /**
                         * Make sure that the mongo stop collection is
                         * populated. If not, invoke
                         * http://localhost:7070/smart-planner/rest/getTransitTimes/TB_R2_R/1366776000000/1366819200000
                         */
                        Stop source = (Stop) getObjectByField(db, "id", sourceId, coll, Stop.class);
                        Stop destination = (Stop) getObjectByField(db, "id", destId, coll, Stop.class);
                        // System.out.println(tripId + ","
                        // + routeId + ","
                        // + source.getId() + ","
                        // + source.getLatitude() + ","
                        // + source.getLongitude() + ","
                        // + arrivalT + ","
                        // + destination.getId() + ","
                        // + destination.getLatitude() + ","
                        // + destination.getLongitude() + ","
                        // + departT + ","
                        // + serviceId
                        // );
                        String content = tripId + "," + routeId + "," + source.getStopId() + ","
                                + source.getLatitude() + "," + source.getLongitude() + "," + arrivalT + ","
                                + destination.getStopId() + "," + destination.getLatitude() + ","
                                + destination.getLongitude() + "," + departT + "," + "Giornaliero" + "\n";

                        File file = new File("src/main/resources/legs/legs.txt");
                        // single leg file
                        if (!file.exists()) {
                            file.createNewFile();
                        }

                        FileWriter fw = new FileWriter(file.getAbsoluteFile(), true);
                        BufferedWriter bw = new BufferedWriter(fw);
                        bw.write(content);
                        bw.close();
                        // individual trip leg file.
                        File fileT = new File("src/main/resources/legs/legs_" + routeId + ".txt");
                        FileWriter fwT = new FileWriter(fileT.getAbsoluteFile(), true);
                        BufferedWriter bwT = new BufferedWriter(fwT);
                        bwT.write(content);
                        bwT.close();

                    }
                }
            }

        } catch (Exception e) {
            System.out.println("Error parsing trip: " + words[0] + "," + words[1] + "," + words[2]);
        }
    }
    System.out.println("Done");
}

From source file:com.act.biointerpretation.l2expansion.L2FilteringDriver.java

public static void main(String[] args) throws Exception {

    // Build command line parser.
    Options opts = new Options();
    for (Option.Builder b : OPTION_BUILDERS) {
        opts.addOption(b.build());
    }

    CommandLine cl = null;
    try {
        CommandLineParser parser = new DefaultParser();
        cl = parser.parse(opts, args);
    } catch (ParseException e) {
        LOGGER.error("Argument parsing failed: %s", e.getMessage());
        HELP_FORMATTER.printHelp(L2FilteringDriver.class.getCanonicalName(), HELP_MESSAGE, opts, null, true);
        System.exit(1);
    }

    // Print help.
    if (cl.hasOption(OPTION_HELP)) {
        HELP_FORMATTER.printHelp(L2FilteringDriver.class.getCanonicalName(), HELP_MESSAGE, opts, null, true);
        return;
    }

    checkFilterOptionIsValid(OPTION_CHEMICAL_FILTER, cl);
    checkFilterOptionIsValid(OPTION_REACTION_FILTER, cl);

    // Get corpus files.
    File corpusFile = new File(cl.getOptionValue(OPTION_INPUT_CORPUS));
    if (!corpusFile.exists()) {
        LOGGER.error("Input corpus file does not exist.");
        return;
    }

    File outputFile = new File(cl.getOptionValue(OPTION_OUTPUT_PATH));
    outputFile.createNewFile();
    if (outputFile.isDirectory()) {
        LOGGER.error("Output file is directory.");
        System.exit(1);
    }

    LOGGER.info("Reading corpus from file.");
    L2PredictionCorpus predictionCorpus = L2PredictionCorpus.readPredictionsFromJsonFile(corpusFile);
    LOGGER.info("Read in corpus with %d predictions.", predictionCorpus.getCorpus().size());
    LOGGER.info("Corpus has %d distinct substrates.", predictionCorpus.getUniqueSubstrateInchis().size());

    if (cl.hasOption(OPTION_FILTER_SUBSTRATES)) {
        LOGGER.info("Filtering by substrates.");
        File substratesFile = new File(cl.getOptionValue(OPTION_FILTER_SUBSTRATES));
        L2InchiCorpus inchis = new L2InchiCorpus();
        inchis.loadCorpus(substratesFile);
        Set<String> inchiSet = new HashSet<String>();
        inchiSet.addAll(inchis.getInchiList());

        predictionCorpus = predictionCorpus
                .applyFilter(prediction -> inchiSet.containsAll(prediction.getSubstrateInchis()));

        predictionCorpus.writePredictionsToJsonFile(outputFile);
        LOGGER.info("Done writing filtered corpus to file.");
        return;
    }

    if (cl.hasOption(OPTION_SPLIT_BY_RO)) {
        LOGGER.info("Splitting corpus into distinct corpuses for each ro.");
        Map<String, L2PredictionCorpus> corpusMap = predictionCorpus
                .splitCorpus(prediction -> prediction.getProjectorName());

        for (Map.Entry<String, L2PredictionCorpus> entry : corpusMap.entrySet()) {
            String fileName = cl.getOptionValue(OPTION_OUTPUT_PATH) + "." + entry.getKey();
            File oneOutputFile = new File(fileName);
            entry.getValue().writePredictionsToJsonFile(oneOutputFile);
        }
        LOGGER.info("Done writing split corpuses to file.");
        return;
    }

    predictionCorpus = runDbLookups(cl, predictionCorpus, opts);

    LOGGER.info("Applying filters.");
    predictionCorpus = applyFilter(predictionCorpus, ALL_CHEMICALS_IN_DB, cl, OPTION_CHEMICAL_FILTER);
    predictionCorpus = applyFilter(predictionCorpus, REACTION_MATCHES_DB, cl, OPTION_REACTION_FILTER);
    LOGGER.info("Filtered corpus has %d predictions.", predictionCorpus.getCorpus().size());

    LOGGER.info("Printing final corpus.");
    predictionCorpus.writePredictionsToJsonFile(outputFile);

    LOGGER.info("L2FilteringDriver complete!.");
}

From source file:dependencies.DependencyResolving.java

/**
 * @param args the command line arguments
 */
public static void main(String[] args) {
    // TODO code application logic here
    JSONParser parser = new JSONParser(); //we use JSONParser in order to be able to read from JSON file
    try { //here we declare the file reader and define the path to the file dependencies.json
        Object obj = parser.parse(new FileReader(
                "C:\\Users\\Vladimir\\Documents\\NetBeansProjects\\DependenciesResolving\\src\\dependencies\\dependencies.json"));
        JSONObject project = (JSONObject) obj; //a JSON object containing all the data in the .json file
        JSONArray dependencies = (JSONArray) project.get("dependencies"); //get array of objects with key "dependencies"
        System.out.print("We need to install the following dependencies: ");
        Iterator<String> iterator = dependencies.iterator(); //define an iterator over the array "dependencies"
        while (iterator.hasNext()) {
            System.out.println(iterator.next());
        } //on the next line we declare another object, which parses a Parser object and reads from all_packages.json
        Object obj2 = parser.parse(new FileReader(
                "C:\\Users\\Vladimir\\Documents\\NetBeansProjects\\DependenciesResolving\\src\\dependencies\\all_packages.json"));
        JSONObject tools = (JSONObject) obj2; //a JSON object containing all the data in the file all_packages.json
        for (int i = 0; i < dependencies.size(); i++) {
            if (tools.containsKey(dependencies.get(i))) {
                System.out.println(
                        "In order to install " + dependencies.get(i) + ", we need the following programs:");
                JSONArray temporaryArray = (JSONArray) tools.get(dependencies.get(i)); //a temporary JSON array in which we store the keys and values of the dependencies
                for (i = 0; i < temporaryArray.size(); i++) {
                    System.out.println(temporaryArray.get(i));
                }
                ArrayList<Object> arraysOfJsonData = new ArrayList<Object>(); //an array in which we will store the keys of the objects, after we use the values and won't need them anymore
                for (i = 0; i < temporaryArray.size(); i++) {
                    System.out.println("Installing " + temporaryArray.get(i));
                }
                while (!temporaryArray.isEmpty()) {

                    for (Object element : temporaryArray) {

                        if (tools.containsKey(element)) {
                            JSONArray secondaryArray = (JSONArray) tools.get(element); //a temporary array within the scope of the if-statement
                            if (secondaryArray.size() != 0) {
                                System.out.println("In order to install " + element + ", we need ");
                            }
                            for (i = 0; i < secondaryArray.size(); i++) {
                                System.out.println(secondaryArray.get(i));
                            }

                            for (Object o : secondaryArray) {

                                arraysOfJsonData.add(o);
                                //here we create a file with the installed dependency
                                File file = new File(
                                        "C:\\Users\\Vladimir\\Documents\\NetBeansProjects\\DependenciesResolving\\src\\dependencies\\installed_modules\\"
                                                + o);
                                if (file.createNewFile()) {
                                    System.out.println(file.getName() + " is installed!");
                                } else {
                                }
                            }
                            secondaryArray.clear();
                        }
                    }
                    temporaryArray.clear();
                    for (i = 0; i < arraysOfJsonData.size(); i++) {
                        temporaryArray.add(arraysOfJsonData.get(i));
                    }
                    arraysOfJsonData.clear();
                }
            }
        }
        Set<String> keys = tools.keySet(); // here we define a set of keys of the objects in all_packages.json
        for (String s : keys) {
            File file = new File(
                    "C:\\Users\\Vladimir\\Documents\\NetBeansProjects\\DependenciesResolving\\src\\dependencies\\installed_modules\\"
                            + s);
            if (file.createNewFile()) {
                System.out.println(file.getName() + " is installed.");
            } else {
            }
        }
    } catch (IOException ex) {
        Logger.getLogger(DependencyResolving.class.getName()).log(Level.SEVERE, null, ex);
    } catch (ParseException ex) {
        Logger.getLogger(DependencyResolving.class.getName()).log(Level.SEVERE, null, ex);
    }
}

From source file:copi.ScalaEntryPoint.java

public static void main(String[] args) {
    /*
       * Set lwjgl library path so that LWJGL finds the natives depending on
       * the OS.
       */
    File libDir = new File(path);

    if (!libDir.exists()) {
        // create native lib folder 
        libDir.mkdir();

        // retrieve os type
        String osName = System.getProperty("os.name");

        // try to determine if the system is 64 bit  
        boolean is64bit = false;
        if (System.getProperty("os.name").contains("Windows")) {
            is64bit = (System.getenv("ProgramFiles(x86)") != null);
        } else {
            is64bit = (System.getProperty("os.arch").indexOf("64") != -1);
        }

        // construct name of native lib file 
        String natLibLWJGL = "";
        if (osName.startsWith("Windows")) {
            natLibLWJGL += "lwjgl";
            if (is64bit)
                natLibLWJGL += "64";
            natLibLWJGL += ".dll";
        } else if (osName.startsWith("Linux")) {
            natLibLWJGL += "liblwjgl";
            if (is64bit)
                natLibLWJGL += "64";
            natLibLWJGL += ".so";
        } else if (osName.startsWith("Mac OS X")) {
            natLibLWJGL += "liblwjgl";
            natLibLWJGL += ".jnilib";
        } else {
            System.out.println("Unsupported OS: " + osName + ". Exiting.");
            System.exit(-1);
        }

        // try to establish an input stream on the native lib inside the jar
        InputStream fis = ScalaEntryPoint.class.getResourceAsStream("/" + natLibLWJGL);
        if (fis == null) {
            System.out.println("Native library file " + natLibLWJGL + " was not found inside JAR.");
            System.exit(-1);
        }

        // establish an output stream on the target file 
        File fOut = new File(path + "/" + natLibLWJGL);
        try (FileOutputStream fos = new FileOutputStream(fOut)) {
            // create file at destination if not already existing
            if (!fOut.exists())
                fOut.createNewFile();

            // making buffer for copy operation 
            byte[] buffer = new byte[1024];
            int readBytes;

            // Open output stream and copy data between source file in JAR and the temporary file
            try {
                while ((readBytes = fis.read(buffer)) != -1) {
                    fos.write(buffer, 0, readBytes);
                }
            } finally {
                fos.close();
                fis.close();
            }
        } catch (IOException e) {
            System.out.println(e.getMessage());
            System.exit(-1);
        }

        // register shutdown hook
        JVMShutdownHook jvmShutdownHook = new JVMShutdownHook();
        Runtime.getRuntime().addShutdownHook(jvmShutdownHook);

    }

    // set lwjgl native library path
    System.setProperty("org.lwjgl.librarypath", libDir.getAbsolutePath());

    // start COPI
    System.out.println("Starting COPI ...");
    (new SICApplicationLogic()).render();
}

From source file:com.joliciel.talismane.terminology.TalismaneTermExtractorMain.java

public static void main(String[] args) throws Exception {
    String termFilePath = null;
    String outFilePath = null;
    Command command = Command.extract;
    int depth = -1;
    String databasePropertiesPath = null;
    String projectCode = null;
    String terminologyPropertiesPath = null;

    Map<String, String> argMap = StringUtils.convertArgs(args);

    String logConfigPath = argMap.get("logConfigFile");
    if (logConfigPath != null) {
        argMap.remove("logConfigFile");
        Properties props = new Properties();
        props.load(new FileInputStream(logConfigPath));
        PropertyConfigurator.configure(props);
    }

    Map<String, String> innerArgs = new HashMap<String, String>();
    for (Entry<String, String> argEntry : argMap.entrySet()) {
        String argName = argEntry.getKey();
        String argValue = argEntry.getValue();

        if (argName.equals("command"))
            command = Command.valueOf(argValue);
        else if (argName.equals("termFile"))
            termFilePath = argValue;
        else if (argName.equals("outFile"))
            outFilePath = argValue;
        else if (argName.equals("depth"))
            depth = Integer.parseInt(argValue);
        else if (argName.equals("databaseProperties"))
            databasePropertiesPath = argValue;
        else if (argName.equals("terminologyProperties"))
            terminologyPropertiesPath = argValue;
        else if (argName.equals("projectCode"))
            projectCode = argValue;
        else
            innerArgs.put(argName, argValue);
    }
    if (termFilePath == null && databasePropertiesPath == null)
        throw new TalismaneException("Required argument: termFile or databasePropertiesPath");

    if (termFilePath != null) {
        String currentDirPath = System.getProperty("user.dir");
        File termFileDir = new File(currentDirPath);
        if (termFilePath.lastIndexOf("/") >= 0) {
            String termFileDirPath = termFilePath.substring(0, termFilePath.lastIndexOf("/"));
            termFileDir = new File(termFileDirPath);
            termFileDir.mkdirs();
        }
    }

    long startTime = new Date().getTime();
    try {
        if (command.equals(Command.analyse)) {
            innerArgs.put("command", "analyse");
        } else {
            innerArgs.put("command", "process");
        }

        String sessionId = "";
        TalismaneServiceLocator locator = TalismaneServiceLocator.getInstance(sessionId);
        TalismaneService talismaneService = locator.getTalismaneService();

        TalismaneConfig config = talismaneService.getTalismaneConfig(innerArgs, sessionId);

        TerminologyServiceLocator terminologyServiceLocator = TerminologyServiceLocator.getInstance(locator);
        TerminologyService terminologyService = terminologyServiceLocator.getTerminologyService();
        TerminologyBase terminologyBase = null;

        if (projectCode == null)
            throw new TalismaneException("Required argument: projectCode");

        File file = new File(databasePropertiesPath);
        FileInputStream fis = new FileInputStream(file);
        Properties dataSourceProperties = new Properties();
        dataSourceProperties.load(fis);
        terminologyBase = terminologyService.getPostGresTerminologyBase(projectCode, dataSourceProperties);

        TalismaneSession talismaneSession = talismaneService.getTalismaneSession();

        if (command.equals(Command.analyse) || command.equals(Command.extract)) {
            Locale locale = talismaneSession.getLocale();
            Map<TerminologyProperty, String> terminologyProperties = new HashMap<TerminologyProperty, String>();
            if (terminologyPropertiesPath != null) {
                Map<String, String> terminologyPropertiesStr = StringUtils.getArgMap(terminologyPropertiesPath);
                for (String key : terminologyPropertiesStr.keySet()) {
                    try {
                        TerminologyProperty property = TerminologyProperty.valueOf(key);
                        terminologyProperties.put(property, terminologyPropertiesStr.get(key));
                    } catch (IllegalArgumentException e) {
                        throw new TalismaneException("Unknown terminology property: " + key);
                    }
                }
            } else {
                terminologyProperties = getDefaultTerminologyProperties(locale);
            }
            if (depth <= 0 && !terminologyProperties.containsKey(TerminologyProperty.maxDepth))
                throw new TalismaneException("Required argument: depth");

            InputStream regexInputStream = getInputStreamFromResource(
                    "parser_conll_with_location_input_regex.txt");
            Scanner regexScanner = new Scanner(regexInputStream, "UTF-8");
            String inputRegex = regexScanner.nextLine();
            regexScanner.close();
            config.setInputRegex(inputRegex);

            Charset outputCharset = config.getOutputCharset();

            TermExtractor termExtractor = terminologyService.getTermExtractor(terminologyBase,
                    terminologyProperties);
            if (depth > 0)
                termExtractor.setMaxDepth(depth);
            termExtractor.setOutFilePath(termFilePath);

            if (outFilePath != null) {
                if (outFilePath.lastIndexOf("/") >= 0) {
                    String outFileDirPath = outFilePath.substring(0, outFilePath.lastIndexOf("/"));
                    File outFileDir = new File(outFileDirPath);
                    outFileDir.mkdirs();
                }
                File outFile = new File(outFilePath);
                outFile.delete();
                outFile.createNewFile();

                Writer writer = new BufferedWriter(
                        new OutputStreamWriter(new FileOutputStream(outFilePath), outputCharset));
                TermAnalysisWriter termAnalysisWriter = new TermAnalysisWriter(writer);
                termExtractor.addTermObserver(termAnalysisWriter);
            }

            Talismane talismane = config.getTalismane();
            talismane.setParseConfigurationProcessor(termExtractor);
            talismane.process();
        } else if (command.equals(Command.list)) {

            List<Term> terms = terminologyBase.findTerms(2, null, 0, null, null);
            for (Term term : terms) {
                LOG.debug("Term: " + term.getText());
                LOG.debug("Frequency: " + term.getFrequency());
                LOG.debug("Heads: " + term.getHeads());
                LOG.debug("Expansions: " + term.getExpansions());
                LOG.debug("Contexts: " + term.getContexts());
            }
        }
    } finally {
        long endTime = new Date().getTime();
        long totalTime = endTime - startTime;
        LOG.info("Total time: " + totalTime);
    }
}

From source file:com.nextdoor.bender.S3SnsNotifier.java

public static void main(String[] args) throws ParseException, InterruptedException, IOException {
    formatter = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'").withZoneUTC();

    /*
     * Parse cli arguments
     */
    Options options = new Options();
    options.addOption(Option.builder().longOpt("bucket").hasArg().required()
            .desc("Name of S3 bucket to list s3 objects from").build());
    options.addOption(Option.builder().longOpt("key-file").hasArg().required()
            .desc("Local file of S3 keys to process").build());
    options.addOption(
            Option.builder().longOpt("sns-arn").hasArg().required().desc("SNS arn to publish to").build());
    options.addOption(Option.builder().longOpt("throttle-ms").hasArg()
            .desc("Amount of ms to wait between publishing to SNS").build());
    options.addOption(Option.builder().longOpt("processed-file").hasArg()
            .desc("Local file to use to store procssed S3 object names").build());
    options.addOption(Option.builder().longOpt("skip-processed").hasArg(false)
            .desc("Whether to skip S3 objects that have been processed").build());
    options.addOption(
            Option.builder().longOpt("dry-run").hasArg(false).desc("If set do not publish to SNS").build());

    CommandLineParser parser = new DefaultParser();
    CommandLine cmd = parser.parse(options, args);

    String bucket = cmd.getOptionValue("bucket");
    String keyFile = cmd.getOptionValue("key-file");
    String snsArn = cmd.getOptionValue("sns-arn");
    String processedFile = cmd.getOptionValue("processed-file", null);
    boolean skipProcessed = cmd.hasOption("skip-processed");
    dryRun = cmd.hasOption("dry-run");
    long throttle = Long.parseLong(cmd.getOptionValue("throttle-ms", "-1"));

    if (processedFile != null) {
        File file = new File(processedFile);

        if (!file.exists()) {
            logger.debug("creating local file to store processed s3 object names: " + processedFile);
            file.createNewFile();
        }
    }

    /*
     * Import S3 keys that have been processed
     */
    if (skipProcessed && processedFile != null) {
        try (BufferedReader br = new BufferedReader(new FileReader(processedFile))) {
            String line;
            while ((line = br.readLine()) != null) {
                alreadyPublished.add(line.trim());
            }
        }
    }

    /*
     * Setup writer for file containing processed S3 keys
     */
    FileWriter fw = null;
    BufferedWriter bw = null;
    if (processedFile != null) {
        fw = new FileWriter(processedFile, true);
        bw = new BufferedWriter(fw);
    }

    /*
     * Create clients
     */
    AmazonS3Client s3Client = new AmazonS3Client();
    AmazonSNSClient snsClient = new AmazonSNSClient();

    /*
     * Get S3 object list
     */
    try (BufferedReader br = new BufferedReader(new FileReader(keyFile))) {
        String line;
        while ((line = br.readLine()) != null) {
            String key = line.trim();

            // Skip keys that were already published
            if (alreadyPublished.contains(key)) {
                logger.info("skipping " + key);
                continue;
            }

            ObjectMetadata om = s3Client.getObjectMetadata(bucket, key);

            S3EventNotification s3Notification = getS3Notification(key, bucket, om.getContentLength());

            String json = s3Notification.toJson();

            /*
             * Publish to SNS
             */
            if (publish(snsArn, json, snsClient, key) && processedFile != null) {
                bw.write(key + "\n");
                bw.flush();
            }

            if (throttle != -1) {
                Thread.sleep(throttle);
            }

        }
    }

    if (processedFile != null) {
        bw.close();
        fw.close();
    }
}