Example usage for java.io File createNewFile

List of usage examples for java.io File createNewFile

Introduction

On this page you can find example usages of java.io File createNewFile.

Prototype

public boolean createNewFile() throws IOException 

Document

Atomically creates a new, empty file named by this abstract pathname if and only if a file with this name does not yet exist.
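
Because the existence check and the creation are one atomic operation, the boolean return value (rather than a separate exists() call) is what tells you whether this particular invocation created the file. A minimal sketch of the basic pattern; the file name is hypothetical:

import java.io.File;
import java.io.IOException;

public class CreateNewFileDemo {
    public static void main(String[] args) throws IOException {
        File flag = new File("app.lock"); // hypothetical path for illustration
        // createNewFile() returns true only if this call created the file;
        // the check and the creation happen atomically.
        if (flag.createNewFile()) {
            System.out.println("Created " + flag.getAbsolutePath());
        } else {
            System.out.println(flag.getName() + " already exists.");
        }
    }
}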

Usage

From source file:com.act.biointerpretation.sars.SeqDBReactionGrouper.java

public static void main(String[] args) throws Exception {
    // Build command line parser.
    Options opts = new Options();
    for (Option.Builder b : OPTION_BUILDERS) {
        opts.addOption(b.build());
    }

    CommandLine cl = null;
    try {
        CommandLineParser parser = new DefaultParser();
        cl = parser.parse(opts, args);
    } catch (ParseException e) {
        LOGGER.error("Argument parsing failed: %s", e.getMessage());
        HELP_FORMATTER.printHelp(SeqDBReactionGrouper.class.getCanonicalName(), HELP_MESSAGE, opts, null, true);
        System.exit(1);
    }

    // Print help.
    if (cl.hasOption(OPTION_HELP)) {
        HELP_FORMATTER.printHelp(SeqDBReactionGrouper.class.getCanonicalName(), HELP_MESSAGE, opts, null, true);
        return;
    }

    // Handle arguments
    String mongoDBName = cl.getOptionValue(OPTION_DB);
    MongoDB mongoDB = new MongoDB(LOCAL_HOST, MONGO_PORT, mongoDBName);

    File outputFile = new File(cl.getOptionValue(OPTION_OUTPUT_PATH));
    if (outputFile.isDirectory() || outputFile.exists()) {
        LOGGER.error("Supplied output file is a directory or already exists.");
        System.exit(1);
    }
    outputFile.createNewFile();

    Integer limit = DEFAULT_LIMIT_INFINITY;
    if (cl.hasOption(OPTION_LIMIT)) {
        limit = Integer.parseInt(cl.getOptionValue(OPTION_LIMIT));
    }
    LOGGER.info("Only processing first %d entries in Seq DB.", limit);

    SeqDBReactionGrouper enzymeGrouper = new SeqDBReactionGrouper(mongoDB.getSeqIterator(), mongoDBName, limit);

    LOGGER.info("Scanning seq db for reactions with same seq.");
    ReactionGroupCorpus groupCorpus = enzymeGrouper.getReactionGroupCorpus();

    LOGGER.info("Writing output to file.");
    groupCorpus.printToJsonFile(outputFile);

    LOGGER.info("Complete!");
}
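
A note on the pattern above: testing outputFile.exists() and then calling createNewFile() is a check-then-act sequence, so another process could create the file between the two calls. Acting on the boolean returned by createNewFile() avoids that window, and java.nio.file.Files.createFile offers the same atomicity but signals an existing file with an exception. A sketch of the NIO variant, with a hypothetical output path:

import java.io.IOException;
import java.nio.file.FileAlreadyExistsException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

public class AtomicOutputFileDemo {
    public static void main(String[] args) throws IOException {
        Path output = Paths.get("output.json"); // hypothetical path for illustration
        try {
            Files.createFile(output); // atomic: throws if the file already exists
        } catch (FileAlreadyExistsException e) {
            System.err.println("Output file already exists: " + output);
            System.exit(1);
        }
    }
}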

From source file:com.joliciel.frenchTreebank.FrenchTreebank.java

/**
 * @param args
 */
public static void main(String[] args) throws Exception {
    String command = args[0];

    String outFilePath = "";
    String outDirPath = "";
    String treebankPath = "";
    String ftbFileName = "";
    String rawTextDir = "";
    String queryPath = "";
    String sentenceNumber = null;
    boolean firstArg = true;
    for (String arg : args) {
        if (firstArg) {
            firstArg = false;
            continue;
        }
        int equalsPos = arg.indexOf('=');
        String argName = arg.substring(0, equalsPos);
        String argValue = arg.substring(equalsPos + 1);
        if (argName.equals("outfile"))
            outFilePath = argValue;
        else if (argName.equals("outdir"))
            outDirPath = argValue;
        else if (argName.equals("ftbFileName"))
            ftbFileName = argValue;
        else if (argName.equals("treebank"))
            treebankPath = argValue;
        else if (argName.equals("sentence"))
            sentenceNumber = argValue;
        else if (argName.equals("query"))
            queryPath = argValue;
        else if (argName.equals("rawTextDir"))
            rawTextDir = argValue;
        else
            throw new RuntimeException("Unknown argument: " + argName);
    }

    TalismaneServiceLocator talismaneServiceLocator = TalismaneServiceLocator.getInstance();

    TreebankServiceLocator locator = TreebankServiceLocator.getInstance(talismaneServiceLocator);

    if (treebankPath.length() == 0)
        locator.setDataSourcePropertiesFile("jdbc-live.properties");

    if (command.equals("search")) {
        final SearchService searchService = locator.getSearchService();
        final XmlPatternSearch search = searchService.newXmlPatternSearch();
        search.setXmlPatternFile(queryPath);
        List<SearchResult> searchResults = search.perform();

        FileWriter fileWriter = new FileWriter(outFilePath);
        for (SearchResult searchResult : searchResults) {
            String lineToWrite = "";
            Sentence sentence = searchResult.getSentence();
            Phrase phrase = searchResult.getPhrase();
            lineToWrite += sentence.getFile().getFileName() + "|";
            lineToWrite += sentence.getSentenceNumber() + "|";
            List<PhraseUnit> phraseUnits = searchResult.getPhraseUnits();
            LOG.debug("Phrase: " + phrase.getId());
            for (PhraseUnit phraseUnit : phraseUnits)
                lineToWrite += phraseUnit.getLemma().getText() + "|";
            lineToWrite += phrase.getText();
            fileWriter.write(lineToWrite + "\n");
        }
        fileWriter.flush();
        fileWriter.close();
    } else if (command.equals("load")) {
        final TreebankService treebankService = locator.getTreebankService();
        final TreebankSAXParser parser = new TreebankSAXParser();
        parser.setTreebankService(treebankService);
        parser.parseDocument(treebankPath);
    } else if (command.equals("loadAll")) {
        final TreebankService treebankService = locator.getTreebankService();

        File dir = new File(treebankPath);

        String firstFile = null;
        if (args.length > 2)
            firstFile = args[2];
        String[] files = dir.list();
        if (files == null) {
            throw new RuntimeException("Not a directory or no children: " + treebankPath);
        } else {
            boolean startProcessing = true;
            if (firstFile != null)
                startProcessing = false;
            for (int i = 0; i < files.length; i++) {
                if (!startProcessing && files[i].equals(firstFile))
                    startProcessing = true;
                if (startProcessing) {
                    String filePath = args[1] + "/" + files[i];
                    LOG.debug(filePath);
                    final TreebankSAXParser parser = new TreebankSAXParser();
                    parser.setTreebankService(treebankService);
                    parser.parseDocument(filePath);
                }
            }
        }
    } else if (command.equals("loadRawText")) {
        final TreebankService treebankService = locator.getTreebankService();
        final TreebankRawTextAssigner assigner = new TreebankRawTextAssigner();
        assigner.setTreebankService(treebankService);
        assigner.setRawTextDirectory(rawTextDir);
        assigner.loadRawText();
    } else if (command.equals("tokenize")) {
        Writer csvFileWriter = null;
        if (outFilePath != null && outFilePath.length() > 0) {
            if (outFilePath.lastIndexOf("/") > 0) {
                String outputDirPath = outFilePath.substring(0, outFilePath.lastIndexOf("/"));
                File outputDir = new File(outputDirPath);
                outputDir.mkdirs();
            }

            File csvFile = new File(outFilePath);
            csvFile.delete();
            csvFile.createNewFile();
            csvFileWriter = new BufferedWriter(
                    new OutputStreamWriter(new FileOutputStream(csvFile, false), "UTF8"));
        }
        try {

            final TreebankService treebankService = locator.getTreebankService();
            TreebankExportService treebankExportService = locator.getTreebankExportServiceLocator()
                    .getTreebankExportService();
            TreebankUploadService treebankUploadService = locator.getTreebankUploadServiceLocator()
                    .getTreebankUploadService();
            TreebankReader treebankReader = null;

            if (treebankPath.length() > 0) {
                File treebankFile = new File(treebankPath);
                if (sentenceNumber != null)
                    treebankReader = treebankUploadService.getXmlReader(treebankFile, sentenceNumber);
                else
                    treebankReader = treebankUploadService.getXmlReader(treebankFile);

            } else {
                treebankReader = treebankService.getDatabaseReader(TreebankSubSet.ALL, 0);
            }

            TokeniserAnnotatedCorpusReader reader = treebankExportService
                    .getTokeniserAnnotatedCorpusReader(treebankReader, csvFileWriter);

            while (reader.hasNextTokenSequence()) {
                TokenSequence tokenSequence = reader.nextTokenSequence();
                List<Integer> tokenSplits = tokenSequence.getTokenSplits();
                String sentence = tokenSequence.getText();
                LOG.debug(sentence);
                int currentPos = 0;
                StringBuilder sb = new StringBuilder();
                for (int split : tokenSplits) {
                    if (split == 0)
                        continue;
                    String token = sentence.substring(currentPos, split);
                    sb.append('|');
                    sb.append(token);
                    currentPos = split;
                }
                LOG.debug(sb.toString());
            }
        } finally {
            // csvFileWriter is null when no outfile argument was supplied
            if (csvFileWriter != null) {
                csvFileWriter.flush();
                csvFileWriter.close();
            }
        }
    } else if (command.equals("export")) {
        if (outDirPath.length() == 0)
            throw new RuntimeException("Parameter required: outdir");
        File outDir = new File(outDirPath);
        outDir.mkdirs();

        final TreebankService treebankService = locator.getTreebankService();
        FrenchTreebankXmlWriter xmlWriter = new FrenchTreebankXmlWriter();
        xmlWriter.setTreebankService(treebankService);

        if (ftbFileName.length() == 0) {
            xmlWriter.write(outDir);
        } else {
            TreebankFile ftbFile = treebankService.loadTreebankFile(ftbFileName);
            String fileName = ftbFileName.substring(ftbFileName.lastIndexOf('/') + 1);
            File xmlFile = new File(outDir, fileName);
            xmlFile.delete();
            xmlFile.createNewFile();

            Writer xmlFileWriter = new BufferedWriter(
                    new OutputStreamWriter(new FileOutputStream(xmlFile, false), "UTF8"));
            xmlWriter.write(xmlFileWriter, ftbFile);
            xmlFileWriter.flush();
            xmlFileWriter.close();
        }
    } else {
        throw new RuntimeException("Unknown command: " + command);
    }
    LOG.debug("========== END ============");
}

From source file:com.tmo.swagger.main.GenrateSwaggerJson.java

public static void main(String[] args)
        throws JsonGenerationException, JsonMappingException, IOException, EmptyXlsRows {

    PropertyReader pr = new PropertyReader();

    Properties prop = pr.readPropertiesFile(args[0]);
    //Properties prop =pr.readClassPathPropertyFile("common.properties");
    String swaggerFile = prop.getProperty("swagger.json");
    String sw = "";
    if (swaggerFile != null && swaggerFile.length() > 0) {
        Swagger swagger = populatePropertiesOnlyPaths(prop, new SwaggerParser().read(swaggerFile));
        ObjectMapper mapper = new ObjectMapper();
        mapper.setSerializationInclusion(Include.NON_NULL);
        sw = mapper.writeValueAsString(swagger);
    } else {
        ObjectMapper mapper = new ObjectMapper();
        mapper.setSerializationInclusion(Include.NON_NULL);
        Swagger swagger = populateProperties(prop);
        sw = mapper.writeValueAsString(swagger);
    }
    try {
        File file = new File(args[1] + prop.getProperty("path.operation.tags") + ".json");
        //File file = new File("src/main/resources/"+prop.getProperty("path.operation.tags")+".json");
        if (!file.exists()) {
            file.createNewFile();
        }
        FileWriter fw = new FileWriter(file.getAbsoluteFile());
        BufferedWriter bw = new BufferedWriter(fw);
        bw.write(sw);
        logger.info("Swagger Genration Done!");
        bw.close();
    } catch (IOException e) {
        e.printStackTrace();
    }
}

From source file:gentracklets.Propagate.java

public static void main(String[] args) throws OrekitException {

    // load the data files
    File data = new File("/home/zittersteijn/Documents/java/libraries/orekit-data.zip");
    DataProvidersManager DM = DataProvidersManager.getInstance();
    ZipJarCrawler crawler = new ZipJarCrawler(data);
    DM.clearProviders();
    DM.addProvider(crawler);

    // Read in TLE elements
    File tleFile = new File("/home/zittersteijn/Documents/TLEs/ASTRA20151207.tle");
    FileReader TLEfr;
    Vector<TLE> tles = new Vector<>();
    tles.setSize(30);

    try {
        // read and save TLEs to a vector
        TLEfr = new FileReader("/home/zittersteijn/Documents/TLEs/ASTRA20151207.tle");
        BufferedReader readTLE = new BufferedReader(TLEfr);

        Scanner s = new Scanner(tleFile);

        String line1, line2;
        TLE2 tle = new TLE2();

        int nrOfObj = 4;
        for (int ii = 1; ii < nrOfObj + 1; ii++) {
            System.out.println(ii);
            line1 = s.nextLine();
            line2 = s.nextLine();
            if (TLE.isFormatOK(line1, line2)) {
                tles.setElementAt(new TLE(line1, line2), ii);
                System.out.println(tles.get(ii).toString());
            } else {
                System.out.println("format problem");
            }

        }
        readTLE.close();

        // define a groundstation
        Frame inertialFrame = FramesFactory.getEME2000();
        TimeScale utc = TimeScalesFactory.getUTC();
        double longitude = FastMath.toRadians(7.465);
        double latitude = FastMath.toRadians(46.87);
        double altitude = 950.;
        GeodeticPoint station = new GeodeticPoint(latitude, longitude, altitude);
        Frame earthFrame = FramesFactory.getITRF(IERSConventions.IERS_2010, true);
        BodyShape earth = new OneAxisEllipsoid(Constants.WGS84_EARTH_EQUATORIAL_RADIUS,
                Constants.WGS84_EARTH_FLATTENING, earthFrame);
        TopocentricFrame staF = new TopocentricFrame(earth, station, "station");

        Vector<Orbit> eles = new Vector<>();
        eles.setSize(tles.size());
        for (int ii = 1; ii < nrOfObj + 1; ii++) {
            double a = FastMath.pow(Constants.WGS84_EARTH_MU / FastMath.pow(tles.get(ii).getMeanMotion(), 2),
                    (1.0 / 3));
            // convert them to orbits
            Orbit kep = new KeplerianOrbit(a, tles.get(ii).getE(), tles.get(ii).getI(),
                    tles.get(ii).getPerigeeArgument(), tles.get(ii).getRaan(), tles.get(ii).getMeanAnomaly(),
                    PositionAngle.MEAN, inertialFrame, tles.get(ii).getDate(), Constants.WGS84_EARTH_MU);

            eles.setElementAt(kep, ii);

            // set up propagators
            KeplerianPropagator kepler = new KeplerianPropagator(eles.get(ii));

            System.out.println("a: " + a);

            // Initial state definition
            double mass = 1000.0;
            SpacecraftState initialState = new SpacecraftState(kep, mass);

            // Adaptive step integrator
            // with a minimum step of 0.001 and a maximum step of 1000
            double minStep = 0.001;
            double maxstep = 1000.0;
            double positionTolerance = 10.0;
            OrbitType propagationType = OrbitType.KEPLERIAN;
            double[][] tolerances = NumericalPropagator.tolerances(positionTolerance, kep, propagationType);
            AdaptiveStepsizeIntegrator integrator = new DormandPrince853Integrator(minStep, maxstep,
                    tolerances[0], tolerances[1]);

            NumericalPropagator propagator = new NumericalPropagator(integrator);
            propagator.setOrbitType(propagationType);

            // set up and add force models
            double AMR = 4.0;
            double crossSection = mass * AMR;
            double Cd = 0.01;
            double Cr = 0.5;
            double Co = 0.8;
            NormalizedSphericalHarmonicsProvider provider = GravityFieldFactory.getNormalizedProvider(4, 4);
            ForceModel holmesFeatherstone = new HolmesFeatherstoneAttractionModel(
                    FramesFactory.getITRF(IERSConventions.IERS_2010, true), provider);
            SphericalSpacecraft ssc = new SphericalSpacecraft(crossSection, Cd, Cr, Co);
            PVCoordinatesProvider sun = CelestialBodyFactory.getSun();
            SolarRadiationPressure srp = new SolarRadiationPressure(sun,
                    Constants.WGS84_EARTH_EQUATORIAL_RADIUS, ssc);

            //                propagator.addForceModel(srp);
            //                propagator.addForceModel(holmesFeatherstone);
            propagator.setInitialState(initialState);

            // propagate the orbits with the given step size and tracklet length at several epochs (tracklets)
            Vector<AbsoluteDate> startDates = new Vector<>();
            startDates.setSize(1);
            startDates.setElementAt(new AbsoluteDate(2016, 1, 26, 20, 00, 00, utc), 0);

            // set the step size [s] and total length
            double tstep = 100;
            double ld = 3;
            double ls = FastMath.floor(ld * (24 * 60 * 60) / tstep);
            System.out.println(ls);

            SpacecraftState currentStateKep = kepler.propagate(startDates.get(0));
            SpacecraftState currentStatePer = propagator.propagate(startDates.get(0));

            for (int tt = 0; tt < startDates.size(); tt++) {

                // set up output file
                String app = tles.get(ii).getSatelliteNumber() + "_" + startDates.get(tt) + ".txt";

                // with formatted output
                File file1 = new File("/home/zittersteijn/Documents/propagate/keplerian/MEO/" + app);
                File file2 = new File("/home/zittersteijn/Documents/propagate/perturbed/MEO/" + app);
                file1.createNewFile();
                file2.createNewFile();
                Formatter fmt1 = new Formatter(file1);
                Formatter fmt2 = new Formatter(file2);

                for (int kk = 0; kk < (int) ls; kk++) {
                    AbsoluteDate propDate = startDates.get(tt).shiftedBy(tstep * kk);
                    currentStateKep = kepler.propagate(propDate);
                    currentStatePer = propagator.propagate(propDate);

                    System.out.println(currentStateKep.getPVCoordinates().getPosition() + "\t"
                            + currentStateKep.getDate());

                    // convert to RADEC coordinates
                    double[] radecKep = conversions.geo2radec(currentStateKep.getPVCoordinates(), staF,
                            inertialFrame, propDate);
                    double[] radecPer = conversions.geo2radec(currentStatePer.getPVCoordinates(), staF,
                            inertialFrame, propDate);

                    // write the orbit to separate files with the RA, DEC, epoch and fence given
                    AbsoluteDate year = new AbsoluteDate(YEAR, utc);
                    fmt1.format("%.12f %.12f %.12f %d%n", radecKep[0], radecKep[2],
                            (currentStateKep.getDate().durationFrom(year) / (24 * 3600)), (tt + 1));
                    fmt2.format("%.12f %.12f %.12f %d%n", radecPer[0], radecPer[2],
                            (currentStateKep.getDate().durationFrom(year) / (24 * 3600)), (tt + 1));

                }
                fmt1.flush();
                fmt1.close();
                fmt2.flush();
                fmt2.close();

            }
            double[] radecKep = conversions.geo2radec(currentStateKep.getPVCoordinates(), staF, inertialFrame,
                    new AbsoluteDate(startDates.get(0), ls * tstep));
            double[] radecPer = conversions.geo2radec(currentStatePer.getPVCoordinates(), staF, inertialFrame,
                    new AbsoluteDate(startDates.get(0), ls * tstep));
            double sig0 = 1.0 / 3600.0 / 180.0 * FastMath.PI;
            // group the RA/DEC difference before scaling by sig0^2
            double dRA = (radecKep[0] - radecPer[0]) / (sig0 * sig0);
            double dDEC = (radecKep[2] - radecPer[2]) / (sig0 * sig0);

            System.out.println(dRA + "\t" + dDEC);

        }

    } catch (FileNotFoundException ex) {
        Logger.getLogger(GenTracklets.class.getName()).log(Level.SEVERE, null, ex);
    } catch (IOException iox) {
        Logger.getLogger(GenTracklets.class.getName()).log(Level.SEVERE, null, iox);
    }

}

From source file:com.act.biointerpretation.sars.SarGenerationDriver.java

public static void main(String[] args) throws Exception {
    // Build command line parser.
    Options opts = new Options();
    for (Option.Builder b : OPTION_BUILDERS) {
        opts.addOption(b.build());
    }

    CommandLine cl = null;
    try {
        CommandLineParser parser = new DefaultParser();
        cl = parser.parse(opts, args);
    } catch (ParseException e) {
        LOGGER.error("Argument parsing failed: %s", e.getMessage());
        HELP_FORMATTER.printHelp(SarGenerationDriver.class.getCanonicalName(), HELP_MESSAGE, opts, null, true);
        System.exit(1);
    }

    // Print help.
    if (cl.hasOption(OPTION_HELP)) {
        HELP_FORMATTER.printHelp(SarGenerationDriver.class.getCanonicalName(), HELP_MESSAGE, opts, null, true);
        return;
    }

    // Create DB and DbAPI
    MongoDB mongoDB = new MongoDB(LOCAL_HOST, MONGO_PORT, cl.getOptionValue(OPTION_DB));
    DbAPI dbApi = new DbAPI(mongoDB);

    // Handle output file
    File outputFile = new File(cl.getOptionValue(OPTION_OUTPUT_PATH));
    if (outputFile.isDirectory() || outputFile.exists()) {
        LOGGER.error("Supplied output file is a directory or already exists.");
        HELP_FORMATTER.printHelp(SarGenerationDriver.class.getCanonicalName(), HELP_MESSAGE, opts, null, true);
        System.exit(1);
    }
    outputFile.createNewFile();

    // Check that there is exactly one reaction group input option
    if (cl.hasOption(OPTION_REACTION_LIST) && cl.hasOption(OPTION_REACTIONS_FILE)) {
        LOGGER.error("Cannot process both a reaction list and a reactions file as input.");
        HELP_FORMATTER.printHelp(SarGenerationDriver.class.getCanonicalName(), HELP_MESSAGE, opts, null, true);
        System.exit(1);
    }
    if (!cl.hasOption(OPTION_REACTION_LIST) && !cl.hasOption(OPTION_REACTIONS_FILE)) {
        LOGGER.error("Must supply either a reaction list or a reactions file as input.");
        HELP_FORMATTER.printHelp(SarGenerationDriver.class.getCanonicalName(), HELP_MESSAGE, opts, null, true);
        System.exit(1);
    }

    // Build input reaction group corpus.
    Iterable<ReactionGroup> groups = null;
    if (cl.hasOption(OPTION_REACTION_LIST)) {
        LOGGER.info("Using specific input reactions.");
        ReactionGroup group = new ReactionGroup("ONLY_GROUP", "NO_DB");
        for (String idString : cl.getOptionValues(OPTION_REACTION_LIST)) {
            group.addReactionId(Long.parseLong(idString));
        }
        groups = Arrays.asList(group);
    }
    if (cl.hasOption(OPTION_REACTIONS_FILE)) {
        LOGGER.info("Using reactions file.");
        File inputFile = new File(cl.getOptionValue(OPTION_REACTIONS_FILE));
        try {
            groups = ReactionGroupCorpus.loadFromJsonFile(inputFile);
            LOGGER.info("Successfully parsed input as json file.");
        } catch (IOException e) {
            LOGGER.info("Input file not json file. Trying txt format.");
            try {
                groups = ReactionGroupCorpus.loadFromTextFile(inputFile);
                LOGGER.info("Successfully parsed input as text file.");
            } catch (IOException f) {
                LOGGER.error("Reactions input file not parseable. %s", f.getMessage());
                throw f;
            }
        }
    }

    // Build all pieces of SAR generator
    ReactionProjector projector = new ReactionProjector();
    ExpandedReactionSearcher generalizer = new ExpandedReactionSearcher(projector);

    McsCalculator reactionMcsCalculator = new McsCalculator(McsCalculator.REACTION_BUILDING_OPTIONS);
    McsCalculator sarMcsCalculator = new McsCalculator(McsCalculator.SAR_OPTIONS);

    FullReactionBuilder reactionBuilder = new FullReactionBuilder(reactionMcsCalculator, generalizer,
            projector);

    SarFactory substructureSarFactory = new OneSubstrateSubstructureSar.Factory(sarMcsCalculator);
    SarFactory carbonCountSarFactory = new OneSubstrateCarbonCountSar.Factory();
    List<SarFactory> sarFactories = Arrays.asList(carbonCountSarFactory, substructureSarFactory);

    ErosCorpus roCorpus = new ErosCorpus();
    roCorpus.loadValidationCorpus();

    ReactionGroupCharacterizer reactionGroupCharacterizer = new OneSubstrateOneRoCharacterizer(dbApi,
            sarFactories, reactionBuilder, roCorpus);
    SarCorpusBuilder corpusBuilder = new SarCorpusBuilder(groups, reactionGroupCharacterizer);
    LOGGER.info("Parsed arguments and constructed SAR corpus builder. Building corpus.");

    SarCorpus sarCorpus = corpusBuilder.build();
    LOGGER.info("Built sar corpus. Printing to file in json format.");

    sarCorpus.printToJsonFile(outputFile);
    LOGGER.info("Complete!");
}

From source file:com.act.biointerpretation.l2expansion.L2ExpansionDriver.java

public static void main(String[] args) throws Exception {

    // Build command line parser.
    Options opts = new Options();
    for (Option.Builder b : OPTION_BUILDERS) {
        opts.addOption(b.build());
    }

    CommandLine cl = null;
    try {
        CommandLineParser parser = new DefaultParser();
        cl = parser.parse(opts, args);
    } catch (ParseException e) {
        LOGGER.error("Argument parsing failed: %s", e.getMessage());
        HELP_FORMATTER.printHelp(L2ExpansionDriver.class.getCanonicalName(), HELP_MESSAGE, opts, null, true);
        System.exit(1);
    }

    // Print help.
    if (cl.hasOption(OPTION_HELP)) {
        HELP_FORMATTER.printHelp(L2ExpansionDriver.class.getCanonicalName(), HELP_MESSAGE, opts, null, true);
        return;
    }

    // Get output files.
    String outputPath = cl.getOptionValue(OPTION_OUTPUT_PATH);
    File outputFile = new File(outputPath);
    if (outputFile.isDirectory() || outputFile.exists()) {
        LOGGER.error("Supplied output file is a directory or already exists.");
        System.exit(1);
    }
    outputFile.createNewFile();
    File inchiOutputFile = new File(outputPath + ".inchis");
    if (inchiOutputFile.isDirectory() || inchiOutputFile.exists()) {
        LOGGER.error("Supplied inchi output file is a directory or already exists.");
        System.exit(1);
    }
    inchiOutputFile.createNewFile();

    Optional<OutputStream> maybeProgressStream = Optional.empty();
    if (cl.hasOption(OPTION_PROGRESS_PATH)) {
        String progressPath = cl.getOptionValue(OPTION_PROGRESS_PATH);
        File progressFile = new File(progressPath);
        LOGGER.info("Writing incremental results to file at %s", progressFile.getAbsolutePath());
        if (progressFile.isDirectory() || progressFile.exists()) {
            LOGGER.error("Supplied progress file is a directory or already exists.");
            System.exit(1);
        }
        maybeProgressStream = Optional.of(new FileOutputStream(progressFile));
    }

    // Get metabolite list
    L2InchiCorpus inchiCorpus = getInchiCorpus(cl, OPTION_METABOLITES);
    LOGGER.info("%d substrate inchis.", inchiCorpus.getInchiList().size());

    Integer maxMass = NO_MASS_THRESHOLD;
    if (cl.hasOption(OPTION_MASS_THRESHOLD)) {
        maxMass = Integer.parseInt(cl.getOptionValue(OPTION_MASS_THRESHOLD));
        LOGGER.info("Filtering out substrates with mass more than %d daltons.", maxMass);
    }
    inchiCorpus.filterByMass(maxMass);
    LOGGER.info("%d substrate inchis that are importable as molecules.", inchiCorpus.getInchiList().size());

    PredictionGenerator generator = new AllPredictionsGenerator(new ReactionProjector());

    L2Expander expander = buildExpander(cl, inchiCorpus, generator);
    L2PredictionCorpus predictionCorpus = expander.getPredictions(maybeProgressStream);

    LOGGER.info("Done with L2 expansion. Produced %d predictions.", predictionCorpus.getCorpus().size());

    LOGGER.info("Writing corpus to file.");
    predictionCorpus.writePredictionsToJsonFile(outputFile);
    L2InchiCorpus productInchis = new L2InchiCorpus(predictionCorpus.getUniqueProductInchis());
    productInchis.writeToFile(inchiOutputFile);
    LOGGER.info("L2ExpansionDriver complete!");
}

From source file:gentracklets.GenTracklets.java

public static void main(String[] args) throws OrekitException {

    // load the data files
    File data = new File("/home/zittersteijn/Documents/java/libraries/orekit-data.zip");
    DataProvidersManager DM = DataProvidersManager.getInstance();
    ZipJarCrawler crawler = new ZipJarCrawler(data);
    DM.clearProviders();
    DM.addProvider(crawler);

    // Read in TLE elements
    File tleFile = new File("/home/zittersteijn/Documents/TLEs/ASTRA20151207.tle");
    FileReader TLEfr;
    Vector<TLE> tles = new Vector<>();
    tles.setSize(30);

    try {
        // read and save TLEs to a vector
        TLEfr = new FileReader("/home/zittersteijn/Documents/TLEs/ASTRA20151207.tle");
        BufferedReader readTLE = new BufferedReader(TLEfr);

        Scanner s = new Scanner(tleFile);

        String line1, line2;
        TLE2 tle = new TLE2();

        int nrOfObj = 4;
        for (int ii = 1; ii < nrOfObj + 1; ii++) {
            System.out.println(ii);
            line1 = s.nextLine();
            line2 = s.nextLine();
            if (TLE.isFormatOK(line1, line2)) {
                tles.setElementAt(new TLE(line1, line2), ii);
                System.out.println(tles.get(ii).toString());
            } else {
                System.out.println("format problem");
            }

        }
        readTLE.close();

        // define a groundstation
        Frame inertialFrame = FramesFactory.getEME2000();
        TimeScale utc = TimeScalesFactory.getUTC();
        double longitude = FastMath.toRadians(7.465);
        double latitude = FastMath.toRadians(46.87);
        double altitude = 950.;
        GeodeticPoint station = new GeodeticPoint(latitude, longitude, altitude);
        Frame earthFrame = FramesFactory.getITRF(IERSConventions.IERS_2010, true);
        BodyShape earth = new OneAxisEllipsoid(Constants.WGS84_EARTH_EQUATORIAL_RADIUS,
                Constants.WGS84_EARTH_FLATTENING, earthFrame);
        TopocentricFrame staF = new TopocentricFrame(earth, station, "station");

        Vector<Orbit> eles = new Vector<>();
        eles.setSize(tles.size());
        for (int ii = 1; ii < nrOfObj + 1; ii++) {
            double a = FastMath.pow(Constants.WGS84_EARTH_MU / FastMath.pow(tles.get(ii).getMeanMotion(), 2),
                    (1.0 / 3));
            // convert them to orbits
            Orbit kep = new KeplerianOrbit(a, tles.get(ii).getE(), tles.get(ii).getI(),
                    tles.get(ii).getPerigeeArgument(), tles.get(ii).getRaan(), tles.get(ii).getMeanAnomaly(),
                    PositionAngle.MEAN, inertialFrame, tles.get(ii).getDate(), Constants.WGS84_EARTH_MU);

            eles.setElementAt(kep, ii);

            // set up propagators
            KeplerianPropagator kepler = new KeplerianPropagator(eles.get(ii));

            System.out.println("a: " + a);

            // Initial state definition
            double mass = 1000.0;
            SpacecraftState initialState = new SpacecraftState(kep, mass);

            // Adaptive step integrator
            // with a minimum step of 0.001 and a maximum step of 1000
            double minStep = 0.001;
            double maxstep = 1000.0;
            double positionTolerance = 10.0;
            OrbitType propagationType = OrbitType.KEPLERIAN;
            double[][] tolerances = NumericalPropagator.tolerances(positionTolerance, kep, propagationType);
            AdaptiveStepsizeIntegrator integrator = new DormandPrince853Integrator(minStep, maxstep,
                    tolerances[0], tolerances[1]);

            NumericalPropagator propagator = new NumericalPropagator(integrator);
            propagator.setOrbitType(propagationType);

            // set up and add force models
            double AMR = 0.4;
            double crossSection = mass * AMR;
            double Cd = 0.01;
            double Cr = 0.5;
            double Co = 0.8;
            NormalizedSphericalHarmonicsProvider provider = GravityFieldFactory.getNormalizedProvider(4, 4);
            ForceModel holmesFeatherstone = new HolmesFeatherstoneAttractionModel(
                    FramesFactory.getITRF(IERSConventions.IERS_2010, true), provider);
            SphericalSpacecraft ssc = new SphericalSpacecraft(crossSection, Cd, Cr, Co);
            PVCoordinatesProvider sun = CelestialBodyFactory.getSun();
            SolarRadiationPressure srp = new SolarRadiationPressure(sun,
                    Constants.WGS84_EARTH_EQUATORIAL_RADIUS, ssc);

            propagator.addForceModel(srp);
            propagator.addForceModel(holmesFeatherstone);
            propagator.setInitialState(initialState);

            // propagate the orbits with the given step size and tracklet length at several epochs (tracklets)
            Vector<AbsoluteDate> startDates = new Vector<>();
            startDates.setSize(3);
            startDates.setElementAt(new AbsoluteDate(2015, 12, 8, 20, 00, 00, utc), 0);
            startDates.setElementAt(new AbsoluteDate(2015, 12, 9, 21, 00, 00, utc), 1);
            startDates.setElementAt(new AbsoluteDate(2015, 12, 10, 22, 00, 00, utc), 2);

            double tstep = 30;
            int l = 7;

            for (int tt = 0; tt < startDates.size(); tt++) {

                // set up output file
                String app = "S_" + tles.get(ii).getSatelliteNumber() + "_" + startDates.get(tt) + ".txt";
                //                    FileWriter trackletsOutKep = new FileWriter("/home/zittersteijn/Documents/tracklets/simulated/keplerian/ASTRA/dt1h/AMR040/" + app);
                //                    FileWriter trackletsOutPer = new FileWriter("/home/zittersteijn/Documents/tracklets/simulated/perturbed/ASTRA/dt1h/AMR040/" + app);
                //                    BufferedWriter trackletsKepBW = new BufferedWriter(trackletsOutKep);
                //                    BufferedWriter trackletsPerBW = new BufferedWriter(trackletsOutPer);

                // with formatted output
                File file1 = new File(
                        "/home/zittersteijn/Documents/tracklets/simulated/keplerian/ASTRA/dt1d/AMR040/" + app);
                File file2 = new File(
                        "/home/zittersteijn/Documents/tracklets/simulated/perturbed/ASTRA/dt1d/AMR040/" + app);
                file1.createNewFile();
                file2.createNewFile();
                Formatter fmt1 = new Formatter(file1);
                Formatter fmt2 = new Formatter(file2);

                for (int kk = 0; kk < l; kk++) {
                    AbsoluteDate propDate = startDates.get(tt).shiftedBy(tstep * kk);
                    SpacecraftState currentStateKep = kepler.propagate(propDate);
                    SpacecraftState currentStatePer = propagator.propagate(propDate);

                    System.out.println(currentStateKep.getPVCoordinates().getPosition() + "\t"
                            + currentStateKep.getDate());

                    // convert to RADEC coordinates
                    double[] radecKep = conversions.geo2radec(currentStateKep.getPVCoordinates(), staF,
                            inertialFrame, propDate);
                    double[] radecPer = conversions.geo2radec(currentStatePer.getPVCoordinates(), staF,
                            inertialFrame, propDate);

                    // write the tracklets to separate files with the RA, DEC, epoch and fence given
                    //                        System.out.println(tles.get(kk).getSatelliteNumber() + "\t" + radec[0] / (2 * FastMath.PI) * 180 + "\t" + currentState.getDate());
                    AbsoluteDate year = new AbsoluteDate(YEAR, utc);
                    fmt1.format("%.12f %.12f %.12f %d%n", radecKep[0], radecKep[2],
                            (currentStateKep.getDate().durationFrom(year) / (24 * 3600)), (tt + 1));
                    fmt2.format("%.12f %.12f %.12f %d%n", radecPer[0], radecPer[2],
                            (currentStateKep.getDate().durationFrom(year) / (24 * 3600)), (tt + 1));

                }
                fmt1.flush();
                fmt1.close();
                fmt2.flush();
                fmt2.close();

            }
        }

    } catch (FileNotFoundException ex) {
        Logger.getLogger(GenTracklets.class.getName()).log(Level.SEVERE, null, ex);
    } catch (IOException iox) {
        Logger.getLogger(GenTracklets.class.getName()).log(Level.SEVERE, null, iox);
    }

}

From source file:htmlwordtag.HtmlWordTag.java

public static void main(String[] args)
        throws RepositoryException, MalformedQueryException, QueryEvaluationException {
    //get current path
    String current = System.getProperty("user.dir");
    //get html file from internet
    loadhtml();
    //make directory for output
    verifyArgs();
    //translate html file to rdf
    HtmlWordTag httpClientPost = new HtmlWordTag();
    httpClientPost.input = new File("input");
    httpClientPost.output = new File("output");
    httpClientPost.client = new HttpClient();
    httpClientPost.client.getParams().setParameter("http.useragent", "Calais Rest Client");

    httpClientPost.run();

    //create main memory repository
    Repository repo = new SailRepository(new MemoryStore());
    repo.initialize();

    File file = new File(current + "\\output\\website1.html.xml");

    RepositoryConnection con = repo.getConnection();
    try {
        con.add(file, null, RDFFormat.RDFXML);
    } catch (OpenRDFException e) {
        // handle exception
    } catch (java.io.IOException e) {
        // handle io exception
    }

    System.out.println(con.isEmpty());

    //query the entire repository
    String queryString = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n"
            + "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n"
            + "PREFIX c: <http://s.opencalais.com/1/type/em/e/>\n"
            + "PREFIX p: <http://s.opencalais.com/1/pred/>\n"
            + "PREFIX geo: <http://s.opencalais.com/1/type/er/Geo/>\n"

            + "SELECT  distinct ?s ?n\n" + "WHERE {\n" + "{  ?s rdf:type c:Organization.\n"
            + "   ?s p:name ?n.\n}" + "  UNION \n" + "{  ?s rdf:type c:Person.\n" + "   ?s p:name ?n.\n}"
            + "  UNION \n" + "{  ?s rdf:type geo:City.\n" + "   ?s p:name ?n.\n}" + "}";

    //System.out.println(queryString);

    //insert query through sparql repository connection
    TupleQuery tupleQuery = con.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
    TupleQueryResult result = tupleQuery.evaluate();

    File queryresultdir = new File(current + "\\queryresult");
    if (!queryresultdir.exists()) {
        if (queryresultdir.mkdir()) {
            System.out.println("Directory is created!");
        } else {
            System.out.println("Failed to create directory!");
        }
    }

    File queryresult = null;
    try {
        // create new file
        queryresult = new File(current + "\\queryresult\\queryresult1.txt");

        // tries to create new file in the system
        if (queryresult.exists()) {
            if (queryresult.delete()) {
                System.out.println("file queryresult1.txt already exists.");
                System.out.println("file queryresult1.txt has been deleted.");
                if (queryresult.createNewFile()) {
                    System.out.println("created queryresult1.txt successfully");
                } else {
                    System.out.println("failed to create queryresult1.txt");
                }
            } else {
                System.out.println("failed to delete queryresult1.txt.");
            }
        } else {
            if (queryresult.createNewFile()) {
                System.out.println("created queryresult1.txt successfully");
            } else {
                System.out.println("failed to create queryresult1.txt");
            }
        }

    } catch (Exception e) {
        e.printStackTrace();
    }

    try {
        PrintWriter outputStream = null;
        try {
            outputStream = new PrintWriter(new FileOutputStream(current + "\\queryresult\\queryresult1.txt"));
        } catch (FileNotFoundException e) {
            System.out.println("Error to find file queryresult1.txt");
            System.exit(0);
        }
        //go through all triples in the SPARQL repository
        while (result.hasNext()) { // iterate over the result
            BindingSet bindingSet = result.next();
            Value valueOfS = bindingSet.getValue("s");
            Value valueOfN = bindingSet.getValue("n");

            System.out.println(valueOfS + " " + valueOfN);

            outputStream.println(valueOfS + " " + valueOfN);

        }
        outputStream.close();
    } finally {
        result.close();
    }
    //create main memory repository
    Repository repo2 = new SailRepository(new MemoryStore());
    repo2.initialize();

    File file2 = new File(current + "\\output\\website2.html.xml");

    RepositoryConnection con2 = repo2.getConnection();
    try {
        con2.add(file2, null, RDFFormat.RDFXML);
    } catch (OpenRDFException e) {
        // handle exception
    } catch (java.io.IOException e) {
        // handle io exception
    }

    System.out.println(con2.isEmpty());

    //query the entire repository
    String queryString2 = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n"
            + "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n"
            + "PREFIX c: <http://s.opencalais.com/1/type/em/e/>\n"
            + "PREFIX p: <http://s.opencalais.com/1/pred/>\n"
            + "PREFIX geo: <http://s.opencalais.com/1/type/er/Geo/>\n"

            + "SELECT  distinct ?s ?n\n" + "WHERE {\n" + "{  ?s rdf:type c:Organization.\n"
            + "   ?s p:name ?n.\n}" + "  UNION \n" + "{  ?s rdf:type c:Person.\n" + "   ?s p:name ?n.\n}"
            + "  UNION \n" + "{  ?s rdf:type geo:City.\n" + "   ?s p:name ?n.\n}" + "}";

    //System.out.println(queryString2);

    //insert query through sparql repository connection
    TupleQuery tupleQuery2 = con2.prepareTupleQuery(QueryLanguage.SPARQL, queryString2);
    TupleQueryResult result2 = tupleQuery2.evaluate();

    File queryresult2 = null;
    try {
        // create new file
        queryresult2 = new File(current + "\\queryresult\\queryresult2.txt");

        // tries to create new file in the system
        if (queryresult2.exists()) {
            if (queryresult2.delete()) {
                System.out.println("file queryresult2.txt already exists.");
                System.out.println("file queryresult2.txt has been deleted.");
                if (queryresult2.createNewFile()) {
                    System.out.println("created queryresult2.txt successfully");
                } else {
                    System.out.println("failed to create queryresult2.txt");
                }
            } else {
                System.out.println("failed to delete queryresult2.txt.");
            }
        } else {
            if (queryresult2.createNewFile()) {
                System.out.println("created queryresult2.txt successfully");
            } else {
                System.out.println("failed to create queryresult2.txt");
            }
        }

    } catch (Exception e) {
        e.printStackTrace();
    }

    try {
        PrintWriter outputStream2 = null;
        try {
            outputStream2 = new PrintWriter(new FileOutputStream(current + "\\queryresult\\queryresult2.txt"));
        } catch (FileNotFoundException e) {
            System.out.println("Error to find file queryresult2.txt");
            System.exit(0);
        }
        //go through all triples in the SPARQL repository
        while (result2.hasNext()) { // iterate over the result
            BindingSet bindingSet = result2.next();
            Value valueOfS = bindingSet.getValue("s");
            Value valueOfN = bindingSet.getValue("n");

            System.out.println(valueOfS + " " + valueOfN);

            outputStream2.println(valueOfS + " " + valueOfN);

        }
        outputStream2.close();
    } finally {
        result2.close();
    }

}

From source file:com.medicaid.mmis.util.DataLoader.java

/**
 * The main function, imports the files given as arguments.
 *
 * @param args the file names
 * @throws IOException for read/write errors
 * @throws PortalServiceException for any other errors
 */
public static void main(String[] args) throws IOException, PortalServiceException {
    if (args.length != 2) {
        System.out.println("2 file path arguments are required.");
        return;
    }

    PropertyConfigurator.configure("log4j.properties");
    logger = Logger.getLogger(DataLoader.class);

    LookupServiceBean lookupBean = new LookupServiceBean();
    EntityManagerFactory emf = Persistence.createEntityManagerFactory("cms-data-load");
    EntityManager em = emf.createEntityManager();
    lookupBean.setEm(em);
    DataLoader loader = new DataLoader();
    loader.setLookup(lookupBean);

    SequenceGeneratorBean sequence = new SequenceGeneratorBean();
    sequence.setEm(em);

    ProviderEnrollmentServiceBean enrollmentBean = new ProviderEnrollmentServiceBean();
    enrollmentBean.setEm(em);
    enrollmentBean.setSequence(sequence);
    enrollmentBean.setLookupService(lookupBean);

    loader.setEnrollmentService(enrollmentBean);

    long processId = sequence.getNextValue("PROCESS_ID");
    System.out.println("Started process id " + processId);

    BufferedReader br = null;
    PrintWriter accepted = null;
    PrintWriter rejected = null;
    try {
        System.out.println("Processing file 1...");
        File success = new File("accepted_1_" + processId + ".txt");
        File failure = new File("rejected_1_" + processId + ".txt");
        success.createNewFile();
        failure.createNewFile();
        accepted = new PrintWriter(success);
        rejected = new PrintWriter(failure);
        br = new BufferedReader(new FileReader(args[0]));
        String line = null;
        int total = 0;
        int errors = 0;
        while ((line = br.readLine()) != null) {
            total++;
            try {
                em.getTransaction().begin();
                loader.readProviderFile(new ByteArrayInputStream(line.getBytes()));
                em.getTransaction().commit();
                accepted.println(line);
                logger.info("Commit row " + total);
            } catch (PortalServiceException e) {
                rejected.println(line);
                em.getTransaction().rollback();
                errors++;
                logger.error("Rollback row " + total + " :" + e.getMessage());
            }
        }

        accepted.flush();
        accepted.close();
        rejected.flush();
        rejected.close();
        br.close();
        System.out.println("Total records read: " + total);
        System.out.println("Total rejected: " + errors);

        System.out.println("Processing file 2...");
        success = new File("accepted_2_" + processId + ".txt");
        failure = new File("rejected_2_" + processId + ".txt");
        success.createNewFile();
        failure.createNewFile();
        accepted = new PrintWriter(success);
        rejected = new PrintWriter(failure);
        br = new BufferedReader(new FileReader(args[1]));
        line = null;
        total = 0;
        errors = 0;
        while ((line = br.readLine()) != null) {
            total++;
            try {
                em.getTransaction().begin();
                Map<String, OwnershipInformation> owners = loader
                        .readWS000EXT2OWNBEN(new ByteArrayInputStream(line.getBytes()));
                for (Map.Entry<String, OwnershipInformation> entry : owners.entrySet()) {
                    enrollmentBean.addBeneficialOwners(entry.getKey(), entry.getValue());
                }
                em.getTransaction().commit();
                accepted.println(line);
                logger.info("Commit row " + total);
            } catch (PortalServiceException e) {
                rejected.println(line);
                em.getTransaction().rollback();
                errors++;
                logger.error("Rollback row " + total + " :" + e.getMessage());
            }
        }
        accepted.flush();
        rejected.flush();
        System.out.println("Total records read: " + total);
        System.out.println("Total rejected: " + errors);

    } finally {
        if (br != null) {
            br.close();
        }
        if (accepted != null) {
            accepted.close();
        }
        if (rejected != null) {
            rejected.close();
        }
    }
}

From source file:IndexService.IndexServer.java

public static void main(String[] args) {
    File stop = new File("/tmp/.indexstop");
    File running = new File("/tmp/.indexrunning");
    if (args != null && args.length > 0 && args[0].equals("stop")) {
        try {
            stop.createNewFile();
            running.delete();
        } catch (IOException e) {
            e.printStackTrace();
        }
        return;
    }

    if (running.exists() && (System.currentTimeMillis() - running.lastModified() < 15000)) {
        long time = running.lastModified();
        try {
            Thread.sleep(10000);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
        if (running.lastModified() == time) {
            running.delete();
        } else {
            return;
        }
    }

    if (stop.exists()) {
        try {
            Thread.sleep(5000);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
        if (stop.exists())
            stop.delete();
    }

    Configuration conf = new Configuration();
    IndexServer server = new IndexServer(conf);
    if (args != null && args.length > 0 && args[0].equals("test")) {
        server.testmode = true;
    }
    server.start();
    try {
        running.createNewFile();
    } catch (IOException e) {
        e.printStackTrace();
    }
    new UserCmdProc(server).start();

    while (true) {
        stop = new File("/tmp/.indexstop");
        if (stop.exists()) {
            server.close();
            running.delete();
            stop.delete();
            break;
        }
        try {
            Thread.sleep(5000);
        } catch (InterruptedException e1) {
            e1.printStackTrace();
        }

        running.setLastModified(System.currentTimeMillis());

    }
}