Example usage for java.io.BufferedWriter BufferedWriter(Writer out)

Introduction

This page collects usage examples for the java.io.BufferedWriter constructor BufferedWriter(Writer out).

Prototype

public BufferedWriter(Writer out) 

Document

Creates a buffered character-output stream that uses a default-sized output buffer.
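
Below is a minimal, self-contained sketch of this constructor in use; the class name and output file name are illustrative only.

import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;

public class BufferedWriterDemo {
    public static void main(String[] args) throws IOException {
        // Wrap any Writer; here, a FileWriter to a hypothetical file.
        // try-with-resources flushes and closes the writer automatically.
        try (BufferedWriter writer = new BufferedWriter(new FileWriter("demo.txt"))) {
            writer.write("Hello, BufferedWriter");
            writer.newLine(); // writes the platform's line separator
        }
    }
}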

Usage

From source file:edu.cmu.lti.oaqa.knn4qa.apps.FilterTranTable.java

public static void main(String[] args) {
    Options options = new Options();

    options.addOption(INPUT_PARAM, null, true, INPUT_DESC);
    options.addOption(OUTPUT_PARAM, null, true, OUTPUT_DESC);
    options.addOption(CommonParams.MEM_FWD_INDEX_PARAM, null, true, CommonParams.MEM_FWD_INDEX_DESC);
    options.addOption(CommonParams.GIZA_ITER_QTY_PARAM, null, true, CommonParams.GIZA_ITER_QTY_DESC);
    options.addOption(CommonParams.GIZA_ROOT_DIR_PARAM, null, true, CommonParams.GIZA_ROOT_DIR_DESC);
    options.addOption(CommonParams.MIN_PROB_PARAM, null, true, CommonParams.MIN_PROB_DESC);
    options.addOption(CommonParams.MAX_WORD_QTY_PARAM, null, true, CommonParams.MAX_WORD_QTY_PARAM);

    CommandLineParser parser = new org.apache.commons.cli.GnuParser();

    try {
        CommandLine cmd = parser.parse(options, args);

        String outputFile = cmd.getOptionValue(OUTPUT_PARAM);
        if (null == outputFile) {
            Usage("Specify 'A name of the output file'", options);
        }

        String gizaRootDir = cmd.getOptionValue(CommonParams.GIZA_ROOT_DIR_PARAM);
        if (null == gizaRootDir) {
            Usage("Specify '" + CommonParams.GIZA_ROOT_DIR_DESC + "'", options);
        }

        String gizaIterQty = cmd.getOptionValue(CommonParams.GIZA_ITER_QTY_PARAM);

        if (null == gizaIterQty) {
            Usage("Specify '" + CommonParams.GIZA_ITER_QTY_DESC + "'", options);
        }

        float minProb = 0;

        String tmpf = cmd.getOptionValue(CommonParams.MIN_PROB_PARAM);

        if (tmpf != null) {
            minProb = Float.parseFloat(tmpf);
        }

        int maxWordQty = Integer.MAX_VALUE;

        String tmpi = cmd.getOptionValue(CommonParams.MAX_WORD_QTY_PARAM);

        if (null != tmpi) {
            maxWordQty = Integer.parseInt(tmpi);
        }

        String memFwdIndxName = cmd.getOptionValue(CommonParams.MEM_FWD_INDEX_PARAM);
        if (null == memFwdIndxName) {
            Usage("Specify '" + CommonParams.MEM_FWD_INDEX_DESC + "'", options);
        }

        System.out.println("Filtering index: " + memFwdIndxName + " max # of frequent words: " + maxWordQty
                + " min. probability:" + minProb);

        VocabularyFilterAndRecoder filter = new FrequentIndexWordFilterAndRecoder(memFwdIndxName, maxWordQty);

        String srcVocFile = CompressUtils.findFileVariant(gizaRootDir + "/source.vcb");

        System.out.println("Source vocabulary file: " + srcVocFile);

        GizaVocabularyReader srcVoc = new GizaVocabularyReader(srcVocFile, filter);

        String dstVocFile = CompressUtils.findFileVariant(gizaRootDir + "/target.vcb");

        System.out.println("Target vocabulary file: " + dstVocFile);

        GizaVocabularyReader dstVoc = new GizaVocabularyReader(dstVocFile, filter);

        String inputFile = CompressUtils.findFileVariant(gizaRootDir + "/output.t1." + gizaIterQty);

        BufferedReader finp = new BufferedReader(
                new InputStreamReader(CompressUtils.createInputStream(inputFile)));

        BufferedWriter fout = new BufferedWriter(
                new OutputStreamWriter(CompressUtils.createOutputStream(outputFile)));

        try {
            String line;
            int prevSrcId = -1;
            int wordQty = 0;
            long addedQty = 0;
            long totalQty = 0;
            boolean isNotFiltered = false;

            while ((line = finp.readLine()) != null) {
                ++totalQty;
                // Skip empty lines
                line = line.trim();
                if (line.isEmpty())
                    continue;

                GizaTranRec rec = new GizaTranRec(line);

                if (rec.mSrcId != prevSrcId) {
                    ++wordQty;
                }
                if (totalQty % REPORT_INTERVAL_QTY == 0) {
                    System.out.println(String.format(
                            "Processed %d lines (%d source word entries) from '%s', added %d lines", totalQty,
                            wordQty, inputFile, addedQty));
                }

                // isNotFiltered should be set after procOneWord
                if (rec.mSrcId != prevSrcId) {
                    if (rec.mSrcId == 0)
                        isNotFiltered = true;
                    else {
                        String wordSrc = srcVoc.getWord(rec.mSrcId);
                        isNotFiltered = filter == null || (wordSrc != null && filter.checkWord(wordSrc));
                    }
                }

                prevSrcId = rec.mSrcId;

                if (rec.mProb >= minProb && isNotFiltered) {
                    String wordDst = dstVoc.getWord(rec.mDstId);

                    if (filter == null || (wordDst != null && filter.checkWord(wordDst))) {
                        fout.write(rec.mSrcId + " " + rec.mDstId + " " + rec.mProb);
                        fout.newLine();
                        addedQty++;
                    }
                }
            }

            System.out.println(
                    String.format("Processed %d lines (%d source word entries) from '%s', added %d lines",
                            totalQty, wordQty, inputFile, addedQty));

        } finally {
            finp.close();
            fout.close();
        }
    } catch (ParseException e) {
        Usage("Cannot parse arguments", options);
    } catch (Exception e) {
        e.printStackTrace();
        System.err.println("Terminating due to an exception: " + e);
        System.exit(1);
    }
}
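
The example above pairs a BufferedReader with a BufferedWriter and closes both in a finally block. On Java 7+, the same cleanup is usually written with try-with-resources, which closes both streams even when an exception is thrown. A minimal sketch of that variant, with illustrative file names:

// requires java.io.BufferedReader, BufferedWriter, FileReader, FileWriter
try (BufferedReader finp = new BufferedReader(new FileReader("input.txt"));
        BufferedWriter fout = new BufferedWriter(new FileWriter("output.txt"))) {
    String line;
    while ((line = finp.readLine()) != null) {
        fout.write(line);
        fout.newLine();
    }
} // both streams are closed here, even if readLine() or write() throws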

From source file:be.ugent.maf.cellmissy.gui.controller.MSDGenerator.java

public static void main(String[] args) {
    // get the application context
    ApplicationContext context = ApplicationContextProvider.getInstance().getApplicationContext();
    // get the services we need
    ExperimentService experimentService = (ExperimentService) context.getBean("experimentService");
    ProjectService projectService = (ProjectService) context.getBean("projectService");
    WellService wellService = (WellService) context.getBean("wellService");
    SingleCellConditionPreProcessor singleCellConditionPreProcessor = (SingleCellConditionPreProcessor) context
            .getBean("singleCellConditionPreProcessor");
    SingleCellConditionOperator singleCellConditionOperator = (SingleCellConditionOperator) context
            .getBean("singleCellConditionOperator");
    // get all the experiments from DB
    Project project = projectService.findById(4L);
    List<Experiment> experiments = experimentService.findExperimentsByProjectId(project.getProjectid());
    // root folder
    File folder = new File("C:\\Users\\Paola\\Desktop\\benchmark\\cellmissy");

    for (Experiment experiment : experiments) {
        if (experiment.getExperimentNumber() == 1) {
            List<List<TrackDataHolder>> biologicalConditions = new ArrayList<>();
            double instrumentConversionFactor = experiment.getInstrument().getConversionFactor();
            double magnificationValue = experiment.getMagnification().getMagnificationValue();
            double conversionFactor = instrumentConversionFactor * magnificationValue / 10;
            // fetch the migration data
            System.out.println("fetching data for project: " + project + ", experiment: " + experiment + " ...");
            for (PlateCondition plateCondition : experiment.getPlateConditionList()) {
                List<Well> wells = new ArrayList<>();
                for (Well well : plateCondition.getWellList()) {
                    Well fetchedWell = wellService.fetchMigrationData(well.getWellid());
                    wells.add(fetchedWell);
                }
                plateCondition.setWellList(wells);
            }

            for (PlateCondition plateCondition : experiment.getPlateConditionList()) {
                // create a new object to hold pre-processing results
                SingleCellConditionDataHolder singleCellConditionDataHolder = new SingleCellConditionDataHolder(
                        plateCondition);
                System.out.println("****************computations started for condition: " + plateCondition);
                // do the computations

                singleCellConditionPreProcessor.generateDataHolders(singleCellConditionDataHolder);
                singleCellConditionPreProcessor.generateDataStructure(singleCellConditionDataHolder);
                singleCellConditionPreProcessor.preProcessStepsAndCells(singleCellConditionDataHolder,
                        conversionFactor, experiment.getExperimentInterval());
                singleCellConditionPreProcessor
                        .generateRawTrackCoordinatesMatrix(singleCellConditionDataHolder);
                singleCellConditionPreProcessor
                        .generateShiftedTrackCoordinatesMatrix(singleCellConditionDataHolder);
                singleCellConditionOperator.operateOnStepsAndCells(singleCellConditionDataHolder);

                List<TrackDataHolder> trackDataHolders = singleCellConditionDataHolder.getTrackDataHolders();
                biologicalConditions.add(trackDataHolders);
            }

            try (BufferedWriter bufferedWriter = new BufferedWriter(
                    new FileWriter(new File(folder, "bench_msd.txt")))) {
                // header of the file
                bufferedWriter.append("traj_id" + " " + "t_lag" + " " + "msd");
                bufferedWriter.newLine();
                for (List<TrackDataHolder> conditionTracks : biologicalConditions) {
                    for (TrackDataHolder trackDataHolder : conditionTracks) {
                        StepCentricDataHolder stepCentricDataHolder = trackDataHolder
                                .getStepCentricDataHolder();
                        double[][] msd = stepCentricDataHolder.getMSD();
                        for (int i = 0; i < msd.length; i++) {
                            bufferedWriter.append("" + stepCentricDataHolder.getTrack().getTrackid());
                            bufferedWriter.append(" ");
                            bufferedWriter.append("" + msd[i][0]);
                            bufferedWriter.append(" ");
                            bufferedWriter.append("" + msd[i][1]);
                            bufferedWriter.newLine();
                        }
                    }
                }
                System.out.println("txt file succ. created!");
            } catch (IOException ex) {
            }
        }
    }
}

From source file:com.act.lcms.db.io.report.IonAnalysisInterchangeModelOperations.java

public static void main(String[] args) throws IOException {
    Options opts = new Options();
    for (Option.Builder b : OPTION_BUILDERS) {
        opts.addOption(b.build());
    }

    CommandLine cl = null;
    try {
        CommandLineParser parser = new DefaultParser();
        cl = parser.parse(opts, args);
    } catch (ParseException e) {
        LOGGER.error("Argument parsing failed: %s", e.getMessage());
        HELP_FORMATTER.printHelp(IonAnalysisInterchangeModelOperations.class.getCanonicalName(), HELP_MESSAGE,
                opts, null, true);
        System.exit(1);
    }

    if (cl.hasOption("help")) {
        HELP_FORMATTER.printHelp(IonAnalysisInterchangeModelOperations.class.getCanonicalName(), HELP_MESSAGE,
                opts, null, true);
        System.exit(1);
    }

    if (cl.hasOption(OPTION_LOG_DISTRIBUTION)) {
        IonAnalysisInterchangeModel model = new IonAnalysisInterchangeModel();
        model.loadResultsFromFile(new File(cl.getOptionValue(OPTION_INPUT_FILE)));
        Map<Pair<Double, Double>, Integer> rangeToCount = model
                .computeLogFrequencyDistributionOfMoleculeCountToMetric(
                        IonAnalysisInterchangeModel.METRIC.valueOf(cl.getOptionValue(OPTION_LOG_DISTRIBUTION)));

        try (BufferedWriter predictionWriter = new BufferedWriter(
                new FileWriter(new File(cl.getOptionValue(OPTION_OUTPUT_FILE))))) {
            for (Map.Entry<Pair<Double, Double>, Integer> entry : rangeToCount.entrySet()) {
                String value = String.format("%f,%d", entry.getKey().getLeft(), entry.getValue());
                predictionWriter.write(value);
                predictionWriter.newLine();
            }
        }
    }
}

From source file:DruidResponseTime.java

public static void main(String[] args) throws Exception {
    try (CloseableHttpClient client = HttpClients.createDefault()) {
        HttpPost post = new HttpPost("http://localhost:8082/druid/v2/?pretty");
        post.addHeader("content-type", "application/json");
        CloseableHttpResponse res;

        if (STORE_RESULT) {
            File dir = new File(RESULT_DIR);
            if (!dir.exists()) {
                dir.mkdirs();
            }
        }

        int length;

        // Make sure all segments online
        System.out.println("Test if number of records is " + RECORD_NUMBER);
        post.setEntity(new StringEntity("{" + "\"queryType\":\"timeseries\","
                + "\"dataSource\":\"tpch_lineitem\"," + "\"intervals\":[\"1992-01-01/1999-01-01\"],"
                + "\"granularity\":\"all\"," + "\"aggregations\":[{\"type\":\"count\",\"name\":\"count\"}]}"));
        while (true) {
            System.out.print('*');
            res = client.execute(post);
            boolean valid;
            try (BufferedInputStream in = new BufferedInputStream(res.getEntity().getContent())) {
                length = in.read(BYTE_BUFFER);
                valid = new String(BYTE_BUFFER, 0, length, "UTF-8").contains("\"count\" : 6001215");
            }
            res.close();
            if (valid) {
                break;
            } else {
                Thread.sleep(5000);
            }
        }
        System.out.println("Number of Records Test Passed");

        for (int i = 0; i < QUERIES.length; i++) {
            System.out.println(
                    "--------------------------------------------------------------------------------");
            System.out.println("Start running query: " + QUERIES[i]);
            try (BufferedReader reader = new BufferedReader(
                    new FileReader(QUERY_FILE_DIR + File.separator + i + ".json"))) {
                length = reader.read(CHAR_BUFFER);
                post.setEntity(new StringEntity(new String(CHAR_BUFFER, 0, length)));
            }

            // Warm-up Rounds
            System.out.println("Run " + WARMUP_ROUND + " times to warm up cache...");
            for (int j = 0; j < WARMUP_ROUND; j++) {
                res = client.execute(post);
                res.close();
                System.out.print('*');
            }
            System.out.println();

            // Test Rounds
            int[] time = new int[TEST_ROUND];
            int totalTime = 0;
            System.out.println("Run " + TEST_ROUND + " times to get average time...");
            for (int j = 0; j < TEST_ROUND; j++) {
                long startTime = System.currentTimeMillis();
                res = client.execute(post);
                long endTime = System.currentTimeMillis();
                if (STORE_RESULT && j == 0) {
                    try (BufferedInputStream in = new BufferedInputStream(res.getEntity().getContent());
                            BufferedWriter writer = new BufferedWriter(
                                    new FileWriter(RESULT_DIR + File.separator + i + ".json", false))) {
                        while ((length = in.read(BYTE_BUFFER)) > 0) {
                            writer.write(new String(BYTE_BUFFER, 0, length, "UTF-8"));
                        }
                    }
                }
                res.close();
                time[j] = (int) (endTime - startTime);
                totalTime += time[j];
                System.out.print(time[j] + "ms ");
            }
            System.out.println();

            // Process Results
            double avgTime = (double) totalTime / TEST_ROUND;
            double stdDev = 0;
            for (int temp : time) {
                stdDev += (temp - avgTime) * (temp - avgTime) / TEST_ROUND;
            }
            stdDev = Math.sqrt(stdDev);
            System.out.println("The average response time for the query is: " + avgTime + "ms");
            System.out.println("The standard deviation is: " + stdDev);
        }
    }
}

From source file:graticules2wld.Main.java

/**
 * @param args
 * @throws Exception
 */
public static void main(String[] args) throws Exception {

    /* parse the command line arguments */
    // create the command line parser
    CommandLineParser parser = new PosixParser();

    // create the Options
    Options options = new Options();
    options.addOption("x", "originx", true, "x component of projected coordinates of upper left pixel");
    options.addOption("y", "originy", true, "y component of projected coordinates of upper left pixel");
    options.addOption("u", "tometers", true, "multiplication factor to get source units into meters");
    options.addOption("h", "help", false, "prints this usage page");
    options.addOption("d", "debug", false, "prints debugging information to stdout");

    double originNorthing = 0;
    double originEasting = 0;

    String inputFileName = null;
    String outputFileName = null;

    try {
        // parse the command line arguments
        CommandLine line = parser.parse(options, args);

        if (line.hasOption("help"))
            printUsage(0); // print usage then exit using a non error exit status

        if (line.hasOption("debug"))
            debug = true;

        // these arguments are required
        if (!line.hasOption("originy") || !line.hasOption("originx"))
            printUsage(1);

        originNorthing = Double.parseDouble(line.getOptionValue("originy"));
        originEasting = Double.parseDouble(line.getOptionValue("originx"));

        if (line.hasOption("tometers"))
            unitsToMeters = Double.parseDouble(line.getOptionValue("tometers"));

        // two args should be left. the input csv file name and the output wld file name.
        String[] iofiles = line.getArgs();
        if (iofiles.length < 2) {
            printUsage(1);
        }

        inputFileName = iofiles[0];
        outputFileName = iofiles[1];
    } catch (ParseException exp) {
        System.err.println("Unexpected exception:" + exp.getMessage());
        System.exit(1);
    }

    // try to open the input file for reading and the output file for writing
    File graticulesCsvFile;
    BufferedReader csvReader = null;

    File wldFile;
    BufferedWriter wldWriter = null;

    try {
        graticulesCsvFile = new File(inputFileName);
        csvReader = new BufferedReader(new FileReader(graticulesCsvFile));
    } catch (IOException exp) {
        System.err.println("Could not open input file for reading: " + inputFileName);
        System.exit(1);
    }

    try {
        wldFile = new File(outputFileName);
        wldWriter = new BufferedWriter(new FileWriter(wldFile));
    } catch (IOException exp) {
        System.err.println("Could not open output file for writing: " + outputFileName);
        System.exit(1);
    }

    // list of lon graticules and lat graticules
    ArrayList<Graticule> lonGrats = new ArrayList<Graticule>();
    ArrayList<Graticule> latGrats = new ArrayList<Graticule>();

    // read the source CSV and convert its information into the two ArrayList<Graticule> data structures
    readCSV(csvReader, lonGrats, latGrats);

    // we now need to find the world file parameters
    DescriptiveStatistics stats = new DescriptiveStatistics();

    // find theta and phi
    for (Graticule g : latGrats) {
        stats.addValue(g.angle());
    }

    double theta = stats.getMean(); // we use the mean of the lat angles as theta
    if (debug)
        System.out.println("theta range = " + Math.toDegrees(stats.getMax() - stats.getMin()));
    stats.clear();

    for (Graticule g : lonGrats) {
        stats.addValue(g.angle());
    }

    double phi = stats.getMean(); // ... and the mean of the lon angles for phi
    if (debug)
        System.out.println("phi range = " + Math.toDegrees(stats.getMax() - stats.getMin()));
    stats.clear();

    // print these if in debug mode
    if (debug) {
        System.out.println("theta = " + Math.toDegrees(theta) + "deg");
        System.out.println("phi = " + Math.toDegrees(phi) + "deg");
    }

    // find x and y (distance between pixels in map units)
    Collections.sort(latGrats);
    Collections.sort(lonGrats);
    int prevMapValue = 0; //fixme: how to stop the warning about this not being initialised?
    Line2D prevGratPixelSys = new Line2D.Double();

    boolean first = true;
    for (Graticule g : latGrats) {
        if (!first) {
            int deltaMapValue = Math.abs(g.realValue() - prevMapValue);
            double deltaPixelValue = (g.l.ptLineDist(prevGratPixelSys.getP1())
                    + (g.l.ptLineDist(prevGratPixelSys.getP2()))) / 2;

            double delta = deltaMapValue / deltaPixelValue;
            stats.addValue(delta);
        } else {
            first = false;
            prevMapValue = g.realValue();
            prevGratPixelSys = (Line2D) g.l.clone();
        }
    }

    double y = stats.getMean();
    if (debug)
        System.out.println("y range = " + (stats.getMax() - stats.getMin()));
    stats.clear();

    first = true;
    for (Graticule g : lonGrats) {
        if (!first) {
            int deltaMapValue = g.realValue() - prevMapValue;
            double deltaPixelValue = (g.l.ptLineDist(prevGratPixelSys.getP1())
                    + (g.l.ptLineDist(prevGratPixelSys.getP2()))) / 2;

            double delta = deltaMapValue / deltaPixelValue;
            stats.addValue(delta);
        } else {
            first = false;
            prevMapValue = g.realValue();
            prevGratPixelSys = (Line2D) g.l.clone();
        }
    }

    double x = stats.getMean();
    if (debug)
        System.out.println("x range = " + (stats.getMax() - stats.getMin()));
    stats.clear();

    if (debug) {
        System.out.println("x = " + x);
        System.out.println("y = " + y);
    }

    SimpleRegression regression = new SimpleRegression();

    // C, F are translation terms: x, y map coordinates of the center of the upper-left pixel
    for (Graticule g : latGrats) {
        // find perp dist to pixel space 0,0
        Double perpPixelDist = g.l.ptLineDist(new Point2D.Double(0, 0));

        // find the map space distance from this graticule to the center of the 0,0 pixel
        Double perpMapDist = perpPixelDist * y; // perpMapDist / perpPixelDist = y

        regression.addData(perpMapDist, g.realValue());
    }

    double F = regression.getIntercept();
    regression.clear();

    for (Graticule g : lonGrats) {
        // find perp dist to pixel space 0,0
        Double perpPixelDist = g.l.ptLineDist(new Point2D.Double(0, 0));

        // find the map space distance from this graticule to the center of the 0,0 pixel
        Double perpMapDist = perpPixelDist * x; // perpMapDist / perpPixelDist = x

        regression.addData(perpMapDist, g.realValue());
    }

    double C = regression.getIntercept();
    regression.clear();

    if (debug) {
        System.out.println("Upper Left pixel has coordinates " + C + ", " + F);
    }

    // convert to meters
    C *= unitsToMeters;
    F *= unitsToMeters;

    // C,F store the projected (in map units) coordinates of the upper left pixel.
    // originNorthing,originEasting is the offset we need to apply to 0,0 to push the offsets into our global coordinate system 
    C = originEasting + C;
    F = originNorthing + F;

    // calculate the affine transformation matrix elements
    double D = -1 * x * unitsToMeters * Math.sin(theta);
    double A = x * unitsToMeters * Math.cos(theta);
    double B = y * unitsToMeters * Math.sin(phi); // if B should be negative, the negative sine term produces it
    double E = -1 * y * unitsToMeters * Math.cos(phi);

    /*
     * Line 1: A: pixel size in the x-direction in map units/pixel
     * Line 2: D: rotation about y-axis
     * Line 3: B: rotation about x-axis
     * Line 4: E: pixel size in the y-direction in map units, almost always negative[3]
     * Line 5: C: x-coordinate of the center of the upper left pixel
     * Line 6: F: y-coordinate of the center of the upper left pixel
     */
    if (debug) {
        System.out.println("A = " + A);
        System.out.println("D = " + D);
        System.out.println("B = " + B);
        System.out.println("E = " + E);
        System.out.println("C = " + C);
        System.out.println("F = " + F);

        // write the world file
        System.out.println();
        System.out.println("World File:");
        System.out.println(A);
        System.out.println(D);
        System.out.println(B);
        System.out.println(E);
        System.out.println(C);
        System.out.println(F);
    }

    // write to the .wld file
    wldWriter.write(A + "\n");
    wldWriter.write(D + "\n");
    wldWriter.write(B + "\n");
    wldWriter.write(E + "\n");
    wldWriter.write(C + "\n");
    wldWriter.write(F + "\n");

    wldWriter.close();
}

From source file:com.ibm.watson.catalyst.corpus.tfidf.ApplyTemplate.java

public static void main(String[] args) {

    System.out.println("Loading Corpus.");
    JsonNode root;
    TermCorpus c;
    JsonNode documents;
    try (InputStream in = new FileInputStream(new File("tfidf-health-1.json"))) {
        root = MAPPER.readTree(in);
        documents = root.get("documents");
        TermCorpusBuilder cb = new TermCorpusBuilder();
        cb.setDocumentCombiner(0, 0);
        cb.setJson(new File("health-corpus.json"));
        c = cb.build();
    } catch (IOException e) {
        // FileNotFoundException and JsonProcessingException are both IOExceptions
        e.printStackTrace();
        return;
    }
    System.out.println("Corpus loaded.");

    List<TemplateMatch> matches = new ArrayList<TemplateMatch>();
    Iterator<TermDocument> documentIterator = c.getDocuments().iterator();

    int index = 0;
    for (JsonNode document : documents) {
        Pattern p1 = Template.getTemplatePattern(document, "\\b(an? |the )?(\\w+ ){0,4}",
                "( \\w+)?(?= is (an?|one|the)\\b)");
        if (p1.toString().equals("\\b(an? |the )?(\\w+ ){0,4}()( \\w+)?(?= is (an?|one|the)\\b)"))
            continue;
        Pattern p2 = Template.getTemplatePattern(document, "^(\\w+ ){0,2}",
                "( \\w+){0,1}?(?=( can| may)? causes?\\b)");
        Pattern p3 = Template.getTemplatePattern(document, "(?<=the use of )(\\w+ ){0,3}",
                "( \\w+| ){0,2}?(?=( (and|does|in|for|can|is|as|to|of)\\b|\\.))");
        Pattern p4 = Template.getTemplatePattern(document, "^(\\w+ ){0,3}",
                "( \\w+){0,1}(?=( can| may) leads? to\\b)");
        Pattern p5 = Template.getTemplatePattern(document, "(?<=\\bthe risk of )(\\w+ ){0,3}",
                "( (disease|stroke|attack|cancer))?\\b");
        Pattern p6 = Template.getTemplatePattern(document, "(\\w{3,} ){0,3}",
                "( (disease|stroke|attack|cancer))?(?= is caused by\\b)");
        Pattern p7 = Template.getTemplatePattern(document, "(?<= is caused by )(\\w+ ){0,10}", "");
        Pattern p8 = Template.getTemplatePattern(document, "\\b", "( \\w{4,})(?= can be used)");
        Pattern p9 = Template.getTemplatePattern(document, "(?<= can be used )(\\w+ ){0,10}", "\\b");
        TermDocument d = documentIterator.next();

        DocumentMatcher dm = new DocumentMatcher(d);
        matches.addAll(dm.getParagraphMatches(p1, "What is ", "?"));
        matches.addAll(dm.getParagraphMatches(p2, "What does ", " cause?"));
        matches.addAll(dm.getParagraphMatches(p3, "How is ", " used?"));
        matches.addAll(dm.getParagraphMatches(p4, "What can ", " lead to?"));
        matches.addAll(dm.getParagraphMatches(p5, "What impacts the risk of ", "?"));
        matches.addAll(dm.getParagraphMatches(p6, "What causes ", "?"));
        matches.addAll(dm.getParagraphMatches(p7, "What is caused by ", "?"));
        matches.addAll(dm.getParagraphMatches(p8, "How can ", " be used?"));
        matches.addAll(dm.getParagraphMatches(p9, "What can be used ", "?"));
        System.out.print("Progress: " + ((100 * ++index) / documents.size()) + "%\r");
    }
    System.out.println();

    List<TemplateMatch> condensedMatches = new ArrayList<TemplateMatch>();

    for (TemplateMatch match : matches) {
        boolean merged = false;
        for (TemplateMatch baseMatch : condensedMatches) {
            if (match.sameQuestion(baseMatch)) {
                baseMatch.addAnswers(match);
                merged = true;
                break;
            }
        }
        // only add the match if it was not merged into an existing question
        if (!merged) {
            condensedMatches.add(match);
        }
    }

    try (BufferedWriter bw = new BufferedWriter(new FileWriter("health-questions.txt"))) {
        for (TemplateMatch match : condensedMatches) {
            bw.write(match.toString());
        }
        bw.write("\n");
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }

    System.out.println("Done and generated: " + condensedMatches.size());

}

From source file:com.newproject.ApacheHttp.java

public static void main(String[] args) throws Exception {
    CloseableHttpClient httpclient = HttpClients.createDefault();
    String jsonFilePath = "/Users/vikasmohandoss/Documents/Cloud/test.txt";
    String url = "http://www.sentiment140.com/api/bulkClassifyJson?appid=vm2446@columbia.edu";
    JSONParser jsonParser = new JSONParser();
    JSONObject jsonObject = new JSONObject();
    URL obj = new URL(url);
    HttpURLConnection con = (HttpURLConnection) obj.openConnection();
    try {
        FileReader fileReader = new FileReader(jsonFilePath);
        jsonObject = (JSONObject) jsonParser.parse(fileReader);
    } catch (FileNotFoundException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    } catch (ParseException e) {
        e.printStackTrace();
    }
    System.out.println(jsonObject.toString());
    /*try {
    /*HttpGet httpGet = new HttpGet("http://httpbin.org/get");
    CloseableHttpResponse response1 = httpclient.execute(httpGet);
    // The underlying HTTP connection is still held by the response object
    // to allow the response content to be streamed directly from the network socket.
    // In order to ensure correct deallocation of system resources
    // the user MUST call CloseableHttpResponse#close() from a finally clause.
    // Please note that if response content is not fully consumed the underlying
    // connection cannot be safely re-used and will be shut down and discarded
    // by the connection manager.
    try {
        System.out.println(response1.getStatusLine());
        HttpEntity entity1 = response1.getEntity();
        // do something useful with the response body
        // and ensure it is fully consumed
        EntityUtils.consume(entity1);
    } finally {
        response1.close();
    }
    HttpPost httpPost = new HttpPost("http://httpbin.org/post");
    List <NameValuePair> nvps = new ArrayList <NameValuePair>();
    nvps.add(new BasicNameValuePair("username", "vip"));
    nvps.add(new BasicNameValuePair("password", "secret"));
    httpPost.setEntity(new UrlEncodedFormEntity(nvps));
    CloseableHttpResponse response2 = httpclient.execute(httpPost);
            
    try {
        System.out.println(response2.getStatusLine());
        HttpEntity entity2 = response2.getEntity();
        // do something useful with the response body
        // and ensure it is fully consumed
        EntityUtils.consume(entity2);
    } finally {
        response2.close();
    }
    } finally {
    httpclient.close();
    }*/
    try {
        HttpPost request = new HttpPost("http://www.sentiment140.com/api/bulkClassifyJson");
        StringEntity params = new StringEntity(jsonObject.toString());
        request.addHeader("content-type", "application/json");
        request.setEntity(params);
        HttpResponse response = httpclient.execute(request);
        System.out.println(response.toString());
        String result = EntityUtils.toString(response.getEntity());
        System.out.println(result);
        try {
            File file = new File("/Users/vikasmohandoss/Documents/Cloud/sentiment.txt");
            // if the file doesn't exist, create it
            if (!file.exists()) {
                file.createNewFile();
            }
            // try-with-resources closes the writer even if write() throws
            try (BufferedWriter bw = new BufferedWriter(new FileWriter(file.getAbsoluteFile()))) {
                bw.write(result);
            }
            System.out.println("Done");
        } catch (IOException e) {
            e.printStackTrace();
        }
        // handle response here...
    } catch (Exception ex) {
        ex.printStackTrace(); // report the exception rather than swallowing it
    } finally {
        httpclient.close();
    }
}

From source file:edu.cmu.lti.oaqa.knn4qa.apps.BuildRetrofitLexicons.java

public static void main(String[] args) {
    Options options = new Options();

    options.addOption(CommonParams.GIZA_ROOT_DIR_PARAM, null, true, CommonParams.GIZA_ROOT_DIR_DESC);
    options.addOption(CommonParams.GIZA_ITER_QTY_PARAM, null, true, CommonParams.GIZA_ITER_QTY_DESC);
    options.addOption(CommonParams.MEMINDEX_PARAM, null, true, CommonParams.MEMINDEX_DESC);
    options.addOption(OUT_FILE_PARAM, null, true, OUT_FILE_DESC);
    options.addOption(MIN_PROB_PARAM, null, true, MIN_PROB_DESC);
    options.addOption(FORMAT_PARAM, null, true, FORMAT_DESC);

    CommandLineParser parser = new org.apache.commons.cli.GnuParser();

    try {
        CommandLine cmd = parser.parse(options, args);
        String gizaRootDir = cmd.getOptionValue(CommonParams.GIZA_ROOT_DIR_PARAM);
        int gizaIterQty = -1;

        if (cmd.hasOption(CommonParams.GIZA_ITER_QTY_PARAM)) {
            gizaIterQty = Integer.parseInt(cmd.getOptionValue(CommonParams.GIZA_ITER_QTY_PARAM));
        } else {
            Usage("Specify: " + CommonParams.GIZA_ITER_QTY_PARAM, options);
        }
        String outFileName = cmd.getOptionValue(OUT_FILE_PARAM);
        if (null == outFileName) {
            Usage("Specify: " + OUT_FILE_PARAM, options);
        }

        String indexDir = cmd.getOptionValue(CommonParams.MEMINDEX_PARAM);

        if (null == indexDir) {
            Usage("Specify: " + CommonParams.MEMINDEX_DESC, options);
        }

        FormatType outType = FormatType.kOrig;

        String outTypeStr = cmd.getOptionValue(FORMAT_PARAM);

        if (null != outTypeStr) {
            if (outTypeStr.equals(ORIG_TYPE)) {
                outType = FormatType.kOrig;
            } else if (outTypeStr.equals(WEIGHTED_TYPE)) {
                outType = FormatType.kWeighted;
            } else if (outTypeStr.equals(UNWEIGHTED_TYPE)) {
                outType = FormatType.kUnweighted;
            } else {
                Usage("Unknown format type: " + outTypeStr, options);
            }
        }

        float minProb = 0;

        if (cmd.hasOption(MIN_PROB_PARAM)) {
            minProb = Float.parseFloat(cmd.getOptionValue(MIN_PROB_PARAM));
        } else {
            Usage("Specify: " + MIN_PROB_PARAM, options);
        }

        System.out.println(String.format(
                "Saving lexicon to '%s' (output format '%s'), keep only entries with translation probability >= %f",
                outFileName, outType.toString(), minProb));

        // We use unlemmatized text here, because the lemmatized dictionary is going to be mostly a subset of the unlemmatized one.
        InMemForwardIndex textIndex = new InMemForwardIndex(FeatureExtractor.indexFileName(indexDir,
                FeatureExtractor.mFieldNames[FeatureExtractor.TEXT_UNLEMM_FIELD_ID]));
        InMemForwardIndexFilterAndRecoder filterAndRecoder = new InMemForwardIndexFilterAndRecoder(textIndex);

        String prefix = gizaRootDir + "/" + FeatureExtractor.mFieldNames[FeatureExtractor.TEXT_UNLEMM_FIELD_ID]
                + "/";
        GizaVocabularyReader answVoc = new GizaVocabularyReader(prefix + "source.vcb", filterAndRecoder);
        GizaVocabularyReader questVoc = new GizaVocabularyReader(prefix + "target.vcb", filterAndRecoder);

        GizaTranTableReaderAndRecoder gizaTable = new GizaTranTableReaderAndRecoder(false, // we don't need to flip the table for the purpose 
                prefix + "/output.t1." + gizaIterQty, filterAndRecoder, answVoc, questVoc,
                (float) FeatureExtractor.DEFAULT_PROB_SELF_TRAN, minProb);
        BufferedWriter outFile = new BufferedWriter(new FileWriter(outFileName));

        for (int srcWordId = 0; srcWordId <= textIndex.getMaxWordId(); ++srcWordId) {
            GizaOneWordTranRecs tranRecs = gizaTable.getTranProbs(srcWordId);

            if (null != tranRecs) {
                String wordSrc = textIndex.getWord(srcWordId);
                StringBuffer sb = new StringBuffer();
                sb.append(wordSrc);

                for (int k = 0; k < tranRecs.mDstIds.length; ++k) {
                    float prob = tranRecs.mProbs[k];
                    if (prob >= minProb) {
                        int dstWordId = tranRecs.mDstIds[k];

                        if (dstWordId == srcWordId && outType != FormatType.kWeighted)
                            continue; // Don't duplicate the word, unless it's probability weighted

                        sb.append(' ');
                        String dstWord = textIndex.getWord(dstWordId);
                        if (null == dstWord) {
                            throw new Exception(
                                    "Bug or inconsistent data: Couldn't retriev a word for wordId = "
                                            + dstWordId);
                        }
                        if (dstWord.indexOf(':') >= 0)
                            throw new Exception(
                                    "Illegal dictionary word '" + dstWord + "' b/c it contains ':'");
                        sb.append(dstWord);
                        if (outType != FormatType.kOrig) {
                            sb.append(':');
                            sb.append(outType == FormatType.kWeighted ? prob : 1);
                        }
                    }
                }

                outFile.write(sb.toString());
                outFile.newLine();
            }
        }

        outFile.close();
    } catch (ParseException e) {
        e.printStackTrace();
        Usage("Cannot parse arguments", options);
    } catch (Exception e) {
        e.printStackTrace();
        System.err.println("Terminating due to an exception: " + e);
        System.exit(1);
    }

    System.out.println("Terminated successfully!");

}

From source file:de.citec.sc.matoll.process.Matoll_CreateMax.java

public static void main(String[] args) throws IOException, ParserConfigurationException, SAXException,
        InstantiationException, IllegalAccessException, ClassNotFoundException, Exception {

    String directory;
    String gold_standard_lexicon;
    String output_lexicon;
    String configFile;
    Language language;
    String output;

    Stopwords stopwords = new Stopwords();

    HashMap<String, Double> maxima = new HashMap<String, Double>();

    if (args.length < 3) {
        System.out.print("Usage: Matoll --mode=train/test <DIRECTORY> <CONFIG>\n");
        return;

    }

    //      Classifier classifier;

    directory = args[1];
    configFile = args[2];

    final Config config = new Config();

    config.loadFromFile(configFile);

    gold_standard_lexicon = config.getGoldStandardLexicon();

    String model_file = config.getModel();

    output_lexicon = config.getOutputLexicon();
    output = config.getOutput();

    language = config.getLanguage();

    LexiconLoader loader = new LexiconLoader();
    Lexicon gold = loader.loadFromFile(gold_standard_lexicon);

    Set<String> uris = new HashSet<>();
    //        Map<Integer,String> sentence_list = new HashMap<>();
    Map<Integer, Set<Integer>> mapping_words_sentences = new HashMap<>();

    //consider only properties
    for (LexicalEntry entry : gold.getEntries()) {
        try {
            for (Sense sense : entry.getSenseBehaviours().keySet()) {
                String tmp_uri = sense.getReference().getURI().replace("http://dbpedia.org/ontology/", "");
                if (!Character.isUpperCase(tmp_uri.charAt(0))) {
                    uris.add(sense.getReference().getURI());
                }
            }
        } catch (Exception e) {
            // skip entries whose sense reference cannot be read
        }
    }

    ModelPreprocessor preprocessor = new ModelPreprocessor(language);
    preprocessor.setCoreferenceResolution(false);
    Set<String> dep = new HashSet<>();
    dep.add("prep");
    dep.add("appos");
    dep.add("nn");
    dep.add("dobj");
    dep.add("pobj");
    dep.add("num");
    preprocessor.setDEP(dep);

    List<File> list_files = new ArrayList<>();

    if (config.getFiles().isEmpty()) {
        File folder = new File(directory);
        File[] files = folder.listFiles();
        for (File file : files) {
            if (file.toString().contains(".ttl"))
                list_files.add(file);
        }
    } else {
        list_files.addAll(config.getFiles());
    }
    System.out.println("Number of input files: " + list_files.size());

    int sentence_counter = 0;
    Map<String, Set<Integer>> bag_words_uri = new HashMap<>();
    Map<String, Integer> mapping_word_id = new HashMap<>();
    for (File file : list_files) {
        Model model = RDFDataMgr.loadModel(file.toString());
        for (Model sentence : getSentences(model)) {
            String reference = getReference(sentence);
            reference = reference.replace("http://dbpedia/", "http://dbpedia.org/");
            if (uris.contains(reference)) {
                sentence_counter += 1;
                Set<Integer> words_ids = getBagOfWords(sentence, stopwords, mapping_word_id);
                //TODO: add sentence preprocessing
                String obj = getObject(sentence);
                String subj = getSubject(sentence);
                preprocessor.preprocess(sentence, subj, obj, language);
                //TODO: also return marker if object or subject of property (in SPARQL this has to be optional of course)
                String parsed_sentence = getParsedSentence(sentence);
                try (FileWriter fw = new FileWriter("mapping_sentences_to_ids_goldstandard.tsv", true);
                        BufferedWriter bw = new BufferedWriter(fw);
                        PrintWriter out = new PrintWriter(bw)) {
                    out.println(sentence_counter + "\t" + parsed_sentence);
                } catch (IOException e) {
                    e.printStackTrace();
                }
                for (Integer word_id : words_ids) {
                    if (mapping_words_sentences.containsKey(word_id)) {
                        Set<Integer> tmp_set = mapping_words_sentences.get(word_id);
                        tmp_set.add(sentence_counter);
                        mapping_words_sentences.put(word_id, tmp_set);

                    } else {
                        Set<Integer> tmp_set = new HashSet<>();
                        tmp_set.add(sentence_counter);
                        mapping_words_sentences.put(word_id, tmp_set);
                    }

                }
                if (bag_words_uri.containsKey(reference)) {
                    Set<Integer> tmp = bag_words_uri.get(reference);
                    for (Integer w : words_ids) {
                        tmp.add(w);

                    }
                    bag_words_uri.put(reference, tmp);
                } else {
                    Set<Integer> tmp = new HashSet<>();
                    for (Integer w : words_ids) {
                        tmp.add(w);
                    }
                    bag_words_uri.put(reference, tmp);
                }
            }

        }
        model.close();

    }

    PrintWriter writer = new PrintWriter("bag_of_words_only_goldstandard.tsv");
    StringBuilder string_builder = new StringBuilder();
    for (String r : bag_words_uri.keySet()) {
        string_builder.append(r);
        for (Integer i : bag_words_uri.get(r)) {
            string_builder.append("\t");
            string_builder.append(i);
        }
        string_builder.append("\n");
    }
    writer.write(string_builder.toString());
    writer.close();

    writer = new PrintWriter("mapping_words_to_sentenceids_goldstandard.tsv");
    string_builder = new StringBuilder();
    for (Integer w : mapping_words_sentences.keySet()) {
        string_builder.append(w);
        for (int i : mapping_words_sentences.get(w)) {
            string_builder.append("\t");
            string_builder.append(i);
        }
        string_builder.append("\n");
    }
    writer.write(string_builder.toString());
    writer.close();

}

From source file:com.cloud.utils.crypt.EncryptionSecretKeyChanger.java

public static void main(String[] args) {
    List<String> argsList = Arrays.asList(args);
    Iterator<String> iter = argsList.iterator();
    String oldMSKey = null;
    String oldDBKey = null;
    String newMSKey = null;
    String newDBKey = null;

    //Parse command-line args
    while (iter.hasNext()) {
        String arg = iter.next();
        // Old MS Key
        if (arg.equals("-m")) {
            oldMSKey = iter.next();
        }
        // Old DB Key
        if (arg.equals("-d")) {
            oldDBKey = iter.next();
        }
        // New MS Key
        if (arg.equals("-n")) {
            newMSKey = iter.next();
        }
        // New DB Key
        if (arg.equals("-e")) {
            newDBKey = iter.next();
        }
    }

    if (oldMSKey == null || oldDBKey == null) {
        System.out.println("Existing MS secret key or DB secret key is not provided");
        usage();
        return;
    }

    if (newMSKey == null && newDBKey == null) {
        System.out.println("New MS secret key and DB secret are both not provided");
        usage();
        return;
    }

    final File dbPropsFile = PropertiesUtil.findConfigFile("db.properties");
    final Properties dbProps;
    EncryptionSecretKeyChanger keyChanger = new EncryptionSecretKeyChanger();
    StandardPBEStringEncryptor encryptor = new StandardPBEStringEncryptor();
    keyChanger.initEncryptor(encryptor, oldMSKey);
    dbProps = new EncryptableProperties(encryptor);
    PropertiesConfiguration backupDBProps = null;

    System.out.println("Parsing db.properties file");
    try {
        dbProps.load(new FileInputStream(dbPropsFile));
        backupDBProps = new PropertiesConfiguration(dbPropsFile);
    } catch (FileNotFoundException e) {
        System.out.println("db.properties file not found while reading DB secret key" + e.getMessage());
    } catch (IOException e) {
        System.out.println("Error while reading DB secret key from db.properties" + e.getMessage());
    } catch (ConfigurationException e) {
        e.printStackTrace();
    }

    String dbSecretKey = null;
    try {
        dbSecretKey = dbProps.getProperty("db.cloud.encrypt.secret");
    } catch (EncryptionOperationNotPossibleException e) {
        System.out.println("Failed to decrypt existing DB secret key from db.properties. " + e.getMessage());
        return;
    }

    if (!oldDBKey.equals(dbSecretKey)) {
        System.out.println("Incorrect MS Secret Key or DB Secret Key");
        return;
    }

    System.out.println("Secret key provided matched the key in db.properties");
    final String encryptionType = dbProps.getProperty("db.cloud.encryption.type");

    if (newMSKey == null) {
        System.out.println("No change in MS Key. Skipping migrating db.properties");
    } else {
        if (!keyChanger.migrateProperties(dbPropsFile, dbProps, newMSKey, newDBKey)) {
            System.out.println("Failed to update db.properties");
            return;
        } else {
            //db.properties updated successfully
            if ("file".equals(encryptionType)) { // null-safe in case the property is absent
                //update key file with new MS key
                // try-with-resources closes the writer even if write() throws
                try (BufferedWriter bwriter = new BufferedWriter(new FileWriter(keyFile))) {
                    bwriter.write(newMSKey);
                } catch (IOException e) {
                    System.out.println("Failed to write the new secret to the key file. Please update the file manually");
                }
            }
        }
    }

    boolean success = false;
    if (newDBKey == null || newDBKey.equals(oldDBKey)) {
        System.out.println("No change in DB Secret Key. Skipping Data Migration");
    } else {
        EncryptionSecretKeyChecker.initEncryptorForMigration(oldMSKey);
        try {
            success = keyChanger.migrateData(oldDBKey, newDBKey);
        } catch (Exception e) {
            System.out.println("Error during data migration");
            e.printStackTrace();
            success = false;
        }
    }

    if (success) {
        System.out.println("Successfully updated secret key(s)");
    } else {
        System.out.println("Data Migration failed. Reverting db.properties");
        //revert db.properties
        try {
            backupDBProps.save();
        } catch (ConfigurationException e) {
            e.printStackTrace();
        }
        if ("file".equals(encryptionType)) { // null-safe in case the property is absent
            //revert secret key in file
            // try-with-resources closes the writer even if write() throws
            try (BufferedWriter bwriter = new BufferedWriter(new FileWriter(keyFile))) {
                bwriter.write(oldMSKey);
            } catch (IOException e) {
                System.out.println("Failed to restore the old secret in the key file. Please update the file manually");
            }
        }
    }
}