Example usage for java.util Map put

List of usage examples for java.util Map put

Introduction

On this page you can find usage examples for java.util Map put.

Prototype

V put(K key, V value);

Document

Associates the specified value with the specified key in this map (optional operation).
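
A minimal, self-contained sketch of the prototype in isolation (the class and variable names below are illustrative, not taken from the projects listed under Usage): put returns the value previously associated with the key, or null if there was no mapping.

import java.util.HashMap;
import java.util.Map;

public class MapPutExample {
    public static void main(String[] args) {
        Map<String, Integer> counts = new HashMap<String, Integer>();

        // First put: "apples" has no previous mapping, so put returns null
        Integer previous = counts.put("apples", 3);
        System.out.println(previous); // null

        // Second put with the same key: the value is replaced and the old value is returned
        previous = counts.put("apples", 5);
        System.out.println(previous); // 3
        System.out.println(counts.get("apples")); // 5
    }
}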

Usage

From source file:com.bt.aloha.batchtest.PerformanceBatchTest.java

public static void main(String[] args) throws Exception {
    MultistackApplicationContextManager manager = null;

    try {
        manager = new MultistackApplicationContextManager(new String[] { "batchTestApplicationContext.xml",
                "propertyListenerApplicationContext.performance.xml" }, null);
        manager.injectManagerIntoApplicatonContext1Beans();
    } catch (Exception e) {
        log.error(e);
        e.printStackTrace();
        System.exit(1);
    }

    log.info("Loading application context");

    boolean success = true;
    PerformanceMeasurmentDao dao = null;
    //ClassPathXmlApplicationContext applicationContext = new ClassPathXmlApplicationContext("batchTestApplicationContext.xml");
    ClassPathXmlApplicationContext applicationContext = manager.getApplicationContext1();
    dao = (PerformanceMeasurmentDao) applicationContext.getBean("performanceMeasurementDaoBean");
    PerformanceBatchTest performanceBatchTest = (PerformanceBatchTest) applicationContext
            .getBean("performanceBatchTestBean");
    int start = performanceBatchTest.getNumberOfInitialConcurrentStarts();
    int max = performanceBatchTest.getNumberOfMaxConcurrentStarts();
    int inc = performanceBatchTest.getNumberOfConcurrentStartsIncrements();
    long runId = dao.generateId();
    performanceBatchTest.setApplicationContext(applicationContext);
    performanceBatchTest.resetDb();
    for (int currNumberOfAppThreads = start; currNumberOfAppThreads <= max; currNumberOfAppThreads += inc) {

        performanceBatchTest.init();
        performanceBatchTest.addBatchScenarios();
        performanceBatchTest.setNumberOfConcurrentStarts(currNumberOfAppThreads);
        logSystemInformation(performanceBatchTest);
        performanceBatchTest.setExecutorService(Executors.newFixedThreadPool(currNumberOfAppThreads));
        log.info("Running tests with " + currNumberOfAppThreads + " concurrent threads");
        performanceBatchTest.run();
        performanceBatchTest.currentMetrics
                .setThreadInfo(String.format(Metrics.TI_STRING, currNumberOfAppThreads, start, max, inc));
        performanceBatchTest.currentMetrics.setTestType(performanceBatchTest.getTestType());
        dao.record("Call", runId, performanceBatchTest.currentMetrics);
        performanceBatchTest.executorService.shutdownNow();
        performanceBatchTest.results.clear();
        success &= performanceBatchTest.overallSuccess;
    }
    applicationContext.destroy();

    if (dao != null) {
        List<Metrics> metrics = dao.findMetricsByRunId(runId);
        Map<Long, List<Metrics>> m = new HashMap<Long, List<Metrics>>();
        m.put(runId, metrics);
        Chart c = new Chart(m);
        //String xLabel = String.format("Runs - %s calls per thread, %s min threads, %s max theads, %s increment", cpt, start, max, inc);
        c.saveChart(new File("unitPerSecond.jpg"), "UPS with Std deviation", "threads", "units per second");
        m = dao.findLastXMetricsForTestType(5, performanceBatchTest.getTestType());
        c = new Chart(m);
        c.saveCombinedChart(new File("unitPerSecond-historical.jpg"), "Runs Per Second", "threads",
                "runs per second", "Standard Deviation", "threads", "std. deviation");
    }
    try {
        // allow sipp to settle down (i.e. finish sending its responses while we are still up to receive them)
        Thread.sleep(30000);
    } catch (Throwable tex) {
    }
    System.exit(success ? 0 : 1);
}

From source file:net.kolola.msgparsercli.MsgParseCLI.java

public static void main(String[] args) {

    // Parse options

    OptionParser parser = new OptionParser("f:a:bi?*");
    OptionSet options = parser.parse(args);

    // Get the filename
    if (!options.has("f")) {
        System.err.print("Specify a msg file with the -f option");
        System.exit(0);
    }

    File file = new File((String) options.valueOf("f"));

    MsgParser msgp = new MsgParser();
    Message msg = null;

    try {
        msg = msgp.parseMsg(file);
    } catch (UnsupportedOperationException | IOException e) {
        System.err.print("File does not exist or is not a valid msg file");
        //e.printStackTrace();
        System.exit(1);
    }

    // Show info (as JSON)
    if (options.has("i")) {
        Map<String, Object> data = new HashMap<String, Object>();

        String date;

        try {
            Date st = msg.getClientSubmitTime();
            date = st.toString();
        } catch (Exception g) {
            try {
                date = msg.getDate().toString();
            } catch (Exception e) {
                date = "[UNAVAILABLE]";
            }
        }

        data.put("date", date);
        data.put("subject", msg.getSubject());
        data.put("from", "\"" + msg.getFromName() + "\" <" + msg.getFromEmail() + ">");
        data.put("to", "\"" + msg.getToRecipient().toString());

        // String.concat() returns a new string and does not modify cc in place,
        // so reassign when building the semicolon-separated CC list
        String cc = "";
        for (RecipientEntry r : msg.getCcRecipients()) {
            if (cc.length() > 0)
                cc = cc + "; ";

            cc = cc + r.toString();
        }

        data.put("cc", cc);

        data.put("body_html", msg.getBodyHTML());
        data.put("body_rtf", msg.getBodyRTF());
        data.put("body_text", msg.getBodyText());

        // Attachments
        List<Map<String, String>> atts = new ArrayList<Map<String, String>>();
        for (Attachment a : msg.getAttachments()) {
            HashMap<String, String> info = new HashMap<String, String>();

            if (a instanceof FileAttachment) {
                FileAttachment fa = (FileAttachment) a;

                info.put("type", "file");
                info.put("filename", fa.getFilename());
                info.put("size", Long.toString(fa.getSize()));
            } else {
                info.put("type", "message");
            }

            atts.add(info);
        }

        data.put("attachments", atts);

        JSONObject json = new JSONObject(data);

        try {
            System.out.print(json.toString(4));
        } catch (JSONException e) {
            e.printStackTrace();
        }
    }

    // OR return an attachment in BASE64
    else if (options.has("a")) {
        Integer anum = Integer.parseInt((String) options.valueOf("a"));

        Encoder b64 = Base64.getEncoder();

        List<Attachment> atts = msg.getAttachments();

        if (atts.size() <= anum) {
            System.err.print("Attachment " + anum.toString() + " does not exist");
            // Exit here; otherwise atts.get(anum) below would throw IndexOutOfBoundsException
            System.exit(1);
        }

        Attachment att = atts.get(anum);

        if (att instanceof FileAttachment) {
            FileAttachment fatt = (FileAttachment) att;
            System.out.print(b64.encodeToString(fatt.getData()));
        } else {
            System.err.print("Attachment " + anum.toString() + " is a message - That's not implemented yet :(");
        }
    }
    // OR print the message body
    else if (options.has("b")) {
        System.out.print(msg.getConvertedBodyHTML());
    } else {
        System.err.print(
                "Specify either -i to return msg information or -a <num> to print an attachment as a BASE64 string");
    }

}

From source file:org.ala.harvester.MorphbankHarvester.java

/**
 * Main method for testing this particular Harvester
 *
 * @param args
 */
public static void main(String[] args) throws Exception {
    String[] locations = { "classpath*:spring.xml" };
    ApplicationContext context = new ClassPathXmlApplicationContext(locations);
    MorphbankHarvester h = new MorphbankHarvester();
    Repository r = (Repository) context.getBean("repository");
    h.setRepository(r);

    //set the connection params   
    Map<String, String> connectParams = new HashMap<String, String>();

    if (args.length == 1 && args[0].equals("Coral")) {
        h.setCoral(true);
        connectParams.put("endpoint",
                "http://morphbank-svc.ala.org.au/mb3/request?method=search&objecttype=Image&keywords=Coral+Reef+Research&limit="
                        + RESULT_LIMIT
                        + "&firstResult=0&user=&group=&change=&lastDateChanged=&numChangeDays=1&id=&taxonName=&format=svc");
    } else if (args.length == 1) {
        connectParams.put("endpoint",
                "http://morphbank-svc.ala.org.au/mb3/request?method=search&objecttype=Image&keywords=Australia&limit="
                        + RESULT_LIMIT
                        + "&firstResult=0&user=&group=&change=&lastDateChanged=&numChangeDays=1&id=&taxonName=&format=svc");
        try {
            h.setInit(Integer.valueOf(args[0]));
        } catch (NumberFormatException nfe) {
            System.out.println("Starting id is not a number!");
            System.exit(1);
        }
    } else {
        connectParams.put("endpoint",
                "http://morphbank-svc.ala.org.au/mb3/request?method=search&objecttype=Image&keywords=Australia&limit="
                        + RESULT_LIMIT
                        + "&firstResult=0&user=&group=&change=&lastDateChanged=&numChangeDays=1&id=&taxonName=&format=svc");
    }

    h.setConnectionParams(connectParams);
    h.start(MORPHBANK_INFOSOURCE_ID);
}

From source file:cc.twittertools.search.api.TrecSearchThriftServer.java

@SuppressWarnings("static-access")
public static void main(String[] args) throws Exception {
    Options options = new Options();

    options.addOption(new Option(HELP_OPTION, "show help"));
    options.addOption(OptionBuilder.withArgName("port").hasArg().withDescription("port").create(PORT_OPTION));
    options.addOption(
            OptionBuilder.withArgName("index").hasArg().withDescription("index location").create(INDEX_OPTION));
    options.addOption(OptionBuilder.withArgName("num").hasArg()
            .withDescription("max number of threads in thread pool").create(MAX_THREADS_OPTION));
    options.addOption(OptionBuilder.withArgName("file").hasArg()
            .withDescription("file containing access tokens").create(CREDENTIALS_OPTION));

    CommandLine cmdline = null;
    CommandLineParser parser = new GnuParser();
    try {
        cmdline = parser.parse(options, args);
    } catch (ParseException exp) {
        System.err.println("Error parsing command line: " + exp.getMessage());
        System.exit(-1);
    }

    if (cmdline.hasOption(HELP_OPTION) || !cmdline.hasOption(INDEX_OPTION)) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp(TrecSearchThriftServer.class.getName(), options);
        System.exit(-1);
    }

    int port = cmdline.hasOption(PORT_OPTION) ? Integer.parseInt(cmdline.getOptionValue(PORT_OPTION))
            : DEFAULT_PORT;
    int maxThreads = cmdline.hasOption(MAX_THREADS_OPTION)
            ? Integer.parseInt(cmdline.getOptionValue(MAX_THREADS_OPTION))
            : DEFAULT_MAX_THREADS;
    File index = new File(cmdline.getOptionValue(INDEX_OPTION));

    Map<String, String> credentials = null;
    if (cmdline.hasOption(CREDENTIALS_OPTION)) {
        credentials = Maps.newHashMap();
        File cfile = new File(cmdline.getOptionValue(CREDENTIALS_OPTION));
        if (!cfile.exists()) {
            System.err.println("Error: " + cfile + " does not exist!");
            System.exit(-1);
        }
        for (String s : Files.readLines(cfile, Charsets.UTF_8)) {
            try {
                String[] arr = s.split(":");
                credentials.put(arr[0], arr[1]);
            } catch (Exception e) {
                // Catch any exceptions from parsing the file containing access tokens
                System.err.println("Error reading access tokens from " + cfile + "!");
                System.exit(-1);
            }
        }
    }

    if (!index.exists()) {
        System.err.println("Error: " + index + " does not exist!");
        System.exit(-1);
    }

    TServerSocket serverSocket = new TServerSocket(port);
    TrecSearch.Processor<TrecSearch.Iface> searchProcessor = new TrecSearch.Processor<TrecSearch.Iface>(
            new TrecSearchHandler(index, credentials));

    TThreadPoolServer.Args serverArgs = new TThreadPoolServer.Args(serverSocket);
    serverArgs.maxWorkerThreads(maxThreads);
    TServer thriftServer = new TThreadPoolServer(
            serverArgs.processor(searchProcessor).protocolFactory(new TBinaryProtocol.Factory()));

    thriftServer.serve();
}

From source file:com.act.lcms.db.io.ExportStandardIonResultsFromDB.java

public static void main(String[] args) throws Exception {
    Options opts = new Options();
    for (Option.Builder b : OPTION_BUILDERS) {
        opts.addOption(b.build());
    }

    CommandLine cl = null;
    try {
        CommandLineParser parser = new DefaultParser();
        cl = parser.parse(opts, args);
    } catch (ParseException e) {
        System.err.format("Argument parsing failed: %s\n", e.getMessage());
        HELP_FORMATTER.printHelp(ExportStandardIonResultsFromDB.class.getCanonicalName(), HELP_MESSAGE, opts,
                null, true);
        System.exit(1);
    }

    if (cl.hasOption("help")) {
        HELP_FORMATTER.printHelp(ExportStandardIonResultsFromDB.class.getCanonicalName(), HELP_MESSAGE, opts,
                null, true);
        return;
    }

    try (DB db = DB.openDBFromCLI(cl)) {
        List<String> chemicalNames = new ArrayList<>();
        if (cl.hasOption(OPTION_CONSTRUCT)) {
            // Extract the chemicals in the pathway and their product masses, then look up info on those chemicals
            List<Pair<ChemicalAssociatedWithPathway, Double>> productMasses = Utils
                    .extractMassesForChemicalsAssociatedWithConstruct(db, cl.getOptionValue(OPTION_CONSTRUCT));

            for (Pair<ChemicalAssociatedWithPathway, Double> pair : productMasses) {
                chemicalNames.add(pair.getLeft().getChemical());
            }
        }

        if (cl.hasOption(OPTION_CHEMICALS)) {
            chemicalNames.addAll(Arrays.asList(cl.getOptionValues(OPTION_CHEMICALS)));
        }

        if (chemicalNames.size() == 0) {
            System.err.format("No chemicals can be found from the input query.\n");
            System.exit(-1);
        }

        List<String> standardIonHeaderFields = new ArrayList<String>() {
            {
                add(STANDARD_ION_HEADER_FIELDS.CHEMICAL.name());
                add(STANDARD_ION_HEADER_FIELDS.BEST_ION_FROM_ALGO.name());
                add(STANDARD_ION_HEADER_FIELDS.MANUAL_PICK.name());
                add(STANDARD_ION_HEADER_FIELDS.AUTHOR.name());
                add(STANDARD_ION_HEADER_FIELDS.DIAGNOSTIC_PLOTS.name());
                add(STANDARD_ION_HEADER_FIELDS.NOTE.name());
            }
        };

        String outAnalysis;
        if (cl.hasOption(OPTION_OUTPUT_PREFIX)) {
            outAnalysis = cl.getOptionValue(OPTION_OUTPUT_PREFIX) + "." + TSV_FORMAT;
        } else {
            outAnalysis = String.join("-", chemicalNames) + "." + TSV_FORMAT;
        }

        File lcmsDir = new File(cl.getOptionValue(OPTION_DIRECTORY));
        if (!lcmsDir.isDirectory()) {
            System.err.format("File at %s is not a directory\n", lcmsDir.getAbsolutePath());
            HELP_FORMATTER.printHelp(LoadPlateCompositionIntoDB.class.getCanonicalName(), HELP_MESSAGE, opts,
                    null, true);
            System.exit(1);
        }

        String plottingDirectory = cl.getOptionValue(OPTION_PLOTTING_DIR);

        TSVWriter<String, String> resultsWriter = new TSVWriter<>(standardIonHeaderFields);
        resultsWriter.open(new File(outAnalysis));

        // For each chemical, create a TSV row and a corresponding diagnostic plot
        for (String chemicalName : chemicalNames) {
            List<String> graphLabels = new ArrayList<>();
            List<Double> yMaxList = new ArrayList<>();

            String outData = plottingDirectory + "/" + chemicalName + ".data";
            String outImg = plottingDirectory + "/" + chemicalName + ".pdf";

            // For each diagnostic plot, open a new file stream.
            try (FileOutputStream fos = new FileOutputStream(outData)) {

                List<StandardIonResult> getResultByChemicalName = StandardIonResult.getByChemicalName(db,
                        chemicalName);

                if (getResultByChemicalName != null && getResultByChemicalName.size() > 0) {

                    // PART 1: Get the best metlin ion across all standard ion results for a given chemical
                    String bestGlobalMetlinIon = AnalysisHelper
                            .scoreAndReturnBestMetlinIonFromStandardIonResults(getResultByChemicalName,
                                    new HashMap<>(), true, true);

                    // PART 2: Plot all the graphs related to the chemical. The plots are structured as follows:
                    //
                    // Page 1: All graphs (water, MeOH, Yeast) for Global ion picked (best ion among ALL standard ion runs for
                    // the given chemical) by the algorithm
                    // Page 2: All graphs for M+H
                    // Page 3: All graphs for Local ions picked (best ion within a SINGLE standard ion run) + negative controls
                    // for Yeast.
                    //
                    // Each page is demarcated by a blank graph.

                    // Arrange results based on media
                    Map<String, List<StandardIonResult>> categories = StandardIonResult
                            .categorizeListOfStandardWellsByMedia(db, getResultByChemicalName);

                    // This set contains all the best metlin ions corresponding to all the standard ion runs.
                    Set<String> bestLocalIons = new HashSet<>();
                    bestLocalIons.add(bestGlobalMetlinIon);
                    bestLocalIons.add(DEFAULT_ION);

                    for (StandardIonResult result : getResultByChemicalName) {
                        bestLocalIons.add(result.getBestMetlinIon());
                    }

                    // We sort the best local ions as follows:
                    // 1) Global best ion spectra 2) M+H spectra 3) Local best ion spectra
                    List<String> bestLocalIonsArray = new ArrayList<>(bestLocalIons);
                    Collections.sort(bestLocalIonsArray, new Comparator<String>() {
                        @Override
                        public int compare(String o1, String o2) {
                            if (o1.equals(bestGlobalMetlinIon) && !o2.equals(bestGlobalMetlinIon)) {
                                return -1;
                            } else if (o1.equals(DEFAULT_ION) && !o2.equals(bestGlobalMetlinIon)) {
                                return -1;
                            } else {
                                return 1;
                            }
                        }
                    });

                    // This variable stores the index in the array from which all the remaining spectra are combined onto
                    // one page. This happens right after the M+H ion spectra.
                    Integer combineAllSpectraIntoPageThreeFromIndex = 0;
                    for (int i = 0; i < bestLocalIonsArray.size(); i++) {
                        if (bestLocalIonsArray.get(i).equals(DEFAULT_ION)) {
                            combineAllSpectraIntoPageThreeFromIndex = i + 1;
                        }
                    }

                    for (int i = 0; i < bestLocalIonsArray.size(); i++) {

                        String ion = bestLocalIonsArray.get(i);
                        for (Map.Entry<String, List<StandardIonResult>> mediaToListOfIonResults : categories
                                .entrySet()) {

                            for (StandardIonResult result : mediaToListOfIonResults.getValue()) {

                                // For every standard ion result, we plot the best global metlin ion and M+H. These plots go on
                                // pages 1 and 2. On page 3 (aka miscellaneous spectra), we only plot the best local ion
                                // corresponding to its own spectra, not some other graph's spectra. The condition below catches
                                // the page 3 case where the ion is not the best ion for this spectra; in that case we just
                                // continue and draw nothing on the page.
                                if (i >= combineAllSpectraIntoPageThreeFromIndex
                                        && !(result.getBestMetlinIon().equals(ion))) {
                                    continue;
                                }

                                StandardWell positiveWell = StandardWell.getInstance().getById(db,
                                        result.getStandardWellId());
                                String positiveControlChemical = positiveWell.getChemical();

                                ScanData<StandardWell> encapsulatedDataForPositiveControl = AnalysisHelper
                                        .getScanDataForWell(db, lcmsDir, positiveWell, positiveControlChemical,
                                                positiveControlChemical);

                                Set<String> singletonSet = Collections.singleton(ion);
                                String additionalInfo = generateAdditionalLabelInformation(positiveWell, result,
                                        ion);

                                List<String> labels = AnalysisHelper
                                        .writeScanData(fos, lcmsDir, MAX_INTENSITY,
                                                encapsulatedDataForPositiveControl, false, false, singletonSet)
                                        .stream().map(label -> label + additionalInfo)
                                        .collect(Collectors.toList());

                                yMaxList.add(encapsulatedDataForPositiveControl.getMs1ScanResults()
                                        .getMaxIntensityForIon(ion));

                                List<String> negativeLabels = null;
                                // Only do the negative control in the miscellaneous page (page 3) and if the well is in yeast media.
                                if (mediaToListOfIonResults.getKey()
                                        .equals(StandardWell.MEDIA_TYPE.YEAST.name())
                                        && (i >= combineAllSpectraIntoPageThreeFromIndex
                                                && (result.getBestMetlinIon().equals(ion)))) {
                                    //TODO: Change the representative negative well to one that displays the highest noise in the future.
                                    // For now, we just use the first index among the negative wells.
                                    int representativeIndex = 0;
                                    StandardWell representativeNegativeControlWell = StandardWell.getInstance()
                                            .getById(db, result.getNegativeWellIds().get(representativeIndex));

                                    ScanData encapsulatedDataForNegativeControl = AnalysisHelper
                                            .getScanDataForWell(db, lcmsDir, representativeNegativeControlWell,
                                                    positiveWell.getChemical(),
                                                    representativeNegativeControlWell.getChemical());

                                    String negativePlateAdditionalInfo = generateAdditionalLabelInformation(
                                            representativeNegativeControlWell, null, null);

                                    negativeLabels = AnalysisHelper.writeScanData(fos, lcmsDir, MAX_INTENSITY,
                                            encapsulatedDataForNegativeControl, false, false, singletonSet)
                                            .stream().map(label -> label + negativePlateAdditionalInfo)
                                            .collect(Collectors.toList());

                                    yMaxList.add(encapsulatedDataForNegativeControl.getMs1ScanResults()
                                            .getMaxIntensityForIon(ion));
                                }

                                graphLabels.addAll(labels);

                                if (negativeLabels != null) {
                                    graphLabels.addAll(negativeLabels);
                                }
                            }
                        }

                        // Add a blank graph to demarcate pages.
                        if (i < combineAllSpectraIntoPageThreeFromIndex) {
                            graphLabels.addAll(AnalysisHelper.writeScanData(fos, lcmsDir, 0.0, BLANK_SCAN,
                                    false, false, new HashSet<>()));
                            yMaxList.add(0.0d);
                        }
                    }

                    // We need to pass the yMax values as an array to the Gnuplotter.
                    Double fontScale = null;
                    if (cl.hasOption(FONT_SCALE)) {
                        try {
                            fontScale = Double.parseDouble(cl.getOptionValue(FONT_SCALE));
                        } catch (IllegalArgumentException e) {
                            System.err.format("Argument for font-scale must be a floating point number.\n");
                            System.exit(1);
                        }
                    }

                    Double[] yMaxes = yMaxList.toArray(new Double[yMaxList.size()]);
                    Gnuplotter plotter = fontScale == null ? new Gnuplotter() : new Gnuplotter(fontScale);
                    plotter.plot2D(outData, outImg, graphLabels.toArray(new String[graphLabels.size()]), "time",
                            null, "intensity", "pdf", null, null, yMaxes, outImg + ".gnuplot");

                    Map<String, String> row = new HashMap<>();
                    row.put(STANDARD_ION_HEADER_FIELDS.CHEMICAL.name(), chemicalName);
                    row.put(STANDARD_ION_HEADER_FIELDS.BEST_ION_FROM_ALGO.name(), bestGlobalMetlinIon);
                    row.put(STANDARD_ION_HEADER_FIELDS.DIAGNOSTIC_PLOTS.name(), outImg);

                    resultsWriter.append(row);
                    resultsWriter.flush();
                }
            }
        }

        resultsWriter.flush();
        resultsWriter.close();
    }
}

From source file:org.ala.harvester.TrinWikiHarvester.java

/**
 * Main method for testing this particular Harvester
 *
 * @param args
 */
public static void main(String[] args) throws Exception {
    String[] locations = { "classpath*:spring.xml" };
    ApplicationContext context = new ClassPathXmlApplicationContext(locations);
    TrinWikiHarvester h = new TrinWikiHarvester();
    Repository r = (Repository) context.getBean("repository");
    h.setRepository(r);

    //set the connection params 
    Map<String, String> connectParams = new HashMap<String, String>();

    //        connectParams.put("endpoint", "https://wiki.trin.org.au/bin/query/Marine/SeaSlugs/Taxa/Acanbrun/topic.json");
    connectParams.put("endpoint", jsonStr);

    h.setConnectionParams(connectParams);
    h.start(TRIN_WIKI_INFOSOURCE_ID);
}

From source file:de.interactive_instruments.ShapeChange.Target.FeatureCatalogue.XsltWriter.java

/**
 * Parameter identifiers have a leading "-". Parameter values are separated
 * from the parameter identifier via a single space.
 * <ul>
 * <li>Parameter {@value #PARAM_xslTransformerFactory}: fully qualified name
 * of the XSLT processor implementation; NOTE: this parameter may be omitted
 * if the default implementation shall be used.</li>
 * <li>Parameter {@value #PARAM_hrefMappings}: list of key-value pairs
 * defining href mappings, structured using URL query syntax (i.e. using '='
 * to separate the key from the value, using '&' to separate pairs, and with
 * URL-encoded values in UTF-8 character encoding); NOTE: this parameter may
 * be omitted if href mappings are not needed.</li>
 * <li>Parameter {@value #PARAM_transformationParameters}: list of key-value
 * pairs defining the transformation parameters, structured using URL query
 * syntax (i.e. using '=' to separate the key from the value, using '&' to
 * separate pairs, and with URL-encoded values in UTF-8 character encoding);
 * NOTE: this parameter may be omitted if transformation parameters are not
 * needed.</li>
 * <li>Parameter {@value #PARAM_transformationSourcePath}: path to the
 * transformation source file (may be a relative path); NOTE: this is a
 * required parameter.</li>
 * <li>Parameter {@value #PARAM_xsltMainFileUri}: String representation of
 * the URI to the main XSLT file; NOTE: this is a required parameter.</li>
 * <li>Parameter {@value #PARAM_transformationTargetPath}: path to the
 * transformation target file (may be a relative path); NOTE: this is a
 * required parameter.</li>
 * </ul>
 */
public static void main(String[] args) {

    String xslTransformerFactory = null;
    String hrefMappingsString = null;
    String transformationParametersString = null;
    String transformationSourcePath = null;
    String xsltMainFileUriString = null;
    String transformationTargetPath = null;

    // identify parameters
    String arg = null;

    for (int i = 0; i < args.length; i++) {

        arg = args[i];

        if (arg.equals(PARAM_xslTransformerFactory)) {

            if (i + 1 == args.length || args[i + 1].startsWith("-")) {
                System.err.println("No value provided for invocation parameter " + PARAM_xslTransformerFactory);
                return;
            } else {
                xslTransformerFactory = args[i + 1];
                i++;
            }

        } else if (arg.equals(PARAM_hrefMappings)) {

            if (i + 1 == args.length || args[i + 1].startsWith("-")) {
                System.err.println("No value provided for invocation parameter " + PARAM_hrefMappings);
                return;
            } else {
                hrefMappingsString = args[i + 1];
                i++;
            }

        } else if (arg.equals(PARAM_transformationParameters)) {

            if (i + 1 == args.length || args[i + 1].startsWith("-")) {
                System.err.println(
                        "No value provided for invocation parameter " + PARAM_transformationParameters);
                return;
            } else {
                transformationParametersString = args[i + 1];
                i++;
            }

        } else if (arg.equals(PARAM_transformationSourcePath)) {

            if (i + 1 == args.length || args[i + 1].startsWith("-")) {
                System.err.println(
                        "No value provided for invocation parameter " + PARAM_transformationSourcePath);
                return;
            } else {
                transformationSourcePath = args[i + 1];
                i++;
            }

        } else if (arg.equals(PARAM_transformationTargetPath)) {

            if (i + 1 == args.length || args[i + 1].startsWith("-")) {
                System.err.println(
                        "No value provided for invocation parameter " + PARAM_transformationTargetPath);
                return;
            } else {
                transformationTargetPath = args[i + 1];
                i++;
            }

        } else if (arg.equals(PARAM_xsltMainFileUri)) {

            if (i + 1 == args.length || args[i + 1].startsWith("-")) {
                System.err.println("No value provided for invocation parameter " + PARAM_xsltMainFileUri);
                return;
            } else {
                xsltMainFileUriString = args[i + 1];
                i++;
            }
        }
    }

    try {

        // parse parameter values
        Map<String, URI> hrefMappings = new HashMap<String, URI>();

        List<NameValuePair> hrefMappingsList = URLEncodedUtils.parse(hrefMappingsString, ENCODING_CHARSET);
        for (NameValuePair nvp : hrefMappingsList) {

            hrefMappings.put(nvp.getName(), new URI(nvp.getValue()));
        }

        Map<String, String> transformationParameters = new HashMap<String, String>();

        List<NameValuePair> transParamList = URLEncodedUtils.parse(transformationParametersString,
                ENCODING_CHARSET);
        for (NameValuePair nvp : transParamList) {
            transformationParameters.put(nvp.getName(), nvp.getValue());
        }

        boolean invalidParameters = false;

        if (transformationSourcePath == null) {
            invalidParameters = true;
            System.err.println("Path to transformation source file was not provided.");
        }
        if (xsltMainFileUriString == null) {
            invalidParameters = true;
            System.err.println("Path to main XSLT file was not provided.");
        }
        if (transformationTargetPath == null) {
            invalidParameters = true;
            System.err.println("Path to transformation target file was not provided.");
        }

        if (!invalidParameters) {

            // set up and execute XSL transformation
            XsltWriter writer = new XsltWriter(xslTransformerFactory, hrefMappings, transformationParameters,
                    null);

            File transformationSource = new File(transformationSourcePath);
            URI xsltMainFileUri = new URI(xsltMainFileUriString);
            File transformationTarget = new File(transformationTargetPath);

            writer.xsltWrite(transformationSource, xsltMainFileUri, transformationTarget);
        }

    } catch (Exception e) {

        String m = e.getMessage();

        if (m != null) {
            System.err.println(m);
        } else {
            System.err.println("Exception occurred while processing the XSL transformation.");
        }
    }
}
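
The href mappings and transformation parameters described in the Javadoc above are plain URL-query-style strings. As a rough standalone sketch of that encoding (the sample keys and values are made up, and the example's ENCODING_CHARSET constant is replaced by an explicit UTF-8 charset), parsing such a string into a Map via put could look like this:

import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.http.NameValuePair;
import org.apache.http.client.utils.URLEncodedUtils;

public class QueryStringToMapSketch {
    public static void main(String[] args) {
        // Illustrative value only: two URL-encoded transformation parameters
        String transformationParametersString = "docTitle=Feature%20Catalogue&lang=en";

        Map<String, String> transformationParameters = new HashMap<String, String>();
        List<NameValuePair> pairs = URLEncodedUtils.parse(transformationParametersString,
                StandardCharsets.UTF_8);
        for (NameValuePair nvp : pairs) {
            transformationParameters.put(nvp.getName(), nvp.getValue());
        }

        // HashMap iteration order is not guaranteed; e.g. {docTitle=Feature Catalogue, lang=en}
        System.out.println(transformationParameters);
    }
}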

From source file:org.ala.harvester.FlickrHarvester.java

/**
 * Main method for testing this particular Harvester
 *
 * @param args
 */
public static void main(String[] args) throws Exception {
    String[] locations = { "classpath*:spring.xml" };
    ApplicationContext context = new ClassPathXmlApplicationContext(locations);
    FlickrHarvester h = new FlickrHarvester();
    Repository r = (Repository) context.getBean("repository");
    h.setDocumentMapper(new FlickrDocumentMapper());
    h.setRepository(r);

    //set the connection params   
    Map<String, String> connectParams = new HashMap<String, String>();
    connectParams.put("endpoint",
            "http://api.flickr.com/services/rest/?method=flickr.photos.search&api_key=08f5318120189e9d12669465c0113351&page=1");
    //      connectParams.put("eolGroupId", "806927@N20");
    connectParams.put("eolGroupId", "22545712@N05");
    connectParams.put("flickrRestBaseUrl", "http://api.flickr.com/services/rest");
    connectParams.put("flickrApiKey", "08f5318120189e9d12669465c0113351");
    connectParams.put("recordsPerPage", "50");

    h.setConnectionParams(connectParams);
    h.start(1106); //1013 is the ID for the data source flickr
}

From source file:com.msopentech.odatajclient.engine.performance.PerfTestReporter.java

public static void main(final String[] args) throws Exception {
    // 1. check input directory
    final File reportdir = new File(args[0] + File.separator + "target" + File.separator + "surefire-reports");
    if (!reportdir.isDirectory()) {
        throw new IllegalArgumentException("Expected directory, got " + args[0]);
    }

    // 2. read test data from surefire output
    final File[] xmlReports = reportdir.listFiles(new FilenameFilter() {

        @Override
        public boolean accept(final File dir, final String name) {
            return name.endsWith("-output.txt");
        }
    });

    final Map<String, Map<String, Double>> testData = new TreeMap<String, Map<String, Double>>();

    for (File xmlReport : xmlReports) {
        final BufferedReader reportReader = new BufferedReader(new FileReader(xmlReport));
        try {
            while (reportReader.ready()) {
                String line = reportReader.readLine();
                final String[] parts = line.substring(0, line.indexOf(':')).split("\\.");

                final String testClass = parts[0];
                if (!testData.containsKey(testClass)) {
                    testData.put(testClass, new TreeMap<String, Double>());
                }

                line = reportReader.readLine();

                testData.get(testClass).put(parts[1],
                        Double.valueOf(line.substring(line.indexOf(':') + 2, line.indexOf('['))));
            }
        } finally {
            IOUtils.closeQuietly(reportReader);
        }
    }

    // 3. build XLS output (from template)
    final HSSFWorkbook workbook = new HSSFWorkbook(new FileInputStream(args[0] + File.separator + "src"
            + File.separator + "test" + File.separator + "resources" + File.separator + XLS));

    for (Map.Entry<String, Map<String, Double>> entry : testData.entrySet()) {
        final Sheet sheet = workbook.getSheet(entry.getKey());

        int rows = 0;

        for (Map.Entry<String, Double> subentry : entry.getValue().entrySet()) {
            final Row row = sheet.createRow(rows++);

            Cell cell = row.createCell(0);
            cell.setCellValue(subentry.getKey());

            cell = row.createCell(1);
            cell.setCellValue(subentry.getValue());
        }
    }

    final FileOutputStream out = new FileOutputStream(
            args[0] + File.separator + "target" + File.separator + XLS);
    try {
        workbook.write(out);
    } finally {
        IOUtils.closeQuietly(out);
    }
}

From source file:com.tengen.MultiArrayFindTest.java

public static void main(String[] args) throws UnknownHostException {

    MongoClient client = new MongoClient();
    DB db = client.getDB("school");
    DBCollection collection = db.getCollection("students");

    System.out.println("Find one:");
    DBObject one = collection.findOne();
    System.out.println(one);

    System.out.println("\nFind all: ");
    DBCursor cursor = collection.find().sort(new BasicDBObject("_id", 1));
    System.out.println(cursor.count());

    try {
        while (cursor.hasNext()) {

            // Advance the cursor once per iteration; calling next() more than once here would skip documents
            DBObject doc = cursor.next();
            int id = (Integer) doc.get("_id");

            Map<Integer, String> myMap = new HashMap<Integer, String>();

            BasicBSONList bl = (BasicBSONList) doc.get("scores");
            for (Object bo : bl) {

                BasicBSONObject bo1 = (BasicBSONObject) bo;
                System.out.println(bo);
                System.out.println(Integer.toString(id));

                double total1 = Double.parseDouble(bo1.get("score").toString());
                System.out.println("score1: " + total1);

                myMap.put(id, bo1.get("score").toString());
                System.out.println("score: " + myMap.get(id));

                double total = Double.parseDouble(myMap.get(id).toString());
                System.out.println("score: " + total);

            }

        }

    } finally {
        cursor.close();
    }

    System.out.println("\nCount:");
    long count = collection.count();
    System.out.println(count);
}