Example usage for java.util HashMap HashMap

List of usage examples for java.util HashMap HashMap

Introduction

On this page you can find example usage for the java.util HashMap HashMap() constructor.

Prototype

public HashMap() 

Document

Constructs an empty HashMap with the default initial capacity (16) and the default load factor (0.75).
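
Before the longer project examples below, here is a minimal, self-contained sketch of the no-argument constructor in isolation; the class name DefaultConstructorDemo and the sample keys are illustrative only and do not come from any of the projects.

import java.util.HashMap;
import java.util.Map;

public class DefaultConstructorDemo {
    public static void main(String[] args) {
        // Creates an empty map with the default initial capacity (16) and load factor (0.75)
        Map<String, Integer> counts = new HashMap<String, Integer>();
        counts.put("alpha", 1);
        counts.put("beta", 2);
        System.out.println(counts.size()); // prints 2
    }
}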

Usage

From source file:com.npower.dm.util.ConvertMailProfile.java

/**
 * @param args
 */
public static void main(String[] args) throws Exception {
    File outputFile = new File("c:/temp/mail.xml");
    FileWriter writer = new FileWriter(outputFile);

    File csvFile = new File("c:/temp/mail.csv");
    BufferedReader reader = new BufferedReader(new FileReader(csvFile));
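    // Note: the first line of the CSV is read here and never processed; each loop iteration reads the next line before handling it.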
    String line = reader.readLine();
    while (line != null) {
        line = reader.readLine();
        if (StringUtils.isEmpty(line)) {
            continue;
        }

        String[] cols = StringUtils.split(line, ',');
        Map<String, String> values = new HashMap<String, String>();
        values.put("name", cols[0]);
        values.put("smtp.host", cols[1]);
        values.put("pop.host", cols[2]);

        writeXML(writer, values);
    }

    writer.close();
    reader.close();
}

From source file:fr.inria.atlanmod.kyanos.benchmarks.KyanosGraphCreator.java

public static void main(String[] args) {
    Options options = new Options();

    Option inputOpt = OptionBuilder.create(IN);
    inputOpt.setArgName("INPUT");
    inputOpt.setDescription("Input file");
    inputOpt.setArgs(1);
    inputOpt.setRequired(true);

    Option outputOpt = OptionBuilder.create(OUT);
    outputOpt.setArgName("OUTPUT");
    outputOpt.setDescription("Output directory");
    outputOpt.setArgs(1);
    outputOpt.setRequired(true);

    Option inClassOpt = OptionBuilder.create(EPACKAGE_CLASS);
    inClassOpt.setArgName("CLASS");
    inClassOpt.setDescription("FQN of EPackage implementation class");
    inClassOpt.setArgs(1);
    inClassOpt.setRequired(true);

    Option optFileOpt = OptionBuilder.create(OPTIONS_FILE);
    optFileOpt.setArgName("FILE");
    optFileOpt.setDescription("Properties file holding the options to be used in the Kyanos Resource");
    optFileOpt.setArgs(1);

    options.addOption(inputOpt);
    options.addOption(outputOpt);
    options.addOption(inClassOpt);
    options.addOption(optFileOpt);

    CommandLineParser parser = new PosixParser();

    try {

        PersistenceBackendFactoryRegistry.getFactories().put(NeoBlueprintsURI.NEO_GRAPH_SCHEME,
                new BlueprintsPersistenceBackendFactory());

        CommandLine commandLine = parser.parse(options, args);

        URI sourceUri = URI.createFileURI(commandLine.getOptionValue(IN));
        URI targetUri = NeoBlueprintsURI.createNeoGraphURI(new File(commandLine.getOptionValue(OUT)));

        Class<?> inClazz = KyanosGraphCreator.class.getClassLoader()
                .loadClass(commandLine.getOptionValue(EPACKAGE_CLASS));
        inClazz.getMethod("init").invoke(null);

        ResourceSet resourceSet = new ResourceSetImpl();

        resourceSet.getResourceFactoryRegistry().getExtensionToFactoryMap().put("xmi",
                new XMIResourceFactoryImpl());
        resourceSet.getResourceFactoryRegistry().getExtensionToFactoryMap().put("zxmi",
                new XMIResourceFactoryImpl());
        resourceSet.getResourceFactoryRegistry().getProtocolToFactoryMap()
                .put(NeoBlueprintsURI.NEO_GRAPH_SCHEME, PersistentResourceFactory.eINSTANCE);

        Resource sourceResource = resourceSet.createResource(sourceUri);
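        // Load options: ask the XMI resource to unzip the content when the source is a compressed .zxmi file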
        Map<String, Object> loadOpts = new HashMap<String, Object>();
        if ("zxmi".equals(sourceUri.fileExtension())) {
            loadOpts.put(XMIResource.OPTION_ZIP, Boolean.TRUE);
        }

        Runtime.getRuntime().gc();
        long initialUsedMemory = Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory();
        LOG.log(Level.INFO, MessageFormat.format("Used memory before loading: {0}",
                MessageUtil.byteCountToDisplaySize(initialUsedMemory)));
        LOG.log(Level.INFO, "Loading source resource");
        sourceResource.load(loadOpts);
        LOG.log(Level.INFO, "Source resource loaded");
        Runtime.getRuntime().gc();
        long finalUsedMemory = Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory();
        LOG.log(Level.INFO, MessageFormat.format("Used memory after loading: {0}",
                MessageUtil.byteCountToDisplaySize(finalUsedMemory)));
        LOG.log(Level.INFO, MessageFormat.format("Memory use increase: {0}",
                MessageUtil.byteCountToDisplaySize(finalUsedMemory - initialUsedMemory)));

        Resource targetResource = resourceSet.createResource(targetUri);

        Map<String, Object> saveOpts = new HashMap<String, Object>();

        if (commandLine.hasOption(OPTIONS_FILE)) {
            Properties properties = new Properties();
            properties.load(new FileInputStream(new File(commandLine.getOptionValue(OPTIONS_FILE))));
            for (final Entry<Object, Object> entry : properties.entrySet()) {
                saveOpts.put((String) entry.getKey(), (String) entry.getValue());
            }
        }
        List<StoreOption> storeOptions = new ArrayList<StoreOption>();
        storeOptions.add(BlueprintsResourceOptions.EStoreGraphOption.AUTOCOMMIT);
        saveOpts.put(BlueprintsResourceOptions.STORE_OPTIONS, storeOptions);
        targetResource.save(saveOpts);

        LOG.log(Level.INFO, "Start moving elements");
        targetResource.getContents().clear();
        targetResource.getContents().addAll(sourceResource.getContents());
        LOG.log(Level.INFO, "End moving elements");
        LOG.log(Level.INFO, "Start saving");
        targetResource.save(saveOpts);
        LOG.log(Level.INFO, "Saved");

        if (targetResource instanceof PersistentResourceImpl) {
            PersistentResourceImpl.shutdownWithoutUnload((PersistentResourceImpl) targetResource);
        } else {
            targetResource.unload();
        }

    } catch (ParseException e) {
        MessageUtil.showError(e.toString());
        MessageUtil.showError("Current arguments: " + Arrays.toString(args));
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("java -jar <this-file.jar>", options, true);
    } catch (Throwable e) {
        MessageUtil.showError(e.toString());
    }
}

From source file:it.polito.tellmefirst.web.rest.TMFServer.java

/**
 * TMF starting point. From rest directory, launch this command:
 * mvn exec:java -Dexec.mainClass="it.polito.tellmefirst.web.rest.TMFServer" -Dexec.args="<path_to_TMF_installation>/conf/server.properties"
 * or use the run.sh file in bin directory
 */
public static void main(String[] args) throws TMFConfigurationException, TMFIndexesWarmUpException,
        URISyntaxException, InterruptedException, IOException {
    LOG.debug("[main] - BEGIN");
    URI serverURI = new URI("http://localhost:2222/rest/");
    String configFileName = args[0];
    new TMFVariables(configFileName);

    // XXX I put the code of IndexUtil.init() here because, for now, I need a reference to the SimpleSearchers for the Enhancer

    // build italian searcher
    Directory contextIndexDirIT = LuceneManager.pickDirectory(new File(TMFVariables.CORPUS_INDEX_IT));
    LOG.info("Corpus index used for italian: " + contextIndexDirIT);
    LuceneManager contextLuceneManagerIT = new LuceneManager(contextIndexDirIT);
    contextLuceneManagerIT
            .setLuceneDefaultAnalyzer(new ItalianAnalyzer(Version.LUCENE_36, TMFVariables.STOPWORDS_IT));
    ITALIAN_CORPUS_INDEX_SEARCHER = new SimpleSearcher(contextLuceneManagerIT);

    // build english searcher
    Directory contextIndexDirEN = LuceneManager.pickDirectory(new File(TMFVariables.CORPUS_INDEX_EN));
    LOG.info("Corpus index used for english: " + contextIndexDirEN);
    LuceneManager contextLuceneManagerEN = new LuceneManager(contextIndexDirEN);
    contextLuceneManagerEN
            .setLuceneDefaultAnalyzer(new EnglishAnalyzer(Version.LUCENE_36, TMFVariables.STOPWORDS_EN));
    ENGLISH_CORPUS_INDEX_SEARCHER = new SimpleSearcher(contextLuceneManagerEN);

    // build kb italian searcher
    String kbDirIT = TMFVariables.KB_IT;
    String residualKbDirIT = TMFVariables.RESIDUAL_KB_IT;
    ITALIAN_KB_INDEX_SEARCHER = new KBIndexSearcher(kbDirIT, residualKbDirIT);

    // build kb english searcher
    String kbDirEN = TMFVariables.KB_EN;
    String residualKbDirEN = TMFVariables.RESIDUAL_KB_EN;
    ENGLISH_KB_INDEX_SEARCHER = new KBIndexSearcher(kbDirEN, residualKbDirEN);

    enhancer = new Enhancer(ITALIAN_CORPUS_INDEX_SEARCHER, ENGLISH_CORPUS_INDEX_SEARCHER,
            ITALIAN_KB_INDEX_SEARCHER, ENGLISH_KB_INDEX_SEARCHER);

    italianClassifier = new Classifier("it", ITALIAN_CORPUS_INDEX_SEARCHER);
    englishClassifier = new Classifier("en", ENGLISH_CORPUS_INDEX_SEARCHER);

    //The following is adapted from DBpedia Spotlight (https://github.com/dbpedia-spotlight/dbpedia-spotlight)
    final Map<String, String> initParams = new HashMap<String, String>();
    initParams.put("com.sun.jersey.config.property.resourceConfigClass",
            "com.sun.jersey.api.core." + "PackagesResourceConfig");
    initParams.put("com.sun.jersey.config.property.packages", "it.polito.tellmefirst.web.rest.services");
    initParams.put("com.sun.jersey.config.property.WadlGeneratorConfig",
            "it.polito.tellmefirst.web.rest.wadl." + "ExternalUriWadlGeneratorConfig");
    SelectorThread threadSelector = GrizzlyWebContainerFactory.create(serverURI, initParams);
    threadSelector.start();
    System.err.println("Server started in " + System.getProperty("user.dir") + " listening on " + serverURI);
    Thread warmUp = new Thread() {
        public void run() {
        }
    };
    warmUp.start();
    while (running) {
        Thread.sleep(100);
    }
    threadSelector.stopEndpoint();
    LOG.debug("[main] - END");
    System.exit(0);
}

From source file:com.clustercontrol.util.StringBinder.java

public static void main(String[] args) {

    String str = "foo #[PARAM] bar #[ESCAPE] #[NOTFOUND] foo";

    Map<String, String> param = new HashMap<String, String>();
    param.put("PARAM", "foofoo");
    byte[] byteCode = { 0x10 };

    param.put("ESCAPE", "foo 'bar' \"foo\" `echo aaa` \\ bar" + " [" + new String(byteCode) + "], ["
            + new String(byteCode) + "]");

    StringBinder binder = new StringBinder(param);
    System.out.println("PARAM : " + param);
    System.out.println("ORIGINAL : " + str);
    System.out.println("BINDED   : " + binder.bindParam(str));
    StringBinder.setReplace(true);
    System.out.println("BINDED   : " + binder.bindParam(str));
    StringBinder.setReplaceChar("$");
    StringBinder.setReplace(true);
    System.out.println("BINDED   : " + binder.bindParam(str));
}

From source file:gpframework.RunExperiment.java

/**
 * Application's entry point.
 * 
 * @param args
 * @throws ParseException
 * @throws ParameterException 
 */
public static void main(String[] args) throws ParseException, ParameterException {
    // Failsafe parameters
    if (args.length == 0) {
        args = new String[] { "-f", "LasSortednessFunction", "-n", "5", "-ff", "JoinFactory", "-tf",
                "SortingElementFactory", "-pf", "SortingProgramFactory", "-s", "SMOGPSelection", "-a", "SMOGP",
                "-t", "50", "-e", "1000000000", "-mf", "SingleMutationFactory", "-d", "-bn", "other" };
    }

    // Create options
    Options options = new Options();
    setupOptions(options);

    // Read options from the command line
    CommandLineParser parser = new PosixParser();
    CommandLine cmd;

    // Print help if parameter requirements are not met
    try {
        cmd = parser.parse(options, args);
    }

    // If some parameters are missing, print help
    catch (MissingOptionException e) {
        HelpFormatter hf = new HelpFormatter();
        hf.printHelp("java -jar GPFramework \n", options);
        System.out.println();
        System.out.println("Missing parameters: " + e.getMissingOptions());
        return;
    }

    // Re-initialize PRNG
    long seed = System.currentTimeMillis();
    Utils.random = new Random(seed);

    // Set the problem size
    int problemSize = Integer.parseInt(cmd.getOptionValue("n"));

    // Set debug mode and cluster mode
    Utils.debug = cmd.hasOption("d");
    RunExperiment.cluster = cmd.hasOption("c");

    // Initialize fitness function and some factories
    FitnessFunction fitnessFunction = fromName(cmd.getOptionValue("f"), problemSize);
    MutationFactory mutationFactory = fromName(cmd.getOptionValue("mf"));
    Selection selectionCriterion = fromName(cmd.getOptionValue("s"));
    FunctionFactory functionFactory = fromName(cmd.getOptionValue("ff"));
    TerminalFactory terminalFactory = fromName(cmd.getOptionValue("tf"), problemSize);
    ProgramFactory programFactory = fromName(cmd.getOptionValue("pf"), functionFactory, terminalFactory);

    // Initialize algorithm
    Algorithm algorithm = fromName(cmd.getOptionValue("a"), mutationFactory, selectionCriterion);
    algorithm.setParameter("evaluationsBudget", cmd.getOptionValue("e"));
    algorithm.setParameter("timeBudget", cmd.getOptionValue("t"));

    // Initialize problem
    Problem problem = new Problem(programFactory, fitnessFunction);
    Program solution = algorithm.solve(problem);

    Utils.debug("Population results: ");
    Utils.debug(algorithm.getPopulation().toString());
    Utils.debug(algorithm.getPopulation().parse());

    Map<String, Object> entry = new HashMap<String, Object>();

    // Copy algorithm setup
    for (Object o : options.getRequiredOptions()) {
        Option option = options.getOption(o.toString());
        entry.put(option.getLongOpt(), cmd.getOptionValue(option.getOpt()));
    }
    entry.put("seed", seed);

    // Copy results
    entry.put("bestProgram", solution.toString());
    entry.put("bestSolution", fitnessFunction.normalize(solution));

    // Copy all statistics
    entry.putAll(algorithm.getStatistics());

    Utils.debug("Maximum encountered population size: "
            + algorithm.getStatistics().get("maxPopulationSizeToCompleteFront"));
    Utils.debug("Maximum encountered tree size: "
            + algorithm.getStatistics().get("maxProgramComplexityToCompleteFront"));
    Utils.debug("Solution complexity: " + solution.complexity() + "/" + (2 * problemSize - 1));
}

From source file:apps.Source2XML.java

public static void main(String[] args) {
    Options options = new Options();

    options.addOption("i", null, true, "input file");
    options.addOption("o", null, true, "output file");
    options.addOption("reparse_xml", null, false, "reparse each XML entry to ensure the parser doesn't fail");

    Joiner commaJoin = Joiner.on(',');

    options.addOption("source_type", null, true,
            "document source type: " + commaJoin.join(SourceFactory.getDocSourceList()));

    Joiner spaceJoin = Joiner.on(' ');

    CommandLineParser parser = new org.apache.commons.cli.GnuParser();

    BufferedWriter outputFile = null;

    int docNum = 0;

    if (USE_LEMMATIZER && USE_STEMMER) {
        System.err.println("Bug/inconsistent code: cann't use the stemmer and lemmatizer at the same time!");
        System.exit(1);
    }

    //Stemmer stemmer = new Stemmer();
    KrovetzStemmer stemmer = new KrovetzStemmer();

    System.out.println("Using Stanford NLP?        " + USE_STANFORD);
    System.out.println("Using Stanford lemmatizer? " + USE_LEMMATIZER);
    System.out.println("Using stemmer?             " + USE_STEMMER
            + (USE_STEMMER ? " (class: " + stemmer.getClass().getCanonicalName() + ")" : ""));

    try {
        CommandLine cmd = parser.parse(options, args);

        String inputFileName = null, outputFileName = null;

        if (cmd.hasOption("i")) {
            inputFileName = cmd.getOptionValue("i");
        } else {
            Usage("Specify 'input file'", options);
        }

        if (cmd.hasOption("o")) {
            outputFileName = cmd.getOptionValue("o");
        } else {
            Usage("Specify 'output file'", options);
        }

        outputFile = new BufferedWriter(
                new OutputStreamWriter(CompressUtils.createOutputStream(outputFileName)));

        String sourceName = cmd.getOptionValue("source_type");

        if (sourceName == null)
            Usage("Specify document source type", options);

        boolean reparseXML = cmd.hasOption("reparse_xml"); // true only if the flag was passed on the command line

        DocumentSource inpDocSource = SourceFactory.createDocumentSource(sourceName, inputFileName);
        DocumentEntry inpDoc = null;
        TextCleaner textCleaner = new TextCleaner(
                new DictNoComments(new File("data/stopwords.txt"), true /* lower case */), USE_STANFORD,
                USE_LEMMATIZER);

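        // Reusable two-entry map (document id and cleaned text) that backs each generated XML index entry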
        Map<String, String> outputMap = new HashMap<String, String>();

        outputMap.put(UtilConst.XML_FIELD_DOCNO, null);
        outputMap.put(UtilConst.XML_FIELD_TEXT, null);

        XmlHelper xmlHlp = new XmlHelper();

        if (reparseXML)
            System.out.println("Will reparse every XML entry to verify correctness!");

        while ((inpDoc = inpDocSource.next()) != null) {
            ++docNum;

            ArrayList<String> toks = textCleaner.cleanUp(inpDoc.mDocText);
            ArrayList<String> goodToks = new ArrayList<String>();
            for (String s : toks)
                if (s.length() <= MAX_WORD_LEN && // Exclude long and short words
                        s.length() >= MIN_WORD_LEN && isGoodWord(s))
                    goodToks.add(USE_STEMMER ? stemmer.stem(s) : s);

            String partlyCleanedText = spaceJoin.join(goodToks);
            String cleanText = XmlHelper.removeInvaildXMLChars(partlyCleanedText);
            // isGoodWord combined with the Stanford tokenizer should be quite restrictive already
            //cleanText = replaceSomePunct(cleanText);

            outputMap.replace(UtilConst.XML_FIELD_DOCNO, inpDoc.mDocId);
            outputMap.replace(UtilConst.XML_FIELD_TEXT, cleanText);

            String xml = xmlHlp.genXMLIndexEntry(outputMap);

            if (reparseXML) {
                try {
                    XmlHelper.parseDocWithoutXMLDecl(xml);
                } catch (Exception e) {
                    System.err.println("Error re-parsing xml for document ID: " + inpDoc.mDocId);
                    System.exit(1);
                }
            }

            /*
            {
              System.out.println(inpDoc.mDocId);
              System.out.println("=====================");
              System.out.println(partlyCleanedText);
              System.out.println("=====================");
              System.out.println(cleanText);
            } 
            */

            try {
                outputFile.write(xml);
                outputFile.write(NL);
            } catch (Exception e) {
                e.printStackTrace();
                System.err.println("Error processing/saving a document!");
            }

            if (docNum % 1000 == 0)
                System.out.println(String.format("Processed %d documents", docNum));
        }

    } catch (ParseException e) {
        e.printStackTrace();
        Usage("Cannot parse arguments" + e, options);
    } catch (Exception e) {
        System.err.println("Terminating due to an exception: " + e);
        System.exit(1);
    } finally {
        System.out.println(String.format("Processed %d documents", docNum));

        try {
            if (null != outputFile) {
                outputFile.close();
                System.out.println("Output file is closed! all seems to be fine...");
            }
        } catch (IOException e) {
            System.err.println("IO exception: " + e);
            e.printStackTrace();
        }
    }
}

From source file:com.thed.zapi.cloud.sample.CycleExecutionReportByVersion.java

public static void main(String[] args) throws JSONException, URISyntaxException, ParseException, IOException {

    String API_GET_EXECUTIONS = "{SERVER}/public/rest/api/1.0/executions/search/cycle/";
    String API_GET_CYCLES = "{SERVER}/public/rest/api/1.0/cycles/search?";
    // Delimiter used in CSV file
    final String NEW_LINE_SEPARATOR = "\n";
    final String fileName = "F:\\cycleExecutionReport.csv";

    /** Declare the JIRA and Zephyr URLs and the access and secret keys */
    // JIRA Cloud URL of the instance
    String jiraBaseURL = "https://demo.atlassian.net";
    // Replace <ZAPI_Cloud_URL> with the Zephyr base URL shared with the user for the ZAPI Cloud installation
    String zephyrBaseUrl = "<ZAPI_Cloud_URL>";
    // Zephyr accessKey, available from the Add-ons >> ZAPI section
    String accessKey = "YjE2MjdjMGEtNzExNy0zYjY1LWFkMzQtNjcwMDM3OTljFkbWluIGFkbWlu";
    // Zephyr secretKey, available from the Add-ons >> ZAPI section
    String secretKey = "qufnbimi96Ob2hq3ISF08yZ8Qw4c1eHGeGlk";

    /** Declare parameter values here */
    String userName = "admin";
    String versionId = "-1";
    String projectId = "10100";
    String projectName = "Support";
    String versionName = "Unscheduled";

    ZFJCloudRestClient client = ZFJCloudRestClient.restBuilder(zephyrBaseUrl, accessKey, secretKey, userName)
            .build();
    /**
     * Get List of Cycles by Project and Version
     */

    final String getCyclesUri = API_GET_CYCLES.replace("{SERVER}", zephyrBaseUrl) + "projectId=" + projectId
            + "&versionId=" + versionId;

    Map<String, String> cycles = getCyclesByProjectVersion(getCyclesUri, client, accessKey);
    // System.out.println("cycles :"+ cycles.toString());

    /**
     * Iterating over the Cycles and writing the report to CSV
     * 
     */

    FileWriter fileWriter = null;
    System.out.println("Writing CSV file.....");
    try {
        fileWriter = new FileWriter(fileName);

        // Write the CSV file header

        fileWriter.append("Cycle Execution Report By Version and Project");
        fileWriter.append(NEW_LINE_SEPARATOR);
        fileWriter.append("PROJECT:" + "," + projectName);
        fileWriter.append(NEW_LINE_SEPARATOR);
        fileWriter.append("VERSION:" + "," + versionName);
        fileWriter.append(NEW_LINE_SEPARATOR);

        JSONArray executions;
        int totalUnexecutedCount = 0;
        int totalExecutionCount = 0;

        for (String key : cycles.keySet()) {
            int executionCount = 0;
            int unexecutedCount = 0;
            final String getExecutionsUri = API_GET_EXECUTIONS.replace("{SERVER}", zephyrBaseUrl) + key
                    + "?projectId=" + projectId + "&versionId=" + versionId;
            fileWriter.append("Cycle:" + "," + cycles.get(key));
            fileWriter.append(NEW_LINE_SEPARATOR);
            executions = getExecutionsByCycleId(getExecutionsUri, client, accessKey);
            // System.out.println("executions :" + executions.toString());

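            // Tally the executions of this cycle by status name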
            HashMap<String, Integer> counter = new HashMap<String, Integer>();

            String[] statusName = new String[executions.length()];
            for (int i = 0; i < executions.length(); i++) {
                JSONObject executionObj = executions.getJSONObject(i).getJSONObject("execution");
                // System.out.println("executionObj
                // "+executionObj.toString());
                JSONObject statusObj = executionObj.getJSONObject("status");
                // System.out.println("statusObj :"+statusObj.toString());
                statusName[i] = statusObj.getString("name");
            }

            if (statusName.length != 0) {
                // System.out.println(statusName.toString());
                for (String a : statusName) {
                    if (counter.containsKey(a)) {
                        int oldValue = counter.get(a);
                        counter.put(a, oldValue + 1);
                    } else {
                        counter.put(a, 1);
                    }
                }
                for (String status : counter.keySet()) {
                    fileWriter.append(" " + "," + " " + "," + status + "," + counter.get(status));
                    fileWriter.append(NEW_LINE_SEPARATOR);
                    if (status.equalsIgnoreCase("UNEXECUTED")) {
                        unexecutedCount += counter.get(status);
                    } else {
                        executionCount += counter.get(status);
                    }

                }
            }
            totalExecutionCount += executionCount;
            totalUnexecutedCount += unexecutedCount;

            fileWriter.append(NEW_LINE_SEPARATOR);
        }

        fileWriter.append(NEW_LINE_SEPARATOR);
        fileWriter.append("TOTAL CYCLES:" + "," + cycles.size());
        fileWriter.append(NEW_LINE_SEPARATOR);
        fileWriter.append("TOTAL EXECUTIONS:" + "," + totalExecutionCount);
        fileWriter.append(NEW_LINE_SEPARATOR);
        fileWriter.append("TOTAL ASSIGNED:" + "," + (totalUnexecutedCount + totalExecutionCount));

        System.out.println("CSV file was created successfully !!!");
    } catch (Exception e) {
        System.out.println("Error in CsvFileWriter !!!");
        e.printStackTrace();
    } finally {
        try {
            // Guard against a null fileWriter (the FileWriter constructor above may have thrown)
            if (fileWriter != null) {
                fileWriter.flush();
                fileWriter.close();
            }
        } catch (IOException e) {
            System.out.println("Error while flushing/closing fileWriter !!!");
            e.printStackTrace();
        }
    }

}

From source file:com.joliciel.lefff.Lefff.java

/**
 * @param args
 */
public static void main(String[] args) throws Exception {
    long startTime = (new Date()).getTime();
    String command = args[0];

    String memoryBaseFilePath = "";
    String lefffFilePath = "";
    String posTagSetPath = "";
    String posTagMapPath = "";
    String word = null;
    List<String> categories = null;
    int startLine = -1;
    int stopLine = -1;

    boolean firstArg = true;
    for (String arg : args) {
        if (firstArg) {
            firstArg = false;
            continue;
        }
        int equalsPos = arg.indexOf('=');
        String argName = arg.substring(0, equalsPos);
        String argValue = arg.substring(equalsPos + 1);
        if (argName.equals("memoryBase"))
            memoryBaseFilePath = argValue;
        else if (argName.equals("lefffFile"))
            lefffFilePath = argValue;
        else if (argName.equals("startLine"))
            startLine = Integer.parseInt(argValue);
        else if (argName.equals("stopLine"))
            stopLine = Integer.parseInt(argValue);
        else if (argName.equals("posTagSet"))
            posTagSetPath = argValue;
        else if (argName.equals("posTagMap"))
            posTagMapPath = argValue;
        else if (argName.equals("word"))
            word = argValue;
        else if (argName.equals("categories")) {
            String[] parts = argValue.split(",");
            categories = new ArrayList<String>();
            for (String part : parts) {
                categories.add(part);
            }
        } else
            throw new RuntimeException("Unknown argument: " + argName);
    }

    final LefffServiceLocator locator = new LefffServiceLocator();
    locator.setDataSourcePropertiesFile("jdbc-live.properties");

    TalismaneServiceLocator talismaneServiceLocator = TalismaneServiceLocator.getInstance();

    final LefffService lefffService = locator.getLefffService();
    if (command.equals("load")) {
        if (lefffFilePath.length() == 0)
            throw new RuntimeException("Required argument: lefffFile");
        final LefffLoader loader = lefffService.getLefffLoader();
        File file = new File(lefffFilePath);
        if (startLine > 0)
            loader.setStartLine(startLine);
        if (stopLine > 0)
            loader.setStopLine(stopLine);

        loader.LoadFile(file);
    } else if (command.equals("serialiseBase")) {
        if (memoryBaseFilePath.length() == 0)
            throw new RuntimeException("Required argument: memoryBase");
        if (posTagSetPath.length() == 0)
            throw new RuntimeException("Required argument: posTagSet");
        if (posTagMapPath.length() == 0)
            throw new RuntimeException("Required argument: posTagMap");

        PosTaggerServiceLocator posTaggerServiceLocator = talismaneServiceLocator.getPosTaggerServiceLocator();
        PosTaggerService posTaggerService = posTaggerServiceLocator.getPosTaggerService();
        File posTagSetFile = new File(posTagSetPath);
        PosTagSet posTagSet = posTaggerService.getPosTagSet(posTagSetFile);

        File posTagMapFile = new File(posTagMapPath);
        LefffPosTagMapper posTagMapper = lefffService.getPosTagMapper(posTagMapFile, posTagSet);

        Map<PosTagSet, LefffPosTagMapper> posTagMappers = new HashMap<PosTagSet, LefffPosTagMapper>();
        posTagMappers.put(posTagSet, posTagMapper);

        LefffMemoryLoader loader = new LefffMemoryLoader();
        LefffMemoryBase memoryBase = loader.loadMemoryBaseFromDatabase(lefffService, posTagMappers, categories);
        File memoryBaseFile = new File(memoryBaseFilePath);
        memoryBaseFile.delete();
        loader.serializeMemoryBase(memoryBase, memoryBaseFile);
    } else if (command.equals("deserialiseBase")) {
        if (memoryBaseFilePath.length() == 0)
            throw new RuntimeException("Required argument: memoryBase");

        LefffMemoryLoader loader = new LefffMemoryLoader();
        File memoryBaseFile = new File(memoryBaseFilePath);
        LefffMemoryBase memoryBase = loader.deserializeMemoryBase(memoryBaseFile);

        String[] testWords = new String[] { "avoir" };
        if (word != null) {
            testWords = word.split(",");
        }

        for (String testWord : testWords) {
            Set<PosTag> possiblePosTags = memoryBase.findPossiblePosTags(testWord);
            LOG.debug("##### PosTags for '" + testWord + "': " + possiblePosTags.size());
            int i = 1;
            for (PosTag posTag : possiblePosTags) {
                LOG.debug("### PosTag " + (i++) + ":" + posTag);
            }

            List<? extends LexicalEntry> entriesForWord = memoryBase.getEntries(testWord);
            LOG.debug("##### Entries for '" + testWord + "': " + entriesForWord.size());
            i = 1;
            for (LexicalEntry entry : entriesForWord) {
                LOG.debug("### Entry " + (i++) + ":" + entry.getWord());
                LOG.debug("Category " + entry.getCategory());
                LOG.debug("Predicate " + entry.getPredicate());
                LOG.debug("Lemma " + entry.getLemma());
                LOG.debug("Morphology " + entry.getMorphology());
            }

            List<? extends LexicalEntry> entriesForLemma = memoryBase.getEntriesForLemma(testWord, "");
            LOG.debug("##### Entries for '" + testWord + "' lemma: " + entriesForLemma.size());
            for (LexicalEntry entry : entriesForLemma) {
                LOG.debug("### Entry " + entry.getWord());
                LOG.debug("Category " + entry.getCategory());
                LOG.debug("Predicate " + entry.getPredicate());
                LOG.debug("Lemma " + entry.getLemma());
                LOG.debug("Morphology " + entry.getMorphology());
                for (PredicateArgument argument : entry.getPredicateArguments()) {
                    LOG.debug("Argument: " + argument.getFunction() + ",Optional? " + argument.isOptional());
                    for (String realisation : argument.getRealisations()) {
                        LOG.debug("Realisation: " + realisation);
                    }
                }
            }
        }

    } else {
        System.out.println("Usage : Lefff load filepath");
    }
    long endTime = (new Date()).getTime() - startTime;
    LOG.debug("Total runtime: " + ((double) endTime / 1000) + " seconds");
}

From source file:com.zf.util.Post_NetNew.java

public static void main(String[] args) {
    //      String ip = "50.115.163.247";
    //      int port = 80;
    //      System.setProperty("http.proxyHost", "localhost");
    //      System.setProperty("http.proxyPort", "1080");
    //      System.setProperty("http.proxyHost", "207.244.64.132");
    //      System.setProperty("http.proxyPort", "8080");
    System.setProperty("proxyHost", "207.244.64.132");
    System.setProperty("proxyPort", "8080");
    //      System.setProperty("http.proxyHost", "207.244.64.132");
    //      System.setProperty("http.proxyPort", "8080");
    //      System.setProperty("socksProxyHost", "103.30.246.43");
    //      System.setProperty("socksProxyPort", "10800");
    //      System.setProperty("socksProxyHost", "localhost");
    //      System.setProperty("socksProxyPort", "1080");
    //      System.setProperty("socksProxyHost", "207.244.64.132");
    //      System.setProperty("socksProxyPort", "8080");

    Map<String, String> map = new HashMap<String, String>();
    //      map.put("url", "http://www.ip.cn");
    //      map.put("url", "https://www.google.com/");
    map.put("url",
            "http://tracking.crobo.com/aff_c?offer_id=21553&aff_id=1478&aff_sub2=P6P49R4873200591858716615&aff_sub=3181");
    //      map.put("url", "http://www.baidu.com");
    try {
        //         String result = Post_NetNew.pn(map, ip, port);
        String result = Post_NetNew.pn(map);
        System.out.println(result);
    } catch (Exception e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
}

From source file:aws.sample.AmazonDynamoDBSample.java

public static void main(String[] args) throws Exception {
    init();

    try {
        String tableName = "my-favorite-movies-table";

        // Create a table with a primary key named 'name', which holds a string
        CreateTableRequest createTableRequest = new CreateTableRequest().withTableName(tableName)
                .withKeySchema(
                        new KeySchema(new KeySchemaElement().withAttributeName("name").withAttributeType("S")))
                .withProvisionedThroughput(
                        new ProvisionedThroughput().withReadCapacityUnits(10L).withWriteCapacityUnits(10L));
        TableDescription createdTableDescription = dynamoDB.createTable(createTableRequest)
                .getTableDescription();
        System.out.println("Created Table: " + createdTableDescription);

        // Wait for it to become active
        waitForTableToBecomeAvailable(tableName);

        // Describe our new table
        DescribeTableRequest describeTableRequest = new DescribeTableRequest().withTableName(tableName);
        TableDescription tableDescription = dynamoDB.describeTable(describeTableRequest).getTable();
        System.out.println("Table Description: " + tableDescription);

        // Add an item
        Map<String, AttributeValue> item = newItem("Bill & Ted's Excellent Adventure", 1989, "****", "James",
                "Sara");
        PutItemRequest putItemRequest = new PutItemRequest(tableName, item);
        PutItemResult putItemResult = dynamoDB.putItem(putItemRequest);
        System.out.println("Result: " + putItemResult);

        // Add another item
        item = newItem("Airplane", 1980, "*****", "James", "Billy Bob");
        putItemRequest = new PutItemRequest(tableName, item);
        putItemResult = dynamoDB.putItem(putItemRequest);
        System.out.println("Result: " + putItemResult);

        // Scan items for movies with a year attribute greater than 1985
        HashMap<String, Condition> scanFilter = new HashMap<String, Condition>();
        Condition condition = new Condition().withComparisonOperator(ComparisonOperator.GT.toString())
                .withAttributeValueList(new AttributeValue().withN("1985"));
        scanFilter.put("year", condition);
        ScanRequest scanRequest = new ScanRequest(tableName).withScanFilter(scanFilter);
        ScanResult scanResult = dynamoDB.scan(scanRequest);
        System.out.println("Result: " + scanResult);

        DeleteTableRequest deleteTableRequest = new DeleteTableRequest(tableName);
        dynamoDB.deleteTable(deleteTableRequest);
        System.out.println("Delete Table: ");

    } catch (AmazonServiceException ase) {
        System.out.println("Caught an AmazonServiceException, which means your request made it "
                + "to AWS, but was rejected with an error response for some reason.");
        System.out.println("Error Message:    " + ase.getMessage());
        System.out.println("HTTP Status Code: " + ase.getStatusCode());
        System.out.println("AWS Error Code:   " + ase.getErrorCode());
        System.out.println("Error Type:       " + ase.getErrorType());
        System.out.println("Request ID:       " + ase.getRequestId());
    } catch (AmazonClientException ace) {
        ace.printStackTrace();
        System.out.println("Caught an AmazonClientException, which means the client encountered "
                + "a serious internal problem while trying to communicate with AWS, "
                + "such as not being able to access the network.");
        System.out.println("Error Message: " + ace.getMessage());
    }
}