Example usage for java.util HashMap HashMap

Introduction

On this page you can find example usage for the java.util.HashMap HashMap() constructor.

Prototype

public HashMap() 

Document

Constructs an empty HashMap with the default initial capacity (16) and the default load factor (0.75).
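
Before the real-world examples, here is a minimal, self-contained sketch of the constructor in isolation (class and variable names are illustrative, not taken from the sources below). With the defaults, the map rehashes once it holds more than 16 * 0.75 = 12 entries.

import java.util.HashMap;
import java.util.Map;

public class HashMapDefaultConstructorDemo {
    public static void main(String[] args) {
        // Empty map created with the default initial capacity (16)
        // and the default load factor (0.75).
        Map<String, Integer> counts = new HashMap<String, Integer>();
        counts.put("alpha", 1);
        counts.put("beta", 2);
        counts.put("alpha", 3); // replaces the previous mapping for "alpha"
        System.out.println(counts.get("alpha")); // prints 3
        System.out.println(counts.size());       // prints 2
    }
}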

Usage

From source file:com.myjeeva.poi.ExcelWorkSheetHandlerTest.java

/**
 * @param args
 * @throws Exception
 */
public static void main(String[] args) throws Exception {
    String SAMPLE_PERSON_DATA_FILE_PATH = "src/test/resources/Sample-Person-Data.xlsx";

    // Input File initialize
    File file = new File(SAMPLE_PERSON_DATA_FILE_PATH);
    InputStream inputStream = new FileInputStream(file);

    // Excel Cell Mapping
    Map<String, String> cellMapping = new HashMap<String, String>();
    cellMapping.put("HEADER", "Person Id,Name,Height,Email Address,DOB,Salary");
    cellMapping.put("A", "personId");
    cellMapping.put("B", "name");
    cellMapping.put("C", "height");
    cellMapping.put("D", "emailId");
    cellMapping.put("E", "dob");
    cellMapping.put("F", "salary");

    // The package open is instantaneous, as it should be.
    OPCPackage pkg = null;
    try {

        ExcelWorkSheetHandler<PersonVO> workSheetHandler = new ExcelWorkSheetHandler<PersonVO>(PersonVO.class,
                cellMapping);

        pkg = OPCPackage.open(inputStream);

        ExcelSheetCallback sheetCallback = new ExcelSheetCallback() {
            private int sheetNumber = 0;

            @Override
            public void startSheet(int sheetNum, String sheetName) {
                this.sheetNumber = sheetNum;
                System.out.println("Started processing sheet number=" + sheetNumber + " and Sheet Name is '"
                        + sheetName + "'");
            }

            @Override
            public void endSheet() {
                System.out.println("Processing completed for sheet number=" + sheetNumber);
            }
        };

        System.out.println("Constructor: pkg, workSheetHandler, sheetCallback");
        ExcelReader example1 = new ExcelReader(pkg, workSheetHandler, sheetCallback);
        example1.process();

        if (workSheetHandler.getValueList().isEmpty()) {
            // No data present
            LOG.error("sHandler.getValueList() is empty");
        } else {

            LOG.info(workSheetHandler.getValueList().size()
                    + " records read from the given Excel worksheet.");

            // Displaying data read from Excel file
            displayPersonList(workSheetHandler.getValueList());
        }

        System.out.println("\nConstructor: filePath, workSheetHandler, sheetCallback");
        ExcelReader example2 = new ExcelReader(SAMPLE_PERSON_DATA_FILE_PATH, workSheetHandler, sheetCallback);
        example2.process();

        System.out.println("\nConstructor: file, workSheetHandler, sheetCallback");
        ExcelReader example3 = new ExcelReader(file, workSheetHandler, null);
        example3.process();

    } catch (RuntimeException are) {
        LOG.error(are.getMessage(), are.getCause());
    } catch (InvalidFormatException ife) {
        LOG.error(ife.getMessage(), ife.getCause());
    } catch (IOException ioe) {
        LOG.error(ioe.getMessage(), ioe.getCause());
    } finally {
        IOUtils.closeQuietly(inputStream);
        try {
            if (null != pkg) {
                pkg.close();
            }
        } catch (IOException e) {
            // just ignore IO exception
        }
    }
}

From source file:mlbench.pagerank.PagerankNaive.java

@SuppressWarnings({ "rawtypes", "unchecked" })
public static void main(String[] args) throws IOException, InterruptedException {
    try {
        parseArgs(args);
        HashMap<String, String> conf = new HashMap<String, String>();
        initConf(conf);
        MPI_D.Init(args, MPI_D.Mode.Common, conf);

        JobConf jobConf = new JobConf(confPath);
        if (MPI_D.COMM_BIPARTITE_O != null) {
            // O communicator
            int rank = MPI_D.Comm_rank(MPI_D.COMM_BIPARTITE_O);
            int size = MPI_D.Comm_size(MPI_D.COMM_BIPARTITE_O);
            if (rank == 0) {
                LOG.info(PagerankNaive.class.getSimpleName() + " O start.");
            }
            FileSplit[] inputs1 = DataMPIUtil.HDFSDataLocalLocator.getTaskInputs(MPI_D.COMM_BIPARTITE_O,
                    jobConf, edgeDir, rank);
            FileSplit[] inputs2 = DataMPIUtil.HDFSDataLocalLocator.getTaskInputs(MPI_D.COMM_BIPARTITE_O,
                    jobConf, vecDir, rank);
            FileSplit[] inputs = (FileSplit[]) ArrayUtils.addAll(inputs2, inputs1);
            for (int i = 0; i < inputs.length; i++) {
                FileSplit fsplit = inputs[i];
                LineRecordReader kvrr = new LineRecordReader(jobConf, fsplit);

                LongWritable key = kvrr.createKey();
                Text value = kvrr.createValue();
                {
                    IntWritable k = new IntWritable();
                    Text v = new Text();
                    while (kvrr.next(key, value)) {
                        String line_text = value.toString();
                        // ignore comments in edge file
                        if (line_text.startsWith("#"))
                            continue;

                        final String[] line = line_text.split("\t");
                        if (line.length < 2)
                            continue;

                        // vector : ROWID VALUE('vNNNN')
                        if (line[1].charAt(0) == 'v') {
                            k.set(Integer.parseInt(line[0]));
                            v.set(line[1]);
                            MPI_D.Send(k, v);
                        } else {
                            /*
                             * In other matrix-vector multiplications we would
                             * output (dst, src) here. In PageRank, however, the
                             * computation is M^T * v, so we output (src, dst).
                             */
                            int src_id = Integer.parseInt(line[0]);
                            int dst_id = Integer.parseInt(line[1]);
                            k.set(src_id);
                            v.set(line[1]);
                            MPI_D.Send(k, v);

                            if (make_symmetric == 1) {
                                k.set(dst_id);
                                v.set(line[0]);
                                MPI_D.Send(k, v);
                            }
                        }
                    }
                }
            }

        } else if (MPI_D.COMM_BIPARTITE_A != null) {
            // A communicator
            int rank = MPI_D.Comm_rank(MPI_D.COMM_BIPARTITE_A);
            if (rank == 0) {
                LOG.info(PagerankNaive.class.getSimpleName() + " A start.");
            }

            HadoopWriter<IntWritable, Text> outrw = HadoopIOUtil.getNewWriter(jobConf, outDir,
                    IntWritable.class, Text.class, TextOutputFormat.class, null, rank, MPI_D.COMM_BIPARTITE_A);

            IntWritable oldKey = null;
            int i;
            double cur_rank = 0;
            ArrayList<Integer> dst_nodes_list = new ArrayList<Integer>();
            Object[] keyValue = MPI_D.Recv();
            while (keyValue != null) {
                IntWritable key = (IntWritable) keyValue[0];
                Text value = (Text) keyValue[1];
                if (oldKey == null) {
                    oldKey = key;
                }
                // A new key arrives
                if (!key.equals(oldKey)) {
                    outrw.write(oldKey, new Text("s" + cur_rank));
                    int outdeg = dst_nodes_list.size();
                    if (outdeg > 0) {
                        cur_rank = cur_rank / (double) outdeg;
                    }
                    for (i = 0; i < outdeg; i++) {
                        outrw.write(new IntWritable(dst_nodes_list.get(i)), new Text("v" + cur_rank));
                    }
                    oldKey = key;
                    cur_rank = 0;
                    dst_nodes_list = new ArrayList<Integer>();
                }
                // common record
                String line_text = value.toString();
                final String[] line = line_text.split("\t");
                if (line.length == 1) {
                    if (line_text.charAt(0) == 'v') { // vector : VALUE
                        cur_rank = Double.parseDouble(line_text.substring(1));
                    } else { // edge : ROWID
                        dst_nodes_list.add(Integer.parseInt(line[0]));
                    }
                }
                keyValue = MPI_D.Recv();
            }
            // write the remaining output for the last key
            if (cur_rank != 0) {
                outrw.write(oldKey, new Text("s" + cur_rank));
                int outdeg = dst_nodes_list.size();
                if (outdeg > 0) {
                    cur_rank = cur_rank / (double) outdeg;
                }
                for (i = 0; i < outdeg; i++) {
                    outrw.write(new IntWritable(dst_nodes_list.get(i)), new Text("v" + cur_rank));
                }
            }
            outrw.close();
        }
        MPI_D.Finalize();
    } catch (MPI_D_Exception e) {
        e.printStackTrace();
    }
}

From source file:edu.illinois.cs.cogcomp.nlp.tokenizer.HashCollisionReport.java

/**
 * Read each test file in the directory, tokenize and create the token view. Then check for
 * collisions.
 * @param args
 * @throws IOException 
 */
public static void main(String[] args) throws IOException {
    if (args.length == 0)
        error("Must pass in the name of a directory with files to test against.");
    File dir = new File(args[0]);
    if (!dir.exists()) {
        error("The directory did not exist : " + dir);
    }
    if (!dir.isDirectory()) {
        error("The path was not a directory : " + dir);
    }
    File[] files = dir.listFiles();
    for (File file : files) {
        if (file.isFile()) {
            String normal = FileUtils.readFileToString(file);
            TextAnnotationBuilder tabldr = new TokenizerTextAnnotationBuilder(new StatefulTokenizer());
            TextAnnotation taNormal = tabldr.createTextAnnotation("test", "normal", normal);
            List<Constituent> normalToks = taNormal.getView(ViewNames.TOKENS).getConstituents();
            HashMap<Integer, Constituent> hashmap = new HashMap<>();

            // add each constituent to the map keyed by its hashcode. Check first to see if the hashcode
            // is already used; if it is, report it.
            for (Constituent c : normalToks) {
                int code = c.hashCode();
                if (hashmap.containsKey(code)) {
                    Constituent dup = hashmap.get(code);
                    System.err.println(c + " == " + dup);
                } else {
                    hashmap.put(code, c);
                }
            }
        }
    }

}

From source file:de.citec.sc.matoll.process.Matoll_CreateMax.java

public static void main(String[] args) throws IOException, ParserConfigurationException, SAXException,
        InstantiationException, IllegalAccessException, ClassNotFoundException, Exception {

    String directory;
    String gold_standard_lexicon;
    String output_lexicon;
    String configFile;
    Language language;
    String output;

    Stopwords stopwords = new Stopwords();

    HashMap<String, Double> maxima = new HashMap<String, Double>();

    if (args.length < 3) {
        System.out.print("Usage: Matoll --mode=train/test <DIRECTORY> <CONFIG>\n");
        return;

    }

    //      Classifier classifier;

    directory = args[1];
    configFile = args[2];

    final Config config = new Config();

    config.loadFromFile(configFile);

    gold_standard_lexicon = config.getGoldStandardLexicon();

    String model_file = config.getModel();

    output_lexicon = config.getOutputLexicon();
    output = config.getOutput();

    language = config.getLanguage();

    LexiconLoader loader = new LexiconLoader();
    Lexicon gold = loader.loadFromFile(gold_standard_lexicon);

    Set<String> uris = new HashSet<>();
    //        Map<Integer,String> sentence_list = new HashMap<>();
    Map<Integer, Set<Integer>> mapping_words_sentences = new HashMap<>();

    //consider only properties
    for (LexicalEntry entry : gold.getEntries()) {
        try {
            for (Sense sense : entry.getSenseBehaviours().keySet()) {
                String tmp_uri = sense.getReference().getURI().replace("http://dbpedia.org/ontology/", "");
                if (!Character.isUpperCase(tmp_uri.charAt(0))) {
                    uris.add(sense.getReference().getURI());
                }
            }
        } catch (Exception e) {
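            // skip entries whose sense reference cannot be resolved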
        }
    }

    ModelPreprocessor preprocessor = new ModelPreprocessor(language);
    preprocessor.setCoreferenceResolution(false);
    Set<String> dep = new HashSet<>();
    dep.add("prep");
    dep.add("appos");
    dep.add("nn");
    dep.add("dobj");
    dep.add("pobj");
    dep.add("num");
    preprocessor.setDEP(dep);

    List<File> list_files = new ArrayList<>();

    if (config.getFiles().isEmpty()) {
        File folder = new File(directory);
        File[] files = folder.listFiles();
        for (File file : files) {
            if (file.toString().contains(".ttl"))
                list_files.add(file);
        }
    } else {
        list_files.addAll(config.getFiles());
    }
    System.out.println(list_files.size());

    int sentence_counter = 0;
    Map<String, Set<Integer>> bag_words_uri = new HashMap<>();
    Map<String, Integer> mapping_word_id = new HashMap<>();
    for (File file : list_files) {
        Model model = RDFDataMgr.loadModel(file.toString());
        for (Model sentence : getSentences(model)) {
            String reference = getReference(sentence);
            reference = reference.replace("http://dbpedia/", "http://dbpedia.org/");
            if (uris.contains(reference)) {
                sentence_counter += 1;
                Set<Integer> words_ids = getBagOfWords(sentence, stopwords, mapping_word_id);
                //TODO: add sentence preprocessing
                String obj = getObject(sentence);
                String subj = getSubject(sentence);
                preprocessor.preprocess(sentence, subj, obj, language);
                //TODO: also return marker if object or subject of property (in SPARQL this has to be optional of course)
                String parsed_sentence = getParsedSentence(sentence);
                try (FileWriter fw = new FileWriter("mapping_sentences_to_ids_goldstandard.tsv", true);
                        BufferedWriter bw = new BufferedWriter(fw);
                        PrintWriter out = new PrintWriter(bw)) {
                    out.println(sentence_counter + "\t" + parsed_sentence);
                } catch (IOException e) {
                    e.printStackTrace();
                }
                for (Integer word_id : words_ids) {
                    if (mapping_words_sentences.containsKey(word_id)) {
                        Set<Integer> tmp_set = mapping_words_sentences.get(word_id);
                        tmp_set.add(sentence_counter);
                        mapping_words_sentences.put(word_id, tmp_set);

                    } else {
                        Set<Integer> tmp_set = new HashSet<>();
                        tmp_set.add(sentence_counter);
                        mapping_words_sentences.put(word_id, tmp_set);
                    }

                }
                if (bag_words_uri.containsKey(reference)) {
                    Set<Integer> tmp = bag_words_uri.get(reference);
                    for (Integer w : words_ids) {
                        tmp.add(w);

                    }
                    bag_words_uri.put(reference, tmp);
                } else {
                    Set<Integer> tmp = new HashSet<>();
                    for (Integer w : words_ids) {
                        tmp.add(w);
                    }
                    bag_words_uri.put(reference, tmp);
                }
            }

        }
        model.close();

    }

    PrintWriter writer = new PrintWriter("bag_of_words_only_goldstandard.tsv");
    StringBuilder string_builder = new StringBuilder();
    for (String r : bag_words_uri.keySet()) {
        string_builder.append(r);
        for (Integer i : bag_words_uri.get(r)) {
            string_builder.append("\t");
            string_builder.append(i);
        }
        string_builder.append("\n");
    }
    writer.write(string_builder.toString());
    writer.close();

    writer = new PrintWriter("mapping_words_to_sentenceids_goldstandard.tsv");
    string_builder = new StringBuilder();
    for (Integer w : mapping_words_sentences.keySet()) {
        string_builder.append(w);
        for (int i : mapping_words_sentences.get(w)) {
            string_builder.append("\t");
            string_builder.append(i);
        }
        string_builder.append("\n");
    }
    writer.write(string_builder.toString());
    writer.close();

}

From source file:eu.fbk.dkm.sectionextractor.PageClassMerger.java

public static void main(String args[]) throws IOException {

    CommandLineWithLogger commandLineWithLogger = new CommandLineWithLogger();
    commandLineWithLogger.addOption(OptionBuilder.withArgName("file").hasArg()
            .withDescription("WikiData ID file").isRequired().withLongOpt("wikidata-id").create("i"));
    commandLineWithLogger.addOption(OptionBuilder.withArgName("file").hasArg()
            .withDescription("Airpedia Person file").isRequired().withLongOpt("airpedia").create("a"));
    commandLineWithLogger.addOption(OptionBuilder.withArgName("file").hasArg().withDescription("Output file")
            .isRequired().withLongOpt("output").create("o"));
    CommandLine commandLine = null;
    try {
        commandLine = commandLineWithLogger.getCommandLine(args);
        PropertyConfigurator.configure(commandLineWithLogger.getLoggerProps());
    } catch (Exception e) {
        System.exit(1);
    }

    String wikiIDFileName = commandLine.getOptionValue("wikidata-id");
    String airpediaFileName = commandLine.getOptionValue("airpedia");
    String outputFileName = commandLine.getOptionValue("output");

    HashMap<Integer, String> wikiIDs = new HashMap<>();
    HashSet<Integer> airpediaClasses = new HashSet<>();

    List<String> strings;

    logger.info("Loading file " + wikiIDFileName);
    strings = Files.readLines(new File(wikiIDFileName), Charsets.UTF_8);
    for (String line : strings) {
        line = line.trim();
        if (line.length() == 0) {
            continue;
        }
        if (line.startsWith("#")) {
            continue;
        }

        String[] parts = line.split("\t");
        if (parts.length < 2) {
            continue;
        }

        int id;
        try {
            id = Integer.parseInt(parts[0]);
        } catch (Exception e) {
            continue;
        }
        wikiIDs.put(id, parts[1]);
    }

    logger.info("Loading file " + airpediaFileName);
    strings = Files.readLines(new File(airpediaFileName), Charsets.UTF_8);
    for (String line : strings) {
        line = line.trim();
        if (line.length() == 0) {
            continue;
        }
        if (line.startsWith("#")) {
            continue;
        }

        String[] parts = line.split("\t");
        if (parts.length < 2) {
            continue;
        }

        int id;
        try {
            id = Integer.parseInt(parts[0]);
        } catch (Exception e) {
            continue;
        }
        airpediaClasses.add(id);
    }

    logger.info("Saving information");
    BufferedWriter writer = new BufferedWriter(new FileWriter(outputFileName));
    for (int i : wikiIDs.keySet()) {
        if (!airpediaClasses.contains(i)) {
            continue;
        }

        writer.append(wikiIDs.get(i)).append("\n");
    }
    writer.close();
}

From source file:Neo4JDataExporter.java

public static void main(String[] args) throws Exception {
    try {

        //            URI firstNode = createNode();
        //            addProperty(firstNode, "name", "Joe Strummer");
        //            URI secondNode = createNode();
        //            addProperty(secondNode, "band", "The Clash");
        //
        //            URI relationshipUri = addRelationship(firstNode, secondNode, "singer",
        //                    "{ \"from\" : \"1976\", \"until\" : \"1986\" }");
        //            addMetadataToProperty(relationshipUri, "stars", "5");

        String query = "start device=node:node_auto_index(objectType='Node') match device - [parent] -> interface --> neighbour where device.objectType='Node' and interface.objectType='Discovery Interface' and neighbour.objectType = 'Discovered Neighbor' return device.name, interface.name, neighbour.name";
        String params = "";
        String output = executeCypherQuery(query, params);
        JSONObject json = (JSONObject) new JSONParser().parse(output);
        System.out.println("columns=" + json.get("columns"));
        System.out.println("data=" + json.get("data"));
        HashMap map = new HashMap();

    } catch (Exception e) {

        e.printStackTrace();

    }
}

From source file:DeliverWork.java

public static void main(String[] args) throws UnsupportedEncodingException, MessagingException {
    JdbcFactory jdbcFactoryChanye = new JdbcFactory(
            "jdbc:mysql://127.0.0.1:3306/fp_guimin?useUnicode=true&characterEncoding=UTF-8", "guimin_db",
            "guimin@98fhi3p2hUFHfdfoi", "com.mysql.jdbc.Driver");
    connection = jdbcFactoryChanye.getConnection();
    Map<String, String> map = new HashMap<String, String>();

    try {
        PreparedStatement ps = connection.prepareStatement(selectAccount);
        ResultSet resultSet = ps.executeQuery();
        while (resultSet.next()) {
            map.put(resultSet.getString(1), resultSet.getString(2));
        }
        ps.close();
        map.forEach((k, v) -> {
            syncTheFile(k, v);
        });
    } catch (SQLException e) {
        e.printStackTrace();
    } finally {
        try {
            connection.close();
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }
}

From source file:mlbench.pagerank.PagerankMerge.java

@SuppressWarnings({ "rawtypes", "unchecked" })
public static void main(String[] args) throws IOException, InterruptedException {
    try {
        parseArgs(args);
        HashMap<String, String> conf = new HashMap<String, String>();
        initConf(conf);
        MPI_D.Init(args, MPI_D.Mode.Common, conf);

        JobConf jobConf = new JobConf(confPath);
        if (MPI_D.COMM_BIPARTITE_O != null) {
            // O communicator
            int rank = MPI_D.Comm_rank(MPI_D.COMM_BIPARTITE_O);
            int size = MPI_D.Comm_size(MPI_D.COMM_BIPARTITE_O);
            if (rank == 0) {
                LOG.info(PagerankMerge.class.getSimpleName() + " O start.");
            }
            FileSplit[] inputs = DataMPIUtil.HDFSDataLocalLocator.getTaskInputs(MPI_D.COMM_BIPARTITE_O, jobConf,
                    inDir, rank);
            for (int i = 0; i < inputs.length; i++) {
                FileSplit fsplit = inputs[i];
                LineRecordReader kvrr = new LineRecordReader(jobConf, fsplit);

                LongWritable key = kvrr.createKey();
                Text value = kvrr.createValue();
                {
                    while (kvrr.next(key, value)) {
                        String line_text = value.toString();
                        final String[] line = line_text.split("\t");
                        if (line.length >= 2) {
                            MPI_D.Send(new IntWritable(Integer.parseInt(line[0])), new Text(line[1]));
                        }
                    }
                }
            }

        } else if (MPI_D.COMM_BIPARTITE_A != null) {
            // A communicator
            int rank = MPI_D.Comm_rank(MPI_D.COMM_BIPARTITE_A);
            if (rank == 0) {
                LOG.info(PagerankMerge.class.getSimpleName() + " A start.");
            }
            HadoopWriter<IntWritable, Text> outrw = HadoopIOUtil.getNewWriter(jobConf, outDir,
                    IntWritable.class, Text.class, TextOutputFormat.class, null, rank, MPI_D.COMM_BIPARTITE_A);

            IntWritable oldKey = null;
            double next_rank = 0;
            double previous_rank = 0;
            double diff = 0;
            int local_diffs = 0;
            random_coeff = (1 - mixing_c) / (double) number_nodes;
            converge_threshold = ((double) 1.0 / (double) number_nodes) / 10;
            Object[] keyValue = MPI_D.Recv();
            while (keyValue != null) {
                IntWritable key = (IntWritable) keyValue[0];
                Text value = (Text) keyValue[1];
                if (oldKey == null) {
                    oldKey = key;
                }
                if (!key.equals(oldKey)) {
                    next_rank = next_rank * mixing_c + random_coeff;
                    outrw.write(oldKey, new Text("v" + next_rank));
                    diff = Math.abs(previous_rank - next_rank);
                    if (diff > converge_threshold) {
                        local_diffs += 1;
                    }
                    oldKey = key;
                    next_rank = 0;
                    previous_rank = 0;
                }

                String cur_value_str = value.toString();
                if (cur_value_str.charAt(0) == 's') {
                    previous_rank = Double.parseDouble(cur_value_str.substring(1));
                } else {
                    next_rank += Double.parseDouble(cur_value_str.substring(1));
                }

                keyValue = MPI_D.Recv();
            }
            if (previous_rank != 0) {
                next_rank = next_rank * mixing_c + random_coeff;
                outrw.write(oldKey, new Text("v" + next_rank));
                diff = Math.abs(previous_rank - next_rank);
                if (diff > converge_threshold)
                    local_diffs += 1;
            }
            outrw.close();
            reduceDiffs(local_diffs, rank);
        }

        MPI_D.Finalize();
    } catch (MPI_D_Exception e) {
        e.printStackTrace();
    }
}

From source file:jmxbf.java

public static void main(String[] args) throws IOException, MalformedObjectNameException {

    String HOST = "";
    String PORT = "";
    String usersFile = "";
    String pwdFile = "";

    CommandLine cmd = getParsedCommandLine(args);

    if (cmd != null) {

        HOST = cmd.getOptionValue("host");
        PORT = cmd.getOptionValue("port");
        usersFile = cmd.getOptionValue("usernames-file");
        pwdFile = cmd.getOptionValue("passwords-file");

    } else {

        System.exit(1);
    }

    String finalResults = "";

    BufferedReader users = new BufferedReader(new FileReader(usersFile));
    BufferedReader pwds = new BufferedReader(new FileReader(pwdFile));

    JMXServiceURL url = new JMXServiceURL("service:jmx:rmi:///jndi/rmi://" + HOST + ":" + PORT + "/jmxrmi");
    //new JMXServiceURL("service:jmx:remoting-jmx://" + HOST + ":" + PORT);

    String user = null;
    boolean found = false;
    while ((user = users.readLine()) != null) {
        String pwd = null;
        while ((pwd = pwds.readLine()) != null) {
            //System.out.println(user+":"+pwd);

            Map<String, String[]> env = new HashMap<>();
            String[] credentials = { user, pwd };
            env.put(JMXConnector.CREDENTIALS, credentials);
            try {

                JMXConnector jmxConnector = JMXConnectorFactory.connect(url, env);

                System.out.println();
                System.out.println();
                System.out.println();
                System.out.println(
                        "[+] ###SUCCESS### - We got a valid connection for: " + user + ":" + pwd + "\r\n\r\n");
                finalResults = finalResults + "\n" + user + ":" + pwd;
                jmxConnector.close();
                found = true;
                break;

            } catch (java.lang.SecurityException e) {
                System.out.println("Auth failed!!!\r\n");

            }
        }
        if (found) {
            System.out.println("Found some valid credentials - continuing brute force");
            found = false;

        }
        //closing and reopening pwds
        pwds.close();
        pwds = new BufferedReader(new FileReader(pwdFile));

    }

    users.close();
    //print final results
    if (finalResults.length() != 0) {
        System.out.println("The following valid credentials were found:\n");
        System.out.println(finalResults);
    }

}

From source file:de.ingrid.iplug.AdminServer.java

/**
 * Starts the admin web server from the command line.
 * @param args The server port and the web app folder.
 * @throws Exception if something goes wrong.
 */
public static void main(String[] args) throws Exception {

    String usage = "<serverPort> <webappFolder>";
    if ((args.length != 2) && (args.length != 4) && (args.length != 6)) {
        System.err.println(usage);
        return;
    }

    Map arguments = readParameters(args);
    File plugDescriptionFile = new File(PLUG_DESCRIPTION);
    if (arguments.containsKey("--plugdescription")) {
        plugDescriptionFile = new File((String) arguments.get("--plugdescription"));
    }
    File communicationProperties = new File(COMMUNICATION_PROPERTES);
    if (arguments.containsKey("--descriptor")) {
        communicationProperties = new File((String) arguments.get("--descriptor"));
    }

    int port = Integer.parseInt(args[0]);
    File webFolder = new File(args[1]);

    //push init params for all contexts (e.g. step1 and step2)
    HashMap hashMap = new HashMap();
    hashMap.put("plugdescription.xml", plugDescriptionFile.getAbsolutePath());
    hashMap.put("communication.xml", communicationProperties.getAbsolutePath());

    WebContainer container = startWebContainer(hashMap, port, webFolder, false, null);
    container.join();
}