Example usage for java.util Map entrySet

List of usage examples for java.util Map entrySet

Introduction

On this page you can find example usage for java.util Map entrySet.

Prototype

Set<Map.Entry<K, V>> entrySet();

Document

Returns a Set view of the mappings contained in this map.
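
Before the larger programs below, here is a minimal, self-contained sketch (class name and sample data are invented for illustration) of the two most common uses of the returned view: iterating key/value pairs, and writing back through an entry with setValue(), which updates the underlying map.

import java.util.HashMap;
import java.util.Map;

public class EntrySetDemo {
    public static void main(String[] args) {
        Map<String, Integer> counts = new HashMap<>();
        counts.put("apples", 3);
        counts.put("oranges", 5);

        // Iterate over key/value pairs without a separate get() per key.
        for (Map.Entry<String, Integer> entry : counts.entrySet()) {
            System.out.println(entry.getKey() + " -> " + entry.getValue());
        }

        // entrySet() is a live view: setValue() writes through to the map.
        for (Map.Entry<String, Integer> entry : counts.entrySet()) {
            entry.setValue(entry.getValue() + 1);
        }
        System.out.println(counts); // e.g. {oranges=6, apples=4}
    }
}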

Usage

From source file:org.apache.s4.client.Adapter.java

@SuppressWarnings("static-access")
public static void main(String args[]) throws IOException, InterruptedException {

    Options options = new Options();

    options.addOption(OptionBuilder.withArgName("corehome").hasArg().withDescription("core home").create("c"));

    options.addOption(
            OptionBuilder.withArgName("instanceid").hasArg().withDescription("instance id").create("i"));

    options.addOption(
            OptionBuilder.withArgName("configtype").hasArg().withDescription("configuration type").create("t"));

    options.addOption(OptionBuilder.withArgName("userconfig").hasArg()
            .withDescription("user-defined legacy data adapter configuration file").create("d"));

    CommandLineParser parser = new GnuParser();
    CommandLine commandLine = null;

    try {
        commandLine = parser.parse(options, args);
    } catch (ParseException pe) {
        System.err.println(pe.getLocalizedMessage());
        System.exit(1);
    }

    int instanceId = -1;
    if (commandLine.hasOption("i")) {
        String instanceIdStr = commandLine.getOptionValue("i");
        try {
            instanceId = Integer.parseInt(instanceIdStr);
        } catch (NumberFormatException nfe) {
            System.err.println("Bad instance id: %s" + instanceIdStr);
            System.exit(1);
        }
    }

    if (commandLine.hasOption("c")) {
        coreHome = commandLine.getOptionValue("c");
    }

    String configType = "typical";
    if (commandLine.hasOption("t")) {
        configType = commandLine.getOptionValue("t");
    }

    String userConfigFilename = null;
    if (commandLine.hasOption("d")) {
        userConfigFilename = commandLine.getOptionValue("d");
    }

    if (userConfigFilename == null) {
        System.err.println("Missing user-defined adapter configuration file (-d)");
        System.exit(1);
    }

    File userConfigFile = new File(userConfigFilename);
    if (!userConfigFile.isFile()) {
        System.err.println("Bad user configuration file: " + userConfigFilename);
        System.exit(1);
    }

    File coreHomeFile = new File(coreHome);
    if (!coreHomeFile.isDirectory()) {
        System.err.println("Bad core home: " + coreHome);
        System.exit(1);
    }

    if (instanceId > -1) {
        System.setProperty("instanceId", "" + instanceId);
    } else {
        System.setProperty("instanceId", "" + S4Util.getPID());
    }

    String configBase = coreHome + File.separatorChar + "conf" + File.separatorChar + configType;
    String configPath = configBase + File.separatorChar + "client-adapter-conf.xml";
    File configFile = new File(configPath);
    if (!configFile.exists()) {
        System.err.printf("adapter config file %s does not exist\n", configPath);
        System.exit(1);
    }

    // load adapter config xml
    ApplicationContext coreContext;
    coreContext = new FileSystemXmlApplicationContext("file:" + configPath);
    ApplicationContext context = coreContext;

    Adapter adapter = (Adapter) context.getBean("client_adapter");

    ApplicationContext appContext = new FileSystemXmlApplicationContext(
            new String[] { "file:" + userConfigFilename }, context);

    Map<?, ?> inputStubBeanMap = appContext.getBeansOfType(InputStub.class);
    Map<?, ?> outputStubBeanMap = appContext.getBeansOfType(OutputStub.class);

    if (inputStubBeanMap.size() == 0 && outputStubBeanMap.size() == 0) {
        System.err.println("No user-defined input/output stub beans");
        System.exit(1);
    }

    ArrayList<InputStub> inputStubs = new ArrayList<InputStub>(inputStubBeanMap.size());
    ArrayList<OutputStub> outputStubs = new ArrayList<OutputStub>(outputStubBeanMap.size());

    // add all input stubs
    for (Map.Entry<?, ?> e : inputStubBeanMap.entrySet()) {
        String beanName = (String) e.getKey();
        System.out.println("Adding InputStub " + beanName);
        inputStubs.add((InputStub) e.getValue());
    }

    // add all output stubs
    for (Map.Entry<?, ?> e : outputStubBeanMap.entrySet()) {
        String beanName = (String) e.getKey();
        System.out.println("Adding OutputStub " + beanName);
        outputStubs.add((OutputStub) e.getValue());
    }

    adapter.setInputStubs(inputStubs);
    adapter.setOutputStubs(outputStubs);

}

From source file:com.px100systems.data.utility.RestoreUtility.java

public static void main(String[] args) {
    if (args.length < 3) {
        System.err.println("Usage: java -cp ... com.px100systems.data.utility.RestoreUtility "
                + "<springXmlConfigFile> <persisterBeanName> <backupDirectory> [compare]");
        return;
    }

    FileSystemXmlApplicationContext ctx = new FileSystemXmlApplicationContext("file:" + args[0]);
    try {
        PersistenceProvider persister = ctx.getBean(args[1], PersistenceProvider.class);

        File directory = new File(args[2]);
        if (!directory.isDirectory()) {
            System.err.println(directory.getName() + " is not a directory");
            return;
        }

        List<File> files = new ArrayList<File>();
        //noinspection ConstantConditions
        for (File file : directory.listFiles())
            if (BackupFile.isBackup(file))
                files.add(file);

        if (files.isEmpty()) {
            System.err.println(directory.getName() + " directory has no backup files");
            return;
        }

        if (args.length == 4 && args[3].equalsIgnoreCase("compare")) {
            final Map<String, Map<Long, RawRecord>> units = new HashMap<String, Map<Long, RawRecord>>();

            for (String storage : persister.storage()) {
                System.out.println("Storage " + storage);
                persister.loadByStorage(storage, new PersistenceProvider.LoadCallback() {
                    @Override
                    public void process(RawRecord record) {
                        Map<Long, RawRecord> unitList = units.get(record.getUnitName());
                        if (unitList == null) {
                            unitList = new HashMap<Long, RawRecord>();
                            units.put(record.getUnitName(), unitList);
                        }
                        unitList.put(record.getId(), record);
                    }
                });

                for (final Map.Entry<String, Map<Long, RawRecord>> unit : units.entrySet()) {
                    BackupFile file = null;
                    for (int i = 0, n = files.size(); i < n; i++)
                        if (BackupFile.isBackup(files.get(i), unit.getKey())) {
                            file = new BackupFile(files.get(i));
                            files.remove(i);
                            break;
                        }

                    if (file == null)
                        throw new RuntimeException("Could not find backup file for unit " + unit.getKey());

                    final Long[] count = new Long[] { 0L };
                    file.read(new PersistenceProvider.LoadCallback() {
                        @Override
                        public void process(RawRecord record) {
                            RawRecord r = unit.getValue().get(record.getId());
                            if (r == null)
                                throw new RuntimeException("Could not find persisted record " + record.getId()
                                        + " for unit " + unit.getKey());
                            if (!r.equals(record))
                                throw new RuntimeException(
                                        "Record " + record.getId() + " mismatch for unit " + unit.getKey());
                            count[0] = count[0] + 1;
                        }
                    });

                    if (count[0] != unit.getValue().size())
                        throw new RuntimeException("Extra persisted records for unit " + unit.getKey());
                    System.out.println("   Unit " + unit.getKey() + ": OK");
                }

                units.clear();
            }

            if (!files.isEmpty()) {
                System.err.println("Extra backups: ");
                for (File file : files)
                    System.err.println("   " + file.getName());
            }
        } else {
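            // default (restore) mode: initialize the persister and replay every backup file into it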
            persister.init();
            for (File file : files) {
                InMemoryDatabase.readBackupFile(file, persister);
                System.out.println("Loaded " + file.getName());
            }
        }
    } catch (Exception e) {
        throw new RuntimeException(e);
    } finally {
        ctx.close();
    }
}

From source file:edu.cmu.lti.oaqa.knn4qa.apps.LuceneIndexer.java

public static void main(String[] args) {
    Options options = new Options();

    options.addOption(CommonParams.ROOT_DIR_PARAM, null, true, CommonParams.ROOT_DIR_DESC);
    options.addOption(CommonParams.SUB_DIR_TYPE_PARAM, null, true, CommonParams.SUB_DIR_TYPE_DESC);
    options.addOption(CommonParams.MAX_NUM_REC_PARAM, null, true, CommonParams.MAX_NUM_REC_DESC);
    options.addOption(CommonParams.SOLR_FILE_NAME_PARAM, null, true, CommonParams.SOLR_FILE_NAME_DESC);
    options.addOption(CommonParams.OUT_INDEX_PARAM, null, true, CommonParams.OUT_MINDEX_DESC);

    CommandLineParser parser = new org.apache.commons.cli.GnuParser();

    try {
        CommandLine cmd = parser.parse(options, args);

        String rootDir = null;

        rootDir = cmd.getOptionValue(CommonParams.ROOT_DIR_PARAM);

        if (null == rootDir)
            Usage("Specify: " + CommonParams.ROOT_DIR_DESC, options);

        String outputDirName = cmd.getOptionValue(CommonParams.OUT_INDEX_PARAM);

        if (null == outputDirName)
            Usage("Specify: " + CommonParams.OUT_MINDEX_DESC, options);

        String subDirTypeList = cmd.getOptionValue(CommonParams.SUB_DIR_TYPE_PARAM);

        if (null == subDirTypeList || subDirTypeList.isEmpty())
            Usage("Specify: " + CommonParams.SUB_DIR_TYPE_DESC, options);

        String solrFileName = cmd.getOptionValue(CommonParams.SOLR_FILE_NAME_PARAM);
        if (null == solrFileName)
            Usage("Specify: " + CommonParams.SOLR_FILE_NAME_DESC, options);

        int maxNumRec = Integer.MAX_VALUE;

        String tmp = cmd.getOptionValue(CommonParams.MAX_NUM_REC_PARAM);

        if (tmp != null) {
            try {
                maxNumRec = Integer.parseInt(tmp);
                if (maxNumRec <= 0) {
                    Usage("The maximum number of records should be a positive integer", options);
                }
            } catch (NumberFormatException e) {
                Usage("The maximum number of records should be a positive integer", options);
            }
        }

        File outputDir = new File(outputDirName);
        if (!outputDir.exists()) {
            if (!outputDir.mkdirs()) {
                System.out.println("couldn't create " + outputDir.getAbsolutePath());
                System.exit(1);
            }
        }
        if (!outputDir.isDirectory()) {
            System.out.println(outputDir.getAbsolutePath() + " is not a directory!");
            System.exit(1);
        }
        if (!outputDir.canWrite()) {
            System.out.println("Can't write to " + outputDir.getAbsolutePath());
            System.exit(1);
        }

        String subDirs[] = subDirTypeList.split(",");

        int docNum = 0;

        // No English analyzer here, all language-related processing is done already,
        // here we simply white-space tokenize and index tokens verbatim.
        Analyzer analyzer = new WhitespaceAnalyzer();
        FSDirectory indexDir = FSDirectory.open(outputDir);
        IndexWriterConfig indexConf = new IndexWriterConfig(analyzer.getVersion(), analyzer);

        System.out.println("Creating a new Lucene index, maximum # of docs to process: " + maxNumRec);
        indexConf.setOpenMode(OpenMode.CREATE);
        IndexWriter indexWriter = new IndexWriter(indexDir, indexConf);

        for (int subDirId = 0; subDirId < subDirs.length && docNum < maxNumRec; ++subDirId) {
            String inputFileName = rootDir + "/" + subDirs[subDirId] + "/" + solrFileName;

            System.out.println("Input file name: " + inputFileName);

            BufferedReader inpText = new BufferedReader(
                    new InputStreamReader(CompressUtils.createInputStream(inputFileName)));
            String docText = XmlHelper.readNextXMLIndexEntry(inpText);

            for (; docText != null && docNum < maxNumRec; docText = XmlHelper.readNextXMLIndexEntry(inpText)) {
                ++docNum;
                Map<String, String> docFields = null;

                Document luceneDoc = new Document();

                try {
                    docFields = XmlHelper.parseXMLIndexEntry(docText);
                } catch (Exception e) {
                    System.err.println(String.format("Parsing error, offending DOC #%d:\n%s", docNum, docText));
                    System.exit(1);
                }

                String id = docFields.get(UtilConst.TAG_DOCNO);

                if (id == null) {
                    System.err.println(String.format("No ID tag '%s', offending DOC #%d:\n%s",
                            UtilConst.TAG_DOCNO, docNum, docText));
                    System.exit(1);
                }

                luceneDoc.add(new StringField(UtilConst.TAG_DOCNO, id, Field.Store.YES));

                for (Map.Entry<String, String> e : docFields.entrySet())
                    if (!e.getKey().equals(UtilConst.TAG_DOCNO)) {
                        luceneDoc.add(new TextField(e.getKey(), e.getValue(), Field.Store.YES));
                    }
                indexWriter.addDocument(luceneDoc);
                if (docNum % 1000 == 0)
                    System.out.println("Indexed " + docNum + " docs");
            }
            System.out.println("Indexed " + docNum + " docs");
        }

        indexWriter.commit();
        indexWriter.close();

    } catch (ParseException e) {
        Usage("Cannot parse arguments", options);
    } catch (Exception e) {
        System.err.println("Terminating due to an exception: " + e);
        System.exit(1);
    }

}

From source file:com.medicaid.mmis.util.DataLoader.java

/**
 * The main function, imports the files given as arguments.
 *
 * @param args the file names
 * @throws IOException for read/write errors
 * @throws PortalServiceException for any other errors
 */
public static void main(String[] args) throws IOException, PortalServiceException {
    if (args.length != 2) {
        System.out.println("2 file path arguments are required.");
        return;
    }

    PropertyConfigurator.configure("log4j.properties");
    logger = Logger.getLogger(DataLoader.class);

    LookupServiceBean lookupBean = new LookupServiceBean();
    EntityManagerFactory emf = Persistence.createEntityManagerFactory("cms-data-load");
    EntityManager em = emf.createEntityManager();
    lookupBean.setEm(em);
    DataLoader loader = new DataLoader();
    loader.setLookup(lookupBean);

    SequenceGeneratorBean sequence = new SequenceGeneratorBean();
    sequence.setEm(em);

    ProviderEnrollmentServiceBean enrollmentBean = new ProviderEnrollmentServiceBean();
    enrollmentBean.setEm(em);
    enrollmentBean.setSequence(sequence);
    enrollmentBean.setLookupService(lookupBean);

    loader.setEnrollmentService(enrollmentBean);

    long processId = sequence.getNextValue("PROCESS_ID");
    System.out.println("Started process id " + processId);

    BufferedReader br = null;
    PrintWriter accepted = null;
    PrintWriter rejected = null;
    try {
        System.out.println("Processing file 1...");
        File success = new File("accepted_1_" + processId + ".txt");
        File failure = new File("rejected_1_" + processId + ".txt");
        success.createNewFile();
        failure.createNewFile();
        accepted = new PrintWriter(success);
        rejected = new PrintWriter(failure);
        br = new BufferedReader(new FileReader(args[0]));
        String line = null;
        int total = 0;
        int errors = 0;
        while ((line = br.readLine()) != null) {
            total++;
            try {
                em.getTransaction().begin();
                loader.readProviderFile(new ByteArrayInputStream(line.getBytes()));
                em.getTransaction().commit();
                accepted.println(line);
                logger.info("Commit row " + total);
            } catch (PortalServiceException e) {
                rejected.println(line);
                em.getTransaction().rollback();
                errors++;
                logger.error("Rollback row " + total + " :" + e.getMessage());
            }
        }

        accepted.flush();
        accepted.close();
        rejected.flush();
        rejected.close();
        br.close();
        System.out.println("Total records read: " + total);
        System.out.println("Total rejected: " + errors);

        System.out.println("Processing file 2...");
        success = new File("accepted_2_" + processId + ".txt");
        failure = new File("rejected_2_" + processId + ".txt");
        success.createNewFile();
        failure.createNewFile();
        accepted = new PrintWriter(success);
        rejected = new PrintWriter(failure);
        br = new BufferedReader(new FileReader(args[1]));
        line = null;
        total = 0;
        errors = 0;
        while ((line = br.readLine()) != null) {
            total++;
            try {
                em.getTransaction().begin();
                Map<String, OwnershipInformation> owners = loader
                        .readWS000EXT2OWNBEN(new ByteArrayInputStream(line.getBytes()));
                for (Map.Entry<String, OwnershipInformation> entry : owners.entrySet()) {
                    enrollmentBean.addBeneficialOwners(entry.getKey(), entry.getValue());
                }
                em.getTransaction().commit();
                accepted.println(line);
                logger.info("Commit row " + total);
            } catch (PortalServiceException e) {
                rejected.println(line);
                em.getTransaction().rollback();
                errors++;
                logger.error("Rollback row " + total + " :" + e.getMessage());
            }
        }
        accepted.flush();
        rejected.flush();
        System.out.println("Total records read: " + total);
        System.out.println("Total rejected: " + errors);

    } finally {
        if (br != null) {
            br.close();
        }
        if (accepted != null) {
            accepted.close();
        }
        if (rejected != null) {
            rejected.close();
        }
    }
}

From source file:com.msopentech.odatajclient.engine.performance.PerfTestReporter.java

public static void main(final String[] args) throws Exception {
    // 1. check input directory
    final File reportdir = new File(args[0] + File.separator + "target" + File.separator + "surefire-reports");
    if (!reportdir.isDirectory()) {
        throw new IllegalArgumentException("Expected directory, got " + args[0]);
    }

    // 2. read test data from surefire output
    final File[] xmlReports = reportdir.listFiles(new FilenameFilter() {

        @Override
        public boolean accept(final File dir, final String name) {
            return name.endsWith("-output.txt");
        }
    });

    final Map<String, Map<String, Double>> testData = new TreeMap<String, Map<String, Double>>();

    for (File xmlReport : xmlReports) {
        final BufferedReader reportReader = new BufferedReader(new FileReader(xmlReport));
        try {
            while (reportReader.ready()) {
                String line = reportReader.readLine();
                final String[] parts = line.substring(0, line.indexOf(':')).split("\\.");

                final String testClass = parts[0];
                if (!testData.containsKey(testClass)) {
                    testData.put(testClass, new TreeMap<String, Double>());
                }

                line = reportReader.readLine();

                testData.get(testClass).put(parts[1],
                        Double.valueOf(line.substring(line.indexOf(':') + 2, line.indexOf('['))));
            }
        } finally {
            IOUtils.closeQuietly(reportReader);
        }
    }

    // 3. build XSLX output (from template)
    final HSSFWorkbook workbook = new HSSFWorkbook(new FileInputStream(args[0] + File.separator + "src"
            + File.separator + "test" + File.separator + "resources" + File.separator + XLS));

    for (Map.Entry<String, Map<String, Double>> entry : testData.entrySet()) {
        final Sheet sheet = workbook.getSheet(entry.getKey());

        int rows = 0;

        for (Map.Entry<String, Double> subentry : entry.getValue().entrySet()) {
            final Row row = sheet.createRow(rows++);

            Cell cell = row.createCell(0);
            cell.setCellValue(subentry.getKey());

            cell = row.createCell(1);
            cell.setCellValue(subentry.getValue());
        }
    }

    final FileOutputStream out = new FileOutputStream(
            args[0] + File.separator + "target" + File.separator + XLS);
    try {
        workbook.write(out);
    } finally {
        IOUtils.closeQuietly(out);
    }
}

From source file:org.kuali.student.git.importer.ConvertBuildTagBranchesToGitTags.java

/**
 * @param args
 */
public static void main(String[] args) {

    if (args.length < 3 || args.length > 6) {
        System.err.println("USAGE: <git repository> <bare> <ref mode> [<ref prefix> <username> <password>]");
        System.err.println("\t<bare> : 0 (false) or 1 (true)");
        System.err.println("\t<ref mode> : local or name of remote");
        System.err.println("\t<ref prefix> : refs/heads (default) or say refs/remotes/origin (test clone)");
        System.exit(-1);
    }

    boolean bare = false;

    if (args[1].trim().equals("1")) {
        bare = true;
    }

    String remoteName = args[2].trim();

    String refPrefix = Constants.R_HEADS;

    if (args.length >= 4)
        refPrefix = args[3].trim();

    String userName = null;
    String password = null;

    if (args.length >= 5)
        userName = args[4].trim();

    if (args.length == 6)
        password = args[5].trim();

    try {

        Repository repo = GitRepositoryUtils.buildFileRepository(new File(args[0]).getAbsoluteFile(), false,
                bare);

        Git git = new Git(repo);

        ObjectInserter objectInserter = repo.newObjectInserter();

        Collection<Ref> repositoryHeads = repo.getRefDatabase().getRefs(refPrefix).values();

        RevWalk rw = new RevWalk(repo);

        Map<String, ObjectId> tagNameToTagId = new HashMap<>();

        Map<String, Ref> tagNameToRef = new HashMap<>();

        for (Ref ref : repositoryHeads) {
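            // convert each branch whose name contains "tag" and "builds" into a tag object named after its module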

            String branchName = ref.getName().substring(refPrefix.length() + 1);

            if (branchName.contains("tag") && branchName.contains("builds")) {

                String branchParts[] = branchName.split("_");

                int buildsIndex = ArrayUtils.indexOf(branchParts, "builds");

                String moduleName = StringUtils.join(branchParts, "_", buildsIndex + 1, branchParts.length);

                RevCommit commit = rw.parseCommit(ref.getObjectId());

                ObjectId tag = GitRefUtils.insertTag(moduleName, commit, objectInserter);

                tagNameToTagId.put(moduleName, tag);

                tagNameToRef.put(moduleName, ref);

            }

        }

        BatchRefUpdate batch = repo.getRefDatabase().newBatchUpdate();

        List<RefSpec> branchesToDelete = new ArrayList<>();

        for (Entry<String, ObjectId> entry : tagNameToTagId.entrySet()) {

            String tagName = entry.getKey();

            // create the reference to the tag object
            batch.addCommand(
                    new ReceiveCommand(null, entry.getValue(), Constants.R_TAGS + tagName, Type.CREATE));

            // delete the original branch object

            Ref branch = tagNameToRef.get(entry.getKey());

            if (remoteName.equals("local")) {

                batch.addCommand(new ReceiveCommand(branch.getObjectId(), null, branch.getName(), Type.DELETE));

            } else {
                String adjustedBranchName = branch.getName().substring(refPrefix.length() + 1);

                branchesToDelete.add(new RefSpec(":" + Constants.R_HEADS + adjustedBranchName));
            }

        }

        // create the tags
        batch.execute(rw, new TextProgressMonitor());

        if (!remoteName.equals("local")) {
            // push the tag to the remote right now
            PushCommand pushCommand = git.push().setRemote(remoteName).setPushTags()
                    .setProgressMonitor(new TextProgressMonitor());

            if (userName != null)
                pushCommand.setCredentialsProvider(new UsernamePasswordCredentialsProvider(userName, password));

            Iterable<PushResult> results = pushCommand.call();

            for (PushResult pushResult : results) {

                if (!pushResult.equals(Result.NEW)) {
                    log.warn("failed to push tag " + pushResult.getMessages());
                }
            }

            // delete the branches from the remote
            results = git.push().setRemote(remoteName).setRefSpecs(branchesToDelete)
                    .setProgressMonitor(new TextProgressMonitor()).call();

            log.info("");

        }

        //         Result result = GitRefUtils.createTagReference(repo, moduleName, tag);
        //         
        //         if (!result.equals(Result.NEW)) {
        //            log.warn("failed to create tag {} for branch {}", moduleName, branchName);
        //            continue;
        //         }
        //         
        //         if (deleteMode) {
        //         result = GitRefUtils.deleteRef(repo, ref);
        //   
        //         if (!result.equals(Result.NEW)) {
        //            log.warn("failed to delete branch {}", branchName);
        //            continue;
        //         }

        objectInserter.release();

        rw.release();

    } catch (Exception e) {

        log.error("unexpected Exception ", e);
    }
}

From source file:com.example.geomesa.kafka.KafkaListener.java

public static void main(String[] args) throws Exception {
    // read command line args for a connection to Kafka
    CommandLineParser parser = new BasicParser();
    Options options = getCommonRequiredOptions();
    CommandLine cmd = parser.parse(options, args);

    // create the consumer KafkaDataStore object
    Map<String, String> dsConf = getKafkaDataStoreConf(cmd);
    DataStore consumerDS = DataStoreFinder.getDataStore(dsConf);

    // verify that we got back our KafkaDataStore object properly
    if (consumerDS == null) {
        throw new Exception("Null consumer KafkaDataStore");
    }

    Map<String, FeatureListener> listeners = new HashMap<>();

    try {
        for (String typeName : consumerDS.getTypeNames()) {
            System.out.println("Registering a feature listener for type " + typeName + ".");
            FeatureListener listener = new FeatureListener() {
                @Override
                public void changed(FeatureEvent featureEvent) {
                    System.out.println("Received FeatureEvent from layer " + typeName + " of Type: "
                            + featureEvent.getType());
                    if (featureEvent.getType() == FeatureEvent.Type.CHANGED
                            && featureEvent instanceof KafkaFeatureChanged) {
                        printFeature(((KafkaFeatureChanged) featureEvent).feature());
                    } else if (featureEvent.getType() == FeatureEvent.Type.REMOVED) {
                        System.out.println("Received Delete for filter: " + featureEvent.getFilter());
                    }
                }
            };
            consumerDS.getFeatureSource(typeName).addFeatureListener(listener);
            listeners.put(typeName, listener);
        }

        while (true) {
            // Wait for user to terminate with ctrl-C.
        }
    } finally {
        for (Entry<String, FeatureListener> entry : listeners.entrySet()) {
            consumerDS.getFeatureSource(entry.getKey()).removeFeatureListener(entry.getValue());
        }
        consumerDS.dispose();
    }
}

From source file:MyMap.java

public static void main(String[] argv) {

    // Construct and load the hash. This simulates loading a
    // database or reading from a file, or wherever the data is.

    Map map = new MyMap();

    // The hash maps from company name to address.
    // In real life this might map to an Address object...
    map.put("Adobe", "Mountain View, CA");
    map.put("Learning Tree", "Los Angeles, CA");
    map.put("IBM", "White Plains, NY");
    map.put("Netscape", "Mountain View, CA");
    map.put("Microsoft", "Redmond, WA");
    map.put("Sun", "Mountain View, CA");
    map.put("O'Reilly", "Sebastopol, CA");

    // Two versions of the "retrieval" phase.
    // Version 1: get one pair's value given its key
    // (presumably the key would really come from user input):
    String queryString = "O'Reilly";
    System.out.println("You asked about " + queryString + ".");
    String resultString = (String) map.get(queryString);
    System.out.println("They are located in: " + resultString);
    System.out.println();

    // Version 2: get ALL the keys and look up each value
    // (maybe to print a report, or to save to disk)
    Iterator k = map.keySet().iterator();
    while (k.hasNext()) {
        String key = (String) k.next();
        System.out.println("Key " + key + "; Value " + (String) map.get(key));
    }

    // Step 3 - try out the entrySet() method.
    Set es = map.entrySet();
    System.out.println("entrySet() returns " + es.size() + " Map.Entry's");
}

From source file:io.minimum.minecraft.rbclean.RedisBungeeClean.java

public static void main(String... args) {
    Options options = new Options();

    Option hostOption = new Option("h", "host", true, "Sets the Redis host to use.");
    hostOption.setRequired(true);
    options.addOption(hostOption);

    Option portOption = new Option("p", "port", true, "Sets the Redis port to use.");
    options.addOption(portOption);

    Option passwordOption = new Option("w", "password", true, "Sets the Redis password to use.");
    options.addOption(passwordOption);

    Option dryRunOption = new Option("d", "dry-run", false, "Performs a dry run (no data is modified).");
    options.addOption(dryRunOption);

    CommandLine commandLine;

    try {
        commandLine = new DefaultParser().parse(options, args);
    } catch (ParseException e) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("RedisBungeeClean", options);
        return;
    }

    int port = commandLine.hasOption('p') ? Integer.parseInt(commandLine.getOptionValue('p')) : 6379;

    try (Jedis jedis = new Jedis(commandLine.getOptionValue('h'), port, 0)) {
        if (commandLine.hasOption('w')) {
            jedis.auth(commandLine.getOptionValue('w'));
        }

        System.out.println("Fetching UUID cache...");
        Map<String, String> uuidCache = jedis.hgetAll("uuid-cache");
        Gson gson = new Gson();

        // Just in case we need it, compress everything in JSON format.
        if (!commandLine.hasOption('d')) {
            SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd-hh-mm-ss");
            File file = new File("uuid-cache-previous-" + dateFormat.format(new Date()) + ".json.gz");
            try {
                file.createNewFile();
            } catch (IOException e) {
                System.out.println("Can't write backup of the UUID cache, will NOT proceed.");
                e.printStackTrace();
                return;
            }

            System.out.println("Creating backup (as " + file.getName() + ")...");

            try (OutputStreamWriter bw = new OutputStreamWriter(
                    new GZIPOutputStream(new FileOutputStream(file)))) {
                gson.toJson(uuidCache, bw);
            } catch (IOException e) {
                System.out.println("Can't write backup of the UUID cache, will NOT proceed.");
                e.printStackTrace();
                return;
            }
        }

        System.out.println("Cleaning out the bird cage (this may take a while...)");
        int originalSize = uuidCache.size();
        for (Iterator<Map.Entry<String, String>> it = uuidCache.entrySet().iterator(); it.hasNext();) {
            CachedUUIDEntry entry = gson.fromJson(it.next().getValue(), CachedUUIDEntry.class);

            if (entry.expired()) {
                it.remove();
            }
        }
        int newSize = uuidCache.size();

        if (commandLine.hasOption('d')) {
            System.out.println(
                    (originalSize - newSize) + " records would be expunged if a dry run was not conducted.");
        } else {
            System.out.println("Expunging " + (originalSize - newSize) + " records...");
            Transaction transaction = jedis.multi();
            transaction.del("uuid-cache");
            transaction.hmset("uuid-cache", uuidCache);
            transaction.exec();
            System.out.println("Expunging complete.");
        }
    }
}

From source file:de.tudarmstadt.ukp.experiments.dip.wp1.documents.Step6HITPreparator.java

public static void main(String[] args) throws Exception {
    // input dir - list of xml query containers
    // step5-linguistic-annotation/
    System.err.println("Starting step 6 HIT Preparation");

    File inputDir = new File(args[0]);

    // output dir
    File outputDir = new File(args[1]);
    if (outputDir.exists()) {
        outputDir.delete();
    }
    outputDir.mkdir();

    List<String> queries = new ArrayList<>();

    // iterate over query containers
    int countClueWeb = 0;
    int countSentence = 0;
    for (File f : FileUtils.listFiles(inputDir, new String[] { "xml" }, false)) {
        QueryResultContainer queryResultContainer = QueryResultContainer
                .fromXML(FileUtils.readFileToString(f, "utf-8"));
        if (queries.contains(f.getName()) || queries.size() == 0) {
            // groups contain only non-empty documents
            Map<Integer, List<QueryResultContainer.SingleRankedResult>> groups = new HashMap<>();

            // split to groups according to number of sentences
            for (QueryResultContainer.SingleRankedResult rankedResult : queryResultContainer.rankedResults) {
                if (rankedResult.originalXmi != null) {
                    byte[] bytes = new BASE64Decoder()
                            .decodeBuffer(new ByteArrayInputStream(rankedResult.originalXmi.getBytes()));
                    JCas jCas = JCasFactory.createJCas();
                    XmiCasDeserializer.deserialize(new ByteArrayInputStream(bytes), jCas.getCas());

                    Collection<Sentence> sentences = JCasUtil.select(jCas, Sentence.class);

                    int groupId = sentences.size() / 40;
                    if (rankedResult.originalXmi == null) {
                        System.err.println("Empty document: " + rankedResult.clueWebID);
                    } else {
                        if (!groups.containsKey(groupId)) {
                            groups.put(groupId, new ArrayList<>());

                        }
                    }
                    //handle it
                    groups.get(groupId).add(rankedResult);
                    countClueWeb++;
                }
            }

            for (Map.Entry<Integer, List<QueryResultContainer.SingleRankedResult>> entry : groups.entrySet()) {
                Integer groupId = entry.getKey();
                List<QueryResultContainer.SingleRankedResult> rankedResults = entry.getValue();

                // make sure the results are sorted
                // DEBUG
                //                for (QueryResultContainer.SingleRankedResult r : rankedResults) {
                //                    System.out.print(r.rank + "\t");
                //                }

                Collections.sort(rankedResults, (o1, o2) -> o1.rank.compareTo(o2.rank));

                // iterate over results for one query and group
                for (int i = 0; i < rankedResults.size() && i < TOP_RESULTS_PER_GROUP; i++) {
                    QueryResultContainer.SingleRankedResult rankedResult = rankedResults.get(i);

                    QueryResultContainer.SingleRankedResult r = rankedResults.get(i);
                    int rank = r.rank;
                    MustacheFactory mf = new DefaultMustacheFactory();
                    Mustache mustache = mf.compile("template/template.html");
                    String queryId = queryResultContainer.qID;
                    String query = queryResultContainer.query;
                    // make the first letter uppercase
                    query = query.substring(0, 1).toUpperCase() + query.substring(1);

                    List<String> relevantInformationExamples = queryResultContainer.relevantInformationExamples;
                    List<String> irrelevantInformationExamples = queryResultContainer.irrelevantInformationExamples;
                    byte[] bytes = new BASE64Decoder()
                            .decodeBuffer(new ByteArrayInputStream(rankedResult.originalXmi.getBytes()));

                    JCas jCas = JCasFactory.createJCas();
                    XmiCasDeserializer.deserialize(new ByteArrayInputStream(bytes), jCas.getCas());

                    List<generators.Sentence> sentences = new ArrayList<>();
                    List<Integer> paragraphs = new ArrayList<>();
                    paragraphs.add(0);

                    for (WebParagraph webParagraph : JCasUtil.select(jCas, WebParagraph.class)) {
                        for (Sentence s : JCasUtil.selectCovered(Sentence.class, webParagraph)) {

                            String sentenceBegin = String.valueOf(s.getBegin());
                            generators.Sentence sentence = new generators.Sentence(s.getCoveredText(),
                                    sentenceBegin);
                            sentences.add(sentence);
                            countSentence++;
                        }
                        int SentenceID = paragraphs.get(paragraphs.size() - 1);
                        if (sentences.size() > 120)
                            while (SentenceID < sentences.size()) {
                                if (!paragraphs.contains(SentenceID))
                                    paragraphs.add(SentenceID);
                                SentenceID = SentenceID + 120;
                            }
                        paragraphs.add(sentences.size());

                    }
                    System.err.println("Output dir: " + outputDir);
                    int startID = 0;
                    int endID;

                    for (int j = 0; j < paragraphs.size(); j++) {

                        endID = paragraphs.get(j);
                        int sentLength = endID - startID;
                        if (sentLength > 120 || j == paragraphs.size() - 1) {
                            if (sentLength > 120) {

                                endID = paragraphs.get(j - 1);
                                j--;
                            }
                            sentLength = endID - startID;
                            if (sentLength <= 40)
                                groupId = 40;
                            else if (sentLength <= 80 && sentLength > 40)
                                groupId = 80;
                            else if (sentLength > 80)
                                groupId = 120;

                            File folder = new File(outputDir + "/" + groupId);
                            if (!folder.exists()) {
                                System.err.println("creating directory: " + outputDir + "/" + groupId);
                                boolean result = false;

                                try {
                                    folder.mkdir();
                                    result = true;
                                } catch (SecurityException se) {
                                    //handle it
                                }
                                if (result) {
                                    System.out.println("DIR created");
                                }
                            }

                            String newHtmlFile = folder.getAbsolutePath() + "/" + f.getName() + "_"
                                    + rankedResult.clueWebID + "_" + sentLength + ".html";
                            System.err.println("Printing a file: " + newHtmlFile);
                            File newHTML = new File(newHtmlFile);
                            int t = 0;
                            while (newHTML.exists()) {
                                newHTML = new File(folder.getAbsolutePath() + "/" + f.getName() + "_"
                                        + rankedResult.clueWebID + "_" + sentLength + "." + t + ".html");
                                t++;
                            }
                            mustache.execute(new PrintWriter(new FileWriter(newHTML)),
                                    new generators(query, relevantInformationExamples,
                                            irrelevantInformationExamples, sentences.subList(startID, endID),
                                            queryId, rank))
                                    .flush();
                            startID = endID;
                        }
                    }
                }
            }

        }
    }
    System.out.println("Printed " + countClueWeb + " documents with " + countSentence + " sentences");
}