Example usage for java.util Map remove

List of usage examples for java.util Map remove

Introduction

On this page you can find example usages for java.util Map remove.

Prototype

V remove(Object key);

Document

Removes the mapping for a key from this map if it is present (optional operation).
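
Note the return value: remove returns the value previously associated with the key, or null if the map contained no mapping for it. Because removal is an optional operation, an unmodifiable map throws UnsupportedOperationException instead. A minimal sketch (names here are illustrative):

Map<String, Integer> ages = new HashMap<String, Integer>();
ages.put("alice", 30);
Integer removed = ages.remove("alice"); // 30
Integer absent = ages.remove("bob");    // null: no mapping for "bob"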

Usage

From source file:HashMapExample.java

public static void main(String[] args) {
    Map<Integer, String> map = new HashMap<Integer, String>();

    map.put(1, "One");
    map.put(2, "Two");
    map.put(3, "Three");
    map.put(4, "Four");
    map.put(5, "Five");

    System.out.println("Map Values Before: ");
    Set<Integer> keys = map.keySet();
    for (Iterator<Integer> i = keys.iterator(); i.hasNext();) {
        Integer key = i.next();
        String value = map.get(key);
        System.out.println(key + " = " + value);
    }

    // Remove the entry with key 3 (the map holds keys 1 through 5)
    System.out.println("\nRemove element with key 3");
    map.remove(3);

    System.out.println("\nMap Values After: ");
    keys = map.keySet();
    for (Iterator<Integer> i = keys.iterator(); i.hasNext();) {
        Integer key = i.next();
        String value = map.get(key);
        System.out.println(key + " = " + value);
    }
}

From source file:com.joliciel.talismane.fr.TalismaneFrench.java

/**
 * @param args
 * @throws Exception
 */
public static void main(String[] args) throws Exception {
    Map<String, String> argsMap = TalismaneConfig.convertArgs(args);
    CorpusFormat corpusReaderType = null;
    String treebankDirPath = null;
    boolean keepCompoundPosTags = true;

    if (argsMap.containsKey("corpusReader")) {
        corpusReaderType = CorpusFormat.valueOf(argsMap.get("corpusReader"));
        argsMap.remove("corpusReader");
    }
    if (argsMap.containsKey("treebankDir")) {
        treebankDirPath = argsMap.get("treebankDir");
        argsMap.remove("treebankDir");
    }
    if (argsMap.containsKey("keepCompoundPosTags")) {
        keepCompoundPosTags = argsMap.get("keepCompoundPosTags").equalsIgnoreCase("true");
        argsMap.remove("keepCompoundPosTags");
    }

    Extensions extensions = new Extensions();
    extensions.pluckParameters(argsMap);

    TalismaneFrench talismaneFrench = new TalismaneFrench();

    TalismaneConfig config = new TalismaneConfig(argsMap, talismaneFrench);
    if (config.getCommand() == null)
        return;

    if (corpusReaderType != null) {
        if (corpusReaderType == CorpusFormat.ftbDep) {
            File inputFile = new File(config.getInFilePath());
            FtbDepReader ftbDepReader = new FtbDepReader(inputFile, config.getInputCharset());

            ftbDepReader.setKeepCompoundPosTags(keepCompoundPosTags);
            ftbDepReader.setPredictTransitions(config.isPredictTransitions());

            config.setParserCorpusReader(ftbDepReader);
            config.setPosTagCorpusReader(ftbDepReader);
            config.setTokenCorpusReader(ftbDepReader);

            if (config.getCommand().equals(Command.compare)) {
                File evaluationFile = new File(config.getEvaluationFilePath());
                FtbDepReader ftbDepEvaluationReader = new FtbDepReader(evaluationFile,
                        config.getInputCharset());
                ftbDepEvaluationReader.setKeepCompoundPosTags(keepCompoundPosTags);
                config.setParserEvaluationCorpusReader(ftbDepEvaluationReader);
                config.setPosTagEvaluationCorpusReader(ftbDepEvaluationReader);
            }
        } else if (corpusReaderType == CorpusFormat.ftb) {
            TalismaneServiceLocator talismaneServiceLocator = TalismaneServiceLocator.getInstance();
            TreebankServiceLocator treebankServiceLocator = TreebankServiceLocator
                    .getInstance(talismaneServiceLocator);
            TreebankUploadService treebankUploadService = treebankServiceLocator
                    .getTreebankUploadServiceLocator().getTreebankUploadService();
            TreebankExportService treebankExportService = treebankServiceLocator
                    .getTreebankExportServiceLocator().getTreebankExportService();
            File treebankFile = new File(treebankDirPath);
            TreebankReader treebankReader = treebankUploadService.getXmlReader(treebankFile);

            // we prepare both the tokeniser and pos-tag readers, just in case they are needed
            InputStream posTagMapStream = talismaneFrench.getDefaultPosTagMapFromStream();
            Scanner scanner = new Scanner(posTagMapStream, "UTF-8");
            List<String> descriptors = new ArrayList<String>();
            while (scanner.hasNextLine())
                descriptors.add(scanner.nextLine());
            FtbPosTagMapper ftbPosTagMapper = treebankExportService.getFtbPosTagMapper(descriptors,
                    talismaneFrench.getDefaultPosTagSet());
            PosTagAnnotatedCorpusReader posTagAnnotatedCorpusReader = treebankExportService
                    .getPosTagAnnotatedCorpusReader(treebankReader, ftbPosTagMapper, keepCompoundPosTags);
            config.setPosTagCorpusReader(posTagAnnotatedCorpusReader);

            TokeniserAnnotatedCorpusReader tokenCorpusReader = treebankExportService
                    .getTokeniserAnnotatedCorpusReader(treebankReader, ftbPosTagMapper, keepCompoundPosTags);
            config.setTokenCorpusReader(tokenCorpusReader);

            SentenceDetectorAnnotatedCorpusReader sentenceCorpusReader = treebankExportService
                    .getSentenceDetectorAnnotatedCorpusReader(treebankReader);
            config.setSentenceCorpusReader(sentenceCorpusReader);
        } else if (corpusReaderType == CorpusFormat.conll || corpusReaderType == CorpusFormat.spmrl) {
            File inputFile = new File(config.getInFilePath());

            ParserRegexBasedCorpusReader corpusReader = config.getParserService()
                    .getRegexBasedCorpusReader(inputFile, config.getInputCharset());

            corpusReader.setPredictTransitions(config.isPredictTransitions());

            config.setParserCorpusReader(corpusReader);
            config.setPosTagCorpusReader(corpusReader);
            config.setTokenCorpusReader(corpusReader);

            if (corpusReaderType == CorpusFormat.spmrl) {
                corpusReader.setRegex(
                        "%INDEX%\\t%TOKEN%\\t.*\\t.*\\t%POSTAG%\\t.*\\t.*\\t.*\\t%GOVERNOR%\\t%LABEL%");
            }

            if (config.getCommand().equals(Command.compare)) {
                File evaluationFile = new File(config.getEvaluationFilePath());
                ParserRegexBasedCorpusReader evaluationReader = config.getParserService()
                        .getRegexBasedCorpusReader(evaluationFile, config.getInputCharset());
                config.setParserEvaluationCorpusReader(evaluationReader);
                config.setPosTagEvaluationCorpusReader(evaluationReader);
            }
        } else {
            throw new TalismaneException("Unknown corpusReader: " + corpusReaderType);
        }
    }
    Talismane talismane = config.getTalismane();

    extensions.prepareCommand(config, talismane);

    talismane.process();
}
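
A detail worth noting in this example: each recognized option is read and then removed from argsMap, so the map eventually handed to TalismaneConfig contains only options that class itself understands. Since remove returns the previous value, the containsKey/get/remove triple can also be collapsed into one call; a minimal sketch (option name illustrative):

String treebankDirPath = argsMap.remove("treebankDir"); // the value, or null if the option was absent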

From source file:eu.trentorise.smartcampus.mobility.util.GamificationHelper.java

public static void main(String[] args)
        throws UnknownHostException, MongoException, SecurityException, RemoteException {
    MongoTemplate mg = new MongoTemplate(new Mongo("127.0.0.1", 37017), "mobility-logging");
    List<Map> findAll = mg.findAll(Map.class, "forgamification");
    for (Map m : findAll) {
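        // drop Mongo's internal _id field before re-posting the document as JSON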
        m.remove("_id");
        RemoteConnector.postJSON("http://localhost:8900", "/execute", JsonUtils.toJSON(m), null);
    }
}

From source file:TreeMapExample.java

public static void main(String[] args) {
    Map<Integer, String> map = new TreeMap<Integer, String>();

    // Add items to the TreeMap
    map.put(1, "One");
    map.put(2, "Two");
    map.put(3, "Three");
    map.put(4, "Four");
    map.put(5, "Five");
    map.put(6, "Six");
    map.put(7, "Seven");
    map.put(8, "Eight");
    map.put(9, "Nine");
    map.put(10, "Ten");

    // Use an iterator to display the keys and associated values
    System.out.println("Map Values Before: ");
    Set<Integer> keys = map.keySet();
    for (Iterator<Integer> i = keys.iterator(); i.hasNext();) {
        Integer key = i.next();
        String value = map.get(key);
        System.out.println(key + " = " + value);
    }

    // Remove the entry with key 6
    System.out.println("\nRemove element with key 6");
    map.remove(6);

    // Use an iterator to display the keys and associated values
    System.out.println("\nMap Values After: ");
    keys = map.keySet();
    for (Iterator<Integer> i = keys.iterator(); i.hasNext();) {
        Integer key = i.next();
        String value = map.get(key);
        System.out.println(key + " = " + value);
    }
}

From source file:Main.java

public static void main(String args[]) {
    Map<String, Integer> atomNums = new TreeMap<String, Integer>();

    atomNums.put("A", 1);
    atomNums.put("B", 2);
    atomNums.put("C", 3);
    atomNums.put("D", 4);
    atomNums.put("E", 5);
    atomNums.put("F", 6);

    System.out.println("The map contains these " + atomNums.size() + " entries:");

    Set<Map.Entry<String, Integer>> set = atomNums.entrySet();

    for (Map.Entry<String, Integer> me : set) {
        System.out.print(me.getKey() + ", Atomic Number: ");
        System.out.println(me.getValue());
    }
    TreeMap<String, Integer> atomNums2 = new TreeMap<String, Integer>();

    atomNums2.put("Q", 30);
    atomNums2.put("W", 82);

    atomNums.putAll(atomNums2);

    set = atomNums.entrySet();

    System.out.println("The map now contains these " + atomNums.size() + " entries:");
    for (Map.Entry<String, Integer> me : set) {
        System.out.print(me.getKey() + ", Atomic Number: ");
        System.out.println(me.getValue());
    }

    if (atomNums.containsKey("A"))
        System.out.println("A has an atomic number of " + atomNums.get("A"));

    if (atomNums.containsValue(82))
        System.out.println("The atomic number 82 is in the map.");
    System.out.println();

    if (atomNums.remove("A") != null)
        System.out.println("A has been removed.\n");
    else
        System.out.println("Entry not found.\n");

    Set<String> keys = atomNums.keySet();
    for (String str : keys)
        System.out.println(str + " ");

    Collection<Integer> vals = atomNums.values();
    for (Integer n : vals)
        System.out.println(n + " ");

    atomNums.clear();
    if (atomNums.isEmpty())
        System.out.println("The map is now empty.");
}

From source file:de.unileipzig.ub.indexer.App.java

public static void main(String[] args) throws IOException {

    // create Options object
    Options options = new Options();

    options.addOption("h", "help", false, "display this help");

    options.addOption("f", "filename", true, "name of the JSON file whose content should be indexed");
    options.addOption("i", "index", true, "the name of the target index");
    options.addOption("d", "doctype", true, "the name of the doctype (title, local, ...)");

    options.addOption("t", "host", true, "elasticsearch hostname (default: 0.0.0.0)");
    options.addOption("p", "port", true, "transport port (that's NOT the http port, default: 9300)");
    options.addOption("c", "cluster", true, "cluster name (default: elasticsearch_mdma)");

    options.addOption("b", "bulksize", true, "number of docs sent in one request (default: 3000)");
    options.addOption("v", "verbose", false, "show processing speed while indexing");
    options.addOption("s", "status", false, "only show status of index for file");

    options.addOption("r", "repair", false, "attempt to repair recoverable inconsistencies on the go");
    options.addOption("e", "debug", false, "set logging level to debug");
    options.addOption("l", "logfile", true, "logfile - in not specified only log to stdout");

    options.addOption("m", "memcached", true, "host and port of memcached (default: localhost:11211)");
    options.addOption("z", "latest-flag-on", true,
            "enable latest flag according to field (within content, e.g. 001)");
    options.addOption("a", "flat", false, "flat-mode: do not check for inconsistencies");

    CommandLineParser parser = new PosixParser();
    CommandLine cmd = null;

    try {
        cmd = parser.parse(options, args);
    } catch (ParseException ex) {
        logger.error(ex);
        System.exit(1);
    }

    // setup logging
    Properties systemProperties = System.getProperties();
    systemProperties.put("net.spy.log.LoggerImpl", "net.spy.memcached.compat.log.Log4JLogger");
    System.setProperties(systemProperties);
    Logger.getLogger("net.spy.memcached").setLevel(Level.ERROR);

    Properties props = new Properties();
    props.load(props.getClass().getResourceAsStream("/log4j.properties"));

    if (cmd.hasOption("debug")) {
        props.setProperty("log4j.logger.de.unileipzig", "DEBUG");
    }

    if (cmd.hasOption("logfile")) {
        props.setProperty("log4j.rootLogger", "INFO, stdout, F");
        props.setProperty("log4j.appender.F", "org.apache.log4j.FileAppender");
        props.setProperty("log4j.appender.F.File", cmd.getOptionValue("logfile"));
        props.setProperty("log4j.appender.F.layout", "org.apache.log4j.PatternLayout");
        props.setProperty("log4j.appender.F.layout.ConversionPattern", "%5p | %d | %F | %L | %m%n");
    }

    PropertyConfigurator.configure(props);

    InetAddress addr = InetAddress.getLocalHost();
    String memcachedHostAndPort = addr.getHostAddress() + ":11211";
    if (cmd.hasOption("m")) {
        memcachedHostAndPort = cmd.getOptionValue("m");
    }

    // setup caching
    try {
        if (memcachedClient == null) {
            memcachedClient = new MemcachedClient(
                    new ConnectionFactoryBuilder().setFailureMode(FailureMode.Cancel).build(),
                    AddrUtil.getAddresses(memcachedHostAndPort));
            try {
                // give client and server 300ms to settle
                Thread.sleep(300);
            } catch (InterruptedException ex) {
            }

            Collection availableServers = memcachedClient.getAvailableServers();
            logger.info(availableServers);
            if (availableServers.size() == 0) {
                logger.info("no memcached servers found");
                memcachedClient.shutdown();
                memcachedClient = null;
            } else {
                logger.info(availableServers.size() + " memcached server(s) detected, fine.");
            }
        }
    } catch (IOException ex) {
        logger.warn("couldn't create a connection, bailing out: " + ex.getMessage());
    }

    // process options

    if (cmd.hasOption("h")) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("indexer", options, true);
        quit(0);
    }

    boolean verbose = false;
    if (cmd.hasOption("verbose")) {
        verbose = true;
    }

    // ES options
    String[] hosts = new String[] { "0.0.0.0" };
    int port = 9300;
    String clusterName = "elasticsearch_mdma";
    int bulkSize = 3000;

    if (cmd.hasOption("host")) {
        hosts = cmd.getOptionValues("host");
    }
    if (cmd.hasOption("port")) {
        port = Integer.parseInt(cmd.getOptionValue("port"));
    }
    if (cmd.hasOption("cluster")) {
        clusterName = cmd.getOptionValue("cluster");
    }
    if (cmd.hasOption("bulksize")) {
        bulkSize = Integer.parseInt(cmd.getOptionValue("bulksize"));
        if (bulkSize < 1 || bulkSize > 100000) {
            logger.error("bulksize must be between 1 and 100,000");
            quit(1);
        }
    }

    // ES Client
    final Settings settings = ImmutableSettings.settingsBuilder().put("cluster.name", clusterName)
            .build();
    final TransportClient client = new TransportClient(settings);
    for (String host : hosts) {
        client.addTransportAddress(new InetSocketTransportAddress(host, port));
    }

    if (cmd.hasOption("filename") && cmd.hasOption("index") && cmd.hasOption("doctype")) {

        final String filename = cmd.getOptionValue("filename");

        final File _file = new File(filename);
        if (_file.length() == 0) {
            logger.info(_file.getAbsolutePath() + " is empty, skipping");
            quit(0); // file is empty
        }

        // for flat mode: leave a stampfile beside the json to 
        // indicate previous successful processing
        File directory = new File(filename).getParentFile();
        File stampfile = new File(directory, DigestUtils.shaHex(filename) + ".indexed");

        long start = System.currentTimeMillis();
        long lineCount = 0;

        final String indexName = cmd.getOptionValue("index");
        final String docType = cmd.getOptionValue("doctype");
        BulkRequestBuilder bulkRequest = client.prepareBulk();

        try {
            if (cmd.hasOption("flat")) {
                // flat mode
                // .........
                if (stampfile.exists()) {
                    logger.info("SKIPPING, since it seems this file has already " + "been imported (found: "
                            + stampfile.getAbsolutePath() + ")");
                    quit(0);
                }
            } else {

                final String srcSHA1 = extractSrcSHA1(filename);

                logger.debug(filename + " srcsha1: " + srcSHA1);

                long docsInIndex = getIndexedRecordCount(client, indexName, srcSHA1);
                logger.debug(filename + " indexed: " + docsInIndex);

                long docsInFile = getLineCount(filename);
                logger.debug(filename + " lines: " + docsInFile);

                // in non-flat-mode, indexing would take care
                // of inconsistencies
                if (docsInIndex == docsInFile) {
                    logger.info("UP-TO DATE: " + filename + " (" + docsInIndex + ", " + srcSHA1 + ")");
                    client.close();
                    quit(0);
                }

                if (docsInIndex > 0) {
                    logger.warn("INCONSISTENCY DETECTED: " + filename + ": indexed:" + docsInIndex + " lines:"
                            + docsInFile);

                    if (!cmd.hasOption("r")) {
                        logger.warn(
                                "Please re-run indexer with --repair flag or delete residues first with: $ curl -XDELETE "
                                        + hosts[0] + ":9200/" + indexName
                                        + "/_query -d ' {\"term\" : { \"meta.srcsha1\" : \"" + srcSHA1
                                        + "\" }}'");
                        client.close();
                        quit(1);
                    } else {
                        logger.info("Attempting to clear residues...");
                        // attempt to repair once
                        DeleteByQueryResponse dbqr = client.prepareDeleteByQuery(indexName)
                                .setQuery(termQuery("meta.srcsha1", srcSHA1)).execute().actionGet();

                        Iterator<IndexDeleteByQueryResponse> it = dbqr.iterator();
                        long deletions = 0;
                        while (it.hasNext()) {
                            IndexDeleteByQueryResponse response = it.next();
                            deletions += 1;
                        }
                        logger.info("Deleted residues of " + filename);
                        logger.info("Refreshing [" + indexName + "]");
                        RefreshResponse refreshResponse = client.admin().indices()
                                .refresh(new RefreshRequest(indexName)).actionGet();

                        long indexedAfterDelete = getIndexedRecordCount(client, indexName, srcSHA1);
                        logger.info(indexedAfterDelete + " docs remained");
                        if (indexedAfterDelete > 0) {
                            logger.warn("Not all residues cleaned. Try to fix this manually: $ curl -XDELETE "
                                    + hosts[0] + ":9200/" + indexName
                                    + "/_query -d ' {\"term\" : { \"meta.srcsha1\" : \"" + srcSHA1 + "\" }}'");
                            quit(1);
                        } else {
                            logger.info("Residues are gone. Now trying to reindex: " + filename);
                        }
                    }
                }
            }

            logger.info("INDEXING-REQUIRED: " + filename);
            if (cmd.hasOption("status")) {
                quit(0);
            }

            HashSet<String> idsInBatch = new HashSet<String>();

            String idField = null;
            if (cmd.hasOption("z")) {
                idField = cmd.getOptionValue("z");
            }

            final FileReader fr = new FileReader(filename);
            final BufferedReader br = new BufferedReader(fr);

            String line;
            // one line is one document
            while ((line = br.readLine()) != null) {

                // "Latest-Flag" machine
                // This gets obsolete with a "flat" index
                if (cmd.hasOption("z")) {
                    // flag that indicates, whether the document
                    // about to be indexed will be the latest
                    boolean willBeLatest = true;

                    // check if there is a previous (lower meta.timestamp) document with 
                    // the same identifier (whatever that may be - queried under "content")
                    final String contentIdentifier = getContentIdentifier(line, idField);
                    idsInBatch.add(contentIdentifier);

                    // assumed in meta.timestamp
                    final Long timestamp = Long.parseLong(getTimestamp(line));

                    logger.debug("Checking whether record is latest (line: " + lineCount + ")");
                    logger.debug(contentIdentifier + ", " + timestamp);

                    // get all docs, which match the contentIdentifier
                    // by filter, which doesn't score
                    final TermFilterBuilder idFilter = new TermFilterBuilder("content." + idField,
                            contentIdentifier);
                    final TermFilterBuilder kindFilter = new TermFilterBuilder("meta.kind", docType);
                    final AndFilterBuilder afb = new AndFilterBuilder();
                    afb.add(idFilter).add(kindFilter);
                    final FilteredQueryBuilder fb = filteredQuery(matchAllQuery(), afb);

                    final SearchResponse searchResponse = client.prepareSearch(indexName)
                            .setSearchType(SearchType.DFS_QUERY_THEN_FETCH).setQuery(fb).setFrom(0)
                            .setSize(1200) // 3 years and 105 days assuming daily updates at the most
                            .setExplain(false).execute().actionGet();

                    final SearchHits searchHits = searchResponse.getHits();

                    logger.debug("docs with this id in the index: " + searchHits.getTotalHits());

                    for (final SearchHit hit : searchHits.getHits()) {
                        final String docId = hit.id();
                        final Map<String, Object> source = hit.sourceAsMap();
                        final Map meta = (Map) source.get("meta");
                        final Long docTimestamp = Long.parseLong(meta.get("timestamp").toString());
                        // if the indexed doc's timestamp is not newer than the current one,
                        // remove its latest flag
                        if (timestamp >= docTimestamp) {
                            source.remove("latest");
                            final ObjectMapper mapper = new ObjectMapper();
                            // put the updated doc back
                            // IndexResponse response = 
                            client.prepareIndex(indexName, docType).setCreate(false).setId(docId)
                                    .setSource(mapper.writeValueAsBytes(source))
                                    .execute(new ActionListener<IndexResponse>() {
                                        public void onResponse(IndexResponse rspns) {
                                            logger.debug("Removed latest flag from " + contentIdentifier + ", "
                                                    + docTimestamp + ", " + hit.id() + " since (" + timestamp
                                                    + " > " + docTimestamp + ")");
                                        }

                                        public void onFailure(Throwable thrwbl) {
                                            logger.error("Could not remove flag from " + hit.id() + ", "
                                                    + contentIdentifier);
                                        }
                                    });
                            // .execute()
                            //.actionGet();
                        } else {
                            logger.debug("Doc " + hit.id() + " is newer (" + docTimestamp + ")");
                            willBeLatest = false;
                        }
                    }

                    if (willBeLatest) {
                        line = setLatestFlag(line);
                        logger.info("Setting latest flag on " + contentIdentifier + ", " + timestamp);
                    }

                    // end of latest-flag machine
                    // beware - this will be correct as long as there
                    // are no dups within one bulk!
                }

                bulkRequest.add(client.prepareIndex(indexName, docType).setSource(line));
                lineCount++;
                logger.debug("Added line " + lineCount + " to BULK");
                logger.debug(line);

                if (lineCount % bulkSize == 0) {

                    if (idsInBatch.size() != bulkSize && cmd.hasOption("z")) {
                        logger.error(
                                "This batch has duplications in the ID. That's not bad for the index, just makes the latest flag fuzzy");
                        logger.error(
                                "Bulk size was: " + bulkSize + ", but " + idsInBatch.size() + " IDs (only)");
                    }
                    idsInBatch.clear();

                    logger.debug("Issuing BULK request");

                    final long actionCount = bulkRequest.numberOfActions();
                    final BulkResponse bulkResponse = bulkRequest.execute().actionGet();
                    final long tookInMillis = bulkResponse.getTookInMillis();

                    if (bulkResponse.hasFailures()) {
                        logger.fatal("FAILED, bulk not indexed. exiting now.");
                        Iterator<BulkItemResponse> it = bulkResponse.iterator();
                        while (it.hasNext()) {
                            BulkItemResponse bir = it.next();
                            if (bir.isFailed()) {
                                Failure failure = bir.getFailure();
                                logger.fatal("id: " + failure.getId() + ", message: " + failure.getMessage()
                                        + ", type: " + failure.getType() + ", index: " + failure.getIndex());
                            }
                        }
                        quit(1);
                    } else {
                        if (verbose) {
                            final double elapsed = System.currentTimeMillis() - start;
                            final double speed = (lineCount / elapsed * 1000);
                            logger.info("OK (" + filename + ") " + lineCount + " docs indexed (" + actionCount
                                    + "/" + tookInMillis + "ms" + "/" + String.format("%.2f", speed) + "r/s)");
                        }
                    }
                    bulkRequest = client.prepareBulk();
                }
            }

            // handle the remaining items
            final long actionCount = bulkRequest.numberOfActions();
            if (actionCount > 0) {
                final BulkResponse bulkResponse = bulkRequest.execute().actionGet();
                final long tookInMillis = bulkResponse.getTookInMillis();

                if (bulkResponse.hasFailures()) {
                    logger.fatal("FAILED, bulk not indexed. exiting now.");
                    Iterator<BulkItemResponse> it = bulkResponse.iterator();
                    while (it.hasNext()) {
                        BulkItemResponse bir = it.next();
                        if (bir.isFailed()) {
                            Failure failure = bir.getFailure();
                            logger.fatal("id: " + failure.getId() + ", message: " + failure.getMessage()
                                    + ", type: " + failure.getType() + ", index: " + failure.getIndex());
                        }
                    }
                    quit(1);
                } else {

                    // trigger update now
                    RefreshResponse refreshResponse = client.admin().indices()
                            .refresh(new RefreshRequest(indexName)).actionGet();

                    if (verbose) {
                        final double elapsed = System.currentTimeMillis() - start;
                        final double speed = (lineCount / elapsed * 1000);
                        logger.info("OK (" + filename + ") " + lineCount + " docs indexed (" + actionCount + "/"
                                + tookInMillis + "ms" + "/" + String.format("%.2f", speed) + "r/s)");
                    }

                }

            }

            br.close();
            client.close();
            final double elapsed = (System.currentTimeMillis() - start) / 1000.0;
            final double speed = (lineCount / elapsed);
            logger.info("indexing (" + filename + ") " + lineCount + " docs took " + elapsed + "s (speed: "
                    + String.format("%.2f", speed) + "r/s)");
            if (cmd.hasOption("flat")) {
                try {
                    FileUtils.touch(stampfile);
                } catch (IOException ioe) {
                    logger.warn(".indexed files not created. Will reindex everything everytime.");
                }
            }
        } catch (IOException e) {
            client.close();
            logger.error(e);
            quit(1);
        } finally {
            client.close();
        }
    }
    quit(0);
}

From source file:Main.java

public static void removeFontAttr(Component component, Object key) {
    Font original = component.getFont();
    Map<TextAttribute, ?> attributes = original.getAttributes();
    attributes.remove(key);
    component.setFont(original.deriveFont(attributes));
}

From source file:Main.java

public static final Object remove(Object key) {
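    // LOCAL_CACHE is presumably a ThreadLocal<Map<Object, Object>>;
    // a null map just means this thread has not cached anything yet.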
    Map<Object, Object> cache = LOCAL_CACHE.get();
    return cache == null ? null : cache.remove(key);
}

From source file:Main.java

public static void removeKeys(Collection<?> keys, Map<?, ?> map) {
    for (Object key : keys) {
        map.remove(key);
    }
}
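
The same effect is available through the key-set view, which writes through to the backing map, so the loop above collapses to one line (assuming the map supports removal):

map.keySet().removeAll(keys);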

From source file:Main.java

/**
 * Records an occurrence of <code>key</code> in a frequency
 * <code>Map</code>, incrementing its count if <code>key</code> is
 * already present. If <code>key</code> is not present, its count is
 * initialized to 1.
 *
 * @param map the map to store <code>key</code> in.
 * @param key the key to store in the map and count the frequency of.
 */
private static <K> void putFrequencyMap(Map<K, Integer> map, K key) {
    Integer count = map.remove(key);
    if (count == null)
        count = 0;
    count++;
    map.put(key, count);
}
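
For reference, on Java 8 and later the remove-and-put pair above collapses into a single call to Map.merge:

private static <K> void putFrequencyMap(Map<K, Integer> map, K key) {
    map.merge(key, 1, Integer::sum); // inserts 1, or adds 1 to the existing count
}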