Example usage for java.util.TreeSet size()

List of usage examples for java.util.TreeSet size()

Introduction

On this page you can find example usage of java.util.TreeSet.size().

Prototype

public int size() 

Document

Returns the number of elements in this set (its cardinality).
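
For orientation, a minimal sketch of this behaviour (the variable name and values are illustrative):

TreeSet<String> set = new TreeSet<String>();
set.add("a");
set.add("b");
System.out.println(set.size()); // prints 2, the set's cardinality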

Usage

From source file:Main.java

public static void main(String[] args) {
    TreeSet<Integer> tSet = new TreeSet<Integer>();
    System.out.println("Size of TreeSet : " + tSet.size());

    tSet.add(1);
    tSet.add(2);
    tSet.add(3);

    System.out.println(tSet.size());

    // remove one element from the TreeSet using the remove method

    tSet.remove(1);
    System.out.println("Size of TreeSet after removal : " + tSet.size());
}

From source file:Main.java

public static void main(String[] args) {

    TreeSet<Integer> treeadd = new TreeSet<Integer>();

    treeadd.add(1);
    treeadd.add(13);
    treeadd.add(17);
    treeadd.add(2);

    System.out.println("Size of the tree set is: " + treeadd.size());
}

From source file:org.apache.accumulo.server.test.TestRandomDeletes.java

public static void main(String[] args) {
    Option usernameOpt = new Option("username", "username", true, "username");
    Option passwordOpt = new Option("password", "password", true, "password");

    Options opts = new Options();

    opts.addOption(usernameOpt);
    opts.addOption(passwordOpt);

    Parser p = new BasicParser();
    CommandLine cl = null;
    try {
        cl = p.parse(opts, args);
    } catch (ParseException e1) {
        System.out.println("Parse Exception, exiting.");
        return;
    }
    credentials = new AuthInfo(cl.getOptionValue("username", "root"),
            ByteBuffer.wrap(cl.getOptionValue("password", "secret").getBytes()),
            HdfsZooInstance.getInstance().getInstanceID());

    try {
        long deleted = 0;

        Text t = new Text("test_ingest");

        TreeSet<RowColumn> doomed = scanAll(t);
        log.info("Got " + doomed.size() + " rows");

        long startTime = System.currentTimeMillis();
        while (true) {
            long half = scrambleDeleteHalfAndCheck(t, doomed);
            deleted += half;
            if (half == 0)
                break;
        }
        long stopTime = System.currentTimeMillis();

        long elapsed = (stopTime - startTime) / 1000;
        log.info("deleted " + deleted + " values in " + elapsed + " seconds");
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}

From source file:Main.java

public static void main(String[] args) {

    TreeSet<Integer> treeadd = new TreeSet<Integer>();

    treeadd.add(12);
    treeadd.add(13);
    treeadd.add(14);
    treeadd.add(15);

    Iterator<Integer> iterator = treeadd.iterator();

    while (iterator.hasNext()) {
        System.out.println(iterator.next());
    }

    treeadd.clear();
    System.out.println(treeadd.size());
}

From source file:se.lth.cs.nlp.wikiforia.App.java

/**
 * Application entrypoint
 * @param args input arguments
 */
public static void main(String[] args) {
    Logger logger = LoggerFactory.getLogger(App.class);

    logger.info("Wikiforia v1.1.1 by Marcus Klang");

    Options options = new Options();
    options.addOption(index);
    options.addOption(pages);
    options.addOption(threads);
    options.addOption(batch);
    options.addOption(output);
    options.addOption(lang);
    options.addOption(hadoop);
    options.addOption(gzip);
    options.addOption(filterNs);

    CommandLineParser parser = new PosixParser();
    try {
        CommandLine cmdline = parser.parse(options, args);

        File indexPath = null, pagesPath, outputPath;
        int batchsize = 100;
        int numThreads = Runtime.getRuntime().availableProcessors();

        //Read batch size
        if (cmdline.hasOption(batch.getOpt())) {
            batchsize = Integer.parseInt(cmdline.getOptionValue(batch.getOpt()));
        }

        //Read num threads
        if (cmdline.hasOption(threads.getOpt())) {
            numThreads = Integer.parseInt(cmdline.getOptionValue(threads.getOpt()));
        }

        //Read required paths
        pagesPath = new File(cmdline.getOptionValue(pages.getOpt()));
        outputPath = new File(cmdline.getOptionValue(output.getOpt()));

        //Create output directories if they do not exist
        if (!outputPath.getParentFile().getAbsoluteFile().exists()) {
            if (!outputPath.getParentFile().getAbsoluteFile().mkdirs()) {
                throw new IOError(new IOException(
                        "Failed to create directories for " + outputPath.getParentFile().getAbsolutePath()));
            }
        }

        //Try to automatically select an index file if one was not explicitly given
        if (!cmdline.hasOption(index.getOpt())) {
            //try to automatically identify if there is an index file
            if (pagesPath.getAbsolutePath().toLowerCase().endsWith("-multistream.xml.bz2")) {
                int pos = pagesPath.getAbsolutePath().lastIndexOf("-multistream.xml.bz2");
                indexPath = new File(
                        pagesPath.getAbsolutePath().substring(0, pos) + "-multistream-index.txt.bz2");
                if (!indexPath.exists())
                    indexPath = null;
            }
        } else {
            indexPath = new File(cmdline.getOptionValue(index.getOpt()));
        }

        //Validation
        if (!pagesPath.exists()) {
            logger.error("pages with absolute filepath {} could not be found.", pagesPath.getAbsolutePath());
            return;
        }

        if (indexPath != null && !indexPath.exists()) {
            logger.error("Could not find index file {}.", indexPath.getAbsolutePath());
            logger.error("Skipping index and continuing with singlestream parsing (no threaded decompression)");
            indexPath = null;
        }

        String langId;
        if (cmdline.hasOption(lang.getOpt())) {
            langId = cmdline.getOptionValue(lang.getOpt());
        } else {
            Pattern langmatcher = Pattern.compile("([a-z]{2})wiki-");
            Matcher matcher = langmatcher.matcher(pagesPath.getName());
            if (matcher.find()) {
                langId = matcher.group(1).toLowerCase();
            } else {
                logger.error("Could not find a suitable language, will default to English");
                langId = "en";
            }
        }

        ArrayList<Filter<WikipediaPage>> filters = new ArrayList<Filter<WikipediaPage>>();
        if (cmdline.hasOption(filterNs.getOpt())) {
            String optionValue = cmdline.getOptionValue(filterNs.getOpt());
            final TreeSet<Integer> ns = new TreeSet<Integer>();
            for (String s : optionValue.split(",")) {
                ns.add(Integer.parseInt(s));
            }

            if (ns.size() > 0) {
                filters.add(new Filter<WikipediaPage>() {
                    @Override
                    protected boolean accept(WikipediaPage item) {
                        return ns.contains(item.getNamespace());
                    }

                    @Override
                    public String toString() {
                        return String.format("Namespace filter { namespaces: %s }", StringUtils.join(ns, ","));
                    }
                });
            }
        }

        TemplateConfig config;
        if (langId.equals("sv")) {
            config = new SwedishConfig();
        } else if (langId.equals("en")) {
            config = new EnglishConfig();
        } else {
            config = new EnglishConfig();
            logger.error(
                    "language {} is not yet supported and will be defaulted to a English setting for Sweble.",
                    langId);
            langId = "en";
        }

        if (cmdline.hasOption(hadoop.getOpt())) {
            if (outputPath.exists()) {
                logger.error("The target location already exists, please remove before using the tool!");
                System.exit(1);
            } else {
                int splitsize = 64000000;
                if (cmdline.hasOption(App.splitsize.getOpt())) {
                    splitsize = Integer.parseInt(cmdline.getOptionValue(App.splitsize.getOpt()));
                }

                hadoopConvert(config, indexPath, pagesPath, outputPath, numThreads, batchsize, splitsize,
                        cmdline.hasOption(gzip.getOpt()), filters);
            }
        } else {
            convert(config, indexPath, pagesPath, outputPath, numThreads, batchsize, filters);
        }

    } catch (ParseException e) {
        System.out.println(e.getMessage());
        HelpFormatter writer = new HelpFormatter();
        writer.printHelp("wikiforia", options);
    }
}

From source file:nlp.wikiforia.App.java

/**
 * Application entrypoint
 * @param args input arguments
 */
public static void main(String[] args) {
    Logger logger = LoggerFactory.getLogger(App.class);

    logger.info("Wikiforia v1.2.1 by Marcus Klang");

    Options options = new Options();
    options.addOption(index);
    options.addOption(pages);
    options.addOption(threads);
    options.addOption(batch);
    options.addOption(output);
    options.addOption(lang);
    options.addOption(hadoop);
    options.addOption(gzip);
    options.addOption(testDecompression);
    options.addOption(filterNs);
    options.addOption(outputFormatOption);

    CommandLineParser parser = new PosixParser();
    try {
        CommandLine cmdline = parser.parse(options, args);

        File indexPath = null, pagesPath, outputPath;
        int batchsize = 100;
        int numThreads = Runtime.getRuntime().availableProcessors();
        String outputFormat = OUTPUT_FORMAT_DEFAULT;

        //Read batch size
        if (cmdline.hasOption(batch.getOpt())) {
            batchsize = Integer.parseInt(cmdline.getOptionValue(batch.getOpt()));
        }

        //Read num threads
        if (cmdline.hasOption(threads.getOpt())) {
            numThreads = Integer.parseInt(cmdline.getOptionValue(threads.getOpt()));
        }

        //Output format
        if (cmdline.hasOption(outputFormatOption.getOpt())) {
            outputFormat = cmdline.getOptionValue(outputFormatOption.getOpt());
        }

        //Read required paths
        pagesPath = new File(cmdline.getOptionValue(pages.getOpt()));
        outputPath = new File(cmdline.getOptionValue(output.getOpt()));

        //Create output directories if they do not exist
        if (!outputPath.getAbsoluteFile().getParentFile().getAbsoluteFile().exists()) {
            if (!outputPath.getParentFile().getAbsoluteFile().mkdirs()) {
                throw new IOError(new IOException(
                        "Failed to create directories for " + outputPath.getParentFile().getAbsolutePath()));
            }
        }

        //Try to automatically select an index file if one was not explicitly given
        if (!cmdline.hasOption(index.getOpt())) {
            //try to automatically identify if there is an index file
            if (pagesPath.getAbsolutePath().toLowerCase().endsWith("-multistream.xml.bz2")) {
                int pos = pagesPath.getAbsolutePath().lastIndexOf("-multistream.xml.bz2");
                indexPath = new File(
                        pagesPath.getAbsolutePath().substring(0, pos) + "-multistream-index.txt.bz2");
                if (!indexPath.exists())
                    indexPath = null;
            }
        } else {
            indexPath = new File(cmdline.getOptionValue(index.getOpt()));
        }

        //Validation
        if (!pagesPath.exists()) {
            logger.error("pages with absolute filepath {} could not be found.", pagesPath.getAbsolutePath());
            return;
        }

        if (indexPath != null && !indexPath.exists()) {
            logger.error("Could not find index file {}.", indexPath.getAbsolutePath());
            logger.error("Skipping index and continuing with singlestream parsing (no threaded decompression)");
            indexPath = null;
        }

        String langId;
        if (cmdline.hasOption(lang.getOpt())) {
            langId = cmdline.getOptionValue(lang.getOpt());
        } else {
            Pattern langmatcher = Pattern.compile("([a-z]{2})wiki-");
            Matcher matcher = langmatcher.matcher(pagesPath.getName());
            if (matcher.find()) {
                langId = matcher.group(1).toLowerCase();
            } else {
                logger.error("Could not find a suitable language, will default to English");
                langId = "en";
            }
        }

        ArrayList<Filter<WikipediaPage>> filters = new ArrayList<Filter<WikipediaPage>>();
        if (cmdline.hasOption(filterNs.getOpt())) {
            String optionValue = cmdline.getOptionValue(filterNs.getOpt());
            final TreeSet<Integer> ns = new TreeSet<Integer>();
            for (String s : optionValue.split(",")) {
                ns.add(Integer.parseInt(s));
            }

            if (ns.size() > 0) {
                filters.add(new Filter<WikipediaPage>() {
                    @Override
                    protected boolean accept(WikipediaPage item) {
                        return ns.contains(item.getNamespace());
                    }

                    @Override
                    public String toString() {
                        return String.format("Namespace filter { namespaces: %s }", StringUtils.join(ns, ","));
                    }
                });
            }
        }

        TemplateConfig config;
        Class<? extends TemplateConfig> configClazz = LangFactory.get(langId);
        if (configClazz != null) {
            try {
                config = configClazz.newInstance();
            } catch (InstantiationException e) {
                throw new RuntimeException(e);
            } catch (IllegalAccessException e) {
                throw new RuntimeException(e);
            }
        } else {
            config = new EnglishConfig();
            logger.error(
                    "language {} is not yet supported and will be defaulted to a English setting for Sweble.",
                    langId);
            langId = "en";
        }

        if (cmdline.hasOption(hadoop.getOpt())) {
            if (outputPath.exists()) {
                logger.error("The target location already exists, please remove before using the tool!");
                System.exit(1);
            } else {
                int splitsize = 64000000;
                if (cmdline.hasOption(App.splitsize.getOpt())) {
                    splitsize = Integer.parseInt(cmdline.getOptionValue(App.splitsize.getOpt()));
                }

                hadoopConvert(config, indexPath, pagesPath, outputPath, numThreads, batchsize, splitsize,
                        cmdline.hasOption(gzip.getOpt()), filters);
            }
        } else {
            if (cmdline.hasOption(testDecompression.getOpt())) {
                test(config, indexPath, pagesPath, numThreads, batchsize);
            } else {
                convert(config, indexPath, pagesPath, outputPath, numThreads, batchsize, filters, outputFormat);
            }
        }

    } catch (ParseException e) {
        System.out.println(e.getMessage());
        HelpFormatter writer = new HelpFormatter();
        writer.printHelp("wikiforia", options);
    }
}

From source file:eval.dataset.ParseWikiLog.java

public static void main(String[] ss) throws FileNotFoundException, ParserConfigurationException, IOException {
    FileInputStream fin = new FileInputStream("data/enwiki-20151201-pages-logging.xml.gz");
    GzipCompressorInputStream gzIn = new GzipCompressorInputStream(fin);
    InputStreamReader reader = new InputStreamReader(gzIn);
    BufferedReader br = new BufferedReader(reader);
    PrintWriter pw = new PrintWriter(new FileWriter("data/user_page.txt"));
    pw.println(
            "#list of user names and pages that they have edited, deleted or created. This info is mined from logitems of enwiki-20150304-pages-logging.xml.gz");
    TreeMap<String, Set<String>> userPageList = new TreeMap<String, Set<String>>();
    TreeSet<String> pageList = new TreeSet<String>();
    int counterEntry = 0;
    String currentUser = null;
    String currentPage = null;
    try {
        for (String line = br.readLine(); line != null; line = br.readLine()) {

            if (line.trim().equals("</logitem>")) {
                counterEntry++;
                if (currentUser != null && currentPage != null) {
                    updateMap(userPageList, currentUser, currentPage);
                    pw.println(currentUser + "\t" + currentPage);
                    pageList.add(currentPage);
                }
                currentUser = null;
                currentPage = null;
            } else if (line.trim().startsWith("<username>")) {
                currentUser = line.trim().split(">")[1].split("<")[0].replace(" ", "_");

            } else if (line.trim().startsWith("<logtitle>")) {
                String content = line.trim().split(">")[1].split("<")[0];
                if (content.split(":").length == 1) {
                    currentPage = content.replace(" ", "_");
                }
            }
        }
    } catch (IOException ex) {
        Logger.getLogger(ParseWikiLog.class.getName()).log(Level.SEVERE, null, ex);
    }
    pw.println("#analysed " + counterEntry + " entries of wikipesia log file");
    pw.println("#gathered a list of unique user of size " + userPageList.size());
    pw.println("#gathered a list of pages of size " + pageList.size());
    pw.close();
    gzIn.close();

    PrintWriter pwUser = new PrintWriter(new FileWriter("data/user_list_page_edited.txt"));
    pwUser.println(
            "#list of unique users and pages that they have edited, extracted from logitems of enwiki-20150304-pages-logging.xml.gz");
    for (String user : userPageList.keySet()) {
        pwUser.print(user);
        Set<String> getList = userPageList.get(user);
        for (String page : getList) {
            pwUser.print("\t" + page);
        }
        pwUser.println();
    }
    pwUser.close();

    PrintWriter pwPage = new PrintWriter(new FileWriter("data/all_pages.txt"));
    pwPage.println("#list of the unique pages that are extracted from enwiki-20150304-pages-logging.xml.gz");
    for (String page : pageList) {
        pwPage.println(page);
    }
    pwPage.close();
    System.out.println("#analysed " + counterEntry + " entries of wikipesia log file");
    System.out.println("#gathered a list of unique user of size " + userPageList.size());
    System.out.println("#gathered a list of pages of size " + pageList.size());
}

From source file:Main.java

public static <T> SortedSet<T> sortTopN(Iterable<T> iterable, int n, Comparator<T> comparator) {
    TreeSet<T> r = Sets.newTreeSet(comparator);
    for (T t : iterable) {
        r.add(t);
        if (r.size() > n) {
            r.pollLast();
        }
    }
    return r;
}
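
A hypothetical call to the helper above, assuming Guava's Sets is available as the snippet implies; because the set is trimmed with pollLast() whenever its size exceeds n, size() never grows past n. The input values here are illustrative:

List<Integer> values = Arrays.asList(9, 3, 7, 1, 5);
SortedSet<Integer> smallestThree = sortTopN(values, 3, Comparator.<Integer>naturalOrder());
System.out.println(smallestThree); // [1, 3, 5] under the comparator's ordering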

From source file:ImageIOTest.java

/**
 * Gets a set of "preferred" format names of all image writers. The preferred format name is the
 * first format name that a writer specifies.
 * @return the format name set
 */
public static Set<String> getWriterFormats() {
    TreeSet<String> writerFormats = new TreeSet<String>();
    TreeSet<String> formatNames = new TreeSet<String>(Arrays.asList(ImageIO.getWriterFormatNames()));
    while (formatNames.size() > 0) {
        String name = formatNames.iterator().next();
        Iterator<ImageWriter> iter = ImageIO.getImageWritersByFormatName(name);
        ImageWriter writer = iter.next();
        String[] names = writer.getOriginatingProvider().getFormatNames();
        String format = names[0];
        if (format.equals(format.toLowerCase()))
            format = format.toUpperCase();
        writerFormats.add(format);
        formatNames.removeAll(Arrays.asList(names));
    }
    return writerFormats;
}
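
A short hypothetical driver for the method above, printing how many preferred writer formats were found:

Set<String> formats = getWriterFormats();
System.out.println(formats.size() + " writer formats: " + formats);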

From source file:com.ettoremastrogiacomo.sktradingjava.starters.Temp.java

public static <T> java.util.Set<T> longestSet(ArrayList<TreeSet<T>> list) {
    if (list.isEmpty())
        return new java.util.TreeSet<>();
    java.util.TreeSet<T> best = list.get(0);
    for (TreeSet<T> s : list) {
        if (best.size() < s.size())
            best = s;
    }
    return best;
}