Example usage for java.util Map get

List of usage examples for java.util Map get

Introduction

This page lists example usage for java.util.Map#get.

Prototype

V get(Object key);

Document

Returns the value to which the specified key is mapped, or null if this map contains no mapping for the key.
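
A minimal sketch of that contract: get returns the mapped value, and null when the key is absent. Because null can also be a stored value in many Map implementations, getOrDefault (or containsKey) disambiguates the two cases.

import java.util.HashMap;
import java.util.Map;

public class MapGetDemo {
    public static void main(String[] args) {
        Map<String, Integer> ages = new HashMap<>();
        ages.put("alice", 30);

        System.out.println(ages.get("alice"));            // 30
        System.out.println(ages.get("bob"));              // null: no mapping for "bob"
        System.out.println(ages.getOrDefault("bob", -1)); // -1: explicit fallback
    }
}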

Usage

From source file:com.sun.faces.generate.HtmlTaglibGenerator.java

/**
 * Main routine.
 */
public static void main(String args[]) throws Exception {

    try {
        // Perform setup operations
        if (log.isDebugEnabled()) {
            log.debug("Processing command line options");
        }
        Map options = options(args);
        String dtd = (String) options.get("--dtd");
        if (log.isDebugEnabled()) {
            log.debug("Configuring digester instance with public identifiers and DTD '" + dtd + "'");
        }
        StringTokenizer st = new StringTokenizer(dtd, "|");
        int arrayLen = st.countTokens();
        if (arrayLen == 0) {
            // PENDING I18n
            throw new Exception("No DTDs specified");
        }
        String[] dtds = new String[arrayLen];
        int i = 0;
        while (st.hasMoreTokens()) {
            dtds[i] = st.nextToken();
            i++;
        }
        copyright((String) options.get("--copyright"));
        directories((String) options.get("--tlddir"), false);
        Digester digester = digester(dtds, false, true, false);
        String config = (String) options.get("--config");
        loadOptionalTags((String) options.get("--tagdef"));
        if (log.isDebugEnabled()) {
            log.debug("Parsing configuration file '" + config + "'");
        }
        digester.push(new FacesConfigBean());
        fcb = parse(digester, config);
        if (log.isInfoEnabled()) {
            log.info("Generating Tag Library Descriptor file");
        }

        // Generate TLD File
        generateTld();

        // Generate Tag Handler Classes
        directories((String) options.get("--dir"), true);
        generateTagClasses();

    } catch (Exception e) {
        e.printStackTrace();
        System.exit(1);
    }
    System.exit(0);
}
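
The example above works with a raw Map, so every get result must be cast to String. A small sketch (with hypothetical option values) of the same lookups on a parameterized Map<String, String>, where no casts are needed:

import java.util.HashMap;
import java.util.Map;

public class TypedOptions {
    public static void main(String[] args) {
        // Parameterized map: get returns String directly, no (String) casts.
        Map<String, String> options = new HashMap<>();
        options.put("--dtd", "web-app_2_3.dtd");      // hypothetical values
        options.put("--config", "faces-config.xml");

        String dtd = options.get("--dtd");  // "web-app_2_3.dtd"
        String dir = options.get("--dir");  // null: option not supplied
        System.out.println(dtd + " / " + dir);
    }
}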

From source file:com.sishuok.es.generate.Generate.java

public static void main(String[] args) throws Exception {

    // ========== Configuration ====================

    // Generated code layout:
    // {packageName}/{moduleName}/{dao,entity,service,web}/{subModuleName}/{className}

    // packageName must match the base-package / packagesToScan settings in applicationContext.xml and spring-mvc.xml
    String packageName = "com.sishuok.es";

    String sysName = "sys"; // e.g. sys, showcase, maintain, personal, shop
    String moduleName = "xxs"; // module name
    String tableName = "sys_xxs_attribute"; // table name, e.g. user
    String className = "XxsAttribute"; // class name, e.g. User
    String permissionName = "sys:xxsAttribute"; // permission name prefix
    String folderName = "xxs"; // view folder name
    String classAuthor = "xxs"; // author, e.g. ThinkGem
    String functionName = "xxs attribute"; // feature description (placeholder)

    // generator switch
    //Boolean isEnable = false;
    Boolean isEnable = true;

    // ========== End configuration ====================

    if (!isEnable) {
        logger.error("Generation is disabled; set isEnable = true");
        return;
    }

    if (StringUtils.isBlank(moduleName) || StringUtils.isBlank(className)
            || StringUtils.isBlank(functionName)) {
        logger.error("moduleName, className and functionName must not be blank");
        return;
    }

    // file separator
    String separator = File.separator;

    // locate the project root
    File projectPath = new DefaultResourceLoader().getResource("").getFile();
    while (!new File(projectPath.getPath() + separator + "src" + separator + "main").exists()) {
        projectPath = projectPath.getParentFile();
    }
    logger.info("Project Path: {}", projectPath);

    // template directory
    String tplPath = StringUtils.replace(projectPath + "/src/main/java/com/sishuok/es/generate/template", "/",
            separator);
    logger.info("Template Path: {}", tplPath);

    // Java source directory
    String javaPath = StringUtils.replaceEach(
            projectPath + "/src/main/java/" + StringUtils.lowerCase(packageName), new String[] { "/", "." },
            new String[] { separator, separator });
    logger.info("Java Path: {}", javaPath);

    // JSP view directory
    String viewPath = StringUtils.replace(projectPath + "/src/main/webapp/WEB-INF/jsp/admin", "/", separator);
    logger.info("View Path: {}", viewPath);

    // FreeMarker configuration
    Configuration cfg = new Configuration();
    cfg.setDefaultEncoding("UTF-8");
    cfg.setDirectoryForTemplateLoading(new File(tplPath));

    // template model
    Map<String, String> model = Maps.newHashMap();
    model.put("packageName", StringUtils.lowerCase(packageName));
    model.put("sysName", StringUtils.lowerCase(sysName));
    model.put("moduleName", StringUtils.lowerCase(moduleName));
    model.put("tableName", StringUtils.lowerCase(tableName));
    model.put("className", StringUtils.uncapitalize(className)); // first letter lower-case
    model.put("permissionName", permissionName);
    model.put("ClassName", StringUtils.capitalize(className)); // first letter upper-case
    model.put("classAuthor", StringUtils.isNotBlank(classAuthor) ? classAuthor : "Generate Tools");
    model.put("classVersion", DateUtils.getDate());
    model.put("functionName", functionName);
    model.put("folderName", folderName);
    model.put("urlPrefix", model.get("moduleName") + "_" + model.get("className")); // jsp url prefix
    model.put("viewPrefix", //StringUtils.substringAfterLast(model.get("packageName"),".")+"/"+
            model.get("urlPrefix"));
    model.put("permissionPrefix", model.get("sysName") + ":" + model.get("moduleName"));

    // Generate Entity
    Template template = cfg.getTemplate("entity.ftl");
    String content = FreeMarkers.renderTemplate(template, model);
    String filePath = javaPath + separator + model.get("sysName") + separator + model.get("moduleName")
            + separator + "entity" + separator + model.get("ClassName") + ".java";
    System.out.println("Entity filePath: " + filePath);
    writeFile(content, filePath);
    logger.info("Entity: {}", filePath);

    // Generate Repository
    template = cfg.getTemplate("repository.ftl");
    content = FreeMarkers.renderTemplate(template, model);
    filePath = javaPath + separator + model.get("sysName") + separator + model.get("moduleName") + separator
            + "repository" + separator + model.get("ClassName") + "Repository.java";
    System.out.println("Repository filePath: " + filePath);
    writeFile(content, filePath);
    logger.info("Repository: {}", filePath);

    // Generate Service
    template = cfg.getTemplate("service.ftl");
    content = FreeMarkers.renderTemplate(template, model);
    filePath = javaPath + separator + model.get("sysName") + separator + model.get("moduleName") + separator
            + "service" + separator + model.get("ClassName") + "Service.java";
    System.out.println("Service filePath: " + filePath);
    writeFile(content, filePath);
    logger.info("Service: {}", filePath);

    // Generate front Controller
    template = cfg.getTemplate("frontController.ftl");
    content = FreeMarkers.renderTemplate(template, model);
    filePath = javaPath + separator + model.get("sysName") + separator + model.get("moduleName") + separator
            + "web" + separator + "controller" + separator + "front" + separator + model.get("ClassName")
            + "Controller.java";
    System.out.println("Controller   filePath" + filePath);
    writeFile(content, filePath);
    logger.info("Controller: {}", filePath);

    // Generate admin Controller
    template = cfg.getTemplate("adminController.ftl");
    content = FreeMarkers.renderTemplate(template, model);
    filePath = javaPath + separator + model.get("sysName") + separator + model.get("moduleName") + separator
            + "web" + separator + "controller" + separator + "admin" + separator + model.get("ClassName")
            + "Controller.java";
    System.out.println("Controller   filePath" + filePath);
    writeFile(content, filePath);
    logger.info("Controller: {}", filePath);

    // Generate editForm view
    template = cfg.getTemplate("editForm.ftl");
    content = FreeMarkers.renderTemplate(template, model);
    filePath = viewPath + separator + model.get("sysName") + separator + model.get("folderName") + separator
            + "editForm.jsp";
    System.out.println("---------------------------------------------------");
    System.out.println("ViewForm   filePath" + filePath);
    writeFile(content, filePath);
    logger.info("ViewForm: {}", filePath);

    // Generate list view
    template = cfg.getTemplate("list.ftl");
    content = FreeMarkers.renderTemplate(template, model);
    filePath = viewPath + separator + model.get("sysName") + separator + model.get("folderName") + separator
            + "list.jsp";
    writeFile(content, filePath);
    System.out.println("ViewListfilePath" + filePath);
    logger.info("ViewList: {}", filePath);

    // Generate searchForm view
    template = cfg.getTemplate("searchForm.ftl");
    content = FreeMarkers.renderTemplate(template, model);
    filePath = viewPath + separator + model.get("sysName") + separator + model.get("folderName") + separator
            + "searchForm.jsp";
    writeFile(content, filePath);
    System.out.println("searchForm filePath: " + filePath);
    logger.info("SearchForm: {}", filePath);

    // Generate listTable view
    template = cfg.getTemplate("listTable.ftl");
    content = FreeMarkers.renderTemplate(template, model);
    filePath = viewPath + separator + model.get("sysName") + separator + model.get("folderName") + separator
            + "listTable.jsp";
    writeFile(content, filePath);
    System.out.println("listTable filePath: " + filePath);
    logger.info("ListTable: {}", filePath);

    logger.info("Generate Success.");
}
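
The generator above derives later template values by reading earlier entries back out of the model with get (urlPrefix, permissionPrefix). A condensed sketch of that read-back pattern:

import java.util.HashMap;
import java.util.Map;

public class ModelComposition {
    public static void main(String[] args) {
        Map<String, String> model = new HashMap<>();
        model.put("sysName", "sys");
        model.put("moduleName", "xxs");
        model.put("className", "xxsAttribute");

        // Later entries are composed from earlier ones via get.
        model.put("urlPrefix", model.get("moduleName") + "_" + model.get("className"));
        model.put("permissionPrefix", model.get("sysName") + ":" + model.get("moduleName"));

        System.out.println(model.get("urlPrefix"));        // xxs_xxsAttribute
        System.out.println(model.get("permissionPrefix")); // sys:xxs
    }
}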

From source file:com.act.lcms.db.io.PrintConstructInfo.java

public static void main(String[] args) throws Exception {
    Options opts = new Options();
    for (Option.Builder b : OPTION_BUILDERS) {
        opts.addOption(b.build());
    }

    CommandLine cl = null;
    try {
        CommandLineParser parser = new DefaultParser();
        cl = parser.parse(opts, args);
    } catch (ParseException e) {
        System.err.format("Argument parsing failed: %s\n", e.getMessage());
        HELP_FORMATTER.printHelp(LoadPlateCompositionIntoDB.class.getCanonicalName(), HELP_MESSAGE, opts, null,
                true);
        System.exit(1);
    }

    if (cl.hasOption("help")) {
        HELP_FORMATTER.printHelp(LoadPlateCompositionIntoDB.class.getCanonicalName(), HELP_MESSAGE, opts, null,
                true);
        return;
    }

    File lcmsDir = new File(cl.getOptionValue(OPTION_DIRECTORY));
    if (!lcmsDir.isDirectory()) {
        System.err.format("File at %s is not a directory\n", lcmsDir.getAbsolutePath());
        HELP_FORMATTER.printHelp(LoadPlateCompositionIntoDB.class.getCanonicalName(), HELP_MESSAGE, opts, null,
                true);
        System.exit(1);
    }

    try (DB db = DB.openDBFromCLI(cl)) {
        System.out.print("Loading/updating LCMS scan files into DB\n");
        ScanFile.insertOrUpdateScanFilesInDirectory(db, lcmsDir);

        String construct = cl.getOptionValue(OPTION_CONSTRUCT);
        List<LCMSWell> lcmsWells = LCMSWell.getInstance().getByConstructID(db, construct);
        Collections.sort(lcmsWells, new Comparator<LCMSWell>() {
            @Override
            public int compare(LCMSWell o1, LCMSWell o2) {
                return o1.getId().compareTo(o2.getId());
            }
        });

        Set<String> uniqueMSIDs = new HashSet<>();
        Map<Integer, Plate> platesById = new HashMap<>();

        System.out.format("\n\n-- Construct %s --\n\n", construct);

        List<ChemicalAssociatedWithPathway> pathwayChems = ChemicalAssociatedWithPathway.getInstance()
                .getChemicalsAssociatedWithPathwayByConstructId(db, construct);
        System.out.print("Chemicals associated with pathway:\n");
        System.out.format("  %-8s%-15s%-45s\n", "index", "kind", "chemical");
        for (ChemicalAssociatedWithPathway chem : pathwayChems) {
            System.out.format("  %-8d%-15s%-45s\n", chem.getIndex(), chem.getKind(), chem.getChemical());
        }

        System.out.print("\nLCMS wells:\n");
        System.out.format("  %-15s%-6s%-15s%-15s%-15s\n", "barcode", "well", "msid", "fed", "lcms_count");
        for (LCMSWell well : lcmsWells) {
            uniqueMSIDs.add(well.getMsid());

            Plate p = platesById.get(well.getPlateId());
            if (p == null) {
                // TODO: migrate Plate to be a subclass of BaseDBModel.
                p = Plate.getPlateById(db, well.getPlateId());
                platesById.put(p.getId(), p);
            }

            String chem = well.getChemical();
            List<ScanFile> scanFiles = ScanFile.getScanFileByPlateIDRowAndColumn(db, p.getId(),
                    well.getPlateRow(), well.getPlateColumn());

            System.out.format("  %-15s%-6s%-15s%-15s%-15d\n", p.getBarcode(), well.getCoordinatesString(),
                    well.getMsid(), chem == null || chem.isEmpty() ? "--" : chem, scanFiles.size());
            System.out.flush();
        }

        List<Integer> plateIds = Arrays.asList(platesById.keySet().toArray(new Integer[platesById.size()]));
        Collections.sort(plateIds);
        System.out.print("\nAppears in plates:\n");
        for (Integer id : plateIds) {
            Plate p = platesById.get(id);
            System.out.format("  %s: %s\n", p.getBarcode(), p.getName());
        }

        List<String> msids = Arrays.asList(uniqueMSIDs.toArray(new String[uniqueMSIDs.size()]));
        Collections.sort(msids);
        System.out.format("\nMSIDS: %s\n", StringUtils.join(msids, ", "));

        Set<String> availableNegativeControls = new HashSet<>();
        for (Map.Entry<Integer, Plate> entry : platesById.entrySet()) {
            List<LCMSWell> wells = LCMSWell.getInstance().getByPlateId(db, entry.getKey());
            for (LCMSWell well : wells) {
                if (!construct.equals(well.getComposition())) {
                    availableNegativeControls.add(well.getComposition());
                }
            }
        }

        // Print available standards for each step w/ plate barcodes and coordinates.
        System.out.format("\nAvailable Standards:\n");
        Map<Integer, Plate> plateCache = new HashMap<>();
        for (ChemicalAssociatedWithPathway chem : pathwayChems) {
            List<StandardWell> matchingWells = StandardWell.getInstance().getStandardWellsByChemical(db,
                    chem.getChemical());
            for (StandardWell well : matchingWells) {
                if (!plateCache.containsKey(well.getPlateId())) {
                    Plate p = Plate.getPlateById(db, well.getPlateId());
                    plateCache.put(p.getId(), p);
                }
            }
            Map<Integer, List<StandardWell>> standardWellsByPlateId = new HashMap<>();
            for (StandardWell well : matchingWells) {
                List<StandardWell> plateWells = standardWellsByPlateId.get(well.getPlateId());
                if (plateWells == null) {
                    plateWells = new ArrayList<>();
                    standardWellsByPlateId.put(well.getPlateId(), plateWells);
                }
                plateWells.add(well);
            }
            List<Pair<String, Integer>> plateBarcodes = new ArrayList<>(plateCache.size());
            for (Plate p : plateCache.values()) {
                if (p.getBarcode() == null) {
                    plateBarcodes.add(Pair.of("(no barcode)", p.getId()));
                } else {
                    plateBarcodes.add(Pair.of(p.getBarcode(), p.getId()));
                }
            }
            Collections.sort(plateBarcodes);
            System.out.format("  %s:\n", chem.getChemical());
            for (Pair<String, Integer> barcodePair : plateBarcodes) {
                // TODO: hoist this whole sorting/translation step into a utility class.
                List<StandardWell> wells = standardWellsByPlateId.get(barcodePair.getRight());
                if (wells == null) {
                    // Don't print plates that don't apply to this chemical, which can happen because we're caching the plates.
                    continue;
                }
                Collections.sort(wells, new Comparator<StandardWell>() {
                    @Override
                    public int compare(StandardWell o1, StandardWell o2) {
                        int c = o1.getPlateRow().compareTo(o2.getPlateRow());
                        if (c != 0)
                            return c;
                        return o1.getPlateColumn().compareTo(o2.getPlateColumn());
                    }
                });
                List<String> descriptions = new ArrayList<>(wells.size());
                for (StandardWell well : wells) {
                    descriptions.add(String.format("%s in %s%s", well.getCoordinatesString(), well.getMedia(),
                            well.getConcentration() == null ? ""
                                    : String.format(" c. %f", well.getConcentration())));
                }
                System.out.format("    %s: %s\n", barcodePair.getLeft(), StringUtils.join(descriptions, ", "));
            }
        }

        List<String> negativeControlStrains = Arrays
                .asList(availableNegativeControls.toArray(new String[availableNegativeControls.size()]));
        Collections.sort(negativeControlStrains);
        System.out.format("\nAvailable negative controls: %s\n", StringUtils.join(negativeControlStrains, ","));
        System.out.print("\n----------\n");
        System.out.print("\n\n");
    }
}
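
The plate lookup above uses the classic get-then-put caching idiom: a null from get signals a miss, which triggers a database load. On Java 8+ the same cache can be written with computeIfAbsent; a sketch with a stand-in loader in place of Plate.getPlateById:

import java.util.HashMap;
import java.util.Map;

public class PlateCacheSketch {
    // Stand-in for Plate.getPlateById(db, id); assume an expensive lookup.
    static String loadPlate(Integer id) {
        System.out.println("loading plate " + id);
        return "plate-" + id;
    }

    public static void main(String[] args) {
        Map<Integer, String> platesById = new HashMap<>();

        // Classic idiom from the example above: get, null-check, load, put.
        String p = platesById.get(42);
        if (p == null) {
            p = loadPlate(42);
            platesById.put(42, p);
        }

        // Java 8 equivalent: loads only on a miss (here 42 is already cached).
        String q = platesById.computeIfAbsent(42, PlateCacheSketch::loadPlate);
        System.out.println(p + " / " + q);
    }
}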

From source file:com.example.geomesa.kafka08.KafkaQuickStart.java

public static void main(String[] args) throws Exception {
    // read command line args for a connection to Kafka
    CommandLineParser parser = new BasicParser();
    Options options = getCommonRequiredOptions();
    CommandLine cmd = parser.parse(options, args);

    // create the producer and consumer KafkaDataStore objects
    Map<String, String> dsConf = getKafkaDataStoreConf(cmd);
    dsConf.put("isProducer", "true");
    DataStore producerDS = DataStoreFinder.getDataStore(dsConf);
    dsConf.put("isProducer", "false");
    DataStore consumerDS = DataStoreFinder.getDataStore(dsConf);

    // verify that we got back our KafkaDataStore objects properly
    if (producerDS == null) {
        throw new Exception("Null producer KafkaDataStore");
    }
    if (consumerDS == null) {
        throw new Exception("Null consumer KafkaDataStore");
    }

    // create the schema which creates a topic in Kafka
    // (only needs to be done once)
    final String sftName = "KafkaQuickStart08";
    final String sftSchema = "name:String,age:Int,dtg:Date,*geom:Point:srid=4326";
    SimpleFeatureType sft = SimpleFeatureTypes.createType(sftName, sftSchema);
    // set zkPath to default if not specified
    String zkPath = (dsConf.get(ZK_PATH) == null) ? "/geomesa/ds/kafka" : dsConf.get(ZK_PATH);
    SimpleFeatureType preppedOutputSft = KafkaDataStoreHelper.createStreamingSFT(sft, zkPath);
    // only create the schema if it hasn't been created already
    if (!Arrays.asList(producerDS.getTypeNames()).contains(sftName))
        producerDS.createSchema(preppedOutputSft);
    if (!cmd.hasOption("automated")) {
        System.out.println("Register KafkaDataStore in GeoServer (Press enter to continue)");
        System.in.read();
    }

    // the live consumer must be created before the producer writes features
    // in order to read streaming data.
    // i.e. the live consumer will only read data written after its instantiation
    SimpleFeatureSource consumerFS = consumerDS.getFeatureSource(sftName);
    SimpleFeatureStore producerFS = (SimpleFeatureStore) producerDS.getFeatureSource(sftName);

    // creates and adds SimpleFeatures to the producer every 1/5th of a second
    System.out.println("Writing features to Kafka... refresh GeoServer layer preview to see changes");
    Instant replayStart = new Instant();

    String vis = cmd.getOptionValue(VISIBILITY);
    if (vis != null)
        System.out.println("Writing features with " + vis);
    addSimpleFeatures(sft, producerFS, vis);
    Instant replayEnd = new Instant();

    // read from Kafka after writing all the features.
    // LIVE CONSUMER - will obtain the current state of SimpleFeatures
    System.out.println("\nConsuming with the live consumer...");
    SimpleFeatureCollection featureCollection = consumerFS.getFeatures();
    System.out.println(featureCollection.size() + " features were written to Kafka");

    addDeleteNewFeature(sft, producerFS);

    // read from Kafka after writing all the features.
    // LIVE CONSUMER - will obtain the current state of SimpleFeatures
    System.out.println("\nConsuming with the live consumer...");
    featureCollection = consumerFS.getFeatures();
    System.out.println(featureCollection.size() + " features were written to Kafka");

    // the state of the two SimpleFeatures is real time here
    System.out.println("Here are the two SimpleFeatures that were obtained with the live consumer:");
    SimpleFeatureIterator featureIterator = featureCollection.features();
    SimpleFeature feature1 = featureIterator.next();
    SimpleFeature feature2 = featureIterator.next();
    featureIterator.close();
    printFeature(feature1);
    printFeature(feature2);

    // REPLAY CONSUMER - will obtain the state of SimpleFeatures at any specified time
    // Replay consumer requires a ReplayConfig which takes a time range and a
    // duration of time to process
    System.out.println("\nConsuming with the replay consumer...");
    Duration readBehind = new Duration(1000); // 1 second readBehind
    ReplayConfig rc = new ReplayConfig(replayStart, replayEnd, readBehind);
    SimpleFeatureType replaySFT = KafkaDataStoreHelper.createReplaySFT(preppedOutputSft, rc);
    producerDS.createSchema(replaySFT);
    SimpleFeatureSource replayConsumerFS = consumerDS.getFeatureSource(replaySFT.getName());

    // querying for the state of SimpleFeatures approximately 5 seconds before the replayEnd.
    // the ReplayKafkaConsumerFeatureSource will build the state of SimpleFeatures
    // by processing all of the messages that were sent in between queryTime-readBehind and queryTime.
    // only the messages in between replayStart and replayEnd are cached.
    Instant queryTime = replayEnd.minus(5000);
    featureCollection = replayConsumerFS.getFeatures(ReplayTimeHelper.toFilter(queryTime));
    System.out.println(featureCollection.size() + " features were written to Kafka");

    System.out.println("Here are the two SimpleFeatures that were obtained with the replay consumer:");
    featureIterator = featureCollection.features();
    feature1 = featureIterator.next();
    feature2 = featureIterator.next();
    featureIterator.close();
    printFeature(feature1);
    printFeature(feature2);

    if (System.getProperty("clear") != null) {
        // Run Java command with -Dclear=true
        // This will cause a 'clear'
        producerFS.removeFeatures(Filter.INCLUDE);
    }

    System.exit(0);
}

From source file:com.example.geomesa.kafka09.KafkaQuickStart.java

public static void main(String[] args) throws Exception {
    // read command line args for a connection to Kafka
    CommandLineParser parser = new BasicParser();
    Options options = getCommonRequiredOptions();
    CommandLine cmd = parser.parse(options, args);

    // create the producer and consumer KafkaDataStore objects
    Map<String, String> dsConf = getKafkaDataStoreConf(cmd);
    dsConf.put("isProducer", "true");
    DataStore producerDS = DataStoreFinder.getDataStore(dsConf);
    dsConf.put("isProducer", "false");
    DataStore consumerDS = DataStoreFinder.getDataStore(dsConf);

    // verify that we got back our KafkaDataStore objects properly
    if (producerDS == null) {
        throw new Exception("Null producer KafkaDataStore");
    }
    if (consumerDS == null) {
        throw new Exception("Null consumer KafkaDataStore");
    }

    // create the schema which creates a topic in Kafka
    // (only needs to be done once)
    final String sftName = "KafkaQuickStart09";
    final String sftSchema = "name:String,age:Int,dtg:Date,*geom:Point:srid=4326";
    SimpleFeatureType sft = SimpleFeatureTypes.createType(sftName, sftSchema);
    // set zkPath to default if not specified
    String zkPath = (dsConf.get(ZK_PATH) == null) ? "/geomesa/ds/kafka" : dsConf.get(ZK_PATH);
    SimpleFeatureType preppedOutputSft = KafkaDataStoreHelper.createStreamingSFT(sft, zkPath);
    // only create the schema if it hasn't been created already
    if (!Arrays.asList(producerDS.getTypeNames()).contains(sftName))
        producerDS.createSchema(preppedOutputSft);
    if (!cmd.hasOption("automated")) {
        System.out.println("Register KafkaDataStore in GeoServer (Press enter to continue)");
        System.in.read();
    }

    // the live consumer must be created before the producer writes features
    // in order to read streaming data.
    // i.e. the live consumer will only read data written after its instantiation
    SimpleFeatureSource consumerFS = consumerDS.getFeatureSource(sftName);
    SimpleFeatureStore producerFS = (SimpleFeatureStore) producerDS.getFeatureSource(sftName);

    // creates and adds SimpleFeatures to the producer every 1/5th of a second
    System.out.println("Writing features to Kafka... refresh GeoServer layer preview to see changes");
    Instant replayStart = new Instant();

    String vis = cmd.getOptionValue(VISIBILITY);
    if (vis != null)
        System.out.println("Writing features with " + vis);
    addSimpleFeatures(sft, producerFS, vis);
    Instant replayEnd = new Instant();

    // read from Kafka after writing all the features.
    // LIVE CONSUMER - will obtain the current state of SimpleFeatures
    System.out.println("\nConsuming with the live consumer...");
    SimpleFeatureCollection featureCollection = consumerFS.getFeatures();
    System.out.println(featureCollection.size() + " features were written to Kafka");

    addDeleteNewFeature(sft, producerFS);

    // read from Kafka after writing all the features.
    // LIVE CONSUMER - will obtain the current state of SimpleFeatures
    System.out.println("\nConsuming with the live consumer...");
    featureCollection = consumerFS.getFeatures();
    System.out.println(featureCollection.size() + " features were written to Kafka");

    // the state of the two SimpleFeatures is real time here
    System.out.println("Here are the two SimpleFeatures that were obtained with the live consumer:");
    SimpleFeatureIterator featureIterator = featureCollection.features();
    SimpleFeature feature1 = featureIterator.next();
    SimpleFeature feature2 = featureIterator.next();
    featureIterator.close();
    printFeature(feature1);
    printFeature(feature2);

    // REPLAY CONSUMER - will obtain the state of SimpleFeatures at any specified time
    // Replay consumer requires a ReplayConfig which takes a time range and a
    // duration of time to process
    System.out.println("\nConsuming with the replay consumer...");
    Duration readBehind = new Duration(1000); // 1 second readBehind
    ReplayConfig rc = new ReplayConfig(replayStart, replayEnd, readBehind);
    SimpleFeatureType replaySFT = KafkaDataStoreHelper.createReplaySFT(preppedOutputSft, rc);
    producerDS.createSchema(replaySFT);
    SimpleFeatureSource replayConsumerFS = consumerDS.getFeatureSource(replaySFT.getName());

    // querying for the state of SimpleFeatures approximately 5 seconds before the replayEnd.
    // the ReplayKafkaConsumerFeatureSource will build the state of SimpleFeatures
    // by processing all of the messages that were sent in between queryTime-readBehind and queryTime.
    // only the messages in between replayStart and replayEnd are cached.
    Instant queryTime = replayEnd.minus(5000);
    featureCollection = replayConsumerFS.getFeatures(ReplayTimeHelper.toFilter(queryTime));
    System.out.println(featureCollection.size() + " features were written to Kafka");

    System.out.println("Here are the two SimpleFeatures that were obtained with the replay consumer:");
    featureIterator = featureCollection.features();
    feature1 = featureIterator.next();
    feature2 = featureIterator.next();
    featureIterator.close();
    printFeature(feature1);
    printFeature(feature2);

    if (System.getProperty("clear") != null) {
        // Run Java command with -Dclear=true
        // This will cause a 'clear'
        producerFS.removeFeatures(Filter.INCLUDE);
    }

    System.exit(0);
}

From source file:com.example.geomesa.kafka10.KafkaQuickStart.java

public static void main(String[] args) throws Exception {
    // read command line args for a connection to Kafka
    CommandLineParser parser = new BasicParser();
    Options options = getCommonRequiredOptions();
    CommandLine cmd = parser.parse(options, args);

    // create the producer and consumer KafkaDataStore objects
    Map<String, String> dsConf = getKafkaDataStoreConf(cmd);
    dsConf.put("isProducer", "true");
    DataStore producerDS = DataStoreFinder.getDataStore(dsConf);
    dsConf.put("isProducer", "false");
    DataStore consumerDS = DataStoreFinder.getDataStore(dsConf);

    // verify that we got back our KafkaDataStore objects properly
    if (producerDS == null) {
        throw new Exception("Null producer KafkaDataStore");
    }
    if (consumerDS == null) {
        throw new Exception("Null consumer KafkaDataStore");
    }

    // create the schema which creates a topic in Kafka
    // (only needs to be done once)
    final String sftName = "KafkaQuickStart10";
    final String sftSchema = "name:String,age:Int,dtg:Date,*geom:Point:srid=4326";
    SimpleFeatureType sft = SimpleFeatureTypes.createType(sftName, sftSchema);
    // set zkPath to default if not specified
    String zkPath = (dsConf.get(ZK_PATH) == null) ? "/geomesa/ds/kafka" : dsConf.get(ZK_PATH);
    SimpleFeatureType preppedOutputSft = KafkaDataStoreHelper.createStreamingSFT(sft, zkPath);
    // only create the schema if it hasn't been created already
    if (!Arrays.asList(producerDS.getTypeNames()).contains(sftName))
        producerDS.createSchema(preppedOutputSft);
    if (!cmd.hasOption("automated")) {
        System.out.println("Register KafkaDataStore in GeoServer (Press enter to continue)");
        System.in.read();
    }

    // the live consumer must be created before the producer writes features
    // in order to read streaming data.
    // i.e. the live consumer will only read data written after its instantiation
    SimpleFeatureSource consumerFS = consumerDS.getFeatureSource(sftName);
    SimpleFeatureStore producerFS = (SimpleFeatureStore) producerDS.getFeatureSource(sftName);

    // creates and adds SimpleFeatures to the producer every 1/5th of a second
    System.out.println("Writing features to Kafka... refresh GeoServer layer preview to see changes");
    Instant replayStart = new Instant();

    String vis = cmd.getOptionValue(VISIBILITY);
    if (vis != null)
        System.out.println("Writing features with " + vis);
    addSimpleFeatures(sft, producerFS, vis);
    Instant replayEnd = new Instant();

    // read from Kafka after writing all the features.
    // LIVE CONSUMER - will obtain the current state of SimpleFeatures
    System.out.println("\nConsuming with the live consumer...");
    SimpleFeatureCollection featureCollection = consumerFS.getFeatures();
    System.out.println(featureCollection.size() + " features were written to Kafka");

    addDeleteNewFeature(sft, producerFS);

    // read from Kafka after writing all the features.
    // LIVE CONSUMER - will obtain the current state of SimpleFeatures
    System.out.println("\nConsuming with the live consumer...");
    featureCollection = consumerFS.getFeatures();
    System.out.println(featureCollection.size() + " features were written to Kafka");

    // the state of the two SimpleFeatures is real time here
    System.out.println("Here are the two SimpleFeatures that were obtained with the live consumer:");
    SimpleFeatureIterator featureIterator = featureCollection.features();
    SimpleFeature feature1 = featureIterator.next();
    SimpleFeature feature2 = featureIterator.next();
    featureIterator.close();
    printFeature(feature1);
    printFeature(feature2);

    // REPLAY CONSUMER - will obtain the state of SimpleFeatures at any specified time
    // Replay consumer requires a ReplayConfig which takes a time range and a
    // duration of time to process
    System.out.println("\nConsuming with the replay consumer...");
    Duration readBehind = new Duration(1000); // 1 second readBehind
    ReplayConfig rc = new ReplayConfig(replayStart, replayEnd, readBehind);
    SimpleFeatureType replaySFT = KafkaDataStoreHelper.createReplaySFT(preppedOutputSft, rc);
    producerDS.createSchema(replaySFT);
    SimpleFeatureSource replayConsumerFS = consumerDS.getFeatureSource(replaySFT.getName());

    // querying for the state of SimpleFeatures approximately 5 seconds before the replayEnd.
    // the ReplayKafkaConsumerFeatureSource will build the state of SimpleFeatures
    // by processing all of the messages that were sent in between queryTime-readBehind and queryTime.
    // only the messages in between replayStart and replayEnd are cached.
    Instant queryTime = replayEnd.minus(5000);
    featureCollection = replayConsumerFS.getFeatures(ReplayTimeHelper.toFilter(queryTime));
    System.out.println(featureCollection.size() + " features were written to Kafka");

    System.out.println("Here are the two SimpleFeatures that were obtained with the replay consumer:");
    featureIterator = featureCollection.features();
    feature1 = featureIterator.next();
    feature2 = featureIterator.next();
    featureIterator.close();
    printFeature(feature1);
    printFeature(feature2);

    if (System.getProperty("clear") != null) {
        // Run Java command with -Dclear=true
        // This will cause a 'clear'
        producerFS.removeFeatures(Filter.INCLUDE);
    }

    System.exit(0);
}

From source file:com.moss.schematrax.SchemaUpdater.java

public static void main(String[] args) throws Exception {

    boolean configureLogging = Boolean.parseBoolean(System.getProperty("configureLogging", "true"));
    if (configureLogging) {
        BasicConfigurator.configure();
        Logger.getRootLogger().setLevel(Level.INFO);
    }

    Map<String, String> argsMap = new HashMap<String, String>();
    for (int x = 0; x < args.length; x++) {
        String arg = args[x];
        String[] pair = arg.split("=");
        if (pair.length != 2) {
            System.out.println("Error parsing command line arguments in value pair:" + arg);
            System.out.println(usage);
            System.exit(1);
        }
        String name = pair[0];
        String value = pair[1];
        argsMap.put(name, value);
    }

    String mode = argsMap.get("mode");
    String databaseTypeName = argsMap.get("dbtype");
    String databaseName = argsMap.get("catalog");
    String logon = argsMap.get("logon");
    String password = argsMap.get("password");
    String host = argsMap.get("host");
    if (args.length < 5 || databaseTypeName == null || databaseName == null || logon == null || password == null
            || host == null || mode == null) {
        System.out.println(usage);
        System.exit(1);
    }

    String schematraxFileLocation = argsMap.get("schematraxFile");

    if (schematraxFileLocation == null)
        schematraxFileLocation = "";

    String schemaName = argsMap.get("schema");
    if (schemaName == null)
        schemaName = logon;

    DatabaseType dbType = DatabaseType.getDatabaseType(databaseTypeName);

    String schemaVersion = argsMap.get("version");

    JdbcConnectionConfig connectionConfig = new JdbcConnectionConfig();
    connectionConfig.setJdbcDriverClassName(dbType.getJdbcDriver().getClassName());
    connectionConfig
            .setJdbcUrl(dbType.getJdbcDriver().createJdbcUrl(host, null, databaseName, logon, password));
    connectionConfig.setLogon(logon);
    connectionConfig.setPassword(password);

    SchemaUpdater updater = new SchemaUpdater(new File(schematraxFileLocation).toURI().toURL());

    if (mode.equals("update")) {
        if (schemaVersion == null) {
            System.err.println("You must specify a schema version");
            System.err.println(usage);
        }
        updater.updateSchema(dbType, connectionConfig, schemaName, schemaVersion);
    } else if (mode.equals("create")) {
        updater.createSchema(dbType, connectionConfig, schemaName, schemaVersion);
    } else {
        System.err.println("Invalid mode:" + mode);
        System.err.println(usage);
    }
}
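
SchemaUpdater pulls each setting out of argsMap with get and null-checks the results individually. A sketch of checking the required keys in one pass (the key list mirrors the example above):

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class RequiredArgs {
    public static void main(String[] args) {
        Map<String, String> argsMap = new HashMap<>();
        argsMap.put("mode", "update");
        argsMap.put("dbtype", "postgres");

        List<String> required = Arrays.asList("mode", "dbtype", "catalog", "logon", "password", "host");
        for (String key : required) {
            if (argsMap.get(key) == null) { // null: argument was never supplied
                System.err.println("Missing required argument: " + key);
            }
        }
    }
}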

From source file:com.genentech.chemistry.tool.mm.SDFMMMinimize.java

/**
 * Main function for running on the command line
 * @param args
 */
public static void main(String... args) throws IOException {
    // Get the available options from the programs
    Map<String, List<String>> allowedProgramsAndForceFields = getAllowedProgramsAndForceFields();
    Map<String, List<String>> allowedProgramsAndSolvents = getAllowedProgramsAndSolvents();

    // create command line Options object
    Options options = new Options();
    Option opt = new Option(OPT_INFILE, true,
            "input file (oe-supported). Use .sdf|.smi to specify the file type.");
    opt.setRequired(true);
    options.addOption(opt);

    opt = new Option(OPT_OUTFILE, true, "output file (oe-supported). Use .sdf|.smi to specify the file type.");
    opt.setRequired(true);
    options.addOption(opt);

    StringBuilder programOptions = new StringBuilder("Program to use for minimization.  Choices are\n");

    for (String program : allowedProgramsAndForceFields.keySet()) {
        programOptions.append("\n***   -program " + program + "   ***\n");
        String forcefields = "";
        for (String option : allowedProgramsAndForceFields.get(program)) {
            forcefields += option + " ";
        }
        programOptions.append("-forcefield " + forcefields + "\n");

        String solvents = "";
        for (String option : allowedProgramsAndSolvents.get(program)) {
            solvents += option + " ";
        }
        programOptions.append("-solvent " + solvents + "\n");
    }

    opt = new Option(OPT_PROGRAM, true, programOptions.toString());
    opt.setRequired(true);
    options.addOption(opt);

    opt = new Option(OPT_FORCEFIELD, true, "Forcefield options.  See -program for choices");
    opt.setRequired(false);
    options.addOption(opt);

    opt = new Option(OPT_SOLVENT, true, "Solvent options.  See -program for choices");
    opt.setRequired(false);
    options.addOption(opt);

    opt = new Option(OPT_FIXED_ATOM_TAG, true, "SD tag name which contains the atom numbers to be held fixed.");
    opt.setRequired(false);
    options.addOption(opt);

    opt = new Option(OPT_FIX_TORSION, true,
            "true/false. if true, the atoms in fixedAtomTag contains 4 indices of atoms defining a torsion angle to be held fixed");
    opt.setRequired(false);
    options.addOption(opt);

    opt = new Option(OPT_WORKING_DIR, true, "Working directory to put files.");
    opt.setRequired(false);
    options.addOption(opt);

    CommandLineParser parser = new PosixParser();
    CommandLine cmd = null;
    try {
        cmd = parser.parse(options, args);
    } catch (Exception e) {
        System.err.println(e.getMessage());
        exitWithHelp(options);
    }
    args = cmd.getArgs();

    if (args.length != 0) {
        System.err.println("Unknown arguments" + args);
        exitWithHelp(options);
    }

    if (cmd.hasOption("d")) {
        System.err.println("Start debugger and press return:");
        new BufferedReader(new InputStreamReader(System.in)).readLine();
    }

    String inFile = cmd.getOptionValue(OPT_INFILE);
    String outFile = cmd.getOptionValue(OPT_OUTFILE);
    String fixedAtomTag = cmd.getOptionValue(OPT_FIXED_ATOM_TAG);
    boolean fixTorsion = (cmd.getOptionValue(OPT_FIX_TORSION) != null
            && cmd.getOptionValue(OPT_FIX_TORSION).equalsIgnoreCase("true"));
    String programName = cmd.getOptionValue(OPT_PROGRAM);
    String forcefield = cmd.getOptionValue(OPT_FORCEFIELD);
    String solvent = cmd.getOptionValue(OPT_SOLVENT);
    String workDir = cmd.getOptionValue(OPT_WORKING_DIR);

    if (workDir == null || workDir.trim().length() == 0)
        workDir = ".";

    // Create a minimizer 
    SDFMMMinimize minimizer = new SDFMMMinimize();
    minimizer.setMethod(programName, forcefield, solvent);
    minimizer.run(inFile, outFile, fixedAtomTag, fixTorsion, workDir);
    minimizer.close();
    System.err.println("Minimization complete.");
}
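
The help text above walks keySet and calls get per key. When a single map is being traversed, iterating entrySet yields key and value together and skips the extra lookup; a sketch with placeholder program and forcefield names:

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class EntrySetIteration {
    public static void main(String[] args) {
        Map<String, List<String>> forceFieldsByProgram = new HashMap<>();
        forceFieldsByProgram.put("programA", Arrays.asList("MMFF94", "MMFF94s")); // placeholders
        forceFieldsByProgram.put("programB", Arrays.asList("AMBER"));

        // entrySet gives key and value together; no per-key get needed.
        for (Map.Entry<String, List<String>> e : forceFieldsByProgram.entrySet()) {
            System.out.println("-program " + e.getKey() + " -forcefield " + String.join(" ", e.getValue()));
        }
    }
}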

From source file:com.example.geomesa.kafka08.KafkaLoadTester.java

public static void main(String[] args) throws Exception {
    // read command line args for a connection to Kafka
    CommandLineParser parser = new BasicParser();
    Options options = getCommonRequiredOptions();
    CommandLine cmd = parser.parse(options, args);
    String visibility = getVisibility(cmd);

    if (visibility == null) {
        System.out.println("visibility: null");
    } else {
        System.out.println("visibility: '" + visibility + "'");
    }

    // create the producer and consumer KafkaDataStore objects
    Map<String, String> dsConf = getKafkaDataStoreConf(cmd);
    System.out.println("KDS config: " + dsConf);
    dsConf.put("isProducer", "true");
    DataStore producerDS = DataStoreFinder.getDataStore(dsConf);
    dsConf.put("isProducer", "false");
    DataStore consumerDS = DataStoreFinder.getDataStore(dsConf);

    // verify that we got back our KafkaDataStore objects properly
    if (producerDS == null) {
        throw new Exception("Null producer KafkaDataStore");
    }
    if (consumerDS == null) {
        throw new Exception("Null consumer KafkaDataStore");
    }

    // create the schema which creates a topic in Kafka
    // (only needs to be done once)
    final String sftName = "KafkaStressTest";
    final String sftSchema = "name:String,age:Int,step:Double,lat:Double,dtg:Date,*geom:Point:srid=4326";
    SimpleFeatureType sft = SimpleFeatureTypes.createType(sftName, sftSchema);
    // set zkPath to default if not specified
    String zkPath = (dsConf.get(ZK_PATH) == null) ? "/geomesa/ds/kafka" : dsConf.get(ZK_PATH);
    SimpleFeatureType preppedOutputSft = KafkaDataStoreHelper.createStreamingSFT(sft, zkPath);
    // only create the schema if it hasn't been created already
    if (!Arrays.asList(producerDS.getTypeNames()).contains(sftName))
        producerDS.createSchema(preppedOutputSft);

    System.out.println("Register KafkaDataStore in GeoServer (Press enter to continue)");
    System.in.read();

    // the live consumer must be created before the producer writes features
    // in order to read streaming data.
    // i.e. the live consumer will only read data written after its instantiation
    SimpleFeatureStore producerFS = (SimpleFeatureStore) producerDS.getFeatureSource(sftName);
    SimpleFeatureSource consumerFS = consumerDS.getFeatureSource(sftName);

    // creates and adds SimpleFeatures to the producer every 1/5th of a second
    System.out.println("Writing features to Kafka... refresh GeoServer layer preview to see changes");

    SimpleFeatureBuilder builder = new SimpleFeatureBuilder(sft);

    Integer numFeats = getLoad(cmd);

    System.out.println("Building a list of " + numFeats + " SimpleFeatures.");
    List<SimpleFeature> features = IntStream.rangeClosed(1, numFeats)
            .mapToObj(i -> createFeature(builder, i, visibility)).collect(Collectors.toList());

    // set variables to estimate feature production rate
    Long startTime = null;
    Long featuresSinceStartTime = 0L;
    int cycle = 0;
    int cyclesToSkip = 50000 / numFeats; // collect enough features
                                         // to get an accurate rate estimate

    while (true) {
        // write features
        features.forEach(feat -> {
            try {
                DefaultFeatureCollection featureCollection = new DefaultFeatureCollection();
                featureCollection.add(feat);
                producerFS.addFeatures(featureCollection);
            } catch (Exception e) {
                System.out.println("Caught an exception while writing features.");
                e.printStackTrace();
            }
            updateFeature(feat);
        });

        // count features written
        Integer consumerSize = consumerFS.getFeatures().size();
        cycle++;
        featuresSinceStartTime += consumerSize;
        System.out.println("At " + new Date() + " wrote " + consumerSize + " features");

        // if we've collected enough features, calculate the rate
        if (cycle >= cyclesToSkip || startTime == null) {
            Long endTime = System.currentTimeMillis();
            if (startTime != null) {
                Long diffTime = endTime - startTime;
                Double rate = (featuresSinceStartTime.doubleValue() * 1000.0) / diffTime.doubleValue();
                System.out.printf("%.1f feats/sec (%d/%d)\n", rate, featuresSinceStartTime, diffTime);
            }
            cycle = 0;
            startTime = endTime;
            featuresSinceStartTime = 0L;
        }
    }
}

From source file:com.act.lcms.v2.MZCollisionCounter.java

public static void main(String[] args) throws Exception {
    CLIUtil cliUtil = new CLIUtil(MassChargeCalculator.class, HELP_MESSAGE, OPTION_BUILDERS);
    CommandLine cl = cliUtil.parseCommandLine(args);

    File inputFile = new File(cl.getOptionValue(OPTION_INPUT_INCHI_LIST));
    if (!inputFile.exists()) {
        cliUtil.failWithMessage("Input file at does not exist at %s", inputFile.getAbsolutePath());
    }

    List<MassChargeCalculator.MZSource> sources = new ArrayList<>();
    try (BufferedReader reader = new BufferedReader(new FileReader(inputFile))) {
        String line;
        while ((line = reader.readLine()) != null) {
            line = line.trim();
            sources.add(new MassChargeCalculator.MZSource(line));
            if (sources.size() % 1000 == 0) {
                LOGGER.info("Loaded %d sources from input file", sources.size());
            }
        }
    }

    Set<String> considerIons = Collections.emptySet();
    if (cl.hasOption(OPTION_ONLY_CONSIDER_IONS)) {
        List<String> ions = Arrays.asList(cl.getOptionValues(OPTION_ONLY_CONSIDER_IONS));
        LOGGER.info("Only considering ions for m/z calculation: %s", StringUtils.join(ions, ", "));
        considerIons = new HashSet<>(ions);
    }

    TSVWriter<String, Long> tsvWriter = new TSVWriter<>(Arrays.asList("collisions", "count"));
    tsvWriter.open(new File(cl.getOptionValue(OPTION_OUTPUT_FILE)));

    try {
        LOGGER.info("Loaded %d sources in total from input file", sources.size());

        MassChargeCalculator.MassChargeMap mzMap = MassChargeCalculator.makeMassChargeMap(sources,
                considerIons);

        if (!cl.hasOption(OPTION_COUNT_WINDOW_INTERSECTIONS)) {
            // Do an exact analysis of the m/z collisions if windowing is not specified.

            LOGGER.info("Computing precise collision histogram.");
            Iterable<Double> mzs = mzMap.ionMZIter();
            Map<Integer, Long> collisionHistogram = histogram(
                    StreamSupport.stream(mzs.spliterator(), false).map(mz -> { // See comment about Iterable below.
                        try {
                            return mzMap.ionMZToMZSources(mz).size();
                        } catch (NoSuchElementException e) {
                            LOGGER.error("Caught no such element exception for mz %f: %s", mz, e.getMessage());
                            throw e;
                        }
                    }));
            List<Integer> sortedCollisions = new ArrayList<>(collisionHistogram.keySet());
            Collections.sort(sortedCollisions);
            for (Integer collision : sortedCollisions) {
                tsvWriter.append(new HashMap<String, Long>() {
                    {
                        put("collisions", collision.longValue());
                        put("count", collisionHistogram.get(collision));
                    }
                });
            }
        } else {
            /* After some deliberation (thanks Gil!), the windowed variant of this calculation counts the number of
             * structures whose 0.01 Da m/z windows (for some set of ions) overlap with each other.
             *
             * For example, let's assume we have five total input structures, and are only searching for one ion.  Let's
             * also assume that three of those structures have m/z A and the remaining two have m/z B.  The windows might
             * look like this in the m/z domain:
             * |----A----|
             *        |----B----|
             * Because A represents three structures and overlaps with B, which represents two, we assign A a count of 5--
             * this is the number of structures we believe could fall into the range of A given our current peak calling
             * approach.  Similarly, B is assigned a count of 5, as the possibility for collision/confusion is symmetric.
             *
             * Note that this is an over-approximation of collisions, as we could more precisely only consider intersections
             * when the exact m/z of B falls within the window around A and vice versa.  However, because we have observed
             * cases where the MS sensor doesn't report structures at exactly the m/z we predict, we employ this weaker
             * definition of intersection to give a slightly pessimistic view of what confusions might be possible. */
            // Compute windows for every m/z.  We don't care about the original mz values since we just want the count.
            List<Double> mzs = mzMap.ionMZsSorted();

            final Double windowHalfWidth;
            if (cl.hasOption(OPTION_WINDOW_HALFWIDTH)) {
                // Don't use get with default for this option, as we want the exact FP value of the default tolerance.
                windowHalfWidth = Double.valueOf(cl.getOptionValue(OPTION_WINDOW_HALFWIDTH));
            } else {
                windowHalfWidth = DEFAULT_WINDOW_TOLERANCE;
            }

            /* Window = (lower bound, upper bound), counter of represented m/z's that collide with this window, and number
             * of representative structures (which will be used in counting collisions). */
            LinkedList<CollisionWindow> allWindows = new LinkedList<CollisionWindow>() {
                {
                    for (Double mz : mzs) {
                        // CPU for memory trade-off: don't re-compute the window bounds over and over and over and over and over.
                        try {
                            add(new CollisionWindow(mz, windowHalfWidth, mzMap.ionMZToMZSources(mz).size()));
                        } catch (NoSuchElementException e) {
                            LOGGER.error("Caught no such element exception for mz %f: %s", mz, e.getMessage());
                            throw e;
                        }
                    }
                }
            };

            // Sweep line time!  The window ranges are the interesting points.  We just accumulate overlap counts as we go.
            LinkedList<CollisionWindow> workingSet = new LinkedList<>();
            List<CollisionWindow> finished = new LinkedList<>();

            while (allWindows.size() > 0) {
                CollisionWindow thisWindow = allWindows.pop();
                // Remove any windows from the working set that don't overlap with the next window.
                while (workingSet.size() > 0 && workingSet.peekFirst().getMaxMZ() < thisWindow.getMinMZ()) {
                    finished.add(workingSet.pop());
                }

                for (CollisionWindow w : workingSet) {
                    /* Add the size of the new overlapping window's structure count to each of the windows in the working set,
                     * which represents the number of possible confused structures that fall within the overlapping region.
                     * We exclude the window itself as it should already have counted the colliding structures it represents. */
                    w.getAccumulator().add(thisWindow.getStructureCount());

                    /* Reciprocally, add the structure counts of all windows with which the current window overlaps to it. */
                    thisWindow.getAccumulator().add(w.getStructureCount());
                }

                // Now that accumulation is complete, we can safely add the current window.
                workingSet.add(thisWindow);
            }

            // All the interesting events are done, so drop the remaining windows into the finished set.
            finished.addAll(workingSet);

            Map<Long, Long> collisionHistogram = histogram(
                    finished.stream().map(w -> w.getAccumulator().longValue()));
            List<Long> sortedCollisions = new ArrayList<>(collisionHistogram.keySet());
            Collections.sort(sortedCollisions);
            for (Long collision : sortedCollisions) {
                tsvWriter.append(new HashMap<String, Long>() {
                    {
                        put("collisions", collision);
                        put("count", collisionHistogram.get(collision));
                    }
                });
            }
        }
    } finally {
        if (tsvWriter != null) {
            tsvWriter.close();
        }
    }
}
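
The histograms above are read back by sorting keySet and calling get per key. When building such a histogram, Map.merge (Java 8+) handles the absent-key case that get reports as null; a minimal sketch:

import java.util.HashMap;
import java.util.Map;

public class HistogramSketch {
    public static void main(String[] args) {
        int[] collisionsPerWindow = { 1, 3, 1, 2, 3, 3 };

        Map<Integer, Long> histogram = new HashMap<>();
        for (int c : collisionsPerWindow) {
            // merge inserts 1L on a miss (where get would return null) and sums on a hit.
            histogram.merge(c, 1L, Long::sum);
        }

        System.out.println(histogram); // {1=2, 2=1, 3=3}
    }
}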