Example usage for java.util.stream IntStream range

Introduction

This page collects usage examples for java.util.stream.IntStream.range.

Prototype

public static IntStream range(int startInclusive, int endExclusive) 

Document

Returns a sequential ordered IntStream from startInclusive (inclusive) to endExclusive (exclusive) by an incremental step of 1.
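
As a quick, self-contained illustration (not drawn from the projects below), this sketch shows the half-open behaviour of range and contrasts it with rangeClosed:

public class RangeDemo {
    public static void main(String[] args) {
        // range excludes the upper bound: prints 0 1 2 3 4
        java.util.stream.IntStream.range(0, 5).forEach(i -> System.out.print(i + " "));
        System.out.println();

        // rangeClosed includes it: prints 0 1 2 3 4 5
        java.util.stream.IntStream.rangeClosed(0, 5).forEach(i -> System.out.print(i + " "));
        System.out.println();

        // ranges compose with the usual stream reductions
        int sum = java.util.stream.IntStream.range(0, 5).sum(); // 0 + 1 + 2 + 3 + 4 = 10
        System.out.println(sum);
    }
}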

Usage

From source file:com.khartec.waltz.jobs.sample.UnknownFlowGenerator.java

public static void main(String[] args) {
    AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext(DIConfiguration.class);

    ApplicationService applicationDao = ctx.getBean(ApplicationService.class);
    DSLContext dsl = ctx.getBean(DSLContext.class);

    List<Application> allApps = applicationDao.findAll();

    Set<DataFlowRecord> records = IntStream.range(0, 2000)
            .mapToObj(i -> Tuple.tuple(randomPick(allApps).id().get(), randomPick(allApps).id().get()))
            .map(t -> new DataFlowRecord("APPLICATION", t.v1(), "APPLICATION", t.v2(), "UNKNOWN", "UNK_TEST"))
            .collect(Collectors.toSet());

    dsl.deleteFrom(DATA_FLOW).where(DATA_FLOW.PROVENANCE.eq("UNK_TEST")).execute();

    dsl.batchInsert(records).execute();

    System.out.println("Done");
}

From source file:com.khartec.waltz.jobs.sample.ServerGenerator.java

public static void main(String[] args) {

    AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext(DIConfiguration.class);
    ServerInformationDao serverDao = ctx.getBean(ServerInformationDao.class);
    DSLContext dsl = ctx.getBean(DSLContext.class);

    dsl.delete(SERVER_INFORMATION).where(SERVER_INFORMATION.PROVENANCE.eq("RANDOM_GENERATOR")).execute();

    List<ServerInformation> servers = ListUtilities.newArrayList();

    IntStream.range(0, 10_000)
            .forEach(i -> servers.add(ImmutableServerInformation.builder()
                    .hostname(mkHostName(i))
                    .environment(randomPick(SampleData.environments))
                    .location(randomPick(SampleData.locations))
                    .operatingSystem(randomPick(SampleData.operatingSystems))
                    .operatingSystemVersion(randomPick(SampleData.operatingSystemVersions))
                    .country("UK")
                    .assetCode("wltz-0" + rnd.nextInt(4000))
                    .hardwareEndOfLifeDate(rnd.nextInt(10) > 5
                            ? Date.valueOf(LocalDate.now().plusMonths(rnd.nextInt(12 * 6) - (12 * 3)))
                            : null)
                    .operatingSystemEndOfLifeDate(rnd.nextInt(10) > 5
                            ? Date.valueOf(LocalDate.now().plusMonths(rnd.nextInt(12 * 6) - (12 * 3)))
                            : null)
                    .virtual(rnd.nextInt(10) > 7)
                    .provenance("RANDOM_GENERATOR")
                    .build()));

    // servers.forEach(System.out::println);
    serverDao.bulkSave(servers);

}

From source file:com.khartec.waltz.jobs.sample.AppCapabilityGenerator.java

public static void main(String[] args) {
    AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext(DIConfiguration.class);
    CapabilityService capabilityDao = ctx.getBean(CapabilityService.class);
    ApplicationService applicationDao = ctx.getBean(ApplicationService.class);
    AppCapabilityService appCapabilityDao = ctx.getBean(AppCapabilityService.class);

    DSLContext dsl = ctx.getBean(DSLContext.class);

    dsl.delete(APP_CAPABILITY).execute();

    List<Capability> capabilities = capabilityDao.findAll();

    applicationDao.findAll().forEach(app -> {
        int count = rnd.nextInt(4) + 1;

        Set<Long> ids = IntStream.range(0, count).mapToObj(i -> randomPick(capabilities)).map(c -> c.id().get())
                .collect(Collectors.toSet());

        appCapabilityDao.addCapabilitiesToApp(app.id().get(), new ArrayList<>(ids));

    });

}

From source file:com.khartec.waltz.jobs.sample.FlowGenerator.java

public static void main(String[] args) {
    AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext(DIConfiguration.class);

    AuthoritativeSourceDao authSourceDao = ctx.getBean(AuthoritativeSourceDao.class);
    ApplicationService applicationDao = ctx.getBean(ApplicationService.class);
    DataTypeService dataTypesDao = ctx.getBean(DataTypeService.class);
    DataFlowService dataFlowDao = ctx.getBean(DataFlowService.class);
    OrganisationalUnitService orgUnitDao = ctx.getBean(OrganisationalUnitService.class);
    DataSource dataSource = ctx.getBean(DataSource.class);
    DSLContext dsl = ctx.getBean(DSLContext.class);

    List<AuthoritativeSource> authSources = authSourceDao.findByEntityKind(EntityKind.ORG_UNIT);
    List<String> dataTypes = dataTypesDao.getAll().stream().map(dt -> dt.code()).collect(toList());
    List<Application> apps = applicationDao.findAll();
    List<OrganisationalUnit> orgUnits = orgUnitDao.findAll();

    Set<DataFlow> expectedFlows = authSources.stream().flatMap(a -> {
        long orgUnitId = a.parentReference().id();

        return IntStream.range(0, rnd.nextInt(40))
                .mapToObj(i -> ImmutableDataFlow.builder().dataType(a.dataType())
                        .source(a.applicationReference()).target(randomAppPick(apps, orgUnitId)).build());
    }).collect(Collectors.toSet());

    Set<DataFlow> probableFlows = authSources.stream().flatMap(a -> IntStream.range(0, rnd.nextInt(30))
            .mapToObj(i -> ImmutableDataFlow.builder().dataType(a.dataType()).source(a.applicationReference())
                    .target(randomAppPick(apps, randomPick(orgUnits).id().get())).build()))
            .collect(Collectors.toSet());

    Set<DataFlow> randomFlows = apps.stream()
            .map(a -> ImmutableDataFlow.builder().source(a.toEntityReference()))
            .map(b -> b.target(randomAppPick(apps, randomPick(orgUnits).id().get())))
            .map(b -> b.dataType(randomPick(dataTypes)).build()).collect(Collectors.toSet());

    dsl.deleteFrom(DATA_FLOW).execute();

    Set<DataFlow> all = new HashSet<>();
    all.addAll(randomFlows);
    all.addAll(expectedFlows);
    all.addAll(probableFlows);

    dataFlowDao.addFlows(new ArrayList<>(all));

    System.out.println("Done");

}

From source file:net.sf.mzmine.chartbasics.graphicsexport.GraphicsExportDialog.java

/**
 * Launch the application.
 */
public static void main(String[] args) {
    try {
        XYSeries s = new XYSeries("1");
        IntStream.range(0, 10).forEach(i -> s.add(i, i));
        XYSeriesCollection data = new XYSeriesCollection(s);
        JFreeChart chart = ChartFactory.createXYLineChart("XY", "time (s)", "intensity", data);
        GraphicsExportDialog.createInstance();
        GraphicsExportDialog.openDialog(chart);
    } catch (Exception e) {
        e.printStackTrace();
    }
}

From source file:kishida.cnn.NeuralNetwork.java

public static void main(String[] args) throws IOException {
    NeuralNetwork nn = new NeuralNetwork();
    nn.getLayers()
            .addAll(Arrays.asList(new InputLayer(20, 20), new ConvolutionLayer("conv1", 3, 7, 2, 1, true),
                    new MaxPoolingLayer("pool", 3, 2), new MultiNormalizeLayer("norm1", 5, .0001f, true),
                    new FullyConnect("test", 3, 0, 1, new LogisticFunction(), true)));
    nn.init();
    nn.random.nextInt();
    StringWriter sw = new StringWriter();
    nn.writeAsJson(sw);
    System.out.println(sw);

    // verify that the network serialized above can be read back as an equivalent network
    StringReader sr0 = new StringReader(sw.toString());
    NeuralNetwork nn0 = nn.readFromJson(sr0);
    nn0.init();
    ConvolutionLayer conv1o = (ConvolutionLayer) nn.findLayerByName("conv1").get();
    ConvolutionLayer conv1r = (ConvolutionLayer) nn0.findLayerByName("conv1").get();
    System.out.println("org:" + Arrays.toString(conv1o.getFilter()));
    System.out.println("red:" + Arrays.toString(conv1r.getFilter()));
    double loss = IntStream.range(0, conv1o.getFilter().length)
            .mapToDouble(i -> (conv1o.getFilter()[i] - conv1r.getFilter()[i])
                    * (conv1o.getFilter()[i] - conv1r.getFilter()[i]))
            .sum();
    System.out.println(Math.sqrt(loss));

    NeuralNetwork v = NeuralNetwork.readFromJson(new StringReader("{\n" + "  \"weightDecay\" : 5.0E-4,\n"
            + "  \"miniBatch\" : 128,\n" + "  \"random\" : \"c3EAfgAAAT/wWGBKFyCXAAATnQ6sF654\",\n"
            + "  \"imageRandom\" : \"c3EAfgAAAAAAAAAAAAAAAAAABd7s70R4\",\n" + "  \"momentam\" : 0.9,\n"
            + "  \"layers\" : [ {\n" + "    \"InputLayer\" : {\n" + "      \"width\" : 250,\n"
            + "      \"height\" : 220,\n" + "      \"name\" : \"input\"\n" + "    }\n" + "  }, {\n"
            + "    \"ConvolutionLayer\" : {\n" + "      \"name\" : \"conv1\",\n" + "      \"filter\" : null,\n"
            + "      \"bias\" : [ 1.0, 1.0, 1.0 ],\n" + "      \"filterDelta\" : null,\n"
            + "      \"biasDelta\" : [ 0.0, 0.0, 0.0 ],\n" + "      \"stride\" : 2,\n"
            + "      \"filterSize\" : 7,\n" + "      \"useGpu\" : true\n" + "    }\n" + "  }, {\n"
            + "    \"MaxPoolingLayer\" : {\n" + "      \"name\" : \"pool\",\n" + "      \"size\" : 3,\n"
            + "      \"stride\" : 2\n" + "    }\n" + "  }, {\n" + "    \"MultiNormalizeLayer\" : {\n"
            + "      \"name\" : \"norm1\",\n" + "      \"size\" : 5,\n" + "      \"threshold\" : 1.0E-4,\n"
            + "      \"useGpu\" : true\n" + "    }\n" + "  }, {\n" + "    \"FullyConnect\" : {\n"
            + "      \"name\" : \"test\",\n" + "      \"outputSize\" : 3,\n"
            + "      \"weight\" : [ 0.0014115907, 0.0043465886, 0.01138472, -0.0013297468, "
            + "-0.0060525155, -0.0109255025, -0.015493984, 0.011872963, -0.0015145391 ],\n"
            + "      \"initBias\" : 0.5, " + "      \"bias\" : [ 0.0, 0.2, 0.4 ],\n"
            + "      \"weightDelta\" : [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ],\n"
            + "      \"biasDelta\" : [ 0.0, 0.0, 0.0 ],\n" + "      \"dropoutRate\" : 1.0,\n"
            + "      \"activation\" : \"LogisticFunction\",\n" + "      \"useGpu\" : true\n" + "    }\n"
            + "  } ],\n" + "  \"learningRate\" : 0.01\n" + "}"));
    System.out.println(nn.random.nextInt());
    System.out.println(v.random.nextInt());
    v.findLayerByName("test").ifPresent(layer -> {
        FullyConnect f = (FullyConnect) layer;
        System.out.println(f.getActivation().getClass());
        System.out.println(Arrays.toString(f.getBias()));
    });
    v.init();
    v.findLayerByName("test").ifPresent(layer -> {
        FullyConnect f = (FullyConnect) layer;
        System.out.println(f.getActivation().getClass());
        System.out.println(Arrays.toString(f.getBias()));
    });
}

From source file:diffhunter.DiffHunter.java

/**
 * @param args the command line arguments
 * @throws org.apache.commons.cli.ParseException
 * @throws java.io.IOException
 */
public static void main(String[] args) throws ParseException, IOException {

    //String test_ = Paths.get("J:\\VishalData\\additional\\", "Sasan" + "_BDB").toAbsolutePath().toString();

    // TODO code application logic here
    /*args = new String[]
    {
    "-i", "-b", "J:\\VishalData\\additional\\Ptbp2_E18_5_cortex_CLIP_mm9_plus_strand_sorted.bed", "-r", "J:\\VishalData\\additional\\mouse_mm9.txt", "-o", "J:\\VishalData"
    };*/

    /*args = new String[]
    {
    "-c", "-r", "J:\\VishalData\\additional\\mouse_mm9.txt", "-1", "J:\\VishalData\\Ptbp2_Adult_testis_CLIP_mm9_plus_strand_sorted_BDB", "-2", "J:\\VishalData\\Ptbp2_E18_5_cortex_CLIP_mm9_plus_strand_sorted_BDB", "-w", "200", "-s", "50", "-o", "J:\\VishalData"
    };*/
    Options options = new Options();

    // add t option
    options.addOption("i", "index", false, "Indexing BED files.");
    options.addOption("b", "bed", true, "bed file to be indexed");
    options.addOption("o", "output", true, "Folder that the index/comparison file will be created.");
    options.addOption("r", "reference", true, "Reference annotation file to be used for indexing");
    options.addOption("c", "compare", false, "Finding differences between two conditions");
    options.addOption("1", "first", true, "First sample index location");
    options.addOption("2", "second", true, "Second sample index location");
    options.addOption("w", "window", true, "Length of window for identifying differences");
    options.addOption("s", "sliding", true, "Length of sliding");

    CommandLineParser parser = new BasicParser();
    CommandLine cmd = parser.parse(options, args);

    boolean indexing = false;
    boolean comparing = false;

    // determine which mode was requested
    if (cmd.hasOption("i")) {
        indexing = true;
    } else if (cmd.hasOption("c")) {
        comparing = true;
    } else {
        // neither mode was selected; print usage and exit
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("diffhunter", options);
        return;
    }

    // indexing was selected; the user must supply the bed file, reference, and output folder
    if (indexing) {
        if (!(cmd.hasOption("o") && cmd.hasOption("r") && cmd.hasOption("b"))) {
            HelpFormatter formatter = new HelpFormatter();
            formatter.printHelp("diffhunter", options);
            return;
        }
        String bedfile_ = cmd.getOptionValue("b");
        String reference_file = cmd.getOptionValue("r");
        String folder_loc = cmd.getOptionValue("o");

        String sample_name = FilenameUtils.getBaseName(bedfile_);

        try (Database B2 = BerkeleyDB_Box.Get_BerkeleyDB(
                Paths.get(folder_loc, sample_name + "_BDB").toAbsolutePath().toString(), true, sample_name)) {
            Indexer indexing_ = new Indexer(reference_file);
            indexing_.Make_Index(B2, bedfile_,
                    Paths.get(folder_loc, sample_name + "_BDB").toAbsolutePath().toString());
            B2.close();

        }
    } else if (comparing) {
        // comparing requires the output folder, window, slide, and both sample indexes
        if (!(cmd.hasOption("o") && cmd.hasOption("w") && cmd.hasOption("s") && cmd.hasOption("1")
                && cmd.hasOption("2"))) {
            HelpFormatter formatter = new HelpFormatter();
            formatter.printHelp("diffhunter", options);
            return;
        }
        String folder_loc = cmd.getOptionValue("o");
        int window_ = Integer.parseInt(cmd.getOptionValue("w"));
        //int window_=600;

        int slide_ = Integer.parseInt(cmd.getOptionValue("s"));

        String first = cmd.getOptionValue("1").replace("_BDB", "");
        String second = cmd.getOptionValue("2").replace("_BDB", "");
        String reference_file = cmd.getOptionValue("r");
        //String folder_loc=cmd.getOptionValue("o");

        String sample_name_first = FilenameUtils.getBaseName(first);
        String sample_name_second = FilenameUtils.getBaseName(second);

        Database B1 = BerkeleyDB_Box.Get_BerkeleyDB(first + "_BDB", false, sample_name_first);
        Database B2 = BerkeleyDB_Box.Get_BerkeleyDB(second + "_BDB", false, sample_name_second);

        List<String> first_condition_genes = Files
                .lines(Paths.get(first + "_BDB", sample_name_first + ".txt").toAbsolutePath())
                .collect(Collectors.toList());
        List<String> second_condition_genes = Files
                .lines(Paths.get(second + "_BDB", sample_name_second + ".txt").toAbsolutePath())
                .collect(Collectors.toList());
        System.out.println("First and second condition are loaded!!! ");
        List<String> intersection_ = new ArrayList<>(first_condition_genes);
        intersection_.retainAll(second_condition_genes);

        BufferedWriter output = new BufferedWriter(
                new FileWriter(Paths.get(folder_loc, "differences_" + window_ + "_s" + slide_ + "_c" + ".txt")
                        .toAbsolutePath().toString(), false));
        List<Result_Window> final_results = Collections.synchronizedList(new ArrayList<>());
        Worker_New worker_class = new Worker_New();
        worker_class.Read_Reference(reference_file);

        while (!intersection_.isEmpty()) {
            List<String> selected_genes = new ArrayList<>();
            //if (intersection_.size()<=10000){selected_genes.addAll(intersection_.subList(0, intersection_.size()));}
            //else selected_genes.addAll(intersection_.subList(0, 10000));
            // both branches of the original conditional were identical, so simply
            // process all remaining genes in one batch
            selected_genes.addAll(intersection_);
            intersection_.removeAll(selected_genes);
            //System.out.println("Intersection count is:"+intersection_.size());
            //final List<Result_Window> resultssss_=new ArrayList<>();
            IntStream.range(0, selected_genes.size()).parallel().forEach(i -> {
                System.out.println(selected_genes.get(i) + "\tprocessing......");
                String gene_of_interest = selected_genes.get(i);//"ENSG00000142657|PGD";//intersection_.get(6);////"ENSG00000163395|IGFN1";//"ENSG00000270066|SCARNA2";
                int start = worker_class.dic_genes.get(gene_of_interest).start_loc;
                int end = worker_class.dic_genes.get(gene_of_interest).end_loc;

                Map<Integer, Integer> first_ = Collections.emptyMap();
                try {
                    first_ = BerkeleyDB_Box.Get_Coord_Read(B1, gene_of_interest);
                } catch (IOException | ClassNotFoundException ex) {
                    Logger.getLogger(DiffHunter.class.getName()).log(Level.SEVERE, null, ex);
                }

                Map<Integer, Integer> second_ = Collections.emptyMap();
                try {
                    second_ = BerkeleyDB_Box.Get_Coord_Read(B2, gene_of_interest);
                } catch (IOException | ClassNotFoundException ex) {
                    Logger.getLogger(DiffHunter.class.getName()).log(Level.SEVERE, null, ex);
                }
                List<Window> top_windows_first = worker_class.Get_Top_Windows(window_, first_, slide_);
                List<Window> top_windows_second = worker_class.Get_Top_Windows(window_, second_, slide_);
                //System.out.println("passed for window peak call for gene \t"+selected_genes.get(i));
                // System.out.println("top_window_first_Count\t"+top_windows_first.size());
                // System.out.println("top_window_second_Count\t"+top_windows_second.size());
                if (top_windows_first.isEmpty() && top_windows_second.isEmpty()) {
                    return;
                }

                List<Result_Window> res_temp = new Worker_New().Get_Significant_Windows(gene_of_interest, start,
                        end, top_windows_first, top_windows_second, second_, first_, sample_name_first,
                        sample_name_second, 0.01);
                if (!res_temp.isEmpty()) {
                    final_results.addAll(res_temp);//final_results.addAll(worker_class.Get_Significant_Windows(gene_of_interest, start, end, top_windows_first, top_windows_second, second_, first_, first_condition, second_condition, 0.01));

                } //System.out.println(selected_genes.get(i)+"\tprocessed.");

            });

            List<Double> pvals = new ArrayList<>();

            for (int i = 0; i < final_results.size(); i++) {
                pvals.add(final_results.get(i).p_value);
            }
            List<Double> qvals = MultipleTestCorrection.benjaminiHochberg(pvals);

            System.out.println("Writing to file...");
            output.append("Gene_Symbol\tContributing_Sample\tStart\tEnd\tOddsRatio\tp_Value\tFDR");
            output.newLine();

            for (int i = 0; i < final_results.size(); i++) {
                Result_Window item = final_results.get(i);
                output.append(item.associated_gene_symbol + "\t" + item.contributing_windows + "\t"
                        + item.start_loc + "\t" + item.end_loc + "\t" + item.oddsratio_ + "\t" + item.p_value
                        + "\t" + qvals.get(i)); //+ "\t" + item.average_other_readcount_cotributing + "\t" + item.average_other_readcount_cotributing + "\t" + item.average_window_readcount_non + "\t" + item.average_other_readcount_non);
                output.newLine();
            }

            /* for (Result_Window item : final_results)
             {
            output.append(item.associated_gene_symbol + "\t" + item.contributing_windows + "\t" + item.start_loc + "\t" + item.end_loc + "\t" + item.oddsratio_ + "\t" + item.p_value); //+ "\t" + item.average_other_readcount_cotributing + "\t" + item.average_other_readcount_cotributing + "\t" + item.average_window_readcount_non + "\t" + item.average_other_readcount_non);
            output.newLine();
             }
               */
            final_results.clear();

        }
        output.close();

    }
    System.out.println("Done.");

}

From source file:com.example.geomesa.kafka08.KafkaLoadTester.java

public static void main(String[] args) throws Exception {
    // read command line args for a connection to Kafka
    CommandLineParser parser = new BasicParser();
    Options options = getCommonRequiredOptions();
    CommandLine cmd = parser.parse(options, args);
    String visibility = getVisibility(cmd);

    if (visibility == null) {
        System.out.println("visibility: null");
    } else {
        System.out.println("visibility: '" + visibility + "'");
    }

    // create the producer and consumer KafkaDataStore objects
    Map<String, String> dsConf = getKafkaDataStoreConf(cmd);
    System.out.println("KDS config: " + dsConf);
    dsConf.put("isProducer", "true");
    DataStore producerDS = DataStoreFinder.getDataStore(dsConf);
    dsConf.put("isProducer", "false");
    DataStore consumerDS = DataStoreFinder.getDataStore(dsConf);

    // verify that we got back our KafkaDataStore objects properly
    if (producerDS == null) {
        throw new Exception("Null producer KafkaDataStore");
    }
    if (consumerDS == null) {
        throw new Exception("Null consumer KafkaDataStore");
    }

    // create the schema which creates a topic in Kafka
    // (only needs to be done once)
    final String sftName = "KafkaStressTest";
    final String sftSchema = "name:String,age:Int,step:Double,lat:Double,dtg:Date,*geom:Point:srid=4326";
    SimpleFeatureType sft = SimpleFeatureTypes.createType(sftName, sftSchema);
    // set zkPath to default if not specified
    String zkPath = (dsConf.get(ZK_PATH) == null) ? "/geomesa/ds/kafka" : dsConf.get(ZK_PATH);
    SimpleFeatureType preppedOutputSft = KafkaDataStoreHelper.createStreamingSFT(sft, zkPath);
    // only create the schema if it hasn't been created already
    if (!Arrays.asList(producerDS.getTypeNames()).contains(sftName))
        producerDS.createSchema(preppedOutputSft);

    System.out.println("Register KafkaDataStore in GeoServer (Press enter to continue)");
    System.in.read();

    // the live consumer must be created before the producer writes features
    // in order to read streaming data.
    // i.e. the live consumer will only read data written after its instantiation
    SimpleFeatureStore producerFS = (SimpleFeatureStore) producerDS.getFeatureSource(sftName);
    SimpleFeatureSource consumerFS = consumerDS.getFeatureSource(sftName);

    // creates and adds SimpleFeatures to the producer every 1/5th of a second
    System.out.println("Writing features to Kafka... refresh GeoServer layer preview to see changes");

    SimpleFeatureBuilder builder = new SimpleFeatureBuilder(sft);

    Integer numFeats = getLoad(cmd);

    System.out.println("Building a list of " + numFeats + " SimpleFeatures.");
    List<SimpleFeature> features = IntStream.range(1, numFeats)
            .mapToObj(i -> createFeature(builder, i, visibility)).collect(Collectors.toList());

    // set variables to estimate feature production rate
    Long startTime = null;
    Long featuresSinceStartTime = 0L;
    int cycle = 0;
    int cyclesToSkip = 50000 / numFeats; // collect enough features
                                         // to get an accurate rate estimate

    while (true) {
        // write features
        features.forEach(feat -> {
            try {
                DefaultFeatureCollection featureCollection = new DefaultFeatureCollection();
                featureCollection.add(feat);
                producerFS.addFeatures(featureCollection);
            } catch (Exception e) {
                System.out.println("Caught an exception while writing features.");
                e.printStackTrace();
            }
            updateFeature(feat);
        });

        // count features written
        Integer consumerSize = consumerFS.getFeatures().size();
        cycle++;
        featuresSinceStartTime += consumerSize;
        System.out.println("At " + new Date() + " wrote " + consumerSize + " features");

        // if we've collected enough features, calculate the rate
        if (cycle >= cyclesToSkip || startTime == null) {
            Long endTime = System.currentTimeMillis();
            if (startTime != null) {
                Long diffTime = endTime - startTime;
                Double rate = (featuresSinceStartTime.doubleValue() * 1000.0) / diffTime.doubleValue();
                System.out.printf("%.1f feats/sec (%d/%d)\n", rate, featuresSinceStartTime, diffTime);
            }
            cycle = 0;
            startTime = endTime;
            featuresSinceStartTime = 0L;
        }
    }
}

From source file:com.example.geomesa.kafka.KafkaLoadTester.java

public static void main(String[] args) throws Exception {
    // read command line args for a connection to Kafka
    CommandLineParser parser = new BasicParser();
    Options options = getCommonRequiredOptions();
    CommandLine cmd = parser.parse(options, args);
    String visibility = getVisibility(cmd);
    Integer delay = getDelay(cmd);

    if (visibility == null) {
        System.out.println("visibility: null");
    } else {
        System.out.println("visibility: '" + visibility + "'");
    }

    // create the producer and consumer KafkaDataStore objects
    Map<String, String> dsConf = getKafkaDataStoreConf(cmd);
    System.out.println("KDS config: " + dsConf);
    dsConf.put("kafka.consumer.count", "0");
    DataStore producerDS = DataStoreFinder.getDataStore(dsConf);
    dsConf.put("kafka.consumer.count", "1");
    DataStore consumerDS = DataStoreFinder.getDataStore(dsConf);

    // verify that we got back our KafkaDataStore objects properly
    if (producerDS == null) {
        throw new Exception("Null producer KafkaDataStore");
    }
    if (consumerDS == null) {
        throw new Exception("Null consumer KafkaDataStore");
    }

    try {
        // create the schema which creates a topic in Kafka
        // (only needs to be done once)
        final String sftName = "KafkaStressTest";
        final String sftSchema = "name:String,age:Int,step:Double,lat:Double,dtg:Date,*geom:Point:srid=4326";
        SimpleFeatureType sft = SimpleFeatureTypes.createType(sftName, sftSchema);
        producerDS.createSchema(sft);

        System.out.println("Register KafkaDataStore in GeoServer (Press enter to continue)");
        System.in.read();

        // the live consumer must be created before the producer writes features
        // in order to read streaming data.
        // i.e. the live consumer will only read data written after its instantiation
        SimpleFeatureStore producerFS = (SimpleFeatureStore) producerDS.getFeatureSource(sftName);
        SimpleFeatureSource consumerFS = consumerDS.getFeatureSource(sftName);

        // creates and adds SimpleFeatures to the producer every 1/5th of a second
        System.out.println("Writing features to Kafka... refresh GeoServer layer preview to see changes");

        SimpleFeatureBuilder builder = new SimpleFeatureBuilder(sft);

        Integer numFeats = getLoad(cmd);

        System.out.println("Building a list of " + numFeats + " SimpleFeatures.");
        List<SimpleFeature> features = IntStream.range(1, numFeats)
                .mapToObj(i -> createFeature(builder, i, visibility)).collect(Collectors.toList());

        // set variables to estimate feature production rate
        Long startTime = null;
        Long featuresSinceStartTime = 0L;
        int cycle = 0;
        int cyclesToSkip = 50000 / numFeats; // collect enough features
                                             // to get an accurate rate estimate

        while (true) {
            // write features
            features.forEach(feat -> {
                try {
                    DefaultFeatureCollection featureCollection = new DefaultFeatureCollection();
                    featureCollection.add(feat);
                    producerFS.addFeatures(featureCollection);
                } catch (Exception e) {
                    System.out.println("Caught an exception while writing features.");
                    e.printStackTrace();
                }
                updateFeature(feat);
            });

            // count features written
            Integer consumerSize = consumerFS.getFeatures().size();
            cycle++;
            featuresSinceStartTime += consumerSize;
            System.out.println("At " + new Date() + " wrote " + consumerSize + " features");

            // if we've collected enough features, calculate the rate
            if (cycle >= cyclesToSkip || startTime == null) {
                Long endTime = System.currentTimeMillis();
                if (startTime != null) {
                    Long diffTime = endTime - startTime;
                    Double rate = (featuresSinceStartTime.doubleValue() * 1000.0) / diffTime.doubleValue();
                    System.out.printf("%.1f feats/sec (%d/%d)\n", rate, featuresSinceStartTime, diffTime);
                }
                cycle = 0;
                startTime = endTime;
                featuresSinceStartTime = 0L;
            }

            // sleep before next write
            if (delay != null) {
                System.out.printf("Sleeping for %d ms\n", delay);
                Thread.sleep(delay);
            }
        }

    } finally {
        producerDS.dispose();
        consumerDS.dispose();
    }
}

From source file:Main.java

public static <A, B> List<Pair<A, B>> zip(final List<A> as, final List<B> bs) {
    return IntStream.range(0, Math.min(as.size(), bs.size())).mapToObj(i -> new Pair<>(as.get(i), bs.get(i)))
            .collect(Collectors.toList());
}