Example usage for java.util Map put

Introduction

On this page you can find example usages of java.util.Map.put, drawn from a variety of open-source projects.

Prototype

V put(K key, V value);

Document

Associates the specified value with the specified key in this map (optional operation).
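
As a quick, minimal sketch of that contract: put returns the value previously associated with the key, or null if there was no mapping (a null return can also mean the key was explicitly mapped to null).

import java.util.HashMap;
import java.util.Map;

public class MapPutDemo {
    public static void main(String[] args) {
        Map<String, Integer> ages = new HashMap<>();

        // no previous mapping for "alice", so put returns null
        Integer previous = ages.put("alice", 30);
        System.out.println(previous); // prints: null

        // putting to an existing key replaces the value and returns the old one
        previous = ages.put("alice", 31);
        System.out.println(previous); // prints: 30
    }
}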

Usage

From source file:com.example.geomesa.kafka10.KafkaQuickStart.java

public static void main(String[] args) throws Exception {
    // read command line args for a connection to Kafka
    CommandLineParser parser = new BasicParser();
    Options options = getCommonRequiredOptions();
    CommandLine cmd = parser.parse(options, args);

    // create the producer and consumer KafkaDataStore objects
    Map<String, String> dsConf = getKafkaDataStoreConf(cmd);
    dsConf.put("isProducer", "true");
    DataStore producerDS = DataStoreFinder.getDataStore(dsConf);
    dsConf.put("isProducer", "false");
    DataStore consumerDS = DataStoreFinder.getDataStore(dsConf);

    // verify that we got back our KafkaDataStore objects properly
    if (producerDS == null) {
        throw new Exception("Null producer KafkaDataStore");
    }
    if (consumerDS == null) {
        throw new Exception("Null consumer KafkaDataStore");
    }

    // create the schema which creates a topic in Kafka
    // (only needs to be done once)
    final String sftName = "KafkaQuickStart10";
    final String sftSchema = "name:String,age:Int,dtg:Date,*geom:Point:srid=4326";
    SimpleFeatureType sft = SimpleFeatureTypes.createType(sftName, sftSchema);
    // set zkPath to default if not specified
    String zkPath = (dsConf.get(ZK_PATH) == null) ? "/geomesa/ds/kafka" : dsConf.get(ZK_PATH);
    SimpleFeatureType preppedOutputSft = KafkaDataStoreHelper.createStreamingSFT(sft, zkPath);
    // only create the schema if it hasn't been created already
    if (!Arrays.asList(producerDS.getTypeNames()).contains(sftName))
        producerDS.createSchema(preppedOutputSft);
    if (!cmd.hasOption("automated")) {
        System.out.println("Register KafkaDataStore in GeoServer (Press enter to continue)");
        System.in.read();
    }

    // the live consumer must be created before the producer writes features
    // in order to read streaming data.
    // i.e. the live consumer will only read data written after its instantiation
    SimpleFeatureSource consumerFS = consumerDS.getFeatureSource(sftName);
    SimpleFeatureStore producerFS = (SimpleFeatureStore) producerDS.getFeatureSource(sftName);

    // creates and adds SimpleFeatures to the producer every 1/5th of a second
    System.out.println("Writing features to Kafka... refresh GeoServer layer preview to see changes");
    Instant replayStart = new Instant();

    String vis = cmd.getOptionValue(VISIBILITY);
    if (vis != null)
        System.out.println("Writing features with " + vis);
    addSimpleFeatures(sft, producerFS, vis);
    Instant replayEnd = new Instant();

    // read from Kafka after writing all the features.
    // LIVE CONSUMER - will obtain the current state of SimpleFeatures
    System.out.println("\nConsuming with the live consumer...");
    SimpleFeatureCollection featureCollection = consumerFS.getFeatures();
    System.out.println(featureCollection.size() + " features were written to Kafka");

    addDeleteNewFeature(sft, producerFS);

    // read from Kafka again after the add/delete operations.
    // LIVE CONSUMER - will obtain the current state of SimpleFeatures
    System.out.println("\nConsuming with the live consumer...");
    featureCollection = consumerFS.getFeatures();
    System.out.println(featureCollection.size() + " features were written to Kafka");

    // the two SimpleFeatures reflect their current, real-time state here
    System.out.println("Here are the two SimpleFeatures that were obtained with the live consumer:");
    SimpleFeatureIterator featureIterator = featureCollection.features();
    SimpleFeature feature1 = featureIterator.next();
    SimpleFeature feature2 = featureIterator.next();
    featureIterator.close();
    printFeature(feature1);
    printFeature(feature2);

    // REPLAY CONSUMER - will obtain the state of SimpleFeatures at any specified time
    // Replay consumer requires a ReplayConfig which takes a time range and a
    // duration of time to process
    System.out.println("\nConsuming with the replay consumer...");
    Duration readBehind = new Duration(1000); // 1 second readBehind
    ReplayConfig rc = new ReplayConfig(replayStart, replayEnd, readBehind);
    SimpleFeatureType replaySFT = KafkaDataStoreHelper.createReplaySFT(preppedOutputSft, rc);
    producerDS.createSchema(replaySFT);
    SimpleFeatureSource replayConsumerFS = consumerDS.getFeatureSource(replaySFT.getName());

    // querying for the state of SimpleFeatures approximately 5 seconds before the replayEnd.
    // the ReplayKafkaConsumerFeatureSource will build the state of SimpleFeatures
    // by processing all of the messages that were sent in between queryTime-readBehind and queryTime.
    // only the messages in between replayStart and replayEnd are cached.
    Instant queryTime = replayEnd.minus(5000);
    featureCollection = replayConsumerFS.getFeatures(ReplayTimeHelper.toFilter(queryTime));
    System.out.println(featureCollection.size() + " features were written to Kafka");

    System.out.println("Here are the two SimpleFeatures that were obtained with the replay consumer:");
    featureIterator = featureCollection.features();
    feature1 = featureIterator.next();
    feature2 = featureIterator.next();
    featureIterator.close();
    printFeature(feature1);
    printFeature(feature2);

    if (System.getProperty("clear") != null) {
        // run the Java command with -Dclear=true to remove all features
        producerFS.removeFeatures(Filter.INCLUDE);
    }

    System.exit(0);
}

From source file:com.alibaba.jstorm.utils.FileAttribute.java

public static void main(String[] args) {
    Map<String, FileAttribute> map = new HashMap<String, FileAttribute>();

    FileAttribute attribute = new FileAttribute();
    attribute.setFileName("test");
    attribute.setIsDir("true");
    attribute.setModifyTime(new Date().toString());
    attribute.setSize("4096");

    map.put("test", attribute);

    System.out.println("Before:" + map);

    // serialize the map to JSON, then parse it back and rebuild the attribute
    String jsonString = JStormUtils.to_json(map);

    Map<String, Map> map2 = (Map<String, Map>) JStormUtils.from_json(jsonString);

    Map jObject = map2.get("test");

    FileAttribute attribute2 = FileAttribute.fromJSONObject(jObject);

    System.out.println("attribute2:" + attribute2);
}

From source file:apps.Source2XML.java

public static void main(String[] args) {
    Options options = new Options();

    options.addOption("i", null, true, "input file");
    options.addOption("o", null, true, "output file");
    options.addOption("reparse_xml", null, false, "reparse each XML entry to ensure the parser doesn't fail");

    Joiner commaJoin = Joiner.on(',');

    options.addOption("source_type", null, true,
            "document source type: " + commaJoin.join(SourceFactory.getDocSourceList()));

    Joiner spaceJoin = Joiner.on(' ');

    CommandLineParser parser = new org.apache.commons.cli.GnuParser();

    BufferedWriter outputFile = null;

    int docNum = 0;

    if (USE_LEMMATIZER && USE_STEMMER) {
        System.err.println("Bug/inconsistent code: cann't use the stemmer and lemmatizer at the same time!");
        System.exit(1);// w  ww . ja  va2 s.c o m
    }

    //Stemmer stemmer = new Stemmer();
    KrovetzStemmer stemmer = new KrovetzStemmer();

    System.out.println("Using Stanford NLP?        " + USE_STANFORD);
    System.out.println("Using Stanford lemmatizer? " + USE_LEMMATIZER);
    System.out.println("Using stemmer?             " + USE_STEMMER
            + (USE_STEMMER ? " (class: " + stemmer.getClass().getCanonicalName() + ")" : ""));

    try {
        CommandLine cmd = parser.parse(options, args);

        String inputFileName = null, outputFileName = null;

        if (cmd.hasOption("i")) {
            inputFileName = cmd.getOptionValue("i");
        } else {
            Usage("Specify 'input file'", options);
        }

        if (cmd.hasOption("o")) {
            outputFileName = cmd.getOptionValue("o");
        } else {
            Usage("Specify 'output file'", options);
        }

        outputFile = new BufferedWriter(
                new OutputStreamWriter(CompressUtils.createOutputStream(outputFileName)));

        String sourceName = cmd.getOptionValue("source_type");

        if (sourceName == null)
            Usage("Specify document source type", options);

        boolean reparseXML = cmd.hasOption("reparse_xml");

        DocumentSource inpDocSource = SourceFactory.createDocumentSource(sourceName, inputFileName);
        DocumentEntry inpDoc = null;
        TextCleaner textCleaner = new TextCleaner(
                new DictNoComments(new File("data/stopwords.txt"), true /* lower case */), USE_STANFORD,
                USE_LEMMATIZER);

        Map<String, String> outputMap = new HashMap<String, String>();

        outputMap.put(UtilConst.XML_FIELD_DOCNO, null);
        outputMap.put(UtilConst.XML_FIELD_TEXT, null);

        XmlHelper xmlHlp = new XmlHelper();

        if (reparseXML)
            System.out.println("Will reparse every XML entry to verify correctness!");

        while ((inpDoc = inpDocSource.next()) != null) {
            ++docNum;

            ArrayList<String> toks = textCleaner.cleanUp(inpDoc.mDocText);
            ArrayList<String> goodToks = new ArrayList<String>();
            for (String s : toks)
                if (s.length() <= MAX_WORD_LEN && // Exclude long and short words
                        s.length() >= MIN_WORD_LEN && isGoodWord(s))
                    goodToks.add(USE_STEMMER ? stemmer.stem(s) : s);

            String partlyCleanedText = spaceJoin.join(goodToks);
            String cleanText = XmlHelper.removeInvaildXMLChars(partlyCleanedText);
            // isGoodWord combined with the Stanford tokenizer should be quite restrictive already
            //cleanText = replaceSomePunct(cleanText);

            outputMap.replace(UtilConst.XML_FIELD_DOCNO, inpDoc.mDocId);
            outputMap.replace(UtilConst.XML_FIELD_TEXT, cleanText);

            String xml = xmlHlp.genXMLIndexEntry(outputMap);

            if (reparseXML) {
                try {
                    XmlHelper.parseDocWithoutXMLDecl(xml);
                } catch (Exception e) {
                    System.err.println("Error re-parsing xml for document ID: " + inpDoc.mDocId);
                    System.exit(1);
                }
            }

            /*
            {
              System.out.println(inpDoc.mDocId);
              System.out.println("=====================");
              System.out.println(partlyCleanedText);
              System.out.println("=====================");
              System.out.println(cleanText);
            } 
            */

            try {
                outputFile.write(xml);
                outputFile.write(NL);
            } catch (Exception e) {
                e.printStackTrace();
                System.err.println("Error processing/saving a document!");
            }

            if (docNum % 1000 == 0)
                System.out.println(String.format("Processed %d documents", docNum));
        }

    } catch (ParseException e) {
        e.printStackTrace();
        Usage("Cannot parse arguments" + e, options);
    } catch (Exception e) {
        System.err.println("Terminating due to an exception: " + e);
        System.exit(1);
    } finally {
        System.out.println(String.format("Processed %d documents", docNum));

        try {
            if (null != outputFile) {
                outputFile.close();
                System.out.println("Output file is closed! all seems to be fine...");
            }
        } catch (IOException e) {
            System.err.println("IO exception: " + e);
            e.printStackTrace();
        }
    }
}

From source file:org.ala.harvester.WaissHarvester.java

/**
 * Main method for testing this particular Harvester
 *
 * @param args
 */
public static void main(String[] args) throws Exception {
    String[] locations = { "classpath*:spring.xml" };
    ApplicationContext context = new ClassPathXmlApplicationContext(locations);
    WaissHarvester h = new WaissHarvester();
    Repository r = (Repository) context.getBean("repository");
    h.setRepository(r);

    // set the connection params
    Map<String, String> connectParams = new HashMap<String, String>();
    connectParams.put("endpoint", "http://www.museum.wa.gov.au/waiss/pages/image.htm");

    h.setConnectionParams(connectParams);
    h.start(WAISS_INFOSOURCE_ID);
}

From source file:Main.java

public static void main(String[] args) {
    Map<String, String> map = new HashMap<String, String>();
    ObservableMap<String, String> observableMap = FXCollections.observableMap(map);
    observableMap.addListener(new MapChangeListener<String, String>() {
        @Override
        public void onChanged(MapChangeListener.Change<? extends String, ? extends String> change) {
            System.out.println("change! ");
        }
    });
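    // a put through the ObservableMap wrapper notifies the registered listener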
    observableMap.put("key 1", "value 1");
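    // a put on the backing map bypasses the wrapper, so no change event fires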
    map.put("key 2", "value 2");

}

From source file:mensajes.SmackCcsClient.java

public static void main(String[] args) throws Exception {
    // final long senderId = 1234567890L; // your GCM sender id
    final long senderId = 964281068865L; // your GCM sender id
    // final String password = "Your API key";
    final String password = "AIzaSyDeDwyXm8mG_EwBtqsaS98z1FGqoqc2BRM";

    SmackCcsClient ccsClient = new SmackCcsClient();

    ccsClient.connect(senderId, password);

    // Send a sample hello downstream message to a device.
    // String toRegId = "RegistrationIdOfTheTargetDevice";
    String toRegId = "APA91bFb6j8_M8HbyD2hFiSUAFIdNHWlxPSem8y-6f3CA1U_0h5W81rMzfMpEtPMLA6x8CmuUGWhI8iFZS_O8PXApbucc-RUwgEVZSOBMzpIPaUu4J70R9N9TvYzzcI7tf3_FCbWcJK8";
    String messageId = ccsClient.nextMessageId();
    Map<String, String> payload = new HashMap<String, String>();
    payload.put("title", "Campitoos");
    payload.put("message", "Pinche retrasadito mental");
    payload.put("EmbeddedMessageId", messageId);
    String collapseKey = "sample";
    Long timeToLive = 10000L;
    String message = createJsonMessage(toRegId, messageId, payload, collapseKey, timeToLive, true);

    ccsClient.sendDownstreamMessage(message);
}

From source file:net.itransformers.postDiscoverer.core.ReportManager.java

public static void main(String[] args) throws IOException {

    File projectDir = new File(".");
    File scriptPath = new File("postDiscoverer/src/main/resources/postDiscoverer/conf/groovy/");

    ResourceManagerFactory resourceManagerFactory = new XmlResourceManagerFactory(
            "iDiscover/resourceManager/xmlResourceManager/src/main/resources/xmlResourceManager/conf/xml/resource.xml");
    Map<String, String> resourceManagerParams = new HashMap<>();
    resourceManagerParams.put("projectPath", projectDir.getAbsolutePath());
    ResourceManager resourceManager = resourceManagerFactory.createResourceManager("xml",
            resourceManagerParams);

    Map<String, String> params = new HashMap<String, String>();
    params.put("protocol", "telnet");
    params.put("deviceName", "R1");
    params.put("deviceType", "CISCO");
    params.put("address", "10.17.1.5");
    params.put("port", "23");

    ResourceType resource = resourceManager.findFirstResourceBy(params);
    List connectParameters = resource.getConnectionParams();

    for (int i = 0; i < connectParameters.size(); i++) {
        ConnectionParamsType connParamsType = (ConnectionParamsType) connectParameters.get(i);

        String connectionType = connParamsType.getConnectionType();
        if (connectionType.equalsIgnoreCase(params.get("protocol"))) {

            for (ParamType param : connParamsType.getParam()) {
                params.put(param.getName(), param.getValue());
            }

        }
    }

    File postDiscoveryConfig = new File(
            projectDir + "/postDiscoverer/src/main/resources/postDiscoverer/conf/xml/reportGenerator.xml");
    if (!postDiscoveryConfig.exists()) {
        System.out.println("File missing: " + postDiscoveryConfig.getAbsolutePath());
        return;
    }

    ReportGeneratorType reportGenerator = null;

    FileInputStream is = new FileInputStream(postDiscoveryConfig);
    try {
        reportGenerator = JaxbMarshalar.unmarshal(ReportGeneratorType.class, is);
    } catch (JAXBException e) {
        logger.info(e);
    } finally {
        is.close();
    }

    ReportManager reportManager = new ReportManager(reportGenerator, scriptPath.getPath(), projectDir,
            "postDiscoverer/conf/xslt/table_creator.xslt");
    StringBuffer report = null;

    HashMap<String, Object> groovyExecutorParams = new HashMap<String, Object>();

    // copy the resolved connection parameters into the Groovy executor's parameter map
    for (String s : params.keySet()) {
        groovyExecutorParams.put(s, params.get(s));
    }

    try {
        report = reportManager.reportExecutor(
                new File("/Users/niau/Projects/Projects/netTransformer10/version1/post-discovery"),
                groovyExecutorParams);
    } catch (ParserConfigurationException e) {
        e.printStackTrace();
    } catch (SAXException e) {
        e.printStackTrace();
    }
    if (report != null) {
        System.out.println(report.toString());

    } else {
        System.out.println("Report generation failed!");

    }
}

From source file:com.example.geomesa.kafka08.KafkaLoadTester.java

public static void main(String[] args) throws Exception {
    // read command line args for a connection to Kafka
    CommandLineParser parser = new BasicParser();
    Options options = getCommonRequiredOptions();
    CommandLine cmd = parser.parse(options, args);
    String visibility = getVisibility(cmd);

    if (visibility == null) {
        System.out.println("visibility: null");
    } else {
        System.out.println("visibility: '" + visibility + "'");
    }

    // create the producer and consumer KafkaDataStore objects
    Map<String, String> dsConf = getKafkaDataStoreConf(cmd);
    System.out.println("KDS config: " + dsConf);
    dsConf.put("isProducer", "true");
    DataStore producerDS = DataStoreFinder.getDataStore(dsConf);
    dsConf.put("isProducer", "false");
    DataStore consumerDS = DataStoreFinder.getDataStore(dsConf);

    // verify that we got back our KafkaDataStore objects properly
    if (producerDS == null) {
        throw new Exception("Null producer KafkaDataStore");
    }
    if (consumerDS == null) {
        throw new Exception("Null consumer KafkaDataStore");
    }

    // create the schema which creates a topic in Kafka
    // (only needs to be done once)
    final String sftName = "KafkaStressTest";
    final String sftSchema = "name:String,age:Int,step:Double,lat:Double,dtg:Date,*geom:Point:srid=4326";
    SimpleFeatureType sft = SimpleFeatureTypes.createType(sftName, sftSchema);
    // set zkPath to default if not specified
    String zkPath = (dsConf.get(ZK_PATH) == null) ? "/geomesa/ds/kafka" : dsConf.get(ZK_PATH);
    SimpleFeatureType preppedOutputSft = KafkaDataStoreHelper.createStreamingSFT(sft, zkPath);
    // only create the schema if it hasn't been created already
    if (!Arrays.asList(producerDS.getTypeNames()).contains(sftName))
        producerDS.createSchema(preppedOutputSft);

    System.out.println("Register KafkaDataStore in GeoServer (Press enter to continue)");
    System.in.read();

    // the live consumer must be created before the producer writes features
    // in order to read streaming data.
    // i.e. the live consumer will only read data written after its instantiation
    SimpleFeatureStore producerFS = (SimpleFeatureStore) producerDS.getFeatureSource(sftName);
    SimpleFeatureSource consumerFS = consumerDS.getFeatureSource(sftName);

    // creates and adds SimpleFeatures to the producer every 1/5th of a second
    System.out.println("Writing features to Kafka... refresh GeoServer layer preview to see changes");

    SimpleFeatureBuilder builder = new SimpleFeatureBuilder(sft);

    Integer numFeats = getLoad(cmd);

    System.out.println("Building a list of " + numFeats + " SimpleFeatures.");
    List<SimpleFeature> features = IntStream.range(1, numFeats)
            .mapToObj(i -> createFeature(builder, i, visibility)).collect(Collectors.toList());

    // set variables to estimate feature production rate
    Long startTime = null;
    Long featuresSinceStartTime = 0L;
    int cycle = 0;
    int cyclesToSkip = 50000 / numFeats; // collect enough features
                                         // to get an accurate rate estimate

    while (true) {
        // write features
        features.forEach(feat -> {
            try {
                DefaultFeatureCollection featureCollection = new DefaultFeatureCollection();
                featureCollection.add(feat);
                producerFS.addFeatures(featureCollection);
            } catch (Exception e) {
                System.out.println("Caught an exception while writing features.");
                e.printStackTrace();
            }
            updateFeature(feat);
        });

        // count features written
        Integer consumerSize = consumerFS.getFeatures().size();
        cycle++;
        featuresSinceStartTime += consumerSize;
        System.out.println("At " + new Date() + " wrote " + consumerSize + " features");

        // if we've collected enough features, calculate the rate
        if (cycle >= cyclesToSkip || startTime == null) {
            Long endTime = System.currentTimeMillis();
            if (startTime != null) {
                Long diffTime = endTime - startTime;
                Double rate = (featuresSinceStartTime.doubleValue() * 1000.0) / diffTime.doubleValue();
                System.out.printf("%.1f feats/sec (%d/%d)\n", rate, featuresSinceStartTime, diffTime);
            }
            cycle = 0;
            startTime = endTime;
            featuresSinceStartTime = 0L;
        }
    }
}

From source file:org.jetbrains.webdemo.executors.JunitExecutor.java

public static void main(String[] args) {
    try {
        JUnitCore jUnitCore = new JUnitCore();
        jUnitCore.addListener(new MyRunListener());
        List<Class> classes = getAllClassesFromTheDir(new File(args[0]));
        for (Class cl : classes) {
            boolean hasTestMethods = false;
            for (Method method : cl.getMethods()) {
                if (method.isAnnotationPresent(Test.class)) {
                    hasTestMethods = true;
                    break;
                }
            }
            if (!hasTestMethods)
                continue;

            Request request = Request.aClass(cl);
            jUnitCore.run(request);
        }
        try {
            ObjectMapper objectMapper = new ObjectMapper();
            SimpleModule module = new SimpleModule();
            module.addSerializer(Throwable.class, new ThrowableSerializer());
            module.addSerializer(junit.framework.ComparisonFailure.class,
                    new JunitFrameworkComparisonFailureSerializer());
            module.addSerializer(org.junit.ComparisonFailure.class, new OrgJunitComparisonFailureSerializer());
            objectMapper.registerModule(module);
            System.setOut(standardOutput);

            Map<String, List<TestRunInfo>> groupedTestResults = new HashMap<>();
            for (TestRunInfo testRunInfo : output) {
                if (!groupedTestResults.containsKey(testRunInfo.className)) {
                    groupedTestResults.put(testRunInfo.className, new ArrayList<TestRunInfo>());
                }
                groupedTestResults.get(testRunInfo.className).add(testRunInfo);
            }

            System.out.print(objectMapper.writeValueAsString(groupedTestResults));
        } catch (IOException e) {
            e.printStackTrace();
        }
    } catch (Throwable e) {
        System.setOut(standardOutput);
        System.out.print("[\"");
        e.printStackTrace();
        System.out.print("\"]");
    }
}

From source file:net.roboconf.iaas.openstack.IaasOpenstack.java

public static void main(String args[]) throws Exception {

    Map<String, String> conf = new HashMap<String, String>();

    java.util.Properties p = new java.util.Properties();
    p.load(new java.io.FileReader(args[0]));

    // copy every property from the file into the configuration map
    for (Object name : p.keySet()) {
        conf.put(name.toString(), p.get(name).toString());
    }
    // conf.put("openstack.computeUrl", "http://localhost:8888/v2");

    IaasOpenstack iaas = new IaasOpenstack();
    iaas.setIaasProperties(conf);

    String machineImageId = conf.get("openstack.image");
    String channelName = "test";
    String applicationName = "roboconf";
    String ipMessagingServer = "localhost";
    String serverId = iaas.createVM(machineImageId, ipMessagingServer, channelName, applicationName);
    /*Thread.sleep(25000);
    iaas.terminateVM(serverId);*/
}