Example usage for the java.util.HashMap HashMap() constructor

List of usage examples for the java.util.HashMap HashMap() constructor

Introduction

On this page you can find example usages of the java.util.HashMap HashMap() constructor.

Prototype

public HashMap() 

Documentation

Constructs an empty HashMap with the default initial capacity (16) and the default load factor (0.75).
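
To make the defaults concrete, here is a minimal, self-contained sketch of the constructor in use (the class name HashMapDefaultsExample and the sample keys are illustrative, not taken from any of the sources below). With the default settings, the table is first resized once it holds more than 16 * 0.75 = 12 entries.

import java.util.HashMap;
import java.util.Map;

public class HashMapDefaultsExample {
    public static void main(String[] args) {
        // Constructs an empty HashMap with the default initial capacity (16)
        // and the default load factor (0.75).
        Map<String, Integer> counts = new HashMap<>();

        counts.put("alpha", 1);
        counts.put("beta", 2);

        System.out.println(counts.size());                   // prints 2
        System.out.println(counts.getOrDefault("gamma", 0)); // prints 0 for an absent key
    }
}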

Usage

From source file:com.github.fritaly.graphml4j.samples.GradleDependenciesWithGroups.java

public static void main(String[] args) throws Exception {
    if (args.length != 1) {
        System.out
                .println(String.format("%s <output-file>", GradleDependenciesWithGroups.class.getSimpleName()));
        System.exit(1);
    }

    final File file = new File(args[0]);

    System.out.println("Writing GraphML file to " + file.getAbsolutePath() + " ...");

    FileWriter fileWriter = null;
    GraphMLWriter graphWriter = null;
    Reader reader = null;
    LineNumberReader lineReader = null;

    try {
        fileWriter = new FileWriter(file);
        graphWriter = new GraphMLWriter(fileWriter);

        // Customize the rendering of nodes
        final NodeStyle nodeStyle = graphWriter.getNodeStyle();
        nodeStyle.setWidth(250.0f);
        nodeStyle.setHeight(50.0f);

        graphWriter.setNodeStyle(nodeStyle);

        // The dependency graph has been generated by Gradle with the
        // command "gradle dependencies". The output of this command has
        // been saved to a text file which will be parsed to rebuild the
        // dependency graph
        reader = new InputStreamReader(
                GradleDependenciesWithGroups.class.getResourceAsStream("gradle-dependencies.txt"));
        lineReader = new LineNumberReader(reader);

        String line = null;

        // Stack containing the artifacts per depth inside the dependency
        // graph (the topmost dependency is the first one in the stack)
        final Stack<Artifact> stack = new Stack<Artifact>();

        final Map<String, Set<Artifact>> artifactsByGroup = new HashMap<String, Set<Artifact>>();

        // List of parent/child relationships between artifacts
        final List<Relationship> relationships = new ArrayList<Relationship>();

        while ((line = lineReader.readLine()) != null) {
            // Determine the depth of the current dependency inside the
            // graph. The depth can be inferred from the indentation used by
            // Gradle. Each level of depth adds 5 more characters of
            // indentation
            final int initialLength = line.length();

            // Remove the strings used by Gradle to indent dependencies
            line = StringUtils.replace(line, "+--- ", "");
            line = StringUtils.replace(line, "|    ", "");
            line = StringUtils.replace(line, "\\--- ", "");
            line = StringUtils.replace(line, "     ", "");

            // The depth can easily be inferred now
            final int depth = (initialLength - line.length()) / 5;

            // Remove unnecessary artifacts
            while (depth <= stack.size()) {
                stack.pop();
            }

            // Create an artifact from the dependency (group, artifact,
            // version) tuple
            final Artifact artifact = createArtifact(line);

            stack.push(artifact);

            if (stack.size() > 1) {
                // Store the artifact and its parent
                relationships.add(new Relationship(stack.get(stack.size() - 2), artifact));
            }

            if (!artifactsByGroup.containsKey(artifact.group)) {
                artifactsByGroup.put(artifact.group, new HashSet<Artifact>());
            }

            artifactsByGroup.get(artifact.group).add(artifact);
        }

        // Open the graph
        graphWriter.graph();

        final Map<Artifact, String> nodeIdsByArtifact = new HashMap<Artifact, String>();

        // Loop over the groups and generate the associated nodes
        for (String group : artifactsByGroup.keySet()) {
            graphWriter.group(group, true);

            for (Artifact artifact : artifactsByGroup.get(group)) {
                final String nodeId = graphWriter.node(artifact.getLabel());

                nodeIdsByArtifact.put(artifact, nodeId);
            }

            graphWriter.closeGroup();
        }

        // Generate the edges
        for (Relationship relationship : relationships) {
            final String parentId = nodeIdsByArtifact.get(relationship.parent);
            final String childId = nodeIdsByArtifact.get(relationship.child);

            graphWriter.edge(parentId, childId);
        }

        // Close the graph
        graphWriter.closeGraph();

        System.out.println("Done");
    } finally {
        // Calling GraphMLWriter.close() is necessary to dispose the underlying resources.
        // The null checks guard against an exception thrown before a reference was assigned.
        if (graphWriter != null) graphWriter.close();
        if (fileWriter != null) fileWriter.close();
        if (lineReader != null) lineReader.close();
        if (reader != null) reader.close();
    }
}

From source file:com.linecorp.platform.channel.sample.Main.java

public static void main(String[] args) {

    BusinessConnect bc = new BusinessConnect();

    /**
     * Prepare the required channel secret and access token
     */
    String channelSecret = System.getenv("CHANNEL_SECRET");
    String channelAccessToken = System.getenv("CHANNEL_ACCESS_TOKEN");
    if (channelSecret == null || channelSecret.isEmpty() || channelAccessToken == null
            || channelAccessToken.isEmpty()) {
        System.err.println("Error! Environment variable CHANNEL_SECRET and CHANNEL_ACCESS_TOKEN not defined.");
        return;
    }

    port(Integer.valueOf(System.getenv("PORT")));
    staticFileLocation("/public");

    /**
     * Define the callback url path
     */
    post("/events", (request, response) -> {
        String requestBody = request.body();

        /**
         * Verify whether the channel signature is valid or not
         */
        String channelSignature = request.headers("X-LINE-CHANNELSIGNATURE");
        if (channelSignature == null || channelSignature.isEmpty()) {
            response.status(400);
            return "Please provide valid channel signature and try again.";
        }
        if (!bc.validateBCRequest(requestBody, channelSecret, channelSignature)) {
            response.status(401);
            return "Invalid channel signature.";
        }

        /**
         * Parse the http request body
         */
        ObjectMapper objectMapper = new ObjectMapper();
        objectMapper.configure(MapperFeature.USE_WRAPPER_NAME_AS_PROPERTY_NAME, true);
        objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
        objectMapper.configure(SerializationFeature.WRITE_NULL_MAP_VALUES, false);
        objectMapper.setAnnotationIntrospector(new JaxbAnnotationIntrospector(TypeFactory.defaultInstance()));
        objectMapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);

        EventList events;
        try {
            events = objectMapper.readValue(requestBody, EventList.class);
        } catch (IOException e) {
            response.status(400);
            return "Invalid request body.";
        }

        ApiHttpClient apiHttpClient = new ApiHttpClient(channelAccessToken);

        /**
         * Process the incoming messages/operations one by one
         */
        List<String> toUsers;
        for (Event event : events.getResult()) {
            switch (event.getEventType()) {
            case Constants.EventType.MESSAGE:
                toUsers = new ArrayList<>();
                toUsers.add(event.getContent().getFrom());

                // @TODO: We strongly suggest you modify this to process the incoming message/operation asynchronously
                bc.sendTextMessage(toUsers, "You said: " + event.getContent().getText(), apiHttpClient);
                break;
            case Constants.EventType.OPERATION:
                if (event.getContent().getOpType() == Constants.OperationType.ADDED_AS_FRIEND) {
                    String newFriend = event.getContent().getParams().get(0);
                    Profile profile = bc.getProfile(newFriend, apiHttpClient);
                    String displayName = profile == null ? "Unknown" : profile.getDisplayName();
                    toUsers = new ArrayList<>();
                    toUsers.add(newFriend);
                    bc.sendTextMessage(toUsers, displayName + ", welcome! Glad to be your friend!", apiHttpClient);
                    Connection connection = DatabaseUrl.extract().getConnection();
                    toUsers = bc.getFriends(newFriend, connection);
                    if (toUsers.size() > 0) {
                        bc.sendTextMessage(toUsers, displayName + " just join us, let's welcome him/her!",
                                apiHttpClient);
                    }
                    bc.addFriend(newFriend, displayName, connection);
                    if (connection != null) {
                        connection.close();
                    }
                }
                break;
            default:
                // Unknown type?
            }
        }
        return "Events received successfully.";
    });

    get("/", (request, response) -> {
        Map<String, Object> attributes = new HashMap<>();
        attributes.put("message", "Hello World!");
        return new ModelAndView(attributes, "index.ftl");
    }, new FreeMarkerEngine());
}

From source file:com.alibaba.jstorm.daemon.supervisor.SandBoxMaker.java

public static void main(String[] args) {
    Map<Object, Object> conf = Utils.readStormConfig();

    conf.put("java.sandbox.enable", Boolean.valueOf(true));

    SandBoxMaker maker = new SandBoxMaker(conf);

    try {
        System.out.println("sandboxPolicy:" + maker.sandboxPolicy("simple", new HashMap<String, String>()));
    } catch (IOException e) {
        e.printStackTrace();
    }
}

From source file:com.pinterest.rocksplicator.Participant.java

/**
 * Start a Helix participant.
 * @param args command line parameters
 */
public static void main(String[] args) throws Exception {
    org.apache.log4j.Logger.getRootLogger().setLevel(Level.WARN);
    BasicConfigurator
            .configure(new ConsoleAppender(new PatternLayout("%d{HH:mm:ss.SSS} [%t] %-5p %30.30c - %m%n")));
    CommandLine cmd = processCommandLineArgs(args);
    final String zkConnectString = cmd.getOptionValue(zkServer);
    final String clusterName = cmd.getOptionValue(cluster);
    final String domainName = cmd.getOptionValue(domain);
    final String host = cmd.getOptionValue(hostAddress);
    final String port = cmd.getOptionValue(hostPort);
    final String stateModelType = cmd.getOptionValue(stateModel);
    final String postUrl = cmd.getOptionValue(configPostUrl);
    final String instanceName = host + "_" + port;

    LOG.error("Starting participant with ZK:" + zkConnectString);
    Participant participant = new Participant(zkConnectString, clusterName, instanceName, stateModelType,
            Integer.parseInt(port), postUrl);

    HelixAdmin helixAdmin = new ZKHelixAdmin(zkConnectString);
    HelixConfigScope scope = new HelixConfigScopeBuilder(HelixConfigScope.ConfigScopeProperty.PARTICIPANT)
            .forCluster(clusterName).forParticipant(instanceName).build();
    Map<String, String> properties = new HashMap<String, String>();
    properties.put("DOMAIN", domainName + ",instance=" + instanceName);
    helixAdmin.setConfig(scope, properties);

    Thread.currentThread().join();
}

From source file:jena.RuleMap.java

/**
 * General command line utility to process one RDF file into another
 * by application of a set of forward chaining rules. 
 * <pre>
 * Usage:  RuleMap [-il inlang] [-ol outlang] [-d] rulefile infile (- for stdin)
 * </pre>
 */
public static void main(String[] args) {
    try {

        // Parse the command line
        String usage = "Usage:  RuleMap [-il inlang] [-ol outlang] [-d] rulefile infile (- for stdin)";
        final CommandLineParser parser = new DefaultParser();
        Options options = new Options().addOption("il", "inputLang", true, "input language")
                .addOption("ol", "outputLang", true, "output language").addOption("d", "Deductions only?");
        CommandLine cl = parser.parse(options, args);
        final List<String> filenameArgs = cl.getArgList();
        if (filenameArgs.size() != 2) {
            System.err.println(usage);
            System.exit(1);
        }

        String inLang = cl.getOptionValue("inputLang");
        String fname = filenameArgs.get(1);
        Model inModel = null;
        if (fname.equals("-")) {
            inModel = ModelFactory.createDefaultModel();
            inModel.read(System.in, null, inLang);
        } else {
            inModel = FileManager.get().loadModel(fname, inLang);
        }

        String outLang = cl.hasOption("outputLang") ? cl.getOptionValue("outputLang") : "N3";

        boolean deductionsOnly = cl.hasOption('d');

        // Fetch the rule set and create the reasoner
        BuiltinRegistry.theRegistry.register(new Deduce());
        Map<String, String> prefixes = new HashMap<>();
        List<Rule> rules = loadRules(filenameArgs.get(0), prefixes);
        Reasoner reasoner = new GenericRuleReasoner(rules);

        // Process
        InfModel infModel = ModelFactory.createInfModel(reasoner, inModel);
        infModel.prepare();
        infModel.setNsPrefixes(prefixes);

        // Output
        try (PrintWriter writer = new PrintWriter(System.out)) {
            if (deductionsOnly) {
                Model deductions = infModel.getDeductionsModel();
                deductions.setNsPrefixes(prefixes);
                deductions.setNsPrefixes(inModel);
                deductions.write(writer, outLang);
            } else {
                infModel.write(writer, outLang);
            }
        }
    } catch (Throwable t) {
        System.err.println("An error occured: \n" + t);
        t.printStackTrace();
    }
}

From source file:com.curecomp.primefaces.migrator.PrimefacesMigration.java

public static void main(String[] args) throws Exception {
    // Let's use some colors :)
    //        AnsiConsole.systemInstall();
    CommandLineParser cliParser = new BasicParser();
    CommandLine cli = null;
    try {
        cli = cliParser.parse(OPTIONS, args);
    } catch (ParseException e) {
        printHelp();
    }
    if (!cli.hasOption("s")) {
        printHelp();
    }

    String sourcePattern;
    if (cli.hasOption("p")) {
        sourcePattern = cli.getOptionValue("p");
    } else {
        sourcePattern = DEFAULT_SOURCE_PATTERN;
    }

    String defaultAnswer;
    if (cli.hasOption("default-answer")) {
        defaultAnswer = cli.getOptionValue("default-answer");
    } else {
        defaultAnswer = DEFAULT_DEFAULT_PROMPT_ANSWER;
    }

    boolean defaultAnswerYes = defaultAnswer.equalsIgnoreCase("y");
    boolean quiet = cli.hasOption("q");
    boolean testWrite = cli.hasOption("t");
    Path sourceDirectory = Paths.get(cli.getOptionValue("s")).toAbsolutePath();
    // Since we use IO we will have some blocking threads hanging around
    int threadCount = Runtime.getRuntime().availableProcessors() * 2;
    ThreadPoolExecutor threadPool = new ThreadPoolExecutor(threadCount, threadCount, 0L, TimeUnit.MILLISECONDS,
            new LinkedBlockingQueue<>());
    BlockingQueue<WidgetVarLocation> foundUsages = new LinkedBlockingQueue<>();
    BlockingQueue<WidgetVarLocation> unusedOrAmbiguous = new LinkedBlockingQueue<>();
    BlockingQueue<WidgetVarLocation> skippedUsages = new LinkedBlockingQueue<>();
    List<Future<?>> futures = new ArrayList<>();

    findWidgetVars(sourceDirectory, sourcePattern, threadPool).forEach(widgetVarLocation -> {
        // We can't really find usages of widget vars that use EL expressions :(
        if (widgetVarLocation.widgetVar.contains("#")) {
            unusedOrAmbiguous.add(widgetVarLocation);
            return;
        }

        try {
            FileActionVisitor visitor = new FileActionVisitor(sourceDirectory, sourcePattern,
                    sourceFile -> futures.add(threadPool.submit((Callable<?>) () -> {
                        findWidgetVarUsages(sourceFile, widgetVarLocation, foundUsages, skippedUsages,
                                unusedOrAmbiguous);
                        return null;
                    })));

            Files.walkFileTree(sourceDirectory, visitor);
        } catch (IOException ex) {
            throw new RuntimeException(ex);
        }
    });

    awaitAll(futures);

    new TreeSet<>(skippedUsages).forEach(widgetUsage -> {
        int startIndex = widgetUsage.columnNr;
        int endIndex = startIndex + widgetUsage.widgetVar.length();
        String relativePath = widgetUsage.location.toAbsolutePath().toString()
                .substring(sourceDirectory.toString().length());
        String previous = replace(widgetUsage.line, startIndex, endIndex,
                Ansi.ansi().bold().fg(Ansi.Color.RED).a(widgetUsage.widgetVar).reset().toString());
        System.out.println("Skipped " + relativePath + " at line " + widgetUsage.lineNr + " and col "
                + widgetUsage.columnNr + " for widgetVar '" + widgetUsage.widgetVar + "'");
        System.out.println("\t" + previous);
    });

    Map<WidgetVarLocation, List<WidgetVarLocation>> written = new HashMap<>();

    new TreeSet<>(foundUsages).forEach(widgetUsage -> {
        WidgetVarLocation key = new WidgetVarLocation(null, widgetUsage.location, widgetUsage.lineNr, -1, null);
        List<WidgetVarLocation> writtenList = written.get(key);
        int existing = writtenList == null ? 0 : writtenList.size();
        int startIndex = widgetUsage.columnNr;
        int endIndex = startIndex + widgetUsage.widgetVar.length();
        String relativePath = widgetUsage.location.toAbsolutePath().toString()
                .substring(sourceDirectory.toString().length());
        String next = replace(widgetUsage.line, startIndex, endIndex, Ansi.ansi().bold().fg(Ansi.Color.RED)
                .a("PF('" + widgetUsage.widgetVar + "')").reset().toString());
        System.out
                .println(relativePath + " at line " + widgetUsage.lineNr + " and col " + widgetUsage.columnNr);
        System.out.println("\t" + next);
        System.out.print("Replace (Y/N)? [" + (defaultAnswerYes ? "Y" : "N") + "]: ");

        String input;

        if (quiet) {
            input = "";
            System.out.println();
        } else {
            try {
                do {
                    input = in.readLine();
                } while (input != null && !input.isEmpty() && !"y".equalsIgnoreCase(input)
                        && !"n".equalsIgnoreCase(input));
            } catch (IOException ex) {
                throw new RuntimeException(ex);
            }
        }

        if (input == null) {
            System.out.println("Aborted!");
        } else if (input.isEmpty() && defaultAnswerYes || !input.isEmpty() && !"n".equalsIgnoreCase(input)) {
            System.out.println("Replaced!");
            System.out.print("\t");
            if (writtenList == null) {
                writtenList = new ArrayList<>();
                written.put(key, writtenList);
            }

            writtenList.add(widgetUsage);
            List<String> lines;
            try {
                lines = Files.readAllLines(widgetUsage.location);
            } catch (IOException ex) {
                throw new RuntimeException(ex);
            }

            try (OutputStream os = testWrite ? new ByteArrayOutputStream()
                    : Files.newOutputStream(widgetUsage.location);
                    PrintWriter pw = new PrintWriter(new OutputStreamWriter(os, StandardCharsets.UTF_8))) {
                String line;

                for (int i = 0; i < lines.size(); i++) {
                    int lineNr = i + 1;
                    line = lines.get(i);

                    if (lineNr == widgetUsage.lineNr) {
                        int begin = widgetUsage.columnNr + (testWrite ? 0 : existing * 6);
                        int end = begin + widgetUsage.widgetVar.length();
                        String newLine = replace(line, begin, end, "PF('" + widgetUsage.widgetVar + "')",
                                false);

                        if (testWrite) {
                            System.out.println(newLine);
                        } else {
                            pw.println(newLine);
                        }
                    } else {
                        if (!testWrite) {
                            pw.println(line);
                        }
                    }
                }
            } catch (IOException ex) {
                throw new RuntimeException(ex);
            }
        } else {
            System.out.println("Skipped!");
        }
    });

    new TreeSet<>(unusedOrAmbiguous).forEach(widgetUsage -> {
        int startIndex = widgetUsage.columnNr;
        int endIndex = startIndex + widgetUsage.widgetVar.length();
        String relativePath = widgetUsage.location.toAbsolutePath().toString()
                .substring(sourceDirectory.toString().length());
        String previous = replace(widgetUsage.line, startIndex, endIndex,
                Ansi.ansi().bold().fg(Ansi.Color.RED).a(widgetUsage.widgetVar).reset().toString());
        System.out.println("Skipped unused or ambiguous " + relativePath + " at line " + widgetUsage.lineNr
                + " and col " + widgetUsage.columnNr);
        System.out.println("\t" + previous);
    });

    threadPool.shutdown();
}

From source file:com.sxj.spring.modules.mapper.JsonMapper.java

public static void main(String... args) throws JsonProcessingException, IOException {
    JsonMapper mapper = JsonMapper.nonEmptyMapper();
    CJ30 cj30 = new CJ30();
    Map<String, String> name = cj30.getName();
    name.put("2", "");
    name.put("3", "?");
    name.put("4", "");
    name.put("5", "");
    name.put("7", "");
    name.put("8", "?");
    Map<String, Map<String, Data>> data = cj30.getData();
    Map<String, Data> subject1 = new HashMap<String, Data>();
    Data d1 = new Data();
    d1.setDate("01/14");
    d1.setMin(41700);
    d1.setMax(41780);
    d1.setAverage(41740);
    subject1.put("1421164800", d1);

    Data d2 = new Data();
    d2.setDate("01/15");
    d2.setMin(41550);
    d2.setMax(41620);
    d2.setAverage(41585);
    subject1.put("1421251200", d2);
    data.put("2", subject1);

    Map<String, Data> subject2 = new HashMap<String, Data>();
    Data d3 = new Data();
    d3.setDate("01/14");
    d3.setMin(12450);
    d3.setMax(12490);
    d3.setAverage(12470);
    subject2.put("1421164800", d3);

    Data d4 = new Data();
    d4.setDate("01/15");
    d4.setMin(12730);
    d4.setMax(12770);
    d4.setAverage(12750);
    subject2.put("1421251200", d4);
    data.put("3", subject2);
    String json = mapper.toJson(cj30);
    System.out.println(json);

    FileReader reader = new FileReader(new File("E:\\cj30.js"));
    char[] buffer = new char[1024];
    int read = 0;
    StringBuilder sb = new StringBuilder();
    while ((read = reader.read(buffer)) > 0) {
        sb.append(buffer, 0, read);
    }
    CJ30 fromJson = mapper.fromJson(sb.toString(), CJ30.class);
    System.out.println(fromJson.getName());
}

From source file:ensai.RMQtestKmeans.java

public static void main(String[] args) throws Exception {

    //TODO javadoc: explain this metadata block and why the !file.exists() check is needed
    Metadata metadata = new Metadata();

    File file = new File(cheminConfiguration);

    if (!file.exists()) {
        metadata.readData();
    }

    /**
     * Creation of the map holding, for each sensor, the number of clusters for the KMeans and the anomaly-detection threshold.
     * @see Metadata
     */
    Map<Integer, Tuple2<Integer, Double>> mapSensors_ClustersSeuils = new HashMap<Integer, Tuple2<Integer, Double>>();
    mapSensors_ClustersSeuils = metadata.load();

    /**
     * Creation of the Flink streaming environment
     * @see https://ci.apache.org/projects/flink/flink-docs-release-1.2/dev/api_concepts.html
     */
    final StreamExecutionEnvironment environnementStream = StreamExecutionEnvironment.getExecutionEnvironment();

    //TODO javadoc: explain the approach; explain localhost, the port, and the guest/guest credentials
    final RMQConnectionConfig connectionConfig = new RMQConnectionConfig.Builder().setHost("localhost")
            .setPort(5672).setUserName("guest").setPassword("guest").setVirtualHost("/").build();

    //TODO rename "coucou" throughout the system and, in the javadoc below, explain true and the simple schema
    /**
     * Creation of the data stream from the RabbitMQ source
     * connectionConfig : the RabbitMQ connection configuration
     * coucou : the name of the RabbitMQ queue to consume
     * true : use correlation ids (see the inline comment below)
     * @see StreamExecutionEnvironment.addSource()
     */
    final DataStream<String> streamFromRabbitMQ = environnementStream
            .addSource(new RMQSource<String>(connectionConfig, "coucou", true, // use correlation ids; can be false if only at-least-once is required
                    new SimpleStringSchema())); // deserialization schema to turn messages into Java objects
    /**
     * Application of the LineSplitter flatMaps
     * @see LineSplitter
     */
    DataStream<Tuple3<String, String, String>> streamAfterSplit = streamFromRabbitMQ
            .flatMap(new LineSplitter1()).flatMap(new LineSplitter2()).flatMap(new LineSplitter3())
            .flatMap(new LineSplitter4()).flatMap(new LineSplitter5());

    /**
     * Application of the InputAnalyze flatMap
     * @see InputAnalyze
     */
    DataStream<Tuple4<Integer, Integer, Float, String>> streamAfterAnalyze = streamAfterSplit
            .flatMap(new InputAnalyze(mapSensors_ClustersSeuils));

    //streamAfterAnalyze.print();
    /**
     * Application of keyBy to partition the stream by the value of the (field0, field1) pair of each tuple.
     * Set-up of a sliding count window of size countWindowSize; each new input triggers the apply function.
     * @see https://ci.apache.org/projects/flink/flink-docs-release-1.2/dev/api_concepts.html
     * 
     * Application of the KMean function
     * @see KMean
     */
    DataStream<ArrayList<Tuple5<Integer, Integer, Float, String, Integer>>> streamAfterKMean = streamAfterAnalyze
            .keyBy(0, 1).countWindow(countWindowSize, 1).apply(new KMean(mapSensors_ClustersSeuils));

    /**
     * Application of the Markov flatMap
     * @see Markov
     */
    DataStream<Tuple5<Integer, Integer, Float, String, Double>> streamAfterMarkov = streamAfterKMean
            .flatMap(new Markov(mapSensors_ClustersSeuils));

    /**
     * Application of the FilterAnomalies filter
     * @see FilterAnomalies
     */
    DataStream<Tuple5<Integer, Integer, Float, String, Double>> streamAfterFilterAnomalies = streamAfterMarkov
            .filter(new FilterAnomalies(mapSensors_ClustersSeuils));

    //TODO 
    streamAfterFilterAnomalies.print();

    /**
     * Application of the SortieTransformation flatMap
     * @see SortieTransformation
     */
    DataStream<String> streamAfterSortieTransformation = streamAfterFilterAnomalies
            .flatMap(new SortieTransformation());

    //TODO remove this print
    //streamAfterSortieTransformation.print();

    /**
     * Creation of the stream sink to RabbitMQ
     * connectionConfig : the RabbitMQ connection configuration
     * voila : the name of the RabbitMQ queue to send messages to
     * @see StreamExecutionEnvironment.addSink()
     */
    streamAfterSortieTransformation.addSink(new RMQSink<String>(connectionConfig, // config for the RabbitMQ connection
            "voila", // name of the RabbitMQ queue to send messages to
            new SimpleStringSchema()));

    /**
     * Execution of the streaming environment
     * @see https://ci.apache.org/projects/flink/flink-docs-release-1.2/dev/api_concepts.html
     */
    environnementStream.execute("DEBS 2017 ENSAI SOLUTION");
}

From source file:com.fengduo.bee.commons.util.ObjectUtils.java

public static void main(String[] args) {
    Map<String, String> kvMap = new HashMap<String, String>();
    kvMap.put(" _zll_ ", " zsdfasdaaaaaaaaaaa ");
    kvMap.put("                               zxc ", " zsdfasdaaqrwerqweraaaaa ");
    kvMap.put(" _qwl", " rrrrrrrrrrrrrrrrrrrr  ");
    kvMap.put("12safa", "qweweq");
    trim(kvMap);
    System.out.println(kvMap);
}

From source file:io.anserini.index.IndexGov2.java

@SuppressWarnings("static-access")
public static void main(String[] args) throws Exception {
    Options options = new Options();
    options.addOption(
            OptionBuilder.withArgName("path").hasArg().withDescription("input data path").create(INPUT_OPTION));
    options.addOption(OptionBuilder.withArgName("path").hasArg().withDescription("output index path")
            .create(INDEX_OPTION));
    options.addOption(OptionBuilder.withArgName("num").hasArg().withDescription("number of indexer threads")
            .create(THREADS_OPTION));
    options.addOption(OptionBuilder.withArgName("num").hasArg()
            .withDescription("max number of documents to index (-1 to index everything)")
            .create(DOCLIMIT_OPTION));

    options.addOption(POSITIONS_OPTION, false, "index positions");
    options.addOption(OPTIMIZE_OPTION, false, "merge all index segments");

    CommandLine cmdline = null;
    CommandLineParser parser = new GnuParser();
    try {
        cmdline = parser.parse(options, args);
    } catch (ParseException exp) {
        System.err.println("Error parsing command line: " + exp.getMessage());
        System.exit(-1);
    }

    if (!cmdline.hasOption(INPUT_OPTION) || !cmdline.hasOption(INDEX_OPTION)
            || !cmdline.hasOption(THREADS_OPTION)) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.setWidth(100);
        formatter.printHelp(IndexGov2.class.getCanonicalName(), options);
        System.exit(-1);
    }

    final String dirPath = cmdline.getOptionValue(INDEX_OPTION);
    final String dataDir = cmdline.getOptionValue(INPUT_OPTION);
    final int docCountLimit = cmdline.hasOption(DOCLIMIT_OPTION)
            ? Integer.parseInt(cmdline.getOptionValue(DOCLIMIT_OPTION))
            : -1;
    final int numThreads = Integer.parseInt(cmdline.getOptionValue(THREADS_OPTION));

    final boolean doUpdate = cmdline.hasOption(UPDATE_OPTION);
    final boolean positions = cmdline.hasOption(POSITIONS_OPTION);
    final boolean optimize = cmdline.hasOption(OPTIMIZE_OPTION);

    final Analyzer a = new EnglishAnalyzer();
    final TrecContentSource trecSource = createGov2Source(dataDir);
    final Directory dir = FSDirectory.open(Paths.get(dirPath));

    LOG.info("Index path: " + dirPath);
    LOG.info("Doc limit: " + (docCountLimit == -1 ? "all docs" : "" + docCountLimit));
    LOG.info("Threads: " + numThreads);
    LOG.info("Positions: " + positions);
    LOG.info("Optimize (merge segments): " + optimize);

    final IndexWriterConfig config = new IndexWriterConfig(a);

    if (doUpdate) {
        config.setOpenMode(IndexWriterConfig.OpenMode.APPEND);
    } else {
        config.setOpenMode(IndexWriterConfig.OpenMode.CREATE);
    }

    final IndexWriter writer = new IndexWriter(dir, config);
    Gov2IndexThreads threads = new Gov2IndexThreads(writer, positions, trecSource, numThreads, docCountLimit);
    LOG.info("Indexer: start");

    final long t0 = System.currentTimeMillis();

    threads.start();

    while (!threads.done()) {
        Thread.sleep(100);
    }
    threads.stop();

    final long t1 = System.currentTimeMillis();
    LOG.info("Indexer: indexing done (" + (t1 - t0) / 1000.0 + " sec); total " + writer.maxDoc() + " docs");
    if (!doUpdate && docCountLimit != -1 && writer.maxDoc() != docCountLimit) {
        throw new RuntimeException("w.maxDoc()=" + writer.maxDoc() + " but expected " + docCountLimit);
    }
    if (threads.failed.get()) {
        throw new RuntimeException("exceptions during indexing");
    }

    final long t2 = System.currentTimeMillis();

    final Map<String, String> commitData = new HashMap<String, String>();
    commitData.put("userData", "multi");
    writer.setCommitData(commitData);
    writer.commit();
    final long t3 = System.currentTimeMillis();
    LOG.info("Indexer: commit multi (took " + (t3 - t2) / 1000.0 + " sec)");

    if (optimize) {
        LOG.info("Indexer: merging all segments");
        writer.forceMerge(1);
        final long t4 = System.currentTimeMillis();
        LOG.info("Indexer: segments merged (took " + (t4 - t3) / 1000.0 + " sec)");
    }

    LOG.info("Indexer: at close: " + writer.segString());
    final long tCloseStart = System.currentTimeMillis();
    writer.close();
    LOG.info("Indexer: close took " + (System.currentTimeMillis() - tCloseStart) / 1000.0 + " sec");
    dir.close();
    final long tFinal = System.currentTimeMillis();
    LOG.info("Indexer: finished (" + (tFinal - t0) / 1000.0 + " sec)");
    LOG.info("Indexer: net bytes indexed " + threads.getBytesIndexed());
    LOG.info("Indexer: " + (threads.getBytesIndexed() / 1024. / 1024. / 1024. / ((tFinal - t0) / 3600000.))
            + " GB/hour plain text");
}