Example usage for java.util Collections emptyMap

List of usage examples for java.util Collections emptyMap

Introduction

On this page you can find usage examples for java.util.Collections.emptyMap().

Prototype

@SuppressWarnings("unchecked")
public static final <K, V> Map<K, V> emptyMap() 

Document

Returns an empty map (immutable).
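
The snippet below is a minimal standalone sketch (not taken from the sources listed under Usage) illustrating the two points in the documentation above: the type parameters are inferred at the call site, and the returned map rejects modification. The class name EmptyMapDemo is just a placeholder.

import java.util.Collections;
import java.util.Map;

public class EmptyMapDemo {
    public static void main(String[] args) {
        // The target type supplies K and V, so no cast or @SuppressWarnings is needed here
        Map<String, Integer> scores = Collections.emptyMap();

        System.out.println(scores.size());         // prints 0
        System.out.println(scores.get("missing")); // prints null

        try {
            scores.put("x", 1); // any mutation attempt fails
        } catch (UnsupportedOperationException e) {
            System.out.println("emptyMap() returns an immutable map");
        }
    }
}

Because the same immutable instance is shared, emptyMap() also avoids allocating a new map for every "no results" return value.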

Usage

From source file:Main.java

public static void main(String[] args) {
    // The legacy constants are raw-typed, so assigning them to parameterized
    // variables would produce unchecked warnings:
    List list = Collections.EMPTY_LIST;
    Set set = Collections.EMPTY_SET;
    Map map = Collections.EMPTY_MAP;

    // The generic factory methods (Java 5+) infer the type arguments instead:
    List<String> s = Collections.emptyList();
    Set<Long> l = Collections.emptySet();
    Map<Date, String> d = Collections.emptyMap();
}

From source file:com.ikanow.aleph2.analytics.spark.assets.SparkSqlTopology.java

public static void main(String[] args)
        throws InstantiationException, IllegalAccessException, ClassNotFoundException {

    try {
        final Tuple2<IAnalyticsContext, Optional<ProcessingTestSpecBean>> aleph2_tuple = SparkTechnologyUtils
                .initializeAleph2(args);
        final IAnalyticsContext context = aleph2_tuple._1();
        final Optional<ProcessingTestSpecBean> test_spec = aleph2_tuple._2();

        // Optional: make really sure the process exits after the specified timeout
        SparkTechnologyUtils.registerTestTimeout(test_spec, () -> {
            System.exit(0);
        });

        final SparkTopologyConfigBean config = BeanTemplateUtils
                .from(context.getJob().map(job -> job.config()).orElse(Collections.emptyMap()),
                        SparkTopologyConfigBean.class)
                .get();
        final String sql_string = Optional.ofNullable(config.script()).orElse("");

        //INFO:
        System.out.println("Starting SparkSqlTopology");

        SparkConf spark_context = new SparkConf().setAppName("SparkSqlTopology");

        final Optional<Double> sub_sample = test_spec
                .map(__ -> Optional.ofNullable(spark_context.getDouble(SUBSAMPLE_TEST, -1)))
                .orElseGet(() -> Optional.ofNullable(spark_context.getDouble(SUBSAMPLE_NORMAL, -1)))
                .filter(d -> d > 0);

        //INFO:
        sub_sample.ifPresent(d -> System.out.println("OPTIONS: sub_sample = " + d));
        test_spec.ifPresent(spec -> System.out
                .println("OPTIONS: test_spec = " + BeanTemplateUtils.toJson(spec).toString()));
        System.out.println("OPTIONS: sql = " + sql_string);

        //DEBUG
        //final boolean test_mode = test_spec.isPresent(); // (serializable thing i can pass into the map)

        try (final JavaSparkContext jsc = new JavaSparkContext(spark_context)) {

            SQLContext sql_context = new SQLContext(jsc);

            final Multimap<String, DataFrame> inputs = SparkTechnologyUtils.buildBatchSparkSqlInputs(context,
                    test_spec, sql_context, Collections.emptySet());

            //INFO
            System.out.println("Registered tables = " + inputs.keySet().toString());

            final DataFrame filtered_df = sql_context.sql(sql_string);
            final String[] columns = filtered_df.columns(); // (have to do this here because columns() depends on transient code)

            final long written = filtered_df.javaRDD().map(row -> {
                final ObjectNode j = _mapper.createObjectNode(); //.put("message", row.toString()); (Don't think we want this now that we're using the columns)
                for (int ii = 0; ii < row.length(); ++ii) {
                    j.set(columns[ii], _mapper.convertValue(row.get(ii), JsonNode.class));
                }
                return context.emitObject(Optional.empty(), context.getJob().get(), Either.left(j),
                        Optional.empty());
            }).count();

            //INFO:
            System.out.println("Wrote: data_objects=" + written);
        }
    } catch (Throwable t) {
        System.out.println(ErrorUtils.getLongForm("ERROR: {0}", t));
    }
}

From source file:fr.inria.atlanmod.kyanos.benchmarks.KyanosMapQueryRenameAllMethods.java

public static void main(String[] args) {
    Options options = new Options();

    Option inputOpt = OptionBuilder.create(IN);
    inputOpt.setArgName("INPUT");
    inputOpt.setDescription("Input Kyanos resource directory");
    inputOpt.setArgs(1);
    inputOpt.setRequired(true);

    Option inClassOpt = OptionBuilder.create(EPACKAGE_CLASS);
    inClassOpt.setArgName("CLASS");
    inClassOpt.setDescription("FQN of EPackage implementation class");
    inClassOpt.setArgs(1);
    inClassOpt.setRequired(true);

    options.addOption(inputOpt);
    options.addOption(inClassOpt);

    CommandLineParser parser = new PosixParser();

    try {
        PersistenceBackendFactoryRegistry.getFactories().put(NeoMapURI.NEO_MAP_SCHEME,
                new MapPersistenceBackendFactory());

        CommandLine commandLine = parser.parse(options, args);

        URI uri = NeoMapURI.createNeoMapURI(new File(commandLine.getOptionValue(IN)));

        Class<?> inClazz = KyanosMapQueryRenameAllMethods.class.getClassLoader()
                .loadClass(commandLine.getOptionValue(EPACKAGE_CLASS));
        inClazz.getMethod("init").invoke(null);

        ResourceSet resourceSet = new ResourceSetImpl();
        resourceSet.getResourceFactoryRegistry().getProtocolToFactoryMap().put(NeoMapURI.NEO_MAP_SCHEME,
                PersistentResourceFactory.eINSTANCE);

        Resource resource = resourceSet.createResource(uri);

        Map<String, Object> loadOpts = new HashMap<String, Object>();
        resource.load(loadOpts);
        String name = UUID.randomUUID().toString();
        {
            LOG.log(Level.INFO, "Start query");
            long begin = System.currentTimeMillis();
            JavaQueries.renameAllMethods(resource, name);
            long end = System.currentTimeMillis();
            resource.save(Collections.emptyMap());
            LOG.log(Level.INFO, "End query");
            LOG.log(Level.INFO, MessageFormat.format("Time spent: {0}", MessageUtil.formatMillis(end - begin)));
        }

        if (resource instanceof PersistentResourceImpl) {
            PersistentResourceImpl.shutdownWithoutUnload((PersistentResourceImpl) resource);
        } else {
            resource.unload();
        }

    } catch (ParseException e) {
        MessageUtil.showError(e.toString());
        MessageUtil.showError("Current arguments: " + Arrays.toString(args));
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("java -jar <this-file.jar>", options, true);
    } catch (Throwable e) {
        MessageUtil.showError(e.toString());
    }
}

From source file:com.ikanow.aleph2.analytics.spark.assets.SparkScalaInterpreterTopology.java

public static void main(String[] args)
        throws InstantiationException, IllegalAccessException, ClassNotFoundException {

    final SetOnce<IBucketLogger> logger = new SetOnce<>();

    try {
        final Tuple2<IAnalyticsContext, Optional<ProcessingTestSpecBean>> aleph2_tuple = SparkTechnologyUtils
                .initializeAleph2(args);
        final IAnalyticsContext context = aleph2_tuple._1();
        final Optional<ProcessingTestSpecBean> test_spec = aleph2_tuple._2();

        logger.set(context.getLogger(context.getBucket()));

        // Optional: make really sure the process exits after the specified timeout
        SparkTechnologyUtils.registerTestTimeout(test_spec, () -> {
            System.exit(0);
        });

        //INFO:
        System.out.println("Starting SparkScalaInterpreterTopology logging=" + logger.optional().isPresent());

        logger.optional().ifPresent(l -> {
            l.inefficientLog(Level.INFO,
                    ErrorUtils.buildSuccessMessage("SparkScalaInterpreterTopology", "main",
                            "Starting SparkScalaInterpreterTopology.{0}",
                            Optionals.of(() -> context.getJob().get().name()).orElse("no_name")));
        });

        final SparkTopologyConfigBean job_config = BeanTemplateUtils
                .from(context.getJob().map(job -> job.config()).orElse(Collections.emptyMap()),
                        SparkTopologyConfigBean.class)
                .get();
        final String scala_script = Optional.ofNullable(job_config.script()).orElse("");

        final String wrapper_script = IOUtils.toString(
                SparkScalaInterpreterTopology.class.getClassLoader().getResourceAsStream("ScriptRunner.scala"),
                "UTF-8");
        final String to_compile = wrapper_script.replace("USER_SCRIPT", scala_script);
        final SparkCompilerService scs = new SparkCompilerService();
        final Tuple2<ClassLoader, Object> o = scs.buildClass(to_compile, "ScriptRunner", logger.optional());

        Thread.currentThread().setContextClassLoader(o._1());

        test_spec.ifPresent(spec -> System.out
                .println("OPTIONS: test_spec = " + BeanTemplateUtils.toJson(spec).toString()));

        SparkConf spark_context = new SparkConf().setAppName("SparkPassthroughTopology");

        final long streaming_batch_interval = (long) spark_context
                .getInt(SparkTopologyConfigBean.STREAMING_BATCH_INTERVAL, 10);

        // MAIN PROCESSING

        final Method m = o._2().getClass().getMethod("runScript", SparkScriptEngine.class);

        //DEBUG
        //final boolean test_mode = test_spec.isPresent(); // (serializable thing i can pass into the map)

        boolean is_streaming = context.getJob().map(j -> j.analytic_type())
                .map(t -> MasterEnrichmentType.streaming == t).orElse(false);
        final Either<JavaSparkContext, JavaStreamingContext> jsc = Lambdas.get(() -> {
            return is_streaming
                    ? Either.<JavaSparkContext, JavaStreamingContext>right(new JavaStreamingContext(
                            spark_context, Durations.seconds(streaming_batch_interval)))
                    : Either.<JavaSparkContext, JavaStreamingContext>left(new JavaSparkContext(spark_context));
        });
        try {
            final JavaSparkContext jsc_batch = jsc.either(l -> l, r -> r.sparkContext());

            final Multimap<String, JavaPairRDD<Object, Tuple2<Long, IBatchRecord>>> inputs = SparkTechnologyUtils
                    .buildBatchSparkInputs(context, test_spec, jsc_batch, Collections.emptySet());

            final Multimap<String, JavaPairDStream<String, Tuple2<Long, IBatchRecord>>> streaming_inputs = jsc
                    .<Multimap<String, JavaPairDStream<String, Tuple2<Long, IBatchRecord>>>>either(
                            l -> HashMultimap
                                    .<String, JavaPairDStream<String, Tuple2<Long, IBatchRecord>>>create(),
                            r -> SparkTechnologyUtils.buildStreamingSparkInputs(context, test_spec, r,
                                    Collections.emptySet()));

            final SparkScriptEngine script_engine_bridge = new SparkScriptEngine(context, inputs,
                    streaming_inputs, test_spec, jsc_batch, jsc.either(l -> null, r -> r), job_config);

            // Add driver and generated JARs to path:
            jsc_batch.addJar(LiveInjector.findPathJar(o._2().getClass()));

            m.invoke(o._2(), script_engine_bridge);

            jsc.either(l -> {
                l.stop();
                return null;
            }, r -> {
                r.stop();
                return null;
            });

            logger.optional().ifPresent(l -> {
                l.inefficientLog(Level.INFO,
                        ErrorUtils.buildSuccessMessage("SparkScalaInterpreterTopology", "main",
                                "Stopping SparkScalaInterpreterTopology.{0}",
                                Optionals.of(() -> context.getJob().get().name()).orElse("no_name")));
            });

            //INFO:
            System.out.println("Finished interpreter");
        } finally {
            jsc.either(l -> {
                l.close();
                return null;
            }, r -> {
                r.close();
                return null;
            });
        }
        logger.optional().ifPresent(Lambdas.wrap_consumer_u(l -> l.flush().get(10, TimeUnit.SECONDS)));
    } catch (Throwable t) {
        logger.optional().ifPresent(l -> {
            l.inefficientLog(Level.ERROR, ErrorUtils.buildSuccessMessage("SparkScalaInterpreterTopology",
                    "main",
                    ErrorUtils.getLongForm("Error executing SparkScalaInterpreterTopology.unknown: {0}", t)));
        });

        System.out.println(ErrorUtils.getLongForm("ERROR: {0}", t));
        logger.optional().ifPresent(Lambdas.wrap_consumer_u(l -> l.flush().get(10, TimeUnit.SECONDS)));
        System.exit(-1);
    }
}

From source file:com.ikanow.aleph2.analytics.spark.assets.SparkJsInterpreterTopology.java

public static void main(String[] args)
        throws InstantiationException, IllegalAccessException, ClassNotFoundException {

    final SetOnce<IBucketLogger> bucket_logger = new SetOnce<>();
    final SetOnce<String> job_name = new SetOnce<>(); // (the string we'll use in logging activities)
    try {
        final Tuple2<IAnalyticsContext, Optional<ProcessingTestSpecBean>> aleph2_tuple = SparkTechnologyUtils
                .initializeAleph2(args);
        final IAnalyticsContext context = aleph2_tuple._1();
        final Optional<ProcessingTestSpecBean> test_spec = aleph2_tuple._2();

        bucket_logger.set(context.getLogger(context.getBucket()));
        job_name.set(context.getJob().map(j -> j.name()).orElse("no_name"));

        // Optional: make really sure the process exits after the specified timeout
        SparkTechnologyUtils.registerTestTimeout(test_spec, () -> {
            System.exit(0);
        });

        final SparkTopologyConfigBean config = BeanTemplateUtils
                .from(context.getJob().map(job -> job.config()).orElse(Collections.emptyMap()),
                        SparkTopologyConfigBean.class)
                .get();

        final String js_script = Optional.ofNullable(config.script()).orElse("");

        //INFO:
        System.out.println("Starting " + job_name.get());

        SparkConf spark_context = new SparkConf().setAppName(job_name.get());

        test_spec.ifPresent(spec -> System.out
                .println("OPTIONS: test_spec = " + BeanTemplateUtils.toJson(spec).toString()));

        try (final JavaSparkContext jsc = new JavaSparkContext(spark_context)) {

            final Multimap<String, JavaPairRDD<Object, Tuple2<Long, IBatchRecord>>> inputs = SparkTechnologyUtils
                    .buildBatchSparkInputs(context, test_spec, jsc, Collections.emptySet());
            final JavaPairRDD<Object, Tuple2<Long, IBatchRecord>> all_inputs = inputs.values().stream()
                    .reduce((acc1, acc2) -> acc1.union(acc2)).orElse(null);

            // Load globals:
            ScriptEngineManager manager = new ScriptEngineManager();
            ScriptEngine engine = manager.getEngineByName("JavaScript");
            engine.put("_a2_global_context", context);
            engine.put("_a2_global_bucket", context.getBucket().get());
            engine.put("_a2_global_job", context.getJob().get());
            engine.put("_a2_global_config",
                    BeanTemplateUtils.configureMapper(Optional.empty()).convertValue(config, JsonNode.class));
            engine.put("_a2_global_mapper", BeanTemplateUtils.configureMapper(Optional.empty()));
            //TODO (until bucket logger is serializable, don't allow anywhere)
            //engine.put("_a2_bucket_logger", bucket_logger.optional().orElse(null));
            engine.put("_a2_enrichment_name", job_name.get());
            engine.put("_a2_spark_inputs", inputs);
            engine.put("_a2_spark_inputs_all", all_inputs);
            engine.put("_a2_spark_context", jsc);

            Stream.concat(config.uploaded_lang_files().stream(),
                    Stream.of("aleph2_sparkjs_globals_before.js", ""))
                    .flatMap(Lambdas.flatWrap_i(import_path -> {
                        try {
                            if (import_path.equals("")) { // also import the user script just before here
                                return js_script;
                            } else
                                return IOUtils.toString(SparkJsInterpreterTopology.class.getClassLoader()
                                        .getResourceAsStream(import_path), "UTF-8");
                        } catch (Throwable e) {
                            bucket_logger.optional()
                                    .ifPresent(l -> l.log(Level.ERROR,
                                            ErrorUtils.lazyBuildMessage(false,
                                                    () -> SparkJsInterpreterTopology.class.getSimpleName(),
                                                    () -> job_name.get() + ".main", () -> null,
                                                    () -> ErrorUtils.get(
                                                            "Error initializing stage {0} (script {1}): {2}",
                                                            job_name.get(), import_path, e.getMessage()),
                                                    () -> ImmutableMap.<String, Object>of("full_error",
                                                            ErrorUtils.getLongForm("{0}", e)))));

                            System.out.println(ErrorUtils.getLongForm("onStageInitialize: {0}", e));
                            throw e; // ignored
                        }
                    })).forEach(Lambdas.wrap_consumer_i(script -> {
                        try {
                            engine.eval(script);
                        } catch (Throwable e) {
                            bucket_logger.optional()
                                    .ifPresent(l -> l.log(Level.ERROR,
                                            ErrorUtils.lazyBuildMessage(false,
                                                    () -> SparkJsInterpreterTopology.class.getSimpleName(),
                                                    () -> job_name.get() + ".main", () -> null,
                                                    () -> ErrorUtils.get(
                                                            "Error initializing stage {0} (main script): {1}",
                                                            job_name.get(), e.getMessage()),
                                                    () -> ImmutableMap.<String, Object>of("full_error",
                                                            ErrorUtils.getLongForm("{0}", e)))));

                            System.out.println(ErrorUtils.getLongForm("onStageInitialize: {0}", e));
                            throw e; // ignored
                        }
                    }));

            jsc.stop();

            //INFO:
            System.out.println("Finished " + job_name.get());
        }
    } catch (Throwable t) {
        System.out.println(ErrorUtils.getLongForm("ERROR: {0}", t));

        bucket_logger.optional().ifPresent(l -> l.log(Level.ERROR, ErrorUtils.lazyBuildMessage(false,
                () -> SparkJsInterpreterTopology.class.getSimpleName()
                        + job_name.optional().map(j -> "." + j).orElse(""),
                () -> job_name.optional().orElse("global") + ".main", () -> null,
                () -> ErrorUtils.get("Error on batch in job {0}: {1}",
                        job_name.optional().orElse("global") + ".main", t.getMessage()),
                () -> ImmutableMap.<String, Object>of("full_error", ErrorUtils.getLongForm("{0}", t)))));
    }
}

From source file:fr.inria.atlanmod.kyanos.benchmarks.KyanosGraphQueryRenameAllMethods.java

public static void main(String[] args) {
    Options options = new Options();

    Option inputOpt = OptionBuilder.create(IN);
    inputOpt.setArgName("INPUT");
    inputOpt.setDescription("Input Kyanos resource directory");
    inputOpt.setArgs(1);
    inputOpt.setRequired(true);

    Option inClassOpt = OptionBuilder.create(EPACKAGE_CLASS);
    inClassOpt.setArgName("CLASS");
    inClassOpt.setDescription("FQN of EPackage implementation class");
    inClassOpt.setArgs(1);
    inClassOpt.setRequired(true);

    Option optFileOpt = OptionBuilder.create(OPTIONS_FILE);
    optFileOpt.setArgName("FILE");
    optFileOpt.setDescription("Properties file holding the options to be used in the Kyanos Resource");
    optFileOpt.setArgs(1);

    options.addOption(inputOpt);
    options.addOption(inClassOpt);
    options.addOption(optFileOpt);

    CommandLineParser parser = new PosixParser();

    try {
        PersistenceBackendFactoryRegistry.getFactories().put(NeoBlueprintsURI.NEO_GRAPH_SCHEME,
                new BlueprintsPersistenceBackendFactory());

        CommandLine commandLine = parser.parse(options, args);

        URI uri = NeoBlueprintsURI.createNeoGraphURI(new File(commandLine.getOptionValue(IN)));

        Class<?> inClazz = KyanosGraphQueryRenameAllMethods.class.getClassLoader()
                .loadClass(commandLine.getOptionValue(EPACKAGE_CLASS));
        inClazz.getMethod("init").invoke(null);

        ResourceSet resourceSet = new ResourceSetImpl();
        resourceSet.getResourceFactoryRegistry().getProtocolToFactoryMap()
                .put(NeoBlueprintsURI.NEO_GRAPH_SCHEME, PersistentResourceFactory.eINSTANCE);

        Resource resource = resourceSet.createResource(uri);

        Map<String, Object> loadOpts = new HashMap<String, Object>();

        if (commandLine.hasOption(OPTIONS_FILE)) {
            Properties properties = new Properties();
            properties.load(new FileInputStream(new File(commandLine.getOptionValue(OPTIONS_FILE))));
            for (final Entry<Object, Object> entry : properties.entrySet()) {
                loadOpts.put((String) entry.getKey(), (String) entry.getValue());
            }
        }
        resource.load(loadOpts);
        String name = UUID.randomUUID().toString();
        {
            LOG.log(Level.INFO, "Start query");
            long begin = System.currentTimeMillis();
            JavaQueries.renameAllMethods(resource, name);
            long end = System.currentTimeMillis();
            resource.save(Collections.emptyMap());
            LOG.log(Level.INFO, "End query");
            LOG.log(Level.INFO, MessageFormat.format("Time spent: {0}", MessageUtil.formatMillis(end - begin)));
        }

        if (resource instanceof PersistentResourceImpl) {
            PersistentResourceImpl.shutdownWithoutUnload((PersistentResourceImpl) resource);
        } else {
            resource.unload();
        }

    } catch (ParseException e) {
        MessageUtil.showError(e.toString());
        MessageUtil.showError("Current arguments: " + Arrays.toString(args));
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("java -jar <this-file.jar>", options, true);
    } catch (Throwable e) {
        MessageUtil.showError(e.toString());
    }
}

From source file:fr.inria.atlanmod.instantiator.SpecimenGenerator.java

public static void main(String[] args) throws GenerationException, IOException {

    Options options = new Options();

    configureOptions(options);

    CommandLineParser parser = new GnuParser();

    try {
        CommandLine commandLine = parser.parse(options, args);

        String metamodel = commandLine.getOptionValue(METAMODEL);
        DefaultModelGenerator modelGen = new DefaultModelGenerator(URI.createFileURI(metamodel));

        if (commandLine.hasOption(ADDITIONAL_METAMODEL)) {
            for (String additionalMetamodel : commandLine.getOptionValues(ADDITIONAL_METAMODEL)) {
                URI additionalMetamodelUri = URI.createFileURI(additionalMetamodel);
                Resource resource = new XMIResourceImpl(additionalMetamodelUri);
                resource.load(Collections.emptyMap());
                registerPackages(resource);
            }
        }

        if (commandLine.hasOption(OUTPUT_DIR)) {
            String outDir = commandLine.getOptionValue(OUTPUT_DIR);
            modelGen.setSamplesPath(Paths.get(outDir));
        } else {
            modelGen.setSamplesPath(Paths.get("."));
        }
        if (commandLine.hasOption(N_MODELS)) {
            int models = ((Number) commandLine.getParsedOptionValue(N_MODELS)).intValue();
            modelGen.setSetSize(new int[] { models });
        } else {
            modelGen.setSetSize(new int[] { 1 });
        }
        if (commandLine.hasOption(SIZE)) {
            long size = ((Number) commandLine.getParsedOptionValue(SIZE)).longValue();
            modelGen.setModelsSize(new long[] { size });
        } else {
            modelGen.setModelsSize(new long[] { 1000 });
        }
        if (commandLine.hasOption(SEED)) {
            long seed = ((Number) commandLine.getParsedOptionValue(SEED)).longValue();
            modelGen.setSeed(seed);
        } else {
            modelGen.setSeed(System.currentTimeMillis());
        }
        modelGen.runGeneration();
    } catch (ParseException e) {
        System.err.println(e.getLocalizedMessage());
        HelpFormatter formatter = new HelpFormatter();
        formatter.setOptionComparator(new OptionComarator<Option>());
        try {
            formatter.setWidth(Math.max(TerminalFactory.get().getWidth(), 80));
        } catch (Throwable t) {
            // Nothing to do...
        }
        formatter.printHelp("java -jar <this-file.jar>", options, true);
    }
}

From source file:Main.java

// Thin wrapper that simply delegates to Collections.emptyMap(), preserving the generic signature.
public static final <K, V> Map<K, V> emptyMap() {
    return Collections.emptyMap();
}

From source file:Main.java

// Returns the shared immutable empty map for an empty input; otherwise delegates to
// copyNullSafeMutableHashMap (defined elsewhere in the same class) and wraps the copy
// in an unmodifiable view.
public static <K, V> Map<K, V> copyNullSafeHashMap(Map<? extends K, ? extends V> map) {
    if (map.isEmpty()) {
        return Collections.emptyMap();
    }
    return Collections.unmodifiableMap(copyNullSafeMutableHashMap(map));
}

From source file:Main.java

public static <K, V> Map<K, V> nonNullMap(final Map<K, V> map) {
    if (map == null) {
        return Collections.emptyMap();
    }

    return map;
}