Example usage for com.google.common.collect Maps newHashMap

Introduction

On this page you can find example usage for com.google.common.collect Maps.newHashMap.

Prototype

public static <K, V> HashMap<K, V> newHashMap() 

Document

Creates a mutable, empty HashMap instance.
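For orientation before the project-level examples below, here is a minimal, self-contained sketch (the class and variable names are illustrative). It shows the factory method next to the plain constructor it wraps; since Java 7, the diamond operator gives the same type inference without Guava, so the two maps behave identically.

import java.util.HashMap;
import java.util.Map;

import com.google.common.collect.Maps;

public class NewHashMapExample {
    public static void main(String[] args) {
        // Type arguments K and V are inferred from the assignment target.
        Map<String, Integer> counts = Maps.newHashMap();
        counts.put("example", 1);

        // Equivalent without Guava (Java 7+ diamond operator).
        Map<String, Integer> counts2 = new HashMap<>();
        counts2.put("example", 1);

        System.out.println(counts.equals(counts2)); // prints: true
    }
}

The factory method predates the diamond operator and mainly saved repeating the type arguments; it remains useful for code bases that target older language levels or prefer a uniform Maps.* creation style.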

Usage

From source file:org.apache.niolex.common.primitive.IntMapMain.java

/**
 * @param args
 */
public static void main(String[] args) {
    StopWatch w = new StopWatch(1);
    w.begin(true);
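    // Benchmark 1: boxed Integer/Boolean pairs in a java.util.HashMap.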
    for (int i = 0; i < 1000; ++i) {
        Map<Integer, Boolean> map = Maps.newHashMap();
        Stop s = w.start();
        for (int j = 0; j < 10000; ++j) {
            map.put(j, j % 2 == 0);
        }
        for (int j = 0; j < 10000; ++j) {
            Check.isTrue(map.get(j).booleanValue() == (j % 2 == 0));
        }
        s.stop();
    }
    w.done();
    w.print();
    // -- Benchmark 2: Trove's primitive int -> char hash map.
    w.begin(true);
    for (int i = 0; i < 1000; ++i) {
        TIntCharMap primary = new TIntCharHashMap();
        Stop s = w.start();
        for (int j = 0; j < 10000; ++j) {
            primary.put(j, (char) ((j % 2) + '0'));
        }
        for (int j = 0; j < 10000; ++j) {
            Check.isTrue(primary.get(j) == (char) ((j % 2) + '0'));
        }
        s.stop();
    }
    w.done();
    w.print();
    // -- Benchmark 3: fastutil's primitive int -> char hash map.
    w.begin(true);
    for (int i = 0; i < 1000; ++i) {
        Int2CharMap int2c = new Int2CharOpenHashMap();
        Stop s = w.start();
        for (int j = 0; j < 10000; ++j) {
            int2c.put(j, (char) ((j % 2) + '0'));
        }
        for (int j = 0; j < 10000; ++j) {
            Check.isTrue(int2c.get(j) == (char) ((j % 2) + '0'));
        }
        s.stop();
    }
    w.done();
    w.print();
}

From source file:org.auraframework.tools.definition.ApplicationSerializer.java

public static void main(String[] args) {
    ContextService contextService = Aura.getContextService();
    DefinitionService definitionService = Aura.getDefinitionService();
    DefDescriptor<ApplicationDef> appDesc = definitionService.getDefDescriptor("aura:test",
            ApplicationDef.class);
    contextService.startContext(Mode.PROD, Format.HTML, Authentication.AUTHENTICATED, appDesc);

    SerializationService serializationService = Aura.getSerializationService();
    try {
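        // Attributes for the serializer; "outputPath" points at the directory
        // the OFFLINE_HTML writer should write into.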
        Map<String, Object> atts = Maps.newHashMap();
        File outputDir = new File(
                "/home/dpletter/dev/lumen-beta/aura-integration-test/src/test/resources/htdocs/app");
        if (outputDir.exists()) {
            IOUtil.delete(outputDir);
        }
        atts.put("outputPath", outputDir.getAbsolutePath());

        serializationService.write(appDesc.getDef(), atts, ApplicationDef.class, System.out, "OFFLINE_HTML");
    } catch (Throwable e) {
        e.printStackTrace();
        System.exit(1);
    }

    contextService.endContext();
}

From source file:cuchaz.enigma.MainFormatConverter.java

public static void main(String[] args) throws Exception {

    System.out.println("Getting field types from jar...");

    JarFile jar = new JarFile(System.getProperty("user.home") + "/.minecraft/versions/1.8/1.8.jar");
    Map<String, Type> fieldTypes = Maps.newHashMap();
    for (CtClass c : JarClassIterator.classes(jar)) {
        for (CtField field : c.getDeclaredFields()) {
            FieldEntry fieldEntry = EntryFactory.getFieldEntry(field);
            fieldTypes.put(getFieldKey(fieldEntry), moveClasssesOutOfDefaultPackage(fieldEntry.getType()));
        }
    }

    System.out.println("Reading mappings...");

    File fileMappings = new File("../Enigma Mappings/1.8.mappings");
    MappingsReader mappingsReader = new MappingsReader();
    Mappings mappings = mappingsReader.read(new FileReader(fileMappings));

    System.out.println("Updating field types...");

    for (ClassMapping classMapping : mappings.classes()) {
        updateFieldsInClass(fieldTypes, classMapping);
    }

    System.out.println("Saving mappings...");

    try (FileWriter writer = new FileWriter(fileMappings)) {
        new MappingsWriter().write(writer, mappings);
    }

    System.out.println("Done!");
}

From source file:net.sourceforge.docfetcher.enums.MsgMigrator.java

public static void main(String[] args) throws Exception {
    File oldTransDir = new File("dev/old-translations-from-1.0.3");
    final String enPropName = "Resource.properties";

    Properties oldEnProp = CharsetDetectorHelper.load(new File(oldTransDir, enPropName));
    List<File> oldPropFiles = Arrays.asList(Util.listFiles(oldTransDir, new FilenameFilter() {
        public boolean accept(File dir, String name) {
            return !name.equals(enPropName);
        }
    }));

    final Map<Properties, File> propToFileMap = Maps.newHashMap();

    List<Properties> oldProps = Lists.transform(oldPropFiles, new Function<File, Properties>() {
        public Properties apply(File file) {
            try {
                Properties prop = CharsetDetectorHelper.load(file);
                propToFileMap.put(prop, file);
                return prop;
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        }
    });

    StringBuilder sb0 = new StringBuilder();
    for (Msg msg : Msg.values()) {
        String key = ConfLoader.convert(msg.name(), true);
        String value = ConfLoader.convert(msg.get(), false);
        String comments = msg.getComment();

        if (!comments.isEmpty())
            sb0.append("# " + comments + Util.LS);
        sb0.append(key + "=" + value);
        sb0.append(Util.LS);
    }
    File enOutFile = new File(Util.TEMP_DIR, enPropName);
    Files.write(sb0.toString(), enOutFile, Charsets.UTF_8);
    Util.println("File written: " + enOutFile.getPath());

    for (Properties oldProp : oldProps) {
        StringBuilder sb = new StringBuilder();
        for (Msg msg : Msg.values()) {
            String key = msg.name();

            String enOldValue = oldEnProp.getProperty(key);
            if (enOldValue == null) // New key?
                continue;
            else if (!enOldValue.equals(msg.get())) // Changed value?
                continue;

            String value = oldProp.getProperty(key);
            if (value == null)
                value = enOldValue;
            else if (value.equals("$TODO$"))
                continue;

            key = ConfLoader.convert(key, true);
            value = ConfLoader.convert(value, false);
            sb.append(key + "=" + value);
            sb.append(Util.LS);
        }

        String filename = propToFileMap.get(oldProp).getName();
        File outFile = new File(Util.TEMP_DIR, filename);
        Files.write(sb.toString(), outFile, Charsets.UTF_8);
        Util.println("File written: " + outFile.getPath());
    }
}

From source file:org.carrot2.examples.clustering.UsingComponentSuites.java

public static void main(String[] args) throws Exception {
    @SuppressWarnings("unchecked")
    final Controller controller = ControllerFactory.createCachingPooling(IDocumentSource.class);

    // Initialization-time attributes that will apply to all components.
    final Map<String, Object> initAttributes = Maps.newHashMap();

    // Prepare the resource lookup facade. We will use the suites directory
    // and class path resources.
    final ResourceLookup resourceLookup = new ResourceLookup(new DirLocator(new File("suites")),
            new ContextClassLoaderLocator());

    // We know we'll be using Bing, so set up its access key.
    // Use your own ID here!
    Bing3WebDocumentSourceDescriptor.attributeBuilder(initAttributes).appid(BingKeyAccess.getKey());

    // We'll read the component suite definition from an XML stream.
    // IResource is an abstraction layer over resources in Carrot2.
    IResource suiteXml = resourceLookup.getFirst("suite-examples.xml");

    // Deserialize the component suite definition.
    final ProcessingComponentSuite suite = ProcessingComponentSuite.deserialize(suiteXml, resourceLookup);

    // Initialize the controller with the suite. All components from the suite
    // will be available for processing within this controller.
    controller.init(initAttributes, suite.getComponentConfigurations());

    // From the suite definition, you can get the document sources and clustering
    // algorithm descriptors.
    final List<DocumentSourceDescriptor> sources = suite.getSources();
    final List<String> sourceIds = Lists.transform(sources,
            ProcessingComponentDescriptor.ProcessingComponentDescriptorToId.INSTANCE);
    System.out.println("Found " + sourceIds.size() + " document sources: " + sourceIds);

    final List<ProcessingComponentDescriptor> algorithms = suite.getAlgorithms();
    final List<String> algorithmIds = Lists.transform(algorithms,
            ProcessingComponentDescriptor.ProcessingComponentDescriptorToId.INSTANCE);
    System.out.println("Found " + algorithmIds.size() + " clutering algorithms: " + algorithmIds + "\n\n");

    // Run at most two algorithms on at most two sources.
    for (int s = 0; s < Math.min(sourceIds.size(), 2); s++) {
        for (int a = 0; a < Math.min(algorithmIds.size(), 2); a++) {
            // You can retrieve some metadata about the components, such as
            // human-readable label, from their descriptors.
            System.out.println("Querying " + sources.get(s).getLabel() + ", clustering with "
                    + algorithms.get(a).getLabel());

            // As usual, we pass attributes for processing
            final Map<String, Object> attributes = Maps.newHashMap();
            CommonAttributesDescriptor.attributeBuilder(attributes).query("data mining");

            // Pass component ids to the controller to perform processing
            final ProcessingResult result = controller.process(attributes, sourceIds.get(s),
                    algorithmIds.get(a));
            ConsoleFormatter.displayClusters(result.getClusters());
            System.out.println();
        }
    }
}

From source file:org.opennms.newts.gsod.Web.java

public static void main(String... args) {

    staticFileLocation("/static");

    get(new VelocityRoute("/stations") {

        @Override
        public Object handle(Request request, Response response) {
            Map<String, Object> model = Maps.newHashMap();
            model.put("stationsMap", STATION_NAMES);
            return modelAndView(model, "index.wm");
        }
    });

    get(new VelocityRoute("/summer88") {

        @Override
        public Object handle(Request arg0, Response arg1) {
            Map<String, Object> model = Maps.newHashMap();
            model.put("stationIds", STATION_IDS);
            return modelAndView(model, "summer.wm");
        }
    });

    get(new VelocityRoute("/stations/:stationName") {

        @Override
        public Object handle(Request request, Response response) {

            String stationName = request.params(":stationName");
            String id = STATION_IDS.get(stationName);

            if (id == null) {
                halt(404, "No such station");
            }

            Map<String, String> model = Maps.newHashMap();
            model.put("location", STATION_NAMES.get(stationName));
            model.put("id", id);
            model.put("start", request.queryParams("start"));
            model.put("end", request.queryParams("end"));
            model.put("resolution", request.queryParams("resolution"));

            return modelAndView(model, "station.wm");
        }
    });

}

From source file:org.carrot2.examples.clustering.UsingCustomLexicalResources.java

public static void main(String[] args) {
    @SuppressWarnings("unchecked")
    final Controller controller = ControllerFactory.createCachingPooling(IDocumentSource.class);

    // We will pass our custom resource locator at initialization time. There is
    // a variety of implementations of the IResourceLocator interface; we will use
    // an explicit filesystem folder in the current working directory.
    File resourcesDir = new File("resources");
    ResourceLookup resourceLookup = new ResourceLookup(new DirLocator(resourcesDir));

    Map<String, Object> attrs = Maps.newHashMap();

    // Note that we tell the linguistic component to merge all lexical resources;
    // this is the default setting and it usually helps with multilingual content.
    DefaultLexicalDataFactoryDescriptor.attributeBuilder(attrs).mergeResources(true);
    LexicalDataLoaderDescriptor.attributeBuilder(attrs).resourceLookup(resourceLookup);

    controller.init(attrs);

    // Cluster some data with Lingo and STC.
    clusterAndDisplayClusters(controller, LingoClusteringAlgorithm.class);
    clusterAndDisplayClusters(controller, STCClusteringAlgorithm.class);
}

From source file:zookeeper.example.discovery.DiscoveryExample.java

public static void main(String[] args) throws Exception {
    // This method is scaffolding to get the example up and running

    TestingServer server = new TestingServer();
    CuratorFramework client = null;
    ServiceDiscovery<InstanceDetails> serviceDiscovery = null;
    Map<String, ServiceProvider<InstanceDetails>> providers = Maps.newHashMap();
    try {
        client = CuratorFrameworkFactory.newClient(server.getConnectString(),
                new ExponentialBackoffRetry(1000, 3));
        client.start();

        JsonInstanceSerializer<InstanceDetails> serializer = new JsonInstanceSerializer<InstanceDetails>(
                InstanceDetails.class);
        serviceDiscovery = ServiceDiscoveryBuilder.builder(InstanceDetails.class).client(client).basePath(PATH)
                .serializer(serializer).build();
        serviceDiscovery.start();

        processCommands(serviceDiscovery, providers, client);
    } finally {
        for (ServiceProvider<InstanceDetails> cache : providers.values()) {
            Closeables.closeQuietly(cache);
        }

        Closeables.closeQuietly(serviceDiscovery);
        Closeables.closeQuietly(client);
        Closeables.closeQuietly(server);
    }
}

From source file:org.apache.streams.datasift.example.DatasiftConsoleElasticsearch.java

public static void main(String[] args) {
    LOGGER.info(StreamsConfigurator.config.toString());

    Config elasticsearch = StreamsConfigurator.config.getConfig("elasticsearch");
    ElasticsearchWriterConfiguration elasticsearchWriterConfiguration = ElasticsearchConfigurator
            .detectWriterConfiguration(elasticsearch);

    Map<String, Object> streamConfig = Maps.newHashMap();
    streamConfig.put(LocalStreamBuilder.TIMEOUT_KEY, 20 * 60 * 1000 * 1000);

    StreamBuilder builder = new LocalStreamBuilder(100, streamConfig);

    ConsolePersistReader consolePersistReader = new ConsolePersistReader();
    DatasiftTypeConverterProcessor datasiftTypeConverter = new DatasiftTypeConverterProcessor(Activity.class);
    RegexMentionsExtractor regexMentionsExtractor = new RegexMentionsExtractor();
    ElasticsearchPersistWriter writer = new ElasticsearchPersistWriter(elasticsearchWriterConfiguration);

    builder.newPerpetualStream("console", consolePersistReader);
    builder.addStreamsProcessor("converter", datasiftTypeConverter, 1, "console");
    builder.addStreamsProcessor("RegexMentionsExtractor", regexMentionsExtractor, 2, "converter");
    builder.addStreamsPersistWriter(ElasticsearchPersistWriter.STREAMS_ID, writer, 1, "RegexMentionsExtractor");
    builder.start();

}

From source file:org.carrot2.examples.clustering.UsingCustomLanguageModel.java

public static void main(String[] args) {
    @SuppressWarnings("unchecked")
    final Controller controller = ControllerFactory.createCachingPooling(IDocumentSource.class);

    // We will pass our custom language model element factory classes as
    // initialization-time attributes. This is preferable to passing them as
    // processing-time attributes, because instances created at initialization
    // time will be reused for all further requests.
    Map<String, Object> attrs = Maps.newHashMap();
    BasicPreprocessingPipelineDescriptor.attributeBuilder(attrs).stemmerFactory(CustomStemmerFactory.class)
            .tokenizerFactory(CustomTokenizerFactory.class).lexicalDataFactory(CustomLexicalDataFactory.class);
    controller.init(attrs);

    // Cluster some data with Lingo and STC. Notice how the cluster quality degrades
    // when the stop word list is empty (especially for STC).
    clusterAndDisplayClusters(controller, LingoClusteringAlgorithm.class);
    clusterAndDisplayClusters(controller, STCClusteringAlgorithm.class);
}