List of usage examples for com.google.common.collect.ImmutableMap.of
public static <K, V> ImmutableMap<K, V> of(K k1, V v1)
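Before the project examples below, here is a minimal, self-contained sketch of the single-entry overload. The class name, key, and value are illustrative only and do not come from any of the source files listed on this page.

import com.google.common.collect.ImmutableMap;

public class ImmutableMapOfDemo {
    public static void main(String[] args) {
        // of(K k1, V v1) returns an immutable map containing exactly one entry.
        ImmutableMap<String, Integer> portByService = ImmutableMap.of("http", 80);

        System.out.println(portByService.get("http"));       // 80
        System.out.println(portByService.containsKey("ftp")); // false

        // The returned map rejects mutation at runtime.
        try {
            portByService.put("https", 443);
        } catch (UnsupportedOperationException expected) {
            System.out.println("ImmutableMap.of produces a read-only map");
        }
    }
}

The examples that follow mostly use this overload to pass a small, fixed configuration map (for instance a single template or property binding) into a bootstrap or query call.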
From source file:rapture.server.rest.RestServer.java
public static void main(String[] args) {
    try {
        Kernel.initBootstrap(ImmutableMap.of("STD", ConfigLoader.getConf().StandardTemplate), RestServer.class, false);
        RaptureAppService.setupApp("RestServer");
    } catch (RaptureException e) {
        log.error("Failed to start RestServer", e);
        return;
    }
    new RestServer().run();
}
From source file:rapture.server.RaptureAPIServer.java
/**
 * @param args
 */
public static void main(String[] args) {
    // Give -v flag or --version to print out version numbers and quit
    if (args.length > 0) {
        if (args[0].equals("-v") || args[0].equalsIgnoreCase("--version")) {
            System.out.println("Version : \n" + Kernel.versions());
            return;
        }
    }
    try {
        Kernel.initBootstrap(ImmutableMap.of("STD", ConfigLoader.getConf().StandardTemplate), RaptureAPIServer.class, false);
        RaptureAppService.setupApp("RaptureAPIServer");
        RaptureAPIServer s = new RaptureAPIServer();
        s.startApiWebServer();
        s.addHooks();
        String categories = ConfigLoader.getConf().Categories;
        logger.info(String.format("Categories are %s", categories));
        String[] cats = categories.split(",");
        for (String cat : cats) {
            logger.info("Category " + cat);
            Kernel.setCategoryMembership(cat);
        }
        Map<String, Object> capabilities = getCapabilities(categories);
        StatusHelper.setStatusAndCapability(ContextFactory.getKernelUser(), "RUNNING", capabilities, Kernel.getRunner());
        StatusHelper.startStatusUpdating(ContextFactory.getKernelUser(), Kernel.getRunner());
        logger.debug("Updated status");
        s.joinServer();
    } catch (RaptureException e) {
        logger.error("Error when running Rapture API Server - " + ExceptionToString.format(e));
    }
    logger.info("Rapture API Server exited");
}
From source file:watchserver.server.WatchServer.java
public static void main(String[] args) {
    try {
        Kernel.initBootstrap(ImmutableMap.of("STD", ConfigLoader.getConf().StandardTemplate), WatchServer.class, false);
        RaptureAppService.setupApp("WatchServer");
    } catch (RaptureException e1) {
        log.error("Failed to start WatchServer with " + ExceptionToString.format(e1));
        System.exit(-1);
    }
    try {
        rawConfig = Kernel.getSys().retrieveSystemConfig(ContextFactory.getKernelUser(), "CONFIG", CONFIG_URI);
    } catch (Exception e) {
        log.error("Failed to load configuration from " + CONFIG_URI, e);
        System.exit(-1);
    }
    log.info("------------------------------------------------------");
    try {
        JSONParser jsonParser = new JSONParser();
        JSONObject jsonObject = (JSONObject) jsonParser.parse(rawConfig);
        JSONArray sources = (JSONArray) jsonObject.get("sources");
        Iterator<?> it = sources.iterator();
        ObjectMapper mapper = new ObjectMapper();
        while (it.hasNext()) {
            JSONObject source = (JSONObject) it.next();
            switch (SourceType.valueOf(source.get("type").toString().toUpperCase())) {
            case LOCAL:
                LocalConfig localconfig = mapper.readValue(source.get("config").toString(), LocalConfig.class);
                localconfig.setSourceType(SourceType.LOCAL);
                localconfigs.add(localconfig);
                log.info("Loaded config for Local folder: " + localconfig.getFolder());
                break;
            case FTP:
                FTPConfig ftpconfig = mapper.readValue(source.get("config").toString(), FTPConfig.class);
                ftpconfig.setSourceType(SourceType.FTP);
                ftpconfigs.add(ftpconfig);
                log.info("Loaded config for FTP folder: " + ftpconfig.getFolder() + ftpconfig.getConnection().getPathtomonitor());
                break;
            case SFTP:
                log.info("SFTP NooP");
                break;
            default:
                throw new Exception("SourceType " + source.get("type").toString().toUpperCase() + " not supported!");
            }
        }
    } catch (ConfigException e) {
        log.error("ConfigException handling configuration from " + CONFIG_URI, e);
        System.exit(-1);
    } catch (JsonParseException e) {
        log.error("Json Parse Exception handling configuration from " + CONFIG_URI, e);
        System.exit(-1);
    } catch (JsonMappingException e) {
        log.error("Json Mapping Exception handling configuration from " + CONFIG_URI, e);
        System.exit(-1);
    } catch (IOException e) {
        log.error("IO Exception handling configuration from " + CONFIG_URI, e);
        System.exit(-1);
    } catch (Exception e) {
        log.error("Exception handling configuration from " + CONFIG_URI, e);
        System.exit(-1);
    }
    log.info("------------------------------------------------------");
    for (LocalConfig config : localconfigs) {
        new WatchLocalRunner(config).startThread();
        log.info("Local Directory Monitor setup for " + config.getFolder());
    }
    for (FTPConfig config : ftpconfigs) {
        new WatchFTPRunner(config).startThread();
        log.info("FTP Monitor setup for " + config.getFolder());
    }
    log.info("------------------------------------------------------");
    log.info("WatchServer started and ready to process events.");
}
From source file:es.upm.oeg.tools.rdfshapes.utils.CardinalityTemplateGenerator.java
public static void main(String[] args) throws Exception {
    OntModel model = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM_RULE_INF, ModelFactory.createDefaultModel());
    model.read("http://dublincore.org/2012/06/14/dcelements.ttl");
    String endpoint = "http://infra2.dia.fi.upm.es:8899/sparql";
    List<String> classList = Files.readAllLines(Paths.get(classListPath), Charset.defaultCharset());
    String classPropertyQueryString = readFile(classPropertyQueryPath, Charset.defaultCharset());
    String propertyCardinalityQueryString = readFile(propertyCardinalityQueryPath, Charset.defaultCharset());
    String individualCountQueryString = readFile(individualCountQueryPath, Charset.defaultCharset());
    // Create the Excel workbook and sheet
    XSSFWorkbook wb = new XSSFWorkbook();
    XSSFSheet sheet = wb.createSheet("Cardinality");
    int currentExcelRow = 0;
    int classStartRow = 0;
    for (String clazz : classList) {
        Map<String, String> litMap = new HashMap<>();
        Map<String, String> iriMap = ImmutableMap.of("class", clazz);
        String queryString = bindQueryString(individualCountQueryString, ImmutableMap.of(IRI_BINDINGS, iriMap, LITERAL_BINDINGS, litMap));
        int individualCount;
        List<RDFNode> c = executeQueryForList(queryString, endpoint, "c");
        if (c.size() == 1) {
            individualCount = c.get(0).asLiteral().getInt();
        } else {
            continue;
        }
        // If there are zero individuals, continue
        if (individualCount == 0) {
            throw new IllegalStateException("Check whether " + classListPath + " and " + endpoint + " match.");
        }
        // System.out.println("***");
        // System.out.println("### **" + clazz + "** (" + individualCount + ")");
        // System.out.println("***");
        // System.out.println();
        classStartRow = currentExcelRow;
        XSSFRow row = sheet.createRow(currentExcelRow);
        XSSFCell cell = row.createCell(0);
        cell.setCellValue(clazz);
        cell.getCellStyle().setAlignment(CellStyle.ALIGN_CENTER);
        queryString = bindQueryString(classPropertyQueryString, ImmutableMap.of(IRI_BINDINGS, iriMap, LITERAL_BINDINGS, litMap));
        List<RDFNode> nodeList = executeQueryForList(queryString, endpoint, "p");
        for (RDFNode property : nodeList) {
            if (property.isURIResource()) {
                String propertyURI = property.asResource().getURI();
                // System.out.println("* " + propertyURI);
                // System.out.println();
                XSSFRow propertyRow = sheet.getRow(currentExcelRow);
                if (propertyRow == null) {
                    propertyRow = sheet.createRow(currentExcelRow);
                }
                currentExcelRow++;
                XSSFCell propertyCell = propertyRow.createCell(1);
                propertyCell.setCellValue(propertyURI);
                // System.out.println("| Min Card. |Max Card. |");
                // System.out.println("|---|---|");
                // System.out.println("| ? | ? |");
                // System.out.println();
            }
        }
        // System.out.println("class start: " + classStartRow + ", class end: " + (currentExcelRow - 1));
        // We have finished writing properties of one class, now it's time to merge the cells
        int classEndRow = currentExcelRow - 1;
        if (classStartRow < classEndRow) {
            sheet.addMergedRegion(new CellRangeAddress(classStartRow, classEndRow, 0, 0));
        }
    }
    String filename = "test.xls";
    FileOutputStream fileOut = new FileOutputStream(filename);
    wb.write(fileOut);
    fileOut.close();
}
From source file:es.upm.oeg.tools.rdfshapes.utils.CadinalityResultGenerator.java
public static void main(String[] args) throws Exception {
    String endpoint = "http://3cixty.eurecom.fr/sparql";
    List<String> classList = Files.readAllLines(Paths.get(classListPath), Charset.defaultCharset());
    String classPropertyQueryString = readFile(classPropertyQueryPath, Charset.defaultCharset());
    String propertyCardinalityQueryString = readFile(propertyCardinalityQueryPath, Charset.defaultCharset());
    String individualCountQueryString = readFile(individualCountQueryPath, Charset.defaultCharset());
    DecimalFormat df = new DecimalFormat("0.0000");
    // Create the Excel workbook and sheet
    XSSFWorkbook wb = new XSSFWorkbook();
    XSSFSheet sheet = wb.createSheet("Cardinality");
    int currentExcelRow = 0;
    int classStartRow = 0;
    for (String clazz : classList) {
        Map<String, String> litMap = new HashMap<>();
        Map<String, String> iriMap = ImmutableMap.of("class", clazz);
        String queryString = bindQueryString(individualCountQueryString, ImmutableMap.of(IRI_BINDINGS, iriMap, LITERAL_BINDINGS, litMap));
        int individualCount;
        List<RDFNode> c = executeQueryForList(queryString, endpoint, "c");
        if (c.size() == 1) {
            individualCount = c.get(0).asLiteral().getInt();
        } else {
            continue;
        }
        // If there are zero individuals, continue
        if (individualCount == 0) {
            throw new IllegalStateException("Check whether " + classListPath + " and " + endpoint + " match.");
        }
        // System.out.println("***");
        // System.out.println("### **" + clazz + "** (" + individualCount + ")");
        // System.out.println("***");
        // System.out.println();
        classStartRow = currentExcelRow;
        XSSFRow row = sheet.createRow(currentExcelRow);
        XSSFCell cell = row.createCell(0);
        cell.setCellValue(clazz);
        cell.getCellStyle().setAlignment(CellStyle.ALIGN_CENTER);
        queryString = bindQueryString(classPropertyQueryString, ImmutableMap.of(IRI_BINDINGS, iriMap, LITERAL_BINDINGS, litMap));
        List<RDFNode> nodeList = executeQueryForList(queryString, endpoint, "p");
        for (RDFNode property : nodeList) {
            if (property.isURIResource()) {
                DescriptiveStatistics stats = new DescriptiveStatistics();
                String propertyURI = property.asResource().getURI();
                // System.out.println("* " + propertyURI);
                // System.out.println();
                XSSFRow propertyRow = sheet.getRow(currentExcelRow);
                if (propertyRow == null) {
                    propertyRow = sheet.createRow(currentExcelRow);
                }
                currentExcelRow++;
                XSSFCell propertyCell = propertyRow.createCell(1);
                propertyCell.setCellValue(propertyURI);
                Map<String, String> litMap2 = new HashMap<>();
                Map<String, String> iriMap2 = ImmutableMap.of("class", clazz, "p", propertyURI);
                queryString = bindQueryString(propertyCardinalityQueryString, ImmutableMap.of(IRI_BINDINGS, iriMap2, LITERAL_BINDINGS, litMap2));
                List<Map<String, RDFNode>> solnMaps = executeQueryForList(queryString, endpoint, ImmutableSet.of("card", "count"));
                int sum = 0;
                List<CardinalityCount> cardinalityList = new ArrayList<>();
                if (solnMaps.size() > 0) {
                    for (Map<String, RDFNode> soln : solnMaps) {
                        int count = soln.get("count").asLiteral().getInt();
                        int card = soln.get("card").asLiteral().getInt();
                        for (int i = 0; i < count; i++) {
                            stats.addValue(card);
                        }
                        CardinalityCount cardinalityCount = new CardinalityCount(card, count, (((double) count) / individualCount) * 100);
                        cardinalityList.add(cardinalityCount);
                        sum += count;
                    }
                    // Check for zero cardinality instances
                    int count = individualCount - sum;
                    if (count > 0) {
                        for (int i = 0; i < count; i++) {
                            stats.addValue(0);
                        }
                        CardinalityCount cardinalityCount = new CardinalityCount(0, count, (((double) count) / individualCount) * 100);
                        cardinalityList.add(cardinalityCount);
                    }
                }
                Map<Integer, Double> cardMap = new HashMap<>();
                for (CardinalityCount count : cardinalityList) {
                    cardMap.put(count.getCardinality(), count.getPrecentage());
                }
                XSSFCell instanceCountCell = propertyRow.createCell(2);
                instanceCountCell.setCellValue(individualCount);
                XSSFCell minCell = propertyRow.createCell(3);
                minCell.setCellValue(stats.getMin());
                XSSFCell maxCell = propertyRow.createCell(4);
                maxCell.setCellValue(stats.getMax());
                XSSFCell p1 = propertyRow.createCell(5);
                p1.setCellValue(stats.getPercentile(1));
                XSSFCell p99 = propertyRow.createCell(6);
                p99.setCellValue(stats.getPercentile(99));
                XSSFCell mean = propertyRow.createCell(7);
                mean.setCellValue(df.format(stats.getMean()));
                for (int i = 0; i < 21; i++) {
                    XSSFCell dataCell = propertyRow.createCell(8 + i);
                    Double percentage = cardMap.get(i);
                    if (percentage != null) {
                        dataCell.setCellValue(df.format(percentage));
                    } else {
                        dataCell.setCellValue(0);
                    }
                }
                // System.out.println("| Min Card. |Max Card. |");
                // System.out.println("|---|---|");
                // System.out.println("| ? | ? |");
                // System.out.println();
            }
        }
        // System.out.println("class start: " + classStartRow + ", class end: " + (currentExcelRow - 1));
        // We have finished writing properties of one class, now it's time to merge the cells
        int classEndRow = currentExcelRow - 1;
        if (classStartRow < classEndRow) {
            sheet.addMergedRegion(new CellRangeAddress(classStartRow, classEndRow, 0, 0));
        }
    }
    String filename = "3cixty.xls";
    FileOutputStream fileOut = new FileOutputStream(filename);
    wb.write(fileOut);
    fileOut.close();
}
From source file:rapture.server.RaptureWebServer.java
/**
 * @param args
 */
public static void main(String[] args) {
    // Give -v flag or --version to print out version numbers and quit
    if (args.length > 0) {
        if (args[0].equals("-v") || args[0].equalsIgnoreCase("--version")) {
            System.out.println("Version : \n" + Kernel.versions());
            return;
        }
    }
    try {
        Kernel.initBootstrap(ImmutableMap.of("STD", ConfigLoader.getConf().StandardTemplate), RaptureWebServer.class, false);
        RaptureAppService.setupApp("RaptureWebServer");
        RaptureWebServer s = new RaptureWebServer();
        String categories = ConfigLoader.getConf().Categories;
        logger.info(String.format("Categories are %s", categories));
        String[] cats = categories.split(",");
        for (String cat : cats) {
            logger.info("Category " + cat);
            Kernel.setCategoryMembership(cat);
        }
        s.startApiWebServer();
        s.startWebServer();
        s.addHooks();
        Map<String, Object> capabilities = getCapabilities(categories);
        StatusHelper.setStatusAndCapability(ContextFactory.getKernelUser(), "RUNNING", capabilities, Kernel.getRunner());
        StatusHelper.startStatusUpdating(ContextFactory.getKernelUser(), Kernel.getRunner());
        logger.debug("Updated status");
        s.joinServer();
    } catch (RaptureException e) {
        logger.error("Error when running Rapture - " + ExceptionToString.format(e));
    }
    logger.info("Application Rapture exited");
}
From source file:io.prestosql.plugin.thrift.integration.ThriftQueryRunner.java
public static void main(String[] args) throws Exception {
    Logging.initialize();
    Map<String, String> properties = ImmutableMap.of("http-server.http.port", "8080");
    ThriftQueryRunnerWithServers queryRunner = (ThriftQueryRunnerWithServers) createThriftQueryRunner(3, 3, true, properties);
    Thread.sleep(10);
    Logger log = Logger.get(ThriftQueryRunner.class);
    log.info("======== SERVER STARTED ========");
    log.info("\n====\n%s\n====", queryRunner.getCoordinator().getBaseUrl());
}
From source file:es.upm.oeg.tools.rdfshapes.patterns.DatatypeObjectPropertyPatterns.java
public static void main(String[] args) throws Exception {
    String endpoint = "http://3cixty.eurecom.fr/sparql";
    List<String> classList = Files.readAllLines(Paths.get(classListPath), Charset.defaultCharset());
    String classPropertyQueryString = readFile(classPropertyQueryPath, Charset.defaultCharset());
    String propertyCardinalityQueryString = readFile(propertyCardinalityQueryPath, Charset.defaultCharset());
    String individualCountQueryString = readFile(individualCountQueryPath, Charset.defaultCharset());
    String objectCountQueryString = readFile(objectCountQueryPath, Charset.defaultCharset());
    String tripleCountQueryString = readFile(tripleCountQueryPath, Charset.defaultCharset());
    String literalCountQueryString = readFile(literalCountQueryPath, Charset.defaultCharset());
    String blankCountQueryString = readFile(blankCountQueryPath, Charset.defaultCharset());
    String iriCountQueryString = readFile(iriCountQueryPath, Charset.defaultCharset());
    String datatypeCountQueryString = readFile(datatypeCountsPath, Charset.defaultCharset());
    DecimalFormat df = new DecimalFormat("0.0000");
    // Create the Excel workbook and sheet
    XSSFWorkbook wb = new XSSFWorkbook();
    XSSFSheet sheet = wb.createSheet("Cardinality");
    int currentExcelRow = 0;
    int classStartRow = 0;
    for (String clazz : classList) {
        System.out.println("Class: " + clazz);
        Map<String, String> litMap = new HashMap<>();
        Map<String, String> iriMap = ImmutableMap.of("class", clazz);
        String queryString = bindQueryString(individualCountQueryString, ImmutableMap.of(IRI_BINDINGS, iriMap, LITERAL_BINDINGS, litMap));
        int individualCount;
        List<RDFNode> c = executeQueryForList(queryString, endpoint, "c");
        if (c.size() == 1) {
            individualCount = c.get(0).asLiteral().getInt();
        } else {
            continue;
        }
        // If there are zero individuals, continue
        if (individualCount == 0) {
            throw new IllegalStateException("Check whether " + classListPath + " and " + endpoint + " match.");
        }
        classStartRow = currentExcelRow;
        XSSFRow row = sheet.createRow(currentExcelRow);
        XSSFCell cell = row.createCell(0);
        cell.setCellValue(clazz);
        litMap = new HashMap<>();
        iriMap = ImmutableMap.of("class", clazz);
        queryString = bindQueryString(classPropertyQueryString, ImmutableMap.of(IRI_BINDINGS, iriMap, LITERAL_BINDINGS, litMap));
        List<RDFNode> nodeList = executeQueryForList(queryString, endpoint, "p");
        // System.out.println("***");
        // System.out.println("### **" + clazz + "**");
        // System.out.println("***");
        // System.out.println();
        cell.getCellStyle().setAlignment(CellStyle.ALIGN_CENTER);
        for (RDFNode property : nodeList) {
            if (property.isURIResource()) {
                System.out.println(" " + property);
                int tripleCount;
                int objectCount;
                int literalCount;
                int blankCount;
                int iriCount;
                String propertyURI = property.asResource().getURI();
                XSSFRow propertyRow = sheet.getRow(currentExcelRow);
                if (propertyRow == null) {
                    propertyRow = sheet.createRow(currentExcelRow);
                }
                currentExcelRow++;
                XSSFCell propertyCell = propertyRow.createCell(1);
                propertyCell.setCellValue(propertyURI);
                litMap = new HashMap<>();
                iriMap = ImmutableMap.of("class", clazz, "p", propertyURI);
                queryString = bindQueryString(tripleCountQueryString, ImmutableMap.of(IRI_BINDINGS, iriMap, LITERAL_BINDINGS, litMap));
                c = executeQueryForList(queryString, endpoint, "c");
                if (c.size() > 0) {
                    tripleCount = c.get(0).asLiteral().getInt();
                } else {
                    tripleCount = 0;
                }
                queryString = bindQueryString(objectCountQueryString, ImmutableMap.of(IRI_BINDINGS, iriMap, LITERAL_BINDINGS, litMap));
                c = executeQueryForList(queryString, endpoint, "c");
                if (c.size() > 0) {
                    objectCount = c.get(0).asLiteral().getInt();
                } else {
                    objectCount = 0;
                }
                queryString = bindQueryString(literalCountQueryString, ImmutableMap.of(IRI_BINDINGS, iriMap, LITERAL_BINDINGS, litMap));
                c = executeQueryForList(queryString, endpoint, "c");
                if (c.size() > 0) {
                    literalCount = c.get(0).asLiteral().getInt();
                } else {
                    literalCount = 0;
                }
                queryString = bindQueryString(blankCountQueryString, ImmutableMap.of(IRI_BINDINGS, iriMap, LITERAL_BINDINGS, litMap));
                c = executeQueryForList(queryString, endpoint, "c");
                if (c.size() > 0) {
                    blankCount = c.get(0).asLiteral().getInt();
                } else {
                    blankCount = 0;
                }
                queryString = bindQueryString(iriCountQueryString, ImmutableMap.of(IRI_BINDINGS, iriMap, LITERAL_BINDINGS, litMap));
                c = executeQueryForList(queryString, endpoint, "c");
                if (c.size() > 0) {
                    iriCount = c.get(0).asLiteral().getInt();
                } else {
                    iriCount = 0;
                }
                XSSFCell objectCountCell = propertyRow.createCell(2);
                objectCountCell.setCellValue(objectCount);
                XSSFCell uniqueObjectsCell = propertyRow.createCell(3);
                uniqueObjectsCell.setCellValue(df.format(((double) objectCount) / tripleCount));
                XSSFCell literalCell = propertyRow.createCell(4);
                literalCell.setCellValue(df.format((((double) literalCount) / objectCount)));
                XSSFCell iriCell = propertyRow.createCell(5);
                iriCell.setCellValue(df.format((((double) iriCount) / objectCount)));
                XSSFCell blankCell = propertyRow.createCell(6);
                blankCell.setCellValue(df.format((((double) blankCount) / objectCount)));
                if (literalCount > 0) {
                    litMap = new HashMap<>();
                    iriMap = ImmutableMap.of("class", clazz, "p", propertyURI);
                    queryString = bindQueryString(datatypeCountQueryString, ImmutableMap.of(IRI_BINDINGS, iriMap, LITERAL_BINDINGS, litMap));
                    List<Map<String, RDFNode>> solnMaps = executeQueryForList(queryString, endpoint, ImmutableSet.of("datatype", "c"));
                    int i = 1;
                    for (Map<String, RDFNode> soln : solnMaps) {
                        String datatype = soln.get("datatype").asResource().getURI();
                        int count = soln.get("c").asLiteral().getInt();
                        XSSFCell dataCell = propertyRow.createCell(6 + i++);
                        dataCell.setCellValue(datatype);
                        dataCell = propertyRow.createCell(6 + i++);
                        dataCell.setCellValue(df.format((((double) count) / objectCount)));
                    }
                }
                // System.out.println("* " + propertyURI);
                // System.out.println();
                //
                // System.out.println("| Object Count | Unique Objects | Literals | IRIs | Blank Nodes | ");
                // System.out.println("|---|---|---|---|---|");
                // System.out.println(String.format("|%d|%d (%.2f%%) |%d (%.2f%%)|%d (%.2f%%)|%d (%.2f%%)|",
                //         tripleCount,
                //         objectCount, ((((double) objectCount)/tripleCount)*100),
                //         literalCount, ((((double) literalCount)/objectCount)*100),
                //         iriCount, ((((double) iriCount)/objectCount)*100),
                //         blankCount, ((((double) blankCount)/objectCount)*100)));
                // System.out.println();
            }
        }
    }
    String filename = "literals.xls";
    FileOutputStream fileOut = new FileOutputStream(filename);
    wb.write(fileOut);
    fileOut.close();
}
From source file:twittermarkovchain.Main.java
public static void main(String[] args) throws TwitterException, IOException {
    Args.parseOrExit(Main.class, args);
    Twitter twitter = TwitterFactory.getSingleton();
    List<String> tweets = new ArrayList<>();
    File file = new File(user + ".txt");
    if (file.exists()) {
        BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(file), "UTF-8"));
        String line;
        while ((line = br.readLine()) != null) {
            tweets.add(line);
        }
    } else {
        BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(file), "UTF-8"));
        int total = 0;
        int page = 1;
        int size;
        do {
            ResponseList<Status> statuses = twitter.timelines().getUserTimeline(user, new Paging(page++, 200));
            size = statuses.size();
            total += size;
            for (Status status : statuses) {
                if (status.getInReplyToUserId() == -1 && status.getRetweetedStatus() == null) {
                    String text = status.getText().replaceAll("\n", " ");
                    bw.write(text);
                    bw.newLine();
                    tweets.add(text);
                }
            }
        } while (size > 0);
        bw.close();
    }
    // We need to generate a map of pair frequencies indexed by the first in the pair
    Map<String, Map<String, Integer>> frequencyMap = tweets.stream().flatMap((String s) -> {
        Stream.Builder<Pair> builder = Stream.builder();
        String last = null;
        for (String current : s.toLowerCase().replaceAll("https?://.+\\b", "").replaceAll("[^a-z@# ]", "").split(" ")) {
            if (current.equals(""))
                continue;
            if (last == null) {
                builder.add(new Pair("", current));
            } else {
                builder.add(new Pair(last, current));
            }
            last = current;
        }
        if (last != null) {
            builder.add(new Pair(last, ""));
        }
        return builder.build();
    }).collect(Collectors.toMap(p -> p.s1, p -> ImmutableMap.of(p.s2, 1), (m1, m2) -> {
        HashMap<String, Integer> newmap = new HashMap<>(m1);
        for (Map.Entry<String, Integer> e : m2.entrySet()) {
            String key = e.getKey();
            Integer integer = newmap.get(key);
            if (integer == null) {
                newmap.put(key, 1);
            } else {
                newmap.put(key, integer + e.getValue());
            }
        }
        return newmap;
    }));
    // Random!
    Random random = new SecureRandom();
    // Check using language
    JLanguageTool language = new JLanguageTool(Language.ENGLISH);
    for (int i = 0; i < 1000; i++) {
        StringBuilder sb = new StringBuilder();
        // Now that we have the frequency map we can generate a message.
        String word = "";
        do {
            Map<String, Integer> distribution = frequencyMap.get(word);
            int total = 0;
            for (Map.Entry<String, Integer> e : distribution.entrySet()) {
                total += e.getValue();
            }
            int which = random.nextInt(total);
            int current = 0;
            for (Map.Entry<String, Integer> e : distribution.entrySet()) {
                Integer value = e.getValue();
                if (which >= current && which < current + value) {
                    word = e.getKey();
                }
                current += value;
            }
            if (sb.length() > 0) {
                if (word.length() > 0) {
                    sb.append(" ");
                    sb.append(word);
                }
            } else {
                sb.append(word.substring(0, 1).toUpperCase());
                if (word.length() > 1)
                    sb.append(word.substring(1));
            }
        } while (!word.equals(""));
        sb.append(".");
        List<RuleMatch> check = language.check(sb.toString());
        if (check.isEmpty()) {
            System.out.println(sb);
        }
    }
}
From source file:com.metamx.druid.http.ServerMain.java
public static void main(String[] args) throws Exception {
    LogLevelAdjuster.register();
    final ObjectMapper jsonMapper = new DefaultObjectMapper();
    final ObjectMapper smileMapper = new DefaultObjectMapper(new SmileFactory());
    smileMapper.getJsonFactory().setCodec(smileMapper);
    final Properties props = Initialization.loadProperties();
    final ConfigurationObjectFactory configFactory = Config.createFactory(props);
    final Lifecycle lifecycle = new Lifecycle();
    final HttpClient httpClient = HttpClientInit.createClient(HttpClientConfig.builder().withNumConnections(1).build(), lifecycle);
    final ServiceEmitter emitter = new ServiceEmitter(props.getProperty("druid.service"), props.getProperty("druid.host"), Emitters.create(props, httpClient, jsonMapper, lifecycle));
    final ExecutorService executorService = ExecutorServices.create(lifecycle, configFactory.buildWithReplacements(ExecutorServiceConfig.class, ImmutableMap.of("base_path", "druid.processing")));
    StupidPool<ByteBuffer> computationBufferPool = ServerInit.makeComputeScratchPool(Integer.parseInt(props.getProperty("druid.computation.buffer.size", String.valueOf(1024 * 1024 * 1024))));
    Map<Class<? extends Query>, QueryRunnerFactory> queryRunners = ServerInit.initDefaultQueryTypes(configFactory, computationBufferPool);
    final RestS3Service s3Client = new RestS3Service(new AWSCredentials(props.getProperty("com.metamx.aws.accessKey"), props.getProperty("com.metamx.aws.secretKey")));
    QueryableLoaderConfig queryableLoaderConfig = configFactory.build(QueryableLoaderConfig.class);
    final ServerManager serverManager = new ServerManager(ServerInit.makeDefaultQueryableLoader(s3Client, queryableLoaderConfig), new DefaultQueryRunnerFactoryConglomerate(queryRunners), emitter, executorService);
    final ZkClient zkClient = Initialization.makeZkClient(configFactory.build(ZkClientConfig.class), lifecycle);
    final DruidServer druidServer = new DruidServer(configFactory.build(DruidServerConfig.class));
    final PhoneBook coordinatorYp = Initialization.createYellowPages(jsonMapper, zkClient, "Coordinator-ZKYP--%s", lifecycle);
    final ZkCoordinator coordinator = new ZkCoordinator(jsonMapper, configFactory.build(ZkCoordinatorConfig.class), druidServer, coordinatorYp, serverManager, emitter);
    lifecycle.addManagedInstance(coordinator);
    final ScheduledExecutorFactory scheduledExecutorFactory = ScheduledExecutors.createFactory(lifecycle);
    final ScheduledExecutorService globalScheduledExec = scheduledExecutorFactory.create(1, "Global--%d");
    final List<Monitor> monitors = Lists.<Monitor>newArrayList(new ServerMonitor(druidServer, serverManager), new JvmMonitor());
    if (Boolean.parseBoolean(props.getProperty("druid.monitoring.monitorSystem", "true"))) {
        monitors.add(new SysMonitor());
    }
    final MonitorScheduler healthMonitor = new MonitorScheduler(configFactory.build(MonitorSchedulerConfig.class), globalScheduledExec, emitter, monitors);
    lifecycle.addManagedInstance(healthMonitor);
    final RequestLogger requestLogger = Initialization.makeRequestLogger(scheduledExecutorFactory.create(1, "RequestLogger--%d"), props);
    lifecycle.addManagedInstance(requestLogger);
    try {
        lifecycle.start();
    } catch (Throwable t) {
        log.error(t, "Error when starting up. Failing.");
        System.exit(1);
    }
    Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() {
        @Override
        public void run() {
            log.info("Running shutdown hook");
            lifecycle.stop();
        }
    }));
    final Server server = Initialization.makeJettyServer(configFactory.build(ServerConfig.class));
    final Context root = new Context(server, "/", Context.SESSIONS);
    root.addServlet(new ServletHolder(new StatusServlet()), "/status");
    root.addServlet(new ServletHolder(new QueryServlet(jsonMapper, smileMapper, serverManager, emitter, requestLogger)), "/*");
    server.start();
    server.join();
}