List of usage examples for com.google.common.collect.ImmutableSet.of
@SuppressWarnings("unchecked") public static <E> ImmutableSet<E> of(E e1, E e2)
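Before the project examples below, here is a minimal, self-contained sketch of this overload (class and variable names are illustrative, not taken from any of the projects listed): ImmutableSet.of(e1, e2) returns an immutable two-element set that preserves insertion order, rejects null elements, and throws UnsupportedOperationException on any attempt to modify it.

import com.google.common.collect.ImmutableSet;

public class ImmutableSetOfExample {
    public static void main(String[] args) {
        // Build an immutable set from exactly two elements
        ImmutableSet<String> colors = ImmutableSet.of("red", "green");

        System.out.println(colors.contains("red")); // true
        System.out.println(colors.size());          // 2

        // Duplicate arguments are collapsed rather than rejected
        System.out.println(ImmutableSet.of("red", "red").size()); // 1

        // The returned set rejects mutation
        try {
            colors.add("blue");
        } catch (UnsupportedOperationException expected) {
            System.out.println("ImmutableSet cannot be modified after creation");
        }
    }
}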
From source file:org.graylog2.bootstrap.Main.java
public static void main(String[] args) {
    final CliBuilder<CliCommand> builder = Cli.<CliCommand>builder("graylog")
            .withDescription("Open source, centralized log management")
            .withDefaultCommand(Help.class)
            .withCommands(ImmutableSet.of(ShowVersion.class, Help.class));

    // add rest from classpath
    final ServiceLoader<CliCommandsProvider> commandsProviders = ServiceLoader.load(CliCommandsProvider.class);
    for (CliCommandsProvider provider : commandsProviders) {
        provider.addTopLevelCommandsOrGroups(builder);
    }

    final Cli<CliCommand> cli = builder.build();
    final Runnable command = cli.parse(args);
    command.run();
}
From source file:org.apache.jclouds.DestroyNodes.java
public static void main(final String[] args) throws Exception {
    String provider = "google-compute-engine";
    ProviderConfig config = ProviderConfig.load(provider);

    // Create the connection to the compute provider
    ComputeServiceContext ctx = ContextBuilder.newBuilder(provider)
            .credentials(config.identity(), config.credential())
            .overrides(config.overrides())
            .modules(ImmutableSet.of(new SshjSshClientModule(), new SLF4JLoggingModule()))
            .buildView(ComputeServiceContext.class);

    // Create the connection to the Chef server
    ProviderConfig chefConfig = ProviderConfig.load("chef");
    ChefApi chef = ContextBuilder.newBuilder("chef")
            .endpoint("https://api.chef.io/organizations/apache-jclouds")
            .credentials(chefConfig.identity(), chefConfig.credential())
            .overrides(chefConfig.overrides())
            .modules(ImmutableSet.of(new SLF4JLoggingModule()))
            .buildApi(ChefApi.class);

    try {
        // Destroy all nodes that belong to the used groups
        ComputeService compute = ctx.getComputeService();
        compute.destroyNodesMatching(inGroup("acna2016-webserver"));
        compute.destroyNodesMatching(inGroup("acna2016-load-balancer"));

        // Delete the information from the Chef server using the Chef API
        ChefService chefService = chef.chefService();
        chefService.deleteAllClientsInList(withPrefix(chef.listClients(), "acna2016"));
        chefService.deleteAllNodesInList(withPrefix(chef.listNodes(), "acna2016"));
        chef.deleteDatabag("bootstrap");
    } finally {
        // Close the connections and free resources
        ctx.close();
        chef.close();
    }
}
From source file:org.apache.jclouds.ACNA2016Demo.java
public static void main(final String[] args) throws Exception {
    // Create the connection to the compute provider
    String provider = "google-compute-engine";
    ProviderConfig config = ProviderConfig.load(provider);
    ComputeServiceContext ctx = ContextBuilder.newBuilder(provider)
            .credentials(config.identity(), config.credential())
            .overrides(config.overrides())
            .modules(ImmutableSet.of(new SshjSshClientModule(), new SLF4JLoggingModule()))
            .buildView(ComputeServiceContext.class);

    // Create the connection to the Chef server
    ProviderConfig chefConfig = ProviderConfig.load("chef");
    ChefApi chef = ContextBuilder.newBuilder("chef")
            .endpoint("https://api.chef.io/organizations/apache-jclouds")
            .credentials(chefConfig.identity(), chefConfig.credential())
            .overrides(chefConfig.overrides())
            .modules(ImmutableSet.of(new SLF4JLoggingModule()))
            .buildApi(ChefApi.class);

    try {
        String chefRole = "load-balancer";
        String group = "acna2016-" + chefRole;
        int[] inPorts = { 22, 80, 22002 };
        int numInstances = 1;

        // Create the scripts to be executed on the nodes when they are started
        Statement userAccess = AdminAccess.standard();
        Statement chefBootstrap = generateChefBootstrap(chef, group, chefRole);
        Statement bootstrap = new StatementList(userAccess, chefBootstrap);

        // Select and configure the image and hardware profile to be deployed
        ComputeService compute = ctx.getComputeService();
        Template template = compute.templateBuilder().smallest().osFamily(OsFamily.DEBIAN).os64Bit(true)
                .options(runScript(bootstrap).inboundPorts(inPorts)).build();

        // Create the nodes and bootstrap them
        compute.createNodesInGroup(group, numInstances, template);
        System.out.println("Done!");
    } finally {
        // Close the connections and free resources
        ctx.close();
        chef.close();
    }
}
From source file:es.upm.oeg.tools.rdfshapes.utils.CadinalityResultGenerator.java
public static void main(String[] args) throws Exception {
    String endpoint = "http://3cixty.eurecom.fr/sparql";
    List<String> classList = Files.readAllLines(Paths.get(classListPath), Charset.defaultCharset());

    String classPropertyQueryString = readFile(classPropertyQueryPath, Charset.defaultCharset());
    String propertyCardinalityQueryString = readFile(propertyCardinalityQueryPath, Charset.defaultCharset());
    String individualCountQueryString = readFile(individualCountQueryPath, Charset.defaultCharset());

    DecimalFormat df = new DecimalFormat("0.0000");

    // Create the Excel workbook and sheet
    XSSFWorkbook wb = new XSSFWorkbook();
    XSSFSheet sheet = wb.createSheet("Cardinality");

    int currentExcelRow = 0;
    int classStartRow = 0;

    for (String clazz : classList) {
        Map<String, String> litMap = new HashMap<>();
        Map<String, String> iriMap = ImmutableMap.of("class", clazz);
        String queryString = bindQueryString(individualCountQueryString,
                ImmutableMap.of(IRI_BINDINGS, iriMap, LITERAL_BINDINGS, litMap));

        int individualCount;
        List<RDFNode> c = executeQueryForList(queryString, endpoint, "c");
        if (c.size() == 1) {
            individualCount = c.get(0).asLiteral().getInt();
        } else {
            continue;
        }

        // If there are zero individuals, the class list and the endpoint do not match
        if (individualCount == 0) {
            throw new IllegalStateException("Check whether " + classListPath + " and " + endpoint + " match.");
        }

        classStartRow = currentExcelRow;
        XSSFRow row = sheet.createRow(currentExcelRow);
        XSSFCell cell = row.createCell(0);
        cell.setCellValue(clazz);
        cell.getCellStyle().setAlignment(CellStyle.ALIGN_CENTER);

        queryString = bindQueryString(classPropertyQueryString,
                ImmutableMap.of(IRI_BINDINGS, iriMap, LITERAL_BINDINGS, litMap));
        List<RDFNode> nodeList = executeQueryForList(queryString, endpoint, "p");

        for (RDFNode property : nodeList) {
            if (property.isURIResource()) {
                DescriptiveStatistics stats = new DescriptiveStatistics();
                String propertyURI = property.asResource().getURI();

                XSSFRow propertyRow = sheet.getRow(currentExcelRow);
                if (propertyRow == null) {
                    propertyRow = sheet.createRow(currentExcelRow);
                }
                currentExcelRow++;

                XSSFCell propertyCell = propertyRow.createCell(1);
                propertyCell.setCellValue(propertyURI);

                Map<String, String> litMap2 = new HashMap<>();
                Map<String, String> iriMap2 = ImmutableMap.of("class", clazz, "p", propertyURI);
                queryString = bindQueryString(propertyCardinalityQueryString,
                        ImmutableMap.of(IRI_BINDINGS, iriMap2, LITERAL_BINDINGS, litMap2));
                List<Map<String, RDFNode>> solnMaps = executeQueryForList(queryString, endpoint,
                        ImmutableSet.of("card", "count"));

                int sum = 0;
                List<CardinalityCount> cardinalityList = new ArrayList<>();
                if (solnMaps.size() > 0) {
                    for (Map<String, RDFNode> soln : solnMaps) {
                        int count = soln.get("count").asLiteral().getInt();
                        int card = soln.get("card").asLiteral().getInt();
                        for (int i = 0; i < count; i++) {
                            stats.addValue(card);
                        }
                        CardinalityCount cardinalityCount = new CardinalityCount(card, count,
                                (((double) count) / individualCount) * 100);
                        cardinalityList.add(cardinalityCount);
                        sum += count;
                    }

                    // Check for zero cardinality instances
                    int count = individualCount - sum;
                    if (count > 0) {
                        for (int i = 0; i < count; i++) {
                            stats.addValue(0);
                        }
                        CardinalityCount cardinalityCount = new CardinalityCount(0, count,
                                (((double) count) / individualCount) * 100);
                        cardinalityList.add(cardinalityCount);
                    }
                }

                Map<Integer, Double> cardMap = new HashMap<>();
                for (CardinalityCount count : cardinalityList) {
                    cardMap.put(count.getCardinality(), count.getPrecentage());
                }

                XSSFCell instanceCountCell = propertyRow.createCell(2);
                instanceCountCell.setCellValue(individualCount);
                XSSFCell minCell = propertyRow.createCell(3);
                minCell.setCellValue(stats.getMin());
                XSSFCell maxCell = propertyRow.createCell(4);
                maxCell.setCellValue(stats.getMax());
                XSSFCell p1 = propertyRow.createCell(5);
                p1.setCellValue(stats.getPercentile(1));
                XSSFCell p99 = propertyRow.createCell(6);
                p99.setCellValue(stats.getPercentile(99));
                XSSFCell mean = propertyRow.createCell(7);
                mean.setCellValue(df.format(stats.getMean()));

                for (int i = 0; i < 21; i++) {
                    XSSFCell dataCell = propertyRow.createCell(8 + i);
                    Double percentage = cardMap.get(i);
                    if (percentage != null) {
                        dataCell.setCellValue(df.format(percentage));
                    } else {
                        dataCell.setCellValue(0);
                    }
                }
            }
        }

        // We have finished writing the properties of one class; now merge the class cells
        int classEndRow = currentExcelRow - 1;
        if (classStartRow < classEndRow) {
            sheet.addMergedRegion(new CellRangeAddress(classStartRow, classEndRow, 0, 0));
        }
    }

    String filename = "3cixty.xls";
    FileOutputStream fileOut = new FileOutputStream(filename);
    wb.write(fileOut);
    fileOut.close();
}
From source file:org.commoncrawl.mapred.ec2.postprocess.linkCollector.LinkMergerJob.java
public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);

    // establish merge timestamp
    long mergeTimesmap = System.currentTimeMillis();

    // get a temp directory ...
    Path outputPath = JobBuilder.tempDir(conf, Long.toString(mergeTimesmap));

    // find latest merge timestamp ...
    long latestMergeDBTimestamp = findLatestMergeDBTimestamp(fs, conf);
    LOG.info("Latest MergeDB Timestmap is:" + latestMergeDBTimestamp);

    // find list of merge candidates ...
    List<Path> candidateList = filterMergeCandidtes(fs, conf, latestMergeDBTimestamp);
    LOG.info("Merge Candidate List is:" + candidateList);

    if (candidateList.size() != 0) {
        ArrayList<Path> inputPaths = new ArrayList<Path>();

        // add all input paths to list
        inputPaths.addAll(candidateList);

        // establish an affinity path ...
        Path affinityPath = candidateList.get(0);

        // add merge db path if it exists
        if (latestMergeDBTimestamp != -1L) {
            affinityPath = new Path(internalMergedDBPath, Long.toString(latestMergeDBTimestamp));
            inputPaths.add(affinityPath);
        }

        JobConf jobConf = new JobBuilder("Final Merge Job", conf).inputs(inputPaths)
                .inputFormat(MultiFileMergeInputFormat.class)
                .mapperKeyValue(IntWritable.class, Text.class)
                .outputKeyValue(TextBytes.class, TextBytes.class)
                .outputFormat(SequenceFileOutputFormat.class)
                .reducer(LinkMergerJob.class, false)
                .partition(MultiFileMergePartitioner.class)
                .numReducers(CrawlEnvironment.NUM_DB_SHARDS)
                .speculativeExecution(false)
                .output(outputPath)
                .setAffinityNoBalancing(affinityPath,
                        ImmutableSet.of("ccd001.commoncrawl.org", "ccd006.commoncrawl.org"))
                .compressMapOutput(false)
                .compressor(CompressionType.BLOCK, SnappyCodec.class)
                .build();

        JsonArray hack = new JsonArray();
        hack.add(new JsonPrimitive(11));
        hack.add(new JsonPrimitive(21));
        hack.add(new JsonPrimitive(82));
        hack.add(new JsonPrimitive(83));
        hack.add(new JsonPrimitive(90));
        jobConf.set("hack", hack.toString());

        LOG.info("Starting JOB");
        JobClient.runJob(jobConf);

        Path finalOutputPath = new Path(internalMergedDBPath, Long.toString(mergeTimesmap));
        LOG.info("Renaming tempoutput:" + outputPath + " to:" + finalOutputPath);
        fs.rename(outputPath, finalOutputPath);
    }
}
From source file:es.upm.oeg.tools.rdfshapes.patterns.DatatypeObjectPropertyPatterns.java
public static void main(String[] args) throws Exception {
    String endpoint = "http://3cixty.eurecom.fr/sparql";
    List<String> classList = Files.readAllLines(Paths.get(classListPath), Charset.defaultCharset());

    String classPropertyQueryString = readFile(classPropertyQueryPath, Charset.defaultCharset());
    String propertyCardinalityQueryString = readFile(propertyCardinalityQueryPath, Charset.defaultCharset());
    String individualCountQueryString = readFile(individualCountQueryPath, Charset.defaultCharset());
    String objectCountQueryString = readFile(objectCountQueryPath, Charset.defaultCharset());
    String tripleCountQueryString = readFile(tripleCountQueryPath, Charset.defaultCharset());
    String literalCountQueryString = readFile(literalCountQueryPath, Charset.defaultCharset());
    String blankCountQueryString = readFile(blankCountQueryPath, Charset.defaultCharset());
    String iriCountQueryString = readFile(iriCountQueryPath, Charset.defaultCharset());
    String datatypeCountQueryString = readFile(datatypeCountsPath, Charset.defaultCharset());

    DecimalFormat df = new DecimalFormat("0.0000");

    // Create the Excel workbook and sheet
    XSSFWorkbook wb = new XSSFWorkbook();
    XSSFSheet sheet = wb.createSheet("Cardinality");

    int currentExcelRow = 0;
    int classStartRow = 0;

    for (String clazz : classList) {
        System.out.println("Class: " + clazz);

        Map<String, String> litMap = new HashMap<>();
        Map<String, String> iriMap = ImmutableMap.of("class", clazz);
        String queryString = bindQueryString(individualCountQueryString,
                ImmutableMap.of(IRI_BINDINGS, iriMap, LITERAL_BINDINGS, litMap));

        int individualCount;
        List<RDFNode> c = executeQueryForList(queryString, endpoint, "c");
        if (c.size() == 1) {
            individualCount = c.get(0).asLiteral().getInt();
        } else {
            continue;
        }

        // If there are zero individuals, the class list and the endpoint do not match
        if (individualCount == 0) {
            throw new IllegalStateException("Check whether " + classListPath + " and " + endpoint + " match.");
        }

        classStartRow = currentExcelRow;
        XSSFRow row = sheet.createRow(currentExcelRow);
        XSSFCell cell = row.createCell(0);
        cell.setCellValue(clazz);

        litMap = new HashMap<>();
        iriMap = ImmutableMap.of("class", clazz);
        queryString = bindQueryString(classPropertyQueryString,
                ImmutableMap.of(IRI_BINDINGS, iriMap, LITERAL_BINDINGS, litMap));
        List<RDFNode> nodeList = executeQueryForList(queryString, endpoint, "p");

        cell.getCellStyle().setAlignment(CellStyle.ALIGN_CENTER);

        for (RDFNode property : nodeList) {
            if (property.isURIResource()) {
                System.out.println("    " + property);

                int tripleCount;
                int objectCount;
                int literalCount;
                int blankCount;
                int iriCount;

                String propertyURI = property.asResource().getURI();

                XSSFRow propertyRow = sheet.getRow(currentExcelRow);
                if (propertyRow == null) {
                    propertyRow = sheet.createRow(currentExcelRow);
                }
                currentExcelRow++;

                XSSFCell propertyCell = propertyRow.createCell(1);
                propertyCell.setCellValue(propertyURI);

                litMap = new HashMap<>();
                iriMap = ImmutableMap.of("class", clazz, "p", propertyURI);

                queryString = bindQueryString(tripleCountQueryString,
                        ImmutableMap.of(IRI_BINDINGS, iriMap, LITERAL_BINDINGS, litMap));
                c = executeQueryForList(queryString, endpoint, "c");
                if (c.size() > 0) {
                    tripleCount = c.get(0).asLiteral().getInt();
                } else {
                    tripleCount = 0;
                }

                queryString = bindQueryString(objectCountQueryString,
                        ImmutableMap.of(IRI_BINDINGS, iriMap, LITERAL_BINDINGS, litMap));
                c = executeQueryForList(queryString, endpoint, "c");
                if (c.size() > 0) {
                    objectCount = c.get(0).asLiteral().getInt();
                } else {
                    objectCount = 0;
                }

                queryString = bindQueryString(literalCountQueryString,
                        ImmutableMap.of(IRI_BINDINGS, iriMap, LITERAL_BINDINGS, litMap));
                c = executeQueryForList(queryString, endpoint, "c");
                if (c.size() > 0) {
                    literalCount = c.get(0).asLiteral().getInt();
                } else {
                    literalCount = 0;
                }

                queryString = bindQueryString(blankCountQueryString,
                        ImmutableMap.of(IRI_BINDINGS, iriMap, LITERAL_BINDINGS, litMap));
                c = executeQueryForList(queryString, endpoint, "c");
                if (c.size() > 0) {
                    blankCount = c.get(0).asLiteral().getInt();
                } else {
                    blankCount = 0;
                }

                queryString = bindQueryString(iriCountQueryString,
                        ImmutableMap.of(IRI_BINDINGS, iriMap, LITERAL_BINDINGS, litMap));
                c = executeQueryForList(queryString, endpoint, "c");
                if (c.size() > 0) {
                    iriCount = c.get(0).asLiteral().getInt();
                } else {
                    iriCount = 0;
                }

                XSSFCell objectCountCell = propertyRow.createCell(2);
                objectCountCell.setCellValue(objectCount);
                XSSFCell uniqueObjectsCell = propertyRow.createCell(3);
                uniqueObjectsCell.setCellValue(df.format(((double) objectCount) / tripleCount));
                XSSFCell literalCell = propertyRow.createCell(4);
                literalCell.setCellValue(df.format((((double) literalCount) / objectCount)));
                XSSFCell iriCell = propertyRow.createCell(5);
                iriCell.setCellValue(df.format((((double) iriCount) / objectCount)));
                XSSFCell blankCell = propertyRow.createCell(6);
                blankCell.setCellValue(df.format((((double) blankCount) / objectCount)));

                if (literalCount > 0) {
                    litMap = new HashMap<>();
                    iriMap = ImmutableMap.of("class", clazz, "p", propertyURI);
                    queryString = bindQueryString(datatypeCountQueryString,
                            ImmutableMap.of(IRI_BINDINGS, iriMap, LITERAL_BINDINGS, litMap));
                    List<Map<String, RDFNode>> solnMaps = executeQueryForList(queryString, endpoint,
                            ImmutableSet.of("datatype", "c"));

                    int i = 1;
                    for (Map<String, RDFNode> soln : solnMaps) {
                        String datatype = soln.get("datatype").asResource().getURI();
                        int count = soln.get("c").asLiteral().getInt();
                        XSSFCell dataCell = propertyRow.createCell(6 + i++);
                        dataCell.setCellValue(datatype);
                        dataCell = propertyRow.createCell(6 + i++);
                        dataCell.setCellValue(df.format((((double) count) / objectCount)));
                    }
                }
            }
        }
    }

    String filename = "literals.xls";
    FileOutputStream fileOut = new FileOutputStream(filename);
    wb.write(fileOut);
    fileOut.close();
}
From source file:com.google.template.soy.coredirectives.CoreDirectives.java
public static ImmutableSet<SoyPrintDirective> directives() {
    return ImmutableSet.of(new NoAutoescapeDirective(), new EscapeHtmlDirective());
}
From source file:com.onyxscheduler.util.TriggerTestUtils.java
public static Set<Trigger> buildTriggers() {
    return ImmutableSet.of(Trigger.fromCronExpression(CRON), Trigger.fromFixedTime(FIXED_TIME));
}
From source file:azkaban.flowtrigger.Status.java
public static boolean isDone(final Status status) {
    final Set<Status> terminalStatus = ImmutableSet.of(SUCCEEDED, CANCELLED);
    return terminalStatus.contains(status);
}
From source file:com.google.template.soy.bididirectives.BidiDirectives.java
public static ImmutableSet<SoyPrintDirective> directives(Supplier<BidiGlobalDir> bidiProvider) {
    return ImmutableSet.of(new BidiSpanWrapDirective(bidiProvider), new BidiUnicodeWrapDirective(bidiProvider));
}