List of usage examples for com.google.common.base.Splitter.on

@CheckReturnValue
@GwtIncompatible("java.util.regex")
public static Splitter on(final Pattern separatorPattern)
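Splitter.on is overloaded for char, String, CharMatcher, and Pattern separators; the examples below exercise all but the Pattern form (see the final sketch on this page for that one). A minimal sketch of the basic contract, assuming only Guava on the classpath (class name is illustrative):

import com.google.common.base.Splitter;

public class SplitterOnBasics {
    public static void main(String[] args) {
        // Splitter keeps empty pieces and surrounding whitespace unless told otherwise.
        System.out.println(Splitter.on(',').splitToList("a,, b"));
        // [a, ,  b]   (an empty piece and a leading space survive)
        System.out.println(Splitter.on(',').omitEmptyStrings().trimResults().splitToList("a,, b"));
        // [a, b]
    }
}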
From source file:tv.icntv.grade.film.recommend.CorrelateJob.java
public static void main(String[] args) throws Exception {
    final Configuration configuration = HBaseConfiguration.create();
    configuration.addResource("grade.xml");
    String tables = configuration.get("hbase.cdn.tables");
    if (Strings.isNullOrEmpty(tables)) {
        return;
    }
    // Split the comma-separated table list, then map each table name to its HDFS input directory.
    List<String> list = Lists.newArrayList(Splitter.on(",").split(tables));
    List<String> results = Lists.transform(list, new Function<String, String>() {
        @Override
        public String apply(@Nullable String input) {
            return String.format(configuration.get("hdfs.directory.base.db"), new Date(), input);
        }
    });
    String middleDirectory = String.format(configuration.get("icntv.correlate.input"), new Date());
    // Build the parameter string for the FP-Growth step.
    StringBuilder sb = new StringBuilder();
    sb.append("minSupport=").append(configuration.get("correlate.minSupport", "3")).append("--")
            .append("maxHeapSize=100").append("--")
            .append("splitterPattern='[\t ]'").append("--")
            .append("input=").append(middleDirectory).append("--")
            .append("output=")
            .append(String.format(configuration.get("icntv.correlate.fp.growth.output"), new Date()));
    ToolRunner.run(configuration, new CorrelateJob(),
            new String[] { Joiner.on(",").join(results), middleDirectory, sb.toString(),
                    String.format(configuration.get("icntv.correlate.output"), new Date()) });
}
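The split-transform-rejoin round trip above (Splitter to take a comma-separated config value apart, Joiner to put the transformed pieces back together) is worth isolating. A minimal sketch, with illustrative stand-ins for the configuration values:

import com.google.common.base.Joiner;
import com.google.common.base.Splitter;
import com.google.common.collect.Lists;
import java.util.List;

public class SplitJoinRoundTrip {
    public static void main(String[] args) {
        String tables = "table_a,table_b,table_c"; // stand-in for configuration.get("hbase.cdn.tables")
        List<String> list = Lists.newArrayList(Splitter.on(",").split(tables));
        List<String> paths = Lists.transform(list, t -> "/hdfs/base/" + t); // illustrative mapping
        System.out.println(Joiner.on(",").join(paths));
        // /hdfs/base/table_a,/hdfs/base/table_b,/hdfs/base/table_c
    }
}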
From source file:com.sina.dip.twill.HelloWorldServiceDiscovery.java
public static void main(String[] args) {
    String zkStr = "localhost:2181";
    YarnConfiguration yarnConfiguration = new YarnConfiguration();
    final TwillRunnerService twillRunner = new YarnTwillRunnerService(yarnConfiguration, zkStr);
    twillRunner.start();

    // Split the comma-separated YARN application classpath into individual entries.
    String yarnClasspath = yarnConfiguration.get(YarnConfiguration.YARN_APPLICATION_CLASSPATH,
            "/usr/lib/hadoop/*,/usr/lib/hadoop-0.20-mapreduce/*,/usr/lib/hadoop-hdfs/*,/usr/lib/hadoop-mapreduce/*,/usr/lib/hadoop-yarn/*");
    List<String> applicationClassPaths = Lists.newArrayList();
    Iterables.addAll(applicationClassPaths, Splitter.on(",").split(yarnClasspath));

    final TwillController controller = twillRunner.prepare(new HelloWorldApplication())
            .addLogHandler(new PrinterLogHandler(new PrintWriter(System.out, true)))
            .withApplicationClassPaths(applicationClassPaths)
            .withBundlerClassAcceptor(new HadoopClassExcluder())
            .start();

    ServiceDiscovered helloWorldService = controller.discoverService("HelloWorldService");
    ServiceDiscovered helloWorldService2 = controller.discoverService("HelloWorldService2");

    int count = 0;
    while (true) {
        boolean flag = true;
        Iterator<Discoverable> iterator = helloWorldService.iterator();
        while (iterator.hasNext()) {
            Discoverable discoverable = iterator.next();
            System.out.println(discoverable.getName() + " : " + discoverable.getSocketAddress());
            flag = false;
        }
        iterator = helloWorldService2.iterator();
        while (iterator.hasNext()) {
            Discoverable discoverable = iterator.next();
            System.out.println(discoverable.getName() + " : " + discoverable.getSocketAddress());
            flag = false;
        }
        try {
            Thread.sleep(5 * 1000);
        } catch (InterruptedException e) {
        }
        if (++count >= 36 && flag) {
            break;
        }
    }

    Runtime.getRuntime().addShutdownHook(new Thread() {
        @Override
        public void run() {
            try {
                Futures.getUnchecked(controller.terminate());
            } finally {
                twillRunner.stop();
            }
        }
    });

    try {
        controller.awaitTerminated();
    } catch (ExecutionException e) {
        e.printStackTrace();
    }
}
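Collecting the split pieces with Iterables.addAll, as above, predates Splitter.splitToList; on Guava 15 or newer the same list can be built in one call. A minimal sketch (the classpath string is illustrative):

import com.google.common.base.Splitter;
import java.util.List;

public class ClasspathSplit {
    public static void main(String[] args) {
        String yarnClasspath = "/usr/lib/hadoop/*,/usr/lib/hadoop-hdfs/*,/usr/lib/hadoop-yarn/*";
        // splitToList returns an immutable snapshot; copy it if mutation is needed.
        List<String> entries = Splitter.on(",").splitToList(yarnClasspath);
        System.out.println(entries);
        // [/usr/lib/hadoop/*, /usr/lib/hadoop-hdfs/*, /usr/lib/hadoop-yarn/*]
    }
}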
From source file:com.sina.dip.twill.HelloWorldControllingLiveApplications.java
public static void main(String[] args) {
    String zkStr = "localhost:2181";
    YarnConfiguration yarnConfiguration = new YarnConfiguration();
    final TwillRunnerService twillRunner = new YarnTwillRunnerService(yarnConfiguration, zkStr);
    twillRunner.start();

    String yarnClasspath = yarnConfiguration.get(YarnConfiguration.YARN_APPLICATION_CLASSPATH,
            "/usr/lib/hadoop/*,/usr/lib/hadoop-0.20-mapreduce/*,/usr/lib/hadoop-hdfs/*,/usr/lib/hadoop-mapreduce/*,/usr/lib/hadoop-yarn/*");
    List<String> applicationClassPaths = Lists.newArrayList();
    Iterables.addAll(applicationClassPaths, Splitter.on(",").split(yarnClasspath));

    final TwillController controller = twillRunner.prepare(new HelloWorldApplication())
            .addLogHandler(new PrinterLogHandler(new PrintWriter(System.out, true)))
            .withApplicationClassPaths(applicationClassPaths)
            .withBundlerClassAcceptor(new HadoopClassExcluder())
            .start();

    ServiceDiscovered helloWorldService = controller.discoverService("HelloWorldService");
    ServiceDiscovered helloWorldService2 = controller.discoverService("HelloWorldService2");

    int count = 0;
    while (true) {
        boolean flag = true;
        Iterator<Discoverable> iterator = helloWorldService.iterator();
        while (iterator.hasNext()) {
            Discoverable discoverable = iterator.next();
            System.out.println(discoverable.getName() + " : " + discoverable.getSocketAddress());
            flag = false;
        }
        iterator = helloWorldService2.iterator();
        while (iterator.hasNext()) {
            Discoverable discoverable = iterator.next();
            System.out.println(discoverable.getName() + " : " + discoverable.getSocketAddress());
            flag = false;
        }
        try {
            Thread.sleep(5 * 1000);
        } catch (InterruptedException e) {
        }
        ++count;
        if (count == 10) {
            controller.changeInstances("hello1", 3);
            controller.changeInstances("hello2", 5);
        } else if (count == 20) {
            controller.changeInstances("hello1", 5);
            controller.changeInstances("hello2", 3);
        }
        if (count >= 36 && flag) {
            break;
        }
    }

    Runtime.getRuntime().addShutdownHook(new Thread() {
        @Override
        public void run() {
            try {
                Futures.getUnchecked(controller.terminate());
            } finally {
                twillRunner.stop();
            }
        }
    });

    try {
        controller.awaitTerminated();
    } catch (ExecutionException e) {
        e.printStackTrace();
    }
}
From source file:tv.icntv.log.stb.cdnModule.CdnStbMapper.java
public static void main(String[] args) throws IOException {
    String test = "id=493&url=http://hot.sp.media.ysten.com/media/new/2013/icntv2/media/2014/09/04/HD1M2d97d9a54\n"
            + "75f48cb8b368bfb6f6714a4.ts&block3=0&block5=0&block10=0&host=111.20.240.41&taskCnt=4&sucCnt=4&failCnt=0&conFailCnt=0&timeOutCnt=0&nooFileErrorCnt=0&srvCloseCnt=0&srvErrorCnt=0&socketErrorCnt=0&reqUn\n"
            + "acceptCnt=0&revByte=5914kB&revSpeed=552kB/s&dnsAvgTime=0ms&dnsMaxTime=0ms&dnsMinTime=0ms&conAvgTime=28ms&conMaxTime=73ms&conMinTime=0ms&dnsRedList=111.20.240.41(,120.192.247.55),120.192.247.55(),dnsRedList=mibox.vod01.icntvcdn.com(111.1.57.14),010121009660446201410232000184450000/20141023200018445/0/010121009660446////100.107.182.162/2014-10-23 20:00:18 445/2014-10-23 20:00:18 445/2014-10-23 20:00:18 445/2014-10-23 20:00:18 445/1/900/ConsumAction/catgId=, startDate=2014-10-23 20:00:18 445, endReason=, deviceCode=010121009660446, endDate=, contentType=MOVIE, videoType=, id=, programId=9381361, bufferingTotalTime=, programSeriesName=, bufferingCnt=, chargeType=0, epgCode=, outerCode=966269, ipAddress=, programName=";
    // Parse the '&'-separated record into key/value pairs. limit(2) on the entry splitter keeps
    // any extra '=' characters inside the value; without it this sample input (the dnsRedList
    // chunk contains several '=') makes MapSplitter throw IllegalArgumentException.
    Map<String, String> maps = Splitter.on("&").withKeyValueSeparator(Splitter.on("=").limit(2)).split(test);
    // Set<String> keys = maps.keySet();
    // for (String key : keys) {
    //     System.out.println(key + "\t" + maps.get(key));
    // }
    List<String> fieldValue = Lists.newArrayList("dnsRedList", "conMinTime", "conMaxTime", "conAvgTime",
            "dnsMinTime", "dnsMaxTime", "dnsAvgTime", "revSpeed", "revByte", "socketErrorCnt", "srvErrorCnt",
            "srvCloseCnt", "nooFileErrorCnt", "timeOutCnt", "conFailCnt", "failCnt", "sucCnt", "taskCnt",
            "host", "url");
    // Copy each parsed value into the matching field of the domain object via reflection.
    CdnStbDomain cdnStbDomain = new CdnStbDomain();
    for (String field : fieldValue) {
        try {
            ReflectUtils.setFieldValue(cdnStbDomain.getClass().getDeclaredField(field), cdnStbDomain,
                    new String[] { maps.get(field) });
        } catch (NoSuchFieldException e) {
            System.out.println("reflect error: " + e);
        }
    }
    System.out.println(cdnStbDomain.toString());
}
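Splitter.withKeyValueSeparator returns a MapSplitter that fails fast on malformed or duplicate entries, which is why the limit(2) guard above matters. A minimal sketch of the happy path (the query string is illustrative):

import com.google.common.base.Splitter;
import java.util.Map;

public class QueryStringParse {
    public static void main(String[] args) {
        String query = "id=493&host=111.20.240.41&taskCnt=4";
        Map<String, String> params = Splitter.on('&').withKeyValueSeparator('=').split(query);
        System.out.println(params.get("host")); // 111.20.240.41
        // A duplicate key, or an entry that does not split into exactly two pieces,
        // throws IllegalArgumentException.
    }
}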
From source file:tv.icntv.grade.film.grade.GradeJob.java
public static void main(String[] args) throws Exception {
    final Configuration configuration = HBaseConfiguration.create();
    configuration.addResource("grade.xml");
    String tables = configuration.get("hbase.cdn.tables");
    if (Strings.isNullOrEmpty(tables)) {
        return;
    }
    List<String> list = Lists.newArrayList(Splitter.on(",").split(tables));
    List<String> results = Lists.transform(list, new Function<String, String>() {
        @Override
        public String apply(@Nullable String input) {
            return String.format(configuration.get("hdfs.directory.base.db"), new Date(), input);
        }
    });
    String[] arrays = new String[] { Joiner.on(",").join(results), configuration.get("film.see.num.table"),
            String.format(configuration.get("hdfs.directory.base.score"), new Date()),
            String.format(configuration.get("icntv.correlate.input"), new Date()) };
    int i = ToolRunner.run(configuration, new GradeJob(), arrays);
    System.exit(i);
}
From source file:org.corpus_tools.graphannis.console.Console.java
public static void main(String[] args) {
    if (args.length < 1) {
        System.err.println("Must give the database directory as argument.");
        System.exit(-1);
    }
    Console c = new Console(args[0]);
    try {
        // limit(2) yields at most two pieces: the command and one argument string,
        // so spaces inside the argument survive intact.
        Splitter cmdArgSplitter = Splitter.on(" ").omitEmptyStrings().trimResults().limit(2);

        FileHistory history = new FileHistory(new File(".graphannis_history.txt"));
        ConsoleReader reader = new ConsoleReader();
        reader.setHistory(history);
        reader.setHistoryEnabled(true);
        reader.setPrompt("graphannis> ");
        reader.addCompleter(
                new StringsCompleter("quit", "exit", "count", "find", "subgraph", "list", "relannis"));
        boolean exit = false;
        String line;
        while (!exit && (line = reader.readLine()) != null) {
            List<String> parsed = cmdArgSplitter.splitToList(line);
            if (parsed.isEmpty()) {
                continue; // blank input: omitEmptyStrings() leaves nothing to dispatch on
            }
            String cmd = parsed.get(0);
            String arguments = "";
            if (parsed.size() > 1) {
                arguments = parsed.get(1);
            }
            switch (cmd) {
            case "list":
                c.list();
                break;
            case "count":
                c.count(arguments);
                break;
            case "find":
                c.find(arguments);
                break;
            case "subgraph":
                c.subgraph(arguments);
                break;
            case "relannis":
                c.relannis(arguments);
                break;
            case "exit":
            case "quit":
                System.out.println("Good bye!");
                exit = true;
                break;
            }
        }
    } catch (IOException ex) {
        Logger.getLogger(Console.class.getName()).log(Level.SEVERE, null, ex);
    }
}
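The limit(2) call is doing the real work in this console: everything after the first token is handed to the command as a single argument. A minimal sketch, assuming nothing beyond Guava (the query string is illustrative):

import com.google.common.base.Splitter;
import java.util.List;

public class CommandParse {
    public static void main(String[] args) {
        Splitter cmdArgSplitter = Splitter.on(" ").omitEmptyStrings().trimResults().limit(2);
        List<String> parsed = cmdArgSplitter.splitToList("find tok=\"der\" & pos=\"NN\"");
        System.out.println(parsed.get(0)); // find
        System.out.println(parsed.get(1)); // tok="der" & pos="NN"
    }
}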
From source file:com.google.cloud.genomics.dataflow.pipelines.DeleteVariants.java
public static void main(String[] args) throws IOException, GeneralSecurityException {
    // Register the options so that they show up via --help.
    PipelineOptionsFactory.register(Options.class);
    Options options = PipelineOptionsFactory.fromArgs(args).withValidation().as(Options.class);
    // Option validation is not yet automatic; we make an explicit call here.
    Options.Methods.validateOptions(options);

    OfflineAuth auth = GenomicsOptions.Methods.getGenomicsAuth(options);

    GenomicsOptions.Methods.requestConfirmation("*** The pipeline will delete variants whose "
            + "ids are listed in: " + options.getInput() + ". ***");

    Pipeline p = Pipeline.create(options);
    p.apply(TextIO.Read.named("ReadLines").from(options.getInput()))
            .apply(ParDo.named("ParseVariantIds").of(new DoFn<String, String>() {
                @Override
                public void processElement(ProcessContext c) {
                    String record = c.element();
                    // The variant id will be retrieved from the first column. Any other
                    // columns will be ignored.
                    Iterable<String> fields = Splitter
                            .on(CharMatcher.BREAKING_WHITESPACE.or(CharMatcher.is(',')))
                            .omitEmptyStrings().trimResults().split(record);
                    java.util.Iterator<String> iter = fields.iterator();
                    if (iter.hasNext()) {
                        c.output(iter.next());
                    }
                }
            }))
            .apply(ParDo.of(new DeleteVariantFn(auth)))
            .apply(Sum.integersGlobally())
            .apply(ParDo.named("FormatResults").of(new DoFn<Integer, String>() {
                @Override
                public void processElement(ProcessContext c) {
                    c.output("Deleted Variant Count: " + c.element());
                }
            }))
            .apply(TextIO.Write.named("Write Count").to(options.getOutput()));
    p.run();
}
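Splitting on a composed CharMatcher, as the ParseVariantIds step does, accepts either whitespace or commas as delimiters in a single pass. A minimal sketch (the record string is illustrative; BREAKING_WHITESPACE is the pre-Guava-19 constant the source above uses):

import com.google.common.base.CharMatcher;
import com.google.common.base.Splitter;

public class FirstColumn {
    public static void main(String[] args) {
        String record = "var-123,\tchr1  4000";
        Iterable<String> fields = Splitter
                .on(CharMatcher.BREAKING_WHITESPACE.or(CharMatcher.is(',')))
                .omitEmptyStrings().trimResults().split(record);
        System.out.println(fields.iterator().next()); // var-123
    }
}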
From source file:com.github.rinde.vanlon15prima.PerformExperiment.java
public static void main(String[] args) {
    final long time = System.currentTimeMillis();
    final Experiment.Builder experimentBuilder = Experiment.build(SUM).computeLocal().withRandomSeed(123)
            .withThreads(Runtime.getRuntime().availableProcessors()).repeat(1)
            .addScenarios(FileProvider.builder().add(Paths.get(DATASET)).filter("glob:**[09].scen"))
            .addResultListener(new CommandLineProgress(System.out))
            .usePostProcessor(PostProcessors.statisticsPostProcessor())
            // central: cheapest insertion configuration
            .addConfiguration(
                    Central.solverConfiguration(CheapestInsertionHeuristic.supplier(SUM), "CheapInsert"))
            // central: random
            .addConfiguration(Central.solverConfiguration(RandomSolver.supplier()))
            // mas: auction cheapest insertion with 2-opt per vehicle
            .addConfiguration(MASConfiguration.pdptwBuilder().setName("Auction-R-opt2cih-B-cih")
                    .addEventHandler(AddVehicleEvent.class,
                            new VehicleHandler(
                                    SolverRoutePlanner.supplier(Opt2.breadthFirstSupplier(
                                            CheapestInsertionHeuristic.supplier(SUM), SUM)),
                                    SolverBidder.supplier(SUM, CheapestInsertionHeuristic.supplier(SUM))))
                    .addModel(SolverModel.builder()).addModel(AuctionCommModel.builder()).build());

    final Optional<ExperimentResults> results = experimentBuilder.perform(System.out, args);
    final long duration = System.currentTimeMillis() - time;
    if (!results.isPresent()) {
        return;
    }
    System.out.println("Done, computed " + results.get().getResults().size() + " simulations in "
            + duration / 1000d + "s");

    final Multimap<MASConfiguration, SimulationResult> groupedResults = LinkedHashMultimap.create();
    for (final SimulationResult sr : results.get().sortedResults()) {
        groupedResults.put(sr.getSimArgs().getMasConfig(), sr);
    }

    for (final MASConfiguration config : groupedResults.keySet()) {
        final Collection<SimulationResult> group = groupedResults.get(config);
        final File configResult = new File(RESULTS + config.getName() + ".csv");
        try {
            Files.createParentDirs(configResult);
        } catch (final IOException e1) {
            throw new IllegalStateException(e1);
        }
        // deletes the file in case it already exists
        configResult.delete();
        try {
            Files.append(
                    "dynamism,urgency,scale,cost,travel_time,tardiness,over_time,is_valid,scenario_id,random_seed,comp_time,num_vehicles,num_orders\n",
                    configResult, Charsets.UTF_8);
        } catch (final IOException e1) {
            throw new IllegalStateException(e1);
        }

        for (final SimulationResult sr : group) {
            final String pc = sr.getSimArgs().getScenario().getProblemClass().getId();
            final String id = sr.getSimArgs().getScenario().getProblemInstanceId();
            final int numVehicles = FluentIterable.from(sr.getSimArgs().getScenario().getEvents())
                    .filter(AddVehicleEvent.class).size();
            try {
                final String scenarioName = Joiner.on("-").join(pc, id);
                final List<String> propsStrings = Files
                        .readLines(new File(DATASET + scenarioName + ".properties"), Charsets.UTF_8);
                // Rejoin the file's lines and parse the "key = value" pairs into a map.
                final Map<String, String> properties = Splitter.on("\n").withKeyValueSeparator(" = ")
                        .split(Joiner.on("\n").join(propsStrings));
                final double dynamism = Double.parseDouble(properties.get("dynamism_bin"));
                final long urgencyMean = Long.parseLong(properties.get("urgency"));
                final double scale = Double.parseDouble(properties.get("scale"));
                final StatisticsDTO stats = (StatisticsDTO) sr.getResultObject();
                final double cost = SUM.computeCost(stats);
                final double travelTime = SUM.travelTime(stats);
                final double tardiness = SUM.tardiness(stats);
                final double overTime = SUM.overTime(stats);
                final boolean isValidResult = SUM.isValidResult(stats);
                final long computationTime = stats.computationTime;
                final long numOrders = Long.parseLong(properties.get("AddParcelEvent"));
                final String line = Joiner.on(",")
                        .appendTo(new StringBuilder(),
                                asList(dynamism, urgencyMean, scale, cost, travelTime, tardiness, overTime,
                                        isValidResult, scenarioName, sr.getSimArgs().getRandomSeed(),
                                        computationTime, numVehicles, numOrders))
                        .append(System.lineSeparator()).toString();
                if (!isValidResult) {
                    System.err.println("WARNING: FOUND AN INVALID RESULT: ");
                    System.err.println(line);
                }
                Files.append(line, configResult, Charsets.UTF_8);
            } catch (final IOException e) {
                throw new IllegalStateException(e);
            }
        }
    }
}
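The properties parsing above is a compact MapSplitter idiom: read the lines, rejoin them with "\n", and let withKeyValueSeparator(" = ") build the map in one step. A minimal sketch, assuming the file uses exactly " = " between key and value (the entries are illustrative):

import com.google.common.base.Joiner;
import com.google.common.base.Splitter;
import java.util.Arrays;
import java.util.List;
import java.util.Map;

public class PropertiesParse {
    public static void main(String[] args) {
        // Stand-in for Files.readLines(...) on a .properties file.
        List<String> propsStrings = Arrays.asList("dynamism_bin = 0.5", "urgency = 20", "scale = 1.0");
        Map<String, String> properties = Splitter.on("\n").withKeyValueSeparator(" = ")
                .split(Joiner.on("\n").join(propsStrings));
        System.out.println(properties.get("urgency")); // 20
    }
}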
From source file:com.google.cloud.genomics.dataflow.pipelines.IdentifyPrivateVariants.java
public static void main(String[] args) throws IOException, GeneralSecurityException {
    // Register the options so that they show up via --help.
    PipelineOptionsFactory.register(Options.class);
    Options options = PipelineOptionsFactory.fromArgs(args).withValidation().as(Options.class);
    // Option validation is not yet automatic; we make an explicit call here.
    Options.Methods.validateOptions(options);

    OfflineAuth auth = GenomicsOptions.Methods.getGenomicsAuth(options);

    // Grab and parse the list of callset IDs.
    String fileContents = Files.toString(new File(options.getCallSetIdsFilepath()), Charset.defaultCharset());
    ImmutableSet<String> callSetIds = ImmutableSet.<String>builder()
            .addAll(Splitter.on(CharMatcher.BREAKING_WHITESPACE).omitEmptyStrings().trimResults()
                    .split(fileContents))
            .build();
    LOG.info("The pipeline will identify and write to Cloud Storage variants private to "
            + callSetIds.size() + " genomes with callSetIds: " + callSetIds);
    if (options.getIdentifyVariantsWithoutCalls()) {
        LOG.info("* The pipeline will also identify variants with no callsets. *");
    }

    List<StreamVariantsRequest> shardRequests = options.isAllReferences()
            ? ShardUtils.getVariantRequests(options.getVariantSetId(),
                    ShardUtils.SexChromosomeFilter.INCLUDE_XY, options.getBasesPerShard(), auth)
            : ShardUtils.getVariantRequests(options.getVariantSetId(), options.getReferences(),
                    options.getBasesPerShard());

    Pipeline p = Pipeline.create(options);
    PCollection<Variant> variants = p.begin().apply(Create.of(shardRequests))
            .apply(new VariantStreamer(auth, ShardBoundary.Requirement.STRICT, VARIANT_FIELDS))
            .apply(ParDo.of(new PrivateVariantsFilterFn(callSetIds,
                    options.getIdentifyVariantsWithoutCalls())));
    variants.apply(ParDo.named("FormatResults").of(new DoFn<Variant, String>() {
        @Override
        public void processElement(ProcessContext c) {
            Variant v = c.element();
            c.output(Joiner.on("\t").join(v.getId(), v.getReferenceName(), v.getStart(), v.getEnd(),
                    v.getReferenceBases(), Joiner.on(",").join(v.getAlternateBasesList())));
        }
    })).apply(TextIO.Write.to(options.getOutput()));
    p.run();
}
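Feeding a Splitter's output straight into an ImmutableSet.Builder, as above, deduplicates the IDs while preserving first-seen order. A minimal sketch (the file contents are illustrative):

import com.google.common.base.CharMatcher;
import com.google.common.base.Splitter;
import com.google.common.collect.ImmutableSet;

public class CallSetIds {
    public static void main(String[] args) {
        String fileContents = "CS-1\nCS-2  CS-3\nCS-2\n";
        ImmutableSet<String> callSetIds = ImmutableSet.<String>builder()
                .addAll(Splitter.on(CharMatcher.BREAKING_WHITESPACE)
                        .omitEmptyStrings().trimResults().split(fileContents))
                .build();
        System.out.println(callSetIds); // [CS-1, CS-2, CS-3]
    }
}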
From source file:org.apache.jclouds.examples.chef.basics.MainApp.java
public static void main(final String[] args) {
    if (args.length < PARAMETERS) {
        throw new IllegalArgumentException(INVALID_SYNTAX);
    }

    String provider = args[0];
    String identity = args[1];
    String credential = args[2];
    String groupName = args[3];
    Action action = Action.valueOf(args[4].toUpperCase());
    if ((action == Action.CHEF || action == Action.SOLO) && args.length < PARAMETERS + 1) {
        throw new IllegalArgumentException("please provide the list of recipes to install, separated by commas");
    }
    String recipes = action == Action.CHEF || action == Action.SOLO ? args[5] : "apache2";
    String minRam = System.getProperty("minRam");

    // note that you can check if a provider is present ahead of time
    checkArgument(contains(allKeys, provider), "provider %s not in supported list: %s", provider, allKeys);

    LoginCredentials login = action != Action.DESTROY ? getLoginForCommandExecution(action) : null;

    ComputeService compute = initComputeService(provider, identity, credential);

    try {
        switch (action) {
        case ADD:
            System.out.printf(">> adding node to group %s%n", groupName);

            // Default template chooses the smallest size on an operating
            // system that tested to work with java
            TemplateBuilder templateBuilder = compute.templateBuilder();
            templateBuilder.osFamily(OsFamily.UBUNTU);

            // If you want to up the ram and leave everything default, you
            // can just tweak minRam
            if (minRam != null) {
                templateBuilder.minRam(Integer.parseInt(minRam));
            }

            // note this will create a user with the same name as you on the
            // node. ex. you can connect via ssh publicip
            Statement bootInstructions = AdminAccess.standard();

            // to run commands as root, we use the runScript option in the
            // template.
            templateBuilder.options(runScript(bootInstructions));

            NodeMetadata node = getOnlyElement(
                    compute.createNodesInGroup(groupName, 1, templateBuilder.build()));
            System.out.printf("<< node %s: %s%n", node.getId(),
                    concat(node.getPrivateAddresses(), node.getPublicAddresses()));
            // (no break: ADD falls through to SOLO, which provisions the recipes)

        case SOLO:
            System.out.printf(">> installing [%s] on group %s as %s%n", recipes, groupName, login.identity);

            Iterable<String> recipeList = Splitter.on(',').split(recipes);
            ImmutableList.Builder<Statement> bootstrapBuilder = ImmutableList.builder();
            bootstrapBuilder.add(new InstallGit());

            // Clone community cookbooks into the node
            for (String recipe : recipeList) {
                bootstrapBuilder.add(CloneGitRepo.builder()
                        .repository("git://github.com/opscode-cookbooks/" + recipe + ".git")
                        .directory("/var/chef/cookbooks/" + recipe)
                        .build());
            }

            // Configure Chef Solo to bootstrap the selected recipes
            bootstrapBuilder.add(new InstallChefUsingOmnibus());
            bootstrapBuilder.add(ChefSolo.builder()
                    .cookbookPath("/var/chef/cookbooks")
                    .runlist(RunList.builder().recipes(recipeList).build())
                    .build());

            // Build the statement that will perform all the operations above
            StatementList bootstrap = new StatementList(bootstrapBuilder.build());

            // Run the script in the nodes of the group
            runScriptOnGroup(compute, login, groupName, bootstrap);
            break;

        case CHEF:
            // Create the connection to the Chef server
            ChefService chef = initChefService(System.getProperty("chef.client"),
                    System.getProperty("chef.validator"));

            // Build the runlist for the deployed nodes
            System.out.println("Configuring node runlist in the Chef server...");
            List<String> runlist = new RunListBuilder().addRecipes(recipes.split(",")).build();
            BootstrapConfig config = BootstrapConfig.builder().runList(runlist).build();
            chef.updateBootstrapConfigForGroup(groupName, config);
            Statement chefServerBootstrap = chef.createBootstrapScriptForGroup(groupName);

            // Run the script in the nodes of the group
            System.out.printf(">> installing [%s] on group %s as %s%n", recipes, groupName, login.identity);
            runScriptOnGroup(compute, login, groupName, chefServerBootstrap);
            break;

        case DESTROY:
            System.out.printf(">> destroying nodes in group %s%n", groupName);
            // you can use predicates to select which nodes you wish to destroy.
            Set<? extends NodeMetadata> destroyed = compute.destroyNodesMatching(
                    Predicates.<NodeMetadata>and(not(TERMINATED), inGroup(groupName)));
            System.out.printf("<< destroyed nodes %s%n", destroyed);
            break;
        }
    } catch (RunNodesException e) {
        System.err.println("error adding node to group " + groupName + ": " + e.getMessage());
        error = 1;
    } catch (RunScriptOnNodesException e) {
        System.err.println("error installing " + recipes + " on group " + groupName + ": " + e.getMessage());
        error = 1;
    } catch (Exception e) {
        System.err.println("error: " + e.getMessage());
        error = 1;
    } finally {
        compute.getContext().close();
        System.exit(error);
    }
}
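Every example on this page uses the char, String, or CharMatcher overloads; for completeness, a minimal sketch of the Pattern overload named in the signature at the top (the input string is illustrative). Note the @GwtIncompatible annotation: this overload is unavailable under GWT.

import com.google.common.base.Splitter;
import java.util.regex.Pattern;

public class PatternSplit {
    public static void main(String[] args) {
        // Splits on a comma plus any surrounding whitespace, so no trimResults() is needed.
        Splitter splitter = Splitter.on(Pattern.compile("\\s*,\\s*"));
        System.out.println(splitter.splitToList("apache2, mysql ,nginx"));
        // [apache2, mysql, nginx]
    }
}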