List of usage examples for the com.google.common.base.Splitter#on(Pattern) factory method
@CheckReturnValue @GwtIncompatible("java.util.regex") public static Splitter on(final Pattern separatorPattern)
From source file:org.jclouds.examples.chef.basics.MainApp.java
public static void main(String[] args) { if (args.length < PARAMETERS) { throw new IllegalArgumentException(INVALID_SYNTAX); }/*from w ww. ja v a 2 s. co m*/ String provider = args[0]; String identity = args[1]; String credential = args[2]; String groupName = args[3]; Action action = Action.valueOf(args[4].toUpperCase()); if ((action == Action.CHEF || action == Action.SOLO) && args.length < PARAMETERS + 1) { throw new IllegalArgumentException( "please provide the list of recipes to install, separated by commas"); } String recipes = action == Action.CHEF || action == Action.SOLO ? args[5] : "apache2"; String minRam = System.getProperty("minRam"); // note that you can check if a provider is present ahead of time checkArgument(contains(allKeys, provider), "provider %s not in supported list: %s", provider, allKeys); LoginCredentials login = action != Action.DESTROY ? getLoginForCommandExecution(action) : null; ComputeService compute = initComputeService(provider, identity, credential); try { switch (action) { case ADD: System.out.printf(">> adding node to group %s%n", groupName); // Default template chooses the smallest size on an operating // system that tested to work with java, which tends to be Ubuntu // or CentOS TemplateBuilder templateBuilder = compute.templateBuilder(); // If you want to up the ram and leave everything default, you // can just tweak minRam if (minRam != null) { templateBuilder.minRam(Integer.parseInt(minRam)); } // note this will create a user with the same name as you on the // node. ex. you can connect via ssh publicip Statement bootInstructions = AdminAccess.standard(); // to run commands as root, we use the runScript option in the // template. 
templateBuilder.options(runScript(bootInstructions)); NodeMetadata node = getOnlyElement( compute.createNodesInGroup(groupName, 1, templateBuilder.build())); System.out.printf("<< node %s: %s%n", node.getId(), concat(node.getPrivateAddresses(), node.getPublicAddresses())); case SOLO: System.out.printf(">> installing [%s] on group %s as %s%n", recipes, groupName, login.identity); Iterable<String> recipeList = Splitter.on(',').split(recipes); ImmutableList.Builder<Statement> bootstrapBuilder = ImmutableList.builder(); bootstrapBuilder.add(new InstallGit()); // Clone community cookbooks into the node for (String recipe : recipeList) { bootstrapBuilder.add(CloneGitRepo.builder() .repository("git://github.com/opscode-cookbooks/" + recipe + ".git") .directory("/var/chef/cookbooks/" + recipe) // .build()); } // Configure Chef Solo to bootstrap the selected recipes bootstrapBuilder.add(InstallRuby.builder().build()); bootstrapBuilder.add(InstallRubyGems.builder().build()); bootstrapBuilder.add(ChefSolo.builder() // .cookbookPath("/var/chef/cookbooks") // .runlist(RunList.builder().recipes(recipeList).build()) // .build()); // Build the statement that will perform all the operations above StatementList bootstrap = new StatementList(bootstrapBuilder.build()); // Run the script in the nodes of the group runScriptOnGroup(compute, login, groupName, bootstrap); break; case CHEF: // Create the connection to the Chef server ChefService chef = initChefService(System.getProperty("chef.client"), System.getProperty("chef.validator")); // Build the runlist for the deployed nodes System.out.println("Configuring node runlist in the Chef server..."); List<String> runlist = new RunListBuilder().addRecipes(recipes.split(",")).build(); chef.updateRunListForGroup(runlist, groupName); Statement chefServerBootstrap = chef.createBootstrapScriptForGroup(groupName); // Run the script in the nodes of the group System.out.printf(">> installing [%s] on group %s as %s%n", recipes, groupName, 
login.identity); runScriptOnGroup(compute, login, groupName, chefServerBootstrap); break; case DESTROY: System.out.printf(">> destroying nodes in group %s%n", groupName); // you can use predicates to select which nodes you wish to // destroy. Set<? extends NodeMetadata> destroyed = compute.destroyNodesMatching(// Predicates.<NodeMetadata>and(not(TERMINATED), inGroup(groupName))); System.out.printf("<< destroyed nodes %s%n", destroyed); break; } } catch (RunNodesException e) { System.err.println("error adding node to group " + groupName + ": " + e.getMessage()); error = 1; } catch (RunScriptOnNodesException e) { System.err.println("error installing " + recipes + " on group " + groupName + ": " + e.getMessage()); error = 1; } catch (Exception e) { System.err.println("error: " + e.getMessage()); error = 1; } finally { compute.getContext().close(); System.exit(error); } }
From source file:edu.cmu.lti.oaqa.knn4qa.apps.ExtractDataAndQueryAsSparseVectors.java
public static void main(String[] args) { String optKeys[] = { CommonParams.MAX_NUM_QUERY_PARAM, MAX_NUM_DATA_PARAM, CommonParams.MEMINDEX_PARAM, IN_QUERIES_PARAM, OUT_QUERIES_PARAM, OUT_DATA_PARAM, TEXT_FIELD_PARAM, TEST_QTY_PARAM, }; String optDescs[] = { CommonParams.MAX_NUM_QUERY_DESC, MAX_NUM_DATA_DESC, CommonParams.MEMINDEX_DESC, IN_QUERIES_DESC, OUT_QUERIES_DESC, OUT_DATA_DESC, TEXT_FIELD_DESC, TEST_QTY_DESC }; boolean hasArg[] = { true, true, true, true, true, true, true, true }; ParamHelper prmHlp = null;/*from w w w. ja va 2s. c om*/ try { prmHlp = new ParamHelper(args, optKeys, optDescs, hasArg); CommandLine cmd = prmHlp.getCommandLine(); Options opt = prmHlp.getOptions(); int maxNumQuery = Integer.MAX_VALUE; String tmpn = cmd.getOptionValue(CommonParams.MAX_NUM_QUERY_PARAM); if (tmpn != null) { try { maxNumQuery = Integer.parseInt(tmpn); } catch (NumberFormatException e) { UsageSpecify(CommonParams.MAX_NUM_QUERY_PARAM, opt); } } int maxNumData = Integer.MAX_VALUE; tmpn = cmd.getOptionValue(MAX_NUM_DATA_PARAM); if (tmpn != null) { try { maxNumData = Integer.parseInt(tmpn); } catch (NumberFormatException e) { UsageSpecify(MAX_NUM_DATA_PARAM, opt); } } String memIndexPref = cmd.getOptionValue(CommonParams.MEMINDEX_PARAM); if (null == memIndexPref) { UsageSpecify(CommonParams.MEMINDEX_PARAM, opt); } String textField = cmd.getOptionValue(TEXT_FIELD_PARAM); if (null == textField) { UsageSpecify(TEXT_FIELD_PARAM, opt); } textField = textField.toLowerCase(); int fieldId = -1; for (int i = 0; i < FeatureExtractor.mFieldNames.length; ++i) if (FeatureExtractor.mFieldNames[i].compareToIgnoreCase(textField) == 0) { fieldId = i; break; } if (-1 == fieldId) { Usage("Wrong field index, should be one of the following: " + String.join(",", FeatureExtractor.mFieldNames), opt); } InMemForwardIndex indx = new InMemForwardIndex( FeatureExtractor.indexFileName(memIndexPref, FeatureExtractor.mFieldNames[fieldId])); BM25SimilarityLucene bm25simil = new 
BM25SimilarityLucene(FeatureExtractor.BM25_K1, FeatureExtractor.BM25_B, indx); String inQueryFile = cmd.getOptionValue(IN_QUERIES_PARAM); String outQueryFile = cmd.getOptionValue(OUT_QUERIES_PARAM); if ((inQueryFile == null) != (outQueryFile == null)) { Usage("You should either specify both " + IN_QUERIES_PARAM + " and " + OUT_QUERIES_PARAM + " or none of them", opt); } String outDataFile = cmd.getOptionValue(OUT_DATA_PARAM); tmpn = cmd.getOptionValue(TEST_QTY_PARAM); int testQty = 0; if (tmpn != null) { try { testQty = Integer.parseInt(tmpn); } catch (NumberFormatException e) { UsageSpecify(TEST_QTY_PARAM, opt); } } ArrayList<DocEntry> testDocEntries = new ArrayList<DocEntry>(); ArrayList<DocEntry> testQueryEntries = new ArrayList<DocEntry>(); ArrayList<TrulySparseVector> testDocVectors = new ArrayList<TrulySparseVector>(); ArrayList<TrulySparseVector> testQueryVectors = new ArrayList<TrulySparseVector>(); if (outDataFile != null) { BufferedWriter out = new BufferedWriter( new OutputStreamWriter(CompressUtils.createOutputStream(outDataFile))); ArrayList<DocEntryExt> docEntries = indx.getDocEntries(); for (int id = 0; id < Math.min(maxNumData, docEntries.size()); ++id) { DocEntry e = docEntries.get(id).mDocEntry; TrulySparseVector v = bm25simil.getDocSparseVector(e, false); if (id < testQty) { testDocEntries.add(e); testDocVectors.add(v); } outputVector(out, v); } out.close(); } Splitter splitOnSpace = Splitter.on(' ').trimResults().omitEmptyStrings(); if (outQueryFile != null) { BufferedReader inpText = new BufferedReader( new InputStreamReader(CompressUtils.createInputStream(inQueryFile))); BufferedWriter out = new BufferedWriter( new OutputStreamWriter(CompressUtils.createOutputStream(outQueryFile))); String queryText = XmlHelper.readNextXMLIndexEntry(inpText); for (int queryQty = 0; queryText != null && queryQty < maxNumQuery; queryText = XmlHelper .readNextXMLIndexEntry(inpText), queryQty++) { Map<String, String> queryFields = null; // 1. 
Parse a query try { queryFields = XmlHelper.parseXMLIndexEntry(queryText); } catch (Exception e) { System.err.println("Parsing error, offending QUERY:\n" + queryText); throw new Exception("Parsing error."); } String fieldText = queryFields.get(FeatureExtractor.mFieldsSOLR[fieldId]); if (fieldText == null) { fieldText = ""; } ArrayList<String> tmpa = new ArrayList<String>(); for (String s : splitOnSpace.split(fieldText)) tmpa.add(s); DocEntry e = indx.createDocEntry(tmpa.toArray(new String[tmpa.size()])); TrulySparseVector v = bm25simil.getDocSparseVector(e, true); if (queryQty < testQty) { testQueryEntries.add(e); testQueryVectors.add(v); } outputVector(out, v); } out.close(); } int testedQty = 0, diffQty = 0; // Now let's do some testing for (int iq = 0; iq < testQueryEntries.size(); ++iq) { DocEntry queryEntry = testQueryEntries.get(iq); TrulySparseVector queryVector = testQueryVectors.get(iq); for (int id = 0; id < testDocEntries.size(); ++id) { DocEntry docEntry = testDocEntries.get(id); TrulySparseVector docVector = testDocVectors.get(id); float val1 = bm25simil.compute(queryEntry, docEntry); float val2 = TrulySparseVector.scalarProduct(queryVector, docVector); ++testedQty; if (Math.abs(val1 - val2) > 1e5) { System.err.println( String.format("Potential mismatch BM25=%f <-> scalar product=%f", val1, val2)); ++diffQty; } } } if (testedQty > 0) System.out.println(String.format("Tested %d Mismatched %d", testedQty, diffQty)); } catch (ParseException e) { Usage("Cannot parse arguments: " + e, prmHlp != null ? prmHlp.getOptions() : null); e.printStackTrace(); } catch (Exception e) { e.printStackTrace(); System.err.println("Terminating due to an exception: " + e); System.exit(1); } }
From source file:com.github.rinde.dynurg.Experimentation.java
/**
 * Runs the dynamism/urgency experiment suite: builds a distributed experiment
 * with nine solver configurations (cheapest-insertion plus breadth-/depth-first
 * 2-opt variants, each optimized for SUM, TARDINESS or DISTANCE), exposes a
 * CLI via {@code ExperimentCli}, performs the runs, and writes one CSV of
 * per-scenario metrics per configuration under RESULTS.
 */
public static void main(String[] args) {
    System.out.println(System.getProperty("jppf.config"));
    final long time = System.currentTimeMillis();
    // Nine configurations: plain cheapest-insertion and 2-opt (bfs/dfs) wrappers,
    // each instantiated for the SUM, TARDINESS and DISTANCE objectives.
    final Experiment.Builder experimentBuilder = Experiment.build(SUM).computeDistributed().withRandomSeed(123)
            .repeat(10).numBatches(10)
            .addScenarios(
                    FileProvider.builder().add(Paths.get(DATASET)).filter("glob:**[01].[0-9]0#[0-5].scen"))
            .addResultListener(new CommandLineProgress(System.out))
            .addConfiguration(
                    Central.solverConfiguration(CheapestInsertionHeuristic.supplier(SUM), "-CheapInsert"))
            .addConfiguration(Central.solverConfiguration(CheapestInsertionHeuristic.supplier(TARDINESS),
                    "-CheapInsert-Tard"))
            .addConfiguration(Central.solverConfiguration(CheapestInsertionHeuristic.supplier(DISTANCE),
                    "-CheapInsert-Dist"))
            .addConfiguration(Central.solverConfiguration(
                    Opt2.breadthFirstSupplier(CheapestInsertionHeuristic.supplier(SUM), SUM),
                    "-bfsOpt2-CheapInsert"))
            .addConfiguration(Central.solverConfiguration(
                    Opt2.breadthFirstSupplier(CheapestInsertionHeuristic.supplier(TARDINESS), TARDINESS),
                    "-bfsOpt2-CheapInsert-Tard"))
            .addConfiguration(Central.solverConfiguration(
                    Opt2.breadthFirstSupplier(CheapestInsertionHeuristic.supplier(DISTANCE), DISTANCE),
                    "-bfsOpt2-CheapInsert-Dist"))
            .addConfiguration(Central.solverConfiguration(
                    Opt2.depthFirstSupplier(CheapestInsertionHeuristic.supplier(SUM), SUM),
                    "-dfsOpt2-CheapInsert"))
            .addConfiguration(Central.solverConfiguration(
                    Opt2.depthFirstSupplier(CheapestInsertionHeuristic.supplier(TARDINESS), TARDINESS),
                    "-dfsOpt2-CheapInsert-Tard"))
            .addConfiguration(Central.solverConfiguration(
                    Opt2.depthFirstSupplier(CheapestInsertionHeuristic.supplier(DISTANCE), DISTANCE),
                    "-dfsOpt2-CheapInsert-Dist"));
    // Optional -nv/--number-of-vehicles flag rewrites every scenario's fleet size.
    final Menu m = ExperimentCli.createMenuBuilder(experimentBuilder)
            .add(Option.builder("nv", ArgumentParser.INTEGER).longName("number-of-vehicles")
                    .description("Changes the number of vehicles in all scenarios.").build(), experimentBuilder,
                    new ArgHandler<Experiment.Builder, Integer>() {
                        @Override
                        public void execute(Experiment.Builder subject, Optional<Integer> argument) {
                            subject.setScenarioReader(new NumVehiclesScenarioParser(argument.get()));
                        }
                    })
            .build();
    final Optional<String> error = m.safeExecute(args);
    if (error.isPresent()) {
        System.err.println(error.get());
        return;
    }
    final ExperimentResults results = experimentBuilder.perform();
    final long duration = System.currentTimeMillis() - time;
    System.out
            .println("Done, computed " + results.results.size() + " simulations in " + duration / 1000d + "s");
    // Group results per configuration so each gets its own CSV file.
    final Multimap<MASConfiguration, SimulationResult> groupedResults = LinkedHashMultimap.create();
    for (final SimulationResult sr : results.sortedResults()) {
        groupedResults.put(sr.masConfiguration, sr);
    }
    for (final MASConfiguration config : groupedResults.keySet()) {
        final Collection<SimulationResult> group = groupedResults.get(config);
        final File configResult = new File(RESULTS + config.toString() + ".csv");
        try {
            Files.createParentDirs(configResult);
        } catch (final IOException e1) {
            throw new IllegalStateException(e1);
        }
        // deletes the file in case it already exists
        configResult.delete();
        try {
            Files.append(
                    "dynamism,urgency_mean,urgency_sd,cost,travel_time,tardiness,over_time,is_valid,scenario_id,random_seed,comp_time,num_vehicles\n",
                    configResult, Charsets.UTF_8);
        } catch (final IOException e1) {
            throw new IllegalStateException(e1);
        }
        for (final SimulationResult sr : group) {
            final String pc = sr.scenario.getProblemClass().getId();
            final String id = sr.scenario.getProblemInstanceId();
            final int numVehicles = FluentIterable.from(sr.scenario.asList()).filter(AddVehicleEvent.class)
                    .size();
            try {
                // Scenario metadata lives in "files/dataset/<pc><id>.properties"
                // as "key = value" lines; re-join + split into a map.
                final List<String> propsStrings = Files
                        .readLines(new File("files/dataset/" + pc + id + ".properties"), Charsets.UTF_8);
                final Map<String, String> properties = Splitter.on("\n").withKeyValueSeparator(" = ")
                        .split(Joiner.on("\n").join(propsStrings));
                final double dynamism = Double.parseDouble(properties.get("dynamism"));
                final double urgencyMean = Double.parseDouble(properties.get("urgency_mean"));
                final double urgencySd = Double.parseDouble(properties.get("urgency_sd"));
                final double cost = SUM.computeCost(sr.stats);
                final double travelTime = SUM.travelTime(sr.stats);
                final double tardiness = SUM.tardiness(sr.stats);
                final double overTime = SUM.overTime(sr.stats);
                final boolean isValidResult = SUM.isValidResult(sr.stats);
                final long computationTime = sr.stats.computationTime;
                // One CSV row matching the header written above.
                final String line = Joiner.on(",")
                        .appendTo(new StringBuilder(),
                                asList(dynamism, urgencyMean, urgencySd, cost, travelTime, tardiness, overTime,
                                        isValidResult, pc + id, sr.seed, computationTime, numVehicles))
                        .append(System.lineSeparator()).toString();
                if (!isValidResult) {
                    System.err.println("WARNING: FOUND AN INVALID RESULT: ");
                    System.err.println(line);
                }
                Files.append(line, configResult, Charsets.UTF_8);
            } catch (final IOException e) {
                throw new IllegalStateException(e);
            }
        }
    }
}
From source file:org.dllearner.algorithms.qtl.experiments.BenchmarkDescriptionGeneratorDatabase.java
/**
 * CLI entry point: parses options (benchmark directory, input queries file,
 * database table name, SPARQL endpoint, optional default graph, cache flag,
 * CBD structure string, tokens used to omit queries, Virtuoso workaround
 * flag), then runs the benchmark description generator against the endpoint.
 *
 * @param args command-line arguments; see the option declarations below.
 * @throws Exception on option-parsing or generation failure.
 */
public static void main(String[] args) throws Exception {
    OptionParser parser = new OptionParser();
    OptionSpec<File> benchmarkDirectorySpec = parser.accepts("d", "base directory").withRequiredArg()
            .ofType(File.class).required();
    OptionSpec<File> queriesFileSpec = parser.accepts("i", "input queries file").withRequiredArg()
            .ofType(File.class).required();
    OptionSpec<String> tableNameSpec = parser.accepts("db", "database name").withRequiredArg()
            .ofType(String.class).required();
    OptionSpec<URL> endpointURLSpec = parser.accepts("e", "endpoint URL").withRequiredArg().ofType(URL.class)
            .required();
    OptionSpec<String> defaultGraphSpec = parser.accepts("g", "default graph").withRequiredArg()
            .ofType(String.class);
    OptionSpec<Boolean> useCacheSpec = parser.accepts("cache", "use cache").withOptionalArg()
            .ofType(Boolean.class).defaultsTo(Boolean.TRUE);
    OptionSpec<Boolean> queriesHaveIdSpec = parser.accepts("id", "input file contains ID, SPARQL query")
            .withOptionalArg().ofType(Boolean.class).defaultsTo(Boolean.TRUE);
    OptionSpec<String> cbdSpec = parser.accepts("cbd", "CBD structure tree string").withOptionalArg()
            .ofType(String.class).required();
    OptionSpec<String> queriesToOmitTokensSpec = parser
            .accepts("omitTokens",
                    "comma-separated list of tokens such that queries containing any of them will be omitted")
            .withRequiredArg().ofType(String.class).defaultsTo("");
    OptionSpec<Boolean> workaroundSpec = parser.accepts("workaround", "Virtuoso parse error workaround enabled")
            .withRequiredArg().ofType(Boolean.class).defaultsTo(Boolean.FALSE);

    OptionSet options = parser.parse(args);

    File benchmarkDirectory = options.valueOf(benchmarkDirectorySpec);
    File inputFile = options.valueOf(queriesFileSpec);
    String tableName = options.valueOf(tableNameSpec);
    URL endpointURL = options.valueOf(endpointURLSpec);
    // At most one default graph is supported via -g; otherwise none.
    List<String> defaultGraphs = options.has(defaultGraphSpec)
            ? Lists.newArrayList(options.valueOf(defaultGraphSpec))
            : Collections.emptyList();
    SparqlEndpoint endpoint = SparqlEndpoint.create(endpointURL.toString(), defaultGraphs);
    // SparqlEndpointKS ks = new SparqlEndpointKS(endpoint);
    // ks.setUseCache(options.valueOf(useCacheSpec));
    // ks.setCacheDir(benchmarkDirectory.getPath());
    // ks.setQueryDelay(1000);
    // ks.setRetryCount(0);
    // ks.init();
    // Query results are cached (when enabled) for 30 days in the benchmark dir.
    QueryExecutionFactory qef = buildQueryExecutionFactory(endpoint, options.valueOf(useCacheSpec),
            benchmarkDirectory.getPath(), TimeUnit.DAYS.toMillis(30), 0, 60);

    CBDStructureTree cbdStructureTree = CBDStructureTree.fromTreeString(options.valueOf(cbdSpec).trim());

    // Queries containing any of these tokens are skipped.
    List<String> omitTokens = Splitter.on(",").omitEmptyStrings().trimResults()
            .splitToList(options.valueOf(queriesToOmitTokensSpec));

    BenchmarkDescriptionGeneratorDatabase generator = new BenchmarkDescriptionGeneratorDatabase(qef);
    generator.setDefaultCbdStructure(cbdStructureTree);
    generator.setSkipQueryTokens(omitTokens);
    generator.setEndpoint(endpoint);
    generator.setWorkaroundEnabled(options.valueOf(workaroundSpec));
    generator.generateBenchmarkDescription(inputFile, tableName, options.valueOf(queriesHaveIdSpec));
}
From source file:org.attribyte.api.pubsub.impl.server.Server.java
/** * Starts the server.//from w w w . j a va 2s . c o m * @param args The startup args. * @throws Exception on startup error. */ public static void main(String[] args) throws Exception { if (args.length < 1) { System.err.println("Start-up error: Expecting <config file> [allowed topics file]"); System.exit(1); } Properties commandLineOverrides = new Properties(); args = InitUtil.fromCommandLine(args, commandLineOverrides); Properties props = new Properties(); Properties logProps = new Properties(); CLI.loadProperties(args, props, logProps); props.putAll(commandLineOverrides); logProps.putAll(commandLineOverrides); final Logger logger = initLogger(props, logProps); logger.info("Applied command line overrides: " + commandLineOverrides.toString()); //Buffer and log hub events for logging and debug... final int MAX_STORED_SUBSCRIPTION_REQUESTS = 200; final ArrayBlockingQueue<SubscriptionEvent> recentSubscriptionRequests = new ArrayBlockingQueue<>( MAX_STORED_SUBSCRIPTION_REQUESTS); final HubEndpoint.EventHandler hubEventHandler = new HubEndpoint.EventHandler() { private synchronized void offer(SubscriptionEvent record) { if (!recentSubscriptionRequests.offer(record)) { List<SubscriptionEvent> drain = Lists .newArrayListWithCapacity(MAX_STORED_SUBSCRIPTION_REQUESTS / 2); recentSubscriptionRequests.drainTo(drain, drain.size()); recentSubscriptionRequests.offer(record); } } @Override public void subscriptionRequestAccepted(final Request request, final Response response, final Subscriber subscriber) { final SubscriptionEvent record; try { record = new SubscriptionRequestRecord(request, response, subscriber); } catch (IOException ioe) { return; } logger.info(record.toString()); offer(record); } @Override public void subscriptionRequestRejected(final Request request, final Response response, final Subscriber subscriber) { final SubscriptionEvent record; try { record = new SubscriptionRequestRecord(request, response, subscriber); } catch (IOException ioe) { return; } 
logger.warn(record.toString()); offer(record); } @Override public void subscriptionVerifyFailure(String callbackURL, int callbackResponseCode, String reason, int attempts, boolean abandoned) { final SubscriptionEvent record = new SubscriptionVerifyRecord(callbackURL, callbackResponseCode, reason, attempts, abandoned); logger.warn(record.toString()); offer(record); } @Override public void subscriptionVerified(Subscription subscription) { final SubscriptionEvent record = new SubscriptionVerifyRecord(subscription); logger.info(record.toString()); offer(record); } }; /** * A source for subscription request records (for console, etc). */ final SubscriptionEvent.Source subscriptionEventSource = new SubscriptionEvent.Source() { public List<SubscriptionEvent> latestEvents(int limit) { List<SubscriptionEvent> records = Lists.newArrayList(recentSubscriptionRequests); Collections.sort(records); return records.size() < limit ? records : records.subList(0, limit); } }; /** * A queue to which new topics are added as reported by the datastore event handler. */ final BlockingQueue<Topic> newTopicQueue = new LinkedBlockingDeque<>(); /** * A datastore event handler that offers new topics to a queue. */ final HubDatastore.EventHandler topicEventHandler = new HubDatastore.EventHandler() { @Override public void newTopic(final Topic topic) throws DatastoreException { newTopicQueue.offer(topic); } @Override public void newSubscription(final Subscription subscription) throws DatastoreException { //Ignore } @Override public void exception(final Throwable t) { //Ignore } @Override public void setNext(final HubDatastore.EventHandler next) { //Ignore } }; final HubEndpoint endpoint = new HubEndpoint("endpoint.", props, logger, hubEventHandler, topicEventHandler); final String topicAddedTopicURL = Strings.emptyToNull(props.getProperty("endpoint.topicAddedTopic", "")); final Topic topicAddedTopic = topicAddedTopicURL != null ? 
endpoint.getDatastore().getTopic(topicAddedTopicURL, true) : null; final Thread topicAddedNotifier = topicAddedTopic != null ? new Thread(new TopicAddedNotifier(newTopicQueue, endpoint, topicAddedTopic)) : null; if (topicAddedNotifier != null) { topicAddedNotifier.setName("topic-added-notifier"); topicAddedNotifier.start(); } if (props.getProperty("endpoint.topics") != null) { //Add supported topics... for (String topicURL : Splitter.on(",").omitEmptyStrings().trimResults() .split(props.getProperty("endpoint.topics"))) { Topic topic = endpoint.getDatastore().getTopic(topicURL, true); System.out.println("Added topic, '" + topicURL + "' (" + topic.getId() + ")"); } } final MetricRegistry registry = props.getProperty("endpoint.instrumentJVM", "true").equalsIgnoreCase("true") ? instrumentJVM(new MetricRegistry()) : new MetricRegistry(); if (props.getProperty("endpoint.instrumentSystem", "true").equalsIgnoreCase("true")) { instrumentSystem(registry); } registry.registerAll(endpoint); final HealthCheckRegistry healthCheckRegistry = new HealthCheckRegistry(); //TODO final Reporting reporting = new Reporting("metrics-reporting.", props, registry, null); //No filter... 
String httpAddress = props.getProperty("http.address", "127.0.0.1"); int httpPort = Integer.parseInt(props.getProperty("http.port", "8086")); org.eclipse.jetty.server.Server server = new org.eclipse.jetty.server.Server(); server.addLifeCycleListener(new LifeCycle.Listener() { public void lifeCycleFailure(LifeCycle event, Throwable cause) { System.out.println("Failure " + cause.toString()); } public void lifeCycleStarted(LifeCycle event) { System.out.println("Started..."); } public void lifeCycleStarting(LifeCycle event) { System.out.println("Server Starting..."); } public void lifeCycleStopped(LifeCycle event) { System.out.println("Server Stopped..."); } public void lifeCycleStopping(LifeCycle event) { System.out.println("Shutting down metrics reporting..."); reporting.stop(); if (topicAddedNotifier != null) { System.out.println("Shutting down new topic notifier..."); topicAddedNotifier.interrupt(); } System.out.println("Shutting down endpoint..."); endpoint.shutdown(); System.out.println("Shutdown endpoint..."); } }); HttpConfiguration httpConfig = new HttpConfiguration(); httpConfig.setOutputBufferSize(32768); httpConfig.setRequestHeaderSize(8192); httpConfig.setResponseHeaderSize(8192); httpConfig.setSendServerVersion(false); httpConfig.setSendDateHeader(false); ServerConnector httpConnector = new ServerConnector(server, new HttpConnectionFactory(httpConfig)); httpConnector.setHost(httpAddress); httpConnector.setPort(httpPort); httpConnector.setIdleTimeout(30000L); server.addConnector(httpConnector); HandlerCollection serverHandlers = new HandlerCollection(); server.setHandler(serverHandlers); ServletContextHandler rootContext = new ServletContextHandler(ServletContextHandler.NO_SESSIONS); rootContext.setContextPath("/"); final AdminConsole adminConsole; final List<String> allowedAssetPaths; if (props.getProperty("admin.enabled", "false").equalsIgnoreCase("true")) { File assetDirFile = getSystemFile("admin.assetDirectory", props); if (assetDirFile == null) { 
System.err.println("The 'admin.assetDirectory' must be configured"); System.exit(1); } if (!assetDirFile.exists()) { System.err.println("The 'admin.assetDirectory'" + assetDirFile.getAbsolutePath() + "' must exist"); System.exit(1); } if (!assetDirFile.isDirectory()) { System.err.println( "The 'admin.assetDirectory'" + assetDirFile.getAbsolutePath() + "' must be a directory"); System.exit(1); } if (!assetDirFile.canRead()) { System.err.println( "The 'admin.assetDirectory'" + assetDirFile.getAbsolutePath() + "' must be readable"); System.exit(1); } char[] adminUsername = props.getProperty("admin.username", "").toCharArray(); char[] adminPassword = props.getProperty("admin.password", "").toCharArray(); String adminRealm = props.getProperty("admin.realm", "pubsubhub"); if (adminUsername.length == 0 || adminPassword.length == 0) { System.err.println("The 'admin.username' and 'admin.password' must be specified"); System.exit(1); } File templateDirFile = getSystemFile("admin.templateDirectory", props); if (templateDirFile == null) { System.err.println("The 'admin.templateDirectory' must be specified"); System.exit(1); } if (!templateDirFile.exists()) { System.err .println("The 'admin.templateDirectory'" + assetDirFile.getAbsolutePath() + "' must exist"); System.exit(1); } if (!templateDirFile.isDirectory()) { System.err.println( "The 'admin.templateDirectory'" + assetDirFile.getAbsolutePath() + "' must be a directory"); System.exit(1); } if (!templateDirFile.canRead()) { System.err.println( "The 'admin.templateDirectory'" + assetDirFile.getAbsolutePath() + "' must be readable"); System.exit(1); } adminConsole = new AdminConsole(rootContext, assetDirFile.getAbsolutePath(), endpoint, new AdminAuth(adminRealm, adminUsername, adminPassword), templateDirFile.getAbsolutePath(), logger); allowedAssetPaths = Lists.newArrayList(Splitter.on(',').omitEmptyStrings().trimResults() .split(props.getProperty("admin.assetPaths", ""))); System.out.println("Admin console is enabled..."); } 
else { adminConsole = null; allowedAssetPaths = ImmutableList.of(); } serverHandlers.addHandler(rootContext); //TODO: Introduces incompatible dependency... /* InstrumentedHandler instrumentedHandler = new InstrumentedHandler(registry); instrumentedHandler.setName("http-server"); instrumentedHandler.setHandler(rootContext); serverHandlers.addHandler(instrumentedHandler); */ File requestLogPathFile = getSystemFile("http.log.path", props); if (requestLogPathFile != null) { if (!requestLogPathFile.exists()) { System.err .println("The 'http.log.path', '" + requestLogPathFile.getAbsolutePath() + "' must exist"); System.exit(1); } if (!requestLogPathFile.isDirectory()) { System.err.println( "The 'http.log.path', '" + requestLogPathFile.getAbsolutePath() + "' must be a directory"); System.exit(1); } if (!requestLogPathFile.canWrite()) { System.err.println( "The 'http.log.path', '" + requestLogPathFile.getAbsolutePath() + "' is not writable"); System.exit(1); } int requestLogRetainDays = Integer.parseInt(props.getProperty("http.log.retainDays", "14")); boolean requestLogExtendedFormat = props.getProperty("http.log.extendedFormat", "true") .equalsIgnoreCase("true"); String requestLogTimeZone = props.getProperty("http.log.timeZone", TimeZone.getDefault().getID()); String requestLogPrefix = props.getProperty("http.log.prefix", "requests"); String requestLogPath = requestLogPathFile.getAbsolutePath(); if (!requestLogPath.endsWith("/")) { requestLogPath = requestLogPath + "/"; } NCSARequestLog requestLog = new NCSARequestLog(requestLogPath + requestLogPrefix + "-yyyy_mm_dd.log"); requestLog.setRetainDays(requestLogRetainDays); requestLog.setAppend(true); requestLog.setExtended(requestLogExtendedFormat); requestLog.setLogTimeZone(requestLogTimeZone); requestLog.setLogCookies(false); requestLog.setPreferProxiedForAddress(true); RequestLogHandler requestLogHandler = new RequestLogHandler(); requestLogHandler.setRequestLog(requestLog); serverHandlers.addHandler(requestLogHandler); } 
HubServlet hubServlet = new HubServlet(endpoint, logger); rootContext.addServlet(new ServletHolder(hubServlet), "/subscribe/*"); InitUtil filterInit = new InitUtil("publish.", props); List<BasicAuthFilter> publishURLFilters = Lists.newArrayList(); List<Object> publishURLFilterObjects = filterInit.initClassList("topicURLFilters", BasicAuthFilter.class); for (Object o : publishURLFilterObjects) { BasicAuthFilter filter = (BasicAuthFilter) o; filter.init(filterInit.getProperties()); publishURLFilters.add(filter); } final long topicCacheMaxAgeSeconds = Long .parseLong(props.getProperty("endpoint.topicCache.maxAgeSeconds", "0")); final Cache<String, Topic> topicCache; if (topicCacheMaxAgeSeconds > 0) { topicCache = CacheBuilder.newBuilder().concurrencyLevel(16) .expireAfterWrite(topicCacheMaxAgeSeconds, TimeUnit.SECONDS).maximumSize(4096).build(); } else { topicCache = null; } final String replicationTopicURL = Strings.emptyToNull(props.getProperty("endpoint.replicationTopic", "")); //Get or create replication topic, if configured. final Topic replicationTopic = replicationTopicURL != null ? 
endpoint.getDatastore().getTopic(replicationTopicURL, true) : null; int maxBodySizeBytes = filterInit.getIntProperty("maxBodySizeBytes", BroadcastServlet.DEFAULT_MAX_BODY_BYTES); boolean autocreateTopics = filterInit.getProperty("autocreateTopics", "false").equalsIgnoreCase("true"); int maxSavedNotifications = filterInit.getIntProperty("maxSavedNotifications", 0); boolean jsonEnabled = filterInit.getProperty("jsonEnabled", "false").equalsIgnoreCase("true"); final BroadcastServlet broadcastServlet = new BroadcastServlet(endpoint, maxBodySizeBytes, autocreateTopics, logger, publishURLFilters, topicCache, replicationTopic, maxSavedNotifications, jsonEnabled); rootContext.addServlet(new ServletHolder(broadcastServlet), "/notify/*"); CallbackMetricsServlet callbackMetricsServlet = new CallbackMetricsServlet(endpoint); ServletHolder callbackMetricsServletHolder = new ServletHolder(callbackMetricsServlet); rootContext.addServlet(callbackMetricsServletHolder, "/metrics/callback/*"); NotificationMetricsServlet notificationMetricsServlet = new NotificationMetricsServlet(endpoint); ServletHolder notificationMetricsServletHolder = new ServletHolder(notificationMetricsServlet); rootContext.addServlet(notificationMetricsServletHolder, "/metrics/notification/*"); MetricsServlet metricsServlet = new MetricsServlet(registry); ServletHolder metricsServletHolder = new ServletHolder(metricsServlet); rootContext.setInitParameter(MetricsServlet.RATE_UNIT, "SECONDS"); rootContext.setInitParameter(MetricsServlet.DURATION_UNIT, "MILLISECONDS"); rootContext.setInitParameter(MetricsServlet.SHOW_SAMPLES, "false"); rootContext.addServlet(metricsServletHolder, "/metrics/*"); boolean outputHostAddys = props.getProperty("ping.outputHostAddresses", "false").equalsIgnoreCase("true"); PingServlet pingServlet = new PingServlet(props.getProperty("http.instanceName", ""), outputHostAddys); rootContext.addServlet(new ServletHolder(pingServlet), "/ping/*"); HealthCheckServlet healthCheckServlet = new 
HealthCheckServlet(healthCheckRegistry); for (Map.Entry<String, HealthCheck> healthCheck : endpoint.getDatastore().getHealthChecks().entrySet()) { healthCheckRegistry.register(healthCheck.getKey(), healthCheck.getValue()); } healthCheckRegistry.register("no-deadlocked-threads", new ThreadDeadlockHealthCheck()); rootContext.addServlet(new ServletHolder(healthCheckServlet), "/health/*"); ThreadDumpServlet threadDumpServlet = new ThreadDumpServlet(); rootContext.addServlet(new ServletHolder(threadDumpServlet), "/threads/*"); if (adminConsole != null && allowedAssetPaths.size() > 0) { String adminPath = props.getProperty("admin.path", "/admin/"); List<Invalidatable> invalidatables = Collections.<Invalidatable>singletonList(new Invalidatable() { @Override public void invalidate() { broadcastServlet.invalidateCaches(); if (topicCache != null) { topicCache.invalidateAll(); } } }); adminConsole.initServlets(rootContext, adminPath, allowedAssetPaths, invalidatables, subscriptionEventSource, broadcastServlet); } int numReporters = reporting.start(); logger.info("Started " + numReporters + " metrics reporters"); server.setDumpBeforeStop(false); server.setStopAtShutdown(true); server.start(); server.join(); }
From source file:org.geogit.cli.ArgumentTokenizer.java
/**
 * Splits a command-line string into individual argument tokens.
 *
 * <p>Splitting is driven by the {@code UnquotedSpace} matcher — presumably it
 * matches whitespace that is not inside quotes, so quoted arguments survive as
 * single tokens (TODO confirm against UnquotedSpace's implementation).
 *
 * @param s the raw command line to tokenize
 * @return the tokens as an array, in order of appearance
 */
public static String[] tokenize(String s) {
    Splitter argSplitter = Splitter.on(new UnquotedSpace());
    Iterable<String> parts = argSplitter.split(s);
    return Iterables.toArray(parts, String.class);
}
From source file:org.apache.jackrabbit.oak.scalability.ScalabilityRunner.java
public static void main(String[] args) throws Exception { OptionParser parser = new OptionParser(); OptionSpec<File> base = parser.accepts("base", "Base directory").withRequiredArg().ofType(File.class) .defaultsTo(new File("target")); OptionSpec<String> host = parser.accepts("host", "MongoDB host").withRequiredArg().defaultsTo("localhost"); OptionSpec<Integer> port = parser.accepts("port", "MongoDB port").withRequiredArg().ofType(Integer.class) .defaultsTo(27017);/* w w w.j a v a 2 s. co m*/ OptionSpec<String> dbName = parser.accepts("db", "MongoDB database").withRequiredArg(); OptionSpec<Boolean> dropDBAfterTest = parser .accepts("dropDBAfterTest", "Whether to drop the MongoDB database after the test").withOptionalArg() .ofType(Boolean.class).defaultsTo(true); OptionSpec<String> rdbjdbcuri = parser.accepts("rdbjdbcuri", "RDB JDBC URI").withOptionalArg() .defaultsTo("jdbc:h2:./target/benchmark"); OptionSpec<String> rdbjdbcuser = parser.accepts("rdbjdbcuser", "RDB JDBC user").withOptionalArg() .defaultsTo(""); OptionSpec<String> rdbjdbcpasswd = parser.accepts("rdbjdbcpasswd", "RDB JDBC password").withOptionalArg() .defaultsTo(""); OptionSpec<String> rdbjdbctableprefix = parser.accepts("rdbjdbctableprefix", "RDB JDBC table prefix") .withOptionalArg().defaultsTo(""); OptionSpec<Boolean> mmap = parser.accepts("mmap", "TarMK memory mapping").withOptionalArg() .ofType(Boolean.class).defaultsTo("64".equals(System.getProperty("sun.arch.data.model"))); OptionSpec<Integer> cache = parser.accepts("cache", "cache size (MB)").withRequiredArg() .ofType(Integer.class).defaultsTo(100); OptionSpec<Integer> fdsCache = parser.accepts("blobCache", "cache size (MB)").withRequiredArg() .ofType(Integer.class).defaultsTo(32); OptionSpec<Boolean> withStorage = parser.accepts("storage", "Index storage enabled").withOptionalArg() .ofType(Boolean.class); OptionSpec<File> csvFile = parser.accepts("csvFile", "File to write a CSV version of the benchmark data.") 
.withOptionalArg().ofType(File.class); OptionSpec help = parser.acceptsAll(asList("h", "?", "help"), "show help").forHelp(); OptionSpec<String> nonOption = parser.nonOptions(); OptionSet options = parser.parse(args); if (options.has(help)) { parser.printHelpOn(System.out); System.exit(0); } int cacheSize = cache.value(options); RepositoryFixture[] allFixtures = new RepositoryFixture[] { new JackrabbitRepositoryFixture(base.value(options), cacheSize), OakRepositoryFixture.getMemoryNS(cacheSize * MB), OakRepositoryFixture.getMongo(host.value(options), port.value(options), dbName.value(options), dropDBAfterTest.value(options), cacheSize * MB), OakRepositoryFixture.getMongoWithFDS(host.value(options), port.value(options), dbName.value(options), dropDBAfterTest.value(options), cacheSize * MB, base.value(options), fdsCache.value(options)), OakRepositoryFixture.getMongoNS(host.value(options), port.value(options), dbName.value(options), dropDBAfterTest.value(options), cacheSize * MB), OakRepositoryFixture.getTar(base.value(options), 256, cacheSize, mmap.value(options)), OakRepositoryFixture.getTarWithBlobStore(base.value(options), 256, cacheSize, mmap.value(options)), OakRepositoryFixture.getSegmentTar(base.value(options), 256, cacheSize, mmap.value(options)), OakRepositoryFixture.getSegmentTarWithBlobStore(base.value(options), 256, cacheSize, mmap.value(options)), OakRepositoryFixture.getRDB(rdbjdbcuri.value(options), rdbjdbcuser.value(options), rdbjdbcpasswd.value(options), rdbjdbctableprefix.value(options), dropDBAfterTest.value(options), cacheSize * MB), OakRepositoryFixture.getRDBWithFDS(rdbjdbcuri.value(options), rdbjdbcuser.value(options), rdbjdbcpasswd.value(options), rdbjdbctableprefix.value(options), dropDBAfterTest.value(options), cacheSize * MB, base.value(options), fdsCache.value(options)) }; ScalabilitySuite[] allSuites = new ScalabilitySuite[] { new ScalabilityBlobSearchSuite(withStorage.value(options)).addBenchmarks(new FullTextSearcher(), new 
NodeTypeSearcher(), new FormatSearcher(), new FacetSearcher(), new LastModifiedSearcher(Date.LAST_2_HRS), new LastModifiedSearcher(Date.LAST_24_HRS), new LastModifiedSearcher(Date.LAST_7_DAYS), new LastModifiedSearcher(Date.LAST_MONTH), new LastModifiedSearcher(Date.LAST_YEAR), new OrderByDate()), new ScalabilityNodeSuite(withStorage.value(options)).addBenchmarks(new OrderBySearcher(), new SplitOrderBySearcher(), new OrderByOffsetPageSearcher(), new SplitOrderByOffsetPageSearcher(), new OrderByKeysetPageSearcher(), new SplitOrderByKeysetPageSearcher(), new MultiFilterOrderBySearcher(), new MultiFilterSplitOrderBySearcher(), new MultiFilterOrderByOffsetPageSearcher(), new MultiFilterSplitOrderByOffsetPageSearcher(), new MultiFilterOrderByKeysetPageSearcher(), new MultiFilterSplitOrderByKeysetPageSearcher(), new ConcurrentReader(), new ConcurrentWriter()), new ScalabilityNodeRelationshipSuite(withStorage.value(options)) .addBenchmarks(new AggregateNodeSearcher()) }; Set<String> argset = Sets.newHashSet(nonOption.values(options)); List<RepositoryFixture> fixtures = Lists.newArrayList(); for (RepositoryFixture fixture : allFixtures) { if (argset.remove(fixture.toString())) { fixtures.add(fixture); } } Map<String, List<String>> argmap = Maps.newHashMap(); // Split the args to get suites and benchmarks (i.e. 
suite:benchmark1,benchmark2) for (String arg : argset) { List<String> tokens = Splitter.on(":").limit(2).splitToList(arg); if (tokens.size() > 1) { argmap.put(tokens.get(0), Splitter.on(",").trimResults().splitToList(tokens.get(1))); } else { argmap.put(tokens.get(0), null); } argset.remove(arg); } if (argmap.isEmpty()) { System.err.println( "Warning: no scalability suites specified, " + "supported are: " + Arrays.asList(allSuites)); } List<ScalabilitySuite> suites = Lists.newArrayList(); for (ScalabilitySuite suite : allSuites) { if (argmap.containsKey(suite.toString())) { List<String> benchmarks = argmap.get(suite.toString()); // Only keep requested benchmarks if (benchmarks != null) { Iterator<String> iter = suite.getBenchmarks().keySet().iterator(); for (; iter.hasNext();) { String availBenchmark = iter.next(); if (!benchmarks.contains(availBenchmark)) { iter.remove(); } } } suites.add(suite); argmap.remove(suite.toString()); } } if (argmap.isEmpty()) { PrintStream out = null; if (options.has(csvFile)) { out = new PrintStream(FileUtils.openOutputStream(csvFile.value(options), true), false, Charsets.UTF_8.name()); } for (ScalabilitySuite suite : suites) { if (suite instanceof CSVResultGenerator) { ((CSVResultGenerator) suite).setPrintStream(out); } suite.run(fixtures); } if (out != null) { out.close(); } } else { System.err.println("Unknown arguments: " + argset); } }
From source file:org.pathirage.freshet.utils.Utilities.java
/**
 * Parses a formatted string of the shape {@code "k1=v1,k2=v2"} into a map.
 *
 * <p>Entries are separated by commas and each entry's key and value by an
 * equals sign. Delegates entirely to Guava's splitter, so malformed input
 * (missing '=', duplicate keys) raises {@link IllegalArgumentException}.
 *
 * @param formattedMap the comma/equals encoded map text
 * @return the decoded key-to-value map
 */
public static Map<String, String> parseMap(String formattedMap) {
    Splitter entrySplitter = Splitter.on(",");
    return entrySplitter.withKeyValueSeparator("=").split(formattedMap);
}
From source file:org.opendaylight.openflowplugin.extension.vendor.nicira.convertor.IpConverter.java
public static long Ipv4AddressToLong(Ipv4Address ipv4Address) { long result = 0; Iterable<String> splitted = Splitter.on('.').trimResults().omitEmptyStrings().split(ipv4Address.getValue()); List<String> splittedAddress = Lists.newArrayList(splitted.iterator()); int maxIndex = splittedAddress.size() - 1; ListIterator<String> listIter = splittedAddress.listIterator(); while (listIter.hasNext()) { String current = listIter.next(); int i = splittedAddress.indexOf(current); result |= (Long.parseLong(current) << ((maxIndex - i) * 8)); }//from www .j a v a 2s . co m return result & 0xFFFFFFFF; }
From source file:eu.cloudwave.wp5.common.util.Splitters.java
/**
 * Splits comma-separated text into its parts, trimming surrounding whitespace
 * from each part and dropping parts that end up empty.
 *
 * <p>NOTE(review): the separator comes from the file-level constant
 * {@code COMMA} — presumably the literal "," string/char; confirm at its
 * declaration.
 *
 * @param text the comma-separated input
 * @return the trimmed, non-empty parts in order of appearance
 */
public static Iterable<String> onComma(final String text) {
    Splitter commaSplitter = Splitter.on(COMMA).trimResults().omitEmptyStrings();
    return commaSplitter.split(text);
}