List of usage examples for java.lang.String.format
public static String format(String format, Object... args)
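Before the full program listings below, a minimal self-contained sketch of the conversions they rely on (%s, %d, the zero-padding flag, %.2f precision, and %n). The class name and values here are illustrative only, not taken from any of the projects listed.

public class FormatBasics {
    public static void main(String[] args) {
        // %s renders any argument via String.valueOf, %d formats integral types
        System.out.println(String.format("%s stream is ready for use", "wordcount"));
        System.out.println(String.format("Found %d records in %d shard(s)", 42, 2));

        // width/flags: %03d zero-pads to 3 digits, %.2f limits decimal places
        System.out.println(String.format("Cx%03d.txt", 7));      // Cx007.txt
        System.out.println(String.format("%.2f MB", 12.3456));   // 12.35 MB

        // %n emits the platform line separator; %% escapes a literal percent sign
        System.out.println(String.format("progress: 75%%%n"));
    }
}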
From source file:com.sludev.mssqlapplylog.MSSQLApplyLogMain.java
public static void main(String[] args) {
    CommandLineParser parser = new DefaultParser();
    Options options = new Options();

    // Most of the following defaults should be changed in
    // the --conf or "conf.properties" file
    String sqlURL = null;
    String sqlUser = null;
    String sqlPass = null;
    String sqlDb = null;
    String sqlHost = "127.0.0.1";
    String backupDirStr = null;
    String laterThanStr = "";
    String fullBackupPathStr = null;
    String fullBackupPatternStr = "(?:[\\w_-]+?)(\\d+)\\.bak";
    String fullBackupDatePatternStr = "yyyyMMddHHmm";
    String sqlProcessUser = null;
    String logBackupPatternStr = "(.*)\\.trn";
    String logBackupDatePatternStr = "yyyyMMddHHmmss";
    boolean doFullRestore = false;
    Boolean useLogFileLastMode = null;
    Boolean monitorLogBackupDir = null;

    options.addOption(Option.builder().longOpt("conf").desc("Configuration file.").hasArg().build());
    options.addOption(Option.builder().longOpt("laterthan").desc("'Later Than' file filter.").hasArg().build());
    options.addOption(Option.builder().longOpt("restore-full")
            .desc("Restore the full backup before continuing.").build());
    options.addOption(Option.builder().longOpt("use-lastmod")
            .desc("Sort/filter the log backups using their File-System 'Last Modified' date.").build());
    options.addOption(Option.builder().longOpt("monitor-backup-dir")
            .desc("Monitor the backup directory for new log backups, and apply them.").build());

    CommandLine line = null;
    try {
        try {
            line = parser.parse(options, args);
        } catch (ParseException ex) {
            throw new MSSQLApplyLogException(String.format("Error parsing command line.'%s'", ex.getMessage()), ex);
        }

        String confFile = null;

        // Process the command line arguments
        Iterator cmdI = line.iterator();
        while (cmdI.hasNext()) {
            Option currOpt = (Option) cmdI.next();
            String currOptName = currOpt.getLongOpt();

            switch (currOptName) {
            case "conf":
                // Parse the configuration file
                confFile = currOpt.getValue();
                break;

            case "laterthan":
                // "Later Than" file date filter
                laterThanStr = currOpt.getValue();
                break;

            case "restore-full":
                // Do a full backup restore before restoring logs
                doFullRestore = true;
                break;

            case "monitor-backup-dir":
                // Monitor the backup directory for new logs
                monitorLogBackupDir = true;
                break;

            case "use-lastmod":
                // Use the last-modified date on Log Backup files for sorting/filtering
                useLogFileLastMode = true;
                break;
            }
        }

        Properties confProperties = null;

        if (StringUtils.isBlank(confFile) || Files.isReadable(Paths.get(confFile)) == false) {
            throw new MSSQLApplyLogException(
                    "Missing or unreadable configuration file. \nPlease specify --conf");
        } else {
            // Process the conf.properties file
            confProperties = new Properties();

            try {
                confProperties.load(Files.newBufferedReader(Paths.get(confFile)));
            } catch (IOException ex) {
                throw new MSSQLApplyLogException("Error loading properties file", ex);
            }

            sqlURL = confProperties.getProperty("sqlURL", "");
            sqlUser = confProperties.getProperty("sqlUser", "");
            sqlPass = confProperties.getProperty("sqlPass", "");
            sqlDb = confProperties.getProperty("sqlDb", "");
            sqlHost = confProperties.getProperty("sqlHost", "");
            backupDirStr = confProperties.getProperty("backupDir", "");

            if (StringUtils.isBlank(laterThanStr)) {
                laterThanStr = confProperties.getProperty("laterThan", "");
            }

            fullBackupPathStr = confProperties.getProperty("fullBackupPath", fullBackupPathStr);
            fullBackupPatternStr = confProperties.getProperty("fullBackupPattern", fullBackupPatternStr);
            fullBackupDatePatternStr = confProperties.getProperty("fullBackupDatePattern", fullBackupDatePatternStr);
            sqlProcessUser = confProperties.getProperty("sqlProcessUser", "");
            logBackupPatternStr = confProperties.getProperty("logBackupPattern", logBackupPatternStr);
            logBackupDatePatternStr = confProperties.getProperty("logBackupDatePattern", logBackupDatePatternStr);

            if (useLogFileLastMode == null) {
                String useLogFileLastModeStr = confProperties.getProperty("useLogFileLastMode", "false");
                useLogFileLastMode = Boolean.valueOf(StringUtils.lowerCase(StringUtils.trim(useLogFileLastModeStr)));
            }

            if (monitorLogBackupDir == null) {
                String monitorBackupDirStr = confProperties.getProperty("monitorBackupDir", "false");
                monitorLogBackupDir = Boolean.valueOf(StringUtils.lowerCase(StringUtils.trim(monitorBackupDirStr)));
            }
        }
    } catch (MSSQLApplyLogException ex) {
        try (StringWriter sw = new StringWriter(); PrintWriter pw = new PrintWriter(sw)) {
            pw.append(String.format("Error : '%s'\n\n", ex.getMessage()));

            HelpFormatter formatter = new HelpFormatter();
            formatter.printHelp(pw, 80, "\njava -jar mssqlapplylog.jar ",
                    "\nThe MSSQLApplyLog application can be used in a variety of options and modes.\n",
                    options, 0, 2, " All Rights Reserved.", true);

            System.out.println(sw.toString());
        } catch (IOException iex) {
            LOGGER.debug("Error processing usage", iex);
        }

        System.exit(1);
    }

    MSSQLApplyLogConfig config = MSSQLApplyLogConfig.from(backupDirStr, fullBackupPathStr,
            fullBackupDatePatternStr, laterThanStr, fullBackupPatternStr, logBackupPatternStr,
            logBackupDatePatternStr, sqlHost, sqlDb, sqlUser, sqlPass, sqlURL, sqlProcessUser,
            useLogFileLastMode, doFullRestore, monitorLogBackupDir);

    MSSQLApplyLog logProc = MSSQLApplyLog.from(config);

    BasicThreadFactory thFactory = new BasicThreadFactory.Builder().namingPattern("restoreThread-%d").build();
    ExecutorService mainThreadExe = Executors.newSingleThreadExecutor(thFactory);

    Future<Integer> currRunTask = mainThreadExe.submit(logProc);

    mainThreadExe.shutdown();

    Integer resp = 0;
    try {
        resp = currRunTask.get();
    } catch (InterruptedException ex) {
        LOGGER.error("Application 'main' thread was interrupted", ex);
    } catch (ExecutionException ex) {
        LOGGER.error("Application 'main' thread execution error", ex);
    } finally {
        // If main leaves for any reason, shutdown all threads
        mainThreadExe.shutdownNow();
    }

    System.exit(resp);
}
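The example above uses String.format mostly to assemble error and usage text, as in String.format("Error parsing command line.'%s'", ex.getMessage()). A minimal sketch of that pattern follows; ConfigException is a hypothetical stand-in for the application-specific MSSQLApplyLogException.

// Hypothetical stand-in for the application-specific exception used above.
class ConfigException extends Exception {
    ConfigException(String message, Throwable cause) {
        super(message, cause);
    }
}

public class ErrorMessageExample {
    public static void main(String[] args) throws ConfigException {
        try {
            Integer.parseInt("not-a-number");
        } catch (NumberFormatException ex) {
            // Interpolate the low-level message into a higher-level error string
            throw new ConfigException(String.format("Error parsing command line. '%s'", ex.getMessage()), ex);
        }
    }
}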
From source file:de.tudarmstadt.ukp.argumentation.data.roomfordebate.DataFetcher.java
public static void main(String[] args) throws Exception {
    File crawledPagesFolder = new File(args[0]);
    if (!crawledPagesFolder.exists()) {
        crawledPagesFolder.mkdirs();
    }

    File outputFolder = new File(args[1]);
    if (!outputFolder.exists()) {
        outputFolder.mkdirs();
    }

    // read links from text file
    final String urlsResourceName = "roomfordebate-urls.txt";
    InputStream urlsStream = DataFetcher.class.getClassLoader().getResourceAsStream(urlsResourceName);
    if (urlsStream == null) {
        throw new IOException("Cannot find resource " + urlsResourceName + " on the classpath");
    }

    // read list of urls
    List<String> urls = new ArrayList<>();
    LineIterator iterator = IOUtils.lineIterator(urlsStream, "utf-8");
    while (iterator.hasNext()) {
        // ignore commented url (line starts with #)
        String line = iterator.nextLine();
        if (!line.startsWith("#") && !line.trim().isEmpty()) {
            urls.add(line.trim());
        }
    }

    // download all
    crawlPages(urls, crawledPagesFolder);

    List<File> files = new ArrayList<>(FileUtils.listFiles(crawledPagesFolder, null, false));
    Collections.sort(files, new Comparator<File>() {
        @Override
        public int compare(File o1, File o2) {
            return o1.getName().compareTo(o2.getName());
        }
    });

    int idCounter = 0;
    for (File file : files) {
        NYTimesCommentsScraper commentsScraper = new NYTimesCommentsScraper();
        NYTimesArticleExtractor extractor = new NYTimesArticleExtractor();

        String html = FileUtils.readFileToString(file, "utf-8");

        idCounter++;
        File outputFileArticle = new File(outputFolder, String.format("Cx%03d.txt", idCounter));
        File outputFileComments = new File(outputFolder, String.format("Dx%03d.txt", idCounter));

        try {
            List<Comment> comments = commentsScraper.extractComments(html);
            Article article = extractor.extractArticle(html);

            saveArticleToText(article, outputFileArticle);
            System.out.println("Saved to " + outputFileArticle);

            saveCommentsToText(comments, outputFileComments, article);
            System.out.println("Saved to " + outputFileComments);
        } catch (IOException ex) {
            System.err.println(file.getName() + "\n" + ex.getMessage());
        }
    }
}
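The notable call here is the zero-padded counter used to build the output file names, String.format("Cx%03d.txt", idCounter). A short sketch, with made-up values, of how the width and the 0 flag behave:

public class PaddedFileNames {
    public static void main(String[] args) {
        for (int id = 1; id <= 3; id++) {
            // 0 flag + width 3: pad with leading zeros so names sort lexicographically
            System.out.println(String.format("Cx%03d.txt", id)); // Cx001.txt, Cx002.txt, Cx003.txt
        }
        // Values wider than the field are not truncated
        System.out.println(String.format("Cx%03d.txt", 1234));   // Cx1234.txt
    }
}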
From source file:com.amazonaws.services.kinesis.samples.datavis.HttpReferrerCounterApplication.java
/**
 * Start the Kinesis Client application.
 *
 * @param args Expecting 4 arguments: Application name to use for the Kinesis Client Application, Stream name to
 *        read from, DynamoDB table name to persist counts into, and the AWS region in which these resources
 *        exist or should be created.
 */
public static void main(String[] args) throws UnknownHostException {
    if (args.length != 4) {
        System.err.println("Usage: " + HttpReferrerCounterApplication.class.getSimpleName()
                + " <application name> <stream name> <DynamoDB table name> <region>");
        System.exit(1);
    }

    String applicationName = args[0];
    String streamName = args[1];
    String countsTableName = args[2];
    Region region = SampleUtils.parseRegion(args[3]);

    AWSCredentialsProvider credentialsProvider = new DefaultAWSCredentialsProviderChain();
    ClientConfiguration clientConfig = SampleUtils.configureUserAgentForSample(new ClientConfiguration());
    AmazonKinesis kinesis = new AmazonKinesisClient(credentialsProvider, clientConfig);
    kinesis.setRegion(region);
    AmazonDynamoDB dynamoDB = new AmazonDynamoDBClient(credentialsProvider, clientConfig);
    dynamoDB.setRegion(region);

    // Creates a stream to write to, if it doesn't exist
    StreamUtils streamUtils = new StreamUtils(kinesis);
    streamUtils.createStreamIfNotExists(streamName, 2);
    LOG.info(String.format("%s stream is ready for use", streamName));

    DynamoDBUtils dynamoDBUtils = new DynamoDBUtils(dynamoDB);
    dynamoDBUtils.createCountTableIfNotExists(countsTableName);
    LOG.info(String.format("%s DynamoDB table is ready for use", countsTableName));

    String workerId = String.valueOf(UUID.randomUUID());
    LOG.info(String.format("Using working id: %s", workerId));
    KinesisClientLibConfiguration kclConfig = new KinesisClientLibConfiguration(applicationName, streamName,
            credentialsProvider, workerId);
    kclConfig.withCommonClientConfig(clientConfig);
    kclConfig.withRegionName(region.getName());
    kclConfig.withInitialPositionInStream(InitialPositionInStream.LATEST);

    // Persist counts to DynamoDB
    DynamoDBCountPersister persister = new DynamoDBCountPersister(
            dynamoDBUtils.createMapperForTable(countsTableName));

    IRecordProcessorFactory recordProcessor = new CountingRecordProcessorFactory<HttpReferrerPair>(
            HttpReferrerPair.class, persister, COMPUTE_RANGE_FOR_COUNTS_IN_MILLIS, COMPUTE_INTERVAL_IN_MILLIS);

    Worker worker = new Worker(recordProcessor, kclConfig);

    int exitCode = 0;
    try {
        worker.run();
    } catch (Throwable t) {
        LOG.error("Caught throwable while processing data.", t);
        exitCode = 1;
    }
    System.exit(exitCode);
}
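This sample builds each log line eagerly with String.format before passing it to the logger. A minimal sketch of the same idea, with the logger replaced by System.out and made-up names; since %s accepts any Object, the UUID could also be passed directly instead of going through String.valueOf:

import java.util.UUID;

public class LogMessageFormatting {
    public static void main(String[] args) {
        String streamName = "wordcount-stream";
        System.out.println(String.format("%s stream is ready for use", streamName));

        // %s calls String.valueOf on its argument, so non-String objects work as-is
        UUID workerId = UUID.randomUUID();
        System.out.println(String.format("Using worker id: %s", workerId));
    }
}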
From source file:de.tu_dortmund.ub.data.dswarm.TaskProcessingUnit.java
public static void main(final String[] args) throws Exception {
    // default config
    String configFile = DEFAULT_CONF_FOLDER_NAME + File.separatorChar + DEFAULT_CONFIG_PROPERTIES_FILE_NAME;

    // read program parameters
    if (args.length > 0) {
        for (final String arg : args) {
            LOG.info("arg = " + arg);

            if (arg.startsWith("-conf=")) {
                configFile = arg.split("=")[1];
            }
        }
    }

    final Properties config;

    // Init properties
    try {
        try (final InputStream inputStream = new FileInputStream(configFile)) {
            try (final BufferedReader reader = new BufferedReader(
                    new InputStreamReader(inputStream, TPUUtil.UTF_8))) {
                config = new Properties();
                config.load(reader);
            }
        }
    } catch (final IOException e) {
        LOG.error("something went wrong", e);
        LOG.error(String.format("FATAL ERROR: Could not read '%s'!", configFile));
        throw e;
    }

    startTPU(configFile, config);
}
From source file:eu.roschi.obdkinesis.HttpReferrerCounterApplication.java
/**
 * Start the Kinesis Client application.
 *
 * @param args Expecting 2 optional arguments: the range in milliseconds over which counts are computed and the
 *        interval in milliseconds between count computations. If exactly two arguments are not supplied, default
 *        values are used.
 */
public static void main(String[] args) throws UnknownHostException {
    if (args.length != 2) {
        System.err.println("Using default values");
        COMPUTE_RANGE_FOR_COUNTS_IN_MILLIS = 30000;
        COMPUTE_INTERVAL_IN_MILLIS = 2000;
    } else {
        COMPUTE_RANGE_FOR_COUNTS_IN_MILLIS = Integer.parseInt(args[0]);
        COMPUTE_INTERVAL_IN_MILLIS = Integer.parseInt(args[1]);
        System.err.println(
                "Using values " + Integer.parseInt(args[0]) + " width " + Integer.parseInt(args[1]) + " rate");
    }

    String applicationName = "obd_kinesis";
    String streamName = "obd_input_stream";
    String countsTableName = "obd_kinesis_count";
    Region region = SampleUtils.parseRegion("us-west-2");

    AWSCredentialsProvider credentialsProvider = new DefaultAWSCredentialsProviderChain();
    ClientConfiguration clientConfig = SampleUtils.configureUserAgentForSample(new ClientConfiguration());
    AmazonKinesis kinesis = new AmazonKinesisClient(credentialsProvider, clientConfig);
    kinesis.setRegion(region);
    AmazonDynamoDB dynamoDB = new AmazonDynamoDBClient(credentialsProvider, clientConfig);
    dynamoDB.setRegion(region);

    // Creates a stream to write to, if it doesn't exist
    StreamUtils streamUtils = new StreamUtils(kinesis);
    streamUtils.createStreamIfNotExists(streamName, 2);
    LOG.info(String.format("%s stream is ready for use", streamName));

    DynamoDBUtils dynamoDBUtils = new DynamoDBUtils(dynamoDB);
    dynamoDBUtils.createCountTableIfNotExists(countsTableName);
    LOG.info(String.format("%s DynamoDB table is ready for use", countsTableName));

    String workerId = String.valueOf(UUID.randomUUID());
    LOG.info(String.format("Using working id: %s", workerId));
    KinesisClientLibConfiguration kclConfig = new KinesisClientLibConfiguration(applicationName, streamName,
            credentialsProvider, workerId);
    kclConfig.withCommonClientConfig(clientConfig);
    kclConfig.withRegionName(region.getName());
    kclConfig.withInitialPositionInStream(InitialPositionInStream.LATEST);

    // Persist counts to DynamoDB
    DynamoDBCountPersister persister = new DynamoDBCountPersister(
            dynamoDBUtils.createMapperForTable(countsTableName));

    IRecordProcessorFactory recordProcessor = new CountingRecordProcessorFactory<HttpReferrerPair>(
            HttpReferrerPair.class, persister, COMPUTE_RANGE_FOR_COUNTS_IN_MILLIS, COMPUTE_INTERVAL_IN_MILLIS);

    Worker worker = new Worker(recordProcessor, kclConfig);

    int exitCode = 0;
    try {
        worker.run();
    } catch (Throwable t) {
        LOG.error("Caught throwable while processing data.", t);
        exitCode = 1;
    }
    System.exit(exitCode);
}
From source file:com.github.enr.markdownj.extras.MarkdownApp.java
public static void main(String[] args) {
    MarkdownApp app = new MarkdownApp();
    app.log().debug("Markdown app starting with args: {}", Arrays.toString(args));

    CommandLineParser parser = new PosixParser();
    Options options = new Options();
    options.addOption("s", "source", true, "The source directory for markdown files");
    options.addOption("d", "destination", true, "The destination directory for html files");
    options.addOption("h", "header", true, "The path to the html header file");
    options.addOption("f", "footer", true, "The path to the html footer file");
    options.addOption("t", "code-template", true, "The template for code blocks");
    options.addOption("e", "extensions", true,
            "A comma separated list of file extensions to process. If setted, files with extension not in list won't be processed");
    options.addOption("c", "char-encoding", true, "The encoding to read and write files");

    HelpFormatter formatter = new HelpFormatter();
    String helpHeader = String.format("%s", MarkdownApp.class.getName());

    try {
        CommandLine line = parser.parse(options, args);
        app.process(line);
    } catch (ParseException e) {
        app.log().warn(e.getMessage(), e);
        formatter.printHelp(helpHeader, options);
    }
}
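Here String.format("%s", MarkdownApp.class.getName()) is used purely as a string conversion; with a single %s and no surrounding text it is equivalent to String.valueOf (or, since getName() already returns a String, to the value itself). A tiny illustration with a made-up class name:

public class IdentityFormat {
    public static void main(String[] args) {
        String helpHeader = String.format("%s", IdentityFormat.class.getName());
        String direct = IdentityFormat.class.getName();
        System.out.println(helpHeader.equals(direct)); // true
    }
}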
From source file:edu.indiana.d2i.sloan.internal.QueryVMSimulator.java
public static void main(String[] args) {
    QueryVMSimulator simulator = new QueryVMSimulator();
    CommandLineParser parser = new PosixParser();

    try {
        CommandLine line = simulator.parseCommandLine(parser, args);
        String wdir = line.getOptionValue(CMD_FLAG_VALUE.get(CMD_FLAG_KEY.WORKING_DIR));

        if (!HypervisorCmdSimulator.resourceExist(wdir)) {
            logger.error(String.format("Cannot find VM working dir: %s", wdir));
            System.exit(ERROR_CODE.get(ERROR_STATE.VM_NOT_EXIST));
        }

        Properties prop = new Properties();
        String filename = HypervisorCmdSimulator.cleanPath(wdir) + HypervisorCmdSimulator.VM_INFO_FILE_NAME;

        prop.load(new FileInputStream(new File(filename)));

        VMStatus status = new VMStatus(
                VMMode.valueOf(prop.getProperty(CMD_FLAG_VALUE.get(CMD_FLAG_KEY.VM_MODE))),
                VMState.valueOf(prop.getProperty(CMD_FLAG_VALUE.get(CMD_FLAG_KEY.VM_STATE))), ip,
                Integer.parseInt(prop.getProperty(CMD_FLAG_VALUE.get(CMD_FLAG_KEY.VNC_PORT))),
                Integer.parseInt(prop.getProperty(CMD_FLAG_VALUE.get(CMD_FLAG_KEY.SSH_PORT))),
                Integer.parseInt(prop.getProperty(CMD_FLAG_VALUE.get(CMD_FLAG_KEY.VCPU))),
                Integer.parseInt(prop.getProperty(CMD_FLAG_VALUE.get(CMD_FLAG_KEY.MEM))));

        // write state query file so shell script can read the vm state info
        filename = HypervisorCmdSimulator.cleanPath(wdir) + QueryVMSimulator.QUERY_RES_FILE_NAME;
        FileUtils.writeStringToFile(new File(filename), status.toString(), Charset.forName("UTF-8"));

    } catch (ParseException e) {
        logger.error(String.format("Cannot parse input arguments: %s%n, expected:%n%s",
                StringUtils.join(args, " "), simulator.getUsage(100, "", 5, 5, "")));
        System.exit(ERROR_CODE.get(ERROR_STATE.INVALID_INPUT_ARGS));
    } catch (IOException e) {
        logger.error(e.getMessage(), e);
        System.exit(ERROR_CODE.get(ERROR_STATE.IO_ERR));
    }
}
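The error message above mixes %s with %n. Unlike the escape \n, %n expands to the platform line separator (the value of System.lineSeparator()), which matters when the output is read back by shell scripts on different operating systems. A brief sketch:

public class NewlineConversion {
    public static void main(String[] args) {
        String unixOnly = String.format("line one\nline two");
        String portable = String.format("line one%nline two");

        // %n matches whatever the current platform uses ("\n", "\r\n", ...)
        System.out.println(portable.contains(System.lineSeparator())); // true
        System.out.println(unixOnly.contains("\n"));                   // true
    }
}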
From source file:com.bialx.ebics.FDL.java
public static void main(String[] args) throws Exception {
    String userId = "";
    Boolean isTest = false;
    Date startDate = null;
    Date endDate = null;
    SimpleDateFormat dtFormat = new SimpleDateFormat("yyyyMMdd");

    CommandLineParser parser = new DefaultParser();
    BialxOptions options = new BialxOptions();

    // Parse the program arguments
    CommandLine commandLine = parser.parse(options, args);

    // optional values
    if (commandLine.hasOption('s')) {
        startDate = dtFormat.parse(commandLine.getOptionValue('s'));
    }
    if (commandLine.hasOption('e')) {
        endDate = dtFormat.parse(commandLine.getOptionValue('e'));
    }
    if (commandLine.hasOption('t')) {
        isTest = true;
    }

    FDL fdl;
    PasswordCallback pwdHandler;
    Product product;

    fdl = new FDL();
    pwdHandler = new UserPasswordHandler(userId, CERT_PASSWORD);
    product = new Product("Bial-x EBICS FDL", Locale.FRANCE, null);

    if (commandLine.hasOption(BialxOptions.OPTION_CREATION)
            && !commandLine.hasOption(BialxOptions.OPTION_DOWNLOAD)) {
        if (options.checkCreationOptions(commandLine)) {
            CreationOptions co = options.loadCreationOptions(commandLine);

            fdl.configuration.getLogger().info(String.format("Banque : %s", co.getBankName()));
            fdl.configuration.getLogger().info(String.format("Host : %s", co.getHostId()));
            fdl.configuration.getLogger().info(String.format("URL : %s", co.getBankUrl()));
            fdl.configuration.getLogger().info(String.format("Partner : %s", co.getPartnerId()));
            fdl.configuration.getLogger().info(String.format("User : %s", co.getUserId()));

            User user = fdl.createUser(co.getUserId(), co.getHostId(), co.getPartnerId(), co.getBankName(),
                    co.getBankUrl(), pwdHandler);
            fdl.sendHPBRequest(user, product);
        } else {
            fdl.configuration.getLogger().info("Vérifiez les paramètres de la commande.");
            System.exit(0);
        }
    }

    if (!commandLine.hasOption(BialxOptions.OPTION_CREATION)
            && commandLine.hasOption(BialxOptions.OPTION_DOWNLOAD)) {
        if (options.checkDownloadOptions(commandLine)) {
            DownloadOptions dop = options.loadDownloadOptions(commandLine);

            fdl.configuration.getLogger().info(String.format("Banque : %s", dop.getHostId()));
            fdl.configuration.getLogger().info(String.format("Host : %s", dop.getHostId()));
            fdl.configuration.getLogger().info(String.format("Partner : %s", dop.getPartnerId()));
            fdl.configuration.getLogger().info(String.format("User : %s", dop.getUserId()));
            fdl.configuration.getLogger().info(String.format("Format : %s", dop.getFormat()));
            fdl.configuration.getLogger().info(String.format("Destination : %s", dop.getDestination()));

            if (startDate != null) {
                fdl.configuration.getLogger().info(String.format("Date début : %s", dtFormat.format(startDate)));
            }
            if (endDate != null) {
                fdl.configuration.getLogger().info(String.format("Date fin : %s", dtFormat.format(endDate)));
            }

            fdl.loadUser(dop.getHostId(), dop.getPartnerId(), dop.getUserId(), pwdHandler);
            fdl.fetchFile(dop.getDestination(), dop.getUserId(), dop.getFormat(), product, OrderType.FDL, isTest,
                    startDate, endDate);
        } else {
            fdl.configuration.getLogger().info("Vérifiez les paramètres de la commande.");
            System.exit(0);
        }
    }

    if (commandLine.hasOption(BialxOptions.OPTION_DOWNLOAD) && commandLine.hasOption(BialxOptions.OPTION_CREATION)) {
        fdl.configuration.getLogger().error("Impossible d'avoir les options C et D dans la même commande.");
        System.exit(0);
    }

    fdl.quit();
}
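The FDL example formats its dates with SimpleDateFormat and interpolates the result through %s. For simple cases, String.format can also format a java.util.Date directly with the %t conversions; a sketch with illustrative labels:

import java.util.Date;

public class DateFormatConversions {
    public static void main(String[] args) {
        Date now = new Date();

        // %tF = ISO 8601 date (yyyy-MM-dd), %tT = 24-hour time (HH:mm:ss)
        System.out.println(String.format("Start date : %tF", now));
        System.out.println(String.format("Start time : %tT", now));

        // %1$ reuses the first argument for several conversions
        System.out.println(String.format("Run at %1$tF %1$tT", now));
    }
}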
From source file:com.alertlogic.aws.kinesis.test1.StreamWriter.java
/**
 * Start a number of threads and send randomly generated {@link HttpReferrerPair}s to a Kinesis Stream until the
 * program is terminated.
 *
 * @param args Expecting 3 arguments: A numeric value indicating the number of threads to use to send
 *        data to Kinesis and the name of the stream to send records to, and the AWS region in which these resources
 *        exist or should be created.
 * @throws InterruptedException If this application is interrupted while sending records to Kinesis.
 */
public static void main(String[] args) throws InterruptedException {
    if (args.length != 3) {
        System.err.println(
                "Usage: " + StreamWriter.class.getSimpleName() + " <number of threads> <stream name> <region>");
        System.exit(1);
    }

    int numberOfThreads = Integer.parseInt(args[0]);
    String streamName = args[1];
    Region region = SampleUtils.parseRegion(args[2]);

    AWSCredentialsProvider credentialsProvider = new DefaultAWSCredentialsProviderChain();
    ClientConfiguration clientConfig = SampleUtils.configureUserAgentForSample(new ClientConfiguration());
    AmazonKinesis kinesis = new AmazonKinesisClient(credentialsProvider, clientConfig);
    kinesis.setRegion(region);

    // The more resources we declare the higher write IOPS we need on our DynamoDB table.
    // We write a record for each resource every interval.
    // If interval = 500ms, resource count = 7 we need: (1000/500 * 7) = 14 write IOPS minimum.
    List<String> resources = new ArrayList<>();
    resources.add("/index.html");

    // These are the possible referrers to use when generating pairs
    List<String> referrers = new ArrayList<>();
    referrers.add("http://www.amazon.com");
    referrers.add("http://www.google.com");
    referrers.add("http://www.yahoo.com");
    referrers.add("http://www.bing.com");
    referrers.add("http://www.stackoverflow.com");
    referrers.add("http://www.reddit.com");

    HttpReferrerPairFactory pairFactory = new HttpReferrerPairFactory(resources, referrers);

    // Creates a stream to write to with 2 shards if it doesn't exist
    StreamUtils streamUtils = new StreamUtils(kinesis);
    streamUtils.createStreamIfNotExists(streamName, 2);
    LOG.info(String.format("%s stream is ready for use", streamName));

    final HttpReferrerKinesisPutter putter = new HttpReferrerKinesisPutter(pairFactory, kinesis, streamName);

    ExecutorService es = Executors.newCachedThreadPool();

    Runnable pairSender = new Runnable() {
        @Override
        public void run() {
            try {
                putter.sendPairsIndefinitely(DELAY_BETWEEN_RECORDS_IN_MILLIS, TimeUnit.MILLISECONDS);
            } catch (Exception ex) {
                LOG.warn("Thread encountered an error while sending records. Records will no longer be put by this thread.",
                        ex);
            }
        }
    };

    for (int i = 0; i < numberOfThreads; i++) {
        es.submit(pairSender);
    }

    LOG.info(String.format("Sending pairs with a %dms delay between records with %d thread(s).",
            DELAY_BETWEEN_RECORDS_IN_MILLIS, numberOfThreads));

    es.shutdown();
    es.awaitTermination(Long.MAX_VALUE, TimeUnit.DAYS);
}
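The final log line above fills two %d placeholders from two arguments taken in order. When a value is needed more than once, or the template order differs from the argument order, explicit 1-based argument indexes (%1$d, %2$d) avoid repeating arguments. A small sketch with made-up values:

public class MultipleArguments {
    public static void main(String[] args) {
        long delayMillis = 100;
        int threads = 4;

        // Arguments are consumed left to right, one per conversion
        System.out.println(String.format("Sending pairs with a %dms delay between records with %d thread(s).",
                delayMillis, threads));

        // 1-based argument indexes let a value be reused or reordered
        System.out.println(String.format("%2$d thread(s), %1$dms delay (delay again: %1$dms)",
                delayMillis, threads));
    }
}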
From source file:com.amazonaws.services.kinesis.samples.datavis.HttpReferrerStreamWriter.java
/**
 * Start a number of threads and send randomly generated {@link HttpReferrerPair}s to a Kinesis Stream until the
 * program is terminated.
 *
 * @param args Expecting 3 arguments: A numeric value indicating the number of threads to use to send
 *        data to Kinesis and the name of the stream to send records to, and the AWS region in which these resources
 *        exist or should be created.
 * @throws InterruptedException If this application is interrupted while sending records to Kinesis.
 */
public static void main(String[] args) throws InterruptedException {
    if (args.length != 3) {
        System.err.println("Usage: " + HttpReferrerStreamWriter.class.getSimpleName()
                + " <number of threads> <stream name> <region>");
        System.exit(1);
    }

    int numberOfThreads = Integer.parseInt(args[0]);
    String streamName = args[1];
    Region region = SampleUtils.parseRegion(args[2]);

    AWSCredentialsProvider credentialsProvider = new DefaultAWSCredentialsProviderChain();
    ClientConfiguration clientConfig = SampleUtils.configureUserAgentForSample(new ClientConfiguration());
    AmazonKinesis kinesis = new AmazonKinesisClient(credentialsProvider, clientConfig);
    kinesis.setRegion(region);

    // The more resources we declare the higher write IOPS we need on our DynamoDB table.
    // We write a record for each resource every interval.
    // If interval = 500ms, resource count = 7 we need: (1000/500 * 7) = 14 write IOPS minimum.
    List<String> resources = new ArrayList<>();
    resources.add("/index.html");

    // These are the possible referrers to use when generating pairs
    List<String> referrers = new ArrayList<>();
    referrers.add("http://www.amazon.com");
    referrers.add("http://www.google.com");
    referrers.add("http://www.yahoo.com");
    referrers.add("http://www.bing.com");
    referrers.add("http://www.stackoverflow.com");
    referrers.add("http://www.reddit.com");

    HttpReferrerPairFactory pairFactory = new HttpReferrerPairFactory(resources, referrers);

    // Creates a stream to write to with 2 shards if it doesn't exist
    StreamUtils streamUtils = new StreamUtils(kinesis);
    streamUtils.createStreamIfNotExists(streamName, 2);
    LOG.info(String.format("%s stream is ready for use", streamName));

    final HttpReferrerKinesisPutter putter = new HttpReferrerKinesisPutter(pairFactory, kinesis, streamName);

    ExecutorService es = Executors.newCachedThreadPool();

    Runnable pairSender = new Runnable() {
        @Override
        public void run() {
            try {
                putter.sendPairsIndefinitely(DELAY_BETWEEN_RECORDS_IN_MILLIS, TimeUnit.MILLISECONDS);
            } catch (Exception ex) {
                LOG.warn("Thread encountered an error while sending records. Records will no longer be put by this thread.",
                        ex);
            }
        }
    };

    for (int i = 0; i < numberOfThreads; i++) {
        es.submit(pairSender);
    }

    LOG.info(String.format("Sending pairs with a %dms delay between records with %d thread(s).",
            DELAY_BETWEEN_RECORDS_IN_MILLIS, numberOfThreads));

    es.shutdown();
    es.awaitTermination(Long.MAX_VALUE, TimeUnit.DAYS);
}