List of usage examples for com.google.common.base.Strings#isNullOrEmpty(String)
public static boolean isNullOrEmpty(@Nullable String string)
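Guava's Strings.isNullOrEmpty returns true when the given string reference is null or has length zero, and false otherwise; a string containing only whitespace is not considered empty. Before the examples taken from real projects below, here is a minimal, self-contained sketch of the basic contract. The class name and sample values are illustrative only, assuming Guava is on the classpath:

import com.google.common.base.Strings;

public class IsNullOrEmptyDemo {
    public static void main(String[] args) {
        // Returns true for null and for the empty string, false otherwise.
        System.out.println(Strings.isNullOrEmpty(null));   // true
        System.out.println(Strings.isNullOrEmpty(""));     // true
        System.out.println(Strings.isNullOrEmpty(" "));    // false (whitespace is not empty)
        System.out.println(Strings.isNullOrEmpty("text")); // false

        // Typical guard clause before using a possibly-missing value.
        String configured = System.getProperty("demo.value"); // may be null
        String effective = Strings.isNullOrEmpty(configured) ? "default" : configured;
        System.out.println(effective);
    }
}

Most of the examples below use exactly this guard-clause shape: check the value once, then either abort, fall back to a default, or continue.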
From source file:fr.rjoakim.app.checklink.Main.java
public static void main(String[] args) {
    final Injector injector = Guice.createInjector(new CheckHttpAppModule());
    final FileResourceService fileResource = injector.getInstance(FileResourceService.class);
    final SendMailService sendMail = injector.getInstance(SendMailService.class);
    try {
        final String pathFileConfiguration = getArgsOrDefaultPathConfiguration(fileResource, args);
        if (Strings.isNullOrEmpty(pathFileConfiguration)) {
            Logger.info("path of configuration file is not defined - programme aborted");
            return;
        }
        final Properties properties = fileResource.load(pathFileConfiguration);
        final Configuration configuration = new Configuration.Builder().properties(properties).build();
        final Collection<String> URLsHttp = configuration.listURLsHttp();
        for (String URL : URLsHttp) {
            checkAndSendMail(sendMail, configuration, URL);
        }
    } catch (Exception e) {
        Logger.error(e.getMessage(), e);
    }
}
From source file:org.apache.s4.tools.Deploy.java
public static void main(String[] args) {
    DeployAppArgs deployArgs = new DeployAppArgs();
    Tools.parseArgs(deployArgs, args);
    try {
        ZkClient zkClient = new ZkClient(deployArgs.zkConnectionString, deployArgs.timeout);
        zkClient.setZkSerializer(new ZNRecordSerializer());
        URI s4rURI = null;
        if (deployArgs.s4rPath != null) {
            s4rURI = new URI(deployArgs.s4rPath);
            if (Strings.isNullOrEmpty(s4rURI.getScheme())) {
                // default is file
                s4rURI = new File(deployArgs.s4rPath).toURI();
            }
            logger.info("Using specified S4R [{}]", s4rURI.toString());
        } else {
            if (!Strings.isNullOrEmpty(deployArgs.appClass)) {
                // 3. otherwise if there is at least an app class specified (e.g. for running "s4 adapter"),
                // we use it and won't use an S4R
                logger.info("No S4R path specified, nor build file specified: this assumes the app is in the classpath");
            } else {
                logger.error("You must specify an S4R file or an appClass that will be in the classpath");
                System.exit(1);
            }
        }
        DeploymentUtils.initAppConfig(
                new AppConfig.Builder().appName(deployArgs.appName)
                        .appURI(s4rURI == null ? null : s4rURI.toString())
                        .customModulesNames(deployArgs.modulesClassesNames)
                        .customModulesURIs(deployArgs.modulesURIs)
                        .appClassName(deployArgs.appClass)
                        .namedParameters(ParsingUtils.convertListArgsToMap(deployArgs.extraNamedParameters))
                        .build(),
                deployArgs.clusterName, false, deployArgs.zkConnectionString);
        // Explicitly shutdown the JVM since Gradle leaves non-daemon threads running that delay the termination
        if (!deployArgs.testMode) {
            System.exit(0);
        }
    } catch (Exception e) {
        LoggerFactory.getLogger(Deploy.class).error("Cannot deploy app", e);
    }
}
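The Deploy example above uses isNullOrEmpty on URI.getScheme() to treat a scheme-less path as a local file. A minimal sketch of that idiom follows; the helper name toUri and the sample paths are hypothetical, and only the JDK plus Guava are assumed:

import java.io.File;
import java.net.URI;
import java.net.URISyntaxException;

import com.google.common.base.Strings;

public class SchemeDefaultingSketch {
    // Hypothetical helper: treat a scheme-less string (e.g. "/tmp/app.s4r") as a local file path.
    static URI toUri(String rawPath) throws URISyntaxException {
        URI uri = new URI(rawPath);
        if (Strings.isNullOrEmpty(uri.getScheme())) {
            // No scheme given, so default to a file: URI, mirroring the Deploy example above.
            uri = new File(rawPath).toURI();
        }
        return uri;
    }

    public static void main(String[] args) throws URISyntaxException {
        System.out.println(toUri("/tmp/app.s4r"));            // file:/tmp/app.s4r
        System.out.println(toUri("hdfs://namenode/app.s4r")); // unchanged, scheme already present
    }
}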
From source file:io.hops.examples.spark.kafka.StructuredStreamingKafka.java
public static void main(String[] args) throws StreamingQueryException, InterruptedException {
    final String type = args[0];
    // Producer
    if (!Strings.isNullOrEmpty(type) && type.equalsIgnoreCase("producer")) {
        Set<String> topicsSet = new HashSet<>(Hops.getTopics());
        SparkConf sparkConf = new SparkConf().setAppName(Hops.getJobName());
        JavaSparkContext jsc = new JavaSparkContext(sparkConf);
        final List<HopsProducer> sparkProducers = new ArrayList<>();
        final DateFormat sdf = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss:SSS");
        final List<String> messages = new ArrayList<>();
        final List<String> priorities = new ArrayList<>();
        final List<String> loggers = new ArrayList<>();

        // ********** Setup dummy test data **********
        messages.add("Container container_e01_1494850115055_0016_01_000002 succeeded");
        messages.add("Container container_e01_1494850115251_0015_01_000002 succeeded");
        messages.add("rollingMonitorInterval is set as -1. The log rolling mornitoring interval is disabled. "
                + "The logs will be aggregated after this application is finished.");
        messages.add("rollingMonitorInterval is set as -1. The log rolling mornitoring interval is disabled. "
                + "The logs will be aggregated after this application is finished.");
        messages.add("Sending out 2 container statuses: "
                + "[ContainerStatus: [ContainerId: container_e01_1494850115055_0016_01_000001, State: RUNNING, "
                + "Diagnostics: , ExitStatus: -1000, ], "
                + "ContainerStatus: [ContainerId: container_e01_1494850115055_0016_01_000002, "
                + "State: RUNNING, Diagnostics: , ExitStatus: -1000, ]]");
        messages.add("Node's health-status : true");
        messages.add("Cannot create writer for app application_1494433225517_0008. Skip log upload this time.");
        priorities.add("INFO");
        priorities.add("INFO");
        priorities.add("WARN");
        priorities.add("DEBUG");
        priorities.add("DEBUG");
        priorities.add("DEBUG");
        priorities.add("ERROR");
        loggers.add("org.apache.hadoop.yarn.server.nodemanager.containermanager.monitor.ContainersMonitorImpl");
        loggers.add("org.apache.hadoop.yarn.server.nodemanager.containermanager.monitor.ContainersMonitorImpl");
        loggers.add("org.apache.hadoop.yarn.server.nodemanager.containermanager.logaggregation.AppLogAggregatorImpl");
        loggers.add("org.apache.hadoop.yarn.server.nodemanager.NodeStatusUpdaterImpl");
        loggers.add("org.apache.hadoop.yarn.server.nodemanager.NodeStatusUpdaterImpl");
        loggers.add("org.apache.hadoop.yarn.server.nodemanager.NodeStatusUpdaterImpl");
        loggers.add("org.apache.hadoop.yarn.server.nodemanager.containermanager.logaggregation.AppLogAggregatorImpl");
        // End setup dummy data

        // Get a broker for the producer
        LOG.log(Level.INFO, "Producing to:{0}", Hops.getBrokerEndpointsList().get(0));
        Properties props = new Properties();
        props.put("bootstrap.servers", Hops.getBrokerEndpointsList().get(0));
        for (final String topic : topicsSet) {
            new Thread() {
                @Override
                public void run() {
                    try {
                        SparkProducer sparkProducer = Hops.getSparkProducer(topic, props);
                        sparkProducers.add(sparkProducer);
                        Map<String, String> message = new HashMap<>();
                        int i = 0;
                        // Produce Kafka messages to topic
                        while (true) {
                            message.put("message", messages.get(i % messages.size()));
                            message.put("priority", priorities.get(i % priorities.size()));
                            message.put("logger", loggers.get(i % loggers.size()));
                            Date date = new Date();
                            message.put("timestamp", sdf.format(date));
                            sparkProducer.produce(message);
                            Thread.sleep(100);
                            i++;
                        }
                    } catch (SchemaNotFoundException | CredentialsNotFoundException | InterruptedException ex) {
                        LOG.log(Level.SEVERE, ex.getMessage(), ex);
                    }
                }
            }.start();
        }
        // Keep application running
        Hops.shutdownGracefully(jsc);
        for (HopsProducer hopsProducer : sparkProducers) {
            hopsProducer.close();
        }
    } else { // Consumer
        // Create DataSet representing the stream of input lines from kafka
        DataStreamReader dsr = Hops.getSparkConsumer().getKafkaDataStreamReader();
        Dataset<Row> lines = dsr.load();

        // Generate running word count
        Dataset<LogEntry> logEntries = lines.map(new MapFunction<Row, LogEntry>() {
            @Override
            public LogEntry call(Row record) throws Exception {
                GenericRecord genericRecord = RECORD_INJECTIONS.entrySet().iterator().next().getValue()
                        .invert(record.getAs("value")).get();
                LogEntry logEntry = new LogEntry(genericRecord.get("timestamp").toString(),
                        genericRecord.get("priority").toString(), genericRecord.get("logger").toString(),
                        genericRecord.get("message").toString());
                return logEntry;
            }
        }, Encoders.bean(LogEntry.class));

        Dataset<String> logEntriesRaw = lines.map(new MapFunction<Row, String>() {
            @Override
            public String call(Row record) throws Exception {
                GenericRecord genericRecord = RECORD_INJECTIONS.entrySet().iterator().next().getValue()
                        .invert(record.getAs("value")).get();
                return genericRecord.toString();
            }
        }, Encoders.STRING());

        // Start running the query that prints the running counts to the console
        StreamingQuery queryFile = logEntries.writeStream().format("parquet")
                .option("path", "/Projects/" + Hops.getProjectName() + "/Resources/data-parquet-" + Hops.getAppId())
                .option("checkpointLocation",
                        "/Projects/" + Hops.getProjectName() + "/Resources/checkpoint-parquet-" + Hops.getAppId())
                .trigger(Trigger.ProcessingTime(10000)).start();

        StreamingQuery queryFile2 = logEntriesRaw.writeStream().format("text")
                .option("path", "/Projects/" + Hops.getProjectName() + "/Resources/data-text-" + Hops.getAppId())
                .option("checkpointLocation",
                        "/Projects/" + Hops.getProjectName() + "/Resources/checkpoint-text-" + Hops.getAppId())
                .trigger(Trigger.ProcessingTime(10000)).start();

        Hops.shutdownGracefully(queryFile);
    }
}
From source file:com.google.cloud.hadoop.fs.gcs.GoogleHadoopFileSystemCacheCleaner.java
public static void main(String[] args) throws IOException {
    GenericOptionsParser parser = new GenericOptionsParser(args);
    args = parser.getRemainingArgs();
    Configuration configuration = parser.getConfiguration();

    // TODO: Wire out constants and defaults through GoogleHadoopFileSystemBase once submitted.
    if ("FILESYSTEM_BACKED".equals(configuration.get("fs.gs.metadata.cache.type", "IN_MEMORY"))) {
        String fsStringPath = configuration.get("fs.gs.metadata.cache.directory", "");
        Preconditions.checkState(!Strings.isNullOrEmpty(fsStringPath));
        LOG.info("Performing GC on cache directory {}", fsStringPath);

        Path path = Paths.get(fsStringPath);
        if (Files.exists(path)) {
            FileSystemBackedDirectoryListCache cache = new FileSystemBackedDirectoryListCache(fsStringPath);
            cleanCache(cache);
        }
    }

    LOG.info("Done with GC.");
}
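The cache-cleaner example pairs isNullOrEmpty with Guava's Preconditions.checkState to fail fast when a required setting is missing. A minimal sketch of that idiom follows; the property name cache.dir is hypothetical and used only for illustration:

import com.google.common.base.Preconditions;
import com.google.common.base.Strings;

public class RequireConfiguredPathSketch {
    public static void main(String[] args) {
        // "cache.dir" is a hypothetical property name.
        String cacheDir = System.getProperty("cache.dir", "");

        // Fail fast with an IllegalStateException when the value is missing or blank.
        Preconditions.checkState(!Strings.isNullOrEmpty(cacheDir),
                "cache.dir must be set to a non-empty path");

        System.out.println("Using cache directory: " + cacheDir);
    }
}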
From source file:cli.Main.java
public static void main(String[] args) throws IOException {
    CommandLineParameters cli = new CommandLineParameters();
    JCommander jCmd = new JCommander(cli, args);
    jCmd.setProgramName("emailconverter");

    if (cli.isGui()) {
        MainWindow.main(new String[0]);
        return;
    }
    if (cli.isHelp()) {
        jCmd.usage();
        return;
    }
    if (cli.isVersion()) {
        System.out.println(Main.class.getPackage().getImplementationVersion());
        return;
    }
    if (cli.isDebug()) {
        Logger.level = LogLevel.Debug;
    }
    if (cli.isError()) {
        Logger.level = LogLevel.Error;
    }
    if (cli.isQuiet()) {
        Logger.level = LogLevel.Quiet;
    }
    if (cli.getFiles().isEmpty()) {
        Logger.error("Please provide the path of an EML file.");
        jCmd.usage();
        return;
    }

    String in = cli.getFiles().get(0);
    if (!(new File(in).exists())) {
        Logger.error("Input EML file %s could not be found!", in);
        return;
    }

    String out = cli.getOutput();
    if (Strings.isNullOrEmpty(cli.getOutput())) {
        out = Files.getNameWithoutExtension(in) + ".pdf";
        File parent = new File(in).getParentFile();
        if (parent != null) {
            out = new File(parent, out).toString();
        }
    }

    List<String> extParams = new ArrayList<String>();
    if ("auto".equalsIgnoreCase(cli.getProxy())) {
        Proxy defaultProxy = HttpUtils.getDefaultProxy();
        InetSocketAddress defaultProxyAddress = (InetSocketAddress) defaultProxy.address();
        String proxy = defaultProxy.type().toString() + "://" + defaultProxyAddress.toString();
        extParams.add("--proxy");
        extParams.add(proxy.toLowerCase());
        Logger.debug("Use default proxy %s", proxy);
    } else if (!Strings.isNullOrEmpty(cli.getProxy())) {
        extParams.add("--proxy");
        extParams.add(cli.getProxy());
        Logger.debug("Use proxy from parameters %s", cli.getProxy());
    }

    try {
        MimeMessageConverter.convertToPdf(in, out, cli.isOutputHTML(), cli.isHideHeaders(),
                cli.isExtractAttachments(), cli.getExtractAttachmentsDir(), extParams);
    } catch (Exception e) {
        Logger.error("The eml could not be converted. Error: %s", Throwables.getStackTraceAsString(e));

        if (!cli.isDisableCrashreports()) {
            /* Try to send the bugreport via email */
            StringBuilder bugdetails = new StringBuilder(800);
            bugdetails.append("User: ").append(System.getProperty("user.name")).append("\n");

            InetAddress localHost = InetAddress.getLocalHost();
            bugdetails.append("Localhost: ").append(localHost.getHostAddress()).append(" - ")
                    .append(localHost.getHostName()).append("\n");

            bugdetails.append("GEO: ")
                    .append(HttpUtils.getRequest("http://ipinfo.io/json").replaceAll("\"", "")).append("\n");

            bugdetails.append("OS: ").append(System.getProperty("os.name")).append(" ")
                    .append(System.getProperty("os.version")).append(" ")
                    .append(System.getProperty("os.arch")).append("\n");

            bugdetails.append("Java: ").append(System.getProperty("java.vendor")).append(" ")
                    .append(System.getProperty("java.version")).append("\n\n");

            bugdetails.append("Exception\n").append(Throwables.getStackTraceAsString(e));

            String subject = "Bugreport from " + System.getProperty("user.name") + " | " + new Date();
            HttpUtils.postRequest(BUG_EMAIL_URL,
                    String.format("subject=%s&body=%s", subject, bugdetails.toString()));
        }
    }
}
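The emailconverter example falls back to a derived output name when the requested output path is blank. The same fallback can be written more compactly with Guava's Strings.emptyToNull and MoreObjects.firstNonNull; a minimal sketch, assuming a reasonably recent Guava (MoreObjects) and using hypothetical paths:

import com.google.common.base.MoreObjects;
import com.google.common.base.Strings;
import com.google.common.io.Files;

public class OutputNameFallbackSketch {
    public static void main(String[] args) {
        String in = "mail/message.eml"; // hypothetical input path
        String requestedOutput = "";    // e.g. the user left the output option blank

        // emptyToNull turns "" into null, so firstNonNull can supply the derived default.
        String out = MoreObjects.firstNonNull(Strings.emptyToNull(requestedOutput),
                Files.getNameWithoutExtension(in) + ".pdf");

        System.out.println(out); // message.pdf
    }
}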
From source file:co.mitro.core.util.RpcLogReader.java
public static void main(String[] args) throws IOException {
    AtomicLongMap<String> counter = AtomicLongMap.<String>create();
    Map<String, Span> txnLength = new HashMap<>();
    Span duration = new Span();

    for (int i = 0; i < args.length; ++i) {
        String filename = args[i];
        System.err.println("Reading file: " + filename);
        JsonRecordReader rr = JsonRecordReader.MakeFromFilename(filename);
        JsonRecordReader.JsonLog log;
        try {
            while (null != (log = rr.readJson())) {
                counter.incrementAndGet(log.metadata.endpoint);
                duration.addTime(log.metadata.timestamp);
                if (log.metadata.endpoint.endsWith("BeignTransaction") || log.payload.implicitBeginTransaction) {
                    txnLength.put((String) ((Map) log.metadata.response).get("transactionId"),
                            new Span(log.metadata.timestamp));
                } else if (!Strings.isNullOrEmpty(log.payload.transactionId)) {
                    txnLength.get(log.payload.transactionId).addTime(log.metadata.timestamp);
                }
            }
        } catch (EOFException e) {
            System.err.println("unexpected end of file; skipping");
        }
    }

    System.out.println("total duration: " + duration.duration());
    for (String k : counter.asMap().keySet()) {
        System.out.println(k + ": " + counter.get(k));
    }

    List<Long> times = new ArrayList<>();
    for (Span s : txnLength.values()) {
        times.add(s.duration());
    }
    Collections.sort(times);

    double meanTime = 0;
    for (Long l : times) {
        meanTime += l;
    }
    meanTime /= txnLength.size();

    double stdDev = 0;
    for (Long l : times) {
        stdDev += Math.pow((l - meanTime), 2);
    }
    stdDev /= txnLength.size();
    stdDev = Math.pow(stdDev, 0.5);

    // percentiles
    long PERCENTILES = 10;
    for (int i = 0; i <= PERCENTILES; i += 1) {
        System.out.println("percentile " + i * PERCENTILES + ": "
                + times.get((int) ((times.size() - 1) * i / PERCENTILES)));
    }
    System.out.println("write txns:");
    System.out.println("num: " + txnLength.size() + ", mean:" + meanTime + ", stddev:" + stdDev);
}
From source file:tv.icntv.grade.film.recommend.TopNJob.java
public static void main(String[] args) throws Exception {
    final Configuration configuration = HBaseConfiguration.create();
    configuration.addResource("grade.xml");
    String tables = configuration.get("hbase.cdn.tables");
    if (Strings.isNullOrEmpty(tables)) {
        return;
    }
    List<String> list = Lists.newArrayList(Splitter.on(",").split(tables));
    List<String> results = Lists.transform(list, new Function<String, String>() {
        @Override
        public String apply(@Nullable java.lang.String input) {
            return String.format(configuration.get("hdfs.directory.base.db"), new Date(), input);
        }
    });
    String[] arrays = new String[] { Joiner.on(",").join(results),
            String.format(configuration.get("hdfs.directory.num.middle"), new Date()),
            String.format(configuration.get("hdfs.directory.num.result"), new Date()) };
    AbstractJob job = new TopNJob();
    // job.setStart(true);
    int i = ToolRunner.run(configuration, job, arrays);
    System.exit(i);
}
From source file:com.google.devtools.kythe.analyzers.jvm.ClassFileIndexer.java
public static void main(String[] args) throws AnalysisException {
    StandaloneConfig config = new StandaloneConfig();
    config.parseCommandLine(args);

    try (OutputStream stream = Strings.isNullOrEmpty(config.getOutputPath()) ? System.out
            : new BufferedOutputStream(new FileOutputStream(config.getOutputPath()))) {
        FactEmitter emitter = new StreamFactEmitter(stream);
        MemoryStatisticsCollector statistics = config.getPrintStatistics() ? new MemoryStatisticsCollector() : null;
        KytheClassVisitor classVisitor = new KytheClassVisitor(
                statistics == null ? NullStatisticsCollector.getInstance() : statistics, emitter);
        for (String fileName : config.getFilesToIndex()) {
            File file = new File(fileName);
            if (fileName.endsWith(JAR_FILE_EXT)) {
                visitJarClassFiles(file, classVisitor);
            } else if (fileName.endsWith(CLASS_FILE_EXT)) {
                visitClassFile(file, classVisitor);
            } else if (fileName.endsWith(IndexInfoUtils.INDEX_FILE_EXT)) {
                CompilationDescription desc = IndexInfoUtils.readIndexInfoFromFile(fileName);
                analyzeCompilation(desc.getCompilationUnit(), new FileDataCache(desc.getFileContents()),
                        classVisitor);
            } else {
                throw new IllegalArgumentException("unknown file path extension: " + fileName);
            }
        }
        if (statistics != null) {
            statistics.printStatistics(System.err);
        }
    } catch (IOException ioe) {
        throw new AnalysisException("error writing output", ioe);
    }
}
From source file:com.cloudbees.api.Main.java
public static void main(String[] args) throws Exception {
    File beesCredentialsFile = new File(System.getProperty("user.home"), ".bees/bees.config");
    Preconditions.checkArgument(beesCredentialsFile.exists(), "File %s not found", beesCredentialsFile);

    Properties beesCredentials = new Properties();
    beesCredentials.load(new FileInputStream(beesCredentialsFile));
    String apiUrl = "https://api.cloudbees.com/api";
    String apiKey = beesCredentials.getProperty("bees.api.key");
    String secret = beesCredentials.getProperty("bees.api.secret");
    BeesClient client = new BeesClient(apiUrl, apiKey, secret, "xml", "1.0");
    client.setVerbose(false);

    URL databasesUrl = Thread.currentThread().getContextClassLoader().getResource("databases.txt");
    Preconditions.checkNotNull(databasesUrl, "File 'databases.txt' NOT found in the classpath");

    Collection<String> databaseNames;
    try {
        databaseNames = Sets.newTreeSet(Resources.readLines(databasesUrl, Charsets.ISO_8859_1));
    } catch (Exception e) {
        throw Throwables.propagate(e);
    }

    databaseNames = Collections2.transform(databaseNames, new Function<String, String>() {
        @Nullable
        @Override
        public String apply(@Nullable String input) {
            // {host_db_create,<<"tco_q5rm">>,<<"TCO_q5rm">>,
            if (input == null)
                return null;
            if (input.startsWith("#"))
                return null;
            if (input.indexOf('"') == -1) {
                logger.warn("Skip invalid line {}", input);
                return null;
            }
            input = input.substring(input.indexOf('"') + 1);
            if (input.indexOf('"') == -1) {
                logger.warn("Skip invalid line {}", input);
                return null;
            }
            return input.substring(0, input.indexOf('"'));
        }
    });

    databaseNames = Collections2.filter(databaseNames, new Predicate<String>() {
        @Override
        public boolean apply(@Nullable String s) {
            return !Strings.isNullOrEmpty(s);
        }
    });

    Multimap<String, String> databasesByAccount = ArrayListMultimap.create();

    Class.forName("com.mysql.jdbc.Driver");

    for (String databaseName : databaseNames) {
        try {
            DatabaseInfo databaseInfo = client.databaseInfo(databaseName, true);
            databasesByAccount.put(databaseInfo.getOwner(), databaseInfo.getName());
            logger.debug("Evaluate " + databaseInfo.getName());

            if (true == false) {
                // Hibernate
                logger.info("Hibernate {}", databaseName);
                Map<String, String> params = new HashMap<String, String>();
                params.put("database_id", databaseName);
                String url = client.getRequestURL("database.hibernate", params);
                String response = client.executeRequest(url);
                DatabaseInfoResponse apiResponse = (DatabaseInfoResponse) client.readResponse(response);
                logger.info("DB {} status: {}", apiResponse.getDatabaseInfo().getName(),
                        apiResponse.getDatabaseInfo().getStatus());
            }
            if (true == false) {
                // Activate
                logger.info("Activate {}", databaseName);
                Map<String, String> params = new HashMap<String, String>();
                params.put("database_id", databaseName);
                String url = client.getRequestURL("database.activate", params);
                String response = client.executeRequest(url);
                DatabaseInfoResponse apiResponse = (DatabaseInfoResponse) client.readResponse(response);
                logger.info("DB {} status: {}", apiResponse.getDatabaseInfo().getName(),
                        apiResponse.getDatabaseInfo().getStatus());
            }

            String dbUrl = "jdbc:mysql://" + databaseInfo.getMaster() + "/" + databaseInfo.getName();
            logger.info("Connect to {} user={}", dbUrl, databaseInfo.getUsername());
            Connection cnn = DriverManager.getConnection(dbUrl, databaseInfo.getUsername(),
                    databaseInfo.getPassword());
            cnn.setAutoCommit(false);
            cnn.close();
        } catch (Exception e) {
            logger.warn("Exception for {}", databaseName, e);
        }
    }

    System.out.println("OWNERS");
    for (String account : databasesByAccount.keySet()) {
        System.out.println(account + ": " + Joiner.on(", ").join(databasesByAccount.get(account)));
    }
}
From source file:org.cinchapi.concourse.shell.ConcourseShell.java
/**
 * Run the program...
 *
 * @param args - see {@link Options}
 * @throws IOException
 */
public static void main(String... args) throws IOException {
    ConsoleReader console = new ConsoleReader();
    console.setExpandEvents(false);
    Options opts = new Options();
    JCommander parser = new JCommander(opts, args);
    parser.setProgramName("concourse-shell");
    if (opts.help) {
        parser.usage();
        System.exit(1);
    }
    if (Strings.isNullOrEmpty(opts.password)) {
        opts.password = console.readLine("Password [" + opts.username + "]: ", '*');
    }
    try {
        Concourse concourse = Concourse.connect(opts.host, opts.port, opts.username, opts.password,
                opts.environment);
        final String env = concourse.getServerEnvironment();
        CommandLine.displayWelcomeBanner();
        Binding binding = new Binding();
        GroovyShell shell = new GroovyShell(binding);
        Stopwatch watch = Stopwatch.createUnstarted();
        console.println("Client Version " + Version.getVersion(ConcourseShell.class));
        console.println("Server Version " + concourse.getServerVersion());
        console.println("");
        console.println("Connected to the '" + env + "' environment.");
        console.println("");
        console.println("Type HELP for help.");
        console.println("Type EXIT to quit.");
        console.println("Use TAB for completion.");
        console.println("");
        console.setPrompt(MessageFormat.format("[{0}/cash]$ ", env));
        console.addCompleter(new StringsCompleter(getAccessibleApiMethodsUsingShortSyntax()));
        final List<String> methods = Lists.newArrayList(getAccessibleApiMethods());
        String line;
        while ((line = console.readLine().trim()) != null) {
            line = SyntaxTools.handleShortSyntax(line, methods);
            binding.setVariable("concourse", concourse);
            binding.setVariable("eq", Operator.EQUALS);
            binding.setVariable("ne", Operator.NOT_EQUALS);
            binding.setVariable("gt", Operator.GREATER_THAN);
            binding.setVariable("gte", Operator.GREATER_THAN_OR_EQUALS);
            binding.setVariable("lt", Operator.LESS_THAN);
            binding.setVariable("lte", Operator.LESS_THAN_OR_EQUALS);
            binding.setVariable("bw", Operator.BETWEEN);
            binding.setVariable("regex", Operator.REGEX);
            binding.setVariable("nregex", Operator.NOT_REGEX);
            binding.setVariable("lnk2", Operator.LINKS_TO);
            binding.setVariable("date", STRING_TO_TIME);
            binding.setVariable("time", STRING_TO_TIME);
            binding.setVariable("where", WHERE);
            binding.setVariable("tag", STRING_TO_TAG);
            if (line.equalsIgnoreCase("exit")) {
                concourse.exit();
                System.exit(0);
            } else if (line.equalsIgnoreCase("help") || line.equalsIgnoreCase("man")) {
                Process p = Runtime.getRuntime()
                        .exec(new String[] { "sh", "-c", "echo \"" + HELP_TEXT + "\" | less > /dev/tty" });
                p.waitFor();
            } else if (containsBannedCharSequence(line)) {
                System.err.println("Cannot complete command because it contains an illegal character sequence.");
            } else if (Strings.isNullOrEmpty(line)) { // CON-170
                continue;
            } else {
                watch.reset().start();
                Object value = null;
                try {
                    value = shell.evaluate(line, "ConcourseShell");
                    watch.stop();
                    if (value != null) {
                        System.out.println("Returned '" + value + "' in "
                                + watch.elapsed(TimeUnit.MILLISECONDS) + " ms");
                    } else {
                        System.out.println("Completed in " + watch.elapsed(TimeUnit.MILLISECONDS) + " ms");
                    }
                } catch (Exception e) {
                    if (e.getCause() instanceof TTransportException) {
                        die(e.getMessage());
                    } else if (e.getCause() instanceof TSecurityException) {
                        die("A security change has occurred and your session cannot continue");
                    } else {
                        System.err.print("ERROR: " + e.getMessage());
                    }
                }
            }
            System.out.print("\n");
        }
    } catch (Exception e) {
        if (e.getCause() instanceof TTransportException) {
            die("Unable to connect to " + opts.username + "@" + opts.host + ":" + opts.port
                    + " with the specified password");
        } else if (e.getCause() instanceof TSecurityException) {
            die("Invalid username/password combination.");
        } else {
            die(e.getMessage());
        }
    } finally {
        try {
            TerminalFactory.get().restore();
        } catch (Exception e) {
            die(e.getMessage());
        }
    }
}