Example usage for java.lang String format

Introduction

This page collects example usages of java.lang.String.format drawn from open-source projects.

Prototype

public static String format(String format, Object... args) 

Document

Returns a formatted string using the specified format string and arguments.
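
Before the project examples below, here is a minimal, self-contained sketch of the call itself; the class name, format strings, and values are illustrative only and are not taken from the projects listed under Usage.

public class StringFormatDemo {
    public static void main(String[] args) {
        // %s substitutes a String, %d a decimal integer, %.2f a floating-point value with two decimals
        String summary = String.format("Job %s finished with %d warnings in %.2f seconds", "fgs", 3, 12.5);
        System.out.println(summary); // Job fgs finished with 3 warnings in 12.50 seconds

        // %03d zero-pads to width 3; %n emits the platform line separator
        System.out.print(String.format("frame %03d%n", 7)); // frame 007
    }
}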

Usage

From source file:edu.cmu.tetrad.cli.search.FgsCli.java

/**
 * @param args the command line arguments
 */
public static void main(String[] args) {
    if (args == null || args.length == 0 || Args.hasLongOption(args, "help")) {
        Args.showHelp("fgs", MAIN_OPTIONS);
        return;
    }

    parseArgs(args);

    System.out.println("================================================================================");
    System.out.printf("FGS Discrete (%s)%n", DateTime.printNow());
    System.out.println("================================================================================");

    String argInfo = createArgsInfo();
    System.out.println(argInfo);
    LOGGER.info("=== Starting FGS Discrete: " + Args.toString(args, ' '));
    LOGGER.info(argInfo.trim().replaceAll("\n", ",").replaceAll(" = ", "="));

    Set<String> excludedVariables = (excludedVariableFile == null) ? Collections.EMPTY_SET
            : getExcludedVariables();

    runPreDataValidations(excludedVariables, System.err);
    DataSet dataSet = readInDataSet(excludedVariables);
    runOptionalDataValidations(dataSet, System.err);

    Path outputFile = Paths.get(dirOut.toString(), outputPrefix + ".txt");
    try (PrintStream writer = new PrintStream(
            new BufferedOutputStream(Files.newOutputStream(outputFile, StandardOpenOption.CREATE)))) {
        String runInfo = createOutputRunInfo(excludedVariables, dataSet);
        writer.println(runInfo);
        String[] infos = runInfo.trim().replaceAll("\n\n", ";").split(";");
        for (String s : infos) {
            LOGGER.info(s.trim().replaceAll("\n", ",").replaceAll(":,", ":").replaceAll(" = ", "="));
        }

        Graph graph = runFgs(dataSet, writer);

        writer.println();
        writer.println(graph.toString());
    } catch (IOException exception) {
        LOGGER.error("FGS failed.", exception);
        System.err.printf("%s: FGS failed.%n", DateTime.printNow());
        System.out.println("Please see log file for more information.");
        System.exit(-128);
    }
    System.out.printf("%s: FGS finished!  Please see %s for details.%n", DateTime.printNow(),
            outputFile.getFileName().toString());
    LOGGER.info(
            String.format("FGS finished!  Please see %s for details.", outputFile.getFileName().toString()));
}

From source file:com.hortonworks.registries.storage.tool.sql.TablesInitializer.java

public static void main(String[] args) throws Exception {
    Options options = new Options();

    options.addOption(Option.builder("s").numberOfArgs(1).longOpt(OPTION_SCRIPT_ROOT_PATH)
            .desc("Root directory of script path").build());

    options.addOption(Option.builder("c").numberOfArgs(1).longOpt(OPTION_CONFIG_FILE_PATH)
            .desc("Config file path").build());

    options.addOption(Option.builder("m").numberOfArgs(1).longOpt(OPTION_MYSQL_JAR_URL_PATH)
            .desc("Mysql client jar url to download").build());

    options.addOption(Option.builder().hasArg(false).longOpt(SchemaMigrationOption.CREATE.toString())
            .desc("Run sql migrations from scatch").build());

    options.addOption(Option.builder().hasArg(false).longOpt(SchemaMigrationOption.DROP.toString())
            .desc("Drop all the tables in the target database").build());

    options.addOption(Option.builder().hasArg(false).longOpt(SchemaMigrationOption.CHECK_CONNECTION.toString())
            .desc("Check the connection for configured data source").build());

    options.addOption(Option.builder().hasArg(false).longOpt(SchemaMigrationOption.MIGRATE.toString())
            .desc("Execute schema migration from last check point").build());

    options.addOption(Option.builder().hasArg(false).longOpt(SchemaMigrationOption.INFO.toString())
            .desc("Show the status of the schema migration compared to the target database").build());

    options.addOption(Option.builder().hasArg(false).longOpt(SchemaMigrationOption.VALIDATE.toString())
            .desc("Validate the target database changes with the migration scripts").build());

    options.addOption(Option.builder().hasArg(false).longOpt(SchemaMigrationOption.REPAIR.toString()).desc(
            "Repairs the DATABASE_CHANGE_LOG by removing failed migrations and correcting checksum of existing migration script")
            .build());

    options.addOption(Option.builder().hasArg(false).longOpt(DISABLE_VALIDATE_ON_MIGRATE)
            .desc("Disable flyway validation checks while running migrate").build());

    CommandLineParser parser = new BasicParser();
    CommandLine commandLine = parser.parse(options, args);

    if (!commandLine.hasOption(OPTION_CONFIG_FILE_PATH) || !commandLine.hasOption(OPTION_SCRIPT_ROOT_PATH)) {
        usage(options);
        System.exit(1);
    }

    boolean isSchemaMigrationOptionSpecified = false;
    SchemaMigrationOption schemaMigrationOptionSpecified = null;
    for (SchemaMigrationOption schemaMigrationOption : SchemaMigrationOption.values()) {
        if (commandLine.hasOption(schemaMigrationOption.toString())) {
            if (isSchemaMigrationOptionSpecified) {
                System.out.println(
                        "Only one operation can be executed at a time; please select one of 'create', 'migrate', 'validate', 'info', 'drop', 'repair', 'check-connection'.");
                System.exit(1);
            }
            isSchemaMigrationOptionSpecified = true;
            schemaMigrationOptionSpecified = schemaMigrationOption;
        }
    }

    if (!isSchemaMigrationOptionSpecified) {
        System.out.println(
                "One of the options 'create', 'migrate', 'validate', 'info', 'drop', 'repair', 'check-connection' must be specified to execute.");
        System.exit(1);
    }

    String confFilePath = commandLine.getOptionValue(OPTION_CONFIG_FILE_PATH);
    String scriptRootPath = commandLine.getOptionValue(OPTION_SCRIPT_ROOT_PATH);
    String mysqlJarUrl = commandLine.getOptionValue(OPTION_MYSQL_JAR_URL_PATH);

    StorageProviderConfiguration storageProperties;
    Map<String, Object> conf;
    try {
        conf = Utils.readConfig(confFilePath);

        StorageProviderConfigurationReader confReader = new StorageProviderConfigurationReader();
        storageProperties = confReader.readStorageConfig(conf);
    } catch (IOException e) {
        System.err.println("Error occurred while reading config file: " + confFilePath);
        System.exit(1);
        throw new IllegalStateException("Shouldn't reach here");
    }

    String bootstrapDirPath = null;
    try {
        bootstrapDirPath = System.getProperty("bootstrap.dir");
        Proxy proxy = Proxy.NO_PROXY;
        String httpProxyUrl = (String) conf.get(HTTP_PROXY_URL);
        String httpProxyUsername = (String) conf.get(HTTP_PROXY_USERNAME);
        String httpProxyPassword = (String) conf.get(HTTP_PROXY_PASSWORD);
        if ((httpProxyUrl != null) && !httpProxyUrl.isEmpty()) {
            URL url = new URL(httpProxyUrl);
            proxy = new Proxy(Proxy.Type.HTTP, new InetSocketAddress(url.getHost(), url.getPort()));
            if ((httpProxyUsername != null) && !httpProxyUsername.isEmpty()) {
                Authenticator.setDefault(getBasicAuthenticator(url.getHost(), url.getPort(), httpProxyUsername,
                        httpProxyPassword));
            }
        }
        MySqlDriverHelper.downloadMySQLJarIfNeeded(storageProperties, bootstrapDirPath, mysqlJarUrl, proxy);
    } catch (Exception e) {
        System.err.println("Error occurred while downloading MySQL jar. bootstrap dir: " + bootstrapDirPath);
        System.exit(1);
        throw new IllegalStateException("Shouldn't reach here");
    }

    boolean disableValidateOnMigrate = commandLine.hasOption(DISABLE_VALIDATE_ON_MIGRATE);
    if (disableValidateOnMigrate) {
        System.out.println("Disabling validation on schema migrate");
    }
    SchemaMigrationHelper schemaMigrationHelper = new SchemaMigrationHelper(
            SchemaFlywayFactory.get(storageProperties, scriptRootPath, !disableValidateOnMigrate));
    try {
        schemaMigrationHelper.execute(schemaMigrationOptionSpecified);
        System.out
                .println(String.format("\"%s\" option successful", schemaMigrationOptionSpecified.toString()));
    } catch (Exception e) {
        System.err.println(
                String.format("\"%s\" option failed : %s", schemaMigrationOptionSpecified.toString(), e));
        System.exit(1);
    }

}

From source file:com.twentyn.bioreactor.pH.ControlSystem.java

public static void main(String[] args) throws Exception {

    Options opts = new Options();
    for (Option.Builder b : OPTION_BUILDERS) {
        opts.addOption(b.build());
    }

    CommandLine cl = null;
    try {
        CommandLineParser parser = new DefaultParser();
        cl = parser.parse(opts, args);
    } catch (ParseException e) {
        LOGGER.error(String.format("Argument parsing failed: %s\n", e.getMessage()));
        HELP_FORMATTER.printHelp(ControlSystem.class.getCanonicalName(), HELP_MESSAGE, opts, null, true);
        System.exit(1);
    }

    if (cl.hasOption("help")) {
        HELP_FORMATTER.printHelp(ControlSystem.class.getCanonicalName(), HELP_MESSAGE, opts, null, true);
        return;
    }

    SOLUTION solution = null;
    String acidOrBase = cl.getOptionValue(OPTION_CONTROL_SOLUTION);

    if (acidOrBase.equals(SOLUTION.ACID.name())) {
        solution = SOLUTION.ACID;
    }

    if (acidOrBase.equals(SOLUTION.BASE.name())) {
        solution = SOLUTION.BASE;
    }

    if (solution == null) {
        LOGGER.error("Input solution is neither %s or %s", SOLUTION.ACID.name(), SOLUTION.BASE.name());
        return;
    }

    Double targetPH = Double.parseDouble(cl.getOptionValue(OPTION_TARGET_PH));

    File sensorReadingDataFile = new File(
            cl.getOptionValue(OPTION_SENSOR_READING_FILE_LOCATION, SENSOR_READING_FILE_LOCATION));

    MotorPinConfiguration motorPinConfiguration = new MotorPinConfiguration(
            MotorPinConfiguration.PinNumberingScheme.BOARD);
    motorPinConfiguration.initializeGPIOPinsAndSetConfigToStartState();

    ControlSystem controlSystem = new ControlSystem(motorPinConfiguration, solution, targetPH,
            sensorReadingDataFile);
    controlSystem.registerModuleForObjectMapper(new JodaModule());
    try {
        controlSystem.run();
    } finally {
        LOGGER.info("Shutting down");
        controlSystem.shutdownFermentation();
    }
}

From source file:com.act.lcms.v2.TraceIndexExtractor.java

public static void main(String[] args) throws Exception {
    Options opts = new Options();
    for (Option.Builder b : OPTION_BUILDERS) {
        opts.addOption(b.build());
    }

    CommandLine cl = null;
    try {
        CommandLineParser parser = new DefaultParser();
        cl = parser.parse(opts, args);
    } catch (ParseException e) {
        System.err.format("Argument parsing failed: %s\n", e.getMessage());
        HELP_FORMATTER.printHelp(TraceIndexExtractor.class.getCanonicalName(), HELP_MESSAGE, opts, null, true);
        System.exit(1);
    }

    if (cl.hasOption("help")) {
        HELP_FORMATTER.printHelp(TraceIndexExtractor.class.getCanonicalName(), HELP_MESSAGE, opts, null, true);
        return;
    }

    // Not enough memory available?  We're gonna need a bigger heap.
    long maxMemory = Runtime.getRuntime().maxMemory();
    if (maxMemory < 1L << 34) { // 16GB
        String msg = StringUtils.join(
                String.format(
                        "You have run this class with a maximum heap size of less than 16GB (%d to be exact). ",
                        maxMemory),
                "There is no way this process will complete with that much space available. ",
                "Crank up your heap allocation with -Xmx and try again.", "");
        throw new RuntimeException(msg);
    }

    File inputFile = new File(cl.getOptionValue(OPTION_SCAN_FILE));
    if (!inputFile.exists()) {
        System.err.format("Cannot find input scan file at %s\n", inputFile.getAbsolutePath());
        HELP_FORMATTER.printHelp(TraceIndexExtractor.class.getCanonicalName(), HELP_MESSAGE, opts, null, true);
        System.exit(1);
    }

    File rocksDBFile = new File(cl.getOptionValue(OPTION_INDEX_PATH));
    if (rocksDBFile.exists()) {
        System.err.format("Index file at %s already exists--remove and retry\n", rocksDBFile.getAbsolutePath());
        HELP_FORMATTER.printHelp(TraceIndexExtractor.class.getCanonicalName(), HELP_MESSAGE, opts, null, true);
        System.exit(1);
    }

    List<Double> targetMZs = new ArrayList<>();
    try (BufferedReader reader = new BufferedReader(new FileReader(cl.getOptionValue(OPTION_TARGET_MASSES)))) {
        String line;
        while ((line = reader.readLine()) != null) {
            targetMZs.add(Double.valueOf(line));
        }
    }

    TraceIndexExtractor extractor = new TraceIndexExtractor();
    extractor.processScan(targetMZs, inputFile, rocksDBFile);
}

From source file:edu.cmu.tetrad.cli.search.FgsDiscrete.java

/**
 * @param args the command line arguments
 */
public static void main(String[] args) {
    if (args == null || args.length == 0 || Args.hasLongOption(args, "help")) {
        Args.showHelp("fgs-discrete", MAIN_OPTIONS);
        return;
    }

    parseArgs(args);

    System.out.println("================================================================================");
    System.out.printf("FGS Discrete (%s)%n", DateTime.printNow());
    System.out.println("================================================================================");

    String argInfo = createArgsInfo();
    System.out.println(argInfo);
    LOGGER.info("=== Starting FGS Discrete: " + Args.toString(args, ' '));
    LOGGER.info(argInfo.trim().replaceAll("\n", ",").replaceAll(" = ", "="));

    Set<String> excludedVariables = (excludedVariableFile == null) ? Collections.EMPTY_SET
            : getExcludedVariables();

    runPreDataValidations(excludedVariables, System.err);

    DataSet dataSet = readInDataSet(excludedVariables);

    runOptionalDataValidations(dataSet, System.err);

    Path outputFile = Paths.get(dirOut.toString(), outputPrefix + ".txt");
    try (PrintStream writer = new PrintStream(
            new BufferedOutputStream(Files.newOutputStream(outputFile, StandardOpenOption.CREATE)))) {
        String runInfo = createOutputRunInfo(excludedVariables, dataSet);
        writer.println(runInfo);
        String[] infos = runInfo.trim().replaceAll("\n\n", ";").split(";");
        for (String s : infos) {
            LOGGER.info(s.trim().replaceAll("\n", ",").replaceAll(":,", ":").replaceAll(" = ", "="));
        }

        Graph graph = runFgsDiscrete(dataSet, writer);

        writer.println();
        writer.println(graph.toString());

        if (graphML) {
            writeOutGraphML(graph, Paths.get(dirOut.toString(), outputPrefix + "_graph.txt"));
        }
    } catch (IOException exception) {
        LOGGER.error("FGS Discrete failed.", exception);
        System.err.printf("%s: FGS Discrete failed.%n", DateTime.printNow());
        System.out.println("Please see log file for more information.");
        System.exit(-128);
    }
    System.out.printf("%s: FGS Discrete finished!  Please see %s for details.%n", DateTime.printNow(),
            outputFile.getFileName().toString());
    LOGGER.info(String.format("FGS Discrete finished!  Please see %s for details.",
            outputFile.getFileName().toString()));
}

From source file:alluxio.cli.LogLevel.java

/**
 * Sets or gets log level of master and worker through their REST API.
 *
 * @param args same arguments as {@link LogLevel}
 */
public static void main(String[] args) {
    int exitCode = 1;
    try {
        logLevel(args);
        exitCode = 0;
    } catch (ParseException e) {
        printHelp("Unable to parse input args: " + e.getMessage());
    } catch (IOException e) {
        e.printStackTrace();
        System.err.println(String.format("Failed to set log level: %s", e.getMessage()));
    }
    System.exit(exitCode);
}

From source file:com.spectralogic.ds3cli.Main.java

public static void main(final String[] args) {

    try {
        final Properties props = CliUtils.readProperties(PROPERTY_FILE);

        // constructor parses for command, help, version, and logging settings
        final Arguments arguments = new Arguments(args);

        // turn root log wide open, filters will be set to argument levels
        configureLogging(arguments.getConsoleLogLevel(), arguments.getFileLogLevel());

        LOG.info("Version: {}", CliUtils.getVersion(props));
        LOG.info("Build Date: {}", CliUtils.getBuildDate(props));
        LOG.info("Command line args: {}", Joiner.on(", ").join(args));
        LOG.info("Console log level: {}", arguments.getConsoleLogLevel().toString());
        LOG.info("Log file log level: {}", arguments.getFileLogLevel().toString());
        LOG.info(CliCommand.getPlatformInformation());

        if (arguments.isHelp()) {
            printHelp(arguments);
            System.exit(0);
        }

        if (arguments.isPrintVersion()) {
            printVersion(props);
            System.exit(0);
        }

        // then it had better be a command
        try {
            if (arguments.getCommand() == null) {
                throw new MissingOptionException(COMMAND.getOpt());
            }
        } catch (final IllegalArgumentException e) {
            throw new BadArgumentException("Unknown command", e);
        }

        final Ds3Client client = ClientFactory.createClient(arguments);
        if (!CliUtils.isVersionSupported(client)) {
            System.out.println(String.format("ERROR: Minimum Black Pearl supported is %s",
                    CliUtils.MINIMUM_VERSION_SUPPORTED));
            System.exit(2);
        }

        final Ds3Provider provider = new Ds3ProviderImpl(client, Ds3ClientHelpers.wrap(client));
        final FileSystemProvider fileSystemProvider = new FileSystemProviderImpl();

        // get command, parse args
        final CliCommand command = CliCommandFactory.getCommandExecutor(arguments.getCommand())
                .withProvider(provider, fileSystemProvider);
        command.init(arguments);

        final CommandResponse response = command.render();
        System.out.println(response.getMessage());
        System.exit(response.getReturnCode());
    } catch (final Exception e) {
        EXCEPTION.handleException(e);
        System.exit(2);
    }
}

From source file:com.ning.hfind.Find.java

public static void main(String[] origArgs) throws ParseException, IOException {
    PrinterConfig printerConfig = new PrinterConfig();

    CommandLineParser parser = new PosixParser();
    CommandLine line = parser.parse(options, origArgs);
    String[] args = line.getArgs();

    if (args.length > 1) {
        // find(1) seems to complain about the first argument only, let's do the same
        System.out.println(String.format("hfind: %s: unknown option", args[1]));
        System.exit(COMMAND_LINE_ERROR);
    }
    if (line.hasOption("help") || args.length == 0) {
        usage();
        return;
    }

    String path = args[0];
    // Optimization: check the depth on a top-level basis, not on a per-file basis
    // This avoids crawling files on Hadoop we don't care about
    int maxDepth = Integer.MAX_VALUE;
    if (line.hasOption("maxdepth")) {
        String maxDepthOptionValue = line.getOptionValue("maxdepth");
        maxDepth = Integer.valueOf(maxDepthOptionValue);
    }
    if (line.hasOption("delete")) {
        // -delete implies -d
        printerConfig.setDepthMode(true);
        printerConfig.setDeleteMode(true);
    }
    if (line.hasOption("d")) {
        printerConfig.setDepthMode(true);
    }
    if (line.hasOption("print0")) {
        printerConfig.setEndLineWithNull(true);
    }
    if (line.hasOption("verbose")) {
        printerConfig.setVerbose(true);
    }

    // Ignore certain primaries
    Iterator<Option> optionsIterator = ExpressionFactory.sanitizeCommandLine(line.getOptions());

    Expression expression = null;
    try {
        expression = ExpressionFactory
                .buildExpressionFromCommandLine(new PushbackIterator<Option>(optionsIterator));
        //System.out.println(String.format("find %s: %s", StringUtils.join(origArgs, " "), expression));
    } catch (IllegalArgumentException e) {
        System.err.println(e);
        System.exit(COMMAND_LINE_ERROR);
    }

    try {
        expression.run(path, maxDepth, printerConfig);
        System.exit(0);
    } catch (IOException e) {
        System.err.println(String.format("Error crawling HDFS: %s", e.getLocalizedMessage()));
        System.exit(HADOOP_ERROR);
    }
}

From source file:com.act.lcms.AnimateNetCDFAroundMass.java

public static void main(String[] args) throws Exception {
    if (args.length < 7 || !areNCFiles(Arrays.copyOfRange(args, 5, args.length))) {
        throw new RuntimeException(
                "Needs: \n" + "(1) mass value, e.g., 132.0772 \n" + "(2) time value, e.g., 39.2, (seconds), \n"
                        + "(3) minimum Mz Precision, 0.04 \n" + "(4) max z axis, e.g., 20000 \n"
                        + "(5) prefix for .data and rendered .pdf \n" + "(6..) 2 or more NetCDF .nc files");
    }

    Double mz = Double.parseDouble(args[0]);
    Double time = Double.parseDouble(args[1]);
    Double minMzPrecision = Double.parseDouble(args[2]);
    Double maxZAxis = Double.parseDouble(args[3]);
    String outPrefix = args[4];

    // the mz values go from 50-950, we start with a big window and exponentially narrow down
    double mzWin = 100;
    // time values go from 0-450, we start with a big window and exponentially narrow down
    double timeWin = 50;

    // the factor by which to zoom in every step (has to be >1, a value of 2 is good)
    double factor = 1.2;

    // the animation frame count
    int frame = 1;

    AnimateNetCDFAroundMass c = new AnimateNetCDFAroundMass();
    String[] netCDFFnames = Arrays.copyOfRange(args, 5, args.length);
    List<List<XYZ>> spectra = c.getSpectra(netCDFFnames, time, timeWin, mz, mzWin);

    for (List<XYZ> s : spectra) {
        System.out.format("%d xyz datapoints in (initial narrowed) spectra\n", s.size());
    }

    String[] labels = new String[netCDFFnames.length];
    for (int i = 0; i < labels.length; i++)
        labels[i] = "Dataset: " + i;
    // you could set labels to netCDFFnames to get precise labels on the graphs

    Gnuplotter plotter = new Gnuplotter();
    String fmt = "png";

    List<String> outImgFiles = new ArrayList<>(), outDataFiles = new ArrayList<>();
    while (mzWin > minMzPrecision) {

        // exponentially narrow windows down
        mzWin /= factor;
        timeWin /= factor;

        List<List<XYZ>> windowedSpectra = c.getSpectraInWindowAll(spectra, time, timeWin, mz, mzWin);

        String frameid = String.format("%03d", frame);
        String outPDF = outPrefix + frameid + "." + fmt;
        String outDATA = outPrefix + frameid + ".data";
        outImgFiles.add(outPDF);
        outDataFiles.add(outDATA);
        frame++;

        // Write data output to outfile
        PrintStream out = new PrintStream(new FileOutputStream(outDATA));

        // print out the spectra to outDATA
        for (List<XYZ> windowOfSpectra : windowedSpectra) {
            for (XYZ xyz : windowOfSpectra) {
                out.format("%.4f\t%.4f\t%.4f\n", xyz.time, xyz.mz, xyz.intensity);
                out.flush();
            }
            // delimit this dataset from the rest
            out.print("\n\n");
        }

        // close the .data
        out.close();

        // render outDATA to outPDF using gnuplot
        plotter.plotMulti3D(outDATA, outPDF, fmt, labels, maxZAxis);
    }

    String outImgs = outPrefix + "*." + fmt;
    plotter.makeAnimatedGIF(outImgs, outPrefix + ".gif");
    // all the frames are now in the animated gif, remove the intermediate files
    for (String f : outDataFiles)
        new File(f).delete();
    for (String f : outImgFiles)
        new File(f).delete();
}

From source file:edu.lternet.pasta.portal.search.BrowseGroup.java

public static void main(String[] args) {
    String browseDir = "/home/pasta/local/browse";
    BrowseGroup controlledVocabulary = null;
    String xmlString = null;
    String htmlString = null;
    File browseCacheFile = null;

    controlledVocabulary = generateKeywordCache();

    if (controlledVocabulary != null) {
        xmlString = controlledVocabulary.toXML();
        htmlString = controlledVocabulary.toHTML();
        browseCacheFile = new File(String.format("%s/browseKeyword.xml", browseDir));
        try {
            FileUtils.writeStringToFile(browseCacheFile, xmlString);
        } catch (Exception e) {
            e.printStackTrace();
        }
        browseCacheFile = new File(String.format("%s/browseKeyword.html", browseDir));
        try {
            FileUtils.writeStringToFile(browseCacheFile, htmlString);
        } catch (Exception e) {
            e.printStackTrace();
        }
        System.err.println("Generation of keyword browse cache and HTML completed.");
    } else {
        System.err.println("Generation of keyword browse cache failed.");
    }
}