Example usage for java.util List clear

List of usage examples for java.util List clear

Introduction

On this page you can find example usages of java.util.List.clear().

Prototype

void clear();

Document

Removes all of the elements from this list (optional operation).
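
Before the real-world examples below, here is a minimal sketch of that contract (class name is illustrative): clear() empties the list in place, and because it is an optional operation, fixed-size lists such as those returned by Arrays.asList reject it.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class ListClearDemo {
    public static void main(String[] args) {
        List<String> names = new ArrayList<>(Arrays.asList("alpha", "beta"));
        names.clear(); // removes every element in place
        System.out.println(names.isEmpty()); // true

        List<String> fixed = Arrays.asList("alpha", "beta");
        try {
            fixed.clear(); // optional operation: fixed-size lists reject it
        } catch (UnsupportedOperationException e) {
            System.out.println("clear() not supported by this list");
        }
    }
}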

Usage

From source file:it.units.malelab.ege.MappingPropertiesExperimenter.java

public static void main(String[] args) throws IOException, InterruptedException, ExecutionException {
    final int n = 10000;
    final int nDist = 10000;
    //prepare problems and methods
    List<String> problems = Lists.newArrayList("bool-parity5", "bool-mopm3", "sr-keijzer6", "sr-nguyen7",
            "sr-pagie1", "sr-vladislavleva4", "other-klandscapes3", "other-klandscapes7", "other-text");
    List<String> mappers = new ArrayList<>();
    for (int gs : new int[] { 64, 128, 256, 512, 1024 }) {
        mappers.add("ge-" + gs + "-2");
        mappers.add("ge-" + gs + "-4");
        mappers.add("ge-" + gs + "-8");
        mappers.add("ge-" + gs + "-12");
        mappers.add("pige-" + gs + "-4");
        mappers.add("pige-" + gs + "-8");
        mappers.add("pige-" + gs + "-16");
        mappers.add("pige-" + gs + "-24");
        mappers.add("hge-" + gs + "-0");
        mappers.add("whge-" + gs + "-2");
        mappers.add("whge-" + gs + "-3");
        mappers.add("whge-" + gs + "-5");
    }
    mappers.add("sge-0-5");
    mappers.add("sge-0-6");
    mappers.add("sge-0-7");
    mappers.add("sge-0-8");
    mappers.clear();
    mappers.addAll(Lists.newArrayList("ge-1024-8", "pige-1024-16", "hge-1024-0", "whge-1024-3", "sge-0-6"));
    PrintStream filePrintStream = null;
    if (args.length > 0) {
        filePrintStream = new PrintStream(args[0]);
    } else {
        filePrintStream = System.out;
    }
    filePrintStream.printf("problem;mapper;genotypeSize;param;property;value%n");
    //prepare distances
    Distance<Node<String>> phenotypeDistance = new CachedDistance<>(new LeavesEdit<String>());
    Distance<Sequence> genotypeDistance = new CachedDistance<>(new Hamming());
    //iterate
    for (String problemName : problems) {
        for (String mapperName : mappers) {
            System.out.printf("%20.20s, %20.20s", problemName, mapperName);
            //build problem
            Problem<String, NumericFitness> problem = null;
            if (problemName.equals("bool-parity5")) {
                problem = new Parity(5);
            } else if (problemName.equals("bool-mopm3")) {
                problem = new MultipleOutputParallelMultiplier(3);
            } else if (problemName.equals("sr-keijzer6")) {
                problem = new HarmonicCurve();
            } else if (problemName.equals("sr-nguyen7")) {
                problem = new Nguyen7(1);
            } else if (problemName.equals("sr-pagie1")) {
                problem = new Pagie1();
            } else if (problemName.equals("sr-vladislavleva4")) {
                problem = new Vladislavleva4(1);
            } else if (problemName.equals("other-klandscapes3")) {
                problem = new KLandscapes(3);
            } else if (problemName.equals("other-klandscapes7")) {
                problem = new KLandscapes(7);
            } else if (problemName.equals("other-text")) {
                problem = new Text();
            }
            //build configuration and evolver
            Mapper mapper = null;
            int genotypeSize = Integer.parseInt(mapperName.split("-")[1]);
            int mapperMainParam = Integer.parseInt(mapperName.split("-")[2]);
            if (mapperName.split("-")[0].equals("ge")) {
                mapper = new StandardGEMapper<>(mapperMainParam, 1, problem.getGrammar());
            } else if (mapperName.split("-")[0].equals("pige")) {
                mapper = new PiGEMapper<>(mapperMainParam, 1, problem.getGrammar());
            } else if (mapperName.split("-")[0].equals("sge")) {
                mapper = new SGEMapper<>(mapperMainParam, problem.getGrammar());
            } else if (mapperName.split("-")[0].equals("hge")) {
                mapper = new HierarchicalMapper<>(problem.getGrammar());
            } else if (mapperName.split("-")[0].equals("whge")) {
                mapper = new WeightedHierarchicalMapper<>(mapperMainParam, false, true, problem.getGrammar());
            }
            //prepare things
            Random random = new Random(1);
            Set<Sequence> genotypes = new LinkedHashSet<>(n);
            //build genotypes
            if (mapperName.split("-")[0].equals("sge")) {
                SGEGenotypeFactory<String> factory = new SGEGenotypeFactory<>((SGEMapper) mapper);
                while (genotypes.size() < n) {
                    genotypes.add(factory.build(random));
                }
                genotypeSize = factory.getBitSize();
            } else {
                BitsGenotypeFactory factory = new BitsGenotypeFactory(genotypeSize);
                while (genotypes.size() < n) {
                    genotypes.add(factory.build(random));
                }
            }
            //build and fill map
            Multimap<Node<String>, Sequence> multimap = HashMultimap.create();
            int progress = 0;
            for (Sequence genotype : genotypes) {
                Node<String> phenotype;
                try {
                    if (mapperName.split("-")[0].equals("sge")) {
                        phenotype = mapper.map((SGEGenotype<String>) genotype, new HashMap<>());
                    } else {
                        phenotype = mapper.map((BitsGenotype) genotype, new HashMap<>());
                    }
                } catch (MappingException e) {
                    phenotype = Node.EMPTY_TREE;
                }
                multimap.put(phenotype, genotype);
                progress = progress + 1;
                if (progress % Math.round(n / 10) == 0) {
                    System.out.print(".");
                }
            }
            System.out.println();
            //compute distances
            List<Pair<Double, Double>> allDistances = new ArrayList<>();
            List<Pair<Double, Double>> allValidDistances = new ArrayList<>();
            Multimap<Node<String>, Double> genotypeDistances = ArrayListMultimap.create();
            for (Node<String> phenotype : multimap.keySet()) {
                for (Sequence genotype1 : multimap.get(phenotype)) {
                    for (Sequence genotype2 : multimap.get(phenotype)) {
                        double gDistance = genotypeDistance.d(genotype1, genotype2);
                        genotypeDistances.put(phenotype, gDistance);
                        if (genotypeDistances.get(phenotype).size() > nDist) {
                            break;
                        }
                    }
                    if (genotypeDistances.get(phenotype).size() > nDist) {
                        break;
                    }
                }
            }
            List<Map.Entry<Node<String>, Sequence>> entries = new ArrayList<>(multimap.entries());
            Collections.shuffle(entries, random);
            for (Map.Entry<Node<String>, Sequence> entry1 : entries) {
                for (Map.Entry<Node<String>, Sequence> entry2 : entries) {
                    double gDistance = genotypeDistance.d(entry1.getValue(), entry2.getValue());
                    double pDistance = phenotypeDistance.d(entry1.getKey(), entry2.getKey());
                    allDistances.add(new Pair<>(gDistance, pDistance));
                    if (!Node.EMPTY_TREE.equals(entry1.getKey()) && !Node.EMPTY_TREE.equals(entry2.getKey())) {
                        allValidDistances.add(new Pair<>(gDistance, pDistance));
                    }
                    if (allDistances.size() > nDist) {
                        break;
                    }
                }
                if (allDistances.size() > nDist) {
                    break;
                }
            }
            //compute properties
            double invalidity = (double) multimap.get(Node.EMPTY_TREE).size() / (double) genotypes.size();
            double redundancy = 1 - (double) multimap.keySet().size() / (double) genotypes.size();
            double validRedundancy = redundancy;
            if (multimap.keySet().contains(Node.EMPTY_TREE)) {
                validRedundancy = 1 - ((double) multimap.keySet().size() - 1d)
                        / (double) (genotypes.size() - multimap.get(Node.EMPTY_TREE).size());
            }
            double locality = Utils.pearsonCorrelation(allDistances);
            double validLocality = Utils.pearsonCorrelation(allValidDistances);
            double[] sizes = new double[multimap.keySet().size()];
            double[] meanGenotypeDistances = new double[multimap.keySet().size()];
            int invalidIndex = -1;
            int c = 0;
            for (Node<String> phenotype : multimap.keySet()) {
                if (Node.EMPTY_TREE.equals(phenotype)) {
                    invalidIndex = c;
                }
                sizes[c] = multimap.get(phenotype).size();
                double[] distances = new double[genotypeDistances.get(phenotype).size()];
                int k = 0;
                for (Double distance : genotypeDistances.get(phenotype)) {
                    distances[k] = distance;
                    k = k + 1;
                }
                meanGenotypeDistances[c] = StatUtils.mean(distances);
                c = c + 1;
            }
            double nonUniformity = Math.sqrt(StatUtils.variance(sizes)) / StatUtils.mean(sizes);
            double nonSynonymousity = StatUtils.mean(meanGenotypeDistances)
                    / StatUtils.mean(firsts(allDistances));
            double validNonUniformity = nonUniformity;
            double validNonSynonymousity = nonSynonymousity;
            if (invalidIndex != -1) {
                double[] validSizes = new double[multimap.keySet().size() - 1];
                double[] validMeanGenotypeDistances = new double[multimap.keySet().size() - 1];
                if (invalidIndex > 0) {
                    System.arraycopy(sizes, 0, validSizes, 0, invalidIndex);
                    System.arraycopy(meanGenotypeDistances, 0, validMeanGenotypeDistances, 0, invalidIndex);
                }
                System.arraycopy(sizes, invalidIndex + 1, validSizes, invalidIndex,
                        sizes.length - invalidIndex - 1);
                System.arraycopy(meanGenotypeDistances, invalidIndex + 1, validMeanGenotypeDistances,
                        invalidIndex, meanGenotypeDistances.length - invalidIndex - 1);
                validNonUniformity = Math.sqrt(StatUtils.variance(validSizes)) / StatUtils.mean(validSizes);
                validNonSynonymousity = StatUtils.mean(validMeanGenotypeDistances)
                        / StatUtils.mean(firsts(allValidDistances));
            }
            //compute locality
            filePrintStream.printf("%s;%s;%d;%d;invalidity;%f %n", problemName, mapperName.split("-")[0],
                    genotypeSize, mapperMainParam, invalidity);
            filePrintStream.printf("%s;%s;%d;%d;redundancy;%f %n", problemName, mapperName.split("-")[0],
                    genotypeSize, mapperMainParam, redundancy);
            filePrintStream.printf("%s;%s;%d;%d;validRedundancy;%f %n", problemName, mapperName.split("-")[0],
                    genotypeSize, mapperMainParam, validRedundancy);
            filePrintStream.printf("%s;%s;%d;%d;locality;%f %n", problemName, mapperName.split("-")[0],
                    genotypeSize, mapperMainParam, locality);
            filePrintStream.printf("%s;%s;%d;%d;validLLocality;%f %n", problemName, mapperName.split("-")[0],
                    genotypeSize, mapperMainParam, validLocality);
            filePrintStream.printf("%s;%s;%d;%d;nonUniformity;%f %n", problemName, mapperName.split("-")[0],
                    genotypeSize, mapperMainParam, nonUniformity);
            filePrintStream.printf("%s;%s;%d;%d;validNonUniformity;%f %n", problemName,
                    mapperName.split("-")[0], genotypeSize, mapperMainParam, validNonUniformity);
            filePrintStream.printf("%s;%s;%d;%d;nonSynonymousity;%f %n", problemName, mapperName.split("-")[0],
                    genotypeSize, mapperMainParam, nonSynonymousity);
            filePrintStream.printf("%s;%s;%d;%d;validNonSynonymousity;%f %n", problemName,
                    mapperName.split("-")[0], genotypeSize, mapperMainParam, validNonSynonymousity);
        }
    }
    if (filePrintStream != null) {
        filePrintStream.close();
    }
}
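
In the example above, mappers.clear() followed by addAll(...) throws away the generated grid of mapper names and substitutes a hand-picked subset while keeping the same List instance. A minimal sketch of that replace-in-place idiom (names are illustrative):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class ReplaceContentsDemo {
    public static void main(String[] args) {
        List<String> mappers = new ArrayList<>();
        for (int gs : new int[] { 64, 128, 256 }) {
            mappers.add("ge-" + gs + "-2"); // full grid of configurations
        }
        // Discard the grid and keep only a short, hand-picked list,
        // reusing the same list instance.
        mappers.clear();
        mappers.addAll(Arrays.asList("ge-1024-8", "whge-1024-3"));
        System.out.println(mappers); // [ge-1024-8, whge-1024-3]
    }
}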

From source file:com.hp.test.framework.Reporting.Utlis.java

public static void main(String ar[]) throws IOException {

    ArrayList<String> runs_list = new ArrayList<String>();
    String basepath = replacelogs();
    String path = basepath + "ATU Reports\\Results";
    File directory = new File(path);
    File[] subdirs = directory.listFiles((FileFilter) DirectoryFileFilter.DIRECTORY);
    String htmlReport = basepath + "HTML_Design_Files\\CSS\\HtmlReport.html";
    for (File dir : subdirs) {

        runs_list.add(dir.getName());
    }
    String Reports_path = basepath + "ATU Reports\\";
    int LatestRun = Utlis.getLastRun(Reports_path);
    String Last_run_path = Reports_path + "Results\\Run_" + String.valueOf(LatestRun) + "\\";

    HtmlParse.replaceMainTable(false, new File(Last_run_path + "/CurrentRun.html"));
    HtmlParse.replaceCountsinJSFile(new File("HTML_Design_Files/JS/3dChart.js"), Last_run_path);
    UpdateTestCaseDesciption.basepath = Last_run_path;
    UpdateTestCaseDesciption.getTestCaseHtmlPath(Last_run_path + "/CurrentRun.html");
    UpdateTestCaseDesciption.replaceDetailsTable(Last_run_path + "/CurrentRun.html");
    //   GenerateFailReport.genarateFailureReport(new File(htmlReport), Reports_path + "Results\\Run_" + String.valueOf(LatestRun));
    genarateFailureReport(new File(htmlReport), Reports_path + "Results\\");
    Map<String, List<String>> runCounts = GetCountsrunsWise(runs_list, path);

    int success = replaceCounts(runCounts, path);

    if (success == 0) {
        File SourceFile = new File(path + "\\lineChart_temp.js");
        File RenameFile = new File(path + "\\lineChart.js");

        renameOriginalFile(SourceFile, RenameFile);

        File SourceFile1 = new File(path + "\\barChart_temp.js");
        File RenameFile1 = new File(path + "\\barChart.js");

        renameOriginalFile(SourceFile1, RenameFile1);

        Utlis.getpaths(Reports_path + "\\Results\\Run_" + String.valueOf(LatestRun));
        try {
            Utlis.replaceMainTable(false, new File(Reports_path + "index.html"));
            Utlis.replaceMainTable(true, new File(Reports_path + "results\\" + "ConsolidatedPage.html"));
            Utlis.replaceMainTable(true,
                    new File(Reports_path + "Results\\Run_" + String.valueOf(LatestRun) + "CurrentRun.html"));

            fileList.add(
                    new File(Reports_path + "\\Results\\Run_" + String.valueOf(LatestRun) + "CurrentRun.html"));
            for (File f : fileList) {
                Utlis.replaceMainTable(true, f);
            }

        } catch (Exception ex) {
            log.info("Error in updating Report format" + ex.getMessage());
        }

        Last_run_path = Reports_path + "Results\\Run_" + String.valueOf(LatestRun) + "\\";

        //   HtmlParse.replaceMainTable(false, new File(Last_run_path + "/CurrentRun.html"));
        //   HtmlParse.replaceCountsinJSFile(new File("../HTML_Design_Files/JS/3dChart.js"), Last_run_path);
        ArrayList<String> to_list = new ArrayList<String>();
        ReportingProperties reportingproperties = new ReportingProperties();
        String temp_eml = reportingproperties.getProperty("TOLIST");
        String JenkinsURL = reportingproperties.getProperty("JENKINSURL");

        String ScreenShotsDir = reportingproperties.getProperty("ScreenShotsDirectory");
        Boolean cleanScreenshotsDir = Boolean.valueOf(reportingproperties.getProperty("CleanPreScreenShots"));
        Boolean generatescreenshots = Boolean.valueOf(reportingproperties.getProperty("GenerateScreenShots"));
        String HtmlreportFilePrefix = reportingproperties.getProperty("HtmlreportFilePrefix");

        if (cleanScreenshotsDir) {
            if (!CleanFilesinDir(ScreenShotsDir)) {
                log.error("Not able to Clean the Previous Run Details in the paht" + ScreenShotsDir);
            } else {
                log.info("Cleaning Previous Run Details in the paht" + ScreenShotsDir + "Sucess");
            }
        }
        List<String> scr_fileList;
        List<String> html_fileList;
        if (generatescreenshots) {
            scr_fileList = GetFileListinDir(ScreenShotsDir, "png");

            int len = scr_fileList.size();
            len = len + 1;

            screenshot.generatescreenshot(Last_run_path + "CurrentRun.html",
                    ScreenShotsDir + "screenshot_" + len + ".png");
            File source = new File(Reports_path + "Results\\HtmlReport.html");
            File dest = new File(ScreenShotsDir + HtmlreportFilePrefix + "_HtmlReport.html");
            //  Files.copy(f.toPath(), (new File((ScreenShotsDir+HtmlreportFilePrefix+"_HtmlReport.html").toPath(),StandardCopyOption.REPLACE_EXISTING);
            FileUtils.copyFile(source, dest);
            scr_fileList.clear();

        }

        scr_fileList = GetFileListinDir(ScreenShotsDir, "png");
        html_fileList = GetFileListinDir(ScreenShotsDir, "html");

        if (temp_eml.length() > 1) {
            String[] to_list_temp = temp_eml.split(",");

            if (to_list_temp.length > 0) {
                for (String to_list_temp1 : to_list_temp) {
                    to_list.add(to_list_temp1);
                }
            }
            if (to_list.size() > 0) {
                screenshot.generatescreenshot(Last_run_path + "CurrentRun.html",
                        Last_run_path + "screenshot.png");

                //    cleanTempDir.cleanandCreate(Reports_path, LatestRun);
                // ZipUtils.ZipFolder(Reports_path + "temp", Reports_path + "ISTF_Reports.zip");
                if (generatescreenshots) {
                    SendingEmail.sendmail(to_list, JenkinsURL, scr_fileList, html_fileList);
                } else {
                    SendingEmail.sendmail(to_list, JenkinsURL, Reports_path + "/Results/HtmlReport.html",
                            Last_run_path + "screenshot.png");
                }

                //  FileUtils.deleteQuietly(new File(Reports_path + "ISTF_Reports.zip"));
                // FileUtils.deleteDirectory(new File(Reports_path + "temp"));
            }
        }
    }
}
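
In the reporting example above, scr_fileList.clear() empties the screenshot list once its files have been handled, so the later rescan of the directory starts from a clean slate. A minimal sketch of that consume-then-clear idiom (names are illustrative):

import java.util.ArrayList;
import java.util.List;

public class ConsumeAndClearDemo {
    public static void main(String[] args) {
        List<String> screenshots = new ArrayList<>();
        screenshots.add("screenshot_1.png");
        screenshots.add("screenshot_2.png");
        for (String file : screenshots) {
            System.out.println("processing " + file); // e.g. copy or attach
        }
        screenshots.clear(); // discard processed entries before re-scanning
        System.out.println("remaining: " + screenshots.size()); // 0
    }
}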

From source file:com.finderbots.miner2.pinterest.PinterestCrawlAndMinerTool.java

public static void main(String[] args) {
    Options options = new Options();
    CmdLineParser parser = new CmdLineParser(options);

    try {
        parser.parseArgument(args);
    } catch (CmdLineException e) {
        System.err.println(e.getMessage());
        printUsageAndExit(parser);
    }

    // Before we get too far along, see if the domain looks valid.
    String domain = options.getDomain();
    String urlsFile = options.getUrlsFile();
    if (domain != null) {
        validateDomain(domain, parser);
    } else {
        if (urlsFile == null) {
            System.err.println(
                    "Either a target domain should be specified or a file with a list of urls needs to be provided");
            printUsageAndExit(parser);
        }
    }

    if (domain != null && urlsFile != null) {
        System.out.println("Warning: Both domain and urls file list provided - using domain");
    }

    String outputDirName = options.getOutputDir();
    if (options.isDebugLogging()) {
        System.setProperty("bixo.root.level", "DEBUG");
    } else {
        System.setProperty("bixo.root.level", "INFO");
    }

    if (options.getLoggingAppender() != null) {
        // Set console vs. DRFA vs. something else
        System.setProperty("bixo.appender", options.getLoggingAppender());
    }

    String logsDir = options.getLogsDir();
    if (!logsDir.endsWith("/")) {
        logsDir = logsDir + "/";
    }

    try {
        JobConf conf = new JobConf();
        Path outputPath = new Path(outputDirName);
        FileSystem fs = outputPath.getFileSystem(conf);

        // First check if the user wants to clean
        if (options.isCleanOutputDir()) {
            if (fs.exists(outputPath)) {
                fs.delete(outputPath, true);
            }
        }

        // See if the user isn't starting from scratch then set up the
        // output directory and create an initial urls subdir.
        if (!fs.exists(outputPath)) {
            fs.mkdirs(outputPath);

            // Create a "0-<timestamp>" sub-directory with just a /crawldb subdir
            // In the /crawldb dir the input file will have a single URL for the target domain.

            Path curLoopDir = CrawlDirUtils.makeLoopDir(fs, outputPath, 0);
            String curLoopDirName = curLoopDir.getName();
            setLoopLoggerFile(logsDir + curLoopDirName, 0);

            Path crawlDbPath = new Path(curLoopDir, CrawlConfig.CRAWLDB_SUBDIR_NAME);

            if (domain != null) {
                importOneDomain(domain, crawlDbPath, conf);
            } else {
                importUrls(urlsFile, crawlDbPath);
            }
        }

        Path latestDirPath = CrawlDirUtils.findLatestLoopDir(fs, outputPath);

        if (latestDirPath == null) {
            System.err.println("No previous cycle output dirs exist in " + outputDirName);
            printUsageAndExit(parser);
        }

        Path crawlDbPath = new Path(latestDirPath, CrawlConfig.CRAWLDB_SUBDIR_NAME);

        // Set up the start and end loop counts.
        int startLoop = CrawlDirUtils.extractLoopNumber(latestDirPath);
        int endLoop = startLoop + options.getNumLoops();

        // Set up the UserAgent for the fetcher.
        UserAgent userAgent = new UserAgent(options.getAgentName(), CrawlConfig.EMAIL_ADDRESS,
                CrawlConfig.WEB_ADDRESS);

        // You also get to customize the FetcherPolicy
        FetcherPolicy defaultPolicy;
        if (options.getCrawlDuration() != 0) {
            defaultPolicy = new AdaptiveFetcherPolicy(options.getEndCrawlTime(), options.getCrawlDelay());
        } else {
            defaultPolicy = new FetcherPolicy();
        }
        defaultPolicy.setMaxContentSize(CrawlConfig.MAX_CONTENT_SIZE);
        defaultPolicy.setRequestTimeout(10L * 1000L);//10 seconds

        // COMPLETE for crawling a single site, EFFICIENT for many sites
        if (options.getCrawlPolicy().equals(Options.IMPOLITE_CRAWL_POLICY)) {
            defaultPolicy.setFetcherMode(FetcherPolicy.FetcherMode.IMPOLITE);
        } else if (options.getCrawlPolicy().equals(Options.EFFICIENT_CRAWL_POLICY)) {
            defaultPolicy.setFetcherMode(FetcherPolicy.FetcherMode.EFFICIENT);
        } else if (options.getCrawlPolicy().equals(Options.COMPLETE_CRAWL_POLICY)) {
            defaultPolicy.setFetcherMode(FetcherPolicy.FetcherMode.COMPLETE);
        }

        // It is a good idea to set up a crawl duration when running long crawls as you may
        // end up in situations where the fetch slows down due to a 'long tail' and by
        // specifying a crawl duration you know exactly when the crawl will end.
        int crawlDurationInMinutes = options.getCrawlDuration();
        boolean hasEndTime = crawlDurationInMinutes != Options.NO_CRAWL_DURATION;
        long targetEndTime = hasEndTime
                ? System.currentTimeMillis() + (crawlDurationInMinutes * CrawlConfig.MILLISECONDS_PER_MINUTE)
                : FetcherPolicy.NO_CRAWL_END_TIME;

        // By setting up a url filter we only deal with urls that we want to
        // instead of all the urls that we extract.
        BaseUrlFilter urlFilter = null;
        List<String> patterns = null;
        String regexUrlFiltersFile = options.getRegexUrlFiltersFile();
        if (regexUrlFiltersFile != null) {
            patterns = RegexUrlDatumFilter.getUrlFilterPatterns(regexUrlFiltersFile);
        } else {
            patterns = RegexUrlDatumFilter.getDefaultUrlFilterPatterns();
            if (domain != null) {
                String domainPatterStr = "+(?i)^(http|https)://([a-z0-9]*\\.)*" + domain;
                patterns.add(domainPatterStr);
            } else {
                String protocolPatterStr = "+(?i)^(http|https)://*";
                patterns.add(protocolPatterStr);
                //Log.warn("Defaulting to basic url regex filtering (just suffix and protocol");
            }
        }
        urlFilter = new RegexUrlDatumFilter(patterns.toArray(new String[patterns.size()]));

        // get a list of patterns which tell the miner which URLs to include or exclude.
        patterns.clear();
        RegexUrlStringFilter urlsToMineFilter = null;
        String regexUrlsToMineFiltersFile = options.getRegexUrlToMineFile();
        AnalyzeHtml analyzer = null;
        if (regexUrlsToMineFiltersFile != null) {
            patterns = RegexUrlDatumFilter.getUrlFilterPatterns(regexUrlsToMineFiltersFile);
            urlsToMineFilter = new RegexUrlStringFilter(patterns.toArray(new String[patterns.size()]));
            analyzer = new AnalyzeHtml(urlsToMineFilter);
        }

        // OK, now we're ready to start looping, since we've got our current
        // settings
        for (int curLoop = startLoop + 1; curLoop <= endLoop; curLoop++) {

            // Adjust target end time, if appropriate.
            if (hasEndTime) {
                int remainingLoops = (endLoop - curLoop) + 1;
                long now = System.currentTimeMillis();
                long perLoopTime = (targetEndTime - now) / remainingLoops;
                defaultPolicy.setCrawlEndTime(now + perLoopTime);
            }

            Path curLoopDirPath = CrawlDirUtils.makeLoopDir(fs, outputPath, curLoop);
            String curLoopDirName = curLoopDirPath.getName();
            setLoopLoggerFile(logsDir + curLoopDirName, curLoop);

            Flow flow = PinterestCrawlAndMinerWorkflow.createFlow(curLoopDirPath, crawlDbPath, defaultPolicy,
                    userAgent, urlFilter, analyzer, options);
            flow.complete();

            // Writing out .dot files is a good way to verify your flows.
            flow.writeDOT("valid-flow.dot");

            // Update crawlDbPath to point to the latest crawl db
            crawlDbPath = new Path(curLoopDirPath, CrawlConfig.CRAWLDB_SUBDIR_NAME);
        }
    } catch (PlannerException e) {
        e.writeDOT("failed-flow.dot");
        System.err.println("PlannerException: " + e.getMessage());
        e.printStackTrace(System.err);
        System.exit(-1);
    } catch (Throwable t) {
        System.err.println("Exception running tool: " + t.getMessage());
        t.printStackTrace(System.err);
        System.exit(-1);
    }
}

From source file:com.finderbots.miner2.tomatoes.RTCriticsCrawlAndMinerTool.java

public static void main(String[] args) {
    Options options = new Options();
    CmdLineParser parser = new CmdLineParser(options);

    try {
        parser.parseArgument(args);
    } catch (CmdLineException e) {
        System.err.println(e.getMessage());
        printUsageAndExit(parser);
    }

    // Before we get too far along, see if the domain looks valid.
    String domain = options.getDomain();
    String urlsFile = options.getUrlsFile();
    if (domain != null) {
        validateDomain(domain, parser);
    } else {
        if (urlsFile == null) {
            System.err.println(
                    "Either a target domain should be specified or a file with a list of urls needs to be provided");
            printUsageAndExit(parser);
        }
    }

    if (domain != null && urlsFile != null) {
        System.out.println("Warning: Both domain and urls file list provided - using domain");
    }

    String outputDirName = options.getOutputDir();
    if (options.isDebugLogging()) {
        System.setProperty("bixo.root.level", "DEBUG");
    } else {
        System.setProperty("bixo.root.level", "INFO");
    }

    if (options.getLoggingAppender() != null) {
        // Set console vs. DRFA vs. something else
        System.setProperty("bixo.appender", options.getLoggingAppender());
    }

    String logsDir = options.getLogsDir();
    if (!logsDir.endsWith("/")) {
        logsDir = logsDir + "/";
    }

    try {
        JobConf conf = new JobConf();
        Path outputPath = new Path(outputDirName);
        FileSystem fs = outputPath.getFileSystem(conf);

        // First check if the user wants to clean
        if (options.isCleanOutputDir()) {
            if (fs.exists(outputPath)) {
                fs.delete(outputPath, true);
            }
        }

        // See if the user isn't starting from scratch then set up the
        // output directory and create an initial urls subdir.
        if (!fs.exists(outputPath)) {
            fs.mkdirs(outputPath);

            // Create a "0-<timestamp>" sub-directory with just a /crawldb subdir
            // In the /crawldb dir the input file will have a single URL for the target domain.

            Path curLoopDir = CrawlDirUtils.makeLoopDir(fs, outputPath, 0);
            String curLoopDirName = curLoopDir.getName();
            setLoopLoggerFile(logsDir + curLoopDirName, 0);

            Path crawlDbPath = new Path(curLoopDir, CrawlConfig.CRAWLDB_SUBDIR_NAME);

            if (domain != null) {
                importOneDomain(domain, crawlDbPath, conf);
            } else {
                importUrls(urlsFile, crawlDbPath);
            }
        }

        Path latestDirPath = CrawlDirUtils.findLatestLoopDir(fs, outputPath);

        if (latestDirPath == null) {
            System.err.println("No previous cycle output dirs exist in " + outputDirName);
            printUsageAndExit(parser);
        }

        Path crawlDbPath = new Path(latestDirPath, CrawlConfig.CRAWLDB_SUBDIR_NAME);

        // Set up the start and end loop counts.
        int startLoop = CrawlDirUtils.extractLoopNumber(latestDirPath);
        int endLoop = startLoop + options.getNumLoops();

        // Set up the UserAgent for the fetcher.
        UserAgent userAgent = new UserAgent(options.getAgentName(), CrawlConfig.EMAIL_ADDRESS,
                CrawlConfig.WEB_ADDRESS);

        // You also get to customize the FetcherPolicy
        FetcherPolicy defaultPolicy;
        if (options.getCrawlDuration() != 0) {
            defaultPolicy = new AdaptiveFetcherPolicy(options.getEndCrawlTime(), options.getCrawlDelay());
        } else {
            defaultPolicy = new FetcherPolicy();
        }
        defaultPolicy.setMaxContentSize(CrawlConfig.MAX_CONTENT_SIZE);
        defaultPolicy.setRequestTimeout(10L * 1000L);//10 seconds

        // COMPLETE for crawling a single site, EFFICIENT for many sites
        if (options.getCrawlPolicy().equals(Options.IMPOLITE_CRAWL_POLICY)) {
            defaultPolicy.setFetcherMode(FetcherPolicy.FetcherMode.IMPOLITE);
        } else if (options.getCrawlPolicy().equals(Options.EFFICIENT_CRAWL_POLICY)) {
            defaultPolicy.setFetcherMode(FetcherPolicy.FetcherMode.EFFICIENT);
        } else if (options.getCrawlPolicy().equals(Options.COMPLETE_CRAWL_POLICY)) {
            defaultPolicy.setFetcherMode(FetcherPolicy.FetcherMode.COMPLETE);
        }

        // It is a good idea to set up a crawl duration when running long crawls as you may
        // end up in situations where the fetch slows down due to a 'long tail' and by
        // specifying a crawl duration you know exactly when the crawl will end.
        int crawlDurationInMinutes = options.getCrawlDuration();
        boolean hasEndTime = crawlDurationInMinutes != Options.NO_CRAWL_DURATION;
        long targetEndTime = hasEndTime
                ? System.currentTimeMillis() + (crawlDurationInMinutes * CrawlConfig.MILLISECONDS_PER_MINUTE)
                : FetcherPolicy.NO_CRAWL_END_TIME;

        // By setting up a url filter we only deal with urls that we want to
        // instead of all the urls that we extract.
        BaseUrlFilter urlFilter = null;
        List<String> patterns = null;
        String regexUrlFiltersFile = options.getRegexUrlFiltersFile();
        if (regexUrlFiltersFile != null) {
            patterns = RegexUrlDatumFilter.getUrlFilterPatterns(regexUrlFiltersFile);
        } else {
            patterns = RegexUrlDatumFilter.getDefaultUrlFilterPatterns();
            if (domain != null) {
                String domainPatterStr = "+(?i)^(http|https)://([a-z0-9]*\\.)*" + domain;
                patterns.add(domainPatterStr);
            } else {
                String protocolPatterStr = "+(?i)^(http|https)://*";
                patterns.add(protocolPatterStr);
                //Log.warn("Defaulting to basic url regex filtering (just suffix and protocol");
            }
        }
        urlFilter = new RegexUrlDatumFilter(patterns.toArray(new String[patterns.size()]));

        // get a list of patterns which tell the miner which URLs to include or exclude.
        patterns.clear();
        RegexUrlStringFilter urlsToMineFilter = null;
        String regexUrlsToMineFiltersFile = options.getRegexUrlToMineFile();
        MineRTCriticsPreferences prefsAnalyzer = null;
        if (regexUrlsToMineFiltersFile != null) {
            patterns = RegexUrlDatumFilter.getUrlFilterPatterns(regexUrlsToMineFiltersFile);
            urlsToMineFilter = new RegexUrlStringFilter(patterns.toArray(new String[patterns.size()]));
            prefsAnalyzer = new MineRTCriticsPreferences(urlsToMineFilter);
        }

        // OK, now we're ready to start looping, since we've got our current
        // settings
        for (int curLoop = startLoop + 1; curLoop <= endLoop; curLoop++) {

            // Adjust target end time, if appropriate.
            if (hasEndTime) {
                int remainingLoops = (endLoop - curLoop) + 1;
                long now = System.currentTimeMillis();
                long perLoopTime = (targetEndTime - now) / remainingLoops;
                defaultPolicy.setCrawlEndTime(now + perLoopTime);
            }

            Path curLoopDirPath = CrawlDirUtils.makeLoopDir(fs, outputPath, curLoop);
            String curLoopDirName = curLoopDirPath.getName();
            setLoopLoggerFile(logsDir + curLoopDirName, curLoop);

            Flow flow = RTCriticsCrawlAndMinerWorkflow.createFlow(curLoopDirPath, crawlDbPath, defaultPolicy,
                    userAgent, urlFilter, prefsAnalyzer, options);
            flow.complete();

            // Writing out .dot files is a good way to verify your flows.
            flow.writeDOT("valid-flow.dot");

            // Update crawlDbPath to point to the latest crawl db
            crawlDbPath = new Path(curLoopDirPath, CrawlConfig.CRAWLDB_SUBDIR_NAME);
        }
    } catch (PlannerException e) {
        e.writeDOT("failed-flow.dot");
        System.err.println("PlannerException: " + e.getMessage());
        e.printStackTrace(System.err);
        System.exit(-1);
    } catch (Throwable t) {
        System.err.println("Exception running tool: " + t.getMessage());
        t.printStackTrace(System.err);
        System.exit(-1);
    }
}
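
Both crawler tools above call patterns.clear() right after building the crawl URL filter, so the same patterns variable can be reused for the miner's include/exclude patterns without entries from the first set leaking into the second. A minimal sketch of that reuse idiom (pattern strings are illustrative):

import java.util.ArrayList;
import java.util.List;

public class ReusePatternListDemo {
    public static void main(String[] args) {
        List<String> patterns = new ArrayList<>();
        patterns.add("+(?i)^(http|https)://([a-z0-9]*\\.)*example.com"); // crawl filter
        String[] crawlPatterns = patterns.toArray(new String[patterns.size()]);

        // Reuse the same variable for the second pattern set;
        // clear() guarantees no stale crawl patterns survive.
        patterns.clear();
        patterns.add("+/pins/");
        System.out.println(crawlPatterns.length + " crawl, " + patterns.size() + " mine");
    }
}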

From source file:com.github.ansell.shp.SHPDump.java

public static void main(String... args) throws Exception {
    final OptionParser parser = new OptionParser();

    final OptionSpec<Void> help = parser.accepts("help").forHelp();
    final OptionSpec<File> input = parser.accepts("input").withRequiredArg().ofType(File.class).required()
            .describedAs("The input SHP file");
    final OptionSpec<File> output = parser.accepts("output").withRequiredArg().ofType(File.class).required()
            .describedAs("The output directory to use for debugging files");
    final OptionSpec<String> outputPrefix = parser.accepts("prefix").withRequiredArg().ofType(String.class)
            .defaultsTo("shp-debug").describedAs("The output prefix to use for debugging files");
    final OptionSpec<File> outputMappingTemplate = parser.accepts("output-mapping").withRequiredArg()
            .ofType(File.class).describedAs("The output mapping template file if it needs to be generated.");
    final OptionSpec<Integer> resolution = parser.accepts("resolution").withRequiredArg().ofType(Integer.class)
            .defaultsTo(2048).describedAs("The output image file resolution");
    final OptionSpec<String> format = parser.accepts("format").withRequiredArg().ofType(String.class)
            .defaultsTo("png").describedAs("The output image format");
    final OptionSpec<String> removeIfEmpty = parser.accepts("remove-if-empty").withRequiredArg()
            .ofType(String.class).describedAs(
                    "The name of an attribute to remove if its value is empty before outputting the resulting shapefile. Use multiple times to specify multiple fields to check");

    OptionSet options = null;

    try {
        options = parser.parse(args);
    } catch (final OptionException e) {
        System.out.println(e.getMessage());
        parser.printHelpOn(System.out);
        throw e;
    }

    if (options.has(help)) {
        parser.printHelpOn(System.out);
        return;
    }

    final Path inputPath = input.value(options).toPath();
    if (!Files.exists(inputPath)) {
        throw new FileNotFoundException("Could not find input SHP file: " + inputPath.toString());
    }

    final Path outputPath = output.value(options).toPath();
    if (!Files.exists(outputPath)) {
        throw new FileNotFoundException("Output directory does not exist: " + outputPath.toString());
    }

    final Path outputMappingPath = options.has(outputMappingTemplate)
            ? outputMappingTemplate.value(options).toPath()
            : null;
    if (options.has(outputMappingTemplate) && Files.exists(outputMappingPath)) {
        throw new FileNotFoundException(
                "Output mapping template file already exists: " + outputMappingPath.toString());
    }

    final Set<String> filterFields = ConcurrentHashMap.newKeySet();
    if (options.has(removeIfEmpty)) {
        for (String nextFilterField : removeIfEmpty.values(options)) {
            System.out.println("Will filter field if empty value found: " + nextFilterField);
            filterFields.add(nextFilterField);
        }
    }

    if (!filterFields.isEmpty()) {
        System.out.println("Full set of filter fields: " + filterFields);
    }

    final String prefix = outputPrefix.value(options);

    FileDataStore store = FileDataStoreFinder.getDataStore(inputPath.toFile());

    if (store == null) {
        throw new RuntimeException("Could not read the given input as an ESRI Shapefile: "
                + inputPath.toAbsolutePath().toString());
    }

    for (String typeName : new LinkedHashSet<>(Arrays.asList(store.getTypeNames()))) {
        System.out.println("");
        System.out.println("Type: " + typeName);
        SimpleFeatureSource featureSource = store.getFeatureSource(typeName);
        SimpleFeatureType schema = featureSource.getSchema();

        Name outputSchemaName = new NameImpl(schema.getName().getNamespaceURI(),
                schema.getName().getLocalPart().replace(" ", "").replace("%20", ""));
        System.out.println("Replacing name on schema: " + schema.getName() + " with " + outputSchemaName);
        SimpleFeatureType outputSchema = SHPUtils.changeSchemaName(schema, outputSchemaName);

        List<String> attributeList = new ArrayList<>();
        for (AttributeDescriptor attribute : schema.getAttributeDescriptors()) {
            System.out.println("Attribute: " + attribute.getName().toString());
            attributeList.add(attribute.getName().toString());
        }
        CsvSchema csvSchema = CSVUtil.buildSchema(attributeList);

        SimpleFeatureCollection collection = featureSource.getFeatures();
        int featureCount = 0;
        Path nextCSVFile = outputPath.resolve(prefix + ".csv");
        Path nextSummaryCSVFile = outputPath
                .resolve(prefix + "-" + outputSchema.getTypeName() + "-Summary.csv");
        List<SimpleFeature> outputFeatureList = new CopyOnWriteArrayList<>();

        try (SimpleFeatureIterator iterator = collection.features();
                Writer bufferedWriter = Files.newBufferedWriter(nextCSVFile, StandardCharsets.UTF_8,
                        StandardOpenOption.CREATE_NEW);
                SequenceWriter csv = CSVUtil.newCSVWriter(bufferedWriter, csvSchema);) {
            List<String> nextLine = new ArrayList<>();
            while (iterator.hasNext()) {
                SimpleFeature feature = iterator.next();
                featureCount++;
                if (featureCount <= 2) {
                    System.out.println("");
                    System.out.println(feature.getIdentifier());
                } else if (featureCount % 100 == 0) {
                    System.out.print(".");
                }
                boolean filterThisFeature = false;
                for (AttributeDescriptor attribute : schema.getAttributeDescriptors()) {
                    String featureString = Optional.ofNullable(feature.getAttribute(attribute.getName()))
                            .orElse("").toString();
                    nextLine.add(featureString);
                    if (filterFields.contains(attribute.getName().toString())
                            && featureString.trim().isEmpty()) {
                        filterThisFeature = true;
                    }
                    if (featureString.length() > 100) {
                        featureString = featureString.substring(0, 100) + "...";
                    }
                    if (featureCount <= 2) {
                        System.out.print(attribute.getName() + "=");
                        System.out.println(featureString);
                    }
                }
                if (!filterThisFeature) {
                    outputFeatureList.add(SHPUtils.changeSchemaName(feature, outputSchema));
                    csv.write(nextLine);
                }
                nextLine.clear();
            }
        }
        try (Reader csvReader = Files.newBufferedReader(nextCSVFile, StandardCharsets.UTF_8);
                Writer summaryOutput = Files.newBufferedWriter(nextSummaryCSVFile, StandardCharsets.UTF_8,
                        StandardOpenOption.CREATE_NEW);
                final Writer mappingWriter = options.has(outputMappingTemplate)
                        ? Files.newBufferedWriter(outputMappingPath)
                        : NullWriter.NULL_WRITER) {
            CSVSummariser.runSummarise(csvReader, summaryOutput, mappingWriter,
                    CSVSummariser.DEFAULT_SAMPLE_COUNT, false);
        }
        if (featureCount > 100) {
            System.out.println("");
        }
        System.out.println("");
        System.out.println("Feature count: " + featureCount);

        SimpleFeatureCollection outputCollection = new ListFeatureCollection(outputSchema, outputFeatureList);
        Path outputShapefilePath = outputPath.resolve(prefix + "-" + outputSchema.getTypeName() + "-dump");
        if (!Files.exists(outputShapefilePath)) {
            Files.createDirectory(outputShapefilePath);
        }
        SHPUtils.writeShapefile(outputCollection, outputShapefilePath);

        // Create ZIP file from the contents to keep the subfiles together
        Path outputShapefileZipPath = outputPath
                .resolve(prefix + "-" + outputSchema.getTypeName() + "-dump.zip");
        try (final OutputStream out = Files.newOutputStream(outputShapefileZipPath,
                StandardOpenOption.CREATE_NEW);
                final ZipOutputStream zip = new ZipOutputStream(out, StandardCharsets.UTF_8);) {
            Files.list(outputShapefilePath).forEachOrdered(Unchecked.consumer(e -> {
                zip.putNextEntry(new ZipEntry(e.getFileName().toString()));
                Files.copy(e, zip);
                zip.closeEntry();
            }));
        }

        try (final OutputStream outputStream = Files.newOutputStream(
                outputPath.resolve(prefix + "." + format.value(options)), StandardOpenOption.CREATE_NEW);) {
            MapContent map = new MapContent();
            map.setTitle(prefix + "-" + outputSchema.getTypeName());
            Style style = SLD.createSimpleStyle(featureSource.getSchema());
            Layer layer = new FeatureLayer(new CollectionFeatureSource(outputCollection), style);
            map.addLayer(layer);
            SHPUtils.renderImage(map, outputStream, resolution.value(options), format.value(options));
        }
    }
}
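
The shapefile dump above reuses a single nextLine list as a row buffer: each feature's attribute values are appended, the row is written to the CSV, and nextLine.clear() resets the buffer for the next feature instead of allocating a new list each time. A minimal sketch of that per-iteration buffer idiom:

import java.util.ArrayList;
import java.util.List;

public class RowBufferDemo {
    public static void main(String[] args) {
        String[][] rows = { { "a", "1" }, { "b", "2" } };
        List<String> nextLine = new ArrayList<>();
        for (String[] row : rows) {
            for (String cell : row) {
                nextLine.add(cell);
            }
            System.out.println(String.join(";", nextLine)); // write the row
            nextLine.clear(); // reset the buffer for the next row
        }
    }
}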

From source file:cz.muni.fi.xklinec.zipstream.App.java

/**
 * Entry point.
 * 
 * @param args
 * @throws FileNotFoundException
 * @throws IOException
 * @throws NoSuchFieldException
 * @throws ClassNotFoundException
 * @throws NoSuchMethodException 
 */
public static void main(String[] args) throws FileNotFoundException, IOException, NoSuchFieldException,
        ClassNotFoundException, NoSuchMethodException, InterruptedException {
    OutputStream fos = null;
    InputStream fis = null;

    if ((args.length != 0 && args.length != 2)) {
        System.err.println(String.format("Usage: app.jar source.apk dest.apk"));
        return;
    } else if (args.length == 2) {
        System.err.println(
                String.format("Will use file [%s] as input file and [%s] as output file", args[0], args[1]));
        fis = new FileInputStream(args[0]);
        fos = new FileOutputStream(args[1]);
    } else if (args.length == 0) {
        System.err.println(String.format("Will use file [STDIN] as input file and [STDOUT] as output file"));
        fis = System.in;
        fos = System.out;
    }

    final Deflater def = new Deflater(9, true);
    ZipArchiveInputStream zip = new ZipArchiveInputStream(fis);

    // List of postponed entries for further "processing".
    List<PostponedEntry> peList = new ArrayList<PostponedEntry>(6);

    // Output stream
    ZipArchiveOutputStream zop = new ZipArchiveOutputStream(fos);
    zop.setLevel(9);

    // Read the archive
    ZipArchiveEntry ze = zip.getNextZipEntry();
    while (ze != null) {

        ZipExtraField[] extra = ze.getExtraFields(true);
        byte[] lextra = ze.getLocalFileDataExtra();
        UnparseableExtraFieldData uextra = ze.getUnparseableExtraFieldData();
        byte[] uextrab = uextra != null ? uextra.getLocalFileDataData() : null;

        // ZipArchiveOutputStream.DEFLATED
        // 

        // Data for entry
        byte[] byteData = Utils.readAll(zip);
        byte[] deflData = new byte[0];
        int infl = byteData.length;
        int defl = 0;

        // If method is deflated, get the raw data (compress again).
        if (ze.getMethod() == ZipArchiveOutputStream.DEFLATED) {
            def.reset();
            def.setInput(byteData);
            def.finish();

            byte[] deflDataTmp = new byte[byteData.length * 2];
            defl = def.deflate(deflDataTmp);

            deflData = new byte[defl];
            System.arraycopy(deflDataTmp, 0, deflData, 0, defl);
        }

        System.err.println(String.format(
                "ZipEntry: meth=%d " + "size=%010d isDir=%5s " + "compressed=%07d extra=%d lextra=%d uextra=%d "
                        + "comment=[%s] " + "dataDesc=%s " + "UTF8=%s " + "infl=%07d defl=%07d " + "name [%s]",
                ze.getMethod(), ze.getSize(), ze.isDirectory(), ze.getCompressedSize(),
                extra != null ? extra.length : -1, lextra != null ? lextra.length : -1,
                uextrab != null ? uextrab.length : -1, ze.getComment(),
                ze.getGeneralPurposeBit().usesDataDescriptor(), ze.getGeneralPurposeBit().usesUTF8ForNames(),
                infl, defl, ze.getName()));

        final String curName = ze.getName();

        // META-INF files should be always on the end of the archive, 
        // thus add postponed files right before them
        if (curName.startsWith("META-INF") && peList.size() > 0) {
            System.err.println(
                    "Now is the time to put things back, but at first, I'll perform some \"facelifting\"...");

            // Simulate some evil being done
            Thread.sleep(5000);

            System.err.println("OK its done, let's do this.");
            for (PostponedEntry pe : peList) {
                System.err.println(
                        "Adding postponed entry at the end of the archive! deflSize=" + pe.deflData.length
                                + "; inflSize=" + pe.byteData.length + "; meth: " + pe.ze.getMethod());

                pe.dump(zop, false);
            }

            peList.clear();
        }

        // Capturing interesting files for us and store for later.
        // If the file is not interesting, send directly to the stream.
        if ("classes.dex".equalsIgnoreCase(curName) || "AndroidManifest.xml".equalsIgnoreCase(curName)) {
            System.err.println("### Interesting file, postpone sending!!!");

            PostponedEntry pe = new PostponedEntry(ze, byteData, deflData);
            peList.add(pe);
        } else {
            // Write ZIP entry to the archive
            zop.putArchiveEntry(ze);
            // Add file data to the stream
            zop.write(byteData, 0, infl);
            zop.closeArchiveEntry();
        }

        ze = zip.getNextZipEntry();
    }

    // Cleaning up stuff
    zip.close();
    fis.close();

    zop.finish();
    zop.close();
    fos.close();

    System.err.println("THE END!");
}
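
The ZIP rewriter above buffers "interesting" entries in peList and, on reaching the META-INF files, writes them out and calls peList.clear() so the same batch cannot be emitted twice. A minimal sketch of that buffer-and-flush idiom (entry names are illustrative):

import java.util.ArrayList;
import java.util.List;

public class BufferAndFlushDemo {
    static void flush(List<String> postponed) {
        for (String name : postponed) {
            System.out.println("writing postponed entry " + name);
        }
        postponed.clear(); // flushed entries must not be written again
    }

    public static void main(String[] args) {
        List<String> postponed = new ArrayList<>();
        postponed.add("classes.dex");
        postponed.add("AndroidManifest.xml");
        flush(postponed);
        System.out.println("pending: " + postponed.size()); // 0
    }
}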

From source file:edu.upenn.cis.FastAlign.java

/**
 * Prints alignments for options specified by command line arguments.
 * @param argv  parameters to be used by FastAlign.
 */
public static void main(String[] argv) {

    FastAlign align = FastAlign.initCommandLine(argv);
    if (align == null) {
        System.err.println("Usage: java " + FastAlign.class.getCanonicalName() + " -i file.fr-en\n"
                + " Standard options ([USE] = strongly recommended):\n" + "  -i: [REQ] Input parallel corpus\n"
                + "  -v: [USE] Use Dirichlet prior on lexical translation distributions\n"
                + "  -d: [USE] Favor alignment points close to the monotonic diagonoal\n"
                + "  -o: [USE] Optimize how close to the diagonal alignment points should be\n"
                + "  -r: Run alignment in reverse (condition on target and predict source)\n"
                + "  -c: Output conditional probability table\n"
                + "  -e: Start with existing conditional probability table\n" + " Advanced options:\n"
                + "  -I: number of iterations in EM training (default = 5)\n"
                + "  -p: p_null parameter (default = 0.08)\n" + "  -N: No null word\n"
                + "  -a: alpha parameter for optional Dirichlet prior (default = 0.01)\n"
                + "  -T: starting lambda for diagonal distance parameter (default = 4)\n");
        System.exit(1);
    }
    boolean use_null = !align.no_null_word;
    if (align.variational_bayes && align.alpha <= 0.0) {
        System.err.println("--alpha must be > 0\n");
        System.exit(1);
    }
    double prob_align_not_null = 1.0 - align.prob_align_null;
    final int kNULL = align.d.Convert("<eps>");
    TTable s2t = new TTable();
    if (!align.existing_probability_filename.isEmpty()) {
        boolean success = s2t.ImportFromFile(align.existing_probability_filename, '\t', align.d);
        if (!success) {
            System.err.println("Can't read table " + align.existing_probability_filename);
            System.exit(1);
        }
    }
    Map<Pair, Integer> size_counts = new HashMap<Pair, Integer>();
    double tot_len_ratio = 0;
    double mean_srclen_multiplier = 0;
    List<Double> probs = new ArrayList<Double>();
    // E-M Iterations Loop TODO move this into a method?
    for (int iter = 0; iter < align.iterations || (iter == 0 && align.iterations == 0); ++iter) {
        final boolean final_iteration = (iter >= (align.iterations - 1));
        System.err.println("ITERATION " + (iter + 1) + (final_iteration ? " (FINAL)" : ""));
        Scanner in = null;
        try {
            in = new Scanner(new File(align.input));
            if (!in.hasNextLine()) {
                System.err.println("Can't read " + align.input);
                System.exit(1);
            }
        } catch (FileNotFoundException e) {
            e.printStackTrace();
            System.err.println("Can't read " + align.input);
            System.exit(1);
        }

        double likelihood = 0;
        double denom = 0.0;
        int lc = 0;
        boolean flag = false;
        String line;
        //         String ssrc, strg;
        ArrayList<Integer> src = new ArrayList<Integer>();
        ArrayList<Integer> trg = new ArrayList<Integer>();
        double c0 = 0;
        double emp_feat = 0;
        double toks = 0;
        // Iterate over each line of the input file
        while (in.hasNextLine()) {
            line = in.nextLine();
            ++lc;
            if (lc % 1000 == 0) {
                System.err.print('.');
                flag = true;
            }
            if (lc % 50000 == 0) {
                System.err.println(" [" + lc + "]\n");
                System.err.flush();
                flag = false;
            }
            src.clear();
            trg.clear(); // TODO this is redundant; src and tgt cleared in ParseLine
            // Integerize and split source and target lines.
            align.ParseLine(line, src, trg);
            if (align.is_reverse) {
                ArrayList<Integer> tmp = src;
                src = trg;
                trg = tmp;
            }
            // TODO Empty lines break the parser. Should this be true?
            if (src.size() == 0 || trg.size() == 0) {
                System.err.println("Error in line " + lc + "\n" + line);
                System.exit(1);
            }
            if (iter == 0) {
                tot_len_ratio += ((double) trg.size()) / ((double) src.size());
            }
            denom += trg.size();
            probs.clear();
            // Add to pair length counts only if first iteration.
            if (iter == 0) {
                Pair pair = new Pair(trg.size(), src.size());
                Integer value = size_counts.get(pair);
                if (value == null)
                    value = 0;
                size_counts.put(pair, value + 1);
            }
            boolean first_al = true; // used when printing alignments
            toks += trg.size();
            // Iterate through the English tokens
            for (int j = 0; j < trg.size(); ++j) {
                final int f_j = trg.get(j);
                double sum = 0;
                double prob_a_i = 1.0 / (src.size() + (use_null ? 1 : 0)); // uniform (model 1)
                if (use_null) {
                    if (align.favor_diagonal) {
                        prob_a_i = align.prob_align_null;
                    }
                    probs.add(0, s2t.prob(kNULL, f_j) * prob_a_i);
                    sum += probs.get(0);
                }
                double az = 0;
                if (align.favor_diagonal)
                    az = DiagonalAlignment.computeZ(j + 1, trg.size(), src.size(), align.diagonal_tension)
                            / prob_align_not_null;
                for (int i = 1; i <= src.size(); ++i) {
                    if (align.favor_diagonal)
                        prob_a_i = DiagonalAlignment.unnormalizedProb(j + 1, i, trg.size(), src.size(),
                                align.diagonal_tension) / az;
                    probs.add(i, s2t.prob(src.get(i - 1), f_j) * prob_a_i);
                    sum += probs.get(i);
                }
                if (final_iteration) {
                    double max_p = -1;
                    int max_index = -1;
                    if (use_null) {
                        max_index = 0;
                        max_p = probs.get(0);
                    }
                    for (int i = 1; i <= src.size(); ++i) {
                        if (probs.get(i) > max_p) {
                            max_index = i;
                            max_p = probs.get(i);
                        }
                    }
                    if (max_index > 0) {
                        if (first_al)
                            first_al = false;
                        else
                            System.out.print(' ');
                        if (align.is_reverse)
                            System.out.print("" + j + '-' + (max_index - 1));
                        else
                            System.out.print("" + (max_index - 1) + '-' + j);
                    }
                } else {
                    if (use_null) {
                        double count = probs.get(0) / sum;
                        c0 += count;
                        s2t.Increment(kNULL, f_j, count);
                    }
                    for (int i = 1; i <= src.size(); ++i) {
                        final double p = probs.get(i) / sum;
                        s2t.Increment(src.get(i - 1), f_j, p);
                        emp_feat += DiagonalAlignment.feature(j, i, trg.size(), src.size()) * p;
                    }
                }
                likelihood += Math.log(sum);
            }
            if (final_iteration)
                System.out.println();
        }

        // convert the natural-log likelihood to base 2: log2(x) = ln(x) / ln(2)
        double base2_likelihood = likelihood / Math.log(2);

        if (flag) {
            System.err.println();
        }
        if (iter == 0) {
            mean_srclen_multiplier = tot_len_ratio / lc;
            System.err.println("expected target length = source length * " + mean_srclen_multiplier);
        }
        emp_feat /= toks;
        System.err.println("  log_e likelihood: " + likelihood);
        System.err.println("  log_2 likelihood: " + base2_likelihood);
        System.err.println("     cross entropy: " + (-base2_likelihood / denom));
        System.err.println("        perplexity: " + Math.pow(2.0, -base2_likelihood / denom));
        System.err.println("      posterior p0: " + c0 / toks);
        System.err.println(" posterior al-feat: " + emp_feat);
        //System.err.println("     model tension: " + mod_feat / toks );
        System.err.println("       size counts: " + size_counts.size());
        if (!final_iteration) {
            if (align.favor_diagonal && align.optimize_tension && iter > 0) {
                for (int ii = 0; ii < 8; ++ii) {
                    double mod_feat = 0;
                    for (Map.Entry<Pair, Integer> entry : size_counts.entrySet()) {
                        final Pair p = entry.getKey();
                        for (int j = 1; j <= p.first; ++j)
                            mod_feat += entry.getValue() * DiagonalAlignment.computeDLogZ(j, p.first, p.second,
                                    align.diagonal_tension);
                    }
                    mod_feat /= toks;
                    System.err.println("  " + ii + 1 + "  model al-feat: " + mod_feat + " (tension="
                            + align.diagonal_tension + ")");
                    align.diagonal_tension += (emp_feat - mod_feat) * 20.0;
                    if (align.diagonal_tension <= 0.1)
                        align.diagonal_tension = 0.1;
                    if (align.diagonal_tension > 14)
                        align.diagonal_tension = 14;
                }
                System.err.println("     final tension: " + align.diagonal_tension);
            }
            if (align.variational_bayes)
                s2t.NormalizeVB(align.alpha);
            else
                s2t.Normalize();
            //prob_align_null *= 0.8; // XXX
            //prob_align_null += (c0 / toks) * 0.2;
            prob_align_not_null = 1.0 - align.prob_align_null;
        }

    }
    if (!align.conditional_probability_filename.isEmpty()) {
        System.err.println("conditional probabilities: " + align.conditional_probability_filename);
        s2t.ExportToFile(align.conditional_probability_filename, align.d);
    }
    System.exit(0);
}
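
The trainer above reuses a single probs buffer, calling List.clear() to empty it before refilling rather than allocating a new list each time. A minimal standalone sketch of that reuse pattern follows; the class and variable names are hypothetical and not part of the source above:

import java.util.ArrayList;
import java.util.List;

public class ProbBufferSketch {
    public static void main(String[] args) {
        // One reusable buffer, cleared before each pass.
        List<Double> probs = new ArrayList<Double>();
        double[][] rows = { { 0.2, 0.3 }, { 0.1, 0.4, 0.5 } };
        for (double[] row : rows) {
            probs.clear(); // removes all entries but keeps the backing array
            double sum = 0;
            for (double p : row) {
                probs.add(p);
                sum += p;
            }
            System.out.println(probs.size() + " entries, sum = " + sum);
        }
    }
}

For an ArrayList, clear() nulls out the stored elements but keeps the allocated capacity, so a buffer reused this way stops reallocating once it reaches its working size.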

From source file:diffhunter.DiffHunter.java

/**
 * @param args the command line arguments
 * @throws org.apache.commons.cli.ParseException
 * @throws java.io.IOException
 */
public static void main(String[] args) throws ParseException, IOException {

    //String test_ = Paths.get("J:\\VishalData\\additional\\", "Sasan" + "_BDB").toAbsolutePath().toString();

    // TODO code application logic here
    /*args = new String[]
    {
    "-i", "-b", "J:\\VishalData\\additional\\Ptbp2_E18_5_cortex_CLIP_mm9_plus_strand_sorted.bed", "-r", "J:\\VishalData\\additional\\mouse_mm9.txt", "-o", "J:\\VishalData"
    };*/

    /*args = new String[]
    {
    "-c", "-r", "J:\\VishalData\\additional\\mouse_mm9.txt", "-1", "J:\\VishalData\\Ptbp2_Adult_testis_CLIP_mm9_plus_strand_sorted_BDB", "-2", "J:\\VishalData\\Ptbp2_E18_5_cortex_CLIP_mm9_plus_strand_sorted_BDB", "-w", "200", "-s", "50", "-o", "J:\\VishalData"
    };*/
    Options options = new Options();

    // define command line options
    options.addOption("i", "index", false, "Indexing BED files.");
    options.addOption("b", "bed", true, "BED file to be indexed");
    options.addOption("o", "output", true, "Folder in which the index/comparison file will be created.");
    options.addOption("r", "reference", true, "Reference annotation file to be used for indexing");
    options.addOption("c", "compare", false, "Finding differences between two conditions");
    options.addOption("1", "first", true, "First sample index location");
    options.addOption("2", "second", true, "Second sample index location");
    options.addOption("w", "window", true, "Length of window for identifying differences");
    options.addOption("s", "sliding", true, "Length of the sliding step");

    CommandLineParser parser = new BasicParser();
    CommandLine cmd = parser.parse(options, args);

    boolean indexing = false;
    boolean comparing = false;

    //Indexing!
    if (cmd.hasOption("i")) {
        //if(cmd.hasOption("1"))
        //System.err.println("sasan");

        //System.out.println("sasa");
        indexing = true;

    } else if (cmd.hasOption("c")) {
        //System.err.println("");
        comparing = true;

    } else {
        //System.err.println("Option is not detected.");
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("diffhunter", options);
        return;
    }

    //Indexing is selected
    //
    if (indexing) {
        //Since indexing is selected,
        //the user has to provide the output folder, reference, and BED files.
        if (!(cmd.hasOption("o") && cmd.hasOption("r") && cmd.hasOption("b"))) {
            HelpFormatter formatter = new HelpFormatter();
            formatter.printHelp("diffhunter", options);
            return;
        }
        String bedfile_ = cmd.getOptionValue("b");
        String reference_file = cmd.getOptionValue("r");
        String folder_loc = cmd.getOptionValue("o");

        String sample_name = FilenameUtils.getBaseName(bedfile_);

        try (Database B2 = BerkeleyDB_Box.Get_BerkeleyDB(
                Paths.get(folder_loc, sample_name + "_BDB").toAbsolutePath().toString(), true, sample_name)) {
            Indexer indexing_ = new Indexer(reference_file);
            indexing_.Make_Index(B2, bedfile_,
                    Paths.get(folder_loc, sample_name + "_BDB").toAbsolutePath().toString());
            B2.close();

        }
    } else if (comparing) {
        if (!(cmd.hasOption("o") && cmd.hasOption("w") && cmd.hasOption("s") && cmd.hasOption("1")
                && cmd.hasOption("2"))) {
            HelpFormatter formatter = new HelpFormatter();
            formatter.printHelp("diffhunter", options);
            return;
        }
        String folder_loc = cmd.getOptionValue("o");
        int window_ = Integer.parseInt(cmd.getOptionValue("w"));
        //int window_=600;

        int slide_ = Integer.parseInt(cmd.getOptionValue("s"));

        String first = cmd.getOptionValue("1").replace("_BDB", "");
        String second = cmd.getOptionValue("2").replace("_BDB", "");
        String reference_file = cmd.getOptionValue("r");
        //String folder_loc=cmd.getOptionValue("o");

        String sample_name_first = FilenameUtils.getBaseName(first);
        String sample_name_second = FilenameUtils.getBaseName(second);

        Database B1 = BerkeleyDB_Box.Get_BerkeleyDB(first + "_BDB", false, sample_name_first);
        Database B2 = BerkeleyDB_Box.Get_BerkeleyDB(second + "_BDB", false, sample_name_second);

        List<String> first_condition_genes = Files
                .lines(Paths.get(first + "_BDB", sample_name_first + ".txt").toAbsolutePath())
                .collect(Collectors.toList());
        List<String> second_condition_genes = Files
                .lines(Paths.get(second + "_BDB", sample_name_second + ".txt").toAbsolutePath())
                .collect(Collectors.toList());
        System.out.println("First and second condition are loaded!!! ");
        List<String> intersection_ = new ArrayList<>(first_condition_genes);
        intersection_.retainAll(second_condition_genes);

        BufferedWriter output = new BufferedWriter(
                new FileWriter(Paths.get(folder_loc, "differences_" + window_ + "_s" + slide_ + "_c" + ".txt")
                        .toAbsolutePath().toString(), false));
        List<Result_Window> final_results = Collections.synchronizedList(new ArrayList<>());
        Worker_New worker_class = new Worker_New();
        worker_class.Read_Reference(reference_file);

        while (!intersection_.isEmpty()) {
            List<String> selected_genes = new ArrayList<>();
            // The batching below was a no-op in the original (its condition was
            // always true), so every remaining gene is taken in a single pass.
            selected_genes.addAll(intersection_);
            intersection_.clear();
            //System.out.println("Intersection count is:"+intersection_.size());
            //final List<Result_Window> resultssss_=new ArrayList<>();
            IntStream.range(0, selected_genes.size()).parallel().forEach(i -> {
                System.out.println(selected_genes.get(i) + "\tprocessing......");
                String gene_of_interest = selected_genes.get(i);//"ENSG00000142657|PGD";//intersection_.get(6);////"ENSG00000163395|IGFN1";//"ENSG00000270066|SCARNA2";
                int start = worker_class.dic_genes.get(gene_of_interest).start_loc;
                int end = worker_class.dic_genes.get(gene_of_interest).end_loc;

                Map<Integer, Integer> first_ = Collections.emptyMap();
                try {
                    first_ = BerkeleyDB_Box.Get_Coord_Read(B1, gene_of_interest);
                } catch (IOException | ClassNotFoundException ex) {
                    Logger.getLogger(DiffHunter.class.getName()).log(Level.SEVERE, null, ex);
                }

                Map<Integer, Integer> second_ = Collections.emptyMap();
                try {
                    second_ = BerkeleyDB_Box.Get_Coord_Read(B2, gene_of_interest);
                } catch (IOException | ClassNotFoundException ex) {
                    Logger.getLogger(DiffHunter.class.getName()).log(Level.SEVERE, null, ex);
                }
                List<Window> top_windows_first = worker_class.Get_Top_Windows(window_, first_, slide_);
                List<Window> top_windows_second = worker_class.Get_Top_Windows(window_, second_, slide_);
                //System.out.println("passed for window peak call for gene \t"+selected_genes.get(i));
                // System.out.println("top_window_first_Count\t"+top_windows_first.size());
                // System.out.println("top_window_second_Count\t"+top_windows_second.size());
                if (top_windows_first.isEmpty() && top_windows_second.isEmpty()) {
                    return;
                }

                List<Result_Window> res_temp = new Worker_New().Get_Significant_Windows(gene_of_interest, start,
                        end, top_windows_first, top_windows_second, second_, first_, sample_name_first,
                        sample_name_second, 0.01);
                if (!res_temp.isEmpty()) {
                    final_results.addAll(res_temp);//final_results.addAll(worker_class.Get_Significant_Windows(gene_of_interest, start, end, top_windows_first, top_windows_second, second_, first_, first_condition, second_condition, 0.01));

                } //System.out.println(selected_genes.get(i)+"\tprocessed.");

            });

            /*selected_genes.parallelStream().forEach(i ->
             {
                    
                    
             });*/
            List<Double> pvals = new ArrayList<>();

            for (int i = 0; i < final_results.size(); i++) {
                pvals.add(final_results.get(i).p_value);
            }
            List<Double> qvals = MultipleTestCorrection.benjaminiHochberg(pvals);

            System.out.println("Writing to file...");
            output.append("Gene_Symbol\tContributing_Sample\tStart\tEnd\tOddsRatio\tp_Value\tFDR");
            output.newLine();

            for (int i = 0; i < final_results.size(); i++) {
                Result_Window item = final_results.get(i);
                output.append(item.associated_gene_symbol + "\t" + item.contributing_windows + "\t"
                        + item.start_loc + "\t" + item.end_loc + "\t" + item.oddsratio_ + "\t" + item.p_value
                        + "\t" + qvals.get(i)); //+ "\t" + item.average_other_readcount_cotributing + "\t" + item.average_other_readcount_cotributing + "\t" + item.average_window_readcount_non + "\t" + item.average_other_readcount_non);
                output.newLine();
            }

            /* for (Result_Window item : final_results)
             {
            output.append(item.associated_gene_symbol + "\t" + item.contributing_windows + "\t" + item.start_loc + "\t" + item.end_loc + "\t" + item.oddsratio_ + "\t" + item.p_value); //+ "\t" + item.average_other_readcount_cotributing + "\t" + item.average_other_readcount_cotributing + "\t" + item.average_window_readcount_non + "\t" + item.average_other_readcount_non);
            output.newLine();
             }
               */
            final_results.clear();

        }
        output.close();

    }
    System.out.println("Done.");

}
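
DiffHunter drains its shared final_results accumulator with clear() after writing each round of results, so windows already written are not emitted again. A small sketch of the same accumulate-write-clear cycle; the names here are hypothetical:

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

public class BatchResultsSketch {
    public static void main(String[] args) {
        // Thread-safe accumulator, like final_results above.
        List<String> results = Collections.synchronizedList(new ArrayList<String>());
        for (int batch = 0; batch < 3; batch++) {
            results.add("window-" + batch);
            System.out.println("batch " + batch + " wrote: " + results);
            results.clear(); // empty the shared list before the next batch
        }
    }
}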

From source file:edu.upenn.egricelab.AlignerBoost.FilterSAMAlignPE.java

public static void main(String[] args) {
    if (args.length == 0) {
        printUsage();
        return;
    }
    try {
        parseOptions(args);
    } catch (IllegalArgumentException e) {
        System.err.println("Error: " + e.getMessage());
        printUsage();
        return;
    }

    // Read in chrList, if specified
    if (chrFile != null) {
        chrFilter = new HashSet<String>();
        try {
            BufferedReader chrFilterIn = new BufferedReader(new FileReader(chrFile));
            String chr = null;
            while ((chr = chrFilterIn.readLine()) != null)
                chrFilter.add(chr);
            chrFilterIn.close();
            if (verbose > 0)
                System.err.println(
                        "Only looking at alignments on " + chrFilter.size() + " specified chromosomes");
        } catch (IOException e) {
            System.err.println("Error: " + e.getMessage());
            return;
        }
    }

    if (verbose > 0) {
        // Start the processMonitor
        processMonitor = new Timer();
        // Start the ProcessStatusTask
        statusTask = new ProcessStatusTask();
        // Schedule the status display at the configured frequency (statusFreq)
        processMonitor.scheduleAtFixedRate(statusTask, 0, statusFreq);
    }

    // Read in known SNP file, if specified
    if (knownSnpFile != null) {
        if (verbose > 0)
            System.err.println("Checking known SNPs from user specified VCF file");
        knownVCF = new VCFFileReader(new File(knownSnpFile));
    }

    SamReaderFactory readerFac = SamReaderFactory.makeDefault();
    SAMFileWriterFactory writerFac = new SAMFileWriterFactory();
    if (!isSilent)
        readerFac.validationStringency(ValidationStringency.LENIENT); // use LENIENT stringency
    else
        readerFac.validationStringency(ValidationStringency.SILENT); // use SILENT stringency

    SamReader in = readerFac.open(new File(inFile));
    SAMFileHeader inHeader = in.getFileHeader();
    if (inHeader.getGroupOrder() == GroupOrder.reference && inHeader.getSortOrder() == SortOrder.coordinate)
        System.err.println("Warning: Input file '" + inFile
                + "' might be sorted by coordinate and cannot be correctly processed!");

    SAMFileHeader header = inHeader.clone(); // copy the inFile header as outFile header
    // Add new programHeader
    SAMProgramRecord progRec = new SAMProgramRecord(progName);
    progRec.setProgramName(progName);
    progRec.setProgramVersion(progVer);
    progRec.setCommandLine(StringUtils.join(" ", args));
    header.addProgramRecord(progRec);
    //System.err.println(inFile + " groupOrder: " + in.getFileHeader().getGroupOrder() + " sortOrder: " + in.getFileHeader().getSortOrder());
    // reset the orders
    header.setGroupOrder(groupOrder);
    header.setSortOrder(sortOrder);

    // write SAMHeader
    String prevID = null;
    SAMRecord prevRecord = null;
    List<SAMRecord> alnList = new ArrayList<SAMRecord>();
    List<SAMRecordPair> alnPEList = null;

    // Estimate the fragment length distribution with a one-pass scan through the alignments
    SAMRecordIterator results = in.iterator();
    if (!NO_ESTIMATE) {
        if (verbose > 0) {
            System.err.println("Estimating insert fragment size distribution ...");
            statusTask.reset();
            statusTask.setInfo("alignments scanned");
        }
        long N = 0;
        double fragL_S = 0; // fragLen sum
        double fragL_SS = 0; // fragLen^2 sum
        while (results.hasNext()) {
            SAMRecord record = results.next();
            if (verbose > 0)
                statusTask.updateStatus();
            if (record.getFirstOfPairFlag() && !record.isSecondaryOrSupplementary()) {
                double fragLen = Math.abs(record.getInferredInsertSize());
                if (fragLen != 0 && fragLen >= MIN_FRAG_LEN && fragLen <= MAX_FRAG_LEN) { // only consider fragments within the configured length range
                    N++;
                    fragL_S += fragLen;
                    fragL_SS += fragLen * fragLen;
                }
                // stop estimate if already enough
                if (MAX_ESTIMATE_SCAN > 0 && N >= MAX_ESTIMATE_SCAN)
                    break;
            }
        }
        if (verbose > 0)
            statusTask.finish();
        // estimate fragment size
        if (N >= MIN_ESTIMATE_BASE) { // override command line values
            MEAN_FRAG_LEN = fragL_S / N;
            SD_FRAG_LEN = Math.sqrt((N * fragL_SS - fragL_S * fragL_S) / (N * (N - 1)));
            String estStr = String.format("Estimated fragment size distribution: N(%.1f, %.1f)", MEAN_FRAG_LEN,
                    SD_FRAG_LEN);
            if (verbose > 0)
                System.err.println(estStr);
            // also add the estimation to comment
            header.addComment(estStr);
        } else {
            System.err.println(
                    "Unable to estimate the fragment size distribution due to too few observed alignments");
            System.err.println(
                    "You have to specify the '--mean-frag-len' and '--sd-frag-len' on the command line and re-run this step");
            statusTask.cancel();
            processMonitor.cancel();
            return;
        }
        // Initiate the normal model
        normModel = new NormalDistribution(MEAN_FRAG_LEN, SD_FRAG_LEN);
        // reset the iterator, if necessary
        if (in.type() == SamReader.Type.SAM_TYPE) {
            try {
                in.close();
            } catch (IOException e) {
                System.err.println(e.getMessage());
            }
            in = readerFac.open(new File(inFile));
        }
        results.close();
        results = in.iterator();
    } // end of if (!NO_ESTIMATE)

    SAMFileWriter out = OUT_IS_SAM ? writerFac.makeSAMWriter(header, false, new File(outFile))
            : writerFac.makeBAMWriter(header, false, new File(outFile));

    // check each alignment again
    if (verbose > 0) {
        System.err.println("Filtering alignments ...");
        statusTask.reset();
        statusTask.setInfo("alignments processed");
    }
    while (results.hasNext()) {
        SAMRecord record = results.next();
        if (verbose > 0)
            statusTask.updateStatus();
        String ID = record.getReadName();
        // fix the read and quality string if this is a secondary hit from multiple hits (used for BWA alignments)
        if (ID.equals(prevID) && record.getReadLength() == 0)
            SAMAlignFixer.fixSAMRecordRead(record, prevRecord);
        if (chrFilter != null && !chrFilter.contains(record.getReferenceName())) {
            prevID = ID;
            prevRecord = record;
            continue;
        }

        // fix MD:Z string for certain aligners with invalid format (e.g. seqAlto)
        if (fixMD)
            SAMAlignFixer.fixMisStr(record);

        // fix alignment, ignore if failed (unmapped or empty)
        if (!SAMAlignFixer.fixSAMRecord(record, knownVCF, DO_1DP)) {
            prevID = ID;
            prevRecord = record;
            continue;
        }
        if (!record.getReadPairedFlag()) {
            System.err.println("Error: alignment is not from a paired-end read at\n" + record.getSAMString());
            out.close();
            statusTask.cancel();
            processMonitor.cancel();
            return;
        }

        if (!ID.equals(prevID) && prevID != null || !results.hasNext()) { // a new non-first ID encountered, or end of alignments
            // create alnPEList from filtered alnList
            alnPEList = createAlnPEListFromAlnList(alnList);
            //System.err.printf("%d alignments for %s transformed to %d alnPairs%n", alnList.size(), prevID, alnPEList.size());
            int totalPair = alnPEList.size();
            // filter highly unlikely PEhits
            filterPEHits(alnPEList, MIN_ALIGN_RATE, MIN_IDENTITY);
            // calculate posterior mapQ for each pair
            calcPEHitPostP(alnPEList, totalPair, MAX_HIT);
            // filter hits by mapQ
            if (MIN_MAPQ > 0)
                filterPEHits(alnPEList, MIN_MAPQ);

            // sort the list in DECREASING order using the reversed natural-order comparator
            Collections.sort(alnPEList, Collections.reverseOrder());
            // control max-best
            if (MAX_BEST != 0 && alnPEList.size() > MAX_BEST) { // potentially too many best hits
                int nBestStratum = 0;
                int bestMapQ = alnPEList.get(0).getPEMapQ(); // best mapQ from first PE
                for (SAMRecordPair pr : alnPEList)
                    if (pr.getPEMapQ() == bestMapQ)
                        nBestStratum++;
                    else
                        break; // list is sorted in decreasing mapQ, so stop searching
                if (nBestStratum > MAX_BEST)
                    alnPEList.clear();
            }
            // filter alignments with auxiliary filters
            if (!MAX_SENSITIVITY)
                filterPEHits(alnPEList, MAX_SEED_MIS, MAX_SEED_INDEL, MAX_ALL_MIS, MAX_ALL_INDEL);

            // report remaining secondary alignments, up to MAX_REPORT
            for (int i = 0; i < alnPEList.size() && (MAX_REPORT == 0 || i < MAX_REPORT); i++) {
                SAMRecordPair repPair = alnPEList.get(i);
                if (doUpdateBit)
                    repPair.setNotPrimaryAlignmentFlags(i != 0);
                int nReport = MAX_REPORT == 0 ? alnPEList.size() : Math.min(alnPEList.size(), MAX_REPORT);
                int nFiltered = alnPEList.size();
                if (repPair.fwdRecord != null) {
                    repPair.fwdRecord.setAttribute("NH", nReport);
                    repPair.fwdRecord.setAttribute("XN", nFiltered);
                    out.addAlignment(repPair.fwdRecord);
                }
                if (repPair.revRecord != null) {
                    repPair.revRecord.setAttribute("NH", nReport);
                    repPair.revRecord.setAttribute("XN", nFiltered);
                    out.addAlignment(repPair.revRecord);
                }
            }
            // reset list
            alnList.clear();
            alnPEList.clear();
        }
        // update
        if (!ID.equals(prevID)) {
            prevID = ID;
            prevRecord = record;
        }
        alnList.add(record);
    } // end while
    try {
        in.close();
        out.close();
    } catch (IOException e) {
        System.err.println(e.getMessage());
    }
    // Terminate the monitor task and monitor
    if (verbose > 0) {
        statusTask.cancel();
        statusTask.finish();
        processMonitor.cancel();
    }
}
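
The filter above buffers every alignment for one read ID in alnList, then calls clear() on both alnList and alnPEList at each ID boundary so the same lists serve the next read. A compact sketch of that group-and-clear idiom; the input format and names are invented for illustration:

import java.util.ArrayList;
import java.util.List;

public class ReadGroupSketch {
    public static void main(String[] args) {
        String[] alignments = { "read1:chr1", "read1:chr2", "read2:chr1" };
        List<String> group = new ArrayList<String>();
        String prevID = null;
        for (String aln : alignments) {
            String id = aln.substring(0, aln.indexOf(':'));
            if (prevID != null && !id.equals(prevID)) {
                System.out.println(prevID + " -> " + group);
                group.clear(); // flush the finished group, reuse the list
            }
            group.add(aln);
            prevID = id;
        }
        System.out.println(prevID + " -> " + group); // flush the last group
    }
}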

From source file:com.netscape.cms.servlet.test.DRMTest.java

public static void main(String[] args) throws InvalidKeyException, NoSuchAlgorithmException,
        InvalidKeySpecException, SignatureException, IOException {
    String host = null;
    String port = null;
    String token_pwd = null;
    String db_dir = "./";
    String protocol = "http";
    String clientCertNickname = "KRA Administrator of Instance pki-kra's SjcRedhat Domain ID";

    // parse command line arguments
    Options options = new Options();
    options.addOption("h", true, "Hostname of the DRM");
    options.addOption("p", true, "Port of the DRM");
    options.addOption("w", true, "Token password");
    options.addOption("d", true, "Directory for tokendb");
    options.addOption("s", true, "Attempt Optional Secure SSL connection");
    options.addOption("c", true, "Optional SSL Client cert Nickname");

    try {
        CommandLineParser parser = new PosixParser();
        CommandLine cmd = parser.parse(options, args);

        if (cmd.hasOption("h")) {
            host = cmd.getOptionValue("h");
        } else {
            System.err.println("Error: no hostname provided.");
            usage(options);
        }

        if (cmd.hasOption("p")) {
            port = cmd.getOptionValue("p");
        } else {
            System.err.println("Error: no port provided");
            usage(options);
        }

        if (cmd.hasOption("w")) {
            token_pwd = cmd.getOptionValue("w");
        } else {
            System.err.println("Error: no token password provided");
            usage(options);
        }

        if (cmd.hasOption("d")) {
            db_dir = cmd.getOptionValue("d");
        }

        if (cmd.hasOption("s")) {
            if (cmd.getOptionValue("s") != null && cmd.getOptionValue("s").equals("true")) {
                protocol = "https";
            }
        }

        if (cmd.hasOption("c")) {
            String nick = cmd.getOptionValue("c");

            if (nick != null && protocol.equals("https")) {
                clientCertNickname = nick;
            }
        }

    } catch (ParseException e) {
        System.err.println("Error in parsing command line options: " + e.getMessage());
        usage(options);
    }

    // used for crypto operations
    byte[] iv = { 0x1, 0x1, 0x1, 0x1, 0x1, 0x1, 0x1, 0x1 };

    try {
        iv = genIV(8);
    } catch (Exception e) {
        log("Can't generate initialization vector use default: " + e.toString());
    }

    // used for wrapping to send data to DRM
    String transportCert = null;

    // Data to be archived
    SymmetricKey vek = null;
    String passphrase = null;

    // Session keys and passphrases for recovery
    SymmetricKey sessionKey = null;
    byte[] wrappedRecoveryKey = null;
    String recoveryPassphrase = null;
    byte[] wrappedRecoveryPassphrase = null;

    // retrieved data (should match archived data)
    byte[] encryptedData = null;
    String recoveredKey = null;

    // various ids used in recovery/archival operations
    KeyId keyId = null;
    String clientKeyId = null;
    RequestId recoveryRequestId = null;

    // Variables for data structures from calls
    KeyRequestResponse requestResponse = null;
    Key keyData = null;
    KeyInfo keyInfo = null;

    // Initialize token
    try {
        CryptoManager.initialize(db_dir);
    } catch (AlreadyInitializedException e) {
        // it is ok if it is already initialized
    } catch (Exception e) {
        log("INITIALIZATION ERROR: " + e.toString());
        System.exit(1);
    }

    // Set base URI and get client

    KRAClient client;
    SystemCertClient systemCertClient;
    KeyClient keyClient;
    NSSCryptoProvider nssCrypto;
    try {
        ClientConfig config = new ClientConfig();
        config.setServerURI(protocol + "://" + host + ":" + port + "/kra");
        config.setCertNickname(clientCertNickname);
        config.setCertDatabase(db_dir);
        config.setCertPassword(token_pwd);
        nssCrypto = new NSSCryptoProvider(config);

        client = new KRAClient(new PKIClient(config, nssCrypto));
        systemCertClient = (SystemCertClient) client.getClient("systemcert");
        keyClient = (KeyClient) client.getClient("key");

    } catch (Exception e) {
        e.printStackTrace();
        return;
    }

    // Test 1: Get transport certificate from DRM
    transportCert = systemCertClient.getTransportCert().getEncoded();
    transportCert = transportCert.substring(CertData.HEADER.length(), transportCert.indexOf(CertData.FOOTER));
    keyClient.setTransportCert(transportCert);

    log("Transport Cert retrieved from DRM: " + transportCert);

    // Test 2: Get list of completed key archival requests
    log("\n\nList of completed archival requests");
    KeyRequestInfoCollection list = keyClient.listRequests("complete", "securityDataEnrollment");
    if (list.getTotal() == 0) {
        log("No requests found");
    } else {
        Iterator<KeyRequestInfo> iter = list.getEntries().iterator();
        while (iter.hasNext()) {
            KeyRequestInfo info = iter.next();
            printRequestInfo(info);
        }
    }

    // Test 3: Get list of key recovery requests
    log("\n\nList of completed recovery requests");
    KeyRequestInfoCollection list2 = keyClient.listRequests("complete", "securityDataRecovery");
    if (list2.getTotal() == 0) {
        log("No requests found");
    } else {
        Iterator<KeyRequestInfo> iter2 = list2.getEntries().iterator();
        while (iter2.hasNext()) {
            KeyRequestInfo info = iter2.next();
            printRequestInfo(info);
        }
    }

    // Test 4: Generate and archive a symmetric key
    log("Archiving symmetric key");
    clientKeyId = "UUID: 123-45-6789 VEK " + Calendar.getInstance().getTime().toString();
    try {
        vek = nssCrypto.generateSessionKey();
        byte[] encoded = nssCrypto.createPKIArchiveOptions(transportCert, vek, null,
                KeyRequestResource.DES3_ALGORITHM, 0, iv);

        KeyRequestResponse info = keyClient.archivePKIOptions(clientKeyId,
                KeyRequestResource.SYMMETRIC_KEY_TYPE, KeyRequestResource.DES3_ALGORITHM, 0, encoded);
        log("Archival Results:");
        printRequestInfo(info.getRequestInfo());
        keyId = info.getKeyId();
    } catch (Exception e) {
        log("Exception in archiving symmetric key:" + e.getMessage());
        e.printStackTrace();
    }

    //Test 5: Get keyId for active key with client ID

    log("Getting key ID for symmetric key");
    keyInfo = keyClient.getActiveKeyInfo(clientKeyId);
    printKeyInfo(keyInfo);
    KeyId keyId2 = keyInfo.getKeyId();
    if (keyId2 == null) {
        log("No archived key found");
    } else {
        log("Archived Key found: " + keyId);
    }

    if (!keyId.equals(keyId2)) {
        log("Error: key ids from search and archival do not match");
    } else {
        log("Success: keyids from search and archival match.");
    }

    // Test 6: Submit a recovery request for the symmetric key using a session key
    log("Submitting a recovery request for the  symmetric key using session key");
    try {
        sessionKey = nssCrypto.generateSessionKey();
        wrappedRecoveryKey = CryptoUtil.wrapSymmetricKey(nssCrypto.getManager(), nssCrypto.getToken(),
                transportCert, sessionKey);
        keyData = keyClient.retrieveKey(keyId, wrappedRecoveryKey);
    } catch (Exception e) {
        log("Exception in recovering symmetric key using session key: " + e.getMessage());
    }

    encryptedData = keyData.getEncryptedData();

    try {
        recoveredKey = Utils.base64encode(nssCrypto.unwrapWithSessionKey(encryptedData, sessionKey,
                KeyRequestResource.DES3_ALGORITHM, keyData.getNonceData()));
    } catch (Exception e) {
        log("Exception in unwrapping key: " + e.toString());
        e.printStackTrace();
    }

    if (recoveredKey == null || !recoveredKey.equals(Utils.base64encode(vek.getEncoded()))) {
        log("Error: recovered and archived keys do not match!");
    } else {
        log("Success: recovered and archived keys match!");
    }

    // Test 7: Submit a recovery request for the symmetric key using a passphrase
    log("Submitting a recovery request for the  symmetric key using a passphrase");
    recoveryPassphrase = "Gimme me keys please";

    try {
        sessionKey = nssCrypto.generateSessionKey();
        wrappedRecoveryPassphrase = nssCrypto.wrapWithSessionKey(recoveryPassphrase, iv, sessionKey,
                KeyRequestResource.DES3_ALGORITHM);
        wrappedRecoveryKey = nssCrypto.wrapSessionKeyWithTransportCert(sessionKey, transportCert);

        keyData = keyClient.retrieveKeyUsingWrappedPassphrase(keyId, wrappedRecoveryKey,
                wrappedRecoveryPassphrase, iv);
    } catch (Exception e) {
        log("Exception in recovering symmetric key using passphrase" + e.toString());
        e.printStackTrace();
    }

    encryptedData = keyData.getEncryptedData();

    try {
        recoveredKey = Utils.base64encode(nssCrypto.unwrapWithPassphrase(encryptedData, recoveryPassphrase));
    } catch (Exception e) {
        log("Error: unable to unwrap key using passphrase");
        e.printStackTrace();
    }

    if (recoveredKey == null || !recoveredKey.equals(Utils.base64encode(vek.getEncoded()))) {
        log("Error: recovered and archived keys do not match!");
    } else {
        log("Success: recovered and archived keys do match!");
    }

    passphrase = "secret12345";
    // Test 8: Generate and archive a passphrase
    clientKeyId = "UUID: 123-45-6789 RKEK " + Calendar.getInstance().getTime().toString();
    try {
        requestResponse = keyClient.archivePassphrase(clientKeyId, passphrase);

        log("Archival Results:");
        printRequestInfo(requestResponse.getRequestInfo());
        keyId = requestResponse.getKeyId();
    } catch (Exception e) {
        log("Exception in archiving symmetric key:" + e.toString());
        e.printStackTrace();
    }

    //Test 9: Get keyId for active passphrase with client ID
    log("Getting key ID for passphrase");
    keyInfo = keyClient.getActiveKeyInfo(clientKeyId);
    printKeyInfo(keyInfo);
    keyId2 = keyInfo.getKeyId();
    if (keyId2 == null) {
        log("No archived key found");
    } else {
        log("Archived Key found: " + keyId);
    }

    if (!keyId.equals(keyId2)) {
        log("Error: key ids from search and archival do not match");
    } else {
        log("Success: key ids from search and archival do match!");
    }

    // Test 10: Submit a recovery request for the passphrase using a session key
    log("Submitting a recovery request for the passphrase using session key");
    sessionKey = null;
    wrappedRecoveryKey = null;
    try {
        keyData = keyClient.retrieveKeyByPassphrase(keyId, recoveryPassphrase);
    } catch (Exception e) {
        log("Exception in recovering passphrase using session key: " + e.getMessage());
    }
    encryptedData = keyData.getEncryptedData();
    try {
        recoveredKey = new String(nssCrypto.unwrapWithPassphrase(encryptedData, recoveryPassphrase), "UTF-8");
    } catch (Exception e) {
        log("Exception in unwrapping key: " + e.toString());
        e.printStackTrace();
    }

    if (recoveredKey == null || !recoveredKey.equals(passphrase)) {
        log("Error: recovered and archived passphrases do not match!");
    } else {
        log("Success: recovered and archived passphrases do match!");
    }

    // Test 11: Submit a recovery request for the passphrase using a passphrase
    try {
        sessionKey = nssCrypto.generateSessionKey();
        wrappedRecoveryKey = nssCrypto.wrapSessionKeyWithTransportCert(sessionKey, transportCert);
        wrappedRecoveryPassphrase = nssCrypto.wrapWithSessionKey(recoveryPassphrase, iv, sessionKey,
                KeyRequestResource.DES3_ALGORITHM);
        keyData = keyClient.retrieveKeyUsingWrappedPassphrase(keyId, wrappedRecoveryKey,
                wrappedRecoveryPassphrase, iv);
    } catch (Exception e1) {
        e1.printStackTrace();
        System.out.println("Test 17: " + e1.getMessage());
        System.exit(-1);
    }
    encryptedData = keyData.getEncryptedData();
    try {
        recoveredKey = new String(nssCrypto.unwrapWithPassphrase(encryptedData, recoveryPassphrase), "UTF-8");
    } catch (Exception e) {
        log("Error: cannot unwrap key using passphrase");
        e.printStackTrace();
    }

    if (recoveredKey == null || !recoveredKey.equals(passphrase)) {
        log("Error: recovered and archived passphrases do not match!");
    } else {
        log("Success: recovered and archived passphrases do match!");
    }

    // Test 12: Get key
    log("Getting passphrase: " + keyId);
    try {
        keyData = keyClient.retrieveKeyByPassphrase(keyId, recoveryPassphrase);
    } catch (Exception e1) {
        e1.printStackTrace();
    }
    encryptedData = keyData.getEncryptedData();
    try {
        recoveredKey = new String(nssCrypto.unwrapWithPassphrase(encryptedData, recoveryPassphrase), "UTF-8");
    } catch (Exception e) {
        log("Error: Can't unwrap recovered key using passphrase");
        e.printStackTrace();
    }

    if (recoveredKey == null || !recoveredKey.equals(passphrase)) {
        log("Error: recovered and archived passphrases do not match!");
    } else {
        log("Success: recovered and archived passphrases do match!");
    }

    // Test 13: Get non-existent request
    RequestId requestId = new RequestId("0xabcdef");
    log("Getting non-existent request: " + requestId.toHexString());
    try {
        keyClient.getRequestInfo(requestId);
        log("Error: getting non-existent request does not throw an exception");
    } catch (RequestNotFoundException e) {
        log("Success: getting non-existent request throws an exception: " + e.getMessage() + " ("
                + e.getRequestId().toHexString() + ")");
    }

    // Test 14: Request x509 key recovery
    // This test requires retrieving the keyId and matching certificate
    // from installed instances of CA and DRM
    String keyID = "1";
    String b64Certificate = "MIIC+TCCAeGgAwIBAgIBDDANBgkqhkiG9w0BAQsFADBOMSswKQYDVQQKDCJ1c2Vy"
            + "c3lzLnJlZGhhdC5jb20gU2VjdXJpdHkgRG9tYWluMR8wHQYDVQQDDBZDQSBTaWdu"
            + "aW5nIENlcnRpZmljYXRlMB4XDTEzMTAyNTE5MzQwM1oXDTE0MDQyMzE5MzQwM1ow"
            + "EzERMA8GCgmSJomT8ixkAQEMAXgwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGB"
            + "ALhLfGmKvxFsKXPh49q1QsluXU3WlyS1XnpDLgOAhgTNgO4sG6CpPdv6hZYIvQBb"
            + "ZQ5bhuML+NXK+Q+EIiNk1cUTxgL3a30sPzy6QaFWxwM8i4uXm4nCBYv7T+n4V6/O"
            + "xHIM2Ch/dviAb3vz+M9trErv9t+d2H8jNXT3sHuDb/kvAgMBAAGjgaAwgZ0wHwYD"
            + "VR0jBBgwFoAUh1cxWFRY+nMsx4odQQI1GqyFxP8wSwYIKwYBBQUHAQEEPzA9MDsG"
            + "CCsGAQUFBzABhi9odHRwOi8vZG9ndGFnMjAudXNlcnN5cy5yZWRoYXQuY29tOjgw"
            + "ODAvY2Evb2NzcDAOBgNVHQ8BAf8EBAMCBSAwHQYDVR0lBBYwFAYIKwYBBQUHAwIG"
            + "CCsGAQUFBwMEMA0GCSqGSIb3DQEBCwUAA4IBAQCvmbUzQOouE2LgQQcKfmgwwJMJ"
            + "9tMrPwDUtyFdaIFoPL4uZaujSscaN4IWK2r5vIMJ65jwYCI7sI9En2ZfO28J9dQj"
            + "lpqu6TaJ+xtaMk7OvXpVB7lJk73HAttMGjETlkoq/6EjxcugmJsDqVD0b2tO7Vd0"
            + "hroBe2uPDHM2ASewZF415lUcRh0URtmxSazTInbyxpmy1wgSJQ0C6fMCeT+hUFlA"
            + "0P4k1TIprapGVq7FpKcqlhK2gTBfTSnoO7gmXG/9MxJiYpb/Aph8ptXq6quHz1Mj"
            + "greWr3xTsy6gF2yphUEkGHh4v22XvK+FLx9Jb6zloMWA2GG9gpUpvMnl1fH4";

    log("Requesting X509 key recovery.");
    recoveryRequestId = keyClient.recoverKey(new KeyId(keyID), null, null, null, b64Certificate)
            .getRequestInfo().getRequestId();
    log("Requesting X509 key recovery request: " + recoveryRequestId);

    // Test 15: Approve x509 key recovery
    log("Approving X509 key recovery request: " + recoveryRequestId);
    keyClient.approveRequest(recoveryRequestId);

    // Test 16: Recover x509 key
    log("Recovering X509 key based on request: " + recoveryRequestId);
    try {
        // KeyData recoveredX509Key = client.recoverKey(recoveryRequestId, "netscape");
        // log("Success: X509Key recovered: "+ recoveredX509Key.getP12Data());
    } catch (RequestNotFoundException e) {
        log("Error: recovering X509Key");
    }

    // Test 1: Get transport certificate from DRM
    transportCert = systemCertClient.getTransportCert().getEncoded();
    transportCert = transportCert.substring(CertData.HEADER.length(), transportCert.indexOf(CertData.FOOTER));

    log("Transport Cert retrieved from DRM: " + transportCert);

    // Test 17: Get list of completed key archival requests
    log("\n\nList of completed archival requests");
    list = keyClient.listRequests("complete", IRequest.SYMKEY_GENERATION_REQUEST);
    if (list.getTotal() == 0) {
        log("No requests found");
    } else {
        Iterator<KeyRequestInfo> iter = list.getEntries().iterator();
        while (iter.hasNext()) {
            KeyRequestInfo info = iter.next();
            printRequestInfo(info);
        }
    }

    // test 18: Generate symmetric key
    clientKeyId = "Symmetric Key #1234f " + Calendar.getInstance().getTime().toString();
    List<String> usages = new ArrayList<String>();
    usages.add(SymKeyGenerationRequest.DECRYPT_USAGE);
    usages.add(SymKeyGenerationRequest.ENCRYPT_USAGE);
    KeyRequestResponse genKeyResponse = keyClient.generateSymmetricKey(clientKeyId,
            KeyRequestResource.AES_ALGORITHM, 128, usages, null);
    printRequestInfo(genKeyResponse.getRequestInfo());
    keyId = genKeyResponse.getKeyId();

    // test 19: Get keyId for active key with client ID
    log("Getting key ID for symmetric key");
    keyInfo = keyClient.getActiveKeyInfo(clientKeyId);
    printKeyInfo(keyInfo);
    keyId2 = keyInfo.getKeyId();
    if (keyId2 == null) {
        log("No archived key found");
    } else {
        log("Archived Key found: " + keyId);
    }

    if (!keyId.equals(keyId2)) {
        log("Error: key ids from search and archival do not match");
    } else {
        log("Success: keyids from search and archival match.");
    }

    // Test 20: Submit a recovery request for the symmetric key using a session key
    log("Submitting a recovery request for the  symmetric key using session key");
    try {
        sessionKey = nssCrypto.generateSessionKey();
        wrappedRecoveryKey = nssCrypto.wrapSessionKeyWithTransportCert(sessionKey, transportCert);
        keyData = keyClient.retrieveKey(keyId, wrappedRecoveryKey);
    } catch (Exception e) {
        log("Exception in recovering symmetric key using session key: " + e.getMessage());
    }

    encryptedData = keyData.getEncryptedData();

    try {
        recoveredKey = new String(nssCrypto.unwrapWithSessionKey(encryptedData, sessionKey,
                KeyRequestResource.DES3_ALGORITHM, keyData.getNonceData()));
    } catch (Exception e) {
        log("Exception in unwrapping key: " + e.toString());
        e.printStackTrace();
    }

    // test 21: Generate symmetric key - invalid algorithm
    try {
        genKeyResponse = keyClient.generateSymmetricKey("Symmetric Key #1235", "AFS", 128, usages, null);
    } catch (Exception e) {
        log("Exception: " + e);
    }

    // test 22: Generate symmetric key - invalid key size
    try {
        genKeyResponse = keyClient.generateSymmetricKey("Symmetric Key #1236", "AES", 0, usages, null);
    } catch (Exception e) {
        log("Exception: " + e);
    }

    // test 23: Generate symmetric key - usages not defined
    try {
        genKeyResponse = keyClient.generateSymmetricKey("Symmetric Key #1236", "DES", 56, null, null);
    } catch (Exception e) {
        log("Exception: " + e);
    }

    // Test 24: Generate and archive a symmetric key of type AES
    log("Archiving symmetric key");
    clientKeyId = "UUID: 123-45-6789 VEK " + Calendar.getInstance().getTime().toString();
    try {
        vek = nssCrypto.generateSymmetricKey(KeyRequestResource.AES_ALGORITHM, 128);

        byte[] encoded = CryptoUtil.createPKIArchiveOptions(nssCrypto.getManager(), nssCrypto.getToken(),
                transportCert, vek, null, KeyGenAlgorithm.DES3, 0, new IVParameterSpec(iv));

        KeyRequestResponse response = keyClient.archivePKIOptions(clientKeyId,
                KeyRequestResource.SYMMETRIC_KEY_TYPE, KeyRequestResource.AES_ALGORITHM, 128, encoded);
        log("Archival Results:");
        printRequestInfo(response.getRequestInfo());
        keyId = response.getKeyId();
    } catch (Exception e) {
        log("Exception in archiving symmetric key:" + e.getMessage());
        e.printStackTrace();
    }

    //Test 25: Get keyId for active key with client ID
    log("Getting key ID for symmetric key");
    keyInfo = keyClient.getActiveKeyInfo(clientKeyId);
    printKeyInfo(keyInfo);
    keyId2 = keyInfo.getKeyId();
    if (keyId2 == null) {
        log("No archived key found");
    } else {
        log("Archived Key found: " + keyId);
    }

    if (!keyId.equals(keyId2)) {
        log("Error: key ids from search and archival do not match");
    } else {
        log("Success: keyids from search and archival match.");
    }

    // Test 26: Submit a recovery request for the symmetric key
    log("Submitting a recovery request for the  symmetric key without using session key");
    try {
        keyData = keyClient.retrieveKey(keyId);
    } catch (Exception e) {
        log("Exception in recovering symmetric key using session key: " + e.getMessage());
    }

    // Since no session key is provided externally, the retrieveKey method
    // generates a session key, wraps it with transport cert and completes the request.
    // The encrypted data is then unwrapped using the temporary session key and set to
    // the attribute privateData.
    recoveredKey = Utils.base64encode(keyData.getData());

    if (!recoveredKey.equals(Utils.base64encode(vek.getEncoded()))) {
        log("Error: recovered and archived keys do not match!");
    } else {
        log("Success: recoverd and archived keys match!");
    }

    // Test 27: Get key info
    log("getting key info for existing key");
    printKeyInfo(keyClient.getKeyInfo(keyId));

    //Test 28: Modify status
    log("modify the key status");
    keyClient.modifyKeyStatus(keyId, KeyResource.KEY_STATUS_INACTIVE);
    keyInfo = keyClient.getKeyInfo(keyId);
    printKeyInfo(keyInfo);

    //Test 29:  Confirm no more active keys with this ID
    log("look for active keys with this id");
    clientKeyId = keyInfo.getClientKeyID();
    try {
        keyInfo = keyClient.getActiveKeyInfo(clientKeyId);
        printKeyInfo(keyInfo);
    } catch (ResourceNotFoundException e) {
        log("Success: ResourceNotFound exception thrown: " + e);
    }

    // Test asymmetric key generation.

    String[] algs = { "RSA", "DSA" };
    for (int i = 0; i < algs.length; i++) {
        // Test 30: Generate asymmetric keys for the current algorithm
        System.out.println("\nTesting asymmetric key generation for algorithm " + algs[i]);
        clientKeyId = "AsymKey #" + Calendar.getInstance().getTimeInMillis();
        usages.clear();
        usages.add(AsymKeyGenerationRequest.SIGN);
        usages.add(AsymKeyGenerationRequest.VERIFY);
        KeyRequestResponse response = keyClient.generateAsymmetricKey(clientKeyId, algs[i], 1024, usages, null);
        printRequestInfo(response.getRequestInfo());
        System.out.println();

        // Test 31: Get information of the newly generated asymmetric keys
        System.out.println("Fetch information of the newly generated asymmetric keys.");
        System.out.println();
        KeyInfo info = keyClient.getKeyInfo(response.getKeyId());
        printKeyInfo(info);
        System.out.println();

        // Test 32: Retrieve private key data
        System.out.println("Retrieving and verifying the generated private key.");
        try {
            keyData = keyClient.retrieveKey(response.getKeyId());
        } catch (Exception e) {
            log("Exception retrieving the private key data.");
            e.printStackTrace();
        }

        // Test 33: Verify the generated key pair.
        if (isKeyPairValid(algs[i], keyData.getData(), info.getPublicKey())) {
            log("The key pair generated using " + algs[i] + " algorithm is valid.");
        } else {
            log("The key pair generated using " + algs[i] + " algorithm is invalid.");
        }
        System.out.println();
    }

    // Test 34:
}
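
DRMTest repurposes the same usages list before the asymmetric-key loop: clear() drops the encrypt/decrypt flags so the list can carry sign/verify flags instead. A self-contained sketch of that pattern; the usage strings and class name are hypothetical:

import java.util.ArrayList;
import java.util.List;

public class KeyUsagesSketch {
    public static void main(String[] args) {
        List<String> usages = new ArrayList<String>();
        usages.add("encrypt");
        usages.add("decrypt");
        System.out.println("symmetric key usages: " + usages);
        usages.clear(); // reset the flags before the next key request
        usages.add("sign");
        usages.add("verify");
        System.out.println("asymmetric key usages: " + usages);
    }
}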