Example usage for java.util List get

Introduction

This page collects usage examples for the java.util.List method get(int index), drawn from open-source projects.

Prototype

E get(int index);

Document

Returns the element at the specified position in this list.
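
As a quick, self-contained illustration (a minimal sketch, not taken from the project sources below), the following program retrieves elements by their zero-based index and shows the IndexOutOfBoundsException thrown for an out-of-range index:

import java.util.Arrays;
import java.util.List;

public class ListGetExample {
    public static void main(String[] args) {
        List<String> names = Arrays.asList("alpha", "beta", "gamma");

        // get(int index) returns the element at the given zero-based position
        System.out.println(names.get(0)); // prints "alpha"
        System.out.println(names.get(2)); // prints "gamma"

        // indexes outside [0, size()) throw IndexOutOfBoundsException
        try {
            names.get(3);
        } catch (IndexOutOfBoundsException ex) {
            System.out.println("Invalid index: " + ex);
        }
    }
}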

Usage

From source file:com.twitter.hraven.rest.client.HRavenRestClient.java

public static void main(String[] args) throws IOException {
    String apiHostname = null;
    String cluster = null;
    String username = null;
    String batchDesc = null;
    String signature = null;
    int limit = 2;
    boolean useHBaseAPI = false;
    boolean dumpJson = false;
    boolean hydrateTasks = false;
    List<String> taskResponseFilters = new ArrayList<String>();
    List<String> jobResponseFilters = new ArrayList<String>();
    List<String> flowResponseFilters = new ArrayList<String>();
    List<String> configFields = new ArrayList<String>();

    StringBuffer usage = new StringBuffer("Usage: java ");
    usage.append(HRavenRestClient.class.getName()).append(" [-options]\n");
    usage.append("Returns data from recent flows and their associated jobs\n");
    usage.append("where options include: \n");
    usage.append(" -a <API hostname> [required]\n");
    usage.append(" -c <cluster> [required]\n");
    usage.append(" -u <username> [required]\n");
    usage.append(" -f <flowName> [required]\n");
    usage.append(" -s <signature>\n");
    usage.append(" -l <limit>\n");
    usage.append(" -h - print this message and return\n");
    usage.append(" -H - use HBase API, not the REST API\n");
    usage.append(" -j - output json\n");
    usage.append(" -t - retrieve task information as well");
    usage.append(" -w - config field to be included in job response");
    usage.append(" -z - field to be included in task response");
    usage.append(" -y - field to be included in job response");
    usage.append(" -x - field to be included in flow response");

    for (int i = 0; i < args.length; i++) {
        if ("-a".equals(args[i])) {
            apiHostname = args[++i];
            continue;
        } else if ("-c".equals(args[i])) {
            cluster = args[++i];
            continue;
        } else if ("-u".equals(args[i])) {
            username = args[++i];
            continue;
        } else if ("-f".equals(args[i])) {
            batchDesc = args[++i];
            continue;
        } else if ("-s".equals(args[i])) {
            signature = args[++i];
            continue;
        } else if ("-l".equals(args[i])) {
            limit = Integer.parseInt(args[++i]);
            continue;
        } else if ("-H".equals(args[i])) {
            useHBaseAPI = true;
            continue;
        } else if ("-j".equals(args[i])) {
            dumpJson = true;
            continue;
        } else if ("-t".equals(args[i])) {
            hydrateTasks = true;
            continue;
        } else if ("-z".equals(args[i])) {
            String taskFilters = args[++i];
            taskResponseFilters = Arrays.asList(taskFilters.split(","));
            continue;
        } else if ("-y".equals(args[i])) {
            String jobFilters = args[++i];
            jobResponseFilters = Arrays.asList(jobFilters.split(","));
            continue;
        } else if ("-x".equals(args[i])) {
            String flowFilters = args[++i];
            flowResponseFilters = Arrays.asList(flowFilters.split(","));
            continue;
        } else if ("-w".equals(args[i])) {
            String configFilters = args[++i];
            configFields = Arrays.asList(configFilters.split(","));
            continue;
        } else if ("-h".equals(args[i])) {
            System.err.println(usage.toString());
            System.exit(1);
        } else {
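            // unrecognized arguments are silently ignored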

        }
    }

    if (apiHostname == null || cluster == null || username == null || batchDesc == null) {
        System.err.println(usage.toString());
        System.exit(1);
    }

    List<Flow> flows;
    if (useHBaseAPI) {
        JobHistoryService jobHistoryService = new JobHistoryService(HBaseConfiguration.create());
        flows = jobHistoryService.getFlowSeries(cluster, username, batchDesc, signature, hydrateTasks, limit);
    } else {
        HRavenRestClient client = new HRavenRestClient(apiHostname, 100000, 100000);

        // note: each call below overwrites the previous result, so only the
        // last fetch is actually used
        // fetch flows without configurations
        flows = client.fetchFlows(cluster, username, batchDesc, signature, flowResponseFilters,
                jobResponseFilters, limit);
        // fetch flows including the requested config fields
        flows = client.fetchFlowsWithConfig(cluster, username, batchDesc, signature, limit, flowResponseFilters,
                jobResponseFilters, configFields);
        // fetch flows with config patterns (in this example, the same call as above)
        flows = client.fetchFlowsWithConfig(cluster, username, batchDesc, signature, limit, flowResponseFilters,
                jobResponseFilters, configFields);

        if (hydrateTasks) {
            for (Flow flow : flows) {
                for (JobDetails jd : flow.getJobs()) {
                    String jobId = jd.getJobId();
                    List<TaskDetails> td = client.fetchTaskDetails(cluster, jobId, taskResponseFilters);
                    jd.addTasks(td);
                }
            }
        }
    }

    if (dumpJson) {
        ObjectMapper om = ObjectMapperProvider.createCustomMapper();
        SimpleModule module = new SimpleModule("hRavenModule", new Version(0, 4, 0, null));
        module.addSerializer(Flow.class, new FlowSerializer());
        module.addSerializer(JobDetails.class, new JobDetailsSerializer());
        om.registerModule(module);
        if (flows.size() > 0) {
            System.out.println(om.writeValueAsString(flows.get(0)));
        }
        return;
    }

    System.out.println("Found " + flows.size() + " flows");
    StringBuilder sb = new StringBuilder();
    sb.append("\t\t").append("jobId");
    sb.append("\t\t").append("version");
    sb.append("\t\t").append("status");
    sb.append("\t").append("maps");
    sb.append("\t").append("reduces");
    sb.append("\t").append("rBytesRead");
    sb.append("\t").append("feature");
    sb.append("\t\t\t").append("alias");
    System.out.println(sb.toString());

    int i = 0;
    for (Flow flow : flows) {
        long minSubmitTime = -1, maxFinishTime = -1;
        for (JobDetails job : flow.getJobs()) {
            if (minSubmitTime == -1 && job.getSubmitTime() > 0) {
                minSubmitTime = job.getSubmitTime();
            }
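            // widen the window to the earliest submit and latest finish times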
            minSubmitTime = Math.min(minSubmitTime, job.getSubmitTime());
            maxFinishTime = Math.max(maxFinishTime, job.getFinishTime());
        }

        if (minSubmitTime > 0 && maxFinishTime > 0) {
            System.out.println(String.format("Flow #%d: %s - %s", i++, DATE_FORMAT.format(minSubmitTime),
                    DATE_FORMAT.format(maxFinishTime)));
        } else {
            System.out.println(String.format("Flow #%d:", i++));
        }

        for (JobDetails job : flow.getJobs()) {
            sb = new StringBuilder();
            sb.append(" - ").append(job.getJobId());
            sb.append("\t").append(job.getVersion());
            sb.append("\t").append(job.getStatus());
            sb.append("\t").append(job.getTotalMaps());
            sb.append("\t").append(job.getTotalReduces());
            long reduceBytesRead = job.getReduceCounters().getCounter("FileSystemCounters",
                    "FILE_BYTES_READ") != null
                            ? job.getReduceCounters().getCounter("FileSystemCounters", "FILE_BYTES_READ")
                                    .getValue()
                            : -1;
            sb.append("\t").append(reduceBytesRead);
            sb.append("\t").append(job.getConfiguration().get("pig.job.feature"));
            sb.append("\t").append(job.getConfiguration().get("pig.alias"));
            System.out.println(sb.toString());
        }
    }
}

From source file:com.searchcode.app.App.java

public static void main(String[] args) {
    injector = Guice.createInjector(new InjectorConfig());
    int server_port = Helpers.tryParseInt(
            Properties.getProperties().getProperty(Values.SERVERPORT, Values.DEFAULTSERVERPORT),
            Values.DEFAULTSERVERPORT);
    boolean onlyLocalhost = Boolean
            .parseBoolean(Properties.getProperties().getProperty("only_localhost", "false"));

    // Database migrations happen before we start
    databaseMigrations();

    LOGGER.info("Starting searchcode server on port " + server_port);
    Spark.port(server_port);

    JobService js = injector.getInstance(JobService.class);
    Repo repo = injector.getInstance(Repo.class);
    Data data = injector.getInstance(Data.class);
    Api api = injector.getInstance(Api.class);

    ApiService apiService = injector.getInstance(ApiService.class);
    StatsService statsService = new StatsService();

    scl = Singleton.getSearchcodeLib(data);
    js.initialJobs();

    Gson gson = new Gson();

    Spark.staticFileLocation("/public");

    before((request, response) -> {
        if (onlyLocalhost) {
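            // when only_localhost is set, requests from other addresses get an empty 204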
            if (!request.ip().equals("127.0.0.1")) {
                halt(204);
            }
        }
    });

    get("/", (req, res) -> {
        Map<String, Object> map = new HashMap<>();

        map.put("repoCount", repo.getRepoCount());

        if (req.queryParams().contains("q") && !req.queryParams("q").trim().equals("")) {
            String query = req.queryParams("q").trim();
            int page = 0;

            if (req.queryParams().contains("p")) {
                try {
                    page = Integer.parseInt(req.queryParams("p"));
                    page = page > 19 ? 19 : page;
                } catch (NumberFormatException ex) {
                    page = 0;
                }
            }

            List<String> reposList = new ArrayList<>();
            List<String> langsList = new ArrayList<>();
            List<String> ownsList = new ArrayList<>();

            if (req.queryParams().contains("repo")) {
                String[] repos = req.queryParamsValues("repo");

                if (repos.length != 0) {
                    reposList = Arrays.asList(repos);
                }
            }

            if (req.queryParams().contains("lan")) {
                String[] langs = req.queryParamsValues("lan");

                if (langs.length != 0) {
                    langsList = Arrays.asList(langs);
                }
            }

            if (req.queryParams().contains("own")) {
                String[] owns = req.queryParamsValues("own");

                if (owns.length != 0) {
                    ownsList = Arrays.asList(owns);
                }
            }

            map.put("searchValue", query);
            map.put("searchResultJson",
                    gson.toJson(new CodePreload(query, page, langsList, reposList, ownsList)));

            map.put("logoImage", getLogo());
            map.put("isCommunity", ISCOMMUNITY);
            return new ModelAndView(map, "search_test.ftl");
        }

        // Totally pointless vanity, but let's rotate the image every week
        int photoId = getWeekOfMonth();

        if (photoId <= 0) {
            photoId = 3;
        }
        if (photoId > 4) {
            photoId = 2;
        }

        CodeSearcher cs = new CodeSearcher();

        map.put("photoId", photoId);
        map.put("numDocs", cs.getTotalNumberDocumentsIndexed());
        map.put("logoImage", getLogo());
        map.put("isCommunity", ISCOMMUNITY);
        return new ModelAndView(map, "index.ftl");
    }, new FreeMarkerEngine());

    get("/html/", (req, res) -> {
        CodeSearcher cs = new CodeSearcher();
        CodeMatcher cm = new CodeMatcher(data);
        Map<String, Object> map = new HashMap<>();

        map.put("repoCount", repo.getRepoCount());

        if (req.queryParams().contains("q")) {
            String query = req.queryParams("q").trim();
            String altquery = query.replaceAll("[^A-Za-z0-9 ]", " ").trim().replaceAll(" +", " ");
            int page = 0;

            if (req.queryParams().contains("p")) {
                try {
                    page = Integer.parseInt(req.queryParams("p"));
                    page = page > 19 ? 19 : page;
                } catch (NumberFormatException ex) {
                    page = 0;
                }
            }

            String[] repos = new String[0];
            String[] langs = new String[0];
            String reposFilter = "";
            String langsFilter = "";
            String reposQueryString = "";
            String langsQueryString = "";

            if (req.queryParams().contains("repo")) {
                repos = req.queryParamsValues("repo");

                if (repos.length != 0) {
                    List<String> reposList = Arrays.asList(repos).stream()
                            .map((s) -> "reponame:" + QueryParser.escape(s)).collect(Collectors.toList());

                    reposFilter = " && (" + StringUtils.join(reposList, " || ") + ")";

                    List<String> reposQueryList = Arrays.asList(repos).stream()
                            .map((s) -> "&repo=" + URLEncoder.encode(s)).collect(Collectors.toList());

                    reposQueryString = StringUtils.join(reposQueryList, "");
                }
            }

            if (req.queryParams().contains("lan")) {
                langs = req.queryParamsValues("lan");

                if (langs.length != 0) {
                    List<String> langsList = Arrays.asList(langs).stream()
                            .map((s) -> "languagename:" + QueryParser.escape(s)).collect(Collectors.toList());

                    langsFilter = " && (" + StringUtils.join(langsList, " || ") + ")";

                    List<String> langsQueryList = Arrays.asList(langs).stream()
                            .map((s) -> "&lan=" + URLEncoder.encode(s)).collect(Collectors.toList());

                    langsQueryString = StringUtils.join(langsQueryList, "");
                }
            }

            // split the query, escape it, and AND the terms together
            String cleanQueryString = scl.formatQueryString(query);

            SearchResult searchResult = cs.search(cleanQueryString + reposFilter + langsFilter, page);
            searchResult.setCodeResultList(cm.formatResults(searchResult.getCodeResultList(), query, true));

            for (CodeFacetRepo f : searchResult.getRepoFacetResults()) {
                if (Arrays.asList(repos).contains(f.getRepoName())) {
                    f.setSelected(true);
                }
            }

            for (CodeFacetLanguage f : searchResult.getLanguageFacetResults()) {
                if (Arrays.asList(langs).contains(f.getLanguageName())) {
                    f.setSelected(true);
                }
            }

            map.put("searchValue", query);
            map.put("searchResult", searchResult);
            map.put("reposQueryString", reposQueryString);
            map.put("langsQueryString", langsQueryString);

            map.put("altQuery", altquery);

            map.put("isHtml", true);
            map.put("logoImage", getLogo());
            map.put("isCommunity", ISCOMMUNITY);
            return new ModelAndView(map, "searchresults.ftl");
        }

        // Totally pointless vanity, but let's rotate the image every week
        int photoId = getWeekOfMonth();

        if (photoId <= 0) {
            photoId = 3;
        }
        if (photoId > 4) {
            photoId = 2;
        }

        map.put("photoId", photoId);
        map.put("numDocs", cs.getTotalNumberDocumentsIndexed());
        map.put("logoImage", getLogo());
        map.put("isCommunity", ISCOMMUNITY);
        return new ModelAndView(map, "index.ftl");
    }, new FreeMarkerEngine());

    /**
     * Allows one to write literal lucene search queries against the index
     * TODO This is still very much WIP
     */
    get("/literal/", (req, res) -> {
        CodeSearcher cs = new CodeSearcher();
        CodeMatcher cm = new CodeMatcher(data);
        Map<String, Object> map = new HashMap<>();

        map.put("repoCount", repo.getRepoCount());

        if (req.queryParams().contains("q")) {
            String query = req.queryParams("q").trim();

            int page = 0;

            if (req.queryParams().contains("p")) {
                try {
                    page = Integer.parseInt(req.queryParams("p"));
                    page = page > 19 ? 19 : page;
                } catch (NumberFormatException ex) {
                    page = 0;
                }
            }

            String altquery = query.replaceAll("[^A-Za-z0-9 ]", " ").trim().replaceAll(" +", " ");

            SearchResult searchResult = cs.search(query, page);
            searchResult.setCodeResultList(cm.formatResults(searchResult.getCodeResultList(), altquery, false));

            map.put("searchValue", query);
            map.put("searchResult", searchResult);
            map.put("reposQueryString", "");
            map.put("langsQueryString", "");

            map.put("altQuery", "");

            map.put("logoImage", getLogo());
            map.put("isCommunity", ISCOMMUNITY);
            return new ModelAndView(map, "searchresults.ftl");
        }

        map.put("numDocs", cs.getTotalNumberDocumentsIndexed());
        map.put("logoImage", getLogo());
        map.put("isCommunity", ISCOMMUNITY);
        return new ModelAndView(map, "index.ftl");
    }, new FreeMarkerEngine());

    /**
     * This is the endpoint used by the frontend.
     */
    get("/api/codesearch/", (req, res) -> {
        CodeSearcher cs = new CodeSearcher();
        CodeMatcher cm = new CodeMatcher(data);

        if (req.queryParams().contains("q") && req.queryParams("q").trim() != "") {
            String query = req.queryParams("q").trim();

            int page = 0;

            if (req.queryParams().contains("p")) {
                try {
                    page = Integer.parseInt(req.queryParams("p"));
                    page = page > 19 ? 19 : page;
                } catch (NumberFormatException ex) {
                    page = 0;
                }
            }

            String[] repos = new String[0];
            String[] langs = new String[0];
            String[] owners = new String[0];
            String reposFilter = "";
            String langsFilter = "";
            String ownersFilter = "";

            if (req.queryParams().contains("repo")) {
                repos = req.queryParamsValues("repo");

                if (repos.length != 0) {
                    List<String> reposList = Arrays.asList(repos).stream()
                            .map((s) -> "reponame:" + QueryParser.escape(s)).collect(Collectors.toList());

                    reposFilter = " && (" + StringUtils.join(reposList, " || ") + ")";
                }
            }

            if (req.queryParams().contains("lan")) {
                langs = req.queryParamsValues("lan");

                if (langs.length != 0) {
                    List<String> langsList = Arrays.asList(langs).stream()
                            .map((s) -> "languagename:" + QueryParser.escape(s)).collect(Collectors.toList());

                    langsFilter = " && (" + StringUtils.join(langsList, " || ") + ")";
                }
            }

            if (req.queryParams().contains("own")) {
                owners = req.queryParamsValues("own");

                if (owners.length != 0) {
                    List<String> ownersList = Arrays.asList(owners).stream()
                            .map((s) -> "codeowner:" + QueryParser.escape(s)).collect(Collectors.toList());

                    ownersFilter = " && (" + StringUtils.join(ownersList, " || ") + ")";
                }
            }

            // Need to pass in the filters into this query
            String cacheKey = query + page + reposFilter + langsFilter + ownersFilter;

            if (cache.containsKey(cacheKey)) {
                return cache.get(cacheKey);
            }

            // split the query, escape it, and AND the terms together
            String cleanQueryString = scl.formatQueryString(query);

            SearchResult searchResult = cs.search(cleanQueryString + reposFilter + langsFilter + ownersFilter,
                    page);
            searchResult.setCodeResultList(cm.formatResults(searchResult.getCodeResultList(), query, true));

            searchResult.setQuery(query);

            for (String altQuery : scl.generateAltQueries(query)) {
                searchResult.addAltQuery(altQuery);
            }

            // Null out the code as it isn't required; no point in bloating our AJAX responses
            for (CodeResult codeSearchResult : searchResult.getCodeResultList()) {
                codeSearchResult.setCode(null);
            }

            cache.put(cacheKey, searchResult);
            return searchResult;
        }

        return null;
    }, new JsonTransformer());

    get("/api/repo/add/", "application/json", (request, response) -> {
        boolean apiEnabled = Boolean
                .parseBoolean(Properties.getProperties().getProperty("api_enabled", "false"));
        boolean apiAuth = Boolean
                .parseBoolean(Properties.getProperties().getProperty("api_key_authentication", "true"));

        if (!apiEnabled) {
            return new ApiResponse(false, "API not enabled");
        }

        String publicKey = request.queryParams("pub");
        String signedKey = request.queryParams("sig");
        String reponames = request.queryParams("reponame");
        String repourls = request.queryParams("repourl");
        String repotype = request.queryParams("repotype");
        String repousername = request.queryParams("repousername");
        String repopassword = request.queryParams("repopassword");
        String reposource = request.queryParams("reposource");
        String repobranch = request.queryParams("repobranch");

        if (reponames == null || reponames.trim().equals(Values.EMPTYSTRING)) {
            return new ApiResponse(false, "reponame is a required parameter");
        }

        if (repourls == null || repourls.trim().equals(Values.EMPTYSTRING)) {
            return new ApiResponse(false, "repourl is a required parameter");
        }

        if (repotype == null) {
            return new ApiResponse(false, "repotype is a required parameter");
        }

        if (repousername == null) {
            return new ApiResponse(false, "repousername is a required parameter");
        }

        if (repopassword == null) {
            return new ApiResponse(false, "repopassword is a required parameter");
        }

        if (reposource == null) {
            return new ApiResponse(false, "reposource is a required parameter");
        }

        if (repobranch == null) {
            return new ApiResponse(false, "repobranch is a required parameter");
        }

        if (apiAuth) {
            if (publicKey == null || publicKey.trim().equals(Values.EMPTYSTRING)) {
                return new ApiResponse(false, "pub is a required parameter");
            }

            if (signedKey == null || signedKey.trim().equals(Values.EMPTYSTRING)) {
                return new ApiResponse(false, "sig is a required parameter");
            }

            String toValidate = String.format(
                    "pub=%s&reponame=%s&repourl=%s&repotype=%s&repousername=%s&repopassword=%s&reposource=%s&repobranch=%s",
                    URLEncoder.encode(publicKey), URLEncoder.encode(reponames), URLEncoder.encode(repourls),
                    URLEncoder.encode(repotype), URLEncoder.encode(repousername),
                    URLEncoder.encode(repopassword), URLEncoder.encode(reposource),
                    URLEncoder.encode(repobranch));

            boolean validRequest = apiService.validateRequest(publicKey, signedKey, toValidate);

            if (!validRequest) {
                return new ApiResponse(false, "invalid signed url");
            }
        }

        // Clean
        if (repobranch == null || repobranch.trim().equals(Values.EMPTYSTRING)) {
            repobranch = "master";
        }

        repotype = repotype.trim().toLowerCase();
        if (!"git".equals(repotype) && !"svn".equals(repotype)) {
            repotype = "git";
        }

        RepoResult repoResult = repo.getRepoByName(reponames);

        if (repoResult != null) {
            return new ApiResponse(false, "repository name already exists");
        }

        repo.saveRepo(new RepoResult(-1, reponames, repotype, repourls, repousername, repopassword, reposource,
                repobranch));

        return new ApiResponse(true, "added repository successfully");
    }, new JsonTransformer());

    get("/api/repo/delete/", "application/json", (request, response) -> {
        boolean apiEnabled = Boolean
                .parseBoolean(Properties.getProperties().getProperty("api_enabled", "false"));
        boolean apiAuth = Boolean
                .parseBoolean(Properties.getProperties().getProperty("api_key_authentication", "true"));

        if (!apiEnabled) {
            return new ApiResponse(false, "API not enabled");
        }

        String publicKey = request.queryParams("pub");
        String signedKey = request.queryParams("sig");
        String reponames = request.queryParams("reponame");

        if (reponames == null || reponames.trim().equals(Values.EMPTYSTRING)) {
            return new ApiResponse(false, "reponame is a required parameter");
        }

        if (apiAuth) {
            if (publicKey == null || publicKey.trim().equals(Values.EMPTYSTRING)) {
                return new ApiResponse(false, "pub is a required parameter");
            }

            if (signedKey == null || signedKey.trim().equals(Values.EMPTYSTRING)) {
                return new ApiResponse(false, "sig is a required parameter");
            }

            String toValidate = String.format("pub=%s&reponame=%s", URLEncoder.encode(publicKey),
                    URLEncoder.encode(reponames));

            boolean validRequest = apiService.validateRequest(publicKey, signedKey, toValidate);

            if (!validRequest) {
                return new ApiResponse(false, "invalid signed url");
            }
        }

        RepoResult rr = repo.getRepoByName(reponames);
        if (rr == null) {
            return new ApiResponse(false, "repository already deleted");
        }

        Singleton.getUniqueDeleteRepoQueue().add(rr);

        return new ApiResponse(true, "repository queued for deletion");
    }, new JsonTransformer());

    get("/api/repo/list/", "application/json", (request, response) -> {
        boolean apiEnabled = Boolean
                .parseBoolean(Properties.getProperties().getProperty("api_enabled", "false"));
        boolean apiAuth = Boolean
                .parseBoolean(Properties.getProperties().getProperty("api_key_authentication", "true"));

        if (!apiEnabled) {
            return new ApiResponse(false, "API not enabled");
        }

        String publicKey = request.queryParams("pub");
        String signedKey = request.queryParams("sig");

        if (apiAuth) {
            if (publicKey == null || publicKey.trim().equals(Values.EMPTYSTRING)) {
                return new ApiResponse(false, "pub is a required parameter");
            }

            if (signedKey == null || signedKey.trim().equals(Values.EMPTYSTRING)) {
                return new ApiResponse(false, "sig is a required parameter");
            }

            String toValidate = String.format("pub=%s", URLEncoder.encode(publicKey));

            boolean validRequest = apiService.validateRequest(publicKey, signedKey, toValidate);

            if (!validRequest) {
                return new ApiResponse(false, "invalid signed url");
            }
        }

        List<RepoResult> repoResultList = repo.getAllRepo();

        return new RepoResultApiResponse(true, Values.EMPTYSTRING, repoResultList);
    }, new JsonTransformer());

    get("/admin/", (request, response) -> {
        if (getAuthenticatedUser(request) == null) {
            response.redirect("/login/");
            halt();
            return null;
        }

        CodeSearcher cs = new CodeSearcher();

        Map<String, Object> map = new HashMap<>();

        map.put("repoCount", repo.getRepoCount());
        map.put("numDocs", cs.getTotalNumberDocumentsIndexed());

        map.put("numSearches", statsService.getSearchCount());
        map.put("uptime", statsService.getUptime());

        // Put all properties here
        map.put(Values.SQLITEFILE,
                Properties.getProperties().getProperty(Values.SQLITEFILE, Values.DEFAULTSQLITEFILE));
        map.put(Values.SERVERPORT,
                Properties.getProperties().getProperty(Values.SERVERPORT, Values.DEFAULTSERVERPORT));
        map.put(Values.REPOSITORYLOCATION, Properties.getProperties().getProperty(Values.REPOSITORYLOCATION,
                Values.DEFAULTREPOSITORYLOCATION));
        map.put(Values.INDEXLOCATION,
                Properties.getProperties().getProperty(Values.INDEXLOCATION, Values.DEFAULTINDEXLOCATION));
        map.put(Values.FACETSLOCATION,
                Properties.getProperties().getProperty(Values.FACETSLOCATION, Values.DEFAULTFACETSLOCATION));
        map.put(Values.CHECKREPOCHANGES, Properties.getProperties().getProperty(Values.CHECKREPOCHANGES,
                Values.DEFAULTCHECKREPOCHANGES));
        map.put(Values.ONLYLOCALHOST,
                Properties.getProperties().getProperty(Values.ONLYLOCALHOST, Values.DEFAULTONLYLOCALHOST));
        map.put(Values.LOWMEMORY,
                Properties.getProperties().getProperty(Values.LOWMEMORY, Values.DEFAULTLOWMEMORY));
        map.put(Values.SPELLINGCORRECTORSIZE, Properties.getProperties()
                .getProperty(Values.SPELLINGCORRECTORSIZE, Values.DEFAULTSPELLINGCORRECTORSIZE));
        map.put(Values.USESYSTEMGIT,
                Properties.getProperties().getProperty(Values.USESYSTEMGIT, Values.DEFAULTUSESYSTEMGIT));
        map.put(Values.GITBINARYPATH,
                Properties.getProperties().getProperty(Values.GITBINARYPATH, Values.DEFAULTGITBINARYPATH));
        map.put(Values.APIENABLED,
                Properties.getProperties().getProperty(Values.APIENABLED, Values.DEFAULTAPIENABLED));
        map.put(Values.APIKEYAUTH,
                Properties.getProperties().getProperty(Values.APIKEYAUTH, Values.DEFAULTAPIKEYAUTH));
        map.put(Values.SVNBINARYPATH,
                Properties.getProperties().getProperty(Values.SVNBINARYPATH, Values.DEFAULTSVNBINARYPATH));
        map.put(Values.SVNENABLED,
                Properties.getProperties().getProperty(Values.SVNENABLED, Values.DEFAULTSVNENABLED));
        map.put(Values.MAXDOCUMENTQUEUESIZE, Properties.getProperties().getProperty(Values.MAXDOCUMENTQUEUESIZE,
                Values.DEFAULTMAXDOCUMENTQUEUESIZE));
        map.put(Values.MAXDOCUMENTQUEUELINESIZE, Properties.getProperties()
                .getProperty(Values.MAXDOCUMENTQUEUELINESIZE, Values.DEFAULTMAXDOCUMENTQUEUELINESIZE));
        map.put(Values.MAXFILELINEDEPTH, Properties.getProperties().getProperty(Values.MAXFILELINEDEPTH,
                Values.DEFAULTMAXFILELINEDEPTH));

        map.put("deletionQueue", Singleton.getUniqueDeleteRepoQueue().size());

        map.put("version", VERSION);

        map.put("logoImage", getLogo());
        map.put("isCommunity", ISCOMMUNITY);
        return new ModelAndView(map, "admin.ftl");
    }, new FreeMarkerEngine());

    get("/admin/repo/", (request, response) -> {
        if (getAuthenticatedUser(request) == null) {
            response.redirect("/login/");
            halt();
            return null;
        }

        int repoCount = repo.getRepoCount();
        String offSet = request.queryParams("offset");
        String searchQuery = request.queryParams("q");
        int indexOffset = 0;

        Map<String, Object> map = new HashMap<>();

        if (offSet != null) {
            try {
                indexOffset = Integer.parseInt(offSet);
                if (indexOffset > repoCount || indexOffset < 0) {
                    indexOffset = 0;
                }

            } catch (NumberFormatException ex) {
                indexOffset = 0;
            }
        }

        if (searchQuery != null) {
            map.put("repoResults", repo.searchRepo(searchQuery));
        } else {
            map.put("repoResults", repo.getPagedRepo(indexOffset, 100));
        }

        map.put("searchQuery", searchQuery);
        map.put("hasPrevious", indexOffset > 0);
        map.put("hasNext", (indexOffset + 100) < repoCount);
        map.put("previousOffset", "" + (indexOffset - 100));
        map.put("nextOffset", "" + (indexOffset + 100));

        map.put("logoImage", getLogo());
        map.put("isCommunity", ISCOMMUNITY);
        return new ModelAndView(map, "admin_repo.ftl");
    }, new FreeMarkerEngine());

    get("/admin/bulk/", (request, response) -> {
        if (getAuthenticatedUser(request) == null) {
            response.redirect("/login/");
            halt();
            return null;
        }

        Map<String, Object> map = new HashMap<>();

        map.put("logoImage", getLogo());
        map.put("isCommunity", ISCOMMUNITY);
        return new ModelAndView(map, "admin_bulk.ftl");
    }, new FreeMarkerEngine());

    get("/admin/api/", (request, response) -> {
        if (getAuthenticatedUser(request) == null) {
            response.redirect("/login/");
            halt();
            return null;
        }

        Map<String, Object> map = new HashMap<>();

        map.put("apiKeys", api.getAllApi());

        boolean apiEnabled = Boolean
                .parseBoolean(Properties.getProperties().getProperty("api_enabled", "false"));
        boolean apiAuth = Boolean
                .parseBoolean(Properties.getProperties().getProperty("api_key_authentication", "true"));

        map.put("apiAuthentication", apiEnabled && apiAuth);
        map.put("logoImage", getLogo());
        map.put("isCommunity", ISCOMMUNITY);
        return new ModelAndView(map, "admin_api.ftl");
    }, new FreeMarkerEngine());

    post("/admin/api/", (request, response) -> {
        if (getAuthenticatedUser(request) == null) {
            response.redirect("/login/");
            halt();
            return null;
        }

        apiService.createKeys();

        response.redirect("/admin/api/");
        halt();
        return null;
    }, new FreeMarkerEngine());

    get("/admin/api/delete/", "application/json", (request, response) -> {
        if (getAuthenticatedUser(request) == null || !request.queryParams().contains("publicKey")) {
            response.redirect("/login/");
            halt();
            return false;
        }

        String publicKey = request.queryParams("publicKey");
        apiService.deleteKey(publicKey);

        return true;
    }, new JsonTransformer());

    get("/admin/settings/", (request, response) -> {
        if (getAuthenticatedUser(request) == null) {
            response.redirect("/login/");
            halt();
            return null;
        }

        String[] highlighters = "agate,androidstudio,arta,ascetic,atelier-cave.dark,atelier-cave.light,atelier-dune.dark,atelier-dune.light,atelier-estuary.dark,atelier-estuary.light,atelier-forest.dark,atelier-forest.light,atelier-heath.dark,atelier-heath.light,atelier-lakeside.dark,atelier-lakeside.light,atelier-plateau.dark,atelier-plateau.light,atelier-savanna.dark,atelier-savanna.light,atelier-seaside.dark,atelier-seaside.light,atelier-sulphurpool.dark,atelier-sulphurpool.light,brown_paper,codepen-embed,color-brewer,dark,darkula,default,docco,far,foundation,github-gist,github,googlecode,grayscale,hopscotch,hybrid,idea,ir_black,kimbie.dark,kimbie.light,magula,mono-blue,monokai,monokai_sublime,obsidian,paraiso.dark,paraiso.light,pojoaque,railscasts,rainbow,school_book,solarized_dark,solarized_light,sunburst,tomorrow-night-blue,tomorrow-night-bright,tomorrow-night-eighties,tomorrow-night,tomorrow,vs,xcode,zenburn"
                .split(",");

        Map<String, Object> map = new HashMap<>();
        map.put("logoImage", getLogo());
        map.put("syntaxHighlighter", getSyntaxHighlighter());
        map.put("highlighters", highlighters);
        map.put("averageSalary", "" + (int) getAverageSalary());
        map.put("matchLines", "" + (int) getMatchLines());
        map.put("maxLineDepth", "" + (int) getMaxLineDepth());
        map.put("minifiedLength", "" + (int) getMinifiedLength());
        map.put("isCommunity", ISCOMMUNITY);

        return new ModelAndView(map, "admin_settings.ftl");
    }, new FreeMarkerEngine());

    get("/admin/reports/", (request, response) -> {
        if (getAuthenticatedUser(request) == null) {
            response.redirect("/login/");
            halt();
            return null;
        }

        Map<String, Object> map = new HashMap<>();
        map.put("logoImage", getLogo());
        map.put("isCommunity", ISCOMMUNITY);

        return new ModelAndView(map, "admin_reports.ftl");
    }, new FreeMarkerEngine());

    post("/admin/settings/", (request, response) -> {
        if (getAuthenticatedUser(request) == null) {
            response.redirect("/login/");
            halt();
            return null;
        }

        if (ISCOMMUNITY) {
            response.redirect("/admin/settings/");
            halt();
        }

        String logo = request.queryParams("logo").trim();
        String syntaxHighlighter = request.queryParams("syntaxhighligher");

        try {
            double averageSalary = Double.parseDouble(request.queryParams("averagesalary"));
            data.saveData(Values.AVERAGESALARY, "" + (int) averageSalary);
        } catch (NumberFormatException ex) {
            data.saveData(Values.AVERAGESALARY, Values.DEFAULTAVERAGESALARY);
        }

        try {
            double averageSalary = Double.parseDouble(request.queryParams("matchlines"));
            data.saveData(Values.MATCHLINES, "" + (int) averageSalary);
        } catch (NumberFormatException ex) {
            data.saveData(Values.MATCHLINES, Values.DEFAULTMATCHLINES);
        }

        try {
            double averageSalary = Double.parseDouble(request.queryParams("maxlinedepth"));
            data.saveData(Values.MAXLINEDEPTH, "" + (int) averageSalary);
        } catch (NumberFormatException ex) {
            data.saveData(Values.MAXLINEDEPTH, Values.DEFAULTMAXLINEDEPTH);
        }

        try {
            double minifiedlength = Double.parseDouble(request.queryParams("minifiedlength"));
            data.saveData(Values.MINIFIEDLENGTH, "" + (int) minifiedlength);
        } catch (NumberFormatException ex) {
            data.saveData(Values.MINIFIEDLENGTH, Values.DEFAULTMINIFIEDLENGTH);
        }

        data.saveData(Values.LOGO, logo);
        data.saveData(Values.SYNTAXHIGHLIGHTER, syntaxHighlighter);

        // Redo anything that requires updates at this point
        scl = Singleton.getSearchcodeLib(data);

        response.redirect("/admin/settings/");
        halt();
        return null;
    }, new FreeMarkerEngine());

    post("/admin/bulk/", (request, response) -> {
        if (getAuthenticatedUser(request) == null) {
            response.redirect("/login/");
            halt();
            return null;
        }

        String repos = request.queryParams("repos");
        String[] repolines = repos.split("\\r?\\n");
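        // each line: reponame,scm,url,username,password,source,branch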

        for (String line : repolines) {
            String[] repoparams = line.split(",", -1);

            if (repoparams.length == 7) {

                String branch = repoparams[6].trim();
                if (branch.equals(Values.EMPTYSTRING)) {
                    branch = "master";
                }

                String scm = repoparams[1].trim().toLowerCase();
                if (scm.equals(Values.EMPTYSTRING)) {
                    scm = "git";
                }

                RepoResult rr = repo.getRepoByName(repoparams[0]);

                if (rr == null) {
                    repo.saveRepo(new RepoResult(-1, repoparams[0], scm, repoparams[2], repoparams[3],
                            repoparams[4], repoparams[5], branch));
                }
            }
        }

        response.redirect("/admin/bulk/");
        halt();
        return null;
    }, new FreeMarkerEngine());

    post("/admin/repo/", (request, response) -> {
        if (getAuthenticatedUser(request) == null) {
            response.redirect("/login/");
            halt();
            return null;
        }

        String[] reponames = request.queryParamsValues("reponame");
        String[] reposcms = request.queryParamsValues("reposcm");
        String[] repourls = request.queryParamsValues("repourl");
        String[] repousername = request.queryParamsValues("repousername");
        String[] repopassword = request.queryParamsValues("repopassword");
        String[] reposource = request.queryParamsValues("reposource");
        String[] repobranch = request.queryParamsValues("repobranch");

        for (int i = 0; i < reponames.length; i++) {
            if (reponames[i].trim().length() != 0) {

                String branch = repobranch[i].trim();
                if (branch.equals(Values.EMPTYSTRING)) {
                    branch = "master";
                }

                repo.saveRepo(new RepoResult(-1, reponames[i], reposcms[i], repourls[i], repousername[i],
                        repopassword[i], reposource[i], branch));
            }
        }

        response.redirect("/admin/repo/");
        halt();
        return null;
    }, new FreeMarkerEngine());

    get("/login/", (request, response) -> {
        if (getAuthenticatedUser(request) != null) {
            response.redirect("/admin/");
            halt();
            return null;
        }
        Map<String, Object> map = new HashMap<>();
        map.put("logoImage", getLogo());
        map.put("isCommunity", ISCOMMUNITY);
        return new ModelAndView(map, "login.ftl");
    }, new FreeMarkerEngine());

    post("/login/", (req, res) -> {
        if (req.queryParams().contains("password") && req.queryParams("password")
                .equals(com.searchcode.app.util.Properties.getProperties().getProperty("password"))) {
            addAuthenticatedUser(req);
            res.redirect("/admin/");
            halt();
        }
        Map<String, Object> map = new HashMap<>();
        map.put("logoImage", getLogo());
        map.put("isCommunity", ISCOMMUNITY);
        return new ModelAndView(map, "login.ftl");
    }, new FreeMarkerEngine());

    get("/logout/", (req, res) -> {
        removeAuthenticatedUser(req);
        res.redirect("/");
        return null;
    });

    get("/admin/delete/", "application/json", (request, response) -> {
        if (getAuthenticatedUser(request) == null || !request.queryParams().contains("repoName")) {
            response.redirect("/login/");
            halt();
            return false;
        }

        String repoName = request.queryParams("repoName");
        RepoResult rr = repo.getRepoByName(repoName);

        if (rr != null) {
            Singleton.getUniqueDeleteRepoQueue().add(rr);
        }

        return true;
    }, new JsonTransformer());

    get("/admin/checkversion/", "application/json", (request, response) -> {
        if (getAuthenticatedUser(request) == null) {
            response.redirect("/login/");
            halt();
            return false;
        }

        String version;
        try {
            version = IOUtils.toString(new URL("https://searchcode.com/product/version/")).replace("\"",
                    Values.EMPTYSTRING);
        } catch (IOException ex) {
            return "Unable to determine if running the latest version. Check https://searchcode.com/product/download/ for the latest release.";
        }

        if (App.VERSION.equals(version)) {
            return "Your searchcode server version " + version + " is the latest.";
        } else {
            return "Your searchcode server version " + App.VERSION
                    + " instance is out of date. The latest version is " + version + ".";
        }
    }, new JsonTransformer());

    get("/file/:codeid/:reponame/*", (request, response) -> {
        Map<String, Object> map = new HashMap<>();

        CodeSearcher cs = new CodeSearcher();
        Cocomo2 coco = new Cocomo2();

        StringBuilder code = new StringBuilder();

        String fileName = Values.EMPTYSTRING;
        if (request.splat().length != 0) {
            fileName = request.splat()[0];
        }

        CodeResult codeResult = cs.getByRepoFileName(request.params(":reponame"), fileName);

        if (codeResult == null) {
            int codeid = Integer.parseInt(request.params(":codeid"));
            codeResult = cs.getById(codeid);
        }

        if (codeResult == null) {
            response.redirect("/404/");
            halt();
        }

        List<String> codeLines = codeResult.code;
        for (int i = 0; i < codeLines.size(); i++) {
            code.append("<span id=\"" + (i + 1) + "\"></span>");
            code.append(StringEscapeUtils.escapeHtml4(codeLines.get(i)));
            code.append("\n");
        }

        boolean highlight = true;
        if (Integer.parseInt(codeResult.codeLines) > 1000) {
            highlight = false;
        }

        RepoResult repoResult = repo.getRepoByName(codeResult.repoName);

        if (repoResult != null) {
            map.put("source", repoResult.getSource());
        }

        map.put("fileName", codeResult.fileName);
        map.put("codePath", codeResult.codePath);
        map.put("codeLength", codeResult.codeLines);
        map.put("languageName", codeResult.languageName);
        map.put("md5Hash", codeResult.md5hash);
        map.put("repoName", codeResult.repoName);
        map.put("highlight", highlight);
        map.put("repoLocation", codeResult.getRepoLocation());

        map.put("codeValue", code.toString());
        map.put("highligher", getSyntaxHighlighter());
        map.put("codeOwner", codeResult.getCodeOwner());

        double estimatedEffort = coco.estimateEffort(scl.countFilteredLines(codeResult.getCode()));
        int estimatedCost = (int) coco.estimateCost(estimatedEffort, getAverageSalary());
        if (estimatedCost != 0 && !scl.languageCostIgnore(codeResult.getLanguageName())) {
            map.put("estimatedCost", estimatedCost);
        }

        map.put("logoImage", getLogo());
        map.put("isCommunity", ISCOMMUNITY);
        return new ModelAndView(map, "coderesult.ftl");
    }, new FreeMarkerEngine());

    /**
     * Deprecated; should not be used.
     * TODO delete this method
     */
    get("/codesearch/view/:codeid", (request, response) -> {
        Map<String, Object> map = new HashMap<>();

        int codeid = Integer.parseInt(request.params(":codeid"));
        CodeSearcher cs = new CodeSearcher();
        Cocomo2 coco = new Cocomo2();

        StringBuilder code = new StringBuilder();

        // escape all the lines and include deeplink for line number
        CodeResult codeResult = cs.getById(codeid);

        if (codeResult == null) {
            response.redirect("/404/");
            halt();
        }

        List<String> codeLines = codeResult.code;
        for (int i = 0; i < codeLines.size(); i++) {
            code.append("<span id=\"" + (i + 1) + "\"></span>");
            code.append(StringEscapeUtils.escapeHtml4(codeLines.get(i)));
            code.append("\n");
        }

        boolean highlight = true;
        if (Integer.parseInt(codeResult.codeLines) > 1000) {
            highlight = false;
        }

        RepoResult repoResult = repo.getRepoByName(codeResult.repoName);

        if (repoResult != null) {
            map.put("source", repoResult.getSource());
        }

        map.put("fileName", codeResult.fileName);
        map.put("codePath", codeResult.codePath);
        map.put("codeLength", codeResult.codeLines);
        map.put("languageName", codeResult.languageName);
        map.put("md5Hash", codeResult.md5hash);
        map.put("repoName", codeResult.repoName);
        map.put("highlight", highlight);
        map.put("repoLocation", codeResult.getRepoLocation());

        map.put("codeValue", code.toString());
        map.put("highligher", getSyntaxHighlighter());
        map.put("codeOwner", codeResult.getCodeOwner());

        double estimatedEffort = coco.estimateEffort(scl.countFilteredLines(codeResult.getCode()));
        int estimatedCost = (int) coco.estimateCost(estimatedEffort, getAverageSalary());
        if (estimatedCost != 0 && !scl.languageCostIgnore(codeResult.getLanguageName())) {
            map.put("estimatedCost", estimatedCost);
        }

        map.put("logoImage", getLogo());
        map.put("isCommunity", ISCOMMUNITY);
        return new ModelAndView(map, "coderesult.ftl");
    }, new FreeMarkerEngine());

    get("/documentation/", (request, response) -> {
        Map<String, Object> map = new HashMap<>();

        map.put("logoImage", getLogo());
        map.put("isCommunity", ISCOMMUNITY);
        return new ModelAndView(map, "documentation.ftl");
    }, new FreeMarkerEngine());

    get("/search_test/", (request, response) -> {
        Map<String, Object> map = new HashMap<>();

        map.put("logoImage", getLogo());
        map.put("isCommunity", ISCOMMUNITY);
        return new ModelAndView(map, "search_test.ftl");
    }, new FreeMarkerEngine());

    get("/404/", (request, response) -> {
        Map<String, Object> map = new HashMap<>();

        map.put("logoImage", getLogo());
        map.put("isCommunity", ISCOMMUNITY);
        return new ModelAndView(map, "404.ftl");

    }, new FreeMarkerEngine());

    /**
     * Test that was being used to display blame information
     */
    //        get("/test/:reponame/*", (request, response) -> {
    //            User user = injector.getInstance(User.class);
    //            user.Blame(request.params(":reponame"), request.splat()[0]);
    //            return "";
    //        }, new JsonTransformer());
}

From source file:de.huxhorn.lilith.Lilith.java

public static void main(String[] args) {
    {
        // initialize java.util.logging to use slf4j...
        Handler handler = new Slf4JHandler();
        java.util.logging.Logger rootLogger = java.util.logging.Logger.getLogger("");
        rootLogger.addHandler(handler);
        rootLogger.setLevel(java.util.logging.Level.WARNING);
    }

    StringBuilder appTitle = new StringBuilder();
    appTitle.append(APP_NAME).append(" V").append(APP_VERSION);
    if (APP_SNAPSHOT) {
        // always append timestamp for SNAPSHOT
        appTitle.append(" (").append(APP_TIMESTAMP_DATE).append(")");
    }

    CommandLineArgs cl = new CommandLineArgs();
    JCommander commander = new JCommander(cl);
    Cat cat = new Cat();
    commander.addCommand(Cat.NAME, cat);
    Tail tail = new Tail();
    commander.addCommand(Tail.NAME, tail);
    Filter filter = new Filter();
    commander.addCommand(Filter.NAME, filter);
    Index index = new Index();
    commander.addCommand(Index.NAME, index);
    Md5 md5 = new Md5();
    commander.addCommand(Md5.NAME, md5);
    Help help = new Help();
    commander.addCommand(Help.NAME, help);

    try {
        commander.parse(args);
    } catch (ParameterException ex) {
        printAppInfo(appTitle.toString(), false);
        System.out.println(ex.getMessage() + "\n");
        printHelp(commander);
        System.exit(-1);
    }
    if (cl.verbose) {
        if (!APP_SNAPSHOT) {
            // timestamp is always appended for SNAPSHOT
            // don't append it twice
            appTitle.append(" (").append(APP_TIMESTAMP_DATE).append(")");
        }
        appTitle.append(" - ").append(APP_REVISION);
    }

    String appTitleString = appTitle.toString();
    if (cl.showHelp) {
        printAppInfo(appTitleString, false);
        printHelp(commander);
        System.exit(0);
    }

    String command = commander.getParsedCommand();
    // don't print app info in case of cat, tail or filter
    if (!Tail.NAME.equals(command) && !Cat.NAME.equals(command) && !Filter.NAME.equals(command)) {
        printAppInfo(appTitleString, true);
    }

    if (cl.logbackConfig != null) {
        File logbackFile = new File(cl.logbackConfig);
        if (!logbackFile.isFile()) {
            System.out.println(logbackFile.getAbsolutePath() + " is not a valid file.");
            System.exit(-1);
        }
        try {
            initLogbackConfig(logbackFile.toURI().toURL());
        } catch (MalformedURLException e) {
            System.out.println("Failed to convert " + logbackFile.getAbsolutePath() + " to URL. " + e);
            System.exit(-1);
        }
    } else if (cl.verbose) {
        initVerboseLogging();
    }

    if (cl.printBuildTimestamp) {
        System.out.println("Build-Date     : " + APP_TIMESTAMP_DATE);
        System.out.println("Build-Revision : " + APP_REVISION);
        System.out.println("Build-Timestamp: " + APP_TIMESTAMP);
        System.exit(0);
    }

    if (Help.NAME.equals(command)) {
        commander.usage();
        if (help.commands == null || help.commands.size() == 0) {
            commander.usage(Help.NAME);
        } else {
            Map<String, JCommander> commands = commander.getCommands();
            for (String current : help.commands) {
                if (commands.containsKey(current)) {
                    commander.usage(current);
                } else {
                    System.out.println("Unknown command '" + current + "'!");
                }
            }
        }
        System.exit(0);
    }

    if (Md5.NAME.equals(command)) {
        List<String> files = md5.files;
        if (files == null || files.isEmpty()) {
            printHelp(commander);
            System.out.println("Missing files!");
            System.exit(-1);
        }
        boolean error = false;
        for (String current : files) {
            if (!CreateMd5Command.createMd5(new File(current))) {
                error = true;
            }
        }
        if (error) {
            System.exit(-1);
        }
        System.exit(0);
    }

    if (Index.NAME.equals(command)) {
        if (!cl.verbose && cl.logbackConfig == null) {
            initCLILogging();
        }
        List<String> files = index.files;
        if (files == null || files.size() == 0) {
            printHelp(commander);
            System.exit(-1);
        }
        boolean error = false;
        for (String current : files) {
            if (!IndexCommand.indexLogFile(new File(current))) {
                error = true;
            }
        }
        if (error) {
            System.exit(-1);
        }
        System.exit(0);
    }

    if (Cat.NAME.equals(command)) {
        if (!cl.verbose && cl.logbackConfig == null) {
            initCLILogging();
        }
        List<String> files = cat.files;
        if (files == null || files.size() != 1) {
            printHelp(commander);
            System.exit(-1);
        }
        if (CatCommand.catFile(new File(files.get(0)), cat.pattern, cat.numberOfLines)) {
            System.exit(0);
        }
        System.exit(-1);
    }

    if (Tail.NAME.equals(command)) {
        if (!cl.verbose && cl.logbackConfig == null) {
            initCLILogging();
        }
        List<String> files = tail.files;
        if (files == null || files.size() != 1) {
            printHelp(commander);
            System.exit(-1);
        }
        if (TailCommand.tailFile(new File(files.get(0)), tail.pattern, tail.numberOfLines, tail.keepRunning)) {
            System.exit(0);
        }
        System.exit(-1);
    }

    if (Filter.NAME.equals(command)) {
        if (!cl.verbose && cl.logbackConfig == null) {
            initCLILogging();
        }
        if (FilterCommand.filterFile(new File(filter.input), new File(filter.output),
                new File(filter.condition), filter.searchString, filter.pattern, filter.overwrite,
                filter.keepRunning, filter.exclusive)) {
            System.exit(0);
        }
        System.exit(-1);
    }

    if (cl.flushPreferences) {
        flushPreferences();
    }

    if (cl.exportPreferencesFile != null) {
        exportPreferences(cl.exportPreferencesFile);
    }

    if (cl.importPreferencesFile != null) {
        importPreferences(cl.importPreferencesFile);
    }

    if (cl.exportPreferencesFile != null || cl.importPreferencesFile != null) {
        System.exit(0);
    }

    if (cl.flushLicensed) {
        flushLicensed();
    }

    startLilith(appTitleString);
}
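
The Cat and Tail branches above insist on exactly one file argument before indexing the list, since files.get(0) would throw IndexOutOfBoundsException on an empty list. A minimal, self-contained sketch of that guard-then-get pattern (class and message names are illustrative, not from the original):

import java.util.Arrays;
import java.util.List;

public class SingleFileArg {
    public static void main(String[] args) {
        List<String> files = Arrays.asList(args);
        // Mirror the check above: exactly one file, or print usage and bail out.
        if (files.size() != 1) {
            System.err.println("Expected exactly one file argument");
            System.exit(-1);
        }
        // Safe now: the list is known to have an element at index 0.
        System.out.println("Operating on: " + files.get(0));
    }
}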

From source file:eu.amidst.core.inference.MAPInferenceExperiments.java

/**
 * Entry point for the MAP inference experiments.
 * @param args Array of options: "filename variable a b N useVMP" if the variable is continuous, or "filename variable w N useVMP" if it is discrete
 */
public static void main(String[] args) throws Exception {

    String filename = ""; //Filename with the Bayesian Network
    //filename = "networks/randomlyGeneratedBN.bn";
    //BayesianNetworkGenerator.generateBNtoFile(nDiscrete, nStates, nContin, nLinks, seedNetwork, filename);
    //BayesianNetwork bn = BayesianNetworkLoader.loadFromFile(filename);

    int seedNetwork = 1253473;

    int nDiscrete = 50;
    int nStates = 2;
    int nContin = 50;
    int nLinks = (int) Math.round(1.3 * (nDiscrete + nContin));

    BayesianNetworkGenerator.setSeed(seedNetwork);
    BayesianNetworkGenerator.setNumberOfGaussianVars(nContin);
    BayesianNetworkGenerator.setNumberOfMultinomialVars(nDiscrete, nStates);
    BayesianNetworkGenerator.setNumberOfLinks(nLinks);

    BayesianNetwork bn = BayesianNetworkGenerator.generateBayesianNetwork();

    int seed = seedNetwork + 23715;

    if (Main.VERBOSE)
        System.out.println(bn.getDAG());

    if (Main.VERBOSE)
        System.out.println(bn.toString());

    MAPInference mapInference = new MAPInference();
    mapInference.setModel(bn);
    mapInference.setParallelMode(true);
    mapInference.setSampleSize(1);

    List<Variable> causalOrder = Utils.getTopologicalOrder(mapInference.getOriginalModel().getDAG());

    if (Main.VERBOSE)
        System.out.println("CausalOrder: "
                + Arrays.toString(Utils.getTopologicalOrder(mapInference.getOriginalModel().getDAG()).stream()
                        .map(Variable::getName).toArray()));
    if (Main.VERBOSE)
        System.out.println();

    int parallelSamples = 20;
    int samplingMethodSize = 100000;
    mapInference.setSampleSize(parallelSamples);

    long timeStart;
    long timeStop;
    double execTime;
    Assignment mapEstimate;

    /***********************************************
     *        INCLUDING EVIDENCE
     ************************************************/

    double observedVariablesRate = 0.05;
    Assignment evidence = randomEvidence(seed, observedVariablesRate, bn);

    mapInference.setEvidence(evidence);
    //if (Main.VERBOSE) System.out.println(evidence.outputString());

    /***********************************************
     *        VARIABLES OF INTEREST
     ************************************************/

    Variable varInterest1 = causalOrder.get(6);
    Variable varInterest2 = causalOrder.get(7);
    Variable varInterest3 = causalOrder.get(60);

    List<Variable> varsInterest = new ArrayList<>();
    varsInterest.add(varInterest1);
    varsInterest.add(varInterest2);
    varsInterest.add(varInterest3);
    mapInference.setMAPVariables(varsInterest);

    if (Main.VERBOSE)
        System.out.println("MAP Variables of Interest: "
                + Arrays.toString(varsInterest.stream().map(Variable::getName).toArray()));
    if (Main.VERBOSE)
        System.out.println();

    /***********************************************
     *        SIMULATED ANNEALING
     ************************************************/

    // MAP INFERENCE WITH SIMULATED ANNEALING, MOVING ALL VARIABLES EACH TIME
    timeStart = System.nanoTime();
    mapInference.runInference(MAPInference.SearchAlgorithm.SA_GLOBAL);

    mapEstimate = mapInference.getEstimate();
    if (Main.VERBOSE)
        System.out.println("MAP estimate  (SA.All): " + mapEstimate.outputString(varsInterest));
    if (Main.VERBOSE)
        System.out.println(
                "with (unnormalized) probability: " + Math.exp(mapInference.getLogProbabilityOfEstimate()));
    timeStop = System.nanoTime();
    execTime = (double) (timeStop - timeStart) / 1000000000.0;
    if (Main.VERBOSE)
        System.out.println("computed in: " + Double.toString(execTime) + " seconds");
    if (Main.VERBOSE)
        System.out.println();

    // MAP INFERENCE WITH SIMULATED ANNEALING, MOVING SOME VARIABLES EACH TIME
    timeStart = System.nanoTime();
    mapInference.runInference(MAPInference.SearchAlgorithm.SA_LOCAL);

    mapEstimate = mapInference.getEstimate();
    if (Main.VERBOSE)
        System.out.println("MAP estimate  (SA.Some): " + mapEstimate.outputString(varsInterest));
    if (Main.VERBOSE)
        System.out.println(
                "with (unnormalized) probability: " + Math.exp(mapInference.getLogProbabilityOfEstimate()));
    timeStop = System.nanoTime();
    execTime = (double) (timeStop - timeStart) / 1000000000.0;
    if (Main.VERBOSE)
        System.out.println("computed in: " + Double.toString(execTime) + " seconds");
    if (Main.VERBOSE)
        System.out.println();

    /***********************************************
     *        HILL CLIMBING
     ************************************************/

    //  MAP INFERENCE WITH HILL CLIMBING, MOVING ALL VARIABLES EACH TIME
    timeStart = System.nanoTime();
    mapInference.runInference(MAPInference.SearchAlgorithm.HC_GLOBAL);

    mapEstimate = mapInference.getEstimate();
    if (Main.VERBOSE)
        System.out.println("MAP estimate  (HC.All): " + mapEstimate.outputString(varsInterest));
    if (Main.VERBOSE)
        System.out.println(
                "with (unnormalized) probability: " + Math.exp(mapInference.getLogProbabilityOfEstimate()));
    timeStop = System.nanoTime();
    execTime = (double) (timeStop - timeStart) / 1000000000.0;
    if (Main.VERBOSE)
        System.out.println("computed in: " + Double.toString(execTime) + " seconds");
    if (Main.VERBOSE)
        System.out.println();

    //  MAP INFERENCE WITH HILL CLIMBING, MOVING SOME VARIABLES EACH TIME
    timeStart = System.nanoTime();
    mapInference.runInference(MAPInference.SearchAlgorithm.HC_LOCAL);

    mapEstimate = mapInference.getEstimate();
    if (Main.VERBOSE)
        System.out.println("MAP estimate  (HC.Some): " + mapEstimate.outputString(varsInterest));
    if (Main.VERBOSE)
        System.out.println(
                "with (unnormalized) probability: " + Math.exp(mapInference.getLogProbabilityOfEstimate()));
    timeStop = System.nanoTime();
    execTime = (double) (timeStop - timeStart) / 1000000000.0;
    if (Main.VERBOSE)
        System.out.println("computed in: " + Double.toString(execTime) + " seconds");
    if (Main.VERBOSE)
        System.out.println();

    /***********************************************
     *        SAMPLING
     ************************************************/

    // MAP INFERENCE WITH SIMULATION AND PICKING MAX
    mapInference.setSampleSize(samplingMethodSize);
    timeStart = System.nanoTime();
    mapInference.runInference(MAPInference.SearchAlgorithm.SAMPLING);

    mapEstimate = mapInference.getEstimate();
    if (Main.VERBOSE)
        System.out.println("MAP estimate (SAMPLING): " + mapEstimate.outputString(varsInterest));
    if (Main.VERBOSE)
        System.out.println("with probability: " + Math.exp(mapInference.getLogProbabilityOfEstimate()));
    timeStop = System.nanoTime();
    execTime = (double) (timeStop - timeStart) / 1000000000.0;
    if (Main.VERBOSE)
        System.out.println("computed in: " + Double.toString(execTime) + " seconds");
    if (Main.VERBOSE)
        System.out.println();

    // PROBABILITIES OF INDIVIDUAL CONFIGURATIONS

    double s1 = mapInference.estimateProbabilityOfPartialAssignment(mapEstimate);
    if (Main.VERBOSE)
        System.out.println(mapEstimate.outputString(varsInterest) + " with prob. " + s1);

    mapEstimate.setValue(varInterest2, 1);
    double s2 = mapInference.estimateProbabilityOfPartialAssignment(mapEstimate);
    if (Main.VERBOSE)
        System.out.println(mapEstimate.outputString(varsInterest) + " with prob. " + s2);

    mapEstimate.setValue(varInterest1, 1);
    mapEstimate.setValue(varInterest2, 0);
    double s3 = mapInference.estimateProbabilityOfPartialAssignment(mapEstimate);
    if (Main.VERBOSE)
        System.out.println(mapEstimate.outputString(varsInterest) + " with prob. " + s3);

    mapEstimate.setValue(varInterest2, 1);
    double s4 = mapInference.estimateProbabilityOfPartialAssignment(mapEstimate);
    if (Main.VERBOSE)
        System.out.println(mapEstimate.outputString(varsInterest) + " with prob. " + s4);

    double sum = s1 + s2 + s3 + s4;

    if (Main.VERBOSE)
        System.out.println();
    if (Main.VERBOSE)
        System.out.println("Sum = " + sum + "; Normalized probs: [V1=0,V2=0]=" + s1 / sum + ", [V1=0,V2=1]="
                + s2 / sum + ", [V1=1,V2=0]=" + s3 / sum + ", [V1=1,V2=1]=" + s4 / sum);

}
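
Note that causalOrder.get(60) above only succeeds because the generated network has nDiscrete + nContin = 100 variables; List.get throws IndexOutOfBoundsException for any index outside [0, size() - 1]. A small sketch of a bounds-checked accessor (a hypothetical helper, not part of the AMIDST API):

import java.util.Arrays;
import java.util.List;

public class SafeGet {
    // Hypothetical helper: returns the element at index, or null when out of range.
    static <T> T getOrNull(List<T> list, int index) {
        return (index >= 0 && index < list.size()) ? list.get(index) : null;
    }

    public static void main(String[] args) {
        List<String> causalOrder = Arrays.asList("A", "S", "L", "T");
        System.out.println(getOrNull(causalOrder, 2));  // L
        System.out.println(getOrNull(causalOrder, 60)); // null instead of an exception
    }
}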

From source file:edu.msu.cme.rdp.readseq.utils.QualityTrimmer.java

public static void main(String[] args) throws Exception {
    Options options = new Options();
    options.addOption("f", "fastq-out", false, "Write fastq instead of fasta file");
    options.addOption("l", "less-than", false, "Trim at <= instead of strictly =");
    options.addOption("i", "illumina", false, "Illumina trimming mode");

    FastqWriter fastqOut = null;
    FastaWriter fastaOut = null;

    byte qualTrim = -1;

    boolean writeFasta = true;
    boolean trimle = false;
    boolean illumina = false;

    List<SeqReader> readers = new ArrayList<>();
    List<File> seqFiles = new ArrayList<>();

    try {
        CommandLine line = new PosixParser().parse(options, args);

        if (line.hasOption("fastq-out")) {
            writeFasta = false;
        }

        if (line.hasOption("less-than")) {
            trimle = true;
        }
        if (line.hasOption("illumina")) {
            illumina = true;
        }

        args = line.getArgs();

        if (args.length < 2) {
            throw new Exception("Unexpected number of arguments");
        }

        if (args[0].length() != 1) {
            throw new Exception("Expected single character quality score");
        }

        qualTrim = FastqCore.Phred33QualFunction.translate(args[0].charAt(0));

        for (int index = 1; index < args.length; index++) {
            File seqFile = new File(args[index]);
            SeqReader reader;
            if (SeqUtils.guessFileFormat(seqFile) == SequenceFormat.FASTA) {
                if (index + 1 == args.length) {
                    throw new Exception("Fasta files must be immediately followed by their quality file");
                }

                File qualFile = new File(args[index + 1]);
                if (SeqUtils.guessFileFormat(qualFile) != SequenceFormat.FASTA) {
                    throw new Exception(seqFile + " was not followed by a fasta quality file");
                }

                reader = new QSeqReader(seqFile, qualFile);
                index++;
            } else {
                if (seqFile.getName().endsWith(".gz")) {
                    reader = new SequenceReader(new GZIPInputStream(new FileInputStream(seqFile)));
                } else {
                    reader = new SequenceReader(seqFile);
                }
            }

            readers.add(reader);
            seqFiles.add(seqFile);
        }
    } catch (Exception e) {
        new HelpFormatter().printHelp("USAGE: QualityTrimmer [options] <ascii_score> <seq_file> [qual_file]",
                options, true);
        System.err.println("Error: " + e.getMessage());
        System.exit(1);
    }

    for (int readerIndex = 0; readerIndex < readers.size(); readerIndex++) {
        File seqFile = seqFiles.get(readerIndex);
        String outStem = "trimmed_" + seqFile.getName().substring(0, seqFile.getName().lastIndexOf("."));
        if (writeFasta) {
            fastaOut = new FastaWriter(outStem + ".fasta");
        } else {
            fastqOut = new FastqWriter(outStem + ".fastq", FastqCore.Phred33QualFunction);
        }

        int[] lengthHisto = new int[200];

        SeqReader reader = readers.get(readerIndex);

        QSequence qseq;

        long totalLength = 0;
        int totalSeqs = 0;
        long trimmedLength = 0;
        int trimmedSeqs = 0;

        int zeroLengthAfterTrimming = 0;

        long startTime = System.currentTimeMillis();

        while ((qseq = (QSequence) reader.readNextSequence()) != null) {
            char[] bases = qseq.getSeqString().toCharArray();
            byte[] qual = qseq.getQuality();

            if (bases.length != qual.length) {
                System.err.println(qseq.getSeqName() + ": quality length doesn't match sequence length");
                continue;
            }

            totalSeqs++;
            totalLength += bases.length;

            int trimIndex = -1;
            if (illumina && qual[bases.length - 1] == qualTrim) {
                trimIndex = bases.length - 1;
                while (trimIndex >= 0 && qual[trimIndex] == qualTrim) {
                    trimIndex--;
                }

                trimIndex++; // The backward scan stopped on the last good base; step forward so trimIndex marks the first trimmed position (exclusive end)
            } else if (!illumina) {
                for (int index = 0; index < bases.length; index++) {
                    if (qual[index] == qualTrim || (trimle && qual[index] < qualTrim)) {
                        trimIndex = index;
                        break;
                    }
                }
            }

            String outSeq;
            byte[] outQual;
            if (trimIndex == -1) {
                outSeq = qseq.getSeqString();
                outQual = qseq.getQuality();
            } else {
                outSeq = new String(bases, 0, trimIndex);
                outQual = Arrays.copyOfRange(qual, 0, trimIndex);
                trimmedSeqs++;
            }
            int len = outSeq.length();
            trimmedLength += len;

            if (len >= lengthHisto.length) {
                lengthHisto = Arrays.copyOf(lengthHisto, len + 1);
            }
            lengthHisto[len]++;

            if (len == 0) {
                //System.err.println(qseq.getSeqName() + ": length 0 after trimming");
                zeroLengthAfterTrimming++;
                continue;
            }

            if (writeFasta) {
                fastaOut.writeSeq(qseq.getSeqName(), qseq.getDesc(), outSeq);
            } else {
                fastqOut.writeSeq(qseq.getSeqName(), qseq.getDesc(), outSeq, outQual);
            }
        }

        reader.close();

        if (writeFasta) {
            fastaOut.close();
        } else {
            fastqOut.close();
        }

        System.out.println(
                "Processed " + seqFile + " in " + (System.currentTimeMillis() - startTime) / 1000.0 + "s");
        System.out.println("Before trimming:");
        System.out.println("Total Sequences:           " + totalSeqs);
        System.out.println("Total Sequence Data:       " + totalLength);
        System.out.println("Average sequence length:   " + ((float) totalLength / totalSeqs));
        System.out.println();
        System.out.println("After trimming:");
        System.out.println("Total Sequences:           " + (totalSeqs - zeroLengthAfterTrimming));
        System.out.println("Sequences Trimmed:         " + trimmedSeqs);
        System.out.println("Total Sequence Data:       " + trimmedLength);
        System.out.println("Average sequence length:   "
                + ((float) trimmedLength / (totalSeqs - zeroLengthAfterTrimming)));
        System.out.println();

        System.out.println("Length\tCount");
        for (int index = 0; index < lengthHisto.length; index++) {
            if (lengthHisto[index] == 0) {
                continue;
            }

            System.out.println(index + "\t" + lengthHisto[index]);
        }

        System.out.println();
        System.out.println();
        System.out.println();
    }
}
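
QualityTrimmer keeps readers and seqFiles as parallel lists and pairs the entries by position with get(readerIndex). A stripped-down sketch of that parallel-list idiom (file names and counts are illustrative):

import java.util.Arrays;
import java.util.List;

public class ParallelLists {
    public static void main(String[] args) {
        List<String> seqFiles = Arrays.asList("a.fastq", "b.fastq");
        List<Integer> seqCounts = Arrays.asList(100, 250);
        // Both lists are built in lockstep, so get(i) pairs the i-th entries.
        for (int i = 0; i < seqFiles.size(); i++) {
            System.out.println(seqFiles.get(i) + ": " + seqCounts.get(i) + " sequences");
        }
    }
}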

From source file:kellinwood.zipsigner.cmdline.Main.java

public static void main(String[] args) {
    try {

        Options options = new Options();
        CommandLine cmdLine = null;
        Option helpOption = new Option("h", "help", false, "Display usage information");

        Option modeOption = new Option("m", "keymode", false,
                "Keymode one of: auto, auto-testkey, auto-none, media, platform, shared, testkey, none");
        modeOption.setArgs(1);

        Option keyOption = new Option("k", "key", false, "PKCS#8 encoded private key file");
        keyOption.setArgs(1);

        Option pwOption = new Option("p", "keypass", false, "Private key password");
        pwOption.setArgs(1);

        Option certOption = new Option("c", "cert", false, "X.509 public key certificate file");
        certOption.setArgs(1);

        Option sbtOption = new Option("t", "template", false, "Signature block template file");
        sbtOption.setArgs(1);

        Option keystoreOption = new Option("s", "keystore", false, "Keystore file");
        keystoreOption.setArgs(1);

        Option aliasOption = new Option("a", "alias", false, "Alias for key/cert in the keystore");
        aliasOption.setArgs(1);

        options.addOption(helpOption);
        options.addOption(modeOption);
        options.addOption(keyOption);
        options.addOption(certOption);
        options.addOption(sbtOption);
        options.addOption(pwOption);
        options.addOption(keystoreOption);
        options.addOption(aliasOption);

        Parser parser = new BasicParser();

        try {
            cmdLine = parser.parse(options, args);
        } catch (MissingOptionException x) {
            System.out.println("One or more required options are missing: " + x.getMessage());
            usage(options);
        } catch (ParseException x) {
            System.out.println(x.getClass().getName() + ": " + x.getMessage());
            usage(options);
        }

        if (cmdLine.hasOption(helpOption.getOpt()))
            usage(options);

        Properties log4jProperties = new Properties();
        log4jProperties.load(new FileReader("log4j.properties"));
        PropertyConfigurator.configure(log4jProperties);
        LoggerManager.setLoggerFactory(new Log4jLoggerFactory());

        List<String> argList = cmdLine.getArgList();
        if (argList.size() != 2)
            usage(options);

        ZipSigner signer = new ZipSigner();

        signer.addAutoKeyObserver(new Observer() {
            @Override
            public void update(Observable observable, Object o) {
                System.out.println("Signing with key: " + o);
            }
        });

        Class<?> bcProviderClass = Class.forName("org.bouncycastle.jce.provider.BouncyCastleProvider");
        Provider bcProvider = (Provider) bcProviderClass.newInstance();

        KeyStoreFileManager.setProvider(bcProvider);

        signer.loadProvider("org.spongycastle.jce.provider.BouncyCastleProvider");

        PrivateKey privateKey = null;
        if (cmdLine.hasOption(keyOption.getOpt())) {
            if (!cmdLine.hasOption(certOption.getOpt())) {
                System.out.println("Certificate file is required when specifying a private key");
                usage(options);
            }

            String keypw = null;
            if (cmdLine.hasOption(pwOption.getOpt()))
                keypw = pwOption.getValue();
            else {
                keypw = new String(readPassword("Key password"));
                if (keypw.equals(""))
                    keypw = null;
            }
            URL privateKeyUrl = new File(keyOption.getValue()).toURI().toURL();

            privateKey = signer.readPrivateKey(privateKeyUrl, keypw);
        }

        X509Certificate cert = null;
        if (cmdLine.hasOption(certOption.getOpt())) {

            if (!cmdLine.hasOption(keyOption.getOpt())) {
                System.out.println("Private key file is required when specifying a certificate");
                usage(options);
            }

            URL certUrl = new File(certOption.getValue()).toURI().toURL();
            cert = signer.readPublicKey(certUrl);
        }

        byte[] sigBlockTemplate = null;
        if (cmdLine.hasOption(sbtOption.getOpt())) {
            URL sbtUrl = new File(sbtOption.getValue()).toURI().toURL();
            sigBlockTemplate = signer.readContentAsBytes(sbtUrl);
        }

        if (cmdLine.hasOption(keyOption.getOpt())) {
            signer.setKeys("custom", cert, privateKey, sigBlockTemplate);
            signer.signZip(argList.get(0), argList.get(1));
        } else if (cmdLine.hasOption(modeOption.getOpt())) {
            signer.setKeymode(modeOption.getValue());
            signer.signZip(argList.get(0), argList.get(1));
        } else if (cmdLine.hasOption((keystoreOption.getOpt()))) {
            String alias = null;

            if (!cmdLine.hasOption(aliasOption.getOpt())) {

                KeyStore keyStore = KeyStoreFileManager.loadKeyStore(keystoreOption.getValue(), (char[]) null);
                for (Enumeration<String> e = keyStore.aliases(); e.hasMoreElements();) {
                    alias = e.nextElement();
                    System.out.println("Signing with key: " + alias);
                    break;
                }
            } else
                alias = aliasOption.getValue();

            String keypw = null;
            if (cmdLine.hasOption(pwOption.getOpt()))
                keypw = pwOption.getValue();
            else {
                keypw = new String(readPassword("Key password"));
                if (keypw.equals(""))
                    keypw = null;
            }

            CustomKeySigner.signZip(signer, keystoreOption.getValue(), null, alias, keypw.toCharArray(),
                    "SHA1withRSA", argList.get(0), argList.get(1));
        } else {
            signer.setKeymode("auto-testkey");
            signer.signZip(argList.get(0), argList.get(1));
        }

    } catch (Throwable t) {
        t.printStackTrace();
    }
}
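
After parsing, the signer reads its two positional arguments with argList.get(0) (input zip) and argList.get(1) (output zip), guarded by the size check. The same pattern in isolation (a sketch, assuming Apache Commons CLI 1.x on the classpath):

import java.util.List;

import org.apache.commons.cli.BasicParser;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Options;

public class PositionalArgs {
    public static void main(String[] args) throws Exception {
        CommandLine cmdLine = new BasicParser().parse(new Options(), args);
        List<String> argList = cmdLine.getArgList();
        if (argList.size() != 2) {
            System.err.println("usage: PositionalArgs <input> <output>");
            System.exit(1);
        }
        String input = argList.get(0);  // first positional argument
        String output = argList.get(1); // second positional argument
        System.out.println(input + " -> " + output);
    }
}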

From source file:squash.tools.FakeBookingCreator.java

public static void main(String[] args) throws IOException {
    int numberOfDays = 21;
    int numberOfCourts = 5;
    int maxCourtSpan = 5;
    int numberOfSlots = 16;
    int maxSlotSpan = 3;
    int minSurnameLength = 2;
    int maxSurnameLength = 20;
    int minBookingsPerDay = 0;
    int maxBookingsPerDay = 8;
    LocalDate startDate = LocalDate.of(2016, 7, 5);

    DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd");
    List<Booking> bookings = new ArrayList<>();
    for (LocalDate date = startDate; date.isBefore(startDate.plusDays(numberOfDays)); date = date.plusDays(1)) {
        int numBookings = ThreadLocalRandom.current().nextInt(minBookingsPerDay, maxBookingsPerDay + 1);
        List<Booking> daysBookings = new ArrayList<>();
        for (int bookingIndex = 0; bookingIndex < numBookings; bookingIndex++) {
            String player1 = RandomStringUtils.randomAlphabetic(1) + "." + RandomStringUtils.randomAlphabetic(
                    ThreadLocalRandom.current().nextInt(minSurnameLength, maxSurnameLength + 1));
            String player2 = RandomStringUtils.randomAlphabetic(1) + "." + RandomStringUtils.randomAlphabetic(
                    ThreadLocalRandom.current().nextInt(minSurnameLength, maxSurnameLength + 1));

            Set<ImmutablePair<Integer, Integer>> bookedCourts = new HashSet<>();
            daysBookings.forEach((booking) -> {
                addBookingToSet(booking, bookedCourts);
            });

            Booking booking;
            Set<ImmutablePair<Integer, Integer>> courtsToBook = new HashSet<>();
            do {
                // Loop until we create a booking of free courts
                int court = ThreadLocalRandom.current().nextInt(1, numberOfCourts + 1);
                int courtSpan = ThreadLocalRandom.current().nextInt(1,
                        Math.min(maxCourtSpan + 1, numberOfCourts - court + 2));
                int slot = ThreadLocalRandom.current().nextInt(1, numberOfSlots + 1);
                int slotSpan = ThreadLocalRandom.current().nextInt(1,
                        Math.min(maxSlotSpan + 1, numberOfSlots - slot + 2));
                booking = new Booking(court, courtSpan, slot, slotSpan, player1 + "/" + player2);
                booking.setDate(date.format(formatter));
                courtsToBook.clear();
                addBookingToSet(booking, courtsToBook);
            } while (!Sets.intersection(courtsToBook, bookedCourts).isEmpty());

            daysBookings.add(booking);
        }
        bookings.addAll(daysBookings);
    }

    // Encode bookings as JSON
    // Create the node factory that gives us nodes.
    JsonNodeFactory factory = new JsonNodeFactory(false);
    // Create a json factory to write the treenode as json.
    JsonFactory jsonFactory = new JsonFactory();
    ObjectNode rootNode = factory.objectNode();

    ArrayNode bookingsNode = rootNode.putArray("bookings");
    for (int i = 0; i < bookings.size(); i++) {
        Booking booking = bookings.get(i);
        ObjectNode bookingNode = factory.objectNode();
        bookingNode.put("court", booking.getCourt());
        bookingNode.put("courtSpan", booking.getCourtSpan());
        bookingNode.put("slot", booking.getSlot());
        bookingNode.put("slotSpan", booking.getSlotSpan());
        bookingNode.put("name", booking.getName());
        bookingNode.put("date", booking.getDate());
        bookingsNode.add(bookingNode);
    }
    // Add empty booking rules array - just so restore works
    rootNode.putArray("bookingRules");
    rootNode.put("clearBeforeRestore", true);

    try (JsonGenerator generator = jsonFactory.createGenerator(new File("FakeBookings.json"),
            JsonEncoding.UTF8)) {
        ObjectMapper mapper = new ObjectMapper();
        mapper.setSerializationInclusion(Include.NON_EMPTY);
        // Note: this second call replaces the NON_EMPTY setting; only NON_NULL takes effect.
        mapper.setSerializationInclusion(Include.NON_NULL);
        mapper.writeTree(generator, rootNode);
    }
}
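
The encoding loop above walks the list by index and reads each element with bookings.get(i); since bookings is an ArrayList, each positional access is constant-time. A compact sketch of the same list-to-JSON-array translation (Jackson types as in the original, data values illustrative):

import java.util.Arrays;
import java.util.List;

import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import com.fasterxml.jackson.databind.node.ObjectNode;

public class ListToJsonArray {
    public static void main(String[] args) {
        List<String> names = Arrays.asList("A.Smith/B.Jones", "C.Lee/D.Kim");
        JsonNodeFactory factory = new JsonNodeFactory(false);
        ObjectNode root = factory.objectNode();
        ArrayNode arr = root.putArray("bookings");
        for (int i = 0; i < names.size(); i++) {
            ObjectNode node = factory.objectNode();
            node.put("name", names.get(i)); // element at position i
            arr.add(node);
        }
        System.out.println(root); // {"bookings":[{"name":"A.Smith/B.Jones"},{"name":"C.Lee/D.Kim"}]}
    }
}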

From source file:com.cyberway.issue.io.arc.ARCReader.java

/**
 * Command-line interface to ARCReader.
 *
 * Here is the command-line interface:
 * <pre>
 * usage: java com.cyberway.issue.io.arc.ARCReader [--offset=#] ARCFILE
 *  -h,--help      Prints this message and exits.
 *  -o,--offset    Outputs record at this offset into arc file.</pre>
 *
 * <p>See in <code>$HERITRIX_HOME/bin/arcreader</code> for a script that'll
 * take care of classpaths and the calling of ARCReader.
 *
 * <p>Outputs using a pseudo-CDX format as described here:
 * <a href="http://www.archive.org/web/researcher/cdx_legend.php">CDX
 * Legend</a> and here
 * <a href="http://www.archive.org/web/researcher/example_cdx.php">Example</a>.
 * The legend used below is: 'CDX b e a m s c V (or v if uncompressed) n g'.
 * The hash is a hard-coded, straight SHA-1 hash of the content.
 *
 * @param args Command-line arguments.
 * @throws ParseException Failed parse of the command line.
 * @throws IOException
 * @throws java.text.ParseException
 */
public static void main(String[] args) throws ParseException, IOException, java.text.ParseException {
    Options options = getOptions();
    options.addOption(new Option("p", "parse", false, "Parse headers."));
    PosixParser parser = new PosixParser();
    CommandLine cmdline = parser.parse(options, args, false);
    List cmdlineArgs = cmdline.getArgList();
    Option[] cmdlineOptions = cmdline.getOptions();
    HelpFormatter formatter = new HelpFormatter();

    // If no args, print help.
    if (cmdlineArgs.size() <= 0) {
        usage(formatter, options, 0);
    }

    // Now look at options passed.
    long offset = -1;
    boolean digest = false;
    boolean strict = false;
    boolean parse = false;
    String format = CDX;
    for (int i = 0; i < cmdlineOptions.length; i++) {
        switch (cmdlineOptions[i].getId()) {
        case 'h':
            usage(formatter, options, 0);
            break;

        case 'o':
            offset = Long.parseLong(cmdlineOptions[i].getValue());
            break;

        case 's':
            strict = true;
            break;

        case 'p':
            parse = true;
            break;

        case 'd':
            digest = getTrueOrFalse(cmdlineOptions[i].getValue());
            break;

        case 'f':
            format = cmdlineOptions[i].getValue().toLowerCase();
            boolean match = false;
            // List of supported formats.
            final String[] supportedFormats = { CDX, DUMP, GZIP_DUMP, HEADER, NOHEAD, CDX_FILE };
            for (int ii = 0; ii < supportedFormats.length; ii++) {
                if (supportedFormats[ii].equals(format)) {
                    match = true;
                    break;
                }
            }
            if (!match) {
                usage(formatter, options, 1);
            }
            break;

        default:
            throw new RuntimeException("Unexpected option: " + cmdlineOptions[i].getId());
        }
    }

    if (offset >= 0) {
        if (cmdlineArgs.size() != 1) {
            System.out.println("Error: Pass one arcfile only.");
            usage(formatter, options, 1);
        }
        ARCReader arc = ARCReaderFactory.get((String) cmdlineArgs.get(0), offset);
        arc.setStrict(strict);
        // We must parse headers if we need to skip them.
        if (format.equals(NOHEAD) || format.equals(HEADER)) {
            parse = true;
        }
        arc.setParseHttpHeaders(parse);
        outputRecord(arc, format);
    } else {
        for (Iterator i = cmdlineArgs.iterator(); i.hasNext();) {
            String urlOrPath = (String) i.next();
            try {
                ARCReader r = ARCReaderFactory.get(urlOrPath);
                r.setStrict(strict);
                r.setParseHttpHeaders(parse);
                r.setDigest(digest);
                output(r, format);
            } catch (RuntimeException e) {
                // Write out the name of the file we failed on to help with
                // debugging, then print the stack trace and keep going. We do
                // this for the case where we're being fed a bunch of ARCs:
                // just note the bad one and move on to the next.
                System.err.println("Exception processing " + urlOrPath + ": " + e.getMessage());
                e.printStackTrace(System.err);
            }
        }
    }
}
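
The positional-argument handling above only calls cmdlineArgs.get(0) after verifying that exactly one argument was passed, so List.get can never throw IndexOutOfBoundsException there. A minimal standalone sketch of the same guarded pattern (the class name and messages are illustrative, not part of the tool above):

import java.util.Arrays;
import java.util.List;

public class GuardedGetExample {
    public static void main(String[] args) {
        // Simulated positional arguments; in the tool above these come from cmdline.getArgList().
        List<String> cmdlineArgs = Arrays.asList(args);

        // Guard the size before calling get(0), as the ARC tool does, so an
        // empty list produces an error message instead of an IndexOutOfBoundsException.
        if (cmdlineArgs.size() != 1) {
            System.err.println("Error: Pass one arcfile only.");
            System.exit(1);
        }
        String arcFile = cmdlineArgs.get(0);
        System.out.println("Would open: " + arcFile);
    }
}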

From source file:com.thesmartweb.swebrank.Main.java

/**
 * @param args the command line arguments
 */
public static void main(String[] args) {
    Path input_path = Paths.get("//mnt//var//DBs//inputsL10//nba//");//input directory
    String output_parent_directory = "//mnt//var//DBs//outputsConfL10//nba//";//output directory
    String config_path = "//mnt//var//DBs//config//";//input directory
    //---Disable apache log manually----
    //System.setProperty("org.apache.commons.logging.Log","org.apache.commons.logging.impl.NoOpLog");
    System.setProperty("org.apache.commons.logging.Log", "org.apache.commons.logging.impl.Log4JLogger");
    //--------------Domain that is searched----------
    String domain = "";
    //------------------search engine related options----------------------
    List<String> queries = null;
    int results_number = 0;//the number of results that are returned from each search engine
    List<Boolean> enginechoice = null;
    //list element #0. True/False Bing
    //list element #1. True/False Google
    //list element #2. True/False Yahoo!
    //list element #3. True/False Merged
    //-----------Moz options---------------------
    List<Boolean> mozMetrics = null;
    //The list contains the Moz-related input in the following order
    //list element #0. True/False, true means we use the Moz API
    //list element #1. True if we use Domain Authority
    //list element #2. True if we use External MozRank
    //list element #3. True if we use MozRank
    //list element #4. True if we use MozTrust
    //list element #5. True if we use Subdomain MozRank
    //list element #6. True if we use Page Authority
    //only one is used (the first to be set to true)
    boolean moz_threshold_option = false;//set to true we use the threshold
    Double moz_threshold = 0.0;//if we want to have a threshold in moz
    int top_count_moz = 0;//if we want to get the moz top-something results
    //---------------Semantic Analysis method----------------
    List<Boolean> ContentSemantics = null;
    int SensebotConcepts = 0;//define the amount of concepts that sensebot is going to recognize
    List<Double> SWebRankSettings = null;
    //------(string)directory is going to be used later-----
    String output_child_directory;
    //-------we get all the paths of the txt (input) files from the input directory-------
    DataManipulation getfiles = new DataManipulation();//class responsible for the extraction of paths
    Collection<File> inputs_files;//collection to hold the paths of the txt files
    inputs_files = getfiles.getinputfiles(input_path.toString(), "txt");//method to retrieve all the paths of the input documents
    //------------read the txt files------------
    for (File input : inputs_files) {
        ReadInput ri = new ReadInput();//function to read the input
        boolean check_reading_input = ri.perform(input);
        if (check_reading_input) {
            domain = ri.domain;
            //----------
            queries = ri.queries;
            results_number = ri.results_number;
            enginechoice = ri.enginechoice;
            //------------
            mozMetrics = ri.mozMetrics;
            moz_threshold_option = ri.moz_threshold_option;
            moz_threshold = ri.moz_threshold.doubleValue();
            //---------------
            ContentSemantics = ri.ContentSemantics;
            SWebRankSettings = ri.SWebRankSettings;
        }
        int top_visible = 0;//option to set the amount of results you can get in the merged search engine
        //------if we choose to use a Moz metric or the Visibility score for our ranking, we need to set results_number for the search engines to its max, which is 50
        //-----we set the top results number for Moz or the Visibility rank----
        if (mozMetrics.get(0) || enginechoice.get(3)) {
            if (mozMetrics.get(0)) {
                top_count_moz = results_number;
            } //if moz is true, top_count_moz gets the value of result number
            if (enginechoice.get(3)) {
                top_visible = results_number;
            } //if merged engine is true, top_visible gets the value of result number
            results_number = 50;//this is the max amount of results that you can get from the search engine APIs
        }
        //-----if we want to use Moz we should check first if it works
        if (mozMetrics.get(0)) {
            Moz Moz = new Moz();
            //---if it works, moz remains true, otherwise it is set to false
            //note: set(0, ...) replaces the flag in place; add(0, ...) would insert and shift every later index
            mozMetrics.set(0, Moz.check(config_path));
            //if it is false and we have chosen to use Visibility score with Moz, we reset back to the standard settings (ranking and not merged)
            //therefore, we reset the number of results from 50 to the top_count_moz which contained the original number of results
            if (!mozMetrics.get(0)) {
                if (!enginechoice.get(3)) {
                    results_number = top_count_moz;
                }
            }
        }
        //----------we set the wordLists that we are going to use---------------------
        List<String> finalList = new ArrayList<String>();//finalList is going to contain all the content in the end
        Total_analysis ta = new Total_analysis();//we call total analysis
        int iteration_counter = 0;//the iteration_counter is used in order to count the number of iterations of the algorithm and to be checked with perf_limit
        //this list of arraylists is going to contain all the wordLists that are produced for every term of the String[] query,
        //in order to calculate the NGD scores between every term of the wordList and the term that was used as the query to produce the specific wordList
        List<ArrayList<String>> array_wordLists = new ArrayList<>();
        List<String> wordList_previous = new ArrayList<>();
        List<String> wordList_new = new ArrayList<>();
        double convergence = 0;//we create the convergence percentage and initialize it
        String conv_percentages = "";//string that contains all the convergence percentages
        DataManipulation wordsmanipulation = new DataManipulation();//method to manipulate various word data (String, list<String>, etc)
        do { //if we run the algorithm for the 1st time we already have the query so we skip the loop below that produces the new array of query
            if (iteration_counter != 0) {
                wordList_previous = wordList_new;
                //we add the previous wordList to the finalList
                finalList = wordsmanipulation.AddAList(wordList_previous, finalList);
                List<String> query_new_list_total = new ArrayList<>();
                int iteration_previous = iteration_counter - 1;
                Combinations_Engine cn = new Combinations_Engine();//call the class to combine the terms produced
                for (String query : queries) {
                    List<String> ids = new ArrayList<>();
                    if (enginechoice.get(0)) {
                        String id = domain + "/" + query + "/bing" + "/" + iteration_previous;
                        ids.add(id);
                    }
                    if (enginechoice.get(1)) {
                        String id = domain + "/" + query + "/google" + "/" + iteration_previous;
                        ids.add(id);
                    }
                    if (enginechoice.get(2)) {
                        String id = domain + "/" + query + "/yahoo" + "/" + iteration_previous;
                        ids.add(id);
                    }
                    ElasticGetWordList ESget = new ElasticGetWordList();//we call this class to get the wordlist from Elasticsearch
                    List<String> maxWords = ESget.getMaxWords(ids, SWebRankSettings.get(9).intValue(),
                            config_path);//we are going to get a maximum number of words
                    int query_index = queries.indexOf(query);
                    int size_query_new = SWebRankSettings.get(10).intValue();//the amount of new queries we are willing to create
                    //we create the new queries for every query of the previous round by combining the words produced from this query
                    List<String> query_new_list = cn.perform(maxWords, SWebRankSettings.get(7), queries,
                            SWebRankSettings.get(6), query_index, size_query_new, config_path);
                    //we add the list of new queries to the total list that contains all the new queries
                    query_new_list_total.addAll(query_new_list);
                    System.out.println("query pointer=" + query_index + "");
                }
                //---------------------the following cleans a list from null and duplicates
                query_new_list_total = wordsmanipulation.clearListString(query_new_list_total);
                //--------------we create the new directory where our files are going to be saved
                String txt_directory = FilenameUtils.getBaseName(input.getName());
                output_child_directory = output_parent_directory + txt_directory + "_level_" + iteration_counter
                        + "//";
                //----------------append the wordlist to a file------------------
                wordsmanipulation.AppendWordList(query_new_list_total,
                        output_child_directory + "queries_" + iteration_counter + ".txt");
                if (query_new_list_total.size() < 1) {
                    break;
                } //if we don't create new queries we end the while loop
                //the Total_analysis function is going to do all the work and return what we need
                ta = new Total_analysis();
                ta.perform(wordList_previous, iteration_counter, output_child_directory, domain, enginechoice,
                        query_new_list_total, results_number, top_visible, mozMetrics, moz_threshold_option,
                        moz_threshold.doubleValue(), top_count_moz, ContentSemantics, SensebotConcepts,
                        SWebRankSettings, config_path);
                //we get the array of wordlists
                array_wordLists = ta.getarray_wordLists();
                //get the wordlist that includes all the new queries
                wordList_new = ta.getwordList_total();
                //---------------------the following cleans a list from null and duplicates-------------
                wordList_new = wordsmanipulation.clearListString(wordList_new);
                //----------------append the wordlist to a file--------------------
                wordsmanipulation.AppendWordList(wordList_new, output_child_directory + "wordList.txt");
                //the convergence percentage of this iteration
                convergence = ta.getConvergence();//we are going to use convergence score to check the convergence
                //a string that contains all the convergence percentage for each round separated by \n character
                conv_percentages = conv_percentages + "\n" + convergence;
                //a file that is going to include the convergence percentages
                wordsmanipulation.AppendString(conv_percentages,
                        output_child_directory + "convergence_percentage.txt");
                //we add the new wordList to the finalList
                finalList = wordsmanipulation.AddAList(wordList_new, finalList);
                //we set the query array to be equal to the query new total that we have created
                queries = query_new_list_total;
                //we increment the iteration_counter in order to count the iterations of the algorithm and to use the perf_limit
                iteration_counter++;
            } else { //the following source code is performed on the 1st run of the loop
                //------------we extract the parent path of the file
                String txt_directory = FilenameUtils.getBaseName(input.getName());
                //----------we create a string that is going to be used for the corresponding directory of outputs
                output_child_directory = output_parent_directory + txt_directory + "_level_" + iteration_counter
                        + "//";
                //we call the total analysis function perform
                ta.perform(wordList_new, iteration_counter, output_child_directory, domain, enginechoice,
                        queries, results_number, top_visible, mozMetrics, moz_threshold_option,
                        moz_threshold.doubleValue(), top_count_moz, ContentSemantics, SensebotConcepts,
                        SWebRankSettings, config_path);
                //we get the array of wordlists
                array_wordLists = ta.getarray_wordLists();
                //get the wordlist that includes all the new queries
                wordList_new = ta.getwordList_total();
                //---------------------the following cleans a list from null and duplicates
                wordList_new = wordsmanipulation.clearListString(wordList_new);
                //----------------append the wordlist to a file
                wordsmanipulation.AppendWordList(wordList_new, output_child_directory + "wordList.txt");
                //-----------------------------------------
                iteration_counter++;//increase the iteration_counter that counts the iterations of the algorithm
            }
        } while (convergence < SWebRankSettings.get(5).doubleValue()
                && iteration_counter < SWebRankSettings.get(8).intValue());//while the convergence percentage is below the limit and the iteration_counter below the performance limit
        if (iteration_counter == 1) {
            finalList = wordsmanipulation.AddAList(wordList_new, finalList);
        }
        //--------------------content List----------------
        if (!finalList.isEmpty()) {
            //---------------------the following cleans the final list from null and duplicates
            finalList = wordsmanipulation.clearListString(finalList);
            //write the keywords to a file
            boolean flag_file = false;//boolean flag to declare successful write to file
            flag_file = wordsmanipulation.AppendWordList(finalList,
                    output_parent_directory + "total_content.txt");
            if (!flag_file) {
                System.out.print("can not create the content file for: " + output_parent_directory
                        + "total_content.txt");
            }
        }
        //we are going to save the total content with its convergence on the Elasticsearch cluster in a separate index
        //Node node = nodeBuilder().client(true).clusterName("lshrankldacluster").node();
        //Client client = node.client();
        //get the elastic search indexes in a list
        List<String> elasticIndexes = ri.GetKeyFile(config_path, "elasticSearchIndexes");
        Settings settings = ImmutableSettings.settingsBuilder().put("cluster.name", "lshrankldacluster")
                .build();
        Client client = new TransportClient(settings)
                .addTransportAddress(new InetSocketTransportAddress("localhost", 9300));
        JSONObject objEngineLevel = new JSONObject();
        objEngineLevel.put("TotalContent", finalList);//we save the total content
        objEngineLevel.put("Convergences", conv_percentages);//we save the convergence percentages
        IndexRequest indexReq = new IndexRequest(elasticIndexes.get(0), "content", domain);//we also save the domain
        indexReq.source(objEngineLevel);
        IndexResponse indexRes = client.index(indexReq).actionGet();
        //node.close();
        client.close();
        //----------------------convergence percentages writing to file---------------
        //use the conv_percentages string
        if (conv_percentages.length() != 0) {
            boolean flag_file = false;//boolean flag to declare successful write to file
            flag_file = wordsmanipulation.AppendString(conv_percentages,
                    output_parent_directory + "convergence_percentages.txt");
            if (!flag_file) {
                System.out.print("can not create the convergence file for: " + output_parent_directory
                        + "convergence_percentages.txt");
            }
        }
    }
}
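
The method above reads enginechoice and mozMetrics as positional flag lists via List.get(index), and overwrites the Moz availability flag at index 0 after the API check. A short sketch, with made-up list contents, of why List.set(int, E) rather than List.add(int, E) is the right call for that overwrite:

import java.util.ArrayList;
import java.util.List;

public class SetVersusAddExample {
    public static void main(String[] args) {
        List<Boolean> mozMetrics = new ArrayList<>(List.of(true, false, true));

        // set(0, ...) replaces the element in place: the list keeps its size,
        // so every later index still refers to the same flag as before.
        mozMetrics.set(0, false);
        System.out.println(mozMetrics);        // [false, false, true]
        System.out.println(mozMetrics.get(2)); // true, unchanged

        // add(0, ...) would instead insert and shift everything right,
        // silently changing what get(1), get(2), ... refer to.
        mozMetrics.add(0, true);
        System.out.println(mozMetrics);        // [true, false, false, true]
    }
}

Note that List.of requires Java 9 or later; on older JDKs, Arrays.asList wrapped in a new ArrayList works the same way.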

From source file:com.act.lcms.db.analysis.BestMoleculesPickerFromLCMSIonAnalysis.java

public static void main(String[] args) throws Exception {
    Options opts = new Options();
    for (Option.Builder b : OPTION_BUILDERS) {
        opts.addOption(b.build());
    }

    CommandLine cl = null;
    try {
        CommandLineParser parser = new DefaultParser();
        cl = parser.parse(opts, args);
    } catch (ParseException e) {
        HELP_FORMATTER.printHelp(BestMoleculesPickerFromLCMSIonAnalysis.class.getCanonicalName(), HELP_MESSAGE,
                opts, null, true);
        System.exit(1);
    }

    if (cl.hasOption("help")) {
        HELP_FORMATTER.printHelp(BestMoleculesPickerFromLCMSIonAnalysis.class.getCanonicalName(), HELP_MESSAGE,
                opts, null, true);
        System.exit(1);
    }

    List<String> positiveReplicateResults = new ArrayList<>(
            Arrays.asList(cl.getOptionValues(OPTION_INPUT_FILES)));

    if (cl.hasOption(OPTION_MIN_OF_REPLICATES)) {
        HitOrMissReplicateFilterAndTransformer transformer = new HitOrMissReplicateFilterAndTransformer();

        List<IonAnalysisInterchangeModel> models = positiveReplicateResults.stream().map(file -> {
            try {
                return IonAnalysisInterchangeModel.loadIonAnalysisInterchangeModelFromFile(file);
            } catch (IOException e) {
                throw new UncheckedIOException(e);
            }
        }).collect(Collectors.toList());

        IonAnalysisInterchangeModel transformedModel = IonAnalysisInterchangeModel
                .filterAndOperateOnMoleculesFromMultipleReplicateModels(models, transformer);

        printInchisAndIonsToFile(transformedModel, cl.getOptionValue(OPTION_OUTPUT_FILE),
                cl.hasOption(OPTION_JSON_FORMAT));
        return;
    }

    Double minSnrThreshold = Double.parseDouble(cl.getOptionValue(OPTION_MIN_SNR_THRESHOLD));
    Double minIntensityThreshold = Double.parseDouble(cl.getOptionValue(OPTION_MIN_INTENSITY_THRESHOLD));
    Double minTimeThreshold = Double.parseDouble(cl.getOptionValue(OPTION_MIN_TIME_THRESHOLD));

    if (cl.hasOption(OPTION_GET_IONS_SUPERSET)) {
        List<IonAnalysisInterchangeModel> models = positiveReplicateResults.stream().map(file -> {
            try {
                return IonAnalysisInterchangeModel.loadIonAnalysisInterchangeModelFromFile(file);
            } catch (IOException e) {
                throw new UncheckedIOException(e);
            }
        }).collect(Collectors.toList());

        IonAnalysisInterchangeModel transformedModel = IonAnalysisInterchangeModel
                .getSupersetOfIonicVariants(models, minSnrThreshold, minIntensityThreshold, minTimeThreshold);

        printInchisAndIonsToFile(transformedModel, cl.getOptionValue(OPTION_OUTPUT_FILE),
                cl.hasOption(OPTION_JSON_FORMAT));
        return;
    }

    if (cl.hasOption(OPTION_THRESHOLD_ANALYSIS)) {

        if (positiveReplicateResults.size() > 1) {
            LOGGER.error("Since this is a threshold analysis, the number of positive replicates should be 1.");
            System.exit(1);
        }

        // We need to declare this variable final since it is used in a lambda below.
        final Set<String> ions = new HashSet<>();
        if (cl.hasOption(OPTION_FILTER_BY_IONS)) {
            ions.addAll(Arrays.asList(cl.getOptionValues(OPTION_FILTER_BY_IONS)));
        }

        HitOrMissSingleSampleFilterAndTransformer hitOrMissSingleSampleTransformer = new HitOrMissSingleSampleFilterAndTransformer(
                minIntensityThreshold, minSnrThreshold, minTimeThreshold, ions);

        IonAnalysisInterchangeModel model = IonAnalysisInterchangeModel.filterAndOperateOnMoleculesFromModel(
                IonAnalysisInterchangeModel.loadIonAnalysisInterchangeModelFromFile(
                        positiveReplicateResults.get(0)),
                hitOrMissSingleSampleTransformer);

        printInchisAndIonsToFile(model, cl.getOptionValue(OPTION_OUTPUT_FILE),
                cl.hasOption(OPTION_JSON_FORMAT));
    }
}
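
The threshold-analysis branch above enforces that positiveReplicateResults holds exactly one entry before calling get(0) on it. A sketch of the same single-element contract factored into a reusable helper (the helper and file names are ours, not from the class above):

import java.util.List;

public class SingleElementExample {
    // Returns the sole element of the list, failing loudly if the size contract is broken.
    static <T> T onlyElement(List<T> items, String what) {
        if (items.size() != 1) {
            throw new IllegalArgumentException(
                    "Expected exactly one " + what + " but got " + items.size());
        }
        return items.get(0);
    }

    public static void main(String[] args) {
        List<String> positiveReplicateResults = List.of("replicate-results.json");
        String file = onlyElement(positiveReplicateResults, "positive replicate result");
        System.out.println("Would load the model from: " + file);
    }
}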