List of usage examples for java.util.Map.put
V put(K key, V value);
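Before the full examples, the contract in miniature: put associates a value with a key, replacing any previous mapping, and returns the previous value, or null if the key was absent. A minimal sketch:

import java.util.HashMap;
import java.util.Map;

public class MapPutContract {
    public static void main(String[] args) {
        Map<String, Integer> counts = new HashMap<>();
        System.out.println(counts.put("apples", 1)); // null: no previous mapping
        System.out.println(counts.put("apples", 2)); // 1: the value that was replaced
        System.out.println(counts.get("apples"));    // 2
    }
}

Note that a null return is ambiguous for maps that permit null values; containsKey distinguishes the two cases.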
From source file:com.searchcode.app.App.java
public static void main(String[] args) {
    injector = Guice.createInjector(new InjectorConfig());

    int server_port = Helpers.tryParseInt(
            Properties.getProperties().getProperty(Values.SERVERPORT, Values.DEFAULTSERVERPORT),
            Values.DEFAULTSERVERPORT);
    boolean onlyLocalhost = Boolean
            .parseBoolean(Properties.getProperties().getProperty("only_localhost", "false"));

    // Database migrations happen before we start
    databaseMigrations();

    LOGGER.info("Starting searchcode server on port " + server_port);
    Spark.port(server_port);

    JobService js = injector.getInstance(JobService.class);
    Repo repo = injector.getInstance(Repo.class);
    Data data = injector.getInstance(Data.class);
    Api api = injector.getInstance(Api.class);
    ApiService apiService = injector.getInstance(ApiService.class);
    StatsService statsService = new StatsService();

    scl = Singleton.getSearchcodeLib(data);
    js.initialJobs();

    Gson gson = new Gson();

    Spark.staticFileLocation("/public");

    before((request, response) -> {
        if (onlyLocalhost) {
            if (!request.ip().equals("127.0.0.1")) {
                halt(204);
            }
        }
    });

    get("/", (req, res) -> {
        Map<String, Object> map = new HashMap<>();
        map.put("repoCount", repo.getRepoCount());

        if (req.queryParams().contains("q") && !req.queryParams("q").trim().equals("")) {
            String query = req.queryParams("q").trim();
            int page = 0;
            if (req.queryParams().contains("p")) {
                try {
                    page = Integer.parseInt(req.queryParams("p"));
                    page = page > 19 ? 19 : page;
                } catch (NumberFormatException ex) {
                    page = 0;
                }
            }

            List<String> reposList = new ArrayList<>();
            List<String> langsList = new ArrayList<>();
            List<String> ownsList = new ArrayList<>();

            if (req.queryParams().contains("repo")) {
                String[] repos = req.queryParamsValues("repo");
                if (repos.length != 0) {
                    reposList = Arrays.asList(repos);
                }
            }
            if (req.queryParams().contains("lan")) {
                String[] langs = req.queryParamsValues("lan");
                if (langs.length != 0) {
                    langsList = Arrays.asList(langs);
                }
            }
            if (req.queryParams().contains("own")) {
                String[] owns = req.queryParamsValues("own");
                if (owns.length != 0) {
                    ownsList = Arrays.asList(owns);
                }
            }

            map.put("searchValue", query);
            map.put("searchResultJson",
                    gson.toJson(new CodePreload(query, page, langsList, reposList, ownsList)));
            map.put("logoImage", getLogo());
            map.put("isCommunity", ISCOMMUNITY);
            return new ModelAndView(map, "search_test.ftl");
        }

        // Totally pointless vanity but let's rotate the image every week
        int photoId = getWeekOfMonth();
        if (photoId <= 0) {
            photoId = 3;
        }
        if (photoId > 4) {
            photoId = 2;
        }

        CodeSearcher cs = new CodeSearcher();
        map.put("photoId", photoId);
        map.put("numDocs", cs.getTotalNumberDocumentsIndexed());
        map.put("logoImage", getLogo());
        map.put("isCommunity", ISCOMMUNITY);
        return new ModelAndView(map, "index.ftl");
    }, new FreeMarkerEngine());

    get("/html/", (req, res) -> {
        CodeSearcher cs = new CodeSearcher();
        CodeMatcher cm = new CodeMatcher(data);
        Map<String, Object> map = new HashMap<>();
        map.put("repoCount", repo.getRepoCount());

        if (req.queryParams().contains("q")) {
            String query = req.queryParams("q").trim();
            String altquery = query.replaceAll("[^A-Za-z0-9 ]", " ").trim().replaceAll(" +", " ");

            int page = 0;
            if (req.queryParams().contains("p")) {
                try {
                    page = Integer.parseInt(req.queryParams("p"));
                    page = page > 19 ? 19 : page;
                } catch (NumberFormatException ex) {
                    page = 0;
                }
            }

            String[] repos = new String[0];
            String[] langs = new String[0];
            String reposFilter = "";
            String langsFilter = "";
            String reposQueryString = "";
            String langsQueryString = "";

            if (req.queryParams().contains("repo")) {
                repos = req.queryParamsValues("repo");
                if (repos.length != 0) {
                    List<String> reposList = Arrays.asList(repos).stream()
                            .map((s) -> "reponame:" + QueryParser.escape(s)).collect(Collectors.toList());
                    reposFilter = " && (" + StringUtils.join(reposList, " || ") + ")";

                    List<String> reposQueryList = Arrays.asList(repos).stream()
                            .map((s) -> "&repo=" + URLEncoder.encode(s)).collect(Collectors.toList());
                    reposQueryString = StringUtils.join(reposQueryList, "");
                }
            }
            if (req.queryParams().contains("lan")) {
                langs = req.queryParamsValues("lan");
                if (langs.length != 0) {
                    List<String> langsList = Arrays.asList(langs).stream()
                            .map((s) -> "languagename:" + QueryParser.escape(s)).collect(Collectors.toList());
                    langsFilter = " && (" + StringUtils.join(langsList, " || ") + ")";

                    List<String> langsQueryList = Arrays.asList(langs).stream()
                            .map((s) -> "&lan=" + URLEncoder.encode(s)).collect(Collectors.toList());
                    langsQueryString = StringUtils.join(langsQueryList, "");
                }
            }

            // Split the query, escape it and AND it together
            String cleanQueryString = scl.formatQueryString(query);

            SearchResult searchResult = cs.search(cleanQueryString + reposFilter + langsFilter, page);
            searchResult.setCodeResultList(cm.formatResults(searchResult.getCodeResultList(), query, true));

            for (CodeFacetRepo f : searchResult.getRepoFacetResults()) {
                if (Arrays.asList(repos).contains(f.getRepoName())) {
                    f.setSelected(true);
                }
            }
            for (CodeFacetLanguage f : searchResult.getLanguageFacetResults()) {
                if (Arrays.asList(langs).contains(f.getLanguageName())) {
                    f.setSelected(true);
                }
            }

            map.put("searchValue", query);
            map.put("searchResult", searchResult);
            map.put("reposQueryString", reposQueryString);
            map.put("langsQueryString", langsQueryString);
            map.put("altQuery", altquery);
            map.put("isHtml", true);
            map.put("logoImage", getLogo());
            map.put("isCommunity", ISCOMMUNITY);
            return new ModelAndView(map, "searchresults.ftl");
        }

        // Totally pointless vanity but let's rotate the image every week
        int photoId = getWeekOfMonth();
        if (photoId <= 0) {
            photoId = 3;
        }
        if (photoId > 4) {
            photoId = 2;
        }

        map.put("photoId", photoId);
        map.put("numDocs", cs.getTotalNumberDocumentsIndexed());
        map.put("logoImage", getLogo());
        map.put("isCommunity", ISCOMMUNITY);
        return new ModelAndView(map, "index.ftl");
    }, new FreeMarkerEngine());

    /**
     * Allows one to write literal Lucene search queries against the index
     * TODO This is still very much WIP
     */
    get("/literal/", (req, res) -> {
        CodeSearcher cs = new CodeSearcher();
        CodeMatcher cm = new CodeMatcher(data);
        Map<String, Object> map = new HashMap<>();
        map.put("repoCount", repo.getRepoCount());

        if (req.queryParams().contains("q")) {
            String query = req.queryParams("q").trim();

            int page = 0;
            if (req.queryParams().contains("p")) {
                try {
                    page = Integer.parseInt(req.queryParams("p"));
                    page = page > 19 ? 19 : page;
                } catch (NumberFormatException ex) {
                    page = 0;
                }
            }

            String altquery = query.replaceAll("[^A-Za-z0-9 ]", " ").trim().replaceAll(" +", " ");

            SearchResult searchResult = cs.search(query, page);
            searchResult.setCodeResultList(cm.formatResults(searchResult.getCodeResultList(), altquery, false));

            map.put("searchValue", query);
            map.put("searchResult", searchResult);
            map.put("reposQueryString", "");
            map.put("langsQueryString", "");
            map.put("altQuery", "");
            map.put("logoImage", getLogo());
            map.put("isCommunity", ISCOMMUNITY);
            return new ModelAndView(map, "searchresults.ftl");
        }

        map.put("numDocs", cs.getTotalNumberDocumentsIndexed());
        map.put("logoImage", getLogo());
        map.put("isCommunity", ISCOMMUNITY);
        return new ModelAndView(map, "index.ftl");
    }, new FreeMarkerEngine());

    /**
     * This is the endpoint used by the frontend.
     */
    get("/api/codesearch/", (req, res) -> {
        CodeSearcher cs = new CodeSearcher();
        CodeMatcher cm = new CodeMatcher(data);

        if (req.queryParams().contains("q") && !req.queryParams("q").trim().isEmpty()) {
            String query = req.queryParams("q").trim();

            int page = 0;
            if (req.queryParams().contains("p")) {
                try {
                    page = Integer.parseInt(req.queryParams("p"));
                    page = page > 19 ? 19 : page;
                } catch (NumberFormatException ex) {
                    page = 0;
                }
            }

            String[] repos = new String[0];
            String[] langs = new String[0];
            String[] owners = new String[0];
            String reposFilter = "";
            String langsFilter = "";
            String ownersFilter = "";

            if (req.queryParams().contains("repo")) {
                repos = req.queryParamsValues("repo");
                if (repos.length != 0) {
                    List<String> reposList = Arrays.asList(repos).stream()
                            .map((s) -> "reponame:" + QueryParser.escape(s)).collect(Collectors.toList());
                    reposFilter = " && (" + StringUtils.join(reposList, " || ") + ")";
                }
            }
            if (req.queryParams().contains("lan")) {
                langs = req.queryParamsValues("lan");
                if (langs.length != 0) {
                    List<String> langsList = Arrays.asList(langs).stream()
                            .map((s) -> "languagename:" + QueryParser.escape(s)).collect(Collectors.toList());
                    langsFilter = " && (" + StringUtils.join(langsList, " || ") + ")";
                }
            }
            if (req.queryParams().contains("own")) {
                owners = req.queryParamsValues("own");
                if (owners.length != 0) {
                    List<String> ownersList = Arrays.asList(owners).stream()
                            .map((s) -> "codeowner:" + QueryParser.escape(s)).collect(Collectors.toList());
                    ownersFilter = " && (" + StringUtils.join(ownersList, " || ") + ")";
                }
            }

            // Need to pass the filters into this query
            String cacheKey = query + page + reposFilter + langsFilter + ownersFilter;
            if (cache.containsKey(cacheKey)) {
                return cache.get(cacheKey);
            }

            // Split the query, escape it and AND it together
            String cleanQueryString = scl.formatQueryString(query);

            SearchResult searchResult = cs.search(cleanQueryString + reposFilter + langsFilter + ownersFilter,
                    page);
            searchResult.setCodeResultList(cm.formatResults(searchResult.getCodeResultList(), query, true));
            searchResult.setQuery(query);

            for (String altQuery : scl.generateAltQueries(query)) {
                searchResult.addAltQuery(altQuery);
            }

            // Null out code as it isn't required and there is no point in bloating our AJAX requests
            for (CodeResult codeSearchResult : searchResult.getCodeResultList()) {
                codeSearchResult.setCode(null);
            }

            cache.put(cacheKey, searchResult);
            return searchResult;
        }

        return null;
    }, new JsonTransformer());

    get("/api/repo/add/", "application/json", (request, response) -> {
        boolean apiEnabled = Boolean
                .parseBoolean(Properties.getProperties().getProperty("api_enabled", "false"));
        boolean apiAuth = Boolean
                .parseBoolean(Properties.getProperties().getProperty("api_key_authentication", "true"));

        if (!apiEnabled) {
            return new ApiResponse(false, "API not enabled");
        }

        String publicKey = request.queryParams("pub");
        String signedKey = request.queryParams("sig");
        String reponames = request.queryParams("reponame");
        String repourls = request.queryParams("repourl");
        String repotype = request.queryParams("repotype");
        String repousername = request.queryParams("repousername");
        String repopassword = request.queryParams("repopassword");
        String reposource = request.queryParams("reposource");
        String repobranch = request.queryParams("repobranch");

        if (reponames == null || reponames.trim().equals(Values.EMPTYSTRING)) {
            return new ApiResponse(false, "reponame is a required parameter");
        }
        if (repourls == null || repourls.trim().equals(Values.EMPTYSTRING)) {
            return new ApiResponse(false, "repourl is a required parameter");
        }
        if (repotype == null) {
            return new ApiResponse(false, "repotype is a required parameter");
        }
        if (repousername == null) {
            return new ApiResponse(false, "repousername is a required parameter");
        }
        if (repopassword == null) {
            return new ApiResponse(false, "repopassword is a required parameter");
        }
        if (reposource == null) {
            return new ApiResponse(false, "reposource is a required parameter");
        }
        if (repobranch == null) {
            return new ApiResponse(false, "repobranch is a required parameter");
        }

        if (apiAuth) {
            if (publicKey == null || publicKey.trim().equals(Values.EMPTYSTRING)) {
                return new ApiResponse(false, "pub is a required parameter");
            }
            if (signedKey == null || signedKey.trim().equals(Values.EMPTYSTRING)) {
                return new ApiResponse(false, "sig is a required parameter");
            }

            String toValidate = String.format(
                    "pub=%s&reponame=%s&repourl=%s&repotype=%s&repousername=%s&repopassword=%s&reposource=%s&repobranch=%s",
                    URLEncoder.encode(publicKey), URLEncoder.encode(reponames), URLEncoder.encode(repourls),
                    URLEncoder.encode(repotype), URLEncoder.encode(repousername),
                    URLEncoder.encode(repopassword), URLEncoder.encode(reposource),
                    URLEncoder.encode(repobranch));

            boolean validRequest = apiService.validateRequest(publicKey, signedKey, toValidate);
            if (!validRequest) {
                return new ApiResponse(false, "invalid signed url");
            }
        }

        // Clean
        if (repobranch == null || repobranch.trim().equals(Values.EMPTYSTRING)) {
            repobranch = "master";
        }

        repotype = repotype.trim().toLowerCase();
        if (!"git".equals(repotype) && !"svn".equals(repotype)) {
            repotype = "git";
        }

        RepoResult repoResult = repo.getRepoByName(reponames);
        if (repoResult != null) {
            return new ApiResponse(false, "repository name already exists");
        }

        repo.saveRepo(new RepoResult(-1, reponames, repotype, repourls, repousername, repopassword, reposource,
                repobranch));

        return new ApiResponse(true, "added repository successfully");
    }, new JsonTransformer());

    get("/api/repo/delete/", "application/json", (request, response) -> {
        boolean apiEnabled = Boolean
                .parseBoolean(Properties.getProperties().getProperty("api_enabled", "false"));
        boolean apiAuth = Boolean
                .parseBoolean(Properties.getProperties().getProperty("api_key_authentication", "true"));

        if (!apiEnabled) {
            return new ApiResponse(false, "API not enabled");
        }

        String publicKey = request.queryParams("pub");
        String signedKey = request.queryParams("sig");
        String reponames = request.queryParams("reponame");

        if (reponames == null || reponames.trim().equals(Values.EMPTYSTRING)) {
            return new ApiResponse(false, "reponame is a required parameter");
        }

        if (apiAuth) {
            if (publicKey == null || publicKey.trim().equals(Values.EMPTYSTRING)) {
                return new ApiResponse(false, "pub is a required parameter");
            }
            if (signedKey == null || signedKey.trim().equals(Values.EMPTYSTRING)) {
                return new ApiResponse(false, "sig is a required parameter");
            }

            String toValidate = String.format("pub=%s&reponame=%s", URLEncoder.encode(publicKey),
                    URLEncoder.encode(reponames));

            boolean validRequest = apiService.validateRequest(publicKey, signedKey, toValidate);
            if (!validRequest) {
                return new ApiResponse(false, "invalid signed url");
            }
        }

        RepoResult rr = repo.getRepoByName(reponames);
        if (rr == null) {
            return new ApiResponse(false, "repository already deleted");
        }

        Singleton.getUniqueDeleteRepoQueue().add(rr);
        return new ApiResponse(true, "repository queued for deletion");
    }, new JsonTransformer());

    get("/api/repo/list/", "application/json", (request, response) -> {
        boolean apiEnabled = Boolean
                .parseBoolean(Properties.getProperties().getProperty("api_enabled", "false"));
        boolean apiAuth = Boolean
                .parseBoolean(Properties.getProperties().getProperty("api_key_authentication", "true"));

        if (!apiEnabled) {
            return new ApiResponse(false, "API not enabled");
        }

        String publicKey = request.queryParams("pub");
        String signedKey = request.queryParams("sig");

        if (apiAuth) {
            if (publicKey == null || publicKey.trim().equals(Values.EMPTYSTRING)) {
                return new ApiResponse(false, "pub is a required parameter");
            }
            if (signedKey == null || signedKey.trim().equals(Values.EMPTYSTRING)) {
                return new ApiResponse(false, "sig is a required parameter");
            }

            String toValidate = String.format("pub=%s", URLEncoder.encode(publicKey));

            boolean validRequest = apiService.validateRequest(publicKey, signedKey, toValidate);
            if (!validRequest) {
                return new ApiResponse(false, "invalid signed url");
            }
        }

        List<RepoResult> repoResultList = repo.getAllRepo();
        return new RepoResultApiResponse(true, Values.EMPTYSTRING, repoResultList);
    }, new JsonTransformer());

    get("/admin/", (request, response) -> {
        if (getAuthenticatedUser(request) == null) {
            response.redirect("/login/");
            halt();
            return null;
        }

        CodeSearcher cs = new CodeSearcher();

        Map<String, Object> map = new HashMap<>();
        map.put("repoCount", repo.getRepoCount());
        map.put("numDocs", cs.getTotalNumberDocumentsIndexed());
        map.put("numSearches", statsService.getSearchCount());
        map.put("uptime", statsService.getUptime());

        // Put all properties here
        map.put(Values.SQLITEFILE,
                Properties.getProperties().getProperty(Values.SQLITEFILE, Values.DEFAULTSQLITEFILE));
        map.put(Values.SERVERPORT,
                Properties.getProperties().getProperty(Values.SERVERPORT, Values.DEFAULTSERVERPORT));
        map.put(Values.REPOSITORYLOCATION,
                Properties.getProperties().getProperty(Values.REPOSITORYLOCATION, Values.DEFAULTREPOSITORYLOCATION));
        map.put(Values.INDEXLOCATION,
                Properties.getProperties().getProperty(Values.INDEXLOCATION, Values.DEFAULTINDEXLOCATION));
        map.put(Values.FACETSLOCATION,
                Properties.getProperties().getProperty(Values.FACETSLOCATION, Values.DEFAULTFACETSLOCATION));
        map.put(Values.CHECKREPOCHANGES,
                Properties.getProperties().getProperty(Values.CHECKREPOCHANGES, Values.DEFAULTCHECKREPOCHANGES));
        map.put(Values.ONLYLOCALHOST,
                Properties.getProperties().getProperty(Values.ONLYLOCALHOST, Values.DEFAULTONLYLOCALHOST));
        map.put(Values.LOWMEMORY,
                Properties.getProperties().getProperty(Values.LOWMEMORY, Values.DEFAULTLOWMEMORY));
        map.put(Values.SPELLINGCORRECTORSIZE,
                Properties.getProperties().getProperty(Values.SPELLINGCORRECTORSIZE, Values.DEFAULTSPELLINGCORRECTORSIZE));
        map.put(Values.USESYSTEMGIT,
                Properties.getProperties().getProperty(Values.USESYSTEMGIT, Values.DEFAULTUSESYSTEMGIT));
        map.put(Values.GITBINARYPATH,
                Properties.getProperties().getProperty(Values.GITBINARYPATH, Values.DEFAULTGITBINARYPATH));
        map.put(Values.APIENABLED,
                Properties.getProperties().getProperty(Values.APIENABLED, Values.DEFAULTAPIENABLED));
        map.put(Values.APIKEYAUTH,
                Properties.getProperties().getProperty(Values.APIKEYAUTH, Values.DEFAULTAPIKEYAUTH));
        map.put(Values.SVNBINARYPATH,
                Properties.getProperties().getProperty(Values.SVNBINARYPATH, Values.DEFAULTSVNBINARYPATH));
        map.put(Values.SVNENABLED,
                Properties.getProperties().getProperty(Values.SVNENABLED, Values.DEFAULTSVNENABLED));
        map.put(Values.MAXDOCUMENTQUEUESIZE,
                Properties.getProperties().getProperty(Values.MAXDOCUMENTQUEUESIZE, Values.DEFAULTMAXDOCUMENTQUEUESIZE));
        map.put(Values.MAXDOCUMENTQUEUELINESIZE,
                Properties.getProperties().getProperty(Values.MAXDOCUMENTQUEUELINESIZE, Values.DEFAULTMAXDOCUMENTQUEUELINESIZE));
        map.put(Values.MAXFILELINEDEPTH,
                Properties.getProperties().getProperty(Values.MAXFILELINEDEPTH, Values.DEFAULTMAXFILELINEDEPTH));

        map.put("deletionQueue", Singleton.getUniqueDeleteRepoQueue().size());
        map.put("version", VERSION);
        map.put("logoImage", getLogo());
        map.put("isCommunity", ISCOMMUNITY);
        return new ModelAndView(map, "admin.ftl");
    }, new FreeMarkerEngine());

    get("/admin/repo/", (request, response) -> {
        if (getAuthenticatedUser(request) == null) {
            response.redirect("/login/");
            halt();
            return null;
        }

        int repoCount = repo.getRepoCount();
        String offSet = request.queryParams("offset");
        String searchQuery = request.queryParams("q");
        int indexOffset = 0;

        Map<String, Object> map = new HashMap<>();

        if (offSet != null) {
            try {
                indexOffset = Integer.parseInt(offSet);
                if (indexOffset > repoCount || indexOffset < 0) {
                    indexOffset = 0;
                }
            } catch (NumberFormatException ex) {
                indexOffset = 0;
            }
        }

        if (searchQuery != null) {
            map.put("repoResults", repo.searchRepo(searchQuery));
        } else {
            map.put("repoResults", repo.getPagedRepo(indexOffset, 100));
        }

        map.put("searchQuery", searchQuery);
        map.put("hasPrevious", indexOffset > 0);
        map.put("hasNext", (indexOffset + 100) < repoCount);
        map.put("previousOffset", "" + (indexOffset - 100));
        map.put("nextOffset", "" + (indexOffset + 100));
        map.put("logoImage", getLogo());
        map.put("isCommunity", ISCOMMUNITY);
        return new ModelAndView(map, "admin_repo.ftl");
    }, new FreeMarkerEngine());

    get("/admin/bulk/", (request, response) -> {
        if (getAuthenticatedUser(request) == null) {
            response.redirect("/login/");
            halt();
            return null;
        }

        Map<String, Object> map = new HashMap<>();
        map.put("logoImage", getLogo());
        map.put("isCommunity", ISCOMMUNITY);
        return new ModelAndView(map, "admin_bulk.ftl");
    }, new FreeMarkerEngine());

    get("/admin/api/", (request, response) -> {
        if (getAuthenticatedUser(request) == null) {
            response.redirect("/login/");
            halt();
            return null;
        }

        Map<String, Object> map = new HashMap<>();
        map.put("apiKeys", api.getAllApi());

        boolean apiEnabled = Boolean
                .parseBoolean(Properties.getProperties().getProperty("api_enabled", "false"));
        boolean apiAuth = Boolean
                .parseBoolean(Properties.getProperties().getProperty("api_key_authentication", "true"));

        map.put("apiAuthentication", apiEnabled && apiAuth);
        map.put("logoImage", getLogo());
        map.put("isCommunity", ISCOMMUNITY);
        return new ModelAndView(map, "admin_api.ftl");
    }, new FreeMarkerEngine());

    post("/admin/api/", (request, response) -> {
        if (getAuthenticatedUser(request) == null) {
            response.redirect("/login/");
            halt();
            return null;
        }

        apiService.createKeys();

        response.redirect("/admin/api/");
        halt();
        return null;
    }, new FreeMarkerEngine());

    get("/admin/api/delete/", "application/json", (request, response) -> {
        if (getAuthenticatedUser(request) == null || !request.queryParams().contains("publicKey")) {
            response.redirect("/login/");
            halt();
            return false;
        }

        String publicKey = request.queryParams("publicKey");
        apiService.deleteKey(publicKey);

        return true;
    }, new JsonTransformer());

    get("/admin/settings/", (request, response) -> {
        if (getAuthenticatedUser(request) == null) {
            response.redirect("/login/");
            halt();
            return null;
        }

        String[] highlighters = "agate,androidstudio,arta,ascetic,atelier-cave.dark,atelier-cave.light,atelier-dune.dark,atelier-dune.light,atelier-estuary.dark,atelier-estuary.light,atelier-forest.dark,atelier-forest.light,atelier-heath.dark,atelier-heath.light,atelier-lakeside.dark,atelier-lakeside.light,atelier-plateau.dark,atelier-plateau.light,atelier-savanna.dark,atelier-savanna.light,atelier-seaside.dark,atelier-seaside.light,atelier-sulphurpool.dark,atelier-sulphurpool.light,brown_paper,codepen-embed,color-brewer,dark,darkula,default,docco,far,foundation,github-gist,github,googlecode,grayscale,hopscotch,hybrid,idea,ir_black,kimbie.dark,kimbie.light,magula,mono-blue,monokai,monokai_sublime,obsidian,paraiso.dark,paraiso.light,pojoaque,railscasts,rainbow,school_book,solarized_dark,solarized_light,sunburst,tomorrow-night-blue,tomorrow-night-bright,tomorrow-night-eighties,tomorrow-night,tomorrow,vs,xcode,zenburn"
                .split(",");

        Map<String, Object> map = new HashMap<>();
        map.put("logoImage", getLogo());
        map.put("syntaxHighlighter", getSyntaxHighlighter());
        map.put("highlighters", highlighters);
        map.put("averageSalary", "" + (int) getAverageSalary());
        map.put("matchLines", "" + (int) getMatchLines());
        map.put("maxLineDepth", "" + (int) getMaxLineDepth());
        map.put("minifiedLength", "" + (int) getMinifiedLength());
        map.put("isCommunity", ISCOMMUNITY);
        return new ModelAndView(map, "admin_settings.ftl");
    }, new FreeMarkerEngine());

    get("/admin/reports/", (request, response) -> {
        if (getAuthenticatedUser(request) == null) {
            response.redirect("/login/");
            halt();
            return null;
        }

        Map<String, Object> map = new HashMap<>();
        map.put("logoImage", getLogo());
        map.put("isCommunity", ISCOMMUNITY);
        return new ModelAndView(map, "admin_reports.ftl");
    }, new FreeMarkerEngine());

    post("/admin/settings/", (request, response) -> {
        if (getAuthenticatedUser(request) == null) {
            response.redirect("/login/");
            halt();
            return null;
        }

        if (ISCOMMUNITY) {
            response.redirect("/admin/settings/");
            halt();
        }

        String logo = request.queryParams("logo").trim();
        String syntaxHighlighter = request.queryParams("syntaxhighligher");

        try {
            double averageSalary = Double.parseDouble(request.queryParams("averagesalary"));
            data.saveData(Values.AVERAGESALARY, "" + (int) averageSalary);
        } catch (NumberFormatException ex) {
            data.saveData(Values.AVERAGESALARY, Values.DEFAULTAVERAGESALARY);
        }

        try {
            double matchLines = Double.parseDouble(request.queryParams("matchlines"));
            data.saveData(Values.MATCHLINES, "" + (int) matchLines);
        } catch (NumberFormatException ex) {
            data.saveData(Values.MATCHLINES, Values.DEFAULTMATCHLINES);
        }

        try {
            double maxLineDepth = Double.parseDouble(request.queryParams("maxlinedepth"));
            data.saveData(Values.MAXLINEDEPTH, "" + (int) maxLineDepth);
        } catch (NumberFormatException ex) {
            data.saveData(Values.MAXLINEDEPTH, Values.DEFAULTMAXLINEDEPTH);
        }

        try {
            double minifiedlength = Double.parseDouble(request.queryParams("minifiedlength"));
            data.saveData(Values.MINIFIEDLENGTH, "" + (int) minifiedlength);
        } catch (NumberFormatException ex) {
            data.saveData(Values.MINIFIEDLENGTH, Values.DEFAULTMINIFIEDLENGTH);
        }

        data.saveData(Values.LOGO, logo);
        data.saveData(Values.SYNTAXHIGHLIGHTER, syntaxHighlighter);

        // Redo anything that requires updates at this point
        scl = Singleton.getSearchcodeLib(data);

        response.redirect("/admin/settings/");
        halt();
        return null;
    }, new FreeMarkerEngine());

    post("/admin/bulk/", (request, response) -> {
        if (getAuthenticatedUser(request) == null) {
            response.redirect("/login/");
            halt();
            return null;
        }

        String repos = request.queryParams("repos");
        String[] repolines = repos.split("\\r?\\n");

        for (String line : repolines) {
            String[] repoparams = line.split(",", -1);

            if (repoparams.length == 7) {
                String branch = repoparams[6].trim();
                if (branch.equals(Values.EMPTYSTRING)) {
                    branch = "master";
                }

                String scm = repoparams[1].trim().toLowerCase();
                if (scm.equals(Values.EMPTYSTRING)) {
                    scm = "git";
                }

                RepoResult rr = repo.getRepoByName(repoparams[0]);

                if (rr == null) {
                    repo.saveRepo(new RepoResult(-1, repoparams[0], scm, repoparams[2], repoparams[3],
                            repoparams[4], repoparams[5], branch));
                }
            }
        }

        response.redirect("/admin/bulk/");
        halt();
        return null;
    }, new FreeMarkerEngine());

    post("/admin/repo/", (request, response) -> {
        if (getAuthenticatedUser(request) == null) {
            response.redirect("/login/");
            halt();
            return null;
        }

        String[] reponames = request.queryParamsValues("reponame");
        String[] reposcms = request.queryParamsValues("reposcm");
        String[] repourls = request.queryParamsValues("repourl");
        String[] repousername = request.queryParamsValues("repousername");
        String[] repopassword = request.queryParamsValues("repopassword");
        String[] reposource = request.queryParamsValues("reposource");
        String[] repobranch = request.queryParamsValues("repobranch");

        for (int i = 0; i < reponames.length; i++) {
            if (reponames[i].trim().length() != 0) {
                String branch = repobranch[i].trim();
                if (branch.equals(Values.EMPTYSTRING)) {
                    branch = "master";
                }

                repo.saveRepo(new RepoResult(-1, reponames[i], reposcms[i], repourls[i], repousername[i],
                        repopassword[i], reposource[i], branch));
            }
        }

        response.redirect("/admin/repo/");
        halt();
        return null;
    }, new FreeMarkerEngine());

    get("/login/", (request, response) -> {
        if (getAuthenticatedUser(request) != null) {
            response.redirect("/admin/");
            halt();
            return null;
        }

        Map<String, Object> map = new HashMap<>();
        map.put("logoImage", getLogo());
        map.put("isCommunity", ISCOMMUNITY);
        return new ModelAndView(map, "login.ftl");
    }, new FreeMarkerEngine());

    post("/login/", (req, res) -> {
        if (req.queryParams().contains("password") && req.queryParams("password")
                .equals(com.searchcode.app.util.Properties.getProperties().getProperty("password"))) {
            addAuthenticatedUser(req);
            res.redirect("/admin/");
            halt();
        }

        Map<String, Object> map = new HashMap<>();
        map.put("logoImage", getLogo());
        map.put("isCommunity", ISCOMMUNITY);
        return new ModelAndView(map, "login.ftl");
    }, new FreeMarkerEngine());

    get("/logout/", (req, res) -> {
        removeAuthenticatedUser(req);
        res.redirect("/");
        return null;
    });

    get("/admin/delete/", "application/json", (request, response) -> {
        if (getAuthenticatedUser(request) == null || !request.queryParams().contains("repoName")) {
            response.redirect("/login/");
            halt();
            return false;
        }

        String repoName = request.queryParams("repoName");
        RepoResult rr = repo.getRepoByName(repoName);

        if (rr != null) {
            Singleton.getUniqueDeleteRepoQueue().add(rr);
        }

        return true;
    }, new JsonTransformer());

    get("/admin/checkversion/", "application/json", (request, response) -> {
        if (getAuthenticatedUser(request) == null) {
            response.redirect("/login/");
            halt();
            return false;
        }

        String version;
        try {
            version = IOUtils.toString(new URL("https://searchcode.com/product/version/")).replace("\"",
                    Values.EMPTYSTRING);
        } catch (IOException ex) {
            return "Unable to determine if running the latest version. Check https://searchcode.com/product/download/ for the latest release.";
        }

        if (App.VERSION.equals(version)) {
            return "Your searchcode server version " + version + " is the latest.";
        } else {
            return "Your searchcode server version " + App.VERSION
                    + " instance is out of date. The latest version is " + version + ".";
        }
    }, new JsonTransformer());

    get("/file/:codeid/:reponame/*", (request, response) -> {
        Map<String, Object> map = new HashMap<>();

        CodeSearcher cs = new CodeSearcher();
        Cocomo2 coco = new Cocomo2();

        StringBuilder code = new StringBuilder();

        String fileName = Values.EMPTYSTRING;
        if (request.splat().length != 0) {
            fileName = request.splat()[0];
        }

        CodeResult codeResult = cs.getByRepoFileName(request.params(":reponame"), fileName);

        if (codeResult == null) {
            int codeid = Integer.parseInt(request.params(":codeid"));
            codeResult = cs.getById(codeid);
        }

        if (codeResult == null) {
            response.redirect("/404/");
            halt();
        }

        List<String> codeLines = codeResult.code;
        for (int i = 0; i < codeLines.size(); i++) {
            code.append("<span id=\"" + (i + 1) + "\"></span>");
            code.append(StringEscapeUtils.escapeHtml4(codeLines.get(i)));
            code.append("\n");
        }

        boolean highlight = true;
        if (Integer.parseInt(codeResult.codeLines) > 1000) {
            highlight = false;
        }

        RepoResult repoResult = repo.getRepoByName(codeResult.repoName);

        if (repoResult != null) {
            map.put("source", repoResult.getSource());
        }

        map.put("fileName", codeResult.fileName);
        map.put("codePath", codeResult.codePath);
        map.put("codeLength", codeResult.codeLines);
        map.put("languageName", codeResult.languageName);
        map.put("md5Hash", codeResult.md5hash);
        map.put("repoName", codeResult.repoName);
        map.put("highlight", highlight);
        map.put("repoLocation", codeResult.getRepoLocation());
        map.put("codeValue", code.toString());
        map.put("highligher", getSyntaxHighlighter());
        map.put("codeOwner", codeResult.getCodeOwner());

        double estimatedEffort = coco.estimateEffort(scl.countFilteredLines(codeResult.getCode()));
        int estimatedCost = (int) coco.estimateCost(estimatedEffort, getAverageSalary());
        if (estimatedCost != 0 && !scl.languageCostIgnore(codeResult.getLanguageName())) {
            map.put("estimatedCost", estimatedCost);
        }

        map.put("logoImage", getLogo());
        map.put("isCommunity", ISCOMMUNITY);
        return new ModelAndView(map, "coderesult.ftl");
    }, new FreeMarkerEngine());

    /**
     * Deprecated, should not be used
     * TODO delete this method
     */
    get("/codesearch/view/:codeid", (request, response) -> {
        Map<String, Object> map = new HashMap<>();

        int codeid = Integer.parseInt(request.params(":codeid"));
        CodeSearcher cs = new CodeSearcher();
        Cocomo2 coco = new Cocomo2();

        StringBuilder code = new StringBuilder();

        // Escape all the lines and include a deeplink for each line number
        CodeResult codeResult = cs.getById(codeid);

        if (codeResult == null) {
            response.redirect("/404/");
            halt();
        }

        List<String> codeLines = codeResult.code;
        for (int i = 0; i < codeLines.size(); i++) {
            code.append("<span id=\"" + (i + 1) + "\"></span>");
            code.append(StringEscapeUtils.escapeHtml4(codeLines.get(i)));
            code.append("\n");
        }

        boolean highlight = true;
        if (Integer.parseInt(codeResult.codeLines) > 1000) {
            highlight = false;
        }

        RepoResult repoResult = repo.getRepoByName(codeResult.repoName);

        if (repoResult != null) {
            map.put("source", repoResult.getSource());
        }

        map.put("fileName", codeResult.fileName);
        map.put("codePath", codeResult.codePath);
        map.put("codeLength", codeResult.codeLines);
        map.put("languageName", codeResult.languageName);
        map.put("md5Hash", codeResult.md5hash);
        map.put("repoName", codeResult.repoName);
        map.put("highlight", highlight);
        map.put("repoLocation", codeResult.getRepoLocation());
        map.put("codeValue", code.toString());
        map.put("highligher", getSyntaxHighlighter());
        map.put("codeOwner", codeResult.getCodeOwner());

        double estimatedEffort = coco.estimateEffort(scl.countFilteredLines(codeResult.getCode()));
        int estimatedCost = (int) coco.estimateCost(estimatedEffort, getAverageSalary());
        if (estimatedCost != 0 && !scl.languageCostIgnore(codeResult.getLanguageName())) {
            map.put("estimatedCost", estimatedCost);
        }

        map.put("logoImage", getLogo());
        map.put("isCommunity", ISCOMMUNITY);
        return new ModelAndView(map, "coderesult.ftl");
    }, new FreeMarkerEngine());

    get("/documentation/", (request, response) -> {
        Map<String, Object> map = new HashMap<>();
        map.put("logoImage", getLogo());
        map.put("isCommunity", ISCOMMUNITY);
        return new ModelAndView(map, "documentation.ftl");
    }, new FreeMarkerEngine());

    get("/search_test/", (request, response) -> {
        Map<String, Object> map = new HashMap<>();
        map.put("logoImage", getLogo());
        map.put("isCommunity", ISCOMMUNITY);
        return new ModelAndView(map, "search_test.ftl");
    }, new FreeMarkerEngine());

    get("/404/", (request, response) -> {
        Map<String, Object> map = new HashMap<>();
        map.put("logoImage", getLogo());
        map.put("isCommunity", ISCOMMUNITY);
        return new ModelAndView(map, "404.ftl");
    }, new FreeMarkerEngine());

    /**
     * Test that was being used to display blame information
     */
    // get("/test/:reponame/*", (request, response) -> {
    //     User user = injector.getInstance(User.class);
    //     user.Blame(request.params(":reponame"), request.splat()[0]);
    //     return "";
    // }, new JsonTransformer());
}
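Two put patterns recur in the example above: a HashMap<String, Object> used as the view model handed to FreeMarker, and the /api/codesearch/ result cache checked with containsKey/get before put. A condensed, hypothetical sketch of the cache pattern; the original does not show how the cache field is declared, so a thread-safe map is assumed (Spark handlers run concurrently), and doSearch stands in for cs.search:

// Hypothetical sketch of the /api/codesearch/ caching pattern above.
private static final Map<String, SearchResult> cache = new ConcurrentHashMap<>();

static SearchResult cachedSearch(String query, int page, String filters) {
    String cacheKey = query + page + filters;               // the key encodes every search input
    SearchResult cached = cache.get(cacheKey);
    if (cached != null) {
        return cached;                                      // hit: skip the index entirely
    }
    SearchResult result = doSearch(query + filters, page);  // stand-in for cs.search(...)
    cache.put(cacheKey, result);                            // populate for subsequent requests
    return result;
}

The containsKey-then-get sequence in the original performs two lookups and can race between them on a concurrent map; a single get followed by put (or computeIfAbsent) is the tighter form.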
From source file:org.silverpeas.dbbuilder.DBBuilder.java
/**
 * @param args
 */
public static void main(String[] args) {
    ClassPathXmlApplicationContext springContext = new ClassPathXmlApplicationContext(
            "classpath:/spring-jdbc-datasource.xml");
    try {
        // Open the trace output
        Date startDate = new Date();
        System.out.println(
                MessageFormat.format(messages.getString("dbbuilder.start"), DBBuilderAppVersion, startDate));
        console = new Console(DBBuilder.class);
        console.printMessage("*************************************************************");
        console.printMessage(
                MessageFormat.format(messages.getString("dbbuilder.start"), DBBuilderAppVersion, startDate));

        // Read the environment settings from dbBuilderSettings
        dbBuilderResources = FileUtil
                .loadResource("/org/silverpeas/dbBuilder/settings/dbBuilderSettings.properties");

        // Read the input parameters
        params = new CommandLineParameters(console, args);

        if (params.isSimulate() && DatabaseType.ORACLE == params.getDbType()) {
            throw new Exception(messages.getString("oracle.simulate.error"));
        }

        console.printMessage(messages.getString("jdbc.connection.configuration"));
        console.printMessage(ConnectionFactory.getConnectionInfo());
        console.printMessage("\tAction : " + params.getAction());
        console.printMessage("\tVerbose mode : " + params.isVerbose());
        console.printMessage("\tSimulate mode : " + params.isSimulate());

        if (Action.ACTION_CONNECT == params.getAction()) {
            // just print a message and stop there
            console.printMessage(messages.getString("connection.success"));
            System.out.println(messages.getString("connection.success"));
        } else {
            // Modules present in the database before the install
            console.printMessage("DB Status before build :");
            List<String> packagesIntoDB = checkDBStatus();

            // Initialise the collection of SQL instructions to run at the end of the upgrade,
            // which bring the module versions stored in the database up to date
            MetaInstructions sqlMetaInstructions = new MetaInstructions();

            File dirXml = new File(params.getDbType().getDBContributionDir());
            DBXmlDocument destXml = loadMasterContribution(dirXml);

            UninstallInformations processesToCacheIntoDB = new UninstallInformations();

            File[] listeFileXml = dirXml.listFiles();
            Arrays.sort(listeFileXml);

            List<DBXmlDocument> listeDBXmlDocument = new ArrayList<DBXmlDocument>(listeFileXml.length);
            int ignoredFiles = 0;

            // Open all the configuration files
            console.printMessage(messages.getString("ignored.contribution"));

            for (File xmlFile : listeFileXml) {
                if (xmlFile.isFile() && "xml".equals(FileUtil.getExtension(xmlFile))
                        && !(FIRST_DBCONTRIBUTION_FILE.equalsIgnoreCase(xmlFile.getName()))
                        && !(MASTER_DBCONTRIBUTION_FILE.equalsIgnoreCase(xmlFile.getName()))) {
                    DBXmlDocument fXml = new DBXmlDocument(dirXml, xmlFile.getName());
                    fXml.load();
                    // check the dependencies and keep the file only if they are all resolved
                    if (hasUnresolvedRequirements(listeFileXml, fXml)) {
                        console.printMessage(
                                '\t' + xmlFile.getName() + " (because of unresolved requirements).");
                        ignoredFiles++;
                    } else if (ACTION_ENFORCE_UNINSTALL == params.getAction()) {
                        console.printMessage('\t' + xmlFile.getName() + " (because of "
                                + ACTION_ENFORCE_UNINSTALL + " mode).");
                        ignoredFiles++;
                    } else {
                        listeDBXmlDocument.add(fXml);
                    }
                }
            }
            if (0 == ignoredFiles) {
                console.printMessage("\t(none)");
            }

            // prepare a HashMap of the modules present as contribution files
            Map<String, Object> packagesIntoFile = new HashMap<>();
            int j = 0;
            console.printMessage(messages.getString("merged.contribution"));
            console.printMessage(params.getAction().toString());
            if (ACTION_ENFORCE_UNINSTALL != params.getAction()) {
                console.printMessage('\t' + FIRST_DBCONTRIBUTION_FILE);
                j++;
            }
            for (DBXmlDocument currentDoc : listeDBXmlDocument) {
                console.printMessage('\t' + currentDoc.getName());
                j++;
            }
            if (0 == j) {
                console.printMessage("\t(none)");
            }

            // merge the eligible contribution files:
            console.printMessage("Build decisions are :");

            // ... the dbbuilder-contribution file first ...
            DBXmlDocument fileXml;
            if (ACTION_ENFORCE_UNINSTALL != params.getAction()) {
                try {
                    fileXml = new DBXmlDocument(dirXml, FIRST_DBCONTRIBUTION_FILE);
                    fileXml.load();
                } catch (Exception e) {
                    // the dbbuilder contribution was not found -> carry on, we are most
                    // likely uninstalling everything
                    fileXml = null;
                }
                if (null != fileXml) {
                    DBBuilderFileItem dbbuilderItem = new DBBuilderFileItem(fileXml);
                    packagesIntoFile.put(dbbuilderItem.getModule(), null);
                    mergeActionsToDo(dbbuilderItem, destXml, processesToCacheIntoDB, sqlMetaInstructions);
                }
            }

            // ... then the others ...
            for (DBXmlDocument currentDoc : listeDBXmlDocument) {
                DBBuilderFileItem tmpdbbuilderItem = new DBBuilderFileItem(currentDoc);
                packagesIntoFile.put(tmpdbbuilderItem.getModule(), null);
                mergeActionsToDo(tmpdbbuilderItem, destXml, processesToCacheIntoDB, sqlMetaInstructions);
            }

            // ... and finally the database pieces to uninstall.
            // Careful: since no ordering information is available, process them in reverse
            // order so that busCore goes last, as many constraints in the other modules
            // reference the primary keys of busCore.
            List<String> itemsList = new ArrayList<String>();

            boolean foundDBBuilder = false;
            for (String dbPackage : packagesIntoDB) {
                if (!packagesIntoFile.containsKey(dbPackage)) {
                    // package in the database but not in a contribution file -> candidate for uninstallation
                    if (DBBUILDER_MODULE.equalsIgnoreCase(dbPackage)) {
                        foundDBBuilder = true;
                    } else if (ACTION_ENFORCE_UNINSTALL == params.getAction()) {
                        if (dbPackage.equals(params.getModuleName())) {
                            itemsList.add(0, dbPackage);
                        }
                    } else {
                        itemsList.add(0, dbPackage);
                    }
                }
            }
            if (foundDBBuilder) {
                if (ACTION_ENFORCE_UNINSTALL == params.getAction()) {
                    if (DBBUILDER_MODULE.equals(params.getModuleName())) {
                        itemsList.add(itemsList.size(), DBBUILDER_MODULE);
                    }
                } else {
                    itemsList.add(itemsList.size(), DBBUILDER_MODULE);
                }
            }

            for (String item : itemsList) {
                console.printMessage("**** Treating " + item + " ****");
                DBBuilderDBItem tmpdbbuilderItem = new DBBuilderDBItem(item);
                mergeActionsToDo(tmpdbbuilderItem, destXml, processesToCacheIntoDB, sqlMetaInstructions);
            }

            destXml.setName("res.txt");
            destXml.save();

            console.printMessage("Build parts are :");

            // Process the selected pieces.
            // Note: errors are handled during this phase, so they are caught on return
            // without being processed again.
            if (ACTION_INSTALL == params.getAction()) {
                processDB(destXml, processesToCacheIntoDB, sqlMetaInstructions, TAGS_TO_MERGE_4_INSTALL);
            } else if (ACTION_UNINSTALL == params.getAction()
                    || ACTION_ENFORCE_UNINSTALL == params.getAction()) {
                processDB(destXml, processesToCacheIntoDB, sqlMetaInstructions, TAGS_TO_MERGE_4_UNINSTALL);
            } else if (ACTION_OPTIMIZE == params.getAction()) {
                processDB(destXml, processesToCacheIntoDB, sqlMetaInstructions, TAGS_TO_MERGE_4_OPTIMIZE);
            } else if (ACTION_ALL == params.getAction()) {
                processDB(destXml, processesToCacheIntoDB, sqlMetaInstructions, TAGS_TO_MERGE_4_ALL);
            }

            // Modules present in the database at the end
            console.printMessage("Finally DB Status :");
            checkDBStatus();
        }

        Date endDate = new Date();
        console.printMessage(MessageFormat.format(messages.getString("dbbuilder.success"), endDate));
        System.out.println("*******************************************************************");
        System.out.println(MessageFormat.format(messages.getString("dbbuilder.success"), endDate));
    } catch (Exception e) {
        e.printStackTrace();
        console.printError(e.getMessage(), e);
        Date endDate = new Date();
        console.printError(MessageFormat.format(messages.getString("dbbuilder.failure"), endDate));
        System.out.println("*******************************************************************");
        System.out.println(MessageFormat.format(messages.getString("dbbuilder.failure"), endDate));
        System.exit(1);
    } finally {
        springContext.close();
        console.close();
    }
}
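In this example put is used purely for key membership: packagesIntoFile.put(module, null) records that a module has a contribution file, and containsKey later decides what to uninstall. The same presence-map idiom in isolation (the module names here are invented):

Map<String, Object> packagesIntoFile = new HashMap<>();
packagesIntoFile.put("busCore", null);     // only the key matters; the value is a dummy
packagesIntoFile.put("whitePages", null);

System.out.println(packagesIntoFile.containsKey("busCore"));    // true
System.out.println(packagesIntoFile.containsKey("silverStat")); // false

When only membership matters, a Set<String> with add/contains expresses the same intent without the dummy null values.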
From source file:com.curecomp.primefaces.migrator.PrimefacesMigration.java
public static void main(String[] args) throws Exception {
    // Let's use some colors :)
    // AnsiConsole.systemInstall();
    CommandLineParser cliParser = new BasicParser();
    CommandLine cli = null;
    try {
        cli = cliParser.parse(OPTIONS, args);
    } catch (ParseException e) {
        // (assumes printHelp() terminates the program; otherwise cli would still be null below)
        printHelp();
    }
    if (!cli.hasOption("s")) {
        printHelp();
    }

    String sourcePattern;
    if (cli.hasOption("p")) {
        sourcePattern = cli.getOptionValue("p");
    } else {
        sourcePattern = DEFAULT_SOURCE_PATTERN;
    }

    String defaultAnswer;
    if (cli.hasOption("default-answer")) {
        defaultAnswer = cli.getOptionValue("default-answer");
    } else {
        defaultAnswer = DEFAULT_DEFAULT_PROMPT_ANSWER;
    }

    boolean defaultAnswerYes = defaultAnswer.equalsIgnoreCase("y");
    boolean quiet = cli.hasOption("q");
    boolean testWrite = cli.hasOption("t");
    Path sourceDirectory = Paths.get(cli.getOptionValue("s")).toAbsolutePath();

    // Since we use IO we will have some blocking threads hanging around
    int threadCount = Runtime.getRuntime().availableProcessors() * 2;
    ThreadPoolExecutor threadPool = new ThreadPoolExecutor(threadCount, threadCount, 0L, TimeUnit.MILLISECONDS,
            new LinkedBlockingQueue<>());
    BlockingQueue<WidgetVarLocation> foundUsages = new LinkedBlockingQueue<>();
    BlockingQueue<WidgetVarLocation> unusedOrAmbiguous = new LinkedBlockingQueue<>();
    BlockingQueue<WidgetVarLocation> skippedUsages = new LinkedBlockingQueue<>();
    List<Future<?>> futures = new ArrayList<>();

    findWidgetVars(sourceDirectory, sourcePattern, threadPool).forEach(widgetVarLocation -> {
        // We can't really find usages of widget vars that use EL expressions :(
        if (widgetVarLocation.widgetVar.contains("#")) {
            unusedOrAmbiguous.add(widgetVarLocation);
            return;
        }

        try {
            FileActionVisitor visitor = new FileActionVisitor(sourceDirectory, sourcePattern,
                    sourceFile -> futures.add(threadPool.submit((Callable<?>) () -> {
                        findWidgetVarUsages(sourceFile, widgetVarLocation, foundUsages, skippedUsages,
                                unusedOrAmbiguous);
                        return null;
                    })));

            Files.walkFileTree(sourceDirectory, visitor);
        } catch (IOException ex) {
            throw new RuntimeException(ex);
        }
    });

    awaitAll(futures);

    new TreeSet<>(skippedUsages).forEach(widgetUsage -> {
        int startIndex = widgetUsage.columnNr;
        int endIndex = startIndex + widgetUsage.widgetVar.length();
        String relativePath = widgetUsage.location.toAbsolutePath().toString()
                .substring(sourceDirectory.toString().length());
        String previous = replace(widgetUsage.line, startIndex, endIndex,
                Ansi.ansi().bold().fg(Ansi.Color.RED).a(widgetUsage.widgetVar).reset().toString());
        System.out.println("Skipped " + relativePath + " at line " + widgetUsage.lineNr + " and col "
                + widgetUsage.columnNr + " for widgetVar '" + widgetUsage.widgetVar + "'");
        System.out.println("\t" + previous);
    });

    Map<WidgetVarLocation, List<WidgetVarLocation>> written = new HashMap<>();

    new TreeSet<>(foundUsages).forEach(widgetUsage -> {
        WidgetVarLocation key = new WidgetVarLocation(null, widgetUsage.location, widgetUsage.lineNr, -1, null);
        List<WidgetVarLocation> writtenList = written.get(key);
        int existing = writtenList == null ? 0 : writtenList.size();
        int startIndex = widgetUsage.columnNr;
        int endIndex = startIndex + widgetUsage.widgetVar.length();
        String relativePath = widgetUsage.location.toAbsolutePath().toString()
                .substring(sourceDirectory.toString().length());
        String next = replace(widgetUsage.line, startIndex, endIndex, Ansi.ansi().bold().fg(Ansi.Color.RED)
                .a("PF('" + widgetUsage.widgetVar + "')").reset().toString());

        System.out.println(relativePath + " at line " + widgetUsage.lineNr + " and col " + widgetUsage.columnNr);
        System.out.println("\t" + next);
        System.out.print("Replace (Y/N)? [" + (defaultAnswerYes ? "Y" : "N") + "]: ");

        String input;
        if (quiet) {
            input = "";
            System.out.println();
        } else {
            try {
                do {
                    input = in.readLine();
                } while (input != null && !input.isEmpty() && !"y".equalsIgnoreCase(input)
                        && !"n".equalsIgnoreCase(input));
            } catch (IOException ex) {
                throw new RuntimeException(ex);
            }
        }

        if (input == null) {
            System.out.println("Aborted!");
        } else if (input.isEmpty() && defaultAnswerYes || !input.isEmpty() && !"n".equalsIgnoreCase(input)) {
            System.out.println("Replaced!");
            System.out.print("\t");

            if (writtenList == null) {
                writtenList = new ArrayList<>();
                written.put(key, writtenList);
            }
            writtenList.add(widgetUsage);

            List<String> lines;
            try {
                lines = Files.readAllLines(widgetUsage.location);
            } catch (IOException ex) {
                throw new RuntimeException(ex);
            }

            try (OutputStream os = testWrite ? new ByteArrayOutputStream()
                    : Files.newOutputStream(widgetUsage.location);
                    PrintWriter pw = new PrintWriter(new OutputStreamWriter(os, StandardCharsets.UTF_8))) {
                String line;

                for (int i = 0; i < lines.size(); i++) {
                    int lineNr = i + 1;
                    line = lines.get(i);

                    if (lineNr == widgetUsage.lineNr) {
                        int begin = widgetUsage.columnNr + (testWrite ? 0 : existing * 6);
                        int end = begin + widgetUsage.widgetVar.length();
                        String newLine = replace(line, begin, end, "PF('" + widgetUsage.widgetVar + "')",
                                false);

                        if (testWrite) {
                            System.out.println(newLine);
                        } else {
                            pw.println(newLine);
                        }
                    } else {
                        if (!testWrite) {
                            pw.println(line);
                        }
                    }
                }
            } catch (IOException ex) {
                throw new RuntimeException(ex);
            }
        } else {
            System.out.println("Skipped!");
        }
    });

    new TreeSet<>(unusedOrAmbiguous).forEach(widgetUsage -> {
        int startIndex = widgetUsage.columnNr;
        int endIndex = startIndex + widgetUsage.widgetVar.length();
        String relativePath = widgetUsage.location.toAbsolutePath().toString()
                .substring(sourceDirectory.toString().length());
        String previous = replace(widgetUsage.line, startIndex, endIndex,
                Ansi.ansi().bold().fg(Ansi.Color.RED).a(widgetUsage.widgetVar).reset().toString());
        System.out.println("Skipped unused or ambiguous " + relativePath + " at line " + widgetUsage.lineNr
                + " and col " + widgetUsage.columnNr);
        System.out.println("\t" + previous);
    });

    threadPool.shutdown();
}
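The written map above uses the classic get-or-create idiom: get, test for null, create and put the list, then mutate it. Since Java 8, Map.computeIfAbsent collapses those steps; a sketch reusing the example's key and widgetUsage variables:

Map<WidgetVarLocation, List<WidgetVarLocation>> written = new HashMap<>();

// The pre-Java-8 idiom used in the example:
List<WidgetVarLocation> writtenList = written.get(key);
if (writtenList == null) {
    writtenList = new ArrayList<>();
    written.put(key, writtenList);
}
writtenList.add(widgetUsage);

// The equivalent one-liner:
written.computeIfAbsent(key, k -> new ArrayList<>()).add(widgetUsage);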
From source file:com.mch.registry.ccs.server.CcsClient.java
/**
 * Sends messages to registered devices
 */
public static void main(String[] args) {
    Config config = new Config();
    final String projectId = config.getProjectId();
    final String key = config.getKey();
    final CcsClient ccsClient = CcsClient.prepareClient(projectId, key, true);

    try {
        ccsClient.connect();
    } catch (XMPPException e) {
        logger.log(Level.WARNING, "XMPP Exception ", e);
    }

    final Runnable sendNotifications = new Runnable() {
        public void run() {
            try {
                logger.log(Level.INFO, "Working Q!");

                if (!isOffHours()) {
                    // Prepare downstream message
                    String toRegId = "";
                    String message = "";
                    String messageId = "";
                    Map<String, String> payload = new HashMap<String, String>();
                    String collapseKey = null;
                    Long timeToLive = 10000L;
                    Boolean delayWhileIdle = true;
                    String messagePrefix = "";
                    int notificationQueueID = 0;
                    boolean successfullySent = false;

                    // Read from the MySQL database
                    MySqlHandler mysql = new MySqlHandler();
                    ArrayList<Notification> queue = new ArrayList<Notification>();

                    for (int i = 1; i < 3; i++) {
                        queue = mysql.getNotificationQueue(i);
                        if (queue.size() > 0) {
                            switch (i) {
                            case 1:
                                messagePrefix = "_V: ";
                                break;
                            case 2:
                                messagePrefix = "_R: ";
                                break;
                            default:
                                messagePrefix = "";
                                logger.log(Level.WARNING, "Unknown message type!");
                            }

                            Notification notification = new Notification();
                            Iterator<Notification> iterator = queue.iterator();
                            while (iterator.hasNext()) {
                                notification = iterator.next();
                                toRegId = notification.getGcmRegID();
                                message = notification.getNotificationText();
                                notificationQueueID = notification.getNotificationQueueID();
                                messageId = "m-" + Long.toString(random.nextLong());

                                payload = new HashMap<String, String>();
                                payload.put("message", messagePrefix + message);

                                try {
                                    // Send the downstream message to a device.
                                    ccsClient.send(createJsonMessage(toRegId, messageId, payload, collapseKey,
                                            timeToLive, delayWhileIdle));
                                    successfullySent = true;
                                    logger.log(Level.INFO, "Message sent. ID: " + notificationQueueID
                                            + ", RegID: " + toRegId + ", Text: " + message);
                                } catch (Exception e) {
                                    mysql.prepareNotificationForTheNextDay(notificationQueueID);
                                    successfullySent = false;
                                    logger.log(Level.WARNING, "Message could not be sent! ID: "
                                            + notificationQueueID + ", RegID: " + toRegId + ", Text: "
                                            + message);
                                }

                                if (successfullySent) {
                                    mysql.moveNotificationToHistory(notificationQueueID);
                                }
                            }
                        } else {
                            logger.log(Level.INFO, "No notifications to send. Type: " + Integer.toString(i));
                        }
                    }
                }
            } catch (Exception e) {
                logger.log(Level.WARNING, "Exception ", e);
            }
        }
    };

    ScheduledExecutorService executor = Executors.newSingleThreadScheduledExecutor();
    // Start when the server starts and every 30 minutes after
    ScheduledFuture<?> task = executor.scheduleAtFixedRate(sendNotifications, 0, 30, TimeUnit.MINUTES);
    try {
        task.get();
    } catch (ExecutionException e) {
        logger.log(Level.SEVERE, "Exception ", e);
    } catch (InterruptedException e) {
        logger.log(Level.SEVERE, "Exception ", e);
    }
    task.cancel(false);

    try {
        executor.shutdown();
        executor.awaitTermination(30, TimeUnit.SECONDS);
    } catch (InterruptedException e) {
        logger.log(Level.SEVERE, "Exception ", e);
    }
}
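Note how the loop calls new HashMap<>() before each payload.put: the map already handed to createJsonMessage for one notification must not be mutated while building the next, so every message gets its own instance. The per-message pattern in isolation; buildAndSend and nextMessageId are invented helpers standing in for the example's ccsClient.send/createJsonMessage pair and message-id generation:

for (Notification n : queue) {
    Map<String, String> payload = new HashMap<>();            // fresh map per message
    payload.put("message", messagePrefix + n.getNotificationText());
    buildAndSend(n.getGcmRegID(), nextMessageId(), payload);  // payload is now owned by this message
}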
From source file:com.act.lcms.db.analysis.PathwayProductAnalysis.java
public static void main(String[] args) throws Exception {
    Options opts = new Options();
    for (Option.Builder b : OPTION_BUILDERS) {
        opts.addOption(b.build());
    }

    CommandLine cl = null;
    try {
        CommandLineParser parser = new DefaultParser();
        cl = parser.parse(opts, args);
    } catch (ParseException e) {
        System.err.format("Argument parsing failed: %s\n", e.getMessage());
        HELP_FORMATTER.printHelp(LoadPlateCompositionIntoDB.class.getCanonicalName(), HELP_MESSAGE, opts, null,
                true);
        System.exit(1);
    }

    if (cl.hasOption("help")) {
        HELP_FORMATTER.printHelp(LoadPlateCompositionIntoDB.class.getCanonicalName(), HELP_MESSAGE, opts, null,
                true);
        return;
    }

    File lcmsDir = new File(cl.getOptionValue(OPTION_DIRECTORY));
    if (!lcmsDir.isDirectory()) {
        System.err.format("File at %s is not a directory\n", lcmsDir.getAbsolutePath());
        HELP_FORMATTER.printHelp(LoadPlateCompositionIntoDB.class.getCanonicalName(), HELP_MESSAGE, opts, null,
                true);
        System.exit(1);
    }

    Double fontScale = null;
    if (cl.hasOption("font-scale")) {
        try {
            fontScale = Double.parseDouble(cl.getOptionValue("font-scale"));
        } catch (IllegalArgumentException e) {
            System.err.format("Argument for font-scale must be a floating point number.\n");
            System.exit(1);
        }
    }

    try (DB db = DB.openDBFromCLI(cl)) {
        Set<Integer> takeSamplesFromPlateIds = null;
        if (cl.hasOption(OPTION_FILTER_BY_PLATE_BARCODE)) {
            String[] plateBarcodes = cl.getOptionValues(OPTION_FILTER_BY_PLATE_BARCODE);
            System.out.format("Considering only sample wells in plates: %s\n",
                    StringUtils.join(plateBarcodes, ", "));
            takeSamplesFromPlateIds = new HashSet<>(plateBarcodes.length);
            for (String plateBarcode : plateBarcodes) {
                Plate p = Plate.getPlateByBarcode(db, plateBarcode);
                if (p == null) {
                    System.err.format("WARNING: unable to find plate in DB with barcode %s\n", plateBarcode);
                } else {
                    takeSamplesFromPlateIds.add(p.getId());
                }
            }
            // Allow filtering on barcode even if we couldn't find any in the DB.
        }

        System.out.format("Loading/updating LCMS scan files into DB\n");
        ScanFile.insertOrUpdateScanFilesInDirectory(db, lcmsDir);

        System.out.format("Processing LCMS scans\n");
        Pair<List<LCMSWell>, Set<Integer>> positiveWellsAndPlateIds = Utils.extractWellsAndPlateIds(db,
                cl.getOptionValues(OPTION_STRAINS), cl.getOptionValues(OPTION_CONSTRUCT),
                takeSamplesFromPlateIds, false);
        List<LCMSWell> positiveWells = positiveWellsAndPlateIds.getLeft();
        if (positiveWells.size() == 0) {
            throw new RuntimeException("Found no LCMS wells for specified strains/constructs");
        }

        // Only take negative samples from the plates where we found the positive samples.
        Pair<List<LCMSWell>, Set<Integer>> negativeWellsAndPlateIds = Utils.extractWellsAndPlateIds(db,
                cl.getOptionValues(OPTION_NEGATIVE_STRAINS), cl.getOptionValues(OPTION_NEGATIVE_CONSTRUCTS),
                positiveWellsAndPlateIds.getRight(), true);
        List<LCMSWell> negativeWells = negativeWellsAndPlateIds.getLeft();
        if (negativeWells == null || negativeWells.size() == 0) {
            System.err.format("WARNING: no valid negative samples found in same plates as positive samples\n");
        }

        // Extract the chemicals in the pathway and their product masses, then look up info on those chemicals
        List<Pair<ChemicalAssociatedWithPathway, Double>> productMasses = Utils
                .extractMassesForChemicalsAssociatedWithConstruct(db, cl.getOptionValue(OPTION_CONSTRUCT));
        List<Pair<String, Double>> searchMZs = new ArrayList<>(productMasses.size());
        List<ChemicalAssociatedWithPathway> pathwayChems = new ArrayList<>(productMasses.size());
        for (Pair<ChemicalAssociatedWithPathway, Double> productMass : productMasses) {
            String chemName = productMass.getLeft().getChemical();
            searchMZs.add(Pair.of(chemName, productMass.getRight()));
            pathwayChems.add(productMass.getLeft());
        }
        System.out.format("Searching for intermediate/side-reaction products:\n");
        for (Pair<String, Double> searchMZ : searchMZs) {
            System.out.format(" %s: %.3f\n", searchMZ.getLeft(), searchMZ.getRight());
        }

        // Look up the standard by name.
        List<StandardWell> standardWells = new ArrayList<>();
        if (cl.hasOption(OPTION_STANDARD_WELLS)) {
            Plate standardPlate = Plate.getPlateByBarcode(db, cl.getOptionValue(OPTION_STANDARD_PLATE_BARCODE));
            Map<Integer, StandardWell> pathwayIdToStandardWell = extractStandardWellsFromOptionsList(db,
                    pathwayChems, cl.getOptionValues(OPTION_STANDARD_WELLS), standardPlate);
            for (ChemicalAssociatedWithPathway c : pathwayChems) { // TODO: we can avoid this loop.
                StandardWell well = pathwayIdToStandardWell.get(c.getId());
                if (well != null) {
                    standardWells.add(well);
                }
            }
        } else {
            for (ChemicalAssociatedWithPathway c : pathwayChems) {
                String standardName = c.getChemical();
                System.out.format("Searching for well containing standard %s\n", standardName);
                List<StandardWell> wells = StandardIonAnalysis.getStandardWellsForChemical(db, c.getChemical());
                if (wells != null) {
                    standardWells.addAll(wells);
                }
            }
        }

        boolean useFineGrainedMZ = cl.hasOption("fine-grained-mz");
        boolean useSNR = cl.hasOption(OPTION_USE_SNR);

        /* Process the standard, positive, and negative wells, producing ScanData containers that will allow
         * them to be iterated over for graph writing. We do not need to specify granular includeIons and
         * excludeIons since this would not take advantage of our caching strategy which uses a list of metlin
         * ions as an index. */
        HashMap<Integer, Plate> plateCache = new HashMap<>();
        Pair<List<ScanData<StandardWell>>, Double> allStandardScans = AnalysisHelper.processScans(db, lcmsDir,
                searchMZs, ScanData.KIND.STANDARD, plateCache, standardWells, useFineGrainedMZ, EMPTY_SET,
                EMPTY_SET, useSNR);
        Pair<List<ScanData<LCMSWell>>, Double> allPositiveScans = AnalysisHelper.processScans(db, lcmsDir,
                searchMZs, ScanData.KIND.POS_SAMPLE, plateCache, positiveWells, useFineGrainedMZ, EMPTY_SET,
                EMPTY_SET, useSNR);
        Pair<List<ScanData<LCMSWell>>, Double> allNegativeScans = AnalysisHelper.processScans(db, lcmsDir,
                searchMZs, ScanData.KIND.NEG_CONTROL, plateCache, negativeWells, useFineGrainedMZ, EMPTY_SET,
                EMPTY_SET, useSNR);

        String fmt = "pdf";
        String outImg = cl.getOptionValue(OPTION_OUTPUT_PREFIX) + "." + fmt;
        String outData = cl.getOptionValue(OPTION_OUTPUT_PREFIX) + ".data";
        String outAnalysis = cl.getOptionValue(OPTION_OUTPUT_PREFIX) + ".tsv";
        System.err.format("Writing combined scan data to %s and graphs to %s\n", outData, outImg);
        String plottingDirectory = cl.getOptionValue(OPTION_PLOTTING_DIR);

        List<ScanData<LCMSWell>> posNegWells = new ArrayList<>();
        posNegWells.addAll(allPositiveScans.getLeft());
        posNegWells.addAll(allNegativeScans.getLeft());

        Map<Integer, String> searchIons;
        if (cl.hasOption(OPTION_PATHWAY_SEARCH_IONS)) {
            searchIons = extractPathwayStepIons(pathwayChems, cl.getOptionValues(OPTION_PATHWAY_SEARCH_IONS),
                    cl.getOptionValue(OPTION_SEARCH_ION, "M+H"));
            /* This is pretty lazy, but works with the existing API. Extract all selected ions for all search
             * masses when performing the scan, then filter down to the desired ions for the plot at the end.
             * TODO: specify the masses and scans per sample rather than batching everything together. It might
             * be slower, but it'll be clearer to read. */
        } else {
            // We need to make sure that the standard metlin ion we choose is consistent with the ion modes of
            // the given positive, negative and standard scan files. For example, we should not pick a negative
            // metlin ion if all our available positive control scan files are in the positive ion mode.
            Map<Integer, Pair<Boolean, Boolean>> ionModes = new HashMap<>();
            for (ChemicalAssociatedWithPathway chemical : pathwayChems) {
                boolean isPositiveScanPresent = false;
                boolean isNegativeScanPresent = false;

                for (ScanData<StandardWell> scan : allStandardScans.getLeft()) {
                    if (chemical.getChemical().equals(scan.getWell().getChemical())
                            && chemical.getChemical().equals(scan.getTargetChemicalName())) {
                        if (MS1.IonMode.valueOf(
                                scan.getScanFile().getMode().toString().toUpperCase()) == MS1.IonMode.POS) {
                            isPositiveScanPresent = true;
                        }
                        if (MS1.IonMode.valueOf(
                                scan.getScanFile().getMode().toString().toUpperCase()) == MS1.IonMode.NEG) {
                            isNegativeScanPresent = true;
                        }
                    }
                }

                for (ScanData<LCMSWell> scan : posNegWells) {
                    if (chemical.getChemical().equals(scan.getWell().getChemical())
                            && chemical.getChemical().equals(scan.getTargetChemicalName())) {
                        if (MS1.IonMode.valueOf(
                                scan.getScanFile().getMode().toString().toUpperCase()) == MS1.IonMode.POS) {
                            isPositiveScanPresent = true;
                        }
                        if (MS1.IonMode.valueOf(
                                scan.getScanFile().getMode().toString().toUpperCase()) == MS1.IonMode.NEG) {
                            isNegativeScanPresent = true;
                        }
                    }
                }

                ionModes.put(chemical.getId(), Pair.of(isPositiveScanPresent, isNegativeScanPresent));
            }

            // Sort in descending order of media where MeOH and Water related media are promoted to the top and
            // anything derived from yeast media are demoted. We do this because we want to first process the
            // water and meoh media before processing the yeast media since the yeast media depends on the
            // analysis of the former.
            Collections.sort(standardWells, new Comparator<StandardWell>() {
                @Override
                public int compare(StandardWell o1, StandardWell o2) {
                    if (StandardWell.doesMediaContainYeastExtract(o1.getMedia())
                            && !StandardWell.doesMediaContainYeastExtract(o2.getMedia())) {
                        return 1;
                    } else {
                        return 0;
                    }
                }
            });

            searchIons = extractPathwayStepIonsFromStandardIonAnalysis(pathwayChems, lcmsDir, db, standardWells,
                    plottingDirectory, ionModes);
        }

        produceLCMSPathwayHeatmaps(lcmsDir, outData, outImg, outAnalysis, pathwayChems, allStandardScans,
                allPositiveScans, allNegativeScans, fontScale, cl.hasOption(OPTION_USE_HEATMAP), searchIons);
    }
}
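The plateCache HashMap above is threaded through all three processScans calls so that the standard, positive, and negative passes share one set of plate lookups. A hedged sketch of that pass-the-cache pattern; Plate.getPlateById is an assumed lookup method, since the example only shows the cache being passed along:

// One cache shared across several processing passes, keyed by plate id.
static Plate plateForId(DB db, Integer plateId, HashMap<Integer, Plate> plateCache) throws Exception {
    Plate plate = plateCache.get(plateId);
    if (plate == null) {
        plate = Plate.getPlateById(db, plateId); // assumed DB lookup on a cache miss
        plateCache.put(plateId, plate);          // later passes reuse this entry
    }
    return plate;
}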
From source file:com.github.fritaly.svngraph.SvnGraph.java
public static void main(String[] args) throws Exception { if (args.length != 2) { System.out.println(String.format("%s <input-file> <output-file>", SvnGraph.class.getSimpleName())); System.exit(1); } final File input = new File(args[0]); if (!input.exists()) { throw new IllegalArgumentException( String.format("The given file '%s' doesn't exist", input.getAbsolutePath())); } final File output = new File(args[1]); final Document document = DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(input); final History history = new History(document); final Set<String> rootPaths = history.getRootPaths(); System.out.println(rootPaths); for (String path : rootPaths) { System.out.println(path); System.out.println(history.getHistory(path).getRevisions()); System.out.println(); } int count = 0; FileWriter fileWriter = null; GraphMLWriter graphWriter = null; try { fileWriter = new FileWriter(output); graphWriter = new GraphMLWriter(fileWriter); final NodeStyle tagStyle = graphWriter.getNodeStyle(); tagStyle.setFillColor(Color.WHITE); graphWriter.graph(); // map associating node labels to their corresponding node id in the graph final Map<String, String> nodeIdsPerLabel = new TreeMap<>(); // the node style associated to each branch final Map<String, NodeStyle> nodeStyles = new TreeMap<>(); for (Revision revision : history.getSignificantRevisions()) { System.out.println(revision.getNumber() + " - " + revision.getMessage()); // TODO Render also the deletion of branches // there should be only 1 significant update per revision (the one with action ADD) for (Update update : revision.getSignificantUpdates()) { if (update.isCopy()) { // a merge is also considered a copy final RevisionPath source = update.getCopySource(); System.out.println(String.format(" > %s %s from %s@%d", update.getAction(), update.getPath(), source.getPath(), source.getRevision())); final String sourceRoot = Utils.getRootName(source.getPath()); if (sourceRoot == null) { // skip the revisions whose associated root is // null (happens whether a branch was created // outside the 'branches' directory for // instance) System.err.println(String.format("Skipped revision %d because of a null root", source.getRevision())); continue; } final String sourceLabel = computeNodeLabel(sourceRoot, source.getRevision()); // create a node for the source (path, revision) final String sourceId; if (nodeIdsPerLabel.containsKey(sourceLabel)) { // retrieve the id of the existing node sourceId = nodeIdsPerLabel.get(sourceLabel); } else { // create the new node if (Utils.isTagPath(source.getPath())) { graphWriter.setNodeStyle(tagStyle); } else { if (!nodeStyles.containsKey(sourceRoot)) { final NodeStyle style = new NodeStyle(); style.setFillColor(randomColor()); nodeStyles.put(sourceRoot, style); } graphWriter.setNodeStyle(nodeStyles.get(sourceRoot)); } sourceId = graphWriter.node(sourceLabel); nodeIdsPerLabel.put(sourceLabel, sourceId); } // and another for the newly created directory final String targetRoot = Utils.getRootName(update.getPath()); if (targetRoot == null) { System.err.println(String.format("Skipped revision %d because of a null root", revision.getNumber())); continue; } final String targetLabel = computeNodeLabel(targetRoot, revision.getNumber()); if (Utils.isTagPath(update.getPath())) { graphWriter.setNodeStyle(tagStyle); } else { if (!nodeStyles.containsKey(targetRoot)) { final NodeStyle style = new NodeStyle(); style.setFillColor(randomColor()); nodeStyles.put(targetRoot, style); }
graphWriter.setNodeStyle(nodeStyles.get(targetRoot)); } final String targetId; if (nodeIdsPerLabel.containsKey(targetLabel)) { // retrieve the id of the existing node targetId = nodeIdsPerLabel.get(targetLabel); } else { // create the new node if (Utils.isTagPath(update.getPath())) { graphWriter.setNodeStyle(tagStyle); } else { if (!nodeStyles.containsKey(targetRoot)) { final NodeStyle style = new NodeStyle(); style.setFillColor(randomColor()); nodeStyles.put(targetRoot, style); } graphWriter.setNodeStyle(nodeStyles.get(targetRoot)); } targetId = graphWriter.node(targetLabel); nodeIdsPerLabel.put(targetLabel, targetId); } // create an edge between the 2 nodes graphWriter.edge(sourceId, targetId); } else { System.out.println(String.format(" > %s %s", update.getAction(), update.getPath())); } } System.out.println(); count++; } // Dispatch the revisions per corresponding branch final Map<String, Set<Long>> revisionsPerBranch = new TreeMap<>(); for (String nodeLabel : nodeIdsPerLabel.keySet()) { if (nodeLabel.contains("@")) { final String branchName = StringUtils.substringBefore(nodeLabel, "@"); final long revision = Long.parseLong(StringUtils.substringAfter(nodeLabel, "@")); if (!revisionsPerBranch.containsKey(branchName)) { revisionsPerBranch.put(branchName, new TreeSet<Long>()); } revisionsPerBranch.get(branchName).add(revision); } else { throw new IllegalStateException(nodeLabel); } } // Recreate the missing edges between revisions from a same branch for (String branchName : revisionsPerBranch.keySet()) { final List<Long> branchRevisions = new ArrayList<>(revisionsPerBranch.get(branchName)); for (int i = 0; i < branchRevisions.size() - 1; i++) { final String nodeLabel1 = String.format("%s@%d", branchName, branchRevisions.get(i)); final String nodeLabel2 = String.format("%s@%d", branchName, branchRevisions.get(i + 1)); graphWriter.edge(nodeIdsPerLabel.get(nodeLabel1), nodeIdsPerLabel.get(nodeLabel2)); } } graphWriter.closeGraph(); System.out.println(String.format("Found %d significant revisions", count)); } finally { if (graphWriter != null) { graphWriter.close(); } if (fileWriter != null) { fileWriter.close(); } } System.out.println("Done"); }
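SvnGraph uses nodeIdsPerLabel both as a membership test (containsKey) and as a registry (put) so each branch@revision label is materialized as exactly one graph node, and revisionsPerBranch shows the companion put-a-fresh-TreeSet-on-first-sight idiom. A compact sketch of the check-then-put registry, assuming a counter-based createNode in place of GraphMLWriter.node:

    import java.util.Map;
    import java.util.TreeMap;

    public class NodeRegistrySketch {
        private final Map<String, String> nodeIdsPerLabel = new TreeMap<>();
        private int nextId = 0;

        // Returns the node id for a label, creating and registering a node on first sight.
        String nodeFor(String label) {
            String id = nodeIdsPerLabel.get(label);
            if (id == null) {
                id = createNode(label);
                nodeIdsPerLabel.put(label, id); // remember the id so later revisions reuse this node
            }
            return id;
        }

        private String createNode(String label) {
            return "n" + (nextId++); // stands in for the real graph-writer call
        }

        public static void main(String[] args) {
            NodeRegistrySketch registry = new NodeRegistrySketch();
            System.out.println(registry.nodeFor("trunk@100"));    // n0, newly created
            System.out.println(registry.nodeFor("trunk@100"));    // n0 again, no duplicate node
            System.out.println(registry.nodeFor("branch-x@101")); // n1
        }
    }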
From source file:edu.msu.cme.rdp.graph.utils.ContigMerger.java
public static void main(String[] args) throws IOException { final BufferedReader hmmgsResultReader; final IndexedSeqReader nuclContigReader; final double minBits; final int minProtLength; final Options options = new Options(); final PrintStream out; final ProfileHMM hmm; final FastaWriter protSeqOut; final FastaWriter nuclSeqOut; final boolean prot; final boolean all; final String shortSampleName; options.addOption("a", "all", false, "Generate all combinations for multiple paths, instead of just the best"); options.addOption("b", "min-bits", true, "Minimum bits score"); options.addOption("l", "min-length", true, "Minimum length"); options.addOption("s", "short_samplename", true, "short sample name, to be used as part of contig identifiers. This allows analyzing contigs together from different samples in downstream analysis "); options.addOption("o", "out", true, "Write output to file instead of stdout"); try { CommandLine line = new PosixParser().parse(options, args); if (line.hasOption("min-bits")) { minBits = Double.valueOf(line.getOptionValue("min-bits")); } else { minBits = Double.NEGATIVE_INFINITY; } if (line.hasOption("min-length")) { minProtLength = Integer.valueOf(line.getOptionValue("min-length")); } else { minProtLength = 0; } if (line.hasOption("short_samplename")) { shortSampleName = line.getOptionValue("short_samplename") + "_"; } else { shortSampleName = ""; } if (line.hasOption("out")) { out = new PrintStream(line.getOptionValue("out")); } else { out = System.err; } all = line.hasOption("all"); args = line.getArgs(); if (args.length != 3) { throw new Exception("Unexpected number of arguments"); } hmmgsResultReader = new BufferedReader(new FileReader(new File(args[1]))); nuclContigReader = new IndexedSeqReader(new File(args[2])); hmm = HMMER3bParser.readModel(new File(args[0])); prot = (hmm.getAlphabet() == SequenceType.Protein); if (prot) { protSeqOut = new FastaWriter(new File("prot_merged.fasta")); } else { protSeqOut = null; } nuclSeqOut = new FastaWriter(new File("nucl_merged.fasta")); } catch (Exception e) { new HelpFormatter().printHelp("USAGE: ContigMerger [options] <hmm> <hmmgs_file> <nucl_contig>", options); System.err.println("Error: " + e.getMessage()); System.exit(1); throw new RuntimeException("I hate you javac"); } String line; SearchDirection lastDir = SearchDirection.left; //So this has an assumption built in //It depends on hmmgs always outputting left fragments, then right //We can't just use the kmer to figure out if we've switched to another starting point //because we allow multiple starting model pos, so two different starting //positions can have the same starting kmer Map<String, Sequence> leftContigs = new HashMap<>(); Map<String, Sequence> rightContigs = new HashMap<>(); int contigsMerged = 0; int writtenMerges = 0; long startTime = System.currentTimeMillis(); String kmer = null; String geneName = null; while ((line = hmmgsResultReader.readLine()) != null) { if (line.startsWith("#")) { continue; } String[] lexemes = line.trim().split("\t"); if (lexemes.length != 12 || lexemes[0].equals("-")) { System.err.println("Skipping line: " + line); continue; } //contig_53493 nirk 1500:6:35:16409:3561/1 ADV15048 tcggcgctctacacgttcctgcagcccggg 40 210 70 left -44.692 184 0 int index = 0; String seqid = lexemes[0]; geneName = lexemes[1]; String readid = lexemes[2]; String refid = lexemes[3]; kmer = lexemes[4]; int modelStart = Integer.valueOf(lexemes[5]); int nuclLength = Integer.valueOf(lexemes[6]); int protLength = 
Integer.valueOf(lexemes[7]); SearchDirection dir = SearchDirection.valueOf(lexemes[8]); if (dir != lastDir) { if (dir == SearchDirection.left) { List<MergedContig> mergedContigs = all ? mergeAllContigs(leftContigs, rightContigs, kmer, geneName, hmm) : mergeContigs(leftContigs, rightContigs, kmer, geneName, hmm); contigsMerged++; for (MergedContig mc : mergedContigs) { String mergedId = shortSampleName + geneName + "_" + mc.leftContig + "_" + mc.rightContig; out.println(mergedId + "\t" + mc.length + "\t" + mc.score); if (mc.score > minBits && mc.length > minProtLength) { if (prot) { protSeqOut.writeSeq(mergedId, mc.protSeq); } nuclSeqOut.writeSeq(mergedId, mc.nuclSeq); writtenMerges++; } } leftContigs.clear(); rightContigs.clear(); } lastDir = dir; } Sequence seq = nuclContigReader.readSeq(seqid); if (dir == SearchDirection.left) { leftContigs.put(seqid, seq); } else if (dir == SearchDirection.right) { rightContigs.put(seqid, seq); } else { throw new IOException("Cannot handle search direction " + dir); } } if (!leftContigs.isEmpty() || !rightContigs.isEmpty()) { List<MergedContig> mergedContigs = all ? mergeAllContigs(leftContigs, rightContigs, kmer, geneName, hmm) : mergeContigs(leftContigs, rightContigs, kmer, geneName, hmm); for (MergedContig mc : mergedContigs) { String mergedId = shortSampleName + mc.gene + "_" + mc.leftContig + "_" + mc.rightContig; out.println(mergedId + "\t" + mc.length + "\t" + mc.score); contigsMerged++; if (mc.score > minBits && mc.length > minProtLength) { if (prot) { protSeqOut.writeSeq(mergedId, mc.protSeq); } nuclSeqOut.writeSeq(mergedId, mc.nuclSeq); writtenMerges++; } } } out.close(); if (prot) { protSeqOut.close(); } nuclSeqOut.close(); System.err.println("Read in " + contigsMerged + " contigs, wrote out " + writtenMerges + " merged contigs in " + ((double) (System.currentTimeMillis() - startTime) / 1000) + "s"); }
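ContigMerger buckets incoming sequences into two HashMaps keyed by sequence id (leftContigs, rightContigs), merges the buckets whenever the search direction flips back to left, and clears both maps before the next group. A runnable sketch of that put/merge/clear cycle, with plain String sequences standing in for the Sequence type and invented sample data:

    import java.util.HashMap;
    import java.util.Map;

    public class ContigBucketsSketch {
        public static void main(String[] args) {
            // Two buckets for the current starting kmer, keyed by sequence id.
            Map<String, String> leftContigs = new HashMap<>();
            Map<String, String> rightContigs = new HashMap<>();

            String[][] rows = { // {seqid, direction, sequence}
                    { "contig_1", "left", "ACGT" },
                    { "contig_2", "right", "TTGA" },
            };
            for (String[] row : rows) {
                if ("left".equals(row[1])) {
                    leftContigs.put(row[0], row[2]); // put() replaces any stale entry for the same id
                } else {
                    rightContigs.put(row[0], row[2]);
                }
            }

            // Merge every left/right combination, then reset both buckets for the next group.
            for (Map.Entry<String, String> l : leftContigs.entrySet()) {
                for (Map.Entry<String, String> r : rightContigs.entrySet()) {
                    System.out.println(l.getKey() + "_" + r.getKey() + " -> " + l.getValue() + r.getValue());
                }
            }
            leftContigs.clear();
            rightContigs.clear();
        }
    }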
From source file:com.acapulcoapp.alloggiatiweb.FileReader.java
public static void main(String[] args) throws UnknownHostException, IOException { // TODO code application logic here SpringApplication app = new SpringApplication(AcapulcoappApp.class); SimpleCommandLinePropertySource source = new SimpleCommandLinePropertySource(args); addDefaultProfile(app, source); ConfigurableApplicationContext context = app.run(args); initBeans(context); Map<LocalDate, List<List<String>>> map = new TreeMap<>(); List<File> files = new ArrayList<>(FileUtils.listFiles(new File("/Users/chiccomask/Downloads/ALLOGGIATI"), new String[] { "txt" }, true)); Collections.reverse(files); int count = 0; for (File file : files) { // List<String> allLines = FileUtils.readLines(file, "windows-1252"); List<String> allLines = FileUtils.readLines(file, "UTF-8"); for (int i = 0; i < allLines.size();) { count++; List<String> record = new ArrayList<>(); String line = allLines.get(i); String type = TIPO_ALLOGGIO.parse(line); switch (type) { case "16": record.add(line); i++; break; case "17": { record.add(line); boolean out = false; while (!out) { i++; if (i < allLines.size()) { String subline = allLines.get(i); String subtype = TIPO_ALLOGGIO.parse(subline); if (!subtype.equals("19")) { out = true; } else { record.add(subline); } } else { out = true; } } break; } case "18": { record.add(line); boolean out = false; while (!out) { i++; if (i < allLines.size()) { String subline = allLines.get(i); String subtype = TIPO_ALLOGGIO.parse(subline); if (!subtype.equals("20")) { out = true; } else { record.add(subline); } } else { out = true; } } break; } default: break; } LocalDate arrived = LocalDate.parse(DATA_ARRIVO.parse(line), DateTimeFormatter.ofPattern(DATE_PATTERN)); if (!map.containsKey(arrived)) { map.put(arrived, new ArrayList<>()); } map.get(arrived).add(record); } } for (LocalDate date : map.keySet()) { System.out.println(); System.out.println("process day " + date); for (List<String> record : map.get(date)) { System.out.println(); System.out.println("process record "); for (String line : record) { System.out.println(line); } CheckinRecord checkinRecord = new CheckinRecord(); // not setting this for now String firstLine = record.get(0); String typeStr = TIPO_ALLOGGIO.parse(firstLine); CheckinType cht = checkinTypeRepository.find(typeStr); checkinRecord.setCheckinType(cht); int days = Integer.parseInt(PERMANENZA.parse(firstLine)); checkinRecord.setDays(days); checkinRecord.setArrived(date); boolean isMain = true; List<Person> others = new ArrayList<>(); for (String line : record) { Person p = extractPerson(line); if (p.getDistrictOfBirth() == null) { System.out.println("district of birth not found " + p); } List<Person> duplicates = personRepository.findDuplicates(p.getSurname(), p.getName(), p.getDateOfBirth()); if (duplicates.isEmpty()) { System.out.println("add new person " + p.getId() + " " + p); personRepository.saveAndFlush(p); } else if (duplicates.size() == 1) { Person found = duplicates.get(0); if (p.getIdentityDocument() != null) { //we sorted by date so we suppose //the file version is newer so we update the entity p.setId(found.getId()); System.out.println("update person " + p.getId() + " " + p); personRepository.saveAndFlush(p); } else if (found.getIdentityDocument() != null) { //on db there are more data so I use them. 
p = found; System.out.println("use already saved person " + p.getId() + " " + p); } else { p.setId(found.getId()); System.out.println("update person " + p.getId() + " " + p); personRepository.saveAndFlush(p); } } else { throw new RuntimeException("More duplicated for " + p.getName()); } if (isMain) { checkinRecord.setMainPerson(p); isMain = false; } else { others.add(p); } } checkinRecord.setOtherPeople(new HashSet<>(others)); if (checkinRecordRepository.alreadyExists(checkinRecord.getMainPerson(), date) != null) { System.out.println("already exists " + date + " p " + checkinRecord.getMainPerson()); } else { System.out.println("save record "); checkinRecordRepository.saveAndFlush(checkinRecord); } } } // // if (type.equals("16")) { // List<String> record = new ArrayList<>(); // record.add(line); // keepOpen = false; // } // // map.get(arrived).add(record); // map.values().forEach((list) -> { // // for (String line : list) { // // Person p = null; // // try { // // p = extractPerson(line); // // List<Person> duplicates = personRepository.findDuplicates(p.getSurname(), p.getName(), p.getDateOfBirth()); // // if (duplicates.isEmpty()) { // personRepository.saveAndFlush(p); // // } else if (duplicates.size() > 1) { // System.out.println(); // System.out.println("MULIPLE DUPLICATED"); // // for (Person dd : duplicates) { // System.out.println(dd); // } // System.out.println("* " + p); // throw new RuntimeException(); // } else { // //// if (!duplicates.get(0).getDistrictOfBirth().equals(p.getDistrictOfBirth())) { //// int index = 0; //// //// System.out.println(); //// System.out.println("DUPLICATED"); //// //// for (Person dd : duplicates) { //// System.out.println(dd); //// index++; //// } //// System.out.println("* " + p); //// System.out.println(file.getAbsolutePath() + " " + p); //// //// System.out.println(); //// System.out.println(); //// } //// duplicates.remove(0); //// personRepository.deleteInBatch(duplicates); //// System.out.println(); //// System.out.println("Seleziona scelta"); //// Scanner s = new Scanner(System.in); //// int selected; //// try { //// selected = s.nextInt(); //// } catch (InputMismatchException e) { //// selected = 0; //// } //// //// if (duplicates.size() <= selected) { //// personRepository.deleteInBatch(duplicates); //// personRepository.saveAndFlush(p); //// } else { //// duplicates.remove(selected); //// personRepository.deleteInBatch(duplicates); //// } // } // // } catch (Exception e) { // // System.out.println(); //// System.out.println("ERROR READING lineCount=" + allLines.indexOf(line) + " line=" + line); //// System.out.println(file.getAbsolutePath()); // System.out.println(p); // e.printStackTrace(); // System.out.println(); // } // } // }); context.registerShutdownHook(); System.exit(0); }
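The reader above groups parsed records into a TreeMap keyed by arrival date, calling put with a fresh ArrayList the first time a date is seen so that later iteration runs in day order. The same idiom in a self-contained form (sample data invented; computeIfAbsent would collapse the containsKey/put pair into one call):

    import java.time.LocalDate;
    import java.util.ArrayList;
    import java.util.List;
    import java.util.Map;
    import java.util.TreeMap;

    public class GroupByDateSketch {
        public static void main(String[] args) {
            // TreeMap keeps arrival dates sorted, so records are processed in day order.
            Map<LocalDate, List<String>> recordsByDay = new TreeMap<>();

            String[][] lines = { // {arrival date, record line}
                    { "2016-03-02", "16...record A" },
                    { "2016-03-01", "17...record B" },
                    { "2016-03-02", "16...record C" },
            };
            for (String[] line : lines) {
                LocalDate arrived = LocalDate.parse(line[0]);
                // Same containsKey-then-put dance as the example above.
                if (!recordsByDay.containsKey(arrived)) {
                    recordsByDay.put(arrived, new ArrayList<>());
                }
                recordsByDay.get(arrived).add(line[1]);
            }

            recordsByDay.forEach((date, records) -> System.out.println(date + " -> " + records));
        }
    }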
From source file:com.foo.manager.commonManager.thread.HttpHandleThread.java
public static void main(String arg[]) { // String logistics_interface = "<LoadHead><loadContents><loadContent><loadContentId>1</loadContentId><outorderId>6666666666</outorderId></loadContent><loadContent><loadContentId>2</loadContentId><outorderId>7777777777</outorderId></loadContent></loadContents><loadHeadId>12</loadHeadId><loadId>1736474588</loadId><total>2</total><tracyNum>3</tracyNum><TotalWeight>2.5</TotalWeight><CarEcNo>?A234234</CarEcNo></LoadHead>"; // // String data_digest = CommonUtil.makeSign(logistics_interface); // // System.out.println(data_digest); // try { // System.out // .println("logistics_interface=" // + URLEncoder.encode(logistics_interface, "utf-8") // + "&data_digest=" // + URLEncoder.encode(data_digest, "utf-8")); // } catch (UnsupportedEncodingException e) { // // TODO Auto-generated catch block // e.printStackTrace(); // } // // try { // System.out.println(URLEncoder.encode("helloworld", "utf-8")); // System.out.println(URLEncoder.encode("voQc3u6+f6pSflMPdw4ySQ==", // "utf-8")); // } catch (UnsupportedEncodingException e) { // // TODO Auto-generated catch block // e.printStackTrace(); // } List<Map<String, Object>> list = new ArrayList<Map<String, Object>>(); Map<String, Object> map1 = new HashMap<String, Object>(); map1.put("RECORD_NO", "335"); map1.put("CREAT_TIME", "2018-09-14 21:46:17"); Map<String, Object> map2 = new HashMap<String, Object>(); map2.put("RECORD_NO", "145"); map2.put("CREAT_TIME", "2018-09-14 23:46:17"); Map<String, Object> map3 = new HashMap<String, Object>(); map3.put("RECORD_NO", "285"); map3.put("CREAT_TIME", "2018-09-14 22:46:17"); Map<String, Object> map4 = new HashMap<String, Object>(); map4.put("RECORD_NO", "265"); map4.put("CREAT_TIME", "2018-09-14 22:46:17"); list.add(map1); list.add(map2); list.add(map3); list.add(map4); // print the list before sorting for (Map<String, Object> map : list) { System.out.println(map); } Collections.sort(list, new Comparator<Map<String, Object>>() { public int compare(Map<String, Object> o1, Map<String, Object> o2) { // double qty1 = Double.valueOf(o1.get("QTY") // .toString()); // double qty2 = Double.valueOf(o2.get("QTY") // .toString()); String recordNo1 = o1.get("RECORD_NO") != null ? o1.get("RECORD_NO").toString() : ""; String recordNo2 = o2.get("RECORD_NO") != null ? o2.get("RECORD_NO").toString() : ""; // if (qty1 > qty2) { /* Return the raw comparison so the result is negative, zero, or positive; the original 0/1 version violated the Comparator contract. */ return recordNo1.compareTo(recordNo2); // } else { // return 0; // } } }); System.out.println("-------------------"); for (Map<String, Object> map : list) { System.out.println(map); } // // String s = "<![CDATA[?]]>"; // Pattern p = Pattern.compile(".*<!\\[CDATA\\[(.*)\\]\\]>.*"); // Matcher m = p.matcher(s); // // if (m.matches()) { // System.out.println(m.group(1)); // }else{ // System.out.println(s); // } // // System.out.println(CommonUtil.getDateFormatter(CommonDefine.COMMON_FORMAT_2) // .format(new Date())); // String response = "<?xml version=\"1.0\" encoding=\"utf-8\"?><soapenv:Envelope xmlns:soapenv=\"http://www.w3.org/2003/05/soap-envelope\"><soapenv:Body><ns:sendOrderResponse xmlns:ns=\"http://ws.com\"><ns:return><?xml version=\"1.0\" encoding=\"UTF-8\"?><DATA><ORDER><ORDER_CODE>W100133410</ORDER_CODE><CD>OK</CD><INFO>11811000073</INFO></ORDER></DATA></ns:return></ns:sendOrderResponse></soapenv:Body></soapenv:Envelope>"; // // String xxxx = StringEscapeUtils.escapeXml(response); // String returnXmlData = XmlUtil // .getResponseFromXmlString_CJ(xxxx); // String returnXmlData = XmlUtil.getTotalMidValue(response,"<ns:return>","</ns:return>"); // // Map orderResult = XmlUtil.parseXmlFPAPI_SingleNodes(returnXmlData, "//DATA/ORDER/child::*"); // System.out.println(xxxx); // System.out.println(orderResult); try { String xxx = "<returnInfo>?????????????</returnInfo>"; xxx = "<returnInfo>?????????????/returnInfo>"; // String xxx = "??[??3201966A69??AAAA124613101110701] ?[3201966A69]??[AAAA124613101110701]??? ??[1210680001]???[245119218897]???? ?[3201966A69]??[AAAA124613101110701]??"; // String uft_gbk = new String(xxx.getBytes("UTF-8"),"GBK"); String gbk_utf = new String(xxx.getBytes("GBK"), "UTF-8"); // System.out.println(uft_gbk); System.out.println(gbk_utf); } catch (UnsupportedEncodingException e) { // TODO Auto-generated catch block e.printStackTrace(); } }
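This test main builds each row as a HashMap populated with put and then sorts the row list; the comparator above was tightened to return a sign-correct result. A compact version of the same rows-as-maps pattern using Comparator.comparing, which satisfies the contract by construction:

    import java.util.ArrayList;
    import java.util.Comparator;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class SortRowMapsSketch {
        public static void main(String[] args) {
            List<Map<String, Object>> rows = new ArrayList<>();
            for (String no : new String[] { "335", "145", "285" }) {
                Map<String, Object> row = new HashMap<>();
                row.put("RECORD_NO", no); // each row is an ad-hoc record built with put()
                rows.add(row);
            }
            // Comparator.comparing returns negative/zero/positive as required,
            // unlike a hand-rolled comparator that only returns 0 or 1.
            rows.sort(Comparator.comparing(r -> (String) r.getOrDefault("RECORD_NO", "")));
            rows.forEach(System.out::println); // 145, 285, 335
        }
    }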
From source file:edu.upenn.cis.FastAlign.java
/** * Prints alignments for options specified by command line arguments. * @param argv parameters to be used by FastAlign. */ public static void main(String[] argv) { FastAlign align = FastAlign.initCommandLine(argv); if (align == null) { System.err.println("Usage: java " + FastAlign.class.getCanonicalName() + " -i file.fr-en\n" + " Standard options ([USE] = strongly recommended):\n" + " -i: [REQ] Input parallel corpus\n" + " -v: [USE] Use Dirichlet prior on lexical translation distributions\n" + " -d: [USE] Favor alignment points close to the monotonic diagonal\n" + " -o: [USE] Optimize how close to the diagonal alignment points should be\n" + " -r: Run alignment in reverse (condition on target and predict source)\n" + " -c: Output conditional probability table\n" + " -e: Start with existing conditional probability table\n" + " Advanced options:\n" + " -I: number of iterations in EM training (default = 5)\n" + " -p: p_null parameter (default = 0.08)\n" + " -N: No null word\n" + " -a: alpha parameter for optional Dirichlet prior (default = 0.01)\n" + " -T: starting lambda for diagonal distance parameter (default = 4)\n"); System.exit(1); } boolean use_null = !align.no_null_word; if (align.variational_bayes && align.alpha <= 0.0) { System.err.println("--alpha must be > 0\n"); System.exit(1); } double prob_align_not_null = 1.0 - align.prob_align_null; final int kNULL = align.d.Convert("<eps>"); TTable s2t = new TTable(); if (!align.existing_probability_filename.isEmpty()) { boolean success = s2t.ImportFromFile(align.existing_probability_filename, '\t', align.d); if (!success) { System.err.println("Can't read table " + align.existing_probability_filename); System.exit(1); } } Map<Pair, Integer> size_counts = new HashMap<Pair, Integer>(); double tot_len_ratio = 0; double mean_srclen_multiplier = 0; List<Double> probs = new ArrayList<Double>(); // E-M Iterations Loop TODO move this into a method? for (int iter = 0; iter < align.iterations || (iter == 0 && align.iterations == 0); ++iter) { final boolean final_iteration = (iter >= (align.iterations - 1)); System.err.println("ITERATION " + (iter + 1) + (final_iteration ? " (FINAL)" : "")); Scanner in = null; try { in = new Scanner(new File(align.input)); if (!in.hasNextLine()) { System.err.println("Can't read " + align.input); System.exit(1); } } catch (FileNotFoundException e) { e.printStackTrace(); System.err.println("Can't read " + align.input); System.exit(1); } double likelihood = 0; double denom = 0.0; int lc = 0; boolean flag = false; String line; // String ssrc, strg; ArrayList<Integer> src = new ArrayList<Integer>(); ArrayList<Integer> trg = new ArrayList<Integer>(); double c0 = 0; double emp_feat = 0; double toks = 0; // Iterate over each line of the input file while (in.hasNextLine()) { line = in.nextLine(); ++lc; if (lc % 1000 == 0) { System.err.print('.'); flag = true; } if (lc % 50000 == 0) { System.err.println(" [" + lc + "]\n"); System.err.flush(); flag = false; } src.clear(); trg.clear(); // TODO this is redundant; src and tgt cleared in ParseLine // Integerize and split source and target lines. align.ParseLine(line, src, trg); if (align.is_reverse) { ArrayList<Integer> tmp = src; src = trg; trg = tmp; } // TODO Empty lines break the parser. Should this be true? 
if (src.size() == 0 || trg.size() == 0) { System.err.println("Error in line " + lc + "\n" + line); System.exit(1); } if (iter == 0) { tot_len_ratio += ((double) trg.size()) / ((double) src.size()); } denom += trg.size(); probs.clear(); // Add to pair length counts only if first iteration. if (iter == 0) { Pair pair = new Pair(trg.size(), src.size()); Integer value = size_counts.get(pair); if (value == null) value = 0; size_counts.put(pair, value + 1); } boolean first_al = true; // used when printing alignments toks += trg.size(); // Iterate through the English tokens for (int j = 0; j < trg.size(); ++j) { final int f_j = trg.get(j); double sum = 0; double prob_a_i = 1.0 / (src.size() + (use_null ? 1 : 0)); // uniform (model 1) if (use_null) { if (align.favor_diagonal) { prob_a_i = align.prob_align_null; } probs.add(0, s2t.prob(kNULL, f_j) * prob_a_i); sum += probs.get(0); } double az = 0; if (align.favor_diagonal) az = DiagonalAlignment.computeZ(j + 1, trg.size(), src.size(), align.diagonal_tension) / prob_align_not_null; for (int i = 1; i <= src.size(); ++i) { if (align.favor_diagonal) prob_a_i = DiagonalAlignment.unnormalizedProb(j + 1, i, trg.size(), src.size(), align.diagonal_tension) / az; probs.add(i, s2t.prob(src.get(i - 1), f_j) * prob_a_i); sum += probs.get(i); } if (final_iteration) { double max_p = -1; int max_index = -1; if (use_null) { max_index = 0; max_p = probs.get(0); } for (int i = 1; i <= src.size(); ++i) { if (probs.get(i) > max_p) { max_index = i; max_p = probs.get(i); } } if (max_index > 0) { if (first_al) first_al = false; else System.out.print(' '); if (align.is_reverse) System.out.print("" + j + '-' + (max_index - 1)); else System.out.print("" + (max_index - 1) + '-' + j); } } else { if (use_null) { double count = probs.get(0) / sum; c0 += count; s2t.Increment(kNULL, f_j, count); } for (int i = 1; i <= src.size(); ++i) { final double p = probs.get(i) / sum; s2t.Increment(src.get(i - 1), f_j, p); emp_feat += DiagonalAlignment.feature(j, i, trg.size(), src.size()) * p; } } likelihood += Math.log(sum); } if (final_iteration) System.out.println(); } // log(e) = 1.0 double base2_likelihood = likelihood / Math.log(2); if (flag) { System.err.println(); } if (iter == 0) { mean_srclen_multiplier = tot_len_ratio / lc; System.err.println("expected target length = source length * " + mean_srclen_multiplier); } emp_feat /= toks; System.err.println(" log_e likelihood: " + likelihood); System.err.println(" log_2 likelihood: " + base2_likelihood); System.err.println(" cross entropy: " + (-base2_likelihood / denom)); System.err.println(" perplexity: " + Math.pow(2.0, -base2_likelihood / denom)); System.err.println(" posterior p0: " + c0 / toks); System.err.println(" posterior al-feat: " + emp_feat); //System.err.println(" model tension: " + mod_feat / toks ); System.err.println(" size counts: " + size_counts.size()); if (!final_iteration) { if (align.favor_diagonal && align.optimize_tension && iter > 0) { for (int ii = 0; ii < 8; ++ii) { double mod_feat = 0; Iterator<Map.Entry<Pair, Integer>> it = size_counts.entrySet().iterator(); for (; it.hasNext();) { Map.Entry<Pair, Integer> entry = it.next(); final Pair p = entry.getKey(); for (int j = 1; j <= p.first; ++j) mod_feat += entry.getValue() * DiagonalAlignment.computeDLogZ(j, p.first, p.second, align.diagonal_tension); } mod_feat /= toks; System.err.println(" " + (ii + 1) + " model al-feat: " + mod_feat + " (tension=" + align.diagonal_tension + ")"); align.diagonal_tension += (emp_feat - mod_feat) * 20.0; if (align.diagonal_tension 
<= 0.1) align.diagonal_tension = 0.1; if (align.diagonal_tension > 14) align.diagonal_tension = 14; } System.err.println(" final tension: " + align.diagonal_tension); } if (align.variational_bayes) s2t.NormalizeVB(align.alpha); else s2t.Normalize(); //prob_align_null *= 0.8; // XXX //prob_align_null += (c0 / toks) * 0.2; prob_align_not_null = 1.0 - align.prob_align_null; } } if (!align.conditional_probability_filename.isEmpty()) { System.err.println("conditional probabilities: " + align.conditional_probability_filename); s2t.ExportToFile(align.conditional_probability_filename, align.d); } System.exit(0); }
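FastAlign tallies sentence-length pairs with a null-checked get followed by put(pair, value + 1). A self-contained sketch of that counting idiom, with a Java record standing in for the example's Pair class; the commented merge line is the one-call equivalent available since Java 8:

    import java.util.HashMap;
    import java.util.Map;

    public class SizeCountsSketch {
        // Hypothetical stand-in for the (target length, source length) key.
        record LenPair(int trg, int src) {}

        public static void main(String[] args) {
            Map<LenPair, Integer> sizeCounts = new HashMap<>();
            LenPair[] sentences = { new LenPair(5, 4), new LenPair(5, 4), new LenPair(7, 6) };

            for (LenPair p : sentences) {
                // Same null-checked get/put increment as the aligner.
                Integer value = sizeCounts.get(p);
                if (value == null) {
                    value = 0;
                }
                sizeCounts.put(p, value + 1);
                // sizeCounts.merge(p, 1, Integer::sum); // equivalent one-liner
            }
            System.out.println(sizeCounts); // e.g. {LenPair[trg=5, src=4]=2, LenPair[trg=7, src=6]=1}
        }
    }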