List of usage examples for java.util.List stream()
default Stream<E> stream()
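Returns a sequential Stream with this list as its source. Before the longer real-world examples below, a minimal sketch of the typical filter/map/collect pipeline:

import java.util.List;
import java.util.stream.Collectors;

public class StreamBasics {
    public static void main(String[] args) {
        List<String> names = List.of("ada", "alan", "grace");

        // Filter, transform, and collect in one pipeline.
        List<String> upper = names.stream()
                .filter(n -> n.startsWith("a"))
                .map(String::toUpperCase)
                .collect(Collectors.toList());

        System.out.println(upper); // [ADA, ALAN]
    }
}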
From source file:edu.mit.lib.mama.Mama.java
public static void main(String[] args) {
    Properties props = findConfig(args);
    DBI dbi = new DBI(props.getProperty("dburl"), props);
    // Advanced instrumentation/metrics if requested
    if (System.getenv("MAMA_DB_METRICS") != null) {
        dbi.setTimingCollector(new InstrumentedTimingCollector(metrics));
    }
    // reassign default port 4567
    if (System.getenv("MAMA_SVC_PORT") != null) {
        port(Integer.valueOf(System.getenv("MAMA_SVC_PORT")));
    }
    // if API key given, use exception monitoring service
    if (System.getenv("HONEYBADGER_API_KEY") != null) {
        reporter = new HoneybadgerReporter();
    }
    get("/ping", (req, res) -> {
        res.type("text/plain");
        res.header("Cache-Control", "must-revalidate,no-cache,no-store");
        return "pong";
    });
    get("/metrics", (req, res) -> {
        res.type("application/json");
        res.header("Cache-Control", "must-revalidate,no-cache,no-store");
        ObjectMapper objectMapper = new ObjectMapper()
                .registerModule(new MetricsModule(TimeUnit.SECONDS, TimeUnit.MILLISECONDS, true));
        try (ServletOutputStream outputStream = res.raw().getOutputStream()) {
            objectMapper.writer().withDefaultPrettyPrinter().writeValue(outputStream, metrics);
        }
        return "";
    });
    get("/shutdown", (req, res) -> {
        boolean auth = false;
        try {
            if (!isNullOrEmpty(System.getenv("MAMA_SHUTDOWN_KEY"))
                    && !isNullOrEmpty(req.queryParams("key"))
                    && System.getenv("MAMA_SHUTDOWN_KEY").equals(req.queryParams("key"))) {
                auth = true;
                return "Shutting down";
            } else {
                res.status(401);
                return "Not authorized";
            }
        } finally {
            if (auth) {
                stop();
            }
        }
    });
    get("/item", (req, res) -> {
        if (isNullOrEmpty(req.queryParams("qf")) || isNullOrEmpty(req.queryParams("qv"))) {
            halt(400, "Must supply field and value query parameters 'qf' and 'qv'");
        }
        itemReqs.mark();
        Timer.Context context = respTime.time();
        try (Handle hdl = dbi.open()) {
            if (findFieldId(hdl, req.queryParams("qf")) != -1) {
                List<String> results = findItems(hdl, req.queryParams("qf"), req.queryParams("qv"),
                        req.queryParamsValues("rf"));
                if (results.size() > 0) {
                    res.type("application/json");
                    return "{ " + jsonValue("field", req.queryParams("qf"), true) + ",\n"
                            + jsonValue("value", req.queryParams("qv"), true) + ",\n"
                            + jsonValue("items", results.stream().collect(Collectors.joining(",", "[", "]")), false)
                            + "\n" + " }";
                } else {
                    res.status(404);
                    return "No items found for: " + req.queryParams("qf") + "::" + req.queryParams("qv");
                }
            } else {
                res.status(404);
                return "No such field: " + req.queryParams("qf");
            }
        } catch (Exception e) {
            if (null != reporter)
                reporter.reportError(e);
            res.status(500);
            return "Internal system error: " + e.getMessage();
        } finally {
            context.stop();
        }
    });
    awaitInitialization();
}
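The stream() call to note above is results.stream().collect(Collectors.joining(",", "[", "]")), which renders a list of pre-serialized JSON fragments as a JSON array. A standalone sketch of that pattern, with invented item strings standing in for findItems() results:

import java.util.List;
import java.util.stream.Collectors;

public class JoinDemo {
    public static void main(String[] args) {
        // Hypothetical pre-serialized JSON objects.
        List<String> results = List.of("{\"id\":1}", "{\"id\":2}", "{\"id\":3}");

        // joining(delimiter, prefix, suffix) wraps the joined elements in brackets.
        String jsonArray = results.stream().collect(Collectors.joining(",", "[", "]"));

        System.out.println(jsonArray); // [{"id":1},{"id":2},{"id":3}]
    }
}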
From source file:appmain.AppMain.java
public static void main(String[] args) {
    try {
        UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName());
        // Hungarian labels for the option-pane Yes/No buttons.
        UIManager.put("OptionPane.yesButtonText", "Igen");
        UIManager.put("OptionPane.noButtonText", "Nem");
    } catch (Exception ex) {
        ex.printStackTrace();
    }
    try {
        AppMain app = new AppMain();
        app.init();
        File importFolder = new File(DEFAULT_IMPORT_FOLDER);
        if (!importFolder.isDirectory() || !importFolder.exists()) {
            JOptionPane.showMessageDialog(null,
                    "The IMPORT folder is not accessible!\n"
                            + "Check the path and the permissions!\n"
                            + "Folder: " + importFolder.getAbsolutePath(),
                    "Information", JOptionPane.INFORMATION_MESSAGE);
            return;
        }
        File exportFolder = new File(DEFAULT_EXPORT_FOLDER);
        if (!exportFolder.isDirectory() || !exportFolder.exists()) {
            JOptionPane.showMessageDialog(null,
                    "The EXPORT folder is not accessible!\n"
                            + "Check the path and the permissions!\n"
                            + "Folder: " + exportFolder.getAbsolutePath(),
                    "Information", JOptionPane.INFORMATION_MESSAGE);
            return;
        }
        List<File> xmlFiles = app.getXMLFiles(importFolder);
        if (xmlFiles == null || xmlFiles.isEmpty()) {
            JOptionPane.showMessageDialog(null,
                    "No XML files to read!\n" + "Folder: " + importFolder.getAbsolutePath(),
                    "Information", JOptionPane.INFORMATION_MESSAGE);
            return;
        }
        StringBuilder fileList = new StringBuilder();
        xmlFiles.stream().forEach(xml -> fileList.append("\n").append(xml.getName()));
        int ret = JOptionPane.showConfirmDialog(null,
                "Files prepared for processing:\n" + fileList + "\n\nStart processing?",
                "XML files found", JOptionPane.YES_NO_OPTION, JOptionPane.QUESTION_MESSAGE);
        if (ret == JOptionPane.YES_OPTION) {
            String csvName = "tranzakcio_lista_" + df.format(new Date()) + "_"
                    + System.currentTimeMillis() + ".csv";
            File csv = new File(DEFAULT_EXPORT_FOLDER + "/" + csvName);
            app.writeCSV(csv, Arrays.asList(app.getHeaderLine()));
            xmlFiles.stream().forEach(xml -> {
                List<String> lines = app.readXMLData(xml);
                if (lines != null)
                    app.writeCSV(csv, lines);
            });
            if (csv.isFile() && csv.exists()) {
                JOptionPane.showMessageDialog(null,
                        "The CSV file was created successfully!\n" + "File: " + csv.getAbsolutePath(),
                        "Information", JOptionPane.INFORMATION_MESSAGE);
                app.openFile(csv);
            }
        } else {
            JOptionPane.showMessageDialog(null, "Processing aborted!", "Information",
                    JOptionPane.INFORMATION_MESSAGE);
        }
    } catch (Exception ex) {
        JOptionPane.showMessageDialog(null, "Unhandled exception!\n" + ExceptionUtils.getStackTrace(ex),
                "Error", JOptionPane.ERROR_MESSAGE);
    }
}
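Both xmlFiles.stream().forEach(...) calls above use the stream purely for side effects (appending to a StringBuilder, writing CSV rows). For pure string assembly, Collectors.joining is usually the cleaner equivalent; a minimal sketch with invented file names:

import java.util.List;
import java.util.stream.Collectors;

public class FileListDemo {
    public static void main(String[] args) {
        List<String> names = List.of("a.xml", "b.xml", "c.xml");

        // Side-effect style, as in the example above.
        StringBuilder fileList = new StringBuilder();
        names.stream().forEach(n -> fileList.append("\n").append(n));

        // Collector style: same result, no mutable accumulator.
        String joined = names.stream().collect(Collectors.joining("\n", "\n", ""));

        System.out.println(fileList.toString().equals(joined)); // true
    }
}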
From source file:org.mitre.mpf.rest.client.Main.java
public static void main(String[] args) throws RestClientException, IOException, InterruptedException {
    System.out.println("Starting rest-client!");

    //not necessary for localhost, but left if a proxy configuration is needed
    //System.setProperty("http.proxyHost","");
    //System.setProperty("http.proxyPort","");

    String currentDirectory = System.getProperty("user.dir");
    System.out.println("Current working directory : " + currentDirectory);

    String username = "mpf";
    String password = "mpf123";
    byte[] encodedBytes = Base64.encodeBase64((username + ":" + password).getBytes());
    String base64 = new String(encodedBytes);
    System.out.println("encodedBytes " + base64);
    final String mpfAuth = "Basic " + base64;

    RequestInterceptor authorize = new RequestInterceptor() {
        @Override
        public void intercept(HttpRequestBase request) {
            request.addHeader("Authorization", mpfAuth /*credentials*/);
        }
    };

    //RestClient client = RestClient.builder().requestInterceptor(authorize).build();
    CustomRestClient client = (CustomRestClient) CustomRestClient.builder()
            .restClientClass(CustomRestClient.class).requestInterceptor(authorize).build();

    //getAvailableWorkPipelineNames
    String url = "http://localhost:8080/workflow-manager/rest/pipelines";
    Map<String, String> params = new HashMap<String, String>();
    List<String> availableWorkPipelines = client.get(url, params, new TypeReference<List<String>>() {
    });
    System.out.println("availableWorkPipelines size: " + availableWorkPipelines.size());
    System.out.println(Arrays.toString(availableWorkPipelines.toArray()));

    //processMedia
    JobCreationRequest jobCreationRequest = new JobCreationRequest();
    URI uri = Paths.get(currentDirectory,
            "/trunk/workflow-manager/src/test/resources/samples/meds/aa/S001-01-t10_01.jpg").toUri();
    jobCreationRequest.getMedia().add(new JobCreationMediaData(uri.toString()));
    uri = Paths.get(currentDirectory,
            "/trunk/workflow-manager/src/test/resources/samples/meds/aa/S008-01-t10_01.jpg").toUri();
    jobCreationRequest.getMedia().add(new JobCreationMediaData(uri.toString()));
    jobCreationRequest.setExternalId("external id");

    //get first DLIB pipeline
    String firstDlibPipeline = availableWorkPipelines.stream()
            //.peek(pipeline -> System.out.println("will filter - " + pipeline))
            .filter(pipeline -> pipeline.startsWith("DLIB")).findFirst().get();
    System.out.println("found firstDlibPipeline: " + firstDlibPipeline);

    jobCreationRequest.setPipelineName(firstDlibPipeline); //grabbed from 'rest/pipelines' - see #1
    //two optional params
    jobCreationRequest.setBuildOutput(true);
    //jobCreationRequest.setPriority(priority); //will be set to 4 (default) if not set
    JobCreationResponse jobCreationResponse = client.customPostObject(
            "http://localhost:8080/workflow-manager/rest/jobs", jobCreationRequest, JobCreationResponse.class);
    System.out.println("jobCreationResponse job id: " + jobCreationResponse.getJobId());

    System.out.println("\n---Sleeping for 10 seconds to let the job process---\n");
    Thread.sleep(10000);

    //getJobStatus
    url = "http://localhost:8080/workflow-manager/rest/jobs"; // /status";
    params = new HashMap<String, String>();
    //OPTIONAL
    //params.put("v", "") - no versioning currently implemented
    //id is now a path var - if not set, all job info will be returned
    url = url + "/" + Long.toString(jobCreationResponse.getJobId());
    SingleJobInfo jobInfo = client.get(url, params, SingleJobInfo.class);
    System.out.println("jobInfo id: " + jobInfo.getJobId());

    //getSerializedOutput
    String jobIdToGetOutputStr = Long.toString(jobCreationResponse.getJobId());
    url = "http://localhost:8080/workflow-manager/rest/jobs/" + jobIdToGetOutputStr + "/output/detection";
    params = new HashMap<String, String>();
    //REQUIRED - job id is now a path var and required for this endpoint
    String serializedOutput = client.getAsString(url, params);
    System.out.println("serializedOutput: " + serializedOutput);
}
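The stream() usage here, filter(...).findFirst().get(), throws NoSuchElementException when no pipeline matches; orElseThrow with a message (or orElse with a fallback) makes that failure mode explicit. A small sketch with invented pipeline names:

import java.util.List;
import java.util.NoSuchElementException;

public class FindFirstDemo {
    public static void main(String[] args) {
        List<String> pipelines = List.of("OCV FACE DETECTION", "DLIB FACE DETECTION PIPELINE");

        // findFirst() returns Optional<String>; fail loudly if nothing matches.
        String firstDlib = pipelines.stream()
                .filter(p -> p.startsWith("DLIB"))
                .findFirst()
                .orElseThrow(() -> new NoSuchElementException("no DLIB pipeline available"));

        System.out.println(firstDlib); // DLIB FACE DETECTION PIPELINE
    }
}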
From source file:com.gemini.geminimain.GeminiMain.java
public static void main(String[] args) throws IOException {
    //setup the mongodb access
    mongoClient = new MongoClient(DB_SERVER);

    //create table for the application, networks and servers
    ds = morphia.createDatastore(mongoClient, "Gemini");

    //create the mapper
    Injector injector = Guice.createInjector(new GeminiMapperModule());
    GeminiMapper mapper = injector.getInstance(GeminiMapper.class);

    //set the current logging level to debug
    Configurator.currentConfig().level(Level.INFO).activate();

    //create some data to transfer to the front end
    createSampleData();

    //close the db client
    mongoClient.close();

    //check if authenticated, create the call context and user context here
    //for now it is empty!!!!
    before((request, response) -> {
        boolean authenticated = true; // ... check if authenticated
        if (!authenticated) {
            halt(401, "Nice try, you are not welcome here");
        }
    });

    after((request, response) -> {
        response.header("Access-Control-Allow-Origin", "*");
        //response.header("Access-Control-Allow-Methods", "POST, GET, OPTIONS, DELETE");
        //response.header("Access-Control-Max-Age", "3600");
        //response.header("Access-Control-Allow-Headers", "x-requested-with");
    });

    //get all environments of the tenant
    get("/environments/:tenantid", "application/json", (request, response) -> {
        String tenantID = request.params("tenantid");
        try {
            List<GeminiEnvironment> lEnvs = getEnvironments(tenantID);
            if (lEnvs != null) {
                response.status(200);
                Logger.info("Found environments for tenant {}", tenantID);
                List<GeminiEnvironmentDTO> dtoEnvs = new ArrayList<>();
                lEnvs.stream().forEach(e -> dtoEnvs.add(mapper.getDTOFromEnv(e)));
                return dtoEnvs;
            }
        } catch (UnknownHostException ex) {
            Logger.error("Severe Error: Unknown host - {}", DB_SERVER);
            response.status(500);
            return "Severe Error: Unknown database host " + DB_SERVER;
        }
        return "not implemented yet";
    }, new JsonTransformer());

    //return all applications for a given tenant and environment
    get("/applications/:tenantid/:envname", "application/json", (request, response) -> {
        String tenantID, envName;
        try {
            tenantID = URLDecoder.decode(request.params(":tenantid"), "UTF-8");
            envName = URLDecoder.decode(request.params(":envname"), "UTF-8");
        } catch (UnsupportedEncodingException ex) {
            Logger.error("Severe Error: Unsupported encoding in URL - server Name: {} Exception: {}",
                    request.params(":name"), ex);
            return "Severe Error: Unsupported encoding in URL";
        }
        try {
            List<GeminiApplication> apps = getEnvApplications(tenantID, envName);
            if (apps == null || apps.isEmpty()) {
                response.status(404);
                Logger.info("Could not find any applications.");
                return "No Applications found.";
            } else {
                response.status(200);
                Logger.debug("Found applications");
                List<GeminiApplicationDTO> dtoApps = new ArrayList<>();
                apps.stream().forEach(a -> dtoApps.add(mapper.getDTOFromApp(a)));
                return dtoApps;
            }
        } catch (UnknownHostException ex) {
            Logger.error("Severe Error: Unknown host - {}", DB_SERVER);
            response.status(500);
            return "Severe Error: Unknown database host " + DB_SERVER;
        }
    }, new JsonTransformer());

    //return application given a name
    get("/applications/:name", "application/json", (request, response) -> {
        String appName = "";
        //decode the URL as it may contain escape characters, etc.
        try {
            appName = URLDecoder.decode(request.params(":name"), "UTF-8");
        } catch (UnsupportedEncodingException ex) {
            Logger.error("Severe Error: Unsupported encoding in URL - application Name: {} Exception: {}",
                    request.params(":name"), ex);
            return "Severe Error: Unsupported encoding in URL - server name " + appName;
        }
        try {
            GeminiApplication a = getAppByName(appName);
            if (a != null) {
                Logger.debug("Found application {}", appName);
                return mapper.getDTOFromApp(a);
            } else {
                Logger.info("Could not find application {}", appName);
                return "Could not find application " + appName;
            }
        } catch (UnknownHostException ex) {
            Logger.error("Severe Error: Unknown host - {} Exception: {}", DB_SERVER, ex);
            response.status(500);
            return "Severe Error: Unknown database host " + DB_SERVER;
        }
    }, new JsonTransformer());

    post("/applications", (request, response) -> {
        String body = request.body();
        return "Hello World: " + request.body();
    });

    //return all networks related to the application named ':name'
    get("/applications/:name/networks", "application/json", (Request request, Response response) -> {
        String appName;
        //decode the URL as it may contain escape characters, etc.
        try {
            appName = URLDecoder.decode(request.params(":name"), "UTF-8");
        } catch (UnsupportedEncodingException ex) {
            Logger.error("Severe Error: Unsupported encoding in URL - server Name: {} Exception: {}",
                    request.params(":name"), ex);
            return "Severe Error: Unsupported encoding in URL";
        }
        try {
            List<GeminiNetwork> lNet = getAppNetworks(appName);
            if (lNet != null) {
                Logger.debug("Found networks for application {}", appName);
                List<GeminiNetworkDTO> dtoNets = new ArrayList<>();
                lNet.stream().forEach(aNet -> dtoNets.add(mapper.getDTOFromNetwork(aNet)));
                return dtoNets;
            } else {
                response.status(404);
                Logger.info("Could not find any networks for application {}", appName);
                return "Could not find any networks for application: " + appName;
            }
        } catch (UnknownHostException ex) {
            Logger.error("Severe Error: Unknown host - {} Exception: {}", DB_SERVER, ex);
            response.status(500);
            return "Severe Error: Unknown database host " + DB_SERVER;
        }
    }, new JsonTransformer());

    //return all servers related to the application named ':name'
    get("/applications/:name/servers", "application/json", (Request request, Response response) -> {
        String appName = "";
        //decode the URL as it may contain escape characters, etc.
        try {
            appName = URLDecoder.decode(request.params(":name"), "UTF-8");
        } catch (UnsupportedEncodingException ex) {
            Logger.error("Severe Error: Unsupported encoding in URL - server Name: {} Exception: {}",
                    request.params(":name"), ex);
            return "Severe Error: Unsupported encoding in URL";
        }
        try {
            List<GeminiServer> lSrv = getAppServers(appName);
            if (lSrv != null) {
                Logger.debug("Found servers for application {}", appName);
                List<GeminiServerDTO> dtoSrvs = new ArrayList<>();
                for (GeminiServer s : lSrv) {
                    dtoSrvs.add(mapper.getDTOFromServer(s));
                }
                return dtoSrvs;
            } else {
                Logger.info("Could not find servers for application {}", appName);
                response.status(404);
                return "Could not find servers for application: " + appName;
            }
        } catch (UnknownHostException ex) {
            Logger.error("Severe Error: Unknown host - {} Exception: {}", DB_SERVER, ex);
            return "Severe Error: Unknown database host " + DB_SERVER;
        }
    }, new JsonTransformer());

    //return all servers related to application ':appname' AND the network bounded by ':netstart'/':netend'
    get("/applications/:appname/networks/:netstart/:netend/servers", "application/json", (request, response) -> {
        String appName = "", netStart = "", netEnd = "";
        //decode the URL as it may contain escape characters, etc.
        try {
            appName = URLDecoder.decode(request.params(":appname"), "UTF-8");
            netStart = URLDecoder.decode(request.params(":netstart"), "UTF-8");
            netEnd = URLDecoder.decode(request.params(":netend"), "UTF-8");
        } catch (UnsupportedEncodingException ex) {
            Logger.error(
                    "Severe Error: Unsupported encoding in URL - application {} with network start: {} and end: {} Exception {}",
                    request.params(":appname"), request.params(":netstart"), request.params(":netend"), ex);
            return "Severe Error: Unsupported encoding in URL";
        }
        //get the servers for app network
        try {
            List<GeminiServer> lSrv = getAppNetworkServers(appName, netStart, netEnd);
            if (lSrv == null || lSrv.isEmpty()) {
                Logger.info("No servers for application {} with network start: {} and end: {}", appName,
                        netStart, netEnd);
                response.status(404);
                return "No servers for application " + appName + " with network start: " + netStart
                        + " and end: " + netEnd;
            } else {
                Logger.debug("Found servers for application {} with network start: {} and end: {}", appName,
                        netStart, netEnd);
                List<GeminiServerDTO> dtoSrvs = new ArrayList<>();
                for (GeminiServer s : lSrv) {
                    dtoSrvs.add(mapper.getDTOFromServer(s));
                }
                return dtoSrvs;
            }
        } catch (UnknownHostException ex) {
            Logger.error("Severe Error: Unknown host - {} Exception: {}", DB_SERVER, ex);
            return "Severe Error: Unknown database host " + DB_SERVER;
        }
    }, new JsonTransformer());

    //get the networks for a tenant and environment
    get("/networks/:tenantid/:envname", "application/json", (request, response) -> {
        String tenantID, envName;
        try {
            tenantID = URLDecoder.decode(request.params(":tenantid"), "UTF-8");
            envName = URLDecoder.decode(request.params(":envname"), "UTF-8");
        } catch (UnsupportedEncodingException ex) {
            Logger.error("Severe Error: Unsupported encoding in URL - server Name: {} Exception: {}",
                    request.params(":name"), ex);
            return "Severe Error: Unsupported encoding in URL";
        }
        try {
            List<GeminiNetwork> nets = getEnvNetworks(tenantID, envName);
            if (nets == null) {
                Logger.info("No networks discovered for tenant {} in environment {}", tenantID, envName);
                return "No networks discovered for tenant " + tenantID + " in environment " + envName;
            } else {
                response.status(200);
                List<GeminiNetworkDTO> dtoNets = new ArrayList<>();
                nets.stream().forEach(n -> dtoNets.add(mapper.getDTOFromNetwork(n)));
                Logger.debug("Found {} networks for tenant {} env {}", nets.size(), tenantID, envName);
                return dtoNets;
            }
        } catch (UnknownHostException ex) {
            Logger.error("Severe Error: Unknown database host - {}", DB_SERVER);
            return "Severe Error: Unknown database host " + DB_SERVER;
        }
    }, new JsonTransformer());

    get("/networks/:netstart/:netend", "application/json", (request, response) -> {
        String netStart = "", netEnd = "";
        //decode the URL as it may contain escape characters, etc.
        try {
            netStart = URLDecoder.decode(request.params(":netstart"), "UTF-8");
            netEnd = URLDecoder.decode(request.params(":netend"), "UTF-8");
        } catch (UnsupportedEncodingException ex) {
            Logger.error("Severe Error: Unsupported encoding in URL - netStart: {} netEnd: {} Exception: {}",
                    request.params(":netstart"), request.params(":netend"), ex);
            return "Severe Error: Unsupported encoding in URL";
        }
        try {
            GeminiNetwork n = getNetworkFromDB(netStart, netEnd);
            if (n == null) {
                Logger.info("No network with start {} and end {} found", netStart, netEnd);
                return "No network with start " + netStart + " and end " + netEnd + " found";
            } else {
                Logger.debug("Found network with start {} and end {} ", netStart, netEnd);
                return mapper.getDTOFromNetwork(n);
            }
        } catch (UnknownHostException ex) {
            Logger.error("Severe Error: Unknown host - {} Exception: {}", DB_SERVER, ex);
            return "Severe Error: Unknown database host " + DB_SERVER;
        }
    }, new JsonTransformer());

    get("/networks/:netstart/:netend/servers", "application/json", (request, response) -> {
        String netStart = "", netEnd = "";
        //decode the URL as it may contain escape characters, etc.
        try {
            netStart = URLDecoder.decode(request.params(":netstart"), "UTF-8");
            netEnd = URLDecoder.decode(request.params(":netend"), "UTF-8");
        } catch (UnsupportedEncodingException ex) {
            Logger.error("Severe Error: Unsupported encoding in URL - netStart: {} netEnd: {} Exception: {}",
                    request.params(":netstart"), request.params(":netend"), ex);
            return "Severe Error: Unsupported encoding in URL";
        }
        try {
            List<GeminiServer> lSrv = getNetworkServersFromDB(netStart, netEnd);
            if (lSrv == null) {
                Logger.info("No servers in network with start {} and end {} found", netStart, netEnd);
                return "No servers in network with start " + netStart + " and end " + netEnd + " found";
            } else {
                Logger.debug("Found servers in network with start {} and end {} ", netStart, netEnd);
                List<GeminiServerDTO> dtoSrvs = new ArrayList<>();
                for (GeminiServer s : lSrv) {
                    dtoSrvs.add(mapper.getDTOFromServer(s));
                }
                return dtoSrvs;
            }
        } catch (UnknownHostException ex) {
            Logger.error("Severe Error: Unknown host - {} Exception: {}", DB_SERVER, ex);
            return "Severe Error: Unknown database host " + DB_SERVER;
        }
    }, new JsonTransformer());

    post("/networks/:netstart/:netend", "application/json", (request, response) -> {
        String netStart = "", netEnd = "";
        //decode the URL as it may contain escape characters, etc.
        try {
            netStart = URLDecoder.decode(request.params(":netstart"), "UTF-8");
            netEnd = URLDecoder.decode(request.params(":netend"), "UTF-8");
        } catch (UnsupportedEncodingException ex) {
            Logger.error("Severe Error: Unsupported encoding in URL - netStart: {} netEnd: {} Exception: {}",
                    request.params(":netstart"), request.params(":netend"), ex);
            return "Severe Error: Unsupported encoding in URL";
        }
        //return the discovered networks
        //DiscoverNetworkRange newNet = new DiscoverNetworkRange(netStart, netEnd);
        //List<GeminiNetwork> lNet;
        //if (autoDiscover) {
        //    try {
        //        //start discovering...
        //        lNet = discoverNetworks(netStart, netEnd);
        //    } catch (IOException ex) {
        //        java.util.logging.Logger.getLogger(GeminiMain.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
        //    }
        //}
        //since all the services are running on the same computer
        response.header("Access-Control-Allow-Origin", "*");
        //return the networks...
        return "no networks";
    }, new JsonTransformer());

    //get all servers for tenant within an environment
    get("/servers", "application/json", (request, response) -> {
        try {
            List<GeminiServer> srvs = getServersFromDB();
            if (srvs == null) {
                Logger.info("Found no servers in database");
                return "No Servers";
            } else {
                Logger.debug("Found servers in database");
                List<GeminiServerDTO> dtoSrvs = new ArrayList<>();
                for (GeminiServer s : srvs) {
                    dtoSrvs.add(mapper.getDTOFromServer(s));
                }
                response.status(200);
                return dtoSrvs;
            }
        } catch (UnknownHostException ex) {
            Logger.error("Severe Error: Unknown host - {} Exception: {}", DB_SERVER, ex);
            return "Severe Error: Unknown database host " + DB_SERVER;
        }
    }, new JsonTransformer());

    get("/servers/:name", "application/json", (request, response) -> {
        String srvName;
        try {
            srvName = URLDecoder.decode(request.params(":name"), "UTF-8");
        } catch (UnsupportedEncodingException ex) {
            Logger.error("Severe Error: Unsupported encoding in URL - {} Exception {}",
                    request.params(":name"), ex);
            return "Severe Error: Unsupported encoding in URL";
        }
        try {
            GeminiServer s = getServerFromDB(srvName);
            if (s == null) {
                Logger.info("No server with name {} found", srvName);
                return "No server with name " + srvName;
            } else {
                Logger.debug("Found server with name {}", srvName);
                return mapper.getDTOFromServer(s);
            }
        } catch (UnknownHostException ex) {
            Logger.error("Severe Error: Unknown host - {} Exception: {}", DB_SERVER, ex);
            return "Severe Error: Unknown database host " + DB_SERVER;
        }
    }, new JsonTransformer());
}
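Each route above converts entities to DTOs with stream().forEach plus an add into a pre-created list; map + collect expresses the same conversion without the mutable accumulator. A sketch with made-up Entity/EntityDTO types standing in for the Gemini classes:

import java.util.List;
import java.util.stream.Collectors;

public class DtoMappingDemo {
    record Entity(String name) {}
    record EntityDTO(String name) {}

    // Stand-in for mapper.getDTOFromApp / getDTOFromNetwork / getDTOFromServer.
    static EntityDTO toDto(Entity e) {
        return new EntityDTO(e.name());
    }

    public static void main(String[] args) {
        List<Entity> entities = List.of(new Entity("app1"), new Entity("app2"));

        // map + collect replaces forEach-with-add.
        List<EntityDTO> dtos = entities.stream()
                .map(DtoMappingDemo::toDto)
                .collect(Collectors.toList());

        System.out.println(dtos); // [EntityDTO[name=app1], EntityDTO[name=app2]]
    }
}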
From source file:eu.amidst.core.inference.MAPInferenceExperiments.java
/**
 * The class constructor.
 * @param args Array of options: "filename variable a b N useVMP" if variable is continuous or "filename variable w N useVMP" for discrete
 */
public static void main(String[] args) throws Exception {
    String filename = ""; //Filename with the Bayesian Network
    //filename = "networks/randomlyGeneratedBN.bn";
    //BayesianNetworkGenerator.generateBNtoFile(nDiscrete, nStates, nContin, nLinks, seedNetwork, filename);
    //BayesianNetwork bn = BayesianNetworkLoader.loadFromFile(filename);

    int seedNetwork = 1253473;

    int nDiscrete = 50;
    int nStates = 2;
    int nContin = 50;
    int nLinks = (int) Math.round(1.3 * (nDiscrete + nContin));

    BayesianNetworkGenerator.setSeed(seedNetwork);
    BayesianNetworkGenerator.setNumberOfGaussianVars(nContin);
    BayesianNetworkGenerator.setNumberOfMultinomialVars(nDiscrete, nStates);
    BayesianNetworkGenerator.setNumberOfLinks(nLinks);

    BayesianNetwork bn = BayesianNetworkGenerator.generateBayesianNetwork();

    int seed = seedNetwork + 23715;

    if (Main.VERBOSE) System.out.println(bn.getDAG());
    if (Main.VERBOSE) System.out.println(bn.toString());

    MAPInference mapInference = new MAPInference();
    mapInference.setModel(bn);
    mapInference.setParallelMode(true);
    mapInference.setSampleSize(1);

    List<Variable> causalOrder = Utils.getTopologicalOrder(mapInference.getOriginalModel().getDAG());

    if (Main.VERBOSE) System.out.println("CausalOrder: " + Arrays.toString(
            Utils.getTopologicalOrder(mapInference.getOriginalModel().getDAG()).stream()
                    .map(Variable::getName).toArray()));
    if (Main.VERBOSE) System.out.println();

    int parallelSamples = 20;
    int samplingMethodSize = 100000;
    mapInference.setSampleSize(parallelSamples);

    long timeStart;
    long timeStop;
    double execTime;
    Assignment mapEstimate;

    /***********************************************
     * INCLUDING EVIDENCE
     ***********************************************/

    double observedVariablesRate = 0.05;
    Assignment evidence = randomEvidence(seed, observedVariablesRate, bn);

    mapInference.setEvidence(evidence);
    //if (Main.VERBOSE) System.out.println(evidence.outputString());

    /***********************************************
     * VARIABLES OF INTEREST
     ***********************************************/

    Variable varInterest1 = causalOrder.get(6);
    Variable varInterest2 = causalOrder.get(7);
    Variable varInterest3 = causalOrder.get(60);

    List<Variable> varsInterest = new ArrayList<>();
    varsInterest.add(varInterest1);
    varsInterest.add(varInterest2);
    varsInterest.add(varInterest3);
    mapInference.setMAPVariables(varsInterest);

    if (Main.VERBOSE) System.out.println("MAP Variables of Interest: "
            + Arrays.toString(varsInterest.stream().map(Variable::getName).toArray()));
    if (Main.VERBOSE) System.out.println();

    /***********************************************
     * SIMULATED ANNEALING
     ***********************************************/

    // MAP INFERENCE WITH SIMULATED ANNEALING, MOVING ALL VARIABLES EACH TIME
    timeStart = System.nanoTime();
    mapInference.runInference(MAPInference.SearchAlgorithm.SA_GLOBAL);

    mapEstimate = mapInference.getEstimate();
    if (Main.VERBOSE) System.out.println("MAP estimate (SA.All): " + mapEstimate.outputString(varsInterest));
    if (Main.VERBOSE) System.out.println(
            "with (unnormalized) probability: " + Math.exp(mapInference.getLogProbabilityOfEstimate()));
    timeStop = System.nanoTime();
    execTime = (double) (timeStop - timeStart) / 1000000000.0;
    if (Main.VERBOSE) System.out.println("computed in: " + Double.toString(execTime) + " seconds");
    if (Main.VERBOSE) System.out.println();

    // MAP INFERENCE WITH SIMULATED ANNEALING, MOVING SOME VARIABLES EACH TIME
    timeStart = System.nanoTime();
    mapInference.runInference(MAPInference.SearchAlgorithm.SA_LOCAL);

    mapEstimate = mapInference.getEstimate();
    if (Main.VERBOSE) System.out.println("MAP estimate (SA.Some): " + mapEstimate.outputString(varsInterest));
    if (Main.VERBOSE) System.out.println(
            "with (unnormalized) probability: " + Math.exp(mapInference.getLogProbabilityOfEstimate()));
    timeStop = System.nanoTime();
    execTime = (double) (timeStop - timeStart) / 1000000000.0;
    if (Main.VERBOSE) System.out.println("computed in: " + Double.toString(execTime) + " seconds");
    if (Main.VERBOSE) System.out.println();

    /***********************************************
     * HILL CLIMBING
     ***********************************************/

    // MAP INFERENCE WITH HILL CLIMBING, MOVING ALL VARIABLES EACH TIME
    timeStart = System.nanoTime();
    mapInference.runInference(MAPInference.SearchAlgorithm.HC_GLOBAL);

    mapEstimate = mapInference.getEstimate();
    if (Main.VERBOSE) System.out.println("MAP estimate (HC.All): " + mapEstimate.outputString(varsInterest));
    if (Main.VERBOSE) System.out.println(
            "with (unnormalized) probability: " + Math.exp(mapInference.getLogProbabilityOfEstimate()));
    timeStop = System.nanoTime();
    execTime = (double) (timeStop - timeStart) / 1000000000.0;
    if (Main.VERBOSE) System.out.println("computed in: " + Double.toString(execTime) + " seconds");
    if (Main.VERBOSE) System.out.println();

    // MAP INFERENCE WITH HILL CLIMBING, MOVING SOME VARIABLES EACH TIME
    timeStart = System.nanoTime();
    mapInference.runInference(MAPInference.SearchAlgorithm.HC_LOCAL);

    mapEstimate = mapInference.getEstimate();
    if (Main.VERBOSE) System.out.println("MAP estimate (HC.Some): " + mapEstimate.outputString(varsInterest));
    if (Main.VERBOSE) System.out.println(
            "with (unnormalized) probability: " + Math.exp(mapInference.getLogProbabilityOfEstimate()));
    timeStop = System.nanoTime();
    execTime = (double) (timeStop - timeStart) / 1000000000.0;
    if (Main.VERBOSE) System.out.println("computed in: " + Double.toString(execTime) + " seconds");
    if (Main.VERBOSE) System.out.println();

    /***********************************************
     * SAMPLING
     ***********************************************/

    // MAP INFERENCE WITH SIMULATION AND PICKING MAX
    mapInference.setSampleSize(samplingMethodSize);
    timeStart = System.nanoTime();
    mapInference.runInference(MAPInference.SearchAlgorithm.SAMPLING);

    mapEstimate = mapInference.getEstimate();
    if (Main.VERBOSE) System.out.println("MAP estimate (SAMPLING): " + mapEstimate.outputString(varsInterest));
    if (Main.VERBOSE) System.out.println(
            "with probability: " + Math.exp(mapInference.getLogProbabilityOfEstimate()));
    timeStop = System.nanoTime();
    execTime = (double) (timeStop - timeStart) / 1000000000.0;
    if (Main.VERBOSE) System.out.println("computed in: " + Double.toString(execTime) + " seconds");
    if (Main.VERBOSE) System.out.println();

    // PROBABILITIES OF INDIVIDUAL CONFIGURATIONS

    double s1 = mapInference.estimateProbabilityOfPartialAssignment(mapEstimate);
    if (Main.VERBOSE) System.out.println(mapEstimate.outputString(varsInterest) + " with prob. " + s1);

    mapEstimate.setValue(varInterest2, 1);
    double s2 = mapInference.estimateProbabilityOfPartialAssignment(mapEstimate);
    if (Main.VERBOSE) System.out.println(mapEstimate.outputString(varsInterest) + " with prob. " + s2);

    mapEstimate.setValue(varInterest1, 1);
    mapEstimate.setValue(varInterest2, 0);
    double s3 = mapInference.estimateProbabilityOfPartialAssignment(mapEstimate);
    if (Main.VERBOSE) System.out.println(mapEstimate.outputString(varsInterest) + " with prob. " + s3);

    mapEstimate.setValue(varInterest2, 1);
    double s4 = mapInference.estimateProbabilityOfPartialAssignment(mapEstimate);
    if (Main.VERBOSE) System.out.println(mapEstimate.outputString(varsInterest) + " with prob. " + s4);

    double sum = s1 + s2 + s3 + s4;

    if (Main.VERBOSE) System.out.println();
    if (Main.VERBOSE) System.out.println("Sum = " + sum + "; Normalized probs: [V1=0,V2=0]=" + s1 / sum
            + ", [V1=0,V2=1]=" + s2 / sum + ", [V1=1,V2=0]=" + s3 / sum + ", [V1=1,V2=1]=" + s4 / sum);
}
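The stream() calls in this experiment only extract variable names for logging: varsInterest.stream().map(Variable::getName).toArray(). Distilled into a runnable sketch, with a hypothetical Variable record standing in for the AMIDST class:

import java.util.Arrays;
import java.util.List;

public class VariableNamesDemo {
    // Stand-in for eu.amidst.core.variables.Variable.
    record Variable(String name) {
        String getName() { return name; }
    }

    public static void main(String[] args) {
        List<Variable> varsInterest = List.of(new Variable("A"), new Variable("B"), new Variable("C"));

        // Map each variable to its name and print the resulting array, as the experiment code does.
        System.out.println("MAP Variables of Interest: "
                + Arrays.toString(varsInterest.stream().map(Variable::getName).toArray()));
        // MAP Variables of Interest: [A, B, C]
    }
}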
From source file:com.act.lcms.v2.MZCollisionCounter.java
public static void main(String[] args) throws Exception {
    CLIUtil cliUtil = new CLIUtil(MassChargeCalculator.class, HELP_MESSAGE, OPTION_BUILDERS);
    CommandLine cl = cliUtil.parseCommandLine(args);

    File inputFile = new File(cl.getOptionValue(OPTION_INPUT_INCHI_LIST));
    if (!inputFile.exists()) {
        cliUtil.failWithMessage("Input file does not exist at %s", inputFile.getAbsolutePath());
    }

    List<MassChargeCalculator.MZSource> sources = new ArrayList<>();
    try (BufferedReader reader = new BufferedReader(new FileReader(inputFile))) {
        String line;
        while ((line = reader.readLine()) != null) {
            line = line.trim();
            sources.add(new MassChargeCalculator.MZSource(line));
            if (sources.size() % 1000 == 0) {
                LOGGER.info("Loaded %d sources from input file", sources.size());
            }
        }
    }

    Set<String> considerIons = Collections.emptySet();
    if (cl.hasOption(OPTION_ONLY_CONSIDER_IONS)) {
        List<String> ions = Arrays.asList(cl.getOptionValues(OPTION_ONLY_CONSIDER_IONS));
        LOGGER.info("Only considering ions for m/z calculation: %s", StringUtils.join(ions, ", "));
        considerIons = new HashSet<>(ions);
    }

    TSVWriter<String, Long> tsvWriter = new TSVWriter<>(Arrays.asList("collisions", "count"));
    tsvWriter.open(new File(cl.getOptionValue(OPTION_OUTPUT_FILE)));
    try {
        LOGGER.info("Loaded %d sources in total from input file", sources.size());

        MassChargeCalculator.MassChargeMap mzMap = MassChargeCalculator.makeMassChargeMap(sources, considerIons);

        if (!cl.hasOption(OPTION_COUNT_WINDOW_INTERSECTIONS)) {
            // Do an exact analysis of the m/z collisions if windowing is not specified.
            LOGGER.info("Computing precise collision histogram.");
            Iterable<Double> mzs = mzMap.ionMZIter();
            Map<Integer, Long> collisionHistogram = histogram(
                    StreamSupport.stream(mzs.spliterator(), false).map(mz -> {
                        try {
                            return mzMap.ionMZToMZSources(mz).size();
                        } catch (NoSuchElementException e) {
                            LOGGER.error("Caught no such element exception for mz %f: %s", mz, e.getMessage());
                            throw e;
                        }
                    }));
            List<Integer> sortedCollisions = new ArrayList<>(collisionHistogram.keySet());
            Collections.sort(sortedCollisions);
            for (Integer collision : sortedCollisions) {
                tsvWriter.append(new HashMap<String, Long>() {
                    {
                        put("collisions", collision.longValue());
                        put("count", collisionHistogram.get(collision));
                    }
                });
            }
        } else {
            /* After some deliberation (thanks Gil!), the windowed variant of this calculation counts the number
             * of structures whose 0.01 Da m/z windows (for some set of ions) overlap with each other.
             *
             * For example, let's assume we have five total input structures, and are only searching for one ion.
             * Let's also assume that three of those structures have m/z A and the remaining two have m/z B. The
             * windows might look like this in the m/z domain:
             *   |----A----|
             *        |----B----|
             * Because A represents three structures and overlaps with B, which represents two, we assign A a
             * count of 5--this is the number of structures we believe could fall into the range of A given our
             * current peak calling approach. Similarly, B is assigned a count of 5, as the possibility for
             * collision/confusion is symmetric.
             *
             * Note that this is an over-approximation of collisions, as we could more precisely only consider
             * intersections when the exact m/z of B falls within the window around A and vice versa. However,
             * because we have observed cases where the MS sensor doesn't report structures at exactly the m/z
             * we predict, we employ this weaker definition of intersection to give a slightly pessimistic view
             * of what confusions might be possible. */
            // Compute windows for every m/z. We don't care about the original mz values since we just want the count.
            List<Double> mzs = mzMap.ionMZsSorted();

            final Double windowHalfWidth;
            if (cl.hasOption(OPTION_WINDOW_HALFWIDTH)) {
                // Don't use get with default for this option, as we want the exact FP value of the default tolerance.
                windowHalfWidth = Double.valueOf(cl.getOptionValue(OPTION_WINDOW_HALFWIDTH));
            } else {
                windowHalfWidth = DEFAULT_WINDOW_TOLERANCE;
            }

            /* Window = (lower bound, upper bound), counter of represented m/z's that collide with this window,
             * and number of representative structures (which will be used in counting collisions). */
            LinkedList<CollisionWindow> allWindows = new LinkedList<CollisionWindow>() {
                {
                    for (Double mz : mzs) {
                        // CPU for memory trade-off: don't re-compute the window bounds over and over.
                        try {
                            add(new CollisionWindow(mz, windowHalfWidth, mzMap.ionMZToMZSources(mz).size()));
                        } catch (NoSuchElementException e) {
                            LOGGER.error("Caught no such element exception for mz %f: %s", mz, e.getMessage());
                            throw e;
                        }
                    }
                }
            };

            // Sweep line time! The window ranges are the interesting points. We just accumulate overlap counts as we go.
            LinkedList<CollisionWindow> workingSet = new LinkedList<>();
            List<CollisionWindow> finished = new LinkedList<>();

            while (allWindows.size() > 0) {
                CollisionWindow thisWindow = allWindows.pop();
                // Remove any windows from the working set that don't overlap with the next window.
                while (workingSet.size() > 0 && workingSet.peekFirst().getMaxMZ() < thisWindow.getMinMZ()) {
                    finished.add(workingSet.pop());
                }

                for (CollisionWindow w : workingSet) {
                    /* Add the size of the new overlapping window's structure count to each of the windows in the
                     * working set, which represents the number of possible confused structures that fall within
                     * the overlapping region. We exclude the window itself as it should already have counted the
                     * colliding structures it represents. */
                    w.getAccumulator().add(thisWindow.getStructureCount());

                    /* Reciprocally, add the structure counts of all windows with which the current window
                     * overlaps to it. */
                    thisWindow.getAccumulator().add(w.getStructureCount());
                }

                // Now that accumulation is complete, we can safely add the current window.
                workingSet.add(thisWindow);
            }

            // All the interesting events are done, so drop the remaining windows into the finished set.
            finished.addAll(workingSet);

            Map<Long, Long> collisionHistogram = histogram(
                    finished.stream().map(w -> w.getAccumulator().longValue()));
            List<Long> sortedCollisions = new ArrayList<>(collisionHistogram.keySet());
            Collections.sort(sortedCollisions);
            for (Long collision : sortedCollisions) {
                tsvWriter.append(new HashMap<String, Long>() {
                    {
                        put("collisions", collision);
                        put("count", collisionHistogram.get(collision));
                    }
                });
            }
        }
    } finally {
        if (tsvWriter != null) {
            tsvWriter.close();
        }
    }
}
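The histogram(...) helper used above isn't shown on this page. A plausible stream-based equivalent groups identical values and counts them; note this helper is an assumption for illustration, not the project's actual implementation:

import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class HistogramDemo {
    // Hypothetical stand-in for the histogram() helper: value -> number of occurrences.
    static <T> Map<T, Long> histogram(Stream<T> values) {
        return values.collect(Collectors.groupingBy(Function.identity(), Collectors.counting()));
    }

    public static void main(String[] args) {
        List<Integer> collisionCounts = List.of(1, 1, 2, 3, 3, 3);
        // Two values collided once, one twice, three thrice: prints {1=2, 2=1, 3=3}.
        System.out.println(histogram(collisionCounts.stream()));
    }
}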
From source file:com.bobby.peng.learning.java.stream.StreamMap.java
public static void printOut(List<Integer> list) {
    list.stream().forEach(StreamMap::println);
}
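StreamMap::println refers to a helper in that class that isn't shown here; for simply printing a list, System.out::println works directly, and Iterable.forEach needs no stream at all:

import java.util.List;

public class PrintDemo {
    public static void main(String[] args) {
        List<Integer> list = List.of(1, 2, 3);
        list.stream().forEach(System.out::println); // via a stream, as above
        list.forEach(System.out::println);          // Iterable.forEach, no stream needed
    }
}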
From source file:Main.java
public static <T> List<String> toStringsList(List<T> list) {
    return list.stream().map(x -> x.toString()).collect(Collectors.toList());
}
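A quick usage sketch: any element type works, since toString() is called on each element (x -> x.toString() could also be written Object::toString):

import java.util.List;
import java.util.stream.Collectors;

public class ToStringsDemo {
    public static <T> List<String> toStringsList(List<T> list) {
        return list.stream().map(x -> x.toString()).collect(Collectors.toList());
    }

    public static void main(String[] args) {
        System.out.println(toStringsList(List.of(1, 2, 3))); // [1, 2, 3]
    }
}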
From source file:Main.java
public static <T> boolean containsAny(List<T> c1, List<? extends T> c2) {
    return c1.stream().anyMatch((e) -> c2.contains(e));
}
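Usage sketch: anyMatch short-circuits on the first hit, but List.contains is a linear scan, so for large inputs copying c2 into a HashSet first avoids O(n*m) behavior:

import java.util.List;

public class ContainsAnyDemo {
    public static <T> boolean containsAny(List<T> c1, List<? extends T> c2) {
        return c1.stream().anyMatch((e) -> c2.contains(e));
    }

    public static void main(String[] args) {
        System.out.println(containsAny(List.of(1, 2, 3), List.of(9, 3))); // true
        System.out.println(containsAny(List.of(1, 2, 3), List.of(8, 9))); // false
    }
}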
From source file:Main.java
/**
 * Transform a list of objects into an array of primitives
 *
 * @param listOfObjects
 * @return
 */
public static double[] convertToPrimitives(List<Double> listOfObjects) {
    return listOfObjects.stream().mapToDouble(Double::doubleValue).toArray();
}
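Usage sketch: mapToDouble produces a DoubleStream, whose toArray() yields double[] rather than Double[], unboxing each element along the way:

import java.util.Arrays;
import java.util.List;

public class ConvertDemo {
    public static double[] convertToPrimitives(List<Double> listOfObjects) {
        return listOfObjects.stream().mapToDouble(Double::doubleValue).toArray();
    }

    public static void main(String[] args) {
        double[] primitives = convertToPrimitives(List.of(1.5, 2.5, 3.5));
        System.out.println(Arrays.toString(primitives)); // [1.5, 2.5, 3.5]
    }
}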