List of usage examples for java.util.LinkedHashMap.keySet()
public Set<K> keySet()
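Before the project examples below, a minimal self-contained sketch of the call itself (the class name and sample data are invented for illustration): for a LinkedHashMap, keySet() returns a live view of the keys that iterates in insertion order.

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;

public class KeySetDemo {
    public static void main(String[] args) {
        Map<String, Integer> ages = new LinkedHashMap<>();
        ages.put("alice", 30);
        ages.put("bob", 25);
        ages.put("carol", 41);

        // For a LinkedHashMap, keySet() iterates in insertion order:
        // alice, bob, carol.
        Set<String> names = ages.keySet();
        for (String name : names) {
            System.out.println(name + " -> " + ages.get(name));
        }

        // The set is a view backed by the map: removing a key from the
        // view also removes the corresponding entry from the map.
        names.remove("bob");
        System.out.println(ages.containsKey("bob")); // prints: false
    }
}

Every example that follows relies on this ordering guarantee in one way or another.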
From source file:com.web.server.WebServer.java
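In this embedded web server, keySet() returns the filter URL patterns of a web application in registration order; the requested resource is regex-matched against each pattern in turn, so the first matching filter chain wins.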
/**
 * This method obtains the content executor which executes the executor services
 * @param deployDirectory
 * @param resource
 * @param httpHeaderClient
 * @param serverdigester
 * @return byte[]
 */
public byte[] ObtainContentExecutor(String deployDirectory, String resource, HttpHeaderClient httpHeaderClient,
        Digester serverdigester, Hashtable urlClassLoaderMap, ConcurrentHashMap servletMapping,
        com.web.server.HttpSessionServer session) {
    String[] resourcepath = resource.split("/");
    Method method = null;
    com.web.server.Executors serverconfig;
    if (resourcepath.length > 1) {
        try {
            ClassLoader oldCL = null;
            String urlresource = ObtainUrlFromResource(resourcepath);
            try {
                HttpSessionServer httpSession;
                logger.info(deployDirectory + "/" + resourcepath[1] + " "
                        + servletMapping.get(deployDirectory + "/" + resourcepath[1]));
                if (servletMapping.get(deployDirectory + "/" + resourcepath[1]) != null) {
                    WebAppConfig webAppConfig = (WebAppConfig) servletMapping
                            .get(deployDirectory + "/" + resourcepath[1]);
                    webAppConfig = webAppConfig.clone();
                    webAppConfig.setWebApplicationAbsolutePath(deployDirectory + "/" + resourcepath[1]);
                    WebClassLoader customClassLoader = null;
                    Class customClass = null;
                    customClassLoader = (WebClassLoader) urlClassLoaderMap
                            .get(deployDirectory + "/" + resourcepath[1]);
                    oldCL = Thread.currentThread().getContextClassLoader();
                    Thread.currentThread().setContextClassLoader(customClassLoader);
                    ConcurrentHashMap servletMappingsURL = webAppConfig.getServletMappingURL();
                    Enumeration urlPattern = servletMappingsURL.keys();
                    while (urlPattern.hasMoreElements()) {
                        String pattern = (String) urlPattern.nextElement();
                        Pattern r = Pattern.compile(pattern.replace("*", "(.*)"));
                        Matcher m = r.matcher(urlresource);
                        if (m.find()) {
                            urlresource = pattern;
                            break;
                        }
                    }
                    LinkedHashMap<String, Vector<FilterMapping>> filterMappings = webAppConfig
                            .getFilterMappingURL();
                    Set<String> filterMappingKeys = filterMappings.keySet();
                    Iterator<String> filterMappingRoller = filterMappingKeys.iterator();
                    Vector<FilterMapping> filterMapping = null;
                    while (filterMappingRoller.hasNext()) {
                        String pattern = (String) filterMappingRoller.next();
                        Pattern r = Pattern.compile(pattern.replace("*", "(.*)"));
                        Matcher m = r.matcher(urlresource);
                        if (m.find()) {
                            filterMapping = filterMappings.get(pattern);
                            break;
                        }
                    }
                    if (servletMappingsURL.get(urlresource) != null) {
                        ServletMapping servletMappings = (ServletMapping) servletMappingsURL.get(urlresource);
                        ConcurrentHashMap servlets = webAppConfig.getServlets();
                        Servlets servlet = (Servlets) servlets.get(servletMappings.getServletName());
                        HttpServlet httpServlet = null;
                        System.out.println("Session " + session);
                        if (session.getAttribute("SERVLETNAME:" + deployDirectory + "/" + resourcepath[1]
                                + servletMappings.getServletName()) != null) {
                            httpServlet = (HttpServlet) session.getAttribute("SERVLETNAME:" + deployDirectory
                                    + "/" + resourcepath[1] + servletMappings.getServletName());
                            httpServlet.init();
                        } else {
                            Class servletClass = customClassLoader.loadClass(servlet.getServletClass());
                            httpServlet = (HttpServlet) servletClass.newInstance();
                            httpServlet.init(new WebServletConfig(servlet.getServletName().trim(), webAppConfig,
                                    customClassLoader));
                            httpServlet.init();
                            session.setAttribute("SERVLETNAME:" + deployDirectory + "/" + resourcepath[1]
                                    + servletMappings.getServletName(), httpServlet);
                        }
                        if (httpHeaderClient.getHttpMethod().trim().toUpperCase().equals("GET")
                                || httpHeaderClient.getHttpMethod().trim().toUpperCase().equals("POST")) {
                            Response response = new Response(httpHeaderClient);
                            StringBuffer servletPath = new StringBuffer();
                            if (resourcepath.length > 1) {
                                int pathcount = 0;
                                for (String servPath : resourcepath) {
                                    if (pathcount > 1) {
                                        servletPath.append("/");
                                        servletPath.append(servPath);
                                    }
                                    pathcount++;
                                }
                            }
                            String servletpath = servletPath.toString();
                            if (servletpath.length() == 0)
                                servletpath = "/";
                            Request request = new Request(httpHeaderClient, session, servletpath,
                                    customClassLoader);
                            if (filterMapping != null) {
                                WebFilterChain webFilterChain = new WebFilterChain(httpServlet, webAppConfig,
                                        filterMapping, customClassLoader);
                                webFilterChain.doFilter(request, response);
                            } else {
                                httpServlet.service(request, response);
                            }
                            response.flushBuffer();
                            return response.getResponse();
                        }
                    } else {
                        if (customClassLoader != null) {
                            Map map = customClassLoader.classMap;
                            if (map.get(urlresource) != null) {
                                Class jspBaseCls = customClassLoader.loadClass((String) map.get(urlresource));
                                HttpJspBase jspBase = (HttpJspBase) jspBaseCls.newInstance();
                                WebServletConfig servletConfig = new WebServletConfig();
                                servletConfig.getServletContext().setAttribute(
                                        "org.apache.tomcat.InstanceManager",
                                        new WebInstanceManager(urlresource));
                                jspBase.init(servletConfig);
                                jspBase._jspInit();
                                Response response = new Response(httpHeaderClient);
                                StringBuffer servletPath = new StringBuffer();
                                if (resourcepath.length > 1) {
                                    int pathcount = 0;
                                    for (String servPath : resourcepath) {
                                        if (pathcount > 1) {
                                            servletPath.append("/");
                                            servletPath.append(servPath);
                                        }
                                        pathcount++;
                                    }
                                }
                                String servletpath = servletPath.toString();
                                if (servletpath.length() == 0)
                                    servletpath = "/";
                                jspBase._jspService(
                                        new Request(httpHeaderClient, session, servletpath, customClassLoader),
                                        response);
                                jspBase.destroy();
                                response.flushBuffer();
                                return response.getResponse();
                            }
                        }
                    }
                }
            } catch (Exception ex) {
                ex.printStackTrace();
            } finally {
                if (oldCL != null) {
                    Thread.currentThread().setContextClassLoader(oldCL);
                }
            }
            File file = new File(deployDirectory + "/" + resourcepath[1] + "/WEB-INF/executor-config.xml");
            if (!file.exists()) {
                return null;
            }
            WebClassLoader customClassLoader = (WebClassLoader) urlClassLoaderMap
                    .get(deployDirectory + "/" + resourcepath[1]);
            Class customClass = null;
            if ((file.isFile() && file.exists())) {
                synchronized (serverdigester) {
                    serverconfig = (com.web.server.Executors) serverdigester.parse(file);
                }
                ConcurrentHashMap urlMap = serverconfig.getExecutorMap();
                Executor executor = (Executor) urlMap.get(urlresource);
                if (executor != null && customClassLoader != null) {
                    customClass = customClassLoader.loadClass(executor.getExecutorclass());
                    ExecutorInterface executorInstance = (ExecutorInterface) customClass.newInstance();
                    Object buffer = null;
                    if (httpHeaderClient.getHttpMethod().trim().toUpperCase().equals("GET")) {
                        buffer = executorInstance.doGet(httpHeaderClient);
                    } else if (httpHeaderClient.getHttpMethod().trim().toUpperCase().equals("POST")) {
                        buffer = executorInstance.doPost(httpHeaderClient);
                    }
                    if (executor.getResponseResource() != null) {
                        httpHeaderClient.setExecutorBuffer(buffer);
                        String resourceClass = (String) customClassLoader.getClassMap()
                                .get(executor.getResponseResource().trim());
                        customClass = customClassLoader.loadClass(resourceClass);
                        HttpJspBase jspBase = (HttpJspBase) customClass.newInstance();
                        WebServletConfig servletConfig = new WebServletConfig();
                        servletConfig.getServletContext().setAttribute("org.apache.tomcat.InstanceManager",
                                new WebInstanceManager(urlresource));
                        jspBase.init(servletConfig);
                        jspBase._jspInit();
                        Response response = new Response(httpHeaderClient);
                        jspBase._jspService(new Request(httpHeaderClient, session, null, customClassLoader),
                                response);
                        jspBase.destroy();
                        response.flushBuffer();
                        return response.getResponse();
                    }
                    return buffer.toString().getBytes();
                }
            } else if (customClassLoader != null) {
                String resourceClass = (String) customClassLoader.getClassMap().get(urlresource);
                if (resourceClass == null)
                    return null;
                customClass = customClassLoader.loadClass(resourceClass);
                ExecutorInterface executorInstance = (ExecutorInterface) customClass.newInstance();
                Object buffer = executorInstance.doGet(httpHeaderClient);
                return buffer.toString().getBytes();
            }
        } catch (IOException | SAXException e) {
            e.printStackTrace();
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
        } catch (IllegalAccessException e) {
            e.printStackTrace();
        } catch (IllegalArgumentException e) {
            e.printStackTrace();
        } catch (SecurityException e) {
            e.printStackTrace();
        } catch (InstantiationException e) {
            e.printStackTrace();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
    return null;
}
From source file:org.neo4j.gis.spatial.OsmAnalysisTest.java
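Here a LinkedHashMap maps dynamic layer configurations to slide timestamps; iterating slides.keySet() exports the layers in the order the slides were created.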
public void testAnalysis2(String osm, int years, int days) throws IOException {
    SpatialDatabaseService spatial = new SpatialDatabaseService(graphDb());
    OSMLayer layer = (OSMLayer) spatial.getLayer(osm);
    OSMDataset dataset = (OSMDataset) layer.getDataset();
    Map<String, User> userIndex = new HashMap<String, User>();
    long latestTimestamp = 0L;
    long firstTimestamp = Long.MAX_VALUE;

    try (Transaction tx = spatial.getDatabase().beginTx()) {
        for (Node cNode : dataset.getAllChangesetNodes()) {
            long timestamp = (Long) cNode.getProperty("timestamp", 0L);
            Node userNode = dataset.getUser(cNode);
            String name = (String) userNode.getProperty("name");
            User user = userIndex.get(name);
            if (user == null) {
                user = new User(userNode.getId(), name);
                userIndex.put(name, user);
            }
            user.addChangeset(cNode, timestamp);
            if (latestTimestamp < timestamp)
                latestTimestamp = timestamp;
            if (firstTimestamp > timestamp)
                firstTimestamp = timestamp;
        }
        tx.success();
    }

    SortedSet<User> topTen = getTopTen(userIndex);
    Date latest = new Date(latestTimestamp);
    Calendar time = Calendar.getInstance();
    time.setTime(latest);
    int slidesPerYear = 360 / days;
    int slideCount = slidesPerYear * years;
    long msPerSlide = days * 24 * 3600000;
    int timeWindow = 15;
    StringBuffer userQuery = new StringBuffer();
    int user_rank = 1;
    for (User user : topTen) {
        if (userQuery.length() > 0)
            userQuery.append(" or ");
        userQuery.append("user = '" + user.name + "'");
        user_rank++;
    }

    LinkedHashMap<DynamicLayerConfig, Long> slides = new LinkedHashMap<DynamicLayerConfig, Long>();
    for (int i = -timeWindow; i < slideCount; i++) {
        long timestamp = latestTimestamp - i * msPerSlide;
        long minTime = timestamp;
        long maxTime = timestamp + 15 * msPerSlide;
        time.setTimeInMillis(timestamp);
        Date date = new Date(timestamp);
        System.out.println("Preparing slides for " + date);
        String name = osm + "-" + date;
        DynamicLayerConfig config = layer.addLayerConfig(name, Constants.GTYPE_GEOMETRY,
                "timestamp > " + minTime + " and timestamp < " + maxTime + " and (" + userQuery + ")");
        System.out.println("Added dynamic layer '" + config.getName() + "' with CQL: " + config.getQuery());
        slides.put(config, timestamp);
    }
    DynamicLayerConfig config = layer.addLayerConfig(osm + "-top-ten", Constants.GTYPE_GEOMETRY,
            userQuery.toString());
    System.out.println("Added dynamic layer '" + config.getName() + "' with CQL: " + config.getQuery());
    slides.clear();
    slides.put(config, 0L);

    StyledImageExporter imageExporter = new StyledImageExporter(graphDb());
    String exportDir = "target/export/" + osm + "/analysis";
    imageExporter.setExportDir(exportDir);
    imageExporter.setZoom(2.0);
    imageExporter.setOffset(-0.2, 0.25);
    imageExporter.setSize(1280, 800);
    imageExporter.setStyleFiles(new String[] { "sld/background.sld", "sld/rank.sld" });
    String[] layerPropertyNames = new String[] { "name", "timestamp", "user", "days", "user_rank" };
    StringBuffer userParams = new StringBuffer();
    user_rank = 1;
    for (User user : topTen) {
        if (userParams.length() > 0)
            userParams.append(",");
        userParams.append(user.name).append(":").append(user_rank);
        user_rank++;
    }

    boolean checkedOne = false;
    for (DynamicLayerConfig layerToExport : slides.keySet()) {
        layerToExport.setExtraPropertyNames(layerPropertyNames);
        layerToExport.getPropertyMappingManager().addPropertyMapper("timestamp", "days", "Days",
                Long.toString(slides.get(layerToExport)));
        layerToExport.getPropertyMappingManager().addPropertyMapper("user", "user_rank", "Map",
                userParams.toString());
        if (!checkedOne) {
            int i = 0;
            System.out.println("Checking layer '" + layerToExport + "' in detail");
            try (Transaction tx = db.beginTx()) {
                SearchRecords records = layerToExport.getIndex().search(new SearchAll());
                for (SpatialRecord record : records) {
                    System.out.println("Got record " + i + ": " + record);
                    for (String name : record.getPropertyNames()) {
                        System.out.println("\t" + name + ":\t" + record.getProperty(name));
                        checkedOne = true;
                    }
                    if (i++ > 10)
                        break;
                }
                tx.success();
            }
        }
        imageExporter.saveLayerImage(new String[] { osm, layerToExport.getName() },
                new File(layerToExport.getName() + ".png"));
    }
}
From source file:OSFFM_ORC.FederationActionManager.java
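This federation manager iterates CloudId_To_OIC.keySet() to walk the configured cloud site names in a fixed order and match each against the site list fetched from FEDSDN.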
/**
 * @param mapContainer
 * @param sClient
 * @param fedsdnURL
 * @return
 * @throws WSException
 * @throws JSONException
 * @author gtricomi
 */
private String checkTenantandInsertFEDSDN(FednetsLink mapContainer, Site sClient, String fedsdnURL, DBMongo m)
        throws WSException, JSONException, Exception {
    Response r = sClient.getAllSite(fedsdnURL);
    JSONArray ja = new JSONArray(r.readEntity(String.class));
    LinkedHashMap<String, OpenstackInfoContainer> CloudId_To_OIC = mapContainer.getCloudId_To_OIC();
    LinkedHashMap<String, JSONObject> tmpSiteList = new LinkedHashMap<String, JSONObject>();
    JSONArray inner = new JSONArray();
    String tenant = null;
    String tenant_password = null;
    String result = "";
    Set siteNameSet = new HashSet();
    for (int i = 0; i < ja.length(); i++) {
        JSONObject jo = (JSONObject) ja.get(i);
        tmpSiteList.put((String) jo.get("name"), jo);
    }
    Iterator it = CloudId_To_OIC.keySet().iterator();
    while (it.hasNext()) {
        String siteNameToCheck = (String) it.next();
        if ((tmpSiteList.containsKey(siteNameToCheck))) {
            try {
                OpenstackInfoContainer oik = (OpenstackInfoContainer) CloudId_To_OIC.get(siteNameToCheck);
                if (oik == null) {
                    String siteUsernameToCheck = "oneadmin";
                    String sitePasswordToCheck = "opennebulaone";
                    String credentials = siteUsernameToCheck + ":" + sitePasswordToCheck;
                    String user_id_insite = "1";
                    JSONObject inner_jo = new JSONObject("{\"site_id\" :\"ONE\",\"user_id_in_site\":\""
                            + user_id_insite + "\" ,\"credentials\":\"" + credentials + "\"}");
                    tenant = "review";
                    tenant_password = sitePasswordToCheck;
                } else {
                    String siteUsernameToCheck = oik.getTenant() + "@@@" + oik.getUser();
                    String sitePasswordToCheck = oik.getPassword();
                    String credentials = siteUsernameToCheck + ":" + sitePasswordToCheck;
                    KeystoneTest key = new KeystoneTest(oik.getTenant(), oik.getUser(), oik.getPassword(),
                            oik.getEndpoint());
                    String user_id_insite = null;
                    try {
                        user_id_insite = key.getTenantId(oik.getTenant());
                    } catch (Exception e) {
                        user_id_insite = "0";
                    }
                    // BEACON 03/07/2017: statically insert 0, but this needs to be checked!
                    String siteIdToCheck = ((Integer) ((JSONObject) tmpSiteList.get(siteNameToCheck)).get("id"))
                            .toString();
                    JSONObject inner_jo = new JSONObject("{\"site_id\" :\"" + siteIdToCheck
                            + "\",\"user_id_in_site\":\"" + user_id_insite + "\" ,\"credentials\":\""
                            + credentials + "\"}");
                    inner.put(inner_jo);
                    tenant = oik.getTenant();
                    // 03/07/2017: the federation password for the tenant is set equal to the OpenStack site's
                    tenant_password = sitePasswordToCheck;
                }
            } catch (Exception e) {
                LOGGER.error("Exception occurred in \"Valid Site\" field entry.\nSite skipped: "
                        + siteNameToCheck);
            }
        }
    }
    // 03/07/2017: verify username management like structure "NotManagedUser@userFederation@UME"
    System.out.println(inner.toString());
    JSONObject tenant_jo = new JSONObject("{\"name\" :\"" + tenant + "\",\"password\":\"" + tenant_password
            + "\" ,\"type\":\"admin\",\"valid_sites\": " + inner.toString(0) + "}");
    // 03-07-2017: hardcoded credential for tenant
    Tenant t = new Tenant(tenant, tenant_password);
    boolean ok = false;
    for (int k = 0; k < inner.length(); k++) {
        try {
            System.out.println(tenant_jo.toString(0));
            r = t.updateTen(tenant_jo, tenant, fedsdnURL);
            String respon = r.readEntity(String.class);
            if ((respon.contains("Tenant ")) && (respon.contains("updated."))) {
                respon = respon.substring(respon.indexOf("Tenant"));
                respon = respon.replace("Tenant ", "");
                respon = respon.replace(" updated.\"\n]", "");
                Integer rid = new Integer(respon);
                r = t.getInfoTenant(fedsdnURL, rid.longValue());
                respon = r.readEntity(String.class);
            } else {
                throw new Exception("Something in the site insertion process isn't working fine.");
            }
            JSONObject resp = new JSONObject(respon);
            JSONObject entry = new JSONObject();
            String fedsdntenid = ((Integer) resp.remove("id")).toString();
            result = fedsdntenid;
            entry.put("tenantID", fedsdntenid);
            entry.put("tenantEntry", resp);
            m.insertTenantTables(tenant, entry.toString(0));
        } catch (WSException ex) {
            LOGGER.error("Exception is occurred in checkTenantFEDSDN for tenant: " + tenant + "\n" + ex);
            ok = false;
        }
        ok = true;
        if (ok) {
            break;
        } else if (k == 3) {
            LOGGER.error("Something going wrong! It's Impossible add site on FEDSDN");
            // 03/07/2017: added to stop the flow if some site could not be inserted
            throw new Exception("Something in the site insertion process isn't working fine.");
        }
    }
    return result;
}
From source file:com.opengamma.analytics.financial.model.volatility.surface.VolatilitySurfaceFitter.java
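The fitter captures nodePoints.keySet() as the ordered collection of smile-model parameter names; because the map is a LinkedHashMap, the iteration order lines up with the transforms array indexed alongside it.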
/**
 * @param forwards Forward values of the underlying at the (increasing) expiry times
 * @param strikes An array of arrays that gives a set of strikes at each maturity (the outer array corresponds
 *   to the expiries and the inner arrays to the set of strikes at a particular expiry)
 * @param expiries The set of (increasing) expiry times
 * @param impliedVols An array of arrays that gives a set of implied volatilities at each maturity (with the
 *   same structure as strikes)
 * @param errors An array of arrays that gives a set of 'measurement' errors at each maturity (with the same
 *   structure as strikes)
 * @param model A smile model
 * @param nodePoints The time position of the nodes on each model parameter curve
 * @param interpolators The base interpolator used for each model parameter curve
 */
public VolatilitySurfaceFitter(final double[] forwards, final double[][] strikes, final double[] expiries,
        final double[][] impliedVols, final double[][] errors, final VolatilityFunctionProvider<T> model,
        final LinkedHashMap<String, double[]> nodePoints,
        final LinkedHashMap<String, Interpolator1D> interpolators) {
    Validate.notNull(forwards, "null forwards");
    Validate.notNull(strikes, "null strikes");
    Validate.notNull(expiries, "null expiries");
    Validate.notNull(impliedVols, "null implied vols");
    Validate.notNull(errors, "null error");
    Validate.notNull(model, "null model");
    _nExpiries = expiries.length;
    Validate.isTrue(forwards.length == _nExpiries, "#forwards != #expiries");
    Validate.isTrue(strikes.length == _nExpiries, "#strike sets != #expiries");
    Validate.isTrue(impliedVols.length == _nExpiries, "#vol sets != #expiries");
    Validate.isTrue(errors.length == _nExpiries, "#error sets != #expiries");

    _volFuncs = new ArrayList<Function1D<T, double[]>>(_nExpiries);
    _volAdjointFuncs = new ArrayList<Function1D<T, double[][]>>(_nExpiries);
    _struture = new int[_nExpiries];
    // check structure of common expiry strips
    int sum = 0;
    for (int i = 0; i < _nExpiries; i++) {
        final int n = strikes[i].length;
        Validate.isTrue(impliedVols[i].length == n, "#vols in strip " + i + " is wrong");
        Validate.isTrue(errors[i].length == n, "#vols in strip " + i + " is wrong");
        final Function1D<T, double[]> func = model.getVolatilityFunction(forwards[i], strikes[i], expiries[i]);
        _volFuncs.add(func);
        final Function1D<T, double[][]> funcAdjoint = model.getModelAdjointFunction(forwards[i], strikes[i],
                expiries[i]);
        _volAdjointFuncs.add(funcAdjoint);
        _struture[i] = n;
        sum += n;
    }
    _nOptions = sum;
    _expiries = expiries;
    _strikes = strikes;

    final double[] volsTemp = new double[_nOptions];
    final double[] errorsTemp = new double[_nOptions];
    int index = 0;
    for (int i = 0; i < _nExpiries; i++) {
        for (int j = 0; j < _struture[i]; j++) {
            volsTemp[index] = impliedVols[i][j];
            errorsTemp[index] = errors[i][j];
            index++;
        }
    }
    _vols = new DoubleMatrix1D(volsTemp);
    _errors = new DoubleMatrix1D(errorsTemp);

    final ParameterLimitsTransform[] transforms = getTransforms();
    _parameterNames = nodePoints.keySet();
    _nSmileModelParameters = _parameterNames.size();
    final LinkedHashMap<String, Interpolator1D> transformedInterpolators = new LinkedHashMap<String, Interpolator1D>(
            _nSmileModelParameters);
    sum = 0;
    index = 0;
    for (final String name : _parameterNames) {
        sum += nodePoints.get(name).length;
        final Interpolator1D tInter = new TransformedInterpolator1D(interpolators.get(name),
                transforms[index++]);
        transformedInterpolators.put(name, tInter);
    }
    _curveBuilder = new InterpolatedCurveBuildingFunction(nodePoints, transformedInterpolators);
    _nKnotPoints = sum;
}
From source file:au.org.ala.delta.key.Key.java
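KeyBest.orderBest returns a LinkedHashMap whose insertion order ranks characters from best to worst, so new ArrayList<Character>(bestMap.keySet()) yields the candidate characters in order of merit and element 0 is the best choice.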
private void doCalculateKey(TabularKey key, FilteredDataSet dataset, List<Character> includedCharacters,
        List<Item> includedItems, Specimen specimen,
        Map<Character, List<MultiStateAttribute>> confirmatoryCharacterValues,
        Map<Character, Double> usedCharacterCosts) {
    if (confirmatoryCharacterValues == null) {
        confirmatoryCharacterValues = new HashMap<Character, List<MultiStateAttribute>>();
    }
    if (usedCharacterCosts == null) {
        usedCharacterCosts = new HashMap<Character, Double>();
    }
    Set<Item> specimenAvailableTaxa = getSpecimenAvailableTaxa(specimen, includedItems);
    Set<Character> specimenAvailableCharacters = getSpecimenAvailableCharacters(specimen, includedCharacters);
    if (specimenAvailableTaxa.size() == 0) {
        return;
    } else if (specimenAvailableTaxa.size() == 1 || (_context.getStopAfterColumnNumber() != -1
            && specimen.getUsedCharacters().size() == _context.getStopAfterColumnNumber())) {
        // Add a row to the table if a taxon has been identified (only 1 taxon remains available).
        // If the column limit set using the STOP AFTER COLUMN directive has been reached,
        // add a row for each remaining taxon with the used characters.
        addRowsToTabularKey(key, specimen, specimenAvailableTaxa, confirmatoryCharacterValues,
                usedCharacterCosts);
    } else {
        // These won't be in order but that doesn't matter - best orders stuff itself
        List<Integer> specimenAvailableCharacterNumbers = new ArrayList<Integer>();
        for (Character ch : specimenAvailableCharacters) {
            specimenAvailableCharacterNumbers.add(ch.getCharacterId());
        }
        List<Integer> specimenAvailableTaxaNumbers = new ArrayList<Integer>();
        for (Item item : specimenAvailableTaxa) {
            specimenAvailableTaxaNumbers.add(item.getItemNumber());
        }
        MultiStateCharacter bestCharacter;
        // Find preset character for this column/group, if there is one
        int currentColumn = specimen.getUsedCharacters().size() + 1;
        int currentGroup = getGroupCountForColumn(currentColumn - 1, key) + 1;
        int presetCharacterNumber = _context.getPresetCharacter(currentColumn, currentGroup);
        LinkedHashMap<Character, Double> bestMap = KeyBest.orderBest(_context.getDataSet(),
                _context.getCharacterCostsAsArray(), _context.getCalculatedItemAbundanceValuesAsArray(),
                specimenAvailableCharacterNumbers, specimenAvailableTaxaNumbers, _context.getRBase(),
                _context.getABase(), _context.getReuse(), _context.getVaryWt(),
                _context.getAllowImproperSubgroups());
        // -1 indicates no preset character for the column/group
        if (presetCharacterNumber > 0) {
            Character presetCharacter = _context.getDataSet().getCharacter(presetCharacterNumber);
            if (checkPresetCharacter(presetCharacter, specimen, includedItems)) {
                _defaultOutputStream.println(MessageFormat.format("Using preset character {0},{1}:{2}",
                        presetCharacterNumber, currentColumn, currentGroup));
                bestCharacter = (MultiStateCharacter) presetCharacter;
            } else {
                throw new RuntimeException(
                        MessageFormat.format("Character {0} is not suitable for use at column {1} group {2}",
                                presetCharacterNumber, currentColumn, currentGroup));
            }
        } else {
            List<Character> bestOrderCharacters = new ArrayList<Character>(bestMap.keySet());
            if (bestOrderCharacters.isEmpty()) {
                if (specimen.getUsedCharacters().isEmpty()) {
                    // No characters have been used, therefore this is the beginning of the
                    // investigation. No suitable characters at this point is a fatal error.
                    throw new RuntimeException("No suitable characters. Execution terminated.");
                } else {
                    // Key is incomplete - no characters left to distinguish the remaining taxa,
                    // so write out the characters which got us this far through the
                    // investigation, and also raise an error.
                    addRowsToTabularKey(key, specimen, specimenAvailableTaxa, confirmatoryCharacterValues,
                            usedCharacterCosts);
                    // also mark the key as incomplete.
                    key.setKeyIncomplete(true);
                }
                return;
            } else {
                // KEY only uses multistate characters
                bestCharacter = (MultiStateCharacter) bestOrderCharacters.get(0);
            }
        }
        double bestCharacterCost = _context.getCharacterCost(bestCharacter.getCharacterId());
        List<ConfirmatoryCharacter> confirmatoryCharacters = null;
        int numberOfConfirmatoryCharacters = _context.getNumberOfConfirmatoryCharacters();
        if (numberOfConfirmatoryCharacters > 0) {
            List<Character> bestOrderCharacters = new ArrayList<Character>(bestMap.keySet());
            confirmatoryCharacters = getConfirmatoryCharacters(specimen, includedItems, bestOrderCharacters,
                    bestCharacter, numberOfConfirmatoryCharacters);
        }
        for (int i = 0; i < bestCharacter.getNumberOfStates(); i++) {
            int stateNumber = i + 1;
            MultiStateAttribute attr = createMultiStateAttribute(bestCharacter, stateNumber);
            specimen.setAttributeForCharacter(bestCharacter, attr);
            if (confirmatoryCharacters != null && !confirmatoryCharacters.isEmpty()) {
                List<MultiStateAttribute> confirmatoryAttributes = new ArrayList<MultiStateAttribute>();
                for (ConfirmatoryCharacter confChar : confirmatoryCharacters) {
                    int confCharNumber = confChar.getConfirmatoryCharacterNumber();
                    int confStateNumber = confChar.getConfirmatoryStateNumber(stateNumber);
                    if (confStateNumber == -1) {
                        // No matching state in the confirmatory character. Should only be the
                        // case when using the main state's character eliminates all remaining
                        // taxa. Simply ignore this state.
                        continue;
                    }
                    MultiStateAttribute confAttr = createMultiStateAttribute(
                            (MultiStateCharacter) _context.getDataSet().getCharacter(confCharNumber),
                            confStateNumber);
                    confirmatoryAttributes.add(confAttr);
                }
                confirmatoryCharacterValues.put(bestCharacter, confirmatoryAttributes);
            }
            usedCharacterCosts.put(bestCharacter, bestCharacterCost);
            doCalculateKey(key, dataset, includedCharacters, includedItems, specimen,
                    confirmatoryCharacterValues, usedCharacterCosts);
            specimen.removeValueForCharacter(bestCharacter);
            confirmatoryCharacterValues.remove(bestCharacter);
            usedCharacterCosts.remove(bestCharacter);
        }
    }
}
From source file:org.openmrs.module.chica.DynamicFormAccess.java
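The form fields are collected into a LinkedHashMap so that formFieldToValue.keySet() hands the fields to saveAnswers in the order they appear on the form.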
/**
 * Save the results of the fields marked as "Export Field".
 *
 * @param formInstance FormInstance object containing the relevant form information.
 * @param locationTagId The location tag identifier.
 * @param encounterId The encounter identifier associated with the form.
 * @param patient The patient the form belongs to.
 * @param formFieldMap Map from the HTTP request that contains the field name to values.
 * @param parameterHandler The parameterHandler used for rule execution.
 */
public void saveExportElements(FormInstance formInstance, Integer locationTagId, Integer encounterId,
        Patient patient, Map<String, String[]> formFieldMap, ParameterHandler parameterHandler) {
    HashMap<String, Field> fieldMap = new HashMap<String, Field>();
    FormService formService = Context.getFormService();
    Form form = formService.getForm(formInstance.getFormId());
    LinkedHashMap<FormField, String> formFieldToValue = new LinkedHashMap<FormField, String>();

    FieldType exportType = getFieldType("Export Field");
    List<FieldType> fieldTypes = new ArrayList<FieldType>();
    fieldTypes.add(exportType);
    List<FormField> formFields = Context.getService(ChirdlUtilBackportsService.class).getFormFields(form,
            fieldTypes, false);
    List<Integer> fieldIds = new ArrayList<Integer>();
    for (FormField formField : formFields) {
        fieldIds.add(formField.getField().getFieldId());
    }

    Iterator<FormField> formFieldIterator = formFields.iterator();
    while (formFieldIterator.hasNext()) {
        FormField formField = formFieldIterator.next();
        org.openmrs.Field field = formField.getField();
        String fieldName = field.getName();
        if (!formFieldMap.containsKey(fieldName)) {
            continue;
        }

        Field valueField = new Field(fieldName);
        fieldMap.put(fieldName, valueField);

        String[] valueObj = formFieldMap.get(fieldName);
        if (valueObj == null || valueObj.length == 0) {
            formFieldToValue.put(formField, null);
            continue;
        }
        String value = valueObj[0];
        formFieldToValue.put(formField, value);
        valueField.setValue(value);
    }

    consume(formInstance, patient, locationTagId, encounterId, fieldMap, formFieldToValue, parameterHandler,
            form);
    Context.getService(ChicaService.class).saveAnswers(fieldMap, formInstance, encounterId, patient,
            formFieldToValue.keySet());
    fieldMap.clear();
    formFieldToValue.clear();
}
From source file:com.streamsets.pipeline.lib.jdbc.multithread.TableContextUtil.java
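The offset columns are accumulated in a LinkedHashMap; offsetColumnToType.keySet() is then used both to query minimum offset values and, via set differences, to validate the user-supplied initial offsets.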
private TableContext createTableContext(PushSource.Context context, List<Stage.ConfigIssue> issues,
        Connection connection, String schemaName, String tableName, TableConfigBean tableConfigBean,
        TableJdbcELEvalContext tableJdbcELEvalContext, QuoteChar quoteChar) throws SQLException, StageException {
    LinkedHashMap<String, Integer> offsetColumnToType = new LinkedHashMap<>();
    // Even though we are using this only to find the partition column's type, we could cache it if need arises.
    final String qualifiedTableName = getQualifiedTableName(schemaName, tableName);
    Map<String, Integer> columnNameToType = getColumnNameType(connection, schemaName, tableName);
    Map<String, String> offsetColumnToStartOffset = new HashMap<>();

    if (tableConfigBean.overrideDefaultOffsetColumns) {
        if (tableConfigBean.offsetColumns.isEmpty()) {
            issues.add(context.createConfigIssue(Groups.TABLE.name(), TableJdbcConfigBean.TABLE_CONFIG,
                    JdbcErrors.JDBC_62, tableName));
            return null;
        }
        for (String overridenPartitionColumn : tableConfigBean.offsetColumns) {
            if (!columnNameToType.containsKey(overridenPartitionColumn)) {
                issues.add(context.createConfigIssue(Groups.TABLE.name(), TableJdbcConfigBean.TABLE_CONFIG,
                        JdbcErrors.JDBC_63, tableName, overridenPartitionColumn));
                return null;
            }
            offsetColumnToType.put(overridenPartitionColumn, columnNameToType.get(overridenPartitionColumn));
        }
    } else {
        List<String> primaryKeys = jdbcUtil.getPrimaryKeys(connection, schemaName, tableName);
        if (primaryKeys.isEmpty() && !tableConfigBean.enableNonIncremental) {
            issues.add(context.createConfigIssue(Groups.TABLE.name(), TableJdbcConfigBean.TABLE_CONFIG,
                    JdbcErrors.JDBC_62, tableName));
            return null;
        }
        primaryKeys.forEach(primaryKey -> offsetColumnToType.put(primaryKey, columnNameToType.get(primaryKey)));
    }

    checkForUnsupportedOffsetColumns(offsetColumnToType);

    Map<String, String> offsetColumnMinValues = new HashMap<>();
    if (tableConfigBean.partitioningMode != PartitioningMode.DISABLED) {
        offsetColumnMinValues.putAll(jdbcUtil.getMinimumOffsetValues(connection, schemaName, tableName,
                quoteChar, offsetColumnToType.keySet()));
    }

    // Initial offset should exist for all partition columns or none at all.
    if (!tableConfigBean.offsetColumnToInitialOffsetValue.isEmpty()) {
        Set<String> missingColumns = Sets.difference(offsetColumnToType.keySet(),
                tableConfigBean.offsetColumnToInitialOffsetValue.keySet());
        Set<String> extraColumns = Sets.difference(tableConfigBean.offsetColumnToInitialOffsetValue.keySet(),
                offsetColumnToType.keySet());
        if (!missingColumns.isEmpty() || !extraColumns.isEmpty()) {
            issues.add(context.createConfigIssue(Groups.TABLE.name(), TableJdbcConfigBean.TABLE_CONFIG,
                    JdbcErrors.JDBC_64, missingColumns.isEmpty() ? "(none)" : COMMA_JOINER.join(missingColumns),
                    extraColumns.isEmpty() ? "(none)" : COMMA_JOINER.join(extraColumns)));
            return null;
        }
        populateInitialOffset(context, issues, tableConfigBean.offsetColumnToInitialOffsetValue,
                tableJdbcELEvalContext, offsetColumnToStartOffset);
        checkForInvalidInitialOffsetValues(context, issues, qualifiedTableName, offsetColumnToType,
                offsetColumnToStartOffset);
    }

    final Map<String, String> offsetAdjustments = new HashMap<>();
    offsetColumnToType.keySet().forEach(c -> offsetAdjustments.put(c, tableConfigBean.partitionSize));

    return new TableContext(schemaName, tableName, offsetColumnToType, offsetColumnToStartOffset,
            offsetAdjustments, offsetColumnMinValues, tableConfigBean.enableNonIncremental,
            tableConfigBean.partitioningMode, tableConfigBean.maxNumActivePartitions,
            tableConfigBean.extraOffsetColumnConditions);
}
From source file:ubic.gemma.web.controller.expression.experiment.DEDVController.java
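The eeLayouts parameter keys bioassays to their factor layouts; iterating eeLayouts.keySet() processes the samples in layout order so the per-sample factor/colour maps stay aligned for display.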
/**
 * Prepare vvo for display on front end. Uses factors and factor values from layouts.
 *
 * @param vvo Note: This will be modified! It will be updated with the factorNames and factorValuesToNames
 */
private void prepareFactorsForFrontEndDisplay(VisualizationValueObject vvo,
        LinkedHashMap<BioAssayValueObject, LinkedHashMap<ExperimentalFactor, Double>> eeLayouts) {
    if (eeLayouts == null || eeLayouts.isEmpty()) {
        log.warn("No layouts, bail");
        vvo.setFactorNames(null);
        vvo.setFactorValuesToNames(null);
        return;
    }

    LinkedHashSet<ExperimentalFactor> factorNames = getFactorNames(eeLayouts);

    // colours for conditions/factor values bar chart FIXME make continuous maps different.
    Map<ExperimentalFactor, Queue<String>> factorColoursMap = createFactorNameToColoursMap(factorNames);
    String missingValueColour = "#DCDCDC";

    Random random = new Random();

    LinkedHashMap<String, LinkedHashMap<String, String>> factorToValueNames = new LinkedHashMap<>();
    // list of maps with entries: key = factorName, value = array of factor values; 1 entry per sample
    List<LinkedHashMap<String, String[]>> factorValueMaps = new ArrayList<>();

    Collection<String> factorsMissingValues = new HashSet<>();

    Collection<BioMaterialValueObject> seenSamples = new HashSet<>(); // if same sample was run more than once on diff platforms.
    Map<Long, FactorValue> fvs = new HashMap<>(); // avoid loading repeatedly.
    Collection<ExperimentalFactor> seenFactors = new HashSet<>();

    for (BioAssayValueObject ba : eeLayouts.keySet()) {
        if (seenSamples.contains(ba.getSample())) {
            continue;
        }
        seenSamples.add(ba.getSample());

        // double should be the factorValue id, defined in
        // ubic.gemma.core.visualization.ExperimentalDesignVisualizationService.getExperimentalDesignLayout(
        // ExpressionExperiment, BioAssayDimension)
        LinkedHashMap<ExperimentalFactor, Double> factorMap = eeLayouts.get(ba);

        LinkedHashMap<String, String[]> factorNamesToValueColourPairs = new LinkedHashMap<>(factorNames.size());

        // this is defensive, should only come into play when there's something messed up with the data.
        // for every factor, add a missing-value entry (guards against missing data messing up the layout)
        for (ExperimentalFactor factor : factorNames) {
            String[] facValAndColour = new String[] { "No value", missingValueColour };
            factorNamesToValueColourPairs.put(getUniqueFactorName(factor), facValAndColour);
        }

        // for each experimental factor, store the name and value
        for (Entry<ExperimentalFactor, Double> pair : factorMap.entrySet()) {
            ExperimentalFactor factor = pair.getKey();
            Double valueOrId = pair.getValue();

            /*
             * the double is only a double because it is meant to hold measurements when the factor is
             * continuous; if the factor is categorical, the double value is set to the value's id, see
             * ubic.gemma.core.visualization.ExperimentalDesignVisualizationService.getExperimentalDesignLayout(
             * ExpressionExperiment, BioAssayDimension)
             */
            if (valueOrId == null || factor.getType() == null
                    || (factor.getType().equals(FactorType.CATEGORICAL) && factor.getFactorValues().isEmpty())) {
                factorsMissingValues.add(getUniqueFactorName(factor));
                continue;
            }

            if (!seenFactors.contains(factor) && factor.getType().equals(FactorType.CATEGORICAL)) {
                for (FactorValue fv : factor.getFactorValues()) {
                    fvs.put(fv.getId(), fv);
                }
            }

            String facValsStr = getFacValsStr(fvs, factor, valueOrId);

            if (!factorToValueNames.containsKey(getUniqueFactorName(factor))) {
                factorToValueNames.put(getUniqueFactorName(factor), new LinkedHashMap<String, String>());
            }

            // assign colour if unassigned or fetch it if already assigned
            String colourString = "";
            if (!factorToValueNames.get(getUniqueFactorName(factor)).containsKey(facValsStr)) {
                if (factorColoursMap.containsKey(factor)) {
                    colourString = factorColoursMap.get(factor).poll();
                }
                if (colourString == null || Objects.equals(colourString, "")) { // ran out of predefined colours
                    colourString = getRandomColour(random);
                }
                factorToValueNames.get(getUniqueFactorName(factor)).put(facValsStr, colourString);
            } else {
                colourString = factorToValueNames.get(getUniqueFactorName(factor)).get(facValsStr);
            }
            String[] facValAndColour = new String[] { facValsStr, colourString };
            factorNamesToValueColourPairs.put(getUniqueFactorName(factor), facValAndColour);
        }
        factorValueMaps.add(factorNamesToValueColourPairs);
    }

    // add missing value entries here so they show up at the end of the legend's value lists
    if (!factorsMissingValues.isEmpty()) {
        for (String factorName : factorsMissingValues) {
            if (!factorToValueNames.containsKey(factorName)) {
                factorToValueNames.put(factorName, new LinkedHashMap<String, String>());
            }
            factorToValueNames.get(factorName).put("No value", missingValueColour);
        }
    }

    vvo.setFactorNames(factorToValueNames); // this is a summary of values & colours by factor, used for legend
    vvo.setFactorValuesToNames(factorValueMaps); // this is the list of maps for each sample
}
From source file:de.ingrid.importer.udk.strategy.v32.IDCStrategy3_2_0.java
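The syslist entries are keyed by entry id in a LinkedHashMap, so iterating syslistMap_de.keySet() inserts the German and English rows in the order the entries were defined.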
/**
 * Also drops all old values (if the syslist already exists)!
 *
 * @param listId id of syslist
 * @param deleteOldValues pass true if all old syslist values should be deleted before adding new ones
 * @param syslistMap_de german entries
 * @param syslistMap_en english entries
 * @param defaultEntry_de pass key of GERMAN default entry or -1 if no default entry
 * @param defaultEntry_en pass key of ENGLISH default entry or -1 if no default entry
 * @param syslistMap_descr_de pass null if no GERMAN description available
 * @param syslistMap_descr_en pass null if no ENGLISH description available
 * @throws Exception
 */
private void writeNewSyslist(int listId, boolean deleteOldValues, LinkedHashMap<Integer, String> syslistMap_de,
        LinkedHashMap<Integer, String> syslistMap_en, int defaultEntry_de, int defaultEntry_en,
        LinkedHashMap<Integer, String> syslistMap_descr_de, LinkedHashMap<Integer, String> syslistMap_descr_en)
        throws Exception {
    if (syslistMap_descr_de == null) {
        syslistMap_descr_de = new LinkedHashMap<Integer, String>();
    }
    if (syslistMap_descr_en == null) {
        syslistMap_descr_en = new LinkedHashMap<Integer, String>();
    }

    if (deleteOldValues) {
        // clean up, to guarantee no old values!
        sqlStr = "DELETE FROM sys_list where lst_id = " + listId;
        jdbc.executeUpdate(sqlStr);
    }

    String psSql = "INSERT INTO sys_list (id, lst_id, entry_id, lang_id, name, maintainable, is_default, description) "
            + "VALUES (?,?,?,?,?,?,?,?)";
    PreparedStatement psInsert = jdbc.prepareStatement(psSql);

    Iterator<Integer> itr = syslistMap_de.keySet().iterator();
    while (itr.hasNext()) {
        int key = itr.next();

        // german version
        String isDefault = "N";
        if (key == defaultEntry_de) {
            isDefault = "Y";
        }
        psInsert.setLong(1, getNextId());
        psInsert.setInt(2, listId);
        psInsert.setInt(3, key);
        psInsert.setString(4, "de");
        psInsert.setString(5, syslistMap_de.get(key));
        psInsert.setInt(6, 0);
        psInsert.setString(7, isDefault);
        psInsert.setString(8, syslistMap_descr_de.get(key));
        psInsert.executeUpdate();

        // english version
        isDefault = "N";
        if (key == defaultEntry_en) {
            isDefault = "Y";
        }
        psInsert.setLong(1, getNextId());
        psInsert.setString(4, "en");
        psInsert.setString(5, syslistMap_en.get(key));
        psInsert.setString(7, isDefault);
        psInsert.setString(8, syslistMap_descr_en.get(key));
        psInsert.executeUpdate();
    }
    psInsert.close();
}
From source file:gate.util.reporting.DocTimeReporter.java
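The report container preserves the order in which document timings were stored, so iterating rcHash.keySet() emits the report rows in that order, truncated to the configured maximum number of documents.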
/**
 * Prints the benchmark report in text format.
 *
 * @param reportContainer An Object of type LinkedHashMap<String, Object> containing the document names
 *   (with time in milliseconds) in hierarchical structure.
 * @param outputFile An object of type File representing the output report file.
 */
private void printToText(Object reportContainer, File outputFile) {
    ArrayList<String> printLines = new ArrayList<String>();
    @SuppressWarnings("unchecked")
    LinkedHashMap<String, Object> rcHash = (LinkedHashMap<String, Object>) reportContainer;
    String docs = "";
    if (maxDocumentInReport != ALL_DOCS) {
        if (allDocs.size() < maxDocumentInReport) {
            docs = Integer.toString(allDocs.size());
        } else {
            docs = Integer.toString(maxDocumentInReport);
        }
    } else {
        docs = "All";
    }
    printLines.add("=============================================================" + NL);
    if (PRMatchingRegex.equals(MATCH_ALL_PR_REGEX)) {
        printLines.add("Top " + docs + " expensive documents matching All PRs in " + pipelineName + NL);
    } else {
        if (matchingPRs.size() > 0) {
            printLines.add("Top " + docs + " expensive documents matching following PRs in " + pipelineName
                    + NL);
            for (String pr : matchingPRs) {
                printLines.add("\t" + pr + NL);
            }
        } else {
            printLines.add("No PRs matched to search string \"" + getPRMatchingRegex() + "\"" + " in "
                    + pipelineName);
            printLines.add(NL);
            printLines.add("=============================================================" + NL);
        }
    }
    if (allDocs.size() > 0) {
        printLines.add("=============================================================" + NL);
        printLines.add("Document Name" + "\t" + "Time (in seconds)" + "\t" + "%" + NL);
        printLines.add("-------------------------------------------------------------" + NL);
    }

    Iterator<String> i = rcHash.keySet().iterator();
    int count = 0;
    // Iterating over the report container
    while (i.hasNext()) {
        Object key = i.next();
        if (!((String) key).equals("total")) {
            int value = Integer.parseInt((String) rcHash.get(key));
            if (maxDocumentInReport == ALL_DOCS)
                printLines.add(key + "\t" + value / 1000.0 + "\t"
                        + Math.round(((value / globalTotal) * 100) * 10) / 10.0 + NL);
            else if (count < maxDocumentInReport)
                printLines.add(key + "\t" + value / 1000.0 + "\t"
                        + Math.round(((value / globalTotal) * 100) * 10) / 10.0 + NL);
        }
        count++;
    }

    if (allDocs.size() > 0) {
        printLines.add("-------------------------------------------------------------" + NL);
        printLines.add("Pipeline Total" + "\t" + globalTotal / 1000.0 + "\t" + 100 + NL + NL + NL);
    }

    BufferedWriter out = null;
    try {
        // Writing to report file
        out = new BufferedWriter(new FileWriter(outputFile, true));
        for (String line : printLines) {
            out.write(line);
        }
    } catch (IOException e) {
        e.printStackTrace();
    } finally {
        try {
            if (out != null) {
                out.close();
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}