List of usage examples for java.util LinkedHashMap containsKey
boolean containsKey(Object key);
From source file:org.bimserver.charting.SupportFunctions.java
/**
 * Builds chart raw data grouping rough product-volume estimates by material name.
 * <p>
 * For every {@code IfcRelAssociatesMaterial} relationship in the model, the rough
 * volumes of the related products are apportioned to each material name by the
 * material's percentage share. Each material's sizes are then sorted ascending and
 * emitted as a cumulative series over an arbitrary 0..10000 "date" axis so that
 * the sub-charts are comparable along X.
 *
 * @param structureKeyword column name used as the leaf dimension key in the chart
 * @param model the IFC model to scan (assumes volumes are obtainable per product
 *              via getRoughVolumeEstimateFromIfcProduct — TODO confirm units)
 * @param chart chart configuration to update with dimension lookup keys
 * @param subChartCount out-parameter; receives the number of distinct materials
 * @return one row per cumulative data point: {leafColumnName, "date", "size"}
 */
public static ArrayList<LinkedHashMap<String, Object>> getIfcMaterialsByNameWithTreeStructure(
        String structureKeyword, IfcModelInterface model, Chart chart, MutableInt subChartCount) {
    // Derive the column name.
    String leafColumnName = structureKeyword;
    // Update the chart configuration.
    chart.setDimensionLookupKey(structureKeyword, leafColumnName);
    chart.setDimensionLookupKey("date", "date");
    chart.setDimensionLookupKey("size", "size");
    // Prepare to iterate the relationships: material name -> collected sizes.
    LinkedHashMap<String, ArrayList<Double>> materialNameWithSizes = new LinkedHashMap<>();
    // Iterate only the relationships.
    for (IfcRelAssociatesMaterial ifcRelAssociatesMaterial : model
            .getAllWithSubTypes(IfcRelAssociatesMaterial.class)) {
        // IfcMaterialSelect: IfcMaterial, IfcMaterialList, IfcMaterialLayerSetUsage,
        // IfcMaterialLayerSet, IfcMaterialLayer.
        IfcMaterialSelect materialLike = ifcRelAssociatesMaterial.getRelatingMaterial();
        // If there was a material-like object, sum the names of what it decomposes
        // into across X individually.
        if (materialLike != null) {
            // First, get size data from IFC products.
            ArrayList<Double> sizes = new ArrayList<>();
            // Iterate objects related by this association.
            EList<IfcRoot> ifcRoots = ifcRelAssociatesMaterial.getRelatedObjects();
            for (IfcRoot ifcRoot : ifcRoots) {
                Double size = 0.0;
                // Only IfcProduct instances contribute a volume estimate; the
                // nested instanceof chain narrows down to that type.
                if (ifcRoot instanceof IfcObjectDefinition) {
                    IfcObjectDefinition ifcObjectDefinition = (IfcObjectDefinition) ifcRoot;
                    if (ifcObjectDefinition instanceof IfcObject) {
                        IfcObject ifcObject = (IfcObject) ifcObjectDefinition;
                        if (ifcObject instanceof IfcProduct) {
                            IfcProduct ifcProduct = (IfcProduct) ifcObject;
                            Double volume = getRoughVolumeEstimateFromIfcProduct(ifcProduct);
                            size = volume;
                        }
                    }
                }
                // Keep only strictly positive sizes (null guards against a null
                // volume estimate from the helper).
                if (size != null && size > 0)
                    sizes.add(size);
            }
            // Get material names with percentages, like: Material Name -> 0.5
            LinkedHashMap<String, Double> materials = getNameOfMaterialsFromMaterialLikeWithPercents(
                    materialLike, false);
            // Second, iterate materials, realizing the percentage of the sizes onto
            // the collection of sizes for each material name.
            for (Entry<String, Double> materialEntry : materials.entrySet()) {
                String materialName = materialEntry.getKey();
                Double percent = materialEntry.getValue();
                // Use material name if available. Otherwise, use OID of top-level
                // material-like object.
                String name = (materialName != null) ? materialName
                        : String.format("%d", materialLike.getOid());
                // Add entry if it doesn't exist.
                if (!materialNameWithSizes.containsKey(name))
                    materialNameWithSizes.put(name, new ArrayList<Double>());
                ArrayList<Double> theseSizes = materialNameWithSizes.get(name);
                // Get existing size data.
                if (percent != null && percent > 0) {
                    // If no alteration is required, clone into the stack.
                    if (percent == 1.0)
                        theseSizes.addAll(sizes);
                    // Otherwise, realize the percent of the size.
                    else
                        for (Double size : sizes)
                            theseSizes.add(size * percent);
                }
            }
        }
    }
    // Communicate the number of sub-charts (one per material) to the caller.
    subChartCount.setValue(materialNameWithSizes.size());
    // Prepare the raw data pool.
    ArrayList<LinkedHashMap<String, Object>> rawData = new ArrayList<>();
    for (Entry<String, ArrayList<Double>> entry : materialNameWithSizes.entrySet()) {
        String name = entry.getKey();
        // Get existing size data.
        ArrayList<Double> sizes = materialNameWithSizes.get(name);
        // Sort, value ascending.
        Collections.sort(sizes, sortSmallerValuesToFront);
        // Prepend a zero entry so each series starts at the origin; if the series
        // was empty, prepend a second zero so it still has two points.
        sizes.add(0, 0.0);
        if (sizes.size() == 1)
            sizes.add(0, 0.0);
        // Count including empty first entry.
        double count = Math.max(1, sizes.size() - 1);
        double step = 10000.0 / count;
        double runningSize = 0.0;
        // Add sum of zero at entry zero.
        int i = 0;
        // Iterate objects, summing them across 0 to 10000 (an arbitrary range,
        // a way to relate to other sums along X).
        for (Double size : sizes) {
            double someMeasurement = (size != null) ? size : 0.0;
            runningSize += someMeasurement;
            // Prepare to store this raw data entry.
            LinkedHashMap<String, Object> dataEntry = new LinkedHashMap<>();
            // Name the group.
            dataEntry.put(leafColumnName, name);
            dataEntry.put("date", i * step);
            dataEntry.put("size", runningSize);
            // Push the entry into the data pool.
            rawData.add(dataEntry);
            // Advance along the X axis.
            i += 1;
        }
    }
    // Send it all back.
    return rawData;
}
From source file:gate.util.reporting.PRTimeReporter.java
/** * Sorts the processing element entries inside tree like structure made up of * LinkedHashMap. Entries will be sorted in descending order of time taken. * * @param gStore/* ww w. j ava 2 s .c o m*/ * An Object of type LinkedHashMap<String, Object> containing the * processing elements (with time in milliseconds) in hierarchical * structure. * * @return An Object of type LinkedHashMap<String, Object> containing the * processing elements sorted in descending order of processing time * taken. */ @SuppressWarnings("unchecked") private LinkedHashMap<String, Object> sortReport(LinkedHashMap<String, Object> gStore) { Iterator<String> i = gStore.keySet().iterator(); LinkedHashMap<String, Object> sortedReport = new LinkedHashMap<String, Object>(); LinkedHashMap<String, Object> mapperReport = new LinkedHashMap<String, Object>(); LinkedHashMap<String, String> unsortedReport = new LinkedHashMap<String, String>(); while (i.hasNext()) { Object key = i.next(); if (gStore.get(key) instanceof LinkedHashMap) { int systotal = 0; if (((LinkedHashMap<String, Object>) (gStore.get(key))).get("systotal") != null) { systotal = Integer .parseInt((String) ((LinkedHashMap<String, Object>) (gStore.get(key))).get("systotal")); } if (systotal >= 0) { unsortedReport.put((String) key, Integer.toString(systotal)); } mapperReport.put((String) key, sortReport((LinkedHashMap<String, Object>) (gStore.get(key)))); } else { if (!(key.equals("total") || key.equals("systotal"))) { if (Integer.parseInt((String) (gStore.get(key))) >= 0) { unsortedReport.put((String) key, new Integer((String) gStore.get(key)).toString()); } } } } LinkedHashMap<String, String> tempOutLHM = sortHashMapByValues(unsortedReport); Iterator<String> itr = tempOutLHM.keySet().iterator(); while (itr.hasNext()) { Object tempKey = itr.next(); sortedReport.put((String) tempKey, tempOutLHM.get(tempKey)); if (mapperReport.containsKey(tempKey)) { sortedReport.put((String) tempKey, mapperReport.get(tempKey)); } } sortedReport.put("total", 
gStore.get("total")); if (gStore.get("systotal") != null) { sortedReport.put("systotal", gStore.get("systotal")); } return sortedReport; }
From source file:in.sc.dao.ListGenerator.java
public LinkedHashMap generateCat(int from) { StringBuilder sql = new StringBuilder(); LinkedHashMap rootMap = null; try {//from w w w.j av a 2s . c o m sql.append( "select k1.c_unique_name,concAt(li.cat_name,'/',li.home_url_exp) as url,k1.cat_name as child_cat,k2.cat_name as parent_cat,k3.cat_name as root_cat " + "from category_details k1,category_details k2, category_details k3,linkgenerator li where k1.status='B' and " + " k1.parent_cat_id=k2.cat_id and k2.parent_cat_id=k3.cat_id " + " and li.category_id=k1.cat_id " + " order by k3.priority asc"); namedParameterJdbcTemplate = getTemplate(); rootMap = namedParameterJdbcTemplate.query(sql.toString(), new HashMap(), new ResultSetExtractor<LinkedHashMap>() { @Override public LinkedHashMap extractData(ResultSet rs) throws SQLException, DataAccessException { LinkedHashMap rootMap = new LinkedHashMap(); LinkedList li = null; HashMap parentMap = null; while (rs.next()) { try { ArrayList childList = null; String rootCat = ""; String parentCat = ""; String childCat = ""; String childUrl = rs.getString("url"); if (rs.getString("root_cat").equals("Root Category ")) { rootCat = rs.getString("parent_cat"); parentCat = rs.getString("child_cat"); childCat = childUrl; } else { rootCat = rs.getString("root_cat"); parentCat = rs.getString("parent_cat"); childCat = rs.getString("child_cat") + "#" + childUrl; } if (rootMap.containsKey(rootCat)) { parentMap = (HashMap) rootMap.get(rootCat); } else { parentMap = new HashMap(); } if (childCat != null && parentMap.containsKey(parentCat)) { childList = (ArrayList) parentMap.get(parentCat); } else { childList = new ArrayList(); } if (childCat != null) { childList.add(childCat); } parentMap.put(parentCat, childList); rootMap.put(rootCat, parentMap); // if (rootMap.containsKey(rootCat)) { // if (!childCat.equals("")) { // parentMap = (HashMap) rootMap.get(rootCat); // if (parentMap.containsKey(childCat)) { // childList = (ArrayList) parentMap.get(childCat); // } else { // childList = new 
ArrayList(); // } // childList.add(childCat); // parentMap.put(parentCat, childList); // rootMap.put(rootCat, parentMap); // } // // } else { // if (!childCat.equals("")) { // childList = new ArrayList(); // childList.add(childCat); // parentMap=new HashMap(); // parentMap.put(parentCat, childList); // rootMap.put(rootCat, parentMap); // }else{ // rootMap.put(rootCat, parentCat); // } // // } } catch (Exception e) { e.printStackTrace(); } } return rootMap; } }); // Mainmap.put("data", dataList); } catch (Exception e) { e.printStackTrace(); } finally { } return rootMap; }
From source file:com.fujitsu.dc.core.rs.odata.ODataBatchResource.java
/** * NP??./*from w ww.java2 s.c om*/ * @param npBulkContexts NavigationProperty? */ private void execBulkRequestForNavigationProperty(List<NavigationPropertyBulkContext> npBulkContexts) { // ???BulkRequest? // NP??EntityType??????ID?????? LinkedHashMap<String, BulkRequest> npBulkRequests = new LinkedHashMap<String, BulkRequest>(); for (NavigationPropertyBulkContext npBulkContext : npBulkContexts) { BatchBodyPart bodyPart = npBulkContext.getBodyPart(); BulkRequest bulkRequest = new BulkRequest(bodyPart); String key = DcUUID.randomUUID(); if (npBulkContext.isError()) { bulkRequest.setError(npBulkContext.getException()); npBulkRequests.put(key, bulkRequest); continue; } String targetEntitySetName = bodyPart.getTargetEntitySetName(); bulkRequest = createBulkRequest(bodyPart, targetEntitySetName); // ??ID?? // TODO ?????NTKP if (bulkRequest.getError() == null) { EntitySetDocHandler docHandler = bulkRequest.getDocHandler(); key = docHandler.getEntityTypeId() + ":" + (String) docHandler.getStaticFields().get("__id"); if (npBulkRequests.containsKey(key)) { key = DcUUID.randomUUID(); bulkRequest.setError(DcCoreException.OData.ENTITY_ALREADY_EXISTS); } } npBulkRequests.put(key, bulkRequest); } try { this.odataResource.getODataProducer().bulkCreateEntityViaNavigationProperty(npBulkContexts, npBulkRequests); } catch (DcCoreException e) { // 503??????????shutter? shutter.updateStatus(e); if (!DcCoreException.Misc.TOO_MANY_CONCURRENT_REQUESTS.equals(e)) { throw e; } else { createTooManyConcurrentResponse(npBulkContexts); } } npBulkRequests.clear(); }
From source file:io.personium.core.rs.odata.ODataBatchResource.java
/** * NP??.//from w w w . ja va 2 s.c o m * @param npBulkContexts NavigationProperty? */ private void execBulkRequestForNavigationProperty(List<NavigationPropertyBulkContext> npBulkContexts) { // ???BulkRequest? // NP??EntityType??????ID?????? LinkedHashMap<String, BulkRequest> npBulkRequests = new LinkedHashMap<String, BulkRequest>(); for (NavigationPropertyBulkContext npBulkContext : npBulkContexts) { BatchBodyPart bodyPart = npBulkContext.getBodyPart(); BulkRequest bulkRequest = new BulkRequest(bodyPart); String key = PersoniumUUID.randomUUID(); if (npBulkContext.isError()) { bulkRequest.setError(npBulkContext.getException()); npBulkRequests.put(key, bulkRequest); continue; } String targetEntitySetName = bodyPart.getTargetEntitySetName(); bulkRequest = createBulkRequest(bodyPart, targetEntitySetName); // ??ID?? // TODO ?????NTKP if (bulkRequest.getError() == null) { EntitySetDocHandler docHandler = bulkRequest.getDocHandler(); key = docHandler.getEntityTypeId() + ":" + (String) docHandler.getStaticFields().get("__id"); if (npBulkRequests.containsKey(key)) { key = PersoniumUUID.randomUUID(); bulkRequest.setError(PersoniumCoreException.OData.ENTITY_ALREADY_EXISTS); } } npBulkRequests.put(key, bulkRequest); } try { this.odataResource.getODataProducer().bulkCreateEntityViaNavigationProperty(npBulkContexts, npBulkRequests); } catch (PersoniumCoreException e) { // 503??????????shutter? shutter.updateStatus(e); if (!PersoniumCoreException.Misc.TOO_MANY_CONCURRENT_REQUESTS.equals(e)) { throw e; } else { createTooManyConcurrentResponse(npBulkContexts); } } npBulkRequests.clear(); }
From source file:OSFFM_ORC.FederationActionManager.java
/** * * @param mapContainer//from w w w. j a va 2 s . c o m * @param sClient * @param fedsdnURL * @return * @throws WSException * @throws JSONException * @author gtricomi */ private String checkTenantandInsertFEDSDN(FednetsLink mapContainer, Site sClient, String fedsdnURL, DBMongo m) throws WSException, JSONException, Exception { Response r = sClient.getAllSite(fedsdnURL); JSONArray ja = new JSONArray(r.readEntity(String.class)); LinkedHashMap<String, OpenstackInfoContainer> CloudId_To_OIC = mapContainer.getCloudId_To_OIC(); LinkedHashMap<String, JSONObject> tmpSiteList = new LinkedHashMap<String, JSONObject>(); JSONArray inner = new JSONArray(); String tenant = null; String tenant_password = null; String result = ""; //for (int i = 0; i < ja.length(); i++) { //JSONObject jo = (JSONObject) ja.get(i); Set siteNameSet = new HashSet(); //=new HashSet(); for (int i = 0; i < ja.length(); i++) { JSONObject jo = (JSONObject) ja.get(i); /*String siteIdToCheck = (String) jo.get("id"); String siteNameToCheck = (String) jo.get("name"); siteSet.add(siteIdToCheck+"@@@@::::@@@@"+siteNameToCheck);*/ //siteSet.add((String) jo.get("name")); tmpSiteList.put((String) jo.get("name"), jo); } Iterator it = CloudId_To_OIC.keySet().iterator(); while (it.hasNext()) { String siteNameToCheck = (String) it.next(); if ((tmpSiteList.containsKey(siteNameToCheck))) { try { OpenstackInfoContainer oik = (OpenstackInfoContainer) CloudId_To_OIC.get(siteNameToCheck); if (oik == null) { String siteUsernameToCheck = "oneadmin"; String sitePasswordToCheck = "opennebulaone"; String credentials = siteUsernameToCheck + ":" + sitePasswordToCheck; String user_id_insite = "1"; JSONObject inner_jo = new JSONObject("{\"site_id\" :\"ONE\",\"user_id_in_site\":\"" + user_id_insite + "\" ,\"credentials\":\"" + credentials + "\"}"); tenant = "review"; tenant_password = sitePasswordToCheck; } else { String siteUsernameToCheck = oik.getTenant() + "@@@" + oik.getUser(); String sitePasswordToCheck = oik.getPassword(); 
String credentials = siteUsernameToCheck + ":" + sitePasswordToCheck; KeystoneTest key = new KeystoneTest(oik.getTenant(), oik.getUser(), oik.getPassword(), oik.getEndpoint()); String user_id_insite = null; try { user_id_insite = key.getTenantId(oik.getTenant()); } catch (Exception e) { user_id_insite = "0"; } //>>>BEACON 03/07/2017: statically insert 0, but need to be checked !!! String siteIdToCheck = ((Integer) ((JSONObject) tmpSiteList.get(siteNameToCheck)).get("id")) .toString(); JSONObject inner_jo = new JSONObject( "{\"site_id\" :\"" + siteIdToCheck + "\",\"user_id_in_site\":\"" + user_id_insite + "\" ,\"credentials\":\"" + credentials + "\"}"); inner.put(inner_jo); tenant = oik.getTenant(); //03/07/2017: federation password for the tenant is set equal to the openstack site !!! tenant_password = sitePasswordToCheck; } } catch (Exception e) { LOGGER.error( "Exception occurred in \"Valid Site\" field entry.\nSite skipped: " + siteNameToCheck); } } } //03/07/2017: verify username management like structure "NotManagedUser@userFederation@UME" !!! 
System.out.println(inner.toString()); JSONObject tenant_jo = new JSONObject("{\"name\" :\"" + tenant + "\",\"password\":\"" + tenant_password + "\" ,\"type\":\"admin\",\"valid_sites\": " + inner.toString(0) + "}"); // m.insertTenantTables(tenant, tenant_jo.toString()); //03-07-2017 : hardcoded credential for tenant Tenant t = new Tenant(tenant, tenant_password);//"root","fedsdn");// boolean ok = false; for (int k = 0; k < inner.length(); k++) { try { System.out.println(tenant_jo.toString(0)); r = t.updateTen(tenant_jo, tenant, fedsdnURL);//createTen(tenant_jo, fedsdnURL);// /*oggetto restituito: { "id": 7, "name": "TESTAGOSTO2", "password": "pass1", "type": "admin", "valid_sites": [ { "tenant_id": 7, "site_id": 2, "user_id_in_site": "28", "credentials": "admin@admin:prova", "token": "86734b78980" } ] } */ String respon = r.readEntity(String.class); if ((respon.contains("Tenant ")) && (respon.contains("updated."))) { respon = respon.substring(respon.indexOf("Tenant")); respon = respon.replace("Tenant ", ""); respon = respon.replace(" updated.\"\n]", ""); Integer rid = new Integer(respon); r = t.getInfoTenant(fedsdnURL, rid.longValue()); respon = r.readEntity(String.class); } else { throw new Exception("Something in the site insertion process isn't working fine."); } JSONObject resp = new JSONObject(respon); JSONObject entry = new JSONObject(); String fedsdntenid = ((Integer) resp.remove("id")).toString(); result = fedsdntenid; entry.put("tenantID", fedsdntenid); entry.put("tenantEntry", resp); m.insertTenantTables(tenant, entry.toString(0)); } catch (WSException ex) { LOGGER.error("Exception is occurred in checkTenantFEDSDN for tenant: " + tenant + "\n" + ex); ok = false; } ok = true; if (ok) { break; } else if (k == 3) { LOGGER.error("Something going wrong! 
It's Impossible add site on FEDSDN"); throw new Exception("Something in the site insertion process isn't working fine."); //03/07/2017: inserito per bloccare il flusso nel caso in cui qualche sito non venga inserito !!! } } return result; }
From source file:org.openlaszlo.compiler.NodeModel.java
void addProperty(String name, Object value, String allocation, Element source) { if (frozen) { throw new CompilerImplementationError("Attempting to addProperty when NodeModel frozen"); }/*from w w w . j a va 2 s.c om*/ LinkedHashMap attrs; if (ALLOCATION_INSTANCE.equals(allocation)) { attrs = this.attrs; } else if (ALLOCATION_CLASS.equals(allocation)) { attrs = this.classAttrs; } else { throw new CompilationError("Unknown allocation: " + allocation, source); } // TODO: [2008-05-05 ptw] Make warning say whether it is a // class or instance property that is conflicting if (attrs.containsKey(name)) { env.warn( /* (non-Javadoc) * @i18n.test * @org-mes="an attribute or method named '" + p[0] + "' already is defined on " + p[1] */ org.openlaszlo.i18n.LaszloMessages.getMessage(NodeModel.class.getName(), "051018-682", new Object[] { name, getMessageName() }), source); } if (value instanceof CompiledAttribute) { // Special handling for attribute with binders CompiledAttribute cattr = (CompiledAttribute) value; // The methods of a datapath constraint are moved to the // replicator, so must be compiled as closures boolean chm = "datapath".equals(name) ? false : canHaveMethods; if (cattr.bindername != null) { attrs.put(cattr.bindername, cattr.getBinderMethod(chm, debug)); } if (cattr.dependenciesname != null) { attrs.put(cattr.dependenciesname, cattr.getDependenciesMethod(chm, debug)); } attrs.put(name, cattr.getInitialValue()); } else { attrs.put(name, value); } }
From source file:org.openlaszlo.compiler.NodeModel.java
/** Returns a NodeModel that represents an Element
 *
 * @param elt an element
 * @param schema a schema, used to encode attribute values
 * @param includeChildren whether or not to include children
 * @param env the CompilationEnvironment
 */
private static NodeModel elementAsModelInternal(Element elt, ViewSchema schema, boolean includeChildren,
        CompilationEnvironment env) {
    // Memoized: reuse a model already attached to this element.
    NodeModel model = ((ElementWithLocationInfo) elt).model;
    if (model != null) {
        return model;
    }
    ElementCompiler compiler = Compiler.getElementCompiler(elt, env);
    compiler.preprocess(elt, env);
    model = new NodeModel(elt, schema, env);
    LinkedHashMap attrs = model.attrs;
    Map delegates = model.delegates;
    model.addAttributes(env);
    // This emits a local dataset node, so only process
    // <dataset> tags that are not top level datasets.
    if (elt.getName().equals("dataset")) {
        boolean contentIsLiteralXMLData = true;
        String datafromchild = elt.getAttributeValue("datafromchild");
        String src = elt.getAttributeValue("src");
        String type = elt.getAttributeValue("type");
        // Remote (soap/http/URL src) or child-fed datasets are NOT literal
        // XML content.
        if ((type != null && ("soap".equals(type) || "http".equals(type)))
                || (src != null && XMLUtils.isURL(src)) || "true".equals(datafromchild)) {
            contentIsLiteralXMLData = false;
        }
        if (contentIsLiteralXMLData) {
            // Default to legacy behavior, treat all children as XML literal data;
            // note this also suppresses normal child processing below.
            model.addProperty("initialdata", getDatasetContent(elt, env), ALLOCATION_INSTANCE, elt);
            includeChildren = false;
        }
    }
    if (includeChildren) {
        model.addChildren(env);
        // If any children are subclasses of <state>, recursively
        // hoist named children up in order to declare them so
        // they can be referenced as vars without a 'this.---' prefix.
        if (!isState(model, schema)) {
            model.addStateChildren(env);
        }
        model.addText();
        // Install a default "clickable" only when the author did not set one
        // and the schema/delegates imply it.
        if (!attrs.containsKey("clickable") && computeDefaultClickable(schema, attrs, delegates)) {
            model.addProperty("clickable", "true", ALLOCATION_INSTANCE, elt);
        }
    }
    // Check that all attributes required by the class or it's superclasses are present
    checkRequiredAttributes(elt, model, schema);
    // Cache the model on the element for future calls.
    ((ElementWithLocationInfo) elt).model = model;
    return model;
}
From source file:com.ruesga.rview.fragments.ChangeDetailsFragment.java
@SuppressWarnings("Convert2streamapi") private void updateMessageComments(DataResponse response, Map<String, List<CommentInfo>> comments) { final Map<String, LinkedHashMap<String, List<CommentInfo>>> mwc = response.mMessagesWithComments; // Match comments with messages for (ChangeMessageInfo message : response.mChange.messages) { if (message.message != null && COMMENTS_PATTERN.matcher(message.message).find()) { for (String file : comments.keySet()) { List<CommentInfo> items = comments.get(file); if (items != null) { for (CommentInfo comment : items) { comment.path = file; if (comment.updated.compareTo(message.date) == 0 && comment.author.accountId == message.author.accountId) { if (!mwc.containsKey(message.id)) { mwc.put(message.id, new LinkedHashMap<>()); }/*from w w w . java 2 s . co m*/ final LinkedHashMap<String, List<CommentInfo>> filesAndComments = mwc .get(message.id); if (!filesAndComments.containsKey(file)) { filesAndComments.put(file, new ArrayList<>()); } List<CommentInfo> list = filesAndComments.get(file); comment.patchSet = message.revisionNumber; list.add(comment); } } } } } } }
From source file:com.novartis.opensource.yada.plugin.Gatekeeper.java
/**
 * Retrieves and processes the security query, and validates the results per
 * the security specification.
 * <p>
 * For each execution-policy record mapped to the requested query, this builds
 * and runs the associated authorization ("a11n") query — passing either
 * positional params, JSONParams columns, or nothing, depending on what the
 * policy declares and what the request supplied — and rejects the request when
 * the whitelist/blacklist result-count rule fails.
 *
 * @throws YADASecurityException
 *           when there is an issue retrieving or processing the security
 *           query, or when the request is unauthorized
 */
//TODO the security query executes for every iteration of the qname
// in the current request. a flag needs to be set somewhere to indicate
// clearance has already been granted. This can't be in YADAQuery because of caching.
//TODO needs to support app targets as well as qname targets
//TODO tests for auth failure, i.e., unauthorized
//TODO tests for ignoring attempted plugin overrides
//TODO make it impossible to execute a protector query as a primary query without a
// server-side flag set, or perhaps some authorization (i.e., for testing, maybe with
// a content policy). This will close an attack vector.
//TODO support dependency injection for other methods in addition to token for execution policy
@Override
public void applyExecutionPolicy() throws YADASecurityException {
    List<SecurityPolicyRecord> spec = getSecurityPolicyRecords(EXECUTION_POLICY_CODE);
    List<SecurityPolicyRecord> prunedSpec = new ArrayList<>();
    // Process security spec. The protector query can take standard (positional)
    // or JSON params: if JSON, we need the column names to map to the token;
    // if standard, we need the list of relevant indices.
    String policyColumns = getArgumentValue(EXECUTION_POLICY_COLUMNS);
    String policyIndices = getArgumentValue(EXECUTION_POLICY_INDICES);
    // Accept either spelling of the indices argument.
    policyIndices = policyIndices == null ? getArgumentValue(EXECUTION_POLICY_INDEXES) : policyIndices;
    // Validation regexes: positional spec is digits or index-injection tokens;
    // JSON spec is column names or column-injection tokens.
    String polColParams_rx = "^((" + RX_IDX_INJECTION + "|[\\d]+)\\s?)+$";
    String polColJSONParams_rx = "^((" + RX_COL_INJECTION + "|[A-Za-z0-9_]+)\\s?)+$";
    String result = "";
    int index = -1;
    String injectedIndex = "";
    // NOTE(review): these flags are set inside the loop below but never reset
    // per record; they effectively describe the request/policy pair as a whole.
    boolean policyHasParams = false;
    boolean policyHasJSONParams = false;
    boolean reqHasParams = getYADARequest().getParams() == null
            || getYADARequest().getParams().length == 0 ? false : true;
    boolean reqHasJSONParams = YADAUtils.hasJSONParams(getYADARequest());
    for (SecurityPolicyRecord secRec : spec) {
        // Are params required for security query?
        if (policyIndices != null && policyIndices.matches(polColParams_rx)) {
            policyHasParams = true;
        }
        if (policyColumns != null && policyColumns.matches(polColJSONParams_rx)) {
            policyHasJSONParams = true;
        }
        // Request and policy must have syntax compatibility, i.e., matching
        // param syntax, or no params at all.
        if ((policyHasParams && !reqHasJSONParams) || (policyHasJSONParams && !reqHasParams)
                || (!policyHasParams && reqHasJSONParams) || (!policyHasJSONParams && reqHasParams)
                || !(policyHasParams || reqHasParams || policyHasJSONParams || reqHasJSONParams)) {
            // Confirm sec spec is configured properly: type must be whitelist
            // or blacklist.
            if (hasValidPolicy(secRec.getType())) {
                // Confirm sec spec is mapped to an existing protector query.
                try {
                    new Finder().getQuery(secRec.getA11nQname());
                } catch (YADAFinderException e) {
                    String msg = "Unauthorized. Authorization qname not found.";
                    throw new YADASecurityException(msg);
                } catch (YADAConnectionException | YADAQueryConfigurationException e) {
                    String msg = "Unauthorized. Unable to check for security query. This could be a temporary issue.";
                    throw new YADASecurityException(msg, e);
                }
                // security query exists
            } else {
                String msg = "Unauthorized, due to policy misconfiguration. Must be \"blacklist\" or \"whitelist.\"";
                throw new YADASecurityException(msg);
            }
            prunedSpec.add(secRec);
        }
    }
    // Kill the query if there aren't any compatible specs.
    if (prunedSpec.size() == 0) {
        String msg = "Unauthorized. Request parameter syntax is incompatible with policy.";
        throw new YADASecurityException(msg);
    }
    // Process the relevant specs: each record carries policy code (E,C),
    // policy type (whitelist/blacklist), target (qname), and the a11n qname.
    for (SecurityPolicyRecord secRec : prunedSpec) {
        String a11nQname = secRec.getA11nQname();
        String policyType = secRec.getType();
        // Case 1: policy declares positional params and request is compatible.
        if (policyHasParams && !reqHasJSONParams) {
            @SuppressWarnings("null")
            String[] polCols = policyIndices.split("\\s");
            StringBuilder polVals = new StringBuilder();
            if (reqHasParams) {
                for (int i = 0; i < polCols.length; i++) {
                    // Handle as params: 1. get params from query.
                    List<String> vals = getYADAQuery().getVals(0);
                    // A non-numeric spec element is an injection token.
                    try {
                        index = Integer.parseInt(polCols[i]);
                    } catch (NumberFormatException e) {
                        injectedIndex = polCols[i];
                    }
                    // 2. Pass the user column value (or the token, or an
                    // injected method result) into the value list.
                    if (polVals.length() > 0)
                        polVals.append(",");
                    if (injectedIndex.equals("") && index > -1) {
                        // Out-of-range index falls back to the security token.
                        if (index >= vals.size())
                            polVals.append((String) getToken());
                        else
                            polVals.append(vals.get(index));
                    } else {
                        Pattern rxInjection = Pattern.compile(RX_IDX_INJECTION);
                        Matcher m1 = rxInjection.matcher(injectedIndex);
                        if (m1.matches() && m1.groupCount() == 3) // injection
                        {
                            // Parse regex: this is where the method value is injected.
                            String colIdx = m1.group(2);
                            String colval = m1.group(3);
                            // Find and execute the injected method by reflection
                            // (zero- or one-String-arg methods on this class).
                            String method = colval.substring(0, colval.indexOf('('));
                            String arg = colval.substring(colval.indexOf('(') + 1, colval.indexOf(')'));
                            Object val = null;
                            try {
                                if (arg.equals(""))
                                    val = getClass().getMethod(method).invoke(this, new Object[] {});
                                else
                                    val = getClass().getMethod(method, new Class[] { java.lang.String.class })
                                            .invoke(this, new Object[] { arg });
                            } catch (NoSuchMethodException | SecurityException | IllegalAccessException
                                    | IllegalArgumentException | InvocationTargetException e) {
                                // NOTE(review): "Unathorized" typo is in the
                                // runtime message; left unchanged here.
                                String msg = "Unathorized. Injected method invocation failed.";
                                throw new YADASecurityException(msg, e);
                            }
                            // Add/replace item in the value list.
                            polVals.append(val);
                        }
                    }
                    // Reset per-element parse state.
                    index = -1;
                    injectedIndex = "";
                }
                // 3. Execute the security query.
                result = YADAUtils.executeYADAGet(new String[] { a11nQname },
                        new String[] { polVals.toString() });
            } else {
                // No request params: every position receives the token.
                for (int i = 0; i < polCols.length; i++) {
                    polVals.append((String) getToken());
                }
                result = YADAUtils.executeYADAGet(new String[] { a11nQname },
                        new String[] { polVals.toString() });
            }
        }
        // Case 2: policy declares JSONParams columns and request is compatible.
        else if (policyHasJSONParams && reqHasJSONParams) {
            // NOTE(review): this warning text looks copy-pasted from the numeric
            // parse path above and fires unconditionally here — likely misplaced.
            LOG.warn("Could not parse column value into integer -- it's probably a String");
            // Handle as JSONParams: 1. get the JSONParams data row from the query.
            LinkedHashMap<String, String[]> dataRow = getYADAQuery().getDataRow(0);
            // 2. Add the user column if necessary.
            @SuppressWarnings("null")
            String[] polCols = policyColumns.split("\\s");
            for (String colspec : polCols) {
                // dataRow can look like, e.g.: {COL1:val1,COL2:val2}
                // polCols can look like, e.g.: COL2 APP:getValue(TARGET)
                Pattern rxInjection = Pattern.compile(RX_COL_INJECTION);
                Matcher m1 = rxInjection.matcher(colspec);
                if (m1.matches() && m1.groupCount() == 3) // injection
                {
                    // Parse regex: this is where the method value is injected.
                    String colname = m1.group(2);
                    String colval = m1.group(3);
                    // Find and execute the injected method by reflection.
                    String method = colval.substring(0, colval.indexOf('('));
                    String arg = colval.substring(colval.indexOf('(') + 1, colval.indexOf(')'));
                    Object val = null;
                    try {
                        if (arg.equals(""))
                            val = getClass().getMethod(method).invoke(this, new Object[] {});
                        else
                            val = getClass().getMethod(method, new Class[] { java.lang.String.class })
                                    .invoke(this, new Object[] { arg });
                    } catch (NoSuchMethodException | SecurityException | IllegalAccessException
                            | IllegalArgumentException | InvocationTargetException e) {
                        String msg = "Unathorized. Injected method invocation failed.";
                        throw new YADASecurityException(msg, e);
                    }
                    // Add/replace item in dataRow; injected values overwrite any
                    // value the caller passed for that column.
                    dataRow.put(colname, new String[] { (String) val });
                } else {
                    if (!dataRow.containsKey(colspec)) // no injection AND no parameter
                    {
                        // NOTE(review): message text mismatches the condition
                        // (missing parameter, not a failed invocation); kept as-is.
                        String msg = "Unathorized. Injected method invocation failed.";
                        throw new YADASecurityException(msg);
                    }
                }
            }
            // 3. Execute the security query with the (possibly augmented) row.
            JSONParamsEntry jpe = new JSONParamsEntry();
            jpe.addData(dataRow);
            JSONParams jp = new JSONParams(a11nQname, jpe);
            result = YADAUtils.executeYADAGetWithJSONParamsNoStats(jp);
        } else {
            // No parameters to pass to the execution.policy query.
            result = YADAUtils.executeYADAGet(new String[] { a11nQname }, new String[0]);
        }
        // Parse result: the record count drives the accept/reject decision.
        int count = new JSONObject(result).getJSONObject("RESULTSET").getInt("records");
        // Reject if necessary: whitelist requires matches; blacklist forbids them.
        if ((isWhitelist(policyType) && count == 0) || (isBlacklist(policyType) && count > 0))
            throw new YADASecurityException("Unauthorized.");
    }
    this.clearSecurityPolicy();
}