Usage examples for the java.util.HashMap.remove(Object key) method, collected from open-source projects.
public V remove(Object key)
From source file:com.vmware.identity.idm.client.TenantManagementTest.java
@Test public void testRSAAgentConfigs() throws Exception, IDMException { CasIdmClient idmClient = getIdmClient(); String tenantName = "TestTenant1"; Tenant tenantToCreate = new Tenant(tenantName); try {/*from w ww . ja va 2 s . c o m*/ IdmClientTestUtil.ensureTenantDoesNotExist(idmClient, tenantName); idmClient.addTenant(tenantToCreate, DEFAULT_TENANT_ADMIN_NAME, DEFAULT_TENANT_ADMIN_PASSWORD.toCharArray()); } catch (Exception ex) { Assert.fail("should not reach here"); } try { Tenant tenant = idmClient.getTenant(tenantName); Assert.assertNotNull(tenant); Assert.assertEquals(tenantName, tenant.getName()); } catch (Exception ex) { Assert.fail("should not reach here"); } try { // Check default config AuthnPolicy authnPolicy = idmClient.getAuthnPolicy(tenantName); RSAAgentConfig config = authnPolicy.get_rsaAgentConfig(); Assert.assertNull("Default rsaConfigs in a new tenant should be null.", config); String siteID = idmClient.getClusterId(); String siteID2 = "siteID2"; // Test setting config with one site RSAAMInstanceInfo instInfo1 = new RSAAMInstanceInfo(siteID, "TestAgent", "sdConfBytes".getBytes(), "sdoptsTest".getBytes()); RSAAgentConfig configIn = new RSAAgentConfig(instInfo1); idmClient.setRSAConfig(tenantName, configIn); RSAAgentConfig configOut = idmClient.getRSAConfig(tenantName); AssertRSAAgentConfig(configIn, configOut); //test RSAAMInstanceInfo ctr try { RSAAMInstanceInfo instInfo = new RSAAMInstanceInfo(siteID, "TestAgent", null, null); instInfo = new RSAAMInstanceInfo(null, "", "sdConfBytes".getBytes(), null); instInfo = new RSAAMInstanceInfo("", "", "sdConfBytes".getBytes(), null); Assert.fail("should not reach here"); } catch (IllegalArgumentException | NullPointerException e) { } // Test setting full RSA configuration and add a second site configIn.set_loginGuide("Test login guidence string"); configIn.set_connectionTimeOut(10); configIn.set_readTimeOut(20); configIn.set_logFileSize(2); RSAAMInstanceInfo instInfo2 = new RSAAMInstanceInfo(siteID2, 
"TestAgent2", "sdConfBytes".getBytes(), null); configIn.add_instInfo(instInfo2); configIn.set_logLevel(RSAAgentConfig.RSALogLevelType.valueOf("DEBUG")); configIn.set_maxLogFileCount(20); configIn.set_rsaEncAlgList(new HashSet<String>(Arrays.asList("alg1", "alg2"))); HashMap<String, String> idsUserIDAttributeMap = new HashMap<String, String>(); idsUserIDAttributeMap.put("adTestIDS", "upn"); idsUserIDAttributeMap.put("localIDS", "email"); configIn.set_idsUserIDAttributeMaps(idsUserIDAttributeMap); idmClient.setRSAConfig(tenantName, configIn); configOut = idmClient.getRSAConfig(tenantName); AssertRSAAgentConfig(configIn, configOut); //Remove second site from RSAAgentConfig HashMap<String, RSAAMInstanceInfo> instMap = configIn.get_instMap(); instMap.remove(siteID2); configIn.set_instMap(instMap); idmClient.deleteRSAInstanceInfo(tenantName, siteID2); configOut = idmClient.getRSAConfig(tenantName); AssertRSAAgentConfig(configIn, configOut); //Udate siteInfo attributes instMap = configIn.get_instMap(); instInfo1.set_agentName("TestAgentChanged"); instInfo1.set_sdoptsRec("sdoptsTestModified".getBytes()); instMap.put(siteID, instInfo1); configIn.set_instMap(instMap); idmClient.setRSAConfig(tenantName, configIn); configOut = idmClient.getRSAConfig(tenantName); AssertRSAAgentConfig(configIn, configOut); // Test updates RSAAgentConfig attributes configIn.set_loginGuide("Modified login guidence string"); configIn.set_connectionTimeOut(40); configIn.set_readTimeOut(50); configIn.set_logFileSize(70); configIn.set_logLevel(RSAAgentConfig.RSALogLevelType.valueOf("WARN")); configIn.set_maxLogFileCount(7); configIn.set_rsaEncAlgList(new HashSet<String>(Arrays.asList("ALG1", "ALG2"))); idsUserIDAttributeMap = new HashMap<String, String>(); idsUserIDAttributeMap.put("adTestIDS", "email"); idsUserIDAttributeMap.put("localIDS", "upn"); configIn.set_idsUserIDAttributeMaps(idsUserIDAttributeMap); idmClient.setRSAConfig(tenantName, configIn); configOut = idmClient.getRSAConfig(tenantName); 
AssertRSAAgentConfig(configIn, configOut); } catch (Exception e) { Assert.fail("should not reach here"); } finally { // Cleanup IdmClientTestUtil.ensureTenantDoesNotExist(idmClient, tenantName); } }
From source file:com.caspida.plugins.graph.EntityGraphNeo4jPlugin.java
/**
 * Returns a bipartite-style JSON graph ({"nodes": [...], "links": [...]}) for the requested
 * entity types, starting either from a single entity (sourceEntityId) or from all entities
 * of sourceEntityType. Anomaly ("Threat") nodes below minScore are skipped; when the caller
 * did not ask for anomalies, anomaly nodes are traversed through to the entity on the far side.
 */
@Name(EntityGraphConstants.API_GET_BIPARTITE_GRAPH)
@Description("Get bipartite information for various entities")
@PluginTarget(GraphDatabaseService.class)
public String getBipartite(@Source GraphDatabaseService graphDb,
        @Description("Input parameters as a JSON string ") @Parameter(name = "entityTypes") List<String> entityList,
        @Parameter(name = "sourceEntityId", optional = true) String entityId,
        @Parameter(name = "sourceEntityType") String entityType,
        @Parameter(name = "minScore", optional = true) Integer minScoreIn) {
    JSONArray nodes = new JSONArray();
    JSONArray edges = new JSONArray();
    JSONObject response = new JSONObject();
    int minScore = (minScoreIn != null) ? minScoreIn.intValue() : 0;
    int uniqueLinkId = 1;
    /* Track whether the caller explicitly requested anomalies. If not, anomaly nodes are
     * expanded so only their non-anomaly neighbors appear in the result. */
    // FIX: was 'entityType == EntityGraphConstants.UBER_GRAPH_THREAT_LABEL' — reference
    // comparison on Strings; use equals() (constant first, so a null entityType is safe).
    boolean askedForAnomalies = EntityGraphConstants.UBER_GRAPH_THREAT_LABEL.equals(entityType)
            || entityList.contains(EntityGraphConstants.UBER_GRAPH_THREAT_LABEL);
    logger.error("Got input params entity list {} and entityId {} asked for anomalies {}", entityList,
            entityId, askedForAnomalies);
    try (Transaction txn = graphDb.beginTx()) {
        /* Seed list of nodes to expand: a single node when entityId is given, otherwise all
         * nodes of the mapped event type. */
        List<Node> nodeList = new ArrayList<>();
        if (entityId != null && entityId.length() > 2) { // XXX better test for "" string
            logger.error("Looking up single node with id {}", entityId);
            Index<Node> nodeIndex = graphDb.index().forNodes(EntityGraphConstants.EVENT_CLASS);
            Node node = nodeIndex.get(EntityGraphConstants.NODE_ID, Long.parseLong(entityId)).getSingle();
            if (node == null) {
                logger.error("Can not locate entity with provided id {}", entityId);
                return response.toJSONString();
            }
            logger.debug("Found entity node with provided id {}", entityId);
            nodeList.add(node);
        } else {
            // Map the public entity type name to the stored event type.
            // (The unused 'Label label = DynamicLabel.label(entityType)' local was removed.)
            String eventType = null;
            switch (entityType) {
            case "caspida.device":
                eventType = "Device";
                break;
            case "caspida.anomaly":
                eventType = "Threat";
                break;
            case "caspida.user":
                eventType = "User";
                break;
            case "caspida.url":
                eventType = "Url";
                break;
            default:
                break;
            }
            logger.error("Got event type {}", eventType);
            ResourceIterable<Node> iterableNodes = null;
            if (eventType != null) {
                iterableNodes = graphDb.findNodesByLabelAndProperty(eventClassLabel,
                        EntityGraphConstants.EVENT_TYPE, eventType);
            }
            if (iterableNodes == null) {
                logger.error("Could not find any nodes for request with entity list {}", entityList);
                return response.toJSONString();
            }
            ResourceIterator<Node> it = iterableNodes.iterator();
            while (it.hasNext()) {
                Node node = it.next();
                logger.error("Adding node {} to the list", node.getId());
                nodeList.add(node);
            }
        }
        /* Expand each seed node; nodeHash records nodes already visited/emitted. */
        HashMap<Node, Integer> nodeHash = new HashMap<>();
        for (Node node : nodeList) {
            // getNodeScore returns -1 for non-anomaly nodes; only anomalies are score-filtered.
            Integer score = getNodeScore(node);
            if (score != -1 && score < minScore) {
                logger.error("Dropping node due to low score");
                continue;
            }
            nodeHash.put(node, 1);
            List<Node> otherNodeList = new ArrayList<Node>();
            for (Relationship rel : node.getRelationships()) {
                Node otherNode = (rel.getStartNode().getId() == node.getId()) ? rel.getEndNode()
                        : rel.getStartNode();
                score = getNodeScore(otherNode);
                if (score != -1 && score < minScore) {
                    logger.error("Dropping other node due to low score");
                    continue;
                }
                if (score != -1 && !askedForAnomalies) {
                    /* Neighbor is an anomaly but the caller did not ask for anomalies:
                     * step over it to the entities on its far side. */
                    for (Relationship anomalyRel : otherNode.getRelationships()) {
                        Node otherAnomalyNode = (anomalyRel.getStartNode().getId() == otherNode.getId())
                                ? anomalyRel.getEndNode()
                                : anomalyRel.getStartNode();
                        // NOTE(review): identity comparison (!=) on Node kept from the original;
                        // presumably the embedded API returns canonical instances — confirm.
                        if (otherAnomalyNode != node && shouldInclude(otherAnomalyNode, entityList)
                                && !nodeHash.containsKey(otherAnomalyNode)) {
                            otherNodeList.add(otherAnomalyNode);
                            nodeHash.put(otherAnomalyNode, 1);
                        }
                    }
                } else if (shouldInclude(otherNode, entityList) && !nodeHash.containsKey(otherNode)) {
                    otherNodeList.add(otherNode);
                    nodeHash.put(otherNode, 1);
                }
            }
            /* Nothing to link to: un-visit the node and emit nothing for it. */
            if (otherNodeList.isEmpty()) {
                nodeHash.remove(node);
                continue;
            }
            logger.error("Other node list {}", otherNodeList);
            nodes.add(buildBipartiteNodeInfo(node));
            for (Node otherNode : otherNodeList) {
                JSONObject edgeInfo = new JSONObject();
                edgeInfo.put(EntityGraphConstants.RES_SOURCE_ID_TAG, node.getId());
                edgeInfo.put(EntityGraphConstants.RES_TARGET_ID_TAG, otherNode.getId());
                edgeInfo.put(EntityGraphConstants.RES_LINK_ID_TAG, uniqueLinkId);
                uniqueLinkId += 1;
                edges.add(edgeInfo);
                nodes.add(buildBipartiteNodeInfo(otherNode));
            }
        }
    }
    response.put("nodes", nodes);
    response.put("links", edges);
    if (logger.isDebugEnabled()) {
        logger.debug("Response : {}", response.toJSONString());
    }
    return response.toString();
}

/**
 * Builds the JSON description of one graph node (id, entityId, entityType, label, subtype).
 * Extracted from the two duplicated switch blocks in getBipartite; this also fixes the
 * copy-paste bug where the second copy read ANOMALY_TYPE from the seed node instead of the
 * node being described, and adds the missing break after the "Url" case.
 */
private JSONObject buildBipartiteNodeInfo(Node n) {
    JSONObject nodeInfo = new JSONObject();
    String[] tokens = getTokensFromFormattedString(
            n.getProperty(EntityGraphConstants.NODE_NAME).toString());
    nodeInfo.put("nodeId", n.getId());
    nodeInfo.put("entityId", n.getProperty(EntityGraphConstants.NODE_ID));
    String typeStr = null;
    switch (getNodeType(n)) {
    case "User":
        typeStr = EntityGraphConstants.UBER_GRAPH_USER_LABEL;
        nodeInfo.put(EntityGraphConstants.RES_LABEL_TAG, tokens[tokens.length - 1]);
        break;
    case "Device":
        Object deviceTypeObj = n.getProperty(EntityGraphConstants.DEVICE_TYPE);
        String deviceType = (deviceTypeObj != null) ? deviceTypeObj.toString() : "Desktop";
        typeStr = EntityGraphConstants.UBER_GRAPH_DEVICE_LABEL;
        nodeInfo.put(EntityGraphConstants.RES_LABEL_TAG, tokens[tokens.length - 1]);
        nodeInfo.put("entitySubtype", deviceType);
        break;
    case "Application":
        typeStr = EntityGraphConstants.UBER_GRAPH_APPLICATION_LABEL;
        nodeInfo.put(EntityGraphConstants.RES_LABEL_TAG, tokens[tokens.length - 1]);
        break;
    case "Threat":
        typeStr = EntityGraphConstants.UBER_GRAPH_THREAT_LABEL;
        /* anomalies do not have a meaningful label */
        nodeInfo.put(EntityGraphConstants.RES_LABEL_TAG, "Anomaly");
        nodeInfo.put("entitySubtype", n.getProperty(EntityGraphConstants.ANOMALY_TYPE));
        break;
    case "Url":
        typeStr = EntityGraphConstants.UBER_GRAPH_URL_LABEL;
        nodeInfo.put(EntityGraphConstants.RES_LABEL_TAG, tokens[tokens.length - 1]);
        break;
    default:
        break;
    }
    nodeInfo.put("entityType", typeStr);
    return nodeInfo;
}
From source file:nl.afas.cordova.plugin.secureLocalStorage.SecureLocalStorage.java
/**
 * Dispatches one secure-local-storage action (clear, clear-if-invalid, get, set, remove)
 * against the encrypted storage file, under the instance lock.
 *
 * @param actionId        the operation to perform
 * @param args            JSON args; args[0] is the item key, args[1] the value for set
 * @param callbackContext Cordova callback that receives the PluginResult
 * @throws SecureLocalStorageException on API level &lt; 18, empty key, or storage failures
 * @throws JSONException on malformed args
 */
private void handleAction(ActionId actionId, JSONArray args, CallbackContext callbackContext)
        throws SecureLocalStorageException, JSONException {
    if (Build.VERSION.SDK_INT < 18) {
        // FIX: the message was missing its closing parenthesis.
        throw new SecureLocalStorageException("Invalid API Level (must be >= 18)");
    }
    File file = _cordova.getActivity().getBaseContext().getFileStreamPath(SECURELOCALSTORAGEFILE);
    HashMap<String, String> hashMap = new HashMap<String, String>();
    // Serialize all storage access.
    lock.lock();
    try {
        KeyStore keyStore = initKeyStore();
        if (actionId == ActionId.ACTION_CLEAR) {
            // Clear deletes the storage file, then best-effort re-creates the key.
            clear(file, keyStore);
            try {
                keyStore = initKeyStore();
                generateKey(keyStore);
            } catch (SecureLocalStorageException ignored) {
                // deliberate: re-initialization failure still leaves the storage cleared
            }
            PluginResult pluginResult = new PluginResult(PluginResult.Status.OK);
            pluginResult.setKeepCallback(false);
            callbackContext.sendPluginResult(pluginResult);
        } else if (actionId == ActionId.ACTION_CLEARIFINVALID) {
            try {
                checkValidity();
                if (file.exists()) {
                    // Save the current map so the certificate can be re-initialized.
                    hashMap = readAndDecryptStorage(keyStore);
                    // Only clear the file if untouched for 10 days.
                    if ((new Date().getTime() - file.lastModified()) > (10 * 24 * 60 * 60 * 1000)) {
                        clear(file, keyStore);
                        keyStore = initKeyStore();
                        generateKey(keyStore);
                        writeAndEncryptStorage(keyStore, hashMap);
                    }
                }
            } catch (SecureLocalStorageException ex) {
                // Storage is invalid: wipe it and best-effort re-create the key.
                clear(file, keyStore);
                try {
                    keyStore = initKeyStore();
                    generateKey(keyStore);
                } catch (SecureLocalStorageException ignored) {
                    // deliberate: see ACTION_CLEAR above
                }
            }
            PluginResult pluginResult = new PluginResult(PluginResult.Status.OK);
            pluginResult.setKeepCallback(false);
            callbackContext.sendPluginResult(pluginResult);
        } else {
            // Item operations: initialize the store on first use, then load it.
            if (!file.exists()) {
                generateKey(keyStore);
                writeAndEncryptStorage(keyStore, hashMap);
            }
            hashMap = readAndDecryptStorage(keyStore);
            String key = args.getString(0);
            if (key == null || key.length() == 0) {
                throw new SecureLocalStorageException("Key is empty or null");
            }
            if (actionId == ActionId.ACTION_GETITEM) {
                if (hashMap.containsKey(key)) {
                    if (callbackContext != null) {
                        String value = hashMap.get(key);
                        PluginResult pluginResult = new PluginResult(PluginResult.Status.OK, value);
                        pluginResult.setKeepCallback(false);
                        callbackContext.sendPluginResult(pluginResult);
                    }
                } else {
                    // Return null when the key is not present.
                    PluginResult pluginResult = new PluginResult(PluginResult.Status.OK, (String) null);
                    pluginResult.setKeepCallback(false);
                    callbackContext.sendPluginResult(pluginResult);
                }
            } else if (actionId == ActionId.ACTION_SETITEM) {
                String value = args.getString(1);
                if (value == null) {
                    throw new SecureLocalStorageException("Value is null");
                }
                hashMap.put(key, value);
                writeAndEncryptStorage(keyStore, hashMap);
                PluginResult pluginResult = new PluginResult(PluginResult.Status.OK);
                pluginResult.setKeepCallback(false);
                callbackContext.sendPluginResult(pluginResult);
            } else if (actionId == ActionId.ACTION_REMOVEITEM) {
                hashMap.remove(key);
                writeAndEncryptStorage(keyStore, hashMap);
                PluginResult pluginResult = new PluginResult(PluginResult.Status.OK);
                pluginResult.setKeepCallback(false);
                callbackContext.sendPluginResult(pluginResult);
            }
        }
    } finally {
        lock.unlock();
    }
}
From source file:io.n7.calendar.caldav.CalDAVService.java
private void doSyncCalendar(Account acc, long calid) throws Exception { // Get a list of local events String[] evproj1 = new String[] { Events._ID, Events._SYNC_ID, Events.DELETED, Events._SYNC_DIRTY }; HashMap<String, Long> localevs = new HashMap<String, Long>(); HashMap<String, Long> removedevs = new HashMap<String, Long>(); HashMap<String, Long> dirtyevs = new HashMap<String, Long>(); HashMap<String, Long> newdevevs = new HashMap<String, Long>(); Cursor c = mCR.query(EVENTS_URI, evproj1, Events.CALENDAR_ID + "=" + calid, null, null); long tid;/* w ww.ja va2 s. c om*/ String tuid = null; if (c.moveToFirst()) { do { tid = c.getLong(0); tuid = c.getString(1); if (c.getInt(2) != 0) removedevs.put(tuid, tid); else if (tuid == null) { // generate a UUID tuid = UUID.randomUUID().toString(); newdevevs.put(tuid, tid); } else if (c.getInt(3) != 0) dirtyevs.put(tuid, tid); else localevs.put(tuid, tid); } while (c.moveToNext()); c.close(); } CalDAV4jIf caldavif = new CalDAV4jIf(getAssets()); caldavif.setCredentials(new CaldavCredential(acc.getProtocol(), acc.getHost(), acc.getPort(), acc.getHome(), acc.getCollection(), acc.getUser(), acc.getPassword())); //add new device events to server for (String uid : newdevevs.keySet()) addEventOnServer(uid, newdevevs.get(uid), caldavif); //delete the locally removed events on server for (String uid : removedevs.keySet()) { removeEventOnServer(uid, caldavif); // clean up provider DB? 
removeLocalEvent(removedevs.get(uid)); } //update the dirty events on server for (String uid : dirtyevs.keySet()) updateEventOnServer(uid, dirtyevs.get(uid), caldavif); // Get events from server VEvent[] evs = caldavif.getEvents(); // add/update to provider DB String[] evproj = new String[] { Events._ID }; ContentValues cv = new ContentValues(); String temp, durstr = null; for (VEvent v : evs) { cv.clear(); durstr = null; String uid = ICalendarUtils.getUIDValue(v); // XXX Some times the server seem to return the deleted event if we do get events immediately // after removing.. // So ignore the possibility of deleted event on server was modified on server, for now. if (removedevs.containsKey(uid)) continue; //TODO: put etag here cv.put(Events._SYNC_ID, uid); //UUID cv.put(Events._SYNC_DATA, uid); cv.put(Events.CALENDAR_ID, calid); cv.put(Events.TITLE, ICalendarUtils.getSummaryValue(v)); cv.put(Events.DESCRIPTION, ICalendarUtils.getPropertyValue(v, Property.DESCRIPTION)); cv.put(Events.EVENT_LOCATION, ICalendarUtils.getPropertyValue(v, Property.LOCATION)); String tzid = ICalendarUtils.getPropertyValue(v, Property.TZID); if (tzid == null) tzid = Time.getCurrentTimezone(); cv.put(Events.EVENT_TIMEZONE, tzid); long dtstart = parseDateTimeToMillis(ICalendarUtils.getPropertyValue(v, Property.DTSTART), tzid); cv.put(Events.DTSTART, dtstart); temp = ICalendarUtils.getPropertyValue(v, Property.DTEND); if (temp != null) cv.put(Events.DTEND, parseDateTimeToMillis(temp, tzid)); else { temp = ICalendarUtils.getPropertyValue(v, Property.DURATION); durstr = temp; if (temp != null) { cv.put(Events.DURATION, durstr); // We still need to calculate and enter DTEND. 
Otherwise, the Android is not displaying // the event properly Duration dur = new Duration(); dur.parse(temp); cv.put(Events.DTEND, dtstart + dur.getMillis()); } } //TODO add more fields //if the event is already present, update it otherwise insert it // TODO find if something changed on server using etag Uri euri; if (localevs.containsKey(uid) || dirtyevs.containsKey(uid)) { if (localevs.containsKey(uid)) { tid = localevs.get(uid); localevs.remove(uid); } else { tid = dirtyevs.get(uid); dirtyevs.remove(uid); } mCR.update(ContentUris.withAppendedId(EVENTS_URI, tid), cv, null, null); //clear sync dirty flag cv.clear(); cv.put(Events._SYNC_DIRTY, 0); mCR.update(ContentUris.withAppendedId(EVENTS_URI, tid), cv, null, null); Log.d(TAG, "Updated " + uid); } else if (!newdevevs.containsKey(uid)) { euri = mCR.insert(EVENTS_URI, cv); Log.d(TAG, "Inserted " + uid); } } // the remaining events in local and dirty event list are no longer on the server. So remove them. for (String uid : localevs.keySet()) removeLocalEvent(localevs.get(uid)); //XXX Is this possible? /* for (String uid: dirtyevs.keySet ()) removeLocalEvent (dirtyevs[uid]); */ }
From source file:org.apache.hadoop.dfs.FSNamesystem.java
private synchronized ArrayList<DatanodeDescriptor> getDatanodeListForReport(DatanodeReportType type) { boolean listLiveNodes = type == DatanodeReportType.ALL || type == DatanodeReportType.LIVE; boolean listDeadNodes = type == DatanodeReportType.ALL || type == DatanodeReportType.DEAD; HashMap<String, String> mustList = new HashMap<String, String>(); if (listDeadNodes) { //first load all the nodes listed in include and exclude files. for (Iterator<String> it = hostsReader.getHosts().iterator(); it.hasNext();) { mustList.put(it.next(), ""); }//from w w w .j a v a 2 s. com for (Iterator<String> it = hostsReader.getExcludedHosts().iterator(); it.hasNext();) { mustList.put(it.next(), ""); } } ArrayList<DatanodeDescriptor> nodes = null; synchronized (datanodeMap) { nodes = new ArrayList<DatanodeDescriptor>(datanodeMap.size() + mustList.size()); for (Iterator<DatanodeDescriptor> it = datanodeMap.values().iterator(); it.hasNext();) { DatanodeDescriptor dn = it.next(); boolean isDead = isDatanodeDead(dn); if ((isDead && listDeadNodes) || (!isDead && listLiveNodes)) { nodes.add(dn); } //Remove any form of the this datanode in include/exclude lists. mustList.remove(dn.getName()); mustList.remove(dn.getHost()); mustList.remove(dn.getHostName()); } } if (listDeadNodes) { for (Iterator<String> it = mustList.keySet().iterator(); it.hasNext();) { DatanodeDescriptor dn = new DatanodeDescriptor(new DatanodeID(it.next())); dn.setLastUpdate(0); nodes.add(dn); } } return nodes; }
From source file:com.dtolabs.rundeck.core.authorization.providers.TestYamlPolicy.java
public void testTypeRuleContextMatcherMatchRule() { {/*from w w w. ja v a 2 s.c om*/ //match any resource with name=~ blah, and allow all actions final Object load = yaml.load("match: \n" + " name: '.*blah.*'\n" + "allow: '*'"); assertTrue(load instanceof Map); final Map ruleSection = (Map) load; final YamlPolicy.TypeRuleContextMatcher typeRuleContext = new YamlPolicy.TypeRuleContextMatcher( ruleSection, 1); final HashMap<String, String> resmap = new HashMap<String, String>(); //false result for no match assertFalse(typeRuleContext.ruleMatchesMatchSection(resmap, ruleSection)); resmap.put("name", "something"); assertFalse(typeRuleContext.ruleMatchesMatchSection(resmap, ruleSection)); resmap.put("name", "blah"); assertTrue(typeRuleContext.ruleMatchesMatchSection(resmap, ruleSection)); resmap.put("name", "ablahz"); assertTrue(typeRuleContext.ruleMatchesMatchSection(resmap, ruleSection)); } { //multiple regexes must all match final Object load = yaml.load("match: \n" + " name: ['.*blah.*','.*nada.*']\n" + "allow: '*'"); assertTrue(load instanceof Map); final Map ruleSection = (Map) load; final YamlPolicy.TypeRuleContextMatcher typeRuleContext = new YamlPolicy.TypeRuleContextMatcher( ruleSection, 1); final HashMap<String, String> resmap = new HashMap<String, String>(); //false result for no match assertFalse(typeRuleContext.ruleMatchesMatchSection(resmap, ruleSection)); resmap.put("name", "something"); assertFalse(typeRuleContext.ruleMatchesMatchSection(resmap, ruleSection)); resmap.put("name", "blah"); assertFalse(typeRuleContext.ruleMatchesMatchSection(resmap, ruleSection)); resmap.put("name", "ablahz"); assertFalse(typeRuleContext.ruleMatchesMatchSection(resmap, ruleSection)); resmap.put("name", "nada"); assertFalse(typeRuleContext.ruleMatchesMatchSection(resmap, ruleSection)); resmap.put("name", "ablahz nada"); assertTrue(typeRuleContext.ruleMatchesMatchSection(resmap, ruleSection)); } { //multiple attributes must all match final Object load = yaml .load("match: 
\n" + " name: '.*blah.*'\n" + " something: '.*else.*'\n" + "allow: '*'"); assertTrue(load instanceof Map); final Map ruleSection = (Map) load; final YamlPolicy.TypeRuleContextMatcher typeRuleContext = new YamlPolicy.TypeRuleContextMatcher( ruleSection, 1); final HashMap<String, String> resmap = new HashMap<String, String>(); //false result for no match assertFalse(typeRuleContext.ruleMatchesMatchSection(resmap, ruleSection)); resmap.put("name", "something"); assertFalse(typeRuleContext.ruleMatchesMatchSection(resmap, ruleSection)); resmap.put("name", "blah"); assertFalse(typeRuleContext.ruleMatchesMatchSection(resmap, ruleSection)); resmap.put("name", "ablahz"); assertFalse(typeRuleContext.ruleMatchesMatchSection(resmap, ruleSection)); resmap.put("something", "els"); assertFalse(typeRuleContext.ruleMatchesMatchSection(resmap, ruleSection)); resmap.put("something", "else"); assertTrue(typeRuleContext.ruleMatchesMatchSection(resmap, ruleSection)); resmap.put("something", "bloo else zaaf"); assertTrue(typeRuleContext.ruleMatchesMatchSection(resmap, ruleSection)); resmap.put("name", "naba"); assertFalse(typeRuleContext.ruleMatchesMatchSection(resmap, ruleSection)); resmap.remove("name"); assertFalse(typeRuleContext.ruleMatchesMatchSection(resmap, ruleSection)); } { //invalid regex match becomes eequality match final Object load = yaml.load("match: \n" + " name: 'abc[def'\n" + "allow: '*'"); assertTrue(load instanceof Map); final Map ruleSection = (Map) load; final YamlPolicy.TypeRuleContextMatcher typeRuleContext = new YamlPolicy.TypeRuleContextMatcher( ruleSection, 1); final HashMap<String, String> resmap = new HashMap<String, String>(); //false result for no match assertFalse(typeRuleContext.ruleMatchesMatchSection(resmap, ruleSection)); resmap.put("name", "something"); assertFalse(typeRuleContext.ruleMatchesMatchSection(resmap, ruleSection)); resmap.put("name", "blah"); assertFalse(typeRuleContext.ruleMatchesMatchSection(resmap, ruleSection)); resmap.put("name", 
"ablahz"); assertFalse(typeRuleContext.ruleMatchesMatchSection(resmap, ruleSection)); resmap.put("name", "abcdef"); assertFalse(typeRuleContext.ruleMatchesMatchSection(resmap, ruleSection)); resmap.put("name", "abc[def"); assertTrue(typeRuleContext.ruleMatchesMatchSection(resmap, ruleSection)); resmap.remove("name"); assertFalse(typeRuleContext.ruleMatchesMatchSection(resmap, ruleSection)); } }
From source file:org.apache.jxtadoop.hdfs.server.namenode.FSNamesystem.java
private synchronized ArrayList<DatanodeDescriptor> getDatanodeListForReport(DatanodeReportType type) { boolean listLiveNodes = type == DatanodeReportType.ALL || type == DatanodeReportType.LIVE; boolean listDeadNodes = type == DatanodeReportType.ALL || type == DatanodeReportType.DEAD; HashMap<String, String> mustList = new HashMap<String, String>(); if (listDeadNodes) { //first load all the nodes listed in include and exclude files. for (Iterator<String> it = hostsReader.getHosts().iterator(); it.hasNext();) { mustList.put(it.next(), ""); }/*w w w .jav a 2 s . com*/ for (Iterator<String> it = hostsReader.getExcludedHosts().iterator(); it.hasNext();) { mustList.put(it.next(), ""); } } ArrayList<DatanodeDescriptor> nodes = null; synchronized (datanodeMap) { nodes = new ArrayList<DatanodeDescriptor>(datanodeMap.size() + mustList.size()); for (Iterator<DatanodeDescriptor> it = datanodeMap.values().iterator(); it.hasNext();) { DatanodeDescriptor dn = it.next(); boolean isDead = isDatanodeDead(dn); if ((isDead && listDeadNodes) || (!isDead && listLiveNodes)) { nodes.add(dn); } //Remove any form of the this datanode in include/exclude lists. mustList.remove(dn.getName()); mustList.remove(dn.getPeerId()); mustList.remove(dn.getHostName()); } } if (listDeadNodes) { for (Iterator<String> it = mustList.keySet().iterator(); it.hasNext();) { DatanodeDescriptor dn = new DatanodeDescriptor(new DatanodeID(it.next())); dn.setLastUpdate(0); nodes.add(dn); } } return nodes; }
From source file:com.ibm.bi.dml.lops.compile.Dag.java
/** * Method to remove transient reads that do not have a transient write * /*from ww w .j av a2 s. c o m*/ * @param nodeV * @param inst * @throws DMLUnsupportedOperationException * @throws DMLRuntimeException */ @SuppressWarnings("unused") private void deleteUnwantedTransientReadVariables(ArrayList<N> nodeV, ArrayList<Instruction> inst) throws DMLRuntimeException, DMLUnsupportedOperationException { HashMap<String, N> labelNodeMapping = new HashMap<String, N>(); LOG.trace("In delete unwanted variables"); // first capture all transient read variables for (int i = 0; i < nodeV.size(); i++) { N node = nodeV.get(i); if (node.getExecLocation() == ExecLocation.Data && ((Data) node).isTransient() && ((Data) node).getOperationType() == OperationTypes.READ) { labelNodeMapping.put(node.getOutputParameters().getLabel(), node); } } // generate delete instructions for all transient read variables without // a transient write // first capture all transient write variables for (int i = 0; i < nodeV.size(); i++) { N node = nodeV.get(i); if (node.getExecLocation() == ExecLocation.Data && ((Data) node).isTransient() && ((Data) node).getOperationType() == OperationTypes.WRITE) { if (node.getInputs().get(0).getExecLocation() == ExecLocation.Data) { // this transient write is NOT a result of actual computation, but is simple copy. 
// in such a case, preserve the input variable so that appropriate copy instruction (cpvar or GMR) is generated // therefore, remove the input label from labelNodeMapping labelNodeMapping.remove(node.getInputs().get(0).getOutputParameters().getLabel()); } else { if (labelNodeMapping.containsKey(node.getOutputParameters().getLabel())) // corresponding transient read exists (i.e., the variable is updated) // in such a case, generate rmvar instruction so that OLD data gets deleted labelNodeMapping.remove(node.getOutputParameters().getLabel()); } } } // generate RM instructions Instruction rm_inst = null; for (Entry<String, N> e : labelNodeMapping.entrySet()) { String label = e.getKey(); N node = e.getValue(); if (((Data) node).getDataType() == DataType.SCALAR) { // if(DEBUG) // System.out.println("rmvar" + Lops.OPERAND_DELIMITOR + label); // inst.add(new VariableSimpleInstructions("rmvar" + // Lops.OPERAND_DELIMITOR + label)); } else { rm_inst = VariableCPInstruction.prepareRemoveInstruction(label); rm_inst.setLocation(node); if (LOG.isTraceEnabled()) LOG.trace(rm_inst.toString()); inst.add(rm_inst); } } }
From source file:com.krawler.esp.servlets.AdminServlet.java
public static String manageProjectMembers(Connection conn, HttpServletRequest request) { String res = "success"; try {/*w w w.j a v a 2 s. co m*/ String pid = request.getParameter("projectid"); String userid = AuthHandler.getUserid(request); String companyid = AuthHandler.getCompanyid(request); DbResults dbr = DbUtil.executeQuery(conn, "SELECT projectname FROM project WHERE projectid = ?", new Object[] { pid }); String projName = ""; if (dbr.next()) { projName = dbr.getString("projectname"); } com.krawler.utils.json.base.JSONObject jobj = new JSONObject(request.getParameter("data").toString()); dbr = DbUtil.executeQuery(conn, "SELECT userid FROM projectmembers WHERE projectid = ? AND inuseflag = 1 AND status >= 3", new Object[] { pid }); HashMap members = new HashMap(); int k = 0; while (dbr.next()) { members.put(dbr.getString("userid"), k); k++; } // DbUtil.executeUpdate(conn, "UPDATE projectmembers SET inuseflag = 0 WHERE projectid = ?", new Object[] {pid}); com.krawler.utils.json.base.JSONArray jarr = jobj.getJSONArray("data"); String subjectActive = "[" + projName + "] Access to the project activated."; String mailFooter = KWLErrorMsgs.mailSystemFooter; String msgActiveString = "Your access to the project : " + projName + " has been activated." + mailFooter; String chkQry = "SELECT status, inuseflag FROM projectmembers WHERE userid = ? AND projectid = ?"; String updateQry = "UPDATE projectmembers SET inuseflag = 1, status = ? WHERE projectid = ? 
AND userid = ?"; String insertQry = "INSERT INTO projectmembers (projectid, userid, status, inuseflag) VALUES (?,?,3,1)"; String insertRes = "INSERT INTO proj_resources (resourceid, resourcename, projid) VALUES (?,?,?)"; for (k = 0; k < jarr.length(); k++) { com.krawler.utils.json.base.JSONObject obj = jarr.getJSONObject(k); DbResults rs = DbUtil.executeQuery(conn, chkQry, new Object[] { obj.getString("userid"), pid }); if (rs.next()) { if (rs.getInt("status") == 4) { DbUtil.executeUpdate(conn, updateQry, new Object[] { 4, pid, obj.getString("userid") }); } else { DbUtil.executeUpdate(conn, updateQry, new Object[] { 3, pid, obj.getString("userid") }); if (rs.getInt("status") < 3) { // dbr = DbUtil.executeQuery(conn, "SELECT username FROM users WHERE userid = ?", new Object[]{obj.getString("userid")}); // String resName = ""; // if (dbr.next()) { String resName = getUserName(conn, obj.getString("userid"));//dbr.getString("username"); // } DbUtil.executeUpdate(conn, insertRes, new Object[] { obj.getString("userid"), resName, pid }); } } } else { DbUtil.executeUpdate(conn, insertQry, new Object[] { pid, obj.getString("userid") }); // dbr = DbUtil.executeQuery(conn, "SELECT username FROM users WHERE userid = ?", new Object[]{obj.getString("userid")}); // String resName = ""; // if (dbr.next()) { String resName = getUserName(conn, obj.getString("userid"));//dbr.getString("username"); // } DbUtil.executeUpdate(conn, insertRes, new Object[] { obj.getString("userid"), resName, pid }); Mail.insertMailMsg(conn, resName, userid, subjectActive, msgActiveString, "1", false, "1", "", "newmsg", "", 3, "", companyid); } members.remove(obj.getString("userid")); } Object[] obj = members.keySet().toArray(); for (k = 0; k < obj.length; k++) { subjectActive = "Your access to the project: " + projName + " has been deactivated."; String[] ids = { obj[k].toString() }; if (!Forum.chkResourceDependency(conn, ids, pid)) { DbUtil.executeUpdate(conn, "DELETE FROM projectmembers WHERE projectid 
= ? AND userid = ?", new Object[] { pid, obj[k] }); DbUtil.executeUpdate(conn, "DELETE FROM proj_resources WHERE projid = ? AND resourceid = ?", new Object[] { pid, obj[k] }); } else { DbUtil.executeUpdate(conn, "UPDATE projectmembers SET inuseflag = 0, status = 0 WHERE projectid = ? AND userid = ?", new Object[] { pid, obj[k] }); DbUtil.executeUpdate(conn, "UPDATE proj_resources SET inuseflag = 0 WHERE projid = ? AND resourceid = ?", new Object[] { pid, obj[k] }); } // dbr = DbUtil.executeQuery(conn, "SELECT username FROM users WHERE userid = ?", new Object[]{obj[k]}); // String resName = ""; // if (dbr.next()) { String resName = getUserName(conn, obj[k].toString());//dbr.getString("username"); // } msgActiveString = /*resName + */ "Your access to the project: " + projName + " has been deactivated. " + mailFooter; Mail.insertMailMsg(conn, resName, request.getParameter("lid"), subjectActive, msgActiveString, "1", false, "1", "", "newmsg", "", 3, "", companyid); } String ipAddress = AuthHandler.getIPAddress(request); int auditMode = 0; String loginid = AuthHandler.getUserid(request); String params = AuthHandler.getAuthor(conn, loginid) + " (" + AuthHandler.getUserName(request) + "), " + projName; AuditTrail.insertLog(conn, "324", loginid, pid, pid, companyid, params, ipAddress, auditMode); } catch (ServiceException ex) { Logger.getLogger(AdminServlet.class.getName()).log(Level.SEVERE, "Service Exception While Manging Members In Project", ex); res = ex.getMessage(); } catch (ParseException ex) { Logger.getLogger(AdminServlet.class.getName()).log(Level.SEVERE, "Parse Exception While Manging Members In Project", ex); res = ex.getMessage(); } catch (JSONException ex) { Logger.getLogger(AdminServlet.class.getName()).log(Level.SEVERE, "JSON Exception While Manging Members In Project", ex); res = ex.getMessage(); } catch (SessionExpiredException ex) { Logger.getLogger(AdminServlet.class.getName()).log(Level.SEVERE, "Session Expired Exception While Manging Members In 
Project", ex); res = ex.getMessage(); } return res; }
From source file:com.lp.server.auftrag.ejbfac.AuftragFacBean.java
public ArrayList<KundeDto> getLieferadressenEinerRechnungsadresseSortiertNachHaeufigkeit( Integer kundeIIdRechnungsadresse, TheClientDto theClientDto) { HashMap<Integer, Integer> anzahlLieferadressen = new HashMap<Integer, Integer>(); Query query = em.createNamedQuery("AuftragfindByKundeIIdRechnungsadresseMandantCNr"); query.setParameter(1, kundeIIdRechnungsadresse); query.setParameter(2, theClientDto.getMandant()); Collection<?> cl = query.getResultList(); Iterator it = cl.iterator();// w w w . java 2 s . c o m while (it.hasNext()) { Auftrag auftrag = (Auftrag) it.next(); if (anzahlLieferadressen.containsKey(auftrag.getKundeIIdLieferadresse())) { Integer iAnzahl = anzahlLieferadressen.get(auftrag.getKundeIIdLieferadresse()); iAnzahl++; anzahlLieferadressen.put(auftrag.getKundeIIdLieferadresse(), iAnzahl); } else { anzahlLieferadressen.put(auftrag.getKundeIIdLieferadresse(), new Integer(1)); } } ArrayList<KundeDto> kunden = new ArrayList<KundeDto>(); while (anzahlLieferadressen.size() > 0) { Iterator itAnzahl = anzahlLieferadressen.keySet().iterator(); Integer iGroessteAnzahl = null; Integer keyGroessteAnzahl = null; while (itAnzahl.hasNext()) { Integer key = (Integer) itAnzahl.next(); Integer value = anzahlLieferadressen.get(key); if (iGroessteAnzahl == null) { iGroessteAnzahl = value; } if (keyGroessteAnzahl == null) { keyGroessteAnzahl = key; } if (value >= iGroessteAnzahl) { iGroessteAnzahl = value; keyGroessteAnzahl = key; } } anzahlLieferadressen.remove(keyGroessteAnzahl); kunden.add(getKundeFac().kundeFindByPrimaryKey(keyGroessteAnzahl, theClientDto)); } return kunden; }