List of usage examples for java.util.HashMap.remove(Object key)
public V remove(Object key)
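Before the project-drawn examples below, here is a minimal, self-contained sketch of the call itself (the class and variable names are made up for illustration): remove(key) deletes the mapping for the given key if one exists and returns the value that was previously associated with it, or null if the map contained no mapping for that key.

import java.util.HashMap;

public class RemoveDemo {
    public static void main(String[] args) {
        HashMap<String, Integer> scores = new HashMap<String, Integer>();
        scores.put("alice", 10);
        scores.put("bob", 7);

        Integer removed = scores.remove("alice"); // mapping existed: returns 10 and deletes it
        Integer missing = scores.remove("carol"); // no such key: returns null, map unchanged

        System.out.println(removed); // 10
        System.out.println(missing); // null
        System.out.println(scores);  // {bob=7}
    }
}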
From source file:com.ecofactor.qa.automation.api.HttpsReportsAPITest.java
/**
 * Missing start date user savings.
 */
@Test(groups = { "sanity1" })
public void missingStartDateUserSavings() {
    HashMap<String, String> params = getNotAccumulatedParams(5);
    params.remove(HttpsReportsAPIConfig.START_DATE);
    String id = reportsAPIConfig.get(HttpsReportsAPIConfig.USER_ID);
    String content = HttpsUtil.get(getUserSavingsURL(id), params, 400);
    JSONObject object = JsonUtil.parseObject(content);
    Assert.assertNotNull(object);
    Assert.assertNotNull(object.get(reportsAPIConfig.get(HttpsReportsAPIConfig.ERROR_MSG)));
    JSONArray mesgs = (JSONArray) object.get(reportsAPIConfig.get(HttpsReportsAPIConfig.ERROR_MSG));
    for (Object mesg : mesgs) {
        Assert.assertEquals(mesg.toString().trim(),
                reportsAPIConfig.get(HttpsReportsAPIConfig.SVG_START_DATE_ERROR));
    }
}
From source file:com.ecofactor.qa.automation.api.HttpsReportsAPITest.java
/**
 * Missing end date user savings.
 */
@Test(groups = { "sanity1" })
public void missingEndDateUserSavings() {
    HashMap<String, String> params = getNotAccumulatedParams(5);
    params.remove(HttpsReportsAPIConfig.END_DATE);
    String id = reportsAPIConfig.get(HttpsReportsAPIConfig.USER_ID);
    String content = HttpsUtil.get(getUserSavingsURL(id), params, 400);
    JSONObject object = JsonUtil.parseObject(content);
    Assert.assertNotNull(object);
    Assert.assertNotNull(object.get(reportsAPIConfig.get(HttpsReportsAPIConfig.ERROR_MSG)));
    JSONArray mesgs = (JSONArray) object.get(reportsAPIConfig.get(HttpsReportsAPIConfig.ERROR_MSG));
    for (Object mesg : mesgs) {
        Assert.assertEquals(mesg.toString().trim(),
                reportsAPIConfig.get(HttpsReportsAPIConfig.SVG_END_DATE_ERROR));
    }
}
From source file:org.sakaiproject.tool.assessment.ui.listener.author.SavePartAttachmentListener.java
private List prepareSectionAttachment(SectionBean sectionBean, AssessmentService assessmentService) {
    SectionDataIfc section = null;
    // section == null => section does not exist yet
    if (sectionBean.getSection() != null) {
        section = sectionBean.getSection().getData();
    }
    ToolSession session = SessionManager.getCurrentToolSession();
    if (session.getAttribute(FilePickerHelper.FILE_PICKER_ATTACHMENTS) != null) {
        Set attachmentSet = new HashSet();
        if (section != null) {
            attachmentSet = section.getSectionAttachmentSet();
        }
        HashMap map = getResourceIdHash(attachmentSet);
        ArrayList newAttachmentList = new ArrayList();
        String protocol = ContextUtil.getProtocol();
        List refs = (List) session.getAttribute(FilePickerHelper.FILE_PICKER_ATTACHMENTS);
        if (refs != null && refs.size() > 0) {
            Reference ref;
            for (int i = 0; i < refs.size(); i++) {
                ref = (Reference) refs.get(i);
                String resourceId = ref.getId();
                if (map.get(resourceId) == null) {
                    // new attachment, add it
                    log.debug("**** ref.Id=" + ref.getId());
                    log.debug("**** ref.name="
                            + ref.getProperties().getProperty(ref.getProperties().getNamePropDisplayName()));
                    SectionAttachmentIfc newAttach = assessmentService.createSectionAttachment(section,
                            ref.getId(),
                            ref.getProperties().getProperty(ref.getProperties().getNamePropDisplayName()),
                            protocol);
                    newAttachmentList.add(newAttach);
                } else {
                    // attachment already exists; add it to the new list and
                    // check it off from the map
                    newAttachmentList.add((SectionAttachmentIfc) map.get(resourceId));
                    map.remove(resourceId);
                }
            }
        }
        session.removeAttribute(FilePickerHelper.FILE_PICKER_ATTACHMENTS);
        session.removeAttribute(FilePickerHelper.FILE_PICKER_CANCEL);
        return newAttachmentList;
    }
    return new ArrayList();
}
From source file:com.ecofactor.qa.automation.api.HttpsReportsAPITest.java
/**
 * Invalid location savings.
 */
@Test(groups = { "sanity1" })
public void invalidLocationSavings() {
    HashMap<String, String> params = getNotAccumulatedParams(5);
    params.remove(HttpsReportsAPIConfig.ACCUMULATED);
    params.put(HttpsReportsAPIConfig.ACCUMULATED, "");
    String id = reportsAPIConfig.get(HttpsReportsAPIConfig.LOCATION_ID);
    String content = HttpsUtil.get(getLocationSavingsURL(id), params, 500);
    JSONObject object = JsonUtil.parseObject(content);
    Assert.assertNotNull(object);
    Assert.assertNotNull(object.get(reportsAPIConfig.get(HttpsReportsAPIConfig.ERROR_MSG)));
    JSONArray mesgs = (JSONArray) object.get(reportsAPIConfig.get(HttpsReportsAPIConfig.ERROR_MSG));
    for (Object mesg : mesgs) {
        Assert.assertTrue(
                mesg.toString().startsWith(reportsAPIConfig.get(HttpsReportsAPIConfig.INTERNAL_ERROR)));
    }
}
From source file:com.ecofactor.qa.automation.api.HttpsReportsAPITest.java
/**
 * Invalid user savings.
 */
@Test(groups = { "sanity1" })
public void invalidUserSavings() {
    HashMap<String, String> params = getNotAccumulatedParams(5);
    params.remove(HttpsReportsAPIConfig.ACCUMULATED);
    params.put(HttpsReportsAPIConfig.ACCUMULATED, "");
    String id = reportsAPIConfig.get(HttpsReportsAPIConfig.USER_ID);
    String content = HttpsUtil.get(getUserSavingsURL(id), params, 500);
    JSONObject object = JsonUtil.parseObject(content);
    Assert.assertNotNull(object);
    Assert.assertNotNull(object.get(reportsAPIConfig.get(HttpsReportsAPIConfig.ERROR_MSG)));
    JSONArray mesgs = (JSONArray) object.get(reportsAPIConfig.get(HttpsReportsAPIConfig.ERROR_MSG));
    for (Object mesg : mesgs) {
        Assert.assertTrue(
                mesg.toString().startsWith(reportsAPIConfig.get(HttpsReportsAPIConfig.INTERNAL_ERROR)));
    }
}
From source file:org.wso2.carbon.event.simulator.core.internal.ds.CarbonEventSimulator.java
@Override
public void deleteFile(String fileName, AxisConfiguration axisConfiguration) throws AxisFault {
    int tenantID = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId();
    HashMap<String, CSVFileInfo> csvFileInfoMap = tenantSpecificCSVFileInfoMap.get(tenantID);
    CSVFileInfo csvFileInfo = csvFileInfoMap.get(fileName);
    String repo = axisConfiguration.getRepository().getPath();
    String path = repo + EventSimulatorConstant.DEPLOY_DIRECTORY_PATH;
    String xmlFileName = csvFileInfo.getFileName().substring(0, csvFileInfo.getFileName().length() - 4)
            + EventSimulatorConstant.CONFIGURATION_XML_PREFIX;
    String xmlFilePath = path + File.separator + xmlFileName;
    File file = new File(csvFileInfo.getFilePath());
    File xmlFile = new File(xmlFilePath);
    if (file.delete()) {
        csvFileInfoMap.remove(fileName);
    } else {
        throw new AxisFault("Failed to delete the file .." + csvFileInfo.getFileName());
    }
    if (xmlFile.exists()) {
        xmlFile.delete();
    }
}
From source file:org.wso2.carbon.event.simulator.core.internal.ds.CarbonEventSimulator.java
@Override
public void deleteDBConfigFile(String fileName, AxisConfiguration axisConfiguration) throws AxisFault {
    int tenantID = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId();
    HashMap<String, DataSourceTableAndStreamInfo> dataSourceTableAndStreamInfoMap = tenantSpecificDataSourceInfoMap
            .get(tenantID);
    fileName = fileName.replace(EventSimulatorConstant.DATA_SOURCE_CONFIGURATION_XML_PREFIX, "");
    DataSourceTableAndStreamInfo dataSourceTableAndStreamInfo = dataSourceTableAndStreamInfoMap.get(fileName);
    String repo = axisConfiguration.getRepository().getPath();
    String path = repo + EventSimulatorConstant.DEPLOY_DIRECTORY_PATH;
    String xmlFilePath = path + File.separator + dataSourceTableAndStreamInfo.getFileName();
    File xmlFile = new File(xmlFilePath);
    if (xmlFile.exists()) {
        dataSourceTableAndStreamInfoMap.remove(fileName);
        xmlFile.delete();
    }
}
From source file:org.benjp.services.mongodb.ChatServiceImpl.java
public RoomsBean getRooms(String user, String filter, boolean withUsers, boolean withSpaces, boolean withPublic,
        boolean withOffline, boolean isAdmin, int limit, NotificationService notificationService,
        UserService userService, TokenService tokenService) {
    List<RoomBean> rooms = new ArrayList<RoomBean>();
    List<RoomBean> roomsOffline = new ArrayList<RoomBean>();
    UserBean userBean = userService.getUser(user, true);
    int unreadOffline = 0, unreadOnline = 0, unreadSpaces = 0, unreadTeams = 0;

    HashMap<String, UserBean> availableUsers = tokenService.getActiveUsersFilterBy(user, withUsers, withPublic,
            isAdmin, limit);
    rooms = this.getExistingRooms(user, withPublic, isAdmin, notificationService, tokenService);
    if (isAdmin)
        rooms.addAll(this.getExistingRooms(UserServiceImpl.SUPPORT_USER, withPublic, isAdmin,
                notificationService, tokenService));

    for (RoomBean roomBean : rooms) {
        String targetUser = roomBean.getUser();
        roomBean.setFavorite(userBean.isFavorite(targetUser));
        if (availableUsers.keySet().contains(targetUser)) {
            UserBean targetUserBean = availableUsers.get(targetUser);
            roomBean.setFullname(targetUserBean.getFullname());
            roomBean.setStatus(targetUserBean.getStatus());
            roomBean.setAvailableUser(true);
            availableUsers.remove(targetUser);
            if (roomBean.getUnreadTotal() > 0)
                unreadOnline += roomBean.getUnreadTotal();
        } else {
            UserBean targetUserBean = userService.getUser(targetUser);
            roomBean.setFullname(targetUserBean.getFullname());
            roomBean.setAvailableUser(false);
            if (!withOffline)
                roomsOffline.add(roomBean);
            if (roomBean.getUnreadTotal() > 0)
                unreadOffline += roomBean.getUnreadTotal();
        }
    }

    if (withUsers) {
        if (!withOffline) {
            for (RoomBean roomBean : roomsOffline) {
                rooms.remove(roomBean);
            }
        }
        for (UserBean availableUser : availableUsers.values()) {
            RoomBean roomBean = new RoomBean();
            roomBean.setUser(availableUser.getName());
            roomBean.setFullname(availableUser.getFullname());
            roomBean.setStatus(availableUser.getStatus());
            roomBean.setAvailableUser(true);
            roomBean.setFavorite(userBean.isFavorite(roomBean.getUser()));
            String status = roomBean.getStatus();
            if (withOffline || (!withOffline && !UserServiceImpl.STATUS_INVISIBLE.equals(roomBean.getStatus())
                    && !UserServiceImpl.STATUS_OFFLINE.equals(roomBean.getStatus()))) {
                rooms.add(roomBean);
            }
        }
    } else {
        rooms = new ArrayList<RoomBean>();
    }

    List<SpaceBean> spaces = userService.getSpaces(user);
    for (SpaceBean space : spaces) {
        RoomBean roomBeanS = new RoomBean();
        roomBeanS.setUser(SPACE_PREFIX + space.getRoom());
        roomBeanS.setRoom(space.getRoom());
        roomBeanS.setFullname(space.getDisplayName());
        roomBeanS.setStatus(UserService.STATUS_SPACE);
        roomBeanS.setTimestamp(space.getTimestamp());
        roomBeanS.setAvailableUser(true);
        roomBeanS.setSpace(true);
        roomBeanS.setUnreadTotal(notificationService.getUnreadNotificationsTotal(user, "chat", "room",
                getSpaceRoom(SPACE_PREFIX + space.getRoom())));
        if (roomBeanS.getUnreadTotal() > 0)
            unreadSpaces += roomBeanS.getUnreadTotal();
        roomBeanS.setFavorite(userBean.isFavorite(roomBeanS.getUser()));
        if (withSpaces) {
            rooms.add(roomBeanS);
        }
    }

    List<RoomBean> teams = userService.getTeams(user);
    for (RoomBean team : teams) {
        RoomBean roomBeanS = new RoomBean();
        roomBeanS.setUser(TEAM_PREFIX + team.getRoom());
        roomBeanS.setRoom(team.getRoom());
        roomBeanS.setFullname(team.getFullname());
        roomBeanS.setStatus(UserService.STATUS_TEAM);
        roomBeanS.setTimestamp(team.getTimestamp());
        roomBeanS.setAvailableUser(true);
        roomBeanS.setSpace(false);
        roomBeanS.setTeam(true);
        roomBeanS.setUnreadTotal(
                notificationService.getUnreadNotificationsTotal(user, "chat", "room", team.getRoom()));
        if (roomBeanS.getUnreadTotal() > 0)
            unreadTeams += roomBeanS.getUnreadTotal();
        roomBeanS.setFavorite(userBean.isFavorite(roomBeanS.getUser()));
        if (withSpaces) {
            rooms.add(roomBeanS);
        }
    }

    List<RoomBean> finalRooms = new ArrayList<RoomBean>();
    if (filter != null) {
        for (RoomBean roomBean : rooms) {
            String targetUser = roomBean.getFullname();
            if (filter(targetUser, filter))
                finalRooms.add(roomBean);
        }
    } else {
        finalRooms = rooms;
    }

    RoomsBean roomsBean = new RoomsBean();
    roomsBean.setRooms(finalRooms);
    roomsBean.setUnreadOffline(unreadOffline);
    roomsBean.setUnreadOnline(unreadOnline);
    roomsBean.setUnreadSpaces(unreadSpaces);
    roomsBean.setUnreadTeams(unreadTeams);
    return roomsBean;
}
From source file:com.ptts.sync.SyncAdapter.java
/**
 * Read JSON from an input stream, storing it into the content provider.
 *
 * <p>This is where incoming data is persisted, committing the results of a sync. In order to
 * minimize (expensive) disk operations, we compare incoming data with what's already in our
 * database, and compute a merge. Only changes (insert/update/delete) will result in a database
 * write.
 *
 * <p>As an additional optimization, we use a batch operation to perform all database writes at
 * once.
 *
 * <p>Merge strategy:
 * 1. Get cursor to all items in feed<br/>
 * 2. For each item, check if it's in the incoming data.<br/>
 *    a. YES: Remove from "incoming" list. Check if data has mutated, if so, perform
 *       database UPDATE.<br/>
 *    b. NO: Schedule DELETE from database.<br/>
 * (At this point, incoming database only contains missing items.)<br/>
 * 3. For any items remaining in incoming list, ADD to database.
 */
public void updateLocalFeedData(final InputStream stream, final SyncResult syncResult)
        throws IOException, JSONException, RemoteException, OperationApplicationException, ParseException {
    final FeedParserJson feedParser = new FeedParserJson();
    final ContentResolver contentResolver = getContext().getContentResolver();

    Log.i(TAG, "Parsing stream as Json feed");
    final List<FeedParserJson.Entry> entries = feedParser.parse(stream);
    Log.i(TAG, "Parsing complete. Found " + entries.size() + " entries");

    ArrayList<ContentProviderOperation> batch = new ArrayList<ContentProviderOperation>();

    // Build hash table of incoming entries
    HashMap<String, FeedParserJson.Entry> entryMap = new HashMap<String, FeedParserJson.Entry>();
    for (FeedParserJson.Entry e : entries) {
        entryMap.put(e.id, e);
    }

    // Get list of all items
    Log.i(TAG, "Fetching local entries for merge");
    Uri uri = FeedContract.Entry.CONTENT_URI; // Get all entries
    Cursor c = contentResolver.query(uri, PROJECTION, null, null, null);
    assert c != null;
    Log.i(TAG, "Found " + c.getCount() + " local entries. Computing merge solution...");

    // Find stale data
    int id;
    String entryId;
    String name;
    String start;
    String end;
    String stops;
    while (c.moveToNext()) {
        syncResult.stats.numEntries++;
        id = c.getInt(COLUMN_ID);
        entryId = c.getString(COLUMN_ENTRY_ID);
        name = c.getString(COLUMN_NAME);
        start = c.getString(COLUMN_START);
        end = c.getString(COLUMN_END);
        stops = c.getString(COLUMN_STOPS);
        Log.i("STOPS FROM PROJECTION", stops);
        FeedParserJson.Entry match = entryMap.get(entryId);
        if (match != null) {
            // Entry exists. Remove from entry map to prevent insert later.
            entryMap.remove(entryId);
            // Check to see if the entry needs to be updated
            Uri existingUri = FeedContract.Entry.CONTENT_URI.buildUpon()
                    .appendPath(Integer.toString(id)).build();
            if ((match.name != null && !match.name.equals(name))
                    || (match.start != null && !match.start.equals(start))
                    || (match.stops != null && !match.stops.equals(stops))
                    || (match.end != null && !match.end.equals(end))) {
                Log.i("STOPS FROM HASHMAP", match.stops);
                if (!match.stops.equals(stops)) {
                    Log.i("COMPARING PROJECTION " + match.stops + " & HASHMAP " + stops,
                            "The two aren't equal");
                } else {
                    Log.i("COMPARING PROJECTION & HASHMAP", "The two are equal");
                }
                // Update existing record
                Log.i(TAG, "Scheduling update: " + existingUri);
                batch.add(ContentProviderOperation.newUpdate(existingUri)
                        .withValue(FeedContract.Entry.COLUMN_NAME_ENTRY_ID, entryId)
                        .withValue(FeedContract.Entry.COLUMN_NAME_NAME, name)
                        .withValue(FeedContract.Entry.COLUMN_NAME_START, start)
                        .withValue(FeedContract.Entry.COLUMN_NAME_END, end)
                        .withValue(FeedContract.Entry.COLUMN_NAME_STOPS, stops).build());
                syncResult.stats.numUpdates++;
            } else {
                Log.i(TAG, "No action: " + existingUri);
            }
        } else {
            // Entry doesn't exist. Remove it from the database.
            Uri deleteUri = FeedContract.Entry.CONTENT_URI.buildUpon()
                    .appendPath(Integer.toString(id)).build();
            Log.i(TAG, "Scheduling delete: " + deleteUri);
            batch.add(ContentProviderOperation.newDelete(deleteUri).build());
            syncResult.stats.numDeletes++;
        }
    }
    c.close();

    // Add new items
    for (FeedParserJson.Entry e : entryMap.values()) {
        Log.i(TAG, "Scheduling insert: entry_id=" + e.id);
        batch.add(ContentProviderOperation.newInsert(FeedContract.Entry.CONTENT_URI)
                .withValue(FeedContract.Entry.COLUMN_NAME_ENTRY_ID, e.id)
                .withValue(FeedContract.Entry.COLUMN_NAME_NAME, e.name)
                .withValue(FeedContract.Entry.COLUMN_NAME_START, e.start)
                .withValue(FeedContract.Entry.COLUMN_NAME_END, e.end)
                .withValue(FeedContract.Entry.COLUMN_NAME_STOPS, e.stops).build());
        syncResult.stats.numInserts++;
    }

    Log.i(TAG, "Merge solution ready. Applying batch update");
    mContentResolver.applyBatch(FeedContract.CONTENT_AUTHORITY, batch);
    mContentResolver.notifyChange(FeedContract.Entry.CONTENT_URI, // URI where data was modified
            null,   // No local observer
            false); // IMPORTANT: Do not sync to network
    // This sample doesn't support uploads, but if *your* code does, make sure you set
    // syncToNetwork=false in the line above to prevent duplicate syncs.
}
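The merge strategy described in the Javadoc above boils down to a common HashMap.remove idiom: index the incoming records by key, walk the existing records and remove each match from the map as it is reconciled, and whatever remains in the map afterwards must be inserted. The following is a minimal, self-contained sketch of that pattern; the Record type and field names are hypothetical and not part of the SyncAdapter sample.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;

public class MergeSketch {
    // Hypothetical record type standing in for a parsed feed entry
    static class Record {
        final String id;
        final String payload;
        Record(String id, String payload) { this.id = id; this.payload = payload; }
    }

    public static void main(String[] args) {
        List<Record> incoming = new ArrayList<Record>();
        incoming.add(new Record("1", "a"));
        incoming.add(new Record("2", "b"));

        List<Record> existing = new ArrayList<Record>();
        existing.add(new Record("2", "b-old"));
        existing.add(new Record("3", "c"));

        // 1. Index incoming records by id.
        HashMap<String, Record> incomingById = new HashMap<String, Record>();
        for (Record r : incoming) {
            incomingById.put(r.id, r);
        }

        // 2. Walk existing records: remove() both checks for a match and
        //    checks it off the map in one call; no match means the row is stale.
        for (Record local : existing) {
            Record match = incomingById.remove(local.id);
            if (match == null) {
                System.out.println("delete " + local.id);
            } else if (!match.payload.equals(local.payload)) {
                System.out.println("update " + local.id);
            }
        }

        // 3. Whatever survived in the map was never seen locally: insert it.
        for (Record fresh : incomingById.values()) {
            System.out.println("insert " + fresh.id);
        }
    }
}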
From source file:org.quartz.simpl.RAMJobStore.java
/**
 * <p>
 * Remove (delete) the <code>{@link org.quartz.Job}</code> with the given
 * name, and any <code>{@link org.quartz.Trigger}</code>s that reference
 * it.
 * </p>
 *
 * @param jobName
 *          The name of the <code>Job</code> to be removed.
 * @param groupName
 *          The group name of the <code>Job</code> to be removed.
 * @return <code>true</code> if a <code>Job</code> with the given name &
 *         group was found and removed from the store.
 */
public boolean removeJob(SchedulingContext ctxt, String jobName, String groupName) {
    String key = JobWrapper.getJobNameKey(jobName, groupName);

    boolean found = false;

    Trigger[] trigger = getTriggersForJob(ctxt, jobName, groupName);
    for (int i = 0; i < trigger.length; i++) {
        Trigger trig = trigger[i];
        this.removeTrigger(ctxt, trig.getName(), trig.getGroup());
        found = true;
    }
    synchronized (triggerLock) {
        found = (jobsByFQN.remove(key) != null) | found;
        if (found) {
            HashMap grpMap = (HashMap) jobsByGroup.get(groupName);
            if (grpMap != null) {
                grpMap.remove(jobName);
                if (grpMap.size() == 0) {
                    jobsByGroup.remove(groupName);
                }
            }
        }
    }
    return found;
}
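The RAMJobStore example illustrates another recurring remove idiom: entries live in a nested map keyed by group, and after removing the inner entry the group bucket itself is removed once it becomes empty. Below is a minimal sketch of that cleanup pattern under simplified assumptions; the class, map, and key names are illustrative and not the Quartz API.

import java.util.HashMap;

public class GroupMapSketch {
    public static void main(String[] args) {
        // group name -> (job name -> job detail)
        HashMap<String, HashMap<String, String>> jobsByGroup = new HashMap<String, HashMap<String, String>>();
        HashMap<String, String> reports = new HashMap<String, String>();
        reports.put("nightly", "runs at 02:00");
        jobsByGroup.put("REPORTS", reports);

        removeJob(jobsByGroup, "REPORTS", "nightly");
        System.out.println(jobsByGroup); // {} -- the empty group bucket was dropped too
    }

    static boolean removeJob(HashMap<String, HashMap<String, String>> jobsByGroup,
            String groupName, String jobName) {
        HashMap<String, String> grpMap = jobsByGroup.get(groupName);
        if (grpMap == null) {
            return false;
        }
        boolean found = grpMap.remove(jobName) != null;
        if (grpMap.isEmpty()) {
            // Drop the now-empty bucket so the outer map doesn't keep an empty map alive.
            jobsByGroup.remove(groupName);
        }
        return found;
    }
}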