List of usage examples for java.util.HashMap.remove(Object key), collected from open-source projects.
public V remove(Object key)
From source file: com.krawler.formbuilder.servlet.ReportBuilderDaoImpl.java
public String storeDashboardConf(HttpServletRequest request) throws ServiceException { String ret = "{'success': true}"; HashMap<String, String> arrParam = new HashMap<String, String>(); try {/*from w ww. j a v a2 s.c om*/ String temp = ""; ArrayList<FileItem> fi = new ArrayList<FileItem>(); boolean linksAddFlag = Boolean.parseBoolean(request.getParameter("linksAddFlag")); if (!linksAddFlag) { boolean fileUpload = false; parseRequest(request, arrParam, fi, fileUpload); temp = arrParam.get("grpjson"); mb_dashboard dash = (mb_dashboard) get(mb_dashboard.class, 1); HashMap idMap = new HashMap(); HashMap grpMap = new HashMap(); if (!StringUtil.isNullOrEmpty(temp)) { JSONArray grpArr = new JSONArray(temp); JSONArray grp = new JSONObject(getAllLinkGroups(request)).getJSONArray("data"); for (int i = 0; i < grp.length(); i++) { JSONObject tObj = grp.getJSONObject(i); grpMap.put(tObj.getString("groupid"), tObj.getString("groupname")); } for (int i = 0; i < grpArr.length(); i++) { JSONObject tObj = grpArr.getJSONObject(i); if (!grpMap.containsKey(tObj.getString("groupid"))) { com.krawler.esp.hibernate.impl.mb_linkgroup lg = new mb_linkgroup(); // lg.setGroupid(UUID.randomUUID().toString()); lg.setGrouptext(tObj.getString("groupname")); save(lg); idMap.put(tObj.getString("groupid"), lg.getGroupid()); } else { grpMap.remove(tObj.getString("groupid")); } } } temp = arrParam.get("linkjson"); String hql = "DELETE FROM " + PropsValues.PACKAGE_PATH + ".mb_dashlinks AS mb_dashlinks WHERE mb_dashlinks.dashboardid = ?"; executeUpdate(hql, new Object[] { dash }); if (!StringUtil.isNullOrEmpty(temp)) { JSONArray linkArr = new JSONArray(temp); for (int i = 0; i < linkArr.length(); i++) { JSONObject tObj = linkArr.getJSONObject(i); if (idMap.containsKey(tObj.getString("groupid"))) { tObj.put("groupid", idMap.get(tObj.getString("groupid"))); } storeDashboardLink(tObj); } } Object[] grpA = grpMap.keySet().toArray(); for (int i = 0; i < grpA.length; i++) { 
deleteLinkGroup(grpA[i].toString()); } temp = arrParam.get("portletjson"); hql = "DELETE FROM " + PropsValues.PACKAGE_PATH + ".mb_dashportlet AS mb_dashportlet WHERE mb_dashportlet.dashboardid = ?"; executeUpdate(hql, new Object[] { dash }); if (!StringUtil.isNullOrEmpty(temp)) { JSONArray portletArr = new JSONArray(temp); for (int cnt = 0; cnt < portletArr.length(); cnt++) { JSONObject tObj = portletArr.getJSONObject(cnt); com.krawler.esp.hibernate.impl.mb_dashportlet port = new mb_dashportlet(); port.setDashboardid(dash); port.setPortletid(UUID.randomUUID().toString()); mb_reportlist tempReport = (mb_reportlist) get(mb_reportlist.class, tObj.getString("moduleid")); port.setReportid(tempReport); port.setConfig(tObj.getString("colconfig")); port.setPortlettitle(tObj.getString("portlet")); save(port); } } if (fi.size() > 0) { com.krawler.esp.handlers.genericFileUpload uploader = new com.krawler.esp.handlers.genericFileUpload(); String destinationdir = PropsValues.STORE_PATH; uploader.uploadFile(fi.get(0), destinationdir, "logo"); if (!uploader.ErrorMsg.equals("")) { ret = "{'success':true,error:'" + uploader.ErrorMsg + "'}"; } } } else { temp = request.getParameter("linkjson"); if (!StringUtil.isNullOrEmpty(temp)) { JSONArray linkArr = new JSONArray(temp); for (int i = 0; i < linkArr.length(); i++) { JSONObject tObj = linkArr.getJSONObject(i); boolean duplicateName = checkDuplicateLinkName(tObj); if (!duplicateName) { storeDashboardLink(tObj); } else { ret = "{'success':false,'error':'Link name is already present for the same module.'}"; break; } } } } } catch (JSONException e) { logger.warn(e.getMessage(), e); throw ServiceException.FAILURE(e.getMessage(), e); } catch (Exception e) { logger.warn(e.getMessage(), e); throw ServiceException.FAILURE(e.getMessage(), e); } return ret; }
From source file: uk.ac.diamond.scisoft.analysis.rcp.inspector.InspectionTab.java
protected void populateCombos() { int cSize = combos.size() - comboOffset; HashMap<Integer, String> sAxes = getSelectedComboAxisNames(); for (int i = 0; i < cSize; i++) { Combo c = combos.get(i + comboOffset); c.removeAll();/*w ww.ja va2 s . c o m*/ PlotAxisProperty p = paxes.get(i + comboOffset); p.clear(); Label l = axisLabels.get(i + comboOffset); if (sAxes.size() == 0) { p.setInSet(false); c.setEnabled(false); c.setVisible(false); l.setVisible(false); if (itype == InspectorType.IMAGEXP) { // hack to change labels l = axisLabels.get(i + comboOffset - 1); l.setText(IMAGE_EXP_AXIS_LABEL); l.getParent().layout(); } break; } c.setEnabled(true); c.setVisible(true); l.setVisible(true); if (itype == InspectorType.IMAGEXP && l.getText().equals(IMAGE_EXP_AXIS_LABEL)) { l.setText(axes[i + comboOffset]); // reset label l.getParent().layout(); } ArrayList<Integer> keyList = new ArrayList<Integer>(sAxes.keySet()); Collections.sort(keyList); Integer lastKey = keyList.get(keyList.size() - 1); String a = sAxes.get(lastKey); // reverse order if (axes.length == 1) { // for 1D plots and 1D dataset table, remove single point axes int[] shape = dataset.getShape(); while (shape.length > lastKey && shape[lastKey] == 1) { lastKey--; } a = sAxes.get(lastKey); // reverse order for (int j : keyList) { String n = sAxes.get(j); p.put(j, n); if (shape.length > j && shape[j] != 1) c.add(n); } } else { for (int j : keyList) { String n = sAxes.get(j); p.put(j, n); c.add(n); } } c.setText(a); sAxes.remove(lastKey); p.setName(a, false); p.setInSet(true); } }
From source file: com.morphoss.acal.service.UpdateTimezones.java
private void refreshTimezoneData() { try {// w ww .j a v a2 s.c o m HashMap<String, Long> currentZones = new HashMap<String, Long>(); HashMap<String, Long> updatedZones = new HashMap<String, Long>(); HashMap<String, Long> insertedZones = new HashMap<String, Long>(); Cursor allZones = cr.query(Timezones.CONTENT_URI, new String[] { Timezones.TZID, Timezones.LAST_MODIFIED }, null, null, null); Long maxModified = 0L; for (allZones.moveToFirst(); !allZones.isAfterLast(); allZones.moveToNext()) { if (Constants.LOG_VERBOSE) Log.println(Constants.LOGV, TAG, "Found existing zone of '" + allZones.getString(0) + "' modified: " + AcalDateTime.fromMillis(allZones.getLong(1) * 1000L).toString()); currentZones.put(allZones.getString(0), allZones.getLong(1)); if (allZones.getLong(1) > maxModified) maxModified = allZones.getLong(1); } AcalDateTime mostRecentChange = AcalDateTime.getUTCInstance().setEpoch(maxModified); Log.println(Constants.LOGI, TAG, "Found " + allZones.getCount() + " existing timezones, most recent change on " + mostRecentChange.toString()); if (allZones.getCount() > 350 && mostRecentChange.after(AcalDateTime.getUTCInstance().addDays(-30))) { Log.println(Constants.LOGI, TAG, "Skipping update - our database is pretty recent"); return; } requestor.interpretUriString(tzUrl("list", null)); JSONObject root = requestor.doJsonRequest("GET", null, null, null); if (requestor.wasRedirected()) { Uri tzUri = Uri.parse(requestor.fullUrl()); String redirectedUrl = tzUri.getScheme() + "://" + tzUri.getAuthority() + tzUri.getPath(); if (Constants.debugTimeZone && Constants.LOG_DEBUG) Log.println(Constants.LOGD, TAG, "Redirected to Timezone Server at " + redirectedUrl); tzServerBaseUrl = redirectedUrl; AcalApplication.setPreferenceString(PrefNames.tzServerBaseUrl, redirectedUrl); } if (requestor.getStatusCode() >= 399) { Log.println(Constants.LOGI, TAG, "Bad response " + requestor.getStatusCode() + " from Timezone Server at " + tzUrl("list", null)); } if (root == null) { 
Log.println(Constants.LOGI, TAG, "No JSON from GET " + tzUrl("list", null)); return; } String tzid; String tzData = ""; long lastModified; StringBuilder localNames; StringBuilder aliases; ContentValues zoneValues = new ContentValues(); String tzDateStamp = root.getString("dtstamp"); JSONArray tzArray = root.getJSONArray("timezones"); for (int i = 0; i < tzArray.length(); i++) { JSONObject zoneNode = tzArray.getJSONObject(i); tzid = zoneNode.getString("tzid"); if (updatedZones.containsKey(tzid) || insertedZones.containsKey(tzid)) continue; if (Constants.debugTimeZone && Constants.LOG_DEBUG) Log.println(Constants.LOGD, TAG, "Working on " + tzid); lastModified = AcalDateTime.fromString(zoneNode.getString("last-modified")).getEpoch(); if (currentZones.containsKey(tzid) && currentZones.get(tzid) <= lastModified) { currentZones.remove(tzid); continue; } tzData = getTimeZone(tzid); if (tzData == null) continue; localNames = new StringBuilder(); try { JSONArray nameNodes = zoneNode.getJSONArray("local_names"); for (int j = 0; j < nameNodes.length(); j++) { if (localNames.length() > 0) localNames.append("\n"); localNames.append(nameNodes.getJSONObject(j).getString("lang")).append('~') .append(nameNodes.getJSONObject(j).getString("lname")); } } catch (JSONException je) { } aliases = new StringBuilder(); try { JSONArray aliasNodes = zoneNode.getJSONArray("aliases"); for (int j = 0; j < aliasNodes.length(); j++) { if (aliases.length() > 0) aliases.append("\n"); aliases.append(aliasNodes.getString(j)); } } catch (JSONException je) { } zoneValues.put(Timezones.TZID, tzid); zoneValues.put(Timezones.ZONE_DATA, tzData); zoneValues.put(Timezones.LAST_MODIFIED, lastModified); zoneValues.put(Timezones.TZ_NAMES, localNames.toString()); zoneValues.put(Timezones.TZID_ALIASES, aliases.toString()); Uri tzUri = Uri.withAppendedPath(Timezones.CONTENT_URI, "tzid/" + StaticHelpers.urlescape(tzid, false)); if (currentZones.containsKey(tzid)) { if (cr.update(tzUri, zoneValues, null, null) != 1) 
{ Log.e(TAG, "Failed update for TZID '" + tzid + "'"); } updatedZones.put(tzid, currentZones.get(tzid)); currentZones.remove(tzid); } else { if (cr.insert(tzUri, zoneValues) == null) Log.e(TAG, "Failed insert for TZID '" + tzid + "'"); insertedZones.put(tzid, currentZones.get(tzid)); } if (context.workWaiting()) { Log.println(Constants.LOGI, TAG, "Something is waiting - deferring timezone sync until later."); deferMe = true; break; } // Let other stuff have a chance Thread.sleep(350); } int removed = 0; if (currentZones.size() > 0) { StringBuilder s = new StringBuilder(); for (String tz : currentZones.keySet()) { if (s.length() > 0) s.append(','); s.append("'").append(tz).append("'"); } removed = cr.delete(Timezones.CONTENT_URI, Timezones.TZID + " IN (" + s + ")", null); } Log.println(Constants.LOGI, TAG, "Updated data for " + updatedZones.size() + " zones, added data for " + insertedZones.size() + " new zones, removed data for " + removed); } catch (Exception e) { Log.e(TAG, Log.getStackTraceString(e)); } }
From source file: com.b44t.messenger.NotificationsController.java
@SuppressLint("InlinedApi") private void showExtraNotifications(NotificationCompat.Builder notificationBuilder, boolean notifyAboutLast) { // TODO: support Android wear by calling this function above from showOrUpdateNotification if (Build.VERSION.SDK_INT < 18) { return;/* w w w . j a va2s . co m*/ } ArrayList<Long> sortedDialogs = new ArrayList<>(); HashMap<Long, ArrayList<MessageObject>> messagesByDialogs = new HashMap<>(); for (int a = 0; a < pushMessages.size(); a++) { MessageObject messageObject = pushMessages.get(a); long dialog_id = messageObject.getDialogId(); if ((int) dialog_id == 0) { continue; } ArrayList<MessageObject> arrayList = messagesByDialogs.get(dialog_id); if (arrayList == null) { arrayList = new ArrayList<>(); messagesByDialogs.put(dialog_id, arrayList); sortedDialogs.add(0, dialog_id); } arrayList.add(messageObject); } HashMap<Long, Integer> oldIdsWear = new HashMap<>(); oldIdsWear.putAll(wearNotificationsIds); wearNotificationsIds.clear(); HashMap<Long, Integer> oldIdsAuto = new HashMap<>(); oldIdsAuto.putAll(autoNotificationsIds); autoNotificationsIds.clear(); for (int b = 0; b < sortedDialogs.size(); b++) { long dialog_id = sortedDialogs.get(b); ArrayList<MessageObject> messageObjects = messagesByDialogs.get(dialog_id); int max_id = messageObjects.get(0).getId(); int max_date = messageObjects.get(0).messageOwner.date; TLRPC.Chat chat = null; TLRPC.User user = null; String name; if (dialog_id > 0) { user = MessagesController.getInstance().getUser((int) dialog_id); if (user == null) { continue; } } else { /*chat = MessagesController.getInstance().getChat(-(int)dialog_id); if (chat == null) {*/ continue; //} } TLRPC.FileLocation photoPath = null; if (AndroidUtilities.needShowPasscode(false) || UserConfig.isWaitingForPasscodeEnter) { name = mContext.getString(R.string.AppName); } else { if (chat != null) { name = chat.title; } else { name = UserObject.getUserName(user); } /*if (chat != null) { if (chat.photo != null && chat.photo.photo_small 
!= null && chat.photo.photo_small.volume_id != 0 && chat.photo.photo_small.local_id != 0) { photoPath = chat.photo.photo_small; } } else { if (user.photo != null && user.photo.photo_small != null && user.photo.photo_small.volume_id != 0 && user.photo.photo_small.local_id != 0) { photoPath = user.photo.photo_small; } }*/ } Integer notificationIdWear = oldIdsWear.get(dialog_id); if (notificationIdWear == null) { notificationIdWear = wearNotificationId++; } else { oldIdsWear.remove(dialog_id); } Integer notificationIdAuto = oldIdsAuto.get(dialog_id); if (notificationIdAuto == null) { notificationIdAuto = autoNotificationId++; } else { oldIdsAuto.remove(dialog_id); } NotificationCompat.CarExtender.UnreadConversation.Builder unreadConvBuilder = new NotificationCompat.CarExtender.UnreadConversation.Builder( name).setLatestTimestamp((long) max_date * 1000); Intent msgHeardIntent = new Intent(); msgHeardIntent.addFlags(Intent.FLAG_INCLUDE_STOPPED_PACKAGES); msgHeardIntent.setAction("com.b44t.messenger.ACTION_MESSAGE_HEARD"); msgHeardIntent.putExtra("dialog_id", dialog_id); msgHeardIntent.putExtra("max_id", max_id); PendingIntent msgHeardPendingIntent = PendingIntent.getBroadcast(ApplicationLoader.applicationContext, notificationIdAuto, msgHeardIntent, PendingIntent.FLAG_UPDATE_CURRENT); unreadConvBuilder.setReadPendingIntent(msgHeardPendingIntent); NotificationCompat.Action wearReplyAction = null; if (/*!ChatObject.isChannel(chat) &&*/ !AndroidUtilities.needShowPasscode(false) && !UserConfig.isWaitingForPasscodeEnter) { Intent msgReplyIntent = new Intent(); msgReplyIntent.addFlags(Intent.FLAG_INCLUDE_STOPPED_PACKAGES); msgReplyIntent.setAction("com.b44t.messenger.ACTION_MESSAGE_REPLY"); msgReplyIntent.putExtra("dialog_id", dialog_id); msgReplyIntent.putExtra("max_id", max_id); PendingIntent msgReplyPendingIntent = PendingIntent.getBroadcast( ApplicationLoader.applicationContext, notificationIdAuto, msgReplyIntent, PendingIntent.FLAG_UPDATE_CURRENT); RemoteInput 
remoteInputAuto = new RemoteInput.Builder(NotificationsController.EXTRA_VOICE_REPLY) .setLabel(mContext.getString(R.string.Reply)).build(); unreadConvBuilder.setReplyAction(msgReplyPendingIntent, remoteInputAuto); Intent replyIntent = new Intent(ApplicationLoader.applicationContext, WearReplyReceiver.class); replyIntent.putExtra("dialog_id", dialog_id); replyIntent.putExtra("max_id", max_id); PendingIntent replyPendingIntent = PendingIntent.getBroadcast(ApplicationLoader.applicationContext, notificationIdWear, replyIntent, PendingIntent.FLAG_UPDATE_CURRENT); RemoteInput remoteInputWear = new RemoteInput.Builder(EXTRA_VOICE_REPLY) .setLabel(mContext.getString(R.string.Reply)).build(); String replyToString; if (chat != null) { replyToString = String.format(mContext.getString(R.string.ReplyToGroup), name); } else { replyToString = String.format(mContext.getString(R.string.ReplyToContact), name); } wearReplyAction = new NotificationCompat.Action.Builder(R.drawable.ic_reply_icon, replyToString, replyPendingIntent).addRemoteInput(remoteInputWear).build(); } String text = ""; for (int a = messageObjects.size() - 1; a >= 0; a--) { MessageObject messageObject = messageObjects.get(a); String message = getStringForMessage(messageObject, ADD_GROUP | ADD_USER); if (message == null) { continue; } /*if (chat != null) { message = message.replace(" @ " + name, ""); } else { message = message.replace(name + ": ", "").replace(name + " ", ""); }*/ if (text.length() > 0) { text += "\n\n"; } text += message; unreadConvBuilder.addMessage(message); } Intent intent = new Intent(ApplicationLoader.applicationContext, LaunchActivity.class); intent.setAction("com.b44t.messenger.openchat" + Math.random() + Integer.MAX_VALUE); intent.setFlags(32768); if (chat != null) { intent.putExtra("chatId", chat.id); } else if (user != null) { intent.putExtra("userId", user.id); } PendingIntent contentIntent = PendingIntent.getActivity(ApplicationLoader.applicationContext, 0, intent, 
PendingIntent.FLAG_ONE_SHOT); NotificationCompat.WearableExtender wearableExtender = new NotificationCompat.WearableExtender(); if (wearReplyAction != null) { wearableExtender.addAction(wearReplyAction); } NotificationCompat.Builder builder = new NotificationCompat.Builder( ApplicationLoader.applicationContext).setContentTitle(name) .setSmallIcon(R.drawable.notification).setGroup("messages").setContentText(text) .setAutoCancel(true).setColor(Theme.ACTION_BAR_COLOR).setGroupSummary(false) .setContentIntent(contentIntent).extend(wearableExtender) .extend(new NotificationCompat.CarExtender() .setUnreadConversation(unreadConvBuilder.build())) .setCategory(NotificationCompat.CATEGORY_MESSAGE); /*if (photoPath != null) { BitmapDrawable img = ImageLoader.getInstance().getImageFromMemory(photoPath, null, "50_50"); if (img != null) { builder.setLargeIcon(img.getBitmap()); } }*/ notificationManager.notify(notificationIdWear, builder.build()); wearNotificationsIds.put(dialog_id, notificationIdWear); } for (HashMap.Entry<Long, Integer> entry : oldIdsWear.entrySet()) { notificationManager.cancel(entry.getValue()); } }
From source file: mondrian.olap.Util.java
public static <T> Set<T> newIdentityHashSetFake() { final HashMap<T, Boolean> map = new HashMap<T, Boolean>(); return new Set<T>() { public int size() { return map.size(); }/*from w w w. j a v a 2s. co m*/ public boolean isEmpty() { return map.isEmpty(); } public boolean contains(Object o) { return map.containsKey(o); } public Iterator<T> iterator() { return map.keySet().iterator(); } public Object[] toArray() { return map.keySet().toArray(); } public <T> T[] toArray(T[] a) { return map.keySet().toArray(a); } public boolean add(T t) { return map.put(t, Boolean.TRUE) == null; } public boolean remove(Object o) { return map.remove(o) == Boolean.TRUE; } public boolean containsAll(Collection<?> c) { return map.keySet().containsAll(c); } public boolean addAll(Collection<? extends T> c) { throw new UnsupportedOperationException(); } public boolean retainAll(Collection<?> c) { throw new UnsupportedOperationException(); } public boolean removeAll(Collection<?> c) { throw new UnsupportedOperationException(); } public void clear() { map.clear(); } }; }
From source file: com.dtolabs.rundeck.core.authorization.providers.TestYamlPolicy.java
public void testTypeRuleContextMatcherEqualsRule() { {/*from w w w . j a va 2 s .co m*/ //equality for single attribute 'name' final Object load = yaml.load("equals: \n" + " name: blah\n" + "allow: '*'"); assertTrue(load instanceof Map); final Map ruleSection = (Map) load; final YamlPolicy.TypeRuleContextMatcher typeRuleContext = new YamlPolicy.TypeRuleContextMatcher( ruleSection, 1); final HashMap<String, String> resmap = new HashMap<String, String>(); //false result for no match assertFalse(typeRuleContext.ruleMatchesEqualsSection(resmap, ruleSection)); resmap.put("name", "something"); assertFalse(typeRuleContext.ruleMatchesEqualsSection(resmap, ruleSection)); resmap.put("name", "blah"); assertTrue(typeRuleContext.ruleMatchesEqualsSection(resmap, ruleSection)); resmap.put("name", "ablahz"); assertFalse(typeRuleContext.ruleMatchesEqualsSection(resmap, ruleSection)); } { //equality for multiple attributes final Object load = yaml.load("equals: \n" + " name: blah\n" + " something: zelse\n" + "allow: '*'"); assertTrue(load instanceof Map); final Map ruleSection = (Map) load; final YamlPolicy.TypeRuleContextMatcher typeRuleContext = new YamlPolicy.TypeRuleContextMatcher( ruleSection, 1); final HashMap<String, String> resmap = new HashMap<String, String>(); //false result for no match assertFalse(typeRuleContext.ruleMatchesEqualsSection(resmap, ruleSection)); resmap.put("name", "something"); assertFalse(typeRuleContext.ruleMatchesEqualsSection(resmap, ruleSection)); resmap.put("name", "ablahz"); assertFalse(typeRuleContext.ruleMatchesEqualsSection(resmap, ruleSection)); resmap.put("name", "blah"); assertFalse(typeRuleContext.ruleMatchesEqualsSection(resmap, ruleSection)); resmap.put("something", "else"); assertFalse(typeRuleContext.ruleMatchesEqualsSection(resmap, ruleSection)); resmap.put("something", "zelse"); assertTrue(typeRuleContext.ruleMatchesEqualsSection(resmap, ruleSection)); resmap.put("name", "ablahz"); 
assertFalse(typeRuleContext.ruleMatchesEqualsSection(resmap, ruleSection)); resmap.remove("name"); assertFalse(typeRuleContext.ruleMatchesEqualsSection(resmap, ruleSection)); } }
From source file: com.dell.asm.asmcore.asmmanager.util.firmwarerepository.FirmwareUtil.java
/** * Returns matching software components within a specified firmware * repository, ignoring components of type 'Driver'. See: ASM-3756 * @param componentId The component id//w ww .j a v a 2 s . c om * @param deviceId The device id * @param subDeviceId The sub-device id * @param vendorId The vendor id * @param subVendorId The sub-vendor id * @param parentRepo The repository to check for matches * @return A list of matching components. */ public List<SoftwareComponent> getSoftwareComponents(String componentId, String deviceId, String subDeviceId, String vendorId, String subVendorId, FirmwareRepositoryEntity parentRepo, String systemId, SourceType sourceType, String operatingSystem, boolean loadDriverType) { logger.debug("getsoftware components with componentid: " + componentId + " deviceid: " + deviceId + " subdeviceid: " + subDeviceId + " vendorid: " + vendorId + " subvendorid: " + subVendorId + " operatingSystem:" + operatingSystem); final List<SoftwareComponent> components = new ArrayList<>(); try { final HashMap<String, Object> attributeMap = new HashMap<>(); if (parentRepo != null) { // We need at least one of the following componentId, deviceId, subDeviceId, vendorId, subVendorId, systemId if (StringUtils.isBlank(componentId) && StringUtils.isBlank(deviceId) && StringUtils.isBlank(subDeviceId) && StringUtils.isBlank(vendorId) && StringUtils.isBlank(subVendorId) && StringUtils.isBlank(systemId)) { return null; } attributeMap.put("firmwareRepositoryEntity", parentRepo); attributeMap.put("componentId", componentId); attributeMap.put("deviceId", deviceId); attributeMap.put("subDeviceId", subDeviceId); attributeMap.put("vendorId", vendorId); attributeMap.put("subVendorId", subVendorId); attributeMap.put("operatingSystem", operatingSystem); // Special case brought on by componentId not being present in // the wsman command response for RAID and NICs // The alternative is to hard code the componentid in the // discovery // Spectre has similar logic. 
if (StringUtils.isNotEmpty(deviceId) && StringUtils.isNotEmpty(subDeviceId) && StringUtils.isNotEmpty(vendorId) && StringUtils.isNotEmpty(subVendorId) && StringUtils.isEmpty(componentId)) { attributeMap.remove("componentId"); } List<SoftwareComponentEntity> entities = null; if (SourceType.Device == sourceType) { entities = this.genericDAO.getForEquals(attributeMap, systemId); } else { // it's a SourceType.Catalog and systemId does not exist entities = this.genericDAO.getForEquals(attributeMap, (String) null); } if (entities != null) { for (final SoftwareComponentEntity entity : entities) { final String componentType = entity.getComponentType(); // We ONLY ignore if it's Driver and SourceType.Device (versus Catalog) if (!loadDriverType && StringUtils.equalsIgnoreCase(componentType, COMPONENT_TYPE_DRIVER) && SourceType.Device == sourceType) { // Ignore software_components of type 'Driver' logger.trace("Ignoring Driver softwareComponent" + entity.getId()); continue; } logger.trace("Found software component " + entity.getId() + " for firmware repo : " + parentRepo.getName() + " " + parentRepo.getId()); components.add(entity.getSoftwareComponent()); logger.trace("Found softwareComponent " + entity.getId() + " with dellversion " + entity.getDellVersion() + " vendor version " + entity.getVendorVersion()); } } } } catch (LocalizedWebApplicationException e) { logger.error("LocalizedWebApplicationException while retrieving software components", e); throw e; } catch (Exception e) { logger.error("LocalizedWebApplicationException while retrieving software components", e); throw new LocalizedWebApplicationException(Response.Status.INTERNAL_SERVER_ERROR, AsmManagerMessages.internalError()); } if (CollectionUtils.isNotEmpty(components) && components.size() > 1) { Collections.sort(components, componentVersionComparator); } return components; }
From source file: eu.edisonproject.training.wsd.BabelNet.java
private Set<Term> babelNetDisambiguation(String language, String lemma, Set<String> ngarms) { if (ngarms.isEmpty()) { return null; }/*w w w . jav a 2s. c o m*/ if (ngarms.size() == 1 && ngarms.iterator().next().length() <= 1) { return null; } HashMap<CharSequence, Double> idsMap = new HashMap<>(); Map<CharSequence, Term> termMap = new HashMap<>(); Set<Term> terms = new HashSet<>(); int count = 0; int breaklimit = 1000; int oneElementlimit = 65; int difflimit = 60; Double persent; for (String n : ngarms) { if (n.length() <= 1) { continue; } count++; if (idsMap.size() == 1 && count > oneElementlimit) { // Double score = idsMap.values().iterator().next(); // if (score >= 10) { break; // } } if ((count % 2) == 0 && idsMap.size() >= 2 && count > difflimit) { ValueComparator bvc = new ValueComparator(idsMap); TreeMap<CharSequence, Double> sorted_map = new TreeMap(bvc); sorted_map.putAll(idsMap); Iterator<CharSequence> iter = sorted_map.keySet().iterator(); Double first = idsMap.get(iter.next()); Double second = idsMap.get(iter.next()); persent = first / (first + second); if (persent > 0.65) { break; } } if (count > breaklimit) { break; } String clearNg = n.replaceAll("_", " "); if (clearNg == null) { continue; } if (clearNg.startsWith(" ")) { clearNg = clearNg.replaceFirst(" ", ""); } if (clearNg.endsWith(" ")) { clearNg = clearNg.substring(0, clearNg.length() - 1); } Pair<Term, Double> termPair = null; try { termPair = babelNetDisambiguation(language, lemma, clearNg); } catch (Exception ex) { if (ex.getMessage() != null && ex.getMessage().contains("Your key is not valid")) { try { termPair = babelNetDisambiguation(language, lemma, clearNg); } catch (Exception ex1) { // LOGGER.log(Level.WARNING, ex1, null); } } else { LOGGER.log(Level.WARNING, null, ex); } } if (termPair != null) { termMap.put(termPair.first.getUid(), termPair.first); Double score; if (idsMap.containsKey(termPair.first.getUid())) { score = idsMap.get(termPair.first.getUid()); // score++; score += 
termPair.second; } else { // score = 1.0; score = termPair.second; } idsMap.put(termPair.first.getUid(), score); } } if (!idsMap.isEmpty()) { ValueComparator bvc = new ValueComparator(idsMap); TreeMap<CharSequence, Double> sorted_map = new TreeMap(bvc); sorted_map.putAll(idsMap); count = 0; Double firstScore = idsMap.get(sorted_map.firstKey()); terms.add(termMap.get(sorted_map.firstKey())); idsMap.remove(sorted_map.firstKey()); for (CharSequence tvID : sorted_map.keySet()) { if (count >= 1) { Double secondScore = idsMap.get(tvID); persent = secondScore / (firstScore + secondScore); if (persent > 0.2) { terms.add(termMap.get(tvID)); } if (count >= 2) { break; } } count++; } return terms; } return null; }
From source file: de.tum.bgu.msm.syntheticPopulationGenerator.kagawa.SyntheticPopJP.java
private void generateHouseholdsPersonsDwellings() { //Generate the synthetic population using Monte Carlo (select the households according to the weight) //Once the household is selected, all the characteristics of the household will be copied (including the household members) logger.info(" Starting to generate households and persons."); //List of households of the micro data int previousHouseholds = 0; int previousPersons = 0; //Define income distribution double incomeShape = ResourceUtil.getDoubleProperty(rb, PROPERTIES_INCOME_GAMMA_SHAPE); double incomeRate = ResourceUtil.getDoubleProperty(rb, PROPERTIES_INCOME_GAMMA_RATE); double[] incomeProbability = ResourceUtil.getDoubleArray(rb, PROPERTIES_INCOME_GAMMA_PROBABILITY); GammaDistributionImpl gammaDist = new GammaDistributionImpl(incomeShape, 1 / incomeRate); //Create a map to store the household IDs by municipality HashMap<Integer, HashMap<Integer, Integer>> householdByMunicipality = new HashMap<>(); generateCountersForValidation();//from ww w . j a va 2s . 
c om RealEstateDataManager realEstate = dataContainer.getRealEstateData(); HouseholdDataManager householdDataManager = dataContainer.getHouseholdData(); HouseholdFactory householdFactory = HouseholdUtil.getFactory(); regionsforFrequencyMatrix = SiloUtil.readCSVfile(rb.getString(PROPERTIES_ATRIBUTES_ZONAL_DATA)); regionsforFrequencyMatrix.buildIndex(regionsforFrequencyMatrix.getColumnPosition("V1")); householdsForFrequencyMatrix = new HashMap<>(); for (int i = 1; i <= microDataDwelling.getRowCount(); i++) { int v2Zone = (int) microDataDwelling.getValueAt(i, "PtResCode"); int ddID = (int) microDataDwelling.getValueAt(i, "id"); if (householdsForFrequencyMatrix.containsKey(v2Zone)) { householdsForFrequencyMatrix.get(v2Zone).put(ddID, 1); } else { HashMap<Integer, Integer> map = new HashMap<>(); map.put(ddID, 1); householdsForFrequencyMatrix.put(v2Zone, map); } } //Selection of households, persons, jobs and dwellings per municipality for (int municipality = 0; municipality < cityID.length; municipality++) { logger.info(" Municipality " + cityID[municipality] + ". 
Starting to generate households."); //-----------***** Data preparation *****------------------------------------------------------------------- //Create local variables to avoid accessing to the same variable on the parallel processing int municipalityID = cityID[municipality]; int v2zone = (int) regionsforFrequencyMatrix.getIndexedValueAt(municipalityID, "V2"); if (householdsForFrequencyMatrix.containsKey(v2zone)) { String[] attributesHouseholdIPU = attributesMunicipality; TableDataSet rasterCellsMatrix = cellsMatrix; TableDataSet microHouseholds = microDataHousehold; TableDataSet microPersons = microDataPerson; TableDataSet microDwellings = microDataDwelling; microHouseholds.buildIndex(microHouseholds.getColumnPosition("id")); microDwellings.buildIndex(microDwellings.getColumnPosition("id")); int totalHouseholds = (int) marginalsMunicipality.getIndexedValueAt(municipalityID, "hhTotal"); int[] agePerson = ageBracketsPerson; int[] levelEdu = new int[4]; double[] probEdu = new double[4]; for (int i = 0; i < levelEdu.length; i++) { probEdu[i] = marginalsMunicipality.getIndexedValueAt(municipalityID, "Ed_" + i); } //Probability of floor size for vacant dwellings double[] sizeDistribution = new double[sizeBracketsDwelling.length]; for (int row = 0; row < sizeBracketsDwelling.length; row++) { String name = "HA_LT_" + sizeBracketsDwelling[row] + "sqm"; sizeDistribution[row] = marginalsMunicipality.getIndexedValueAt(municipalityID, name); } //Probability for year and building size for vacant dwellings double[] yearDistribution = new double[yearBracketsDwelling.length]; for (int row = 0; row < yearBracketsDwelling.length; row++) { String name = "HY_" + yearBracketsDwelling[row]; yearDistribution[row] = marginalsMunicipality.getIndexedValueAt(municipalityID, name) / totalHouseholds; } //Average price per sqm of the zone according to building type float[] averagePriceDistribution = new float[typeBracketsDwelling.length]; for (int row = 0; row < typeBracketsDwelling.length; 
row++) { String name = "HPrice_" + typeBracketsDwelling[row]; yearDistribution[row] = marginalsMunicipality.getIndexedValueAt(municipalityID, name); } HashMap<Integer, Integer> hhs = householdsForFrequencyMatrix.get(v2zone); int[] hhFromV2 = hhs.keySet().stream().mapToInt(Integer::intValue).toArray(); HashMap<Integer, Integer> generatedHouseholds = new HashMap<>(); //obtain the raster cells of the municipality and their weight within the municipality int[] tazInCity = cityTAZ.get(municipalityID); double[] probTaz = new double[tazInCity.length]; double tazRemaining = 0; for (int i = 0; i < tazInCity.length; i++) { probTaz[i] = rasterCellsMatrix.getIndexedValueAt(tazInCity[i], "Population"); tazRemaining = tazRemaining + probTaz[i]; } double hhRemaining = 0; HashMap<Integer, Double> prob = new HashMap<>(); for (int row = 0; row < hhFromV2.length; row++) { double value = weightsTable.getIndexedValueAt(hhFromV2[row], Integer.toString(municipalityID)); hhRemaining = hhRemaining + value; prob.put(hhFromV2[row], value); } //marginals for the municipality int hhPersons = 0; int hhTotal = 0; int quartersTotal = 0; int id = 0; //for all the households that are inside the municipality (we will match perfectly the number of households. The total population will vary compared to the marginals.) 
for (int row = 0; row < totalHouseholds; row++) { //select the household to copy from the micro data(with replacement) double[] probability = prob.values().stream().mapToDouble(Double::doubleValue).toArray(); int[] hhIds = prob.keySet().stream().mapToInt(Integer::intValue).toArray(); int selectedHh = select(probability, hhIds, hhRemaining)[0]; if (prob.get(selectedHh) > 1) { prob.put(selectedHh, prob.get(selectedHh) - 1); hhRemaining = hhRemaining - 1; } else { hhRemaining = hhRemaining - prob.get(selectedHh); prob.remove(selectedHh); } //Select the taz to allocate the household (without replacement) int[] recordsCell = select(probTaz, tazInCity, tazRemaining); int selectedTAZ = recordsCell[0]; //copy the private household characteristics int householdSize = (int) microHouseholds.getIndexedValueAt(selectedHh, "HHsize"); int householdCars = Math.min((int) microHouseholds.getIndexedValueAt(selectedHh, "N_Car"), 3); id = householdDataManager.getNextHouseholdId(); int newDdId = RealEstateDataManager.getNextDwellingId(); Household household = householdFactory.createHousehold(id, newDdId, householdCars); //(int id, int dwellingID, int homeZone, int hhSize, int autos) householdDataManager.addHousehold(household); hhTotal++; //copy the household members characteristics PersonFactory factory = PersonUtils.getFactory(); for (int rowPerson = 0; rowPerson < householdSize; rowPerson++) { int idPerson = householdDataManager.getNextPersonId(); int personCounter = (int) microHouseholds.getIndexedValueAt(selectedHh, "firstPerson") + rowPerson; int age = (int) microPersons.getValueAt(personCounter, "age"); Gender gender = Gender.valueOf((int) microDataPerson.getValueAt(personCounter, "gender")); Occupation occupation = Occupation.UNEMPLOYED; int jobType = 1; if ((int) microDataPerson.getValueAt(personCounter, "occupation") == 1) { occupation = Occupation.EMPLOYED; if ((int) microDataPerson.getValueAt(personCounter, "jobType") == 1) { jobType = 1; } else if ((int) 
microDataPerson.getValueAt(personCounter, "jobType") == 2) { jobType = 2; } else { jobType = 3; } } int income = 0; int education = 0; if (age > 15) { education = SiloUtil.select(probEdu, levelEdu); try { income = (int) translateIncome((int) Math.random() * 10, incomeProbability, gammaDist) * 12; //convert monthly income to yearly income } catch (MathException e) { e.printStackTrace(); } } Person pers = factory.createPerson(idPerson, age, gender, Race.white, occupation, null, 0, income); //(int id, int hhid, int age, int gender, Race race, int occupation, int workplace, int income) householdDataManager.addPerson(pers); householdDataManager.addPersonToHousehold(pers, household); jobTypeByWorker.put(pers, jobType); PersonRole role = PersonRole.CHILD; //default value = child if ((int) microPersons.getValueAt(personCounter, "personRole") == 1) { //the person is single role = PersonRole.SINGLE; } else if ((int) microPersons.getValueAt(personCounter, "personRole") == 2) { // the person is married role = PersonRole.MARRIED; } pers.setRole(role); pers.setNationality(Nationality.GERMAN); boolean license = false; if (microPersons.getValueAt(personCounter, "DrivLicense") == 1) { license = true; } pers.setDriverLicense(license); pers.setSchoolType((int) microPersons.getValueAt(personCounter, "school")); hhPersons++; //counterMunicipality = updateCountersPerson(pers, counterMunicipality, municipality,ageBracketsPerson); } //counterMunicipality = updateCountersHousehold(household, counterMunicipality, municipality); //Copy the dwelling of that household int bedRooms = 1; //Not on the micro data int year = select(yearDistribution, yearBracketsDwelling)[0]; //the category int floorSpace = select(sizeDistribution, sizeBracketsDwelling)[0]; int usage = (int) microDwellings.getIndexedValueAt(selectedHh, "H_"); int buildingSize = (int) microDwellings.getIndexedValueAt(selectedHh, "ddT_"); DefaultDwellingTypeImpl ddType = translateDwellingType(buildingSize); int quality = 1; //depend 
on year built and type of heating year = selectDwellingYear(year); //convert from year class to actual 4-digit year int price = estimatePrice(ddType, floorSpace); Dwelling dwell = DwellingUtils.getFactory().createDwelling(newDdId, selectedTAZ, null, id, ddType, bedRooms, quality, price, 0, year); realEstate.addDwelling(dwell); dwell.setFloorSpace(floorSpace); dwell.setUsage(DwellingUsage.valueOf(usage)); dwell.setBuildingSize(buildingSize); generatedHouseholds.put(dwell.getId(), 1); } int households = householdDataManager.getHighestHouseholdIdInUse() - previousHouseholds; int persons = householdDataManager.getHighestPersonIdInUse() - previousPersons; previousHouseholds = householdDataManager.getHighestHouseholdIdInUse(); previousPersons = householdDataManager.getHighestPersonIdInUse(); //Consider if I need to add also the errors from other attributes. They must be at the marginals file, or one extra file //For county level they should be calculated on a next step, outside this loop. float averageError = 0f; /*for (int attribute = 1; attribute < attributesHouseholdIPU.length; attribute++){ float error = Math.abs((counterMunicipality.getIndexedValueAt(municipalityID,attributesHouseholdIPU[attribute]) - marginalsMunicipality.getIndexedValueAt(municipalityID,attributesHouseholdIPU[attribute])) / marginalsMunicipality.getIndexedValueAt(municipalityID,attributesHouseholdIPU[attribute])); errorMunicipality.setIndexedValueAt(municipalityID,attributesHouseholdIPU[attribute],error); averageError = averageError + error; } averageError = averageError / (1 + attributesHouseholdIPU.length) * 100;*/ householdByMunicipality.put(municipalityID, generatedHouseholds); logger.info(" Municipality " + municipalityID + ". Generated " + hhPersons + " persons in " + hhTotal + " households. 
Average error of " + averageError + " %."); } else { logger.info(" Municipality " + municipalityID + " has no TAZ assigned."); } } int households = householdDataManager.getHighestHouseholdIdInUse(); int persons = householdDataManager.getHighestPersonIdInUse(); logger.info(" Finished generating households and persons. A population of " + persons + " persons in " + households + " households was generated."); //Vacant dwellings-------------------------------------------- //They have similar characteristics to the dwellings that are occupied (assume that there is no difference between the occupied and vacant dwellings in terms of quality) int vacantCounter = 0; for (int municipality = 0; municipality < cityID.length; municipality++) { logger.info(" Municipality " + cityID[municipality] + ". Starting to generate vacant dwellings."); int municipalityID = cityID[municipality]; int vacantDwellings = (int) marginalsMunicipality.getIndexedValueAt(cityID[municipality], "dd_Vacant"); TableDataSet rasterCellsMatrix = cellsMatrix; int[] occupiedDwellings = householdByMunicipality.get(municipalityID).keySet().stream() .mapToInt(Integer::intValue).toArray(); //obtain the raster cells of the municipality and their weight within the municipality int[] tazInCity = cityTAZ.get(municipalityID); double[] probTaz = new double[tazInCity.length]; double sumProbTaz = 0; for (int i = 0; i < tazInCity.length; i++) { probTaz[i] = rasterCellsMatrix.getIndexedValueAt(tazInCity[i], "Population"); sumProbTaz = sumProbTaz + probTaz[i]; } //Select the vacant dwelling and copy characteristics for (int row = 0; row < vacantDwellings; row++) { //Allocation int ddTAZ = select(probTaz, tazInCity, sumProbTaz)[0]; // I allocate vacant dwellings using the same proportion as occupied dwellings. 
//Select one occupied dwelling to copy int dd = selectEqualProbability(occupiedDwellings)[0]; //Copy characteristics int newDdId = realEstate.getNextDwellingId(); Dwelling ddToCopy = realEstate.getDwelling(dd); int bedRooms = ddToCopy.getBedrooms(); int price = ddToCopy.getPrice(); int quality = ddToCopy.getQuality(); int year = ddToCopy.getYearBuilt(); DwellingType type = ddToCopy.getType(); //using always type MF234 int floorSpaceDwelling = ddToCopy.getFloorSpace(); Dwelling dwell = DwellingUtils.getFactory().createDwelling(newDdId, ddTAZ, null, -1, DefaultDwellingTypeImpl.MF234, bedRooms, quality, price, 0, year); dwell.setUsage(DwellingUsage.VACANT); //vacant dwelling = 3; and hhID is equal to -1 dwell.setFloorSpace(floorSpaceDwelling); vacantCounter++; } logger.info(" The number of vacant dwellings is: " + vacantCounter); } //Write the files for all municipalities String name = ("microData/interimFiles/totalsSynPop.csv"); SiloUtil.writeTableDataSet(counterMunicipality, name); String name1 = ("microData/interimFiles/errorsSynPop.csv"); SiloUtil.writeTableDataSet(errorMunicipality, name1); }
From source file:org.apache.padaf.xmpbox.parser.XMPDocumentBuilder.java
/**
 * Treat each rdf:Description (which must represent a schema), instanciate
 * class representation of this schema and add it to metadata
 *
 * @param metadata
 *            Metadata to attach new elements
 * @throws XmpParsingException
 *             When element expected not found
 * @throws XMLStreamException
 *             When error during reading the rest of xmp stream
 * @throws XmpSchemaException
 *             When instancing schema object failed or in PDF/A Extension
 *             case, if its namespace miss
 * @throws XmpUnknownValueTypeException
 *             When ValueType found not correspond to basic type and not has
 *             been declared in current schema
 * @throws XmpExpectedRdfAboutAttribute
 *             When rdf:Description not contains rdf:about attribute
 * @throws BadFieldValueException
 *             When a bad value found in Schema description content
 */
protected void parseDescription(XMPMetadata metadata) throws XmpParsingException, XMLStreamException,
        XmpSchemaException, XmpUnknownValueTypeException, XmpExpectedRdfAboutAttribute, BadFieldValueException {
    // The reader cursor is assumed to be positioned on an rdf:Description
    // start element; all namespace lookups below read from that element.
    nsMap.resetComplexBasicTypesDeclarationInSchemaLevel();
    int cptNS = reader.get().getNamespaceCount();
    // Collect all prefix -> URI declarations of this rdf:Description, and
    // register any complex basic types declared at schema level.
    HashMap<String, String> namespaces = new HashMap<String, String>();
    for (int i = 0; i < cptNS; i++) {
        namespaces.put(reader.get().getNamespacePrefix(i), reader.get().getNamespaceURI(i));
        if (nsMap.isComplexBasicTypes(reader.get().getNamespaceURI(i))) {
            nsMap.setComplexBasicTypesDeclarationForLevelSchema(reader.get().getNamespaceURI(i),
                    reader.get().getNamespacePrefix(i));
        }
    }
    // Different treatment for PDF/A Extension schema: it must declare the
    // pdfaExtension, pdfaProperty and pdfaSchema prefixes with their exact URIs.
    if (namespaces.containsKey(PDFAExtensionSchema.PDFAEXTENSION)) {
        if (namespaces.containsKey(PDFAExtensionSchema.PDFAPROPERTY)
                && namespaces.containsKey(PDFAExtensionSchema.PDFASCHEMA)) {
            if (namespaces.containsValue(PDFAExtensionSchema.PDFAEXTENSIONURI)
                    && namespaces.containsValue(PDFAExtensionSchema.PDFAPROPERTYURI)
                    && namespaces.containsValue(PDFAExtensionSchema.PDFASCHEMAURI)) {
                PDFAExtensionSchema schema = metadata.createAndAddPDFAExtensionSchemaWithNS(namespaces);
                treatDescriptionAttributes(metadata, schema);
                parseExtensionSchema(schema, metadata);
            } else {
                throw new XmpUnexpectedNamespaceURIException(
                        "Unexpected namespaceURI in PDFA Extension Schema encountered");
            }
        } else {
            throw new XmpUnexpectedNamespacePrefixException(
                    "Unexpected namespace Prefix in PDFA Extension Schema");
        }
    } else {
        // Regular schema: scan the declared namespaces in order until one maps
        // to a known schema object.
        int c = 0;
        String namespaceUri = reader.get().getNamespaceURI(c);
        String namespacePrefix = reader.get().getNamespacePrefix(c);
        c++;
        XMPSchema schema = nsMap.getAssociatedSchemaObject(metadata, namespaceUri, namespacePrefix);
        while (c < reader.get().getNamespaceCount() && schema == null) {
            // try next declared namespace
            namespaceUri = reader.get().getNamespaceURI(c);
            namespacePrefix = reader.get().getNamespacePrefix(c);
            schema = nsMap.getAssociatedSchemaObject(metadata, namespaceUri, namespacePrefix);
            c++;
        }
        if (schema != null) {
            // Known schema: its own prefix needs no xmlns attribute below.
            namespaces.remove(namespacePrefix);
        } else {
            // Unknown namespace: fall back to a default schema for it.
            schema = metadata.createAndAddDefaultSchema(namespacePrefix, namespaceUri);
        }
        // Re-declare the remaining namespaces as xmlns attributes on the schema.
        // NOTE(review): the loop starts at i = 1, skipping declaration 0 —
        // presumably because index 0 is the schema's own namespace; confirm.
        for (int i = 1; i < cptNS; i++) {
            schema.setAttribute(new Attribute(XMPSchema.NS_NAMESPACE, "xmlns",
                    reader.get().getNamespacePrefix(i), reader.get().getNamespaceURI(i)));
        }
        treatDescriptionAttributes(metadata, schema);
        // Parse every child property element until this description's end tag.
        while (reader.get().nextTag() == XMLStreamReader.START_ELEMENT) {
            parseProperty(schema, metadata);
        }
    }
}