List of usage examples for java.util.HashMap clear()
public void clear()
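HashMap.clear() removes all mappings from the map, leaving it empty (the backing table is retained, not shrunk), which makes it handy for reusing one map instance across loop iterations instead of allocating a new one. Below is a minimal, self-contained sketch of that reuse pattern; the class name and data are illustrative, not taken from any of the examples that follow:

    import java.util.HashMap;
    import java.util.Map;

    public class ClearExample {
        public static void main(String[] args) {
            Map<String, Integer> counts = new HashMap<>();
            String[][] batches = { { "a", "b", "a" }, { "c", "c" } };
            for (String[] batch : batches) {
                for (String key : batch) {
                    counts.merge(key, 1, Integer::sum); // tally keys within the current batch
                }
                System.out.println(counts); // first batch: {a=2, b=1} (iteration order may vary)
                counts.clear();             // empty the map so the next batch starts fresh
            }
            System.out.println(counts.isEmpty()); // true
        }
    }

Most of the examples below follow the same idea: a HashMap is filled, consumed, and then cleared so the same instance can be reused for the next round of work.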
From source file:forge.game.combat.Combat.java
public void dealAssignedDamage() {
    // This function handles both Regular and First Strike combat assignment
    final HashMap<Card, Integer> defMap = defendingDamageMap;
    final HashMap<GameEntity, CardCollection> wasDamaged = new HashMap<GameEntity, CardCollection>();
    final Map<Card, Integer> damageDealtThisCombat = new HashMap<Card, Integer>();

    for (final Entry<Card, Integer> entry : defMap.entrySet()) {
        GameEntity defender = getDefenderByAttacker(entry.getKey());
        if (defender instanceof Player) { // player
            if (((Player) defender).addCombatDamage(entry.getValue(), entry.getKey())) {
                if (wasDamaged.containsKey(defender)) {
                    wasDamaged.get(defender).add(entry.getKey());
                } else {
                    CardCollection l = new CardCollection();
                    l.add(entry.getKey());
                    wasDamaged.put(defender, l);
                }
                damageDealtThisCombat.put(entry.getKey(), entry.getValue());
            }
        } else if (defender instanceof Card) { // planeswalker
            if (((Card) defender).getController().addCombatDamage(entry.getValue(), entry.getKey())) {
                if (wasDamaged.containsKey(defender)) {
                    wasDamaged.get(defender).add(entry.getKey());
                } else {
                    CardCollection l = new CardCollection();
                    l.add(entry.getKey());
                    wasDamaged.put(defender, l);
                }
            }
        }
    }

    // this can be much better below here...
    final CardCollection combatants = new CardCollection();
    combatants.addAll(getAttackers());
    combatants.addAll(getAllBlockers());
    combatants.addAll(getDefendingPlaneswalkers());

    Card c;
    for (int i = 0; i < combatants.size(); i++) {
        c = combatants.get(i);
        // if no assigned damage to resolve, move to next
        if (c.getTotalAssignedDamage() == 0) {
            continue;
        }

        final Map<Card, Integer> assignedDamageMap = c.getAssignedDamageMap();
        final HashMap<Card, Integer> damageMap = new HashMap<Card, Integer>();

        for (final Entry<Card, Integer> entry : assignedDamageMap.entrySet()) {
            final Card crd = entry.getKey();
            damageMap.put(crd, entry.getValue());
            if (entry.getValue() > 0) {
                if (damageDealtThisCombat.containsKey(crd)) {
                    damageDealtThisCombat.put(crd, damageDealtThisCombat.get(crd) + entry.getValue());
                } else {
                    damageDealtThisCombat.put(crd, entry.getValue());
                }
            }
        }
        c.addCombatDamage(damageMap);

        damageMap.clear();
        c.clearAssignedDamage();
    }

    // Run triggers
    for (final GameEntity ge : wasDamaged.keySet()) {
        final HashMap<String, Object> runParams = new HashMap<String, Object>();
        runParams.put("DamageSources", wasDamaged.get(ge));
        runParams.put("DamageTarget", ge);
        ge.getGame().getTriggerHandler().runTrigger(TriggerType.CombatDamageDoneOnce, runParams, false);
    }

    // This was deeper before, but that resulted in the stack entry acting like before.
    // when ... deals combat damage to one or more
    for (final Card damageSource : dealtDamageTo.keySet()) {
        final HashMap<String, Object> runParams = new HashMap<String, Object>();
        int dealtDamage = damageDealtThisCombat.containsKey(damageSource)
                ? damageDealtThisCombat.get(damageSource) : 0;
        runParams.put("DamageSource", damageSource);
        runParams.put("DamageTargets", dealtDamageTo.get(damageSource));
        runParams.put("DamageAmount", dealtDamage);
        damageSource.getGame().getTriggerHandler().runTrigger(TriggerType.DealtCombatDamageOnce, runParams,
                false);
    }

    dealtDamageToThisCombat.putAll(dealtDamageTo);
    dealtDamageTo.clear();
}
From source file:ddf.catalog.impl.operations.OperationsCrudSupport.java
void commitAndCleanup(StorageRequest storageRequest, Optional<String> historianTransactionKey,
        HashMap<String, Path> tmpContentPaths) {
    if (storageRequest != null) {
        try {
            sourceOperations.getStorage().commit(storageRequest);
            historianTransactionKey.ifPresent(historian::commit);
        } catch (StorageException e) {
            LOGGER.error("Unable to commit content changes for id: {}", storageRequest.getId(), e);
            try {
                sourceOperations.getStorage().rollback(storageRequest);
            } catch (StorageException e1) {
                LOGGER.error("Unable to remove temporary content for id: {}", storageRequest.getId(), e1);
            } finally {
                try {
                    historianTransactionKey.ifPresent(historian::rollback);
                } catch (RuntimeException re) {
                    LOGGER.error("Unable to commit versioned items for historian transaction: {}",
                            historianTransactionKey.orElseGet(String::new), re);
                }
            }
        }
    }

    tmpContentPaths.values().forEach(path -> FileUtils.deleteQuietly(path.toFile()));
    tmpContentPaths.clear();
}
From source file:org.openhab.ui.cometvisu.internal.config.ConfigHelper.java
/**
 * add some basic mappings
 */
private void initBasicMappings() {
    // Rollershutter mapping
    Mapping mapping = new Mapping();
    mapping.setName("shutter");
    HashMap<String, String> map = new HashMap<String, String>();
    map.put("UP", "↑");
    map.put("STOP", "o");
    map.put("DOWN", "↓");
    for (String value : map.keySet()) {
        Entry entry = new Entry();
        entry.setValue(value);
        entry.getContent().add(map.get(value));
        mapping.getEntry().add(entry);
    }
    addToMappings(mapping);

    // On/Off
    mapping = new Mapping();
    mapping.setName("OnOff");
    map.clear();
    map.put("0", "O");
    map.put("1", "I");
    for (String value : map.keySet()) {
        Entry entry = new Entry();
        entry.setValue(value);
        if ("0".equals(value)) { // compare string contents, not references
            entry.setDefault(true);
        }
        entry.getContent().add(map.get(value));
        mapping.getEntry().add(entry);
    }
    addToMappings(mapping);

    // Open/Close (for contacts)
    mapping = new Mapping();
    mapping.setName("OpenClose");
    HashMap<String, Icon> iconMap = new HashMap<String, Icon>();
    iconMap.put("1", createIcon("fts_window_1w_open", "red"));
    iconMap.put("0", createIcon("fts_window_1w", null));
    for (String value : iconMap.keySet()) {
        Entry entry = new Entry();
        entry.setValue(value);
        entry.getContent().add(factory.createEntryIcon(iconMap.get(value)));
        mapping.getEntry().add(entry);
    }
    addToMappings(mapping);

    // Up/Down
    mapping = new Mapping();
    mapping.setName("UpDown");
    iconMap.clear();
    iconMap.put("1", createIcon("control_down", null));
    iconMap.put("0", createIcon("control_up", null));
    for (String value : iconMap.keySet()) {
        Entry entry = new Entry();
        entry.setValue(value);
        entry.getContent().add(factory.createEntryIcon(iconMap.get(value)));
        mapping.getEntry().add(entry);
    }
    addToMappings(mapping);
}
From source file:it.cnr.icar.eric.server.lcm.LifeCycleManagerImplTest.java
/**
 * Tests that LCM allows nested members within a RegistryPackage
 * and automatically creates HasMember Associations with RegistryPackage
 * and nested members. Test also makes sure that this works to multiple levels
 * of nesting.
 */
public void testNestedMembersInRegistryPackage() throws Exception {
    final String pkgId1 = "urn:org:freebxml:eric:server:lcm:LifeCycleManagerImplTest:testNestedMembersInRegistryPackage:pkg1";
    final String pkgId2 = "urn:org:freebxml:eric:server:lcm:LifeCycleManagerImplTest:testNestedMembersInRegistryPackage:pkgId2";
    final String pkgId3 = "urn:org:freebxml:eric:server:lcm:LifeCycleManagerImplTest:testNestedMembersInRegistryPackage:pkgId3";

    ServerRequestContext context = new ServerRequestContext(
            "LifeCycleManagerImplTest:testNestedMembersInRegistryPackage", null);
    // Use registryOperator as even she cannot do deletes when references exist (as this is not an access control issue).
    context.setUser(AuthenticationServiceImpl.getInstance().registryOperator);

    try {
        // initial clean-up
        removeIfExist(context, pkgId1);
        removeIfExist(context, pkgId2);
        removeIfExist(context, pkgId3);

        // Create packages
        RegistryPackageType pkg1 = bu.rimFac.createRegistryPackageType();
        pkg1.setId(pkgId1);
        RegistryPackageType pkg2 = bu.rimFac.createRegistryPackageType();
        pkg2.setId(pkgId2);
        RegistryPackageType pkg3 = bu.rimFac.createRegistryPackageType();
        pkg3.setId(pkgId3);

        pkg1.setRegistryObjectList(bu.rimFac.createRegistryObjectListType());
        pkg1.getRegistryObjectList().getIdentifiable().add(bu.rimFac.createRegistryPackage(pkg2));
        pkg2.setRegistryObjectList(bu.rimFac.createRegistryObjectListType());
        pkg2.getRegistryObjectList().getIdentifiable().add(bu.rimFac.createRegistryPackage(pkg3));

        ArrayList<Object> objs = new ArrayList<Object>();
        objs.add(pkg1);
        submit(context, objs);

        // Now make sure pkg2 is a member of pkg1
        HashMap<String, String> queryParamsMap = new HashMap<String, String>();
        queryParamsMap.put("$packageId", pkgId1);
        List<?> res = executeQuery(context, CanonicalConstants.CANONICAL_QUERY_GetMembersByRegistryPackageId,
                queryParamsMap);
        assertEquals("Nested member not found for pkg1", 1, res.size());
        RegistryPackageType p = (RegistryPackageType) ((JAXBElement<?>) res.get(0)).getValue();
        assertEquals("Nested member id not correct for pkg1", pkgId2, p.getId());

        // Now make sure pkg3 is a member of pkg2; the parameter map is cleared and reused
        queryParamsMap.clear();
        queryParamsMap.put("$packageId", pkgId2);
        res = executeQuery(context, CanonicalConstants.CANONICAL_QUERY_GetMembersByRegistryPackageId,
                queryParamsMap);
        assertEquals("Nested member not found for pkg2", 1, res.size());
        p = (RegistryPackageType) ((JAXBElement<?>) res.get(0)).getValue();
        assertEquals("Nested member id not correct for pkg2", pkgId3, p.getId());
    } finally {
        // final clean-up
        removeIfExist(context, pkgId1);
        removeIfExist(context, pkgId2);
        removeIfExist(context, pkgId3);
    }
}
From source file:org.openbravo.erpCommon.ad_forms.Fact.java
/**
 * Is Source Segment balanced.
 *
 * @param segmentType
 *          see AcctSchemaElement.SEGMENT_*. Implemented only for Org; other sensible candidates
 *          are Project, User1/2
 * @return true if segments are balanced
 */
public boolean isSegmentBalanced(String segmentType, ConnectionProvider conn) {
    if (segmentType.equals(AcctSchemaElement.SEGMENT_Org)) {
        log4jFact.debug("Starting isSegmentBalanced");
        HashMap<String, BigDecimal> map = new HashMap<String, BigDecimal>();
        // Add up values by key
        for (int i = 0; i < m_lines.size(); i++) {
            FactLine line = (FactLine) m_lines.get(i);
            String key = line.getAD_Org_ID(conn);
            BigDecimal bal = line.getSourceBalance();
            BigDecimal oldBal = map.get(key);
            if (oldBal != null) {
                bal = bal.add(oldBal);
            }
            map.put(key, bal);
            // log4jFact.debug("Add Key=" + key + ", Bal=" + bal + " <- " + line);
        }
        // check if all keys are zero
        Iterator<BigDecimal> values = map.values().iterator();
        while (values.hasNext()) {
            BigDecimal bal = values.next();
            if (bal.compareTo(ZERO) != 0) {
                map.clear();
                log4jFact.warn(
                        "isSegmentBalanced (" + segmentType + ") NO - " + toString() + ", Balance=" + bal);
                return false;
            }
        }
        map.clear();
        log4jFact.debug("isSegmentBalanced (" + segmentType + ") - " + toString());
        return true;
    }
    log4jFact.debug("isSegmentBalanced (" + segmentType + ") (not checked) - " + toString());
    return true;
}
From source file:me.piebridge.bible.Bible.java
private void setResourceValues(HashMap<String, String> map, int resId) {
    map.clear();
    for (String entry : mContext.getResources().getStringArray(resId)) {
        String[] strings = entry.split("\\|", 2);
        map.put(strings[0], strings[1]);
    }
}
From source file:org.apache.apex.malhar.hive.HiveMockTest.java
@Test
public void testHiveInsertMapOperator() throws SQLException, TException {
    HiveStore hiveStore = createStore(null);
    hiveStore.setFilepath(testdir);

    ArrayList<String> hivePartitionColumns = new ArrayList<String>();
    hivePartitionColumns.add("dt");
    hiveInitializeMapDatabase(createStore(null));

    HiveOperator hiveOperator = new HiveOperator();
    hiveOperator.setStore(hiveStore);
    hiveOperator.setTablename(tablemap);
    hiveOperator.setHivePartitionColumns(hivePartitionColumns);

    FSRollingMapTestImpl fsRolling = new FSRollingMapTestImpl();
    fsRolling.setFilePath(testdir);
    short permission = 511;
    fsRolling.setFilePermission(permission);
    fsRolling.setAlwaysWriteToTmp(false);
    fsRolling.setMaxLength(128);

    AttributeMap.DefaultAttributeMap attributeMap = new AttributeMap.DefaultAttributeMap();
    attributeMap.put(OperatorContext.PROCESSING_MODE, ProcessingMode.AT_LEAST_ONCE);
    attributeMap.put(OperatorContext.ACTIVATION_WINDOW_ID, -1L);
    attributeMap.put(DAG.APPLICATION_ID, APP_ID);
    OperatorContext context = mockOperatorContext(OPERATOR_ID, attributeMap);

    fsRolling.setup(context);
    hiveOperator.setup(context);

    HashMap<String, Object> map = new HashMap<String, Object>();
    FilePartitionMapping mapping1 = new FilePartitionMapping();
    FilePartitionMapping mapping2 = new FilePartitionMapping();

    ArrayList<String> partitions1 = new ArrayList<String>();
    partitions1.add("2014-12-10");
    mapping1.setFilename(APP_ID + "/" + OPERATOR_ID + "/" + "2014-12-10" + "/" + "0-transaction.out.part.0");
    mapping1.setPartition(partitions1);

    ArrayList<String> partitions2 = new ArrayList<String>();
    partitions2.add("2014-12-11");
    mapping2.setFilename(APP_ID + "/" + OPERATOR_ID + "/" + "2014-12-11" + "/" + "0-transaction.out.part.0");
    mapping2.setPartition(partitions2);

    for (int wid = 0; wid < NUM_WINDOWS; wid++) {
        fsRolling.beginWindow(wid);
        for (int tupleCounter = 0; tupleCounter < BLAST_SIZE; tupleCounter++) {
            // note: 2014 - 12 - 10 is integer arithmetic (1992), not the date string "2014-12-10"
            map.put(2014 - 12 - 10 + "", 2014 - 12 - 10);
            fsRolling.input.put(map);
            map.clear(); // reuse the same map instance for the next tuple
        }
        if (wid == 7) {
            fsRolling.committed(wid - 1);
            hiveOperator.processTuple(mapping1);
            hiveOperator.processTuple(mapping2);
        }
        fsRolling.endWindow();
    }
    fsRolling.teardown();

    hiveStore.connect();
    client.execute("select * from " + tablemap + " where dt='2014-12-10'");
    List<String> recordsInDatePartition1 = client.fetchAll();
    client.execute("drop table " + tablemap);
    hiveStore.disconnect();

    Assert.assertEquals(13, recordsInDatePartition1.size());
    for (int i = 0; i < recordsInDatePartition1.size(); i++) {
        LOG.debug("records in first date partition are {}", recordsInDatePartition1.get(i));
        /* An array containing partition and data is returned as a string record, hence we need to
           upcast it to an object first and then downcast to a string in order to use in Assert. */
        Object record = recordsInDatePartition1.get(i);
        Object[] records = (Object[]) record;
        Assert.assertEquals("2014-12-10", records[1]);
    }
}
From source file:com.datatorrent.contrib.hive.HiveMockTest.java
@Test
public void testHiveInsertMapOperator() throws SQLException, TException {
    HiveStore hiveStore = createStore(null);
    hiveStore.setFilepath(testdir);

    ArrayList<String> hivePartitionColumns = new ArrayList<String>();
    hivePartitionColumns.add("dt");
    hiveInitializeMapDatabase(createStore(null));

    HiveOperator hiveOperator = new HiveOperator();
    hiveOperator.setStore(hiveStore);
    hiveOperator.setTablename(tablemap);
    hiveOperator.setHivePartitionColumns(hivePartitionColumns);

    FSRollingMapTestImpl fsRolling = new FSRollingMapTestImpl();
    fsRolling.setFilePath(testdir);
    short permission = 511;
    fsRolling.setFilePermission(permission);
    fsRolling.setAlwaysWriteToTmp(false);
    fsRolling.setMaxLength(128);

    AttributeMap.DefaultAttributeMap attributeMap = new AttributeMap.DefaultAttributeMap();
    attributeMap.put(OperatorContext.PROCESSING_MODE, ProcessingMode.AT_LEAST_ONCE);
    attributeMap.put(OperatorContext.ACTIVATION_WINDOW_ID, -1L);
    attributeMap.put(DAG.APPLICATION_ID, APP_ID);
    OperatorContextTestHelper.TestIdOperatorContext context = new OperatorContextTestHelper.TestIdOperatorContext(
            OPERATOR_ID, attributeMap);

    fsRolling.setup(context);
    hiveOperator.setup(context);

    HashMap<String, Object> map = new HashMap<String, Object>();
    FilePartitionMapping mapping1 = new FilePartitionMapping();
    FilePartitionMapping mapping2 = new FilePartitionMapping();

    ArrayList<String> partitions1 = new ArrayList<String>();
    partitions1.add("2014-12-10");
    mapping1.setFilename(APP_ID + "/" + OPERATOR_ID + "/" + "2014-12-10" + "/" + "0-transaction.out.part.0");
    mapping1.setPartition(partitions1);

    ArrayList<String> partitions2 = new ArrayList<String>();
    partitions2.add("2014-12-11");
    mapping2.setFilename(APP_ID + "/" + OPERATOR_ID + "/" + "2014-12-11" + "/" + "0-transaction.out.part.0");
    mapping2.setPartition(partitions2);

    for (int wid = 0; wid < NUM_WINDOWS; wid++) {
        fsRolling.beginWindow(wid);
        for (int tupleCounter = 0; tupleCounter < BLAST_SIZE; tupleCounter++) {
            // note: 2014 - 12 - 10 is integer arithmetic (1992), not the date string "2014-12-10"
            map.put(2014 - 12 - 10 + "", 2014 - 12 - 10);
            fsRolling.input.put(map);
            map.clear(); // reuse the same map instance for the next tuple
        }
        if (wid == 7) {
            fsRolling.committed(wid - 1);
            hiveOperator.processTuple(mapping1);
            hiveOperator.processTuple(mapping2);
        }
        fsRolling.endWindow();
    }
    fsRolling.teardown();

    hiveStore.connect();
    client.execute("select * from " + tablemap + " where dt='2014-12-10'");
    List<String> recordsInDatePartition1 = client.fetchAll();
    client.execute("drop table " + tablemap);
    hiveStore.disconnect();

    Assert.assertEquals(13, recordsInDatePartition1.size());
    for (int i = 0; i < recordsInDatePartition1.size(); i++) {
        LOG.debug("records in first date partition are {}", recordsInDatePartition1.get(i));
        /* An array containing partition and data is returned as a string record, hence we need to
           upcast it to an object first and then downcast to a string in order to use in Assert. */
        Object record = recordsInDatePartition1.get(i);
        Object[] records = (Object[]) record;
        Assert.assertEquals("2014-12-10", records[1]);
    }
}
From source file:org.dasein.cloud.aws.storage.S3.java
private void loadObjects(@Nonnull String regionId, @Nonnull String bucket, @Nonnull Jiterator<Blob> iterator)
        throws CloudException, InternalException {
    HashMap<String, String> parameters = new HashMap<String, String>();
    S3Response response;
    String marker = null;
    boolean done = false;
    S3Method method;

    while (!done) {
        NodeList blocks;

        parameters.clear();
        if (marker != null) {
            parameters.put("marker", marker);
        }
        parameters.put("max-keys", String.valueOf(30));
        method = new S3Method(provider, S3Action.LIST_CONTENTS, parameters, null);
        try {
            response = method.invoke(bucket, null);
        } catch (S3Exception e) {
            String code = e.getCode();

            if (code == null || !code.equals("SignatureDoesNotMatch")) {
                throw new CloudException(e);
            }
            logger.error(e.getSummary());
            throw new CloudException(e);
        }
        blocks = response.document.getElementsByTagName("IsTruncated");
        if (blocks.getLength() > 0) {
            done = blocks.item(0).getFirstChild().getNodeValue().trim().equalsIgnoreCase("false");
        }
        blocks = response.document.getElementsByTagName("Contents");
        for (int i = 0; i < blocks.getLength(); i++) {
            Node object = blocks.item(i);
            Storage<org.dasein.util.uom.storage.Byte> size = null;
            String name = null;
            long ts = -1L;

            if (object.hasChildNodes()) {
                NodeList attrs = object.getChildNodes();

                for (int j = 0; j < attrs.getLength(); j++) {
                    Node attr = attrs.item(j);

                    if (attr.getNodeName().equalsIgnoreCase("Key")) {
                        String key = attr.getFirstChild().getNodeValue().trim();

                        name = key;
                        marker = key;
                    } else if (attr.getNodeName().equalsIgnoreCase("Size")) {
                        size = new Storage<org.dasein.util.uom.storage.Byte>(
                                Long.parseLong(attr.getFirstChild().getNodeValue().trim()), Storage.BYTE);
                    } else if (attr.getNodeName().equalsIgnoreCase("LastModified")) {
                        SimpleDateFormat fmt = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
                        String dateString = attr.getFirstChild().getNodeValue().trim();

                        try {
                            ts = fmt.parse(dateString).getTime();
                        } catch (ParseException e) {
                            logger.error(e);
                            e.printStackTrace();
                            throw new CloudException(e);
                        }
                    }
                }
            }
            if (name == null || size == null) {
                continue;
            }
            iterator.push(Blob.getInstance(regionId, getLocation(bucket, name), bucket, name, ts, size));
        }
    }
}
From source file:com.krawler.crm.hrmsintegration.bizservice.GoalManagementServiceImpl.java
public JSONObject getGoalHistoryJSON(boolean isexport, String goalid, String companyid, String userid,
        DateFormat dateFormat, String start, String limit) throws ServiceException {
    JSONArray jarr = new JSONArray();
    KwlReturnObject kmsg = null;
    JSONObject jobj = new JSONObject();
    int count = 0;
    List ll = null;
    List lst = null;
    try {
        ArrayList filter_names = new ArrayList();
        ArrayList filter_params = new ArrayList();
        HashMap<String, Object> requestParams = new HashMap<String, Object>();

        filter_names.add("c.id");
        filter_params.add(goalid);
        kmsg = hrmsIntDAOObj.getFinalGoals(requestParams, filter_names, filter_params);
        count = kmsg.getRecordTotalCount();
        lst = kmsg.getEntityList();
        Iterator ite = lst.iterator();
        while (ite.hasNext()) {
            Finalgoalmanagement fgmt = (Finalgoalmanagement) ite.next();
            filter_names.clear();
            filter_params.clear();
            requestParams.clear();

            requestParams = getFilter(2, fgmt.getStartdate(), fgmt.getEnddate(), userid, companyid);
            if (!StringUtil.isNullOrEmpty(start)) {
                requestParams.put("start", start);
                requestParams.put("limit", limit);
                requestParams.put("pagingFlag", true);
            }
            filter_names = (ArrayList) requestParams.get("filter_names");
            filter_params = (ArrayList) requestParams.get("filter_params");
            kmsg = crmAccountDAOObj.getAccountOwners(requestParams, filter_names, filter_params);
            ll = kmsg.getEntityList();
            count = kmsg.getRecordTotalCount();
            Iterator itetype2 = ll.iterator();
            while (itetype2.hasNext()) {
                CrmAccount ca = (CrmAccount) itetype2.next();
                JSONObject tmpObj = new JSONObject();
                String[] productInfo = crmAccountHandler.getAccountProducts(crmAccountDAOObj,
                        ca.getAccountid());
                tmpObj.put("accountid", ca.getAccountid());
                tmpObj.put("accountname", ca.getAccountname());
                tmpObj.put("revenue", StringUtil.isNullOrEmpty(ca.getRevenue()) ? "0" : ca.getRevenue());
                tmpObj.put("createdon",
                        isexport ? crmManagerCommon.exportDateNull(ca.getCreatedon(), dateFormat)
                                : crmManagerCommon.dateNull(ca.getCreatedon()));
                tmpObj.put("productid", productInfo[0]);
                tmpObj.put("product", productInfo[1]);
                tmpObj.put("exportmultiproduct", productInfo[2]);
                tmpObj.put("type", (ca.getCrmCombodataByAccounttypeid() != null
                        ? ca.getCrmCombodataByAccounttypeid().getValue() : ""));
                tmpObj.put("typeid", crmManagerCommon.comboNull(ca.getCrmCombodataByAccounttypeid()));
                tmpObj.put("industryid", crmManagerCommon.comboNull(ca.getCrmCombodataByIndustryid()));
                tmpObj.put("industry", (ca.getCrmCombodataByIndustryid() != null
                        ? ca.getCrmCombodataByIndustryid().getValue() : ""));
                tmpObj.put("website", (ca.getWebsite() != null ? ca.getWebsite() : ""));
                jarr.put(tmpObj);
            }
        }
        jobj.put("data", jarr);
        jobj.put("count", count);
    } catch (JSONException e) {
        logger.warn("JSONException exception in getGoalHistoryJSON()", e);
    } catch (ServiceException e) {
        logger.warn("ServiceException exception in getGoalHistoryJSON()", e);
    }
    return jobj;
}