List of usage examples for java.util.LinkedHashMap.putAll
void putAll(Map<? extends K, ? extends V> m);
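As a quick reminder of the semantics the examples below rely on: putAll copies every mapping from the argument into the receiver, and in an insertion-ordered LinkedHashMap new keys are appended in the argument's iteration order while keys that already exist keep their original position and only have their values replaced. A minimal self-contained sketch with made-up data:

import java.util.LinkedHashMap;
import java.util.Map;

public class PutAllDemo {
    public static void main(String[] args) {
        LinkedHashMap<String, Integer> target = new LinkedHashMap<>();
        target.put("a", 1);
        target.put("b", 2);

        Map<String, Integer> source = new LinkedHashMap<>();
        source.put("c", 30); // new key: appended after "a" and "b"
        source.put("b", 20); // existing key: value replaced, position unchanged

        target.putAll(source);
        System.out.println(target); // prints {a=1, b=20, c=30}
    }
}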
From source file: it.drwolf.ridire.session.LocalResourcesManager.java

public Map<String, Integer> getAllFunctionalMetadataMapPlusNull() {
    LinkedHashMap<String, Integer> ret1 = new LinkedHashMap<String, Integer>();
    ret1.put("", -1);
    ret1.putAll(this.getAllFunctionalMetadataMap());
    return ret1;
}
From source file: it.drwolf.ridire.session.LocalResourcesManager.java

public Map<String, Integer> getAllSemanticMetadataMapPlusNull() {
    LinkedHashMap<String, Integer> ret1 = new LinkedHashMap<String, Integer>();
    ret1.put("", -1);
    ret1.putAll(this.getAllSemanticMetadataMap());
    return ret1;
}
From source file: com.cloud.network.router.VpcNetworkHelperImpl.java

@Override
public LinkedHashMap<Network, List<? extends NicProfile>> configureDefaultNics(
        final RouterDeploymentDefinition routerDeploymentDefinition)
        throws ConcurrentOperationException, InsufficientAddressCapacityException {
    final LinkedHashMap<Network, List<? extends NicProfile>> networks =
            new LinkedHashMap<Network, List<? extends NicProfile>>(3);

    // 1) Control network
    final LinkedHashMap<Network, List<? extends NicProfile>> controlNic =
            configureControlNic(routerDeploymentDefinition);
    networks.putAll(controlNic);

    // 2) Public network
    final LinkedHashMap<Network, List<? extends NicProfile>> publicNic =
            configurePublicNic(routerDeploymentDefinition, false);
    networks.putAll(publicNic);

    // 3) Guest Network
    final LinkedHashMap<Network, List<? extends NicProfile>> guestNic =
            configureGuestNic(routerDeploymentDefinition);
    networks.putAll(guestNic);

    return networks;
}
From source file: com.github.gfx.android.orma.migration.SchemaDiffMigration.java

/**
 * @param srcIndexes Set of "CREATE INDEX" statements which the DB has
 * @param dstIndexes Set of "CREATE INDEX" statements which the running code has
 * @return List of "CREATE INDEX" statements to apply to the DB
 */
@NonNull
public List<String> indexDiff(@NonNull Collection<String> srcIndexes,
        @NonNull Collection<String> dstIndexes) {
    LinkedHashMap<CreateIndexStatement, String> unionIndexes = new LinkedHashMap<>();

    Map<CreateIndexStatement, String> srcIndexesPairs = parseIndexes(srcIndexes);
    unionIndexes.putAll(srcIndexesPairs);

    Map<CreateIndexStatement, String> dstIndexesPairs = parseIndexes(dstIndexes);
    unionIndexes.putAll(dstIndexesPairs);

    List<String> createIndexStatements = new ArrayList<>();
    for (Map.Entry<CreateIndexStatement, String> createIndexStatement : unionIndexes.entrySet()) {
        boolean existsInDst = dstIndexesPairs.containsKey(createIndexStatement.getKey());
        boolean existsInSrc = srcIndexesPairs.containsKey(createIndexStatement.getKey());
        if (existsInDst && existsInSrc) {
            // okay, nothing to do
        } else if (existsInDst) {
            createIndexStatements.add(createIndexStatement.getValue());
        } else { // existsInSrc
            createIndexStatements.add(buildDropIndexStatement(createIndexStatement.getKey()));
        }
    }
    return createIndexStatements;
}
From source file: org.broadleafcommerce.common.extensibility.context.merge.AbstractMergeBeanPostProcessor.java

@Override
public Object postProcessBeforeInitialization(Object bean, String beanName) throws BeansException {
    if (statusProvider != null && !statusProvider.isProcessingEnabled(bean, beanName, applicationContext)) {
        if (LOG.isTraceEnabled()) {
            LOG.trace(String.format("Not performing post-processing on targetRef [%s] because the registered "
                    + "status provider [%s] returned false", targetRef,
                    statusProvider.getClass().getSimpleName()));
        }
        return bean;
    }
    if (beanName.equals(targetRef)) {
        Object mergeCollection = applicationContext.getBean(collectionRef);
        if (bean instanceof ListFactoryBean || bean instanceof List) {
            try {
                List mergeList = (List) mergeCollection;
                List sourceList;
                if (bean instanceof ListFactoryBean) {
                    Field field = ListFactoryBean.class.getDeclaredField("sourceList");
                    field.setAccessible(true);
                    sourceList = (List) field.get(bean);
                } else {
                    sourceList = (List) bean;
                }
                switch (placement) {
                case APPEND:
                    sourceList.addAll(mergeList);
                    break;
                case PREPEND:
                    sourceList.addAll(0, mergeList);
                    break;
                case SPECIFIC:
                    sourceList.addAll(position, mergeList);
                    break;
                }
            } catch (Exception e) {
                throw new BeanCreationException(e.getMessage());
            }
        } else if (bean instanceof SetFactoryBean || bean instanceof Set) {
            try {
                Set mergeSet = (Set) mergeCollection;
                Set sourceSet;
                if (bean instanceof SetFactoryBean) {
                    Field field = SetFactoryBean.class.getDeclaredField("sourceSet");
                    field.setAccessible(true);
                    sourceSet = (Set) field.get(bean);
                } else {
                    sourceSet = (Set) bean;
                }
                List tempList = new ArrayList(sourceSet);
                switch (placement) {
                case APPEND:
                    tempList.addAll(mergeSet);
                    break;
                case PREPEND:
                    tempList.addAll(0, mergeSet);
                    break;
                case SPECIFIC:
                    tempList.addAll(position, mergeSet);
                    break;
                }
                sourceSet.clear();
                sourceSet.addAll(tempList);
            } catch (Exception e) {
                throw new BeanCreationException(e.getMessage());
            }
        } else if (bean instanceof MapFactoryBean || bean instanceof Map) {
            try {
                Map mergeMap = (Map) mergeCollection;
                Map sourceMap;
                if (bean instanceof MapFactoryBean) {
                    Field field = MapFactoryBean.class.getDeclaredField("sourceMap");
                    field.setAccessible(true);
                    sourceMap = (Map) field.get(bean);
                } else {
                    sourceMap = (Map) bean;
                }
                LinkedHashMap tempMap = new LinkedHashMap();
                switch (placement) {
                case APPEND:
                    tempMap.putAll(sourceMap);
                    tempMap.putAll(mergeMap);
                    break;
                case PREPEND:
                    tempMap.putAll(mergeMap);
                    tempMap.putAll(sourceMap);
                    break;
                case SPECIFIC:
                    boolean added = false;
                    int j = 0;
                    for (Object key : sourceMap.keySet()) {
                        if (j == position) {
                            tempMap.putAll(mergeMap);
                            added = true;
                        }
                        tempMap.put(key, sourceMap.get(key));
                        j++;
                    }
                    if (!added) {
                        tempMap.putAll(mergeMap);
                    }
                    break;
                }
                sourceMap.clear();
                sourceMap.putAll(tempMap);
            } catch (Exception e) {
                throw new BeanCreationException(e.getMessage());
            }
        } else {
            throw new IllegalArgumentException("Bean (" + beanName + ") is specified as a merge target, "
                    + "but is not of type ListFactoryBean, SetFactoryBean or MapFactoryBean");
        }
    }
    return bean;
}
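The map branch above leans entirely on putAll call order: with an insertion-ordered LinkedHashMap, whichever map is copied into tempMap first supplies the leading keys. A minimal sketch with made-up keys and values showing the APPEND versus PREPEND outcomes, and the asymmetry on duplicate keys:

import java.util.LinkedHashMap;

// Hypothetical stand-ins for the sourceMap/mergeMap in the example above.
LinkedHashMap<String, Integer> sourceMap = new LinkedHashMap<>();
sourceMap.put("a", 1);
sourceMap.put("b", 2);
LinkedHashMap<String, Integer> mergeMap = new LinkedHashMap<>();
mergeMap.put("b", 20);
mergeMap.put("c", 30);

LinkedHashMap<String, Integer> append = new LinkedHashMap<>();
append.putAll(sourceMap);
append.putAll(mergeMap);   // {a=1, b=20, c=30}: source keys lead, merge values win

LinkedHashMap<String, Integer> prepend = new LinkedHashMap<>();
prepend.putAll(mergeMap);
prepend.putAll(sourceMap); // {b=2, c=30, a=1}: merge keys lead, but source values win

Note the caveat visible in the PREPEND case: duplicate keys keep the position of their first insertion, so the second putAll can move values but never positions.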
From source file: ca.sfu.federation.model.Scenario.java

/**
 * Get the local Element collection.
 * @return Collection of NamedObjects in this context.
 */
public Map<String, INamed> getElementMap() {
    LinkedHashMap<String, INamed> results = new LinkedHashMap<String, INamed>();
    results.putAll(this.contextual);
    results.putAll(this.transactional);
    return results;
}
From source file: com.sillelien.dollar.api.types.DollarMap.java

@NotNull
@Override
public var $plus(@NotNull var rhs) {
    var rhsFix = rhs._fixDeep();
    if (rhsFix.map()) {
        LinkedHashMap<var, var> copy = copyMap();
        copy.putAll(rhsFix.toVarMap().mutable());
        return DollarFactory.wrap(new DollarMap(errors(), copy));
    } else if (rhsFix.string()) {
        return DollarFactory.fromValue(toHumanString() + rhsFix.toHumanString(), errors(), rhsFix.errors());
    } else {
        LinkedHashMap<var, var> copy = copyMap();
        copy.put(DollarFactory.fromValue("_" + copy.size()), rhsFix);
        return DollarFactory.wrap(new DollarMap(errors(), copy));
    }
}
From source file: pt.lsts.neptus.plugins.sunfish.awareness.HubLocationProvider.java

@Periodic(millisBetweenUpdates = 3000 * 60)
public void sendToHub() {
    if (!enabled)
        return;
    NeptusLog.pub().info("Uploading device updates to Hub...");
    LinkedHashMap<Integer, AssetPosition> toSend = new LinkedHashMap<Integer, AssetPosition>();
    LocationType myLoc = MyState.getLocation();
    AssetPosition myPos = new AssetPosition(StringUtils.toImcName(GeneralPreferences.imcCcuName),
            myLoc.getLatitudeDegs(), myLoc.getLongitudeDegs());
    toSend.put(ImcMsgManager.getManager().getLocalId().intValue(), myPos);
    toSend.putAll(positionsToSend);
    positionsToSend.clear();
    DeviceUpdate upd = new DeviceUpdate();
    //ExtendedDeviceUpdate upd = new ExtendedDeviceUpdate();
    upd.source = ImcMsgManager.getManager().getLocalId().intValue();
    upd.destination = 65535;
    for (Entry<Integer, AssetPosition> pos : toSend.entrySet()) {
        Position p = new Position();
        p.id = pos.getKey();
        p.latRads = pos.getValue().getLoc().getLatitudeRads();
        p.lonRads = pos.getValue().getLoc().getLongitudeRads();
        p.posType = Position.fromImcId(p.id);
        p.timestamp = pos.getValue().getTimestamp() / 1000.0;
        upd.getPositions().put(pos.getKey(), p);
    }
    for (Position p : upd.getPositions().values()) {
        NeptusLog.pub().info("Uploading position for " + p.id + ": " + Math.toDegrees(p.latRads) + "/"
                + Math.toDegrees(p.lonRads) + "/" + new Date((long) (1000 * p.timestamp)));
    }
    try {
        HttpPost postMethod = new HttpPost(iridiumUrl);
        postMethod.setHeader("Content-type", "application/hub");
        String data = new String(Hex.encodeHex(upd.serialize()));
        NeptusLog.pub().info("Sending '" + data + "'");
        StringEntity ent = new StringEntity(data);
        postMethod.setEntity(ent);
        @SuppressWarnings("resource")
        HttpClient client = new DefaultHttpClient();
        HttpResponse response = client.execute(postMethod);
        NeptusLog.pub().info("Sent " + upd.getPositions().size() + " device updates to Hub: "
                + response.getStatusLine().toString());
        postMethod.abort();
    } catch (Exception e) {
        NeptusLog.pub().error("Error sending updates to hub", e);
        parent.postNotification(Notification.error("Situation Awareness",
                e.getClass().getSimpleName() + " while trying to send device updates to HUB.")
                .requireHumanAction(false));
    }
}
From source file: com.sillelien.dollar.api.types.DollarMap.java

@NotNull
@Override
public var $prepend(@NotNull var value) {
    final LinkedHashMap<var, var> newMap = new LinkedHashMap<>();
    newMap.put(value.$pairKey(), value.$pairValue());
    newMap.putAll(toVarMap().mutable());
    return DollarFactory.fromValue(newMap, errors(), value.errors());
}
From source file: org.apache.tez.mapreduce.input.TestMultiMRInput.java

@Test(timeout = 5000)
public void testMultipleSplits() throws Exception {
    Path workDir = new Path(TEST_ROOT_DIR, "testMultipleSplits");
    JobConf jobConf = new JobConf(defaultConf);
    jobConf.setInputFormat(org.apache.hadoop.mapred.SequenceFileInputFormat.class);
    FileInputFormat.setInputPaths(jobConf, workDir);

    MRInputUserPayloadProto.Builder builder = MRInputUserPayloadProto.newBuilder();
    builder.setGroupingEnabled(false);
    builder.setConfigurationBytes(TezUtils.createByteStringFromConf(jobConf));
    byte[] payload = builder.build().toByteArray();

    InputContext inputContext = createTezInputContext(payload);
    MultiMRInput input = new MultiMRInput(inputContext, 2);
    input.initialize();

    List<Event> eventList = new ArrayList<Event>();

    LinkedHashMap<LongWritable, Text> data = new LinkedHashMap<LongWritable, Text>();

    String file1 = "file1";
    LinkedHashMap<LongWritable, Text> data1 = createInputData(localFs, workDir, jobConf, file1, 0, 10);

    String file2 = "file2";
    LinkedHashMap<LongWritable, Text> data2 = createInputData(localFs, workDir, jobConf, file2, 10, 20);

    data.putAll(data1);
    data.putAll(data2);

    SequenceFileInputFormat<LongWritable, Text> format = new SequenceFileInputFormat<LongWritable, Text>();
    InputSplit[] splits = format.getSplits(jobConf, 2);
    assertEquals(2, splits.length);

    MRSplitProto splitProto1 = MRInputHelpers.createSplitProto(splits[0]);
    InputDataInformationEvent event1 = InputDataInformationEvent.createWithSerializedPayload(0,
            splitProto1.toByteString().asReadOnlyByteBuffer());

    MRSplitProto splitProto2 = MRInputHelpers.createSplitProto(splits[1]);
    InputDataInformationEvent event2 = InputDataInformationEvent.createWithSerializedPayload(0,
            splitProto2.toByteString().asReadOnlyByteBuffer());

    eventList.clear();
    eventList.add(event1);
    eventList.add(event2);
    input.handleEvents(eventList);

    int readerCount = 0;
    for (KeyValueReader reader : input.getKeyValueReaders()) {
        readerCount++;
        while (reader.next()) {
            if (data.size() == 0) {
                fail("Found more records than expected");
            }
            Object key = reader.getCurrentKey();
            Object val = reader.getCurrentValue();
            assertEquals(val, data.remove(key));
        }
    }
    assertEquals(2, readerCount);
}