Usage examples for com.google.common.collect.Maps#newEnumMap. The method has two overloads:
public static <K extends Enum<K>, V> EnumMap<K, V> newEnumMap(Class<K> type)
public static <K extends Enum<K>, V> EnumMap<K, V> newEnumMap(Map<K, ? extends V> map)
(Most examples below use the Class<K> overload.)
From source file:org.apache.hadoop.hdfs.server.namenode.AclTransformation.java
/**
 * Calculates mask entries required for the ACL. Mask calculation is performed
 * separately for each scope: access and default. This method is responsible
 * for handling the following cases of mask calculation:
 * 1. Throws an exception if the caller attempts to remove the mask entry of an
 * existing ACL that requires it. If the ACL has any named entries, then a
 * mask entry is required.
 * 2. If the caller supplied a mask in the ACL spec, use it.
 * 3. If the caller did not supply a mask, but there are ACL entry changes in
 * this scope, then automatically calculate a new mask. The permissions of
 * the new mask are the union of the permissions on the group entry and all
 * named entries.
 *
 * @param aclBuilder ArrayList<AclEntry> containing entries to build
 * @param providedMask EnumMap<AclEntryScope, AclEntry> mapping each scope to
 *          the mask entry that was provided for that scope (if provided)
 * @param maskDirty EnumSet<AclEntryScope> which contains a scope if the mask
 *          entry is dirty (added or deleted) in that scope
 * @param scopeDirty EnumSet<AclEntryScope> which contains a scope if any entry
 *          is dirty (added or deleted) in that scope
 * @throws AclException if validation fails
 */
private static void calculateMasks(List<AclEntry> aclBuilder, EnumMap<AclEntryScope, AclEntry> providedMask,
        EnumSet<AclEntryScope> maskDirty, EnumSet<AclEntryScope> scopeDirty) throws AclException {
    EnumSet<AclEntryScope> scopeFound = EnumSet.noneOf(AclEntryScope.class);
    EnumMap<AclEntryScope, FsAction> unionPerms = Maps.newEnumMap(AclEntryScope.class);
    EnumSet<AclEntryScope> maskNeeded = EnumSet.noneOf(AclEntryScope.class);
    // Determine which scopes are present, which scopes need a mask, and the
    // union of group class permissions in each scope.
    for (AclEntry entry : aclBuilder) {
        scopeFound.add(entry.getScope());
        if (entry.getType() == GROUP || entry.getName() != null) {
            // The group entry and every named entry contribute to the union;
            // a scope not yet seen in unionPerms starts from FsAction.NONE.
            FsAction scopeUnionPerms = Objects.firstNonNull(unionPerms.get(entry.getScope()), FsAction.NONE);
            unionPerms.put(entry.getScope(), scopeUnionPerms.or(entry.getPermission()));
        }
        if (entry.getName() != null) {
            // Any named entry makes a mask mandatory in its scope (case 1 above).
            maskNeeded.add(entry.getScope());
        }
    }
    // Add mask entry if needed in each scope.
    for (AclEntryScope scope : scopeFound) {
        if (!providedMask.containsKey(scope) && maskNeeded.contains(scope) && maskDirty.contains(scope)) {
            // Caller explicitly removed mask entry, but it's required.
            throw new AclException("Invalid ACL: mask is required and cannot be deleted.");
        } else if (providedMask.containsKey(scope)
                && (!scopeDirty.contains(scope) || maskDirty.contains(scope))) {
            // Caller explicitly provided new mask, or we are preserving the existing
            // mask in an unchanged scope.
            aclBuilder.add(providedMask.get(scope));
        } else if (maskNeeded.contains(scope) || providedMask.containsKey(scope)) {
            // Otherwise, if there are maskable entries present, or the ACL
            // previously had a mask, then recalculate a mask automatically.
            aclBuilder.add(new AclEntry.Builder().setScope(scope).setType(MASK)
                    .setPermission(unionPerms.get(scope)).build());
        }
    }
}
From source file:lombok.ast.ecj.EcjTreeConverter.java
/**
 * Converts a list of ECJ variable declarations to a lombok.ast node,
 * forwarding the supplied flag keys as a key-to-itself map.
 */
private Node toVariableDefinition(List<AbstractVariableDeclaration> decls, FlagKey... keys) {
    Map<FlagKey, Object> flags = Maps.newEnumMap(FlagKey.class);
    for (FlagKey flag : keys) {
        flags.put(flag, flag);
    }
    return toVariableDefinition(decls, flags);
}
From source file:com.google.cloud.spanner.SpannerImpl.java
/**
 * Collapses the given session options into an immutable option-to-value map.
 *
 * @param options vararg session options; each must target a distinct rpc option
 * @return an empty map when no options are given, otherwise an immutable copy
 *         of the collected option values
 */
static Map<SpannerRpc.Option, ?> optionMap(SessionOption... options) {
    if (options.length == 0) {
        return Collections.emptyMap();
    }
    Map<SpannerRpc.Option, Object> collected = Maps.newEnumMap(SpannerRpc.Option.class);
    for (SessionOption option : options) {
        // put() returns any previous mapping; non-null means a duplicate option.
        Object previous = collected.put(option.rpcOption(), option.value());
        checkArgument(previous == null, "Duplicate option %s", option.rpcOption());
    }
    return ImmutableMap.copyOf(collected);
}
From source file:lombok.ast.javac.JcTreeConverter.java
/**
 * Fills the given raw list accessor from javac tree nodes, forwarding the
 * supplied flag keys as a key-to-itself map.
 */
private void fillList(java.util.List<? extends JCTree> nodes, RawListAccessor<?, ?> list, FlagKey... keys) {
    Map<FlagKey, Object> flags = Maps.newEnumMap(FlagKey.class);
    for (FlagKey flag : keys) {
        flags.put(flag, flag);
    }
    fillList(nodes, list, flags);
}
From source file:org.terasology.world.block.loader.BlockLoader.java
/**
 * Builds a ConnectToAdjacentBlockFamily from a block definition's "types"
 * array, constructing one set of horizontal blocks per adjacency type.
 *
 * @throws IllegalArgumentException if a type entry has no "type" field or the
 *         field does not name a known BlockAdjacentType
 */
private BlockFamily processConnectToAdjacentFamily(AssetUri blockDefUri, JsonObject blockDefJson) {
    Map<BlockAdjacentType, EnumMap<Side, Block>> blocksByType = Maps.newEnumMap(BlockAdjacentType.class);
    String[] categories = new String[0];
    if (blockDefJson.has("types")) {
        JsonArray blockTypes = blockDefJson.getAsJsonArray("types");
        // Strip the array before merging the shared definition into each type.
        blockDefJson.remove("types");
        for (JsonElement typeElement : blockTypes.getAsJsonArray()) {
            JsonObject typeJson = typeElement.getAsJsonObject();
            if (!typeJson.has("type")) {
                throw new IllegalArgumentException("Block type is empty");
            }
            BlockAdjacentType adjacentType = gson.fromJson(typeJson.get("type"), BlockAdjacentType.class);
            if (adjacentType == null) {
                throw new IllegalArgumentException(
                        "Invalid type block: " + gson.fromJson(typeJson.get("type"), String.class));
            }
            if (!blocksByType.containsKey(adjacentType)) {
                blocksByType.put(adjacentType, Maps.<Side, Block>newEnumMap(Side.class));
            }
            typeJson.remove("type");
            // Overlay the type-specific json on top of the shared definition.
            mergeJsonInto(blockDefJson, typeJson);
            BlockDefinition typeDef = loadBlockDefinition(typeJson);
            constructHorizontalBlocks(blockDefUri, typeDef, blocksByType.get(adjacentType));
        }
    }
    return new ConnectToAdjacentBlockFamily(new BlockUri(blockDefUri.getPackage(), blockDefUri.getAssetName()),
            blocksByType, categories);
}
From source file:org.pircbotx.UserChannelDao.java
/**
 * Create an immutable snapshot (copy) of all of contained Users, Channels,
 * and mappings, VERY EXPENSIVE.
 *
 * @return Copy of entire model
 */
@Synchronized("accessLock")
public UserChannelDaoSnapshot createSnapshot() {
    //Create snapshots of all users and channels
    Map<U, UserSnapshot> userSnapshotMap = Maps.newHashMapWithExpectedSize(userNickMap.size());
    for (U curUser : userNickMap.values())
        userSnapshotMap.put(curUser, curUser.createSnapshot());
    Map<C, ChannelSnapshot> channelSnapshotMap = Maps.newHashMapWithExpectedSize(channelNameMap.size());
    for (C curChannel : channelNameMap.values())
        channelSnapshotMap.put(curChannel, curChannel.createSnapshot());
    //Make snapshots of the relationship maps using the above user and channel snapshots
    UserChannelMapSnapshot mainMapSnapshot = mainMap.createSnapshot(userSnapshotMap, channelSnapshotMap);
    EnumMap<UserLevel, UserChannelMap<UserSnapshot, ChannelSnapshot>> levelsMapSnapshot = Maps
            .newEnumMap(UserLevel.class);
    for (Map.Entry<UserLevel, UserChannelMap<U, C>> curLevel : levelsMap.entrySet())
        levelsMapSnapshot.put(curLevel.getKey(),
                curLevel.getValue().createSnapshot(userSnapshotMap, channelSnapshotMap));
    // Rebuild the nick -> user bimap so it points at the snapshot objects
    // instead of the live users.
    ImmutableBiMap.Builder<String, UserSnapshot> userNickMapSnapshotBuilder = ImmutableBiMap.builder();
    for (Map.Entry<String, U> curNickEntry : userNickMap.entrySet())
        userNickMapSnapshotBuilder.put(curNickEntry.getKey(), userSnapshotMap.get(curNickEntry.getValue()));
    // Same translation for the name -> channel bimap.
    ImmutableBiMap.Builder<String, ChannelSnapshot> channelNameMapSnapshotBuilder = ImmutableBiMap.builder();
    for (Map.Entry<String, C> curName : channelNameMap.entrySet())
        channelNameMapSnapshotBuilder.put(curName.getKey(), channelSnapshotMap.get(curName.getValue()));
    // And for users only known through private messages.
    ImmutableBiMap.Builder<String, UserSnapshot> privateUserSnapshotBuilder = ImmutableBiMap.builder();
    for (Map.Entry<String, U> curNickEntry : privateUsers.entrySet())
        privateUserSnapshotBuilder.put(curNickEntry.getKey(), userSnapshotMap.get(curNickEntry.getValue()));
    //Finally can create the snapshot object
    UserChannelDaoSnapshot daoSnapshot = new UserChannelDaoSnapshot(bot, locale, mainMapSnapshot,
            levelsMapSnapshot, userNickMapSnapshotBuilder.build(), channelNameMapSnapshotBuilder.build(),
            privateUserSnapshotBuilder.build());
    //Tell UserSnapshots and ChannelSnapshots what the new backing dao is
    for (UserSnapshot curUserSnapshot : userSnapshotMap.values())
        curUserSnapshot.setDao(daoSnapshot);
    for (ChannelSnapshot curChannelSnapshot : channelSnapshotMap.values())
        curChannelSnapshot.setDao(daoSnapshot);
    //Finally
    return daoSnapshot;
}
From source file:org.onosproject.codec.impl.DecodeInstructionCodecHelper.java
/**
 * Decodes stat-trigger threshold entries from JSON into a field-to-value map.
 * Recognized fields are byte count, packet count, and duration; a node whose
 * value is absent or non-numeric is skipped, and an unrecognized node is
 * logged as an error and ignored.
 */
private Map<StatTriggerField, Long> getStatThreshold(JsonNode statThresholdNode) {
    Map<StatTriggerField, Long> thresholds = Maps.newEnumMap(StatTriggerField.class);
    for (JsonNode entry : statThresholdNode) {
        if (entry.hasNonNull(InstructionCodec.STAT_BYTE_COUNT)) {
            JsonNode byteCount = entry.get(InstructionCodec.STAT_BYTE_COUNT);
            if (!byteCount.isNull() && byteCount.isNumber()) {
                thresholds.put(StatTriggerField.BYTE_COUNT, byteCount.asLong());
            }
        } else if (entry.hasNonNull(STAT_PACKET_COUNT)) {
            JsonNode packets = entry.get(STAT_PACKET_COUNT);
            if (!packets.isNull() && packets.isNumber()) {
                thresholds.put(StatTriggerField.PACKET_COUNT, packets.asLong());
            }
        } else if (entry.hasNonNull(InstructionCodec.STAT_DURATION)) {
            JsonNode durationNode = entry.get(InstructionCodec.STAT_DURATION);
            if (!durationNode.isNull() && durationNode.isNumber()) {
                thresholds.put(StatTriggerField.DURATION, durationNode.asLong());
            }
        } else {
            log.error("Unsupported stat {}", entry.toString());
        }
    }
    return thresholds;
}
From source file:org.terasology.world.block.loader.BlockLoader.java
/**
 * Builds a HorizontalBlockFamily for the given definition by constructing one
 * block per horizontal side.
 */
private BlockFamily processHorizontalBlockFamily(AssetUri blockDefUri, BlockDefinition blockDef) {
    Map<Side, Block> sideBlocks = Maps.newEnumMap(Side.class);
    constructHorizontalBlocks(blockDefUri, blockDef, sideBlocks);
    BlockUri familyUri = new BlockUri(blockDefUri.getPackage(), blockDefUri.getAssetName());
    return new HorizontalBlockFamily(familyUri, sideBlocks, getCategories(blockDef));
}
From source file:org.carrot2.workbench.core.ui.SearchEditor.java
/** * Restore global state shared among editors. *///from ww w. j ava 2 s.c o m private static SearchEditorMemento restoreGlobalState() { SearchEditorMemento memento = SimpleXmlMemento.fromPreferenceStore(SearchEditorMemento.class, GLOBAL_MEMENTO_KEY); if (memento == null) { memento = new SearchEditorMemento(); memento.sectionsExpansionState = Maps.newHashMap(); } final IPreferenceStore prefStore = WorkbenchCorePlugin.getDefault().getPreferenceStore(); final Map<PanelName, PanelState> panels = Maps.newEnumMap(PanelName.class); for (PanelName n : EnumSet.allOf(PanelName.class)) { final PanelState s = new PanelState(); s.visibility = prefStore.getBoolean(n.prefKeyVisibility); s.weight = prefStore.getInt(n.prefKeyWeight); panels.put(n, s); } memento.panels = panels; return memento; }
From source file:org.n52.sos.encode.WmlTDREncoderv20.java
private XmlObject createDataRecord(AbstractObservationValue<?> observationValue, String unit) throws OwsExceptionReport { // AbstractPhenomenon observableProperty = // sosObservation.getObservationConstellation().getObservableProperty(); SweDataRecord dataRecord = new SweDataRecord(); dataRecord.setIdentifier("datarecord_" + observationValue.getObservationID()); SweQuantity quantity = new SweQuantity(); quantity.setDefinition(observationValue.getObservableProperty()); quantity.setUom(unit);//from w w w . j a v a 2 s . c o m SweField field = new SweField("observed_value", quantity); dataRecord.addField(field); Map<HelperValues, String> additionalValues = Maps.newEnumMap(HelperValues.class); additionalValues.put(HelperValues.FOR_OBSERVATION, null); return CodingHelper.encodeObjectToXml(SweConstants.NS_SWE_20, dataRecord, additionalValues); }