List of usage examples for com.google.common.collect Multimap put
boolean put(@Nullable K key, @Nullable V value);
From source file:com.github.rinde.rinsim.central.rt.ScheduleUtil.java
/**
 * Repairs the given schedule so that it is consistent with the current state:
 * delivered parcels are removed, parcels currently in a vehicle's cargo (and the
 * vehicle's current destination) are guaranteed to occur the required number of
 * times in that vehicle's route, duplicates in other vehicles' routes are removed,
 * and the current destination is moved to the front of its vehicle's route.
 *
 * @param schedule one route per vehicle; a parcel may occur at most twice in a route
 *        (pickup + delivery).
 * @param state the current global state; must contain the same number of vehicles as
 *        there are routes, and its vehicles must not carry route information.
 * @return a new, modifiable schedule consistent with {@code state}.
 */
static List<List<Parcel>> fixSchedule(ImmutableList<ImmutableList<Parcel>> schedule,
        GlobalStateObject state) {
    checkArgument(schedule.size() == state.getVehicles().size(),
            "The number of routes (%s) and the number of vehicles (%s) must " + "be equal.",
            schedule.size(), state.getVehicles().size());
    checkArgument(!state.getVehicles().get(0).getRoute().isPresent(),
            "A state object without routes is expected.");

    // only parcels in this set may occur in the schedule
    final Set<Parcel> undeliveredParcels = new HashSet<>();
    undeliveredParcels.addAll(state.getAvailableParcels());

    // for each vehicle, we create a multiset that is a representation of the
    // number of times the occurrence of a parcel is REQUIRED to be in the
    // route of the vehicle
    final List<Multiset<Parcel>> expectedRoutes = new ArrayList<>();
    for (int i = 0; i < state.getVehicles().size(); i++) {
        expectedRoutes.add(HashMultiset.<Parcel>create());
        final VehicleStateObject vehicle = state.getVehicles().get(i);
        // parcels in cargo must occur once (delivery only)
        expectedRoutes.get(i).addAll(vehicle.getContents());
        if (vehicle.getDestination().isPresent()
                && !vehicle.getContents().contains(vehicle.getDestination().get())) {
            // a destination that is not yet picked up must occur twice
            // (pickup + delivery)
            expectedRoutes.get(i).add(vehicle.getDestination().get(), 2);
        }
        undeliveredParcels.addAll(vehicle.getContents());
    }

    // create map of parcel -> vehicle index
    final Multimap<Parcel, Integer> parcelOwner = LinkedHashMultimap.create();
    for (int i = 0; i < schedule.size(); i++) {
        final List<Parcel> route = schedule.get(i);
        final Set<Parcel> routeSet = ImmutableSet.copyOf(route);
        for (final Parcel p : routeSet) {
            parcelOwner.put(p, i);
        }
    }

    // copy schedule into a modifiable structure
    final List<List<Parcel>> newSchedule = new ArrayList<>();
    for (final ImmutableList<Parcel> route : schedule) {
        newSchedule.add(new ArrayList<>(route));
    }

    // compare with current vehicle cargo
    for (int i = 0; i < state.getVehicles().size(); i++) {
        final VehicleStateObject vehicle = state.getVehicles().get(i);
        final Multiset<Parcel> routeSet = ImmutableMultiset.copyOf(schedule.get(i));
        // all parcels that appear either in the scheduled route or in the
        // expected route of this vehicle must be checked
        final Set<Parcel> test =
                Sets.union(routeSet.elementSet(), expectedRoutes.get(i).elementSet());
        for (final Parcel p : test) {
            final int actualOccurences = routeSet.count(p);
            checkState(actualOccurences <= 2);
            final int expectedOccurrences = expectedRoutes.get(i).count(p);
            if (!undeliveredParcels.contains(p)) {
                // it is already delivered, remove all occurrences
                newSchedule.get(i).removeAll(Collections.singleton(p));
            } else if (actualOccurences != expectedOccurrences && expectedOccurrences > 0) {
                if (expectedOccurrences == 1 && actualOccurences == 2) {
                    // one occurrence too many, drop the first one
                    newSchedule.get(i).remove(p);
                } else {
                    // expected occurr = 1 or 2
                    final boolean destinationIsCurrent =
                            vehicle.getDestination().asSet().contains(p);
                    int toAdd = expectedOccurrences - actualOccurences;
                    // add it once at the front of the route
                    if (destinationIsCurrent) {
                        newSchedule.get(i).add(0, p);
                        toAdd--;
                    }
                    // add it once to the end of the route
                    if (toAdd > 0) {
                        newSchedule.get(i).add(p);
                    }
                }
            }
            // if the parcel is expected in the current vehicle, but it also appears
            // in (an) other vehicle(s), we have to remove it there
            if (expectedOccurrences > 0 && parcelOwner.containsKey(p)) {
                for (final Integer v : parcelOwner.get(p)) {
                    if (!v.equals(i)) {
                        newSchedule.get(v).removeAll(Collections.singleton(p));
                    }
                }
            }
        }
        // the current destination must always be the first stop in the route
        if (vehicle.getDestination().isPresent()
                && !newSchedule.get(i).get(0).equals(vehicle.getDestination().get())) {
            newSchedule.get(i).remove(vehicle.getDestination().get());
            newSchedule.get(i).add(0, vehicle.getDestination().get());
        }
    }
    return newSchedule;
}
From source file:org.apache.samza.execution.ExecutionPlanner.java
/** * Fetch the partitions of source/sink streams and update the StreamEdges. * @param jobGraph {@link JobGraph}/*from w w w . j ava2s .c o m*/ * @param streamManager the {@link StreamManager} to interface with the streams. */ /* package private */ static void updateExistingPartitions(JobGraph jobGraph, StreamManager streamManager) { Set<StreamEdge> existingStreams = new HashSet<>(); existingStreams.addAll(jobGraph.getSources()); existingStreams.addAll(jobGraph.getSinks()); Multimap<String, StreamEdge> systemToStreamEdges = HashMultimap.create(); // group the StreamEdge(s) based on the system name existingStreams.forEach(streamEdge -> { SystemStream systemStream = streamEdge.getSystemStream(); systemToStreamEdges.put(systemStream.getSystem(), streamEdge); }); for (Map.Entry<String, Collection<StreamEdge>> entry : systemToStreamEdges.asMap().entrySet()) { String systemName = entry.getKey(); Collection<StreamEdge> streamEdges = entry.getValue(); Map<String, StreamEdge> streamToStreamEdge = new HashMap<>(); // create the stream name to StreamEdge mapping for this system streamEdges.forEach( streamEdge -> streamToStreamEdge.put(streamEdge.getSystemStream().getStream(), streamEdge)); // retrieve the partition counts for the streams in this system Map<String, Integer> streamToPartitionCount = streamManager.getStreamPartitionCounts(systemName, streamToStreamEdge.keySet()); // set the partitions of a stream to its StreamEdge streamToPartitionCount.forEach((stream, partitionCount) -> { streamToStreamEdge.get(stream).setPartitionCount(partitionCount); log.debug("Partition count is {} for stream {}", partitionCount, stream); }); } }
From source file:com.github.naios.wide.framework.internal.storage.server.builder.SQLScope.java
/**
 * Splits collections containing update, insert & delete structures into their
 * scopes. Each structure/update is routed (via {@link SQLScopeSplitter}) to the
 * {@link SQLScope} named by the builder's SQL info provider; scopes are created
 * on demand.
 *
 * @param sqlBuilder builder used to resolve the scope of each entry
 * @param update per-structure update infos to distribute
 * @param insert structures to insert
 * @param delete structures to delete
 * @return map of scope name to the {@link SQLScope} holding its entries
 */
protected static Map<String, SQLScope> split(final SQLBuilderImpl sqlBuilder,
        final Map<ServerStorageStructure, Collection<SQLUpdateInfo>> update,
        final Collection<ServerStorageStructure> insert,
        final Collection<ServerStorageStructure> delete) {
    final Map<String, SQLScope> scopes = new HashMap<>();
    update.forEach((structure, infos) -> {
        infos.forEach(new SQLScopeSplitter<SQLUpdateInfo>(scopes) {
            @Override
            public SQLBuilderImpl getSQLBuilder() {
                return sqlBuilder;
            }

            @Override
            public String getScope(final SQLUpdateInfo info) {
                // scope of an update is decided per property
                return sqlBuilder.getSQLInfoProvider().getScopeOfEntry(structure, info.getProperty());
            }

            @Override
            public void addObservable(final SQLScope scope, final SQLUpdateInfo info) {
                // lazily create the per-owner multimap inside the scope
                Multimap<ServerStorageStructure, SQLUpdateInfo> map = scope.update.get(structure.getOwner());
                if (Objects.isNull(map)) {
                    map = HashMultimap.create();
                    scope.update.put(structure.getOwner(), map);
                }
                map.put(structure, info);
            }
        });
    });
    insert.forEach(new SQLScopeSplitter<ServerStorageStructure>(scopes) {
        @Override
        public SQLBuilderImpl getSQLBuilder() {
            return sqlBuilder;
        }

        @Override
        public String getScope(final ServerStorageStructure entry) {
            return sqlBuilder.getSQLInfoProvider().getScopeOfStructure(entry);
        }

        @Override
        public void addObservable(final SQLScope scope, final ServerStorageStructure entry) {
            scope.insert.put(entry.getOwner(), entry);
        }
    });
    delete.forEach(new SQLScopeSplitter<ServerStorageStructure>(scopes) {
        @Override
        public SQLBuilderImpl getSQLBuilder() {
            return sqlBuilder;
        }

        @Override
        public String getScope(final ServerStorageStructure entry) {
            return sqlBuilder.getSQLInfoProvider().getScopeOfStructure(entry);
        }

        @Override
        public void addObservable(final SQLScope scope, final ServerStorageStructure entry) {
            scope.delete.put(entry.getOwner(), entry);
        }
    });
    return scopes;
}
From source file:com.torodb.torod.db.postgresql.meta.routines.DeleteDocuments.java
public static int execute(Configuration configuration, CollectionSchema colSchema, Multimap<DocStructure, Integer> didsByStructure, boolean justOne) { Multimap<DocStructure, Integer> didsByStructureToDelete; if (didsByStructure.isEmpty()) { return 0; }//w w w .ja va 2s . co m if (justOne) { didsByStructureToDelete = MultimapBuilder.hashKeys(1).arrayListValues(1).build(); Map.Entry<DocStructure, Integer> aEntry = didsByStructure.entries().iterator().next(); didsByStructureToDelete.put(aEntry.getKey(), aEntry.getValue()); } else { didsByStructureToDelete = didsByStructure; } try { return execute(configuration, colSchema, didsByStructureToDelete); } catch (SQLException ex) { throw new RuntimeException(ex); } }
From source file:org.elasticlib.node.repository.Index.java
/**
 * Merges the metadata entries of all head revisions of the supplied tree into a
 * single multimap (a key may map to several values across revisions).
 *
 * @param revisionTree the tree whose head revisions are inspected
 * @return key to value(s) multimap of the combined head metadata
 */
private static Multimap<String, Value> headMetadata(RevisionTree revisionTree) {
    final Multimap<String, Value> merged = HashMultimap.create();
    revisionTree.getHead().stream()
            .map(rev -> revisionTree.get(rev).getMetadata())
            .flatMap(metadata -> metadata.entrySet().stream())
            .forEach(entry -> merged.put(entry.getKey(), entry.getValue()));
    return merged;
}
From source file:rubedo.util.RemapHelper.java
/**
 * Replaces the registry entry registered under {@code name} with {@code object},
 * recording the replaced value so it can be inspected later.
 *
 * @param registry the registry to modify
 * @param name the registry key to overwrite
 * @param object the new value to register under {@code name}
 */
@SuppressWarnings("unchecked")
public static void overwriteEntry(RegistryNamespaced registry, String name, Object object) {
    final Object previous = registry.getObject(name);
    Repl.overwrite_do(registry, name, object, previous);

    // fetch (or lazily create) the replacement history for this registry
    Multimap<String, Object> replacements = Repl.replacements.get(registry);
    if (replacements == null) {
        replacements = ArrayListMultimap.create();
        Repl.replacements.put(registry, replacements);
    }
    if (!replacements.containsKey(name)) {
        // remember the original value the first time this name is replaced
        replacements.put(name, previous);
    }
    replacements.put(name, object);

    Repl.alterDelegateChain(registry, name, object);
}
From source file:com.torodb.torod.db.backends.meta.routines.DeleteDocuments.java
public static int execute(Configuration configuration, CollectionSchema colSchema, Multimap<DocStructure, Integer> didsByStructure, boolean justOne, @Nonnull DatabaseInterface databaseInterface) throws RetryTransactionException { Multimap<DocStructure, Integer> didsByStructureToDelete; if (didsByStructure.isEmpty()) { return 0; }// w w w . j a v a2s.c om if (justOne) { didsByStructureToDelete = MultimapBuilder.hashKeys(1).arrayListValues(1).build(); Map.Entry<DocStructure, Integer> aEntry = didsByStructure.entries().iterator().next(); didsByStructureToDelete.put(aEntry.getKey(), aEntry.getValue()); } else { didsByStructureToDelete = didsByStructure; } try { return execute(configuration, colSchema, didsByStructureToDelete, databaseInterface); } catch (SQLException ex) { throw new RuntimeException(ex); } }
From source file:org.jclouds.http.utils.ModifyRequest.java
public static void parseKeyValueFromStringToMap(String stringToParse, Multimap<String, String> map) { // note that '=' can be a valid part of the value int indexOfFirstEquals = stringToParse.indexOf('='); String key = indexOfFirstEquals == -1 ? stringToParse : stringToParse.substring(0, indexOfFirstEquals); String value = indexOfFirstEquals == -1 ? null : stringToParse.substring(indexOfFirstEquals + 1); map.put(key, value); }
From source file:moa2014.MOA2014.java
public static void principal(int[][] matrizatual) { Multimap<Integer, String> open_list = TreeMultimap.create(); HashMap<String, Estado> processados = new HashMap(); int difmatrizatual = diferencaMatriz(matrizatual); String stringmatriz = transformaMatrizString(matrizatual); open_list.put(difmatrizatual, stringmatriz); Estado estadoatual = new Estado(matrizatual, 0); processados.put(stringmatriz, estadoatual); int counter = 1; while (!open_list.isEmpty()) { System.out.println("Arvores processadas: " + counter); Iterator iterator = open_list.keySet().iterator(); /*//from w ww .j av a2 s.c o m Iterator iterator2 = open_list.keySet().iterator(); while (iterator2.hasNext()) { Integer key = (Integer) iterator2.next(); System.out.println("key : " + key + " value :" + open_list.get(key)); } Scanner scanner = new Scanner( System.in ); String input = scanner.nextLine(); */ counter++; Integer key = (Integer) iterator.next(); String matrizatualx1 = open_list.asMap().get(key).iterator().next(); Estado estadomenor = processados.get(matrizatualx1); int altura = estadomenor.getCusto(); System.out.println("Altura: " + altura); //LOCALIZA O ZERO int[] zerot = localizazero(estadomenor.getMatriz()); int x = zerot[0]; int y = zerot[1]; int x0 = x - 1; int x1 = x + 1; int y0 = y - 1; int y1 = y + 1; int difmatrizatualx = diferencaMatriz(estadomenor.getMatriz()); if (difmatrizatualx == 0) { System.out.println("Arvores processadas: " + counter); System.out.println("Custo: " + estadomenor.getCusto()); break; } int[][] matrizatualx = estadomenor.getMatriz(); if (x0 >= 0) { int[][] matriz; matriz = copyarray(matrizatualx); matriz[x][y] = matrizatualx[x0][y]; matriz[x0][y] = matrizatualx[x][y]; String stringmatriz1 = transformaMatrizString(matriz); if (!(processados.containsKey(stringmatriz1))) { int diferencamatriz = diferencaMatriz(matriz); int custototal = diferencamatriz + altura + 1; Estado estadonovo = new Estado(matriz, altura + 1); open_list.put(custototal, stringmatriz1); 
processados.put(stringmatriz1, estadonovo); } } if (x1 <= 3) { int[][] matriz; matriz = copyarray(matrizatualx); matriz[x][y] = matrizatualx[x1][y]; matriz[x1][y] = matrizatualx[x][y]; String stringmatriz2 = transformaMatrizString(matriz); if (!(processados.containsKey(stringmatriz2))) { int diferencamatriz = diferencaMatriz(matriz); int custototal = diferencamatriz + altura + 1; Estado estadonovo = new Estado(matriz, altura + 1); open_list.put(custototal, stringmatriz2); processados.put(stringmatriz2, estadonovo); } } if (y0 >= 0) { int[][] matriz; matriz = copyarray(matrizatualx); matriz[x][y] = matrizatualx[x][y0]; matriz[x][y0] = matrizatualx[x][y]; String stringmatriz3 = transformaMatrizString(matriz); if (!(processados.containsKey(stringmatriz3))) { int diferencamatriz = diferencaMatriz(matriz); int custototal = diferencamatriz + altura + 1; Estado estadonovo = new Estado(matriz, altura + 1); open_list.put(custototal, stringmatriz3); processados.put(stringmatriz3, estadonovo); } } if (y1 <= 3) { int[][] matriz; matriz = copyarray(matrizatualx); matriz[x][y] = matrizatualx[x][y1]; matriz[x][y1] = matrizatualx[x][y]; int custoateaqui = diferencaMatriz(matriz) + altura + 1; String stringmatriz4 = transformaMatrizString(matriz); if (!(processados.containsKey(stringmatriz4))) { int diferencamatriz = diferencaMatriz(matriz); int custototal = diferencamatriz + altura + 1; Estado estadonovo = new Estado(matriz, altura + 1); open_list.put(custototal, stringmatriz4); processados.put(stringmatriz4, estadonovo); } } open_list.remove(key, matrizatualx1); } }
From source file:org.semanticweb.owlapi.model.OWLDocumentFormat.java
/** * @param signature/*from www .j av a 2 s . c om*/ * signature for the ontology * @param punnedEntities * the set of entities that are known already to be punned * @param add * true if missing declarations should be added. If false, no * declarations will be added. * @return collection of IRIS used in illegal punnings */ static Collection<IRI> determineIllegalPunnings(boolean add, Stream<OWLEntity> signature, Collection<IRI> punnedEntities) { if (!add) { return Collections.emptySet(); } // determine what entities are illegally punned Multimap<IRI, EntityType<?>> punnings = LinkedListMultimap.create(); // disregard individuals as they do not give raise to illegal // punnings; only keep track of punned entities, ignore the rest signature.filter(e -> !e.isOWLNamedIndividual() && punnedEntities.contains(e.getIRI())) .forEach(e -> punnings.put(e.getIRI(), e.getEntityType())); return computeIllegals(punnings); }