List of usage examples for java.util.Set#removeAll
boolean removeAll(Collection<?> c);
From source file: gov.bnl.channelfinder.TagsResource.java
/** * PUT method to create and <b>exclusively</b> update the tag identified by the * path parameter <tt>name</tt> to all channels identified in the payload * structure <tt>data</tt>./* w ww . j a v a2s .co m*/ * Setting the owner attribute in the XML root element is mandatory. * * @param data XmlTag structure containing the list of channels to be tagged * @return HTTP Response */ @PUT @Consumes({ MediaType.APPLICATION_JSON }) public Response createTags(List<XmlTag> data) { long start = System.currentTimeMillis(); Client client = getNewClient(); audit.info("client initialization: " + (System.currentTimeMillis() - start)); UserManager um = UserManager.getInstance(); um.setUser(securityContext.getUserPrincipal(), securityContext.isUserInRole("Administrator")); try { BulkRequestBuilder bulkRequest = client.prepareBulk(); for (XmlTag xmlTag : data) { IndexRequest indexRequest = new IndexRequest("tags", "tag", xmlTag.getName()) .source(jsonBuilder().startObject().field("name", xmlTag.getName()) .field("owner", xmlTag.getOwner()).endObject()); UpdateRequest updateRequest = new UpdateRequest("tags", "tag", xmlTag.getName()) .doc(jsonBuilder().startObject().field("name", xmlTag.getName()) .field("owner", xmlTag.getOwner()).endObject()) .upsert(indexRequest); bulkRequest.add(updateRequest); SearchResponse qbResult = client.prepareSearch("channelfinder") .setQuery(QueryBuilders.matchQuery("tags.name", xmlTag.getName())).addField("name") .setSize(10000).execute().actionGet(); Set<String> existingChannels = new HashSet<String>(); for (SearchHit hit : qbResult.getHits()) { existingChannels.add(hit.field("name").getValue().toString()); } Set<String> newChannels = new HashSet<String>(); if (xmlTag.getChannels() != null) { newChannels.addAll( Collections2.transform(xmlTag.getChannels(), new Function<XmlChannel, String>() { @Override public String apply(XmlChannel channel) { return channel.getName(); } })); } Set<String> remove = new HashSet<String>(existingChannels); 
remove.removeAll(newChannels); Set<String> add = new HashSet<String>(newChannels); add.removeAll(existingChannels); HashMap<String, String> param = new HashMap<String, String>(); param.put("name", xmlTag.getName()); param.put("owner", xmlTag.getOwner()); for (String ch : remove) { bulkRequest.add(new UpdateRequest("channelfinder", "channel", ch).refresh(true) .script("removeTag = new Object();" + "for (xmltag in ctx._source.tags) " + "{ if (xmltag.name == tag.name) { removeTag = xmltag} }; " + "ctx._source.tags.remove(removeTag);") .addScriptParam("tag", param)); } for (String ch : add) { bulkRequest.add(new UpdateRequest("channelfinder", "channel", ch) .script("ctx._source.tags.add(tag)").addScriptParam("tag", param)); } } bulkRequest.setRefresh(true); BulkResponse bulkResponse = bulkRequest.execute().actionGet(); if (bulkResponse.hasFailures()) { audit.severe(bulkResponse.buildFailureMessage()); if (bulkResponse.buildFailureMessage().contains("DocumentMissingException")) { return handleException(um.getUserName(), Response.Status.NOT_FOUND, bulkResponse.buildFailureMessage()); } else { return handleException(um.getUserName(), Response.Status.INTERNAL_SERVER_ERROR, bulkResponse.buildFailureMessage()); } } else { return Response.ok(data).build(); } } catch (Exception e) { return handleException(um.getUserName(), Response.Status.INTERNAL_SERVER_ERROR, e); } finally { client.close(); } }
From source file: org.apache.taverna.scufl2.api.common.Scufl2Tools.java
/** * @param processors// ww w . j ava2 s. co m * @param splitPoint * @return */ public ProcessorSplit splitProcessors(Collection<Processor> processors, Processor splitPoint) { Set<Processor> upStream = new HashSet<>(); Set<Processor> downStream = new HashSet<>(); Set<Processor> queue = new HashSet<>(); queue.add(splitPoint); // First let's go upstream while (!queue.isEmpty()) { Processor processor = queue.iterator().next(); queue.remove(processor); List<BlockingControlLink> preConditions = controlLinksBlocking(processor); for (BlockingControlLink condition : preConditions) { Processor upstreamProc = condition.getUntilFinished(); if (!upStream.contains(upstreamProc)) { upStream.add(upstreamProc); queue.add(upstreamProc); } } for (InputProcessorPort inputPort : processor.getInputPorts()) for (DataLink incomingLink : datalinksTo(inputPort)) { SenderPort source = incomingLink.getReceivesFrom(); if (!(source instanceof OutputProcessorPort)) continue; Processor upstreamProc = ((OutputProcessorPort) source).getParent(); if (!upStream.contains(upstreamProc)) { upStream.add(upstreamProc); queue.add(upstreamProc); } } } // Our split queue.add(splitPoint); // Then downstream while (!queue.isEmpty()) { Processor processor = queue.iterator().next(); queue.remove(processor); List<BlockingControlLink> controlledConditions = controlLinksWaitingFor(processor); for (BlockingControlLink condition : controlledConditions) { Processor downstreamProc = condition.getBlock(); if (!downStream.contains(downstreamProc)) { downStream.add(downstreamProc); queue.add(downstreamProc); } } for (OutputProcessorPort outputPort : processor.getOutputPorts()) for (DataLink datalink : datalinksFrom(outputPort)) { ReceiverPort sink = datalink.getSendsTo(); if (!(sink instanceof InputProcessorPort)) continue; Processor downstreamProcc = ((InputProcessorPort) sink).getParent(); if (!downStream.contains(downstreamProcc)) { downStream.add(downstreamProcc); queue.add(downstreamProcc); } } } Set<Processor> 
undecided = new HashSet<>(processors); undecided.remove(splitPoint); undecided.removeAll(upStream); undecided.removeAll(downStream); return new ProcessorSplit(splitPoint, upStream, downStream, undecided); }
From source file: com.db4o.sync4o.SyncDb.java
/** * Updates the SyncDb so that it contains updated sync data for all the * current instances of the class represented by SyncClass. * /*from w w w . ja v a 2 s . c o m*/ * @param syncClass * Represents the class whose instances are to have their sync * metadata updated. * @throws Sync4jException If any processing errors occur. * @throws IllegalArgumentException If syncClass is null. */ public void updateSyncData(final SyncClass syncClass) throws Sync4jException { if (syncClass == null) { throw new IllegalArgumentException(); } // NB: Sets are deliberately used in this method as our key // operations are all supposedly on *unique* keys. Sets will // help expose any runtime violations of this constraint. // All operations in this method are performed in memory, as the entire // shadow database needs to be activated to be updated. // First, assemble the set of keys that we already have in the syncdb // We also keep a map of keys to ObjectInfo's, to make subsequent // operations on the ObjectInfoList easier and quicker. SyncClassInfo syncClassInfo = getClassInfo(syncClass.getConfig()); List syncInfos = syncClassInfo.getObjectInfos(); Set syncDbKeys = getKeySet(syncInfos); // Create a set of keys for all the objects in the target db // to make subsequent set operations a little easier Set dbKeys = getAllKeys(syncClass); // find records that have appeared in the db since // we last saw it and mark them as SyncItemState.NEW Date newTimestamp = null; if (syncClassInfo.getLastSync() == null) { newTimestamp = new Date(); } Set newKeys = new HashSet(dbKeys); newKeys.removeAll(syncDbKeys); for (Iterator i = newKeys.iterator(); i.hasNext();) { Object key = i.next(); Object o = get(syncClass, key); SyncObjectInfo info = new SyncObjectInfo(syncClassInfo, key, (newTimestamp == null) ? 
syncClass.extractTimestamp(o) : newTimestamp, SyncState.NEW); syncInfos.add(info); } // find records that have been removed from the db since // we last saw it and mark them as SyncItemState.DELETED Date deleteTimestamp = null; if (syncClassInfo.getLastSync() != null) { Calendar cal = Calendar.getInstance(); cal.setTime(syncClassInfo.getLastSync()); cal.add(Calendar.SECOND, 1); deleteTimestamp = new Date(cal.getTimeInMillis()); } Set deletedKeys = new HashSet(syncDbKeys); deletedKeys.removeAll(dbKeys); for (Iterator i = deletedKeys.iterator(); i.hasNext();) { SyncObjectInfo info = syncClassInfo.findByKey(i.next()); info.setSyncState(SyncState.DELETED); info.setTimestamp(deleteTimestamp); } // find records that have been modified since we last saw // them and mark them as SyncItemState.UPDATED final Date lastSyncTime = syncClassInfo.getLastSync(); if (lastSyncTime != null) { for (Iterator i = syncInfos.iterator(); i.hasNext();) { SyncObjectInfo info = (SyncObjectInfo) i.next(); // Need to get the actual object here so we can compare // its last update to the lastSyncTime if ((info.getSyncState() == SyncState.SYNCHRONIZED) || (info.getSyncState() == SyncState.UPDATED)) { List objects = findByKey(syncClass, info.getKey()); if ((objects != null) && objects.size() > 0) { Object o = objects.get(0); Date d = syncClass.extractTimestamp(o); if (d != null) { info.setTimestamp(d); if ((info.getTimestamp().after(lastSyncTime))) { info.setSyncState(SyncState.UPDATED); } } } } } } // commit all the changes we have made setClassInfo(syncClassInfo); }
From source file: com.b2international.snowowl.snomed.importer.rf2.util.SnomedRefSetNameCollector.java
private void useGeneralLabelForExistingRefsets(Set<String> unlabeledRefSetIds) { Set<String> existingConceptIds = SnomedRequests.prepareSearchConcept().setLimit(unlabeledRefSetIds.size()) .filterByActive(true).filterByIds(unlabeledRefSetIds) .build(SnomedDatastoreActivator.REPOSITORY_UUID, configuration.getBranchPath()) .execute(ApplicationContext.getServiceForClass(IEventBus.class)) .then(concepts -> concepts.stream().map(SnomedConcept::getId).collect(Collectors.toSet())) .getSync();// w w w .j a va 2 s.c om fillGeneralLabels(existingConceptIds); unlabeledRefSetIds.removeAll(existingConceptIds); }
From source file: net.dv8tion.jda.core.handle.GuildMemberUpdateHandler.java
@Override protected Long handleInternally(JSONObject content) { final long id = content.getLong("guild_id"); if (api.getGuildLock().isLocked(id)) return id; JSONObject userJson = content.getJSONObject("user"); final long userId = userJson.getLong("id"); GuildImpl guild = (GuildImpl) api.getGuildMap().get(id); if (guild == null) { api.getEventCache().cache(EventCache.Type.GUILD, userId, () -> { handle(responseNumber, allContent); });/*from w w w. j a va 2s.com*/ EventCache.LOG.debug("Got GuildMember update but JDA currently does not have the Guild cached. " + content.toString()); return null; } MemberImpl member = (MemberImpl) guild.getMembersMap().get(userId); if (member == null) { api.getEventCache().cache(EventCache.Type.USER, userId, () -> { handle(responseNumber, allContent); }); EventCache.LOG.debug( "Got GuildMember update but Member is not currently present in Guild. " + content.toString()); return null; } Set<Role> currentRoles = member.getRoleSet(); List<Role> newRoles = toRolesList(guild, content.getJSONArray("roles")); //If newRoles is null that means that we didn't find a role that was in the array and was cached this event if (newRoles == null) return null; //Find the roles removed. List<Role> removedRoles = new LinkedList<>(); each: for (Role role : currentRoles) { for (Iterator<Role> it = newRoles.iterator(); it.hasNext();) { Role r = it.next(); if (role.equals(r)) { it.remove(); continue each; } } removedRoles.add(role); } if (removedRoles.size() > 0) currentRoles.removeAll(removedRoles); if (newRoles.size() > 0) currentRoles.addAll(newRoles); if (removedRoles.size() > 0) { api.getEventManager() .handle(new GuildMemberRoleRemoveEvent(api, responseNumber, guild, member, removedRoles)); } if (newRoles.size() > 0) { api.getEventManager().handle(new GuildMemberRoleAddEvent(api, responseNumber, guild, member, newRoles)); } if (content.has("nick")) { String prevNick = member.getNickname(); String newNick = content.isNull("nick") ? 
null : content.getString("nick"); if (!Objects.equals(prevNick, newNick)) { member.setNickname(newNick); api.getEventManager().handle( new GuildMemberNickChangeEvent(api, responseNumber, guild, member, prevNick, newNick)); } } return null; }
From source file: gov.bnl.channelfinder.TagsResource.java
/** * PUT method to create and <b>exclusively</b> update the tag identified by the * path parameter <tt>name</tt> to all channels identified in the payload * structure <tt>data</tt>.// w w w. j a v a 2 s. c o m * Setting the owner attribute in the XML root element is mandatory. * * @param tag URI path parameter: tag name * @param data XmlTag structure containing the list of channels to be tagged * @return HTTP Response */ @PUT @Path("{tagName: " + tagNameRegex + "}") @Consumes({ MediaType.APPLICATION_JSON }) public Response create(@PathParam("tagName") String tag, XmlTag data) { long start = System.currentTimeMillis(); Client client = getNewClient(); audit.info("client initialization: " + (System.currentTimeMillis() - start)); UserManager um = UserManager.getInstance(); um.setUser(securityContext.getUserPrincipal(), securityContext.isUserInRole("Administrator")); try { if (tag.equals(data.getName())) { BulkRequestBuilder bulkRequest = client.prepareBulk(); IndexRequest indexRequest = new IndexRequest("tags", "tag", tag).source(jsonBuilder().startObject() .field("name", data.getName()).field("owner", data.getOwner()).endObject()); UpdateRequest updateRequest = new UpdateRequest("tags", "tag", tag).doc(jsonBuilder().startObject() .field("name", data.getName()).field("owner", data.getOwner()).endObject()) .upsert(indexRequest); bulkRequest.add(updateRequest); SearchResponse qbResult = client.prepareSearch("channelfinder") .setQuery(QueryBuilders.matchQuery("tags.name", tag)).addField("name").setSize(10000) .execute().actionGet(); Set<String> existingChannels = new HashSet<String>(); for (SearchHit hit : qbResult.getHits()) { existingChannels.add(hit.field("name").getValue().toString()); } Set<String> newChannels = new HashSet<String>(); if (data.getChannels() != null) { newChannels .addAll(Collections2.transform(data.getChannels(), new Function<XmlChannel, String>() { @Override public String apply(XmlChannel channel) { return channel.getName(); } })); } Set<String> 
remove = new HashSet<String>(existingChannels); remove.removeAll(newChannels); Set<String> add = new HashSet<String>(newChannels); add.removeAll(existingChannels); HashMap<String, String> param = new HashMap<String, String>(); param.put("name", data.getName()); param.put("owner", data.getOwner()); for (String ch : remove) { bulkRequest.add(new UpdateRequest("channelfinder", "channel", ch).refresh(true) .script("removeTag = new Object();" + "for (xmltag in ctx._source.tags) " + "{ if (xmltag.name == tag.name) { removeTag = xmltag} }; " + "ctx._source.tags.remove(removeTag);") .addScriptParam("tag", param)); } for (String ch : add) { bulkRequest.add(new UpdateRequest("channelfinder", "channel", ch).refresh(true) .script("ctx._source.tags.add(tag)").addScriptParam("tag", param)); } bulkRequest.setRefresh(true); BulkResponse bulkResponse = bulkRequest.execute().actionGet(); if (bulkResponse.hasFailures()) { audit.severe(bulkResponse.buildFailureMessage()); if (bulkResponse.buildFailureMessage().contains("DocumentMissingException")) { return handleException(um.getUserName(), Response.Status.NOT_FOUND, bulkResponse.buildFailureMessage()); } else { return handleException(um.getUserName(), Response.Status.INTERNAL_SERVER_ERROR, bulkResponse.buildFailureMessage()); } } else { GetResponse response = client.prepareGet("tags", "tag", tag).execute().actionGet(); ObjectMapper mapper = new ObjectMapper(); XmlTag result = mapper.readValue(response.getSourceAsBytes(), XmlTag.class); Response r; if (result == null) { r = Response.status(Response.Status.NOT_FOUND).build(); } else { r = Response.ok(result).build(); } audit.info(um.getUserName() + "|" + uriInfo.getPath() + "|PUT|OK|" + (System.currentTimeMillis() - start) + "|" + r.getStatus() + "|data=" + XmlTag.toLog(data)); return r; } } else { return Response.status(Status.BAD_REQUEST).entity("Specified tag name '" + tag + "' and payload tag name '" + data.getName() + "' do not match").build(); } } catch (Exception e) { return 
handleException(um.getUserName(), Response.Status.INTERNAL_SERVER_ERROR, e); } finally { client.close(); } }
From source file: org.geowebcache.diskquota.jdbc.JDBCQuotaStore.java
/** * Called to initialize the database structure and the layers *///from w w w . j a v a2s . co m public void initialize() { if (dialect == null || jt == null || tt == null) { throw new IllegalStateException( "Please provide both the sql dialect and the data " + "source before calling inizialize"); } tt.execute(new TransactionCallbackWithoutResult() { @Override protected void doInTransactionWithoutResult(TransactionStatus status) { // setup the tables if necessary dialect.initializeTables(schema, jt); // get the existing table names List<String> existingLayers = jt.query(dialect.getAllLayersQuery(schema), new ParameterizedRowMapper<String>() { public String mapRow(ResultSet rs, int rowNum) throws SQLException { return rs.getString(1); } }); // compare with the ones available in the config final Set<String> layerNames = calculator.getLayerNames(); final Set<String> layersToDelete = new HashSet<String>(existingLayers); layersToDelete.removeAll(layerNames); // remove all the layers we don't need for (String layerName : layersToDelete) { deleteLayer(layerName); } // add any missing tileset for (String layerName : layerNames) { createLayerInternal(layerName); } // create the global quota if necessary Quota global = getUsedQuotaByTileSetIdInternal(GLOBAL_QUOTA_NAME); if (global == null) { createLayerInternal(GLOBAL_QUOTA_NAME); } } }); }
From source file: gov.usgs.cida.coastalhazards.wps.AutoImportProcess.java
/** * Applies a set of heuristics to find which target attribute corresponds to a certain input * attribute/*from w ww . j a v a2 s . c o m*/ * * @param sourceType * @param targetType * @return */ Map<String, String> buildAttributeMapping(SimpleFeatureType sourceType, SimpleFeatureType targetType) { // look for the typical manglings. For example, if the target is a // shapefile store it will move the geometry and name it the_geom // collect the source names Set<String> sourceNames = new HashSet<String>(); for (AttributeDescriptor sd : sourceType.getAttributeDescriptors()) { sourceNames.add(sd.getLocalName()); } // first check if we have been kissed by sheer luck and the names are // the same Map<String, String> result = new HashMap<String, String>(); for (String name : sourceNames) { if (targetType.getDescriptor(name) != null) { result.put(name, name); } } sourceNames.removeAll(result.keySet()); // then check for simple case difference (Oracle case) for (String name : sourceNames) { for (AttributeDescriptor td : targetType.getAttributeDescriptors()) { if (td.getLocalName().equalsIgnoreCase(name)) { result.put(name, td.getLocalName()); break; } } } sourceNames.removeAll(result.keySet()); // then check attribute names being cut (another Oracle case) for (String name : sourceNames) { String loName = name.toLowerCase(); for (AttributeDescriptor td : targetType.getAttributeDescriptors()) { String tdName = td.getLocalName().toLowerCase(); if (loName.startsWith(tdName)) { result.put(name, td.getLocalName()); break; } } } sourceNames.removeAll(result.keySet()); // consider the shapefile geometry descriptor mangling if (targetType.getGeometryDescriptor() != null && "the_geom".equals(targetType.getGeometryDescriptor().getLocalName()) && !"the_geom".equalsIgnoreCase(sourceType.getGeometryDescriptor().getLocalName())) { result.put(sourceType.getGeometryDescriptor().getLocalName(), "the_geom"); } // and finally we return with as much as we can match if (!sourceNames.isEmpty()) 
{ LOGGER.warning("Could not match the following attributes " + sourceNames + " to the target feature type ones: " + targetType); } return result; }
From source file: fr.aliacom.obm.common.calendar.EventNotificationServiceImpl.java
private void notifyAcceptedUpdateUsers(ObmUser user, Event previous, Event current, Locale locale, Map<Participation, ? extends Set<Attendee>> atts, TimeZone timezone, String ics, AccessToken token) { Set<Attendee> attendeesAccepted = atts.get(Participation.accepted()); if (attendeesAccepted != null) { Collection<Attendee> attendeesCanWriteOnCalendar = filterCanWriteOnCalendar(attendeesAccepted); if (attendeesCanWriteOnCalendar != null && !attendeesCanWriteOnCalendar.isEmpty()) { eventChangeMailer.notifyAcceptedUpdateUsersCanWriteOnCalendar(user, attendeesCanWriteOnCalendar, previous, current, locale, timezone, token); }//from ww w .ja v a 2s. c o m attendeesAccepted.removeAll(attendeesCanWriteOnCalendar); if (!attendeesAccepted.isEmpty()) { eventChangeMailer.notifyAcceptedUpdateUsers(user, attendeesAccepted, previous, current, locale, timezone, ics, token); } } }
From source file: gov.usgs.cida.dsas.wps.AutoImportProcess.java
/** * Applies a set of heuristics to find which target attribute corresponds to a certain input * attribute/*from w w w . j a v a2s . co m*/ * * @param sourceType * @param targetType * @return */ Map<String, String> buildAttributeMapping(SimpleFeatureType sourceType, SimpleFeatureType targetType) { // look for the typical manglings. For example, if the target is a // shapefile store it will move the geometry and name it the_geom // collect the source names Set<String> sourceNames = new HashSet<String>(); for (AttributeDescriptor sd : sourceType.getAttributeDescriptors()) { sourceNames.add(sd.getLocalName()); } // first check if we have been kissed by sheer luck and the names are // the same Map<String, String> result = new HashMap<String, String>(); for (String name : sourceNames) { if (targetType.getDescriptor(name) != null) { result.put(name, name); } } sourceNames.removeAll(result.keySet()); // then check for simple case difference (Oracle case) for (String name : sourceNames) { for (AttributeDescriptor td : targetType.getAttributeDescriptors()) { if (td.getLocalName().equalsIgnoreCase(name)) { result.put(name, td.getLocalName()); break; } } } sourceNames.removeAll(result.keySet()); // then check attribute names being cut (another Oracle case) for (String name : sourceNames) { String loName = name.toLowerCase(); for (AttributeDescriptor td : targetType.getAttributeDescriptors()) { String tdName = td.getLocalName().toLowerCase(); if (loName.startsWith(tdName)) { result.put(name, td.getLocalName()); break; } } } sourceNames.removeAll(result.keySet()); // consider the shapefile geometry descriptor mangling if (targetType.getGeometryDescriptor() != null && Constants.DEFAULT_GEOM_ATTR.equals(targetType.getGeometryDescriptor().getLocalName()) && !Constants.DEFAULT_GEOM_ATTR .equalsIgnoreCase(sourceType.getGeometryDescriptor().getLocalName())) { result.put(sourceType.getGeometryDescriptor().getLocalName(), Constants.DEFAULT_GEOM_ATTR); } // and finally we return with 
as much as we can match if (!sourceNames.isEmpty()) { LOGGER.warning("Could not match the following attributes " + sourceNames + " to the target feature type ones: " + targetType); } return result; }