List of usage examples for com.google.common.collect Sets union
public static <E> SetView<E> union(final Set<? extends E> set1, final Set<? extends E> set2)
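Sets.union returns an unmodifiable, lazily evaluated view over both sets rather than a new collection; call immutableCopy() on the returned SetView when a snapshot is needed. A minimal self-contained sketch (the class name and set contents below are illustrative, not from any of the source files listed):

import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
import java.util.Set;

public class SetsUnionExample {
    public static void main(String[] args) {
        Set<String> first = ImmutableSet.of("a", "b");
        Set<String> second = ImmutableSet.of("b", "c");

        // The returned SetView is a live, unmodifiable view over both sets.
        Sets.SetView<String> union = Sets.union(first, second);
        System.out.println(union); // [a, b, c]

        // immutableCopy() snapshots the view into an ImmutableSet.
        ImmutableSet<String> copy = union.immutableCopy();
        System.out.println(copy.size()); // 3
    }
}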
From source file:com.opengamma.strata.function.calculation.fra.FraCalculationFunction.java
@Override
public FunctionRequirements requirements(FraTrade trade, Set<Measure> measures) {
    Fra product = trade.getProduct();
    // Create a set of all indices referenced by the FRA
    Set<IborIndex> indices = new HashSet<>();
    // The main index is always present
    indices.add(product.getIndex());
    // The index used for linear interpolation is optional
    product.getIndexInterpolated().ifPresent(indices::add);
    // Create a key identifying the rate of each index referenced by the FRA
    Set<ObservableKey> indexRateKeys = indices.stream()
            .map(IndexRateKey::of)
            .collect(toImmutableSet());
    // Create a key identifying the forward curve of each index referenced by the FRA
    Set<MarketDataKey<?>> indexCurveKeys = indices.stream()
            .map(IborIndexCurveKey::of)
            .collect(toImmutableSet());
    // Create a key identifying the discount factors for the FRA currency
    Set<DiscountCurveKey> discountFactorsKeys = ImmutableSet.of(DiscountCurveKey.of(product.getCurrency()));
    return FunctionRequirements.builder()
            .singleValueRequirements(Sets.union(indexCurveKeys, discountFactorsKeys))
            .timeSeriesRequirements(indexRateKeys)
            .outputCurrencies(product.getCurrency())
            .build();
}
From source file:mvm.rya.indexing.accumulo.entity.StarQuery.java
public Set<String> getAssuredBindingNames() {
    Set<String> bindingNames = Sets.newHashSet();
    for (StatementPattern sp : nodes) {
        if (bindingNames.size() == 0) {
            bindingNames = sp.getAssuredBindingNames();
        } else {
            bindingNames = Sets.union(bindingNames, sp.getAssuredBindingNames());
        }
    }
    return bindingNames;
}
From source file:com.google.javascript.jscomp.newtypes.EnumType.java
static ImmutableSet<EnumType> union(ImmutableSet<EnumType> s1, ImmutableSet<EnumType> s2) {
    if (s1.isEmpty()) {
        return s2;
    }
    if (s2.isEmpty() || s1.equals(s2)) {
        return s1;
    }
    return Sets.union(s1, s2).immutableCopy();
}
From source file:org.apache.flex.compiler.internal.targets.FlexLibrarySWFTarget.java
@Override
protected FramesInformation computeFramesInformation() throws InterruptedException {
    final ModuleFactoryInfo moduleFactoryInfo = getModuleFactoryInfo();
    if (!moduleFactoryInfo.generateModuleFactory())
        return super.computeFramesInformation();

    final Set<ICompilationUnit> compilationUnits = Sets.union(
            Collections.singleton(moduleFactoryInfo.moduleFactoryBaseClassCompilationUnit),
            this.rootedCompilationUnits);

    final SWFFrameInfo frameInfo = new SWFFrameInfo(null, SWFFrameInfo.EXTERNS_ALLOWED, compilationUnits,
            moduleFactoryInfo.problems);

    final FlexLibrarySWFFramesInformation framesInfo = new FlexLibrarySWFFramesInformation(frameInfo);
    return framesInfo;
}
From source file:org.apache.whirr.service.hadoop.HadoopService.java
@Override
public HadoopCluster launchCluster(ClusterSpec clusterSpec) throws IOException {
    ComputeService computeService = ComputeServiceBuilder.build(serviceSpec);

    String privateKey = serviceSpec.readPrivateKey();
    String publicKey = serviceSpec.readPublicKey();

    // deal with user packages and autoshutdown with extra runurls
    byte[] nnjtBootScript = RunUrlBuilder.runUrls("sun/java/install",
            String.format("apache/hadoop/install nn,jt -c %s", serviceSpec.getProvider()));

    Template template = computeService.templateBuilder().osFamily(OsFamily.UBUNTU)
            .options(runScript(nnjtBootScript).installPrivateKey(privateKey).authorizePublicKey(publicKey)
                    .inboundPorts(22, 80, 8020, 8021, 50030)) // TODO: restrict further
            .build();

    InstanceTemplate instanceTemplate = clusterSpec.getInstanceTemplate(MASTER_ROLE);
    checkNotNull(instanceTemplate);
    checkArgument(instanceTemplate.getNumberOfInstances() == 1);

    Set<? extends NodeMetadata> nodes;
    try {
        nodes = computeService.runNodesWithTag(serviceSpec.getClusterName(), 1, template);
    } catch (RunNodesException e) {
        // TODO: can we do better here (retry?)
        throw new IOException(e);
    }

    NodeMetadata node = Iterables.getOnlyElement(nodes);
    InetAddress namenodePublicAddress = Iterables.getOnlyElement(node.getPublicAddresses());
    InetAddress jobtrackerPublicAddress = Iterables.getOnlyElement(node.getPublicAddresses());

    byte[] slaveBootScript = RunUrlBuilder.runUrls("sun/java/install",
            String.format("apache/hadoop/install dn,tt -n %s -j %s", namenodePublicAddress.getHostName(),
                    jobtrackerPublicAddress.getHostName()));

    template = computeService.templateBuilder().osFamily(OsFamily.UBUNTU)
            .options(runScript(slaveBootScript).installPrivateKey(privateKey).authorizePublicKey(publicKey))
            .build();

    instanceTemplate = clusterSpec.getInstanceTemplate(WORKER_ROLE);
    checkNotNull(instanceTemplate);

    Set<? extends NodeMetadata> workerNodes;
    try {
        workerNodes = computeService.runNodesWithTag(serviceSpec.getClusterName(),
                instanceTemplate.getNumberOfInstances(), template);
    } catch (RunNodesException e) {
        // TODO: don't bail out if only a few have failed to start
        throw new IOException(e);
    }

    // TODO: wait for TTs to come up (done in test for the moment)

    Set<Instance> instances = Sets.union(getInstances(MASTER_ROLE, Collections.singleton(node)),
            getInstances(WORKER_ROLE, workerNodes));

    Properties config = createClientSideProperties(namenodePublicAddress, jobtrackerPublicAddress);
    return new HadoopCluster(instances, config);
}
From source file:com.facebook.buck.core.model.targetgraph.AbstractTargetNode.java
/**
 * @return all targets which must be present in the TargetGraph before this one can be transformed
 *     into a BuildRule.
 */
public Set<BuildTarget> getParseDeps() {
    return Sets.union(getBuildDeps(), getTargetGraphOnlyDeps());
}
From source file:piecework.persistence.concrete.SearchRepositoryProvider.java
@Override
public SearchResponse forms(SearchCriteria criteria, ViewContext context, boolean excludeData)
        throws PieceworkException {
    long time = 0;
    if (LOG.isDebugEnabled())
        time = System.currentTimeMillis();

    Set<String> overseerProcessDefinitionKeys = principal.getProcessDefinitionKeys(AuthorizationRole.OVERSEER);
    Set<String> userProcessDefinitionKeys = principal.getProcessDefinitionKeys(AuthorizationRole.USER);

    Set<String> allProcessDefinitionKeys = Sets.union(overseerProcessDefinitionKeys, userProcessDefinitionKeys);
    Set<piecework.model.Process> allowedProcesses = processes(allProcessDefinitionKeys);

    SearchResponse response = new SearchResponse();

    if (allowedProcesses == null || allowedProcesses.isEmpty())
        return response;

    List<Process> alphabetical = new ArrayList<Process>(allowedProcesses);
    Collections.sort(alphabetical, new Comparator<Process>() {
        @Override
        public int compare(Process o1, Process o2) {
            if (org.apache.commons.lang.StringUtils.isEmpty(o1.getProcessDefinitionLabel()))
                return 0;
            if (org.apache.commons.lang.StringUtils.isEmpty(o2.getProcessDefinitionLabel()))
                return 1;
            return o1.getProcessDefinitionLabel().compareTo(o2.getProcessDefinitionLabel());
        }
    });

    List<Map<String, String>> metadata = new ArrayList<Map<String, String>>();
    Set<String> pgs = new HashSet<String>();

    for (Process allowedProcess : alphabetical) {
        if (allowedProcess.getProcessDefinitionKey() != null) {
            Process definition = allowedProcess;
            Form form = new Form.Builder().processDefinitionKey(definition.getProcessDefinitionKey())
                    .task(new Task.Builder().processDefinitionKey(definition.getProcessDefinitionKey())
                            .processDefinitionLabel(definition.getProcessDefinitionLabel()).build(context))
                    .build(context);
            Map<String, String> map = new HashMap<String, String>();
            map.put("processDefinitionKey", definition.getProcessDefinitionKey());
            map.put("processDefinitionLabel", definition.getProcessDefinitionLabel());
            map.put("link", form.getLink());
            metadata.add(map);
            if (StringUtils.isNotEmpty(allowedProcess.getProcessGroup())) {
                pgs.add(allowedProcess.getProcessGroup());
            }
        }
    }
    response.setMetadata(metadata);

    // bucket list stuff
    String pg = null;
    // get process group from allowed processes
    if (pgs.size() == 1) {
        pg = pgs.toArray()[0].toString();
    } else {
        // then try to get process group from query
        Map<String, List<String>> contentParameter = criteria.getContentParameters();
        if (contentParameter != null) {
            List<String> vlist = contentParameter.get("pg");
            if (vlist != null && vlist.size() > 0) {
                pg = vlist.get(0);
            }
        }
    }

    if (StringUtils.isNotEmpty(pg) && bucketListRepository != null) {
        BucketList bucketList = bucketListRepository.findOne(pg);
        if (bucketList != null) {
            response.setBucketList(bucketList);
        }
        response.setProcessGroup(pg);
    }

    String processStatus = criteria.getProcessStatus() != null
            ? sanitizer.sanitize(criteria.getProcessStatus())
            : Constants.ProcessStatuses.OPEN;
    String taskStatus = criteria.getTaskStatus() != null
            ? sanitizer.sanitize(criteria.getTaskStatus())
            : Constants.TaskStatuses.ALL;

    List<TaskDeployment> taskDeployments = new ArrayList<TaskDeployment>();
    Set<String> userIds = new HashSet<String>();

    List<Facet> facets = FacetFactory.facets(allowedProcesses);
    response.setFacets(facets);

    Map<DataFilterFacet, String> filterFacetParameters = criteria.getFilterFacetParameters();

    if (!excludeData) {
        Query query = new SearchQueryBuilder(criteria).build(allProcessDefinitionKeys, sanitizer);

        int instancePageNumber = 0;
        int instancePageSize = 1000;
        Sort sort = SearchUtility.sort(criteria, sanitizer);
        Pageable pageable = new PageRequest(instancePageNumber, instancePageSize, sort);
        Page<ProcessInstance> page = instanceRepository.findByQuery(query, pageable, true);

        int pageNumber = criteria.getPageNumber() != null ? criteria.getPageNumber().intValue() : 0;
        int pageSize = criteria.getPageSize() != null ? criteria.getPageSize() : 200;
        Pageable taskPageable = new PageRequest(pageNumber, pageSize);

        long total = page.getTotalElements();
        long taskCounter = 0;
        long instanceCounter = 0;

        long start = taskPageable.getOffset();
        long end = taskPageable.getOffset() + taskPageable.getPageSize();

        while (instanceCounter < total && page.hasContent()) {
            // Loop again through the list to get all user ids and build the intermediate object including
            // task, instance, and deployment
            for (ProcessInstance instance : page.getContent()) {
                String processDefinitionKey = instance.getProcessDefinitionKey();
                String processInstanceId = instance.getProcessInstanceId();
                instanceCounter++;

                ProcessDeployment processDeployment = null;

                Map<String, Object> instanceData = new HashMap<String, Object>();
                instanceData.put("processInstanceId", processInstanceId);
                instanceData.put("processInstanceLabel", instance.getProcessInstanceLabel());
                instanceData.put("processDefinitionLabel", instance.getProcessDefinitionLabel());
                instanceData.put("processStatus", instance.getProcessStatus());
                instanceData.put("applicationStatusExplanation", instance.getApplicationStatusExplanation());
                instanceData.put("startTime", instance.getStartTime());
                instanceData.put("lastModifiedTime", instance.getLastModifiedTime());
                instanceData.put("endTime", instance.getEndTime());

                String activation = context.getApplicationUri(ProcessInstance.Constants.ROOT_ELEMENT_NAME,
                        processDefinitionKey, processInstanceId, "activation");
                String attachment = context.getApplicationUri(ProcessInstance.Constants.ROOT_ELEMENT_NAME,
                        processDefinitionKey, processInstanceId, Attachment.Constants.ROOT_ELEMENT_NAME);
                String cancellation = context.getApplicationUri(ProcessInstance.Constants.ROOT_ELEMENT_NAME,
                        processDefinitionKey, processInstanceId, "cancellation");
                String history = context.getApplicationUri(ProcessInstance.Constants.ROOT_ELEMENT_NAME,
                        processDefinitionKey, processInstanceId, History.Constants.ROOT_ELEMENT_NAME);
                String restart = context.getApplicationUri(ProcessInstance.Constants.ROOT_ELEMENT_NAME,
                        processDefinitionKey, processInstanceId, "restart");
                String suspension = context.getApplicationUri(ProcessInstance.Constants.ROOT_ELEMENT_NAME,
                        processDefinitionKey, processInstanceId, "suspension");
                String bucketUrl = context.getApplicationUri(ProcessInstance.Constants.ROOT_ELEMENT_NAME,
                        processDefinitionKey, processInstanceId, "value/Bucket");

                instanceData.put("activation", activation);
                instanceData.put("attachment", attachment);
                instanceData.put("cancellation", cancellation);
                instanceData.put("history", history);
                instanceData.put("restart", restart);
                instanceData.put("suspension", suspension);
                instanceData.put("bucketUrl", bucketUrl);

                Map<String, List<Value>> valueData = instance.getData();
                if (valueData != null && !valueData.isEmpty()) {
                    for (Facet facet : facets) {
                        if (facet instanceof DataSearchFacet) {
                            DataSearchFacet dataSearchFacet = DataSearchFacet.class.cast(facet);
                            String name = dataSearchFacet.getName();
                            String value = ProcessInstanceUtility.firstString(name, valueData);
                            if (StringUtils.isNotEmpty(value))
                                instanceData.put(name, value);
                        }
                    }
                }

                Set<Task> tasks = instance.getTasks();
                if (tasks != null && !tasks.isEmpty()) {
                    for (Task task : tasks) {
                        if (include(task, processStatus, taskStatus, overseerProcessDefinitionKeys, principal)) {
                            if (taskCounter >= start && taskCounter < end) {
                                taskDeployments.add(new TaskDeployment(taskCounter, processDeployment,
                                        instance, task, instanceData));
                                userIds.addAll(task.getAssigneeAndCandidateAssigneeIds());
                            }
                            taskCounter++;
                        }
                    }
                }
            }

            if (total > instanceCounter) {
                instancePageNumber++;
                Pageable nextPage = new PageRequest(instancePageNumber, instancePageSize, sort);
                page = instanceRepository.findByQuery(query, nextPage, false);
            }
        }

        response.setTotal((int) taskCounter);
        response.setPageNumber(taskPageable.getPageNumber());
        response.setPageSize(taskPageable.getPageSize());
    }

    Map<String, User> userMap = identityService.findUsers(userIds);

    List<Map<String, Object>> data = new ArrayList<Map<String, Object>>();
    for (TaskDeployment taskDeployment : taskDeployments) {
        Map<String, Object> map = new HashMap<String, Object>();
        Map<String, Object> instanceData = taskDeployment.getInstanceData();

        if (instanceData != null && !instanceData.isEmpty())
            map.putAll(instanceData);

        Task task = TaskFactory.task(taskDeployment.getTask(), new PassthroughSanitizer(), userMap, context);
        String processDefinitionKey = task.getProcessDefinitionKey();

        if (!include(task, filterFacetParameters))
            continue;

        map.put("itemNumber", taskDeployment.getItemNumber());
        map.put("assignee", task.getAssignee());
        map.put("candidateAssignees", task.getCandidateAssignees());
        map.put("formInstanceId", task.getTaskInstanceId());
        map.put("taskId", task.getTaskInstanceId());
        map.put("taskClaimTime", task.getClaimTime());
        map.put("taskDueDate", task.getDueDate());
        map.put("taskStartTime", task.getStartTime());
        map.put("taskEndTime", task.getEndTime());
        map.put("taskLabel", task.getTaskLabel());
        map.put("taskDescription", task.getTaskDescription());
        map.put("taskStatus", task.getTaskStatus());
        map.put("active", task.isActive());

        String assignment = context != null && task != null && task.getTaskInstanceId() != null
                ? context.getApplicationUri(Task.Constants.ROOT_ELEMENT_NAME, processDefinitionKey,
                        task.getTaskInstanceId(), "assign")
                : null;
        map.put("assignment", assignment);
        map.put("link", context != null
                ? context.getApplicationUri(Form.Constants.ROOT_ELEMENT_NAME, processDefinitionKey)
                        + "?taskId=" + task.getTaskInstanceId()
                : null);

        data.add(map);
    }

    List<FacetSort> postQuerySortBy = criteria.getPostQuerySortBy();
    if (postQuerySortBy != null && !postQuerySortBy.isEmpty()) {
        Collections.reverse(postQuerySortBy);
        for (FacetSort facetSort : postQuerySortBy) {
            Collections.sort(data, new DataFilterFacetComparator(facetSort.getFacet()));
            if (facetSort.getDirection().equals(Sort.Direction.DESC))
                Collections.reverse(data);
        }
    }
    response.setData(data);

    List<FacetSort> facetSortList = criteria.getSortBy();
    List<String> sortBy = new ArrayList<String>();
    if (facetSortList != null) {
        for (FacetSort facetSort : facetSortList) {
            sortBy.add(facetSort.toString());
        }
    }
    response.setSortBy(sortBy);

    if (LOG.isDebugEnabled())
        LOG.debug("Retrieved forms in " + (System.currentTimeMillis() - time) + " ms");

    if (principal instanceof User)
        response.setCurrentUser(User.class.cast(principal));

    return response;
}
From source file:fr.norad.jaxrs.oauth2.core.service.TokenSpecService.java
private Set<String> checkScopes(Set<String> requestedScopes, Client client, User user)
        throws InvalidScopeOauthException {
    Set<String> allowedScopes = client.getAllowedScopes();
    if (user != null) {
        for (Group group : user.getGroups()) {
            allowedScopes = Sets.union(allowedScopes, group.getAllowedScopes());
        }
    }
    Sets.SetView<String> intersection = Sets.intersection(allowedScopes, requestedScopes);
    if (intersection.isEmpty()) {
        throw new InvalidScopeOauthException("No scope allowed requested");
    }
    return intersection;
}
From source file:org.apache.rya.indexing.accumulo.entity.StarQuery.java
public Set<String> getAssuredBindingNames() {
    Set<String> bindingNames = Sets.newHashSet();
    for (final StatementPattern sp : nodes) {
        if (bindingNames.size() == 0) {
            bindingNames = sp.getAssuredBindingNames();
        } else {
            bindingNames = Sets.union(bindingNames, sp.getAssuredBindingNames());
        }
    }
    return bindingNames;
}
From source file:org.asoem.greyfish.core.utils.EvaluatingMarkovChain.java
@Override
public Set<S> getStates() {
    return Sets.union(markovMatrix.rowKeySet(), markovMatrix.columnKeySet());
}