List of usage examples for java.util.HashSet.addAll
boolean addAll(Collection<? extends E> c);
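addAll(c) inserts every element of c that is not already present and returns true if the set changed as a result. A minimal, self-contained example before the source-file snippets below (class and variable names here are illustrative):

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;

public class HashSetAddAllExample {
    public static void main(String[] args) {
        HashSet<String> colors = new HashSet<>(Arrays.asList("red", "green"));
        List<String> extra = Arrays.asList("green", "blue");

        // "green" is already present and is ignored; "blue" is added, so the call returns true.
        boolean changed = colors.addAll(extra);

        System.out.println(changed); // true
        System.out.println(colors);  // e.g. [red, green, blue] (iteration order is not guaranteed)
    }
}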
From source file:io.cloudslang.lang.compiler.scorecompiler.ScoreCompilerImpl.java
@Override
public CompilationModellingResult compileSource(Executable executable, Set<Executable> path) {
    List<RuntimeException> exceptions = new ArrayList<>();
    Map<String, Executable> filteredDependencies = new HashMap<>();
    // we handle dependencies only if the file has imports
    boolean hasDependencies = CollectionUtils.isNotEmpty(executable.getExecutableDependencies())
            && executable.getType().equals(SlangTextualKeys.FLOW_TYPE);
    if (hasDependencies) {
        try {
            Validate.notEmpty(path, "Source " + executable.getName()
                    + " has dependencies but no path was given to the compiler");
            Validate.noNullElements(path, "Source " + executable.getName() + " has empty dependencies");
        } catch (RuntimeException ex) {
            exceptions.add(ex);
        }
        // we add the current executable since a dependency can require it
        List<Executable> availableExecutables = new ArrayList<>(path);
        availableExecutables.add(executable);
        try {
            // then we match the references to the actual dependencies
            filteredDependencies = dependenciesHelper.matchReferences(executable, availableExecutables);
            handleOnFailureCustomResults(executable, filteredDependencies);
            List<RuntimeException> errors = compileValidator.validateModelWithDependencies(executable,
                    filteredDependencies);
            exceptions.addAll(errors);
        } catch (RuntimeException ex) {
            exceptions.add(ex);
        }
    }
    try {
        // next we create an execution plan for the required executable
        ExecutionPlan executionPlan = compileToExecutionPlan(executable);
        // and also create execution plans for all other dependencies
        Converter<Executable, ExecutionPlan> converter = new Converter<Executable, ExecutionPlan>() {
            @Override
            public ExecutionPlan convert(Executable compiledExecutable) {
                return compileToExecutionPlan(compiledExecutable);
            }
        };
        Map<String, ExecutionPlan> dependencies = convertMap(filteredDependencies, converter);
        Collection<Executable> executables = new ArrayList<>(filteredDependencies.values());
        executables.add(executable);
        // collect the UUIDs of all subflows: compiled dependencies plus external ones
        HashSet<String> subflowsUuids = new HashSet<>(dependencies.keySet());
        subflowsUuids.addAll(executable.getExternalExecutableDependencies());
        executionPlan.setSubflowsUUIDs(subflowsUuids);
        CompilationArtifact compilationArtifact = new CompilationArtifact(executionPlan, dependencies,
                executable.getInputs(), getSystemPropertiesFromExecutables(executables));
        return new CompilationModellingResult(compilationArtifact, exceptions);
    } catch (RuntimeException ex) {
        exceptions.add(ex);
    }
    return new CompilationModellingResult(null, exceptions);
}
From source file:com.bah.applefox.main.plugins.webcrawler.utilities.WebPageCrawl.java
/**
 * Gets all allowed links from the Web Page
 *
 * @return - allLinks
 */
public Set<String> getLinks() {
    // temp is used to eliminate duplicate links
    HashSet<String> temp = new HashSet<String>();
    temp.addAll(allLinks);
    return temp;
}
From source file:com.bah.applefox.main.plugins.webcrawler.utilities.WebPageCrawl.java
public Set<String> getImages() {
    // temp is used to eliminate duplicate links
    HashSet<String> temp = new HashSet<String>();
    temp.addAll(allImages);
    return temp;
}
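Both WebPageCrawl getters above use addAll only to copy a field into a fresh set before returning it. The HashSet copy constructor expresses the same defensive copy in one step; a minimal standalone sketch (the class and sample data here are illustrative, not part of WebPageCrawl):

import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.Set;

public class DefensiveCopyExample {
    // Stand-in for the crawler's allImages field.
    private final Collection<String> allImages = Arrays.asList("a.png", "b.png", "a.png");

    // Equivalent defensive copy via the copy constructor rather than addAll.
    public Set<String> getImages() {
        return new HashSet<String>(allImages);
    }

    public static void main(String[] args) {
        System.out.println(new DefensiveCopyExample().getImages()); // [a.png, b.png] in some order
    }
}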
From source file:info.dolezel.fatrat.plugins.helpers.JarClassLoader.java
public Set<Class> findAnnotatedClassesLocal(String packageName, Class annotation)
        throws IOException, ClassNotFoundException {
    String path = packageName.replace('.', '/');
    Enumeration<URL> resources = this.getResources(path);
    Set<File> dirs = new HashSet<File>();
    while (resources.hasMoreElements()) {
        URL resource = resources.nextElement();
        dirs.add(new File(resource.getFile()));
    }
    for (URL url : this.getURLs()) {
        dirs.add(new File(url.getFile()));
    }
    HashSet<Class> classes = new HashSet<Class>();
    for (File directory : dirs) {
        classes.addAll(findClasses(directory, packageName, annotation));
    }
    return classes;
}
From source file:licenseUtil.LicensingObject.java
public HashSet<String> getNonFixedHeaders() {
    HashSet<String> result = new HashSet<>();
    result.addAll(keySet());
    result.removeAll(ColumnHeader.HEADER_VALUES);
    return result;
}
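getNonFixedHeaders uses the addAll/removeAll pair to compute a set difference without mutating the underlying key set. A standalone sketch of the same idiom (the header names below are placeholders, not the real ColumnHeader constants):

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class SetDifferenceExample {
    public static void main(String[] args) {
        Set<String> allHeaders = new HashSet<>(Arrays.asList("artifactId", "version", "license", "comment"));
        List<String> fixedHeaders = Arrays.asList("artifactId", "version");

        // Copy first, then subtract, so the source set stays untouched.
        HashSet<String> nonFixed = new HashSet<>();
        nonFixed.addAll(allHeaders);
        nonFixed.removeAll(fixedHeaders);

        System.out.println(nonFixed); // e.g. [license, comment] (order not guaranteed)
    }
}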
From source file:com.netcrest.pado.tools.pado.command.export.java
@SuppressWarnings({ "rawtypes", "unchecked" }) @Override/*w w w . j av a2 s. c om*/ public void run(CommandLine commandLine, String command) throws Exception { List<String> argList = commandLine.getArgList(); if (argList.size() < 3) { PadoShell.printlnError(this, "Invalid number of arguments."); return; } String fromPath = argList.get(1); String toFilePath = argList.get(2); boolean isRefresh = commandLine.hasOption("refresh"); boolean isForce = PadoShellUtil.hasSingleLetterOption(commandLine, 'f', "refresh"); boolean includeKeys = PadoShellUtil.hasSingleLetterOption(commandLine, 'k', "refresh"); boolean includeValues = PadoShellUtil.hasSingleLetterOption(commandLine, 'v', "refresh"); boolean isSchema = includeKeys && includeValues || (includeKeys == false && includeValues == false); IPathBiz pathBiz = SharedCache.getSharedCache().getPado().getCatalog().newInstance(IPathBiz.class); String gridId = padoShell.getGridId(fromPath); String gridPath = padoShell.getGridPath(fromPath); if (pathBiz.exists(gridId, gridPath) == false) { PadoShell.printlnError(this, fromPath + ": Path does not exist."); return; } less l = (less) padoShell.getCommand("less"); IScrollableResultSet rs = l.queryPath(fromPath, isRefresh, includeKeys, includeValues); if (rs == null || rs.toList() == null || rs.toList().size() == 0) { PadoShell.printlnError(this, fromPath + ": Path empty. File not created."); return; } File csvFile; if (toFilePath.endsWith(".csv") == false) { csvFile = new File(toFilePath + ".csv"); } else { csvFile = new File(toFilePath); } String fn = csvFile.getName().substring(0, csvFile.getName().lastIndexOf(".csv")); File schemaFile = new File(fn + ".schema"); if (isForce == false && padoShell.isInteractiveMode() && csvFile.exists()) { PadoShell.println(this, toFilePath + ": File exists. Do you want to overwrite?"); PadoShell.println("Enter 'continue' to continue or any other keys to abort:"); String line = padoShell.readLine(""); if (line.equals("continue") == false) { PadoShell.println("Command aborted."); return; } } if (isSchema) { List list = rs.toList(); Struct struct = (Struct) list.get(0); Object key = struct.getFieldValues()[0]; Object value = struct.getFieldValues()[1]; PrintWriter schemaWriter = new PrintWriter(schemaFile); List keyList = null; if (value instanceof Map) { // Must iterate the entire map to get all unique keys Map valueMap = (Map) value; Set keySet = valueMap.keySet(); HashSet set = new HashSet(keySet.size(), 1f); set.addAll(keySet); keyList = new ArrayList(set); Collections.sort(keyList); } OutputUtil.printSchema(schemaWriter, gridPath, key, value, keyList, OutputUtil.TYPE_KEYS_VALUES, ",", PadoShellUtil.getIso8601DateFormat(), true, true); schemaWriter.close(); } PrintWriter csvWriter = new PrintWriter(csvFile); int printType; if (includeKeys && includeValues == false) { printType = OutputUtil.TYPE_KEYS; } else if (includeKeys == false && includeValues) { printType = OutputUtil.TYPE_VALUES; } else { printType = OutputUtil.TYPE_KEYS_VALUES; } try { OutputUtil.printScrollableResultSet(csvWriter, rs, ",", printType, PadoShellUtil.getIso8601DateFormat()); } finally { if (csvWriter != null) { csvWriter.close(); } } }
From source file:org.openvpms.component.business.domain.im.security.User.java
/**
 * Returns the authorities granted to the user.
 *
 * @return the authorities, sorted by natural key (never <code>null</code>)
 */
public Collection<GrantedAuthority> getAuthorities() {
    // TODO For performance we may need to cache the authorities for each user.
    HashSet<GrantedAuthority> authorities = new HashSet<GrantedAuthority>();
    for (SecurityRole role : roles) {
        authorities.addAll(role.getAuthorities());
    }
    return authorities;
}
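The getter above calls addAll inside a loop to flatten a collection of collections into one set, deduplicating as it goes, since HashSet ignores elements it already contains. A standalone sketch of that pattern with hypothetical data (plain strings stand in for the original role/authority classes):

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;

public class FlattenExample {
    public static void main(String[] args) {
        // Each inner list stands in for one role's authorities.
        List<List<String>> roles = Arrays.asList(
                Arrays.asList("READ", "WRITE"),
                Arrays.asList("READ", "DELETE"));

        // addAll in a loop flattens and deduplicates in one pass.
        HashSet<String> authorities = new HashSet<>();
        for (List<String> role : roles) {
            authorities.addAll(role);
        }

        System.out.println(authorities); // e.g. [READ, WRITE, DELETE] (order not guaranteed)
    }
}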
From source file:com.bitranger.parknshop.common.service.ads.ItemAdService.java
/**
 * select AD.*, count(RTI.id_tag) as TAG_INTER
 * from ps_item as IT
 *     inner join r_tag_item as RTI on RTI.id_item = IT.id
 *     inner join ps_promot_item as AD on AD.id_item = IT.id
 * where IT.id = ? and RTI.id_tag in (???)
 * order by TAG_INTER desc
 */
@SuppressWarnings("unchecked")
@Override
public List<PsPromotItem> forItemList(@Nullable final List<PsItem> items, final int limit,
        @Nullable PsCustomer customer) {
    if (customer != null) {
        return new CFilter().filter(items, limit, customer.getId());
    }
    final HashSet<PsTag> tags = new HashSet<>(items.size() * 6);
    for (PsItem i : items) {
        tags.addAll(i.getPsTags());
    }
    final StringBuilder b = new StringBuilder(512);
    b.append(" select AD.*, count(RTI.id_tag) as TAG_INTER from ps_item as IT "
            + " inner join r_tag_item as RTI on RTI.id_item = IT.id "
            + " inner join ps_promot_item as AD on AD.id_item = IT.id ");
    if (tags != null && tags.size() > 1) {
        b.append(" where RTI.id_tag in ( ");
        for (PsTag psTag : tags) {
            b.append(psTag.getId()).append(',');
        }
        b.setCharAt(b.length() - 1, ')');
    }
    b.append(" order by TAG_INTER desc ");
    System.out.println(b.toString());
    return psAdItemDAO.hibernate().executeFind(new HibernateCallback<List<PsPromotItem>>() {
        @Override
        public List<PsPromotItem> doInHibernate(Session session) throws HibernateException, SQLException {
            return session.createSQLQuery(b.toString()).addEntity(PsPromotItem.class).list();
        }
    });
    // Iterator<PsTag> i = tags.iterator();
    // while (tags.size() > limit) {
    //     i.remove();
    // }
}
From source file:gov.nih.nci.caintegrator.application.query.CopyNumberAlterationCriterionHandler.java
@Override
Set<SegmentData> getSegmentDataMatches(CaIntegrator2Dao dao, Study study, Platform platform)
        throws InvalidCriterionException {
    List<SegmentData> segmentDataMatchesFromDao = dao.findMatchingSegmentDatas(criterion, study, platform);
    List<List<SegmentData>> segmentDataMatchesList = new ArrayList<List<SegmentData>>();
    int startPos = 0;
    int endPos = SEGMENT_BUFFER_SIZE;
    int totalSize = segmentDataMatchesFromDao.size();
    while (totalSize > startPos) {
        int positionToEnd = endPos >= totalSize ? totalSize : endPos;
        segmentDataMatchesList.add(segmentDataMatchesFromDao.subList(startPos, positionToEnd));
        // subList's end index is exclusive, so the next chunk starts exactly where this one ended
        startPos = positionToEnd;
        endPos += SEGMENT_BUFFER_SIZE;
    }
    HashSet<SegmentData> segmentDataMatches = new HashSet<SegmentData>();
    for (List<SegmentData> segments : segmentDataMatchesList) {
        segmentDataMatches.addAll(dao.findMatchingSegmentDatasByLocation(segments, study, platform));
    }
    return segmentDataMatches;
}
From source file:org.mariotaku.twidere.fragment.AddStatusFilterDialogFragment.java
private FilterItemInfo[] getFilterItemsInfo() {
    final Bundle args = getArguments();
    if (args == null || !args.containsKey(EXTRA_STATUS)) return new FilterItemInfo[0];
    final ParcelableStatus status = args.getParcelable(EXTRA_STATUS);
    if (status == null) return new FilterItemInfo[0];
    final ArrayList<FilterItemInfo> list = new ArrayList<>();
    if (status.is_retweet) {
        list.add(new FilterItemInfo(FilterItemInfo.FILTER_TYPE_USER,
                new UserItem(status.retweeted_by_user_key, status.retweeted_by_user_name,
                        status.retweeted_by_user_screen_name)));
    }
    if (status.is_quote) {
        list.add(new FilterItemInfo(FilterItemInfo.FILTER_TYPE_USER,
                new UserItem(status.quoted_user_key, status.quoted_user_name, status.quoted_user_screen_name)));
    }
    list.add(new FilterItemInfo(FilterItemInfo.FILTER_TYPE_USER,
            new UserItem(status.user_key, status.user_name, status.user_screen_name)));
    final ParcelableUserMention[] mentions = status.mentions;
    if (mentions != null) {
        for (final ParcelableUserMention mention : mentions) {
            if (!mention.key.equals(status.user_key)) {
                list.add(new FilterItemInfo(FilterItemInfo.FILTER_TYPE_USER, mention));
            }
        }
    }
    final HashSet<String> hashtags = new HashSet<>();
    hashtags.addAll(mExtractor.extractHashtags(status.text_plain));
    for (final String hashtag : hashtags) {
        list.add(new FilterItemInfo(FilterItemInfo.FILTER_TYPE_KEYWORD, hashtag));
    }
    final String source = HtmlEscapeHelper.toPlainText(status.source);
    list.add(new FilterItemInfo(FilterItemInfo.FILTER_TYPE_SOURCE, source));
    return list.toArray(new FilterItemInfo[list.size()]);
}