List of usage examples for java.util.List.removeAll
boolean removeAll(Collection<?> c);
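Before the project examples below, a minimal, self-contained sketch of the contract may help: removeAll removes every element of the list that is equal (via equals) to some element of the argument collection, duplicates included, and returns true if the list was modified. The class name RemoveAllDemo is ours for illustration, not taken from any of the projects below.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class RemoveAllDemo {
    public static void main(String[] args) {
        List<String> letters = new ArrayList<>(Arrays.asList("a", "b", "a", "c"));
        // Removes both occurrences of "a"; "x" is simply not found
        boolean changed = letters.removeAll(Arrays.asList("a", "x"));
        System.out.println(changed); // true -> the list was modified
        System.out.println(letters); // [b, c]
    }
}

Note that Arrays.asList returns a fixed-size list, which is fine here because removeAll is invoked on the mutable ArrayList, not on the argument.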
From source file:com.seer.datacruncher.spring.ChecksTypeReadController.java
@SuppressWarnings("unchecked")
public ModelAndView handleRequest(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    ObjectMapper mapper = new ObjectMapper();
    ServletOutputStream out = null;
    response.setContentType("application/json");
    out = response.getOutputStream();
    String idSchemaField = request.getParameter("idSchemaField");
    ReadList readList = checksTypeDao.read(-1, -1);
    List<ChecksTypeEntity> checkTypeEntites = (List<ChecksTypeEntity>) readList.getResults();
    if (StringUtils.isNotEmpty(idSchemaField)) {
        String leftPane = request.getParameter("leftPane");
        ReadList assignedReadList = checksTypeDao.readCheckTypeBySchemaFieldId(Long.parseLong(idSchemaField));
        List<ChecksTypeEntity> assignedCheckTypeEntites = (List<ChecksTypeEntity>) assignedReadList
                .getResults();
        if ("true".equalsIgnoreCase(leftPane)) {
            if (CollectionUtils.isNotEmpty(assignedCheckTypeEntites)) {
                // Keep only the check types not yet assigned to this schema field
                checkTypeEntites.removeAll(assignedCheckTypeEntites);
            }
        } else {
            readList.setResults(assignedCheckTypeEntites);
        }
    }
    out.write(mapper.writeValueAsBytes(readList));
    out.flush();
    out.close();
    return null;
}
From source file:de.hybris.platform.product.impl.DefaultVariantsService.java
/** {@inheritDoc} */
@Override
public void setVariantAttributesForVariantType(final VariantTypeModel variantType,
        final List<VariantAttributeDescriptorModel> newAttributes) {
    if (getModelService().isNew(variantType)) {
        throw new IllegalArgumentException(
                "Given variantType " + variantType + " is new and hasn't been persisted yet");
    }
    final List<VariantAttributeDescriptorModel> oldAttributes = getVariantAttributesForVariantType(variantType);
    if (newAttributes != null && !newAttributes.isEmpty()) {
        // Prepare the removal list: keep only the existing attributes that are not among the new (given) attributes
        oldAttributes.removeAll(newAttributes);
    }
    if (!oldAttributes.isEmpty()) {
        // If any of the attributes is an inherited attribute, it cannot be removed
        for (final VariantAttributeDescriptorModel attr : oldAttributes) {
            if (!variantType.equals(attr.getDeclaringEnclosingType())) {
                throw new SystemException("attribute " + attr.getQualifier()
                        + " is an inherited attribute of the variantType " + variantType.getCode()
                        + " and can't be removed this way. Setting new Variant Attributes List don't forget to include the inherited variant attributes as well.");
            }
        }
        if (LOG.isDebugEnabled()) {
            LOG.debug(oldAttributes.size() + " old attributes will be removed from model...");
        }
        for (final VariantAttributeDescriptorModel attr : oldAttributes) {
            getModelService().remove(attr);
        }
    }
    // For all new attributes, check the enclosing type and set the position for attribute order
    if (newAttributes != null) {
        int index = 0;
        for (final VariantAttributeDescriptorModel attr : newAttributes) {
            // Check enclosing type
            if (!variantType.equals(attr.getEnclosingType())) {
                final String attrCode = attr.getEnclosingType() == null ? null
                        : attr.getEnclosingType().getCode();
                throw new SystemException("attribute descriptor " + attr.getQualifier()
                        + " has different enclosing type (expected " + variantType.getCode() + " but got "
                        + attrCode + ")");
            }
            // Set position for attribute order
            attr.setPosition(Integer.valueOf(index++));
        }
        // Save all attributes
        getModelService().saveAll(newAttributes);
        if (LOG.isDebugEnabled()) {
            LOG.debug("Attributes have been assigned to the variantType=" + variantType.getCode()
                    + ". This type contains now " + newAttributes.size() + " attributes");
        }
    } else {
        if (LOG.isDebugEnabled()) {
            LOG.debug("No new attributes have been assigned to the variantType=" + variantType.getCode()
                    + ". The old attributes have been removed and the type does not contain any attributes at this moment");
        }
    }
}
From source file:org.openmrs.module.pihmalawi.reporting.definition.dataset.evaluator.EncounterBreakdownDataSetEvaluator.java
/**
 * @throws EvaluationException
 * @see DataSetEvaluator#evaluate(DataSetDefinition, EvaluationContext)
 */
public DataSet evaluate(DataSetDefinition dataSetDefinition, EvaluationContext context)
        throws EvaluationException {
    context = ObjectUtil.nvl(context, new EvaluationContext());
    MapDataSet data = new MapDataSet(dataSetDefinition, context);
    EncounterBreakdownDataSetDefinition dsd = (EncounterBreakdownDataSetDefinition) dataSetDefinition;

    // Construct the encounter filters to iterate across
    Map<String, List<EncounterType>> encounterTypeFilters = new LinkedHashMap<String, List<EncounterType>>();
    List<EncounterType> otherEncounterTypes = Context.getEncounterService().getAllEncounterTypes();
    for (int encTypeNum = 1; encTypeNum <= dsd.getTypes().size(); encTypeNum++) {
        EncounterType encounterType = dsd.getTypes().get(encTypeNum - 1);
        encounterTypeFilters.put("enc" + encTypeNum, Arrays.asList(encounterType));
        otherEncounterTypes.remove(encounterType);
    }
    encounterTypeFilters.put("otherenc", otherEncounterTypes);

    // Construct the user and location filters to iterate across
    Map<String, List<User>> userFilters = new LinkedHashMap<String, List<User>>();
    Map<String, List<Location>> locationFilters = new LinkedHashMap<String, List<Location>>();
    if (dsd.getGrouping() == EncounterBreakdownDataSetDefinition.Grouping.User) {
        // Determine what users to iterate across. Default to top 10, and "other"
        List<User> mostFrequentUsers = getUsersOrderedByNumEncounters(
                DateUtil.adjustDate(dsd.getEndDate(), -7 * dsd.getNumberOfWeeks(), Calendar.DATE),
                dsd.getEndDate(), context);
        List<User> otherUsers = Context.getUserService().getAllUsers();
        for (int userNum = 1; userNum <= mostFrequentUsers.size() && userNum <= 10; userNum++) {
            User user = mostFrequentUsers.get(userNum - 1);
            String userKey = "user" + userNum;
            userFilters.put(userKey, Arrays.asList(user));
            otherUsers.remove(user);
            data.addData(new DataSetColumn(userKey + "name", userKey + "name", String.class),
                    user.getUsername());
        }
        userFilters.put("userother", otherUsers);
    } else {
        List<Location> locations = metadata.getSystemLocations();
        List<Location> otherLocations = Context.getLocationService().getAllLocations();
        for (int locationNum = 1; locationNum <= locations.size(); locationNum++) {
            Location location = locations.get(locationNum - 1);
            String locationKey = "loc" + locationNum;
            List<Location> locList = metadata.getAllLocations(location);
            locationFilters.put(locationKey, locList);
            otherLocations.removeAll(locList);
            data.addData(new DataSetColumn(locationKey + "name", locationKey + "name", String.class),
                    location.getName());
        }
        locationFilters.put("locother", otherLocations);
    }

    // Now, iterate across the product of all of these and add them to the data set
    Date startDate, endDate = null;
    for (int weekNum = 0; weekNum < dsd.getNumberOfWeeks(); weekNum++) {
        endDate = (endDate == null ? dsd.getEndDate() : DateUtil.adjustDate(endDate, -7, Calendar.DATE));
        startDate = DateUtil.adjustDate(endDate, -6, Calendar.DATE);
        for (String encounterTypeKey : encounterTypeFilters.keySet()) {
            List<EncounterType> encounterTypes = encounterTypeFilters.get(encounterTypeKey);
            for (String userKey : userFilters.keySet()) {
                List<User> users = userFilters.get(userKey);
                String key = userKey + encounterTypeKey + "ago" + weekNum;
                addData(data, key, startDate, endDate, encounterTypes, users, null, context);
            }
            for (String locationKey : locationFilters.keySet()) {
                List<Location> locations = locationFilters.get(locationKey);
                String key = locationKey + encounterTypeKey + "ago" + weekNum;
                addData(data, key, startDate, endDate, encounterTypes, null, locations, context);
            }
        }
    }
    return data;
}
From source file:cs.ox.ac.uk.sors.PreferencesTestA.java
public void testMerge() throws Exception {
    // Configuration.
    final DecompositionStrategy decomposition = DecompositionStrategy.DECOMPOSE;
    final RewritingLanguage rewLang = RewritingLanguage.UCQ;
    final SubCheckStrategy subchkStrategy = SubCheckStrategy.INTRADEC;
    final NCCheck ncCheckStrategy = NCCheck.NONE;

    LOGGER.info("Decomposition: " + decomposition.name());
    LOGGER.info("Rewriting Language: " + rewLang.name());
    LOGGER.info("Subsumption Check Strategy: " + subchkStrategy.name());
    LOGGER.info("Negative Constraints Check Strategy: " + ncCheckStrategy.name());

    final String creationDate = dateFormat.format(new Date());

    // Parse the program
    final Parser parser = new Parser();
    parser.parse(getStringFile(_DEFAULT_INPUT_PATH + "prefDB-ontology.dtg"));

    // Get the rules
    final List<IRule> rules = parser.getRules();

    // Get the queries
    final List<IQuery> queryHeads = parser.getQueries();

    final Map<IPredicate, IRelation> conf = parser.getDirectives();
    if (!conf.isEmpty()) {
        StorageManager.getInstance();
        StorageManager.configure(conf);
    }

    // Get the TGDs from the set of rules
    final List<IRule> tgds = RewritingUtils.getTGDs(rules, queryHeads);

    final List<IRule> mSBox = RewritingUtils.getSBoxRules(rules, queryHeads);
    final IRuleSafetyProcessor ruleProc = new StandardRuleSafetyProcessor();
    ruleProc.process(mSBox);
    final IQueryRewriter ndmRewriter = new NDMRewriter(mSBox);

    // Convert the query bodies into rules: everything that is not a TGD is a query body
    final List<IRule> bodies = new LinkedList<IRule>(rules);
    bodies.removeAll(tgds);
    final IRule query = RewritingUtils.getQueries(bodies, queryHeads).get(0);

    // Get the constraints from the set of rules
    final Set<IRule> constraints = RewritingUtils.getConstraints(rules, queryHeads);

    final Set<Expressivity> exprs = RewritingUtils.getExpressivity(tgds);
    LOGGER.info("Expressivity: " + exprs.toString());
    if (!exprs.contains(Expressivity.LINEAR) && !exprs.contains(Expressivity.STICKY)) {
        extracted();
    }

    // Compute the dependency graph
    LOGGER.debug("Computing position dependencies.");
    long posDepTime = System.currentTimeMillis();
    Map<Pair<IPosition, IPosition>, Set<List<IRule>>> deps = DepGraphUtils.computePositionDependencyGraph(tgds);
    posDepTime = System.currentTimeMillis() - posDepTime;

    CacheManager.setupCaching();

    // If linear TGDs, compute the atom coverage graph.
    LOGGER.debug("Computing atom coverage graph.");
    long atomCoverGraphTime = System.currentTimeMillis();
    if (exprs.contains(Expressivity.LINEAR)) {
        deps = DepGraphUtils.computeAtomCoverageGraph(deps);
    }
    atomCoverGraphTime = System.currentTimeMillis() - atomCoverGraphTime;

    final ParallelRewriter cnsRewriter = new ParallelRewriter(DecompositionStrategy.MONOLITIC,
            RewritingLanguage.UCQ, SubCheckStrategy.NONE, NCCheck.NONE);
    long ncRewTime = System.currentTimeMillis();
    final Set<IRule> rewrittenConstraints = Sets.newHashSet();
    if (!ncCheckStrategy.equals(NCCheck.NONE)) {
        for (final IRule c : constraints) {
            rewrittenConstraints.addAll(cnsRewriter.getRewriting(c, tgds, new HashSet<IRule>(), deps, exprs));
        }
    }
    ncRewTime = System.currentTimeMillis() - ncRewTime;
    LOGGER.debug("Finished rewriting constraints.");

    Map<String, Double> probModel = ProbabilisticModel.get(_DEFAULT_INPUT_PATH + "reviews.txt");

    // Compute the rewriting
    final ParallelRewriter rewriter = new ParallelRewriter(decomposition, rewLang, subchkStrategy,
            ncCheckStrategy);
    // List<Integer> ks = new ArrayList<Integer>();
    // ks.add(5);
    List<MergingStrategy> str = new ArrayList<MergingStrategy>();
    // str.add(PreferenceStrategy.PREFS_GEN);
    str.add(MergingStrategy.PREFS_PT);
    str.add(MergingStrategy.PREFS_RANK);
    str.add(MergingStrategy.PREFS_SORT);

    LOGGER.trace("start the things.");
    final String summaryPrefix = StringUtils.join(creationDate, "-");

    final File sizeSummaryFile = FileUtils.getFile(_WORKING_DIR,
            FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH + "/"),
            FilenameUtils.separatorsToSystem(_DEFAULT_SUMMARY_DIR),
            StringUtils.join(summaryPrefix, "-", "size-summary.csv"));
    final CSVWriter sizeSummaryWriter = new CSVWriter(new FileWriter(sizeSummaryFile), ',');
    final File timeSummaryFile = FileUtils.getFile(_WORKING_DIR,
            FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH + "/"),
            FilenameUtils.separatorsToSystem(_DEFAULT_SUMMARY_DIR),
            StringUtils.join(summaryPrefix, "-", "time-summary.csv"));
    final CSVWriter timeSummaryWriter = new CSVWriter(new FileWriter(timeSummaryFile), ',');

    sizeSummaryWriter.writeNext(UReportingUtils.getSummaryRewritingSizeReportHeader());
    timeSummaryWriter.writeNext(UReportingUtils.getSummaryRewritingTimeReportHeader());

    // for (int nbNodes = 500; nbNodes < 1000; nbNodes += 500) {
    // for (int nbNodes = 10; nbNodes < 20; nbNodes += 10) {
    for (int nbNodes = 1000; nbNodes < 13000; nbNodes += 1000) {
        double sparsity = 0.15;
        // sparsity = 0.15; experiment with no gen and no transitive closure, nbNodes 1000 -> 13000
        // for (Integer k : ks) {
        for (int con = 0; con < 10; con++) {
            PrefParameters parameters = new PrefParameters(nbNodes, sparsity);
            IRule q = query;
            CacheManager.setupCaching();
            final String queryPredicate = q.getHead().iterator().next().getAtom().getPredicate()
                    .getPredicateSymbol();

            // Setup reporting
            final JoDSReporter rep = JoDSReporter.getInstance(true);
            JoDSReporter.setupReporting();
            JoDSReporter.setQuery(queryPredicate);
            JoDSReporter.setTest("test" + con);
            rep.setValue(URewMetric.DEPGRAPH_TIME, posDepTime);

            LOGGER.info("Processing query: ".concat(q.toString()));
            final long overallTime = System.currentTimeMillis();
            final Set<IRule> rewriting = rewriter.getRewriting(q, tgds, rewrittenConstraints, deps, exprs);
            rep.setValue(URewMetric.REW_TIME, System.currentTimeMillis() - overallTime);
            rep.setValue(URewMetric.REW_SIZE, (long) rewriting.size());
            rep.setValue(URewMetric.REW_CNS_TIME, ncRewTime);
            IRelation result = getRelation(rewriting, parameters, ndmRewriter);

            // CONSTRUCT graph
            long constPrefGraphTime = System.currentTimeMillis();
            final PreferencesGraph prefGraph = PreferenceGenerator
                    .generatePreferenceGraph(parameters.getSparsity(), result);
            System.out.println("Gen" + prefGraph.getEdgesSize());
            rep.setValue(URewMetric.PREFGRAPH_CONST_SIZE_V, (long) prefGraph.getVertexesSize());
            rep.setValue(URewMetric.PREFGRAPH_CONST_SIZE_E, (long) prefGraph.getEdgesSize());
            constPrefGraphTime = System.currentTimeMillis() - constPrefGraphTime;
            rep.setValue(URewMetric.PREFGRAPH_CONST_TIME, constPrefGraphTime);

            // TRANSITIVE graph
            long transitiveClosureTime = System.currentTimeMillis();
            // TransitiveClosure c = TransitiveClosure.INSTANCE;
            // c.closeSimpleDirectedGraph(prefGraph.g);
            transitiveClosureTime = System.currentTimeMillis() - transitiveClosureTime;
            rep.setValue(URewMetric.TRANSITIVE_CLOSURE_TIME, transitiveClosureTime);
            rep.setValue(URewMetric.PREFGRAPH_TRA_SIZE_V, (long) prefGraph.getVertexesSize());
            rep.setValue(URewMetric.PREFGRAPH_TRA_SIZE_E, (long) prefGraph.getEdgesSize());
            System.out.println("Trans" + prefGraph.getEdgesSize() + "-" + transitiveClosureTime);

            PreferencesGraph prefMerged = null;
            Map<ITuple, Integer> ranks = null;

            // Merge the preference graph under each strategy
            for (MergingStrategy strategyQA : str) {
                JoDSReporter.setStrategy(strategyQA);
                long mergeOperatorTime = System.currentTimeMillis();
                if (strategyQA == MergingStrategy.PREFS_GEN) {
                    double t = 0.3; // randInt(509, 761)/100.0;
                    mergeOperatorTime = System.currentTimeMillis();
                    prefMerged = CombinationAlgorithms.combPrefsGen(prefGraph, probModel, t);
                    mergeOperatorTime = System.currentTimeMillis() - mergeOperatorTime;
                } else if (strategyQA == MergingStrategy.PREFS_PT) {
                    mergeOperatorTime = System.currentTimeMillis();
                    double p = 0.5; // randInt(383, 887)/100.0;
                    prefMerged = CombinationAlgorithms.combPrefsPT(prefGraph, probModel, p);
                    mergeOperatorTime = System.currentTimeMillis() - mergeOperatorTime;
                } else if (strategyQA == MergingStrategy.PREFS_RANK) {
                    mergeOperatorTime = System.currentTimeMillis();
                    ranks = CombinationAlgorithms.combPrefsRank(prefGraph, probModel, Function.Max);
                    mergeOperatorTime = System.currentTimeMillis() - mergeOperatorTime;
                } else if (strategyQA == MergingStrategy.PREFS_SORT) {
                    mergeOperatorTime = System.currentTimeMillis();
                    prefMerged = CombinationAlgorithms.combPrefsSort(prefGraph, probModel);
                    mergeOperatorTime = System.currentTimeMillis() - mergeOperatorTime;
                }

                if (prefMerged != null) {
                    rep.setValue(URewMetric.PREFGRAPH_TOPK_SIZE_V, (long) prefMerged.getVertexesSize());
                    rep.setValue(URewMetric.PREFGRAPH_TOPK_SIZE_E, (long) prefMerged.getEdgesSize());
                } else {
                    rep.setValue(URewMetric.PREFGRAPH_TOPK_SIZE_V, (long) 0);
                    rep.setValue(URewMetric.PREFGRAPH_TOPK_SIZE_E, (long) 0);
                }
                System.out.print("test" + con + strategyQA + "\n");

                for (int k = 5; k < 10; k = k + 5) {
                    JoDSReporter.setK(k);
                    rep.setValue(URewMetric.PREFGRAPH_MERGE_TIME, (long) mergeOperatorTime);
                    long topKTime = System.currentTimeMillis();
                    List<ITuple> topk = null;
                    if (strategyQA == MergingStrategy.PREFS_RANK) {
                        topk = TopKAlgorithms.topkPrefsRank(ranks, k);
                    } else {
                        topk = TopKAlgorithms.getTopK(prefMerged, k);
                    }
                    topKTime = System.currentTimeMillis() - topKTime;
                    rep.setValue(URewMetric.PREFGRAPH_TOPK_TIME, topKTime);
                    int sizeAnswer = (topk != null) ? topk.size() : 0;
                    rep.setValue(URewMetric.ANSWER_SIZE, (long) sizeAnswer);
                    sizeSummaryWriter.writeNext(rep.getSummarySizeMetrics());
                    timeSummaryWriter.writeNext(rep.getSummaryTimeMetrics());
                    sizeSummaryWriter.flush();
                    timeSummaryWriter.flush();
                }
            }
        }
    }
    sizeSummaryWriter.close();
    timeSummaryWriter.close();
}
From source file:br.com.autonomiccs.autonomic.plugin.common.services.AutonomiccsSystemVmDeploymentService.java
/**
 * This method tries to find a suitable host to deploy an Autonomiccs system VM.
 * The process is the following:
 * <ul>
 * <li>We load all hosts of the given cluster and shuffle the host list;
 * <li>after that, we go over the host list and check if each host can support the system VM {@link #canHostSupportVm(ServiceOfferingVO, HostResources)};
 * <li>if the host has resources to support the system VM, we check if its hypervisor type has a system VM template registered for it; if so, we use it as the host to deploy the system VM.
 * </ul>
 *
 * @param c cluster to look for a host to deploy a system VM
 * @param excludeHosts hosts to exclude from the search
 * @return {@link HostVO} to deploy a system VM; it may return null if no suitable hosts have been found
 */
private HostVO searchForAnotherRandomHostInTheClusterToStartSystemVmExcludingHosts(ClusterVO c,
        List<HostVO> excludeHosts) {
    List<HostVO> allHostsInCluster = hostService.listAllHostsInCluster(c);
    // Drop every host that has already been tried or explicitly excluded
    allHostsInCluster.removeAll(excludeHosts);
    Collections.shuffle(allHostsInCluster);
    for (HostVO h : allHostsInCluster) {
        if (canDeployAutonomiccsSystemVmOnHost(h)) {
            return h;
        }
    }
    logger.info(String.format(
            "Could not find any suitable hosts to deploy the system VM into cluster [clusterId=%d, clusterName=%s]",
            c.getId(), c.getName()));
    return null;
}
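A side note on the exclusion step above (a hedged sketch, not part of the Autonomiccs code): ArrayList.removeAll calls contains on the argument collection once per list element, so with two lists the cost is O(n*m). When the exclusion list is large, wrapping it in a HashSet makes each lookup O(1), assuming the element type's equals and hashCode are consistent. The class and method names below are ours for illustration.

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;

class ExclusionSketch {
    static List<String> allBut(List<String> all, List<String> excluded) {
        List<String> result = new ArrayList<>(all);
        // Wrapping the exclusion list in a HashSet makes each contains() check O(1)
        result.removeAll(new HashSet<>(excluded));
        return result;
    }
}

The result is identical to calling removeAll with the raw list, provided equals and hashCode agree; only the lookup cost changes.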
From source file:com.inkubator.hrm.web.appraisal.CompetenceTypeFormController.java
public void doDualListModelKlasifikasiKerja(Boolean isUpdate, AppraisalCompetencyTypeModel model)
        throws Exception {
    List<GolonganJabatan> listGolonganJabatan = golonganJabatanService.getAllData();
    if (isUpdate) {
        List<AppraisalCompetencyTypeGolJab> listCompTypeGolJab = appraisalCompetencyTypeGolJabService
                .getListByAppraisalCompetenceTypeId(model.getId());
        List<GolonganJabatan> listGolJabatanFromCompetencyType = listCompTypeGolJab.stream()
                .map(AppraisalCompetencyTypeGolJab::getGolonganJabatan).collect(Collectors.toList());
        listGolonganJabatan.removeAll(listGolJabatanFromCompetencyType);
        dualListModelGolJabatan = new DualListModel<>(listGolonganJabatan, listGolJabatanFromCompetencyType);
    } else {
        dualListModelGolJabatan.setSource(listGolonganJabatan);
    }
}
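The removeAll call above only works because removeAll matches elements with equals: the GolonganJabatan instances being removed were loaded through a different service call than the ones in listGolonganJabatan, so they are distinct objects that must compare equal logically. A minimal sketch of the kind of identifier-based equals such an entity would need (our illustration, not the project's actual class):

public class GolonganJabatan {
    private Long id;

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (!(o instanceof GolonganJabatan)) return false;
        Long otherId = ((GolonganJabatan) o).id;
        return id != null && id.equals(otherId);
    }

    @Override
    public int hashCode() {
        // Keep hashCode consistent with the id-based equals
        return id == null ? 0 : id.hashCode();
    }
}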
From source file:com.glaf.base.district.web.springmvc.DistrictController.java
@RequestMapping("/edit") public ModelAndView edit(HttpServletRequest request, ModelMap modelMap) { LoginContext loginContext = RequestUtils.getLoginContext(request); RequestUtils.setRequestParameterToAttribute(request); DistrictEntity district = districtService.getDistrict(RequestUtils.getLong(request, "id")); if (district != null && (StringUtils.equals(district.getCreateBy(), loginContext.getActorId()) || loginContext.isSystemAdministrator())) { request.setAttribute("district", district); }/*from w w w . j a v a 2 s . c o m*/ Long parentId = RequestUtils.getLong(request, "parentId", 0); List<DistrictEntity> districts = districtService.getDistrictList(parentId); if (district != null) { List<DistrictEntity> children = districtService.getDistrictList(district.getId()); if (districts != null && !districts.isEmpty()) { if (children != null && !children.isEmpty()) { districts.removeAll(children); } districts.remove(district); } } if (parentId > 0) { DistrictEntity parent = districtService.getDistrict(parentId); if (districts == null) { districts = new java.util.ArrayList<DistrictEntity>(); } districts.add(parent); } request.setAttribute("districts", districts); String view = request.getParameter("view"); if (StringUtils.isNotEmpty(view)) { return new ModelAndView(view, modelMap); } String x_view = ViewProperties.getString("district.edit"); if (StringUtils.isNotEmpty(x_view)) { return new ModelAndView(x_view, modelMap); } return new ModelAndView("/modules/sys/district/edit", modelMap); }
From source file:com.github.fge.jsonschema.core.keyword.syntax.checkers.hyperschema.LinksSyntaxChecker.java
@Override
protected void checkValue(final Collection<JsonPointer> pointers, final MessageBundle bundle,
        final ProcessingReport report, final SchemaTree tree) throws ProcessingException {
    final JsonNode node = getNode(tree);
    final int size = node.size();
    JsonNode ldo;
    NodeType type;
    Set<String> set;
    List<String> list;
    for (int index = 0; index < size; index++) {
        ldo = getNode(tree).get(index);
        type = NodeType.getNodeType(ldo);
        if (type != NodeType.OBJECT) {
            report.error(LDOMsg(tree, bundle, "draftv4.ldo.incorrectType", index)
                    .put("expected", NodeType.OBJECT).putArgument("found", type));
            continue;
        }
        set = Sets.newHashSet(ldo.fieldNames());
        list = Lists.newArrayList(REQUIRED_LDO_PROPERTIES);
        list.removeAll(set);
        if (!list.isEmpty()) {
            final ProcessingMessage msg = LDOMsg(tree, bundle, "draftv4.ldo.missingRequired", index);
            report.error(msg.put("required", REQUIRED_LDO_PROPERTIES).putArgument("missing", list));
            continue;
        }
        if (ldo.has("schema"))
            pointers.add(JsonPointer.of(keyword, index, "schema"));
        if (ldo.has("targetSchema"))
            pointers.add(JsonPointer.of(keyword, index, "targetSchema"));
        checkLDO(report, bundle, tree, index);
    }
}
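The removeAll call above is a set-difference idiom: starting from the required property names and removing everything present in the object leaves exactly the missing ones. The same idiom in isolation, using only the JDK (the property names "href" and "rel" are illustrative, not necessarily the checker's actual constants):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

class MissingKeysSketch {
    static List<String> missing(Set<String> present) {
        List<String> required = new ArrayList<>(Arrays.asList("href", "rel"));
        required.removeAll(present); // list minus set = the missing keys
        return required;             // e.g. [rel] when present == {"href"}
    }
}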
From source file:de.unihannover.l3s.mws.bean.Search.java
public String searcMe(int nuovo) {
    System.out.println("TYPE: " + this.searchtype + " nuovo: " + nuovo);
    String accountKey = "BmbX+6Sy9/VEcS5oOjurccO5MQpKr2ewvLQ2vRHBKXQ";
    TextManager tmgr = new TextManager();
    tmgr.setCotextrange(this.user.getUtente().getCotextrange());

    // Drop all empty search terms before building the query string
    searchterms.removeAll(Collections.singleton(""));
    String q = "";
    for (String t : this.searchterms) {
        q += "\"" + t + "\" ";
    }

    if (nuovo == 1) {
        siteAvailablelist.clear();
        siteSelectedlist.clear();
    }
    // Exclude every available site that is not currently selected
    List<String> exclude = new ArrayList<String>(siteAvailablelist);
    exclude.removeAll(siteSelectedlist);
    for (String s : exclude)
        q += " -site:" + s + " ";
    System.out.println(q);

    Track track = new Track();
    track.setDate((new GregorianCalendar()).getTime());
    track.setOperation("search");
    track.setParam1(q);
    track.setParam2(this.searchtype);
    track.setParam3("" + nuovo);
    track.setUtente(this.user.getUtente());
    TrackDao td = new TrackDao();
    td.addTrack(track);

    if (this.searchtype.compareTo("Web") == 0) {
        AzureSearchWebQuery aq = new AzureSearchWebQuery();
        aq.setAppid(accountKey);
        aq.setQuery(q);
        List<AzureSearchWebResult> arsall = new ArrayList<AzureSearchWebResult>();
        for (int i = 1; i < 8; i++) {
            aq.setPage(i);
            aq.doQuery();
            AzureSearchResultSet<AzureSearchWebResult> ars = aq.getQueryResult();
            for (AzureSearchWebResult anr : ars) {
                arsall.add(anr);
            }
        }
        searchResult = new ArrayList<SearchResult>();
        // WIKIMEDIA: http://en.wikipedia.org/w/api.php?action=query&titles=Berlin&prop=revisions&rvprop=timestamp&rvdir=newer&format=xml
        // LASTFM: http://ws.audioscrobbler.com/2.0/?method=artist.getshouts&artist=Berlin&api_key=9b6009eca365ded3a03c2b9673d54eb9&page=3
        for (AzureSearchWebResult anr : arsall) {
            SearchWebResult r = new SearchWebResult();
            r.setTitle(anr.getTitle());
            r.setDescription(tmgr.SingleTextToCheck(this.searchterms.get(0), anr.getDescription(), 0));
            r.setUrl(anr.getUrl());
            System.out.println(r.getUrl());
            searchResult.add(r);
        }
    }
    if (this.searchtype.compareTo("Video") == 0) {
        AzureSearchVideoQuery aq = new AzureSearchVideoQuery();
        aq.setAppid(accountKey);
        aq.setQuery(q);
        List<AzureSearchVideoResult> arsall = new ArrayList<AzureSearchVideoResult>();
        for (int i = 1; i < 8; i++) {
            aq.setPage(i);
            aq.doQuery();
            AzureSearchResultSet<AzureSearchVideoResult> ars = aq.getQueryResult();
            for (AzureSearchVideoResult anr : ars) {
                arsall.add(anr);
            }
        }
        searchResult = new ArrayList<SearchResult>();
        for (AzureSearchVideoResult anr : arsall) {
            SearchVideoResult r = new SearchVideoResult();
            r.setTitle(anr.getTitle());
            // r.setHeight(anr.getThumbnail().getHeight());
            // r.setWidth(anr.getThumbnail().getWidth());
            r.setRuntime("" + anr.getRunTime());
            r.setThumbnail(anr.getThumbnail());
            r.setUrl(anr.getMediaUrl());
            searchResult.add(r);
        }
    }
    if (this.searchtype.compareTo("Image") == 0) {
        AzureSearchImageQuery aq = new AzureSearchImageQuery();
        aq.setAppid(accountKey);
        aq.setQuery(q);
        List<AzureSearchImageResult> arsall = new ArrayList<AzureSearchImageResult>();
        for (int i = 1; i < 8; i++) {
            aq.setPage(i);
            aq.doQuery();
            AzureSearchResultSet<AzureSearchImageResult> ars = aq.getQueryResult();
            for (AzureSearchImageResult anr : ars) {
                arsall.add(anr);
            }
        }
        searchResult = new ArrayList<SearchResult>();
        for (AzureSearchImageResult anr : arsall) {
            SearchImageResult r = new SearchImageResult();
            r.setTitle(anr.getTitle());
            r.setHeight(anr.getHeight());
            r.setWidth(anr.getWidth());
            r.setUrl(anr.getMediaUrl());
            searchResult.add(r);
        }
    }
    if (searchterms.size() == 0)
        searchterms.add("");

    StatsManager sm = new StatsManager();
    List<YData> list = sm.getMatcthTable(sm.getSites(searchResult, null, null));
    searchDataPie = "var data = [ ";
    List<String> datastring = new ArrayList<String>();
    for (YData a : list) {
        datastring.add("{ label: \"" + a.getSite() + "\", data: " + a.getQty() + "} ");
        if (nuovo == 1) {
            siteAvailablelist.add(a.getSite());
            siteSelectedlist.add(a.getSite());
        }
    }
    searchDataPie += Joiner.on(",").join(datastring);
    searchDataPie += " ]; ";
    // searchDataPie += " var options = { series: { pie: { show: true } }, legend: { show: true, labelFormatter: function(label, series) { return('<input type=\"checkbox\" name=\"' + label +'\" checked=\"checked\" id=\"id' + label + '\"><a href=\"http://'+label+'\" target=\"_blank\">'+label+'</a> '); } } }; ";
    searchDataPie += " var options = { series: { pie: {show: true, label: {show: false} } }, grid: { hoverable: true, clickable: true }, legend: {show: false} }; ";
    searchDataPie += "$.plot($(\"#chartpie\"), data, options ); \n";
    String hover = " $(\"#chartpie\").bind(\"plothover\", function(event, pos, obj){ if (!obj){return;} percent = parseFloat(obj.series.percent).toFixed(2); var html = []; html.push(\"<div style=\\\"flot:left;width:105px;height:20px;text-align:center;border:0px solid black;background-color:\", obj.series.color, \"\\\">\", \"<span style=\\\"font-weight:bold;color:red\\\">\", obj.series.label, \" (\", percent, \"%)</span>\", \"</div>\"); $(\"#showInteractive\").html(html.join('')); }); ";
    hover = " $(\"#chartpie\").bind(\"plothover\", function(event, pos, obj){ if (!obj){return;} percent = parseFloat(obj.series.percent).toFixed(2); var html = []; html.push(\"<div style=\\\"flot:left;width:105px;height:20px;text-align:center;border:0px solid black; \\\">\", \"<span style=\\\"font-weight:bold;color:red\\\">\", obj.series.label, \" (\", percent, \"%)</span>\", \"</div>\"); $(\"#showInteractive\").html(html.join('')); }); ";
    searchDataPie += hover;
    searchDataPie += " var choiceContainer = $(\"#chartpie\");";
    searchDataPie += " choiceContainer.find(\"input\").click(plotAccordingToChoices);";
    searchDataPie += " function plotAccordingToChoices() { ";
    searchDataPie += " var key = $(this).attr(\"name\"); ";
    searchDataPie += " $( \"input[value*='\"+key+\"']\" ).trigger('click'); ";
    // searchDataPie += " document.getElementById('checkform').submit();";
    // searchDataPie += " var data = []; ";
    /* Commented-out alternative redraw logic:
    searchDataPie += " choiceContainer.find(\"input:checked\").each(function () { ";
    searchDataPie += "var data1 = [ ";
    searchDataPie += Joiner.on(",").join(datastring);
    searchDataPie += " ]; ";
    searchDataPie += " var options1 = { series: { pie: { show: true } }, legend: { show: true, labelFormatter: function(label, series) { return('<input type=\"checkbox\" name=\"' + label +'\" checked=\"checked\" id=\"id' + label + '\"><a href=\"http://'+label+'\" target=\"_blank\">'+label+'</a> '); } } }; ";
    // searchDataPie += " var key = $(this).attr(\"name\"); if (key && data[key]) data.push(datasets[key]); });";
    searchDataPie += " var key = $(this).attr(\"name\");";
    searchDataPie += " var index = data1.indexOf(key); if (index > -1) { data1.splice(index, 1); };";
    // searchDataPie += " if (data.length > 0) $.plot($(\"#placeholder\"), data, { yaxis: { min: 0 }, xaxis: { tickDecimals: 0 } }); ";
    searchDataPie += "$.plot($(\"#chartpie\"), data1, options1 ); });";
    */
    searchDataPie += " }";
    searchDataPie += " ";
    calculateTimeline(searchResult);
    return "basicSearch";
}
From source file:io.kamax.mxisd.lookup.provider.ForwarderProvider.java
@Override
public List<ThreePidMapping> populate(List<ThreePidMapping> mappings) {
    List<ThreePidMapping> mappingsToDo = new ArrayList<>(mappings);
    List<ThreePidMapping> mappingsFoundGlobal = new ArrayList<>();
    for (String label : cfg.getServers()) {
        for (String srv : mxCfg.getIdentity().getServers(label)) {
            log.info("{} mappings remaining: {}", mappingsToDo.size(), mappingsToDo);
            log.info("Querying {}", srv);
            List<ThreePidMapping> mappingsFound = fetcher.find(srv, mappingsToDo);
            log.info("{} returned {} mappings", srv, mappingsFound.size());
            mappingsFoundGlobal.addAll(mappingsFound);
            mappingsToDo.removeAll(mappingsFound);
        }
    }
    return mappingsFoundGlobal;
}