List of usage examples for java.util.List.removeAll
boolean removeAll(Collection<?> c);
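Before the collected examples, a minimal, self-contained sketch of the method's contract may help (the class name RemoveAllDemo and the sample values are ours, not taken from any of the projects below): removeAll removes every element of the list that is equal, via equals, to some element of the argument collection, and returns true only if the list changed.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class RemoveAllDemo {
    public static void main(String[] args) {
        // Start from a mutable list; a fixed-size view such as Arrays.asList alone
        // rejects structural removal with UnsupportedOperationException.
        List<String> colors = new ArrayList<>(Arrays.asList("red", "green", "blue", "green"));

        // Every element equal to an element of the argument collection is removed.
        boolean changed = colors.removeAll(Arrays.asList("green", "purple"));
        System.out.println(changed); // true -> the list was modified
        System.out.println(colors);  // [red, blue] -> both "green" entries are gone

        // No matching elements: the list stays as it is and false is returned.
        System.out.println(colors.removeAll(Arrays.asList("purple"))); // false
    }
}

Because matching is based on equals, element classes used with removeAll (as in several of the examples below) need a meaningful equals implementation.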
From source file:trycb.util.StartupPreparations.java
/**
 * Helper method to ensure all indexes are created for this application to run properly.
 * Since Couchbase Server 4.0 GA, this should always be skipped since the index definitions are part of the sample.
 */
private void ensureIndexes() throws Exception {
    LOGGER.info("Ensuring all Indexes are created.");

    N1qlQueryResult indexResult = bucket.query(N1qlQuery
            .simple(select("indexes.*").from("system:indexes").where(i("keyspace_id").eq(s(bucket.name())))));

    List<String> indexesToCreate = new ArrayList<String>();
    indexesToCreate.addAll(Arrays.asList("def_sourceairport", "def_airportname", "def_type", "def_faa",
            "def_icao", "def_city"));

    boolean hasPrimary = false;
    List<String> foundIndexes = new ArrayList<String>();
    for (N1qlQueryRow indexRow : indexResult) {
        String name = indexRow.value().getString("name");
        Boolean isPrimary = indexRow.value().getBoolean("is_primary");
        if (name.equals(PRIMARY_NAME) || isPrimary == Boolean.TRUE) {
            hasPrimary = true;
        } else {
            foundIndexes.add(name);
        }
    }
    indexesToCreate.removeAll(foundIndexes);

    if (!hasPrimary) {
        // will create the primary index with default name "#primary".
        // Note that some tools may also create it under the name "def_primary"
        // (in which case hasPrimary should be true).
        Statement query = createPrimaryIndex().on(bucket.name()).withDefer();
        LOGGER.info("Executing index query: {}", query);
        N1qlQueryResult result = bucket.query(N1qlQuery.simple(query));
        if (result.finalSuccess()) {
            LOGGER.info("Successfully created primary index.");
        } else {
            LOGGER.warn("Could not create primary index: {}", result.errors());
        }
    }

    for (String name : indexesToCreate) {
        Statement query = createIndex(name).on(bucket.name(), x(name.replace("def_", ""))).withDefer();
        LOGGER.info("Executing index query: {}", query);
        N1qlQueryResult result = bucket.query(N1qlQuery.simple(query));
        if (result.finalSuccess()) {
            LOGGER.info("Successfully created index with name {}.", name);
        } else {
            LOGGER.warn("Could not create index {}: {}", name, result.errors());
        }
    }

    // prepare the list of indexes to build (both primary and secondary indexes)
    List<String> indexesToBuild = new ArrayList<String>(indexesToCreate.size() + 1);
    indexesToBuild.addAll(indexesToCreate);
    if (!hasPrimary) {
        indexesToBuild.add(PRIMARY_NAME);
    }

    // skip the build step if all indexes have been found
    if (indexesToBuild.isEmpty()) {
        LOGGER.info("All indexes are already in place, nothing to build");
        return;
    }

    LOGGER.info("Waiting 5 seconds before building the indexes.");
    Thread.sleep(5000);

    // trigger the build
    StringBuilder indexes = new StringBuilder();
    boolean first = true;
    for (String name : indexesToBuild) {
        if (first) {
            first = false;
        } else {
            indexes.append(",");
        }
        indexes.append(name);
    }

    String query = "BUILD INDEX ON `" + bucket.name() + "` (" + indexes.toString() + ")";
    LOGGER.info("Executing index query: {}", query);
    N1qlQueryResult result = bucket.query(N1qlQuery.simple(query));
    if (result.finalSuccess()) {
        LOGGER.info("Successfully executed build index query.");
    } else {
        LOGGER.warn("Could not execute build index query {}.", result.errors());
    }
}
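The removeAll call above implements a simple "desired minus existing" diff: everything already reported by the metadata query is dropped from the list of indexes that still need to be created. Stripped of the Couchbase specifics, the idiom looks like this (names such as desiredIndexes and existingIndexes are illustrative only):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class IndexDiffSketch {
    public static void main(String[] args) {
        // What we want to exist.
        List<String> desiredIndexes = new ArrayList<>(Arrays.asList("def_type", "def_faa", "def_city"));
        // What a metadata query reported as already present.
        List<String> existingIndexes = Arrays.asList("def_type");

        // After removeAll, only the indexes that still have to be created remain.
        desiredIndexes.removeAll(existingIndexes);
        System.out.println(desiredIndexes); // [def_faa, def_city]
    }
}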
From source file:cs.ox.ac.uk.gsors.GroupPreferencesTestAux.java
public void testFORewriting() throws Exception { // Configuration. final DecompositionStrategy decomposition = DecompositionStrategy.DECOMPOSE; final RewritingLanguage rewLang = RewritingLanguage.UCQ; final SubCheckStrategy subchkStrategy = SubCheckStrategy.INTRADEC; final NCCheck ncCheckStrategy = NCCheck.NONE; LOGGER.info("Decomposition: " + decomposition.name()); LOGGER.info("Rewriting Language: " + rewLang.name()); LOGGER.info("Subsumption Check Strategy: " + subchkStrategy.name()); LOGGER.info("Negative Constraints Check Strategy " + ncCheckStrategy.name()); final File testSuiteFile = FileUtils.getFile(_WORKING_DIR, FilenameUtils.separatorsToSystem(_DEFAULT_INPUT_PATH), "test-cases1.txt"); final List<String> tests = IOUtils.readLines(new FileReader(testSuiteFile)); final String creationDate = dateFormat.format(new Date()); // Parse the program final Parser parser = new Parser(); parser.parse(getStringFile(_DEFAULT_INPUT_PATH + "prefDB-ontology.dtg")); // Get the rules final List<IRule> rules = parser.getRules(); // Get the queries final List<IQuery> queryHeads = parser.getQueries(); final Map<IPredicate, IRelation> conf = parser.getDirectives(); if (!conf.isEmpty()) { StorageManager.getInstance();/*from w w w.ja v a 2s . co m*/ StorageManager.configure(conf); } // Get the TGDs from the set of rules final List<IRule> tgds = RewritingUtils.getTGDs(rules, queryHeads); final List<IRule> mSBox = RewritingUtils.getSBoxRules(rules, queryHeads); final IRuleSafetyProcessor ruleProc = new StandardRuleSafetyProcessor(); ruleProc.process(mSBox); final IQueryRewriter ndmRewriter = new NDMRewriter(mSBox); final IRelationFactory rf = new RelationFactory(); // Convert the query bodies in rules final List<IRule> bodies = new LinkedList<IRule>(rules); bodies.removeAll(tgds); final List<IRule> queries = RewritingUtils.getQueries(bodies, queryHeads); // get the constraints from the set of rules final Set<IRule> constraints = RewritingUtils.getConstraints(rules, queryHeads); final Set<Expressivity> exprs = RewritingUtils.getExpressivity(tgds); LOGGER.info("Expressivity: " + exprs.toString()); if (!exprs.contains(Expressivity.LINEAR) && !exprs.contains(Expressivity.STICKY)) { extracted(); } // compute the dependency graph LOGGER.debug("Computing position dependencies."); // long depGraphMem = MonitoringUtils.getHeapUsage(); long posDepTime = System.currentTimeMillis(); Map<Pair<IPosition, IPosition>, Set<List<IRule>>> deps = DepGraphUtils.computePositionDependencyGraph(tgds); posDepTime = System.currentTimeMillis() - posDepTime; // Setup caching CacheManager.setupCaching(); // if linear TGDs, compute the atom coverage graph. 
LOGGER.debug("Computing atom coverage graph."); long atomCoverGraphTime = System.currentTimeMillis(); if (exprs.contains(Expressivity.LINEAR)) { deps = DepGraphUtils.computeAtomCoverageGraph(deps); } atomCoverGraphTime = System.currentTimeMillis() - atomCoverGraphTime; // depGraphMem = MonitoringUtils.getHeapUsage() - depGraphMem; // rewriting constraints // long ncRewMem = MonitoringUtils.getHeapUsage(); final ParallelRewriter cnsRewriter = new ParallelRewriter(DecompositionStrategy.MONOLITIC, RewritingLanguage.UCQ, SubCheckStrategy.NONE, NCCheck.NONE); long ncRewTime = System.currentTimeMillis(); final Set<IRule> rewrittenConstraints = Sets.newHashSet(); if (!ncCheckStrategy.equals(NCCheck.NONE)) { for (final IRule c : constraints) { rewrittenConstraints.addAll(cnsRewriter.getRewriting(c, tgds, new HashSet<IRule>(), deps, exprs)); } } ncRewTime = System.currentTimeMillis() - ncRewTime; // ncRewMem = ncRewMem - MonitoringUtils.getHeapUsage(); LOGGER.debug("Finished rewriting constraints."); // Compute the Rewriting final ParallelRewriter rewriter = new ParallelRewriter(decomposition, rewLang, subchkStrategy, ncCheckStrategy); Map<String, Integer> cities = new HashMap<String, Integer>(); cities.put("Peoria", 142); cities.put("Gilbert", 216); cities.put("Glendale", 314); cities.put("Chandler", 466); // cities.put("Tempe", 648); // cities.put("Phoenix", 2351); List<Integer> ks = new ArrayList<Integer>(); ks.add(1); ks.add(2); ks.add(3); List<AggregateStrategy> str = new ArrayList<AggregateStrategy>(); str.add(AggregateStrategy.CSU); str.add(AggregateStrategy.Plurality); str.add(AggregateStrategy.PluralityMisery); for (AggregateStrategy strategyQA : str) { final String summaryPrefix = StringUtils.join(creationDate, "-", strategyQA.toString()); final File sizeSummaryFile = FileUtils.getFile(_WORKING_DIR, FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH + "/" + strategyQA.toString()), FilenameUtils.separatorsToSystem(_DEFAULT_SUMMARY_DIR), StringUtils.join(summaryPrefix, "-", "size-summary.csv")); final CSVWriter sizeSummaryWriter = new CSVWriter(new FileWriter(sizeSummaryFile), ','); final File timeSummaryFile = FileUtils.getFile(_WORKING_DIR, FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH + "/" + strategyQA.toString()), FilenameUtils.separatorsToSystem(_DEFAULT_SUMMARY_DIR), StringUtils.join(summaryPrefix, "-", "time-summary.csv")); final CSVWriter timeSummaryWriter = new CSVWriter(new FileWriter(timeSummaryFile), ','); final File cacheSummaryFile = FileUtils.getFile(_WORKING_DIR, FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH + "/" + strategyQA.toString()), FilenameUtils.separatorsToSystem(_DEFAULT_SUMMARY_DIR), StringUtils.join(summaryPrefix, "-", "cache-summary.csv")); final CSVWriter cacheSummaryWriter = new CSVWriter(new FileWriter(cacheSummaryFile), ','); final File memorySummaryFile = FileUtils.getFile(_WORKING_DIR, FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH + "/" + strategyQA.toString()), FilenameUtils.separatorsToSystem(_DEFAULT_SUMMARY_DIR), StringUtils.join(summaryPrefix, "-", "memory-summary.csv")); final CSVWriter memorySummaryWriter = new CSVWriter(new FileWriter(memorySummaryFile), ','); sizeSummaryWriter.writeNext(GReportingUtils.getSummaryRewritingSizeReportHeader()); timeSummaryWriter.writeNext(GReportingUtils.getSummaryRewritingTimeReportHeader()); cacheSummaryWriter.writeNext(GReportingUtils.getSummaryCachingReportHeader()); memorySummaryWriter.writeNext(GReportingUtils.getSummaryMemoryReportHeader()); for (Integer k : ks) { for (String city : 
cities.keySet()) { for (int con = 0; con < 10; con++) { LOGGER.info("con-city-k: " + con + "-" + city + "-" + k + "-" + strategyQA.toString()); // top k for each preferences for (final String testName : tests) { // Create a buffer for the output final IRelation result = rf.createRelation(); GPrefParameters parameters = new GPrefParameters(testName, k, city, cities.get(city)); // Create the Directory where to store the test // results // final File outTestDir = FileUtils // .getFile( // _WORKING_DIR, // FilenameUtils // .separatorsToSystem(_DEFAULT_OUTPUT_PATH // + "/" // + strategyQA // .toString() // + k + city), // testName); // if (!outTestDir.exists()) { // if (outTestDir.mkdirs()) { // LOGGER.info("Created output directory: " // + testName); // } else { // LOGGER.fatal("Error creating output directory"); // } // } LOGGER.info("Processing file: " + testName); // dump the rewritten constraints: IRule q = null; if (parameters.getScenario() == Scenario.BREAKFAST_FOOD || parameters.getScenario() == Scenario.LUNCH_FOOD || parameters.getScenario() == Scenario.DINNER_FOOD) { q = queries.get(0); } if (parameters.getScenario() == Scenario.BREAKFAST_CUSINE || parameters.getScenario() == Scenario.LUNCH_CUSINE || parameters.getScenario() == Scenario.DINNER_CUSINE) { q = queries.get(1); } if (parameters.getScenario() == Scenario.BREAKFAST_PLACE || parameters.getScenario() == Scenario.LUNCH_PLACE || parameters.getScenario() == Scenario.DINNER_PLACE) { q = queries.get(2); } CacheManager.setupCaching(); final String queryPredicate = q.getHead().iterator().next().getAtom().getPredicate() .getPredicateSymbol(); // Setup reporting final ToitReporter rep = ToitReporter.getInstance(true); ToitReporter.setupReporting(); ToitReporter.setQuery(queryPredicate); ToitReporter.setTest(testName); ToitReporter.setK(parameters.getK()); //ToitReporter.setStrategy(parameters.getStrategy()); ToitReporter.setCity(parameters.getCity()); ToitReporter.setGroupID(parameters.getGroupId()); ToitReporter.setNbUsers(parameters.getMaxNbUsers()); ToitReporter.setNbBuss(parameters.getBs()); ToitReporter.setScenario(parameters.getScenario()); rep.setValue(GRewMetric.DEPGRAPH_TIME, posDepTime); LOGGER.info("Processing query: ".concat(q.toString())); // final long rewMem = // MonitoringUtils.getHeapUsage(); final long overallTime = System.currentTimeMillis(); final Set<IRule> rewriting = rewriter.getRewriting(q, tgds, rewrittenConstraints, deps, exprs); rep.setValue(GRewMetric.REW_TIME, System.currentTimeMillis() - overallTime); // rep.setValue(RewMetric.REW_MEM, // MonitoringUtils.getHeapUsage() - rewMem); // rep.setValue(RewMetric.DEPGRAPH_MEM, // depGraphMem); rep.setValue(GRewMetric.REW_SIZE, (long) rewriting.size()); rep.setValue(GRewMetric.REW_CNS_TIME, ncRewTime); // rep.setValue(RewMetric.REW_CNS_MEM, ncRewMem); final Set<ILiteral> newHeads = new HashSet<ILiteral>(); Map<IPredicate, IRelation> results = new HashMap<IPredicate, IRelation>(); for (final IRule qr : rewriting) { newHeads.add(qr.getHead().iterator().next()); // rewFW.write(qr + "\n"); final Set<IRule> sboxRewriting = new LinkedHashSet<IRule>(); Set<IRule> rrules = ndmRewriter.getRewriting(qr); sboxRewriting.addAll(rrules); // Produce the SQL rewriting for each query in // the // program final SQLRewriter sqlRewriter = new SQLRewriter(sboxRewriting); // rewFW.write("Computing SQL Rewriting"); try { // Get the SQL rewriting as Union of // Conjunctive // Queries long duration = -System.nanoTime(); final List<String> ucqSQLRewriting = sqlRewriter.getSQLRewritings( 
parameters.getConstraintsSqlQuery(), parameters.getNbNodes(), parameters.getStartFromRes()); duration = ((duration + System.nanoTime()) / 1000000); IRelation resultAux = rf.createRelation(); for (final String qu : ucqSQLRewriting) { IRelation r = StorageManager.executeQuery(qu); // LOGGER.info("-Query: " + // qu+" "+r.size()+" "+c); resultAux.addAll(r); } for (IPredicate predicate : qr.getBodyPredicates()) { results.put(predicate, resultAux); } result.addAll(resultAux); // LOGGER.info("-R: " +result.size()); } catch (final SQLException e) { e.printStackTrace(); } } // write the result in the output // rewFW.write(result.toString()); // construct the graph Map<User, List<user.models.Pair<IPredicate, IPredicate>>> prefs = JsonHelper .getGPreferences(parameters.getPrefs(), tgds); final cs.ox.ac.uk.gsors2.GPreferencesGraph prefGraph = Factory.GPGRAPH .createPreferencesGraph(); long constPrefGraphTime = System.currentTimeMillis(); // final long constPrefGraphMem = // MonitoringUtils.getHeapUsage(); for (User user : prefs.keySet()) { for (user.models.Pair<IPredicate, IPredicate> pairPreference : prefs.get(user)) { IRelation morePrefs = results.get(pairPreference.getElement0()); IRelation lessPrefs = results.get(pairPreference.getElement1()); for (int j = 0; j < morePrefs.size(); j++) { ITuple el1 = morePrefs.get(j); if (!lessPrefs.contains(el1)) { for (int i = 0; i < lessPrefs.size(); i++) { ITuple el2 = lessPrefs.get(i); GPreferenceEdge edge = new GPreferenceEdge(el1, el2, user); prefGraph.addPreference(edge); } } } } } for (int i = 0; i < result.size(); i++) { ITuple v = result.get(i); prefGraph.addVertex(v); } // LOGGER.info("-----Size--Graph--: " + // result.size()+"--"+prefGraph.getVertexesSize() ); constPrefGraphTime = System.currentTimeMillis() - constPrefGraphTime; rep.setValue(GRewMetric.PREFGRAPH_CONST_TIME, constPrefGraphTime); rep.setValue(GRewMetric.PREFGRAPH_CONST_SIZE_V, (long) prefGraph.getVertexesSize()); rep.setValue(GRewMetric.PREFGRAPH_CONST_SIZE_E, (long) prefGraph.getEdgesSize()); // rep.setValue(RewMetric.PREFGRAPH_CONST__MEM, // MonitoringUtils.getHeapUsage() - // constPrefGraphMem); long mergeOperatorTime = System.currentTimeMillis(); // prefGraph // .mergeProbabilisticModel("/home/onsa/Dropbox/VGOT/toit13/resources/data_final/reviews.txt"); mergeOperatorTime = System.currentTimeMillis() - mergeOperatorTime; rep.setValue(GRewMetric.PREFGRAPH_MERGE_TIME, mergeOperatorTime); rep.setValue(GRewMetric.PREFGRAPH_MERGE_SIZE_V, (long) prefGraph.getVertexesSize()); rep.setValue(GRewMetric.PREFGRAPH_MERGE_SIZE_E, (long) prefGraph.getEdgesSize()); long topKTime = System.currentTimeMillis(); // prefGraph.getTopK(parameters.getK(), // parameters.getStrategy()); topKTime = System.currentTimeMillis() - topKTime; rep.setValue(GRewMetric.PREFGRAPH_TOPK_TIME, topKTime); rep.setValue(GRewMetric.PREFGRAPH_TOPK_SIZE_V, (long) prefGraph.getVertexesSize()); rep.setValue(GRewMetric.PREFGRAPH_TOPK_SIZE_E, (long) prefGraph.getEdgesSize()); // rewFW.write("\n"); // for (final ILiteral h : newHeads) { // rewFW.write("?- " + h + ".\n"); // } // rewFW.write("\n"); // rewFW.flush(); // rewFW.close(); // dump summary metrics. 
sizeSummaryWriter.writeNext(rep.getSummarySizeMetrics()); timeSummaryWriter.writeNext(rep.getSummaryTimeMetrics()); cacheSummaryWriter.writeNext(rep.getSummaryCacheMetrics()); memorySummaryWriter.writeNext(rep.getSummaryMemoryMetrics()); sizeSummaryWriter.flush(); timeSummaryWriter.flush(); cacheSummaryWriter.flush(); memorySummaryWriter.flush(); } } } } sizeSummaryWriter.close(); timeSummaryWriter.close(); cacheSummaryWriter.close(); memorySummaryWriter.close(); } }
From source file:org.openmrs.module.appframework.service.AppFrameworkServiceImpl.java
@Override
public List<AppDescriptor> getAllEnabledApps() {
    // first just get all apps
    List<AppDescriptor> appDescriptors = getAllApps();

    // find out which ones are disabled
    List<AppDescriptor> disabledAppDescriptors = new ArrayList<AppDescriptor>();
    for (AppDescriptor appDescriptor : appDescriptors) {
        if (disabledByComponentState(appDescriptor) || disabledByFeatureToggle(appDescriptor)
                || disabledByAppFrameworkConfig(appDescriptor)) {
            disabledAppDescriptors.add(appDescriptor);
        }
    }

    // remove disabled apps
    appDescriptors.removeAll(disabledAppDescriptors);
    return appDescriptors;
}
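The method above collects the disabled descriptors into a temporary list and then calls removeAll. On Java 8+ the same filtering can be expressed with removeIf, which drops the second list entirely. This is a sketch of the equivalent shape, not OpenMRS code; isDisabled stands in for the three disabledBy... checks:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class RemoveIfSketch {
    // Placeholder for disabledByComponentState(...) || disabledByFeatureToggle(...)
    // || disabledByAppFrameworkConfig(...) in the real service.
    static boolean isDisabled(String app) {
        return app.startsWith("disabled-");
    }

    public static void main(String[] args) {
        List<String> apps = new ArrayList<>(Arrays.asList("registration", "disabled-lab", "pharmacy"));

        // removeIf filters in place with a predicate, so no "toRemove" list is needed.
        apps.removeIf(RemoveIfSketch::isDisabled);
        System.out.println(apps); // [registration, pharmacy]
    }
}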
From source file:com.jaspersoft.studio.server.protocol.soap.SoapConnection.java
@Override
public List<ResourceDescriptor> listDatasources(IProgressMonitor monitor, IDatasourceFilter f) throws Exception {
    List<ResourceDescriptor> list = client.listDatasources();
    if (f != null) {
        List<ResourceDescriptor> toremove = new ArrayList<ResourceDescriptor>();
        for (ResourceDescriptor rd : list)
            if (!f.isDatasource(rd))
                toremove.add(rd);
        list.removeAll(toremove);
    }
    return list;
}
From source file:com.openbravo.pos.sales.restaurant.JTicketsBagRestaurantRes.java
private void jButtonPlacesAddActionPerformed(java.awt.event.ActionEvent evt) { // GEN-FIRST:event_jButton1ActionPerformed
    try {
        Date start = (Date) Formats.TIMESTAMPSHORT.parseValue(m_jFromDate.getText());
        Date end = (Date) Formats.TIMESTAMPSHORT.parseValue(m_jTillDate.getText());

        List<PlaceSplit> placesAvailable;
        placesAvailable = dlCustomers.getAvailablePlaces(start, end);
        placesAvailable.removeAll(placesReservation);

        JPlacesListDialog dialog = JPlacesListDialog.newJDialog(m_App, (JButton) evt.getSource());
        PlaceSplit place2Add = dialog.showPlacesList(placesAvailable);

        if (place2Add != null && !placesReservation.contains(place2Add)) {
            placesReservation.add(place2Add);
            RefreshPlaces();
            m_Dirty.setDirty(true);
        }
    } catch (BasicException e) {
        JConfirmDialog.showInformation(m_App, JTicketsBagRestaurantRes.this,
                AppLocal.getIntString("error.information"), AppLocal.getIntString("message.checkinput"));
    }
}
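placesAvailable.removeAll(placesReservation) only filters out the already-reserved places if PlaceSplit compares by value: removeAll (and the contains call just below it) match elements via equals. A small sketch of why that matters, using a made-up Room class rather than the real PlaceSplit:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;

public class EqualsMattersSketch {
    static final class Room {
        final String id;
        Room(String id) { this.id = id; }

        // Without this override, two Room("A1") instances would not be considered equal,
        // and removeAll would leave the reserved room in the "available" list.
        @Override public boolean equals(Object o) {
            return o instanceof Room && ((Room) o).id.equals(id);
        }
        @Override public int hashCode() { return Objects.hash(id); }
        @Override public String toString() { return id; }
    }

    public static void main(String[] args) {
        List<Room> available = new ArrayList<>(Arrays.asList(new Room("A1"), new Room("A2")));
        List<Room> reserved = Arrays.asList(new Room("A1"));

        available.removeAll(reserved);
        System.out.println(available); // [A2] -- works because Room defines equals/hashCode
    }
}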
From source file:com.sri.ai.praise.model.imports.church.ChurchToModelVisitor.java
protected Expression defineInHOGM(Expression name, List<Expression> params, Expression body) { Expression result = null;//from w ww . j av a2 s .c om // Track the known random variable names knownRandomVariableNames.add(name); // Determine the correct argument names for any of the StringBuilder rArgs = new StringBuilder(); final Map<Expression, Expression> paramVarNames = new HashMap<Expression, Expression>(); boolean firstArg = true; int cnt = 0; Expression randomVariable = name; List<Expression> rvArgs = new ArrayList<Expression>(); for (Expression arg : params) { if (firstArg) { firstArg = false; rArgs.append(" "); } else { rArgs.append(" x "); } // Ensure name is upper cased Expression logicalVariableArg = newLogicalVariable(arg.toString()); rvArgs.add(logicalVariableArg); params.set(cnt, logicalVariableArg); paramVarNames.put(arg, params.get(cnt)); // TODO - anything better? rArgs.append(CHURCH_VALUES_SORT); cnt++; } randoms.add("random " + name + ":" + rArgs + (rArgs.length() > 0 ? " -> " : " ") + "Boolean"); StringJoiner knownRandomVariablesHLM = new StringJoiner(";\n", "", ";"); randoms.forEach(r -> knownRandomVariablesHLM.add(r)); RewritingProcess processForRV = LBPFactory.newLBPProcessWithHighLevelModel( "sort " + CHURCH_VALUES_SORT + ";\n\n" + knownRandomVariablesHLM.toString()); if (rvArgs.size() > 0) { randomVariable = Expressions.apply(randomVariable, rvArgs); processForRV = LPIUtil .extendContextualSymbolsWithFreeVariablesInferringDomainsFromUsageInRandomVariables( randomVariable, processForRV); } final List<List<Boolean>> flipValues = new ArrayList<List<Boolean>>(); final List<Boolean> trueFalseValues = new ArrayList<Boolean>(); final Map<Expression, Integer> flipMarkerToId = new LinkedHashMap<Expression, Integer>(); final Map<Expression, Integer> flipMarkerToFlipValuesIdx = new LinkedHashMap<Expression, Integer>(); // Flips <- array of flip applications in body trueFalseValues.add(Boolean.FALSE); trueFalseValues.add(Boolean.TRUE); for (Integer flipId : flipIdToValue.keySet()) { Expression flipMarker = newSymbol(FLIP_ID_PREFIX + flipId); if (!flipMarkerToId.containsKey(flipMarker) && Expressions.isSubExpressionOf(flipMarker, body)) { flipMarkerToId.put(flipMarker, flipId); flipMarkerToFlipValuesIdx.put(flipMarker, flipMarkerToFlipValuesIdx.size()); flipValues.add(trueFalseValues); } } if (flipValues.size() == 0) { Expression potentialRule = createPotentialRule(randomVariable, deterministicChurch2HOGM(body, paramVarNames, processForRV), Expressions.ONE, Expressions.ZERO); result = rNormalize.rewrite(potentialRule, processForRV); } else { // H <- empty list List<Expression> h = new ArrayList<Expression>(); // for all assignments of FlipsValues to Flips do CartesianProductEnumeration<Boolean> cpe = new CartesianProductEnumeration<Boolean>(flipValues); while (cpe.hasMoreElements()) { final List<Boolean> values = cpe.nextElement(); // caseC <- subsitute FlipsValues for Flips in body Expression caseC = body .replaceAllOccurrences(new AbstractReplacementFunctionWithContextuallyUpdatedProcess() { @Override public Expression apply(Expression expression, RewritingProcess process) { Expression result = expression; if (Expressions.isSymbol(expression)) { Integer idx = flipMarkerToFlipValuesIdx.get(expression); if (idx != null) { result = values.get(idx) ? 
Expressions.TRUE : Expressions.FALSE; } } return result; } }, LBPFactory.newLBPProcess()); // caseH <- deterministicChurch2HOGM(caseC) Expression caseH = deterministicChurch2HOGM(caseC, paramVarNames, processForRV); // Calculate q Rational q = Rational.ONE; for (Map.Entry<Expression, Integer> flipMarkerToIdEntry : flipMarkerToId.entrySet()) { Rational pi = flipIdToValue.get(flipMarkerToIdEntry.getValue()); if (!values.get(flipMarkerToFlipValuesIdx.get(flipMarkerToIdEntry.getKey()))) { pi = Rational.ONE.subtract(pi); } q = q.multiply(pi); } h.add(createPotentialRule(randomVariable, caseH, Expressions.makeSymbol(q), Expressions.ZERO)); } Expression plusH = Plus.make(h); List<Expression> constants = new ArrayList<>(FormulaUtil.getConstants(plusH, processForRV)); // Ensure we exclude known random variable names constants.removeAll(knownRandomVariableNames); // And also ensure we remove these known constants as well. constants.remove(Expressions.TRUE); constants.remove(Expressions.FALSE); if (constants.size() > 0) { knownConstants.addAll(constants); Model model = Model.getRewritingProcessesModel(processForRV); Set<Expression> sortDeclarationExpressions = new LinkedHashSet<>(); sortDeclarationExpressions.add(new SortDeclaration(Expressions.makeSymbol(CHURCH_VALUES_SORT), SortDeclaration.UNKNOWN_SIZE, ExtensionalSet.makeUniSet(constants)).getSortDeclaration()); Set<Expression> randomVariableDeclarationExpressions = new LinkedHashSet<>(); model.getRandomVariableDeclarations() .forEach(randomVariableDeclaration -> randomVariableDeclarationExpressions .add(randomVariableDeclaration.getRandomVariableDeclaration())); processForRV = Model.setKnownSortsAndRandomVariables(sortDeclarationExpressions, randomVariableDeclarationExpressions, processForRV); } result = SimplifyWithRelationsAtBottom.simplify(plusH, name, processForRV); } rules.add(result.toString()); return result; }
From source file:org.wso2.carbon.identity.application.authentication.framework.handler.request.impl.consent.ConsentMgtPostAuthnHandler.java
private List<ClaimMetaData> buildDisapprovedClaimList(List<ClaimMetaData> consentRequiredClaims,
                                                      List<ClaimMetaData> approvedClaims) {
    List<ClaimMetaData> disapprovedClaims = new ArrayList<>();
    if (isNotEmpty(consentRequiredClaims)) {
        consentRequiredClaims.removeAll(approvedClaims);
        disapprovedClaims = consentRequiredClaims;
    }
    return disapprovedClaims;
}
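Note that this helper removes the approved claims from the consentRequiredClaims list the caller passed in, so the caller's list is mutated as a side effect. When that is not wanted, copying first keeps the input intact; the following is a hedged sketch of the defensive-copy variant (simplified to String claims, not the WSO2 implementation):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class DisapprovedClaimsSketch {
    // Returns required minus approved without touching the caller's lists.
    static List<String> disapproved(List<String> required, List<String> approved) {
        List<String> result = new ArrayList<>(required); // defensive copy
        result.removeAll(approved);
        return result;
    }

    public static void main(String[] args) {
        List<String> required = new ArrayList<>(Arrays.asList("email", "phone", "country"));
        List<String> approved = Arrays.asList("email");

        System.out.println(disapproved(required, approved)); // [phone, country]
        System.out.println(required);                        // still [email, phone, country]
    }
}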
From source file:com.wipro.ats.bdre.clustermigration.MigrationPreprocessor.java
private List<String> getNewRegularColumnsAtSourceList(List<String> sourceColumnList, List<String> previousColumnList) {
    List<String> addedColumnList = new ArrayList<>(sourceColumnList);
    // TODO: if previous list is empty it means there are no log entries in process log currently. But this could be
    // the first run of the whole migration program, hence there will obviously be no log entries; in this case add
    // logic to test destination columns using gethivejdbcconnection, or you can skip this logic entirely and get
    // columns always from hive metadata.
    if (!previousColumnList.isEmpty())
        addedColumnList.removeAll(previousColumnList);
    for (String addedColumn : addedColumnList) {
        LOGGER.debug("addedColumn = " + addedColumn);
    }
    return addedColumnList;
}
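One practical note on this pattern: in the common List implementations, removeAll asks the argument collection whether it contains each element, so passing a large List makes the call roughly quadratic. Wrapping the "previous" list in a HashSet keeps the membership test constant-time. A sketch of that variant (column names and sizes are made up for illustration):

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;

public class RemoveAllPerformanceSketch {
    public static void main(String[] args) {
        List<String> source = new ArrayList<>();
        List<String> previous = new ArrayList<>();
        for (int i = 0; i < 10_000; i++) {
            source.add("col_" + i);
            if (i % 2 == 0) {
                previous.add("col_" + i);
            }
        }

        List<String> added = new ArrayList<>(source);
        // removeAll calls contains(...) on the argument for each element of the list,
        // so passing a HashSet instead of a List turns each lookup into O(1).
        added.removeAll(new HashSet<>(previous));
        System.out.println(added.size()); // 5000
    }
}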
From source file:io.github.retz.inttest.RetzIntTest.java
@Test
public void scheduleAppTest2() throws Exception {
    URI uri = new URI("http://" + RETZ_HOST + ":" + RETZ_PORT);
    try (Client client = Client.newBuilder(uri).setAuthenticator(config.getAuthenticator()).build()) {
        loadSimpleApp(client, "echo3");

        List<EchoJob> finishedJobs = new LinkedList<>();
        List<Integer> argvList = IntStream.rangeClosed(0, 32).boxed().collect(Collectors.toList());
        argvList.addAll(Arrays.asList(42, 63, 64, 127, 128, 151, 192, 255));
        int jobNum = argvList.size();
        List<EchoJob> echoJobs = scheduleEchoJobs(client, "echo3", "echo ", argvList);
        assertThat(echoJobs.size(), is(jobNum));

        for (int i = 0; i < 32; i++) {
            List<EchoJob> toRemove = toRemove(client, echoJobs, false);
            if (!toRemove.isEmpty()) {
                i = 0;
            }
            echoJobs.removeAll(toRemove);
            finishedJobs.addAll(toRemove);
            if (echoJobs.isEmpty()) {
                break;
            }
            Thread.sleep(1000);
            System.err.println(TimestampHelper.now() + ": Finished=" + ClientHelper.finished(client).size()
                    + ", Running=" + ClientHelper.running(client).size() + ", Scheduled="
                    + ClientHelper.queue(client).size());
            for (Job finished : ClientHelper.finished(client)) {
                assertThat(finished.retry(), is(0));
                assertThat(finished.state(), is(Job.JobState.FINISHED));
                assertThat(finished.result(), is(RES_OK));
            }
        }
        assertThat(finishedJobs.size(), is(jobNum));
        assertThat(ClientHelper.finished(client).size(), greaterThanOrEqualTo(jobNum));
        assertThat(ClientHelper.running(client).size(), is(0));
        assertThat(ClientHelper.queue(client).size(), is(0));

        UnloadAppResponse unloadRes = (UnloadAppResponse) client.unload("echo3");
        assertThat(unloadRes.status(), is("ok"));
    }
}
From source file:io.github.retz.inttest.RetzIntTest.java
@Test
public void scheduleAppTest() throws Exception {
    URI uri = new URI("http://" + RETZ_HOST + ":" + RETZ_PORT);
    try (Client client = Client.newBuilder(uri).setAuthenticator(config.getAuthenticator()).build()) {
        loadSimpleApp(client, "echo2");

        List<EchoJob> finishedJobs = new LinkedList<>();
        List<Integer> argvList = IntStream.rangeClosed(0, 32).boxed().collect(Collectors.toList());
        argvList.addAll(Arrays.asList(42, 63, 64, 127, 128, 151, 192, 255));
        int jobNum = argvList.size();
        List<EchoJob> echoJobs = scheduleEchoJobs(client, "echo2", "echo.sh ", argvList);
        assertThat(echoJobs.size(), is(jobNum));

        for (int i = 0; i < 16; i++) {
            List<EchoJob> toRemove = toRemove(client, echoJobs, true);
            if (!toRemove.isEmpty()) {
                i = 0;
            }
            echoJobs.removeAll(toRemove);
            finishedJobs.addAll(toRemove);
            if (echoJobs.isEmpty()) {
                break;
            }
            Thread.sleep(1000);
            System.err.println(TimestampHelper.now() + ": Finished=" + ClientHelper.finished(client).size()
                    + ", Running=" + ClientHelper.running(client).size() + ", Scheduled="
                    + ClientHelper.queue(client).size());
            for (Job finished : ClientHelper.finished(client)) {
                assertThat(finished.retry(), is(0));
            }
        }
        assertThat(finishedJobs.size(), is(jobNum));
        assertThat(ClientHelper.finished(client).size(), greaterThanOrEqualTo(jobNum));
        assertThat(ClientHelper.running(client).size(), is(0));
        assertThat(ClientHelper.queue(client).size(), is(0));

        UnloadAppResponse unloadRes = (UnloadAppResponse) client.unload("echo2");
        assertThat(unloadRes.status(), is("ok"));
    }
}