List of usage examples for java.util.Set.toString()

public String toString()

Returns a string representation of the set. The concrete implementations used in the examples below (HashSet, TreeSet, LinkedHashSet, EnumSet, ...) inherit toString() from AbstractCollection, which renders the elements in iteration order as "[e1, e2, e3]".
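A minimal, self-contained illustration (not taken from any of the projects below) of the string this method produces and how the chosen Set implementation determines element order:

import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.Set;
import java.util.TreeSet;

public class SetToStringDemo {
    public static void main(String[] args) {
        Set<String> hash = new HashSet<>();         // unspecified iteration order
        Set<String> linked = new LinkedHashSet<>(); // insertion order
        Set<String> tree = new TreeSet<>();         // natural (sorted) order
        for (String s : new String[] { "banana", "apple", "cherry" }) {
            hash.add(s);
            linked.add(s);
            tree.add(s);
        }
        // println calls toString() implicitly
        System.out.println(hash);    // e.g. [banana, cherry, apple] -- order may vary
        System.out.println(linked);  // [banana, apple, cherry]
        System.out.println(tree);    // [apple, banana, cherry]
    }
}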
From source file:org.mxupdate.eclipse.MXAdapter.java
/**
 * Updates given MX update files in the MX database. If
 * {@link #PREF_UPDATE_FILE_CONTENT} is set, the file content is also
 * transferred within the update (e.g. if an update on another server is
 * done).
 *
 * @param _files    MxUpdate files which must be updated
 * @param _compile  if <i>true</i> all JPOs are compiled; if <i>false</i>
 *                  no JPOs are compiled, only an update is done
 * @see #execMql(CharSequence)
 */
public void update(final List<IFile> _files, final boolean _compile) {
    // update by file content
    if (this.preferences.getBoolean(MXAdapter.PREF_UPDATE_FILE_CONTENT)) {
        final Map<String, String> files = new HashMap<String, String>();
        for (final IFile file : _files) {
            try {
                final InputStream in = new FileInputStream(file.getLocation().toFile());
                final byte[] bytes = new byte[in.available()];
                in.read(bytes);
                in.close();
                files.put(file.getLocation().toString(), new String(bytes, file.getCharset()));
            } catch (final UnsupportedEncodingException e) {
                this.console.logError(Messages.getString("MXAdapter.ExceptionConvertFileContent", //$NON-NLS-1$
                        file.getLocation().toString()), e);
            } catch (final CoreException e) {
                this.console.logError(Messages.getString("MXAdapter.ExceptionFileCharSet", //$NON-NLS-1$
                        file.getLocation().toString()), e);
            } catch (final IOException e) {
                this.console.logError(Messages.getString("MXAdapter.ExceptionReadFileContentFailed", //$NON-NLS-1$
                        file.getLocation().toString()), e);
            }
        }
        try {
            this.console.logInfo(this.jpoInvoke("org.mxupdate.plugin.Update", "updateByContent", files, _compile));
        } catch (final Exception e) {
            this.console.logError(Messages.getString("MXAdapter.ExceptionUpdateFailed", //$NON-NLS-1$
                    files.keySet().toString()), e);
        }
    // update by file names
    } else {
        final Set<String> fileNames = new HashSet<String>();
        for (final IFile file : _files) {
            fileNames.add(file.getLocation().toString());
        }
        try {
            this.console.logInfo(this.jpoInvoke("org.mxupdate.plugin.Update", "updateByName", fileNames, _compile));
        } catch (final Exception e) {
            this.console.logError(Messages.getString("MXAdapter.ExceptionUpdateFailed", //$NON-NLS-1$
                    fileNames.toString()), e);
        }
    }
}
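A stripped-down, hypothetical sketch of the update-by-name branch above: file paths are collected into a HashSet and the set's toString() value ("[path1, path2, ...]", in unspecified order) is used only in the error message. The paths below are made up for illustration:

import java.util.HashSet;
import java.util.Set;

public class FileNameSetDemo {
    public static void main(String[] args) {
        // Collect (hypothetical) file paths into a Set, as the update-by-name branch does.
        Set<String> fileNames = new HashSet<>();
        fileNames.add("/project/src/Type_A.tcl");
        fileNames.add("/project/src/Type_B.tcl");

        // HashSet.toString() renders the elements as "[elem1, elem2, ...]";
        // iteration order is unspecified, so the string is suitable for logging only.
        System.out.println("Update failed for: " + fileNames.toString());
    }
}

As a side note, java.nio.file.Files.readAllBytes is a more robust way to read a whole file than sizing the buffer with InputStream.available(); the example above keeps the project's original approach.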
From source file:org.apache.storm.daemon.nimbus.NimbusUtils.java
@SuppressWarnings("rawtypes") @ClojureClass(className = "backtype.storm.daemon.nimbus#alive-executors") private static Set<ExecutorInfo> aliveExecutors(NimbusData nimbus, TopologyDetails topologyDetails, Set<ExecutorInfo> allExecutors, Assignment existingAssignment) { LOG.debug("Computing alive executors for " + topologyDetails.getId() + "\n" + "Executors: " + allExecutors.toString() + "\n" + "Assignment: " + existingAssignment.toString() + "\n" + "Heartbeat cache: " + nimbus.getExecutorHeartbeatsCache().get(topologyDetails.getId())); // TODO: need to consider all executors associated with a dead executor // (in same slot) dead as well, don't just rely on heartbeat being the // same/*from w w w . j av a2s . c o m*/ Map conf = nimbus.getConf(); String stormId = topologyDetails.getId(); Set<ExecutorInfo> allAliveExecutors = new HashSet<ExecutorInfo>(); Map<ExecutorInfo, Integer> executorStartTimes = existingAssignment.getExecutorToStartTimeSecs(); Map<ExecutorInfo, ExecutorCache> heartbeatsCache = nimbus.getExecutorHeartbeatsCache().get(stormId); int taskLaunchSecs = CoreUtil.parseInt(conf.get(Config.NIMBUS_TASK_LAUNCH_SECS), 30); for (ExecutorInfo executor : allExecutors) { Integer startTime = executorStartTimes.get(executor); boolean isTaskLaunchNotTimeOut = CoreUtil.timeDelta(null != startTime ? startTime : 0) < taskLaunchSecs; // boolean isExeTimeOut = heartbeatsCache.get(executor).isTimedOut(); if (startTime != null && (isTaskLaunchNotTimeOut || !isExeTimeOut)) { allAliveExecutors.add(executor); } else { LOG.info("Executor {}:{} not alive", stormId, executor.toString()); } } return allAliveExecutors; }
From source file:com.google.ie.common.builder.ProjectBuilder.java
/**
 * Retrieves the list of Projects created by a user.
 *
 * @param user          the User object.
 * @param retrievalInfo the idea list retrieval information.
 * @return Returns the list of ProjectDetail objects.
 */
public List<ProjectDetail> getProjectsForUser(User user, RetrievalInfo retrievalInfo) {
    List<ProjectDetail> projectDtoList = null;
    Set<String> projectKeys = new HashSet<String>();
    /* Get developers having the specific user key */
    LOGGER.debug("User key =" + user.getUserKey());
    List<Developer> developers = developerService.getDeveloperByUserKey(user.getUserKey(), retrievalInfo);
    /* Get the Set of project keys associated with the developers */
    Iterator<Developer> iterator = developers.iterator();
    while (iterator.hasNext()) {
        Developer developer = iterator.next();
        if (!StringUtils.isBlank(developer.getProjectKey())) {
            Project proj = projectService.getProjectById(developer.getProjectKey());
            if (!proj.getStatus().equals(Project.STATUS_DELETED))
                projectKeys.add(developer.getProjectKey());
        }
    }
    LOGGER.debug("Project keys =" + projectKeys.toString());
    /* Get Project list by Set of project keys. */
    List<Project> projects = projectService.getProjects(projectKeys, retrievalInfo);
    Collections.sort(projects, new ProjectCreationDateComparator());
    /* Convert them into ProjectDetails. */
    if (projects != null && projects.size() > ZERO) {
        LOGGER.debug("Project size =" + projects.size());
        projectDtoList = convertToProjectDetailList(projects, true, false, true);
    }
    return projectDtoList;
}
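A reduced sketch of the same pattern with made-up data: a HashSet de-duplicates project keys gathered from several developer records, and toString() gives a quick debug view of the collected keys:

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class ProjectKeyDedupDemo {
    public static void main(String[] args) {
        // Hypothetical developer records, each referring to a project key (possibly repeated or blank).
        List<String> developerProjectKeys = Arrays.asList("p-101", "p-205", "p-101", "", "p-307");

        Set<String> projectKeys = new HashSet<>();
        for (String key : developerProjectKeys) {
            if (!key.isEmpty()) {          // skip blank keys, as the example does with StringUtils.isBlank
                projectKeys.add(key);      // duplicates are silently dropped by the Set
            }
        }
        // Same kind of debug line as LOGGER.debug("Project keys =" + projectKeys.toString());
        System.out.println("Project keys = " + projectKeys);
    }
}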
From source file:org.apache.phoenix.coprocessor.PhoenixAccessController.java
public String authString(String user, TableName table, Set<Action> actions) {
    StringBuilder sb = new StringBuilder();
    sb.append(" (user=").append(user != null ? user : "UNKNOWN").append(", ");
    sb.append("scope=").append(table == null ? "GLOBAL" : table.getNameWithNamespaceInclAsString()).append(", ");
    // guard against a null action set before calling size()/toString()
    sb.append(actions != null && actions.size() > 1 ? "actions=" : "action=")
            .append(actions != null ? actions.toString() : "").append(")");
    return sb.toString();
}
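A hypothetical call showing the kind of string this helper builds; the enum below is a stand-in for HBase's Permission.Action, and EnumSet prints its elements in natural (declaration) order:

import java.util.EnumSet;
import java.util.Set;

public class AuthStringDemo {
    // stand-in for the HBase Permission.Action enum used above
    enum Action { READ, WRITE }

    public static void main(String[] args) {
        Set<Action> actions = EnumSet.of(Action.READ, Action.WRITE);
        // EnumSet inherits AbstractCollection.toString(), so this prints:
        //  (user=alice, scope=GLOBAL, actions=[READ, WRITE])
        System.out.println(" (user=alice, scope=GLOBAL, "
                + (actions.size() > 1 ? "actions=" : "action=") + actions + ")");
    }
}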
From source file:cs.ox.ac.uk.gsors.GroupPreferencesTest1.java
public void testFORewriting() throws Exception {
    // Configuration.
    final DecompositionStrategy decomposition = DecompositionStrategy.DECOMPOSE;
    final RewritingLanguage rewLang = RewritingLanguage.UCQ;
    final SubCheckStrategy subchkStrategy = SubCheckStrategy.INTRADEC;
    final NCCheck ncCheckStrategy = NCCheck.NONE;
    LOGGER.info("Decomposition: " + decomposition.name());
    LOGGER.info("Rewriting Language: " + rewLang.name());
    LOGGER.info("Subsumption Check Strategy: " + subchkStrategy.name());
    LOGGER.info("Negative Constraints Check Strategy " + ncCheckStrategy.name());
    final File testSuiteFile = FileUtils.getFile(_WORKING_DIR,
            FilenameUtils.separatorsToSystem(_DEFAULT_INPUT_PATH), "test-cases1.txt");
    final List<String> tests = IOUtils.readLines(new FileReader(testSuiteFile));
    final String creationDate = dateFormat.format(new Date());
    // Parse the program
    final Parser parser = new Parser();
    parser.parse(getStringFile(_DEFAULT_INPUT_PATH + "prefDB-ontology.dtg"));
    // Get the rules
    final List<IRule> rules = parser.getRules();
    // Get the queries
    final List<IQuery> queryHeads = parser.getQueries();
    final Map<IPredicate, IRelation> conf = parser.getDirectives();
    if (!conf.isEmpty()) {
        StorageManager.getInstance();
        StorageManager.configure(conf);
    }
    // Get the TGDs from the set of rules
    final List<IRule> tgds = RewritingUtils.getTGDs(rules, queryHeads);
    final List<IRule> mSBox = RewritingUtils.getSBoxRules(rules, queryHeads);
    final IRuleSafetyProcessor ruleProc = new StandardRuleSafetyProcessor();
    ruleProc.process(mSBox);
    final IQueryRewriter ndmRewriter = new NDMRewriter(mSBox);
    final IRelationFactory rf = new RelationFactory();
    // Convert the query bodies in rules
    final List<IRule> bodies = new LinkedList<IRule>(rules);
    bodies.removeAll(tgds);
    final List<IRule> queries = RewritingUtils.getQueries(bodies, queryHeads);
    // get the constraints from the set of rules
    final Set<IRule> constraints = RewritingUtils.getConstraints(rules, queryHeads);
    final Set<Expressivity> exprs = RewritingUtils.getExpressivity(tgds);
    LOGGER.info("Expressivity: " + exprs.toString());
    if (!exprs.contains(Expressivity.LINEAR) && !exprs.contains(Expressivity.STICKY)) {
        extracted();
    }
    // compute the dependency graph
    LOGGER.debug("Computing position dependencies.");
    // long depGraphMem = MonitoringUtils.getHeapUsage();
    long posDepTime = System.currentTimeMillis();
    Map<Pair<IPosition, IPosition>, Set<List<IRule>>> deps =
            DepGraphUtils.computePositionDependencyGraph(tgds);
    posDepTime = System.currentTimeMillis() - posDepTime;
    // depGraphMem = depGraphMem - MonitoringUtils.getHeapUsage();
    // Setup caching
    CacheManager.setupCaching();
    // if linear TGDs, compute the atom coverage graph.
    LOGGER.debug("Computing atom coverage graph.");
    long atomCoverGraphTime = System.currentTimeMillis();
    if (exprs.contains(Expressivity.LINEAR)) {
        deps = DepGraphUtils.computeAtomCoverageGraph(deps);
    }
    atomCoverGraphTime = System.currentTimeMillis() - atomCoverGraphTime;
    // depGraphMem = MonitoringUtils.getHeapUsage() - depGraphMem;
    // rewriting constraints
    // long ncRewMem = MonitoringUtils.getHeapUsage();
    final ParallelRewriter cnsRewriter = new ParallelRewriter(DecompositionStrategy.MONOLITIC,
            RewritingLanguage.UCQ, SubCheckStrategy.NONE, NCCheck.NONE);
    long ncRewTime = System.currentTimeMillis();
    final Set<IRule> rewrittenConstraints = Sets.newHashSet();
    if (!ncCheckStrategy.equals(NCCheck.NONE)) {
        for (final IRule c : constraints) {
            rewrittenConstraints.addAll(cnsRewriter.getRewriting(c, tgds, new HashSet<IRule>(), deps, exprs));
        }
    }
    ncRewTime = System.currentTimeMillis() - ncRewTime;
    // ncRewMem = ncRewMem - MonitoringUtils.getHeapUsage();
    LOGGER.debug("Finished rewriting constraints.");
    // Compute the Rewriting
    final ParallelRewriter rewriter = new ParallelRewriter(decomposition, rewLang, subchkStrategy, ncCheckStrategy);
    Map<String, Integer> cities = new HashMap<String, Integer>();
    // cities.put("Peoria", 109);
    // cities.put("Gilbert", 163);
    // cities.put("Glendale", 242);
    // cities.put("Chandler", 349);
    cities.put("Tempe", 465);
    // cities.put("Scottsdale", 780);
    // cities.put("Phoenix", 1683);
    List<Integer> ks = new ArrayList<Integer>();
    ks.add(1);
    ks.add(2);
    ks.add(3);
    List<AggregateStrategy> str = new ArrayList<AggregateStrategy>();
    str.add(AggregateStrategy.CSU);
    str.add(AggregateStrategy.Plurality);
    str.add(AggregateStrategy.PluralityMisery);
    for (AggregateStrategy strategyQA : str) {
        final String summaryPrefix = StringUtils.join(creationDate, "-", strategyQA.toString());
        final File sizeSummaryFile = FileUtils.getFile(_WORKING_DIR,
                FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH + "/" + strategyQA.toString()),
                FilenameUtils.separatorsToSystem(_DEFAULT_SUMMARY_DIR),
                StringUtils.join(summaryPrefix, "-", "size-summary.csv"));
        final CSVWriter sizeSummaryWriter = new CSVWriter(new FileWriter(sizeSummaryFile), ',');
        final File timeSummaryFile = FileUtils.getFile(_WORKING_DIR,
                FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH + "/" + strategyQA.toString()),
                FilenameUtils.separatorsToSystem(_DEFAULT_SUMMARY_DIR),
                StringUtils.join(summaryPrefix, "-", "time-summary.csv"));
        final CSVWriter timeSummaryWriter = new CSVWriter(new FileWriter(timeSummaryFile), ',');
        // final File cacheSummaryFile = FileUtils.getFile(_WORKING_DIR,
        //         FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH + "/" + strategyQA.toString()),
        //         FilenameUtils.separatorsToSystem(_DEFAULT_SUMMARY_DIR),
        //         StringUtils.join(summaryPrefix, "-", "cache-summary.csv"));
        // final CSVWriter cacheSummaryWriter = new CSVWriter(new FileWriter(cacheSummaryFile), ',');
        //
        // final File memorySummaryFile = FileUtils.getFile(_WORKING_DIR,
        //         FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH + "/" + strategyQA.toString()),
        //         FilenameUtils.separatorsToSystem(_DEFAULT_SUMMARY_DIR),
        //         StringUtils.join(summaryPrefix, "-", "memory-summary.csv"));
        // final CSVWriter memorySummaryWriter = new CSVWriter(new FileWriter(memorySummaryFile), ',');
        sizeSummaryWriter.writeNext(GReportingUtils.getSummaryRewritingSizeReportHeader());
        timeSummaryWriter.writeNext(GReportingUtils.getSummaryRewritingTimeReportHeader());
        // cacheSummaryWriter.writeNext(GReportingUtils.getSummaryCachingReportHeader());
        // memorySummaryWriter.writeNext(GReportingUtils.getSummaryMemoryReportHeader());
        for (Integer k : ks) {
            for (String city : cities.keySet()) {
                for (int con = 0; con < 1; con++) {
                    LOGGER.info("con-city-k: " + con + "-" + city + "-" + k + "-" + strategyQA.toString());
                    // top k for each preferences
                    for (final String testName : tests) {
                        // Create a buffer for the output
                        final IRelation result = rf.createRelation();
                        GPrefParameters parameters = new GPrefParameters(testName, k, city, cities.get(city));
                        // Create the Directory where to store the test results
                        // final File outTestDir = FileUtils.getFile(_WORKING_DIR,
                        //         FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH + "/"
                        //                 + strategyQA.toString() + k + city),
                        //         testName);
                        // if (!outTestDir.exists()) {
                        //     if (outTestDir.mkdirs()) {
                        //         LOGGER.info("Created output directory: " + testName);
                        //     } else {
                        //         LOGGER.fatal("Error creating output directory");
                        //     }
                        // }
                        LOGGER.info("Processing file: " + testName);
                        // dump the rewritten constraints:
                        IRule q = null;
                        if (parameters.getScenario() == Scenario.BREAKFAST_FOOD
                                || parameters.getScenario() == Scenario.LUNCH_FOOD
                                || parameters.getScenario() == Scenario.DINNER_FOOD) {
                            q = queries.get(0);
                        }
                        if (parameters.getScenario() == Scenario.BREAKFAST_CUSINE
                                || parameters.getScenario() == Scenario.LUNCH_CUSINE
                                || parameters.getScenario() == Scenario.DINNER_CUSINE) {
                            q = queries.get(1);
                        }
                        if (parameters.getScenario() == Scenario.BREAKFAST_PLACE
                                || parameters.getScenario() == Scenario.LUNCH_PLACE
                                || parameters.getScenario() == Scenario.DINNER_PLACE) {
                            q = queries.get(2);
                        }
                        CacheManager.setupCaching();
                        final String queryPredicate = q.getHead().iterator().next().getAtom().getPredicate()
                                .getPredicateSymbol();
                        // Setup reporting
                        final ToitReporter rep = ToitReporter.getInstance(true);
                        ToitReporter.setupReporting();
                        ToitReporter.setQuery(queryPredicate);
                        ToitReporter.setTest(testName);
                        ToitReporter.setK(parameters.getK());
                        // GroupReporter.setStrategy(parameters.getStrategy());
                        ToitReporter.setCity(parameters.getCity());
                        ToitReporter.setGroupID(parameters.getGroupId());
                        ToitReporter.setNbUsers(parameters.getMaxNbUsers());
                        ToitReporter.setNbBuss(parameters.getBs());
                        ToitReporter.setScenario(parameters.getScenario());
                        rep.setValue(GRewMetric.DEPGRAPH_TIME, posDepTime);
                        // rep.setValue(GRewMetric.DEPGRAPH_MEM, depGraphMem);
                        LOGGER.info("Processing query: ".concat(q.toString()));
                        // final long rewMem = MonitoringUtils.getHeapUsage();
                        final long overallTime = System.currentTimeMillis();
                        final Set<IRule> rewriting = rewriter.getRewriting(q, tgds, rewrittenConstraints, deps, exprs);
                        rep.setValue(GRewMetric.REW_TIME, System.currentTimeMillis() - overallTime);
                        // rep.setValue(GRewMetric.REW_MEM, MonitoringUtils.getHeapUsage() - rewMem);
                        rep.setValue(GRewMetric.REW_SIZE, (long) rewriting.size());
                        rep.setValue(GRewMetric.REW_CNS_TIME, ncRewTime);
                        // rep.setValue(GRewMetric.REW_CNS_MEM, ncRewMem);
                        // Other metrics
                        // Caching size metrics
                        // Create a file to store the rewriting results.
                        // File outFile = FileUtils.getFile(outTestDir, queryPredicate.concat("_rew.dtg"));
                        // final FileWriter rewFW = new FileWriter(outFile);
                        //
                        // rewFW.write("/// Query: " + q + "///\n");
                        // rewFW.write("/// Ontology: " + testName + "///");
                        // rewFW.write("/// Created on: " + creationDate + " ///\n");
                        // rewFW.write("/// Rules in the program: " + rules.size() + " ///\n");
                        // rewFW.write("/// TGDs in the program: " + tgds.size() + " ///\n");
                        // LOGGER.info("Writing the output at: " + outFile.getAbsolutePath());
                        // dump metrics for individual queries.
                        // rewFW.write(rep.getReport());
                        //
                        // rewFW.write(IOUtils.LINE_SEPARATOR);
                        // rewFW.write(IOUtils.LINE_SEPARATOR);
                        //
                        // rewFW.write("/// Rewritten Program ///\n");
                        final Set<ILiteral> newHeads = new HashSet<ILiteral>();
                        Map<IPredicate, IRelation> results = new HashMap<IPredicate, IRelation>();
                        for (final IRule qr : rewriting) {
                            newHeads.add(qr.getHead().iterator().next());
                            // rewFW.write(qr + "\n");
                            final Set<IRule> sboxRewriting = new LinkedHashSet<IRule>();
                            Set<IRule> rrules = ndmRewriter.getRewriting(qr);
                            sboxRewriting.addAll(rrules);
                            // Produce the SQL rewriting for each query in the program
                            final SQLRewriter sqlRewriter = new SQLRewriter(sboxRewriting);
                            // rewFW.write("Computing SQL Rewriting");
                            try {
                                // Get the SQL rewriting as Union of Conjunctive Queries
                                long duration = -System.nanoTime();
                                final List<String> ucqSQLRewriting = sqlRewriter.getSQLRewritings(
                                        parameters.getConstraintsSqlQuery(), parameters.getNbNodes(),
                                        parameters.getStartFromRes());
                                duration = ((duration + System.nanoTime()) / 1000000);
                                IRelation resultAux = rf.createRelation();
                                for (final String qu : ucqSQLRewriting) {
                                    IRelation r = StorageManager.executeQuery(qu);
                                    // LOGGER.info("-Query: " + qu + " " + r.size() + " " + c);
                                    resultAux.addAll(r);
                                }
                                for (IPredicate predicate : qr.getBodyPredicates()) {
                                    results.put(predicate, resultAux);
                                }
                                result.addAll(resultAux);
                                // LOGGER.info("-R: " + result.size());
                            } catch (final SQLException e) {
                                e.printStackTrace();
                            }
                        }
                        // write the result in the output
                        // rewFW.write(result.toString());
                        // construct the graph
                        Map<User, List<user.models.Pair<IPredicate, IPredicate>>> prefs = JsonHelper
                                .getGPreferences(parameters.getPrefs(), tgds);
                        final cs.ox.ac.uk.gsors2.GPreferencesGraph prefGraph = Factory.GPGRAPH
                                .createPreferencesGraph();
                        long constPrefGraphTime = System.currentTimeMillis();
                        // final long constPrefGraphMem = MonitoringUtils.getHeapUsage();
                        for (User user : prefs.keySet()) {
                            for (user.models.Pair<IPredicate, IPredicate> pairPreference : prefs.get(user)) {
                                IRelation morePrefs = results.get(pairPreference.getElement0());
                                IRelation lessPrefs = results.get(pairPreference.getElement1());
                                for (int j = 0; j < morePrefs.size(); j++) {
                                    ITuple el1 = morePrefs.get(j);
                                    if (!lessPrefs.contains(el1)) {
                                        for (int i = 0; i < lessPrefs.size(); i++) {
                                            ITuple el2 = lessPrefs.get(i);
                                            GPreferenceEdge edge = new GPreferenceEdge(el1, el2, user);
                                            prefGraph.addPreference(edge);
                                        }
                                    }
                                }
                            }
                        }
                        for (int i = 0; i < result.size(); i++) {
                            ITuple v = result.get(i);
                            prefGraph.addVertex(v);
                        }
                        // LOGGER.info("-----Size--Graph--: " + result.size() + "--" + prefGraph.getVertexesSize());
                        constPrefGraphTime = System.currentTimeMillis() - constPrefGraphTime;
                        rep.setValue(GRewMetric.PREFGRAPH_CONST_TIME, constPrefGraphTime);
                        rep.setValue(GRewMetric.PREFGRAPH_CONST_SIZE_V, (long) prefGraph.getVertexesSize());
                        rep.setValue(GRewMetric.PREFGRAPH_CONST_SIZE_E, (long) prefGraph.getEdgesSize());
                        // rep.setValue(GRewMetric.PREFGRAPH_CONST_MEM,
                        //         MonitoringUtils.getHeapUsage() - constPrefGraphMem);
                        long mergeOperatorTime = System.currentTimeMillis();
                        // final long mergeProbModel = MonitoringUtils.getHeapUsage();
                        // prefGraph.mergeProbabilisticModel(_DEFAULT_INPUT_PATH + "reviews.txt");
                        mergeOperatorTime = System.currentTimeMillis() - mergeOperatorTime;
                        // rep.setValue(GRewMetric.PREFGRAPH_MERGE_MEM,
                        //         MonitoringUtils.getHeapUsage() - mergeProbModel);
                        rep.setValue(GRewMetric.PREFGRAPH_MERGE_SIZE_V, (long) prefGraph.getVertexesSize());
                        rep.setValue(GRewMetric.PREFGRAPH_MERGE_SIZE_E, (long) prefGraph.getEdgesSize());
                        rep.setValue(GRewMetric.PREFGRAPH_MERGE_TIME, (long) mergeOperatorTime);
                        // long topKMem = MonitoringUtils.getHeapUsage();
                        long topKTime = System.currentTimeMillis();
                        IRelation r = GTopKAlgorithms.getTopK(prefGraph, parameters.getK(), strategyQA);
                        topKTime = System.currentTimeMillis() - topKTime;
                        // rep.setValue(GRewMetric.PREFGRAPH_TOPK_MEM, topKMem - MonitoringUtils.getHeapUsage());
                        rep.setValue(GRewMetric.PREFGRAPH_TOPK_TIME, topKTime);
                        rep.setValue(GRewMetric.PREFGRAPH_TOPK_SIZE_V, (long) prefGraph.getVertexesSize());
                        rep.setValue(GRewMetric.PREFGRAPH_TOPK_SIZE_E, (long) prefGraph.getEdgesSize());
                        rep.setValue(GRewMetric.ANSWER_SIZE, (long) r.size());
                        // rewFW.write("\n");
                        // for (final ILiteral h : newHeads) {
                        //     rewFW.write("?- " + h + ".\n");
                        // }
                        // rewFW.write("\n");
                        // rewFW.flush();
                        // rewFW.close();
                        // dump summary metrics.
                        sizeSummaryWriter.writeNext(rep.getSummarySizeMetrics());
                        timeSummaryWriter.writeNext(rep.getSummaryTimeMetrics());
                        // cacheSummaryWriter.writeNext(rep.getSummaryCacheMetrics());
                        // memorySummaryWriter.writeNext(rep.getSummaryMemoryMetrics());
                        sizeSummaryWriter.flush();
                        timeSummaryWriter.flush();
                        // cacheSummaryWriter.flush();
                        // memorySummaryWriter.flush();
                    }
                }
            }
        }
        sizeSummaryWriter.close();
        timeSummaryWriter.close();
        // cacheSummaryWriter.close();
        // memorySummaryWriter.close();
    }
}
From source file:jeeves.resources.dbms.ApacheDBCPool.java
/**
 * Builds the pool using init parameters from jeeves config.
 */
private void parseJeevesDBConfig(Element config) throws Exception {
    url = config.getChildText(Jeeves.Res.Pool.URL);
    String user = config.getChildText(Jeeves.Res.Pool.USER);
    String passwd = config.getChildText(Jeeves.Res.Pool.PASSWORD);
    String driver = config.getChildText(Jeeves.Res.Pool.DRIVER);
    String size = config.getChildText(Jeeves.Res.Pool.POOL_SIZE);
    String maxw = config.getChildText(Jeeves.Res.Pool.MAX_WAIT);
    String maxIdle = config.getChildText(Jeeves.Res.Pool.MAX_IDLE);
    String minIdle = config.getChildText(Jeeves.Res.Pool.MIN_IDLE);
    String maxActive = config.getChildText(Jeeves.Res.Pool.MAX_ACTIVE);
    String testWhileIdleStr = config.getChildText(Jeeves.Res.Pool.TEST_WHILE_IDLE);
    String timeBetweenEvictionRunsMillisStr = config.getChildText(Jeeves.Res.Pool.TIME_BETWEEN_EVICTION_RUNS_MILLIS);
    String minEvictableIdleTimeMillisStr = config.getChildText(Jeeves.Res.Pool.MIN_EVICTABLE_IDLE_TIME_MILLIS);
    String numTestsPerEvictionRunStr = config.getChildText(Jeeves.Res.Pool.NUM_TESTS_PER_EVICTION_RUN);
    String validationQuery = config.getChildText(Jeeves.Res.Pool.VALIDATION_QUERY);
    String transactionIsolation = config.getChildText(Jeeves.Res.Pool.TRANSACTION_ISOLATION);
    if (transactionIsolation == null) {
        // use READ_COMMITTED for everything by default except McKoi which
        // only supports SERIALIZABLE
        transactionIsolation = Jeeves.Res.Pool.TRANSACTION_ISOLATION_READ_COMMITTED;
        if (url.toUpperCase().contains("MCKOI"))
            transactionIsolation = Jeeves.Res.Pool.TRANSACTION_ISOLATION_SERIALIZABLE;
    } else {
        Set<String> isolations = new HashSet<String>();
        isolations.add(Jeeves.Res.Pool.TRANSACTION_ISOLATION_SERIALIZABLE);
        isolations.add(Jeeves.Res.Pool.TRANSACTION_ISOLATION_READ_COMMITTED);
        isolations.add(Jeeves.Res.Pool.TRANSACTION_ISOLATION_REPEATABLE_READ);
        if (!isolations.contains(transactionIsolation.toUpperCase())) {
            throw new IllegalArgumentException("Invalid " + Jeeves.Res.Pool.TRANSACTION_ISOLATION
                    + " parameter value: " + transactionIsolation + ". Should be one of "
                    + isolations.toString());
        }
    }
    warning("Using transaction isolation setting " + transactionIsolation);
    this.name = url;
    int poolSize = (size == null) ? Jeeves.Res.Pool.DEF_POOL_SIZE : Integer.parseInt(size);
    int maxWait = (maxw == null) ? Jeeves.Res.Pool.DEF_MAX_WAIT : Integer.parseInt(maxw);
    // set maximum number of prepared statements in pool cache
    int iMaxOpen = getPreparedStatementCacheSize(config);
    boolean testWhileIdle = false;
    if (testWhileIdleStr != null) {
        testWhileIdle = testWhileIdleStr.equals("true");
    }
    long timeBetweenEvictionRunsMillis = -1;
    if (timeBetweenEvictionRunsMillisStr != null) {
        timeBetweenEvictionRunsMillis = Long.parseLong(timeBetweenEvictionRunsMillisStr);
    }
    long minEvictableIdleTimeMillis = 1000 * 60 * 30;
    if (minEvictableIdleTimeMillisStr != null) {
        minEvictableIdleTimeMillis = Long.parseLong(minEvictableIdleTimeMillisStr);
    }
    int numTestsPerEvictionRun = 3;
    if (numTestsPerEvictionRunStr != null) {
        numTestsPerEvictionRun = Integer.parseInt(numTestsPerEvictionRunStr);
    }
    // create the datasource
    basicDataSource = new BasicDataSource();
    basicDataSource.setDriverClassName(driver);
    basicDataSource.setRemoveAbandoned(true);
    basicDataSource.setRemoveAbandonedTimeout(60 * 60);
    basicDataSource.setLogAbandoned(true);
    // configure the rest of the pool from params
    // http://commons.apache.org/dbcp/configuration.html
    if (maxActive != null) {
        basicDataSource.setMaxActive(Integer.parseInt(maxActive));
    } else {
        basicDataSource.setMaxActive(poolSize);
    }
    if (maxIdle != null) {
        basicDataSource.setMaxIdle(Integer.parseInt(maxIdle));
    } else {
        basicDataSource.setMaxIdle(poolSize);
    }
    if (minIdle != null) {
        basicDataSource.setMinIdle(Integer.parseInt(minIdle));
    } else {
        basicDataSource.setMinIdle(0);
    }
    basicDataSource.setMaxWait(maxWait);
    // always test connections when we get them from the pool
    basicDataSource.setTestOnBorrow(true);
    // time between runs of idle evictor thread
    basicDataSource.setTimeBetweenEvictionRunsMillis(timeBetweenEvictionRunsMillis);
    // test idle connections
    basicDataSource.setTestWhileIdle(testWhileIdle);
    // let idle connections sit in there forever
    basicDataSource.setMinEvictableIdleTimeMillis(minEvictableIdleTimeMillis);
    // test all idle connections each run
    basicDataSource.setNumTestsPerEvictionRun(numTestsPerEvictionRun);
    // set maximum number of prepared statements in pool cache, if not set
    // then switch it off altogether
    if (iMaxOpen != -1) {
        basicDataSource.setPoolPreparedStatements(true);
        basicDataSource.setMaxOpenPreparedStatements(iMaxOpen);
    } else {
        basicDataSource.setPoolPreparedStatements(false);
        basicDataSource.setMaxOpenPreparedStatements(-1);
    }
    if (validationQuery != null && validationQuery.trim().length() > 0) {
        basicDataSource.setValidationQuery(validationQuery);
    }
    basicDataSource.setDefaultReadOnly(false);
    basicDataSource.setDefaultAutoCommit(false);
    basicDataSource.setUrl(url);
    basicDataSource.setUsername(user);
    basicDataSource.setPassword(passwd);
    basicDataSource.setInitialSize(poolSize);
}
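A self-contained sketch of the whitelist check above, with illustrative literals instead of the Jeeves constants; Set.contains does the validation and Set.toString() lists the accepted values in the error message:

import java.util.HashSet;
import java.util.Set;

public class IsolationCheckDemo {
    public static void main(String[] args) {
        // Allowed values, mirroring the transaction-isolation whitelist in the pool config above.
        Set<String> isolations = new HashSet<>();
        isolations.add("SERIALIZABLE");
        isolations.add("READ_COMMITTED");
        isolations.add("REPEATABLE_READ");

        String requested = "READ_UNCOMMITTED";   // hypothetical configured value
        if (!isolations.contains(requested.toUpperCase())) {
            // Set.toString() gives a readable "[A, B, C]" listing of the valid options.
            throw new IllegalArgumentException(
                    "Invalid isolation value: " + requested + ". Should be one of " + isolations);
        }
    }
}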
From source file:net.duckling.ddl.web.controller.task.TaskBaseController.java
protected String deleteSame(String str1, String str2) {
    String[] str1A = str1.split(",");
    String[] str2A = str2.split(",");
    Set<String> set = new HashSet<String>();
    if (!CommonUtil.isNullArray(str1A)) {
        for (String str : str1A) {
            if (StringUtils.isEmpty(str)) {
                continue;
            }
            set.add(str);
        }
    }
    if (!CommonUtil.isNullArray(str2A)) {
        for (String str : str2A) {
            if (StringUtils.isEmpty(str)) {
                continue;
            }
            set.add(str);
        }
    }
    return set.toString().replace("[", "").replace("]", "");
}
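Stripping the brackets from toString() works, but it relies on the exact "[...]" rendering inherited from AbstractCollection. A sketch of an alternative (not the project's code) that joins the de-duplicated values directly with String.join:

import java.util.LinkedHashSet;
import java.util.Set;

public class JoinWithoutBracketsDemo {
    public static void main(String[] args) {
        Set<String> set = new LinkedHashSet<>();   // LinkedHashSet keeps insertion order
        set.add("alice");
        set.add("bob");
        set.add("alice");                          // duplicate is dropped by the Set

        // Same result as set.toString().replace("[", "").replace("]", ""),
        // but without depending on the toString() bracket format.
        String joined = String.join(", ", set);
        System.out.println(joined);                // alice, bob
    }
}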
From source file:com.seleniumtests.uipage.PageObject.java
/**
 * Selects the first unknown window. To be used when an action creates a new window or tab.
 *
 * @param waitMs wait for N milliseconds before raising an error
 * @return the handle of the window that was current before switching
 */
public final String selectNewWindow(int waitMs) {
    // app tests are not compatible with windows
    if (SeleniumTestsContextManager.getThreadContext().getTestType().family() == TestType.APP) {
        throw new ScenarioException("Application are not compatible with Windows");
    }
    // Keep the name of the current window handle before switching
    // sometimes, our action made window disappear
    String mainWindowHandle;
    try {
        mainWindowHandle = driver.getWindowHandle();
    } catch (Exception e) {
        mainWindowHandle = "";
    }
    logger.debug("Current handle: " + mainWindowHandle);
    // wait for window to be displayed
    Instant end = systemClock.instant().plusMillis(waitMs + 250L);
    Set<String> handles = new TreeSet<>();
    boolean found = false;
    while (end.isAfter(systemClock.instant()) && !found) {
        handles = driver.getWindowHandles();
        logger.debug("All handles: " + handles.toString());
        for (String handle : handles) {
            // we already know this handle
            if (getCurrentHandles().contains(handle)) {
                continue;
            }
            selectWindow(handle);
            // wait for a valid address
            String address = "";
            Instant endLoad = systemClock.instant().plusMillis(5000);
            while (address.isEmpty() && endLoad.isAfter(systemClock.instant())) {
                address = driver.getCurrentUrl();
            }
            // make window display in foreground
            // TODO: reactivate feature
            try {
                // Point windowPosition = driver.manage().window().getPosition();
                // org.openqa.selenium.interactions.Mouse mouse = ((HasInputDevices) driver).getMouse();
                // mouse.click();
                // Mouse mouse = new DesktopMouse();
                // mouse.click(new DesktopScreenRegion(Math.max(0, windowPosition.x) + driver.manage().window().getSize().width / 2, Math.max(0, windowPosition.y) + 5, 2, 2).getCenter());
            } catch (Exception e) {
                logger.warn("error while giving focus to window");
            }
            found = true;
            break;
        }
        WaitHelper.waitForMilliSeconds(300);
    }
    // check window has changed
    if (waitMs > 0 && mainWindowHandle.equals(driver.getWindowHandle())) {
        throw new CustomSeleniumTestsException("new window has not been found. Handles: " + handles);
    }
    return mainWindowHandle;
}
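A compact, hypothetical helper showing the same idea with plain set operations: the handles returned by getWindowHandles() minus the already-known handles yield the newly opened window, if any (the waiting and retry logic from the method above is omitted):

import java.util.HashSet;
import java.util.Set;
import org.openqa.selenium.WebDriver;

public class NewWindowFinder {
    /**
     * Returns the handle of a window that is not in knownHandles, or null if none appeared.
     */
    public static String findNewHandle(WebDriver driver, Set<String> knownHandles) {
        // getWindowHandles() returns a Set<String>; copying it lets us remove the known ones
        Set<String> current = new HashSet<>(driver.getWindowHandles());
        current.removeAll(knownHandles);
        return current.isEmpty() ? null : current.iterator().next();
    }
}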
From source file:org.apache.cxf.fediz.service.idp.service.security.GrantedAuthorityEntitlements.java
@Override
public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain)
        throws IOException, ServletException {
    try {
        Authentication currentAuth = SecurityContextHolder.getContext().getAuthentication();
        if (currentAuth == null) {
            chain.doFilter(request, response);
            return;
        }
        final Set<GrantedAuthority> authorities = new HashSet<GrantedAuthority>();
        if (currentAuth.getAuthorities() != null) {
            authorities.addAll(currentAuth.getAuthorities());
        }
        Iterator<? extends GrantedAuthority> authIt = currentAuth.getAuthorities().iterator();
        while (authIt.hasNext()) {
            GrantedAuthority ga = authIt.next();
            String roleName = ga.getAuthority();
            try {
                Role role = roleDAO.getRole(roleName.substring(5), Arrays.asList("all"));
                for (Entitlement e : role.getEntitlements()) {
                    authorities.add(new SimpleGrantedAuthority(e.getName()));
                }
            } catch (Exception ex) {
                LOG.error("Role '" + roleName + "' not found");
            }
        }
        if (LOG.isDebugEnabled()) {
            LOG.debug(authorities.toString());
        }
        UsernamePasswordAuthenticationToken enrichedAuthentication = new UsernamePasswordAuthenticationToken(
                currentAuth.getName(), currentAuth.getCredentials(), authorities);
        enrichedAuthentication.setDetails(currentAuth.getDetails());
        SecurityContextHolder.getContext().setAuthentication(enrichedAuthentication);
        LOG.info("Enriched AuthenticationToken added");
    } catch (Exception ex) {
        LOG.error("Failed to enrich security context with entitlements", ex);
    }
    chain.doFilter(request, response);
}
From source file:org.ff4j.cli.FF4jCliProcessor.java
private void processCommandGrant(String commandLine) {
    try {
        CommandLine cmd = CMD_PARSER.parse(grantOptions(), commandLine.split(" "));
        if (cmd.getArgList().size() != 1 || !cmd.hasOption("f") || !cmd.hasOption("r")) {
            logError("Invalid command, expecting grant[revoke] -r <role> -f <featureName>");
        } else {
            String feature = cmd.getOptionValue('f');
            String role = cmd.getOptionValue('r');
            if (!currentFF4J.getFeatureStore().exist(feature)) {
                logWarn("Feature does not exist, nothing updated");
            } else {
                if (cmd.getArgList().get(0).equals("grant")) {
                    currentFF4J.getFeatureStore().grantRoleOnFeature(feature, role);
                    logInfo("Role " + role + " has been added to feature " + feature);
                } else if (cmd.getArgList().get(0).equals("revoke")) {
                    Set<String> permissions = currentFF4J.getFeatureStore().read(feature).getPermissions();
                    if (permissions == null) {
                        logWarn("The role is invalid, there is no role on the feature " + feature);
                    } else if (permissions.contains(role)) {
                        currentFF4J.getFeatureStore().removeRoleFromFeature(feature, role);
                        logInfo(FEATURE + feature + " has no more role " + role);
                    } else {
                        logWarn("The role is invalid, expected one of " + permissions.toString());
                    }
                }
            }
        }
    } catch (ParseException e) {
        error(e, "Error during addToGroup/removeFromGroup command");
    }
}