Usage examples for java.util.logging.Logger.log
public void log(Level level, Throwable thrown, Supplier<String> msgSupplier)
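None of the collected examples below actually call this Supplier-based overload (added in Java 8); they use the String-message overloads log(Level, String, Throwable), log(Level, String, Object), and log(Level, String, Object[]) instead. As a minimal self-contained sketch (the class name DemoSupplierLogging and its helper are made up for illustration), the Supplier variants defer building the message until the record is known to be publishable:

import java.util.logging.Level;
import java.util.logging.Logger;

public class DemoSupplierLogging {

    private static final Logger LOGGER = Logger.getLogger(DemoSupplierLogging.class.getName());

    public static void main(String[] args) {
        // log(Level, Supplier<String>): the lambda runs only if FINE is loggable,
        // so an expensive message is not built when the level is disabled.
        LOGGER.log(Level.FINE, () -> "expensive state dump: " + buildStateDump());

        try {
            throw new IllegalStateException("demo failure");
        } catch (IllegalStateException e) {
            // log(Level, Throwable, Supplier<String>): the throwable is attached to
            // the LogRecord, and the message is again computed lazily.
            LOGGER.log(Level.SEVERE, e, () -> "operation failed for demo input");
        }
    }

    private static String buildStateDump() {
        return "..."; // placeholder for a costly computation
    }
}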
From source file: to.sparks.mtgox.net.HTTPAuthenticator.java

public HTTPAuthenticator(final Logger logger, String apiKey, String secret) {
    this.apiKey = apiKey;
    this.secret = secret;
    TrustManager[] trustAllCerts = new TrustManager[] { new X509TrustManager() {
        @Override
        public java.security.cert.X509Certificate[] getAcceptedIssuers() {
            return null;
        }

        @Override
        public void checkClientTrusted(java.security.cert.X509Certificate[] certs, String authType) {
        }

        @Override
        public void checkServerTrusted(java.security.cert.X509Certificate[] certs, String authType) {
        }
    } };
    try {
        SSLContext sc = SSLContext.getInstance("SSL");
        sc.init(null, trustAllCerts, new java.security.SecureRandom());
        HttpsURLConnection.setDefaultSSLSocketFactory(sc.getSocketFactory());
    } catch (Exception e) {
        logger.log(Level.SEVERE, null, e);
    }
}
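The excerpt above passes null as the message argument of log(Level, String, Throwable); that is legal, but a short description makes the record easier to read, and the trust-all manager it installs disables certificate validation. A minimal self-contained sketch of the same overload with a descriptive message (the class and method names here are made up, and it sticks to the JVM's default trust store):

import java.security.GeneralSecurityException;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.net.ssl.HttpsURLConnection;
import javax.net.ssl.SSLContext;

class SslSetupLogging {

    private static final Logger LOGGER = Logger.getLogger(SslSetupLogging.class.getName());

    static void installDefaultSslContext() {
        try {
            // Use the JVM's default trust managers rather than a trust-all manager.
            SSLContext sc = SSLContext.getInstance("TLS");
            sc.init(null, null, new java.security.SecureRandom());
            HttpsURLConnection.setDefaultSSLSocketFactory(sc.getSocketFactory());
        } catch (GeneralSecurityException e) {
            // Same log(Level, String, Throwable) overload, but with a descriptive message.
            LOGGER.log(Level.SEVERE, "Failed to initialize the SSL context", e);
        }
    }
}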
From source file: org.torproject.ernie.db.BridgeSnapshotReader.java

public BridgeSnapshotReader(BridgeDescriptorParser bdp, String bridgeDirectoriesDir) {
    Logger logger = Logger.getLogger(BridgeSnapshotReader.class.getName());
    SortedSet<String> parsed = new TreeSet<String>();
    File bdDir = new File(bridgeDirectoriesDir);
    File pbdFile = new File("stats/parsed-bridge-directories");
    boolean modified = false;
    if (bdDir.exists()) {
        if (pbdFile.exists()) {
            logger.fine("Reading file " + pbdFile.getAbsolutePath() + "...");
            try {
                BufferedReader br = new BufferedReader(new FileReader(pbdFile));
                String line = null;
                while ((line = br.readLine()) != null) {
                    parsed.add(line);
                }
                br.close();
                logger.fine("Finished reading file " + pbdFile.getAbsolutePath() + ".");
            } catch (IOException e) {
                logger.log(Level.WARNING, "Failed reading file " + pbdFile.getAbsolutePath() + "!", e);
                return;
            }
        }
        logger.fine("Importing files in directory " + bridgeDirectoriesDir + "/...");
        Stack<File> filesInInputDir = new Stack<File>();
        filesInInputDir.add(bdDir);
        while (!filesInInputDir.isEmpty()) {
            File pop = filesInInputDir.pop();
            if (pop.isDirectory()) {
                for (File f : pop.listFiles()) {
                    filesInInputDir.add(f);
                }
            } else if (!parsed.contains(pop.getName())) {
                try {
                    FileInputStream in = new FileInputStream(pop);
                    if (in.available() > 0) {
                        GzipCompressorInputStream gcis = new GzipCompressorInputStream(in);
                        TarArchiveInputStream tais = new TarArchiveInputStream(gcis);
                        BufferedInputStream bis = new BufferedInputStream(tais);
                        String fn = pop.getName();
                        String dateTime = fn.substring(11, 21) + " " + fn.substring(22, 24) + ":"
                                + fn.substring(24, 26) + ":" + fn.substring(26, 28);
                        while ((tais.getNextTarEntry()) != null) {
                            ByteArrayOutputStream baos = new ByteArrayOutputStream();
                            int len;
                            byte[] data = new byte[1024];
                            while ((len = bis.read(data, 0, 1024)) >= 0) {
                                baos.write(data, 0, len);
                            }
                            byte[] allData = baos.toByteArray();
                            if (allData.length == 0) {
                                continue;
                            }
                            String ascii = new String(allData, "US-ASCII");
                            BufferedReader br3 = new BufferedReader(new StringReader(ascii));
                            String firstLine = null;
                            while ((firstLine = br3.readLine()) != null) {
                                if (firstLine.startsWith("@")) {
                                    continue;
                                } else {
                                    break;
                                }
                            }
                            if (firstLine.startsWith("r ")) {
                                bdp.parse(allData, dateTime, false);
                            } else {
                                int start = -1, sig = -1, end = -1;
                                String startToken = firstLine.startsWith("router ") ? "router " : "extra-info ";
                                String sigToken = "\nrouter-signature\n";
                                String endToken = "\n-----END SIGNATURE-----\n";
                                while (end < ascii.length()) {
                                    start = ascii.indexOf(startToken, end);
                                    if (start < 0) {
                                        break;
                                    }
                                    sig = ascii.indexOf(sigToken, start);
                                    if (sig < 0) {
                                        break;
                                    }
                                    sig += sigToken.length();
                                    end = ascii.indexOf(endToken, sig);
                                    if (end < 0) {
                                        break;
                                    }
                                    end += endToken.length();
                                    byte[] descBytes = new byte[end - start];
                                    System.arraycopy(allData, start, descBytes, 0, end - start);
                                    bdp.parse(descBytes, dateTime, false);
                                }
                            }
                        }
                    }
                    in.close();
                    /* Let's give some memory back, or we'll run out of it. */
                    System.gc();
                    parsed.add(pop.getName());
                    modified = true;
                } catch (IOException e) {
                    logger.log(Level.WARNING, "Could not parse bridge snapshot " + pop.getName() + "!", e);
                    continue;
                }
            }
        }
        logger.fine("Finished importing files in directory " + bridgeDirectoriesDir + "/.");
        if (!parsed.isEmpty() && modified) {
            logger.fine("Writing file " + pbdFile.getAbsolutePath() + "...");
            try {
                pbdFile.getParentFile().mkdirs();
                BufferedWriter bw = new BufferedWriter(new FileWriter(pbdFile));
                for (String f : parsed) {
                    bw.append(f + "\n");
                }
                bw.close();
                logger.fine("Finished writing file " + pbdFile.getAbsolutePath() + ".");
            } catch (IOException e) {
                logger.log(Level.WARNING, "Failed writing file " + pbdFile.getAbsolutePath() + "!", e);
            }
        }
    }
}
From source file: org.jenkins.plugins.lockableresources.LockableResourcesManager.java

/**
 * Try to acquire the resources required by the task.
 *
 * @param number Number of resources to acquire. {@code 0} means all
 * @return List of the locked resources if the task has been accepted.
 *         {@code null} if the item is still waiting for the resources
 * @throws ExecutionException Cannot queue the resource due to the execution failure. Carries info in the cause
 * @since TODO
 */
@CheckForNull
public synchronized List<LockableResource> tryQueue(LockableResourcesStruct requiredResources, long queueItemId,
        String queueItemProject, int number, Map<String, Object> params, Logger log) throws ExecutionException {
    List<LockableResource> selected = new ArrayList<LockableResource>();

    if (!checkCurrentResourcesStatus(selected, queueItemProject, queueItemId, log)) {
        // The project has another buildable item waiting -> bail out
        log.log(Level.FINEST, "{0} has another build waiting resources." + " Waiting for it to proceed first.",
                new Object[] { queueItemProject });
        return null;
    }

    boolean candidatesByScript = false;
    List<LockableResource> candidates = new ArrayList<LockableResource>();
    final SecureGroovyScript systemGroovyScript = requiredResources.getResourceMatchScript();
    if (requiredResources.label != null && requiredResources.label.isEmpty() && systemGroovyScript == null) {
        candidates = requiredResources.required;
    } else if (systemGroovyScript == null) {
        candidates = getResourcesWithLabel(requiredResources.label, params);
    } else {
        candidates = getResourcesMatchingScript(systemGroovyScript, params);
        candidatesByScript = true;
    }

    for (LockableResource rs : candidates) {
        if (number != 0 && (selected.size() >= number))
            break;
        if (!rs.isReserved() && !rs.isLocked() && !rs.isQueued())
            selected.add(rs);
    }

    // if did not get wanted amount or did not get all
    final int required_amount;
    if (candidatesByScript && candidates.size() == 0) {
        /**
         * If the groovy script does not return any candidates, it means nothing is needed, even
         * if a higher amount is specified. A valid use case is a Matrix job, when not all
         * configurations need resources.
         */
        required_amount = 0;
    } else {
        required_amount = number == 0 ? candidates.size() : number;
    }

    if (selected.size() != required_amount) {
        log.log(Level.FINEST, "{0} found {1} resource(s) to queue." + "Waiting for correct amount: {2}.",
                new Object[] { queueItemProject, selected.size(), required_amount });
        // just to be sure, clean up
        for (LockableResource x : resources) {
            if (x.getQueueItemProject() != null && x.getQueueItemProject().equals(queueItemProject))
                x.unqueue();
        }
        return null;
    }

    for (LockableResource rsc : selected) {
        rsc.setQueued(queueItemId, queueItemProject);
    }
    return selected;
}
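The tryQueue example above formats its FINEST messages with MessageFormat-style placeholders ({0}, {1}, {2}) and an Object[] of parameters instead of concatenating strings at the call site. A minimal self-contained sketch of that log(Level, String, Object[]) overload (class and method names here are made up for illustration):

import java.util.logging.Level;
import java.util.logging.Logger;

class PlaceholderLogging {

    private static final Logger LOGGER = Logger.getLogger(PlaceholderLogging.class.getName());

    static void report(String project, int found, int required) {
        // The placeholders are substituted by the formatter only if the record is
        // actually published, so no message formatting happens while FINEST is disabled.
        LOGGER.log(Level.FINEST, "{0} found {1} resource(s), waiting for {2}.",
                new Object[] { project, found, required });
    }
}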
From source file: net.openhft.chronicle.logger.jul.JulTestBase.java

protected void testTextAppender(String testId, Chronicle chronicle) throws IOException {
    final String threadId = "thread-" + Thread.currentThread().getId();

    setupLogManager(testId);
    final Logger logger = Logger.getLogger(testId);

    for (ChronicleLogLevel level : LOG_LEVELS) {
        log(logger, level, "level is {0}", level);
    }

    ExcerptTailer tailer = chronicle.createTailer().toStart();
    ChronicleLogEvent evt = null;
    for (ChronicleLogLevel level : LOG_LEVELS) {
        assertTrue(tailer.nextIndex());
        evt = ChronicleLogHelper.decodeText(tailer);
        assertNotNull(evt);
        assertEquals(level, evt.getLevel());
        assertEquals(threadId, evt.getThreadName());
        assertEquals(testId, evt.getLoggerName());
        assertEquals("level is " + level, evt.getMessage());
        assertNotNull(evt.getArgumentArray());
        assertEquals(0, evt.getArgumentArray().length);
        tailer.finish();
    }

    logger.log(Level.FINE, "Throwable test", new UnsupportedOperationException());
    logger.log(Level.FINE, "Throwable test", new UnsupportedOperationException("Exception message"));

    assertTrue(tailer.nextIndex());
    evt = ChronicleLogHelper.decodeText(tailer);
    assertNotNull(evt);
    assertEquals(threadId, evt.getThreadName());
    assertEquals(testId, evt.getLoggerName());
    assertTrue(evt.getMessage().contains("Throwable test"));
    assertTrue(evt.getMessage().contains(UnsupportedOperationException.class.getName()));
    assertTrue(evt.getMessage().contains(this.getClass().getName()));
    assertNotNull(evt.getArgumentArray());
    assertEquals(0, evt.getArgumentArray().length);
    assertNull(evt.getThrowable());

    assertTrue(tailer.nextIndex());
    evt = ChronicleLogHelper.decodeText(tailer);
    assertNotNull(evt);
    assertEquals(threadId, evt.getThreadName());
    assertEquals(testId, evt.getLoggerName());
    assertTrue(evt.getMessage().contains("Throwable test"));
    assertTrue(evt.getMessage().contains("Exception message"));
    assertTrue(evt.getMessage().contains(UnsupportedOperationException.class.getName()));
    assertTrue(evt.getMessage().contains(this.getClass().getName()));
    assertNotNull(evt.getArgumentArray());
    assertEquals(0, evt.getArgumentArray().length);
    assertNull(evt.getThrowable());

    tailer.close();
    chronicle.close();
    chronicle.clear();
}
From source file: edu.emory.cci.aiw.i2b2etl.dest.I2b2QueryResultsHandler.java

/**
 * Calls stored procedures to drop all of the temp tables created.
 *
 * @throws SQLException if an error occurs while interacting with the
 *                      database
 */
private void truncateTempTables() throws SQLException {
    Logger logger = I2b2ETLUtil.logger();
    logger.log(Level.INFO, "Truncating temp data tables for query {0}", this.query.getName());
    try (final Connection conn = openDataDatabaseConnection()) {
        conn.setAutoCommit(true);
        String[] dataschemaTables = { tempPatientTableName(), tempPatientMappingTableName(),
                tempVisitTableName(), tempEncounterMappingTableName(), tempProviderTableName(),
                tempConceptTableName(), tempModifierTableName(), tempObservationFactTableName(),
                tempObservationFactCompleteTableName() };
        for (String tableName : dataschemaTables) {
            truncateTable(conn, tableName);
        }
        logger.log(Level.INFO, "Done truncating temp data tables for query {0}", this.query.getName());
    }
}
From source file: org.protempa.backend.dsb.relationaldb.ConstantResultProcessor.java

@Override
public void process(ResultSet resultSet) throws SQLException {
    ResultCache<Constant> results = getResults();
    EntitySpec entitySpec = getEntitySpec();
    String entitySpecName = entitySpec.getName();
    //boolean hasRefs = entitySpec.getInboundRefSpecs().length > 0;
    String[] propIds = entitySpec.getPropositionIds();
    ColumnSpec codeSpec = entitySpec.getCodeSpec();
    if (codeSpec != null) {
        List<ColumnSpec> codeSpecL = codeSpec.asList();
        codeSpec = codeSpecL.get(codeSpecL.size() - 1);
    }
    Logger logger = SQLGenUtil.logger();
    PropertySpec[] propertySpecs = entitySpec.getPropertySpecs();
    Value[] propertyValues = new Value[propertySpecs.length];
    int count = 0;
    String[] uniqueIds = new String[entitySpec.getUniqueIdSpecs().length];
    SourceSystem dsType = DataSourceBackendSourceSystem.getInstance(getDataSourceBackendId());
    ResultSetMetaData resultSetMetaData = resultSet.getMetaData();
    int[] columnTypes = new int[resultSetMetaData.getColumnCount()];
    for (int i = 0; i < columnTypes.length; i++) {
        columnTypes[i] = resultSetMetaData.getColumnType(i + 1);
    }
    while (resultSet.next()) {
        int i = 1;
        String keyId = resultSet.getString(i++);
        if (keyId == null) {
            logger.warning("A keyId is null. Skipping record.");
            continue;
        }
        i = readUniqueIds(uniqueIds, resultSet, i);
        if (Arrays.contains(uniqueIds, null)) {
            if (logger.isLoggable(Level.WARNING)) {
                logger.log(Level.WARNING, "Unique ids contain null ({0}). Skipping record.",
                        StringUtils.join(uniqueIds, ", "));
                continue;
            }
        }
        UniqueId uniqueId = generateUniqueId(entitySpecName, uniqueIds);
        String propId = null;
        if (!isCasePresent()) {
            if (codeSpec == null) {
                assert propIds.length == 1 : "Don't know which proposition id to assign to";
                propId = propIds[0];
            } else {
                String code = resultSet.getString(i++);
                propId = sqlCodeToPropositionId(codeSpec, code);
                if (propId == null) {
                    continue;
                }
            }
        } else {
            i++;
        }
        i = extractPropertyValues(resultSet, i, propertyValues, columnTypes);
        if (isCasePresent()) {
            propId = resultSet.getString(i++);
        }
        Constant cp = new Constant(propId, uniqueId);
        for (int j = 0; j < propertySpecs.length; j++) {
            PropertySpec propertySpec = propertySpecs[j];
            cp.setProperty(propertySpec.getName(), propertyValues[j]);
        }
        cp.setSourceSystem(dsType);
        logger.log(Level.FINEST, "Created constant {0}", cp);
        results.add(keyId, cp);
        if (++count % FLUSH_SIZE == 0) {
            try {
                results.flush(this);
            } catch (IOException ex) {
                throw new QueryResultsCacheException("Flushing primitive parameters to cache failed", ex);
            }
            if (logger.isLoggable(Level.FINE)) {
                Logging.logCount(logger, Level.FINE, count, "Retrieved {0} record", "Retrieved {0} records");
            }
        }
    }
    try {
        results.flush(this);
    } catch (IOException ex) {
        throw new QueryResultsCacheException("Flushing primitive parameters to cache failed", ex);
    }
    if (logger.isLoggable(Level.FINE)) {
        Logging.logCount(logger, Level.FINE, count, "Retrieved {0} record total", "Retrieved {0} records total");
    }
}
From source file: org.jenkins.plugins.lockableresources.LockableResourcesManager.java

private boolean checkCurrentResourcesStatus(List<LockableResource> selected, String project, long taskId,
        Logger log) {
    for (LockableResource r : resources) {
        // This project might already have something in queue
        String rProject = r.getQueueItemProject();
        if (rProject != null && rProject.equals(project)) {
            if (r.isQueuedByTask(taskId)) {
                // this item has queued the resource earlier
                selected.add(r);
            } else {
                // The project has another buildable item waiting -> bail out
                log.log(Level.FINEST, "{0} has another build " + "that already queued resource {1}. Continue queueing.",
                        new Object[] { project, r });
                return false;
            }
        }
    }
    return true;
}
From source file: hudson.cli.CLI.java

public static int _main(String[] _args) throws Exception {
    List<String> args = Arrays.asList(_args);
    PrivateKeyProvider provider = new PrivateKeyProvider();
    boolean sshAuthRequestedExplicitly = false;
    String httpProxy = null;

    String url = System.getenv("JENKINS_URL");

    if (url == null)
        url = System.getenv("HUDSON_URL");

    boolean tryLoadPKey = true;

    Mode mode = null;

    String user = null;
    String auth = null;

    String userIdEnv = System.getenv("JENKINS_USER_ID");
    String tokenEnv = System.getenv("JENKINS_API_TOKEN");

    boolean strictHostKey = false;

    while (!args.isEmpty()) {
        String head = args.get(0);
        if (head.equals("-version")) {
            System.out.println("Version: " + computeVersion());
            return 0;
        }
        if (head.equals("-http")) {
            if (mode != null) {
                printUsage("-http clashes with previously defined mode " + mode);
                return -1;
            }
            mode = Mode.HTTP;
            args = args.subList(1, args.size());
            continue;
        }
        if (head.equals("-ssh")) {
            if (mode != null) {
                printUsage("-ssh clashes with previously defined mode " + mode);
                return -1;
            }
            mode = Mode.SSH;
            args = args.subList(1, args.size());
            continue;
        }
        if (head.equals("-remoting")) {
            if (mode != null) {
                printUsage("-remoting clashes with previously defined mode " + mode);
                return -1;
            }
            mode = Mode.REMOTING;
            args = args.subList(1, args.size());
            continue;
        }
        if (head.equals("-s") && args.size() >= 2) {
            url = args.get(1);
            args = args.subList(2, args.size());
            continue;
        }
        if (head.equals("-noCertificateCheck")) {
            LOGGER.info("Skipping HTTPS certificate checks altogether. Note that this is not secure at all.");
            SSLContext context = SSLContext.getInstance("TLS");
            context.init(null, new TrustManager[] { new NoCheckTrustManager() }, new SecureRandom());
            HttpsURLConnection.setDefaultSSLSocketFactory(context.getSocketFactory());
            // bypass host name check, too.
            HttpsURLConnection.setDefaultHostnameVerifier(new HostnameVerifier() {
                public boolean verify(String s, SSLSession sslSession) {
                    return true;
                }
            });
            args = args.subList(1, args.size());
            continue;
        }
        if (head.equals("-noKeyAuth")) {
            tryLoadPKey = false;
            args = args.subList(1, args.size());
            continue;
        }
        if (head.equals("-i") && args.size() >= 2) {
            File f = new File(args.get(1));
            if (!f.exists()) {
                printUsage(Messages.CLI_NoSuchFileExists(f));
                return -1;
            }
            provider.readFrom(f);
            args = args.subList(2, args.size());
            sshAuthRequestedExplicitly = true;
            continue;
        }
        if (head.equals("-strictHostKey")) {
            strictHostKey = true;
            args = args.subList(1, args.size());
            continue;
        }
        if (head.equals("-user") && args.size() >= 2) {
            user = args.get(1);
            args = args.subList(2, args.size());
            continue;
        }
        if (head.equals("-auth") && args.size() >= 2) {
            auth = args.get(1);
            args = args.subList(2, args.size());
            continue;
        }
        if (head.equals("-p") && args.size() >= 2) {
            httpProxy = args.get(1);
            args = args.subList(2, args.size());
            continue;
        }
        if (head.equals("-logger") && args.size() >= 2) {
            Level level = parse(args.get(1));
            for (Handler h : Logger.getLogger("").getHandlers()) {
                h.setLevel(level);
            }
            for (Logger logger : new Logger[] { LOGGER, FullDuplexHttpStream.LOGGER, PlainCLIProtocol.LOGGER,
                    Logger.getLogger("org.apache.sshd") }) { // perhaps also Channel
                logger.setLevel(level);
            }
            args = args.subList(2, args.size());
            continue;
        }
        break;
    }

    if (url == null) {
        printUsage(Messages.CLI_NoURL());
        return -1;
    }

    if (auth == null) {
        // -auth option not set
        if (StringUtils.isNotBlank(userIdEnv) && StringUtils.isNotBlank(tokenEnv)) {
            auth = StringUtils.defaultString(userIdEnv).concat(":").concat(StringUtils.defaultString(tokenEnv));
        } else if (StringUtils.isNotBlank(userIdEnv) || StringUtils.isNotBlank(tokenEnv)) {
            printUsage(Messages.CLI_BadAuth());
            return -1;
        } // Otherwise, none credentials were set
    }

    if (!url.endsWith("/")) {
        url += '/';
    }

    if (args.isEmpty())
        args = Arrays.asList("help"); // default to help

    if (tryLoadPKey && !provider.hasKeys())
        provider.readFromDefaultLocations();

    if (mode == null) {
        mode = Mode.HTTP;
    }

    LOGGER.log(FINE, "using connection mode {0}", mode);

    if (user != null && auth != null) {
        LOGGER.warning("-user and -auth are mutually exclusive");
    }

    if (mode == Mode.SSH) {
        if (user == null) {
            // TODO SshCliAuthenticator already autodetects the user based on public key; why cannot AsynchronousCommand.getCurrentUser do the same?
            LOGGER.warning("-user required when using -ssh");
            return -1;
        }
        return SSHCLI.sshConnection(url, user, args, provider, strictHostKey);
    }

    if (strictHostKey) {
        LOGGER.warning("-strictHostKey meaningful only with -ssh");
    }

    if (user != null) {
        LOGGER.warning("Warning: -user ignored unless using -ssh");
    }

    CLIConnectionFactory factory = new CLIConnectionFactory().url(url).httpsProxyTunnel(httpProxy);
    String userInfo = new URL(url).getUserInfo();
    if (userInfo != null) {
        factory = factory.basicAuth(userInfo);
    } else if (auth != null) {
        factory = factory.basicAuth(
                auth.startsWith("@") ? FileUtils.readFileToString(new File(auth.substring(1))).trim() : auth);
    }

    if (mode == Mode.HTTP) {
        return plainHttpConnection(url, args, factory);
    }

    CLI cli = factory.connect();
    try {
        if (provider.hasKeys()) {
            try {
                // TODO: server verification
                cli.authenticate(provider.getKeys());
            } catch (IllegalStateException e) {
                if (sshAuthRequestedExplicitly) {
                    LOGGER.warning("The server doesn't support public key authentication");
                    return -1;
                }
            } catch (UnsupportedOperationException e) {
                if (sshAuthRequestedExplicitly) {
                    LOGGER.warning("The server doesn't support public key authentication");
                    return -1;
                }
            } catch (GeneralSecurityException e) {
                if (sshAuthRequestedExplicitly) {
                    LOGGER.log(WARNING, null, e);
                    return -1;
                }
                LOGGER.warning("Failed to authenticate with your SSH keys. Proceeding as anonymous");
                LOGGER.log(FINE, null, e);
            }
        }

        // execute the command
        // Arrays.asList is not serializable --- see 6835580
        args = new ArrayList<String>(args);
        return cli.execute(args, System.in, System.out, System.err);
    } finally {
        cli.close();
    }
}
From source file: org.protempa.backend.dsb.relationaldb.PrimitiveParameterResultProcessor.java

@Override
public void process(ResultSet resultSet) throws SQLException {
    ResultCache<PrimitiveParameter> results = getResults();
    EntitySpec entitySpec = getEntitySpec();
    String entitySpecName = entitySpec.getName();
    //boolean hasRefs = entitySpec.getInboundRefSpecs().length > 0;
    String[] propIds = entitySpec.getPropositionIds();
    ColumnSpec codeSpec = entitySpec.getCodeSpec();
    if (codeSpec != null) {
        List<ColumnSpec> codeSpecL = codeSpec.asList();
        codeSpec = codeSpecL.get(codeSpecL.size() - 1);
    }
    Logger logger = SQLGenUtil.logger();
    PropertySpec[] propertySpecs = entitySpec.getPropertySpecs();
    Value[] propertyValues = new Value[propertySpecs.length];
    int count = 0;
    String[] uniqueIds = new String[entitySpec.getUniqueIdSpecs().length];
    ResultSetMetaData resultSetMetaData = resultSet.getMetaData();
    int[] columnTypes = new int[resultSetMetaData.getColumnCount()];
    for (int i = 0; i < columnTypes.length; i++) {
        columnTypes[i] = resultSetMetaData.getColumnType(i + 1);
    }
    SourceSystem dsType = DataSourceBackendSourceSystem.getInstance(getDataSourceBackendId());
    while (resultSet.next()) {
        int i = 1;
        String keyId = resultSet.getString(i++);
        if (keyId == null) {
            logger.warning("A keyId is null. Skipping record.");
            continue;
        }
        i = readUniqueIds(uniqueIds, resultSet, i);
        if (Arrays.contains(uniqueIds, null)) {
            if (logger.isLoggable(Level.WARNING)) {
                logger.log(Level.WARNING, "Unique ids contain null ({0}). Skipping record.",
                        StringUtils.join(uniqueIds, ", "));
                continue;
            }
        }
        UniqueId uniqueId = generateUniqueId(entitySpecName, uniqueIds);
        String propId = null;
        if (!isCasePresent()) {
            if (codeSpec == null) {
                assert propIds.length == 1 : "Don't know which proposition id to assign to";
                propId = propIds[0];
            } else {
                String code = resultSet.getString(i++);
                propId = sqlCodeToPropositionId(codeSpec, code);
                if (propId == null) {
                    continue;
                }
            }
        } else {
            i++;
        }
        Long timestamp = null;
        try {
            timestamp = entitySpec.getPositionParser().toPosition(resultSet, i, columnTypes[i - 1]);
            i++;
        } catch (SQLException e) {
            logger.log(Level.WARNING, "Could not parse timestamp. Leaving timestamp unset.", e);
        }
        ValueType valueType = entitySpec.getValueType();
        String cpValStr = resultSet.getString(i++);
        Value cpVal = valueType.parse(cpValStr);
        i = extractPropertyValues(resultSet, i, propertyValues, columnTypes);
        if (isCasePresent()) {
            propId = resultSet.getString(i++);
        }
        PrimitiveParameter p = new PrimitiveParameter(propId, uniqueId);
        p.setPosition(timestamp);
        p.setGranularity(entitySpec.getGranularity());
        p.setValue(cpVal);
        for (int j = 0; j < propertySpecs.length; j++) {
            PropertySpec propertySpec = propertySpecs[j];
            p.setProperty(propertySpec.getName(), propertyValues[j]);
        }
        p.setSourceSystem(dsType);
        logger.log(Level.FINEST, "Created primitive parameter {0}", p);
        results.add(keyId, p);
        if (++count % FLUSH_SIZE == 0) {
            try {
                results.flush(this);
            } catch (IOException ex) {
                throw new QueryResultsCacheException("Flushing primitive parameters to cache failed", ex);
            }
            if (logger.isLoggable(Level.FINE)) {
                Logging.logCount(logger, Level.FINE, count, "Retrieved {0} record", "Retrieved {0} records");
            }
        }
    }
    try {
        results.flush(this);
    } catch (IOException ex) {
        throw new QueryResultsCacheException("Flushing primitive parameters to cache failed", ex);
    }
    if (logger.isLoggable(Level.FINE)) {
        Logging.logCount(logger, Level.FINE, count, "Retrieved {0} record total", "Retrieved {0} records total");
    }
}
From source file: jp.ikedam.jenkins.plugins.ldap_sasl.SearchGroupResolver.java

/**
 * Resolves groups by querying the LDAP directory.
 *
 * Never return null in any case. Returns empty list instead.
 *
 * @param ctx
 * @param dn
 * @param username
 * @return List of authorities (not null)
 * @see jp.ikedam.jenkins.plugins.ldap_sasl.GroupResolver#resolveGroup(javax.naming.ldap.LdapContext, java.lang.String, java.lang.String)
 */
@Override
public List<GrantedAuthority> resolveGroup(LdapContext ctx, String dn, String username) {
    List<GrantedAuthority> authorities = new ArrayList<GrantedAuthority>();
    Logger logger = getLogger();

    if (dn == null) {
        logger.warning("Group cannot be resolved: DN of the user is not resolved!");
        return authorities;
    }

    try {
        SearchControls searchControls = new SearchControls();
        searchControls.setSearchScope(SearchControls.SUBTREE_SCOPE);
        logger.fine(String.format("Searching groups base=%s, dn=%s", getSearchBase(), dn));
        NamingEnumeration<SearchResult> entries = ctx.search((getSearchBase() != null) ? getSearchBase() : "",
                getGroupSearchQuery(dn), searchControls);
        while (entries.hasMore()) {
            SearchResult entry = entries.next();
            String groupName = entry.getAttributes().get("cn").get().toString();
            if (getPrefix() != null) {
                groupName = getPrefix() + groupName;
            }
            authorities.add(new GrantedAuthorityImpl(groupName));
            logger.fine(String.format("group: %s", groupName));
        }
        entries.close();
    } catch (NamingException e) {
        logger.log(Level.WARNING, "Failed to search groups", e);
    }

    return authorities;
}