List of usage examples for java.util.logging Level FINER
Level FINER
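Level.FINER designates a fairly detailed tracing message, one step below FINE and above FINEST. Before the project examples, here is a minimal, self-contained sketch (not taken from any of the projects listed; the class name, logger name, and messages are illustrative) of the guard-and-log pattern that most of the examples below use:

import java.util.logging.ConsoleHandler;
import java.util.logging.Handler;
import java.util.logging.Level;
import java.util.logging.Logger;

public class FinerExample {
    // Logger name is illustrative; any class-scoped logger works the same way.
    private static final Logger LOG = Logger.getLogger(FinerExample.class.getName());

    public static void main(String[] args) {
        // FINER is below the default INFO threshold, so both the logger and a
        // handler must be lowered for the messages to appear on the console.
        Handler handler = new ConsoleHandler();
        handler.setLevel(Level.FINER);
        LOG.setLevel(Level.FINER);
        LOG.addHandler(handler);
        LOG.setUseParentHandlers(false); // avoid duplicate output via the root handler

        // Guard the call so the message string is only built when FINER is enabled;
        // this is the pattern used by most of the examples below.
        if (LOG.isLoggable(Level.FINER)) {
            LOG.finer("tracing detail: computed value = " + (21 * 2));
        }

        // Parameterized form defers message formatting to the logging framework.
        LOG.log(Level.FINER, "processing item {0}", "example-item");
    }
}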
From source file:org.overlord.apiman.service.client.http.HTTPServiceClient.java
/**
 * {@inheritDoc}
 */
@Override
public Response process(Request request) throws Exception {
    String method = "GET";
    if (request instanceof HTTPGatewayRequest) {
        method = ((HTTPGatewayRequest) request).getHTTPMethod();
    }
    String proxyRequestUri = rewriteUrlFromRequest(request);
    HttpRequest proxyRequest;
    // spec: RFC 2616, sec 4.3: either of these two headers signal that there is a message body.
    if (request.getHeader(HttpHeaders.CONTENT_LENGTH) != null
            || request.getHeader(HttpHeaders.TRANSFER_ENCODING) != null) {
        HttpEntityEnclosingRequest eProxyRequest = new BasicHttpEntityEnclosingRequest(method, proxyRequestUri);
        java.io.InputStream is = new java.io.ByteArrayInputStream(request.getContent());
        InputStreamEntity entity = new InputStreamEntity(is, request.getContent().length);
        is.close();
        eProxyRequest.setEntity(entity);
        proxyRequest = eProxyRequest;
    } else {
        proxyRequest = new BasicHttpRequest(method, proxyRequestUri);
    }
    copyRequestHeaders(request, proxyRequest);
    try {
        // Execute the request
        if (LOG.isLoggable(Level.FINER)) {
            LOG.finer("proxy " + method + " uri: " + request.getSourceURI() + " -- "
                    + proxyRequest.getRequestLine().getUri());
        }
        HttpResponse proxyResponse = _proxyClient
                .execute(URIUtils.extractHost(new java.net.URI(request.getServiceURI())), proxyRequest);
        Response resp = new HTTPGatewayResponse(proxyResponse);
        return (resp);
    } catch (Exception e) {
        // abort request, according to best practice with HttpClient
        if (proxyRequest instanceof AbortableHttpRequest) {
            AbortableHttpRequest abortableHttpRequest = (AbortableHttpRequest) proxyRequest;
            abortableHttpRequest.abort();
        }
        if (e instanceof RuntimeException) {
            throw (RuntimeException) e;
        }
        //noinspection ConstantConditions
        if (e instanceof IOException) {
            throw (IOException) e;
        }
        throw new RuntimeException(e);
    }
}
From source file:com.ibm.jaggr.core.impl.cache.GzipCacheImpl.java
@Override
public InputStream getInputStream(final String key, final URI source, final MutableInt retLength)
        throws IOException {
    final String sourceMethod = "getInputStream"; //$NON-NLS-1$
    final boolean isTraceLogging = log.isLoggable(Level.FINER);
    if (isTraceLogging) {
        log.entering(sourceClass, sourceMethod, new Object[] { key, source, retLength });
    }
    InputStream in = null, result = null;
    CacheEntry tryCacheEntry = (CacheEntry) super.get(key);
    URLConnection connection = source.toURL().openConnection();

    try {
        long lastModified = connection.getLastModified();
        if (tryCacheEntry != null) {
            // Make local copies of volatile CacheEntry fields
            byte[] bytes = tryCacheEntry.bytes;
            File file = tryCacheEntry.file;
            if (bytes != null) {
                // Important - CacheEntry.lastModified is set before CacheEntry.bytes so we can safely
                // check CacheEntry.lastModified here even though we're not synchronized.
                if (lastModified != tryCacheEntry.lastModified) {
                    // stale cache entry. Remove it and create a new one below
                    cacheMap.remove(key, tryCacheEntry);
                } else {
                    retLength.setValue(tryCacheEntry.bytes.length);
                    result = new ByteArrayInputStream(tryCacheEntry.bytes);
                }
            } else if (file != null) {
                // Some platforms round file last modified times to nearest second.
                if (Math.abs(lastModified - file.lastModified()) > 1000) {
                    // Stale cache entry, remove it and create a new one below
                    cacheMap.remove(key, tryCacheEntry);
                    // also delete the associated cache file asynchronously.
                    cacheManager.deleteFileDelayed(file.getName());
                } else {
                    try {
                        retLength.setValue(file.length());
                        result = new FileInputStream(file);
                    } catch (FileNotFoundException ex) {
                        // File doesn't exist (was probably deleted outside this program)
                        // Not fatal, just fall through and create it again.
                        cacheMap.remove(key, tryCacheEntry);
                    }
                }
            }
            if (result != null) {
                // found result in cache. Return it.
                log.exiting(sourceClass, sourceMethod, result);
                return result;
            }
        }
        // Result not in cache (or we removed it). Try to create a new cache entry.
        CacheEntry newCacheEntry = new CacheEntry();
        CacheEntry oldCacheEntry = (CacheEntry) cacheMap.putIfAbsent(key, newCacheEntry);
        final CacheEntry cacheEntry = oldCacheEntry != null ? oldCacheEntry : newCacheEntry;

        // Synchronize on the cache entry so that more than one thread won't try to create the
        // zipped content.
        synchronized (cacheEntry) {
            if (cacheEntry.ex != null) {
                // An exception occurred trying to create the gzip response in another thread.
                // Re-throw the exception here.
                throw cacheEntry.ex;
            }
            // First, check to make sure that another thread didn't beat us to the punch.
            // Even though we're synchronized on the cacheEntry object, cacheEntry.bytes can be
            // cleared by the createCacheFileAsync callback, so we need to copy this volatile
            // field to a local variable and access it from there.
            byte[] bytes = cacheEntry.bytes;
            if (bytes != null) {
                retLength.setValue(bytes.length);
                result = new ByteArrayInputStream(bytes);
            } else if (cacheEntry.file != null) {
                // once set, cacheEntry.file does not change by convention
                retLength.setValue(cacheEntry.file.length());
                result = new FileInputStream(cacheEntry.file);
            } else {
                // Gzip encode the resource and save the result in the cache entry until the
                // cache file is written asynchronously.
                try {
                    in = connection.getInputStream();
                    ByteArrayOutputStream bos = new ByteArrayOutputStream();
                    VariableGZIPOutputStream compress = new VariableGZIPOutputStream(bos, 10240);
                    compress.setLevel(Deflater.BEST_COMPRESSION);
                    CopyUtil.copy(in, compress);
                    // Important - CacheEntry.lastModified must be set before cacheEntry.bytes
                    cacheEntry.lastModified = lastModified;
                    cacheEntry.bytes = bos.toByteArray();
                    result = new ByteArrayInputStream(cacheEntry.bytes);
                    retLength.setValue(cacheEntry.bytes.length);

                    // Call the cache manager to asynchronously save the gzipped response to disk.
                    // Include the filename part of the source URI in the cached filename.
                    String path = source.getPath();
                    int idx = path.lastIndexOf("/"); //$NON-NLS-1$
                    String fname = (idx != -1) ? path.substring(idx + 1) : path;
                    cacheManager.createCacheFileAsync(fname + ".gzip.", //$NON-NLS-1$
                            new ByteArrayInputStream(cacheEntry.bytes),
                            new ICacheManager.CreateCompletionCallback() {
                                @Override
                                public void completed(String filename, Exception e) {
                                    if (e != null && log.isLoggable(Level.SEVERE)) {
                                        // Exception occurred saving file. Not much we can do
                                        // except log the error
                                        log.logp(Level.SEVERE, sourceClass, sourceMethod, e.getMessage(), e);
                                        return;
                                    }
                                    File cacheFile = new File(cacheManager.getCacheDir(), filename);
                                    cacheFile.setLastModified(cacheEntry.lastModified);
                                    // Important - cacheEntry.file must be set before clearing
                                    // cacheEntry.bytes
                                    cacheEntry.file = cacheFile;
                                    cacheEntry.bytes = null;
                                }
                            });
                } catch (Throwable t) {
                    cacheEntry.ex = (t instanceof IOException) ? (IOException) t : new IOException(t);
                    cacheMap.remove(key, cacheEntry);
                    throw cacheEntry.ex;
                }
            }
        }
    } finally {
        // URLConnection doesn't have a close method. The only way to make sure a connection is
        // closed is to close the input or output stream which is obtained from the connection.
        if (in != null) {
            in.close();
        } else {
            connection.getInputStream().close();
        }
    }
    if (isTraceLogging) {
        log.exiting(sourceClass, sourceMethod, result);
    }
    return result;
}
From source file:net.osten.watermap.batch.FetchPCTWaypointsJob.java
private void unZipIt(File zipFile, File outputFolder) {
    byte[] buffer = new byte[1024];

    try (ZipInputStream zis = new ZipInputStream(new FileInputStream(zipFile))) {
        // get the zipped file list entry
        ZipEntry ze = zis.getNextEntry();

        while (ze != null) {
            String fileName = ze.getName();
            File newFile = new File(outputFolder.getAbsolutePath() + File.separator + fileName);

            log.log(Level.FINER, "file unzip : {0}", new Object[] { newFile.getAbsoluteFile() });

            // create all non existing folders else you will hit FileNotFoundException for compressed folder
            new File(newFile.getParent()).mkdirs();

            try (FileOutputStream fos = new FileOutputStream(newFile)) {
                int len;
                while ((len = zis.read(buffer)) > 0) {
                    fos.write(buffer, 0, len);
                }
            }

            ze = zis.getNextEntry();

            // move newFile to data directory
            try {
                // have to delete first since FileUtils does not overwrite
                File destinationFile = new File(outputFolder + File.separator + newFile.getName());
                if (destinationFile.exists()) {
                    destinationFile.delete();
                }
                FileUtils.moveFileToDirectory(newFile, outputFolder, false);
            } catch (FileExistsException ioe) {
                log.warning(ioe.getLocalizedMessage());
            } catch (IOException ioe) {
                log.warning(ioe.getLocalizedMessage());
            }
        }

        // close the last entry
        zis.closeEntry();
    } catch (IOException e) {
        log.warning(e.getLocalizedMessage());
    }
}
From source file:de.theit.jenkins.crowd.CrowdRememberMeServices.java
/**
 * {@inheritDoc}
 *
 * @see org.springframework.security.ui.rememberme.RememberMeServices#autoLogin(javax.servlet.http.HttpServletRequest,
 *      javax.servlet.http.HttpServletResponse)
 */
@Override
public Authentication autoLogin(HttpServletRequest request, HttpServletResponse response) {
    Authentication result = null;

    List<ValidationFactor> validationFactors = this.configuration.tokenHelper.getValidationFactorExtractor()
            .getValidationFactors(request);

    // check whether a SSO token is available
    if (LOG.isLoggable(Level.FINER)) {
        LOG.finer("Checking whether a SSO token is available...");
    }
    String ssoToken = this.configuration.tokenHelper.getCrowdToken(request,
            this.configuration.clientProperties.getCookieTokenKey());

    // auto-login is only possible when the SSO token was found
    if (null != ssoToken) {
        try {
            // SSO token available => check whether it is still valid
            if (LOG.isLoggable(Level.FINER)) {
                LOG.finer("SSO token available => check whether it is still valid...");
            }
            this.configuration.crowdClient.validateSSOAuthentication(ssoToken, validationFactors);

            // retrieve the user that is logged in via SSO
            if (LOG.isLoggable(Level.FINER)) {
                LOG.finer("Retrieving SSO user...");
            }
            User user = this.configuration.crowdClient.findUserFromSSOToken(ssoToken);

            // check whether the user is a member of the user group in Crowd
            // that specifies who is allowed to login
            if (LOG.isLoggable(Level.FINER)) {
                LOG.finer("Validating group membership of user...");
            }
            if (this.configuration.isGroupMember(user.getName())) {
                // user is authenticated and validated
                // => create the user object and finalize the auto-login process
                List<GrantedAuthority> authorities = new ArrayList<GrantedAuthority>();
                authorities.add(SecurityRealm.AUTHENTICATED_AUTHORITY);
                authorities.addAll(this.configuration.getAuthoritiesForUser(user.getName()));

                result = new CrowdAuthenticationToken(user.getName(), null, authorities, ssoToken,
                        user.getDisplayName());
            }
        } catch (InvalidTokenException ex) {
            // LOG.log(Level.INFO, invalidToken(), ex);
        } catch (ApplicationPermissionException ex) {
            LOG.warning(applicationPermission());
        } catch (InvalidAuthenticationException ex) {
            LOG.warning(invalidAuthentication());
        } catch (OperationFailedException ex) {
            LOG.log(Level.SEVERE, operationFailed(), ex);
        }
    }

    return result;
}
From source file:org.geotools.data.ngi.NGISchemaReader.java
private void loadSchemas(File ngiFile, File ndaFile, Charset charset) {
    BufferedReader ngiReader = null;
    BufferedReader ndaReader = null;
    try {
        ngiReader = new BufferedReader(new InputStreamReader(new FileInputStream(ngiFile), charset));
        if (ndaFile.exists()) {
            ndaReader = new BufferedReader(new InputStreamReader(new FileInputStream(ndaFile), charset));
        }

        String line = ngiReader.readLine();
        while (line != null) {
            String layerName = seekNextLayer(ngiReader);
            if (layerName != null) {
                // schema
                SimpleFeatureType schema = createSchema(ngiReader, ndaReader, layerName);
                if (schema != null) {
                    schemas.put(layerName, schema);
                }

                // extent
                ReferencedEnvelope extent = getBounds(ngiReader);
                if (extent == null) {
                    extent = new ReferencedEnvelope(crs);
                }
                bounds.put(layerName, extent);

                // feature count
                if (ndaReader != null) {
                    counts.put(layerName, Integer.valueOf(getCount(ndaReader)));
                } else {
                    counts.put(layerName, Integer.valueOf(-1));
                }
            }
            line = ngiReader.readLine();
        }
    } catch (IOException e) {
        LOGGER.log(Level.FINER, e.getMessage(), e);
    } finally {
        IOUtils.closeQuietly(ngiReader);
        IOUtils.closeQuietly(ndaReader);
    }
}
From source file:jenkins.plugins.git.GitSCMFileSystem.java
/**
 * Constructor.
 *
 * @param client the client
 * @param remote the remote GIT URL
 * @param head identifier for the head commit to be referenced
 * @param rev the revision.
 * @throws IOException on I/O error
 * @throws InterruptedException on thread interruption
 */
protected GitSCMFileSystem(GitClient client, String remote, final String head,
        @CheckForNull AbstractGitSCMSource.SCMRevisionImpl rev) throws IOException, InterruptedException {
    super(rev);
    this.remote = remote;
    this.head = head;
    cacheEntry = AbstractGitSCMSource.getCacheEntry(remote);
    listener = new LogTaskListener(LOGGER, Level.FINER);
    this.client = client;
    commitId = rev == null ? invoke(new FSFunction<ObjectId>() {
        @Override
        public ObjectId invoke(Repository repository) throws IOException, InterruptedException {
            return repository.getRef(head).getObjectId();
        }
    }) : ObjectId.fromString(rev.getHash());
}
From source file:org.jenkinsci.plugins.registry.notification.webhook.JSONWebHook.java
protected void trigger(StaplerResponse response, final PushNotification pushNotification) throws IOException {
    final Jenkins jenkins = Jenkins.getInstance();
    if (jenkins == null) {
        return;
    }
    ACL.impersonate(ACL.SYSTEM, new Runnable() {
        @Override
        public void run() {
            // search all jobs for DockerHubTrigger
            for (ParameterizedJobMixIn.ParameterizedJob p : jenkins
                    .getAllItems(ParameterizedJobMixIn.ParameterizedJob.class)) {
                DockerHubTrigger trigger = DockerHubTrigger.getTrigger(p);
                if (trigger == null) {
                    logger.log(Level.FINER, "job {0} doesn't have DockerHubTrigger set", p.getName());
                    continue;
                }
                logger.log(Level.FINER, "Inspecting candidate job {0}", p.getName());
                Set<String> allRepoNames = trigger.getAllRepoNames();
                String repoName = pushNotification.getRepoName();
                if (allRepoNames.contains(repoName)) {
                    schedule((Job) p, pushNotification);
                }
            }
        }
    });
}
From source file:name.richardson.james.bukkit.utilities.persistence.database.AbstractDatabaseLoader.java
@Override
public final void drop() {
    logger.log(Level.FINER, "Dropping and destroying database.");
    this.beforeDatabaseDrop();
    generator.runScript(true, this.getDeleteDLLScript());
}
From source file:org.archive.modules.recrawl.BdbContentDigestHistory.java
public void store(CrawlURI curi) {
    if (!curi.hasContentDigestHistory() || curi.getContentDigestHistory().isEmpty()) {
        logger.warning("not saving empty content digest history (do you "
                + " have a ContentDigestHistoryLoader in your disposition" + " chain?) - " + curi);
        return;
    }
    if (logger.isLoggable(Level.FINER)) {
        logger.finer("storing history by digest " + persistKeyFor(curi) + " for uri " + curi + " - "
                + curi.getContentDigestHistory());
    }
    store.put(persistKeyFor(curi), curi.getContentDigestHistory());
}
From source file:Peer.java
/**
 * Used when trying to get a remote object for a peer.
 * It will check a local cache for the object and, if it does not exist,
 * it will ask the super peer for the location of the peer and store it.
 * @return PeerInterface or null if some error occurs.
 */
private PeerInterface getPeer(Key node) throws Exception {
    lg.log(Level.FINEST, "getPeer Entry");
    PeerInterface peer = peercache.get(node);
    try {
        if (peer == null) {
            lg.log(Level.FINER, "Peer " + node + " not found in cache asking superpeer.");
            String addy = superpeer.getAddress(node);
            lg.log(Level.FINER, "//" + addy + "/" + node.toString());
            if (addy != null) {
                peer = (PeerInterface) Naming.lookup("//" + addy + "/" + node.toString());
                peercache.put(node, peer);
            }
        } else {
            lg.log(Level.FINER, "Peer " + node + " found in cache.");
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
    if (peer == null) {
        lg.log(Level.WARNING, "getPeer attempt on " + node + " unsuccessful.");
    }
    lg.log(Level.FINEST, "getPeer Exit");
    return peer;
}