Example usage for java.util.logging Level FINER

List of usage examples for java.util.logging Level FINER

Introduction

In this page you can find the example usage for java.util.logging Level FINER.

Prototype

Level FINER

Use the Source Link below to view the source code for java.util.logging Level FINER.

Click Source Link

Document

FINER indicates a fairly detailed tracing message.

Usage

From source file:hudson.ClassicPluginStrategy.java

/**
 * Locates all extension components of the requested type via the available
 * {@link ExtensionFinder}s, then filters the results through {@link ExtensionFilter}.
 *
 * @param type   the extension point type to look up
 * @param hudson the Hudson instance to search
 * @return the filtered list of discovered extension components
 */
public <T> List<ExtensionComponent<T>> findComponents(Class<T> type, Hudson hudson) {

    // Special-case ExtensionFinder itself: we cannot use ExtensionFinders to
    // locate ExtensionFinders without recursing forever.
    final List<ExtensionFinder> finders = (type == ExtensionFinder.class)
            ? Collections.<ExtensionFinder>singletonList(new ExtensionFinder.Sezpoz())
            : hudson.getExtensionList(ExtensionFinder.class);

    // See ExtensionFinder#scout(Class, Hudson) for the dead-lock issue and what this does.
    if (LOGGER.isLoggable(Level.FINER))
        LOGGER.log(Level.FINER, "Scout-loading ExtensionList: " + type, new Throwable());
    for (ExtensionFinder finder : finders)
        finder.scout(type, hudson);

    final List<ExtensionComponent<T>> discovered = Lists.newArrayList();
    for (ExtensionFinder finder : finders) {
        try {
            discovered.addAll(finder.find(type, hudson));
        } catch (AbstractMethodError e) {
            // Older finders only implement the legacy findExtensions() API.
            for (T extension : finder.findExtensions(type, hudson))
                discovered.add(new ExtensionComponent<T>(extension));
        }
    }

    // Drop any components vetoed by the extension filters.
    final List<ExtensionComponent<T>> allowed = Lists.newArrayList();
    for (ExtensionComponent<T> component : discovered) {
        if (ExtensionFilter.isAllowed(type, component))
            allowed.add(component);
    }

    return allowed;
}

From source file:org.geoserver.jdbcconfig.internal.ConfigDatabase.java

/**
 * Persists every mapped property of {@code info} as attribute rows keyed by
 * the object's primary key.
 *
 * @param info   the configuration object whose properties are being stored
 * @param infoPk the primary key assigned to {@code info}
 */
private void addAttributes(final Info info, final Number infoPk) {
    final String id = info.getId();
    if (LOGGER.isLoggable(Level.FINER)) {
        LOGGER.finer("Storing properties of " + id + " with pk " + infoPk);
    }

    for (Property prop : dbMappings.properties(info)) {
        if (LOGGER.isLoggable(Level.FINEST)) {
            LOGGER.finest("Adding property " + prop.getPropertyName() + "='" + prop.getValue() + "'");
        }

        final List<?> values = valueList(prop);
        final boolean isCollection = prop.isCollectionProperty();

        for (int index = 0; index < values.size(); index++) {
            // Collection-valued properties are stored with a 1-based column index;
            // scalar properties always use column index 0.
            final Integer colIndex = isCollection ? (index + 1) : 0;
            final String storedValue = marshalValue(values.get(index));
            addAttribute(info, infoPk, prop, colIndex, storedValue);
        }
    }
}

From source file:com.esri.gpt.catalog.search.SearchEngineRest.java

/**
 * Fetches the metadata document for the given record id over HTTP and parses it
 * into a {@link SearchXslRecord} using the configured XSL profile.
 *
 * @param uuid the record id whose metadata should be retrieved
 * @return the parsed metadata record
 * @throws SearchException if the metadata URL is malformed, the HTTP request
 *         fails, or the response cannot be transformed
 */
private SearchXslRecord getMetadataAsObjectX(String uuid) throws SearchException {
    String metaUrl = this.getMetadataUrl(uuid);
    URL url = null;
    try {
        url = new URL(metaUrl);

        HttpClientRequest clientRequest = HttpClientRequest.newRequest(HttpClientRequest.MethodName.GET,
                url.toExternalForm());
        clientRequest.setConnectionTimeMs(getConnectionTimeoutMs());
        clientRequest.setResponseTimeOutMs(getResponseTimeoutMs());
        clientRequest.execute();
        String response = clientRequest.readResponseAsCharacters();
        LOG.log(Level.FINER, "Response from get Metadata url = {0}\n response = \n{1}",
                new Object[] { url.toExternalForm(), response });
        SearchXslRecord recordX = new SearchXslRecord();
        this.readXslProfile().readGetMetadataByIDResponse(response, recordX);
        return recordX;
    } catch (IOException | TransformerException e) {
        // MalformedURLException is an IOException, so a bad metaUrl lands here too.
        // Original accumulated the exception in a local and re-threw after the
        // try block; multi-catch keeps the same behavior (cause preserved).
        throw new SearchException("Could not get metadata id url = " + url, e);
    }
}

From source file:org.b3log.latke.repository.gae.GAERepository.java

/**
 * Removes the object with the specified id from this repository.
 *
 * @param id the specified id
 * @param parentKeyKind the specified parent key kind
 * @param parentKeyName the specified parent key name
 * @throws RepositoryException repository exception
 */
private void remove(final String id, final String parentKeyKind, final String parentKeyName)
        throws RepositoryException {

    // Entity keys are scoped by the parent key and this repository's kind (name).
    final Key parent = KeyFactory.createKey(parentKeyKind, parentKeyName);
    final Key entityKey = KeyFactory.createKey(parent, getName(), id);

    datastoreService.delete(entityKey);

    LOGGER.log(Level.FINER, "Removed an object[oId={0}] from repository[name={1}]",
            new Object[] { id, getName() });
}

From source file:com.ibm.jaggr.core.impl.transport.AbstractHttpTransport.java

/**
 * This method checks the request for the has conditions, which may either be contained in URL
 * query arguments or in a cookie sent from the client.
 *
 * @param request
 *            the request object
 * @return The has conditions from the request.
 * @throws IOException
 * @throws UnsupportedEncodingException
 */
protected String getHasConditionsFromRequest(HttpServletRequest request) throws IOException {

    final String sourceMethod = "getHasConditionsFromRequest"; //$NON-NLS-1$
    boolean isTraceLogging = log.isLoggable(Level.FINER);
    if (isTraceLogging) {
        log.entering(AbstractHttpTransport.class.getName(), sourceMethod, new Object[] { request });
    }
    String ret = null;
    if (request.getParameter(FEATUREMAPHASH_REQPARAM) != null) {
        // A feature-map hash query arg means the full feature set was sent in the
        // cookie named by FEATUREMAP_REQPARAM rather than in the URL itself.
        if (isTraceLogging) {
            log.finer("has hash = " + request.getParameter(FEATUREMAPHASH_REQPARAM)); //$NON-NLS-1$
        }
        Cookie[] cookies = request.getCookies();
        if (cookies != null) {
            for (int i = 0; ret == null && i < cookies.length; i++) {
                Cookie cookie = cookies[i];
                if (cookie.getName().equals(FEATUREMAP_REQPARAM) && cookie.getValue() != null) {
                    if (isTraceLogging) {
                        log.finer("has cookie = " + cookie.getValue()); //$NON-NLS-1$
                    }
                    // Cookie value is URL-encoded; presumably the client always
                    // encodes as US-ASCII — confirm against the client transport.
                    ret = URLDecoder.decode(cookie.getValue(), "US-ASCII"); //$NON-NLS-1$
                    break;
                }
            }
        }
        if (ret == null) {
            // Hash was sent but the expected cookie is missing: warn with the
            // request URL and user agent to aid diagnosis.
            if (log.isLoggable(Level.WARNING)) {
                StringBuffer url = request.getRequestURL();
                if (url != null) { // might be null if using mock request for unit testing
                    url.append("?").append(request.getQueryString()).toString(); //$NON-NLS-1$
                    log.warning(MessageFormat.format(Messages.AbstractHttpTransport_0,
                            new Object[] { url, request.getHeader("User-Agent") })); //$NON-NLS-1$
                }
            }
        }
    } else {
        // No hash arg: the has conditions (if any) are passed directly as a query arg.
        ret = request.getParameter(FEATUREMAP_REQPARAM);
        if (isTraceLogging) {
            log.finer("reading features from has query arg"); //$NON-NLS-1$
        }
    }
    if (isTraceLogging) {
        log.exiting(AbstractHttpTransport.class.getName(), sourceMethod, ret);
    }
    return ret;
}

From source file:com.moesol.geoserver.sync.client.AbstractClientSynchronizer.java

/**
 * Rebuilds the SHA-1 hash/value list for every feature currently tracked by
 * this synchronizer, sorted by id and value hash.
 */
private void computeSha1s() {
    LOGGER.log(Level.FINER, "attributes={0}, sync={1}",
            new Object[] { m_featureSha1Sync.getAttributesToInclude(), m_server });

    m_featureSha1s.clear();
    for (FeatureAccessor accessor : m_features.values()) {
        m_featureSha1s.add(makeHashAndFeatureValue(accessor.getFeature()));
    }
    Collections.sort(m_featureSha1s, new IdAndValueSha1Comparator(versionFeatures));
}

From source file:org.b3log.latke.repository.jdbc.JdbcRepository.java

/**
 * Gets the result of the specified query, consulting the repository cache
 * first when caching is enabled.
 *
 * <p>The returned JSON object contains pagination information under
 * {@code Pagination.PAGINATION} and the matched records under
 * {@code Keys.RESULTS}.
 *
 * @param query the specified query
 * @return the query result
 * @throws RepositoryException if SQL execution or any other processing fails
 */
@Override
public JSONObject get(final Query query) throws RepositoryException {
    JSONObject ret = new JSONObject();

    // Cache key is scoped by both the query and this repository's name.
    final String cacheKey = CACHE_KEY_PREFIX + query.getCacheKey() + "_" + getName();

    if (cacheEnabled) {
        ret = (JSONObject) CACHE.get(cacheKey);
        if (null != ret) {
            LOGGER.log(Level.FINER, "Got query result[cacheKey={0}] from repository cache[name={1}]",
                    new Object[] { cacheKey, getName() });
            return ret;
        }

        ret = new JSONObject(); // Re-instantiates it if cache miss
    }

    final int currentPageNum = query.getCurrentPageNum();
    final int pageSize = query.getPageSize();
    // Assume by default that the page count must be computed
    int pageCount = -1;

    // If the caller already knows the page count, use the value it specified
    // instead of counting again
    if (null != query.getPageCount()) {
        pageCount = query.getPageCount();
    }

    final StringBuilder sql = new StringBuilder();
    final Connection connection = getConnection();
    final List<Object> paramList = new ArrayList<Object>();

    try {
        // Builds the SQL and parameter list, returning the total page count.
        final int pageCnt = get(currentPageNum, pageSize, pageCount, query, sql, paramList);

        // page
        final JSONObject pagination = new JSONObject();

        pagination.put(Pagination.PAGINATION_PAGE_COUNT, pageCnt);
        ret.put(Pagination.PAGINATION, pagination);

        // result
        if (pageCnt == 0) {
            ret.put(Keys.RESULTS, new JSONArray());
            return ret;
        }

        final JSONArray jsonResults = JdbcUtil.queryJsonArray(sql.toString(), paramList, connection, getName());

        ret.put(Keys.RESULTS, jsonResults);

        if (cacheEnabled) {
            CACHE.putAsync(cacheKey, ret);
            LOGGER.log(Level.FINER, "Added query result[cacheKey={0}] in repository cache[{1}]",
                    new Object[] { cacheKey, getName() });
            try {
                // Also cache individual results so later lookups can hit the cache.
                cacheQueryResults(ret.optJSONArray(Keys.RESULTS), query);
            } catch (final JSONException e) {
                // Best-effort: failing to cache individual results does not fail the query.
                LOGGER.log(Level.WARNING, "Caches query results failed", e);
            }
        }

    } catch (final SQLException e) {
        throw new JDBCRepositoryException(e);
    } catch (final Exception e) {
        LOGGER.log(Level.SEVERE, "query: " + e.getMessage(), e);
        throw new RepositoryException(e);
    } finally {
        closeQueryConnection(connection);
    }

    return ret;
}

From source file:com.ibm.jaggr.core.impl.AbstractAggregatorImpl.java

/**
 * Processes an aggregator request: optionally revalidates the config in
 * development mode, resolves the requested layer, and writes either the layer
 * content, a Not Modified response, or an appropriate error response.
 *
 * @param req  the servlet request
 * @param resp the servlet response
 */
protected void processAggregatorRequest(HttpServletRequest req, HttpServletResponse resp) {
    final String sourceMethod = "processAggregatorRequest"; //$NON-NLS-1$
    boolean isTraceLogging = log.isLoggable(Level.FINER);
    if (isTraceLogging) {
        log.entering(AbstractAggregatorImpl.class.getName(), sourceMethod, new Object[] { req, resp });
    }
    req.setAttribute(AGGREGATOR_REQATTRNAME, this);
    // Per-request scratch map made available to downstream processing; cleared
    // in the finally block below.
    ConcurrentMap<String, Object> concurrentMap = new ConcurrentHashMap<String, Object>();
    req.setAttribute(CONCURRENTMAP_REQATTRNAME, concurrentMap);

    try {
        // Validate config last-modified if development mode is enabled
        if (getOptions().isDevelopmentMode()) {
            long lastModified = -1;
            URI configUri = getConfig().getConfigUri();
            if (configUri != null) {
                try {
                    // try to get platform URI from IResource in case uri specifies
                    // aggregator specific scheme like namedbundleresource
                    configUri = newResource(configUri).getURI();
                } catch (UnsupportedOperationException e) {
                    // Not fatal.  Just use uri as specified.
                }
                lastModified = configUri.toURL().openConnection().getLastModified();
            }
            if (lastModified > getConfig().lastModified()) {
                if (reloadConfig()) {
                    // If the config has been modified, then dependencies will be revalidated
                    // asynchronously.  Rather than forcing the current request to wait, return
                    // a response that will display an alert informing the user of what is
                    // happening and asking them to reload the page.
                    String content = "alert('" + //$NON-NLS-1$
                            StringUtil.escapeForJavaScript(Messages.ConfigModified) + "');"; //$NON-NLS-1$
                    resp.addHeader("Cache-control", "no-store"); //$NON-NLS-1$ //$NON-NLS-2$
                    CopyUtil.copy(new StringReader(content), resp.getOutputStream());
                    return;
                }
            }
        }

        getTransport().decorateRequest(req);
        notifyRequestListeners(RequestNotifierAction.start, req, resp);

        ILayer layer = getLayer(req);
        long modifiedSince = req.getDateHeader("If-Modified-Since"); //$NON-NLS-1$
        // Round down to second granularity to match the HTTP date header precision.
        long lastModified = (Math.max(getCacheManager().getCache().getCreated(), layer.getLastModified(req))
                / 1000) * 1000;
        if (modifiedSince >= lastModified) {
            if (log.isLoggable(Level.FINER)) {
                log.finer("Returning Not Modified response for layer in servlet" + //$NON-NLS-1$
                        getName() + ":" //$NON-NLS-1$
                        + req.getAttribute(IHttpTransport.REQUESTEDMODULENAMES_REQATTRNAME).toString());
            }
            resp.setStatus(HttpServletResponse.SC_NOT_MODIFIED);
        } else {
            // Get the InputStream for the response.  This call sets the Content-Type,
            // Content-Length and Content-Encoding headers in the response.
            InputStream in = layer.getInputStream(req, resp);
            // if any of the readers included an error response, then don't cache the layer.
            if (req.getAttribute(ILayer.NOCACHE_RESPONSE_REQATTRNAME) != null) {
                resp.addHeader("Cache-Control", "no-store"); //$NON-NLS-1$ //$NON-NLS-2$
            } else {
                resp.setDateHeader("Last-Modified", lastModified); //$NON-NLS-1$
                int expires = getConfig().getExpires();
                resp.addHeader("Cache-Control", //$NON-NLS-1$
                        "public" + (expires > 0 ? (", max-age=" + expires) : "") //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
                );
            }
            CopyUtil.copy(in, resp.getOutputStream());
        }
        notifyRequestListeners(RequestNotifierAction.end, req, resp);
    } catch (DependencyVerificationException e) {
        // clear the cache now even though it will be cleared when validateDeps has
        // finished (asynchronously) so that any new requests will be forced to wait
        // until dependencies have been validated.
        getCacheManager().clearCache();
        getDependencies().validateDeps(false);

        resp.addHeader("Cache-control", "no-store"); //$NON-NLS-1$ //$NON-NLS-2$
        if (getOptions().isDevelopmentMode()) {
            // In development mode, surface the failure to the developer via an alert
            // that includes the console command needed to revalidate dependencies.
            String msg = StringUtil.escapeForJavaScript(MessageFormat.format(Messages.DepVerificationFailed,
                    new Object[] { e.getMessage(), "aggregator " + //$NON-NLS-1$
                            "validatedeps " + //$NON-NLS-1$
                            getName() + " clean", //$NON-NLS-1$
                            getWorkingDirectory().toString().replace("\\", "\\\\") //$NON-NLS-1$ //$NON-NLS-2$
                    }));
            String content = "alert('" + msg + "');"; //$NON-NLS-1$ //$NON-NLS-2$
            try {
                CopyUtil.copy(new StringReader(content), resp.getOutputStream());
            } catch (IOException e1) {
                if (log.isLoggable(Level.SEVERE)) {
                    log.log(Level.SEVERE, e1.getMessage(), e1);
                }
                resp.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
            }
        } else {
            resp.setStatus(HttpServletResponse.SC_SERVICE_UNAVAILABLE);
        }
    } catch (ProcessingDependenciesException e) {
        // Dependencies are still being processed; in development mode tell the
        // developer to retry, otherwise report service unavailable.
        resp.addHeader("Cache-control", "no-store"); //$NON-NLS-1$ //$NON-NLS-2$
        if (getOptions().isDevelopmentMode()) {
            String content = "alert('" + StringUtil.escapeForJavaScript(Messages.Busy) + "');"; //$NON-NLS-1$ //$NON-NLS-2$
            try {
                CopyUtil.copy(new StringReader(content), resp.getOutputStream());
            } catch (IOException e1) {
                if (log.isLoggable(Level.SEVERE)) {
                    log.log(Level.SEVERE, e1.getMessage(), e1);
                }
                resp.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
            }
        } else {
            resp.setStatus(HttpServletResponse.SC_SERVICE_UNAVAILABLE);
        }
    } catch (BadRequestException e) {
        exceptionResponse(req, resp, e, HttpServletResponse.SC_BAD_REQUEST);
    } catch (NotFoundException e) {
        exceptionResponse(req, resp, e, HttpServletResponse.SC_NOT_FOUND);
    } catch (Exception e) {
        exceptionResponse(req, resp, e, HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
    } finally {
        concurrentMap.clear();
    }
    if (isTraceLogging) {
        log.exiting(AbstractAggregatorImpl.class.getName(), sourceMethod);
    }
}

From source file:com.onarandombox.MultiverseCore.MVWorld.java

/**
 * Initializes the permission nodes for this world (access, exempt,
 * gamemode-bypass and player-limit-bypass), registers them with the plugin
 * manager, and wires each into its parent permission where appropriate.
 */
private void initPerms() {
    this.permission = new Permission("multiverse.access." + this.getName(),
            "Allows access to " + this.getName(), PermissionDefault.OP);
    // This guy is special. He shouldn't be added to any parent perms.
    this.ignoreperm = new Permission("mv.bypass.gamemode." + this.getName(),
            "Allows players with this permission to ignore gamemode changes.", PermissionDefault.FALSE);

    this.exempt = new Permission("multiverse.exempt." + this.getName(),
            "A player who has this does not pay to enter this world, or use any MV portals in it "
                    + this.getName(),
            PermissionDefault.OP);

    // Fixed typo in the user-visible description ("wether its" -> "whether it's").
    this.limitbypassperm = new Permission("mv.bypass.playerlimit." + this.getName(),
            "A player who can enter this world regardless of whether it's full", PermissionDefault.OP);
    try {
        this.plugin.getServer().getPluginManager().addPermission(this.permission);
        this.plugin.getServer().getPluginManager().addPermission(this.exempt);
        this.plugin.getServer().getPluginManager().addPermission(this.ignoreperm);
        this.plugin.getServer().getPluginManager().addPermission(this.limitbypassperm);
        // Add the access permission (and its exempt counterpart) to parent lists.
        this.addToUpperLists(this.permission);

        // Add ignore to its parent:
        this.ignoreperm.addParent("mv.bypass.gamemode.*", true);
        // Add limit bypass to its parent
        this.limitbypassperm.addParent("mv.bypass.playerlimit.*", true);
    } catch (IllegalArgumentException e) {
        // addPermission throws if a node is already registered; benign on reload.
        this.plugin.log(Level.FINER, "Permissions nodes were already added for " + this.name);
    }
}

From source file:fungus.HyphaLink.java

/**
 * Promotes the neighboring node with the largest biomass into a hypha, if any.
 *
 * @return the node that was grown into a hypha, or {@code null} if no
 *         candidate neighbor was found
 */
public MycoNode growHypha() {
    MycoNode newHypha = getMaxBiomass();
    if (newHypha != null) {
        // Use {0}/{1} placeholders so the parameter array is actually rendered;
        // the original concatenated the message AND passed an unused params array.
        log.log(Level.FINER, "{0} GROWS HYPHA INTO {1}", new Object[] { myNode, newHypha });
        HyphaData newHyphaData = (HyphaData) newHypha.getProtocol(hyphaDataPid);
        newHyphaData.becomeExtending(newHypha);
    }
    return newHypha;
}