Example usage for java.util.logging Level FINER

Introduction

This page collects real-world usage examples for java.util.logging Level.FINER, taken from open-source projects.

Prototype

public static final Level FINER

Document

FINER indicates a fairly detailed tracing message.
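
The examples below repeat two idioms: guarding message construction behind Logger.isLoggable(Level.FINER), and the Logger.entering/exiting convenience methods, which themselves log at FINER. Here is a minimal, self-contained sketch of both; the FinerExample class, logger name, and messages are illustrative only and do not come from the sources below.

import java.util.logging.Level;
import java.util.logging.Logger;

public class FinerExample {
    private static final Logger LOGGER = Logger.getLogger(FinerExample.class.getName());

    public int compute(int n) {
        // entering()/exiting() log at Level.FINER, so they appear only
        // when the logger (and its handlers) pass FINER or lower.
        LOGGER.entering(FinerExample.class.getName(), "compute", n);

        // Guard string concatenation so the message is never built
        // when FINER is disabled.
        if (LOGGER.isLoggable(Level.FINER)) {
            LOGGER.finer("compute called with n=" + n);
        }

        int result = n * n;
        LOGGER.exiting(FinerExample.class.getName(), "compute", result);
        return result;
    }

    public static void main(String[] args) {
        // The default console handler passes INFO and above, so FINER
        // output requires lowering the level on both logger and handler.
        new FinerExample().compute(7);
    }
}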

Usage

From source file:com.granule.json.utils.internal.JSONSAXHandler.java
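
In this SAX handler, entry and exit traces for endDocument() are emitted only when the logger passes Level.FINER.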

public void endDocument() throws SAXException {
    if (logger.isLoggable(Level.FINER))
        logger.entering(className, "endDocument()");

    endJSON();

    if (logger.isLoggable(Level.FINER))
        logger.exiting(className, "endDocument()");
}

From source file:jenkins.security.ClassFilterImpl.java
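
Jenkins' ClassFilterImpl logs each custom filter's verdict at FINER, while the broader permit decisions further down are logged at FINE.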

@SuppressWarnings("rawtypes")
@Override
public boolean isBlacklisted(Class _c) {
    for (CustomClassFilter f : ExtensionList.lookup(CustomClassFilter.class)) {
        Boolean r = f.permits(_c);
        if (r != null) {
            if (r) {
                LOGGER.log(Level.FINER, "{0} specifies a policy for {1}: {2}",
                        new Object[] { f, _c.getName(), true });
            } else {
                notifyRejected(_c, _c.getName(),
                        String.format("%s specifies a policy for %s: %s ", f, _c.getName(), r));
            }
            return !r;
        }
    }
    return cache.computeIfAbsent(_c, c -> {
        String name = c.getName();
        if (Main.isUnitTest && (name.contains("$$EnhancerByMockitoWithCGLIB$$")
                || name.contains("$$FastClassByMockitoWithCGLIB$$") || name.startsWith("org.mockito."))) {
            mockOff();
            return false;
        }
        if (ClassFilter.STANDARD.isBlacklisted(c)) { // currently never true, but may issue diagnostics
            notifyRejected(_c, _c.getName(), String.format("%s is not permitted ", _c.getName()));
            return true;
        }
        if (c.isArray()) {
            LOGGER.log(Level.FINE, "permitting {0} since it is an array", name);
            return false;
        }
        if (Throwable.class.isAssignableFrom(c)) {
            LOGGER.log(Level.FINE, "permitting {0} since it is a throwable", name);
            return false;
        }
        if (Enum.class.isAssignableFrom(c)) { // Class.isEnum seems to be false for, e.g., java.util.concurrent.TimeUnit$6
            LOGGER.log(Level.FINE, "permitting {0} since it is an enum", name);
            return false;
        }
        String location = codeSource(c);
        if (location != null) {
            if (isLocationWhitelisted(location)) {
                LOGGER.log(Level.FINE, "permitting {0} due to its location in {1}",
                        new Object[] { name, location });
                return false;
            }
        } else {
            ClassLoader loader = c.getClassLoader();
            if (loader != null && loader.getClass().getName().equals("hudson.remoting.RemoteClassLoader")) {
                LOGGER.log(Level.FINE, "permitting {0} since it was loaded by a remote class loader", name);
                return false;
            }
        }
        if (WHITELISTED_CLASSES.contains(name)) {
            LOGGER.log(Level.FINE, "tolerating {0} by whitelist", name);
            return false;
        }
        if (SUPPRESS_WHITELIST || SUPPRESS_ALL) {
            notifyRejected(_c, null, String.format(
                    "%s in %s might be dangerous, so would normally be rejected; see https://jenkins.io/redirect/class-filter/",
                    name, location != null ? location : "JRE"));

            return false;
        }
        notifyRejected(_c, null, String.format(
                "%s in %s might be dangerous, so rejecting; see https://jenkins.io/redirect/class-filter/",
                name, location != null ? location : "JRE"));
        return true;
    });
}

From source file:com.caucho.hessian.client.HessianProxy.java
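
HessianProxy checks log.isLoggable(Level.FINER) before concatenating the call description, and switches to FINEST to wrap the reply in a protocol-debugging stream.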

/**
 * Handles the object invocation.
 * 
 * @param proxy
 *            the proxy object to invoke
 * @param method
 *            the method to call
 * @param args
 *            the arguments to the proxy object
 */
public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
    String mangleName;

    synchronized (_mangleMap) {
        mangleName = _mangleMap.get(method);
    }

    if (mangleName == null) {
        String methodName = method.getName();
        Class<?>[] params = method.getParameterTypes();
        // equals and hashCode are special cased
        if (methodName.equals("equals") && params.length == 1 && params[0].equals(Object.class)) {
            Object value = args[0];
            if (value == null || !Proxy.isProxyClass(value.getClass()))
                return Boolean.FALSE;

            Object proxyHandler = Proxy.getInvocationHandler(value);

            if (!(proxyHandler instanceof HessianProxy))
                return Boolean.FALSE;

            HessianProxy handler = (HessianProxy) proxyHandler;

            return Boolean.valueOf(_url.equals(handler.getURL()));
        } else if (methodName.equals("hashCode") && params.length == 0)
            return Integer.valueOf(_url.hashCode());
        else if (methodName.equals("getHessianType"))
            return proxy.getClass().getInterfaces()[0].getName();
        else if (methodName.equals("getHessianURL"))
            return _url.toString();
        else if (methodName.equals("toString") && params.length == 0)
            return "HessianProxy[" + _url + "]";

        if (!_factory.isOverloadEnabled())
            mangleName = method.getName();
        else
            mangleName = mangleName(method);

        synchronized (_mangleMap) {
            _mangleMap.put(method, mangleName);
        }
    }
    InputStream is = null;
    HessianConnection conn = null;

    try {
        if (log.isLoggable(Level.FINER))
            log.finer("Hessian[" + _url + "] calling " + mangleName);
        conn = sendRequest(mangleName, args);

        if (conn.getStatusCode() != 200) {
            throw new HessianProtocolException("http code is " + conn.getStatusCode());
        }

        is = conn.getInputStream();

        if (log.isLoggable(Level.FINEST)) {
            PrintWriter dbg = new PrintWriter(new LogWriter(log));
            HessianDebugInputStream dIs = new HessianDebugInputStream(is, dbg);

            dIs.startTop2();

            is = dIs;
        }

        AbstractHessianInput in;

        int code = is.read();

        if (code == 'H') {
        // consume the protocol version bytes (read but not otherwise used)
        int major = is.read();
        int minor = is.read();

            in = _factory.getHessian2Input(is);

            Object value = in.readReply(method.getReturnType());

            return value;
        } else if (code == 'r') {
            int major = is.read();
            int minor = is.read();

            in = _factory.getHessianInput(is);

            in.startReplyBody();

            Object value = in.readObject(method.getReturnType());

            if (value instanceof InputStream) {
                value = new ResultInputStream(conn, is, in, (InputStream) value);
                is = null;
                conn = null;
            } else
                in.completeReply();

            return value;
        } else
            throw new HessianProtocolException("'" + (char) code + "' is an unknown code");
    } catch (HessianProtocolException e) {
        throw new HessianRuntimeException(e);
    } finally {
        try {
            if (is != null)
                is.close();
        } catch (Exception e) {
            log.log(Level.FINE, e.toString(), e);
        }

        try {
            if (conn != null)
                conn.destroy();
        } catch (Exception e) {
            log.log(Level.FINE, e.toString(), e);
        }
    }
}

From source file:com.ibm.datapower.amt.clientAPI.Blob.java
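
The Blob constructor records its creation at FINER via Logger.logp, supplying the class and method names explicitly.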

/**
 * Create a new blob object from a URL.
 * 
 * @param url
 *            a URL representing the location of the blob source.
 *
 *            The URL is not read during the constructor; it is
 *            instead read by the consumer of {@link #getInputStream()},
 *            or read into memory if you call {@link #getByteArray()}. So
 *            the URL target should exist and be available for reading
 *            during the lifetime of this Blob object. If that is not
 *            possible, use the constructor {@link #Blob(byte[])}.
 *            Supported URL schemes are file:, http:, and https:.
 */
public Blob(URL url) {
    final String METHOD_NAME = "Blob(URL)"; //$NON-NLS-1$
    this.url = url;

    String fileName = url.getFile();
    int index = fileName.lastIndexOf('.');
    filenameExtension = fileName.substring(index + 1, fileName.length());

    logger.logp(Level.FINER, CLASS_NAME, METHOD_NAME, "Creating Blob from URL: " + url.toString()); //$NON-NLS-1$
}

From source file:org.cloudifysource.esc.driver.provisioning.openstack.OpenStackNovaClient.java
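
The Nova client guards its request trace with isLoggable(Level.FINER) before logging the server id being fetched.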

/**
 * Retrieve a server's details.
 * 
 * @param serverId
 *            The id of the server.
 * @return An instance of the server with all its details.
 * @throws OpenstackException
 *             Thrown when something went wrong with the request.
 */
public NovaServer getServerDetails(final String serverId) throws OpenstackException {

    if (logger.isLoggable(Level.FINER)) {
        logger.log(Level.FINER, "Request=getServerDetails: " + serverId);
    }

    final String response;
    try {
        response = doGet("servers/" + serverId);
    } catch (final OpenstackServerException e) {
        if (RESOURCE_NOT_FOUND_STATUS == e.getStatusCode()) {
            return null;
        }
        throw e;
    }

    final NovaServer nsr = JsonUtils.unwrapRootToObject(NovaServer.class, response);
    return nsr;
}

From source file:org.geoserver.bkprst.BrTask.java
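
Here a failed read of the backup info file is logged at FINER and swallowed; the method returns null rather than propagating the IOException.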

/**
 * Reads an XML file containing data about the last backup.
 *
 * @param path directory to read the data from
 *
 * @return a BackupTask object containing info about the previous backup
 */
protected BackupTask readBackupInfo(String path) {

    File xmlFile = new File(path + File.separatorChar + BrTask.INFOFILE);
    BackupTask backupInfo = new BackupTask(null, "", null, null);
    try {
        String xml = FileUtils.readFileToString(xmlFile);
        this.br.fromXML(xml, backupInfo);
    } catch (IOException e) {
        LOGGER.log(Level.FINER, e.getMessage(), e);
        return null;
    }
    return backupInfo;
}

From source file:edu.cwru.sepia.environment.Environment.java
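
The environment guards a verbose per-agent turn message behind isLoggable(Level.FINER), while coarser per-step events go to FINE.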

/**
 * Step through an episode.
 * @return whether the episode has terminated
 * @throws InterruptedException
 */
public boolean step() throws InterruptedException {
    //grab states and histories
    StateView[] states = new StateView[connectedagents.length];
    History.HistoryView[] histories = new History.HistoryView[connectedagents.length];
    CountDownLatch[] actionLatches = new CountDownLatch[connectedagents.length];
    boolean[] isAgentsTurn = new boolean[connectedagents.length];
    long[] endTimes = new long[connectedagents.length];
    for (int ag = 0; ag < connectedagents.length; ag++) {
        isAgentsTurn[ag] = turnTracker.isAgentsTurn(connectedagents[ag]);
        if (isAgentsTurn[ag]) {
            int playerNumber = connectedagents[ag].getPlayerNumber();
            states[ag] = model.getState().getView(playerNumber);
            histories[ag] = model.getHistory().getView(playerNumber);
            endTimes[ag] = System.currentTimeMillis() + DELAY_MS;
        }
    }
    //And run them
    for (int ag = 0; ag < connectedagents.length; ag++) {
        if (isAgentsTurn[ag]) {
            if (logger.isLoggable(Level.FINER)) {
                logger.finer("Step " + step + ": Agent with player number: "
                        + connectedagents[ag].getPlayerNumber() + "'s turn.  "
                        + (turnTracker.hasHadTurnBefore(connectedagents[ag].getPlayerNumber()) ? "Has had turn"
                                : "First turn"));
            }
            actionLatches[ag] = agentIntermediaries[ag].submitState(states[ag], histories[ag],
                    turnTracker.hasHadTurnBefore(connectedagents[ag].getPlayerNumber())
                            ? ThreadIntermediary.StateType.MIDDLE
                            : ThreadIntermediary.StateType.INITIAL);
        }
    }
    for (int ag = 0; ag < connectedagents.length; ag++) {
        if (isAgentsTurn[ag]) {
            //Wait for the actions to be ready
            if (DELAY_MS >= 0) {
                //if there is a positive delay, only give it that long to process
                actionLatches[ag].await(endTimes[ag] - System.currentTimeMillis(), TimeUnit.MILLISECONDS);
            } else {
                //if the delay is negative (i.e., nonsense), wait as long as needed
                actionLatches[ag].await();
            }

            //Get the responses
            Collection<Action> actionMapTemp = agentIntermediaries[ag].retrieveActions();
            if (actionMapTemp != null) //If there were responses
            {
                Collection<Action> copy = new ArrayList<Action>(actionMapTemp);
                model.addActions(copy, connectedagents[ag].getPlayerNumber());
            }
        }
    }
    logger.fine("Executing one step of the model");
    model.executeStep();
    step++;
    logger.fine("Notifying TurnTracker of new step");
    turnTracker.newStep();
    return model.isTerminated();
}

From source file:com.ibm.jaggr.core.impl.deps.DepTree.java
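
DepTree evaluates log.isLoggable(Level.FINER) once into an isTraceLogging flag and reuses it for entry, exit, and diagnostic traces throughout the constructor.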

/**
 * Object constructor. Attempts to de-serialize the cached dependency lists
 * from disk and then validates the dependency lists based on last-modified
 * dates, looking for any new or removed files. If the cached dependency
 * list data cannot be de-serialized, new lists are constructed. Once the
 * dependency lists have been validated, the list data is serialized back
 * out to disk.
 *
 * @param paths
 *            Collection of URIs which specify the target resources
 *            to be scanned for javascript files.
 * @param aggregator
 *            The servlet instance for this object
 * @param stamp
 *            timestamp associated with external override/customization
 *            resources that are checked on every server restart
 * @param clean
 *            If true, then the dependency lists are generated from scratch
 *            rather than by de-serializing and then validating the cached
 *            dependency lists.
 * @param validateDeps
 *            If true, then validate existing cached dependencies using
 *            file last-modified times.
 * @throws IOException
 */
public DepTree(Collection<URI> paths, IAggregator aggregator, long stamp, boolean clean, boolean validateDeps)
        throws IOException {
    final String sourceMethod = "<ctor>"; //$NON-NLS-1$
    boolean isTraceLogging = log.isLoggable(Level.FINER);
    if (isTraceLogging) {
        log.entering(DepTree.class.getName(), sourceMethod,
                new Object[] { paths, aggregator, stamp, clean, validateDeps });
    }
    this.stamp = stamp;
    IConfig config = aggregator.getConfig();
    rawConfig = config.toString();
    cacheBust = AggregatorUtil.getCacheBust(aggregator);

    File cacheDir = new File(aggregator.getWorkingDirectory(), DEPCACHE_DIRNAME);
    File cacheFile = new File(cacheDir, CACHE_FILE);

    /*
     * The de-serialized dependency map. If we have a cached dependency map,
     * then it will be validated against the last-modified dates of the
     * current files and only the files that have changed will need to be
     * re-parsed to update the dependency lists.
     */
    DepTree cached = null;

    if (!clean) {
        // If we're not starting clean, try to de-serialize the map from
        // cache
        try {
            ObjectInputStream is = new ObjectInputStream(new FileInputStream(cacheFile));
            try {
                if (isTraceLogging) {
                    log.finer("Attempting to read cached dependencies from " + cacheFile.toString()); //$NON-NLS-1$
                }
                cached = (DepTree) is.readObject();
            } finally {
                try {
                    is.close();
                } catch (Exception ignore) {
                }
            }
        } catch (FileNotFoundException e) {
            /*
             * Not an error. Just means that the cache file hasn't been
             * written yet or else it's been deleted.
             */
            if (log.isLoggable(Level.INFO))
                log.log(Level.INFO, Messages.DepTree_1);
        } catch (Exception e) {
            if (log.isLoggable(Level.SEVERE))
                log.log(Level.SEVERE, e.getMessage(), e);
        }
    }

    // If the cacheBust config param has changed, then do a clean build
    // of the dependencies.
    if (cached != null) {
        if (stamp == 0) {
            // no init stamp provided.  Preserve the cached one.
            stamp = cached.stamp;
        }
        if (stamp > cached.stamp) {
            // init stamp has been updated.  Validate dependencies.
            validateDeps = true;
        }
        if (!StringUtils.equals(cacheBust, cached.cacheBust)) {
            if (isTraceLogging) {
                log.finer("Current cacheBust = " + cacheBust + ", cached cacheBust = " + cached.cacheBust); //$NON-NLS-1$//$NON-NLS-2$
            }
            if (log.isLoggable(Level.INFO)) {
                log.info(Messages.DepTree_2);
            }
            cached = null;
        }
        if (cached != null && !StringUtils.equals(rawConfig, cached.rawConfig)) {
            if (isTraceLogging) {
                log.finer("Current config = " + rawConfig); //$NON-NLS-1$
                log.finer("Cached config = " + cached.rawConfig); //$NON-NLS-1$
            }
            validateDeps = true;
        }
    }

    /*
     * If we de-serialized a previously saved dependency map, then go with
     * that.
     */
    if (cached != null && !validateDeps && !clean) {
        depMap = cached.depMap;
        fromCache = true;
        return;
    } else if (isTraceLogging) {
        log.finer("Building/validating deps: cached = " + cached + ", validateDeps = " + validateDeps //$NON-NLS-1$//$NON-NLS-2$
                + ", clean = " + clean); //$NON-NLS-1$
    }

    // Initialize the dependency map
    depMap = new ConcurrentHashMap<URI, DepTreeNode>();

    // This can take a while, so print something to the console
    String msg = MessageFormat.format(Messages.DepTree_3, new Object[] { aggregator.getName() });

    ConsoleService cs = new ConsoleService();
    cs.println(msg);

    if (log.isLoggable(Level.INFO)) {
        log.info(msg);
    }
    // Make sure that all the paths are unique and orthogonal
    paths = DepUtils.removeRedundantPaths(paths);

    /*
     * Create the thread pools, one for the tree builders and one for the
     * parsers. Since a tree builder thread will wait for all the outstanding
     * parser threads started by that builder to complete, we need to use two
     * independent thread pools to guard against the possibility of deadlock
     * caused by all the threads in the pool being consumed by tree builders
     * and leaving none available to service the parsers.
     */
    final ThreadGroup treeBuilderTG = new ThreadGroup(TREEBUILDER_TGNAME),
            parserTG = new ThreadGroup(JSPARSER_TGNAME);
    ExecutorService treeBuilderExc = Executors.newFixedThreadPool(10, new ThreadFactory() {
        public Thread newThread(Runnable r) {
            return new Thread(treeBuilderTG, r, MessageFormat.format(THREADNAME,
                    new Object[] { treeBuilderTG.getName(), treeBuilderTG.activeCount() }));
        }
    }), parserExc = Executors.newFixedThreadPool(20, new ThreadFactory() {
        public Thread newThread(Runnable r) {
            return new Thread(parserTG, r, MessageFormat.format(THREADNAME,
                    new Object[] { parserTG.getName(), parserTG.activeCount() }));
        }
    });

    // Counter to keep track of number of tree builder threads started
    AtomicInteger treeBuilderCount = new AtomicInteger(0);

    // The completion services for the thread pools
    final CompletionService<URI> parserCs = new ExecutorCompletionService<URI>(parserExc);
    CompletionService<DepTreeBuilder.Result> treeBuilderCs = new ExecutorCompletionService<DepTreeBuilder.Result>(
            treeBuilderExc);

    Set<String> nonJSExtensions = Collections.unmodifiableSet(getNonJSExtensions(aggregator));
    // Start the tree builder threads to process the paths
    for (final URI path : paths) {
        /*
         * Create or get from cache the root node for this path and
         * add it to the new map.
         */
        DepTreeNode root = new DepTreeNode("", path); //$NON-NLS-1$
        DepTreeNode cachedNode = null;
        if (cached != null) {
            cachedNode = cached.depMap.get(path);
            if (log.isLoggable(Level.INFO)) {
                log.info(MessageFormat.format(Messages.DepTree_4, new Object[] { path }));
            }
        } else {
            if (log.isLoggable(Level.INFO)) {
                log.info(MessageFormat.format(Messages.DepTree_5, new Object[] { path }));
            }
        }
        depMap.put(path, root);

        treeBuilderCount.incrementAndGet();
        treeBuilderCs.submit(new DepTreeBuilder(aggregator, parserCs, path, root, cachedNode, nonJSExtensions));
    }

    // List of parser exceptions
    LinkedList<Exception> parserExceptions = new LinkedList<Exception>();

    /*
     * Pull the completed tree builder tasks from the completion queue until
     * all the paths have been processed
     */
    while (treeBuilderCount.decrementAndGet() >= 0) {
        try {
            DepTreeBuilder.Result result = treeBuilderCs.take().get();
            if (log.isLoggable(Level.INFO)) {
                log.info(MessageFormat.format(Messages.DepTree_6,
                        new Object[] { result.parseCount, result.dirName }));
            }
        } catch (Exception e) {
            if (log.isLoggable(Level.SEVERE))
                log.log(Level.SEVERE, e.getMessage(), e);
            parserExceptions.add(e);
        }
    }

    // shutdown the thread pools now that we're done with them
    parserExc.shutdown();
    treeBuilderExc.shutdown();

    // If parser exceptions occurred, then rethrow the first one
    if (parserExceptions.size() > 0) {
        throw new RuntimeException(parserExceptions.get(0));
    }

    // Prune dead nodes (folder nodes with no children)
    for (Map.Entry<URI, DepTreeNode> entry : depMap.entrySet()) {
        entry.getValue().prune();
    }

    /*
     * Make sure the cache directory exists before we try to serialize the
     * dependency map.
     */
    if (!cacheDir.exists())
        if (!cacheDir.mkdirs()) {
            throw new IOException(
                    MessageFormat.format(Messages.DepTree_0, new Object[] { cacheDir.getAbsolutePath() }));
        }

    // Serialize the map to the cache directory
    ObjectOutputStream os = new ObjectOutputStream(new FileOutputStream(cacheFile));
    try {
        if (isTraceLogging) {
            log.finer("Writing cached dependencies to " + cacheFile.toString()); //$NON-NLS-1$
        }
        os.writeObject(this);
    } finally {
        try {
            os.close();
        } catch (Exception ignore) {
        }
    }
    msg = MessageFormat.format(Messages.DepTree_7, new Object[] { aggregator.getName() });

    // Output that we're done.
    cs.println(msg);
    if (log.isLoggable(Level.INFO)) {
        log.info(msg);
    }
    if (isTraceLogging) {
        log.exiting(DepTree.class.getName(), sourceMethod);
    }
}

From source file:mendeley2kindle.KindleDAO.java
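
removeFile traces the removal request at FINER and logs the completed removal at FINE.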

public void removeFile(String collection, KFile file) {
    log.log(Level.FINER, "Removing a document:" + file.getName() + " from the collection: " + collection);
    String path = toKindlePath(file);
    String khash = toKindleHash(path);
    String key = collection + KINDLE_LOCALE;
    try {
        JSONArray items = collections.getJSONObject(key).getJSONArray("items");
        for (int i = 0; i < items.length(); i++) {
            if (khash.equals(items.get(i))) {
                items.remove(i);
                i--; // removal shifts later items left; re-check this index
            }
        }
        log.log(Level.FINE, "Removed a document:" + file.getName() + " from the collection: " + collection);
    } catch (JSONException e) {
        e.printStackTrace();
    }
}

From source file:com.granule.json.utils.internal.JSONSAXHandler.java
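
flushBuffer brackets its work with entering/exiting traces, each guarded by an isLoggable(Level.FINER) check.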

/**
 * Method to flush out anything remaining in the buffers.
 */
public void flushBuffer() throws IOException {
    if (logger.isLoggable(Level.FINER))
        logger.entering(className, "flushBuffer()");

    if (this.osWriter != null) {
        this.osWriter.flush();
    }

    if (logger.isLoggable(Level.FINER))
        logger.exiting(className, "flushBuffer()");
}