Example usage for java.util.logging Level FINEST

List of usage examples for java.util.logging Level FINEST

Introduction

On this page you can find example usages of java.util.logging Level FINEST.

Prototype

Level FINEST

Document

FINEST indicates a highly detailed tracing message.
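
Before the per-project examples, here is a minimal standalone sketch (not taken from any of the sources below; the class name is illustrative) showing how a message is logged at the FINEST level and how the common isLoggable guard avoids building messages that would be discarded anyway:

import java.util.logging.ConsoleHandler;
import java.util.logging.Handler;
import java.util.logging.Level;
import java.util.logging.Logger;

public class FinestLoggingExample {

    private static final Logger LOGGER = Logger.getLogger(FinestLoggingExample.class.getName());

    public static void main(String[] args) {
        // Both the logger and its handler must accept FINEST, or the record is dropped.
        Handler handler = new ConsoleHandler();
        handler.setLevel(Level.FINEST);
        LOGGER.addHandler(handler);
        LOGGER.setUseParentHandlers(false);
        LOGGER.setLevel(Level.FINEST);

        // Guard expensive message construction, as most of the examples on this page do.
        if (LOGGER.isLoggable(Level.FINEST)) {
            LOGGER.log(Level.FINEST, "Highly detailed tracing message: now=" + System.nanoTime());
        }
    }
}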

Usage

From source file:io.instacount.appengine.counter.service.ShardedCounterServiceImpl.java

/**
 * The cached count expires after {@code defaultCounterCountExpiration} seconds, after which the counter is
 * accurate again because the count is re-loaded from the Datastore.
 *
 * @param counterName The name of the counter to retrieve.
 * @param skipCache A boolean that allows a caller to skip memcache when retrieving a counter. Set to {@code true}
 *            to load the counter and all of its shards directly from the Datastore. Set to {@code false} to attempt
 *            to load the count from memcache, with fallback to the Datastore.
 * @return An {@link Optional} containing the counter, or an absent {@link Optional} if no counter exists with the
 *         given name.
 */
@Override
public Optional<Counter> getCounter(final String counterName, final boolean skipCache) {
    Preconditions.checkNotNull(counterName);

    // This method always loads the CounterData from the Datastore (or its Objectify cache), but sometimes
    // returns the cached count value.

    // //////////////
    // ShortCircuit: If nothing is present in the datastore.
    // //////////////
    final Optional<CounterData> optCounterData = this.getCounterData(counterName);
    if (!optCounterData.isPresent()) {
        logger.log(Level.FINEST, String.format("Counter '%s' was not found in hte Datastore!", counterName));
        return Optional.absent();
    }

    final CounterData counterData = optCounterData.get();

    // //////////////
    // ShortCircuit: If the counter is in an indeterminate state, then return its count as 0.
    // //////////////
    if (this.counterStatusYieldsIndeterminateCount(counterData.getCounterStatus())) {
        logger.log(Level.FINEST,
                String.format("Counter '%s' was in an indeterminate state.  Returning 0!", counterName));
        return Optional.of(new CounterBuilder(counterData).withCount(BigInteger.ZERO).build());
    }

    // //////////////
    // ShortCircuit: If the counter was found in memcache.
    // //////////////
    final String memCacheKey = this.assembleCounterKeyforMemcache(counterName);
    if (!skipCache) {
        final BigInteger cachedCounterCount = this.memcacheSafeGet(memCacheKey);
        if (cachedCounterCount != null) {
            // /////////////////////////////////////
            // The count was found in memcache, so return it.
            // /////////////////////////////////////
            logger.log(Level.FINEST, String.format("Cache Hit for Counter Named '%s': value=%s", counterName,
                    cachedCounterCount));
            return Optional.of(new CounterBuilder(counterData).withCount(cachedCounterCount).build());
        } else {
            logger.log(Level.FINE,
                    String.format(
                            "Cache Miss for CounterData Named '%s': value='%s'.  Checking Datastore instead!",
                            counterName, cachedCounterCount));
        }
    }

    // /////////////////////////////////////
    // skipCache was true or the count was NOT found in memcache!
    // /////////////////////////////////////

    // Note: No Need to clear the Objectify session cache here because it will be cleared automatically and
    // repopulated upon every request.

    logger.log(Level.FINE,
            String.format("Aggregating counts from '%s' CounterDataShards for CounterData named '%s'!",
                    counterData.getNumShards(), counterData.getName()));

    // ///////////////////
    // Assemble a List of CounterShardData Keys to retrieve in parallel!
    final List<Key<CounterShardData>> keysToLoad = Lists.newArrayList();
    for (int i = 0; i < counterData.getNumShards(); i++) {
        final Key<CounterShardData> counterShardKey = CounterShardData.key(counterData.getTypedKey(), i);
        keysToLoad.add(counterShardKey);
    }

    long sum = 0;

    // For added performance, we could spawn multiple threads to wait for each value to be returned from the
    // DataStore, and then aggregate that way. However, the simple summation below is not very expensive, so
    // creating multiple threads to get each value would probably be overkill. Just let objectify do this for
    // us. Even though we have to wait for all entities to return before summation begins, the summation is a quick
    // in-memory operation with a relatively small number of shards, so parallelizing it would likely not increase
    // performance.

    // No TX - get is Strongly consistent by default, and we will exceed the TX limit for high-shard-count
    // counters if we try to do this in a TX.
    final Map<Key<CounterShardData>, CounterShardData> counterShardDatasMap = ObjectifyService.ofy()
            .transactionless().load().keys(keysToLoad);
    final Collection<CounterShardData> counterShardDatas = counterShardDatasMap.values();
    for (CounterShardData counterShardData : counterShardDatas) {
        if (counterShardData != null) {
            sum += counterShardData.getCount();
        }
    }

    logger.log(Level.FINE,
            String.format(
                    "The Datastore is reporting a count of %s for CounterData '%s' count.  Resetting memcache "
                            + "count to %s for this counter name.",
                    sum, counterData.getName(), sum));

    final BigInteger bdSum = BigInteger.valueOf(sum);
    try {
        // Execution only gets here if there was nothing in memcache, or if the caller requested to skip
        // reading the counter count from memcache. In either case, the value in memcache should always be replaced.
        memcacheService.put(memCacheKey, bdSum, config.getDefaultCounterCountExpiration(),
                SetPolicy.SET_ALWAYS);
    } catch (MemcacheServiceException mse) {
        // Do nothing. The method will still return even though memcache is not available.
    }

    return Optional.of(new CounterBuilder(counterData).withCount(bdSum).build());
}

From source file:com.googlecode.batchfb.impl.Batch.java

/**
 * Constructs the batch query and executes it, possibly asynchronously.
 * @return an asynchronous handle to the raw batch result, whatever it may be.
 */
private Later<JsonNode> createFetcher() {
    final RequestBuilder call = new GraphRequestBuilder(getGraphEndpoint(), HttpMethod.POST, this.timeout,
            this.retries);

    // This actually creates the correct JSON structure as an array
    String batchValue = JSONUtils.toJSON(this.graphRequests, this.mapper);
    if (log.isLoggable(Level.FINEST))
        log.finest("Batch request is: " + batchValue);

    this.addParams(call, new Param[] { new Param("batch", batchValue) });

    final HttpResponse response;
    try {
        response = call.execute();
    } catch (IOException ex) {
        throw new IOFacebookException(ex);
    }

    return new Later<JsonNode>() {
        @Override
        public JsonNode get() throws FacebookException {
            try {
                if (response.getResponseCode() == HttpURLConnection.HTTP_OK
                        || response.getResponseCode() == HttpURLConnection.HTTP_BAD_REQUEST
                        || response.getResponseCode() == HttpURLConnection.HTTP_UNAUTHORIZED) {

                    // If it was an error, we will recognize it in the content later.
                    // It's possible we should capture all 4XX codes here.
                    JsonNode result = mapper.readTree(response.getContentStream());

                    if (log.isLoggable(Level.FINEST))
                        log.finest("Response is: " + result);

                    return result;
                } else {
                    throw new IOFacebookException("Unrecognized error " + response.getResponseCode() + " from "
                            + call + " :: " + StringUtils.read(response.getContentStream()));
                }
            } catch (IOException e) {
                throw new IOFacebookException("Error calling " + call, e);
            }
        }
    };
}

From source file:net.sourceforge.pmd.util.database.DBType.java

/**
 * @param properties
 *            the properties to set
 */
public void setProperties(Properties properties) {
    this.properties = properties;

    // Driver Class
    if (null != this.properties.getProperty("driver")) {
        this.driverClass = this.properties.getProperty("driver");
    }

    // Database CharacterSet
    if (null != this.properties.getProperty("characterset")) {
        this.characterSet = this.properties.getProperty("characterset");
    }

    // String to get objects
    if (null != this.properties.getProperty("sourcecodetypes")) {
        this.sourceCodeTypes = this.properties.getProperty("sourcecodetypes");
    }

    // Languages to process
    if (null != this.properties.getProperty("languages")) {
        this.languages = this.properties.getProperty("languages");
    }

    // Return class for source code
    if (null != this.properties.getProperty("returnType")) {
        if (LOGGER.isLoggable(Level.FINEST)) {
            LOGGER.finest("returnType" + this.properties.getProperty("returnType"));
        }
        this.sourceCodeReturnType = Integer.parseInt(this.properties.getProperty("returnType"));
    }

}

From source file:com.granule.json.utils.internal.JSONObject.java

/**
 * Internal method to write out all children JSON objects attached to this JSON object.
 * @param writer The writer to use while writing the JSON text.
 * @param depth The indention depth of the JSON text.
 * @param compact Flag to denote whether or not to write in nice indent format, or compact format.
 * @throws IOException Thrown if an error occurs on write.
 */
private void writeChildren(Writer writer, int depth, boolean compact) throws IOException {
    if (logger.isLoggable(Level.FINER))
        logger.entering(className, "writeChildren(Writer, int, boolean)");

    if (!jsonObjects.isEmpty()) {
        Enumeration keys = jsonObjects.keys();
        while (keys.hasMoreElements()) {
            String objName = (String) keys.nextElement();
            Vector vect = (Vector) jsonObjects.get(objName);
            if (vect != null && !vect.isEmpty()) {
                /**
                 * Non-array versus array elements.
                 */
                if (vect.size() == 1) {
                    if (logger.isLoggable(Level.FINEST))
                        logger.logp(Level.FINEST, className, "writeChildren(Writer, int, boolean)",
                                "Writing child object: [" + objName + "]");

                    JSONObject obj = (JSONObject) vect.elementAt(0);
                    obj.writeObject(writer, depth + 1, false, compact);
                    if (keys.hasMoreElements()) {
                        try {
                            if (!compact) {
                                if (!obj.isTextOnlyObject() && !obj.isEmptyObject()) {
                                    writeIndention(writer, depth + 1);
                                }
                                writer.write(",\n");
                            } else {
                                writer.write(",");
                            }
                        } catch (Exception ex) {
                            IOException iox = new IOException("Error occurred on serialization of JSON text.");
                            iox.initCause(ex);
                            throw iox;
                        }
                    } else {
                        if (obj.isTextOnlyObject() && !compact) {
                            writer.write("\n");
                        }
                    }
                } else {
                    if (logger.isLoggable(Level.FINEST))
                        logger.logp(Level.FINEST, className, "writeChildren(Writer, int, boolean)",
                                "Writing array of JSON objects with attribute name: [" + objName + "]");

                    try {
                        if (!compact) {
                            writeIndention(writer, depth + 1);
                            writer.write("\"" + objName + "\"");
                            writer.write(" : [\n");
                        } else {
                            writer.write("\"" + objName + "\"");
                            writer.write(":[");
                        }
                        for (int i = 0; i < vect.size(); i++) {
                            JSONObject obj = (JSONObject) vect.elementAt(i);
                            obj.writeObject(writer, depth + 2, true, compact);

                            /**
                             * Still more we haven't handled.
                             */
                            if (i != (vect.size() - 1)) {
                                if (!compact) {
                                    if (!obj.isTextOnlyObject() && !obj.isEmptyObject()) {
                                        writeIndention(writer, depth + 2);
                                    }
                                    writer.write(",\n");
                                } else {
                                    writer.write(",");
                                }
                            }
                        }

                        if (!compact) {
                            writer.write("\n");
                            writeIndention(writer, depth + 1);
                        }

                        writer.write("]");
                        if (keys.hasMoreElements()) {
                            writer.write(",");
                        }

                        if (!compact) {
                            writer.write("\n");
                        }
                    } catch (Exception ex) {
                        IOException iox = new IOException("Error occurred on serialization of JSON text.");
                        iox.initCause(ex);
                        throw iox;
                    }
                }
            }
        }
    }

    if (logger.isLoggable(Level.FINER))
        logger.exiting(className, "writeChildren(Writer, int, boolean)");
}
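
As a usage note, the example above pairs entering/exiting calls at FINER with logp calls at FINEST so that the source class and method are recorded with each trace message. A minimal sketch of that pattern using only java.util.logging follows (the class and method names here are illustrative, not from the source above):

import java.util.logging.Level;
import java.util.logging.Logger;

public class TracingSketch {

    private static final String CLASS_NAME = TracingSketch.class.getName();
    private static final Logger LOGGER = Logger.getLogger(CLASS_NAME);

    public void writeSomething(String objName) {
        if (LOGGER.isLoggable(Level.FINER)) {
            LOGGER.entering(CLASS_NAME, "writeSomething(String)");
        }

        if (LOGGER.isLoggable(Level.FINEST)) {
            // logp attaches the source class and method to the FINEST record.
            LOGGER.logp(Level.FINEST, CLASS_NAME, "writeSomething(String)",
                    "Writing child object: [" + objName + "]");
        }

        if (LOGGER.isLoggable(Level.FINER)) {
            LOGGER.exiting(CLASS_NAME, "writeSomething(String)");
        }
    }
}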

From source file:org.activiti.cycle.impl.connector.signavio.SignavioConnector.java

public List<RepositoryNode> getChildNodes(String parentId) {
    try {
        Response directoryResponse = getJsonResponse(conf.getDirectoryUrl() + parentId);
        JsonRepresentation jsonData = new JsonRepresentation(directoryResponse.getEntity());
        JSONArray relJsonArray = jsonData.toJsonArray();

        if (log.isLoggable(Level.FINEST)) {
            SignavioLogHelper.logJSONArray(log, relJsonArray);
        }

        ArrayList<RepositoryNode> nodes = new ArrayList<RepositoryNode>();
        for (int i = 0; i < relJsonArray.length(); i++) {
            JSONObject relObject = relJsonArray.getJSONObject(i);

            if ("dir".equals(relObject.getString("rel"))) {
                RepositoryFolder folderInfo = getFolderInfo(relObject);
                nodes.add(folderInfo);
            } else if ("mod".equals(relObject.getString("rel"))) {
                RepositoryArtifact fileInfo = getArtifactInfoFromFolderLink(relObject);
                nodes.add(fileInfo);
            }
        }
        return nodes;
    } catch (Exception ex) {
        throw new RepositoryException("Exception while accessing Signavio repository", ex);
    }
}

From source file:at.irian.myfaces.wscope.renderkit.html.WsServerSideStateCacheImpl.java

protected Object serializeView(FacesContext context, Object serializedView) {
    if (log.isLoggable(Level.FINEST)) {
        log.finest("Entering serializeView");
    }

    if (isSerializeStateInSession(context)) {
        if (log.isLoggable(Level.FINEST)) {
            log.finest("Processing serializeView - serialize state in session");
        }

        ByteArrayOutputStream baos = new ByteArrayOutputStream(1024);
        try {
            OutputStream os = baos;
            if (isCompressStateInSession(context)) {
                if (log.isLoggable(Level.FINEST)) {
                    log.finest("Processing serializeView - serialize compressed");
                }

                os.write(COMPRESSED_FLAG);
                os = new GZIPOutputStream(os, 1024);
            } else {
                if (log.isLoggable(Level.FINEST)) {
                    log.finest("Processing serializeView - serialize uncompressed");
                }

                os.write(UNCOMPRESSED_FLAG);
            }

            //Object[] stateArray = (Object[]) serializedView;

            ObjectOutputStream out = new ObjectOutputStream(os);

            out.writeObject(serializedView);
            //out.writeObject(stateArray[0]);
            //out.writeObject(stateArray[1]);
            out.close();
            baos.close();

            if (log.isLoggable(Level.FINEST)) {
                log.finest("Exiting serializeView - serialized. Bytes : " + baos.size());
            }
            return baos.toByteArray();
        } catch (IOException e) {
            log.log(Level.SEVERE, "Exiting serializeView - Could not serialize state: " + e.getMessage(), e);
            return null;
        }
    }

    if (log.isLoggable(Level.FINEST)) {
        log.finest("Exiting serializeView - do not serialize state in session.");
    }

    return serializedView;

}

From source file:org.apache.reef.io.network.NetworkConnectionServiceTest.java

/**
 * NetworkService messaging rate benchmark.
 */
@Test
public void testMessagingNetworkConnServiceBatchingRate() throws Exception {

    Assume.assumeFalse("Use log level INFO to run benchmarking", LOG.isLoggable(Level.FINEST));

    LOG.log(Level.FINEST, name.getMethodName());

    final int batchSize = 1024 * 1024;
    final int[] messageSizes = { 32, 64, 512 };

    for (final int size : messageSizes) {
        final String message = StringUtils.repeat('1', batchSize);
        final int numMessages = 300 / (Math.max(1, size / 512));
        final Monitor monitor = new Monitor();
        final Codec<String> codec = new StringCodec();
        try (final NetworkMessagingTestService messagingTestService = new NetworkMessagingTestService(
                localAddress)) {
            messagingTestService.registerTestConnectionFactory(groupCommClientId, numMessages, monitor, codec);
            try (final Connection<String> conn = messagingTestService
                    .getConnectionFromSenderToReceiver(groupCommClientId)) {
                final long start = System.currentTimeMillis();
                try {
                    conn.open();
                    for (int i = 0; i < numMessages; i++) {
                        conn.write(message);
                    }
                    monitor.mwait();
                } catch (final NetworkException e) {
                    e.printStackTrace();
                    throw new RuntimeException(e);
                }

                final long end = System.currentTimeMillis();
                final double runtime = ((double) end - start) / 1000;
                final long numAppMessages = numMessages * batchSize / size;
                LOG.log(Level.INFO, "size: " + size + "; messages/s: " + numAppMessages / runtime
                        + " bandwidth(bytes/s): " + ((double) numAppMessages * 2 * size) / runtime); // x2 for unicode chars
            }
        }
    }
}

From source file:com.cyberway.issue.crawler.fetcher.FetchFTP.java

/**
 * Extracts FTP links in a directory listing.
 *
 * @param curi  The curi to save extracted links to
 * @param dir   The directory listing to extract links from
 * @throws URIException  if an extracted link is invalid
 */
private void extract(CrawlURI curi, ReplayCharSequence dir) {
    logger.log(Level.FINEST, "Extracting URIs from FTP directory.");
    Matcher matcher = DIR.matcher(dir);
    while (matcher.find()) {
        String file = matcher.group(1);
        addExtracted(curi, file);
    }
}

From source file:com.granule.json.utils.XML.java

/**
 * Method to do the transform from a JSON input stream to an XML stream.
 * Neither input nor output streams are closed.  Closure is left up to the caller.
 *
 * @param JSONStream The JSON stream to convert to XML.
 * @param XMLStream The stream to write out XML to.  The contents written to this stream are always in UTF-8 format.
 * @param verbose Flag to denote whether or not to render the XML text in verbose (indented easy to read), or compact (not so easy to read, but smaller), format.
 *
 * @throws IOException Thrown if an IO error occurs.
 */
public static void toXml(InputStream JSONStream, OutputStream XMLStream, boolean verbose) throws IOException {
    if (logger.isLoggable(Level.FINER)) {
        logger.entering(className, "toXml(InputStream, OutputStream)");
    }

    if (XMLStream == null) {
        throw new NullPointerException("XMLStream cannot be null");
    } else if (JSONStream == null) {
        throw new NullPointerException("JSONStream cannot be null");
    } else {

        if (logger.isLoggable(Level.FINEST)) {
            logger.logp(Level.FINEST, className, "transform", "Parsing the JSON and a DOM builder.");
        }

        try {
            //Get the JSON from the stream.
            JSONObject jObject = new JSONObject(JSONStream);

            //Create a new document

            DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
            DocumentBuilder dBuilder = dbf.newDocumentBuilder();
            Document doc = dBuilder.newDocument();

            if (logger.isLoggable(Level.FINEST)) {
                logger.logp(Level.FINEST, className, "transform", "Parsing the JSON content to XML");
            }

            convertJSONObject(doc, doc.getDocumentElement(), jObject, "jsonObject");

            //Serialize it.
            TransformerFactory tfactory = TransformerFactory.newInstance();
            Transformer serializer = null;
            if (verbose) {
                serializer = tfactory.newTransformer(new StreamSource(new StringReader(styleSheet)));
            } else {
                serializer = tfactory.newTransformer();
            }
            Properties oprops = new Properties();
            oprops.put(OutputKeys.METHOD, "xml");
            oprops.put(OutputKeys.OMIT_XML_DECLARATION, "yes");
            oprops.put(OutputKeys.VERSION, "1.0");
            oprops.put(OutputKeys.INDENT, "true");
            serializer.setOutputProperties(oprops);
            serializer.transform(new DOMSource(doc), new StreamResult(XMLStream));

        } catch (Exception ex) {
            IOException iox = new IOException("Problem during conversion");
            iox.initCause(ex);
            throw iox;
        }
    }

    if (logger.isLoggable(Level.FINER)) {
        logger.exiting(className, "toXml(InputStream, OutputStream)");
    }
}

From source file:com.ibm.sbt.security.authentication.oauth.consumer.OAuth2Handler.java

@Override
public String createAuthorizationHeader() {
    if (logger.isLoggable(Level.FINEST)) {
        logger.entering(sourceClass, "createAuthorizationHeader", new Object[] {});
    }

    if (logger.isLoggable(Level.FINEST)) {
        logger.log(Level.FINEST, "Security Header :" + "Bearer " + accessToken);
    }
    return "Bearer " + accessToken;
}