List of usage examples for java.util.logging Level FINEST
Level FINEST
To view the source code for a java.util.logging Level.FINEST usage example, click its Source Link below.
From source file:com.clothcat.hpoolauto.model.HtmlGenerator.java
/** * Pretty print the given HTML/*from www . j av a 2 s . c o m*/ */ private static String tidyHtml(String html) { String result = ""; try { try (StringWriter out = new StringWriter()) { try (InputStream in = new ByteArrayInputStream(html.getBytes())) { Tidy tidy = new Tidy(); tidy.setIndentContent(true); tidy.parse(in, out); result = out.toString(); } } } catch (IOException ex) { HLogger.log(Level.FINEST, "Caught exception in tidyHtml()", ex); } return result; }
From source file:com.granule.json.utils.XML.java
/**
 * Method to do the transform from an XML input stream to a JSON stream.
 * Neither input nor output streams are closed.  Closure is left up to the caller.
 *
 * @param XMLStream The XML stream to convert to JSON
 * @param JSONStream The stream to write out JSON to.  The contents written to this stream are always in UTF-8 format.
 * @param verbose Flag to denote whether or not to render the JSON text in verbose (indented easy to read), or compact (not so easy to read, but smaller), format.
 *
 * @throws SAXException Thrown if a parse error occurs.
 * @throws IOException Thrown if an IO error occurs.
 */
public static void toJson(InputStream XMLStream, OutputStream JSONStream, boolean verbose)
        throws SAXException, IOException {
    if (logger.isLoggable(Level.FINER)) {
        logger.entering(className, "toJson(InputStream, OutputStream)");
    }

    if (XMLStream == null) {
        throw new NullPointerException("XMLStream cannot be null");
    } else if (JSONStream == null) {
        throw new NullPointerException("JSONStream cannot be null");
    } else {
        if (logger.isLoggable(Level.FINEST)) {
            logger.logp(Level.FINEST, className, "transform",
                    "Fetching a SAX parser for use with JSONSAXHandler");
        }

        try {
            // Get a parser.
            SAXParserFactory factory = SAXParserFactory.newInstance();
            factory.setNamespaceAware(true);
            // Harden against XXE: the XML may originate from an untrusted
            // source, so refuse to resolve external entities while parsing.
            factory.setFeature("http://xml.org/sax/features/external-general-entities", false);
            factory.setFeature("http://xml.org/sax/features/external-parameter-entities", false);
            SAXParser sParser = factory.newSAXParser();
            XMLReader parser = sParser.getXMLReader();
            JSONSAXHandler jsonHandler = new JSONSAXHandler(JSONStream, verbose);
            parser.setContentHandler(jsonHandler);
            parser.setErrorHandler(jsonHandler);

            InputSource source = new InputSource(new BufferedInputStream(XMLStream));

            if (logger.isLoggable(Level.FINEST)) {
                logger.logp(Level.FINEST, className, "transform", "Parsing the XML content to JSON");
            }

            // Parse it.
            source.setEncoding("UTF-8");
            parser.parse(source);
            // Ensure any buffered JSON output reaches the caller's stream.
            jsonHandler.flushBuffer();
        } catch (javax.xml.parsers.ParserConfigurationException pce) {
            // Preserve the original cause so callers can diagnose the parser failure.
            throw new SAXException("Could not get a parser: " + pce.toString(), pce);
        }
    }

    if (logger.isLoggable(Level.FINER)) {
        logger.exiting(className, "toJson(InputStream, OutputStream)");
    }
}
From source file:org.apache.myfaces.ov2021.application.jsp.JspStateManagerImpl.java
/**
 * Default constructor; emits a trace message when FINEST logging is enabled.
 */
public JspStateManagerImpl() {
    if (log.isLoggable(Level.FINEST)) {
        log.finest("New JspStateManagerImpl instance created");
    }
}
From source file:hudson.plugins.plot.CSVSeries.java
/**
 * This function checks the exclusion/inclusion filters from the properties
 * file and returns true if a point should be excluded.
 *
 * @param label the CSV column label of the point being tested
 * @param index the zero-based CSV column index of the point being tested
 * @return true if the point should be excluded based on label or column
 */
private boolean excludePoint(String label, int index) {
    // No filtering configured: keep every point.
    if (inclusionFlag == null || inclusionFlag == InclusionFlag.OFF)
        return false;

    boolean retVal = false;
    switch (inclusionFlag) {
    case INCLUDE_BY_STRING:
        // if the set contains it, don't exclude it.
        retVal = !(strExclusionSet.contains(label));
        break;

    case EXCLUDE_BY_STRING:
        // if the set contains it, exclude it.
        // (Original comment said "doesn't contain" — the code excludes matches.)
        retVal = strExclusionSet.contains(label);
        break;

    case INCLUDE_BY_COLUMN:
        // if the set contains it, don't exclude it.
        retVal = !(colExclusionSet.contains(Integer.valueOf(index)));
        break;

    case EXCLUDE_BY_COLUMN:
        // if the set contains it, exclude it.
        // (Original comment said "don't exclude" — the code excludes matches.)
        retVal = colExclusionSet.contains(Integer.valueOf(index));
        break;
    }

    if (LOGGER.isLoggable(Level.FINEST))
        LOGGER.finest(((retVal) ? "excluded" : "included") + " CSV Column: " + index + " : " + label);

    return retVal;
}
From source file:com.socrata.Dataset.java
/**
 * Creates a new column in the dataset.
 *
 * @param name the unique name of the column
 * @param description an optional description for the column
 * @param type the data type; e.g. text, number, url
 * @param width how many pixels wide the column should display
 * @param hidden whether or not the column is hidden
 * @param rdfProperties optional RDF properties for the column; omitted from the request when null
 * @return column properties or null if error
 */
public JSONObject addColumn(String name, String description, DataType type, Integer width, Boolean hidden,
        String rdfProperties) {
    // Columns can only be added when this Dataset is attached to a server-side view.
    if (!attached()) {
        return null;
    }
    log(Level.FINEST, "Creating column '" + name + "' of type '" + type + "'");

    // Build the JSON body describing the new column.
    JSONObject columnJson = new JSONObject();
    try {
        columnJson.put("name", name);
        columnJson.put("description", description);
        columnJson.put("dataTypeName", getDataTypeName(type));
        columnJson.put("hidden", hidden);
        columnJson.put("width", width);
        if (type == DataType.RICHTEXT) {
            // Rich-text columns need an explicit formatting option.
            Map<String, String> map = new HashMap<String, String>();
            map.put("formatting_option", "rich");
            columnJson.put("format", map);
        }
        if (rdfProperties != null) {
            columnJson.put("rdfProperties", rdfProperties);
        }
    } catch (JSONException ex) {
        // NOTE(review): on JSON failure the request is still sent below with
        // whatever fields were added so far — confirm this is intended.
        log(Level.SEVERE, "Could not create column JSON data for addColumn()", ex);
    }

    HttpPost request = new HttpPost(httpBase() + "/views/" + id() + "/columns.json");
    try {
        request.setEntity(new StringEntity(columnJson.toString()));
    } catch (UnsupportedEncodingException ex) {
        log(Level.SEVERE, "Could not encode column data in Dataset.addColumn().", ex);
        return null;
    }

    JsonPayload response = performRequest(request);
    if (isErroneous(response)) {
        log(Level.SEVERE, "Error in column creation, see logs", null);
        return null;
    }
    if (response.getObject() == null) {
        log(Level.SEVERE, "Received empty response from server on Dataset.addColumn().", null);
        return null;
    }
    return response.getObject();
}
From source file:org.apache.myfaces.ov2021.application.jsp.JspStateManagerImpl.java
@Override protected Object getComponentStateToSave(FacesContext facesContext) { if (log.isLoggable(Level.FINEST)) log.finest("Entering getComponentStateToSave"); UIViewRoot viewRoot = facesContext.getViewRoot(); if (viewRoot.isTransient()) { return null; }/*from w w w. ja va2 s. c om*/ Object serializedComponentStates = viewRoot.processSaveState(facesContext); //Locale is a state attribute of UIViewRoot and need not be saved explicitly if (log.isLoggable(Level.FINEST)) log.finest("Exiting getComponentStateToSave"); return serializedComponentStates; }
From source file:com.ibm.datapower.amt.clientAPI.Blob.java
/**
 * Create a new blob object from a byte array. We recommend against using
 * this, especially for the long term, because it means that the entire byte
 * array will be resident in memory for the lifetime of this object. It is
 * preferred that you use the {@link #Blob(File)} constructor so that this
 * object does not trigger large memory usage.
 *
 * @param bytes
 *            the byte array that contains the binary data. This class will
 *            reference this byte array and not copy it, so beware of making
 *            changes to your array after you use it as an argument for this
 *            constructor.
 */
public Blob(byte[] bytes) {
    final String METHOD_NAME = "Blob(byte[])"; //$NON-NLS-1$
    String message = "First 20 bytes of Blob: "; //$NON-NLS-1$
    if (bytes != null) {
        // Defensive copy so callers mutating their array don't change the blob.
        this.bytes = bytes.clone();
        logger.logp(Level.FINER, CLASS_NAME, METHOD_NAME,
                "Creating Blob from byte array of length " + bytes.length); //$NON-NLS-1$
        // StringBuilder over StringBuffer: the buffer is method-local, so
        // StringBuffer's synchronization is pure overhead.
        StringBuilder buf = new StringBuilder(message);
        for (int i = 0; i < 20 && i < bytes.length; i++) {
            // Mask with 0xff so negative bytes render as two hex digits, not a
            // sign-extended value.
            buf.append(Integer.toHexString(bytes[i] & 0xff)).append(" "); //$NON-NLS-1$
        }
        message = buf.toString();
    }
    logger.logp(Level.FINEST, CLASS_NAME, METHOD_NAME, message);
}
From source file:com.prowidesoftware.swift.model.SwiftMessage.java
/** * Add a block to this message./*from w ww.ja v a 2 s . c o m*/ * <p>Notes: on user blocks, no checks are done, on swift blocks, block number * must be non null and have a value from 1-5 both inclusive</p> * * @param b the block to add, may be <code>null</code> in which case nothing happens * @throws IllegalArgumentException <code>b</code> is <code>null</code> or the method getInt in the block returns a value out of range (non user blocks) */ public void addBlock(final SwiftBlock b) { if (log.isLoggable(Level.FINEST)) { log.finest("Add block " + b); } Validate.notNull(b); // support for user blocks in this method is useful for XML parser and other code that // takes advantages of using SwiftTagListBlock if (b instanceof SwiftBlockUser) { addUserBlock((SwiftBlockUser) b); } else { Validate.notNull(b.getNumber(), "SwiftBlock.getNumber() is null"); final int index = b.getNumber().intValue(); Validate.isTrue(index >= 1 && index <= 5, "SwiftBlock.getNumber int did not return an int between 1-5"); switch (index) { case 1: setBlock1((SwiftBlock1) b); break; case 2: setBlock2((SwiftBlock2) b); break; case 3: setBlock3((SwiftBlock3) b); break; case 4: setBlock4((SwiftBlock4) b); break; case 5: setBlock5((SwiftBlock5) b); break; default: log.severe("Invalid block number " + b + ". Expected numbers are 1 to 5"); break; } } }
From source file:fr.ortolang.diffusion.store.binary.BinaryStoreServiceBean.java
@Override @TransactionAttribute(TransactionAttributeType.SUPPORTS) public String type(String identifier, String filename) throws BinaryStoreServiceException, DataNotFoundException { Path path = getPathForIdentifier(identifier); if (!Files.exists(path)) { throw new DataNotFoundException("Unable to find an object with id [" + identifier + "] in the storage"); }// w w w . j ava 2 s .c o m try (InputStream is = Files.newInputStream(path)) { Tika tika = new Tika(); String type; if (Files.size(path) < 50000000) { LOGGER.log(Level.FINEST, "file size is not too large, trying to detect also containers"); try (TikaInputStream tis = TikaInputStream.get(is)) { type = tika.detect(tis, filename); } } else { LOGGER.log(Level.FINEST, "file size is TOO large, does not detect types inside containers"); type = tika.detect(is, filename); } return type; } catch (Exception e) { throw new BinaryStoreServiceException(e); } }
From source file:org.apache.reef.runtime.azbatch.evaluator.EvaluatorShim.java
/**
 * Downloads and extracts the evaluator's file resources (if any), persists the
 * evaluator configuration to disk, then launches the evaluator process and
 * blocks until it exits.
 *
 * @param launchCommand         space-separated command line used to start the evaluator process
 * @param evaluatorConfigString serialized evaluator configuration persisted before launch
 * @param fileResourcesUrl      optional URL of a file-resource archive; skipped when blank
 */
private void onEvaluatorLaunch(final String launchCommand, final String evaluatorConfigString,
        final String fileResourcesUrl) {
    LOG.log(Level.FINEST, "Entering EvaluatorShim.onEvaluatorLaunch().");

    if (StringUtils.isNotBlank(fileResourcesUrl)) {
        LOG.log(Level.FINER, "Downloading evaluator resource file archive from {0}.", fileResourcesUrl);
        try {
            File tmpFile = downloadFile(fileResourcesUrl);
            extractFiles(tmpFile);
        } catch (StorageException | IOException e) {
            LOG.log(Level.SEVERE, "Failed to download evaluator file resources: {0}. {1}",
                    new Object[] { fileResourcesUrl, e });
            throw new RuntimeException(e);
        }
    } else {
        LOG.log(Level.FINER, "No file resources URL given.");
    }

    // Persist the evaluator configuration where the evaluator process expects it.
    File evaluatorConfigurationFile = new File(this.reefFileNames.getEvaluatorConfigurationPath());
    LOG.log(Level.FINER, "Persisting evaluator config at: {0}", evaluatorConfigurationFile.getAbsolutePath());

    try {
        boolean newFileCreated = evaluatorConfigurationFile.createNewFile();
        LOG.log(Level.FINEST,
                newFileCreated ? "Created a new file for persisting evaluator configuration at {0}."
                        : "Using existing file for persisting evaluator configuration at {0}.",
                evaluatorConfigurationFile.getAbsolutePath());

        // Round-trip through the serializer: validates the config string before writing.
        Configuration evaluatorConfiguration = this.configurationSerializer.fromString(evaluatorConfigString);
        this.configurationSerializer.toFile(evaluatorConfiguration, evaluatorConfigurationFile);
    } catch (final IOException | BindException e) {
        LOG.log(Level.SEVERE, "An unexpected exception occurred while attempting to deserialize and write "
                + "Evaluator configuration file. {0}", e);
        throw new RuntimeException("Unable to write configuration.", e);
    }

    LOG.log(Level.INFO, "Launching the evaluator by invoking the following command: " + launchCommand);

    try {
        final List<String> command = Arrays.asList(launchCommand.split(" "));
        // Redirect the evaluator's stderr/stdout to Azure Batch task files.
        this.evaluatorProcess = new ProcessBuilder().command(command)
                .redirectError(new File(this.azureBatchFileNames.getEvaluatorStdErrFilename()))
                .redirectOutput(new File(this.azureBatchFileNames.getEvaluatorStdOutFilename())).start();

        // This will block the current thread until the Evaluator process completes.
        this.evaluatorProcessExitValue = EvaluatorShim.this.evaluatorProcess.waitFor();

        LOG.log(Level.INFO, "Evaluator process completed with exit value: {0}.", this.evaluatorProcessExitValue);
    } catch (IOException | InterruptedException e) {
        // NOTE(review): on InterruptedException the interrupt flag is not
        // restored (Thread.currentThread().interrupt()) — confirm intended.
        throw new RuntimeException(e);
    }

    LOG.log(Level.FINEST, "Exiting EvaluatorShim.onEvaluatorLaunch().");
}