List of usage examples for java.util.logging.Logger.warning
public void warning(String msg)
public void warning(Supplier<String> msgSupplier)
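Both overloads log at Level.WARNING; the Supplier variant (Java 8+) builds the message only if WARNING is actually loggable. A minimal sketch of both calls (the logger name is illustrative):

import java.util.logging.Logger;

public class WarningDemo {
    public static void main(String[] args) {
        Logger logger = Logger.getLogger("com.example.demo"); // illustrative name

        // Eager: the message string is built unconditionally
        logger.warning("Disk usage at " + 91 + "%");

        // Lazy (Java 8+): the Supplier runs only if WARNING is loggable
        logger.warning(() -> "Disk usage at " + 91 + "%");
    }
}

All of the examples below use the String overload.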
From source file:com.tulskiy.musique.plugins.hotkeys.HotkeyConfiguration.java
public static Map<KeyStroke, HotKeyEvent> getHotkeys(Logger logger) {
    Configuration config = Application.getInstance().getConfiguration();
    List<String> hotkeysRaw = (List<String>) config.getList(getHotkeyKey());
    Map<KeyStroke, HotKeyEvent> hotkeys = new LinkedHashMap<KeyStroke, HotKeyEvent>();
    if (!CollectionUtils.isEmpty(hotkeysRaw)) {
        for (String hotkeyRaw : hotkeysRaw) {
            try {
                String[] tokens = hotkeyRaw.split(": ");
                HotKeyEvent event = HotKeyEvent.valueOf(tokens[0]);
                KeyStroke keyStroke = KeyStroke.getKeyStroke(tokens[1]);
                hotkeys.put(keyStroke, event);
            } catch (IllegalArgumentException e) {
                logger.warning("Could not parse hotkey for string: " + hotkeyRaw);
            }
        }
    }
    return hotkeys;
}
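Each stored entry is expected to look like "EVENT_NAME: keystroke", where the keystroke part uses the Swing KeyStroke grammar. A minimal sketch of the parsing (the event name is a hypothetical stand-in for the plugin's HotKeyEvent enum; note that KeyStroke.getKeyStroke returns null rather than throwing on malformed keystroke text, so only a bad event name reaches the catch block above):

import javax.swing.KeyStroke;

public class HotkeyFormatDemo {
    public static void main(String[] args) {
        // "PLAY_PAUSE" stands in for a plugin-specific HotKeyEvent name (hypothetical).
        String raw = "PLAY_PAUSE: control pressed P";
        String[] tokens = raw.split(": ");
        KeyStroke keyStroke = KeyStroke.getKeyStroke(tokens[1]); // Swing grammar: modifiers, "pressed", key
        System.out.println(tokens[0] + " -> " + keyStroke);      // PLAY_PAUSE -> ctrl pressed P
    }
}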
From source file:com.google.apphosting.vmruntime.VmApiProxyDelegate.java
/**
 * Convert the RemoteApiPb.RpcError to the appropriate exception.
 *
 * @param rpcError the RemoteApiPb.RpcError.
 * @param packageName the name of the API package.
 * @param methodName the name of the method within the API package.
 * @param logger the Logger used to create log messages.
 * @return ApiProxyException
 */
private static ApiProxyException convertApiResponseRpcErrorToException(RemoteApiPb.RpcError rpcError,
        String packageName, String methodName, Logger logger) {
    int rpcCode = rpcError.getCode();
    String errorDetail = rpcError.getDetail();
    // The guard must be ">=" (and reject negatives): values()[rpcCode] would otherwise
    // throw ArrayIndexOutOfBoundsException for rpcCode == values().length.
    if (rpcCode < 0 || rpcCode >= RemoteApiPb.RpcError.ErrorCode.values().length) {
        logger.severe("Received unrecognized error code from server: " + rpcError.getCode() + " details: "
                + errorDetail);
        return new ApiProxy.UnknownException(packageName, methodName);
    }
    RemoteApiPb.RpcError.ErrorCode errorCode = RemoteApiPb.RpcError.ErrorCode.values()[rpcError.getCode()];
    logger.warning(
            "RPC failed, API=" + packageName + "." + methodName + " : " + errorCode + " : " + errorDetail);

    // This is very similar to apphosting/utils/runtime/ApiProxyUtils.java#convertApiError,
    // which is for APIResponse. TODO(user): retire both in favor of gRPC.
    switch (errorCode) {
    case CALL_NOT_FOUND:
        return new ApiProxy.CallNotFoundException(packageName, methodName);
    case PARSE_ERROR:
        return new ApiProxy.ArgumentException(packageName, methodName);
    case SECURITY_VIOLATION:
        logger.severe("Security violation: invalid request id used!");
        return new ApiProxy.UnknownException(packageName, methodName);
    case CAPABILITY_DISABLED:
        return new ApiProxy.CapabilityDisabledException(errorDetail, packageName, methodName);
    case OVER_QUOTA:
        return new ApiProxy.OverQuotaException(packageName, methodName);
    case REQUEST_TOO_LARGE:
        return new ApiProxy.RequestTooLargeException(packageName, methodName);
    case RESPONSE_TOO_LARGE:
        return new ApiProxy.ResponseTooLargeException(packageName, methodName);
    case BAD_REQUEST:
        return new ApiProxy.ArgumentException(packageName, methodName);
    case CANCELLED:
        return new ApiProxy.CancelledException(packageName, methodName);
    case FEATURE_DISABLED:
        return new ApiProxy.FeatureNotEnabledException(errorDetail, packageName, methodName);
    case DEADLINE_EXCEEDED:
        return new ApiProxy.ApiDeadlineExceededException(packageName, methodName);
    default:
        return new ApiProxy.UnknownException(packageName, methodName);
    }
}
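The guarded values()[rpcCode] lookup above is a general idiom for mapping a wire-format integer onto an enum by ordinal. A minimal standalone sketch, with a hypothetical enum standing in for RemoteApiPb.RpcError.ErrorCode:

public class EnumLookupDemo {
    // Hypothetical stand-in for RemoteApiPb.RpcError.ErrorCode.
    enum ErrorCode { CALL_NOT_FOUND, PARSE_ERROR, OVER_QUOTA }

    // Maps a wire-format integer to an enum constant, or null when out of range.
    static ErrorCode fromWireCode(int code) {
        ErrorCode[] values = ErrorCode.values();
        if (code < 0 || code >= values.length) {
            return null; // caller treats null as "unknown error"
        }
        return values[code];
    }

    public static void main(String[] args) {
        System.out.println(fromWireCode(1));  // PARSE_ERROR
        System.out.println(fromWireCode(99)); // null
    }
}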
From source file:jp.ikedam.jenkins.plugins.ldap_sasl.SearchGroupResolver.java
/**
 * Resolves groups by querying the LDAP directory.
 *
 * Never returns null in any case; returns an empty list instead.
 *
 * @param ctx
 * @param dn
 * @param username
 * @return List of authorities (not null)
 * @see jp.ikedam.jenkins.plugins.ldap_sasl.GroupResolver#resolveGroup(javax.naming.ldap.LdapContext, java.lang.String, java.lang.String)
 */
@Override
public List<GrantedAuthority> resolveGroup(LdapContext ctx, String dn, String username) {
    List<GrantedAuthority> authorities = new ArrayList<GrantedAuthority>();
    Logger logger = getLogger();

    if (dn == null) {
        logger.warning("Group cannot be resolved: DN of the user is not resolved!");
        return authorities;
    }

    try {
        SearchControls searchControls = new SearchControls();
        searchControls.setSearchScope(SearchControls.SUBTREE_SCOPE);
        logger.fine(String.format("Searching groups base=%s, dn=%s", getSearchBase(), dn));

        NamingEnumeration<SearchResult> entries = ctx.search((getSearchBase() != null) ? getSearchBase() : "",
                getGroupSearchQuery(dn), searchControls);
        while (entries.hasMore()) {
            SearchResult entry = entries.next();
            String groupName = entry.getAttributes().get("cn").get().toString();
            if (getPrefix() != null) {
                groupName = getPrefix() + groupName;
            }
            authorities.add(new GrantedAuthorityImpl(groupName));
            logger.fine(String.format("group: %s", groupName));
        }
        entries.close();
    } catch (NamingException e) {
        logger.log(Level.WARNING, "Failed to search groups", e);
    }

    return authorities;
}
From source file:jext2.DataInode.java
/**
 * Read inode data.
 * @param size size of the data to be read
 * @param fileOffset start address in the data area
 * @return buffer of the given size containing the data
 * @throws FileTooLarge
 * @throws IoError
 */
public ByteBuffer readData(int size, long fileOffset) throws JExt2Exception, FileTooLarge {
    /* Returning null may break things somewhere..
     * Zero length buffer breaks something in jlowfuse's c code */
    if (getSize() == 0)
        return ByteBuffer.allocateDirect(1);

    /*
     * size may be larger than the inode.size; it doesn't make sense to return
     * 4k of zeros
     */
    if (size > getSize())
        size = (int) getSize();

    ByteBuffer buf = ByteBuffer.allocateDirect(size);

    int blocksize = superblock.getBlocksize();

    long i = 0;
    long firstBlock = fileOffset / blocksize;
    long offset = fileOffset % blocksize;

    /*
     * just as size may be larger than the inode's data, the number of blocks
     * may also be.
     */
    long approxBlocks = (size / blocksize) + 1;
    long maxBlocks = this.getBlocks() / (superblock.getBlocksize() / 512);
    if (approxBlocks > maxBlocks)
        approxBlocks = maxBlocks;

    while (i < approxBlocks) {
        long start = firstBlock + i;
        long stop = firstBlock + approxBlocks;

        LinkedList<Long> b = accessData().getBlocks(start, stop);
        int blocksRead;

        /*
         * Note on the sparse file support:
         * getBlocks will return null if there is no data block for this
         * logical address. So just move the position count blocks forward.
         */
        if (b == null) { /* hole */
            blocksRead = 1;

            int unboundedLimit = buf.position() + blocksize;
            int limit = Math.min(unboundedLimit, buf.capacity());

            assert limit <= buf.capacity() : "New position, limit " + limit + " is beyond buffer's capacity, "
                    + buf;

            buf.limit(limit);
            buf.position(limit);

            assert buf.limit() == buf.position();
        } else { /* blocks */
            blocksRead = b.size();

            long pos = b.getFirst() * blocksize + offset;
            int unboundedLimit = buf.position() + blocksRead * blocksize;
            int limit = Math.min(unboundedLimit, buf.capacity());

            assert limit <= buf.capacity() : "New limit " + limit + " is beyond buffer's capacity, " + buf;

            buf.limit(limit);
            blockAccess.readToBufferUnsynchronized(pos, buf);
        }

        i += blocksRead;
        offset = 0;

        /* This should be removed soon. IllegalMonitorStateException happens
         * occasionally for unknown reasons. */
        try {
            accessData().getHierarchyLock().readLock().unlock();
        } catch (IllegalMonitorStateException e) {
            Logger log = Filesystem.getLogger();
            log.warning("IllegalMonitorStateException encountered in readData, inode=" + this);
            log.warning(String.format(
                    "context for exception: blocks=%s i=%d approxBlocks=%d off=%d buf=%s readlock=%s lock.readlock.holds=%s",
                    b, i, approxBlocks, fileOffset, buf, accessData().getHierarchyLock(),
                    accessData().getHierarchyLock().getReadHoldCount()));
        }

        if (buf.capacity() == buf.limit())
            break;
    }

    assert buf.position() == buf.limit() : "Buffer wasn't filled completely";
    assert buf.limit() == size : "Read buffer size does not match request size";

    if (buf.limit() > getSize())
        buf.limit((int) getSize());

    buf.rewind();
    return buf;
}
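The hole branch above advances through the buffer without reading anything: it moves both limit and position forward by one blocksize, clamped to capacity, so the skipped region stays zero-filled. A minimal standalone sketch of that position/limit idiom:

import java.nio.ByteBuffer;

public class HoleSkipDemo {
    public static void main(String[] args) {
        int blocksize = 16;
        ByteBuffer buf = ByteBuffer.allocate(40); // deliberately not a multiple of blocksize

        // Skip one "hole" block: advance without writing, clamped to capacity.
        int limit = Math.min(buf.position() + blocksize, buf.capacity());
        buf.limit(limit);
        buf.position(limit);
        System.out.println("after hole: position=" + buf.position() + " limit=" + buf.limit()); // 16 / 16

        // A skip near the end clamps at capacity (40), not at 48.
        buf.limit(Math.min(buf.position() + 2 * blocksize, buf.capacity()));
        buf.position(buf.limit());
        System.out.println("after clamp: position=" + buf.position() + " limit=" + buf.limit()); // 40 / 40
    }
}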
From source file:mockit.integration.logging.LoggingIntegrationsTest.java
@Test
public void jdkLoggingShouldLogNothing() {
    Logger log1 = Logger.getAnonymousLogger();
    Logger log2 = Logger.getAnonymousLogger("bundle");
    Logger log3 = Logger.getLogger(LoggingIntegrationsTest.class.getName());
    Logger log4 = Logger.getLogger(LoggingIntegrationsTest.class.getName(), "bundle");

    assertFalse(log1.isLoggable(Level.ALL));
    log1.severe("testing that logger does nothing");
    log2.setLevel(Level.WARNING);
    log2.info("testing that logger does nothing");
    log3.warning("testing that logger does nothing");
    log4.fine("testing that logger does nothing");
    log4.finest("testing that logger does nothing");
}
From source file:org.kalypso.ogc.sensor.diagview.grafik.GrafikLauncher.java
/**
 * Converts a diagram template file to a grafik tpl.
 * <p>
 * Important note: the XML-Schema for the diag template file says that if no curve element is specified for a
 * given observation, then all curves of that observation should be displayed. This is not possible here using
 * the grafik tool. As a conclusion: when a template file is meant to be used with the grafik tool, then curves
 * need to be explicitly specified in the xml.
 */
private static RememberForSync[] odt2tpl(final Obsdiagview odt, final IFolder dest, final Writer writer,
        final IProgressMonitor monitor) throws CoreException, IOException {
    final List<RememberForSync> sync = new ArrayList<>();

    final UrlResolver urlRes = new UrlResolver();
    final URL context = ResourceUtilities.createURL(dest.getParent());

    final GrafikAchsen gAchsen = new GrafikAchsen(odt.getAxis());
    final GrafikKurven gKurven = new GrafikKurven(gAchsen);

    Date xLower = null;
    Date xUpper = null;
    Number yLower = new Double(Double.MAX_VALUE);
    Number yUpper = new Double(-Double.MAX_VALUE);

    final Set<XLine> xLines = new TreeSet<>();
    final Map<Double, ValueAndColor> yLines = new HashMap<>();

    // set the timezone of the dateformat
    if (odt.getTimezone() != null && odt.getTimezone().length() > 0) {
        // FIXME: changing static data here....
        final TimeZone timeZone = TimeZone.getTimeZone(odt.getTimezone());
        GRAFIK_DF.setTimeZone(timeZone);
    }

    final Logger logger = Logger.getLogger(GrafikLauncher.class.getName());

    final IStatusCollector stati = new StatusCollector(KalypsoGisPlugin.getId());

    int cc = 1;
    final TypeObservation[] tobs = odt.getObservation().toArray(new TypeObservation[0]);
    for (final TypeObservation element : tobs) {
        // now try to locate observation file
        final URL url = urlRes.resolveURL(context, element.getHref());
        final IFile zmlFile = ResourceUtilities.findFileFromURL(url);

        // if file cannot be found, that probably means it is not local...
        // maybe make a better test later?
        if (zmlFile == null) {
            final String msg = Messages.getString("org.kalypso.ogc.sensor.diagview.grafik.GrafikLauncher.18") //$NON-NLS-1$
                    + url.toExternalForm();
            logger.warning(msg);
            stati.add(IStatus.WARNING, msg);
            continue;
        }

        final IObservation obs;
        final ITupleModel values;
        try {
            obs = ZmlFactory.parseXML(zmlFile.getLocationURI().toURL());
            values = obs.getValues(null);
        } catch (final Exception e) {
            final String msg = Messages.getString("org.kalypso.ogc.sensor.diagview.grafik.GrafikLauncher.19") //$NON-NLS-1$
                    + zmlFile.getName()
                    + Messages.getString("org.kalypso.ogc.sensor.diagview.grafik.GrafikLauncher.20") //$NON-NLS-1$
                    + e.getLocalizedMessage();
            logger.warning(msg);
            stati.add(IStatus.WARNING, msg, e);
            continue;
        }

        // find out which axes to use
        final IAxis[] axes = obs.getAxes();
        final IAxis dateAxis = ObservationUtilities.findAxisByClass(axes, Date.class);

        // REMARK: we use the version with many classes, so no exception is thrown if no number-axis was found.
        final IAxis[] numberAxes = KalypsoStatusUtils.findAxesByClasses(axes, new Class[] { Number.class }, true);

        // Just ignore this obs, if it has no number axes
        if (numberAxes.length == 0)
            continue;

        final List<IAxis> displayedAxes = new ArrayList<>(numberAxes.length);

        final List<TypeCurve> curves = element.getCurve();
        for (final TypeCurve tc : curves) {
            // create a corresponding dat-File for the current observation file
            final String datFileProtoName = FileUtilities.nameWithoutExtension(zmlFile.getName()) + "-" + cc //$NON-NLS-1$
                    + ".dat"; //$NON-NLS-1$
            final String datFileName = datFileProtoName.replace(' ', '_');
            final IFile datFile = dest.getFile(datFileName);

            final IAxis axis = gKurven.addCurve(datFile, tc, numberAxes);

            if (axis != null) {
                displayedAxes.add(axis);

                // convert to dat-file, ready to be read by the grafik tool
                zml2dat(values, datFile, dateAxis, axis, monitor);

                final RememberForSync rfs = new RememberForSync(zmlFile, datFile, axis);
                sync.add(rfs);

                cc++;

                try {
                    // fetch Y axis range for placing possible scenario text item
                    final IAxisRange range = values.getRange(axis);
                    if (range != null) {
                        final DoubleComparator dc = new DoubleComparator(0.001);
                        final Number lower = (Number) range.getLower();
                        final Number upper = (Number) range.getUpper();
                        if (dc.compare(lower, yLower) < 0)
                            yLower = lower;
                        if (dc.compare(upper, yUpper) > 0)
                            yUpper = upper;
                    }
                } catch (final SensorException e) {
                    e.printStackTrace();
                }
            } else
                Logger.getLogger(GrafikLauncher.class.getName())
                        .warning(Messages.getString("org.kalypso.ogc.sensor.diagview.grafik.GrafikLauncher.23") //$NON-NLS-1$
                                + tc.getName()
                                + Messages.getString("org.kalypso.ogc.sensor.diagview.grafik.GrafikLauncher.24")); //$NON-NLS-1$
        }

        try {
            // fetch X axis range for placing possible scenario text item
            final IAxisRange range = values.getRange(dateAxis);
            if (range != null) {
                final Date d1 = (Date) range.getLower();
                final Date d2 = (Date) range.getUpper();
                if (xLower == null || d1.before(xLower))
                    xLower = d1;
                if (xUpper == null || d2.after(xUpper))
                    xUpper = d2;
            }
        } catch (final SensorException e) {
            e.printStackTrace();
        }

        // is this obs a forecast?
        // TODO: check if odt wants forecast to be shown
        final DateRange fr = TimeseriesUtils.isTargetForecast(obs);
        if (fr != null) {
            final String strDate = GRAFIK_DF.format(fr.getFrom());
            xLines.add(new XLine(
                    Messages.getString("org.kalypso.ogc.sensor.diagview.grafik.GrafikLauncher.25") + strDate, //$NON-NLS-1$
                    strDate));
        }

        // does it have Alarmstufen? Only check if we are displaying at least a W-axis
        try {
            ObservationUtilities.findAxisByType(displayedAxes.toArray(new IAxis[displayedAxes.size()]),
                    ITimeseriesConstants.TYPE_WATERLEVEL);
            final MetadataList mdl = obs.getMetadataList();
            final String[] mds = TimeseriesUtils.findOutMDAlarmLevel(obs);
            for (final String element2 : mds) {
                final String alarmLevel = mdl.getProperty(element2);
                final Double value = NumberUtils.parseQuietDouble(alarmLevel);
                yLines.put(value, new ValueAndColor(element2 + " (" + GRAFIK_NF_W.format(value) + ")", //$NON-NLS-1$//$NON-NLS-2$
                        value.doubleValue()));
            }
        } catch (final NoSuchElementException e) {
            // ignored
        }

        displayedAxes.clear();
    }

    writer.write(gKurven.toVorlagentext());
    writer.write("\n"); //$NON-NLS-1$
    writer.write("HTitel:\t" + odt.getTitle() + "\n"); //$NON-NLS-1$ //$NON-NLS-2$
    writer.write("xTitel:\t" + gAchsen.getBottomLabel() + "\n"); //$NON-NLS-1$ //$NON-NLS-2$
    writer.write("yTitel1:\t" + gAchsen.getLeftLabel() + "\n"); //$NON-NLS-1$ //$NON-NLS-2$
    writer.write("yTitel2:\t" + gAchsen.getRightLabel() + "\n"); //$NON-NLS-1$ //$NON-NLS-2$

    // constant vertical lines...
    for (final Object element : xLines) {
        final String strDate = element.toString();
        writer.write("Senkrechte:" + strDate + '\n'); //$NON-NLS-1$
    }

    // constant horizontal lines...
    for (final Object element : yLines.keySet()) {
        final ValueAndColor vac = yLines.get(element);
        writer.write("yKonst:" + GRAFIK_NF_W.format(vac.value) + " " + vac.label + '\n'); //$NON-NLS-1$ //$NON-NLS-2$
    }

    final IStatus status = stati
            .asMultiStatusOrOK(Messages.getString("org.kalypso.ogc.sensor.diagview.grafik.GrafikLauncher.17")); //$NON-NLS-1$
    if (!status.isOK())
        throw new CoreException(status);

    final RememberForSync[] syncs = sync.toArray(new RememberForSync[sync.size()]);
    return syncs;
}
From source file:com.codelanx.codelanxlib.util.auth.UUIDFetcher.java
/**
 * Calls each supplied name individually to Mojang's servers, treating them
 * as previously used names which have since been changed. This method is
 * much slower than the other call methods, and should only be used
 * if there is a need to retrieve names which are now changed.
 *
 * @since 0.1.0
 * @version 0.1.0
 *
 * @param output Whether or not to print output
 * @param log The {@link Logger} to print to
 * @param doOutput A {@link Predicate} representing when to output a number
 * @return A {@link Map} of supplied names to relevant {@link UserInfo}.
 *         Note that this map will contain the supplied names even if they
 *         are invalid or not actual usernames (in which case, they will
 *         be mapped to {@code null}). Note names that have never been
 *         changed before will be mapped as invalid per this method
 * @throws IOException If there's a problem sending or receiving the request
 * @throws ParseException If the request response cannot be read
 * @throws InterruptedException If the thread is interrupted while sleeping
 */
public Map<String, UserInfo> callFromOldNames(boolean output, Logger log, Predicate<? super Integer> doOutput)
        throws IOException, ParseException, InterruptedException {
    Map<String, UserInfo> back = new HashMap<>();
    int completed = 0;
    int failed = 0;
    for (String s : names) {
        HttpURLConnection connection = UUIDFetcher.createSingleProfileConnection(s);
        if (connection.getResponseCode() == 429 && this.rateLimiting) {
            log.warning("[UUIDFetcher] Rate limit hit! Waiting 10 minutes until continuing conversion...");
            Thread.sleep(TimeUnit.MINUTES.toMillis(10));
            connection = UUIDFetcher.createSingleProfileConnection(s);
        }
        if (connection.getResponseCode() == 200) {
            JSONObject o = (JSONObject) this.jsonParser
                    .parse(new InputStreamReader(connection.getInputStream()));
            back.put(s, new UserInfo((String) o.get("name"), UUIDFetcher.getUUID((String) o.get("id"))));
            completed++;
        } else { // e.g. 400, 204
            if (output) {
                log.warning(String.format("No profile found for '%s', skipping...", s));
            }
            back.put(s, null);
            failed++;
            continue; // nothing can be done with the return
        }
        if (output) {
            int processed = completed + failed;
            if (doOutput.test(processed) || processed == this.names.size()) {
                log.info(String.format("[UUIDFetcher] Progress: %d/%d, %.2f%%, Failed names: %d", processed,
                        this.names.size(), ((double) processed / this.names.size()) * 100D, failed));
            }
        }
    }
    return back;
}
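The single fixed 10-minute sleep above is one way of honoring an HTTP 429 response. A minimal standalone sketch of the more general retry-with-backoff idiom it approximates; the URL handling and retry parameters here are illustrative assumptions, not part of the library:

import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.concurrent.TimeUnit;

public class BackoffDemo {
    // Retries a GET on HTTP 429, doubling the wait each time (illustrative parameters).
    static HttpURLConnection getWithBackoff(String url, int maxRetries)
            throws IOException, InterruptedException {
        long waitMillis = TimeUnit.SECONDS.toMillis(30);
        for (int attempt = 0; ; attempt++) {
            HttpURLConnection conn = (HttpURLConnection) new URL(url).openConnection();
            if (conn.getResponseCode() != 429 || attempt >= maxRetries) {
                return conn; // success, a non-rate-limit error, or retries exhausted
            }
            conn.disconnect();
            Thread.sleep(waitMillis);
            waitMillis *= 2; // exponential backoff
        }
    }
}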
From source file:org.protempa.backend.dsb.relationaldb.ConstantResultProcessor.java
@Override
public void process(ResultSet resultSet) throws SQLException {
    ResultCache<Constant> results = getResults();
    EntitySpec entitySpec = getEntitySpec();
    String entitySpecName = entitySpec.getName();
    //boolean hasRefs = entitySpec.getInboundRefSpecs().length > 0;
    String[] propIds = entitySpec.getPropositionIds();
    ColumnSpec codeSpec = entitySpec.getCodeSpec();
    if (codeSpec != null) {
        List<ColumnSpec> codeSpecL = codeSpec.asList();
        codeSpec = codeSpecL.get(codeSpecL.size() - 1);
    }
    Logger logger = SQLGenUtil.logger();
    PropertySpec[] propertySpecs = entitySpec.getPropertySpecs();
    Value[] propertyValues = new Value[propertySpecs.length];
    int count = 0;
    String[] uniqueIds = new String[entitySpec.getUniqueIdSpecs().length];
    SourceSystem dsType = DataSourceBackendSourceSystem.getInstance(getDataSourceBackendId());
    ResultSetMetaData resultSetMetaData = resultSet.getMetaData();
    int[] columnTypes = new int[resultSetMetaData.getColumnCount()];
    for (int i = 0; i < columnTypes.length; i++) {
        columnTypes[i] = resultSetMetaData.getColumnType(i + 1);
    }
    while (resultSet.next()) {
        int i = 1;
        String keyId = resultSet.getString(i++);
        if (keyId == null) {
            logger.warning("A keyId is null. Skipping record.");
            continue;
        }
        i = readUniqueIds(uniqueIds, resultSet, i);
        if (Arrays.contains(uniqueIds, null)) {
            // Skip the record whether or not WARNING is loggable (the original
            // nested this continue inside the logging guard, so records with
            // null unique ids were processed whenever WARNING was disabled).
            if (logger.isLoggable(Level.WARNING)) {
                logger.log(Level.WARNING, "Unique ids contain null ({0}). Skipping record.",
                        StringUtils.join(uniqueIds, ", "));
            }
            continue;
        }
        UniqueId uniqueId = generateUniqueId(entitySpecName, uniqueIds);
        String propId = null;
        if (!isCasePresent()) {
            if (codeSpec == null) {
                assert propIds.length == 1 : "Don't know which proposition id to assign to";
                propId = propIds[0];
            } else {
                String code = resultSet.getString(i++);
                propId = sqlCodeToPropositionId(codeSpec, code);
                if (propId == null) {
                    continue;
                }
            }
        } else {
            i++;
        }
        i = extractPropertyValues(resultSet, i, propertyValues, columnTypes);
        if (isCasePresent()) {
            propId = resultSet.getString(i++);
        }
        Constant cp = new Constant(propId, uniqueId);
        for (int j = 0; j < propertySpecs.length; j++) {
            PropertySpec propertySpec = propertySpecs[j];
            cp.setProperty(propertySpec.getName(), propertyValues[j]);
        }
        cp.setSourceSystem(dsType);
        logger.log(Level.FINEST, "Created constant {0}", cp);
        results.add(keyId, cp);
        if (++count % FLUSH_SIZE == 0) {
            try {
                results.flush(this);
            } catch (IOException ex) {
                throw new QueryResultsCacheException("Flushing constants to cache failed", ex);
            }
            if (logger.isLoggable(Level.FINE)) {
                Logging.logCount(logger, Level.FINE, count, "Retrieved {0} record", "Retrieved {0} records");
            }
        }
    }
    try {
        results.flush(this);
    } catch (IOException ex) {
        throw new QueryResultsCacheException("Flushing constants to cache failed", ex);
    }
    if (logger.isLoggable(Level.FINE)) {
        Logging.logCount(logger, Level.FINE, count, "Retrieved {0} record total",
                "Retrieved {0} records total");
    }
}
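This processor (and the near-identical one below) flushes its ResultCache twice: every FLUSH_SIZE records inside the loop, then once after the loop for the final partial batch. A minimal standalone sketch of that batch-flush idiom, with an illustrative FLUSH_SIZE and a plain list standing in for the cache:

import java.util.ArrayList;
import java.util.List;

public class BatchFlushDemo {
    static final int FLUSH_SIZE = 3; // illustrative; the real value lives in the processor

    public static void main(String[] args) {
        List<String> buffer = new ArrayList<>();
        int count = 0;
        for (String record : new String[] { "a", "b", "c", "d", "e" }) {
            buffer.add(record);
            // Flush every FLUSH_SIZE records so memory use stays bounded.
            if (++count % FLUSH_SIZE == 0) {
                System.out.println("flushing " + buffer);
                buffer.clear();
            }
        }
        // Final flush for the partial batch left over after the loop.
        System.out.println("final flush " + buffer);
    }
}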
From source file:org.protempa.backend.dsb.relationaldb.PrimitiveParameterResultProcessor.java
@Override
public void process(ResultSet resultSet) throws SQLException {
    ResultCache<PrimitiveParameter> results = getResults();
    EntitySpec entitySpec = getEntitySpec();
    String entitySpecName = entitySpec.getName();
    //boolean hasRefs = entitySpec.getInboundRefSpecs().length > 0;
    String[] propIds = entitySpec.getPropositionIds();
    ColumnSpec codeSpec = entitySpec.getCodeSpec();
    if (codeSpec != null) {
        List<ColumnSpec> codeSpecL = codeSpec.asList();
        codeSpec = codeSpecL.get(codeSpecL.size() - 1);
    }
    Logger logger = SQLGenUtil.logger();
    PropertySpec[] propertySpecs = entitySpec.getPropertySpecs();
    Value[] propertyValues = new Value[propertySpecs.length];
    int count = 0;
    String[] uniqueIds = new String[entitySpec.getUniqueIdSpecs().length];
    ResultSetMetaData resultSetMetaData = resultSet.getMetaData();
    int[] columnTypes = new int[resultSetMetaData.getColumnCount()];
    for (int i = 0; i < columnTypes.length; i++) {
        columnTypes[i] = resultSetMetaData.getColumnType(i + 1);
    }
    SourceSystem dsType = DataSourceBackendSourceSystem.getInstance(getDataSourceBackendId());
    while (resultSet.next()) {
        int i = 1;
        String keyId = resultSet.getString(i++);
        if (keyId == null) {
            logger.warning("A keyId is null. Skipping record.");
            continue;
        }
        i = readUniqueIds(uniqueIds, resultSet, i);
        if (Arrays.contains(uniqueIds, null)) {
            // Skip the record whether or not WARNING is loggable (see the note in
            // ConstantResultProcessor above; the original nested this continue
            // inside the logging guard).
            if (logger.isLoggable(Level.WARNING)) {
                logger.log(Level.WARNING, "Unique ids contain null ({0}). Skipping record.",
                        StringUtils.join(uniqueIds, ", "));
            }
            continue;
        }
        UniqueId uniqueId = generateUniqueId(entitySpecName, uniqueIds);
        String propId = null;
        if (!isCasePresent()) {
            if (codeSpec == null) {
                assert propIds.length == 1 : "Don't know which proposition id to assign to";
                propId = propIds[0];
            } else {
                String code = resultSet.getString(i++);
                propId = sqlCodeToPropositionId(codeSpec, code);
                if (propId == null) {
                    continue;
                }
            }
        } else {
            i++;
        }
        Long timestamp = null;
        try {
            timestamp = entitySpec.getPositionParser().toPosition(resultSet, i, columnTypes[i - 1]);
            i++;
        } catch (SQLException e) {
            logger.log(Level.WARNING, "Could not parse timestamp. Leaving timestamp unset.", e);
        }
        ValueType valueType = entitySpec.getValueType();
        String cpValStr = resultSet.getString(i++);
        Value cpVal = valueType.parse(cpValStr);
        i = extractPropertyValues(resultSet, i, propertyValues, columnTypes);
        if (isCasePresent()) {
            propId = resultSet.getString(i++);
        }
        PrimitiveParameter p = new PrimitiveParameter(propId, uniqueId);
        p.setPosition(timestamp);
        p.setGranularity(entitySpec.getGranularity());
        p.setValue(cpVal);
        for (int j = 0; j < propertySpecs.length; j++) {
            PropertySpec propertySpec = propertySpecs[j];
            p.setProperty(propertySpec.getName(), propertyValues[j]);
        }
        p.setSourceSystem(dsType);
        logger.log(Level.FINEST, "Created primitive parameter {0}", p);
        results.add(keyId, p);
        if (++count % FLUSH_SIZE == 0) {
            try {
                results.flush(this);
            } catch (IOException ex) {
                throw new QueryResultsCacheException("Flushing primitive parameters to cache failed", ex);
            }
            if (logger.isLoggable(Level.FINE)) {
                Logging.logCount(logger, Level.FINE, count, "Retrieved {0} record", "Retrieved {0} records");
            }
        }
    }
    try {
        results.flush(this);
    } catch (IOException ex) {
        throw new QueryResultsCacheException("Flushing primitive parameters to cache failed", ex);
    }
    if (logger.isLoggable(Level.FINE)) {
        Logging.logCount(logger, Level.FINE, count, "Retrieved {0} record total",
                "Retrieved {0} records total");
    }
}
From source file:org.usrz.libs.logging.LevelTraceTest.java
@Test
public void testJavaLogging() {
    final java.util.logging.Logger logger = java.util.logging.Logger.getLogger(this.getClass().getName());

    logger.finest("Foobar FINEST");
    AppenderForTests.hasLastEvent("at Finest level");
    assertTrue(logger.isLoggable(java.util.logging.Level.FINEST));

    logger.finer("Foobar FINER");
    AppenderForTests.hasLastEvent("at Finer level");
    assertTrue(logger.isLoggable(java.util.logging.Level.FINER));

    logger.fine("Foobar FINE");
    AppenderForTests.hasLastEvent("at Fine level");
    assertTrue(logger.isLoggable(java.util.logging.Level.FINE));

    logger.config("Foobar CONFIG");
    AppenderForTests.hasLastEvent("at Config level");
    assertTrue(logger.isLoggable(java.util.logging.Level.CONFIG));

    logger.info("Foobar INFO");
    AppenderForTests.hasLastEvent("at Info level");
    assertTrue(logger.isLoggable(java.util.logging.Level.INFO));

    logger.warning("Foobar WARNING");
    AppenderForTests.hasLastEvent("at Warning level");
    assertTrue(logger.isLoggable(java.util.logging.Level.WARNING));

    logger.severe("Foobar SEVERE");
    AppenderForTests.hasLastEvent("at Severe level");
    assertTrue(logger.isLoggable(java.util.logging.Level.SEVERE));
}