List of usage examples for java.util.logging Level FINER
Level FINER
To view the source code for java.util.logging Level FINER, click the Source Link below.
From source file:hudson.model.Run.java
/** * Sets the {@link #getResult} of this build. * Has no effect when the result is already set and worse than the proposed result. * May only be called after the build has started and before it has moved into post-production * (normally meaning both {@link #isInProgress} and {@link #isBuilding} are true). * @param r the proposed new result// ww w .j a va 2 s.co m * @throws IllegalStateException if the build has not yet started, is in post-production, or is complete */ public void setResult(@Nonnull Result r) { if (state != State.BUILDING) { throw new IllegalStateException("cannot change build result while in " + state); } // result can only get worse if (result == null || r.isWorseThan(result)) { result = r; LOGGER.log(FINE, this + " in " + getRootDir() + ": result is set to " + r, LOGGER.isLoggable(Level.FINER) ? new Exception() : null); } }
From source file:org.geoserver.wfs.response.SpatiaLiteOutputFormatDev.java
private int getSpatialSRID(CoordinateReferenceSystem crs) { try {//from w w w.j a v a 2s .co m return CRS.lookupEpsgCode(crs, true); } catch (FactoryException e) { System.out.println(e.getMessage()); LOGGER.log(Level.FINER, e.getMessage(), e); return -1; } }
From source file:com.google.enterprise.connector.salesforce.storetype.DBStore.java
public void setDocList(String checkpoint, String str_store_entry) { DatabaseMetaData dbm = null;/*from ww w . j a v a 2 s. c o m*/ Connection connection = null; logger.log(Level.FINEST, "Setting doclist " + checkpoint); logger.log(Level.FINEST, "Setting store_entry " + str_store_entry); try { connection = ds.getConnection(); connection.setAutoCommit(true); dbm = connection.getMetaData(); //logger.log(Level.FINE,"Base64 ENCODING..."); String encode_entry = new String( org.apache.commons.codec.binary.Base64.encodeBase64(str_store_entry.getBytes())); str_store_entry = encode_entry; //logger.log(Level.FINE,"Setting store_entry ENCODED " + str_store_entry); if (dbm.getDatabaseProductName().equals("MySQL")) { //get the most recent row Statement statement = connection.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_READ_ONLY); String update_stmt = "select crawl_set from " + this.instance_table + " where crawl_set=(select max(crawl_set) from " + this.instance_table + ")"; logger.log(Level.FINE, "Getting lastentryp in db: " + update_stmt); ResultSet rs = statement.executeQuery(update_stmt); boolean ret_rows = rs.first(); String last_entry_in_db = null; while (ret_rows) { BigDecimal crawl_set = rs.getBigDecimal("crawl_set"); last_entry_in_db = crawl_set.toPlainString(); ret_rows = rs.next(); } logger.log(Level.FINER, "Last_Entry_in_Database " + last_entry_in_db); if (last_entry_in_db != null) { if (last_entry_in_db.startsWith(checkpoint)) { //increment if in the same set BigDecimal bd = new BigDecimal(last_entry_in_db); bd = bd.add(new BigDecimal(".00001")); logger.log(Level.INFO, "Adding to DBStore. 
Index Value: " + bd.toPlainString()); update_stmt = "insert into " + this.instance_table + " (crawl_set,crawl_data) values (?,COMPRESS(?))"; PreparedStatement ps = connection.prepareStatement(update_stmt); ps.setString(1, bd.toPlainString()); ps.setString(2, str_store_entry); ps.executeUpdate(); ps.close(); } else { //otherwise add the the 0th row for this set logger.log(Level.INFO, "Adding to DBStore. Index Value: " + checkpoint + ".00000"); update_stmt = "insert into " + this.instance_table + " (crawl_set,crawl_data) values (?,COMPRESS(?))"; PreparedStatement ps = connection.prepareStatement(update_stmt); ps.setString(1, checkpoint + ".00000"); ps.setString(2, str_store_entry); ps.executeUpdate(); ps.close(); } } else { logger.log(Level.INFO, "Adding to DBStore. Index Value: " + checkpoint + ".00000"); update_stmt = "insert into " + this.instance_table + " (crawl_set,crawl_data) values (?,COMPRESS(?))"; PreparedStatement ps = connection.prepareStatement(update_stmt); ps.setString(1, checkpoint + ".00000"); ps.setString(2, str_store_entry); ps.executeUpdate(); ps.close(); } rs.close(); statement.close(); connection.close(); } } catch (Exception ex) { logger.log(Level.SEVERE, "Exception initializing context Datasource " + ex); return; } }
From source file:edu.umass.cs.reconfiguration.ActiveReplica.java
/**
 * Entry point for all incoming messages. First tries to interpret the JSON as a
 * reconfiguration packet and hand it to the protocol executor; otherwise, if it
 * is an app request, parses it and passes it to the app via its coordinator,
 * sending an {@code ActiveReplicaError} back to the client when the app cannot
 * handle it. Always returns {@code false}.
 */
@Override
public boolean handleMessage(JSONObject jsonObject) {
    log.log(Level.FINER, "{0} handleMessage received {1}", new Object[] { this, jsonObject });
    long entryTime = System.nanoTime();
    BasicReconfigurationPacket<NodeIDType> rcPacket = null;
    try {
        // try handling as reconfiguration packet through protocol task
        if (
        /* Assumed here that ReconfigurationPacket's JSON implementation
         * won't change, so it won't be a JSONObjectWrapper. */
        !(jsonObject instanceof JSONMessenger.JSONObjectWrapper)
                && ReconfigurationPacket.isReconfigurationPacket(jsonObject)
                && (rcPacket = this.protocolTask.getReconfigurationPacket(jsonObject)) != null) {
            if (!this.protocolExecutor.handleEvent(rcPacket)) {
                // do nothing
                log.log(Level.FINE, "{0} unable to handle packet {1}", new Object[] { this, jsonObject });
            }
        }
        // else check if app request
        else if (isAppRequest(jsonObject)) {
            log.log(Level.FINER, "{0} handleMessage received appRequest {1}",
                    new Object[] { this, jsonObject });
            // long startTime = System.currentTimeMillis();
            Request request = this.getRequest(jsonObject);
            // Recover sender/receiver addresses either from the raw byte wrapper or
            // from the address fields stamped into the JSON by the NIO transport.
            NIOHeader header = jsonObject instanceof JSONMessenger.JSONObjectWrapper
                    ? NIOHeader.getNIOHeader(
                            (byte[]) ((JSONMessenger.JSONObjectWrapper) jsonObject).getObj())
                    : new NIOHeader(MessageNIOTransport.getSenderAddress(jsonObject),
                            MessageNIOTransport.getReceiverAddress(jsonObject));
            log.log(Level.FINE, "{0} received app request {1}:{2}:{3}",
                    new Object[] { this, request.getRequestType(), request.getServiceName(),
                            (request instanceof ClientRequest
                                    ? ((ClientRequest) request).getRequestID()
                                    : "") });
            boolean isCoordinatedRequest = isCoordinated(request);
            SenderAndRequest senderAndRequest = new SenderAndRequest(request, header.sndr, header.rcvr,
                    // startTime
                    entryTime);
            // enqueue demand stats sending callback
            if (ENQUEUE_REQUEST && isCoordinatedRequest)
                enqueue(senderAndRequest);
            // app doesn't understand ReplicableClientRequest
            if (!isCoordinatedRequest && request instanceof ReplicableClientRequest)
                request = this.appCoordinator.getRequest((ReplicableClientRequest) request, header);
            // send to app via its coordinator
            boolean handled = this.handRequestToApp(makeClientRequest(request, header.sndr),
                    ENQUEUE_REQUEST ?
                    // generic callback
                            this :
                            // request-specific callback
                            senderAndRequest);
            InetSocketAddress sender = header.sndr, receiver = header.rcvr;
            if (handled) {
                if (!isCoordinatedRequest)
                    // this.sendResponse(request, senderAndRequest, false)
                    ; // else do nothing as coordinated callback will be called
            } else {
                // if failed, dequeue useless enqueue
                if (isCoordinatedRequest)
                    this.dequeue(((ReplicableRequest) request));
                // send error message to sender; fall back to the addresses in the JSON
                // when the header did not carry them
                ((JSONMessenger<?>) this.messenger).sendClient(
                        // this.send(
                        sender == null ? MessageNIOTransport.getSenderAddress(jsonObject) : sender,
                        new ActiveReplicaError(this.nodeConfig.getNodeSocketAddress(getMyID()),
                                request.getServiceName(), ((ClientRequest) request).getRequestID())
                        // .toJSONObject()
                        , receiver == null ? MessageNIOTransport.getReceiverAddress(jsonObject) : receiver);
                AppInstrumenter.sentActiveReplicaError();
            }
            if (instrument(Instrument.getStats)) {
                System.out.println(DelayProfiler.getStats());
                log.log(Level.INFO, "{0} {1}", new Object[] { this, DelayProfiler.getStats() });
            }
        }
    } catch (RequestParseException rpe) {
        rpe.printStackTrace();
    } catch (JSONException je) {
        je.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    }
    return false; // neither reconfiguration packet nor app request
}
From source file:org.cloudifysource.esc.driver.provisioning.privateEc2.PrivateEC2CloudifyDriver.java
/** * ***************************************************************************************************************** *//*from ww w. jav a2 s. c om*/ /* * (non-Javadoc) * * @see * org.cloudifysource.esc.driver.provisioning.BaseProvisioningDriver#setConfig(org.cloudifysource.dsl.cloud.Cloud, * java.lang.String, boolean, java.lang.String) */ @Override public void setConfig(final Cloud cloud, final String cloudTemplateName, final boolean management, final String fullServiceName) { logger.fine("Running path : " + System.getProperty("user.dir")); this.serviceName = this.getSimpleServiceName(fullServiceName); this.cloudTemplateName = cloudTemplateName; this.cloudName = cloud.getName(); super.setConfig(cloud, cloudTemplateName, management, fullServiceName); if (logger.isLoggable(Level.FINER)) { logger.finer("Service name : " + this.serviceName + "(" + fullServiceName + ")"); } try { ComputeTemplate managerTemplate = this.getManagerComputeTemplate(); // Initialize the ec2 client if the service use the CFN template if (management) { // TODO - NO VALIDATION! managerCfnTemplateFileName = (String) managerTemplate.getCustom().get("cfnManagerTemplate"); } else { this.privateEc2Template = cfnTemplatePerService.get(this.serviceName); if (this.privateEc2Template == null) { throw new IllegalArgumentException("CFN template not found for service:" + fullServiceName); } } this.ec2 = this.createAmazonEC2(); // Create s3 client String locationId = (String) managerTemplate.getCustom().get("s3LocationId"); CloudUser user = this.cloud.getUser(); this.amazonS3Uploader = new AmazonS3Uploader(user.getUser(), user.getApiKey(), locationId); // Setup debug console output final boolean debug = BooleanUtils.toBoolean((String) managerTemplate.getCustom().get("debugMode")); if (debug) { this.debugExecutors = Executors.newFixedThreadPool(NB_THREADS_CONSOLE_OUTPUT); } } catch (final CloudProvisioningException e) { throw new IllegalArgumentException(e); } }
From source file:com.archivas.clienttools.arcutils.utils.net.GetCertsX509TrustManager.java
protected void handleCertFailureCallback(SSLCertChain certChain, CertificateException certException) throws CertificateException { // We need to warn them if the hostname is invalid String hostNameWarningString = null; if (hostname != null) { hostNameWarningString = testHostname(hostname, certChain); }/*from w w w . j a v a 2s. c o m*/ SSLCertificateCallback.AllowSSLCert allowAccess = SSLCertificateCallback.AllowSSLCert.NO; certChain.setCertificateException(certException); if (sslExceptionCallback != null) { try { allowAccess = sslExceptionCallback.exceptionCallback(profile, certChain, hostNameWarningString); LOG.log(Level.FINE, "SSLExceptionCallback returned from user: " + allowAccess); } catch (Exception e) { LOG.log(Level.WARNING, "Unexpected Exception in SSL Certificate Exception Callback. Disallowing use of certificate: " + certChain, e); allowAccess = SSLCertificateCallback.AllowSSLCert.NO; } } if (allowAccess == SSLCertificateCallback.AllowSSLCert.THIS_SESSION_ONLY) { // User accepted the certificate. persist it to the keystore. try { LOG.log(Level.FINER, "About to store certificate in memory keystore: " + certChain); memoryKeyStore.setCertificateEntry(certChain.getIssuedByCommonName(), certChain.getCertificateList().get(0)); initMemoryTrustManager(true); LOG.log(Level.FINER, "Successfully stored cert into memory keystore: " + certChain); } catch (Exception e) { LOG.log(Level.WARNING, "Error saving certificate to memory keystore", e); } } else if (allowAccess == SSLCertificateCallback.AllowSSLCert.PERMANENTLY_SAVE) { // User accepted the certificate. persist it to the keystore. 
try { LOG.log(Level.FINER, "About to store certificate in persisted keystore: " + certChain); persistedKeyStore.setCertificateEntry(certChain.getIssuedByCommonName(), certChain.getCertificateList().get(0)); FileOutputStream fos = new FileOutputStream(persistedKeystoreFile); persistedKeyStore.store(fos, persistedKeystorePassword); initPersistedTrustManager(true); LOG.log(Level.FINER, "Successfully stored cert into persisted keystore: " + certChain); } catch (Exception e) { LOG.log(Level.WARNING, "Error saving certificate to persisted keystore", e); } } else { // If AllowSSLCert.NO or anything else, throw the exception throw certException; } // If we got here, The user accepted the cert and it is stored. Do the callback. if (sslExceptionCallback != null) { sslExceptionCallback.validCertCallback(profile, certChain); } }
From source file:fungus.HyphaLink.java
@Override public void onKill() { if (myNode == null) { log.log(Level.FINER, "Cycle " + CDState.getCycle() + ": TRIED TO KILL UNINITIALIZED NODE"); return;/*from www . ja v a 2 s . c o m*/ } HyphaData myData = (HyphaData) myNode.getProtocol(hyphaDataPid); log.log(Level.FINER, myNode + " (" + myData.getState() + ") HAS BEEN KILLED ", myNode); fireFailing(myNode, myData.getState(), degree(), myData.getParentTarget(), neighbors); MycoList removing = new MycoList(neighbors); for (MycoNode neighbor : removing) { HyphaLink nl = neighbor.getHyphaLink(); nl.removeNeighbor(myNode); } neighbors.clear(); /* List<Node> nl = new ArrayList<Node>(neighbors); for (Node neighbor : nl) { HyphaData nd = (HyphaData) neighbor.getProtocol(hyphaDataPid); log.finest(myNode.getID() + " SEVERING NEIGHBOR " + neighbor.getID() + " (" + nd.getState() + ")"); removeNeighbor(neighbor); } */ }
From source file:org.jenkinsci.plugins.vsphere.tools.VSphere.java
public void reconfigureVm(String name, VirtualMachineConfigSpec spec) throws VSphereException { VirtualMachine vm = getVmByName(name); if (vm == null) { throw new VSphereNotFoundException("VM or template", name); }//from ww w.j av a2s. c om LOGGER.log(Level.FINER, "Reconfiguring VM. Please wait ..."); try { Task task = vm.reconfigVM_Task(spec); String status = task.waitForTask(); if (status.equals(TaskInfoState.success.toString())) { return; } throw newVSphereException(task.getTaskInfo(), "Couldn't reconfigure \"" + name + "\"!"); } catch (RuntimeException | VSphereException e) { throw e; } catch (Exception e) { throw new VSphereException("VM cannot be reconfigured:" + e.getMessage(), e); } }
From source file:com.ibm.jaggr.core.impl.transport.AbstractHttpTransport.java
/** * Returns the value of the requested parameter from the request, or null * * @param request/*from w w w . java2s. c o m*/ * the request object * @param aliases * array of query arg names by which the request may be specified * @return the value of the param, or null if it is not specified under the * specified names */ protected static String getParameter(HttpServletRequest request, String[] aliases) { final String sourceMethod = "getParameter"; //$NON-NLS-1$ boolean isTraceLogging = log.isLoggable(Level.FINER); if (isTraceLogging) { log.entering(AbstractHttpTransport.class.getName(), sourceMethod, new Object[] { request.getQueryString(), Arrays.asList(aliases) }); } Map<String, String[]> params = request.getParameterMap(); String result = null; for (Map.Entry<String, String[]> entry : params.entrySet()) { String name = entry.getKey(); for (String alias : aliases) { if (alias.equalsIgnoreCase(name)) { String[] values = entry.getValue(); result = values[values.length - 1]; // return last value in array } } } if (isTraceLogging) { log.exiting(AbstractHttpTransport.class.getName(), sourceMethod, result); } return result; }
From source file:org.jenkinsci.plugins.pipeline.maven.dao.PipelineMavenPluginH2Dao.java
@Nonnull @Override/*ww w. j a v a 2 s . c om*/ public List<String> listDownstreamJobs(@Nonnull String jobFullName, int buildNumber) { LOGGER.log(Level.FINER, "listDownstreamJobs({0}, {1})", new Object[] { jobFullName, buildNumber }); String generatedArtifactsSql = "SELECT DISTINCT GENERATED_MAVEN_ARTIFACT.ARTIFACT_ID " + " FROM GENERATED_MAVEN_ARTIFACT " + " INNER JOIN JENKINS_BUILD AS UPSTREAM_BUILD ON GENERATED_MAVEN_ARTIFACT.BUILD_ID = UPSTREAM_BUILD.ID " + " INNER JOIN JENKINS_JOB AS UPSTREAM_JOB ON UPSTREAM_BUILD.JOB_ID = UPSTREAM_JOB.ID " + " WHERE " + " UPSTREAM_JOB.FULL_NAME = ? AND" + " UPSTREAM_BUILD.NUMBER = ? AND " + " GENERATED_MAVEN_ARTIFACT.SKIP_DOWNSTREAM_TRIGGERS = FALSE"; String sql = "SELECT DISTINCT DOWNSTREAM_JOB.FULL_NAME " + " FROM JENKINS_JOB AS DOWNSTREAM_JOB" + " INNER JOIN JENKINS_BUILD AS DOWNSTREAM_BUILD ON DOWNSTREAM_JOB.ID = DOWNSTREAM_BUILD.JOB_ID " + " INNER JOIN MAVEN_DEPENDENCY ON DOWNSTREAM_BUILD.ID = MAVEN_DEPENDENCY.BUILD_ID" + " WHERE " + " MAVEN_DEPENDENCY.ARTIFACT_ID IN (" + generatedArtifactsSql + ") AND " + " MAVEN_DEPENDENCY.IGNORE_UPSTREAM_TRIGGERS = FALSE AND " + " DOWNSTREAM_BUILD.NUMBER in (SELECT MAX(JENKINS_BUILD.NUMBER) FROM JENKINS_BUILD WHERE DOWNSTREAM_JOB.ID = JENKINS_BUILD.JOB_ID)" + " ORDER BY DOWNSTREAM_JOB.FULL_NAME"; List<String> downstreamJobsFullNames = new ArrayList<>(); LOGGER.log(Level.FINER, "sql: {0}, jobFullName:{1}, buildNumber: {2}", new Object[] { sql, jobFullName, buildNumber }); try (Connection cnn = jdbcConnectionPool.getConnection()) { try (PreparedStatement stmt = cnn.prepareStatement(sql)) { stmt.setString(1, jobFullName); stmt.setInt(2, buildNumber); try (ResultSet rst = stmt.executeQuery()) { while (rst.next()) { downstreamJobsFullNames.add(rst.getString(1)); } } } } catch (SQLException e) { throw new RuntimeSqlException(e); } LOGGER.log(Level.FINE, "listDownstreamJobs({0}, {1}): {2}", new Object[] { jobFullName, buildNumber, downstreamJobsFullNames }); return downstreamJobsFullNames; }