Example usage for java.util.logging Level FINER

List of usage examples for java.util.logging Level FINER

Introduction

On this page you can find example usages of java.util.logging Level FINER.

Prototype

Level FINER

Follow the Source Link below to view the source code for java.util.logging Level FINER.

Click Source Link

Document

FINER indicates a fairly detailed tracing message.

Usage

From source file:org.apache.cxf.jca.outbound.ManagedConnectionFactoryImpl.java

/**
 * Creates a connection factory bound to the given application-server
 * connection manager; the factory falls back to the default connection
 * manager when the caller supplies {@code null}.
 *
 * @param connMgr the app-server-provided connection manager, may be null
 * @return a new {@code ConnectionFactoryImpl}
 * @throws ResourceException declared by the JCA contract
 */
public Object createConnectionFactory(ConnectionManager connMgr) throws ResourceException {
    if (LOG.isLoggable(Level.FINER)) {
        LOG.finer("Create connection factory by app server connMgr " + connMgr);
    }
    ConnectionManager effectiveManager = (connMgr == null) ? defaultConnectionManager : connMgr;
    return new ConnectionFactoryImpl(this, effectiveManager);
}

From source file:org.cloudifysource.rest.controllers.UploadController.java

/**
 * Uploading a file to be used in future deployments.
 * The file will be kept at least {@link UploadRepo#TIMEOUT_SECOND} seconds.
 * @param fileName - the name of the file to upload.
 * @param file - the file to upload./*w w  w.j  ava 2s .  c  om*/
 * @return {@link UploadResponse} - contains the uploaded file's name.
 * @throws RestErrorException .
 */
@PreAuthorize("isFullyAuthenticated() and hasPermission(#authGroups, 'deploy')")
@RequestMapping(value = "/{fileName:.+}", method = RequestMethod.POST)
public UploadResponse upload(@PathVariable() final String fileName,
        @RequestParam(value = CloudifyConstants.UPLOAD_FILE_PARAM_NAME, required = true) final MultipartFile file)
        throws RestErrorException {
    // determine file's name
    String name = fileName;
    if (StringUtils.isEmpty(fileName)) {
        name = file.getOriginalFilename();
    }
    if (logger.isLoggable(Level.FINER)) {
        logger.finer("received request to upload file " + name);
    }
    // upload file using uploadRepo
    String uploadedFileDirName = null;
    try {
        uploadedFileDirName = uploadRepo.put(name, file);
    } catch (IOException e) {
        logger.warning("could not upload file " + name + " error was - " + e.getMessage());
        throw new RestErrorException(CloudifyMessageKeys.UPLOAD_FAILED.getName(), name, e.getMessage());
    }
    // create and return UploadResponse
    UploadResponse response = new UploadResponse();
    response.setUploadKey(uploadedFileDirName);
    return response;
}

From source file:com.google.enterprise.connector.salesforce.security.BaseAuthorizationManager.java

/**
 * Connector manager sends a collection of documentIDs to the connector
 * to authorize for an authenticated context.
 *
 * If a loadable authorization module is configured (via the CALLABLE_AZ
 * system property suffixed with the connector instance name) it is used;
 * otherwise the default flow substitutes the doc IDs and user into the
 * connector's AZ query and evaluates the SalesForce response.
 *
 * @param  col Collection  the docIDs to authorize
 * @param  id AuthenticationIdentity   the identity to auth
 * @return Collection of docs that are authorized (AuthorizationResponse entries)
 */

public Collection authorizeDocids(Collection col, AuthenticationIdentity id) {
    logger.log(Level.FINER, " SalesForceAuthorizationManager. authorizeDocids called for " + id.getUsername());

    //first see if we have a callable authorization module to try

    String callable_az_module = System
            .getProperty(BaseConstants.CALLABLE_AZ + "_" + connector.getInstanceName());

    if (callable_az_module != null) {
        logger.log(Level.FINE, "Using Loadable Authorization Module : " + callable_az_module);
        try {
            // reflectively instantiate the configured module via its no-arg constructor
            Class cls = Class.forName(callable_az_module);
            java.lang.reflect.Constructor co = cls.getConstructor();
            IAuthorizationModule icau = (IAuthorizationModule) co.newInstance();

            Collection auth_col = icau.authorizeDocids(col, id.getUsername());

            Collection ret_col = new ArrayList();

            // every doc ID the module returned is treated as permitted
            for (Iterator i = auth_col.iterator(); i.hasNext();) {
                String did = (String) i.next();
                AuthorizationResponse ap = new AuthorizationResponse(true, did);
                ret_col.add(ap);
            }

            return ret_col;
        } catch (Exception ex) {
            // NOTE(review): on failure we log and fall through to the default
            // flow below; the exception itself is not chained into the log record
            logger.log(Level.SEVERE, "Unable to load Authorization Module " + callable_az_module);
        }
    } else {
        logger.log(Level.FINER, "Using Default Authorization Module");
    }

    Iterator itr = col.iterator();
    logger.log(Level.FINER, " AUTHORIZING  BATCH OF : " + col.size() + " documents");

    //vector to hold the list of docs that will eventually get authorized
    Vector v_docIDs = new Vector();

    //create a string of 'docid1','docid2','docid3'  to send into the AZ query
    String doc_wildcard = "";
    while (itr.hasNext()) {
        String docID = (String) itr.next();
        v_docIDs.add(docID);
        doc_wildcard = doc_wildcard + "'" + docID + "'";
        if (itr.hasNext())
            doc_wildcard = doc_wildcard + ",";
    }

    //initialize the collection for the response
    Collection col_resp = new ArrayList();
    String query = connector.getAzquery();

    //substitute the doc IDs and user into the AZ query template
    String modified_az_query = query.replace("$DOCIDS", doc_wildcard);
    modified_az_query = modified_az_query.replace("$USERID", id.getUsername());

    logger.log(Level.FINER, "Attempting Authorizing DocList " + modified_az_query);

    //get ready to submit the query
    SFQuery sfq = new SFQuery();
    //get the user's sessionID and login server that's in context
    //this step may not be necessary if we use the connector's login context
    //instead of the user's...
    //TODO: figure out which way is better later on
    Properties session_props = connector.getUserSession(id.getUsername());
    //how did the user authenticate:
    //if it's strong (i.e. we got a session ID) we can submit a full AZ query
    String auth_strength = (String) session_props.get(BaseConstants.AUTHENTICATION_TYPE);

    if (auth_strength.equals(BaseConstants.STRONG_AUTHENTICATION)) {
        logger.log(Level.FINER, "Using Strong Authentication");

        try {

            //following section is used if we want to AZ using the connector's authenticated super context
            //it's commented out for now but we'll touch on this later
            // if (connector.getSessionID().equalsIgnoreCase("")){
            //     SalesForceLogin sfl = new SalesForceLogin(connector.getUsername(),connector.getPassword(),connector.getLoginsrv());
            //     if (sfl.isLoggedIn()){
            //        connector.setSessionID(sfl.getSessionID());
            //        connector.setEndPointServer(sfl.getEndPointServer());
            //     }
            //  }

            //for connector-managed sessions
            //TODO: figure out some way to purge the older sessions

            logger.log(Level.INFO,
                    "Submitting  [" + (String) session_props.getProperty(BaseConstants.LOGIN_SERVER) + "]  ["
                            + (String) session_props.getProperty(BaseConstants.SESSIONID) + "]");
            org.w3c.dom.Document az_resp = sfq.submitStatement(modified_az_query, BaseConstants.QUERY_TYPE,
                    (String) session_props.getProperty(BaseConstants.LOGIN_SERVER),
                    (String) session_props.getProperty(BaseConstants.SESSIONID));

            //if using the system session to check AZ:
            //org.w3c.dom.Document az_resp =  sfq.submitStatement(modified_az_query, BaseConstants.QUERY_TYPE,connector.getEndPointServer() , connector.getSessionID());

            //now transform the AZ SOAP response into the canonical form using
            //the AZ XSLT provided (stored base64-encoded on the connector).
            String encodedXSLT = connector.getAzxslt();
            byte[] decode = org.apache.commons.codec.binary.Base64.decodeBase64(encodedXSLT.getBytes());

            org.w3c.dom.Document az_xsl = Util.XMLStringtoDoc(new String(decode));

            logger.log(Level.FINER, "AZ Query Response " + Util.XMLDoctoString(az_resp));
            Document tx_xml = Util.TransformDoctoDoc(az_resp, az_xsl);
            tx_xml.getDocumentElement().normalize();
            logger.log(Level.FINER,
                    "AZ transform result for " + id.getUsername() + "  " + Util.XMLDoctoString(tx_xml));

            //TODO...figure out why I can't use tx_xml as a document by itself;
            //have to resort to converting tx_xml to string and then back to Document
            //for some reason
            DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
            DocumentBuilder db = dbf.newDocumentBuilder();
            StringBuffer sb1 = new StringBuffer(Util.XMLDoctoString(tx_xml));
            ByteArrayInputStream bis = new ByteArrayInputStream(sb1.toString().getBytes("UTF-8"));
            Document doc = db.parse(bis);
            doc.getDocumentElement().normalize();

            //now that the soap response is transformed, extract the documents that were
            //authorized from the canonical XML AZ form
            NodeList nl_documents = doc.getElementsByTagName("azdecisions");
            //get the NodeList under <document>
            HashMap hm_azdecisions = new HashMap();
            ;
            Node n_documents = nl_documents.item(0);
            for (int i = 0; i < n_documents.getChildNodes().getLength(); i++) {
                Node n_doc = n_documents.getChildNodes().item(i);
                if (n_doc.getNodeType() == Node.ELEMENT_NODE) {
                    TransformerFactory transfac = TransformerFactory.newInstance();
                    Transformer trans = transfac.newTransformer();
                    trans.setOutputProperty(OutputKeys.INDENT, "yes");

                    if (n_doc.getNodeName().equalsIgnoreCase("docID")) {
                        //ok...so this doc ID was returned so we'll allow/permit this
                        hm_azdecisions.put(n_doc.getFirstChild().getNodeValue(), "PERMIT");
                    }
                }
            }
            //for each doc ID we got in, iterate and authorize the docs that we got back..
            //TODO: of course we could just forego this loop
            //and simply iterate the hm_azdecisions hashmap to create col_resp
            for (int i = 0; i < v_docIDs.size(); i++) {
                //a doc id we got to test
                String in_docID = (String) v_docIDs.get(i);
                //if the doc exists in the set we authorized, permit it
                if (hm_azdecisions.containsKey(in_docID)) {
                    AuthorizationResponse ap = new AuthorizationResponse(true, in_docID);
                    col_resp.add(ap);
                }
            }

        } catch (Exception bex) {
            logger.log(Level.SEVERE, " ERROR SUBMITTING AZ Query " + bex);
        }
    }
    //if the user was just authenticated
    //we don't have the sessionid so we'll authorize all docs.

    //WEAK_AUTH flag should never get set since
    //we've failed the AU attempt in the BaseAuthenticationManager already
    else if (auth_strength.equals(BaseConstants.WEAK_AUTHENTICATION)) {
        logger.log(Level.FINER, "Using Weak Authentication");
        if (connector.allowWeakAuth()) {

            //permit every requested doc ID without querying SalesForce
            col_resp = new ArrayList();
            for (int i = 0; i < v_docIDs.size(); i++) {
                String docID = (String) v_docIDs.get(i);
                logger.log(Level.FINER, "Authorizing " + docID);
                AuthorizationResponse ap = new AuthorizationResponse(true, docID);
                col_resp.add(ap);
            }
        }
    }
    return col_resp;
}

From source file:org.jboss.arquillian.container.was.wlp_remote_8_5.WLPRestClient.java

/**
 * Uses the rest api to upload an application binary to the dropins folder
 * of WLP to allow the server automatically deploy it.
 * /*w  ww  .  java2s. co m*/
 * @param archive
 * @throws ClientProtocolException
 * @throws IOException
 */
public void deploy(File archive) throws ClientProtocolException, IOException {

    if (log.isLoggable(Level.FINER)) {
        log.entering(className, "deploy");
    }

    String deployPath = String.format("${wlp.user.dir}/servers/%s/dropins/%s", configuration.getServerName(),
            archive.getName());

    String serverRestEndpoint = String.format("https://%s:%d%s%s", configuration.getHostName(),
            configuration.getHttpsPort(), FILE_ENDPOINT, URLEncoder.encode(deployPath, UTF_8));

    HttpResponse result = executor.execute(Request.Post(serverRestEndpoint).useExpectContinue()
            .version(HttpVersion.HTTP_1_1).bodyFile(archive, ContentType.DEFAULT_BINARY)).returnResponse();

    if (log.isLoggable(Level.FINE)) {
        log.fine("While deploying file " + archive.getName() + ", server returned response: "
                + result.getStatusLine().getStatusCode());
    }

    if (!isSuccessful(result)) {
        throw new ClientProtocolException(
                "Could not deploy application to server, server returned response: " + result);
    }

    if (log.isLoggable(Level.FINER)) {
        log.exiting(className, "deploy");
    }

}

From source file:org.jenkinsci.plugins.dockerhub.notification.Coordinator.java

@Override
public void onFinalized(@Nonnull Run<?, ?> run) {
    // Only runs triggered by a Docker Hub web hook are of interest.
    DockerHubWebHookCause cause = run.getCause(DockerHubWebHookCause.class);
    if (cause == null) {
        return;
    }
    logger.log(Level.FINER, "Build {0} done for cause: [{1}]",
            new Object[] { run.getFullDisplayName(), cause });
    TriggerStore.TriggerEntry entry = TriggerStore.getInstance().finalized(cause.getPayload(), run);
    if (entry == null) {
        logger.log(Level.SEVERE, "Failed to do final evaluation of builds for cause [{0}]", cause);
        return;
    }
    if (!entry.areAllDone()) {
        // More builds pending for this payload; callback happens later.
        return;
    }
    logger.log(Level.FINE, "All builds for [{0}] are done, preparing callback to Docker Hub",
            cause);
    try {
        CallbackPayload callback = CallbackPayload.from(entry);
        if (callback != null) {
            entry.setCallbackData(callback);
            TriggerStore.getInstance().save(entry);
            sendResponse(cause.getPayload(), callback);
        }
    } catch (Exception e) {
        logger.log(Level.SEVERE, "Failed to update Docker Hub!", e);
    }
}

From source file:com.granule.json.utils.internal.JSONSAXHandler.java

/**
 * Constructor.
 * @param os The output stream to write the resulting JSON to.
 * @param verbose Whether or not to render the stream in a verbose (formatted), or compact form.
 * @throws IOException Thrown if an error occurs during streaming out, or XML read.
 */
public JSONSAXHandler(OutputStream os, boolean verbose) throws IOException {
    if (logger.isLoggable(Level.FINER)) {
        logger.entering(className, "JSONHander(OutputStream, boolean) <constructor>");
    }

    // Writer is always UTF-8; compact output is the inverse of verbose.
    this.osWriter = new OutputStreamWriter(os, "UTF-8");
    this.compact = !verbose;

    if (logger.isLoggable(Level.FINER)) {
        logger.exiting(className, "JSONHander(OutputStream, boolean) <constructor>");
    }
}

From source file:com.ibm.liberty.starter.api.v1.LibertyFileUploader.java

// Handles multipart file uploads for a given technology/workspace pair,
// optionally cleaning the target directory first and optionally asking the
// tech service to process the uploaded files afterwards.
protected void doPost(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    String tech = request.getParameter(PARAMETER_TECH);
    String workspaceId = request.getParameter(PARAMETER_WORKSPACE); //specify the unique workspace directory to upload the file(s) to.
    Collection<Part> filePartCollection = request.getParts();

    // Derive the externally visible host:port, and an internal plain-http
    // variant of the same authority, for the service connector.
    String serverHostPort = request.getRequestURL().toString().replace(request.getRequestURI(), "");
    int schemeLength = request.getScheme().toString().length();
    String internalHostPort = "http" + serverHostPort.substring(schemeLength);
    log.log(Level.FINER, "serverHostPort : " + serverHostPort);
    final ServiceConnector serviceConnector = new ServiceConnector(serverHostPort, internalHostPort);
    HashMap<Part, String> fileNames = new HashMap<Part, String>();
    // isValidRequest also populates fileNames and writes the error response itself.
    if (!isValidRequest(request, response, tech, workspaceId, filePartCollection, serviceConnector,
            fileNames)) {
        return;
    }

    Service techService = serviceConnector.getServiceObjectFromId(tech);
    String techDirPath = StarterUtil.getWorkspaceDir(workspaceId) + "/" + techService.getId();
    File techDir = new File(techDirPath);
    // Optional wipe of the tech directory before accepting new files.
    if (techDir.exists() && techDir.isDirectory()
            && "true".equalsIgnoreCase(request.getParameter(PARAMETER_CLEANUP))) {
        FileUtils.cleanDirectory(techDir);
        log.log(Level.FINER, "Cleaned up tech workspace directory : " + techDirPath);
    }

    for (Part filePart : filePartCollection) {
        if (!techDir.exists()) {
            FileUtils.forceMkdir(techDir);
            log.log(Level.FINER, "Created tech directory :" + techDirPath);
        }

        String filePath = techDirPath + "/" + fileNames.get(filePart);
        log.log(Level.FINER, "File path : " + filePath);
        File uploadedFile = new File(filePath);

        // Overwrite any file already present under the same name.
        Files.copy(filePart.getInputStream(), uploadedFile.toPath(), StandardCopyOption.REPLACE_EXISTING);
        log.log(Level.FINE, "Copied file to " + filePath);
    }

    if ("true".equalsIgnoreCase(request.getParameter(PARAMETER_PROCESS))) {
        // Process uploaded file(s); any result other than "success" is
        // reported to the client as a 500 with the raw result text.
        String processResult = serviceConnector.processUploadedFiles(techService, techDirPath);
        if (!processResult.equalsIgnoreCase("success")) {
            log.log(Level.INFO,
                    "Error processing the files uploaded to " + techDirPath + " : Result=" + processResult);
            response.sendError(500, processResult);
            return;
        }
        log.log(Level.FINE, "Processed the files uploaded to " + techDirPath);
    }

    response.setContentType("text/html");
    PrintWriter out = response.getWriter();
    out.println("success");
    out.close();
}

From source file:com.gisgraphy.domain.geoloc.service.fulltextsearch.SolrClientTest.java

@Test
public void testSetSolRLogLevel() throws Exception {
    // Verify that the Solr log level can be set to every standard level
    // without error; order matters only in that OFF is applied last.
    IsolrClient client = new SolrClient(AbstractIntegrationHttpSolrTestCase.fulltextSearchUrlBinded,
            new MultiThreadedHttpConnectionManager());
    assertTrue(client.isServerAlive());
    Level[] levels = { Level.CONFIG, Level.FINE, Level.FINER, Level.FINEST, Level.INFO, Level.SEVERE,
            Level.WARNING, Level.ALL, Level.OFF };
    for (Level level : levels) {
        client.setSolRLogLevel(level);
    }
}

From source file:mendeley2kindle.KindleDAO.java

/**
 * Writes the in-memory collections JSON to the Kindle's collections file.
 * Creates parent directories as needed; refuses to touch an existing
 * read-only file and logs a SEVERE message instead of throwing.
 *
 * @throws IOException if opening or writing the collections file fails
 */
public void commit() throws IOException {
    File path = new File(kindleLocal);
    File file = new File(path, KINDLE_COLLECTIONS_JSON);
    log.log(Level.FINER, "writing collections data: " + file);
    if (!file.exists() || file.canWrite()) {
        // Best-effort directory creation; FileWriter below reports failure.
        file.getParentFile().mkdirs();
        // Close the writer even when write() throws (was a resource leak).
        FileWriter fw = new FileWriter(file);
        try {
            fw.write(collections.toString());
        } finally {
            fw.close();
        }
        log.log(Level.FINE, "Saved kindle collections: " + file);
    } else {
        log.log(Level.SEVERE, "CANNOT write Kindle collections data. Aborting..." + file);
    }
}

From source file:com.granule.json.utils.internal.JSONObject.java

/**
 * Method to add a JSON child object to this JSON object, grouping children
 * under the child's object name.
 * @param obj The child JSON object to add to this JSON object.
 */
public void addJSONObject(JSONObject obj) {
    if (logger.isLoggable(Level.FINER)) {
        logger.entering(className, "addJSONObject(JSONObject)");
    }

    // Children sharing a name are accumulated in one Vector per name.
    Vector children = (Vector) this.jsonObjects.get(obj.objectName);
    if (children == null) {
        children = new Vector();
        this.jsonObjects.put(obj.objectName, children);
    }
    children.add(obj);

    if (logger.isLoggable(Level.FINER)) {
        logger.exiting(className, "addJSONObject(JSONObject)");
    }
}