List of usage examples for org.apache.commons.lang.time.StopWatch: the StopWatch() constructor
public StopWatch()
Constructor. Creates a stopwatch in the unstarted state; call start() to begin timing.
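All of the examples below come from real projects. As a baseline, here is a minimal, self-contained sketch of the constructor and the basic lifecycle (the sleep stands in for whatever work is being timed):

import org.apache.commons.lang.time.StopWatch;

public class StopWatchDemo {
    public static void main(String[] args) throws InterruptedException {
        StopWatch sw = new StopWatch(); // created unstarted
        sw.start();                     // begin timing
        Thread.sleep(250);              // stand-in for the work being measured
        sw.stop();                      // freeze the elapsed time
        System.out.println(sw.getTime() + " ms"); // elapsed milliseconds as a long
        System.out.println(sw);         // toString() formats as H:mm:ss.SSS
        sw.reset();                     // back to unstarted, ready for reuse
    }
}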
From source file: com.ecyrd.jspwiki.ReferenceManager.java
/**
 * Serializes hashmaps to disk. The format is private, don't touch it.
 */
private synchronized void serializeAttrsToDisk(WikiPage p) {
    ObjectOutputStream out = null;
    StopWatch sw = new StopWatch();
    sw.start();

    try {
        File f = new File(m_engine.getWorkDir(), SERIALIZATION_DIR);

        if (!f.exists())
            f.mkdirs();

        //
        // Create a digest for the name
        //
        f = new File(f, getHashFileName(p.getName()));

        // FIXME: There is a concurrency issue here...
        Set entries = p.getAttributes().entrySet();

        if (entries.size() == 0) {
            // Nothing to serialize, therefore we will just simply remove the
            // serialization file so that the next time we boot, we don't
            // deserialize old data.
            f.delete();
            return;
        }

        out = new ObjectOutputStream(new BufferedOutputStream(new FileOutputStream(f)));

        out.writeLong(serialVersionUID);
        out.writeLong(System.currentTimeMillis()); // Timestamp
        out.writeUTF(p.getName());
        out.writeLong(entries.size());

        for (Iterator i = entries.iterator(); i.hasNext();) {
            Map.Entry e = (Map.Entry) i.next();

            if (e.getValue() instanceof Serializable) {
                out.writeUTF((String) e.getKey());
                out.writeObject(e.getValue());
            }
        }

        out.close();
    } catch (IOException e) {
        log.error("Unable to serialize!");
        try {
            if (out != null)
                out.close();
        } catch (IOException ex) {
        }
    } catch (NoSuchAlgorithmException e) {
        log.fatal("No MD5 algorithm!?!");
    } finally {
        sw.stop();
        log.debug("serialization for " + p.getName() + " done - took " + sw);
    }
}
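A pattern worth noting in the JSPWiki method above: the watch is started before the work and stopped in the finally block, so the timing is logged even when serialization fails. A stripped-down sketch of that idiom (doWork() is a hypothetical stand-in, not JSPWiki code):

import org.apache.commons.lang.time.StopWatch;

public class TimedOperation {
    public static void main(String[] args) {
        StopWatch sw = new StopWatch();
        sw.start();
        try {
            doWork(); // the operation being timed; may throw
        } finally {
            sw.stop();
            // toString() formats the elapsed time as H:mm:ss.SSS
            System.out.println("operation done - took " + sw);
        }
    }

    private static void doWork() {
        // stand-in for real work, e.g. serializing attributes to disk
        for (int i = 0; i < 1_000_000; i++) {
            Math.sqrt(i);
        }
    }
}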
From source file: au.org.theark.lims.util.BioCollectionSpecimenUploader.java
/**
 * Upload Biospecimen Inventory location file.
 *
 * Where N is any number of columns
 *
 * @param fileInputStream
 *            is the input stream of a file
 * @param inLength
 *            is the length of a file
 * @throws FileFormatException
 *             file format Exception
 * @throws ArkBaseException
 *             general ARK Exception
 * @return the upload report detailing the upload process
 */
public StringBuffer uploadAndReportMatrixBiospecimenInventoryFile(InputStream fileInputStream, long inLength,
        String inFileFormat, char inDelimChr) throws FileFormatException, ArkSystemException {
    delimiterCharacter = inDelimChr;
    uploadReport = new StringBuffer();
    curPos = 0;

    List<InvCell> cellsToUpdate = new ArrayList<InvCell>();
    InputStreamReader inputStreamReader = null;
    CsvReader csvReader = null;
    DecimalFormat decimalFormat = new DecimalFormat("0.00");

    // If Excel, convert to CSV for validation
    if (inFileFormat.equalsIgnoreCase("XLS")) {
        Workbook w;
        try {
            w = Workbook.getWorkbook(fileInputStream);
            delimiterCharacter = ',';
            XLStoCSV xlsToCsv = new XLStoCSV(delimiterCharacter);
            fileInputStream = xlsToCsv.convertXlsToCsv(w);
            fileInputStream.reset();
        } catch (BiffException e) {
            log.error(e.getMessage());
        } catch (IOException e) {
            log.error(e.getMessage());
        }
    }

    try {
        inputStreamReader = new InputStreamReader(fileInputStream);
        csvReader = new CsvReader(inputStreamReader, delimiterCharacter);

        srcLength = inLength;
        if (srcLength <= 0) {
            uploadReport.append("The input size was not greater than 0. Actual length reported: ");
            uploadReport.append(srcLength);
            uploadReport.append("\n");
            throw new FileFormatException(
                    "The input size was not greater than 0. Actual length reported: " + srcLength);
        }

        timer = new StopWatch();
        timer.start();

        csvReader.readHeaders();
        srcLength = inLength - csvReader.getHeaders().toString().length();
        log.debug("Header length: " + csvReader.getHeaders().toString().length());

        // Loop through all rows in file
        while (csvReader.readRecord()) {
            log.info("At record: " + recordCount);
            String biospecimenUID = csvReader.get("BIOSPECIMENUID");
            Biospecimen biospecimen = iLimsService.getBiospecimenByUid(biospecimenUID, study);
            if (biospecimen == null) {
                log.error("....We should NEVER have null biospecimens; this should be validated in a prior step");
                break;
            }

            // Allocation details
            InvCell invCell;
            String siteName = null;
            String freezerName = null;
            String rackName = null;
            String boxName = null;
            String row = null;
            String column = null;

            if (csvReader.getIndex("SITE") > 0) {
                siteName = csvReader.get("SITE");
            }
            if (csvReader.getIndex("FREEZER") > 0) {
                freezerName = csvReader.get("FREEZER");
            }
            if (csvReader.getIndex("RACK") > 0) {
                rackName = csvReader.get("RACK");
            }
            if (csvReader.getIndex("BOX") > 0) {
                boxName = csvReader.get("BOX");
            }
            if (csvReader.getIndex("ROW") > 0) {
                row = csvReader.get("ROW");
            }
            if (csvReader.getIndex("COLUMN") > 0) {
                column = csvReader.get("COLUMN");
            }

            invCell = iInventoryService.getInvCellByLocationNames(siteName, freezerName, rackName, boxName,
                    row, column);

            if (invCell != null && invCell.getId() != null) {
                if (invCell.getBiospecimen() != null) {
                    log.error("This should NEVER happen as validation should ensure no cell will wipe another");
                    break;
                }
                invCell.setBiospecimen(biospecimen);
                cellsToUpdate.add(invCell);
                updateCount++;
            } else {
                log.error("This should NEVER happen as validation should ensure all cells valid");
                break;
            }
            recordCount++;
        }
    } catch (IOException ioe) {
        uploadReport.append("Unexpected I/O exception whilst reading the biospecimen data file\n");
        log.error("processMatrixBiospecimenFile IOException stacktrace:", ioe);
        throw new ArkSystemException("Unexpected I/O exception whilst reading the biospecimen data file");
    } catch (Exception ex) {
        uploadReport.append("Unexpected exception whilst reading the biospecimen data file\n");
        log.error("processMatrixBiospecimenFile Exception stacktrace:", ex);
        throw new ArkSystemException("Unexpected exception occurred when trying to process biospecimen data file");
    } finally {
        // Clean up the IO objects
        timer.stop();
        uploadReport.append("\n");
        uploadReport.append("Total elapsed time: ");
        uploadReport.append(timer.getTime());
        uploadReport.append(" ms or ");
        uploadReport.append(decimalFormat.format(timer.getTime() / 1000.0));
        uploadReport.append(" s");
        uploadReport.append("\n");
        uploadReport.append("Total file size: ");
        uploadReport.append(inLength);
        uploadReport.append(" B or ");
        uploadReport.append(decimalFormat.format(inLength / 1024.0 / 1024.0));
        uploadReport.append(" MB");
        uploadReport.append("\n");

        if (timer != null)
            timer = null;

        if (csvReader != null) {
            try {
                csvReader.close();
            } catch (Exception ex) {
                log.error("Cleanup operation failed: csvRdr.close()", ex);
            }
        }
        if (inputStreamReader != null) {
            try {
                inputStreamReader.close();
            } catch (Exception ex) {
                log.error("Cleanup operation failed: isr.close()", ex);
            }
        }
        // Restore the state of variables
        srcLength = -1;
    }

    uploadReport.append("Processed ");
    uploadReport.append(recordCount);
    uploadReport.append(" records.");
    uploadReport.append("\n");
    uploadReport.append("Updated ");
    uploadReport.append(updateCount);
    uploadReport.append(" records.");
    uploadReport.append("\n");

    iLimsService.batchUpdateInvCells(cellsToUpdate);

    return uploadReport;
}
From source file: net.nan21.dnet.core.web.controller.data.AbstractAsgnController.java
@RequestMapping(method = RequestMethod.POST, params = Constants.REQUEST_PARAM_ACTION + "="
        + Constants.ASGN_ACTION_MOVE_LEFT_ALL)
@ResponseBody
public String moveLeftAll(@PathVariable String resourceName, @PathVariable String dataFormat,
        @RequestParam(value = Constants.REQUEST_PARAM_ASGN_OBJECT_ID, required = true) String objectId,
        @RequestParam(value = Constants.REQUEST_PARAM_ASGN_SELECTION_ID, required = true) String selectionId,
        @RequestParam(value = "data", required = false, defaultValue = "{}") String dataString,
        @RequestParam(value = "params", required = false, defaultValue = "{}") String paramString,
        HttpServletRequest request, HttpServletResponse response) throws Exception {
    try {
        StopWatch stopWatch = new StopWatch();
        stopWatch.start();

        if (logger.isInfoEnabled()) {
            logger.info("Processing request: {}.{} -> action = {} ",
                    new String[] { resourceName, dataFormat, Constants.ASGN_ACTION_MOVE_LEFT_ALL });
        }
        if (logger.isDebugEnabled()) {
            logger.debug(" --> request-filter: objectId={}, selectionId={} data={}",
                    new String[] { objectId, selectionId, dataString });
            logger.debug(" --> request-params: {} ", new String[] { paramString });
        }

        this.prepareRequest(request, response);
        this.authorizeAsgnAction(resourceName, "update");

        IAsgnService<M, F, P> service = this.findAsgnService(this.serviceNameFromResourceName(resourceName));
        IDsMarshaller<M, F, P> marshaller = service.createMarshaller(dataFormat);

        F filter = marshaller.readFilterFromString(dataString);
        P params = marshaller.readParamsFromString(paramString);

        service.moveLeftAll(selectionId, filter, params);

        stopWatch.stop();
        return "";
    } catch (Exception e) {
        return this.handleException(e, response);
    } finally {
        this.finishRequest();
    }
}
From source file: fr.inria.edelweiss.kgdqp.test.TestDQP.java
public void testDQP(String testCase) throws EngineException, MalformedURLException {
    Graph graph = Graph.create(false);
    ProviderImplCostMonitoring sProv = ProviderImplCostMonitoring.create();
    QueryProcessDQP execDQP = QueryProcessDQP.create(graph, sProv, true);
    execDQP.setGroupingEnabled(true);

    // Mode BGP or not
    if (modeBGP) {
        execDQP.setPlanProfile(Query.QP_BGP);
    }

    // DUPLICATED DATA
    if (testCase.equals("d")) {
        execDQP.addRemote(new URL("http://" + host + ":8081/sparql"), WSImplem.REST);
        execDQP.addRemote(new URL("http://" + host + ":8082/sparql"), WSImplem.REST);
    }

    // GLOBAL BGP
    if (testCase.equals("g")) {
        execDQP.addRemote(new URL("http://" + host + ":8083/sparql"), WSImplem.REST);
        execDQP.addRemote(new URL("http://" + host + ":8084/sparql"), WSImplem.REST);
    }

    // Partial BGP and AND Lock
    if (testCase.equals("p")) {
        execDQP.addRemote(new URL("http://" + host + ":8085/sparql"), WSImplem.REST);
        execDQP.addRemote(new URL("http://" + host + ":8086/sparql"), WSImplem.REST);
        execDQP.addRemote(new URL("http://" + host + ":8087/sparql"), WSImplem.REST);
    }

    // Demographic
    execDQP.addRemote(new URL("http://" + host + ":8088/sparql"), WSImplem.REST);

    for (Map.Entry<String, String> query : queries.entrySet()) {
        // (a large block of commented-out code for writing per-query result
        // files, CSV timing values, and XML log appenders is omitted here)

        StopWatch sw = new StopWatch();
        sw.start();
        Mappings map = execDQP.query(query.getValue());
        sw.stop();

        logger.info(map.size() + " results in " + sw.getTime() + " ms");
        logger.info("\n" + map.toString());
        logger.info(Messages.countQueries);
        logger.info(Util.prettyPrintCounter(QueryProcessDQP.queryCounter));
        logger.info(Messages.countTransferredResults);
        logger.info(Util.prettyPrintCounter(QueryProcessDQP.queryVolumeCounter));
        logger.info(Messages.countDS);
        logger.info(Util.prettyPrintCounter(QueryProcessDQP.sourceCounter));
        logger.info(Messages.countTransferredResultsPerSource);
        logger.info(Util.prettyPrintCounter(QueryProcessDQP.sourceVolumeCounter));
    }
}
From source file: adams.flow.control.TimedSubProcess.java
/**
 * Executes the flow item.
 *
 * @return null if everything is fine, otherwise error message
 */
@Override
protected String doExecute() {
    String result;

    result = null;

    if (m_TimingEnabled) {
        if (m_StopWatch == null)
            m_StopWatch = new StopWatch();
    }

    // time execution
    if (m_TimingEnabled) {
        m_StopWatch.reset();
        m_StopWatch.start();
    }
    result = super.doExecute();
    if (m_TimingEnabled) {
        m_StopWatch.stop();
    }

    // is variable attached?
    if (!m_Configured)
        result = setUpCallableActor();

    if (result == null) {
        if (m_TimingEnabled) {
            if (m_CallableActor != null) {
                if (!m_CallableActor.getSkip() && !m_CallableActor.isStopped()) {
                    synchronized (m_CallableActor) {
                        if (isLoggingEnabled())
                            getLogger().info("Executing callable actor - start: " + m_CallableActor);
                        result = executeCallableActor(m_StopWatch.getTime());
                        if (isLoggingEnabled())
                            getLogger().info("Executing callable actor - end: " + result);
                    }
                }
            }
        }
    }

    return result;
}
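One detail this example gets right: a stopped commons-lang StopWatch cannot be start()ed again directly; start() after stop() throws IllegalStateException, so the actor calls reset() before each run. A minimal sketch of that reuse pattern (the loop body is illustrative):

import org.apache.commons.lang.time.StopWatch;

public class ReusableTimer {
    public static void main(String[] args) throws InterruptedException {
        StopWatch sw = new StopWatch();
        for (int i = 0; i < 3; i++) {
            sw.reset(); // required: start() on a stopped watch throws IllegalStateException
            sw.start();
            Thread.sleep(50); // stand-in for the timed work
            sw.stop();
            System.out.println("run " + i + ": " + sw.getTime() + " ms");
        }
    }
}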
From source file: com.auditbucket.test.functional.TestTrack.java
@Test
public void headersForDifferentCompaniesAreNotVisible() throws Exception {
    regService.registerSystemUser(new RegistrationBean(monowai, mike, "bah"));
    String hummingbird = "Hummingbird";
    regService.registerSystemUser(new RegistrationBean(hummingbird, mark, "bah"));

    // Monowai/Mike
    SecurityContextHolder.getContext().setAuthentication(authMike);
    Fortress fortWP = fortressService.registerFortress(new FortressInputBean("wportfolio", true));
    MetaInputBean inputBean = new MetaInputBean(fortWP.getName(), "wally", "CompanyNode", new DateTime(),
            "AHWP");
    String ahWP = mediationFacade.createHeader(inputBean, null).getMetaKey();
    assertNotNull(ahWP);
    assertNotNull(trackService.getHeader(ahWP));

    // Hummingbird/Gina
    SecurityContextHolder.getContext().setAuthentication(authMark);
    Fortress fortHS = fortressService.registerFortress(new FortressInputBean("honeysuckle", true));
    inputBean = new MetaInputBean(fortHS.getName(), "harry", "CompanyNode", new DateTime(), "AHHS");
    String ahHS = mediationFacade.createHeader(inputBean, null).getMetaKey();

    assertNotNull(fortressService.getFortressUser(fortWP, "wally", true));
    assertNotNull(fortressService.getFortressUser(fortHS, "harry", true));
    assertNull(fortressService.getFortressUser(fortWP, "wallyz", false));

    double max = 2000d;
    StopWatch watch = new StopWatch();
    watch.start();
    createLogRecords(authMike, ahWP, what, 20);
    createLogRecords(authMark, ahHS, what, 40);
    watch.stop();
    logger.info("End " + watch.getTime() / 1000d + " avg = " + (watch.getTime() / 1000d) / max);
}
From source file: com.mothsoft.alexis.dao.DocumentDaoImpl.java
@Override
public ScrollableResults scrollableSearch(Long userId, DocumentState state, String queryString,
        SortOrder sortOrder, Date startDate, Date endDate) {
    final StopWatch stopWatch = new StopWatch();
    stopWatch.start();

    final FullTextQuery fullTextQuery = this.buildFullTextQuery(queryString, userId, startDate, endDate,
            false, state, FullTextQuery.THIS, FullTextQuery.SCORE);

    final Sort sort;
    switch (sortOrder) {
    case DATE_ASC:
        sort = new Sort(new SortField("id", SortField.LONG));
        break;
    case DATE_DESC:
        sort = new Sort(new SortField("id", SortField.LONG, true));
        break;
    case RELEVANCE:
        sort = new Sort(SortField.FIELD_SCORE, new SortField("id", SortField.LONG, true));
        break;
    default:
        throw new IllegalArgumentException("Unexpected SortOrder: " + sortOrder.name());
    }

    fullTextQuery.setSort(sort);

    fullTextQuery.setFetchSize(50);
    fullTextQuery.setReadOnly(true);
    fullTextQuery.setCacheable(false);
    fullTextQuery.setCacheMode(CacheMode.IGNORE);

    final ScrollableResults result = fullTextQuery.scroll(ScrollMode.FORWARD_ONLY);

    stopWatch.stop();
    logger.debug(stopWatch.toString());

    return result;
}
From source file: eagle.service.generic.GenericEntityServiceResource.java
/**
 * @param query
 * @param startTime
 * @param endTime
 * @param pageSize
 * @param startRowkey
 * @param treeAgg
 * @param timeSeries
 * @param intervalmin
 * @param top
 * @param filterIfMissing
 * @param parallel
 * @param metricName
 * @param verbose
 * @return
 */
@GET
@Produces(MediaType.APPLICATION_JSON)
@SuppressWarnings("unchecked")
public GenericServiceAPIResponseEntity search(@QueryParam("query") String query,
        @QueryParam("startTime") String startTime, @QueryParam("endTime") String endTime,
        @QueryParam("pageSize") int pageSize, @QueryParam("startRowkey") String startRowkey,
        @QueryParam("treeAgg") boolean treeAgg, @QueryParam("timeSeries") boolean timeSeries,
        @QueryParam("intervalmin") long intervalmin, @QueryParam("top") int top,
        @QueryParam("filterIfMissing") boolean filterIfMissing, @QueryParam("parallel") int parallel,
        @QueryParam("metricName") String metricName, @QueryParam("verbose") Boolean verbose) {
    RawQuery rawQuery = RawQuery.build().query(query).startTime(startTime).endTime(endTime).pageSize(pageSize)
            .startRowkey(startRowkey).treeAgg(treeAgg).timeSeries(timeSeries).intervalMin(intervalmin).top(top)
            .filerIfMissing(filterIfMissing).parallel(parallel).metricName(metricName).verbose(verbose).done();

    QueryStatement queryStatement = new QueryStatement(rawQuery);
    GenericServiceAPIResponseEntity response = new GenericServiceAPIResponseEntity();
    Map<String, Object> meta = new HashMap<>();
    DataStorage dataStorage;
    StopWatch stopWatch = new StopWatch();
    try {
        stopWatch.start();
        dataStorage = DataStorageManager.getDataStorageByEagleConfig();
        if (dataStorage == null) {
            LOG.error("Data storage is null");
            throw new IllegalDataStorageException("data storage is null");
        }

        QueryResult<?> result = queryStatement.execute(dataStorage);
        if (result.isSuccess()) {
            meta.put(FIRST_TIMESTAMP, result.getFirstTimestamp());
            meta.put(LAST_TIMESTAMP, result.getLastTimestamp());
            meta.put(TOTAL_RESULTS, result.getSize());
            meta.put(ELAPSEDMS, stopWatch.getTime());
            response.setObj(result.getData());
            response.setType(result.getEntityType());
            response.setSuccess(true);
            response.setMeta(meta);
            return response;
        }
    } catch (Exception e) {
        response.setException(e);
        LOG.error(e.getMessage(), e);
    } finally {
        stopWatch.stop();
    }
    return response;
}
From source file: com.liferay.portal.lar.PermissionExporter.java
protected void exportUserPermissions(LayoutCache layoutCache, long companyId, long groupId,
        String resourceName, String resourcePrimKey, Element parentElement) throws Exception {

    StopWatch stopWatch = null;

    if (_log.isDebugEnabled()) {
        stopWatch = new StopWatch();

        stopWatch.start();
    }

    Element userPermissionsElement = SAXReaderUtil.createElement("user-permissions");

    List<User> users = layoutCache.getGroupUsers(groupId);

    for (User user : users) {
        String uuid = user.getUuid();

        Element userActionsElement = SAXReaderUtil.createElement("user-actions");

        List<Permission> permissions = PermissionLocalServiceUtil.getUserPermissions(user.getUserId(),
                companyId, resourceName, ResourceConstants.SCOPE_INDIVIDUAL, resourcePrimKey);

        List<String> actions = ResourceActionsUtil.getActions(permissions);

        for (String action : actions) {
            Element actionKeyElement = userActionsElement.addElement("action-key");

            actionKeyElement.addText(action);
        }

        if (!userActionsElement.elements().isEmpty()) {
            userActionsElement.addAttribute("uuid", uuid);

            userPermissionsElement.add(userActionsElement);
        }
    }

    if (!userPermissionsElement.elements().isEmpty()) {
        parentElement.add(userPermissionsElement);
    }

    if (_log.isDebugEnabled()) {
        _log.debug("Export user permissions for {" + resourceName + ", " + resourcePrimKey + "} with "
                + users.size() + " users takes " + stopWatch.getTime() + " ms");
    }
}
From source file: com.liferay.portal.security.permission.AdvancedPermissionChecker.java
public boolean hasPermission(long groupId, String name, String primKey, String actionId) {
    StopWatch stopWatch = null;

    if (_log.isDebugEnabled()) {
        stopWatch = new StopWatch();

        stopWatch.start();
    }

    Group group = null;

    // If the current group is a staging group, check the live group. If the
    // current group is a scope group for a layout, check the original
    // group.

    try {
        if (groupId > 0) {
            group = GroupLocalServiceUtil.getGroup(groupId);

            if (group.isUser() && (group.getClassPK() == getUserId())) {
                group = GroupLocalServiceUtil.getGroup(getCompanyId(), GroupConstants.USER_PERSONAL_SITE);

                groupId = group.getGroupId();
            }

            if (group.isLayout()) {
                Layout layout = LayoutLocalServiceUtil.getLayout(group.getClassPK());

                groupId = layout.getGroupId();

                group = GroupLocalServiceUtil.getGroup(groupId);
            }

            if (group.isStagingGroup()) {
                if (primKey.equals(String.valueOf(groupId))) {
                    primKey = String.valueOf(group.getLiveGroupId());
                }

                groupId = group.getLiveGroupId();
                group = group.getLiveGroup();
            }
        }
    } catch (Exception e) {
        _log.error(e, e);
    }

    Boolean value = PermissionCacheUtil.getPermission(user.getUserId(), signedIn, checkGuest, groupId, name,
            primKey, actionId);

    if (value == null) {
        try {
            value = Boolean.valueOf(hasPermissionImpl(groupId, name, primKey, actionId));

            if (_log.isDebugEnabled()) {
                _log.debug("Checking permission for " + groupId + " " + name + " " + primKey + " " + actionId
                        + " takes " + stopWatch.getTime() + " ms");
            }
        } finally {
            if (value == null) {
                value = Boolean.FALSE;
            }

            PermissionCacheUtil.putPermission(user.getUserId(), signedIn, checkGuest, groupId, name, primKey,
                    actionId, value);
        }
    }

    return value.booleanValue();
}
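Both Liferay examples share an idiom: the StopWatch is instantiated only when debug logging is enabled, so production calls skip the timing overhead entirely, and every later getTime() call sits behind the same isDebugEnabled() guard so the null reference is never dereferenced. A minimal sketch of the idiom (the boolean flag is a stand-in for a real logger's isDebugEnabled()):

import org.apache.commons.lang.time.StopWatch;

public class DebugOnlyTiming {
    // stand-in for a real logger check, e.g. _log.isDebugEnabled()
    private static final boolean DEBUG_ENABLED = true;

    public static void main(String[] args) throws InterruptedException {
        StopWatch stopWatch = null;

        if (DEBUG_ENABLED) {
            stopWatch = new StopWatch();
            stopWatch.start();
        }

        Thread.sleep(75); // the work being (conditionally) timed

        if (DEBUG_ENABLED) { // same guard, so stopWatch is non-null here
            System.out.println("operation takes " + stopWatch.getTime() + " ms");
        }
    }
}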