List of usage examples for java.lang.reflect.Method#toString()
public String toString()
From source file:net.nelz.simplesm.aop.CacheBase.java
/**
 * Returns the join-point argument at the given position, validating the index
 * and rejecting null arguments.
 *
 * @param index         zero-based position of the argument to fetch
 * @param jp            the intercepted join point supplying the argument array
 * @param methodToCache the advised method, used only for error messages
 * @return the non-null argument at {@code index}
 * @throws Exception if the index is negative, out of range, or the argument is null
 */
protected Object getIndexObject(final int index, final JoinPoint jp, final Method methodToCache)
        throws Exception {
    if (index < 0) {
        throw new InvalidParameterException(String.format("An index of %s is invalid", index));
    }
    final Object[] arguments = jp.getArgs();
    if (arguments.length <= index) {
        throw new InvalidParameterException(
                String.format("An index of %s is too big for the number of arguments in [%s]", index,
                        methodToCache.toString()));
    }
    final Object candidate = arguments[index];
    if (candidate == null) {
        throw new InvalidParameterException(String.format(
                "The argument passed into [%s] at index %s is null.", methodToCache.toString(), index));
    }
    return candidate;
}
From source file:org.flite.cach3.aop.UpdateMultiCacheAdvice.java
protected void updateCache(final List<String> cacheKeys, final List<Object> returnList, final Method methodToCache, final int jitter, final int expiration, final MemcachedClientIF cache, final Class dataTemplateType) { if (returnList.size() != cacheKeys.size()) { throw new InvalidAnnotationException(String.format( "The key generation objects, and the resulting objects do not match in size for [%s].", methodToCache.toString())); }// w w w . j av a 2 s.c o m for (int ix = 0; ix < returnList.size(); ix++) { final Object result = returnList.get(ix); final String cacheKey = cacheKeys.get(ix); final Object cacheObject = result != null ? applyDataTemplateType(result, dataTemplateType) : new PertinentNegativeNull(); boolean cacheable = true; if (cacheObject instanceof CacheConditionally) { cacheable = ((CacheConditionally) cacheObject).isCacheable(); } if (cacheable) { cache.set(cacheKey, calculateJitteredExpiration(expiration, jitter), cacheObject); } } }
From source file:no.sesat.search.datamodel.DataModelTest.java
private void ensureJavaBeanAPI(final Class<?> cls) throws IntrospectionException { LOG.info("ensuring pure JavaBean API on " + cls.getSimpleName()); // collect the getter and setters final Collection<Method> propertyMethods = new ArrayList<Method>(); collectProperties(cls, propertyMethods); final Collection<String> gettersSatisfied = new ArrayList<String>(); final Collection<String> settersSatisfied = new ArrayList<String>(); // now scan the methods for (Method method : cls.getMethods()) { final boolean setter = method.getName().startsWith("set"); final String propertyName = method.getName().replaceFirst("is|get|set", ""); final Collection<String> propertiesSatisfied = setter ? settersSatisfied : gettersSatisfied; if (!propertiesSatisfied.contains(propertyName)) { LOG.info(" method --> " + method.getName()); assert propertyMethods.contains(method) : method.toString() + ASSERT_METHOD_NOT_GETTER_OR_SETTER + propertyMethods;/* w ww . ja va 2 s. c om*/ propertiesSatisfied.add(propertyName); } else { LOG.info(" method --> " + method.getName() + " previously satisfied"); } } LOG.info(cls.getSimpleName() + " passed API test"); }
From source file:com.cloudera.impala.catalog.CatalogServiceCatalog.java
/** * Returns a list of Impala Functions, one per compatible "evaluate" method in the UDF * class referred to by the given Java function. This method copies the UDF Jar * referenced by "function" to a temporary file in "LOCAL_LIBRARY_PATH" and loads it * into the jvm. Then we scan all the methods in the class using reflection and extract * those methods and create corresponding Impala functions. Currently Impala supports * only "JAR" files for symbols and also a single Jar containing all the dependent * classes rather than a set of Jar files. *//* ww w. ja va 2 s.c om*/ public static List<Function> extractFunctions(String db, org.apache.hadoop.hive.metastore.api.Function function) throws ImpalaRuntimeException { List<Function> result = Lists.newArrayList(); List<String> addedSignatures = Lists.newArrayList(); boolean compatible = true; StringBuilder warnMessage = new StringBuilder(); if (function.getFunctionType() != FunctionType.JAVA) { compatible = false; warnMessage.append("Function type: " + function.getFunctionType().name() + " is not supported. Only " + FunctionType.JAVA.name() + " functions " + "are supported."); } if (function.getResourceUrisSize() != 1) { compatible = false; List<String> resourceUris = Lists.newArrayList(); for (ResourceUri resource : function.getResourceUris()) { resourceUris.add(resource.getUri()); } warnMessage.append("Impala does not support multiple Jars for dependencies." + "(" + Joiner.on(",").join(resourceUris) + ") "); } if (function.getResourceUris().get(0).getResourceType() != ResourceType.JAR) { compatible = false; warnMessage.append("Function binary type: " + function.getResourceUris().get(0).getResourceType().name() + " is not supported. Only " + ResourceType.JAR.name() + " type is supported."); } if (!compatible) { LOG.warn("Skipping load of incompatible Java function: " + function.getFunctionName() + ". 
" + warnMessage.toString()); return result; } String jarUri = function.getResourceUris().get(0).getUri(); Class<?> udfClass = null; try { Path localJarPath = new Path(LOCAL_LIBRARY_PATH, UUID.randomUUID().toString() + ".jar"); if (!FileSystemUtil.copyToLocal(new Path(jarUri), localJarPath)) { String errorMsg = "Error loading Java function: " + db + "." + function.getFunctionName() + ". Couldn't copy " + jarUri + " to local path: " + localJarPath.toString(); LOG.error(errorMsg); throw new ImpalaRuntimeException(errorMsg); } URL[] classLoaderUrls = new URL[] { new URL(localJarPath.toString()) }; URLClassLoader urlClassLoader = new URLClassLoader(classLoaderUrls); udfClass = urlClassLoader.loadClass(function.getClassName()); // Check if the class is of UDF type. Currently we don't support other functions // TODO: Remove this once we support Java UDAF/UDTF if (FunctionUtils.getUDFClassType(udfClass) != FunctionUtils.UDFClassType.UDF) { LOG.warn("Ignoring load of incompatible Java function: " + function.getFunctionName() + " as " + FunctionUtils.getUDFClassType(udfClass) + " is not a supported type. Only UDFs are supported"); return result; } // Load each method in the UDF class and create the corresponding Impala Function // object. for (Method m : udfClass.getMethods()) { if (!m.getName().equals("evaluate")) continue; Function fn = ScalarFunction.fromHiveFunction(db, function.getFunctionName(), function.getClassName(), m.getParameterTypes(), m.getReturnType(), jarUri); if (fn == null) { LOG.warn("Ignoring incompatible method: " + m.toString() + " during load of " + "Hive UDF:" + function.getFunctionName() + " from " + udfClass); continue; } if (!addedSignatures.contains(fn.signatureString())) { result.add(fn); addedSignatures.add(fn.signatureString()); } } } catch (ClassNotFoundException c) { String errorMsg = "Error loading Java function: " + db + "." + function.getFunctionName() + ". 
Symbol class " + udfClass + "not found in Jar: " + jarUri; LOG.error(errorMsg); throw new ImpalaRuntimeException(errorMsg, c); } catch (Exception e) { LOG.error("Skipping function load: " + function.getFunctionName(), e); throw new ImpalaRuntimeException("Error extracting functions", e); } return result; }
From source file:net.nelz.simplesm.aop.UpdateMultiCacheAdvice.java
/**
 * Resolves the key object for a multi-cache update and verifies it is a List.
 * When {@code keyIndex} is -1 the advised method's return value supplies the keys;
 * otherwise the argument at {@code keyIndex} does.
 *
 * @param keyIndex      argument index holding the key list, or -1 for the return value
 * @param returnValue   the advised method's return value
 * @param jp            the intercepted join point
 * @param methodToCache the advised method, used only for error messages
 * @return the resolved key objects
 * @throws Exception if the resolved object is not a List
 */
protected List<Object> getKeyObjects(final int keyIndex, final Object returnValue, final JoinPoint jp,
        final Method methodToCache) throws Exception {
    final Object resolved;
    if (keyIndex == -1) {
        resolved = validateReturnValueAsKeyObject(returnValue, methodToCache);
    } else {
        resolved = getIndexObject(keyIndex, jp, methodToCache);
    }
    if (!verifyTypeIsList(resolved.getClass())) {
        throw new InvalidAnnotationException(String.format(
                "The parameter object found at dataIndex [%s] is not a [%s]. "
                        + "[%s] does not fulfill the requirements.",
                UpdateMultiCache.class.getName(), List.class.getName(), methodToCache.toString()));
    }
    return (List<Object>) resolved;
}
From source file:org.cruxframework.crux.core.server.rest.core.registry.ResourceRegistry.java
/**
 * Registers every non-synthetic public method of {@code clazz} as a REST endpoint,
 * grouping registrations by path expression and then validating conditional-write
 * methods and building the CORS allowed-methods list.
 *
 * @param clazz resource class to scan
 * @param base  base path the resource is mounted under
 */
protected void addResource(Class<?> clazz, String base) {
    Set<String> restMethodNames = new HashSet<String>();
    Map<String, List<RestMethodRegistrationInfo>> validRestMethods =
            new HashMap<String, List<RestMethodRegistrationInfo>>();
    for (Method candidate : clazz.getMethods()) {
        // Skip compiler-generated bridge/synthetic methods.
        if (candidate.isSynthetic()) {
            continue;
        }
        RestMethodRegistrationInfo registration;
        try {
            registration = processMethod(base, clazz, candidate, restMethodNames);
        } catch (Exception e) {
            throw new InternalServerErrorException("Error to processMethod: " + candidate.toString(),
                    "Can not execute requested service", e);
        }
        // A null registration means the method is not a REST endpoint.
        if (registration == null) {
            continue;
        }
        List<RestMethodRegistrationInfo> methodsForPath = validRestMethods
                .get(registration.pathExpression);
        if (methodsForPath == null) {
            methodsForPath = new ArrayList<RestMethodRegistrationInfo>();
            validRestMethods.put(registration.pathExpression, methodsForPath);
        }
        methodsForPath.add(registration);
    }
    checkConditionalWriteMethods(validRestMethods);
    createCorsAllowedMethodsList(validRestMethods);
}
From source file:org.lexevs.cache.AbstractMethodCachingBean.java
/**
 * Around-advice body: serves the intercepted method's result from the cache when
 * possible, otherwise proceeds and stores the outcome.
 *
 * Flow: bail out if caching is disabled; reject methods annotated with both
 * {@code @CacheMethod} and {@code @ClearCache}; handle {@code @ClearCache} methods;
 * otherwise look up by a key built from target class, method name and arguments,
 * returning the cached value on a hit and caching the proceed() result on a miss
 * (unless the thread is in a cache-clearing state and isolation is enabled).
 *
 * @param joinPoint the intercepted invocation (pjp)
 * @return the cached or freshly-computed result
 * @throws Throwable whatever the advised method throws
 */
protected Object doCacheMethod(T joinPoint) throws Throwable {
    // Bypass caching entirely when the session-level switch is off.
    if (!CacheSessionManager.getCachingStatus()) {
        return this.proceed(joinPoint);
    }
    Method method = this.getMethod(joinPoint);
    // @CacheMethod and @ClearCache are mutually exclusive on a single method.
    if (method.isAnnotationPresent(CacheMethod.class) && method.isAnnotationPresent(ClearCache.class)) {
        throw new RuntimeException("Cannot both Cache method results and clear the Cache in "
                + "the same method. Please only use @CacheMethod OR @ClearCache -- not both. "
                + " This occured on method: " + method.toString());
    }
    Object target = this.getTarget(joinPoint);
    Annotation[][] parameterAnnotations = method.getParameterAnnotations();
    // Cache key derived from the target class, method name and (annotated) arguments.
    String key = this.getKeyFromMethod(target.getClass().getName(), method.getName(),
            this.getArguments(joinPoint), parameterAnnotations);
    Cacheable cacheableAnnotation = AnnotationUtils.findAnnotation(target.getClass(), Cacheable.class);
    CacheMethod cacheMethodAnnotation = AnnotationUtils.findAnnotation(method, CacheMethod.class);
    CacheWrapper<String, Object> cache = this.getCacheFromName(cacheableAnnotation.cacheName(), true);
    // @ClearCache methods flush instead of reading/writing the cache.
    if (method.isAnnotationPresent(ClearCache.class)) {
        return this.clearCache(joinPoint, method);
    }
    Object value = cache.get(key);
    if (value != null) {
        this.logger.debug("Cache hit on: " + key);
        // Nulls are cached via a placeholder object; unwrap it back to null here.
        if (value.equals(NULL_VALUE_CACHE_PLACEHOLDER)) {
            return null;
        } else {
            return returnResult(value, cacheMethodAnnotation);
        }
    } else {
        this.logger.debug("Caching miss on: " + key);
    }
    Object result = this.proceed(joinPoint);
    // Only store the result when cache-clear isolation is off, or when this thread
    // is not currently inside an @Clear operation.
    if (this.isolateCachesOnClear == false
            || (this.isolateCachesOnClear == true
                    && (this.cacheRegistry.getInThreadCacheClearingState() == null
                            || this.cacheRegistry.getInThreadCacheClearingState() == false))) {
        this.logger.debug("Thread is not in @Clear state, caching can continue for key: " + key);
        if (result != null) {
            cache.put(key, result);
        } else {
            // Cache a placeholder so future lookups can distinguish "cached null"
            // from "not cached".
            cache.put(key, NULL_VALUE_CACHE_PLACEHOLDER);
        }
    } else {
        this.logger.debug("Thread is in @Clear state, caching skipped for key: " + key);
    }
    return returnResult(result, cacheMethodAnnotation);
}
From source file:org.apache.impala.catalog.CatalogServiceCatalog.java
/** * Returns a list of Impala Functions, one per compatible "evaluate" method in the UDF * class referred to by the given Java function. This method copies the UDF Jar * referenced by "function" to a temporary file in localLibraryPath_ and loads it * into the jvm. Then we scan all the methods in the class using reflection and extract * those methods and create corresponding Impala functions. Currently Impala supports * only "JAR" files for symbols and also a single Jar containing all the dependent * classes rather than a set of Jar files. *//*from w w w . j a v a 2 s . c om*/ public static List<Function> extractFunctions(String db, org.apache.hadoop.hive.metastore.api.Function function) throws ImpalaRuntimeException { List<Function> result = Lists.newArrayList(); List<String> addedSignatures = Lists.newArrayList(); StringBuilder warnMessage = new StringBuilder(); if (!isFunctionCompatible(function, warnMessage)) { LOG.warn("Skipping load of incompatible function: " + function.getFunctionName() + ". " + warnMessage.toString()); return result; } String jarUri = function.getResourceUris().get(0).getUri(); Class<?> udfClass = null; Path localJarPath = null; try { localJarPath = new Path(localLibraryPath_, UUID.randomUUID().toString() + ".jar"); try { FileSystemUtil.copyToLocal(new Path(jarUri), localJarPath); } catch (IOException e) { String errorMsg = "Error loading Java function: " + db + "." + function.getFunctionName() + ". Couldn't copy " + jarUri + " to local path: " + localJarPath.toString(); LOG.error(errorMsg, e); throw new ImpalaRuntimeException(errorMsg); } URL[] classLoaderUrls = new URL[] { new URL(localJarPath.toString()) }; URLClassLoader urlClassLoader = new URLClassLoader(classLoaderUrls); udfClass = urlClassLoader.loadClass(function.getClassName()); // Check if the class is of UDF type. 
Currently we don't support other functions // TODO: Remove this once we support Java UDAF/UDTF if (FunctionUtils.getUDFClassType(udfClass) != FunctionUtils.UDFClassType.UDF) { LOG.warn("Ignoring load of incompatible Java function: " + function.getFunctionName() + " as " + FunctionUtils.getUDFClassType(udfClass) + " is not a supported type. Only UDFs are supported"); return result; } // Load each method in the UDF class and create the corresponding Impala Function // object. for (Method m : udfClass.getMethods()) { if (!m.getName().equals(UdfExecutor.UDF_FUNCTION_NAME)) continue; Function fn = ScalarFunction.fromHiveFunction(db, function.getFunctionName(), function.getClassName(), m.getParameterTypes(), m.getReturnType(), jarUri); if (fn == null) { LOG.warn("Ignoring incompatible method: " + m.toString() + " during load of " + "Hive UDF:" + function.getFunctionName() + " from " + udfClass); continue; } if (!addedSignatures.contains(fn.signatureString())) { result.add(fn); addedSignatures.add(fn.signatureString()); } } } catch (ClassNotFoundException c) { String errorMsg = "Error loading Java function: " + db + "." + function.getFunctionName() + ". Symbol class " + udfClass + "not found in Jar: " + jarUri; LOG.error(errorMsg); throw new ImpalaRuntimeException(errorMsg, c); } catch (Exception e) { LOG.error("Skipping function load: " + function.getFunctionName(), e); throw new ImpalaRuntimeException("Error extracting functions", e); } catch (LinkageError e) { String errorMsg = "Error resolving dependencies for Java function: " + db + "." + function.getFunctionName(); LOG.error(errorMsg); throw new ImpalaRuntimeException(errorMsg, e); } finally { if (localJarPath != null) FileSystemUtil.deleteIfExists(localJarPath); } return result; }
From source file:net.nelz.simplesm.aop.ReadThroughMultiCacheAdvice.java
/**
 * Resolves the key object for a multi-cache read from the argument at
 * {@code keyIndex} and verifies it is a List.
 *
 * @param keyIndex argument index holding the key list
 * @param jp       the intercepted join point
 * @param method   the advised method, used only for error messages
 * @return the resolved key objects
 * @throws Exception if the resolved object is not a List
 */
protected List<Object> getKeyObjectList(final int keyIndex, final JoinPoint jp, final Method method)
        throws Exception {
    final Object resolved = getIndexObject(keyIndex, jp, method);
    if (!verifyTypeIsList(resolved.getClass())) {
        throw new InvalidAnnotationException(String.format(
                "The parameter object found at dataIndex [%s] is not a [%s]. "
                        + "[%s] does not fulfill the requirements.",
                ReadThroughMultiCache.class.getName(), List.class.getName(), method.toString()));
    }
    return (List<Object>) resolved;
}
From source file:org.flite.cach3.aop.L2UpdateAssignCacheAdvice.java
@AfterReturning(pointcut = "updateL2Assign()", returning = "retVal") public Object cacheUpdateL2Assign(final JoinPoint jp, final Object retVal) throws Throwable { // If we've disabled the caching programmatically (or via properties file) just flow through. if (isCacheDisabled()) { LOG.debug("Caching is disabled."); return retVal; }/*w ww . ja v a2s. co m*/ // This is injected caching. If anything goes wrong in the caching, LOG the crap outta it, // but do not let it surface up past the AOP injection itself. try { final Method methodToCache = getMethodToCache(jp); final L2UpdateAssignCache annotation = methodToCache.getAnnotation(L2UpdateAssignCache.class); final AnnotationInfo info = getAnnotationInfo(annotation, methodToCache.getName()); final String cacheKey = buildCacheKey(info.getAsString(AType.ASSIGN_KEY), info.getAsString(AType.NAMESPACE), info.getAsString(AType.KEY_PREFIX)); final int dataIndex = info.getAsInteger(AType.DATA_INDEX, -2).intValue(); final Object dataObject = dataIndex == -1 ? retVal : CacheBase.getIndexObject(dataIndex, jp.getArgs(), methodToCache.toString()); final Object submission = (dataObject == null) ? new PertinentNegativeNull() : dataObject; boolean cacheable = true; if (submission instanceof CacheConditionally) { cacheable = ((CacheConditionally) submission).isCacheable(); } if (cacheable) { getCache().setBulk(ImmutableMap.of(cacheKey, submission), info.<Duration>getAsType(AType.WINDOW, null)); } } catch (Exception ex) { if (LOG.isDebugEnabled()) { LOG.warn("Caching on " + jp.toShortString() + " aborted due to an error.", ex); } else { LOG.warn("Caching on " + jp.toShortString() + " aborted due to an error: " + ex.getMessage()); } } return retVal; }