List of usage examples for java.lang.Class.getModifiers()
@HotSpotIntrinsicCandidate public native int getModifiers();
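All of the examples on this page follow the same basic pattern: call getModifiers() to obtain the modifier bit mask and decode it with the static helpers in java.lang.reflect.Modifier. As a minimal, self-contained sketch before the larger examples (the class name ModifiersDemo is only an illustrative placeholder):

import java.lang.reflect.Modifier;
import java.util.AbstractList;

// Decode the bit mask returned by Class.getModifiers() using java.lang.reflect.Modifier.
public class ModifiersDemo {
    public static void main(String[] args) {
        int mods = AbstractList.class.getModifiers();
        System.out.println(Modifier.toString(mods));    // "public abstract"
        System.out.println(Modifier.isPublic(mods));    // true
        System.out.println(Modifier.isAbstract(mods));  // true
        System.out.println(Modifier.isFinal(mods));     // false
    }
}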
From source file:ca.sqlpower.object.annotation.SPAnnotationProcessor.java
/**
 * Generates and returns source code for importing packages that are
 * required by the persister helper this class is generating.
 *
 * @param visitedClass
 *            The {@link SPObject} class that is being visited by the
 *            annotation processor.
 * @param constructorImports
 *            The {@link Set} of packages that visitedClass uses in its
 *            {@link Constructor} annotated constructor and need to be
 *            imported.
 * @param mutatorImports
 *            The {@link Multimap} of setter methods to packages that
 *            visitedClass uses in its {@link Mutator} annotated methods and
 *            needs to be imported.
 * @return The source code for the generated imports.
 */
private String generateImports(Class<? extends SPObject> visitedClass, Set<String> constructorImports,
        Multimap<String, String> mutatorImports) {
    final String helperPackage = visitedClass.getPackage().getName() + "."
            + PersisterHelperFinder.GENERATED_PACKAGE_NAME;

    // Using a TreeSet here to sort imports alphabetically.
    Set<String> allImports = new TreeSet<String>();
    if (!Modifier.isAbstract(visitedClass.getModifiers())) {
        allImports.addAll(constructorImports);
    }
    allImports.addAll(mutatorImports.values());

    StringBuilder sb = new StringBuilder();

    // XXX Need to import any additional classes this generated persister helper
    // class requires, aside from those needed in visitedClass.
    allImports.add(List.class.getName());
    allImports.add(visitedClass.getName());
    allImports.add(SPPersistenceException.class.getName());
    allImports.add(SPPersister.class.getName());
    allImports.add(SessionPersisterSuperConverter.class.getName());
    allImports.add(SPObject.class.getName());
    allImports.add(DataType.class.getName());
    allImports.addAll(importedClassNames);

    for (String pkg : allImports) {
        // No need to import java.lang as it is automatically imported.
        // No need to import package if the persister helper is already
        // in the package.
        // Also want to keep array classes out.
        if (!pkg.startsWith("java.lang") && !pkg.startsWith("[L")) {
            // Nested classes, enums, etc. will be separated by the "$"
            // character but we need to change them to "." so it can be
            // imported correctly.
            String pkgName = pkg.replaceAll("\\$", ".");

            // Only import the package if it is not the same one
            // that the persister helper exists in.
            int index = pkgName.lastIndexOf(".");
            if (index == -1) {
                index = pkgName.length();
            }
            if (!pkgName.substring(0, index).equals(helperPackage)) {
                niprintln(sb, "import " + pkgName + ";");
            }
        }
    }

    return sb.toString();
}
From source file:org.apache.flink.api.java.typeutils.TypeExtractor.java
@SuppressWarnings("unchecked")
protected <OUT, IN1, IN2> TypeInformation<OUT> analyzePojo(Class<OUT> clazz, ArrayList<Type> typeHierarchy,
        ParameterizedType parameterizedType, TypeInformation<IN1> in1Type, TypeInformation<IN2> in2Type) {
    if (!Modifier.isPublic(clazz.getModifiers())) {
        LOG.info("Class " + clazz.getName()
                + " is not public, cannot treat it as a POJO type. Will be handled as GenericType");
        return new GenericTypeInfo<OUT>(clazz);
    }

    // add the hierarchy of the POJO itself if it is generic
    if (parameterizedType != null) {
        getTypeHierarchy(typeHierarchy, parameterizedType, Object.class);
    }
    // create a type hierarchy, if the incoming only contains the most bottom one or none.
    else if (typeHierarchy.size() <= 1) {
        getTypeHierarchy(typeHierarchy, clazz, Object.class);
    }

    List<Field> fields = getAllDeclaredFields(clazz, false);
    if (fields.size() == 0) {
        LOG.info("No fields detected for " + clazz
                + ". Cannot be used as a PojoType. Will be handled as GenericType");
        return new GenericTypeInfo<OUT>(clazz);
    }

    List<PojoField> pojoFields = new ArrayList<PojoField>();
    for (Field field : fields) {
        Type fieldType = field.getGenericType();
        if (!isValidPojoField(field, clazz, typeHierarchy)) {
            LOG.info(clazz + " is not a valid POJO type");
            return null;
        }
        try {
            ArrayList<Type> fieldTypeHierarchy = new ArrayList<Type>(typeHierarchy);
            fieldTypeHierarchy.add(fieldType);
            TypeInformation<?> ti = createTypeInfoWithTypeHierarchy(fieldTypeHierarchy, fieldType, in1Type,
                    in2Type);
            pojoFields.add(new PojoField(field, ti));
        } catch (InvalidTypesException e) {
            Class<?> genericClass = Object.class;
            if (isClassType(fieldType)) {
                genericClass = typeToClass(fieldType);
            }
            pojoFields.add(new PojoField(field, new GenericTypeInfo<OUT>((Class<OUT>) genericClass)));
        }
    }

    CompositeType<OUT> pojoType = new PojoTypeInfo<OUT>(clazz, pojoFields);

    //
    // Validate the correctness of the pojo.
    // returning "null" will result create a generic type information.
    //
    List<Method> methods = getAllDeclaredMethods(clazz);
    for (Method method : methods) {
        if (method.getName().equals("readObject") || method.getName().equals("writeObject")) {
            LOG.info(clazz + " contains custom serialization methods we do not call.");
            return null;
        }
    }

    // Try retrieving the default constructor, if it does not have one
    // we cannot use this because the serializer uses it.
    Constructor defaultConstructor = null;
    try {
        defaultConstructor = clazz.getDeclaredConstructor();
    } catch (NoSuchMethodException e) {
        if (clazz.isInterface() || Modifier.isAbstract(clazz.getModifiers())) {
            LOG.info(clazz + " is abstract or an interface, having a concrete "
                    + "type can increase performance.");
        } else {
            LOG.info(clazz + " must have a default constructor to be used as a POJO.");
            return null;
        }
    }
    if (defaultConstructor != null && !Modifier.isPublic(defaultConstructor.getModifiers())) {
        LOG.info("The default constructor of " + clazz + " should be Public to be used as a POJO.");
        return null;
    }

    // everything is checked, we return the pojo
    return pojoType;
}
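The Flink example above checks modifiers at two levels: the class itself (Modifier.isPublic, Modifier.isAbstract) and its default constructor (Modifier.isPublic on Constructor.getModifiers()). A minimal, self-contained sketch of that combined check follows; the helper class PojoCheckSketch is an illustrative assumption, not part of Flink:

import java.lang.reflect.Constructor;
import java.lang.reflect.Modifier;
import java.util.ArrayList;

// A type is treated as an "instantiable POJO" here only if the class is public,
// not abstract or an interface, and exposes a public no-argument constructor.
public class PojoCheckSketch {
    static boolean looksLikeInstantiablePojo(Class<?> clazz) {
        int classMods = clazz.getModifiers();
        if (!Modifier.isPublic(classMods) || Modifier.isAbstract(classMods) || clazz.isInterface()) {
            return false;
        }
        try {
            Constructor<?> ctor = clazz.getDeclaredConstructor();
            return Modifier.isPublic(ctor.getModifiers());
        } catch (NoSuchMethodException e) {
            return false; // no default constructor at all
        }
    }

    public static void main(String[] args) {
        System.out.println(looksLikeInstantiablePojo(ArrayList.class)); // true
        System.out.println(looksLikeInstantiablePojo(Number.class));    // false (abstract)
    }
}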
From source file:org.seasar.s2click.S2ClickConfigService.java
private String getPageClassName(String pagePath, String pagesPackage) {
    String packageName = pagesPackage + ".";
    String className = "";

    // Strip off .htm extension
    String path = pagePath.substring(0, pagePath.lastIndexOf("."));

    if (path.indexOf("/") != -1) {
        StringTokenizer tokenizer = new StringTokenizer(path, "/");
        while (tokenizer.hasMoreTokens()) {
            String token = tokenizer.nextToken();
            if (tokenizer.hasMoreTokens()) {
                packageName = packageName + token + ".";
            } else {
                className = token;
            }
        }
    } else {
        className = path;
    }

    StringTokenizer tokenizer = new StringTokenizer(className, "_-");
    className = "";
    while (tokenizer.hasMoreTokens()) {
        String token = tokenizer.nextToken();
        token = Character.toUpperCase(token.charAt(0)) + token.substring(1);
        className += token;
    }

    className = packageName + className;

    // if (!className.endsWith("Page")) {
    //     className = className + "Page";
    // }
    // return className;

    Class pageClass = loadClass(className);
    if (pageClass != null) {
        if (!Page.class.isAssignableFrom(pageClass)) {
            String msg = "Automapped page class " + className + " is not a subclass of net.sf.click.Page";
            throw new RuntimeException(msg);
        }
    } else {
        boolean classFound = false;
        if (!className.endsWith("Page")) {
            className = className + "Page";
            pageClass = loadClass(className);
            if (pageClass != null) {
                if (!Page.class.isAssignableFrom(pageClass)) {
                    String msg = "Automapped page class " + className
                            + " is not a subclass of net.sf.click.Page";
                    throw new RuntimeException(msg);
                }
                classFound = true;
            }
        }
        if (!classFound) {
            if (logService.isDebugEnabled()) {
                logService.debug(pagePath + " -> CLASS NOT FOUND");
            }
            if (logService.isTraceEnabled()) {
                logService.trace("class not found: " + className);
            }
            return null;
        }
    }

    // Skip abstract page classes.
    if (Modifier.isAbstract(pageClass.getModifiers())) {
        return null;
    }

    return className;
}
From source file:org.evosuite.setup.TestClusterGenerator.java
/**
 * All public methods defined directly in the SUT should be covered
 *
 * TODO: What if we use instrument_parent?
 *
 * @param targetClass
 */
@SuppressWarnings("unchecked")
private void initializeTargetMethods() throws RuntimeException, ClassNotFoundException {
    logger.info("Analyzing target class");
    Class<?> targetClass = Properties.getTargetClass();

    TestCluster cluster = TestCluster.getInstance();

    Set<Class<?>> targetClasses = new LinkedHashSet<Class<?>>();
    if (targetClass == null) {
        throw new RuntimeException("Failed to load " + Properties.TARGET_CLASS);
    }
    targetClasses.add(targetClass);
    addDeclaredClasses(targetClasses, targetClass);
    if (Modifier.isAbstract(targetClass.getModifiers())) {
        logger.info("SUT is an abstract class");
        Set<Class<?>> subclasses = getConcreteClasses(targetClass, inheritanceTree);
        logger.info("Found {} concrete subclasses", subclasses.size());
        targetClasses.addAll(subclasses);
    }

    // To make sure we also have anonymous inner classes double check inner classes using ASM
    ClassNode targetClassNode = DependencyAnalysis.getClassNode(Properties.TARGET_CLASS);
    Queue<InnerClassNode> innerClasses = new LinkedList<InnerClassNode>();
    innerClasses.addAll(targetClassNode.innerClasses);
    while (!innerClasses.isEmpty()) {
        InnerClassNode icn = innerClasses.poll();
        try {
            logger.debug("Loading inner class: {}, {},{}", icn.innerName, icn.name, icn.outerName);
            String innerClassName = ResourceList.getClassNameFromResourcePath(icn.name);
            Class<?> innerClass = TestGenerationContext.getInstance().getClassLoaderForSUT()
                    .loadClass(innerClassName);
            //if (!canUse(innerClass))
            //    continue;

            // Sometimes strange things appear such as Map$Entry
            if (!targetClasses.contains(innerClass)) {
                // && !innerClassName.matches(".*\\$\\d+(\\$.*)?$")) {
                logger.info("Adding inner class {}", innerClassName);
                targetClasses.add(innerClass);
                ClassNode innerClassNode = DependencyAnalysis.getClassNode(innerClassName);
                innerClasses.addAll(innerClassNode.innerClasses);
            }
        } catch (Throwable t) {
            logger.error("Problem for {}. Error loading inner class: {}, {},{}: {}", Properties.TARGET_CLASS,
                    icn.innerName, icn.name, icn.outerName, t);
        }
    }

    for (Class<?> clazz : targetClasses) {
        logger.info("Current SUT class: {}", clazz);

        if (!canUse(clazz)) {
            logger.info("Cannot access SUT class: {}", clazz);
            continue;
        }

        // Add all constructors
        for (Constructor<?> constructor : getConstructors(clazz)) {
            logger.info("Checking target constructor {}", constructor);
            String name = "<init>" + org.objectweb.asm.Type.getConstructorDescriptor(constructor);

            if (Properties.TT) {
                String orig = name;
                name = BooleanTestabilityTransformation.getOriginalNameDesc(clazz.getName(), "<init>",
                        org.objectweb.asm.Type.getConstructorDescriptor(constructor));
                if (!orig.equals(name))
                    logger.info("TT name: {} -> {}", orig, name);
            }

            if (canUse(constructor)) {
                GenericConstructor genericConstructor = new GenericConstructor(constructor, clazz);
                cluster.addTestCall(genericConstructor);
                // TODO: Add types!
                cluster.addGenerator(new GenericClass(clazz).getWithWildcardTypes(), genericConstructor);
                addDependencies(genericConstructor, 1);
                logger.debug("Keeping track of {}.{}{}", constructor.getDeclaringClass().getName(),
                        constructor.getName(), Type.getConstructorDescriptor(constructor));
            } else {
                logger.debug("Constructor cannot be used: {}", constructor);
            }
        }

        // Add all methods
        for (Method method : getMethods(clazz)) {
            logger.info("Checking target method {}", method);
            String name = method.getName() + org.objectweb.asm.Type.getMethodDescriptor(method);

            if (Properties.TT) {
                String orig = name;
                name = BooleanTestabilityTransformation.getOriginalNameDesc(clazz.getName(), method.getName(),
                        org.objectweb.asm.Type.getMethodDescriptor(method));
                if (!orig.equals(name))
                    logger.info("TT name: {} -> {}", orig, name);
            }

            if (canUse(method, clazz)) {
                logger.debug("Adding method {}.{}{}", clazz.getName(), method.getName(),
                        Type.getMethodDescriptor(method));

                GenericMethod genericMethod = new GenericMethod(method, clazz);
                cluster.addTestCall(genericMethod);
                cluster.addModifier(new GenericClass(clazz).getWithWildcardTypes(), genericMethod);
                addDependencies(genericMethod, 1);
                GenericClass retClass = new GenericClass(method.getReturnType());

                if (!retClass.isPrimitive() && !retClass.isVoid() && !retClass.isObject())
                    cluster.addGenerator(retClass.getWithWildcardTypes(), genericMethod);
            } else {
                logger.debug("Method cannot be used: {}", method);
            }
        }

        for (Field field : getFields(clazz)) {
            logger.info("Checking target field {}", field);

            if (canUse(field, clazz)) {
                GenericField genericField = new GenericField(field, clazz);
                addDependencies(genericField, 1);
                cluster.addGenerator(new GenericClass(field.getGenericType()).getWithWildcardTypes(),
                        genericField);
                logger.debug("Adding field {}", field);
                if (!Modifier.isFinal(field.getModifiers())) {
                    logger.debug("Is not final");
                    cluster.addTestCall(new GenericField(field, clazz));
                } else {
                    logger.debug("Is final");
                    if (Modifier.isStatic(field.getModifiers()) && !field.getType().isPrimitive()) {
                        logger.debug("Is static non-primitive");
                        /*
                         * With this we are trying to cover such cases:
                         *
                         * public static final DurationField INSTANCE = new MillisDurationField();
                         *
                         * private MillisDurationField() {
                         *     super();
                         * }
                         */
                        try {
                            Object o = field.get(null);
                            if (o == null) {
                                logger.info("Field is not yet initialized: {}", field);
                            } else {
                                Class<?> actualClass = o.getClass();
                                logger.debug("Actual class is {}", actualClass);
                                if (!actualClass.isAssignableFrom(genericField.getRawGeneratedType())
                                        && genericField.getRawGeneratedType().isAssignableFrom(actualClass)) {
                                    GenericField superClassField = new GenericField(field, clazz);
                                    cluster.addGenerator(new GenericClass(actualClass), superClassField);
                                }
                            }
                        } catch (IllegalAccessException e) {
                            // TODO Auto-generated catch block
                            e.printStackTrace();
                        }
                    }
                }
            } else {
                logger.debug("Can't use field {}", field);
            }
        }
        analyzedClasses.add(clazz);
        // TODO: Set to generic type rather than class?
        cluster.getAnalyzedClasses().add(clazz);
    }
    if (Properties.INSTRUMENT_PARENT) {
        for (String superClass : inheritanceTree.getSuperclasses(Properties.TARGET_CLASS)) {
            try {
                Class<?> superClazz = TestGenerationContext.getInstance().getClassLoaderForSUT()
                        .loadClass(superClass);
                dependencies.add(new Pair(0, superClazz));
            } catch (ClassNotFoundException e) {
                logger.error("Problem for {}. Class not found: {}", Properties.TARGET_CLASS, superClass, e);
            }
        }
    }

    if (Properties.HANDLE_STATIC_FIELDS) {
        GetStaticGraph getStaticGraph = GetStaticGraphGenerator.generate(Properties.TARGET_CLASS);

        Map<String, Set<String>> staticFields = getStaticGraph.getStaticFields();
        for (String className : staticFields.keySet()) {
            logger.info("Adding static fields to cluster for class {}", className);

            Class<?> clazz;
            try {
                clazz = getClass(className);
            } catch (ExceptionInInitializerError ex) {
                logger.debug("Class class init caused exception {}", className);
                continue;
            }
            if (clazz == null) {
                logger.debug("Class not found {}", className);
                continue;
            }

            if (!canUse(clazz))
                continue;

            Set<String> fields = staticFields.get(className);
            for (Field field : getFields(clazz)) {
                if (!canUse(field, clazz))
                    continue;

                if (fields.contains(field.getName())) {
                    if (!Modifier.isFinal(field.getModifiers())) {
                        logger.debug("Is not final");
                        cluster.addTestCall(new GenericField(field, clazz));
                    }
                }
            }
        }

        PutStaticMethodCollector collector = new PutStaticMethodCollector(Properties.TARGET_CLASS,
                staticFields);

        Set<MethodIdentifier> methodIdentifiers = collector.collectMethods();
        for (MethodIdentifier methodId : methodIdentifiers) {
            Class<?> clazz = getClass(methodId.getClassName());
            if (clazz == null)
                continue;

            if (!canUse(clazz))
                continue;

            Method method = getMethod(clazz, methodId.getMethodName(), methodId.getDesc());

            if (method == null)
                continue;

            GenericMethod genericMethod = new GenericMethod(method, clazz);

            cluster.addTestCall(genericMethod);
        }
    }
    logger.info("Finished analyzing target class");
}
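Besides Class.getModifiers(), the EvoSuite example above also inspects Field.getModifiers() to separate writable fields from static final constants. A minimal sketch of just that distinction, using the hypothetical class name FieldModifiersSketch (not EvoSuite code):

import java.lang.reflect.Field;
import java.lang.reflect.Modifier;

// Distinguish a static final constant from a writable field via Field.getModifiers().
public class FieldModifiersSketch {
    public static void main(String[] args) throws Exception {
        Field field = Integer.class.getField("MAX_VALUE");
        int mods = field.getModifiers();
        System.out.println(Modifier.toString(mods));                            // "public static final"
        System.out.println(Modifier.isStatic(mods) && Modifier.isFinal(mods));  // true -> treat as constant
    }
}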
From source file:org.apache.hive.beeline.BeeLine.java
private Driver[] scanDriversOLD(String line) {
    long start = System.currentTimeMillis();

    Set<String> paths = new HashSet<String>();
    Set driverClasses = new HashSet();

    for (StringTokenizer tok = new StringTokenizer(System.getProperty("java.ext.dirs"),
            System.getProperty("path.separator")); tok.hasMoreTokens();) {
        File[] files = new File(tok.nextToken()).listFiles();
        for (int i = 0; files != null && i < files.length; i++) {
            paths.add(files[i].getAbsolutePath());
        }
    }

    for (StringTokenizer tok = new StringTokenizer(System.getProperty("java.class.path"),
            System.getProperty("path.separator")); tok.hasMoreTokens();) {
        paths.add(new File(tok.nextToken()).getAbsolutePath());
    }

    for (Iterator<String> i = paths.iterator(); i.hasNext();) {
        File f = new File(i.next());
        output(getColorBuffer().pad(loc("scanning", f.getAbsolutePath()), 60), false);

        try (ZipFile zf = new ZipFile(f)) {
            int total = zf.size();
            int index = 0;

            for (Enumeration zfEnum = zf.entries(); zfEnum.hasMoreElements();) {
                ZipEntry entry = (ZipEntry) zfEnum.nextElement();
                String name = entry.getName();
                progress(index++, total);

                if (name.endsWith(".class")) {
                    name = name.replace('/', '.');
                    name = name.substring(0, name.length() - 6);

                    try {
                        // check for the string "driver" in the class
                        // to see if we should load it. Not perfect, but
                        // it is far too slow otherwise.
                        if (name.toLowerCase().indexOf("driver") != -1) {
                            Class c = Class.forName(name, false, getClass().getClassLoader());
                            if (Driver.class.isAssignableFrom(c) && !(Modifier.isAbstract(c.getModifiers()))) {
                                try {
                                    // load and initialize
                                    Class.forName(name);
                                } catch (Exception e) {
                                }
                                driverClasses.add(c.newInstance());
                            }
                        }
                    } catch (Throwable t) {
                    }
                }
            }
            progress(total, total);
        } catch (Exception e) {
        }
    }

    info("scan complete in " + (System.currentTimeMillis() - start) + "ms");
    return (Driver[]) driverClasses.toArray(new Driver[0]);
}
From source file:org.datavec.api.transform.TransformProcess.java
private static ObjectMapper reinitializeMapperWithSubtypes(ObjectMapper mapper) {
    //Register concrete subtypes for JSON serialization
    List<Class<?>> classes = Arrays.<Class<?>>asList(Transform.class, Condition.class, Filter.class,
            IReducer.class);
    List<String> classNames = new ArrayList<>(6);
    for (Class<?> c : classes)
        classNames.add(c.getName());

    // First: scan the classpath and find all instances of the 'baseClasses' classes
    if (subtypesClassCache == null) {
        List<Class<?>> interfaces = Arrays.<Class<?>>asList(Transform.class, Condition.class, Filter.class,
                IReducer.class);
        List<Class<?>> classesList = Arrays.<Class<?>>asList();

        Collection<URL> urls = ClasspathHelper.forClassLoader();
        List<URL> scanUrls = new ArrayList<>();
        for (URL u : urls) {
            String path = u.getPath();
            if (!path.matches(".*/jre/lib/.*jar")) { //Skip JRE/JDK JARs
                scanUrls.add(u);
            }
        }

        Reflections reflections = new Reflections(new ConfigurationBuilder()
                .filterInputsBy(new FilterBuilder().exclude("^(?!.*\\.class$).*$") //Consider only .class files (to avoid debug messages etc. on .dlls, etc
                        //Exclude the following: the assumption here is that no custom functionality will ever be present
                        // under these package name prefixes.
                        .exclude("^org.nd4j.*").exclude("^org.bytedeco.*") //JavaCPP
                        .exclude("^com.fasterxml.*") //Jackson
                        .exclude("^org.apache.*") //Apache commons, Spark, log4j etc
                        .exclude("^org.projectlombok.*").exclude("^com.twelvemonkeys.*").exclude("^org.joda.*")
                        .exclude("^org.slf4j.*").exclude("^com.google.*").exclude("^org.reflections.*")
                        .exclude("^ch.qos.*") //Logback
                ).addUrls(scanUrls).setScanners(new DataVecSubTypesScanner(interfaces, classesList)));

        org.reflections.Store store = reflections.getStore();

        Iterable<String> subtypesByName = store.getAll(DataVecSubTypesScanner.class.getSimpleName(),
                classNames);

        Set<? extends Class<?>> subtypeClasses = Sets.newHashSet(ReflectionUtils.forNames(subtypesByName));
        subtypesClassCache = new HashSet<>();
        for (Class<?> c : subtypeClasses) {
            if (Modifier.isAbstract(c.getModifiers()) || Modifier.isInterface(c.getModifiers())) {
                //log.info("Skipping abstract/interface: {}",c);
                continue;
            }
            subtypesClassCache.add(c);
        }
    }

    //Second: get all currently registered subtypes for this mapper
    Set<Class<?>> registeredSubtypes = new HashSet<>();
    for (Class<?> c : classes) {
        AnnotatedClass ac = AnnotatedClass.construct(c,
                mapper.getSerializationConfig().getAnnotationIntrospector(), null);
        Collection<NamedType> types = mapper.getSubtypeResolver().collectAndResolveSubtypes(ac,
                mapper.getSerializationConfig(), mapper.getSerializationConfig().getAnnotationIntrospector());
        for (NamedType nt : types) {
            registeredSubtypes.add(nt.getType());
        }
    }

    //Third: register all _concrete_ subtypes that are not already registered
    List<NamedType> toRegister = new ArrayList<>();
    for (Class<?> c : subtypesClassCache) {
        //Check if it's concrete or abstract...
        if (Modifier.isAbstract(c.getModifiers()) || Modifier.isInterface(c.getModifiers())) {
            //log.info("Skipping abstract/interface: {}",c);
            continue;
        }
        if (!registeredSubtypes.contains(c)) {
            String name;
            if (ClassUtils.isInnerClass(c)) {
                Class<?> c2 = c.getDeclaringClass();
                name = c2.getSimpleName() + "$" + c.getSimpleName();
            } else {
                name = c.getSimpleName();
            }
            toRegister.add(new NamedType(c, name));
            if (log.isDebugEnabled()) {
                for (Class<?> baseClass : classes) {
                    if (baseClass.isAssignableFrom(c)) {
                        log.debug("Registering class for JSON serialization: {} as subtype of {}", c.getName(),
                                baseClass.getName());
                        break;
                    }
                }
            }
        }
    }

    mapper.registerSubtypes(toRegister.toArray(new NamedType[toRegister.size()]));
    //Recreate the mapper (via copy), as mapper won't use registered subtypes after first use
    mapper = mapper.copy();
    return mapper;
}
From source file:org.apache.hive.beeline.BeeLine.java
Driver[] scanDrivers(boolean knownOnly) throws IOException {
    long start = System.currentTimeMillis();

    Set<String> classNames = new HashSet<String>();

    if (!knownOnly) {
        classNames.addAll(Arrays.asList(ClassNameCompleter.getClassNames()));
    }
    classNames.addAll(KNOWN_DRIVERS);

    Set driverClasses = new HashSet();

    for (Iterator<String> i = classNames.iterator(); i.hasNext();) {
        String className = i.next().toString();

        if (className.toLowerCase().indexOf("driver") == -1) {
            continue;
        }

        try {
            Class c = Class.forName(className, false, Thread.currentThread().getContextClassLoader());
            if (!Driver.class.isAssignableFrom(c)) {
                continue;
            }

            if (Modifier.isAbstract(c.getModifiers())) {
                continue;
            }

            // now instantiate and initialize it
            driverClasses.add(c.newInstance());
        } catch (Throwable t) {
        }
    }
    info("scan complete in " + (System.currentTimeMillis() - start) + "ms");
    return (Driver[]) driverClasses.toArray(new Driver[0]);
}
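Both BeeLine scanners (scanDriversOLD and scanDrivers) apply the same guard before calling newInstance(): only concrete java.sql.Driver implementations are instantiated, since abstract classes and interfaces cannot be created reflectively. A minimal sketch of that guard in isolation; DriverFilterSketch is an illustrative name, not BeeLine code:

import java.lang.reflect.Modifier;
import java.sql.Driver;

// Only concrete classes that implement java.sql.Driver pass the filter.
public class DriverFilterSketch {
    static boolean isInstantiableDriver(Class<?> candidate) {
        return Driver.class.isAssignableFrom(candidate)
                && !candidate.isInterface()
                && !Modifier.isAbstract(candidate.getModifiers());
    }

    public static void main(String[] args) {
        System.out.println(isInstantiableDriver(Driver.class)); // false (interface)
        System.out.println(isInstantiableDriver(String.class)); // false (not a Driver)
    }
}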
From source file:adalid.core.EntityAtlas.java
@SuppressWarnings("deprecation")
void initialiseFields(Class<?> clazz) {
    track("initialiseFields", _declaringArtifact, clazz.getSimpleName());
    Class<?> c;
    int d, r;
    String name;
    String key;
    String pattern = "there are several fields for operation {0}";
    String message;
    Class<?> type;
    Class<?> decl;
    Class<?> operationClass;
    Field operationField;
    int modifiers;
    boolean restricted;
    Object o;
    int depth = _declaringArtifact.depth();
    int round = _declaringArtifact.round();
    Class<?>[] classes = new Class<?>[] { Property.class, Key.class, Tab.class, View.class, Instance.class,
            NamedValue.class, Expression.class, Transition.class, Operation.class, Trigger.class };
    Class<?> dac = _declaringArtifact.getClass();
    Class<?> top = Entity.class;
    int i = ArrayUtils.indexOf(classes, clazz);
    if (i != ArrayUtils.INDEX_NOT_FOUND) {
        c = classes[i];
        for (Field field : XS1.getFields(dac, top)) {
            field.setAccessible(true);
            logger.trace(field);
            name = field.getName();
            type = field.getType();
            decl = field.getDeclaringClass();
            if (!c.isAssignableFrom(type)) {
                continue;
            }
            if (c.equals(Expression.class) && Property.class.isAssignableFrom(type)) {
                continue;
            }
            // TODO: extension handling
            if (field.isAnnotationPresent(Extension.class) && Entity.class.isAssignableFrom(type)) {
                // if (!dac.equals(decl) || !dac.isAssignableFrom(type)) {
                //     continue;
                // }
                continue;
            }
            modifiers = type.getModifiers();
            if (NamedValue.class.isAssignableFrom(type) || Expression.class.isAssignableFrom(type)) {
                restricted = false;
            } else {
                restricted = type.isInterface() || Modifier.isAbstract(modifiers);
            }
            restricted = restricted || !Modifier.isPublic(modifiers);
            if (restricted) {
                continue;
            }
            modifiers = field.getModifiers();
            restricted = Modifier.isPrivate(modifiers);
            if (restricted) {
                continue;
            }
            restricted = Modifier.isStatic(modifiers) || Modifier.isFinal(modifiers);
            if (restricted) {
                continue;
            }
            if (Operation.class.isAssignableFrom(type)) {
                key = type.getSimpleName();
                operationClass = _operationClasses.get(key);
                if (operationClass != null) {
                    operationField = _operationFields.get(key);
                    if (operationField == null) {
                        _operationFields.put(key, field);
                    } else {
                        message = MessageFormat.format(pattern, operationClass.getName());
                        logger.warn(message);
                        TLC.getProject().getParser().increaseWarningCount();
                    }
                }
            }
            String errmsg = "failed to create a new instance of field \"" + field + "\" at "
                    + _declaringArtifact;
            try {
                o = field.get(_declaringArtifact);
                if (o == null) {
                    logger.debug(message(type, name, o, depth, round));
                    o = XS1.initialiseField(_declaringArtifact, field);
                    if (o == null) {
                        logger.debug(message(type, name, o, depth, round));
                        // throw new RuntimeException(message(type, name, o, depth, round));
                    } else {
                        logger.debug(message(type, name, o, depth, round));
                        field.set(_declaringArtifact, o);
                    }
                }
            } catch (IllegalArgumentException | IllegalAccessException ex) {
                throw new InstantiationRuntimeException(errmsg, ex);
            }
        }
    }
}
From source file:org.enhydra.shark.asap.util.BeanSerializerShark.java
/**
 * Return XML schema for the specified type, suitable for insertion into
 * the <types> element of a WSDL document, or underneath an
 * <element> or <attribute> declaration.
 *
 * @param javaType the Java Class we're writing out schema for
 * @param types the Java2WSDL Types object which holds the context
 *              for the WSDL being generated.
 * @return a type element containing a schema simpleType/complexType
 * @see org.apache.axis.wsdl.fromJava.Types
 */
public Element writeSchema(Class javaType, Types types) throws Exception {

    // ComplexType representation of bean class
    Element complexType = types.createElement("complexType");

    // See if there is a super class, stop if we hit a stop class
    Element e = null;
    Class superClass = javaType.getSuperclass();
    BeanPropertyDescriptor[] superPd = null;
    List stopClasses = types.getStopClasses();
    if (superClass != null && superClass != java.lang.Object.class && superClass != java.lang.Exception.class
            && superClass != java.lang.Throwable.class && superClass != java.rmi.RemoteException.class
            && superClass != org.apache.axis.AxisFault.class
            && (stopClasses == null || !(stopClasses.contains(superClass.getName())))) {
        // Write out the super class
        String base = types.writeType(superClass);
        Element complexContent = types.createElement("complexContent");
        complexType.appendChild(complexContent);
        Element extension = types.createElement("extension");
        complexContent.appendChild(extension);
        extension.setAttribute("base", base);
        e = extension;
        // Get the property descriptors for the super class
        TypeDesc superTypeDesc = TypeDesc.getTypeDescForClass(superClass);
        if (superTypeDesc != null) {
            superPd = superTypeDesc.getPropertyDescriptors();
        } else {
            superPd = BeanUtils.getPd(superClass, null);
        }
    } else {
        e = complexType;
    }

    // Add fields under sequence element.
    // Note: In most situations it would be okay
    // to put the fields under an all element.
    // However it is illegal schema to put an
    // element with minOccurs=0 or maxOccurs>1 underneath
    // an all element. This is the reason why a sequence
    // element is used.
    Element all = types.createElement("sequence");
    e.appendChild(all);

    if (Modifier.isAbstract(javaType.getModifiers())) {
        complexType.setAttribute("abstract", "true");
    }

    // Serialize each property
    for (int i = 0; i < propertyDescriptor.length; i++) {
        String propName = propertyDescriptor[i].getName();

        // Don't serialize properties named class
        boolean writeProperty = true;
        if (propName.equals("class")) {
            writeProperty = false;
        }

        // Don't serialize the property if it is present
        // in the super class property list
        if (superPd != null && writeProperty) {
            for (int j = 0; j < superPd.length && writeProperty; j++) {
                if (propName.equals(superPd[j].getName())) {
                    writeProperty = false;
                }
            }
        }
        if (!writeProperty) {
            continue;
        }

        // If we have type metadata, check to see what we're doing
        // with this field. If it's an attribute, skip it. If it's
        // an element, use whatever qname is in there. If we can't
        // find any of this info, use the default.
        if (typeDesc != null) {
            Class fieldType = propertyDescriptor[i].getType();
            FieldDesc field = typeDesc.getFieldByName(propName);

            if (field != null) {
                QName qname = field.getXmlName();
                QName fieldXmlType = field.getXmlType();
                boolean isAnonymous = fieldXmlType != null && fieldXmlType.getLocalPart().startsWith(">");

                if (qname != null) {
                    // FIXME!
                    // Check to see if this is in the right namespace -
                    // if it's not, we need to use an <element ref="">
                    // to represent it!!!

                    // Use the default...
                    propName = qname.getLocalPart();
                }
                if (!field.isElement()) {
                    writeAttribute(types, propName, fieldType, fieldXmlType, complexType);
                } else {
                    writeField(types, propName, fieldXmlType, fieldType, propertyDescriptor[i].isIndexed(),
                            field.isMinOccursZero(), all, isAnonymous);
                }
            } else {
                writeField(types, propName, null, fieldType, propertyDescriptor[i].isIndexed(), false, all,
                        false);
            }
        } else {
            boolean done = false;
            if (propertyDescriptor[i] instanceof FieldPropertyDescriptor) {
                FieldPropertyDescriptor fpd = (FieldPropertyDescriptor) propertyDescriptor[i];
                Class clazz = fpd.getField().getType();
                if (types.getTypeQName(clazz) != null) {
                    writeField(types, propName, null, clazz, false, false, all, false);
                    done = true;
                }
            }
            if (!done) {
                writeField(types, propName, null, propertyDescriptor[i].getType(),
                        propertyDescriptor[i].isIndexed(), false, all, false);
            }
        }
    }

    // done
    return complexType;
}
From source file:ca.uhn.fhir.rest.method.OperationMethodBinding.java
protected OperationMethodBinding(Class<?> theReturnResourceType,
        Class<? extends IBaseResource> theReturnTypeFromRp, Method theMethod, FhirContext theContext,
        Object theProvider, boolean theIdempotent, String theOperationName,
        Class<? extends IBaseResource> theOperationType, OperationParam[] theReturnParams,
        BundleTypeEnum theBundleType) {
    super(theReturnResourceType, theMethod, theContext, theProvider);

    myBundleType = theBundleType;
    myIdempotent = theIdempotent;
    myIdParamIndex = MethodUtil.findIdParameterIndex(theMethod, getContext());
    if (myIdParamIndex != null) {
        for (Annotation next : theMethod.getParameterAnnotations()[myIdParamIndex]) {
            if (next instanceof IdParam) {
                myCanOperateAtTypeLevel = ((IdParam) next).optional() == true;
            }
        }
    } else {
        myCanOperateAtTypeLevel = true;
    }

    Description description = theMethod.getAnnotation(Description.class);
    if (description != null) {
        myDescription = description.formalDefinition();
        if (isBlank(myDescription)) {
            myDescription = description.shortDefinition();
        }
    }
    if (isBlank(myDescription)) {
        myDescription = null;
    }

    if (isBlank(theOperationName)) {
        throw new ConfigurationException("Method '" + theMethod.getName() + "' on type "
                + theMethod.getDeclaringClass().getName() + " is annotated with @"
                + Operation.class.getSimpleName() + " but this annotation has no name defined");
    }

    if (theOperationName.startsWith("$") == false) {
        theOperationName = "$" + theOperationName;
    }
    myName = theOperationName;

    if (theContext.getVersion().getVersion().isEquivalentTo(FhirVersionEnum.DSTU1)) {
        throw new ConfigurationException("@" + Operation.class.getSimpleName()
                + " methods are not supported on servers for FHIR version "
                + theContext.getVersion().getVersion().name() + " - Found one on class "
                + theMethod.getDeclaringClass().getName());
    }

    if (theReturnTypeFromRp != null) {
        setResourceName(theContext.getResourceDefinition(theReturnTypeFromRp).getName());
    } else {
        if (Modifier.isAbstract(theOperationType.getModifiers()) == false) {
            setResourceName(theContext.getResourceDefinition(theOperationType).getName());
        } else {
            setResourceName(null);
        }
    }

    if (theMethod.getReturnType().isAssignableFrom(Bundle.class)) {
        throw new ConfigurationException("Can not return a DSTU1 bundle from an @"
                + Operation.class.getSimpleName() + " method. Found in method " + theMethod.getName()
                + " defined in type " + theMethod.getDeclaringClass().getName());
    }

    if (theMethod.getReturnType().equals(IBundleProvider.class)) {
        myReturnType = ReturnTypeEnum.BUNDLE;
    } else {
        myReturnType = ReturnTypeEnum.RESOURCE;
    }

    if (getResourceName() == null) {
        myOtherOperatiopnType = RestOperationTypeEnum.EXTENDED_OPERATION_SERVER;
    } else if (myIdParamIndex == null) {
        myOtherOperatiopnType = RestOperationTypeEnum.EXTENDED_OPERATION_TYPE;
    } else {
        myOtherOperatiopnType = RestOperationTypeEnum.EXTENDED_OPERATION_INSTANCE;
    }

    myReturnParams = new ArrayList<OperationMethodBinding.ReturnType>();
    if (theReturnParams != null) {
        for (OperationParam next : theReturnParams) {
            ReturnType type = new ReturnType();
            type.setName(next.name());
            type.setMin(next.min());
            type.setMax(next.max());
            if (type.getMax() == OperationParam.MAX_DEFAULT) {
                type.setMax(1);
            }
            if (!next.type().equals(IBase.class)) {
                if (next.type().isInterface() || Modifier.isAbstract(next.type().getModifiers())) {
                    throw new ConfigurationException(
                            "Invalid value for @OperationParam.type(): " + next.type().getName());
                }
                type.setType(theContext.getElementDefinition(next.type()).getName());
            }
            myReturnParams.add(type);
        }
    }

    if (myIdParamIndex != null) {
        myCanOperateAtInstanceLevel = true;
    }
    if (getResourceName() == null) {
        myCanOperateAtServerLevel = true;
    }
}