List of usage examples for java.lang.Class.asSubclass
@SuppressWarnings("unchecked") public <U> Class<? extends U> asSubclass(Class<U> clazz)
From source file:org.cruk.genologics.api.impl.GenologicsAPIImpl.java
/**
 * Set the Jaxb marshaller.
 *
 * <p>This operation also immediately scans the classes managed by the marshaller
 * to find those supporting classes for retrieving lists of links to a given entity
 * and classes that allow batch fetch and update of entities.
 * </p>
 *
 * @param jaxbMarshaller The Jaxb marshaller.
 */
@Required
public void setJaxbMarshaller(Jaxb2Marshaller jaxbMarshaller) {
    entityToListClassMap = new HashMap<Class<? extends Locatable>, Class<?>>();
    entityToBatchRetrieveClassMap = new HashMap<Class<? extends Locatable>, Class<?>>();

    for (Class<?> possibleClass : jaxbMarshaller.getClassesToBeBound()) {
        GenologicsQueryResult queryAnno = possibleClass.getAnnotation(GenologicsQueryResult.class);
        GenologicsBatchRetrieveResult batchAnno = possibleClass.getAnnotation(GenologicsBatchRetrieveResult.class);

        if (queryAnno != null) {
            Class<? extends Locatable> entityClass = queryAnno.entityClass().asSubclass(Locatable.class);

            @SuppressWarnings("rawtypes")
            Class<? extends Batch> listClass = possibleClass.asSubclass(Batch.class);

            entityToListClassMap.put(entityClass, listClass);

            if (logger.isDebugEnabled()) {
                logger.debug("Results class {} mapped as query results for {}",
                        getShortClassName(listClass), getShortClassName(entityClass));
            }
        }

        if (batchAnno != null) {
            Class<? extends Locatable> entityClass = batchAnno.entityClass().asSubclass(Locatable.class);

            @SuppressWarnings("rawtypes")
            Class<? extends BatchUpdate> detailsClass = possibleClass.asSubclass(BatchUpdate.class);

            entityToBatchRetrieveClassMap.put(entityClass, detailsClass);

            if (logger.isDebugEnabled()) {
                logger.debug("Batch retrieve class {} mapped as entity holder for {}",
                        getShortClassName(detailsClass), getShortClassName(entityClass));
            }
        }
    }
}
From source file:org.kuali.rice.krad.uif.service.impl.ViewHelperServiceImpl.java
/**
 * {@inheritDoc}
 */
@SuppressWarnings("unchecked")
public void processMultipleValueLookupResults(ViewModel model, String collectionId, String collectionPath,
        String multiValueReturnFields, String lookupResultValues) {
    // if no line values returned, no population is needed
    if (StringUtils.isBlank(lookupResultValues) || !(model instanceof ViewModel)) {
        return;
    }

    ViewModel viewModel = (ViewModel) model;

    if (StringUtils.isBlank(collectionId)) {
        throw new RuntimeException(
                "Id is not set for this collection lookup: " + collectionId + ", " + "path: " + collectionPath);
    }

    // retrieve the collection group so we can get the collection class and collection lookup
    Class<?> collectionObjectClass = (Class<?>) viewModel.getViewPostMetadata()
            .getComponentPostData(collectionId, UifConstants.PostMetadata.COLL_OBJECT_CLASS);

    Collection<Object> collection = ObjectPropertyUtils.getPropertyValue(model, collectionPath);
    if (collection == null) {
        Class<?> collectionClass = ObjectPropertyUtils.getPropertyType(model, collectionPath);
        collection = (Collection<Object>) KRADUtils.createNewObjectFromClass(collectionClass);
        ObjectPropertyUtils.setPropertyValue(model, collectionPath, collection);
    }

    // get the field conversions
    Map<String, String> fieldConversions = (Map<String, String>) viewModel.getViewPostMetadata()
            .getComponentPostData(collectionId, UifConstants.PostMetadata.COLL_LOOKUP_FIELD_CONVERSIONS);

    // filter the field conversions by what was returned from the multi value lookup return fields
    Map<String, String> returnedFieldConversions = filterByReturnedFieldConversions(multiValueReturnFields,
            fieldConversions);

    List<String> toFieldNamesColl = new ArrayList<String>(returnedFieldConversions.values());
    Collections.sort(toFieldNamesColl);

    String[] toFieldNames = new String[toFieldNamesColl.size()];
    toFieldNamesColl.toArray(toFieldNames);

    // first split to get the line value sets
    String[] lineValues = StringUtils.split(lookupResultValues, ",");

    List<Object> lineDataObjects = new ArrayList<Object>();

    // for each returned set create a new instance of collection class and populate with returned line values
    for (String lineValue : lineValues) {
        Object lineDataObject = null;

        // TODO: need to put this in data object service so logic can be reused
        ModuleService moduleService = KRADServiceLocatorWeb.getKualiModuleService()
                .getResponsibleModuleService(collectionObjectClass);
        if (moduleService != null && moduleService.isExternalizable(collectionObjectClass)) {
            lineDataObject = moduleService.createNewObjectFromExternalizableClass(collectionObjectClass
                    .asSubclass(org.kuali.rice.krad.bo.ExternalizableBusinessObject.class));
        } else {
            lineDataObject = KRADUtils.createNewObjectFromClass(collectionObjectClass);
        }

        String[] fieldValues = StringUtils.splitByWholeSeparatorPreserveAllTokens(lineValue, ":");
        if (fieldValues.length != toFieldNames.length) {
            throw new RuntimeException(
                    "Value count passed back from multi-value lookup does not match field conversion count");
        }

        // set each field value on the line
        for (int i = 0; i < fieldValues.length; i++) {
            String fieldName = toFieldNames[i];
            ObjectPropertyUtils.setPropertyValue(lineDataObject, fieldName, fieldValues[i]);
        }

        lineDataObjects.add(lineDataObject);
        processAndAddLineObject(viewModel, lineDataObject, collectionId, collectionPath);
    }

    viewModel.getViewPostMetadata().getAddedCollectionObjects().put(collectionId, lineDataObjects);
}
From source file:org.apache.lens.server.query.QueryExecutionServiceImpl.java
private void loadQueryComparator() throws LensException {
    try {
        Class<?>[] classes = conf.getClasses(QUERY_COMPARATOR_CLASSES, MoreRetriesFirstComparator.class,
                QueryPriorityComparator.class, FIFOQueryComparator.class);
        List<Comparator<QueryContext>> comparators = Lists.newArrayList();
        for (Class<?> clazz : classes) {
            comparators.add(clazz.asSubclass(QueryComparator.class).newInstance());
        }
        queryComparator = new ChainedComparator<>(comparators);
    } catch (Exception e) {
        throw new LensException(
                "Couldn't instantiate query comparator class. Classes: " + conf.get(QUERY_COMPARATOR_CLASSES)
                        + ". Please supply a valid value for " + QUERY_COMPARATOR_CLASSES);
    }
}
From source file:org.kuali.rice.kns.maintenance.rules.MaintenanceDocumentRuleBase.java
/**
 * This method checks the two major cases that may violate primary key integrity.
 *
 * 1. Disallow changing of the primary keys on an EDIT maintenance document. Other fields can be changed, but once
 * the primary keys have been set, they are permanent.
 *
 * 2. Disallow creating a new object whose primary key values are already present in the system on a CREATE NEW
 * maintenance document.
 *
 * This method also will add new Errors to the Global Error Map.
 *
 * @param document - The Maintenance Document being tested.
 * @return Returns false if either test failed, otherwise returns true.
 */
protected boolean primaryKeyCheck(MaintenanceDocument document) {
    // default to success if no failures
    boolean success = true;

    Class<?> boClass = document.getNewMaintainableObject().getDataObjectClass();

    Object oldBo = document.getOldMaintainableObject().getDataObject();
    Object newBo = document.getNewMaintainableObject().getDataObject();

    // We dont do primaryKeyChecks on Global Business Object maintenance documents. This is
    // because it doesnt really make any sense to do so, given the behavior of Globals. When a
    // Global Document completes, it will update or create a new record for each BO in the list.
    // As a result, there's no problem with having existing BO records in the system, they will
    // simply get updated.
    if (newBo instanceof GlobalBusinessObject) {
        return success;
    }

    // fail and complain if the person has changed the primary keys on
    // an EDIT maintenance document.
    if (document.isEdit()) {
        if (!KRADServiceLocatorWeb.getLegacyDataAdapter().equalsByPrimaryKeys(oldBo, newBo)) {
            // add a complaint to the errors
            putDocumentError(KRADConstants.DOCUMENT_ERRORS,
                    RiceKeyConstants.ERROR_DOCUMENT_MAINTENANCE_PRIMARY_KEYS_CHANGED_ON_EDIT,
                    getHumanReadablePrimaryKeyFieldNames(boClass));
            success &= false;
        }
    }

    // fail and complain if the person has selected a new object with keys that already exist
    // in the DB.
    else if (document.isNew()) {

        // TODO: when/if we have standard support for DO retrieval, do this check for DO's
        if (newBo instanceof PersistableBusinessObject) {

            // get a map of the pk field names and values
            Map<String, ?> newPkFields = getDataObjectMetaDataService().getPrimaryKeyFieldValues(newBo);

            // attempt to do a lookup, see if this object already exists by these Primary Keys
            // If there are any pk fields that are null, don't bother doing the check since it
            // would be an unneeded DB call.
            boolean foundNullValuePK = false;
            for (Map.Entry<String, ?> pkField : newPkFields.entrySet()) {
                if (pkField.getValue() == null) {
                    foundNullValuePK = true;
                    break;
                }
            }
            if (!foundNullValuePK) {
                PersistableBusinessObject testBo = boService
                        .findByPrimaryKey(boClass.asSubclass(PersistableBusinessObject.class), newPkFields);

                // if the retrieve was successful, then this object already exists, and we need
                // to complain
                if (testBo != null) {
                    putDocumentError(KRADConstants.DOCUMENT_ERRORS,
                            RiceKeyConstants.ERROR_DOCUMENT_MAINTENANCE_KEYS_ALREADY_EXIST_ON_CREATE_NEW,
                            getHumanReadablePrimaryKeyFieldNames(boClass));
                    success &= false;
                }
            }
        }
    }

    return success;
}
From source file:org.apache.flink.api.java.typeutils.TypeExtractor.java
@SuppressWarnings({ "unchecked", "rawtypes" }) private <OUT, IN1, IN2> TypeInformation<OUT> privateGetForClass(Class<OUT> clazz, ArrayList<Type> typeHierarchy, ParameterizedType parameterizedType, TypeInformation<IN1> in1Type, TypeInformation<IN2> in2Type) { checkNotNull(clazz);/*w w w .jav a2 s . c o m*/ // check if type information can be produced using a factory final TypeInformation<OUT> typeFromFactory = createTypeInfoFromFactory(clazz, typeHierarchy, in1Type, in2Type); if (typeFromFactory != null) { return typeFromFactory; } // Object is handled as generic type info if (clazz.equals(Object.class)) { return new GenericTypeInfo<>(clazz); } // Class is handled as generic type info if (clazz.equals(Class.class)) { return new GenericTypeInfo<OUT>(clazz); } // recursive types are handled as generic type info if (countTypeInHierarchy(typeHierarchy, clazz) > 1) { return new GenericTypeInfo<>(clazz); } // check for arrays if (clazz.isArray()) { // primitive arrays: int[], byte[], ... PrimitiveArrayTypeInfo<OUT> primitiveArrayInfo = PrimitiveArrayTypeInfo.getInfoFor(clazz); if (primitiveArrayInfo != null) { return primitiveArrayInfo; } // basic type arrays: String[], Integer[], Double[] BasicArrayTypeInfo<OUT, ?> basicArrayInfo = BasicArrayTypeInfo.getInfoFor(clazz); if (basicArrayInfo != null) { return basicArrayInfo; } // object arrays else { TypeInformation<?> componentTypeInfo = createTypeInfoWithTypeHierarchy(typeHierarchy, clazz.getComponentType(), in1Type, in2Type); return ObjectArrayTypeInfo.getInfoFor(clazz, componentTypeInfo); } } // check for writable types if (isHadoopWritable(clazz)) { return createHadoopWritableTypeInfo(clazz); } // check for basic types TypeInformation<OUT> basicTypeInfo = BasicTypeInfo.getInfoFor(clazz); if (basicTypeInfo != null) { return basicTypeInfo; } // check for SQL time types TypeInformation<OUT> timeTypeInfo = SqlTimeTypeInfo.getInfoFor(clazz); if (timeTypeInfo != null) { return timeTypeInfo; } // check for subclasses of Value if (Value.class.isAssignableFrom(clazz)) { Class<? extends Value> valueClass = clazz.asSubclass(Value.class); return (TypeInformation<OUT>) ValueTypeInfo.getValueTypeInfo(valueClass); } // check for subclasses of Tuple if (Tuple.class.isAssignableFrom(clazz)) { if (clazz == Tuple0.class) { return new TupleTypeInfo(Tuple0.class); } throw new InvalidTypesException( "Type information extraction for tuples (except Tuple0) cannot be done based on the class."); } // check for Enums if (Enum.class.isAssignableFrom(clazz)) { return new EnumTypeInfo(clazz); } // special case for POJOs generated by Avro. if (SpecificRecordBase.class.isAssignableFrom(clazz)) { return new AvroTypeInfo(clazz); } if (Modifier.isInterface(clazz.getModifiers())) { // Interface has no members and is therefore not handled as POJO return new GenericTypeInfo<OUT>(clazz); } try { TypeInformation<OUT> pojoType = analyzePojo(clazz, new ArrayList<Type>(typeHierarchy), parameterizedType, in1Type, in2Type); if (pojoType != null) { return pojoType; } } catch (InvalidTypesException e) { if (LOG.isDebugEnabled()) { LOG.debug("Unable to handle type " + clazz + " as POJO. Message: " + e.getMessage(), e); } // ignore and create generic type info } // return a generic type return new GenericTypeInfo<OUT>(clazz); }
From source file:org.apache.nifi.controller.FlowController.java
public ReportingTaskNode createReportingTask(final String type, final String id, final boolean firstTimeAdded,
        final boolean register) throws ReportingTaskInstantiationException {
    if (type == null || id == null) {
        throw new NullPointerException();
    }

    ReportingTask task = null;
    boolean creationSuccessful = true;
    final ClassLoader ctxClassLoader = Thread.currentThread().getContextClassLoader();
    try {
        final ClassLoader detectedClassLoader = ExtensionManager.getClassLoader(type, id);
        final Class<?> rawClass;
        if (detectedClassLoader == null) {
            rawClass = Class.forName(type);
        } else {
            rawClass = Class.forName(type, false, detectedClassLoader);
        }

        Thread.currentThread().setContextClassLoader(detectedClassLoader);
        final Class<? extends ReportingTask> reportingTaskClass = rawClass.asSubclass(ReportingTask.class);
        final Object reportingTaskObj = reportingTaskClass.newInstance();
        task = reportingTaskClass.cast(reportingTaskObj);
    } catch (final Exception e) {
        LOG.error("Could not create Reporting Task of type " + type + " for ID " + id
                + "; creating \"Ghost\" implementation", e);

        final GhostReportingTask ghostTask = new GhostReportingTask();
        ghostTask.setIdentifier(id);
        ghostTask.setCanonicalClassName(type);
        task = ghostTask;
        creationSuccessful = false;
    } finally {
        if (ctxClassLoader != null) {
            Thread.currentThread().setContextClassLoader(ctxClassLoader);
        }
    }

    final ComponentLog logger = new SimpleProcessLogger(id, task);
    final ValidationContextFactory validationContextFactory = new StandardValidationContextFactory(
            controllerServiceProvider, variableRegistry);
    final ReportingTaskNode taskNode;
    if (creationSuccessful) {
        taskNode = new StandardReportingTaskNode(task, id, this, processScheduler, validationContextFactory,
                variableRegistry, logger);
    } else {
        final String simpleClassName = type.contains(".") ? StringUtils.substringAfterLast(type, ".") : type;
        final String componentType = "(Missing) " + simpleClassName;

        taskNode = new StandardReportingTaskNode(task, id, this, processScheduler, validationContextFactory,
                componentType, type, variableRegistry, logger);
    }

    taskNode.setName(task.getClass().getSimpleName());

    if (firstTimeAdded) {
        final ComponentLog componentLog = new SimpleProcessLogger(id, taskNode.getReportingTask());
        final ReportingInitializationContext config = new StandardReportingInitializationContext(id,
                taskNode.getName(), SchedulingStrategy.TIMER_DRIVEN, "1 min", componentLog, this, nifiProperties);

        try {
            task.initialize(config);
        } catch (final InitializationException ie) {
            throw new ReportingTaskInstantiationException("Failed to initialize reporting task of type " + type,
                    ie);
        }

        try (final NarCloseable x = NarCloseable.withComponentNarLoader(taskNode.getReportingTask().getClass(),
                taskNode.getReportingTask().getIdentifier())) {
            ReflectionUtils.invokeMethodsWithAnnotation(OnAdded.class, task);
            ReflectionUtils.quietlyInvokeMethodsWithAnnotation(OnConfigurationRestored.class,
                    taskNode.getReportingTask());
        } catch (final Exception e) {
            throw new ComponentLifeCycleException("Failed to invoke On-Added Lifecycle methods of " + task, e);
        }
    }

    if (register) {
        reportingTasks.put(id, taskNode);

        // Register log observer to provide bulletins when reporting task logs anything at WARN level or above
        final LogRepository logRepository = LogRepositoryFactory.getRepository(id);
        logRepository.addObserver(StandardProcessorNode.BULLETIN_OBSERVER_ID, LogLevel.WARN,
                new ReportingTaskLogObserver(getBulletinRepository(), taskNode));
    }

    return taskNode;
}
From source file:org.apache.nifi.controller.FlowController.java
public FlowFilePrioritizer createPrioritizer(final String type)
        throws InstantiationException, IllegalAccessException, ClassNotFoundException {
    FlowFilePrioritizer prioritizer;

    final ClassLoader ctxClassLoader = Thread.currentThread().getContextClassLoader();
    try {
        final ClassLoader detectedClassLoaderForType = ExtensionManager.getClassLoader(type);
        final Class<?> rawClass;
        if (detectedClassLoaderForType == null) {
            // try to find from the current class loader
            rawClass = Class.forName(type);
        } else {
            // try to find from the registered classloader for that type
            rawClass = Class.forName(type, true, ExtensionManager.getClassLoader(type));
        }

        Thread.currentThread().setContextClassLoader(detectedClassLoaderForType);

        final Class<? extends FlowFilePrioritizer> prioritizerClass = rawClass.asSubclass(FlowFilePrioritizer.class);
        final Object processorObj = prioritizerClass.newInstance();
        prioritizer = prioritizerClass.cast(processorObj);

        return prioritizer;
    } finally {
        if (ctxClassLoader != null) {
            Thread.currentThread().setContextClassLoader(ctxClassLoader);
        }
    }
}
From source file:org.apache.nifi.controller.FlowController.java
private Processor instantiateProcessor(final String type, final String identifier)
        throws ProcessorInstantiationException {
    Processor processor;

    final ClassLoader ctxClassLoader = Thread.currentThread().getContextClassLoader();
    try {
        final ClassLoader detectedClassLoaderForType = ExtensionManager.getClassLoader(type, identifier);
        final Class<?> rawClass;
        if (detectedClassLoaderForType == null) {
            // try to find from the current class loader
            rawClass = Class.forName(type);
        } else {
            // try to find from the registered classloader for that type
            rawClass = Class.forName(type, true, ExtensionManager.getClassLoader(type, identifier));
        }

        Thread.currentThread().setContextClassLoader(detectedClassLoaderForType);

        final Class<? extends Processor> processorClass = rawClass.asSubclass(Processor.class);
        processor = processorClass.newInstance();

        final ComponentLog componentLogger = new SimpleProcessLogger(identifier, processor);
        final ProcessorInitializationContext ctx = new StandardProcessorInitializationContext(identifier,
                componentLogger, this, this, nifiProperties);
        processor.initialize(ctx);

        LogRepositoryFactory.getRepository(identifier).setLogger(componentLogger);
        return processor;
    } catch (final Throwable t) {
        throw new ProcessorInstantiationException(type, t);
    } finally {
        if (ctxClassLoader != null) {
            Thread.currentThread().setContextClassLoader(ctxClassLoader);
        }
    }
}
From source file:org.apache.nifi.cluster.manager.impl.WebClusterManager.java
@Override
public ReportingTaskNode createReportingTask(final String type, final String id, final boolean firstTimeAdded)
        throws ReportingTaskInstantiationException {
    if (type == null) {
        throw new NullPointerException();
    }

    ReportingTask task = null;
    final ClassLoader ctxClassLoader = Thread.currentThread().getContextClassLoader();
    try {
        final ClassLoader detectedClassLoader = ExtensionManager.getClassLoader(type);
        final Class<?> rawClass;
        if (detectedClassLoader == null) {
            rawClass = Class.forName(type);
        } else {
            rawClass = Class.forName(type, false, detectedClassLoader);
        }

        Thread.currentThread().setContextClassLoader(detectedClassLoader);
        final Class<? extends ReportingTask> reportingTaskClass = rawClass.asSubclass(ReportingTask.class);
        final Object reportingTaskObj = reportingTaskClass.newInstance();
        task = reportingTaskClass.cast(reportingTaskObj);
    } catch (final ClassNotFoundException | SecurityException | InstantiationException | IllegalAccessException
            | IllegalArgumentException t) {
        throw new ReportingTaskInstantiationException(type, t);
    } finally {
        if (ctxClassLoader != null) {
            Thread.currentThread().setContextClassLoader(ctxClassLoader);
        }
    }

    final ValidationContextFactory validationContextFactory = new StandardValidationContextFactory(this);
    final ReportingTaskNode taskNode = new ClusteredReportingTaskNode(task, id, processScheduler,
            new ClusteredEventAccess(this, auditService), bulletinRepository, controllerServiceProvider,
            validationContextFactory);
    taskNode.setName(task.getClass().getSimpleName());

    reportingTasks.put(id, taskNode);

    if (firstTimeAdded) {
        try (final NarCloseable x = NarCloseable.withNarLoader()) {
            ReflectionUtils.invokeMethodsWithAnnotation(OnAdded.class, task);
        } catch (final Exception e) {
            throw new ComponentLifeCycleException("Failed to invoke On-Added Lifecycle methods of " + task, e);
        }
    }

    // Register log observer to provide bulletins when reporting task logs anything at WARN level or above
    final LogRepository logRepository = LogRepositoryFactory.getRepository(id);
    logRepository.addObserver(StandardProcessorNode.BULLETIN_OBSERVER_ID, LogLevel.WARN,
            new ReportingTaskLogObserver(getBulletinRepository(), taskNode));

    return taskNode;
}