List of usage examples for java.io.Serializable#toString()
public String toString()
From source file:org.unitime.timetable.backup.SessionBackup.java
/**
 * Exports the academic session identified by {@code sessionId} (and every entity reachable
 * from it through the Hibernate metadata graph) into {@code out} as protobuf TableData.
 * <p>
 * Works in three phases: (1) discover "parent" relations (non-nullable references pointing
 * back to already-queued entities), (2) follow one-to-one and collection relations forward,
 * (3) walk each collected QueueItem and serialize its objects, property by property, into
 * TableData records. Progress is reported through {@code iProgress} throughout.
 *
 * @param out       destination stream; wrapped in a protobuf CodedOutputStream
 * @param progress  progress/status sink
 * @param sessionId unique id of the academic Session to export
 * @throws IOException if writing to the output stream fails
 */
@Override
public void backup(OutputStream out, Progress progress, Long sessionId) throws IOException {
    iOut = CodedOutputStream.newInstance(out);
    iProgress = progress;
    iSessionId = sessionId;
    // Dedicated session with cache disabled: this is a bulk read-only export.
    iHibSession = new _RootDAO().createNewSession();
    iHibSession.setCacheMode(CacheMode.IGNORE);
    iHibSessionFactory = iHibSession.getSessionFactory();
    try {
        iProgress.setStatus("Exporting Session");
        iProgress.setPhase("Loading Model", 3);
        // All entity metadata, sorted by entity name for deterministic traversal order.
        TreeSet<ClassMetadata> allMeta = new TreeSet<ClassMetadata>(new Comparator<ClassMetadata>() {
            @Override
            public int compare(ClassMetadata m1, ClassMetadata m2) {
                return m1.getEntityName().compareTo(m2.getEntityName());
            }
        });
        allMeta.addAll(iHibSessionFactory.getAllClassMetadata().values());
        iProgress.incProgress();
        // BFS queue seeded with the Session entity itself (keyed by "uniqueId").
        Queue<QueueItem> queue = new LinkedList<QueueItem>();
        queue.add(new QueueItem(iHibSessionFactory.getClassMetadata(Session.class), null, "uniqueId",
                Relation.None));
        // Relations that must never be followed (would drag in unrelated data).
        Set<String> avoid = new HashSet<String>();
        // avoid following relations
        avoid.add(TimetableManager.class.getName() + ".departments");
        avoid.add(TimetableManager.class.getName() + ".solverGroups");
        avoid.add(DistributionType.class.getName() + ".departments");
        avoid.add(LastLikeCourseDemand.class.getName() + ".student");
        avoid.add(Student.class.getName() + ".lastLikeCourseDemands");
        // Non-nullable references that should NOT be treated as parent relations below.
        Set<String> disallowedNotNullRelations = new HashSet<String>();
        disallowedNotNullRelations.add(Assignment.class.getName() + ".datePattern");
        disallowedNotNullRelations.add(Assignment.class.getName() + ".timePattern");
        disallowedNotNullRelations.add(LastLikeCourseDemand.class.getName() + ".student");
        disallowedNotNullRelations.add(OnlineSectioningLog.class.getName() + ".session");
        // entity name -> queue items collected for that entity
        Map<String, List<QueueItem>> data = new HashMap<String, List<QueueItem>>();
        List<QueueItem> sessions = new ArrayList<QueueItem>();
        sessions.add(queue.peek());
        data.put(queue.peek().name(), sessions);
        QueueItem item = null;
        // Phase 1: find entities with a non-nullable reference to anything already queued
        // ("parent" relations), one new QueueItem per entity name.
        while ((item = queue.poll()) != null) {
            if (item.size() == 0)
                continue;
            for (ClassMetadata meta : allMeta) {
                if (meta.hasSubclasses())
                    continue;
                for (int i = 0; i < meta.getPropertyNames().length; i++) {
                    String property = meta.getPropertyNames()[i];
                    // Skip nullable properties and explicitly disallowed ones.
                    if (disallowedNotNullRelations.contains(meta.getEntityName() + "." + property)
                            || meta.getPropertyNullability()[i])
                        continue;
                    Type type = meta.getPropertyTypes()[i];
                    if (type instanceof EntityType && type.getReturnedClass().equals(item.clazz())) {
                        QueueItem qi = new QueueItem(meta, item, property, Relation.Parent);
                        if (!data.containsKey(qi.name())) {
                            List<QueueItem> items = new ArrayList<QueueItem>();
                            data.put(qi.name(), items);
                            queue.add(qi);
                            items.add(qi);
                            if (qi.size() > 0)
                                iProgress.info("Parent: " + qi);
                        }
                    }
                }
            }
        }
        iProgress.incProgress();
        // Re-seed the queue with everything found so far for the forward pass.
        for (List<QueueItem> list : data.values())
            queue.addAll(list);
        // The following part is needed to ensure that instructor distribution preferences are
        // saved including their distribution types
        List<QueueItem> distributions = new ArrayList<QueueItem>();
        for (QueueItem instructor : data.get(DepartmentalInstructor.class.getName())) {
            QueueItem qi = new QueueItem(iHibSessionFactory.getClassMetadata(DistributionPref.class),
                    instructor, "owner", Relation.Parent);
            distributions.add(qi);
            queue.add(qi);
            if (qi.size() > 0)
                iProgress.info("Extra: " + qi);
        }
        data.put(DistributionPref.class.getName(), distributions);
        // Phase 2: follow one-to-one (EntityType) and one-to-many (CollectionType) relations
        // forward from every queued item, avoiding cycles via item.contains(...).
        while ((item = queue.poll()) != null) {
            if (item.size() == 0)
                continue;
            for (int i = 0; i < item.meta().getPropertyNames().length; i++) {
                String property = item.meta().getPropertyNames()[i];
                Type type = item.meta().getPropertyTypes()[i];
                if (type instanceof EntityType) {
                    if (avoid.contains(item.name() + "." + property))
                        continue;
                    ClassMetadata meta = iHibSessionFactory.getClassMetadata(type.getReturnedClass());
                    if (item.contains(meta.getEntityName()))
                        continue; // already on this item's path -> would cycle
                    QueueItem qi = new QueueItem(meta, item, property, Relation.One);
                    List<QueueItem> items = data.get(qi.name());
                    if (items == null) {
                        items = new ArrayList<QueueItem>();
                        data.put(qi.name(), items);
                    }
                    queue.add(qi);
                    items.add(qi);
                    if (qi.size() > 0)
                        iProgress.info("One: " + qi);
                }
                if (type instanceof CollectionType) {
                    if (avoid.contains(item.name() + "." + property))
                        continue;
                    ClassMetadata meta = iHibSessionFactory.getClassMetadata(((CollectionType) type)
                            .getElementType((SessionFactoryImplementor) iHibSessionFactory).getReturnedClass());
                    // meta == null: collection of non-entity elements, nothing to queue.
                    if (meta == null || item.contains(meta.getEntityName()))
                        continue;
                    QueueItem qi = new QueueItem(meta, item, property, Relation.Many);
                    List<QueueItem> items = data.get(qi.name());
                    if (items == null) {
                        items = new ArrayList<QueueItem>();
                        data.put(qi.name(), items);
                    }
                    queue.add(qi);
                    items.add(qi);
                    if (qi.size() > 0)
                        iProgress.info("Many: " + qi);
                }
            }
        }
        iProgress.incProgress();
        // Phase 3: serialize. Tracks exported ids per entity so each object is written once.
        Map<String, Set<Serializable>> allExportedIds = new HashMap<String, Set<Serializable>>();
        for (String name : new TreeSet<String>(data.keySet())) {
            List<QueueItem> list = data.get(name);
            Map<String, TableData.Table.Builder> tables = new HashMap<String, TableData.Table.Builder>();
            for (QueueItem current : list) {
                if (current.size() == 0)
                    continue;
                iProgress.info("Loading " + current);
                List<Object> objects = current.list();
                if (objects == null || objects.isEmpty())
                    continue;
                iProgress.setPhase(current.abbv() + " [" + objects.size() + "]", objects.size());
                objects: for (Object object : objects) {
                    iProgress.incProgress();
                    // Get meta data (check for sub-classes)
                    ClassMetadata meta = iHibSessionFactory.getClassMetadata(object.getClass());
                    if (meta == null)
                        meta = current.meta();
                    if (meta.hasSubclasses()) {
                        // Resolve to the most specific (leaf) metadata for this instance.
                        for (Iterator i = iHibSessionFactory.getAllClassMetadata().entrySet().iterator(); i
                                .hasNext();) {
                            Map.Entry entry = (Map.Entry) i.next();
                            ClassMetadata classMetadata = (ClassMetadata) entry.getValue();
                            if (classMetadata.getMappedClass().isInstance(object)
                                    && !classMetadata.hasSubclasses()) {
                                meta = classMetadata;
                                break;
                            }
                        }
                    }
                    // Get unique identifier
                    Serializable id = meta.getIdentifier(object, (SessionImplementor) iHibSession);
                    // Check if already exported
                    Set<Serializable> exportedIds = allExportedIds.get(meta.getEntityName());
                    if (exportedIds == null) {
                        exportedIds = new HashSet<Serializable>();
                        allExportedIds.put(meta.getEntityName(), exportedIds);
                    }
                    if (!exportedIds.add(id))
                        continue;
                    // Check relation to an academic session (if exists); skip objects that
                    // belong to a different session than the one being exported.
                    for (String property : meta.getPropertyNames()) {
                        Type type = meta.getPropertyType(property);
                        if (type instanceof EntityType && type.getReturnedClass().equals(Session.class)) {
                            Session s = (Session) meta.getPropertyValue(object, property);
                            if (s != null && !s.getUniqueId().equals(iSessionId)) {
                                iProgress.warn(meta.getEntityName()
                                        .substring(meta.getEntityName().lastIndexOf('.') + 1) + "@" + id
                                        + " belongs to a different academic session (" + s + ")");
                                continue objects; // wrong session
                            }
                        }
                    }
                    // Get appropriate table
                    TableData.Table.Builder table = tables.get(meta.getEntityName());
                    if (table == null) {
                        table = TableData.Table.newBuilder();
                        tables.put(meta.getEntityName(), table);
                        table.setName(meta.getEntityName());
                    }
                    // Export object: one record per object, one element per non-null property,
                    // encoded according to the Hibernate property type.
                    TableData.Record.Builder record = TableData.Record.newBuilder();
                    record.setId(id.toString());
                    for (String property : meta.getPropertyNames()) {
                        Type type = meta.getPropertyType(property);
                        Object value = meta.getPropertyValue(object, property);
                        if (value == null)
                            continue;
                        TableData.Element.Builder element = TableData.Element.newBuilder();
                        element.setName(property);
                        if (type instanceof PrimitiveType) {
                            element.addValue(((PrimitiveType) type).toString(value));
                        } else if (type instanceof StringType) {
                            element.addValue(((StringType) type).toString((String) value));
                        } else if (type instanceof BinaryType) {
                            element.addValueBytes(ByteString.copyFrom((byte[]) value));
                        } else if (type instanceof TimestampType) {
                            element.addValue(((TimestampType) type).toString((Date) value));
                        } else if (type instanceof DateType) {
                            element.addValue(((DateType) type).toString((Date) value));
                        } else if (type instanceof EntityType) {
                            // References are stored as the related object ids.
                            List<Object> ids = current.relation(property, id, false);
                            if (ids != null)
                                for (Object i : ids)
                                    element.addValue(i.toString());
                            iHibSession.evict(value); // keep memory bounded during bulk export
                        } else if (type instanceof CustomType && value instanceof Document) {
                            // XML documents are flattened to compact strings; the (large)
                            // curriculum classification students document is skipped.
                            if (object instanceof CurriculumClassification && property.equals("students"))
                                continue;
                            StringWriter w = new StringWriter();
                            XMLWriter x = new XMLWriter(w, OutputFormat.createCompactFormat());
                            x.write((Document) value);
                            x.flush();
                            x.close();
                            element.addValue(w.toString());
                        } else if (type instanceof CollectionType) {
                            List<Object> ids = current.relation(property, id, false);
                            if (ids != null)
                                for (Object i : ids)
                                    element.addValue(i.toString());
                        } else if (type instanceof EmbeddedComponentType
                                && property.equalsIgnoreCase("uniqueCourseNbr")) {
                            continue;
                        } else {
                            // Unsupported type: warn and drop the property.
                            iProgress.warn("Unknown data type: " + type + " (property " + meta.getEntityName()
                                    + "." + property + ", class " + value.getClass() + ")");
                            continue;
                        }
                        record.addElement(element.build());
                    }
                    table.addRecord(record.build());
                    iHibSession.evict(object);
                }
                current.clearCache();
            }
            for (TableData.Table.Builder table : tables.values()) {
                add(table.build());
            }
        }
        iProgress.setStatus("All done.");
    } finally {
        iHibSession.close();
    }
}
From source file:org.egov.egf.commons.EgovCommon.java
/**
 * Loads the account-detail entity identified by {@code detailkey}, using the fully
 * qualified class name recorded on {@code accountdetailtype} to locate the entity class
 * via reflection.
 * <p>
 * The return type of the entity's {@code getId()} method decides how the key is coerced:
 * a {@code Long} id causes {@code detailkey} to be converted via
 * {@code Long.valueOf(detailkey.toString())}; otherwise the key is passed through as-is.
 *
 * @param accountdetailtype supplies the entity's fully qualified class name
 * @param detailkey         the identifier to load by
 * @return the loaded entity (Hibernate {@code load} proxy semantics apply)
 * @throws ApplicationException if reflection or the load fails
 */
// NOTE(review): both catch blocks rethrow with only e.getMessage(), dropping the original
// cause (and NPE if the message is null). If ApplicationException has a (String, Throwable)
// constructor, prefer `new ApplicationException(e.getMessage(), e)` — confirm before changing.
public EntityType getEntityType(final Accountdetailtype accountdetailtype, final Serializable detailkey)
        throws ApplicationException {
    if (LOGGER.isDebugEnabled())
        LOGGER.debug("EgovCommon | getEntityType| Start");
    EntityType entity = null;
    try {
        final Class aClass = Class.forName(accountdetailtype.getFullQualifiedName());
        final java.lang.reflect.Method method = aClass.getMethod("getId");
        final String dataType = method.getReturnType().getSimpleName();
        if (LOGGER.isDebugEnabled())
            LOGGER.debug("data Type = " + dataType);
        if (dataType.equals("Long"))
            entity = (EntityType) persistenceService.getSession().load(aClass,
                    Long.valueOf(detailkey.toString()));
        else
            entity = (EntityType) persistenceService.getSession().load(aClass, detailkey);
    } catch (final ClassCastException e) {
        LOGGER.error(e);
        throw new ApplicationException(e.getMessage());
    } catch (final Exception e) {
        LOGGER.error("Exception to get EntityType=" + e.getMessage(), e);
        throw new ApplicationException(e.getMessage());
    }
    return entity;
}
From source file:org.egov.dao.budget.BudgetDetailsHibernateDAO.java
/**
 * Fetches the entity of the given class by id, failing loudly instead of returning null.
 *
 * @param clazz entity class to load
 * @param id    primary key; must not be null
 * @return the loaded entity, never null
 * @throws ValidationException if {@code id} is null or no row exists for it
 */
private Object findById(final Class clazz, final Serializable id) {
    final String typeName = clazz.getSimpleName();
    if (id == null)
        throw new ValidationException(EMPTY_STRING, typeName + " id is null or empty");
    final Object entity = session.get(clazz, id);
    if (entity != null)
        return entity;
    throw new ValidationException(EMPTY_STRING,
            typeName + " is not defined for this id [ " + id.toString() + " ]");
}
From source file:gov.pnnl.goss.gridappsd.process.ProcessManagerImpl.java
/**
 * Starts the process manager: connects to the GOSS message bus as the platform user,
 * publishes a "starting" platform-log message, and subscribes to every GridAPPS-D topic
 * ({@code topic_prefix + ".>"}). The subscription handler dispatches by destination
 * substring: simulation requests, app requests, data requests, and log messages.
 * <p>
 * NOTE(review): a single {@code logMessageObj} instance is mutated both here and inside
 * the asynchronous {@code onMessage} callback — if callbacks run concurrently, log fields
 * can interleave; confirm the bus delivery model before relying on these log entries.
 */
@Start
public void start() {
    LogMessage logMessageObj = new LogMessage();
    try {
        // Authenticate against the bus with the platform credentials.
        Credentials credentials = new UsernamePasswordCredentials(GridAppsDConstants.username,
                GridAppsDConstants.password);
        Client client = clientFactory.create(PROTOCOL.STOMP, credentials);
        logMessageObj.setLogLevel(LogLevel.DEBUG);
        logMessageObj.setProcessId(this.getClass().getName());
        logMessageObj.setProcessStatus(ProcessStatus.RUNNING);
        logMessageObj.setStoreToDb(true);
        logMessageObj.setLogMessage("Starting " + this.getClass().getName());
        client.publish(GridAppsDConstants.topic_platformLog, logMessageObj);
        // Lazily create the simulation-request handler if it was not injected.
        if (newSimulationProcess == null)
            newSimulationProcess = new ProcessNewSimulationRequest();
        logMessageObj.setTimestamp(new Date().getTime());
        logMessageObj.setLogMessage("Starting " + this.getClass().getName());
        client.publish(GridAppsDConstants.topic_platformLog, logMessageObj);
        // Wildcard subscription: all topics below the GridAPPS-D prefix.
        client.subscribe(GridAppsDConstants.topic_prefix + ".>", new GossResponseEvent() {
            @Override
            public void onMessage(Serializable message) {
                DataResponse event = (DataResponse) message;
                //TODO:Get username from message's metadata e.g. event.getUserName()
                String username = GridAppsDConstants.username;
                // Every incoming request gets a fresh process id.
                int processId = generateProcessId();
                logMessageObj.setTimestamp(new Date().getTime());
                logMessageObj.setLogMessage("Received message: " + event.getData() + " on topic "
                        + event.getDestination() + " from user " + username);
                client.publish(GridAppsDConstants.topic_platformLog, logMessageObj);
                //TODO: create registry mapping between request topics and request handlers.
                if (event.getDestination().contains(GridAppsDConstants.topic_requestSimulation)) {
                    // Simulation request: assign a port, reply with the process id, then
                    // hand off to the simulation-request processor.
                    try {
                        int simPort = assignSimulationPort(processId);
                        client.publish(event.getReplyDestination(), processId);
                        newSimulationProcess.process(configurationManager, simulationManager, processId,
                                message, simPort);
                    } catch (Exception e) {
                        // Best-effort: log the failure to the platform log and keep serving.
                        e.printStackTrace();
                        logMessageObj.setTimestamp(new Date().getTime());
                        logMessageObj.setLogLevel(LogLevel.ERROR);
                        logMessageObj.setLogMessage(e.getMessage());
                        client.publish(GridAppsDConstants.topic_platformLog, logMessageObj);
                    }
                } else if (event.getDestination().contains(GridAppsDConstants.topic_requestApp)) {
                    // Application request: delegate to the app manager.
                    try {
                        appManager.process(processId, event, message);
                    } catch (Exception e) {
                        e.printStackTrace();
                        logMessageObj.setTimestamp(new Date().getTime());
                        logMessageObj.setLogLevel(LogLevel.ERROR);
                        logMessageObj.setLogMessage(e.getMessage());
                        client.publish(GridAppsDConstants.topic_platformLog, logMessageObj);
                    }
                } else if (event.getDestination().contains(GridAppsDConstants.topic_requestData)) {
                    // Data request: build per-process output/log response topics and query logs.
                    String outputTopics = String.join(".", GridAppsDConstants.topic_responseData,
                            String.valueOf(processId), "output");
                    String logTopic = String.join(".", GridAppsDConstants.topic_responseData,
                            String.valueOf(processId), "log");
                    logManager.get(LogMessage.parse(message.toString()), outputTopics, logTopic);
                    //TODO: catch JsonSyntaxException and call get ModelDataManager or SimulationOutputDataManager
                } else if (event.getDestination().contains("log")) {
                    // Log message: persist via the log manager.
                    logManager.log(LogMessage.parse(message.toString()), username);
                }
                //case GridAppsDConstants.topic_requestData : processDataRequest(); break;
                //case GridAppsDConstants.topic_requestSimulationStatus : processSimulationStatusRequest(); break;
            }
        });
    } catch (Exception e) {
        // Startup failure: record to the persistent log under the platform user.
        e.printStackTrace();
        logMessageObj.setTimestamp(new Date().getTime());
        logMessageObj.setLogLevel(LogLevel.ERROR);
        logMessageObj.setLogMessage(e.getMessage());
        logManager.log(logMessageObj, GridAppsDConstants.username);
    }
}
From source file:com.amalto.core.storage.hibernate.HibernateStorage.java
/**
 * Deletes all records matched by {@code userQuery}.
 * <p>
 * Two paths:
 * <ul>
 *   <li><b>Optimized bulk delete</b> — when the (normalized) query is a {@code Select} over
 *       exactly one type with no condition, collection/join tables and inbound references
 *       are cleared with raw SQL and the type's data is removed via {@code deleteData}.</li>
 *   <li><b>Generic fallback</b> — otherwise records are fetched (for update) and deleted
 *       one by one through the Hibernate session.</li>
 * </ul>
 * The storage class loader is bound to the current thread for the duration of the call so
 * that generated entity classes resolve.
 *
 * @param userQuery the query selecting the records to delete; normalized before use
 */
@SuppressWarnings("rawtypes")
@Override
public void delete(Expression userQuery) {
    Session session = this.getCurrentSession();
    try {
        storageClassLoader.bind(Thread.currentThread());
        userQuery = userQuery.normalize(); // First do a normalize for correct optimization detection.
        // Check if optimized delete for one type (and no filter) is applicable
        if (userQuery instanceof Select) {
            Select select = (Select) userQuery;
            List<ComplexTypeMetadata> types = select.getTypes();
            if (types.size() == 1 && select.getCondition() == null) {
                FlushMode previousFlushMode = session.getFlushMode();
                try {
                    session.setFlushMode(FlushMode.ALWAYS); // Force Hibernate to actually send SQL query to
                    // database during delete.
                    ComplexTypeMetadata mainType = types.get(0);
                    TypeMapping mapping = mappingRepository.getMappingFromUser(mainType);
                    // Compute (and eventually sort) types to delete
                    List<ComplexTypeMetadata> typesToDelete;
                    MetadataRepository internalRepository = typeMappingRepository.getInternalRepository();
                    if (mapping instanceof ScatteredTypeMapping) {
                        // Scattered mapping: delete the whole transitive closure, sorted so
                        // dependents go first.
                        MetadataVisitor<List<ComplexTypeMetadata>> transitiveClosure = new TypeTransitiveClosure();
                        List<ComplexTypeMetadata> typeClosure = mapping.getDatabase().accept(transitiveClosure);
                        typesToDelete = MetadataUtils.sortTypes(internalRepository, typeClosure);
                    } else {
                        // Flat mapping: the type itself plus any subtypes.
                        Collection<ComplexTypeMetadata> subTypes = mapping.getDatabase().getSubTypes();
                        if (subTypes.isEmpty()) {
                            typesToDelete = Collections.singletonList(mapping.getDatabase());
                        } else {
                            typesToDelete = new ArrayList<ComplexTypeMetadata>(subTypes.size() + 1);
                            typesToDelete.add(mapping.getDatabase());
                            typesToDelete.addAll(subTypes);
                        }
                    }
                    Map<ComplexTypeMetadata, Map<String, List>> recordsToDeleteMap = new HashMap<ComplexTypeMetadata, Map<String, List>>();
                    for (ComplexTypeMetadata typeToDelete : typesToDelete) {
                        InboundReferences inboundReferences = new InboundReferences(typeToDelete);
                        Set<ReferenceFieldMetadata> references = internalRepository.accept(inboundReferences);
                        // Empty values from intermediate tables to this non instantiable type and unset inbound
                        // references
                        if (typeToDelete.equals(mainType)) {
                            for (ReferenceFieldMetadata reference : references) {
                                if (reference.isMany()) {
                                    // No need to check for mandatory collections of references since constraint
                                    // cannot be expressed in db schema
                                    String formattedTableName = tableResolver.getCollectionTable(reference);
                                    session.createSQLQuery("delete from " + formattedTableName).executeUpdate(); //$NON-NLS-1$
                                } else {
                                    String referenceTableName = tableResolver
                                            .get(reference.getContainingType());
                                    if (referenceTableName.startsWith("X_ANONYMOUS")) { //$NON-NLS-1$
                                        session.createSQLQuery("delete from " + referenceTableName) //$NON-NLS-1$
                                                .executeUpdate();
                                    }
                                }
                            }
                        } else {
                            // Secondary type: collect the referenced-key values still present so
                            // deleteData can remove the matching rows afterwards.
                            for (ReferenceFieldMetadata reference : references) {
                                if (reference.getContainingType().equals(mainType)) {
                                    HashMap<String, List> fieldsCondition = new HashMap<>();
                                    if (reference.isMany()) {
                                        // No need to check for mandatory collections of references since constraint
                                        // cannot be expressed in db schema
                                        String formattedTableName = tableResolver.getCollectionTable(reference);
                                        session.createSQLQuery("delete from " + formattedTableName) //$NON-NLS-1$
                                                .executeUpdate();
                                    } else {
                                        String referenceTableName = tableResolver
                                                .get(reference.getContainingType());
                                        if (reference.getReferencedField() instanceof CompoundFieldMetadata) {
                                            // Composite FK: gather values for each component field.
                                            FieldMetadata[] fields = ((CompoundFieldMetadata) reference
                                                    .getReferencedField()).getFields();
                                            for (FieldMetadata field : fields) {
                                                List list = session.createSQLQuery("select " //$NON-NLS-1$
                                                        + tableResolver.get(field, reference.getName()) + " from " //$NON-NLS-1$
                                                        + referenceTableName).list();
                                                if (list == null || list.isEmpty()) {
                                                    continue;
                                                } else {
                                                    fieldsCondition.put(
                                                            tableResolver.get(reference.getReferencedField()),
                                                            list);
                                                }
                                            }
                                        } else {
                                            List list = session.createSQLQuery("select " //$NON-NLS-1$
                                                    + tableResolver.get(reference.getReferencedField(),
                                                            reference.getName())
                                                    + " from " + referenceTableName).list(); //$NON-NLS-1$
                                            if (list == null || list.isEmpty()) {
                                                continue;
                                            } else {
                                                fieldsCondition.put(
                                                        tableResolver.get(reference.getReferencedField()), list);
                                            }
                                        }
                                        recordsToDeleteMap.put(typeToDelete, fieldsCondition);
                                    }
                                }
                            }
                        }
                    }
                    deleteData(mapping.getDatabase(), new HashMap<String, List>(), mapping);
                    for (Map.Entry<ComplexTypeMetadata, Map<String, List>> entry : recordsToDeleteMap
                            .entrySet()) {
                        // Empty values in type isMany=true reference
                        deleteData(entry.getKey(), entry.getValue(), mapping);
                    }
                } finally {
                    session.setFlushMode(previousFlushMode);
                }
                return;
            }
        }
        // Generic fall back for deletions (filter)
        if (userQuery instanceof Select) {
            ((Select) userQuery).setForUpdate(true);
        }
        Iterable<DataRecord> records = internalFetch(session, userQuery,
                Collections.<ResultsCallback>emptySet());
        for (DataRecord currentDataRecord : records) {
            ComplexTypeMetadata currentType = currentDataRecord.getType();
            List<ComplexTypeMetadata> types = new ArrayList<>();
            if (userQuery instanceof Select) {
                types.addAll(((Select) userQuery).getTypes());
            }
            if (types.isEmpty() || types.contains(currentType)) {
                TypeMapping mapping = mappingRepository.getMappingFromUser(currentType);
                if (mapping == null) {
                    throw new IllegalArgumentException(
                            "Type '" + currentType.getName() + "' does not have a database mapping."); //$NON-NLS-1$ //$NON-NLS-2$
                }
                Class<?> clazz = storageClassLoader.getClassFromType(mapping.getDatabase());
                // Build the id: single key field directly, composite key via converter.
                Serializable idValue;
                Collection<FieldMetadata> keyFields = currentType.getKeyFields();
                if (keyFields.size() == 1) {
                    idValue = (Serializable) currentDataRecord.get(keyFields.iterator().next());
                } else {
                    List<Object> compositeIdValues = new LinkedList<Object>();
                    for (FieldMetadata keyField : keyFields) {
                        compositeIdValues.add(currentDataRecord.get(keyField));
                    }
                    idValue = ObjectDataRecordConverter.createCompositeId(storageClassLoader, clazz,
                            compositeIdValues);
                }
                Wrapper object = (Wrapper) session.get(clazz, idValue, LockOptions.READ);
                if (object != null) {
                    session.delete(object);
                } else {
                    // Row gone already (same transaction deleted it) — warn, don't fail.
                    LOGGER.warn("Instance of type '" + currentType.getName() + "' and ID '" + idValue.toString() //$NON-NLS-1$ //$NON-NLS-2$
                            + "' has already been deleted within same transaction."); //$NON-NLS-1$
                }
            }
        }
    } catch (ConstraintViolationException e) {
        throw new com.amalto.core.storage.exception.ConstraintViolationException(e);
    } catch (HibernateException e) {
        throw new RuntimeException(e);
    } finally {
        this.releaseSession();
        storageClassLoader.unbind(Thread.currentThread());
    }
}
From source file:org.nuxeo.ecm.core.storage.sql.Mapper.java
/** * Returns a loggable value using pseudo-SQL syntax. *//*from www .j a v a2 s .c om*/ @SuppressWarnings("boxing") private static String loggedValue(Serializable value) { if (value == null) { return "NULL"; } if (value instanceof String) { String v = (String) value; if (v.length() > DEBUG_MAX_STRING) { v = v.substring(0, DEBUG_MAX_STRING) + "...(" + v.length() + " chars)..."; } return "'" + v.replace("'", "''") + "'"; } if (value instanceof Calendar) { Calendar cal = (Calendar) value; char sign; int offset = cal.getTimeZone().getOffset(cal.getTimeInMillis()) / 60000; if (offset < 0) { offset = -offset; sign = '-'; } else { sign = '+'; } return String.format("TIMESTAMP '%04d-%02d-%02dT%02d:%02d:%02d.%03d%c%02d:%02d'", cal.get(Calendar.YEAR), // cal.get(Calendar.MONTH) + 1, // cal.get(Calendar.DAY_OF_MONTH), // cal.get(Calendar.HOUR_OF_DAY), // cal.get(Calendar.MINUTE), // cal.get(Calendar.SECOND), // cal.get(Calendar.MILLISECOND), // sign, offset / 60, offset % 60); } if (value instanceof Binary) { return "'" + ((Binary) value).getDigest() + "'"; } if (value.getClass().isArray()) { Serializable[] v = (Serializable[]) value; StringBuilder b = new StringBuilder(); b.append('['); for (int i = 0; i < v.length; i++) { if (i > 0) { b.append(','); if (i > DEBUG_MAX_ARRAY) { b.append("...(" + v.length + " items)..."); break; } } b.append(loggedValue(v[i])); } b.append(']'); return b.toString(); } return value.toString(); }
From source file:ddf.catalog.impl.CatalogFrameworkImpl.java
/**
 * Retrieves the product (resource) described by {@code resourceRequest}.
 * <p>
 * Pipeline: pre-resource policy/access/plugin processing, then resolution of the metacard
 * and resource URI, then a cache lookup, then download from either a remote federated
 * source or the local source, then post-resource policy/access/plugin processing.
 * In fanout configurations every request is treated as enterprise-wide.
 *
 * @param resourceRequest  the resource request (by id or by URI)
 * @param isEnterprise     whether to search all federated sources
 * @param resourceSiteName target site; must be non-null unless enterprise
 * @return the resource response, never null on normal return
 * @throws IOException                   on stream failures
 * @throws ResourceNotFoundException     if the resource cannot be located
 * @throws ResourceNotSupportedException if a plugin stops processing
 */
@SuppressWarnings("javadoc")
protected ResourceResponse getResource(ResourceRequest resourceRequest, boolean isEnterprise,
        String resourceSiteName) throws IOException, ResourceNotFoundException, ResourceNotSupportedException {
    ResourceResponse resourceResponse = null;
    ResourceRequest resourceReq = resourceRequest;
    String resourceSourceName = resourceSiteName;
    if (fanoutEnabled) {
        isEnterprise = true; // fanout always queries the enterprise
    }
    if (resourceSourceName == null && !isEnterprise) {
        throw new ResourceNotFoundException("resourceSiteName cannot be null when obtaining resource.");
    }
    validateGetResourceRequest(resourceReq);
    try {
        // Pre-resource policy plugins build the operation security map.
        HashMap<String, Set<String>> requestPolicyMap = new HashMap<>();
        for (PolicyPlugin plugin : frameworkProperties.getPolicyPlugins()) {
            PolicyResponse policyResponse = plugin.processPreResource(resourceReq);
            buildPolicyMap(requestPolicyMap, policyResponse.operationPolicy().entrySet());
        }
        resourceReq.getProperties().put(PolicyPlugin.OPERATION_SECURITY, requestPolicyMap);
        for (AccessPlugin plugin : frameworkProperties.getAccessPlugins()) {
            resourceReq = plugin.processPreResource(resourceReq);
        }
        // Pre-resource plugins may transform the request; individual failures are tolerated.
        for (PreResourcePlugin plugin : frameworkProperties.getPreResource()) {
            try {
                resourceReq = plugin.process(resourceReq);
            } catch (PluginExecutionException e) {
                LOGGER.info("Plugin processing failed. This is allowable. Skipping to next plugin.", e);
            }
        }
        Map<String, Serializable> requestProperties = resourceReq.getProperties();
        LOGGER.debug("Attempting to get resource from siteName: {}", resourceSourceName);
        // At this point we pull out the properties and use them.
        Serializable sourceIdProperty = requestProperties.get(ResourceRequest.SOURCE_ID);
        if (sourceIdProperty != null) {
            resourceSourceName = sourceIdProperty.toString();
        }
        Serializable enterpriseProperty = requestProperties.get(ResourceRequest.IS_ENTERPRISE);
        if (enterpriseProperty != null) {
            if (Boolean.parseBoolean(enterpriseProperty.toString())) {
                isEnterprise = true;
            }
        }
        // check if the resourceRequest has an ID only
        // If so, the metacard needs to be found and the Resource URI
        StringBuilder resolvedSourceIdHolder = new StringBuilder();
        ResourceInfo resourceInfo = getResourceInfo(resourceReq, resourceSourceName, isEnterprise,
                resolvedSourceIdHolder, requestProperties);
        if (resourceInfo == null) {
            throw new ResourceNotFoundException("Resource could not be found for the given attribute value: "
                    + resourceReq.getAttributeValue());
        }
        URI responseURI = resourceInfo.getResourceUri();
        Metacard metacard = resourceInfo.getMetacard();
        String resolvedSourceId = resolvedSourceIdHolder.toString();
        LOGGER.debug("resolvedSourceId = {}", resolvedSourceId);
        LOGGER.debug("ID = {}", getId());
        if (isEnterprise) {
            // since resolvedSourceId specifies what source the product
            // metacard resides on, we can just
            // change resourceSiteName to be that value, and then the
            // following if-else statements will
            // handle retrieving the product on the correct source
            resourceSourceName = resolvedSourceId;
        }
        // Cache key identifies this product for the resource cache.
        String key;
        try {
            key = new CacheKey(metacard, resourceRequest).generateKey();
        } catch (IllegalArgumentException e) {
            LOGGER.error("Resource not found", resourceRequest);
            throw new ResourceNotFoundException("Resource not found : " + resourceRequest);
        }
        // Serve from the resource cache when a valid entry exists.
        if (frameworkProperties.getResourceCache() != null
                && frameworkProperties.getResourceCache().containsValid(key, metacard)) {
            Resource resource = frameworkProperties.getResourceCache().getValid(key, metacard);
            if (resource != null) {
                resourceResponse = new ResourceResponseImpl(resourceRequest, requestProperties, resource);
                LOGGER.info("Successfully retrieved product from cache for metacard ID = {}",
                        metacard.getId());
            } else {
                LOGGER.info("Unable to get resource from cache. Have to retrieve it from source {}",
                        resourceSourceName);
            }
        }
        if (resourceResponse == null) {
            // retrieve product from specified federated site if not in cache
            if (!resourceSourceName.equals(getId())) {
                LOGGER.debug("Searching federatedSource {} for resource.", resourceSourceName);
                LOGGER.debug("metacard for product found on source: {}", resolvedSourceId);
                FederatedSource source;
                source = frameworkProperties.getFederatedSources().get(resourceSourceName);
                if (source != null) {
                    LOGGER.debug("Adding federated site to federated query: {}", source.getId());
                }
                if (source != null) {
                    LOGGER.debug("Retrieving product from remote source {}", source.getId());
                    ResourceRetriever retriever = new RemoteResourceRetriever(source, responseURI,
                            requestProperties);
                    try {
                        resourceResponse = frameworkProperties.getReliableResourceDownloadManager()
                                .download(resourceRequest, metacard, retriever);
                    } catch (DownloadException e) {
                        LOGGER.info("Unable to download resource", e);
                    }
                } else {
                    LOGGER.warn("Could not find federatedSource: {}", resourceSourceName);
                }
            } else {
                // Product lives on this (local) source.
                LOGGER.debug("Retrieving product from local source {}", resourceSourceName);
                ResourceRetriever retriever = new LocalResourceRetriever(
                        frameworkProperties.getResourceReaders(), responseURI, requestProperties);
                try {
                    resourceResponse = frameworkProperties.getReliableResourceDownloadManager()
                            .download(resourceRequest, metacard, retriever);
                } catch (DownloadException e) {
                    LOGGER.info("Unable to download resource", e);
                }
            }
        }
        resourceResponse = validateFixGetResourceResponse(resourceResponse, resourceReq);
        // Post-resource policy/access/plugin processing mirrors the pre-resource phase.
        HashMap<String, Set<String>> responsePolicyMap = new HashMap<>();
        for (PolicyPlugin plugin : frameworkProperties.getPolicyPlugins()) {
            PolicyResponse policyResponse = plugin.processPostResource(resourceResponse, metacard);
            buildPolicyMap(responsePolicyMap, policyResponse.operationPolicy().entrySet());
        }
        resourceResponse.getProperties().put(PolicyPlugin.OPERATION_SECURITY, responsePolicyMap);
        for (AccessPlugin plugin : frameworkProperties.getAccessPlugins()) {
            resourceResponse = plugin.processPostResource(resourceResponse, metacard);
        }
        for (PostResourcePlugin plugin : frameworkProperties.getPostResource()) {
            try {
                resourceResponse = plugin.process(resourceResponse);
            } catch (PluginExecutionException e) {
                LOGGER.info("Plugin processing failed. This is allowable. Skipping to next plugin.", e);
            }
        }
        resourceResponse.getProperties().put(Constants.METACARD_PROPERTY, metacard);
    } catch (DataUsageLimitExceededException e) {
        LOGGER.error("RuntimeException caused by: ", e);
        throw e;
    } catch (RuntimeException e) {
        LOGGER.error("RuntimeException caused by: ", e);
        throw new ResourceNotFoundException("Unable to find resource");
    } catch (StopProcessingException e) {
        LOGGER.error("Resource not supported", e);
        throw new ResourceNotSupportedException(FAILED_BY_GET_RESOURCE_PLUGIN + e.getMessage());
    }
    return resourceResponse;
}
From source file:ddf.catalog.impl.CatalogFrameworkImpl.java
/**
 * Deletes the metacards identified by the request's attribute values, running the full
 * plugin pipeline around the operation: pre-delete policy and access plugins, pre-ingest
 * plugins, the local storage and catalog providers, optional remote catalog-store deletes,
 * then post-delete policy/access plugins and post-ingest plugins.
 *
 * @param deleteRequest identifies the metacards to delete (attribute name + values) and
 *                      carries the operation properties the plugins read and mutate
 * @return the (possibly plugin-modified) delete response; merged with the remote response
 *         when this is a catalog-store request
 * @throws IngestException            on fanout mode, federation query failure, or a plugin
 *                                    stopping processing
 * @throws SourceUnavailableException when the request is local but the local catalog or
 *                                    storage provider is unavailable
 */
@Override
public DeleteResponse delete(DeleteRequest deleteRequest) throws IngestException, SourceUnavailableException {
    boolean catalogStoreRequest = isCatalogStoreRequest(deleteRequest);
    setFlagsOnRequest(deleteRequest);

    // Fanout configurations do not support local ingest operations.
    if (fanoutEnabled) {
        throw new IngestException(FANOUT_MESSAGE);
    }

    validateDeleteRequest(deleteRequest);

    if (Requests.isLocal(deleteRequest) && (!sourceIsAvailable(catalog) || !storageIsAvailable(storage))) {
        throw new SourceUnavailableException(
                "Local provider is not available, cannot perform delete operation.");
    }

    DeleteStorageRequest deleteStorageRequest = null;
    DeleteResponse deleteResponse = null;
    try {
        // Build one equality filter per requested attribute value; combined below into a
        // single query so the soon-to-be-deleted metacards can be fetched up front.
        List<Filter> idFilters = new ArrayList<>();
        for (Serializable serializable : deleteRequest.getAttributeValues()) {
            idFilters.add(frameworkProperties.getFilterBuilder().attribute(deleteRequest.getAttributeName())
                    .is().equalTo().text(serializable.toString()));
        }

        QueryImpl queryImpl = new QueryImpl(getFilterWithAdditionalFilters(idFilters));
        queryImpl.setStartIndex(1);
        queryImpl.setPageSize(deleteRequest.getAttributeValues().size());
        QueryRequestImpl queryRequest = new QueryRequestImpl(queryImpl, deleteRequest.getStoreIds());

        QueryResponse query;
        List<Metacard> metacards = new ArrayList<>(deleteRequest.getAttributeValues().size());
        // The pre-fetch is only needed when policy plugins exist, since they are the
        // consumers of the metacard list.
        if (!frameworkProperties.getPolicyPlugins().isEmpty()) {
            try {
                query = doQuery(queryRequest, frameworkProperties.getFederationStrategy());
                metacards.addAll(
                        query.getResults().stream().map(Result::getMetacard).collect(Collectors.toList()));
            } catch (FederationException e) {
                LOGGER.warn("Unable to complete query for updated metacards.", e);
                throw new IngestException("Exception during runtime while performing delete");
            }
            // Every requested value must resolve to a metacard, otherwise abort.
            if (metacards.size() < deleteRequest.getAttributeValues().size()) {
                throw new StopProcessingException("Unable to remove all metacards contained in request.");
            }
        }

        deleteStorageRequest = new DeleteStorageRequestImpl(metacards, deleteRequest.getProperties());

        // Pre-delete policy plugins: accumulate operation policy onto the request.
        HashMap<String, Set<String>> requestPolicyMap = new HashMap<>();
        Map<String, Serializable> unmodifiableProperties = Collections
                .unmodifiableMap(deleteRequest.getProperties());
        for (PolicyPlugin plugin : frameworkProperties.getPolicyPlugins()) {
            PolicyResponse policyResponse = plugin.processPreDelete(metacards, unmodifiableProperties);
            buildPolicyMap(requestPolicyMap, policyResponse.operationPolicy().entrySet());
        }
        deleteRequest.getProperties().put(PolicyPlugin.OPERATION_SECURITY, requestPolicyMap);

        // Access plugins may veto or rewrite the request (StopProcessingException aborts).
        for (AccessPlugin plugin : frameworkProperties.getAccessPlugins()) {
            deleteRequest = plugin.processPreDelete(deleteRequest);
        }

        // Record the operation transaction so downstream consumers can roll back.
        deleteRequest.getProperties().put(Constants.OPERATION_TRANSACTION_KEY,
                new OperationTransactionImpl(OperationTransaction.OperationType.DELETE, metacards));

        // Pre-ingest plugin failures are tolerated by design; processing continues.
        for (PreIngestPlugin plugin : frameworkProperties.getPreIngest()) {
            try {
                deleteRequest = plugin.process(deleteRequest);
            } catch (PluginExecutionException e) {
                LOGGER.info("Plugin processing failed. This is allowable. Skipping to next plugin.", e);
            }
        }
        // Re-validate: plugins may have mutated the request.
        validateDeleteRequest(deleteRequest);

        // Call the Provider delete method
        LOGGER.debug("Calling catalog.delete() with {} entries.", deleteRequest.getAttributeValues().size());

        if (Requests.isLocal(deleteRequest)) {
            // Stored content is deleted before the metacards; a storage failure aborts
            // the whole operation so metacards are not orphaned from their content.
            try {
                storage.delete(deleteStorageRequest);
            } catch (StorageException e) {
                LOGGER.error("Unable to delete stored content items. Not removing stored metacards", e);
                throw new InternalIngestException(
                        "Unable to delete stored content items. Not removing stored metacards.", e);
            }
            deleteResponse = catalog.delete(deleteRequest);
        }

        if (catalogStoreRequest) {
            DeleteResponse remoteDeleteResponse = doRemoteDelete(deleteRequest);
            if (deleteResponse == null) {
                deleteResponse = remoteDeleteResponse;
            } else {
                // Merge remote results into the local response.
                deleteResponse.getProperties().putAll(remoteDeleteResponse.getProperties());
                deleteResponse.getProcessingErrors().addAll(remoteDeleteResponse.getProcessingErrors());
            }
        }

        // Post-delete policy plugins: per-item policy is attached to each deleted
        // metacard; operation policy is accumulated onto the request.
        HashMap<String, Set<String>> responsePolicyMap = new HashMap<>();
        unmodifiableProperties = Collections.unmodifiableMap(deleteRequest.getProperties());
        if (deleteResponse != null && deleteResponse.getDeletedMetacards() != null) {
            for (Metacard metacard : deleteResponse.getDeletedMetacards()) {
                HashMap<String, Set<String>> itemPolicyMap = new HashMap<>();
                for (PolicyPlugin plugin : frameworkProperties.getPolicyPlugins()) {
                    PolicyResponse policyResponse = plugin.processPostDelete(metacard, unmodifiableProperties);
                    buildPolicyMap(itemPolicyMap, policyResponse.itemPolicy().entrySet());
                    buildPolicyMap(responsePolicyMap, policyResponse.operationPolicy().entrySet());
                }
                metacard.setAttribute(new AttributeImpl(Metacard.SECURITY, itemPolicyMap));
            }
        }
        deleteRequest.getProperties().put(PolicyPlugin.OPERATION_SECURITY, responsePolicyMap);

        for (AccessPlugin plugin : frameworkProperties.getAccessPlugins()) {
            deleteResponse = plugin.processPostDelete(deleteResponse);
        }

        // Post results to be available for pubsub
        deleteResponse = validateFixDeleteResponse(deleteResponse, deleteRequest);
        for (final PostIngestPlugin plugin : frameworkProperties.getPostIngest()) {
            try {
                deleteResponse = plugin.process(deleteResponse);
            } catch (PluginExecutionException e) {
                LOGGER.info("Plugin exception", e);
            }
        }
    } catch (StopProcessingException see) {
        LOGGER.warn(PRE_INGEST_ERROR + see.getMessage(), see);
        throw new IngestException(PRE_INGEST_ERROR + see.getMessage());
    } catch (RuntimeException re) {
        LOGGER.warn("Exception during runtime while performing delete", re);
        throw new InternalIngestException("Exception during runtime while performing delete");
    } finally {
        // Commit the storage delete even on failure paths, once it was created.
        if (deleteStorageRequest != null) {
            try {
                storage.commit(deleteStorageRequest);
            } catch (StorageException e) {
                LOGGER.error("Unable to remove stored content items.", e);
            }
        }
    }

    return deleteResponse;
}
From source file:org.ow2.proactive_grid_cloud_portal.scheduler.SchedulerStateRest.java
private String getTaskResultValueAsStringOrExceptionStackTrace(TaskResult taskResult) { if (taskResult == null) { // task is not finished yet return null; }// www. j a v a2 s . c o m String value = null; // No entry if the task had exception if (taskResult.hadException()) { value = StackTraceUtil.getStackTrace(taskResult.getException()); } else { try { Serializable instanciatedValue = taskResult.value(); if (instanciatedValue != null) { value = instanciatedValue.toString(); } } catch (InternalSchedulerException e) { value = UNKNOWN_VALUE_TYPE; } catch (Throwable t) { value = "Unable to get the value due to " + t.getMessage(); } } return value; }
From source file:org.redpill.alfresco.repo.findwise.SearchIntegrationServiceImpl.java
/** * Create bean object from node properties * //from ww w. j a v a2s . c o m * @param nodeRef * @return */ protected FindwiseObjectBean createFindwiseObjectBean(final NodeRef nodeRef, boolean empty) { FindwiseObjectBean fob = new FindwiseObjectBean(); fob.setId(nodeRef.toString()); if (empty == false) { List<FindwiseFieldBean> fields = new ArrayList<FindwiseFieldBean>(); Map<QName, Serializable> properties = nodeService.getProperties(nodeRef); // Get node type QName nodeType = nodeService.getType(nodeRef); String title = nodeType.toPrefixString(namespaceService); fields.add(new FindwiseFieldBean("type", "string", title)); // Get site info SiteInfo site = siteService.getSite(nodeRef); String siteName = site.getTitle(); String siteShortName = site.getShortName(); fields.add(new FindwiseFieldBean("siteName", "string", siteName)); fields.add(new FindwiseFieldBean("siteShortName", "string", siteShortName)); // Get download & details url String shareUrl = UrlUtil.getShareUrl(sysAdminParams); String downloadPath = "/proxy/alfresco/api/node/content/" + nodeRef.toString().replace("://", "/"); downloadPath += "/" + URLEncoder.encode((String) properties.get(ContentModel.PROP_NAME)); downloadPath += "?a=true"; String downloadUrl = shareUrl + downloadPath; String detailsPath = "/page/site/" + siteShortName + "/document-details?nodeRef=" + URLEncoder.encode(nodeRef.toString()); String detailsUrl = shareUrl + detailsPath; fields.add(new FindwiseFieldBean("downloadUrl", "string", downloadUrl)); fields.add(new FindwiseFieldBean("detailsUrl", "string", detailsUrl)); // Add more user info for creator NodeRef creatorNodeRef = personService .getPersonOrNull((String) properties.get(ContentModel.PROP_CREATOR)); if (creatorNodeRef != null) { Map<QName, Serializable> personProperties = nodeService.getProperties(creatorNodeRef); String firstName = (String) personProperties.get(ContentModel.PROP_FIRSTNAME); if (firstName == null) { firstName = ""; } fields.add(new 
FindwiseFieldBean("creatorFirstName", "string", firstName)); String lastName = (String) personProperties.get(ContentModel.PROP_LASTNAME); if (lastName == null) { lastName = ""; } fields.add(new FindwiseFieldBean("creatorLastName", "string", lastName)); fields.add(new FindwiseFieldBean("creatorFullName", "string", firstName + " " + lastName)); String email = (String) personProperties.get(ContentModel.PROP_EMAIL); if (email == null) { email = ""; } fields.add(new FindwiseFieldBean("creatorEmail", "string", email)); } // Add more user info for modifier NodeRef modifierNodeRef = personService .getPersonOrNull((String) properties.get(ContentModel.PROP_MODIFIER)); if (modifierNodeRef != null) { Map<QName, Serializable> personProperties = nodeService.getProperties(modifierNodeRef); String firstName = (String) personProperties.get(ContentModel.PROP_FIRSTNAME); if (firstName == null) { firstName = ""; } fields.add(new FindwiseFieldBean("modifierFirstName", "string", firstName)); String lastName = (String) personProperties.get(ContentModel.PROP_LASTNAME); if (lastName == null) { lastName = ""; } fields.add(new FindwiseFieldBean("modifierLastName", "string", lastName)); fields.add(new FindwiseFieldBean("modifierFullName", "string", firstName + " " + lastName)); String email = (String) personProperties.get(ContentModel.PROP_EMAIL); if (email == null) { email = ""; } fields.add(new FindwiseFieldBean("modifierEmail", "string", email)); } Iterator<QName> it = properties.keySet().iterator(); while (it.hasNext()) { FindwiseFieldBean ffb = new FindwiseFieldBean(); QName property = it.next(); if (LOG.isTraceEnabled()) { LOG.trace("Handling property " + property.toString()); } Serializable value = properties.get(property); if (NamespaceService.SYSTEM_MODEL_1_0_URI.equals(property.getNamespaceURI()) || FindwiseIntegrationModel.URI.equals(property.getNamespaceURI()) || !isPropertyAllowedToIndex(property)) { if (LOG.isTraceEnabled()) { LOG.trace("Skiping property " + 
property.toString()); } continue; } String javaClassName = "unknown"; PropertyDefinition propertyDefinition = dictionaryService.getProperty(property); // The following condition is needed to properly handle residual // properties if (propertyDefinition != null) { DataTypeDefinition dataType = propertyDefinition.getDataType(); if (dataType != null) { javaClassName = dataType.getJavaClassName(); } } String type; if (LOG.isTraceEnabled()) { LOG.trace("Detected " + javaClassName + " java type for property " + property.toString()); } if ("java.util.Date".equals(javaClassName)) { if (LOG.isTraceEnabled()) { LOG.trace("Converting " + property.toString() + " to date"); } type = "string"; DateTime date = new DateTime((Date) value, DateTimeZone.UTC); ffb.setValue(date.toString()); } else if ("org.alfresco.service.cmr.repository.ContentData".equals(javaClassName)) { // Create Base64 data if (LOG.isTraceEnabled()) { LOG.trace("Handling content on property " + property.toString()); } ContentReader contentReader = contentService.getReader(nodeRef, property); if (contentReader != null) { InputStream contentInputStream = contentReader.getContentInputStream(); try { byte[] nodeBytes = IOUtils.toByteArray(contentInputStream); ffb.setValue(new String(Base64.encodeBase64(nodeBytes))); } catch (IOException e) { LOG.warn("Error while reading content", e); } finally { IOUtils.closeQuietly(contentInputStream); } } else { LOG.warn(nodeRef + " had no content"); } type = "binary"; } else { if (LOG.isDebugEnabled()) { LOG.debug("Unhandled property type, using default conversion"); } type = "string"; ffb.setValue(value.toString()); } ffb.setType(type); String name = property.toPrefixString(namespaceService); if (LOG.isTraceEnabled()) { LOG.trace("Short name for property " + property.toString() + ": " + name); } ffb.setName(name); fields.add(ffb); } fob.setFields(fields); } return fob; }