List of usage examples for javax.transaction UserTransaction begin
void begin() throws NotSupportedException, SystemException;
From source file:it.doqui.index.ecmengine.business.personalization.importer.ArchiveImporterJob.java
/**
 * Quartz job entry point: runs every pending archive-import batch job on each
 * configured repository.
 * <p>
 * For every repository it (1) resets any batch job left in RUNNING state by a
 * previously crashed run back to READY (this job is a singleton, so a RUNNING
 * state here can only be a leftover error condition), then (2) consumes READY
 * jobs one by one: the archive file is moved from the content-store temp area
 * into the import directory, extracted (tar / tar.gz / zip), imported
 * recursively via handleRootFolder, and the job is marked FINISHED, or ERROR
 * on failure.
 *
 * @param context the Quartz execution context whose job data map carries the
 *                service beans and the import directory
 * @throws JobExecutionException if an unexpected error occurs outside the
 *                               per-job error handling
 */
public void execute(JobExecutionContext context) throws JobExecutionException {
    logger.debug("[ArchiveImporterJob::execute] BEGIN");
    // Singleton guard: double-checked 'running' flag so that only one
    // instance of this job executes at a time.
    if (!running) {
        synchronized (this) {
            if (!running) {
                running = true;
            } else {
                logger.debug("[ArchiveImporterJob::execute] job already running 1");
                logger.debug("[ArchiveImporterJob::execute] END");
                return;
            }
        }
    } else {
        logger.debug("[ArchiveImporterJob::execute] job already running 2");
        logger.debug("[ArchiveImporterJob::execute] END");
        return;
    }
    logger.debug("[ArchiveImporterJob::execute] START");
    JobBusinessInterface jobManager = null;
    BatchJob batchJob = null;
    try {
        // Look up the JobManager instance from the job data map.
        jobManager = (JobBusinessInterface) context.getJobDetail().getJobDataMap()
                .get(ECMENGINE_JOB_MANAGER_BEAN);
        if (jobManager != null) {
            List<Repository> repositories = RepositoryManager.getInstance().getRepositories();
            for (Repository repository : repositories) {
                logger.debug("[ArchiveImporterJob::execute] import archive on repository '"
                        + repository.getId() + "'");
                RepositoryManager.setCurrentRepository(repository.getId());
                // List the batch jobs of executor ECMENGINE_ARCHIVE_IMPORTER_JOB_REF.
                // Possible states:
                //   Ready    - waiting to be processed
                //   Running  - import in progress
                //   Finished - done
                // Being a singleton job, only READY (new) or FINISHED (done) should be
                // seen here; a RUNNING job is necessarily an error leftover, so it is
                // reset to READY and the algorithm below resumes the import
                // incrementally.
                // NOTE(review): all jobs of the executor are fetched; if this becomes a
                // performance problem, add a filter on the RUNNING status.
                BatchJob[] bjs = jobManager.getJobsByExecutor(ECMENGINE_ARCHIVE_IMPORTER_JOB_REF);
                if (bjs != null) {
                    for (BatchJob bj : bjs) {
                        logger.debug("[ArchiveImporterJob::execute] job status " + bj.getId()
                                + ":" + bj.getStatus());
                        if (bj.getStatus().equalsIgnoreCase(JobStatus.RUNNING)) {
                            logger.debug("[ArchiveImporterJob::execute] update status to ready " + bj.getId());
                            // Reset the leftover RUNNING job to READY.
                            bj.setStatus(JobStatus.READY);
                            jobManager.updateJob(bj);
                        }
                    }
                } else {
                    logger.debug("[ArchiveImporterJob::execute] BatchJob NULL per "
                            + ECMENGINE_ARCHIVE_IMPORTER_JOB_REF);
                }
                // Consistent state reached: every pending job is READY, whether brand
                // new or a resumed leftover. Consume them one at a time.
                while ((batchJob = jobManager.getNextJob(ECMENGINE_ARCHIVE_IMPORTER_JOB_REF)) != null) {
                    logger.debug("[ArchiveImporterJob::execute] start batchJob " + batchJob.getId());
                    // NOTE(review): batchJob cannot be null here (while condition).
                    if (batchJob != null) {
                        try {
                            // Extract the execution parameters.
                            BatchJobParam pName = batchJob.getParam(PARAM_NAME);
                            BatchJobParam pFormat = batchJob.getParam(PARAM_FORMAT);
                            BatchJobParam pStore = batchJob.getParam(PARAM_CONTENTSTORE_DIR);
                            String importDirectory = (String) context.getJobDetail().getJobDataMap()
                                    .get(ECMENGINE_IMPORT_DIRECTORY);
                            // Validate them (checkParam throws on failure).
                            checkParam(batchJob, pName, "Archive name not found");
                            checkParam(batchJob, pFormat, "Archive format not found");
                            checkParam(batchJob, pStore, "Archive store not found");
                            checkParam(batchJob, importDirectory, "importDirectory null");
                            // Parameters validated: move the archive from the temp store
                            // to the project's import directory, then extract it.
                            String cFrom = pStore.getValue() + File.separator + pName.getValue();
                            File oFrom = new File(cFrom);
                            logger.debug("[ArchiveImporterJob::execute] From:" + cFrom);
                            // Create the output directory if it does not exist yet.
                            File oDir = new File(importDirectory);
                            if (!oDir.exists()) {
                                oDir.mkdir();
                            }
                            // Import file location.
                            String cFile = importDirectory + File.separator + pName.getValue();
                            File oFile = new File(cFile);
                            logger.debug("[ArchiveImporterJob::execute] Import:" + cFile);
                            // If the source file exists, move it where it will be processed.
                            if (oFrom.exists()) {
                                // Try a rename first because it is faster.
                                if (!oFrom.renameTo(oFile)) {
                                    // Fall back to a (slower) copy.
                                    if (!copyFile(oFrom, oFile)) {
                                        batchJob.setMessage(
                                                "Unable to copy from (" + cFrom + ") to (" + cFile + ")");
                                        throw new EcmEngineException(
                                                "ArchiveImporterJob: " + batchJob.getMessage());
                                    } else {
                                        // Copy succeeded: remove the original.
                                        oFrom.delete();
                                    }
                                }
                            }
                            // Path and file name are kept in two separate job parameters
                            // because parameters are capped at 256 chars; splitting them
                            // maximises the usable path length.
                            // Once processed, the file is renamed to "<name>.processed" to
                            // keep track of it (also because a corrupted zip may appear
                            // imported without any error message).
                            String cFileRenamed = cFile + ".processed";
                            File oFileRenamed = new File(cFileRenamed);
                            // The temp extraction path is "<archive>.extract": uniqueness
                            // comes from the archive file name itself.
                            String tempPath = cFile + ".extract";
                            File tmpWorkDir = new File(tempPath);
                            // If the archive file is missing...
                            if (!oFile.exists()) {
                                logger.debug(
                                        "[ArchiveImporterJob::execute] File di importazione non presente, controllo directory di esplosione");
                                // ...an existing extraction dir means a previous run failed
                                // mid-import; fail only if the extraction dir is missing too.
                                if (!tmpWorkDir.exists()) {
                                    batchJob.setMessage("Archive not found");
                                    throw new EcmEngineException("ArchiveImporterJob: "
                                            + batchJob.getMessage() + " (" + cFile + ")");
                                } else {
                                    logger.debug(
                                            "[ArchiveImporterJob::execute] Directory di importazione presente, procedo con l'importazione");
                                }
                            }
                            // At this point either the archive or the extraction dir exists.
                            // If the archive is present, load it and decompress it.
                            if (oFile.exists()) {
                                byte[] content = getBinary(cFile);
                                logger.debug("[ArchiveImporterJob::execute] Content size: " + content.length);
                                // Create the working dir only if absent: it may already
                                // exist when resuming an interrupted import.
                                if (!tmpWorkDir.exists()) {
                                    if (!tmpWorkDir.mkdirs()) {
                                        batchJob.setMessage("Cant' creare working dir");
                                        throw new EcmEngineException("ArchiveImporterJob: "
                                                + batchJob.getMessage() + " (" + tempPath + ")");
                                    }
                                }
                                // Extract the archive according to its declared format.
                                String cFormat = pFormat.getValue();
                                logger.debug("[ArchiveImporterJob::execute] estrazione archivio (" + cFile
                                        + ")(" + cFormat + ") in " + tempPath);
                                if (ECMENGINE_ARCHIVE_FORMAT_TAR.equals(cFormat)) {
                                    extractTar(new ByteArrayInputStream(content), tempPath);
                                } else if (ECMENGINE_ARCHIVE_FORMAT_TAR_GZ.equals(cFormat)) {
                                    extractTarGz(new ByteArrayInputStream(content), tempPath);
                                } else if (ECMENGINE_ARCHIVE_FORMAT_ZIP.equals(cFormat)) {
                                    extractZip(new ByteArrayInputStream(content), tempPath);
                                } else {
                                    // Unsupported format: abort this job.
                                    batchJob.setMessage("Format not supported");
                                    throw new EcmEngineException("ArchiveImporterJob: "
                                            + batchJob.getMessage() + " (" + cFormat + ")");
                                }
                                // Archive extracted: remove stray copies and rename it.
                                oFileRenamed.delete();
                                oFile.renameTo(oFileRenamed);
                                // Clean up the files involved in the import.
                                // Comment out this line to investigate why a file was not
                                // imported.
                                // NOTE(review): deleting oFileRenamed here undoes the
                                // ".processed" bookkeeping rename above — confirm intent.
                                oFile.delete();
                                oFileRenamed.delete();
                            }
                            // Fetch the services and verify they are available.
                            transactionService = (TransactionService) context.getJobDetail().getJobDataMap()
                                    .get(ECMENGINE_TRANSACTION_SERVICE_BEAN);
                            namespaceService = (NamespaceService) context.getJobDetail().getJobDataMap()
                                    .get(ECMENGINE_NAMESPACE_SERVICE_BEAN);
                            contentService = (ContentService) context.getJobDetail().getJobDataMap()
                                    .get(ECMENGINE_CONTENT_SERVICE_BEAN);
                            nodeService = (NodeService) context.getJobDetail().getJobDataMap()
                                    .get(ECMENGINE_NODE_SERVICE_BEAN);
                            authenticationService = (AuthenticationService) context.getJobDetail()
                                    .getJobDataMap().get(ECMENGINE_AUTHENTICATION_SERVICE_BEAN);
                            checkParam(batchJob, transactionService, "transactionService null");
                            checkParam(batchJob, namespaceService, "namespaceService null");
                            checkParam(batchJob, contentService, "contentService null");
                            checkParam(batchJob, nodeService, "nodeService null");
                            checkParam(batchJob, authenticationService, "authenticationService null");
                            // Read the batch parameters and validate them, aborting on error.
                            BatchJobParam pUID = batchJob.getParam(PARAM_UID);
                            BatchJobParam pStoreProtocol = batchJob.getParam(PARAM_STORE_PROTOCOL);
                            BatchJobParam pStoreIdentifier = batchJob.getParam(PARAM_STORE_IDENTIFIER);
                            BatchJobParam pUser = batchJob.getParam(PARAM_USER);
                            BatchJobParam pPassword = batchJob.getParam(PARAM_PASSWORD);
                            BatchJobParam pContentType = batchJob.getParam(PARAM_CONTENT_TYPE);
                            BatchJobParam pNameProperty = batchJob.getParam(PARAM_CONTENT_NAME_PROPERTY);
                            BatchJobParam pContainerType = batchJob.getParam(PARAM_CONTAINER_TYPE);
                            BatchJobParam pContainerNameProperty = batchJob
                                    .getParam(PARAM_CONTAINER_NAME_PROPERTY);
                            BatchJobParam pContainerAssocType = batchJob.getParam(PARAM_CONTAINER_ASSOC_TYPE);
                            BatchJobParam pParentAssocType = batchJob.getParam(PARAM_PARENT_ASSOC_TYPE);
                            checkParam(batchJob, pUID, "Node UID not found");
                            checkParam(batchJob, pStoreProtocol, "Store Protocol not found");
                            checkParam(batchJob, pStoreIdentifier, "Store Identifier not found");
                            checkParam(batchJob, pUser, "User not found");
                            checkParam(batchJob, pPassword, "Password not found");
                            checkParam(batchJob, pContentType, "Content Type not found");
                            checkParam(batchJob, pNameProperty, "Content Name not found");
                            checkParam(batchJob, pContainerType, "Container Type not found");
                            checkParam(batchJob, pContainerNameProperty, "Container Name not found");
                            checkParam(batchJob, pContainerAssocType, "Container Assoc not found");
                            checkParam(batchJob, pParentAssocType, "Parent Assoc not found");
                            // Resolve the prefixed names into QNames.
                            QName contentTypeQName = resolvePrefixNameToQName(pContentType.getValue());
                            QName contentNamePropertyQName = resolvePrefixNameToQName(pNameProperty.getValue());
                            QName containerTypeQName = resolvePrefixNameToQName(pContainerType.getValue());
                            QName containerNamePropertyQName = resolvePrefixNameToQName(
                                    pContainerNameProperty.getValue());
                            QName containerAssocTypeQName = resolvePrefixNameToQName(
                                    pContainerAssocType.getValue());
                            QName parentAssocTypeQName = resolvePrefixNameToQName(pParentAssocType.getValue());
                            // Get a UserTransaction.
                            UserTransaction transaction = transactionService.getNonPropagatingUserTransaction();
                            try {
                                // Start the transaction.
                                transaction.begin();
                                // Switch to the user that must perform the import.
                                authenticationService.authenticate(pUser.getValue(),
                                        EncryptionHelper.decrypt(pPassword.getValue()).toCharArray());
                            } catch (Exception e) {
                                logger.debug(e);
                                throw e;
                            } finally {
                                // The transaction is always rolled back, even on success —
                                // presumably it only exists to validate the credentials
                                // (original comment was truncated); TODO confirm.
                                try {
                                    transaction.rollback();
                                } catch (Exception e) {
                                }
                            }
                            // Build a node reference for the folder that receives the data.
                            StoreRef sr = new StoreRef(pStoreProtocol.getValue(), pStoreIdentifier.getValue());
                            NodeRef nodeRef = new NodeRef(sr, pUID.getValue());
                            // Run the recursive import.
                            int nContent = handleRootFolder(tmpWorkDir, nodeRef, parentAssocTypeQName,
                                    containerTypeQName, containerNamePropertyQName, containerAssocTypeQName,
                                    contentTypeQName, contentNamePropertyQName);
                            // Mark the job FINISHED and move on to the next one.
                            batchJob.setMessage("Content nuovi: " + nContent + " Datafile "
                                    + pName.getValue() + ".processed");
                            batchJob.setStatus(JobStatus.FINISHED);
                            jobManager.updateJob(batchJob);
                        } catch (Exception e) {
                            logger.error("[ArchiveImporterJob::execute] ERROR", e);
                            try {
                                // Fill in the message if it arrived empty.
                                if (batchJob.getMessage().length() == 0) {
                                    batchJob.setMessage(e.getMessage());
                                }
                                // Mark the job ERROR and move on to the next one.
                                batchJob.setStatus(JobStatus.ERROR);
                                jobManager.updateJob(batchJob);
                            } catch (Exception ee) {
                                // TODO: verify whether silently swallowing this is correct.
                            }
                        } finally {
                            // Must NOT reset the status to READY here, or the job would
                            // loop forever.
                        }
                    }
                }
            }
        } else {
            logger.error("[ArchiveImporterJob::execute] JobManager NULL per " + ECMENGINE_JOB_MANAGER_BEAN);
        }
    } catch (Exception e) {
        logger.error("[ArchiveImporterJob::execute] ERROR", e);
        throw new JobExecutionException(e);
    } finally {
        // Release the singleton guard whatever happens.
        running = false;
        logger.debug("[ArchiveImporterJob::execute] END");
    }
    logger.debug("[ArchiveImporterJob::execute] END run");
}
From source file:it.doqui.index.ecmengine.business.job.move.MoveAggregationJob.java
private List<NodeRef> searchNodeWithProp() throws DictionaryRuntimeException, NotSupportedException, SystemException, SecurityException, IllegalStateException, RollbackException, HeuristicMixedException, HeuristicRollbackException { logger.debug("[MoveAggregationJob::searchNodeWithProp] BEGIN"); //1) ricerca dei nodi spostati //a partire dalla company_home(radice) prendere ogni nodo figlio // e verificare se possiede un aspect con proprieta` di valore "spostabile" // oppure e` possibile fare una ricerca con lucene e prendere i nodi con quel particolare aspect // Utilizzare searchService per puntare direttamente ai nodi che hanno un certo aspect //RepositoryManager.setCurrentRepository("primary"); logger.debug("[MoveAggregationJob::searchNodeWithProp] Ricerca nel repository : " + RepositoryManager.getCurrentRepository()); StoreRef spacesStore = new StoreRef(StoreRef.PROTOCOL_WORKSPACE, "SpacesStore"); //proprieta ecm-sys:stato (dell'aspect ecm-sys:state) SearchParameters searchParams = new SearchParameters(); searchParams.addStore(spacesStore);/*from ww w. j av a2s .c o m*/ searchParams.setLimitBy(LimitBy.UNLIMITED); searchParams.setLimit(0); searchParams.setLanguage(SearchService.LANGUAGE_LUCENE); //ricerca per aspect e poi filtrare sulla property relativa //searchParams.setQuery("ASPECT:\"{http://www.doqui.it/model/ecmengine/system/1.0}state\""); //ricerca direttamente per property //@ecm-sys\\:stato:\"spostabile\" //searchParams.setQuery("@ecm-sys\\:stato:\"spostabile\""); //searchParams.setQuery("@{http://www.doqui.it/model/ecmengine/system/1.0}\\:stato:\"spostabile\""); searchParams.setQuery("@ecm-sys\\:stato:\"spostabile\""); ResultSet resultSet = null; List<NodeRef> listaNodi = null; //List<NodeRef> nodiConProp = null; UserTransaction userTxSource = transactionService.getNonPropagatingUserTransaction(); try { userTxSource.begin(); logger.debug("[MoveAggregationJob::searchNodeWithProp] searchService is " + (searchService != null ? 
" <> null" : " null")); logger.debug("[MoveAggregationJob::searchNodeWithProp] Query : " + searchParams.getQuery()); resultSet = searchService.query(searchParams); logger.debug("[MoveAggregationJob::searchNodeWithProp] Risultati trovati: " + resultSet.length()); if (resultSet.length() > 0) { listaNodi = resultSet.getNodeRefs(); } userTxSource.commit(); /* QName stateProp = resolvePrefixNameToQName("ecm-sys:stato"); String valoreStatoNodo = "spostabile"; if (resultSet.length() > 0){ listaNodi = resultSet.getNodeRefs(); if(listaNodi!=null){ Map<QName,Serializable> propMap = null; nodiConProp = new ArrayList<NodeRef>(); for (NodeRef ref : listaNodi) { propMap = nodeService.getProperties(ref); if(propMap.containsKey(stateProp) && propMap.containsValue(valoreStatoNodo)) { nodiConProp.add(ref); } } } } searchParams.setLanguage(SearchService.LANGUAGE_XPATH); searchParams.setQuery("/app:company_home/cm:TestSposta"); searchParams.setLimitBy(LimitBy.FINAL_SIZE); searchParams.setLimit(1); resultSet = searchService.query(searchParams); logger.debug("[MoveAggregationJob::searchNodeWithProp] Query per XPATH : "+searchParams.getQuery()); if(resultSet.length()>0){ logger.debug("[MoveAggregationJob::searchNodeWithProp] resultSet per XPATH diverso da Null"); logger.debug("[MoveAggregationJob::searchNodeWithProp] Numero Risultati trovati : " +resultSet.length()); if (resultSet.getNodeRefs()!=null && resultSet.getNodeRefs().size()>0){ NodeRef nodo = resultSet.getNodeRef(0); Map<QName,Serializable> prop = nodeService.getProperties(nodo); String valore =(String) prop.get(stateProp); logger.debug("[MoveAggregationJob::searchNodeWithProp] valore property 'ecm-sys:stato' e` : " +valore); } } */ } finally { if (resultSet != null) { resultSet.close(); } logger.debug("[MoveAggregationJob::searchNodeWithProp] END"); } return listaNodi; }
From source file:it.doqui.index.ecmengine.business.job.move.MoveAggregationJob.java
private void moveIntraRepo(NodeRef sourceNodeRef, String sourceRepository, String idSourceNode, String idDestinationParent) throws NotSupportedException, SystemException, DictionaryRuntimeException, SecurityException, IllegalStateException, RollbackException, HeuristicMixedException, HeuristicRollbackException { //riclassificazioni(voci di titolario differenti) //spostamenti (la stessa voce di titolario) logger.debug("[MoveAggregationJob::moveIntraRepo] BEGIN"); Node result = null;/*from w w w. j a v a 2 s . c om*/ String logCtx = "S: " + idSourceNode + " - D: " + idDestinationParent; try { UserTransaction userTxSource = transactionService.getNonPropagatingUserTransaction(); userTxSource.begin(); RepositoryManager.setCurrentRepository(sourceRepository); logger.debug("[MoveAggregationJob::moveIntraRepo] Spostamento da Corrente a Corrente"); StoreRef spacesStore = new StoreRef(StoreRef.PROTOCOL_WORKSPACE, "SpacesStore"); NodeRef destinationParentRef = new NodeRef(spacesStore, idDestinationParent); ChildAssociationRef sourceParentRef = nodeService.getPrimaryParent(sourceNodeRef); QName destinationQName = sourceParentRef.getQName(); QName destinationAssocTypeQName = sourceParentRef.getTypeQName(); logger.debug( "[MoveAggregationJob::moveIntraRepo] Nome Nuova Associazione : " + destinationQName.toString()); logger.debug("[MoveAggregationJob::moveIntraRepo] Tipo Nuova Associazione : " + destinationAssocTypeQName.toString()); NodeRef copyNodeRef = copyService.copyAndRename(sourceNodeRef, destinationParentRef, destinationAssocTypeQName, destinationQName, true); //NodeRef copyNodeRef = copyService.copy(sourceNodeRef, destinationParentRef, destinationAssocTypeQName,destinationQName, true); result = new Node(copyNodeRef.getId()); dumpElapsed("MoveAggregationJob", "moveIntraRepo", logCtx, "Nodo Copia creato."); logger.debug("[MoveAggregationJob::moveIntraRepo] Uid Nodo Copia creato: " + result.getUid()); QName stateAspect = resolvePrefixNameToQName("ecm-sys:state"); 
nodeService.removeAspect(copyNodeRef, stateAspect); dumpElapsed("MoveAggregationJob", "moveIntraRepo", logCtx, "Rimosso Aspect 'state' dal Nodo copiato."); logger.debug( "[MoveAggregationJob::moveIntraRepo] Rimosso Aspect 'state' dal nodo : " + copyNodeRef.getId()); QName destinationAspect = resolvePrefixNameToQName("ecm-sys:destination"); nodeService.removeAspect(copyNodeRef, destinationAspect); dumpElapsed("MoveAggregationJob", "moveIntraRepo", logCtx, "Rimosso Aspect 'destination' dal Nodo copiato."); logger.debug("[MoveAggregationJob::moveIntraRepo] Rimosso Aspect 'destination' dal nodo : " + copyNodeRef.getId()); //TODO: //in questo caso (da corrente a corrente) cosa fare dell'aggragazione sorgente?? // si deve distinguere tra riclassificazione e spostamento? // a quanto pare in caso di riclassificazione l' aggregazione nella source deve rimanere // ma senza contenuti; // in caso di spostamento invece l'aggregazione va spostata in destination e cancellata // dalla source //Riepilogando: // Riclassificazione : --> metodo copy di copyService e succesive modifiche all'aggregazione //in source: //assume uno stato "R" = riclassificato //e` vuoto di contenuti,conserva i suoi metadati // Spostamento : --> metodo moveNode di nodeService?? questo metodo va bene?? Non copia i figli? //Implementazione solo di Riclassificazione //Cancello i nodi figli del nodo source(basta questo??) List<ChildAssociationRef> childAssociations = nodeService.getChildAssocs(sourceNodeRef); int size = childAssociations != null ? 
childAssociations.size() : 0; logger.debug("[MoveAggregationJob::moveIntraRepo] Cancellare " + size + " nodi figli."); if (size > 0) { for (ChildAssociationRef childAssoc : childAssociations) { if (childAssoc != null) { nodeService.removeChildAssociation(childAssoc); logger.debug("[MoveAggregationJob::moveIntraRepo] Associazione child eliminata."); dumpElapsed("MoveAggregationJob", "moveIntraRepo", logCtx, "Associazione child eliminata."); } } } //<property name="ecm-sys:stato"> proprieta dell'aspect "state" QName stateProp = resolvePrefixNameToQName("ecm-sys:stato"); String valoreStatoNodo = "riclassificato"; //add aspect ecm-sys:state con proprieta ecm-sys:stato di valore "ri-classificato" //al nodo source //nodeService.addAspect(sourceNodeRef, stateAspect, stateAspectProps); //in realta l'aspect e` gia esistente; bisogna modificare il valore // della proprieta da spostabile a ri-classificato nodeService.setProperty(sourceNodeRef, stateProp, valoreStatoNodo); dumpElapsed("MoveAggregationJob", "moveIntraRepo", logCtx, "Modificata property 'stato' dell'Aspect 'state' del nodo."); logger.debug("[MoveAggregationJob::moveIntraRepo] Modificata property 'stato' dell'Aspect 'state' " + "del nodo : " + sourceNodeRef.getId()); userTxSource.commit(); // INSERIMENTO AUDIT insertAudit("MoveAggregationJob", "moveIntraRepo", logCtx, result.getUid(), "Source :" + sourceNodeRef.getId() + " -- Destination Parent : " + destinationParentRef.getId()); } finally { logger.debug("[MoveAggregationJob::moveIntraRepo] END"); } }
From source file:it.doqui.index.ecmengine.business.job.move.MoveAggregationJob.java
/** * Copies the target associations onto the destination node reference. * * @param sourceNodeRef the destination node reference * @param destinationNodeRef the destination node reference * * @throws NodeRuntimeException//ww w . j a v a 2 s. c om */ private void copyTargetAssociations(NodeRef sourceNodeRef, NodeRef destinationNodeRef, String sourceRepo, String destRepo) throws NodeRuntimeException { try { logger.debug("[MoveAggregationJob::copyTargetAssociations] BEGIN"); UserTransaction userTxSource = transactionService.getNonPropagatingUserTransaction(); UserTransaction userTxDest = transactionService.getNonPropagatingUserTransaction(); userTxSource.begin(); RepositoryManager.setCurrentRepository(sourceRepo); //authenticate as the system user authenticationComponent.setSystemUserAsCurrentUser(); List<AssociationRef> nodeAssocRefs = nodeService.getTargetAssocs(sourceNodeRef, RegexQNamePattern.MATCH_ALL); userTxSource.commit(); if (nodeAssocRefs != null) { userTxDest.begin(); RepositoryManager.setCurrentRepository(destRepo); //authenticate as the system user authenticationComponent.setSystemUserAsCurrentUser(); for (AssociationRef assocRef : nodeAssocRefs) { NodeRef targetRef = assocRef.getTargetRef(); boolean exists = false; for (AssociationRef assocRef2 : nodeService.getTargetAssocs(destinationNodeRef, assocRef.getTypeQName())) { if (targetRef.equals(assocRef2.getTargetRef()) == true) { exists = true; break; } } if (exists == false) { // Add the association(aggiunge le associazioni di tipo reference verso il nodo che si trova //nel corrente ma questo nodo non viene ricreato nel deposito; Cosa fare? 
) //TODO: // crea la relazione verso il nodo presente in corrente , ma non crea il nodo in deposito nodeService.createAssociation(destinationNodeRef, targetRef, assocRef.getTypeQName()); } } userTxDest.commit(); } } catch (NotSupportedException e) { logger.error("[MoveAggregationJob::copyTargetAssociations] Eccezione: " + e.getMessage()); e.printStackTrace(); } catch (SystemException e) { logger.error("[MoveAggregationJob::copyTargetAssociations] Eccezione: " + e.getMessage()); e.printStackTrace(); } catch (SecurityException e) { logger.error("[MoveAggregationJob::copyTargetAssociations] Eccezione: " + e.getMessage()); e.printStackTrace(); } catch (IllegalStateException e) { logger.error("[MoveAggregationJob::copyTargetAssociations] Eccezione: " + e.getMessage()); e.printStackTrace(); } catch (RollbackException e) { logger.error("[MoveAggregationJob::copyTargetAssociations] Eccezione: " + e.getMessage()); e.printStackTrace(); } catch (HeuristicMixedException e) { logger.error("[MoveAggregationJob::copyTargetAssociations] Eccezione: " + e.getMessage()); e.printStackTrace(); } catch (HeuristicRollbackException e) { logger.error("[MoveAggregationJob::copyTargetAssociations] Eccezione: " + e.getMessage()); e.printStackTrace(); } finally { logger.debug("[MoveAggregationJob::copyTargetAssociations] END"); } }
From source file:it.doqui.index.ecmengine.business.job.move.MoveAggregationJob.java
/** * Copies the permissions of the source node reference onto the destination node reference * * @param sourceNodeRef the source node reference * @param destinationNodeRef the destination node reference * @throws AuthenticationRuntimeException * @throws PermissionRuntimeException// www. j a v a 2s.c om */ private void copyPermissions(NodeRef sourceNodeRef, NodeRef destinationNodeRef, String sourceRepo, String destRepo) throws PermissionRuntimeException, AuthenticationRuntimeException { try { logger.debug("[MoveAggregationJob::copyPermissions] BEGIN"); UserTransaction userTxSource = transactionService.getNonPropagatingUserTransaction(); UserTransaction userTxDest = transactionService.getNonPropagatingUserTransaction(); userTxSource.begin(); RepositoryManager.setCurrentRepository(sourceRepo); //authenticate as the system user authenticationComponent.setSystemUserAsCurrentUser(); AccessStatus accessStatus = permissionService.hasPermission(sourceNodeRef, PermissionService.READ_PERMISSIONS); userTxSource.commit(); if (accessStatus == AccessStatus.ALLOWED) { userTxSource = transactionService.getNonPropagatingUserTransaction(); userTxSource.begin(); // Get the permission details of the source node reference Set<AccessPermission> permissions = permissionService.getAllSetPermissions(sourceNodeRef); boolean includeInherited = permissionService.getInheritParentPermissions(sourceNodeRef); userTxSource.commit(); userTxDest.begin(); RepositoryManager.setCurrentRepository(destRepo); // authenticate as the system user authenticationComponent.setSystemUserAsCurrentUser(); AccessStatus writePermission = permissionService.hasPermission(destinationNodeRef, PermissionService.CHANGE_PERMISSIONS); if (writePermission.equals(AccessStatus.ALLOWED) || authenticationService.isCurrentUserTheSystemUser()) { // Set the permission values on the destination node for (AccessPermission permission : permissions) { permissionService.setPermission(destinationNodeRef, permission.getAuthority(), 
permission.getPermission(), permission.getAccessStatus().equals(AccessStatus.ALLOWED)); } permissionService.setInheritParentPermissions(destinationNodeRef, includeInherited); } userTxDest.commit(); } } catch (NotSupportedException e) { logger.error("[MoveAggregationJob::copyPermissions] Eccezione: " + e.getMessage()); e.printStackTrace(); } catch (SystemException e) { logger.error("[MoveAggregationJob::copyPermissions] Eccezione: " + e.getMessage()); e.printStackTrace(); } catch (SecurityException e) { logger.error("[MoveAggregationJob::copyPermissions] Eccezione: " + e.getMessage()); e.printStackTrace(); } catch (IllegalStateException e) { logger.error("[MoveAggregationJob::copyPermissions] Eccezione: " + e.getMessage()); e.printStackTrace(); } catch (RollbackException e) { logger.error("[MoveAggregationJob::copyPermissions] Eccezione: " + e.getMessage()); e.printStackTrace(); } catch (HeuristicMixedException e) { logger.error("[MoveAggregationJob::copyPermissions] Eccezione: " + e.getMessage()); e.printStackTrace(); } catch (HeuristicRollbackException e) { logger.error("[MoveAggregationJob::copyPermissions] Eccezione: " + e.getMessage()); e.printStackTrace(); } finally { logger.debug("[MoveAggregationJob::copyPermissions] END"); } }
From source file:it.doqui.index.ecmengine.business.job.move.MoveAggregationJob.java
/** * Copies the child associations onto the destiantion node reference. * <p>//from w w w . j a v a 2 s . c o m * If copyChildren is true then the nodes at the end of a primary assoc will be copied before they * are associated. * * @param sourceNodeRef the source node reference * @param destinationNodeRef the destination node reference * @param copyChildren indicates whether to copy the primary children * @throws AuthenticationRuntimeException * @throws PermissionRuntimeException * @throws NodeRuntimeException */ private void copyChildAssociations(NodeRef sourceNodeRef, NodeRef destinationNodeRef, boolean copyChildren, Map<NodeRef, NodeRef> copiedChildren, String sourceRepo, String destRepo) throws NodeRuntimeException, PermissionRuntimeException, AuthenticationRuntimeException { try { logger.debug("[MoveAggregationJob::copyChildAssociations] BEGIN"); UserTransaction userTxSource = transactionService.getNonPropagatingUserTransaction(); UserTransaction userTxDest = transactionService.getNonPropagatingUserTransaction(); userTxSource.begin(); RepositoryManager.setCurrentRepository(sourceRepo); //authenticate as the system user authenticationComponent.setSystemUserAsCurrentUser(); List<ChildAssociationRef> childAssocs = nodeService.getChildAssocs(sourceNodeRef); userTxSource.commit(); if (childAssocs != null) { logger.debug( "[MoveAggregationJob::copyChildAssociations] Nodi figli da ricreare in Repo Secondary: " + childAssocs.size()); for (ChildAssociationRef childAssoc : childAssocs) { if (copyChildren == true) { if (childAssoc.isPrimary() == true) { logger.debug("[MoveAggregationJob::copyChildAssociations]" + " Nodo figlio primario da ricreare in Repo Secondary."); // Do not recurse further, if we've already copied this node if (copiedChildren.containsKey(childAssoc.getChildRef()) == false && copiedChildren.containsValue(childAssoc.getChildRef()) == false) { // Copy the child recursiveCopy(childAssoc.getChildRef(), childAssoc.getParentRef(), destinationNodeRef, 
childAssoc.getTypeQName(), childAssoc.getQName(), copyChildren, copiedChildren, sourceRepo, destRepo); } } else { logger.debug( "[MoveAggregationJob::copyChildAssociations] Nodo figlio Non Primario da ricreare."); //Add the child (I figli non primari non vengono ricreati nel deposito)Cosa fare?? //TODO: NB i figli secondari non vengono ricreati, ma solo viene creata la relazione //tra padre e figlio( e il figlio si trova nel deposito) NodeRef childRef = childAssoc.getChildRef(); userTxDest.begin(); RepositoryManager.setCurrentRepository(destRepo); // authenticate as the system user authenticationComponent.setSystemUserAsCurrentUser(); nodeService.addChild(destinationNodeRef, childRef, childAssoc.getTypeQName(), childAssoc.getQName()); userTxDest.commit(); } } } } } catch (NotSupportedException e) { logger.error("[MoveAggregationJob::copyChildAssociations] Eccezione: " + e.getMessage()); e.printStackTrace(); } catch (SystemException e) { logger.error("[MoveAggregationJob::copyChildAssociations] Eccezione: " + e.getMessage()); e.printStackTrace(); } catch (SecurityException e) { logger.error("[MoveAggregationJob::copyChildAssociations] Eccezione: " + e.getMessage()); e.printStackTrace(); } catch (IllegalStateException e) { logger.error("[MoveAggregationJob::copyChildAssociations] Eccezione: " + e.getMessage()); e.printStackTrace(); } catch (RollbackException e) { logger.error("[MoveAggregationJob::copyChildAssociations] Eccezione: " + e.getMessage()); e.printStackTrace(); } catch (HeuristicMixedException e) { logger.error("[MoveAggregationJob::copyChildAssociations] Eccezione: " + e.getMessage()); e.printStackTrace(); } catch (HeuristicRollbackException e) { logger.error("[MoveAggregationJob::copyChildAssociations] Eccezione: " + e.getMessage()); e.printStackTrace(); } finally { logger.debug("[MoveAggregationJob::copyChildAssociations] END"); } }
From source file:it.doqui.index.ecmengine.business.job.move.MoveAggregationJob.java
/** * Recursive copy algorithm/*from w w w . j ava2 s . c o m*/ * * @throws NodeRuntimeException * @throws AuthenticationRuntimeException * @throws PermissionRuntimeException */ private NodeRef recursiveCopy(NodeRef sourceNodeRef, NodeRef sourceParentRef, NodeRef destinationParentRef, QName destinationAssocTypeQName, QName destinationQName, boolean copyChildren, Map<NodeRef, NodeRef> copiedChildren, String sourceRepo, String destRepo) throws NodeRuntimeException, PermissionRuntimeException, AuthenticationRuntimeException { NodeRef destinationNodeRef = null; UserTransaction userTxSource = null; UserTransaction userTxDest = null; try { logger.debug("[MoveAggregationJob::recursiveCopy] BEGIN"); userTxSource = transactionService.getNonPropagatingUserTransaction(); userTxSource.begin(); Map<QName, Serializable> properties = null; Set<QName> sourceAspects = null; RepositoryManager.setCurrentRepository(sourceRepo); //authenticate as the system user authenticationComponent.setSystemUserAsCurrentUser(); QName sourceType = nodeService.getType(sourceNodeRef); properties = nodeService.getProperties(sourceNodeRef); sourceAspects = nodeService.getAspects(sourceNodeRef); userTxSource.commit(); // Create the new node userTxDest = transactionService.getNonPropagatingUserTransaction(); userTxDest.begin(); RepositoryManager.setCurrentRepository(destRepo); //authenticate as the system user authenticationComponent.setSystemUserAsCurrentUser(); boolean esisteStore = nodeService.exists(destinationParentRef.getStoreRef()); logger.debug("[MoveAggregationJob::recursiveCopy] Lo Store Destination esiste ? 
: " + esisteStore); final boolean destExists = nodeService.exists(destinationParentRef); logger.debug("[MoveAggregationJob::recursiveCopy] " + "Repository di Destinazione : " + RepositoryManager.getCurrentRepository()); logger.debug( "[MoveAggregationJob::recursiveCopy] '" + destinationParentRef + "' esiste: " + destExists); ChildAssociationRef destinationChildAssocRef = nodeService.createNode(destinationParentRef, destinationAssocTypeQName, destinationQName, sourceType, null); destinationNodeRef = destinationChildAssocRef.getChildRef(); logger.debug("[MoveAggregationJob::recursiveCopy] Nodo spostato: " + destinationNodeRef.getId()); copiedChildren.put(sourceNodeRef, destinationNodeRef); for (QName aspect : sourceAspects) { nodeService.addAspect(destinationNodeRef, aspect, null); logger.debug("[MoveAggregationJob::recursiveCopy] Aspect copiato: " + aspect); } //setto sul nuovo nodo appena creato tutte le properties, anche quelle degli aspects nodeService.setProperties(destinationNodeRef, properties); logger.debug("[MoveAggregationJob::recursiveCopy] Property copiate: " + properties.size()); // Prevent any rules being fired on the new destination node //ruleService.disableRules(destinationNodeRef); // Apply the copy aspect to the new node //Map<QName, Serializable> copyProperties = new HashMap<QName, Serializable>(); //copyProperties.put(ContentModel.PROP_COPY_REFERENCE, sourceNodeRef); //nodeService.addAspect(destinationNodeRef, ContentModel.ASPECT_COPIEDFROM, copyProperties); // Copy the aspects //copyAspects(destinationNodeRef, copyDetails); userTxDest.commit(); // Copy the associations copyAssociations(sourceNodeRef, destinationNodeRef, copyChildren, copiedChildren, sourceRepo, destRepo); // Copy permissions copyPermissions(sourceNodeRef, destinationNodeRef, sourceRepo, destRepo); } catch (NotSupportedException e) { logger.error("[MoveAggregationJob::recursiveCopy] Eccezione: " + e.getMessage()); } catch (SystemException e) { 
logger.error("[MoveAggregationJob::recursiveCopy] Eccezione: " + e.getMessage()); e.printStackTrace(); } catch (SecurityException e) { logger.error("[MoveAggregationJob::recursiveCopy] Eccezione: " + e.getMessage()); e.printStackTrace(); } catch (IllegalStateException e) { logger.error("[MoveAggregationJob::recursiveCopy] Eccezione: " + e.getMessage()); e.printStackTrace(); } catch (RollbackException e) { logger.error("[MoveAggregationJob::recursiveCopy] Eccezione: " + e.getMessage()); e.printStackTrace(); } catch (HeuristicMixedException e) { logger.error("[MoveAggregationJob::recursiveCopy] Eccezione: " + e.getMessage()); e.printStackTrace(); } catch (HeuristicRollbackException e) { logger.error("[MoveAggregationJob::recursiveCopy] Eccezione: " + e.getMessage()); e.printStackTrace(); } finally { //ruleService.enableRules(destinationNodeRef); logger.debug("[MoveAggregationJob::recursiveCopy] END"); } return destinationNodeRef; }
From source file:it.doqui.index.ecmengine.business.job.move.MoveAggregationJob.java
private void moveCrossRepo(String sourceRepository, String idSourceNode, String destinationRepository, String idDestinationParent, NodeRef sourceNodeRef) throws NotSupportedException, SystemException, NodeRuntimeException, PermissionRuntimeException, AuthenticationRuntimeException, DictionaryRuntimeException, SecurityException, IllegalStateException, RollbackException, HeuristicMixedException, HeuristicRollbackException { logger.debug("[MoveAggregationJob::moveCrossRepo] BEGIN"); Node result = null;// w ww .ja va2 s .co m UserTransaction userTxSource = null; UserTransaction userTxDest = null; String logCtx = "S: " + idSourceNode + " - SourceRepo: " + sourceRepository + " - D: " + idDestinationParent + " - DestRepo: " + destinationRepository; try { logger.debug("[MoveAggregationJob::moveCrossRepo] " + logCtx); userTxSource = transactionService.getNonPropagatingUserTransaction(); userTxSource.begin(); RepositoryManager.setCurrentRepository(sourceRepository); //authenticate as the system user authenticationComponent.setSystemUserAsCurrentUser(); ChildAssociationRef sourceParentRef = nodeService.getPrimaryParent(sourceNodeRef); logger.debug("[MoveAggregationJob::moveCrossRepo] Nodo Source Padre : " + sourceParentRef.getParentRef().getId()); QName destinationQName = sourceParentRef.getQName(); QName destinationAssocTypeQName = sourceParentRef.getTypeQName(); userTxSource.commit(); userTxDest = transactionService.getNonPropagatingUserTransaction(); userTxDest.begin(); RepositoryManager.setCurrentRepository(destinationRepository); //authenticate as the system user authenticationComponent.setSystemUserAsCurrentUser(); StoreRef spacesStoreDest = new StoreRef(StoreRef.PROTOCOL_WORKSPACE, "SpacesStore"); NodeRef destinationParentRef = new NodeRef(spacesStoreDest, idDestinationParent); boolean exist = nodeService.exists(destinationParentRef); logger.debug("[MoveAggregationJob::moveCrossRepo] Nodo Destination Padre: " + destinationParentRef.getId() + " esiste? 
" + exist); userTxDest.commit(); //copyAggregation userTxSource = transactionService.getNonPropagatingUserTransaction(); userTxSource.begin(); Map<NodeRef, NodeRef> copiedChildren = new HashMap<NodeRef, NodeRef>(); RepositoryManager.setCurrentRepository(sourceRepository); //authenticate as the system user authenticationComponent.setSystemUserAsCurrentUser(); NodeRef parentRef = sourceParentRef.getParentRef(); userTxSource.commit(); boolean copyChildren = true; NodeRef destinationNodeRef = null; //recursiveCopy -->permette di ricreare in deposito la stessa struttura in corrente //in realta crea i figli primari , invece i figli secondari e i nodi target delle associazioni //normali non vengono create nel deposito ma viene creata uan relazione verso il nodo //originario presente in corrente. //TODO //Eliminare o non fare creare relazioni dal secondario verso nodi del primario logger.debug("[MoveAggregationJob::moveCrossRepo] Inizio metodo ricorsivo : 'recursiveCopy'"); destinationNodeRef = recursiveCopy(sourceNodeRef, parentRef, destinationParentRef, destinationAssocTypeQName, destinationQName, copyChildren, copiedChildren, sourceRepository, destinationRepository); logger.debug("[MoveAggregationJob::moveCrossRepo] Fine metodo ricorsivo : 'recursiveCopy'"); dumpElapsed("MoveAggregationJob", "moveCrossRepo", logCtx, "Nodo Copia creato."); if (destinationNodeRef != null) { result = new Node(destinationNodeRef.getId(), destinationRepository); logger.debug("[MoveAggregationJob::moveCrossRepo] Uid Nodo Copia creato: " + result.getUid()); } userTxDest = transactionService.getNonPropagatingUserTransaction(); userTxDest.begin(); //Dal nodo padre sposato sul deposito elimino gli aspect state e destination RepositoryManager.setCurrentRepository(destinationRepository); //authenticate as the system user authenticationComponent.setSystemUserAsCurrentUser(); QName stateAspect = resolvePrefixNameToQName("ecm-sys:state"); nodeService.removeAspect(destinationNodeRef, stateAspect); 
dumpElapsed("MoveAggregationJob", "moveCrossRepo", logCtx, "Rimosso Aspect 'state' dal Nodo spostato."); logger.debug("[MoveAggregationJob::moveCrossRepo] Rimosso Aspect 'state' dal nodo : " + destinationNodeRef.getId()); QName destinationAspect = resolvePrefixNameToQName("ecm-sys:destination"); nodeService.removeAspect(destinationNodeRef, destinationAspect); dumpElapsed("MoveAggregationJob", "moveCrossRepo", logCtx, "Rimosso Aspect 'destination' dal Nodo spostato."); logger.debug("[MoveAggregationJob::moveCrossRepo] Rimosso Aspect 'destination' dal nodo : " + destinationNodeRef.getId()); userTxDest.commit(); // BEGIN DISABLE AGGREGATION userTxSource = transactionService.getNonPropagatingUserTransaction(); userTxSource.begin(); //Cancello i nodi figli del nodo source RepositoryManager.setCurrentRepository(sourceRepository); //authenticate as the system user authenticationComponent.setSystemUserAsCurrentUser(); List<ChildAssociationRef> childAssociations = nodeService.getChildAssocs(sourceNodeRef); int size = childAssociations != null ? 
childAssociations.size() : 0; logger.debug("[MoveAggregationJob::moveCrossRepo] Cancello " + size + " nodi/o figli."); if (size > 0) { for (ChildAssociationRef childAssoc : childAssociations) { if (childAssoc != null) { nodeService.removeChildAssociation(childAssoc); logger.debug("[MoveAggregationJob::moveCrossRepo] Associazione child eliminata."); dumpElapsed("MoveAggregationJob", "moveCrossRepo", logCtx, "Associazione child eliminata."); } } } //ecm-sys:ecmengineSystemModel //aspect ecm-sys:state //<property name="ecm-sys:stato"> //aspect ecm-sys:state //<property name="ecm-sys:stato"> proprieta dell'aspect QName stateProp = resolvePrefixNameToQName("ecm-sys:stato"); String valoreStatoNodo = "spostato"; //setto la proprieta ecm-sys:stato dell'aspect ecm-sys:state //del nodo source con valore "spostato" nodeService.setProperty(sourceNodeRef, stateProp, valoreStatoNodo); dumpElapsed("MoveAggregationJob", "moveCrossRepo", logCtx, "Modificata property 'stato' dell'Aspect 'state'"); //TODO: in realta l'aggregazione deve essere Cancellata del tutto e non disabilitata //con l'aggiunta di un aspect; si dovrebbe cancellare l'aggregazione solo dopo che //il job di spostamento e` andato a buon fine. 
logger.debug( "[MoveAggregationJob::moveCrossRepo] Modificata property 'stato' dell'Aspect 'state' del nodo : " + sourceNodeRef.getId()); //Dal nodo sorgente presente nel corrente elimino l'aspect destination nodeService.removeAspect(sourceNodeRef, destinationAspect); dumpElapsed("MoveAggregationJob", "moveCrossRepo", logCtx, "Rimosso Aspect 'destination' dal Nodo."); logger.debug("[MoveAggregationJob::moveCrossRepo] Rimosso Aspect 'destination' dal nodo : " + sourceNodeRef.getId()); //END DISABLE AGGREGATION userTxSource.commit(); //INSERIMENTO AUDIT insertAudit("MoveAggregationJob", "moveCrossRepo", logCtx, result.getUid(), "Source: " + sourceNodeRef.getId() + " RepoSource: " + sourceRepository + " -- Dest Parent: " + destinationParentRef.getId() + " RepoDest: " + destinationRepository); } finally { logger.debug("[MoveAggregationJob::moveCrossRepo] END"); } }
From source file:fr.openwide.talendalfresco.rest.server.command.LoginCommand.java
private User authenticate(String username, String password, String ticket) { // case of existing session user : getting alfresco ticket User existingSessionUser = null;/*from w w w . jav a 2 s. com*/ HttpSession session = httpRequest.getSession(false); if (session != null) { existingSessionUser = (User) session.getAttribute(AuthenticationHelper.AUTHENTICATION_USER); if (existingSessionUser != null) { String existingSessionTicket = existingSessionUser.getTicket(); // alternatives : // 1. using alfresco ticket rather than sso ticket to speed up things // NB. this means that before logging in a different user an explicit logout must be done // 2. using sso ticket rather than alfresco one // this requires never to give the ticket but when we want to relog, which is bothersome if (existingSessionTicket != null) { ticket = existingSessionTicket; } } } UserTransaction tx = null; try { // Authenticate via the authentication service, then save the details of user in an object // in the session - this is used by the servlet filter etc. 
on each page to check for login if (username != null && password != null) { // authentication using login (alfresco or sso), since user/pwd params (even empty ones) have been supplied // validation : RestServerHelper.validateUsername(session, username); RestServerHelper.validatePassword(session, password); // login : authenticationService.authenticate(username, password.toCharArray()); } else if (ticket != null && ticket.length() != 0) { // authentication using ticket (alfresco or sso), since non empty ticket has been supplied authenticationService.validate(ticket); } else { xmlResult.setError(RestCommandResult.CODE_ERROR_AUTH_MISSING, RestServerHelper.getMessage(session, RestServerHelper.MSG_ERROR_MISSING) + " : " + username, null); return null; } // Set the user name as stored by the back end username = authenticationService.getCurrentUserName(); if (existingSessionUser != null && existingSessionUser.getUserName().equals(username)) { // user was already logged in, nothing else to do return existingSessionUser; } // now setting up logged in user elements // using non propagated tx because already inside a tx (commandServlet) tx = transactionService.getNonPropagatingUserTransaction(); tx.begin(); // remove the session invalidated flag (used to remove last username cookie by AuthenticationFilter) if (session != null) { session.removeAttribute(AuthenticationHelper.SESSION_INVALIDATED); } // setup User object and Home space ID User user = new User(username, authenticationService.getCurrentTicket(), personService.getPerson(username)); NodeRef homeSpaceRef = (NodeRef) nodeService.getProperty(personService.getPerson(username), ContentModel.PROP_HOMEFOLDER); // check that the home space node exists - else user cannot login if (nodeService.exists(homeSpaceRef) == false) { throw new InvalidNodeRefException(homeSpaceRef); } user.setHomeSpaceId(homeSpaceRef.getId()); tx.commit(); tx = null; // clear this so we know not to rollback // put the User object in the Session - the 
authentication servlet will then allow // the app to continue without redirecting to the login page if (session == null) { session = httpRequest.getSession(true); // creating session if none yet } session.setAttribute(AuthenticationHelper.AUTHENTICATION_USER, user); // Set the current locale for Alfresco web app. NB. session exists now. I18NUtil.setLocale(Application.getLanguage(session, true)); return user; } catch (AuthenticationException ae) { xmlResult.setError(RestCommandResult.CODE_ERROR_AUTH_UNKNOWN_USER, RestServerHelper.getMessage(session, RestServerHelper.MSG_ERROR_UNKNOWN_USER) + " : " + username, ae); } catch (InvalidNodeRefException inre) { xmlResult.setError(RestCommandResult.CODE_ERROR_AUTH_UNKNOWN_USER, RestServerHelper.getMessage(session, Repository.ERROR_NOHOME) + " : " + inre.getNodeRef().getId() + " (" + username + ")", inre); } catch (Throwable e) { // Some other kind of serious failure xmlResult.setError("Unknown technical error when authenticating user " + username, null); } finally { try { if (tx != null) { tx.rollback(); } } catch (Exception tex) { } } return null; }
From source file:com.flexive.tests.embedded.persistence.StructureTest.java
@Test(groups = { "ejb", "structure" }) public void assignmentGroupProperty() throws Exception { Context c = EJBLookup.getInitialContext(); UserTransaction ut = (UserTransaction) c.lookup("java:comp/UserTransaction"); ut.begin(); FxString desc = new FxString("group description..."); desc.setTranslation(2, "gruppen beschreibung"); final String GROUPNAME = "GROUPTEST" + RandomStringUtils.randomNumeric(5); FxGroupEdit ge = FxGroupEdit.createNew(GROUPNAME, desc, new FxString("hint..."), true, FxMultiplicity.of(0, FxMultiplicity.N)); ae.createGroup(ge, "/"); ge.setName("subgroup"); ae.createGroup(ge, "/" + GROUPNAME); ge.setName("subgroup2"); ae.createGroup(ge, "/" + GROUPNAME + "/SUBGROUP"); desc.setTranslation(1, "property description..."); desc.setTranslation(2, "attribut beschreibung..."); FxPropertyEdit pe = FxPropertyEdit.createNew("testproperty", desc, new FxString("property hint"), true, FxMultiplicity.of(1, 1), true, env().getACL(1), FxDataType.Number, new FxString("123"), true, null, null, null);//from w w w. ja va2 s .co m ae.createProperty(pe, "/" + GROUPNAME + "/SUBGROUP"); FxGroupAssignment ga = (FxGroupAssignment) env().getAssignment("ROOT/" + GROUPNAME); FxGroupAssignmentEdit gae = FxGroupAssignmentEdit.createNew(ga, env().getType("ROOT"), "GTEST", "/"); ae.save(gae, true); ut.rollback(); }