Example usage for javax.transaction UserTransaction rollback

List of usage examples for javax.transaction UserTransaction rollback

Introduction

On this page you can find example usage for javax.transaction UserTransaction rollback.

Prototype

void rollback() throws IllegalStateException, SecurityException, SystemException;

Document

Roll back the transaction associated with the current thread.
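
All the examples below follow the same basic shape: begin the transaction, do the work, commit, and roll back on failure. Here is a minimal sketch of that pattern (class and method names are hypothetical; the JNDI lookup assumes a container that binds the transaction at java:comp/UserTransaction, as in the StructureTest example below, while the Alfresco examples obtain theirs from a TransactionService instead):

import javax.naming.InitialContext;
import javax.transaction.Status;
import javax.transaction.UserTransaction;

public class RollbackSketch {
    public void doWork() throws Exception {
        UserTransaction tx = (UserTransaction) new InitialContext().lookup("java:comp/UserTransaction");
        try {
            tx.begin();
            // ... transactional work goes here ...
            tx.commit();
        } catch (Exception e) {
            // Roll back only while the transaction is still active; rollback()
            // may itself throw, so guard it with its own try/catch.
            try {
                if (tx.getStatus() == Status.STATUS_ACTIVE) {
                    tx.rollback();
                }
            } catch (Exception rollbackError) {
                // log and continue; the original failure is the one to report
            }
            throw e;
        }
    }
}

Note how several examples below either check the status before rolling back or wrap the rollback in its own try/catch: rollback() declares IllegalStateException, SecurityException and SystemException, and a failed rollback should not mask the error that triggered it.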

Usage

From source file:com.atolcd.repo.web.scripts.archive.ArchivedNodesGet.java

/**
 * This method gets all deleted nodes for the current user from the archive
 * which were originally contained within the specified StoreRef.
 */
private List<ArchivedNodeState> filterDeletedNodes(StoreRef storeRef, String username) {
    List<ArchivedNodeState> deletedNodes = null;

    if (username != null && username.length() > 0) {

        UserTransaction tx = null;
        ResultSet results = null;

        try {

            tx = serviceRegistry.getTransactionService().getNonPropagatingUserTransaction(false);
            tx.begin();

            if (storeRef != null) {
                String query = String.format(SEARCH_USERPREFIX, username)
                        + String.format(SEARCH_ALL, ContentModel.ASPECT_ARCHIVED);
                SearchParameters sp = new SearchParameters();
                sp.setLanguage(SearchService.LANGUAGE_LUCENE);
                sp.setQuery(query);
                sp.addStore(storeRef); // the Archived Node store

                results = serviceRegistry.getSearchService().query(sp);
                deletedNodes = new ArrayList<ArchivedNodeState>(results.length());
            }

            if (results != null && results.length() != 0) {
                NodeService nodeService = serviceRegistry.getNodeService();

                for (ResultSetRow row : results) {
                    NodeRef nodeRef = row.getNodeRef();

                    if (nodeService.exists(nodeRef)) {
                        ArchivedNodeState state = ArchivedNodeState.create(nodeRef, serviceRegistry);
                        deletedNodes.add(state);
                    }
                }
            }

            tx.commit();
        } catch (Throwable err) {
            if (logger.isWarnEnabled())
                logger.warn("Error while browsing the archive store: " + err.getMessage());
            try {
                if (tx != null) {
                    tx.rollback();
                }
            } catch (Exception tex) {
                if (logger.isWarnEnabled())
                    logger.warn("Error while during the rollback: " + tex.getMessage());
            }
        } finally {
            if (results != null) {
                results.close();
            }
        }
    }

    return deletedNodes;
}

From source file:com.surevine.alfresco.repo.delete.PerishabilityLogicImpl.java

private synchronized void loadPerishReasons()
        throws JSONException, SecurityException, IllegalStateException, RollbackException,
        HeuristicMixedException, HeuristicRollbackException, SystemException, NotSupportedException {
    UserTransaction transaction = null;
    ResultSet rs = null;

    try {
        StoreRef storeRef = new StoreRef(StoreRef.PROTOCOL_WORKSPACE, "SpacesStore");
        rs = _searchService.query(storeRef, SearchService.LANGUAGE_LUCENE,
                "PATH:\"/app:company_home/app:dictionary/cm:perishableReasons.json\"");
        NodeRef nodeRef = null;
        transaction = _transactionService.getUserTransaction(true);

        transaction.begin();

        if (rs.length() == 0) {
            _logger.error(
                    "Unable to load perishable reasons: Didn't find perishableReasons.json in the Data Dictionary.");
            perishReasons = Collections.emptyList();
            perishReasonsByCode = Collections.emptyMap();
            perishReasonsBySite = Collections.emptyMap();
            return;
        }
        nodeRef = rs.getNodeRef(0);

        ContentReader reader = _contentService.getReader(nodeRef, ContentModel.PROP_CONTENT);

        JSONObject obj = new JSONObject(reader.getContentString());

        JSONArray perishableReasons = obj.getJSONArray("perishableReasons");

        perishReasons = new ArrayList<PerishReason>(perishableReasons.length());
        perishReasonsByCode = new HashMap<String, PerishReason>();
        perishReasonsBySite = new HashMap<String, List<PerishReason>>();

        for (int i = 0; i < perishableReasons.length(); ++i) {
            PerishReason reason = PerishReason.fromJSON(perishableReasons.getJSONObject(i));
            perishReasons.add(reason);
            perishReasonsByCode.put(reason.getCode(), reason);
            addPerishReasonBySite(reason);
        }

        transaction.commit();
    } finally {
        if (rs != null) {
            rs.close();
        }

        if ((transaction != null) && (transaction.getStatus() == Status.STATUS_ACTIVE)) {
            transaction.rollback();
        }
    }
}

From source file:it.doqui.index.ecmengine.business.personalization.importer.ArchiveImporterJob.java

public void execute(JobExecutionContext context) throws JobExecutionException {
    logger.debug("[ArchiveImporterJob::execute] BEGIN");
    if (!running) {
        synchronized (this) {
            if (!running) {
                running = true;
            } else {
                logger.debug("[ArchiveImporterJob::execute] job already running 1");
                logger.debug("[ArchiveImporterJob::execute] END");
                return;
            }
        }
    } else {
        logger.debug("[ArchiveImporterJob::execute] job already running 2");
        logger.debug("[ArchiveImporterJob::execute] END");
        return;
    }

    logger.debug("[ArchiveImporterJob::execute] START");

    JobBusinessInterface jobManager = null;
    BatchJob batchJob = null;
    try {
        // Request a JobManager instance
        jobManager = (JobBusinessInterface) context.getJobDetail().getJobDataMap()
                .get(ECMENGINE_JOB_MANAGER_BEAN);

        // On success
        if (jobManager != null) {
            List<Repository> repositories = RepositoryManager.getInstance().getRepositories();
            for (Repository repository : repositories) {
                logger.debug("[ArchiveImporterJob::execute] import archive on repository '" + repository.getId()
                        + "'");
                RepositoryManager.setCurrentRepository(repository.getId());

                // List the active jobs of type ECMENGINE_ARCHIVE_IMPORTER_JOB_REF
                // They can be:
                // Ready - waiting to be processed
                // Running - currently importing the file
                // Finished - done

                // At this point, since this is a singleton job, there should only be READY states (new jobs) or FINISHED states (completed jobs)
                // If a RUNNING job is found, it is certainly an error condition: reset the job to READY, so that
                // the algorithm below resumes the import incrementally

                // Fetch all the jobs of a given executor: in the future, should this cause performance problems,
                // add a filter on the RUNNING status
                BatchJob[] bjs = jobManager.getJobsByExecutor(ECMENGINE_ARCHIVE_IMPORTER_JOB_REF);
                if (bjs != null) {
                    // If there are BatchJobs
                    for (BatchJob bj : bjs) {
                        logger.debug("[ArchiveImporterJob::execute] job status " + bj.getId() + ":"
                                + bj.getStatus());
                        // If the status is RUNNING
                        if (bj.getStatus().equalsIgnoreCase(JobStatus.RUNNING)) {
                            logger.debug("[ArchiveImporterJob::execute] update status to ready " + bj.getId());
                            // Reset the status to READY
                            bj.setStatus(JobStatus.READY);
                            jobManager.updateJob(bj);
                        }
                    }
                } else {
                    logger.debug("[ArchiveImporterJob::execute] BatchJob NULL per "
                            + ECMENGINE_ARCHIVE_IMPORTER_JOB_REF);
                }

                // At this point the state is consistent, with some READY jobs
                // READY jobs can be either new jobs or resumes of previous runs
                while ((batchJob = jobManager.getNextJob(ECMENGINE_ARCHIVE_IMPORTER_JOB_REF)) != null) {

                    // Take the next job
                    logger.debug("[ArchiveImporterJob::execute] start batchJob " + batchJob.getId());
                    if (batchJob != null) {
                        try {
                            // Extract the execution parameters
                            BatchJobParam pName = batchJob.getParam(PARAM_NAME);
                            BatchJobParam pFormat = batchJob.getParam(PARAM_FORMAT);
                            BatchJobParam pStore = batchJob.getParam(PARAM_CONTENTSTORE_DIR);
                            String importDirectory = (String) context.getJobDetail().getJobDataMap()
                                    .get(ECMENGINE_IMPORT_DIRECTORY);

                            // check that they are valid
                            checkParam(batchJob, pName, "Archive name not found");
                            checkParam(batchJob, pFormat, "Archive format not found");
                            checkParam(batchJob, pStore, "Archive store not found");
                            checkParam(batchJob, importDirectory, "importDirectory null");

                            // Once the file extraction parameters are validated, move the
                            // file from TEMP to the import directory and unpack it

                            // Move the file from temp to the project workdir
                            String cFrom = pStore.getValue() + File.separator + pName.getValue();
                            File oFrom = new File(cFrom);
                            logger.debug("[ArchiveImporterJob::execute] From:" + cFrom);

                            // Create the output directory if it does not exist
                            File oDir = new File(importDirectory);
                            if (!oDir.exists()) {
                                oDir.mkdir();
                            }

                            // Import file
                            String cFile = importDirectory + File.separator + pName.getValue();
                            File oFile = new File(cFile);
                            logger.debug("[ArchiveImporterJob::execute] Import:" + cFile);

                            // If the file exists, move it to the dir where it will be processed
                            if (oFrom.exists()) {
                                // Try a rename first, because it is faster
                                if (!oFrom.renameTo(oFile)) {
                                    // If that fails, fall back to the slower copy
                                    if (!copyFile(oFrom, oFile)) {
                                        batchJob.setMessage(
                                                "Unable to copy from (" + cFrom + ") to (" + cFile + ")");
                                        throw new EcmEngineException(
                                                "ArchiveImporterJob: " + batchJob.getMessage());
                                    } else {
                                        // If the copy succeeds, delete the original file
                                        oFrom.delete();
                                    }
                                }
                            }

                            // First of all, try to load the given file into RAM.
                            // Note: since job parameters are capped at 256 characters, the path and the
                            // file name were deliberately split into two separate variables, to make
                            // the usable path length as long as possible

                            // Once the file has been processed, it is renamed to .processed so it can be tracked
                            // The .processed files should be purged every now and then, to keep their backlog small
                            // Alternatively, the code can be changed to delete the files instead of renaming them
                            // Another reason for the rename is that, with corrupt zip archives, a file may
                            // appear imported yet be missing, without any error message
                            String cFileRenamed = cFile + ".processed";
                            File oFileRenamed = new File(cFileRenamed);

                            // The temp path is built from the name of the compressed file plus ".extract",
                            // so that uniqueness is already guaranteed by the file name
                            String tempPath = cFile + ".extract";
                            File tmpWorkDir = new File(tempPath);

                            // If the file does not exist
                            if (!oFile.exists()) {
                                logger.debug(
                                        "[ArchiveImporterJob::execute] Import file not present, checking the extraction directory");

                                // If the extraction dir exists, an import was in progress and an error occurred
                                // So fail only if the output dir does not exist either
                                if (!tmpWorkDir.exists()) {
                                    batchJob.setMessage("Archive not found");
                                    throw new EcmEngineException("ArchiveImporterJob: " + batchJob.getMessage()
                                            + " (" + cFile + ")");
                                } else {
                                    logger.debug(
                                            "[ArchiveImporterJob::execute] Import directory present, proceeding with the import");
                                }
                            }

                            // At this point, either the file or the extraction dir may exist

                            // If the file exists, read its content from disk and decompress it
                            if (oFile.exists()) {
                                byte[] content = getBinary(cFile);
                                logger.debug("[ArchiveImporterJob::execute] Content size: " + content.length);

                                // Create the directory only if it does not exist: it may already have been
                                // created and we are resuming an update
                                if (!tmpWorkDir.exists()) {
                                    if (!tmpWorkDir.mkdirs()) {
                                        batchJob.setMessage("Cant' creare working dir");
                                        throw new EcmEngineException("ArchiveImporterJob: "
                                                + batchJob.getMessage() + " (" + tempPath + ")");
                                    }
                                }

                                // At this point, extract the files contained in the archive
                                String cFormat = pFormat.getValue();

                                logger.debug("[ArchiveImporterJob::execute] estrazione archivio (" + cFile
                                        + ")(" + cFormat + ") in " + tempPath);
                                if (ECMENGINE_ARCHIVE_FORMAT_TAR.equals(cFormat)) {
                                    extractTar(new ByteArrayInputStream(content), tempPath);

                                } else if (ECMENGINE_ARCHIVE_FORMAT_TAR_GZ.equals(cFormat)) {
                                    extractTarGz(new ByteArrayInputStream(content), tempPath);

                                } else if (ECMENGINE_ARCHIVE_FORMAT_ZIP.equals(cFormat)) {
                                    extractZip(new ByteArrayInputStream(content), tempPath);

                                } else {
                                    // For an unsupported format, exit with an error
                                    batchJob.setMessage("Format not supported");
                                    throw new EcmEngineException("ArchiveImporterJob: " + batchJob.getMessage()
                                            + " (" + cFormat + ")");
                                }

                                // At this point the archive has been unpacked
                                // Once unpacked correctly, delete any unexpected leftover copies
                                // and rename the file
                                oFileRenamed.delete();
                                oFile.renameTo(oFileRenamed);

                                // At the end of the process, delete the files involved in the import
                                // Comment out these lines to investigate why a given file is not imported
                                oFile.delete();
                                oFileRenamed.delete();
                            }

                            // Obtain the services and check that they were retrieved correctly
                            transactionService = (TransactionService) context.getJobDetail().getJobDataMap()
                                    .get(ECMENGINE_TRANSACTION_SERVICE_BEAN);
                            namespaceService = (NamespaceService) context.getJobDetail().getJobDataMap()
                                    .get(ECMENGINE_NAMESPACE_SERVICE_BEAN);
                            contentService = (ContentService) context.getJobDetail().getJobDataMap()
                                    .get(ECMENGINE_CONTENT_SERVICE_BEAN);
                            nodeService = (NodeService) context.getJobDetail().getJobDataMap()
                                    .get(ECMENGINE_NODE_SERVICE_BEAN);
                            authenticationService = (AuthenticationService) context.getJobDetail()
                                    .getJobDataMap().get(ECMENGINE_AUTHENTICATION_SERVICE_BEAN);

                            checkParam(batchJob, transactionService, "transactionService null");
                            checkParam(batchJob, namespaceService, "namespaceService null");
                            checkParam(batchJob, contentService, "contentService null");
                            checkParam(batchJob, nodeService, "nodeService null");
                            checkParam(batchJob, authenticationService, "authenticationService null");

                            // Fetch the batch parameters and check their consistency, exiting on error
                            BatchJobParam pUID = batchJob.getParam(PARAM_UID);
                            BatchJobParam pStoreProtocol = batchJob.getParam(PARAM_STORE_PROTOCOL);
                            BatchJobParam pStoreIdentifier = batchJob.getParam(PARAM_STORE_IDENTIFIER);
                            BatchJobParam pUser = batchJob.getParam(PARAM_USER);
                            BatchJobParam pPassword = batchJob.getParam(PARAM_PASSWORD);

                            BatchJobParam pContentType = batchJob.getParam(PARAM_CONTENT_TYPE);
                            BatchJobParam pNameProperty = batchJob.getParam(PARAM_CONTENT_NAME_PROPERTY);
                            BatchJobParam pContainerType = batchJob.getParam(PARAM_CONTAINER_TYPE);
                            BatchJobParam pContainerNameProperty = batchJob
                                    .getParam(PARAM_CONTAINER_NAME_PROPERTY);
                            BatchJobParam pContainerAssocType = batchJob.getParam(PARAM_CONTAINER_ASSOC_TYPE);
                            BatchJobParam pParentAssocType = batchJob.getParam(PARAM_PARENT_ASSOC_TYPE);

                            checkParam(batchJob, pUID, "Node UID not found");
                            checkParam(batchJob, pStoreProtocol, "Store Protocol not found");
                            checkParam(batchJob, pStoreIdentifier, "Store Identifier not found");
                            checkParam(batchJob, pUser, "User not found");
                            checkParam(batchJob, pPassword, "Password not found");

                            checkParam(batchJob, pContentType, "Content Type not found");
                            checkParam(batchJob, pNameProperty, "Content Name not found");
                            checkParam(batchJob, pContainerType, "Container Type not found");
                            checkParam(batchJob, pContainerNameProperty, "Container Name not found");
                            checkParam(batchJob, pContainerAssocType, "Container Assoc not found");
                            checkParam(batchJob, pParentAssocType, "Parent Assoc not found");

                            // Convert the parameters into QNames
                            QName contentTypeQName = resolvePrefixNameToQName(pContentType.getValue());
                            QName contentNamePropertyQName = resolvePrefixNameToQName(pNameProperty.getValue());
                            QName containerTypeQName = resolvePrefixNameToQName(pContainerType.getValue());
                            QName containerNamePropertyQName = resolvePrefixNameToQName(
                                    pContainerNameProperty.getValue());
                            QName containerAssocTypeQName = resolvePrefixNameToQName(
                                    pContainerAssocType.getValue());
                            QName parentAssocTypeQName = resolvePrefixNameToQName(pParentAssocType.getValue());

                            // Obtain a UserTransaction object
                            UserTransaction transaction = transactionService.getNonPropagatingUserTransaction();

                            try {
                                // Begin the transaction
                                transaction.begin();

                                // Switch to the user that must perform the import
                                authenticationService.authenticate(pUser.getValue(),
                                        EncryptionHelper.decrypt(pPassword.getValue()).toCharArray());

                            } catch (Exception e) {
                                logger.debug(e);
                                throw e;

                            } finally {
                                // Roll back even though nothing was written
                                try {
                                    transaction.rollback();
                                } catch (Exception e) {
                                }
                            }

                            // Create a node ref using the UID of the folder that will receive the data
                            StoreRef sr = new StoreRef(pStoreProtocol.getValue(), pStoreIdentifier.getValue());
                            // DictionarySvc.SPACES_STORE
                            NodeRef nodeRef = new NodeRef(sr, pUID.getValue());

                            // Start the recursive import
                            int nContent = handleRootFolder(tmpWorkDir, nodeRef, parentAssocTypeQName,
                                    containerTypeQName, containerNamePropertyQName, containerAssocTypeQName,
                                    contentTypeQName, contentNamePropertyQName);

                            // Update the status and move on to the next job
                            batchJob.setMessage("Content nuovi: " + nContent + " Datafile " + pName.getValue()
                                    + ".processed");
                            batchJob.setStatus(JobStatus.FINISHED);
                            jobManager.updateJob(batchJob);

                        } catch (Exception e) {
                            logger.error("[ArchiveImporterJob::execute] ERROR", e);
                            try {
                                // Set the message from getMessage(), in case the current one is empty
                                if (batchJob.getMessage().length() == 0) {
                                    batchJob.setMessage(e.getMessage());
                                }

                                // Update the status and move on to the next job
                                batchJob.setStatus(JobStatus.ERROR);
                                jobManager.updateJob(batchJob);
                            } catch (Exception ee) {
                                // TODO: check whether it is right to keep this silent
                            }

                        } finally {
                            // Do not reset the status to READY here, otherwise the job would loop
                        }
                    }
                }

            }
        } else {
            logger.error("[ArchiveImporterJob::execute] JobManager NULL per " + ECMENGINE_JOB_MANAGER_BEAN);
        }
    } catch (Exception e) {
        logger.error("[ArchiveImporterJob::execute] ERROR", e);
        throw new JobExecutionException(e);
    } finally {
        running = false;
        logger.debug("[ArchiveImporterJob::execute] END");
    }

    logger.debug("[ArchiveImporterJob::execute] END run");

}

From source file:it.doqui.index.ecmengine.business.personalization.multirepository.bootstrap.MultiTAdminServiceImpl.java

/**
 * @see TenantAdminService.deleteTenant()
 */
public void deleteTenant(String tenantDomain) {
    if (!existsTenant(tenantDomain)) {
        throw new RuntimeException("Tenant does not exist: " + tenantDomain);
    } else {
        try {
            final String tenantAdminUser = getTenantAdminUser(tenantDomain);
            //final String tenantAdminUser = tenantService.getDomainUser(AuthenticationUtil.getSystemUserName(), tenantDomain);

            AuthenticationUtil.runAs(new RunAsWork<Object>() {
                public Object doWork() {
                    List<WorkflowDefinition> workflowDefs = workflowService.getDefinitions();
                    if (workflowDefs != null) {
                        for (WorkflowDefinition workflowDef : workflowDefs) {
                            workflowService.undeployDefinition(workflowDef.getId());
                        }
                    }

                    List<String> messageResourceBundles = repoAdminService.getMessageBundles();
                    if (messageResourceBundles != null) {
                        for (String messageResourceBundle : messageResourceBundles) {
                            repoAdminService.undeployMessageBundle(messageResourceBundle);
                        }
                    }

                    List<RepoModelDefinition> models = repoAdminService.getModels();
                    if (models != null) {
                        for (RepoModelDefinition model : models) {
                            repoAdminService.undeployModel(model.getRepoName());
                        }
                    }

                    return null;
                }
            }, tenantAdminUser);

            //-------------------------------------
            UserTransaction userTransaction = transactionService.getUserTransaction();
            authenticationComponent.setSystemUserAsCurrentUser();
            try {
                // TODO: SplittingDbNodeServiceImpl should be used here, since its deleteStore
                // also updates the indexes; currently the Alfresco implementation is used,
                // which has the method but does not update the Lucene indexes
                userTransaction.begin();

                ns.deleteStore(tenantService.getName(tenantAdminUser,
                        new StoreRef(PROTOCOL_STORE_WORKSPACE, STORE_BASE_ID_SPACES)));
                ns.deleteStore(tenantService.getName(tenantAdminUser,
                        new StoreRef(PROTOCOL_STORE_ARCHIVE, STORE_BASE_ID_SPACES)));
                ns.deleteStore(tenantService.getName(tenantAdminUser,
                        new StoreRef(PROTOCOL_STORE_WORKSPACE, STORE_BASE_ID_VERSION)));
                ns.deleteStore(tenantService.getName(tenantAdminUser,
                        new StoreRef(PROTOCOL_STORE_SYSTEM, STORE_BASE_ID_SYSTEM)));
                ns.deleteStore(tenantService.getName(tenantAdminUser,
                        new StoreRef(PROTOCOL_STORE_USER, STORE_BASE_ID_USER)));

                userTransaction.commit();

            } catch (Throwable e) {
                // rollback the transaction
                try {
                    if (userTransaction != null) {
                        userTransaction.rollback();
                    }
                } catch (Exception ex) {
                }
                try {
                    authenticationComponent.clearCurrentSecurityContext();
                } catch (Exception ex) {
                }
                throw new AlfrescoRuntimeException("Failed to delete tenant", e);
            }

            // notify listeners that tenant has been deleted & hence disabled
            AuthenticationUtil.runAs(new RunAsWork<Object>() {
                public Object doWork() {
                    List<TenantDeployer> tenantDeployers = getTenantDeployers();
                    for (TenantDeployer tenantDeployer : tenantDeployers) {
                        tenantDeployer.onDisableTenant();
                    }
                    return null;
                }
            }, tenantAdminUser);

            // remove tenant
            attributeService.removeAttribute(TENANTS_ATTRIBUTE_PATH, tenantDomain);
        } catch (Throwable t) {
            throw new AlfrescoRuntimeException("Failed to delete tenant: " + tenantDomain, t);
        }
    }
}

From source file:it.doqui.index.ecmengine.business.personalization.multirepository.bootstrap.MultiTAdminServiceImpl.java

@Override
protected void onBootstrap(ApplicationEvent event) {
    logger.debug("[MultiTAdminServiceImpl::onBootstrap] BEGIN");

    ns = (SplittingDbNodeServiceImpl) getApplicationContext().getBean("splittingDbNodeServiceImpl");

    for (Repository repository : repositoryManager.getRepositories()) {
        RepositoryManager.setCurrentRepository(repository.getId());
        logger.info("[MultiTAdminServiceImpl::onBootstrap] Repository '"
                + RepositoryManager.getCurrentRepository() + "' -- Executing multi-tenant admin bootstrap.");

        // initialise the tenant admin service and status of tenants (using attribute service)
        // note: this requires that the repository schema has already been initialised

        // register dictionary - to allow enable/disable tenant callbacks
        register(dictionaryComponent);

        // register file store - to allow enable/disable tenant callbacks
        register(tenantFileContentStore);

        UserTransaction userTransaction = transactionService.getUserTransaction();
        authenticationComponent.setSystemUserAsCurrentUser();

        try {
            userTransaction.begin();

            // bootstrap Tenant Service internal cache
            List<org.alfresco.repo.tenant.Tenant> tenants = getAllTenants();

            int enabledCount = 0;
            int disabledCount = 0;

            if (tenants != null) {
                for (org.alfresco.repo.tenant.Tenant tenant : tenants) {
                    if (tenant.isEnabled()) {
                        // this will also call tenant deployers registered so far ...
                        enableTenant(tenant.getTenantDomain(), true);
                        enabledCount++;
                    } else {
                        // explicitly disable, without calling disableTenant callback
                        disableTenant(tenant.getTenantDomain(), false);
                        disabledCount++;
                    }
                }

                tenantService.register(this); // callback to refresh tenantStatus cache
            }

            userTransaction.commit();

            if (logger.isInfoEnabled()) {
                logger.info(
                        String.format("Alfresco Multi-Tenant startup - %d enabled tenants, %d disabled tenants",
                                enabledCount, disabledCount));
            }
        } catch (Throwable e) {
            // rollback the transaction
            try {
                if (userTransaction != null) {
                    userTransaction.rollback();
                }
            } catch (Exception ex) {
            }
            try {
                authenticationComponent.clearCurrentSecurityContext();
            } catch (Exception ex) {
            }
            throw new AlfrescoRuntimeException("Failed to bootstrap tenants", e);
        }
    }
    logger.debug("[MultiTAdminServiceImpl::onBootstrap] END");
}

From source file:de.fme.topx.component.TopXUpdateComponent.java

/**
 * Increase the hitcount for the given noderef by using the aspect
 * <code>topx:countable</code>. Does not fire events for other behaviours.
 * Uses the admin user to increment because not everybody has the required
 * permissions.
 *
 * @param nodeRef
 * @param userName
 *            current user who reads or updates the document.
 * @param counterUserProperty
 * @throws SystemException
 * @throws NotSupportedException
 * @throws HeuristicRollbackException
 * @throws HeuristicMixedException
 * @throws RollbackException
 * @throws IllegalStateException
 * @throws SecurityException
 */
@SuppressWarnings("unchecked")
public Integer increaseHitcount(final NodeRef nodeRef, final String userName, final QName counterProperty,
        final QName counterDateProperty, final QName counterUserProperty)
        throws NotSupportedException, SystemException, SecurityException, IllegalStateException,
        RollbackException, HeuristicMixedException, HeuristicRollbackException {
    UserTransaction transaction = transactionService.getNonPropagatingUserTransaction(false);
    transaction.begin();

    try {
        Preconditions.checkNotNull(nodeRef, "Passed noderef should not be null");
        Preconditions.checkArgument(nodeService.exists(nodeRef),
                "Node[" + nodeRef + "] must exist in the repository");
        filter.disableAllBehaviours();
        Map<QName, Serializable> newProperties = Maps.newHashMap();
        Integer counter = (Integer) nodeService.getProperty(nodeRef, counterProperty);
        if (counter == null) {
            counter = setHitCountProperties(nodeRef, counterProperty, counterDateProperty, counterUserProperty,
                    newProperties, 1, userName);
        } else {
            boolean shouldCount = true;
            Map<QName, Serializable> properties = nodeService.getProperties(nodeRef);
            Serializable usersValue = properties.get(counterUserProperty);

            List<String> users;
            if (!(usersValue instanceof List)) {
                users = Lists.newArrayList((String) usersValue);
            } else {
                users = (List<String>) usersValue;
            }

            if (users != null) {
                int userIndex = users.indexOf(userName);
                if (userIndex != -1) {
                    List<Date> counterDates = (List<Date>) properties.get(counterDateProperty);
                    Date lastUserReadDate = counterDates.get(userIndex);
                    // only count one download per document per user per day
                    if (DateUtils.isSameDay(lastUserReadDate, new Date())) {
                        shouldCount = false;
                        LOG.info("User " + userName + " already downloads/updates document " + nodeRef
                                + " today. Skip counting.");
                    }
                }
            }
            if (shouldCount) {
                counter = setHitCountProperties(nodeRef, counterProperty, counterDateProperty,
                        counterUserProperty, newProperties, counter, userName);
            }

        }
        transaction.commit();
        LOG.info("Commiting transaction for Node " + nodeRef);
        return counter;
    } finally {
        filter.enableAllBehaviours();
        if (transaction.getStatus() == javax.transaction.Status.STATUS_ACTIVE) {
            transaction.rollback();
            LOG.warn("Had to rollback the transaction for Node " + nodeRef);
        }

    }
}

From source file:com.flexive.tests.embedded.persistence.StructureTest.java

@Test(groups = { "ejb", "structure" })
public void assignmentGroupProperty() throws Exception {
    Context c = EJBLookup.getInitialContext();
    UserTransaction ut = (UserTransaction) c.lookup("java:comp/UserTransaction");
    ut.begin();
    FxString desc = new FxString("group description...");
    desc.setTranslation(2, "gruppen beschreibung");
    final String GROUPNAME = "GROUPTEST" + RandomStringUtils.randomNumeric(5);
    FxGroupEdit ge = FxGroupEdit.createNew(GROUPNAME, desc, new FxString("hint..."), true,
            FxMultiplicity.of(0, FxMultiplicity.N));
    ae.createGroup(ge, "/");
    ge.setName("subgroup");
    ae.createGroup(ge, "/" + GROUPNAME);
    ge.setName("subgroup2");
    ae.createGroup(ge, "/" + GROUPNAME + "/SUBGROUP");
    desc.setTranslation(1, "property description...");
    desc.setTranslation(2, "attribut beschreibung...");
    FxPropertyEdit pe = FxPropertyEdit.createNew("testproperty", desc, new FxString("property hint"), true,
            FxMultiplicity.of(1, 1), true, env().getACL(1), FxDataType.Number, new FxString("123"), true, null,
            null, null);
    ae.createProperty(pe, "/" + GROUPNAME + "/SUBGROUP");
    FxGroupAssignment ga = (FxGroupAssignment) env().getAssignment("ROOT/" + GROUPNAME);
    FxGroupAssignmentEdit gae = FxGroupAssignmentEdit.createNew(ga, env().getType("ROOT"), "GTEST", "/");
    ae.save(gae, true);
    ut.rollback();
}

From source file:it.doqui.index.ecmengine.business.personalization.importer.ArchiveImporterJob.java

private int handleRootFolder(File folder, NodeRef parentNodeRef, QName parentAssocTypeQName,
        QName containerTypeQName, QName containerNamePropertyQName, QName containerAssocTypeQName,
        QName contentTypeQName, QName contentNamePropertyQName) throws Exception {
    logger.debug("[ArchiveImporterJob::handleRootFolder] BEGIN");

    // Count how many content items have been written
    int nContent = 0;

    try {
        // Start by creating the individual content items
        boolean bContent = false;

        {
            // Obtain a UserTransaction object
            UserTransaction transaction = transactionService.getNonPropagatingUserTransaction();

            try {
                // Begin the transaction
                transaction.begin();

                // Count the created content items
                int nSubContent = 0;

                // First create the content items within one transaction
                File[] folderEntries = folder.listFiles();
                for (File entry : folderEntries) {

                    // If it is a regular file (not a directory)
                    if (!entry.isDirectory()) {
                        logger.debug("[ArchiveImporterJob::handleRootFolder] creating content: "
                                + entry.getName() + ", nodeRef=" + parentNodeRef + ", association="
                                + parentAssocTypeQName);

                        // Create the content
                        if (createContent(entry, parentNodeRef, contentTypeQName, contentNamePropertyQName,
                                parentAssocTypeQName)) {
                            nSubContent++;
                        }
                    }
                }

                // If 0 content items were inserted and no exception was raised, the inserted data were
                // all duplicates; in that case set bContent to true and let the algorithm carry on
                bContent = (nSubContent == 0);
                nContent += nSubContent;

                logger.debug("[ArchiveImporterJob::handleRootFolder] Content inseriti: " + nContent);

                // Calling commit with no rows to commit
                // raises an exception.
                // TODO: handle zero-content transactions .. but .. what should be done in this situation?
                transaction.commit();

                // If the commit raises no exception, mark the content creation as successful
                bContent = true;

                logger.debug("[ArchiveImporterJob::handleRootFolder] Content bool " + bContent);

            } catch (RollbackException re) {
                try {
                    transaction.rollback();
                } catch (Exception ee) {
                    logger.debug("[ArchiveImporterJob::handleRootFolder] RollbackException");
                }
            } catch (EcmEngineFoundationException e) {
                // Rollback
                try {
                    transaction.rollback();
                } catch (Exception ee) {
                    logger.debug("[ArchiveImporterJob::handleRootFolder] EcmEngineFoundationException");
                }
            } catch (Exception e) {
                logger.debug(e);
                throw e;
            }
        }

        // If the content items were created successfully, start deleting them from disk
        boolean bDelete = false;
        if (bContent) {
            try {
                // Delete the content files that were just imported
                File[] folderEntries = folder.listFiles();
                for (File entry : folderEntries) {

                    // If it is a regular file (not a directory)
                    if (!entry.isDirectory()) {
                        // Delete the file
                        entry.delete();
                    }
                }
                bDelete = true;
            } catch (Exception e) {
                logger.debug(e);
                throw e;
            }
        }

        // If the deletes succeed, start creating the directories
        if (bDelete) {
            try {
                boolean bDeleteFolder = true;
                // For every file in the folder
                File[] folderEntries = folder.listFiles();
                for (File entry : folderEntries) {

                    // If it is a directory
                    if (entry.isDirectory()) {
                        // Create directory
                        logger.debug("[ArchiveImporterJob::handleRootFolder] creating directory: "
                                + entry.getName() + ", nodeRef=" + parentNodeRef + ", association="
                                + parentAssocTypeQName);

                        // reference node
                        NodeRef nr = null;

                        // Oddly, even a read-only lookup expressly requires a transaction

                        // Obtain a UserTransaction object
                        UserTransaction transaction = transactionService.getNonPropagatingUserTransaction();
                        try {
                            // Begin the transaction
                            transaction.begin();

                            // Check whether the folder already exists under the parent node
                            nr = nodeService.getChildByName(parentNodeRef, parentAssocTypeQName,
                                    entry.getName());

                            // Roll back even though nothing was written
                            transaction.rollback();
                        } catch (Exception e) {
                            logger.debug(e);
                            throw e;
                        } finally {
                            // Roll back even though nothing was written
                            try {
                                transaction.rollback();
                            } catch (Exception e) {
                            }
                        }

                        // Obtain a UserTransaction object
                        transaction = transactionService.getNonPropagatingUserTransaction();
                        boolean bTrans = false;
                        try {
                            // If it does not exist, try to create it
                            if (nr == null) {
                                bTrans = true;

                                // Prepare the folder properties
                                QName prefixedNameQName = resolvePrefixNameToQName("cm:" + entry.getName());
                                Map<QName, Serializable> props = new HashMap<QName, Serializable>();
                                props.put(containerNamePropertyQName, entry.getName());

                                // Begin the transaction
                                transaction.begin();

                                // Create the folder
                                ChildAssociationRef folderNodeRef = nodeService.createNode(parentNodeRef,
                                        parentAssocTypeQName, prefixedNameQName, containerTypeQName, props);

                                // Calling commit with no rows to commit
                                // raises an exception.
                                // TODO: handle zero-content transactions
                                transaction.commit();

                                nr = folderNodeRef.getChildRef();
                            }

                            // Create the subfolder
                            nContent += handleRootFolder(entry, nr, containerAssocTypeQName, // Pass containerAssocType, not the parent's assoc type, to child folders
                                    containerTypeQName, containerNamePropertyQName, containerAssocTypeQName,
                                    contentTypeQName, contentNamePropertyQName);

                        } catch (RollbackException re) {
                            if (bTrans) {
                                try {
                                    transaction.rollback();
                                } catch (Exception ee) {
                                    logger.debug(re);
                                }
                            }

                        } catch (EcmEngineFoundationException e) {
                            bDeleteFolder = false;
                            // Rollback
                            try {
                                transaction.rollback();
                            } catch (Exception ee) {
                                logger.debug(e);
                            }

                        } catch (Exception e) {
                            logger.debug(e);
                            throw e;
                        }

                    }
                }

                // Remove the directory, if the subdirectories were removed without problems
                if (bDeleteFolder) {
                    folder.delete();
                }
            } catch (Exception e) {
                logger.debug(e);
                throw e;
            }
        }

    } catch (Exception e) {
        logger.debug(e);
        throw e;
    } finally {
        logger.debug("[ArchiveImporterJob::handleRootFolder] END");
    }

    return nContent;
}

From source file:com.sirma.itt.cmf.integration.alfresco3.CMFWorkflowDeployer.java

/**
 * Deploy the Workflow Definitions.
 */
public void init() {
    PropertyCheck.mandatory(this, "transactionService", transactionService);
    PropertyCheck.mandatory(this, "authenticationContext", authenticationContext);
    PropertyCheck.mandatory(this, "workflowService", workflowService);

    String currentUser = authenticationContext.getCurrentUserName();
    if (currentUser == null) {
        authenticationContext.setSystemUserAsCurrentUser();
    }
    if (!transactionService.getAllowWrite()) {
        if (logger.isWarnEnabled())
            logger.warn("Repository is in read-only mode; not deploying workflows.");

        return;
    }

    UserTransaction userTransaction = transactionService.getUserTransaction();
    try {
        userTransaction.begin();

        // bootstrap the workflow models and static labels (from classpath)
        if (models != null && resourceBundles != null
                && ((models.size() > 0) || (resourceBundles.size() > 0))) {
            DictionaryBootstrap dictionaryBootstrap = new DictionaryBootstrap();
            dictionaryBootstrap.setDictionaryDAO(dictionaryDAO);
            dictionaryBootstrap.setTenantService(tenantService);
            dictionaryBootstrap.setModels(models);
            dictionaryBootstrap.setLabels(resourceBundles);
            dictionaryBootstrap.bootstrap(); // also registers with
            // dictionary
        }

        // bootstrap the workflow definitions (from classpath)
        if (workflowDefinitions != null) {
            for (Properties workflowDefinition : workflowDefinitions) {
                // retrieve workflow specification
                String engineId = workflowDefinition.getProperty(ENGINE_ID);
                if (engineId == null || engineId.length() == 0) {
                    throw new WorkflowException("Workflow Engine Id must be provided");
                }

                String location = workflowDefinition.getProperty(LOCATION);
                if (location == null || location.length() == 0) {
                    throw new WorkflowException("Workflow definition location must be provided");
                }

                Boolean redeploy = Boolean.valueOf(workflowDefinition.getProperty(REDEPLOY));
                String mimetype = workflowDefinition.getProperty(MIMETYPE);

                // retrieve input stream on workflow definition
                ClassPathResource workflowResource = new ClassPathResource(location);

                // deploy workflow definition
                if (!redeploy && workflowService.isDefinitionDeployed(engineId,
                        workflowResource.getInputStream(), mimetype)) {
                    if (logger.isDebugEnabled())
                        logger.debug("Workflow deployer: Definition '" + location + "' already deployed");
                } else {
                    WorkflowDeployment deployment = workflowService.deployDefinition(engineId,
                            workflowResource.getInputStream(), workflowResource.getFilename());
                    logDeployment(location, deployment);
                }

            }
        }

        userTransaction.commit();
    } catch (Throwable e) {
        // rollback the transaction
        try {
            if (userTransaction != null) {
                userTransaction.rollback();
            }
        } catch (Exception ex) {
            // NOOP
        }
    } finally {
        if (currentUser == null) {
            authenticationContext.clearCurrentSecurityContext();
        }
    }
}

From source file:fr.openwide.talendalfresco.rest.server.command.LoginCommand.java

private User authenticate(String username, String password, String ticket) {

    // case of existing session user : getting alfresco ticket
    User existingSessionUser = null;
    HttpSession session = httpRequest.getSession(false);
    if (session != null) {
        existingSessionUser = (User) session.getAttribute(AuthenticationHelper.AUTHENTICATION_USER);
        if (existingSessionUser != null) {
            String existingSessionTicket = existingSessionUser.getTicket();
            // alternatives :
            // 1. using alfresco ticket rather than sso ticket to speed up things
            // NB. this means that before logging in a different user an explicit logout must be done
            // 2. using sso ticket rather than alfresco one
            // this requires never to give the ticket but when we want to relog, which is bothersome
            if (existingSessionTicket != null) {
                ticket = existingSessionTicket;
            }
        }
    }

    UserTransaction tx = null;
    try {
        // Authenticate via the authentication service, then save the details of user in an object
        // in the session - this is used by the servlet filter etc. on each page to check for login
        if (username != null && password != null) {
            // authentication using login (alfresco or sso), since user/pwd params (even empty ones) have been supplied
            // validation :
            RestServerHelper.validateUsername(session, username);
            RestServerHelper.validatePassword(session, password);
            // login :
            authenticationService.authenticate(username, password.toCharArray());

        } else if (ticket != null && ticket.length() != 0) {
            // authentication using ticket (alfresco or sso), since non empty ticket has been supplied
            authenticationService.validate(ticket);

        } else {
            xmlResult.setError(RestCommandResult.CODE_ERROR_AUTH_MISSING,
                    RestServerHelper.getMessage(session, RestServerHelper.MSG_ERROR_MISSING) + " : " + username,
                    null);
            return null;
        }

        // Set the user name as stored by the back end 
        username = authenticationService.getCurrentUserName();

        if (existingSessionUser != null && existingSessionUser.getUserName().equals(username)) {
            // user was already logged in, nothing else to do
            return existingSessionUser;
        }

        // now setting up the logged-in user elements
        // using a non-propagating tx because we are already inside a tx (commandServlet)
        tx = transactionService.getNonPropagatingUserTransaction();
        tx.begin();

        // remove the session invalidated flag (used to remove last username cookie by AuthenticationFilter)
        if (session != null) {
            session.removeAttribute(AuthenticationHelper.SESSION_INVALIDATED);
        }

        // setup User object and Home space ID
        User user = new User(username, authenticationService.getCurrentTicket(),
                personService.getPerson(username));

        NodeRef homeSpaceRef = (NodeRef) nodeService.getProperty(personService.getPerson(username),
                ContentModel.PROP_HOMEFOLDER);

        // check that the home space node exists - else user cannot login
        if (nodeService.exists(homeSpaceRef) == false) {
            throw new InvalidNodeRefException(homeSpaceRef);
        }
        user.setHomeSpaceId(homeSpaceRef.getId());

        tx.commit();
        tx = null; // clear this so we know not to rollback 

        // put the User object in the Session - the authentication servlet will then allow
        // the app to continue without redirecting to the login page
        if (session == null) {
            session = httpRequest.getSession(true); // creating session if none yet
        }
        session.setAttribute(AuthenticationHelper.AUTHENTICATION_USER, user);

        // Set the current locale for Alfresco web app. NB. session exists now.
        I18NUtil.setLocale(Application.getLanguage(session, true));

        return user;

    } catch (AuthenticationException ae) {
        xmlResult.setError(RestCommandResult.CODE_ERROR_AUTH_UNKNOWN_USER,
                RestServerHelper.getMessage(session, RestServerHelper.MSG_ERROR_UNKNOWN_USER) + " : "
                        + username,
                ae);

    } catch (InvalidNodeRefException inre) {
        xmlResult.setError(RestCommandResult.CODE_ERROR_AUTH_UNKNOWN_USER,
                RestServerHelper.getMessage(session, Repository.ERROR_NOHOME) + " : "
                        + inre.getNodeRef().getId() + " (" + username + ")",
                inre);

    } catch (Throwable e) {
        // Some other kind of serious failure
        xmlResult.setError("Unknown technical error when authenticating user " + username, null);

    } finally {
        try {
            if (tx != null) {
                tx.rollback();
            }
        } catch (Exception tex) {
        }
    }

    return null;
}