Example usage for org.springframework.transaction TransactionDefinition PROPAGATION_REQUIRES_NEW

List of usage examples for org.springframework.transaction TransactionDefinition PROPAGATION_REQUIRES_NEW

Introduction

This page collects example usages of org.springframework.transaction TransactionDefinition PROPAGATION_REQUIRES_NEW from open-source projects.

Prototype

int PROPAGATION_REQUIRES_NEW

Document

Create a new transaction, suspending the current transaction if one exists.
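
Before turning to the examples, here is a minimal, self-contained sketch of how the constant is typically used. It is an illustration only: the AuditService class, its method names, and the wiring of the PlatformTransactionManager are assumptions made for this sketch, not code from any of the projects quoted below.

import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.TransactionDefinition;
import org.springframework.transaction.support.TransactionTemplate;

public class AuditService {

    private final TransactionTemplate requiresNewTemplate;

    public AuditService(PlatformTransactionManager transactionManager) {
        // TransactionTemplate extends DefaultTransactionDefinition, so the
        // propagation behavior is configured directly on the template.
        this.requiresNewTemplate = new TransactionTemplate(transactionManager);
        this.requiresNewTemplate.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
    }

    public void writeAuditRecord(String message) {
        // Any transaction active in the caller is suspended; the callback runs
        // in a brand-new transaction that commits or rolls back on its own, so
        // the audit row survives a later rollback of the outer transaction.
        requiresNewTemplate.execute(status -> {
            saveAudit(message); // hypothetical persistence call
            return null;
        });
    }

    private void saveAudit(String message) {
        // persistence omitted for brevity
    }
}

The declarative equivalent is @Transactional(propagation = Propagation.REQUIRES_NEW) on a Spring-managed bean method. Most of the examples below prefer the programmatic form because they need precise control over where the short bookkeeping transaction begins and ends.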

Usage

From source file:org.lexevs.dao.database.operation.DefaultLexEvsDatabaseOperations.java

@Override
@SuppressWarnings({ "unchecked", "rawtypes" })
public void dropCodingSchemeTablesByPrefix(final String prefix) {

    TransactionTemplate template = new TransactionTemplate(this.getTransactionManager());
    template.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);

    template.execute(new TransactionCallback() {

        @Override
        public Object doInTransaction(TransactionStatus status) {

            dropCodingSchemeHistoryTablesByPrefix(prefix);
            doExecuteSql(codingSchemeXmlDdl, new DropSchemaPlatformActor(), prefix);

            return null;
        }
    });
}
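
Here the REQUIRES_NEW template gives the DDL drop its own transaction boundary, so the schema change commits or fails independently of whatever transaction the caller may already have open.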

From source file:org.ohdsi.webapi.cohortdefinition.GenerateCohortTasklet.java

@Override
public RepeatStatus execute(final StepContribution contribution, final ChunkContext chunkContext)
        throws Exception {
    Date startTime = Calendar.getInstance().getTime();

    DefaultTransactionDefinition initTx = new DefaultTransactionDefinition();
    initTx.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
    TransactionStatus initStatus = this.transactionTemplate.getTransactionManager().getTransaction(initTx);
    CohortDefinition df = this.cohortDefinitionRepository.findOne(this.task.getCohortDefinition().getId());
    CohortGenerationInfo info = df.getGenerationInfo();
    if (info == null) {
        info = new CohortGenerationInfo().setCohortDefinition(df);
        df.setGenerationInfo(info);
    }

    info.setIsValid(false);
    info.setStartTime(startTime);
    info.setStatus(GenerationStatus.RUNNING);
    df = this.cohortDefinitionRepository.save(df);
    this.transactionTemplate.getTransactionManager().commit(initStatus);

    info = df.getGenerationInfo();

    try {
        final int[] ret = this.transactionTemplate.execute(new TransactionCallback<int[]>() {

            @Override
            public int[] doInTransaction(final TransactionStatus status) {
                return doTask();
            }
        });
        log.debug("Update count: " + ret.length);
        info.setIsValid(true);
    } catch (final TransactionException e) {
        info.setIsValid(false);
        log.error(e.getMessage(), e);
        throw e; // rethrow so the job status is marked as failed
    } finally {
        DefaultTransactionDefinition completeTx = new DefaultTransactionDefinition();
        completeTx.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
        TransactionStatus completeStatus = this.transactionTemplate.getTransactionManager()
                .getTransaction(completeTx);
        Date endTime = Calendar.getInstance().getTime();
        info.setExecutionDuration((int) (endTime.getTime() - startTime.getTime()));
        info.setStatus(GenerationStatus.COMPLETE);
        this.cohortDefinitionRepository.save(df);
        this.transactionTemplate.getTransactionManager().commit(completeStatus);
    }

    return RepeatStatus.FINISHED;
}
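
The pattern in this tasklet is worth noting: the long-running generation runs through the injected transactionTemplate, while the status bookkeeping before and after it happens in short REQUIRES_NEW transactions (initStatus and completeStatus). Committing those immediately makes the RUNNING and COMPLETE states visible to other sessions even if the main work transaction later rolls back.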

From source file:org.ohdsi.webapi.ircalc.PerformAnalysisTasklet.java

@Override
public RepeatStatus execute(final StepContribution contribution, final ChunkContext chunkContext)
        throws Exception {
    Date startTime = Calendar.getInstance().getTime();
    Map<String, Object> jobParams = chunkContext.getStepContext().getJobParameters();
    Integer analysisId = Integer.valueOf(jobParams.get("analysis_id").toString());
    Integer sourceId = Integer.valueOf(jobParams.get("source_id").toString());
    boolean isValid = false;
    String statusMessage = "OK";

    DefaultTransactionDefinition requiresNewTx = new DefaultTransactionDefinition();
    requiresNewTx.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);

    TransactionStatus initStatus = this.transactionTemplate.getTransactionManager()
            .getTransaction(requiresNewTx);
    IncidenceRateAnalysis analysis = this.incidenceRateAnalysisRepository.findOne(analysisId);

    ExecutionInfo analysisInfo = findExecutionInfoBySourceId(analysis.getExecutionInfoList(), sourceId);
    analysisInfo.setIsValid(false);
    analysisInfo.setStartTime(startTime);
    analysisInfo.setStatus(GenerationStatus.RUNNING);

    this.incidenceRateAnalysisRepository.save(analysis);
    this.transactionTemplate.getTransactionManager().commit(initStatus);

    try {
        final int[] ret = this.transactionTemplate.execute(new TransactionCallback<int[]>() {

            @Override
            public int[] doInTransaction(final TransactionStatus status) {
                return doTask(chunkContext);
            }
        });
        log.debug("Update count: " + ret.length);
        isValid = true;
    } catch (final Exception e) {
        isValid = false;
        statusMessage = e.getMessage();
        log.error(e.getMessage(), e);
        throw e; // rethrow so the job status is marked as failed
    } finally {
        TransactionStatus completeStatus = this.transactionTemplate.getTransactionManager()
                .getTransaction(requiresNewTx);
        Date endTime = Calendar.getInstance().getTime();
        analysis = this.incidenceRateAnalysisRepository.findOne(analysisId);

        analysisInfo = findExecutionInfoBySourceId(analysis.getExecutionInfoList(), sourceId);
        analysisInfo.setIsValid(isValid);
        analysisInfo.setExecutionDuration((int) (endTime.getTime() - startTime.getTime()));
        analysisInfo.setStatus(GenerationStatus.COMPLETE);
        analysisInfo
                .setMessage(statusMessage.substring(0, Math.min(MAX_MESSAGE_LENGTH, statusMessage.length())));

        this.incidenceRateAnalysisRepository.save(analysis);
        this.transactionTemplate.getTransactionManager().commit(completeStatus);
    }

    return RepeatStatus.FINISHED;
}
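
This tasklet follows the same bracketing pattern, with one addition: the finally block re-reads the analysis entity before writing the final status, so the completion update is applied to fresh state in its own REQUIRES_NEW transaction.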

From source file:org.ohdsi.webapi.service.CohortDefinitionService.java

/**
 * Queues up a generate cohort task for the specified cohort definition id.
 *
 * @param id - the Cohort Definition ID to generate
 * @return information about the Cohort Analysis Job
 */
@GET
@Produces(MediaType.APPLICATION_JSON)
@Path("/{id}/generate/{sourceKey}")
public JobExecutionResource generateCohort(@PathParam("id") final int id,
        @PathParam("sourceKey") final String sourceKey) {

    Source source = getSourceRepository().findBySourceKey(sourceKey);
    String cdmTableQualifier = source.getTableQualifier(SourceDaimon.DaimonType.CDM);
    String resultsTableQualifier = source.getTableQualifier(SourceDaimon.DaimonType.Results);

    DefaultTransactionDefinition requiresNewTx = new DefaultTransactionDefinition();
    requiresNewTx.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
    TransactionStatus initStatus = this.getTransactionTemplate().getTransactionManager()
            .getTransaction(requiresNewTx);

    CohortDefinition currentDefinition = this.cohortDefinitionRepository.findOne(id);
    CohortGenerationInfo info = findBySourceId(currentDefinition.getGenerationInfoList(), source.getSourceId());
    if (info == null) {
        info = new CohortGenerationInfo(currentDefinition, source.getSourceId());
        currentDefinition.getGenerationInfoList().add(info);
    }
    info.setStatus(GenerationStatus.PENDING).setStartTime(Calendar.getInstance().getTime());

    this.cohortDefinitionRepository.save(currentDefinition);
    this.getTransactionTemplate().getTransactionManager().commit(initStatus);

    JobParametersBuilder builder = new JobParametersBuilder();
    builder.addString("jobName", "generating cohort " + currentDefinition.getId() + " : "
            + source.getSourceName() + " (" + source.getSourceKey() + ")");
    builder.addString("cdm_database_schema", cdmTableQualifier);
    builder.addString("results_database_schema", resultsTableQualifier);
    builder.addString("target_database_schema", resultsTableQualifier);
    builder.addString("target_dialect", source.getSourceDialect());
    builder.addString("target_table", "cohort");
    builder.addString("cohort_definition_id", ("" + id));
    builder.addString("source_id", ("" + source.getSourceId()));
    builder.addString("generate_stats", Boolean.TRUE.toString());

    final JobParameters jobParameters = builder.toJobParameters();

    log.info(String.format("Beginning generate cohort for cohort definition id: \n %s", "" + id));

    GenerateCohortTasklet generateTasklet = new GenerateCohortTasklet(getSourceJdbcTemplate(source),
            getTransactionTemplate(), cohortDefinitionRepository);

    Step generateCohortStep = stepBuilders.get("cohortDefinition.generateCohort").tasklet(generateTasklet)
            .build();

    Job generateCohortJob = jobBuilders.get("generateCohort").start(generateCohortStep).build();

    JobExecutionResource jobExec = this.jobTemplate.launch(generateCohortJob, jobParameters);
    return jobExec;
}
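
Here the REQUIRES_NEW transaction is committed before the Spring Batch job is launched, so the PENDING status is durable and visible to concurrent requests by the time the asynchronous generation starts.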

From source file:org.ohdsi.webapi.service.FeasibilityService.java

@GET
@Path("/{study_id}/generate/{sourceKey}")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public JobExecutionResource performStudy(@PathParam("study_id") final int study_id,
        @PathParam("sourceKey") final String sourceKey) {
    Date startTime = Calendar.getInstance().getTime();

    Source source = this.getSourceRepository().findBySourceKey(sourceKey);
    String resultsTableQualifier = source.getTableQualifier(SourceDaimon.DaimonType.Results);
    String cdmTableQualifier = source.getTableQualifier(SourceDaimon.DaimonType.CDM);

    DefaultTransactionDefinition requiresNewTx = new DefaultTransactionDefinition();
    requiresNewTx.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);

    TransactionStatus initStatus = this.getTransactionTemplate().getTransactionManager()
            .getTransaction(requiresNewTx);

    FeasibilityStudy study = this.feasibilityStudyRepository.findOne(study_id);

    CohortDefinition indexRule = this.cohortDefinitionRepository.findOne(study.getIndexRule().getId());
    CohortGenerationInfo indexInfo = findCohortGenerationInfoBySourceId(indexRule.getGenerationInfoList(),
            source.getSourceId());
    if (indexInfo == null) {
        indexInfo = new CohortGenerationInfo(indexRule, source.getSourceId());
        indexRule.getGenerationInfoList().add(indexInfo);
    }
    indexInfo.setStatus(GenerationStatus.PENDING).setStartTime(startTime).setExecutionDuration(null);
    this.cohortDefinitionRepository.save(indexRule);

    if (study.getResultRule() != null) {
        CohortDefinition resultRule = this.cohortDefinitionRepository.findOne(study.getResultRule().getId());
        CohortGenerationInfo resultInfo = findCohortGenerationInfoBySourceId(resultRule.getGenerationInfoList(),
                source.getSourceId());
        if (resultInfo == null) {
            resultInfo = new CohortGenerationInfo(resultRule, source.getSourceId());
            resultRule.getGenerationInfoList().add(resultInfo);
        }
        resultInfo.setStatus(GenerationStatus.PENDING).setStartTime(startTime).setExecutionDuration(null);
        this.cohortDefinitionRepository.save(resultRule);
    }

    StudyGenerationInfo studyInfo = findStudyGenerationInfoBySourceId(study.getStudyGenerationInfoList(),
            source.getSourceId());
    if (studyInfo == null) {
        studyInfo = new StudyGenerationInfo(study, source);
        study.getStudyGenerationInfoList().add(studyInfo);
    }
    studyInfo.setStatus(GenerationStatus.PENDING).setStartTime(startTime).setExecutionDuration(null);

    this.feasibilityStudyRepository.save(study);

    this.getTransactionTemplate().getTransactionManager().commit(initStatus);

    JobParametersBuilder builder = new JobParametersBuilder();
    builder.addString("jobName", "performing feasibility study on " + indexRule.getName() + " : "
            + source.getSourceName() + " (" + source.getSourceKey() + ")");
    builder.addString("cdm_database_schema", cdmTableQualifier);
    builder.addString("results_database_schema", resultsTableQualifier);
    builder.addString("target_database_schema", resultsTableQualifier);
    builder.addString("target_dialect", source.getSourceDialect());
    builder.addString("target_table", "cohort");
    builder.addString("cohort_definition_id", ("" + indexRule.getId()));
    builder.addString("study_id", ("" + study_id));
    builder.addString("source_id", ("" + source.getSourceId()));
    builder.addString("generate_stats", Boolean.TRUE.toString());

    final JobParameters jobParameters = builder.toJobParameters();
    final JdbcTemplate sourceJdbcTemplate = getSourceJdbcTemplate(source);

    GenerateCohortTasklet indexRuleTasklet = new GenerateCohortTasklet(sourceJdbcTemplate,
            getTransactionTemplate(), cohortDefinitionRepository);

    Step generateCohortStep = stepBuilders.get("performStudy.generateIndexCohort").tasklet(indexRuleTasklet)
            .exceptionHandler(new TerminateJobStepExceptionHandler()).build();

    PerformFeasibilityTasklet simulateTasklet = new PerformFeasibilityTasklet(sourceJdbcTemplate,
            getTransactionTemplate(), feasibilityStudyRepository, cohortDefinitionRepository);

    Step performStudyStep = stepBuilders.get("performStudy.performStudy").tasklet(simulateTasklet).build();

    Job performStudyJob = jobBuilders.get("performStudy").start(generateCohortStep).next(performStudyStep)
            .build();

    JobExecutionResource jobExec = this.jobTemplate.launch(performStudyJob, jobParameters);
    return jobExec;
}
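
This is the same launch pattern as the cohort generation above, except that a single REQUIRES_NEW transaction marks three related entities PENDING (the index rule, the optional result rule, and the study itself) before the multi-step job is submitted.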

From source file:org.ohdsi.webapi.service.IRAnalysisService.java

@GET
@Path("/{analysis_id}/execute/{sourceKey}")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public JobExecutionResource performAnalysis(@PathParam("analysis_id") final int analysisId,
        @PathParam("sourceKey") final String sourceKey) {
    Date startTime = Calendar.getInstance().getTime();

    Source source = this.getSourceRepository().findBySourceKey(sourceKey);
    String resultsTableQualifier = source.getTableQualifier(SourceDaimon.DaimonType.Results);
    String cdmTableQualifier = source.getTableQualifier(SourceDaimon.DaimonType.CDM);

    DefaultTransactionDefinition requiresNewTx = new DefaultTransactionDefinition();
    requiresNewTx.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);

    TransactionStatus initStatus = this.getTransactionTemplate().getTransactionManager()
            .getTransaction(requiresNewTx);

    IncidenceRateAnalysis analysis = this.irAnalysisRepository.findOne(analysisId);

    ExecutionInfo analysisInfo = findExecutionInfoBySourceId(analysis.getExecutionInfoList(),
            source.getSourceId());
    if (analysisInfo != null) {
        if (analysisInfo.getStatus() != GenerationStatus.COMPLETE)
            return null; // Exit execution, another process has started it.
    } else {
        analysisInfo = new ExecutionInfo(analysis, source);
        analysis.getExecutionInfoList().add(analysisInfo);
    }

    analysisInfo.setStatus(GenerationStatus.PENDING).setStartTime(startTime).setExecutionDuration(null);

    this.irAnalysisRepository.save(analysis);

    this.getTransactionTemplate().getTransactionManager().commit(initStatus);

    JobParametersBuilder builder = new JobParametersBuilder();
    builder.addString("jobName", "IR Analysis: " + analysis.getId() + " : " + source.getSourceName() + " ("
            + source.getSourceKey() + ")");
    builder.addString("cdm_database_schema", cdmTableQualifier);
    builder.addString("results_database_schema", resultsTableQualifier);
    builder.addString("target_dialect", source.getSourceDialect());
    builder.addString("analysis_id", ("" + analysisId));
    builder.addString("source_id", ("" + source.getSourceId()));

    final JobParameters jobParameters = builder.toJobParameters();

    PerformAnalysisTasklet analysisTasklet = new PerformAnalysisTasklet(getSourceJdbcTemplate(source),
            getTransactionTemplate(), irAnalysisRepository);

    Step irAnalysisStep = stepBuilders.get("irAnalysis.execute").tasklet(analysisTasklet).build();

    Job executeAnalysis = jobBuilders.get("irAnalysis").start(irAnalysisStep).build();

    JobExecutionResource jobExec = this.jobTemplate.launch(executeAnalysis, jobParameters);
    return jobExec;
}
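
Besides marking the analysis PENDING up front, this method uses the committed ExecutionInfo as a concurrency guard: if a record already exists whose status is not COMPLETE, another process is assumed to be running the analysis and the request exits early.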

From source file:org.ohdsi.webapi.service.IRAnalysisService.java

/**
 * Exports the analysis definition and results
 *
 * @param id - the IR Analysis ID to export
 * @return Response containing binary stream of zipped data
 */
@GET
@Produces(MediaType.APPLICATION_JSON)
@Path("/{id}/export")
@Transactional
public Response export(@PathParam("id") final int id) {

    Response response = null;
    HashMap<String, String> fileList = new HashMap<>();
    HashMap<Integer, String> distTypeLookup = new HashMap<>();

    distTypeLookup.put(1, "TAR");
    distTypeLookup.put(2, "TTO");

    try {
        IncidenceRateAnalysis analysis = this.irAnalysisRepository.findOne(id);
        Set<ExecutionInfo> executions = analysis.getExecutionInfoList();

        fileList.put("analysisDefinition.json", analysis.getDetails().getExpression());

        // sequentially return results of the IR calculation. In Spring 1.4.2, we can utilize @Async operations to do this in parallel.
        // store results in single CSV file
        ArrayList<String[]> summaryLines = new ArrayList<>();
        ArrayList<String[]> strataLines = new ArrayList<>();
        ArrayList<String[]> distLines = new ArrayList<>();

        for (ExecutionInfo execution : executions) {
            Source source = execution.getSource();
            String resultsTableQualifier = source.getTableQualifier(SourceDaimon.DaimonType.Results);

            // perform this query to CDM in an isolated transaction to avoid expensive JDBC transaction synchronization
            DefaultTransactionDefinition requiresNewTx = new DefaultTransactionDefinition();
            requiresNewTx.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
            TransactionStatus initStatus = this.getTransactionTemplateRequiresNew().getTransactionManager()
                    .getTransaction(requiresNewTx);

            // get the summary data
            List<AnalysisReport.Summary> summaryList = getAnalysisSummaryList(id, source);
            if (summaryLines.isEmpty()) {
                summaryLines.add("db_id#targetId#outcomeId#total#timeAtRisk#cases".split("#"));
            }
            for (AnalysisReport.Summary summary : summaryList) {
                summaryLines.add(new String[] { source.getSourceKey(), String.valueOf(summary.targetId),
                        String.valueOf(summary.outcomeId), String.valueOf(summary.totalPersons),
                        String.valueOf(summary.timeAtRisk), String.valueOf(summary.cases) });
            }

            // get the strata results
            List<AnalysisReport.StrataStatistic> strataList = getStrataStatistics(id, source);
            if (strataLines.isEmpty()) {
                strataLines.add(
                        "db_id#targetId#outcomeId#strata_id#strata_name#total#timeAtRisk#cases".split("#"));
            }
            for (AnalysisReport.StrataStatistic strata : strataList) {
                strataLines.add(new String[] { source.getSourceKey(), String.valueOf(strata.targetId),
                        String.valueOf(strata.outcomeId), String.valueOf(strata.id),
                        String.valueOf(strata.name), String.valueOf(strata.totalPersons),
                        String.valueOf(strata.timeAtRisk), String.valueOf(strata.cases) });
            }

            // get the distribution data
            String distQuery = String.format(
                    "select '%s' as db_id, target_id, outcome_id, strata_sequence, dist_type, total, avg_value, std_dev, min_value, p10_value, p25_value, median_value, p75_value, p90_value, max_value from %s.ir_analysis_dist where analysis_id = %d",
                    source.getSourceKey(), resultsTableQualifier, id);
            String translatedSql = SqlTranslate.translateSql(distQuery, "sql server", source.getSourceDialect(),
                    SessionUtils.sessionId(), resultsTableQualifier);

            SqlRowSet rs = this.getSourceJdbcTemplate(source).queryForRowSet(translatedSql);

            this.getTransactionTemplateRequiresNew().getTransactionManager().commit(initStatus);

            if (distLines.isEmpty()) {
                distLines.add(rs.getMetaData().getColumnNames());
            }
            while (rs.next()) {
                ArrayList<String> columns = new ArrayList<>();
                for (int i = 1; i <= rs.getMetaData().getColumnNames().length; i++) {
                    switch (rs.getMetaData().getColumnName(i)) {
                    case "dist_type":
                        columns.add(distTypeLookup.get(rs.getInt(i)));
                        break;
                    default:
                        columns.add(rs.getString(i));
                        break;
                    }
                }
                distLines.add(columns.toArray(new String[0]));
            }
        }

        // Write report lines to CSV
        StringWriter sw = null;
        CSVWriter csvWriter = null;

        sw = new StringWriter();
        csvWriter = new CSVWriter(sw);
        csvWriter.writeAll(summaryLines);
        csvWriter.flush();
        fileList.put("ir_summary.csv", sw.getBuffer().toString());

        sw = new StringWriter();
        csvWriter = new CSVWriter(sw);
        csvWriter.writeAll(strataLines);
        csvWriter.flush();
        fileList.put("ir_strata.csv", sw.getBuffer().toString());

        sw = new StringWriter();
        csvWriter = new CSVWriter(sw);
        csvWriter.writeAll(distLines);
        csvWriter.flush();
        fileList.put("ir_dist.csv", sw.getBuffer().toString());

        // build zip output
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        ZipOutputStream zos = new ZipOutputStream(baos);

        for (String fileName : fileList.keySet()) {
            ZipEntry resultsEntry = new ZipEntry(fileName);
            zos.putNextEntry(resultsEntry);
            zos.write(fileList.get(fileName).getBytes());
        }

        zos.closeEntry();
        zos.close();
        baos.flush();
        baos.close();

        response = Response.ok(baos).type(MediaType.APPLICATION_OCTET_STREAM).header("Content-Disposition",
                String.format("attachment; filename=\"%s\"", "ir_analysis_" + id + ".zip")).build();
    } catch (Exception ex) {
        throw new RuntimeException(ex);
    }
    return response;
}
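
Note the combination here: the method itself is @Transactional, but each per-source result query is wrapped in its own REQUIRES_NEW transaction (via getTransactionTemplateRequiresNew()), which, as the inline comment explains, keeps the potentially slow CDM query out of the outer transaction's JDBC synchronization.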

From source file:org.pentaho.platform.repository2.unified.lifecycle.AbstractBackingRepositoryLifecycleManager.java

protected void initTransactionTemplate() {
    // a new transaction must be created (in order to run with the correct user privileges)
    txnTemplate.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
}
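
This lifecycle manager configures a shared template once instead of building a definition per call. For comparison, the same propagation can be requested declaratively. The following is a minimal sketch of that alternative; the RepositoryInitializer class and its method are hypothetical names, not Pentaho code:

import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;

public class RepositoryInitializer {

    // Spring suspends any caller transaction and opens a new one for this
    // method: the declarative equivalent of PROPAGATION_REQUIRES_NEW.
    @Transactional(propagation = Propagation.REQUIRES_NEW)
    public void initialize() {
        // setup work runs with its own commit/rollback boundary
    }
}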

From source file:org.sakaiproject.tool.assessment.facade.ItemHashUtil.java

private TransactionDefinition requireNewTransaction() {
    return new DefaultTransactionDefinition(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
}
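
This one-liner uses the DefaultTransactionDefinition(int propagationBehavior) constructor, which sets the propagation directly instead of calling setPropagationBehavior afterwards; the returned definition is then passed to PlatformTransactionManager.getTransaction(...).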

From source file:org.skife.jdbi.spring.TestSpringIntegration.java

public void testSuspendAndResumeTx() throws Exception {
    final DefaultTransactionDefinition prop_req = new DefaultTransactionDefinition();
    prop_req.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRED);
    final PlatformTransactionManager ptm = (PlatformTransactionManager) ctx.getBean("transactionManager");
    final TransactionStatus prop_req_status = ptm.getTransaction(prop_req);

    final IDBI dbi = (IDBI) ctx.getBean("dbi");

    final Handle one = DBIUtils.getHandle(dbi);

    final DefaultTransactionDefinition req_new = new DefaultTransactionDefinition();
    req_new.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);

    // REQUIRES_NEW inside PROPAGATION_REQUIRED
    final TransactionStatus req_new_status = ptm.getTransaction(req_new);

    final Handle two = DBIUtils.getHandle(dbi);
    assertNotSame(one, two);

    ptm.commit(req_new_status);
    ptm.commit(prop_req_status);
}
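
The assertNotSame check is the point of this test: because the inner transaction uses PROPAGATION_REQUIRES_NEW, the outer PROPAGATION_REQUIRED transaction is suspended and a second, distinct Handle is bound; the outer transaction resumes only after the inner one commits.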