Example usage for java.lang Exception addSuppressed

List of usage examples for java.lang Exception addSuppressed

Introduction

On this page you can find example usage for java.lang Exception addSuppressed.

Prototype

public final synchronized void addSuppressed(Throwable exception) 

Source Link

Document

Appends the specified exception to the exceptions that were suppressed in order to deliver this exception.
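Before looking at the real-world usage below, here is a minimal, self-contained sketch of the pattern those examples rely on: when an operation fails and a follow-up cleanup or compensation step also fails, the secondary exception is attached to the primary one with addSuppressed so that neither failure is lost. This sketch is not taken from the examples on this page; the class and method names (AddSuppressedDemo, copyData, closeResource) are illustrative only.

public class AddSuppressedDemo {

    public static void main(String[] args) {
        try {
            copyData();
        } catch (Exception e) {
            // The stack trace shows the primary failure plus the cleanup
            // failure under a "Suppressed:" entry.
            e.printStackTrace();
        }
    }

    static void copyData() throws Exception {
        Exception primary = null;
        try {
            // The original failure.
            throw new IllegalStateException("write failed");
        } catch (Exception e) {
            primary = e;
            throw e;
        } finally {
            try {
                closeResource();
            } catch (Exception cleanupFailure) {
                if (primary != null) {
                    // Attach the cleanup failure instead of masking the original one.
                    primary.addSuppressed(cleanupFailure);
                } else {
                    throw cleanupFailure;
                }
            }
        }
    }

    static void closeResource() throws Exception {
        // A cleanup step that also fails.
        throw new java.io.IOException("close failed");
    }
}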

Usage

From source file:edu.uci.ics.asterix.aql.translator.AqlTranslator.java

private void handleIndexDropStatement(AqlMetadataProvider metadataProvider, Statement stmt,
        IHyracksClientConnection hcc) throws Exception {

    IndexDropStatement stmtIndexDrop = (IndexDropStatement) stmt;
    String datasetName = stmtIndexDrop.getDatasetName().getValue();
    String dataverseName = getActiveDataverse(stmtIndexDrop.getDataverseName());
    ProgressState progress = ProgressState.NO_PROGRESS;
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    boolean bActiveTxn = true;
    metadataProvider.setMetadataTxnContext(mdTxnCtx);

    MetadataLockManager.INSTANCE.dropIndexBegin(dataverseName, dataverseName + "." + datasetName);

    String indexName = null;
    // For external index
    boolean dropFilesIndex = false;
    List<JobSpecification> jobsToExecute = new ArrayList<JobSpecification>();
    try {

        Dataset ds = MetadataManager.INSTANCE.getDataset(mdTxnCtx, dataverseName, datasetName);
        if (ds == null) {
            throw new AlgebricksException(
                    "There is no dataset with this name " + datasetName + " in dataverse " + dataverseName);
        }

        List<FeedConnectionId> feedConnections = FeedLifecycleListener.INSTANCE.getActiveFeedConnections(null);
        boolean resourceInUse = false;
        if (feedConnections != null && !feedConnections.isEmpty()) {
            StringBuilder builder = new StringBuilder();
            for (FeedConnectionId connection : feedConnections) {
                if (connection.getDatasetName().equals(datasetName)) {
                    resourceInUse = true;
                    builder.append(connection + "\n");
                }
            }
            if (resourceInUse) {
                throw new AsterixException(
                        "Dataset" + datasetName + " is currently being fed into by the following feeds " + "."
                                + builder.toString() + "\nOperation not supported.");
            }
        }

        if (ds.getDatasetType() == DatasetType.INTERNAL) {
            indexName = stmtIndexDrop.getIndexName().getValue();
            Index index = MetadataManager.INSTANCE.getIndex(mdTxnCtx, dataverseName, datasetName, indexName);
            if (index == null) {
                if (stmtIndexDrop.getIfExists()) {
                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                    return;
                } else {
                    throw new AlgebricksException("There is no index with this name " + indexName + ".");
                }
            }
            //#. prepare a job to drop the index in NC.
            CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dataverseName, datasetName,
                    indexName);
            jobsToExecute.add(IndexOperations.buildDropSecondaryIndexJobSpec(cds, metadataProvider, ds));

            //#. mark PendingDropOp on the existing index
            MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName, indexName);
            MetadataManager.INSTANCE.addIndex(mdTxnCtx,
                    new Index(dataverseName, datasetName, indexName, index.getIndexType(),
                            index.getKeyFieldNames(), index.getKeyFieldTypes(), index.isEnforcingKeyFileds(),
                            index.isPrimaryIndex(), IMetadataEntity.PENDING_DROP_OP));

            //#. commit the existing transaction before calling runJob.
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            bActiveTxn = false;
            progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;

            for (JobSpecification jobSpec : jobsToExecute) {
                runJob(hcc, jobSpec, true);
            }

            //#. begin a new transaction
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            bActiveTxn = true;
            metadataProvider.setMetadataTxnContext(mdTxnCtx);

            //#. finally, delete the existing index
            MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName, indexName);
        } else {
            // External dataset
            indexName = stmtIndexDrop.getIndexName().getValue();
            Index index = MetadataManager.INSTANCE.getIndex(mdTxnCtx, dataverseName, datasetName, indexName);
            if (index == null) {
                if (stmtIndexDrop.getIfExists()) {
                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                    return;
                } else {
                    throw new AlgebricksException("There is no index with this name " + indexName + ".");
                }
            } else if (ExternalIndexingOperations.isFileIndex(index)) {
                throw new AlgebricksException("Dropping a dataset's files index is not allowed.");
            }
            //#. prepare a job to drop the index in NC.
            CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dataverseName, datasetName,
                    indexName);
            jobsToExecute.add(IndexOperations.buildDropSecondaryIndexJobSpec(cds, metadataProvider, ds));
            List<Index> datasetIndexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseName,
                    datasetName);
            if (datasetIndexes.size() == 2) {
                dropFilesIndex = true;
                // only one index + the files index, we need to delete both of the indexes
                for (Index externalIndex : datasetIndexes) {
                    if (ExternalIndexingOperations.isFileIndex(externalIndex)) {
                        cds = new CompiledIndexDropStatement(dataverseName, datasetName,
                                externalIndex.getIndexName());
                        jobsToExecute.add(ExternalIndexingOperations.buildDropFilesIndexJobSpec(cds,
                                metadataProvider, ds));
                        //#. mark PendingDropOp on the existing files index
                        MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName,
                                externalIndex.getIndexName());
                        MetadataManager.INSTANCE.addIndex(mdTxnCtx,
                                new Index(dataverseName, datasetName, externalIndex.getIndexName(),
                                        externalIndex.getIndexType(), externalIndex.getKeyFieldNames(),
                                        index.getKeyFieldTypes(), index.isEnforcingKeyFileds(),
                                        externalIndex.isPrimaryIndex(), IMetadataEntity.PENDING_DROP_OP));
                    }
                }
            }

            //#. mark PendingDropOp on the existing index
            MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName, indexName);
            MetadataManager.INSTANCE.addIndex(mdTxnCtx,
                    new Index(dataverseName, datasetName, indexName, index.getIndexType(),
                            index.getKeyFieldNames(), index.getKeyFieldTypes(), index.isEnforcingKeyFileds(),
                            index.isPrimaryIndex(), IMetadataEntity.PENDING_DROP_OP));

            //#. commit the existing transaction before calling runJob.
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            bActiveTxn = false;
            progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;

            for (JobSpecification jobSpec : jobsToExecute) {
                runJob(hcc, jobSpec, true);
            }

            //#. begin a new transaction
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            bActiveTxn = true;
            metadataProvider.setMetadataTxnContext(mdTxnCtx);

            //#. finally, delete the existing index
            MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName, indexName);
            if (dropFilesIndex) {
                // delete the files index too
                MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName,
                        ExternalIndexingOperations.getFilesIndexName(datasetName));
                MetadataManager.INSTANCE.dropDatasetExternalFiles(mdTxnCtx, ds);
                ExternalDatasetsRegistry.INSTANCE.removeDatasetInfo(ds);
            }
        }
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);

    } catch (Exception e) {
        if (bActiveTxn) {
            abort(e, e, mdTxnCtx);
        }

        if (progress == ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA) {
            //#. execute compensation operations
            //   remove all the indexes in NC
            try {
                for (JobSpecification jobSpec : jobsToExecute) {
                    runJob(hcc, jobSpec, true);
                }
            } catch (Exception e2) {
                //do not throw the exception since the metadata still needs to be compensated.
                e.addSuppressed(e2);
            }

            //   remove the record from the metadata.
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            metadataProvider.setMetadataTxnContext(mdTxnCtx);
            try {
                MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), dataverseName,
                        datasetName, indexName);
                if (dropFilesIndex) {
                    MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), dataverseName,
                            datasetName, ExternalIndexingOperations.getFilesIndexName(datasetName));
                }
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            } catch (Exception e2) {
                e.addSuppressed(e2);
                abort(e, e2, mdTxnCtx);
                throw new IllegalStateException("System is inconsistent state: pending index(" + dataverseName
                        + "." + datasetName + "." + indexName + ") couldn't be removed from the metadata", e);
            }
        }

        throw e;

    } finally {
        MetadataLockManager.INSTANCE.dropIndexEnd(dataverseName, dataverseName + "." + datasetName);
    }
}

From source file:edu.uci.ics.asterix.aql.translator.AqlTranslator.java

private void handleDataverseDropStatement(AqlMetadataProvider metadataProvider, Statement stmt,
        IHyracksClientConnection hcc) throws Exception {
    DataverseDropStatement stmtDelete = (DataverseDropStatement) stmt;
    String dataverseName = stmtDelete.getDataverseName().getValue();

    ProgressState progress = ProgressState.NO_PROGRESS;
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    boolean bActiveTxn = true;
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    MetadataLockManager.INSTANCE.acquireDataverseWriteLock(dataverseName);
    List<JobSpecification> jobsToExecute = new ArrayList<JobSpecification>();
    try {

        Dataverse dv = MetadataManager.INSTANCE.getDataverse(mdTxnCtx, dataverseName);
        if (dv == null) {
            if (stmtDelete.getIfExists()) {
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                return;
            } else {
                throw new AlgebricksException("There is no dataverse with this name " + dataverseName + ".");
            }
        }

        //# disconnect all feeds from any datasets in the dataverse.
        List<FeedConnectionId> activeFeedConnections = FeedLifecycleListener.INSTANCE
                .getActiveFeedConnections(null);
        DisconnectFeedStatement disStmt = null;
        Identifier dvId = new Identifier(dataverseName);
        for (FeedConnectionId connection : activeFeedConnections) {
            FeedId feedId = connection.getFeedId();
            if (feedId.getDataverse().equals(dataverseName)) {
                disStmt = new DisconnectFeedStatement(dvId, new Identifier(feedId.getFeedName()),
                        new Identifier(connection.getDatasetName()));
                try {
                    handleDisconnectFeedStatement(metadataProvider, disStmt, hcc);
                    if (LOGGER.isLoggable(Level.INFO)) {
                        LOGGER.info("Disconnected feed " + feedId.getFeedName() + " from dataset "
                                + connection.getDatasetName());
                    }
                } catch (Exception exception) {
                    if (LOGGER.isLoggable(Level.WARNING)) {
                        LOGGER.warning("Unable to disconnect feed " + feedId.getFeedName() + " from dataset "
                                + connection.getDatasetName() + ". Encountered exception " + exception);
                    }
                }
            }
        }

        //#. prepare jobs which will drop corresponding datasets with indexes.
        List<Dataset> datasets = MetadataManager.INSTANCE.getDataverseDatasets(mdTxnCtx, dataverseName);
        for (int j = 0; j < datasets.size(); j++) {
            String datasetName = datasets.get(j).getDatasetName();
            DatasetType dsType = datasets.get(j).getDatasetType();
            if (dsType == DatasetType.INTERNAL) {
                List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseName,
                        datasetName);
                for (int k = 0; k < indexes.size(); k++) {
                    if (indexes.get(k).isSecondaryIndex()) {
                        CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dataverseName,
                                datasetName, indexes.get(k).getIndexName());
                        jobsToExecute.add(IndexOperations.buildDropSecondaryIndexJobSpec(cds, metadataProvider,
                                datasets.get(j)));
                    }
                }

                CompiledDatasetDropStatement cds = new CompiledDatasetDropStatement(dataverseName, datasetName);
                jobsToExecute.add(DatasetOperations.createDropDatasetJobSpec(cds, metadataProvider));
            } else {
                // External dataset
                List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseName,
                        datasetName);
                for (int k = 0; k < indexes.size(); k++) {
                    if (ExternalIndexingOperations.isFileIndex(indexes.get(k))) {
                        CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dataverseName,
                                datasetName, indexes.get(k).getIndexName());
                        jobsToExecute.add(ExternalIndexingOperations.buildDropFilesIndexJobSpec(cds,
                                metadataProvider, datasets.get(j)));
                    } else {
                        CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dataverseName,
                                datasetName, indexes.get(k).getIndexName());
                        jobsToExecute.add(IndexOperations.buildDropSecondaryIndexJobSpec(cds, metadataProvider,
                                datasets.get(j)));
                    }
                }
                ExternalDatasetsRegistry.INSTANCE.removeDatasetInfo(datasets.get(j));
            }
        }
        jobsToExecute.add(DataverseOperations.createDropDataverseJobSpec(dv, metadataProvider));

        //#. mark PendingDropOp on the dataverse record by
        //   first, deleting the dataverse record from the DATAVERSE_DATASET
        //   second, inserting the dataverse record with the PendingDropOp value into the DATAVERSE_DATASET
        MetadataManager.INSTANCE.dropDataverse(mdTxnCtx, dataverseName);
        MetadataManager.INSTANCE.addDataverse(mdTxnCtx,
                new Dataverse(dataverseName, dv.getDataFormat(), IMetadataEntity.PENDING_DROP_OP));

        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        bActiveTxn = false;
        progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;

        for (JobSpecification jobSpec : jobsToExecute) {
            runJob(hcc, jobSpec, true);
        }

        mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        bActiveTxn = true;
        metadataProvider.setMetadataTxnContext(mdTxnCtx);

        //#. finally, delete the dataverse.
        MetadataManager.INSTANCE.dropDataverse(mdTxnCtx, dataverseName);
        if (activeDefaultDataverse != null && activeDefaultDataverse.getDataverseName().equals(dataverseName)) {
            activeDefaultDataverse = null;
        }
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        if (bActiveTxn) {
            abort(e, e, mdTxnCtx);
        }

        if (progress == ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA) {
            if (activeDefaultDataverse != null && activeDefaultDataverse.getDataverseName().equals(dataverseName)) {
                activeDefaultDataverse = null;
            }

            //#. execute compensation operations
            //   remove all the indexes in NC
            try {
                for (JobSpecification jobSpec : jobsToExecute) {
                    runJob(hcc, jobSpec, true);
                }
            } catch (Exception e2) {
                //do not throw the exception since the metadata still needs to be compensated.
                e.addSuppressed(e2);
            }

            //   remove the record from the metadata.
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            try {
                MetadataManager.INSTANCE.dropDataverse(mdTxnCtx, dataverseName);
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            } catch (Exception e2) {
                e.addSuppressed(e2);
                abort(e, e2, mdTxnCtx);
                throw new IllegalStateException("System is inconsistent state: pending dataverse("
                        + dataverseName + ") couldn't be removed from the metadata", e);
            }
        }

        throw e;
    } finally {
        MetadataLockManager.INSTANCE.releaseDataverseWriteLock(dataverseName);
    }
}

From source file:edu.uci.ics.asterix.aql.translator.AqlTranslator.java

@SuppressWarnings("unchecked")
private void handleCreateIndexStatement(AqlMetadataProvider metadataProvider, Statement stmt,
        IHyracksClientConnection hcc) throws Exception {
    ProgressState progress = ProgressState.NO_PROGRESS;
    CreateIndexStatement stmtCreateIndex = (CreateIndexStatement) stmt;
    String dataverseName = getActiveDataverse(stmtCreateIndex.getDataverseName());
    String datasetName = stmtCreateIndex.getDatasetName().getValue();

    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    boolean bActiveTxn = true;
    metadataProvider.setMetadataTxnContext(mdTxnCtx);

    MetadataLockManager.INSTANCE.createIndexBegin(dataverseName, dataverseName + "." + datasetName);

    String indexName = null;
    JobSpecification spec = null;
    Dataset ds = null;
    // For external datasets
    ArrayList<ExternalFile> externalFilesSnapshot = null;
    boolean firstExternalDatasetIndex = false;
    boolean filesIndexReplicated = false;
    Index filesIndex = null;
    boolean datasetLocked = false;
    try {
        ds = MetadataManager.INSTANCE.getDataset(metadataProvider.getMetadataTxnContext(), dataverseName,
                datasetName);
        if (ds == null) {
            throw new AlgebricksException(
                    "There is no dataset with this name " + datasetName + " in dataverse " + dataverseName);
        }

        indexName = stmtCreateIndex.getIndexName().getValue();
        Index idx = MetadataManager.INSTANCE.getIndex(metadataProvider.getMetadataTxnContext(), dataverseName,
                datasetName, indexName);

        String itemTypeName = ds.getItemTypeName();
        Datatype dt = MetadataManager.INSTANCE.getDatatype(metadataProvider.getMetadataTxnContext(),
                dataverseName, itemTypeName);
        IAType itemType = dt.getDatatype();
        ARecordType aRecordType = (ARecordType) itemType;

        List<List<String>> indexFields = new ArrayList<List<String>>();
        List<IAType> indexFieldTypes = new ArrayList<IAType>();
        for (Pair<List<String>, TypeExpression> fieldExpr : stmtCreateIndex.getFieldExprs()) {
            IAType fieldType = null;
            boolean isOpen = aRecordType.isOpen();
            ARecordType subType = aRecordType;
            int i = 0;
            if (fieldExpr.first.size() > 1 && !isOpen) {
                for (; i < fieldExpr.first.size() - 1;) {
                    subType = (ARecordType) subType.getFieldType(fieldExpr.first.get(i));
                    i++;
                    if (subType.isOpen()) {
                        isOpen = true;
                        break;
                    }
                }
            }
            if (fieldExpr.second == null) {
                fieldType = subType.getSubFieldType(fieldExpr.first.subList(i, fieldExpr.first.size()));
            } else {
                if (!stmtCreateIndex.isEnforced())
                    throw new AlgebricksException("Cannot create typed index on \"" + fieldExpr.first
                            + "\" field without enforcing it's type");
                if (!isOpen)
                    throw new AlgebricksException("Typed index on \"" + fieldExpr.first
                            + "\" field could be created only for open datatype");
                Map<TypeSignature, IAType> typeMap = TypeTranslator.computeTypes(mdTxnCtx, fieldExpr.second,
                        indexName, dataverseName);
                TypeSignature typeSignature = new TypeSignature(dataverseName, indexName);
                fieldType = typeMap.get(typeSignature);
            }
            if (fieldType == null)
                throw new AlgebricksException("Unknown type " + fieldExpr.second);
            if (isOpen && fieldType.getTypeTag().isDerivedType())
                MetadataManager.INSTANCE.addDatatype(mdTxnCtx,
                        new Datatype(dataverseName, indexName, fieldType, false));

            indexFields.add(fieldExpr.first);
            indexFieldTypes.add(fieldType);
        }

        aRecordType.validateKeyFields(indexFields, indexFieldTypes, stmtCreateIndex.getIndexType());

        if (idx != null) {
            if (stmtCreateIndex.getIfNotExists()) {
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                return;
            } else {
                throw new AlgebricksException("An index with this name " + indexName + " already exists.");
            }
        }

        // Checks whether a user is trying to create an inverted secondary index on a dataset with a variable-length primary key.
        // Currently, we do not support this. Therefore, as a temporary solution, we print an error message and stop.
        if (stmtCreateIndex.getIndexType() == IndexType.SINGLE_PARTITION_WORD_INVIX
                || stmtCreateIndex.getIndexType() == IndexType.SINGLE_PARTITION_NGRAM_INVIX
                || stmtCreateIndex.getIndexType() == IndexType.LENGTH_PARTITIONED_WORD_INVIX
                || stmtCreateIndex.getIndexType() == IndexType.LENGTH_PARTITIONED_NGRAM_INVIX) {
            List<List<String>> partitioningKeys = DatasetUtils.getPartitioningKeys(ds);
            for (List<String> partitioningKey : partitioningKeys) {
                IAType keyType = aRecordType.getSubFieldType(partitioningKey);
                ITypeTraits typeTrait = AqlTypeTraitProvider.INSTANCE.getTypeTrait(keyType);

                // If it is not a fixed length
                if (typeTrait.getFixedLength() < 0) {
                    throw new AlgebricksException("The keyword or ngram index -" + indexName
                            + " cannot be created on the dataset -" + datasetName
                            + " due to its variable-length primary key field - " + partitioningKey);
                }

            }
        }

        if (ds.getDatasetType() == DatasetType.INTERNAL) {
            validateIfResourceIsActiveInFeed(dataverseName, datasetName);
        } else {
            // External dataset
            // Check if the dataset is indexible
            if (!ExternalIndexingOperations.isIndexible((ExternalDatasetDetails) ds.getDatasetDetails())) {
                throw new AlgebricksException(
                        "dataset using " + ((ExternalDatasetDetails) ds.getDatasetDetails()).getAdapter()
                                + " Adapter can't be indexed");
            }
            // check if the name of the index is valid
            if (!ExternalIndexingOperations.isValidIndexName(datasetName, indexName)) {
                throw new AlgebricksException("external dataset index name is invalid");
            }

            // Check if the files index exist
            filesIndex = MetadataManager.INSTANCE.getIndex(metadataProvider.getMetadataTxnContext(),
                    dataverseName, datasetName, ExternalIndexingOperations.getFilesIndexName(datasetName));
            firstExternalDatasetIndex = (filesIndex == null);
            // lock external dataset
            ExternalDatasetsRegistry.INSTANCE.buildIndexBegin(ds, firstExternalDatasetIndex);
            datasetLocked = true;
            if (firstExternalDatasetIndex) {
                // verify that no one has created an index before we acquire the lock
                filesIndex = MetadataManager.INSTANCE.getIndex(metadataProvider.getMetadataTxnContext(),
                        dataverseName, datasetName, ExternalIndexingOperations.getFilesIndexName(datasetName));
                if (filesIndex != null) {
                    ExternalDatasetsRegistry.INSTANCE.buildIndexEnd(ds, firstExternalDatasetIndex);
                    firstExternalDatasetIndex = false;
                    ExternalDatasetsRegistry.INSTANCE.buildIndexBegin(ds, firstExternalDatasetIndex);
                }
            }
            if (firstExternalDatasetIndex) {
                // Get snapshot from External File System
                externalFilesSnapshot = ExternalIndexingOperations.getSnapshotFromExternalFileSystem(ds);
                // Add an entry for the files index
                filesIndex = new Index(dataverseName, datasetName,
                        ExternalIndexingOperations.getFilesIndexName(datasetName), IndexType.BTREE,
                        ExternalIndexingOperations.FILE_INDEX_FIELD_NAMES,
                        ExternalIndexingOperations.FILE_INDEX_FIELD_TYPES, false, false,
                        IMetadataEntity.PENDING_ADD_OP);
                MetadataManager.INSTANCE.addIndex(metadataProvider.getMetadataTxnContext(), filesIndex);
                // Add files to the external files index
                for (ExternalFile file : externalFilesSnapshot) {
                    MetadataManager.INSTANCE.addExternalFile(mdTxnCtx, file);
                }
                // This is the first index for the external dataset, replicate the files index
                spec = ExternalIndexingOperations.buildFilesIndexReplicationJobSpec(ds, externalFilesSnapshot,
                        metadataProvider, true);
                if (spec == null) {
                    throw new AsterixException(
                            "Failed to create job spec for replicating Files Index For external dataset");
                }
                filesIndexReplicated = true;
                runJob(hcc, spec, true);
            }
        }

        //check whether there exists another enforced index on the same field
        if (stmtCreateIndex.isEnforced()) {
            List<Index> indexes = MetadataManager.INSTANCE
                    .getDatasetIndexes(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName);
            for (Index index : indexes) {
                if (index.getKeyFieldNames().equals(indexFields)
                        && !index.getKeyFieldTypes().equals(indexFieldTypes) && index.isEnforcingKeyFileds())
                    throw new AsterixException(
                            "Cannot create index " + indexName + " , enforced index " + index.getIndexName()
                                    + " on field \"" + StringUtils.join(indexFields, ',') + "\" already exist");
            }
        }

        //#. add a new index with PendingAddOp
        Index index = new Index(dataverseName, datasetName, indexName, stmtCreateIndex.getIndexType(),
                indexFields, indexFieldTypes, stmtCreateIndex.getGramLength(), stmtCreateIndex.isEnforced(),
                false, IMetadataEntity.PENDING_ADD_OP);
        MetadataManager.INSTANCE.addIndex(metadataProvider.getMetadataTxnContext(), index);

        ARecordType enforcedType = null;
        if (stmtCreateIndex.isEnforced()) {
            enforcedType = IntroduceSecondaryIndexInsertDeleteRule.createEnforcedType(aRecordType, index);
        }

        //#. prepare to create the index artifact in NC.
        CompiledCreateIndexStatement cis = new CompiledCreateIndexStatement(index.getIndexName(), dataverseName,
                index.getDatasetName(), index.getKeyFieldNames(), index.getKeyFieldTypes(),
                index.isEnforcingKeyFileds(), index.getGramLength(), index.getIndexType());
        spec = IndexOperations.buildSecondaryIndexCreationJobSpec(cis, aRecordType, enforcedType,
                metadataProvider);
        if (spec == null) {
            throw new AsterixException("Failed to create job spec for creating index '"
                    + stmtCreateIndex.getDatasetName() + "." + stmtCreateIndex.getIndexName() + "'");
        }
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        bActiveTxn = false;

        progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;

        //#. create the index artifact in NC.
        runJob(hcc, spec, true);

        mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        bActiveTxn = true;
        metadataProvider.setMetadataTxnContext(mdTxnCtx);

        //#. load data into the index in NC.
        cis = new CompiledCreateIndexStatement(index.getIndexName(), dataverseName, index.getDatasetName(),
                index.getKeyFieldNames(), index.getKeyFieldTypes(), index.isEnforcingKeyFileds(),
                index.getGramLength(), index.getIndexType());
        spec = IndexOperations.buildSecondaryIndexLoadingJobSpec(cis, aRecordType, enforcedType,
                metadataProvider);
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        bActiveTxn = false;

        runJob(hcc, spec, true);

        //#. begin new metadataTxn
        mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        bActiveTxn = true;
        metadataProvider.setMetadataTxnContext(mdTxnCtx);

        //#. add another new index with PendingNoOp after deleting the index with PendingAddOp
        MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName,
                indexName);
        index.setPendingOp(IMetadataEntity.PENDING_NO_OP);
        MetadataManager.INSTANCE.addIndex(metadataProvider.getMetadataTxnContext(), index);
        // add another new files index with PendingNoOp after deleting the index with PendingAddOp
        if (firstExternalDatasetIndex) {
            MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), dataverseName,
                    datasetName, filesIndex.getIndexName());
            filesIndex.setPendingOp(IMetadataEntity.PENDING_NO_OP);
            MetadataManager.INSTANCE.addIndex(metadataProvider.getMetadataTxnContext(), filesIndex);
            // update transaction timestamp
            ((ExternalDatasetDetails) ds.getDatasetDetails()).setRefreshTimestamp(new Date());
            MetadataManager.INSTANCE.updateDataset(mdTxnCtx, ds);
        }
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);

    } catch (Exception e) {
        if (bActiveTxn) {
            abort(e, e, mdTxnCtx);
        }
        // If files index was replicated for external dataset, it should be cleaned up on NC side
        if (filesIndexReplicated) {
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            bActiveTxn = true;
            CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dataverseName, datasetName,
                    ExternalIndexingOperations.getFilesIndexName(datasetName));
            try {
                JobSpecification jobSpec = ExternalIndexingOperations.buildDropFilesIndexJobSpec(cds,
                        metadataProvider, ds);
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                bActiveTxn = false;
                runJob(hcc, jobSpec, true);
            } catch (Exception e2) {
                e.addSuppressed(e2);
                if (bActiveTxn) {
                    abort(e, e2, mdTxnCtx);
                }
            }
        }

        if (progress == ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA) {
            //#. execute compensation operations
            //   remove the index in NC
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            bActiveTxn = true;
            metadataProvider.setMetadataTxnContext(mdTxnCtx);
            CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dataverseName, datasetName,
                    indexName);
            try {
                JobSpecification jobSpec = IndexOperations.buildDropSecondaryIndexJobSpec(cds, metadataProvider,
                        ds);

                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                bActiveTxn = false;
                runJob(hcc, jobSpec, true);
            } catch (Exception e2) {
                e.addSuppressed(e2);
                if (bActiveTxn) {
                    abort(e, e2, mdTxnCtx);
                }
            }

            if (firstExternalDatasetIndex) {
                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
                metadataProvider.setMetadataTxnContext(mdTxnCtx);
                try {
                    // Drop External Files from metadata
                    MetadataManager.INSTANCE.dropDatasetExternalFiles(mdTxnCtx, ds);
                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                } catch (Exception e2) {
                    e.addSuppressed(e2);
                    abort(e, e2, mdTxnCtx);
                    throw new IllegalStateException("System is inconsistent state: pending files for("
                            + dataverseName + "." + datasetName + ") couldn't be removed from the metadata", e);
                }
                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
                metadataProvider.setMetadataTxnContext(mdTxnCtx);
                try {
                    // Drop the files index from metadata
                    MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), dataverseName,
                            datasetName, ExternalIndexingOperations.getFilesIndexName(datasetName));
                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                } catch (Exception e2) {
                    e.addSuppressed(e2);
                    abort(e, e2, mdTxnCtx);
                    throw new IllegalStateException(
                            "System is in an inconsistent state: pending index(" + dataverseName + "." + datasetName
                                    + "." + ExternalIndexingOperations.getFilesIndexName(datasetName)
                                    + ") couldn't be removed from the metadata",
                            e);
                }
            }
            // remove the record from the metadata.
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            metadataProvider.setMetadataTxnContext(mdTxnCtx);
            try {
                MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), dataverseName,
                        datasetName, indexName);
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            } catch (Exception e2) {
                e.addSuppressed(e2);
                abort(e, e2, mdTxnCtx);
                throw new IllegalStateException(
                        "System is in an inconsistent state: pending index(" + dataverseName + "." + datasetName
                                + "." + indexName + ") couldn't be removed from the metadata",
                        e);
            }
        }
        throw e;
    } finally {
        MetadataLockManager.INSTANCE.createIndexEnd(dataverseName, dataverseName + "." + datasetName);
        if (datasetLocked) {
            ExternalDatasetsRegistry.INSTANCE.buildIndexEnd(ds, firstExternalDatasetIndex);
        }
    }
}

From source file:edu.uci.ics.asterix.aql.translator.AqlTranslator.java

private void handleCreateDatasetStatement(AqlMetadataProvider metadataProvider, Statement stmt,
        IHyracksClientConnection hcc) throws AsterixException, Exception {

    ProgressState progress = ProgressState.NO_PROGRESS;
    DatasetDecl dd = (DatasetDecl) stmt;
    String dataverseName = getActiveDataverse(dd.getDataverse());
    String datasetName = dd.getName().getValue();
    DatasetType dsType = dd.getDatasetType();
    String itemTypeName = dd.getItemTypeName().getValue();
    Identifier ngNameId = dd.getDatasetDetailsDecl().getNodegroupName();
    String nodegroupName = getNodeGroupName(ngNameId, dd, dataverseName);
    String compactionPolicy = dd.getDatasetDetailsDecl().getCompactionPolicy();
    Map<String, String> compactionPolicyProperties = dd.getDatasetDetailsDecl().getCompactionPolicyProperties();
    boolean defaultCompactionPolicy = (compactionPolicy == null);
    boolean temp = dd.getDatasetDetailsDecl().isTemp();

    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    boolean bActiveTxn = true;
    metadataProvider.setMetadataTxnContext(mdTxnCtx);

    MetadataLockManager.INSTANCE.createDatasetBegin(dataverseName, dataverseName + "." + itemTypeName,
            nodegroupName, compactionPolicy, dataverseName + "." + datasetName, defaultCompactionPolicy);
    Dataset dataset = null;
    try {

        IDatasetDetails datasetDetails = null;
        Dataset ds = MetadataManager.INSTANCE.getDataset(metadataProvider.getMetadataTxnContext(),
                dataverseName, datasetName);
        if (ds != null) {
            if (dd.getIfNotExists()) {
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                return;
            } else {
                throw new AlgebricksException("A dataset with this name " + datasetName + " already exists.");
            }
        }
        Datatype dt = MetadataManager.INSTANCE.getDatatype(metadataProvider.getMetadataTxnContext(),
                dataverseName, itemTypeName);
        if (dt == null) {
            throw new AlgebricksException(": type " + itemTypeName + " could not be found.");
        }
        switch (dd.getDatasetType()) {
        case INTERNAL: {
            IAType itemType = dt.getDatatype();
            if (itemType.getTypeTag() != ATypeTag.RECORD) {
                throw new AlgebricksException("Can only partition ARecord's.");
            }
            List<List<String>> partitioningExprs = ((InternalDetailsDecl) dd.getDatasetDetailsDecl())
                    .getPartitioningExprs();
            boolean autogenerated = ((InternalDetailsDecl) dd.getDatasetDetailsDecl()).isAutogenerated();
            ARecordType aRecordType = (ARecordType) itemType;
            List<IAType> partitioningTypes = aRecordType.validatePartitioningExpressions(partitioningExprs,
                    autogenerated);

            String ngName = ngNameId != null ? ngNameId.getValue()
                    : configureNodegroupForDataset(dd, dataverseName, mdTxnCtx);
            List<String> filterField = ((InternalDetailsDecl) dd.getDatasetDetailsDecl()).getFilterField();
            if (compactionPolicy == null) {
                if (filterField != null) {
                    // If the dataset has a filter and the user didn't specify a merge policy, then we will pick the
                    // correlated-prefix as the default merge policy.
                    compactionPolicy = GlobalConfig.DEFAULT_FILTERED_DATASET_COMPACTION_POLICY_NAME;
                    compactionPolicyProperties = GlobalConfig.DEFAULT_COMPACTION_POLICY_PROPERTIES;
                } else {
                    compactionPolicy = GlobalConfig.DEFAULT_COMPACTION_POLICY_NAME;
                    compactionPolicyProperties = GlobalConfig.DEFAULT_COMPACTION_POLICY_PROPERTIES;
                }
            } else {
                validateCompactionPolicy(compactionPolicy, compactionPolicyProperties, mdTxnCtx, false);
            }
            if (filterField != null) {
                aRecordType.validateFilterField(filterField);
            }
            datasetDetails = new InternalDatasetDetails(InternalDatasetDetails.FileStructure.BTREE,
                    InternalDatasetDetails.PartitioningStrategy.HASH, partitioningExprs, partitioningExprs,
                    partitioningTypes, ngName, autogenerated, compactionPolicy, compactionPolicyProperties,
                    filterField, temp);
            break;
        }
        case EXTERNAL: {
            String adapter = ((ExternalDetailsDecl) dd.getDatasetDetailsDecl()).getAdapter();
            Map<String, String> properties = ((ExternalDetailsDecl) dd.getDatasetDetailsDecl()).getProperties();

            String ngName = ngNameId != null ? ngNameId.getValue()
                    : configureNodegroupForDataset(dd, dataverseName, mdTxnCtx);
            if (compactionPolicy == null) {
                compactionPolicy = GlobalConfig.DEFAULT_COMPACTION_POLICY_NAME;
                compactionPolicyProperties = GlobalConfig.DEFAULT_COMPACTION_POLICY_PROPERTIES;
            } else {
                validateCompactionPolicy(compactionPolicy, compactionPolicyProperties, mdTxnCtx, true);
            }
            datasetDetails = new ExternalDatasetDetails(adapter, properties, ngName, new Date(),
                    ExternalDatasetTransactionState.COMMIT, compactionPolicy, compactionPolicyProperties);
            break;
        }

        }

        //#. initialize DatasetIdFactory if it is not initialized.
        if (!DatasetIdFactory.isInitialized()) {
            DatasetIdFactory.initialize(MetadataManager.INSTANCE.getMostRecentDatasetId());
        }

        //#. add a new dataset with PendingAddOp
        dataset = new Dataset(dataverseName, datasetName, itemTypeName, datasetDetails, dd.getHints(), dsType,
                DatasetIdFactory.generateDatasetId(), IMetadataEntity.PENDING_ADD_OP);
        MetadataManager.INSTANCE.addDataset(metadataProvider.getMetadataTxnContext(), dataset);

        if (dd.getDatasetType() == DatasetType.INTERNAL) {
            Dataverse dataverse = MetadataManager.INSTANCE
                    .getDataverse(metadataProvider.getMetadataTxnContext(), dataverseName);
            JobSpecification jobSpec = DatasetOperations.createDatasetJobSpec(dataverse, datasetName,
                    metadataProvider);

            //#. make metadataTxn commit before calling runJob.
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            bActiveTxn = false;
            progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;

            //#. runJob
            runJob(hcc, jobSpec, true);

            //#. begin new metadataTxn
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            bActiveTxn = true;
            metadataProvider.setMetadataTxnContext(mdTxnCtx);
        }

        //#. add a new dataset with PendingNoOp after deleting the dataset with PendingAddOp
        MetadataManager.INSTANCE.dropDataset(metadataProvider.getMetadataTxnContext(), dataverseName,
                datasetName);
        dataset.setPendingOp(IMetadataEntity.PENDING_NO_OP);
        MetadataManager.INSTANCE.addDataset(metadataProvider.getMetadataTxnContext(), dataset);
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        if (bActiveTxn) {
            abort(e, e, mdTxnCtx);
        }

        if (progress == ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA) {

            //#. execute compensation operations
            //   remove the index in NC
            //   [Notice]
            //   As long as we updated (and committed) the metadata, we should remove any effect of the job
            //   because an exception occurred during runJob.
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            bActiveTxn = true;
            metadataProvider.setMetadataTxnContext(mdTxnCtx);
            CompiledDatasetDropStatement cds = new CompiledDatasetDropStatement(dataverseName, datasetName);
            try {
                JobSpecification jobSpec = DatasetOperations.createDropDatasetJobSpec(cds, metadataProvider);
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                bActiveTxn = false;

                runJob(hcc, jobSpec, true);
            } catch (Exception e2) {
                e.addSuppressed(e2);
                if (bActiveTxn) {
                    abort(e, e2, mdTxnCtx);
                }
            }

            //   remove the record from the metadata.
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            metadataProvider.setMetadataTxnContext(mdTxnCtx);
            try {
                MetadataManager.INSTANCE.dropDataset(metadataProvider.getMetadataTxnContext(), dataverseName,
                        datasetName);
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            } catch (Exception e2) {
                e.addSuppressed(e2);
                abort(e, e2, mdTxnCtx);
                throw new IllegalStateException("System is inconsistent state: pending dataset(" + dataverseName
                        + "." + datasetName + ") couldn't be removed from the metadata", e);
            }
        }

        throw e;
    } finally {
        MetadataLockManager.INSTANCE.createDatasetEnd(dataverseName, dataverseName + "." + itemTypeName,
                nodegroupName, compactionPolicy, dataverseName + "." + datasetName, defaultCompactionPolicy);
    }
}

From source file:edu.uci.ics.asterix.aql.translator.AqlTranslator.java

private void handleExternalDatasetRefreshStatement(AqlMetadataProvider metadataProvider, Statement stmt,
        IHyracksClientConnection hcc) throws Exception {
    RefreshExternalDatasetStatement stmtRefresh = (RefreshExternalDatasetStatement) stmt;
    String dataverseName = getActiveDataverse(stmtRefresh.getDataverseName());
    String datasetName = stmtRefresh.getDatasetName().getValue();
    ExternalDatasetTransactionState transactionState = ExternalDatasetTransactionState.COMMIT;
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    MetadataLockManager.INSTANCE.refreshDatasetBegin(dataverseName, dataverseName + "." + datasetName);
    boolean bActiveTxn = true;
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    JobSpecification spec = null;
    Dataset ds = null;
    List<ExternalFile> metadataFiles = null;
    List<ExternalFile> deletedFiles = null;
    List<ExternalFile> addedFiles = null;
    List<ExternalFile> appendedFiles = null;
    List<Index> indexes = null;
    Dataset transactionDataset = null;
    boolean lockAquired = false;
    boolean success = false;
    try {
        ds = MetadataManager.INSTANCE.getDataset(metadataProvider.getMetadataTxnContext(), dataverseName,
                datasetName);

        // Dataset exists ?
        if (ds == null) {
            throw new AlgebricksException(
                    "There is no dataset with this name " + datasetName + " in dataverse " + dataverseName);
        }
        // Dataset external ?
        if (ds.getDatasetType() != DatasetType.EXTERNAL) {
            throw new AlgebricksException("dataset " + datasetName + " in dataverse " + dataverseName
                    + " is not an external dataset");
        }
        // Dataset has indexes ?
        indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseName, datasetName);
        if (indexes.size() == 0) {
            throw new AlgebricksException("External dataset " + datasetName + " in dataverse " + dataverseName
                    + " doesn't have any index");
        }

        // Record transaction time
        Date txnTime = new Date();

        // refresh lock here
        ExternalDatasetsRegistry.INSTANCE.refreshBegin(ds);
        lockAquired = true;

        // Get internal files
        metadataFiles = MetadataManager.INSTANCE.getDatasetExternalFiles(mdTxnCtx, ds);
        deletedFiles = new ArrayList<ExternalFile>();
        addedFiles = new ArrayList<ExternalFile>();
        appendedFiles = new ArrayList<ExternalFile>();

        // Compute delta
        // Now we compare snapshot with external file system
        if (ExternalIndexingOperations.isDatasetUptodate(ds, metadataFiles, addedFiles, deletedFiles,
                appendedFiles)) {
            ((ExternalDatasetDetails) ds.getDatasetDetails()).setRefreshTimestamp(txnTime);
            MetadataManager.INSTANCE.updateDataset(mdTxnCtx, ds);
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            // latch will be released in the finally clause
            return;
        }

        // At this point, we know data has changed in the external file system; record the transaction in metadata and start the refresh
        transactionDataset = ExternalIndexingOperations.createTransactionDataset(ds);
        /*
         * Remove old dataset record and replace it with a new one
         */
        MetadataManager.INSTANCE.updateDataset(mdTxnCtx, transactionDataset);

        // Add delta files to the metadata
        for (ExternalFile file : addedFiles) {
            MetadataManager.INSTANCE.addExternalFile(mdTxnCtx, file);
        }
        for (ExternalFile file : appendedFiles) {
            MetadataManager.INSTANCE.addExternalFile(mdTxnCtx, file);
        }
        for (ExternalFile file : deletedFiles) {
            MetadataManager.INSTANCE.addExternalFile(mdTxnCtx, file);
        }

        // Create the files index update job
        spec = ExternalIndexingOperations.buildFilesIndexUpdateOp(ds, metadataFiles, deletedFiles, addedFiles,
                appendedFiles, metadataProvider);

        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        bActiveTxn = false;
        transactionState = ExternalDatasetTransactionState.BEGIN;

        //run the files update job
        runJob(hcc, spec, true);

        for (Index index : indexes) {
            if (!ExternalIndexingOperations.isFileIndex(index)) {
                spec = ExternalIndexingOperations.buildIndexUpdateOp(ds, index, metadataFiles, deletedFiles,
                        addedFiles, appendedFiles, metadataProvider);
                //run the files update job
                runJob(hcc, spec, true);
            }
        }

        // all index updates have completed successfully; record the transaction state
        spec = ExternalIndexingOperations.buildCommitJob(ds, indexes, metadataProvider);

        // Acquire the write latch again -> start a transaction and record the decision to commit
        mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        metadataProvider.setMetadataTxnContext(mdTxnCtx);
        bActiveTxn = true;
        ((ExternalDatasetDetails) transactionDataset.getDatasetDetails())
                .setState(ExternalDatasetTransactionState.READY_TO_COMMIT);
        ((ExternalDatasetDetails) transactionDataset.getDatasetDetails()).setRefreshTimestamp(txnTime);
        MetadataManager.INSTANCE.updateDataset(mdTxnCtx, transactionDataset);
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        bActiveTxn = false;
        transactionState = ExternalDatasetTransactionState.READY_TO_COMMIT;
        // We don't release the latch since this job is expected to be quick
        runJob(hcc, spec, true);
        // Start a new metadata transaction to record the final state of the transaction
        mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        metadataProvider.setMetadataTxnContext(mdTxnCtx);
        bActiveTxn = true;

        for (ExternalFile file : metadataFiles) {
            if (file.getPendingOp() == ExternalFilePendingOp.PENDING_DROP_OP) {
                MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx, file);
            } else if (file.getPendingOp() == ExternalFilePendingOp.PENDING_NO_OP) {
                Iterator<ExternalFile> iterator = appendedFiles.iterator();
                while (iterator.hasNext()) {
                    ExternalFile appendedFile = iterator.next();
                    if (file.getFileName().equals(appendedFile.getFileName())) {
                        // delete existing file
                        MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx, file);
                        // delete existing appended file
                        MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx, appendedFile);
                        // add the original file with appended information
                        appendedFile.setFileNumber(file.getFileNumber());
                        appendedFile.setPendingOp(ExternalFilePendingOp.PENDING_NO_OP);
                        MetadataManager.INSTANCE.addExternalFile(mdTxnCtx, appendedFile);
                        iterator.remove();
                    }
                }
            }
        }

        // remove the deleted files delta
        for (ExternalFile file : deletedFiles) {
            MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx, file);
        }

        // insert new files
        for (ExternalFile file : addedFiles) {
            MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx, file);
            file.setPendingOp(ExternalFilePendingOp.PENDING_NO_OP);
            MetadataManager.INSTANCE.addExternalFile(mdTxnCtx, file);
        }

        // mark the transaction as complete
        ((ExternalDatasetDetails) transactionDataset.getDatasetDetails())
                .setState(ExternalDatasetTransactionState.COMMIT);
        MetadataManager.INSTANCE.updateDataset(mdTxnCtx, transactionDataset);

        // commit metadata transaction
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        success = true;
    } catch (Exception e) {
        if (bActiveTxn) {
            abort(e, e, mdTxnCtx);
        }
        if (transactionState == ExternalDatasetTransactionState.READY_TO_COMMIT) {
            throw new IllegalStateException("System is inconsistent state: commit of (" + dataverseName + "."
                    + datasetName + ") refresh couldn't carry out the commit phase", e);
        }
        if (transactionState == ExternalDatasetTransactionState.COMMIT) {
            // Nothing to do, everything should be clean
            throw e;
        }
        if (transactionState == ExternalDatasetTransactionState.BEGIN) {
            // transaction failed, need to do the following
            // clean NCs removing transaction components
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            bActiveTxn = true;
            metadataProvider.setMetadataTxnContext(mdTxnCtx);
            spec = ExternalIndexingOperations.buildAbortOp(ds, indexes, metadataProvider);
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            bActiveTxn = false;
            try {
                runJob(hcc, spec, true);
            } catch (Exception e2) {
                // This should never happen -- fix throw illegal
                e.addSuppressed(e2);
                throw new IllegalStateException("System is in inconsistent state. Failed to abort refresh", e);
            }
            // remove the delta of files
            // return the state of the dataset to committed
            try {
                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
                for (ExternalFile file : deletedFiles) {
                    MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx, file);
                }
                for (ExternalFile file : addedFiles) {
                    MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx, file);
                }
                for (ExternalFile file : appendedFiles) {
                    MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx, file);
                }
                MetadataManager.INSTANCE.updateDataset(mdTxnCtx, ds);
                // commit metadata transaction
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            } catch (Exception e2) {
                abort(e, e2, mdTxnCtx);
                e.addSuppressed(e2);
                throw new IllegalStateException("System is in inconsistent state. Failed to drop delta files",
                        e);
            }
        }
    } finally {
        if (lockAquired) {
            ExternalDatasetsRegistry.INSTANCE.refreshEnd(ds, success);
        }
        MetadataLockManager.INSTANCE.refreshDatasetEnd(dataverseName, dataverseName + "." + datasetName);
    }
}
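
The refresh handler above follows the pattern used throughout these translators: if compensation work after a primary failure fails as well, the secondary exception is attached to the primary one with addSuppressed so that neither stack trace is lost when the combined failure is rethrown. Below is a minimal, self-contained sketch of that pattern; commitWork and rollbackWork are hypothetical placeholders, not AsterixDB APIs.

public class SuppressedExceptionExample {

    public static void main(String[] args) {
        try {
            runWithCompensation();
        } catch (Exception e) {
            // prints the primary failure plus any suppressed cleanup failures
            e.printStackTrace();
        }
    }

    static void runWithCompensation() throws Exception {
        try {
            commitWork(); // primary operation, analogous to the metadata commit above
        } catch (Exception e) {
            try {
                rollbackWork(); // compensation, analogous to dropping the delta files
            } catch (Exception e2) {
                e.addSuppressed(e2); // keep the cleanup failure without hiding the original error
                throw new IllegalStateException("Compensation failed; system may be inconsistent", e);
            }
            throw e; // compensation succeeded; rethrow the original failure
        }
    }

    static void commitWork() throws Exception {
        throw new Exception("primary operation failed");
    }

    static void rollbackWork() throws Exception {
        throw new Exception("cleanup also failed");
    }
}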

From source file:org.apache.asterix.app.translator.QueryTranslator.java

public void handleCreateDatasetStatement(AqlMetadataProvider metadataProvider, Statement stmt,
        IHyracksClientConnection hcc) throws Exception {
    MutableObject<ProgressState> progress = new MutableObject<>(ProgressState.NO_PROGRESS);
    DatasetDecl dd = (DatasetDecl) stmt;
    String dataverseName = getActiveDataverse(dd.getDataverse());
    String datasetName = dd.getName().getValue();
    DatasetType dsType = dd.getDatasetType();
    String itemTypeDataverseName = getActiveDataverse(dd.getItemTypeDataverse());
    String itemTypeName = dd.getItemTypeName().getValue();
    String metaItemTypeDataverseName = getActiveDataverse(dd.getMetaItemTypeDataverse());
    String metaItemTypeName = dd.getMetaItemTypeName().getValue();
    Identifier ngNameId = dd.getNodegroupName();
    String nodegroupName = getNodeGroupName(ngNameId, dd, dataverseName);
    String compactionPolicy = dd.getCompactionPolicy();
    Map<String, String> compactionPolicyProperties = dd.getCompactionPolicyProperties();
    boolean defaultCompactionPolicy = compactionPolicy == null;
    boolean temp = dd.getDatasetDetailsDecl().isTemp();

    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    boolean bActiveTxn = true;
    metadataProvider.setMetadataTxnContext(mdTxnCtx);

    MetadataLockManager.INSTANCE.createDatasetBegin(dataverseName, itemTypeDataverseName,
            itemTypeDataverseName + "." + itemTypeName, metaItemTypeDataverseName,
            metaItemTypeDataverseName + "." + metaItemTypeName, nodegroupName, compactionPolicy,
            dataverseName + "." + datasetName, defaultCompactionPolicy);
    Dataset dataset = null;
    try {

        IDatasetDetails datasetDetails = null;
        Dataset ds = MetadataManager.INSTANCE.getDataset(metadataProvider.getMetadataTxnContext(),
                dataverseName, datasetName);
        if (ds != null) {
            if (dd.getIfNotExists()) {
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                return;
            } else {
                throw new AlgebricksException("A dataset with this name " + datasetName + " already exists.");
            }
        }
        Datatype dt = MetadataManager.INSTANCE.getDatatype(metadataProvider.getMetadataTxnContext(),
                itemTypeDataverseName, itemTypeName);
        if (dt == null) {
            throw new AlgebricksException(": type " + itemTypeName + " could not be found.");
        }
        String ngName = ngNameId != null ? ngNameId.getValue()
                : configureNodegroupForDataset(dd, dataverseName, mdTxnCtx);

        if (compactionPolicy == null) {
            compactionPolicy = GlobalConfig.DEFAULT_COMPACTION_POLICY_NAME;
            compactionPolicyProperties = GlobalConfig.DEFAULT_COMPACTION_POLICY_PROPERTIES;
        } else {
            validateCompactionPolicy(compactionPolicy, compactionPolicyProperties, mdTxnCtx, false);
        }
        switch (dd.getDatasetType()) {
        case INTERNAL:
            IAType itemType = dt.getDatatype();
            if (itemType.getTypeTag() != ATypeTag.RECORD) {
                throw new AlgebricksException("Dataset type has to be a record type.");
            }

            IAType metaItemType = null;
            if (metaItemTypeDataverseName != null && metaItemTypeName != null) {
                metaItemType = metadataProvider.findType(metaItemTypeDataverseName, metaItemTypeName);
            }
            if (metaItemType != null && metaItemType.getTypeTag() != ATypeTag.RECORD) {
                throw new AlgebricksException("Dataset meta type has to be a record type.");
            }
            ARecordType metaRecType = (ARecordType) metaItemType;

            List<List<String>> partitioningExprs = ((InternalDetailsDecl) dd.getDatasetDetailsDecl())
                    .getPartitioningExprs();
            List<Integer> keySourceIndicators = ((InternalDetailsDecl) dd.getDatasetDetailsDecl())
                    .getKeySourceIndicators();
            boolean autogenerated = ((InternalDetailsDecl) dd.getDatasetDetailsDecl()).isAutogenerated();
            ARecordType aRecordType = (ARecordType) itemType;
            List<IAType> partitioningTypes = ValidateUtil.validatePartitioningExpressions(aRecordType,
                    metaRecType, partitioningExprs, keySourceIndicators, autogenerated);

            List<String> filterField = ((InternalDetailsDecl) dd.getDatasetDetailsDecl()).getFilterField();
            if (filterField != null) {
                ValidateUtil.validateFilterField(aRecordType, filterField);
            }
            if (compactionPolicy == null && filterField != null) {
                // If the dataset has a filter and the user didn't specify a merge
                // policy, then we will pick the
                // correlated-prefix as the default merge policy.
                compactionPolicy = GlobalConfig.DEFAULT_FILTERED_DATASET_COMPACTION_POLICY_NAME;
                compactionPolicyProperties = GlobalConfig.DEFAULT_COMPACTION_POLICY_PROPERTIES;
            }
            datasetDetails = new InternalDatasetDetails(InternalDatasetDetails.FileStructure.BTREE,
                    InternalDatasetDetails.PartitioningStrategy.HASH, partitioningExprs, partitioningExprs,
                    keySourceIndicators, partitioningTypes, autogenerated, filterField, temp);
            break;
        case EXTERNAL:
            String adapter = ((ExternalDetailsDecl) dd.getDatasetDetailsDecl()).getAdapter();
            Map<String, String> properties = ((ExternalDetailsDecl) dd.getDatasetDetailsDecl()).getProperties();

            datasetDetails = new ExternalDatasetDetails(adapter, properties, new Date(),
                    ExternalDatasetTransactionState.COMMIT);
            break;
        default:
            throw new AsterixException("Unknown datatype " + dd.getDatasetType());
        }

        // #. initialize DatasetIdFactory if it is not initialized.
        if (!DatasetIdFactory.isInitialized()) {
            DatasetIdFactory.initialize(MetadataManager.INSTANCE.getMostRecentDatasetId());
        }

        // #. add a new dataset with PendingAddOp
        dataset = new Dataset(dataverseName, datasetName, itemTypeDataverseName, itemTypeName,
                metaItemTypeDataverseName, metaItemTypeName, ngName, compactionPolicy,
                compactionPolicyProperties, datasetDetails, dd.getHints(), dsType,
                DatasetIdFactory.generateDatasetId(), IMetadataEntity.PENDING_ADD_OP);
        MetadataManager.INSTANCE.addDataset(metadataProvider.getMetadataTxnContext(), dataset);

        if (dd.getDatasetType() == DatasetType.INTERNAL) {
            Dataverse dataverse = MetadataManager.INSTANCE
                    .getDataverse(metadataProvider.getMetadataTxnContext(), dataverseName);
            JobSpecification jobSpec = DatasetOperations.createDatasetJobSpec(dataverse, datasetName,
                    metadataProvider);

            // #. make metadataTxn commit before calling runJob.
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            bActiveTxn = false;
            progress.setValue(ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA);

            // #. runJob
            JobUtils.runJob(hcc, jobSpec, true);

            // #. begin new metadataTxn
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            bActiveTxn = true;
            metadataProvider.setMetadataTxnContext(mdTxnCtx);
        }

        // #. add a new dataset with PendingNoOp after deleting the dataset with PendingAddOp
        MetadataManager.INSTANCE.dropDataset(metadataProvider.getMetadataTxnContext(), dataverseName,
                datasetName);
        dataset.setPendingOp(IMetadataEntity.PENDING_NO_OP);
        MetadataManager.INSTANCE.addDataset(metadataProvider.getMetadataTxnContext(), dataset);
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        if (bActiveTxn) {
            abort(e, e, mdTxnCtx);
        }

        if (progress.getValue() == ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA) {

            // #. execute compensation operations
            // remove the index in NC
            // [Notice]
            // As long as we have updated (and committed) the metadata, we should remove any effect of the job
            // because an exception occurred during runJob.
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            bActiveTxn = true;
            metadataProvider.setMetadataTxnContext(mdTxnCtx);
            CompiledDatasetDropStatement cds = new CompiledDatasetDropStatement(dataverseName, datasetName);
            try {
                JobSpecification jobSpec = DatasetOperations.createDropDatasetJobSpec(cds, metadataProvider);
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                bActiveTxn = false;
                JobUtils.runJob(hcc, jobSpec, true);
            } catch (Exception e2) {
                e.addSuppressed(e2);
                if (bActiveTxn) {
                    abort(e, e2, mdTxnCtx);
                }
            }

            // remove the record from the metadata.
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            metadataProvider.setMetadataTxnContext(mdTxnCtx);
            try {
                MetadataManager.INSTANCE.dropDataset(metadataProvider.getMetadataTxnContext(), dataverseName,
                        datasetName);
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            } catch (Exception e2) {
                e.addSuppressed(e2);
                abort(e, e2, mdTxnCtx);
                throw new IllegalStateException("System is inconsistent state: pending dataset(" + dataverseName
                        + "." + datasetName + ") couldn't be removed from the metadata", e);
            }
        }

        throw e;
    } finally {
        MetadataLockManager.INSTANCE.createDatasetEnd(dataverseName, itemTypeDataverseName,
                itemTypeDataverseName + "." + itemTypeName, metaItemTypeDataverseName,
                metaItemTypeDataverseName + "." + metaItemTypeName, nodegroupName, compactionPolicy,
                dataverseName + "." + datasetName, defaultCompactionPolicy);
    }
}

From source file:org.apache.asterix.app.translator.QueryTranslator.java

protected void handleCreateIndexStatement(AqlMetadataProvider metadataProvider, Statement stmt,
        IHyracksClientConnection hcc) throws Exception {
    ProgressState progress = ProgressState.NO_PROGRESS;
    CreateIndexStatement stmtCreateIndex = (CreateIndexStatement) stmt;
    String dataverseName = getActiveDataverse(stmtCreateIndex.getDataverseName());
    String datasetName = stmtCreateIndex.getDatasetName().getValue();
    List<Integer> keySourceIndicators = stmtCreateIndex.getFieldSourceIndicators();

    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    boolean bActiveTxn = true;
    metadataProvider.setMetadataTxnContext(mdTxnCtx);

    MetadataLockManager.INSTANCE.createIndexBegin(dataverseName, dataverseName + "." + datasetName);

    String indexName = null;
    JobSpecification spec = null;
    Dataset ds = null;
    // For external datasets
    ArrayList<ExternalFile> externalFilesSnapshot = null;
    boolean firstExternalDatasetIndex = false;
    boolean filesIndexReplicated = false;
    Index filesIndex = null;
    boolean datasetLocked = false;
    try {
        ds = MetadataManager.INSTANCE.getDataset(metadataProvider.getMetadataTxnContext(), dataverseName,
                datasetName);
        if (ds == null) {
            throw new AlgebricksException(
                    "There is no dataset with this name " + datasetName + " in dataverse " + dataverseName);
        }

        indexName = stmtCreateIndex.getIndexName().getValue();
        Index idx = MetadataManager.INSTANCE.getIndex(metadataProvider.getMetadataTxnContext(), dataverseName,
                datasetName, indexName);
        Datatype dt = MetadataManager.INSTANCE.getDatatype(metadataProvider.getMetadataTxnContext(),
                ds.getItemTypeDataverseName(), ds.getItemTypeName());
        ARecordType aRecordType = (ARecordType) dt.getDatatype();
        ARecordType metaRecordType = null;
        if (ds.hasMetaPart()) {
            Datatype metaDt = MetadataManager.INSTANCE.getDatatype(metadataProvider.getMetadataTxnContext(),
                    ds.getMetaItemTypeDataverseName(), ds.getMetaItemTypeName());
            metaRecordType = (ARecordType) metaDt.getDatatype();
        }

        List<List<String>> indexFields = new ArrayList<>();
        List<IAType> indexFieldTypes = new ArrayList<>();
        int keyIndex = 0;
        for (Pair<List<String>, TypeExpression> fieldExpr : stmtCreateIndex.getFieldExprs()) {
            IAType fieldType = null;
            ARecordType subType = KeyFieldTypeUtils.chooseSource(keySourceIndicators, keyIndex, aRecordType,
                    metaRecordType);
            boolean isOpen = subType.isOpen();
            int i = 0;
            if (fieldExpr.first.size() > 1 && !isOpen) {
                while (i < fieldExpr.first.size() - 1 && !isOpen) {
                    subType = (ARecordType) subType.getFieldType(fieldExpr.first.get(i));
                    i++;
                    isOpen = subType.isOpen();
                }
            }
            if (fieldExpr.second == null) {
                fieldType = subType.getSubFieldType(fieldExpr.first.subList(i, fieldExpr.first.size()));
            } else {
                if (!stmtCreateIndex.isEnforced()) {
                    throw new AlgebricksException("Cannot create typed index on \"" + fieldExpr.first
                            + "\" field without enforcing it's type");
                }
                if (!isOpen) {
                    throw new AlgebricksException("Typed index on \"" + fieldExpr.first
                            + "\" field could be created only for open datatype");
                }
                if (stmtCreateIndex.hasMetaField()) {
                    throw new AlgebricksException("Typed open index can only be created on the record part");
                }
                Map<TypeSignature, IAType> typeMap = TypeTranslator.computeTypes(mdTxnCtx, fieldExpr.second,
                        indexName, dataverseName);
                TypeSignature typeSignature = new TypeSignature(dataverseName, indexName);
                fieldType = typeMap.get(typeSignature);
            }
            if (fieldType == null) {
                throw new AlgebricksException(
                        "Unknown type " + (fieldExpr.second == null ? fieldExpr.first : fieldExpr.second));
            }

            indexFields.add(fieldExpr.first);
            indexFieldTypes.add(fieldType);
            ++keyIndex;
        }

        ValidateUtil.validateKeyFields(aRecordType, metaRecordType, indexFields, keySourceIndicators,
                indexFieldTypes, stmtCreateIndex.getIndexType());

        if (idx != null) {
            if (stmtCreateIndex.getIfNotExists()) {
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                return;
            } else {
                throw new AlgebricksException("An index with this name " + indexName + " already exists.");
            }
        }

        // Checks whether a user is trying to create an inverted secondary index on a dataset
        // with a variable-length primary key.
        // Currently, we do not support this. Therefore, as a temporary solution, we print an
        // error message and stop.
        if (stmtCreateIndex.getIndexType() == IndexType.SINGLE_PARTITION_WORD_INVIX
                || stmtCreateIndex.getIndexType() == IndexType.SINGLE_PARTITION_NGRAM_INVIX
                || stmtCreateIndex.getIndexType() == IndexType.LENGTH_PARTITIONED_WORD_INVIX
                || stmtCreateIndex.getIndexType() == IndexType.LENGTH_PARTITIONED_NGRAM_INVIX) {
            List<List<String>> partitioningKeys = DatasetUtils.getPartitioningKeys(ds);
            for (List<String> partitioningKey : partitioningKeys) {
                IAType keyType = aRecordType.getSubFieldType(partitioningKey);
                ITypeTraits typeTrait = AqlTypeTraitProvider.INSTANCE.getTypeTrait(keyType);

                // If it is not a fixed length
                if (typeTrait.getFixedLength() < 0) {
                    throw new AlgebricksException("The keyword or ngram index -" + indexName
                            + " cannot be created on the dataset -" + datasetName
                            + " due to its variable-length primary key field - " + partitioningKey);
                }

            }
        }

        if (ds.getDatasetType() == DatasetType.INTERNAL) {
            validateIfResourceIsActiveInFeed(dataverseName, datasetName);
        } else {
            // External dataset
            // Check if the dataset is indexible
            if (!ExternalIndexingOperations.isIndexible((ExternalDatasetDetails) ds.getDatasetDetails())) {
                throw new AlgebricksException(
                        "dataset using " + ((ExternalDatasetDetails) ds.getDatasetDetails()).getAdapter()
                                + " Adapter can't be indexed");
            }
            // Check if the name of the index is valid
            if (!ExternalIndexingOperations.isValidIndexName(datasetName, indexName)) {
                throw new AlgebricksException("external dataset index name is invalid");
            }

            // Check if the files index exist
            filesIndex = MetadataManager.INSTANCE.getIndex(metadataProvider.getMetadataTxnContext(),
                    dataverseName, datasetName, ExternalIndexingOperations.getFilesIndexName(datasetName));
            firstExternalDatasetIndex = filesIndex == null;
            // Lock external dataset
            ExternalDatasetsRegistry.INSTANCE.buildIndexBegin(ds, firstExternalDatasetIndex);
            datasetLocked = true;
            if (firstExternalDatasetIndex) {
                // Verify that no one has created an index before we acquire the lock
                filesIndex = MetadataManager.INSTANCE.getIndex(metadataProvider.getMetadataTxnContext(),
                        dataverseName, datasetName, ExternalIndexingOperations.getFilesIndexName(datasetName));
                if (filesIndex != null) {
                    ExternalDatasetsRegistry.INSTANCE.buildIndexEnd(ds, firstExternalDatasetIndex);
                    firstExternalDatasetIndex = false;
                    ExternalDatasetsRegistry.INSTANCE.buildIndexBegin(ds, firstExternalDatasetIndex);
                }
            }
            if (firstExternalDatasetIndex) {
                // Get snapshot from External File System
                externalFilesSnapshot = ExternalIndexingOperations.getSnapshotFromExternalFileSystem(ds);
                // Add an entry for the files index
                filesIndex = new Index(dataverseName, datasetName,
                        ExternalIndexingOperations.getFilesIndexName(datasetName), IndexType.BTREE,
                        ExternalIndexingOperations.FILE_INDEX_FIELD_NAMES, null,
                        ExternalIndexingOperations.FILE_INDEX_FIELD_TYPES, false, false,
                        IMetadataEntity.PENDING_ADD_OP);
                MetadataManager.INSTANCE.addIndex(metadataProvider.getMetadataTxnContext(), filesIndex);
                // Add files to the external files index
                for (ExternalFile file : externalFilesSnapshot) {
                    MetadataManager.INSTANCE.addExternalFile(mdTxnCtx, file);
                }
                // This is the first index for the external dataset, replicate the files index
                spec = ExternalIndexingOperations.buildFilesIndexReplicationJobSpec(ds, externalFilesSnapshot,
                        metadataProvider, true);
                if (spec == null) {
                    throw new AsterixException(
                            "Failed to create job spec for replicating Files Index For external dataset");
                }
                filesIndexReplicated = true;
                JobUtils.runJob(hcc, spec, true);
            }
        }

        // check whether there exists another enforced index on the same field
        if (stmtCreateIndex.isEnforced()) {
            List<Index> indexes = MetadataManager.INSTANCE
                    .getDatasetIndexes(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName);
            for (Index index : indexes) {
                if (index.getKeyFieldNames().equals(indexFields)
                        && !index.getKeyFieldTypes().equals(indexFieldTypes) && index.isEnforcingKeyFileds()) {
                    throw new AsterixException("Cannot create index " + indexName + " , enforced index "
                            + index.getIndexName() + " on field \"" + StringUtils.join(indexFields, ',')
                            + "\" is already defined with type \"" + index.getKeyFieldTypes() + "\"");
                }
            }
        }

        // #. add a new index with PendingAddOp
        Index index = new Index(dataverseName, datasetName, indexName, stmtCreateIndex.getIndexType(),
                indexFields, keySourceIndicators, indexFieldTypes, stmtCreateIndex.getGramLength(),
                stmtCreateIndex.isEnforced(), false, IMetadataEntity.PENDING_ADD_OP);
        MetadataManager.INSTANCE.addIndex(metadataProvider.getMetadataTxnContext(), index);

        ARecordType enforcedType = null;
        if (stmtCreateIndex.isEnforced()) {
            enforcedType = createEnforcedType(aRecordType, Lists.newArrayList(index));
        }

        // #. prepare to create the index artifact in NC.
        CompiledCreateIndexStatement cis = new CompiledCreateIndexStatement(index.getIndexName(), dataverseName,
                index.getDatasetName(), index.getKeyFieldNames(), index.getKeyFieldTypes(),
                index.isEnforcingKeyFileds(), index.getGramLength(), index.getIndexType());
        spec = IndexOperations.buildSecondaryIndexCreationJobSpec(cis, aRecordType, metaRecordType,
                keySourceIndicators, enforcedType, metadataProvider);
        if (spec == null) {
            throw new AsterixException("Failed to create job spec for creating index '"
                    + stmtCreateIndex.getDatasetName() + "." + stmtCreateIndex.getIndexName() + "'");
        }
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        bActiveTxn = false;

        progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;

        // #. create the index artifact in NC.
        JobUtils.runJob(hcc, spec, true);

        mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        bActiveTxn = true;
        metadataProvider.setMetadataTxnContext(mdTxnCtx);

        // #. load data into the index in NC.
        cis = new CompiledCreateIndexStatement(index.getIndexName(), dataverseName, index.getDatasetName(),
                index.getKeyFieldNames(), index.getKeyFieldTypes(), index.isEnforcingKeyFileds(),
                index.getGramLength(), index.getIndexType());

        spec = IndexOperations.buildSecondaryIndexLoadingJobSpec(cis, aRecordType, metaRecordType,
                keySourceIndicators, enforcedType, metadataProvider);
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        bActiveTxn = false;

        JobUtils.runJob(hcc, spec, true);

        // #. begin new metadataTxn
        mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        bActiveTxn = true;
        metadataProvider.setMetadataTxnContext(mdTxnCtx);

        // #. add another new index with PendingNoOp after deleting the index with PendingAddOp
        MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName,
                indexName);
        index.setPendingOp(IMetadataEntity.PENDING_NO_OP);
        MetadataManager.INSTANCE.addIndex(metadataProvider.getMetadataTxnContext(), index);
        // add another new files index with PendingNoOp after deleting the index with
        // PendingAddOp
        if (firstExternalDatasetIndex) {
            MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), dataverseName,
                    datasetName, filesIndex.getIndexName());
            filesIndex.setPendingOp(IMetadataEntity.PENDING_NO_OP);
            MetadataManager.INSTANCE.addIndex(metadataProvider.getMetadataTxnContext(), filesIndex);
            // update transaction timestamp
            ((ExternalDatasetDetails) ds.getDatasetDetails()).setRefreshTimestamp(new Date());
            MetadataManager.INSTANCE.updateDataset(mdTxnCtx, ds);
        }
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);

    } catch (Exception e) {
        if (bActiveTxn) {
            abort(e, e, mdTxnCtx);
        }
        // If files index was replicated for external dataset, it should be cleaned up on NC side
        if (filesIndexReplicated) {
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            bActiveTxn = true;
            CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dataverseName, datasetName,
                    ExternalIndexingOperations.getFilesIndexName(datasetName));
            try {
                JobSpecification jobSpec = ExternalIndexingOperations.buildDropFilesIndexJobSpec(cds,
                        metadataProvider, ds);
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                bActiveTxn = false;
                JobUtils.runJob(hcc, jobSpec, true);
            } catch (Exception e2) {
                e.addSuppressed(e2);
                if (bActiveTxn) {
                    abort(e, e2, mdTxnCtx);
                }
            }
        }

        if (progress == ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA) {
            // #. execute compensation operations
            // remove the index in NC
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            bActiveTxn = true;
            metadataProvider.setMetadataTxnContext(mdTxnCtx);
            CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dataverseName, datasetName,
                    indexName);
            try {
                JobSpecification jobSpec = IndexOperations.buildDropSecondaryIndexJobSpec(cds, metadataProvider,
                        ds);

                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                bActiveTxn = false;
                JobUtils.runJob(hcc, jobSpec, true);
            } catch (Exception e2) {
                e.addSuppressed(e2);
                if (bActiveTxn) {
                    abort(e, e2, mdTxnCtx);
                }
            }

            if (firstExternalDatasetIndex) {
                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
                metadataProvider.setMetadataTxnContext(mdTxnCtx);
                try {
                    // Drop External Files from metadata
                    MetadataManager.INSTANCE.dropDatasetExternalFiles(mdTxnCtx, ds);
                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                } catch (Exception e2) {
                    e.addSuppressed(e2);
                    abort(e, e2, mdTxnCtx);
                    throw new IllegalStateException("System is inconsistent state: pending files for("
                            + dataverseName + "." + datasetName + ") couldn't be removed from the metadata", e);
                }
                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
                metadataProvider.setMetadataTxnContext(mdTxnCtx);
                try {
                    // Drop the files index from metadata
                    MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), dataverseName,
                            datasetName, ExternalIndexingOperations.getFilesIndexName(datasetName));
                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                } catch (Exception e2) {
                    e.addSuppressed(e2);
                    abort(e, e2, mdTxnCtx);
                    throw new IllegalStateException(
                            "System is inconsistent state: pending index(" + dataverseName + "." + datasetName
                                    + "." + ExternalIndexingOperations.getFilesIndexName(datasetName)
                                    + ") couldn't be removed from the metadata",
                            e);
                }
            }
            // remove the record from the metadata.
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            metadataProvider.setMetadataTxnContext(mdTxnCtx);
            try {
                MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), dataverseName,
                        datasetName, indexName);
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            } catch (Exception e2) {
                e.addSuppressed(e2);
                abort(e, e2, mdTxnCtx);
                throw new IllegalStateException(
                        "System is in inconsistent state: pending index(" + dataverseName + "." + datasetName
                                + "." + indexName + ") couldn't be removed from the metadata",
                        e);
            }
        }
        throw e;
    } finally {
        MetadataLockManager.INSTANCE.createIndexEnd(dataverseName, dataverseName + "." + datasetName);
        if (datasetLocked) {
            ExternalDatasetsRegistry.INSTANCE.buildIndexEnd(ds, firstExternalDatasetIndex);
        }
    }
}
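
Note the two distinct uses of addSuppressed in the compensation path above: failures of best-effort cleanup jobs are suppressed onto the primary exception and execution continues so the metadata can still be compensated, while a failure of the mandatory metadata cleanup is suppressed and then escalated as an IllegalStateException. Below is a compact sketch of both behaviors; the helper names are purely illustrative, not AsterixDB APIs.

import java.util.Arrays;
import java.util.List;

public class CompensationSketch {

    public static void main(String[] args) {
        Exception primary = new Exception("index creation failed");

        // Best-effort cleanups: record each failure as suppressed, but keep going.
        List<Runnable> bestEffortCleanups = Arrays.asList(
                () -> { throw new RuntimeException("drop-index job failed"); },
                () -> { /* this cleanup succeeds */ });
        for (Runnable cleanup : bestEffortCleanups) {
            try {
                cleanup.run();
            } catch (RuntimeException e2) {
                primary.addSuppressed(e2);
            }
        }

        // Mandatory cleanup: a failure here would leave the system inconsistent, so escalate.
        try {
            dropPendingMetadataRecord();
        } catch (RuntimeException e2) {
            primary.addSuppressed(e2);
            throw new IllegalStateException("pending record couldn't be removed", primary);
        }

        System.out.println("suppressed cleanup failures: " + primary.getSuppressed().length);
    }

    // Hypothetical stand-in for dropping the pending metadata record.
    static void dropPendingMetadataRecord() {
        // succeeds in this sketch
    }
}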

From source file:org.apache.asterix.app.translator.QueryTranslator.java

protected void handleDataverseDropStatement(AqlMetadataProvider metadataProvider, Statement stmt,
        IHyracksClientConnection hcc) throws Exception {
    DataverseDropStatement stmtDelete = (DataverseDropStatement) stmt;
    String dataverseName = stmtDelete.getDataverseName().getValue();
    if (dataverseName.equals(MetadataBuiltinEntities.DEFAULT_DATAVERSE_NAME)) {
        throw new HyracksDataException(
                MetadataBuiltinEntities.DEFAULT_DATAVERSE_NAME + " dataverse can't be dropped");
    }

    ProgressState progress = ProgressState.NO_PROGRESS;
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    boolean bActiveTxn = true;
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    MetadataLockManager.INSTANCE.acquireDataverseWriteLock(dataverseName);
    List<JobSpecification> jobsToExecute = new ArrayList<>();
    try {
        Dataverse dv = MetadataManager.INSTANCE.getDataverse(mdTxnCtx, dataverseName);
        if (dv == null) {
            if (stmtDelete.getIfExists()) {
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                return;
            } else {
                throw new AlgebricksException("There is no dataverse with this name " + dataverseName + ".");
            }
        }
        // # disconnect all feeds from any datasets in the dataverse.
        IActiveEntityEventsListener[] activeListeners = ActiveJobNotificationHandler.INSTANCE
                .getEventListeners();
        Identifier dvId = new Identifier(dataverseName);
        for (IActiveEntityEventsListener listener : activeListeners) {
            EntityId activeEntityId = listener.getEntityId();
            if (activeEntityId.getExtensionName().equals(Feed.EXTENSION_NAME)
                    && activeEntityId.getDataverse().equals(dataverseName)) {
                FeedEventsListener feedEventListener = (FeedEventsListener) listener;
                FeedConnectionId[] connections = feedEventListener.getConnections();
                for (FeedConnectionId conn : connections) {
                    disconnectFeedBeforeDelete(dvId, activeEntityId, conn, metadataProvider, hcc);
                }
                // prepare job to remove feed log storage
                jobsToExecute.add(FeedOperations.buildRemoveFeedStorageJob(MetadataManager.INSTANCE
                        .getFeed(mdTxnCtx, dataverseName, activeEntityId.getEntityName())));
            }
        }

        // #. prepare jobs which will drop corresponding datasets with indexes.
        List<Dataset> datasets = MetadataManager.INSTANCE.getDataverseDatasets(mdTxnCtx, dataverseName);
        for (int j = 0; j < datasets.size(); j++) {
            String datasetName = datasets.get(j).getDatasetName();
            DatasetType dsType = datasets.get(j).getDatasetType();
            if (dsType == DatasetType.INTERNAL) {
                List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseName,
                        datasetName);
                for (int k = 0; k < indexes.size(); k++) {
                    if (indexes.get(k).isSecondaryIndex()) {
                        CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dataverseName,
                                datasetName, indexes.get(k).getIndexName());
                        jobsToExecute.add(IndexOperations.buildDropSecondaryIndexJobSpec(cds, metadataProvider,
                                datasets.get(j)));
                    }
                }

                CompiledDatasetDropStatement cds = new CompiledDatasetDropStatement(dataverseName, datasetName);
                jobsToExecute.add(DatasetOperations.createDropDatasetJobSpec(cds, metadataProvider));
            } else {
                // External dataset
                List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseName,
                        datasetName);
                for (int k = 0; k < indexes.size(); k++) {
                    if (ExternalIndexingOperations.isFileIndex(indexes.get(k))) {
                        CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dataverseName,
                                datasetName, indexes.get(k).getIndexName());
                        jobsToExecute.add(ExternalIndexingOperations.buildDropFilesIndexJobSpec(cds,
                                metadataProvider, datasets.get(j)));
                    } else {
                        CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dataverseName,
                                datasetName, indexes.get(k).getIndexName());
                        jobsToExecute.add(IndexOperations.buildDropSecondaryIndexJobSpec(cds, metadataProvider,
                                datasets.get(j)));
                    }
                }
                ExternalDatasetsRegistry.INSTANCE.removeDatasetInfo(datasets.get(j));
            }
        }
        jobsToExecute.add(DataverseOperations.createDropDataverseJobSpec(dv, metadataProvider));
        // #. mark PendingDropOp on the dataverse record by
        // first, deleting the dataverse record from the DATAVERSE_DATASET
        // second, inserting the dataverse record with the PendingDropOp value into the
        // DATAVERSE_DATASET
        MetadataManager.INSTANCE.dropDataverse(mdTxnCtx, dataverseName);
        MetadataManager.INSTANCE.addDataverse(mdTxnCtx,
                new Dataverse(dataverseName, dv.getDataFormat(), IMetadataEntity.PENDING_DROP_OP));

        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        bActiveTxn = false;
        progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;

        for (JobSpecification jobSpec : jobsToExecute) {
            JobUtils.runJob(hcc, jobSpec, true);
        }

        mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        bActiveTxn = true;
        metadataProvider.setMetadataTxnContext(mdTxnCtx);

        // #. finally, delete the dataverse.
        MetadataManager.INSTANCE.dropDataverse(mdTxnCtx, dataverseName);
        if (activeDefaultDataverse != null && activeDefaultDataverse.getDataverseName().equals(dataverseName)) {
            activeDefaultDataverse = null;
        }
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        if (bActiveTxn) {
            abort(e, e, mdTxnCtx);
        }

        if (progress == ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA) {
            if (activeDefaultDataverse != null && activeDefaultDataverse.getDataverseName().equals(dataverseName)) {
                activeDefaultDataverse = null;
            }

            // #. execute compensation operations
            // remove the all indexes in NC
            try {
                for (JobSpecification jobSpec : jobsToExecute) {
                    JobUtils.runJob(hcc, jobSpec, true);
                }
            } catch (Exception e2) {
                // do not throw the exception since the metadata still needs to be compensated.
                e.addSuppressed(e2);
            }

            // remove the record from the metadata.
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            try {
                MetadataManager.INSTANCE.dropDataverse(mdTxnCtx, dataverseName);
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            } catch (Exception e2) {
                e.addSuppressed(e2);
                abort(e, e2, mdTxnCtx);
                throw new IllegalStateException("System is inconsistent state: pending dataverse("
                        + dataverseName + ") couldn't be removed from the metadata", e);
            }
        }

        throw e;
    } finally {
        MetadataLockManager.INSTANCE.releaseDataverseWriteLock(dataverseName);
    }
}

From source file:org.apache.asterix.app.translator.QueryTranslator.java

public void handleDatasetDropStatement(AqlMetadataProvider metadataProvider, Statement stmt,
        IHyracksClientConnection hcc) throws Exception {
    DropDatasetStatement stmtDelete = (DropDatasetStatement) stmt;
    String dataverseName = getActiveDataverse(stmtDelete.getDataverseName());
    String datasetName = stmtDelete.getDatasetName().getValue();
    MutableObject<ProgressState> progress = new MutableObject<>(ProgressState.NO_PROGRESS);
    MutableObject<MetadataTransactionContext> mdTxnCtx = new MutableObject<>(
            MetadataManager.INSTANCE.beginTransaction());
    MutableBoolean bActiveTxn = new MutableBoolean(true);
    metadataProvider.setMetadataTxnContext(mdTxnCtx.getValue());
    MetadataLockManager.INSTANCE.dropDatasetBegin(dataverseName, dataverseName + "." + datasetName);
    List<JobSpecification> jobsToExecute = new ArrayList<>();
    try {
        Dataset ds = MetadataManager.INSTANCE.getDataset(mdTxnCtx.getValue(), dataverseName, datasetName);
        if (ds == null) {
            if (stmtDelete.getIfExists()) {
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx.getValue());
                return;
            } else {
                throw new AlgebricksException("There is no dataset with this name " + datasetName
                        + " in dataverse " + dataverseName + ".");
            }
        }

        doDropDataset(ds, datasetName, metadataProvider, mdTxnCtx, jobsToExecute, dataverseName, bActiveTxn,
                progress, hcc);
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx.getValue());
    } catch (Exception e) {
        if (bActiveTxn.booleanValue()) {
            abort(e, e, mdTxnCtx.getValue());
        }

        if (progress.getValue() == ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA) {
            // #. execute compensation operations
            // remove the all indexes in NC
            try {
                for (JobSpecification jobSpec : jobsToExecute) {
                    JobUtils.runJob(hcc, jobSpec, true);
                }
            } catch (Exception e2) {
                // do not throw the exception since the metadata still needs to be compensated.
                e.addSuppressed(e2);
            }

            // remove the record from the metadata.
            mdTxnCtx.setValue(MetadataManager.INSTANCE.beginTransaction());
            metadataProvider.setMetadataTxnContext(mdTxnCtx.getValue());
            try {
                MetadataManager.INSTANCE.dropDataset(metadataProvider.getMetadataTxnContext(), dataverseName,
                        datasetName);
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx.getValue());
            } catch (Exception e2) {
                e.addSuppressed(e2);
                abort(e, e2, mdTxnCtx.getValue());
                throw new IllegalStateException("System is inconsistent state: pending dataset(" + dataverseName
                        + "." + datasetName + ") couldn't be removed from the metadata", e);
            }
        }

        throw e;
    } finally {
        MetadataLockManager.INSTANCE.dropDatasetEnd(dataverseName, dataverseName + "." + datasetName);
    }
}

From source file:org.apache.asterix.app.translator.QueryTranslator.java

protected void handleIndexDropStatement(AqlMetadataProvider metadataProvider, Statement stmt,
        IHyracksClientConnection hcc) throws Exception {

    IndexDropStatement stmtIndexDrop = (IndexDropStatement) stmt;
    String datasetName = stmtIndexDrop.getDatasetName().getValue();
    String dataverseName = getActiveDataverse(stmtIndexDrop.getDataverseName());
    ProgressState progress = ProgressState.NO_PROGRESS;
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    boolean bActiveTxn = true;
    metadataProvider.setMetadataTxnContext(mdTxnCtx);

    MetadataLockManager.INSTANCE.dropIndexBegin(dataverseName, dataverseName + "." + datasetName);

    String indexName = null;
    // For external index
    boolean dropFilesIndex = false;
    List<JobSpecification> jobsToExecute = new ArrayList<>();
    try {

        Dataset ds = MetadataManager.INSTANCE.getDataset(mdTxnCtx, dataverseName, datasetName);
        if (ds == null) {
            throw new AlgebricksException(
                    "There is no dataset with this name " + datasetName + " in dataverse " + dataverseName);
        }
        IActiveEntityEventsListener[] listeners = ActiveJobNotificationHandler.INSTANCE.getEventListeners();
        StringBuilder builder = null;
        for (IActiveEntityEventsListener listener : listeners) {
            if (listener.isEntityUsingDataset(dataverseName, datasetName)) {
                if (builder == null) {
                    builder = new StringBuilder();
                }
                builder.append(new FeedConnectionId(listener.getEntityId(), datasetName) + "\n");
            }
        }
        if (builder != null) {
            throw new AsterixException("Dataset" + datasetName
                    + " is currently being fed into by the following active entities: " + builder.toString());
        }

        if (ds.getDatasetType() == DatasetType.INTERNAL) {
            indexName = stmtIndexDrop.getIndexName().getValue();
            Index index = MetadataManager.INSTANCE.getIndex(mdTxnCtx, dataverseName, datasetName, indexName);
            if (index == null) {
                if (stmtIndexDrop.getIfExists()) {
                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                    return;
                } else {
                    throw new AlgebricksException("There is no index with this name " + indexName + ".");
                }
            }
            // #. prepare a job to drop the index in NC.
            CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dataverseName, datasetName,
                    indexName);
            jobsToExecute.add(IndexOperations.buildDropSecondaryIndexJobSpec(cds, metadataProvider, ds));

            // #. mark PendingDropOp on the existing index
            MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName, indexName);
            MetadataManager.INSTANCE.addIndex(mdTxnCtx,
                    new Index(dataverseName, datasetName, indexName, index.getIndexType(),
                            index.getKeyFieldNames(), index.getKeyFieldSourceIndicators(),
                            index.getKeyFieldTypes(), index.isEnforcingKeyFileds(), index.isPrimaryIndex(),
                            IMetadataEntity.PENDING_DROP_OP));

            // #. commit the existing transaction before calling runJob.
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            bActiveTxn = false;
            progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;

            for (JobSpecification jobSpec : jobsToExecute) {
                JobUtils.runJob(hcc, jobSpec, true);
            }

            // #. begin a new transaction
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            bActiveTxn = true;
            metadataProvider.setMetadataTxnContext(mdTxnCtx);

            // #. finally, delete the existing index
            MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName, indexName);
        } else {
            // External dataset
            indexName = stmtIndexDrop.getIndexName().getValue();
            Index index = MetadataManager.INSTANCE.getIndex(mdTxnCtx, dataverseName, datasetName, indexName);
            if (index == null) {
                if (stmtIndexDrop.getIfExists()) {
                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                    return;
                } else {
                    throw new AlgebricksException("There is no index with this name " + indexName + ".");
                }
            } else if (ExternalIndexingOperations.isFileIndex(index)) {
                throw new AlgebricksException("Dropping a dataset's files index is not allowed.");
            }
            // #. prepare a job to drop the index in NC.
            CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dataverseName, datasetName,
                    indexName);
            jobsToExecute.add(IndexOperations.buildDropSecondaryIndexJobSpec(cds, metadataProvider, ds));
            List<Index> datasetIndexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseName,
                    datasetName);
            if (datasetIndexes.size() == 2) {
                dropFilesIndex = true;
                // only this index and the files index remain, so we need to delete both indexes
                for (Index externalIndex : datasetIndexes) {
                    if (ExternalIndexingOperations.isFileIndex(externalIndex)) {
                        cds = new CompiledIndexDropStatement(dataverseName, datasetName,
                                externalIndex.getIndexName());
                        jobsToExecute.add(ExternalIndexingOperations.buildDropFilesIndexJobSpec(cds,
                                metadataProvider, ds));
                        // #. mark PendingDropOp on the existing files index
                        MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName,
                                externalIndex.getIndexName());
                        MetadataManager.INSTANCE.addIndex(mdTxnCtx,
                                new Index(dataverseName, datasetName, externalIndex.getIndexName(),
                                        externalIndex.getIndexType(), externalIndex.getKeyFieldNames(),
                                        externalIndex.getKeyFieldSourceIndicators(), index.getKeyFieldTypes(),
                                        index.isEnforcingKeyFileds(), externalIndex.isPrimaryIndex(),
                                        IMetadataEntity.PENDING_DROP_OP));
                    }
                }
            }

            // #. mark PendingDropOp on the existing index
            MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName, indexName);
            MetadataManager.INSTANCE.addIndex(mdTxnCtx,
                    new Index(dataverseName, datasetName, indexName, index.getIndexType(),
                            index.getKeyFieldNames(), index.getKeyFieldSourceIndicators(),
                            index.getKeyFieldTypes(), index.isEnforcingKeyFileds(), index.isPrimaryIndex(),
                            IMetadataEntity.PENDING_DROP_OP));

            // #. commit the existing transaction before calling runJob.
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            bActiveTxn = false;
            progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;

            for (JobSpecification jobSpec : jobsToExecute) {
                JobUtils.runJob(hcc, jobSpec, true);
            }

            // #. begin a new transaction
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            bActiveTxn = true;
            metadataProvider.setMetadataTxnContext(mdTxnCtx);

            // #. finally, delete the existing index
            MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName, indexName);
            if (dropFilesIndex) {
                // delete the files index too
                MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName,
                        ExternalIndexingOperations.getFilesIndexName(datasetName));
                MetadataManager.INSTANCE.dropDatasetExternalFiles(mdTxnCtx, ds);
                ExternalDatasetsRegistry.INSTANCE.removeDatasetInfo(ds);
            }
        }
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);

    } catch (Exception e) {
        if (bActiveTxn) {
            abort(e, e, mdTxnCtx);
        }

        if (progress == ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA) {
            // #. execute compensation operations
            // remove the all indexes in NC
            try {
                for (JobSpecification jobSpec : jobsToExecute) {
                    JobUtils.runJob(hcc, jobSpec, true);
                }
            } catch (Exception e2) {
                // do not throw the exception since the metadata still needs to be compensated.
                e.addSuppressed(e2);
            }

            // remove the record from the metadata.
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            metadataProvider.setMetadataTxnContext(mdTxnCtx);
            try {
                MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), dataverseName,
                        datasetName, indexName);
                if (dropFilesIndex) {
                    MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), dataverseName,
                            datasetName, ExternalIndexingOperations.getFilesIndexName(datasetName));
                }
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            } catch (Exception e2) {
                e.addSuppressed(e2);
                abort(e, e2, mdTxnCtx);
                throw new IllegalStateException("System is inconsistent state: pending index(" + dataverseName
                        + "." + datasetName + "." + indexName + ") couldn't be removed from the metadata", e);
            }
        }

        throw e;

    } finally {
        MetadataLockManager.INSTANCE.dropIndexEnd(dataverseName, dataverseName + "." + datasetName);
    }
}
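
All of the handlers above call addSuppressed by hand because their compensation work is not an AutoCloseable resource. When cleanup can be modeled as a resource, try-with-resources performs the same bookkeeping automatically: an exception thrown by close() is added to the suppressed list of the exception thrown by the block body. A small, self-contained illustration:

public class TryWithResourcesSuppression {

    static class FailingResource implements AutoCloseable {
        @Override
        public void close() {
            throw new RuntimeException("close failed");
        }
    }

    public static void main(String[] args) {
        try (FailingResource resource = new FailingResource()) {
            throw new RuntimeException("body failed");
        } catch (RuntimeException e) {
            // e is "body failed"; the close() failure was attached automatically
            for (Throwable suppressed : e.getSuppressed()) {
                System.out.println("suppressed: " + suppressed.getMessage());
            }
        }
    }
}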