io.druid.query.metadata.SegmentMetadataQueryRunnerFactory.java Source code

Java tutorial

Introduction

Here is the source code for io.druid.query.metadata.SegmentMetadataQueryRunnerFactory.java

Source

/*
 * Druid - a distributed column store.
 * Copyright 2012 - 2015 Metamarkets Group Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package io.druid.query.metadata;

import com.google.common.base.Function;
import com.google.common.base.Throwables;
import com.google.common.collect.Maps;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.inject.Inject;
import com.metamx.common.guava.Sequence;
import com.metamx.common.guava.Sequences;
import com.metamx.common.logger.Logger;
import io.druid.query.AbstractPrioritizedCallable;
import io.druid.query.ConcatQueryRunner;
import io.druid.query.Query;
import io.druid.query.QueryContextKeys;
import io.druid.query.QueryInterruptedException;
import io.druid.query.QueryRunner;
import io.druid.query.QueryRunnerFactory;
import io.druid.query.QueryToolChest;
import io.druid.query.QueryWatcher;
import io.druid.query.metadata.metadata.ColumnAnalysis;
import io.druid.query.metadata.metadata.ColumnIncluderator;
import io.druid.query.metadata.metadata.SegmentAnalysis;
import io.druid.query.metadata.metadata.SegmentMetadataQuery;
import io.druid.segment.QueryableIndex;
import io.druid.segment.Segment;
import io.druid.segment.StorageAdapter;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Map;
import java.util.concurrent.CancellationException;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

/**
 * Factory for query runners that answer {@link SegmentMetadataQuery}s against a single
 * {@link Segment}, plus a merging runner that fans out per-segment work onto an executor
 * with priority, cancellation and timeout support via the supplied {@link QueryWatcher}.
 */
public class SegmentMetadataQueryRunnerFactory
        implements QueryRunnerFactory<SegmentAnalysis, SegmentMetadataQuery> {
    private static final SegmentAnalyzer analyzer = new SegmentAnalyzer();
    private static final Logger log = new Logger(SegmentMetadataQueryRunnerFactory.class);

    private final SegmentMetadataQueryQueryToolChest toolChest;
    private final QueryWatcher queryWatcher;

    /**
     * @param toolChest    the tool chest returned by {@link #getToolchest()}
     * @param queryWatcher registry for in-flight query futures so they can be cancelled externally
     */
    @Inject
    public SegmentMetadataQueryRunnerFactory(SegmentMetadataQueryQueryToolChest toolChest,
            QueryWatcher queryWatcher) {
        this.toolChest = toolChest;
        this.queryWatcher = queryWatcher;
    }

    /**
     * Builds a runner that analyzes the given segment's columns and returns a single
     * {@link SegmentAnalysis} describing them (optionally including estimated sizes).
     */
    @Override
    public QueryRunner<SegmentAnalysis> createRunner(final Segment segment) {
        return new QueryRunner<SegmentAnalysis>() {
            @Override
            public Sequence<SegmentAnalysis> run(Query<SegmentAnalysis> inQ, Map<String, Object> responseContext) {
                SegmentMetadataQuery query = (SegmentMetadataQuery) inQ;

                final QueryableIndex index = segment.asQueryableIndex();

                final Map<String, ColumnAnalysis> analyzedColumns;
                final int numRows;
                long totalSize = 0;
                if (index == null) {
                    // IncrementalIndexSegments (used by in-memory hydrants in the realtime service) do not have a QueryableIndex
                    StorageAdapter segmentAdapter = segment.asStorageAdapter();
                    analyzedColumns = analyzer.analyze(segmentAdapter, query.getAnalysisTypes());
                    numRows = segmentAdapter.getNumRows();
                } else {
                    analyzedColumns = analyzer.analyze(index, query.getAnalysisTypes());
                    numRows = index.getNumRows();
                }

                if (query.hasSize()) {
                    // Initialize with the size of the whitespace, 1 byte per column per row.
                    // Widen to long before multiplying: int * int can overflow for large
                    // segments even though the result is stored in a long.
                    totalSize = (long) analyzedColumns.size() * numRows;
                }

                Map<String, ColumnAnalysis> columns = Maps.newTreeMap();
                ColumnIncluderator includerator = query.getToInclude();
                for (Map.Entry<String, ColumnAnalysis> entry : analyzedColumns.entrySet()) {
                    final String columnName = entry.getKey();
                    final ColumnAnalysis column = entry.getValue();

                    // Errored column analyses contribute no size but are still returned
                    // to the caller if the includerator selects them.
                    if (!column.isError()) {
                        totalSize += column.getSize();
                    }
                    if (includerator.include(columnName)) {
                        columns.put(columnName, column);
                    }
                }

                return Sequences.simple(Arrays.asList(new SegmentAnalysis(segment.getIdentifier(),
                        Arrays.asList(segment.getDataInterval()), columns, totalSize)));
            }
        };
    }

    /**
     * Wraps each per-segment runner so its work executes on {@code exec} as a prioritized task,
     * registered with the query watcher and subject to the query's optional timeout; the wrapped
     * runners are concatenated into a single sequence.
     */
    @Override
    public QueryRunner<SegmentAnalysis> mergeRunners(ExecutorService exec,
            Iterable<QueryRunner<SegmentAnalysis>> queryRunners) {
        final ListeningExecutorService queryExecutor = MoreExecutors.listeningDecorator(exec);
        return new ConcatQueryRunner<SegmentAnalysis>(Sequences.map(Sequences.simple(queryRunners),
                new Function<QueryRunner<SegmentAnalysis>, QueryRunner<SegmentAnalysis>>() {
                    @Override
                    public QueryRunner<SegmentAnalysis> apply(final QueryRunner<SegmentAnalysis> input) {
                        return new QueryRunner<SegmentAnalysis>() {
                            @Override
                            public Sequence<SegmentAnalysis> run(final Query<SegmentAnalysis> query,
                                    final Map<String, Object> responseContext) {
                                final int priority = query.getContextPriority(0);
                                // Materialize the delegate's results on the executor so the work
                                // runs under the watcher's cancellation/timeout control rather
                                // than lazily on the caller's thread.
                                ListenableFuture<Sequence<SegmentAnalysis>> future = queryExecutor.submit(
                                        new AbstractPrioritizedCallable<Sequence<SegmentAnalysis>>(priority) {
                                            @Override
                                            public Sequence<SegmentAnalysis> call() throws Exception {
                                                return Sequences
                                                        .simple(Sequences.toList(input.run(query, responseContext),
                                                                new ArrayList<SegmentAnalysis>()));
                                            }
                                        });
                                try {
                                    queryWatcher.registerQuery(query, future);
                                    final Number timeout = query.getContextValue(QueryContextKeys.TIMEOUT,
                                            (Number) null);
                                    return timeout == null ? future.get()
                                            : future.get(timeout.longValue(), TimeUnit.MILLISECONDS);
                                } catch (InterruptedException e) {
                                    log.warn(e, "Query interrupted, cancelling pending results, query id [%s]",
                                            query.getId());
                                    future.cancel(true);
                                    // Restore the interrupt flag so callers further up the stack
                                    // can observe the interruption.
                                    Thread.currentThread().interrupt();
                                    throw new QueryInterruptedException("Query interrupted");
                                } catch (CancellationException e) {
                                    throw new QueryInterruptedException("Query cancelled");
                                } catch (TimeoutException e) {
                                    log.info("Query timeout, cancelling pending results for query id [%s]",
                                            query.getId());
                                    future.cancel(true);
                                    throw new QueryInterruptedException("Query timeout");
                                } catch (ExecutionException e) {
                                    // Rethrow the underlying failure from the worker thread.
                                    throw Throwables.propagate(e.getCause());
                                }
                            }
                        };
                    }
                }));
    }

    @Override
    public QueryToolChest<SegmentAnalysis, SegmentMetadataQuery> getToolchest() {
        return toolChest;
    }
}