com.threewks.thundr.bigmetrics.service.BigMetricsServiceImpl.java Source code

Introduction

Here is the source code for com.threewks.thundr.bigmetrics.service.BigMetricsServiceImpl.java, the default BigMetricsService implementation. It registers @Event-annotated classes, maps their fields to BigQuery columns, ensures the backing tables and views exist, and dispatches tracked events for recording.

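Before the full listing, a minimal usage sketch may help. It is not part of the original file: the UserSignup event class, its fields and the recordSignup helper are hypothetical, and it assumes a configured BigMetricsService instance is supplied by the application (for example via thundr's injection configuration). The annotations and service methods it calls (@Event, @Ignore, isRegistered, registerEvent, validateEvents, ensureTablesExist, ensureViewsExist, trackEvent) all appear in the source below.

import com.threewks.thundr.bigmetrics.BigMetricsService;
import com.threewks.thundr.bigmetrics.Event;
import com.threewks.thundr.bigmetrics.Ignore;
import org.joda.time.DateTime;

public class UserSignupExample {

    // Hypothetical event class. The service reads the @Event value as the event name and
    // skips @Ignore'd, transient and synthetic fields when building the BigQuery schema.
    @Event("userSignup")
    public static class UserSignup {
        private String userId;       // presumably mapped to a String column by the default mappings
        private DateTime signedUpAt; // presumably mapped to a Timestamp column by the default mappings
        @Ignore
        private String internalNote; // excluded from the table entirely

        public UserSignup(String userId, DateTime signedUpAt) {
            this.userId = userId;
            this.signedUpAt = signedUpAt;
        }
    }

    // The BigMetricsService is assumed to be wired up elsewhere; this only shows the call sequence.
    public static void recordSignup(BigMetricsService metrics, String userId) {
        if (!metrics.isRegistered(UserSignup.class)) {
            metrics.registerEvent(UserSignup.class); // derives the event name and column metadata via reflection
            metrics.validateEvents();                // checks every field type can be transformed to its BigQuery type
            metrics.ensureTablesExist();             // creates the hash-suffixed table for this schema if missing
            metrics.ensureViewsExist();              // creates or updates the view spanning all schema versions
        }
        metrics.trackEvent(new UserSignup(userId, DateTime.now()));
    }
}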
Source

/*
 * This file is a component of thundr, a software library from 3wks.
 * Read more: http://www.3wks.com.au/thundr
 * Copyright (C) 2014 3wks, <thundr@3wks.com.au>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.threewks.thundr.bigmetrics.service;

import com.atomicleopard.expressive.ETransformer;
import com.atomicleopard.expressive.Expressive;
import com.threewks.thundr.bigmetrics.As;
import com.threewks.thundr.bigmetrics.BigMetricsException;
import com.threewks.thundr.bigmetrics.BigMetricsService;
import com.threewks.thundr.bigmetrics.Event;
import com.threewks.thundr.bigmetrics.Ignore;
import com.threewks.thundr.bigmetrics.bigquery.BigQueryService;
import com.threewks.thundr.bigmetrics.bigquery.BigQueryType;
import com.threewks.thundr.bigmetrics.bigquery.QueryResult;
import com.threewks.thundr.bigmetrics.field.DateTimeFieldProcessor;
import com.threewks.thundr.bigmetrics.field.FieldProcessor;
import com.threewks.thundr.bigmetrics.field.NoopFieldProcessor;
import com.threewks.thundr.logger.Logger;
import com.threewks.thundr.transformer.TransformerManager;
import jodd.util.ReflectUtil;
import org.apache.commons.lang3.StringUtils;
import org.joda.time.DateTime;

import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import java.util.concurrent.ConcurrentHashMap;

public class BigMetricsServiceImpl implements BigMetricsService {

    private final BigQueryService bigQueryService;
    private final EventDispatcher eventDispatcher;
    private TransformerManager transformerManager;
    private BigQueryTypeMappings defaultMappings;
    private Map<Class<?>, EventMetadata> eventCache = new ConcurrentHashMap<>();
    private Map<BigQueryType, FieldProcessor<?>> defaultFieldProcessors = new LinkedHashMap<>();
    private Map<BigQueryType, Class<?>> canonicalTypes = canonicalTypes();

    public BigMetricsServiceImpl(BigQueryService bigQueryService, EventDispatcher eventDispatcher,
            TransformerManager transformerManager, BigQueryTypeMappings defaultMappings) {
        this.transformerManager = transformerManager;
        this.bigQueryService = bigQueryService;
        this.eventDispatcher = eventDispatcher;
        this.defaultMappings = defaultMappings;

        this.defaultFieldProcessors.put(BigQueryType.Boolean, new NoopFieldProcessor());
        this.defaultFieldProcessors.put(BigQueryType.Float, new NoopFieldProcessor());
        this.defaultFieldProcessors.put(BigQueryType.Integer, new NoopFieldProcessor());
        this.defaultFieldProcessors.put(BigQueryType.String, new NoopFieldProcessor());
        this.defaultFieldProcessors.put(BigQueryType.Timestamp, new DateTimeFieldProcessor());
    }

    @Override
    public boolean isRegistered(Class<?> event) {
        return eventCache.containsKey(event);
    }

    @Override
    public void registerEvents(Class<?>... events) {
        for (Class<?> event : events) {
            registerEvent(event);
        }
    }

    @Override
    public void registerEvent(Class<?> type) {
        Event event = type.getAnnotation(Event.class);
        if (event == null) {
            throw new BigMetricsException("Failed to register event '%s' - @%s is not present on the class",
                    type.getSimpleName(), Event.class.getSimpleName());
        }
        String eventName = StringUtils.isBlank(event.value()) ? type.getSimpleName() : event.value();
        eventCache.put(type, createMetadata(type, eventName));
    }

    @Override
    public List<EventMetadata> getRegisteredEvents() {
        return new ArrayList<>(eventCache.values());
    }

    @Override
    public EventMetadata getRegisteredEvent(Class<?> event) {
        return eventCache.get(event);
    }

    @Override
    public EventMetadata getRegisteredEvent(String eventName) {
        for (EventMetadata event : eventCache.values()) {
            if (event.getEventName().equalsIgnoreCase(eventName)) {
                return event;
            }
        }
        return null;
    }

    /**
     * Validates all registered events, i.e. that each field type is known and can be mapped to its BigQuery type.
     */
    @Override
    public void validateEvents() {
        for (EventMetadata event : eventCache.values()) {
            validateEvent(event);
        }
    }

    /**
     * Ensures that all required event tables exist. Events cannot be recorded against tables that are missing.
     */
    @Override
    public void ensureTablesExist() {
        List<String> allTables = bigQueryService.listTables();
        List<String> requiredTables = EventMetadata.Transformers.ToTableNames.from(eventCache.values());
        requiredTables.removeAll(allTables);

        if (!requiredTables.isEmpty()) {
            // Remaining required tables need to be created.
            Map<String, EventMetadata> lookup = EventMetadata.Transformers.ToTableNameLookup
                    .from(eventCache.values());
            for (String table : requiredTables) {
                EventMetadata eventMetadata = lookup.get(table);
                createTable(eventMetadata);
            }
        }
    }

    @Override
    public void asyncEnsureTablesExist() {
        eventDispatcher.ensureTablesExist(this);
    }

    @Override
    public void ensureViewsExist() {
        for (EventMetadata event : eventCache.values()) {
            ensureEventViewExists(event);
        }
    }

    @Override
    public void asyncEnsureViewsExist() {
        eventDispatcher.ensureViewsExist(this);
    }

    @Override
    public String startQuery(String query) {
        return bigQueryService.executeQuery(query);
    }

    @Override
    public boolean isQueryComplete(String jobId) {
        return BigQueryService.JobStatus.Running != bigQueryService.getJobStatus(jobId);
    }

    @Override
    public QueryResult queryResult(String jobId, long pageSize, String pageToken) {
        return bigQueryService.queryResult(jobId, pageSize, pageToken);
    }

    @Override
    public <E> void trackEvent(E event) {
        Class<?> eventType = event.getClass();
        EventMetadata eventMetadata = eventCache.get(eventType);
        if (eventMetadata == null) {
            throw new BigMetricsException("The event %s of type %s cannot be tracked - it was never registered",
                    event, event.getClass());
        }

        // put the request on a push queue
        String tableId = eventMetadata.getTable();
        Map<String, Object> data = generateEventData(eventMetadata, event);
        eventDispatcher.handleEvent(tableId, data);
    }

    protected EventMetadata createMetadata(Class<?> type, String eventName) {
        Map<Field, FieldProcessor<?>> processors = findFieldProcessors(type);
        Map<String, BigQueryType> columns = generateColumnData(processors);
        String tableId = determineTableId(eventName, columns);
        EventMetadata eventMetadata = new EventMetadata(type, eventName, tableId, columns, processors);
        return eventMetadata;
    }

    protected Map<Field, FieldProcessor<?>> findFieldProcessors(Class<?> eventClass) {
        List<Field> fields = Arrays.asList(ReflectUtil.getSupportedFields(eventClass, Object.class));
        Map<Field, FieldProcessor<?>> processors = new LinkedHashMap<>();
        for (Field field : fields) {
            if (!field.isSynthetic() && !Modifier.isTransient(field.getModifiers())
                    && !field.isAnnotationPresent(Ignore.class)) {
                field.setAccessible(true);
                FieldProcessor<?> processor = determineProcessor(field);
                processors.put(field, processor);
            }
        }
        return processors;
    }

    protected Map<String, BigQueryType> generateColumnData(Map<Field, FieldProcessor<?>> processors) {
        Map<String, BigQueryType> columns = new TreeMap<>();
        for (Map.Entry<Field, FieldProcessor<?>> fieldEntry : processors.entrySet()) {
            Field field = fieldEntry.getKey();
            FieldProcessor<?> processor = fieldEntry.getValue();
            BigQueryType type = getType(field);
            columns.putAll(processor.fields(field.getName(), type));
        }
        return columns;
    }

    protected ETransformer<?, ?> getTransformer(Class<?> propertyType, Class<?> targetType) {
        ETransformer<?, ?> transformer = transformerManager.getBestTransformer(propertyType, targetType);
        if (transformer == null) {
            throw new BigMetricsException(
                    "Cannot transform from %s to %s - ensure a transformer is available in the %s",
                    propertyType.getName(), targetType.getName(), TransformerManager.class.getSimpleName());
        }
        return transformer;
    }

    protected BigQueryType getType(Field field) {
        Class<?> type = field.getType();
        As as = field.getAnnotation(As.class);
        BigQueryType bigQueryType = as == null ? null : as.value();
        if (bigQueryType == null) {
            bigQueryType = defaultMappings.get(type);
        }
        if (bigQueryType == null) {
            bigQueryType = BigQueryType.String;
        }
        return bigQueryType;
    }

    /**
     * Determines a (probably) unique id for an event table.
     * In this case, we hash the ordered columns of the data set and append the hash as hex.
     * 
     * @param eventName the logical event name
     * @param columns the ordered mapping of column name to BigQuery type
     * @return the table id, in the form eventName_hexHash
     */
    protected String determineTableId(String eventName, Map<String, BigQueryType> columns) {
        StringBuilder sb = new StringBuilder();
        for (Map.Entry<String, BigQueryType> entry : columns.entrySet()) {
            sb.append(":");
            sb.append(StringUtils.lowerCase(entry.getKey()));
            sb.append("=");
            sb.append(StringUtils.upperCase(entry.getValue().type()));
        }
        return eventName + "_" + Integer.toHexString(sb.toString().hashCode());
    }

    /**
     * Creates or updates a view for querying across all versions of the event, if necessary.
     * 
     * @param eventMetadata the metadata of the registered event
     */
    protected void ensureEventViewExists(EventMetadata eventMetadata) {
        String eventName = eventMetadata.getEventName();
        String currentQuery = bigQueryService.getViewDefinition(eventName);
        String requiredQuery = determineViewQuery(eventName);
        if (currentQuery == null) {
            Logger.info("Creating view for event %s as '%s'", eventName, requiredQuery);
            bigQueryService.createView(eventName, requiredQuery);
        } else if (!requiredQuery.equalsIgnoreCase(currentQuery)) {
            Logger.info("Updating view for event %s to '%s'", eventName, requiredQuery);
            bigQueryService.updateView(eventName, requiredQuery);
        }
    }

    protected String determineViewQuery(String eventName) {
        final String dataset = bigQueryService.getDatasetId();
        List<String> tables = bigQueryService.listTables("^" + eventName + "_");
        Collections.sort(tables);
        tables = Expressive.Transformers.transformAllUsing(new ETransformer<String, String>() {
            @Override
            public String from(String from) {
                return String.format("(SELECT * FROM [%s.%s])", dataset, from);
            }
        }).from(tables);
        return String.format("SELECT * FROM %s", StringUtils.join(tables, ","));
    }

    protected void validateEvent(EventMetadata event) {
        Map<Class<?>, Class<?>> requiredTypes = new HashMap<>();
        for (Field field : event.getFieldProcessors().keySet()) {
            String name = field.getName();
            BigQueryType bigQueryType = event.getColumns().get(name);

            Class<?> propertyType = field.getType();
            Class<?> targetType = canonicalTypes.get(bigQueryType);
            requiredTypes.put(propertyType, targetType);
        }
        // ensure we can make the required transformation
        try {
            for (Map.Entry<Class<?>, Class<?>> required : requiredTypes.entrySet()) {
                getTransformer(required.getKey(), required.getValue());
            }
        } catch (BigMetricsException e) {
            throw new BigMetricsException("The event %s (%s) is not valid: %s", event.getType().getSimpleName(),
                    event.getEventName(), e.getMessage());
        }
    }

    /**
     * Creates the set of canonical types. That is, this service
     * considers each BigQuery data type to have a single Java
     * type representation. This controls the target types of
     * transformation and the inputs passed to each {@link FieldProcessor}.
     * 
     * @return the mapping of each BigQuery type to its canonical Java class
     */
    protected Map<BigQueryType, Class<?>> canonicalTypes() {
        // @formatter:off
        return Expressive.map(BigQueryType.Boolean, Boolean.class, BigQueryType.Float, Double.class,
                BigQueryType.Integer, Long.class, BigQueryType.Record, List.class, BigQueryType.String,
                String.class, BigQueryType.Timestamp, DateTime.class);
        // @formatter:on
    }

    protected void createTable(EventMetadata eventMetadata) {
        String table = eventMetadata.getTable();
        String eventName = eventMetadata.getEventName();
        Logger.info("Creating table %s for event %s", table, eventName);
        bigQueryService.createTable(table, eventMetadata.getColumns());
    }

    @SuppressWarnings("unchecked")
    private <E> Map<String, Object> generateEventData(EventMetadata eventMetadata, E event) {
        Map<String, Object> data = new LinkedHashMap<>();
        Map<Field, FieldProcessor<?>> processors = eventMetadata.getFieldProcessors();
        Map<String, BigQueryType> columns = eventMetadata.getColumns();
        try {
            for (Map.Entry<Field, FieldProcessor<?>> fieldEntry : processors.entrySet()) {
                Field field = fieldEntry.getKey();
                String name = field.getName();

                // transform to the canonical type
                BigQueryType bigQueryType = columns.get(name);
                Object bigQueryValue = convertToCanonicalType(bigQueryType, field.getType(), field.get(event));

                // process to output columns
                FieldProcessor<Object> processor = (FieldProcessor<Object>) fieldEntry.getValue();
                data.putAll(processor.process(name, bigQueryType, bigQueryValue));
            }
        } catch (Exception e) {
            throw new BigMetricsException(e, "Failed to generate data for event %s: %s", event, e.getMessage());
        }
        return data;
    }

    @SuppressWarnings("unchecked")
    private Object convertToCanonicalType(BigQueryType bigQueryType, Class<?> currentType, Object value) {
        Class<?> targetType = canonicalTypes.get(bigQueryType);
        ETransformer<Object, Object> transformer = (ETransformer<Object, Object>) getTransformer(currentType,
                targetType);
        return transformer.from(value);
    }

    private FieldProcessor<?> determineProcessor(Field field) {
        try {
            BigQueryType bigQueryType = getType(field);
            As as = field.getAnnotation(As.class);
            Class<? extends FieldProcessor<?>> processorType = (as == null ? null : as.processor());
            FieldProcessor<?> processor = processorType == null ? null : processorType.newInstance();
            if (processor == null) {
                processor = defaultFieldProcessors.get(bigQueryType);
            }
            return processor;
        } catch (InstantiationException | IllegalAccessException e) {
            throw new BigMetricsException(e, "Unable to create %s: %s", FieldProcessor.class.getSimpleName(),
                    e.getMessage());
        }
    }
}
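
For context on the table naming scheme above, the following sketch simply mirrors the logic of determineTableId (and the ordered column map built by generateColumnData) so the versioning behaviour is visible in isolation; the event name and columns are illustrative only.

import com.threewks.thundr.bigmetrics.bigquery.BigQueryType;
import org.apache.commons.lang3.StringUtils;

import java.util.Map;
import java.util.TreeMap;

public class TableIdExample {

    // Mirrors BigMetricsServiceImpl.determineTableId: join the ordered "name=TYPE" pairs and append their hash as hex.
    static String tableId(String eventName, Map<String, BigQueryType> columns) {
        StringBuilder sb = new StringBuilder();
        for (Map.Entry<String, BigQueryType> entry : columns.entrySet()) {
            sb.append(":").append(StringUtils.lowerCase(entry.getKey()));
            sb.append("=").append(StringUtils.upperCase(entry.getValue().type()));
        }
        return eventName + "_" + Integer.toHexString(sb.toString().hashCode());
    }

    public static void main(String[] args) {
        // TreeMap keeps the columns in a stable order, as generateColumnData does above.
        Map<String, BigQueryType> columns = new TreeMap<>();
        columns.put("userId", BigQueryType.String);
        columns.put("signedUpAt", BigQueryType.Timestamp);
        // Prints something like "userSignup_1a2b3c4d". Adding, removing or retyping a column changes the
        // suffix, so each schema version gets its own table and the event view unions them together.
        System.out.println(tableId("userSignup", columns));
    }
}

This is why ensureEventViewExists regenerates the view whenever the required query no longer matches the current definition: determineViewQuery builds a SELECT * over every table whose name starts with the event name, one subselect per schema version.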