Example usage for java.util EnumMap EnumMap

List of usage examples for java.util EnumMap EnumMap

Introduction

This page shows example usages of the java.util.EnumMap constructor, EnumMap, collected from the source files listed below.

Prototype

public EnumMap(Class<K> keyType)

Document

Creates an empty enum map with the specified key type.
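
Before the project examples, here is a minimal, self-contained sketch of this constructor in isolation; the Status enum and the labels map are hypothetical names used only for illustration.

import java.util.EnumMap;
import java.util.Map;

public class EnumMapExample {

    // Hypothetical enum, defined only for this illustration.
    enum Status { CREATED, RUNNING, DONE }

    public static void main(String[] args) {
        // Create an empty EnumMap keyed by the Status enum type.
        Map<Status, String> labels = new EnumMap<Status, String>(Status.class);
        labels.put(Status.CREATED, "created");
        labels.put(Status.RUNNING, "running");
        labels.put(Status.DONE, "done");

        // EnumMap iterates in the declaration order of the enum constants.
        for (Map.Entry<Status, String> entry : labels.entrySet()) {
            System.out.println(entry.getKey() + " -> " + entry.getValue());
        }
    }
}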

Usage

From source file:org.openecomp.sdc.be.components.impl.ServiceBusinessLogic.java

private void createAudit(User user, AuditingActionEnum auditAction, String comment,
        ResponseFormat responseFormat) {
    EnumMap<AuditingFieldsKeysEnum, Object> auditingFields = new EnumMap<AuditingFieldsKeysEnum, Object>(
            AuditingFieldsKeysEnum.class);

    createAudit(user, auditAction, comment, null, responseFormat, auditingFields);
}

From source file:org.openecomp.sdc.be.components.impl.ServiceBusinessLogic.java

private void createAudit(User user, AuditingActionEnum auditAction, String comment, Service component,
        ResponseFormat responseFormat) {
    EnumMap<AuditingFieldsKeysEnum, Object> auditingFields = new EnumMap<AuditingFieldsKeysEnum, Object>(
            AuditingFieldsKeysEnum.class);
    auditingFields.put(AuditingFieldsKeysEnum.AUDIT_RESOURCE_DCURR_STATUS,
            component.getDistributionStatus().name());
    auditingFields.put(AuditingFieldsKeysEnum.AUDIT_RESOURCE_DPREV_STATUS,
            component.getDistributionStatus().name());
    createAudit(user, auditAction, comment, component, component.getLifecycleState().name(),
            component.getVersion(), responseFormat, auditingFields);
}

From source file:org.openecomp.sdc.be.components.impl.ServiceBusinessLogic.java

public Either<Service, ResponseFormat> activateDistribution(String serviceId, String envName, User modifier,
        HttpServletRequest request) {

    Either<User, ResponseFormat> eitherCreator = validateUserExists(modifier.getUserId(),
            "activate Distribution", false);
    if (eitherCreator.isRight()) {
        return Either.right(eitherCreator.right().value());
    }

    User user = eitherCreator.left().value();

    Either<Service, ResponseFormat> result = null;
    ResponseFormat response = null;
    Service updatedService = null;
    String did = ThreadLocalsHolder.getUuid();
    EnumMap<AuditingFieldsKeysEnum, Object> auditingFields = new EnumMap<AuditingFieldsKeysEnum, Object>(
            AuditingFieldsKeysEnum.class);
    auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DISTRIBUTION_ID, did);
    // DE194021
    String configuredEnvName = ConfigurationManager.getConfigurationManager()
            .getDistributionEngineConfiguration().getEnvironments().get(0);
    if (configuredEnvName != null && false == envName.equals(configuredEnvName)) {
        log.trace("Update environment name to be {} instead of {}", configuredEnvName, envName);
        envName = configuredEnvName;
    }
    // DE194021

    ServletContext servletContext = request.getSession().getServletContext();
    boolean isDistributionEngineUp = getHealthCheckBL(servletContext)
            .isDistributionEngineUp(request.getSession().getServletContext()); // DE
    if (!isDistributionEngineUp) {
        BeEcompErrorManager.getInstance().processEcompError(EcompErrorName.BeSystemError,
                "Distribution Engine is DOWN");
        BeEcompErrorManager.getInstance().logBeSystemError("Distribution Engine is DOWN");
        log.debug("Distribution Engine is DOWN");
        response = componentsUtils.getResponseFormat(ActionStatus.GENERAL_ERROR);
        return Either.right(response);
    }

    Either<Service, StorageOperationStatus> serviceRes = serviceOperation.getService(serviceId);
    if (serviceRes.isRight()) {
        log.debug("failed retrieving service");
        response = componentsUtils.getResponseFormat(componentsUtils
                .convertFromStorageResponse(serviceRes.right().value(), ComponentTypeEnum.SERVICE), serviceId);
        componentsUtils.auditComponent(response, user, null, null, null,
                AuditingActionEnum.DISTRIBUTION_STATE_CHANGE_REQUEST, ComponentTypeEnum.SERVICE,
                auditingFields);
        return Either.right(response);
    }
    Service service = serviceRes.left().value();
    String dcurrStatus = service.getDistributionStatus().name();
    auditingFields.put(AuditingFieldsKeysEnum.AUDIT_RESOURCE_DPREV_STATUS, dcurrStatus);

    Either<INotificationData, StorageOperationStatus> readyForDistribution = distributionEngine
            .isReadyForDistribution(service, did, envName);
    if (readyForDistribution.isLeft()) {
        INotificationData notificationData = readyForDistribution.left().value();
        StorageOperationStatus notifyServiceResponse = distributionEngine.notifyService(did, service,
                notificationData, envName, user.getUserId(), user.getFullName());
        if (notifyServiceResponse == StorageOperationStatus.OK) {
            Either<Service, ResponseFormat> updateStateRes = updateDistributionStatusForActivation(service,
                    user, DistributionStatusEnum.DISTRIBUTED);
            if (updateStateRes.isLeft() && updateStateRes.left().value() != null) {
                updatedService = updateStateRes.left().value();
                dcurrStatus = updatedService.getDistributionStatus().name();
            } else {
                // The response is not relevant
                updatedService = service;
            }
            ASDCKpiApi.countActivatedDistribution();
            response = componentsUtils.getResponseFormat(ActionStatus.OK);
            result = Either.left(updatedService);
        } else {
            BeEcompErrorManager.getInstance().processEcompError(EcompErrorName.BeSystemError,
                    "Activate Distribution - send notification");
            BeEcompErrorManager.getInstance().logBeSystemError("Activate Distribution - send notification");
            log.debug("distributionEngine.notifyService response is: {}", notifyServiceResponse);
            response = componentsUtils.getResponseFormat(ActionStatus.GENERAL_ERROR);
            result = Either.right(response);
        }
    } else {
        StorageOperationStatus distEngineValidationResponse = readyForDistribution.right().value();
        response = componentsUtils.getResponseFormatByDE(
                componentsUtils.convertFromStorageResponse(distEngineValidationResponse), service.getName(),
                envName);
        result = Either.right(response);
    }
    auditingFields.put(AuditingFieldsKeysEnum.AUDIT_RESOURCE_DCURR_STATUS, dcurrStatus);
    componentsUtils.auditComponent(response, user, service, null, null,
            AuditingActionEnum.DISTRIBUTION_STATE_CHANGE_REQUEST, ComponentTypeEnum.SERVICE, auditingFields);
    return result;
}

From source file:org.codice.ddf.spatial.ogc.wfs.v2_0_0.catalog.source.WfsFilterDelegate.java

public void setSpatialOps(SpatialOperatorsType spatialOperators) {
    spatialOps = new ConcurrentHashMap<SPATIAL_OPERATORS, SpatialOperatorType>(
            new EnumMap<SPATIAL_OPERATORS, SpatialOperatorType>(SPATIAL_OPERATORS.class));

    for (SpatialOperatorType spatialOp : spatialOperators.getSpatialOperator()) {
        LOGGER.debug("Adding key [spatialOp Name: {}]", spatialOp.getName());
        spatialOps.put(SPATIAL_OPERATORS.valueOf(spatialOp.getName()), spatialOp);
        LOGGER.debug("spatialOps Map: {}", spatialOps.toString());
    }
}

From source file:org.codice.ddf.spatial.ogc.wfs.v2_0_0.catalog.source.WfsFilterDelegate.java

public void setTemporalOps(TemporalOperatorsType temporalOperators) {
    temporalOps = new ConcurrentHashMap<TEMPORAL_OPERATORS, TemporalOperatorType>(
            new EnumMap<TEMPORAL_OPERATORS, TemporalOperatorType>(TEMPORAL_OPERATORS.class));

    for (TemporalOperatorType temporalOp : temporalOperators.getTemporalOperator()) {
        LOGGER.debug("Adding key [temporalOp Name: {}]", temporalOp.getName());
        temporalOps.put(TEMPORAL_OPERATORS.valueOf(temporalOp.getName()), temporalOp);
        LOGGER.debug("temporalOps Map: {}", temporalOps.toString());
    }
}

From source file:org.pentaho.di.repository.pur.PurRepository.java

protected Map<RepositoryObjectType, List<? extends SharedObjectInterface>> loadAndCacheSharedObjects(
        final boolean deepCopy) throws KettleException {
    if (sharedObjectsByType == null) {
        try {
            sharedObjectsByType = new EnumMap<RepositoryObjectType, List<? extends SharedObjectInterface>>(
                    RepositoryObjectType.class);
            // Slave Servers are referenced by Cluster Schemas so they must be loaded first
            readSharedObjects(sharedObjectsByType, RepositoryObjectType.DATABASE,
                    RepositoryObjectType.PARTITION_SCHEMA, RepositoryObjectType.SLAVE_SERVER,
                    RepositoryObjectType.CLUSTER_SCHEMA);
        } catch (Exception e) {
            sharedObjectsByType = null;
            // TODO i18n
            throw new KettleException("Unable to read shared objects from repository", e); //$NON-NLS-1$
        }
    }
    return deepCopy ? deepCopy(sharedObjectsByType) : sharedObjectsByType;
}

From source file:com.att.aro.diagnostics.GraphPanel.java

/**
 * Creating Alarm triggered data for graph plot
 */
private static void populateAlarmPlot(XYPlot plot, TraceData.Analysis analysis) {

    final XYIntervalSeriesCollection alarmDataCollection = new XYIntervalSeriesCollection();
    if (analysis != null) {

        // Remove old annotation from previous plots
        Iterator<XYPointerAnnotation> pointers = pointerAnnotation.iterator();
        while (pointers.hasNext()) {
            plot.removeAnnotation(pointers.next());
        }
        pointerAnnotation.clear();

        final Map<AlarmType, XYIntervalSeries> seriesMap = new EnumMap<AlarmType, XYIntervalSeries>(
                AlarmType.class);
        for (AlarmType eventType : AlarmType.values()) {
            XYIntervalSeries series = new XYIntervalSeries(eventType);
            seriesMap.put(eventType, series);
            alarmDataCollection.addSeries(series);
        }
        final List<AlarmInfo> alarmInfos = analysis.getAlarmInfos();
        final Map<Double, AlarmInfo> eventMap = new HashMap<Double, AlarmInfo>();
        final Map<Double, ScheduledAlarmInfo> eventMapPending = new HashMap<Double, ScheduledAlarmInfo>();
        List<ScheduledAlarmInfo> pendingAlarms = getHasFiredAlarms(analysis.getScheduledAlarms());
        Iterator<ScheduledAlarmInfo> iterPendingAlarms = pendingAlarms.iterator();
        double firedTime = 0;
        while (iterPendingAlarms.hasNext()) {
            ScheduledAlarmInfo scheduledEvent = iterPendingAlarms.next();
            AlarmType pendingAlarmType = scheduledEvent.getAlarmType();
            if (pendingAlarmType != null) {
                firedTime = (scheduledEvent.getTimeStamp() - scheduledEvent.getRepeatInterval()) / 1000;
                seriesMap.get(pendingAlarmType).add(firedTime, firedTime, firedTime, 1, 0.8, 1);
                eventMapPending.put(firedTime, scheduledEvent);
                logger.fine("populateAlarmScheduledPlot type:\n" + pendingAlarmType + "\ntime "
                        + scheduledEvent.getTimeStamp() + "\nrepeating " + firedTime);
            }
        }

        Iterator<AlarmInfo> iter = alarmInfos.iterator();
        while (iter.hasNext()) {
            AlarmInfo currEvent = iter.next();
            if (currEvent != null) {
                AlarmType alarmType = currEvent.getAlarmType();
                if (alarmType != null) {
                    firedTime = currEvent.getTimeStamp() / 1000;

                    /*
                     * Catching any alarms aligned to quanta as being
                     * inexactRepeating alarms
                     */
                    if ((currEvent.getTimestampElapsed() / 1000) % 900 < 1) {
                        seriesMap.get(alarmType).add(firedTime, firedTime, firedTime, 1, 0, 0.7);

                        // Adding an arrow to mark these inexactRepeating alarms
                        XYPointerAnnotation xypointerannotation = new XYPointerAnnotation(alarmType.name(),
                                firedTime, 0.6, 3.92699082D);
                        xypointerannotation.setBaseRadius(20D);
                        xypointerannotation.setTipRadius(1D);
                        pointerAnnotation.add(xypointerannotation);
                        plot.addAnnotation(xypointerannotation);

                        logger.info("SetInexactRepeating alarm type: " + alarmType + " time " + firedTime
                                + " epoch " + currEvent.getTimestampEpoch() + " elapsed:\n"
                                + currEvent.getTimestampElapsed() / 1000);
                    } else {
                        seriesMap.get(alarmType).add(firedTime, firedTime, firedTime, 1, 0, 0.5);
                    }
                    eventMap.put(firedTime, currEvent);
                }
            }
        }
        XYItemRenderer renderer = plot.getRenderer();
        renderer.setSeriesPaint(alarmDataCollection.indexOf(AlarmType.RTC_WAKEUP), Color.red);

        renderer.setSeriesPaint(alarmDataCollection.indexOf(AlarmType.RTC), Color.pink);

        renderer.setSeriesPaint(alarmDataCollection.indexOf(AlarmType.ELAPSED_REALTIME_WAKEUP), Color.blue);

        renderer.setSeriesPaint(alarmDataCollection.indexOf(AlarmType.ELAPSED_REALTIME), Color.cyan);

        renderer.setSeriesPaint(alarmDataCollection.indexOf(AlarmType.UNKNOWN), Color.black);

        // Assign ToolTip to renderer
        renderer.setBaseToolTipGenerator(new XYToolTipGenerator() {
            @Override
            public String generateToolTip(XYDataset dataset, int series, int item) {
                AlarmInfo info = eventMap.get(dataset.getX(series, item));
                Date epochTime = new Date();
                if (info != null) {

                    epochTime.setTime((long) info.getTimestampEpoch());

                    StringBuffer displayInfo = new StringBuffer(rb.getString("alarm.tooltip.prefix"));
                    displayInfo.append(MessageFormat.format(rb.getString("alarm.tooltip.content"),
                            info.getAlarmType(), info.getTimeStamp() / 1000, epochTime.toString()));
                    if ((info.getTimestampElapsed() / 1000) % 900 < 1) {
                        displayInfo.append(rb.getString("alarm.tooltip.setInexactRepeating"));
                    }
                    displayInfo.append(rb.getString("alarm.tooltip.suffix"));
                    return displayInfo.toString();
                }
                ScheduledAlarmInfo infoPending = eventMapPending.get(dataset.getX(series, item));
                if (infoPending != null) {

                    epochTime.setTime(
                            (long) (infoPending.getTimestampEpoch() - infoPending.getRepeatInterval()));

                    StringBuffer displayInfo = new StringBuffer(rb.getString("alarm.tooltip.prefix"));
                    displayInfo.append(MessageFormat.format(rb.getString("alarm.tooltip.contentWithName"),
                            infoPending.getAlarmType(),
                            (infoPending.getTimeStamp() - infoPending.getRepeatInterval()) / 1000,
                            epochTime.toString(), infoPending.getApplication(),
                            infoPending.getRepeatInterval() / 1000));
                    displayInfo.append(rb.getString("alarm.tooltip.suffix"));
                    return displayInfo.toString();
                }
                return null;
            }
        });

    }

    plot.setDataset(alarmDataCollection);
}

From source file:org.alfresco.solr.SolrInformationServer.java

@Override
public void indexNodes(List<Node> nodes, boolean overwrite)
        throws IOException, AuthenticationException, JSONException {
    SolrQueryRequest request = null;
    UpdateRequestProcessor processor = null;
    try {
        request = getLocalSolrQueryRequest();
        processor = this.core.getUpdateProcessingChain(null).createProcessor(request, new SolrQueryResponse());

        Map<Long, Node> nodeIdsToNodes = new HashMap<>();
        EnumMap<SolrApiNodeStatus, List<Long>> nodeStatusToNodeIds = new EnumMap<SolrApiNodeStatus, List<Long>>(
                SolrApiNodeStatus.class);
        categorizeNodes(nodes, nodeIdsToNodes, nodeStatusToNodeIds);
        List<Long> deletedNodeIds = mapNullToEmptyList(nodeStatusToNodeIds.get(SolrApiNodeStatus.DELETED));
        List<Long> shardDeletedNodeIds = mapNullToEmptyList(
                nodeStatusToNodeIds.get(SolrApiNodeStatus.NON_SHARD_DELETED));
        List<Long> shardUpdatedNodeIds = mapNullToEmptyList(
                nodeStatusToNodeIds.get(SolrApiNodeStatus.NON_SHARD_UPDATED));
        List<Long> unknownNodeIds = mapNullToEmptyList(nodeStatusToNodeIds.get(SolrApiNodeStatus.UNKNOWN));
        List<Long> updatedNodeIds = mapNullToEmptyList(nodeStatusToNodeIds.get(SolrApiNodeStatus.UPDATED));

        if (!deletedNodeIds.isEmpty() || !shardDeletedNodeIds.isEmpty() || !shardUpdatedNodeIds.isEmpty()
                || !unknownNodeIds.isEmpty()) {
            // Delete any cached SOLR content documents  appropriate for this shard
            List<NodeMetaData> nodeMetaDatas = new ArrayList<>();

            // For all deleted nodes, fake the node metadata
            for (Long deletedNodeId : deletedNodeIds) {
                Node node = nodeIdsToNodes.get(deletedNodeId);
                NodeMetaData nodeMetaData = createDeletedNodeMetaData(node);
                nodeMetaDatas.add(nodeMetaData);
            }

            if (!unknownNodeIds.isEmpty()) {
                NodeMetaDataParameters nmdp = new NodeMetaDataParameters();
                nmdp.setNodeIds(unknownNodeIds);
                nodeMetaDatas.addAll(repositoryClient.getNodesMetaData(nmdp, Integer.MAX_VALUE));
            }

            for (NodeMetaData nodeMetaData : nodeMetaDatas) {
                Node node = nodeIdsToNodes.get(nodeMetaData.getId());
                if (nodeMetaData.getTxnId() > node.getTxnId()) {
                    // the node has moved on to a later transaction
                    // it will be indexed later
                    continue;
                }
                if (nodeMetaData != null) {
                    this.removeDocFromContentStore(nodeMetaData);
                }
            }

            // Delete the nodes from the index
            if (log.isDebugEnabled()) {
                log.debug(".. deleting");
            }
            DeleteUpdateCommand delDocCmd = new DeleteUpdateCommand(request);
            String query = this.cloud.getQuery(FIELD_DBID, OR, deletedNodeIds, shardDeletedNodeIds,
                    shardUpdatedNodeIds, unknownNodeIds);
            delDocCmd.setQuery(query);
            processor.processDelete(delDocCmd);
        }

        if (!updatedNodeIds.isEmpty() || !unknownNodeIds.isEmpty() || !shardUpdatedNodeIds.isEmpty()) {
            log.info(".. updating");
            NodeMetaDataParameters nmdp = new NodeMetaDataParameters();
            List<Long> nodeIds = new LinkedList<>();
            nodeIds.addAll(updatedNodeIds);
            nodeIds.addAll(unknownNodeIds);
            nodeIds.addAll(shardUpdatedNodeIds);
            nmdp.setNodeIds(nodeIds);

            // Fetches bulk metadata
            List<NodeMetaData> nodeMetaDatas = repositoryClient.getNodesMetaData(nmdp, Integer.MAX_VALUE);

            NEXT_NODE: for (NodeMetaData nodeMetaData : nodeMetaDatas) {
                long start = System.nanoTime();

                Node node = nodeIdsToNodes.get(nodeMetaData.getId());
                if (nodeMetaData.getTxnId() > node.getTxnId()) {
                    // the node has moved on to a later transaction
                    // it will be indexed later
                    continue;
                }

                // All do potential cascade
                if (mayHaveChildren(nodeMetaData)) {
                    cascadeUpdate(nodeMetaData, overwrite, request, processor);
                }

                // NON_SHARD_UPDATED do not index just cascade
                if (nodeIdsToNodes.get(nodeMetaData.getId())
                        .getStatus() == SolrApiNodeStatus.NON_SHARD_UPDATED) {
                    continue;
                }

                AddUpdateCommand addDocCmd = new AddUpdateCommand(request);
                addDocCmd.overwrite = overwrite;

                // check index control
                Map<QName, PropertyValue> properties = nodeMetaData.getProperties();
                StringPropertyValue pValue = (StringPropertyValue) properties.get(ContentModel.PROP_IS_INDEXED);
                if (pValue != null) {
                    Boolean isIndexed = Boolean.valueOf(pValue.getValue());
                    if (!isIndexed.booleanValue()) {
                        if (log.isDebugEnabled()) {
                            log.debug(".. clearing unindexed");
                        }
                        deleteNode(processor, request, node);

                        SolrInputDocument doc = createNewDoc(nodeMetaData, DOC_TYPE_UNINDEXED_NODE);
                        storeDocOnSolrContentStore(nodeMetaData, doc);
                        addDocCmd.solrDoc = doc;
                        if (recordUnindexedNodes) {
                            processor.processAdd(addDocCmd);
                        }

                        long end = System.nanoTime();
                        this.trackerStats.addNodeTime(end - start);
                        continue NEXT_NODE;
                    }
                }

                // Make sure any unindexed or error doc is removed.
                if (log.isDebugEnabled()) {
                    log.debug(".. deleting node " + node.getId());
                }
                deleteNode(processor, request, node);

                SolrInputDocument doc = createNewDoc(nodeMetaData, DOC_TYPE_NODE);
                addToNewDocAndCache(nodeMetaData, doc);
                addDocCmd.solrDoc = doc;
                processor.processAdd(addDocCmd);

                long end = System.nanoTime();
                this.trackerStats.addNodeTime(end - start);
            } // Ends iteration over nodeMetadatas
        } // Ends checking for the existence of updated or unknown node ids 

    } catch (Exception e) {
        // Bulk version failed, so do one at a time.
        for (Node node : nodes) {
            this.indexNode(node, true);
        }
    } finally {
        if (processor != null) {
            processor.finish();
        }
        if (request != null) {
            request.close();
        }
    }

}

From source file:org.pentaho.di.repository.pur.PurRepository.java

private Map<RepositoryObjectType, List<? extends SharedObjectInterface>> deepCopy(
        Map<RepositoryObjectType, List<? extends SharedObjectInterface>> orig) throws KettleException {
    Map<RepositoryObjectType, List<? extends SharedObjectInterface>> copy = new EnumMap<RepositoryObjectType, List<? extends SharedObjectInterface>>(
            RepositoryObjectType.class);
    for (Entry<RepositoryObjectType, List<? extends SharedObjectInterface>> entry : orig.entrySet()) {
        RepositoryObjectType type = entry.getKey();
        List<? extends SharedObjectInterface> value = entry.getValue();

        List<SharedObjectInterface> newValue = new ArrayList<SharedObjectInterface>(value.size());
        for (SharedObjectInterface obj : value) {
            SharedObjectInterface newValueItem;
            if (obj instanceof DatabaseMeta) {
                DatabaseMeta databaseMeta = (DatabaseMeta) ((DatabaseMeta) obj).clone();
                databaseMeta.setObjectId(((DatabaseMeta) obj).getObjectId());
                databaseMeta.clearChanged();
                newValueItem = databaseMeta;
            } else if (obj instanceof SlaveServer) {
                SlaveServer slaveServer = (SlaveServer) ((SlaveServer) obj).clone();
                slaveServer.setObjectId(((SlaveServer) obj).getObjectId());
                slaveServer.clearChanged();
                newValueItem = slaveServer;
            } else if (obj instanceof PartitionSchema) {
                PartitionSchema partitionSchema = (PartitionSchema) ((PartitionSchema) obj).clone();
                partitionSchema.setObjectId(((PartitionSchema) obj).getObjectId());
                partitionSchema.clearChanged();
                newValueItem = partitionSchema;
            } else if (obj instanceof ClusterSchema) {
                ClusterSchema clusterSchema = ((ClusterSchema) obj).clone();
                clusterSchema.setObjectId(((ClusterSchema) obj).getObjectId());
                clusterSchema.clearChanged();
                newValueItem = clusterSchema;
            } else {
                throw new KettleException("unknown shared object class");
            }
            newValue.add(newValueItem);
        }
        copy.put(type, newValue);
    }
    return copy;
}

From source file:com.att.aro.diagnostics.GraphPanel.java

private static void populateUserEventPlot(XYPlot plot, TraceData.Analysis analysis) {

    final XYIntervalSeriesCollection userInputData = new XYIntervalSeriesCollection();
    if (analysis != null) {

        // create the dataset...
        Map<UserEvent.UserEventType, XYIntervalSeries> seriesMap = new EnumMap<UserEvent.UserEventType, XYIntervalSeries>(
                UserEvent.UserEventType.class);
        for (UserEvent.UserEventType eventType : UserEvent.UserEventType.values()) {
            XYIntervalSeries series = new XYIntervalSeries(eventType);
            seriesMap.put(eventType, series);
            userInputData.addSeries(series);
        }
        // Populate the data set
        for (UserEvent event : analysis.getUserEvents()) {
            seriesMap.get(event.getEventType()).add(event.getPressTime(), event.getPressTime(),
                    event.getReleaseTime(), 0.5, 0, 1);
        }

        // Assign ToolTip to renderer
        XYItemRenderer renderer = plot.getRenderer();
        renderer.setSeriesPaint(userInputData.indexOf(UserEventType.SCREEN_LANDSCAPE), Color.BLUE);
        renderer.setSeriesPaint(userInputData.indexOf(UserEventType.SCREEN_PORTRAIT), Color.BLUE);
        renderer.setBaseToolTipGenerator(new XYToolTipGenerator() {

            @Override
            public String generateToolTip(XYDataset dataset, int series, int item) {
                UserEvent.UserEventType eventType = (UserEvent.UserEventType) userInputData.getSeries(series)
                        .getKey();
                return ResourceBundleManager.getEnumString(eventType);
            }
        });

    }

    plot.setDataset(userInputData);
}