Example usage for java.util.List.clear()

A list of usage examples for java.util.List.clear()

Introduction

On this page you can find usage examples for the java.util.List.clear() method.

Prototype

void clear();

Document

Removes all of the elements from this list (optional operation).
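
Before the project examples below, here is a minimal, self-contained sketch (not taken from any of the listed projects) illustrating the basic behavior: clear() empties a modifiable list in place, and, because it is an optional operation, a fixed-size list such as the one returned by Arrays.asList may throw UnsupportedOperationException.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class ListClearDemo {
    public static void main(String[] args) {
        // clear() removes every element from a modifiable list
        List<String> names = new ArrayList<>(Arrays.asList("a", "b", "c"));
        names.clear();
        System.out.println(names.isEmpty()); // prints: true

        // clear() is an optional operation: fixed-size lists reject it
        List<String> fixed = Arrays.asList("x", "y");
        try {
            fixed.clear();
        } catch (UnsupportedOperationException e) {
            System.out.println("clear() not supported by this list");
        }
    }
}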

Usage

From source file:com.pureinfo.srm.reports.table.MyTabeleDataHelper.java

public static Map getDataMap2d(Class _contentClass, SQLCondition _condition, String _sGroupPropRow,
        String _sGroupPropCol, String[] _caredValuesRow, String[] _caredValuesCol) throws PureException {
    IContentMgr mgr = ArkContentHelper.getContentMgrOf(_contentClass);
    List params = new ArrayList();
    IObjects datas = null;
    IStatement query = null;
    Map map2d = null;
    try {
        String sCondion = "";
        if (_condition != null)
            sCondion = _condition.toSQL(params);
        String strSQL = "SELECT COUNT(*) AS _COUNT, {this." + _sGroupPropRow + "} AS " + STR_ROW + ", {this."
                + _sGroupPropCol + "} AS " + STR_COL + " FROM {this}"
                + (sCondion == null || sCondion.trim().length() < 1 ? "" : " WHERE ") + sCondion
                + " GROUP BY {this." + _sGroupPropRow + "},{this." + _sGroupPropCol + '}';
        query = mgr.createQuery(strSQL, 0);
        if (!params.isEmpty()) {
            query.setParameters(0, params);
        }
        datas = query.executeQuery();
        map2d = iObjectsToMap2D(datas);
    } finally {
        params.clear();
        DolphinHelper.clear(datas, query);
    }
    calculateMap2d(map2d, _caredValuesRow, _caredValuesCol);
    return map2d;
}

From source file:dbconverter.dao.util.ToolKit.java

/**
 * Indexes every document within a ResultSet object
 * @param resultSet The ResultSet containing all documents to be indexed
 * @param bl Determines where to index the data
 * @param uploadInterval Determines how frequently to clear local memory
 * @return The number of documents indexed
 * @author hightowe
 */
public static int bulkIndexResultSet(ResultSet resultSet, BulkLoader bl, int uploadInterval) {
    assert resultSet != null : PARAMETER_ERROR;
    assert uploadInterval > 0 : PARAMETER_ERROR;
    assert bl != null && bl.isConfigured() : PARAMETER_ERROR;

    int count = 0;
    try {
        ResultSetMetaData rsMetaData = resultSet.getMetaData();
        int columnNumbers = rsMetaData.getColumnCount();
        List<Map> docsList = new ArrayList<>();

        while (resultSet.next()) {
            Map<String, Object> dataMap = new HashMap<>();
            for (int i = 1; i <= columnNumbers; i++) {
                dataMap.put(rsMetaData.getColumnLabel(i), resultSet.getString(i));
            }

            // append a timestamp of when this document was created
            dataMap.put(TIME_STAMP, getISOTime(TIME_STAMP_FORMAT));

            docsList.add(dataMap);
            count++;

            if (count % uploadInterval == 0) {
                bl.bulkIndex(docsList);
                logger.info("Indexed " + count + " documents " + getISOTime(TIME_STAMP_FORMAT));
                docsList.clear();
            }
        }

        if (docsList.size() > 0) {
            bl.bulkIndex(docsList);
            logger.info("Indexed " + count + " documents " + getISOTime(TIME_STAMP_FORMAT));
        }
    } catch (SQLException ex) {
        logger.error(ex);
    }

    logger.info("Total documents indexed: " + count + ", " + getISOTime(TIME_STAMP_FORMAT));

    return count;
}

From source file:jp.co.nemuzuka.service.impl.GanttServiceImpl.java

/**
 * Rebuilds the ticket list.
 * Clears the given ticket list and repopulates it from the parent map,
 * resolving each entry through the ticket map.
 * @param ticketList ticket list to rebuild
 * @param map map of ticket keys to ticket models
 * @param parentMap map of parent ticket keys to their child key entries
 */
private void createList(List<TicketModelEx> ticketList, Map<Key, TicketModelEx> map,
        Map<Key, ChildKeyListEntity> parentMap) {

    ticketList.clear();

    for (Map.Entry<Key, ChildKeyListEntity> e : parentMap.entrySet()) {
        ChildKeyListEntity targetEntity = e.getValue();
        int nestingLevel = 0;
        setTicketList(targetEntity, nestingLevel, ticketList, map);
    }
}

From source file:org.energyos.espi.common.service.impl.ElectricPowerQualitySummaryServiceImpl.java

@Override
public EntryType findEntryType(Long retailCustomerId, Long usagePointId, Long electricPowerQualitySummaryId) {
    EntryType result = null;
    try {
        // TODO - this is sub-optimal (but defers the need to understand
        // creation of an EntryType
        List<Long> temp = new ArrayList<Long>();
        temp = resourceService.findAllIdsByXPath(retailCustomerId, usagePointId,
                ElectricPowerUsageSummary.class);
        // temp.add(electricPowerQualitySummaryId);
        if (temp.contains(electricPowerQualitySummaryId)) {
            temp.clear();
            temp.add(electricPowerQualitySummaryId);
        } else {
            temp.clear();
        }
        result = (new EntryTypeIterator(resourceService, temp, ElectricPowerQualitySummary.class))
                .nextEntry(ElectricPowerQualitySummary.class);
    } catch (Exception e) {
        // TODO need a log file entry as we are going to return a null if
        // it's not found
        result = null;
    }
    return result;
}

From source file:com.hp.mqm.atrf.alm.services.AlmWrapperService.java

public void fetchRunRelatedEntities(List<Run> runs) {
    //clear cache maps
    clearMapIfSizeIsExceed(tests, 4000);
    if (clearMapIfSizeIsExceed(testFolders, 3000)) {
        tests.clear();
    }
    clearMapIfSizeIsExceed(testSets, 3000);
    clearMapIfSizeIsExceed(testConfigurations, 4000);

    //fill cache maps
    List<AlmEntity> tests = fetchTests(runs);
    fetchTestFolders(tests);
    fetchTestSets(runs);
    fetchTestConfigurations(runs);
}

From source file:com.streamsets.pipeline.stage.processor.mapper.FieldMapperProcessor.java

private void transformFieldPaths(Record record) throws StageException {
    final Map<String, List<Field>> newPathsToFields = new LinkedHashMap<>();
    final LinkedList<String> pathsToDelete = new LinkedList<>();
    final Map<Field, String> fieldsToPreviousPaths = new HashMap<>();

    record.forEachField(fv -> {
        final String fieldPath = fv.getFieldPath();
        final String fieldName = fv.getFieldName();
        final Field field = fv.getField();
        if (checkSkipFieldAndSetContextVar(fieldPath, fieldName, field, true)) {
            return;
        }
        try {
            final String newPath = mapperExpressionEval.eval(expressionVars,
                    fieldMapperConfig.mappingExpression, String.class);

            newPathsToFields.computeIfAbsent(newPath, k -> new LinkedList<>());
            newPathsToFields.get(newPath).add(field);
        } catch (ELEvalException e) {
            throw new RuntimeException(String.format("Failed to evaluate mapper expression %s: %s",
                    fieldMapperConfig.mappingExpression, e.getMessage()), e);
        }
        if (!fieldMapperConfig.maintainOriginalPaths) {
            pathsToDelete.add(fieldPath);
        }
        fieldsToPreviousPaths.put(field, fieldPath);
    });

    for (String newPath : newPathsToFields.keySet()) {
        final List<Field> mappedFields = new LinkedList<>(newPathsToFields.get(newPath));
        if (aggregationEval != null) {
            expressionVars.addVariable("fields", mappedFields);
            AggregationEL.setFieldsToPreviousPathsInContext(expressionVars, fieldsToPreviousPaths);
            final Object aggregationResult = aggregationEval.eval(expressionVars,
                    fieldMapperConfig.aggregationExpression, Object.class);
            expressionVars.addVariable("fields", null);
            if (aggregationResult instanceof Field) {
                record.set(newPath, (Field) aggregationResult);
            } else {
                final Field.Type aggregationResultType = FieldUtils.getTypeFromObject(aggregationResult);
                record.set(newPath, Field.create(aggregationResultType, aggregationResult));
            }
        } else {
            boolean replaceValues = false;
            if (record.has(newPath)) {
                final Field existingField = record.get(newPath);
                if (existingField.getType() == Field.Type.LIST) {
                    final List<Field> valueAsList = existingField.getValueAsList();
                    if (!fieldMapperConfig.appendListValues) {
                        valueAsList.clear();
                    }
                    valueAsList.addAll(mappedFields);
                } else if (fieldMapperConfig.structureChangeAllowed) {
                    replaceValues = true;
                }
            } else if (fieldMapperConfig.structureChangeAllowed) {
                replaceValues = true;
            }

            if (replaceValues) {
                if (mappedFields.size() > 1) {
                    record.set(newPath, Field.create(new LinkedList<>(mappedFields)));
                } else {
                    record.set(newPath, mappedFields.iterator().next());
                }
            }
        }
    }
    pathsToDelete.descendingIterator().forEachRemaining(path -> record.delete(path));
}

From source file:edu.uci.ics.hyracks.algebricks.rewriter.rules.IntroduceGroupByForSubplanRule.java

@Override
public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
        throws AlgebricksException {
    AbstractLogicalOperator op0 = (AbstractLogicalOperator) opRef.getValue();
    if (op0.getOperatorTag() != LogicalOperatorTag.SUBPLAN) {
        return false;
    }
    SubplanOperator subplan = (SubplanOperator) op0;

    Iterator<ILogicalPlan> plansIter = subplan.getNestedPlans().iterator();
    ILogicalPlan p = null;
    while (plansIter.hasNext()) {
        p = plansIter.next();
    }
    if (p == null) {
        return false;
    }
    if (p.getRoots().size() != 1) {
        return false;
    }
    Mutable<ILogicalOperator> subplanRoot = p.getRoots().get(0);
    AbstractLogicalOperator op1 = (AbstractLogicalOperator) subplanRoot.getValue();

    Mutable<ILogicalOperator> botRef = subplanRoot;
    AbstractLogicalOperator op2;
    // Project is optional
    if (op1.getOperatorTag() != LogicalOperatorTag.PROJECT) {
        op2 = op1;
    } else {
        ProjectOperator project = (ProjectOperator) op1;
        botRef = project.getInputs().get(0);
        op2 = (AbstractLogicalOperator) botRef.getValue();
    }
    if (op2.getOperatorTag() != LogicalOperatorTag.AGGREGATE) {
        return false;
    }
    AggregateOperator aggregate = (AggregateOperator) op2;

    Set<LogicalVariable> free = new HashSet<LogicalVariable>();
    VariableUtilities.getUsedVariables(aggregate, free);

    Mutable<ILogicalOperator> op3Ref = aggregate.getInputs().get(0);
    AbstractLogicalOperator op3 = (AbstractLogicalOperator) op3Ref.getValue();

    while (op3.getInputs().size() == 1) {
        Set<LogicalVariable> prod = new HashSet<LogicalVariable>();
        VariableUtilities.getProducedVariables(op3, prod);
        free.removeAll(prod);
        VariableUtilities.getUsedVariables(op3, free);
        botRef = op3Ref;
        op3Ref = op3.getInputs().get(0);
        op3 = (AbstractLogicalOperator) op3Ref.getValue();
    }

    if (op3.getOperatorTag() != LogicalOperatorTag.INNERJOIN
            && op3.getOperatorTag() != LogicalOperatorTag.LEFTOUTERJOIN) {
        return false;
    }
    AbstractBinaryJoinOperator join = (AbstractBinaryJoinOperator) op3;
    if (join.getCondition().getValue() == ConstantExpression.TRUE) {
        return false;
    }
    VariableUtilities.getUsedVariables(join, free);

    AbstractLogicalOperator b0 = (AbstractLogicalOperator) join.getInputs().get(0).getValue();
    // see if there's an NTS at the end of the pipeline
    NestedTupleSourceOperator outerNts = getNts(b0);
    if (outerNts == null) {
        AbstractLogicalOperator b1 = (AbstractLogicalOperator) join.getInputs().get(1).getValue();
        outerNts = getNts(b1);
        if (outerNts == null) {
            return false;
        }
    }

    Set<LogicalVariable> pkVars = computeGbyVars(outerNts, free, context);
    if (pkVars == null || pkVars.size() < 1) {
        // there is no non-trivial primary key, group-by keys are all live variables
        ILogicalOperator subplanInput = subplan.getInputs().get(0).getValue();
        pkVars = new HashSet<LogicalVariable>();
        VariableUtilities.getLiveVariables(subplanInput, pkVars);
    }
    AlgebricksConfig.ALGEBRICKS_LOGGER.fine("Found FD for introducing group-by: " + pkVars);

    Mutable<ILogicalOperator> rightRef = join.getInputs().get(1);
    LogicalVariable testForNull = null;
    AbstractLogicalOperator right = (AbstractLogicalOperator) rightRef.getValue();
    switch (right.getOperatorTag()) {
    case UNNEST: {
        UnnestOperator innerUnnest = (UnnestOperator) right;
        // Select [ $y != null ]
        testForNull = innerUnnest.getVariable();
        break;
    }
    case RUNNINGAGGREGATE: {
        ILogicalOperator inputToRunningAggregate = right.getInputs().get(0).getValue();
        Set<LogicalVariable> producedVars = new ListSet<LogicalVariable>();
        VariableUtilities.getProducedVariables(inputToRunningAggregate, producedVars);
        if (!producedVars.isEmpty()) {
            // Select [ $y != null ]
            testForNull = producedVars.iterator().next();
        }
        break;
    }
    case DATASOURCESCAN: {
        DataSourceScanOperator innerScan = (DataSourceScanOperator) right;
        // Select [ $y != null ]
        if (innerScan.getVariables().size() == 1) {
            testForNull = innerScan.getVariables().get(0);
        }
        break;
    }
    }
    if (testForNull == null) {
        testForNull = context.newVar();
        AssignOperator tmpAsgn = new AssignOperator(testForNull,
                new MutableObject<ILogicalExpression>(ConstantExpression.TRUE));
        tmpAsgn.getInputs().add(new MutableObject<ILogicalOperator>(rightRef.getValue()));
        rightRef.setValue(tmpAsgn);
        context.computeAndSetTypeEnvironmentForOperator(tmpAsgn);
    }

    IFunctionInfo finfoEq = context.getMetadataProvider().lookupFunction(AlgebricksBuiltinFunctions.IS_NULL);
    ILogicalExpression isNullTest = new ScalarFunctionCallExpression(finfoEq,
            new MutableObject<ILogicalExpression>(new VariableReferenceExpression(testForNull)));
    IFunctionInfo finfoNot = context.getMetadataProvider().lookupFunction(AlgebricksBuiltinFunctions.NOT);
    ScalarFunctionCallExpression nonNullTest = new ScalarFunctionCallExpression(finfoNot,
            new MutableObject<ILogicalExpression>(isNullTest));
    SelectOperator selectNonNull = new SelectOperator(new MutableObject<ILogicalExpression>(nonNullTest), false,
            null);
    GroupByOperator g = new GroupByOperator();
    Mutable<ILogicalOperator> newSubplanRef = new MutableObject<ILogicalOperator>(subplan);
    NestedTupleSourceOperator nts = new NestedTupleSourceOperator(new MutableObject<ILogicalOperator>(g));
    opRef.setValue(g);
    selectNonNull.getInputs().add(new MutableObject<ILogicalOperator>(nts));

    List<Mutable<ILogicalOperator>> prodInpList = botRef.getValue().getInputs();
    prodInpList.clear();
    prodInpList.add(new MutableObject<ILogicalOperator>(selectNonNull));

    ILogicalPlan gPlan = new ALogicalPlanImpl(new MutableObject<ILogicalOperator>(subplanRoot.getValue()));
    g.getNestedPlans().add(gPlan);
    subplanRoot.setValue(op3Ref.getValue());
    g.getInputs().add(newSubplanRef);

    HashSet<LogicalVariable> underVars = new HashSet<LogicalVariable>();
    VariableUtilities.getLiveVariables(subplan.getInputs().get(0).getValue(), underVars);
    underVars.removeAll(pkVars);
    Map<LogicalVariable, LogicalVariable> mappedVars = buildVarExprList(pkVars, context, g, g.getGroupByList());
    context.updatePrimaryKeys(mappedVars);
    for (LogicalVariable uv : underVars) {
        g.getDecorList().add(new Pair<LogicalVariable, Mutable<ILogicalExpression>>(null,
                new MutableObject<ILogicalExpression>(new VariableReferenceExpression(uv))));
    }
    OperatorPropertiesUtil.typeOpRec(subplanRoot, context);
    OperatorPropertiesUtil.typeOpRec(gPlan.getRoots().get(0), context);
    context.computeAndSetTypeEnvironmentForOperator(g);
    return true;
}

From source file:edu.uci.ics.hyracks.algebricks.rewriter.rules.PushSelectIntoJoinRule.java

@Override
public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
        throws AlgebricksException {
    Collection<LogicalVariable> joinLiveVarsLeft = new HashSet<LogicalVariable>();
    Collection<LogicalVariable> joinLiveVarsRight = new HashSet<LogicalVariable>();
    Collection<LogicalVariable> liveInOpsToPushLeft = new HashSet<LogicalVariable>();
    Collection<LogicalVariable> liveInOpsToPushRight = new HashSet<LogicalVariable>();

    List<ILogicalOperator> pushedOnLeft = new ArrayList<ILogicalOperator>();
    List<ILogicalOperator> pushedOnRight = new ArrayList<ILogicalOperator>();
    LinkedList<ILogicalOperator> notPushedStack = new LinkedList<ILogicalOperator>();
    Collection<LogicalVariable> usedVars = new HashSet<LogicalVariable>();
    Collection<LogicalVariable> producedVars = new HashSet<LogicalVariable>();

    AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
    if (op.getOperatorTag() != LogicalOperatorTag.SELECT) {
        return false;
    }
    SelectOperator select = (SelectOperator) op;
    Mutable<ILogicalOperator> opRef2 = op.getInputs().get(0);
    AbstractLogicalOperator son = (AbstractLogicalOperator) opRef2.getValue();
    AbstractLogicalOperator op2 = son;
    boolean needToPushOps = false;
    while (son.isMap()) {
        needToPushOps = true;
        Mutable<ILogicalOperator> opRefLink = son.getInputs().get(0);
        son = (AbstractLogicalOperator) opRefLink.getValue();
    }

    if (son.getOperatorTag() != LogicalOperatorTag.INNERJOIN
            && son.getOperatorTag() != LogicalOperatorTag.LEFTOUTERJOIN) {
        return false;
    }
    boolean isLoj = son.getOperatorTag() == LogicalOperatorTag.LEFTOUTERJOIN;
    AbstractBinaryJoinOperator join = (AbstractBinaryJoinOperator) son;

    Mutable<ILogicalOperator> joinBranchLeftRef = join.getInputs().get(0);
    Mutable<ILogicalOperator> joinBranchRightRef = join.getInputs().get(1);

    if (needToPushOps) {
        ILogicalOperator joinBranchLeft = joinBranchLeftRef.getValue();
        ILogicalOperator joinBranchRight = joinBranchRightRef.getValue();
        VariableUtilities.getLiveVariables(joinBranchLeft, joinLiveVarsLeft);
        VariableUtilities.getLiveVariables(joinBranchRight, joinLiveVarsRight);
        Mutable<ILogicalOperator> opIterRef = opRef2;
        ILogicalOperator opIter = op2;
        while (opIter != join) {
            LogicalOperatorTag tag = ((AbstractLogicalOperator) opIter).getOperatorTag();
            if (tag == LogicalOperatorTag.PROJECT) {
                notPushedStack.addFirst(opIter);
            } else {
                VariableUtilities.getUsedVariables(opIter, usedVars);
                VariableUtilities.getProducedVariables(opIter, producedVars);
                if (joinLiveVarsLeft.containsAll(usedVars)) {
                    pushedOnLeft.add(opIter);
                    liveInOpsToPushLeft.addAll(producedVars);
                } else if (joinLiveVarsRight.containsAll(usedVars)) {
                    pushedOnRight.add(opIter);
                    liveInOpsToPushRight.addAll(producedVars);
                } else {
                    return false;
                }
            }
            opIterRef = opIter.getInputs().get(0);
            opIter = opIterRef.getValue();
        }
        if (isLoj && pushedOnLeft.isEmpty()) {
            return false;
        }
    }

    boolean intersectsAllBranches = true;
    boolean[] intersectsBranch = new boolean[join.getInputs().size()];
    LinkedList<LogicalVariable> selectVars = new LinkedList<LogicalVariable>();
    select.getCondition().getValue().getUsedVariables(selectVars);
    int i = 0;
    for (Mutable<ILogicalOperator> branch : join.getInputs()) {
        LinkedList<LogicalVariable> branchVars = new LinkedList<LogicalVariable>();
        VariableUtilities.getLiveVariables(branch.getValue(), branchVars);
        if (i == 0) {
            branchVars.addAll(liveInOpsToPushLeft);
        } else {
            branchVars.addAll(liveInOpsToPushRight);
        }
        if (OperatorPropertiesUtil.disjoint(selectVars, branchVars)) {
            intersectsAllBranches = false;
        } else {
            intersectsBranch[i] = true;
        }
        i++;
    }
    if (!intersectsBranch[0] && !intersectsBranch[1]) {
        return false;
    }
    if (needToPushOps) {
        pushOps(pushedOnLeft, joinBranchLeftRef, context);
        pushOps(pushedOnRight, joinBranchRightRef, context);
    }
    if (intersectsAllBranches) {
        addCondToJoin(select, join, context);
    } else { // push down
        Iterator<Mutable<ILogicalOperator>> branchIter = join.getInputs().iterator();
        ILogicalExpression selectCondition = select.getCondition().getValue();
        boolean lojToInner = false;
        for (int j = 0; j < intersectsBranch.length; j++) {
            Mutable<ILogicalOperator> branch = branchIter.next();
            boolean inter = intersectsBranch[j];
            if (inter) {
                if (j > 0 && isLoj) {
                    // For a left outer join, if the select condition is a
                    // not-null filter, rewrite the left outer join to an
                    // inner join.
                    if (containsNotNullFiltering(selectCondition)) {
                        lojToInner = true;
                    }
                }
                if ((j > 0 && isLoj) && containsNullFiltering(selectCondition)) {
                    // Select is-null($$var) cannot be pushed in the right branch of a LOJ;
                    notPushedStack.addFirst(select);
                } else {
                    // Conditions for the left branch can always be pushed.
                    // Other conditions can be pushed to the right branch of a LOJ.
                    copySelectToBranch(select, branch, context);
                }
            }
        }
        if (lojToInner) {
            // Rewrite the left outer join to an inner join.
            InnerJoinOperator innerJoin = new InnerJoinOperator(join.getCondition());
            innerJoin.getInputs().addAll(join.getInputs());
            join = innerJoin;
            context.computeAndSetTypeEnvironmentForOperator(join);
        }
    }
    ILogicalOperator top = join;
    for (ILogicalOperator npOp : notPushedStack) {
        List<Mutable<ILogicalOperator>> npInpList = npOp.getInputs();
        npInpList.clear();
        npInpList.add(new MutableObject<ILogicalOperator>(top));
        context.computeAndSetTypeEnvironmentForOperator(npOp);
        top = npOp;
    }
    opRef.setValue(top);
    return true;

}

From source file:com.github.ibm.domino.client.CalendarClientTest.java

@Test
public void test9DeleteEvents() {
    System.out.println("deleteEvents");
    DominoRestClient instance = initClient();
    java.util.Calendar calendar = java.util.Calendar.getInstance();

    List<String> eventIds = new ArrayList<>();
    eventIds.add("EvEnTiD...wE...DO..not...EXPECT...to...FIND...EVER!!!");
    List<String> eventsNotDeleted = instance.deleteEvent(eventIds);
    assertTrue(eventsNotDeleted.size() == 1);

    instance.since(ZonedDateTime.now().minusHours(1));
    List<CalendarEvent> result = instance.getEvents();

    eventIds.clear();
    result.stream().forEach((calendarEvent) -> {
        eventIds.add(calendarEvent.getId());
    });
    eventsNotDeleted = instance.deleteEvent(eventIds);
    assertTrue(eventsNotDeleted.isEmpty());
}

From source file:info.raack.appliancelabeler.datacollector.EnergyDataLoader.java

private void removeDuplicates(List<SecondData> data) {
    // remove any duplicates
    Map<Long, SecondData> singleDatas = new HashMap<Long, SecondData>();
    for (SecondData point : data) {
        singleDatas.put(point.getCalLong(), point);
    }

    data.clear();
    data.addAll(singleDatas.values());
}