Example usage for java.util Collections min

List of usage examples for java.util Collections min

Introduction

On this page you can find example usages of java.util.Collections.min.

Prototype

public static <T extends Object & Comparable<? super T>> T min(Collection<? extends T> coll) 

Document

Returns the minimum element of the given collection, according to the natural ordering of its elements.
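
The minimum is determined by the elements' natural Comparable ordering, and the collection must be non-empty; calling min on an empty collection throws NoSuchElementException. Before the real-world excerpts below, here is a minimal sketch (class name and sample values are illustrative only):

import java.util.Arrays;
import java.util.Collections;
import java.util.List;

public class CollectionsMinDemo {
    public static void main(String[] args) {
        // Natural ordering of Integer: numeric comparison
        List<Integer> numbers = Arrays.asList(42, 7, 19, 3, 88);
        System.out.println(Collections.min(numbers)); // prints 3

        // Any Comparable element type works, e.g. String (lexicographic order)
        List<String> words = Arrays.asList("pear", "apple", "plum");
        System.out.println(Collections.min(words)); // prints apple
    }
}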

Usage

From source file:de.hub.cs.dbis.aeolus.queries.utils.TimestampMergerTest.java

@SuppressWarnings("unchecked")
private void testExecuteMerge(int numberOfProducers, int numberOfTasks, double duplicatesFraction,
        boolean tsIndexOrName, int minNumberOfAttributes, int maxNumberOfAttributes) {
    int createdTasks = this.mockInputs(numberOfProducers, numberOfTasks, tsIndexOrName, minNumberOfAttributes,
            maxNumberOfAttributes);

    final int numberOfTuples = createdTasks * 10 + this.r.nextInt(createdTasks * (1 + this.r.nextInt(10)));
    TimestampOrderChecker checkerBolt;
    TimestampMerger merger;
    if (tsIndexOrName) {
        checkerBolt = new TimestampOrderChecker(forwarder, 0, duplicatesFraction != 0);
        merger = new TimestampMerger(checkerBolt, 0);
    } else {
        checkerBolt = new TimestampOrderChecker(forwarder, "ts", duplicatesFraction != 0);
        merger = new TimestampMerger(checkerBolt, "ts");
    }
    TestOutputCollector collector = new TestOutputCollector();

    merger.prepare(null, this.topologyContextMock, new OutputCollector(collector));

    this.input = new LinkedList[createdTasks];
    for (int i = 0; i < createdTasks; ++i) {
        this.input[i] = new LinkedList<Tuple>();
    }
    this.result.clear();

    int numberDistinctValues = 1;
    int counter = 0;
    while (true) {
        int taskId = this.r.nextInt(createdTasks);

        Fields schema = this.contextMock.getComponentOutputFields(this.contextMock.getComponentId(taskId),
                null);
        int numberOfAttributes = schema.size();
        List<Object> value = new ArrayList<Object>(numberOfAttributes);
        for (int i = 0; i < numberOfAttributes; ++i) {
            value.add(new Character((char) (32 + this.r.nextInt(95))));
        }
        Long ts = new Long(numberDistinctValues - 1);
        value.set(schema.fieldIndex("ts"), ts);

        this.result.add(value);
        this.input[taskId].add(new TupleImpl(this.contextMock, value, taskId, null));

        if (++counter == numberOfTuples) {
            break;
        }

        if (1 - this.r.nextDouble() > duplicatesFraction) {
            ++numberDistinctValues;
        }
    }

    int[] max = new int[createdTasks];
    int[][] bucketSums = new int[createdTasks][numberDistinctValues];
    for (int i = 0; i < numberOfTuples; ++i) {
        int taskId = this.r.nextInt(createdTasks);

        while (this.input[taskId].size() == 0) {
            taskId = (taskId + 1) % createdTasks;
        }

        Tuple t = this.input[taskId].removeFirst();
        max[taskId] = t.getLongByField("ts").intValue();
        ++bucketSums[taskId][max[taskId]];
        merger.execute(t);
    }

    int stillBuffered = numberOfTuples;
    int smallestMax = Collections.min(Arrays.asList(ArrayUtils.toObject(max))).intValue();
    for (int i = 0; i < createdTasks; ++i) {
        for (int j = 0; j <= smallestMax; ++j) {
            stillBuffered -= bucketSums[i][j];
        }
    }

    Assert.assertEquals(this.result.subList(0, this.result.size() - stillBuffered),
            collector.output.get(Utils.DEFAULT_STREAM_ID));
    Assert.assertTrue(collector.acked.size() == numberOfTuples - stillBuffered);
    Assert.assertTrue(collector.failed.size() == 0);

}

From source file:de.hub.cs.dbis.aeolus.utils.TimestampMergerTest.java

@SuppressWarnings("unchecked")
private void testExecuteMerge(int numberOfProducers, int numberOfTasks, double duplicatesFraction,
        boolean tsIndexOrName, int minNumberOfAttributes, int maxNumberOfAttributes) {
    int createdTasks = this.mockInputs(numberOfProducers, numberOfTasks, tsIndexOrName, minNumberOfAttributes,
            maxNumberOfAttributes);

    final int numberOfTuples = createdTasks * 10 + this.r.nextInt(createdTasks * (1 + this.r.nextInt(10)));
    TimestampOrderChecker checkerBolt;
    TimestampMerger merger;
    if (tsIndexOrName) {
        checkerBolt = new TimestampOrderChecker(forwarder, 0, duplicatesFraction != 0);
        merger = new TimestampMerger(checkerBolt, 0);
    } else {
        checkerBolt = new TimestampOrderChecker(forwarder, "ts", duplicatesFraction != 0);
        merger = new TimestampMerger(checkerBolt, "ts");
    }
    TestOutputCollector collector = new TestOutputCollector();

    merger.prepare(null, this.topologyContextMock, new OutputCollector(collector));

    this.input = new LinkedList[createdTasks];
    for (int i = 0; i < createdTasks; ++i) {
        this.input[i] = new LinkedList<Tuple>();
    }
    this.result.clear();

    int numberDistinctValues = 1;
    int counter = 0;
    while (true) {
        int taskId = this.r.nextInt(createdTasks);

        Fields schema = this.contextMock.getComponentOutputFields(this.contextMock.getComponentId(taskId),
                null);
        int numberOfAttributes = schema.size();
        List<Object> value = new ArrayList<Object>(numberOfAttributes);
        for (int i = 0; i < numberOfAttributes; ++i) {
            value.add(new Character((char) (32 + this.r.nextInt(95))));
        }
        Long ts = new Long(numberDistinctValues - 1);
        value.set(schema.fieldIndex("ts"), ts);

        this.result.add(value);
        this.input[taskId].add(new TupleImpl(this.contextMock, value, taskId, null));

        if (++counter == numberOfTuples) {
            break;
        }

        if (1 - this.r.nextDouble() > duplicatesFraction) {
            ++numberDistinctValues;
        }
    }

    int[] max = new int[createdTasks];
    for (int i = 0; i < max.length; ++i) {
        max[i] = -1;
    }
    int[][] bucketSums = new int[createdTasks][numberDistinctValues];
    for (int i = 0; i < numberOfTuples; ++i) {
        int taskId = this.r.nextInt(createdTasks);

        while (this.input[taskId].size() == 0) {
            taskId = (taskId + 1) % createdTasks;
        }

        Tuple t = this.input[taskId].removeFirst();
        max[taskId] = t.getLongByField("ts").intValue();
        ++bucketSums[taskId][max[taskId]];
        merger.execute(t);
    }

    int stillBuffered = numberOfTuples;
    int smallestMax = Collections.min(Arrays.asList(ArrayUtils.toObject(max))).intValue();
    for (int i = 0; i < createdTasks; ++i) {
        for (int j = 0; j <= smallestMax; ++j) {
            stillBuffered -= bucketSums[i][j];
        }
    }
    List<List<Object>> expectedResult = this.result.subList(0, this.result.size() - stillBuffered);

    if (expectedResult.size() > 0) {
        Assert.assertEquals(expectedResult, collector.output.get(Utils.DEFAULT_STREAM_ID));
    } else {
        Assert.assertNull(collector.output.get(Utils.DEFAULT_STREAM_ID));
    }
    Assert.assertTrue(collector.acked.size() == numberOfTuples - stillBuffered);
    Assert.assertTrue(collector.failed.size() == 0);

}

From source file:net.sourceforge.fenixedu.applicationTier.Servico.resourceAllocationManager.SearchExecutionCourses.java

private List<InfoExecutionCourse> fillInfoExecutionCourses(final AcademicInterval academicInterval,
        List<ExecutionCourse> executionCourses) {
    List<InfoExecutionCourse> result;
    result = (List<InfoExecutionCourse>) CollectionUtils.collect(executionCourses, new Transformer() {
        @Override
        public Object transform(Object arg0) {
            InfoExecutionCourse infoExecutionCourse = null;
            infoExecutionCourse = getOccupancyLevels(arg0);
            getTeacherReportInformation(infoExecutionCourse, arg0);
            return infoExecutionCourse;
        }

        private void getTeacherReportInformation(InfoExecutionCourse infoExecutionCourse, Object arg0) {

            ExecutionCourse executionCourse = (ExecutionCourse) arg0;

            if (executionCourse.getAssociatedCurricularCoursesSet() != null) {

                InfoSiteEvaluationStatistics infoSiteEvaluationStatistics = new InfoSiteEvaluationStatistics();
                int enrolledInCurricularCourse = 0;
                int evaluated = 0;
                int approved = 0;
                Iterator<CurricularCourse> iter = executionCourse.getAssociatedCurricularCoursesSet()
                        .iterator();

                while (iter.hasNext()) {
                    CurricularCourse curricularCourse = iter.next();

                    final List<Enrolment> enroled = curricularCourse
                            .getEnrolmentsByAcademicInterval(academicInterval);
                    enrolledInCurricularCourse += enroled.size();
                    evaluated = Enrolment.countEvaluated(enroled);
                    approved = Enrolment.countApproved(enroled);
                }
                infoSiteEvaluationStatistics.setEnrolled(Integer.valueOf(enrolledInCurricularCourse));
                infoSiteEvaluationStatistics.setEvaluated(Integer.valueOf(evaluated));
                infoSiteEvaluationStatistics.setApproved(Integer.valueOf(approved));

                infoExecutionCourse.setInfoSiteEvaluationStatistics(infoSiteEvaluationStatistics);
            }
        }

        private InfoExecutionCourse getOccupancyLevels(Object arg0) {

            InfoExecutionCourse infoExecutionCourse;
            ExecutionCourse executionCourse = (ExecutionCourse) arg0;

            Integer theoreticalCapacity = Integer.valueOf(0);
            Integer theoPraticalCapacity = Integer.valueOf(0);
            Integer praticalCapacity = Integer.valueOf(0);
            Integer labCapacity = Integer.valueOf(0);
            Integer doubtsCapacity = Integer.valueOf(0);
            Integer reserveCapacity = Integer.valueOf(0);

            Integer semCapacity = Integer.valueOf(0);
            Integer probCapacity = Integer.valueOf(0);
            Integer fieldCapacity = Integer.valueOf(0);
            Integer trainCapacity = Integer.valueOf(0);
            Integer tutCapacity = Integer.valueOf(0);

            Set<Shift> shifts = executionCourse.getAssociatedShifts();
            Iterator<Shift> iterator = shifts.iterator();

            while (iterator.hasNext()) {

                Shift shift = iterator.next();

                if (shift.containsType(ShiftType.TEORICA)) {
                    theoreticalCapacity = Integer
                            .valueOf(theoreticalCapacity.intValue() + shift.getLotacao().intValue());

                } else if (shift.containsType(ShiftType.TEORICO_PRATICA)) {
                    theoPraticalCapacity = Integer
                            .valueOf(theoPraticalCapacity.intValue() + shift.getLotacao().intValue());

                } else if (shift.containsType(ShiftType.DUVIDAS)) {
                    doubtsCapacity = Integer.valueOf(doubtsCapacity.intValue() + shift.getLotacao().intValue());

                } else if (shift.containsType(ShiftType.LABORATORIAL)) {
                    labCapacity = Integer.valueOf(labCapacity.intValue() + shift.getLotacao().intValue());

                } else if (shift.containsType(ShiftType.PRATICA)) {
                    praticalCapacity = Integer
                            .valueOf(praticalCapacity.intValue() + shift.getLotacao().intValue());

                } else if (shift.containsType(ShiftType.RESERVA)) {
                    reserveCapacity = Integer
                            .valueOf(reserveCapacity.intValue() + shift.getLotacao().intValue());

                } else if (shift.containsType(ShiftType.SEMINARY)) {
                    semCapacity = Integer.valueOf(semCapacity.intValue() + shift.getLotacao().intValue());

                } else if (shift.containsType(ShiftType.PROBLEMS)) {
                    probCapacity = Integer.valueOf(probCapacity.intValue() + shift.getLotacao().intValue());

                } else if (shift.containsType(ShiftType.FIELD_WORK)) {
                    fieldCapacity = Integer.valueOf(fieldCapacity.intValue() + shift.getLotacao().intValue());

                } else if (shift.containsType(ShiftType.TRAINING_PERIOD)) {
                    trainCapacity = Integer.valueOf(trainCapacity.intValue() + shift.getLotacao().intValue());

                } else if (shift.containsType(ShiftType.TUTORIAL_ORIENTATION)) {
                    tutCapacity = Integer.valueOf(tutCapacity.intValue() + shift.getLotacao().intValue());
                }
            }

            infoExecutionCourse = InfoExecutionCourse.newInfoFromDomain(executionCourse);
            List<Integer> capacities = new ArrayList<Integer>();

            if (theoreticalCapacity.intValue() != 0) {
                capacities.add(theoreticalCapacity);
            }
            if (theoPraticalCapacity.intValue() != 0) {
                capacities.add(theoPraticalCapacity);
            }
            if (doubtsCapacity.intValue() != 0) {
                capacities.add(doubtsCapacity);
            }
            if (labCapacity.intValue() != 0) {
                capacities.add(labCapacity);
            }
            if (praticalCapacity.intValue() != 0) {
                capacities.add(praticalCapacity);
            }
            if (reserveCapacity.intValue() != 0) {
                capacities.add(reserveCapacity);
            }

            if (semCapacity.intValue() != 0) {
                capacities.add(semCapacity);
            }
            if (probCapacity.intValue() != 0) {
                capacities.add(probCapacity);
            }
            if (fieldCapacity.intValue() != 0) {
                capacities.add(fieldCapacity);
            }
            if (trainCapacity.intValue() != 0) {
                capacities.add(trainCapacity);
            }
            if (tutCapacity.intValue() != 0) {
                capacities.add(tutCapacity);
            }

            int total = 0;

            if (!capacities.isEmpty()) {
                total = (Collections.min(capacities)).intValue();
            }

            if (total == 0) {
                infoExecutionCourse.setOccupancy(Double.valueOf(-1));
            } else {
                infoExecutionCourse.setOccupancy(NumberUtils.formatNumber(Double.valueOf(
                        (Double.valueOf(executionCourse.getAttendsSet().size()).floatValue() * 100 / total)),
                        1));
            }
            return infoExecutionCourse;
        }
    });

    return result;
}

From source file:org.apache.hadoop.hbase.regionserver.wal.SequenceIdAccounting.java

/**
 * @param encodedRegionName Region to flush.
 * @param families Families to flush. May be a subset of all families in the region.
 * @return Returns {@link HConstants#NO_SEQNUM} if we are flushing the whole region OR if
 * we are flushing a subset of all families but there are no edits in those families not
 * being flushed; in other words, this is effectively same as a flush of all of the region
 * though we were passed a subset of regions. Otherwise, it returns the sequence id of the
 * oldest/lowest outstanding edit.
 */
Long startCacheFlush(final byte[] encodedRegionName, final Set<byte[]> families) {
    Map<byte[], Long> oldSequenceIds = null;
    Long lowestUnflushedInRegion = HConstants.NO_SEQNUM;
    synchronized (tieLock) {
        Map<byte[], Long> m = this.lowestUnflushedSequenceIds.get(encodedRegionName);
        if (m != null) {
            // NOTE: Removal from this.lowestUnflushedSequenceIds must be done in controlled
            // circumstance because another concurrent thread now may add sequenceids for this family
            // (see above in getOrCreateLowestSequenceId). Make sure you are ok with this. Usually it
            // is fine because updates are blocked when this method is called. Make sure!!!
            for (byte[] familyName : families) {
                Long seqId = m.remove(familyName);
                if (seqId != null) {
                    if (oldSequenceIds == null)
                        oldSequenceIds = Maps.newTreeMap(Bytes.BYTES_COMPARATOR);
                    oldSequenceIds.put(familyName, seqId);
                }
            }
            if (oldSequenceIds != null && !oldSequenceIds.isEmpty()) {
                if (this.flushingSequenceIds.put(encodedRegionName, oldSequenceIds) != null) {
                    LOG.warn("Flushing Map not cleaned up for " + Bytes.toString(encodedRegionName)
                            + ", sequenceid=" + oldSequenceIds);
                }
            }
            if (m.isEmpty()) {
                // Remove it otherwise it will be in oldestUnflushedStoreSequenceIds for ever
                // even if the region is already moved to other server.
                // Do not worry about data racing, we held write lock of region when calling
                // startCacheFlush, so no one can add value to the map we removed.
                this.lowestUnflushedSequenceIds.remove(encodedRegionName);
            } else {
                // Flushing a subset of the region families. Return the sequence id of the oldest entry.
                lowestUnflushedInRegion = Collections.min(m.values());
            }
        }
    }
    // Do this check outside lock.
    if (oldSequenceIds != null && oldSequenceIds.isEmpty()) {
        // TODO: if we have no oldStoreSeqNum, and WAL is not disabled, presumably either
        // the region is already flushing (which would make this call invalid), or there
        // were no appends after last flush, so why are we starting flush? Maybe we should
        // assert not empty. Less rigorous, but safer, alternative is telling the caller to stop.
        // For now preserve old logic.
        LOG.warn("Couldn't find oldest sequenceid for " + Bytes.toString(encodedRegionName));
    }
    return lowestUnflushedInRegion;
}

From source file:org.dawnsci.plotting.tools.powdercheck.PowderCheckJob.java

private List<PowderCheckResult> fitPeaksToTrace(final Dataset xIn, final Dataset yIn, Dataset baselineIn) {

    resultList.clear();

    List<HKL> spacings = CalibrationFactory.getCalibrationStandards().getCalibrant().getHKLs();
    final double[] qVals = new double[spacings.size()];

    for (int i = 0; i < spacings.size(); i++) {
        if (xAxis == XAxis.ANGLE)
            qVals[i] = 2 * Math.toDegrees(Math.asin((metadata.getDiffractionCrystalEnvironment().getWavelength()
                    / (2 * spacings.get(i).getDNano() * 10))));
        else
            qVals[i] = (Math.PI * 2) / (spacings.get(i).getDNano() * 10);
    }

    double qMax = xIn.max().doubleValue();
    double qMin = xIn.min().doubleValue();

    List<Double> qList = new ArrayList<Double>();

    int count = 0;

    for (double q : qVals) {
        if (q > qMax || q < qMin)
            continue;
        count++;
        qList.add(q);
    }

    double minPeak = Collections.min(qList);
    double maxPeak = Collections.max(qList);

    int minXidx = ROISliceUtils.findPositionOfClosestValueInAxis(xIn, minPeak) - EDGE_PIXEL_NUMBER;
    int maxXidx = ROISliceUtils.findPositionOfClosestValueInAxis(xIn, maxPeak) + EDGE_PIXEL_NUMBER;

    int maxSize = xIn.getSize();

    minXidx = minXidx < 0 ? 0 : minXidx;
    maxXidx = maxXidx > maxSize - 1 ? maxSize - 1 : maxXidx;

    final Dataset x = xIn.getSlice(new int[] { minXidx }, new int[] { maxXidx }, null);
    final Dataset y = yIn.getSlice(new int[] { minXidx }, new int[] { maxXidx }, null);
    y.setName("Fit");
    Dataset baseline = baselineIn.getSlice(new int[] { minXidx }, new int[] { maxXidx }, null);

    List<APeak> peaks = Generic1DFitter.fitPeaks(x, y, Gaussian.class, count + 10);

    List<PowderCheckResult> initResults = new ArrayList<PowderCheckResult>();

    CompositeFunction cf = new CompositeFunction();

    for (APeak peak : peaks)
        cf.addFunction(peak);

    double limit = findMatchLimit(qList, cf);

    while (cf.getNoOfFunctions() != 0 && !qList.isEmpty())
        findMatches(initResults, qList, cf, limit);

    final CompositeFunction cfFinal = compositeFunctionFromResults(initResults);

    double[] initParam = new double[cfFinal.getFunctions().length * 3];

    {
        int i = 0;
        for (IFunction func : cfFinal.getFunctions()) {
            initParam[i++] = func.getParameter(0).getValue();
            initParam[i++] = func.getParameter(1).getValue();
            initParam[i++] = func.getParameter(2).getValue();
        }
    }

    final Dataset yfit = DatasetFactory.zeros(x, Dataset.FLOAT64);

    MultivariateOptimizer opt = new SimplexOptimizer(REL_TOL, ABS_TOL);

    MultivariateFunction fun = new MultivariateFunction() {

        @Override
        public double value(double[] arg0) {

            int j = 0;
            for (IFunction func : cfFinal.getFunctions()) {

                double[] p = func.getParameterValues();
                p[0] = arg0[j++];
                p[1] = arg0[j++];
                p[2] = arg0[j++];
                func.setParameterValues(p);
            }

            for (int i = 0; i < yfit.getSize(); i++) {
                yfit.set(cfFinal.val(x.getDouble(i)), i);
            }

            return y.residual(yfit);
        }
    };

    opt.optimize(new InitialGuess(initParam), GoalType.MINIMIZE, new ObjectiveFunction(fun),
            new MaxEval(MAX_EVAL), new NelderMeadSimplex(initParam.length));

    Dataset fit = Maths.add(yfit, baseline);
    fit.setName("Fit");
    Dataset residual = Maths.subtract(y, yfit);
    residual.setName("Residual");

    system.updatePlot1D(x, Arrays.asList(new IDataset[] { fit, residual }), null);
    setPlottingSystemAxes();
    for (int i = 0; i < cfFinal.getNoOfFunctions(); i++) {
        resultList.add(new PowderCheckResult(cfFinal.getFunction(i), initResults.get(i).getCalibrantQValue()));
    }

    return resultList;

}

From source file:io.hummer.util.test.GenericTestResult.java

public double getMinimum(String valueName) {
    try {
        return Collections.min(getValues(valueName));
    } catch (RuntimeException e) {
        logger.warn("Unable to get minimum value for key: " + valueName);
        throw e;
    }
}

From source file:eionet.cr.dao.virtuoso.VirtuosoEndpointHarvestQueryDAO.java

@Override
public void move(String endpointUrl, Set<Integer> ids, int direction) throws DAOException {

    if (StringUtils.isBlank(endpointUrl) || ids == null || ids.isEmpty()) {
        return;
    }

    if (direction == 0) {
        throw new IllegalArgumentException("Direction must not be 0!");
    }

    // Prepare map where we can get queries by position, also find the max and min positions.
    LinkedHashMap<Integer, EndpointHarvestQueryDTO> queriesByPos = getQueriesByPosition(endpointUrl);
    if (queriesByPos.isEmpty()) {
        return;
    }
    Set<Integer> positions = queriesByPos.keySet();
    int maxPos = Collections.max(positions);
    int minPos = Collections.min(positions);

    Connection conn = null;
    try {
        conn = getSQLConnection();
        conn.setAutoCommit(false);

        // If even one query is already at position 1 then moving up is not considered possible.
        // And conversely, if even one query is already at the last position, then moving down
        // is not considered possible either.

        boolean isMovingPossible = true;
        List<Integer> selectedPositions = new ArrayList<Integer>();
        List<EndpointHarvestQueryDTO> queries = new ArrayList<EndpointHarvestQueryDTO>(queriesByPos.values());
        for (EndpointHarvestQueryDTO query : queries) {

            if (ids.contains(query.getId())) {

                int pos = query.getPosition();
                if ((direction < 0 && pos == minPos) || (direction > 0 && pos == maxPos)) {
                    isMovingPossible = false;
                } else {
                    selectedPositions.add(pos);
                }
            }
        }

        if (isMovingPossible) {

            if (direction < 0) {
                for (Integer selectedPosition : selectedPositions) {

                    EndpointHarvestQueryDTO queryToMove = queriesByPos.get(selectedPosition);
                    int i = queries.indexOf(queryToMove);
                    queries.set(i, queries.get(i - 1));
                    queries.set(i - 1, queryToMove);
                }
            } else {
                for (int j = selectedPositions.size() - 1; j >= 0; j--) {

                    EndpointHarvestQueryDTO queryToMove = queriesByPos.get(selectedPositions.get(j));
                    int i = queries.indexOf(queryToMove);
                    queries.set(i, queries.get(i + 1));
                    queries.set(i + 1, queryToMove);
                }
            }
        }

        SQLUtil.executeUpdate(INCREASE_POSITIONS_SQL, Arrays.asList(maxPos, endpointUrl), conn);
        for (int i = 0; i < queries.size(); i++) {
            SQLUtil.executeUpdate(UPDATE_POSITION_SQL, Arrays.asList(i + 1, queries.get(i).getId()), conn);
        }
        conn.commit();

    } catch (Exception e) {
        SQLUtil.rollback(conn);
        throw new DAOException(e.getMessage(), e);
    } finally {
        SQLUtil.close(conn);
    }
}

From source file:edu.wisc.ssec.mcidasv.data.hydra.Statistics.java

public static String sparkline(FlatField field, Statistics s) throws VisADException, RemoteException {
    Long[] values = histogram(field, 20);
    Real sMin = (Real) s.min();
    Real sMax = (Real) s.max();
    Collection<Long> collection = asList(values);
    long max = Collections.max(collection);
    long min = Collections.min(collection);
    float scale = (max - min) / 7f;
    final StringBuilder buf = new StringBuilder(values.length);

    // TJJ Mar 2018 - sandwich with min/max
    // http://mcidas.ssec.wisc.edu/inquiry-v/?inquiry=2548
    buf.append(fmtMe((sMin).getValue()));
    for (Long value : values) {
        int index = Math.round((value - min) / scale);
        buf.append(CHARS.get(index));
    }
    buf.append(fmtMe((sMax).getValue()));

    return buf.toString();
}

From source file:de.Maxr1998.xposed.maxlock.ui.LockFragment.java

@SuppressWarnings("deprecation")
private void setupKnockCodeLayout() {
    final View container = rootView.findViewById(R.id.container);
    LinearLayout.LayoutParams params = (LinearLayout.LayoutParams) container.getLayoutParams();
    params.setMargins(0, 0, 0, 0);
    container.setLayoutParams(params);
    container.setOnTouchListener(new View.OnTouchListener() {
        @Override
        public boolean onTouch(View v, MotionEvent e) {
            if (e.getActionMasked() == MotionEvent.ACTION_DOWN) {
                mInputText.append("\u2022");

                // Center values
                int[] loc = new int[2];
                container.getLocationOnScreen(loc);
                int viewCenterX = loc[0] + container.getWidth() / 2;
                int viewCenterY = loc[1] + container.getHeight() / 2;

                // Track touch positions
                knockCodeX.add(e.getRawX());
                knockCodeY.add(e.getRawY());
                if (knockCodeX.size() != knockCodeY.size()) {
                    throw new RuntimeException("The amount of the X and Y coordinates doesn't match!");
                }

                // Calculate center
                float centerX;
                float differenceX = Collections.max(knockCodeX) - Collections.min(knockCodeX);
                if (differenceX > 50) {
                    centerX = Collections.min(knockCodeX) + differenceX / 2;
                } else
                    centerX = viewCenterX;

                float centerY;
                float differenceY = Collections.max(knockCodeY) - Collections.min(knockCodeY);
                if (differenceY > 50) {
                    centerY = Collections.min(knockCodeY) + differenceY / 2;
                } else
                    centerY = viewCenterY;

                // Calculate key
                key.setLength(0);
                for (int i = 0; i < knockCodeX.size(); i++) {
                    float x = knockCodeX.get(i), y = knockCodeY.get(i);
                    if (x < centerX && y < centerY)
                        key.append("1");
                    else if (x > centerX && y < centerY)
                        key.append("2");
                    else if (x < centerX && y > centerY)
                        key.append("3");
                    else if (x > centerX && y > centerY)
                        key.append("4");
                }
                checkInput();
                return true;
            }
            return false;
        }
    });
    divider = new View(getActivity());
    divider.setLayoutParams(new ViewGroup.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT,
            Math.round(getResources().getDisplayMetrics().density)));
    divider.setBackgroundColor(getResources().getColor(R.color.light_white));
    ((ViewGroup) container).addView(divider);
    if (prefs.getBoolean(Common.INVERT_COLOR, false) && prefs.getBoolean(Common.KC_SHOW_DIVIDERS, true)) {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN)
            divider.setBackground(getResources().getDrawable(android.R.color.black));
        else
            divider.setBackgroundDrawable(getResources().getDrawable(android.R.color.black));
    } else if (!prefs.getBoolean(Common.KC_SHOW_DIVIDERS, true) || screenWidth > screenHeight) {
        divider.setVisibility(View.GONE);
    }
}

From source file:wef.articulab.view.ui.CombinedBNXYPlot.java

public void setDataset(ChartContainer chartContainer, ArrayList<Double>[] behaviors, double threshhold,
        String nameBehActivated, double activation) {
    chartContainer.behaviors = behaviors;
    chartContainer.thresholds.add(threshhold);
    chartContainer.activations.add(new Object[] { nameBehActivated, activation });

    chartContainer.maxThreshold = Collections.max(chartContainer.thresholds);
    chartContainer.minThreshold = Collections.min(chartContainer.thresholds);
    if (activation > chartContainer.maxActivation) {
        chartContainer.maxActivation = activation;
    }
    double portion = (chartContainer.maxThreshold - chartContainer.minThreshold) / 6;
    if ((portion * 6) > (chartContainer.maxActivation / 4)) {
        chartContainer.maxThreshold = threshhold + portion > chartContainer.maxThreshold
                ? chartContainer.maxThreshold
                : threshhold + portion;
        chartContainer.minThreshold = threshhold - portion < chartContainer.minThreshold
                ? chartContainer.minThreshold
                : threshhold - portion;
    }
    refreshDataset(chartContainer);
}