Example usage for java.util Collections min

Introduction

On this page you can find example usage of java.util.Collections#min, drawn from open-source projects.

Prototype

public static <T extends Object & Comparable<? super T>> T min(Collection<? extends T> coll) 

Document

Returns the minimum element of the given collection, according to the natural ordering of its elements.
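Before the project examples, a minimal, self-contained sketch of that contract (the class and variable names here are illustrative): elements are compared by their natural ordering, and calling min on an empty collection throws NoSuchElementException.

import java.util.Arrays;
import java.util.Collections;
import java.util.List;

public class CollectionsMinDemo {
    public static void main(String[] args) {
        List<Integer> numbers = Arrays.asList(42, 7, 19);

        // Natural ordering: Integer implements Comparable<Integer>.
        int smallest = Collections.min(numbers);
        System.out.println(smallest); // prints 7

        // Collections.min throws NoSuchElementException on an empty
        // collection, so guard when the input may be empty.
        List<Integer> maybeEmpty = Collections.emptyList();
        if (!maybeEmpty.isEmpty()) {
            System.out.println(Collections.min(maybeEmpty));
        }
    }
}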

Usage

From source file:com.example.lijingjiang.mobile_sensor_display.SimplePedometerActivity.java

public double[] SleepEfficiency(ArrayList<Integer> sleep, double targetRating) {
    int LF = 0; // windows whose reading range stays below the threshold
    int HF = 0; // windows whose range reaches or exceeds it
    int window = 60;
    int index = 0;
    while (index + window < sleep.size()) {
        List<Integer> subSleep = sleep.subList(index, index + window);
        if (Collections.max(subSleep) - Collections.min(subSleep) < 9) {
            LF++;
        } else {
            HF++;
        }
        index = index + window;
    }
    List<Integer> subSleep = sleep.subList(index, sleep.size()); // exclusive end index, so this tail window includes the last reading
    if (Collections.max(subSleep) - Collections.min(subSleep) < 9) {
        LF++;
    } else {
        HF++;
    }
    double[] data = new double[2];
    data[0] = (double) LF / HF; // rating: cast avoids integer division (assumes HF > 0)
    data[1] = 1 - (Math.abs(targetRating - data[0]) / data[0]); // efficiency
    return data;
}

From source file:org.apache.accumulo.examples.wikisearch.iterator.BooleanLogicIterator.java

private void handleAND(BooleanLogicTreeNode me) {
    if (log.isDebugEnabled()) {
        log.debug("handleAND::" + me.getContents());
    }
    Enumeration<?> children = me.children();
    me.setValid(true); // it's easier to prove false than true

    HashSet<Key> goodSet = new HashSet<Key>();
    HashSet<Key> badSet = new HashSet<Key>();
    while (children.hasMoreElements()) {
        BooleanLogicTreeNode child = (BooleanLogicTreeNode) children.nextElement();

        if (child.getType() == ParserTreeConstants.JJTEQNODE
                || child.getType() == ParserTreeConstants.JJTANDNODE
                || child.getType() == ParserTreeConstants.JJTERNODE
                || child.getType() == ParserTreeConstants.JJTNENODE
                || child.getType() == ParserTreeConstants.JJTGENODE
                || child.getType() == ParserTreeConstants.JJTLENODE
                || child.getType() == ParserTreeConstants.JJTGTNODE
                || child.getType() == ParserTreeConstants.JJTLTNODE) {

            if (child.isNegated()) {
                if (child.hasTop()) {
                    badSet.add(child.getTopKey());
                    if (goodSet.contains(child.getTopKey())) {
                        me.setValid(false);
                        return;
                    }
                    if (child.isValid()) {
                        me.setValid(false);
                        return;
                    }
                }
            } else {
                if (child.hasTop()) {
                    if (log.isDebugEnabled()) {
                        log.debug("handleAND, child node: " + child.getContents());
                    }
                    // if you're in the bad set, you're done.
                    if (badSet.contains(child.getTopKey())) {
                        if (log.isDebugEnabled()) {
                            log.debug("handleAND, child is in bad set, setting parent false");
                        }
                        me.setValid(false);
                        return;
                    }

                    // if good set is empty, add it.
                    if (goodSet.isEmpty()) {
                        if (log.isDebugEnabled()) {
                            log.debug("handleAND, goodSet is empty, adding child: " + child.getContents());
                        }
                        goodSet.add(child.getTopKey());
                    } else {
                        // must be in the good set & not in the bad set
                        // if either fails, I'm false.
                        if (!goodSet.contains(child.getTopKey())) {
                            if (log.isDebugEnabled()) {
                                log.debug(
                                        "handleAND, goodSet is not empty, and does NOT contain child, setting false.  child: "
                                                + child.getContents());
                            }
                            me.setValid(false);
                            return;
                        } else {
                            // trim the good set to this one value
                            // (handles the case where the initial encounters were ORs)
                            goodSet = new HashSet<Key>();
                            goodSet.add(child.getTopKey());
                            if (log.isDebugEnabled()) {
                                log.debug("handleAND, child in goodset, trim to this value: "
                                        + child.getContents());
                            }
                        }
                    }
                } else {
                    // test if its children are all false
                    if (child.getChildCount() > 0) {
                        Enumeration<?> subchildren = child.children();
                        boolean allFalse = true;
                        while (subchildren.hasMoreElements()) {
                            BooleanLogicTreeNode subchild = (BooleanLogicTreeNode) subchildren.nextElement();
                            if (!subchild.isNegated()) {
                                allFalse = false;
                                break;
                            } else if (subchild.isNegated() && subchild.hasTop()) {
                                allFalse = false;
                                break;
                            }
                        }
                        if (!allFalse) {
                            me.setValid(false);
                            return;
                        }
                    } else {
                        // child returned a null value and is not a negation, so this node is false.
                        me.setValid(false);
                        return;
                    }
                }
            }

        } else if (child.getType() == ParserTreeConstants.JJTORNODE) { // OR node

            // NOTE: The OR may be an OrIterator in which case it will only produce
            // a single unique identifier, or it may be a pure logical construct and
            // be capable of producing multiple unique identifiers.
            // This should handle all cases.
            Iterator<?> iter = child.getSetIterator();
            boolean goodSetEmpty = goodSet.isEmpty();
            boolean matchedOne = false;
            boolean pureNegations = true;
            if (!child.isValid()) {
                if (log.isDebugEnabled()) {
                    log.debug("handleAND, child is an OR and it is not valid, setting false, ALL NEGATED?: "
                            + child.isChildrenAllNegated());
                }
                me.setValid(false); // I'm an AND: if one of my children is false, I'm false.
                return;
            } else if (child.isValid() && !child.hasTop()) {
                // pure negation, do nothing
            } else if (child.isValid() && child.hasTop()) { // I need to match one
                if (log.isDebugEnabled()) {
                    log.debug("handleAND, child OR, valid and has top, means not pureNegations");
                }
                pureNegations = false;
                while (iter.hasNext()) {
                    Key i = (Key) iter.next();
                    if (child.isNegated()) {
                        badSet.add(i);
                        if (goodSet.contains(i)) {
                            if (log.isDebugEnabled()) {
                                log.debug("handleAND, child OR, goodSet contains bad value: " + i);
                            }
                            me.setValid(false);
                            return;
                        }
                    } else {
                        // if the good set is empty, then push all of my ids.
                        if (goodSetEmpty && !badSet.contains(i)) {
                            goodSet.add(i);
                            matchedOne = true;
                        } else {
                            // I need at least one to match
                            if (goodSet.contains(i)) {
                                matchedOne = true;
                            }
                        }
                    }
                }
            }

            // is the goodSet still empty? that means we saw only negations.
            // otherwise, if it's not empty and we didn't match one, we're false.
            if (child.isNegated()) {
                // we're ok
            } else {
                if (goodSet.isEmpty() && !pureNegations) {
                    if (log.isDebugEnabled()) {
                        log.debug("handleAND, child OR, empty goodset && !pureNegations, set false");
                    }
                    // that's bad, we weren't negated, should've pushed something in there.
                    me.setValid(false);
                    return;
                } else if (!goodSet.isEmpty() && !pureNegations) { // goodSet contains values.
                    if (!matchedOne) { // but we didn't match any.
                        if (log.isDebugEnabled()) {
                            log.debug("handleAND, child OR, goodSet had values but I didn't match any, false");
                        }
                        me.setValid(false);
                        return;
                    }

                    // we matched something, trim the set.
                    // i.e. two child ORs
                    goodSet = child.getIntersection(goodSet);
                }
            }

        }
    } // end while

    if (goodSet.isEmpty()) {
        if (log.isDebugEnabled()) {
            log.debug("handleAND-> goodSet is empty, pure negations?");
        }
    } else {
        me.setTopKey(Collections.min(goodSet));
        if (log.isDebugEnabled()) {
            log.debug("End of handleAND, this node's topKey: " + me.getTopKey());
        }
    }
}

From source file:org.cds06.speleograph.data.Series.java

private void setMinMaxValue() {
    seriesMaxValue = Collections.max(items).getValue();
    seriesMinValue = Collections.min(items).getValue();
}
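The natural-ordering overload above assumes the items implement Comparable. When they do not, or a different ordering is needed, the two-argument overload Collections.min(Collection, Comparator) applies. A minimal sketch (the Item class is illustrative, not from the project above):

import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;

public class ComparatorMinDemo {
    // Illustrative value type that does not implement Comparable.
    static class Item {
        final double value;
        Item(double value) { this.value = value; }
    }

    public static void main(String[] args) {
        List<Item> items = Arrays.asList(new Item(3.5), new Item(1.25), new Item(2.0));

        // Collections.min(Collection, Comparator) selects by the supplied ordering.
        Item smallest = Collections.min(items, Comparator.comparingDouble((Item i) -> i.value));
        System.out.println(smallest.value); // prints 1.25
    }
}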

From source file:com.thoughtworks.go.server.dao.PipelineSqlMapDao.java

private PipelineInstanceModels loadHistory(String pipelineName, List<Long> ids) {
    if (ids.isEmpty()) {
        return PipelineInstanceModels.createPipelineInstanceModels();
    }

    Map<String, Object> args = arguments("pipelineName", pipelineName).and("from", Collections.min(ids))
            .and("to", Collections.max(ids)).asMap();
    PipelineInstanceModels history = PipelineInstanceModels
            .createPipelineInstanceModels((List<PipelineInstanceModel>) getSqlMapClientTemplate()
                    .queryForList("getPipelineHistoryByName", args));
    for (PipelineInstanceModel pipelineInstanceModel : history) {
        loadPipelineHistoryBuildCause(pipelineInstanceModel);
    }
    return history;
}

From source file:com.hortonworks.historian.nifi.reporter.HistorianDeanReporter.java

private void indexLateData(List<String> dataSourceExclusions) {
    String nifiControllersUrl = nifiUrl + "/nifi-api/flow/process-groups/root/controller-services";

    try {
        JSONArray controllers = getJSONFromUrl(nifiControllersUrl, basicAuth)
                .getJSONArray("controllerServices");
        getLogger().info("********** Getting List of Druid Tranquility Controllers...");
        for (int i = 0; i < controllers.length(); i++) {
            JSONObject currentController = controllers.getJSONObject(i).getJSONObject("component");
            String currentControllerType = currentController.getString("type");
            if (currentControllerType
                    .equalsIgnoreCase("com.hortonworks.nifi.controller.DruidTranquilityController")) {
                String lateDataPath = lateDataRoot + "/" + currentController.getJSONObject("properties")
                        .getString("query_granularity").toLowerCase() + "/";
                getLogger().info("********** Checking for Late Arriving Data at HDFS Path: " + lateDataPath);
                if (fs.exists(new Path(lateDataPath))) {
                    FileStatus[] fileStatus = fs.listStatus(new Path(lateDataPath));
                    List<Date> dates = new ArrayList<Date>();
                    List<String> sourceData = new ArrayList<String>();
                    for (FileStatus status : fileStatus) {
                        String[] address = status.getPath().toString().split("/");
                        String currentBin = address[address.length - 1];
                        Date binDate = new SimpleDateFormat("yyyy-MM-dd-HH-mm").parse(currentBin);
                        sourceData.add(lateDataPath + currentBin);
                        dates.add(binDate);
                    }
                    sourceData.removeAll(dataSourceExclusions); // List.removeAll needs no cast
                    getLogger().info("********** Detected " + sourceData.size()
                            + " bins of relevant late data, initiating Delta Indexing task...");

                    if (fileStatus.length > 0 && sourceData.size() > 0) {
                        String intervalStart = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'")
                                .format(Collections.min(dates));
                        String intervalEnd = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'")
                                .format(Collections.max(dates));
                        String bins = String.join(",", sourceData);
                        JSONArray dimensionsList = new JSONArray(Arrays.asList(currentController
                                .getJSONObject("properties").getString("dimensions_list").split(",")));
                        String ingestSpec = "{" + "     \"type\" : \"index_hadoop\"," + "     \"spec\" : {"
                                + "      \"dataSchema\" : {" + "         \"dataSource\": \""
                                + currentController.getJSONObject("properties").getString("data_source") + "\","
                                + "         \"parser\" : {" + "            \"type\" : \"hadoopyString\","
                                + "            \"parseSpec\" : {" + "               \"format\" : \"json\","
                                + "               \"timestampSpec\" : {" + "                  \"column\" : \""
                                + currentController.getJSONObject("properties").getString("timestamp_field")
                                + "\"," + "                  \"format\" : \"auto\"" + "               },"
                                + "               \"dimensionsSpec\" : {" + "                  \"dimensions\": "
                                + dimensionsList + "               }" + "            }" + "         },"
                                + "         \"metricsSpec\" : "
                                + currentController.getJSONObject("properties")
                                        .getString("aggregators_descriptor")
                                + "," + "         \"granularitySpec\" : {"
                                + "            \"type\" : \"uniform\","
                                + "            \"segmentGranularity\" : \""
                                + currentController.getJSONObject("properties").getString("segment_granularity")
                                + "\"," + "            \"queryGranularity\" : \""
                                + currentController.getJSONObject("properties").getString("query_granularity")
                                + "\"," + "            \"intervals\": [\"" + intervalStart + "/" + intervalEnd
                                + "\"]" + "         }" + "      }," + "      \"ioConfig\" : {"
                                + "         \"type\" : \"hadoop\"," + "         \"inputSpec\" : {"
                                + "            \"type\" : \"multi\"," + "            \"children\": ["
                                + "               {" + "                  \"type\" : \"dataSource\","
                                + "                  \"ingestionSpec\" : {"
                                + "                     \"dataSource\": \""
                                + currentController.getJSONObject("properties").getString("data_source") + "\","
                                + "                     \"intervals\": [\"" + intervalStart + "/" + intervalEnd
                                + "\"]" + "                  }" + "               }," + "               {"
                                + "                  \"type\" : \"static\"," + "                  \"paths\": \""
                                + bins + "\"" + "               }" + "            ]" + "         }" + "      },"
                                + "      \"tuningConfig\" : {" + "         \"type\": \"hadoop\"" + "      }"
                                + "     }" + "}";
                        getLogger().info("********** Delta Ingestion Spec: " + ingestSpec);
                        String indexTaskId = createDruidIndexingTask(ingestSpec);
                        getLogger().info("********** Created Indexing Task " + indexTaskId);
                        Map<String, Object> currentTaskMetaData = new HashMap<String, Object>();
                        currentTaskMetaData.put("ingestSpec", ingestSpec);
                        currentTaskMetaData.put("sourceData", sourceData);
                        deltaIndexTasks.put(indexTaskId, currentTaskMetaData);
                        String currentTaskDirPath = lateDataTasksPath + "/" + indexTaskId.replace(":", "__");
                        getLogger().info("********** Persisting Record of Task: " + currentTaskDirPath);
                        currentTaskDirPath = createHDFSDirectory(currentTaskDirPath);
                        writeHDFSFile(currentTaskDirPath + "/ingestSpec", ingestSpec);
                        writeHDFSFile(currentTaskDirPath + "/sourceData", bins);
                    } else {
                        getLogger().info("********** " + lateDataPath + " does not contain any data...");
                    }
                } else {
                    getLogger().info("********** There is a Druid Controller mapped to " + lateDataPath
                            + ", however, the path does not yet exist...");
                }
            }
        }
    } catch (IOException e) {
        e.printStackTrace();
    } catch (ParseException e) {
        e.printStackTrace();
    } catch (JSONException e) {
        e.printStackTrace();
    }
}

From source file:webServices.RestServiceImpl.java

public String wktToExtent(String wkt) {
    Vector<Double> allX = new Vector<Double>();
    Vector<Double> allY = new Vector<Double>();

    wkt = wkt.substring(wkt.indexOf("((") + 2, wkt.indexOf("))"));

    String[] parse = wkt.split(", ");

    try {
        for (int i = 0; i < parse.length; i++) {
            String[] getXY = parse[i].split(" ");
            allX.add(Double.parseDouble(getXY[0]));
            allY.add(Double.parseDouble(getXY[1]));
        }

        return "[" + Collections.min(allX).toString() + ", " + Collections.min(allY).toString() + ", "
                + Collections.max(allX).toString() + ", " + Collections.max(allY).toString() + "]";
    } catch (NumberFormatException e) {
        return "[-180.0, -90.0, 180.0, 90.0]";
    }
}

From source file:carskit.data.processor.DataDAO.java

public double getRatingMin() {
    if (MinRate == -1) // lazily cache the minimum; the original guard tested MaxRate, apparently a copy-paste slip
        MinRate = Collections.min(getRatingScale());
    return MinRate;
}

From source file:edu.brown.hstore.HStoreSite.java

/**
 * Take snapshots.
 */
private void takeSnapshot() {
    // Do this only on site lowest id
    Host catalog_host = this.getHost();
    Integer lowest_site_id = Integer.MAX_VALUE, s_id;

    for (Site st : CatalogUtil.getAllSites(catalog_host)) {
        s_id = st.getId();
        lowest_site_id = Math.min(s_id, lowest_site_id);
    }

    int m_siteId = this.getSiteId();

    if (m_siteId == lowest_site_id) {
        if (debug.val)
            LOG.warn("Taking snapshot at site " + m_siteId);
        try {
            File snapshotDir = this.getSnapshotDir();
            String path = snapshotDir.getAbsolutePath();

            java.util.Date date = new java.util.Date();
            Timestamp current = new Timestamp(date.getTime());
            String nonce = Long.toString(current.getTime());

            CatalogContext cc = this.getCatalogContext();
            String procName = VoltSystemProcedure.procCallName(SnapshotSave.class);
            Procedure catalog_proc = cc.procedures.getIgnoreCase(procName);

            ParameterSet params = new ParameterSet();
            params.setParameters(path, // snapshot dir
                    nonce, // nonce - timestamp
                    1 // block
            );

            int base_partition = Collections.min(this.local_partitions);

            RpcCallback<ClientResponseImpl> callback = new RpcCallback<ClientResponseImpl>() {
                @Override
                public void run(ClientResponseImpl parameter) {
                    // Do nothing!
                }
            };

            LocalTransaction ts = this.txnInitializer.createLocalTransaction(null, EstTime.currentTimeMillis(),
                    99999999, base_partition, catalog_proc, params, callback);

            LOG.warn("Queuing snapshot transaction : base partition : " + base_partition + " path :" + path
                    + " nonce :" + nonce);

            // Queue @SnapshotSave transaction
            this.transactionQueue(ts);

        } catch (Exception ex) {
            ex.printStackTrace();
            LOG.fatal("SnapshotSave exception: " + ex.getMessage());
            this.hstore_coordinator.shutdown();
        }
    }

}

From source file:org.zkoss.ganttz.data.GanttDiagramGraph.java

private GanttDate getSmallestBeginDateFromChildrenFor(V container) {
    return Collections.min(getChildrenDates(container, Point.START));
}

From source file:org.libreplan.business.planner.entities.ResourceAllocation.java

private ResourcesPerDay calculateResourcesPerDayFromAssignments(Collection<? extends T> assignments) {
    if (assignments.isEmpty()) {
        return ResourcesPerDay.amount(0);
    }

    Map<LocalDate, List<T>> byDay = DayAssignment.byDay(assignments);
    LocalDate min = Collections.min(byDay.keySet());
    LocalDate max = Collections.max(byDay.keySet());
    Iterable<PartialDay> daysToIterate = startFor(min).daysUntil(endFor(max));

    EffortDuration sumTotalEffort = zero();
    EffortDuration sumWorkableEffort = zero();
    final ResourcesPerDay ONE_RESOURCE_PER_DAY = ResourcesPerDay.amount(1);

    for (PartialDay day : daysToIterate) {
        List<T> assignmentsAtDay = avoidNull(byDay.get(day.getDate()), Collections.<T>emptyList());

        EffortDuration incrementWorkable = getAllocationCalendar().asDurationOn(day, ONE_RESOURCE_PER_DAY);
        sumWorkableEffort = sumWorkableEffort.plus(incrementWorkable);

        sumTotalEffort = sumTotalEffort.plus(sumDuration(assignmentsAtDay));
    }
    if (sumWorkableEffort.equals(zero())) {
        return ResourcesPerDay.amount(0);
    }

    return ResourcesPerDay.calculateFrom(sumTotalEffort, sumWorkableEffort);
}