Example usage for java.util HashSet size

List of usage examples for java.util HashSet size

Introduction

On this page you can find example usage for java.util HashSet size.

Prototype

public int size() 

Source Link

Document

Returns the number of elements in this set (its cardinality).

Usage

From source file:com.china317.gmmp.gmmp_report_analysis.App.java

/**
 * Runs the daily "Dgm" analysis for one day: loads the base vehicles, fetches
 * their raw GPS history in chunks of 400 vehicles, feeds each vehicle's
 * time-ordered points through the fatigue / overspeed / offline / forbidden /
 * illegal-parking / illegal-in-out analysers, and persists the results.
 *
 * @param yyyyMMdd day to analyse, formatted as yyyyMMdd
 * @param context  Spring context used to look up the DAO beans
 */
private static void analysisDgm(String yyyyMMdd, ApplicationContext context) {
    try {
        log.info("[Dgm App started]");
        String businessType = "1";

        // Reset any state left over from a previous run.
        DgmAnalysisImp.getInstance().clear();

        log.info("[Dgm get baseVehicle begin---------]");
        VehicleDao vehicleDao = (VehicleDao) context.getBean("vehicleDao");
        List<Vehicle> vehs = vehicleDao.getBaseVehicleByDate(yyyyMMdd, businessType);

        // Chunk the vehicles to bound the size of each history query.
        List<List<Vehicle>> list_tm = ListUtil.splitList(vehs, 400);
        log.info("[Dgm get baseVehicle end1---------],vehicle total:" + vehs.size());
        log.info("[Dgm get baseVehicle end2---------],list_tm total:" + list_tm.size());
        for (List<Vehicle> vls : list_tm) {
            Map<String, Vehicle> vehMap = new HashMap<String, Vehicle>();

            log.info("[Dgm code set init------]");
            HashSet<String> codes = new HashSet<String>();
            for (Vehicle v : vls) {
                codes.add(v.getCode());
                vehMap.put(v.getCode(), v);
            }

            log.info("[Dgm code set end------]" + "setSize:" + vehMap.size());
            List<VehicleLocate> list = new ArrayList<VehicleLocate>();
            if (!codes.isEmpty()) {
                VehicleLocateDao vehicleLocateDao_gmmpraw = (VehicleLocateDao) context
                        .getBean("vehicleLocateDaoGmmpRaw");
                list = vehicleLocateDao_gmmpraw.findHistoryByParams(yyyyMMdd, codes);
                log.info("[Dgm this time total Points Size]:" + list.size());
            }

            // Group the raw points by vehicle code, dropping implausible records.
            Map<String, List<VehicleLocate>> map = new HashMap<String, List<VehicleLocate>>();
            for (VehicleLocate entity : list) {
                // Speeds of 160+ are treated as GPS noise and discarded.
                if (entity.getGpsSpeed() < 160) {
                    // Copy the business type from the base-vehicle record.
                    Vehicle tmpV = vehMap.get(entity.getCode());
                    entity.setBusinessType(tmpV.getBusinessType());
                    List<VehicleLocate> records = map.get(entity.getCode());
                    if (records == null) {
                        records = new ArrayList<VehicleLocate>();
                    }
                    // Only keep points received within 10 minutes of their GPS
                    // timestamp; stale uploads are skipped.
                    long lastlong = DateTime.accountTime3(entity.getGpsTime(), entity.getGetTime());
                    if (lastlong <= 10 * 60) {
                        records.add(entity);
                    }
                    map.put(entity.getCode(), records);
                }
            }

            log.info("analysis begin ,total:" + map.size());

            Iterator<String> it = map.keySet().iterator();
            int index = 0;
            while (it.hasNext()) {
                index++;
                String key = it.next();
                List<VehicleLocate> tmps = map.get(key);
                log.info("[Dgm]" + index + "analysis vehicle code:" + key + "sort list begin, list size:"
                        + tmps.size());
                // Chronological order is required by the analysers below.
                // Date.compareTo yields the same ordering as the original
                // after()/before() cascade.
                Collections.sort(tmps, new Comparator<VehicleLocate>() {
                    public int compare(VehicleLocate o1, VehicleLocate o2) {
                        return o1.getGpsTime().compareTo(o2.getGpsTime());
                    }
                });
                log.info("analysis vehicle code:" + key + "sort list end");

                DgmAnalysisImp.getInstance().fatigueAnalysis(tmps, yyyyMMdd);
                log.info("-------Fatigue Analysis end");

                for (int i = 0; i < tmps.size(); i++) {
                    VehicleLocate e = tmps.get(i);
                    AreaAddProcessor.addAreaRuleInfo(e);
                    DgmAnalysisImp.getInstance().addZeroBegin(e);
                    DgmAnalysisImp.getInstance().overSpeedAnalysis(e);
                    DgmAnalysisImp.getInstance().offlineAnalysis(e, yyyyMMdd);
                    DgmAnalysisImp.getInstance().fobiddenAnalysis(e, i, tmps.size(), yyyyMMdd);
                    DgmAnalysisImp.getInstance().illegalParkingAnalysis(e);
                    DgmAnalysisImp.getInstance().illegalInOutAnalysis(e);

                    // Remember this point so the next iteration can compare
                    // against the previous record for this vehicle.
                    DgmAnalysisImp.getInstance().putLastRecord(e);
                }

                log.info("analysis vehicle code:" + key + "OVERSPEED OFFLINE ANALYSIS end");

                log.info("result: overspeed:" + DgmAnalysisImp.getInstance().getOverSpeedRecordsSize()
                        + "; offline:" + DgmAnalysisImp.getInstance().getOfflineRecordsSize());
            }
            // Persist the per-chunk results.
            FatigueRecordsStoreIntoDB(DgmAnalysisImp.getInstance().getFatigueMap(), context);
            log.info("--------Fatigue store into DB end");

            DgmOverSpeedRecordsStoreIntoDB(DgmAnalysisImp.getInstance().getOverSpeedMap(), context);

            DgmEntryExitStoreIntoDB(DgmAnalysisImp.getInstance().getExitMap(), context);
            DgmFobbidenStoreIntoDB(DgmAnalysisImp.getInstance().getForbiddeningMap(), context);
            DgmIllegalParkingStoreIntoDB(DgmAnalysisImp.getInstance().getIllegalParking(), context);
        }

        log.info("analysis end");

        log.info("[Dgm ended]");
    } catch (Exception e) {
        log.error(e);
    }
}

From source file:com.act.reachables.Network.java

/**
 * Serializes the node forest into a single JSON tree of the form
 * { "name": nodeid, "edge_up": {...}, "children": [ ... ] }.
 *
 * If exactly one node has no parent it becomes the root; if several remain,
 * they are attached under a synthetic "root" node so the front end always
 * receives one tree.
 *
 * @throws JSONException    on JSON construction failure
 * @throws RuntimeException if every node has a parent (no root exists)
 */
public static JSONObject get(MongoDB db, Set<Node> nodes, Set<Edge> edges, HashMap<Long, Long> parentIds,
        HashMap<Long, Edge> toParentEdges) throws JSONException {
    HashMap<Long, Node> nodeById = new HashMap<>();
    for (Node n : nodes)
        nodeById.put(n.id, n);

    // Build one JSON object per node, attaching its edge to the parent.
    HashMap<Long, JSONObject> nodeObjs = new HashMap<>();
    for (Long nid : parentIds.keySet()) {
        JSONObject nObj = JSONHelper.nodeObj(db, nodeById.get(nid));
        nObj.put("name", nid);

        if (toParentEdges.get(nid) != null) {
            JSONObject eObj = JSONHelper.edgeObj(toParentEdges.get(nid),
                    null /* no ordering reqd for referencing nodeMapping */);
            nObj.put("edge_up", eObj);
        }
        nodeObjs.put(nid, nObj);
    }

    // Link each object inside its parent's "children" array; whatever never
    // gets assigned to a parent is a tree root.
    HashSet<Long> unAssignedToParent = new HashSet<>(parentIds.keySet());
    for (Long nid : parentIds.keySet()) {
        JSONObject child = nodeObjs.get(nid);
        JSONObject parent = nodeObjs.get(parentIds.get(nid));
        if (parent != null) {
            parent.append("children", child);
            unAssignedToParent.remove(nid);
        }
    }

    // If many trees remain, assume they form a disjoint forest and attach
    // them as children of a proxy root; if only one tree remains, return it.
    JSONObject json;
    if (unAssignedToParent.isEmpty()) {
        throw new RuntimeException("All nodeMapping have parents! Where is the root? Abort.");
    } else if (unAssignedToParent.size() == 1) {
        // BUGFIX: the original did unAssignedToParent.toArray(new JSONObject[0])[0]
        // on a Set<Long>, which throws ArrayStoreException at runtime. The root
        // id must be mapped through nodeObjs to get its JSON object.
        Long rootId = unAssignedToParent.iterator().next();
        json = nodeObjs.get(rootId);
    } else {
        json = new JSONObject();
        json.put("name", "root");
        for (Long cid : unAssignedToParent) {
            json.append("children", nodeObjs.get(cid));
        }
    }

    return json;
}

From source file:eu.fbk.dkm.sectionextractor.WikipediaSectionTitlesExtractor.java

/**
 * Creates the extractor and opens the output file for writing.
 *
 * @param numThreads      number of worker threads (passed to the superclass)
 * @param numPages        number of pages to process (passed to the superclass)
 * @param locale          wiki locale
 * @param outFile         file the section titles are written to
 * @param configuredDepth maximum section depth to extract (may be null)
 * @param maxNum          maximum number of sections to extract
 * @param printTitles     whether page titles are printed in the output
 * @param pagesToConsider restrict extraction to these page titles
 */
public WikipediaSectionTitlesExtractor(int numThreads, int numPages, Locale locale, File outFile,
        Integer configuredDepth, int maxNum, boolean printTitles, HashSet<String> pagesToConsider) {
    super(numThreads, numPages, locale);

    logger.info("Locale: " + locale);
    logger.info("Page to consider: " + pagesToConsider.size());
    logger.info("Configured depth: " + configuredDepth);
    logger.info("Max number of sections: " + maxNum);
    logger.info("Print titles: " + Boolean.toString(printTitles));
    logger.info("Output file: " + outFile);

    this.configuredDepth = configuredDepth;
    this.maxNum = maxNum;
    this.printTitles = printTitles;
    this.pagesToConsider = pagesToConsider;

    try {
        writer = new BufferedWriter(new FileWriter(outFile));
    } catch (Exception e) {
        // Log the full stack trace, not just the message, so the failure is
        // diagnosable. NOTE(review): writer stays null on failure and later
        // writes will NPE -- consider failing fast here instead.
        logger.error("Unable to open output file " + outFile + ": " + e.getMessage(), e);
    }
}

From source file:com.cyberway.issue.crawler.util.BdbUriUniqFilterTest.java

/**
 * Verify that two URIs which gave colliding hashes, when previously
 * the last 40bits of the composite did not sufficiently vary with certain
 * inputs, no longer collide. /*from   ww w  .ja v a2 s  .  c o  m*/
 */
public void testCreateKeyCollisions() {
    HashSet<Long> fingerprints = new HashSet<Long>();
    fingerprints.add(new Long(BdbUriUniqFilter.createKey("dns:mail.daps.dla.mil")));
    fingerprints.add(new Long(BdbUriUniqFilter.createKey("dns:militaryreview.army.mil")));
    assertEquals("colliding fingerprints", 2, fingerprints.size());
}

From source file:org.apache.solr.analytics.facet.AbstractAnalyticsFacetCloudTest.java

@SuppressWarnings({ "unchecked", "rawtypes" })
public <T extends Comparable<T>> ArrayList calculateStat(ArrayList<ArrayList<T>> lists, String stat) {
    ArrayList result;/*from  w ww .  java  2s  .com*/
    if (stat.contains("perc_")) {
        result = new ArrayList<T>();
        for (List<T> list : lists) {
            if (list.size() == 0)
                continue;
            int ord = (int) Math.ceil(Double.parseDouble(stat.substring(5)) / 100 * list.size()) - 1;
            ArrayList<Integer> percs = new ArrayList<>(1);
            percs.add(ord);
            OrdinalCalculator.putOrdinalsInPosition(list, percs);
            result.add(list.get(ord));
        }
    } else if (stat.equals("count")) {
        result = new ArrayList<Long>();
        for (List<T> list : lists) {
            result.add((long) list.size());
        }
    } else if (stat.equals("missing")) {
        result = new ArrayList<Long>();
        for (ArrayList<T> list : lists) {
            result.add(calculateMissing(list, latestType));
        }
    } else if (stat.equals("unique")) {
        result = new ArrayList<Long>();
        for (List<T> list : lists) {
            HashSet<T> set = new HashSet<>();
            set.addAll(list);
            result.add((long) set.size());
        }
    } else if (stat.equals("max")) {
        result = new ArrayList<T>();
        for (List<T> list : lists) {
            if (list.size() == 0)
                continue;
            Collections.sort(list);
            result.add(list.get(list.size() - 1));
        }
    } else if (stat.equals("min")) {
        result = new ArrayList<T>();
        for (List<T> list : lists) {
            if (list.size() == 0)
                continue;
            Collections.sort((List<T>) list);
            result.add(list.get(0));
        }
    } else {
        result = null;
    }
    return result;
}

From source file:org.ambraproject.admin.service.AdminRolesServiceTest.java

@Test(dataProvider = "userProfileAndRoles3")
public void testGetUserRoles(UserProfile up, HashSet<UserRole> roles) {
    // Fetch the roles the service has stored for this user.
    Set<UserRoleView> actual = adminRolesService.getUserRoles(up.getID());

    // Same cardinality as the expected role set...
    assertEquals(roles.size(), actual.size());

    // ...and every returned view must match one of the expected roles by ID.
    for (UserRoleView view : actual) {
        boolean matched = false;
        for (UserRole expected : roles) {
            if (view.getID().equals(expected.getID())) {
                matched = true;
                break;
            }
        }
        assertTrue(matched);
    }
}

From source file:blusunrize.immersiveengineering.api.energy.wires.TileEntityImmersiveConnectable.java

/**
 * Builds the set of wire connections at this tile's position for blockstate
 * rendering, resolving each connection's sub-vertices.
 *
 * Returns an empty immutable set when no connections exist. The returned set
 * overrides equals() so that two connection sets compare equal iff they have
 * the same size and contain the same connections -- presumably so blockstate
 * caching can detect "no visual change".
 *
 * NOTE(review): equals() is overridden without hashCode(); AbstractSet's
 * element-sum hashCode happens to agree for equal-by-elements sets, but the
 * unchecked cast to HashSet&lt;Connection&gt; is unverified -- confirm callers
 * never mix element types.
 */
public Set<Connection> genConnBlockstate() {
    Set<Connection> conns = ImmersiveNetHandler.INSTANCE.getConnections(world, pos);
    if (conns == null)
        return ImmutableSet.of();
    Set<Connection> ret = new HashSet<Connection>() {
        @Override
        public boolean equals(Object o) {
            if (o == this)
                return true;
            if (!(o instanceof HashSet))
                return false;
            HashSet<Connection> other = (HashSet<Connection>) o;
            // Equal iff same size and every connection here is also in other.
            if (other.size() != this.size())
                return false;
            for (Connection c : this)
                if (!other.contains(c))
                    return false;
            return true;
        }
    };
    //TODO thread safety!
    for (Connection c : conns) {
        // Skip connections whose far end cannot be resolved to a connectable.
        IImmersiveConnectable end = ApiUtils.toIIC(c.end, world, false);
        if (end == null)
            continue;
        // generate subvertices
        c.getSubVertices(world);
        ret.add(c);
    }

    return ret;
}

From source file:edu.anu.spice.SemanticConcept.java

/**
 * Returns the similarity between this concept and another: 1 when their
 * surface-form concept sets intersect, otherwise the Jaccard coefficient of
 * their WordNet synset sets; 0 for null / non-SemanticConcept arguments.
 *
 * @param o the object to compare against
 * @return a similarity score in [0, 1]
 */
public float similarity(Object o) {
    // instanceof is false for null, covering both guard cases at once.
    if (!(o instanceof SemanticConcept)) {
        return 0;
    }
    SemanticConcept otherConcept = (SemanticConcept) o;
    // Exact concept overlap short-circuits to full similarity.
    HashSet<String> concept_intersection = new HashSet<String>(this.concepts);
    concept_intersection.retainAll(otherConcept.concepts);
    if (!concept_intersection.isEmpty()) {
        return 1;
    }
    // Jaccard similarity over synsets: |A ∩ B| / |A ∪ B|.
    HashSet<Integer> synset_intersection = new HashSet<Integer>(this.synsets);
    synset_intersection.retainAll(otherConcept.synsets);
    HashSet<Integer> synset_union = new HashSet<Integer>(this.synsets);
    synset_union.addAll(otherConcept.synsets);
    // BUGFIX: when both synset sets are empty the original returned 0f/0f
    // (NaN); an empty union means no evidence of similarity.
    if (synset_union.isEmpty()) {
        return 0;
    }
    return ((float) synset_intersection.size()) / ((float) synset_union.size());
}

From source file:org.osiam.resource_server.resources.helper.AttributesRemovalHelper.java

/**
 * Returns an ObjectWriter restricted to the requested attributes; with no
 * requested fields, the mapper's default writer is returned unchanged.
 *
 * The "schemas" attribute is always included in the projection.
 */
private ObjectWriter getObjectWriter(ObjectMapper mapper, String[] fieldsToReturn) {
    if (fieldsToReturn.length == 0) {
        return mapper.writer();
    }

    mapper.addMixInAnnotations(Object.class, PropertyFilterMixIn.class);

    // Collect the requested fields plus the mandatory "schemas" attribute.
    HashSet<String> givenFields = new HashSet<String>();
    givenFields.add("schemas");
    for (String field : fieldsToReturn) {
        givenFields.add(field);
    }
    String[] keep = givenFields.toArray(new String[givenFields.size()]);

    FilterProvider filters = new SimpleFilterProvider().addFilter("filter properties by name",
            SimpleBeanPropertyFilter.filterOutAllExcept(keep));
    return mapper.writer(filters);
}

From source file:mase.generic.WeightedClusterSCPostEval.java

/**
 * Weighted cluster initialization: each buffered point is chosen as an
 * initial cluster centre with probability proportional to its correlation
 * weight (roulette-wheel selection without replacement).
 *
 * @param state the evolution state supplying the RNG and correlations
 */
@Override
protected void initializeClusters(EvolutionState state) {
    this.clusters = new double[numClusters][];
    this.counts = new int[numClusters];
    Integer[] keys = new Integer[buffer.size()];
    buffer.keySet().toArray(keys);
    HashSet<Integer> randomKeys = new HashSet<Integer>(numClusters * 2);
    HashMap<Integer, Double> pointWeight = stateCorrelations(state);
    double totalWeight = 0;
    for (Double d : pointWeight.values()) {
        totalWeight += d;
    }

    // NOTE(review): this loops forever if numClusters exceeds the number of
    // distinct buffered keys -- confirm callers guarantee enough points.
    while (randomKeys.size() < numClusters) {
        // Roulette-wheel spin: walk the keys subtracting weights until the
        // random mass is exhausted.
        int next = -1;
        double rand = state.random[0].nextDouble() * totalWeight;
        for (int i = 0; i < keys.length; i++) {
            rand -= pointWeight.get(keys[i]);
            if (rand <= 0.0) {
                next = keys[i];
                break;
            }
        }
        if (next == -1) {
            // BUGFIX: floating-point rounding can leave rand > 0 after the
            // scan; the original then inserted the sentinel -1 as a bogus
            // key, which later NPEs on globalKey.get(-1). Fall back to the
            // last key instead.
            next = keys[keys.length - 1];
        }
        // HashSet.add is a no-op on duplicates, so no contains() check needed.
        randomKeys.add(next);
    }
    // Materialize each chosen key's byte signature as a double[] centre.
    int clusterIndex = 0;
    for (Integer key : randomKeys) {
        byte[] s = globalKey.get(key);
        double[] cl = new double[s.length];
        for (int i = 0; i < s.length; i++) {
            cl[i] = s[i];
        }
        clusters[clusterIndex++] = cl;
    }

}