Example usage for java.util HashSet size

A list of usage examples for java.util.HashSet.size(), collected from real source files.

Introduction

On this page you can find example usages of java.util.HashSet.size().

Prototype

public int size() 

Document

Returns the number of elements in this set (its cardinality).
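
A minimal, self-contained sketch using size() (class and variable names are illustrative): a HashSet stores each distinct element once, so size() reports the cardinality rather than the number of add() calls.

public class HashSetSizeDemo {
    public static void main(String[] args) {
        java.util.HashSet<String> colors = new java.util.HashSet<String>();
        colors.add("red");
        colors.add("green");
        colors.add("red"); // duplicate: the set keeps a single "red"

        // Prints 2, not 3: size() returns the set's cardinality
        System.out.println(colors.size());
    }
}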

Usage

From source file:disko.flow.analyzers.socket.SocketTransmitter.java

public void process(C ctx, Ports ports) throws InterruptedException {
    HashSet<InputPort<?>> closedPorts = new HashSet<InputPort<?>>();
    log.debug("starting socket transmitter.");
    // Keep draining until every input port has reached end-of-stream
    while (closedPorts.size() < ports.getInputCount()) {
        for (InputPort<?> inputPort : ports.getInputPorts()) {
            if (closedPorts.contains(inputPort))
                continue;
            Object data = inputPort.take();
            if (inputPort.isEOS(data)) {
                closedPorts.add(inputPort);
                log.debug("Input port " + inputPort + " closed.");
                final String channelId = inputPort.getChannel().getId();
                writeMessage(new Message(channelId, null));
                log.debug("Sent EOS to " + channelId);
                continue;
            }
            final Message message = new Message(inputPort.getChannel().getId(), (Serializable) data);
            log.debug("Sending msg " + data);
            writeMessage(message);
        }
    }
}

From source file:index.partition.ZCurvePartitioner.java

public void createFromPoints_inputNumSplits(HashSet<Point> points, int numSplits) {
    // One Z-order value per input point; points.size() fixes the array length up front
    long[] zValues = new long[points.size()];
    int i = 0;
    for (Point point : points) {
        zValues[i] = computeZ(mbr, point.x, point.y);
        i++;
    }
    createFromZValues_numSplits(zValues, numSplits);
}

From source file:amie.keys.CombinationsExplorationNew.java

/**
 *
 * @param ruleToExtendWith
 * @param ruleToGraphNewFirstLevel
 * @param ruleToGraphNewLastLevel
 * @param kb
 */
private static void discoverConditionalKeysPerLevel(HashMap<Rule, HashSet<String>> ruleToExtendWith,
        HashMap<Rule, GraphNew> ruleToGraphNewFirstLevel, HashMap<Rule, GraphNew> ruleToGraphNewLastLevel) {
    HashMap<Rule, GraphNew> ruleToGraphNewThisLevel = new HashMap<>();
    for (Rule currentRule : ruleToExtendWith.keySet()) {
        for (String conditionProperty : ruleToExtendWith.get(currentRule)) {
            if (Utilities.getRelationIds(currentRule, property2Id).last() > property2Id
                    .get(conditionProperty)) {
                GraphNew graph = ruleToGraphNewLastLevel.get(currentRule);
                GraphNew currentGraphNew = (GraphNew) graph.clone();
                Integer propertyId = property2Id.get(conditionProperty);
                HashSet<Integer> propertiesSet = new HashSet<>();
                propertiesSet.add(propertyId);
                Node node = currentGraphNew.createNode(propertiesSet);
                node.toExplore = false;
                Iterable<Rule> conditions = Utilities.getConditions(currentRule, conditionProperty,
                        (int) support, kb);
                for (Rule conditionRule : conditions) {
                    Rule complementaryRule = getComplementaryRule(conditionRule);
                    if (!ruleToGraphNewFirstLevel.containsKey(complementaryRule)) {
                        // We should never fall in this case
                        for (Rule r : ruleToGraphNewFirstLevel.keySet()) {
                            System.out.println(r.getDatalogBasicRuleString());
                        }
                        System.out.println(complementaryRule.getDatalogBasicRuleString());
                        System.out.println(complementaryRule + " not found in the first level graph");
                    }
                    GraphNew complementaryGraphNew = ruleToGraphNewFirstLevel.get(complementaryRule);
                    GraphNew newGraphNew = (GraphNew) currentGraphNew.clone();
                    HashSet<Integer> conditionProperties = new HashSet<>();
                    conditionProperties.addAll(getRelations(conditionRule, property2Id));
                    conditionProperties.addAll(getRelations(currentRule, property2Id));
                    newGraphNew = mergeGraphNews(newGraphNew, complementaryGraphNew,
                            newGraphNew.topGraphNodes(), conditionProperties);

                    discoverConditionalKeysForComplexConditions(newGraphNew, newGraphNew.topGraphNodes(),
                            conditionRule);
                    ruleToGraphNewThisLevel.put(conditionRule, newGraphNew);
                }
            }
        }
    }
    HashMap<Rule, HashSet<String>> newRuleToExtendWith = new HashMap<>();
    for (Rule conRule : ruleToGraphNewThisLevel.keySet()) {
        GraphNew newGraphNew = ruleToGraphNewThisLevel.get(conRule);
        for (Node node : newGraphNew.topGraphNodes()) {
            HashSet<String> properties = new HashSet<>();
            if (node.toExplore) {
                Iterator<Integer> it = node.set.iterator();
                int prop = it.next();
                String propertyStr = id2Property.get(prop);
                properties.add(propertyStr);
            }
            if (properties.size() != 0) {
                newRuleToExtendWith.put(conRule, properties);
            }
        }
    }

    if (newRuleToExtendWith.size() != 0) {
        discoverConditionalKeysPerLevel(newRuleToExtendWith, ruleToGraphNewFirstLevel, ruleToGraphNewThisLevel);
    }
}

From source file:main.java.workload.WorkloadExecutor.java

public static Transaction streamOneTransaction(Database db, Cluster cluster, Workload wrl, WorkloadBatch wb) {

    Set<Integer> trTupleSet = null;
    Set<Integer> trDataSet = null;

    int min = 0, i = 0, n = 0, tr_id = 0;
    int type = trDistribution.sample();

    Transaction tr = null;

    if (!wb.getTrMap().containsKey(type))
        wb.getTrMap().put(type, new TreeMap<Integer, Transaction>());

    // new
    double rand_val = Global.rand.nextDouble();
    int toBeRemovedKey = -1;

    /**
     *  Implementing the new Workload Generation model 
     *  (Finalised as per November 20, 2014 and later improved on February 13-14, 2015)      
     */
    ++Global.global_trCount;

    // Transaction birth
    if (wb.getTrMap().get(type).isEmpty() || rand_val <= Global.percentageChangeInWorkload) {

        trTupleSet = wrl.getTrTupleSet(db, type);
        trDataSet = Workload.getTrDataSet(db, cluster, wb, trTupleSet);

        ++Global.global_trSeq;
        tr = new Transaction(Global.global_trSeq, type, trDataSet, Sim.time());

        // Add the incident transaction id
        wb.addIncidentTrId(cluster, trDataSet, Global.global_trSeq);

        // Add the newly created Transaction in the Workload Transaction map   
        wb.getTrMap().get(type).put(tr.getTr_id(), tr);

        // New improvements------------------------------------------------------------------------------
        double initial_period = (double) WorkloadExecutor.uNmax; // initialisation         
        tr.setTr_period(initial_period);

        perfm.Period.put(tr.getTr_id(), initial_period);
        Time.put(tr.getTr_id(), Sim.time());

        // Transaction repetition and retention of old transaction
    } else {

        ArrayList<Integer> idx2_id = new ArrayList<Integer>();
        ArrayList<Integer> idx_value = new ArrayList<Integer>();
        ArrayList<Integer> uT = new ArrayList<Integer>();

        TreeMap<Integer, Integer> idx2 = new TreeMap<Integer, Integer>(new ValueComparator<Integer>(idx));
        idx2.putAll(idx);

        min = Math.min(idx.size(), uNmax); // uNmax or uNmaxT

        i = 0;
        Iterator<Entry<Integer, Integer>> itr = idx2.entrySet().iterator();
        while (i < min) {
            idx2_id.add(itr.next().getKey());
            ++i;
        }

        // Deleting old Transactions
        if (idx2.size() > min) {
            toBeRemovedKey = idx2.lastKey();

            Transaction tr_old = wb.getTransaction(toBeRemovedKey);
            tr_old.calculateSpans(cluster);

            wb.removeTransaction(cluster, tr_old);
            idx.remove(toBeRemovedKey);
        }

        i = 0;
        while (i < idx2_id.size()) {
            idx_value.add(idx.get(idx2_id.get(i)));
            ++i;
        }

        i = 0;
        while (i < idx_value.size()) {
            uT.add(T.get(idx_value.get(i) - 1));
            ++i;
        }

        if (uT.size() == 1)
            n = 0;
        else
            n = Global.rand.nextInt(uT.size());

        tr_id = uT.get(n);

        tr = wb.getTransaction(tr_id);
        tr.setProcessed(false);

        // New improvements------------------------------------------------------------------------------
        double prev_period = perfm.Period.get(tr.getTr_id());
        double prev_time = Time.get(tr.getTr_id());

        double new_period = Global.expAvgWt * prev_period + (1 - Global.expAvgWt) * (Sim.time() - prev_time);

        tr.setTr_period(new_period);

        perfm.Period.remove(tr.getTr_id());
        perfm.Period.put(tr.getTr_id(), new_period);

        Time.remove(tr.getTr_id());
        Time.put(tr.getTr_id(), Sim.time());

    } // end-if-else()

    // Calculate latest Span
    tr.calculateSpans(cluster);

    // Update Idt
    tr.calculateIdt();

    if (perfm.Span.containsKey(tr.getTr_id()))
        perfm.Span.remove(tr.getTr_id());

    perfm.Span.put(tr.getTr_id(), tr.getTr_serverSpanCost());

    // Create an index entry for each newly created Transaction      
    idx.put(tr.getTr_id(), Global.global_trCount);
    T.add(tr.getTr_id());

    // New improvements------------------------------------------------------------------------------
    if (Global.global_trCount > Global.observationWindow) {

        _i = Global.global_trCount; // _i ~ Sim.time() 
        _W = Global.observationWindow; // _W ~ time 

        // Unique transaction ids seen within the current observation window
        HashSet<Integer> unq = new HashSet<Integer>();
        for (int _n = (_i - _W); _n < _i; _n++) {
            unq.add(T.get(_n));
        }

        // Captures the number of total unique transaction for this observation window
        perfm.Unqlen.put((_i - _W), unq.size());

        // Calculate the impact of distributed transaction per transaction basis               
        double sum_of_span_by_period = 0.0;
        sum_of_one_by_period = 0.0;

        Iterator<Integer> unq_itr = unq.iterator();
        while (unq_itr.hasNext()) {
            int unq_T = unq_itr.next();

            int span = perfm.Span.get(unq_T);
            double period = perfm.Period.get(unq_T);

            double span_by_period = span / period; // span weighted by frequency (f = 1/period)
            double one_by_period = 1 / period; // frequency = 1/period per unit time (i.e. 1 second)

            sum_of_span_by_period += span_by_period;
            sum_of_one_by_period += one_by_period;
        }

        double i_dt = (sum_of_span_by_period) / (Global.servers * sum_of_one_by_period);
        perfm.I_Dt.put((_i - _W), i_dt);

        if (Double.isNaN(i_dt))
            currentIDt = 0;
        else
            currentIDt = i_dt;

        // Reset repartitioning cooling off period
        if (WorkloadExecutor.repartitioningCoolingOff
                && Sim.time() >= WorkloadExecutor.RepartitioningCoolingOffPeriod) {

            WorkloadExecutor.repartitioningCoolingOff = false;

            Global.LOGGER.info("-----------------------------------------------------------------------------");
            Global.LOGGER.info("Simulation time: " + Sim.time() / (double) Global.observationWindow + " hrs");
            Global.LOGGER.info("Repartitioning cooling off period ends.");
            Global.LOGGER
                    .info("System will now check whether another repartitioning is required at this moment.");
            Global.LOGGER.info("Current IDt: " + currentIDt);
            Global.LOGGER.info("User defined IDt threshold: " + Global.userDefinedIDtThreshold);

            if (currentIDt < Global.userDefinedIDtThreshold) {
                Global.LOGGER.info("Repartitioning is not required at this moment.");

                //This is to disable on-demand atomic repartitioning for A-ARHC only
                if (Global.adaptive) {
                    Global.LOGGER.info("Disabling on-demand atomic repartitioning for A-ARHC ...");
                    WorkloadExecutor.isAdaptive = false;
                }

                Global.LOGGER.info("Continuing transaction processing ...");
            }
        }

        perfm.time.put((_i - _W), Sim.time());
    }

    // Add a hyperedge to workload hypergraph
    wb.addHGraphEdge(cluster, tr);

    // Collect transactional streams if data stream mining is enabled
    if (Global.streamCollection)
        Global.dsm.collectStream(cluster, tr);

    return tr;
}

From source file:org.graphstream.algorithm.measure.MobileCommunityMeasure.java

public void compute() {
    if (graphChanged) {
        // Default measure is the number of communities
        M = (float) communities.size();

        double[] avgValueDistribution = new double[(int) M];
        double[] stdValueDistribution = new double[(int) M];
        int k = 0;

        for (Object c : communities.keySet()) {

            HashSet<Node> nodes = communities.get(c);
            int size = nodes.size();
            double[] valueDistribution = new double[size];
            int j = 0;
            Mean mean = new Mean();
            StandardDeviation stdev = new StandardDeviation();
            for (Node n : nodes) {
                valueDistribution[j++] = n.getAttribute(this.mobMarker) == null ? 0.0
                        : (Double) n.getAttribute(this.mobMarker);
            }
            // Compute the statistical moments
            float avgValue = (float) mean.evaluate(valueDistribution);
            float stdevValue = (float) stdev.evaluate(valueDistribution);

            avgValueDistribution[k] = avgValue;
            stdValueDistribution[k] = stdevValue;
            ++k;
        }

        // Compute the statistical moments
        Mean mean = new Mean();
        StandardDeviation stdev = new StandardDeviation();
        avgValue = (float) mean.evaluate(avgValueDistribution);
        stdevValue = (float) stdev.evaluate(avgValueDistribution);
        avgStddev = (float) mean.evaluate(stdValueDistribution);
        graphChanged = false;
    }

}

From source file:no.group09.connection.ConnectionMetadata.java

/**
 * Returns an array of all associated pins to the service
 * @param service which service we want to retrieve the pins for
 * @return an array of Integer objects (empty if there are no specific pins)
 */
public Integer[] getServicePins(String service) {
    HashSet<Integer> set = servicePin.get(service);
    return set.toArray(new Integer[set.size()]);
}
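
The same pattern as a self-contained sketch (the servicePin map mirrors the field assumed above; the "led" service and pin numbers are made up for illustration). It additionally null-guards the lookup so an unknown service yields an empty array, which the original relies on the map to guarantee.

import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;

public class ServicePinsSketch {
    private final HashMap<String, HashSet<Integer>> servicePin = new HashMap<String, HashSet<Integer>>();

    public Integer[] getServicePins(String service) {
        HashSet<Integer> set = servicePin.get(service);
        if (set == null) {
            return new Integer[0]; // unknown service: empty array, per the javadoc contract
        }
        // set.size() pre-sizes the target array so toArray() allocates exactly once
        return set.toArray(new Integer[set.size()]);
    }

    public static void main(String[] args) {
        ServicePinsSketch sketch = new ServicePinsSketch();
        sketch.servicePin.put("led", new HashSet<Integer>(Arrays.asList(12, 13)));
        System.out.println(sketch.getServicePins("led").length); // 2
        System.out.println(sketch.getServicePins("unknown").length); // 0
    }
}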

From source file:amie.keys.CombinationsExplorationNew.java

private static HashMap<Rule, HashSet<String>> discoverConditionalKeysFirstLevel(
        HashMap<Rule, GraphNew> ruleToGraphNew, HashMap<Integer, GraphNew> instantiatedProperty2GraphNew) {
    Rule rule = new Rule();
    for (int conditionProperty : instantiatedProperty2GraphNew.keySet()) {
        GraphNew graph = instantiatedProperty2GraphNew.get(conditionProperty);
        String prop = id2Property.get(conditionProperty);

        Iterable<Rule> conditions = Utilities.getConditions(rule, prop, (int) support, kb);
        for (Rule conditionRule : conditions) {
            GraphNew newGraph = new GraphNew();
            discoverConditionalKeysForCondition(newGraph, graph, graph.topGraphNodes(), conditionRule);
            // newGraph is freshly constructed above, so it can never be null here
            ruleToGraphNew.put(conditionRule, newGraph);
        }
    }

    HashMap<Rule, HashSet<String>> newRuleToExtendWith = new HashMap<>();
    for (Rule conRule : ruleToGraphNew.keySet()) {
        GraphNew newGraph = ruleToGraphNew.get(conRule);
        HashSet<String> properties = new HashSet<>();
        for (Node node : newGraph.topGraphNodes()) {
            if (node.toExplore) {
                Iterator<Integer> it = node.set.iterator();
                int prop = it.next();
                String propertyStr = id2Property.get(prop);
                properties.add(propertyStr);
            }

        }
        if (properties.size() != 0) {
            newRuleToExtendWith.put(conRule, properties);
        }
    }
    return newRuleToExtendWith;
}

From source file:cm.confide.ex.chips.RecipientAlternatesAdapter.java

/**
 * Get a HashMap of address to RecipientEntry that contains all contact
 * information for a contact with the provided address, if one exists. This
 * may block the UI, so run it in an async task.
 *
 * @param context Context.
 * @param inAddresses Array of addresses on which to perform the lookup.
 * @param callback RecipientMatchCallback called when a match or matches are
 *                 found; results are delivered through this callback (the
 *                 method itself returns void).
 */
public static void getMatchingRecipients(Context context, BaseRecipientAdapter adapter,
        ArrayList<String> inAddresses, int addressType, Account account, RecipientMatchCallback callback) {
    Queries.Query query;
    if (addressType == QUERY_TYPE_EMAIL) {
        query = Queries.EMAIL;
    } else {
        query = Queries.PHONE;
    }
    int addressesSize = Math.min(MAX_LOOKUPS, inAddresses.size());
    HashSet<String> addresses = new HashSet<String>();
    StringBuilder bindString = new StringBuilder();
    // Create the "?" string and set up arguments.
    for (int i = 0; i < addressesSize; i++) {
        Rfc822Token[] tokens = Rfc822Tokenizer.tokenize(inAddresses.get(i).toLowerCase());
        addresses.add(tokens.length > 0 ? tokens[0].getAddress() : inAddresses.get(i));
        bindString.append("?");
        if (i < addressesSize - 1) {
            bindString.append(",");
        }
    }

    if (Log.isLoggable(TAG, Log.DEBUG)) {
        Log.d(TAG, "Doing reverse lookup for " + addresses.toString());
    }

    String[] addressArray = new String[addresses.size()];
    addresses.toArray(addressArray);
    HashMap<String, RecipientEntry> recipientEntries = null;
    Cursor c = null;

    try {
        c = context.getContentResolver().query(query.getContentUri(), query.getProjection(),
                query.getProjection()[Queries.Query.DESTINATION] + " IN (" + bindString.toString() + ")",
                addressArray, null);
        recipientEntries = processContactEntries(c);
        callback.matchesFound(recipientEntries);
    } finally {
        if (c != null) {
            c.close();
        }
    }
    // See if any entries did not resolve; if so, we need to check other
    // directories
    final Set<String> matchesNotFound = new HashSet<String>();
    if (recipientEntries.size() < addresses.size()) {
        final List<DirectorySearchParams> paramsList;
        Cursor directoryCursor = null;
        try {
            directoryCursor = context.getContentResolver().query(DirectoryListQuery.URI,
                    DirectoryListQuery.PROJECTION, null, null, null);
            if (directoryCursor == null) {
                paramsList = null;
            } else {
                paramsList = BaseRecipientAdapter.setupOtherDirectories(context, directoryCursor, account);
            }
        } finally {
            if (directoryCursor != null) {
                directoryCursor.close();
            }
        }
        // Run a directory query for each unmatched recipient.
        HashSet<String> unresolvedAddresses = new HashSet<String>();
        for (String address : addresses) {
            if (!recipientEntries.containsKey(address)) {
                unresolvedAddresses.add(address);
            }
        }

        matchesNotFound.addAll(unresolvedAddresses);

        if (paramsList != null) {
            Cursor directoryContactsCursor = null;
            for (String unresolvedAddress : unresolvedAddresses) {
                for (int i = 0; i < paramsList.size(); i++) {
                    try {
                        directoryContactsCursor = doQuery(unresolvedAddress, 1, paramsList.get(i).directoryId,
                                account, context.getContentResolver(), query);
                    } finally {
                        if (directoryContactsCursor != null && directoryContactsCursor.getCount() == 0) {
                            directoryContactsCursor.close();
                            directoryContactsCursor = null;
                        } else {
                            break;
                        }
                    }
                }
                if (directoryContactsCursor != null) {
                    try {
                        final Map<String, RecipientEntry> entries = processContactEntries(
                                directoryContactsCursor);

                        for (final String address : entries.keySet()) {
                            matchesNotFound.remove(address);
                        }

                        callback.matchesFound(entries);
                    } finally {
                        directoryContactsCursor.close();
                    }
                }
            }
        }
    }

    // If no matches found in contact provider or the directories, try the extension
    // matcher.
    // todo (aalbert): This whole method needs to be in the adapter?
    if (adapter != null) {
        final Map<String, RecipientEntry> entries = adapter.getMatchingRecipients(matchesNotFound);
        if (entries != null && entries.size() > 0) {
            callback.matchesFound(entries);
            for (final String address : entries.keySet()) {
                matchesNotFound.remove(address);
            }
        }
    }
    callback.matchesNotFound(matchesNotFound);
}
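
Because the javadoc warns that the lookup may block the UI, a caller would typically run it on a worker thread. A minimal sketch, assuming context, adapter, addresses, account, and callback exist in the enclosing scope and that QUERY_TYPE_EMAIL is visible to the caller; note the callback methods then fire on the worker thread, so any UI updates inside them must be posted back to the main thread.

new Thread(new Runnable() {
    @Override
    public void run() {
        RecipientAlternatesAdapter.getMatchingRecipients(
                context, adapter, addresses, QUERY_TYPE_EMAIL, account, callback);
    }
}).start();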

From source file:com.fstx.stdlib.author.old.AuthorizationBeanBuilderTest.java

public void testBuildAuthorizationBean() throws DAOException {

    AuthorizationBean ab = new AuthorizationBeanBuilder().buildAuthorizationBean(u1.getUsername());
    HashSet myRights = ab.getRights();

    assertEquals(4, myRights.size());
    assertTrue(myRights.contains("right1"));
    assertTrue(myRights.contains("right2"));
    assertTrue(myRights.contains("right3"));
    assertTrue(myRights.contains("Otherright2"));

}

From source file:ru.retbansk.utils.scheduled.impl.ReadEmailAndConvertToXmlSpringImplTest.java

@Test
public void readEmailTest() throws Exception {
    HashSet<DayReport> daySet = reader.readEmail();
    Assert.assertNotNull(daySet);
    Assert.assertEquals(2, daySet.size());
    DayReport fromTester = null;
    DayReport fromAnotherTester = null;
    SortedSet<DayReport> sortedDaySet = new TreeSet<DayReport>();
    sortedDaySet.addAll(daySet);
    Assert.assertEquals(2, sortedDaySet.size());
    Iterator<DayReport> iterator = sortedDaySet.iterator();
    // Exactly two day reports are expected (asserted above); the sorted order
    // determines which report belongs to which tester.
    while (iterator.hasNext()) {
        fromAnotherTester = iterator.next();
        fromTester = iterator.next();
    }

    Assert.assertNotNull(fromAnotherTester);
    Assert.assertNotNull(fromTester);
    Assert.assertEquals(USER2, fromTester.getPersonId());
    Assert.assertEquals(USER, fromAnotherTester.getPersonId());
    Assert.assertEquals(3, fromTester.getReportList().size());
    Assert.assertEquals(1, fromAnotherTester.getReportList().size());
    Assert.assertEquals(TEST_STRING, fromTester.getReportList().get(0).getWorkDescription());
    Assert.assertEquals(8, fromAnotherTester.getReportList().get(0).getElapsedTime());
    Assert.assertEquals(TEST_STRING2, fromTester.getReportList().get(2).getWorkDescription());

}