Example usage for java.util HashSet add

List of usage examples for java.util HashSet add

Introduction

This page lists usage examples for java.util.HashSet.add.

Prototype

public boolean add(E e) 

Document

Adds the specified element to this set if it is not already present. Returns true if this set did not already contain the specified element.
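
A minimal self-contained sketch of that contract (illustrative, not taken from the sources below): the first add of an element returns true; a repeated add returns false and leaves the set unchanged.

import java.util.HashSet;

public class HashSetAddDemo {
    public static void main(String[] args) {
        HashSet<String> set = new HashSet<>();
        System.out.println(set.add("a")); // true: "a" was absent
        System.out.println(set.add("a")); // false: already present, set unchanged
        System.out.println(set.size());   // 1
    }
}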

Usage

From source file:edu.udel.ece.infolab.btc.Utils.java

/**
 * Flatten a list of triples to n-tuples containing many objects for the same
 * predicate. Generates one n-tuple per predicate.
 *
 * @param triples The n-triples to parse.
 * @param map     Receives, per predicate, the set of collected values.
 * @param types   Receives the objects of rdf:type statements; may be null.
 * @param isOut   If true collect objects, otherwise collect subjects.
 */
private static void flattenNTriples(final String triples, final Map<String, HashSet<String>> map,
        final HashSet<String> types, final boolean isOut) {
    try {
        initParser();
        parser.parse(new StringReader(triples), "");
        for (Statement st : collector.getStatements()) {
            sb.setLength(0);
            final String subject = sb.append('<').append(st.getSubject().toString()).append('>').toString();
            sb.setLength(0);
            final String predicate = sb.append('<').append(st.getPredicate().toString()).append('>').toString();
            sb.setLength(0);
            final String object = (st.getObject() instanceof URI)
                    ? sb.append('<').append(st.getObject().toString()).append('>').toString()
                    : st.getObject().toString();
            if (types != null && predicate.equals(RDF_TYPE)) {
                types.add(object);
            } else {
                HashSet<String> hs = map.get(predicate);
                final String toAdd = isOut ? object : subject;
                if (hs == null) {
                    hs = new HashSet<String>();
                    map.put(predicate, hs);
                }
            if (hs.size() < 65535) // cap at 2^16 - 1 entries per predicate
                    hs.add(toAdd);
            }
        }
    } catch (RDFParseException | RDFHandlerException | IOException e1) {
        // Errors are swallowed silently; any partially parsed results remain in map/types.
    }
}
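
The get-or-create sequence above (look up the predicate's set, create and put it when absent, then add) is a common HashSet.add pattern; since Java 8 it can be collapsed with Map.computeIfAbsent. A minimal sketch of the same capped insert, where addCapped and MAX_VALUES are illustrative names, not from the original source:

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;

public class GetOrCreateDemo {
    private static final int MAX_VALUES = 65535; // 2^16 - 1, same cap as above

    static void addCapped(Map<String, HashSet<String>> map, String predicate, String value) {
        // computeIfAbsent creates the set only on the first insertion for this key
        HashSet<String> hs = map.computeIfAbsent(predicate, k -> new HashSet<>());
        if (hs.size() < MAX_VALUES) {
            hs.add(value);
        }
    }

    public static void main(String[] args) {
        Map<String, HashSet<String>> map = new HashMap<>();
        addCapped(map, "<p>", "<o1>");
        addCapped(map, "<p>", "<o1>"); // duplicate: HashSet.add returns false, set unchanged
        System.out.println(map); // {<p>=[<o1>]}
    }
}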

From source file:main.java.repartition.SimpleTr.java

static double getDeltaIdt(WorkloadBatch wb, SimpleTr t, MigrationPlan m) {

    int span = m.fromSet.size();
    double idt = (double) (span) * (1 / t.period);

    int incident_span = 0;
    double incident_idt = 0.0d;

    for (Entry<Integer, HashSet<Integer>> entry : t.serverDataSet.entrySet()) {

        HashSet<Integer> unique_trSet = new HashSet<Integer>();

        for (Integer d_id : entry.getValue()) {
            SimpleVertex v = wb.hgr.getVertex(d_id);

            for (SimpleHEdge h : wb.hgr.getIncidentEdges(v)) {
                Transaction incident_tr = wb.getTransaction(h.getId());

                if (incident_tr.getTr_id() != t.id) {
                    if (!unique_trSet.contains(incident_tr.getTr_id())) {
                        incident_span += getIncidentSpan(incident_tr, m);
                    }

                    unique_trSet.add(incident_tr.getTr_id());
                }
            }
        }
    }

    // Calculate incident idt
    incident_idt = (double) (incident_span) * (1 / t.period);

    // Calculate total idt
    double total_idt = idt + incident_idt;

    // Finally, calculate the delta
    double delta_idt = (double) (total_idt / (Global.servers * WorkloadExecutor.sum_of_one_by_period));

    // Return Net Improvement Per Data Migration (NIPDM)
    return delta_idt;
    //return ((double) (delta_idt)/m.req_data_mgr);      
}
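
Since HashSet.add returns false when the element is already present, the contains check and the add in the loop above can be fused into one call. A minimal sketch of the fused form (plain ints stand in for the transaction ids; not the original classes):

import java.util.HashSet;

public class FusedAddDemo {
    public static void main(String[] args) {
        HashSet<Integer> seen = new HashSet<>();
        int[] incidentIds = {7, 3, 7, 9, 3};
        int count = 0;
        for (int id : incidentIds) {
            // add returns true only the first time an id is inserted,
            // so the body runs once per distinct id
            if (seen.add(id)) {
                count++;
            }
        }
        System.out.println(count); // 3 distinct ids
    }
}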

From source file:carmen.LocationResolver.java

protected static void loadNameAndAbbreviation(String filename, HashSet<String> fullName,
        HashMap<String, String> abbreviations, boolean secondColumnKey) throws FileNotFoundException {
    Scanner inputScanner = new Scanner(new FileInputStream(filename), "UTF-8");
    while (inputScanner.hasNextLine()) {
        String line = inputScanner.nextLine().toLowerCase();
        String[] splitString = line.split("\t");
        splitString[0] = splitString[0].trim();
        if (fullName != null)
            fullName.add(splitString[0]);
        if (abbreviations != null) {
            if (!secondColumnKey) {
                abbreviations.put(splitString[0], splitString[1]);
            } else {
                abbreviations.put(splitString[1], splitString[0]);
            }
        }
    }
    inputScanner.close();
}
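
One note on resource handling: the Scanner above leaks if an exception escapes the loop before close() runs. A minimal sketch of the same read-and-add loop using try-with-resources (Java 7+; the file name is illustrative):

import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.util.HashSet;
import java.util.Scanner;

public class ScanDemo {
    public static void main(String[] args) throws FileNotFoundException {
        HashSet<String> fullName = new HashSet<>();
        // try-with-resources closes the Scanner even if an exception escapes the loop
        try (Scanner in = new Scanner(new FileInputStream("names.tsv"), "UTF-8")) {
            while (in.hasNextLine()) {
                String[] cols = in.nextLine().toLowerCase().split("\t");
                fullName.add(cols[0].trim());
            }
        }
        System.out.println(fullName.size());
    }
}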

From source file:edu.illinois.cs.cogcomp.transliteration.WikiTransliteration.java

/**
 * This is the same as probs, only in a more convenient format. Does not include weights on productions.
 * @param probs production probabilities
 * @return hashmap mapping from Production[0] => Production[1]
 */
public static HashMap<String, HashSet<String>> GetProbMap(HashMap<Production, Double> probs) {
    HashMap<String, HashSet<String>> result = new HashMap<>();
    for (Production pair : probs.keySet()) {
        if (!result.containsKey(pair.getFirst())) {
            result.put(pair.getFirst(), new HashSet<String>());
        }
        HashSet<String> set = result.get(pair.getFirst());
        set.add(pair.getSecond());
        // Note: this re-put is redundant; `set` is already the map's value for this key.
        result.put(pair.getFirst(), set);
    }

    return result;
}

From source file:amie.keys.CombinationsExplorationNew.java

private static HashSet<Integer> getRelations(Rule rule, Map<String, Integer> relation2Id) {
    List<ByteString> relationsInRule = rule.getAllRelationsBS();
    HashSet<Integer> result = new HashSet<>();
    for (ByteString relation : relationsInRule) {
        Integer id = relation2Id.get(relation.toString());
        if (id != null) {
            result.add(id);
        }
    }
    return result;
}

From source file:com.opensearchserver.textextractor.Main.java

@Override
public Set<Class<?>> getClasses() {
    HashSet<Class<?>> classes = new HashSet<Class<?>>();
    classes.add(JacksonConfig.class);
    classes.add(JacksonJsonProvider.class);
    classes.add(ParserService.class);
    return classes;
}
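
This and the similar getClasses example at the end of this page populate a fixed set with repeated add calls. When the contents never change, Java 9's Set.of builds an equivalent immutable set in one expression; a minimal sketch (placeholder classes stand in for the JAX-RS providers):

import java.util.HashSet;
import java.util.Set;

public class ClassRegistryDemo {
    // Java 9+: Set.of builds an immutable set in one expression
    static final Set<Class<?>> CLASSES = Set.of(String.class, Integer.class);

    public static void main(String[] args) {
        // Equivalent mutable construction via repeated HashSet.add
        Set<Class<?>> classes = new HashSet<>();
        classes.add(String.class);
        classes.add(Integer.class);
        System.out.println(classes.equals(CLASSES)); // true: same elements
    }
}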

From source file:javadepchecker.Main.java

private static boolean checkPkg(File env) {
    boolean needed = true;
    HashSet<String> pkgs = new HashSet<String>();
    Collection<String> deps = null;

    BufferedReader in = null;
    try {
        Pattern dep_re = Pattern.compile("^DEPEND=\"([^\"]*)\"$");
        Pattern cp_re = Pattern.compile("^CLASSPATH=\"([^\"]*)\"$");

        String line;
        in = new BufferedReader(new FileReader(env));
        while ((line = in.readLine()) != null) {
            Matcher m = dep_re.matcher(line);
            if (m.matches()) {
                String atoms = m.group(1);
                for (String atom : atoms.split(":")) {
                    String pkg = atom;
                    if (atom.contains("@")) {
                        pkg = atom.split("@")[1];
                    }
                    pkgs.add(pkg);
                }
                continue;
            }
            m = cp_re.matcher(line);
            if (m.matches()) {
                Main classParser = new Main();
                for (String jar : m.group(1).split(":")) {
                    if (jar.endsWith(".jar")) {
                        classParser.processJar(new JarFile(image + jar));
                    }
                }
                deps = classParser.getDeps();
            }
        }

        for (String pkg : pkgs) {
            if (!depNeeded(pkg, deps)) {
                System.out.println(pkg);
                needed = false;
            }
        }
    } catch (IOException ex) {
        Logger.getLogger(Main.class.getName()).log(Level.SEVERE, null, ex);
    } finally {
        try {
            // Guard against NPE: `in` remains null if the FileReader constructor threw.
            if (in != null) {
                in.close();
            }
        } catch (IOException ex) {
            Logger.getLogger(Main.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
    return needed;
}

From source file:buildcraft.transport.ItemFacade.java

private static void generateFacadeStacks() {
    HashSet<IBlockState> states = new HashSet<>();

    for (Block b : Block.REGISTRY) {
        for (int i = 0; i < 16; i++) {
            try {
                Item item = Item.getItemFromBlock(b);
                if (item != null) {
                    IBlockState state = b.getStateFromMeta(i);
                    if (!states.contains(state) && isValidFacade(state)) {
                        states.add(state);
                        allStacks.add(BuildCraftTransport.facadeItem.getFacadeForBlock(state));
                    }
                }
            } catch (Exception e) {
                // Meta values with no valid block state are silently skipped.
            }
        }
    }

    if (BuildCraftTransport.showAllFacadesCreative) {
        previewStacks = allStacks;
    } else {
        previewStacks = new ArrayList<>();

        List<ItemStack> hollowFacades = new ArrayList<>();
        for (Block b : PREVIEW_FACADES) {
            if (isValidFacade(b.getDefaultState()) && !blacklistedFacades.contains(b.getDefaultState())) {
                ItemStack facade = BuildCraftTransport.facadeItem.getFacadeForBlock(b.getDefaultState());
                previewStacks.add(facade);
                FacadeState state = getFacadeStates(facade)[0];
                hollowFacades.add(getFacade(new FacadeState(state.state, state.wire, true)));
            }
        }
        previewStacks.addAll(hollowFacades);
    }
}

From source file:main.java.workload.WorkloadExecutor.java

public static Transaction streamOneTransaction(Database db, Cluster cluster, Workload wrl, WorkloadBatch wb) {

    Set<Integer> trTupleSet = null;
    Set<Integer> trDataSet = null;

    int min = 0, i = 0, n = 0, tr_id = 0;
    int type = trDistribution.sample();

    Transaction tr = null;

    if (!wb.getTrMap().containsKey(type))
        wb.getTrMap().put(type, new TreeMap<Integer, Transaction>());

    // new
    double rand_val = Global.rand.nextDouble();
    int toBeRemovedKey = -1;

    /**
     *  Implementing the new Workload Generation model 
     *  (Finalised as per November 20, 2014 and later improved on February 13-14, 2015)      
     */
    ++Global.global_trCount;

    // Transaction birth
    if (wb.getTrMap().get(type).isEmpty() || rand_val <= Global.percentageChangeInWorkload) {

        trTupleSet = wrl.getTrTupleSet(db, type);
        trDataSet = Workload.getTrDataSet(db, cluster, wb, trTupleSet);

        ++Global.global_trSeq;
        tr = new Transaction(Global.global_trSeq, type, trDataSet, Sim.time());

        // Add the incident transaction id
        wb.addIncidentTrId(cluster, trDataSet, Global.global_trSeq);

        // Add the newly created Transaction in the Workload Transaction map   
        wb.getTrMap().get(type).put(tr.getTr_id(), tr);

        // New improvements------------------------------------------------------------------------------
        double initial_period = (double) WorkloadExecutor.uNmax; // initialisation         
        tr.setTr_period(initial_period);

        perfm.Period.put(tr.getTr_id(), initial_period);
        Time.put(tr.getTr_id(), Sim.time());

        // Transaction repetition and retention of old transaction
    } else {

        ArrayList<Integer> idx2_id = new ArrayList<Integer>();
        ArrayList<Integer> idx_value = new ArrayList<Integer>();
        ArrayList<Integer> uT = new ArrayList<Integer>();

        TreeMap<Integer, Integer> idx2 = new TreeMap<Integer, Integer>(new ValueComparator<Integer>(idx));
        idx2.putAll(idx);

        min = Math.min(idx.size(), uNmax); // uNmax or uNmaxT

        i = 0;
        Iterator<Entry<Integer, Integer>> itr = idx2.entrySet().iterator();
        while (i < min) {
            idx2_id.add(itr.next().getKey());
            ++i;
        }

        // Deleting old Transactions
        if (idx2.size() > min) {
            toBeRemovedKey = idx2.lastKey();

            Transaction tr_old = wb.getTransaction(toBeRemovedKey);
            tr_old.calculateSpans(cluster);

            wb.removeTransaction(cluster, tr_old);
            idx.remove(toBeRemovedKey);
        }

        i = 0;
        while (i < idx2_id.size()) {
            idx_value.add(idx.get(idx2_id.get(i)));
            ++i;
        }

        i = 0;
        while (i < idx_value.size()) {
            uT.add(T.get(idx_value.get(i) - 1));
            ++i;
        }

        if (uT.size() == 1)
            n = 0;
        else
            n = Global.rand.nextInt(uT.size());

        tr_id = uT.get(n);

        tr = wb.getTransaction(tr_id);
        tr.setProcessed(false);

        // New improvements------------------------------------------------------------------------------
        double prev_period = perfm.Period.get(tr.getTr_id());
        double prev_time = Time.get(tr.getTr_id());

        double new_period = Global.expAvgWt * prev_period + (1 - Global.expAvgWt) * (Sim.time() - prev_time);

        tr.setTr_period(new_period);

        perfm.Period.remove(tr.getTr_id());
        perfm.Period.put(tr.getTr_id(), new_period);

        Time.remove(tr.getTr_id());
        Time.put(tr.getTr_id(), Sim.time());

    } // end-if-else()

    // Calculate latest Span
    tr.calculateSpans(cluster);

    // Update Idt
    tr.calculateIdt();

    if (perfm.Span.containsKey(tr.getTr_id()))
        perfm.Span.remove(tr.getTr_id());

    perfm.Span.put(tr.getTr_id(), tr.getTr_serverSpanCost());

    // Create an index entry for each newly created Transaction      
    idx.put(tr.getTr_id(), Global.global_trCount);
    T.add(tr.getTr_id());

    // New improvements------------------------------------------------------------------------------
    if (Global.global_trCount > Global.observationWindow) {

        _i = Global.global_trCount; // _i ~ Sim.time() 
        _W = Global.observationWindow; // _W ~ time 

        // Collect the distinct transaction ids seen within the current observation window.
        // (The original loop tested and incremented `n` instead of `_n`, a typo.)
        HashSet<Integer> unq = new HashSet<Integer>();
        for (int _n = (_i - _W); _n < _i; _n++) {
            unq.add(T.get(_n));
        }

        // Captures the number of total unique transaction for this observation window
        perfm.Unqlen.put((_i - _W), unq.size());

        // Calculate the impact of distributed transaction per transaction basis               
        double sum_of_span_by_period = 0.0;
        sum_of_one_by_period = 0.0;

        Iterator<Integer> unq_itr = unq.iterator();
        while (unq_itr.hasNext()) {
            int unq_T = unq_itr.next();

            int span = perfm.Span.get(unq_T);
            double period = perfm.Period.get(unq_T);

            double span_by_period = span / period; // Frequency = 1/Period (f=1/t) per unit time (i.e. 1 second)
            double one_by_period = 1 / period; // Frequency = 1/Period (f=1/t) per unit time (i.e. 1 second)

            sum_of_span_by_period += span_by_period;
            sum_of_one_by_period += one_by_period;
        }

        double i_dt = (sum_of_span_by_period) / (Global.servers * sum_of_one_by_period);
        perfm.I_Dt.put((_i - _W), i_dt);

        if (Double.isNaN(i_dt))
            currentIDt = 0;
        else
            currentIDt = i_dt;

        // Reset repartitioning cooling off period
        if (WorkloadExecutor.repartitioningCoolingOff
                && Sim.time() >= WorkloadExecutor.RepartitioningCoolingOffPeriod) {

            WorkloadExecutor.repartitioningCoolingOff = false;

            Global.LOGGER.info("-----------------------------------------------------------------------------");
            Global.LOGGER.info("Simulation time: " + Sim.time() / (double) Global.observationWindow + " hrs");
            Global.LOGGER.info("Repartitioning cooling off period ends.");
            Global.LOGGER
                    .info("System will now check whether another repartitioning is required at this moment.");
            Global.LOGGER.info("Current IDt: " + currentIDt);
            Global.LOGGER.info("User defined IDt threshold: " + Global.userDefinedIDtThreshold);

            if (currentIDt < Global.userDefinedIDtThreshold) {
                Global.LOGGER.info("Repartitioning is not required at this moment.");

                //This is to disable on-demand atomic repartitioning for A-ARHC only
                if (Global.adaptive) {
                    Global.LOGGER.info("Disabling on-demand atomic repartitioning for A-ARHC ...");
                    WorkloadExecutor.isAdaptive = false;
                }

                Global.LOGGER.info("Continuing transaction processing ...");
            }
        }

        perfm.time.put((_i - _W), Sim.time());
    }

    // Add a hyperedge to workload hypergraph
    wb.addHGraphEdge(cluster, tr);

    // Collect transactional streams if data stream mining is enabled
    if (Global.streamCollection)
        Global.dsm.collectStream(cluster, tr);

    return tr;
}
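
In the observation-window block above, a HashSet collects the distinct transaction ids of the last _W entries of T. The same dedup can be done in a single step by handing a subList view to the HashSet constructor, which adds each element and drops duplicates; a minimal sketch with plain integers:

import java.util.HashSet;
import java.util.List;

public class WindowDedupDemo {
    public static void main(String[] args) {
        List<Integer> T = List.of(5, 7, 5, 9, 7, 7);
        int window = 4;
        int end = T.size();
        // The HashSet constructor adds every element of the view, dropping duplicates
        HashSet<Integer> unq = new HashSet<>(T.subList(end - window, end));
        System.out.println(unq.size()); // 3 distinct ids in the last 4 entries: 5, 9, 7
    }
}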

From source file:com.opensearchserver.affinities.Main.java

@Override
public Set<Class<?>> getClasses() {
    HashSet<Class<?>> classes = new HashSet<Class<?>>();
    classes.add(JacksonConfig.class);
    classes.add(JacksonJsonProvider.class);
    classes.add(JacksonJsonpInterceptor.class);
    classes.add(AffinitiesService.class);
    return classes;
}