Example usage for java.util HashMap containsKey

List of usage examples for java.util HashMap containsKey

Introduction

On this page you can find usage examples for java.util.HashMap.containsKey.

Prototype

public boolean containsKey(Object key) 

Document

Returns true if this map contains a mapping for the specified key.
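
A minimal, self-contained sketch (not taken from the projects below) showing the two most common containsKey patterns: checking before reading a value, and guarding a put so an existing mapping is not overwritten.

import java.util.HashMap;

public class ContainsKeyDemo {
    public static void main(String[] args) {
        HashMap<String, Integer> wordCounts = new HashMap<>();
        wordCounts.put("apple", 3);

        // containsKey returns true only if a mapping for the key exists
        if (wordCounts.containsKey("apple")) {
            System.out.println("apple -> " + wordCounts.get("apple"));
        }

        // guard the put so an existing value is never overwritten
        if (!wordCounts.containsKey("banana")) {
            wordCounts.put("banana", 1);
        }
        System.out.println(wordCounts);
    }
}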

Usage

From source file:com.advdb.footballclub.FootBallClub.java

private void createFactMatch(Session session) {
    Transaction transaction = null;
    try {
        System.out.println("start createFactMatch.");
        transaction = session.beginTransaction();
        String hqlDC = "from DimCompetition dc";
        List result = session.createQuery(hqlDC).list();
        result.forEach((object) -> {

            DimCompetition dimCompetition = (DimCompetition) object;
            int startYear = dimCompetition.getSeasonStartYear();
            int endYear = dimCompetition.getSeasonEndYear();
            GregorianCalendar cal = randomYear(startYear, endYear);
            //                createDate(session, gregorianCalendar);
            if (dimCompetition.getCompetiotionName().equals(COMPETITION_NAME_ARR[0])
                    || dimCompetition.getCompetiotionName().equals(COMPETITION_NAME_ARR[1])) {

                int times = randomWithRange(1, 7);
                //Random opponent
                String hqlDO = "from DimOpponent do";
                List resultDO = session.createQuery(hqlDO).list();
                HashMap<Integer, Integer> opponentMap = new HashMap<Integer, Integer>();
                int opponentIndex;
                do {

                    opponentIndex = randBetween(0, resultDO.size());
                    if (!opponentMap.containsKey(opponentIndex)) {
                        opponentMap.put(opponentIndex, opponentIndex);
                        generateFactMatch(opponentIndex, cal, dimCompetition.getCompetitionKy(), session);
                        //Random tactic
                        //Random player

                    }
                } while (opponentMap.size() != times);

            } else if (dimCompetition.getCompetiotionName().equals(COMPETITION_NAME_ARR[2])
                    || dimCompetition.getCompetiotionName().equals(COMPETITION_NAME_ARR[3])) {

                //Random opponent
                String hqlDO = "from DimOpponent do";
                List resultDO = session.createQuery(hqlDO).list();
                HashMap<Integer, Integer> opponentMap = new HashMap<Integer, Integer>();
                int opponentIndex;
                do {
                    opponentIndex = randBetween(0, resultDO.size());
                    if (!opponentMap.containsKey(opponentIndex)) {
                        opponentMap.put(opponentIndex, opponentIndex);
                        generateFactMatch(opponentIndex, cal, dimCompetition.getCompetitionKy(), session);

                    }
                } while (opponentMap.size() != 38);

            } else {

            }

        });
        session.flush();
        session.clear();
        //            }
        transaction.commit();
        System.out.println("finish createFactMatch.");
    } catch (HibernateException e) {
        if (transaction != null) {
            transaction.rollback();
        }
        e.printStackTrace();
    } catch (Exception e) {
        e.printStackTrace();
    }
}

From source file:com.thoratou.exact.processors.ExactProcessor.java

private List<PathStep> getPathStepList(HashMap<String, List<PathStep>> mergedMap, String baseClassName) {
    if (mergedMap.containsKey(baseClassName)) {
        return mergedMap.get(baseClassName);
    } else {
        List<PathStep> newPathStepList = new ArrayList<PathStep>();
        mergedMap.put(baseClassName, newPathStepList);
        return newPathStepList;
    }

}
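
As a hedged aside (not part of the project above): on Java 8 and later, the containsKey-then-put lookup in this method can be collapsed into a single computeIfAbsent call, which returns the existing list or creates, stores and returns a new one. An equivalent sketch, reusing the PathStep type from the example:

private List<PathStep> getPathStepList(HashMap<String, List<PathStep>> mergedMap, String baseClassName) {
    // creates and stores an empty list only when baseClassName has no mapping yet
    return mergedMap.computeIfAbsent(baseClassName, k -> new ArrayList<PathStep>());
}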

From source file:com.intellij.plugins.haxe.ide.annotator.HaxeSemanticAnnotator.java

static public void checkMethodArguments(final HaxeMethodModel currentMethod, final AnnotationHolder holder) {
    boolean hasOptional = false;
    HashMap<String, PsiElement> argumentNames = new HashMap<String, PsiElement>();
    for (final HaxeParameterModel param : currentMethod.getParameters()) {
        String paramName = param.getName();

        if (param.hasOptionalPsi() && param.getVarInitPsi() != null) {
            // @TODO: Move to bundle
            holder.createWarningAnnotation(param.getOptionalPsi(),
                    "Optional not needed when specified an init value");
        }
        if (param.getVarInitPsi() != null && param.getTypeTagPsi() != null) {
            TypeTagChecker.check(param.getPsi(), param.getTypeTagPsi(), param.getVarInitPsi(), true, holder);
        }
        if (param.isOptional()) {
            hasOptional = true;
        } else if (hasOptional) {
            // @TODO: Move to bundle
            holder.createWarningAnnotation(param.getPsi(), "Non-optional argument after optional argument");
        }

        if (argumentNames.containsKey(paramName)) {
            // @TODO: Move to bundle
            holder.createWarningAnnotation(param.getNameOrBasePsi(),
                    "Repeated argument name '" + paramName + "'");
            holder.createWarningAnnotation(argumentNames.get(paramName),
                    "Repeated argument name '" + paramName + "'");
        } else {
            argumentNames.put(paramName, param.getNameOrBasePsi());
        }
    }
}
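
A hedged aside, not part of the plugin code above: since putIfAbsent (Java 8+) returns the value previously mapped to the key, the duplicate-name detection can also be written without a separate containsKey call, assuming the map never stores null values:

PsiElement previous = argumentNames.putIfAbsent(paramName, param.getNameOrBasePsi());
if (previous != null) {
    // previous is the PSI element of the first parameter that used this name
    holder.createWarningAnnotation(param.getNameOrBasePsi(), "Repeated argument name '" + paramName + "'");
    holder.createWarningAnnotation(previous, "Repeated argument name '" + paramName + "'");
}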

From source file:com.googlesource.gerrit.plugins.supermanifest.JiriManifestParser.java

public static JiriProjects getProjects(GerritRemoteReader reader, String repoKey, String ref, String manifest)
        throws ConfigInvalidException, IOException {

    try (RepoMap<String, Repository> repoMap = new RepoMap<>()) {
        repoMap.put(repoKey, reader.openRepository(repoKey));
        Queue<ManifestItem> q = new LinkedList<>();
        q.add(new ManifestItem(repoKey, manifest, ref, "", false));
        HashMap<String, HashSet<String>> processedRepoFiles = new HashMap<>();
        HashMap<String, JiriProjects.Project> projectMap = new HashMap<>();

        while (q.size() != 0) {
            ManifestItem mi = q.remove();
            Repository repo = repoMap.get(mi.repoKey);
            if (repo == null) {
                repo = reader.openRepository(mi.repoKey);
                repoMap.put(mi.repoKey, repo);
            }
            HashSet<String> processedFiles = processedRepoFiles.get(mi.repoKey);
            if (processedFiles == null) {
                processedFiles = new HashSet<String>();
                processedRepoFiles.put(mi.repoKey, processedFiles);
            }
            if (processedFiles.contains(mi.manifest)) {
                continue;
            }
            processedFiles.add(mi.manifest);
            JiriManifest m;
            try {
                m = parseManifest(repo, mi.ref, mi.manifest);
            } catch (JAXBException | XMLStreamException e) {
                throw new ConfigInvalidException("XML parse error", e);
            }

            for (JiriProjects.Project project : m.projects.getProjects()) {
                project.fillDefault();
                if (mi.revisionPinned && project.Key().equals(mi.projectKey)) {
                    project.setRevision(mi.ref);
                }
                if (projectMap.containsKey(project.Key())) {
                    if (!projectMap.get(project.Key()).equals(project))
                        throw new ConfigInvalidException(String.format(
                                "Duplicate conflicting project %s in manifest %s\n%s\n%s", project.Key(),
                                mi.manifest, project.toString(), projectMap.get(project.Key()).toString()));
                } else {
                    projectMap.put(project.Key(), project);
                }
            }

            URI parentURI;
            try {
                parentURI = new URI(mi.manifest);
            } catch (URISyntaxException e) {
                throw new ConfigInvalidException("Invalid parent URI", e);
            }
            for (JiriManifest.LocalImport l : m.imports.getLocalImports()) {
                ManifestItem tw = new ManifestItem(mi.repoKey, parentURI.resolve(l.getFile()).getPath(), mi.ref,
                        mi.projectKey, mi.revisionPinned);
                q.add(tw);
            }

            for (JiriManifest.Import i : m.imports.getImports()) {
                i.fillDefault();
                URI uri;
                try {
                    uri = new URI(i.getRemote());
                } catch (URISyntaxException e) {
                    throw new ConfigInvalidException("Invalid URI", e);
                }
                String iRepoKey = new Project.NameKey(StringUtils.strip(uri.getPath(), "/")).toString();
                String iRef = i.getRevision();
                boolean revisionPinned = true;
                if (iRef.isEmpty()) {
                    iRef = REFS_HEADS + i.getRemotebranch();
                    revisionPinned = false;
                }

                ManifestItem tmi = new ManifestItem(iRepoKey, i.getManifest(), iRef, i.Key(), revisionPinned);
                q.add(tmi);
            }
        }
        return new JiriProjects(projectMap.values().toArray(new JiriProjects.Project[0]));
    }
}

From source file:edu.illinois.cs.cogcomp.transliteration.WikiTransliteration.java

/**
 * This used to have productions as an output variable. I (SWM) added it as the second element of the return pair.
 * @param word1
 * @param word2
 * @param maxSubstringLength
 * @param probs
 * @param floorProb
 * @param memoizationTable
 * @return
 */
public static Pair<Double, List<Production>> GetAlignmentProbabilityDebug(String word1, String word2,
        int maxSubstringLength, HashMap<Production, Double> probs, double floorProb,
        HashMap<Production, Pair<Double, List<Production>>> memoizationTable) {
    List<Production> productions = new ArrayList<>();
    Production bestPair = new Production(null, null);

    if (word1.length() == 0 && word2.length() == 0)
        return new Pair<>(1.0, productions);
    if (word1.length() * maxSubstringLength < word2.length())
        return new Pair<>(0.0, productions); //no alignment possible
    if (word2.length() * maxSubstringLength < word1.length())
        return new Pair<>(0.0, productions);

    Pair<Double, List<Production>> cached;
    if (memoizationTable.containsKey(new Production(word1, word2))) {
        cached = memoizationTable.get(new Production(word1, word2));
        productions = cached.getSecond();
        return new Pair<>(cached.getFirst(), productions);
    }

    double maxProb = 0;

    int maxSubstringLength1 = Math.min(word1.length(), maxSubstringLength);
    int maxSubstringLength2 = Math.min(word2.length(), maxSubstringLength);

    for (int i = 1; i <= maxSubstringLength1; i++) {
        String substring1 = word1.substring(0, i);
        for (int j = 0; j <= maxSubstringLength2; j++) {
            double localProb;
            if (probs.containsKey(new Production(substring1, word2.substring(0, j)))) {
                localProb = probs.get(new Production(substring1, word2.substring(0, j)));
                //double localProb = ((double)count) / totals[substring1];
                if (localProb < maxProb || localProb < floorProb)
                    continue; //this is a really bad transition--discard

                List<Production> outProductions;
                Pair<Double, List<Production>> ret = GetAlignmentProbabilityDebug(word1.substring(i),
                        word2.substring(j), maxSubstringLength, probs, maxProb / localProb, memoizationTable);
                outProductions = ret.getSecond();

                localProb *= ret.getFirst();
                if (localProb > maxProb) {
                    productions = outProductions;
                    maxProb = localProb;
                    bestPair = new Production(substring1, word2.substring(0, j));
                }
            }
        }
    }

    productions = new ArrayList<>(productions); //clone it before modifying
    productions.add(0, bestPair);

    memoizationTable.put(new Production(word1, word2), new Pair<>(maxProb, productions));

    return new Pair<>(maxProb, productions);
}
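
A hedged aside, not from the CogComp source: the memoization check above constructs the Production key twice and performs two lookups (containsKey, then get). If the table never stores null values, a single get is enough:

// sketch only: assumes memoizationTable never maps a key to null
Production key = new Production(word1, word2);
Pair<Double, List<Production>> cached = memoizationTable.get(key);
if (cached != null) {
    return new Pair<>(cached.getFirst(), cached.getSecond());
}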

From source file:edu.lternet.pasta.portal.statistics.GrowthStats.java

private HashMap<String, Long> buildHashMap(String sql) throws SQLException {
    Connection conn = databaseClient.getConnection();
    HashMap<String, Long> map = new HashMap<String, Long>();

    try {
        if (conn != null) {
            Statement stmnt = conn.createStatement();
            ResultSet rs = stmnt.executeQuery(sql);

            while (rs.next()) {
                String key = rs.getString(1);
                Long date_created = rs.getTimestamp(2).getTime();
                if (!map.containsKey(key)) {
                    map.put(key, date_created);
                }
            }
        }
    } finally {
        databaseClient.closeConnection(conn);
    }

    return map;

}

From source file:com.krawler.spring.crm.common.globalSearchDAOImpl.java

public KwlReturnObject globalQuickSearch(HashMap<String, Object> requestParams) throws ServiceException {
    List ll = null;
    int dl = 0;
    String keyword = "";
    String type = "";
    String companyid = "";
    String Hql = "";
    try {
        if (requestParams.containsKey("type") && requestParams.get("type") != null) {
            type = requestParams.get("type").toString();
        }
        if (requestParams.containsKey("keyword") && requestParams.get("keyword") != null) {
            keyword = StringEscapeUtils.escapeJavaScript(requestParams.get("keyword").toString());
        }
        if (requestParams.containsKey("companyid") && requestParams.get("companyid") != null) {
            companyid = requestParams.get("companyid").toString();
        }
        Pattern p = Pattern.compile("(?i)tag:['?(\\s*\\w+)'?]*", Pattern.CASE_INSENSITIVE);
        Matcher m = p.matcher(keyword);
        boolean tagQuery = m.matches();
        ArrayList filter_params = new ArrayList();
        if (!tagQuery) {
            String MyQuery = keyword;
            String MyQuery1 = keyword;
            if (keyword.length() > 2) {
                MyQuery = keyword + "%";
                MyQuery1 = "% " + MyQuery;
            }
            if (type.equals("user")) {
                Hql = "select u from User u where  u.deleteflag = 0  and u.company.companyID= ? and ( u.firstName like ? or u.lastName like ?) ";
                filter_params.add(companyid);
                filter_params.add(MyQuery);
                filter_params.add(MyQuery);
                if (requestParams.containsKey("usersList") && requestParams.get("usersList") != null) {
                    Hql += " and u.userID in (" + requestParams.get("usersList").toString() + ") ";
                }
            } else if (type.equals("cam")) {
                Hql = "select c from CrmCampaign c where c.deleteflag = 0  and c.company.companyID= ? and ( c.campaignname like ? or c.campaignname like ?) and c.isarchive=false and c.validflag=1";
                filter_params.add(companyid);
                filter_params.add(MyQuery);
                filter_params.add(MyQuery1);
                if (requestParams.containsKey("usersList") && requestParams.get("usersList") != null) {
                    Hql += " and c.usersByUserid.userID in (" + requestParams.get("usersList").toString()
                            + ") ";
                }
            } else if (type.equals("acc")) {
                Hql = "select distinct c from accountOwners ao inner join ao.account c  left join c.crmProducts as p where c.deleteflag = 0  and c.company.companyID= ? and ( c.accountname like ? or c.accountname like ?) and c.isarchive=false and c.validflag=1";
                filter_params.add(companyid);
                filter_params.add(MyQuery);
                filter_params.add(MyQuery1);
                if (requestParams.containsKey("usersList") && requestParams.get("usersList") != null) {
                    Hql += " and ao.usersByUserid.userID in (" + requestParams.get("usersList").toString()
                            + ") ";
                }

            } else if (type.equals("opp")) {
                Hql = "select distinct c from opportunityOwners oo inner join oo.opportunity c left join c.crmProducts as p where c.deleteflag = 0  and c.company.companyID= ? and ( c.oppname like ? or c.oppname like ?) and c.isarchive=false and c.validflag=1";
                filter_params.add(companyid);
                filter_params.add(MyQuery);
                filter_params.add(MyQuery1);
                if (requestParams.containsKey("usersList") && requestParams.get("usersList") != null) {
                    Hql += " and oo.usersByUserid.userID in (" + requestParams.get("usersList").toString()
                            + ") ";
                }
            } else if (type.equals("lea")) {
                Hql = "select distinct c from LeadOwners lo inner join lo.leadid c  left join c.crmProducts as p where c.deleteflag = 0 and c.company.companyID= ? and ( c.lastname like ? or c.lastname like ?) and c.isarchive=false and c.validflag=1 and c.isconverted= 0";
                filter_params.add(companyid);
                filter_params.add(MyQuery);
                filter_params.add(MyQuery1);
                if (requestParams.containsKey("usersList") && requestParams.get("usersList") != null) {
                    Hql += " and lo.usersByUserid.userID in (" + requestParams.get("usersList").toString()
                            + ") ";
                }

            } else if (type.equals("con")) {
                Hql = "select distinct c from contactOwners co inner join co.contact c where c.deleteflag = 0  and c.company.companyID= ? and ( c.firstname like ? or c.firstname like ? or c.lastname like ? or c.lastname like ?) and c.isarchive=false and c.validflag=1 ";
                filter_params.add(companyid);
                filter_params.add(MyQuery);
                filter_params.add(MyQuery1);
                filter_params.add(MyQuery);
                filter_params.add(MyQuery1);
                if (requestParams.containsKey("usersList") && requestParams.get("usersList") != null) {
                    Hql += " and co.usersByUserid.userID in (" + requestParams.get("usersList").toString()
                            + ") ";
                }
            } else if (type.equals("cas")) {
                Hql = "select c from CrmCase c where  c.deleteflag = 0  and c.company.companyID= ? and ( c.subject like ? or c.subject like ? ) and c.isarchive=false and c.validflag=1";
                filter_params.add(companyid);
                filter_params.add(MyQuery);
                filter_params.add(MyQuery1);
                if (requestParams.containsKey("usersList") && requestParams.get("usersList") != null) {
                    Hql += " and c.usersByUserid.userID in (" + requestParams.get("usersList").toString()
                            + ") ";
                }
            } else if (type.equals("pro")) {
                Hql = "select c from CrmProduct c where  c.deleteflag = 0  and c.company.companyID= ? and ( c.productname like ? or c.productname like ?) and c.isarchive=false and c.validflag=1";
                filter_params.add(companyid);
                filter_params.add(MyQuery);
                filter_params.add(MyQuery1);
                if (requestParams.containsKey("usersList") && requestParams.get("usersList") != null) {
                    Hql += " and c.usersByUserid.userID in (" + requestParams.get("usersList").toString()
                            + ") ";
                }
            } else if (type.equals("docs")) {
                Hql = "select c from com.krawler.common.admin.Docs c  where c.company.companyID=? and c.deleteflag=0 and ( c.docname like ? or c.docname like ?)";
                filter_params.add(companyid);
                filter_params.add(MyQuery);
                filter_params.add(MyQuery1);
                if (requestParams.containsKey("usersList") && requestParams.get("usersList") != null) {
                    Hql += " and c.userid.userID in (" + requestParams.get("usersList").toString() + ") ";
                }
            }
        }
        ll = executeQuery(Hql, filter_params.toArray());
        dl = ll.size();
    } catch (Exception e) {
        logger.warn(e.getMessage(), e);
        throw ServiceException.FAILURE("globalSearchDAOImpl.globalQuickSearch", e);
    }
    return new KwlReturnObject(true, KWLErrorMsgs.S01, "", ll, dl);
}
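
A hedged note, not part of the source above: every requestParams lookup in this method pairs containsKey with a non-null check, e.g. requestParams.containsKey("type") && requestParams.get("type") != null. When an absent key and a key mapped to null should be treated the same way, a single get covers both cases:

// sketch only: get returns null both when the key is absent and when it maps to null
Object typeParam = requestParams.get("type");
if (typeParam != null) {
    type = typeParam.toString();
}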

From source file:com.krawler.spring.hrms.common.hrmsExtApplDocsDAOImpl.java

public KwlReturnObject getDocs(HashMap<String, Object> requestParams) {
    boolean success = false;
    List lst = null;
    KwlReturnObject result = null;
    try {
        ArrayList name = null;
        String hql = "";
        ArrayList value = null;
        ArrayList orderby = null;
        ArrayList ordertype = null;
        String[] searchCol = null;
        if (requestParams.containsKey("primary") && (Boolean) requestParams.get("primary")) {
            hql = "from com.krawler.hrms.common.docs.HrmsDocmap where id=?";
            String id = requestParams.get("id").toString();
            lst = HibernateUtil.executeQuery(hibernateTemplate, hql, new Object[] { id });
            result = new KwlReturnObject(success, "success", "", lst, lst.size());
            return result;
        }
        hql = "from com.krawler.hrms.common.docs.HrmsDocmap";
        if (requestParams.get("filter_names") != null && requestParams.get("filter_values") != null) {
            name = (ArrayList) requestParams.get("filter_names");
            value = (ArrayList) requestParams.get("filter_values");
            hql += com.krawler.common.util.StringUtil.filterQuery(name, "where");
        }

        if (requestParams.get("searchcol") != null && requestParams.get("ss") != null) {
            searchCol = (String[]) requestParams.get("searchcol");
            hql += StringUtil.getSearchquery(requestParams.get("ss").toString(), searchCol, value);
        }

        if (requestParams.get("order_by") != null && requestParams.get("order_type") != null) {
            orderby = (ArrayList) requestParams.get("order_by");
            ordertype = (ArrayList) requestParams.get("order_type");
            hql += com.krawler.common.util.StringUtil.orderQuery(orderby, ordertype);
        }
        result = StringUtil.getPagingquery(requestParams, searchCol, hibernateTemplate, hql, value);
        success = true;
    } catch (Exception ex) {
        success = false;
    } finally {
        return result;
    }
}

From source file:edu.utah.bmi.ibiomes.topo.bio.Biomolecule.java

/**
 * Get list of different residues present in the molecule.
 * @return List of residue occurrences
 */
public List<ResidueOccurrence> getResidueOccurences() {
    HashMap<String, ResidueOccurrence> occurences = new HashMap<String, ResidueOccurrence>();
    List<ResidueOccurrence> occurrenceList = new ArrayList<ResidueOccurrence>();
    if (this.residues != null) {
        for (Residue residue : residues) {
            String residueType = residue.getCode();
            if (occurences.containsKey(residueType)) {
                occurences.get(residueType).addOccurrence();
            } else {
                occurences.put(residueType, new ResidueOccurrence(residue, 1));
            }
        }
    }
    for (ResidueOccurrence occurrence : occurences.values()) {
        occurrenceList.add(occurrence);
    }
    return occurrenceList;
}
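
A hedged aside, not from the source above: when the accumulated value is a plain counter rather than a domain object such as ResidueOccurrence, the same count-by-key pattern is usually written with Map.merge on Java 8+. A standalone sketch with a hypothetical countCodes helper:

static HashMap<String, Integer> countCodes(String[] codes) {
    HashMap<String, Integer> counts = new HashMap<String, Integer>();
    for (String code : codes) {
        // merge inserts 1 for a new key, otherwise combines the old count with 1 using Integer::sum
        counts.merge(code, 1, Integer::sum);
    }
    return counts; // countCodes(new String[]{"ALA", "GLY", "ALA"}) yields {ALA=2, GLY=1}
}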

From source file:europarl.PhraseTranslation.java

public boolean getFromGz(String fileName, String targetWord, int limit) {
    String strLine;
    ArrayList<String> line_triple = new ArrayList<String>();

    BufferedReader gzipReader;
    Pattern word_align = Pattern.compile("(\\w+) \\(\\{(.*?)\\}\\) ");

    Bag<String> words_list = new Bag<String>(); //Set of ALL words: it will be the list of attributes
    ArrayList<PhraseTranslation> translations = new ArrayList<PhraseTranslation>();
    try {
        gzipReader = new BufferedReader(
                new InputStreamReader(new GZIPInputStream(new FileInputStream(fileName))));

        while ((strLine = gzipReader.readLine()) != null) //read-everything
        {
            line_triple.add(strLine);
            if (line_triple.size() == 3) //triple finished
            {
                //TODO: match only complete words
                //TODO: stem it before doing this

                Matcher matcher = word_align.matcher(line_triple.get(2));
                String[] foreign_words = line_triple.get(1).split(" ");
                line_triple.clear();
                if (!strLine.contains(targetWord)) //skip it
                    continue;

                ArrayList<String> e_phrase = new ArrayList<String>();
                String translation = "";
                while (matcher.find()) //each iteration is word +alignment
                {
                    assert matcher.groupCount() == 2;
                    String e_word = matcher.group(1).trim();
                    if (e_word.equals("NULL"))
                        e_word = "";
                    if (stopwordsList.contains(e_word))
                        continue;
                    if (stemmer != null)
                        e_word = stemmer.stem(e_word);

                    e_phrase.add(e_word);
                    words_list.add(e_word);

                    //we don't care about the alignment of non-target words
                    if (!e_word.equals(targetWord))
                        continue;

                    //parse the { x y z } alignment part
                    ArrayList<Integer> f_words = new ArrayList<Integer>();
                    translation = "";
                    //for each number between curly brackets
                    for (String number : matcher.group(2).split(" ")) {
                        if (!number.isEmpty()) {
                            int n_word = Integer.parseInt(number) - 1;
                            f_words.add(n_word);
                            translation += foreign_words[n_word] + " ";
                        }
                    } // end of curly brackets for

                } //end of word+alignment while
                if (!translation.isEmpty()) {
                    PhraseTranslation trans = new PhraseTranslation(e_phrase, translation);
                    translations.add(trans);
                }
                line_triple.clear();
            } //end of triple-finished if
            if (translations.size() == limit)
                break; //stop collecting!
        } //end of the read-everything while
    } catch (Exception e) {
        log.error("Error: " + e);
        e.printStackTrace();
        return false;
    }

    //what we NOW have: a set of attributes in HashSet<String>words_list
    //a ArrayList<PhraseTranslation> translations      
    log.info("Collected " + translations.size() + " phrases and " + words_list.size() + " words");

    postProcessData(translations, words_list);

    //now convert the data we collected to Weka data
    //we needed to do "double passing" because we need to initialize
    //the dataset with the complete list of attributes

    //this will convert word to attributes: they are all "boolean"
    ArrayList<Attribute> attrs = new ArrayList<Attribute>();
    HashMap<String, Attribute> attrs_map = new HashMap<String, Attribute>();
    Attribute att;
    for (String word : words_list) {
        att = new Attribute(word);
        attrs.add(att);
        attrs_map.put(word, att);
    }

    //now we need to manage class.
    //each translation is a class, so we need to get all of them
    HashMap<String, Integer> class_map = new HashMap<String, Integer>();
    ArrayList<String> classes = new ArrayList<String>();
    for (PhraseTranslation phraseTranslation : translations) {
        if (!class_map.containsKey(phraseTranslation.getTranslatedWord())) {
            class_map.put(phraseTranslation.getTranslatedWord(), classes.size());
            classes.add(phraseTranslation.getTranslatedWord());
        }
    }

    log.info(targetWord + " has " + classes.size() + " translations:");
    if (log.isInfoEnabled())
        for (String translation : classes)
            System.out.println(translation);
    att = new Attribute("%class", classes);
    attrs.add(att);
    attrs_map.put("%class", att);
    dataSet = new Instances("dataset", attrs, 0);
    for (PhraseTranslation phraseTranslation : translations) {
        SparseInstance inst = new SparseInstance(attrs.size());
        //set everything to 0
        for (int i = 0; i < attrs.size(); i++)
            inst.setValue(i, 0);
        //set present word to 1
        for (String word : phraseTranslation.getPhraseWords())
            inst.setValue(attrs_map.get(word), 1);
        //set class of instance
        inst.setValue(attrs_map.get("%class"), class_map.get(phraseTranslation.getTranslatedWord()));
        dataSet.add(inst);
    }

    return true;
}