Example usage for java.util HashSet contains

List of usage examples for java.util HashSet contains

Introduction

On this page you can find example usage for java.util HashSet contains.

Prototype

public boolean contains(Object o) 

Document

Returns true if this set contains the specified element.
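
A minimal, self-contained sketch of the call in isolation (class name and values are illustrative):

import java.util.HashSet;

public class HashSetContainsDemo {
    public static void main(String[] args) {
        HashSet<String> tags = new HashSet<>();
        tags.add("alpha");
        tags.add("beta");

        System.out.println(tags.contains("alpha")); // true: "alpha" was added above
        System.out.println(tags.contains("gamma")); // false: "gamma" was never added
    }
}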

Usage

From source file:edu.ucla.cs.scai.canali.core.index.BuildIndex.java

private void loadTriples() throws Exception {
    HashMap<String, Integer> propertyFrequency = new HashMap<>();
    HashSet<String> shortProperties = new HashSet<>();
    if (minPropertyLength > 1) {
        System.out.println(
                "Finding properties to be ignored because they have length less than " + minPropertyLength);
        int i = 0;
        try (BufferedReader in = new BufferedReader(new FileReader(basePathInput + "property_labels"))) {
            String l;
            while ((l = in.readLine()) != null) {
                i++;
                if (l.length() > 0) {
                    try {
                        StringTokenizer st = new StringTokenizer(l, "\t<> ");
                        String uri = st.nextToken().trim();
                        if (uri.startsWith("http")) {
                            String label = st.hasMoreTokens() ? st.nextToken().trim() : "";
                            if (label.length() < minPropertyLength && !shortProperties.contains(uri)) {
                                shortProperties.add(uri);
                                System.out
                                        .println("Property " + uri + " will be ignored, having label " + label);
                                propertyFrequency.put(uri, 0);
                            }
                        }
                    } catch (Exception e) {
                        System.out.println("Error at line " + i + ": " + l);
                        e.printStackTrace();
                    }
                }
            }
        }
        System.out.println(shortProperties.size() + " properties will be ignored, having length less than "
                + minPropertyLength);
    }
    int maxNumberOfProperties = 100000;
    System.out.println("Finding the the " + maxNumberOfProperties
            + " most frequent propertys of the propertys whose label has at least two characters");
    try (BufferedReader in = new BufferedReader(new FileReader(basePathInput + "triples"))) {
        String l = in.readLine();
        int n = 0;
        while (l != null && l.length() > 0) {
            if (l.contains("classDegree")) {
                System.out.print("");
            }
            StringTokenizer st = new StringTokenizer(l, "<> \t");
            String subject = st.nextToken();
            String property = st.nextToken();
            String value = st.nextToken();
            if (subject.startsWith("http") && property.startsWith("http")
                    && !shortProperties.contains(property)) {
                if (value.startsWith("http") || value.startsWith("ftp:")) { //it is an entity
                    Integer c = propertyFrequency.get(property);
                    if (c == null) {
                        propertyFrequency.put(property, 1);
                    } else {
                        propertyFrequency.put(property, 1 + c);
                    }
                } else { //it is a literal
                    if (value.endsWith("^^")) { //it is a basic type
                        String type = StringEscapeUtils.unescapeJava(st.nextToken());
                        String literalType = basicTypesMapping.get(type);
                        if (literalType != null) {
                            Integer c = propertyFrequency.get(property);
                            if (c == null) {
                                propertyFrequency.put(property, 1);
                            } else {
                                propertyFrequency.put(property, 1 + c);
                            }
                        } else {
                            System.out.println("Basic type not recognized in " + l);
                        }
                    } else {
                        if (value.startsWith("\"")) { //it is a String
                            Integer c = propertyFrequency.get(property);
                            if (c == null) {
                                propertyFrequency.put(property, 1);
                            } else {
                                propertyFrequency.put(property, 1 + c);
                            }
                        } else {
                            System.out.println("Basic type not recognized in " + l);
                        }
                    }
                }
                n++;
                if (n % 1000000 == 0) {
                    System.out.println("Scanned " + (n / 1000000) + "M triples");
                }
            } else {
                //System.out.println("Invalid triple: " + l);
            }
            l = in.readLine();
        }
    }
    shortProperties = null;
    System.gc();
    ArrayList<Map.Entry<String, Integer>> f = new ArrayList<>(propertyFrequency.entrySet());
    Collections.sort(f, new Comparator<Map.Entry<String, Integer>>() {
        @Override
        public int compare(Map.Entry<String, Integer> o1, Map.Entry<String, Integer> o2) {
            return Integer.compare(o2.getValue(), o1.getValue());
        }
    });
    int minFreq = 1;
    if (f.size() > maxNumberOfProperties) {
        minFreq = f.get(maxNumberOfProperties - 1).getValue();
        if (f.get(maxNumberOfProperties).getValue().equals(f.get(maxNumberOfProperties - 1).getValue())) {
            minFreq++;
        }
    }
    for (Map.Entry<String, Integer> e : f) {
        System.out.println(e.getKey() + "\t" + e.getValue());
    }
    System.out.println("Keeping propertys with at least " + minFreq + " occurrences");
    HashSet<String> acceptedProperties = new HashSet<>();
    for (Map.Entry<String, Integer> e : propertyFrequency.entrySet()) {
        if (e.getValue() >= minFreq) {
            acceptedProperties.add(e.getKey());
        }
    }
    System.out.println(acceptedProperties.size() + " properties kept over " + f.size());
    f = null;
    propertyFrequency = null;
    System.gc();
    System.out.println("Mapping entities and property URIs to ids");
    int nEntityTriples = 0;
    HashMap<String, Integer> nLiteralTriples = new HashMap<>();
    for (String type : literalTypes) {
        nLiteralTriples.put(type, 0);
    }
    HashSet<String> unrecognizedBasicTypes = new HashSet<>();
    //count entity-valued and literal-valued triples
    //and
    //create the association between uris and ids for entities        
    try (BufferedReader in = new BufferedReader(new FileReader(basePathInput + "triples"))) {
        String l = in.readLine();
        int n = 0;
        while (l != null && l.length() > 0) {
            StringTokenizer st = new StringTokenizer(l, "<> \t");
            String subject = st.nextToken();
            String property = st.nextToken();
            if (!acceptedProperties.contains(property)) {
                l = in.readLine();
                continue;
            }
            String value = st.nextToken();
            if (subject.startsWith("http") && property.startsWith("http")) {
                Integer idSbj = getEntityIdFromUri(subject); //entityIdFromUri.get(subject);
                if (idSbj == null) {
                    idSbj = entityIdFromUriWithPrefix.size() + 1;//entityIdFromUri.size() + 1;
                    putEntityIdFromUri(subject, idSbj); //entityIdFromUri.put(subject, idSbj);
                }
                Integer idAttr = propertyIdFromUri.get(property);
                if (idAttr == null) {
                    idAttr = propertyIdFromUri.size() + 1;
                    propertyIdFromUri.put(property, idAttr);
                }
                if (value.startsWith("http") || value.startsWith("ftp:")) { //it is an entity
                    Integer idVal = getEntityIdFromUri(value); //entityIdFromUri.get(value);
                    if (idVal == null) {
                        idVal = entityIdFromUriWithPrefix.size() + 1;//entityIdFromUri.size() + 1;
                        putEntityIdFromUri(value, idVal);//entityIdFromUri.put(value, idVal);
                    }
                    Integer idInvAttr = propertyIdFromUri.get(property + "Inv");
                    if (idInvAttr == null) {
                        idInvAttr = propertyIdFromUri.size() + 1;
                        propertyIdFromUri.put(property + "Inv", idInvAttr);
                    }
                    nEntityTriples += 2;
                } else { //it is a literal
                    if (value.endsWith("^^")) { //it is a basic type
                        String type = StringEscapeUtils.unescapeJava(st.nextToken());
                        String literalType = basicTypesMapping.get(type);
                        if (literalType != null) {
                            nLiteralTriples.put(literalType, nLiteralTriples.get(literalType) + 1);
                        } else {
                            if (!unrecognizedBasicTypes.contains(type)) {
                                System.out.println("Unrecognized type: " + type);
                                System.out.println("in line: " + l);
                                unrecognizedBasicTypes.add(type);
                            }
                        }
                    } else {
                        if (value.startsWith("\"")) { //it is a String
                            nLiteralTriples.put(STRING, nLiteralTriples.get(STRING) + 1);
                        }
                    }
                }
                n++;
                if (n % 1000000 == 0) {
                    System.out.println("Loaded " + (n / 1000000) + "M triples");
                }
            } else {
                System.out.println("Invalid triple: " + l);
            }
            l = in.readLine();
        }
    }
    System.out.println("Number of triples with entity value: " + nEntityTriples);
    for (String type : literalTypes) {
        System.out.println("Number of triples with " + type + " value: " + nLiteralTriples.get(type));
    }
    entityTriplesSubjects = new int[nEntityTriples];
    entityTriplesProperties = new int[nEntityTriples];
    entityTriplesValues = new int[nEntityTriples];
    for (String type : literalTypes) {
        literalTriplesSubjects.put(type, new int[nLiteralTriples.get(type)]);
        literalTriplesProperties.put(type, new int[nLiteralTriples.get(type)]);
    }
    //load the triples into the arrays created above
    System.out.println("Loading triples");
    try (BufferedReader in = new BufferedReader(new FileReader(basePathInput + "triples"))) {
        String l = in.readLine();
        int n = 0;
        while (l != null && l.length() > 0) {
            StringTokenizer st = new StringTokenizer(l, "<> \t");
            String sbj = st.nextToken();
            String attr = st.nextToken();
            if (!acceptedProperties.contains(attr)) {
                l = in.readLine();
                continue;
            }
            String val = st.nextToken();
            if (sbj.startsWith("http") && attr.startsWith("http")) {
                if (val.startsWith("http") || val.startsWith("ftp:")) { //it is an entity
                    updateTriples(sbj, attr, val, null);
                } else { //it is a literal
                    if (val.endsWith("^^")) { //it is a basic type
                        String type = StringEscapeUtils.unescapeJava(st.nextToken());
                        String literalType = basicTypesMapping.get(type);
                        if (literalType != null) {
                            updateTriples(sbj, attr, null, literalType);
                        } else {
                            if (!unrecognizedBasicTypes.contains(type)) {
                                System.out.println("Unrecognized type: " + type);
                                System.out.println("in line: " + l);
                                unrecognizedBasicTypes.add(type);
                            }
                        }
                    } else {
                        if (val.startsWith("\"")) { //it is a String
                            updateTriples(sbj, attr, null, STRING);
                        } else {
                            System.out.println("Unexpected line: " + l);
                        }
                    }
                }
                n++;
                if (n % 1000000 == 0) {
                    System.out.println("Loaded " + (n / 1000000) + "M triples");
                }
            } else {
                System.out.println("Invalid triple: " + l);
            }
            l = in.readLine();
        }
    }
    System.out.println("Entity value triples: " + entityTriplesSubjects.length);
    for (String type : literalTriplesSubjects.keySet()) {
        System.out.println(type + " value triples: " + literalTriplesSubjects.get(type).length);
    }
    propertyUri = new String[propertyIdFromUri.size() + 1];
    for (Map.Entry<String, Integer> e : propertyIdFromUri.entrySet()) {
        propertyUri[e.getValue()] = e.getKey();
    }
    entityUriWithPrefix = new String[entityIdFromUriWithPrefix.size() + 1];
    for (Map.Entry<String, Integer> e : entityIdFromUriWithPrefix.entrySet()) {
        entityUriWithPrefix[e.getValue()] = e.getKey();
    }
    //entityUri = new String[entityIdFromUri.size() + 1];
    //for (Map.Entry<String, Integer> e : entityIdFromUri.entrySet()) {
    //    entityUri[e.getValue()] = e.getKey();
    //}
    entityLabels = new HashSet[entityIdFromUriWithPrefix.size() + 1]; //entityLabels = new HashSet[entityIdFromUri.size() + 1];
    entityClasses = new HashSet[entityIdFromUriWithPrefix.size() + 1]; //entityClasses = new HashSet[entityIdFromUri.size() + 1];
    propertyLabels = new HashSet[propertyIdFromUri.size() + 1];
    entityOutProperties = new HashSet[entityIdFromUriWithPrefix.size() + 1]; //entityOutProperties = new HashSet[entityIdFromUri.size() + 1];
    entityInProperties = new HashSet[entityIdFromUriWithPrefix.size() + 1]; //entityInProperties = new HashSet[entityIdFromUri.size() + 1];
    propertyOutProperties = new HashSet[propertyIdFromUri.size() + 1];
    propertyInProperties = new HashSet[propertyIdFromUri.size() + 1];
    propertyHasLiteralRange = new boolean[propertyIdFromUri.size() + 1];
    propertyCount = new int[propertyIdFromUri.size() + 1];
}
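
The method above uses contains as a filter in each pass over the triples file: first to skip properties whose label is too short, then to keep only the accepted properties. A minimal sketch of that membership-filter pattern in isolation (names and URIs are illustrative):

import java.util.HashSet;

public class PropertyFilterSketch {
    public static void main(String[] args) {
        HashSet<String> shortProperties = new HashSet<>();
        shortProperties.add("http://example.org/p"); // hypothetical ignored property

        HashSet<String> acceptedProperties = new HashSet<>();
        acceptedProperties.add("http://example.org/population");

        String property = "http://example.org/population";
        // Same guards as in loadTriples(): drop short properties, keep accepted ones.
        if (!shortProperties.contains(property) && acceptedProperties.contains(property)) {
            System.out.println("keep " + property);
        }
    }
}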

From source file:it.cnr.icar.eric.client.xml.registry.LifeCycleManagerImpl.java

/**
 * @param keys
 *            Collection of objects which are typically Key instances. Non-Key
 *            objects are ignored.
 * 
 * @return an ObjectRefList binding object representing the list of unique
 *         Keys
 */
private List<ObjectRefType> createObjectRefList(Collection<?> keys) throws JAXRException {
    ArrayList<ObjectRefType> orl = new ArrayList<ObjectRefType>();

    // Used to prevent duplicate keys from being sent
    HashSet<String> processedIds = new HashSet<String>();
    processedIds.add(null);

    if (keys != null) {
        for (Iterator<?> it = keys.iterator(); it.hasNext();) {
            Object obj = it.next();

            if (obj instanceof KeyImpl) {
                KeyImpl key = (KeyImpl) obj;
                String id = key.getId();

                if (!processedIds.contains(id)) {
                    processedIds.add(id);

                    ObjectRefType ebObjectRefType = rimFac.createObjectRefType();
                    ebObjectRefType.setId(id);
                    orl.add(ebObjectRefType);
                }
            }
        }
    }

    return orl;
}
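
In the example above, contains and add are always called as a pair. Since HashSet.add returns false when the element is already present, the same duplicate check can be collapsed into a single call; a minimal sketch of that variant (names and values are illustrative):

import java.util.HashSet;

public class DedupSketch {
    public static void main(String[] args) {
        HashSet<String> processedIds = new HashSet<>();
        String[] ids = { "id-1", "id-2", "id-1" }; // illustrative input
        for (String id : ids) {
            if (processedIds.add(id)) { // true only the first time this id is seen
                System.out.println("processing " + id);
            }
        }
    }
}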

From source file:mobisocial.musubi.nearby.scanner.GpsScannerTask.java

@Override
protected List<NearbyItem> doInBackground(Void... params) {
    if (DBG)
        Log.d(TAG, "Scanning for nearby gps...");
    while (!mmLocationScanComplete) {
        synchronized (mmLocationResult) {
            if (!mmLocationScanComplete) {
                try {
                    if (DBG)
                        Log.d(TAG, "Waiting for location results...");
                    mmLocationResult.wait();
                } catch (InterruptedException e) {
                }
            }
        }
    }
    if (DBG)
        Log.d(TAG, "Got location " + mmLocation);
    if (isCancelled()) {
        return null;
    }

    try {
        if (DBG)
            Log.d(TAG, "Querying gps server...");
        Uri uri = Uri.parse("http://bumblebee.musubi.us:6253/nearbyapi/0/findgroup");

        StringBuffer sb = new StringBuffer();
        DefaultHttpClient client = new DefaultHttpClient();
        HttpPost httpPost = new HttpPost(uri.toString());
        httpPost.addHeader("Content-Type", "application/json");
        JSONArray buckets = new JSONArray();

        double lat = mmLocation.getLatitude();
        double lng = mmLocation.getLongitude();

        long[] coords = GridHandler.getGridCoords(lat, lng, 5280 / 2);
        Log.i(TAG, "coords: " + Arrays.toString(coords));

        //TODO: encrypt coords with mmPassword

        for (long c : coords) {
            MessageDigest md;
            try {
                byte[] obfuscate = ("sadsalt193s" + mmPassword).getBytes();
                md = MessageDigest.getInstance("SHA-256");
                ByteBuffer b = ByteBuffer.allocate(8 + obfuscate.length);
                b.putLong(c);
                b.put(obfuscate);
                String secret_bucket = Base64.encodeToString(md.digest(b.array()), Base64.DEFAULT);
                buckets.put(buckets.length(), secret_bucket);
            } catch (NoSuchAlgorithmException e) {
                throw new RuntimeException("your platform does not support sha256", e);
            }
        }
        Log.i(TAG, "buckets: " + buckets);
        httpPost.setEntity(new StringEntity(buckets.toString()));
        try {
            HttpResponse execute = client.execute(httpPost);
            InputStream content = execute.getEntity().getContent();
            BufferedReader buffer = new BufferedReader(new InputStreamReader(content));
            String s = "";
            while ((s = buffer.readLine()) != null) {
                if (isCancelled()) {
                    return null;
                }
                sb.append(s);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        HashSet<Pair<TByteArrayList, TByteArrayList>> dupes = new HashSet<Pair<TByteArrayList, TByteArrayList>>();

        String response = sb.toString();
        JSONArray groupsJSON = new JSONArray(response);
        Log.d(TAG, "Got " + groupsJSON.length() + " groups");
        for (int i = 0; i < groupsJSON.length(); i++) {
            try {
                String s_enc_data = groupsJSON.get(i).toString();
                byte[] enc_data = Base64.decode(s_enc_data, Base64.DEFAULT);
                byte[] key = Util.sha256(("happysalt621" + mmPassword).getBytes());
                byte[] data;

                Cipher cipher;
                AlgorithmParameterSpec iv_spec;
                SecretKeySpec sks;
                try {
                    cipher = Cipher.getInstance("AES/CBC/PKCS7Padding");
                } catch (Exception e) {
                    throw new RuntimeException("AES not supported on this platform", e);
                }
                try {
                    iv_spec = new IvParameterSpec(enc_data, 0, 16);
                    sks = new SecretKeySpec(key, "AES");
                    cipher.init(Cipher.DECRYPT_MODE, sks, iv_spec);
                } catch (Exception e) {
                    throw new RuntimeException("bad iv or key", e);
                }
                try {
                    data = cipher.doFinal(enc_data, 16, enc_data.length - 16);
                } catch (Exception e) {
                    throw new RuntimeException("body decryption failed", e);
                }

                JSONObject group = new JSONObject(new String(data));

                String group_name = group.getString("group_name");
                byte[] group_capability = Base64.decode(group.getString("group_capability"), Base64.DEFAULT);
                String sharer_name = group.getString("sharer_name");
                byte[] sharer_hash = Base64.decode(group.getString("sharer_hash"), Base64.DEFAULT);
                byte[] thumbnail = null;
                if (group.has("thumbnail"))
                    thumbnail = Base64.decode(group.getString("thumbnail"), Base64.DEFAULT);
                int member_count = group.getInt("member_count");
                int sharer_type = group.getInt("sharer_type");
                Pair<TByteArrayList, TByteArrayList> p = Pair.with(new TByteArrayList(sharer_hash),
                        new TByteArrayList(group_capability));
                if (dupes.contains(p))
                    continue;
                dupes.add(p);
                addNearbyItem(new NearbyFeed(mContext, group_name, group_capability, sharer_name,
                        Authority.values()[sharer_type], sharer_hash, thumbnail, member_count));
            } catch (Throwable e) {
                Log.e(TAG, "Failed to parse group " + i, e);
            }
        }
    } catch (Exception e) {
        if (DBG)
            Log.d(TAG, "Error searching nearby feeds", e);
    }
    return null;
}

From source file:com.bluexml.side.Integration.alfresco.xforms.webscript.XFormsWork.java

private void exceptionToString(Throwable e, StringBuffer sb, HashSet<Throwable> causes) {
    exceptionToStringOpenTag("exception", sb);
    exceptionToStringAddType(e.getClass(), sb);
    // exceptionToStringAddEntry("message", e.getMessage(), sb);
    exceptionToStringOpenTag("message", sb);
    sb.append(e.getMessage());
    if (faultyId != null) {
        // Brice : added information to return suspected id
        exceptionToStringAddEntry("suspectedId", faultyId, sb);
    }
    exceptionToStringCloseTag("message", sb);
    StringWriter sw = new StringWriter();
    e.printStackTrace(new PrintWriter(sw));
    String stacktrace = sw.toString();
    exceptionToStringAddEntry("stacktrace", stacktrace, sb);
    Throwable cause = e.getCause();
    if (cause != null && !causes.contains(cause)) {
        causes.add(cause);
        exceptionToStringOpenTag("cause", sb);
        exceptionToString(cause, sb, causes);
        exceptionToStringCloseTag("cause", sb);
    }
    exceptionToStringCloseTag("exception", sb);
}
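
Here the causes set is a guard against cycles in the cause chain: a Throwable is only recursed into when contains reports it has not been visited yet. A minimal sketch of that visited-set pattern on its own (class and method names are hypothetical):

import java.util.HashSet;

public class CauseChainSketch {
    static void walkCauses(Throwable e, HashSet<Throwable> visited) {
        Throwable cause = e.getCause();
        if (cause != null && !visited.contains(cause)) {
            visited.add(cause);
            System.out.println("caused by: " + cause);
            walkCauses(cause, visited); // recurse, but never into a Throwable seen before
        }
    }

    public static void main(String[] args) {
        Exception inner = new IllegalStateException("inner");
        Exception outer = new RuntimeException("outer", inner);
        walkCauses(outer, new HashSet<>());
    }
}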

From source file:com.splicemachine.derby.impl.sql.execute.actions.DDLConstantOperation.java

/**
 * Adjust dependencies of a table on ANSI UDTs. We only add one dependency
 * between a table and a UDT. If the table already depends on the UDT, we don't add
 * a redundant dependency.
 */
protected void adjustUDTDependencies(Activation activation, ColumnInfo[] columnInfos, boolean dropWholeTable)
        throws StandardException {
    if ((!dropWholeTable) && (columnInfos == null)) {
        return;
    }

    LanguageConnectionContext lcc = activation.getLanguageConnectionContext();
    TransactionController tc = lcc.getTransactionExecute();
    DataDictionary dd = lcc.getDataDictionary();
    TableDescriptor td = activation.getDDLTableDescriptor();

    int changedColumnCount = columnInfos == null ? 0 : columnInfos.length;
    HashMap addUdtMap = new HashMap();
    HashMap dropUdtMap = new HashMap();
    HashSet addColumnNames = new HashSet();
    HashSet dropColumnNames = new HashSet();

    // first find all of the new ansi udts which the table must depend on
    // and the old ones which are candidates for removal
    for (int i = 0; i < changedColumnCount; i++) {
        ColumnInfo ci = columnInfos[i];

        // skip this column if it is not a UDT
        AliasDescriptor ad = dd.getAliasDescriptorForUDT(tc, columnInfos[i].dataType);
        if (ad == null) {
            continue;
        }

        String key = ad.getObjectID().toString();

        if (ci.action == ColumnInfo.CREATE) {
            addColumnNames.add(ci.name);

            // no need to add the descriptor if it is already on the list
            if (addUdtMap.get(key) != null) {
                continue;
            }

            addUdtMap.put(key, ad);
        } else if (ci.action == ColumnInfo.DROP) {
            dropColumnNames.add(ci.name);
            dropUdtMap.put(key, ad);
        }
    }

    // nothing to do if there are no changed columns of udt type
    // and this is not a DROP TABLE command
    if ((!dropWholeTable) && (addUdtMap.size() == 0) && (dropUdtMap.size() == 0)) {
        return;
    }

    //
    // Now prune from the add list all udt descriptors for which we already have dependencies.
    // These are the udts for old columns. This supports the ALTER TABLE ADD COLUMN
    // case.
    //
    // Also prune from the drop list all udt descriptors which will still be
    // referenced by the remaining columns.
    //
    ColumnDescriptorList cdl = td.getColumnDescriptorList();
    int totalColumnCount = cdl.size();

    for (int i = 0; i < totalColumnCount; i++) {
        ColumnDescriptor cd = cdl.elementAt(i);

        // skip columns that are being added and dropped. we only want the untouched columns
        if (addColumnNames.contains(cd.getColumnName()) || dropColumnNames.contains(cd.getColumnName())) {
            continue;
        }

        // nothing to do if the old column isn't a UDT
        AliasDescriptor ad = dd.getAliasDescriptorForUDT(tc, cd.getType());
        if (ad == null) {
            continue;
        }

        String key = ad.getObjectID().toString();

        // ha, it is a UDT.
        if (dropWholeTable) {
            dropUdtMap.put(key, ad);
        } else {
            if (addUdtMap.get(key) != null) {
                addUdtMap.remove(key);
            }
            if (dropUdtMap.get(key) != null) {
                dropUdtMap.remove(key);
            }
        }
    }

    adjustUDTDependencies(lcc, dd, td, addUdtMap, dropUdtMap);
}

From source file:es.caib.seycon.ng.servei.XarxaServiceImpl.java

private boolean sonAliasIguales(String alias1, String alias2) {
    if ((alias1 == null && alias2 != null) || (alias1 != null && alias2 == null))
        return false; // only one of them is null
    if (alias1 == null && alias2 == null)
        return true; // both are null
    HashSet<String> h_alias1 = new HashSet<String>();
    HashSet<String> h_alias2 = new HashSet<String>();
    // alias1 and alias2 are NOT null
    String[] v_alias1 = alias1.split(" "); //$NON-NLS-1$
    String[] v_alias2 = alias2.split(" "); //$NON-NLS-1$
    // Store them in the sets
    if (v_alias1 != null)
        for (int i = 0; i < v_alias1.length; i++) {
            String act = v_alias1[i];
            if (act != null && !"".equals(act.trim())) //$NON-NLS-1$
                h_alias1.add(act);
        }
    if (v_alias2 != null)
        for (int i = 0; i < v_alias2.length; i++) {
            String act = v_alias2[i];
            if (act != null && !"".equals(act.trim())) //$NON-NLS-1$
                h_alias2.add(act);
        }
    if (h_alias1.size() != h_alias2.size())
        return false; // they are not the same size
    // Compare them by looking up every element of the first in the second:
    for (Iterator<String> it = h_alias1.iterator(); it.hasNext();) {
        String elem = it.next();
        if (!h_alias2.contains(elem))
            return false;
    }
    return true;
}
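
Once both alias strings are loaded into sets, the size check plus the element-by-element contains loop is exactly what Set.equals specifies (same size and mutual containment), so the comparison could also be written as a single call; a sketch under that assumption (class and values are illustrative):

import java.util.HashSet;
import java.util.Set;

public class AliasCompareSketch {
    // Equivalent to the size check followed by the contains loop above.
    static boolean sameAliases(Set<String> h_alias1, Set<String> h_alias2) {
        return h_alias1.equals(h_alias2);
    }

    public static void main(String[] args) {
        HashSet<String> a = new HashSet<>();
        a.add("mail");
        a.add("smtp");
        HashSet<String> b = new HashSet<>();
        b.add("smtp");
        b.add("mail");
        System.out.println(sameAliases(a, b)); // true: same elements, order irrelevant
    }
}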

From source file:edu.mayo.informatics.cts.CTSVAPI.lucene.LuceneSearch.java

/**
 * This method implements the CTS search lookupConceptCodesByDesignation. The parameters it takes are not exactly
 * the same, nor are the exceptions that it throws. It is meant to be used in conjunction with the existing
 * implementation(s).
 * 
 * The return type is a hack - ConceptIds are supposed to contain the concept code and code system id. In this
 * case, I put in the codeSystemName instead of the ID. The IDs need to be filled in as a post process (in the
 * existing implementation)
 * 
 */
public ConceptId[] luceneLookupConceptCodesByDesignation(String codeSystemName, String matchText,
        String matchAlgorithm_code, String language_code, boolean activeConceptsOnly, int timeout,
        int sizeLimit) throws BadlyFormedMatchText, UnexpectedError, TimeoutError, UnknownMatchAlgorithm {
    ArrayList resultsToReturn = new ArrayList();
    HashSet resultsDupeRemover = new HashSet();
    try {
        SearchServiceInterface searcher = getSearcher(codeSystemName, matchAlgorithm_code);

        StringBuffer queryString = new StringBuffer();

        queryString.append("property:(textualPresentation)");
        if (matchText != null && matchText.length() > 0) {
            queryString.append(" AND " + makeMatchTextQueryPortion(matchAlgorithm_code, matchText));
        }

        if (activeConceptsOnly) {
            queryString.append(" AND NOT isActive:(F)");
        }

        // if they supply *, search all code systems (by not restricting the query)
        if (!codeSystemName.equals("*")) {
            queryString.append(" AND codingSchemeName:(\"" + codeSystemName + "\")");
        }

        if (language_code != null && language_code.length() > 0) {
            queryString.append(" AND language:(" + language_code + "*)");
        }

        Query query;
        try {
            query = parser_.parse(queryString.toString());
        } catch (ParseException e) {
            throw new BadlyFormedMatchText(matchText);
        }

        // make it bigger, because it will usually match on multiple designations per concept,
        // and I will end up returning less concepts than the limit requested in that case.
        int localLimit = (sizeLimit == 0 ? Integer.MAX_VALUE : sizeLimit * 5);

        Document[] docs = searcher.search(query, null, true, localLimit);
        float[] scores = searcher.getScores();

        for (int i = 0; i < docs.length; i++) {
            if (sizeLimit != 0 && resultsToReturn.size() == sizeLimit) {
                break;
            }
            ConceptId temp = new ConceptId();

            //chop off any urn:oid: prefix stuff
            String tempId = docs[i].get("codingSchemeId");
            if (tempId.toLowerCase().startsWith("urn:oid:")) {
                tempId = tempId.substring("urn:oid:".length());
            }
            temp.setCodeSystem_id(tempId);
            temp.setConcept_code(docs[i].get("conceptCode"));

            if (!resultsDupeRemover.contains(temp.getCodeSystem_id() + ":" + temp.getConcept_code())) {
                ScoredConceptId scoredConceptId = new ScoredConceptId();
                scoredConceptId.conceptId = temp;
                scoredConceptId.score = scores[i];

                String isPreferred = docs[i].get("isPreferred");

                scoredConceptId.isPreferred = isPreferred != null && !isPreferred.equals("F");

                resultsToReturn.add(scoredConceptId);
                resultsDupeRemover.add(temp.getCodeSystem_id() + ":" + temp.getConcept_code());
            }
        }
        // sort them further (break lucene ties based on preferred flags)
        Collections.sort(resultsToReturn, new ScoredConceptIdComparator());
    } catch (UnknownMatchAlgorithm e) {
        throw e;
    } catch (UnexpectedError e) {
        throw e;
    } catch (BadlyFormedMatchText e) {
        throw e;
    } catch (InternalIndexerErrorException e) {
        throw new UnexpectedError(e.toString() + " " + (e.getCause() == null ? "" : e.getCause().toString()));
    } catch (Exception e) {
        logger.error("Unexpected Error", e);
        throw new UnexpectedError(e.toString() + " " + (e.getCause() == null ? "" : e.getCause().toString()));
    }

    ConceptId[] finalResult = new ConceptId[resultsToReturn.size()];

    for (int i = 0; i < resultsToReturn.size(); i++) {
        finalResult[i] = ((ScoredConceptId) resultsToReturn.get(i)).conceptId;
    }

    return finalResult;
}
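
The duplicate filter above concatenates the code system id and the concept code into a single String before calling contains. An alternative sketch, assuming Java 16 or later, keeps the two parts separate by using a record as the set element (the record and values here are hypothetical):

import java.util.HashSet;

public class ConceptDedupSketch {
    // Records get value-based equals and hashCode, so they work directly as HashSet elements.
    record ConceptKey(String codeSystemId, String conceptCode) {}

    public static void main(String[] args) {
        HashSet<ConceptKey> seen = new HashSet<>();
        ConceptKey first = new ConceptKey("2.16.840.1", "C0001");
        ConceptKey again = new ConceptKey("2.16.840.1", "C0001");

        System.out.println(seen.contains(first)); // false: nothing added yet
        seen.add(first);
        System.out.println(seen.contains(again)); // true: equal field values match
    }
}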

From source file:com.splicemachine.db.impl.sql.compile.TableElementList.java

/**
 * Complain if a generation clause references other generated columns. This
 * is required by the SQL Standard, part 2, section 4.14.8.
 *
 * @param fromList      The FromList in question.
 * @param baseTable  Table descriptor if this is an ALTER TABLE statement.
 * @exception StandardException      Thrown on error
 */
void findIllegalGenerationReferences(FromList fromList, TableDescriptor baseTable) throws StandardException {
    ArrayList generatedColumns = new ArrayList();
    HashSet names = new HashSet();
    int size = size();

    // add in existing generated columns if this is an ALTER TABLE statement
    if (baseTable != null) {
        ColumnDescriptorList cdl = baseTable.getGeneratedColumns();
        int count = cdl.size();
        for (int i = 0; i < count; i++) {
            names.add(cdl.elementAt(i).getColumnName());
        }
    }

    // find all of the generated columns
    for (int index = 0; index < size; index++) {
        ColumnDefinitionNode cdn;
        TableElementNode element = (TableElementNode) elementAt(index);

        if (!(element instanceof ColumnDefinitionNode)) {
            continue;
        }

        cdn = (ColumnDefinitionNode) element;

        if (!cdn.hasGenerationClause()) {
            continue;
        }

        generatedColumns.add(cdn);
        names.add(cdn.getColumnName());
    }

    // now look at their generation clauses to see if they reference one
    // another
    int count = generatedColumns.size();
    for (int i = 0; i < count; i++) {
        ColumnDefinitionNode cdn = (ColumnDefinitionNode) generatedColumns.get(i);
        GenerationClauseNode generationClauseNode = cdn.getGenerationClauseNode();
        Vector referencedColumns = generationClauseNode.findReferencedColumns();
        int refCount = referencedColumns.size();
        for (int j = 0; j < refCount; j++) {
            String name = ((ColumnReference) referencedColumns.elementAt(j)).getColumnName();

            if (name != null) {
                if (names.contains(name)) {
                    throw StandardException.newException(SQLState.LANG_CANT_REFERENCE_GENERATED_COLUMN,
                            cdn.getColumnName());
                }
            }
        }
    }

}

From source file:edu.ucla.cs.scai.canali.core.index.utils.DBpediaOntologyExtendedUtils.java

public void createEntityClassesFile(HashSet<String> acceptableEntities, HashSet<String> acceptableClasses)
        throws Exception {
    System.out.println("Saving entity classes");
    try (PrintWriter out = new PrintWriter(new FileOutputStream(destinationPath + "entity_classes", false),
            true);
            BufferedReader in1 = new BufferedReader(
                    new FileReader(downloadedFilesPath + "instance_types_en.nt"));
            BufferedReader in2 = new BufferedReader(
                    new FileReader(downloadedFilesPath + "instance_types_heuristic_en.nt"));
            BufferedReader in3 = new BufferedReader(new FileReader(downloadedFilesPath + "yago_types.nt"))) {
        String regex = "<(.*)> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <(.*)>";
        Pattern p = Pattern.compile(regex);
        String l = in1.readLine();
        while (l != null) {
            Matcher m = p.matcher(l);
            if (m.find()) {
                String eUri = m.group(1);
                String cUri = m.group(2);
                if (acceptableEntities.contains(eUri) && acceptableClasses.contains(cUri)) {
                    out.println(eUri + "\t" + cUri);
                }
            }
            l = in1.readLine();
        }
        l = in2.readLine();
        while (l != null) {
            Matcher m = p.matcher(l);
            if (m.find()) {
                String eUri = m.group(1);
                String cUri = m.group(2);
                if (acceptableEntities.contains(eUri) && acceptableClasses.contains(cUri)) {
                    out.println(eUri + "\t" + cUri);
                }
            }
            l = in2.readLine();
        }
        l = in3.readLine();
        while (l != null) {
            Matcher m = p.matcher(l);
            if (m.find()) {
                String eUri = m.group(1);
                String cUri = m.group(2);
                if (acceptableEntities.contains(eUri) && acceptableClasses.contains(cUri)) {
                    out.println(eUri + "\t" + cUri);
                }
            }
            l = in3.readLine();
        }
    }
}

From source file:edu.ucla.cs.scai.canali.core.index.utils.DBpediaOntology201510Utils.java

public void createEntityClassesFile(HashSet<String> acceptableEntities, HashSet<String> acceptableClasses)
        throws Exception {
    System.out.println("Saving entity classes");
    try (PrintWriter out = new PrintWriter(new FileOutputStream(destinationPath + "entity_classes", false),
            true);
            BufferedReader in1 = new BufferedReader(
                    new FileReader(downloadedFilesPath + "instance_types_en.ttl"));
            BufferedReader in2 = new BufferedReader(
                    new FileReader(downloadedFilesPath + "instance_types_sdtyped-dbo_en.nt"));
            BufferedReader in3 = new BufferedReader(new FileReader(downloadedFilesPath + "yago_types.nt"))) {
        String regex = "<(.*)> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <(.*)>";
        Pattern p = Pattern.compile(regex);
        String l = in1.readLine();
        while (l != null) {
            Matcher m = p.matcher(l);
            if (m.find()) {
                String eUri = m.group(1);
                String cUri = m.group(2);
                if (acceptableEntities.contains(eUri) && acceptableClasses.contains(cUri)) {
                    out.println(eUri + "\t" + cUri);
                }
            }
            l = in1.readLine();
        }
        l = in2.readLine();
        while (l != null) {
            Matcher m = p.matcher(l);
            if (m.find()) {
                String eUri = m.group(1);
                String cUri = m.group(2);
                if (acceptableEntities.contains(eUri) && acceptableClasses.contains(cUri)) {
                    out.println(eUri + "\t" + cUri);
                }
            }
            l = in2.readLine();
        }
        l = in3.readLine();
        while (l != null) {
            Matcher m = p.matcher(l);
            if (m.find()) {
                String eUri = m.group(1);
                String cUri = m.group(2);
                if (acceptableEntities.contains(eUri) && acceptableClasses.contains(cUri)) {
                    out.println(eUri + "\t" + cUri);
                }
            }
            l = in3.readLine();
        }
    }
}