Example usage for java.util TreeMap get

List of usage examples for java.util TreeMap get

Introduction

On this page you can find example usages of the java.util TreeMap get method.

Prototype

public V get(Object key) 

Source Link

Document

Returns the value to which the specified key is mapped, or null if this map contains no mapping for the key.

Usage

From source file:edu.mit.viral.shen.DroidFish.java

/**
 * Decides whether the board should be drawn flipped and applies the result.
 * The orientation may follow the PGN player names (when enabled and the
 * names give a strictly better match for one side) and/or auto-swap so the
 * side to move is always shown at the bottom.
 *
 * @param matchPlayerNames if true, allow flipping based on the White/Black
 *                         PGN headers matching the configured player name
 */
private final void setBoardFlip(boolean matchPlayerNames) {
    boolean flip = boardFlipped;
    if (playerNameFlip && matchPlayerNames && (ctrl != null)) {
        final TreeMap<String, String> tags = new TreeMap<String, String>();
        ctrl.getHeaders(tags);
        int whiteScore = nameMatchScore(tags.get("White"), playerName);
        int blackScore = nameMatchScore(tags.get("Black"), playerName);
        // Flip only when the scores differ and the current orientation
        // disagrees with the better-matching side; persist the new value.
        boolean wantFlip = whiteScore < blackScore;
        if ((whiteScore != blackScore) && (flip != wantFlip)) {
            flip = wantFlip;
            boardFlipped = flip;
            setBooleanPref("boardFlipped", flip);
        }
    }
    if (autoSwapSides) {
        if (gameMode.analysisMode()) {
            // In analysis, always show the side to move at the bottom.
            flip = !cb.pos.whiteMove;
        } else if (gameMode.playerWhite()) {
            // Both sides human -> follow side to move; white only -> white at bottom.
            flip = gameMode.playerBlack() ? !cb.pos.whiteMove : false;
        } else {
            // Black only -> black at bottom; two computers -> follow side to move.
            flip = gameMode.playerBlack() ? true : !cb.pos.whiteMove;
        }
    }
    cb.setFlipped(flip);
}

From source file:org.apache.pdfbox.pdfparser.NonSequentialPDFParser.java

/**
 * Will parse every object necessary to load a single page from the pdf document.
 * We try our best to order objects according to offset in file before reading
 * to minimize seek operations./* w ww  .jav  a2s . c  o m*/
 * 
 * @param dict the COSObject from the parent pages.
 * @param excludeObjects dictionary object reference entries with these names will not be parsed
 * 
 * @throws IOException
 */
private void parseDictObjects(COSDictionary dict, COSName... excludeObjects) throws IOException {
    // ---- create queue for objects waiting for further parsing
    final Queue<COSBase> toBeParsedList = new LinkedList<COSBase>();
    // offset ordered object map
    final TreeMap<Long, List<COSObject>> objToBeParsed = new TreeMap<Long, List<COSObject>>();
    // in case of compressed objects offset points to stmObj
    final Set<Long> parsedObjects = new HashSet<Long>();
    final Set<Long> addedObjects = new HashSet<Long>();

    // ---- add objects not to be parsed to list of already parsed objects
    if (excludeObjects != null) {
        for (COSName objName : excludeObjects) {
            COSBase baseObj = dict.getItem(objName);
            if (baseObj instanceof COSObject) {
                parsedObjects.add(getObjectId((COSObject) baseObj));
            }
        }
    }

    addNewToList(toBeParsedList, dict.getValues(), addedObjects);

    // ---- go through objects to be parsed
    while (!(toBeParsedList.isEmpty() && objToBeParsed.isEmpty())) {
        // -- first get all COSObject from other kind of objects and
        //    put them in objToBeParsed; afterwards toBeParsedList is empty
        COSBase baseObj;
        while ((baseObj = toBeParsedList.poll()) != null) {
            if (baseObj instanceof COSStream) {
                addNewToList(toBeParsedList, ((COSStream) baseObj).getValues(), addedObjects);
            } else if (baseObj instanceof COSDictionary) {
                addNewToList(toBeParsedList, ((COSDictionary) baseObj).getValues(), addedObjects);
            } else if (baseObj instanceof COSArray) {
                final Iterator<COSBase> arrIter = ((COSArray) baseObj).iterator();
                while (arrIter.hasNext()) {
                    addNewToList(toBeParsedList, arrIter.next(), addedObjects);
                }
            } else if (baseObj instanceof COSObject) {
                COSObject obj = (COSObject) baseObj;
                long objId = getObjectId(obj);
                COSObjectKey objKey = new COSObjectKey(obj.getObjectNumber().intValue(),
                        obj.getGenerationNumber().intValue());

                if (!(parsedObjects.contains(objId) /*|| document.hasObjectInPool( objKey ) */ )) {
                    Long fileOffset = xrefTrailerResolver.getXrefTable().get(objKey);
                    //  it is allowed that object references point to null, thus we have to test
                    if (fileOffset != null) {
                        if (fileOffset > 0) {
                            objToBeParsed.put(fileOffset, Collections.singletonList(obj));
                        } else {
                            // negative offset means we have a compressed object within object stream;
                            // get offset of object stream
                            fileOffset = xrefTrailerResolver.getXrefTable()
                                    .get(new COSObjectKey(-fileOffset, 0));
                            if ((fileOffset == null) || (fileOffset <= 0)) {
                                throw new IOException(
                                        "Invalid object stream xref object reference: " + fileOffset);
                            }

                            List<COSObject> stmObjects = objToBeParsed.get(fileOffset);
                            if (stmObjects == null) {
                                objToBeParsed.put(fileOffset, stmObjects = new ArrayList<COSObject>());
                            }
                            stmObjects.add(obj);
                        }
                    } else {
                        // NULL object
                        COSObject pdfObject = document.getObjectFromPool(objKey);
                        pdfObject.setObject(COSNull.NULL);
                    }
                }
            }
        }

        // ---- read first COSObject with smallest offset;
        //      resulting object will be added to toBeParsedList
        if (objToBeParsed.isEmpty()) {
            break;
        }

        for (COSObject obj : objToBeParsed.remove(objToBeParsed.firstKey())) {
            COSBase parsedObj = parseObjectDynamically(obj, false);

            obj.setObject(parsedObj);
            addNewToList(toBeParsedList, parsedObj, addedObjects);

            parsedObjects.add(getObjectId(obj));
        }
    }
}

From source file:org.apache.pdfbox.pdfparser.COSParser.java

/**
 * Will parse every object necessary to load a single page from the pdf document. We try our
 * best to order objects according to offset in file before reading to minimize seek operations.
 *
 * @param dict the COSObject from the parent pages.
 * @param excludeObjects dictionary object reference entries with these names will not be parsed
 *
 * @throws IOException if something went wrong
 */
protected void parseDictObjects(COSDictionary dict, COSName... excludeObjects) throws IOException {
    // queue of objects still waiting to be examined
    final Queue<COSBase> parseQueue = new LinkedList<COSBase>();
    // objects to read, keyed (and therefore sorted) by file offset
    final TreeMap<Long, List<COSObject>> byOffset = new TreeMap<Long, List<COSObject>>();
    // ids of already parsed objects; for compressed objects the offset points to the stream object
    final Set<Long> alreadyParsed = new HashSet<Long>();
    final Set<Long> alreadyQueued = new HashSet<Long>();

    addExcludedToList(excludeObjects, dict, alreadyParsed);
    addNewToList(parseQueue, dict.getValues(), alreadyQueued);

    while (!(parseQueue.isEmpty() && byOffset.isEmpty())) {
        // drain the queue: collect every COSObject reference, indexed by offset;
        // afterwards the queue is empty
        for (COSBase next = parseQueue.poll(); next != null; next = parseQueue.poll()) {
            if (next instanceof COSDictionary) {
                addNewToList(parseQueue, ((COSDictionary) next).getValues(), alreadyQueued);
            } else if (next instanceof COSArray) {
                for (final Iterator<COSBase> arrIter = ((COSArray) next).iterator(); arrIter.hasNext();) {
                    addNewToList(parseQueue, arrIter.next(), alreadyQueued);
                }
            } else if (next instanceof COSObject) {
                final COSObject ref = (COSObject) next;
                final long refId = getObjectId(ref);
                final COSObjectKey objKey = new COSObjectKey(ref.getObjectNumber(), ref.getGenerationNumber());

                if (alreadyParsed.contains(refId)) {
                    continue;
                }
                Long fileOffset = xrefTrailerResolver.getXrefTable().get(objKey);
                // object references are allowed to point to null (or offset 0)
                if (fileOffset == null || fileOffset == 0) {
                    // NULL object
                    COSObject pdfObject = document.getObjectFromPool(objKey);
                    pdfObject.setObject(COSNull.NULL);
                } else if (fileOffset > 0) {
                    byOffset.put(fileOffset, Collections.singletonList(ref));
                } else {
                    // a negative offset marks a compressed object inside an object
                    // stream; look up the offset of the containing stream instead
                    fileOffset = xrefTrailerResolver.getXrefTable()
                            .get(new COSObjectKey((int) -fileOffset, 0));
                    if ((fileOffset == null) || (fileOffset <= 0)) {
                        throw new IOException("Invalid object stream xref object reference for key '"
                                + objKey + "': " + fileOffset);
                    }

                    List<COSObject> stmObjects = byOffset.get(fileOffset);
                    if (stmObjects == null) {
                        stmObjects = new ArrayList<COSObject>();
                        byOffset.put(fileOffset, stmObjects);
                    }
                    stmObjects.add(ref);
                }
            }
        }

        if (byOffset.isEmpty()) {
            break;
        }

        // parse the batch with the smallest offset; whatever results is fed
        // back into the queue so its references are collected on the next pass
        for (COSObject ref : byOffset.remove(byOffset.firstKey())) {
            COSBase parsedObj = parseObjectDynamically(ref, false);

            ref.setObject(parsedObj);
            addNewToList(parseQueue, parsedObj, alreadyQueued);

            alreadyParsed.add(getObjectId(ref));
        }
    }
}

From source file:com.sfs.whichdoctor.dao.AccreditationDAOImpl.java

/**
 * Gets the training summary./* w ww.j a v a2  s .  co m*/
 *
 * @param guid the guid
 * @param type the type
 *
 * @return the training summary
 *
 * @throws WhichDoctorDaoException the which doctor dao exception
 */
@SuppressWarnings("unchecked")
public final TreeMap<String, AccreditationBean[]> getTrainingSummary(final int guid, final String type)
        throws WhichDoctorDaoException {

    if (type == null) {
        throw new NullPointerException("Training type cannot be null");
    }

    dataLogger.info("Getting " + type + " Training Summary for Member GUID: " + guid);

    TreeMap<String, AccreditationBean[]> summary = new TreeMap<String, AccreditationBean[]>();

    Collection<AccreditationBean> accreditations = new ArrayList<AccreditationBean>();
    try {
        accreditations = this.getJdbcTemplateReader().query(this.getSQL().getValue("accreditation/loadSummary"),
                new Object[] { guid, type }, new RowMapper() {
                    public Object mapRow(final ResultSet rs, final int rowNum) throws SQLException {
                        AccreditationBean accreditation = new AccreditationBean();

                        accreditation.setAbbreviation(rs.getString("AccreditationTypeAbbreviation"));
                        accreditation.setAccreditationType(rs.getString("AccreditationType"));
                        accreditation.setSpecialtyType(rs.getString("SpecialtyTypeClass"));
                        accreditation.setSpecialtySubType(rs.getString("SpecialtyTypeName"));
                        accreditation.setSpecialtyTypeAbbreviation(rs.getString("SpecialtyTypeAbbreviation"));
                        accreditation.setCore(rs.getBoolean("Core"));
                        accreditation.setWeeksApproved(rs.getInt("WeeksApproved"));
                        accreditation.setWeeksCertified(rs.getInt("WeeksCertified"));

                        // The active flag holds whether the accreditation is excess
                        boolean active = true;

                        String trainingClass = rs.getString("TrainingClass");
                        if (StringUtils.contains(trainingClass, "nterrupted")
                                || StringUtils.contains(trainingClass, "ontinuing")) {
                            active = false;
                        }
                        accreditation.setActive(active);

                        return accreditation;
                    }
                });

    } catch (IncorrectResultSizeDataAccessException ie) {
        dataLogger.debug("No results found for search: " + ie.getMessage());
    }

    for (AccreditationBean acrd : accreditations) {
        if (acrd.getActive()) {

            // Generate index key
            String specialtyAbbreviation = acrd.getAccreditationType();
            String specialtyTypeName = acrd.getSpecialtyType();
            if (StringUtils.isNotBlank(acrd.getAbbreviation())) {
                specialtyAbbreviation = acrd.getAbbreviation();
            }
            if (StringUtils.isNotBlank(acrd.getSpecialtySubType())) {
                specialtyTypeName = acrd.getSpecialtyType() + " - " + acrd.getSpecialtySubType();
            }
            String specialtyKey = specialtyAbbreviation + ": " + specialtyTypeName;

            AccreditationBean core = new AccreditationBean();
            core.setAbbreviation(acrd.getAbbreviation());
            core.setAccreditationType(acrd.getAccreditationType());
            core.setCore(true);
            core.setSpecialtyType(acrd.getSpecialtyType());
            core.setSpecialtySubType(acrd.getSpecialtySubType());
            core.setSpecialtyTypeAbbreviation(acrd.getSpecialtyTypeAbbreviation());

            AccreditationBean nonCore = new AccreditationBean();
            nonCore.setAbbreviation(acrd.getAbbreviation());
            nonCore.setAccreditationType(acrd.getAccreditationType());
            nonCore.setCore(false);
            nonCore.setSpecialtyType(acrd.getSpecialtyType());
            nonCore.setSpecialtySubType(acrd.getSpecialtySubType());
            nonCore.setSpecialtyTypeAbbreviation(acrd.getSpecialtyTypeAbbreviation());

            if (summary.containsKey(specialtyKey)) {
                // Specialty exists in TreeMap -> Get array and modify
                try {
                    AccreditationBean[] existing = summary.get(specialtyKey);
                    core = existing[0];
                    nonCore = existing[1];
                } catch (Exception e) {
                    dataLogger.error("Error loading existing training summary item: " + e.getMessage());
                }
            }

            // Add to the relevant core/nonCore running totals
            if (acrd.getCore()) {
                core.setWeeksApproved(core.getWeeksApproved() + acrd.getWeeksApproved());
                core.setWeeksCertified(core.getWeeksCertified() + acrd.getWeeksCertified());
            } else {
                nonCore.setWeeksApproved(nonCore.getWeeksApproved() + acrd.getWeeksApproved());
                nonCore.setWeeksCertified(nonCore.getWeeksCertified() + acrd.getWeeksCertified());
            }

            // Set accreditation details
            AccreditationBean[] details = new AccreditationBean[] { core, nonCore };

            // Add accreditation to map
            summary.put(specialtyKey, details);
        }
    }
    return summary;
}

From source file:org.dllearner.reasoning.SPARQLReasoner.java

/**
 * Builds the data property subsumption hierarchy by querying the endpoint for all
 * {@code owl:DatatypeProperty} instances and their {@code rdfs:subPropertyOf} links
 * (excluding owl:topDatatypeProperty / owl:bottomDatatypeProperty).
 *
 * @return the prepared datatype property hierarchy (also stored in the
 *         {@code datatypePropertyHierarchy} field)
 * @throws ReasoningMethodUnsupportedException declared by the interface contract
 */
@Override
public DatatypePropertyHierarchy prepareDatatypePropertyHierarchy() throws ReasoningMethodUnsupportedException {
    logger.info("Preparing data property subsumption hierarchy ...");
    long startTime = System.currentTimeMillis();
    TreeMap<OWLDataProperty, SortedSet<OWLDataProperty>> subsumptionHierarchyUp = new TreeMap<>();
    TreeMap<OWLDataProperty, SortedSet<OWLDataProperty>> subsumptionHierarchyDown = new TreeMap<>();

    String query = "SELECT * WHERE {" + "?sub a <http://www.w3.org/2002/07/owl#DatatypeProperty> . "
            + "OPTIONAL {" + "?sub <http://www.w3.org/2000/01/rdf-schema#subPropertyOf> ?sup ."
            + "?sup a <http://www.w3.org/2002/07/owl#DatatypeProperty> . "
            + "FILTER(?sup != ?sub && ?sup != <http://www.w3.org/2002/07/owl#topDatatypeProperty> )" + "}"
            + "FILTER(?sub != <http://www.w3.org/2002/07/owl#topDatatypeProperty> && ?sub != <http://www.w3.org/2002/07/owl#bottomDatatypeProperty>)"
            + "}";
    ResultSet rs = executeSelectQuery(query);

    while (rs.hasNext()) {
        QuerySolution qs = rs.next();
        if (qs.get("sub").isURIResource()) {
            OWLDataProperty sub = df.getOWLDataProperty(IRI.create(qs.get("sub").asResource().getURI()));

            // ensure both maps have an entry for the sub property
            if (!subsumptionHierarchyDown.containsKey(sub)) {
                subsumptionHierarchyDown.put(sub, new TreeSet<>());
            }
            if (!subsumptionHierarchyUp.containsKey(sub)) {
                subsumptionHierarchyUp.put(sub, new TreeSet<>());
            }

            // if there is a super property, register the edge in both directions
            if (qs.get("sup") != null && qs.get("sup").isURIResource()) {
                OWLDataProperty sup = df.getOWLDataProperty(IRI.create(qs.get("sup").asResource().getURI()));

                // ensure both maps have an entry for the super property as well
                if (!subsumptionHierarchyDown.containsKey(sup)) {
                    subsumptionHierarchyDown.put(sup, new TreeSet<>());
                }
                if (!subsumptionHierarchyUp.containsKey(sup)) {
                    subsumptionHierarchyUp.put(sup, new TreeSet<>());
                }

                // both entries are guaranteed non-null at this point, so the former
                // defensive null re-checks were dead code and have been removed
                subsumptionHierarchyUp.get(sub).add(sup);
                subsumptionHierarchyDown.get(sup).add(sub);
            }
        }
    }

    logger.info("... done in {}ms", (System.currentTimeMillis() - startTime));
    datatypePropertyHierarchy = new DatatypePropertyHierarchy(subsumptionHierarchyUp, subsumptionHierarchyDown);
    return datatypePropertyHierarchy;
}

From source file:org.biomart.configurator.controller.MartController.java

/**
 * Synchronises foreign keys and relations for all tables of the given source schema
 * against the database metadata (exported keys). Foreign keys confirmed by the
 * metadata are removed from {@code fksToBeDropped}; whatever is left in that
 * collection afterwards no longer exists in the database and can be dropped by the
 * caller. Relations not re-discovered here are dropped at the end of each table's
 * loop, except handmade ones.
 *
 * @param ss the source schema whose tables are synchronised
 * @param fksToBeDropped candidate foreign keys to drop; confirmed keys are removed
 * @param dmd the database metadata to query for exported keys
 * @param schema the schema name to search (may be null)
 * @param catalog the catalog name to search (may be null)
 * @throws SQLException if reading the database metadata fails
 * @throws DataModelException if the data model cannot be updated
 */
public void synchroniseKeysUsingDMD(final SourceSchema ss, final Collection<ForeignKey> fksToBeDropped,
        final DatabaseMetaData dmd, final String schema, final String catalog)
        throws SQLException, DataModelException {
    Log.debug("Running DMD key synchronisation");
    // Loop through all the tables in the database, which is the same
    // as looping through all the primary keys.
    Log.debug("Finding tables");
    for (final Iterator<Table> i = ss.getTables().iterator(); i.hasNext();) {

        // Obtain the table and its primary key.
        final SourceTable pkTable = (SourceTable) i.next();
        final PrimaryKey pk = pkTable.getPrimaryKey();
        // Skip all tables which have no primary key.
        if (pk == null)
            continue;

        Log.debug("Processing primary key " + pk);

        // Make a list of relations that already exist in this schema,
        // from some previous run. Any relations that are left in this
        // list by the end of the loop for this table no longer exist in
        // the database, and will be dropped.
        final Collection<Relation> relationsToBeDropped = new TreeSet<Relation>(pk.getRelations()); // TreeSet gives a
                                                                                                    // stable order

        // Identify all foreign keys in the database metadata that refer
        // to the current primary key.
        Log.debug("Finding referring foreign keys");
        String searchCatalog = catalog;
        String searchSchema = schema;
        // NOTE(review): this ResultSet is closed below but not in a finally
        // block, so it leaks if an exception is thrown while iterating.
        final ResultSet dbTblFKCols = dmd.getExportedKeys(searchCatalog, searchSchema, pkTable.getName());

        // Loop through the results. There will be one result row per
        // column per key, so we need to build up a set of key columns
        // in a map.
        // The map keys represent the column position within a key. Each
        // map value is a list of columns. In essence the map is a 2-D
        // representation of the foreign keys which refer to this PK,
        // with the keys of the map (Y-axis) representing the column
        // position in the FK, and the values of the map (X-axis)
        // representing each individual FK. In all cases, FK columns are
        // assumed to be in the same order as the PK columns. The map is
        // sorted by key column position.
        // An assumption is made that the query will return columns from
        // the FK in the same order as all other FKs, ie. all column 1s
        // will be returned before any 2s, and then all 2s will be
        // returned
        // in the same order as the 1s they are associated with, etc.
        final TreeMap<Short, List<Column>> dbFKs = new TreeMap<Short, List<Column>>();
        while (dbTblFKCols.next()) {
            final String fkTblName = dbTblFKCols.getString("FKTABLE_NAME");
            final String fkColName = dbTblFKCols.getString("FKCOLUMN_NAME");
            // NOTE(review): new Short(...) is deprecated boxing; Short.valueOf
            // would be preferred.
            final Short fkColSeq = new Short(dbTblFKCols.getShort("KEY_SEQ"));
            if (fkTblName != null && fkTblName.contains("$")) { // exclude ORACLE's temporary tables (unlikely to be
                                                                // found here though)
                continue;
            }

            // Note the column.
            if (!dbFKs.containsKey(fkColSeq))
                dbFKs.put(fkColSeq, new ArrayList<Column>());
            // In some dbs, FKs can be invalid, so we need to check
            // them.
            final Table fkTbl = ss.getTableByName(fkTblName);
            if (fkTbl != null) {
                final Column fkCol = (Column) fkTbl.getColumnByName(fkColName);
                if (fkCol != null)
                    (dbFKs.get(fkColSeq)).add(fkCol);
            }
        }
        dbTblFKCols.close();

        // Sort foreign keys by name (case insensitive)
        for (List<Column> columnList : dbFKs.values()) {
            Collections.sort(columnList);
        }

        // Only construct FKs if we actually found any.
        if (!dbFKs.isEmpty()) {
            // Identify the sequence of the first column, which may be 0
            // or 1, depending on database implementation.
            final int firstColSeq = ((Short) dbFKs.firstKey()).intValue();

            // How many columns are in the PK?
            // NOTE(review): pkColCount is never read afterwards — dead local.
            final int pkColCount = pkTable.getPrimaryKey().getColumns().size();

            // How many FKs do we have?
            final int fkCount = dbFKs.get(dbFKs.firstKey()).size();

            // Loop through the FKs, and construct each one at a time.
            for (int j = 0; j < fkCount; j++) {
                // Set up an array to hold the FK columns.
                final List<Column> candidateFKColumns = new ArrayList<Column>();

                // For each FK column name, look up the actual column in
                // the table.
                for (final Iterator<Map.Entry<Short, List<Column>>> k = dbFKs.entrySet().iterator(); k
                        .hasNext();) {
                    final Map.Entry<Short, List<Column>> entry = k.next();
                    final Short keySeq = (Short) entry.getKey();
                    // Convert the db-specific column index to a
                    // 0-indexed figure for the array of fk columns.
                    // NOTE(review): fkColSeq is computed but never used; the
                    // columns are appended in TreeMap (key-sequence) order
                    // instead of being placed at this index.
                    final int fkColSeq = keySeq.intValue() - firstColSeq;
                    candidateFKColumns.add((Column) (entry.getValue()).get(j));
                }

                // Create a template foreign key based around the set
                // of candidate columns we found.
                ForeignKey fkObject;
                try {
                    List<Column> columns = new ArrayList<Column>();
                    for (int k = 0; k < candidateFKColumns.size(); k++) {
                        columns.add(candidateFKColumns.get(k));
                    }
                    fkObject = new ForeignKey(columns);
                } catch (final Throwable t) {
                    throw new BioMartError(t);
                }
                final Table fkTable = fkObject.getTable();

                // If any FK already exists on the target table with the
                // same columns in the same order, then reuse it.
                boolean fkAlreadyExists = false;
                for (final Iterator<ForeignKey> f = fkTable.getForeignKeys().iterator(); f.hasNext()
                        && !fkAlreadyExists;) {
                    final ForeignKey candidateFK = f.next();
                    if (candidateFK.equals(fkObject)) {
                        // Found one. Reuse it!
                        fkObject = candidateFK;
                        // Update the status to indicate that the FK is
                        // backed by the database, if previously it was
                        // handmade.
                        if (fkObject.getStatus().equals(ComponentStatus.HANDMADE))
                            fkObject.setStatus(ComponentStatus.INFERRED);
                        // Remove the FK from the list to be dropped
                        // later, as it definitely exists now.
                        fksToBeDropped.remove(candidateFK);
                        // Flag the key as existing.
                        fkAlreadyExists = true;
                    }
                }

                // Has the key been reused, or is it a new one?
                if (!fkAlreadyExists)
                    try {
                        fkTable.getForeignKeys().add(fkObject);
                    } catch (final Throwable t) {
                        throw new BioMartError(t);
                    }

                // Work out whether the relation from the FK to
                // the PK should be 1:M or 1:1. The rule is that
                // it will be 1:M in all cases except where the
                // FK table has a PK with identical columns to
                // the FK, in which case it is 1:1, as the FK
                // is unique.
                Cardinality card = Cardinality.MANY_A;
                final PrimaryKey fkPK = fkTable.getPrimaryKey();
                if (fkPK != null && fkObject.getColumns().equals(fkPK.getColumns()))
                    card = Cardinality.ONE;

                // Check to see if it already has a relation.
                boolean relationExists = false;
                for (final Iterator<Relation> f = fkObject.getRelations().iterator(); f.hasNext();) {
                    // Obtain the next relation.
                    final Relation candidateRel = f.next();

                    // a) a relation already exists between the FK
                    // and the PK.
                    if (candidateRel.getOtherKey(fkObject).equals(pk)) {
                        // If cardinality matches, make it
                        // inferred. If doesn't match, make it
                        // modified and update original cardinality.
                        try {
                            if (card.equals(candidateRel.getCardinality())) {
                                if (!candidateRel.getStatus().equals(ComponentStatus.INFERRED_INCORRECT))
                                    candidateRel.setStatus(ComponentStatus.INFERRED);
                            } else {
                                if (!candidateRel.getStatus().equals(ComponentStatus.INFERRED_INCORRECT))
                                    candidateRel.setStatus(ComponentStatus.MODIFIED);
                                candidateRel.setOriginalCardinality(card);
                            }
                        } catch (final AssociationException ae) {
                            throw new BioMartError(ae);
                        }
                        // Don't drop it at the end of the loop.
                        relationsToBeDropped.remove(candidateRel);
                        // Say we've found it.
                        relationExists = true;
                    }

                    // b) a handmade relation exists elsewhere which
                    // should not be dropped. All other relations
                    // elsewhere will be dropped.
                    else if (candidateRel.getStatus().equals(ComponentStatus.HANDMADE))
                        // Don't drop it at the end of the loop.
                        relationsToBeDropped.remove(candidateRel);
                }

                // If relation did not already exist, create it.
                if (!relationExists && !pk.equals(fkObject)) {
                    // Establish the relation.
                    try {
                        new RelationSource(pk, fkObject, card);
                    } catch (final Throwable t) {
                        throw new BioMartError(t);
                    }
                }
            }
        }

        // Remove any relations that we didn't find in the database (but
        // leave the handmade ones behind).
        for (final Iterator<Relation> j = relationsToBeDropped.iterator(); j.hasNext();) {
            final Relation r = j.next();
            if (r.getStatus().equals(ComponentStatus.HANDMADE))
                continue;
            r.getFirstKey().removeRelation(r);
            r.getSecondKey().removeRelation(r);
        }
    }
}

From source file:net.spfbl.core.User.java

/**
 * Returns up to QUERY_MAX_ROWS+1 query-log entries for this user, newest
 * first, merging rows loaded from MySQL into the in-memory head map.
 *
 * @param begin  upper bound (inclusive) on the entry timestamp, or null for no bound
 * @param filter optional filter: an IP address, an e-mail address, the literal
 *               "rejeitada" (rejected results only), or free text matched by
 *               {@code Query.match()}; null or empty means no filtering
 * @return timestamp-to-Query map holding at most QUERY_MAX_ROWS+1 entries
 *         (the extra row lets callers detect that more rows are available)
 */
public TreeMap<Long, Query> getQueryMap(Long begin, String filter) {
    TreeMap<Long, Query> queryLocalMap = getQueryHeadMap(begin);
    Connection connection = Core.poolConnectionMySQL();
    try {
        if (connection != null) {
            String ipParam = Subnet.isValidIP(filter) ? Subnet.normalizeIP(filter) : null;
            String emailParam = Domain.isValidEmail(filter) ? filter.toLowerCase() : null;
            // Build the statement with '?' placeholders and bind values below.
            // The previous version concatenated getEmail()/filter-derived text
            // directly into the SQL string, which is an injection risk.
            StringBuilder sql = new StringBuilder();
            sql.append("SELECT * FROM spfbl.user_query\n");
            sql.append("WHERE user = ?\n");
            if (begin != null) {
                sql.append("AND time <= ?\n");
            }
            if ("rejeitada".equals(filter)) {
                sql.append("AND result IN('BLOCK','REJECT')\n");
            }
            if (ipParam != null) {
                sql.append("AND ip = ?\n");
            }
            if (emailParam != null) {
                sql.append("AND ? IN(sender, mailFrom, replyto, recipient)\n");
            }
            sql.append("ORDER BY time DESC\n");
            sql.append("LIMIT ").append(QUERY_MAX_ROWS + 1);
            try {
                PreparedStatement statement = connection.prepareStatement(sql.toString());
                try {
                    int index = 1;
                    statement.setString(index++, getEmail());
                    if (begin != null) {
                        statement.setLong(index++, begin);
                    }
                    if (ipParam != null) {
                        statement.setString(index++, ipParam);
                    }
                    if (emailParam != null) {
                        statement.setString(index++, emailParam);
                    }
                    ResultSet rs = statement.executeQuery();
                    while (rs.next()) {
                        try {
                            long time = rs.getLong("time");
                            // Entries already present in the head map win over DB rows.
                            if (queryLocalMap.get(time) == null) {
                                queryLocalMap.put(time, new Query(rs));
                            }
                        } catch (Exception ex) {
                            // Best effort per row: log and keep reading the result set.
                            Server.logError(ex);
                        }
                    }
                } finally {
                    statement.close();
                }
            } catch (SQLException ex) {
                Server.logError(ex);
            }
        }
    } finally {
        Core.offerConnectionMySQL(connection);
    }
    // Drain the merged map newest-first, keeping only entries that pass the
    // filter, capped at QUERY_MAX_ROWS+1.
    TreeMap<Long, Query> resultMap = new TreeMap<Long, Query>();
    while (resultMap.size() < (QUERY_MAX_ROWS + 1)) {
        Entry<Long, Query> entry = queryLocalMap.pollLastEntry();
        if (entry == null) {
            break;
        }
        long time = entry.getKey();
        Query query = entry.getValue();
        if (filter == null || filter.length() == 0 || query.match(filter)) {
            resultMap.put(time, query);
        }
    }
    return resultMap;
}

From source file:org.dllearner.reasoning.SPARQLReasoner.java

/**
 * Builds the object property subsumption hierarchy by querying the endpoint
 * for all owl:ObjectProperty instances together with their declared
 * rdfs:subPropertyOf super properties.
 *
 * @return the populated ObjectPropertyHierarchy (also cached in {@code roleHierarchy})
 * @throws ReasoningMethodUnsupportedException declared by the interface contract
 */
@Override
public ObjectPropertyHierarchy prepareObjectPropertyHierarchy() throws ReasoningMethodUnsupportedException {
    logger.info("Preparing object property subsumption hierarchy ...");
    long startTime = System.currentTimeMillis();
    TreeMap<OWLObjectProperty, SortedSet<OWLObjectProperty>> subsumptionHierarchyUp = new TreeMap<>();
    TreeMap<OWLObjectProperty, SortedSet<OWLObjectProperty>> subsumptionHierarchyDown = new TreeMap<>();

    String query = "SELECT * WHERE {" + "?sub a <http://www.w3.org/2002/07/owl#ObjectProperty> . "
            + "FILTER NOT EXISTS{?sub a <http://www.w3.org/2002/07/owl#DatatypeProperty>}" // TODO remove workaround
            + "FILTER(?sub != <http://www.w3.org/2002/07/owl#bottomObjectProperty> && ?sub != <http://www.w3.org/2002/07/owl#topObjectProperty>)"
            + "OPTIONAL {" + "?sub <http://www.w3.org/2000/01/rdf-schema#subPropertyOf> ?sup ."
            + "?sup a <http://www.w3.org/2002/07/owl#ObjectProperty> . "
            + "FILTER(?sup != ?sub && ?sup != <http://www.w3.org/2002/07/owl#topObjectProperty>)" + "}" + "}";
    ResultSet rs = executeSelectQuery(query);

    while (rs.hasNext()) {
        QuerySolution qs = rs.next();
        // Skip blank nodes and properties in the reserved vocabulary.
        if (!qs.get("sub").isURIResource()) {
            continue;
        }
        IRI iri = IRI.create(qs.get("sub").asResource().getURI());
        if (iri.isReservedVocabulary()) {
            continue;
        }
        OWLObjectProperty sub = df.getOWLObjectProperty(iri);

        // Every property seen gets an entry in both maps, even when it has
        // no declared super property (the OPTIONAL clause left ?sup unbound).
        getOrCreatePropertySet(subsumptionHierarchyDown, sub);
        SortedSet<OWLObjectProperty> superProperties = getOrCreatePropertySet(subsumptionHierarchyUp, sub);

        // If there is a super property, record the edge in both directions.
        if (qs.get("sup") != null && qs.get("sup").isURIResource()) {
            OWLObjectProperty sup = df
                    .getOWLObjectProperty(IRI.create(qs.get("sup").asResource().getURI()));
            getOrCreatePropertySet(subsumptionHierarchyUp, sup);
            superProperties.add(sup);
            getOrCreatePropertySet(subsumptionHierarchyDown, sup).add(sub);
        }
    }
    logger.info("... done in {}ms", (System.currentTimeMillis() - startTime));
    roleHierarchy = new ObjectPropertyHierarchy(subsumptionHierarchyUp, subsumptionHierarchyDown);
    return roleHierarchy;
}

/**
 * Returns the property set mapped to {@code key}, creating and registering
 * an empty one on first use. Replaces the previous duplicated
 * containsKey/put plus get/null-check/put logic.
 */
private static SortedSet<OWLObjectProperty> getOrCreatePropertySet(
        TreeMap<OWLObjectProperty, SortedSet<OWLObjectProperty>> map, OWLObjectProperty key) {
    SortedSet<OWLObjectProperty> set = map.get(key);
    if (set == null) {
        set = new TreeSet<>();
        map.put(key, set);
    }
    return set;
}

From source file:edu.umass.cs.gigapaxos.SQLPaxosLogger.java

/**
 * Returns the logged ACCEPT packets for the given paxos instance, keyed by
 * slot. For each slot, only the accept with the highest ballot is kept.
 *
 * @param paxosID   the paxos group identifier
 * @param version   the instance version to match
 * @param firstSlot lowest slot (inclusive) to return, wraparound-safe
 * @param maxSlot   exclusive upper slot bound, or null for no upper bound
 *                  (used when fetching lower-ballot pvalues)
 */
public Map<Integer, PValuePacket> getLoggedAccepts(String paxosID, int version, int firstSlot,
        Integer maxSlot) {

    // Journaling without a DB index: read accepts straight from the message log.
    if (ENABLE_JOURNALING && !DB_INDEX_JOURNAL)
        return this.getLoggedFromMessageLog(paxosID, version, firstSlot, maxSlot,
                PaxosPacketType.ACCEPT.getInt());

    // Fetch all matching accepts; slots below firstSlot are weeded out below.
    String constraint = " and packet_type=" + PaxosPacketType.ACCEPT.getInt() + " and "
            + getIntegerGTEConstraint("slot", firstSlot)
            // maxSlot is null for getting lower ballot pvalues
            + (maxSlot != null ? " and " + getIntegerLTConstraint("slot", maxSlot) : "") + " and version="
            + version;
    ArrayList<PaxosPacket> logged = this.getLoggedMessages(paxosID, constraint);

    TreeMap<Integer, PValuePacket> highestBySlot = new TreeMap<Integer, PValuePacket>();
    for (PaxosPacket packet : logged) {
        assert (packet instanceof AcceptPacket) : packet.getType() + ":" + packet;
        AcceptPacket accept = (AcceptPacket) packet;
        int slot = AbstractPaxosLogger.getSlotBallot(packet)[0];
        if (slot - firstSlot < 0) // wraparound-safe "slot < firstSlot"
            continue;
        PValuePacket existing = highestBySlot.get(slot);
        if (existing == null || existing.ballot.compareTo(accept.ballot) < 0)
            highestBySlot.put(slot, accept);
    }
    return highestBySlot;
}

From source file:org.apache.coyote.tomcat5.CoyoteRequest.java

/**
 * Parse accept-language header value./*from  w  ww.j  a  va  2  s. c om*/
 */
protected void parseLocalesHeader(String value) {

    // Store the accumulated languages that have been requested in
    // a local collection, sorted by the quality value (so we can
    // add Locales in descending order).  The values will be ArrayLists
    // containing the corresponding Locales to be added
    TreeMap locales = new TreeMap();

    // Preprocess the value to remove all whitespace
    int white = value.indexOf(' ');
    if (white < 0)
        white = value.indexOf('\t');
    if (white >= 0) {
        StringBuffer sb = new StringBuffer();
        int len = value.length();
        for (int i = 0; i < len; i++) {
            char ch = value.charAt(i);
            if ((ch != ' ') && (ch != '\t'))
                sb.append(ch);
        }
        value = sb.toString();
    }

    // Process each comma-delimited language specification
    parser.setString(value); // ASSERT: parser is available to us
    int length = parser.getLength();
    while (true) {

        // Extract the next comma-delimited entry
        int start = parser.getIndex();
        if (start >= length)
            break;
        int end = parser.findChar(',');
        String entry = parser.extract(start, end).trim();
        parser.advance(); // For the following entry

        // Extract the quality factor for this entry
        double quality = 1.0;
        int semi = entry.indexOf(";q=");
        if (semi >= 0) {
            try {
                quality = Double.parseDouble(entry.substring(semi + 3));
            } catch (NumberFormatException e) {
                quality = 0.0;
            }
            entry = entry.substring(0, semi);
        }

        // Skip entries we are not going to keep track of
        if (quality < 0.00005)
            continue; // Zero (or effectively zero) quality factors
        if ("*".equals(entry))
            continue; // FIXME - "*" entries are not handled

        // Extract the language and country for this entry
        String language = null;
        String country = null;
        String variant = null;
        int dash = entry.indexOf('-');
        if (dash < 0) {
            language = entry;
            country = "";
            variant = "";
        } else {
            language = entry.substring(0, dash);
            country = entry.substring(dash + 1);
            int vDash = country.indexOf('-');
            if (vDash > 0) {
                String cTemp = country.substring(0, vDash);
                variant = country.substring(vDash + 1);
                country = cTemp;
            } else {
                variant = "";
            }
        }

        // Add a new Locale to the list of Locales for this quality level
        Locale locale = new Locale(language, country, variant);
        Double key = new Double(-quality); // Reverse the order
        ArrayList values = (ArrayList) locales.get(key);
        if (values == null) {
            values = new ArrayList();
            locales.put(key, values);
        }
        values.add(locale);

    }

    // Process the quality values in highest->lowest order (due to
    // negating the Double value when creating the key)
    Iterator keys = locales.keySet().iterator();
    while (keys.hasNext()) {
        Double key = (Double) keys.next();
        ArrayList list = (ArrayList) locales.get(key);
        Iterator values = list.iterator();
        while (values.hasNext()) {
            Locale locale = (Locale) values.next();
            addLocale(locale);
        }
    }

}