Example usage for java.util TreeMap get

List of usage examples for java.util TreeMap get

Introduction

On this page you can find example usage for java.util.TreeMap.get.

Prototype

public V get(Object key) 

Document

Returns the value to which the specified key is mapped, or null if this map contains no mapping for the key.
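
For a quick illustration of that contract, here is a minimal, self-contained sketch (the class and the key/value names are illustrative, not taken from the examples below):

import java.util.TreeMap;

public class TreeMapGetExample {
    public static void main(String[] args) {
        TreeMap<String, Integer> ages = new TreeMap<String, Integer>();
        ages.put("alice", 30);
        ages.put("bob", 25);

        Integer known = ages.get("alice");   // 30
        Integer missing = ages.get("carol"); // null: no mapping for this key

        System.out.println(known + " / " + missing);
    }
}

Note that get locates keys using the map's ordering (natural ordering or the Comparator supplied at construction), so passing a key that cannot be compared with the keys in the map results in a ClassCastException rather than a null return.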

Usage

From source file:com.netxforge.oss2.config.AmiPeerFactory.java

/**
 * Combine specific and range elements so that AMIPeerFactory has to spend
 * less time iterating all these elements.
 * TODO This really should be pulled up into PeerFactory somehow, but I'm not sure how (given that "Definition" is different for both
 * SNMP and AMI). Maybe some sort of visitor methodology would work. The basic logic should be fine as it's all IP address manipulation.
 *
 * @throws UnknownHostException
 */
void optimize() throws UnknownHostException {
    getWriteLock().lock();

    try {
        // First pass: Remove empty definition elements
        for (final Iterator<Definition> definitionsIterator = m_config.getDefinitionCollection()
                .iterator(); definitionsIterator.hasNext();) {
            final Definition definition = definitionsIterator.next();
            if (definition.getSpecificCount() == 0 && definition.getRangeCount() == 0) {
                LogUtils.debugf(this, "optimize: Removing empty definition element");
                definitionsIterator.remove();
            }
        }

        // Second pass: Replace single IP range elements with specific elements
        for (Definition definition : m_config.getDefinitionCollection()) {
            for (Iterator<Range> rangesIterator = definition.getRangeCollection().iterator(); rangesIterator
                    .hasNext();) {
                Range range = rangesIterator.next();
                if (range.getBegin().equals(range.getEnd())) {
                    definition.addSpecific(range.getBegin());
                    rangesIterator.remove();
                }
            }
        }

        // Third pass: Sort specific and range elements for improved XML
        // readability and then combine them into fewer elements where possible
        for (final Definition definition : m_config.getDefinitionCollection()) {
            // Sort specifics
            final TreeMap<InetAddress, String> specificsMap = new TreeMap<InetAddress, String>(
                    new InetAddressComparator());
            for (final String specific : definition.getSpecificCollection()) {
                specificsMap.put(InetAddressUtils.getInetAddress(specific), specific.trim());
            }

            // Sort ranges
            final TreeMap<InetAddress, Range> rangesMap = new TreeMap<InetAddress, Range>(
                    new InetAddressComparator());
            for (final Range range : definition.getRangeCollection()) {
                rangesMap.put(InetAddressUtils.getInetAddress(range.getBegin()), range);
            }

            // Combine consecutive specifics into ranges
            InetAddress priorSpecific = null;
            Range addedRange = null;
            for (final InetAddress specific : specificsMap.keySet()) {
                if (priorSpecific == null) {
                    priorSpecific = specific;
                    continue;
                }

                if (BigInteger.ONE.equals(InetAddressUtils.difference(specific, priorSpecific))
                        && InetAddressUtils.inSameScope(specific, priorSpecific)) {
                    if (addedRange == null) {
                        addedRange = new Range();
                        addedRange.setBegin(InetAddressUtils.toIpAddrString(priorSpecific));
                        rangesMap.put(priorSpecific, addedRange);
                        specificsMap.remove(priorSpecific);
                    }

                    addedRange.setEnd(InetAddressUtils.toIpAddrString(specific));
                    specificsMap.remove(specific);
                } else {
                    addedRange = null;
                }

                priorSpecific = specific;
            }

            // Move specifics to ranges
            for (final InetAddress specific : new ArrayList<InetAddress>(specificsMap.keySet())) {
                for (final InetAddress begin : new ArrayList<InetAddress>(rangesMap.keySet())) {

                    if (!InetAddressUtils.inSameScope(begin, specific)) {
                        continue;
                    }

                    if (InetAddressUtils.toInteger(begin).subtract(BigInteger.ONE)
                            .compareTo(InetAddressUtils.toInteger(specific)) > 0) {
                        continue;
                    }

                    final Range range = rangesMap.get(begin);

                    final InetAddress end = InetAddressUtils.getInetAddress(range.getEnd());

                    if (InetAddressUtils.toInteger(end).add(BigInteger.ONE)
                            .compareTo(InetAddressUtils.toInteger(specific)) < 0) {
                        continue;
                    }

                    if (InetAddressUtils.toInteger(specific).compareTo(InetAddressUtils.toInteger(begin)) >= 0
                            && InetAddressUtils.toInteger(specific)
                                    .compareTo(InetAddressUtils.toInteger(end)) <= 0) {
                        specificsMap.remove(specific);
                        break;
                    }

                    if (InetAddressUtils.toInteger(begin).subtract(BigInteger.ONE)
                            .equals(InetAddressUtils.toInteger(specific))) {
                        rangesMap.remove(begin);
                        rangesMap.put(specific, range);
                        range.setBegin(InetAddressUtils.toIpAddrString(specific));
                        specificsMap.remove(specific);
                        break;
                    }

                    if (InetAddressUtils.toInteger(end).add(BigInteger.ONE)
                            .equals(InetAddressUtils.toInteger(specific))) {
                        range.setEnd(InetAddressUtils.toIpAddrString(specific));
                        specificsMap.remove(specific);
                        break;
                    }
                }
            }

            // Combine consecutive ranges
            Range priorRange = null;
            InetAddress priorBegin = null;
            InetAddress priorEnd = null;
            for (final Iterator<InetAddress> rangesIterator = rangesMap.keySet().iterator(); rangesIterator
                    .hasNext();) {
                final InetAddress beginAddress = rangesIterator.next();
                final Range range = rangesMap.get(beginAddress);
                final InetAddress endAddress = InetAddressUtils.getInetAddress(range.getEnd());

                if (priorRange != null) {
                    if (InetAddressUtils.inSameScope(beginAddress, priorEnd) && InetAddressUtils
                            .difference(beginAddress, priorEnd).compareTo(BigInteger.ONE) <= 0) {
                        priorBegin = new InetAddressComparator().compare(priorBegin, beginAddress) < 0
                                ? priorBegin
                                : beginAddress;
                        priorRange.setBegin(InetAddressUtils.toIpAddrString(priorBegin));
                        priorEnd = new InetAddressComparator().compare(priorEnd, endAddress) > 0 ? priorEnd
                                : endAddress;
                        priorRange.setEnd(InetAddressUtils.toIpAddrString(priorEnd));

                        rangesIterator.remove();
                        continue;
                    }
                }

                priorRange = range;
                priorBegin = beginAddress;
                priorEnd = endAddress;
            }

            // Update changes made to sorted maps
            definition.setSpecific(specificsMap.values().toArray(new String[0]));
            definition.setRange(rangesMap.values().toArray(new Range[0]));
        }
    } finally {
        getWriteLock().unlock();
    }
}

From source file:org.apache.hadoop.chukwa.extraction.engine.datasource.database.DatabaseDS.java

@edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "SQL_NONCONSTANT_STRING_PASSED_TO_EXECUTE", justification = "Dynamic based upon tables in the database")
public SearchResult search(SearchResult result, String cluster, String dataSource, long t0, long t1,
        String filter, Token token) throws DataSourceException {
    SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd kk:mm:ss");
    String timeField = null;
    TreeMap<Long, List<Record>> records = result.getRecords();

    if (cluster == null) {
        cluster = "demo";
    }

    if (dataSource.equalsIgnoreCase("MRJob")) {
        timeField = "LAUNCH_TIME";
    } else if (dataSource.equalsIgnoreCase("HodJob")) {
        timeField = "StartTime";
    } else {
        timeField = "timestamp";
    }
    String startS = formatter.format(t0);
    String endS = formatter.format(t1);
    Statement stmt = null;
    ResultSet rs = null;
    try {
        String dateclause = timeField + " >= '" + startS + "' and " + timeField + " <= '" + endS + "'";

        // ClusterConfig cc = new ClusterConfig();
        String jdbc = ""; // cc.getURL(cluster);

        Connection conn = org.apache.hadoop.chukwa.util.DriverManagerUtil.getConnection(jdbc);

        stmt = conn.createStatement();
        String query = "";
        query = "select * from " + dataSource + " where " + dateclause + ";";
        // execute the query once; getResultSet() then retrieves the results
        if (stmt.execute(query)) {
            rs = stmt.getResultSet();
            ResultSetMetaData rmeta = rs.getMetaData();
            int col = rmeta.getColumnCount();
            while (rs.next()) {
                ChukwaRecord event = new ChukwaRecord();
                StringBuilder cell = new StringBuilder();
                long timestamp = 0;

                for (int i = 1; i <= col; i++) { // JDBC column indexes are 1-based, so include the last column
                    String value = rs.getString(i);
                    if (value != null) {
                        cell.append(" ");
                        cell.append(rmeta.getColumnName(i));
                        cell.append(":");
                        cell.append(value);
                    }
                    if (rmeta.getColumnName(i).equals(timeField)) {
                        timestamp = rs.getLong(i);
                        event.setTime(timestamp);
                    }
                }
                boolean isValid = false;
                if (filter == null || filter.equals("")) {
                    isValid = true;
                } else if (cell.indexOf(filter) > 0) {
                    isValid = true;
                }
                if (!isValid) {
                    continue;
                }

                event.add(Record.bodyField, cell.toString());
                event.add(Record.sourceField, cluster + "." + dataSource);
                if (records.containsKey(timestamp)) {
                    records.get(timestamp).add(event);
                } else {
                    List<Record> list = new LinkedList<Record>();
                    list.add(event);
                    records.put(event.getTime(), list);
                }
            }
        }
    } catch (SQLException e) {
        e.printStackTrace();
        throw new DataSourceException(e);
    } finally {
        if (rs != null) {
            try {
                rs.close();
            } catch (SQLException sqlEx) {
                log.debug(ExceptionUtil.getStackTrace(sqlEx));
            }
            rs = null;
        }
        if (stmt != null) {
            try {
                stmt.close();
            } catch (SQLException sqlEx) {
                log.debug(ExceptionUtil.getStackTrace(sqlEx));
            }
            stmt = null;
        }
    }
    return result;
}
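
The containsKey/get/put sequence at the end of the loop above is a common way to group values under a shared key. Reduced to a standalone sketch (the record type and timestamps are illustrative):

import java.util.LinkedList;
import java.util.List;
import java.util.TreeMap;

public class GroupByKeySketch {
    public static void main(String[] args) {
        TreeMap<Long, List<String>> records = new TreeMap<Long, List<String>>();
        long[] timestamps = { 1000L, 1000L, 2000L };
        String[] events = { "a", "b", "c" };

        for (int i = 0; i < events.length; i++) {
            long ts = timestamps[i];
            if (records.containsKey(ts)) {
                // the key is already present, so get returns the existing list
                records.get(ts).add(events[i]);
            } else {
                List<String> list = new LinkedList<String>();
                list.add(events[i]);
                records.put(ts, list);
            }
        }

        System.out.println(records); // {1000=[a, b], 2000=[c]}
    }
}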

From source file:de.tudarmstadt.ukp.uby.integration.alignment.xml.transform.sensealignments.VnFnSenseAlignmentXml.java

/**
 * @param metadata
 * @throws IOException
 */
@Override
public void toAlignmentXml(XmlMeta metadata) throws IOException {

    Lexicon vn = uby.getLexiconByName(lexiconName);
    TreeMap<String, Source> sourceMap = new TreeMap<>();

    int noSource = 0;
    int lines = 0;
    int count = 0;
    ArrayList<String> output = new ArrayList<String>();
    try {
        DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
        DocumentBuilder builder = factory.newDocumentBuilder();
        Document doc = builder.parse(new File(alignmentFile));
        doc.getDocumentElement().normalize();
        NodeList entries = doc.getElementsByTagName("vncls");
        for (int i = 0; i < entries.getLength(); i++) {
            Node alignment = entries.item(i);
            NamedNodeMap atts = alignment.getAttributes();
            String vnClass = atts.getNamedItem("class").getTextContent();
            String vnLemma = atts.getNamedItem("vnmember").getTextContent();
            String luId = atts.getNamedItem("fnlexent").getTextContent();
            // there are mappings with empty (fn) target:
            if (luId.equals("")) {
                noSource++;
            } else {
                // add output here
                output.add(luId + "\t" + vnLemma + "\t" + vnClass + "\n");

                List<LexicalEntry> vnentries = uby.getLexicalEntries(vnLemma, EPartOfSpeech.verb, vn);
                if (vnentries.size() > 0) {
                    for (LexicalEntry e : vnentries) {
                        List<Sense> vnSenses = e.getSenses();
                        for (Sense vns : vnSenses) {
                            String senseId = vns.getId();
                            // filter by VN-class
                            List<SemanticLabel> labels = uby.getSemanticLabelsbySenseIdbyType(senseId,
                                    ELabelTypeSemantics.verbnetClass.toString());
                            for (SemanticLabel l : labels) {
                                String[] labelItems = l.getLabel().split("-");
                                StringBuffer parsedLabel = new StringBuffer();
                                parsedLabel.append(labelItems[1]);
                                for (int ji = 2; ji < labelItems.length; ji++) {
                                    parsedLabel.append("-" + labelItems[ji]);
                                }
                                if (parsedLabel.toString().equals(vnClass)) {
                                    // get source from sourceMap
                                    Source source = null;
                                    if (sourceMap.containsKey(luId)) {
                                        source = sourceMap.get(luId);
                                    } else {
                                        source = new Source();
                                        source.ref = luId;
                                    }

                                    Target target = new Target();
                                    target.ref = vns.getMonolingualExternalRefs().iterator().next()
                                            .getExternalReference();
                                    target.decision = new Decision();
                                    target.decision.value = true;
                                    target.decision.confidence = DEFAULTCONFIDENCE;

                                    // add target to source
                                    source.targets.add(target);
                                    count++;
                                    sourceMap.put(source.ref, source);
                                }
                            }
                        }
                    }
                }
            }
            lines++;
        }
    } catch (IOException | ParserConfigurationException | SAXException e) {
        throw new IOException(e);
    }
    logString.append("Converted " + alignmentFile + ", statistics:" + LF);
    logString.append("\tInput Lines: " + lines + LF);
    logString.append("\tOutput: " + output.size() + LF);
    logString.append("\tNo alignment target: " + noSource + LF);
    logString.append("\tControl: output +  no alignment = input lines: " + (output.size() + noSource) + LF);
    logString.append("\tNumber of alignment pairs in output:" + count);
    logger.info(logString.toString());

    writer.writeMetaData(metadata);
    Alignments alignments = new Alignments();
    alignments.source = new LinkedList<>();
    alignments.source.addAll(sourceMap.values());
    writer.writeAlignments(alignments);
    writer.close();
}

From source file:org.dspace.discovery.SolrServiceImpl.java

protected DiscoverResult retrieveResult(Context context, DiscoverQuery query, QueryResponse solrQueryResponse)
        throws SQLException {
    DiscoverResult result = new DiscoverResult();

    if (solrQueryResponse != null) {
        result.setSearchTime(solrQueryResponse.getQTime());
        result.setStart(query.getStart());
        result.setMaxResults(query.getMaxResults());
        result.setTotalSearchResults(solrQueryResponse.getResults().getNumFound());

        List<String> searchFields = query.getSearchFields();
        for (SolrDocument doc : solrQueryResponse.getResults()) {
            DSpaceObject dso = findDSpaceObject(context, doc);

            if (dso != null) {
                result.addDSpaceObject(dso);
            } else {
                log.error(LogManager.getHeader(context,
                        "Error while retrieving DSpace object from discovery index",
                        "Handle: " + doc.getFirstValue("handle")));
                continue;
            }

            DiscoverResult.SearchDocument resultDoc = new DiscoverResult.SearchDocument();
            //Add information about our search fields
            for (String field : searchFields) {
                List<String> valuesAsString = new ArrayList<String>();
                for (Object o : doc.getFieldValues(field)) {
                    valuesAsString.add(String.valueOf(o));
                }
                resultDoc.addSearchField(field, valuesAsString.toArray(new String[valuesAsString.size()]));
            }
            result.addSearchDocument(dso, resultDoc);

            if (solrQueryResponse.getHighlighting() != null) {
                Map<String, List<String>> highlightedFields = solrQueryResponse.getHighlighting()
                        .get(dso.getType() + "-" + dso.getID());
                if (MapUtils.isNotEmpty(highlightedFields)) {
                    //We need to remove the "_hl" suffix from our keys
                    Map<String, List<String>> resultMap = new HashMap<String, List<String>>();
                    for (String key : highlightedFields.keySet()) {
                        resultMap.put(key.substring(0, key.lastIndexOf("_hl")), highlightedFields.get(key));
                    }

                    result.addHighlightedResult(dso,
                            new DiscoverResult.DSpaceObjectHighlightResult(dso, resultMap));
                }
            }
        }

        //Resolve our facet field values
        List<FacetField> facetFields = solrQueryResponse.getFacetFields();
        if (facetFields != null) {
            for (int i = 0; i < facetFields.size(); i++) {
                FacetField facetField = facetFields.get(i);
                DiscoverFacetField facetFieldConfig = query.getFacetFields().get(i);
                List<FacetField.Count> facetValues = facetField.getValues();
                if (facetValues != null) {
                    if (facetFieldConfig.getType().equals(DiscoveryConfigurationParameters.TYPE_DATE)
                            && facetFieldConfig.getSortOrder()
                                    .equals(DiscoveryConfigurationParameters.SORT.VALUE)) {
                        //If we have a date & are sorting by value, ensure that the results are flipped for a proper result
                        Collections.reverse(facetValues);
                    }

                    for (FacetField.Count facetValue : facetValues) {
                        String displayedValue = transformDisplayedValue(context, facetField.getName(),
                                facetValue.getName());
                        String field = transformFacetField(facetFieldConfig, facetField.getName(), true);
                        String authorityValue = transformAuthorityValue(context, facetField.getName(),
                                facetValue.getName());
                        String sortValue = transformSortValue(context, facetField.getName(),
                                facetValue.getName());
                        String filterValue = displayedValue;
                        if (StringUtils.isNotBlank(authorityValue)) {
                            filterValue = authorityValue;
                        }
                        result.addFacetResult(field, new DiscoverResult.FacetResult(filterValue, displayedValue,
                                authorityValue, sortValue, facetValue.getCount()));
                    }
                }
            }
        }

        if (solrQueryResponse.getFacetQuery() != null) {
            //TODO: do not sort when not a date, just retrieve the facets in the order they were requested!
            //At the moment facet queries are only used for dates so we need to sort our results
            TreeMap<String, Integer> sortedFacetQueries = new TreeMap<String, Integer>(
                    solrQueryResponse.getFacetQuery());
            for (String facetQuery : sortedFacetQueries.descendingKeySet()) {
                //TODO: do not assume this, people may want to use it for other ends, use a regex to make sure
                //We have a facet query; the value looks something like: dateissued.year:[1990 TO 2000] AND -2000
                //Prepare the string from {facet.field.name}:[startyear TO endyear] to startyear - endyear
                String facetField = facetQuery.substring(0, facetQuery.indexOf(":"));
                String name = facetQuery.substring(facetQuery.indexOf('[') + 1);
                name = name.substring(0, name.lastIndexOf(']')).replaceAll("TO", "-");
                String filter = facetQuery.substring(facetQuery.indexOf('['));
                filter = filter.substring(0, filter.lastIndexOf(']') + 1);

                Integer count = sortedFacetQueries.get(facetQuery);

                //No need to show empty years
                if (0 < count) {
                    result.addFacetResult(facetField,
                            new DiscoverResult.FacetResult(filter, name, null, name, count));
                }
            }
        }
    }

    return result;
}
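
The facet-query handling above copies an unordered Map into a TreeMap so that the keys can be walked in (descending) sorted order while their counts are fetched with get. The same pattern in isolation (the map contents are illustrative):

import java.util.HashMap;
import java.util.Map;
import java.util.TreeMap;

public class SortedFacetSketch {
    public static void main(String[] args) {
        Map<String, Integer> unsorted = new HashMap<String, Integer>();
        unsorted.put("dateissued.year:[1990 TO 2000]", 12);
        unsorted.put("dateissued.year:[2000 TO 2010]", 0);
        unsorted.put("dateissued.year:[1980 TO 1990]", 3);

        // copying into a TreeMap sorts the keys; descendingKeySet() walks them in reverse
        TreeMap<String, Integer> sorted = new TreeMap<String, Integer>(unsorted);
        for (String facetQuery : sorted.descendingKeySet()) {
            Integer count = sorted.get(facetQuery);
            if (count > 0) { // skip empty buckets, as the method above does
                System.out.println(facetQuery + " -> " + count);
            }
        }
    }
}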

From source file:com.sfs.whichdoctor.dao.PersonDAOImpl.java

/**
 * Load a list of people this person has supervised in the past.
 *
 * @param guid the guid
 * @param allRotations the all rotations
 * @return the collection
 */
private HashMap<String, ArrayList<PersonBean>> loadSupervisedPeople(final int guid,
        final boolean allRotations) {

    HashMap<String, ArrayList<PersonBean>> supervisedPeople = new HashMap<String, ArrayList<PersonBean>>();

    // Create a new SearchBean with default values
    SearchBean searchRotations = this.getSearchDAO().initiate("rotation", null);
    searchRotations.setLimit(0);

    RotationBean rotationParam = (RotationBean) searchRotations.getSearchCriteria();
    SupervisorBean supervisor = new SupervisorBean();
    supervisor.setPersonGUID(guid);
    rotationParam.addSupervisor(supervisor);

    BuilderBean loadDetails = new BuilderBean();
    loadDetails.setParameter("ASSESSMENTS", true);
    loadDetails.setParameter("SUPERVISORS", true);

    searchRotations.setSearchCriteria(rotationParam);
    searchRotations.setOrderColumn("rotation.StartDate");
    searchRotations.setOrderColumn2("people.LastName");
    searchRotations.setOrderAscending(false);

    SearchResultsBean studentsSupervised = new SearchResultsBean();
    try {
        studentsSupervised = this.getSearchDAO().search(searchRotations, loadDetails);
    } catch (Exception e) {
        dataLogger.error("Error searching for supervised people: " + e.getMessage());
    }

    final Calendar currentDate = Calendar.getInstance();

    final TreeMap<String, ArrayList<RotationBean>> currentlySupervising = new TreeMap<String, ArrayList<RotationBean>>();
    final TreeMap<String, ArrayList<RotationBean>> previouslySupervised = new TreeMap<String, ArrayList<RotationBean>>();
    final HashMap<String, PersonBean> personMap = new HashMap<String, PersonBean>();

    for (Object rotationObj : studentsSupervised.getSearchResults()) {
        final RotationBean rotation = (RotationBean) rotationObj;

        boolean currentlyTakingPlace = false;

        if (rotation.getStartDate().before(currentDate.getTime())
                && rotation.getEndDate().after(currentDate.getTime())) {
            currentlyTakingPlace = true;
        }

        if (rotation.getPerson() != null) {
            final PersonBean person = rotation.getPerson();

            final String index = person.getLastName() + " " + person.getPreferredName() + " "
                    + person.getPersonIdentifier();

            boolean processed = false;

            if (currentlySupervising.containsKey(index)) {
                // The person exists in the currently supervising list.
                ArrayList<RotationBean> tneRots = currentlySupervising.get(index);
                if (allRotations || currentlyTakingPlace) {
                    tneRots.add(rotation);
                }
                currentlySupervising.put(index, tneRots);
                processed = true;
            }
            if (previouslySupervised.containsKey(index)) {
                // The person exists in the previously supervised list
                ArrayList<RotationBean> tneRots = previouslySupervised.get(index);
                if (allRotations || currentlyTakingPlace) {
                    tneRots.add(rotation);
                }
                if (currentlyTakingPlace) {
                    // This is a current rotation, remove from the previously
                    // supervised list and add to currently supervising.
                    previouslySupervised.remove(index);
                    currentlySupervising.put(index, tneRots);
                } else {
                    previouslySupervised.put(index, tneRots);
                }
                processed = true;
            }

            if (!processed) {
                // This person has not been encountered yet.
                personMap.put(index, person);

                ArrayList<RotationBean> tneRots = new ArrayList<RotationBean>();
                if (allRotations || currentlyTakingPlace) {
                    tneRots.add(rotation);
                }
                if (currentlyTakingPlace) {
                    currentlySupervising.put(index, tneRots);
                } else {
                    previouslySupervised.put(index, tneRots);
                }
            }
        }
    }

    final ArrayList<PersonBean> currentPeople = new ArrayList<PersonBean>();
    final ArrayList<PersonBean> previousPeople = new ArrayList<PersonBean>();

    for (String index : currentlySupervising.keySet()) {
        final PersonBean person = personMap.get(index);
        final ArrayList<RotationBean> tneRots = currentlySupervising.get(index);
        person.setRotations(tneRots);
        currentPeople.add(person);
    }
    for (String index : previouslySupervised.keySet()) {
        final PersonBean person = personMap.get(index);
        final ArrayList<RotationBean> tneRots = previouslySupervised.get(index);
        person.setRotations(tneRots);
        previousPeople.add(person);
    }

    supervisedPeople.put("current", currentPeople);
    supervisedPeople.put("previous", previousPeople);

    return supervisedPeople;
}

From source file:com.sfs.whichdoctor.importer.ExamImporter.java

/**
 * Assign data.
 *
 * @param personDAO the person dao
 *
 * @return the hash map of object to list of objects
 *
 * @throws WhichDoctorImporterException the which doctor importer exception
 */
protected final HashMap<Object, List<Object>> assignData(final PersonDAO personDAO)
        throws WhichDoctorImporterException {

    importLogger.debug("Getting data map");
    HashMap<String, List<String>> dataMap = this.getDataMap();

    if (dataMap == null) {
        throw new WhichDoctorImporterException("Error importing " + "- the data map cannot be null");
    }

    List<Integer> indexValues = new ArrayList<Integer>();
    TreeMap<Integer, PersonBean> keyMap = new TreeMap<Integer, PersonBean>();

    // Load the index values
    if (dataMap.containsKey("MIN") || dataMap.containsKey("Candidate No.")) {
        importLogger.debug("Datamap contains index key");

        // The submitted data has a key field, load associated people
        if (dataMap.containsKey("MIN")) {
            // Load person based on MIN
            for (String strIdentifier : dataMap.get("MIN")) {
                try {
                    Integer identifier = Integer.valueOf(strIdentifier);
                    if (!keyMap.containsKey(identifier)) {
                        PersonBean person = personDAO.loadIdentifier(identifier, new BuilderBean());
                        if (person != null) {
                            keyMap.put(identifier, person);
                        }
                    }
                    indexValues.add(identifier);
                } catch (Exception e) {
                    setImportMessage("Error loading person with MIN: " + strIdentifier);
                    importLogger.error("Error loading person with MIN: " + e.getMessage());
                }
            }
        } else {
            // dataMap has Candidate number but not MIN
            for (String strCandidateNo : dataMap.get("Candidate No.")) {
                try {
                    Integer candidateNo = Integer.valueOf(strCandidateNo);
                    if (!keyMap.containsKey(candidateNo)) {
                        PersonBean person = personDAO.loadCandidateNumber(candidateNo.intValue(),
                                new BuilderBean());
                        if (person != null) {
                            keyMap.put(candidateNo, person);
                        }
                    }
                    indexValues.add(candidateNo);
                } catch (Exception e) {
                    setImportMessage("Error loading person with Candidate " + "Number: " + strCandidateNo);
                    importLogger.error("Error loading person with " + "Candidate Number: " + e.getMessage());
                }
            }
        }
    }

    // With the index values loaded, cycle through and create the actual beans
    for (int i = 0; i < indexValues.size(); i++) {
        Integer index = indexValues.get(i);

        if (keyMap.containsKey(index)) {
            PersonBean person = keyMap.get(index);
            if (person == null) {
                throw new WhichDoctorImporterException("Person is null");
            }
            try {
                // Set the values of the exam object
                ExamBean exam = new ExamBean();
                exam.setReferenceGUID(person.getGUID());
                exam.setLogMessage("Created via automated import process");

                if (dataMap.containsKey("Exam Type")) {
                    List<String> values = dataMap.get("Exam Type");
                    importLogger.info("Exam Type: " + values.get(i));
                    exam.setType(values.get(i));
                }
                if (dataMap.containsKey("Exam Date")) {
                    List<String> values = dataMap.get("Exam Date");
                    importLogger.info("Exam Date: " + DataFilter.parseDate(values.get(i), true));
                    exam.setDateSat(DataFilter.parseDate(values.get(i), true));
                }
                if (dataMap.containsKey("Exam Venue")) {
                    List<String> values = dataMap.get("Exam Venue");
                    importLogger.info("Exam Venue: " + values.get(i));
                    exam.setLocation(values.get(i));
                }
                if (dataMap.containsKey("Result")) {
                    List<String> values = dataMap.get("Result");
                    importLogger.info("Result: " + values.get(i));
                    String status = checkInput("Result", values.get(i));
                    exam.setStatus(status);
                }
                if (dataMap.containsKey("Result Band")) {
                    List<String> values = dataMap.get("Result Band");
                    importLogger.info("Result Band: " + values.get(i));
                    exam.setStatusLevel(values.get(i));
                }

                setBeanArray(person, exam);

            } catch (Exception e) {
                setImportMessage("Error setting values for exam associated to: "
                        + OutputFormatter.toFormattedName(person));
                importLogger.error("Error setting values for exam: " + e.getMessage());
            }
        }
    }
    return this.getBeanMap();
}

From source file:ca.uhn.fhir.context.BaseRuntimeElementCompositeDefinition.java

private void scanCompositeElementForChildren() {
    Set<String> elementNames = new HashSet<String>();
    TreeMap<Integer, BaseRuntimeDeclaredChildDefinition> orderToElementDef = new TreeMap<Integer, BaseRuntimeDeclaredChildDefinition>();
    TreeMap<Integer, BaseRuntimeDeclaredChildDefinition> orderToExtensionDef = new TreeMap<Integer, BaseRuntimeDeclaredChildDefinition>();

    scanCompositeElementForChildren(elementNames, orderToElementDef, orderToExtensionDef);

    if (forcedOrder != null) {
        /*
         * Find out how many elements don't match any entry in the list
         * for forced order. Those elements come first.
         */
        TreeMap<Integer, BaseRuntimeDeclaredChildDefinition> newOrderToExtensionDef = new TreeMap<Integer, BaseRuntimeDeclaredChildDefinition>();
        int unknownCount = 0;
        for (BaseRuntimeDeclaredChildDefinition nextEntry : orderToElementDef.values()) {
            if (!forcedOrder.containsKey(nextEntry.getElementName())) {
                newOrderToExtensionDef.put(unknownCount, nextEntry);
                unknownCount++;
            }
        }
        for (BaseRuntimeDeclaredChildDefinition nextEntry : orderToElementDef.values()) {
            if (forcedOrder.containsKey(nextEntry.getElementName())) {
                Integer newOrder = forcedOrder.get(nextEntry.getElementName());
                newOrderToExtensionDef.put(newOrder + unknownCount, nextEntry);
            }
        }
        orderToElementDef = newOrderToExtensionDef;
    }

    // while (orderToElementDef.size() > 0 && orderToElementDef.firstKey() <
    // 0) {
    // BaseRuntimeDeclaredChildDefinition elementDef =
    // orderToElementDef.remove(orderToElementDef.firstKey());
    // if (elementDef.getElementName().equals("identifier")) {
    // orderToElementDef.put(theIdentifierOrder, elementDef);
    // } else {
    // throw new ConfigurationException("Don't know how to handle element: "
    // + elementDef.getElementName());
    // }
    // }

    TreeSet<Integer> orders = new TreeSet<Integer>();
    orders.addAll(orderToElementDef.keySet());
    orders.addAll(orderToExtensionDef.keySet());

    for (Integer i : orders) {
        BaseRuntimeChildDefinition nextChild = orderToElementDef.get(i);
        if (nextChild != null) {
            this.addChild(nextChild);
        }
        BaseRuntimeDeclaredChildDefinition nextExt = orderToExtensionDef.get(i);
        if (nextExt != null) {
            this.addExtension((RuntimeChildDeclaredExtensionDefinition) nextExt);
        }
    }

}

From source file:chibi.gemmaanalysis.CoexpressionAnalysisService.java

/**
 * Calculates all pairwise correlations between the query and target composite sequences and then takes the median
 * correlation
 * 
 * @param queryCss
 * @param targetCss
 * @param dataMatrix
 * @return
 */
private CorrelationSampleSize calculateCorrelation(Collection<CompositeSequence> queryCss,
        Collection<CompositeSequence> targetCss, ExpressionDataDoubleMatrix dataMatrix,
        CorrelationMethod method) {
    TreeMap<Double, Double> correlNumUsedMap = new TreeMap<Double, Double>();
    // calculate all pairwise correlations between cs groups
    for (CompositeSequence queryCs : queryCss) {
        for (CompositeSequence targetCs : targetCss) {
            Double[] queryVals = dataMatrix.getRow(queryCs);
            Double[] targetVals = dataMatrix.getRow(targetCs);
            if (queryVals != null && targetVals != null) {
                double[] v1 = new double[queryVals.length];
                double[] v2 = new double[targetVals.length];
                for (int i = 0; i < queryVals.length; i++) {
                    if (queryVals[i] != null)
                        v1[i] = queryVals[i];
                    else
                        v1[i] = Double.NaN;
                }
                for (int i = 0; i < targetVals.length; i++) {
                    if (targetVals[i] != null)
                        v2[i] = targetVals[i];
                    else
                        v2[i] = Double.NaN;
                }

                int numUsed = 0;
                for (int i = 0; i < v1.length && i < v2.length; i++)
                    if (!Double.isNaN(v1[i]) && !Double.isNaN(v2[i]))
                        numUsed++;
                if (numUsed > MIN_NUM_USED) {
                    double correlation;
                    switch (method) {
                    case SPEARMAN:
                        correlation = Distance.spearmanRankCorrelation(new DoubleArrayList(v1),
                                new DoubleArrayList(v2));
                        break;
                    case PEARSON:
                    default:
                        correlation = CorrelationStats.correl(v1, v2);

                    }
                    correlNumUsedMap.put(correlation, (double) numUsed);
                }
            }
        }
    }
    if (correlNumUsedMap.size() == 0) {
        return null;
    }
    List<Double> correlations = new ArrayList<Double>(correlNumUsedMap.keySet());
    // take the median correlation
    Double correlation = correlations.get(correlations.size() / 2);
    Double sampleSize = correlNumUsedMap.get(correlation);
    CorrelationSampleSize c = new CorrelationSampleSize();
    c.correlation = correlation;
    c.sampleSize = sampleSize;
    return c;

}

From source file:jp.zippyzip.impl.GeneratorServiceImpl.java

public void preZips() {

    Date timestamp = getLzhDao().getZipInfo().getTimestamp();
    LinkedList<Pref> prefs = getPrefs();
    LinkedList<City> cities = getCities();

    try {

        for (Pref pref : prefs) {

            TreeMap<String, TreeSet<String>> zips = new TreeMap<String, TreeSet<String>>();

            for (City city : cities) {

                if (!city.getCode().startsWith(pref.getCode())) {
                    continue;
                }

                ParentChild data = getParentChildDao().get(city.getCode());

                if (data != null) {

                    for (String json : data.getChildren()) {

                        String zip = new JSONObject(json).optString("code", "");

                        if (!zips.containsKey(zip)) {
                            zips.put(zip, new TreeSet<String>());
                        }

                        zips.get(zip).add(city.getCode());
                    }
                }

                data = getParentChildDao().get(city.getCode() + "c");

                if (data != null) {

                    for (String json : data.getChildren()) {

                        String zip = new JSONObject(json).optString("code", "");

                        if (!zips.containsKey(zip)) {
                            zips.put(zip, new TreeSet<String>());
                        }

                        zips.get(zip).add(city.getCode() + "c");
                    }
                }
            }

            StringBuilder rec = new StringBuilder("[");
            LinkedList<String> list = new LinkedList<String>();

            for (String zip : zips.keySet()) {

                for (String key : zips.get(zip)) {

                    rec.append(new JSONStringer().object().key("zip").value(zip).key("key").value(key)
                            .endObject().toString());

                    if (rec.length() > 400) {
                        rec.append("]");
                        list.add(rec.toString());
                        rec = new StringBuilder("[");
                    } else {
                        rec.append(",");
                    }
                }
            }

            if (rec.length() > 1) {
                rec.append("]");
                list.add(rec.toString());
            }

            getParentChildDao()
                    .store(new ParentChild("pre" + pref.getCode(), timestamp, new LinkedList<String>(), list));
            log.info(pref.getCode() + ":" + list.size());
        }

    } catch (JSONException e) {
        log.log(Level.WARNING, "", e);
    }

    return;
}

From source file:net.triptech.metahive.service.EmailSenderService.java

/**
 * Send an email message using the configured Spring sender. On success
 * record the sent message in the datastore for reporting purposes
 *
 * @param email the email
 * @param attachments the attachments
 * @throws ServiceException the service exception
 */
public final void send(final SimpleMailMessage email, TreeMap<String, Object> attachments)
        throws ServiceException {

    // Check to see whether the required fields are set (to, from, message)
    if (email.getTo() == null) {
        throw new ServiceException("Error sending email: Recipient " + "address required");
    }
    if (StringUtils.isBlank(email.getFrom())) {
        throw new ServiceException("Error sending email: Email requires " + "a from address");
    }
    if (StringUtils.isBlank(email.getText())) {
        throw new ServiceException("Error sending email: No email " + "message specified");
    }
    if (mailSender == null) {
        throw new ServiceException("The JavaMail sender has not " + "been configured");
    }

    // Prepare the email message
    MimeMessage message = mailSender.createMimeMessage();
    MimeMessageHelper helper = null;
    boolean htmlMessage = false;
    if (StringUtils.containsIgnoreCase(email.getText(), "<html")) {
        htmlMessage = true;
        try {
            helper = new MimeMessageHelper(message, true, "UTF-8");
        } catch (MessagingException me) {
            throw new ServiceException("Error preparing email for sending: " + me.getMessage());
        }
    } else {
        helper = new MimeMessageHelper(message);
    }

    try {
        helper.setTo(email.getTo());
        helper.setFrom(email.getFrom());
        helper.setSubject(email.getSubject());

        if (email.getCc() != null) {
            helper.setCc(email.getCc());
        }
        if (email.getBcc() != null) {
            helper.setBcc(email.getBcc());
        }

        if (htmlMessage) {
            String plainText = email.getText();
            try {
                ConvertHtmlToText htmlToText = new ConvertHtmlToText();
                plainText = htmlToText.convert(email.getText());
            } catch (Exception e) {
                logger.error("Error converting HTML to plain text: " + e.getMessage());
            }
            helper.setText(plainText, email.getText());
        } else {
            helper.setText(email.getText());
        }

        if (email.getSentDate() != null) {
            helper.setSentDate(email.getSentDate());
        } else {
            helper.setSentDate(Calendar.getInstance().getTime());
        }

    } catch (MessagingException me) {
        throw new ServiceException("Error preparing email for sending: " + me.getMessage());
    }

    // Append any attachments (if an HTML email)
    if (htmlMessage && attachments != null) {
        for (String id : attachments.keySet()) {
            Object reference = attachments.get(id);

            if (reference instanceof File) {
                try {
                    FileSystemResource res = new FileSystemResource((File) reference);
                    helper.addInline(id, res);
                } catch (MessagingException me) {
                    logger.error("Error appending File attachment: " + me.getMessage());
                }
            }
            if (reference instanceof URL) {
                try {
                    UrlResource res = new UrlResource((URL) reference);
                    helper.addInline(id, res);
                } catch (MessagingException me) {
                    logger.error("Error appending URL attachment: " + me.getMessage());
                }
            }
        }
    }

    // Send the email message
    try {
        mailSender.send(message);
    } catch (MailException me) {
        logger.error("Error sending email: " + me.getMessage());
        throw new ServiceException("Error sending email: " + me.getMessage());
    }
}