Example usage for java.util TreeMap keySet

List of usage examples for java.util TreeMap keySet

Introduction

On this page you can find usage examples for java.util.TreeMap.keySet().

Prototype

public Set<K> keySet() 

Document

Returns a Set view of the keys contained in this map.
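
Before the project examples below, here is a minimal, self-contained sketch (class and variable names are illustrative, not taken from any of the projects) showing the two essential properties of TreeMap.keySet(): iteration follows the map's sort order, and the returned Set is a live view backed by the map.

import java.util.Set;
import java.util.TreeMap;

public class TreeMapKeySetDemo {
    public static void main(String[] args) {
        TreeMap<String, Integer> wordCounts = new TreeMap<String, Integer>();
        wordCounts.put("banana", 3);
        wordCounts.put("apple", 5);
        wordCounts.put("cherry", 1);

        // For a TreeMap, keySet() iterates in the map's sort order
        // (natural String ordering here), so this prints apple, banana, cherry.
        Set<String> keys = wordCounts.keySet();
        for (String key : keys) {
            System.out.println(key + " -> " + wordCounts.get(key));
        }

        // The set is a view backed by the map: removing a key from it
        // also removes the corresponding entry from the map.
        keys.remove("banana");
        System.out.println(wordCounts); // prints {apple=5, cherry=1}
    }
}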

Usage

From source file: com.sfs.whichdoctor.dao.PersonDAOImpl.java

/**
 * Load a list of people this person has supervised in the past.
 *
 * @param guid the guid
 * @param allRotations the all rotations
 * @return the collection
 */
private HashMap<String, ArrayList<PersonBean>> loadSupervisedPeople(final int guid,
        final boolean allRotations) {

    HashMap<String, ArrayList<PersonBean>> supervisedPeople = new HashMap<String, ArrayList<PersonBean>>();

    // Create a new SearchBean with default values
    SearchBean searchRotations = this.getSearchDAO().initiate("rotation", null);
    searchRotations.setLimit(0);

    RotationBean rotationParam = (RotationBean) searchRotations.getSearchCriteria();
    SupervisorBean supervisor = new SupervisorBean();
    supervisor.setPersonGUID(guid);
    rotationParam.addSupervisor(supervisor);

    BuilderBean loadDetails = new BuilderBean();
    loadDetails.setParameter("ASSESSMENTS", true);
    loadDetails.setParameter("SUPERVISORS", true);

    searchRotations.setSearchCriteria(rotationParam);
    searchRotations.setOrderColumn("rotation.StartDate");
    searchRotations.setOrderColumn2("people.LastName");
    searchRotations.setOrderAscending(false);

    SearchResultsBean studentsSupervised = new SearchResultsBean();
    try {
        studentsSupervised = this.getSearchDAO().search(searchRotations, loadDetails);
    } catch (Exception e) {
        dataLogger.error("Error searching for supervised people: " + e.getMessage());
    }

    final Calendar currentDate = Calendar.getInstance();

    final TreeMap<String, ArrayList<RotationBean>> currentlySupervising = new TreeMap<String, ArrayList<RotationBean>>();
    final TreeMap<String, ArrayList<RotationBean>> previouslySupervised = new TreeMap<String, ArrayList<RotationBean>>();
    final HashMap<String, PersonBean> personMap = new HashMap<String, PersonBean>();

    for (Object rotationObj : studentsSupervised.getSearchResults()) {
        final RotationBean rotation = (RotationBean) rotationObj;

        boolean currentlyTakingPlace = false;

        if (rotation.getStartDate().before(currentDate.getTime())
                && rotation.getEndDate().after(currentDate.getTime())) {
            currentlyTakingPlace = true;
        }

        if (rotation.getPerson() != null) {
            final PersonBean person = rotation.getPerson();

            final String index = person.getLastName() + " " + person.getPreferredName() + " "
                    + person.getPersonIdentifier();

            boolean processed = false;

            if (currentlySupervising.containsKey(index)) {
                // The person exists in the currently supervising list.
                ArrayList<RotationBean> tneRots = currentlySupervising.get(index);
                if (allRotations || currentlyTakingPlace) {
                    tneRots.add(rotation);
                }
                currentlySupervising.put(index, tneRots);
                processed = true;
            }
            if (previouslySupervised.containsKey(index)) {
                // The person exists in the previously supervised list
                ArrayList<RotationBean> tneRots = previouslySupervised.get(index);
                if (allRotations || currentlyTakingPlace) {
                    tneRots.add(rotation);
                }
                if (currentlyTakingPlace) {
                    // This is a current rotation, remove from the previously
                    // supervised list and add to currently supervising.
                    previouslySupervised.remove(index);
                    currentlySupervising.put(index, tneRots);
                } else {
                    previouslySupervised.put(index, tneRots);
                }
                processed = true;
            }

            if (!processed) {
                // This person has not been encountered yet.
                personMap.put(index, person);

                ArrayList<RotationBean> tneRots = new ArrayList<RotationBean>();
                if (allRotations || currentlyTakingPlace) {
                    tneRots.add(rotation);
                }
                if (currentlyTakingPlace) {
                    currentlySupervising.put(index, tneRots);
                } else {
                    previouslySupervised.put(index, tneRots);
                }
            }
        }
    }

    final ArrayList<PersonBean> currentPeople = new ArrayList<PersonBean>();
    final ArrayList<PersonBean> previousPeople = new ArrayList<PersonBean>();

    for (String index : currentlySupervising.keySet()) {
        final PersonBean person = personMap.get(index);
        final ArrayList<RotationBean> tneRots = currentlySupervising.get(index);
        person.setRotations(tneRots);
        currentPeople.add(person);
    }
    for (String index : previouslySupervised.keySet()) {
        final PersonBean person = personMap.get(index);
        final ArrayList<RotationBean> tneRots = previouslySupervised.get(index);
        person.setRotations(tneRots);
        previousPeople.add(person);
    }

    supervisedPeople.put("current", currentPeople);
    supervisedPeople.put("previous", previousPeople);

    return supervisedPeople;
}
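
The two loops above iterate keySet() and then call get(index) on the same TreeMap, which performs a second lookup for every key. A minimal sketch (illustrative types, not the project's beans) of the equivalent traversal with entrySet(), which visits entries once in the same sorted key order:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Map;
import java.util.TreeMap;

public class EntrySetTraversalDemo {
    public static void main(String[] args) {
        TreeMap<String, ArrayList<Integer>> rotationsByPerson = new TreeMap<String, ArrayList<Integer>>();
        rotationsByPerson.put("Smith John 001", new ArrayList<Integer>(Arrays.asList(1, 2)));
        rotationsByPerson.put("Adams Ann 002", new ArrayList<Integer>(Arrays.asList(3)));

        // Same ascending key order as keySet(), but each mapping is read
        // directly from the entry instead of via a second get(key) lookup.
        for (Map.Entry<String, ArrayList<Integer>> entry : rotationsByPerson.entrySet()) {
            System.out.println(entry.getKey() + " has " + entry.getValue().size() + " rotation(s)");
        }
    }
}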

From source file: net.spfbl.spf.SPF.java

public static TreeSet<String> clear(String token) {
    TreeSet<String> clearSet = new TreeSet<String>();
    TreeMap<String, Distribution> distribuitonMap = CacheDistribution.getAll(token);
    for (String key : distribuitonMap.keySet()) {
        Distribution distribution = distribuitonMap.get(key);
        if (distribution != null) {
            if (distribution.clear()) {
                clearSet.add(key);
                distribution.getStatus(token);
                Peer.sendToAll(key, distribution);
            }
        }
        if (Block.dropExact(key)) {
            clearSet.add(key);
        }
    }
    for (String key : Block.getAllTokens(token)) {
        if (Block.dropExact(key)) {
            clearSet.add(key);
        }
    }
    //        for (String key : Peer.clearAllReputation(token)) {
    //            clearSet.add(key);
    //        }
    return clearSet;
}

From source file: org.apache.coyote.tomcat5.CoyoteRequest.java

/**
 * Parse accept-language header value.
 */
protected void parseLocalesHeader(String value) {

    // Store the accumulated languages that have been requested in
    // a local collection, sorted by the quality value (so we can
    // add Locales in descending order).  The values will be ArrayLists
    // containing the corresponding Locales to be added
    TreeMap locales = new TreeMap();

    // Preprocess the value to remove all whitespace
    int white = value.indexOf(' ');
    if (white < 0)
        white = value.indexOf('\t');
    if (white >= 0) {
        StringBuffer sb = new StringBuffer();
        int len = value.length();
        for (int i = 0; i < len; i++) {
            char ch = value.charAt(i);
            if ((ch != ' ') && (ch != '\t'))
                sb.append(ch);
        }
        value = sb.toString();
    }

    // Process each comma-delimited language specification
    parser.setString(value); // ASSERT: parser is available to us
    int length = parser.getLength();
    while (true) {

        // Extract the next comma-delimited entry
        int start = parser.getIndex();
        if (start >= length)
            break;
        int end = parser.findChar(',');
        String entry = parser.extract(start, end).trim();
        parser.advance(); // For the following entry

        // Extract the quality factor for this entry
        double quality = 1.0;
        int semi = entry.indexOf(";q=");
        if (semi >= 0) {
            try {
                quality = Double.parseDouble(entry.substring(semi + 3));
            } catch (NumberFormatException e) {
                quality = 0.0;
            }
            entry = entry.substring(0, semi);
        }

        // Skip entries we are not going to keep track of
        if (quality < 0.00005)
            continue; // Zero (or effectively zero) quality factors
        if ("*".equals(entry))
            continue; // FIXME - "*" entries are not handled

        // Extract the language and country for this entry
        String language = null;
        String country = null;
        String variant = null;
        int dash = entry.indexOf('-');
        if (dash < 0) {
            language = entry;
            country = "";
            variant = "";
        } else {
            language = entry.substring(0, dash);
            country = entry.substring(dash + 1);
            int vDash = country.indexOf('-');
            if (vDash > 0) {
                String cTemp = country.substring(0, vDash);
                variant = country.substring(vDash + 1);
                country = cTemp;
            } else {
                variant = "";
            }
        }

        // Add a new Locale to the list of Locales for this quality level
        Locale locale = new Locale(language, country, variant);
        Double key = new Double(-quality); // Reverse the order
        ArrayList values = (ArrayList) locales.get(key);
        if (values == null) {
            values = new ArrayList();
            locales.put(key, values);
        }
        values.add(locale);

    }

    // Process the quality values in highest->lowest order (due to
    // negating the Double value when creating the key)
    Iterator keys = locales.keySet().iterator();
    while (keys.hasNext()) {
        Double key = (Double) keys.next();
        ArrayList list = (ArrayList) locales.get(key);
        Iterator values = list.iterator();
        while (values.hasNext()) {
            Locale locale = (Locale) values.next();
            addLocale(locale);
        }
    }

}
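
The method above stores negated quality values as keys so that the ascending iteration of keySet() effectively runs highest-quality-first. A minimal sketch, assuming Java 6 or later (illustrative names, not Tomcat code), of the same effect using the positive quality as the key and TreeMap.descendingKeySet() from the NavigableMap interface:

import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.TreeMap;

public class AcceptLanguageOrderingDemo {
    public static void main(String[] args) {
        // Quality factor -> Locales requested at that quality level.
        TreeMap<Double, List<Locale>> byQuality = new TreeMap<Double, List<Locale>>();
        add(byQuality, 0.8, Locale.FRENCH);
        add(byQuality, 1.0, Locale.UK);
        add(byQuality, 0.5, Locale.GERMAN);

        // descendingKeySet() iterates the keys highest-first, so there is
        // no need to negate the quality values when building the map.
        for (Double quality : byQuality.descendingKeySet()) {
            for (Locale locale : byQuality.get(quality)) {
                System.out.println(quality + " -> " + locale);
            }
        }
    }

    private static void add(TreeMap<Double, List<Locale>> map, double quality, Locale locale) {
        List<Locale> list = map.get(quality);
        if (list == null) {
            list = new ArrayList<Locale>();
            map.put(quality, list);
        }
        list.add(locale);
    }
}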

From source file: fr.cirad.mgdb.exporting.individualoriented.DARwinExportHandler.java

@Override
public void exportData(OutputStream outputStream, String sModule, Collection<File> individualExportFiles,
        boolean fDeleteSampleExportFilesOnExit, ProgressIndicator progress, DBCursor markerCursor,
        Map<Comparable, Comparable> markerSynonyms, Map<String, InputStream> readyToExportFiles)
        throws Exception {
    MongoTemplate mongoTemplate = MongoTemplateManager.get(sModule);
    GenotypingProject aProject = mongoTemplate.findOne(
            new Query(Criteria.where(GenotypingProject.FIELDNAME_PLOIDY_LEVEL).exists(true)),
            GenotypingProject.class);
    if (aProject == null)
        LOG.warn("Unable to find a project containing ploidy level information! Assuming ploidy level is 2.");

    int ploidy = aProject == null ? 2 : aProject.getPloidyLevel();

    File warningFile = File.createTempFile("export_warnings_", "");
    FileWriter warningFileWriter = new FileWriter(warningFile);

    int markerCount = markerCursor.count();

    ZipOutputStream zos = new ZipOutputStream(outputStream);

    if (readyToExportFiles != null)
        for (String readyToExportFile : readyToExportFiles.keySet()) {
            zos.putNextEntry(new ZipEntry(readyToExportFile));
            InputStream inputStream = readyToExportFiles.get(readyToExportFile);
            byte[] dataBlock = new byte[1024];
            int count = inputStream.read(dataBlock, 0, 1024);
            while (count != -1) {
                zos.write(dataBlock, 0, count);
                count = inputStream.read(dataBlock, 0, 1024);
            }
        }

    String exportName = sModule + "_" + markerCount + "variants_" + individualExportFiles.size()
            + "individuals";

    StringBuffer donFileContents = new StringBuffer(
            "@DARwin 5.0 - DON -" + LINE_SEPARATOR + individualExportFiles.size() + "\t" + 1 + LINE_SEPARATOR
                    + "N" + "\t" + "individual" + LINE_SEPARATOR);

    int count = 0;
    String missingGenotype = "";
    for (int j = 0; j < ploidy; j++)
        missingGenotype += "\tN";

    zos.putNextEntry(new ZipEntry(exportName + ".var"));
    zos.write(("@DARwin 5.0 - ALLELIC - " + ploidy + LINE_SEPARATOR + individualExportFiles.size() + "\t"
            + markerCount * ploidy + LINE_SEPARATOR + "N").getBytes());

    DBCursor markerCursorCopy = markerCursor.copy(); // dunno how expensive this is, but seems safer than keeping all IDs in memory at any time

    short nProgress = 0, nPreviousProgress = 0;
    int avgObjSize = (Integer) mongoTemplate
            .getCollection(mongoTemplate.getCollectionName(VariantRunData.class)).getStats().get("avgObjSize");
    int nChunkSize = nMaxChunkSizeInMb * 1024 * 1024 / avgObjSize;
    markerCursorCopy.batchSize(nChunkSize);

    int nMarkerIndex = 0;
    while (markerCursorCopy.hasNext()) {
        DBObject exportVariant = markerCursorCopy.next();
        Comparable markerId = (Comparable) exportVariant.get("_id");

        if (markerSynonyms != null) {
            Comparable syn = markerSynonyms.get(markerId);
            if (syn != null)
                markerId = syn;
        }
        for (int j = 0; j < ploidy; j++)
            zos.write(("\t" + markerId).getBytes());
    }

    TreeMap<Integer, Comparable> problematicMarkerIndexToNameMap = new TreeMap<Integer, Comparable>();
    ArrayList<String> distinctAlleles = new ArrayList<String>(); // the index of each allele will be used as its code
    int i = 0;
    for (File f : individualExportFiles) {
        BufferedReader in = new BufferedReader(new FileReader(f));
        try {
            String individualId, line = in.readLine(); // read sample id

            if (line != null)
                individualId = line;
            else
                throw new Exception("Unable to read first line of temp export file " + f.getName());

            donFileContents.append(++count + "\t" + individualId + LINE_SEPARATOR);

            zos.write((LINE_SEPARATOR + count).getBytes());
            nMarkerIndex = 0;

            while ((line = in.readLine()) != null) {
                List<String> genotypes = MgdbDao.split(line, "|");
                HashMap<Object, Integer> genotypeCounts = new HashMap<Object, Integer>(); // will help us to keep track of missing genotypes
                int highestGenotypeCount = 0;
                String mostFrequentGenotype = null;
                for (String genotype : genotypes) {
                    if (genotype.length() == 0)
                        continue; /* skip missing genotypes */

                    int gtCount = 1 + MgdbDao.getCountForKey(genotypeCounts, genotype);
                    if (gtCount > highestGenotypeCount) {
                        highestGenotypeCount = gtCount;
                        mostFrequentGenotype = genotype;
                    }
                    genotypeCounts.put(genotype, gtCount);
                }

                if (genotypeCounts.size() > 1) {
                    warningFileWriter.write("- Dissimilar genotypes found for variant __" + nMarkerIndex
                            + "__, individual " + individualId + ". Exporting most frequent: "
                            + mostFrequentGenotype + "\n");
                    problematicMarkerIndexToNameMap.put(nMarkerIndex, "");
                }

                String codedGenotype = "";
                if (mostFrequentGenotype != null)
                    for (String allele : mostFrequentGenotype.split(" ")) {
                        if (!distinctAlleles.contains(allele))
                            distinctAlleles.add(allele);
                        codedGenotype += "\t" + distinctAlleles.indexOf(allele);
                    }
                else
                    codedGenotype = missingGenotype.replaceAll("N", "-1"); // missing data is coded as -1
                zos.write(codedGenotype.getBytes());

                nMarkerIndex++;
            }
        } catch (Exception e) {
            LOG.error("Error exporting data", e);
            progress.setError("Error exporting data: " + e.getClass().getSimpleName()
                    + (e.getMessage() != null ? " - " + e.getMessage() : ""));
            return;
        } finally {
            in.close();
        }

        if (progress.hasAborted())
            return;

        nProgress = (short) (++i * 100 / individualExportFiles.size());
        if (nProgress > nPreviousProgress) {
            //            LOG.debug("============= doDARwinExport (" + i + "): " + nProgress + "% =============");
            progress.setCurrentStepProgress(nProgress);
            nPreviousProgress = nProgress;
        }

        if (!f.delete()) {
            f.deleteOnExit();
            LOG.info("Unable to delete tmp export file " + f.getAbsolutePath());
        }
    }

    zos.putNextEntry(new ZipEntry(exportName + ".don"));
    zos.write(donFileContents.toString().getBytes());

    // now read variant names for those that induced warnings
    nMarkerIndex = 0;
    markerCursor.batchSize(nChunkSize);
    while (markerCursor.hasNext()) {
        DBObject exportVariant = markerCursor.next();
        if (problematicMarkerIndexToNameMap.containsKey(nMarkerIndex)) {
            Comparable markerId = (Comparable) exportVariant.get("_id");

            if (markerSynonyms != null) {
                Comparable syn = markerSynonyms.get(markerId);
                if (syn != null)
                    markerId = syn;
            }
            for (int j = 0; j < ploidy; j++)
                zos.write(("\t" + markerId).getBytes());

            problematicMarkerIndexToNameMap.put(nMarkerIndex, markerId);
        }
    }

    warningFileWriter.close();
    if (warningFile.length() > 0) {
        zos.putNextEntry(new ZipEntry(exportName + "-REMARKS.txt"));
        int nWarningCount = 0;
        BufferedReader in = new BufferedReader(new FileReader(warningFile));
        String sLine;
        while ((sLine = in.readLine()) != null) {
            for (Integer aMarkerIndex : problematicMarkerIndexToNameMap.keySet())
                sLine = sLine.replaceAll("__" + aMarkerIndex + "__",
                        problematicMarkerIndexToNameMap.get(aMarkerIndex).toString());
            zos.write((sLine + "\n").getBytes());
            in.readLine();
            nWarningCount++;
        }
        LOG.info("Number of Warnings for export (" + exportName + "): " + nWarningCount);
        in.close();
    }
    warningFile.delete();

    zos.close();
    progress.setCurrentStepProgress((short) 100);
}

From source file: org.apache.catalina.core.StandardContext.java

/**
 * Load and initialize all servlets marked "load on startup" in the
 * web application deployment descriptor.
 *
 * @param children Array of wrappers for all currently defined
 *  servlets (including those not declared load on startup)
 */
public void loadOnStartup(Container children[]) {

    // Collect "load on startup" servlets that need to be initialized
    TreeMap map = new TreeMap();
    for (int i = 0; i < children.length; i++) {
        Wrapper wrapper = (Wrapper) children[i];
        int loadOnStartup = wrapper.getLoadOnStartup();
        if (loadOnStartup < 0)
            continue;
        if (loadOnStartup == 0) // Arbitrarily put them last
            loadOnStartup = Integer.MAX_VALUE;
        Integer key = new Integer(loadOnStartup);
        ArrayList list = (ArrayList) map.get(key);
        if (list == null) {
            list = new ArrayList();
            map.put(key, list);
        }
        list.add(wrapper);
    }

    // Load the collected "load on startup" servlets
    Iterator keys = map.keySet().iterator();
    while (keys.hasNext()) {
        Integer key = (Integer) keys.next();
        ArrayList list = (ArrayList) map.get(key);
        Iterator wrappers = list.iterator();
        while (wrappers.hasNext()) {
            Wrapper wrapper = (Wrapper) wrappers.next();
            try {
                wrapper.load();
            } catch (ServletException e) {
                getServletContext().log(sm.getString("standardWrapper.loadException", getName()), e);
                // NOTE: load errors (including a servlet that throws
                // UnavailableException from the init() method) are NOT
                // fatal to application startup
            }
        }
    }

}

From source file: uk.ac.leeds.ccg.andyt.projects.moses.process.RegressionReport_UK1.java

public void writeAggregateStatisticsForOptimisationConstraints_ISARHP_ISARCEP(String a_OutputDir_String)
        throws Exception {
    HashMap a_ID_RecordID_HashMap = _ISARDataHandler.get_ID_RecordID_HashMap();
    File optimisationConstraints_SARs = new File(a_OutputDir_String, "OptimisationConstraints_SARs.csv");
    FileOutputStream a_FileOutputStream = new FileOutputStream(optimisationConstraints_SARs);
    OutputDataHandler_OptimisationConstraints.writeHSARHP_ISARCEPHeader(a_FileOutputStream);
    a_FileOutputStream.flush();
    Object[] fitnessCounts;
    HashMap<String, Integer> a_SARCounts = null;
    TreeSet<String> a_LADCodes_TreeSet = _CASDataHandler.getLADCodes_TreeSet();
    String s2;
    String s1;
    Iterator<String> a_Iterator_String = a_LADCodes_TreeSet.iterator();
    while (a_Iterator_String.hasNext()) {
        // Need to reorder data for each LAD as OAs are not necessarily returned
        // in any order and an ordered result is wanted
        TreeMap<String, HashMap<String, Integer>> resultsForLAD = new TreeMap<String, HashMap<String, Integer>>();
        boolean setPrevious_OA_String = true;
        s1 = a_Iterator_String.next();
        s2 = s1.substring(0, 3);
        File resultsFile = new File(a_OutputDir_String + s2 + "/" + s1 + "/population.csv");
        // A few results are missing
        if (resultsFile.exists()) {
            System.out.println(resultsFile.toString() + " exists");
            String previous_OA_String = "";
            BufferedReader aBufferedReader = new BufferedReader(
                    new InputStreamReader(new FileInputStream(resultsFile)));
            StreamTokenizer aStreamTokenizer = new StreamTokenizer(aBufferedReader);
            Generic_StaticIO.setStreamTokenizerSyntax1(aStreamTokenizer);
            String line = "";
            int tokenType = aStreamTokenizer.nextToken();
            while (tokenType != StreamTokenizer.TT_EOF) {
                switch (tokenType) {
                case StreamTokenizer.TT_EOL:
                    //System.out.println(line);
                    String[] lineFields = line.split(",");
                    String a_OA_String = lineFields[0];
                    if (previous_OA_String.equalsIgnoreCase(a_OA_String)) {
                        if (lineFields[1].equalsIgnoreCase("HP")) {
                            //System.out.println("HP");
                            long a_ISARRecordID = (Long) a_ID_RecordID_HashMap.get(new Long(lineFields[2]));
                            ISARDataRecord a_ISARDataRecord = _ISARDataHandler
                                    .getISARDataRecord(a_ISARRecordID);
                            GeneticAlgorithm_ISARHP_ISARCEP.addToCountsHP(a_ISARDataRecord, a_SARCounts,
                                    _Random);
                            //System.out.println(a_HSARDataRecord.toString());
                        } else {
                            //System.out.println("CEP");
                            // From the id of the ISARDataRecord get the
                            // ISARRecordID.
                            long a_ISARRecordID = (Long) a_ID_RecordID_HashMap.get(new Long(lineFields[2]));
                            ISARDataRecord a_ISARDataRecord = _ISARDataHandler
                                    .getISARDataRecord(a_ISARRecordID);
                            GeneticAlgorithm_ISARHP_ISARCEP.addToCountsCEP(a_ISARDataRecord, a_SARCounts,
                                    _Random);
                        }
                    } else {
                        // Store result
                        if (setPrevious_OA_String) {
                            previous_OA_String = a_OA_String;
                            setPrevious_OA_String = false;
                        } else {
                            // Store
                            resultsForLAD.put(previous_OA_String, a_SARCounts);
                        }
                        // Initialise/Re-initialise
                        CASDataRecord a_CASDataRecord = (CASDataRecord) _CASDataHandler
                                .getDataRecord(a_OA_String);
                        fitnessCounts = GeneticAlgorithm_ISARHP_ISARCEP.getFitnessCounts(a_CASDataRecord);
                        a_SARCounts = (HashMap<String, Integer>) fitnessCounts[1];
                        // Start a new aggregation
                        if (lineFields[1].equalsIgnoreCase("HP")) {
                            //System.out.println("HP");
                            long a_ISARRecordID = (Long) a_ID_RecordID_HashMap.get(new Long(lineFields[2]));
                            ISARDataRecord a_ISARDataRecord = _ISARDataHandler
                                    .getISARDataRecord(a_ISARRecordID);
                            GeneticAlgorithm_ISARHP_ISARCEP.addToCountsHP(a_ISARDataRecord, a_SARCounts,
                                    _Random);
                            //System.out.println(a_HSARDataRecord.toString());
                        } else {
                            //System.out.println("CEP");
                            // From the id of the ISARDataRecord get the
                            // ISARRecordID.
                            long a_ISARRecordID = (Long) a_ID_RecordID_HashMap.get(new Long(lineFields[2]));
                            ISARDataRecord a_ISARDataRecord = _ISARDataHandler
                                    .getISARDataRecord(a_ISARRecordID);
                            GeneticAlgorithm_ISARHP_ISARCEP.addToCountsCEP(a_ISARDataRecord, a_SARCounts,
                                    _Random);
                            //System.out.println(a_ISARDataRecord.toString());
                        }
                        //a_OA_String = lineFields[0];
                    }
                    previous_OA_String = a_OA_String;
                    break;
                case StreamTokenizer.TT_WORD:
                    line = aStreamTokenizer.sval;
                    break;
                }
                tokenType = aStreamTokenizer.nextToken();
            }
        } else {
            System.out.println(resultsFile.toString() + " !exists");
        }
        Iterator<String> string_Iterator = resultsForLAD.keySet().iterator();
        while (string_Iterator.hasNext()) {
            String oa_Code = string_Iterator.next();
            a_SARCounts = resultsForLAD.get(oa_Code);
            //GeneticAlgorithm_ISARHP_ISARCEP.addToCountsCEP(null, a_ID_RecordID_HashMap, _Random)
            OutputDataHandler_OptimisationConstraints.writeISARHP_ISARCEP(a_SARCounts, oa_Code,
                    a_FileOutputStream);
        }
    }
    a_FileOutputStream.close();
}

From source file: edu.hawaii.soest.kilonalu.adam.AdamSource.java

/**
 * A method that processes the data ByteBuffer passed in for the given IP
 * address of the ADAM sensor, parses the binary ADAM data, and flushes the
 * data to the DataTurbine given the sensor properties in the XMLConfiguration
 * passed in.
 *
 * @param datagramAddress - the IP address of the datagram of this packet of data
 * @param xmlConfig       - the XMLConfiguration object containing the list of
 *                          sensor properties
 * @param sampleBuffer    - the binary data sample as a ByteBuffer
 */
protected boolean process(String datagramAddress, XMLConfiguration xmlConfig, ByteBuffer sampleBuffer) {

    logger.debug("AdamSource.process() called.");
    // do not execute the stream if there is no connection
    if (!isConnected())
        return false;

    boolean failed = false;

    try {

        // add channels of data that will be pushed to the server.  
        // Each sample will be sent to the Data Turbine as an rbnb frame.  Information
        // on each channel is found in the XMLConfiguration file (sensors.properties.xml)
        // and the AdamParser object (to get the actual voltages for each ADAM channel)
        ChannelMap rbnbChannelMap = new ChannelMap(); // used to flush channels
        ChannelMap registerChannelMap = new ChannelMap(); // used to register channels
        int channelIndex = 0;

        this.adamParser = new AdamParser(sampleBuffer);

        logger.debug("\n" + "channelZero       : " + this.adamParser.getChannelZero() + "\n"
                + "channelOne        : " + this.adamParser.getChannelOne() + "\n" + "channelTwo        : "
                + this.adamParser.getChannelTwo() + "\n" + "channelThree      : "
                + this.adamParser.getChannelThree() + "\n" + "channelFour       : "
                + this.adamParser.getChannelFour() + "\n" + "channelFive       : "
                + this.adamParser.getChannelFive() + "\n" + "channelSix        : "
                + this.adamParser.getChannelSix() + "\n" + "channelSeven      : "
                + this.adamParser.getChannelSeven() + "\n" + "channelAverage    : "
                + this.adamParser.getChannelAverage() + "\n" + "channelZeroMax    : "
                + this.adamParser.getChannelZeroMax() + "\n" + "channelOneMax     : "
                + this.adamParser.getChannelOneMax() + "\n" + "channelTwoMax     : "
                + this.adamParser.getChannelTwoMax() + "\n" + "channelThreeMax   : "
                + this.adamParser.getChannelThreeMax() + "\n" + "channelFourMax    : "
                + this.adamParser.getChannelFourMax() + "\n" + "channelFiveMax    : "
                + this.adamParser.getChannelFiveMax() + "\n" + "channelSixMax     : "
                + this.adamParser.getChannelSixMax() + "\n" + "channelSevenMax   : "
                + this.adamParser.getChannelSevenMax() + "\n" + "channelAverageMax : "
                + this.adamParser.getChannelAverageMax() + "\n" + "channelZeroMin    : "
                + this.adamParser.getChannelZeroMin() + "\n" + "channelOneMin     : "
                + this.adamParser.getChannelOneMin() + "\n" + "channelTwoMin     : "
                + this.adamParser.getChannelTwoMin() + "\n" + "channelThreeMin   : "
                + this.adamParser.getChannelThreeMin() + "\n" + "channelFourMin    : "
                + this.adamParser.getChannelFourMin() + "\n" + "channelFiveMin    : "
                + this.adamParser.getChannelFiveMin() + "\n" + "channelSixMin     : "
                + this.adamParser.getChannelSixMin() + "\n" + "channelSevenMin   : "
                + this.adamParser.getChannelSevenMin() + "\n" + "channelAverageMin : "
                + this.adamParser.getChannelAverageMin() + "\n"

        );

        // create a TreeMap to hold the voltageChannel and its associated
        // RBNB ChannelMap channel string.  When the RBNB ChannelMap is
        // populated, this TreeMap will be consulted
        TreeMap<Integer, String> voltageChannelTreeMap = new TreeMap<Integer, String>();

        // create a character string to store characters from the voltage values
        StringBuilder decimalASCIISampleData = new StringBuilder();

        // Create a list of sensors from the properties file, and iterate through
        // the list, matching the datagram IP address to the address in the 
        // xml configuration file.  If there is a match, find the correct voltage
        // channel to measurement mappings, create a corresponding RBNB channel
        // map, and flush the data to the DataTurbine.        

        List sensorList = xmlConfig.getList("sensor.address");

        // declare the properties that will be pulled from the 
        // sensor.properties.xml file
        String address = "";
        String sourceName = "";
        String description = "";
        String type = "";
        String cacheSize = "";
        String archiveSize = "";
        String archiveChannel = "";
        String portNumber = "";
        String voltageChannel = "";
        String measurement = "";

        // evaluate each sensor listed in the sensor.properties.xml file
        for (Iterator sIterator = sensorList.iterator(); sIterator.hasNext();) {

            // get each property value of the sensor
            int index = sensorList.indexOf(sIterator.next());
            address = (String) xmlConfig.getProperty("sensor(" + index + ").address");
            sourceName = (String) xmlConfig.getProperty("sensor(" + index + ").name");
            description = (String) xmlConfig.getProperty("sensor(" + index + ").description");
            type = (String) xmlConfig.getProperty("sensor(" + index + ").type");

            logger.debug("Sensor details:" + "\n\t\t\t\t\t\t\t\t\t\taddress     : " + address
                    + "\n\t\t\t\t\t\t\t\t\t\tname        : " + sourceName
                    + "\n\t\t\t\t\t\t\t\t\t\tdescription : " + description
                    + "\n\t\t\t\t\t\t\t\t\t\ttype        : " + type);

            // move to the next sensor if this doesn't match the RBNB source name
            if (!sourceName.equals(getRBNBClientName())) {
                continue;
            }

            List portList = xmlConfig.getList("sensor(" + index + ").ports.port[@number]");
            // get each port of the sensor, along with the port properties
            for (Iterator pIterator = portList.iterator(); pIterator.hasNext();) {
                int pindex = portList.indexOf(pIterator.next());

                // get the port number value
                portNumber = (String) xmlConfig
                        .getProperty("sensor(" + index + ").ports.port(" + pindex + ")[@number]");

                logger.debug("\tport " + portNumber + " details:");

                List measurementList = xmlConfig
                        .getList("sensor(" + index + ").ports.port(" + pindex + ").measurement[@label]");

                // get each measurement and voltageChannel for the given port
                for (Iterator mIterator = measurementList.iterator(); mIterator.hasNext();) {
                    int mindex = measurementList.indexOf(mIterator.next());

                    // build the property paths into the config file
                    String voltagePath = "sensor(" + index + ").ports.port(" + pindex + ").measurement("
                            + mindex + ").voltageChannel";

                    String measurementPath = "sensor(" + index + ").ports.port(" + pindex + ").measurement("
                            + mindex + ")[@label]";

                    // get the voltageChannel and measurement label values
                    voltageChannel = (String) xmlConfig.getProperty(voltagePath);
                    measurement = (String) xmlConfig.getProperty(measurementPath);
                    logger.debug("\t\t" + "voltageChannel: " + voltageChannel + "\n\t\t\t\t\t\t\t\t\t\t\t"
                            + "measurement label: " + measurement);

                    // Match the datagram address with the address in the xmlConfig file
                    if (datagramAddress.equals(address)) {

                        // and only add channel data for this class instance RBNB Source name
                        if (sourceName.equals(getRBNBClientName())) {

                            // create an Integer out of the voltageChannel
                            Integer voltageChannelInt = new Integer(voltageChannel);
                            // build the RBNB channel path string
                            String channelPath = "port" + "/" + portNumber + "/" + measurement;
                            voltageChannelTreeMap.put(voltageChannelInt, channelPath);

                        } else {
                            logger.debug("\t\tSource names don't match: " + sourceName + " != "
                                    + getRBNBClientName());

                        } // end sourceName if() statement

                    } else {
                        logger.debug("\t\tNo IP address match. " + datagramAddress + " != " + address);

                    } //end IP address if() statement
                } // end for each channel
            } // end for each port

            // now that we've found the correct sensor, exit the sensor loop
            break;

        } // end for each sensor

        // Build the RBNB channel map from the entries in the tree map
        // by doing a lookup of the ADAM voltage channel values based
        // on the voltage channel number in the treemap.  Also add the voltages
        // to the DecimalASCIISampleData string (and then channel)
        for (Iterator vcIterator = voltageChannelTreeMap.keySet().iterator(); vcIterator.hasNext();) {

            int voltageChannelFromMap = ((Integer) vcIterator.next()).intValue();
            String channelPathFromMap = voltageChannelTreeMap.get(voltageChannelFromMap);
            float voltageValue = -9999.0f;

            // look up the voltage value from the AdamParser object based
            // on the voltage channel set in the xmlConfig file (via the treemap)
            switch (voltageChannelFromMap) {
            case 0:
                voltageValue = this.adamParser.getChannelZero();
                break;
            case 1:
                voltageValue = this.adamParser.getChannelOne();
                break;
            case 2:
                voltageValue = this.adamParser.getChannelTwo();
                break;
            case 3:
                voltageValue = this.adamParser.getChannelThree();
                break;
            case 4:
                voltageValue = this.adamParser.getChannelFour();
                break;
            case 5:
                voltageValue = this.adamParser.getChannelFive();
                break;
            case 6:
                voltageValue = this.adamParser.getChannelSix();
                break;
            case 7:
                voltageValue = this.adamParser.getChannelSeven();
                break;
            }

            // now add the channel and the voltage value to the RBNB channel maps

            channelIndex = registerChannelMap.Add(channelPathFromMap);
            registerChannelMap.PutUserInfo(channelIndex, "units=volts");
            registerChannelMap.PutUserInfo(channelIndex, "description=" + description);

            logger.debug("Voltage Channel Tree Map: " + voltageChannelTreeMap.toString());

            // then the channel and voltage
            channelIndex = rbnbChannelMap.Add(channelPathFromMap);
            rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
            rbnbChannelMap.PutDataAsFloat32(channelIndex, new float[] { voltageValue });
            decimalASCIISampleData.append(String.format("%05.3f", (Object) voltageValue) + ", ");

        }

        // and only flush data for this class instance RBNB Source name
        if (sourceName.equals(getRBNBClientName()) && datagramAddress.equals(address)) {

            // add the timestamp to the rbnb channel map
            registerChannelMap.PutTimeAuto("server");
            rbnbChannelMap.PutTimeAuto("server");

            // then add a timestamp to the end of the ASCII version of the sample
            DATE_FORMAT.setTimeZone(TZ);
            String sampleDateAsString = DATE_FORMAT.format(new Date()).toString();
            decimalASCIISampleData.append(sampleDateAsString);
            decimalASCIISampleData.append("\n");

            // add the DecimalASCIISampleData channel to the channelMap
            channelIndex = registerChannelMap.Add(getRBNBChannelName());
            channelIndex = rbnbChannelMap.Add(getRBNBChannelName());
            rbnbChannelMap.PutMime(channelIndex, "text/plain");
            rbnbChannelMap.PutDataAsString(channelIndex, decimalASCIISampleData.toString());

            // Now register the RBNB channels, and flush the rbnbChannelMap to the
            // DataTurbine
            getSource().Register(registerChannelMap);
            getSource().Flush(rbnbChannelMap);
            logger.info(getRBNBClientName() + " Sample sent to the DataTurbine: "
                    + decimalASCIISampleData.toString());
            registerChannelMap.Clear();
            rbnbChannelMap.Clear();

            sampleBuffer.clear();
        } else {
            logger.debug("\t\tSource names don't match: " + sourceName + " != " + getRBNBClientName());
            registerChannelMap.Clear();
            rbnbChannelMap.Clear();

            sampleBuffer.clear();
        }

    } catch (SAPIException sapie) {
        // In the event of an RBNB communication  exception, log the exception, 
        // and allow execute() to return false, which will prompt a retry.
        failed = true;
        sapie.printStackTrace();
        return !failed;

    }

    return !failed;
}

From source file: org.opendatakit.services.database.utlities.ODKDatabaseImplUtils.java

/**
 * If the caller specified a complex json value for a structured type, flush
 * the value through to the individual columns.
 *
 * @param orderedColumns
 * @param values
 */
private void cleanUpValuesMap(OrderedColumns orderedColumns, Map<String, Object> values) {

    TreeMap<String, String> toBeResolved = new TreeMap<String, String>();

    for (String key : values.keySet()) {
        if (DataTableColumns.CONFLICT_TYPE.equals(key)) {
            continue;
        } else if (DataTableColumns.FILTER_TYPE.equals(key)) {
            continue;
        } else if (DataTableColumns.FILTER_VALUE.equals(key)) {
            continue;
        } else if (DataTableColumns.FORM_ID.equals(key)) {
            continue;
        } else if (DataTableColumns.ID.equals(key)) {
            continue;
        } else if (DataTableColumns.LOCALE.equals(key)) {
            continue;
        } else if (DataTableColumns.ROW_ETAG.equals(key)) {
            continue;
        } else if (DataTableColumns.SAVEPOINT_CREATOR.equals(key)) {
            continue;
        } else if (DataTableColumns.SAVEPOINT_TIMESTAMP.equals(key)) {
            continue;
        } else if (DataTableColumns.SAVEPOINT_TYPE.equals(key)) {
            continue;
        } else if (DataTableColumns.SYNC_STATE.equals(key)) {
            continue;
        } else if (DataTableColumns._ID.equals(key)) {
            continue;
        }
        // OK it is one of the data columns
        ColumnDefinition cp = orderedColumns.find(key);
        if (!cp.isUnitOfRetention()) {
            toBeResolved.put(key, (String) values.get(key));
        }
    }

    // remove these non-retained values from the values set...
    for (String key : toBeResolved.keySet()) {
        values.remove(key);
    }

    while (!toBeResolved.isEmpty()) {

        TreeMap<String, String> moreToResolve = new TreeMap<String, String>();

        for (Map.Entry<String, String> entry : toBeResolved.entrySet()) {
            String key = entry.getKey();
            String json = entry.getValue();
            if (json == null) {
                // don't need to do anything
                // since the value is null
                continue;
            }
            ColumnDefinition cp = orderedColumns.find(key);
            try {
                TypeReference<Map<String, Object>> reference = new TypeReference<Map<String, Object>>() {
                };
                Map<String, Object> struct = ODKFileUtils.mapper.readValue(json, reference);
                for (ColumnDefinition child : cp.getChildren()) {
                    String subkey = child.getElementKey();
                    ColumnDefinition subcp = orderedColumns.find(subkey);
                    if (subcp.isUnitOfRetention()) {
                        ElementType subtype = subcp.getType();
                        ElementDataType type = subtype.getDataType();
                        if (type == ElementDataType.integer) {
                            values.put(subkey, (Integer) struct.get(subcp.getElementName()));
                        } else if (type == ElementDataType.number) {
                            values.put(subkey, (Double) struct.get(subcp.getElementName()));
                        } else if (type == ElementDataType.bool) {
                            values.put(subkey, ((Boolean) struct.get(subcp.getElementName())) ? 1 : 0);
                        } else {
                            values.put(subkey, (String) struct.get(subcp.getElementName()));
                        }
                    } else {
                        // this must be a javascript structure... re-JSON it and save (for
                        // next round).
                        moreToResolve.put(subkey,
                                ODKFileUtils.mapper.writeValueAsString(struct.get(subcp.getElementName())));
                    }
                }
            } catch (JsonParseException e) {
                e.printStackTrace();
                throw new IllegalStateException("should not be happening");
            } catch (JsonMappingException e) {
                e.printStackTrace();
                throw new IllegalStateException("should not be happening");
            } catch (IOException e) {
                e.printStackTrace();
                throw new IllegalStateException("should not be happening");
            }
        }

        toBeResolved = moreToResolve;
    }
}
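
Note that keySet() returns a view backed by the map, which is why the method above first collects the non-retained keys into toBeResolved and only removes them from values in a separate loop; removing entries from values while iterating values.keySet() directly would throw a ConcurrentModificationException. A minimal sketch (illustrative map contents and key predicate) of the two safe removal patterns:

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.TreeMap;

public class SafeRemovalDemo {
    public static void main(String[] args) {
        TreeMap<String, Object> values = new TreeMap<String, Object>();
        values.put("_id", "row-1");
        values.put("geopoint", "{\"lat\": 1, \"lon\": 2}");
        values.put("label", "example");

        // Pattern 1: collect the keys first, then remove them afterwards
        // (this is what cleanUpValuesMap() does via toBeResolved).
        List<String> toRemove = new ArrayList<String>();
        for (String key : values.keySet()) {
            if (key.startsWith("geo")) {
                toRemove.add(key);
            }
        }
        for (String key : toRemove) {
            values.remove(key);
        }

        // Pattern 2: remove through the view's own iterator, which is the
        // one modification allowed while iterating the keySet() view.
        for (Iterator<String> it = values.keySet().iterator(); it.hasNext();) {
            if (it.next().startsWith("_")) {
                it.remove();
            }
        }
        System.out.println(values); // prints {label=example}
    }
}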

From source file: fr.cirad.mgdb.exporting.individualoriented.PLinkExportHandler.java

@Override
public void exportData(OutputStream outputStream, String sModule, Collection<File> individualExportFiles,
        boolean fDeleteSampleExportFilesOnExit, ProgressIndicator progress, DBCursor markerCursor,
        Map<Comparable, Comparable> markerSynonyms, Map<String, InputStream> readyToExportFiles)
        throws Exception {
    File warningFile = File.createTempFile("export_warnings_", "");
    FileWriter warningFileWriter = new FileWriter(warningFile);

    ZipOutputStream zos = new ZipOutputStream(outputStream);

    if (readyToExportFiles != null)
        for (String readyToExportFile : readyToExportFiles.keySet()) {
            zos.putNextEntry(new ZipEntry(readyToExportFile));
            InputStream inputStream = readyToExportFiles.get(readyToExportFile);
            byte[] dataBlock = new byte[1024];
            int count = inputStream.read(dataBlock, 0, 1024);
            while (count != -1) {
                zos.write(dataBlock, 0, count);
                count = inputStream.read(dataBlock, 0, 1024);
            }
        }

    MongoTemplate mongoTemplate = MongoTemplateManager.get(sModule);
    int markerCount = markerCursor.count();

    String exportName = sModule + "_" + markerCount + "variants_" + individualExportFiles.size()
            + "individuals";
    zos.putNextEntry(new ZipEntry(exportName + ".ped"));

    TreeMap<Integer, Comparable> problematicMarkerIndexToNameMap = new TreeMap<Integer, Comparable>();
    short nProgress = 0, nPreviousProgress = 0;
    int i = 0;
    for (File f : individualExportFiles) {
        BufferedReader in = new BufferedReader(new FileReader(f));
        try {
            String individualId, line = in.readLine(); // read sample id
            if (line != null) {
                individualId = line;
                String population = getIndividualPopulation(sModule, line);
                String individualInfo = (population == null ? "." : population) + " " + individualId;
                zos.write((individualInfo + " 0 0 0 " + getIndividualGenderCode(sModule, individualId))
                        .getBytes());
            } else
                throw new Exception("Unable to read first line of temp export file " + f.getName());

            int nMarkerIndex = 0;
            while ((line = in.readLine()) != null) {
                List<String> genotypes = MgdbDao.split(line, "|");
                HashMap<Object, Integer> genotypeCounts = new HashMap<Object, Integer>(); // will help us to keep track of missing genotypes
                int highestGenotypeCount = 0;
                String mostFrequentGenotype = null;
                for (String genotype : genotypes) {
                    if (genotype.length() == 0)
                        continue; /* skip missing genotypes */

                    int gtCount = 1 + MgdbDao.getCountForKey(genotypeCounts, genotype);
                    if (gtCount > highestGenotypeCount) {
                        highestGenotypeCount = gtCount;
                        mostFrequentGenotype = genotype;
                    }
                    genotypeCounts.put(genotype, gtCount);
                }

                if (genotypeCounts.size() > 1) {
                    warningFileWriter.write("- Dissimilar genotypes found for variant " + nMarkerIndex
                            + ", individual " + individualId + ". Exporting most frequent: "
                            + mostFrequentGenotype + "\n");
                    problematicMarkerIndexToNameMap.put(nMarkerIndex, "");
                }

                String[] alleles = mostFrequentGenotype == null ? new String[0]
                        : mostFrequentGenotype.split(" ");
                if (alleles.length > 2) {
                    warningFileWriter.write("- More than 2 alleles found for variant " + nMarkerIndex
                            + ", individual " + individualId + ". Exporting only the first 2 alleles.\n");
                    problematicMarkerIndexToNameMap.put(nMarkerIndex, "");
                }

                String all1 = alleles.length == 0 ? "0" : alleles[0];
                String all2 = alleles.length == 0 ? "0" : alleles[alleles.length == 1 ? 0 : 1];
                if (all1.length() != 1 || all2.length() != 1) {
                    warningFileWriter
                            .write("- SNP expected, but alleles are not coded on a single char for variant "
                                    + nMarkerIndex + ", individual " + individualId
                                    + ". Ignoring this genotype.\n");
                    problematicMarkerIndexToNameMap.put(nMarkerIndex, "");
                } else
                    zos.write((" " + all1 + " " + all2).getBytes());

                nMarkerIndex++;
            }
        } catch (Exception e) {
            LOG.error("Error exporting data", e);
            progress.setError("Error exporting data: " + e.getClass().getSimpleName()
                    + (e.getMessage() != null ? " - " + e.getMessage() : ""));
            return;
        } finally {
            in.close();
        }

        if (progress.hasAborted())
            return;

        nProgress = (short) (++i * 100 / individualExportFiles.size());
        if (nProgress > nPreviousProgress) {
            progress.setCurrentStepProgress(nProgress);
            nPreviousProgress = nProgress;
        }
        zos.write('\n');

        if (!f.delete()) {
            f.deleteOnExit();
            LOG.info("Unable to delete tmp export file " + f.getAbsolutePath());
        }
    }
    warningFileWriter.close();

    zos.putNextEntry(new ZipEntry(exportName + ".map"));

    int avgObjSize = (Integer) mongoTemplate
            .getCollection(mongoTemplate.getCollectionName(VariantRunData.class)).getStats().get("avgObjSize");
    int nChunkSize = nMaxChunkSizeInMb * 1024 * 1024 / avgObjSize;

    markerCursor.batchSize(nChunkSize);
    int nMarkerIndex = 0;
    while (markerCursor.hasNext()) {
        DBObject exportVariant = markerCursor.next();
        DBObject refPos = (DBObject) exportVariant.get(VariantData.FIELDNAME_REFERENCE_POSITION);
        Comparable markerId = (Comparable) exportVariant.get("_id");
        String chrom = (String) refPos.get(ReferencePosition.FIELDNAME_SEQUENCE);
        Long pos = ((Number) refPos.get(ReferencePosition.FIELDNAME_START_SITE)).longValue();

        if (chrom == null)
            LOG.warn("Chromosomal position not found for marker " + markerId);
        Comparable exportedId = markerSynonyms == null ? markerId : markerSynonyms.get(markerId);
        zos.write(((chrom == null ? "0" : chrom) + " " + exportedId + " " + 0 + " " + (pos == null ? 0 : pos)
                + LINE_SEPARATOR).getBytes());

        if (problematicMarkerIndexToNameMap.containsKey(nMarkerIndex)) { // we are going to need this marker's name for the warning file
            Comparable variantName = markerId;
            if (markerSynonyms != null) {
                Comparable syn = markerSynonyms.get(markerId);
                if (syn != null)
                    variantName = syn;
            }
            problematicMarkerIndexToNameMap.put(nMarkerIndex, variantName);
        }
        nMarkerIndex++;
    }

    if (warningFile.length() > 0) {
        zos.putNextEntry(new ZipEntry(exportName + "-REMARKS.txt"));
        int nWarningCount = 0;
        BufferedReader in = new BufferedReader(new FileReader(warningFile));
        String sLine;
        while ((sLine = in.readLine()) != null) {
            for (Integer aMarkerIndex : problematicMarkerIndexToNameMap.keySet())
                sLine = sLine.replaceAll("__" + aMarkerIndex + "__",
                        problematicMarkerIndexToNameMap.get(aMarkerIndex).toString());
            zos.write((sLine + "\n").getBytes());
            in.readLine();
            nWarningCount++;
        }
        LOG.info("Number of Warnings for export (" + exportName + "): " + nWarningCount);
        in.close();
    }
    warningFile.delete();

    zos.close();
    progress.setCurrentStepProgress((short) 100);
}

From source file: uk.ac.leeds.ccg.andyt.projects.moses.process.RegressionReport_UK1.java

public void writeAggregateStatisticsForOptimisationConstraints_HSARHP_ISARCEP(String a_OutputDir_String)
        throws Exception {
    HashMap a_HID_HSARDataRecordVector_HashMap = _HSARDataHandler.get_HID_HSARDataRecordVector_HashMap();
    HashMap a_ID_RecordID_HashMap = _ISARDataHandler.get_ID_RecordID_HashMap();
    File optimisationConstraints_SARs = new File(a_OutputDir_String, "OptimisationConstraints_SARs.csv");
    FileOutputStream a_FileOutputStream = new FileOutputStream(optimisationConstraints_SARs);
    OutputDataHandler_OptimisationConstraints.writeHSARHP_ISARCEPHeader(a_FileOutputStream);
    a_FileOutputStream.flush();
    HashMap<String, Integer> a_SARCounts = null;
    CASDataRecord a_CASDataRecord;
    TreeSet<String> a_LADCodes_TreeSet = _CASDataHandler.getLADCodes_TreeSet();
    String s2;
    String s1;
    Iterator<String> a_Iterator_String = a_LADCodes_TreeSet.iterator();
    while (a_Iterator_String.hasNext()) {
        // Need to reorder data for each LAD as OAs are not necessarily returned
        // in any order and an ordered result is wanted
        TreeMap<String, HashMap<String, Integer>> resultsForLAD = new TreeMap<String, HashMap<String, Integer>>();
        boolean setPrevious_OA_String = true;
        s1 = a_Iterator_String.next();
        s2 = s1.substring(0, 3);
        File resultsFile = new File(a_OutputDir_String + s2 + "/" + s1 + "/population.csv");
        // A few results are missing
        if (resultsFile.exists()) {
            System.out.println(resultsFile.toString() + " exists");
            String previous_OA_String = "";
            BufferedReader aBufferedReader = new BufferedReader(
                    new InputStreamReader(new FileInputStream(resultsFile)));
            StreamTokenizer aStreamTokenizer = new StreamTokenizer(aBufferedReader);
            Generic_StaticIO.setStreamTokenizerSyntax1(aStreamTokenizer);
            String line = "";
            int tokenType = aStreamTokenizer.nextToken();
            while (tokenType != StreamTokenizer.TT_EOF) {
                switch (tokenType) {
                case StreamTokenizer.TT_EOL:
                    //System.out.println(line);
                    String[] lineFields = line.split(",");
                    String a_OA_String = lineFields[0];
                    if (previous_OA_String.equalsIgnoreCase(a_OA_String)) {
                        if (lineFields[1].equalsIgnoreCase("HP")) {
                            //System.out.println("HP");
                            // From the id of a household get a Vector 
                            // of HSARDataRecords
                            Vector household = (Vector) a_HID_HSARDataRecordVector_HashMap
                                    .get(new Integer(lineFields[2]));
                            HSARDataRecord a_HSARDataRecord;
                            for (int i = 0; i < household.size(); i++) {
                                a_HSARDataRecord = (HSARDataRecord) household.elementAt(i);
                                GeneticAlgorithm_HSARHP_ISARCEP.addToCounts(a_HSARDataRecord, a_SARCounts,
                                        _Random);
                            }
                            //System.out.println(a_HSARDataRecord.toString());
                        } else {
                            //System.out.println("CEP");
                            // From the id of the ISARDataRecord get the
                            // ISARRecordID.
                            long a_ISARRecordID = (Long) a_ID_RecordID_HashMap.get(new Long(lineFields[2]));
                            ISARDataRecord a_ISARDataRecord = _ISARDataHandler
                                    .getISARDataRecord(a_ISARRecordID);
                            GeneticAlgorithm_HSARHP_ISARCEP.addToCountsCEP(a_ISARDataRecord, a_SARCounts,
                                    _Random);
                        }
                    } else {
                        // Store result
                        if (setPrevious_OA_String) {
                            previous_OA_String = a_OA_String;
                            setPrevious_OA_String = false;
                        } else {
                            // Store
                            resultsForLAD.put(previous_OA_String, a_SARCounts);
                        }
                        // Initialise/Re-initialise
                        a_CASDataRecord = (CASDataRecord) _CASDataHandler.getDataRecord(a_OA_String);
                        Object[] fitnessCounts = GeneticAlgorithm_HSARHP_ISARCEP
                                .getFitnessCounts(a_CASDataRecord);
                        a_SARCounts = (HashMap<String, Integer>) fitnessCounts[1];
                        // Start a new aggregation
                        if (lineFields[1].equalsIgnoreCase("HP")) {
                            //System.out.println("HP");
                            // From the id of a household get a Vector
                            // of HSARDataRecords
                            Vector household = (Vector) a_HID_HSARDataRecordVector_HashMap
                                    .get(new Integer(lineFields[2]));
                            HSARDataRecord a_HSARDataRecord;
                            for (int i = 0; i < household.size(); i++) {
                                a_HSARDataRecord = (HSARDataRecord) household.elementAt(i);
                                GeneticAlgorithm_HSARHP_ISARCEP.addToCounts(a_HSARDataRecord, a_SARCounts,
                                        _Random);
                            }
                            //System.out.println(a_HSARDataRecord.toString());
                        } else {
                            //System.out.println("CEP");
                            // From the id of the ISARDataRecord get the
                            // ISARRecordID.
                            long a_ISARRecordID = (Long) a_ID_RecordID_HashMap.get(new Long(lineFields[2]));
                            ISARDataRecord a_ISARDataRecord = _ISARDataHandler
                                    .getISARDataRecord(a_ISARRecordID);
                            GeneticAlgorithm_HSARHP_ISARCEP.addToCountsCEP(a_ISARDataRecord, a_SARCounts,
                                    _Random);
                            //System.out.println(a_ISARDataRecord.toString());
                        }
                        //a_OA_String = lineFields[0];
                    }
                    previous_OA_String = a_OA_String;
                    break;
                case StreamTokenizer.TT_WORD:
                    line = aStreamTokenizer.sval;
                    break;
                }
                tokenType = aStreamTokenizer.nextToken();
            }
        } else {
            System.out.println(resultsFile.toString() + " !exists");
        }
        Iterator<String> string_Iterator = resultsForLAD.keySet().iterator();
        while (string_Iterator.hasNext()) {
            String oa_Code = string_Iterator.next();
            OutputDataHandler_OptimisationConstraints.writeHSARHP_ISARCEP(resultsForLAD.get(oa_Code), oa_Code,
                    a_FileOutputStream);
        }
    }
    a_FileOutputStream.close();
}