Example usage for java.util TreeMap remove

List of usage examples for java.util TreeMap remove

Introduction

On this page you can find example usages of java.util TreeMap remove.

Prototype

public V remove(Object key) 

Document

Removes the mapping for this key from this TreeMap if present.
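
A minimal, self-contained sketch of this contract: remove returns the value that was previously mapped to the key, or null if there was no mapping (a null return can also mean the key was explicitly mapped to null).

import java.util.TreeMap;

public class TreeMapRemoveDemo {
    public static void main(String[] args) {
        TreeMap<String, Integer> map = new TreeMap<>();
        map.put("a", 1);
        map.put("b", 2);

        Integer removed = map.remove("a"); // returns the previous value: 1
        Integer missing = map.remove("z"); // no mapping: returns null

        System.out.println(removed); // 1
        System.out.println(missing); // null
        System.out.println(map);     // {b=2}
    }
}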

Usage

From source file:com.ape.camera2raw.Camera2RawFragment.java

/**
 * If the given request has been completed, remove it from the queue of active requests and
 * send an {@link ImageSaver} with the results from this request to a background thread to
 * save a file.
 * <p/>
 * Call this only with {@link #mCameraStateLock} held.
 *
 * @param requestId the ID of the {@link CaptureRequest} to handle.
 * @param builder   the {@link ImageSaver.ImageSaverBuilder} for this request.
 * @param queue     the queue to remove this request from, if completed.
 */
private void handleCompletionLocked(int requestId, ImageSaver.ImageSaverBuilder builder,
        TreeMap<Integer, ImageSaver.ImageSaverBuilder> queue) {
    if (builder == null)
        return;
    ImageSaver saver = builder.buildIfComplete();
    if (saver != null) {
        queue.remove(requestId);
        AsyncTask.THREAD_POOL_EXECUTOR.execute(saver);
    }
}
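
A distilled sketch of the pattern above (class and variable names are invented for illustration): the int request ID is autoboxed to the map's Integer key type by remove(Object), and the return value can tell the caller whether the completed request was actually tracked.

import java.util.TreeMap;

public class PendingRequestQueue {
    public static void main(String[] args) {
        TreeMap<Integer, String> queue = new TreeMap<>();
        queue.put(42, "capture-request-42");

        int requestId = 42; // autoboxed to Integer by remove(Object)
        String removed = queue.remove(requestId);
        if (removed == null) {
            System.out.println("request " + requestId + " was not queued");
        } else {
            System.out.println("completed " + removed);
        }
    }
}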

From source file:org.opennms.features.vaadin.pmatrix.calculator.PmatrixDpdCalculatorRepository.java

/**
 * Causes the dataPointMap to be persisted to a file.
 * @return true if the dataPointMap persisted correctly, false if not
 */
public boolean persist() {
    File currentArchiveFile = null;
    File tmpCurrentArchiveFile = null;
    Resource tmpResource = null;

    if (!persistHistoricData) {
        LOG.debug("not persisting data as persistHistoricData=false");
        return false;
    }

    if (archiveFileName == null || archiveFileDirectoryLocation == null) {
        LOG.error("cannot save historical data to file as incorrect file location:"
                + " archiveFileDirectoryLocation=" + archiveFileDirectoryLocation + " archiveFileName="
                + archiveFileName);
        return false;
    }

    // set the date on which this file was persisted
    datePersisted = new Date();

    // used to get file name suffix
    SimpleDateFormat dateFormatter = new SimpleDateFormat(dateFormatString);

    // persist data to temporary file <archiveFileName.tmp>
    String tmpArchiveFileName = archiveFileName + ".tmp";
    String tmpArchiveFileLocation = archiveFileDirectoryLocation + File.separator + tmpArchiveFileName;
    LOG.debug("historical data will be written to temporary file :" + tmpArchiveFileLocation);

    try {
        tmpResource = resourceLoader.getResource(tmpArchiveFileLocation);
        tmpCurrentArchiveFile = new File(tmpResource.getURL().getFile());
    } catch (IOException e) {
        LOG.error("cannot save historical data to file at archiveFileLocation='" + tmpArchiveFileLocation
                + "' due to error:", e);
        return false;
    }

    LOG.debug(
            "persisting historical data to temporary file location:" + tmpCurrentArchiveFile.getAbsolutePath());

    // marshal the data
    PrintWriter writer = null;
    boolean marshalledCorrectly = false;
    try {
        // create the directory if it doesn't exist
        File directory = new File(tmpCurrentArchiveFile.getParentFile().getAbsolutePath());
        directory.mkdirs();
        // create the file if it doesn't exist
        writer = new PrintWriter(tmpCurrentArchiveFile, "UTF-8");
        writer.close();

        // see http://stackoverflow.com/questions/1043109/why-cant-jaxb-find-my-jaxb-index-when-running-inside-apache-felix
        // need to provide bundles class loader
        ClassLoader cl = org.opennms.features.vaadin.pmatrix.model.DataPointDefinition.class.getClassLoader();
        JAXBContext jaxbContext = JAXBContext.newInstance(
                "org.opennms.features.vaadin.pmatrix.model:org.opennms.features.vaadin.pmatrix.calculator", cl);

        //JAXBContext jaxbContext = JAXBContext.newInstance("org.opennms.features.vaadin.pmatrix.model:org.opennms.features.vaadin.pmatrix.calculator");

        Marshaller jaxbMarshaller = jaxbContext.createMarshaller();
        jaxbMarshaller.setProperty(Marshaller.JAXB_ENCODING, "UTF-8");

        // TODO CHANGE output pretty printed
        jaxbMarshaller.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, true);

        // marshal this Data Repository
        jaxbMarshaller.marshal(this, tmpCurrentArchiveFile);

        marshalledCorrectly = true;
    } catch (JAXBException e) {
        LOG.error("problem marshalling file: ", e);
    } catch (Exception e) {
        LOG.error("problem marshalling file: ", e);
    } finally {
        if (writer != null)
            writer.close();
    }
    if (!marshalledCorrectly)
        return false;

    // marshaling succeeded so rename tmp file
    String archiveFileLocation = archiveFileDirectoryLocation + File.separator + archiveFileName;
    LOG.info("historical data will be written to:" + archiveFileLocation);

    Resource resource = resourceLoader.getResource(archiveFileLocation);

    if (resource.exists()) {
        String oldArchiveFileName = archiveFileName + "." + dateFormatter.format(datePersisted);
        String oldArchiveFileLocation = archiveFileDirectoryLocation + File.separator + oldArchiveFileName;
        LOG.info("previous historical file at archiveFileLocation='" + archiveFileLocation
                + "' exists so being renamed to " + oldArchiveFileLocation);
        Resource oldresource = resourceLoader.getResource(oldArchiveFileLocation);
        try {
            currentArchiveFile = new File(resource.getURL().getFile());
            File oldArchiveFile = new File(oldresource.getURL().getFile());
            // rename current archive file to old archive file name
            if (!currentArchiveFile.renameTo(oldArchiveFile)) {
                throw new IOException("cannot rename current archive file:"
                        + currentArchiveFile.getAbsolutePath() + " to " + oldArchiveFile.getAbsolutePath());
            }
            // rename temporary archive file to current archive file name
            if (!tmpCurrentArchiveFile.renameTo(currentArchiveFile)) {
                throw new IOException("cannot rename temporary current archive file:"
                        + tmpCurrentArchiveFile.getAbsolutePath() + " to "
                        + currentArchiveFile.getAbsolutePath());
            }
        } catch (IOException e) {
            LOG.error("Problem archiving old persistance file", e);
        }
        // remove excess files
        try {
            Resource directoryResource = resourceLoader.getResource(archiveFileDirectoryLocation);
            File archiveFolder = new File(directoryResource.getURL().getFile());
            File[] listOfFiles = archiveFolder.listFiles();

            String filename;
            //this will sort earliest to latest date
            TreeMap<Date, File> sortedFiles = new TreeMap<Date, File>();

            for (int i = 0; i < listOfFiles.length; i++) {
                if (listOfFiles[i].isFile()) {
                    filename = listOfFiles[i].getName();
                    if ((!filename.equals(archiveFileName)) && (!filename.equals(tmpArchiveFileName))
                            && (filename.startsWith(archiveFileName))) {
                        String beforeTimeString = archiveFileName + ".";
                        String timeSuffix = filename.substring(beforeTimeString.length());
                        if (!"".equals(timeSuffix)) {
                            Date fileCreatedate = null;
                            try {
                                fileCreatedate = dateFormatter.parse(timeSuffix);
                            } catch (ParseException e) {
                                LOG.debug("cant parse file name suffix to time for filename:" + filename, e);
                            }
                            if (fileCreatedate != null) {
                                sortedFiles.put(fileCreatedate, listOfFiles[i]);
                            }
                        }
                    }
                }
            }

            while (sortedFiles.size() > archiveFileMaxNumber) {
                File removeFile = sortedFiles.remove(sortedFiles.firstKey());
                LOG.debug("deleting archive file:'" + removeFile.getName()
                        + "' so that number of archive files <=" + archiveFileMaxNumber);
                removeFile.delete();
            }
            for (File archivedFile : sortedFiles.values()) {
                LOG.debug("not deleting archive file:'" + archivedFile.getName()
                        + "' so that number of archive files <=" + archiveFileMaxNumber);
            }

            return true;
        } catch (IOException e) {
            LOG.error("Problem removing old persistance files", e);
        }
    } else {
        // if resource doesn't exist just rename the new tmp file to the archive file name
        try {
            currentArchiveFile = new File(resource.getURL().getFile());
            // rename temporary archive file to current archive file name
            if (!tmpCurrentArchiveFile.renameTo(currentArchiveFile)) {
                throw new IOException("cannot rename temporary current archive file:"
                        + tmpCurrentArchiveFile.getAbsolutePath() + " to "
                        + currentArchiveFile.getAbsolutePath());
            }
            return true;
        } catch (IOException e) {
            LOG.error("cannot rename temporary current archive ", e);
        }
    }

    return false;

}
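
The retention loop above relies on TreeMap's key ordering: sortedFiles.firstKey() is always the earliest date, so remove(firstKey()) repeatedly evicts the oldest archive first. A minimal sketch of the same idea, using pollFirstEntry (which removes and returns the eldest mapping in one call) as an equivalent; the file names and dates are invented for illustration.

import java.io.File;
import java.util.Date;
import java.util.Map;
import java.util.TreeMap;

public class OldestFirstEviction {
    public static void main(String[] args) {
        // TreeMap sorts by key, so Date keys order the files earliest to latest.
        TreeMap<Date, File> sortedFiles = new TreeMap<>();
        sortedFiles.put(new Date(1000L), new File("archive.1"));
        sortedFiles.put(new Date(2000L), new File("archive.2"));
        sortedFiles.put(new Date(3000L), new File("archive.3"));

        int maxFiles = 1;
        // Same effect as remove(sortedFiles.firstKey()) in the snippet above.
        while (sortedFiles.size() > maxFiles) {
            Map.Entry<Date, File> eldest = sortedFiles.pollFirstEntry();
            System.out.println("would delete " + eldest.getValue().getName());
        }
        System.out.println("kept: " + sortedFiles.values());
    }
}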

From source file:hydrograph.ui.dataviewer.filter.FilterHelper.java

/**
 * Rearrange group columns.
 * 
 * @param groupSelectionMap
 *            the group selection map
 */
public void rearrangeGroupColumns(TreeMap<Integer, List<List<Integer>>> groupSelectionMap) {
    Map<Integer, List<List<Integer>>> tempMap = new TreeMap<Integer, List<List<Integer>>>(groupSelectionMap);
    for (int key : tempMap.keySet()) {
        List<List<Integer>> groups = tempMap.get(key);
        List<Integer> tempList = new ArrayList<>();
        for (List<Integer> grp : groups) {
            tempList.addAll(grp);
        }
        if (tempList.isEmpty()) {
            for (int i = key; i < tempMap.size() - 1; i++) {
                groupSelectionMap.put(i, tempMap.get(i + 1));
            }
            groupSelectionMap.remove(groupSelectionMap.lastKey());
        }
    }
}
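
Copying groupSelectionMap into tempMap before the loop is what makes the in-loop remove safe: mutating a TreeMap while iterating its own key set would throw ConcurrentModificationException. A condensed sketch of the same shift-then-remove-lastKey idea, with hypothetical data:

import java.util.Map;
import java.util.TreeMap;

public class CompactTreeMap {
    public static void main(String[] args) {
        TreeMap<Integer, String> groups = new TreeMap<>();
        groups.put(0, "a");
        groups.put(1, "");   // pretend this group became empty
        groups.put(2, "c");

        // Iterate over a snapshot so mutating the original map is safe.
        for (Map.Entry<Integer, String> e : new TreeMap<>(groups).entrySet()) {
            if (e.getValue().isEmpty()) {
                // shift later entries down by one key...
                for (int i = e.getKey(); i < groups.lastKey(); i++) {
                    groups.put(i, groups.get(i + 1));
                }
                // ...then drop the now-duplicated last entry
                groups.remove(groups.lastKey());
            }
        }
        System.out.println(groups); // {0=a, 1=c}
    }
}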

From source file:com.datatorrent.contrib.hdht.HDHTWriter.java

/**
 * Flush changes from write cache to disk. New data files will be written and meta data replaced atomically. The flush
 * frequency determines availability of changes to external readers.
 *
 * @throws IOException
 */
private void writeDataFiles(Bucket bucket) throws IOException {
    BucketIOStats ioStats = getOrCretaStats(bucket.bucketKey);
    LOG.debug("Writing data files in bucket {}", bucket.bucketKey);
    // copy meta data on write
    BucketMeta bucketMetaCopy = kryo.copy(getMeta(bucket.bucketKey));

    /** Process purge requests before flushing data from cache to maintain
     * the order of purge and put operations. This makes sure that purged data
     * is removed from files before new data is added to them. */
    HashSet<String> filesToDelete = Sets.newHashSet();
    bucketMetaCopy = processPurge(bucket, bucketMetaCopy, filesToDelete);

    // bucket keys by file
    TreeMap<Slice, BucketFileMeta> bucketSeqStarts = bucketMetaCopy.files;
    Map<BucketFileMeta, Map<Slice, Slice>> modifiedFiles = Maps.newHashMap();

    for (Map.Entry<Slice, byte[]> entry : bucket.frozenWriteCache.entrySet()) {
        // find file for key
        Map.Entry<Slice, BucketFileMeta> floorEntry = bucketSeqStarts.floorEntry(entry.getKey());
        BucketFileMeta floorFile;
        if (floorEntry != null) {
            floorFile = floorEntry.getValue();
        } else {
            floorEntry = bucketSeqStarts.firstEntry();
            if (floorEntry == null || floorEntry.getValue().name != null) {
                // no existing file or file with higher key
                floorFile = new BucketFileMeta();
            } else {
                // placeholder for new keys, move start key
                floorFile = floorEntry.getValue();
                bucketSeqStarts.remove(floorEntry.getKey());
            }
            floorFile.startKey = entry.getKey();
            if (floorFile.startKey.length != floorFile.startKey.buffer.length) {
                // normalize key for serialization
                floorFile.startKey = new Slice(floorFile.startKey.toByteArray());
            }
            bucketSeqStarts.put(floorFile.startKey, floorFile);
        }

        Map<Slice, Slice> fileUpdates = modifiedFiles.get(floorFile);
        if (fileUpdates == null) {
            modifiedFiles.put(floorFile, fileUpdates = Maps.newHashMap());
        }
        fileUpdates.put(entry.getKey(), new Slice(entry.getValue()));
    }

    // write modified files
    for (Map.Entry<BucketFileMeta, Map<Slice, Slice>> fileEntry : modifiedFiles.entrySet()) {
        BucketFileMeta fileMeta = fileEntry.getKey();
        TreeMap<Slice, Slice> fileData = new TreeMap<Slice, Slice>(getKeyComparator());

        if (fileMeta.name != null) {
            // load existing file
            long start = System.currentTimeMillis();
            FileReader reader = store.getReader(bucket.bucketKey, fileMeta.name);
            reader.readFully(fileData);
            ioStats.dataBytesRead += store.getFileSize(bucket.bucketKey, fileMeta.name);
            ioStats.dataReadTime += System.currentTimeMillis() - start;
            /* these keys are re-written */
            ioStats.dataKeysRewritten += fileData.size();
            ioStats.filesReadInCurrentWriteCycle++;
            ioStats.dataFilesRead++;
            reader.close();
            filesToDelete.add(fileMeta.name);
        }

        // apply updates
        fileData.putAll(fileEntry.getValue());
        // new file
        writeFile(bucket, bucketMetaCopy, fileData);
    }

    LOG.debug("Files written {} files read {}", ioStats.filesWroteInCurrentWriteCycle,
            ioStats.filesReadInCurrentWriteCycle);
    // flush meta data for new files
    try {
        LOG.debug("Writing {} with {} file entries", FNAME_META, bucketMetaCopy.files.size());
        OutputStream os = store.getOutputStream(bucket.bucketKey, FNAME_META + ".new");
        Output output = new Output(os);
        bucketMetaCopy.committedWid = bucket.committedLSN;
        bucketMetaCopy.recoveryStartWalPosition = bucket.recoveryStartWalPosition;
        kryo.writeClassAndObject(output, bucketMetaCopy);
        output.close();
        os.close();
        store.rename(bucket.bucketKey, FNAME_META + ".new", FNAME_META);
    } catch (IOException e) {
        throw new RuntimeException("Failed to write bucket meta data " + bucket.bucketKey, e);
    }

    // clear pending changes
    ioStats.dataKeysWritten += bucket.frozenWriteCache.size();
    // switch to new version
    this.metaCache.put(bucket.bucketKey, bucketMetaCopy);

    // delete old files
    for (String fileName : filesToDelete) {
        store.delete(bucket.bucketKey, fileName);
    }
    invalidateReader(bucket.bucketKey, filesToDelete);
    // clearing cache after invalidating readers
    bucket.frozenWriteCache.clear();

    // cleanup WAL files which are not needed anymore.
    minimumRecoveryWalPosition = bucketMetaCopy.recoveryStartWalPosition;
    for (Long bucketId : this.bucketKeys) {
        BucketMeta meta = getMeta(bucketId);
        if (meta.recoveryStartWalPosition.fileId < minimumRecoveryWalPosition.fileId
                || (meta.recoveryStartWalPosition.fileId == minimumRecoveryWalPosition.fileId
                        && meta.recoveryStartWalPosition.offset < minimumRecoveryWalPosition.offset)) {
            minimumRecoveryWalPosition = meta.recoveryStartWalPosition;
        }
    }
    this.wal.cleanup(minimumRecoveryWalPosition.fileId);
    ioStats.filesReadInCurrentWriteCycle = 0;
    ioStats.filesWroteInCurrentWriteCycle = 0;
}
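
One TreeMap.remove use above is worth calling out: when the placeholder file's start key moves, the code removes the entry under the old key and re-inserts it under the new one, because a key cannot be changed in place. A minimal sketch of that re-keying step (keys and values are invented for illustration):

import java.util.TreeMap;

public class MoveStartKey {
    public static void main(String[] args) {
        TreeMap<String, String> filesByStartKey = new TreeMap<>();
        filesByStartKey.put("m", "placeholder-file");

        // To change an entry's key, remove it under the old key and
        // re-insert it under the new one, as the snippet above does for
        // the placeholder file's start key.
        String meta = filesByStartKey.remove("m");
        filesByStartKey.put("d", meta);

        System.out.println(filesByStartKey); // {d=placeholder-file}
    }
}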

From source file:org.apache.hadoop.mapred.HFSPScheduler.java

/**
 * Preempt missingSlots number of slots from bigger jobs.
 *
 * @param jip
 *          Job that claims slots
 * @param allJobs
 *          all the size based jobs in the cluster
 * @param localJobs
 *          size based jobs that can be immediately suspended
 * @param missingSlots
 *          number of slots to claim
 * @param numToSkip
 *          number of preemptible tasks to skip
 * 
 * @return number of tasks preempted in the cluster for jip. The first element
 *         of the tuple is the number of tasks preempted for new tasks, the
 *         second is the number of tasks preempted for tasks to be resumed
 */
private ClaimedSlots claimSlots(HelperForType helper, final Phase phase, final JobInProgress jip,
        int missingNewSlots, int missingResumableSlots, int numToSkip,
        TreeMap<JobDurationInfo, JobInProgress> allJobs, TreeMap<JobDurationInfo, TaskStatuses> localJobs) {

    assert phase == Phase.SIZE_BASED || missingResumableSlots == 0;

    final TaskType type = helper.taskType;
    JobDurationInfo jdi = this.getDuration(jip.getJobID(), type);

    /* #size based tasks that occupy train slots in the cluster (suspendable) */
    int numTasksToPreempt = 0;
    if (phase == Phase.TRAIN) {
        /** num of size based tasks that can be suspended for training */
        int numOverflowSizeBasedTasks = helper.maxSizeBasedSlots > helper.runningSizeBasedTasks ? 0
                : helper.runningSizeBasedTasks - helper.maxSizeBasedSlots;

        /* num of size based tasks to preempt for the training of jip */
        numTasksToPreempt = Math.min(missingNewSlots, numOverflowSizeBasedTasks);
        if (LOG.isDebugEnabled()) {
            LOG.debug(phase.toString() + "(" + jip.getJobID() + ":" + type + "):"
                    + " numOverflowSizeBasedTasks: " + numOverflowSizeBasedTasks + " numTasksToPreempt: "
                    + numTasksToPreempt + " missingNewSlots: " + missingNewSlots + " numTrainTasksForJob: "
                    + helper.numTrainTasksForJob + " canAssignTrain: " + helper.canAssignTrain()
                    + " numToSkip: " + numToSkip);
        }
    } else {
        numTasksToPreempt = missingNewSlots;
        if (LOG.isDebugEnabled()) {
            LOG.debug(phase.toString() + "(" + jip.getJobID() + ":" + type + "):" + " missingNewSlots: "
                    + missingNewSlots + " missingResumableSlots: " + missingResumableSlots
                    + " numTrainTasksForJob: " + helper.numTrainTasksForJob + " canAssignTrain: "
                    + helper.canAssignTrain() + " numToSkip: " + numToSkip);
        }
    }
    final int startingNumTasksToPreemptForNew = numTasksToPreempt;
    final int startingResumableSlots = missingResumableSlots;

    // try to free pendingTasks number of slots among running on this TT
    Iterator<Entry<JobDurationInfo, JobInProgress>> sizeBasedJobsDescIter = allJobs.descendingMap().entrySet()
            .iterator();
    Iterator<Entry<JobDurationInfo, TaskStatuses>> sizeBasedJobsDescIterOnTT = localJobs.entrySet().iterator();

    Entry<JobDurationInfo, TaskStatuses> biggerOnTT = sizeBasedJobsDescIterOnTT.hasNext()
            ? sizeBasedJobsDescIterOnTT.next()
            : null;
    while (this.preemptionStrategy.isPreemptionActive()
            && (numTasksToPreempt > 0 || missingResumableSlots > 0)) {
        if (!sizeBasedJobsDescIter.hasNext()) {
            if (LOG.isDebugEnabled()) {
                LOG.debug(phase.toString() + "(" + jip.getJobID() + ":" + type + "):" + " should preempt "
                        + numTasksToPreempt + " for new tasks and " + missingResumableSlots + " for resumable "
                        + "tasks but no sizeBasedJob is running");
            }
            break;
        }

        Entry<JobDurationInfo, JobInProgress> nextSBJ = sizeBasedJobsDescIter.next();

        JobInProgress jipToPreempt = nextSBJ.getValue();

        /* don't try to suspend if jip is bigger than any other jip */
        if (jdi != null) {

            if (jipToPreempt.getJobID().equals(jip.getJobID())) {
                return new ClaimedSlots(startingNumTasksToPreemptForNew - numTasksToPreempt,
                        startingResumableSlots - missingResumableSlots);
            }

            if (JOB_DURATION_COMPARATOR.compare(nextSBJ.getKey(), jdi) <= 0) {
                if (LOG.isDebugEnabled()) {
                    LOG.debug(phase.toString() + "(" + jip.getJobID() + ":" + type + "):" + " should preempt "
                            + numTasksToPreempt + ", but bigger job avail is " + jip.getJobID() + ".len: "
                            + jdi.getPhaseDuration() + " > " + nextSBJ.getValue().getJobID() + ".len: "
                            + nextSBJ.getKey().getPhaseDuration());
                }
                return new ClaimedSlots(startingNumTasksToPreemptForNew - numTasksToPreempt,
                        startingResumableSlots - missingResumableSlots);
            }
        }

        if (jipToPreempt.getJobID().equals(jip.getJobID())) {
            continue;
        }

        /*
         * don't try to claim slots from a job in training
         * 
         * FIXME: ideally a job can claim slots from a training job until this job
         * has enough tasks for training
         */
        if (!this.isTrained(jipToPreempt, type)) {
            LOG.debug(phase.toString() + "(" + jip.getJobID() + ":" + type + "):" + " ignoring "
                    + jipToPreempt.getJobID() + " because in training");
            continue;
        }

        int numSuspendedOnThisTT = 0;

        /* if jipToPreempt has tasks on this TT, then suspend them */
        if (biggerOnTT != null // && type == TaskType.REDUCE
                && biggerOnTT.getKey().getJobID().equals(nextSBJ.getKey().getJobID())) {

            TreeMap<TaskAttemptID, TaskStatus> preemptableTAIDS = biggerOnTT.getValue().taskStatuses;
            int numPreemptions = Math.min(preemptableTAIDS.size(), missingResumableSlots + numTasksToPreempt);
            for (int i = 0; i < numPreemptions; i++) {
                TaskAttemptID pTAID = preemptableTAIDS.firstKey();
                TaskStatus pTS = preemptableTAIDS.remove(pTAID);
                JobInProgress pJIP = this.taskTrackerManager.getJob(pTAID.getJobID());
                TaskInProgress pTIP = pJIP.getTaskInProgress(pTAID.getTaskID());

                if (type == TaskType.REDUCE) {
                    // if (this.eagerPreemption == PreemptionType.KILL
                    // && pTIP.killTask(pTAID, false)) {
                    // if (missingResumableSlots > 0)
                    // missingResumableSlots -= 1;
                    // else
                    // numTasksToPreempt -= 1;
                    // numSuspendedOnThisTT += 1;
                    // if (jdi == null) {
                    // taskHelper.kill(pTAID, jip.getJobID(), phase);
                    // } else {
                    // taskHelper.kill(pTAID, jip.getJobID(), phase, nextSBJ.getKey(),
                    // jdi);
                    // }
                    // } else if (this.preemptionStrategy.isPreemptionActive()
                    // && this.canBeSuspended(pTS) && pTIP.suspendTaskAttempt(pTAID)) {
                    if (this.preemptionStrategy.isPreemptionActive()
                            && this.preemptionStrategy.canBePreempted(pTS)
                            && this.preemptionStrategy.preempt(pTIP, pTS)) {
                        if (missingResumableSlots > 0)
                            missingResumableSlots -= 1;
                        else
                            numTasksToPreempt -= 1;
                        numSuspendedOnThisTT += 1;
                        if (jdi == null) {
                            taskHelper.suspend(pTAID, jip.getJobID(), phase);
                        } else {
                            taskHelper.suspend(pTAID, jip.getJobID(), phase, nextSBJ.getKey(), jdi);
                        }
                    } else {
                        LOG.debug(phase.toString() + "(" + jip.getJobID() + ":" + type + "): cannot suspend "
                                + pTAID + " for " + jip);
                    }
                }
            }

            if (preemptableTAIDS.size() - numPreemptions <= 0) {
                biggerOnTT = sizeBasedJobsDescIterOnTT.hasNext() ? sizeBasedJobsDescIterOnTT.next() : null;
            }
        }

        /* #tasks that can be preempted */
        int numPreemptibleRunTasks = this.getNumRunningTasks(jipToPreempt, type) - numSuspendedOnThisTT;

        /*
         * Two cases: numToSkip is bigger than the number of preemptible tasks,
         * or it is not. If it is bigger, we skip this preemptible jip;
         * otherwise numToSkip is set to 0 and we do the real wait preemption.
         */
        if (numPreemptibleRunTasks <= numToSkip) {
            numToSkip -= numPreemptibleRunTasks;
        } else {
            /* #tasks that can be preempted by jip */
            int numPreemptibleByJIPRunTasks = numPreemptibleRunTasks - numToSkip;

            numToSkip = 0;

            /* #tasks that will be preempted by jip on other TTs */
            int numRunTasksEventuallyPreemptedByJIP = Math.min(numTasksToPreempt, numPreemptibleByJIPRunTasks);

            numTasksToPreempt -= numRunTasksEventuallyPreemptedByJIP;
        }
    }

    return new ClaimedSlots(startingNumTasksToPreemptForNew - numTasksToPreempt,
            startingResumableSlots - missingResumableSlots);
}
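
The inner preemption loop consumes tasks from the head of preemptableTAIDS with firstKey() followed by remove(). A reduced sketch of that consumption pattern (names are hypothetical); note that firstKey() throws NoSuchElementException on an empty map, which the original avoids by bounding the loop with Math.min:

import java.util.TreeMap;

public class ConsumeFromHead {
    public static void main(String[] args) {
        TreeMap<String, String> tasks = new TreeMap<>();
        tasks.put("attempt_1", "RUNNING");
        tasks.put("attempt_2", "RUNNING");

        // Bound the loop by min(size, wanted) so firstKey() is never
        // called on an empty map.
        int wanted = 5;
        int n = Math.min(tasks.size(), wanted);
        for (int i = 0; i < n; i++) {
            String head = tasks.firstKey();
            String status = tasks.remove(head); // remove returns the status
            System.out.println("preempting " + head + " (" + status + ")");
        }
    }
}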

From source file:ca.uhn.fhir.context.BaseRuntimeElementCompositeDefinition.java

@SuppressWarnings("unchecked")
private void scanCompositeElementForChildren(Set<String> elementNames,
        TreeMap<Integer, BaseRuntimeDeclaredChildDefinition> theOrderToElementDef,
        TreeMap<Integer, BaseRuntimeDeclaredChildDefinition> theOrderToExtensionDef) {
    int baseElementOrder = 0;

    for (ScannedField next : myScannedFields) {
        if (next.isFirstFieldInNewClass()) {
            baseElementOrder = theOrderToElementDef.isEmpty() ? 0
                    : theOrderToElementDef.lastEntry().getKey() + 1;
        }

        Class<?> declaringClass = next.getField().getDeclaringClass();

        Description descriptionAnnotation = ModelScanner.pullAnnotation(next.getField(), Description.class);

        TreeMap<Integer, BaseRuntimeDeclaredChildDefinition> orderMap = theOrderToElementDef;
        Extension extensionAttr = ModelScanner.pullAnnotation(next.getField(), Extension.class);
        if (extensionAttr != null) {
            orderMap = theOrderToExtensionDef;
        }

        Child childAnnotation = next.getChildAnnotation();
        Field nextField = next.getField();
        String elementName = childAnnotation.name();
        int order = childAnnotation.order();
        boolean childIsChoiceType = false;
        boolean orderIsReplaceParent = false;

        if (order == Child.REPLACE_PARENT) {

            if (extensionAttr != null) {

                for (Entry<Integer, BaseRuntimeDeclaredChildDefinition> nextEntry : orderMap.entrySet()) {
                    BaseRuntimeDeclaredChildDefinition nextDef = nextEntry.getValue();
                    if (nextDef instanceof RuntimeChildDeclaredExtensionDefinition) {
                        if (nextDef.getExtensionUrl().equals(extensionAttr.url())) {
                            orderIsReplaceParent = true;
                            order = nextEntry.getKey();
                            orderMap.remove(nextEntry.getKey());
                            elementNames.remove(elementName);
                            break;
                        }
                    }
                }
                if (order == Child.REPLACE_PARENT) {
                    throw new ConfigurationException("Field " + nextField.getName() + "' on target type "
                            + declaringClass.getSimpleName() + " has order() of REPLACE_PARENT ("
                            + Child.REPLACE_PARENT + ") but no parent element with extension URL "
                            + extensionAttr.url() + " could be found on type "
                            + nextField.getDeclaringClass().getSimpleName());
                }

            } else {

                for (Entry<Integer, BaseRuntimeDeclaredChildDefinition> nextEntry : orderMap.entrySet()) {
                    BaseRuntimeDeclaredChildDefinition nextDef = nextEntry.getValue();
                    if (elementName.equals(nextDef.getElementName())) {
                        orderIsReplaceParent = true;
                        order = nextEntry.getKey();
                        BaseRuntimeDeclaredChildDefinition existing = orderMap.remove(nextEntry.getKey());
                        elementNames.remove(elementName);

                        /*
                         * See #350 - If the original field (in the superclass) with the given name is a choice, then we need to make sure
                         * that the field which replaces is a choice even if it's only a choice of one type - this is because the
                         * element name when serialized still needs to reflect the datatype
                         */
                        if (existing instanceof RuntimeChildChoiceDefinition) {
                            childIsChoiceType = true;
                        }
                        break;
                    }
                }
                if (order == Child.REPLACE_PARENT) {
                    throw new ConfigurationException("Field " + nextField.getName() + "' on target type "
                            + declaringClass.getSimpleName() + " has order() of REPLACE_PARENT ("
                            + Child.REPLACE_PARENT + ") but no parent element with name " + elementName
                            + " could be found on type " + nextField.getDeclaringClass().getSimpleName());
                }

            }

        }

        if (order < 0 && order != Child.ORDER_UNKNOWN) {
            throw new ConfigurationException("Invalid order '" + order + "' on @Child for field '"
                    + nextField.getName() + "' on target type: " + declaringClass);
        }

        if (order != Child.ORDER_UNKNOWN && !orderIsReplaceParent) {
            order = order + baseElementOrder;
        }
        // int min = childAnnotation.min();
        // int max = childAnnotation.max();

        /*
         * Anything that's marked as unknown is assigned the first free order value so that it doesn't conflict with any explicitly given orders and can be figured out later
         */
        if (order == Child.ORDER_UNKNOWN) {
            order = Integer.valueOf(0);
            while (orderMap.containsKey(order)) {
                order++;
            }
        }

        List<Class<? extends IBase>> choiceTypes = next.getChoiceTypes();

        if (orderMap.containsKey(order)) {
            throw new ConfigurationException("Detected duplicate field order '" + childAnnotation.order()
                    + "' for element named '" + elementName + "' in type '" + declaringClass.getCanonicalName()
                    + "' - Already had: " + orderMap.get(order).getElementName());
        }

        if (elementNames.contains(elementName)) {
            throw new ConfigurationException("Detected duplicate field name '" + elementName + "' in type '"
                    + declaringClass.getCanonicalName() + "'");
        }

        Class<?> nextElementType = next.getElementType();

        BaseRuntimeDeclaredChildDefinition def;
        if (childAnnotation.name().equals("extension")
                && IBaseExtension.class.isAssignableFrom(nextElementType)) {
            def = new RuntimeChildExtension(nextField, childAnnotation.name(), childAnnotation,
                    descriptionAnnotation);
        } else if (childAnnotation.name().equals("modifierExtension")
                && IBaseExtension.class.isAssignableFrom(nextElementType)) {
            def = new RuntimeChildExtension(nextField, childAnnotation.name(), childAnnotation,
                    descriptionAnnotation);
        } else if (BaseContainedDt.class.isAssignableFrom(nextElementType)
                || (childAnnotation.name().equals("contained")
                        && IBaseResource.class.isAssignableFrom(nextElementType))) {
            /*
             * Child is contained resources
             */
            def = new RuntimeChildContainedResources(nextField, childAnnotation, descriptionAnnotation,
                    elementName);
        } else if (IAnyResource.class.isAssignableFrom(nextElementType)
                || IResource.class.equals(nextElementType)) {
            /*
             * Child is a resource as a direct child, as in Bundle.entry.resource
             */
            def = new RuntimeChildDirectResource(nextField, childAnnotation, descriptionAnnotation,
                    elementName);
        } else {
            childIsChoiceType |= choiceTypes.size() > 1;
            if (childIsChoiceType && !BaseResourceReferenceDt.class.isAssignableFrom(nextElementType)
                    && !IBaseReference.class.isAssignableFrom(nextElementType)) {
                def = new RuntimeChildChoiceDefinition(nextField, elementName, childAnnotation,
                        descriptionAnnotation, choiceTypes);
            } else if (extensionAttr != null) {
                /*
                 * Child is an extension
                 */
                Class<? extends IBase> et = (Class<? extends IBase>) nextElementType;

                Object binder = null;
                if (BoundCodeDt.class.isAssignableFrom(nextElementType)
                        || IBoundCodeableConcept.class.isAssignableFrom(nextElementType)) {
                    binder = ModelScanner.getBoundCodeBinder(nextField);
                }

                def = new RuntimeChildDeclaredExtensionDefinition(nextField, childAnnotation,
                        descriptionAnnotation, extensionAttr, elementName, extensionAttr.url(), et, binder);

                if (IBaseEnumeration.class.isAssignableFrom(nextElementType)) {
                    ((RuntimeChildDeclaredExtensionDefinition) def).setEnumerationType(
                            ReflectionUtil.getGenericCollectionTypeOfFieldWithSecondOrderForList(nextField));
                }
            } else if (BaseResourceReferenceDt.class.isAssignableFrom(nextElementType)
                    || IBaseReference.class.isAssignableFrom(nextElementType)) {
                /*
                 * Child is a resource reference
                 */
                List<Class<? extends IBaseResource>> refTypesList = new ArrayList<Class<? extends IBaseResource>>();
                for (Class<? extends IElement> nextType : childAnnotation.type()) {
                    if (IBaseReference.class.isAssignableFrom(nextType)) {
                        refTypesList.add(myContext.getVersion().getVersion().isRi() ? IAnyResource.class
                                : IResource.class);
                        continue;
                    } else if (IBaseResource.class.isAssignableFrom(nextType) == false) {
                        throw new ConfigurationException("Field '" + nextField.getName() + "' in class '"
                                + nextField.getDeclaringClass().getCanonicalName() + "' is of type "
                                + BaseResourceReferenceDt.class + " but contains a non-resource type: "
                                + nextType.getCanonicalName());
                    }
                    refTypesList.add((Class<? extends IBaseResource>) nextType);
                }
                def = new RuntimeChildResourceDefinition(nextField, elementName, childAnnotation,
                        descriptionAnnotation, refTypesList);

            } else if (IResourceBlock.class.isAssignableFrom(nextElementType)
                    || IBaseBackboneElement.class.isAssignableFrom(nextElementType)
                    || IBaseDatatypeElement.class.isAssignableFrom(nextElementType)) {
                /*
                 * Child is a resource block (i.e. a sub-tag within a resource) TODO: do these have a better name according to HL7?
                 */

                Class<? extends IBase> blockDef = (Class<? extends IBase>) nextElementType;
                def = new RuntimeChildResourceBlockDefinition(myContext, nextField, childAnnotation,
                        descriptionAnnotation, elementName, blockDef);
            } else if (IDatatype.class.equals(nextElementType) || IElement.class.equals(nextElementType)
                    || "Type".equals(nextElementType.getSimpleName())
                    || IBaseDatatype.class.equals(nextElementType)) {

                def = new RuntimeChildAny(nextField, elementName, childAnnotation, descriptionAnnotation);
            } else if (IDatatype.class.isAssignableFrom(nextElementType)
                    || IPrimitiveType.class.isAssignableFrom(nextElementType)
                    || ICompositeType.class.isAssignableFrom(nextElementType)
                    || IBaseDatatype.class.isAssignableFrom(nextElementType)
                    || IBaseExtension.class.isAssignableFrom(nextElementType)) {
                Class<? extends IBase> nextDatatype = (Class<? extends IBase>) nextElementType;

                if (IPrimitiveType.class.isAssignableFrom(nextElementType)) {
                    if (nextElementType.equals(BoundCodeDt.class)) {
                        IValueSetEnumBinder<Enum<?>> binder = ModelScanner.getBoundCodeBinder(nextField);
                        Class<? extends Enum<?>> enumType = ModelScanner
                                .determineEnumTypeForBoundField(nextField);
                        def = new RuntimeChildPrimitiveBoundCodeDatatypeDefinition(nextField, elementName,
                                childAnnotation, descriptionAnnotation, nextDatatype, binder, enumType);
                    } else if (IBaseEnumeration.class.isAssignableFrom(nextElementType)) {
                        Class<? extends Enum<?>> binderType = ModelScanner
                                .determineEnumTypeForBoundField(nextField);
                        def = new RuntimeChildPrimitiveEnumerationDatatypeDefinition(nextField, elementName,
                                childAnnotation, descriptionAnnotation, nextDatatype, binderType);
                    } else {
                        def = new RuntimeChildPrimitiveDatatypeDefinition(nextField, elementName,
                                descriptionAnnotation, childAnnotation, nextDatatype);
                    }
                } else {
                    if (IBoundCodeableConcept.class.isAssignableFrom(nextElementType)) {
                        IValueSetEnumBinder<Enum<?>> binder = ModelScanner.getBoundCodeBinder(nextField);
                        Class<? extends Enum<?>> enumType = ModelScanner
                                .determineEnumTypeForBoundField(nextField);
                        def = new RuntimeChildCompositeBoundDatatypeDefinition(nextField, elementName,
                                childAnnotation, descriptionAnnotation, nextDatatype, binder, enumType);
                    } else if (BaseNarrativeDt.class.isAssignableFrom(nextElementType)
                            || INarrative.class.isAssignableFrom(nextElementType)) {
                        def = new RuntimeChildNarrativeDefinition(nextField, elementName, childAnnotation,
                                descriptionAnnotation, nextDatatype);
                    } else {
                        def = new RuntimeChildCompositeDatatypeDefinition(nextField, elementName,
                                childAnnotation, descriptionAnnotation, nextDatatype);
                    }
                }

            } else {
                throw new ConfigurationException(
                        "Field '" + elementName + "' in type '" + declaringClass.getCanonicalName()
                                + "' is not a valid child type: " + nextElementType);
            }

            Binding bindingAnnotation = ModelScanner.pullAnnotation(nextField, Binding.class);
            if (bindingAnnotation != null) {
                if (isNotBlank(bindingAnnotation.valueSet())) {
                    def.setBindingValueSet(bindingAnnotation.valueSet());
                }
            }

        }

        orderMap.put(order, def);
        elementNames.add(elementName);
    }
}
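
Both REPLACE_PARENT branches above call orderMap.remove(nextEntry.getKey()) inside a loop over orderMap.entrySet() and then break immediately. The break is essential: continuing to iterate after removing directly from the map would throw ConcurrentModificationException. A minimal sketch of the pattern:

import java.util.Map;
import java.util.TreeMap;

public class RemoveThenBreak {
    public static void main(String[] args) {
        TreeMap<Integer, String> orderMap = new TreeMap<>();
        orderMap.put(10, "name");
        orderMap.put(20, "extension");

        for (Map.Entry<Integer, String> e : orderMap.entrySet()) {
            if ("extension".equals(e.getValue())) {
                orderMap.remove(e.getKey());
                break; // mandatory: iterating further after a direct remove
                       // would throw ConcurrentModificationException
            }
        }
        System.out.println(orderMap); // {10=name}
    }
}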

From source file:com.netxforge.oss2.config.AmiPeerFactory.java

/**
 * Combine specific and range elements so that AMIPeerFactory has to spend
 * less time iterating all these elements.
 * TODO This really should be pulled up into PeerFactory somehow, but I'm not sure how (given that "Definition" is different for both
 * SNMP and AMI). Maybe some sort of visitor methodology would work. The basic logic should be fine as it's all IP address manipulation.
 *
 * @throws UnknownHostException
 */
void optimize() throws UnknownHostException {
    getWriteLock().lock();

    try {
        // First pass: Remove empty definition elements
        for (final Iterator<Definition> definitionsIterator = m_config.getDefinitionCollection()
                .iterator(); definitionsIterator.hasNext();) {
            final Definition definition = definitionsIterator.next();
            if (definition.getSpecificCount() == 0 && definition.getRangeCount() == 0) {
                LogUtils.debugf(this, "optimize: Removing empty definition element");
                definitionsIterator.remove();
            }
        }

        // Second pass: Replace single IP range elements with specific elements
        for (Definition definition : m_config.getDefinitionCollection()) {
            for (Iterator<Range> rangesIterator = definition.getRangeCollection().iterator(); rangesIterator
                    .hasNext();) {
                Range range = rangesIterator.next();
                if (range.getBegin().equals(range.getEnd())) {
                    definition.addSpecific(range.getBegin());
                    rangesIterator.remove();
                }
            }
        }

        // Third pass: Sort specific and range elements for improved XML
        // readability and then combine them into fewer elements where possible
        for (final Definition definition : m_config.getDefinitionCollection()) {
            // Sort specifics
            final TreeMap<InetAddress, String> specificsMap = new TreeMap<InetAddress, String>(
                    new InetAddressComparator());
            for (final String specific : definition.getSpecificCollection()) {
                specificsMap.put(InetAddressUtils.getInetAddress(specific), specific.trim());
            }

            // Sort ranges
            final TreeMap<InetAddress, Range> rangesMap = new TreeMap<InetAddress, Range>(
                    new InetAddressComparator());
            for (final Range range : definition.getRangeCollection()) {
                rangesMap.put(InetAddressUtils.getInetAddress(range.getBegin()), range);
            }

            // Combine consecutive specifics into ranges
            InetAddress priorSpecific = null;
            Range addedRange = null;
            for (final InetAddress specific : specificsMap.keySet()) {
                if (priorSpecific == null) {
                    priorSpecific = specific;
                    continue;
                }

                if (BigInteger.ONE.equals(InetAddressUtils.difference(specific, priorSpecific))
                        && InetAddressUtils.inSameScope(specific, priorSpecific)) {
                    if (addedRange == null) {
                        addedRange = new Range();
                        addedRange.setBegin(InetAddressUtils.toIpAddrString(priorSpecific));
                        rangesMap.put(priorSpecific, addedRange);
                        specificsMap.remove(priorSpecific);
                    }

                    addedRange.setEnd(InetAddressUtils.toIpAddrString(specific));
                    specificsMap.remove(specific);
                } else {
                    addedRange = null;
                }

                priorSpecific = specific;
            }

            // Move specifics to ranges
            for (final InetAddress specific : new ArrayList<InetAddress>(specificsMap.keySet())) {
                for (final InetAddress begin : new ArrayList<InetAddress>(rangesMap.keySet())) {

                    if (!InetAddressUtils.inSameScope(begin, specific)) {
                        continue;
                    }

                    if (InetAddressUtils.toInteger(begin).subtract(BigInteger.ONE)
                            .compareTo(InetAddressUtils.toInteger(specific)) > 0) {
                        continue;
                    }

                    final Range range = rangesMap.get(begin);

                    final InetAddress end = InetAddressUtils.getInetAddress(range.getEnd());

                    if (InetAddressUtils.toInteger(end).add(BigInteger.ONE)
                            .compareTo(InetAddressUtils.toInteger(specific)) < 0) {
                        continue;
                    }

                    if (InetAddressUtils.toInteger(specific).compareTo(InetAddressUtils.toInteger(begin)) >= 0
                            && InetAddressUtils.toInteger(specific)
                                    .compareTo(InetAddressUtils.toInteger(end)) <= 0) {
                        specificsMap.remove(specific);
                        break;
                    }

                    if (InetAddressUtils.toInteger(begin).subtract(BigInteger.ONE)
                            .equals(InetAddressUtils.toInteger(specific))) {
                        rangesMap.remove(begin);
                        rangesMap.put(specific, range);
                        range.setBegin(InetAddressUtils.toIpAddrString(specific));
                        specificsMap.remove(specific);
                        break;
                    }

                    if (InetAddressUtils.toInteger(end).add(BigInteger.ONE)
                            .equals(InetAddressUtils.toInteger(specific))) {
                        range.setEnd(InetAddressUtils.toIpAddrString(specific));
                        specificsMap.remove(specific);
                        break;
                    }
                }
            }

            // Combine consecutive ranges
            Range priorRange = null;
            InetAddress priorBegin = null;
            InetAddress priorEnd = null;
            for (final Iterator<InetAddress> rangesIterator = rangesMap.keySet().iterator(); rangesIterator
                    .hasNext();) {
                final InetAddress beginAddress = rangesIterator.next();
                final Range range = rangesMap.get(beginAddress);
                final InetAddress endAddress = InetAddressUtils.getInetAddress(range.getEnd());

                if (priorRange != null) {
                    if (InetAddressUtils.inSameScope(beginAddress, priorEnd) && InetAddressUtils
                            .difference(beginAddress, priorEnd).compareTo(BigInteger.ONE) <= 0) {
                        priorBegin = new InetAddressComparator().compare(priorBegin, beginAddress) < 0
                                ? priorBegin
                                : beginAddress;
                        priorRange.setBegin(InetAddressUtils.toIpAddrString(priorBegin));
                        priorEnd = new InetAddressComparator().compare(priorEnd, endAddress) > 0 ? priorEnd
                                : endAddress;
                        priorRange.setEnd(InetAddressUtils.toIpAddrString(priorEnd));

                        rangesIterator.remove();
                        continue;
                    }
                }

                priorRange = range;
                priorBegin = beginAddress;
                priorEnd = endAddress;
            }

            // Update changes made to sorted maps
            definition.setSpecific(specificsMap.values().toArray(new String[0]));
            definition.setRange(rangesMap.values().toArray(new Range[0]));
        }
    } finally {
        getWriteLock().unlock();
    }
}
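
optimize() demonstrates the two safe ways to remove entries while traversing a TreeMap: the specifics/ranges passes iterate over snapshot copies (new ArrayList<InetAddress>(...keySet())) and call remove on the live map, while the final pass removes through the live iterator with rangesIterator.remove(). A side-by-side sketch of both patterns, with invented data:

import java.util.ArrayList;
import java.util.Iterator;
import java.util.TreeMap;

public class TwoSafeRemovalPatterns {
    public static void main(String[] args) {
        TreeMap<Integer, String> map = new TreeMap<>();
        map.put(1, "a");
        map.put(2, "b");
        map.put(3, "c");

        // Pattern 1 (used with the snapshot ArrayLists above): iterate a
        // copy of the keys so TreeMap.remove can be called on the live map.
        for (Integer key : new ArrayList<>(map.keySet())) {
            if (key == 2) {
                map.remove(key);
            }
        }

        // Pattern 2 (used with rangesIterator above): remove through the
        // iterator of the live keySet view, the only safe direct removal.
        for (Iterator<Integer> it = map.keySet().iterator(); it.hasNext();) {
            if (it.next() == 3) {
                it.remove();
            }
        }
        System.out.println(map); // {1=a}
    }
}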

From source file:com.example.camera2raw.Camera2RawFragment.java

/**
 * Retrieve the next {@link Image} from a reference counted {@link ImageReader}, retaining
 * that {@link ImageReader} until that {@link Image} is no longer in use, and set this
 * {@link Image} as the result for the next request in the queue of pending requests.  If
 * all necessary information is available, begin saving the image to a file in a background
 * thread.
 *
 * @param pendingQueue the currently active requests.
 * @param reader       a reference counted wrapper containing an {@link ImageReader} from which
 *                     to acquire an image.
 */
private void dequeueAndSaveImage(TreeMap<Integer, ImageSaverBuilder> pendingQueue,
        RefCountedAutoCloseable<ImageReader> reader) {
    synchronized (mCameraStateLock) {
        Map.Entry<Integer, ImageSaverBuilder> entry = pendingQueue.firstEntry();
        ImageSaverBuilder builder = entry.getValue();

        // Increment reference count to prevent ImageReader from being closed while we
        // are saving its Images in a background thread (otherwise their resources may
        // be freed while we are writing to a file).
        if (reader == null || reader.getAndRetain() == null) {
            Log.e(TAG, "Paused the activity before we could save the image," + " ImageReader already closed.");
            pendingQueue.remove(entry.getKey());
            return;
        }

        Image image;
        try {
            image = reader.get().acquireNextImage();
        } catch (IllegalStateException e) {
            Log.e(TAG, "Too many images queued for saving, dropping image for request: " + entry.getKey());
            pendingQueue.remove(entry.getKey());
            return;
        }

        builder.setRefCountedReader(reader).setImage(image);

        handleCompletionLocked(entry.getKey(), builder, pendingQueue);

        // finish this Activity and return to the previous one
        getActivity().finish();
    }
}
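
A caveat on the snippet above: firstEntry() returns null when the map is empty, so this code assumes pendingQueue is non-empty when the method runs. A defensive variant (a sketch, not the original behavior) would guard before dereferencing:

import java.util.Map;
import java.util.TreeMap;

public class GuardedDequeue {
    public static void main(String[] args) {
        TreeMap<Integer, String> pendingQueue = new TreeMap<>();

        Map.Entry<Integer, String> entry = pendingQueue.firstEntry();
        if (entry == null) {
            System.out.println("no pending requests"); // avoid the NPE
            return;
        }
        pendingQueue.remove(entry.getKey());
    }
}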

From source file:com.termmed.reconciliation.RelationshipReconciliation.java

/**
 * Compare prev inact.
 *
 * @param step the step
 * @return the string
 * @throws IOException Signals that an I/O exception has occurred.
 */
private String comparePrevInact(int step) throws IOException {
    // STATISTICS COUNTERS
    int countSameISA = 0;
    int countA_Diff = 0;
    int countB_Diff = 0;
    int countB_DiffISA = 0;
    int countB_Total = 0;
    int sumPrevInact = 0;
    int sumNewNoRec = 0;
    long startTime = System.currentTimeMillis();

    StringBuilder s = new StringBuilder();
    s.append(
            "\r\n::: [Reconciliation by previous inactives vs current actives, without grouping comparation - step:"
                    + step + "]");

    boolean reconciled = false;
    for (Long conceptId : newNoRec.keySet()) {
        TreeMap<String, Relationship> relsPrev = prevInact.get(conceptId);
        ArrayList<Relationship> relsCurr = newNoRec.get(conceptId);
        sumNewNoRec += relsCurr.size();
        if (relsPrev != null) {
            sumPrevInact += relsPrev.size();
            for (Relationship relC : relsCurr) {
                reconciled = false;
                for (String key : relsPrev.descendingKeySet()) {
                    Relationship relP = relsPrev.get(key);
                    if (compareRelsStep(relC, relP, step)) {
                        writeReconciled(bw, relC, relP);

                        countB_Total++;
                        if (relC.typeId == isa) {
                            countSameISA++;
                        }
                        reconciled = true;
                        relsPrev.remove(key);
                        break;
                    }
                }
                if (!reconciled) {
                    countB_Diff++;
                    if (relC.typeId == isa) {
                        countB_DiffISA++;
                    }
                    writeNewNoRec(relC);
                }
            }
            prevInact.put(conceptId, relsPrev);
        } else {
            for (Relationship relC : relsCurr) {
                countB_Diff++;
                if (relC.typeId == isa) {
                    countB_DiffISA++;
                }
                writeNewNoRec(relC);

            }
        }

    }

    s.append("\r\n::: Current active relationships to reconcile = \t" + sumNewNoRec);
    s.append("\r\n::: Candidate previous inactive relationships to match = \t" + sumPrevInact);

    s.append("\r\n::: Partial process statistics:");
    s.append("\r\n::: Reconciled relationships:  \t").append(countB_Total);
    s.append("\r\n::: Reconciled Isa's relationships:  \t").append(countSameISA);
    s.append("\r\n::: Previous relationships without match :   \t").append(countA_Diff);
    s.append("\r\n::: Current relationships without match:   \t").append(countB_Diff);
    s.append("\r\n::: Current Isa's relationships without match:\t").append(countB_DiffISA);
    s.append("\r\n::: ");

    long lapseTime = System.currentTimeMillis() - startTime;
    s.append("\r\n::: [Partial time] Sort/Compare Input & Output: \t").append(lapseTime);
    s.append("\t(mS)\t");
    s.append("\r\n");

    sumB_Total += countB_Total;
    sumSameISA += countSameISA;
    sumA_Diff = countA_Diff;
    sumB_Diff = countB_Diff;
    sumB_DiffISA = countB_DiffISA;

    return s.toString();
}

From source file:uk.ac.leeds.ccg.andyt.projects.moses.process.Comparison.java

/**
 * Aim is to produce an aggregated data set for comparison totalling males
 * and females by MSOA to compare with CASUV003DataRecord
 */
private void run3() throws IOException {
    boolean aggregateToMSOA = true;
    // boolean aggregateToMSOA = false;
    ToyModelDataHandler tToyModelDataHandler = new ToyModelDataHandler();
    String startOfFilename = "C:/Work/Projects/MoSeS/Workspace/Leeds/ToyModel_SWR_OA_HSARHP_ISARCEP_0_5_5000_3_30_12_20";
    // String startOfFilename = new String(
    // "C:/Work/Projects/MoSeS/Workspace/Leeds/ToyModel_SWR_OA_HSARHP_ISARCEP_0_5_1000_3_30_12_20"
    // );
    // String startOfFilename = new String(
    // "C:/Work/Projects/MoSeS/Workspace/Leeds/ToyModel_SWR_OA_ISARHP_ISARCEP_0_5_200_3_30_12_20"
    // );
    File tToyModelDataRecord2CSVFile = new File(startOfFilename + ".csv");
    File tToyModelDataRecordMaleFemaleComparisonFile;
    if (aggregateToMSOA) {
        tToyModelDataRecordMaleFemaleComparisonFile = new File(
                startOfFilename + "_MSOAMaleFemaleComparison.csv");
    } else {
        tToyModelDataRecordMaleFemaleComparisonFile = new File(startOfFilename + "_OAMaleFemaleComparison.csv");
    }
    if (!tToyModelDataRecordMaleFemaleComparisonFile.exists()) {
        tToyModelDataRecordMaleFemaleComparisonFile.createNewFile();
    }
    PrintWriter tToyModelDataRecordMaleFemaleComparisonFilePrintWriter = new PrintWriter(
            tToyModelDataRecordMaleFemaleComparisonFile);
    // CASUV003DataHandler tCASUV003DataHandler = new CASUV003DataHandler(
    // new File(
    // "C:/Work/Projects/MoSeS/Workspace/Leeds/CASUV003DataRecordsMSOA.dat"
    // ) );
    CASUV003DataHandler tCASUV003DataHandler;
    CAS001DataHandler tCAS001DataHandler;
    if (aggregateToMSOA) {
        tCASUV003DataHandler = new CASUV003DataHandler(
                new File("C:/Work/Projects/MoSeS/Workspace/Leeds/CASUV003DataRecordsMSOA.dat"));
        tCAS001DataHandler = new CAS001DataHandler(
                new File("C:/Work/Projects/MoSeS/Workspace/Leeds/CAS001DataRecordsMSOA.dat"));
    } else {
        tCASUV003DataHandler = new CASUV003DataHandler(
                new File("C:/Work/Projects/MoSeS/Workspace/CASUV003DataRecords.dat"));
        tCAS001DataHandler = new CAS001DataHandler(
                new File("C:/Work/Projects/MoSeS/Workspace/CAS001DataRecords.dat"));
    }
    CASUV003DataRecord aCASUV003DataRecord;
    CAS001DataRecord aCAS001DataRecord;
    BufferedReader tBufferedReader = new BufferedReader(
            new InputStreamReader(new FileInputStream(tToyModelDataRecord2CSVFile)));
    StreamTokenizer tStreamTokenizer = new StreamTokenizer(tBufferedReader);
    Generic_StaticIO.setStreamTokenizerSyntax1(tStreamTokenizer);
    // Initialise
    int tMaleCount;
    int tFemaleCount;
    int tMaleCEPCount;
    int tMaleHPCount;
    int tFemaleCEPCount;
    int tFemaleHPCount;
    int tokenType = tStreamTokenizer.nextToken();
    ToyModelDataRecord_2 aToyModelDataRecord2;
    String aZoneCode;
    HashMap tLookUpMSOAfromOAHashMap = null;
    CASDataHandler tCASDataHandler = new CASDataHandler();
    if (aggregateToMSOA) {
        tLookUpMSOAfromOAHashMap = tCASDataHandler.get_LookUpMSOAfromOAHashMap();
    }
    Counts aCounts;
    tToyModelDataRecordMaleFemaleComparisonFilePrintWriter.println(
            "ZoneCode,CAS001HPFemales,CAS001CEPFemales,CAS001Females,CASUV003Females,ToyModelFemales,ToyModelHPFemales,ToyModelCEPFemales,CAS001HPMales,CAS001CEPMales,CAS001Males,CASUV003Males,ToyModelMales,ToyModelHPMales,ToyModelCEPMales");
    TreeMap result = new TreeMap();
    while (tokenType != StreamTokenizer.TT_EOF) {
        switch (tokenType) {
        case StreamTokenizer.TT_WORD:
            aToyModelDataRecord2 = new ToyModelDataRecord_2(tToyModelDataHandler, tStreamTokenizer.sval);
            if (aggregateToMSOA) {
                aZoneCode = (String) tLookUpMSOAfromOAHashMap
                        .get(new String(aToyModelDataRecord2.getZone_Code()));
            } else {
                aZoneCode = String.valueOf(aToyModelDataRecord2.getZone_Code());
            }
            if (aToyModelDataRecord2.SEX == 0) {
                tFemaleCount = 1;
                if (aToyModelDataRecord2.tHouseholdID != -9) {
                    tFemaleHPCount = 1;
                    tFemaleCEPCount = 0;
                } else {
                    tFemaleHPCount = 0;
                    tFemaleCEPCount = 1;
                }
                tMaleCount = 0;
                tMaleHPCount = 0;
                tMaleCEPCount = 0;
            } else {
                tMaleCount = 1;
                if (aToyModelDataRecord2.tHouseholdID != -9) {
                    tMaleHPCount = 1;
                    tMaleCEPCount = 0;
                } else {
                    tMaleHPCount = 0;
                    tMaleCEPCount = 1;
                }
                tFemaleCount = 0;
                tFemaleHPCount = 0;
                tFemaleCEPCount = 0;
            }
            if (result.containsKey(aZoneCode)) {
                aCounts = (Counts) result.get(aZoneCode);
                result.remove(aZoneCode);
                aCounts.addToCounts(tMaleCount, tMaleCEPCount, tMaleHPCount, tFemaleCount, tFemaleCEPCount,
                        tFemaleHPCount);
                result.put(aZoneCode, aCounts);
            } else {
                aCounts = new Counts();
                aCounts.addToCounts(tMaleCount, tMaleCEPCount, tMaleHPCount, tFemaleCount, tFemaleCEPCount,
                        tFemaleHPCount);
                result.put(aZoneCode, aCounts);
            }
        }
        tokenType = tStreamTokenizer.nextToken();
    }
    Iterator aIterator = result.keySet().iterator();
    Object key;
    while (aIterator.hasNext()) {
        key = aIterator.next();
        aCounts = (Counts) result.get(key);
        aZoneCode = (String) key;
        aCASUV003DataRecord = (CASUV003DataRecord) tCASUV003DataHandler.getDataRecord(aZoneCode);
        aCAS001DataRecord = (CAS001DataRecord) tCAS001DataHandler.getDataRecord(aZoneCode);
        tToyModelDataRecordMaleFemaleComparisonFilePrintWriter.println("" + aZoneCode + ", "
                + aCAS001DataRecord.getHouseholdResidentsFemales() + ", "
                + aCAS001DataRecord.getCommunalEstablishmentResidentsFemales() + ", "
                + (aCAS001DataRecord.getHouseholdResidentsFemales()
                        + aCAS001DataRecord.getCommunalEstablishmentResidentsFemales())
                + ", " + aCASUV003DataRecord.getFemales() + ", " + aCounts.tFemaleCount + ", "
                + aCounts.tFemaleHPCount + ", " + aCounts.tFemaleCEPCount + ", "
                + aCAS001DataRecord.getHouseholdResidentsMales() + ", "
                + aCAS001DataRecord.getCommunalEstablishmentResidentsMales() + ", "
                + (aCAS001DataRecord.getHouseholdResidentsMales()
                        + aCAS001DataRecord.getCommunalEstablishmentResidentsMales())
                + ", " + aCASUV003DataRecord.getMales() + ", " + aCounts.tMaleCount + ", "
                + aCounts.tMaleHPCount + ", " + aCounts.tMaleCEPCount);
    }
    tBufferedReader.close();
    tToyModelDataRecordMaleFemaleComparisonFilePrintWriter.close();
}
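
A side note on the remove/put pair in the tokenizer loop above: because get returns a reference to the stored Counts object, mutating it already updates the mapping in place, so the remove and re-put are not strictly necessary. A small sketch of the in-place alternative (using an int[] as a stand-in for Counts):

import java.util.TreeMap;

public class UpdateInPlace {
    public static void main(String[] args) {
        TreeMap<String, int[]> result = new TreeMap<>();
        result.put("zone1", new int[] { 1 });

        // get returns a reference, so mutating the value object updates
        // the mapping without remove/put and without touching map structure.
        int[] counts = result.get("zone1");
        counts[0] += 1;

        System.out.println(result.get("zone1")[0]); // 2
    }
}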