Example usage for java.util.PriorityQueue PriorityQueue(int, Comparator)

Introduction

On this page you can find example usage of the java.util.PriorityQueue constructor PriorityQueue(int initialCapacity, Comparator<? super E> comparator).

Prototype

public PriorityQueue(int initialCapacity, Comparator<? super E> comparator) 

Document

Creates a PriorityQueue with the specified initial capacity that orders its elements according to the specified comparator.
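
For orientation, here is a minimal, self-contained sketch of this constructor; the class and data are illustrative, not taken from the examples below. The capacity argument is only a sizing hint for the backing array, while the comparator alone decides which element poll() returns first.

import java.util.Comparator;
import java.util.PriorityQueue;

public class ShortestFirstExample {
    public static void main(String[] args) {
        // The capacity (16) is only a sizing hint; the queue grows as needed.
        PriorityQueue<String> byLength = new PriorityQueue<String>(16, new Comparator<String>() {
            public int compare(String a, String b) {
                return Integer.compare(a.length(), b.length());
            }
        });
        byLength.add("banana");
        byLength.add("fig");
        byLength.add("cherry");
        System.out.println(byLength.poll()); // prints "fig", the shortest string
    }
}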

Usage

From source file:fm.krui.kruifm.KRUIScheduleActivity.java

@Override
public void onTextDownloaded() {

    String[] urls = { MAIN_SCHEDULE_RR_URL, MAIN_SCHEDULE_MS_URL, MAIN_SCHEDULE_S_URL, MAIN_SCHEDULE_NT_URL,
            MAIN_SCHEDULE_SP_URL };
    String[] filenames = { MAIN_SCHEDULE_RR_FILENAME, MAIN_SCHEDULE_MS_FILENAME, MAIN_SCHEDULE_S_FILENAME,
            MAIN_SCHEDULE_NT_FILENAME, MAIN_SCHEDULE_SP_FILENAME };

    // Initialize a Priority Queue which will cache show objects before sorting into the final showList.
    pq = new PriorityQueue<Show>(11, new ShowComparator());

    // For each day of the week, scan all five event categories
    // For each text file we downloaded, parse the show data inside and store them in showList.
    for (int k = 0; k < filenames.length; k++) {
        Log.v(TAG, "* Beginning scan of category: " + filenames[k]);

        // Read text file
        try {
            // Construct a JSONObject from the stored text file containing events of this category
            JSONObject calObj = new JSONObject(readTextFile(new File(getFilesDir(), filenames[k])));

            // Store Sun-Sat events from this category in the Priority Queue
            parseShowData(calObj, k + 1);

        } catch (NullPointerException e) {
            Log.e(TAG, "No shows found for category: " + filenames[k]);
        } catch (JSONException e) {
            Log.e(TAG, "JSONException thrown when processing shows!");
        }

    }

    Log.v(TAG, "All shows have been parsed, now sort into their proper place in showList.");

    // Initialize show storage
    sunday = new ArrayList<Show>();
    monday = new ArrayList<Show>();
    tuesday = new ArrayList<Show>();
    wednesday = new ArrayList<Show>();
    thursday = new ArrayList<Show>();
    friday = new ArrayList<Show>();
    saturday = new ArrayList<Show>();

    // Pull every show off the Priority Queue and store them
    int size = pq.size();
    for (int i = 0; i < size; i++) {
        storeShow(pq.poll());
        Log.v(TAG, "PQ Count:" + pq.size());
    }

    // Fill date list
    dateList = new ArrayList<String>();
    cal.get(Calendar.DAY_OF_WEEK);
    //dateList.add();

    // Hook up pager and pagerAdapter and hide loading screen
    pager = (ViewPager) findViewById(R.id.schedule_pager);
    pagerAdapter = new SchedulePagerAdapter(getSupportFragmentManager());
    pager.setAdapter(pagerAdapter);
    showLoadingScreen(false);

}
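
Note the buffer-then-drain pattern here: shows are added to the queue in arbitrary order, and the poll() loop then removes them smallest-first under ShowComparator, so storeShow receives the shows already sorted.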

From source file:org.apache.hadoop.hdfs.server.namenode.FSEditLogTestUtil.java

/**
 * Find out how many transactions we can read from a
 * JournalManager, starting at a given transaction ID.
 *
 * @param jm              The journal manager
 * @param fromTxId        Transaction ID to start at
 * @param inProgressOk    Should we consider edit logs that are not finalized?
 * @param abortOnGap      Stop counting at the first gap in transaction IDs?
 * @return                The number of transactions
 * @throws IOException
 */
public static long getNumberOfTransactions(JournalManager jm, long fromTxId, boolean inProgressOk,
        boolean abortOnGap) throws IOException {
    long numTransactions = 0, txId = fromTxId;
    final PriorityQueue<EditLogInputStream> allStreams = new PriorityQueue<EditLogInputStream>(64,
            JournalSet.EDIT_LOG_INPUT_STREAM_COMPARATOR);
    jm.selectInputStreams(allStreams, fromTxId, inProgressOk, true);
    EditLogInputStream elis = null;
    try {
        while ((elis = allStreams.poll()) != null) {
            elis.skipUntil(txId);
            while (true) {
                FSEditLogOp op = elis.readOp();
                if (op == null) {
                    break;
                }
                if (abortOnGap && (op.getTransactionId() != txId)) {
                    TestFileJournalManager.LOG
                            .info("getNumberOfTransactions: detected gap at txId " + fromTxId);
                    return numTransactions;
                }
                txId = op.getTransactionId() + 1;
                numTransactions++;
            }
        }
    } finally {
        IOUtils.cleanup(FSEditLogTestUtil.LOG, allStreams.toArray(new EditLogInputStream[0]));
        IOUtils.cleanup(FSEditLogTestUtil.LOG, elis);
    }
    return numTransactions;
}
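
In this example the queue's job is stream selection: ordered by JournalSet.EDIT_LOG_INPUT_STREAM_COMPARATOR, each poll() hands back the edit log stream that should be read next; 64 is merely the initial capacity hint.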

From source file:org.apache.predictionio.examples.java.recommendations.tutorial1.Algorithm.java

private void setTopItemSimilarity(Map<Integer, Queue<IndexAndScore>> topItemSimilarity, Integer itemID1,
        Integer index2, double score, int capacity, Comparator<IndexAndScore> comparator) {
    Queue<IndexAndScore> queue = topItemSimilarity.get(itemID1);
    if (queue == null) {
        queue = new PriorityQueue<IndexAndScore>(capacity, comparator);
        topItemSimilarity.put(itemID1, queue);
    }
    IndexAndScore entry = new IndexAndScore(index2, score);
    if (queue.size() < capacity)
        queue.add(entry);
    else if (comparator.compare(queue.peek(), entry) < 0) {
        queue.poll();
        queue.add(entry);
    }
}
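
This is the classic bounded top-K pattern: the queue never grows past capacity, its head is the weakest entry kept so far, and a new candidate displaces the head only when the comparator ranks it higher. A standalone sketch of the same idea (the method and inputs are hypothetical, and k is assumed to be at least 1):

import java.util.Comparator;
import java.util.PriorityQueue;

public class TopKExample {
    // Keeps the k largest values from scores; the head of the min-heap is
    // always the weakest value currently kept.
    static PriorityQueue<Double> topK(double[] scores, int k) {
        PriorityQueue<Double> pq = new PriorityQueue<Double>(k, new Comparator<Double>() {
            public int compare(Double a, Double b) {
                return Double.compare(a, b); // natural order: smallest at the head
            }
        });
        for (double s : scores) {
            if (pq.size() < k) {
                pq.add(s);
            } else if (s > pq.peek()) {
                pq.poll();  // evict the current weakest
                pq.add(s);  // admit the stronger candidate
            }
        }
        return pq; // the k largest values, in heap (not sorted) order
    }
}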

From source file:com.github.wolfdogs.kemono.util.event.RootEventManager.java

@Override
public <T extends Event> void dispatchEvent(ThrowableHandler throwableHandler, T event, Object... objects) {
    if (throwableHandler == null)
        throwableHandler = DEFAULT_THROWABLE_HANDLER;
    if (objects.length == 0)
        objects = new Object[] { new Object() };

    Class<? extends Event> type = event.getClass();
    PriorityQueue<HandlerEntry> handlerEntryQueue = new PriorityQueue<HandlerEntry>(16,
            HANDLER_ENTRY_PRIORITY_COMPARATOR);

    Map<Object, Queue<HandlerEntry>> objectEntriesMap = handlerEntryContainersMap.get(type);
    if (objectEntriesMap == null)
        return;

    for (Object object : objects) {
        Class<?> cls = object.getClass();

        Queue<HandlerEntry> entries = objectEntriesMap.get(object);
        if (entries != null) {
            for (HandlerEntry entry : entries)
                handlerEntryQueue.add(entry);
        }

        Class<?>[] interfaces = cls.getInterfaces();
        for (Class<?> clz : interfaces) {
            Queue<HandlerEntry> classEntries = objectEntriesMap.get(clz);
            if (classEntries != null) {
                for (HandlerEntry entry : classEntries)
                    handlerEntryQueue.add(entry);
            }
        }

        for (Class<?> clz = cls; clz != null; clz = clz.getSuperclass()) {
            Queue<HandlerEntry> classEntries = objectEntriesMap.get(clz);
            if (classEntries != null) {
                for (HandlerEntry entry : classEntries)
                    handlerEntryQueue.add(entry);
            }
        }
    }

    while (!handlerEntryQueue.isEmpty() && !event.isInterrupted()) {
        HandlerEntry entry = handlerEntryQueue.poll();
        EventHandler handler = entry.getHandler();

        if (handler == null)
            continue;

        try {
            handler.handleEvent(event);
        } catch (Throwable e) {
            throwableHandler.handleThrowable(e);
        }
    }
}
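
The small capacity hint (16) is intentional: the queue grows as handler entries are collected from the object itself, its interfaces, and its superclass chain, and HANDLER_ENTRY_PRIORITY_COMPARATOR then guarantees that handlers are polled, and therefore invoked, in priority order.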

From source file:org.eclipse.titanium.graph.gui.layouts.BaseHierarchicalLayout.java

/**
 * Orders the nodes along the X axis. Tries to identify the optimal X
 * position for each node according to its distance to its predecessor
 * nodes.
 */
@SuppressWarnings("unchecked")
protected void organizeNodes() {
    final Map<V, Integer> nodeLevels = alg.getLevels();
    final int[] nodesPerLevel = alg.getNumberOfNodesPerLevel();
    final int noLevels = alg.getNumberOfLevels();
    final Set<V> isolateNodes = alg.getIsolateNodes();
    final double cellHeight = (double) size.height / noLevels;
    double[] cellWidths = new double[noLevels];
    Set<Integer>[] freePlaces = new HashSet[noLevels];
    final int baseLevel = isolateNodes.isEmpty() ? 0 : 1;
    Queue<V>[] levels = new PriorityQueue[noLevels];

    for (int i = 0; i < noLevels; ++i) {
        levels[i] = new PriorityQueue<V>(nodesPerLevel[i], nodeComparator);
    }

    // build an array that contains the nodes ordered separated by the
    // levels
    for (final Map.Entry<V, Integer> entry : nodeLevels.entrySet()) {
        levels[entry.getValue()].add(entry.getKey());
    }

    // set all cells free inside the rows
    for (int i = 0; i < noLevels; i++) {
        cellWidths[i] = (double) size.width / nodesPerLevel[i];
        freePlaces[i] = new HashSet<Integer>();
        for (int actCell = 0; actCell < nodesPerLevel[i]; ++actCell) {
            freePlaces[i].add(actCell);
        }
    }

    // place first isolate nodes (if there is any)
    int noPlacedElems = 0;
    for (final V v : isolateNodes) {
        final double actHeight = cellHeight * INNER_NODE_POSITION_RATIO_Y;
        final double actXPos = cellWidths[0] * ((noPlacedElems++) + INNER_NODE_POSITION_RATIO_X);
        final Point2D p = new Point2D.Double(actXPos, actHeight);
        places.put(v, p);
    }

    if (baseLevel >= noLevels && noPlacedElems != 0) {
        return;
    }

    // place the initial first row's nodes
    noPlacedElems = 0;
    for (final V v : levels[baseLevel]) {
        final double actHeight = cellHeight * (baseLevel + INNER_NODE_POSITION_RATIO_Y);
        final double actXPos = cellWidths[baseLevel] * ((noPlacedElems++) + INNER_NODE_POSITION_RATIO_X);
        places.put(v, new Point2D.Double(actXPos, actHeight));
    }

    boolean badDistance = false;
    if (!distanceAlgorithm.equals(MAX_DISTANCE_ALGORITHM)
            && !distanceAlgorithm.equals(SUM_DISTANCE_ALGORITHM)) {
        errorHandler.reportBadSetting("Distance algorithm error",
                "Not existing distance algorithm is set, for details see the preference page"
                        + "\n (DistanceAlgorithm=" + distanceAlgorithm + ")",
                "Open preference page", "org.eclipse.titanium.preferences.pages.GraphPreferencePage");
        badDistance = true;
    }

    // set optimal place for inner rows
    for (int actLevel = baseLevel + 1; actLevel < noLevels; ++actLevel) {
        final double actHeight = cellHeight * (actLevel + INNER_NODE_POSITION_RATIO_Y);
        noPlacedElems = 0;
        for (final V v : levels[actLevel]) {
            if (!badDistance) {
                places.put(v, new Point2D.Double(
                        getBestXPosition(v, freePlaces[actLevel], cellWidths[actLevel]), actHeight));
            } else {
                places.put(v, new Point2D.Double(
                        cellWidths[actLevel] * ((noPlacedElems++) + INNER_NODE_POSITION_RATIO_X), actHeight));
            }
        }
    }
}
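
A caveat when reading this example: the for-each loops over levels[baseLevel] and levels[actLevel] traverse the queue's backing heap array, and java.util.PriorityQueue's iterator makes no ordering guarantee; only poll(), peek(), and remove() honor nodeComparator. If strict comparator order were required during placement, the queue would have to be drained with poll() instead.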

From source file:org.pepstock.jem.ant.tasks.utilities.SortTask.java

/**
 * This merges a bunch of temporary flat files into a single sorted output.
 *
 * @param files the temporary sorted files to merge
 * @param fileOutput the output stream for the merged result
 * @param cmp the comparator used to order the lines
 * @param cs the character set used to decode the files
 * @return The number of lines sorted.
 * @throws IOException
 */
public static int mergeSortedFiles(List<File> files, FileOutputStream fileOutput, final Comparator<String> cmp,
        Charset cs) throws IOException {
    PriorityQueue<BinaryFileBuffer> pq = new PriorityQueue<BinaryFileBuffer>(11,
            new Comparator<BinaryFileBuffer>() {
                public int compare(BinaryFileBuffer i, BinaryFileBuffer j) {
                    return cmp.compare(i.peek(), j.peek());
                }
            });
    for (File f : files) {
        BinaryFileBuffer bfb = new BinaryFileBuffer(f, cs);
        pq.add(bfb);
    }
    BufferedWriter fbw = new BufferedWriter(new OutputStreamWriter(fileOutput, cs));
    int rowcounter = 0;
    try {
        while (!pq.isEmpty()) {
            BinaryFileBuffer bfb = pq.poll();
            String r = bfb.pop();
            fbw.write(r);
            fbw.newLine();
            ++rowcounter;
            if (bfb.empty()) {
                bfb.getBufferReader().close();
                // we don't need you anymore
                boolean isDeleted = bfb.getOriginalfile().delete();
                if (!isDeleted) {
                    // nop
                }
            } else {
                // add it back
                pq.add(bfb);
            }
        }
    } finally {
        fbw.flush();
        fbw.close();
        for (BinaryFileBuffer bfb : pq) {
            bfb.close();
        }
    }
    return rowcounter;
}
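
This is a textbook k-way merge: the head of the queue is always the buffer whose next line is smallest under cmp, so emitting each of the n output lines costs O(log k) heap work, where k is the number of temporary files.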

From source file:org.apache.hadoop.hdfs.server.namenode.FSEditLogTestUtil.java

public static EditLogInputStream getJournalInputStream(JournalManager jm, long txId, boolean inProgressOk)
        throws IOException {
    final PriorityQueue<EditLogInputStream> allStreams = new PriorityQueue<EditLogInputStream>(64,
            JournalSet.EDIT_LOG_INPUT_STREAM_COMPARATOR);
    jm.selectInputStreams(allStreams, txId, inProgressOk, true);
    EditLogInputStream elis = null, ret;
    try {
        while ((elis = allStreams.poll()) != null) {
            if (elis.getFirstTxId() > txId) {
                break;
            }
            if (elis.getLastTxId() < txId) {
                elis.close();
                continue;
            }
            elis.skipUntil(txId);
            ret = elis;
            elis = null;
            return ret;
        }
    } finally {
        IOUtils.cleanup(FSEditLogTestUtil.LOG, allStreams.toArray(new EditLogInputStream[0]));
        IOUtils.cleanup(FSEditLogTestUtil.LOG, elis);
    }
    return null;
}
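
The early break when elis.getFirstTxId() > txId relies on the comparator ordering streams by their first transaction ID: once the head stream starts past txId, none of the remaining streams can contain it.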

From source file:edu.stanford.cfuller.colocalization3d.correction.PositionCorrector.java

/**
* Creates a correction from a set of objects whose positions should be the same in each channel.
*
* @param imageObjects                  A List containing all the ImageObjects to be used for the correction,
*                                      in the order in which they appear in a multiwavelength image file.
* @return                              A Correction object that can be used to correct the positions of other objects based upon the standards provided.
*/
public Correction getCorrection(java.util.List<ImageObject> imageObjects) {

    int referenceChannel = this.parameters.getIntValueForKey(REF_CH_PARAM);

    int channelToCorrect = this.parameters.getIntValueForKey(CORR_CH_PARAM);

    if (!this.parameters.hasKeyAndTrue(DET_CORR_PARAM)) {
        try {
            return Correction.readFromDisk(FileUtils.getCorrectionFilename(this.parameters));
        } catch (java.io.IOException e) {

            java.util.logging.Logger
                    .getLogger(edu.stanford.cfuller.colocalization3d.Colocalization3DMain.LOGGER_NAME)
                    .severe("Exception encountered while reading correction from disk: ");
            e.printStackTrace();

        } catch (ClassNotFoundException e) {

            java.util.logging.Logger
                    .getLogger(edu.stanford.cfuller.colocalization3d.Colocalization3DMain.LOGGER_NAME)
                    .severe("Exception encountered while reading correction from disk: ");
            e.printStackTrace();

        }

        return null;
    }

    int numberOfPointsToFit = this.parameters.getIntValueForKey(NUM_POINT_PARAM);

    RealMatrix correctionX = new Array2DRowRealMatrix(imageObjects.size(), numberOfCorrectionParameters);
    RealMatrix correctionY = new Array2DRowRealMatrix(imageObjects.size(), numberOfCorrectionParameters);
    RealMatrix correctionZ = new Array2DRowRealMatrix(imageObjects.size(), numberOfCorrectionParameters);

    RealVector distanceCutoffs = new ArrayRealVector(imageObjects.size(), 0.0);

    RealVector ones = new ArrayRealVector(numberOfPointsToFit, 1.0);

    RealVector distancesToObjects = new ArrayRealVector(imageObjects.size(), 0.0);

    RealMatrix allCorrectionParametersMatrix = new Array2DRowRealMatrix(numberOfPointsToFit,
            numberOfCorrectionParameters);

    for (int i = 0; i < imageObjects.size(); i++) {

        RealVector ithPos = imageObjects.get(i).getPositionForChannel(referenceChannel);

        for (int j = 0; j < imageObjects.size(); j++) {

            double d = imageObjects.get(j).getPositionForChannel(referenceChannel).subtract(ithPos).getNorm();

            distancesToObjects.setEntry(j, d);

        }

        //the sorting becomes a bottleneck once the number of points gets large

        //reverse comparator so we can use the priority queue and get the max element at the head

        Comparator<Double> cdReverse = new Comparator<Double>() {

            public int compare(Double o1, Double o2) {

                if (o1.equals(o2))
                    return 0;
                if (o1 > o2)
                    return -1;
                return 1;
            }

        };

        PriorityQueue<Double> pq = new PriorityQueue<Double>(numberOfPointsToFit + 2, cdReverse);

        double maxElement = Double.MAX_VALUE;

        for (int p = 0; p < numberOfPointsToFit + 1; p++) {

            pq.add(distancesToObjects.getEntry(p));

        }

        maxElement = pq.peek();

        for (int p = numberOfPointsToFit + 1; p < distancesToObjects.getDimension(); p++) {

            double value = distancesToObjects.getEntry(p);

            if (value < maxElement) {

                pq.poll();

                pq.add(value);

                maxElement = pq.peek();

            }

        }

        double firstExclude = pq.poll();
        double lastDist = pq.poll();

        double distanceCutoff = (lastDist + firstExclude) / 2.0;

        distanceCutoffs.setEntry(i, distanceCutoff);

        RealVector xPositionsToFit = new ArrayRealVector(numberOfPointsToFit, 0.0);
        RealVector yPositionsToFit = new ArrayRealVector(numberOfPointsToFit, 0.0);
        RealVector zPositionsToFit = new ArrayRealVector(numberOfPointsToFit, 0.0);

        RealMatrix differencesToFit = new Array2DRowRealMatrix(numberOfPointsToFit,
                imageObjects.get(0).getPositionForChannel(referenceChannel).getDimension());

        int toFitCounter = 0;

        for (int j = 0; j < imageObjects.size(); j++) {
            if (distancesToObjects.getEntry(j) < distanceCutoff) {
                xPositionsToFit.setEntry(toFitCounter,
                        imageObjects.get(j).getPositionForChannel(referenceChannel).getEntry(0));
                yPositionsToFit.setEntry(toFitCounter,
                        imageObjects.get(j).getPositionForChannel(referenceChannel).getEntry(1));
                zPositionsToFit.setEntry(toFitCounter,
                        imageObjects.get(j).getPositionForChannel(referenceChannel).getEntry(2));

                differencesToFit.setRowVector(toFitCounter, imageObjects.get(j)
                        .getVectorDifferenceBetweenChannels(referenceChannel, channelToCorrect));

                toFitCounter++;
            }
        }

        RealVector x = xPositionsToFit.mapSubtractToSelf(ithPos.getEntry(0));
        RealVector y = yPositionsToFit.mapSubtractToSelf(ithPos.getEntry(1));

        allCorrectionParametersMatrix.setColumnVector(0, ones);
        allCorrectionParametersMatrix.setColumnVector(1, x);
        allCorrectionParametersMatrix.setColumnVector(2, y);
        allCorrectionParametersMatrix.setColumnVector(3, x.map(new Power(2)));
        allCorrectionParametersMatrix.setColumnVector(4, y.map(new Power(2)));
        allCorrectionParametersMatrix.setColumnVector(5, x.ebeMultiply(y));

        DecompositionSolver solver = (new QRDecomposition(allCorrectionParametersMatrix)).getSolver();

        RealVector cX = solver.solve(differencesToFit.getColumnVector(0));
        RealVector cY = solver.solve(differencesToFit.getColumnVector(1));
        RealVector cZ = solver.solve(differencesToFit.getColumnVector(2));

        correctionX.setRowVector(i, cX);
        correctionY.setRowVector(i, cY);
        correctionZ.setRowVector(i, cZ);

    }

    Correction c = new Correction(correctionX, correctionY, correctionZ, distanceCutoffs, imageObjects,
            referenceChannel, channelToCorrect);

    return c;

}
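
The reversed comparator turns the queue into a max-heap, so peek() exposes the largest of the numberOfPointsToFit + 1 smallest distances seen so far, and a closer point can displace it in O(log k) time; the two final polls then yield the first excluded distance and the last included one, whose midpoint becomes the cutoff.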

From source file:org.apache.hadoop.mapred.PoolFairnessCalculator.java

/**
 * This method takes a list of {@link PoolMetadata} objects and calculates
 * fairness metrics of how well scheduling is doing.
 *
 * The goal of fair scheduling is to ensure that every pool gets an equal
 * share.  Computing the expected share for each pool is complicated by
 * pools that do not require an equal share and by pools that have a
 * minimum or maximum allocation of resources.
 *
 * @param poolMetadataList List of all pool metadata
 * @param metricsRecord Where to write the metrics
 */
public static void calculateFairness(final List<PoolMetadata> poolMetadataList,
        final MetricsRecord metricsRecord) {
    if (poolMetadataList == null || poolMetadataList.isEmpty()) {
        return;
    }

    // Find the total available usage and guaranteed resources by resource
    // type.  Add the resource metadata to the sorted set to schedule if
    // there is something to schedule (desiredAfterConstraints > 0)
    long startTime = System.currentTimeMillis();
    Map<String, TotalResourceMetadata> resourceTotalMap = new HashMap<String, TotalResourceMetadata>();
    Map<String, Set<ResourceMetadata>> resourceSchedulablePoolMap = new HashMap<String, Set<ResourceMetadata>>();
    for (PoolMetadata poolMetadata : poolMetadataList) {
        for (String resourceName : poolMetadata.getResourceMetadataKeys()) {
            ResourceMetadata resourceMetadata = poolMetadata.getResourceMetadata(resourceName);
            TotalResourceMetadata totalResourceMetadata = resourceTotalMap.get(resourceName);
            if (totalResourceMetadata == null) {
                totalResourceMetadata = new TotalResourceMetadata();
                resourceTotalMap.put(resourceName, totalResourceMetadata);
            }
            totalResourceMetadata.totalAvailable += resourceMetadata.getCurrentlyUsed();

            Set<ResourceMetadata> schedulablePoolSet = resourceSchedulablePoolMap.get(resourceName);
            if (schedulablePoolSet == null) {
                schedulablePoolSet = new HashSet<ResourceMetadata>();
                resourceSchedulablePoolMap.put(resourceName, schedulablePoolSet);
            }
            if (resourceMetadata.getDesiredAfterConstraints() > 0) {
                if (!schedulablePoolSet.add(resourceMetadata)) {
                    throw new RuntimeException(
                            "Duplicate resource metadata " + resourceMetadata + " in " + schedulablePoolSet);
                }
            }
        }
    }

    // First, allocate resources for all the min guaranteed resources
    // for the pools.  Ordering is done by the largest
    // min(min guaranteed, desired).
    GuaranteedDesiredComparator guaranteedDesiredComparator = new GuaranteedDesiredComparator();
    List<ResourceMetadata> removePoolList = new ArrayList<ResourceMetadata>();
    for (Map.Entry<String, TotalResourceMetadata> entry : resourceTotalMap.entrySet()) {
        List<ResourceMetadata> resourceMetadataList = new ArrayList<ResourceMetadata>(
                resourceSchedulablePoolMap.get(entry.getKey()));
        TotalResourceMetadata totalResourceMetadata = entry.getValue();
        Collections.sort(resourceMetadataList, guaranteedDesiredComparator);
        while ((totalResourceMetadata.totalAllocated < totalResourceMetadata.totalAvailable)
                && !resourceMetadataList.isEmpty()) {
            removePoolList.clear();
            for (ResourceMetadata resourceMetadata : resourceMetadataList) {
                if (resourceMetadata.getExpectedUsed() == resourceMetadata.getGuaranteedUsedAndDesired()) {
                    removePoolList.add(resourceMetadata);
                    continue;
                }
                resourceMetadata.incrExpectedUsed();
                ++totalResourceMetadata.totalAllocated;
            }
            resourceMetadataList.removeAll(removePoolList);
        }
        LOG.info("After allocating min guaranteed and desired - " + "Resource type " + entry.getKey()
                + " totalAvailable=" + totalResourceMetadata.totalAvailable + ", totalAllocated="
                + totalResourceMetadata.totalAllocated);
    }

    // At this point, all pools have been allocated their guaranteed used and
    // desired resources.  If there are any more resources to allocate, give
    // resources to lowest allocated pool that hasn't reached desired
    // until all the resources are gone
    ExpectedUsedComparator expectedUsedComparator = new ExpectedUsedComparator();
    PriorityQueue<ResourceMetadata> minHeap = new PriorityQueue<ResourceMetadata>(100, expectedUsedComparator);
    for (Map.Entry<String, TotalResourceMetadata> entry : resourceTotalMap.entrySet()) {
        minHeap.addAll(resourceSchedulablePoolMap.get(entry.getKey()));
        TotalResourceMetadata totalResourceMetadata = entry.getValue();
        while ((totalResourceMetadata.totalAllocated < totalResourceMetadata.totalAvailable)
                && !minHeap.isEmpty()) {
            ResourceMetadata resourceMetadata = minHeap.remove();
            if (resourceMetadata.getExpectedUsed() == resourceMetadata.getDesiredAfterConstraints()) {
                continue;
            }
            resourceMetadata.incrExpectedUsed();
            ++totalResourceMetadata.totalAllocated;
            minHeap.add(resourceMetadata);
        }
        minHeap.clear();
    }

    // Now calculate the difference of the expected allocation and the
    // actual allocation to get the following metrics.  When calculating
    // the percent bad allocated divide by 2 because the difference double
    // counts a bad allocation
    // 1) total tasks difference between expected and actual allocation
    //    0 is totally fair, higher is less fair
    // 2) % of tasks incorrectly allocated
    //    0 is totally fair, higher is less fair
    // 3) average difference per pool
    //    0 is totally fair, higher is less fair
    // 4) standard deviation per pool
    //    0 is totally fair, higher is less fair
    for (PoolMetadata poolMetadata : poolMetadataList) {
        for (String resourceName : poolMetadata.getResourceMetadataKeys()) {
            ResourceMetadata resourceMetadata = poolMetadata.getResourceMetadata(resourceName);
            int diff = Math.abs(resourceMetadata.getExpectedUsed() - resourceMetadata.getCurrentlyUsed());
            LOG.info("Pool " + poolMetadata.getPoolName() + ", resourceName=" + resourceName + ", expectedUsed="
                    + resourceMetadata.getExpectedUsed() + ", currentUsed="
                    + resourceMetadata.getCurrentlyUsed() + ", maxAllowed=" + resourceMetadata.getMaxAllowed()
                    + ", desiredAfterConstraints=" + resourceMetadata.getDesiredAfterConstraints()
                    + ", guaranteedUsedAndDesired=" + resourceMetadata.getGuaranteedUsedAndDesired() + ", diff="
                    + diff);
            resourceTotalMap.get(resourceName).totalFairnessDifference += diff;
            resourceTotalMap.get(resourceName).totalFairnessDifferenceSquared += diff * diff;
        }
    }
    TotalResourceMetadata allResourceMetadata = new TotalResourceMetadata();
    allResourceMetadata.resourceTypeCount = resourceTotalMap.size();
    for (TotalResourceMetadata totalResourceMetadata : resourceTotalMap.values()) {
        allResourceMetadata.totalAvailable += totalResourceMetadata.totalAvailable;
        allResourceMetadata.totalFairnessDifference += totalResourceMetadata.totalFairnessDifference;
        allResourceMetadata.totalFairnessDifferenceSquared += totalResourceMetadata.totalFairnessDifferenceSquared;
    }
    resourceTotalMap.put("all", allResourceMetadata);
    StringBuilder metricsBuilder = new StringBuilder();
    for (Map.Entry<String, TotalResourceMetadata> entry : resourceTotalMap.entrySet()) {
        TotalResourceMetadata totalResourceMetadata = entry.getValue();
        totalResourceMetadata.percentUnfair = (totalResourceMetadata.totalAvailable == 0) ? 0
                : totalResourceMetadata.totalFairnessDifference * 100f / 2
                        / totalResourceMetadata.totalAvailable;
        totalResourceMetadata.stdDevUnfair = (float) Math
                .sqrt((double) totalResourceMetadata.totalFairnessDifferenceSquared / poolMetadataList.size()
                        / totalResourceMetadata.resourceTypeCount);
        totalResourceMetadata.averageUnfairPerPool = (float) totalResourceMetadata.totalFairnessDifference
                / poolMetadataList.size() / totalResourceMetadata.resourceTypeCount;

        metricsRecord.setMetric(FAIRNESS_DIFFERENCE_COUNT_PREFIX + entry.getKey(),
                totalResourceMetadata.totalFairnessDifference);
        metricsBuilder.append(FAIRNESS_DIFFERENCE_COUNT_PREFIX + entry.getKey() + "="
                + totalResourceMetadata.totalFairnessDifference + "\n");
        metricsRecord.setMetric(FAIRNESS_PERCENT_UNFAIR_PREFIX + entry.getKey(),
                totalResourceMetadata.percentUnfair);
        metricsBuilder.append(FAIRNESS_PERCENT_UNFAIR_PREFIX + entry.getKey() + "="
                + totalResourceMetadata.percentUnfair + "\n");
        metricsRecord.setMetric(FAIRNESS_DIFFERENCE_PER_POOL_PREFIX + entry.getKey(),
                totalResourceMetadata.averageUnfairPerPool);
        metricsBuilder.append(FAIRNESS_DIFFERENCE_PER_POOL_PREFIX + entry.getKey() + "="
                + totalResourceMetadata.averageUnfairPerPool + "\n");
        metricsRecord.setMetric(FAIRNESS_UNFAIR_STD_DEV_PERFIX + entry.getKey(),
                totalResourceMetadata.stdDevUnfair);
        metricsBuilder.append(FAIRNESS_UNFAIR_STD_DEV_PERFIX + entry.getKey() + "="
                + totalResourceMetadata.stdDevUnfair + "\n");
        metricsBuilder.append(
                TOTAL_RESOURCES_PREFIX + entry.getKey() + "=" + totalResourceMetadata.totalAvailable + "\n");
    }

    if (LOG.isInfoEnabled()) {
        LOG.info("calculateFairness took " + (System.currentTimeMillis() - startTime) + " millisecond(s).");
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug("\n" + metricsBuilder.toString());
    }
}
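
The min-heap ordered by ExpectedUsedComparator always surfaces the least-allocated pool at its head; removing that pool, incrementing its expected share, and re-inserting it keeps the heap consistent, so each remaining resource unit goes to whichever pool is currently furthest below its desired allocation.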

From source file:bme.iclef.weka.featureselection.InfoGain.java

public AttributeInfoGain[] topAttributes(final int n) {
    Queue<AttributeInfoGain> all = new PriorityQueue<AttributeInfoGain>(m_InfoGains.length,
            new Comparator<AttributeInfoGain>() {
                @Override
                public int compare(AttributeInfoGain o1, AttributeInfoGain o2) {
                    return Double.compare(o2.infoGain, o1.infoGain); // descending
                }
            });
    for (int i = 0; i < m_InfoGains.length; i++)
        all.add(new AttributeInfoGain(i, m_InfoGains[i]));
    AttributeInfoGain[] best = new AttributeInfoGain[n];
    for (int i = 0; i < best.length; i++) {
        best[i] = all.remove();
    }
    return best;
}
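
Because every attribute is inserted before any is removed, the queue here holds all m_InfoGains.length entries. When n is much smaller than the number of attributes, a capacity-bounded queue that evicts its head, as in the recommendations example above, would avoid building the full heap.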