Example usage for java.util IdentityHashMap IdentityHashMap

List of usage examples for java.util IdentityHashMap IdentityHashMap

Introduction

On this page you can find example usage for the java.util.IdentityHashMap constructor IdentityHashMap(Map).

Prototype

public IdentityHashMap(Map<? extends K, ? extends V> m) 

Document

Constructs a new identity hash map containing the key-value mappings in the specified map.
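A minimal standalone sketch (not taken from the usage listings below) of what this copy constructor does: the mappings are copied from the source map, but the resulting IdentityHashMap compares keys by reference (==) rather than by equals(), so an equal-but-distinct key no longer matches.

import java.util.HashMap;
import java.util.IdentityHashMap;
import java.util.Map;

public class IdentityHashMapCopyExample {
    public static void main(String[] args) {
        Map<String, Integer> source = new HashMap<>();
        String key = "answer";
        source.put(key, 42);

        // Copy all mappings from source; lookups now use reference equality (==).
        Map<String, Integer> identityCopy = new IdentityHashMap<>(source);

        System.out.println(identityCopy.get(key));                  // 42 (same object reference)
        System.out.println(identityCopy.get(new String("answer"))); // null (equal, but a distinct object)
    }
}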

Usage

From source file:gov.nasa.jpf.constraints.solvers.cw.CWSolver.java

private Result solveWithAdaptiveVariableSearch(Expression<Boolean> linearConstraint, Valuation linearSolution,
        Expression<Boolean> nonlinearConstraint, Valuation result) {

    // Partially based on the paper "Yet Another Local Search Method for Constraint Solving"
    // by Philippe Codognet and Daniel Diaz, 2001.

    // NOTE: End-of-line comments give the variable names and line numbers
    //       used in Algorithms 1 and 2 of the paper.

    // Interpret every variable in the path condition as a dimension
    // in a real-valued vector space.
    Set<Variable<?>> vars = union(ExpressionUtil.freeVariables(linearConstraint),
            ExpressionUtil.freeVariables(nonlinearConstraint));
    RealVectorSpace vectorSpace = RealVectorSpace.forDimensions(vars);
    int numberOfVariables = vectorSpace.dimensions().size();

    RealVector p = makeVectorFromSolutions(vectorSpace, linearSolution); // Alg. 1: \alpha, line 4

    printDebug(CWSolver.class, "Linear PC: ", linearConstraint);
    printDebug(CWSolver.class, "Linear PC solution: ", p);
    printDebug(CWSolver.class, "Solving non-linear PC\n", nonlinearConstraint);

    List<Expression<Boolean>> nonLinearConstraintsLst = ExpressionClassifier
            .splitToConjuncts(nonlinearConstraint);

    // Initialize lookup tables and local variables
    Map<Expression<Boolean>, BitSet> variableIndicesByConstraint = new IdentityHashMap<>(
            nonLinearConstraintsLst.size());
    @SuppressWarnings("unchecked")
    List<Expression<Boolean>>[] constraintsByVariableIndex = new List[numberOfVariables];
    populateLookupTables(vectorSpace, nonLinearConstraintsLst, constraintsByVariableIndex,
            variableIndicesByConstraint);

    double[] errorByVariables = new double[numberOfVariables]; // Alg. 1: \epsilon
    int[] tabuVariables = new int[numberOfVariables]; // Alg. 1: \tau

    int iterationCount = 1; // i
    int iterationLimit = nonLinearConstraintsLst.size() * ITERATIONS_PER_CONSTRAINT; // Alg. 1: I

    // Iterate as long as the non-linear constraint is not satisfied
    while (!nonlinearConstraint.evaluate(p.convertToJconstraintsValuation())) { // Alg. 1: line 7
        if (iterationCount > iterationLimit) { // Alg. 1: line 8
            printDebug(CWSolver.class, "Could not find solution within ", iterationLimit, " iterations");
            return Result.DONT_KNOW;
        }
        ++iterationCount; // Alg. 1: line 9

        // Compute errors
        double errorAtP = 0.0; // Alg. 1: e_\alpha
        Arrays.fill(errorByVariables, 0.0); // Alg. 1: line 14
        for (Expression<Boolean> c : nonLinearConstraintsLst) { // Alg. 1: lines 15--20
            if (c instanceof PropositionalCompound) {
                System.out.println("propositional compound. Skipping");
                continue;
            }

            //TODO: fix the list; it must be composed of NumericBooleanExpressions (stronger type)
            if (!(c instanceof NumericBooleanExpression))
                throw new IllegalStateException("constraint must be " + NumericBooleanExpression.class.getName()
                        + " and not of type " + c.getClass().getName());

            NumericBooleanExpression nc = (NumericBooleanExpression) c;

            double e = computeError(nc, p.convertToJconstraintsValuation());
            errorAtP += e;
            incrementElements(errorByVariables, variableIndicesByConstraint.get(c), e);
        }
        printDebug(CWSolver.class, "p = ", p, " -> error ", errorAtP);

        // Try to find a better solution by modifying the "worst" non-tabu variable
        int wiggleVarIndex = indexOfMaxIgnoringTabu(errorByVariables, tabuVariables);
        if (wiggleVarIndex == -1) { // All variables might be tabu,  Alg. 1: lines 10--13
            for (int i = 0; i < tabuVariables.length; ++i) {
                p = makeRandomNeighborInPolytope(p, linearConstraint, vectorSpace.dimensions().get(i));
                if (p == null) { //no point could be found, so dont_know?
                    return Result.DONT_KNOW;
                }
                tabuVariables[i] = 0;
            }
            printDebug(CWSolver.class, "All variables are tabu.  Took random step to ", p);
            continue;
        }
        Variable<?> wiggleVar = vectorSpace.dimensions().get(wiggleVarIndex); // Alg. 1: x, line 21
        printDebug(CWSolver.class, "Wiggling ", wiggleVar);

        // Find best neighbor (Algorithm 3)

        double minError = Double.POSITIVE_INFINITY; // Alg. 3: e_\mu
        RealVector minNeighbor = null;
        for (int i = 0; i < NEIGHBORS_GENERATED_PER_ITERATION; ++i) {
            RealVector q = makeRandomNeighborInPolytope(p, linearConstraint, wiggleVar); // Alg. 3: \beta
            RealVector r = null; // Alg. 3: \gamma

            if (q == null) { // No random neighbor could be found
                break;
            }

            double errorAtQ = computeError(nonLinearConstraintsLst, q.convertToJconstraintsValuation()); // Alg. 3: e_\beta, line 7
            double errorAtR = Double.POSITIVE_INFINITY; // Alg. 3: e_\gamma

            if (ENABLE_BISECTION && constraintsByVariableIndex[wiggleVarIndex] != null) { // Alg. 3: line 5
                // Pick a random unsatisfied constraint
                List<Expression<Boolean>> constraintsForVar = new ArrayList<>(
                        constraintsByVariableIndex[wiggleVarIndex]);
                Collections.shuffle(constraintsForVar);
                Expression<Boolean> constraint = null;
                for (int k = 0; k < constraintsForVar.size(); ++k) {
                    constraint = constraintsForVar.get(k);

                    boolean sat = constraint.evaluate(p.convertToJconstraintsValuation());
                    if (!sat) {
                        break;
                    }
                }
                Number valueAtP = evaluateAndSubtractSides((NumericBooleanExpression) constraint,
                        p.convertToJconstraintsValuation());
                Number valueAtQ = evaluateAndSubtractSides((NumericBooleanExpression) constraint,
                        q.convertToJconstraintsValuation());
                r = linearlyEstimateZero(p, valueAtP, q, valueAtQ,
                        ((NumericBooleanExpression) constraint).getComparator()); // Alg. 3: line 6

                boolean sat = linearConstraint.evaluate(r.convertToJconstraintsValuation());

                if (sat) {
                    errorAtR = computeError(nonLinearConstraintsLst, r.convertToJconstraintsValuation());
                }
            }

            printDebug(CWSolver.class, "Random neighbors");
            printDebug(CWSolver.class, "    q = ", q, " -> error ", errorAtQ);
            printDebug(CWSolver.class, "    r = ", r, " -> error ", errorAtR);

            if (errorAtQ < minError) { // Alg. 3: lines 9--12
                minError = errorAtQ;
                minNeighbor = q;
            }
            if (errorAtR < minError) { // Alg. 3: lines 13--16
                minError = errorAtR;
                minNeighbor = r;
            }
        }

        if (ENABLE_SEEDING) {
            for (double seed : SEEDED_VALUES) {
                RealVector s = vectorSpace.makeVector(p).set(wiggleVar, seed).build();
                double errorAtS = computeError(nonLinearConstraintsLst, s.convertToJconstraintsValuation());

                boolean sat = linearConstraint.evaluate(s.convertToJconstraintsValuation());
                if (sat && errorAtS < minError) {
                    minError = errorAtS;
                    minNeighbor = s;
                }
            }
        }

        if (minError < errorAtP) { // Alg. 1: lines 23--27
            printDebug(CWSolver.class, "Found new neighbor");
            p = minNeighbor;
            decrementElements(tabuVariables);
        } else { // Alg 1: lines 27--29
            printDebug(CWSolver.class, "Could not find better neighbor");
            tabuVariables[wiggleVarIndex] = Math.max(
                    Math.round(TABU_ITERATIONS_PER_VARIABLE * vectorSpace.dimensions().size()),
                    MIN_TABU_ITERATIONS);
            printDebug(CWSolver.class, "Tabu ", Arrays.toString(tabuVariables));
        }
    }
    printDebug(CWSolver.class, "Found solution: ", p);
    for (Variable<?> v : vars) {
        double vectorVal = p.get(v);
        if (v.getType() instanceof IntegerType<?>) {
            result.setValue((Variable<Integer>) v, (int) vectorVal);
        } else if (v.getType() instanceof RealType<?>) {
            result.setValue((Variable<Double>) v, vectorVal);
        }
    }
    System.out.println("cwsolver solution: " + result.toString());
    return Result.SAT;
}

From source file:org.onosproject.store.hz.SMap.java

private Set<byte[]> serializeKeySet(Set<K> keys) {
    Set<byte[]> sk = Collections.newSetFromMap(new IdentityHashMap<byte[], Boolean>(keys.size()));
    for (K key : keys) {
        sk.add(serializeKey(key));
    }
    return sk;
}

From source file:org.jiemamy.utils.collection.CollectionsUtil.java

/**
 * Creates and returns a new instance of {@link IdentityHashMap}.
 * 
 * @param <K> the key type of the {@link IdentityHashMap}
 * @param <V> the value type of the {@link IdentityHashMap}
 * @param expectedMaxSize the expected maximum size of the map
 * @return a new {@link IdentityHashMap} instance
 * @throws IllegalArgumentException if <tt>expectedMaxSize</tt> is negative
 * @see IdentityHashMap#IdentityHashMap(int)
 */
public static <K, V> IdentityHashMap<K, V> newIdentityHashMap(int expectedMaxSize) {
    return new IdentityHashMap<K, V>(expectedMaxSize);
}
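
A minimal hypothetical call of this factory (assuming CollectionsUtil from the listing above is on the classpath; the variable names are illustrative only). The type parameters are inferred from the assignment target, which is the point of the factory on pre-diamond Java:

IdentityHashMap<Object, String> labels = CollectionsUtil.newIdentityHashMap(16);
Object node = new Object();
labels.put(node, "visited");
System.out.println(labels.get(node)); // prints "visited"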

From source file:org.apache.hadoop.mapred.JobInProgress.java

private Map<Node, List<TaskInProgress>> createCache(TaskSplitMetaInfo[] splits, int maxLevel)
        throws UnknownHostException {
    Map<Node, List<TaskInProgress>> cache = new IdentityHashMap<Node, List<TaskInProgress>>(maxLevel);

    Set<String> uniqueHosts = new TreeSet<String>();
    for (int i = 0; i < splits.length; i++) {
        String[] splitLocations = splits[i].getLocations();
        if (splitLocations == null || splitLocations.length == 0) {
            nonLocalMaps.add(maps[i]);
            continue;
        }

        for (String host : splitLocations) {
            Node node = jobtracker.resolveAndAddToTopology(host);
            uniqueHosts.add(host);
            LOG.info("tip:" + maps[i].getTIPId() + " has split on node:" + node);
            for (int j = 0; j < maxLevel; j++) {
                List<TaskInProgress> hostMaps = cache.get(node);
                if (hostMaps == null) {
                    hostMaps = new ArrayList<TaskInProgress>();
                    cache.put(node, hostMaps);
                    hostMaps.add(maps[i]);
                }
                //check whether the hostMaps already contains an entry for a TIP
                //This will be true for nodes that are racks and multiple nodes in
                //the rack contain the input for a tip. Note that if it already
                //exists in the hostMaps, it must be the last element there since
                //we process one TIP at a time sequentially in the split-size order
                if (hostMaps.get(hostMaps.size() - 1) != maps[i]) {
                    hostMaps.add(maps[i]);
                }
                node = node.getParent();
            }
        }
    }

    // Calibrate the localityWaitFactor - Do not override user intent!
    if (localityWaitFactor == DEFAULT_LOCALITY_WAIT_FACTOR) {
        int jobNodes = uniqueHosts.size();
        int clusterNodes = jobtracker.getNumberOfUniqueHosts();

        if (clusterNodes > 0) {
            localityWaitFactor = Math.min((float) jobNodes / clusterNodes, localityWaitFactor);
        }
        LOG.info(jobId + " LOCALITY_WAIT_FACTOR=" + localityWaitFactor);
    }

    return cache;
}

From source file:org.jiemamy.utils.collection.CollectionsUtil.java

/**
 * Creates and returns a new instance of {@link IdentityHashMap} initialized with the given map.
 * 
 * @param <K> the key type of the {@link IdentityHashMap}
 * @param <V> the value type of the {@link IdentityHashMap}
 * @param m the map whose mappings are to be placed in the new map
 * @return a new {@link IdentityHashMap} instance
 * @throws IllegalArgumentException if {@code m} is {@code null}
 * @see IdentityHashMap#IdentityHashMap(Map)
 */
public static <K, V> IdentityHashMap<K, V> newIdentityHashMap(Map<? extends K, ? extends V> m) {
    Validate.notNull(m);
    return new IdentityHashMap<K, V>(m);
}

From source file:org.fhcrc.cpl.toolbox.proteomics.feature.FeatureSet.java

public FeatureSet quant(AnalyzeICAT.IsotopicLabel label, int intensityType, MSRun run, float massTolerance,
        int massToleranceType, float timeTolerance) {
    boolean pairsOnly = false;

    ArrayList pairs = AnalyzeICAT.analyze(getFeatures(), label, massTolerance, massToleranceType,
            timeTolerance);

    // TODO: option to output unpaired features
    Map<Feature, Feature> icatFeatures = new IdentityHashMap<Feature, Feature>(3 * pairs.size());

    ArrayList<Feature> list = new ArrayList<Feature>(_features.length);

    //
    // output paired features
    //
    // Consider May want to make ratio MAX_VALUE or MIN_VALUE when heavy is zero.
    //          +/- Inf (or Nan) may confuse downstream tools.

    // Give us the option of using either the total, recalculated, or maximum intensity
    // of each partner. Total has problems when one partner shows longer
    // elution time (or runs into a different co-eluting peptide).
    // TODO: This is just a surrogate for using the same range of scans
    // for each partner (max intensity has problems too).
    // Recalculated intensity requires access to the MS1 run
    if (intensityType == TOTAL_INTENSITY)
        Collections.sort(pairs, comparePairScanAsc);

    for (int i = 0; i < pairs.size(); i++) {
        Pair p = (Pair) pairs.get(i);
        Feature light = (Feature) p.first;
        Feature heavy = (Feature) p.second;
        Feature f = new Feature(light);
        f.setTotalIntensity(heavy.totalIntensity + light.totalIntensity);

        if (intensityType == TOTAL_INTENSITY) {
            IsotopicLabelExtraInfoDef.setHeavyIntensity(f, heavy.totalIntensity);
            IsotopicLabelExtraInfoDef.setLightIntensity(f, light.totalIntensity);
        } else if (intensityType == RECALCULATED_INTENSITY) {
            if (run == null) {
                _log.error("No run specified, unable to recalculate intensities.");
                return null;
            }
            IntRange overlappingScanRange = Feature.findOverlappingScanRange(light, heavy);
            IsotopicLabelExtraInfoDef.setLightIntensity(f, light.calculateFeatureIntensityInRange(run,
                    MZ_WINDOW_SIZE, overlappingScanRange, RESAMPLING_FREQUENCY));
            IsotopicLabelExtraInfoDef.setHeavyIntensity(f, heavy.calculateFeatureIntensityInRange(run,
                    MZ_WINDOW_SIZE, overlappingScanRange, RESAMPLING_FREQUENCY));
        } else if (intensityType == MAX_INTENSITY) {
            IsotopicLabelExtraInfoDef.setHeavyIntensity(f, heavy.intensity);
            IsotopicLabelExtraInfoDef.setLightIntensity(f, light.intensity);

            // smearing the total out doesn't seem to work better than just using the max
            // f.setHeavyIntensity(heavy.totalIntensity/heavy.scanCount);
            // f.setLightIntensity(light.totalIntensity/light.scanCount);
        }

        IsotopicLabelExtraInfoDef.setRatio(f, IsotopicLabelExtraInfoDef.getLightIntensity(f)
                / IsotopicLabelExtraInfoDef.getHeavyIntensity(f));
        IsotopicLabelExtraInfoDef.setLabelCount(f, Math.round((heavy.mass - light.mass) / label.getHeavy()));
        f.setProperty("label", label);
        f.setChargeStates(Math.max(light.getChargeStates(), heavy.getChargeStates()));

        //we used to do this in order to subtract out the light label.  Not doing that any more
        //(mass will be consistent with m/z and charge), so no need to update mass at all            
        //            f.updateMass();

        //deal with any peptide identifications, likely supplied by AMT.
        //If both light and heavy have the same ID, or any in common, or only one has an ID,
        //keep it.  If light and heavy have different IDs, toss them both out
        List<String> heavyPeptides = MS2ExtraInfoDef.getPeptideList(heavy);
        List<String> lightPeptides = MS2ExtraInfoDef.getPeptideList(light);
        if (heavyPeptides != null || lightPeptides != null) {
            if (heavyPeptides == null) {
                MS2ExtraInfoDef.setPeptideList(f, lightPeptides.get(0));
            } else if (lightPeptides == null) {
                MS2ExtraInfoDef.setPeptideList(f, heavyPeptides.get(0));
            } else {
                //both heavy and light peptides exist.
                if (heavyPeptides.size() == 1 && lightPeptides.size() == 1) {
                    if (heavyPeptides.get(0).equals(lightPeptides.get(0)))
                        MS2ExtraInfoDef.setPeptideList(f, heavyPeptides.get(0));
                    else
                        MS2ExtraInfoDef.removeAllPeptides(f);
                } else {
                    Set<String> commonPeptides = new HashSet<String>();
                    for (String heavyPeptide : heavyPeptides)
                        if (lightPeptides.contains(heavyPeptide))
                            commonPeptides.add(heavyPeptide);
                    if (commonPeptides.size() == 0)
                        MS2ExtraInfoDef.removeAllPeptides(f);
                    else
                        MS2ExtraInfoDef.setPeptideList(f, commonPeptides.iterator().next());
                }
            }

            //now that we've figured out what peptide to assign, make sure it has the
            //right number of labeled residues.  If not, unset.
            if (MS2ExtraInfoDef.getFirstPeptide(f) != null) {
                int numLabeledResidues = 0;
                String featurePeptide = MS2ExtraInfoDef.getFirstPeptide(f);
                for (int j = 0; j < featurePeptide.length(); j++)
                    if (featurePeptide.charAt(j) == label.getResidue())
                        numLabeledResidues++;
                if (numLabeledResidues != IsotopicLabelExtraInfoDef.getLabelCount(f)) {
                    //                        if (numLabeledResidues > 0) System.err.println("Tossing: " + featurePeptide + ", " + numLabeledResidues + ", " + IsotopicLabelExtraInfoDef.getLabelCount(f));

                    MS2ExtraInfoDef.removeAllPeptides(f);
                }
                //                    else
                //                        System.err.println("Saving: " + featurePeptide + ", " + numLabeledResidues);
            }
        }

        list.add(f);
        icatFeatures.put(light, light);
        icatFeatures.put(heavy, heavy);

    }

    //
    // output remaining features
    //

    if (!pairsOnly) {
        for (int i = 0; i < _features.length; i++) {
            Feature f = _features[i];
            if (icatFeatures.containsKey(f))
                continue;
            list.add(new Feature(f));
        }
    }

    FeatureSet fs = (FeatureSet) this.clone();
    fs.addExtraInformationType(new IsotopicLabelExtraInfoDef());
    fs.getProperties().put("label", label.toString());
    fs.setFeatures(list.toArray(new Feature[0]));
    return fs;
}