Example usage for java.util TreeMap size

Introduction

This page collects usage examples for the java.util.TreeMap.size() method.

Prototype

public int size()

Document

Returns the number of entries in the tree.
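
Before the project examples below, here is a minimal, self-contained sketch of the method in isolation (the class name TreeMapSizeDemo and the sample keys are ours, chosen for illustration):

import java.util.TreeMap;

public class TreeMapSizeDemo {
    public static void main(String[] args) {
        TreeMap<String, Integer> map = new TreeMap<String, Integer>();
        System.out.println(map.size()); // 0: a new map is empty

        map.put("a", 1);
        map.put("b", 2);
        map.put("a", 3); // re-mapping an existing key does not grow the map
        System.out.println(map.size()); // 2

        map.remove("b");
        System.out.println(map.size()); // 1
    }
}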

Usage

From source file:uk.ac.leeds.ccg.andyt.projects.moses.process.RegressionReport_UK1.java

protected static Object[] loadDataHSARHP_ISARCEP(File a_SARFile, File a_CASFile) throws IOException {
    Object[] result = new Object[3];
    TreeMap<String, double[]> a_SAROptimistaionConstraints = loadCASOptimistaionConstraints(a_SARFile);
    TreeMap<String, double[]> a_CASOptimistaionConstraints = loadCASOptimistaionConstraints(a_CASFile);
    Vector<String> variables = GeneticAlgorithm_HSARHP_ISARCEP.getVariableList();
    variables.add(0, "Zone_Code");
    String[] variableNames = new String[0];
    variableNames = variables.toArray(variableNames);
    result[0] = variableNames;
    // Format (Flip) data
    double[][] a_SARExpectedData = new double[variables.size() - 1][a_SAROptimistaionConstraints.size()];
    double[][] a_CASObservedData = new double[variables.size() - 1][a_SAROptimistaionConstraints.size()];
    String oa;
    double[] a_SARExpectedRow;
    double[] a_CASObservedRow;
    int j = 0;
    Iterator<String> iterator_String = a_SAROptimistaionConstraints.keySet().iterator();
    while (iterator_String.hasNext()) {
        oa = iterator_String.next();
        a_SARExpectedRow = a_SAROptimistaionConstraints.get(oa);
        a_CASObservedRow = a_CASOptimistaionConstraints.get(oa);
        //            if (oa.equalsIgnoreCase("00AAFQ0013")){
        //                System.out.println(oa);
        //            }
        if (a_SARExpectedRow == null) {
            System.out.println(
                    "Warning a_SARExpectedRow == null in loadDataHSARHP_ISARCEP(File,File) for OA " + oa);
        } else {
            if (a_CASObservedRow == null) {
                System.out.println(
                        "Warning a_SARExpectedRow == null in loadDataHSARHP_ISARCEP(File,File) for OA " + oa);
            } else {
                for (int i = 0; i < variables.size() - 1; i++) {
                    a_SARExpectedData[i][j] = a_SARExpectedRow[i];
                    a_CASObservedData[i][j] = a_CASObservedRow[i];
                }
            }
        }
        j++;
    }
    result[1] = a_SARExpectedData;
    result[2] = a_CASObservedData;
    return result;
}

From source file:api.wiki.WikiNameApi2.java

private void processSpecific(String s, PeopleNameOption option, final ProgressCallback callback) {
    final TreeMap<String, String> values = getGenderNames(s);

    final float[] progressValues = ProgressUtil.getProgressValues(values.size());
    int counter = 1;

    for (final String peopleName : values.values()) {
        final int c = counter++;
        callback.onProgressUpdate(progressValues[c]);
        Task<Void> task = new Task<Void>() {
            @Override
            protected Void call() throws Exception {
                final File file = processName(peopleName, option);
                Platform.runLater(new Runnable() {

                    @Override
                    public void run() {
                        callback.onProgress(processFile(peopleName, file));
                    }
                });

                return null;
            }
        };
        Thread thread = new Thread(task);
        thread.setPriority(Thread.MAX_PRIORITY);
        thread.start();
    }
}

From source file:uk.ac.leeds.ccg.andyt.projects.moses.process.RegressionReport_UK1.java

/**
 *
 * @param a_SAR_File
 * @param a_CAS_File
 * @return Object[] result where;
 * result[0] is a String[] of variable names
 * result[1] is a double[number of variables][no of data items]
 * of a_SAR_data
 * result[2] is a double[number of variables][no of data items]
 * of a_CAS_data
 * @throws IOException
 */
protected static Object[] loadDataISARHP_ISARCEP(File a_SAR_File, File a_CAS_File) throws IOException {
    Object[] result = new Object[3];

    TreeMap<String, double[]> a_SAROptimistaionConstraints_TreeMap = loadCASOptimistaionConstraints(a_SAR_File);
    TreeMap<String, double[]> a_CASOptimistaionConstraints_TreeMap = loadCASOptimistaionConstraints(a_CAS_File);

    Vector<String> variables = GeneticAlgorithm_ISARHP_ISARCEP.getVariableList();
    variables.add(0, "Zone_Code");
    String[] variableNames = new String[0];
    variableNames = variables.toArray(variableNames);
    result[0] = variableNames;

    // Format (Flip) data
    double[][] a_SAR_Data = new double[variables.size() - 1][a_SAROptimistaionConstraints_TreeMap.size()];
    double[][] a_CAS_Data = new double[variables.size() - 1][a_SAROptimistaionConstraints_TreeMap.size()];
    String oa;
    double[] a_SARExpectedRow;
    double[] a_CASObservedRow;
    int j = 0;
    Iterator<String> iterator_String = a_SAROptimistaionConstraints_TreeMap.keySet().iterator();
    while (iterator_String.hasNext()) {
        oa = iterator_String.next();
        a_SARExpectedRow = a_SAROptimistaionConstraints_TreeMap.get(oa);
        a_CASObservedRow = a_CASOptimistaionConstraints_TreeMap.get(oa);
        if (a_SARExpectedRow == null) {
            System.out.println(
                    "Warning a_SARExpectedRow == null in loadDataISARHP_ISARCEP(File,File) for OA " + oa);
        } else {
            if (a_CASObservedRow == null) {
                System.out.println(
                        "Warning a_CASObservedRow == null in loadDataISARHP_ISARCEP(File,File) for OA " + oa);
            } else {
                for (int i = 0; i < variables.size() - 1; i++) {
                    a_SAR_Data[i][j] = a_SARExpectedRow[i];
                    a_CAS_Data[i][j] = a_CASObservedRow[i];
                }
            }
        }
        j++;
    }
    result[1] = a_SAR_Data;
    result[2] = a_CAS_Data;
    return result;
}

From source file:main.java.workload.WorkloadExecutor.java

public static Transaction streamOneTransaction(Database db, Cluster cluster, Workload wrl, WorkloadBatch wb) {

    Set<Integer> trTupleSet = null;
    Set<Integer> trDataSet = null;

    int min = 0, i = 0, n = 0, tr_id = 0;
    int type = trDistribution.sample();

    Transaction tr = null;

    if (!wb.getTrMap().containsKey(type))
        wb.getTrMap().put(type, new TreeMap<Integer, Transaction>());

    // new
    double rand_val = Global.rand.nextDouble();
    int toBeRemovedKey = -1;

    /**
     *  Implementing the new Workload Generation model 
     *  (Finalised as per November 20, 2014 and later improved on February 13-14, 2015)      
     */
    ++Global.global_trCount;

    // Transaction birth
    if (wb.getTrMap().get(type).isEmpty() || rand_val <= Global.percentageChangeInWorkload) {

        trTupleSet = wrl.getTrTupleSet(db, type);
        trDataSet = Workload.getTrDataSet(db, cluster, wb, trTupleSet);

        ++Global.global_trSeq;
        tr = new Transaction(Global.global_trSeq, type, trDataSet, Sim.time());

        // Add the incident transaction id
        wb.addIncidentTrId(cluster, trDataSet, Global.global_trSeq);

        // Add the newly created Transaction in the Workload Transaction map   
        wb.getTrMap().get(type).put(tr.getTr_id(), tr);

        // New improvements------------------------------------------------------------------------------
        double initial_period = (double) WorkloadExecutor.uNmax; // initialisation         
        tr.setTr_period(initial_period);

        perfm.Period.put(tr.getTr_id(), initial_period);
        Time.put(tr.getTr_id(), Sim.time());

        // Transaction repetition and retention of old transaction
    } else {

        ArrayList<Integer> idx2_id = new ArrayList<Integer>();
        ArrayList<Integer> idx_value = new ArrayList<Integer>();
        ArrayList<Integer> uT = new ArrayList<Integer>();

        TreeMap<Integer, Integer> idx2 = new TreeMap<Integer, Integer>(new ValueComparator<Integer>(idx));
        idx2.putAll(idx);

        min = Math.min(idx.size(), uNmax); // uNmax or uNmaxT

        i = 0;
        Iterator<Entry<Integer, Integer>> itr = idx2.entrySet().iterator();
        while (i < min) {
            idx2_id.add(itr.next().getKey());
            ++i;
        }

        // Deleting old Transactions
        if (idx2.size() > min) {
            toBeRemovedKey = idx2.lastKey();

            Transaction tr_old = wb.getTransaction(toBeRemovedKey);
            tr_old.calculateSpans(cluster);

            wb.removeTransaction(cluster, tr_old);
            idx.remove(toBeRemovedKey);
        }

        i = 0;
        while (i < idx2_id.size()) {
            idx_value.add(idx.get(idx2_id.get(i)));
            ++i;
        }

        i = 0;
        while (i < idx_value.size()) {
            uT.add(T.get(idx_value.get(i) - 1));
            ++i;
        }

        if (uT.size() == 1)
            n = 0;
        else
            n = Global.rand.nextInt(uT.size());

        tr_id = uT.get(n);

        tr = wb.getTransaction(tr_id);
        tr.setProcessed(false);

        // New improvements------------------------------------------------------------------------------
        double prev_period = perfm.Period.get(tr.getTr_id());
        double prev_time = Time.get(tr.getTr_id());

        double new_period = Global.expAvgWt * prev_period + (1 - Global.expAvgWt) * (Sim.time() - prev_time);

        tr.setTr_period(new_period);

        perfm.Period.remove(tr.getTr_id());
        perfm.Period.put(tr.getTr_id(), new_period);

        Time.remove(tr.getTr_id());
        Time.put(tr.getTr_id(), Sim.time());

    } // end-if-else()

    // Calculate latest Span
    tr.calculateSpans(cluster);

    // Update Idt
    tr.calculateIdt();

    if (perfm.Span.containsKey(tr.getTr_id()))
        perfm.Span.remove(tr.getTr_id());

    perfm.Span.put(tr.getTr_id(), tr.getTr_serverSpanCost());

    // Create an index entry for each newly created Transaction      
    idx.put(tr.getTr_id(), Global.global_trCount);
    T.add(tr.getTr_id());

    // New improvements------------------------------------------------------------------------------
    if (Global.global_trCount > Global.observationWindow) {

        _i = Global.global_trCount; // _i ~ Sim.time() 
        _W = Global.observationWindow; // _W ~ time 

        HashSet<Integer> unq = new HashSet<Integer>(T);
        for (int _n = (_i - _W); _n < _i; _n++) { // loop variable fixed (was n); T holds _i entries, indexed 0 .. _i - 1
            unq.add(T.get(_n));
        }

        // Captures the number of total unique transaction for this observation window
        perfm.Unqlen.put((_i - _W), unq.size());

        // Calculate the impact of distributed transaction per transaction basis               
        double sum_of_span_by_period = 0.0;
        sum_of_one_by_period = 0.0;

        Iterator<Integer> unq_itr = unq.iterator();
        while (unq_itr.hasNext()) {
            int unq_T = unq_itr.next();

            int span = perfm.Span.get(unq_T);
            double period = perfm.Period.get(unq_T);

            double span_by_period = span / period; // Frequency = 1/Period (f=1/t) per unit time (i.e. 1 second)
            double one_by_period = 1 / period; // Frequency = 1/Period (f=1/t) per unit time (i.e. 1 second)

            sum_of_span_by_period += span_by_period;
            sum_of_one_by_period += one_by_period;
        }

        double i_dt = (sum_of_span_by_period) / (Global.servers * sum_of_one_by_period);
        perfm.I_Dt.put((_i - _W), i_dt);

        if (Double.isNaN(i_dt))
            currentIDt = 0;
        else
            currentIDt = i_dt;

        // Reset repartitioning cooling off period
        if (WorkloadExecutor.repartitioningCoolingOff
                && Sim.time() >= WorkloadExecutor.RepartitioningCoolingOffPeriod) {

            WorkloadExecutor.repartitioningCoolingOff = false;

            Global.LOGGER.info("-----------------------------------------------------------------------------");
            Global.LOGGER.info("Simulation time: " + Sim.time() / (double) Global.observationWindow + " hrs");
            Global.LOGGER.info("Repartitioning cooling off period ends.");
            Global.LOGGER
                    .info("System will now check whether another repartitioning is required at this moment.");
            Global.LOGGER.info("Current IDt: " + currentIDt);
            Global.LOGGER.info("User defined IDt threshold: " + Global.userDefinedIDtThreshold);

            if (currentIDt < Global.userDefinedIDtThreshold) {
                Global.LOGGER.info("Repartitioning is not required at this moment.");

                //This is to disable on-demand atomic repartitioning for A-ARHC only
                if (Global.adaptive) {
                    Global.LOGGER.info("Disabling on-demand atomic repartitioning for A-ARHC ...");
                    WorkloadExecutor.isAdaptive = false;
                }

                Global.LOGGER.info("Continuing transaction processing ...");
            }
        }

        perfm.time.put((_i - _W), Sim.time());
    }

    // Add a hyperedge to workload hypergraph
    wb.addHGraphEdge(cluster, tr);

    // Collect transactional streams if data stream mining is enabled
    if (Global.streamCollection)
        Global.dsm.collectStream(cluster, tr);

    return tr;
}

From source file:edu.indiana.soic.ts.mapreduce.VectorCalculator.java

public void submitJob() {
    try {
        Configuration config = HBaseConfiguration.create();
        config.set("mapreduce.output.textoutputformat.separator", ",");
        TreeMap<String, List<Date>> genDates = TableUtils.genDates(TableUtils.getDate(startDate),
                TableUtils.getDate(endDate), this.window, TimeUnit.DAYS, this.headShift, this.tailShift,
                TimeUnit.DAYS);
        LOG.info("Start Date : {} End Date : {}, Gen dates size: {}", startDate, endDate, genDates.size());
        for (String id : genDates.keySet()) {
            LOG.info("Vector calculation for: {}", id);
            Scan scan = new Scan();
            scan.setCaching(500); // 1 is the default in Scan, which will be bad for MapReduce jobs
            scan.setCacheBlocks(false); // don't set to true for MR jobs
            List<Date> dates = genDates.get(id);
            String start = TableUtils.convertDateToString(dates.get(0));
            String end = TableUtils.convertDateToString(dates.get(1));
            List<String> suitableDateList = TableUtils.getDates(start, end);
            config.set(Constants.Job.NO_OF_DAYS, String.valueOf(suitableDateList.size()));
            LOG.info("Vector calculator for start: {}, end: {} time window: {}, shift: {}, days: {}", startDate,
                    endDate, window, headShift, suitableDateList.size());
            for (String date : suitableDateList) {
                scan.addColumn(Constants.STOCK_TABLE_CF_BYTES, date.getBytes());
            }
            Job job = new Job(config, "Vector calculation: " + id);
            job.setJarByClass(VectorCalculator.class);
            TableMapReduceUtil.initTableMapperJob(Constants.STOCK_TABLE_NAME, // input HBase table name
                    scan, // Scan instance to control CF and attribute selection
                    VectorCalculatorMapper.class, // mapper
                    IntWritable.class, // mapper output key
                    Text.class, // mapper output value
                    job);
            // adjust directories as required
            String outPutDir = tsConfiguration.getInterMediateVectorDir() + "/" + id;
            FileOutputFormat.setOutputPath(job, new Path(outPutDir));
            boolean b = job.waitForCompletion(true);
            if (!b) {
                LOG.error("Error with job for vector calculation");
                throw new RuntimeException("Error with job for vector calculation");
            }
            Utils.concatOutput(config, id, outPutDir, tsConfiguration.getVectorDir());
        }
    } catch (ParseException e) {
        LOG.error("Error while parsing date", e);
        throw new RuntimeException("Error while parsing date", e);
    } catch (InterruptedException | ClassNotFoundException | IOException e) {
        LOG.error("Error while creating the job", e);
        throw new RuntimeException("Error while creating the job", e);
    }
}

From source file:se.sics.kompics.p2p.overlay.cyclon.GraphUtil.java

public GraphUtil(TreeMap<OverlayAddress, CyclonNeighbors> alivePeers) {
    super();
    n = alivePeers.size();
    m = new byte[n][n];
    dist = new int[n][n];
    inDegree = new double[n];
    outDegree = new int[n];
    clustering = new double[n];
    a = new CyclonAddress[n];
    map = new HashMap<CyclonAddress, Integer>();
    neighbors = new int[n][];
    inStats = new SummaryStatistics();
    outStats = new SummaryStatistics();

    // map all alive nodes to a contiguous sequence of integers
    {
        int p = 0;
        for (OverlayAddress address : alivePeers.keySet()) {
            CyclonAddress src = (CyclonAddress) address;
            a[p] = src;
            map.put(src, p);
            p++;
        }
    }

    // build adjacency matrix
    int d = -1;
    {
        try {
            for (int s = 0; s < a.length; s++) {
                CyclonAddress src = a[s];
                CyclonNeighbors neigh = alivePeers.get(src);
                int nn = 0;
                for (CyclonNodeDescriptor desc : neigh.getDescriptors()) {
                    CyclonAddress dst = desc.getCyclonAddress();
                    if (!map.containsKey(dst)) {
                        continue;
                    }
                    d = map.get(dst);
                    m[s][d] = 1;
                    inDegree[d]++;
                    outDegree[s]++;
                    nn++;
                }
                neighbors[s] = new int[nn];
                nn = 0;
                for (CyclonNodeDescriptor desc : neigh.getDescriptors()) {
                    CyclonAddress dst = desc.getCyclonAddress();
                    if (map.containsKey(dst)) {
                        neighbors[s][nn++] = map.get(dst);
                    }
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
            System.exit(1);
        }
    }
    // build distance matrix, clustering coefficient, average path length
    // diameter and average degrees
    {
        for (int i = 0; i < n; i++) {
            bfs(i, dist[i]);

            // we compute the clustering coefficient here
            int neigh[] = neighbors[i];
            if (neigh.length <= 1) {
                clustering[i] = 1.0;
                continue;
            }
            int edges = 0;

            for (int j = 0; j < neigh.length; j++) {
                for (int k = j + 1; k < neigh.length; k++) {
                    if (m[neigh[j]][neigh[k]] > 0 || m[neigh[k]][neigh[j]] > 0) {
                        ++edges;
                    }
                }
            }
            clustering[i] = ((edges * 2.0) / neigh.length) / (neigh.length - 1);
        }
        int k = 0;
        for (int i = 0; i < n; i++) {
            for (int j = 0; j < n; j++) {
                if (i == j)
                    continue;
                if (dist[i][j] == n) {
                    infinitePathCount++;
                    continue;
                }
                if (dist[i][j] > diameter) {
                    diameter = dist[i][j];
                }
                avgPathLength = (avgPathLength * k + dist[i][j]) / (k + 1);
                k++;
            }
            inStats.addValue(inDegree[i]);
            outStats.addValue(outDegree[i]);
            // avgIn = (avgIn * i + inDegree[i]) / (i + 1);
            // minIn = minIn > inDegree[i] ? inDegree[i] : minIn;
            // maxIn = maxIn < inDegree[i] ? inDegree[i] : maxIn;
            // avgOut = (avgOut * i + outDegree[i]) / (i + 1);
            avgClustering = (avgClustering * i + clustering[i]) / (i + 1);
        }
    }
}

From source file:se.sics.gvod.common.GraphUtil.java

public GraphUtil(TreeMap<VodAddress, VodNeighbors> alivePeers) {
    super();
    n = alivePeers.size();
    m = new byte[n][n];
    dist = new int[n][n];
    inDegree = new double[n];
    outDegree = new int[n];
    clustering = new double[n];
    a = new VodAddress[n];
    map = new HashMap<VodAddress, Integer>();
    neighbors = new int[n][];
    inStats = new SummaryStatistics();
    outStats = new SummaryStatistics();

    // map all alive nodes to a contiguous sequence of integers
    {
        int p = 0;
        for (VodAddress address : alivePeers.keySet()) {
            VodAddress src = address; // keySet() already yields VodAddress; no cast needed
            utilitySetNbChange += (alivePeers.get(src).getUtilitySetNbChange()
                    / alivePeers.get(src).getNbCycles());
            upperSetNbChange += (alivePeers.get(src).getUpperSetNbChange() / alivePeers.get(src).getNbCycles());
            nbCycles += alivePeers.get(src).getNbCycles();
            a[p] = src;
            map.put(src, p);
            p++;
        }
    }

    // build adjacency matrix
    int d = -1;
    {
        try {
            for (int s = 0; s < a.length; s++) {
                VodAddress src = a[s];
                VodNeighbors neigh = alivePeers.get(src);
                int nn = 0;
                for (VodDescriptor desc : neigh.getRandomSetDescriptors()) {
                    VodAddress dst = desc.getVodAddress();
                    if (!map.containsKey(dst)) {
                        continue;
                    }
                    d = map.get(dst);
                    m[s][d] = 1;
                    inDegree[d]++;
                    outDegree[s]++;
                    nn++;
                }
                neighbors[s] = new int[nn];
                nn = 0;
                for (VodDescriptor desc : neigh.getRandomSetDescriptors()) {
                    VodAddress dst = desc.getVodAddress();
                    if (map.containsKey(dst)) {
                        neighbors[s][nn++] = map.get(dst);
                    }
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
            System.exit(1);
        }
    }
    // build distance matrix, clustering coefficient, average path length
    // diameter and average degrees
    {
        for (int i = 0; i < n; i++) {
            bfs(i, dist[i]);

            // we compute the clustering coefficient here
            int neigh[] = neighbors[i];
            if (neigh.length <= 1) {
                clustering[i] = 1.0;
                continue;
            }
            int edges = 0;

            for (int j = 0; j < neigh.length; j++) {
                for (int k = j + 1; k < neigh.length; k++) {
                    if (m[neigh[j]][neigh[k]] > 0 || m[neigh[k]][neigh[j]] > 0) {
                        ++edges;
                    }
                }
            }
            clustering[i] = ((edges * 2.0) / neigh.length) / (neigh.length - 1);
        }
        int k = 0;
        for (int i = 0; i < n; i++) {
            for (int j = 0; j < n; j++) {
                if (i == j)
                    continue;
                if (dist[i][j] == n) {
                    infinitePathCount++;
                    continue;
                }
                if (dist[i][j] > diameter) {
                    diameter = dist[i][j];
                }
                avgPathLength = (avgPathLength * k + dist[i][j]) / (k + 1);
                k++;
            }
            inStats.addValue(inDegree[i]);
            outStats.addValue(outDegree[i]);
            // avgIn = (avgIn * i + inDegree[i]) / (i + 1);
            // minIn = minIn > inDegree[i] ? inDegree[i] : minIn;
            // maxIn = maxIn < inDegree[i] ? inDegree[i] : maxIn;
            // avgOut = (avgOut * i + outDegree[i]) / (i + 1);
            avgClustering = (avgClustering * i + clustering[i]) / (i + 1);
        }
    }
}

From source file:org.eclipse.gyrex.cloud.internal.queue.ZooKeeperQueue.java

/**
 * Returns the ordered list of messages.
 * <p>
 * Note, this represents a snapshot of the queue at the time of invoking the
 * method.
 * </p>
 * 
 * @return ordered list of messages
 */
public List<Message> getMessages() {
    try {
        final TreeMap<Long, String> queueChildren = readQueueChildren(null);
        final List<Message> messages = new ArrayList<Message>(queueChildren.size());
        for (final String messageId : queueChildren.values()) {
            final Message message = readQueueMessage(messageId);
            if (null != message) {
                messages.add(message);
            }
        }
        return messages;
    } catch (final NoNodeException e) {
        // don't fail, just return an empty list
        return Collections.emptyList();
    } catch (final Exception e) {
        throw new QueueOperationFailedException(id, "READ_MESSAGES", e);
    }
}

From source file:com.jsonstore.database.DatabaseSchema.java

public boolean equals(TreeMap<String, String> schema_compare) {
    if (schema_compare.size() != (this.nodes.size() + this.internalNodes.size())) {
        return false;
    }

    for (String key : schema_compare.keySet()) {
        String safeKey = JSONStoreUtil.getDatabaseSafeSearchFieldName(key);
        SearchFieldType type = null;

        if (this.safeNodes.containsKey(safeKey)) {
            type = this.nodes.get(safeKey);
            if (null == type) {
                type = this.safeNodes.get(safeKey);
            }
        } else {
            type = this.internalNodes.get(safeKey);
        }

        if ((type == null) || !type.getMappedType().equals(schema_compare.get(key))) {
            return false;
        }
    }

    return true;
}

From source file:com.intellij.ide.passwordSafe.impl.providers.masterKey.PasswordDatabase.java

/**
 * {@inheritDoc}
 */
public State getState() {
    TreeMap<ByteArrayWrapper, byte[]> sorted;
    String pi;
    synchronized (myDatabase) {
        pi = toHex(myMasterPasswordInfo);
        sorted = new TreeMap<ByteArrayWrapper, byte[]>(myDatabase);
    }
    String[][] db = new String[2][sorted.size()];
    int i = 0;
    for (Map.Entry<ByteArrayWrapper, byte[]> e : sorted.entrySet()) {
        db[0][i] = toHex(e.getKey().unwrap());
        db[1][i] = toHex(e.getValue());
        i++;
    }
    State s = new State();
    s.PASSWORDS = db;
    s.MASTER_PASSWORD_INFO = pi;
    return s;
}