Example usage for java.util IdentityHashMap IdentityHashMap

Introduction

On this page you can find example usage for the java.util.IdentityHashMap constructor.

Prototype

public IdentityHashMap(int expectedMaxSize)

Document

Constructs a new, empty identity hash map with the specified expected maximum size, i.e. the expected maximum number of key-value mappings the map will hold.
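
As a quick illustration of the constructor described above, here is a minimal, self-contained sketch (the class name IdentityHashMapDemo is made up for illustration). It shows that the constructor argument is an expected maximum size used to pre-size the map, and that keys are compared by reference identity (==) rather than by equals():

import java.util.IdentityHashMap;
import java.util.Map;

public class IdentityHashMapDemo {
    public static void main(String[] args) {
        // The argument is the expected maximum number of entries, used to
        // size the map internally; it is not a source map.
        Map<String, Integer> map = new IdentityHashMap<>(16);

        String a = new String("key");
        String b = new String("key"); // equals(a), but a distinct object

        map.put(a, 1);
        map.put(b, 2);

        // Keys are compared with ==, so both entries are retained.
        System.out.println(map.size()); // prints 2
        System.out.println(map.get(a)); // prints 1
        System.out.println(map.get(b)); // prints 2
    }
}

The project examples below use the same constructor to pre-size maps keyed by distinct object instances, such as GridTask-to-GridNode mappings.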

Usage

From source file:IdentitySet.java

/**
 * Create an IdentitySet with the given sizing.
 *
 * @param sizing The sizing of the set to create.
 */
public IdentitySet(int sizing) {
    this.map = new IdentityHashMap(sizing);
}

From source file:gridool.deployment.GridDeploymentJob.java

public Map<GridTask, GridNode> map(GridRouter router, Triple<String, byte[], Long> args) throws GridException {
    final String clsname = args.getFirst();
    final byte[] b = args.getSecond();
    final long timestamp = args.getThird();

    final GridNode[] nodes = router.getAllNodes();
    final Map<GridTask, GridNode> task2node = new IdentityHashMap<GridTask, GridNode>(nodes.length);
    for (GridNode node : nodes) {
        GridTask task = new GridDeployClassTask(this, clsname, b, timestamp);
        task2node.put(task, node);
    }
    return task2node;
}

From source file:gridool.db.partitioning.phihash.DBPartitioningJob.java

public Map<GridTask, GridNode> map(GridRouter router, DBPartitioningJobConf jobConf) throws GridException {
    this.started = System.currentTimeMillis();
    Map<GridTask, GridNode> map = new IdentityHashMap<GridTask, GridNode>(1);
    GridTask dbtask = jobConf.makePartitioningTask(this);
    GridNode localNode = getJobNode();
    map.put(dbtask, localNode);
    return map;
}

From source file:dbcount.DbCountInitializeJob.java

@SuppressWarnings("serial")
public Map<GridTask, GridNode> map(final GridRouter router, final DBMapReduceJobConf jobConf)
        throws GridException {
    final GridNode[] nodes = router.getAllNodes();
    final Map<GridTask, GridNode> map = new IdentityHashMap<GridTask, GridNode>(nodes.length);
    for (GridNode node : nodes) {
        GridTask task = new GridTaskAdapter(this, false) {
            protected Integer execute() throws GridException {
                final int pageview;
                try {
                    pageview = initialize(jobConf);
                } catch (Exception e) {
                    LOG.error(e.getMessage(), e);
                    throw new GridException(e);
                }
                return pageview;
            }
        };
        map.put(task, node);
    }
    return map;
}

From source file:gridool.mapred.db.DBReduceJob.java

public Map<GridTask, GridNode> map(GridRouter router, DBMapReduceJobConf jobConf) throws GridException {
    final String inputTableName = jobConf.getMapOutputTableName();
    String destTableName = jobConf.getReduceOutputTableName();
    if (destTableName == null) {
        destTableName = generateOutputTableName(inputTableName, System.nanoTime());
        jobConf.setReduceOutputTableName(destTableName);
    }
    this.destTableName = destTableName;

    final GridNode[] nodes = router.getAllNodes();
    final Map<GridTask, GridNode> map = new IdentityHashMap<GridTask, GridNode>(nodes.length);
    final String createTableTemplate = jobConf.getQueryTemplateForCreatingViewComposite();
    if (createTableTemplate != null) {
        final String dstDbUrl = jobConf.getReduceOutputDbUrl();
        if (dstDbUrl == null) {
            throw new GridException(
                    "ReduceOutputDestinationDbUrl should be specified when using a view in reduce phase");
        }
        final String outputTblName = jobConf.getReduceOutputTableName();
        final StringBuilder createTablesQuery = new StringBuilder(512);
        final StringBuilder createViewQuery = new StringBuilder(512);
        createViewQuery.append("CREATE VIEW ").append(outputTblName).append(" AS");
        final int numNodes = nodes.length;
        for (int i = 0; i < numNodes; i++) {
            if (i != 0) {
                createViewQuery.append(" UNION ALL");
            }
            GridTask task = jobConf.makeReduceTask(this, inputTableName, destTableName);
            task.setTaskNumber(i + 1);
            map.put(task, nodes[i]);
            String newTableName = GridUtils.generateTableName(outputTblName, task);
            String createTableQuery = createTableTemplate.replace("?", newTableName);
            createTablesQuery.append(createTableQuery).append("; ");
            createViewQuery.append(" SELECT * FROM ").append(newTableName);
        }
        createViewQuery.append(';');
        try {
            createView(dstDbUrl, createTablesQuery.toString(), createViewQuery.toString(), jobConf);
        } catch (SQLException e) {
            LOG.error(e.getMessage(), e);
            throw new GridException(e);
        }
    } else {
        for (GridNode node : nodes) {
            GridTask task = jobConf.makeReduceTask(this, inputTableName, destTableName);
            map.put(task, node);
        }
    }
    return map;
}

From source file:gridool.replication.jobs.ReplicateTaskJob.java

public Map<GridTask, GridNode> map(GridRouter router, JobConf jobConf) throws GridException {
    final GridTask taskToReplicate = jobConf.getTask();
    final List<GridNode> destNodes = jobConf.getDestNodes();
    final Map<GridTask, GridNode> map = new IdentityHashMap<GridTask, GridNode>(destNodes.size());
    for (GridNode node : destNodes) {
        GridTask task = new ReplicatedGridTaskAdapter(this, taskToReplicate);
        map.put(task, node);
    }
    this.replicatedTask = taskToReplicate;
    this.replicaList = destNodes;

    if (LOG.isInfoEnabled()) {
        LOG.info("Start a replication [" + getJobId() + "] of a task "
                + ClassUtils.getSimpleClassName(replicatedTask) + '[' + replicatedTask.getTaskId()
                + "] to slave nodes: " + replicaList);
    }
    return map;
}

From source file:gridool.db.catalog.UpdatePartitionInCatalogJob.java

public Map<GridTask, GridNode> map(GridTaskRouter router, UpdatePartitionInCatalogJobConf jobConf)
        throws GridException {
    final GridNode localNode = config.getLocalNode();
    final GridNode[] nodes = router.getAllNodes();
    final Map<GridTask, GridNode> map = new IdentityHashMap<GridTask, GridNode>(nodes.length);
    for (final GridNode node : nodes) {
        if (!node.equals(localNode)) {
            GridTask task = new UpdatePartitionInCatalogTask(this, jobConf);
            map.put(task, node);
        }
    }
    return map;
}

From source file:gridool.db.partitioning.phihash.monetdb.MonetDBGraceMultiCSVsLoadJob.java

@Override
public Map<GridTask, GridNode> map(GridRouter router, DBPartitioningJobConf jobConf) throws GridException {
    final GridNode[] allNodes = router.getAllNodes();
    final int numNodes = allNodes.length;
    final Map<GridTask, GridNode> map = new IdentityHashMap<GridTask, GridNode>(numNodes);
    for (GridNode node : allNodes) {
        GridTask task = new MonetDBGraceCsvLoadTask(this, jobConf, false);
        map.put(task, node);
    }
    this.asgginedMap = new HashMap<GridNode, MutableLong>(numNodes);
    this.jobConf = jobConf;
    return map;
}

From source file:gridool.replication.jobs.CoordinateReplicaJob.java

public Map<GridTask, GridNode> map(GridRouter router, CoordinateReplicaJobConf jobConf) throws GridException {
    final ReplicationManager replMgr = registry.getReplicationManager();
    final int numReplicas = jobConf.getNumReplicas();

    final StringBuilder buf = new StringBuilder(256);
    buf.append("Configure replicas as follows ..\n");
    final GridNode[] nodes = router.getAllNodes();
    for (GridNode node : nodes) {
        if (replMgr.coordinateReplica(node, numReplicas, jobConf)) {
            buf.append(node).append(": ").append(node.getReplicas()).append('\n');
        } else {
            buf.append(node).append(": ").append("N/A\n");
            LOG.error("Cannot prepare " + numReplicas + " replicas for node '" + node + "': "
                    + node.getReplicas());
        }
    }
    if (LOG.isInfoEnabled()) {
        LOG.info(buf);
    }

    final Map<GridTask, GridNode> mapping = new IdentityHashMap<GridTask, GridNode>(nodes.length);
    final byte[] nodesBytes = ObjectUtils.toBytes(nodes);
    for (GridNode node : nodes) {
        GridTask task = new CoordinateReplicaTask(this, nodesBytes, jobConf);
        mapping.put(task, node);
    }

    this.failedNodes = new ArrayList<GridNode>(4);
    return mapping;
}

From source file:it.cnr.istc.iloc.gui.StateVariableVisualizer.java

@Override
public Collection<XYPlot> getPlots(Type type) {
    Collection<IItem> instances = type.getInstances();
    Collection<XYPlot> plots = new ArrayList<>(instances.size());
    Map<IItem, Collection<Atom>> sv_atoms = new IdentityHashMap<>(instances.size());
    for (IItem i : type.getInstances()) {
        sv_atoms.put(i, new ArrayList<>());
    }
    for (Atom atom : ((StateVariable) type).getDefinedPredicates().stream()
            .flatMap(p -> p.getInstances().stream()).map(a -> (Atom) a)
            .filter(a -> a.state.evaluate().isSingleton() && a.state.evaluate().contains(AtomState.Active))
            .collect(Collectors.toList())) {
        for (IItem i : ((IEnumItem) atom.get(SCOPE)).getEnumVar().evaluate().getAllowedValues()) {
            if (sv_atoms.containsKey(i)) {
                sv_atoms.get(i).add(atom);
            }
        }
    }

    for (IItem sv : instances) {
        Collection<Atom> atoms = sv_atoms.get(sv);
        // For each pulse the atoms starting at that pulse
        Map<Double, Collection<Atom>> starting_atoms = new HashMap<>(atoms.size());
        // For each pulse the atoms ending at that pulse
        Map<Double, Collection<Atom>> ending_atoms = new HashMap<>(atoms.size());

        // The pulses of the timeline
        Set<Double> c_pulses = new HashSet<>(atoms.size() * 2);

        for (Atom atom : atoms) {
            double start = sv.getCore().evaluate(((IArithItem) atom.get("start")).getArithVar());
            double end = sv.getCore().evaluate(((IArithItem) atom.get("end")).getArithVar());

            if (!starting_atoms.containsKey(start)) {
                starting_atoms.put(start, new ArrayList<>());
            }
            starting_atoms.get(start).add(atom);

            if (!ending_atoms.containsKey(end)) {
                ending_atoms.put(end, new ArrayList<>());
            }
            ending_atoms.get(end).add(atom);

            c_pulses.add(start);
            c_pulses.add(end);
        }

        // we sort current pulses..
        Double[] c_pulses_array = c_pulses.toArray(new Double[c_pulses.size()]);
        Arrays.sort(c_pulses_array);

        XYIntervalSeriesCollection collection = new XYIntervalSeriesCollection();

        ValueXYIntervalSeries undefined = new ValueXYIntervalSeries("Undefined");
        ValueXYIntervalSeries sv_values = new ValueXYIntervalSeries("Values");
        ValueXYIntervalSeries conflicts = new ValueXYIntervalSeries("Conflicts");

        List<Atom> overlapping_atoms = new ArrayList<>();
        for (int i = 0; i < c_pulses_array.length - 1; i++) {
            if (starting_atoms.containsKey(c_pulses_array[i])) {
                overlapping_atoms.addAll(starting_atoms.get(c_pulses_array[i]));
            }
            if (ending_atoms.containsKey(c_pulses_array[i])) {
                overlapping_atoms.removeAll(ending_atoms.get(c_pulses_array[i]));
            }
            switch (overlapping_atoms.size()) {
            case 0:
                undefined.add(c_pulses_array[i], c_pulses_array[i], c_pulses_array[i + 1], 0, 0, 1,
                        new Atom[0]);
                break;
            case 1:
                sv_values.add(c_pulses_array[i], c_pulses_array[i], c_pulses_array[i + 1], 0, 0, 1,
                        overlapping_atoms.toArray(new Atom[overlapping_atoms.size()]));
                break;
            default:
                conflicts.add(c_pulses_array[i], c_pulses_array[i], c_pulses_array[i + 1], 0, 0, 1,
                        overlapping_atoms.toArray(new Atom[overlapping_atoms.size()]));
                break;
            }
        }

        collection.addSeries(undefined);
        collection.addSeries(sv_values);
        collection.addSeries(conflicts);

        XYBarRenderer renderer = new XYBarRenderer();
        renderer.setSeriesPaint(0, Color.lightGray);
        renderer.setSeriesPaint(1, new Color(100, 250, 100));
        renderer.setSeriesPaint(2, Color.pink);
        renderer.setBarPainter(new ReverseGradientXYBarPainter());
        renderer.setDrawBarOutline(true);
        renderer.setShadowXOffset(2);
        renderer.setShadowYOffset(2);
        renderer.setUseYInterval(true);

        renderer.setBaseItemLabelsVisible(true);
        renderer.setBaseItemLabelPaint(Color.black);
        Font font = new Font("SansSerif", Font.PLAIN, 9);
        renderer.setBaseItemLabelFont(font);
        XYItemLabelGenerator generator = (XYDataset dataset, int series,
                int item) -> toString(((ValueXYIntervalDataItem) ((XYIntervalSeriesCollection) dataset)
                        .getSeries(series).getDataItem(item)).atoms);
        ItemLabelPosition itLabPos = new ItemLabelPosition(ItemLabelAnchor.CENTER, TextAnchor.CENTER);
        renderer.setBasePositiveItemLabelPosition(itLabPos);
        for (int i = 0; i < collection.getSeriesCount(); i++) {
            renderer.setSeriesItemLabelGenerator(i, generator);
            renderer.setSeriesItemLabelsVisible(i, true);
            renderer.setSeriesItemLabelPaint(i, Color.black);
            renderer.setSeriesItemLabelFont(i, font);
            renderer.setSeriesPositiveItemLabelPosition(i, itLabPos);
            renderer.setSeriesToolTipGenerator(i,
                    (XYDataset dataset, int series, int item) -> toString(
                            ((ValueXYIntervalDataItem) ((XYIntervalSeriesCollection) dataset).getSeries(series)
                                    .getDataItem(item)).atoms));
        }

        XYPlot plot = new XYPlot(collection, null, new NumberAxis(""), renderer);
        plot.getRangeAxis().setVisible(false);
        plot.setDatasetRenderingOrder(DatasetRenderingOrder.FORWARD);

        plots.add(plot);
    }

    return plots;
}