Example usage for java.util HashMap clone

List of usage examples for java.util HashMap clone

Introduction

On this page you can find examples of how to use java.util.HashMap.clone().

Prototype

@SuppressWarnings("unchecked")
@Override
public Object clone() 

Source Link

Document

Returns a shallow copy of this HashMap instance: the keys and values themselves are not cloned.

Usage

From source file:org.duracloud.retrieval.config.RetrievalToolConfigParserTest.java

/**
 * Asserts that option parsing fails once the given argument is removed.
 * Works on a copy of the argument map so the caller's map is untouched.
 *
 * @param retConfigParser parser under test.
 * @param argsMap complete, valid set of command-line arguments.
 * @param arg the argument key to drop before parsing.
 * @param failMsg failure message if parsing unexpectedly succeeds.
 */
private void removeArgFailTest(RetrievalToolConfigParser retConfigParser, HashMap<String, String> argsMap,
        String arg, String failMsg) {
    HashMap<String, String> workingArgs = (HashMap<String, String>) argsMap.clone();
    workingArgs.remove(arg);
    String[] cmdLine = mapToArray(workingArgs);
    try {
        retConfigParser.processOptions(cmdLine);
        fail(failMsg);
    } catch (ParseException expected) {
        assertNotNull(expected);
    }
}

From source file:org.duracloud.retrieval.config.RetrievalToolConfigParserTest.java

/**
 * Asserts that option parsing fails once the given argument/value pair is
 * added. Works on a copy of the argument map so the caller's map is untouched.
 *
 * @param retConfigParser parser under test.
 * @param argsMap complete, valid set of command-line arguments.
 * @param arg the argument key to add before parsing.
 * @param value the value to associate with {@code arg}.
 * @param failMsg failure message if parsing unexpectedly succeeds.
 */
private void addArgFailTest(RetrievalToolConfigParser retConfigParser, HashMap<String, String> argsMap,
        String arg, String value, String failMsg) {
    HashMap<String, String> workingArgs = (HashMap<String, String>) argsMap.clone();
    workingArgs.put(arg, value);
    String[] cmdLine = mapToArray(workingArgs);
    try {
        retConfigParser.processOptions(cmdLine);
        fail(failMsg);
    } catch (ParseException expected) {
        assertNotNull(expected);
    }
}

From source file:com.hangum.tadpole.rdb.core.editors.main.composite.direct.SQLResultEditingSupport.java

/**
 * Applies a cell edit from the SQL result table: validates the new value
 * against the column's RDB type, builds an UPDATE statement whose WHERE
 * clause is derived from the row's pre-edit values, and runs it only after
 * the user confirms via {@code SQLUpdateDialog}.
 *
 * @param element the row being edited (a column-index -&gt; value map).
 * @param value   the new cell value entered by the user.
 */
@Override
protected void setValue(Object element, Object value) {
    HashMap<Integer, String> data = (HashMap<Integer, String>) element;
    // Snapshot the row BEFORE mutating it; makeWhereStaement() below needs
    // the old values to locate the row in the database.
    HashMap<Integer, String> oldDataMap = (HashMap<Integer, String>) data.clone();

    String oldData = data.get(intColumnIndex) == null ? "" : data.get(intColumnIndex);
    if (oldData.equals(value.toString()))
        return; // Value unchanged - nothing to update.

    // NOTE(review): original comment here was garbled (mojibake); presumably
    // "validate the entered data type" - confirm against project history.
    String colType = RDBTypeToJavaTypeUtils.getRDBType(rsDAO.getColumnType().get(intColumnIndex));
    if (!DataTypeValidate.isValid(rsDAO.getUserDB(), colType, value.toString())) {
        MessageDialog.openError(getViewer().getControl().getShell(), Messages.get().Confirm,
                Messages.get().TextViewerEditingSupport_2 + " is " + colType + ".");
        return;
    }

    String strColumnName = rsDAO.getColumnName().get(intColumnIndex);
    String strColumnValue = "";
    // Numeric columns are emitted bare; everything else is quoted and
    // SQL-escaped before being spliced into the statement.
    if (RDBTypeToJavaTypeUtils.isNumberType(rsDAO.getColumnType().get(intColumnIndex)))
        strColumnValue = value.toString();
    else
        strColumnValue = "'" + StringEscapeUtils.escapeSql(value.toString()) + "'";

    String tableName = rsDAO.getColumnTableName().get(intColumnIndex);

    final String strUpdateStatement = String.format("UPDATE %s SET %s=%s WHERE %s", tableName, strColumnName,
            strColumnValue, makeWhereStaement(oldDataMap));
    String strFormatStatement = "";
    try {
        strFormatStatement = SQLFormater.format(strUpdateStatement);
    } catch (Exception e) {
        // Formatting is cosmetic only; fall back to the raw statement.
        strFormatStatement = strUpdateStatement;
    }
    if (logger.isDebugEnabled())
        logger.debug("Update SQL Statement is " + strFormatStatement);

    SQLUpdateDialog dialog = new SQLUpdateDialog(getViewer().getControl().getShell(), rsDAO.getUserDB(),
            strFormatStatement);
    if (Dialog.OK == dialog.open()) {
        // User confirmed: mirror the change into the in-memory row and
        // refresh the viewer so the table shows the new value.
        data.put(intColumnIndex, value.toString());
        tvSQLResult.refresh();
        tvSQLResult.getTable().setFocus();
    }
}

From source file:org.duracloud.sync.config.SyncToolConfigParserTest.java

/**
 * Asserts that standard-option parsing fails once the given argument is
 * removed. Operates on a copy so the caller's map is left intact.
 *
 * @param argsMap complete, valid set of command-line arguments.
 * @param arg the argument key to drop before parsing.
 * @param failMsg failure message if parsing unexpectedly succeeds.
 */
private void removeArgFailTest(HashMap<String, String> argsMap, String arg, String failMsg) {
    HashMap<String, String> workingArgs = (HashMap<String, String>) argsMap.clone();
    workingArgs.remove(arg);
    String[] cmdLine = mapToArray(workingArgs);
    try {
        syncConfigParser.processStandardOptions(cmdLine);
        fail(failMsg);
    } catch (ParseException expected) {
        assertNotNull(expected);
    }
}

From source file:org.duracloud.sync.config.SyncToolConfigParserTest.java

/**
 * Asserts that standard-option parsing fails once the given argument/value
 * pair is added. Operates on a copy so the caller's map is left intact.
 *
 * @param argsMap complete, valid set of command-line arguments.
 * @param arg the argument key to add before parsing.
 * @param value the value to associate with {@code arg}.
 * @param failMsg failure message if parsing unexpectedly succeeds.
 */
private void addArgFailTest(HashMap<String, String> argsMap, String arg, String value, String failMsg) {
    HashMap<String, String> workingArgs = (HashMap<String, String>) argsMap.clone();
    workingArgs.put(arg, value);
    String[] cmdLine = mapToArray(workingArgs);
    try {
        syncConfigParser.processStandardOptions(cmdLine);
        fail(failMsg);
    } catch (ParseException expected) {
        assertNotNull(expected);
    }
}

From source file:com.wormsim.data.SimulationConditions.java

/**
 * Creates a set of simulation conditions.
 *
 * @param p_food_dist distribution of the initial food amount.
 * @param p_pheromone_dists per-channel pheromone distributions; wrapped as an
 *        unmodifiable list view.
 * @param p_group_dists per-group count distributions; defensively copied so
 *        later caller mutations cannot leak into this object.
 */
public SimulationConditions(RealDistribution p_food_dist, RealDistribution[] p_pheromone_dists,
        HashMap<String, IntegerDistribution> p_group_dists) {
    this.food_dist = p_food_dist;
    this.pheromone_dists = Collections.unmodifiableList(Arrays.asList(p_pheromone_dists));
    // Copy constructor instead of clone(): no unchecked cast required, and
    // the map is protected from mutation through the caller's reference.
    this.group_dists = Collections.unmodifiableMap(new HashMap<>(p_group_dists));
}

From source file:org.apache.flink.streaming.connectors.kafka.KafkaConsumerTest.java

/**
 * Verifies that {@code snapshotState} returns an independent copy of the
 * consumer's current offsets: mutating the live offsets map after the
 * snapshot must not change the already-returned checkpoint, and pending
 * checkpoints must stay bounded by MAX_NUM_PENDING_CHECKPOINTS.
 */
@Test
public void testSnapshot() {
    try {
        // The fields under test are private on the consumer base class, so
        // reach them via reflection.
        Field offsetsField = FlinkKafkaConsumerBase.class.getDeclaredField("offsetsState");
        Field runningField = FlinkKafkaConsumerBase.class.getDeclaredField("running");
        Field mapField = FlinkKafkaConsumerBase.class.getDeclaredField("pendingCheckpoints");

        offsetsField.setAccessible(true);
        runningField.setAccessible(true);
        mapField.setAccessible(true);

        // Mock everything except the method under test.
        FlinkKafkaConsumer08<?> consumer = mock(FlinkKafkaConsumer08.class);
        when(consumer.snapshotState(anyLong(), anyLong())).thenCallRealMethod();

        // Seed the consumer with a known partition -> offset mapping.
        HashMap<KafkaTopicPartition, Long> testOffsets = new HashMap<>();
        long[] offsets = new long[] { 43, 6146, 133, 16, 162, 616 };
        int j = 0;
        for (long i : offsets) {
            KafkaTopicPartition ktp = new KafkaTopicPartition("topic", j++);
            testOffsets.put(ktp, i);
        }

        LinkedMap map = new LinkedMap();

        offsetsField.set(consumer, testOffsets);
        runningField.set(consumer, true);
        mapField.set(consumer, map);

        assertTrue(map.isEmpty());

        // make multiple checkpoints
        for (long checkpointId = 10L; checkpointId <= 2000L; checkpointId += 9L) {
            HashMap<KafkaTopicPartition, Long> checkpoint = consumer.snapshotState(checkpointId,
                    47 * checkpointId);
            assertEquals(testOffsets, checkpoint);

            // change the offsets, make sure the snapshot did not change
            HashMap<KafkaTopicPartition, Long> checkpointCopy = (HashMap<KafkaTopicPartition, Long>) checkpoint
                    .clone();

            for (Map.Entry<KafkaTopicPartition, Long> e : testOffsets.entrySet()) {
                testOffsets.put(e.getKey(), e.getValue() + 1);
            }

            // The checkpoint must be decoupled from the live offsets map.
            assertEquals(checkpointCopy, checkpoint);

            // Pending checkpoints accumulate but must respect the cap.
            assertTrue(map.size() > 0);
            assertTrue(map.size() <= FlinkKafkaConsumer08.MAX_NUM_PENDING_CHECKPOINTS);
        }
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}

From source file:org.phylowidget.tree.PhyloNode.java

/**
 * Returns this node's annotations augmented with the derived "Label" and
 * "Branch Length" entries, without modifying the stored annotation map.
 *
 * @return a fresh, caller-owned map; never {@code null}.
 */
public HashMap<String, String> getFullAnnotations() {
    HashMap<String, String> annot = getAnnotations();
    // Copy constructor instead of clone(): avoids the unchecked cast and
    // folds the null-annotations case into a single expression.
    HashMap<String, String> full =
            (annot == null) ? new HashMap<String, String>() : new HashMap<String, String>(annot);
    full.put("Label", getLabel());
    full.put("Branch Length", "" + getTree().getBranchLength(this));
    return full;
}

From source file:org.apache.axis2.deployment.DeploymentEngine.java

/**
 * Registers a new module with the Axis configuration: wires up handler
 * chains for all four flows, adds the module, and then re-deploys any
 * services that previously failed because this module was missing.
 *
 * @param modulemetadata the module to register.
 * @param axisConfiguration configuration to register the module into.
 * @throws AxisFault if handler wiring or service-group recovery fails.
 */
public static void addNewModule(AxisModule modulemetadata, AxisConfiguration axisConfiguration)
        throws AxisFault {

    Flow inflow = modulemetadata.getInFlow();
    ClassLoader moduleClassLoader = modulemetadata.getModuleClassLoader();

    if (inflow != null) {
        Utils.addFlowHandlers(inflow, moduleClassLoader);
    }

    Flow outFlow = modulemetadata.getOutFlow();

    if (outFlow != null) {
        Utils.addFlowHandlers(outFlow, moduleClassLoader);
    }

    Flow faultInFlow = modulemetadata.getFaultInFlow();

    if (faultInFlow != null) {
        Utils.addFlowHandlers(faultInFlow, moduleClassLoader);
    }

    Flow faultOutFlow = modulemetadata.getFaultOutFlow();

    if (faultOutFlow != null) {
        Utils.addFlowHandlers(faultOutFlow, moduleClassLoader);
    }

    axisConfiguration.addModule(modulemetadata);
    log.debug(Messages.getMessage(DeploymentErrorMsgs.ADDING_NEW_MODULE));

    // Lock on the shared faulty-services registry while recovering, so
    // concurrent deployers see a consistent view.
    synchronized (axisConfiguration.getFaultyServicesDuetoModules()) {

        //Check whether there are faulty services due to this module
        HashMap<String, FaultyServiceData> faultyServices = (HashMap<String, FaultyServiceData>) axisConfiguration
                .getFaultyServicesDuetoModule(modulemetadata.getName());
        faultyServices = (HashMap<String, FaultyServiceData>) faultyServices.clone();

        // Here iterating a cloned hashmap and modifying the original hashmap.
        // To avoid the ConcurrentModificationException.
        for (FaultyServiceData faultyServiceData : faultyServices.values()) {

            // Remove the faulty record first, then attempt re-deployment.
            axisConfiguration.removeFaultyServiceDuetoModule(modulemetadata.getName(),
                    faultyServiceData.getServiceGroup().getServiceGroupName());

            //Recover the faulty serviceGroup.
            addServiceGroup(faultyServiceData.getServiceGroup(), faultyServiceData.getServiceList(),
                    faultyServiceData.getServiceLocation(), faultyServiceData.getCurrentDeploymentFile(),
                    axisConfiguration);
        }
    }
}

From source file:org.duracloud.sync.config.SyncToolConfigParserTest.java

/**
 * Verifies config-backup rotation: after a second backup, the current
 * backup file holds the new arguments and the previous-backup file holds
 * the original ones.
 */
@Test
public void testPrevBackupFile() throws Exception {
    HashMap<String, String> argsMap = getArgsMap();
    String[] args = mapToArray(argsMap);

    // First backup
    syncConfigParser.backupConfig(tempDir, args);
    File backupFile = getBackupFile();
    String[] retrieveArgs = syncConfigParser.retrieveConfig(backupFile);
    compareArrays(args, retrieveArgs);

    // Copy constructor instead of clone(): no unchecked cast warning.
    HashMap<String, String> newArgsMap = new HashMap<>(argsMap);
    newArgsMap.put("-z", "new");
    String[] newArgs = mapToArray(newArgsMap);

    // Second backup
    syncConfigParser.backupConfig(tempDir, newArgs);

    // Check config file (should be new args)
    backupFile = getBackupFile();
    retrieveArgs = syncConfigParser.retrieveConfig(backupFile);
    compareArrays(newArgs, retrieveArgs);

    // Check previous config backup (should be old args)
    backupFile = getPrevBackupFile();
    retrieveArgs = syncConfigParser.retrieveConfig(backupFile);
    compareArrays(args, retrieveArgs);
}