Example usage for java.util HashMap remove

Introduction

This page collects example usages of the java.util.HashMap.remove method.

Prototype

public V remove(Object key) 

Document

Removes the mapping for the specified key from this map if present.
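
As a quick illustration of the contract above: remove returns the value that was previously associated with the key, or null if the map contained no mapping for it (a null return can also mean the key was explicitly mapped to null). A minimal, self-contained sketch:

import java.util.HashMap;

public class RemoveDemo {
    public static void main(String[] args) {
        HashMap<String, Integer> counts = new HashMap<>();
        counts.put("apples", 3);

        // Removing an existing key returns the old value and shrinks the map.
        Integer removed = counts.remove("apples");
        System.out.println(removed);       // 3
        System.out.println(counts.size()); // 0

        // Removing an absent key is a no-op and returns null.
        System.out.println(counts.remove("pears")); // null
    }
}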

Usage

From source file:de.dfki.km.perspecting.obie.model.Document.java

/**
 * Returns all RDF subjects with matching literal property values in text.
 */
public List<TokenSequence<SemanticEntity>> getResolvedSubjects() {

    // collection that will be returned as result
    List<TokenSequence<SemanticEntity>> entities = new ArrayList<TokenSequence<SemanticEntity>>();

    HashMap<Integer, TokenSequence<SemanticEntity>> map = new HashMap<Integer, TokenSequence<SemanticEntity>>();

    for (int tokenIndex : data.getIntegerKeys(TokenSequence.SUBJECT)) {
        List<SemanticEntity> values = data.get(TokenSequence.SUBJECT, tokenIndex);
        assert values != null; // when does this occur?
        for (SemanticEntity value : values) {

            int subject = value.getSubjectIndex();
            if (value.getPosition().equals("B")) {
                TokenSequence<SemanticEntity> entity = map.get(subject);
                if (entity != null) {
                    entities.add(map.remove(subject));
                }
                entity = new TokenSequence<SemanticEntity>(value);
                entity.addToken(new Token(tokenIndex, this));
                map.put(subject, entity);
            } else {
                map.get(subject).addToken(new Token(tokenIndex, this));
            }
        }

    }
    entities.addAll(map.values());

    return entities;
}

From source file:org.openbaton.marketplace.core.VNFPackageManagement.java

public ByteArrayOutputStream compose(String id) throws IOException, ArchiveException {

    VNFPackageMetadata vnfPackageMetadata = vnfPackageMetadataRepository.findFirstById(id);
    String vnfPackageName = vnfPackageMetadata.getName();
    VirtualNetworkFunctionDescriptor vnfd = vnfPackageMetadata.getVnfd();
    VNFPackage vnfPackage = vnfPackageMetadata.getVnfPackage();
    ImageMetadata imageMetadata = vnfPackageMetadata.getImageMetadata();
    NFVImage nfvImage = vnfPackageMetadata.getNfvImage();
    String vnfdJson = mapper.toJson(vnfd);

    HashMap<String, Object> imageConfigJson = new ObjectMapper().readValue(mapper.toJson(nfvImage),
            HashMap.class);
    imageConfigJson.put("minDisk", imageConfigJson.get("minDiskSpace"));
    Object minCPU = imageConfigJson.get("minCPU");
    if (minCPU != null) {
        imageConfigJson.put("minCPU", Integer.parseInt((String) minCPU));
    } else {
        imageConfigJson.put("minCPU", 0);
    }
    imageConfigJson.remove("minDiskSpace");
    imageConfigJson.remove("id");
    imageConfigJson.remove("hb_version");

    HashMap<String, String> imageMetadataJson = new ObjectMapper().readValue(mapper.toJson(imageMetadata),
            HashMap.class);
    imageMetadataJson.put("link", imageMetadata.getLink());
    imageMetadataJson.remove("id");
    imageMetadataJson.remove("hb_version");

    ByteArrayOutputStream tar_output = new ByteArrayOutputStream();
    ArchiveOutputStream my_tar_ball = new ArchiveStreamFactory()
            .createArchiveOutputStream(ArchiveStreamFactory.TAR, tar_output);

    //prepare Metadata.yaml
    File tar_input_file = File.createTempFile("Metadata", null);
    Map<String, Object> data = new HashMap<String, Object>();
    data.put("name", vnfPackageName);
    data.put("description", vnfPackageMetadata.getDescription());
    data.put("provider", vnfPackageMetadata.getProvider());
    data.put("requirements", vnfPackageMetadata.getRequirements());
    data.put("shared", vnfPackageMetadata.isShared());
    data.put("image", imageMetadataJson);
    data.put("image-config", imageConfigJson);
    data.put("scripts-link", vnfPackage.getScriptsLink());
    DumperOptions options = new DumperOptions();
    options.setDefaultFlowStyle(DumperOptions.FlowStyle.BLOCK);
    Yaml yaml = new Yaml(options);
    FileWriter writer = new FileWriter(tar_input_file);
    yaml.dump(data, writer);
    TarArchiveEntry tar_file = new TarArchiveEntry(tar_input_file, "Metadata.yaml");
    tar_file.setSize(tar_input_file.length());
    my_tar_ball.putArchiveEntry(tar_file);
    IOUtils.copy(new FileInputStream(tar_input_file), my_tar_ball);
    /* Close Archive entry, write trailer information */
    my_tar_ball.closeArchiveEntry();

    //prepare VNFD
    tar_input_file = File.createTempFile("vnfd", null);
    tar_file = new TarArchiveEntry(tar_input_file, "vnfd.json");
    writer = new FileWriter(tar_input_file);
    writer.write(vnfdJson);
    writer.close();
    tar_file.setSize(tar_input_file.length());
    my_tar_ball.putArchiveEntry(tar_file);
    IOUtils.copy(new FileInputStream(tar_input_file), my_tar_ball);
    /* Close Archive entry, write trailer information */
    my_tar_ball.closeArchiveEntry();

    //scripts
    for (Script script : vnfPackage.getScripts()) {
        tar_input_file = File.createTempFile("script", null);
        tar_file = new TarArchiveEntry(tar_input_file, "scripts/" + script.getName());
        FileOutputStream outputStream = new FileOutputStream(tar_input_file);
        outputStream.write(script.getPayload());
        outputStream.close();
        tar_file.setSize(tar_input_file.length());
        my_tar_ball.putArchiveEntry(tar_file);
        IOUtils.copy(new FileInputStream(tar_input_file), my_tar_ball);
        my_tar_ball.closeArchiveEntry();
    }

    //close tar
    my_tar_ball.finish();
    /* Close output stream, our files are zipped */
    tar_output.close();
    return tar_output;
}
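
As a follow-up note on the blocks above that prune imageConfigJson and imageMetadataJson: since remove returns the old value, the put of "minDisk" and the removal of "minDiskSpace" can be combined into one statement. A minimal sketch of that rename-and-prune idiom on a plain map (hypothetical helper name; the minCPU handling and the Jackson round-trip are omitted):

import java.util.HashMap;
import java.util.Map;

class ImageConfigCleanup {
    /** Renames minDiskSpace to minDisk and drops internal fields before the map is dumped to YAML. */
    static Map<String, Object> cleanUp(HashMap<String, Object> imageConfig) {
        // remove returns the old value, so the rename is a single put of what was just removed.
        imageConfig.put("minDisk", imageConfig.remove("minDiskSpace"));
        imageConfig.remove("id");
        imageConfig.remove("hb_version");
        return imageConfig;
    }
}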

From source file:de.xirp.plugin.PluginLoader.java

/**
 * Checks the needs of all plugins.
 * 
 * @see IPlugable#requiredLibs()
 */
@SuppressWarnings("unchecked")
private static void checkAllNeeds() {
    HashMap<String, IPlugable> plugables = new HashMap<String, IPlugable>();
    MultiValueHashMap<String, String> refs = new MultiValueHashMap<String, String>();

    // list of all plugins
    List<String> fullPluginList = new ArrayList<String>(plugins.size());
    for (PluginInfo info : plugins.values()) {
        fullPluginList.add(info.getMainClass());
    }

    ClassLoader loader = logClass.getClass().getClassLoader();

    // Read the list of available jars from the class path
    String cp = ManagementFactory.getRuntimeMXBean().getClassPath();
    // String cp = System.getProperty("java.class.path");
    // //$NON-NLS-1$
    String[] jars = cp.split(File.pathSeparator);
    List<String> jarList = new ArrayList<String>(jars.length);
    for (String jar : jars) {
        jarList.add(FilenameUtils.getName(jar));
    }
    // The initial list of current plugins equals the full list.
    // Every plugin that does not fulfill its needs
    // is removed from this list.
    currentPluginList = new ArrayList<String>(fullPluginList);
    for (PluginInfo info : plugins.values()) {
        try {
            SecurePluginView view = PluginManager.getInstance(info, Robot.NAME_NONE);
            plugables.put(info.getMainClass(), view);
            boolean check = checkNeeds(view, loader, jarList);
            if (!check) {
                // remove plugins which reference this plugin
                removeRefs(info.getMainClass());
            }
        } catch (Exception e) {
            logClass.trace(e, e);
        }
    }
    // Remove all plugins of the full list
    // which are no longer contained in the current list
    for (String clazz : fullPluginList) {
        if (!currentPluginList.contains(clazz)) {
            plugins.remove(clazz);
            plugables.remove(clazz);
        }
    }
    instances = new ArrayList<IPlugable>(plugables.values());
    refs.clear();
    refs = null;
    currentPluginList.clear();
    currentPluginList = null;
    fullPluginList.clear();
    fullPluginList = null;
}

From source file:com.google.gwt.emultest.java.util.HashMapTest.java

/**
 * Test method for 'java.util.HashMap.size()'.
 */
public void testSize() {
    HashMap<String, String> hashMap = new HashMap<String, String>();
    checkEmptyHashMapAssumptions(hashMap);

    // Test size behavior on put
    assertEquals(hashMap.size(), SIZE_ZERO);
    hashMap.put(KEY_1, VALUE_1);
    assertEquals(hashMap.size(), SIZE_ONE);
    hashMap.put(KEY_2, VALUE_2);
    assertEquals(hashMap.size(), SIZE_TWO);
    hashMap.put(KEY_3, VALUE_3);
    assertEquals(hashMap.size(), SIZE_THREE);

    // Test size behavior on remove
    hashMap.remove(KEY_1);
    assertEquals(hashMap.size(), SIZE_TWO);
    hashMap.remove(KEY_2);
    assertEquals(hashMap.size(), SIZE_ONE);
    hashMap.remove(KEY_3);
    assertEquals(hashMap.size(), SIZE_ZERO);

    // Test size behavior on putAll
    hashMap.put(KEY_1, VALUE_1);
    hashMap.put(KEY_2, VALUE_2);
    hashMap.put(KEY_3, VALUE_3);
    HashMap<String, String> srcMap = new HashMap<String, String>(hashMap);
    hashMap.putAll(srcMap);
    assertEquals(hashMap.size(), SIZE_THREE);

    // Test size behavior on clear
    hashMap.clear();
    assertEquals(hashMap.size(), SIZE_ZERO);
}

From source file:de.uni_potsdam.hpi.bpt.bp2014.jcore.rest.RestInterface.java

/**
 * Returns a JSON-Object, which contains information about all
 * data objects of a specified scenario instance.
 * The data contains the id, label and state.
 *
 * @param scenarioID   The ID of the scenario model.
 * @param instanceID   The ID of the scenario instance.
 * @param filterString A String which specifies a filter. Only Data
 *                     Objects with a label containing this string
 *                     will be returned.
 * @return A Response with the outcome of the GET request. The Response
 * will be a 200 (OK) if the specified instance was found, in which case
 * the JSON-Object is returned.
 * It will be a 301 (REDIRECT) if the scenarioID is wrong,
 * and a 404 if the instance ID is wrong.
 */
@GET
@Path("scenario/{scenarioID}/instance/{instanceID}/dataobject")
@Produces(MediaType.APPLICATION_JSON)
public Response getDataObjects(@Context UriInfo uriInfo, @PathParam("scenarioID") int scenarioID,
        @PathParam("instanceID") int instanceID, @QueryParam("filter") String filterString) {
    ExecutionService executionService = new ExecutionService();
    //TODO: add link to detail REST call for more information about each dataobject
    if (!executionService.existScenarioInstance(instanceID)) {
        return Response.status(Response.Status.NOT_FOUND).type(MediaType.APPLICATION_JSON)
                .entity("{\"error\":\"There is no instance with the id " + instanceID + "\"}").build();
    } else if (!executionService.existScenario(scenarioID)) {
        try {
            return Response.seeOther(new URI(
                    "interface/v2/scenario/" + executionService.getScenarioIDForScenarioInstance(instanceID)
                            + "/instance/" + instanceID + "/dataobject"))
                    .build();
        } catch (URISyntaxException e) {
            return Response.serverError().build();
        }
    }

    executionService.openExistingScenarioInstance(scenarioID, instanceID);
    LinkedList<Integer> dataObjects = executionService.getAllDataObjectIDs(instanceID);
    HashMap<Integer, String> states = executionService.getAllDataObjectStates(instanceID);
    HashMap<Integer, String> labels = executionService.getAllDataObjectNames(instanceID);
    if (filterString != null && !filterString.isEmpty()) {
        for (Map.Entry<Integer, String> labelEntry : labels.entrySet()) {
            if (!labelEntry.getValue().contains(filterString)) {
                dataObjects.remove(labelEntry.getKey());
                states.remove(labelEntry.getKey());
                labels.remove(labelEntry.getKey());
            }
        }
    }
    JSONObject result = buildListForDataObjects(uriInfo, dataObjects, states, labels);
    return Response.ok(result.toString(), MediaType.APPLICATION_JSON).build();
}
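
One caveat about the filter loop at the end of this method: it calls labels.remove(...) while iterating over labels.entrySet() with a for-each loop. HashMap iterators are fail-fast, so this will typically throw a ConcurrentModificationException as soon as iteration continues past a removed entry. Below is a minimal sketch (not the project's code; names mirror the method above) of the same filter using the entry iterator's own remove, which is the supported way to drop entries mid-iteration:

import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.Map;

class DataObjectFilter {
    /** Drops every data object whose label does not contain filterString (a sketch, not the project's code). */
    static void filter(String filterString, LinkedList<Integer> dataObjects,
            HashMap<Integer, String> states, HashMap<Integer, String> labels) {
        if (filterString == null || filterString.isEmpty()) {
            return;
        }
        Iterator<Map.Entry<Integer, String>> it = labels.entrySet().iterator();
        while (it.hasNext()) {
            Map.Entry<Integer, String> labelEntry = it.next();
            if (!labelEntry.getValue().contains(filterString)) {
                dataObjects.remove(labelEntry.getKey()); // List.remove(Object): the key is a boxed Integer
                states.remove(labelEntry.getKey());
                it.remove(); // safe removal from labels while iterating its entry set
            }
        }
    }
}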

From source file:org.eclipse.mylyn.internal.bugzilla.core.service.BugzillaXmlRpcClient.java

public HashMap<?, ?> getTime(final IProgressMonitor monitor) throws XmlRpcException {
    return (new BugzillaXmlRpcOperation<HashMap<?, ?>>(this) {
        @Override
        public HashMap<?, ?> execute() throws XmlRpcException {
            HashMap<?, ?> response = (HashMap<?, ?>) call(monitor, XML_BUGZILLA_TIME, (Object[]) null);
            if (response != null) {
                for (String exclude : XML_BUGZILLA_TIME_RESPONSE_TO_REMOVE) {
                    response.remove(exclude);
                }
            }
            return response;
        }
    }).execute();
}

From source file:edu.uga.cs.fluxbuster.clustering.ClusterGenerator.java

/**
 * Copies candidate flux domains into a list if they appear in a domain file up
 * to a limit on the size of the list.  The candidate flux domains are copied 
 * from a map of candidate flux domains.  Once a candidate flux domain is copied
 * it is removed from the map.
 * 
 * @param domainfile the file from which to read the domains
 * @param maxCandidateDomains the limit on the total number of domains to add
 * @param resultBuf the list in which to store the candidate flux domains
 * @param seenDomains the map of candidate flux domains.
 * @throws IOException
 */
private void addDomainsFromFile(String domainfile, int maxCandidateDomains, List<CandidateFluxDomain> resultBuf,
        HashMap<String, CandidateFluxDomain> seenDomains) throws IOException {
    BufferedReader br = new BufferedReader(new FileReader(new File(domainfile)));
    String line = null;
    while ((line = br.readLine()) != null) {
        if (resultBuf.size() == maxCandidateDomains) {
            break;
        }
        line = DomainNameUtils.stripDots(line.trim());
        CandidateFluxDomain d = seenDomains.get(line);
        if (d != null) {
            if (log.isDebugEnabled()) {
                log.debug("Adding domain " + line + " from domains file.");
            }
            resultBuf.add(d);
            seenDomains.remove(line);
        } else {
            if (log.isDebugEnabled()) {
                log.debug("Unable to load domain " + line + " from domains file.");
            }
        }
    }
    br.close();
}
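
Since remove returns the previously mapped value, the get followed by remove in the loop above can be collapsed into a single call. A minimal sketch of that idiom, using a hypothetical helper with String values in place of CandidateFluxDomain and with the debug logging omitted:

import java.util.HashMap;
import java.util.List;

class MoveOnMatch {
    /** Moves the entry for the given domain (if present) from the map into the result list. */
    static void moveIfSeen(String domain, HashMap<String, String> seenDomains, List<String> resultBuf) {
        String d = seenDomains.remove(domain); // lookup and removal in one call; null if the key was absent
        if (d != null) {
            resultBuf.add(d);
        }
    }
}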

From source file:de.dfki.km.perspecting.obie.model.Document.java

public List<TokenSequence<SemanticEntity>> getRetrievedPropertyValues() {
    List<TokenSequence<SemanticEntity>> entities = new ArrayList<TokenSequence<SemanticEntity>>();

    HashMap<String, TokenSequence<SemanticEntity>> map = new HashMap<String, TokenSequence<SemanticEntity>>();

    for (int tokenIndex : this.data.getIntegerKeys(TokenSequence.PROPERTY)) {
        List<SemanticEntity> values = this.data.get(TokenSequence.PROPERTY, tokenIndex);
        if (values != null) {
            for (SemanticEntity value : values) {

                String key = Integer.toString(value.getPropertyIndex())
                        + Integer.toString(value.getLiteralValueIndex());

                if (value.getPosition().equals("B")) {
                    TokenSequence<SemanticEntity> entity = map.get(key);
                    if (entity != null) {
                        entities.add(map.remove(key));
                    }
                    entity = new TokenSequence<SemanticEntity>(value);
                    entity.addToken(new Token(tokenIndex, this));
                    map.put(key, entity);
                } else {
                    map.get(key).addToken(new Token(tokenIndex, this));
                }
            }
        } else {
            entities.addAll(map.values());
            map.clear();
        }
    }
    entities.addAll(map.values());

    return entities;
}

From source file:ch.icclab.cyclops.services.iaas.openstack.resource.impl.TelemetryResource.java

/**
 * In this method, the usage metrics from the cumulative meters are extracted.
 * <p/>
 * Pseudo code:
 * 1. Query the sample API of Telemetry
 * 2. Receive the ungrouped samples, already sorted by timestamp
 * 3. Group the samples on a per-resource basis
 * 4. Iterate through the array, adding the difference between each pair of consecutive samples
 * 5. If the difference is negative, adjust the value as per the datatype's max limit
 * 6. Save all these details along with the usage in the db
 *
 * @param token The token generated by the keystone service, used for authorization by the Telemetry Service
 * @return output A boolean indicating the success or failure of the data extraction process
 * @throws JSONException
 * @throws IOException
 */
private boolean getCumulativeMeterData(ArrayList<String> meter, String token) {
    logger.trace("BEGIN getCumulativeMeterData(ArrayList<String> meter, String token)");
    boolean output = false;
    String response = null;
    Set keySet;
    String meterType = "cumulative";
    CumulativeMeterData data = null;
    LinkedList<CumulativeMeterData> linkedList;
    JSONArray array = null;

    ObjectMapper mapper = new ObjectMapper();
    TelemetryClient tClient = new TelemetryClient();
    ArrayList<CumulativeMeterData> cMeterArr;
    TSDBResource dbResource = new TSDBResource();
    HashMap<String, LinkedList<CumulativeMeterData>> map;

    for (int j = 0; j < meter.size(); j++) {
        cMeterArr = new ArrayList<CumulativeMeterData>();
        map = new HashMap<String, LinkedList<CumulativeMeterData>>();
        try {
            response = tClient.getData(token, meter.get(j), meterType);
            array = new JSONArray(response);

            //Build a hashmap with the resource ID as key and a linked list of that resource's samples as value.
            for (int i = 0; i < array.length(); i++) {
                JSONObject obj = null;
                obj = array.getJSONObject(i);
                data = mapper.readValue(obj.toString(), CumulativeMeterData.class);
                //Add this sample to the linked list for its resource (instance)
                if (map.containsKey(data.getResource_id())) {
                    linkedList = map.get(data.getResource_id());
                    linkedList.add(data);
                    map.remove(data.getResource_id());
                    map.put(data.getResource_id(), linkedList);
                } else {
                    linkedList = new LinkedList<CumulativeMeterData>();
                    linkedList.add(data);
                    map.put(data.getResource_id(), linkedList);
                }
            }

            //Get the set of keys (resource IDs)
            keySet = map.keySet();
            Iterator setIterator = keySet.iterator();

            //Iterate through the Set to extract the LinkedList
            while (setIterator.hasNext()) {
                linkedList = map.get(setIterator.next());
                cMeterArr = calculateCumulativeMeterUsage(cMeterArr, linkedList);
            }
            dbResource.saveCumulativeMeterData(cMeterArr, meter.get(j));
            output = true;
        } catch (IOException e) {
            logger.error("EXCEPTION IOEXCEPTION getCumulativeMeterData(ArrayList<String> meter, String token)");
            output = false;
            e.printStackTrace();
            return output;
        } catch (JSONException e) {
            logger.error(
                    "EXCEPTION JSONEXCEPTION getCumulativeMeterData(ArrayList<String> meter, String token)");
            output = false;
            e.printStackTrace();
            return output;
        }
    }
    logger.trace("END getCumulativeMeterData(ArrayList<String> meter, String token)");
    return output;
}
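
A side note on the grouping loop near the top of this method: the map.remove(...) followed by map.put(...) is not strictly needed, because the LinkedList returned by map.get(...) is the same object already stored in the map, so adding to it is immediately visible through the map. A minimal Java 8+ sketch of the same grouping step (CumulativeMeterData replaced by String for brevity; hypothetical helper name):

import java.util.HashMap;
import java.util.LinkedList;

class GroupByResource {
    /** Groups sample payloads by resource id. */
    static void add(HashMap<String, LinkedList<String>> map, String resourceId, String sample) {
        // The remove/put pair in the original is unnecessary: the list obtained from the map is
        // the same object that is already stored, so appending to it is visible through the map.
        map.computeIfAbsent(resourceId, k -> new LinkedList<>()).add(sample);
    }
}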

From source file:de.fhg.fokus.odp.middleware.ckan.CKANGatewayUtil.java

/**
 * Sorts the passed map by its values, in descending order.
 * 
 * @param passedMap
 *            the HashMap to sort.
 * @return the sorted HashMap.
 */
@SuppressWarnings({ "rawtypes", "unchecked" })
private static LinkedHashMap sortHashMapByValues(HashMap<String, Long> passedMap) {
    List<String> mapKeys = new ArrayList<String>(passedMap.keySet());
    List<Long> mapValues = new ArrayList<Long>(passedMap.values());

    Comparator comparator = Collections.reverseOrder();
    Collections.sort(mapValues, comparator);
    Collections.sort(mapKeys, comparator);

    LinkedHashMap<String, Long> sortedMap = new LinkedHashMap<String, Long>();

    Iterator valueIt = mapValues.iterator();
    while (valueIt.hasNext()) {
        Object val = valueIt.next();
        Iterator keyIt = mapKeys.iterator();

        while (keyIt.hasNext()) {
            Object key = keyIt.next();
            String comp1 = passedMap.get(key).toString();
            String comp2 = val.toString();

            if (comp1.equals(comp2)) {
                passedMap.remove(key);
                mapKeys.remove(key);
                sortedMap.put((String) key, (Long) val);
                break;
            }

        }

    }
    return sortedMap;

}
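
As a follow-up note: this helper is destructive (each matched key is removed from passedMap), and matching entries by comparing value strings can pair a value with the wrong key when two keys share the same value. A non-destructive sketch of sorting a map by descending value with a stream over the entry set (Java 8+, hypothetical helper name):

import java.util.Comparator;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.stream.Collectors;

class SortByValue {
    /** Returns a new LinkedHashMap with the entries of the input ordered by descending value. */
    static LinkedHashMap<String, Long> sortByValuesDescending(Map<String, Long> passedMap) {
        return passedMap.entrySet().stream()
                .sorted(Map.Entry.<String, Long>comparingByValue(Comparator.reverseOrder()))
                .collect(Collectors.toMap(
                        Map.Entry::getKey,
                        Map.Entry::getValue,
                        (a, b) -> a,              // merge function (keys of the source map are unique)
                        LinkedHashMap::new));     // preserve the sorted order
    }
}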