Example usage for java.util HashMap values

List of usage examples for java.util HashMap values

Introduction

This page collects usage examples for java.util.HashMap.values().

Prototype

public Collection<V> values() 

Document

Returns a Collection view of the values contained in this map.
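
For orientation before the real-world examples below, here is a minimal, self-contained sketch (class name and map contents are made up for illustration) of the view semantics described above: the collection returned by values() is backed by the map, so removing a value through the view removes the matching entry, and later changes to the map are visible through the view.

import java.util.Collection;
import java.util.HashMap;

public class HashMapValuesDemo {
    public static void main(String[] args) {
        HashMap<String, Integer> scores = new HashMap<String, Integer>();
        scores.put("alice", 10);
        scores.put("bob", 20);

        // values() returns a live view backed by the map, not a copy.
        Collection<Integer> values = scores.values();
        System.out.println(values); // e.g. [20, 10] (iteration order is unspecified)

        // Removing through the view removes the corresponding entry from the map.
        values.remove(Integer.valueOf(20));
        System.out.println(scores); // {alice=10}

        // Changes to the map are reflected in the view.
        scores.put("carol", 30);
        System.out.println(values.size()); // 2
    }
}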

Usage

From source file:gov.nih.nci.cabig.caaers.service.synchronizer.StudyDiseasesSynchronizer.java

public void migrate(Study dbStudy, Study xmlStudy, DomainObjectImportOutcome<Study> outcome) {

    //ignore if disease section is empty in xmlstudy
    if (CollectionUtils.isEmpty(xmlStudy.getActiveStudyDiseases())) {
        return;
    }

    //create an Index of existing study diseases
    HashMap<AbstractStudyDisease<? extends DomainObject>, AbstractStudyDisease<? extends DomainObject>> dbDiseasesIndexMap = new HashMap<AbstractStudyDisease<? extends DomainObject>, AbstractStudyDisease<? extends DomainObject>>();

    for (AbstractStudyDisease<? extends DomainObject> studyDisease : dbStudy.getActiveStudyDiseases()) {
        dbDiseasesIndexMap.put(studyDisease, studyDisease);
    }

    //loop through the xml study, then add/update existing diseases
    for (AbstractStudyDisease<? extends DomainObject> xmlDisease : xmlStudy.getActiveStudyDiseases()) {
        AbstractStudyDisease<? extends DomainObject> disease = dbDiseasesIndexMap.remove(xmlDisease);
        if (disease == null) {
            //new disease, so add to dbstudy
            if (xmlDisease instanceof CtepStudyDisease)
                dbStudy.addCtepStudyDisease((CtepStudyDisease) xmlDisease);
            if (xmlDisease instanceof MeddraStudyDisease)
                dbStudy.addMeddraStudyDisease((MeddraStudyDisease) xmlDisease);
            if (xmlDisease instanceof StudyCondition)
                dbStudy.addStudyCondition((StudyCondition) xmlDisease);
            continue;
        }

        //update the primary indicator (if CTEP Disease)
        if (disease instanceof CtepStudyDisease) {
            ((CtepStudyDisease) disease).setLeadDisease(((CtepStudyDisease) xmlDisease).getLeadDisease());
        }
    }

    //mark retired the diseases still in index
    AbstractMutableRetireableDomainObject.retire(dbDiseasesIndexMap.values());

}

From source file:de.dfki.km.perspecting.obie.model.Document.java

/**
 * Returns all RDF subjects with matching literal property values in text.
 */
public List<TokenSequence<SemanticEntity>> getResolvedSubjects() {

    // collection that will be returned as result
    List<TokenSequence<SemanticEntity>> entities = new ArrayList<TokenSequence<SemanticEntity>>();

    HashMap<Integer, TokenSequence<SemanticEntity>> map = new HashMap<Integer, TokenSequence<SemanticEntity>>();

    for (int tokenIndex : data.getIntegerKeys(TokenSequence.SUBJECT)) {
        List<SemanticEntity> values = data.get(TokenSequence.SUBJECT, tokenIndex);
        assert values != null; // when does this occur?
        for (SemanticEntity value : values) {

            int subject = value.getSubjectIndex();
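            // a "B" position starts a new token sequence for this subject (flushing any open one);
            // other positions extend the sequence currently open for it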
            if (value.getPosition().equals("B")) {
                TokenSequence<SemanticEntity> entity = map.get(subject);
                if (entity != null) {
                    entities.add(map.remove(subject));
                }
                entity = new TokenSequence<SemanticEntity>(value);
                entity.addToken(new Token(tokenIndex, this));
                map.put(subject, entity);
            } else {
                map.get(subject).addToken(new Token(tokenIndex, this));
            }
        }

    }
    entities.addAll(map.values());

    return entities;
}

From source file:com.webcohesion.enunciate.modules.csharp_client.CSharpXMLClientModule.java

private Map<String, String> buildPackageToNamespaceConversions() {
    Map<String, String> packageToNamespaceConversions = getPackageToNamespaceConversions();
    if (this.jaxwsModule != null) {
        HashMap<String, WebFault> allFaults = new HashMap<String, WebFault>();
        for (WsdlInfo wsdlInfo : this.jaxwsModule.getJaxwsContext().getWsdls().values()) {
            for (EndpointInterface ei : wsdlInfo.getEndpointInterfaces()) {
                String pckg = ei.getPackage().getQualifiedName().toString();
                if (!packageToNamespaceConversions.containsKey(pckg)) {
                    packageToNamespaceConversions.put(pckg, packageToNamespace(pckg));
                }
                for (WebMethod webMethod : ei.getWebMethods()) {
                    for (WebFault webFault : webMethod.getWebFaults()) {
                        allFaults.put(webFault.getQualifiedName().toString(), webFault);
                    }
                }
            }
        }

        for (WebFault webFault : allFaults.values()) {
            String pckg = webFault.getPackage().getQualifiedName().toString();
            if (!packageToNamespaceConversions.containsKey(pckg)) {
                packageToNamespaceConversions.put(pckg, packageToNamespace(pckg));
            }
        }
    }

    if (jaxbModule != null) {
        for (SchemaInfo schemaInfo : jaxbModule.getJaxbContext().getSchemas().values()) {
            for (TypeDefinition typeDefinition : schemaInfo.getTypeDefinitions()) {
                String pckg = typeDefinition.getPackage().getQualifiedName().toString();
                if (!packageToNamespaceConversions.containsKey(pckg)) {
                    packageToNamespaceConversions.put(pckg, packageToNamespace(pckg));
                }
            }
        }
    }
    return packageToNamespaceConversions;
}

From source file:de.gfz_potsdam.datasync.Datasync.java

public void syncDeletedToRemote(String basedir, HashSet<String> entities, Container parent) throws Exception {

    //delete in infrastructure container members, where there is no local file but it is in our database

    if (parent == null)
        return;

    HashMap<String, String> syncedfiles = App.db.listEntries(basedir, File.separator);
    HashMap<String, String> srvdelete = new HashMap<String, String>();

    HashMap<String, Integer> pathToIdCount = new HashMap<String, Integer>();
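    // count how many synced local paths map to each remote id (an item may be referenced by several paths)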
    for (String path : syncedfiles.keySet()) {
        String id = syncedfiles.get(path);
        Integer count = pathToIdCount.get(id);
        if (count == null)
            count = new Integer(1);
        else
            count = new Integer(count.intValue() + 1);
        pathToIdCount.put(id, count);
    }

    for (String path : syncedfiles.keySet()) {
        String id = syncedfiles.get(path);
        Integer count = pathToIdCount.get(id);
        if (!entities.contains(path)) {
            if (count.intValue() <= 1) {
                srvdelete.put(path, id);
                log.log(Level.INFO, "Remote delete: {0} {1}", new Object[] { path, id });
            }
        }
    }
    // items with more components (ids referenced by more than one path) are skipped and never deleted here

    if (!srvdelete.isEmpty()) {

        srv.containerRemoveMembers(parent, srvdelete.values());
        for (String path : srvdelete.keySet()) {
            App.db.deleteMapping(path, srvdelete.get(path), File.separator);

        }
        File parentdir = new File(directory + File.separator + basedir);
        App.db.storeMapping(basedir, parent.getObjid(), parentdir.lastModified(),
                parent.getLastModificationDate(), SyncDB.DIRECTORY);

    }

}

From source file:eu.planets_project.tb.gui.backing.ServiceBrowser.java

/**
 * @return the known service records, aggregated by service name
 */
public List<ServiceRecordsByNameBean> getAllServiceRecordsByName() {
    HashMap<String, ServiceRecordsByNameBean> sbn = new HashMap<String, ServiceRecordsByNameBean>();

    // Get all the known, unique service records.
    List<ServiceRecordBean> records = this.getAllServicesAndRecords();

    // Aggregate those into a list of new service-by-name:
    for (ServiceRecordBean srb : records) {
        if (this.getSelectedServiceTypes().contains(srb.getType())) {
            if (sbn.containsKey(srb.getName())) {
                // Add this SRB to the content:
                sbn.get(srb.getName()).addServiceRecord(srb);
            } else {
                sbn.put(srb.getName(), new ServiceRecordsByNameBean(srb));
            }
        }
    }

    return new ArrayList<ServiceRecordsByNameBean>(sbn.values());
}

From source file:org.apache.openejb.config.DeploymentLoader.java

protected static ConnectorModule createConnectorModule(final String appId, final String rarPath,
        final ClassLoader parentClassLoader, final String moduleId, final URL raXmlUrl)
        throws OpenEJBException {
    // unpack the rar file
    final URL baseUrl;
    File rarFile = new File(rarPath);
    rarFile = unpack(rarFile);
    baseUrl = getFileUrl(rarFile);

    // read the ra.xml file
    final Map<String, URL> descriptors = getDescriptors(baseUrl);
    Connector connector = null;
    URL rarXmlUrl = descriptors.get("ra.xml");
    if (rarXmlUrl == null && raXmlUrl != null) {
        descriptors.put("ra.xml", raXmlUrl);
        rarXmlUrl = raXmlUrl;
    }
    if (rarXmlUrl != null) {
        connector = ReadDescriptors.readConnector(rarXmlUrl);
    }

    // find the nested jar files
    final HashMap<String, URL> rarLibs = new HashMap<String, URL>();
    scanDir(rarFile, rarLibs, "");
    for (final Iterator<Map.Entry<String, URL>> iterator = rarLibs.entrySet().iterator(); iterator.hasNext();) {
        // remove all non jars from the rarLibs
        final Map.Entry<String, URL> fileEntry = iterator.next();
        if (!fileEntry.getKey().endsWith(".jar")) {
            iterator.remove();
        }
    }

    // create the class loader
    final List<URL> classPath = new ArrayList<URL>();
    classPath.addAll(rarLibs.values());

    final ClassLoaderConfigurer configurer = QuickJarsTxtParser
            .parse(new File(rarFile, "META-INF/" + QuickJarsTxtParser.FILE_NAME));
    if (configurer != null) {
        ClassLoaderConfigurer.Helper.configure(classPath, configurer);
    }

    final URL[] urls = classPath.toArray(new URL[classPath.size()]);
    final ClassLoader appClassLoader = ClassLoaderUtil.createTempClassLoader(appId, urls, parentClassLoader);

    // create the Resource Module
    final ConnectorModule connectorModule = new ConnectorModule(connector, appClassLoader, rarPath, moduleId);
    connectorModule.getAltDDs().putAll(descriptors);
    connectorModule.getLibraries().addAll(classPath);
    connectorModule.getWatchedResources().add(rarPath);
    connectorModule.getWatchedResources().add(rarFile.getAbsolutePath());
    if (rarXmlUrl != null && "file".equals(rarXmlUrl.getProtocol())) {
        connectorModule.getWatchedResources().add(URLs.toFilePath(rarXmlUrl));
    }

    return connectorModule;
}

From source file:de.dfki.km.perspecting.obie.model.Document.java

public List<TokenSequence<SemanticEntity>> getEntityTypes() {
    List<TokenSequence<SemanticEntity>> entities = new ArrayList<TokenSequence<SemanticEntity>>();

    HashMap<Integer, TokenSequence<SemanticEntity>> map = new HashMap<Integer, TokenSequence<SemanticEntity>>();
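    // map: property index -> token sequence currently being assembled for that property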

    for (int tokenIndex : this.data.getIntegerKeys(TokenSequence.TYPE)) {
        List<SemanticEntity> values = this.data.get(TokenSequence.TYPE, tokenIndex);
        if (values != null) {
            for (SemanticEntity value : values) {
                int property = value.getPropertyIndex();
                if (value.getPosition().equals("B")) {
                    TokenSequence<SemanticEntity> entity = map.get(property);
                    if (entity != null) {
                        entities.add(map.remove(property));
                    }
                    entity = new TokenSequence<SemanticEntity>(value);
                    entity.addToken(new Token(tokenIndex, this));
                    map.put(property, entity);
                } else {
                    map.get(property).addToken(new Token(tokenIndex, this));
                }
            }
        } else {
            entities.addAll(map.values());
            map.clear();
        }
    }
    entities.addAll(map.values());

    return entities;
}

From source file:org.apache.hadoop.raid.RaidShell.java

/**
 * Checks whether the given file is corrupt, i.e. has missing or corrupt blocks
 * that its RAID parity cannot repair.
 *
 * @param dfs
 * @param fileStat
 * @param cntMissingBlksPerStrp
 * @param conf
 * @param numNonRaidedMissingBlks
 * @param numStrpMissingBlksMap
 * @return true if the file is corrupt
 * @throws java.io.IOException
 */
public static boolean isFileCorrupt(final DistributedFileSystem dfs, final FileStatus fileStat,
        final boolean cntMissingBlksPerStrp, final Configuration conf, AtomicLong numNonRaidedMissingBlks,
        Map<String, AtomicLongArray> numStrpMissingBlksMap) throws IOException {
    if (fileStat == null) {
        return false;
    }
    Path filePath = fileStat.getPath();
    try {
        // corruptBlocksPerStripe: 
        // map stripe # -> # of corrupt blocks in that stripe (data + parity)
        HashMap<Integer, Integer> corruptBlocksPerStripe = new LinkedHashMap<Integer, Integer>();
        boolean fileCorrupt = false;
        // Har checking requires one more RPC to namenode per file
        // skip it for performance. 
        RaidInfo raidInfo = RaidUtils.getFileRaidInfo(fileStat, conf, true);
        if (raidInfo.codec == null) {
            raidInfo = RaidUtils.getFileRaidInfo(fileStat, conf, false);
        }
        if (raidInfo.codec == null) {
            // Couldn't find out the parity file, so the file is corrupt
            int count = collectNumCorruptBlocksInFile(dfs, filePath);
            if (cntMissingBlksPerStrp && numNonRaidedMissingBlks != null) {
                numNonRaidedMissingBlks.addAndGet(count);
            }
            return true;
        }

        if (raidInfo.codec.isDirRaid) {
            RaidUtils.collectDirectoryCorruptBlocksInStripe(conf, dfs, raidInfo, fileStat,
                    corruptBlocksPerStripe);
        } else {
            RaidUtils.collectFileCorruptBlocksInStripe(dfs, raidInfo, fileStat, corruptBlocksPerStripe);
        }

        final int maxCorruptBlocksPerStripe = raidInfo.parityBlocksPerStripe;

        for (Integer corruptBlocksInStripe : corruptBlocksPerStripe.values()) {
            if (corruptBlocksInStripe == null) {
                continue;
            }
            //detect if the file has any stripes which cannot be fixed by Raid
            if (LOG.isDebugEnabled()) {
                LOG.debug("file " + filePath.toString() + " has corrupt blocks per Stripe value "
                        + corruptBlocksInStripe);
            }
            if (!fileCorrupt) {
                if (corruptBlocksInStripe > maxCorruptBlocksPerStripe) {
                    fileCorrupt = true;
                }
            }
            if (cntMissingBlksPerStrp && numStrpMissingBlksMap != null) {
                numStrpMissingBlksMap.get(raidInfo.codec.id).incrementAndGet(corruptBlocksInStripe - 1);
            }
        }
        return fileCorrupt;
    } catch (SocketException e) {
        // Re-throw network-related exceptions.
        throw e;
    } catch (SocketTimeoutException e) {
        throw e;
    } catch (IOException e) {
        // re-throw local exceptions.
        if (e.getCause() != null && !(e.getCause() instanceof RemoteException)) {
            throw e;
        }

        LOG.error("While trying to check isFileCorrupt " + filePath + " got exception ", e);
        return true;
    }
}

From source file:com.wantscart.jade.provider.jdbc.JdbcDataAccess.java

private int[] batchUpdate2(String sql, Modifier modifier, List<Map<String, Object>> parametersList) {
    if (parametersList.size() == 0) {
        return new int[0];
    }
    // sql --> args[]
    HashMap<String, List<Object[]>> batches = new HashMap<String, List<Object[]>>();
    // sql --> named args
    HashMap<String, List<Map<String, Object>>> batches2 = new HashMap<String, List<Map<String, Object>>>();
    // sql --> [2,3,6,9] positions of parametersList
    Map<String, List<Integer>> positions = new HashMap<String, List<Integer>>();

    //TODO fix shardby null bug
    SQLThreadLocal.set(SQLType.WRITE, sql, modifier, parametersList);
    for (int i = 0; i < parametersList.size(); i++) {
        SQLInterpreterResult ir = interpret(sql, modifier, parametersList.get(i));
        List<Object[]> args = batches.get(ir.getSQL());
        List<Integer> position = positions.get(ir.getSQL());
        List<Map<String, Object>> maplist = batches2.get(ir.getSQL());
        if (args == null) {
            args = new LinkedList<Object[]>();
            batches.put(ir.getSQL(), args);
            position = new LinkedList<Integer>();
            positions.put(ir.getSQL(), position);
            maplist = new LinkedList<Map<String, Object>>();
            batches2.put(ir.getSQL(), maplist);
        }
        position.add(i);
        args.add(ir.getParameters());
        maplist.add(parametersList.get(i));
    }
    if (batches.size() == 1) {
        SQLThreadLocal.set(SQLType.WRITE, sql, modifier, parametersList);
        int[] updated = jdbc.batchUpdate(modifier, batches.keySet().iterator().next(),
                batches.values().iterator().next());
        SQLThreadLocal.remove();
        return updated;
    }
    int[] batchUpdated = new int[parametersList.size()];
    for (Map.Entry<String, List<Object[]>> batch : batches.entrySet()) {
        String batchSQL = batch.getKey();
        List<Object[]> values = batch.getValue();
        List<Map<String, Object>> map = batches2.get(batchSQL);
        SQLThreadLocal.set(SQLType.WRITE, sql, modifier, map);
        int[] updated = jdbc.batchUpdate(modifier, batchSQL, values);
        SQLThreadLocal.remove();
        List<Integer> position = positions.get(batchSQL);
        int i = 0;
        for (Integer p : position) {
            batchUpdated[p] = updated[i++];
        }
    }
    return batchUpdated;

}

From source file:com.thoughtworks.go.server.service.RulesService.java

public boolean validateSecretConfigReferences(ScmMaterial scmMaterial) {
    List<CaseInsensitiveString> pipelines = goConfigService.pipelinesWithMaterial(scmMaterial.getFingerprint());

    HashMap<CaseInsensitiveString, StringBuilder> pipelinesWithErrors = new HashMap<>();
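    // pipelinesWithErrors: pipeline name -> accumulated rule-violation messages for its secret configs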
    pipelines.forEach(pipelineName -> {
        MaterialConfig materialConfig = goConfigService.findPipelineByName(pipelineName).materialConfigs()
                .getByMaterialFingerPrint(scmMaterial.getFingerprint());
        PipelineConfigs group = goConfigService.findGroupByPipeline(pipelineName);
        ScmMaterialConfig scmMaterialConfig = (ScmMaterialConfig) materialConfig;
        SecretParams secretParams = SecretParams.parse(scmMaterialConfig.getPassword());
        secretParams.forEach(secretParam -> {
            String secretConfigId = secretParam.getSecretConfigId();
            SecretConfig secretConfig = goConfigService.getSecretConfigById(secretConfigId);
            if (secretConfig == null) {
                addError(pipelinesWithErrors, pipelineName,
                        format("Pipeline '%s' is referring to none-existent secret config '%s'.", pipelineName,
                                secretConfigId));
            } else if (!secretConfig.canRefer(group.getClass(), group.getGroup())) {
                addError(pipelinesWithErrors, pipelineName, format(
                        "Pipeline '%s' does not have permission to refer to secrets using secret config '%s'",
                        pipelineName, secretConfigId));
            }
        });
    });
    StringBuilder errorMessage = new StringBuilder();
    if (!pipelinesWithErrors.isEmpty()) {
        errorMessage.append(StringUtils.join(pipelinesWithErrors.values(), '\n').trim());
        LOGGER.error("[Material Update] Failure: {}", errorMessage.toString());
    }
    if (pipelines.size() == pipelinesWithErrors.size()) {
        throw new RulesViolationException(errorMessage.toString());
    }
    return true;
}