Example usage for java.util SortedMap containsKey

Introduction

This page collects usage examples for java.util.SortedMap.containsKey, drawn from the source files listed below.

Prototype

boolean containsKey(Object key);

Document

Returns true if this map contains a mapping for the specified key.
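
A minimal sketch of the method in isolation (the class name and map contents below are illustrative only, not taken from the examples that follow):

import java.util.SortedMap;
import java.util.TreeMap;

public class ContainsKeyExample {
    public static void main(String[] args) {
        SortedMap<String, Integer> wordCounts = new TreeMap<String, Integer>();
        wordCounts.put("apple", 3);
        wordCounts.put("banana", 1);

        // containsKey reports whether a mapping exists, regardless of its value.
        System.out.println(wordCounts.containsKey("apple"));  // true
        System.out.println(wordCounts.containsKey("cherry")); // false
    }
}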

Usage

From source file:es.uvigo.ei.sing.gc.model.entities.ExpertResult.java
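
Here containsKey is used to group SampleClassification objects by step in a TreeMap before per-step classification performances are computed and merged.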

private synchronized void initClassificationPerformance() {
    if (this.performance == null && this.samples != null && !this.samples.isEmpty()) {
        final Set<Object> classes = new HashSet<Object>();
        Boolean multi = null;
        for (SampleClassification sample : this.samples) {
            if (multi == null) {
                multi = sample.isMultiStep();
            } else if (multi != sample.isMultiStep()) {
                throw new IllegalStateException("Different sample types");
            }

            classes.add(sample.getRealClass());
        }

        final Object[] classArray = classes.toArray(new Object[classes.size()]);
        if (multi) {
            final SortedMap<Integer, List<SampleClassification>> sampleMap = new TreeMap<Integer, List<SampleClassification>>();

            for (SampleClassification sample : this.samples) {
                if (!sampleMap.containsKey(sample.getStep())) {
                    sampleMap.put(sample.getStep(), new LinkedList<SampleClassification>());
                }

                sampleMap.get(sample.getStep()).add(sample);
            }

            for (Map.Entry<Integer, List<SampleClassification>> entry : sampleMap.entrySet()) {
                if (this.performance == null) {
                    this.performance = new DefaultMultiStepClassificationPerformance(
                            ExpertResult.createClassificationPerformance(
                                    Integer.toString(this.getId()) + "-Step " + entry.getKey(), classArray,
                                    entry.getValue()));
                } else {
                    this.performance = this.performance.merge(ExpertResult.createClassificationPerformance(
                            Integer.toString(this.getId()) + "-Step " + entry.getKey(), classArray,
                            entry.getValue()));
                }
            }
        } else {
            this.performance = ExpertResult.createClassificationPerformance(Integer.toString(this.getId()),
                    classArray, samples);
        }
    }
}
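
Note that this example relies on the common check-then-put idiom: containsKey guards the creation of a new list per step. On Java 8 and later, Map.computeIfAbsent (inherited by SortedMap) expresses the same grouping more compactly; a minimal, illustrative sketch under that assumption:

import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
import java.util.SortedMap;
import java.util.TreeMap;

public class GroupingSketch {
    public static void main(String[] args) {
        SortedMap<Integer, List<String>> byLength = new TreeMap<Integer, List<String>>();
        for (String word : Arrays.asList("map", "sorted", "key", "tree")) {
            // computeIfAbsent creates the bucket only when the key is missing,
            // replacing the explicit containsKey-then-put pattern used above.
            byLength.computeIfAbsent(word.length(), k -> new LinkedList<String>()).add(word);
        }
        System.out.println(byLength); // {3=[map, key], 4=[tree], 6=[sorted]}
    }
}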

From source file:org.jets3t.service.utils.RestUtils.java
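
Here containsKey checks for the alternate date headers and ensures blank content-type and content-md5 entries exist while building the canonical string used to sign REST requests to a storage service.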

/**
 * Calculate the canonical string for a REST/HTTP request to a storage service.
 *
 * When expires is non-null, it will be used instead of the Date header.
 * @throws UnsupportedEncodingException
 */
public static String makeServiceCanonicalString(String method, String resource, Map<String, Object> headersMap,
        String expires, String headerPrefix, List<String> serviceResourceParameterNames)
        throws UnsupportedEncodingException {
    StringBuilder canonicalStringBuf = new StringBuilder();
    canonicalStringBuf.append(method).append("\n");

    // Collect all interesting headers into a sorted map. "Interesting"
    // is defined as Content-MD5, Content-Type, Date, and any header with the
    // service-specific prefix (e.g. x-amz-).
    SortedMap<String, Object> interestingHeaders = new TreeMap<String, Object>();
    if (headersMap != null && headersMap.size() > 0) {
        for (Map.Entry<String, Object> entry : headersMap.entrySet()) {
            Object key = entry.getKey();
            Object value = entry.getValue();

            if (key == null) {
                continue;
            }
            String lk = key.toString().toLowerCase(Locale.getDefault());

            // Ignore any headers that are not particularly interesting.
            if (lk.equals("content-type") || lk.equals("content-md5") || lk.equals("date")
                    || lk.startsWith(headerPrefix)) {
                interestingHeaders.put(lk, value);
            }
        }
    }

    // Remove default date timestamp if "x-amz-date" or "x-goog-date" is set.
    if (interestingHeaders.containsKey(Constants.REST_METADATA_ALTERNATE_DATE_AMZ)
            || interestingHeaders.containsKey(Constants.REST_METADATA_ALTERNATE_DATE_GOOG)) {
        interestingHeaders.put("date", "");
    }

    // Use the expires value as the timestamp if it is available. This trumps both the default
    // "date" timestamp, and the "x-amz-date" header.
    if (expires != null) {
        interestingHeaders.put("date", expires);
    }

    // these headers require that we still put a new line in after them,
    // even if they don't exist.
    if (!interestingHeaders.containsKey("content-type")) {
        interestingHeaders.put("content-type", "");
    }
    if (!interestingHeaders.containsKey("content-md5")) {
        interestingHeaders.put("content-md5", "");
    }

    // Finally, append the interesting headers: prefixed headers as "key:value", the rest as value only.
    for (Map.Entry<String, Object> entry : interestingHeaders.entrySet()) {
        String key = entry.getKey();
        Object value = entry.getValue();

        if (key.startsWith(headerPrefix)) {
            canonicalStringBuf.append(key).append(':').append(value);
        } else {
            canonicalStringBuf.append(value);
        }
        canonicalStringBuf.append("\n");
    }

    // don't include the query parameters...
    int queryIndex = resource.indexOf('?');
    if (queryIndex == -1) {
        canonicalStringBuf.append(resource);
    } else {
        canonicalStringBuf.append(resource.substring(0, queryIndex));
    }

    // ...unless the parameter(s) are in the set of special params
    // that actually identify a service resource.
    if (queryIndex >= 0) {
        SortedMap<String, String> sortedResourceParams = new TreeMap<String, String>();

        // Parse parameters from resource string
        String query = resource.substring(queryIndex + 1);
        for (String paramPair : query.split("&")) {
            String[] paramNameValue = paramPair.split("=");
            String name = URLDecoder.decode(paramNameValue[0], "UTF-8");
            String value = null;
            if (paramNameValue.length > 1) {
                value = URLDecoder.decode(paramNameValue[1], "UTF-8");
            }
            // Only include parameter (and its value if present) in canonical
            // string if it is a resource-identifying parameter
            if (serviceResourceParameterNames.contains(name)) {
                sortedResourceParams.put(name, value);
            }
        }

        // Add resource parameters
        if (sortedResourceParams.size() > 0) {
            canonicalStringBuf.append("?");
        }
        boolean addedParam = false;
        for (Map.Entry<String, String> entry : sortedResourceParams.entrySet()) {
            if (addedParam) {
                canonicalStringBuf.append("&");
            }
            canonicalStringBuf.append(entry.getKey());
            if (entry.getValue() != null) {
                canonicalStringBuf.append("=").append(entry.getValue());
            }
            addedParam = true;
        }
    }

    return canonicalStringBuf.toString();
}

From source file:br.eti.ranieri.opcoesweb.importacao.offline.ImportadorOffline.java
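
Here containsKey groups quotes by trading date and, in the nested map, by stock before Black-Scholes indices are computed for each date/stock pair.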

private void calcularBlackScholes(List<CotacaoBDI> cotacoes, ConfiguracaoImportacao configuracaoImportacao)
        throws Exception {
    if (cotacoes == null)
        return;

    // Organize the quotes by date and stock. For now, the quotes for the
    // stock and its options remain in the same list
    SortedMap<LocalDate, Map<Acao, List<CotacaoBDI>>> diaAcaoOpcoes = new TreeMap<LocalDate, Map<Acao, List<CotacaoBDI>>>();
    for (CotacaoBDI cotacao : cotacoes) {
        LocalDate data = cotacao.getDataPregao();

        Map<Acao, List<CotacaoBDI>> cotacoesPorAcao = new HashMap<Acao, List<CotacaoBDI>>();
        if (diaAcaoOpcoes.containsKey(data)) {
            cotacoesPorAcao = diaAcaoOpcoes.get(data);
        } else {
            diaAcaoOpcoes.put(data, cotacoesPorAcao);
        }

        Acao acao = null;
        if (cotacao.getCodigoNegociacao().startsWith("PETR")) {
            acao = Acao.PETROBRAS;
        } else if (cotacao.getCodigoNegociacao().startsWith("VALE")) {
            acao = Acao.VALE;
        } else {
            log.error("Codigo de negociacao [{}] nao esta " + "vinculada a VALE e nem a PETROBRAS.",
                    cotacao.getCodigoNegociacao());
            continue;
        }

        List<CotacaoBDI> cotacoesAcaoOpcoes = new ArrayList<CotacaoBDI>();
        if (cotacoesPorAcao.containsKey(acao)) {
            cotacoesAcaoOpcoes = cotacoesPorAcao.get(acao);
        } else {
            cotacoesPorAcao.put(acao, cotacoesAcaoOpcoes);
        }

        cotacoesAcaoOpcoes.add(cotacao);
    }

    // Now, for each day and each stock, separate the stock quotes
    // from the quotes of options expiring this month and those
    // expiring next month.
    //
    // For each day and each stock, compute Black & Scholes for
    // every stock/option-list pair
    for (LocalDate data : diaAcaoOpcoes.keySet()) {

        Serie serieAtualOpcoes = Serie.getSerieAtualPorData(data);
        Serie proximaSerieOpcoes = Serie.getProximaSeriePorData(data);
        Double selic = taxaSelic.getSelic(data);

        for (Acao acao : diaAcaoOpcoes.get(data).keySet()) {

            CotacaoBDI cotacaoAcao = null;
            List<CotacaoBDI> cotacoesOpcoesSerie1 = new ArrayList<CotacaoBDI>();
            List<CotacaoBDI> cotacoesOpcoesSerie2 = new ArrayList<CotacaoBDI>();

            for (CotacaoBDI cotacao : diaAcaoOpcoes.get(data).get(acao)) {
                if (CodigoBDI.LOTE_PADRAO.equals(cotacao.getCodigoBdi())
                        && TipoMercadoBDI.MERCADO_A_VISTA.equals(cotacao.getTipoMercado())) {
                    if (cotacaoAcao != null)
                        log.error("Sobrescreveu cotacao [{}] com [{}].", cotacaoAcao, cotacao);
                    cotacaoAcao = cotacao;
                } else if (CodigoBDI.OPCOES_DE_COMPRA.equals(cotacao.getCodigoBdi())
                        && TipoMercadoBDI.OPCOES_DE_COMPRA.equals(cotacao.getTipoMercado())) {
                    if (serieAtualOpcoes.isSerieDaOpcao(cotacao.getCodigoNegociacao())) {
                        cotacoesOpcoesSerie1.add(cotacao);
                    } else if (proximaSerieOpcoes.isSerieDaOpcao(cotacao.getCodigoNegociacao())) {
                        cotacoesOpcoesSerie2.add(cotacao);
                    }
                }
            }

            if (cotacaoAcao == null) {
                log.error("Nao foi encontrada cotacao de " + "acao [{}] no dia [{}].", acao.getCodigo(), data);
                continue;
            }
            if (cotacoesOpcoesSerie1.size() == 0) {
                log.error("Nao foram encontradas cotacoes de opcoes "
                        + "de [{}] no dia [{}] para vencer neste mes.", acao.getCodigo(), data);
                continue;
            }
            if (cotacoesOpcoesSerie2.size() == 0) {
                log.error("Nao foram encontradas cotacoes de opcoes "
                        + "de [{}] no dia [{}] para vencer proximo mes.", acao.getCodigo(), data);
                continue;
            }

            CotacaoBDI opcaoTeorica1 = new CotacaoBDI(data, //
                    CodigoBDI.OPCOES_DE_COMPRA, //
                    TipoMercadoBDI.OPCOES_DE_COMPRA, //
                    "Teorica", 0, 0, 0, //
                    cotacaoAcao.getFechamento(), //
                    cotacoesOpcoesSerie1.iterator().next().getDataVencimento());

            CotacaoBDI opcaoTeorica2 = new CotacaoBDI(data, //
                    CodigoBDI.OPCOES_DE_COMPRA, //
                    TipoMercadoBDI.OPCOES_DE_COMPRA, //
                    "Teorica", 0, 0, 0, //
                    cotacaoAcao.getFechamento(), //
                    cotacoesOpcoesSerie2.iterator().next().getDataVencimento());

            Integer opcoesPorDia = configuracaoImportacao.getQuantidadeOpcoesPorAcaoPorDia();

            CotacaoAcaoOpcoes cotacao = blackScholes.calcularIndices(cotacaoAcao, serieAtualOpcoes,
                    cotacoesOpcoesSerie1, opcaoTeorica1, proximaSerieOpcoes, cotacoesOpcoesSerie2,
                    opcaoTeorica2, opcoesPorDia, selic);

            persistencia.incluirCotacaoHistorica(data, acao, cotacao);
        }
    }
}

From source file:org.apache.hadoop.hbase.regionserver.ccindex.IndexedRegion.java
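
Here containsKey verifies that every indexed column is present in the old column values before the old index entry is deleted.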

private void removeOldIndexEntry(IndexSpecification indexSpec, byte[] row,
        SortedMap<byte[], byte[]> oldColumnValues) throws IOException {
    for (byte[] indexedCol : indexSpec.getIndexedColumns()) {
        if (!oldColumnValues.containsKey(indexedCol)) {
            LOG.debug("Index [" + indexSpec.getIndexId() + "] not trying to remove old entry for row ["
                    + Bytes.toString(row) + "] because col [" + Bytes.toString(indexedCol) + "] is missing");
            return;
        }
    }

    byte[] oldIndexRow = indexSpec.getKeyGenerator().createIndexKey(row, oldColumnValues);
    LOG.debug(
            "Index [" + indexSpec.getIndexId() + "] removing old entry [" + Bytes.toString(oldIndexRow) + "]");
    getIndexTable(indexSpec).delete(new Delete(oldIndexRow));
}

From source file:com.jxt.web.service.AgentInfoServiceImpl.java
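
Here containsKey groups AgentInfo instances by hostname in a sorted map, appending to an existing list when the key is already present.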

@Override
public ApplicationAgentList getApplicationAgentList(ApplicationAgentList.Key applicationAgentListKey,
        String applicationName, long timestamp) {
    if (applicationName == null) {
        throw new NullPointerException("applicationName must not be null");
    }
    if (applicationAgentListKey == null) {
        throw new NullPointerException("applicationAgentListKey must not be null");
    }
    final List<String> agentIdList = this.applicationIndexDao.selectAgentIds(applicationName);
    if (logger.isDebugEnabled()) {
        logger.debug("agentIdList={}", agentIdList);
    }

    if (CollectionUtils.isEmpty(agentIdList)) {
        logger.debug("agentIdList is empty. applicationName={}", applicationName);
        return new ApplicationAgentList(new TreeMap<String, List<AgentInfo>>());
    }

    // key = hostname
    // value = list of AgentInfo
    SortedMap<String, List<AgentInfo>> result = new TreeMap<>();

    List<AgentInfo> agentInfos = this.agentInfoDao.getAgentInfos(agentIdList, timestamp);
    this.agentLifeCycleDao.populateAgentStatuses(agentInfos, timestamp);
    for (AgentInfo agentInfo : agentInfos) {
        if (agentInfo != null) {
            String hostname = applicationAgentListKey.getKey(agentInfo);

            if (result.containsKey(hostname)) {
                result.get(hostname).add(agentInfo);
            } else {
                List<AgentInfo> list = new ArrayList<>();
                list.add(agentInfo);
                result.put(hostname, list);
            }
        }
    }

    for (List<AgentInfo> agentInfoList : result.values()) {
        Collections.sort(agentInfoList, AgentInfo.AGENT_NAME_ASC_COMPARATOR);
    }

    logger.info("getApplicationAgentList={}", result);

    return new ApplicationAgentList(result);
}

From source file:org.nuxeo.ecm.core.storage.sql.UnifiedCachingRowMapper.java
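
Here containsKey checks the metric registry's gauge map so the cache-size gauge is registered only once.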

/**
 * Sets the session, used for event propagation.
 */
public void setSession(SessionImpl session) {
    this.session = session;
    cacheHitCount = registry.counter(MetricRegistry.name("nuxeo", "repositories", session.repository.getName(),
            "caches", "unified", "hits"));
    cacheGetTimer = registry.timer(MetricRegistry.name("nuxeo", "repositories", session.repository.getName(),
            "caches", "unified", "get"));
    sorRows = registry.counter(MetricRegistry.name("nuxeo", "repositories", session.repository.getName(),
            "caches", "unified", "sor", "rows"));
    sorGetTimer = registry.timer(MetricRegistry.name("nuxeo", "repositories", session.repository.getName(),
            "caches", "unified", "sor", "get"));
    String gaugeName = MetricRegistry.name("nuxeo", "repositories", session.repository.getName(), "caches",
            "unified", "cache-size");
    SortedMap<String, Gauge> gauges = registry.getGauges();
    if (!gauges.containsKey(gaugeName)) {
        registry.register(gaugeName, new Gauge<Integer>() {
            @Override
            public Integer getValue() {
                if (cacheManager != null) {
                    return cacheManager.getCache(CACHE_NAME).getSize();
                }
                return 0;
            }
        });
    }
}

From source file:de.micromata.genome.gwiki.umgmt.GWikiUserAuthorization.java
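
Here containsKey prevents duplicate entries when page rights read from element properties are collected into the rights map.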

public void getPageRights(GWikiContext wikiContext, SortedMap<String, GWikiRight> rights) {
    for (GWikiElementInfo ei : wikiContext.getWikiWeb().getElementInfos()) {
        String r = ei.getProps().getStringValue(GWikiPropKeys.AUTH_EDIT);
        if (StringUtils.isNotBlank(r) == true) {
            if (rights.containsKey(r) == false) {
                rights.put(r, new GWikiRight(r, GWikiRight.RIGHT_CAT_PAGE_RIGHT, null));
            }
        }
        r = ei.getProps().getStringValue(GWikiPropKeys.AUTH_VIEW);
        if (StringUtils.isNotBlank(r) == true) {
            if (rights.containsKey(r) == false) {
                rights.put(r, new GWikiRight(r, GWikiRight.RIGHT_CAT_PAGE_RIGHT, null));
            }
        }
    }
}

From source file:net.sourceforge.seqware.pipeline.plugins.checkdb.plugins.WorkflowRunConventionsPlugin.java
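
Here containsKey buckets workflow run accessions by the number of studies that can reach them, creating a new set per count on first use.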

@Override
public void check(SelectQueryRunner qRunner, SortedMap<Level, Set<String>> result) throws SQLException {
    try {
        /**
         * May not be true for downstream workflow runs:
         * List<Integer> executeQuery = qRunner.executeQuery("select sw_accession from workflow_run WHERE workflow_run_id NOT IN (select workflow_run_id FROM ius_workflow_runs);", new ColumnListHandler<Integer>());
         * CheckDB.processOutput(result, Level.TRIVIAL, "Workflow runs not connected to an IUS via ius_workflow_runs: ", executeQuery);
         */
        // workflow runs not connected to a study
        String query = IOUtils.toString(
                AttributePlugin.class.getResourceAsStream("workflow_runs_not_connected_to_study.sql"));
        List<Object[]> workflow_run_study_pairs = qRunner.executeQuery(query, new ArrayListHandler());

        List<Integer> unreachableByStudy = new ArrayList<>();
        // number studies -> workflow runs
        SortedMap<Integer, SortedSet<Integer>> reachableByMultipleStudies = new TreeMap<>();

        for (Object[] pair : workflow_run_study_pairs) {
            int studyCount = Integer.valueOf(pair[1].toString());
            if (pair[0] == null) {
                continue;
            }
            int sw_accession = Integer.valueOf(pair[0].toString());
            if (studyCount == 0) {
                unreachableByStudy.add(sw_accession);
            } else if (studyCount > 1) {
                if (!reachableByMultipleStudies.containsKey(studyCount)) {
                    reachableByMultipleStudies.put(studyCount, new TreeSet<Integer>());
                }
                reachableByMultipleStudies.get(studyCount).add(sw_accession);
            }
        }
        CheckDB.processOutput(result, Level.SEVERE, "'Completed' Workflow runs not reachable by studies: ",
                unreachableByStudy);
        // workflow runs connected to more than one study
        if (reachableByMultipleStudies.size() > 0) {
            for (Entry<Integer, SortedSet<Integer>> e : reachableByMultipleStudies.entrySet()) {
                CheckDB.processOutput(result, Level.WARNING,
                        "'Completed' Workflow runs reachable by " + e.getKey() + " studies: ",
                        new ArrayList<>(e.getValue()));
            }
        }
        query = IOUtils.toString(
                AttributePlugin.class.getResourceAsStream("workflow_runs_not_connected_in_hierarchy.sql"));
        List<Integer> executeQuery = qRunner.executeQuery(query, new ColumnListHandler<Integer>());
        CheckDB.processOutput(result, Level.SEVERE,
                "'Completed' Workflow runs reachable by ius_workflow_runs but not via the processing_hierarchy: ",
                executeQuery);

        query = IOUtils.toString(AttributePlugin.class.getResourceAsStream("new_input_files_versus_old.sql"));
        executeQuery = qRunner.executeQuery(query, new ColumnListHandler<Integer>());
        CheckDB.processOutput(result, Level.TRIVIAL,
                "Workflow runs with input files via workflow_run_input_files but not via the processing hierarchy: ",
                executeQuery);

        query = IOUtils.toString(AttributePlugin.class.getResourceAsStream("old_input_files_versus_new.sql"));
        executeQuery = qRunner.executeQuery(query, new ColumnListHandler<Integer>());
        CheckDB.processOutput(result, Level.TRIVIAL,
                "Workflow runs with input files via the processing hierarchy but not via workflow_run_input_files: ",
                executeQuery);

    } catch (IOException ex) {
        throw new RuntimeException(ex);
    }

}

From source file:com.palantir.atlasdb.cleaner.KeyValueServiceScrubberStore.java
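
Here containsKey lazily creates a multimap for each scrub timestamp while transforming row results into a timestamp-to-table-to-cell mapping.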

private SortedMap<Long, Multimap<String, Cell>> transformRows(List<RowResult<Value>> input) {
    SortedMap<Long, Multimap<String, Cell>> scrubTimestampToTableNameToCell = Maps.newTreeMap();
    for (RowResult<Value> rowResult : input) {
        for (Map.Entry<Cell, Value> entry : rowResult.getCells()) {
            Cell cell = entry.getKey();
            Value value = entry.getValue();
            long scrubTimestamp = value.getTimestamp();
            String[] tableNames = StringUtils.split(PtBytes.toString(value.getContents()),
                    AtlasDbConstants.SCRUB_TABLE_SEPARATOR_CHAR);
            if (!scrubTimestampToTableNameToCell.containsKey(scrubTimestamp)) {
                scrubTimestampToTableNameToCell.put(scrubTimestamp, HashMultimap.<String, Cell>create());
            }
            for (String tableName : tableNames) {
                scrubTimestampToTableNameToCell.get(scrubTimestamp).put(tableName, cell);
            }
        }
    }
    return scrubTimestampToTableNameToCell;
}

From source file:org.lilyproject.indexer.model.sharding.test.ShardSelectorTest.java
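
Here containsKey asserts that every shard name returned by the selector is one of the configured shards.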

@Test
public void testDefaultMapping() throws Exception {
    SortedMap<String, String> shards = new TreeMap<String, String>();
    shards.put("shard1", "http://solr1");
    shards.put("shard2", "http://solr2");
    shards.put("shard3", "http://solr3");

    ShardSelector selector = DefaultShardSelectorBuilder.createDefaultSelector(shards);

    IdGenerator idGenerator = new IdGeneratorImpl();

    boolean shard1Used = false;
    boolean shard2Used = false;
    boolean shard3Used = false;

    for (int i = 0; i < 50; i++) {
        String shardName = selector.getShard(idGenerator.newRecordId());
        assertTrue(shards.containsKey(shardName));

        if (shardName.equals("shard1")) {
            shard1Used = true;
        } else if (shardName.equals("shard2")) {
            shard2Used = true;
        } else if (shardName.equals("shard3")) {
            shard3Used = true;
        }
    }

    assertTrue(shard1Used);
    assertTrue(shard2Used);
    assertTrue(shard3Used);
}