Example usage for java.util LinkedHashMap size

Introduction

This page collects example usages of java.util.LinkedHashMap#size() from open source projects.

Prototype

int size();

Document

Returns the number of key-value mappings in this map.
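
Before the project examples, here is a minimal, self-contained sketch (the class name LinkedHashMapSizeDemo is illustrative, not from any project below) of how size() behaves: putting a new key grows it, replacing the value of an existing key leaves it unchanged, and removing a key shrinks it.

import java.util.LinkedHashMap;

public class LinkedHashMapSizeDemo {
    public static void main(String[] args) {
        LinkedHashMap<String, Integer> map = new LinkedHashMap<>();
        System.out.println(map.size()); // 0 - a new map has no mappings

        map.put("a", 1);
        map.put("b", 2);
        System.out.println(map.size()); // 2

        map.put("a", 3); // replacing an existing key's value adds no mapping
        System.out.println(map.size()); // still 2

        map.remove("b");
        System.out.println(map.size()); // 1
    }
}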

Usage

From source file:org.apache.tez.mapreduce.input.TestMultiMRInput.java

@Test(timeout = 5000)
public void testMultipleSplits() throws Exception {

    Path workDir = new Path(TEST_ROOT_DIR, "testMultipleSplits");
    JobConf jobConf = new JobConf(defaultConf);
    jobConf.setInputFormat(org.apache.hadoop.mapred.SequenceFileInputFormat.class);
    FileInputFormat.setInputPaths(jobConf, workDir);

    MRInputUserPayloadProto.Builder builder = MRInputUserPayloadProto.newBuilder();
    builder.setGroupingEnabled(false);
    builder.setConfigurationBytes(TezUtils.createByteStringFromConf(jobConf));
    byte[] payload = builder.build().toByteArray();

    InputContext inputContext = createTezInputContext(payload);

    MultiMRInput input = new MultiMRInput(inputContext, 2);
    input.initialize();
    List<Event> eventList = new ArrayList<Event>();

    LinkedHashMap<LongWritable, Text> data = new LinkedHashMap<LongWritable, Text>();

    String file1 = "file1";
    LinkedHashMap<LongWritable, Text> data1 = createInputData(localFs, workDir, jobConf, file1, 0, 10);

    String file2 = "file2";
    LinkedHashMap<LongWritable, Text> data2 = createInputData(localFs, workDir, jobConf, file2, 10, 20);

    data.putAll(data1);
    data.putAll(data2);

    SequenceFileInputFormat<LongWritable, Text> format = new SequenceFileInputFormat<LongWritable, Text>();
    InputSplit[] splits = format.getSplits(jobConf, 2);
    assertEquals(2, splits.length);

    MRSplitProto splitProto1 = MRInputHelpers.createSplitProto(splits[0]);
    InputDataInformationEvent event1 = InputDataInformationEvent.createWithSerializedPayload(0,
            splitProto1.toByteString().asReadOnlyByteBuffer());

    MRSplitProto splitProto2 = MRInputHelpers.createSplitProto(splits[1]);
    InputDataInformationEvent event2 = InputDataInformationEvent.createWithSerializedPayload(0,
            splitProto2.toByteString().asReadOnlyByteBuffer());

    eventList.clear();
    eventList.add(event1);
    eventList.add(event2);
    input.handleEvents(eventList);

    int readerCount = 0;
    for (KeyValueReader reader : input.getKeyValueReaders()) {
        readerCount++;
        while (reader.next()) {
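            // data.size() drops to 0 once every expected record has been matched; another record means too many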
            if (data.size() == 0) {
                fail("Found more records than expected");
            }
            Object key = reader.getCurrentKey();
            Object val = reader.getCurrentValue();
            assertEquals(val, data.remove(key));
        }
    }
    assertEquals(2, readerCount);
}

From source file:com.ikanow.aleph2.search_service.elasticsearch.utils.TestElasticsearchIndexUtils.java

@Test
public void test_parseDefaultMapping() throws JsonProcessingException, IOException {

    // Check the different components

    // Build/"unbuild" match pair

    assertEquals(Tuples._2T("*", "*"), ElasticsearchIndexUtils.buildMatchPair(_mapper.readTree("{}")));

    assertEquals(Tuples._2T("field*", "*"),
            ElasticsearchIndexUtils.buildMatchPair(_mapper.readTree("{\"match\":\"field*\"}")));

    assertEquals(Tuples._2T("field*field", "type*"), ElasticsearchIndexUtils.buildMatchPair(
            _mapper.readTree("{\"match\":\"field*field\", \"match_mapping_type\": \"type*\"}")));

    assertEquals("testBARSTAR_string",
            ElasticsearchIndexUtils.getFieldNameFromMatchPair(Tuples._2T("test_*", "string")));

    // More complex objects

    final String properties = Resources.toString(
            Resources.getResource("com/ikanow/aleph2/search_service/elasticsearch/utils/properties_test.json"),
            Charsets.UTF_8);
    final String templates = Resources.toString(
            Resources.getResource("com/ikanow/aleph2/search_service/elasticsearch/utils/templates_test.json"),
            Charsets.UTF_8);
    final String both = Resources.toString(
            Resources
                    .getResource("com/ikanow/aleph2/search_service/elasticsearch/utils/full_mapping_test.json"),
            Charsets.UTF_8);

    final JsonNode properties_json = _mapper.readTree(properties);
    final JsonNode templates_json = _mapper.readTree(templates);
    final JsonNode both_json = _mapper.readTree(both);

    // Properties, empty + non-empty

    final LinkedHashMap<Either<String, Tuple2<String, String>>, JsonNode> props_test1 = ElasticsearchIndexUtils
            .getProperties(templates_json);
    assertTrue("Empty map if not present", props_test1.isEmpty());

    final LinkedHashMap<Either<String, Tuple2<String, String>>, JsonNode> props_test2 = ElasticsearchIndexUtils
            .getProperties(properties_json);
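    // size() counts the key-value mappings parsed out of the properties JSON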
    assertEquals(4, props_test2.size());
    assertEquals(Arrays.asList("@version", "@timestamp", "sourceKey", "geoip"),
            props_test2.keySet().stream().map(e -> e.left().value()).collect(Collectors.toList()));

    assertEquals("{\"type\":\"string\",\"index\":\"not_analyzed\"}",
            props_test2.get(Either.left("sourceKey")).toString());

    // Templates, empty + non-empty

    final LinkedHashMap<Either<String, Tuple2<String, String>>, JsonNode> templates_test1 = ElasticsearchIndexUtils
            .getTemplates(properties_json, _mapper.readTree("{}"), Collections.emptySet());
    assertTrue("Empty map if not present", templates_test1.isEmpty());

    final LinkedHashMap<Either<String, Tuple2<String, String>>, JsonNode> templates_test2 = ElasticsearchIndexUtils
            .getTemplates(templates_json, _mapper.readTree("{}"), Collections.emptySet());
    assertEquals("getTemplates: " + templates_test2, 2, templates_test2.size());
    assertEquals(Arrays.asList(Tuples._2T("*", "string"), Tuples._2T("*", "number")),
            templates_test2.keySet().stream().map(e -> e.right().value()).collect(Collectors.toList()));

    // Some more properties tests

    final List<String> nested_properties = ElasticsearchIndexUtils.getAllFixedFields_internal(properties_json)
            .collect(Collectors.toList());
    assertEquals(Arrays.asList("@version", "@timestamp", "sourceKey", "geoip", "geoip.location"),
            nested_properties);

    final Set<String> nested_properties_2 = ElasticsearchIndexUtils.getAllFixedFields(both_json);
    assertEquals(Arrays.asList("sourceKey", "@timestamp", "geoip", "geoip.location", "@version"),
            new ArrayList<String>(nested_properties_2));

    // Putting it all together...

    final LinkedHashMap<Either<String, Tuple2<String, String>>, JsonNode> total_result1 = ElasticsearchIndexUtils
            .parseDefaultMapping(both_json, Optional.of("type_test"), Optional.empty(), Optional.empty(),
                    _config.search_technology_override(), _mapper);

    assertEquals(4, total_result1.size());
    assertEquals(
            "{\"mapping\":{\"type\":\"number\",\"index\":\"analyzed\"},\"path_match\":\"test*\",\"match_mapping_type\":\"number\"}",
            total_result1.get(Either.right(Tuples._2T("test*", "number"))).toString());
    assertEquals("{\"type\":\"date\"}", total_result1.get(Either.left("@timestamp1")).toString());

    final LinkedHashMap<Either<String, Tuple2<String, String>>, JsonNode> total_result2 = ElasticsearchIndexUtils
            .parseDefaultMapping(both_json, Optional.empty(), Optional.empty(), Optional.empty(),
                    _config.search_technology_override(), _mapper);

    assertEquals(7, total_result2.size());
    assertEquals(true, total_result2.get(Either.right(Tuples._2T("*", "string"))).get("mapping")
            .get("omit_norms").asBoolean());
    assertEquals("{\"type\":\"date\",\"fielddata\":{}}",
            total_result2.get(Either.left("@timestamp")).toString());

    // A couple of error checks:
    // - Missing mapping
    // - Mapping not an object
}

From source file:org.orcid.frontend.web.controllers.PublicProfileController.java

@RequestMapping(value = "/{orcid:(?:\\d{4}-){3,}\\d{3}[\\dX]}")
public ModelAndView publicPreview(HttpServletRequest request,
        @RequestParam(value = "page", defaultValue = "1") int pageNo,
        @RequestParam(value = "v", defaultValue = "0") int v,
        @RequestParam(value = "maxResults", defaultValue = "15") int maxResults,
        @PathVariable("orcid") String orcid) {

    OrcidProfile profile = orcidProfileCacheManager.retrievePublic(orcid);

    if (profile == null) {
        return new ModelAndView("error-404");
    }

    ModelAndView mav = null;
    mav = new ModelAndView("public_profile_v3");
    mav.addObject("isPublicProfile", true);

    boolean isProfileEmtpy = true;

    request.getSession().removeAttribute(PUBLIC_WORKS_RESULTS_ATTRIBUTE);

    mav.addObject("profile", profile);

    String countryName = getCountryName(profile, true);
    if (!StringUtil.isBlank(countryName))
        mav.addObject("countryName", countryName);

    LinkedHashMap<Long, WorkForm> minimizedWorksMap = new LinkedHashMap<>();
    LinkedHashMap<Long, Affiliation> affiliationMap = new LinkedHashMap<>();
    LinkedHashMap<Long, Funding> fundingMap = new LinkedHashMap<>();
    LinkedHashMap<Long, PeerReview> peerReviewMap = new LinkedHashMap<>();

    if (profile != null && profile.getOrcidBio() != null && profile.getOrcidBio().getBiography() != null
            && StringUtils.isNotBlank(profile.getOrcidBio().getBiography().getContent())) {
        isProfileEmtpy = false;
    }

    if (profile.isLocked()) {
        mav.addObject("locked", true);
    } else if (profile.getOrcidDeprecated() != null) {
        String primaryRecord = profile.getOrcidDeprecated().getPrimaryRecord().getOrcidIdentifier().getPath();
        mav.addObject("deprecated", true);
        mav.addObject("primaryRecord", primaryRecord);
    } else {
        minimizedWorksMap = minimizedWorksMap(orcid);
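        // a non-zero size() means the profile has at least one work to display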
        if (minimizedWorksMap.size() > 0) {
            mav.addObject("works", minimizedWorksMap.values());
            isProfileEmtpy = false;
        } else {
            mav.addObject("worksEmpty", true);
        }

        affiliationMap = affiliationMap(orcid);
        if (affiliationMap.size() > 0) {
            mav.addObject("affilations", affiliationMap.values());
            isProfileEmtpy = false;
        } else {
            mav.addObject("affiliationsEmpty", true);
        }

        fundingMap = fundingMap(orcid);
        if (fundingMap.size() > 0)
            isProfileEmtpy = false;
        else {
            mav.addObject("fundingEmpty", true);
        }

        peerReviewMap = peerReviewMap(orcid);
        if (peerReviewMap.size() > 0) {
            mav.addObject("peerReviews", peerReviewMap.values());
            isProfileEmtpy = false;
        } else {
            mav.addObject("peerReviewsEmpty", true);
        }

    }
    ObjectMapper mapper = new ObjectMapper();

    try {
        String worksIdsJson = mapper.writeValueAsString(minimizedWorksMap.keySet());
        String affiliationIdsJson = mapper.writeValueAsString(affiliationMap.keySet());
        String fundingIdsJson = mapper.writeValueAsString(fundingMap.keySet());
        String peerReviewIdsJson = mapper.writeValueAsString(peerReviewMap.keySet());
        mav.addObject("workIdsJson", StringEscapeUtils.escapeEcmaScript(worksIdsJson));
        mav.addObject("affiliationIdsJson", StringEscapeUtils.escapeEcmaScript(affiliationIdsJson));
        mav.addObject("fundingIdsJson", StringEscapeUtils.escapeEcmaScript(fundingIdsJson));
        mav.addObject("peerReviewIdsJson", StringEscapeUtils.escapeEcmaScript(peerReviewIdsJson));
        mav.addObject("isProfileEmpty", isProfileEmtpy);

        String creditName = "";
        if (profile.getOrcidBio() != null && profile.getOrcidBio().getPersonalDetails() != null) {
            PersonalDetails personalDetails = profile.getOrcidBio().getPersonalDetails();
            if (personalDetails.getCreditName() != null
                    && !PojoUtil.isEmpty(personalDetails.getCreditName().getContent()))
                creditName = profile.getOrcidBio().getPersonalDetails().getCreditName().getContent();
            else {
                if (personalDetails.getGivenNames() != null
                        && !PojoUtil.isEmpty(personalDetails.getGivenNames().getContent()))
                    creditName += personalDetails.getGivenNames().getContent();
                if (personalDetails.getFamilyName() != null
                        && !PojoUtil.isEmpty(personalDetails.getFamilyName().getContent()))
                    creditName += " " + personalDetails.getFamilyName().getContent();
            }
        }
        if (!PojoUtil.isEmpty(creditName)) {
            // <Published Name> (<ORCID iD>) - ORCID | Connecting Research
            // and Researchers
            mav.addObject("title", getMessage("layout.public-layout.title", creditName.trim(), orcid));
        }

    } catch (JsonGenerationException e) {
        e.printStackTrace();
    } catch (JsonMappingException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    }

    if (!profile.isReviewed()) {
        if (isProfileValidForIndex(profile)) {
            if (profile.isLocked() || profile.getCountTokens() == 0
                    || (!CreationMethod.WEBSITE.equals(profile.getOrcidHistory().getCreationMethod())
                            && !CreationMethod.DIRECT.equals(profile.getOrcidHistory().getCreationMethod()))) {
                mav.addObject("noIndex", true);
            }
        } else {
            mav.addObject("noIndex", true);
        }
    }

    return mav;
}

From source file:pt.lsts.neptus.util.logdownload.LogsDownloaderWorkerActions.java

@SuppressWarnings("serial")
private AbstractAction createDownloadListAction() {
    return new AbstractAction() {
        @Override
        public void actionPerformed(ActionEvent e) {
            if (!gui.validateAndSetUI()) {
                gui.popupErrorConfigurationDialog();
                return;
            }
            AsyncTask task = new AsyncTask() {
                @Override
                public Object run() throws Exception {
                    if (stopLogListProcessing)
                        stopLogListProcessing = false;

                    long time = System.currentTimeMillis();
                    showInGuiStarting();

                    gui.downloadListButton.setEnabled(false);
                    // logFolderList.setEnabled(false);
                    gui.logFolderList.setValueIsAdjusting(true);
                    // logFilesList.setEnabled(false);

                    // ->Getting txt list of logs from server
                    showInGuiConnectingToServers();

                    // Maps base log folder to server presence (a space-separated list of server keys)
                    LinkedHashMap<String, String> serversLogPresenceList = new LinkedHashMap<>();
                    // Maps FTPFile (log base folder) to its remote path
                    LinkedHashMap<FTPFile, String> retList = new LinkedHashMap<>();

                    // Get list from servers
                    getFromServersBaseLogList(retList, serversLogPresenceList);

                    if (retList.isEmpty()) {
                        gui.msgPanel.writeMessageTextln(I18n.text("Done"));
                        return null;
                    }

                    gui.msgPanel
                            .writeMessageTextln(I18n.textf("Log Folders: %numberoffolders", retList.size()));

                    long timeS1 = System.currentTimeMillis();

                    // Filter out the active log (the last one) so it is not shown
                    orderAndFilterOutTheActiveLog(retList);
                    showInGuiNumberOfLogsFromServers(retList);
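                    // after filtering, a size() of 0 means there is nothing left to process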
                    if (retList.size() == 0) // Abort the rest of processing
                        return null;

                    // ->Setting already existing LogFolders not present on the servers to LOCAL state
                    showInGuiFiltering();
                    setStateLocalIfNotInPresentServer(retList);

                    if (stopLogListProcessing)
                        return null;

                    // ->Adding new LogFolders
                    LinkedList<LogFolderInfo> existentLogFoldersFromServer = new LinkedList<LogFolderInfo>();
                    LinkedList<LogFolderInfo> newLogFoldersFromServer = new LinkedList<LogFolderInfo>();
                    addTheNewFoldersAnFillTheReturnedExistentAndNewLists(retList, existentLogFoldersFromServer,
                            newLogFoldersFromServer);

                    if (stopLogListProcessing)
                        return null;

                    // ->Getting Log files list from server
                    showInGuiProcessingLogList();
                    LinkedList<LogFolderInfo> tmpLogFolderList = getFromServersCompleteLogList(
                            serversLogPresenceList);

                    showInGuiUpdatingLogsInfo();

                    // Testing for log files from each log folder
                    testingForLogFilesFromEachLogFolderAndFillInfo(tmpLogFolderList);

                    if (stopLogListProcessing)
                        return null;

                    // Updating new and existent log folders
                    testNewReportedLogFoldersForLocalCorrespondent(newLogFoldersFromServer);
                    updateLogFoldersState(existentLogFoldersFromServer);

                    // Updating Files for selected folders
                    updateFilesListGUIForFolderSelectedNonBlocking();

                    NeptusLog.pub().warn("....process list from all servers "
                            + (System.currentTimeMillis() - timeS1) + "ms");

                    showInGuiUpdatingGui();

                    NeptusLog.pub()
                            .warn("....all downloadListAction " + (System.currentTimeMillis() - time) + "ms");
                    showInGuiDone();
                    return true;
                }

                @Override
                public void finish() {
                    stopLogListProcessing = false;

                    gui.logFolderList.setValueIsAdjusting(false);
                    gui.logFolderList.invalidate();
                    gui.logFolderList.revalidate();
                    gui.logFolderList.repaint();
                    gui.logFolderList.setEnabled(true);
                    // logFilesList.invalidate();
                    // logFilesList.revalidate();
                    // logFilesList.repaint();
                    gui.listHandlingProgressBar.setValue(0);
                    gui.listHandlingProgressBar.setIndeterminate(false);
                    gui.listHandlingProgressBar.setString("");
                    gui.logFilesList.setEnabled(true);
                    gui.downloadListButton.setEnabled(true);
                    try {
                        this.getResultOrThrow();
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                }
            };

            AsyncWorker.getWorkerThread().postTask(task);
        }
    };
}

From source file:org.fao.fenix.wds.web.rest.crowdprices.CrowdPricesDataRESTService.java

private String createGeoJson(List<List<String>> table) {

    String s = "{ \"type\":\"FeatureCollection\",\"features\":[";
    int i = 0;
    LinkedHashMap<String, List<List<String>>> markets = getCrowdPricesPoints(table);
    for (String marketname : markets.keySet()) {
        String popupcontent = "<b>" + marketname + "</b><br>";
        String lat = "";
        String lon = "";
        for (List<String> row : markets.get(marketname)) {
            popupcontent += row.get(1).replace("_", " ") + " - " + row.get(2) + " " + " "
                    + row.get(3).replace("_", " ") + "/" + row.get(4).replace("_", " ") + "<br>";
            lon = row.get(5);
            lat = row.get(6);
        }
        //         System.out.println("popup: " + popupcontent);
        s += "{\"type\":\"Feature\",\"properties\":{\"iconurl\":\"images/marker-icon.png\","
                + "\"name\":\"Countrys\"," +
                //         s += "{\"type\":\"Feature\",\"properties\":{\"iconurl\":\"http://fenixapps.fao.org/repository/js/leaflet/0.5.1/images/marker-icon-disabled.png\"," + "\"name\":\"Countrys\"," +
                "\"popupContent\":\"" + popupcontent + " \"},\"geometry\":{\"type\":\"Point\",\"coordinates\":["
                + lon + "," + lat + "]}}";
        if (i < markets.size() - 1) {
            s += ",";
        }
        i++;
    }
    s += "]}";
    return s;
}

From source file:org.spout.api.chat.ChatArguments.java

/**
 * Splits this ChatArguments instance into sections
 *
 * @param type How these arguments are to be split into sections
 * @return The split sections
 */
public List<ChatSection> toSections(SplitType type) {
    List<ChatSection> sections = new ArrayList<ChatSection>();
    StringBuilder currentWord = new StringBuilder();
    LinkedHashMap<Integer, List<ChatStyle>> map;
    switch (type) {
    case WORD:
        map = new LinkedHashMap<Integer, List<ChatStyle>>();
        int curIndex = 0;
        for (Object obj : getExpandedPlaceholders()) {
            if (obj instanceof ChatStyle) {
                ChatStyle style = (ChatStyle) obj;
                List<ChatStyle> list = map.get(curIndex);
                if (list == null) {
                    list = new ArrayList<ChatStyle>();
                    map.put(curIndex, list);
                }
                ChatSectionUtils.removeConflicting(list, style);
                list.add(style);
            } else {
                String val = String.valueOf(obj);
                for (int i = 0; i < val.length(); ++i) {
                    int codePoint = val.codePointAt(i);
                    if (Character.isWhitespace(codePoint)) {
                        sections.add(new ChatSectionImpl(type, new LinkedHashMap<Integer, List<ChatStyle>>(map),
                                currentWord.toString()));
                        curIndex = 0;
                        currentWord = new StringBuilder();
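                        // a non-empty style map is collapsed under key -1 so the styles carry over to the next word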
                        if (map.size() > 0) {
                            final List<ChatStyle> previousStyles = map.containsKey(-1)
                                    ? new ArrayList<ChatStyle>(map.get(-1))
                                    : new ArrayList<ChatStyle>();

                            for (Map.Entry<Integer, List<ChatStyle>> entry : map.entrySet()) {
                                if (entry.getKey() != -1) {
                                    for (ChatStyle style : entry.getValue()) {
                                        ChatSectionUtils.removeConflicting(previousStyles, style);
                                        previousStyles.add(style);
                                    }
                                }
                            }
                            map.clear();
                            map.put(-1, previousStyles);
                        }
                    } else {
                        currentWord.append(val.substring(i, i + 1));
                        curIndex++;
                    }
                }
            }
        }

        if (currentWord.length() > 0) {
            sections.add(new ChatSectionImpl(type, map, currentWord.toString()));
        }
        break;

    case STYLE_CHANGE:
        StringBuilder curSection = new StringBuilder();
        List<ChatStyle> activeStyles = new ArrayList<ChatStyle>(3);
        for (Object obj : getExpandedPlaceholders()) {
            if (obj instanceof ChatStyle) {
                ChatStyle style = (ChatStyle) obj;
                ChatSectionUtils.removeConflicting(activeStyles, style);
                activeStyles.add(style);

                map = new LinkedHashMap<Integer, List<ChatStyle>>();
                map.put(-1, new ArrayList<ChatStyle>(activeStyles));
                sections.add(new ChatSectionImpl(type, map, curSection.toString()));
                curSection = new StringBuilder();
            } else {
                curSection.append(obj);
            }
        }
        break;

    case ALL:
        return Collections.<ChatSection>singletonList(
                new ChatSectionImpl(getSplitType(), getActiveStyles(), getPlainString()));

    default:
        throw new IllegalArgumentException("Unknown SplitOption " + type + "!");
    }
    return sections;
}

From source file:com.datatorrent.stram.engine.StreamingContainer.java

private void setupNode(OperatorDeployInfo ndi) {
    failedNodes.remove(ndi.id);
    final Node<?> node = nodes.get(ndi.id);

    node.setup(node.context);

    /* setup context for all the input ports */
    LinkedHashMap<String, PortContextPair<InputPort<?>>> inputPorts = node
            .getPortMappingDescriptor().inputPorts;
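    // inputPorts.size() pre-sizes the replacement map to the current number of ports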
    LinkedHashMap<String, PortContextPair<InputPort<?>>> newInputPorts = new LinkedHashMap<String, PortContextPair<InputPort<?>>>(
            inputPorts.size());
    for (OperatorDeployInfo.InputDeployInfo idi : ndi.inputs) {
        InputPort<?> port = inputPorts.get(idi.portName).component;
        PortContext context = new PortContext(idi.contextAttributes, node.context);
        newInputPorts.put(idi.portName, new PortContextPair<InputPort<?>>(port, context));
        port.setup(context);
    }
    inputPorts.putAll(newInputPorts);

    /* setup context for all the output ports */
    LinkedHashMap<String, PortContextPair<OutputPort<?>>> outputPorts = node
            .getPortMappingDescriptor().outputPorts;
    LinkedHashMap<String, PortContextPair<OutputPort<?>>> newOutputPorts = new LinkedHashMap<String, PortContextPair<OutputPort<?>>>(
            outputPorts.size());
    for (OperatorDeployInfo.OutputDeployInfo odi : ndi.outputs) {
        OutputPort<?> port = outputPorts.get(odi.portName).component;
        PortContext context = new PortContext(odi.contextAttributes, node.context);
        newOutputPorts.put(odi.portName, new PortContextPair<OutputPort<?>>(port, context));
        port.setup(context);
    }
    outputPorts.putAll(newOutputPorts);

    logger.debug("activating {} in container {}", node, containerId);
    /* This introduces a need for synchronization on processNodeRequest, which was solved by adding the deleted field in StramToNodeRequest */
    processNodeRequests(false);
    node.activate();
    eventBus.publish(new NodeActivationEvent(node));
}

From source file:org.apache.ode.daohib.bpel.ql.HibernateInstancesQueryCompiler.java

protected OrderByEvaluator<Collection<Order>, Object> compileOrderBy(OrderBy orderBy) {
    final LinkedHashMap<String, Boolean> orders = new LinkedHashMap<String, Boolean>();

    for (OrderByElement idOrder : orderBy.getOrders()) {
        if (!(idOrder.getIdentifier() instanceof Field)) {
            throw new IllegalArgumentException("Only field identifier supported by order by operator.");
        }
        String idName = idOrder.getIdentifier().getName();
        if (INSTANCE_STATUS_FIELD.equals(idName)) {
            if (orderBy.getOrders().size() > 1) {
                //TODO throw appropriate exception
                throw new RuntimeException("Status field should be used alone in <order by> construction.");
            }
            orderByStatus = true;
            orderByStatusDesc = idOrder.getType() == OrderByType.DESC;
            return null;
        }
        String dbField = getDBField(idName);

        orders.put(dbField, idOrder.getType() == null || idOrder.getType() == OrderByType.ASC);
    }

    return new OrderByEvaluator<Collection<Order>, Object>() {
        public Collection<Order> evaluate(Object paramValue) {
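            // orders.size() sizes the list up front: one Hibernate Order per order-by field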
            Collection<Order> hibernateOrders = new ArrayList<Order>(orders.size());
            for (Map.Entry<String, Boolean> order : orders.entrySet()) {
                hibernateOrders.add(order.getValue() ? Order.asc(order.getKey()) : Order.desc(order.getKey()));
            }
            return hibernateOrders;
        }
    };
}

From source file:org.kontalk.provider.UsersProvider.java

/**
 * Computes per-label counts for the address book index and returns them as a {@link Bundle}
 * that will be appended to a {@link Cursor} as extras.
 */
private Bundle getFastScrollingIndexExtras(Cursor cursor) {
    try {
        LinkedHashMap<String, Counter> groups = new LinkedHashMap<>();
        int count = cursor.getCount();

        for (int i = 0; i < count; i++) {
            cursor.moveToNext();
            String source = cursor.getString(Contact.COLUMN_DISPLAY_NAME);
            // use phone number if we don't have a display name
            if (source == null)
                source = cursor.getString(Contact.COLUMN_NUMBER);
            String label = mLocaleUtils.getLabel(source);
            Counter counter = groups.get(label);
            if (counter == null) {
                counter = new Counter(1);
                groups.put(label, counter);
            } else {
                counter.inc();
            }
        }

        int numLabels = groups.size();
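        // one slot per distinct index label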
        String[] labels = new String[numLabels];
        int[] counts = new int[numLabels];
        int i = 0;
        for (Map.Entry<String, Counter> entry : groups.entrySet()) {
            labels[i] = entry.getKey();
            counts[i] = entry.getValue().value;
            i++;
        }

        return FastScrollingIndexCache.buildExtraBundle(labels, counts);
    } finally {
        // reset the cursor
        cursor.move(-1);
    }
}

From source file:com.google.gwt.emultest.java.util.LinkedHashMapTest.java

public void testKeysConflict() {
    LinkedHashMap<Object, String> hashMap = new LinkedHashMap<Object, String>();

    hashMap.put(STRING_ZERO_KEY, STRING_ZERO_VALUE);
    hashMap.put(INTEGER_ZERO_KEY, INTEGER_ZERO_VALUE);
    hashMap.put(ODD_ZERO_KEY, ODD_ZERO_VALUE);
    assertEquals(hashMap.get(INTEGER_ZERO_KEY), INTEGER_ZERO_VALUE);
    assertEquals(hashMap.get(ODD_ZERO_KEY), ODD_ZERO_VALUE);
    assertEquals(hashMap.get(STRING_ZERO_KEY), STRING_ZERO_VALUE);
    hashMap.remove(INTEGER_ZERO_KEY);
    assertEquals(hashMap.get(ODD_ZERO_KEY), ODD_ZERO_VALUE);
    assertEquals(hashMap.get(STRING_ZERO_KEY), STRING_ZERO_VALUE);
    assertEquals(hashMap.get(INTEGER_ZERO_KEY), null);
    hashMap.remove(ODD_ZERO_KEY);
    assertEquals(hashMap.get(INTEGER_ZERO_KEY), null);
    assertEquals(hashMap.get(ODD_ZERO_KEY), null);
    assertEquals(hashMap.get(STRING_ZERO_KEY), STRING_ZERO_VALUE);
    hashMap.remove(STRING_ZERO_KEY);
    assertEquals(hashMap.get(INTEGER_ZERO_KEY), null);
    assertEquals(hashMap.get(ODD_ZERO_KEY), null);
    assertEquals(hashMap.get(STRING_ZERO_KEY), null);
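    // with all three colliding keys removed, size() is back to 0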
    assertEquals(hashMap.size(), 0);
}