Example usage for java.util HashMap values

List of usage examples for java.util HashMap values

Introduction

On this page you can find usage examples for java.util.HashMap.values().

Prototype

public Collection<V> values() 

Document

Returns a Collection view of the values contained in this map. The collection is backed by the map, so changes to the map are reflected in the collection, and vice-versa.
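
Before the full-size examples below, here is a minimal, self-contained sketch of the view semantics (the class and variable names are illustrative, not taken from any of the sources below):

import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;

public class HashMapValuesDemo {
    public static void main(String[] args) {
        HashMap<String, Integer> scores = new HashMap<String, Integer>();
        scores.put("alice", 3);
        scores.put("bob", 5);

        // values() returns a live view backed by the map, not a copy.
        Collection<Integer> values = scores.values();
        System.out.println(values.size()); // 2

        // Changes to the map are reflected in the view...
        scores.put("carol", 7);
        System.out.println(values.size()); // 3

        // ...and removing from the view removes the corresponding mapping.
        values.remove(Integer.valueOf(5));
        System.out.println(scores.containsKey("bob")); // false

        // A common idiom, used by most of the examples below: snapshot the
        // values into an independent list before returning or modifying them.
        List<Integer> snapshot = new ArrayList<Integer>(values);
        System.out.println(snapshot);
    }
}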

Usage

From source file:com.actionbarsherlock.ActionBarSherlock.java

/**
 * Wrap an activity with an action bar abstraction which will enable the
 * use of a custom implementation on platforms where a native version does
 * not exist.
 *
 * @param activity Owning activity.
 * @param flags Option flags to control behavior.
 * @return Instance to interact with the action bar.
 */
public static ActionBarSherlock wrap(Activity activity, int flags) {
    //Create a local implementation map we can modify
    HashMap<Implementation, Class<? extends ActionBarSherlock>> impls = new HashMap<Implementation, Class<? extends ActionBarSherlock>>(
            IMPLEMENTATIONS);
    boolean hasQualifier;

    /* DPI FILTERING */
    hasQualifier = false;
    for (Implementation key : impls.keySet()) {
        //Only honor TVDPI as a specific qualifier
        if (key.dpi() == DisplayMetrics.DENSITY_TV) {
            hasQualifier = true;
            break;
        }
    }
    if (hasQualifier) {
        final boolean isTvDpi = activity.getResources()
                .getDisplayMetrics().densityDpi == DisplayMetrics.DENSITY_TV;
        for (Iterator<Implementation> keys = impls.keySet().iterator(); keys.hasNext();) {
            int keyDpi = keys.next().dpi();
            if ((isTvDpi && keyDpi != DisplayMetrics.DENSITY_TV)
                    || (!isTvDpi && keyDpi == DisplayMetrics.DENSITY_TV)) {
                keys.remove();
            }
        }
    }

    /* API FILTERING */
    hasQualifier = false;
    for (Implementation key : impls.keySet()) {
        if (key.api() != Implementation.DEFAULT_API) {
            hasQualifier = true;
            break;
        }
    }
    if (hasQualifier) {
        final int runtimeApi = Build.VERSION.SDK_INT;
        int bestApi = 0;
        for (Iterator<Implementation> keys = impls.keySet().iterator(); keys.hasNext();) {
            int keyApi = keys.next().api();
            if (keyApi > runtimeApi) {
                keys.remove();
            } else if (keyApi > bestApi) {
                bestApi = keyApi;
            }
        }
        for (Iterator<Implementation> keys = impls.keySet().iterator(); keys.hasNext();) {
            if (keys.next().api() != bestApi) {
                keys.remove();
            }
        }
    }

    if (impls.size() > 1) {
        throw new IllegalStateException("More than one implementation matches configuration.");
    }
    if (impls.isEmpty()) {
        throw new IllegalStateException("No implementations match configuration.");
    }
    Class<? extends ActionBarSherlock> impl = impls.values().iterator().next();
    if (DEBUG)
        Log.i(TAG, "Using implementation: " + impl.getSimpleName());

    try {
        Constructor<? extends ActionBarSherlock> ctor = impl.getConstructor(CONSTRUCTOR_ARGS);
        return ctor.newInstance(activity, flags);
    } catch (NoSuchMethodException e) {
        throw new RuntimeException(e);
    } catch (IllegalArgumentException e) {
        throw new RuntimeException(e);
    } catch (InstantiationException e) {
        throw new RuntimeException(e);
    } catch (IllegalAccessException e) {
        throw new RuntimeException(e);
    } catch (InvocationTargetException e) {
        throw new RuntimeException(e);
    }
}

From source file:com.clustercontrol.repository.factory.FacilitySelector.java

/**
 * Returns the list of facilities under the given scope.<BR>
 * The facility tree is resolved either for the given owner role ID or,
 * if none is given, for the currently logged-in user.
 *
 * @param parentFacilityId facility ID of the parent scope
 * @param ownerRoleId owner role ID used to filter the visible facility tree
 * @param level depth of the tree to traverse
 * @param scopeFlag whether to include scopes in the result (include: true, exclude: false)
 * @return list of facilities
 */
private static ArrayList<FacilityInfo> getFacilityList(String parentFacilityId, String ownerRoleId, int level,
        boolean scopeFlag) {

    /** Facilities found so far, keyed by facility ID. */
    HashMap<String, FacilityInfo> facilityMap = new HashMap<String, FacilityInfo>();

    m_log.debug("getting facilities under a scope. (scopeFacilityId = " + parentFacilityId + ")");

    if (ObjectValidator.isEmptyString(parentFacilityId)) {
        return new ArrayList<FacilityInfo>();
    }

    FacilityTreeItem treeItem = null;
    if (ownerRoleId != null && !ownerRoleId.isEmpty()) {
        // Use the facility tree visible to the given owner role ID
        treeItem = FacilityTreeCache.getFacilityTreeByRoleId(ownerRoleId);
    } else {
        // Otherwise, use the facility tree visible to the logged-in user
        String userId = (String) HinemosSessionContext.instance()
                .getProperty(HinemosSessionContext.LOGIN_USER_ID);
        treeItem = FacilityTreeCache.getFacilityTreeByUserId(userId);
    }
    FacilityTreeItem parentFacilityTreeItem = getTopFacilityRecursive(treeItem, parentFacilityId);
    if (parentFacilityTreeItem == null) {
        m_log.info("getFacilityList() : Entity is not found. : facilityId = " + parentFacilityId);
    } else {
        if (parentFacilityTreeItem.getData().getFacilityType() == FacilityConstant.TYPE_NODE) {
            facilityMap.put(parentFacilityTreeItem.getData().getFacilityId(), parentFacilityTreeItem.getData());
            return new ArrayList<FacilityInfo>(facilityMap.values());
        } else if (scopeFlag) {
            facilityMap.put(parentFacilityTreeItem.getData().getFacilityId(), parentFacilityTreeItem.getData());
        }
        getFacilityListRecursive(parentFacilityTreeItem, level, facilityMap, scopeFlag);
    }
    m_log.debug("successful in getting facilities under a scope. (scopeFacilityId = " + parentFacilityId + ")");
    return new ArrayList<FacilityInfo>(facilityMap.values());
}

From source file:eu.planets_project.tb.gui.backing.ServiceBrowser.java

/**
 * @return the known service records, grouped by format
 */
public List<ServiceRecordsByFormatBean> getAllServiceRecordsByFormat() {
    HashMap<URI, ServiceRecordsByFormatBean> sbn = new HashMap<URI, ServiceRecordsByFormatBean>();

    // Get all the known, unique service records.
    List<ServiceRecordBean> records = this.getAllServicesAndRecords();

    // Aggregate those into a list of new service-by-name:
    for (ServiceRecordBean srb : records) {
        if (this.getSelectedServiceTypes().contains(srb.getType())) {
            if (srb.getInputs(true) != null) {
                for (URI fmt : srb.getInputs(true)) {
                    if (sbn.get(fmt) == null) {
                        sbn.put(fmt, new ServiceRecordsByFormatBean(fr.getFormatForUri(fmt)));
                    }
                    sbn.get(fmt).addAsInputService(srb);
                }
            }
            if (srb.getOutputs() != null) {
                for (URI fmt : srb.getOutputs()) {
                    if (sbn.get(fmt) == null) {
                        sbn.put(fmt, new ServiceRecordsByFormatBean(fr.getFormatForUri(fmt)));
                    }
                    sbn.get(fmt).addAsOutputService(srb);
                }
            }
        }
    }

    return new ArrayList<ServiceRecordsByFormatBean>(sbn.values());
}

From source file:au.org.theark.core.dao.DataExtractionDao.java

public File createPhenotypicCSV(Search search, DataExtractionVO devo, List<PhenoDataSetFieldDisplay> cfds,
        FieldCategory fieldCategory) {
    final String tempDir = System.getProperty("java.io.tmpdir");
    String filename = "PHENOTYPIC.csv";
    // Fall back to a default name before the file is created.
    if (filename == null || filename.isEmpty()) {
        filename = "exportcsv.csv";
    }
    final java.io.File file = new File(tempDir, filename);

    Set<String> headers = new HashSet<String>();
    HashMap<String, List<String>> phenoCollectionMapping = new HashMap<String, List<String>>();

    for (Entry<String, ExtractionVO> entry : devo.getPhenoCustomData().entrySet()) {
        String subjectUID = entry.getValue().getSubjectUid();
        if (phenoCollectionMapping.containsKey(subjectUID)) {
            phenoCollectionMapping.get(subjectUID).add(entry.getKey());
        } else {
            List<String> phenoCollectionIDs = new ArrayList<String>();
            phenoCollectionIDs.add(entry.getKey());
            phenoCollectionMapping.put(subjectUID, phenoCollectionIDs);
        }
    }

    Set<String> phenoCollectionHeadersSet = new HashSet<String>();
    int maxPhenoCollections = 0;
    for (List<String> pc : phenoCollectionMapping.values()) {
        if (pc.size() > maxPhenoCollections) {
            maxPhenoCollections = pc.size();
        }
    }

    Iterator<ExtractionVO> iter = devo.getPhenoCustomData().values().iterator();
    while (iter.hasNext()) {
        ExtractionVO evo = iter.next();
        phenoCollectionHeadersSet.addAll(evo.getKeyValues().keySet());
    }

    List<String> phenoCollectionHeaders = new ArrayList<String>(phenoCollectionHeadersSet);
    List<String> headersList = new ArrayList<String>(headers);
    Collections.sort(phenoCollectionHeaders);

    phenoCollectionHeaders.add(0, "Record Date");
    phenoCollectionHeaders.add(1, "Collection Name");

    OutputStream outputStream;
    try {
        outputStream = new FileOutputStream(file);
        CsvWriter csv = new CsvWriter(outputStream);

        csv.write("Subject UID");

        for (String header : headersList) {
            csv.write(header);
        }

        for (int i = 1; i <= maxPhenoCollections; i++) {
            for (String header : phenoCollectionHeaders) {
                csv.write("P" + i + "_" + header);
            }
        }

        csv.endLine();

        for (String subjectUID : phenoCollectionMapping.keySet()) {
            csv.write(subjectUID);

            ExtractionVO subjectData = devo.getDemographicData().get(subjectUID);
            ExtractionVO subjectCustomData = devo.getSubjectCustomData().get(subjectUID);
            for (String header : headersList) {
                if (subjectData.getKeyValues().containsKey(header)) {
                    csv.write(subjectData.getKeyValues().get(header));
                } else if (subjectCustomData != null && subjectCustomData.getKeyValues().containsKey(header)) {
                    csv.write(subjectCustomData.getKeyValues().get(header));
                } else {
                    csv.write("");
                }
            }
            if (phenoCollectionMapping.containsKey(subjectUID)) { // always true when iterating over keySet()
                DateFormat df = new SimpleDateFormat("MM/dd/yyyy");
                for (String phenoCollectionID : phenoCollectionMapping.get(subjectUID)) {
                    ExtractionVO phenoCollectionData = devo.getPhenoCustomData().get(phenoCollectionID);
                    for (String header : phenoCollectionHeaders) {
                        if (header.equals("Record Date")) {
                            csv.write(df.format(phenoCollectionData.getRecordDate()));
                        } else if (header.equals("Collection Name")) {
                            csv.write(phenoCollectionData.getCollectionName());
                        } else if (phenoCollectionData.getKeyValues().containsKey(header)) {
                            csv.write(phenoCollectionData.getKeyValues().get(header));
                        } else {
                            csv.write("");
                        }
                    }
                }
                if (phenoCollectionMapping.get(subjectUID).size() < maxPhenoCollections) {
                    for (int i = 0; i < (maxPhenoCollections
                            - phenoCollectionMapping.get(subjectUID).size()); i++) {
                        for (String header : phenoCollectionHeaders) {
                            csv.write("");
                        }
                    }
                }
            } else {
                for (int i = 0; i < maxPhenoCollections; i++) {
                    for (String header : phenoCollectionHeaders) {
                        csv.write("");
                    }
                }
            }
            csv.endLine();
        }

        csv.close();

    } catch (FileNotFoundException e) {
        log.error(e.getMessage());
    }

    return file;
}

From source file:com.heliumv.api.order.OrderApi.java

@GET
@Path("offline")
@Produces({ FORMAT_JSON, FORMAT_XML })
public OfflineOrderEntry getOfflineOrders(@HeaderParam(ParamInHeader.TOKEN) String headerUserId,
        @QueryParam(Param.USERID) String userId, @QueryParam(Param.LIMIT) Integer limit,
        @QueryParam(Param.STARTINDEX) Integer startIndex, @QueryParam("filter_cnr") String filterCnr,
        @QueryParam("filter_customer") String filterCustomer,
        @QueryParam("filter_delivery_customer") String filterDeliveryCustomer,
        @QueryParam("filter_project") String filterProject,
        @QueryParam("filter_withHidden") Boolean filterWithHidden) {
    OfflineOrderEntry entry = new OfflineOrderEntry();

    try {
        if (null == connectClient(headerUserId, userId))
            return entry;
        if (!mandantCall.hasModulAuftrag()) {
            respondNotFound();
            return entry;
        }

        FilterKriteriumCollector collector = new FilterKriteriumCollector();
        collector.add(offlineOrderQuery.getFilterCnr(StringHelper.removeXssDelimiters(filterCnr)));
        collector.add(offlineOrderQuery.getFilterProject(StringHelper.removeXssDelimiters(filterProject)));
        collector.add(offlineOrderQuery.getFilterCustomer(StringHelper.removeXssDelimiters(filterCustomer)));
        collector.add(offlineOrderQuery
                .getFilterDeliveryCustomer(StringHelper.removeXssDelimiters(filterDeliveryCustomer)));
        collector.add(offlineOrderQuery.getFilterWithHidden(filterWithHidden));
        FilterBlock filterCrits = new FilterBlock(collector.asArray(), "AND");

        QueryParametersFeatures params = offlineOrderQuery.getFeatureQueryParameters(filterCrits);
        params.setLimit(limit);
        params.setKeyOfSelectedRow(startIndex);
        params.addFeature(AuftragHandlerFeature.ADRESSE_KOMPLETT);
        params.addFeature(AuftragHandlerFeature.ADRESSE_ANSCHRIFT);
        params.addFeature(AuftragHandlerFeature.ADRESSE_IST_LIEFERADRESSE);
        AuftragQueryResult result = (AuftragQueryResult) offlineOrderQuery.setQuery(params);

        List<OrderEntry> orders = offlineOrderQuery.getResultList(result);
        List<OrderpositionsEntry> positions = new ArrayList<OrderpositionsEntry>();
        HashMap<String, IAddressContact> distinctAddresses = new HashMap<String, IAddressContact>();

        int orderIndex = 0;

        for (OrderEntry orderEntry : orders) {
            collector = new FilterKriteriumCollector();
            collector.add(orderPositionQuery.getOrderIdFilter(orderEntry.getId()));
            collector.add(orderPositionQuery.getIsIdentFilter());
            filterCrits = new FilterBlock(collector.asArray(), "AND");

            QueryParameters posParams = orderPositionQuery.getDefaultQueryParameters(filterCrits);
            posParams.setLimit(Integer.MAX_VALUE);
            posParams.setKeyOfSelectedRow(0);

            QueryResult positionResult = orderPositionQuery.setQuery(posParams);
            List<OrderpositionEntry> posEntries = orderPositionQuery.getResultList(positionResult);

            addPositionEntries(positions, orderEntry.getId(), posEntries);

            try {
                IAddressContact orderAddress = result.getFlrData()[orderIndex].getAddressContact();
                distinctAddresses.put(orderAddress.getPartnerAddress().getPartnerId().toString()
                        + (orderAddress.getContactAddress() != null
                                ? ("|" + orderAddress.getContactAddress().getPartnerId().toString())
                                : ""),
                        orderAddress);
            } catch (IndexOutOfBoundsException e) {
                // No FLR row at this index; skip the address for this order.
            }

            ++orderIndex;
        }
        entry.setOrders(orders);
        entry.setOrderpositions(positions);

        List<OrderAddressContact> resultAddresses = new ArrayList<OrderAddressContact>();
        for (IAddressContact orderAddress : distinctAddresses.values()) {
            //            OrderAddressContact newAddress = modelMapper.map(orderAddress, OrderAddressContact.class) ;
            OrderAddressContact newAddress = new OrderAddressContact();
            newAddress.setPartnerAddress(modelMapper.map(orderAddress.getPartnerAddress(), OrderAddress.class));
            if (orderAddress.getContactAddress() != null) {
                newAddress.setContactAddress(
                        modelMapper.map(orderAddress.getContactAddress(), OrderAddress.class));
            }
            resultAddresses.add(newAddress);
        }
        entry.setAddresses(resultAddresses);

    } catch (NamingException e) {
        respondUnavailable(e);
        e.printStackTrace();
    } catch (RemoteException e) {
        respondUnavailable(e);
        e.printStackTrace();
    } catch (EJBExceptionLP e) {
        respondBadRequest(e);
    }

    return entry;
}

From source file:com.searchcode.app.jobs.IndexGitRepoJob.java

private List<CodeOwner> getBlameInfo(int codeLinesSize, String repoName, String repoLocations,
        String fileName) {
    List<CodeOwner> codeOwners = new ArrayList<>(codeLinesSize);
    try {
        // The trailing "/" is required due to a CentOS bug in version 1.1.1.
        // This appears to be correct.
        String repoLoc = repoLocations + "/" + repoName + "/.git";

        Repository localRepository = new FileRepository(new File(repoLoc));
        BlameCommand blamer = new BlameCommand(localRepository);

        ObjectId commitID = localRepository.resolve("HEAD");

        if (commitID == null) {
            Singleton.getLogger().info("getBlameInfo commitID is null for " + repoLoc + " " + fileName);
            return codeOwners;
        }

        BlameResult blame;

        // Somewhere in here appears to be wrong...
        blamer.setStartCommit(commitID);
        blamer.setFilePath(fileName);
        blame = blamer.call();

        // Hail-mary attempt to solve the issue on CentOS: attempt to set the path at all costs.
        if (blame == null) { // This one appears to solve the issue so don't remove it
            String[] split = fileName.split("/");
            blamer.setStartCommit(commitID);
            if (split.length != 1) {
                blamer.setFilePath(String.join("/", Arrays.asList(split).subList(1, split.length)));
            }
            blame = blamer.call();
        }
        if (blame == null) {
            String[] split = fileName.split("/");
            blamer.setStartCommit(commitID);
            if (split.length != 1) {
                blamer.setFilePath("/" + String.join("/", Arrays.asList(split).subList(1, split.length)));
            }
            blame = blamer.call();
        }

        if (blame == null) {
            Singleton.getLogger().info("getBlameInfo blame is null for " + repoLoc + " " + fileName);
        }

        if (blame != null) {
            // Get all the owners their number of commits and most recent commit
            HashMap<String, CodeOwner> owners = new HashMap<>();
            RevCommit commit;
            PersonIdent authorIdent;

            try {
                for (int i = 0; i < codeLinesSize; i++) {
                    commit = blame.getSourceCommit(i);
                    authorIdent = commit.getAuthorIdent();

                    if (owners.containsKey(authorIdent.getName())) {
                        CodeOwner codeOwner = owners.get(authorIdent.getName());
                        codeOwner.incrementLines();

                        int timestamp = codeOwner.getMostRecentUnixCommitTimestamp();

                        if (commit.getCommitTime() > timestamp) {
                            codeOwner.setMostRecentUnixCommitTimestamp(commit.getCommitTime());
                        }
                        owners.put(authorIdent.getName(), codeOwner);
                    } else {
                        owners.put(authorIdent.getName(),
                                new CodeOwner(authorIdent.getName(), 1, commit.getCommitTime()));
                    }
                }
            } catch (IndexOutOfBoundsException ex) {
                // Ignore this as it's not really a problem... or is it?
                Singleton.getLogger()
                        .info("IndexOutOfBoundsException when trying to get blame for " + repoName + fileName);
            }

            codeOwners = new ArrayList<>(owners.values());
        }

    } catch (IOException e) {
        e.printStackTrace();
    } catch (GitAPIException e) {
        e.printStackTrace();
    } catch (IllegalArgumentException e) {
        e.printStackTrace();
    }

    System.gc(); // Try to clean up
    return codeOwners;
}

From source file:de.tudarmstadt.ukp.dkpro.core.io.conll.Conll2009Writer.java

private void convert(JCas aJCas, PrintWriter aOut) {
    Map<Token, Collection<SemanticPredicate>> predIdx = indexCovered(aJCas, Token.class,
            SemanticPredicate.class);
    Map<SemanticArgument, Collection<Token>> argIdx = indexCovered(aJCas, SemanticArgument.class, Token.class);
    for (Sentence sentence : select(aJCas, Sentence.class)) {
        HashMap<Token, Row> ctokens = new LinkedHashMap<Token, Row>();

        // Tokens
        List<Token> tokens = selectCovered(Token.class, sentence);

        // Check if we should try to include the FEATS in output
        List<Morpheme> morphology = selectCovered(Morpheme.class, sentence);
        boolean useFeats = tokens.size() == morphology.size();

        List<SemanticPredicate> preds = selectCovered(SemanticPredicate.class, sentence);

        for (int i = 0; i < tokens.size(); i++) {
            Row row = new Row();
            row.id = i + 1;
            row.token = tokens.get(i);
            row.args = new SemanticArgument[preds.size()];
            if (useFeats) {
                row.feats = morphology.get(i);
            }

            // If there are multiple semantic predicates for the current token, then 
            // we keep only the first
            Collection<SemanticPredicate> predsForToken = predIdx.get(row.token);
            if (predsForToken != null && !predsForToken.isEmpty()) {
                row.pred = predsForToken.iterator().next();
            }
            ctokens.put(row.token, row);
        }

        // Dependencies
        for (Dependency rel : selectCovered(Dependency.class, sentence)) {
            ctokens.get(rel.getDependent()).deprel = rel;
        }

        // Semantic arguments
        for (int p = 0; p < preds.size(); p++) {
            FSArray args = preds.get(p).getArguments();
            for (SemanticArgument arg : select(args, SemanticArgument.class)) {
                for (Token t : argIdx.get(arg)) {
                    Row row = ctokens.get(t);
                    row.args[p] = arg;
                }
            }
        }

        // Write sentence in CONLL 2009 format
        for (Row row : ctokens.values()) {
            int id = row.id;

            String form = row.token.getCoveredText();

            String lemma = UNUSED;
            if (writeLemma && (row.token.getLemma() != null)) {
                lemma = row.token.getLemma().getValue();
            }
            String plemma = lemma;

            String pos = UNUSED;
            if (writePos && (row.token.getPos() != null)) {
                POS posAnno = row.token.getPos();
                pos = posAnno.getPosValue();
            }
            String ppos = pos;

            String feat = UNUSED;
            if (writeMorph && (row.feats != null)) {
                feat = row.feats.getMorphTag();
            }
            String pfeat = feat;

            int headId = UNUSED_INT;
            String deprel = UNUSED;
            if (writeDependency && (row.deprel != null)) {
                deprel = row.deprel.getDependencyType();
                headId = ctokens.get(row.deprel.getGovernor()).id;
                if (headId == row.id) {
                    // ROOT dependencies may be modeled as a loop, ignore these.
                    headId = 0;
                }
            }

            String head = UNUSED;
            if (headId != UNUSED_INT) {
                head = Integer.toString(headId);
            }

            String phead = head;
            String pdeprel = deprel;

            String fillpred = UNUSED;
            String pred = UNUSED;
            StringBuilder apreds = new StringBuilder();
            if (writeSemanticPredicate) {
                if (row.pred != null) {
                    fillpred = "Y";
                    pred = row.pred.getCategory();
                }

                for (SemanticArgument arg : row.args) {
                    if (apreds.length() > 0) {
                        apreds.append('\t');
                    }
                    apreds.append(arg != null ? arg.getRole() : UNUSED);
                }
            }

            aOut.printf("%d\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\n", id, form, lemma, plemma,
                    pos, ppos, feat, pfeat, head, phead, deprel, pdeprel, fillpred, pred, apreds);
        }

        aOut.println();
    }
}

From source file:eu.planets_project.tb.gui.backing.ServiceBrowser.java

/**
 * @return all known service records, merging historical records with currently active services
 */
private List<ServiceRecordBean> lookupAllServicesAndRecords() {
    // Use a hash map to build up the list.
    HashMap<String, ServiceRecordBean> serviceMap = new HashMap<String, ServiceRecordBean>();
    // Get the historical service records:
    ServiceRecordPersistencyRemote srp = ServiceRecordPersistencyImpl.getInstance();
    for (ServiceRecordImpl sr : srp.getAllServiceRecords()) {
        log.info("Putting service record: " + sr.getServiceName() + " : '" + sr.getServiceHash() + "'");
        serviceMap.put(sr.getServiceHash(), new ServiceRecordBean(sr));
    }

    // Now get the active services and patch these records in:
    List<ServiceDescription> serviceList = getListOfServices(null);
    //log.info("Query result: "+serviceList);
    if (serviceList != null)
        log.info("Matched services = " + serviceList.size());
    for (ServiceDescription sd : serviceList) {
        if (serviceMap.containsKey("" + sd.hashCode())) {
            log.info("Updating bean for service: " + sd.getName() + " : '" + sd.hashCode() + "'");
            serviceMap.get("" + sd.hashCode()).setServiceDescription(sd);
        } else {
            serviceMap.put("" + sd.hashCode(), new ServiceRecordBean(sd));
            log.info("Putting in service: " + sd.getName() + " : '" + sd.hashCode() + "'");
        }
    }

    return new ArrayList<ServiceRecordBean>(serviceMap.values());
}

From source file:de.csw.expertfinder.mediawiki.api.MediaWikiAPI.java

/**
 * Returns all contributions for the given user.
 * @param userName the MediaWiki user name whose contributions are fetched
 * @return the user's contributions, one entry per article, sorted by contribution count in descending order
 * @throws MediaWikiAPIException
 */
public List<MediaWikiArticleContribution> getAllContributionsForUser(String userName)
        throws MediaWikiAPIException {
    BasicNameValuePair[] params = new BasicNameValuePair[] { new BasicNameValuePair("list", "usercontribs"),
            new BasicNameValuePair("ucnamespace", "0"), new BasicNameValuePair("ucuser", userName),
            new BasicNameValuePair("ucshow", "!minor"), new BasicNameValuePair("uclimit", "500") };

    HashMap<Integer, MediaWikiArticleContribution> contributionsByArticleId = new HashMap<Integer, MediaWikiArticleContribution>();
    // Page through the API results until no query-continue element is returned.
    for (;;) {
        Document doc = queryMediaWiki("query", params);
        NodeList itemElements = doc.getElementsByTagName("item");
        int len = itemElements.getLength();
        for (int i = 0; i < len; i++) {
            Element itemElement = (Element) itemElements.item(i);
            int articleId = Integer.parseInt(itemElement.getAttribute("pageid"));
            String title = itemElement.getAttribute("title");
            MediaWikiArticleContribution contribution = contributionsByArticleId.get(articleId);
            if (contribution == null) {
                contribution = new MediaWikiArticleContribution(articleId, title, userName);
                contributionsByArticleId.put(articleId, contribution);
            }
            contribution.increaseContributionCount();
        }

        NodeList queryContinueElements = doc.getElementsByTagName("query-continue");
        if (queryContinueElements.getLength() == 0) {
            ArrayList<MediaWikiArticleContribution> result = new ArrayList<MediaWikiArticleContribution>(
                    contributionsByArticleId.size());
            result.addAll(contributionsByArticleId.values());
            Collections.sort(result, new Comparator<MediaWikiArticleContribution>() {
                public int compare(MediaWikiArticleContribution o1, MediaWikiArticleContribution o2) {
                    // we want the result to be sorted in descending order, thus we swap o1 and o2 here.
                    return o2.getContributionCount().compareTo(o1.getContributionCount());
                }
            });
            return result;
        }

        Element queryContinueElement = (Element) queryContinueElements.item(0);
        Element userContribsElement = (Element) queryContinueElement.getElementsByTagName("usercontribs")
                .item(0);
        String ucstart = userContribsElement.getAttribute("ucstart");

        params = new BasicNameValuePair[] { new BasicNameValuePair("list", "usercontribs"),
                new BasicNameValuePair("ucnamespace", "0"), new BasicNameValuePair("ucuser", userName),
                new BasicNameValuePair("ucshow", "!minor"), new BasicNameValuePair("uclimit", "500"),
                new BasicNameValuePair("ucstart", ucstart) };
    }
}

From source file:com.sciaps.view.SpectrumAnalysisReportPanel.java

public void doAnalysis(final SpectrumShotItem spectrumShotItem) {
    logger_.info("Starting Spectrum Analysis");

    spectrumShotItem_ = spectrumShotItem;

    BackgroundTask.runBackgroundTask(new BackgroundTask() {

        private JDialog mDialog;
        private JProgressBar mProgress;

        @Override
        public void onBefore() {
            mProgress = new JProgressBar();
            mProgress.setIndeterminate(true);

            mDialog = new JDialog(Constants.MAIN_FRAME);
            mDialog.setLocationRelativeTo(Constants.MAIN_FRAME);
            mDialog.setAlwaysOnTop(true);
            mDialog.setResizable(false);
            mDialog.setContentPane(mProgress);
            mDialog.setSize(400, 100);
            mDialog.setVisible(true);
        }

        @Override
        public void onBackground() {

            Spectrum tmp = null;
            final double[] peaksOnX_;

            if (spectrumShotItem.getSeriesDataType() == SpectrumShotItem.NORMALIZED) {
                tmp = spectrumShotItem.getShot();
                peaksOnX_ = spectrumAnalyze_.doPeakFinding(tmp);
            } else if (spectrumShotItem.getSeriesDataType() == SpectrumShotItem.BG_REMOVED) {

                tmp = spectrumAnalyze_.doSpectrumNormalization(spectrumShotItem.getShot());
                peaksOnX_ = spectrumAnalyze_.doPeakFinding(tmp);
            } else {

                tmp = spectrumAnalyze_.doBackgroundRemoval(spectrumShotItem.getShot());
                tmp = spectrumAnalyze_.doSpectrumNormalization(tmp);
                peaksOnX_ = spectrumAnalyze_.doPeakFinding(tmp);
            }

            // Create a marker for each peaks and show them
            if (peaksOnX_.length > 0) {

                normalizedSpectrumItem_ = new SpectrumShotItem("analysis");
                normalizedSpectrumItem_.setShot(tmp, SpectrumShotItem.NORMALIZED);

                final HashMap<String, PeakMeritObj> mapOfPeaks = new HashMap<String, PeakMeritObj>();

                // Merge all the identified peak into one object
                // this for loop will set the found value
                for (int i = 0; i < peaksOnX_.length; i++) {
                    double y = tmp.getIntensityFunction().value(peaksOnX_[i]);

                    PeakMeritObj retMeritObj = spectrumAnalyze_.identifiedPeaks(peaksOnX_[i], y, searchRange_);
                    if (retMeritObj != null) {
                        PeakMeritObj meritObjInTheMap = mapOfPeaks.get(retMeritObj.elementName_);
                        if (meritObjInTheMap == null) {
                            // not in the list, add it
                            mapOfPeaks.put(retMeritObj.elementName_, retMeritObj);
                        } else {
                            // in the list already, update it
                            meritObjInTheMap.addWavelength(retMeritObj.getWaveLength());
                            meritObjInTheMap.addTotalPeaksFound(retMeritObj.getTotalPeaksFound());
                            meritObjInTheMap.addTotalLgPeaksFound(retMeritObj.getTotalLgPeaksFound());
                            meritObjInTheMap.addWeight(retMeritObj.getWeight());
                            meritObjInTheMap.addMerit(retMeritObj.getMerit());
                        }
                    }
                }

                //Now we have a list of identified peaks, going to get the element info on the libzlines library
                for (PeakMeritObj obj : mapOfPeaks.values()) {
                    spectrumAnalyze_.getElementLineData(obj);
                }

                SwingUtilities.invokeLater(new Runnable() {
                    @Override
                    public void run() {

                        // Remove all previous data
                        listModel_.removeAllElements();
                        lblSpectrumName_.setText(spectrumShotItem.getName());
                        markers_ = null;
                        peakMeritTableModel_.clearAllData();
                        rejectedPeakMeritTableModel_.clearAllData();

                        callback_.doShowShotXYSeries(normalizedSpectrumItem_);

                        int offset = -1;
                        IntervalMarker[] tmpMarkers = new IntervalMarker[peaksOnX_.length];
                        double min;
                        double max;
                        double tmpPeakFoundPercentage;
                        double tmpLgPeakFoundPercentage;
                        double tmpPeakWeightPercentage;
                        for (PeakMeritObj obj : mapOfPeaks.values()) {
                            for (Object wl : obj.getWaveLength()) {
                                String item = String.format("%.5g, %s", wl, obj.elementName_);
                                listModel_.addElement(item);

                                min = (Double) wl - MARKER_THRESHOLD;
                                max = (Double) wl + MARKER_THRESHOLD;
                                IntervalMarker marker = Util.createMarker(min, max, obj.elementName_);
                                tmpMarkers[++offset] = marker;
                            }

                            if (obj.getTotalLgPeaks() > 0) {
                                tmpPeakFoundPercentage = 100f * obj.getTotalPeaksFound() / obj.getTotalPeaks();
                                tmpLgPeakFoundPercentage = 100f * obj.getTotalLgPeaksFound()
                                        / obj.getTotalLgPeaks();

                            } else {
                                tmpPeakFoundPercentage = 0;
                                tmpLgPeakFoundPercentage = 0;
                            }
                            tmpPeakWeightPercentage = obj.getWeightPercentage();

                            // determine peak accept/reject
                            if (tmpPeakFoundPercentage >= peakFoundPercentage_
                                    && tmpLgPeakFoundPercentage >= lgPeakFoundPercentage_
                                    && tmpPeakWeightPercentage >= peakWeightPercentage_) {
                                peakMeritTableModel_.addRow(obj);
                            } else {
                                rejectedPeakMeritTableModel_.addRow(obj);
                            }
                        }

                        allMarkers_ = Arrays.copyOf(tmpMarkers, offset + 1);
                        callback_.doAddMarker(allMarkers_);
                    }
                });
            }
        }

        @Override
        public void onAfter() {
            mDialog.setVisible(false);
            logger_.info("Spectrum Analysis - done");
        }
    });

}