Example usage for java.util HashMap values

List of usage examples for java.util HashMap values

Introduction

On this page you can find usage examples for the java.util.HashMap values() method.

Prototype

public Collection<V> values() 

Document

Returns a Collection view of the values contained in this map. The collection is backed by the map, so changes to the map are reflected in the collection, and vice versa.
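
Before the project examples, here is a minimal, self-contained sketch illustrating these view semantics (written for this page, not taken from the projects below; the class name ValuesViewDemo is just for illustration):

import java.util.Collection;
import java.util.HashMap;

public class ValuesViewDemo {
    public static void main(String[] args) {
        HashMap<String, Integer> counts = new HashMap<String, Integer>();
        counts.put("a", 1);
        counts.put("b", 2);

        // values() returns a live view, not a copy
        Collection<Integer> values = counts.values();
        System.out.println(values.size()); // 2

        counts.put("c", 3);
        System.out.println(values.size()); // 3 - the view reflects the new entry

        // removing from the view removes the corresponding mapping from the map
        values.remove(Integer.valueOf(1));
        System.out.println(counts.containsKey("a")); // false
    }
}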

Usage

From source file:org.alfresco.services.ModelTracker.java

public void trackModels() throws AuthenticationException, IOException, JSONException {
    // track models
    // reflect changes and update the on-disk copy

    try {
        List<AlfrescoModelDiff> modelDiffs = modelGetter.getModelsDiff(Collections.<AlfrescoModel>emptyList());
        HashMap<String, M2Model> modelMap = new HashMap<String, M2Model>();

        for (AlfrescoModelDiff modelDiff : modelDiffs) {
            switch (modelDiff.getType()) {
            case CHANGED:
                AlfrescoModel changedModel = modelGetter.getModel(modelDiff.getModelName());
                for (M2Namespace namespace : changedModel.getModel().getNamespaces()) {
                    modelMap.put(namespace.getUri(), changedModel.getModel());
                }
                break;
            case NEW:
                AlfrescoModel newModel = modelGetter.getModel(modelDiff.getModelName());
                for (M2Namespace namespace : newModel.getModel().getNamespaces()) {
                    modelMap.put(namespace.getUri(), newModel.getModel());
                }
                break;
            case REMOVED:
                // At the moment we do not unload models - I can see no side effects .... 
                // However search is used to check for references to indexed properties or types
                // This will be partially broken anyway due to eventual consistency
                // A model should only be unloaded if there are no data dependencies
                // Should have been on the de-lucene list.
                break;
            }
        }

        HashSet<String> loadedModels = new HashSet<String>();
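        // iterate over the map's value view to load each new or changed model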
        for (M2Model model : modelMap.values()) {
            loadModel(modelMap, loadedModels, model);
        }

        //        if(loadedModels.size() > 0)
        //        {
        //            this.infoSrv.afterInitModels();
        //        }

        File alfrescoModelDir = new File("alfrescoModels");
        if (!alfrescoModelDir.exists()) {
            alfrescoModelDir.mkdir();
        }
        for (AlfrescoModelDiff modelDiff : modelDiffs) {
            switch (modelDiff.getType()) {
            case CHANGED:
                removeMatchingModels(alfrescoModelDir, modelDiff.getModelName());
                ModelDefinition changedModel = dictionaryDAO.getModel(modelDiff.getModelName());

                //                   M2Model changedModel = dictionaryDAO.getCompiledModel(modelDiff.getModelName()).getM2Model();
                File changedFile = new File(alfrescoModelDir, getModelFileName(changedModel));
                FileOutputStream cos = new FileOutputStream(changedFile);
                //                   changedModel.toXML(cos);
                changedModel.toXML(null, cos);
                cos.flush();
                cos.close();
                break;
            case NEW:
                //                   M2Model newModel = dictionaryDAO.getCompiledModel(modelDiff.getModelName()).getM2Model();
                ModelDefinition newModel = dictionaryDAO.getModel(modelDiff.getModelName());
                // add on file
                File newFile = new File(alfrescoModelDir, getModelFileName(newModel));
                FileOutputStream nos = new FileOutputStream(newFile);
                //                   newModel.toXML(nos);
                newModel.toXML(null, nos);
                nos.flush();
                nos.close();
                break;
            case REMOVED:
                removeMatchingModels(alfrescoModelDir, modelDiff.getModelName());
                break;
            }
        }
    } catch (IOException e) {
        logger.warn("", e);
    }

    //        trackerStats.addModelTime(end-start);

    //        if(true == runPostModelLoadInit)
    //        {
    //            for(Object key : props.keySet())
    //            {
    //                String stringKey = (String)key;
    //                if(stringKey.startsWith("alfresco.index.store"))
    //                {
    //                    StoreRef store = new StoreRef(props.getProperty(stringKey));
    //                    indexedStores.add(store);
    //                }
    //                if(stringKey.startsWith("alfresco.ignore.store"))
    //                {
    //                    StoreRef store = new StoreRef(props.getProperty(stringKey));
    //                    ignoredStores.add(store);
    //                }
    //                if(stringKey.startsWith("alfresco.index.tenant"))
    //                {
    //                    indexedTenants.add(props.getProperty(stringKey));
    //                }
    //                if(stringKey.startsWith("alfresco.ignore.tenant"))
    //                {
    //                    ignoredTenants.add(props.getProperty(stringKey));
    //                }
    //                if(stringKey.startsWith("alfresco.index.datatype"))
    //                {
    //                    QName qname = expandQName(props.getProperty(stringKey));
    //                    indexedDataTypes.add(qname);
    //                }
    //                if(stringKey.startsWith("alfresco.ignore.datatype"))
    //                {
    //                    QName qname = expandQName(props.getProperty(stringKey));
    //                    ignoredDataTypes.add(qname);
    //                }
    //                if(stringKey.startsWith("alfresco.index.type"))
    //                {
    //                    QName qname = expandQName(props.getProperty(stringKey));
    //                    indexedTypes.add(qname);
    //                }
    //                if(stringKey.startsWith("alfresco.ignore.type"))
    //                {
    //                    QName qname = expandQName(props.getProperty(stringKey));
    //                    ignoredTypes.add(qname);
    //                }
    //                if(stringKey.startsWith("alfresco.index.aspect"))
    //                {
    //                    QName qname = expandQName(props.getProperty(stringKey));
    //                    indexedAspects.add(qname);
    //                }
    //                if(stringKey.startsWith("alfresco.ignore.aspect"))
    //                {
    //                    QName qname = expandQName(props.getProperty(stringKey));
    //                    ignoredAspects.add(qname);
    //                }
    //                if(stringKey.startsWith("alfresco.index.field"))
    //                {
    //                    String name = expandName(props.getProperty(stringKey));
    //                    indexedFields.add(name);
    //                }
    //                if(stringKey.startsWith("alfresco.ignore.field"))
    //                {
    //                    String name = expandName(props.getProperty(stringKey));
    //                    ignoredFields.add(name);
    //                }
    //            }
    //            runPostModelLoadInit = false;
    //        }

}

From source file:bbcdataservice.BBCDataService.java

public void updateTvData(final TvDataUpdateManager updateManager, final Channel[] channels,
        final Date startDate, final int days, final ProgressMonitor monitor) throws TvBrowserException {
    // // Check for connection
    // if (!updateManager.checkConnection()) {
    // return;
    // }
    monitor.setMessage(mLocalizer.msg("update", "Updating BBC data"));
    monitor.setMaximum(channels.length);
    int progress = 0;
    for (Channel channel : channels) {
        HashMap<Date, MutableChannelDayProgram> dayPrograms = new HashMap<Date, MutableChannelDayProgram>();
        monitor.setValue(progress++);
        for (int i = 0; i < days; i++) {
            Date date = startDate.addDays(i);
            String year = String.valueOf(date.getYear());
            String month = String.valueOf(date.getMonth());
            String day = String.valueOf(date.getDayOfMonth());
            String schedulePath = "/" + year + "/" + month + "/" + day + ".xml";
            String url = channel.getWebpage() + schedulePath;
            File file = new File(mWorkingDir, "bbc.xml");
            try {
                IOUtilities.download(new URL(url), file);
            } catch (MalformedURLException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            } catch (IOException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            }
            boolean continueWithNextDay = false;
            try {
                continueWithNextDay = BBCProgrammesParser.parse(dayPrograms, file, channel, date);
            } catch (Exception e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            }
            file.delete();
            if (!continueWithNextDay) {
                break;
            }
        }
        // store the received programs
        for (MutableChannelDayProgram dayProgram : dayPrograms.values()) {
            updateManager.updateDayProgram(dayProgram);
        }
    }
}

From source file:org.apache.stratos.usage.api.TenantUsageRetriever.java

public APIManagerUsageStats[] getAPIManagerUsageStats(int tenantId, Calendar startDate, Calendar endDate,
        boolean currentMonth) throws Exception {
    //return the bandwidth usage of a user for a given period
    APIManagerUsageStats[] stats;
    if (currentMonth) {
        //get from daily usage stats
        List<APIManagerUsageStats> bwsList = new ArrayList<APIManagerUsageStats>();
        bwsList = dao.getDailyAPIManagerUsageStats(tenantId, startDate, endDate);

        // next we'll get from the hourly stats to pick up the stats that are not yet
        // summarized into the daily stats table
        Calendar startHour = Calendar.getInstance();
        startHour.set(Calendar.HOUR, 0);
        startHour.set(Calendar.MINUTE, 0);
        startHour.set(Calendar.SECOND, 0);

        Calendar endHour = Calendar.getInstance();

        bwsList.addAll(dao.getHourlyAPIManagerUsageStats(tenantId, startHour, endHour));
        stats = convertAPIStatListToArray(bwsList);

    } else {
        //get from monthly usage stats
        Calendar monthCal = (Calendar) endDate.clone();
        monthCal.set(Calendar.DATE, 0);
        monthCal.set(Calendar.HOUR, 0);
        monthCal.set(Calendar.MINUTE, 0);
        monthCal.set(Calendar.SECOND, 0);

        stats = convertAPIStatListToArray(dao.getMonthlyAPIManagerUsageStats(tenantId, monthCal));
    }
    HashMap<String, APIManagerUsageStats> statMap = new HashMap<String, APIManagerUsageStats>();
    if (stats != null) {
        for (APIManagerUsageStats stat : stats) {
            if (stat.getRequestCount() == 0) {
                continue;
            }
            String serverName = extractServiceNameFromUrl(stat.getServerUrl());
            String statName = stat.getKey();

            HashMap<String, APIManagerUsageStats> statsHashMap;
            if (statName.equals("API-Call")) {
                statsHashMap = statMap;
            } else {
                log.warn("Unable to identify bandwidth name " + statName);
                continue;
            }

            // find whether the map already has this key; if not, insert a new one
            APIManagerUsageStats reqStat = statsHashMap.get(serverName);
            if (reqStat == null) {
                reqStat = new APIManagerUsageStats(serverName);
                statsHashMap.put(serverName, reqStat);
            }
            reqStat.setRequestCount(stat.getRequestCount());
        }
    }

    // Convert the value view to an array and return it
    Collection<APIManagerUsageStats> values = statMap.values();
    return values.toArray(new APIManagerUsageStats[values.size()]);
}

From source file:gov.nih.nci.cabig.caaers.web.admin.UserAjaxFacade.java

/**
 * This method is invoked from user_search.jsp to fetch CSM users for the given search criteria
 * @param searchCriteriaMap
 * @return
 */
@SuppressWarnings("unchecked")
public List<UserAjaxableDomainObject> getUserTable(HashMap searchCriteriaMap) {
    List<UserAjaxableDomainObject> ajaxableUserList = new ArrayList<UserAjaxableDomainObject>();
    if (!StringUtils.equals("person", (String) searchCriteriaMap.get("linkType"))) {

        String name = (String) searchCriteriaMap.get("name");
        String fName = (String) searchCriteriaMap.get("firstName");
        String lName = (String) searchCriteriaMap.get("lastName");
        String uName = (String) searchCriteriaMap.get("userName");

        List<gov.nih.nci.security.authorization.domainobjects.User> csmUserList;
        if (StringUtils.isEmpty(name)) {
            csmUserList = userRepository.searchCsmUser(fName, lName, uName);
        } else {
            csmUserList = userRepository.searchCsmUser(name);
        }

        if (StringUtils.equals("user", (String) searchCriteriaMap.get("linkType"))) {
            if (CollectionUtils.isNotEmpty(csmUserList)) {
                HashMap<String, gov.nih.nci.security.authorization.domainobjects.User> userMap = new HashMap<String, gov.nih.nci.security.authorization.domainobjects.User>();
                for (gov.nih.nci.security.authorization.domainobjects.User csmUser : csmUserList) {
                    userMap.put(csmUser.getLoginName(), csmUser);
                }

                ResearchStaffQuery rsQuery = new ResearchStaffQuery();
                rsQuery.filterByExactLoginId(userMap.keySet().toArray(new String[] {}));
                rsQuery.setFiltered(true);

                List<ResearchStaff> staffs = personRepository.searchLocalResearchStaff(rsQuery);
                for (ResearchStaff rs : staffs) {
                    userMap.remove(rs.getLoginId());
                }

                InvestigatorQuery invQuery = new InvestigatorQuery();
                invQuery.filterByExactLoginId(userMap.keySet().toArray(new String[] {}));
                invQuery.setFiltered(true);
                List<Investigator> investigators = personRepository.searchLocalInvestigator(invQuery);
                for (Investigator inv : investigators) {
                    userMap.remove(inv.getLoginId());
                }
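                // whatever remains in the value view are CSM users with no matching staff or investigator record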
                csmUserList = new ArrayList<gov.nih.nci.security.authorization.domainobjects.User>(
                        userMap.values());
            }

        }

        UserAjaxableDomainObject ajaxableUser = null;
        for (gov.nih.nci.security.authorization.domainobjects.User csmUser : csmUserList) {
            ajaxableUser = new UserAjaxableDomainObject();
            ajaxableUser.setId(csmUser.getUserId().intValue());
            ajaxableUser.setFirstName(csmUser.getFirstName());
            ajaxableUser.setLastName(csmUser.getLastName());
            ajaxableUser.setMiddleName("");
            ajaxableUser.setNumber("");
            ajaxableUser.setExternalId("");
            ajaxableUser.setUserName(csmUser.getLoginName());
            ajaxableUser.setEmailAddress(csmUser.getEmailId());
            ajaxableUser.setRecordType("CSM_RECORD");
            if (csmUser.getEndDate() != null) {
                ajaxableUser.setLocked(
                        DateUtils.compareDate(csmUser.getEndDate(), Calendar.getInstance().getTime()) <= 0);
            }

            ajaxableUserList.add(ajaxableUser);
        }
    }
    return ajaxableUserList;
}

From source file:net.tourbook.tour.photo.TourPhotoManager.java

@Override
public void openTour(final HashMap<Long, TourPhotoReference> tourPhotoReferences) {

    for (final TourPhotoReference ref : tourPhotoReferences.values()) {

        // fire a selection for the first tour
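        // note: HashMap iteration order is unspecified, so "first" here is arbitrary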

        final long tourId = ref.tourId;
        final SelectionTourId selection = new SelectionTourId(tourId);

        PostSelectionProvider.fireSelection(selection);

        break;
    }
}

From source file:org.eurocarbdb.application.glycoworkbench.plugin.SpectraPanel.java

public void addIsotopeCurves(TreeMap<Peak, Collection<Annotation>> annotations) {

    if (theDocument.size() == 0)
        return;

    // remove old curves
    removeIsotopeCurves();

    // add curves
    if (annotations != null) {

        // set renderer
        if (show_all_isotopes) {
            thePlot.setRenderer(1, new StandardXYItemRenderer(StandardXYItemRenderer.SHAPES));
            thePlot.getRenderer(1).setShape(new Ellipse2D.Double(0, 0, 7, 7));
        } else
            thePlot.setRenderer(1, new StandardXYItemRenderer(StandardXYItemRenderer.LINES));

        MSUtils.IsotopeList isotope_list = new MSUtils.IsotopeList(show_all_isotopes);
        for (Map.Entry<Peak, Collection<Annotation>> pa : annotations.entrySet()) {
            Peak p = pa.getKey();
            double[] best_peak = theDocument.getPeakDataAt(current_ind).findNearestPeak(p.getMZ());

            // get compositions
            HashSet<Molecule> compositions = new HashSet<Molecule>();
            for (Annotation a : pa.getValue()) {
                try {
                    compositions.add(a.getFragmentEntry().fragment.computeIon());
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }

            // collect curves for this peak
            HashMap<String, double[][]> all_curves = new HashMap<String, double[][]>();
            for (Molecule m : compositions) {
                try {
                    double[][] data = MSUtils.getIsotopesCurve(1, m, show_all_isotopes);

                    // overlay the distribution with the existing list of isotopes
                    isotope_list.adjust(data, best_peak[0], best_peak[1]);

                    all_curves.put(m.toString(), data);
                } catch (Exception e) {
                    LogUtils.report(e);
                }
            }

            // add average curve for this peak
            if (all_curves.size() > 1) {
                double[][] data = MSUtils.average(all_curves.values(), show_all_isotopes);

                // add the average to the chart
                String name = "average-" + p.getMZ();
                theIsotopesDataset.addSeries(name, data);
                thePlot.getRenderer(1).setSeriesPaint(theIsotopesDataset.indexOf(name), Color.magenta);
                thePlot.getRenderer(1).setSeriesStroke(theIsotopesDataset.indexOf(name), new BasicStroke(2));

                // add the average to the isotope list
                isotope_list.add(data, false);
            } else if (all_curves.size() == 1) {
                // add the only curve to the isotope list
                isotope_list.add(all_curves.values().iterator().next(), false);
            }

            // add the other curves
            for (Map.Entry<String, double[][]> e : all_curves.entrySet()) {
                String name = e.getKey() + "-" + p.getMZ();
                theIsotopesDataset.addSeries(name, e.getValue());
                thePlot.getRenderer(1).setSeriesPaint(theIsotopesDataset.indexOf(name), Color.blue);
            }
        }

    }
    updateIntensityAxis();
}

From source file:org.alfresco.solr.client.SOLRAPIClientTest.java

private void trackModels() throws AuthenticationException, IOException, JSONException {

    List<AlfrescoModelDiff> modelDiffs = client.getModelsDiff(Collections.<AlfrescoModel>emptyList());
    HashMap<String, M2Model> modelMap = new HashMap<String, M2Model>();

    for (AlfrescoModelDiff modelDiff : modelDiffs) {
        switch (modelDiff.getType()) {
        case CHANGED:
            AlfrescoModel changedModel = client.getModel(modelDiff.getModelName());
            for (M2Namespace namespace : changedModel.getModel().getNamespaces()) {
                modelMap.put(namespace.getUri(), changedModel.getModel());
            }
            break;
        case NEW:
            AlfrescoModel newModel = client.getModel(modelDiff.getModelName());
            for (M2Namespace namespace : newModel.getModel().getNamespaces()) {
                modelMap.put(namespace.getUri(), newModel.getModel());
            }
            break;
        case REMOVED:
            // At the moment we do not unload models - I can see no side effects ....
            // However search is used to check for references to indexed properties or types
            // This will be partially broken anyway due to eventual consistency
            // A model should only be unloaded if there are no data dependencies
            // Should have been on the de-lucene list.
            break;
        }
    }

    HashSet<String> loadedModels = new HashSet<String>();
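    // iterate over the map's value view to load each new or changed model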
    for (M2Model model : modelMap.values()) {
        loadModel(modelMap, loadedModels, model);
    }
    if (modelDiffs.size() > 0) {
        afterInitModels();
    }

}

From source file:com.globalsight.machineTranslation.AbstractTranslator.java

/**
 * Translate a batch of segments that contain tags.
 *
 * @param sourceLocale
 * @param targetLocale
 * @param segments
 * @return
 * @throws MachineTranslationException
 */
private String[] translateSegmentsWithTags(Locale sourceLocale, Locale targetLocale, String[] segments)
        throws MachineTranslationException {
    if (sourceLocale == null || targetLocale == null || segments == null || segments.length < 1) {
        return null;
    }

    String[] results = new String[segments.length];

    HashMap<String, String> map = new HashMap<String, String>();
    for (int k = 0; k < segments.length; k++) {
        String[] segmentsFromGxml = MTHelper.getSegmentsInGxml(segments[k]);
        if (segmentsFromGxml == null || segmentsFromGxml.length < 1) {
            results[k] = segments[k];
        } else {
            for (int count = 0; count < segmentsFromGxml.length; count++) {
                String key = String.valueOf(k) + "-" + count;
                map.put(key, segmentsFromGxml[count]);
            }
        }
    }

    // for MS & Google MT(batch translation)
    if (map.size() > 0) {
        // Put all keys into "keysInArray"
        String[] keysInArray = new String[map.keySet().size()];
        Iterator<String> keysInIter = map.keySet().iterator();
        int countKey = 0;
        while (keysInIter.hasNext()) {
            keysInArray[countKey] = (String) keysInIter.next();
            countKey++;
        }

        // Put all values into "valuesInArray"
        String[] valuesInArray = new String[map.values().size()];
        Iterator<String> valuesInIter = map.values().iterator();
        int countValue = 0;
        while (valuesInIter.hasNext()) {
            valuesInArray[countValue] = (String) valuesInIter.next();
            countValue++;
        }

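        // note: pairing keysInArray[i] with valuesInArray[i] assumes keySet() and values()
        // iterate in the same order; HashMap behaves that way in practice, but the spec does
        // not guarantee it - entrySet() would make the pairing explicit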
        // Do batch translation
        String[] translatedSegments = doBatchTranslation(sourceLocale, targetLocale, valuesInArray);

        // Put sub texts back to GXML corresponding positions.
        if (translatedSegments != null) {
            for (int m = 0; m < segments.length; m++) {
                String gxml = segments[m];
                // Retrieve all TextNode that need translate.
                GxmlElement gxmlRoot = MTHelper.getGxmlElement(gxml);
                List items2 = MTHelper.getImmediateAndSubImmediateTextNodes(gxmlRoot);

                int count = 0;
                for (Iterator iter = items2.iterator(); iter.hasNext();) {
                    TextNode textNode = (TextNode) iter.next();

                    for (int n = 0; n < translatedSegments.length; n++) {
                        int dashIndex = keysInArray[n].indexOf("-");
                        int index = -1;
                        int subIndex = -1;
                        if (dashIndex < 0) {
                            index = Integer.parseInt(keysInArray[n]);
                        } else {
                            index = Integer.parseInt(keysInArray[n].substring(0, dashIndex));
                            subIndex = Integer.parseInt(keysInArray[n].substring(dashIndex + 1));
                        }

                        if (index == m && subIndex == count) {
                            textNode.setTextBuffer(new StringBuffer(
                                    translatedSegments[n] == null ? "" : translatedSegments[n]));
                            count++;
                            break;
                        }
                    }
                }

                String finalSegment = gxmlRoot.toGxml();
                results[m] = finalSegment;
            }
        }
    }

    return results;
}

From source file:com.searchcode.app.jobs.repository.IndexGitRepoJob.java

/**
 * Uses the inbuilt git blame (via JGit).
 * TODO this method appears to leak memory like crazy... need to investigate
 * TODO lots of hairy bits in here need tests to capture issues
 */
public List<CodeOwner> getBlameInfo(int codeLinesSize, String repoName, String repoLocations, String fileName) {
    List<CodeOwner> codeOwners = new ArrayList<>(codeLinesSize);
    try {
        // The / part is required due to centos bug for version 1.1.1
        // This appears to be correct
        String repoLoc = repoLocations + "/" + repoName + "/.git";

        Repository localRepository = new FileRepository(new File(repoLoc));
        BlameCommand blamer = new BlameCommand(localRepository);

        ObjectId commitID = localRepository.resolve("HEAD");

        if (commitID == null) {
            Singleton.getLogger().info("getBlameInfo commitID is null for " + repoLoc + " " + fileName);
            return codeOwners;
        }

        BlameResult blame;

        // Somewhere in here appears to be wrong...
        blamer.setStartCommit(commitID);
        blamer.setFilePath(fileName);
        blame = blamer.call();

        // Hail-mary attempt to work around an issue on CentOS: attempt to set the path at all costs
        if (blame == null) { // This one appears to solve the issue so don't remove it
            String[] split = fileName.split("/");
            blamer.setStartCommit(commitID);
            if (split.length != 1) {
                blamer.setFilePath(String.join("/", Arrays.asList(split).subList(1, split.length)));
            }
            blame = blamer.call();
        }
        if (blame == null) {
            String[] split = fileName.split("/");
            blamer.setStartCommit(commitID);
            if (split.length != 1) {
                blamer.setFilePath("/" + String.join("/", Arrays.asList(split).subList(1, split.length)));
            }
            blame = blamer.call();
        }

        if (blame == null) {
            Singleton.getLogger().info("getBlameInfo blame is null for " + repoLoc + " " + fileName);
        }

        if (blame != null) {
            // Get all the owners their number of commits and most recent commit
            HashMap<String, CodeOwner> owners = new HashMap<>();
            RevCommit commit;
            PersonIdent authorIdent;

            try {
                for (int i = 0; i < codeLinesSize; i++) {
                    commit = blame.getSourceCommit(i);
                    authorIdent = commit.getAuthorIdent();

                    if (owners.containsKey(authorIdent.getName())) {
                        CodeOwner codeOwner = owners.get(authorIdent.getName());
                        codeOwner.incrementLines();

                        int timestamp = codeOwner.getMostRecentUnixCommitTimestamp();

                        if (commit.getCommitTime() > timestamp) {
                            codeOwner.setMostRecentUnixCommitTimestamp(commit.getCommitTime());
                        }
                        owners.put(authorIdent.getName(), codeOwner);
                    } else {
                        owners.put(authorIdent.getName(),
                                new CodeOwner(authorIdent.getName(), 1, commit.getCommitTime()));
                    }
                }
            } catch (IndexOutOfBoundsException ex) {
                // Ignore this as it's not really a problem... or is it?
                Singleton.getLogger().info(
                        "IndexOutOfBoundsException when trying to get blame for " + repoName + " " + fileName);
            }

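            // copy the owner map's value view into the result list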
            codeOwners = new ArrayList<>(owners.values());
        }

    } catch (IOException ex) {
        Singleton.getLogger().info("IOException getBlameInfo when trying to get blame for " + repoName + " "
                + fileName + " " + ex.toString());
    } catch (GitAPIException ex) {
        Singleton.getLogger().info("GitAPIException getBlameInfo when trying to get blame for " + repoName + " "
                + fileName + " " + ex.toString());
    } catch (IllegalArgumentException ex) {
        Singleton.getLogger().info("IllegalArgumentException getBlameInfo when trying to get blame for "
                + repoName + " " + fileName + " " + ex.toString());
    }

    System.gc(); // Try to clean up
    return codeOwners;
}

From source file:de.bangl.lm.LotManagerPlugin.java

public void listLots(CommandSender sender) {
    HashMap<String, LotGroup> thegroups = this.lots.getAllLotGroups();
    HashMap<String, Lot> thelots = this.lots.getAllLots();

    if (thegroups.isEmpty()) {
        sendInfo(sender, "No lotgroups defined.");
    }

    if (thelots.isEmpty()) {
        sendInfo(sender, "No lots defined.");
    }

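    // walk each lot group, then list the lots that belong to it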
    for (LotGroup lotgroup : thegroups.values()) {
        sendInfo(sender, lotgroup.getId() + ":");
        for (Lot lot : thelots.values()) {
            if (lotgroup.getId().equals(lot.getGroup().getId())) {
                sendInfo(sender, " - " + lot.getId());
            }
        }
    }
}