Example usage for java.util.HashMap size()

List of usage examples for java.util.HashMap size()

Introduction

On this page you can find example usages of java.util.HashMap.size().

Prototype

public int size()

Document

Returns the number of key-value mappings in this map.
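
A minimal, self-contained sketch of the method's behavior (hypothetical keys and values):

import java.util.HashMap;

public class HashMapSizeDemo {
    public static void main(String[] args) {
        HashMap<String, Integer> map = new HashMap<String, Integer>();
        System.out.println(map.size()); // 0
        map.put("a", 1);
        map.put("b", 2);
        map.put("a", 3); // replacing a value does not change the size
        System.out.println(map.size()); // 2
        map.remove("b");
        System.out.println(map.size()); // 1
    }
}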

Usage

From source file:com.planetmayo.debrief.satc_rcp.views.MaintainContributionsView.java

private void addNewPerformanceScore(double value, List<CompositeRoute> topRoutes) {
    // remember each contribution's set of scores
    HashMap<BaseContribution, HashMap<Date, Double>> stackedSeries = new HashMap<BaseContribution, HashMap<Date, Double>>();

    // remember the times for which we have states
    ArrayList<Date> valueTimes = new ArrayList<Date>();

    // ok - have a look at the scores
    Iterator<CoreRoute> legIter = topRoutes.get(0).getLegs().iterator();
    while (legIter.hasNext()) {
        CoreRoute route = legIter.next();
        Iterator<State> states = route.getStates().iterator();
        while (states.hasNext()) {
            State state = states.next();
            HashMap<BaseContribution, Double> scores = state.getScores();
            Iterator<BaseContribution> contributions = scores.keySet().iterator();
            while (contributions.hasNext()) {
                BaseContribution cont = contributions.next();

                // get the score
                Double score = scores.get(cont);
                if (score > 0) {

                    HashMap<Date, Double> thisSeries = stackedSeries.get(cont);
                    if (thisSeries == null) {
                        thisSeries = new HashMap<Date, Double>();
                        stackedSeries.put(cont, thisSeries);
                        final IBarSeries series = (IBarSeries) performanceChart.getSeriesSet()
                                .createSeries(SeriesType.BAR, cont.getName());
                        series.setBarColor(colorFor(cont));
                        // series.enableStack(true);
                    }
                    thisSeries.put(state.getTime(), scores.get(cont));

                    // store the time of this value
                    if (!valueTimes.contains(state.getTime())) {
                        valueTimes.add(state.getTime());
                    }
                }
            }
        }
    }

    // ok, now loop through the series
    Iterator<BaseContribution> conts = stackedSeries.keySet().iterator();
    while (conts.hasNext()) {
        BaseContribution cont = conts.next();
        HashMap<Date, Double> vals = stackedSeries.get(cont);
        if (vals.size() > 0) {
            final IBarSeries series = (IBarSeries) performanceChart.getSeriesSet().getSeries(cont.getName());

            // ok, we need to produce a value for each value time
            double[] valArr = new double[valueTimes.size()];

            Iterator<Date> iter2 = valueTimes.iterator();
            int ctr = 0;
            while (iter2.hasNext()) {
                Date date = iter2.next();
                Double thisV = vals.get(date);
                final double res;
                if (thisV != null)
                    res = thisV;
                else
                    res = 0;

                valArr[ctr++] = res;
            }

            series.setYSeries(valArr);
            // series.enableStack(true);
        }
    }

    // prepare the category labels
    String[] labels = new String[valueTimes.size()];
    Iterator<Date> vIter = valueTimes.iterator();

    // get our date formatter ready
    SimpleDateFormat sdf = new SimpleDateFormat("HH:mm:ss");
    sdf.setTimeZone(TimeZone.getTimeZone("GMT"));

    // determine the label frequency (at least 1); guard against a zero-width chart
    int wid = performanceChart.getBounds().width;
    int allowed = Math.max(wid / 90, 1);
    int freq = Math.max(labels.length / allowed, 1);

    int ctr = 0;
    while (vIter.hasNext()) {
        Date date = vIter.next();
        final String str;
        if (ctr % freq == 0)
            str = sdf.format(date);
        else
            str = "";
        labels[ctr++] = str;
    }

    // set category labels
    performanceChart.getAxisSet().getXAxis(0).enableCategory(true);
    performanceChart.getAxisSet().getXAxis(0).setCategorySeries(labels);

    ISeries[] series = performanceChart.getSeriesSet().getSeries();
    if (series.length == 2 && series[0] instanceof IBarSeries && series[1] instanceof IBarSeries) {
        performanceChart.getLegend().setVisible(true);
        performanceChart.getLegend().setPosition(SWT.RIGHT);
        IBarSeries barSeries1 = (IBarSeries) series[0];
        IBarSeries barSeries2 = (IBarSeries) series[1];
        // enable stack series
        barSeries1.enableStack(false);
        barSeries2.enableStack(false);
        barSeries1.enableStack(true);
        barSeries2.enableStack(true);

    }

    // and resize the axes
    performanceChart.getAxisSet().adjustRange();

    final String perfString;
    if (value > 200d)
        perfString = "Unachievable";
    else
        perfString = PERFORMANCE_TITLE + (int) value;

    performanceChart.getTitle().setText(perfString);

    performanceChart.redraw();
}
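
The stackedSeries population above uses the classic get-or-create idiom: look up the inner map and, on a miss, create it and register the new chart series. On Java 8+ the lookup half of that idiom can be written more compactly with computeIfAbsent; a minimal sketch with hypothetical keys, independent of the charting code:

import java.util.Date;
import java.util.HashMap;

public class GetOrCreateDemo {
    public static void main(String[] args) {
        HashMap<String, HashMap<Date, Double>> stacked = new HashMap<String, HashMap<Date, Double>>();
        // create the inner series on first access, then record a value
        stacked.computeIfAbsent("contribution-a", k -> new HashMap<Date, Double>()).put(new Date(), 0.5);
        System.out.println(stacked.size()); // 1
    }
}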

From source file:amie.keys.CSAKey.java

/**
 *
 * @param ruleToExtendWith
 * @param ruleToGraphFirstLevel
 * @param ruleToGraphLastLevel
 * @param kb
 */
private void discoverConditionalKeysPerLevel(HashMap<Rule, HashSet<String>> ruleToExtendWith,
        HashMap<Rule, Graph> ruleToGraphFirstLevel, HashMap<Rule, Graph> ruleToGraphLastLevel,
        Set<Rule> output) {
    //System.out.println("discoverConditionalKeysPerLevel()");
    HashMap<Rule, Graph> ruleToGraphThisLevel = new HashMap<>();
    for (Rule currentRule : ruleToExtendWith.keySet()) {
        Graph graph = ruleToGraphLastLevel.get(currentRule);
        //System.out.println("Current rule: " + currentRule+ " Graph:"+graph);
        for (String conditionProperty : ruleToExtendWith.get(currentRule)) {
            if (Utilities.getRelationIds(currentRule, property2Id).last() > property2Id
                    .get(conditionProperty)) {
                Graph currentGraphNew = (Graph) graph.clone();
                Integer propertyId = property2Id.get(conditionProperty);
                HashSet<Integer> propertiesSet = new HashSet<>();
                propertiesSet.add(propertyId);
                Node node = currentGraphNew.createOrGetNode(propertiesSet); //Before it was createNode
                node.toExplore = false;
                Iterable<Rule> conditions = Utilities.getConditions(currentRule, conditionProperty,
                        (int) support, kb);
                for (Rule conditionRule : conditions) {
                    Rule complementaryRule = getComplementaryRule(conditionRule);
                    if (!ruleToGraphFirstLevel.containsKey(complementaryRule)) {
                        // We should never fall in this case
                        for (Rule r : ruleToGraphFirstLevel.keySet()) {
                            System.out.println(r.getDatalogBasicRuleString());
                        }
                        System.out.println(complementaryRule.getDatalogBasicRuleString());
                        System.out.println(complementaryRule + " not found in the first level graph");
                    }

                    Graph complementaryGraphNew = ruleToGraphFirstLevel.get(complementaryRule);
                    //System.out.println("Complementary rule: " + complementaryRule + "\tThread " + Thread.currentThread().getId() + "\t" + complementaryGraphNew);
                    Graph newGraphNew = (Graph) currentGraphNew.clone();
                    HashSet<Integer> conditionProperties = new HashSet<>();
                    conditionProperties.addAll(getRelations(conditionRule, property2Id));
                    conditionProperties.addAll(getRelations(currentRule, property2Id));
                    //System.out.println("currentGraph:"+currentGraphNew);
                    //System.out.println("clone of currentGraph:"+newGraphNew);
                    newGraphNew = mergeGraphs(newGraphNew, complementaryGraphNew, newGraphNew.topGraphNodes(),
                            conditionProperties);
                    //System.out.println("newMergeGraph:"+newGraphNew);
                    discoverConditionalKeysForComplexConditions(newGraphNew, newGraphNew.topGraphNodes(),
                            conditionRule, output);
                    ruleToGraphThisLevel.put(conditionRule, newGraphNew);
                }
            }
        }
    }
    HashMap<Rule, HashSet<String>> newRuleToExtendWith = new HashMap<>();
    for (Rule conRule : ruleToGraphThisLevel.keySet()) {
        Graph newGraphNew = ruleToGraphThisLevel.get(conRule);
        for (Node node : newGraphNew.topGraphNodes()) {
            HashSet<String> properties = new HashSet<>();
            if (node.toExplore) {
                Iterator<Integer> it = node.set.iterator();
                int prop = it.next();
                String propertyStr = id2Property.get(prop);
                properties.add(propertyStr);
            }
            if (properties.size() != 0) {
                newRuleToExtendWith.put(conRule, properties);
            }
        }
    }

    if (newRuleToExtendWith.size() != 0) {
        discoverConditionalKeysPerLevel(newRuleToExtendWith, ruleToGraphFirstLevel, ruleToGraphThisLevel,
                output);
    }

    //System.out.println("discoverConditionalKeysPerLevel()");

}

From source file:com.ipc.service.UploadDocumentService.java

public void saveFileOther(HashMap<String, List<MultipartFile>> fileList, String root_path, UpLoadDocVoOther upv,
        HttpServletRequest request) throws IOException {
    System.out.println();
    String date = ss.getToday(1);

    upv.setResident_registration("resident_registration" + date + "."
            + CreateFileUtils.getFileType(fileList.get("resident_registration").get(0).getOriginalFilename()));
    upv.setCertificate("certificate" + date + "."
            + CreateFileUtils.getFileType(fileList.get("certificate").get(0).getOriginalFilename()));
    upv.setBusiness_license("business_license" + date + "."
            + CreateFileUtils.getFileType(fileList.get("business_license").get(0).getOriginalFilename()));

    CreateFileUtils createFileObj = new CreateFileUtils();
    createFileObj.CreateFile(fileList.get("resident_registration").get(0), request,
            "resources/uploadimgs/uploadDocument/", "resident_registration" + date + "." + CreateFileUtils
                    .getFileType(fileList.get("resident_registration").get(0).getOriginalFilename()));
    createFileObj.CreateFile(fileList.get("certificate").get(0), request,
            "resources/uploadimgs/uploadDocument/", "certificate" + date + "."
                    + CreateFileUtils.getFileType(fileList.get("certificate").get(0).getOriginalFilename()));
    createFileObj.CreateFile(fileList.get("business_license").get(0), request,
            "resources/uploadimgs/uploadDocument/", "business_license" + date + "." + CreateFileUtils
                    .getFileType(fileList.get("business_license").get(0).getOriginalFilename()));

    if (fileList.size() == 4) {
        upv.setSmallsale("smallsale" + date + "."
                + CreateFileUtils.getFileType(fileList.get("smallsale").get(0).getOriginalFilename()));
        createFileObj.CreateFile(fileList.get("smallsale").get(0), request,
                "resources/uploadimgs/uploadDocument/", "smallsale" + date + "."
                        + CreateFileUtils.getFileType(fileList.get("smallsale").get(0).getOriginalFilename()));
    }
    upDao.saveUpLoadDocumentOther(upv);

    HashMap<String, String> map = new HashMap<String, String>();

    if ("Design".equals(upv.getPatent_kind())) { // use equals(), not ==, to compare string contents
        map.put("deid", Integer.toString(upv.getSeq()));
        map.put("iscomplete", "4");
        designmapper.changeIsCompleteByHashMap(map);
    } else if ("Mark".equals(upv.getPatent_kind())) {
        map.put("mid", Integer.toString(upv.getSeq()));
        map.put("iscomplete", "4");
        markmapper.updateIscomplete(map);
    }
}
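
The fileList.size() == 4 test above infers the optional smallsale upload from the map's total entry count, which silently assumes the three mandatory files are always present; containsKey is the more direct probe. A minimal sketch with hypothetical keys:

import java.util.HashMap;

public class OptionalEntryDemo {
    public static void main(String[] args) {
        HashMap<String, String> files = new HashMap<String, String>();
        files.put("resident_registration", "file-1");
        files.put("certificate", "file-2");
        files.put("business_license", "file-3");

        boolean hasOptionalBySize = files.size() == 4;        // indirect and brittle
        boolean hasOptional = files.containsKey("smallsale"); // direct
        System.out.println(hasOptionalBySize + " " + hasOptional); // false false
    }
}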

From source file:com.twitter.pig.backend.hadoop.executionengine.tez.TezJobControlCompiler.java

/**
 * Reads the global counters produced by a job on the group labeled with PIG_MAP_RANK_NAME.
 * It then calculates the cumulative sum: each value is the previous cumulative sum plus
 * the previous global counter value.
 * @param job the job with the global counters collected.
 * @param operationID after being collected on global counters (POCounter),
 * these values are passed via the configuration file to PORank, using the unique
 * operation identifier.
 */
private void saveCounters(Job job, String operationID) {
    Counters counters;
    Group groupCounters;

    Long previousValue = 0L;
    Long previousSum = 0L;
    ArrayList<Pair<String, Long>> counterPairs;

    try {
        counters = HadoopShims.getCounters(job);
        groupCounters = counters.getGroup(getGroupName(counters.getGroupNames()));

        Iterator<Counter> it = groupCounters.iterator();
        HashMap<Integer, Long> counterList = new HashMap<Integer, Long>();

        while (it.hasNext()) {
            try {
                Counter c = it.next();
                counterList.put(Integer.valueOf(c.getDisplayName()), c.getValue());
            } catch (Exception ex) {
                ex.printStackTrace();
            }
        }
        counterSize = counterList.size();
        counterPairs = new ArrayList<Pair<String, Long>>();

        for (int i = 0; i < counterSize; i++) {
            previousSum += previousValue;
            previousValue = counterList.get(Integer.valueOf(i));
            counterPairs.add(new Pair<String, Long>(TezJobControlCompiler.PIG_MAP_COUNTER + operationID
                    + TezJobControlCompiler.PIG_MAP_SEPARATOR + i, previousSum));
        }

        globalCounters.put(operationID, counterPairs);

    } catch (Exception e) {
        String msg = "Error to read counters into Rank operation counterSize " + counterSize;
        throw new RuntimeException(msg, e);
    }
}
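
The counter loop above computes an exclusive prefix sum: entry i receives the total of the values of entries 0 through i-1. A minimal sketch of that arithmetic, independent of the Hadoop counter types and using hypothetical values:

import java.util.HashMap;

public class PrefixSumDemo {
    public static void main(String[] args) {
        HashMap<Integer, Long> counterList = new HashMap<Integer, Long>();
        counterList.put(0, 3L);
        counterList.put(1, 5L);
        counterList.put(2, 2L);

        long previousValue = 0L;
        long previousSum = 0L;
        for (int i = 0; i < counterList.size(); i++) {
            previousSum += previousValue; // prints 0, then 3, then 8
            previousValue = counterList.get(i);
            System.out.println(i + " -> " + previousSum);
        }
    }
}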

From source file:edu.umich.its.lti.google.GoogleLtiServlet.java

/**
 * Actual permission call to Google, running on a separate thread. A HashMap holds the
 * site id and a timestamp. As of 11/04/14, Google takes about 5 minutes to remove
 * permissions for a roster of size 200. While iterating through the roster we check
 * whether 60 minutes have passed since the start; if so, the loop is terminated. Once
 * the permission-removal call is complete, the site id is removed from the HashMap to
 * allow further requests.
 *
 * @param handler
 * @param roster
 * @param tcSessionData
 * @throws Exception
 */
private void removePermissionCallToGoogleOnSeperateThread(FolderPermissionsHandler handler,
        HashMap<String, HashMap<String, String>> roster, TcSessionData tcSessionData) throws Exception {
    long start = System.currentTimeMillis();
    long end = start + SIXTY_MINUTES_IN_MILLI_SEC; //60 * 60 seconds * 1000 ms/sec
    M_log.debug("The duplicateCheckerHashMap while unshare: " + duplicateChecker.toString());
    int numberOfPermissionsRemoved = 0;
    int rosterSize = roster.size();
    for (Entry<String, HashMap<String, String>> entry : roster.entrySet()) {
        String emailAddress = entry.getKey();
        if (System.currentTimeMillis() > end) {
            String content = "The Google call Removal of permission for the roster size: \"" + rosterSize
                    + "\" took more than 60 minutes, this is unusual. The Email Id: \"";
            helperLogMessages(tcSessionData, content, null);
            break;
        }
        if (!getIsEmpty(emailAddress) && !handler.getIsInstructor(emailAddress)) {
            StringBuilder s = new StringBuilder();
            s.append("Removal of permission call to google for user: ");
            s.append(emailAddress);
            s.append(" in site :");
            s.append(tcSessionData.getContextId());
            M_log.debug(s.toString());
            if (handler.removePermission(emailAddress)) {
                numberOfPermissionsRemoved++;
            }
        }

    }
    removeSiteIdFromMap(tcSessionData);
    M_log.info("Number of permissions REMOVED Successfully to site:" + tcSessionData.getContextId() + " is "
            + numberOfPermissionsRemoved + " / " + (rosterSize - 1));
}
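
The roster walk above enforces a wall-clock budget inside the loop rather than interrupting the thread. A minimal sketch of that deadline pattern, with hypothetical map contents:

import java.util.HashMap;
import java.util.Map;

public class DeadlineLoopDemo {
    public static void main(String[] args) {
        HashMap<String, String> roster = new HashMap<String, String>();
        roster.put("alice@example.com", "Learner");
        roster.put("bob@example.com", "Learner");

        long end = System.currentTimeMillis() + 60 * 60 * 1000L; // 60-minute budget
        int processed = 0;
        for (Map.Entry<String, String> entry : roster.entrySet()) {
            if (System.currentTimeMillis() > end) {
                break; // budget exhausted: stop and report the shortfall
            }
            processed++; // the real code would make the remote call here
        }
        System.out.println(processed + " / " + roster.size());
    }
}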

From source file:com.actionbarsherlock.ActionBarSherlock.java

/**
 * Wrap an activity with an action bar abstraction which will enable the
 * use of a custom implementation on platforms where a native version does
 * not exist.
 *
 * @param activity Owning activity.
 * @param flags Option flags to control behavior.
 * @return Instance to interact with the action bar.
 */
public static ActionBarSherlock wrap(Activity activity, int flags) {
    //Create a local implementation map we can modify
    HashMap<Implementation, Class<? extends ActionBarSherlock>> impls = new HashMap<Implementation, Class<? extends ActionBarSherlock>>(
            IMPLEMENTATIONS);
    boolean hasQualfier;

    /* DPI FILTERING */
    hasQualfier = false;
    for (Implementation key : impls.keySet()) {
        //Only honor TVDPI as a specific qualifier
        if (key.dpi() == DisplayMetrics.DENSITY_TV) {
            hasQualfier = true;
            break;
        }
    }
    if (hasQualfier) {
        final boolean isTvDpi = activity.getResources()
                .getDisplayMetrics().densityDpi == DisplayMetrics.DENSITY_TV;
        for (Iterator<Implementation> keys = impls.keySet().iterator(); keys.hasNext();) {
            int keyDpi = keys.next().dpi();
            if ((isTvDpi && keyDpi != DisplayMetrics.DENSITY_TV)
                    || (!isTvDpi && keyDpi == DisplayMetrics.DENSITY_TV)) {
                keys.remove();
            }
        }
    }

    /* API FILTERING */
    hasQualfier = false;
    for (Implementation key : impls.keySet()) {
        if (key.api() != Implementation.DEFAULT_API) {
            hasQualfier = true;
            break;
        }
    }
    if (hasQualfier) {
        final int runtimeApi = Build.VERSION.SDK_INT;
        int bestApi = 0;
        for (Iterator<Implementation> keys = impls.keySet().iterator(); keys.hasNext();) {
            int keyApi = keys.next().api();
            if (keyApi > runtimeApi) {
                keys.remove();
            } else if (keyApi > bestApi) {
                bestApi = keyApi;
            }
        }
        for (Iterator<Implementation> keys = impls.keySet().iterator(); keys.hasNext();) {
            if (keys.next().api() != bestApi) {
                keys.remove();
            }
        }
    }

    if (impls.size() > 1) {
        throw new IllegalStateException("More than one implementation matches configuration.");
    }
    if (impls.isEmpty()) {
        throw new IllegalStateException("No implementations match configuration.");
    }
    Class<? extends ActionBarSherlock> impl = impls.values().iterator().next();
    if (DEBUG)
        Log.i(TAG, "Using implementation: " + impl.getSimpleName());

    try {
        Constructor<? extends ActionBarSherlock> ctor = impl.getConstructor(CONSTRUCTOR_ARGS);
        return ctor.newInstance(activity, flags);
    } catch (NoSuchMethodException e) {
        throw new RuntimeException(e);
    } catch (IllegalArgumentException e) {
        throw new RuntimeException(e);
    } catch (InstantiationException e) {
        throw new RuntimeException(e);
    } catch (IllegalAccessException e) {
        throw new RuntimeException(e);
    } catch (InvocationTargetException e) {
        throw new RuntimeException(e);
    }
}
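
Both filtering passes above use the same mechanics: iterate keySet() with an explicit Iterator so keys.remove() can delete entries in place, then validate the surviving map with size() and isEmpty(). A condensed sketch with hypothetical entries:

import java.util.HashMap;
import java.util.Iterator;

public class FilterInPlaceDemo {
    public static void main(String[] args) {
        HashMap<String, Integer> impls = new HashMap<String, Integer>();
        impls.put("base", 7);
        impls.put("tv", 13);

        for (Iterator<String> keys = impls.keySet().iterator(); keys.hasNext();) {
            if (impls.get(keys.next()) > 10) {
                keys.remove(); // safe in-place removal during iteration
            }
        }
        if (impls.size() > 1) {
            throw new IllegalStateException("More than one implementation matches.");
        }
        if (impls.isEmpty()) {
            throw new IllegalStateException("No implementations match.");
        }
        System.out.println(impls.keySet().iterator().next()); // base
    }
}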

From source file:core.Reconciler.java

public Map<String, LicenseInternal> ProcessInfoInSalesForce(Map<String, LicenseInternal> aInLicenses,
        AtomicLong aInOutRetryInterval) {

    StringBuilder lErrors = new StringBuilder();

    // Initialize retry interval
    aInOutRetryInterval.set(Configuration.getReconcilerRetry());

    System.out.println("Reconciler: Ordering licenses");
    List<LicenseInternal> lOrderedLicences = GetOrderLicenses(aInLicenses);

    HashMap<String, LicenseInternal> lFailedRequests = new HashMap<String, LicenseInternal>();

    List<LicenseInternal> lCurrentBatch = new ArrayList<LicenseInternal>();
    int lCurrentBatchSize = 0;
    int lCurrentApiCalls = 0;
    for (LicenseInternal lLicense : lOrderedLicences) {

        if (lCurrentApiCalls >= SALES_FORCE_API_MAX_COUNT) {

            System.out.println("Reconciler: Already called SFDC 5 times together, saving the licence "
                    + lLicense.GetKey() + " for next retry");
            // Don't make more than 5 calls in 30 mins
            lFailedRequests.put(lLicense.GetKey(), lLicense);
            aInOutRetryInterval.set(SALES_FORCE_API_RETRY_INTERVAL);
            continue;
        }

        lCurrentBatchSize++;
        lCurrentBatch.add(lLicense);
        // System.out.println("Reconciler: Adding licence " +
        // lLicense.GetKey() + " for posting to SFDC");

        if (lCurrentBatchSize >= SALES_FORCE_API_BATCH) {
            // Post to Sales Force
            PostToSalesForce(lCurrentBatch, lFailedRequests, aInLicenses, true, lErrors);
            lCurrentApiCalls++;
            lCurrentBatch = new ArrayList<LicenseInternal>();
            lCurrentBatchSize = 0;
        }
    }

    if ((lCurrentApiCalls < SALES_FORCE_API_MAX_COUNT) && (lCurrentBatch.size() > 0)) {

        PostToSalesForce(lCurrentBatch, lFailedRequests, aInLicenses, true, lErrors);
    }

    if (lFailedRequests.size() > 0) {
        // Send an email with the alert
        SendGridClient.sendEmailv2("License Sync Errors", "Failed to update " + lFailedRequests.size()
                + " licenses in sales force, check UI for details");
    }

    return lFailedRequests;
}
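
The batching above flushes every SALES_FORCE_API_BATCH licenses and counts calls against the API cap; note that lCurrentBatchSize always mirrors lCurrentBatch.size(), so the separate counter is redundant. A condensed sketch of the flush pattern, with a hypothetical batch size:

import java.util.ArrayList;
import java.util.List;

public class BatchFlushDemo {
    static final int BATCH_SIZE = 2; // hypothetical

    public static void main(String[] args) {
        List<String> batch = new ArrayList<String>();
        for (String item : new String[] { "a", "b", "c", "d", "e" }) {
            batch.add(item);
            if (batch.size() >= BATCH_SIZE) {
                System.out.println("posting " + batch);
                batch = new ArrayList<String>();
            }
        }
        if (!batch.isEmpty()) {
            System.out.println("posting remainder " + batch); // final partial batch
        }
    }
}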

From source file:com.sfs.whichdoctor.dao.RelationshipDAOImpl.java

/**
 * Iterate through the existing relationships and determine what differs from
 * the updated ones.
 *
 * @param existingRelationships the existing relationships
 * @param updatedRelationships the updated relationships
 * @return a text summary of the relationships created and deleted
 */
private String update(final Collection<RelationshipBean> existingRelationships,
        final Collection<RelationshipBean> updatedRelationships) {

    HashMap<String, RelationshipBean> existingMap = new HashMap<String, RelationshipBean>();
    HashMap<String, RelationshipBean> updatedMap = new HashMap<String, RelationshipBean>();

    for (RelationshipBean relationship : existingRelationships) {
        final String key = buildKey(relationship);
        dataLogger.debug("Adding existing relationship: " + key);
        existingMap.put(key, relationship);
    }
    for (RelationshipBean relationship : updatedRelationships) {
        final String key = buildKey(relationship);
        dataLogger.debug("Adding updated relationship: " + key);
        updatedMap.put(key, relationship);
    }

    dataLogger.debug("Existing map size: " + existingMap.size());
    dataLogger.debug("Updated map size: " + updatedMap.size());

    final StringBuffer summary = new StringBuffer();

    for (String key : existingMap.keySet()) {
        dataLogger.debug("Existing key: " + key);
        // No need to update relationships that have not changed
        if (!updatedMap.containsKey(key)) {
            // The updated map does not contain this entry
            // Delete this relationship reference
            final RelationshipBean relationship = existingMap.get(key);
            try {
                delete(relationship);

                // Load the person
                final PersonBean person = this.personDAO.loadGUID(relationship.getGUID());

                final StringBuffer info = new StringBuffer();

                info.append("Deleted a relationship to ");
                info.append(person.getPersonIdentifier());
                info.append(". Person ");
                info.append(relationship.getIdentifier());
                info.append(" was their ");
                info.append(relationship.getRelationshipClass());
                info.append(" - ");
                info.append(relationship.getRelationshipType());

                dataLogger.info(info.toString());
                summary.append(info.toString() + "\n");

            } catch (WhichDoctorDaoException wde) {
                dataLogger.error("Error deleting relationship: " + wde.getMessage());
            }
        }
    }
    for (String key : updatedMap.keySet()) {
        dataLogger.debug("Updated key: " + key);
        // No need to update relationships that have not changed
        if (!existingMap.containsKey(key)) {
            // The relationship does not exist - create the relationship
            RelationshipBean relationship = updatedMap.get(key);
            try {
                create(relationship);

                // Load the person
                final PersonBean person = this.personDAO.loadGUID(relationship.getGUID());

                final StringBuffer info = new StringBuffer();

                info.append("Created a relationship to ");
                info.append(person.getPersonIdentifier());
                info.append(". Person ");
                info.append(relationship.getIdentifier());
                info.append(" is their ");
                info.append(relationship.getRelationshipClass());
                info.append(" - ");
                info.append(relationship.getRelationshipType());

                dataLogger.info(info.toString());
                summary.append(info.toString() + "\n");

            } catch (WhichDoctorDaoException wde) {
                dataLogger.error("Error creating relationship: " + wde.getMessage());
            }
        }
    }
    return summary.toString();
}
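
The method above is the standard two-map diff: keys present only in the existing map are deletions, keys present only in the updated map are creations, and keys in both are left untouched. A minimal sketch of the pattern with plain strings:

import java.util.HashMap;

public class MapDiffDemo {
    public static void main(String[] args) {
        HashMap<String, String> existing = new HashMap<String, String>();
        HashMap<String, String> updated = new HashMap<String, String>();
        existing.put("a", "old");
        existing.put("b", "same");
        updated.put("b", "same");
        updated.put("c", "new");

        for (String key : existing.keySet()) {
            if (!updated.containsKey(key)) {
                System.out.println("delete " + key); // only in existing: delete a
            }
        }
        for (String key : updated.keySet()) {
            if (!existing.containsKey(key)) {
                System.out.println("create " + key); // only in updated: create c
            }
        }
    }
}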

From source file:org.eurocarbdb.application.glycoworkbench.plugin.SpectraPanel.java

public void addIsotopeCurves(TreeMap<Peak, Collection<Annotation>> annotations) {

    if (theDocument.size() == 0)
        return;

    // remove old curves
    removeIsotopeCurves();

    // add curves
    if (annotations != null) {

        // set renderer
        if (show_all_isotopes) {
            thePlot.setRenderer(1, new StandardXYItemRenderer(StandardXYItemRenderer.SHAPES));
            thePlot.getRenderer(1).setShape(new Ellipse2D.Double(0, 0, 7, 7));
        } else
            thePlot.setRenderer(1, new StandardXYItemRenderer(StandardXYItemRenderer.LINES));

        MSUtils.IsotopeList isotope_list = new MSUtils.IsotopeList(show_all_isotopes);
        for (Map.Entry<Peak, Collection<Annotation>> pa : annotations.entrySet()) {
            Peak p = pa.getKey();
            double[] best_peak = theDocument.getPeakDataAt(current_ind).findNearestPeak(p.getMZ());

            // get compositions
            HashSet<Molecule> compositions = new HashSet<Molecule>();
            for (Annotation a : pa.getValue()) {
                try {
                    compositions.add(a.getFragmentEntry().fragment.computeIon());
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }

            // collect curves for this peak
            HashMap<String, double[][]> all_curves = new HashMap<String, double[][]>();
            for (Molecule m : compositions) {
                try {
                    double[][] data = MSUtils.getIsotopesCurve(1, m, show_all_isotopes);

                    // overlay the distribution with the existing list of isotopes
                    isotope_list.adjust(data, best_peak[0], best_peak[1]);

                    all_curves.put(m.toString(), data);
                } catch (Exception e) {
                    LogUtils.report(e);
                }
            }

            // add average curve for this peak
            if (all_curves.size() > 1) {
                double[][] data = MSUtils.average(all_curves.values(), show_all_isotopes);

                // add the average to the chart
                String name = "average-" + p.getMZ();
                theIsotopesDataset.addSeries(name, data);
                thePlot.getRenderer(1).setSeriesPaint(theIsotopesDataset.indexOf(name), Color.magenta);
                thePlot.getRenderer(1).setSeriesStroke(theIsotopesDataset.indexOf(name), new BasicStroke(2));

                // add the average to the isotope list
                isotope_list.add(data, false);
            } else if (all_curves.size() == 1) {
                // add the only curve to the isotope list
                isotope_list.add(all_curves.values().iterator().next(), false);
            }

            // add the other curves
            for (Map.Entry<String, double[][]> e : all_curves.entrySet()) {
                String name = e.getKey() + "-" + p.getMZ();
                theIsotopesDataset.addSeries(name, e.getValue());
                thePlot.getRenderer(1).setSeriesPaint(theIsotopesDataset.indexOf(name), Color.blue);
            }
        }

    }
    updateIntensityAxis();
}

From source file:com.globalsight.connector.mindtouch.util.MindTouchHelper.java

@SuppressWarnings("rawtypes")
public String handleFiles(String pageId, String content, String targetLocale, String sourceLocale,
        MindTouchPageInfo pageInfo) throws DocumentException {
    String filesXml = getPageFiles(pageId);
    if (StringUtil.isEmpty(filesXml)) {
        return content;
    }

    HashMap<String, String> fileMap = new HashMap<String, String>();
    Document doc = getDocument(filesXml);
    List propertyNodes = doc.selectNodes("//contents ");
    Iterator it = propertyNodes.iterator();
    String sourceFileUrl = null;
    while (it.hasNext()) {
        Element propertyNode = (Element) it.next();
        sourceFileUrl = propertyNode.attributeValue("href");
        String filePath = getPageFile(sourceFileUrl);
        if (filePath != null) {
            fileMap.put(sourceFileUrl, filePath);
        }
    }

    if (fileMap.size() > 0) {
        for (String tempSourceFileUrl : fileMap.keySet()) {
            String fileXml = putPageFile(fileMap.get(tempSourceFileUrl), targetLocale, sourceLocale, pageInfo);
            if (StringUtil.isNotEmpty(fileXml)) {
                doc = getDocument(fileXml);
                propertyNodes = doc.selectNodes("//contents ");
                it = propertyNodes.iterator();
                while (it.hasNext()) {
                    Element propertyNode = (Element) it.next();
                    String targetFileUrl = propertyNode.attributeValue("href");
                    // overwriting the value of an existing key is not a structural
                    // modification, so this is safe during keySet() iteration
                    fileMap.put(tempSourceFileUrl, targetFileUrl);
                }
            }
        }

        for (String tempSourceFileUrl : fileMap.keySet()) {
            content = StringUtil.replace(content, tempSourceFileUrl, fileMap.get(tempSourceFileUrl));
        }
    }

    return content;
}
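
The final loop applies fileMap as a set of string replacements once the earlier loop has overwritten each source URL's value with its target URL. A minimal sketch of that replace-by-map pattern, with hypothetical URLs:

import java.util.HashMap;

public class ReplaceByMapDemo {
    public static void main(String[] args) {
        HashMap<String, String> urlMap = new HashMap<String, String>();
        urlMap.put("http://host/a.png", "http://host/a-de.png"); // hypothetical mapping

        String content = "<img src=\"http://host/a.png\"/>";
        for (String source : urlMap.keySet()) {
            content = content.replace(source, urlMap.get(source));
        }
        System.out.println(content); // <img src="http://host/a-de.png"/>
    }
}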