Example usage for java.util LinkedHashMap size

Introduction

This page collects example usages of java.util.LinkedHashMap.size() from open-source projects.

Prototype

int size();

Document

Returns the number of key-value mappings in this map.
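
As a minimal sketch of that behavior (class and variable names are ours, not from the projects below): size() reflects the current number of mappings, so overwriting an existing key does not grow the map.

import java.util.LinkedHashMap;

public class SizeDemo {
    public static void main(String[] args) {
        LinkedHashMap<String, Integer> map = new LinkedHashMap<>();
        System.out.println(map.size()); // 0 for an empty map

        map.put("a", 1);
        map.put("b", 2);
        map.put("a", 3);                // overwrites "a"; size stays the same
        System.out.println(map.size()); // 2

        map.remove("b");
        System.out.println(map.size()); // 1
    }
}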

Usage

From source file:dm_p2.DBSCAN.java

public static void internalValidation(LinkedHashMap<Integer, List<Double>> linkedMap,
        Map<Integer, Integer> ourMap) {
    int size = linkedMap.size();
    int n = ((size - 1) * (size)) / 2;

    double incidenceMatrix[] = new double[n];
    double distanceMatrix[] = new double[n];
    int k = 0;
    for (int i = 1; i <= size; i++) {
        for (int j = i + 1; j <= size; j++) {
            if (ourMap.get(i).equals(ourMap.get(j))) { // boxed Integers: compare with equals(), not ==
                incidenceMatrix[k] = 1;
            } else {
                incidenceMatrix[k] = 0;
            }

            distanceMatrix[k] = euclidianDistance(linkedMap.get(i), linkedMap.get(j));
            k++;
        }
    }
    PearsonsCorrelation pc = new PearsonsCorrelation();

    System.out.println(
            "Internal Index Validation : Correlation = " + pc.correlation(incidenceMatrix, distanceMatrix));

}
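
The example above uses size() to dimension condensed pairwise matrices: k points have k*(k-1)/2 unordered pairs. A standalone sketch of that sizing step (the point data is made up for illustration):

import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;

public class PairCountDemo {
    public static void main(String[] args) {
        LinkedHashMap<Integer, List<Double>> points = new LinkedHashMap<>();
        points.put(1, Arrays.asList(0.0, 0.0));
        points.put(2, Arrays.asList(1.0, 0.0));
        points.put(3, Arrays.asList(0.0, 1.0));

        int size = points.size();
        int pairs = (size * (size - 1)) / 2; // unordered pairs of distinct points
        System.out.println(pairs);           // 3
    }
}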

From source file:com.cburch.logisim.gui.main.SelectionAttributes.java

private static boolean isSame(LinkedHashMap<Attribute<Object>, Object> attrMap, Attribute<?>[] oldAttrs,
        Object[] oldValues) {
    if (oldAttrs.length != attrMap.size()) {
        return false;
    } else {
        int j = -1;
        for (Map.Entry<Attribute<Object>, Object> entry : attrMap.entrySet()) {
            j++;

            Attribute<Object> a = entry.getKey();
            if (!oldAttrs[j].equals(a) || j >= oldValues.length)
                return false;
            Object ov = oldValues[j];
            Object nv = entry.getValue();
            if (ov == null ? nv != null : !ov.equals(nv))
                return false;
        }
        return true;
    }
}
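
Here size() provides an O(1) fast path: if the map and the arrays differ in length they cannot be equal, so the per-entry comparison is skipped entirely. A self-contained sketch of the same pattern (the helper and its names are ours):

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Objects;

public class SameLengthDemo {
    // Hypothetical helper: a map matches parallel key/value arrays only if the
    // lengths agree and every entry matches in iteration order.
    static boolean matches(LinkedHashMap<String, Object> map, String[] keys, Object[] values) {
        if (map.size() != keys.length || map.size() != values.length) {
            return false; // size() makes the length check cheap
        }
        int i = 0;
        for (Map.Entry<String, Object> e : map.entrySet()) {
            if (!keys[i].equals(e.getKey()) || !Objects.equals(values[i], e.getValue())) {
                return false;
            }
            i++;
        }
        return true;
    }

    public static void main(String[] args) {
        LinkedHashMap<String, Object> map = new LinkedHashMap<>();
        map.put("width", 8);
        System.out.println(matches(map, new String[] { "width" }, new Object[] { 8 })); // true
        System.out.println(matches(map, new String[] { "width" }, new Object[] { 9 })); // false
    }
}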

From source file:hudson.plugins.parameterizedtrigger.ParameterizedTriggerUtils.java

public static ParametersAction mergeParameters(ParametersAction base, ParametersAction overlay) {
    LinkedHashMap<String, ParameterValue> params = new LinkedHashMap<String, ParameterValue>();
    for (ParameterValue param : base.getParameters())
        params.put(param.getName(), param);
    for (ParameterValue param : overlay.getParameters())
        params.put(param.getName(), param);
    return new ParametersAction(params.values().toArray(new ParameterValue[params.size()]));
}
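
Two properties of LinkedHashMap make this merge work: re-putting a key overwrites its value while keeping its original position, and size() supplies the exact length for the toArray call. A minimal sketch with made-up parameter names:

import java.util.LinkedHashMap;

public class MergeDemo {
    public static void main(String[] args) {
        LinkedHashMap<String, String> params = new LinkedHashMap<>();
        params.put("env", "prod");   // base value
        params.put("user", "alice"); // base value
        params.put("env", "test");   // overlay wins, position of "env" is kept

        // size() pre-sizes the target array exactly
        String[] merged = params.values().toArray(new String[params.size()]);
        System.out.println(merged.length); // 2
        System.out.println(merged[0]);     // "test" -- overlay value in the base slot
    }
}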

From source file:com.concursive.connect.web.modules.communications.utils.EmailUpdatesUtils.java

public static String getEmailHTMLMessage(Connection db, ApplicationPrefs prefs, Configuration configuration,
        EmailUpdatesQueue queue, Timestamp min, Timestamp max) throws Exception {
    // User who needs to be sent an email
    User user = UserUtils.loadUser(queue.getEnteredBy());

    Project website = ProjectUtils.loadProject("main-profile");
    int emailUpdatesSchedule = -1;
    String title = "";
    if (queue.getScheduleOften()) {
        emailUpdatesSchedule = TeamMember.EMAIL_OFTEN;
        title = website.getTitle() + " recent updates";
    } else if (queue.getScheduleDaily()) {
        emailUpdatesSchedule = TeamMember.EMAIL_DAILY;
        title = website.getTitle() + " daily update";
    } else if (queue.getScheduleWeekly()) {
        emailUpdatesSchedule = TeamMember.EMAIL_WEEKLY;
        title = website.getTitle() + " weekly update";
    } else if (queue.getScheduleMonthly()) {
        emailUpdatesSchedule = TeamMember.EMAIL_MONTHLY;
        title = website.getTitle() + " monthly update";
    }
    if (emailUpdatesSchedule == -1) {
        //unexpected case; throw exception
        throw new Exception("The queue does not have a valid schedule type!");
    }
    if (URLFactory.createURL(prefs.getPrefs()) == null) {
        throw new Exception(
                "The server URL is not specified. Please contact the system administrator to configure the remote server's URL!");
    }

    // Populate the message template
    freemarker.template.Template template = configuration
            .getTemplate("scheduled_activity_updates_email_body-html.ftl");
    Map bodyMappings = new HashMap();
    bodyMappings.put("title", title);
    bodyMappings.put("link", new HashMap());
    ((Map) bodyMappings.get("link")).put("settings",
            URLFactory.createURL(prefs.getPrefs()) + "/show/" + user.getProfileUniqueId());

    // Load the RSS config file to determine the objects for display
    String fileName = "scheduled_emails_en_US.xml";
    URL resource = EmailUpdatesUtils.class.getResource("/" + fileName);
    LOG.debug("Schedule emails config file: " + resource.toString());
    XMLUtils library = new XMLUtils(resource);

    String purpose = prefs.get(ApplicationPrefs.PURPOSE);
    LOG.debug("Purpose: " + purpose);
    Element emailElement = XMLUtils.getElement(library.getDocumentElement(), "email", "events",
            "site," + purpose);
    if (emailElement == null) {
        emailElement = XMLUtils.getElement(library.getDocumentElement(), "email", "events", "site");
    }
    if (emailElement != null) {
        LinkedHashMap categories = new LinkedHashMap();

        PagedListInfo info = new PagedListInfo();
        String limit = emailElement.getAttribute("limit");
        info.setItemsPerPage(limit);
        int activityCount = 0;

        //Determine the website's site-chatter data to email for this user (if any)
        ProjectHistoryList chatter = new ProjectHistoryList();
        chatter.setProjectId(website.getId());
        chatter.setLinkObject(ProjectHistoryList.SITE_CHATTER_OBJECT);
        chatter.setRangeStart(min);
        chatter.setRangeEnd(max);
        chatter.setPagedListInfo(info);
        chatter.setForMemberEmailUpdates(user.getId(), emailUpdatesSchedule);
        LinkedHashMap map = chatter.getList(db, user.getId(), URLFactory.createURL(prefs.getPrefs()));
        activityCount += map.size();
        if (map.size() != 0) {
            categories.put("Chatter", map);
        }

        // Determine the types of events to display based on the config file
        ArrayList<Element> eventElements = new ArrayList<Element>();
        XMLUtils.getAllChildren(emailElement, "event", eventElements);

        // set all the requested types based on the types we allow for the query..
        ArrayList<String> types = new ArrayList<String>();
        for (Element eventElement : eventElements) {
            String type = XMLUtils.getNodeText(eventElement);
            types.add(type);
        }
        LOG.debug("Event Types: " + types);
        // Load the categories
        ProjectCategoryList categoryList = new ProjectCategoryList();
        categoryList.setTopLevelOnly(true);
        categoryList.setEnabled(Constants.TRUE);
        categoryList.buildList(db);

        for (ProjectCategory category : categoryList) {
            ProjectHistoryList activities = new ProjectHistoryList();
            activities.setProjectCategoryId(category.getId());
            activities.setRangeStart(min);
            activities.setRangeEnd(max);
            activities.setObjectPreferences(types);
            activities.setPagedListInfo(info);
            activities.setForMemberEmailUpdates(user.getId(), emailUpdatesSchedule);
            LinkedHashMap activityMap = activities.getList(db, user.getId(),
                    URLFactory.createURL(prefs.getPrefs()));
            activityCount += activityMap.size();
            if (activityMap.size() != 0) {
                categories.put(category.getLabel(), activityMap);
            }
        }

        if (activityCount == 0) {
            //Don't send an email update
            return null;
        }
        bodyMappings.put("categories", categories);
    }

    // Parse and return
    StringWriter emailBodyTextWriter = new StringWriter();
    template.process(bodyMappings, emailBodyTextWriter);

    return emailBodyTextWriter.toString();
}
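
The method above accumulates size() across several result maps and keeps only the non-empty ones, so an overall count of zero means no email is sent. The same pattern in isolation (the category names are invented):

import java.util.LinkedHashMap;

public class AccumulateDemo {
    public static void main(String[] args) {
        LinkedHashMap<String, LinkedHashMap<String, String>> categories = new LinkedHashMap<>();

        LinkedHashMap<String, String> chatter = new LinkedHashMap<>();
        chatter.put("post-1", "hello");
        LinkedHashMap<String, String> news = new LinkedHashMap<>(); // stays empty

        int activityCount = 0;
        activityCount += chatter.size();
        if (chatter.size() != 0) {
            categories.put("Chatter", chatter);
        }
        activityCount += news.size();
        if (news.size() != 0) {
            categories.put("News", news); // skipped: nothing to report
        }

        System.out.println(activityCount);     // 1
        System.out.println(categories.size()); // 1 -- only non-empty categories kept
    }
}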

From source file:org.trnltk.tokenizer.TextTokenizerCorpusTest.java

protected static void createTokenizedFile(TextTokenizer tokenizer, File sentencesFile, File tokenizedFile,
        File errorFile, boolean silent, TokenizationCommandCallback tokenizationCommandCallback)
        throws IOException {
    int N = 10000; // report progress to the callback every N lines

    final StopWatch tokenizationStopWatch = new StopWatch();
    tokenizationStopWatch.start();
    tokenizationStopWatch.suspend();

    //        final BufferedReader lineReader = Files.newReader(sentencesFile, Charsets.UTF_8);       // don't read the file into the memory
    //        final int lineCount = lineCount(sentencesFile);     // I want to know this in advance to make a ETA statement

    final List<String> sentences = Files.readLines(sentencesFile, Charsets.UTF_8);
    final int lineCount = sentences.size();

    if (!silent)
        System.out.println("Number of lines in the file : " + lineCount);

    final BufferedWriter tokensWriter = Files.newWriter(tokenizedFile, Charsets.UTF_8);
    final PrintWriter errorWriter = errorFile != null
            ? new PrintWriter(Files.newWriter(errorFile, Charsets.UTF_8))
            : new PrintWriter(System.out);

    int numberOfLinesInError = 0;
    int tokenCount = 0;
    try {
        //            for (Iterator<String> iterator = sentences.iterator(); iterator.hasNext(); ) {
        //              String sentence = iterator.next();
        int index;
        for (index = 0; index < sentences.size(); index++) {
            final String sentence = sentences.get(index);
            if (!silent && index > 0 && index % 10000 == 0) { // index > 0: the ETA math below divides by index
                System.out.println("Tokenizing line #" + index);
                final long totalTimeSoFar = tokenizationStopWatch.getTime();
                final double avgTimeForALine = Long.valueOf(totalTimeSoFar).doubleValue() / index;
                final double remainingTimeEstimate = avgTimeForALine * (lineCount - index);
                System.out.println("For file --> ETA : "
                        + DurationFormatUtils.formatDurationHMS((long) remainingTimeEstimate) + " So far : "
                        + tokenizationStopWatch.toString());
            }
            if (tokenizationCommandCallback != null && index % N == 0) {
                tokenizationCommandCallback.reportProgress(N);
            }
            tokenizationStopWatch.resume();
            final Iterable<Token> tokens;
            try {
                tokens = tokenizer.tokenize(sentence);
            } catch (Exception e) {
                // skip the line
                numberOfLinesInError++;
                e.printStackTrace(errorWriter);
                errorWriter.println();
                tokenizationStopWatch.suspend();
                continue;
            }
            tokenizationStopWatch.suspend();
            final Iterator<Token> tokensIterator = tokens.iterator();
            while (tokensIterator.hasNext()) {
                final Token token = tokensIterator.next();
                tokensWriter.write(token.getSurface());
                tokenCount++;
                if (tokensIterator.hasNext())
                    tokensWriter.write(" ");
            }
            tokensWriter.write("\n");
        }
        if (tokenizationCommandCallback != null) {
            //report the lines since last report
            tokenizationCommandCallback.reportProgress(index % N);
        }

    } finally {
        tokensWriter.close();
        errorWriter.close();
    }

    tokenizationStopWatch.stop();

    if (!silent) {
        System.out.println("Tokenized " + lineCount + " lines.");
        System.out.println("Found " + tokenCount + " tokens.");
        System.out.println("Avg time for tokenizing a line : "
                + Double.valueOf(tokenizationStopWatch.getTime()) / Double.valueOf(lineCount) + " ms");
        System.out.println("\tProcessed : "
                + Double.valueOf(lineCount) / Double.valueOf(tokenizationStopWatch.getTime()) * 1000d
                + " lines in a second");
        System.out.println("Avg time for generating a token : "
                + Double.valueOf(tokenizationStopWatch.getTime()) / Double.valueOf(tokenCount) + " ms");
        System.out.println("\tProcessed : "
                + Double.valueOf(tokenCount) / Double.valueOf(tokenizationStopWatch.getTime()) * 1000d
                + " tokens in a second");

        final TextTokenizer.TextTokenizerStats stats = tokenizer.getStats();

        if (stats != null) {
            final LinkedHashMap<Pair<TextBlockTypeGroup, TextBlockTypeGroup>, Integer> successMap = stats
                    .buildSortedSuccessMap();
            System.out.println("Used " + successMap.size() + " distinct rules");

            final LinkedHashMap<Pair<TextBlockTypeGroup, TextBlockTypeGroup>, Set<MissingTokenizationRuleException>> failMap = stats
                    .buildSortedFailMap();
            System.out.println("Couldn't find a rule for " + failMap.size() + " distinct specs");
            System.out.println("Printing missing rules with occurrence count:");

            int countOfMissing = 0;
            for (Map.Entry<Pair<TextBlockTypeGroup, TextBlockTypeGroup>, Set<MissingTokenizationRuleException>> entry : failMap
                    .entrySet()) {
                final Pair<TextBlockTypeGroup, TextBlockTypeGroup> theCase = entry.getKey();
                final Set<MissingTokenizationRuleException> exceptionsForCase = entry.getValue();
                countOfMissing += exceptionsForCase.size();
                System.out.println("\t" + theCase + "\t" + exceptionsForCase.size());
                int i = 0;
                for (MissingTokenizationRuleException ex : exceptionsForCase) {
                    final String message = ex.getMessage().replace("\t", "\t\t\t");
                    final String contextStr = "..." + ex.getContextBlockGroup().getText() + "...";

                    System.out.println("\t\t" + contextStr + "\n\t\t" + message);
                    if (i == 2) //print only 3 messages for each case
                        break;
                    i++;
                }
            }

            System.out.println("Couldn't find a rule in a total of " + countOfMissing + " times");
        }
    }

    if (tokenizationCommandCallback != null) {
        tokenizationCommandCallback.reportFileFinished(tokenCount, numberOfLinesInError);
    }
}

From source file:org.gumtree.vis.mask.ChartMaskingUtilities.java

public static void drawText(Graphics2D g2, Rectangle2D imageArea,
        LinkedHashMap<Rectangle2D, String> textContentMap, JFreeChart chart) {
    if (textContentMap == null || textContentMap.size() == 0) {
        return;
    }
    //      for (Entry<Rectangle2D, String> textEntry : textMap.entrySet()) {
    //         Rectangle2D rect = textEntry.getKey();
    //         String text = textEntry.getValue();
    //         drawText(g2, imageArea, rect, text, chart);
    //      }
    Color oldColor = g2.getColor();
    g2.setColor(Color.BLACK);
    for (Entry<Rectangle2D, String> entry : textContentMap.entrySet()) {
        Rectangle2D rect = entry.getKey();
        Point2D screenPoint = ChartMaskingUtilities
                .translateChartPoint(new Point2D.Double(rect.getX(), rect.getY()), imageArea, chart);
        String text = entry.getValue();
        if (text == null) {
            continue;
        }
        String[] lines = text.split("\n");
        g2.setColor(Color.BLACK);
        for (int i = 0; i < lines.length; i++) {
            g2.drawString(lines[i], (int) screenPoint.getX() + 3, (int) screenPoint.getY() - 3 + i * 15);
        }
        //         if (rect == selectedTextWrapper) {
        //            FontMetrics fm = g2.getFontMetrics();
        //            int maxWidth = 0;
        //            int maxHeight = 0;
        //            for (int i = 0; i < lines.length; i++) {
        //               int lineWidth = fm.stringWidth(lines[i]);
        //               if (lineWidth > maxWidth) {
        //                  maxWidth = lineWidth;
        //               }
        //            }
        //            maxHeight = 15 * lines.length;
        //            if (maxWidth < 100) {
        //               maxWidth = 100;
        //            }
        //            Rectangle2D inputBox = new Rectangle2D.Double(screenPoint.getX(), screenPoint.getY() - 15, maxWidth + 8, maxHeight);
        //              Color fillColor = new Color(250, 250, 50, 30);
        //              g2.setPaint(fillColor);
        //              g2.fill(inputBox);
        //            g2.setColor(Color.ORANGE);
        //            g2.drawRect((int) screenPoint.getX(), (int) screenPoint.getY() - 15, maxWidth + 8, maxHeight);
        //
        //         }
        //         g2.drawString(text == null ? "" : text, (int) screenPoint.getX() + 3, (int) screenPoint.getY() - 3);
    }
    g2.setColor(oldColor);
}
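
The early return at the top shows the usual guard: checking size() == 0 (equivalently, isEmpty()) after the null check avoids doing any drawing setup for an empty map. Reduced to its core (the render method is hypothetical):

import java.util.LinkedHashMap;

public class GuardDemo {
    // Hypothetical render step: bail out before any drawing work when
    // there is nothing to draw.
    static void render(LinkedHashMap<String, String> labels) {
        if (labels == null || labels.size() == 0) { // same as labels.isEmpty()
            return;
        }
        labels.forEach((key, text) -> System.out.println(key + ": " + text));
    }

    public static void main(String[] args) {
        render(null);                  // no output, and no NullPointerException
        render(new LinkedHashMap<>()); // no output
    }
}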

From source file:org.apache.flex.compiler.internal.projects.SourcePathManager.java

private static boolean arePathsEqual(File[] newPaths, LinkedHashMap<DirectoryID, HashSet<QNameFile>> oldPaths) {
    if (newPaths.length != oldPaths.size())
        return false;

    int i = 0;
    for (DirectoryID oldPath : oldPaths.keySet()) {
        if (!newPaths[i].isDirectory())
            return false; // all the old paths are directories. If this isn't then it must not be equal

        DirectoryID newDir = new DirectoryID(newPaths[i]);

        if (!(newDir.equals(oldPath)))
            return false;
        i++;
    }

    return true;
}

From source file:org.apache.hadoop.hive.ql.exec.ArchiveUtils.java

/**
 * Determines whether one can insert into the partition(s), or whether there is
 * a conflict with an archive. A conflict arises either because the partition is
 * itself archived or because it would be created inside an existing archive.
 * The second case applies when the partition doesn't exist yet but would fall
 * inside an archive if it did. This one is quite tricky to check: we need to
 * find at least one partition inside the parent directory. If it is archived,
 * and the archiving level shows that the archive covers the directory the
 * partition is in, we cannot insert; otherwise we can.
 * This method works both for full specifications and partial ones - in the
 * second case it checks whether any partition that could possibly match the
 * specification is inside an archive.
 *
 * @param db - Hive object
 * @param tbl - table where the partition is
 * @param partSpec - partition specification with possible nulls in case of
 * dynamic partition inserts
 * @return null if the partition can be inserted, or the colliding archive
 * name when it can't
 * @throws HiveException
 */
public static String conflictingArchiveNameOrNull(Hive db, Table tbl, LinkedHashMap<String, String> partSpec)
        throws HiveException {

    List<FieldSchema> partKeys = tbl.getPartitionKeys();
    int partSpecLevel = 0;
    for (FieldSchema partKey : partKeys) {
        if (!partSpec.containsKey(partKey.getName())) {
            break;
        }
        partSpecLevel++;
    }

    if (partSpecLevel != partSpec.size()) {
        throw new HiveException("partspec " + partSpec + " is wrong for table " + tbl.getTableName());
    }

    Map<String, String> spec = new HashMap<String, String>(partSpec);
    List<String> reversedKeys = new LinkedList<String>();
    for (FieldSchema fs : tbl.getPartCols()) {
        if (spec.containsKey(fs.getName())) {
            reversedKeys.add(0, fs.getName());
        }
    }

    for (String rk : reversedKeys) {
        List<Partition> parts = db.getPartitions(tbl, spec, (short) 1);
        if (parts.size() != 0) {
            Partition p = parts.get(0);
            if (!isArchived(p)) {
                // if archiving was done at this or at upper level, every matched
                // partition would be archived, so it not being archived means
                // no archiving was done neither at this nor at upper level
                return null;
            } else if (getArchivingLevel(p) > spec.size()) {
                // if archiving was done at this or at upper level its level
                // would be lesser or equal to specification size
                // it is not, which means no archiving at this or upper level
                return null;
            } else {
                return getPartialName(p, getArchivingLevel(p));
            }
        }
        spec.remove(rk);
    }
    return null;
}
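
The first loop counts how many partition keys the spec covers in order, and comparing that count against partSpec.size() rejects specs with gaps or extra keys. The same prefix check in isolation (the key names are invented):

import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;

public class PrefixSpecDemo {
    // Hypothetical check: the spec must name a contiguous prefix of the key list.
    static boolean isPrefixSpec(List<String> partitionKeys, LinkedHashMap<String, String> spec) {
        int matched = 0;
        for (String key : partitionKeys) {
            if (!spec.containsKey(key)) {
                break;
            }
            matched++;
        }
        // A spec that holds exactly the matched keys and nothing else is a valid prefix.
        return matched == spec.size();
    }

    public static void main(String[] args) {
        List<String> keys = Arrays.asList("year", "month", "day");
        LinkedHashMap<String, String> spec = new LinkedHashMap<>();
        spec.put("year", "2024");
        spec.put("month", "06");
        System.out.println(isPrefixSpec(keys, spec)); // true: year, month is a prefix
        spec.put("region", "eu");                     // not a partition key
        System.out.println(isPrefixSpec(keys, spec)); // false: extra key breaks the match
    }
}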

From source file:org.loklak.api.client.HelloClient.java

public static void propagate(final String[] hoststubs) {
    // get some configuration
    int httpport = (int) DAO.getConfig("port.http", 9000);
    int httpsport = (int) DAO.getConfig("port.https", 9443);
    String peername = (String) DAO.getConfig("peername", "anonymous");

    // retrieve some simple statistics from the index
    final String backend = DAO.getConfig("backend", "");
    final boolean backend_push = DAO.getConfig("backend.push.enabled", false);
    Map<String, Object> backend_status = null;
    Map<String, Object> backend_status_index_sizes = null;
    if (backend.length() > 0 && !backend_push) {
        try {
            backend_status = StatusClient.status(backend);
        } catch (IOException e) {
            e.printStackTrace();
        }
        backend_status_index_sizes = backend_status == null ? null
                : (Map<String, Object>) backend_status.get("index_sizes");
    }
    long backend_messages = backend_status_index_sizes == null ? 0
            : ((Number) backend_status_index_sizes.get("messages")).longValue();
    long backend_users = backend_status_index_sizes == null ? 0
            : ((Number) backend_status_index_sizes.get("users")).longValue();
    long local_messages = DAO.countLocalMessages(-1);
    long local_users = DAO.countLocalUsers();
    int timezoneOffset = DateParser.getTimezoneOffset();

    // retrieve more complex data: date histogram
    LinkedHashMap<String, Long> fullDateHistogram = DAO.FullDateHistogram(timezoneOffset); // complex operation can take some time!
    String peakDay = "";
    long peakCount = -1;
    String lastDay = "";
    long lastCount = -1;
    for (Map.Entry<String, Long> day : fullDateHistogram.entrySet()) {
        lastDay = day.getKey();
        lastCount = day.getValue();
        if (lastCount > peakCount) {
            peakDay = lastDay;
            peakCount = lastCount;
        }
    }
    String firstDay = "";
    long firstCount = -1;
    if (fullDateHistogram.size() > 0) {
        Map.Entry<String, Long> firstDayEntry = fullDateHistogram.entrySet().iterator().next();
        firstDay = firstDayEntry.getKey();
        firstCount = firstDayEntry.getValue();
    }

    // send data to peers
    for (String hoststub : hoststubs) {
        if (hoststub.endsWith("/"))
            hoststub = hoststub.substring(0, hoststub.length() - 1);
        try {
            String urlstring = hoststub + "/api/hello.json?port.http=" + httpport + "&port.https=" + httpsport
                    + "&peername=" + peername + "&time=" + System.currentTimeMillis() + "&timezoneOffset="
                    + timezoneOffset + "&local_messages=" + local_messages + "&local_users=" + local_users
                    + "&backend_messages=" + backend_messages + "&backend_users=" + backend_users + "&peakDay="
                    + peakDay + "&peakCount=" + peakCount + "&firstDay=" + firstDay + "&firstCount="
                    + firstCount + "&lastDay=" + lastDay + "&lastCount=" + lastCount;
            byte[] jsonb = ClientConnection.download(urlstring);
            if (jsonb == null || jsonb.length == 0)
                throw new IOException("empty content from " + hoststub);
            String jsons = UTF8.String(jsonb);
            JSONObject json = new JSONObject(jsons);
            Log.getLog().info("Hello response: " + json.toString());
        } catch (IOException e) {
            // peer unreachable or bad response; skip it and continue with the next one
        }
    }
}
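
Note the size() > 0 guard before taking the first entry: calling iterator().next() on an empty map would throw NoSuchElementException. In isolation (the histogram data is made up):

import java.util.LinkedHashMap;
import java.util.Map;

public class FirstEntryDemo {
    public static void main(String[] args) {
        LinkedHashMap<String, Long> histogram = new LinkedHashMap<>();
        histogram.put("2015-01-01", 42L);
        histogram.put("2015-01-02", 7L);

        // size() > 0 guards the iterator: next() on an empty map throws.
        if (histogram.size() > 0) {
            Map.Entry<String, Long> first = histogram.entrySet().iterator().next();
            System.out.println(first.getKey() + " -> " + first.getValue()); // 2015-01-01 -> 42
        }
    }
}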

From source file:org.loklak.api.p2p.HelloService.java

public static void propagate(final String[] hoststubs) {
    // get some configuration
    int httpport = (int) DAO.getConfig("port.http", 9000);
    int httpsport = (int) DAO.getConfig("port.https", 9443);
    String peername = (String) DAO.getConfig("peername", "anonymous");

    // retrieve some simple statistics from the index
    final String backend = DAO.getConfig("backend", "");
    final boolean backend_push = DAO.getConfig("backend.push.enabled", false);
    JSONObject backend_status = null;
    JSONObject backend_status_index_sizes = null;
    if (backend.length() > 0 && !backend_push) {
        try {
            backend_status = StatusService.status(backend);
        } catch (IOException e) {
            Log.getLog().warn(e);
        }
        backend_status_index_sizes = backend_status == null ? null
                : (JSONObject) backend_status.get("index_sizes");
    }
    long backend_messages = backend_status_index_sizes == null ? 0
            : ((Number) backend_status_index_sizes.get("messages")).longValue();
    long backend_users = backend_status_index_sizes == null ? 0
            : ((Number) backend_status_index_sizes.get("users")).longValue();
    long local_messages = DAO.countLocalMessages(-1);
    long local_users = DAO.countLocalUsers();
    int timezoneOffset = DateParser.getTimezoneOffset();

    // retrieve more complex data: date histogram
    LinkedHashMap<String, Long> fullDateHistogram = DAO.FullDateHistogram(timezoneOffset); // complex operation can take some time!
    String peakDay = "";
    long peakCount = -1;
    String lastDay = "";
    long lastCount = -1;
    for (Map.Entry<String, Long> day : fullDateHistogram.entrySet()) {
        lastDay = day.getKey();
        lastCount = day.getValue();
        if (lastCount > peakCount) {
            peakDay = lastDay;
            peakCount = lastCount;
        }
    }
    String firstDay = "";
    long firstCount = -1;
    if (fullDateHistogram.size() > 0) {
        Map.Entry<String, Long> firstDayEntry = fullDateHistogram.entrySet().iterator().next();
        firstDay = firstDayEntry.getKey();
        firstCount = firstDayEntry.getValue();
    }

    // send data to peers
    for (String hoststub : hoststubs) {
        if (hoststub.endsWith("/"))
            hoststub = hoststub.substring(0, hoststub.length() - 1);
        try {
            String urlstring = hoststub + "/api/hello.json?port.http=" + httpport + "&port.https=" + httpsport
                    + "&peername=" + peername + "&time=" + System.currentTimeMillis() + "&timezoneOffset="
                    + timezoneOffset + "&local_messages=" + local_messages + "&local_users=" + local_users
                    + "&backend_messages=" + backend_messages + "&backend_users=" + backend_users + "&peakDay="
                    + peakDay + "&peakCount=" + peakCount + "&firstDay=" + firstDay + "&firstCount="
                    + firstCount + "&lastDay=" + lastDay + "&lastCount=" + lastCount;
            byte[] jsonb = ClientConnection.downloadPeer(urlstring);
            if (jsonb == null || jsonb.length == 0)
                throw new IOException("empty content from " + hoststub);
            String jsons = UTF8.String(jsonb);
            JSONObject json = new JSONObject(jsons);
            Log.getLog().info("Hello response: " + json.toString());
        } catch (IOException e) {
            // peer unreachable or bad response; skip it and continue with the next one
        }
    }
}