Example usage for java.util.regex Matcher groupCount

Introduction

On this page you can find usage examples for java.util.regex.Matcher#groupCount(), taken from real-world source files.

Prototype

public int groupCount() 

Document

Returns the number of capturing groups in this matcher's pattern.
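
Before the project examples below, here is a minimal, self-contained sketch (using a hypothetical key=value pattern and input string) of how the count relates to group indices: group zero denotes the entire match and is not included in the count, so the capturing groups are numbered 1 through groupCount().

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class GroupCountDemo {
    public static void main(String[] args) {
        // Hypothetical pattern with two capturing groups: a key and a value.
        Pattern p = Pattern.compile("(\\w+)=(\\w+)");
        Matcher m = p.matcher("timeout=30");

        // groupCount() depends only on the pattern, so it can be queried
        // before any match is attempted; here it returns 2.
        System.out.println("capturing groups: " + m.groupCount());

        if (m.matches()) {
            // Valid group indices run from 1 to groupCount(); group(0) is the full match.
            for (int i = 1; i <= m.groupCount(); i++) {
                System.out.println("group " + i + ": " + m.group(i));
            }
        }
    }
}

With the input above this prints the group count (2), then group 1 ("timeout") and group 2 ("30").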

Usage

From source file:de.mpg.mpdl.inge.syndication.feed.Feed.java

/**
 * Populate parameters with the values taken from the given <code>uri</code> and populate
 * <code>paramHash</code> with the parameter/value pairs.
 *
 * @param uri
 * @throws SyndicationException
 */
private void populateParamsFromUri(String uri) throws SyndicationException {
    Utils.checkName(uri, "Uri is empty");

    String um = getUriMatcher();

    Utils.checkName(um, "Uri matcher is empty");

    Matcher m = Pattern.compile(um, Pattern.CASE_INSENSITIVE | Pattern.DOTALL).matcher(uri);
    if (m.find()) {
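        // group(0) is the whole match, so parameter i maps to capturing group i + 1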
        for (int i = 0; i < m.groupCount(); i++)
            paramHash.put((String) paramList.get(i), m.group(i + 1));
    }

    // special handling of Organizational Unit Feed
    // TODO: should be resolved other way!
    if (getUriMatcher().equals("(.+)?/syndication/feed/(.+)?/publications/organization/(.+)?")) {
        TreeMap<String, String> outm = Utils.getOrganizationUnitTree();
        String oid = (String) paramHash.get("${organizationId}");
        for (Map.Entry<String, String> entry : outm.entrySet()) {
            if (entry.getValue().equals(oid)) {
                paramHash.put("${organizationName}", entry.getKey());
            }
        }
    }

    logger.info("parameters: " + paramHash);

}

From source file:org.apache.hadoop.util.SysInfoLinux.java

/**
 * Read /proc/net/dev file, parse and calculate amount
 * of bytes read and written through the network.
 */
private void readProcNetInfoFile() {

    numNetBytesRead = 0L;
    numNetBytesWritten = 0L;

    // Read "/proc/net/dev" file
    BufferedReader in;
    InputStreamReader fReader;
    try {
        fReader = new InputStreamReader(new FileInputStream(procfsNetFile), Charset.forName("UTF-8"));
        in = new BufferedReader(fReader);
    } catch (FileNotFoundException f) {
        return;
    }

    Matcher mat;
    try {
        String str = in.readLine();
        while (str != null) {
            mat = PROCFS_NETFILE_FORMAT.matcher(str);
            if (mat.find()) {
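                // PROCFS_NETFILE_FORMAT is expected to capture at least 16 columns per interface line;
                // the received and transmitted byte counters are read from groups 2 and 10 below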
                assert mat.groupCount() >= 16;

                // ignore loopback interfaces
                if (mat.group(1).equals("lo")) {
                    str = in.readLine();
                    continue;
                }
                numNetBytesRead += Long.parseLong(mat.group(2));
                numNetBytesWritten += Long.parseLong(mat.group(10));
            }
            str = in.readLine();
        }
    } catch (IOException io) {
        LOG.warn("Error reading the stream " + io);
    } finally {
        // Close the streams
        try {
            fReader.close();
            try {
                in.close();
            } catch (IOException i) {
                LOG.warn("Error closing the stream " + in);
            }
        } catch (IOException i) {
            LOG.warn("Error closing the stream " + fReader);
        }
    }
}

From source file:com.app.util.browser.BrowserSniffer.java

private void sniffBrowser() throws Exception {
    // eg: Camino/0.7
    // [0] = Camino/0.7
    // [1] = Camino
    // [2] = 0
    // [3] = .7
    ArrayList matches = getMatches(BrowserTypePat, ua, 4);
    if (matches.isEmpty())
        return;

    // first find out whether it's msie hiding behind many different doors... 
    String[] browserParticulars = (String[]) CollectionUtils.find(matches, new Predicate() {
        public boolean evaluate(Object arg0) {
            final String[] pieces = (String[]) arg0;
            for (int i = 0; i < pieces.length; i++) {
                final String piece = pieces[i];
                if (StringUtils.contains(piece, MSIE_ID)) {
                    return true;
                }
            }
            return false;
        }
    });

    // if it's not msie, test for Opera because it can identify itself as msie...
    if (browserParticulars == null) {
        // get the position of the last browser key found
        int count = matches.size() - 1;
        browserParticulars = (String[]) matches.get(count);
    }

    longName = browserParticulars[0];
    browserName = browserParticulars[1];

    // get browserName from string
    Matcher nameMatcher = NameOnlyPat.matcher(browserName);
    if (nameMatcher.matches()) {
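        // group(groupCount()) selects the last capturing group of NameOnlyPat, which carries the name itself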
        browserName = nameMatcher.group(nameMatcher.groupCount());
    }

    majorVersion = browserParticulars[2];

    // parse the minor version string and look for alpha chars
    if (browserParticulars[3] != null) {
        // eg: .7b
        // [0] = .7b
        // [1] = .7
        // [2] = b
        matches = getMatches(MinorVersionPat, browserParticulars[3], 3);
        if (matches.isEmpty())
            return;

        int count = matches.size() - 1;
        browserParticulars = (String[]) matches.get(count);
        if (browserParticulars[1] != null)
            minorVersion = browserParticulars[1];
        else
            minorVersion = ".0";

        if (PunctuationOnlyPat.matcher(minorVersion).matches())
            minorVersion = StringUtils.EMPTY;

        if (browserParticulars[2] != null && !PunctuationOnlyPat.matcher(browserParticulars[2]).matches())
            revisionVersion = browserParticulars[2];
    }
}

From source file:com.twinsoft.convertigo.engine.servlets.ReverseProxyServlet.java

private byte[] handleStringReplacements(String baseUrl, String contentType, String charset,
        ProxyHttpConnector proxyHttpConnector, byte[] data) throws IOException {

    Engine.logEngine.debug("(ReverseProxyServlet) String replacements for content-type: " + contentType);

    if (contentType == null) {
        Engine.logEngine.warn("(ReverseProxyServlet) Aborting string replacements because of null mimetype!");
    } else {
        Replacements replacements = proxyHttpConnector.getReplacementsForMimeType(contentType);

        if (!replacements.isEmpty()) {
            String sData = new String(data, charset);

            Engine.logEngine.trace("(ReverseProxyServlet) Data before string replacements:\n" + sData);

            Engine.logEngine.debug("(ReverseProxyServlet) Replacements in progress");

            String strSearched, strReplacing;
            for (int i = 0; i < replacements.strReplacing.length; i++) {
                strSearched = replacements.strSearched[i];
                Engine.logEngine.debug("(ReverseProxyServlet) Replacing: " + strSearched);

                strReplacing = replacements.strReplacing[i];

                Matcher m_connector = reg_connector.matcher(strReplacing);
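                // groupCount() describes the reg_connector pattern itself, so group(1) is only read
                // when the pattern actually defines a capturing group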
                if (m_connector.find() && m_connector.groupCount() >= 1) {
                    String newConnector = m_connector.group(1);
                    Engine.logEngine.trace("(ReverseProxyServlet) find connector : " + newConnector);

                    // Bugfix for #1798 regression about #1718
                    String newBaseUrl = switchConnector(baseUrl, newConnector) + '/';
                    Engine.logEngine.trace("(ReverseProxyServlet) new baseUrl : " + newBaseUrl);
                    strReplacing = m_connector.replaceAll(newBaseUrl);
                } else {
                    strReplacing = reg_base.matcher(replacements.strReplacing[i]).replaceAll(baseUrl);
                }

                Engine.logEngine.debug("(ReverseProxyServlet) By: " + strReplacing);

                sData = sData.replaceAll(strSearched, strReplacing);
            }

            Engine.logEngine.debug("(ReverseProxyServlet) Replacements done!");

            Engine.logEngine.trace("(ReverseProxyServlet) Data after string replacements:\n" + sData);

            data = sData.getBytes(charset);
        }
    }
    return data;
}

From source file:de.mpg.escidoc.services.syndication.feed.Feed.java

/**
 * Populate parameters with the values taken from the given <code>uri</code>
 * and populate <code>paramHash</code> with the parameter/value pairs.
 * @param uri
 * @throws SyndicationException
 */
private void populateParamsFromUri(String uri) throws SyndicationException {
    Utils.checkName(uri, "Uri is empty");

    String um = getUriMatcher();

    Utils.checkName(um, "Uri matcher is empty");

    Matcher m = Pattern.compile(um, Pattern.CASE_INSENSITIVE | Pattern.DOTALL).matcher(uri);
    if (m.find()) {
        for (int i = 0; i < m.groupCount(); i++)
            paramHash.put((String) paramList.get(i), m.group(i + 1));
    }

    //special handling of Organizational Unit Feed
    //TODO: should be resolved other way!
    if (getUriMatcher().equals("(.+)?/syndication/feed/(.+)?/publications/organization/(.+)?")) {
        TreeMap<String, String> outm = Utils.getOrganizationUnitTree();
        String oid = (String) paramHash.get("${organizationId}");
        for (Map.Entry<String, String> entry : outm.entrySet()) {
            if (entry.getValue().equals(oid)) {
                paramHash.put("${organizationName}", entry.getKey());
            }
        }
    }

    logger.info("parameters: " + paramHash);

}

From source file:org.craftercms.studio.impl.v1.service.security.DbWithLdapExtensionSecurityProvider.java

private String[] extractSiteIdAndGroupNameFromAttributeValue(String siteIdAttributeValue) {
    Pattern pattern = Pattern
            .compile(studioConfiguration.getProperty(SECURITY_LDAP_USER_ATTRIBUTE_SITE_ID_REGEX));
    Matcher matcher = pattern.matcher(siteIdAttributeValue);
    if (matcher.matches()) {
        int siteIdIndex = Integer
                .parseInt(studioConfiguration.getProperty(SECURITY_LDAP_USER_ATTRIBUTE_SITE_ID_MATCH_INDEX));
        int groupNameIndex = Integer.parseInt(
                studioConfiguration.getProperty(SECURITY_LDAP_USER_ATTRIBUTE_SITE_ID_GROUP_NAME_MATCH_INDEX));

        String siteName = matcher.group(siteIdIndex);
        String groupName = null;

        if (groupNameIndex <= matcher.groupCount()) {
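            // the configured group index is only used if the pattern actually defines that many capturing groups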
            groupName = matcher.group(groupNameIndex);
        }

        if (groupName != null) {
            return new String[] { siteName, groupName };
        } else {
            return new String[] { siteName };
        }
    }

    return new String[0];
}

From source file:org.codehaus.groovy.grails.orm.hibernate.metaclass.AbstractClausedStaticPersistentMethod.java

protected Object doInvokeInternal(Class<?> clazz, String methodName, DetachedCriteria<?> detachedCriteria,
        Closure<?> additionalCriteria, Object[] arguments) {
    List<GrailsMethodExpression> expressions = new ArrayList<GrailsMethodExpression>();
    if (arguments == null)
        arguments = new Object[0];
    Matcher match = super.getPattern().matcher(methodName);
    // find match
    match.find();

    String[] queryParameters;
    int totalRequiredArguments = 0;
    // get the sequence clauses
    final String querySequence;
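    // the finder pattern comes in two shapes; a six-group match is its boolean-property form, handled below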
    int groupCount = match.groupCount();
    if (groupCount == 6) {
        String booleanProperty = match.group(3);
        if (booleanProperty == null) {
            booleanProperty = match.group(6);
            querySequence = null;
        } else {
            querySequence = match.group(5);
        }
        Boolean arg = Boolean.TRUE;
        if (booleanProperty.matches("Not[A-Z].*")) {
            booleanProperty = booleanProperty.substring(3);
            arg = Boolean.FALSE;
        }
        GrailsMethodExpression booleanExpression = GrailsMethodExpression.create(application, clazz,
                booleanProperty, conversionService);
        booleanExpression.setArguments(new Object[] { arg });
        expressions.add(booleanExpression);
    } else {
        querySequence = match.group(2);
    }
    // if it contains operator and split
    boolean containsOperator = false;
    String operatorInUse = null;

    if (querySequence != null) {
        for (int i = 0; i < operators.length; i++) {
            Matcher currentMatcher = operatorPatterns[i].matcher(querySequence);
            if (currentMatcher.find()) {
                containsOperator = true;
                operatorInUse = operators[i];

                queryParameters = querySequence.split(operatorInUse);

                // loop through query parameters and create expressions
                // calculating the number of arguments required for the expression
                int argumentCursor = 0;
                for (String queryParameter : queryParameters) {
                    GrailsMethodExpression currentExpression = GrailsMethodExpression.create(application, clazz,
                            queryParameter, conversionService);
                    totalRequiredArguments += currentExpression.argumentsRequired;
                    // populate the arguments into the GrailsExpression from the argument list
                    Object[] currentArguments = new Object[currentExpression.argumentsRequired];
                    if ((argumentCursor + currentExpression.argumentsRequired) > arguments.length) {
                        throw new MissingMethodException(methodName, clazz, arguments);
                    }

                    for (int k = 0; k < currentExpression.argumentsRequired; k++, argumentCursor++) {
                        currentArguments[k] = arguments[argumentCursor];
                    }
                    try {
                        currentExpression.setArguments(currentArguments);
                    } catch (IllegalArgumentException iae) {
                        LOG.debug(iae.getMessage(), iae);
                        throw new MissingMethodException(methodName, clazz, arguments);
                    }
                    // add to list of expressions
                    expressions.add(currentExpression);
                }
                break;
            }
        }
    }

    // otherwise there is only one expression
    if (!containsOperator && querySequence != null) {
        GrailsMethodExpression solo = GrailsMethodExpression.create(application, clazz, querySequence,
                conversionService);

        if (solo.argumentsRequired > arguments.length) {
            throw new MissingMethodException(methodName, clazz, arguments);
        }

        totalRequiredArguments += solo.argumentsRequired;
        Object[] soloArgs = new Object[solo.argumentsRequired];

        System.arraycopy(arguments, 0, soloArgs, 0, solo.argumentsRequired);
        try {
            solo.setArguments(soloArgs);
        } catch (IllegalArgumentException iae) {
            LOG.debug(iae.getMessage(), iae);
            throw new MissingMethodException(methodName, clazz, arguments);
        }
        expressions.add(solo);
    }

    // if the total of all the arguments necessary does not equal the number of arguments throw exception
    if (totalRequiredArguments > arguments.length) {
        throw new MissingMethodException(methodName, clazz, arguments);
    }

    // calculate the remaining arguments
    Object[] remainingArguments = new Object[arguments.length - totalRequiredArguments];
    if (remainingArguments.length > 0) {
        for (int i = 0, j = totalRequiredArguments; i < remainingArguments.length; i++, j++) {
            remainingArguments[i] = arguments[j];
        }
    }

    if (LOG.isTraceEnabled()) {
        LOG.trace("Calculated expressions: " + expressions);
    }

    return doInvokeInternalWithExpressions(clazz, methodName, remainingArguments, expressions, operatorInUse,
            detachedCriteria, additionalCriteria);
}

From source file:com.frostwire.search.tbp.TPBSearchResult.java

private long parseCreationTime(String group) {

    //Today, or however many minutes ago
    if (group.contains("Today") || group.contains("<b>")) {
        return System.currentTimeMillis();
    } else if (group.contains("Y-day")) {
        return System.currentTimeMillis() - (24 * 60 * 60 * 1000);
    }

    Matcher OLDER_DATE_PATTERN_MATCHER = OLDER_DATE_PATTERN.matcher(group);
    Matcher COMMON_DATE_PATTERN_MATCHER = COMMON_DATE_PATTERN.matcher(group);
    Matcher DATE_TIME_PATTERN_MATCHER = DATE_TIME_PATTERN.matcher(group);

    Matcher RIGHT_MATCHER = (OLDER_DATE_PATTERN_MATCHER.matches()) ? OLDER_DATE_PATTERN_MATCHER
            : COMMON_DATE_PATTERN_MATCHER;

    if (!RIGHT_MATCHER.matches() && DATE_TIME_PATTERN_MATCHER.matches()) {
        RIGHT_MATCHER = DATE_TIME_PATTERN_MATCHER;
    }

    int month = Integer.parseInt(RIGHT_MATCHER.group(1));
    int date = Integer.parseInt(RIGHT_MATCHER.group(2));
    int year = 0;

    if (OLDER_DATE_PATTERN_MATCHER.matches() && OLDER_DATE_PATTERN_MATCHER.groupCount() == 3) {
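        // OLDER_DATE_PATTERN is the only pattern here whose third capturing group carries an explicit year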
        year = Integer.parseInt(RIGHT_MATCHER.group(3));
    } else if (COMMON_DATE_PATTERN_MATCHER.matches() || DATE_TIME_PATTERN_MATCHER.matches()) {
        year = Calendar.getInstance().get(Calendar.YEAR);
    }

    Calendar instance = Calendar.getInstance();
    instance.clear();
    instance.set(year, month, date);
    return instance.getTimeInMillis();
}

From source file:europarl.PhraseTranslation.java

public boolean getFromGz(String fileName, String targetWord, int limit) {
    String strLine;
    ArrayList<String> line_triple = new ArrayList<String>();

    BufferedReader gzipReader;
    Pattern word_align = Pattern.compile("(\\w+) \\(\\{(.*?)\\}\\) ");

    Bag<String> words_list = new Bag<String>(); //Set of ALL words: it will be the list of attributes
    ArrayList<PhraseTranslation> translations = new ArrayList<PhraseTranslation>();
    try {
        gzipReader = new BufferedReader(
                new InputStreamReader(new GZIPInputStream(new FileInputStream(fileName))));

        while ((strLine = gzipReader.readLine()) != null) //read-everything
        {
            line_triple.add(strLine);
            if (line_triple.size() == 3) //triple finished
            {
                //TODO: match only complete words
                //TODO: stem it before doing this

                Matcher matcher = word_align.matcher(line_triple.get(2));
                String[] foreign_words = line_triple.get(1).split(" ");
                line_triple.clear();
                if (!strLine.contains(targetWord)) //skip it
                    continue;

                ArrayList<String> e_phrase = new ArrayList<String>();
                String translation = "";
                while (matcher.find()) //each iteration is word +alignment
                {
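                    // word_align defines exactly two capturing groups: the word and its "({ ... })" alignment indices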
                    assert matcher.groupCount() == 2;
                    String e_word = matcher.group(1).trim();
                    if (e_word.equals("NULL"))
                        e_word = "";
                    if (stopwordsList.contains(e_word))
                        continue;
                    if (stemmer != null)
                        e_word = stemmer.stem(e_word);

                    e_phrase.add(e_word);
                    words_list.add(e_word);

                    //we don't care about the alignment of non-target words
                    if (!e_word.equals(targetWord))
                        continue;

                    //parse the { x y z } alignment part
                    ArrayList<Integer> f_words = new ArrayList<Integer>();
                    translation = "";
                    //for each number between curly brackets
                    for (String number : matcher.group(2).split(" ")) {
                        if (!number.isEmpty()) {
                            int n_word = Integer.parseInt(number) - 1;
                            f_words.add(n_word);
                            translation += foreign_words[n_word] + " ";
                        }
                    } // end of curly brackets for

                } //end of word+alignment while
                if (!translation.isEmpty()) {
                    PhraseTranslation trans = new PhraseTranslation(e_phrase, translation);
                    translations.add(trans);
                }
                line_triple.clear();
            } //end of triple-finished if
            if (translations.size() == limit)
                break; //stop collecting!
        } //end of the read-everything while
    } catch (Exception e) {
        log.error("Error: " + e);
        e.printStackTrace();
        return false;
    }

    //what we NOW have: a set of attributes in Bag<String> words_list
    //and an ArrayList<PhraseTranslation> translations
    log.info("Collected " + translations.size() + " phrases and " + words_list.size() + " words");

    postProcessData(translations, words_list);

    //now convert the data we collected to Weka data
    //we needed to do "double passing" because we need to initialize
    //the dataset with the complete list of attributes

    //this will convert word to attributes: they are all "boolean"
    ArrayList<Attribute> attrs = new ArrayList<Attribute>();
    HashMap<String, Attribute> attrs_map = new HashMap<String, Attribute>();
    Attribute att;
    for (String word : words_list) {
        att = new Attribute(word);
        attrs.add(att);
        attrs_map.put(word, att);
    }

    //now we need to manage class.
    //each translation is a class, so we need to get all of them
    HashMap<String, Integer> class_map = new HashMap<String, Integer>();
    ArrayList<String> classes = new ArrayList<String>();
    for (PhraseTranslation phraseTranslation : translations) {
        if (!class_map.containsKey(phraseTranslation.getTranslatedWord())) {
            class_map.put(phraseTranslation.getTranslatedWord(), classes.size());
            classes.add(phraseTranslation.getTranslatedWord());
        }
    }

    log.info(targetWord + " has " + classes.size() + " translations:");
    if (log.isInfoEnabled())
        for (String translation : classes)
            System.out.println(translation);
    att = new Attribute("%class", classes);
    attrs.add(att);
    attrs_map.put("%class", att);
    dataSet = new Instances("dataset", attrs, 0);
    for (PhraseTranslation phraseTranslation : translations) {
        SparseInstance inst = new SparseInstance(attrs.size());
        //set everything to 0
        for (int i = 0; i < attrs.size(); i++)
            inst.setValue(i, 0);
        //set present word to 1
        for (String word : phraseTranslation.getPhraseWords())
            inst.setValue(attrs_map.get(word), 1);
        //set class of instance
        inst.setValue(attrs_map.get("%class"), class_map.get(phraseTranslation.getTranslatedWord()));
        dataSet.add(inst);
    }

    return true;
}

From source file:com.manydesigns.portofino.pageactions.text.TextAction.java

protected String restoreAttachmentUrls(String content) {
    Pattern pattern = Pattern.compile(PORTOFINO_ATTACHMENT_PATTERN);
    Matcher matcher = pattern.matcher(content);
    int lastEnd = 0;
    StringBuilder sb = new StringBuilder();
    while (matcher.find()) {
        String attachmentId = matcher.group(1);
        //Default to src for old texts
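        //groupCount() is checked so group(3) is only read when the pattern defines it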
        String hrefAttribute = (matcher.groupCount() >= 3 && matcher.group(3) != null) ? matcher.group(3)
                : "src";

        sb.append(content.substring(lastEnd, matcher.start())).append(hrefAttribute).append("=\"")
                .append(StringEscapeUtils.escapeHtml(generateViewAttachmentUrl(attachmentId))).append("\"");

        lastEnd = matcher.end();
    }
    sb.append(content.substring(lastEnd));
    return sb.toString();
}