Example usage for org.apache.commons.io FilenameUtils wildcardMatch

Introduction

This page presents usage examples for org.apache.commons.io FilenameUtils.wildcardMatch.

Prototype

public static boolean wildcardMatch(String filename, String wildcardMatcher) 

Document

Checks a filename to see if it matches the specified wildcard matcher; the match is always case-sensitive.
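
To make the semantics concrete, here is a minimal, self-contained sketch (the file names and patterns are illustrative, not taken from the examples below):

import org.apache.commons.io.FilenameUtils;

public class WildcardMatchDemo {
    public static void main(String[] args) {
        // '*' matches any run of characters, '?' matches exactly one character
        System.out.println(FilenameUtils.wildcardMatch("report-2021.csv", "report-*.csv"));    // true
        System.out.println(FilenameUtils.wildcardMatch("report-2021.csv", "report-????.csv")); // true
        // this overload always compares case-sensitively
        System.out.println(FilenameUtils.wildcardMatch("Report-2021.csv", "report-*.csv"));    // false
    }
}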

Usage

From source file:org.apache.camel.idea.completion.extension.PropertiesPropertyPlaceholdersSmartCompletion.java

@Override
public boolean isValidExtension(String filename) {
    final CamelPreferenceService preferenceService = ServiceManager.getService(CamelPreferenceService.class);
    final boolean present = preferenceService.getExcludePropertyFiles().stream()
            .filter(s -> !s.isEmpty() && FilenameUtils.wildcardMatch(filename, s)).findFirst().isPresent();
    return (!present) && (filename.endsWith(".properties"));
}
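
Here each user-configured exclude entry is treated as a wildcard pattern against the candidate file name. For example (an illustrative pattern, not one shipped with the plugin), an exclude entry of "*-secret.properties" would suppress completion for "db-secret.properties", while "application.properties" would still qualify.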

From source file:org.apache.camel.idea.completion.extension.YamlPropertyPlaceholdersSmartCompletion.java

@Override
public boolean isValidExtension(String filename) {
    final CamelPreferenceService preferenceService = ServiceManager.getService(CamelPreferenceService.class);
    final boolean present = preferenceService.getExcludePropertyFiles().stream()
            .filter(s -> !s.isEmpty() && FilenameUtils.wildcardMatch(filename, s)).findFirst().isPresent();
    return (!present) && (filename.endsWith(".yaml") || filename.endsWith(".yml"));
}

From source file:org.apache.hadoop.hbase.security.access.CoprocessorWhitelistMasterObserver.java

/**
 * Validates a single whitelist path against the coprocessor path
 * @param  coprocPath the path to the coprocessor including scheme
 * @param  wlPath     can be:
 *                      1) a "*" to wildcard all coprocessor paths
 *                      2) a specific filesystem (e.g. hdfs://my-cluster/)
 *                      3) a wildcard path to be evaluated by
 *                         {@link FilenameUtils#wildcardMatch(String, String)};
 *                         the path can specify a scheme or not (e.g.
 *                         "file:///usr/hbase/coprocessors" or, for all
 *                         filesystems, "/usr/hbase/coprocessors")
 * @param  conf       the configuration to use when resolving file systems
 * @return             true if the coprocessor path was found under the wlPath
 * @throws IOException if a failure occurs in getting the path file system
 */
private static boolean validatePath(Path coprocPath, Path wlPath, Configuration conf) throws IOException {
    // verify if all are allowed
    if (wlPath.toString().equals("*")) {
        return (true);
    }

    // verify we are on the same filesystem if wlPath has a scheme
    if (!wlPath.isAbsoluteAndSchemeAuthorityNull()) {
        String wlPathScheme = wlPath.toUri().getScheme();
        String coprocPathScheme = coprocPath.toUri().getScheme();
        String wlPathHost = wlPath.toUri().getHost();
        String coprocPathHost = coprocPath.toUri().getHost();
        if (wlPathScheme != null) {
            wlPathScheme = wlPathScheme.toLowerCase();
        } else {
            wlPathScheme = "";
        }
        if (wlPathHost != null) {
            wlPathHost = wlPathHost.toLowerCase();
        } else {
            wlPathHost = "";
        }
        if (coprocPathScheme != null) {
            coprocPathScheme = coprocPathScheme.toLowerCase();
        } else {
            coprocPathScheme = "";
        }
        if (coprocPathHost != null) {
            coprocPathHost = coprocPathHost.toLowerCase();
        } else {
            coprocPathHost = "";
        }
        if (!wlPathScheme.equals(coprocPathScheme) || !wlPathHost.equals(coprocPathHost)) {
            return (false);
        }
    }

    // allow any on this file-system (file systems were verified to be the same above)
    if (wlPath.isRoot()) {
        return (true);
    }

    // allow "loose" matches stripping scheme
    if (FilenameUtils.wildcardMatch(Path.getPathWithoutSchemeAndAuthority(coprocPath).toString(),
            Path.getPathWithoutSchemeAndAuthority(wlPath).toString())) {
        return (true);
    }
    return (false);
}
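
As a concrete illustration of the final "loose" match (the values below are hypothetical configuration, not from the HBase sources): a whitelist entry of "/usr/hbase/coprocessors/*" would admit "hdfs://my-cluster/usr/hbase/coprocessors/my-observer.jar", because both paths are stripped to their scheme-less form before FilenameUtils.wildcardMatch compares them.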

From source file:org.apache.ranger.biz.RangerBizUtil.java

/**
 * Returns true if the given path matches the given wildcard pattern at the
 * same level or in a sub-directory
 *
 * @param pathToCheck  the path to test
 * @param wildcardPath the wildcard pattern to match against
 * @return true if any prefix of pathToCheck matches wildcardPath
 */
public boolean isRecursiveWildCardMatch(String pathToCheck, String wildcardPath) {
    if (pathToCheck != null) {
        if (wildcardPath != null && wildcardPath.equals(fileSeparator)) {
            return true;
        }
        StringBuilder sb = new StringBuilder();
        for (String p : pathToCheck.split(fileSeparator)) {
            sb.append(p);
            boolean matchFound = FilenameUtils.wildcardMatch(sb.toString(), wildcardPath);
            if (matchFound) {
                return true;
            }
            sb.append(fileSeparator);
        }
        sb = null;
    }
    return false;
}
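
To see why the method walks the path prefix by prefix, here is a standalone trace of the same logic (the path and pattern are illustrative, and fileSeparator is assumed to be "/"):

import org.apache.commons.io.FilenameUtils;

public class RecursiveMatchTrace {
    public static void main(String[] args) {
        // the successive prefixes of "/app/data/file" built by the loop above
        String[] prefixes = { "", "/app", "/app/data", "/app/data/file" };
        for (String prefix : prefixes) {
            // "/app/*" first matches at the "/app/data" prefix, which is the
            // point where isRecursiveWildCardMatch would return true
            System.out.println("\"" + prefix + "\" -> " + FilenameUtils.wildcardMatch(prefix, "/app/*"));
        }
    }
}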

From source file:org.apache.ranger.hadoop.client.HadoopFS.java

private List<String> listFilesInternal(String baseDir, String fileMatching) {
    List<String> fileList = new ArrayList<String>();
    ClassLoader prevCl = Thread.currentThread().getContextClassLoader();
    String errMsg = " You can still save the repository and start creating "
            + "policies, but you would not be able to use autocomplete for "
            + "resource names. Check xa_portal.log for more info.";
    try {
        Thread.currentThread().setContextClassLoader(getConfigHolder().getClassLoader());
        String dirPrefix = (baseDir.endsWith("/") ? baseDir : (baseDir + "/"));
        String filterRegEx = null;
        if (fileMatching != null && fileMatching.trim().length() > 0) {
            filterRegEx = fileMatching.trim();
        }

        Configuration conf = new Configuration();
        UserGroupInformation.setConfiguration(conf);

        FileSystem fs = null;
        try {
            fs = FileSystem.get(conf);

            FileStatus[] fileStats = fs.listStatus(new Path(baseDir));
            if (fileStats != null) {
                for (FileStatus stat : fileStats) {
                    Path path = stat.getPath();
                    String pathComponent = path.getName();
                    if (filterRegEx == null) {
                        fileList.add(dirPrefix + pathComponent);
                    } else if (FilenameUtils.wildcardMatch(pathComponent, fileMatching)) {
                        fileList.add(dirPrefix + pathComponent);
                    }
                }
            }
        } catch (UnknownHostException uhe) {
            String msgDesc = "listFilesInternal: Unable to connect using given config parameters"
                    + " of Hadoop environment [" + getDataSource() + "].";
            HadoopException hdpException = new HadoopException(msgDesc, uhe);
            hdpException.generateResponseDataMap(false, getMessage(uhe), msgDesc + errMsg, null, null);
            throw hdpException;
        } catch (FileNotFoundException fne) {
            String msgDesc = "listFilesInternal: Unable to locate files using given config parameters "
                    + "of Hadoop environment [" + getDataSource() + "].";
            HadoopException hdpException = new HadoopException(msgDesc, fne);
            hdpException.generateResponseDataMap(false, getMessage(fne), msgDesc + errMsg, null, null);
            throw hdpException;
        } finally {
        }
    } catch (IOException ioe) {
        String msgDesc = "listFilesInternal: Unable to get listing of files for directory " + baseDir
                + "] from Hadoop environment [" + getDataSource() + "].";
        HadoopException hdpException = new HadoopException(msgDesc, ioe);
        hdpException.generateResponseDataMap(false, getMessage(ioe), msgDesc + errMsg, null, null);
        throw hdpException;

    } catch (IllegalArgumentException iae) {
        String msgDesc = "Unable to get listing of files for directory [" + baseDir
                + "] from Hadoop environment [" + getDataSource() + "].";
        HadoopException hdpException = new HadoopException(msgDesc, iae);
        hdpException.generateResponseDataMap(false, getMessage(iae), msgDesc + errMsg, null, null);
        throw hdpException;
    } finally {
        Thread.currentThread().setContextClassLoader(prevCl);
    }
    return fileList;
}
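
The fileMatching argument is applied per path component rather than to the full path: a pattern such as "*.log" (illustrative) would match a child entry named "server.log" under baseDir. Note also that the match is performed against the untrimmed fileMatching argument, even though the null check uses the trimmed filterRegEx.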

From source file:org.apache.ranger.hive.client.HiveClient.java

public List<String> getClmList(String database, String tableName, String columnNameMatching) {
    List<String> ret = new ArrayList<String>();
    String errMsg = " You can still save the repository and start creating "
            + "policies, but you would not be able to use autocomplete for "
            + "resource names. Check xa_portal.log for more info.";
    if (con != null) {

        String columnNameMatchingRegEx = null;

        if (columnNameMatching != null && !columnNameMatching.isEmpty()) {
            columnNameMatchingRegEx = columnNameMatching;
        }

        Statement stat = null;
        ResultSet rs = null;

        String sql = null;

        try {
            sql = "use " + database;

            try {
                stat = con.createStatement();
                stat.execute(sql);
            } finally {
                close(stat);
            }

            sql = "describe  " + tableName;
            stat = con.createStatement();
            rs = stat.executeQuery(sql);
            while (rs.next()) {
                String columnName = rs.getString(1);
                if (columnNameMatchingRegEx == null) {
                    ret.add(columnName);
                } else if (FilenameUtils.wildcardMatch(columnName, columnNameMatchingRegEx)) {
                    ret.add(columnName);
                }
            }
        } catch (SQLTimeoutException sqlt) {
            String msgDesc = "Time Out, Unable to execute SQL [" + sql + "].";
            HadoopException hdpException = new HadoopException(msgDesc, sqlt);
            hdpException.generateResponseDataMap(false, getMessage(sqlt), msgDesc + errMsg, null, null);
            throw hdpException;
        } catch (SQLException sqle) {
            String msgDesc = "Unable to execute SQL [" + sql + "].";
            HadoopException hdpException = new HadoopException(msgDesc, sqle);
            hdpException.generateResponseDataMap(false, getMessage(sqle), msgDesc + errMsg, null, null);
            throw hdpException;
        } finally {
            close(rs);
            close(stat);
        }

    }
    return ret;
}
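
Despite the columnNameMatchingRegEx name, the filter is a wildcard pattern, not a regular expression: a value such as "emp*" (illustrative) would match columns like "emp_id" and "emp_name" returned by the describe statement.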

From source file:org.apache.ranger.plugin.conditionevaluator.RangerSampleSimpleMatcher.java

@Override
public boolean isMatched(RangerAccessRequest request) {

    if (LOG.isDebugEnabled()) {
        LOG.debug("==> RangerSampleSimpleMatcher.isMatched(" + request + ")");
    }

    boolean matched = false;

    if (_allowAny) {
        matched = true;
    } else {
        String requestValue = extractValue(request, _contextName);
        if (StringUtils.isNotBlank(requestValue)) {
            for (String policyValue : _values) {
                if (FilenameUtils.wildcardMatch(requestValue, policyValue)) {
                    matched = true;
                    break;
                }
            }
        }
    }

    if (LOG.isDebugEnabled()) {
        LOG.debug("<== RangerSampleSimpleMatcher.isMatched(" + request + "): " + matched);
    }

    return matched;
}
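
Each configured policy value acts as a wildcard pattern against the value extracted from the request context; for instance (illustrative values), a policy value of "dev-*" would match a request context value of "dev-cluster1".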

From source file:org.apache.ranger.plugin.conditionevaluator.RangerSimpleMatcher.java

@Override
public boolean isMatched(RangerAccessRequest request) {

    if (LOG.isDebugEnabled()) {
        LOG.debug("==> RangerSimpleMatcher.isMatched(" + request + ")");
    }

    boolean matched = false;

    if (_allowAny) {
        matched = true;
    } else {
        String requestValue = extractValue(request, _contextName);
        if (StringUtils.isNotBlank(requestValue)) {
            for (String policyValue : _values) {
                if (FilenameUtils.wildcardMatch(requestValue, policyValue)) {
                    matched = true;
                    break;
                }
            }
        }
    }

    if (LOG.isDebugEnabled()) {
        LOG.debug("<== RangerSimpleMatcher.isMatched(" + request + "): " + matched);
    }

    return matched;
}

From source file:org.apache.ranger.plugin.store.AbstractPredicateUtil.java

private Predicate addPredicateForResources(final Map<String, String> resources, List<Predicate> predicates) {
    if (MapUtils.isEmpty(resources)) {
        return null;
    }

    Predicate ret = new Predicate() {
        @Override
        public boolean evaluate(Object object) {
            if (object == null) {
                return false;
            }

            boolean ret = false;

            if (object instanceof RangerPolicy) {
                RangerPolicy policy = (RangerPolicy) object;

                if (!MapUtils.isEmpty(policy.getResources())) {
                    int numFound = 0;
                    for (String name : resources.keySet()) {
                        boolean isMatch = false;

                        RangerPolicyResource policyResource = policy.getResources().get(name);

                        if (policyResource != null && !CollectionUtils.isEmpty(policyResource.getValues())) {
                            String val = resources.get(name);

                            if (policyResource.getValues().contains(val)) {
                                isMatch = true;
                            } else {
                                for (String policyResourceValue : policyResource.getValues()) {
                                    if (FilenameUtils.wildcardMatch(val, policyResourceValue)) {
                                        isMatch = true;
                                        break;
                                    }
                                }
                            }
                        }

                        if (isMatch) {
                            numFound++;
                        } else {
                            break;
                        }
                    }

                    ret = numFound == resources.size();
                }
            } else {
                ret = true;
            }

            return ret;
        }
    };

    if (predicates != null) {
        predicates.add(ret);
    }

    return ret;
}
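
In effect, a policy matches only when every requested resource is covered: for a search filter of, say, path=/finance/2021 (an illustrative value), a policy whose path resource contains either the literal value or a wildcard such as "/finance/*" satisfies the check, and numFound must reach resources.size() for the predicate to accept the policy.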

From source file:org.apache.ranger.services.hdfs.client.HdfsClient.java

private List<String> listFilesInternal(String baseDir, String fileMatching, final List<String> pathList)
        throws HadoopException {
    List<String> fileList = new ArrayList<String>();
    String errMsg = " You can still save the repository and start creating "
            + "policies, but you would not be able to use autocomplete for "
            + "resource names. Check ranger_admin.log for more info.";
    try {
        String dirPrefix = (baseDir.endsWith("/") ? baseDir : (baseDir + "/"));
        String filterRegEx = null;
        if (fileMatching != null && fileMatching.trim().length() > 0) {
            filterRegEx = fileMatching.trim();
        }

        UserGroupInformation.setConfiguration(conf);

        FileSystem fs = null;
        try {
            fs = FileSystem.get(conf);

            Path basePath = new Path(baseDir);
            FileStatus[] fileStats = fs.listStatus(basePath);

            if (LOG.isDebugEnabled()) {
                LOG.debug("<== HdfsClient fileStatus : " + fileStats.length + " PathList :" + pathList);
            }

            if (fileStats != null) {
                if (fs.exists(basePath) && ArrayUtils.isEmpty(fileStats)) {
                    fileList.add(basePath.toString());
                } else {
                    for (FileStatus stat : fileStats) {
                        Path path = stat.getPath();
                        String pathComponent = path.getName();
                        String prefixedPath = dirPrefix + pathComponent;
                        if (pathList != null && pathList.contains(prefixedPath)) {
                            continue;
                        }
                        if (filterRegEx == null) {
                            fileList.add(prefixedPath);
                        } else if (FilenameUtils.wildcardMatch(pathComponent, fileMatching)) {
                            fileList.add(prefixedPath);
                        }
                    }
                }
            }
        } catch (UnknownHostException uhe) {
            String msgDesc = "listFilesInternal: Unable to connect using given config parameters"
                    + " of Hadoop environment [" + getSerivceName() + "].";
            HadoopException hdpException = new HadoopException(msgDesc, uhe);
            hdpException.generateResponseDataMap(false, getMessage(uhe), msgDesc + errMsg, null, null);
            if (LOG.isDebugEnabled()) {
                LOG.debug("<== HdfsClient listFilesInternal Error : " + uhe);
            }
            throw hdpException;
        } catch (FileNotFoundException fne) {
            String msgDesc = "listFilesInternal: Unable to locate files using given config parameters "
                    + "of Hadoop environment [" + getSerivceName() + "].";
            HadoopException hdpException = new HadoopException(msgDesc, fne);
            hdpException.generateResponseDataMap(false, getMessage(fne), msgDesc + errMsg, null, null);

            if (LOG.isDebugEnabled()) {
                LOG.debug("<== HdfsClient listFilesInternal Error : " + fne);
            }

            throw hdpException;
        }
    } catch (IOException ioe) {
        String msgDesc = "listFilesInternal: Unable to get listing of files for directory " + baseDir
                + fileMatching + "] from Hadoop environment [" + getSerivceName() + "].";
        HadoopException hdpException = new HadoopException(msgDesc, ioe);
        hdpException.generateResponseDataMap(false, getMessage(ioe), msgDesc + errMsg, null, null);
        if (LOG.isDebugEnabled()) {
            LOG.debug("<== HdfsClient listFilesInternal Error : " + ioe);
        }
        throw hdpException;

    } catch (IllegalArgumentException iae) {
        String msgDesc = "Unable to get listing of files for directory [" + baseDir
                + "] from Hadoop environment [" + getSerivceName() + "].";
        HadoopException hdpException = new HadoopException(msgDesc, iae);
        hdpException.generateResponseDataMap(false, getMessage(iae), msgDesc + errMsg, null, null);
        if (LOG.isDebugEnabled()) {
            LOG.debug("<== HdfsClient listFilesInternal Error : " + iae);
        }
        throw hdpException;
    }
    return fileList;
}