Example usage for java.util Collections list

List of usage examples for java.util Collections list

Introduction

On this page you can find usage examples for java.util.Collections.list.

Prototype

public static <T> ArrayList<T> list(Enumeration<T> e) 

Source Link

Document

Returns an array list containing the elements returned by the specified enumeration in the order they are returned by the enumeration.

Usage

From source file:org.eclipse.smarthome.test.SyntheticBundleInstaller.java

/**
 * Collects the URLs of all bundle entries under {@code path} that match any of
 * the given file patterns, searching recursively.
 */
private static List<URL> collectEntries(Bundle bundle, String path, Set<String> extensionsToInclude) {
    List<URL> collected = new ArrayList<>();
    for (String pattern : extensionsToInclude) {
        Enumeration<URL> found = bundle.findEntries(path, pattern, true);
        // findEntries returns null (not an empty enumeration) when nothing matches.
        if (found != null) {
            collected.addAll(Collections.list(found));
        }
    }
    return collected;
}

From source file:de.alpharogroup.lang.ClassUtils.java

/**
 * Gets a list with urls from the given path for all resources.
 *
 * @param path//w  ww.  j  av  a2  s .c om
 *            The base path.
 * @return The resources.
 * @throws IOException
 *             Signals that an I/O exception has occurred.
 */
/**
 * Resolves every classpath resource found under the given base path.
 *
 * @param path
 *            the base path to look up.
 * @return the URLs of all matching resources, in classloader order.
 * @throws IOException
 *             if an I/O error occurs during resource lookup.
 */
public static List<URL> getResources(final String path) throws IOException {
    return Collections.list(ClassUtils.getClassLoader().getResources(path));
}

From source file:org.springframework.cloud.sleuth.instrument.web.TraceFilter.java

/** Override to add annotations not defined in {@link TraceKeys}. */
/**
 * Override to add annotations not defined in {@link TraceKeys}.
 *
 * <p>Tags the span with the full URL, server name, path and method, then adds
 * one tag per configured HTTP header that is present on the request.
 */
protected void addRequestTags(Span span, HttpServletRequest request) {
    String uri = this.urlPathHelper.getPathWithinApplication(request);
    this.httpTraceKeysInjector.addRequestTags(span, getFullUrl(request), request.getServerName(), uri,
            request.getMethod());
    for (String name : this.traceKeys.getHttp().getHeaders()) {
        Enumeration<String> values = request.getHeaders(name);
        if (values.hasMoreElements()) {
            // Use Locale.ROOT so the tag key is not corrupted by locale-sensitive
            // case mapping (e.g. the Turkish dotless-i) on non-English JVMs.
            String key = this.traceKeys.getHttp().getPrefix() + name.toLowerCase(java.util.Locale.ROOT);
            ArrayList<String> list = Collections.list(values);
            // A single value is tagged verbatim; multiple values are quoted
            // and joined with commas.
            String value = list.size() == 1 ? list.get(0)
                    : StringUtils.collectionToDelimitedString(list, ",", "'", "'");
            this.httpTraceKeysInjector.tagSpan(span, key, value);
        }
    }
}

From source file:com.hichinaschool.flashcards.libanki.Media.java

/**
 * Extract zip data./* w ww . j a v a2  s  .  com*/
 * 
 * @param zipData An input stream that represents a zipped file.
 * @return True if finished.
 */
/**
 * Extract zip data.
 *
 * @param zipData An input stream that represents a zipped file.
 * @return True if finished.
 */
public boolean syncAdd(File zipData) {
    boolean finished = false;
    ZipFile z = null;
    ArrayList<Object[]> media = new ArrayList<Object[]>();
    long sizecnt = 0;
    JSONObject meta = null;
    int nextUsn = 0;
    try {
        try {
            z = new ZipFile(zipData, ZipFile.OPEN_READ);
            // get meta info first
            ZipEntry metaEntry = z.getEntry("_meta");
            meta = new JSONObject(Utils.convertStreamToString(z.getInputStream(metaEntry)));
            ZipEntry usnEntry = z.getEntry("_usn");
            String usnstr = Utils.convertStreamToString(z.getInputStream(usnEntry));
            nextUsn = Integer.parseInt(usnstr);
        } catch (JSONException e) {
            throw new RuntimeException(e);
        } catch (ZipException e) {
            throw new RuntimeException(e);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }

        // Then loop through all files
        for (ZipEntry zentry : Collections.list(z.entries())) {
            // Check for zip bombs
            sizecnt += zentry.getSize();
            if (sizecnt > 100 * 1024 * 1024) {
                Log.e(AnkiDroidApp.TAG, "Media zip file exceeds 100MB uncompressed, aborting unzipping");
                return false;
            }
            if (zentry.getName().compareTo("_meta") == 0 || zentry.getName().compareTo("_usn") == 0) {
                // Ignore previously retrieved meta
                continue;
            } else if (zentry.getName().compareTo("_finished") == 0) {
                finished = true;
            } else {
                // The zip entry name is a server-side key; the real file name
                // comes from the meta map. Skip anything with an illegal name.
                String name = meta.optString(zentry.getName());
                if (illegal(name)) {
                    continue;
                }
                String path = getDir().concat(File.separator).concat(name);
                try {
                    Utils.writeToFile(z.getInputStream(zentry), path);
                } catch (IOException e) {
                    throw new RuntimeException(e);
                }
                String csum = Utils.fileChecksum(path);
                // append db
                media.add(new Object[] { name, csum, _mtime(name) });
                mMediaDb.execute("delete from log where fname = ?", new String[] { name });
            }
        }
    } finally {
        // Fix: the ZipFile was previously leaked on every exit path (early
        // return on zip bombs, thrown RuntimeExceptions, and normal return).
        if (z != null) {
            try {
                z.close();
            } catch (IOException e) {
                // Best-effort close; nothing useful can be done on failure.
            }
        }
    }

    // update media db and note new starting usn
    if (!media.isEmpty()) {
        mMediaDb.executeMany("insert or replace into media values (?,?,?)", media);
    }
    setUsn(nextUsn); // commits
    // if we have finished adding, we need to record the new folder mtime
    // so that we don't trigger a needless scan
    if (finished) {
        syncMod();
    }
    return finished;
}

From source file:org.apache.hadoop.hive.ql.exec.ExecDriver.java

/**
 * Child-JVM entry point for executing a serialized Hive plan.
 *
 * Parses -plan/-jobconf/-nolog/-files/-localtask arguments, configures
 * logging and the classpath, deserializes the plan file and executes it,
 * exiting with a non-zero status on failure.
 */
public static void main(String[] args) throws IOException, HiveException {

    // Parsed command-line state.
    String planFileName = null;
    ArrayList<String> jobConfArgs = new ArrayList<String>();
    boolean noLog = false;
    String files = null;
    boolean localtask = false;
    try {
        // Options that take a value consume args[++i]; an option appearing
        // last with no value triggers the IndexOutOfBoundsException below.
        for (int i = 0; i < args.length; i++) {
            if (args[i].equals("-plan")) {
                planFileName = args[++i];
            } else if (args[i].equals("-jobconf")) {
                jobConfArgs.add(args[++i]);
            } else if (args[i].equals("-nolog")) {
                noLog = true;
            } else if (args[i].equals("-files")) {
                files = args[++i];
            } else if (args[i].equals("-localtask")) {
                localtask = true;
            }
        }
    } catch (IndexOutOfBoundsException e) {
        System.err.println("Missing argument to option");
        printUsage();
    }

    JobConf conf;
    if (localtask) {
        conf = new JobConf(MapredLocalTask.class);
    } else {
        conf = new JobConf(ExecDriver.class);
    }
    StringBuilder sb = new StringBuilder("JobConf:\n");

    // Each -jobconf argument is a URL-encoded key=value pair; apply it to
    // conf and accumulate a printable summary in sb for logging below.
    for (String one : jobConfArgs) {
        int eqIndex = one.indexOf('=');
        if (eqIndex != -1) {
            try {
                String key = one.substring(0, eqIndex);
                String value = URLDecoder.decode(one.substring(eqIndex + 1), "UTF-8");
                conf.set(key, value);
                sb.append(key).append("=").append(value).append("\n");
            } catch (UnsupportedEncodingException e) {
                System.err.println(
                        "Unexpected error " + e.getMessage() + " while encoding " + one.substring(eqIndex + 1));
                System.exit(3);
            }
        }
    }

    if (files != null) {
        conf.set("tmpfiles", files);
    }

    boolean isSilent = HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVESESSIONSILENT);

    if (noLog) {
        // If started from main(), and noLog is on, we should not output
        // any logs. To turn the log on, please set -Dtest.silent=false
        BasicConfigurator.resetConfiguration();
        BasicConfigurator.configure(new NullAppender());
    } else {
        setupChildLog4j(conf);
    }

    Log LOG = LogFactory.getLog(ExecDriver.class.getName());
    LogHelper console = new LogHelper(LOG, isSilent);

    // NOTE(review): execution continues after this check — presumably
    // printUsage() exits the JVM; confirm, otherwise planFileName is null below.
    if (planFileName == null) {
        console.printError("Must specify Plan File Name");
        printUsage();
    }

    // print out the location of the log file for the user so
    // that it's easy to find reason for local mode execution failures
    for (Appender appender : Collections
            .list((Enumeration<Appender>) LogManager.getRootLogger().getAllAppenders())) {
        if (appender instanceof FileAppender) {
            console.printInfo("Execution log at: " + ((FileAppender) appender).getFile());
        }
    }

    // log the list of job conf parameters for reference
    LOG.info(sb.toString());

    // the plan file should always be in local directory
    Path p = new Path(planFileName);
    FileSystem fs = FileSystem.getLocal(conf);
    InputStream pathData = fs.open(p);

    // this is workaround for hadoop-17 - libjars are not added to classpath of the
    // child process. so we add it here explicitly

    String auxJars = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEAUXJARS);
    String addedJars = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEADDEDJARS);
    try {
        // see also - code in CliDriver.java
        ClassLoader loader = conf.getClassLoader();
        if (StringUtils.isNotBlank(auxJars)) {
            loader = Utilities.addToClassPath(loader, StringUtils.split(auxJars, ","));
        }
        if (StringUtils.isNotBlank(addedJars)) {
            loader = Utilities.addToClassPath(loader, StringUtils.split(addedJars, ","));
        }
        conf.setClassLoader(loader);
        // Also set this to the Thread ContextClassLoader, so new threads will
        // inherit
        // this class loader, and propagate into newly created Configurations by
        // those
        // new threads.
        Thread.currentThread().setContextClassLoader(loader);
    } catch (Exception e) {
        throw new HiveException(e.getMessage(), e);
    }
    int ret;
    // Deserialize the plan from pathData and execute it in-process:
    // -localtask selects the local map-side path, otherwise a full MR driver.
    if (localtask) {
        memoryMXBean = ManagementFactory.getMemoryMXBean();
        MapredLocalWork plan = Utilities.deserializeMapRedLocalWork(pathData, conf);
        MapredLocalTask ed = new MapredLocalTask(plan, conf, isSilent);
        ret = ed.executeFromChildJVM(new DriverContext());

    } else {
        MapredWork plan = Utilities.deserializeMapRedWork(pathData, conf);
        ExecDriver ed = new ExecDriver(plan, conf, isSilent);
        ret = ed.execute(new DriverContext());
    }

    // Propagate failure to the parent process via the exit status.
    if (ret != 0) {
        System.exit(2);
    }
}

From source file:ezbake.security.client.EzbakeSecurityClient.java

/**
 * Attempt to read the proxy princiapl from the HTTP headers.
 *
 * @param request the servlet request for the current request
 * @return the proxy principal that was contained in the HTTP headers
 * @throws EzSecurityTokenException/*from  w  ww  .j ava2  s. c om*/
 */
/**
 * Attempt to read the proxy principal from the HTTP headers.
 *
 * @param request the servlet request for the current request
 * @return the proxy principal that was contained in the HTTP headers
 * @throws EzSecurityTokenException if the principal cannot be read from the headers
 */
public ProxyPrincipal requestPrincipalFromRequest(HttpServletRequest request) throws EzSecurityTokenException {
    if (log.isTraceEnabled()) {
        Enumeration<String> headers = request.getHeaderNames();
        while (headers.hasMoreElements()) {
            String headerName = headers.nextElement();
            log.trace("Header: " + headerName + " = " + request.getHeader(headerName));
        }
    }

    // Convert the headers into a map keyed by the upper-cased header name.
    // Fix: the lookup previously used containsKey(headerName) while the store
    // used put(headerName.toUpperCase()), so the two keys never matched for
    // mixed-case names and repeated headers overwrote each other instead of
    // accumulating into the list.
    Map<String, List<String>> headers = new HashMap<>();
    for (String headerName : Collections.list(request.getHeaderNames())) {
        String key = headerName.toUpperCase();
        if (headers.containsKey(key)) {
            headers.get(key).add(request.getHeader(headerName));
        } else {
            headers.put(key, Lists.newArrayList(request.getHeader(headerName)));
        }
    }

    return requestPrincipalFromRequest(headers);
}

From source file:org.apache.hadoop.net.DNS.java

/**
 * Returns all the IPs associated with the provided interface, if any, as
 * a list of InetAddress objects.//from ww  w .  j  av  a  2  s.co m
 *
 * @param strInterface
 *            The name of the network interface or sub-interface to query
 *            (eg eth0 or eth0:0) or the string "default"
 * @param returnSubinterfaces
 *            Whether to return IPs associated with subinterfaces of
 *            the given interface
 * @return A list of all the IPs associated with the provided
 *         interface. The local host IP is returned if the interface
 *         name "default" is specified or there is an I/O error looking
 *         for the given interface.
 * @throws UnknownHostException
 *             If the given interface is invalid
 *
 */
/**
 * Returns all the IPs associated with the provided interface, if any, as
 * a list of InetAddress objects.
 *
 * @param strInterface
 *            The name of the network interface or sub-interface to query
 *            (eg eth0 or eth0:0) or the string "default"
 * @param returnSubinterfaces
 *            Whether to return IPs associated with subinterfaces of
 *            the given interface
 * @return A list of all the IPs associated with the provided
 *         interface. The local host IP is returned if the interface
 *         name "default" is specified or there is an I/O error looking
 *         for the given interface.
 * @throws UnknownHostException
 *             If the given interface is invalid
 *
 */
public static List<InetAddress> getIPsAsInetAddressList(String strInterface, boolean returnSubinterfaces)
        throws UnknownHostException {
    if ("default".equals(strInterface)) {
        return Arrays.asList(InetAddress.getByName(cachedHostAddress));
    }
    NetworkInterface iface;
    try {
        iface = NetworkInterface.getByName(strInterface);
        if (iface == null) {
            // Not a top-level interface name; try resolving it as a sub-interface.
            iface = getSubinterface(strInterface);
        }
    } catch (SocketException e) {
        LOG.warn("I/O error finding interface " + strInterface + ": " + e.getMessage());
        return Arrays.asList(InetAddress.getByName(cachedHostAddress));
    }
    if (iface == null) {
        throw new UnknownHostException("No such interface " + strInterface);
    }

    // NB: Using a LinkedHashSet to preserve the order for callers
    // that depend on a particular element being 1st in the array.
    // For example, getDefaultIP always returns the first element.
    LinkedHashSet<InetAddress> addrs = new LinkedHashSet<InetAddress>(
            Collections.list(iface.getInetAddresses()));
    if (!returnSubinterfaces) {
        addrs.removeAll(getSubinterfaceInetAddrs(iface));
    }
    return new Vector<InetAddress>(addrs);
}

From source file:org.apache.zeppelin.interpreter.InterpreterSettingManager.java

/**
 * Registers interpreters described by every copy of {@code interpreterJson}
 * found on the classpath built from {@code interpreterDir}'s libraries.
 *
 * @return true if at least one settings resource was found and processed,
 *         false if no such resource exists.
 */
private boolean registerInterpreterFromResource(ClassLoader cl, String interpreterDir, String interpreterJson)
        throws IOException, RepositoryException {
    URL[] libs = recursiveBuildLibList(new File(interpreterDir));
    ClassLoader scratchLoader = new URLClassLoader(libs, cl);

    Enumeration<URL> found = scratchLoader.getResources(interpreterJson);
    if (!found.hasMoreElements()) {
        return false;
    }
    for (URL settingsUrl : Collections.list(found)) {
        try (InputStream in = settingsUrl.openStream()) {
            logger.debug("Reading {} from {}", interpreterJson, settingsUrl);
            registerInterpreters(getInterpreterListFromJson(in), interpreterDir);
        }
    }
    return true;
}