Example usage for java.util LinkedList isEmpty

List of usage examples for java.util LinkedList isEmpty

Introduction

On this page you can find usage examples for java.util.LinkedList.isEmpty().

Prototype

boolean isEmpty();

Document

Returns true if this list contains no elements.
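
As a quick, self-contained sketch (not taken from any of the projects below), the usual pattern is to use isEmpty() as the loop guard when draining a list, since removeFirst() would throw NoSuchElementException on an empty LinkedList:

import java.util.LinkedList;

public class IsEmptyDemo {
    public static void main(String[] args) {
        LinkedList<String> tasks = new LinkedList<String>();
        tasks.add("compile");
        tasks.add("test");
        tasks.add("package");

        // isEmpty() guards the drain loop; removeFirst() on an empty list would throw.
        while (!tasks.isEmpty()) {
            String task = tasks.removeFirst();
            System.out.println("processing " + task);
        }

        System.out.println("all done, isEmpty() = " + tasks.isEmpty()); // true
    }
}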

Usage

From source file:org.apache.axis2.deployment.util.Utils.java

/**
 * Normalize a uri containing ../ and ./ paths.
 *
 * @param uri The uri path to normalize
 * @return The normalized uri
 */
public static String normalize(String uri) {
    if ("".equals(uri)) {
        return uri;
    }
    int leadingSlashes;
    for (leadingSlashes = 0; leadingSlashes < uri.length()
            && uri.charAt(leadingSlashes) == '/'; ++leadingSlashes) {
        // intentionally empty: the loop header itself counts the leading slashes
    }
    boolean isDir = (uri.charAt(uri.length() - 1) == '/');
    StringTokenizer st = new StringTokenizer(uri, "/");
    LinkedList clean = new LinkedList();
    while (st.hasMoreTokens()) {
        String token = st.nextToken();
        if ("..".equals(token)) {
            if (!clean.isEmpty() && !"..".equals(clean.getLast())) {
                clean.removeLast();
                if (!st.hasMoreTokens()) {
                    isDir = true;
                }
            } else {
                clean.add("..");
            }
        } else if (!".".equals(token) && !"".equals(token)) {
            clean.add(token);
        }
    }
    StringBuffer sb = new StringBuffer();
    while (leadingSlashes-- > 0) {
        sb.append('/');
    }
    for (Iterator it = clean.iterator(); it.hasNext();) {
        sb.append(it.next());
        if (it.hasNext()) {
            sb.append('/');
        }
    }
    if (isDir && sb.length() > 0 && sb.charAt(sb.length() - 1) != '/') {
        sb.append('/');
    }
    return sb.toString();
}
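
A hypothetical call sketch for the method above, assuming the Axis2 class is on the classpath; the expected results are inferred from the code shown, not from Axis2 documentation:

import org.apache.axis2.deployment.util.Utils;

public class NormalizeDemo {
    public static void main(String[] args) {
        System.out.println(Utils.normalize("/a/b/../c/")); // "/a/c/"  ".." pops "b" because clean is not empty
        System.out.println(Utils.normalize("../x"));       // "../x"   clean.isEmpty() is true, so ".." is kept
        System.out.println(Utils.normalize("./a//b"));     // "a/b"    "." and empty segments are dropped
    }
}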

From source file:com.datatorrent.stram.webapp.OperatorDiscoverer.java

/**
 * Enrich portClassHier with class/interface names that map to a list of parent classes/interfaces.
 * For any class encountered, find its parents too.<br/>
 * Also find the port types which have assignable schema classes.
 *
 * @param oper                       Operator to work on
 * @param portClassHierarchy         In-Out param that contains a mapping of class/interface to its parents
 * @param portTypesWithSchemaClasses Json that will contain all the ports which have any schema classes.
 */
public void buildAdditionalPortInfo(JSONObject oper, JSONObject portClassHierarchy,
        JSONObject portTypesWithSchemaClasses) {
    try {
        JSONArray ports = oper.getJSONArray(OperatorDiscoverer.PORT_TYPE_INFO_KEY);
        for (int i = 0; i < ports.length(); i++) {
            JSONObject port = ports.getJSONObject(i);

            String portType = port.optString("type");
            if (portType == null) {
                //skipping if port type is null
                continue;
            }

            if (typeGraph.size() == 0) {
                buildTypeGraph();
            }

            try {
                //building port class hierarchy
                LinkedList<String> queue = Lists.newLinkedList();
                queue.add(portType);
                while (!queue.isEmpty()) {
                    String currentType = queue.remove();
                    if (portClassHierarchy.has(currentType)) {
                        //already present in the json so we skip.
                        continue;
                    }
                    List<String> immediateParents = typeGraph.getParents(currentType);
                    if (immediateParents == null) {
                        portClassHierarchy.put(currentType, Lists.<String>newArrayList());
                        continue;
                    }
                    portClassHierarchy.put(currentType, immediateParents);
                    queue.addAll(immediateParents);
                }
            } catch (JSONException e) {
                LOG.warn("building port type hierarchy {}", portType, e);
            }

            //finding port types with schema classes
            if (portTypesWithSchemaClasses.has(portType)) {
                //already present in the json so skipping
                continue;
            }
            if (portType.equals("byte") || portType.equals("short") || portType.equals("char")
                    || portType.equals("int") || portType.equals("long") || portType.equals("float")
                    || portType.equals("double") || portType.equals("java.lang.String")
                    || portType.equals("java.lang.Object")) {
                //ignoring primitives, strings and object types as this information is needed only for complex types.
                continue;
            }
            if (port.has("typeArgs")) {
                //ignoring any type with generics
                continue;
            }
            boolean hasSchemaClasses = false;
            List<String> instantiableDescendants = typeGraph.getInstantiableDescendants(portType);
            if (instantiableDescendants != null) {
                for (String descendant : instantiableDescendants) {
                    try {
                        if (typeGraph.isInstantiableBean(descendant)) {
                            hasSchemaClasses = true;
                            break;
                        }
                    } catch (JSONException ex) {
                        LOG.warn("checking descendant is instantiable {}", descendant);
                    }
                }
            }
            portTypesWithSchemaClasses.put(portType, hasSchemaClasses);
        }
    } catch (JSONException e) {
        // should not reach this
        LOG.error("JSON Exception {}", e);
        throw new RuntimeException(e);
    }
}
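
The isEmpty()-driven queue above is a plain breadth-first walk up a type hierarchy. A minimal sketch of the same pattern, using a hypothetical parent map in place of typeGraph.getParents():

import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;

public class HierarchyWalk {
    public static void main(String[] args) {
        // Hypothetical parent relation standing in for typeGraph.getParents(...).
        Map<String, List<String>> parents = new HashMap<String, List<String>>();
        parents.put("ArrayList", Arrays.asList("AbstractList", "List"));
        parents.put("AbstractList", Arrays.asList("AbstractCollection"));
        parents.put("List", Arrays.asList("Collection"));

        Set<String> seen = new HashSet<String>();
        LinkedList<String> queue = new LinkedList<String>();
        queue.add("ArrayList");
        while (!queue.isEmpty()) {               // same guard as in buildAdditionalPortInfo
            String current = queue.remove();
            if (!seen.add(current)) {
                continue;                        // already visited, mirrors portClassHierarchy.has(...)
            }
            List<String> up = parents.get(current);
            if (up != null) {
                queue.addAll(up);                // enqueue the parents for a later pass
            }
            System.out.println(current + " -> " + (up == null ? "[]" : up));
        }
    }
}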

From source file:com.multimedia.service.wallpaper.CmsWallpaperService.java

@Override
public void preUploadWallpapers(StatusBean usb) {
    File upload_dir = new File(wallpaper_service.getUploadPath());
    OnlyFilesFilter filenameFilter = new OnlyFilesFilter();
    usb.setDone(0);
    usb.setTotal(scanFolder(upload_dir));
    if (upload_dir.exists()) {
        boolean upload_made = true;
        int upload_count = 0;

        File pre_upload_directory = new File(wallpaper_service.getUploadPath(), "pre_upload");
        if (!pre_upload_directory.exists())
            pre_upload_directory.mkdir();

        File cur_dir = null;

        File description_file;
        boolean pre_uploaded;

        Long id_pages_cur;
        String page_name;

        LinkedList<File> files = new LinkedList<File>();
        files.addLast(upload_dir);

        while (!files.isEmpty()) {
            if (upload_made) {
                cur_dir = new File(pre_upload_directory, String.valueOf(upload_count));
                while (cur_dir.exists()) {
                    cur_dir = new File(pre_upload_directory, String.valueOf(upload_count));
                    upload_count++;
                }
                cur_dir.mkdir();
                Iterator<String> dimmensions = wallpaper_service.getDimmensions().keySet().iterator();
                while (dimmensions.hasNext()) {
                    String dimmension = dimmensions.next();
                    File pre_uploaded_dimm = new File(cur_dir, dimmension);
                    if (!pre_uploaded_dimm.exists())
                        pre_uploaded_dimm.mkdir();
                }
                upload_count++;
            }

            File f = files.removeLast();
            pre_uploaded = false;
            upload_made = false;
            //logger.debug("test file: '"+f.getAbsolutePath()+"'");
            if (f.isDirectory()) {
                id_pages_cur = null;
                page_name = null;
                //search for DESCRIPTION_FILE
                description_file = new File(f, DESCRIPTION_FILE);
                if (description_file.exists()) {
                    id_pages_cur = null;
                    try {
                        BufferedReader reader = new BufferedReader(
                                new InputStreamReader(new FileInputStream(description_file), "UTF-8"));
                        String line;
                        while ((line = reader.readLine()) != null) {
                            if (line.startsWith("id=")) {
                                id_pages_cur = Long.parseLong(line.substring(3), 10);
                            } else if (line.startsWith("name=")) {
                                page_name = line.substring(5);
                            } else if (line.startsWith("pre_uploaded=true")) {
                                // means that this folder contains subfolders with pre-uploaded images,
                                // i.e. the wallpapers are already resized and stored in the appropriate folders,
                                // but they still must be checked
                                pre_uploaded = true;
                            }
                        }
                    } catch (IOException ex) {
                        logger.error("", ex);
                    }
                }

                File[] files_temp = f.listFiles();

                for (File tmp : files_temp) {
                    if (tmp.isFile()) {
                        if (!tmp.getName().equals(DESCRIPTION_FILE) && id_pages_cur != null) {
                            usb.setCur_name(tmp.getAbsolutePath());
                            logger.debug("preparing upload file: '" + tmp.getAbsolutePath() + "'");

                            if (Utils.saveScaledImageFileToDisk(tmp, wallpaper_service.getDimmensions(),
                                    cur_dir)) {
                                tmp.delete();
                                usb.increaseDone(1);
                                upload_made = true;
                            }
                        } //else error
                    } else if (!pre_uploaded) {
                        files.addLast(tmp);
                    }
                }
                //create a description file
                if (upload_made) {
                    createDescriptionFile(cur_dir, id_pages_cur, page_name, true);
                    cur_dir = null;
                }
            }
        }
        if (cur_dir != null) {
            description_file = new File(cur_dir, DESCRIPTION_FILE);
            if (!description_file.exists())
                FileUtils.deleteFiles(cur_dir, true);
        }
    }
}
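
Here the LinkedList serves as a work stack (addLast/removeLast) over a directory tree. A stripped-down sketch of just that traversal, assuming any readable starting directory:

import java.io.File;
import java.util.LinkedList;

public class DirWalk {
    public static void main(String[] args) {
        LinkedList<File> work = new LinkedList<File>();
        work.addLast(new File("."));             // hypothetical starting point

        while (!work.isEmpty()) {                // same guard as in preUploadWallpapers
            File current = work.removeLast();
            File[] entries = current.listFiles();
            if (entries == null) {
                continue;                        // not a directory, or not readable
            }
            for (File entry : entries) {
                if (entry.isDirectory()) {
                    work.addLast(entry);         // push the subdirectory for a later pass
                } else {
                    System.out.println(entry.getPath());
                }
            }
        }
    }
}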

From source file:com.jaspersoft.jasperserver.export.modules.repository.ResourceImporter.java

protected void createPrependFolder() {
    if (prependPath != null) {
        LinkedList toCreateURIs = new LinkedList();
        for (String path = prependPath; repository.getFolder(executionContext,
                path) == null; path = PathUtils.splitPath(path).parentPath) {
            toCreateURIs.addFirst(path);
        }

        while (!toCreateURIs.isEmpty()) {
            String path = (String) toCreateURIs.removeFirst();
            Folder folder = createFolder(path);

            commandOut.debug("About to save folder " + path);
            try {
                repository.saveFolder(executionContext, folder);
            } catch (SpringSecurityException er) {
                this.updateSecuredResource(executionContext, folder);
            }
        }
    }
}
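
The addFirst/removeFirst pairing above makes the missing ancestor folders come out in root-to-leaf order. A compact sketch of that ordering trick, with a hypothetical set standing in for the repository:

import java.util.HashSet;
import java.util.LinkedList;
import java.util.Set;

public class AncestorOrder {
    public static void main(String[] args) {
        Set<String> existing = new HashSet<String>();
        existing.add("/a");                      // pretend only "/a" exists

        String target = "/a/b/c/d";
        LinkedList<String> toCreate = new LinkedList<String>();
        for (String path = target; !existing.contains(path); path = parent(path)) {
            toCreate.addFirst(path);             // deepest path ends up last
        }
        while (!toCreate.isEmpty()) {            // same guard as in createPrependFolder
            String path = toCreate.removeFirst();
            System.out.println("create folder " + path); // /a/b, /a/b/c, /a/b/c/d
            existing.add(path);
        }
    }

    private static String parent(String path) {
        int idx = path.lastIndexOf('/');
        return idx <= 0 ? "/" : path.substring(0, idx);
    }
}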

From source file:io.hops.transaction.lock.INodeLock.java

private List<INode> findChildrenRecursively(INode lastINode)
        throws StorageException, TransactionContextException {
    LinkedList<INode> children = new LinkedList<>();
    LinkedList<INode> unCheckedDirs = new LinkedList<>();
    if (lastINode != null) {
        if (lastINode instanceof INodeDirectory) {
            unCheckedDirs.add(lastINode);
        }
    }

    // Find all the children in the sub-directories.
    while (!unCheckedDirs.isEmpty()) {
        INode next = unCheckedDirs.poll();
        if (next instanceof INodeDirectory) {
            setINodeLockType(TransactionLockTypes.INodeLockType.READ_COMMITTED); //locking the parent is sufficient
            List<INode> clist = ((INodeDirectory) next).getChildrenList();
            unCheckedDirs.addAll(clist);
            children.addAll(clist);
        }
    }
    LOG.debug("Added " + children.size() + " children.");
    return children;
}

From source file:gsn.http.DataDownload.java

/**
 * List of the parameters for the requests:
 * url : /data
 * Example: Getting all the data in CSV format => http://localhost:22001/data?vsName=memoryusage4&fields=heap&display=CSV
 * another example: http://localhost:22001/data?vsName=memoryusage4&fields=heap&fields=timed&display=CSV&delimiter=other&otherdelimiter=,
 * <p/>
 * param-name: vsName : the name of the virtual sensor we need.
 * param-name: fields [there can be multiple parameters with this name pointing to different fields in the stream element].
 * param-name: commonReq (always true !)
 * param-name: display , if there is a value it should be CSV.
 * param-name: delimiter, useful for CSV output (can be "tab","space","other")
 * param-name: otherdelimiter useful in the case of having delimiter=other
 * param-name: groupby can point to one of the fields in the stream element. In case groupby=timed then the parameter groupbytimed points to the period for which data should be aggregated [in milliseconds].
 * param-name: nb gives the maximum number of elements to be output (most recent values first).
 * param-name:
 */
public void doPost(HttpServletRequest req, HttpServletResponse res)
        throws ServletException, java.io.IOException {

    //
    HttpSession session = req.getSession();
    User user = (User) session.getAttribute("user");

    res.setHeader("Cache-Control", "no-store");
    res.setDateHeader("Expires", 0);
    res.setHeader("Pragma", "no-cache");
    //

    PrintWriter respond = res.getWriter();
    DataEnumerator result = null;
    try {
        SimpleDateFormat sdf = new SimpleDateFormat(Main.getInstance().getContainerConfig().getTimeFormat());
        SimpleDateFormat sdf_from_ui = new SimpleDateFormat("dd/MM/yyyy HH:mm:ss");
        TimeZone timeZone = GregorianCalendar.getInstance().getTimeZone();
        boolean responseCVS = false;
        boolean wantTimeStamp = false;
        boolean wantPk = false;
        boolean commonReq = true;
        boolean groupByTimed = false;

        String vsName = HttpRequestUtils.getStringParameter("vsName", null, req);
        if (vsName == null)
            vsName = HttpRequestUtils.getStringParameter("vsname", null, req);
        if (vsName == null) {
            res.sendError(WebConstants.MISSING_VSNAME_ERROR, "The virtual sensor name is missing");
            return;
        }

        DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss");
        Date currentDate = Calendar.getInstance().getTime();
        String filename = vsName + "_" + dateFormat.format(currentDate);

        if (Main.getContainerConfig().isAcEnabled() == true) {
            if (user != null) // meaning, that a login session is active, otherwise we couldn't get there
                if (user.hasReadAccessRight(vsName) == false && user.isAdmin() == false) // ACCESS_DENIED
                {
                    res.sendError(WebConstants.ACCESS_DENIED,
                            "Access denied to the specified virtual sensor .");
                    return;
                }
        }

        if (req.getParameter("display") != null && req.getParameter("display").equals("CSV")) {
            responseCVS = true;
            res.setContentType("text/csv");
            //res.setContentType("text/html");
        } else {
            res.setContentType("text/xml");
        }
        if (req.getParameter("commonReq") != null && req.getParameter("commonReq").equals("false")) {
            commonReq = false;
        }
        String separator = ";";
        if (req.getParameter("delimiter") != null && !req.getParameter("delimiter").equals("")) {
            String reqSeparator = req.getParameter("delimiter");
            if (reqSeparator.equals("tab")) {
                separator = "\t";
            } else if (reqSeparator.equals("space")) {
                separator = " ";
            } else if (reqSeparator.equals("other") && req.getParameter("otherdelimiter") != null
                    && !req.getParameter("otherdelimiter").equals("")) {
                separator = req.getParameter("otherdelimiter");
            }
        }
        String generated_request_query = "";
        String expression = "";
        String line = "";
        String groupby = "";
        String[] fields = req.getParameterValues("fields");
        if (commonReq) {
            if (req.getParameter("fields") != null) {
                for (int i = 0; i < fields.length; i++) {
                    if (fields[i].equals("timed")) {
                        wantTimeStamp = true;
                    }
                    if ("pk".equalsIgnoreCase(fields[i]))
                        wantPk = true;
                    generated_request_query += ", " + fields[i];
                }
                if (!wantPk)
                    generated_request_query += ", pk";
            }
        } else {
            if (req.getParameter("fields") == null) {
                respond.println("Request ERROR");
                return;
            } else {
                for (int i = 0; i < fields.length; i++) {
                    if (fields[i].equals("timed")) {
                        wantTimeStamp = true;
                    }
                    if ("pk".equalsIgnoreCase(fields[i]))
                        wantPk = true;
                    generated_request_query += ", " + fields[i];
                }
                if (!wantPk)
                    generated_request_query += ", pk";
            }
            if (req.getParameter("groupby") != null) {
                if (req.getParameter("groupby").equals("timed")) {
                    groupByTimed = true;
                    int periodmeasure = 1;
                    if (req.getParameter("groupbytimed") != null) {
                        periodmeasure = new Integer(req.getParameter("groupbytimed"));
                        periodmeasure = java.lang.Math.max(periodmeasure, 1);
                    }
                    generated_request_query += ", Min(timed), FLOOR(timed/" + periodmeasure + ") period ";
                    groupby = "GROUP BY period";
                } else {
                    groupby = "GROUP BY " + req.getParameter("groupby");
                }
            }
        }

        String where = "";
        if (req.getParameter("critfield") != null) {
            try {
                String[] critJoin = req.getParameterValues("critJoin");
                String[] neg = req.getParameterValues("neg");
                String[] critfields = req.getParameterValues("critfield");
                String[] critop = req.getParameterValues("critop");
                String[] critval = req.getParameterValues("critval");
                for (int i = 0; i < critfields.length; i++) {
                    if (critop[i].equals("LIKE")) {
                        if (i > 0) {
                            where += " " + critJoin[i - 1] + " " + neg[i] + " " + critfields[i] + " LIKE '%"; // + critval[i] + "%'";
                        } else {
                            where += neg[i] + " " + critfields[i] + " LIKE '%"; // + critval[i] + "%'";
                        }
                        if (critfields[i].equals("timed")) {
                            try {
                                //Date d = sdf.parse(critval[i]);
                                Date d = sdf_from_ui.parse(critval[i]);
                                where += d.getTime();
                            } catch (Exception e) {
                                where += "0";
                            }
                        } else {
                            where += critval[i];
                        }
                        where += "%'";
                    } else {
                        if (i > 0) {
                            where += " " + critJoin[i - 1] + " " + neg[i] + " " + critfields[i] + " "
                                    + critop[i] + " "; //critval[i];
                        } else {
                            where += neg[i] + " " + critfields[i] + " " + critop[i] + " "; //critval[i];
                        }
                        if (critfields[i].equals("timed")) {
                            try {
                                //Date d = sdf.parse(critval[i]);
                                Date d = sdf_from_ui.parse(critval[i]);
                                where += d.getTime();
                            } catch (Exception e) {
                                where += "0";
                            }
                        } else {
                            where += critval[i];
                        }
                    }
                }
                where = " WHERE " + where;
            } catch (NullPointerException npe) {
                where = " ";
            }
        }

        if (!generated_request_query.equals("")) {
            generated_request_query = generated_request_query.substring(2);
            if (!commonReq) {
                expression = generated_request_query;
            }
            generated_request_query = "select " + generated_request_query + " from " + vsName + where
                    + "  order by timed DESC  ";
            if (commonReq)
                if (req.getParameter("nb") != null && req.getParameter("nb") != "") {
                    int nb = new Integer(req.getParameter("nb"));
                    if (nb < 0)
                        nb = 0;
                    String limit = "";
                    if (Main.getStorage(vsName).isH2() || Main.getStorage(vsName).isMysqlDB()) {
                        if (nb >= 0)
                            limit = "LIMIT " + nb + "  offset 0";
                        generated_request_query += limit;
                    } else if (Main.getStorage(vsName).isOracle()) {
                        generated_request_query = "select * from (" + generated_request_query
                                + " ) where rownum <" + (nb + 1);
                    }
                }

            generated_request_query += " " + groupby;
            generated_request_query += ";";

            if (req.getParameter("sql") != null) {
                res.setContentType("text/html");
                respond.println("# " + generated_request_query);
                return;
            }

            try {
                result = Main.getStorage(vsName).streamedExecuteQuery(generated_request_query, true);
            } catch (SQLException e) {
                logger.error("ERROR IN EXECUTING, query: " + generated_request_query + " from "
                        + req.getRemoteAddr() + "- " + req.getRemoteHost() + ": " + e.getMessage());
                return;
            }
            if (!result.hasMoreElements()) {
                res.setContentType("text/html");
                respond.println("No data corresponds to your request");
                return;
            }

            //get units in hash map
            Iterator<VSensorConfig> vsIterator = Mappings.getAllVSensorConfigs();
            HashMap<String, String> fieldToUnitMap = new HashMap<String, String>();
            VSensorConfig sensorConfig = null;
            while (vsIterator.hasNext()) {
                VSensorConfig senConfig = vsIterator.next();
                if (vsName.equalsIgnoreCase(senConfig.getName())) {
                    sensorConfig = senConfig;
                    DataField[] dataFieldArray = senConfig.getOutputStructure();
                    for (DataField df : dataFieldArray) {
                        String unit = df.getUnit();
                        if (unit == null || unit.trim().length() == 0)
                            unit = "";

                        fieldToUnitMap.put(df.getName().toLowerCase(), unit);
                    }
                    break;
                }
            }

            line = "";
            int nbFields = 0;
            if (responseCVS) {
                boolean firstLine = true;
                res.setHeader("content-disposition", "attachment; filename=" + filename + ".csv");
                respond.println("# " + generated_request_query);
                for (KeyValue df : sensorConfig.getAddressing()) {
                    respond.println(
                            "# " + df.getKey().toString().toLowerCase() + ":" + df.getValue().toString());
                }
                respond.println("# description:" + sensorConfig.getDescription());
                LinkedList<StreamElement> streamElements = new LinkedList<StreamElement>();
                while (result.hasMoreElements()) {
                    streamElements.add(result.nextElement());
                }
                while (!streamElements.isEmpty()) {
                    StreamElement se = streamElements.removeLast();
                    if (firstLine) {
                        nbFields = se.getFieldNames().length;
                        if (groupByTimed) {
                            nbFields--;
                        }
                        if (wantTimeStamp) {
                            line += separator + "time";
                        }
                        for (int i = 0; i < nbFields; i++)
                            //line += delimiter + se.getFieldNames()[i].toString();
                            if ((!groupByTimed) || (i != fields.length)) {
                                line += separator + fields[i];
                            } else {
                                line += separator + "time";
                            }

                        firstLine = false;
                        respond.println(line.substring(separator.length()));

                        line = "";

                        //units (second line)
                        if (wantTimeStamp) {
                            line += separator + "";
                        }
                        for (int i = 0; i < nbFields; i++) {
                            if ((!groupByTimed) || (i != fields.length)) {
                                line += separator + fieldToUnitMap.get(fields[i].toLowerCase());
                            } else {
                                line += separator + "";
                            }
                        }
                        respond.println(line.substring(separator.length()));
                    }

                    line = "";
                    if (wantTimeStamp) {
                        Date d = new Date(se.getTimeStamp());
                        line += separator + sdf.format(d);
                    }
                    for (int i = 0; i < nbFields; i++)
                        //line += delimiter+se.getData( )[ i ].toString( );

                        if (!commonReq && ((i >= fields.length) || (fields[i].contains("timed")))) {
                            line += separator + sdf.format(se.getData()[i]);
                        } else {
                            line += separator + se.getData()[i].toString();
                        }
                    respond.println(line.substring(separator.length()));
                }
            } else {
                boolean firstLine = true;
                res.setHeader("content-disposition", "attachment; filename=" + filename + ".xml");
                for (KeyValue df : sensorConfig.getAddressing()) {
                    respond.println(
                            "\t<!-- " + StringEscapeUtils.escapeXml(df.getKey().toString().toLowerCase()) + ":"
                                    + StringEscapeUtils.escapeXml(df.getValue().toString()) + " -->");
                }
                respond.println("\t<!-- description:"
                        + StringEscapeUtils.escapeXml(sensorConfig.getDescription()) + " -->");
                respond.println("<data>");
                LinkedList<StreamElement> streamElements = new LinkedList<StreamElement>();
                while (result.hasMoreElements()) {
                    streamElements.add(result.nextElement());
                }
                while (!streamElements.isEmpty()) {
                    StreamElement se = streamElements.removeLast();
                    if (firstLine) {
                        respond.println("\t<line>");
                        nbFields = se.getFieldNames().length;
                        if (groupByTimed) {
                            nbFields--;
                        }
                        if (wantTimeStamp) {
                            respond.println("\t\t<field unit=\"\">time</field>");
                        }
                        for (int i = 0; i < nbFields; i++) {
                            if ((!groupByTimed) || (i != fields.length)) {
                                respond.print(
                                        "\t\t<field unit=\"" + fieldToUnitMap.get(fields[i].toLowerCase()));
                                respond.println("\">" + fields[i] + "</field>");
                            } else {
                                respond.println("\t\t<field unit=\"\">time</field>");
                            }
                        }
                        //} else {
                        //    out.println("\t\t<field>"+expression+"</field>");
                        //}
                        respond.println("\t</line>");
                        firstLine = false;
                    }
                    line = "";
                    respond.println("\t<line>");
                    if (wantTimeStamp) {
                        Date d = new Date(se.getTimeStamp());
                        respond.println("\t\t<field>" + sdf.format(d) + "</field>");
                    }
                    for (int i = 0; i < nbFields; i++) {

                        //if ( !commonReq && expression.contains("timed")) {
                        if (!commonReq && ((i >= fields.length) || (fields[i].contains("timed")))) {
                            respond.println("\t\t<field>" + sdf.format(se.getData()[i]) + "</field>");
                        } else {
                            if (se.getData()[i] == null)
                                respond.println("\t\t<field>Null</field>");
                            else
                                respond.println("\t\t<field>" + se.getData()[i].toString() + "</field>");
                        }
                    }
                    respond.println("\t</line>");
                }
                respond.println("</data>");
            }
        }
        //*/
        else {
            res.setContentType("text/html");
            respond.println("Please select some fields");
        }
    } finally {
        if (result != null)
            result.close();
        respond.flush();
    }
}
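
One detail worth isolating from the CSV and XML loops above: the query returns rows newest-first, the rows are buffered in a LinkedList, and removeLast() inside a !isEmpty() loop emits them oldest-first. A hypothetical stand-alone sketch of that reversal:

import java.util.LinkedList;

public class ReverseDrain {
    public static void main(String[] args) {
        // Rows as delivered by the query: newest first (hypothetical values).
        LinkedList<String> newestFirst = new LinkedList<String>();
        newestFirst.add("row@12:00");
        newestFirst.add("row@11:00");
        newestFirst.add("row@10:00");

        // Draining from the tail reverses the order without an explicit sort.
        while (!newestFirst.isEmpty()) {
            System.out.println(newestFirst.removeLast()); // row@10:00, row@11:00, row@12:00
        }
    }
}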

From source file:net.sourceforge.sqlexplorer.service.SqlexplorerService.java

/**
 *
 * Load the driver by lib management system , which will configure the SQL Explorer driver classpath from xml.
 * 
 * @param dbType
 * @param dbVersion
 * @param driverClassName
 * @param userName the userName is used for a special case: MSSQL with an empty userName.
 */
private void loadManagedDriver(DatabaseConnection dbConn) {
    String dbType = dbConn.getDatabaseType();
    String dbVersion = dbConn.getDbVersionString();
    String driverClassName = JavaSqlFactory.getDriverClass(dbConn);
    if (dbType == null || driverClassName == null) {
        return;
    }
    DriverManager driverManager = SQLExplorerPlugin.getDefault().getDriverModel();
    AliasAndManaDriverHelper aliasManaHelper = AliasAndManaDriverHelper.getInstance();
    String manaDriverId = aliasManaHelper.joinManagedDriverId(dbType, driverClassName, dbVersion);
    ManagedDriver manDr = driverManager.getDriver(manaDriverId);
    if (manDr != null && !manDr.isDriverClassLoaded()) {
        // find driver jars from 'temp\dbWizard', the preference page, or the installation path 'lib\java',
        // "librariesIndex.xml".
        try {
            List<String> jarNames = EDatabaseVersion4Drivers.getDrivers(dbType, dbVersion);
            LinkedList<String> driverJarRealPaths = aliasManaHelper.getDriverJarRealPaths(jarNames);
            if (!driverJarRealPaths.isEmpty()) {
                manDr.getJars().clear();
                manDr.getJars().addAll(driverJarRealPaths);
            }

            manDr.registerSQLDriver(dbConn);
        } catch (Exception e) {
            log.error(e);
        }
    }
}

From source file:com.act.lcms.v2.TraceIndexExtractor.java

/**
 * Initiate a data feast of all traces within some window allocation.  OM NOM NOM.
 * @param iter An iterator over an LCMS data file.
 * @return The windows, time points, and per-window traces.
 */
private IndexedTraces runSweepLine(List<Double> targetMZs, Iterator<LCMSSpectrum> iter)
        throws RocksDBException, IOException {
    // Create windows for sweep-linin'.
    List<MZWindow> windows = new ArrayList<MZWindow>() {
        {
            int i = 0;
            for (Double targetMZ : targetMZs) {
                add(new MZWindow(i, targetMZ));
                i++;
            }
        }
    };

    /* We *must* ensure the windows are sorted in m/z order for the sweep line to work.  However, we don't know anything
     * about the input targetMZs list, which may be immutable or may be in some order the client wants to preserve.
     * Rather than mess with that array, we'll sort the windows in our internal array and leave the client's targets alone.
     */
    Collections.sort(windows, (a, b) -> a.getTargetMZ().compareTo(b.getTargetMZ()));

    List<Double> times = new ArrayList<>();

    List<List<Double>> allTraces = new ArrayList<List<Double>>(windows.size()) {
        {
            for (int i = 0; i < windows.size(); i++) {
                add(new ArrayList<>());
            }
        }
    };

    // Keep an array of accumulators around to reduce the overhead of accessing the trace matrix for accumulation.
    double[] sumIntensitiesInEachWindow = new double[windows.size()];

    int timepointCounter = 0;
    while (iter.hasNext()) {
        LCMSSpectrum spectrum = iter.next();
        Double time = spectrum.getTimeVal();

        // Store one list of the time values so we can knit times and intensity sums later to form XZs.
        times.add(time);

        for (int i = 0; i < sumIntensitiesInEachWindow.length; i++) {
            sumIntensitiesInEachWindow[i] = 0.0;
        }

        timepointCounter++;

        if (timepointCounter % 100 == 0) {
            LOGGER.info("Extracted %d timepoints (now at %.3fs)", timepointCounter, time);
        }

        /* We use a sweep-line approach to scanning through the m/z windows so that we can aggregate all intensities in
         * one pass over the current LCMSSpectrum (this saves us one inner loop in our extraction process).  The m/z
         * values in the LCMSSpectrum become our "critical" or "interesting points" over which we sweep our m/z ranges.
         * The next window in m/z order is guaranteed to be the next one we want to consider since we address the points
         * in m/z order as well.  As soon as we've passed out of the range of one of our windows, we discard it.  It is
         * valid for a window to be added to and discarded from the working queue in one application of the work loop. */
        LinkedList<MZWindow> workingQueue = new LinkedList<>();
        // TODO: can we reuse these instead of creating fresh?
        LinkedList<MZWindow> tbdQueue = new LinkedList<>(windows);

        // Assumption: these arrive in m/z order.
        for (Pair<Double, Double> mzIntensity : spectrum.getIntensities()) {
            Double mz = mzIntensity.getLeft();
            Double intensity = mzIntensity.getRight();

            // First, shift any applicable ranges onto the working queue based on their minimum mz.
            while (!tbdQueue.isEmpty() && tbdQueue.peekFirst().getMin() <= mz) {
                workingQueue.add(tbdQueue.pop());
            }

            // Next, remove any ranges we've passed.
            while (!workingQueue.isEmpty() && workingQueue.peekFirst().getMax() < mz) {
                workingQueue.pop();
            }

            if (workingQueue.isEmpty()) {
                if (tbdQueue.isEmpty()) {
                    // If both queues are empty, there are no more windows to consider at all.  One to the next timepoint!
                    break;
                }

                // If there's nothing that happens to fit in this range, skip it!
                continue;
            }

            // The working queue should now hold only ranges that include this m/z value.  Sweep line swept!

            /* Now add this intensity to accumulator value for each of the items in the working queue.
             * By the end of the outer loop, trace(t) = Sum(intensity) | win_min <= m/z <= win_max @ time point # t */
            for (MZWindow window : workingQueue) {
                // TODO: count the number of times we add intensities to each window's accumulator for MS1-style warnings.
                sumIntensitiesInEachWindow[window.getIndex()] += intensity;
            }
        }

        /* Extend allTraces to add a row of accumulated intensity values for this time point.  We build this incrementally
         * because the LCMSSpectrum iterator doesn't tell us how many time points to expect up front. */
        for (int i = 0; i < sumIntensitiesInEachWindow.length; i++) {
            allTraces.get(i).add(sumIntensitiesInEachWindow[i]);
        }
    }

    // Trace data has been devoured.  Might want to loosen the belt at this point...
    LOGGER.info("Done extracting %d traces", allTraces.size());

    return new IndexedTraces(windows, times, allTraces);
}
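
The two LinkedLists above form a classic sweep line: windows wait in a to-be-done queue sorted by lower bound, move to a working queue while the current value lies inside them, and are dropped once passed. A small sketch of the same idea over hypothetical integer ranges:

import java.util.Arrays;
import java.util.LinkedList;

public class SweepLine {
    // Hypothetical closed ranges [min, max], pre-sorted by min (and, as above, by max too).
    private static final int[][] RANGES = { { 1, 4 }, { 3, 6 }, { 8, 9 } };

    public static void main(String[] args) {
        LinkedList<int[]> tbd = new LinkedList<int[]>(Arrays.asList(RANGES));
        LinkedList<int[]> working = new LinkedList<int[]>();

        for (int value : new int[] { 2, 3, 5, 7, 8 }) {  // "critical points" in ascending order
            while (!tbd.isEmpty() && tbd.peekFirst()[0] <= value) {
                working.add(tbd.pop());                  // range is now in play
            }
            while (!working.isEmpty() && working.peekFirst()[1] < value) {
                working.pop();                           // range has been passed, discard it
            }
            System.out.println(value + " falls in " + working.size() + " range(s)");
        }
    }
}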

From source file:org.apache.hadoop.hdfs.server.blockmanagement.CacheReplicationMonitor.java

/**
 * Add new entries to the PendingUncached list.
 *
 * @param neededUncached   The number of replicas that need to be uncached.
 * @param cachedBlock      The block which needs to be uncached.
 * @param cached           A list of DataNodes currently caching the block.
 * @param pendingUncached  A list of DataNodes that will soon uncache the
 *                         block.
 */
private void addNewPendingUncached(int neededUncached, CachedBlock cachedBlock, List<DatanodeDescriptor> cached,
        List<DatanodeDescriptor> pendingUncached) {
    // Figure out which replicas can be uncached.
    LinkedList<DatanodeDescriptor> possibilities = new LinkedList<DatanodeDescriptor>();
    for (DatanodeDescriptor datanode : cached) {
        if (!pendingUncached.contains(datanode)) {
            possibilities.add(datanode);
        }
    }
    while (neededUncached > 0) {
        if (possibilities.isEmpty()) {
            LOG.warn("Logic error: we're trying to uncache more replicas than " + "actually exist for "
                    + cachedBlock);
            return;
        }
        DatanodeDescriptor datanode = possibilities.remove(random.nextInt(possibilities.size()));
        pendingUncached.add(datanode);
        boolean added = datanode.getPendingUncached().add(cachedBlock);
        assert added;
        neededUncached--;
    }
}
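
The loop above draws random replicas from a LinkedList until enough have been picked, with isEmpty() guarding against over-drawing. A minimal sketch of that selection-without-replacement pattern, using hypothetical datanode names:

import java.util.Arrays;
import java.util.LinkedList;
import java.util.Random;

public class RandomPick {
    public static void main(String[] args) {
        LinkedList<String> possibilities =
                new LinkedList<String>(Arrays.asList("dn1", "dn2", "dn3", "dn4"));
        Random random = new Random();

        int needed = 2;
        while (needed > 0) {
            if (possibilities.isEmpty()) {       // same guard as in addNewPendingUncached
                System.out.println("ran out of candidates");
                return;
            }
            // Removing by random index means the same element cannot be picked twice.
            String picked = possibilities.remove(random.nextInt(possibilities.size()));
            System.out.println("picked " + picked);
            needed--;
        }
    }
}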

From source file:org.pircbotx.ReplayServer.java

public static void replay(Configuration.Builder config, InputStream input, String title) throws Exception {
    log.info("---Replaying {}---", title);
    StopWatch timer = new StopWatch();
    timer.start();

    //Wrap listener manager with ours that siphons off events
    final Queue<Event> eventQueue = Lists.newLinkedList();
    WrapperListenerManager newManager = new WrapperListenerManager(config.getListenerManager(), eventQueue);
    config.setListenerManager(newManager);
    config.addListener(new ReplayListener());

    final LinkedList<String> outputQueue = Lists.newLinkedList();
    ReplayPircBotX bot = new ReplayPircBotX(config.buildConfiguration(), outputQueue);

    BufferedReader fileInput = new BufferedReader(new InputStreamReader(input));
    boolean skippedHeader = false;
    while (true) {
        String lineRaw = fileInput.readLine();
        if (bot.isClosed() && StringUtils.isNotBlank(lineRaw)) {
            throw new RuntimeException("bot is closed but file still has line " + lineRaw);
        } else if (!bot.isClosed() && StringUtils.isBlank(lineRaw)) {
            throw new RuntimeException("bot is not closed but file doesn't have any more lines");
        } else if (bot.isClosed() && StringUtils.isBlank(lineRaw)) {
            log.debug("(done) Bot is closed and file doesn't have any more lines");
            break;
        }

        log.debug("(line) " + lineRaw);
        String[] lineParts = StringUtils.split(lineRaw, " ", 2);
        String command = lineParts[0];
        String line = lineParts[1];

        //For now skip the info lines PircBotX is supposed to send on connect
        //They are only sent when connect() is called which requires multithreading
        if (!skippedHeader) {
            if (command.equals("pircbotx.output"))
                continue;
            else if (command.equals("pircbotx.input")) {
                log.debug("Finished skipping header");
                skippedHeader = true;
            } else
                throw new RuntimeException("Unknown line " + lineRaw);
        }

        if (command.equals("pircbotx.input")) {
            bot.getInputParser().handleLine(line);
        } else if (command.equals("pircbotx.output")) {
            String lastOutput = outputQueue.isEmpty() ? null : outputQueue.pop();
            if (StringUtils.startsWith(line, "JOIN")) {
                log.debug("Skipping JOIN output, server should send its own JOIN");
            } else if (StringUtils.startsWith(line, "QUIT")) {
                log.debug("Skipping QUIT output, server should send its own QUIT");
            } else if (!line.equals(lastOutput)) {
                log.error("Expected last output: " + line);
                log.error("Given last output: " + lastOutput);
                for (String curOutput : outputQueue) {
                    log.error("Queued output: " + curOutput);
                }
                throw new RuntimeException("Failed to verify output (see log)");
            }
        } else {
            throw new RuntimeException("Unknown line " + lineRaw);
        }

        for (Event curEvent : Iterables.consumingIterable(eventQueue))
            log.debug("(events) " + curEvent);

        log.debug("");
    }

    timer.stop();
    log.debug("---Replay successful in {}---",
            DurationFormatUtils.formatDuration(timer.getTime(), "mm'min'ss'sec'SSS'ms'"));
}