Example usage for java.sql ResultSet getBinaryStream

List of usage examples for java.sql ResultSet getBinaryStream

Introduction

On this page you can find example usage for java.sql ResultSet getBinaryStream.

Prototype

java.io.InputStream getBinaryStream(String columnLabel) throws SQLException;

Document

Retrieves the value of the designated column in the current row of this ResultSet object as a stream of uninterpreted bytes.
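
getBinaryStream is overloaded to accept either a 1-based column index or a column label; the examples on this page use both forms. They all follow the same basic pattern: execute a query that selects a binary (BLOB-like) column, advance the cursor, call getBinaryStream, and copy the returned InputStream somewhere before reading any other column. Below is a minimal, self-contained sketch of that pattern; the documents table and its name/content columns are hypothetical, chosen only for illustration.

import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import javax.sql.DataSource;

public class BlobExport {

    /** Copies the "content" blob of the named document to the given file. */
    public static void exportDocument(DataSource ds, String docName, String targetPath)
            throws SQLException, IOException {
        String sql = "SELECT content FROM documents WHERE name = ?";
        try (Connection conn = ds.getConnection();
                PreparedStatement ps = conn.prepareStatement(sql)) {
            ps.setString(1, docName);
            try (ResultSet rs = ps.executeQuery()) {
                if (!rs.next()) {
                    throw new SQLException("no document named " + docName);
                }
                // getBinaryStream returns null when the column value is SQL NULL;
                // try-with-resources tolerates a null resource, so this stays safe
                try (InputStream in = rs.getBinaryStream("content");
                        OutputStream out = new FileOutputStream(targetPath)) {
                    if (in != null) {
                        byte[] buffer = new byte[8192];
                        int n;
                        while ((n = in.read(buffer)) != -1) {
                            out.write(buffer, 0, n);
                        }
                    }
                }
            }
        }
    }
}

Note that the JDBC contract requires the returned stream to be read before the value of any other column is retrieved; the next call to a getter method implicitly closes the stream.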

Usage

From source file:org.rhq.enterprise.server.core.plugin.ServerPluginScanner.java

/**
 * This method scans the database for any new or updated server plugins and make sure this server
 * has a plugin file on the filesystem for each of those new/updated server plugins.
 *
 * This also checks to see if the enabled flag changed for plugins that we already know about.
 * If it does, and its plugin container has the plugin already loaded, the plugin will be reloaded.
 *
 * @return a list of files that appear to be new or updated and should be deployed
 */
private List<File> serverPluginScanDatabase() throws Exception {
    // these are plugins (name/path/md5/mtime) that have changed in the DB but are missing from the file system
    List<ServerPlugin> updatedPlugins = new ArrayList<ServerPlugin>();

    // the same list as above, only they are the files that are written to the filesystem and no longer missing
    List<File> updatedFiles = new ArrayList<File>();

    // process all the installed plugins
    ServerPluginsLocal serverPluginsManager = LookupUtil.getServerPlugins();
    List<ServerPlugin> installedPlugins = serverPluginsManager.getServerPlugins();
    for (ServerPlugin installedPlugin : installedPlugins) {
        String name = installedPlugin.getName();
        String path = installedPlugin.getPath();
        String md5 = installedPlugin.getMd5();
        long mtime = installedPlugin.getMtime();
        String version = installedPlugin.getVersion();
        ServerPluginType pluginType = new ServerPluginType(installedPlugin.getType());

        // let's see if we have this logical plugin on the filesystem (it may or may not be under the same filename)
        File expectedFile = new File(this.getServerPluginDir(), path);
        File currentFile = null; // will be non-null if we find that we have this plugin on the filesystem already
        PluginWithDescriptor pluginWithDescriptor = this.serverPluginsOnFilesystem.get(expectedFile);

        if (pluginWithDescriptor != null) {
            currentFile = expectedFile; // we have it where we are expected to have it
            if (!pluginWithDescriptor.plugin.getName().equals(name)
                    || !pluginType.equals(pluginWithDescriptor.pluginType)) {
                // Happens if someone wrote a plugin of one type but later changed it to a different type (or changed names)
                log.warn("For some reason, the server plugin file [" + expectedFile + "] is plugin ["
                        + pluginWithDescriptor.plugin.getName() + "] of type ["
                        + pluginWithDescriptor.pluginType + "] but the database says it should be [" + name
                        + "] of type [" + pluginType + "]");
            } else {
                log.debug("File system and db agree on server plugin location for [" + expectedFile + "]");
            }
        } else {
            // the plugin might still be on the file system but under a different filename, see if we can find it
            for (Map.Entry<File, PluginWithDescriptor> cacheEntry : this.serverPluginsOnFilesystem.entrySet()) {
                if (cacheEntry.getValue().plugin.getName().equals(name)
                        && cacheEntry.getValue().pluginType.equals(pluginType)) {
                    currentFile = cacheEntry.getKey();
                    pluginWithDescriptor = cacheEntry.getValue();
                    log.info("Filesystem has a server plugin [" + name + "] at the file [" + currentFile
                            + "] which is different than where the DB thinks it should be [" + expectedFile
                            + "]");
                    break; // we found it, no need to continue the loop
                }
            }
        }

        if (pluginWithDescriptor != null && currentFile != null && currentFile.exists()) {
            ServerPlugin dbPlugin = new ServerPlugin(name, path);
            dbPlugin.setType(pluginType.stringify());
            dbPlugin.setMd5(md5);
            dbPlugin.setVersion(version);
            dbPlugin.setMtime(mtime);

            ServerPlugin obsoletePlugin = ServerPluginDescriptorUtil.determineObsoletePlugin(dbPlugin,
                    pluginWithDescriptor.plugin);

            if (obsoletePlugin == pluginWithDescriptor.plugin) { // yes use == for reference equality!
                StringBuilder logMsg = new StringBuilder();
                logMsg.append("Found server plugin [").append(name);
                logMsg.append("] in the DB that is newer than the one on the filesystem: ");
                logMsg.append("DB path=[").append(path);
                logMsg.append("]; file path=[").append(currentFile.getName());
                logMsg.append("]; DB MD5=[").append(md5);
                logMsg.append("]; file MD5=[").append(pluginWithDescriptor.plugin.getMd5());
                logMsg.append("]; DB version=[").append(version);
                logMsg.append("]; file version=[").append(pluginWithDescriptor.plugin.getVersion());
                logMsg.append("]; DB timestamp=[").append(new Date(mtime));
                logMsg.append("]; file timestamp=[").append(new Date(pluginWithDescriptor.plugin.getMtime()));
                logMsg.append("]");
                log.info(logMsg.toString());

                updatedPlugins.add(dbPlugin);

                if (currentFile.delete()) {
                    log.info("Deleted the obsolete server plugin file to be updated: " + currentFile);
                    this.serverPluginsOnFilesystem.remove(currentFile);
                } else {
                    log.warn("Failed to delete the obsolete (to-be-updated) server plugin: " + currentFile);
                }
            } else if (obsoletePlugin == null) {
                // the db is up-to-date, but update the cache so we don't check MD5 or parse the descriptor again
                boolean succeeded = currentFile.setLastModified(mtime);
                if (!succeeded && log.isDebugEnabled()) {
                    log.debug(
                            "Failed to set mtime to [" + new Date(mtime) + "] on file [" + currentFile + "].");
                }
                pluginWithDescriptor.plugin.setMtime(mtime);
                pluginWithDescriptor.plugin.setVersion(version);
                pluginWithDescriptor.plugin.setMd5(md5);
            } else {
                log.info("It appears that the server plugin [" + dbPlugin
                        + "] in the database may be obsolete. If so, it will be updated later.");
            }
        } else {
            log.info("Found server plugin in the DB that we do not yet have: " + name);
            ServerPlugin plugin = new ServerPlugin(name, path, md5);
            plugin.setType(pluginType.stringify());
            plugin.setMtime(mtime);
            plugin.setVersion(version);
            updatedPlugins.add(plugin);
            this.serverPluginsOnFilesystem.remove(expectedFile); // paranoia, make sure the cache doesn't have this
        }
    }

    // write all our updated plugins to the file system
    if (!updatedPlugins.isEmpty()) {
        Connection conn = null;
        PreparedStatement ps = null;
        ResultSet rs = null;

        try {
            DataSource ds = LookupUtil.getDataSource();
            conn = ds.getConnection();

            ps = conn.prepareStatement("SELECT CONTENT FROM " + ServerPlugin.TABLE_NAME
                    + " WHERE DEPLOYMENT = 'SERVER' AND STATUS = 'INSTALLED' AND NAME = ? AND PTYPE = ?");
            for (ServerPlugin plugin : updatedPlugins) {
                File file = new File(this.getServerPluginDir(), plugin.getPath());

                ps.setString(1, plugin.getName());
                ps.setString(2, plugin.getType());
                rs = ps.executeQuery();
                rs.next();
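                // column 1 is the plugin's CONTENT blob; stream it straight to the plugin file on disk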
                InputStream content = rs.getBinaryStream(1);
                StreamUtil.copy(content, new FileOutputStream(file));
                rs.close();
                boolean succeeded = file.setLastModified(plugin.getMtime());// so our file matches the database mtime
                if (!succeeded && log.isDebugEnabled()) {
                    log.debug("Failed to set mtime to [" + plugin.getMtime() + "] on file [" + file + "].");
                }
                updatedFiles.add(file);

                // we are writing a new file to the filesystem, cache it since we know about it now
                cacheFilesystemServerPluginJar(file, null);
            }
        } finally {
            JDBCUtil.safeClose(conn, ps, rs);
        }
    }

    return updatedFiles;
}

From source file:org.alinous.plugin.derby.DerbyDataSource.java

public void readLargeObject(Object connectionHandle, String fileName, String table, String blobColumn,
        WhereClause where, PostContext context, VariableRepository provider) throws ExecutionException {
    // SELECT blobColumn FROM table WHERE ...
    SelectSentence sentence = new SelectSentence();
    FromClause from = new FromClause();
    TablesList tableList = new TablesList();
    tableList.addTable(new TableIdentifier(table));
    from.setTableList(tableList);
    sentence.setFrom(from);

    TypeHelper helper = this.typeHelper.newHelper(false, sentence);

    StringBuffer buff = new StringBuffer();

    buff.append("SELECT ");
    buff.append(blobColumn);
    buff.append(" FROM ");
    buff.append(table);

    if (where != null && where.isReady(context, provider, null)) {
        buff.append(" ");
        buff.append(where.extract(context, provider, null, null, helper));
    }

    OutputStream output = null;
    InputStream input = null;
    Connection con = (Connection) connectionHandle;
    boolean lastAutoCommit = false;
    try {
        lastAutoCommit = con.getAutoCommit();
        if (lastAutoCommit == true) {
            con.setAutoCommit(false);
        }

        Statement statement = con.createStatement();
        ResultSet result = statement.executeQuery(buff.toString());
        result.next();

        output = new AlinousFileOutputStream(new AlinousFile(fileName));
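        // open the selected blob column as an uninterpreted byte stream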
        input = result.getBinaryStream(1);

        byte[] byteBuff = new byte[256];

        int n = 1;
        while (n > 0) {
            n = input.read(byteBuff);

            if (n <= 0) {
                break;
            }
            output.write(byteBuff, 0, n);
        }

        statement.close();

    } catch (SQLException e) {
        throw new ExecutionException(e, "Failed in reading blob"); // i18n
    } catch (IOException e) {
        throw new ExecutionException(e, "Failed in reading blob"); // i18n
    } finally {
        try {
            if (input != null) {
                input.close();
            }
        } catch (IOException e) {
        }

        if (output != null) {
            try {
                output.close();
            } catch (IOException e) {
            }
        }

        try {
            if (lastAutoCommit == true) {
                con.setAutoCommit(lastAutoCommit);
            }
        } catch (SQLException e) {
        }

    }
}

From source file:com.nabla.wapp.server.basic.general.ExportService.java

private boolean exportFile(final String id, final UserSession userSession, final HttpServletResponse response)
        throws IOException, SQLException, InternalErrorException {
    final Connection conn = db.getConnection();
    try {
        final PreparedStatement stmt = StatementFormat.prepare(conn, "SELECT * FROM export WHERE id=?;", id);
        try {
            final ResultSet rs = stmt.executeQuery();
            try {
                if (!rs.next()) {
                    if (log.isDebugEnabled())
                        log.debug("failed to find file ID= " + id);
                    return false;
                }
                if (!userSession.getSessionId().equals(rs.getString("userSessionId"))) {
                    if (log.isTraceEnabled())
                        log.trace("invalid user session ID");
                    return false;
                }
                if (log.isTraceEnabled())
                    log.trace("exporting file " + id);
                response.reset();
                response.setBufferSize(DEFAULT_BUFFER_SIZE);
                response.setContentType(rs.getString("content_type"));
                if (rs.getBoolean("output_as_file")) {
                    // IMPORTANT:
                    // MUST be done before calling getOutputStream() otherwise no SaveAs dialogbox
                    response.setHeader("Content-Disposition",
                            MessageFormat.format("attachment; filename=\"{0}\"", rs.getString("name")));
                }
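                // stream the stored "content" blob directly into the HTTP response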
                IOUtils.copy(rs.getBinaryStream("content"), response.getOutputStream());
                /*   final BufferedInputStream input = new BufferedInputStream(rs.getBinaryStream("content"), DEFAULT_BUFFER_SIZE);
                   try {
                      final BufferedOutputStream output = new BufferedOutputStream(response.getOutputStream(), DEFAULT_BUFFER_SIZE);
                      try {
                         final byte[] buffer = new byte[DEFAULT_BUFFER_SIZE];
                         int length;
                         while ((length = input.read(buffer)) > 0)
                output.write(buffer, 0, length);
                      } finally {
                         output.close();
                      }
                   } finally {
                      input.close();
                   }*/
            } finally {
                rs.close();
                try {
                    Database.executeUpdate(conn, "DELETE FROM export WHERE id=?;", id);
                } catch (final SQLException e) {
                    if (log.isErrorEnabled())
                        log.error("failed to delete export record: " + e.getErrorCode() + "-" + e.getSQLState(),
                                e);
                }
            }
        } finally {
            stmt.close();
        }
    } finally {
        // remove any orphan export records i.e. older than 48h (beware of timezone!)
        final Calendar dt = Util.dateToCalendar(new Date());
        dt.add(GregorianCalendar.DATE, -2);
        try {
            Database.executeUpdate(conn, "DELETE FROM export WHERE created < ?;", Util.calendarToSqlDate(dt));
        } catch (final SQLException __) {
        }
        conn.close();
    }
    return true;
}

From source file:com.enonic.vertical.engine.handlers.ContentObjectHandler.java

private Document getContentObject(String sql, int[] paramValue) {
    Connection con = null;
    PreparedStatement preparedStmt = null;
    ResultSet resultSet = null;
    Document doc = XMLTool.createDocument("contentobjects");

    try {
        con = getConnection();
        preparedStmt = con.prepareStatement(sql);
        int length = (paramValue != null ? paramValue.length : 0);
        for (int i = 0; i < length; i++) {
            preparedStmt.setInt(i + 1, paramValue[i]);
        }

        resultSet = preparedStmt.executeQuery();
        Element root = doc.getDocumentElement();
        while (resultSet.next()) {

            // pre-fetch content object data
            Document contentdata = XMLTool.domparse(resultSet.getBinaryStream("cob_xmlData"));

            Element elem = XMLTool.createElement(doc, root, "contentobject");
            elem.setAttribute("key", resultSet.getString("cob_lKey"));
            elem.setAttribute("menukey", resultSet.getString("cob_men_lKey"));

            String style = resultSet.getString("cob_sStyle");
            Element subelem = XMLTool.createElement(doc, elem, "objectstylesheet", style);
            subelem.setAttribute("key", style);
            boolean styleExist = false;
            if (style != null && style.length() > 0) {
                styleExist = null != resourceDao.getResourceFile(new ResourceKey(style));
            }
            subelem.setAttribute("exists", styleExist ? "true" : "false");
            resultSet.getInt("cob_men_lKey");
            if (resultSet.wasNull()) {
                subelem.setAttribute("shared", "true");
            }

            String border = resultSet.getString("cob_sBorder");
            if (!resultSet.wasNull()) {
                subelem = XMLTool.createElement(doc, elem, "borderstylesheet", border);
                subelem.setAttribute("key", border);
                boolean borderExist = false;
                if (border != null && border.length() > 0) {
                    borderExist = null != resourceDao.getResourceFile(new ResourceKey(border));
                }
                subelem.setAttribute("exists", borderExist ? "true" : "false");
                resultSet.getInt("cob_men_lKey");
                if (resultSet.wasNull()) {
                    subelem.setAttribute("shared", "true");
                }
            }

            // element: name
            XMLTool.createElement(doc, elem, "name", resultSet.getString("cob_sName"));

            // element: contentobjectdata
            Node xmldata_root = doc.importNode(contentdata.getDocumentElement(), true);
            elem.appendChild(xmldata_root);

            Timestamp timestamp = resultSet.getTimestamp("cob_dteTimestamp");
            XMLTool.createElement(doc, elem, "timestamp", CalendarUtil.formatTimestamp(timestamp, true));

            elem.setAttribute("runAs", RunAsType.get(resultSet.getInt("cob_lRunAs")).toString());
        }

        resultSet.close();
        resultSet = null;
        preparedStmt.close();
        preparedStmt = null;
    } catch (SQLException sqle) {
        String message = "Failed to get content object(s): %t";
        VerticalEngineLogger.error(this.getClass(), 0, message, sqle);
    } finally {
        close(resultSet);
        close(preparedStmt);
        close(con);
    }

    return doc;
}

From source file:org.apache.ode.scheduler.simple.jdbc.SchedulerDAOConnectionImpl.java

@SuppressWarnings("unchecked")
public List<JobDAO> dequeueImmediate(String nodeId, long maxtime, int maxjobs) throws DatabaseException {
    ArrayList<JobDAO> ret = new ArrayList<JobDAO>(maxjobs);
    Connection con = null;
    PreparedStatement ps = null;
    try {
        con = getConnection();
        ps = con.prepareStatement(SCHEDULE_IMMEDIATE);
        ps.setString(1, nodeId);
        ps.setLong(2, maxtime);
        ps.setMaxRows(maxjobs);

        ResultSet rs = ps.executeQuery();
        while (rs.next()) {
            Scheduler.JobDetails details = new Scheduler.JobDetails();
            details.instanceId = asLong(rs.getObject("instanceId"));
            details.mexId = (String) rs.getObject("mexId");
            details.processId = (String) rs.getObject("processId");
            details.type = (String) rs.getObject("type");
            details.channel = (String) rs.getObject("channel");
            details.correlatorId = (String) rs.getObject("correlatorId");
            details.correlationKeySet = (String) rs.getObject("correlationKeySet");
            details.retryCount = asInteger(rs.getObject("retryCount"));
            details.inMem = rs.getBoolean("inMem");
            if (rs.getObject("detailsExt") != null) {
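                // the detailsExt column holds a serialized Map; deserialize it from the blob's binary stream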
                try {
                    ObjectInputStream is = new ObjectInputStream(rs.getBinaryStream("detailsExt"));
                    details.detailsExt = (Map<String, Object>) is.readObject();
                    is.close();
                } catch (Exception e) {
                    throw new DatabaseException("Error deserializing job detailsExt", e);
                }
            }

            {
                //For compatibility reasons, we check whether there are entries inside
                //jobDetailsExt blob, which correspond to extracted entries. If so, we
                //use them.

                Map<String, Object> detailsExt = details.getDetailsExt();
                if (detailsExt.get("type") != null) {
                    details.type = (String) detailsExt.get("type");
                }
                if (detailsExt.get("iid") != null) {
                    details.instanceId = (Long) detailsExt.get("iid");
                }
                if (detailsExt.get("pid") != null) {
                    details.processId = (String) detailsExt.get("pid");
                }
                if (detailsExt.get("inmem") != null) {
                    details.inMem = (Boolean) detailsExt.get("inmem");
                }
                if (detailsExt.get("ckey") != null) {
                    details.correlationKeySet = (String) detailsExt.get("ckey");
                }
                if (detailsExt.get("channel") != null) {
                    details.channel = (String) detailsExt.get("channel");
                }
                if (detailsExt.get("mexid") != null) {
                    details.mexId = (String) detailsExt.get("mexid");
                }
                if (detailsExt.get("correlatorId") != null) {
                    details.correlatorId = (String) detailsExt.get("correlatorId");
                }
                if (detailsExt.get("retryCount") != null) {
                    details.retryCount = Integer.parseInt((String) detailsExt.get("retryCount"));
                }
            }

            JobDAO job = new JobDAOImpl(rs.getLong("ts"), rs.getString("jobid"), rs.getBoolean("transacted"),
                    details);
            ret.add(job);
        }
        rs.close();
        ps.close();

        // mark jobs as scheduled, UPDATE_SCHEDULED_SLOTS at a time
        int j = 0;
        int updateCount = 0;
        ps = con.prepareStatement(UPDATE_SCHEDULED);
        for (int updates = 1; updates <= (ret.size() / UPDATE_SCHEDULED_SLOTS) + 1; updates++) {
            for (int i = 1; i <= UPDATE_SCHEDULED_SLOTS; i++) {
                ps.setString(i, j < ret.size() ? ret.get(j).getJobId() : "");
                j++;
            }
            ps.execute();
            updateCount += ps.getUpdateCount();
        }
        if (updateCount != ret.size()) {
            __log.error("Updating scheduled jobs failed to update all jobs; expected=" + ret.size() + " actual="
                    + updateCount);
            return null;

        }
    } catch (SQLException se) {
        throw new DatabaseException(se);
    } finally {
        close(ps);
        close(con);
    }
    return ret;
}

From source file:org.apache.ode.scheduler.simple.JdbcDelegate.java

@SuppressWarnings("unchecked")
public List<Job> dequeueImmediate(String nodeId, long maxtime, int maxjobs) throws DatabaseException {
    ArrayList<Job> ret = new ArrayList<Job>(maxjobs);
    Connection con = null;
    PreparedStatement ps = null;
    try {
        con = getConnection();
        ps = con.prepareStatement(SCHEDULE_IMMEDIATE);
        ps.setString(1, nodeId);
        ps.setLong(2, maxtime);
        ps.setMaxRows(maxjobs);

        ResultSet rs = ps.executeQuery();
        while (rs.next()) {
            Scheduler.JobDetails details = new Scheduler.JobDetails();
            details.instanceId = asLong(rs.getObject("instanceId"));
            details.mexId = (String) rs.getObject("mexId");
            details.processId = (String) rs.getObject("processId");
            details.type = (String) rs.getObject("type");
            details.channel = (String) rs.getObject("channel");
            details.correlatorId = (String) rs.getObject("correlatorId");
            details.correlationKeySet = (String) rs.getObject("correlationKeySet");
            details.retryCount = asInteger(rs.getObject("retryCount"));
            details.inMem = asBoolean(rs.getInt("inMem"));
            if (rs.getObject("detailsExt") != null) {
                try {
                    ObjectInputStream is = new ObjectInputStream(rs.getBinaryStream("detailsExt"));
                    details.detailsExt = (Map<String, Object>) is.readObject();
                    is.close();
                } catch (Exception e) {
                    throw new DatabaseException("Error deserializing job detailsExt", e);
                }
            }

            {
                //For compatibility reasons, we check whether there are entries inside
                //jobDetailsExt blob, which correspond to extracted entries. If so, we
                //use them.

                Map<String, Object> detailsExt = details.getDetailsExt();
                if (detailsExt.get("type") != null) {
                    details.type = (String) detailsExt.get("type");
                }
                if (detailsExt.get("iid") != null) {
                    details.instanceId = (Long) detailsExt.get("iid");
                }
                if (detailsExt.get("pid") != null && detailsExt.get("pid") instanceof String) {
                    details.processId = (String) detailsExt.get("pid");
                }
                if (detailsExt.get("inmem") != null) {
                    details.inMem = (Boolean) detailsExt.get("inmem");
                }
                if (detailsExt.get("ckey") != null) {
                    details.correlationKeySet = (String) detailsExt.get("ckey");
                }
                if (detailsExt.get("channel") != null) {
                    details.channel = (String) detailsExt.get("channel");
                }
                if (detailsExt.get("mexid") != null) {
                    details.mexId = (String) detailsExt.get("mexid");
                }
                if (detailsExt.get("correlatorId") != null) {
                    details.correlatorId = (String) detailsExt.get("correlatorId");
                }
                if (detailsExt.get("retryCount") != null) {
                    details.retryCount = Integer.parseInt((String) detailsExt.get("retryCount"));
                }
            }

            Job job = new Job(rs.getLong("ts"), rs.getString("jobid"), asBoolean(rs.getInt("transacted")),
                    details);
            ret.add(job);
        }
        rs.close();
        ps.close();
    } catch (SQLException se) {
        throw new DatabaseException(se);
    } finally {
        close(ps);
        close(con);
    }
    return ret;
}

From source file:org.wso2.carbon.user.core.tenant.JDBCTenantManager.java

public Tenant getTenant(int tenantId) throws UserStoreException {

    @SuppressWarnings("unchecked")
    TenantCacheEntry<Tenant> entry = (TenantCacheEntry<Tenant>) tenantCacheManager
            .getValueFromCache(new TenantIdKey(tenantId));
    if ((entry != null) && (entry.getTenant() != null)) {
        return entry.getTenant();
    }
    Connection dbConnection = null;
    PreparedStatement prepStmt = null;
    ResultSet result = null;
    Tenant tenant = null;
    int id;
    try {
        dbConnection = getDBConnection();
        String sqlStmt = TenantConstants.GET_TENANT_SQL;
        prepStmt = dbConnection.prepareStatement(sqlStmt);
        prepStmt.setInt(1, tenantId);

        result = prepStmt.executeQuery();

        if (result.next()) {
            id = result.getInt("UM_ID");
            String domain = result.getString("UM_DOMAIN_NAME");
            String email = result.getString("UM_EMAIL");
            boolean active = result.getBoolean("UM_ACTIVE");
            Date createdDate = new Date(result.getTimestamp("UM_CREATED_DATE").getTime());
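            // the tenant's realm configuration XML is stored as a binary column; read it as a stream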
            InputStream is = result.getBinaryStream("UM_USER_CONFIG");

            RealmConfigXMLProcessor processor = new RealmConfigXMLProcessor();
            RealmConfiguration realmConfig = processor.buildTenantRealmConfiguration(is);
            realmConfig.setTenantId(id);

            tenant = new Tenant();
            tenant.setId(id);
            tenant.setDomain(domain);
            tenant.setEmail(email);
            tenant.setCreatedDate(createdDate);
            tenant.setActive(active);
            tenant.setRealmConfig(realmConfig);
            setSecondaryUserStoreConfig(realmConfig, tenantId);
            tenant.setAdminName(realmConfig.getAdminUserName());
            tenantCacheManager.addToCache(new TenantIdKey(id), new TenantCacheEntry<Tenant>(tenant));
        }
        dbConnection.commit();
    } catch (SQLException e) {
        DatabaseUtil.rollBack(dbConnection);
        String msg = "Error in getting the tenant with " + "tenant id: " + tenantId + ".";
        if (log.isDebugEnabled()) {
            log.debug(msg, e);
        }
        throw new UserStoreException(msg, e);
    } finally {
        DatabaseUtil.closeAllConnections(dbConnection, result, prepStmt);
    }
    return tenant;
}

From source file:com.apatar.buzzsaw.BuzzsawNode.java

@Override
protected void TransformTDBtoRDB(int mode) {
    try {
        DataBaseTools.completeTransfer();
        TableInfo ti = getTiForConnection(IN_CONN_POINT_NAME);
        ResultSet rs = DataBaseTools.getRSWithAllFields(ti.getTableName(),
                ApplicationData.tempDataBase.getJdbcParams(), ApplicationData.getTempDataBaseInfo());

        WebdavResource resource = null;

        while (rs.next()) {
            boolean isFolder = rs.getBoolean("isFolder");

            resource = getBindingBuzzsaw();
            // pathRes - path to resource
            String pathRes = convertHttpToString(resource.getHttpURL());
            // path - inner path from db
            String path = rs.getString("Path");

            if (path.length() > 0) {
                if (separator.equals(path.substring(0, 1)) || "\\".equals(path.substring(0, 1))) {
                    pathRes += path;
                } else {
                    pathRes = pathRes + separator + path;
                }
            }

            if (isFolder) {
                resource.mkcolMethod(pathRes);
            } else {
                InputStream in = rs.getBinaryStream("Content");
                if (null != in) {
                    resource.putMethod(pathRes, in);
                    in.close();
                } else {
                    // if Content field is null, but String_Content field is
                    // not null
                    String strContent = rs.getString("String_Content");
                    if (null != strContent && !"".equals(strContent)) {
                        byte[] bytes = strContent.getBytes();
                        resource.putMethod(pathRes, bytes);
                    } else {
                        resource.putMethod(pathRes, "");
                    }
                }
            }

            if (!ApplicationData.ProcessingProgress.Step()) {
                return;
            }

            ApplicationData.ProcessingProgress.Log("Uploading resource: " + pathRes);
        }

    } catch (Exception e1) {
        ApplicationData.ProcessingProgress.Log(e1);
        e1.printStackTrace();
    } finally {
        DataBaseTools.completeTransfer();
    }
}

From source file:org.kawanfw.sql.servlet.sql.ResultSetWriter.java

/**
 * The binary content is dumped in a server file that will be available to
 * the client; the name of the file will be stored in the output stream.
 *
 * @param resultSet
 *            the result set in progress to send back to the client side
 * @param columnIndex
 *            the column index
 * @param columnType
 *            the column type
 * @param columnName
 *            the column name
 * @param columnTable
 *            the table name of the column
 * @return the formatted binary column
 * 
 * @throws SQLException
 */
private String formatBinaryColumn(ResultSet resultSet, int columnIndex, int columnType, String columnName,
        String columnTable) throws SQLException, IOException {
    String columnValueStr;

    FileNameFromBlobBuilder fileNameFromBlobBuilder = new FileNameFromBlobBuilder(sqlOrder, columnIndex, false);
    String fileName = fileNameFromBlobBuilder.getFileName();

    // Maybe null, we want to keep the info
    InputStream in = null;
    if (isTerradata) {
        in = resultSet.getBlob(columnIndex).getBinaryStream();
    }
    // For PostgreSQL columns OID columns have the BIGINT type
    else if (isPostgreSQL && columnType == Types.BIGINT) {
        in = PostgreSqlUtil.getPostgreSqlnputStream(resultSet, columnIndex);
    } else {
        in = resultSet.getBinaryStream(columnIndex);
    }

    // BufferedInputStream bufferedIn = new BufferedInputStream(in);

    if (fileConfigurator == null) // Servlet Awake FILE not
    // configured.
    {
        columnValueStr = TransportConverter.KAWANFW_BYTES_STREAM_FILE
                + TransportConverter.KAWANFW_STREAM_FAILURE;
        return columnValueStr;
    }

    OutputStream outStream = null;
    String hostFileName = null;
    try {
        hostFileName = HttpConfigurationUtil.addRootPath(fileConfigurator, username, fileName);
        outStream = new BufferedOutputStream(new FileOutputStream(hostFileName));

        debug("formatBinaryColumn:outStream: " + hostFileName);

        if (in == null) {
            debug("formatBinaryColumn: in == null");

            // DO NOTHING: just closing will create an empty file
            outStream.write(TransportConverter.KAWANFW_STREAM_NULL.getBytes());

        } else {
            IOUtils.copy(in, outStream);
        }
    } catch (IOException e) {
        throw new SQLException(e);
    } finally {
        // IOUtils.closeQuietly(in); NOT DONE. Why?
        IOUtils.closeQuietly(outStream);
    }

    // The column value is a file name with a tag for identification
    columnValueStr = TransportConverter.KAWANFW_BYTES_STREAM_FILE + fileName;

    return columnValueStr;
}

From source file:org.apache.gora.sql.store.SqlStore.java

protected Object readField(ResultSet resultSet, int columnIndex, Object field, Schema schema, Column column)
        throws SQLException, IOException {

    InputStream is = null;
    byte[] bytes = null;

    JdbcType type = JdbcType.get(resultSet.getMetaData().getColumnType(columnIndex));

    switch (type) {
    case BLOB:
        Blob blob = resultSet.getBlob(columnIndex);
        if (blob != null)
            is = blob.getBinaryStream();
        break;
    case BINARY:
    case VARBINARY:
        bytes = resultSet.getBytes(columnIndex);
        break;
    case LONGVARBINARY:
        is = resultSet.getBinaryStream(columnIndex);
        break;
    }

    if (bytes != null)
        return IOUtils.deserialize(bytes, datumReader, schema, field);
    else if (is != null)
        return IOUtils.deserialize(is, datumReader, schema, field);
    return field; // field is empty
}