Example usage for java.lang ClassNotFoundException getMessage

List of usage examples for java.lang ClassNotFoundException getMessage

Introduction

On this page you can find example usage for java.lang.ClassNotFoundException.getMessage().

Prototype

public String getMessage() 

Document

Returns the detail message string of this throwable.
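
A minimal sketch of the typical pattern (the class name below is hypothetical): when a class cannot be located, the detail message usually contains the name of the missing class.

try {
    Class.forName("com.example.MissingDriver"); // hypothetical class name
} catch (ClassNotFoundException e) {
    // For Class.forName, getMessage() typically returns the name of the class
    // that could not be found.
    System.err.println("Class not found: " + e.getMessage());
}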

Usage

From source file:edu.lternet.pasta.datapackagemanager.DataPackageManager.java

/**
 * Delete a data package in PASTA based on its scope and identifier values
 *
 * @param scope
 *          The scope value of the data package to be deleted
 * @param identifier
 *          The identifier value of the data package to be deleted
 * @param user
 *          The user value
 * @param authToken
 *          The authentication token object
 * @return true if the data package was deleted, false otherwise
 */
public boolean deleteDataPackage(String scope, Integer identifier, String user, AuthToken authToken)
        throws ClassNotFoundException, SQLException, ClientProtocolException, IOException, Exception {
    boolean hasDataPackage = false;
    boolean deleted = false;
    Integer revision = getNewestRevision(scope, identifier);

    try {
        /*
         * Do we have this data package in PASTA?
         */
        DataPackageRegistry dataPackageRegistry = new DataPackageRegistry(dbDriver, dbURL, dbUser, dbPassword);
        hasDataPackage = dataPackageRegistry.hasDataPackage(scope, identifier);

        /*
         * Check whether user is authorized to delete the data package
         */
        String entityId = null;
        String resourceId = composeResourceId(ResourceType.dataPackage, scope, identifier, revision, entityId);
        Authorizer authorizer = new Authorizer(dataPackageRegistry);
        boolean isAuthorized = authorizer.isAuthorized(authToken, resourceId, Rule.Permission.write);
        if (!isAuthorized) {
            String message = "User " + user + " does not have permission to delete this data package: "
                    + resourceId;
            throw new UnauthorizedException(message);
        }

        /*
         * If we do have this data package in PASTA, first check to see whether it
         * was previously deleted
         */
        if (hasDataPackage) {
            boolean isDeactivatedDataPackage = dataPackageRegistry.isDeactivatedDataPackage(scope, identifier);
            if (isDeactivatedDataPackage) {
                String docid = EMLDataPackage.composeDocid(scope, identifier);
                String message = "Attempting to delete a data package that was previously deleted from PASTA: "
                        + docid;
                throw new ResourceDeletedException(message);
            }

            /*
             * Delete the metadata from the Metadata Catalog
             */
            MetadataCatalog solrCatalog = new SolrMetadataCatalog(solrUrl);
            EmlPackageIdFormat emlPackageIdFormat = new EmlPackageIdFormat();
            EmlPackageId emlPackageId = emlPackageIdFormat.parse(scope, identifier.toString(),
                    revision.toString());
            solrCatalog.deleteEmlDocument(emlPackageId);

            /*
             * Delete the data package from the resource registry
             */

            deleted = dataPackageRegistry.deleteDataPackage(scope, identifier);
        }
    } catch (ClassNotFoundException e) {
        logger.error("Error connecting to Data Package Registry: " + e.getMessage());
        e.printStackTrace();
        throw (e);
    } catch (SQLException e) {
        logger.error("Error connecting to Data Package Registry: " + e.getMessage());
        e.printStackTrace();
        throw (e);
    }

    return deleted;
}

From source file:cai.flow.packets.V5_Packet.java

/**
 * Parses a NetFlow V5 UDP packet buffer into a Vector of flows.
 *
 * @param RouterIP the IP address of the exporting router
 * @param buf the raw packet bytes
 * @param len the number of valid bytes in buf
 * @throws DoneException if the packet is incomplete or corrupted
 */
@SuppressWarnings("unchecked")
public V5_Packet(String RouterIP, byte[] buf, int len) throws DoneException {
    if (false) { // disabled debugging branch, originally guarded by Params.DEBUG
        // Debug aid: replay a previously cached packet if one exists, otherwise cache this one.
        File tmpFile = new File(Params.path + File.separator + "cache.tmp");
        if (tmpFile.exists()) {
            try {
                ObjectInputStream fIn = new ObjectInputStream(new FileInputStream(tmpFile));
                try {
                    buf = (byte[]) fIn.readObject();
                    len = ((Integer) fIn.readObject()).intValue();
                } catch (ClassNotFoundException e) {
                    e.printStackTrace();
                }
                fIn.close();
            } catch (FileNotFoundException e) {
                e.printStackTrace();
            } catch (IOException e) {
                e.printStackTrace();
            }
        } else {
            try {
                ObjectOutputStream fOut;
                fOut = new ObjectOutputStream(new FileOutputStream(tmpFile));
                fOut.writeObject(buf);
                fOut.writeObject(new Integer(len));
                fOut.flush();
                fOut.close();
            } catch (FileNotFoundException e) {
                e.printStackTrace();
            } catch (IOException e1) {
                e1.printStackTrace();
            }
        }
    }
    if (len < V5_Header_Size)
        throw new DoneException("    * incomplete header *");

    this.RouterIP = RouterIP;
    count = Util.to_number(buf, 2, 2);

    if (count <= 0 || len != V5_Header_Size + count * V5_Flow_Size)
        throw new DoneException("    * corrupted packet " + len + "/" + count + "/"
                + (V5_Header_Size + count * V5_Flow_Size) + " *");

    SysUptime = Util.to_number(buf, 4, 4);
    unix_secs = Util.to_number(buf, 8, 4);
    unix_nsecs = Util.to_number(buf, 12, 4);
    flow_sequence = Util.to_number(buf, 16, 4);
    engine_type = buf[20];
    engine_id = buf[21];

    logger.debug("    uptime: " + Util.uptime(SysUptime / 1000) + ", date: " + unix_secs + "." + unix_nsecs);
    logger.debug("    sequence: " + flow_sequence + ", count: " + count + ", engine: " + engine_type + "/"
            + engine_id);

    flows = new Vector((int) count);

    for (int i = 0, p = V5_Header_Size; i < count; i++, p += V5_Flow_Size) {
        V5_Flow f;
        try {
            f = new V5_Flow(RouterIP, buf, p);
            if (Params.DEBUG) {
                if (!f.equals(
                        new V5_Flow(RouterIP, buf, p, TemplateManager.getTemplateManager().getV5Template()))) {
                    logger.error("ERROR: Data inconsistency with different algorithm");
                }
            }
            if (f.srcaddr != null && f.dstaddr != null) {
                flows.add(f);
            } else {
                if (Params.DEBUG) {
                    logger.error(f.srcaddr + "  " + f.dstaddr + "    ");
                }
            }
        } catch (DoneException e) {
            if (Params.DEBUG) {
                logger.debug("", e);
            }
            if (e.getMessage() != null && (!e.getMessage().equals(""))) {
                logger.debug("", e);
            }
        }
    }
}

From source file:org.apache.maven.plugin.javadoc.AbstractFixJavadocMojo.java

/**
 * @param className not null
 * @return the Class corresponding to the given class name using the project classloader.
 * @throws MojoExecutionException if class not found
 * @see {@link ClassUtils#getClass(ClassLoader, String, boolean)}
 * @see {@link #getProjectClassLoader()}
 */
private Class<?> getClass(String className) throws MojoExecutionException {
    try {
        return ClassUtils.getClass(getProjectClassLoader(), className, false);
    } catch (ClassNotFoundException e) {
        // Wrap the exception, keeping its message and passing it along as the cause
        // so the original stack trace is preserved.
        throw new MojoExecutionException("ClassNotFoundException: " + e.getMessage(), e);
    }
}

From source file:de.unibi.techfak.bibiserv.BiBiTools.java

public Object retrieveOutputData(File outputFile, String type, String implementationType)
        throws BiBiToolsException, DBConnectionException, FileNotFoundException {

    byte[] output = readSpoolFile(outputFile);

    if (type.equals("PRIMITIVE")) {
        if (implementationType.equals("java.lang.String")) {
            return new String(output);
        } else if (implementationType.equals("java.lang.Integer")) {
            return Integer.parseInt(new String(output));
        } else if (implementationType.equals("java.lang.Float")) {
            return Float.parseFloat(new String(output));
        } else if (implementationType.equals("java.lang.Boolean")) {
            return Boolean.parseBoolean(new String(output));
        } else {
            status.setStatuscode(700, "Unsupported Primitive " + implementationType + ".");
            log.fatal(status);
            throw new BiBiToolsException(status);
        }

    } else if (type.equals("XML")) {
        try {
            return string2Jaxb(new String(output), Class.forName(implementationType));
        } catch (ClassNotFoundException e) {
            status.setStatuscode(700, "Internal Server Error", e.getMessage());
            log.fatal(status);
            throw new BiBiToolsException(status);
        } catch (JAXBException e) {
            status.setStatuscode(700, "Internal Server Error", e.getMessage());
            log.fatal(status);
            throw new BiBiToolsException(status);
        }
    }

    status.setStatuscode(700, "Internal Server Error", "Unknown input type \"" + type + "\"!");
    log.fatal(status);
    throw new BiBiToolsException(status);
}

From source file:org.deegree.services.wms.controller.WMSController.java

@Override
public void init(DeegreeServicesMetadataType serviceMetadata, DeegreeServiceControllerType mainConfig,
        Object controllerConf) {

    identification = convertFromJAXB(serviceMetadata.getServiceIdentification());
    provider = convertFromJAXB(serviceMetadata.getServiceProvider());

    NamespaceBindings nsContext = new NamespaceBindings();
    nsContext.addNamespace("wms", "http://www.deegree.org/services/wms");

    conf = (DeegreeWMS) controllerConf;

    if (conf.getExtendedCapabilities() != null) {
        this.extendedCaps = new HashMap<String, List<OMElement>>();
        List<OMElement> caps = new ArrayList<OMElement>(conf.getExtendedCapabilities().size());
        extendedCaps.put("default", caps);
        for (ExtendedCapabilities extendedCapsConf : conf.getExtendedCapabilities()) {
            DOMSource domSource = new DOMSource(extendedCapsConf.getAny());
            XMLStreamReader xmlStream;
            try {
                xmlStream = XMLInputFactory.newInstance().createXMLStreamReader(domSource);
            } catch (Exception t) {
                throw new ResourceInitException("Error extracting extended capabilities: " + t.getMessage(), t);
            }
            caps.add(new XMLAdapter(xmlStream).getRootElement());
        }
    }

    try {
        addSupportedImageFormats(conf);

        if (conf.getFeatureInfoFormats() != null) {
            for (GetFeatureInfoFormat t : conf.getFeatureInfoFormats().getGetFeatureInfoFormat()) {
                if (t.getFile() != null) {
                    featureInfoManager.addOrReplaceFormat(t.getFormat(),
                            metadata.getLocation().resolveToFile(t.getFile()).toString());
                } else if (t.getXSLTFile() != null) {
                    XSLTFile xsltFile = t.getXSLTFile();
                    GMLVersion version = GMLVersion.valueOf(xsltFile.getGmlVersion().toString());
                    featureInfoManager.addOrReplaceXsltFormat(t.getFormat(),
                            metadata.getLocation().resolveToUrl(xsltFile.getValue()), version, workspace);
                } else if (t.getSerializer() != null) {
                    Serializer serializer = t.getSerializer();

                    FeatureInfoSerializer featureInfoSerializer;
                    try {
                        Class<?> clazz = workspace.getModuleClassLoader().loadClass(serializer.getJavaClass());
                        featureInfoSerializer = clazz.asSubclass(FeatureInfoSerializer.class).newInstance();
                    } catch (ClassNotFoundException e) {
                        throw new IllegalArgumentException("Couldn't find serializer class", e);
                    } catch (ClassCastException e) {
                        throw new IllegalArgumentException(
                                "Configured serializer class doesn't implement FeatureInfoSerializer", e);
                    }

                    featureInfoManager.addOrReplaceCustomFormat(t.getFormat(), featureInfoSerializer);
                } else {
                    throw new IllegalArgumentException("Unknown GetFeatureInfoFormat");
                }
            }
        }

        // if ( pi.getImageFormat() != null ) {
        // for ( ImageFormat f : pi.getImageFormat() ) {
        // instantiateSerializer( imageSerializers, f.getFormat(), f.getClazz(), ImageSerializer.class );
        // }
        // }

        final org.deegree.services.jaxb.wms.DeegreeWMS.SupportedVersions versions = conf.getSupportedVersions();
        if (versions == null) {
            ArrayList<String> vs = new ArrayList<String>();
            vs.add("1.1.1");
            vs.add("1.3.0");
            validateAndSetOfferedVersions(vs);
        } else {
            validateAndSetOfferedVersions(versions.getVersion());
        }

        for (Version v : offeredVersions) {
            if (v.equals(VERSION_111)) {
                controllers.put(VERSION_111, new WMSController111());
            }
            if (v.equals(VERSION_130)) {
                controllers.put(VERSION_130, new WMSController130());
            }
        }

        Iterator<Version> iter = controllers.keySet().iterator();
        while (iter.hasNext()) {
            highestVersion = iter.next();
        }

        ServiceConfigurationType sc = conf.getServiceConfiguration();
        service = new MapService(sc, workspace);

        // after the service knows what layers are available:
        handleMetadata(conf.getMetadataURLTemplate(), conf.getMetadataStoreId());

        String configId = getMetadata().getIdentifier().getId();
        metadataProvider = workspace.getResource(OWSMetadataProviderProvider.class, configId + "_metadata");
    } catch (Exception e) {
        throw new ResourceInitException(e.getMessage(), e);
    }

}

From source file:de.unibi.techfak.bibiserv.BiBiTools.java

/**
 * The function parseInput validates the input with the given validator
 * object and add a key in the hashmap in the case of success (see list
 * below):
 *
 * <p>
 * <b>input handling:</b> Currently three different kinds of input
 * handling are known.
 * <ol>
 * <li><b>FILE ::</b> write content into a file named
 * "<b>id</b>.<b>FILEHANDLE</b>" and add hash entry :
 * <ul>
 * <li> key == id </li>
 * <li> value == <b>option?</b> <b>spooldir</b>/<b>id</b>.<b>FILEHANDLE</b>
 * (cmdline part of this input)</li>
 * <li> return "" (empty string) </li>
 * </ul>
 * </li>
 * <li><b>STDIN ::</b> write content into a file named
 * "<b>id</b>.<b>FILEHANDLE</b>" and add hash entry :
 * <ul>
 * <li> key == id </li>
 * <li> value == <b>option?</b> </li>
 * <li> return "&lt; <b>spooldir</b>/<b>id</b>.<b>FILEHANDLE</b>" (cmdline
 * part of this input)</li>
 * </ul>
 * </li>
 * <li><b>ARGUMENT ::</b> add hash key entry :
 * <ul>
 * <li> key == id </li>
 * <li> value == <b>option?</b> content_of_input </li>
 * <li> return "" (empty string) </li>
 * </ul></li></ol>
 * </p>
 *
 * @param id - input id
 * @param inputhash - an initialized (and maybe empty) HashMap
 * @param inputobject
 * @param type
 * @param implementationType
 *
 *
 *
 */
public String parseInput(String id, Map<String, String> inputhash, Object inputobject, String type,
        String implementationType) throws BiBiToolsException, DBConnectionException {

    TinputOutput input = search_for_input(id);

    if (input == null) {
        status.setStatuscode(701, "Did not found any input matching id = '" + id + "'!");
        log.error(status);
        throw new BiBiToolsException(status);
    }

    String input_content = null;

    /* Get a string representation of the input object,
     * which can be saved in a file or .. */
    if (type.equals("PRIMITIVE")) {
        try {
            input_content = Class.forName(implementationType).cast(inputobject).toString();
        } catch (ClassNotFoundException e) {
            status.setStatuscode(700, "Internal Server Error", e.getMessage());
            log.fatal(status);
            throw new BiBiToolsException(status);
        } catch (ClassCastException e) {
            status.setStatuscode(700, "Internal Server Error", e.getMessage());
            log.fatal(status);
            throw new BiBiToolsException(status);
        }
    } else if (type.equals("XML")) {
        try {
            input_content = jaxb2String(inputobject, Class.forName(implementationType));
        } catch (ClassNotFoundException e) {
            status.setStatuscode(700, "Internal Server Error", e.getMessage());
            log.fatal(status);
            throw new BiBiToolsException(status);
        } catch (JAXBException e) {
            status.setStatuscode(700, "Internal Server Error", e.getMessage());
            log.fatal(status);
            throw new BiBiToolsException(status);
        }

    } else if (type.equals("MOBY")) {
        status.setStatuscode(700, "Internal Server Error",
                "Known input type \"MOBY\" is not yet supported ...");
        log.fatal(status);
        throw new BiBiToolsException(status);

    } else {
        status.setStatuscode(700, "Internal Server Error", "Unknown input type \"" + type + "\"!");
        log.fatal(status);
        throw new BiBiToolsException(status);
    }

    /* Check, how the input should be handled */
    try {
        if (input.getHandling().equalsIgnoreCase("FILE")) {
            /**
             * write content into a file named "<id>.input" and add hash
             * entry : - key == id - value == <option?>
             * <spooldir>/<id>.input (cmdline part of this input)
             */
            writeSpoolFile(input.getId() + "." + input.getHandling(), input_content.toString());
            inputhash.put(input.getId(), (input.isSetOption() ? input.getOption() : "")
                    + getSpoolDir().toString() + this.separator + input.getId() + "." + input.getHandling());
        } else if (input.getHandling().equalsIgnoreCase("STDIN")) {
            /**
             * write content into a file named "<id>.stdin" and add hash key
             * entry : - key == <id>
             * - value == <option?>
             * return < <spooldir>/<id>.stdin (cmdline part of this input)
             */
            writeSpoolFile(input.getId() + "." + input.getHandling(), input_content.toString());
            inputhash.put(input.getId(), (input.isSetOption() ? input.getOption() : ""));
            return "<" + getSpoolDir().toString() + this.separator + input.getId() + "." + input.getHandling();
        } else if (input.getHandling().equalsIgnoreCase("ARGUMENT")) {
            /**
             * add hash key entry : - key == <id>
             * - value == <option?> content_of_input
             */
            inputhash.put(input.getId(),
                    (input.isSetOption() ? input.getOption() : "") + input_content.toString());
        } else if (input.getHandling().equalsIgnoreCase("NONE")) {
            /**
             * add hash key entry : - key = <id>
             * - value = "" NONE :-)
             */
            inputhash.put(input.getId(), "");
        } else {
            log.fatal("Unknown input handling type ...");
            status.setStatuscode(701, "Unknown input handling type ...");
            throw new BiBiToolsException(status.toString());
        }
    } catch (FileNotFoundException e) {
        log.fatal("Content of input '" + input.getId() + "' can not be written into a file (mode :"
                + input.getHandling() + ").");
        status.setStatuscode(701, "Content of input '" + input.getId()
                + "' can not be written into a file (mode : " + input.getHandling() + ").");
        throw new BiBiToolsException(status.toString(), e);
    }
    return "";

}

From source file:tools.xor.service.AggregateManager.java

@Override
/**
 *  For now we handle only one aggregate entity in the document. 
 *  Later on we can update it handle multiple entities.
 *  
 *  Ideally, we would want each entity to be in a separate document, 
 *  so we can process it efficiently using streaming.
 */
public Object importAggregate(InputStream is, Settings settings) throws IOException {
    validateImportExport();

    try {
        Workbook wb = WorkbookFactory.create(is);

        Sheet entitySheet = wb.getSheet(Constants.XOR.EXCEL_ENTITY_SHEET);
        if (entitySheet == null) {
            throw new RuntimeException("The entity sheet is missing");
        }

        // Get the entity class name
        Map<String, Integer> colMap = getHeaderMap(entitySheet);
        if (!colMap.containsKey(Constants.XOR.TYPE)) {
            // TODO: Fallback to entity class in settings if provided
            throw new RuntimeException("XOR.type column is missing");
        }
        Row entityRow = entitySheet.getRow(1);
        if (entityRow == null) {
            throw new RuntimeException("Entity row is missing");
        }
        Cell typeCell = entityRow.getCell(colMap.get(Constants.XOR.TYPE));
        String entityClassName = typeCell.getStringCellValue();
        try {
            settings.setEntityClass(Class.forName(entityClassName));
        } catch (ClassNotFoundException e) {
            throw new RuntimeException("Class " + entityClassName + " is not found");
        }

        /******************************************************
         * Algorithm
         * 
         * 1. Create all objects with the XOR.id 
         * 2. Create the collections
         * 3. Associate the collections to their owners
         * 4. Then finally call JSONTransformer.unpack to link the objects by XOR.id
         * 
         ********************************************************/

        // 1. Create all objects with the XOR.id
        Map<String, String> collectionSheets = new HashMap<String, String>();
        Map<String, String> entitySheets = new HashMap<String, String>();
        entitySheets.put(Constants.XOR.EXCEL_ENTITY_SHEET, entityClassName);
        Map<String, JSONObject> idMap = parseEntities(wb, entitySheets, collectionSheets);

        // 2. Create the collections
        // The key in the collection property map is of the form <owner_xor_id>:<property>
        Map<String, JSONArray> collectionPropertyMap = parseCollections(wb, collectionSheets, idMap);

        // 3. Associate the collections to their owners
        // Replace all objectref prefix keys with the actual objects
        // Replace all collection properties with the array objects
        link(wb, idMap, collectionPropertyMap);

        // Find the root
        Cell idCell = entityRow.getCell(colMap.get(Constants.XOR.ID));
        String rootId = idCell.getStringCellValue();
        JSONObject root = idMap.get(rootId);

        // Finally persist the root object
        // call the update persistence method
        Class entityClass;
        try {
            entityClass = Class.forName(root.getString(Constants.XOR.TYPE));
        } catch (ClassNotFoundException | JSONException e) {
            throw new RuntimeException(
                    "Unable to construct root entity. Either the class is not found or the class name is missing");
        }

        return update(root, entityClass);

    } catch (EncryptedDocumentException e) {
        throw new RuntimeException("Document is encrypted, provide a decrypted inputstream");
    } catch (InvalidFormatException e) {
        throw new RuntimeException("The provided inputstream is not valid. " + e.getMessage());
    }
}

From source file:org.apache.hadoop.hive.metastore.txn.TxnHandler.java

/**
 * Retry-by-caller note:
 * Worst case, it will leave an open txn which will timeout.
 */
@Override
@RetrySemantics.Idempotent
public OpenTxnsResponse openTxns(OpenTxnRequest rqst) throws MetaException {
    if (openTxnsCounter == null) {
        synchronized (TxnHandler.class) {
            try {
                if (openTxnsCounter == null) {
                    startHouseKeeperService(conf,
                            Class.forName("org.apache.hadoop.hive.ql.txn.AcidOpenTxnsCounterService"));
                }
            } catch (ClassNotFoundException e) {
                throw new MetaException(e.getMessage());
            }
        }
    }

    if (!tooManyOpenTxns && numOpenTxns >= maxOpenTxns) {
        tooManyOpenTxns = true;
    }
    if (tooManyOpenTxns) {
        if (numOpenTxns < maxOpenTxns * 0.9) {
            tooManyOpenTxns = false;
        } else {
            LOG.warn("Maximum allowed number of open transactions (" + maxOpenTxns + ") has been "
                    + "reached. Current number of open transactions: " + numOpenTxns);
            throw new MetaException("Maximum allowed number of open transactions has been reached. "
                    + "See hive.max.open.txns.");
        }
    }

    int numTxns = rqst.getNum_txns();
    try {
        Connection dbConn = null;
        Statement stmt = null;
        ResultSet rs = null;
        try {
            lockInternal();
            /**
             * To make {@link #getOpenTxns()}/{@link #getOpenTxnsInfo()} work correctly, this operation must ensure
             * that advancing the counter in NEXT_TXN_ID and adding appropriate entries to TXNS is atomic.
             * Also, advancing the counter must work when multiple metastores are running.
             * SELECT ... FOR UPDATE is used to prevent
             * concurrent DB transactions being rolled back due to Write-Write conflict on NEXT_TXN_ID.
             *
             * In the current design, there can be several metastore instances running in a given Warehouse.
             * This makes ideas like reserving a range of IDs to save trips to DB impossible.  For example,
             * a client may go to MS1 and start a transaction with ID 500 to update a particular row.
             * Now the same client will start another transaction, except it ends up on MS2 and may get
             * transaction ID 400 and update the same row.  Now the merge that happens to materialize the snapshot
             * on read will think the version of the row from transaction ID 500 is the latest one.
             *
             * Longer term we can consider running Active-Passive MS (at least wrt to ACID operations).  This
             * set could support a write-through cache for added performance.
             */
            dbConn = getDbConn(Connection.TRANSACTION_READ_COMMITTED);
            // Make sure the user has not requested an insane amount of txns.
            int maxTxns = HiveConf.getIntVar(conf, HiveConf.ConfVars.HIVE_TXN_MAX_OPEN_BATCH);
            if (numTxns > maxTxns)
                numTxns = maxTxns;

            stmt = dbConn.createStatement();
            String s = sqlGenerator.addForUpdateClause("select ntxn_next from NEXT_TXN_ID");
            LOG.debug("Going to execute query <" + s + ">");
            rs = stmt.executeQuery(s);
            if (!rs.next()) {
                throw new MetaException(
                        "Transaction database not properly " + "configured, can't find next transaction id.");
            }
            long first = rs.getLong(1);
            s = "update NEXT_TXN_ID set ntxn_next = " + (first + numTxns);
            LOG.debug("Going to execute update <" + s + ">");
            stmt.executeUpdate(s);

            long now = getDbTime(dbConn);
            List<Long> txnIds = new ArrayList<Long>(numTxns);

            List<String> rows = new ArrayList<>();
            for (long i = first; i < first + numTxns; i++) {
                txnIds.add(i);
                rows.add(i + "," + quoteChar(TXN_OPEN) + "," + now + "," + now + ","
                        + quoteString(rqst.getUser()) + "," + quoteString(rqst.getHostname()));
            }
            List<String> queries = sqlGenerator.createInsertValuesStmt(
                    "TXNS (txn_id, txn_state, txn_started, txn_last_heartbeat, txn_user, txn_host)", rows);
            for (String q : queries) {
                LOG.debug("Going to execute update <" + q + ">");
                stmt.execute(q);
            }
            LOG.debug("Going to commit");
            dbConn.commit();
            return new OpenTxnsResponse(txnIds);
        } catch (SQLException e) {
            LOG.debug("Going to rollback");
            rollbackDBConn(dbConn);
            checkRetryable(dbConn, e, "openTxns(" + rqst + ")");
            throw new MetaException(
                    "Unable to select from transaction database " + StringUtils.stringifyException(e));
        } finally {
            close(rs, stmt, dbConn);
            unlockInternal();
        }
    } catch (RetryException e) {
        return openTxns(rqst);
    }
}

From source file:net.sourceforge.msscodefactory.cfasterisk.v2_1.CFAstDb2LUW.CFAstDb2LUWSchema.java

public boolean connect() {
    final String S_ProcName = "connect";
    if (cnx != null) {
        return (false);
    }

    if (configuration != null) {
        try {
            Class.forName("com.ibm.db2.jcc.DB2Driver");
        } catch (ClassNotFoundException e) {
            throw CFLib.getDefaultExceptionFactory().newRuntimeException(getClass(), "connect",
                    "Could not load IBM DB/2 LUW 10.5 driver", e);
        }
        String dbServer = configuration.getDbServer();
        int dbPort = configuration.getDbPort();
        String dbDatabase = configuration.getDbDatabase();
        String dbUserName = configuration.getDbUserName();
        String dbPassword = configuration.getDbPassword();
        String url = "jdbc:db2://" + dbServer + ":" + Integer.toString(dbPort) + "/" + dbDatabase;
        Properties props = new Properties();
        props.setProperty("user", dbUserName);
        props.setProperty("password", dbPassword);
        try {
            cnx = DriverManager.getConnection(url, props);
            cnx.setAutoCommit(false);
            cnx.setTransactionIsolation(Connection.TRANSACTION_REPEATABLE_READ);
            cnx.rollback();
            setDbSchemaName(dbDatabase);
        } catch (SQLException e) {
            throw CFLib.getDefaultExceptionFactory().newDbException(getClass(), S_ProcName, e);
        }
        return (true);
    }
    if (jndiName != null) {
        try {
            Context ctx = new InitialContext();
            DataSource ds = (DataSource) ctx.lookup(jndiName);
            if (ds == null) {
                throw CFLib.getDefaultExceptionFactory().newRuntimeException(getClass(), S_ProcName,
                        "Could not get resolve DataSource \"" + jndiName + "\"");
            }
            cnx = ds.getConnection();
            if (cnx == null) {
                throw CFLib.getDefaultExceptionFactory().newRuntimeException(getClass(), S_ProcName,
                        "Could not get Connection from PooledConnection for ConnectionPoolDataSource \""
                                + jndiName + "\"");
            }
            cnx.setAutoCommit(false);
            cnx.setTransactionIsolation(Connection.TRANSACTION_REPEATABLE_READ);
            cnx.rollback();
        } catch (NamingException e) {
            cnx = null;
            throw CFLib.getDefaultExceptionFactory().newRuntimeException(getClass(), S_ProcName,
                    "NamingException " + e.getMessage(), e);
        } catch (SQLException e) {
            cnx = null;
            inTransaction = false;
            throw CFLib.getDefaultExceptionFactory().newDbException(getClass(), S_ProcName, e);
        }
        return (true);
    }
    throw CFLib.getDefaultExceptionFactory().newUsageException(getClass(), S_ProcName,
            "Neither configurationFile nor jndiName found, do not know how to connect to database");
}