List of usage examples for org.hibernate.cfg Configuration setProperty
public Configuration setProperty(String propertyName, String value)
From source file:org.alfresco.repo.domain.schema.SchemaBootstrap.java
License:Open Source License
@Override public synchronized void onBootstrap(ApplicationEvent event) { if (event != null) { // Use the application context to load resources rpr = (ApplicationContext) event.getSource(); }//w w w . jav a 2 s . c o m // do everything in a transaction Session session = getSessionFactory().openSession(); Connection connection = null; try { // make sure that we AUTO-COMMIT connection = dataSource.getConnection(); connection.setAutoCommit(true); LogUtil.info(logger, MSG_DATABASE_USED, connection); Configuration cfg = localSessionFactory.getConfiguration(); // Check and dump the dialect being used checkDialect(this.dialect); // Ensure that our static connection provider is used String defaultConnectionProviderFactoryClass = cfg.getProperty(Environment.CONNECTION_PROVIDER); cfg.setProperty(Environment.CONNECTION_PROVIDER, SchemaBootstrapConnectionProvider.class.getName()); SchemaBootstrapConnectionProvider.setBootstrapConnection(connection); // Update the schema, if required. if (updateSchema) { // Retries are required here as the DB lock will be applied lazily upon first statement execution. // So if the schema is up to date (no statements executed) then the LockFailException cannot be // thrown. If it is thrown, the the update needs to be rerun as it will probably generate no SQL // statements the second time around. 
boolean updatedSchema = false; boolean createdSchema = false; for (int i = 0; i < schemaUpdateLockRetryCount; i++) { try { createdSchema = updateSchema(cfg, session, connection); updatedSchema = true; break; } catch (LockFailedException e) { try { this.wait(schemaUpdateLockRetryWaitSeconds * 1000L); } catch (InterruptedException ee) { } } } if (!updatedSchema) { // The retries were exceeded throw new AlfrescoRuntimeException(ERR_PREVIOUS_FAILED_BOOTSTRAP); } // Copy the executed statements to the output file File schemaOutputFile = null; if (schemaOuputFilename != null) { schemaOutputFile = new File(schemaOuputFilename); } else { schemaOutputFile = TempFileProvider.createTempFile( "AlfrescoSchema-" + this.dialect.getClass().getSimpleName() + "-All_Statements-", ".sql"); } StringBuilder executedStatements = executedStatementsThreadLocal.get(); if (executedStatements == null) { LogUtil.info(logger, MSG_NO_CHANGES); } else { FileContentWriter writer = new FileContentWriter(schemaOutputFile); writer.setEncoding("UTF-8"); String executedStatementsStr = executedStatements.toString(); writer.putContent(executedStatementsStr); LogUtil.info(logger, MSG_ALL_STATEMENTS, schemaOutputFile.getPath()); } if (!createdSchema) { // verify that all patches have been applied correctly checkSchemaPatchScripts(cfg, connection, preUpdateScriptPatches, false); // check scripts checkSchemaPatchScripts(cfg, connection, postUpdateScriptPatches, false); // check scripts } if (executedStatements != null) { // Remove the flag indicating a running bootstrap setBootstrapCompleted(connection); } // Report normalized dumps if (executedStatements != null) { // Validate the schema, post-upgrade validateSchema("Alfresco-{0}-Validation-Post-Upgrade-{1}-", null); // 4.0+ schema dump dumpSchema("post-upgrade"); } } else { LogUtil.info(logger, MSG_BYPASSING_SCHEMA_UPDATE); } if (stopAfterSchemaBootstrap) { // 4.0+ schema dump dumpSchema("forced-exit"); LogUtil.error(logger, ERR_FORCED_STOP); throw new 
BootstrapStopException(); } // Reset the configuration cfg.setProperty(Environment.CONNECTION_PROVIDER, defaultConnectionProviderFactoryClass); if (event != null) { // all done successfully ((ApplicationContext) event.getSource()).publishEvent(new SchemaAvailableEvent(this)); } } catch (BootstrapStopException e) { // We just let this out throw e; } catch (Throwable e) { LogUtil.error(logger, e, ERR_UPDATE_FAILED); if (updateSchema) { throw new AlfrescoRuntimeException(ERR_UPDATE_FAILED, e); } else { throw new AlfrescoRuntimeException(ERR_VALIDATION_FAILED, e); } } finally { try { if (connection != null) { connection.close(); } } catch (Throwable e) { logger.warn("Error closing DB connection: " + e.getMessage()); } try { if (session != null) { session.close(); } } catch (Throwable e) { logger.warn("Error closing Hibernate session: " + e.getMessage()); } // Remove the connection reference from the threadlocal boostrap SchemaBootstrapConnectionProvider.setBootstrapConnection(null); } }
From source file:org.apache.ignite.cache.hibernate.GridHibernateL2CacheConfigurationSelfTest.java
License:Apache License
/**
 * Builds the Hibernate configuration under test.
 *
 * @param gridName Grid name.
 * @return Hibernate configuration.
 */
protected Configuration hibernateConfiguration(String gridName) {
    Configuration hibCfg = new Configuration();

    // Register the mapped entity classes.
    hibCfg.addAnnotatedClass(Entity1.class);
    hibCfg.addAnnotatedClass(Entity2.class);
    hibCfg.addAnnotatedClass(Entity3.class);
    hibCfg.addAnnotatedClass(Entity4.class);

    // Core L2-cache settings backed by the grid region factory.
    hibCfg.setProperty(DFLT_ACCESS_TYPE_PROPERTY, AccessType.NONSTRICT_READ_WRITE.name())
            .setProperty(HBM2DDL_AUTO, "create")
            .setProperty(GENERATE_STATISTICS, "true")
            .setProperty(USE_SECOND_LEVEL_CACHE, "true")
            .setProperty(USE_QUERY_CACHE, "true")
            .setProperty(CACHE_REGION_FACTORY, GridHibernateRegionFactory.class.getName())
            .setProperty(RELEASE_CONNECTIONS, "on_close")
            .setProperty(GRID_NAME_PROPERTY, gridName);

    // Per-region cache mappings.
    hibCfg.setProperty(REGION_CACHE_PROPERTY + ENTITY1_NAME, "cache1")
            .setProperty(REGION_CACHE_PROPERTY + ENTITY2_NAME, "cache2")
            .setProperty(REGION_CACHE_PROPERTY + TIMESTAMP_CACHE, TIMESTAMP_CACHE)
            .setProperty(REGION_CACHE_PROPERTY + QUERY_CACHE, QUERY_CACHE);

    if (dfltCache)
        hibCfg.setProperty(DFLT_CACHE_NAME_PROPERTY, "cache3");

    return hibCfg;
}
From source file:org.apache.ignite.cache.hibernate.GridHibernateL2CacheSelfTest.java
License:Apache License
/** * @param accessType Hibernate L2 cache access type. * @param gridName Grid name.//from w w w . j a va 2 s . com * @return Hibernate configuration. */ protected Configuration hibernateConfiguration(org.hibernate.cache.spi.access.AccessType accessType, String gridName) { Configuration cfg = new Configuration(); cfg.addAnnotatedClass(Entity.class); cfg.addAnnotatedClass(Entity2.class); cfg.addAnnotatedClass(VersionedEntity.class); cfg.addAnnotatedClass(ChildEntity.class); cfg.addAnnotatedClass(ParentEntity.class); cfg.setCacheConcurrencyStrategy(ENTITY_NAME, accessType.getExternalName()); cfg.setCacheConcurrencyStrategy(ENTITY2_NAME, accessType.getExternalName()); cfg.setCacheConcurrencyStrategy(VERSIONED_ENTITY_NAME, accessType.getExternalName()); cfg.setCacheConcurrencyStrategy(PARENT_ENTITY_NAME, accessType.getExternalName()); cfg.setCollectionCacheConcurrencyStrategy(CHILD_COLLECTION_REGION, accessType.getExternalName()); cfg.setProperty(HBM2DDL_AUTO, "create"); cfg.setProperty(GENERATE_STATISTICS, "true"); cfg.setProperty(USE_SECOND_LEVEL_CACHE, "true"); cfg.setProperty(USE_QUERY_CACHE, "true"); cfg.setProperty(CACHE_REGION_FACTORY, GridHibernateRegionFactory.class.getName()); cfg.setProperty(RELEASE_CONNECTIONS, "on_close"); cfg.setProperty(GRID_NAME_PROPERTY, gridName); // Use the same cache for Entity and Entity2. cfg.setProperty(REGION_CACHE_PROPERTY + ENTITY2_NAME, ENTITY_NAME); cfg.setProperty(DFLT_ACCESS_TYPE_PROPERTY, accessType.name()); return cfg; }
From source file:org.apache.ignite.cache.hibernate.HibernateL2CacheConfigurationSelfTest.java
License:Apache License
/**
 * Builds the Hibernate configuration under test.
 *
 * @param gridName Grid name.
 * @return Hibernate configuration.
 */
protected Configuration hibernateConfiguration(String gridName) {
    Configuration hibCfg = new Configuration();

    // Register the mapped entity classes.
    hibCfg.addAnnotatedClass(Entity1.class);
    hibCfg.addAnnotatedClass(Entity2.class);
    hibCfg.addAnnotatedClass(Entity3.class);
    hibCfg.addAnnotatedClass(Entity4.class);

    // Core L2-cache settings backed by the Ignite region factory.
    hibCfg.setProperty(DFLT_ACCESS_TYPE_PROPERTY, AccessType.NONSTRICT_READ_WRITE.name())
            .setProperty(HBM2DDL_AUTO, "create")
            .setProperty(GENERATE_STATISTICS, "true")
            .setProperty(USE_SECOND_LEVEL_CACHE, "true")
            .setProperty(USE_QUERY_CACHE, "true")
            .setProperty(CACHE_REGION_FACTORY, HibernateRegionFactory.class.getName())
            .setProperty(RELEASE_CONNECTIONS, "on_close")
            .setProperty(GRID_NAME_PROPERTY, gridName);

    // Per-region cache mappings.
    hibCfg.setProperty(REGION_CACHE_PROPERTY + ENTITY1_NAME, "cache1")
            .setProperty(REGION_CACHE_PROPERTY + ENTITY2_NAME, "cache2")
            .setProperty(REGION_CACHE_PROPERTY + TIMESTAMP_CACHE, TIMESTAMP_CACHE)
            .setProperty(REGION_CACHE_PROPERTY + QUERY_CACHE, QUERY_CACHE);

    if (dfltCache)
        hibCfg.setProperty(DFLT_CACHE_NAME_PROPERTY, "cache3");

    return hibCfg;
}
From source file:org.apache.ignite.cache.hibernate.HibernateL2CacheSelfTest.java
License:Apache License
/** * @param accessType Hibernate L2 cache access type. * @param gridName Grid name.//from w w w . j av a2 s.c om * @return Hibernate configuration. */ protected Configuration hibernateConfiguration(org.hibernate.cache.spi.access.AccessType accessType, String gridName) { Configuration cfg = new Configuration(); cfg.addAnnotatedClass(Entity.class); cfg.addAnnotatedClass(Entity2.class); cfg.addAnnotatedClass(VersionedEntity.class); cfg.addAnnotatedClass(ChildEntity.class); cfg.addAnnotatedClass(ParentEntity.class); cfg.setCacheConcurrencyStrategy(ENTITY_NAME, accessType.getExternalName()); cfg.setCacheConcurrencyStrategy(ENTITY2_NAME, accessType.getExternalName()); cfg.setCacheConcurrencyStrategy(VERSIONED_ENTITY_NAME, accessType.getExternalName()); cfg.setCacheConcurrencyStrategy(PARENT_ENTITY_NAME, accessType.getExternalName()); cfg.setCollectionCacheConcurrencyStrategy(CHILD_COLLECTION_REGION, accessType.getExternalName()); cfg.setProperty(HBM2DDL_AUTO, "create"); cfg.setProperty(GENERATE_STATISTICS, "true"); cfg.setProperty(USE_SECOND_LEVEL_CACHE, "true"); cfg.setProperty(USE_QUERY_CACHE, "true"); cfg.setProperty(CACHE_REGION_FACTORY, HibernateRegionFactory.class.getName()); cfg.setProperty(RELEASE_CONNECTIONS, "on_close"); cfg.setProperty(GRID_NAME_PROPERTY, gridName); // Use the same cache for Entity and Entity2. cfg.setProperty(REGION_CACHE_PROPERTY + ENTITY2_NAME, ENTITY_NAME); cfg.setProperty(DFLT_ACCESS_TYPE_PROPERTY, accessType.name()); return cfg; }
From source file:org.apache.tapestry5.internal.hibernate.HibernateSessionSourceImplTest.java
License:Apache License
@Test public void get_configuration() { HibernateConfigurer configurer = new HibernateConfigurer() { @Override//from w w w .ja va 2s . com public void configure(Configuration configuration) { configuration.setProperty("foo", "bar"); configuration.configure(); } }; HibernateSessionSource source = new HibernateSessionSourceImpl(log, Arrays.asList(configurer)); Configuration config = source.getConfiguration(); Assert.assertNotNull(config); Assert.assertEquals("bar", config.getProperty("foo")); // Configuration was immutable in 5.1, but Hibernate 3.6.0.Final made that impossible }
From source file:org.arsenico.database.HibernateUtil.java
License:Apache License
/** * A partire dai parametri passati, crea una session factory * /*from w w w . j ava 2s. co m*/ * @param url * @param username * @param password * @param createTables * se true mette il parametro <code>hbm2ddl.auto</code> a <code>create</code>, altrimenti a <code>verify</code> * @param type * tipo di database * @return * sessionFactory * @throws Exception * * @see {@link DatabaseSupportedType} */ public static SessionFactory createSessionFactory(String url, String username, String password, boolean createTables, DatabaseSupportedType type) throws Exception { Configuration config = new Configuration(); config.addClass(LogRegistry.class); // config.setProperty("hibernate.connection.url", url); config.setProperty("hibernate.connection.driver_class", type.getDriver()); config.setProperty("hibernate.connection.username", username); password = (password == null) ? "" : password; config.setProperty("hibernate.connection.password", password); config.setProperty("hibernate.connection.pool_size", "1"); config.setProperty("hibernate.cache.provider_class", "org.hibernate.cache.NoCacheProvider"); // config.setProperty("hibernate.show_sql", "true"); config.setProperty("hibernate.show_sql", "false"); // creiamo le tabella in base al flag dato come parametro if (createTables) config.setProperty("hibernate.hbm2ddl.auto", "create"); else config.setProperty("hibernate.hbm2ddl.auto", "verify"); config.setProperty("hibernate.connection.pool_size", "1"); config.setProperty("hibernate.dialect", type.getDialect()); sessionFactory = config.buildSessionFactory(); return sessionFactory; }
From source file:org.beangle.orm.hibernate.tool.ConfigBuilder.java
License:Open Source License
/** * build configration/*from w w w . j a v a 2 s . co m*/ * * @throws Exception */ @SuppressWarnings("unchecked") public static Configuration build(Configuration cfg) throws Exception { PathMatchingResourcePatternResolver resolver = new PathMatchingResourcePatternResolver( DdlGenerator.class.getClassLoader()); // config naming strategy DefaultTableNamingStrategy tableNamingStrategy = new DefaultTableNamingStrategy(); for (Resource resource : resolver.getResources("classpath*:META-INF/beangle/table.properties")) tableNamingStrategy.addConfig(resource.getURL()); RailsNamingStrategy namingStrategy = new RailsNamingStrategy(); namingStrategy.setTableNamingStrategy(tableNamingStrategy); cfg.setNamingStrategy(namingStrategy); for (Resource resource : resolver.getResources("classpath*:META-INF/hibernate.cfg.xml")) cfg.configure(resource.getURL()); for (Resource resource : resolver.getResources("classpath*:META-INF/beangle/persist*.properties")) { InputStream is = resource.getURL().openStream(); Properties props = new Properties(); if (null != is) props.load(is); Object module = props.remove("module"); if (null == module) continue; Class<? extends AbstractPersistModule> moduleClass = (Class<? extends AbstractPersistModule>) ClassLoaders .loadClass(module.toString()); addPersistInfo(cfg, moduleClass.newInstance().getConfig()); Enumeration<String> enumer = (Enumeration<String>) props.propertyNames(); while (enumer.hasMoreElements()) { String propertyName = enumer.nextElement(); cfg.setProperty(propertyName, props.getProperty(propertyName)); } IOs.close(is); } cfg.buildMappings(); return cfg; }
From source file:org.blueoxygen.cimande.persistence.hibernate.DefaultHibernateSessionFactory.java
License:Open Source License
private SessionFactory buildSessionFactory() throws HibernateException { Configuration config = new Configuration(); config.configure();//from w ww . j ava2s . c om if (System.getProperty("hibernate.connect.url.override") != null) { config.setProperty("hibernate.connection.url", System.getProperty("hibernate.connect.url.override")); config.setProperty("hibernate.connection.username", "sa"); config.setProperty("hibernate.connection.password", ""); config.setProperty("hibernate.dialect", "net.sf.hibernate.dialect.HSQLDialect"); } // update database schema if required try { new SchemaUpdate(config).execute(false, false); } catch (HibernateException e) { log.fatal("Cannot update schema", e); throw new PersistenceException("Cannot update schema", e); } SessionFactory result = config.buildSessionFactory(); return result; }
From source file:org.codehaus.griffon.runtime.hibernate3.DefaultHibernate3Factory.java
License:Apache License
/**
 * Configures automatic schema generation for the given datasource.
 * Honors a "schema" override from the datasource config; defaults to
 * create-drop.
 */
protected void createSchema(@Nonnull String dataSourceName, @Nonnull Map<String, Object> config,
        @Nonnull Configuration configuration) {
    String hbm2ddlMode = getConfigValue(config, "schema", "create-drop");
    configuration.setProperty("hibernate.hbm2ddl.auto", hbm2ddlMode);
}