List of usage examples for org.hibernate ScrollMode.FORWARD_ONLY

ScrollMode.FORWARD_ONLY

The examples below show how open-source projects use org.hibernate.ScrollMode.FORWARD_ONLY, which opens a ScrollableResults cursor that only moves forward through the result set. Each entry lists the source file and license it was taken from.
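Before the project examples, here is a minimal sketch of the basic pattern, assuming an open Hibernate Session named session and a hypothetical mapped entity Person: open a forward-only cursor, iterate, and always close it.

// Minimal sketch; `session` and the Person entity are assumptions, not from the examples below.
ScrollableResults results = session.createQuery("from Person")
        .setReadOnly(true)                 // loaded entities are not dirty-checked
        .setFetchSize(100)                 // hint the JDBC driver to stream rows in batches
        .scroll(ScrollMode.FORWARD_ONLY);  // cursor may only move forward
try {
    while (results.next()) {
        Person p = (Person) results.get(0); // first column of the projection
        // ... process p ...
    }
} finally {
    results.close(); // always release the underlying JDBC cursor
}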
From source file: com.querydsl.jpa.HibernateHandler.java
License: Apache License

@SuppressWarnings("unchecked")
@Override
public <T> CloseableIterator<T> iterate(Query query, FactoryExpression<?> projection) {
    if (query instanceof HibernateQuery) {
        HibernateQuery hQuery = (HibernateQuery) query;
        ScrollableResults results = hQuery.getHibernateQuery().scroll(ScrollMode.FORWARD_ONLY);
        CloseableIterator<T> iterator = new ScrollableResultsIterator<T>(results);
        if (projection != null) {
            iterator = new TransformingIterator<T>(iterator, projection);
        }
        return iterator;
    } else {
        Iterator<T> iterator = query.getResultList().iterator();
        if (projection != null) {
            return new TransformingIterator<T>(iterator, projection);
        } else {
            return new IteratorAdapter<T>(iterator);
        }
    }
}
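The ScrollableResultsIterator used above is not shown in this entry. A hypothetical, stripped-down adapter over the same idea looks roughly like this; the real Querydsl class also handles projections and multi-column rows.

// Hypothetical simplification of a ScrollableResults-backed iterator.
class SimpleScrollIterator<T> implements java.util.Iterator<T>, java.io.Closeable {
    private final ScrollableResults results;
    private Boolean hasNext; // caches the result of results.next()

    SimpleScrollIterator(ScrollableResults results) {
        this.results = results;
    }

    @Override
    public boolean hasNext() {
        if (hasNext == null) {
            hasNext = results.next(); // advance the forward-only cursor exactly once
        }
        return hasNext;
    }

    @Override
    @SuppressWarnings("unchecked")
    public T next() {
        if (!hasNext()) {
            throw new java.util.NoSuchElementException();
        }
        hasNext = null; // consume the cached advance
        return (T) results.get(0);
    }

    @Override
    public void close() {
        results.close(); // release the JDBC cursor
    }
}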
From source file: com.reignite.query.StructuredQuery.java
License: Open Source License

private void join(List<Object> rows) {
    for (Join join : joins) {
        Set<Object> ids = new HashSet<Object>();
        for (Object obj : rows) {
            if (obj instanceof Map) {
                ids.add(((Map<?, ?>) obj).get(join.getJoinId()));
            }
        }
        // prepare the join by setting the order and adding an "in" clause
        join.prepare(ids);
        // if ids is size 1 then we are either doing a per row join or there is only 1 result to join to
        int firstRow = ids.size() == 1 ? join.getStartIndex() : 0;
        ScrollableResults scroll = join.getCriteria().scroll(ScrollMode.FORWARD_ONLY);
        if (scroll.setRowNumber(firstRow)) {
            do {
                Object[] row = scroll.get();
                mergeResult(rows, row, join);
            } while (scroll.next());
        }
        scroll.close();
    }
}
From source file: com.reignite.query.StructuredQuery.java
License: Open Source License

private int runQuery(Criteria criteria, QueryResult result, int maxResults) {
    ScrollableResults scroll = criteria.scroll(ScrollMode.FORWARD_ONLY);
    int count = 0;
    if (scroll.setRowNumber(startIndex)) {
        while (count < maxResults) {
            Object[] row = scroll.get();
            count = fillResult(result, row) ? count += 1 : count;
            if (!scroll.next()) {
                break;
            }
        }
    }
    int totalResultCount = 0;
    if (scroll.last()) {
        totalResultCount = scroll.getRowNumber() + 1;
    }
    result.setTotalResults(totalResultCount);
    scroll.close();
    return count;
}
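Note that ScrollMode.FORWARD_ONLY only guarantees forward navigation; whether random-access calls like setRowNumber and last() work on such a cursor depends on the JDBC driver. A forward-only-safe variant, sketched here with the same hypothetical helpers (fillResult, startIndex, maxResults), derives the total count while scrolling instead:

// Sketch: count rows while scrolling forward, avoiding last() and setRowNumber().
ScrollableResults scroll = criteria.scroll(ScrollMode.FORWARD_ONLY);
int row = 0;
int count = 0;
while (scroll.next()) {
    if (row >= startIndex && count < maxResults && fillResult(result, scroll.get())) {
        count++; // only rows in the requested page are materialized
    }
    row++; // total rows seen so far
}
result.setTotalResults(row);
scroll.close();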
From source file: com.square.core.agent.RebuildingIndexAgentJmxThead.java
License: Open Source License

/**
 * Runs manual indexing from a query.
 */
private void runManualIndexer(Session session) {
    final FullTextSession fullTextSession = Search.getFullTextSession(session);
    try {
        fullTextSession.setFlushMode(FlushMode.MANUAL);
        fullTextSession.setCacheMode(CacheMode.IGNORE);
        final Transaction transaction = fullTextSession.beginTransaction();
        // Scrollable results will avoid loading too many objects in memory
        final ScrollableResults results = fullTextSession.createQuery(agent.getRequete())
                .setFetchSize(agent.getBatchSizeToLoad()).scroll(ScrollMode.FORWARD_ONLY);
        int index = 0;
        while (results.next()) {
            index++;
            logger.debug(agent.getMessageSourceUtil().get(AgentJmxKeyUtil.MESSAGE_INDEXATION_DE) + " "
                    + results.get(0) + " (id = " + ((BaseModel) results.get(0)).getId() + ")");
            fullTextSession.index(results.get(0)); // index each element
            if (index % agent.getBatchSizeToLoad() == 0) {
                fullTextSession.flushToIndexes(); // apply changes to indexes
                fullTextSession.clear(); // free memory since the queue is processed
            }
        }
        transaction.commit();
    } catch (SearchException e) {
        e.printStackTrace();
    }
}
From source file: com.wci.umls.server.jpa.algo.RrfLoaderAlgorithm.java
License: Open Source License

/**
 * Load MRCONSO.RRF. This is responsible for loading {@link Atom}s and
 * {@link AtomClass}es.
 *
 * @throws Exception the exception
 */
private void loadMrconso() throws Exception {
    logInfo("  Load MRCONSO");
    logInfo("  Insert atoms and concepts ");

    // Set up maps
    String line = null;

    int objectCt = 0;
    final PushBackReader reader = readers.getReader(RrfReaders.Keys.MRCONSO);
    final String fields[] = new String[18];
    String prevCui = null;
    Concept cui = null;
    while ((line = reader.readLine()) != null) {

        line = line.replace("\r", "");
        FieldedStringTokenizer.split(line, "|", 18, fields);

        // Skip non-matching in single mode
        if (singleMode && !fields[11].equals(getTerminology())) {
            continue;
        }

        // Field Description
        // 0 CUI
        // 1 LAT
        // 2 TS
        // 3 LUI
        // 4 STT
        // 5 SUI
        // 6 ISPREF
        // 7 AUI
        // 8 SAUI
        // 9 SCUI
        // 10 SDUI
        // 11 SAB
        // 12 TTY
        // 13 CODE
        // 14 STR
        // 15 SRL
        // 16 SUPPRESS
        // 17 CVF
        //
        // e.g.
        // C0000005|ENG|P|L0000005|PF|S0007492|Y|A7755565||M0019694|D012711|MSH|PEN|D012711|(131)I-Macroaggregated
        // Albumin|0|N|256|

        // set the root terminology language
        loadedRootTerminologies.get(fields[11]).setLanguage(fields[1]);

        final Atom atom = new AtomJpa();
        atom.setLanguage(fields[1]);
        atom.setTimestamp(releaseVersionDate);
        atom.setLastModified(releaseVersionDate);
        atom.setLastModifiedBy(loader);
        atom.setObsolete(fields[16].equals("O"));
        atom.setSuppressible(!fields[16].equals("N"));
        atom.setPublished(true);
        atom.setPublishable(true);
        atom.setName(fields[14]);
        atom.setTerminology(fields[11]);
        if (loadedTerminologies.get(fields[11]) == null) {
            throw new Exception("Atom references terminology that does not exist: " + fields[11]);
        }
        atom.setVersion(loadedTerminologies.get(fields[11]).getVersion());
        // skip in single mode
        if (!singleMode) {
            atom.putAlternateTerminologyId(getTerminology(), fields[7]);
        }
        atom.setTerminologyId(fields[8]);
        atom.setTermType(fields[12]);
        atom.setWorkflowStatus(published);
        atom.setCodeId(fields[13]);
        atom.setDescriptorId(fields[10]);
        atom.setConceptId(fields[9]);
        atom.setStringClassId(fields[5]);
        atom.setLexicalClassId(fields[3]);
        atom.setCodeId(fields[13]);

        // Handle root terminology short name, hierarchical name, and sy names
        if (fields[11].equals("SRC") && fields[12].equals("SSN")) {
            final Terminology t = loadedTerminologies.get(fields[13].substring(2));
            if (t == null || t.getRootTerminology() == null) {
                logError("  Null root " + line);
            } else {
                t.getRootTerminology().setShortName(fields[14]);
            }
        }
        if (fields[11].equals("SRC") && fields[12].equals("RHT")) {
            final Terminology t = loadedTerminologies.get(fields[13].substring(2));
            if (t == null || t.getRootTerminology() == null) {
                logError("  Null root " + line);
            } else {
                t.getRootTerminology().setHierarchicalName(fields[14]);
            }
        }
        if (fields[11].equals("SRC") && fields[12].equals("RPT")) {
            final Terminology t = loadedTerminologies.get(fields[13].substring(2));
            if (t == null || t.getRootTerminology() == null) {
                logError("  Null root " + line);
            } else {
                t.getRootTerminology().setPreferredName(fields[14]);
            }
        }
        if (fields[11].equals("SRC") && fields[12].equals("RSY") && !fields[14].equals("")) {
            final Terminology t = loadedTerminologies.get(fields[13].substring(2));
            if (t == null || t.getRootTerminology() == null) {
                logError("  Null root " + line);
            } else {
                List<String> syNames = t.getRootTerminology().getSynonymousNames();
                syNames.add(fields[14]);
            }
        }

        // Handle terminology sy names
        if (fields[11].equals("SRC") && fields[12].equals("VSY") && !fields[14].equals("")) {
            final Terminology t = loadedTerminologies.get(fields[13].substring(2));
            if (t == null || t.getRootTerminology() == null) {
                logError("  Null root " + line);
            } else {
                List<String> syNames = t.getSynonymousNames();
                syNames.add(fields[14]);
            }
        }

        // Determine organizing class type for terminology
        if (!atom.getDescriptorId().equals("")) {
            termIdTypeMap.put(atom.getTerminology(), IdType.DESCRIPTOR);
        } else if (!atom.getConceptId().equals("")) {
            termIdTypeMap.put(atom.getTerminology(), IdType.CONCEPT);
        } // OTHERWISE it remains "CODE"

        // skip in single mode
        if (!singleMode) {
            atom.putConceptTerminologyId(getTerminology(), fields[0]);
        }

        // Add atoms and commit periodically
        addAtom(atom);
        logAndCommit(++objectCt, RootService.logCt, RootService.commitCt);
        atomIdMap.put(fields[7], atom.getId());
        atomTerminologyMap.put(fields[7], atom.getTerminology().intern());
        atomConceptIdMap.put(fields[7],
            atom.getConceptId().length() == 0 ? "".intern() : atom.getConceptId());
        atomCodeIdMap.put(fields[7],
            atom.getCodeId().length() == 0 ? "".intern() : atom.getCodeId());
        atomDescriptorIdMap.put(fields[7],
            atom.getDescriptorId().length() == 0 ? "".intern() : atom.getDescriptorId());

        // CUI - skip in single mode
        if (!singleMode) {
            // Add concept
            if (prevCui == null || !fields[0].equals(prevCui)) {
                if (prevCui != null) {
                    cui.setName(getComputedPreferredName(cui, list));
                    addConcept(cui);
                    conceptIdMap.put(cui.getTerminology() + cui.getTerminologyId(), cui.getId());
                    logAndCommit(++objectCt, RootService.logCt, RootService.commitCt);
                }
                cui = new ConceptJpa();
                cui.setTimestamp(releaseVersionDate);
                cui.setLastModified(releaseVersionDate);
                cui.setLastModifiedBy(loader);
                cui.setPublished(true);
                cui.setPublishable(true);
                cui.setTerminology(getTerminology());
                cui.setTerminologyId(fields[0]);
                cui.setVersion(getVersion());
                cui.setWorkflowStatus(published);
            }
            cui.getAtoms().add(atom);
            prevCui = fields[0];
        }

        // Handle Subset
        // C3539934|ENG|S|L11195730|PF|S13913746|N|A23460885||900000000000538005||SNOMEDCT_US|SB|900000000000538005|Description
        // format|9|N|256|
        if (fields[12].equals("SB")) {
            // Have to handle the type later, when we get to attributes
            final AtomSubset atomSubset = new AtomSubsetJpa();
            setSubsetFields(atomSubset, fields);
            cuiAuiAtomSubsetMap.put(fields[0] + fields[7], atomSubset);
            idTerminologyAtomSubsetMap.put(
                atomSubset.getTerminologyId() + atomSubset.getTerminology(), atomSubset);

            final ConceptSubset conceptSubset = new ConceptSubsetJpa();
            setSubsetFields(conceptSubset, fields);
            cuiAuiConceptSubsetMap.put(fields[0] + fields[7], conceptSubset);
            idTerminologyConceptSubsetMap.put(
                conceptSubset.getTerminologyId() + conceptSubset.getTerminology(), conceptSubset);
        }
    }

    // Add last concept
    if (prevCui != null) {
        cui.setName(getComputedPreferredName(cui, list));
        addConcept(cui);
        conceptIdMap.put(cui.getTerminology() + cui.getTerminologyId(), cui.getId());
        logAndCommit(++objectCt, RootService.logCt, RootService.commitCt);
    }

    // Set the terminology organizing class types
    for (final Terminology terminology : loadedTerminologies.values()) {
        final IdType idType = termIdTypeMap.get(terminology.getTerminology());
        if (idType != null && idType != IdType.CODE) {
            terminology.setOrganizingClassType(idType);
            updateTerminology(terminology);
        }
    }

    logInfo("  Add concepts");
    objectCt = 0;
    // NOTE: Hibernate-specific to support iterating
    // Restrict to timestamp used for THESE atoms, in case multiple RRF
    // files are loaded
    final Session session = manager.unwrap(Session.class);
    org.hibernate.Query hQuery = session.createQuery("select a from AtomJpa a "
        + "where conceptId is not null "
        + "and conceptId != '' and timestamp = :timestamp "
        + "order by terminology, conceptId");
    hQuery.setParameter("timestamp", releaseVersionDate);
    hQuery.setReadOnly(true).setFetchSize(2000).setCacheable(false);
    ScrollableResults results = hQuery.scroll(ScrollMode.FORWARD_ONLY);
    prevCui = null;
    cui = null;
    while (results.next()) {
        final Atom atom = (Atom) results.get()[0];
        if (atom.getConceptId() == null || atom.getConceptId().isEmpty()) {
            continue;
        }
        if (prevCui == null || !prevCui.equals(atom.getConceptId())) {
            if (cui != null) {
                // compute preferred name
                cui.setName(getComputedPreferredName(cui, list));
                addConcept(cui);
                conceptIdMap.put(cui.getTerminology() + cui.getTerminologyId(), cui.getId());
                logAndCommit(++objectCt, RootService.logCt, RootService.commitCt);
            }
            cui = new ConceptJpa();
            cui.setTimestamp(releaseVersionDate);
            cui.setLastModified(releaseVersionDate);
            cui.setLastModifiedBy(loader);
            cui.setPublished(true);
            cui.setPublishable(true);
            cui.setTerminology(atom.getTerminology());
            cui.setTerminologyId(atom.getConceptId());
            cui.setVersion(atom.getVersion());
            cui.setWorkflowStatus(published);
        }
        cui.getAtoms().add(atom);
        prevCui = atom.getConceptId();
    }
    if (cui != null) {
        cui.setName(getComputedPreferredName(cui, list));
        addConcept(cui);
        conceptIdMap.put(cui.getTerminology() + cui.getTerminologyId(), cui.getId());
        commitClearBegin();
    }
    results.close();

    logInfo("  Add descriptors");
    objectCt = 0;
    // NOTE: Hibernate-specific to support iterating
    hQuery = session.createQuery("select a from AtomJpa a "
        + "where descriptorId is not null "
        + "and descriptorId != '' and timestamp = :timestamp "
        + "order by terminology, descriptorId");
    hQuery.setParameter("timestamp", releaseVersionDate);
    hQuery.setReadOnly(true).setFetchSize(2000).setCacheable(false);
    results = hQuery.scroll(ScrollMode.FORWARD_ONLY);
    String prevDui = null;
    Descriptor dui = null;
    while (results.next()) {
        final Atom atom = (Atom) results.get()[0];
        if (atom.getDescriptorId() == null || atom.getDescriptorId().isEmpty()) {
            continue;
        }
        if (prevDui == null || !prevDui.equals(atom.getDescriptorId())) {
            if (dui != null) {
                // compute preferred name
                dui.setName(getComputedPreferredName(dui, list));
                addDescriptor(dui);
                descriptorIdMap.put(dui.getTerminology() + dui.getTerminologyId(), dui.getId());
                logAndCommit(++objectCt, RootService.logCt, RootService.commitCt);
            }
            dui = new DescriptorJpa();
            dui.setTimestamp(releaseVersionDate);
            dui.setLastModified(releaseVersionDate);
            dui.setLastModifiedBy(loader);
            dui.setPublished(true);
            dui.setPublishable(true);
            dui.setTerminology(atom.getTerminology());
            dui.setTerminologyId(atom.getDescriptorId());
            dui.setVersion(atom.getVersion());
            dui.setWorkflowStatus(published);
        }
        dui.getAtoms().add(atom);
        prevDui = atom.getDescriptorId();
    }
    if (dui != null) {
        dui.setName(getComputedPreferredName(dui, list));
        addDescriptor(dui);
        descriptorIdMap.put(dui.getTerminology() + dui.getTerminologyId(), dui.getId());
        commitClearBegin();
    }
    results.close();

    // Use flag to decide whether to handle codes
    if (codesFlag) {
        logInfo("  Add codes");
        objectCt = 0;
        // NOTE: Hibernate-specific to support iterating
        // Skip NOCODE
        // TODO: there is a LNC exception here -for now
        hQuery = session.createQuery("select a from AtomJpa a where codeId is not null "
            + "and codeId != '' and timestamp = :timestamp "
            + "and (terminology = 'LNC' OR (codeId != conceptId and codeId != descriptorId)) "
            + "and timestamp = :timestamp "
            + "order by terminology, codeId");
        hQuery.setParameter("timestamp", releaseVersionDate);
        hQuery.setReadOnly(true).setFetchSize(2000).setCacheable(false);
        results = hQuery.scroll(ScrollMode.FORWARD_ONLY);
        String prevCode = null;
        Code code = null;
        while (results.next()) {
            final Atom atom = (Atom) results.get()[0];
            if (atom.getCodeId() == null || atom.getCodeId().isEmpty()
                || atom.getCodeId().equals("NOCODE")) {
                continue;
            }
            if (prevCode == null || !prevCode.equals(atom.getCodeId())) {
                if (code != null) {
                    // compute preferred name
                    code.setName(getComputedPreferredName(code, list));
                    addCode(code);
                    codeIdMap.put(code.getTerminology() + code.getTerminologyId(), code.getId());
                    logAndCommit(++objectCt, RootService.logCt, 1000);
                }
                code = new CodeJpa();
                code.setTimestamp(releaseVersionDate);
                code.setLastModified(releaseVersionDate);
                code.setLastModifiedBy(loader);
                code.setPublished(true);
                code.setPublishable(true);
                code.setTerminology(atom.getTerminology());
                code.setTerminologyId(atom.getCodeId());
                code.setVersion(atom.getVersion());
                code.setWorkflowStatus(published);
            }
            code.getAtoms().add(atom);
            prevCode = atom.getCodeId();
        }
        if (code != null) {
            code.setName(getComputedPreferredName(code, list));
            addCode(code);
            codeIdMap.put(code.getTerminology() + code.getTerminologyId(), code.getId());
            commitClearBegin();
        }
        results.close();
    }

    // NOTE: for efficiency and lack of use cases, we've temporarily
    // suspended the loading of LexicalClass and StringClass objects
    //
    // // NOTE: atoms are not connected to lexical classes as there are
    // // currently no known uses for this.
    // logInfo("  Add lexical classes");
    // objectCt = 0;
    // query = NEED TO FIX THIS
    // manager
    // .createQuery("select a.id from AtomJpa a order by lexicalClassId");
    // String prevLui = null;
    // LexicalClass lui = null;
    // LexicalClass atoms = null;
    // for (final Long id : (List<Long>) query.getResultList()) {
    // final Atom atom = getAtom(id);
    // if (atom.getLexicalClassId() == null
    // || atom.getLexicalClassId().isEmpty()) {
    // continue;
    // }
    // if (prevLui == null || !prevLui.equals(atom.getLexicalClassId())) {
    // if (lui != null) {
    // // compute preferred name
    // lui.setName(getComputedPreferredName(atoms));
    // addLexicalClass(lui);
    // logAndCommit(++objectCt, RootService.logCt, RootService.commitCt);
    // }
    // // just used to hold atoms, never saved.
    // atoms = new LexicalClassJpa();
    // lui = new LexicalClassJpa();
    // lui.setTimestamp(releaseVersionDate);
    // lui.setLastModified(releaseVersionDate);
    // lui.setLastModifiedBy(loader);
    // lui.setPublished(true);
    // lui.setPublishable(true);
    // lui.setTerminology(terminology);
    // lui.setTerminologyId(atom.getLexicalClassId());
    // lui.setVersion(version);
    // lui.setWorkflowStatus(published);
    // lui.setNormalizedString(getNormalizedString(atom.getName()));
    // }
    // atoms.addAtom(atom);
    // prevLui = atom.getLexicalClassId();
    // }
    // if (lui != null) {
    // lui.setName(getComputedPreferredName(atoms));
    // commitClearBegin();
    // logAndCommit(++objectCt, RootService.logCt, RootService.commitCt);
    // }
    //
    // // NOTE: currently atoms are not loaded for string classes
    // // We simply load the objects themselves ( for SUI maintenance)
    // // There are no known use cases for having the atoms here.
    // logInfo("  Add string classes");
    // objectCt = 0;
    // query = NEED TO FIX THIS
    // manager
    // .createQuery("select distinct stringClassId, name from AtomJpa a");
    // for (final Object[] suiFields : (List<Object[]>) query.getResultList()) {
    // final StringClass sui = new StringClassJpa();
    // sui.setTimestamp(releaseVersionDate);
    // sui.setLastModified(releaseVersionDate);
    // sui.setLastModifiedBy(loader);
    // sui.setPublished(true);
    // sui.setPublishable(true);
    // sui.setTerminology(terminology);
    // sui.setTerminologyId(suiFields[0].toString());
    // sui.setVersion(version);
    // sui.setWorkflowStatus(published);
    // sui.setName(suiFields[1].toString());
    // addStringClass(sui);
    // logAndCommit(++objectCt, RootService.logCt, RootService.commitCt);
    // }

    // commit
    commitClearBegin();

    logInfo("  Update terminologies for languages and names.");

    // Update all root terminologies now that we know languages and names
    for (final RootTerminology root : loadedRootTerminologies.values()) {
        updateRootTerminology(root);
    }

    // Update all root terminologies now that we know languages and names
    for (final Terminology terminology : loadedTerminologies.values()) {
        updateTerminology(terminology);
    }
    commitClearBegin();
}
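Stripped of domain detail, the three scroll loops above share one pattern: stream entities in key order through a read-only, forward-only cursor, and flush each group when the key changes. A condensed sketch of that pattern follows; handleGroup and the exact query are illustrative assumptions, not the project's API.

// Condensed sketch of the group-while-scrolling pattern used above.
// `session`, `releaseVersionDate`, and handleGroup(...) are illustrative only.
org.hibernate.Query q = session.createQuery(
    "select a from AtomJpa a where conceptId != '' "
    + "and timestamp = :timestamp order by terminology, conceptId");
q.setParameter("timestamp", releaseVersionDate);
q.setReadOnly(true).setFetchSize(2000).setCacheable(false);
ScrollableResults rs = q.scroll(ScrollMode.FORWARD_ONLY);
String prevKey = null;
List<Atom> group = new ArrayList<>();
while (rs.next()) {
    final Atom atom = (Atom) rs.get()[0];
    if (prevKey != null && !prevKey.equals(atom.getConceptId())) {
        handleGroup(group); // e.g. build and persist a Concept from the grouped atoms
        group.clear();
    }
    group.add(atom);
    prevKey = atom.getConceptId();
}
if (!group.isEmpty()) {
    handleGroup(group); // flush the final group
}
rs.close();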
From source file: com.yahoo.elide.datastores.hibernate3.HibernateDataStoreSupplier.java
License: Apache License

@Override
public DataStore get() {
    // Add additional checks to our static check mappings map.
    // NOTE: This is a bit hacky. We need to do a major overhaul on our test architecture
    TestCheckMappings.MAPPINGS.put("filterCheck", Filtered.FilterCheck.class);
    TestCheckMappings.MAPPINGS.put("filterCheck3", Filtered.FilterCheck3.class);

    // method to force class initialization
    Configuration configuration = new Configuration();
    try {
        ClassScanner.getAnnotatedClasses(Parent.class.getPackage(), Entity.class)
                .forEach(configuration::addAnnotatedClass);
    } catch (MappingException e) {
        throw new RuntimeException(e);
    }

    SessionFactory sessionFactory = configuration.configure("hibernate.cfg.xml")
            .setProperty(Environment.CURRENT_SESSION_CONTEXT_CLASS, "thread")
            .setProperty(Environment.URL,
                    "jdbc:mysql://localhost:" + System.getProperty("mysql.port", "3306")
                            + "/root?serverTimezone=UTC")
            .setProperty(Environment.USER, "root")
            .setProperty(Environment.PASS, "root")
            .buildSessionFactory();

    // create example tables from beans
    SchemaExport schemaExport = new SchemaExport(configuration).setHaltOnError(true);
    schemaExport.drop(false, true);
    schemaExport.execute(false, true, false, true);

    if (!schemaExport.getExceptions().isEmpty()) {
        throw new RuntimeException(schemaExport.getExceptions().toString());
    }

    return new HibernateStore(sessionFactory, true, ScrollMode.FORWARD_ONLY);
}
From source file: com.yahoo.elide.datastores.hibernate5.HibernateDataStoreSupplier.java
License: Apache License

@Override
public DataStore get() {
    // Add additional checks to our static check mappings map.
    // NOTE: This is a bit hacky. We need to do a major overhaul on our test architecture
    TestCheckMappings.MAPPINGS.put("filterCheck", Filtered.FilterCheck.class);
    TestCheckMappings.MAPPINGS.put("filterCheck3", Filtered.FilterCheck3.class);

    // method to force class initialization
    MetadataSources metadataSources = new MetadataSources(
            new StandardServiceRegistryBuilder()
                    .configure("hibernate.cfg.xml")
                    .applySetting(Environment.CURRENT_SESSION_CONTEXT_CLASS, "thread")
                    .applySetting(Environment.URL,
                            "jdbc:mysql://localhost:" + System.getProperty("mysql.port", "3306")
                                    + "/root?serverTimezone=UTC")
                    .applySetting(Environment.USER, "root")
                    .applySetting(Environment.PASS, "root")
                    .build());

    try {
        ClassScanner.getAnnotatedClasses(Parent.class.getPackage(), Entity.class)
                .forEach(metadataSources::addAnnotatedClass);
    } catch (MappingException e) {
        throw new RuntimeException(e);
    }

    MetadataImplementor metadataImplementor = (MetadataImplementor) metadataSources.buildMetadata();

    // create example tables from beans
    SchemaExport schemaExport = new SchemaExport(metadataImplementor); //.setHaltOnError(true);
    schemaExport.drop(false, true);
    schemaExport.execute(false, true, false, true);

    if (!schemaExport.getExceptions().isEmpty()) {
        throw new RuntimeException(schemaExport.getExceptions().toString());
    }

    return new HibernateStore(metadataImplementor.buildSessionFactory(), true, ScrollMode.FORWARD_ONLY);
}
From source file: com.yahoo.elide.datastores.hibernate5.HibernateEntityManagerDataStoreSupplier.java
License: Apache License

@Override
public DataStore get() {
    // Add additional checks to our static check mappings map.
    // NOTE: This is a bit hacky. We need to do a major overhaul on our test architecture
    TestCheckMappings.MAPPINGS.put("filterCheck", Filtered.FilterCheck.class);
    TestCheckMappings.MAPPINGS.put("filterCheck3", Filtered.FilterCheck3.class);

    Map<String, Object> options = new HashMap<>();
    ArrayList<Class> bindClasses = new ArrayList<>();
    try {
        bindClasses.addAll(ClassScanner.getAnnotatedClasses(Parent.class.getPackage(), Entity.class));
    } catch (MappingException e) {
        throw new IllegalStateException(e);
    }

    options.put("javax.persistence.jdbc.driver", "com.mysql.jdbc.Driver");
    options.put("javax.persistence.jdbc.url",
            JDBC_PREFIX + System.getProperty(MYSQL_PORT_PROPERTY, MYSQL_PORT) + JDBC_SUFFIX);
    options.put("javax.persistence.jdbc.user", ROOT);
    options.put("javax.persistence.jdbc.password", ROOT);
    options.put(AvailableSettings.LOADED_CLASSES, bindClasses);

    EntityManagerFactory emf = Persistence.createEntityManagerFactory("elide-tests", options);
    HibernateEntityManager em = (HibernateEntityManager) emf.createEntityManager();

    // method to force class initialization
    MetadataSources metadataSources = new MetadataSources(
            new StandardServiceRegistryBuilder()
                    .configure("hibernate.cfg.xml")
                    .applySetting(Environment.CURRENT_SESSION_CONTEXT_CLASS, "thread")
                    .applySetting(Environment.URL,
                            JDBC_PREFIX + System.getProperty(MYSQL_PORT_PROPERTY, MYSQL_PORT) + JDBC_SUFFIX)
                    .applySetting(Environment.USER, ROOT)
                    .applySetting(Environment.PASS, ROOT)
                    .build());

    try {
        ClassScanner.getAnnotatedClasses(Parent.class.getPackage(), Entity.class)
                .forEach(metadataSources::addAnnotatedClass);
    } catch (MappingException e) {
        throw new IllegalStateException(e);
    }

    MetadataImplementor metadataImplementor = (MetadataImplementor) metadataSources.buildMetadata();

    EnumSet<TargetType> type = EnumSet.of(TargetType.DATABASE);
    // create example tables from beans
    SchemaExport schemaExport = new SchemaExport();
    schemaExport.drop(type, metadataImplementor);
    schemaExport.execute(type, SchemaExport.Action.CREATE, metadataImplementor);

    if (!schemaExport.getExceptions().isEmpty()) {
        throw new IllegalStateException(schemaExport.getExceptions().toString());
    }

    return new AbstractHibernateStore.Builder(em)
            .withScrollEnabled(true)
            .withScrollMode(ScrollMode.FORWARD_ONLY)
            .build();
}
From source file: com.yahoo.elide.datastores.multiplex.bridgeable.BridgeableStoreSupplier.java
License: Apache License

@Override
public DataStore get() {
    // method to force class initialization
    MetadataSources metadataSources = new MetadataSources(
            new StandardServiceRegistryBuilder()
                    .configure("hibernate.cfg.xml")
                    .applySetting(Environment.CURRENT_SESSION_CONTEXT_CLASS, "thread")
                    .applySetting(Environment.URL,
                            "jdbc:mysql://localhost:" + System.getProperty("mysql.port", "3306")
                                    + "/root?serverTimezone=UTC")
                    .applySetting(Environment.USER, "root")
                    .applySetting(Environment.PASS, "root")
                    .build());

    metadataSources.addAnnotatedClass(HibernateUser.class);

    MetadataImplementor metadataImplementor = (MetadataImplementor) metadataSources.buildMetadata();

    // create example tables from beans
    SchemaExport schemaExport = new SchemaExport(metadataImplementor); //.setHaltOnError(true);
    schemaExport.drop(false, true);
    schemaExport.execute(false, true, false, true);

    if (!schemaExport.getExceptions().isEmpty()) {
        throw new RuntimeException(schemaExport.getExceptions().toString());
    }

    LATEST_HIBERNATE_STORE = new HibernateStore.Builder(metadataImplementor.buildSessionFactory())
            .withScrollEnabled(true)
            .withScrollMode(ScrollMode.FORWARD_ONLY)
            .build();

    BridgeableRedisStore hbaseStore = new BridgeableRedisStore();

    return new MultiplexManager(LATEST_HIBERNATE_STORE, hbaseStore);
}
From source file: com.yahoo.elide.standalone.datastore.InjectionAwareHibernateStore.java
License: Apache License

/**
 * Constructor.
 *
 * @param injector hk2 injector to bind.
 * @param sessionFactory Hibernate session factory.
 */
public InjectionAwareHibernateStore(ServiceLocator injector, SessionFactory sessionFactory) {
    super(sessionFactory, true, ScrollMode.FORWARD_ONLY);
    this.injector = injector;
}