List of usage examples for org.hibernate.ScrollableResults.close()
void close();
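A minimal usage sketch before the examples below: close() releases the underlying JDBC result set and cursor, so it is typically called in a finally block once iteration is done. The Session named session and the Cat entity here are illustrative assumptions, not taken from any example on this page.

ScrollableResults results = session.createQuery("from Cat")
    .scroll(ScrollMode.FORWARD_ONLY);
try {
  while (results.next()) {
    // process one row at a time without holding the whole result set in memory
    Cat cat = (Cat) results.get(0);
    System.out.println(cat);
  }
} finally {
  results.close(); // always release the cursor, even if processing throws
}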
From source file:com.mysema.query.jpa.IntegrationBase.java
License:Apache License
@Test
public void Scroll() {
  session.save(new Cat("Bob", 10));
  session.save(new Cat("Steve", 11));

  HibernateQuery query = new HibernateQuery(session);
  ScrollableResults results = query.from(QCat.cat).scroll(ScrollMode.SCROLL_INSENSITIVE, QCat.cat);
  while (results.next()) {
    System.out.println(results.get(0));
  }
  results.close();
}
From source file:com.opensourcestrategies.financials.reports.FinancialReports.java
License:Open Source License
/**
 * <p>Looks over invoice adjustments and transforms them into sales and tax invoice item facts.
 * The adjustment amount is added to the discount column of the fact table, which is the only
 * currency column affected.</p>
 *
 * @param session Hibernate session
 * @param delegator the delegator
 * @throws GenericEntityException
 */
public static void loadInvoiceAdjustments(Session session, Delegator delegator) throws GenericEntityException {
  Transaction tx = session.beginTransaction();

  // Retrieve data as a scrollable result set.
  // This is a join of the InvoiceAdjustment and Invoice entities; each record has all the data
  // required to create a new fact row.
  Query invAdjQry = session.createQuery("select IA.invoiceAdjustmentId, IA.invoiceId, IA.amount,"
      + " I.partyIdFrom, I.invoiceDate, I.currencyUomId"
      + " from InvoiceAdjustment IA, Invoice I"
      + " where IA.invoiceId = I.invoiceId and I.invoiceTypeId = 'SALES_INVOICE'"
      + " and I.statusId not in ('INVOICE_IN_PROCESS', 'INVOICE_CANCELLED', 'INVOICE_VOIDED', 'INVOICE_WRITEOFF')");
  ScrollableResults adjustments = invAdjQry.scroll();

  // iterate over the record set
  while (adjustments.next()) {
    // keep result fields in variables as a matter of convenience
    String invoiceId = adjustments.getString(1);
    String invoiceAdjustmentId = adjustments.getString(0);
    BigDecimal amount = adjustments.getBigDecimal(2);
    String organizationPartyId = adjustments.getString(3);
    Timestamp invoiceDate = (Timestamp) adjustments.get(4);
    String currencyUomId = adjustments.getString(5);

    // look up the date dimension
    DateFormat dayOfMonthFmt = new SimpleDateFormat("dd");
    DateFormat monthOfYearFmt = new SimpleDateFormat("MM");
    DateFormat yearNumberFmt = new SimpleDateFormat("yyyy");
    String dayOfMonth = dayOfMonthFmt.format(invoiceDate);
    String monthOfYear = monthOfYearFmt.format(invoiceDate);
    String yearNumber = yearNumberFmt.format(invoiceDate);
    EntityCondition dateDimConditions = EntityCondition.makeCondition(EntityOperator.AND,
        EntityCondition.makeCondition("dayOfMonth", dayOfMonth),
        EntityCondition.makeCondition("monthOfYear", monthOfYear),
        EntityCondition.makeCondition("yearNumber", yearNumber));
    Long dateDimId = UtilEtl.lookupDimension("DateDim", "dateDimId", dateDimConditions, delegator);

    // look up the currency dimension
    Long currencyDimId = UtilEtl.lookupDimension("CurrencyDim", "currencyDimId",
        EntityCondition.makeCondition("uomId", currencyUomId), delegator);

    // look up the organization dimension
    Long organizationDimId = UtilEtl.lookupDimension("OrganizationDim", "organizationDimId",
        EntityCondition.makeCondition("organizationPartyId", organizationPartyId), delegator);

    // create rows for both fact tables
    TaxInvoiceItemFact taxFact = new TaxInvoiceItemFact();
    taxFact.setDateDimId(dateDimId);
    taxFact.setStoreDimId(0L);
    taxFact.setTaxAuthorityDimId(0L);
    taxFact.setCurrencyDimId(currencyDimId);
    taxFact.setOrganizationDimId(organizationDimId);
    taxFact.setInvoiceId(invoiceId);
    taxFact.setInvoiceAdjustmentId(invoiceAdjustmentId);
    taxFact.setGrossAmount(BigDecimal.ZERO);
    taxFact.setDiscounts(amount);
    taxFact.setRefunds(BigDecimal.ZERO);
    taxFact.setNetAmount(BigDecimal.ZERO);
    taxFact.setTaxable(BigDecimal.ZERO);
    taxFact.setTaxDue(BigDecimal.ZERO);
    session.save(taxFact);

    SalesInvoiceItemFact salesFact = new SalesInvoiceItemFact();
    salesFact.setDateDimId(dateDimId);
    salesFact.setStoreDimId(0L);
    salesFact.setCurrencyDimId(currencyDimId);
    salesFact.setOrganizationDimId(organizationDimId);
    salesFact.setInvoiceId(invoiceId);
    salesFact.setInvoiceAdjustmentId(invoiceAdjustmentId);
    salesFact.setGrossAmount(BigDecimal.ZERO);
    salesFact.setDiscounts(amount);
    salesFact.setRefunds(BigDecimal.ZERO);
    salesFact.setNetAmount(BigDecimal.ZERO);
    session.save(salesFact);
  }
  adjustments.close();
  tx.commit(); // persist results; don't move this statement up
}
From source file:com.querydsl.jpa.IntegrationBase.java
License:Apache License
@Test
public void Scroll() {
  session.save(new Cat("Bob", 10));
  session.save(new Cat("Steve", 11));

  QCat cat = QCat.cat;
  HibernateQuery<?> query = new HibernateQuery<Void>(session);
  ScrollableResults results = query.from(cat).select(cat).scroll(ScrollMode.SCROLL_INSENSITIVE);
  while (results.next()) {
    assertNotNull(results.get(0));
  }
  results.close();
}
From source file:com.reignite.query.StructuredQuery.java
License:Open Source License
private void join(List<Object> rows) {
  for (Join join : joins) {
    Set<Object> ids = new HashSet<Object>();
    for (Object obj : rows) {
      if (obj instanceof Map) {
        ids.add(((Map<?, ?>) obj).get(join.getJoinId()));
      }
    }
    // prepare the join by setting the order and adding an "in" clause
    join.prepare(ids);
    // if ids has size 1, we are either doing a per-row join or there is only one result to join to
    int firstRow = ids.size() == 1 ? join.getStartIndex() : 0;
    ScrollableResults scroll = join.getCriteria().scroll(ScrollMode.FORWARD_ONLY);
    if (scroll.setRowNumber(firstRow)) {
      do {
        Object[] row = scroll.get();
        mergeResult(rows, row, join);
      } while (scroll.next());
    }
    scroll.close();
  }
}
From source file:com.reignite.query.StructuredQuery.java
License:Open Source License
private int runQuery(Criteria criteria, QueryResult result, int maxResults) {
  ScrollableResults scroll = criteria.scroll(ScrollMode.FORWARD_ONLY);
  int count = 0;
  if (scroll.setRowNumber(startIndex)) {
    while (count < maxResults) {
      Object[] row = scroll.get();
      if (fillResult(result, row)) {
        count++;
      }
      if (!scroll.next()) {
        break;
      }
    }
  }
  int totalResultCount = 0;
  if (scroll.last()) {
    totalResultCount = scroll.getRowNumber() + 1;
  }
  result.setTotalResults(totalResultCount);
  scroll.close();
  return count;
}
From source file:com.sapienter.jbilling.server.process.BillingProcessSessionBean.java
License:Open Source License
@Transactional(propagation = Propagation.REQUIRES_NEW)
public void processEntity(Integer entityId, Date billingDate, Integer periodType, Integer periodValue,
    boolean isReview) throws SessionInternalError {
  if (entityId == null || billingDate == null) {
    throw new SessionInternalError("entityId and billingDate can't be null");
  }
  try {
    ConfigurationBL conf = new ConfigurationBL(entityId);
    IBillingProcessSessionBean local = (IBillingProcessSessionBean) Context
        .getBean(Context.Name.BILLING_PROCESS_SESSION);
    Integer billingProcessId = local.createProcessRecord(entityId, billingDate, periodType, periodValue,
        isReview, conf.getEntity().getRetries());
    BillingProcessRunBL billingProcessRunBL = new BillingProcessRunBL();
    billingProcessRunBL.setProcess(billingProcessId);
    // TODO: all the customers' ids in memory is not a good idea. 1M customers would be 4MB of memory
    List<Integer> successfullUsers = billingProcessRunBL.findSuccessfullUsers();

    // start processing users of this entity
    int totalInvoices = 0;
    boolean onlyRecurring;
    // find out parameters from the configuration
    onlyRecurring = conf.getEntity().getOnlyRecurring() == 1;

    LOG.debug("**** ENTITY " + entityId + " PROCESSING USERS");

    // load the pluggable task for filtering the users
    PluggableTaskManager taskManager = new PluggableTaskManager(entityId,
        Constants.PLUGGABLE_TASK_BILL_PROCESS_FILTER);
    IBillingProcessFilterTask task = (IBillingProcessFilterTask) taskManager.getNextClass();
    // if one was not configured, just use the basic task by default
    if (task == null) {
      task = new BasicBillingProcessFilterTask();
    }

    BillingProcessDAS bpDas = new BillingProcessDAS();
    int usersFailed = 0;
    ScrollableResults userCursor = task.findUsersToProcess(entityId, billingDate);
    if (userCursor != null) {
      int count = 0;
      while (userCursor.next()) {
        Integer userId = (Integer) userCursor.get(0);
        if (successfullUsers.contains(userId)) { // TODO: change this to a query to the DB
          LOG.debug("User #" + userId + " was successfully processed during previous run. Skipping.");
          continue;
        }
        Integer result[] = null;
        try {
          result = local.processUser(billingProcessId, userId, isReview, onlyRecurring);
        } catch (Throwable ex) {
          LOG.error("Exception was caught when processing User #" + userId
              + ". Continuing process, skipping user.", ex);
          local.addProcessRunUser(billingProcessId, userId, ProcessRunUserDTO.STATUS_FAILED);
        }
        if (result != null) {
          LOG.debug("User " + userId + " done invoice generation.");
          if (!isReview) {
            for (int f = 0; f < result.length; f++) {
              local.emailAndPayment(entityId, result[f], billingProcessId,
                  conf.getEntity().getAutoPayment().intValue() == 1);
            }
            LOG.debug("User " + userId + " done email & payment.");
          }
          totalInvoices += result.length;
          local.addProcessRunUser(billingProcessId, userId, ProcessRunUserDTO.STATUS_SUCCEEDED);
        } else {
          LOG.debug("User " + userId + " NOT done");
          local.addProcessRunUser(billingProcessId, userId, ProcessRunUserDTO.STATUS_FAILED);
          ++usersFailed;
        }
        // make sure the memory doesn't get flooded
        if (++count % Constants.HIBERNATE_BATCH_SIZE == 0) {
          bpDas.reset();
        }
      }
      userCursor.close(); // done with the cursor, needs manual closing
    }

    // restore the configuration in the session, the reset removed it
    conf.set(entityId);

    if (usersFailed == 0) { // only if all users were processed successfully
      // if some of the invoices were paper invoices, a new file with all
      // of them has to be generated
      try {
        BillingProcessBL process = new BillingProcessBL(billingProcessId);
        PaperInvoiceBatchDTO batch = process.getEntity().getPaperInvoiceBatch();
        if (totalInvoices > 0 && batch != null) {
          PaperInvoiceBatchBL batchBl = new PaperInvoiceBatchBL(batch);
          batchBl.compileInvoiceFilesForProcess(entityId);
          // send the file as an attachment
          batchBl.sendEmail();
        }
      } catch (Exception e) {
        LOG.error("Error generating batch file", e);
      }
    }

    // now update the billing process record
    if (usersFailed == 0) {
      Integer processRunId = local.updateProcessRunFinished(billingProcessId,
          Constants.PROCESS_RUN_STATUS_SUCCESS);
      if (!isReview) {
        // the payment processing is happening in parallel;
        // this event marks the end of it
        EndProcessPaymentEvent event = new EndProcessPaymentEvent(processRunId, entityId);
        EventManager.process(event);
        // and finally the next run date in the config
        GregorianCalendar cal = new GregorianCalendar();
        cal.setTime(billingDate);
        cal.add(MapPeriodToCalendar.map(periodType), periodValue.intValue());
        conf.getEntity().setNextRunDate(cal.getTime());
        LOG.debug("Updated run date to " + cal.getTime());
      }
    } else {
      local.updateProcessRunFinished(billingProcessId, Constants.PROCESS_RUN_STATUS_FAILED);
      billingProcessRunBL.notifyProcessRunFailure(entityId, usersFailed);
      // TODO: check if updating totals is needed in the case of errors during user processing
      BillingProcessRunBL runBL = new BillingProcessRunBL();
      runBL.setProcess(billingProcessId);
      // update the totals
      runBL.updateTotals(billingProcessId);
    }
    LOG.debug("**** ENTITY " + entityId + " DONE. Failed users = " + usersFailed);
    // TODO: review that this is not needed: EventManager.process(generatedEvent);
  } catch (Exception e) {
    // no need to specify a rollback; an error in any of the
    // updates would not require the rest to be rolled back.
    // Actually, it's better to keep as far as it went.
    LOG.error("Error processing entity " + entityId, e);
  }
}
From source file:com.wci.umls.server.jpa.algo.RrfLoaderAlgorithm.java
License:Open Source License
/**
 * Load MRCONSO.RRF. This is responsible for loading {@link Atom}s and
 * {@link AtomClass}es.
 *
 * @throws Exception the exception
 */
private void loadMrconso() throws Exception {
  logInfo("  Load MRCONSO");
  logInfo("  Insert atoms and concepts ");

  // Set up maps
  String line = null;
  int objectCt = 0;
  final PushBackReader reader = readers.getReader(RrfReaders.Keys.MRCONSO);
  final String fields[] = new String[18];
  String prevCui = null;
  Concept cui = null;
  while ((line = reader.readLine()) != null) {
    line = line.replace("\r", "");
    FieldedStringTokenizer.split(line, "|", 18, fields);

    // Skip non-matching in single mode
    if (singleMode && !fields[11].equals(getTerminology())) {
      continue;
    }

    // Field Description
    // 0 CUI
    // 1 LAT
    // 2 TS
    // 3 LUI
    // 4 STT
    // 5 SUI
    // 6 ISPREF
    // 7 AUI
    // 8 SAUI
    // 9 SCUI
    // 10 SDUI
    // 11 SAB
    // 12 TTY
    // 13 CODE
    // 14 STR
    // 15 SRL
    // 16 SUPPRESS
    // 17 CVF
    //
    // e.g.
    // C0000005|ENG|P|L0000005|PF|S0007492|Y|A7755565||M0019694|D012711|MSH|PEN|D012711|(131)I-Macroaggregated Albumin|0|N|256|

    // set the root terminology language
    loadedRootTerminologies.get(fields[11]).setLanguage(fields[1]);

    final Atom atom = new AtomJpa();
    atom.setLanguage(fields[1]);
    atom.setTimestamp(releaseVersionDate);
    atom.setLastModified(releaseVersionDate);
    atom.setLastModifiedBy(loader);
    atom.setObsolete(fields[16].equals("O"));
    atom.setSuppressible(!fields[16].equals("N"));
    atom.setPublished(true);
    atom.setPublishable(true);
    atom.setName(fields[14]);
    atom.setTerminology(fields[11]);
    if (loadedTerminologies.get(fields[11]) == null) {
      throw new Exception("Atom references terminology that does not exist: " + fields[11]);
    }
    atom.setVersion(loadedTerminologies.get(fields[11]).getVersion());
    // skip in single mode
    if (!singleMode) {
      atom.putAlternateTerminologyId(getTerminology(), fields[7]);
    }
    atom.setTerminologyId(fields[8]);
    atom.setTermType(fields[12]);
    atom.setWorkflowStatus(published);
    atom.setCodeId(fields[13]);
    atom.setDescriptorId(fields[10]);
    atom.setConceptId(fields[9]);
    atom.setStringClassId(fields[5]);
    atom.setLexicalClassId(fields[3]);

    // Handle root terminology short name, hierarchical name, and sy names
    if (fields[11].equals("SRC") && fields[12].equals("SSN")) {
      final Terminology t = loadedTerminologies.get(fields[13].substring(2));
      if (t == null || t.getRootTerminology() == null) {
        logError("  Null root " + line);
      } else {
        t.getRootTerminology().setShortName(fields[14]);
      }
    }
    if (fields[11].equals("SRC") && fields[12].equals("RHT")) {
      final Terminology t = loadedTerminologies.get(fields[13].substring(2));
      if (t == null || t.getRootTerminology() == null) {
        logError("  Null root " + line);
      } else {
        t.getRootTerminology().setHierarchicalName(fields[14]);
      }
    }
    if (fields[11].equals("SRC") && fields[12].equals("RPT")) {
      final Terminology t = loadedTerminologies.get(fields[13].substring(2));
      if (t == null || t.getRootTerminology() == null) {
        logError("  Null root " + line);
      } else {
        t.getRootTerminology().setPreferredName(fields[14]);
      }
    }
    if (fields[11].equals("SRC") && fields[12].equals("RSY") && !fields[14].equals("")) {
      final Terminology t = loadedTerminologies.get(fields[13].substring(2));
      if (t == null || t.getRootTerminology() == null) {
        logError("  Null root " + line);
      } else {
        List<String> syNames = t.getRootTerminology().getSynonymousNames();
        syNames.add(fields[14]);
      }
    }

    // Handle terminology sy names
    if (fields[11].equals("SRC") && fields[12].equals("VSY") && !fields[14].equals("")) {
      final Terminology t = loadedTerminologies.get(fields[13].substring(2));
      if (t == null || t.getRootTerminology() == null) {
        logError("  Null root " + line);
      } else {
        List<String> syNames = t.getSynonymousNames();
        syNames.add(fields[14]);
      }
    }

    // Determine organizing class type for terminology
    if (!atom.getDescriptorId().equals("")) {
      termIdTypeMap.put(atom.getTerminology(), IdType.DESCRIPTOR);
    } else if (!atom.getConceptId().equals("")) {
      termIdTypeMap.put(atom.getTerminology(), IdType.CONCEPT);
    } // OTHERWISE it remains "CODE"

    // skip in single mode
    if (!singleMode) {
      atom.putConceptTerminologyId(getTerminology(), fields[0]);
    }

    // Add atoms and commit periodically
    addAtom(atom);
    logAndCommit(++objectCt, RootService.logCt, RootService.commitCt);
    atomIdMap.put(fields[7], atom.getId());
    atomTerminologyMap.put(fields[7], atom.getTerminology().intern());
    atomConceptIdMap.put(fields[7], atom.getConceptId().length() == 0 ? "".intern() : atom.getConceptId());
    atomCodeIdMap.put(fields[7], atom.getCodeId().length() == 0 ? "".intern() : atom.getCodeId());
    atomDescriptorIdMap.put(fields[7],
        atom.getDescriptorId().length() == 0 ? "".intern() : atom.getDescriptorId());

    // CUI - skip in single mode
    if (!singleMode) {
      // Add concept
      if (prevCui == null || !fields[0].equals(prevCui)) {
        if (prevCui != null) {
          cui.setName(getComputedPreferredName(cui, list));
          addConcept(cui);
          conceptIdMap.put(cui.getTerminology() + cui.getTerminologyId(), cui.getId());
          logAndCommit(++objectCt, RootService.logCt, RootService.commitCt);
        }
        cui = new ConceptJpa();
        cui.setTimestamp(releaseVersionDate);
        cui.setLastModified(releaseVersionDate);
        cui.setLastModifiedBy(loader);
        cui.setPublished(true);
        cui.setPublishable(true);
        cui.setTerminology(getTerminology());
        cui.setTerminologyId(fields[0]);
        cui.setVersion(getVersion());
        cui.setWorkflowStatus(published);
      }
      cui.getAtoms().add(atom);
      prevCui = fields[0];
    }

    // Handle Subset
    // C3539934|ENG|S|L11195730|PF|S13913746|N|A23460885||900000000000538005||SNOMEDCT_US|SB|900000000000538005|Description format|9|N|256|
    if (fields[12].equals("SB")) {
      // Have to handle the type later, when we get to attributes
      final AtomSubset atomSubset = new AtomSubsetJpa();
      setSubsetFields(atomSubset, fields);
      cuiAuiAtomSubsetMap.put(fields[0] + fields[7], atomSubset);
      idTerminologyAtomSubsetMap.put(atomSubset.getTerminologyId() + atomSubset.getTerminology(), atomSubset);
      final ConceptSubset conceptSubset = new ConceptSubsetJpa();
      setSubsetFields(conceptSubset, fields);
      cuiAuiConceptSubsetMap.put(fields[0] + fields[7], conceptSubset);
      idTerminologyConceptSubsetMap.put(conceptSubset.getTerminologyId() + conceptSubset.getTerminology(),
          conceptSubset);
    }
  }

  // Add last concept
  if (prevCui != null) {
    cui.setName(getComputedPreferredName(cui, list));
    addConcept(cui);
    conceptIdMap.put(cui.getTerminology() + cui.getTerminologyId(), cui.getId());
    logAndCommit(++objectCt, RootService.logCt, RootService.commitCt);
  }

  // Set the terminology organizing class types
  for (final Terminology terminology : loadedTerminologies.values()) {
    final IdType idType = termIdTypeMap.get(terminology.getTerminology());
    if (idType != null && idType != IdType.CODE) {
      terminology.setOrganizingClassType(idType);
      updateTerminology(terminology);
    }
  }

  logInfo("  Add concepts");
  objectCt = 0;
  // NOTE: Hibernate-specific to support iterating
  // Restrict to timestamp used for THESE atoms, in case multiple RRF
  // files are loaded
  final Session session = manager.unwrap(Session.class);
  org.hibernate.Query hQuery = session.createQuery("select a from AtomJpa a "
      + "where conceptId is not null "
      + "and conceptId != '' and timestamp = :timestamp "
      + "order by terminology, conceptId");
  hQuery.setParameter("timestamp", releaseVersionDate);
  hQuery.setReadOnly(true).setFetchSize(2000).setCacheable(false);
  ScrollableResults results = hQuery.scroll(ScrollMode.FORWARD_ONLY);
  prevCui = null;
  cui = null;
  while (results.next()) {
    final Atom atom = (Atom) results.get()[0];
    if (atom.getConceptId() == null || atom.getConceptId().isEmpty()) {
      continue;
    }
    if (prevCui == null || !prevCui.equals(atom.getConceptId())) {
      if (cui != null) {
        // compute preferred name
        cui.setName(getComputedPreferredName(cui, list));
        addConcept(cui);
        conceptIdMap.put(cui.getTerminology() + cui.getTerminologyId(), cui.getId());
        logAndCommit(++objectCt, RootService.logCt, RootService.commitCt);
      }
      cui = new ConceptJpa();
      cui.setTimestamp(releaseVersionDate);
      cui.setLastModified(releaseVersionDate);
      cui.setLastModifiedBy(loader);
      cui.setPublished(true);
      cui.setPublishable(true);
      cui.setTerminology(atom.getTerminology());
      cui.setTerminologyId(atom.getConceptId());
      cui.setVersion(atom.getVersion());
      cui.setWorkflowStatus(published);
    }
    cui.getAtoms().add(atom);
    prevCui = atom.getConceptId();
  }
  if (cui != null) {
    cui.setName(getComputedPreferredName(cui, list));
    addConcept(cui);
    conceptIdMap.put(cui.getTerminology() + cui.getTerminologyId(), cui.getId());
    commitClearBegin();
  }
  results.close();

  logInfo("  Add descriptors");
  objectCt = 0;
  // NOTE: Hibernate-specific to support iterating
  hQuery = session.createQuery("select a from AtomJpa a "
      + "where descriptorId is not null "
      + "and descriptorId != '' and timestamp = :timestamp "
      + "order by terminology, descriptorId");
  hQuery.setParameter("timestamp", releaseVersionDate);
  hQuery.setReadOnly(true).setFetchSize(2000).setCacheable(false);
  results = hQuery.scroll(ScrollMode.FORWARD_ONLY);
  String prevDui = null;
  Descriptor dui = null;
  while (results.next()) {
    final Atom atom = (Atom) results.get()[0];
    if (atom.getDescriptorId() == null || atom.getDescriptorId().isEmpty()) {
      continue;
    }
    if (prevDui == null || !prevDui.equals(atom.getDescriptorId())) {
      if (dui != null) {
        // compute preferred name
        dui.setName(getComputedPreferredName(dui, list));
        addDescriptor(dui);
        descriptorIdMap.put(dui.getTerminology() + dui.getTerminologyId(), dui.getId());
        logAndCommit(++objectCt, RootService.logCt, RootService.commitCt);
      }
      dui = new DescriptorJpa();
      dui.setTimestamp(releaseVersionDate);
      dui.setLastModified(releaseVersionDate);
      dui.setLastModifiedBy(loader);
      dui.setPublished(true);
      dui.setPublishable(true);
      dui.setTerminology(atom.getTerminology());
      dui.setTerminologyId(atom.getDescriptorId());
      dui.setVersion(atom.getVersion());
      dui.setWorkflowStatus(published);
    }
    dui.getAtoms().add(atom);
    prevDui = atom.getDescriptorId();
  }
  if (dui != null) {
    dui.setName(getComputedPreferredName(dui, list));
    addDescriptor(dui);
    descriptorIdMap.put(dui.getTerminology() + dui.getTerminologyId(), dui.getId());
    commitClearBegin();
  }
  results.close();

  // Use flag to decide whether to handle codes
  if (codesFlag) {
    logInfo("  Add codes");
    objectCt = 0;
    // NOTE: Hibernate-specific to support iterating
    // Skip NOCODE
    // TODO: there is a LNC exception here - for now
    hQuery = session.createQuery("select a from AtomJpa a where codeId is not null "
        + "and codeId != '' and timestamp = :timestamp "
        + "and (terminology = 'LNC' OR (codeId != conceptId and codeId != descriptorId)) "
        + "order by terminology, codeId");
    hQuery.setParameter("timestamp", releaseVersionDate);
    hQuery.setReadOnly(true).setFetchSize(2000).setCacheable(false);
    results = hQuery.scroll(ScrollMode.FORWARD_ONLY);
    String prevCode = null;
    Code code = null;
    while (results.next()) {
      final Atom atom = (Atom) results.get()[0];
      if (atom.getCodeId() == null || atom.getCodeId().isEmpty() || atom.getCodeId().equals("NOCODE")) {
        continue;
      }
      if (prevCode == null || !prevCode.equals(atom.getCodeId())) {
        if (code != null) {
          // compute preferred name
          code.setName(getComputedPreferredName(code, list));
          addCode(code);
          codeIdMap.put(code.getTerminology() + code.getTerminologyId(), code.getId());
          logAndCommit(++objectCt, RootService.logCt, 1000);
        }
        code = new CodeJpa();
        code.setTimestamp(releaseVersionDate);
        code.setLastModified(releaseVersionDate);
        code.setLastModifiedBy(loader);
        code.setPublished(true);
        code.setPublishable(true);
        code.setTerminology(atom.getTerminology());
        code.setTerminologyId(atom.getCodeId());
        code.setVersion(atom.getVersion());
        code.setWorkflowStatus(published);
      }
      code.getAtoms().add(atom);
      prevCode = atom.getCodeId();
    }
    if (code != null) {
      code.setName(getComputedPreferredName(code, list));
      addCode(code);
      codeIdMap.put(code.getTerminology() + code.getTerminologyId(), code.getId());
      commitClearBegin();
    }
    results.close();
  }

  // NOTE: for efficiency and lack of use cases, we've temporarily
  // suspended the loading of LexicalClass and StringClass objects
  //
  // // NOTE: atoms are not connected to lexical classes as there are
  // // currently no known uses for this.
  // logInfo("  Add lexical classes");
  // objectCt = 0;
  // query = NEED TO FIX THIS
  //   manager.createQuery("select a.id from AtomJpa a order by lexicalClassId");
  // String prevLui = null;
  // LexicalClass lui = null;
  // LexicalClass atoms = null;
  // for (final Long id : (List<Long>) query.getResultList()) {
  //   final Atom atom = getAtom(id);
  //   if (atom.getLexicalClassId() == null
  //       || atom.getLexicalClassId().isEmpty()) {
  //     continue;
  //   }
  //   if (prevLui == null || !prevLui.equals(atom.getLexicalClassId())) {
  //     if (lui != null) {
  //       // compute preferred name
  //       lui.setName(getComputedPreferredName(atoms));
  //       addLexicalClass(lui);
  //       logAndCommit(++objectCt, RootService.logCt, RootService.commitCt);
  //     }
  //     // just used to hold atoms, never saved
  //     atoms = new LexicalClassJpa();
  //     lui = new LexicalClassJpa();
  //     lui.setTimestamp(releaseVersionDate);
  //     lui.setLastModified(releaseVersionDate);
  //     lui.setLastModifiedBy(loader);
  //     lui.setPublished(true);
  //     lui.setPublishable(true);
  //     lui.setTerminology(terminology);
  //     lui.setTerminologyId(atom.getLexicalClassId());
  //     lui.setVersion(version);
  //     lui.setWorkflowStatus(published);
  //     lui.setNormalizedString(getNormalizedString(atom.getName()));
  //   }
  //   atoms.addAtom(atom);
  //   prevLui = atom.getLexicalClassId();
  // }
  // if (lui != null) {
  //   lui.setName(getComputedPreferredName(atoms));
  //   commitClearBegin();
  //   logAndCommit(++objectCt, RootService.logCt, RootService.commitCt);
  // }
  //
  // // NOTE: currently atoms are not loaded for string classes
  // // We simply load the objects themselves (for SUI maintenance)
  // // There are no known use cases for having the atoms here.
  // logInfo("  Add string classes");
  // objectCt = 0;
  // query = NEED TO FIX THIS
  //   manager.createQuery("select distinct stringClassId, name from AtomJpa a");
  // for (final Object[] suiFields : (List<Object[]>) query.getResultList()) {
  //   final StringClass sui = new StringClassJpa();
  //   sui.setTimestamp(releaseVersionDate);
  //   sui.setLastModified(releaseVersionDate);
  //   sui.setLastModifiedBy(loader);
  //   sui.setPublished(true);
  //   sui.setPublishable(true);
  //   sui.setTerminology(terminology);
  //   sui.setTerminologyId(suiFields[0].toString());
  //   sui.setVersion(version);
  //   sui.setWorkflowStatus(published);
  //   sui.setName(suiFields[1].toString());
  //   addStringClass(sui);
  //   logAndCommit(++objectCt, RootService.logCt, RootService.commitCt);
  // }

  // commit
  commitClearBegin();

  logInfo("  Update terminologies for languages and names.");
  // Update all root terminologies now that we know languages and names
  for (final RootTerminology root : loadedRootTerminologies.values()) {
    updateRootTerminology(root);
  }
  // Update all terminologies now that we know languages and names
  for (final Terminology terminology : loadedTerminologies.values()) {
    updateTerminology(terminology);
  }
  commitClearBegin();
}
From source file:de.codesourcery.eve.skills.util.DBConverter.java
License:Apache License
protected void export(Class<?> entity) {
  System.out.println("\n============\nExporting " + entity.getName() + "\n============");

  // load data
  System.out.print("Opening MySQL session ...");
  final Session mysqlSession = mysql.openSession();
  System.out.print("created.");
  // mysqlSession.setFlushMode( FlushMode.MANUAL );
  Transaction mysqlTransaction = mysqlSession.beginTransaction();
  final Criteria criteria = mysqlSession.createCriteria(entity);

  // replicate data
  System.out.print("Opening HSQL session ...");
  final Session hsqlSession = hsql.openSession();
  System.out.println("created.");
  // mysqlSession.setFlushMode( FlushMode.MANUAL );
  final Transaction hsqlTransaction = hsqlSession.beginTransaction();

  final ScrollableResults data = criteria.scroll();
  int count = 0;
  int dotCount = 0;
  try {
    while (data.next()) {
      Object loaded = data.get(0);
      // if ( entity == MarketGroup.class ) {
      //   MarketGroup group = (MarketGroup) loaded;
      //   System.out.println( group.getId() + " -> " + group.getParent() );
      // }
      hsqlSession.replicate(loaded, ReplicationMode.IGNORE);
      if ((++count % 1000) == 0) {
        // make sure to adjust <prop key="hibernate.jdbc.batch_size">1000</prop> in the config!
        hsqlSession.flush();
        hsqlSession.clear();
        mysqlSession.flush();
        mysqlSession.clear();
        System.out.print(".");
        dotCount++;
        if (dotCount == 60) {
          System.out.println();
          dotCount = 0;
        }
      }
    }
  } finally {
    data.close();
    System.out.println("\nExported " + count + " entries");
  }

  if (mysqlTransaction.isActive()) {
    mysqlTransaction.commit();
  }
  if (hsqlTransaction.isActive()) {
    hsqlTransaction.commit();
  }
  hsqlSession.flush();
  mysqlSession.flush();
  mysqlSession.close();
  hsqlSession.close();
}
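The flush/clear interval in the example above only pays off if it matches Hibernate's JDBC batch size. A sketch of aligning the two programmatically; the property key hibernate.jdbc.batch_size is standard Hibernate, while the bootstrap code around it is illustrative only:

import org.hibernate.SessionFactory;
import org.hibernate.cfg.Configuration;

Configuration cfg = new Configuration().configure();
// use the same value as the modulus in export() above
cfg.setProperty("hibernate.jdbc.batch_size", "1000");
SessionFactory factory = cfg.buildSessionFactory();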
From source file:de.iteratec.iteraplan.persistence.dao.SearchDAOImpl.java
License:Open Source License
/** {@inheritDoc} */
public void createIndexes(Set<Class<?>> classList) {
  Session session = this.getSession();
  FullTextSession fullTextSession = getFullTextSession();
  session.setFlushMode(FlushMode.MANUAL); // Disable flush operations
  session.setCacheMode(CacheMode.IGNORE); // Disable second-level cache operations
  int batchSize = 100;

  // data is read from the database
  for (Class<?> bbClass : classList) {
    ScrollableResults results = session.createCriteria(bbClass).setFetchSize(batchSize)
        .scroll(ScrollMode.SCROLL_INSENSITIVE);
    LOGGER.info("Indexing " + bbClass.getSimpleName());
    int index = 0;
    while (results.next()) {
      index++;
      // entities are indexed
      fullTextSession.index(results.get(0));
      if (index % batchSize == 0) {
        fullTextSession.flushToIndexes();
        fullTextSession.clear();
      }
    }
    results.close();
    LOGGER.info("Index for " + bbClass.getSimpleName() + " was created!");
  }
}
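For index rebuilds like the one above, later Hibernate Search versions (3.5 and up) also provide a MassIndexer that encapsulates the scroll/index/flush loop. A sketch of the equivalent call, reusing the batch size from the example; it is an alternative, not the code of this source file:

// the MassIndexer manages scrolling, batching, and cache modes internally
FullTextSession fullTextSession = Search.getFullTextSession(session);
fullTextSession.createIndexer(bbClass)
    .batchSizeToLoadObjects(100)
    .cacheMode(CacheMode.IGNORE)
    .startAndWait(); // throws InterruptedException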
From source file:de.tudarmstadt.ukp.csniper.webapp.evaluation.EvaluationRepository.java
License:Apache License
@Transactional
public int[][] listCachedParsesPages(String aCollectionId, int aPageSize) {
  List<int[]> pages = new ArrayList<int[]>();
  ScrollableResults results = null;
  try {
    String queryString = "SELECT id FROM CachedParse WHERE collectionId = :collectionId";
    org.hibernate.Query query = ((HibernateQuery) entityManager.createQuery(queryString))
        .getHibernateQuery();
    query.setParameter("collectionId", aCollectionId);
    results = query.scroll();
    results.beforeFirst();
    int row = 0;
    int[] curPage = new int[] { -1, -1 };
    boolean hasNext = results.next();
    while (hasNext) {
      int id = results.getLong(0).intValue();
      // Record start of page
      if ((row % aPageSize) == 0) {
        curPage[0] = id;
      }
      // Step ahead
      hasNext = results.next();
      row++;
      // Record end of page when the end of a page or the end of the results is reached
      if (((row % aPageSize) == (aPageSize - 1)) || !hasNext) {
        curPage[1] = id;
        pages.add(curPage);
        curPage = new int[] { -1, -1 };
      }
    }
  } finally {
    if (results != null) {
      results.close();
    }
  }
  return pages.toArray(new int[pages.size()][2]);
}