List of usage examples for java.sql.Timestamp.getTime()
public long getTime()
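getTime() returns the number of milliseconds since January 1, 1970, 00:00:00 GMT represented by the Timestamp, which makes it the usual bridge from a TIMESTAMP column to java.util.Date, java.sql.Date or Calendar. The examples below all revolve around that conversion. The following minimal sketch shows the core pattern in isolation; the events table, its created_at column and the surrounding class are hypothetical and exist only to keep the sketch self-contained.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.Timestamp;
import java.util.Calendar;
import javax.sql.DataSource;

public class TimestampGetTimeSketch {
  // Hypothetical helper: read the newest TIMESTAMP value and convert it via getTime().
  public static Calendar latestCreated(DataSource dataSource) throws Exception {
    String sql = "SELECT created_at FROM events ORDER BY created_at DESC LIMIT 1";
    try (Connection conn = dataSource.getConnection();
        PreparedStatement statement = conn.prepareStatement(sql);
        ResultSet results = statement.executeQuery()) {
      if (!results.next()) {
        return null;
      }
      Timestamp createdTimestamp = results.getTimestamp(1);
      long millis = createdTimestamp.getTime();            // milliseconds since the epoch
      java.util.Date asDate = new java.util.Date(millis);  // same instant as a java.util.Date
      java.sql.Date asSqlDate = new java.sql.Date(millis); // date-only view, as in several examples below
      Calendar created = Calendar.getInstance();
      created.setTimeInMillis(millis);                     // same instant as a Calendar
      System.out.println(asDate + " / " + asSqlDate);
      return created;
    }
  }
}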
From source file:com.gtwm.pb.model.manageData.DataManagement.java
public SortedSet<CommentInfo> getComments(BaseField field, int rowId) throws SQLException, CantDoThatException {
  SortedSet<CommentInfo> comments = new TreeSet<CommentInfo>();
  Boolean hasComments = field.hasComments();
  if (hasComments != null) {
    if (hasComments.equals(false)) {
      return comments;
    }
  }
  String sqlCode = "SELECT created, author, text FROM dbint_comments WHERE internalfieldname=? AND rowid=? order by created desc limit 10";
  Connection conn = null;
  try {
    conn = this.dataSource.getConnection();
    conn.setAutoCommit(false);
    PreparedStatement statement = conn.prepareStatement(sqlCode);
    String internalFieldName = field.getInternalFieldName();
    statement.setString(1, internalFieldName);
    statement.setInt(2, rowId);
    ResultSet results = statement.executeQuery();
    while (results.next()) {
      Timestamp createdTimestamp = results.getTimestamp(1);
      Calendar created = Calendar.getInstance();
      created.setTimeInMillis(createdTimestamp.getTime());
      String author = results.getString(2);
      String comment = results.getString(3);
      comments.add(new Comment(internalFieldName, rowId, author, created, comment));
    }
    results.close();
    statement.close();
    if (comments.size() > 0) {
      field.setHasComments(true);
    } else if (hasComments == null) {
      // We've seen there are no comments for this particular record
      // but we don't know if there are any for the field in other
      // records. Check.
      sqlCode = "SELECT count(*) from dbint_comments WHERE internalfieldname=?";
      statement = conn.prepareStatement(sqlCode);
      statement.setString(1, internalFieldName);
      results = statement.executeQuery();
      if (results.next()) {
        int numComments = results.getInt(1);
        if (numComments > 0) {
          field.setHasComments(true);
        } else {
          // Another check in case another thread e.g. running
          // addComment has set this to true.
          // We don't want to overwrite that
          // TODO: Really, this should be atomic but it takes such
          // a small amount of time compared to the SQL it's
          // probably fine
          if (field.hasComments() == null) {
            field.setHasComments(false);
          }
        }
      } else {
        logger.error("Unable to see if comments exist with query " + statement);
      }
      results.close();
      statement.close();
    }
  } finally {
    if (conn != null) {
      conn.close();
    }
  }
  return comments;
}
From source file:com.lp.server.personal.ejbfac.ZutrittscontrollerFacBean.java
public PersonalzutrittsklasseDto[] personalzutrittsklassenFindByTGueltigab(Timestamp tDatum, TheClientDto theClientDto)
    throws EJBExceptionLP {
  if (tDatum == null) {
    throw new EJBExceptionLP(EJBExceptionLP.FEHLER_FELD_DARF_NICHT_NULL_SEIN, new Exception("tDatum == null"));
  }
  tDatum = Helper.cutTimestamp(tDatum);
  String sQuery = "select distinct personalzutrittsklasse.personal_i_id from FLRPersonalzutrittsklasse personalzutrittsklasse WHERE personalzutrittsklasse.t_gueltigab<='"
      + Helper.formatDateWithSlashes(new java.sql.Date(tDatum.getTime()))
      + "' AND personalzutrittsklasse.flrpersonal.c_ausweis is not null";
  SessionFactory factory = FLRSessionFactory.getFactory();
  Session session = factory.openSession();
  org.hibernate.Query inventurliste = session.createQuery(sQuery);
  List<?> resultList = inventurliste.list();
  Iterator<?> resultListIterator = resultList.iterator();
  ArrayList<PersonalzutrittsklasseDto> a = new ArrayList<PersonalzutrittsklasseDto>();
  int row = 0;
  while (resultListIterator.hasNext()) {
    Integer o = (Integer) resultListIterator.next();
    try {
      if (!getPersonalFac().istPersonalAusgetreten(o, tDatum, theClientDto)) {
        PersonalzutrittsklasseDto dto = new PersonalzutrittsklasseDto();
        // try {
        Query query = em.createNamedQuery("PersonalzutrittsklassefindByPersonalIIdTGueltigab");
        query.setParameter(1, o);
        query.setParameter(2, tDatum);
        Collection<?> cl = query.getResultList();
        // if (cl.isEmpty()) {
        //   throw new EJBExceptionLP(EJBExceptionLP.FEHLER_BEI_FINDBYPRIMARYKEY, null);
        // }
        PersonalzutrittsklasseDto[] dtoTemp = assemblePersonalzutrittsklasseDtos(cl);
        dto = dtoTemp[0];
        // } catch (FinderException ex1) {
        //   throw new EJBExceptionLP(EJBExceptionLP.FEHLER_BEI_FINDBYPRIMARYKEY, ex1);
        // }
        /*
         * dto.setPersonalDto(getPersonalFac().personalFindByPrimaryKey(o, cNrUserI));
         * dto.setZutrittsklasseDto(zutrittsklasseFindByPrimaryKey(dto.getZutrittsklasseIId()));
         */
        a.add(dto);
      }
    } catch (RemoteException ex) {
      throwEJBExceptionLPRespectOld(ex);
    }
    row++;
  }
  PersonalzutrittsklasseDto[] returnArray = new PersonalzutrittsklasseDto[a.size()];
  return (PersonalzutrittsklasseDto[]) a.toArray(returnArray);
}
From source file:com.lp.server.personal.ejbfac.ZutrittscontrollerFacBean.java
public void pruefeZutrittsobjektverwendung(Integer zutrittsklasseIId, TheClientDto theClientDto) {
  if (zutrittsklasseIId == null) {
    throw new EJBExceptionLP(EJBExceptionLP.FEHLER_PKFIELD_IS_NULL, new Exception("zutrittsobjektIId == null"));
  }
  java.sql.Timestamp tHeute = new Timestamp(System.currentTimeMillis());
  tHeute = Helper.cutTimestamp(tHeute);
  SessionFactory factory = FLRSessionFactory.getFactory();
  Session session = factory.openSession();
  String sQuery = "select count(distinct personalzutrittsklasse.personal_i_id) from FLRPersonalzutrittsklasse personalzutrittsklasse WHERE personalzutrittsklasse.flrpersonal.mandant_c_nr='"
      + theClientDto.getMandant() + "' AND personalzutrittsklasse.t_gueltigab <='"
      + Helper.formatDateWithSlashes(new java.sql.Date(tHeute.getTime())) + "'";
  org.hibernate.Query hqlquery = session.createQuery(sQuery);
  List<?> resultList = hqlquery.list();
  long iAnzahlKlasseVerwendet = (Long) resultList.iterator().next();
  // try {
  Query query = em.createNamedQuery("ZutrittsobjektverwendungfindByMandantCNr");
  query.setParameter(1, theClientDto.getMandant());
  Collection<?> cl = query.getResultList();
  // if (! cl.isEmpty()) {
  ZutrittsobjektverwendungDto[] zutrittsobjektverwendungDtos = assembleZutrittsobjektverwendungDtos(
      query.getResultList());
  if (zutrittsobjektverwendungDtos != null) {
    for (int i = 0; i < zutrittsobjektverwendungDtos.length; i++) {
      ZutrittsobjektverwendungDto zutrittsobjektverwendungDto = zutrittsobjektverwendungDtos[i];
      int iAnzahlDarfVerwendetwerden = zutrittsobjektverwendungDto.getIAnzahlverwendung();
      try {
        query = em.createNamedQuery("ZutrittsklasseobjektfindByZutrittsobjektIIdZutrittsklasseIId");
        query.setParameter(1, zutrittsobjektverwendungDto.getZutrittsobjektIId());
        query.setParameter(2, zutrittsklasseIId);
        Zutrittsklasseobjekt zutrittsklasseobjekt = (Zutrittsklasseobjekt) query.getSingleResult();
        if (zutrittsklasseobjekt != null) {
          ZutrittsklasseobjektDto zutrittsklasseobjektDto = assembleZutrittsklasseobjektDto(zutrittsklasseobjekt);
          if (zutrittsklasseobjektDto != null) {
            if (iAnzahlKlasseVerwendet >= iAnzahlDarfVerwendetwerden) {
              Zutrittsobjekt zutrittsobjekt = em.find(Zutrittsobjekt.class,
                  zutrittsklasseobjektDto.getZutrittsobjektIId());
              if (zutrittsobjekt != null) {
                throw new EJBExceptionLP(EJBExceptionLP.FEHLER_ZUTRITTSOBJEKT_VERWENDUNGSUEBERSCHREITUNG,
                    new Exception(zutrittsobjekt.getCBez()));
              }
            }
          }
        }
      } catch (javax.persistence.NoResultException ex1) {
        // nix
      }
    }
  }
  // }
  // catch (FinderException ex) {
  //   // nix da
  // }
}
From source file:com.gtwm.pb.model.manageData.DataManagement.java
/**
 * Fetch direct from the database
 */
private ChartDataInfo fetchChartData(ChartInfo chart, Map<BaseField, String> reportFilterValues)
    throws CantDoThatException, SQLException {
  Set<ChartAggregateInfo> aggregateFunctions = chart.getAggregateFunctions();
  Set<ChartGroupingInfo> groupings = chart.getGroupings();
  logger.debug("Chart groupings are " + groupings);
  List<ChartDataRowInfo> reportSummaryRows;
  reportSummaryRows = new LinkedList<ChartDataRowInfo>();
  Connection conn = null;
  PreparedStatement statement = null;
  try {
    conn = this.dataSource.getConnection();
    conn.setAutoCommit(false);
    // First, cache the set of display values for relation fields
    Map<ReportFieldInfo, Map<String, String>> displayLookups = new HashMap<ReportFieldInfo, Map<String, String>>();
    for (ChartGroupingInfo grouping : groupings) {
      ReportFieldInfo groupingReportField = grouping.getGroupingReportField();
      BaseField baseField = groupingReportField.getBaseField();
      if (baseField instanceof RelationField) {
        String relatedKey = ((RelationField) baseField).getRelatedField().getInternalFieldName();
        String relatedDisplay = ((RelationField) baseField).getDisplayField().getInternalFieldName();
        String relatedSource = ((RelationField) baseField).getRelatedTable().getInternalTableName();
        Map<String, String> displayLookup = getKeyToDisplayMapping(conn, relatedSource, relatedKey, relatedDisplay);
        displayLookups.put(groupingReportField, displayLookup);
      }
    }
    // Create some maps to store min. and max. values of each aggregate column
    // These numbers can be used e.g. to scale values when charting summary data
    Map<ChartAggregateInfo, Number> maxAggValues = new HashMap<ChartAggregateInfo, Number>();
    Map<ChartAggregateInfo, Number> minAggValues = new HashMap<ChartAggregateInfo, Number>();
    Map<ChartAggregateInfo, Number> grandTotals = new HashMap<ChartAggregateInfo, Number>();
    // Also a map for working with in the loop
    Map<ReportFieldInfo, Date> previousDateValues = new HashMap<ReportFieldInfo, Date>();
    Calendar calendar = Calendar.getInstance();
    // Get database data
    BaseReportInfo report = chart.getReport();
    ReportData.enableOptimisations(conn, report, true);
    statement = chart.getChartSqlPreparedStatement(conn, reportFilterValues, false);
    long startTime = System.currentTimeMillis();
    ResultSet summaryResults = statement.executeQuery();
    while (summaryResults.next()) {
      ChartDataRowInfo resultRow = new ChartDataRow();
      int resultColumn = 0;
      for (ChartGroupingInfo grouping : groupings) {
        ReportFieldInfo groupingReportField = grouping.getGroupingReportField();
        SummaryGroupingModifier groupingModifier = grouping.getGroupingModifier();
        BaseField baseField = groupingReportField.getBaseField();
        resultColumn++;
        String value = "";
        DatabaseFieldType dbType = baseField.getDbType();
        if (baseField instanceof RelationField) {
          value = summaryResults.getString(resultColumn);
          Map<String, String> displayLookup = displayLookups.get(groupingReportField);
          value = displayLookup.get(value);
        } else if (dbType.equals(DatabaseFieldType.TIMESTAMP)) {
          if (groupingModifier != null) {
            value = summaryResults.getString(resultColumn);
          } else {
            Date dbValue = summaryResults.getTimestamp(resultColumn);
            if (dbValue != null) {
              if (groupingReportField instanceof ReportCalcFieldInfo) {
                // See DateFieldDefn constructor for format explanation
                value = ((ReportCalcFieldInfo) groupingReportField).formatDate(dbValue);
              } else {
                DateField dateField = (DateField) baseField;
                value = (dateField.formatDate(dbValue));
                if (Integer.valueOf(dateField.getDateResolution()).equals(Calendar.DAY_OF_MONTH)) {
                  Date previousDbValue = previousDateValues.get(groupingReportField);
                  if (previousDbValue != null) {
                    calendar.setTime(previousDbValue);
                    int previousDayOfYear = calendar.get(Calendar.DAY_OF_YEAR);
                    calendar.setTime(dbValue);
                    int dayOfYear = calendar.get(Calendar.DAY_OF_YEAR);
                    int difference = Math.abs(dayOfYear - previousDayOfYear);
                    if (difference > 1) {
                      value += " (" + (difference - 1) + " day gap)";
                    }
                  }
                  previousDateValues.put(groupingReportField, dbValue);
                }
              }
            }
          }
        } else if (dbType.equals(DatabaseFieldType.FLOAT)) {
          double floatValue = summaryResults.getDouble(resultColumn);
          if (baseField instanceof DecimalField) {
            value = ((DecimalField) baseField).formatFloat(floatValue);
          } else if (groupingReportField instanceof ReportCalcFieldInfo) {
            value = ((ReportCalcFieldInfo) groupingReportField).formatFloat(floatValue);
          } else {
            value = summaryResults.getString(resultColumn);
          }
        } else if (dbType.equals(DatabaseFieldType.BOOLEAN)) {
          if (summaryResults.getBoolean(resultColumn)) {
            value = "true";
          } else {
            value = "false";
          }
        } else {
          value = summaryResults.getString(resultColumn);
        }
        resultRow.addGroupingValue(grouping, value);
      }
      for (ChartAggregateInfo aggregateFunction : aggregateFunctions) {
        resultColumn++;
        DatabaseFieldType dbType = aggregateFunction.getReportField().getBaseField().getDbType();
        Double value = null;
        // deal with aggregate results which are timestamps rather than doubles
        if ((!aggregateFunction.getAggregateFunction().equals(AggregateFunction.COUNT))
            && (dbType.equals(DatabaseFieldType.TIMESTAMP))) {
          java.sql.Timestamp timestampValue = summaryResults.getTimestamp(resultColumn);
          if (timestampValue != null) {
            Long longValue = timestampValue.getTime();
            value = longValue.doubleValue();
          }
        } else {
          value = summaryResults.getDouble(resultColumn);
        }
        if (value != null) {
          int precision = 1;
          ReportFieldInfo aggReportField = aggregateFunction.getReportField();
          if (aggReportField instanceof ReportCalcFieldInfo) {
            DatabaseFieldType dbFieldType = ((ReportCalcFieldInfo) aggReportField).getDbType();
            if (dbFieldType.equals(DatabaseFieldType.FLOAT)) {
              precision = ((ReportCalcFieldInfo) aggReportField).getDecimalPrecision();
            }
          } else if (aggReportField.getBaseField() instanceof DecimalField) {
            precision = ((DecimalField) aggReportField.getBaseField()).getPrecision();
          }
          Number currentGrandTotal = grandTotals.get(aggregateFunction);
          if (currentGrandTotal == null) {
            currentGrandTotal = new Double(0);
          }
          double currentGrandTotalDbl = currentGrandTotal.doubleValue() + value;
          grandTotals.put(aggregateFunction, Double.valueOf(currentGrandTotalDbl));
          value = MathUtils.round(value, precision);
          resultRow.addAggregateValue(aggregateFunction, value);
          Number currentMin = minAggValues.get(aggregateFunction);
          Number currentMax = maxAggValues.get(aggregateFunction);
          if (currentMin == null) {
            minAggValues.put(aggregateFunction, value);
          } else if (value.doubleValue() < currentMin.doubleValue()) {
            minAggValues.put(aggregateFunction, value);
          }
          if (currentMax == null) {
            maxAggValues.put(aggregateFunction, value);
          } else if (value.doubleValue() > currentMax.doubleValue()) {
            maxAggValues.put(aggregateFunction, value);
          }
        }
      }
      reportSummaryRows.add(resultRow);
    }
    summaryResults.close();
    statement.close();
    ReportData.enableOptimisations(conn, report, false);
    float durationSecs = (System.currentTimeMillis() - startTime) / ((float) 1000);
    if (durationSecs > AppProperties.longSqlTime) {
      logger.debug("Long SELECT SQL execution time of " + durationSecs + " seconds for summary '" + chart
          + "', statement = " + statement);
    }
    return new ChartData(reportSummaryRows, minAggValues, maxAggValues, grandTotals);
  } catch (SQLException sqlex) {
    throw new SQLException("Error getting report summary data " + chart + ": " + sqlex + ". SQL = " + statement);
  } finally {
    if (conn != null) {
      conn.close();
    }
  }
}
From source file:com.runwaysdk.system.metadata.ontology.PostgresOntolgoyDatabase.java
@Override
public void copyTerm(Map<String, Object> parameters) {
  Term parent = (Term) this.getParameter(parameters, DatabaseAllPathsStrategy.PARENT_PARAMETER);
  Term child = (Term) this.getParameter(parameters, DatabaseAllPathsStrategy.CHILD_PARAMETER);
  MdBusiness allPaths = (MdBusiness) this.getParameter(parameters, DatabaseAllPathsStrategy.ALL_PATHS_PARAMETER);
  String tableName = allPaths.getTableName();
  String id = getColumn(allPaths, MetadataInfo.ID);
  String siteMaster = getColumn(allPaths, MetadataInfo.SITE_MASTER);
  String createdBy = getColumn(allPaths, MetadataInfo.CREATED_BY);
  String key = getColumn(allPaths, MetadataInfo.KEY);
  String type = getColumn(allPaths, MetadataInfo.TYPE);
  String domain = getColumn(allPaths, MetadataInfo.DOMAIN);
  String lastUpdateDate = getColumn(allPaths, MetadataInfo.LAST_UPDATE_DATE);
  String sequence = getColumn(allPaths, MetadataInfo.SEQUENCE);
  String lockedBy = getColumn(allPaths, MetadataInfo.LOCKED_BY);
  String createDate = getColumn(allPaths, MetadataInfo.CREATE_DATE);
  String owner = getColumn(allPaths, MetadataInfo.OWNER);
  String lastUpdatedBy = getColumn(allPaths, MetadataInfo.LAST_UPDATED_BY);
  String parentTerm = getColumn(allPaths, DatabaseAllPathsStrategy.PARENT_TERM_ATTR);
  String childTerm = getColumn(allPaths, DatabaseAllPathsStrategy.CHILD_TERM_ATTR);
  String allPathsRootTypeId = this.getAllPathsTypeIdRoot(allPaths);
  String sequenceName = this.getSequenceName(allPaths);
  String createdById = new String();
  SessionIF sessionIF = Session.getCurrentSession();
  if (sessionIF != null) {
    createdById = sessionIF.getUser().getId();
  } else {
    createdById = ServerConstants.SYSTEM_USER_ID;
  }
  // non-term values
  Timestamp transactionDate = new Timestamp(new Date().getTime());
  String[] metadataColumns = new String[] { id, siteMaster, key, type, domain, lastUpdateDate, sequence, createdBy,
      lockedBy, createDate, owner, lastUpdatedBy, parentTerm, childTerm };
  String insertColumns = StringUtils.join(metadataColumns, "," + NL);
  String childId = child.getId();
  String parentId = parent.getId();
  String identifierSQL = "MD5(nextval('" + sequenceName + "') || allpaths_parent." + parentTerm
      + " || allpaths_child." + childTerm + " ) || '" + allPathsRootTypeId + "'";
  StringBuffer sql = new StringBuffer();
  sql.append("INSERT INTO " + tableName + " (" + insertColumns + ") " + NL);
  sql.append(" SELECT " + NL);
  sql.append(" " + identifierSQL + " AS newId," + NL);
  sql.append(" '" + CommonProperties.getDomain() + "' AS " + siteMaster + "," + NL);
  sql.append(" " + identifierSQL + " AS newKey," + NL);
  sql.append(" '" + allPaths.definesType() + "' AS \"" + type + "\"," + NL);
  sql.append(" '' AS " + domain + "," + NL);
  sql.append(" ? AS " + lastUpdateDate + "," + NL);
  sql.append(" NEXTVAL('" + PostgreSQL.OBJECT_UPDATE_SEQUENCE + "') AS " + sequence + "," + NL);
  sql.append(" '" + createdById + "' AS " + createdBy + "," + NL);
  sql.append(" NULL AS " + lockedBy + "," + NL);
  sql.append(" ? AS " + createDate + "," + NL);
  sql.append(" '" + createdById + "' AS \"" + owner + "\"," + NL);
  sql.append(" '" + createdById + "' AS " + lastUpdatedBy + "," + NL);
  sql.append(" allpaths_parent." + parentTerm + " AS " + parentTerm + ", " + NL);
  sql.append(" allpaths_child." + childTerm + " AS " + childTerm + NL);
  sql.append(" FROM " + NL);
  // Fetch all of the recursive children of the given child term, including
  // the child term itself.
  sql.append(" (SELECT " + childTerm + " " + NL);
  sql.append(" FROM " + tableName + " " + NL);
  sql.append(" WHERE " + parentTerm + " = '" + childId + "' ) AS allpaths_child, " + NL);
  // Fetch all of the recursive parents of the given new parent term,
  // including the new parent term itself.
  sql.append(" (SELECT " + parentTerm + " " + NL);
  sql.append(" FROM " + tableName + " " + NL);
  sql.append(" WHERE " + childTerm + " = '" + parentId + "' " + NL + " ) AS allpaths_parent " + NL);
  // Since a term can have multiple parents, a path to one of the new
  // parent's parents may already exist
  sql.append(" WHERE allpaths_parent." + parentTerm + " NOT IN " + NL);
  sql.append(" (SELECT " + parentTerm + " " + NL);
  sql.append(" FROM " + tableName + " " + NL);
  sql.append(" WHERE " + parentTerm + " = allpaths_parent." + parentTerm + " " + NL);
  sql.append(" AND " + childTerm + " = allpaths_child." + childTerm + ") " + NL);
  Connection conn = Database.getConnection();
  PreparedStatement prepared = null;
  try {
    prepared = conn.prepareStatement(sql.toString());
    prepared.setTimestamp(1, new Timestamp(transactionDate.getTime()));
    prepared.setTimestamp(2, new Timestamp(transactionDate.getTime()));
    prepared.executeUpdate();
  } catch (SQLException e) {
    throw new ProgrammingErrorException(e);
  } finally {
    if (prepared != null) {
      try {
        prepared.close();
      } catch (SQLException e) {
        throw new ProgrammingErrorException(e);
      }
    }
  }
}
From source file:org.kuali.kfs.module.cam.batch.service.impl.AssetDepreciationServiceImpl.java
/**
 * This method stores the depreciation transactions in the general pending entry table and creates a new documentHeader entry.
 * <p>
 *
 * @param trans SortedMap with the transactions
 * @return none
 */
protected void processGeneralLedgerPendingEntry(Integer fiscalYear, Integer fiscalMonth, List<String> documentNos,
    SortedMap<String, AssetDepreciationTransaction> trans) {
  LOG.debug("populateExplicitGeneralLedgerPendingEntry(AccountingDocument, AccountingLine, GeneralLedgerPendingEntrySequenceHelper, GeneralLedgerPendingEntry) - start");
  String financialSystemDocumentTypeCodeCode;
  try {
    String documentNumber = createNewDepreciationDocument(documentNos);
    financialSystemDocumentTypeCodeCode = CamsConstants.DocumentTypeName.ASSET_DEPRECIATION;
    LOG.debug(CamsConstants.Depreciation.DEPRECIATION_BATCH + "Depreciation Document Type Code: "
        + financialSystemDocumentTypeCodeCode);
    Timestamp transactionTimestamp = new Timestamp(dateTimeService.getCurrentDate().getTime());
    GeneralLedgerPendingEntrySequenceHelper sequenceHelper = new GeneralLedgerPendingEntrySequenceHelper();
    List<GeneralLedgerPendingEntry> saveList = new ArrayList<GeneralLedgerPendingEntry>();
    int counter = 0;
    for (AssetDepreciationTransaction t : trans.values()) {
      if (t.getTransactionAmount().isNonZero()) {
        counter++;
        LOG.debug(CamsConstants.Depreciation.DEPRECIATION_BATCH + "Creating GLPE entries for asset:"
            + t.getCapitalAssetNumber());
        GeneralLedgerPendingEntry explicitEntry = new GeneralLedgerPendingEntry();
        explicitEntry.setFinancialSystemOriginationCode(KFSConstants.ORIGIN_CODE_KUALI);
        explicitEntry.setDocumentNumber(documentNumber);
        explicitEntry.setTransactionLedgerEntrySequenceNumber(new Integer(sequenceHelper.getSequenceCounter()));
        sequenceHelper.increment();
        explicitEntry.setChartOfAccountsCode(t.getChartOfAccountsCode());
        explicitEntry.setAccountNumber(t.getAccountNumber());
        explicitEntry.setSubAccountNumber(null);
        explicitEntry.setFinancialObjectCode(t.getFinancialObjectCode());
        explicitEntry.setFinancialSubObjectCode(null);
        explicitEntry.setFinancialBalanceTypeCode(BALANCE_TYPE_ACTUAL);
        explicitEntry.setFinancialObjectTypeCode(t.getFinancialObjectTypeCode());
        explicitEntry.setUniversityFiscalYear(fiscalYear);
        explicitEntry.setUniversityFiscalPeriodCode(StringUtils.leftPad(fiscalMonth.toString().trim(), 2, "0"));
        explicitEntry.setTransactionLedgerEntryDescription(t.getTransactionLedgerEntryDescription());
        explicitEntry.setTransactionLedgerEntryAmount(t.getTransactionAmount().abs());
        explicitEntry.setTransactionDebitCreditCode(t.getTransactionType());
        explicitEntry.setTransactionDate(new java.sql.Date(transactionTimestamp.getTime()));
        explicitEntry.setFinancialDocumentTypeCode(financialSystemDocumentTypeCodeCode);
        explicitEntry.setFinancialDocumentApprovedCode(KFSConstants.DocumentStatusCodes.APPROVED);
        explicitEntry.setVersionNumber(new Long(1));
        explicitEntry.setTransactionEntryProcessedTs(new java.sql.Timestamp(transactionTimestamp.getTime()));
        // this.generalLedgerPendingEntryService.save(explicitEntry);
        saveList.add(explicitEntry);
        if (counter % 1000 == 0) {
          // save here
          getDepreciationBatchDao().savePendingGLEntries(saveList);
          saveList.clear();
        }
        if (sequenceHelper.getSequenceCounter() == 99999) {
          // create new document and sequence is reset
          documentNumber = createNewDepreciationDocument(documentNos);
          sequenceHelper = new GeneralLedgerPendingEntrySequenceHelper();
        }
      }
    }
    // save last list
    getDepreciationBatchDao().savePendingGLEntries(saveList);
    saveList.clear();
  } catch (Exception e) {
    LOG.error("Error occurred", e);
    throw new IllegalStateException(kualiConfigurationService.getPropertyValueAsString(
        CamsKeyConstants.Depreciation.ERROR_WHEN_UPDATING_GL_PENDING_ENTRY_TABLE) + " :" + e.getMessage());
  }
  LOG.debug("populateExplicitGeneralLedgerPendingEntry(AccountingDocument, AccountingLine, GeneralLedgerPendingEntrySequenceHelper, GeneralLedgerPendingEntry) - end");
}
From source file:ching.icecreaming.action.ResourceDescriptors.java
private boolean searchFilter(String searchField, String searchOper, String searchString, Object object1) {
  boolean result1 = true;
  String string1 = null;
  Integer integer1 = null;
  java.sql.Timestamp timestamp1 = null;
  org.joda.time.DateTime dateTime1 = null, dateTime2 = null;
  DateTimeFormatter dateTimeFormatter = DateTimeFormat.forPattern("yyyy-MM-dd HH:mm");
  java.util.Date date1 = null;
  if (object1 instanceof String) {
    string1 = (String) object1;
    switch (searchOper) {
    case "eq": result1 = StringUtils.equals(string1, searchString); break;
    case "ne": result1 = !StringUtils.equals(string1, searchString); break;
    case "bw": result1 = StringUtils.startsWith(string1, searchString); break;
    case "bn": result1 = !StringUtils.startsWith(string1, searchString); break;
    case "ew": result1 = StringUtils.endsWith(string1, searchString); break;
    case "en": result1 = !StringUtils.endsWith(string1, searchString); break;
    case "cn": result1 = StringUtils.contains(string1, searchString); break;
    case "nc": result1 = !StringUtils.contains(string1, searchString); break;
    case "nu": result1 = StringUtils.isBlank(string1); break;
    case "nn": result1 = StringUtils.isNotBlank(string1); break;
    case "in": case "ni": case "lt": case "le": case "gt": case "ge": default: break;
    }
  } else if (object1 instanceof Integer) {
    if (NumberUtils.isNumber(searchString)) {
      integer1 = (Integer) object1;
      switch (searchOper) {
      case "eq": result1 = (NumberUtils.toInt(searchString, 0) == integer1.intValue()); break;
      case "ne": result1 = (NumberUtils.toInt(searchString, 0) != integer1.intValue()); break;
      case "lt": result1 = (NumberUtils.toInt(searchString, 0) > integer1.intValue()); break;
      case "le": result1 = (NumberUtils.toInt(searchString, 0) >= integer1.intValue()); break;
      case "gt": result1 = (NumberUtils.toInt(searchString, 0) < integer1.intValue()); break;
      case "ge": result1 = (NumberUtils.toInt(searchString, 0) <= integer1.intValue()); break;
      case "bw": case "bn": case "ew": case "en": case "cn": case "nc": case "in": case "ni": case "nu": case "nn": default: break;
      }
    }
  } else if (object1 instanceof java.sql.Timestamp || object1 instanceof java.util.Date) {
    if (object1 instanceof java.sql.Timestamp) {
      timestamp1 = (java.sql.Timestamp) object1;
      dateTime1 = new org.joda.time.DateTime(timestamp1.getTime());
    } else if (object1 instanceof java.util.Date) {
      date1 = (java.util.Date) object1;
      if (date1 != null)
        dateTime1 = new org.joda.time.DateTime(date1);
    }
    try {
      dateTime2 = dateTimeFormatter.parseDateTime(searchString);
    } catch (java.lang.IllegalArgumentException exception1) {
      dateTime2 = null;
    }
    if (dateTime2 != null && dateTime1 != null) {
      switch (searchOper) {
      case "eq": result1 = dateTime1.equals(dateTime2); break;
      case "ne": result1 = !dateTime1.equals(dateTime2); break;
      case "lt": result1 = dateTime1.isBefore(dateTime2); break;
      case "le": result1 = (dateTime1.isBefore(dateTime2) || dateTime1.equals(dateTime2)); break;
      case "gt": result1 = dateTime1.isAfter(dateTime2); break;
      case "ge": result1 = (dateTime1.isAfter(dateTime2) || dateTime1.equals(dateTime2)); break;
      case "bw": case "bn": case "ew": case "en": case "cn": case "nc": case "in": case "ni": break;
      case "nu": result1 = (timestamp1 == null); break;
      case "nn": result1 = (timestamp1 != null); break;
      default: break;
      }
    }
  }
  return !result1;
}
From source file:org.openmicroscopy.shoola.agents.metadata.editor.EditorModel.java
/**
 * Sorts the passed collection of annotations by date starting with the
 * most recent.
 *
 * @param annotations Collection of {@link AnnotationData} linked to
 *                    the currently edited <code>DataObject</code>.
 */
private void sortAnnotationByDate(List annotations) {
  if (annotations == null || annotations.size() == 0)
    return;
  Comparator c = new Comparator() {
    public int compare(Object o1, Object o2) {
      Timestamp t1 = ((AnnotationData) o1).getLastModified(),
          t2 = ((AnnotationData) o2).getLastModified();
      long n1 = t1.getTime();
      long n2 = t2.getTime();
      int v = 0;
      if (n1 < n2) v = -1;
      else if (n1 > n2) v = 1;
      return -v;
    }
  };
  Collections.sort(annotations, c);
}
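For comparison, the same newest-first ordering can be written more compactly with a typed comparator. This is only an illustrative sketch, assuming the AnnotationData.getLastModified() accessor used above (which returns a java.sql.Timestamp); it is not part of the original source file.

// Minimal sketch: newest-first ordering via getTime() and Long.compare,
// which avoids the overflow risk of subtracting one getTime() value from another.
Comparator<AnnotationData> newestFirst = new Comparator<AnnotationData>() {
  public int compare(AnnotationData a1, AnnotationData a2) {
    return Long.compare(a2.getLastModified().getTime(), a1.getLastModified().getTime());
  }
};
Collections.sort(annotations, newestFirst);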
From source file:nl.sidn.pcap.parquet.DNSParquetPacketWriter.java
/**
 * create 1 parquet record which combines values from the query and the response
 * @param combo
 */
@Override
public void write(PacketCombination combo) {
  GenericRecordBuilder builder = newBuilder();
  packetCounter++;
  if (packetCounter % STATUS_COUNT == 0) {
    showStatus();
  }
  Packet reqTransport = combo.getRequest();
  Message requestMessage = combo.getRequestMessage();
  Packet respTransport = combo.getResponse();
  Message respMessage = combo.getResponseMessage();
  Question q = lookupQuestion(requestMessage, respMessage);
  Header requestHeader = null; //lookupHeader(requestMessage, respMessage);
  if (requestMessage != null) {
    requestHeader = requestMessage.getHeader();
  }
  //get the time in milliseconds
  long time = lookupTime(reqTransport, respTransport);
  Timestamp ts = new Timestamp((time * 1000));
  String normalizedQname = q == null ? "" : filter(q.getqName());
  normalizedQname = StringUtils.lowerCase(normalizedQname);
  Domaininfo domaininfo = NameUtil.getDomain(normalizedQname, Settings.getTldSuffixes());
  //check to see if a response was found, if not then save -1 value
  //otherwise use the rcode returned by the server in the response.
  //no response might be caused by rate limiting
  int rcode = RCODE_QUERY_WITHOUT_RESPONSE; //default no reply, use non standard rcode value -1
  //set the nameserver the queries are going to/coming from
  builder.set("svr", combo.getServer().getName());
  //if no anycast location is encoded in the name then the anycast location will be null
  builder.set("server_location", combo.getServer().getLocation());
  //add meta data
  enrich(reqTransport, respTransport, builder);
  //these are the values that are retrieved from the response
  if (respTransport != null && respMessage != null) {
    Header respHdr = respMessage.getHeader();
    rcode = respHdr.getRawRcode();
    builder.set("aa", respHdr.isAa()).set("tc", respHdr.isTc()).set("ra", respHdr.isRa())
        .set("ad", respHdr.isAd()).set("ancount", (int) respHdr.getAnCount())
        .set("arcount", (int) respHdr.getArCount()).set("nscount", (int) respHdr.getNsCount())
        .set("res_len", respTransport.getTotalLength()).set("dns_res_len", respMessage.getBytes());
    //add file name
    builder.set("pcap_file", combo.getPcapFilename());
    //ip fragments in the response
    if (respTransport.isFragmented()) {
      int frags = respTransport.getReassembledFragments();
      builder.set("resp_frag", frags);
      if ((respTransport.getProtocol() == PcapReader.PROTOCOL_UDP) && frags > 1) {
        responseUDPFragmentedCount++;
      } else if ((respTransport.getProtocol() == PcapReader.PROTOCOL_TCP) && frags > 1) {
        responseTCPFragmentedCount++;
      }
    }
    //EDNS0 for response
    writeResponseOptions(respMessage, builder);
    //update metric
    responseBytes = responseBytes + respTransport.getUdpLength();
    if (!combo.isExpired()) {
      //do not send expired queries, this will cause duplicate timestamps with low values
      //this looks like dips in the grafana graph
      metricManager.sendAggregated(MetricManager.METRIC_IMPORT_DNS_RESPONSE_COUNT, 1, time);
    }
    //check if we have a request hdr if not continue using values
    //from the response header
    Header headerToUse = requestHeader;
    if (headerToUse == null) {
      headerToUse = respHdr;
    } else {
      //get these values only from the req header
      //if no request is found these will be null
      builder.set("q_tc", headerToUse.isTc()).set("q_ra", headerToUse.isRa())
          .set("q_ad", headerToUse.isAd()).set("q_rcode", headerToUse.getRawRcode());
    }
    //get these values from either the req or resp header
    builder.set("id", headerToUse.getId()).set("opcode", headerToUse.getRawOpcode())
        .set("rd", headerToUse.isRd()).set("z", headerToUse.isZ()).set("cd", headerToUse.isCd())
        .set("qdcount", (int) headerToUse.getQdCount());
    updateMetricMap(opcodes, headerToUse.getRawOpcode());
  }
  //values from request now, if no request found then use parts of the response.
  builder.set("rcode", rcode)
      .set("unixtime", reqTransport != null ? reqTransport.getTs() : respTransport.getTs())
      .set("time", ts.getTime())
      .set("time_micro", reqTransport != null ? reqTransport.getTsmicros() : respTransport.getTsmicros())
      .set("qname", normalizedQname).set("domainname", domaininfo.name).set("labels", domaininfo.labels)
      .set("src", reqTransport != null ? reqTransport.getSrc() : respTransport.getDst())
      .set("len", reqTransport != null ? reqTransport.getTotalLength() : null)
      .set("ttl", reqTransport != null ? reqTransport.getTtl() : null)
      .set("ipv", reqTransport != null ? (int) reqTransport.getIpVersion() : (int) respTransport.getIpVersion())
      .set("prot", reqTransport != null ? (int) reqTransport.getProtocol() : (int) respTransport.getProtocol())
      .set("srcp", reqTransport != null ? reqTransport.getSrcPort() : null)
      .set("dst", reqTransport != null ? reqTransport.getDst() : respTransport.getSrc())
      .set("dstp", reqTransport != null ? reqTransport.getDstPort() : respTransport.getSrcPort())
      .set("udp_sum", reqTransport != null ? reqTransport.getUdpsum() : null)
      .set("dns_len", requestMessage != null ? requestMessage.getBytes() : null);
  if (reqTransport != null) {
    //ip fragments in the request
    if (reqTransport.isFragmented()) {
      int req_frags = reqTransport.getReassembledFragments();
      builder.set("frag", req_frags);
      if ((reqTransport.getProtocol() == PcapReader.PROTOCOL_UDP) && req_frags > 1) {
        requestUDPFragmentedCount++;
      } else if ((reqTransport.getProtocol() == PcapReader.PROTOCOL_TCP) && req_frags > 1) {
        requestTCPFragmentedCount++;
      }
    }
    //update metrics
    requestBytes = requestBytes + reqTransport.getUdpLength();
    if (!combo.isExpired()) {
      //do not send expired queries, this will cause duplicate timestamps with low values
      //this looks like dips in the grafana graph
      metricManager.sendAggregated(MetricManager.METRIC_IMPORT_DNS_QUERY_COUNT, 1, time);
    }
  }
  if (rcode == RCODE_QUERY_WITHOUT_RESPONSE) {
    //no response found for query, update stats
    metricManager.sendAggregated(MetricManager.METRIC_IMPORT_DNS_NO_RESPONSE_COUNT, 1, time);
  }
  //question
  writeQuestion(q, builder);
  //EDNS0 for request
  writeRequestOptions(requestMessage, builder);
  //calculate the processing time
  writeProctime(reqTransport, respTransport, builder);
  //create the actual record and write to parquet file
  GenericRecord record = builder.build();
  writer.write(record);
  //create metrics
  domainnames.add(domaininfo.name);
  updateMetricMap(rcodes, rcode);
  //ip version stats
  updateIpVersionMetrics(reqTransport, respTransport);
  //if packet was expired and dropped from cache then increase stats for this
  if (combo.isExpired()) {
    metricManager.sendAggregated(MetricManager.METRIC_IMPORT_CACHE_EXPPIRED_DNS_QUERY_COUNT, 1, time, false);
  }
}
From source file:com.wso2telco.dep.reportingservice.dao.BillingDAO.java
/**
 * Gets the subscription created time.
 *
 * @param appId the app id
 * @param apiIdent the api ident
 * @return the subscription created time
 * @throws Exception the exception
 */
public Date getSubscriptionCreatedTime(int appId, APIIdentifier apiIdent) throws Exception {
  Connection connection = null;
  PreparedStatement ps = null;
  ResultSet results = null;
  Timestamp wfCreatedTime = null;
  StringBuilder sql = new StringBuilder();
  sql.append("SELECT WF.WF_CREATED_TIME FROM ").append(ReportingTable.AM_SUBSCRIPTION.getTObject())
      .append(" SUBS, ").append(ReportingTable.AM_WORKFLOWS.getTObject()).append(" WF ").append("WHERE ")
      .append("SUBS.APPLICATION_ID = ? ").append("AND SUBS.API_ID = ? ").append("AND WF.WF_TYPE= ? ")
      .append("AND WF.WF_REFERENCE=SUBS.SUBSCRIPTION_ID ");
  try {
    connection = DbUtils.getDbConnection(DataSourceNames.WSO2AM_DB);
    int apiId = ApiMgtDAO.getInstance().getAPIID(apiIdent, connection);
    ps = connection.prepareStatement(sql.toString());
    ps.setInt(1, appId);
    ps.setInt(2, apiId);
    ps.setString(3, WorkflowConstants.WF_TYPE_AM_SUBSCRIPTION_CREATION);
    results = ps.executeQuery();
    while (results.next()) {
      wfCreatedTime = results.getTimestamp("WF_CREATED_TIME");
    }
    if (log.isDebugEnabled()) {
      log.debug("Subscription creation Time of workflow for app " + appId + " and API " + apiId + " - "
          + wfCreatedTime);
    }
  } catch (Exception e) {
    handleException("getSubscriptionCreatedTime", e);
  } finally {
    DbUtils.closeAllConnections(ps, connection, results);
  }
  return (wfCreatedTime != null) ? new Date(wfCreatedTime.getTime()) : null;
}