List of usage examples for the java.sql.Date constructor Date(long date)
public Date(long date)
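Before the full examples below, a minimal self-contained sketch of the constructor on its own: Date(long date) wraps an epoch-millisecond value as a java.sql.Date, which is the pattern most of the snippets on this page use to build the value they bind to JDBC parameters or bean setters. The class name SqlDateExample is made up for illustration and does not come from any of the listed source files.

import java.sql.Date;

public class SqlDateExample {
    public static void main(String[] args) {
        // Wrap the current time in milliseconds, the pattern used repeatedly below
        long millis = System.currentTimeMillis();
        Date sqlDate = new Date(millis);

        // toString() renders only the date portion, in yyyy-MM-dd form
        System.out.println(sqlDate);
    }
}

Note that System.currentTimeMillis() and Calendar.getInstance().getTimeInMillis(), both of which appear in the examples below, return the same current-time millisecond value and can be used interchangeably here.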
From source file:com.sfs.whichdoctor.dao.MembershipDAOImpl.java
/**
 * Gets the candidate number.
 *
 * @param personGUID the person guid
 *
 * @return the candidate number
 *
 * @throws WhichDoctorDaoException the which doctor dao exception
 */
public final String getCandidateNumber(final int personGUID) throws WhichDoctorDaoException {
    String candidateNo = "";
    String year = "";

    /* By default use the current year */
    Date examDate = new Date(Calendar.getInstance().getTimeInMillis());

    if (personGUID > 0) {
        // Get year, use cutoff date of Nov 22 for year.
        // 23 Nov is next year
        BuilderBean builderBean = new BuilderBean();
        builderBean.setParameter("EXAMS", true);

        PersonBean person = this.personDAO.loadGUID(personGUID, builderBean);

        boolean writtenExamSet = false;
        if (person != null && person.getExams() != null) {
            for (ExamBean exam : person.getExams()) {
                // Assume that exams are returned in chronological
                // order, first written exam is the assumed year.
                if (!writtenExamSet) {
                    if (StringUtils.equals(exam.getType(), "Written Exam")) {
                        examDate = new Date(exam.getDateSat().getTime());
                        writtenExamSet = true;
                    }
                }
            }
        }
    }

    /* Format the year */
    SimpleDateFormat df = new SimpleDateFormat("yyyy");
    year = df.format(examDate);

    final int yearLength = 4;
    final int randomNumberLength = 4;
    final int randomSize = 10;

    candidateNo += year.substring(0, 1) + year.substring(2, yearLength);

    /* Generate four digit random number */
    Random rand = new Random();
    for (int x = 0; x < randomNumberLength; x++) {
        int random = rand.nextInt(randomSize);
        candidateNo += String.valueOf(random);
    }

    if (!uniqueCandidateNumber(personGUID, candidateNo)) {
        candidateNo = getCandidateNumber(personGUID);
    }
    return candidateNo;
}
From source file:dk.netarkivet.archive.arcrepositoryadmin.ReplicaCacheHelpers.java
/**
 * Method for updating the checksum_updated field for a given replica
 * in the replica table.
 * This is called when a checksum_job has been handled.
 *
 * The following fields for the entry in the replica table:
 * <br/> checksum_updated = now.
 *
 * @param rep The replica which has just been updated.
 * @param con An open connection to the archive database
 */
protected static void updateChecksumDateForReplica(Replica rep, Connection con) {
    PreparedStatement statement = null;
    try {
        Date now = new Date(Calendar.getInstance().getTimeInMillis());
        final String sql = "UPDATE replica SET checksum_updated = ? WHERE "
                + "replica_id = ?";
        statement = DBUtils.prepareStatement(con, sql, now, rep.getId());
        statement.executeUpdate();
        con.commit();
    } catch (Exception e) {
        String msg = "Cannot update the checksum_updated for replica '" + rep + "'.";
        log.warn(msg);
        throw new IOFailure(msg, e);
    } finally {
        DBUtils.closeStatementIfOpen(statement);
    }
}
From source file:eionet.meta.service.RDFVocabularyImportServiceTest.java
/**
 * In this test, two concepts RDF is imported. Concepts are derived from base RDF. Just identifiers are updated.
 * Purge operation is tested.
 *
 * @throws Exception
 */
@Test
@Rollback
public void testIfConceptsAddedAfterPurge() throws Exception {
    // get vocabulary folder
    VocabularyFolder vocabularyFolder = vocabularyService.getVocabularyFolder(TEST_VALID_VOCABULARY_ID);

    // get initial values of concepts with attributes
    List<VocabularyConcept> concepts = getVocabularyConceptsWithAttributes(vocabularyFolder);

    // get reader for RDF file
    Reader reader = getReaderFromResource("rdf_import/rdf_import_test_4.rdf");

    // import RDF into database
    vocabularyImportService.importRdfIntoVocabulary(reader, vocabularyFolder, true, false);
    Assert.assertFalse("Transaction rolled back (unexpected)",
            transactionManager.getTransaction(null).isRollbackOnly());

    // manually create values of new concept for comparison
    concepts.remove(2); // remove last object
    // there is not much object just update, no need to iterate
    concepts.get(0).setIdentifier("rdf_test_concept_1_after_purge");
    concepts.get(0).setStatus(StandardGenericStatus.VALID);
    concepts.get(0).setAcceptedDate(new Date(System.currentTimeMillis()));
    concepts.get(0).setStatusModified(new Date(System.currentTimeMillis()));
    concepts.get(1).setIdentifier("rdf_test_concept_2_after_purge");
    concepts.get(1).setStatus(StandardGenericStatus.VALID);
    concepts.get(1).setAcceptedDate(new Date(System.currentTimeMillis()));
    concepts.get(1).setStatusModified(new Date(System.currentTimeMillis()));

    // get updated values of concepts with attributes
    List<VocabularyConcept> updatedConcepts = getVocabularyConceptsWithAttributes(vocabularyFolder);
    Assert.assertEquals("Updated Concepts does not include 2 vocabulary concepts", updatedConcepts.size(), 2);

    // concepts expected to be inserted in the same order as they are in rdf file, get ids from updated beans
    concepts.get(0)
            .setId(findVocabularyConceptByIdentifier(updatedConcepts, concepts.get(0).getIdentifier()).getId());
    concepts.get(1)
            .setId(findVocabularyConceptByIdentifier(updatedConcepts, concepts.get(1).getIdentifier()).getId());

    // update related concepts
    List<DataElement> elems = VocabularyImportBaseHandler.getDataElementValuesByName("skos:broader",
            concepts.get(0).getElementAttributes());
    DataElement element = elems.get(0);
    element.setRelatedConceptId(concepts.get(1).getId());
    element.setRelatedConceptIdentifier(concepts.get(1).getIdentifier());

    elems = VocabularyImportBaseHandler.getDataElementValuesByName("skos:narrower",
            concepts.get(1).getElementAttributes());
    element = elems.get(0);
    element.setRelatedConceptId(concepts.get(0).getId());
    element.setRelatedConceptIdentifier(concepts.get(0).getIdentifier());

    elems = VocabularyImportBaseHandler.getDataElementValuesByName("skos:related",
            concepts.get(1).getElementAttributes());
    element = elems.get(0);
    element.setRelatedConceptLabel(null);
    element.setRelatedConceptId(null);
    element.setRelatedConceptIdentifier(null);
    element.setRelatedConceptVocSet(null);
    element.setRelatedConceptBaseURI(null);
    element.setRelatedConceptVocabulary(null);
    element.setAttributeValue(VocabularyFolder.getBaseUri(vocabularyFolder) + "rdf_test_concept_3");

    // compare manually updated objects with queried ones (after import operation)
    ReflectionAssert.assertReflectionEquals(concepts, updatedConcepts, ReflectionComparatorMode.LENIENT_DATES,
            ReflectionComparatorMode.LENIENT_ORDER);
}
From source file:org.apdplat.superword.tools.MySQLUtils.java
public static void main(String[] args) throws Exception {
    UserWord userWord = new UserWord();
    userWord.setDateTime(new Date(System.currentTimeMillis()));
    userWord.setWord("fabulous");
    userWord.setUserName("ysc");
    MySQLUtils.saveUserWordToDatabase(userWord);
    System.out.println(MySQLUtils.getHistoryUserWordsFromDatabase("ysc"));
}
From source file:nl.ordina.bag.etl.dao.AbstractBAGDAO.java
@Override
public void insert(final Standplaats standplaats) throws DAOException {
    try {
        transactionTemplate.execute(new TransactionCallbackWithoutResult() {
            @Override
            protected void doInTransactionWithoutResult(TransactionStatus status) {
                jdbcTemplate.update(new PreparedStatementCreator() {
                    @Override
                    public PreparedStatement createPreparedStatement(Connection connection) throws SQLException {
                        PreparedStatement ps = connection.prepareStatement("insert into bag_standplaats ("
                                + "bag_standplaats_id,"
                                + "aanduiding_record_inactief,"
                                + "aanduiding_record_correctie,"
                                + "officieel,"
                                + "standplaats_status,"
                                + "standplaats_geometrie,"
                                + "begindatum_tijdvak_geldigheid,"
                                + "einddatum_tijdvak_geldigheid,"
                                + "in_onderzoek,"
                                + "bron_documentdatum,"
                                + "bron_documentnummer,"
                                + "bag_nummeraanduiding_id"
                                + ") values (?,?,?,?,?,?,?,?,?,?,?,?)");
                        ps.setLong(1, standplaats.getIdentificatie());
                        ps.setInt(2, standplaats.getAanduidingRecordInactief().ordinal());
                        ps.setLong(3, standplaats.getAanduidingRecordCorrectie());
                        ps.setInt(4, standplaats.getOfficieel().ordinal());
                        ps.setInt(5, standplaats.getStandplaatsStatus().ordinal());
                        ps.setString(6, standplaats.getStandplaatsGeometrie());
                        ps.setTimestamp(7, new Timestamp(standplaats.getBegindatumTijdvakGeldigheid().getTime()));
                        if (standplaats.getEinddatumTijdvakGeldigheid() == null)
                            ps.setNull(8, Types.TIMESTAMP);
                        else
                            ps.setTimestamp(8, new Timestamp(standplaats.getEinddatumTijdvakGeldigheid().getTime()));
                        ps.setInt(9, standplaats.getInOnderzoek().ordinal());
                        ps.setDate(10, new Date(standplaats.getDocumentdatum().getTime()));
                        ps.setString(11, standplaats.getDocumentnummer());
                        ps.setLong(12, standplaats.getHoofdAdres());
                        return ps;
                    }
                });
                insertNevenadressen(TypeAdresseerbaarObject.STANDPLAATS, standplaats);
            }
        });
    } catch (DataAccessException e) {
        throw new DAOException("Error inserting standplaats: " + standplaats.getIdentificatie(), e);
    }
}
From source file:dk.netarkivet.archive.arcrepositoryadmin.ReplicaCacheHelpers.java
/**
 * Method for updating the filelist_updated field for a given replica
 * in the replica table.
 * This is called when a filelist_job or a checksum_job has been handled.
 *
 * The following fields for the entry in the replica table:
 * <br/> filelist_updated = now.
 *
 * @param rep The replica which has just been updated.
 * @param connection An open connection to the archive database
 */
protected static void updateFilelistDateForReplica(Replica rep, Connection connection) {
    PreparedStatement statement = null;
    try {
        Date now = new Date(Calendar.getInstance().getTimeInMillis());
        final String sql = "UPDATE replica SET filelist_updated = ? WHERE "
                + "replica_id = ?";
        statement = DBUtils.prepareStatement(connection, sql, now, rep.getId());
        statement.executeUpdate();
        connection.commit();
    } catch (Exception e) {
        String msg = "Cannot update the filelist_updated for replica '" + rep + "'.";
        log.warn(msg);
        throw new IOFailure(msg, e);
    } finally {
        DBUtils.closeStatementIfOpen(statement);
    }
}
From source file:eionet.meta.service.CSVVocabularyImportServiceTest.java
/**
 * In this test, two line CSV is imported. Rows are derived from base CSV. Just identifiers are updated.
 * Both purge operations are tested.
 *
 * @throws Exception
 */
@Test
@Rollback
public void testIfConceptsAddedAfterAllPurge() throws Exception {
    // get vocabulary folder
    VocabularyFolder vocabularyFolder = vocabularyService.getVocabularyFolder(TEST_VALID_VOCABULARY_ID);

    // get initial values of concepts with attributes
    List<VocabularyConcept> concepts = getVocabularyConceptsWithAttributes(vocabularyFolder);

    // get reader for CSV file
    Reader reader = getReaderFromResource("csv_import/csv_import_test_4.csv");

    // import CSV into database
    vocabularyImportService.importCsvIntoVocabulary(reader, vocabularyFolder, true, true);
    Assert.assertFalse("Transaction rolled back (unexpected)",
            transactionManager.getTransaction(null).isRollbackOnly());

    // manually create values of new concept for comparison
    concepts.remove(2); // remove last object
    // there is not much object just update, no need to iterate
    concepts.get(0).setIdentifier("csv_test_concept_1_after_purge");
    concepts.get(0).setStatus(StandardGenericStatus.VALID);
    concepts.get(0).setAcceptedDate(new Date(System.currentTimeMillis()));
    concepts.get(0).setStatusModified(new Date(System.currentTimeMillis()));
    concepts.get(1).setIdentifier("csv_test_concept_2_after_purge");
    concepts.get(1).setStatus(StandardGenericStatus.VALID);
    concepts.get(1).setAcceptedDate(new Date(System.currentTimeMillis()));
    concepts.get(1).setStatusModified(new Date(System.currentTimeMillis()));

    // get updated values of concepts with attributes
    List<VocabularyConcept> updatedConcepts = getVocabularyConceptsWithAttributes(vocabularyFolder);
    Assert.assertEquals("Updated Concepts does not include 2 vocabulary concepts", 2, updatedConcepts.size());

    // concepts should be inserted in the same order as they are in csv file, get ids from updated beans
    concepts.get(0)
            .setId(findVocabularyConceptByIdentifier(updatedConcepts, concepts.get(0).getIdentifier()).getId());
    concepts.get(1)
            .setId(findVocabularyConceptByIdentifier(updatedConcepts, concepts.get(1).getIdentifier()).getId());

    // compare manually updated objects with queried ones (after import operation)
    ReflectionAssert.assertReflectionEquals(concepts, updatedConcepts, ReflectionComparatorMode.LENIENT_DATES,
            ReflectionComparatorMode.LENIENT_ORDER);
}
From source file:com.heneryh.aquanotes.ui.controllers.ControllersActivity.java
/**
 * Handle updates to a controller tabhost based on a query result stored in a cursor.
 */
private void updateAllControllerTabs(Ctlr cntl, Cursor cursor) {
    try {
        // Header Area
        cntl.mTitleString = cursor.getString(ControllersQuery.TITLE);
        cntl.mSubtitle = cursor.getString(ControllersQuery.WAN_URL);
        cntl.mTimestamp = cursor.getLong(ControllersQuery.LAST_UPDATED);
        Date timestampD = new Date(cntl.mTimestamp);
        SimpleDateFormat formatter = new SimpleDateFormat("M/d/yy h:mm a");
        String timestampS = formatter.format(timestampD);
        cntl.mTitleView.setText(timestampS);
        cntl.mSubtitleView.setText(cntl.mSubtitle);

        try {
            cntl.mControllerId = Integer.valueOf(cursor.getString(ControllersQuery._ID));
        } catch (NumberFormatException e) {
            cntl.mControllerId = -1;
        }

        if (controllerUpdateFlag) {
            Uri newProbeUri = Data.buildQueryPDataAtUri(cntl.mControllerId, cntl.mTimestamp);
            Uri newOutletUri = Data.buildQueryODataAtUri(cntl.mControllerId, cntl.mTimestamp);
            cntl.mProbesFragment.reloadSelf(newProbeUri);
            cntl.mOutletsFragment.reloadSelf(newOutletUri);
            controllerUpdateFlag = false;
        }

        // AnalyticsUtils.getInstance(this).trackPageView("/Sessions/" + cntl.mTitleString);
        updateWorkspaceHeader(cntl.index);
    } finally {
        // cursor.close(); closed a level above
    }
}
From source file:com.wso2telco.dep.reportingservice.northbound.NbHostObjectUtils.java
/**
 * Apply payment charges by category.
 *
 * @param opSubscription the op subscription
 * @param categoryCharge the category charge
 * @param paymentRequestSet the payment request set
 * @throws Exception
 */
private static void applyPaymentChargesByCategory(BillingSubscription.OperatorSubscription opSubscription,
        CategoryCharge categoryCharge, Set<PaymentRequestDTO> paymentRequestSet) throws Exception {
    TaxDAO taxDAO = new TaxDAO();
    ChargeRate rate = opSubscription.getRate();
    List<Tax> taxList = taxDAO.getTaxesForTaxList(rate.getTaxList());
    BigDecimal totalCharge = BigDecimal.ZERO;
    BigDecimal totalPrice = BigDecimal.ZERO;
    BigDecimal totalTax = BigDecimal.ZERO;

    for (PaymentRequestDTO paymentRequest : paymentRequestSet) {
        totalCharge = totalCharge.add(paymentRequest.getAmount());
        BigDecimal price = BigDecimal.ZERO;
        CategoryEntity rateCategories = new CategoryEntity();
        if (rateCategories == null) {
            throw new APIManagementException(
                    "Payment Categoreis required for QUOTA charging are not specified in rate-card.xml");
        }
        BigDecimal catpercent = rate.getValue().divide(new BigDecimal(100));
        Date date = new Date(paymentRequest.getDate().getTime());
        for (Tax tax : taxList) {
            // check if the date of payment request falls between this tax
            // validity period
            if (!date.before(tax.getEffective_from()) && !date.after(tax.getEffective_to())) {
                // totalTax += taxFraction x paymentAmount
                totalTax = totalTax.add(tax.getValue().multiply(price));
            }
        }
    }
    // Get the percentage from the rate value
    // BigDecimal percentage = rate.getValue().divide(new BigDecimal(100));
    // apply category wise charge percentage
}
From source file:org.apache.hadoop.hive.ql.exec.vector.VectorizedBatchUtil.java
public static Writable getPrimitiveWritable(PrimitiveCategory primitiveCategory) {
    switch (primitiveCategory) {
    case VOID:
        return null;
    case BOOLEAN:
        return new BooleanWritable(false);
    case BYTE:
        return new ByteWritable((byte) 0);
    case SHORT:
        return new ShortWritable((short) 0);
    case INT:
        return new IntWritable(0);
    case LONG:
        return new LongWritable(0);
    case TIMESTAMP:
        return new TimestampWritable(new Timestamp(0));
    case DATE:
        return new DateWritable(new Date(0));
    case FLOAT:
        return new FloatWritable(0);
    case DOUBLE:
        return new DoubleWritable(0);
    case BINARY:
        return new BytesWritable(ArrayUtils.EMPTY_BYTE_ARRAY);
    case STRING:
        return new Text(ArrayUtils.EMPTY_BYTE_ARRAY);
    case VARCHAR:
        return new HiveVarcharWritable(new HiveVarchar(StringUtils.EMPTY, -1));
    case CHAR:
        return new HiveCharWritable(new HiveChar(StringUtils.EMPTY, -1));
    case DECIMAL:
        return new HiveDecimalWritable();
    case INTERVAL_YEAR_MONTH:
        return new HiveIntervalYearMonthWritable();
    case INTERVAL_DAY_TIME:
        return new HiveIntervalDayTimeWritable();
    default:
        throw new RuntimeException("Primitive category " + primitiveCategory.name() + " not supported");
    }
}