List of usage examples for java.util.Date.equals(Object)
public boolean equals(Object obj)
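Date.equals(Object) returns true only when the argument is also a Date and both represent exactly the same instant in milliseconds (their getTime() values are equal). A minimal sketch of that behaviour (class name and values are illustrative, not taken from the examples below):

import java.util.Date;

public class DateEqualsDemo {
    public static void main(String[] args) {
        Date a = new Date(1_000_000_000_000L); // a fixed instant, in epoch milliseconds
        Date b = new Date(1_000_000_000_000L); // the same instant
        Date c = new Date(1_000_000_000_001L); // one millisecond later

        System.out.println(a.equals(b)); // true: getTime() values match exactly
        System.out.println(a.equals(c)); // false: differs by a single millisecond
        System.out.println(!a.before(b) && !a.after(b)); // true: equality expressed via ordering
    }
}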
From source file:org.libreplan.web.resources.search.NewAllocationSelectorController.java
public Constraint checkConstraintFilteringDate() {
    return (comp, value) -> {
        if (value == null) {
            if (comp.getId().equals("startDateLoadRatiosDatebox")) {
                throw new WrongValueException(comp, _("Start filtering date cannot be empty"));
            } else if (comp.getId().equals("endDateLoadRatiosDatebox")) {
                throw new WrongValueException(comp, _("End filtering date cannot be empty"));
            }
        }

        Date startDate;
        if (comp.getId().equals("startDateLoadRatiosDatebox")) {
            startDate = (Date) value;
        } else {
            startDate = (Date) startDateLoadRatiosDatebox.getRawValue();
        }

        Date endDate;
        if (comp.getId().equals("endDateLoadRatiosDatebox")) {
            endDate = (Date) value;
        } else {
            endDate = (Date) endDateLoadRatiosDatebox.getRawValue();
        }

        if ((startDate != null) && (endDate != null)) {
            if ((startDate.after(endDate)) || (startDate.equals(endDate))) {
                throw new WrongValueException(comp, _("Start filtering date must be before than end filtering date"));
            }
        }
    };
}
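A note on the idiom above: for non-null dates, startDate.after(endDate) || startDate.equals(endDate) is the negation of startDate.before(endDate), so the "start must be strictly before end" rule can also be written with a single before() call. A minimal sketch, using a hypothetical helper name that is not part of LibrePlan:

import java.util.Date;

final class FilterRangeCheck {
    private FilterRangeCheck() {
    }

    // True when start is strictly before end; equivalent to
    // !(start.after(end) || start.equals(end)) for non-null dates.
    static boolean isValidRange(Date start, Date end) {
        return start != null && end != null && start.before(end);
    }
}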
From source file:org.openehealth.coala.beans.ConsentBean.java
/**
 * Method to validate input parameters (validFrom and validUntil) of consent
 * creation. This is needed to have proper error notification on missing
 * date input at consent creation view.
 *
 * @param event
 */
public void validateCreateConsentParameters(ComponentSystemEvent event) {
    FacesContext fc = FacesContext.getCurrentInstance();
    String messages = fc.getApplication().getMessageBundle();
    Locale locale = new Locale(localeHandler.getLocale());
    ResourceBundle bundle = ResourceBundle.getBundle(messages, locale);

    UIComponent components = event.getComponent();
    UIInput validFromDateInput = (UIInput) components.findComponent("validFromInput");
    UIInput validUntilDateInput = (UIInput) components.findComponent("validUntilInput");

    boolean validFromDateEmpty = false;
    boolean validUntilDateEmpty = false;

    if (validFromDateInput.getLocalValue() == null
            || validFromDateInput.getLocalValue().toString().trim().isEmpty()) {
        validFromDateEmpty = true;
    }
    if (validUntilDateInput.getLocalValue() == null
            || validUntilDateInput.getLocalValue().toString().trim().isEmpty()) {
        validUntilDateEmpty = true;
    }
    if (validFromDateEmpty || validUntilDateEmpty) {
        validationSuccessful = false;
        FacesMessage msg = new FacesMessage(bundle.getString("errors.nonEmptyValidationDates"), "");
        msg.setSeverity(FacesMessage.SEVERITY_WARN);
        fc.addMessage(components.getClientId(), msg);
        // passed to the Render Response phase
        fc.renderResponse();
    }

    // validates if validFrom is after or equals validUntil
    Date from = (Date) validFromDateInput.getValue();
    Date until = (Date) validUntilDateInput.getValue();
    if (from == null || until == null) {
        validationSuccessful = false;
        FacesMessage msg = new FacesMessage("Please provide valid date input values for From AND Until.", "");
        msg.setSeverity(FacesMessage.SEVERITY_WARN);
        fc.addMessage(components.getClientId(), msg);
        // passed to the Render Response phase
        fc.renderResponse();
    } else if (from != null && until != null) {
        if (from.after(until) || from.equals(until)) {
            validationSuccessful = false;
            FacesMessage msg = new FacesMessage(bundle.getString("errors.fromBeforeUntil"), "");
            msg.setSeverity(FacesMessage.SEVERITY_WARN);
            fc.addMessage(components.getClientId(), msg);
            // passed to the Render Response phase
            fc.renderResponse();
        }
        // all checks passed now, set validationSuccessful to true, which causes the "Register" button to become active!
        else {
            LOG.info("Consent creation validation has passed now.");
            validationSuccessful = true;
        }
    } else {
        validationSuccessful = false;
        FacesMessage msg = new FacesMessage("Please provide valid date input values for From AND Until.", "");
        msg.setSeverity(FacesMessage.SEVERITY_WARN);
        fc.addMessage(components.getClientId(), msg);
        // passed to the Render Response phase
        fc.renderResponse();
    }
}
From source file:org.jpos.gl.GLSession.java
private void deleteGLTransactions(Journal journal, Date start, Date end)
        throws HibernateException, GLException {
    boolean equalDate = start.equals(end);

    StringBuffer qs = new StringBuffer("from org.jpos.gl.GLTransaction where journal = :journal");
    if (equalDate) {
        qs.append(" and postDate = :date");
    } else {
        qs.append(" and postDate >= :start");
        qs.append(" and postDate <= :endDate");
    }
    Query q = session.createQuery(qs.toString());
    q.setLong("journal", journal.getId());
    if (equalDate)
        q.setParameter("date", start);
    else {
        q.setParameter("start", start);
        q.setParameter("endDate", end);
    }
    ScrollableResults sr = q.scroll(ScrollMode.FORWARD_ONLY);
    while (sr.next()) {
        session.delete(sr.get(0));
    }
}
From source file:edu.mit.mobile.android.locast.data.Sync.java
/**
 * Given a live cursor pointing to a data item and/or a set of contentValues loaded from the network,
 * attempt to sync.
 * Either c or cvNet can be null, but not both.
 *
 * @param c A cursor pointing to the data item. Null is OK here.
 * @param jsonObject JSON object for the item as loaded from the network. null is OK here.
 * @param sync An empty JsonSyncableItem object.
 * @param publicPath TODO
 *
 * @return True if the item has been modified on either end.
 * @throws IOException
 */
private boolean syncItem(Uri toSync, Cursor c, JSONObject jsonObject, JsonSyncableItem sync,
        SyncProgressNotifier syncProgress, String publicPath) throws SyncException, IOException {
    boolean modified = false;
    boolean needToCloseCursor = false;
    boolean toSyncIsIndex = false;
    final SyncMap syncMap = sync.getSyncMap();

    Uri locUri = null;
    final Uri origToSync = toSync;
    ContentValues cvNet = null;
    final Context context = getApplicationContext();
    final ContentResolver cr = context.getContentResolver();
    if (jsonObject != null) {
        if ("http".equals(toSync.getScheme()) || "https".equals(toSync.getScheme())) {
            // we successfully loaded it from the 'net, but toSync is really for local URIs. Erase it.
            toSync = sync.getContentUri();
            if (toSync == null) {
                if (DEBUG) {
                    Log.w(TAG, "cannot get local URI for " + origToSync + ". Skipping...");
                }
                return false;
            }
        }
        try {
            cvNet = JsonSyncableItem.fromJSON(context, null, jsonObject, syncMap);
        } catch (final Exception e) {
            final SyncException se = new SyncException("Problem loading JSON object.");
            se.initCause(e);
            throw se;
        }
    }

    final String contentType = cr.getType(toSync);

    if (c != null) {
        if (contentType.startsWith(CONTENT_TYPE_PREFIX_DIR)) {
            locUri = ContentUris.withAppendedId(toSync, c.getLong(c.getColumnIndex(JsonSyncableItem._ID)))
                    .buildUpon().query(null).build();
            toSyncIsIndex = true;
        } else {
            locUri = toSync;
        }

        // skip any items already sync'd
        if (mLastUpdated.isUpdatedRecently(locUri)) {
            return false;
        }

        final int draftCol = c.getColumnIndex(TaggableItem._DRAFT);
        if (draftCol != -1 && c.getInt(draftCol) != 0) {
            if (DEBUG) {
                Log.d(TAG, locUri + " is marked a draft. Not syncing.");
            }
            return false;
        }

        syncMap.onPreSyncItem(cr, locUri, c);
    } else if (contentType.startsWith(CONTENT_TYPE_PREFIX_DIR)) {
        // strip any query strings
        toSync = toSync.buildUpon().query(null).build();
    }

    // if (c != null){
    //     MediaProvider.dumpCursorToLog(c, sync.getFullProjection());
    // }

    // when the PUBLIC_URI is null, that means it's only local
    final int pubUriColumn = (c != null) ? c.getColumnIndex(JsonSyncableItem._PUBLIC_URI) : -1;
    if (c != null && (c.isNull(pubUriColumn) || c.getString(pubUriColumn) == "")) {
        // new content on the local side only. Gotta publish.
        try {
            jsonObject = JsonSyncableItem.toJSON(context, locUri, c, syncMap);
            if (publicPath == null) {
                publicPath = MediaProvider.getPostPath(this, locUri);
            }

            if (DEBUG) {
                Log.d(TAG, "Posting " + locUri + " to " + publicPath);
            }

            // The response from a post to create a new item should be the newly created item,
            // which contains the public ID that we need.
            jsonObject = nc.postJson(publicPath, jsonObject);

            final ContentValues cvUpdate = JsonSyncableItem.fromJSON(context, locUri, jsonObject, syncMap);
            if (cr.update(locUri, cvUpdate, null, null) == 1) {
                // at this point, server and client should be in sync.
                mLastUpdated.markUpdated(locUri);
                if (DEBUG) {
                    Log.i(TAG, "Hooray! " + locUri + " has been posted succesfully.");
                }
            } else {
                Log.e(TAG, "update of " + locUri + " failed");
            }
            modified = true;

        } catch (final Exception e) {
            final SyncException se = new SyncException(getString(R.string.error_sync_no_post));
            se.initCause(e);
            throw se;
        }

        // only on the remote side, so pull it in.
    } else if (c == null && cvNet != null) {
        if (DEBUG) {
            Log.i(TAG, "Only on the remote side, using network-provided values.");
        }
        final String[] params = { cvNet.getAsString(JsonSyncableItem._PUBLIC_URI) };
        c = cr.query(toSync, sync.getFullProjection(), JsonSyncableItem._PUBLIC_URI + "=?", params, null);
        needToCloseCursor = true;
        if (!c.moveToFirst()) {
            locUri = cr.insert(toSync, cvNet);
            modified = true;
        } else {
            locUri = ContentUris.withAppendedId(toSync, c.getLong(c.getColumnIndex(JsonSyncableItem._ID)))
                    .buildUpon().query(null).build();
            syncMap.onPreSyncItem(cr, locUri, c);
        }
    }

    // we've now found data on both sides, so sync them.
    if (!modified && c != null) {
        publicPath = c.getString(c.getColumnIndex(JsonSyncableItem._PUBLIC_URI));

        try {
            if (cvNet == null) {
                try {
                    if (publicPath == null && toSyncIsIndex && !MediaProvider.canSync(locUri)) {
                        // At this point, we've already checked the index and it doesn't contain the item (otherwise it would be in the syncdItems).
                        // If we can't sync individual items, it's possible that the index is paged or the item has been deleted.
                        if (DEBUG) {
                            Log.w(TAG, "Asked to sync " + locUri + " but item wasn't in server index and cannot sync individual entries. Skipping and hoping it is up to date.");
                        }
                        return false;
                    } else {
                        if (mLastUpdated.isUpdatedRecently(nc.getFullUri(publicPath))) {
                            if (DEBUG) {
                                Log.d(TAG, "already sync'd! " + publicPath);
                            }
                            return false;
                        }
                        if (jsonObject == null) {
                            jsonObject = nc.getObject(publicPath);
                        }
                        cvNet = JsonSyncableItem.fromJSON(context, locUri, jsonObject, syncMap);
                    }
                } catch (final HttpResponseException hre) {
                    if (hre.getStatusCode() == HttpStatus.SC_NOT_FOUND) {
                        final SyncItemDeletedException side = new SyncItemDeletedException(locUri);
                        side.initCause(hre);
                        throw side;
                    }
                }
            }
            if (cvNet == null) {
                Log.e(TAG, "got null values from fromJSON() on item " + locUri + ": "
                        + (jsonObject != null ? jsonObject.toString() : "<< no json object >>"));
                return false;
            }
            final Date netLastModified = new Date(cvNet.getAsLong(JsonSyncableItem._MODIFIED_DATE));
            final Date locLastModified = new Date(c.getLong(c.getColumnIndex(JsonSyncableItem._MODIFIED_DATE)));
            if (netLastModified.equals(locLastModified)) {
                // same! yay! We don't need to do anything.
                if (DEBUG) {
                    Log.d("LocastSync", locUri + " doesn't need to sync.");
                }
            } else if (netLastModified.after(locLastModified)) {
                // remote is more up to date, update!
                cr.update(locUri, cvNet, null, null);
                if (DEBUG) {
                    Log.d("LocastSync", cvNet + " is newer than " + locUri);
                }
                modified = true;
            } else if (netLastModified.before(locLastModified)) {
                // local is more up to date, propagate!
                jsonObject = nc.putJson(publicPath, JsonSyncableItem.toJSON(context, locUri, c, syncMap));
                if (DEBUG) {
                    Log.d("LocastSync", cvNet + " is older than " + locUri);
                }
                modified = true;
            }
            mLastUpdated.markUpdated(nc.getFullUri(publicPath));
        } catch (final JSONException e) {
            final SyncException se = new SyncException(
                    "Item sync error for path " + publicPath + ": invalid JSON.");
            se.initCause(e);
            throw se;
        } catch (final NetworkProtocolException e) {
            final SyncException se = new SyncException(
                    "Item sync error for path " + publicPath + ": " + e.getHttpResponseMessage());
            se.initCause(e);
            throw se;
        } finally {
            if (needToCloseCursor) {
                c.close();
                needToCloseCursor = false;
            }
        }
    }

    if (needToCloseCursor) {
        c.close();
    }

    if (locUri == null) {
        throw new RuntimeException("Never got a local URI for a sync'd item.");
    }

    // two calls are made in two different contexts. Which context you use depends on the application.
    syncMap.onPostSyncItem(context, locUri, jsonObject, modified);
    sync.onPostSyncItem(context, locUri, jsonObject, modified);

    mLastUpdated.markUpdated(locUri);

    // needed for things that may have requested a sync with a different URI than what was eventually produced.
    if (origToSync != locUri) {
        mLastUpdated.markUpdated(origToSync);
        cr.notifyChange(origToSync, null);
    }

    return modified;
}
From source file:ddf.security.assertion.impl.SecurityAssertionImpl.java
@Override
public boolean isPresentlyValid() {
    Date now = new Date();

    if (getNotBefore() != null && now.before(getNotBefore())) {
        LOGGER.debug("SAML Assertion Time Bound Check Failed.");
        LOGGER.debug("\t Checked time of {} is before the NotBefore time of {}", now, getNotBefore());
        return false;
    }

    if (getNotOnOrAfter() != null && (now.equals(getNotOnOrAfter()) || now.after(getNotOnOrAfter()))) {
        LOGGER.debug("SAML Assertion Time Bound Check Failed.");
        LOGGER.debug("\t Checked time of {} is equal to or after the NotOnOrAfter time of {}", now,
                getNotOnOrAfter());
        return false;
    }

    return true;
}
From source file:org.jasig.portlet.announcements.model.validators.AnnouncementValidator.java
public void validate(Object obj, Errors errors) {
    ValidationUtils.rejectIfEmptyOrWhitespace(errors, "title", "addAnn.title.required.error");
    ValidationUtils.rejectIfEmptyOrWhitespace(errors, "abstractText", "addAnn.abstract.required.error");
    if (!allowEmptyMessage) {
        ValidationUtils.rejectIfEmptyOrWhitespace(errors, "message", "addAnn.message.required.error");
    }
    ValidationUtils.rejectIfEmptyOrWhitespace(errors, "startDisplay", "addAnn.start.required.error");
    if (!allowOpenEndDate) {
        ValidationUtils.rejectIfEmptyOrWhitespace(errors, "endDisplay", "addAnn.end.required.error");
    }

    Announcement test = (Announcement) obj;

    if (test.getLink() != null && !"".equals(test.getLink().trim())) {
        if (!validUrlFormat(test.getLink()))
            errors.rejectValue("link", "addAnn.link.malformed.error");
    }

    UnwantedCharacterScrubber vHelper = new UnwantedCharacterScrubber();

    logger.debug(String.format("Original announcement abstract: [%s]", test.getAbstractText()));
    test.setAbstractText(vHelper.scrubUnwantedCharacters(test.getAbstractText()));
    logger.debug(String.format("Converted announcement abstract: [%s]", test.getAbstractText()));

    logger.debug(String.format("Original announcement title: [%s]", test.getTitle()));
    test.setTitle(vHelper.scrubUnwantedCharacters(test.getTitle()));
    logger.debug(String.format("Converted announcement title: [%s]", test.getTitle()));

    Date startDisplay = test.getStartDisplay();
    Date endDisplay = test.getEndDisplay();
    Date now = new Date();

    if (startDisplay != null) {
        Calendar calStart = new GregorianCalendar();
        calStart.setTime(startDisplay);
        if (calStart.get(Calendar.YEAR) > 2050) {
            errors.rejectValue("startDisplay", "addAnn.toofaraway");
        }
        if (calStart.get(Calendar.YEAR) < 2008) {
            errors.rejectValue("startDisplay", "addAnn.tooold");
        }
    }

    if (endDisplay != null) {
        Calendar calEnd = new GregorianCalendar();
        calEnd.setTime(endDisplay);
        if (calEnd.get(Calendar.YEAR) > 2050) {
            errors.rejectValue("endDisplay", "addAnn.toofaraway");
        }
        if (calEnd.get(Calendar.YEAR) < 2008) {
            errors.rejectValue("endDisplay", "addAnn.tooold");
        }
    }

    if (endDisplay != null && startDisplay != null) {
        Calendar calStart = new GregorianCalendar();
        calStart.setTime(startDisplay);
        Calendar calEnd = new GregorianCalendar();
        calEnd.setTime(endDisplay);

        if (endDisplay.before(now) && !endDisplay.equals(startDisplay)) {
            errors.rejectValue("endDisplay", "addAnn.endDisplay.dateinpast");
        }
        if (startDisplay.after(endDisplay)) {
            errors.rejectValue("startDisplay", "addAnn.startDisplay.afterenddisplay");
        }
        if (startDisplay.equals(endDisplay)) {
            errors.rejectValue("endDisplay", "addAnn.endDisplay.sameAs.startDisplay");
        }
    }
}
From source file:org.apache.lens.cube.parse.StorageCandidate.java
/**
 * Gets FactPartitions for the given fact using the following logic
 *
 * 1. Find the max update interval that will be used for the query. Lets assume the time
 * range is 15 Sep to 15 Dec and the fact has two storages with update periods as MONTHLY,DAILY,HOURLY.
 * In this case the data for [15 Sep - 1 Oct)U[1 Dec - 15 Dec) will be answered by DAILY partitions
 * and [1 Oct - 1 Dec) will be answered by MONTHLY partitions. The max interval for this query will be MONTHLY.
 *
 * 2. Prune storages that do not fall in the query's time range.
 * {@link org.apache.lens.cube.metadata.CubeMetastoreClient#isStorageTableCandidateForRange(String, Date, Date)}
 *
 * 3. Iterate over the max interval. In our case it will give two months, Oct and Nov. Find partitions for
 * these two months. Check validity of FactPartitions for Oct and Nov
 * via {@link #updatePartitionStorage(FactPartition)}.
 * If the partition is missing, try getting partitions for the time range from other update periods (DAILY,HOURLY).
 * This is achieved by calling getPartitions() recursively but passing only 2 update periods (DAILY,HOURLY)
 *
 * 4. If the monthly partitions are found, check for lookahead partitions and call getPartitions recursively for the
 * remaining time intervals i.e., [15 Sep - 1 Oct) and [1 Dec - 15 Dec)
 *
 * TODO union : Move this into util.
 */
private boolean getPartitions(Date fromDate, Date toDate, String partCol, Set<FactPartition> partitions,
        TreeSet<UpdatePeriod> updatePeriods, boolean addNonExistingParts, boolean failOnPartialData,
        PartitionRangesForPartitionColumns missingPartitions) throws LensException {
    if (fromDate.equals(toDate) || fromDate.after(toDate)) {
        return true;
    }
    if (updatePeriods == null || updatePeriods.isEmpty()) {
        return false;
    }

    UpdatePeriod maxInterval = CubeFactTable.maxIntervalInRange(fromDate, toDate, updatePeriods);
    if (maxInterval == null) {
        log.info("No max interval for range: {} to {}", fromDate, toDate);
        return false;
    }

    if (maxInterval == UpdatePeriod.CONTINUOUS
            && cubeQueryContext.getRangeWriter().getClass().equals(BetweenTimeRangeWriter.class)) {
        FactPartition part = new FactPartition(partCol, fromDate, maxInterval, null, partWhereClauseFormat);
        partitions.add(part);
        part.getStorageTables().add(storageTable);
        part = new FactPartition(partCol, toDate, maxInterval, null, partWhereClauseFormat);
        partitions.add(part);
        part.getStorageTables().add(storageTable);
        this.participatingUpdatePeriods.add(maxInterval);
        log.info("Added continuous fact partition for storage table {}", storageName);
        return true;
    }

    if (!getCubeMetastoreClient().partColExists(this.getFact(), storageName, partCol)) {
        log.info("{} does not exist in {}", partCol, name);
        return false;
    }

    Date maxIntervalStorageTblStartDate = getStorageTableStartDate(maxInterval);
    Date maxIntervalStorageTblEndDate = getStorageTableEndDate(maxInterval);

    TreeSet<UpdatePeriod> remainingIntervals = new TreeSet<>(updatePeriods);
    remainingIntervals.remove(maxInterval);
    if (!isCandidatePartiallyValidForTimeRange(maxIntervalStorageTblStartDate, maxIntervalStorageTblEndDate,
            fromDate, toDate)) {
        // Check the time range in remainingIntervals as maxInterval is not useful
        return getPartitions(fromDate, toDate, partCol, partitions, remainingIntervals, addNonExistingParts,
                failOnPartialData, missingPartitions);
    }

    Date ceilFromDate = DateUtil.getCeilDate(
            fromDate.after(maxIntervalStorageTblStartDate) ? fromDate : maxIntervalStorageTblStartDate,
            maxInterval);
    Date floorToDate = DateUtil.getFloorDate(
            toDate.before(maxIntervalStorageTblEndDate) ? toDate : maxIntervalStorageTblEndDate, maxInterval);
    if (ceilFromDate.equals(floorToDate) || floorToDate.before(ceilFromDate)) {
        return getPartitions(fromDate, toDate, partCol, partitions, remainingIntervals, addNonExistingParts,
                failOnPartialData, missingPartitions);
    }

    int lookAheadNumParts = getConf().getInt(CubeQueryConfUtil.getLookAheadPTPartsKey(maxInterval),
            CubeQueryConfUtil.DEFAULT_LOOK_AHEAD_PT_PARTS);

    TimeRange.Iterable.Iterator iter = TimeRange.iterable(ceilFromDate, floorToDate, maxInterval, 1).iterator();
    // add partitions from ceilFrom to floorTo
    while (iter.hasNext()) {
        Date dt = iter.next();
        Date nextDt = iter.peekNext();
        FactPartition part = new FactPartition(partCol, dt, maxInterval, null, partWhereClauseFormat);
        updatePartitionStorage(part);
        log.debug("Storage tables containing Partition {} are: {}", part, part.getStorageTables());
        if (part.isFound()) {
            log.debug("Adding existing partition {}", part);
            partitions.add(part);
            this.participatingUpdatePeriods.add(maxInterval);
            log.debug("Looking for look ahead process time partitions for {}", part);
            if (processTimePartCol == null) {
                log.debug("processTimePartCol is null");
            } else if (partCol.equals(processTimePartCol)) {
                log.debug("part column is process time col");
            } else if (updatePeriods.first().equals(maxInterval)) {
                log.debug("Update period is the least update period");
            } else if ((iter.getNumIters() - iter.getCounter()) > lookAheadNumParts) {
                // see if this is the part of the last-n look ahead partitions
                log.debug("Not a look ahead partition");
            } else {
                log.debug("Looking for look ahead process time partitions for {}", part);
                // check if finer partitions are required
                // final partitions are required if no partitions from
                // look-ahead
                // process time are present
                TimeRange.Iterable.Iterator processTimeIter = TimeRange
                        .iterable(nextDt, lookAheadNumParts, maxInterval, 1).iterator();
                while (processTimeIter.hasNext()) {
                    Date pdt = processTimeIter.next();
                    Date nextPdt = processTimeIter.peekNext();
                    FactPartition processTimePartition = new FactPartition(processTimePartCol, pdt, maxInterval,
                            null, partWhereClauseFormat);
                    updatePartitionStorage(processTimePartition);
                    if (processTimePartition.isFound()) {
                        log.debug("Finer parts not required for look-ahead partition :{}", part);
                    } else {
                        log.debug("Looked ahead process time partition {} is not found", processTimePartition);
                        TreeSet<UpdatePeriod> newset = new TreeSet<UpdatePeriod>();
                        newset.addAll(updatePeriods);
                        newset.remove(maxInterval);
                        log.debug("newset of update periods:{}", newset);
                        if (!newset.isEmpty()) {
                            // Get partitions for look ahead process time
                            log.debug("Looking for process time partitions between {} and {}", pdt, nextPdt);
                            Set<FactPartition> processTimeParts = getPartitions(
                                    TimeRange.builder().fromDate(pdt).toDate(nextPdt)
                                            .partitionColumn(processTimePartCol).build(),
                                    newset, true, failOnPartialData, missingPartitions);
                            log.debug("Look ahead partitions: {}", processTimeParts);
                            TimeRange timeRange = TimeRange.builder().fromDate(dt).toDate(nextDt).build();
                            for (FactPartition pPart : processTimeParts) {
                                log.debug("Looking for finer partitions in pPart: {}", pPart);
                                for (Date date : timeRange.iterable(pPart.getPeriod(), 1)) {
                                    FactPartition innerPart = new FactPartition(partCol, date, pPart.getPeriod(),
                                            pPart, partWhereClauseFormat);
                                    updatePartitionStorage(innerPart);
                                    innerPart.setFound(pPart.isFound());
                                    if (innerPart.isFound()) {
                                        partitions.add(innerPart);
                                    }
                                }
                                log.debug("added all sub partitions blindly in pPart: {}", pPart);
                            }
                        }
                    }
                }
            }
        } else {
            log.info("Partition:{} does not exist in any storage table", part);
            if (!getPartitions(dt, nextDt, partCol, partitions, remainingIntervals, false, failOnPartialData,
                    missingPartitions)) {
                log.debug("Adding non existing partition {}", part);
                if (addNonExistingParts) {
                    // Add non existing partitions for all cases of whether we populate all non existing or not.
                    this.participatingUpdatePeriods.add(maxInterval);
                    missingPartitions.add(part);
                    if (!failOnPartialData) {
                        partitions.add(part);
                        part.getStorageTables().add(storageTable);
                    }
                } else {
                    log.info("No finer granualar partitions exist for {}", part);
                    return false;
                }
            } else {
                log.debug("Finer granualar partitions added for {}", part);
            }
        }
    }
    return getPartitions(fromDate, ceilFromDate, partCol, partitions, remainingIntervals, addNonExistingParts,
            failOnPartialData, missingPartitions)
            && getPartitions(floorToDate, toDate, partCol, partitions, remainingIntervals, addNonExistingParts,
                    failOnPartialData, missingPartitions);
}
From source file:gov.utah.dts.det.ccl.service.impl.FacilityServiceImpl.java
@Override
public Facility updateInitialRegulationDate(Facility facility, Date initialRegulationDate) {
    if (facility == null) {
        throw new IllegalArgumentException("Facility must not be null");
    }
    if (initialRegulationDate != null && !initialRegulationDate.equals(facility.getInitialRegulationDate())) {
        facility.setInitialRegulationDate(initialRegulationDate);
        facility = facilityDao.save(facility);
    }
    return facility;
}
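The null guard before equals() above avoids a NullPointerException on the new value. If a null-safe comparison of both sides is wanted, java.util.Objects.equals can express it; a minimal sketch with hypothetical names (note it behaves differently from the guard above when the new value is null):

import java.util.Date;
import java.util.Objects;

final class DateChangeCheck {
    private DateChangeCheck() {
    }

    // True when the new value differs from the stored one; null-safe on both sides.
    // Unlike the guard above (which ignores a null newValue), this also reports a
    // transition to null as a change.
    static boolean hasChanged(Date newValue, Date storedValue) {
        return !Objects.equals(newValue, storedValue);
    }
}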
From source file:org.openmrs.module.reporting.dataset.definition.evaluator.EncounterAndObsDataSetEvaluatorTest.java
/**
 * @see {@link EncounterAndObsDataSetEvaluator#evaluate(DataSetDefinition,EvaluationContext)}
 */
@Test
@Verifies(value = "should evaluate an EncounterAndObsDataSetDefinition with filter for which Encounters and Quantity", method = "evaluate(DataSetDefinition,EvaluationContext)")
public void evaluate_shouldEvaluateAnEncounterAndObsDataSetDefinitionWithWhichEncountersAndQuantity()
        throws Exception {
    executeDataSet(XML_ENCOUNTER_DATASET);

    EncounterAndObsDataSetDefinition d = new EncounterAndObsDataSetDefinition();

    EncounterType adultInitial = new EncounterType(10);
    EncounterType adultReturn = new EncounterType(11);
    EncounterType childSpecial = new EncounterType(13);
    List<EncounterType> encounterTypes = new ArrayList<EncounterType>();
    encounterTypes.add(adultInitial);
    encounterTypes.add(adultReturn);
    encounterTypes.add(childSpecial);
    d.setEncounterTypes(encounterTypes);

    d.setColumnDisplayFormat(
            Collections.singletonList(EncounterAndObsDataSetDefinition.ColumnDisplayFormat.ID));
    d.setWhichEncounterQualifier(TimeQualifier.LAST);
    d.setQuantity(2);

    SimpleDataSet result = (SimpleDataSet) Context.getService(DataSetDefinitionService.class).evaluate(d, null);

    // Basic Dimensions of Rows and Columns
    Assert.assertEquals(5, result.getRows().size()); // test Quantity
    Assert.assertEquals(17, result.getMetaData().getColumnCount()); // 5 (standard) + 4 (obs) * 3 (value, date, group)

    SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");

    Integer lastPatientId = null;
    Date lastEncounterDatetime = null;
    for (DataSetRow row : result.getRows()) {
        Integer patientId = (Integer) row.getColumnValue("INTERNAL_PATIENT_ID");
        Date encounterDatetime = sdf.parse(row.getColumnValue("ENCOUNTER_DATETIME").toString());

        // test WhichEncounter
        if (lastPatientId != null) {
            if (lastPatientId == patientId) {
                Assert.assertTrue(encounterDatetime.before(lastEncounterDatetime)
                        || encounterDatetime.equals(lastEncounterDatetime));
            } else {
                Assert.assertTrue(patientId > (lastPatientId) || patientId == lastPatientId);
            }
        }

        lastPatientId = patientId;
        lastEncounterDatetime = encounterDatetime;
    }
}