List of usage examples for java.util SortedMap entrySet
Set<Map.Entry<K, V>> entrySet();
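Before the project examples below, a minimal, self-contained sketch (not taken from any of the projects listed here) of the basic contract: on a SortedMap, entrySet() returns a set whose iterator walks the entries in ascending key order — natural ordering, or the order of the Comparator the map was built with. The class name and sample data are invented for illustration.

import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;

public class SortedMapEntrySetDemo {
    public static void main(String[] args) {
        // TreeMap is the standard SortedMap implementation; keys are kept in natural order.
        SortedMap<String, Integer> wordCounts = new TreeMap<String, Integer>();
        wordCounts.put("banana", 2);
        wordCounts.put("apple", 5);
        wordCounts.put("cherry", 1);

        // entrySet() iterates in ascending key order: apple, banana, cherry.
        for (Map.Entry<String, Integer> entry : wordCounts.entrySet()) {
            System.out.println(entry.getKey() + " -> " + entry.getValue());
        }
    }
}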
From source file:org.apache.accumulo.server.tabletserver.Tablet.java
private Map<FileRef, Pair<Key, Key>> getFirstAndLastKeys(SortedMap<FileRef, DataFileValue> files) throws IOException {
    FileOperations fileFactory = FileOperations.getInstance();
    Map<FileRef, Pair<Key, Key>> falks = new HashMap<FileRef, Pair<Key, Key>>();
    for (Entry<FileRef, DataFileValue> entry : files.entrySet()) {
        FileRef file = entry.getKey();
        FileSystem ns = fs.getFileSystemByPath(file.path());
        FileSKVIterator openReader = fileFactory.openReader(file.path().toString(), true, ns, ns.getConf(), acuTableConf);
        try {
            Key first = openReader.getFirstKey();
            Key last = openReader.getLastKey();
            falks.put(file, new Pair<Key, Key>(first, last));
        } finally {
            openReader.close();
        }
    }
    return falks;
}
From source file:org.apache.accumulo.tserver.Tablet.java
private Map<FileRef, Pair<Key, Key>> getFirstAndLastKeys(SortedMap<FileRef, DataFileValue> allFiles) throws IOException {
    Map<FileRef, Pair<Key, Key>> result = new HashMap<FileRef, Pair<Key, Key>>();
    FileOperations fileFactory = FileOperations.getInstance();
    for (Entry<FileRef, DataFileValue> entry : allFiles.entrySet()) {
        FileRef file = entry.getKey();
        FileSystem ns = fs.getFileSystemByPath(file.path());
        FileSKVIterator openReader = fileFactory.openReader(file.path().toString(), true, ns, ns.getConf(), this.getTableConfiguration());
        try {
            Key first = openReader.getFirstKey();
            Key last = openReader.getLastKey();
            result.put(file, new Pair<Key, Key>(first, last));
        } finally {
            openReader.close();
        }
    }
    return result;
}
From source file:org.slc.sli.dashboard.manager.impl.StudentProgressManagerImpl.java
@Override
@SuppressWarnings("unchecked")
@LogExecutionTime
public GenericEntity getTranscript(String token, Object studentIdObj, Config.Data config) {
    SortedMap<GenericEntity, List<GenericEntity>> transcripts = new TreeMap<GenericEntity, List<GenericEntity>>(
            new SessionComparator());
    String studentId = studentIdObj.toString();
    List<String> optionalFields = new LinkedList<String>();
    optionalFields.add(Constants.ATTR_TRANSCRIPT);
    GenericEntity studentWithTranscript = entityManager.getStudentWithOptionalFields(token, studentId,
            optionalFields);
    if (studentWithTranscript == null) {
        return new GenericEntity();
    }

    Map<String, Object> studentTranscript = (Map<String, Object>) studentWithTranscript
            .get(Constants.ATTR_TRANSCRIPT);
    if (studentTranscript == null) {
        return new GenericEntity();
    }

    List<Map<String, Object>> courseTranscripts = (List<Map<String, Object>>) studentTranscript
            .get(Constants.ATTR_COURSE_TRANSCRIPTS);
    List<Map<String, Object>> studentSectionAssociations = (List<Map<String, Object>>) studentTranscript
            .get(Constants.ATTR_STUDENT_SECTION_ASSOC);
    if (studentSectionAssociations == null || courseTranscripts == null) {
        return new GenericEntity();
    }

    Map<String, GenericEntity> cache = new HashMap<String, GenericEntity>();
    cacheStudent(studentId, token, studentSectionAssociations, cache, courseTranscripts);

    for (Map<String, Object> studentSectionAssociation : studentSectionAssociations) {
        Map<String, Object> courseTranscript = getCourseTranscriptForSection(studentSectionAssociation,
                courseTranscripts);
        // skip this course if we can't find previous info
        if (courseTranscript == null) {
            continue;
        }

        Map<String, Object> section = getGenericEntity(studentSectionAssociation, Constants.ATTR_SECTIONS);
        Map<String, Object> course = getGenericEntity(section, Constants.ATTR_COURSES);
        Map<String, Object> session = getGenericEntity(section, Constants.ATTR_SESSIONS);

        GenericEntity term = new GenericEntity();
        term.put(Constants.ATTR_TERM, getValue(session, Constants.ATTR_TERM));
        term.put(Constants.ATTR_GRADE_LEVEL, getValue(courseTranscript, Constants.ATTR_GRADE_LEVEL_WHEN_TAKEN));
        term.put(Constants.ATTR_SCHOOL, getSchoolName(section, token, cache));
        term.put(Constants.ATTR_SCHOOL_YEAR, getValue(session, Constants.ATTR_SCHOOL_YEAR));
        term.put(Constants.ATTR_CUMULATIVE_GPA, getGPA(session, studentId, token, cache));
        term.put(Constants.ATTR_SESSION_BEGIN_DATE, getValue(session, Constants.ATTR_SESSION_BEGIN_DATE));
        GenericEntityEnhancer.convertGradeLevel(term, Constants.ATTR_GRADE_LEVEL);

        // This isn't a new term
        if (transcripts.containsKey(term)) {
            List<GenericEntity> courses = transcripts.get(term);
            GenericEntity courseData = getCourseData(courseTranscript, course);
            courses.add(courseData);
        } else {
            // this is the first time the term has been encountered
            List<GenericEntity> courses = new ArrayList<GenericEntity>();
            GenericEntity courseData = getCourseData(courseTranscript, course);
            courses.add(courseData);
            transcripts.put(term, courses);
        }
    }

    List<GenericEntity> transcriptData = new ArrayList<GenericEntity>();
    for (Map.Entry<GenericEntity, List<GenericEntity>> entry : transcripts.entrySet()) {
        GenericEntity term = new GenericEntity();
        term.putAll(entry.getKey());
        term.put(Constants.ATTR_COURSES, entry.getValue());
        transcriptData.add(term);
    }

    GenericEntity ret = new GenericEntity();
    ret.put(TRANSCRIPT_HISTORY, transcriptData);
    return ret;
}
From source file:NavigableMap.java
/**
 * Streamlined bulk insertion to initialize from elements of given sorted map.
 * Call only from constructor or clone method.
 */
private void buildFromSorted(SortedMap<K, ? extends V> map) {
    if (map == null)
        throw new NullPointerException();

    HeadIndex<K, V> h = head;
    Node<K, V> basepred = h.node;

    // Track the current rightmost node at each level. Uses an
    // ArrayList to avoid committing to initial or maximum level.
    ArrayList<Index<K, V>> preds = new ArrayList<Index<K, V>>();

    // initialize
    for (int i = 0; i <= h.level; ++i)
        preds.add(null);
    Index<K, V> q = h;
    for (int i = h.level; i > 0; --i) {
        preds.set(i, q);
        q = q.down;
    }

    Iterator<? extends Map.Entry<? extends K, ? extends V>> it = map.entrySet().iterator();
    while (it.hasNext()) {
        Map.Entry<? extends K, ? extends V> e = it.next();
        int j = randomLevel();
        if (j > h.level)
            j = h.level + 1;
        K k = e.getKey();
        V v = e.getValue();
        if (k == null || v == null)
            throw new NullPointerException();
        Node<K, V> z = new Node<K, V>(k, v, null);
        basepred.next = z;
        basepred = z;
        if (j > 0) {
            Index<K, V> idx = null;
            for (int i = 1; i <= j; ++i) {
                idx = new Index<K, V>(z, idx, null);
                if (i > h.level)
                    h = new HeadIndex<K, V>(h.node, h, idx, i);
                if (i < preds.size()) {
                    preds.get(i).right = idx;
                    preds.set(i, idx);
                } else
                    preds.add(idx);
            }
        }
    }
    head = h;
}
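A side note on the example above: buildFromSorted can link nodes in a single left-to-right pass only because entrySet() of a SortedMap is guaranteed to iterate in ascending key order. A minimal, standalone sketch of the same idea (the class and sample data are invented for illustration and are not part of NavigableMap.java):

import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;

public class SortedBulkLoadSketch {
    public static void main(String[] args) {
        SortedMap<String, Integer> source = new TreeMap<String, Integer>();
        source.put("b", 2);
        source.put("a", 1);
        source.put("c", 3);

        // Because entrySet() is already sorted, one pass yields parallel arrays
        // that are immediately usable for binary search - no extra sort needed.
        String[] keys = new String[source.size()];
        int[] values = new int[source.size()];
        int i = 0;
        for (Map.Entry<String, Integer> e : source.entrySet()) {
            keys[i] = e.getKey();      // keys land already sorted: a, b, c
            values[i] = e.getValue();
            i++;
        }
        System.out.println(java.util.Arrays.toString(keys));
    }
}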
From source file:org.apache.nifi.provenance.MiNiFiPersistentProvenanceRepository.java
/**
 * <p>
 * Merges all of the given Journal Files into a single, merged Provenance Event Log File. As these records are merged,
 * they will be compressed, if the repository is configured to compress records.
 * </p>
 * <p>
 * If the repository is configured to compress the data, the file written to may not be the same as the
 * <code>suggestedMergeFile</code>, as a filename extension of '.gz' may be appended. If the journals are successfully
 * merged, the file that they were merged into will be returned. If unable to merge the records (for instance, because
 * the repository has been closed or because the list of journal files was empty), this method will return
 * <code>null</code>.
 * </p>
 *
 * @param journalFiles the journal files to merge
 * @param suggestedMergeFile the file to write the merged records to
 * @param eventReporter the event reporter to report any warnings or errors to; may be null.
 * @return the file that the given journals were merged into, or <code>null</code> if no records were merged.
 * @throws IOException if a problem occurs writing to the mergedFile, reading from a journal
 */
File mergeJournals(final List<File> journalFiles, final File suggestedMergeFile,
        final EventReporter eventReporter) throws IOException {
    if (this.closed.get()) {
        logger.info("Provenance Repository has been closed; will not merge journal files to {}",
                suggestedMergeFile);
        return null;
    }

    if (journalFiles.isEmpty()) {
        logger.debug("Couldn't merge journals: Journal Files is empty; won't merge journals");
        return null;
    }

    Collections.sort(journalFiles, new Comparator<File>() {
        @Override
        public int compare(final File o1, final File o2) {
            final String suffix1 = StringUtils.substringAfterLast(o1.getName(), ".");
            final String suffix2 = StringUtils.substringAfterLast(o2.getName(), ".");

            try {
                final int journalIndex1 = Integer.parseInt(suffix1);
                final int journalIndex2 = Integer.parseInt(suffix2);
                return Integer.compare(journalIndex1, journalIndex2);
            } catch (final NumberFormatException nfe) {
                return o1.getName().compareTo(o2.getName());
            }
        }
    });

    final String firstJournalFile = journalFiles.get(0).getName();
    final String firstFileSuffix = StringUtils.substringAfterLast(firstJournalFile, ".");
    final boolean allPartialFiles = firstFileSuffix.equals("0");

    // check if we have all of the "partial" files for the journal.
    if (allPartialFiles) {
        if (suggestedMergeFile.exists()) {
            // we have all "partial" files and there is already a merged file. Delete the data from the index
            // because the merge file may not be fully merged. We will re-merge.
            logger.warn("Merged Journal File {} already exists; however, all partial journal files also exist "
                    + "so assuming that the merge did not finish. Repeating procedure in order to ensure consistency.");

            // Since we only store the file's basename, block offset, and event ID, and because the newly created file could end up on
            // a different Storage Directory than the original, we need to ensure that we delete both the partially merged
            // file and the TOC file. Otherwise, we could get the wrong copy and have issues retrieving events.
            if (!suggestedMergeFile.delete()) {
                logger.error("Failed to delete partially written Provenance Journal File {}. This may result in events from this journal "
                        + "file not being able to be displayed. This file should be deleted manually.", suggestedMergeFile);
            }

            final File tocFile = TocUtil.getTocFile(suggestedMergeFile);
            if (tocFile.exists() && !tocFile.delete()) {
                logger.error("Failed to delete .toc file {}; this may result in not being able to read the Provenance Events from the {} Journal File. "
                        + "This can be corrected by manually deleting the {} file", tocFile, suggestedMergeFile, tocFile);
            }
        }
    } else {
        logger.warn("Cannot merge journal files {} because expected first file to end with extension '.0' "
                + "but it did not; assuming that the files were already merged but only some finished deletion "
                + "before restart. Deleting remaining partial journal files.", journalFiles);

        for (final File file : journalFiles) {
            if (!file.delete() && file.exists()) {
                logger.warn("Failed to delete unneeded journal file {}; this file should be cleaned up manually", file);
            }
        }

        return null;
    }

    final long startNanos = System.nanoTime();

    // Map each journal to a RecordReader
    final List<RecordReader> readers = new ArrayList<>();
    int records = 0;

    final boolean isCompress = configuration.isCompressOnRollover();
    final File writerFile = isCompress
            ? new File(suggestedMergeFile.getParentFile(), suggestedMergeFile.getName() + ".gz")
            : suggestedMergeFile;

    try {
        for (final File journalFile : journalFiles) {
            try {
                // Use MAX_VALUE for number of chars because we don't want to truncate the value as we write it
                // out. This allows us to later decide that we want more characters and still be able to retrieve
                // the entire event.
                readers.add(RecordReaders.newRecordReader(journalFile, null, Integer.MAX_VALUE));
            } catch (final EOFException eof) {
                // there's nothing here. Skip over it.
            } catch (final IOException ioe) {
                logger.warn("Unable to merge {} with other Journal Files due to {}", journalFile, ioe.toString());
                if (logger.isDebugEnabled()) {
                    logger.warn("", ioe);
                }

                if (eventReporter != null) {
                    eventReporter.reportEvent(Severity.ERROR, EVENT_CATEGORY, "re " + ioe.toString());
                }
            }
        }

        // Create a Map so that the key is the next record available from a reader and the value is the Reader from which
        // the record came. This sorted map is then used so that we are able to always get the first entry, which is the next
        // lowest record id
        final SortedMap<StandardProvenanceEventRecord, RecordReader> recordToReaderMap = new TreeMap<>(
                new Comparator<StandardProvenanceEventRecord>() {
                    @Override
                    public int compare(final StandardProvenanceEventRecord o1, final StandardProvenanceEventRecord o2) {
                        return Long.compare(o1.getEventId(), o2.getEventId());
                    }
                });

        long minEventId = 0L;
        long earliestTimestamp = System.currentTimeMillis();
        for (final RecordReader reader : readers) {
            StandardProvenanceEventRecord record = null;

            try {
                record = reader.nextRecord();
            } catch (final EOFException eof) {
            } catch (final Exception e) {
                logger.warn("Failed to generate Provenance Event Record from Journal due to " + e
                        + "; it's possible that the record wasn't completely written to the file. This record will be skipped.");
                if (logger.isDebugEnabled()) {
                    logger.warn("", e);
                }

                if (eventReporter != null) {
                    eventReporter.reportEvent(Severity.WARNING, EVENT_CATEGORY,
                            "Failed to read Provenance Event Record from Journal due to " + e
                                    + "; it's possible that the record wasn't completely written to the file. This record will be skipped.");
                }
            }

            if (record == null) {
                continue;
            }

            if (record.getEventTime() < earliestTimestamp) {
                earliestTimestamp = record.getEventTime();
            }

            if (record.getEventId() < minEventId) {
                minEventId = record.getEventId();
            }

            recordToReaderMap.put(record, reader);
        }

        // loop over each entry in the map, persisting the records to the merged file in order, and populating the map
        // with the next entry from the journal file from which the previous record was written.
        try (final RecordWriter writer = RecordWriters.newSchemaRecordWriter(writerFile,
                configuration.isCompressOnRollover(), true)) {
            writer.writeHeader(minEventId);

            while (!recordToReaderMap.isEmpty()) {
                final Map.Entry<StandardProvenanceEventRecord, RecordReader> entry = recordToReaderMap
                        .entrySet().iterator().next();
                final StandardProvenanceEventRecord record = entry.getKey();
                final RecordReader reader = entry.getValue();

                writer.writeRecord(record, record.getEventId());
                final int blockIndex = writer.getTocWriter().getCurrentBlockIndex();

                records++;

                // Remove this entry from the map
                recordToReaderMap.remove(record);

                // Get the next entry from this reader and add it to the map
                StandardProvenanceEventRecord nextRecord = null;

                try {
                    nextRecord = reader.nextRecord();
                } catch (final EOFException eof) {
                }

                if (nextRecord != null) {
                    recordToReaderMap.put(nextRecord, reader);
                }
            }
        }
    } finally {
        for (final RecordReader reader : readers) {
            try {
                reader.close();
            } catch (final IOException ioe) {
            }
        }
    }

    // Success. Remove all of the journal files, as they're no longer needed, now that they've been merged.
    for (final File journalFile : journalFiles) {
        if (!journalFile.delete() && journalFile.exists()) {
            logger.warn("Failed to remove temporary journal file {}; this file should be cleaned up manually",
                    journalFile.getAbsolutePath());

            if (eventReporter != null) {
                eventReporter.reportEvent(Severity.WARNING, EVENT_CATEGORY,
                        "Failed to remove temporary journal file " + journalFile.getAbsolutePath()
                                + "; this file should be cleaned up manually");
            }
        }

        final File tocFile = getTocFile(journalFile);
        if (!tocFile.delete() && tocFile.exists()) {
            logger.warn("Failed to remove temporary journal TOC file {}; this file should be cleaned up manually",
                    tocFile.getAbsolutePath());

            if (eventReporter != null) {
                eventReporter.reportEvent(Severity.WARNING, EVENT_CATEGORY,
                        "Failed to remove temporary journal TOC file " + tocFile.getAbsolutePath()
                                + "; this file should be cleaned up manually");
            }
        }
    }

    if (records == 0) {
        writerFile.delete();
        logger.debug("Couldn't merge journals: No Records to merge");
        return null;
    } else {
        final long nanos = System.nanoTime() - startNanos;
        final long millis = TimeUnit.MILLISECONDS.convert(nanos, TimeUnit.NANOSECONDS);
        logger.info("Successfully merged {} journal files ({} records) into single Provenance Log File {} in {} milliseconds",
                journalFiles.size(), records, suggestedMergeFile, millis);
    }

    return writerFile;
}
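The NiFi example above is an instance of a k-way merge: the TreeMap is keyed by the next available record from each reader, so recordToReaderMap.entrySet().iterator().next() always hands back the reader holding the globally smallest event id. Below is a stripped-down, hypothetical sketch of that pattern; the class name, method name, and Long element type are invented for illustration, and, like the original (which relies on unique event ids), it assumes heads from different sources never collide, since a duplicate key would overwrite an existing entry.

import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;

public class SortedMergeSketch {
    public static void mergeInOrder(List<Iterator<Long>> sources) {
        SortedMap<Long, Iterator<Long>> headToSource = new TreeMap<Long, Iterator<Long>>();

        // Seed the map with the first element of every source.
        for (Iterator<Long> source : sources) {
            if (source.hasNext()) {
                headToSource.put(source.next(), source);
            }
        }

        while (!headToSource.isEmpty()) {
            // The first entry of the entry set is the globally smallest head.
            Map.Entry<Long, Iterator<Long>> smallest = headToSource.entrySet().iterator().next();
            Long value = smallest.getKey();
            Iterator<Long> source = smallest.getValue();

            System.out.println(value); // "emit" the merged element in order

            // Remove the consumed head and refill from the same source.
            headToSource.remove(value);
            if (source.hasNext()) {
                headToSource.put(source.next(), source);
            }
        }
    }
}

A java.util.PriorityQueue of (head, source) pairs would serve the same purpose; the SortedMap variant is convenient when the element itself is the sort key and duplicates cannot occur.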
From source file:org.apache.accumulo.server.tabletserver.ScanRunState.java
public static Pair<Text, KeyExtent> verifyTabletInformation(KeyExtent extent, TServerInstance instance,
        SortedMap<Key, Value> tabletsKeyValues, String clientAddress, ZooLock lock)
        throws AccumuloSecurityException, DistributedStoreException, AccumuloException {

    log.debug("verifying extent " + extent);
    if (extent.isRootTablet()) {
        return verifyRootTablet(extent, instance);
    }
    String tableToVerify = MetadataTable.ID;
    if (extent.isMeta())
        tableToVerify = RootTable.ID;

    List<ColumnFQ> columnsToFetch = Arrays
            .asList(new ColumnFQ[] { TabletsSection.ServerColumnFamily.DIRECTORY_COLUMN,
                    TabletsSection.TabletColumnFamily.PREV_ROW_COLUMN,
                    TabletsSection.TabletColumnFamily.SPLIT_RATIO_COLUMN,
                    TabletsSection.TabletColumnFamily.OLD_PREV_ROW_COLUMN,
                    TabletsSection.ServerColumnFamily.TIME_COLUMN });

    ScannerImpl scanner = new ScannerImpl(HdfsZooInstance.getInstance(), SystemCredentials.get(),
            tableToVerify, Authorizations.EMPTY);
    scanner.setRange(extent.toMetadataRange());

    TreeMap<Key, Value> tkv = new TreeMap<Key, Value>();
    for (Entry<Key, Value> entry : scanner)
        tkv.put(entry.getKey(), entry.getValue());

    // only populate map after success
    if (tabletsKeyValues == null) {
        tabletsKeyValues = tkv;
    } else {
        tabletsKeyValues.clear();
        tabletsKeyValues.putAll(tkv);
    }

    Text metadataEntry = extent.getMetadataEntry();

    Value dir = checkTabletMetadata(extent, instance, tabletsKeyValues, metadataEntry);
    if (dir == null)
        return null;

    Value oldPrevEndRow = null;
    for (Entry<Key, Value> entry : tabletsKeyValues.entrySet()) {
        if (TabletsSection.TabletColumnFamily.OLD_PREV_ROW_COLUMN.hasColumns(entry.getKey())) {
            oldPrevEndRow = entry.getValue();
        }
    }

    if (oldPrevEndRow != null) {
        SortedMap<Text, SortedMap<ColumnFQ, Value>> tabletEntries;
        tabletEntries = MetadataTableUtil.getTabletEntries(tabletsKeyValues, columnsToFetch);

        KeyExtent fke;
        try {
            fke = MetadataTableUtil.fixSplit(metadataEntry, tabletEntries.get(metadataEntry), instance,
                    SystemCredentials.get(), lock);
        } catch (IOException e) {
            log.error("Error fixing split " + metadataEntry);
            throw new AccumuloException(e.toString());
        }

        if (!fke.equals(extent)) {
            return new Pair<Text, KeyExtent>(null, fke);
        }

        // reread and reverify metadata entries now that metadata entries were fixed
        tabletsKeyValues.clear();
        return verifyTabletInformation(fke, instance, tabletsKeyValues, clientAddress, lock);
    }

    return new Pair<Text, KeyExtent>(new Text(dir.get()), null);
}
From source file:com.aurel.track.item.history.HistorySaverBL.java
/**
 * Builds the trail text for history
 * @param fieldChanges Map with FieldChange values
 * @param longFields the fields with longer texts (description, comment). This will be added at the end of the trail text
 * @param locale
 * @param isNew whether creating a new issue (isCreate || isCopy) or editing an existing one
 * @param newLineString
 *
 * @return
 */
private static boolean persistHistory(SortedMap<Integer, FieldChange> fieldChanges,
        AfterItemSaveEventParam afterItemSaveEventParam, Integer personID, List<Integer> longFields,
        Locale locale, boolean isCreate, boolean isCopy, Integer fieldChangeID) {
    SortedMap<Integer, FieldChange> historyLongTextMap = new TreeMap<Integer, FieldChange>(); //maintain order
    TWorkItemBean workItemBeanNew = afterItemSaveEventParam.getWorkItemNew();
    TWorkItemBean workItemBeanOld = afterItemSaveEventParam.getWorkItemOld();
    boolean needHistoryTransaction = false;
    if (isCreate || isCopy) {
        //need first status in history
        needHistoryTransaction = true;
    }
    Map<Integer, TFieldChangeBean> lastHistoryFieldChangesMap = null;
    if (!needHistoryTransaction && fieldChanges != null) {
        //gather the fields with explicit history
        List<Integer> explicitHistoryFields = new LinkedList<Integer>();
        int minutes = GeneralSettings.getHistoryAndEmailDelay();
        for (FieldChange fieldChange : fieldChanges.values()) {
            if (fieldChange.isChanged()) {
                needHistoryTransaction = true;
                if (minutes == 0 || minutes < 0) {
                    //no need to handle recent history changes
                    break;
                }
                Integer fieldID = fieldChange.getFieldID();
                if (fieldChange.isExplicitHistory() && !SystemFields.INTEGER_STATE.equals(fieldID)
                        && !SystemFields.INTEGER_COMMENT.equals(fieldID)) {
                    //the status field, although hardcoded to have explicit history, is an exception from the rule
                    //(the status change will be added to the history even if the last status change happened within x minutes)
                    //the comment should be added in the history anyway
                    explicitHistoryFields.add(fieldChange.getFieldID());
                }
            }
        }
        if (!explicitHistoryFields.isEmpty()) {
            Date targetTime = new Date(); //now
            targetTime = DateUtils.addMinutes(targetTime, -minutes);
            Map<Integer, THistoryTransactionBean> lastHistoryTransactionsMap = GeneralUtils.createMapFromList(
                    HistoryTransactionBL.loadByItemAndFieldsSince(workItemBeanNew.getObjectID(),
                            explicitHistoryFields, targetTime));
            List<TFieldChangeBean> lastFieldChanges = FieldChangeBL
                    .loadByItemAndFieldsSince(workItemBeanNew.getObjectID(), explicitHistoryFields, targetTime);
            lastHistoryFieldChangesMap = new HashMap<Integer, TFieldChangeBean>();
            for (TFieldChangeBean fieldChangeBean : lastFieldChanges) {
                Integer transactionID = fieldChangeBean.getHistoryTransaction();
                Integer fieldID = fieldChangeBean.getFieldKey();
                THistoryTransactionBean historyTransactionBean = lastHistoryTransactionsMap.get(transactionID);
                if (historyTransactionBean != null) {
                    //only the first found
                    Integer changedByPersonID = historyTransactionBean.getChangedByID();
                    if (personID.equals(changedByPersonID) && lastHistoryFieldChangesMap.get(fieldID) == null) {
                        lastHistoryFieldChangesMap.put(fieldID, fieldChangeBean);
                    }
                    explicitHistoryFields.remove(fieldID);
                    if (explicitHistoryFields.isEmpty()) {
                        break;
                    }
                }
            }
        }
    }
    boolean mightTriggerEmail = false;
    if (!needHistoryTransaction) {
        return false;
    }
    //Integer historyTransactionID = HistoryTransactionBL.saveHistoryTransaction(workItemBeanNew.getObjectID(), personID, new Date(), null);
    if (isCreate || isCopy) {
        //add the first status change history entry if not deep copy
        if (!workItemBeanNew.isDeepCopy()) {
            //with deep copy the status changes will be copied also, no need for first status change in history
            //set null for workItemBeanOld parameter (by create is null anyway) because otherwise the
            //values are the same and will not be saved
            Integer statusTransactionID = HistoryTransactionBL
                    .saveHistoryTransaction(workItemBeanNew.getObjectID(), personID, new Date(), null);
            saveExplicitField(SystemFields.INTEGER_STATE, statusTransactionID, workItemBeanNew, null, null);
        }
        mightTriggerEmail = true;
    }
    StringBuilder compoundTextNewBuffer = new StringBuilder();
    StringBuilder compoundTextOldBuffer = new StringBuilder();
    if (isCopy) {
        Object[] msgArguments = null;
        String messageKey = null;
        if (ApplicationBean.getInstance().getSiteBean().getProjectSpecificIDsOn()) {
            String projectSpecificID = SystemProjectSpecificIssueNoRT
                    .getShowValue(workItemBeanOld.getIDNumber(), workItemBeanOld);
            msgArguments = new Object[] { projectSpecificID };
            messageKey = "item.history.copyMessageProjectSpecificID";
        } else {
            msgArguments = new Object[] { workItemBeanOld.getObjectID() };
            messageKey = "item.history.copyMessage";
        }
        compoundTextNewBuffer.append(LocalizeUtil.getParametrizedString(messageKey, msgArguments, locale));
    }
    Set<Integer> attachmentHistoryFields = HistoryLoaderBL.getAttachmentHistoryFields();
    Integer historyTransactionID = null;
    for (Map.Entry<Integer, FieldChange> entry : fieldChanges.entrySet()) {
        FieldChange fieldChange = (FieldChange) entry.getValue();
        Integer fieldID = fieldChange.getFieldID();
        String fieldLabel = fieldChange.getLocalizedFieldLabel();
        String newValue = fieldChange.getNewShowValue();
        String oldValue = fieldChange.getOldShowValue();
        //For history text we are interested in:
        //1. all field changes for existing issues
        //2. "Comment" for new issues
        //For that the fieldChange.isChanged() should be set accordingly already
        if (!fieldChange.isChanged()) {
            continue;
        }
        if (attachmentHistoryFields.contains(fieldID)) {
            Integer attachmentChangeTransactionID = HistoryTransactionBL
                    .saveHistoryTransaction(workItemBeanNew.getObjectID(), personID, new Date(), null);
            insertFieldChange(attachmentChangeTransactionID, fieldID, newValue, oldValue);
            mightTriggerEmail = true;
            continue;
        }
        if (fieldChange.isExplicitHistory() || SystemFields.INTEGER_COMMENT.equals(fieldID)) {
            TFieldChangeBean fieldChangeBean = null;
            boolean isCommentChange = false;
            if (fieldChangeID == null) {
                if (lastHistoryFieldChangesMap != null) {
                    fieldChangeBean = lastHistoryFieldChangesMap.get(fieldID);
                }
                if (fieldChangeBean == null) {
                    //no previous entry within x minutes
                    mightTriggerEmail = true;
                }
            } else {
                isCommentChange = true;
                fieldChangeBean = FieldChangeBL.loadByPrimaryKey(fieldChangeID);
                mightTriggerEmail = true;
            }
            if (historyTransactionID == null && !isCommentChange) {
                historyTransactionID = HistoryTransactionBL
                        .saveHistoryTransaction(workItemBeanNew.getObjectID(), personID, new Date(), null);
            }
            saveExplicitField(fieldID, historyTransactionID, workItemBeanNew, workItemBeanOld, fieldChangeBean);
            //the comment is saved anyway explicitly in the history as Comment field
            //even if explicit history is not configured.
            //Explicit history for comment means whether to historize the comment changes (edit and delete).
            //The field set into the workitemContext is COMMENT also for edit and delete comment
            //(instead of COMMENT_DELETE_HISTORY_FIELD or COMMENT_MODIFY_HISTORY_FIELD) comment because
            //we need the explicit history flag which is set only for COMMENT field (the other two are only pseudo fields)
            if (fieldChange.isExplicitHistory() && SystemFields.INTEGER_COMMENT.equals(fieldID)) {
                if (oldValue != null && !"".equals(oldValue)) {
                    //history only if the comment is edited or deleted
                    Integer commentChangeTransactionID = HistoryTransactionBL
                            .saveHistoryTransaction(workItemBeanNew.getObjectID(), personID, new Date(), null);
                    if (newValue == null || "".equals(newValue)) {
                        insertFieldChange(commentChangeTransactionID, SystemFields.COMMENT_DELETE_HISTORY_FIELD,
                                newValue, oldValue);
                    } else {
                        insertFieldChange(commentChangeTransactionID, SystemFields.COMMENT_MODIFY_HISTORY_FIELD,
                                newValue, oldValue);
                    }
                }
            }
        } else {
            //fields without explicit history
            if (longFields.contains(fieldID)) {
                //gather the changed long fields to add them at the end
                historyLongTextMap.put(fieldID, fieldChange);
                mightTriggerEmail = true;
            } else {
                if (newValue != null && !"".equals(newValue)) {
                    if (compoundTextNewBuffer.length() > 0) {
                        //some content already present
                        compoundTextNewBuffer.append(commonFieldsSeparator + lineBreak);
                    }
                    compoundTextNewBuffer.append(fieldLabel + fieldLabelSeparator + newValue);
                    mightTriggerEmail = true;
                }
                if (oldValue != null && !"".equals(oldValue)) {
                    if (compoundTextOldBuffer.length() > 0) {
                        //some content already present
                        compoundTextOldBuffer.append(commonFieldsSeparator + lineBreak);
                    }
                    compoundTextOldBuffer.append(fieldLabel + fieldLabelSeparator + oldValue);
                    mightTriggerEmail = true;
                }
            }
        }
    }
    //add the longText changes at the end
    //add the commonFieldsSeparator only after the last short field
    //after long fields (HTML text) it does not make sense (for ex. after a <p>)
    boolean firstLongField = true;
    for (Map.Entry<Integer, FieldChange> entry : historyLongTextMap.entrySet()) {
        FieldChange fieldChange = entry.getValue();
        if (fieldChange != null) {
            if (compoundTextNewBuffer.length() > 0) {
                //some content already present
                if (firstLongField) {
                    compoundTextNewBuffer.append(commonFieldsSeparator + lineBreak);
                } else {
                    compoundTextNewBuffer.append(lineBreak);
                }
            }
            if (compoundTextOldBuffer.length() > 0) {
                //some content already present
                if (firstLongField) {
                    compoundTextOldBuffer.append(commonFieldsSeparator + lineBreak);
                } else {
                    compoundTextOldBuffer.append(lineBreak);
                }
            }
            firstLongField = false;
            String fieldLabel = fieldChange.getLocalizedFieldLabel();
            String newShowValue = fieldChange.getNewShowValue();
            if (newShowValue != null && !"".equals(newShowValue)) {
                compoundTextNewBuffer.append(fieldLabel + fieldLabelSeparator + newShowValue);
            }
            String oldShowValue = fieldChange.getOldShowValue();
            if (oldShowValue != null && !"".equals(oldShowValue)) {
                compoundTextOldBuffer.append(fieldLabel + fieldLabelSeparator + oldShowValue);
            }
        }
    }
    saveCompoundField(historyTransactionID, workItemBeanNew.getObjectID(), personID,
            compoundTextNewBuffer.toString(), compoundTextOldBuffer.toString());
    return mightTriggerEmail;
}
From source file:com.google.gwt.emultest.java.util.TreeMapTest.java
public void testHeadMapLjava_lang_ObjectZL() {
    K[] keys = getSortedKeys();
    V[] values = getSortedValues();
    NavigableMap<K, V> map = createNavigableMap();
    for (int i = 0; i < keys.length; i++) {
        map.put(keys[i], values[i]);
    }

    // normal case
    SortedMap<K, V> subMap = map.headMap(keys[2], true);
    assertEquals(3, subMap.size());
    subMap = map.headMap(keys[3], true);
    assertEquals(4, subMap.size());
    for (int i = 0; i < 4; i++) {
        assertEquals(values[i], subMap.get(keys[i]));
    }
    subMap = map.headMap(keys[2], false);
    assertEquals(2, subMap.size());
    assertNull(subMap.get(keys[3]));

    // Exceptions
    assertEquals(0, map.headMap(keys[0], false).size());

    try {
        map.headMap(null, true);
        assertTrue("expected exception", useNullKey());
    } catch (NullPointerException e) {
        assertFalse("unexpected NPE", useNullKey());
    }
    try {
        map.headMap(null, false);
        assertTrue("expected exception", useNullKey());
    } catch (NullPointerException e) {
        assertFalse("unexpected NPE", useNullKey());
    }

    subMap = map.headMap(keys[2]);
    assertEquals(2, subMap.size());
    try {
        subMap.put(keys[2], values[2]);
        fail("should throw IllegalArgumentException");
    } catch (IllegalArgumentException expected) {
    }

    assertEquals(keys.length, map.size());
    subMap = map.headMap(keys[2], true);
    assertEquals(3, subMap.size());
    subMap.remove(keys[1]);
    assertFalse(subMap.containsKey(keys[1]));
    assertFalse(subMap.containsValue(values[1]));
    assertFalse(map.containsKey(keys[1]));
    assertFalse(map.containsValue(values[1]));
    assertEquals(2, subMap.size());
    assertEquals(keys.length - 1, map.size());

    subMap.put(keys[1], values[1]);

    try {
        subMap.subMap(keys[1], keys[3]);
        fail("should throw IllegalArgumentException");
    } catch (IllegalArgumentException expected) {
    }
    try {
        subMap.subMap(keys[3], keys[1]);
        fail("should throw IllegalArgumentException");
    } catch (IllegalArgumentException expected) {
    }

    if (useNullKey() && useNullValue()) {
        map.put(null, null);
        subMap = map.headMap(null, true);
        assertEquals(1, subMap.size());
        assertTrue(subMap.containsValue(null));
        assertNull(subMap.get(null));

        subMap = map.subMap(null, false, keys[2], true);
        assertEquals(3, subMap.size());

        Set<K> keySet = subMap.keySet();
        assertEquals(3, keySet.size());

        Set<Map.Entry<K, V>> entrySet = subMap.entrySet();
        assertEquals(3, entrySet.size());

        Collection<V> valueCollection = subMap.values();
        assertEquals(3, valueCollection.size());

        map.remove(null);
    }

    // head map of head map
    NavigableMap<K, V> headMap = map.headMap(keys[3], true);
    assertEquals(4, headMap.size());
    headMap = headMap.headMap(keys[3], false);
    assertEquals(3, headMap.size());
    headMap = headMap.headMap(keys[2], false);
    assertEquals(2, headMap.size());
    headMap = headMap.tailMap(keys[0], false);
    assertEquals(1, headMap.size());
    headMap = headMap.tailMap(keys[1], false);
    assertEquals(0, headMap.size());
}