List of usage examples for java.lang.Long.longValue()
@HotSpotIntrinsicCandidate public long longValue()

Returns the value of this Long as a long value.
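Before the examples from real projects below, a minimal self-contained sketch of what longValue() does, and of the boxed-comparison pitfall that makes the explicit call worthwhile:

public class LongValueBasics {
    public static void main(String[] args) {
        Long boxed = Long.valueOf(1000L);
        long primitive = boxed.longValue(); // explicit unboxing
        System.out.println(primitive); // 1000

        Long a = Long.valueOf(1000L);
        Long b = Long.valueOf(1000L);
        // == on two boxed Longs compares references; 1000 is outside the
        // guaranteed -128..127 cache, so this usually prints false
        System.out.println(a == b);
        // comparing the primitive values is always safe
        System.out.println(a.longValue() == b.longValue()); // true
    }
}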
From source file:org.openmeetings.app.data.conference.dao.RoomModeratorsDaoImpl.java
public void updateRoomModeratorByUserList(List<Map<String, Object>> roomModerators, Long roomId) {
    try {
        // get the list of RoomModerators before you add new ones
        List<RoomModerators> remoteRoomModeratorList = this.getRoomModeratorByRoomId(roomId);

        for (Iterator<Map<String, Object>> iter = roomModerators.iterator(); iter.hasNext();) {
            Map<String, Object> roomModeratorObj = iter.next();

            Long roomModeratorsId = Long.parseLong(roomModeratorObj.get("roomModeratorsId").toString());
            Long userId = Long.parseLong(roomModeratorObj.get("userId").toString());
            Boolean isSuperModerator = Boolean
                    .parseBoolean(roomModeratorObj.get("isSuperModerator").toString());

            if (roomModeratorsId == null || roomModeratorsId == 0) {
                Long newRoomModeratorId = this.addRoomModeratorByUserId(userManagement.getUserById(userId),
                        isSuperModerator, roomId);
                roomModeratorObj.put("roomModeratorsId", newRoomModeratorId);
            } else {
                this.updateRoomModeratorByUserId(roomModeratorsId, isSuperModerator);
            }
        }

        // check for items to delete
        List<RoomModerators> roomModeratorsToDelete = new LinkedList<RoomModerators>();
        if (remoteRoomModeratorList != null) {
            for (RoomModerators roomModerator : remoteRoomModeratorList) {
                boolean found = false;
                for (Iterator<Map<String, Object>> iter = roomModerators.iterator(); iter.hasNext();) {
                    Map<String, Object> roomModeratorObj = iter.next();
                    Long roomModeratorsId = Long.parseLong(roomModeratorObj.get("roomModeratorsId").toString());
                    if (roomModerator.getRoomModeratorsId() == roomModeratorsId.longValue()) {
                        found = true;
                        break;
                    }
                }
                if (!found) {
                    roomModeratorsToDelete.add(roomModerator);
                }
            }
        }

        for (RoomModerators rModerator : roomModeratorsToDelete) {
            System.out.println("Remove Map " + rModerator.getRoomModeratorsId());
            this.removeRoomModeratorByUserId(rModerator.getRoomModeratorsId());
        }
    } catch (Exception ex2) {
        log.error("[updateRoomModeratorByUserList] ", ex2);
        ex2.printStackTrace();
    }
}
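One subtlety in the listing above: Long.parseLong never returns null, so the roomModeratorsId == null branch is unreachable; a missing map entry would already have thrown inside toString(). A null-safe variant of the parse, sketched with a hypothetical map:

import java.util.HashMap;
import java.util.Map;

public class NullSafeParse {
    public static void main(String[] args) {
        Map<String, Object> roomModeratorObj = new HashMap<String, Object>();
        // "roomModeratorsId" is intentionally absent to exercise the guard
        Object raw = roomModeratorObj.get("roomModeratorsId");
        Long roomModeratorsId = (raw == null) ? null : Long.valueOf(raw.toString());
        if (roomModeratorsId == null || roomModeratorsId.longValue() == 0) {
            System.out.println("treat as a new moderator entry");
        }
    }
}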
From source file:com.baidu.cc.spring.ConfigCenterPropertyExtractorTest.java
/**
 * Tests the property extractor.
 *
 * @throws Exception
 */
@Test
public void testPropertyExtractor() throws Exception {
    ExtConfigServerService service = new ExtConfigServerService() {

        public Long getLastestConfigVersion(String user, String password, Long envId) {
            return null;
        }

        public Map<String, String> getLastestConfigItems(String user, String password, Long envId) {
            return null;
        }

        public Map<String, String> getConfigItems(String user, String password, Long version) {
            Map<String, String> ret = new HashMap<String, String>();
            ret.put("new", "value");
            return ret;
        }

        public String getConfigItemValue(String user, String password, Long version, String key) {
            return null;
        }

        public boolean checkVersionTag(String user, String password, Long version, String tag) {
            return false;
        }

        public void importConfigItems(String user, String password, Long version,
                Map<String, String> configItems) {
            Assert.assertEquals("xie", user);
            StandardPBEStringEncryptor encryptor = new StandardPBEStringEncryptor();
            encryptor.setPassword(ExtConfigServerService.class.getSimpleName());
            Assert.assertEquals("matthew", encryptor.decrypt(password));
            Assert.assertEquals(2, version.longValue());
            Assert.assertTrue(configItems.containsKey("name"));
            Assert.assertTrue(configItems.containsKey("age"));
            Assert.assertTrue(configItems.containsValue("100"));
            Assert.assertEquals(3, configItems.size());
        }

        public Long getLastestConfigVersion(String user, String password, String projectName, String envName) {
            return null;
        }

        public Long getVersionId(String user, String password, String versionName) {
            return null;
        }
    };

    ConfigCenterPropertyPlaceholderConfigurer extractor = new ConfigCenterPropertyPlaceholderConfigurer();
    ClassPathResource cpr = new ClassPathResource("/com/baidu/cc/spring/test.properties");
    extractor.setLocation(cpr);
    extractor.setConfigServerService(service);

    ConfigCenterPropertyPlaceholderConfigurer importer;
    importer = new ConfigCenterPropertyPlaceholderConfigurer();
    importer.setCcUser("xie");
    importer.setCcPassword("matthew");
    importer.setCcVersion(1);

    extractor.setCcUser("xie");
    extractor.setCcPassword("matthew");
    extractor.setCcVersion(1);

    importer.setConfigServerService(service);

    // initialize the importer
    ConfigCenterPropertyPlaceholderConfigurerTest test = new ConfigCenterPropertyPlaceholderConfigurerTest();
    importer.setApplicationContext(test.appContext);
    DefaultListableBeanFactory beanFactory = new DefaultListableBeanFactory();
    importer.postProcessBeanFactory(beanFactory);

    extractor.setApplicationContext(test.appContext);
    extractor.postProcessBeanFactory(beanFactory);

    ConfigCenterPropertyExtractor ccpe = new ConfigCenterPropertyExtractor();
    ccpe.setCcVersion(2);
    ccpe.setExtractor(extractor);
    ccpe.setImporter(importer);
    ccpe.afterPropertiesSet();
}
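A brief aside on the assertEquals(2, version.longValue()) call in the test above: unboxing to a primitive makes the long-based overload an unambiguous match, so the comparison is by value rather than by Object.equals. A minimal sketch, assuming JUnit 4 on the classpath:

import org.junit.Assert;

public class LongAssertSketch {
    public static void main(String[] args) {
        Long version = Long.valueOf(2L);
        // Passing the primitive selects Assert.assertEquals(long, long),
        // avoiding any Integer-vs-Long mismatch through the Object overload.
        Assert.assertEquals(2L, version.longValue());
    }
}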
From source file:com.cloud.consoleproxy.ConsoleProxyManagerImpl.java
@Override
public Pair<AfterScanAction, Object> scanPool(Long pool) {
    long dataCenterId = pool.longValue();

    ConsoleProxyLoadInfo proxyInfo = _zoneProxyCountMap.get(dataCenterId);
    if (proxyInfo == null) {
        return new Pair<AfterScanAction, Object>(AfterScanAction.nop, null);
    }

    ConsoleProxyLoadInfo vmInfo = _zoneVmCountMap.get(dataCenterId);
    if (vmInfo == null) {
        vmInfo = new ConsoleProxyLoadInfo();
    }

    if (!checkCapacity(proxyInfo, vmInfo)) {
        if (s_logger.isDebugEnabled()) {
            s_logger.debug("Expand console proxy standby capacity for zone " + proxyInfo.getName());
        }
        return new Pair<AfterScanAction, Object>(AfterScanAction.expand, null);
    }

    return new Pair<AfterScanAction, Object>(AfterScanAction.nop, null);
}
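Worth noting for the pool.longValue() call above: unboxing a null Long throws a NullPointerException, so this code assumes the caller always passes a zone id. A defensive sketch of the same step (the literal stands in for a value that might have been null):

import java.util.Objects;

public class UnboxGuard {
    public static void main(String[] args) {
        Long pool = Long.valueOf(1L);
        // Fails fast with a clear message instead of a bare
        // NullPointerException raised by the unboxing itself.
        long dataCenterId = Objects.requireNonNull(pool, "pool must not be null").longValue();
        System.out.println(dataCenterId);
    }
}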
From source file:com.cloud.consoleproxy.ConsoleProxyManagerImpl.java
@Override
public void expandPool(Long pool, Object actionArgs) {
    long dataCenterId = pool.longValue();
    allocCapacity(dataCenterId);
}
From source file:com.liferay.portal.events.ServicePreActionExt.java
protected List<Layout> mergeAdditionalLayouts(HttpServletRequest request, User user,
        PermissionChecker permissionChecker, Layout layout, List<Layout> layouts)
        throws PortalException, SystemException {

    if ((layout == null) || layout.isPrivateLayout()) {
        return layouts;
    }

    long layoutGroupId = layout.getGroupId();

    Group guestGroup = GroupLocalServiceUtil.getGroup(user.getCompanyId(), GroupConstants.GUEST);

    if (layoutGroupId != guestGroup.getGroupId()) {
        Group layoutGroup = GroupLocalServiceUtil.getGroup(layoutGroupId);

        UnicodeProperties props = layoutGroup.getTypeSettingsProperties();

        boolean mergeGuestPublicPages = GetterUtil.getBoolean(props.getProperty("mergeGuestPublicPages"));

        if (!mergeGuestPublicPages) {
            return layouts;
        }

        List<Layout> guestLayouts = LayoutLocalServiceUtil.getLayouts(guestGroup.getGroupId(), false,
                LayoutConstants.DEFAULT_PARENT_LAYOUT_ID);

        Object[] viewableLayouts = getViewableLayouts(request, user, permissionChecker, layout, guestLayouts);

        guestLayouts = (List<Layout>) viewableLayouts[1];

        layouts.addAll(0, guestLayouts);
    } else {
        HttpSession session = request.getSession();

        Long previousGroupId = (Long) session.getAttribute(WebKeys.VISITED_GROUP_ID_PREVIOUS);

        if ((previousGroupId != null) && (previousGroupId.longValue() != layoutGroupId)) {
            Group previousGroup = null;

            try {
                previousGroup = GroupLocalServiceUtil.getGroup(previousGroupId.longValue());
            } catch (NoSuchGroupException nsge) {
                if (_log.isWarnEnabled()) {
                    _log.warn(nsge);
                }

                return layouts;
            }

            UnicodeProperties props = previousGroup.getTypeSettingsProperties();

            boolean mergeGuestPublicPages = GetterUtil.getBoolean(props.getProperty("mergeGuestPublicPages"));

            if (!mergeGuestPublicPages) {
                return layouts;
            }

            List<Layout> previousLayouts = LayoutLocalServiceUtil.getLayouts(previousGroupId.longValue(), false,
                    LayoutConstants.DEFAULT_PARENT_LAYOUT_ID);

            Object[] viewableLayouts = getViewableLayouts(request, user, permissionChecker, layout,
                    previousLayouts);

            previousLayouts = (List<Layout>) viewableLayouts[1];

            layouts.addAll(previousLayouts);
        }
    }

    return layouts;
}
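The null guard followed by longValue() above is the standard way to compare a possibly-absent boxed id against a primitive one. A minimal sketch of the same pattern with hypothetical ids:

public class BoxedIdCompare {
    public static void main(String[] args) {
        Long previousGroupId = Long.valueOf(12345L); // may be null in real code
        long layoutGroupId = 12345L;
        // The null check must come first: unboxing a null Long in the
        // comparison would throw a NullPointerException.
        if ((previousGroupId != null) && (previousGroupId.longValue() != layoutGroupId)) {
            System.out.println("visited a different group");
        } else {
            System.out.println("same group (or no previous group recorded)");
        }
    }
}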
From source file:com.oneops.transistor.service.ManifestRfcBulkProcessor.java
public void processLinkedTo(Map<Long, CmsRfcCI> design2manifestPlatMap, String nsPath, String userId) {
    List<CmsRfcRelation> existingPlatRels = cmRfcMrgProcessor.getDfDjRelationsWithCIs("manifest.LinksTo", null,
            nsPath, "manifest.Platform", "manifest.Platform", "dj", true, true, null);

    Map<String, Long> existingRelGoids = new HashMap<String, Long>();
    for (CmsRfcRelation rel : existingPlatRels) {
        existingRelGoids.put(rel.getFromRfcCi().getCiName() + "::" + rel.getToRfcCi().getCiName(),
                rel.getCiRelationId());
    }

    for (Long designPlatCiId : design2manifestPlatMap.keySet()) {
        List<CmsCIRelation> platRels = cmProcessor.getFromCIRelationsNaked(designPlatCiId.longValue(), null,
                "LinksTo", "catalog.Platform");

        for (CmsCIRelation catalogLinksTo : platRels) {
            CmsRfcCI fromManifestPlatfrom = design2manifestPlatMap.get(designPlatCiId);
            CmsRfcCI toManifestPlatfrom = design2manifestPlatMap.get(catalogLinksTo.getToCiId());

            CmsRfcRelation manifestLinksToRfc = bootstrapRelationRfc(fromManifestPlatfrom.getCiId(),
                    toManifestPlatfrom.getCiId(), "manifest.LinksTo", nsPath, nsPath);
            manifestLinksToRfc.setCreatedBy(userId);
            manifestLinksToRfc.setUpdatedBy(userId);

            CmsRfcRelation newManifestLinksToRfc = cmRfcMrgProcessor.upsertRelationRfc(manifestLinksToRfc,
                    userId);

            String relKey = fromManifestPlatfrom.getCiName() + "::" + toManifestPlatfrom.getCiName();
            if (existingRelGoids.containsKey(relKey)) {
                existingRelGoids.remove(relKey);
            }

            logger.debug("Created LinkedTo RFC with rfcid: " + newManifestLinksToRfc.getRfcId());
        }
    }

    // anything left in the map no longer exists in the design, so delete it
    for (Map.Entry<String, Long> obsoleteRel : existingRelGoids.entrySet()) {
        cmRfcMrgProcessor.requestRelationDelete(obsoleteRel.getValue(), userId);
    }
}
From source file:com.cloud.consoleproxy.ConsoleProxyManagerImpl.java
@Override
public boolean isPoolReadyForScan(Long pool) {
    // pool is at zone basis
    long dataCenterId = pool.longValue();

    if (!isZoneReady(_zoneHostInfoMap, dataCenterId)) {
        if (s_logger.isDebugEnabled()) {
            s_logger.debug("Zone " + dataCenterId + " is not ready to launch console proxy yet");
        }
        return false;
    }

    List<ConsoleProxyVO> l = _consoleProxyDao.getProxyListInStates(VirtualMachine.State.Starting,
            VirtualMachine.State.Stopping);
    if (l.size() > 0) {
        if (s_logger.isDebugEnabled()) {
            s_logger.debug(
                    "Zone " + dataCenterId + " has " + l.size() + " console proxy VM(s) in transition state");
        }
        return false;
    }

    if (s_logger.isDebugEnabled()) {
        s_logger.debug("Zone " + dataCenterId + " is ready to launch console proxy");
    }
    return true;
}
From source file:com.globalsight.everest.webapp.pagehandler.edit.online.EditorPageHandler.java
private void renderJson(HttpServletRequest p_request, HttpServletResponse p_response, EditorState state,
        boolean isAssignee) throws IOException {
    EditorState.Layout layout = state.getLayout();
    String jsonStr = "";
    p_response.setContentType("text/html;charset=UTF-8");
    String value = "3";

    // comment button
    if ((value = p_request.getParameter(WebAppConstants.REVIEW_MODE)) != null) {
        if ("Show Comments".equals(value)) {
            state.setReviewMode();
        } else if (state.getUserIsPm()) {
            state.setViewerMode();
        } else {
            state.setEditorMode();
        }
    }

    // lock button
    if ((value = p_request.getParameter("editAll")) != null) {
        if (state.canEditAll()) {
            state.setEditAllState(Integer.parseInt(value));
        } else {
            // not in JSON format, so jQuery will not fire the callback
            jsonStr = "false";
        }
    }

    // show segment details
    if ((value = p_request.getParameter("param")) != null) {
        SegmentView view;
        String param[] = value.split("&");
        String tuid[] = param[0].split("=");
        String tuvid[] = param[1].split("=");
        String subid[] = param[2].split("=");

        long tuId = Long.valueOf(tuid[1]).longValue();
        long tuvId = Long.valueOf(tuvid[1]).longValue();
        long subId = Long.valueOf(subid[1]).longValue();

        Long targetPageId = state.getTargetPageId();
        long sourceLocaleId = state.getSourceLocale().getId();
        long targetLocaleId = state.getTargetLocale().getId();

        view = EditorHelper.getSegmentView(state, tuId, tuvId, subId, targetPageId.longValue(), sourceLocaleId,
                targetLocaleId);

        JSONObject json = new JSONObject();
        ServletOutputStream out = p_response.getOutputStream();
        try {
            json.put("str_segmentId", tuvid[1]);
            json.put("str_segmentFormat", view.getDataType());
            json.put("str_segmentType", view.getItemType());
            json.put("str_wordCount", String.valueOf(view.getWordCount()));

            String str_sid = view.getTargetTuv().getSid();
            if (str_sid == null || str_sid.trim().length() == 0) {
                str_sid = "N/A";
            }
            json.put("str_sid", str_sid);

            String str_lastModifyUser = view.getTargetTuv().getLastModifiedUser();
            if (str_lastModifyUser == null || str_lastModifyUser.equalsIgnoreCase("xlf")
                    || str_lastModifyUser.equalsIgnoreCase("Xliff")) {
                str_lastModifyUser = "N/A";
            }
            json.put("str_lastModifyUser", str_lastModifyUser);

            try {
                OnlineHelper helper = new OnlineHelper();
                String str_sourceSegment = GxmlUtil.getInnerXml(view.getSourceSegment());
                String str_dataType = view.getDataType();

                helper.setInputSegment(str_sourceSegment, "", str_dataType);

                if (EditorConstants.PTAGS_VERBOSE.equals(state.getPTagFormat())) {
                    helper.getVerbose();
                } else {
                    helper.getCompact();
                }

                String str_segementPtag = helper.getPtagToNativeMappingTable();
                if (StringUtil.isEmpty(str_segementPtag)) {
                    str_segementPtag = "N/A";
                } else {
                    str_segementPtag = str_segementPtag.replace("<TR>", "<TR valign=top>").replace("<TD",
                            "<TD noWrap");
                    str_segementPtag = str_segementPtag.replace("<tr>", "<TR valign=top>").replace("<td",
                            "<TD noWrap");
                }
                json.put("str_segementPtag", str_segementPtag);
            } catch (Exception e1) {
                CATEGORY.error("Get segment tag information. ", e1);
                throw new EnvoyServletException(e1);
            }

            out.write(json.toString().getBytes("UTF-8"));
        } catch (JSONException e) {
            CATEGORY.error("Get segment detail. ", e);
            throw new EnvoyServletException(e);
        }
        return;
    }

    // find repeated segments
    if ((value = p_request.getParameter(WebAppConstants.PROPAGATE_ACTION)) != null) {
        if (value.equalsIgnoreCase("Unmark Repeated")) {
            state.setNeedFindRepeatedSegments(false);
        } else {
            state.setNeedFindRepeatedSegments(true);
        }
    }

    // show/hide PTags
    if ((value = p_request.getParameter("pTagsAction")) != null) {
        if (value.equalsIgnoreCase("Show PTags")) {
            state.setNeedShowPTags(true);
        } else {
            state.setNeedShowPTags(false);
        }
    }

    boolean isGetJsonData = false;
    if ((value = p_request.getParameter("trgViewMode")) != null) {
        layout.setTargetViewMode(Integer.parseInt(value));
        isGetJsonData = true;
    } else if ((value = p_request.getParameter("srcViewMode")) != null) {
        layout.setSourceViewMode(Integer.parseInt(value));
        isGetJsonData = true;
    } else if (getSearchParamsInMap(p_request).size() > 0) {
        isGetJsonData = true;
    }

    if (isGetJsonData) {
        jsonStr = state.getEditorManager().getTargetJsonData(state, isAssignee, getSearchParamsInMap(p_request));
    }

    p_response.getWriter().write(jsonStr);
}
From source file:com.thinkbiganalytics.nifi.provenance.reporting.KyloProvenanceEventReportingTask.java
/**
 * Responsible for querying the provenance data and sending the events to Kylo, both the streaming event
 * aggregation and the batch event data. A boolean {@code processing} flag is used to prevent multiple
 * threads from running this trigger at the same time.
 * 1. sets the Boolean flag to processing
 * 2. queries NiFi provenance to determine the set of events to process and send to Kylo
 * 3. aggregates and processes the batch events and sends them to Kylo via JMS
 * 4. callback listeners for the JMS will update the {@code StateManager}, setting the {@code LAST_EVENT_ID_KEY} value
 * 5. upon any failure {@code abortProcessing()} will be called
 */
@Override
public void onTrigger(final ReportingContext context) {
    String nodeId = getNodeIdStrategy().getNodeId(context);
    getLogger().debug("Nifi nodeId {}", new Object[] { nodeId });
    if (nodeId == null) {
        return;
    }

    ensureInitializeFlowFileMapDbCache();

    if (!isInitializing() && processing.compareAndSet(false, true)) {
        final StateManager stateManager = context.getStateManager();
        final EventAccess access = context.getEventAccess();
        final ProvenanceEventRepository provenance = access.getProvenanceRepository();

        if (this.stateManager == null) {
            this.stateManager = stateManager;
        }
        getLogger().debug(
                "KyloProvenanceEventReportingTask onTrigger Info: Reporting Task Triggered! Last Event id is {}",
                new Object[] { getLastEventId(stateManager) });
        ensureJmsListeners();

        // get the latest event id in provenance
        final Long maxEventId = provenance.getMaxEventId();
        if (maxEventId == null || maxEventId < 0) {
            getLogger().debug("No Provenance exists yet. Max Id is not set. Will not process events ");
            finishProcessing(0);
            return;
        }
        previousMax = maxEventId;
        try {
            if (!isKyloAvailable()) {
                getLogger().info(
                        "Kylo is not available to process requests yet. This task will exit and wait for its next schedule interval.");
                abortProcessing();
                return;
            }

            // get the last event that was processed
            long lastEventId = initializeAndGetLastEventIdForProcessing(maxEventId, nodeId);

            // finish processing if there is nothing to process
            if (lastEventId == maxEventId.longValue()) {
                getLogger().trace("Last event id == max id... will not process!");
                finishProcessing(0);
                return;
            }

            DateTime lastLogTime = DateTime.now();
            // how often to report the batch processing log when processing a lot of events
            int logReportingTimeMs = 10000; // every 10 sec

            long nextId = lastEventId + 1;

            // record count is inclusive, so we need to add one to the difference to include the last event id
            int recordCount = new Long(maxEventId - (nextId < 0 ? 0 : nextId)).intValue() + 1;
            int totalRecords = recordCount;
            long start = System.currentTimeMillis();

            // split this into batches of events, maxing at 500 if not specified
            int batchSize = processingBatchSize == null || processingBatchSize < 1 ? 500 : processingBatchSize;

            // set up the object pool to be able to store at least the processing batch size amount
            ProvenanceEventObjectPool pool = getProvenanceEventObjectPool();
            int total = processingBatchSize + 100;
            pool.setMaxIdle(total);
            pool.setMaxTotal(total);

            Integer batches = (int) Math.ceil(Double.valueOf(recordCount) / batchSize);
            if (recordCount > 0) {
                getLogger().info(
                        "KyloProvenanceEventReportingTask onTrigger Info: KyloFlowCache Sync Id: {} . Attempting to process {} events starting with event id: {}. Splitting into {} batches of {} each ",
                        new Object[] { nifiFlowSyncId, recordCount, nextId, batches, batchSize });
            }

            // reset the queryTime holder
            nifiQueryTime = 0L;
            while (recordCount > 0) {
                if (!isProcessing()) {
                    break;
                }
                long min = lastEventId + 1;
                long max = (min + (batchSize - 1)) > maxEventId ? maxEventId : (min + (batchSize - 1));
                int batchAmount = new Long(max - (min < 0 ? 0 : min)).intValue() + 1;
                if (batchAmount <= 0) {
                    break;
                } else {
                    lastEventId = processEventsInRange(provenance, min, max);
                    recordCount -= batchAmount;
                    recordCount = recordCount < 0 ? 0 : recordCount;
                    setLastEventId(lastEventId);

                    if (lastLogTime == null
                            || (DateTime.now().getMillis() - lastLogTime.getMillis() > logReportingTimeMs)) {
                        lastLogTime = DateTime.now();
                        getLogger().info(
                                "KyloProvenanceEventReportingTask onTrigger Info: ReportingTask is in a long running process. Currently processing Event id: {}. {} events remaining to be processed. ",
                                new Object[] { lastEventId, recordCount });
                    }
                }
                if (!isProcessing()) {
                    break;
                }
            }
            if (totalRecords > 0 && isProcessing()) {
                long processingTime = (System.currentTimeMillis() - start);
                getLogger().info(
                        "KyloProvenanceEventReportingTask onTrigger Info: ReportingTask finished. Last Event id: {}. Total time to process {} events was {} ms. Total time spent querying for events in Nifi was {} ms. Kylo ProcessingTime: {} ms ",
                        new Object[] { lastEventId, totalRecords, processingTime, nifiQueryTime,
                                processingTime - nifiQueryTime });
            }
            finishProcessing(totalRecords);
        } catch (IOException e) {
            getLogger().error(e.getMessage(), e);
        } finally {
            abortProcessing();
        }
    } else {
        if (isInitializing()) {
            getLogger().info(
                    "Still initializing any previously active flow file provenance data. The task should run shortly");
        } else {
            Long maxId = context.getEventAccess().getProvenanceRepository().getMaxEventId();
            Long count = (maxId - previousMax);
            getLogger().info("KyloProvenanceEventReportingTask onTrigger Info: Still processing previous batch "
                    + currentProcessingMessage + ". The next run will process events up to " + maxId + ". " + count
                    + " new events");
        }
    }
}
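The inclusive batch arithmetic above, new Long(max - min).intValue() + 1, narrows silently if the difference ever overflowed an int. A small sketch of the same computation using Math.toIntExact, which fails fast instead (the ids are hypothetical):

public class BatchCount {
    public static void main(String[] args) {
        long lastEventId = 999L;
        long maxEventId = 1_499L;
        long nextId = lastEventId + 1;
        // inclusive range [nextId, maxEventId]; toIntExact throws
        // ArithmeticException on int overflow rather than truncating
        int recordCount = Math.toIntExact(maxEventId - nextId + 1);
        System.out.println(recordCount); // 500
    }
}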
From source file:gate.annotation.AnnotationSetImpl.java
/**
 * Propagate document content changes to this AnnotationSet.
 *
 * This method is called for all annotation sets of a document from
 * DocumentImpl.edit to adapt the annotations to the text changes made through
 * the edit. The behaviour of this method is influenced by the configuration
 * setting {@link gate.GateConstants#DOCEDIT_INSERT_PREPEND GateConstants.DOCEDIT_INSERT_PREPEND}:
 * annotations immediately ending before or starting after the point of
 * insertion will either become part of the inserted text or not. Currently it
 * works like this:
 * <ul>
 * <li>PREPEND=true: annotation before will become part, annotation after not
 * <li>PREPEND=false: annotation before will not become part, annotation after
 * will become part
 * </ul>
 * NOTE 1 (JP): There is another setting
 * {@link gate.GateConstants#DOCEDIT_INSERT_APPEND GateConstants.DOCEDIT_INSERT_APPEND}
 * but this setting does currently not influence the behaviour of this method.
 * The behaviour of this method may change in the future so that
 * DOCEDIT_INSERT_APPEND is considered separately and in addition to
 * DOCEDIT_INSERT_PREPEND, so that it can be controlled independently whether
 * the annotation before and/or after an insertion point gets expanded or not.
 * <p>
 * NOTE 2: This method has, unfortunately, to be public, to allow DocumentImpls
 * to get at it. Oh for a "friend" declaration. Doesn't throw
 * InvalidOffsetException as DocumentImpl is the only client, and that checks
 * the offsets before calling this method.
 */
public void edit(Long start, Long end, DocumentContent replacement) {
    // make sure we have the indices computed
    indexByStartOffset();

    if (end.compareTo(start) > 0) {
        // get the nodes that need to be processed (the nodes internal to the
        // removed section plus the marginal ones)
        List<Node> affectedNodes = new ArrayList<Node>(
                nodesByOffset.subMap(start, new Long(end.longValue() + 1)).values());
        // if we have more than 1 node we need to delete all apart from the
        // first and move the annotations so that they refer to the one we
        // keep (the first)
        NodeImpl firstNode = null;
        if (!affectedNodes.isEmpty()) {
            firstNode = (NodeImpl) affectedNodes.get(0);
            List<Annotation> startingAnnotations = new ArrayList<Annotation>();
            List<Annotation> endingAnnotations = new ArrayList<Annotation>();
            // now we need to find all the annotations ending in the zone
            List<Node> beforeNodes = new ArrayList<Node>(
                    nodesByOffset.subMap(new Long(0), new Long(end.longValue() + 1)).values());
            Iterator<Node> beforeNodesIter = beforeNodes.iterator();
            while (beforeNodesIter.hasNext()) {
                Node currentNode = beforeNodesIter.next();
                Collection<Annotation> annotations = getAnnotsByStartNode(currentNode.getId());
                if (annotations == null)
                    continue;
                // iterates on the annotations in this set
                Iterator<Annotation> localIterator = annotations.iterator();
                while (localIterator.hasNext()) {
                    Annotation annotation = localIterator.next();
                    long offsetEndAnnotation = annotation.getEndNode().getOffset().longValue();
                    // we are interested only in the annotations ending inside the zone
                    if (offsetEndAnnotation >= start.longValue() && offsetEndAnnotation <= end.longValue())
                        endingAnnotations.add(annotation);
                }
            }
            for (int i = 1; i < affectedNodes.size(); i++) {
                Node aNode = affectedNodes.get(i);
                Collection<Annotation> annSet = getAnnotsByStartNode(aNode.getId());
                if (annSet != null) {
                    startingAnnotations.addAll(annSet);
                }
                // remove the node
                // nodesByOffset.remove(aNode.getOffset());
                // annotsByStartNode.remove(aNode);
            }
            // modify the annotations so they point to the saved node
            Iterator<Annotation> annIter = startingAnnotations.iterator();
            while (annIter.hasNext()) {
                AnnotationImpl anAnnot = (AnnotationImpl) annIter.next();
                anAnnot.start = firstNode;
                // remove the modified annotation if it has just become zero-length
                if (anAnnot.start == anAnnot.end) {
                    remove(anAnnot);
                } else {
                    addToStartOffsetIndex(anAnnot);
                }
            }
            annIter = endingAnnotations.iterator();
            while (annIter.hasNext()) {
                AnnotationImpl anAnnot = (AnnotationImpl) annIter.next();
                anAnnot.end = firstNode;
                // remove the modified annotation if it has just become zero-length
                if (anAnnot.start == anAnnot.end) {
                    remove(anAnnot);
                }
            }
            // remove the unused nodes inside the area
            for (int i = 1; i < affectedNodes.size(); i++) {
                Node aNode = affectedNodes.get(i);
                nodesByOffset.remove(aNode.getOffset());
                annotsByStartNode.remove(aNode.getId());
            }
            // repair the first node:
            // remove from offset index
            nodesByOffset.remove(firstNode.getOffset());
            // change the offset for the saved node
            firstNode.setOffset(start);
            // add back to the offset index
            nodesByOffset.put(firstNode.getOffset(), firstNode);
        }
    }

    // now handle the insert and/or update the rest of the nodes' position
    // get the user selected behaviour (defaults to append)
    boolean shouldPrepend = Gate.getUserConfig().getBoolean(GateConstants.DOCEDIT_INSERT_PREPEND)
            .booleanValue();

    long s = start.longValue(), e = end.longValue();
    long rlen = // length of the replacement value
            ((replacement == null) ? 0 : replacement.size().longValue());

    // update the offsets and the index by offset for the rest of the nodes
    List<Node> nodesAfterReplacement = new ArrayList<Node>(nodesByOffset.tailMap(start).values());
    // remove from the index by offset
    Iterator<Node> nodesAfterReplacementIter = nodesAfterReplacement.iterator();
    while (nodesAfterReplacementIter.hasNext()) {
        NodeImpl n = (NodeImpl) nodesAfterReplacementIter.next();
        nodesByOffset.remove(n.getOffset());
    }
    // change the offsets
    nodesAfterReplacementIter = nodesAfterReplacement.iterator();
    while (nodesAfterReplacementIter.hasNext()) {
        NodeImpl n = (NodeImpl) nodesAfterReplacementIter.next();
        long oldOffset = n.getOffset().longValue();
        // by default we move all nodes back
        long newOffset = oldOffset - (e - s) + rlen;
        // for the first node we need to behave differently
        if (oldOffset == s) {
            // the first offset never moves back
            if (newOffset < s)
                newOffset = s;
            // if we're prepending we don't move forward
            if (shouldPrepend)
                newOffset = s;
        }
        n.setOffset(new Long(newOffset));
    }
    // add back to the index by offset with the new offsets
    nodesAfterReplacementIter = nodesAfterReplacement.iterator();
    while (nodesAfterReplacementIter.hasNext()) {
        NodeImpl n = (NodeImpl) nodesAfterReplacementIter.next();
        nodesByOffset.put(n.getOffset(), n);
    }
    // // rebuild the indices with the new offsets
    // nodesByOffset = null;
    // annotsByStartNode = null;
    // annotsByEndNode = null;
    // indexByStartOffset();
    // indexByEndOffset();
}
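The subMap(start, new Long(end.longValue() + 1)) calls above use a common idiom: SortedMap.subMap is half-open, covering [from, to), so adding one to the end offset makes the upper bound inclusive. A minimal sketch with hypothetical offsets:

import java.util.SortedMap;
import java.util.TreeMap;

public class InclusiveSubMap {
    public static void main(String[] args) {
        SortedMap<Long, String> nodesByOffset = new TreeMap<Long, String>();
        nodesByOffset.put(10L, "node-a");
        nodesByOffset.put(20L, "node-b");
        nodesByOffset.put(30L, "node-c");

        Long start = Long.valueOf(10L);
        Long end = Long.valueOf(20L);
        // end.longValue() + 1 turns the half-open bound into an inclusive one,
        // so node-b at offset 20 is included
        SortedMap<Long, String> affected = nodesByOffset.subMap(start, Long.valueOf(end.longValue() + 1));
        System.out.println(affected.keySet()); // [10, 20]
    }
}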