List of usage examples for java.lang.Long.longValue()
@HotSpotIntrinsicCandidate public long longValue()
From source file:de.acosix.alfresco.mtsupport.repo.sync.TenantAwareChainingUserRegistrySynchronizer.java
/** * {@inheritDoc}/*from w w w.jav a2 s. c o m*/ */ @Override public Date getSyncStartTime() { final Long start = (Long) this.doGetAttribute(true, START_TIME_ATTRIBUTE); final Date lastStart = start.longValue() == -1 ? null : new Date(start.longValue()); return lastStart; }
From source file:de.micromata.genome.chronos.spi.DispatcherImpl2.java
/**
 * Submits a new job to the named scheduler and persists it in the job store.
 *
 * @param schedulerName the name of the scheduler that should run the job
 * @param jobName the name of the job
 * @param jobDefinition the definition of the job
 * @param arg the arguments for the job
 * @param trigger the trigger (e.g. cron expression) that determines when the job fires
 * @param hostName the host the job is bound to; defaults to the local virtual host when {@code null}
 * @return Job reference (pk)
 * @throws SchedulerConfigurationException if an unregistered scheduler is addressed
 * @throws SchedulerException if the job cannot be created in the JobStore
 */
@Override
public long submit(final String schedulerName, String jobName, final JobDefinition jobDefinition,
        final Object arg, final Trigger trigger, String hostName) {
    if (hostName == null) {
        hostName = getVirtualHost();
    }
    // Scheduler lookup, job creation, persisting and queueing happen as one
    // atomic step relative to other submissions on this dispatcher.
    synchronized (this) {
        final Scheduler scheduler = getScheduler(schedulerName);
        if (scheduler == null) {
            final String msg = "Es wurde versucht einen nicht registrierten Scheduler zu benutzen: "
                    + schedulerName;
            /**
             * @logging
             * @reason The Chronos dispatcher received a job for a scheduler that is not
             *         set up.
             * @action Contact TechAdmin.
             */
            GLog.error(GenomeLogCategory.Scheduler,
                    "Es wurde versucht einen nicht registrierten Scheduler zu benutzen: " + schedulerName);
            throw new SchedulerConfigurationException(msg);
        }
        TriggerJobDO job = getSchedulerDAO().buildTriggerJob(scheduler, jobName, jobDefinition, arg, trigger,
                hostName, State.WAIT);
        boolean dispatcherAndSchedulerRunning = isRunning() && scheduler.isRunning();
        boolean isLocalHost = false;
        if (StringUtils.equals(hostName, getVirtualHostName()) == true) {
            isLocalHost = true;
        }
        boolean startJobNow = false;
        boolean addToLocalJobQueue = false;
        if (dispatcherAndSchedulerRunning == true) {
            if (isLocalHost == true) {
                Date now = new Date();
                // NOTE(review): getNextFireTime may return null for a trigger that will
                // never fire again, which would NPE on the next line — confirm the
                // Trigger contract guarantees a non-null next fire time here.
                Date nt = trigger.getNextFireTime(now);
                // Job is due within the next 3 ms: try to execute it immediately;
                // in both cases the job also goes into the local reserved queue.
                if (nt.getTime() - now.getTime() < 3) {
                    startJobNow = true;
                    addToLocalJobQueue = true;
                } else {
                    addToLocalJobQueue = true;
                }
            }
        } else {
            GLog.note(GenomeLogCategory.Scheduler, "Submitting Job with no running dispather or scheduler");
        }
        getJobStore().insertJob(job);
        if (startJobNow == true) {
            boolean started = scheduler.executeJob(job, getJobStore());
            if (started == false) {
                // No free worker right now: park the job in the local reserved queue
                // and wake the dispatcher so it gets picked up.
                reservedJobs.addReservedJob(job);
                wakeup();
            }
        } else if (addToLocalJobQueue == true) {
            reservedJobs.addReservedJob(job);
            wakeup();
        }
        Long jobPk = job.getPk();
        if (jobPk == null) {
            // pk == null should not be possible, but it depends on the JobStore
            // implementation and is theoretically conceivable.
            final String msg = "Beim Anlegen des Jobs ist ein Fehler aufgetreten. Die Referenz (pk) wurde nicht gesetzt : "
                    + job.toString();
            /**
             * @logging
             * @reason The job store did not assign a reference (pk) while persisting a
             *         new job.
             * @action Contact TechAdmin.
             */
            GLog.error(GenomeLogCategory.Scheduler,
                    "Beim Anlegen des Jobs ist ein Fehler aufgetreten. Die Referenz (pk) wurde nicht gesetzt : "
                            + job.toString());
            throw new SchedulerException(msg);
        }
        return jobPk.longValue();
    }
}
From source file:com.gs.obevo.db.impl.core.changeauditdao.SameSchemaDeployExecutionDao.java
/**
 * Loads the deploy executions recorded in the audit tables for the given schema,
 * together with their attributes, optionally restricted to a single execution id.
 *
 * @param conn open JDBC connection to query on
 * @param schema the logical schema whose executions should be loaded
 * @param idToQuery when non-null, restricts the result to this execution id
 * @return the matching executions, or an empty collection when the DAO tables
 *         have not been initialized for this schema yet
 */
private ImmutableCollection<DeployExecution> getDeployExecutions(Connection conn, final String schema,
        Long idToQuery) {
    PhysicalSchema physicalSchema = env.getPhysicalSchema(schema);
    if (!isDaoInitialized(physicalSchema)) {
        return Lists.immutable.empty();
    }
    // Column-level metadata is needed to detect the optional db schema column below.
    DaTable tableInfo = this.dbMetadataManager.getTableInfo(physicalSchema.getPhysicalName(),
            deployExecutionTableName,
            new DaSchemaInfoLevel().setRetrieveTables(true).setRetrieveTableColumns(true));
    MutableList<String> mainWhereClauses = Lists.mutable.empty();
    MutableList<String> attrWhereClauses = Lists.mutable.empty();
    // account for the 5.2.x -> 5.3.0 version and rollback integration upgrade by
    // checking for the db schema column
    if (tableInfo.getColumn(dbSchemaColName) != null) {
        mainWhereClauses.add(dbSchemaColName + " = '" + schema + "'");
    }
    if (idToQuery != null) {
        mainWhereClauses.add(idColName + " = " + idToQuery.longValue());
        attrWhereClauses.add(deployExecutionIdColName + " = " + idToQuery.longValue());
    }
    // NOTE(review): the WHERE clauses are built by string concatenation; schema
    // and id come from internal configuration here, but confirm they are never
    // user-controlled before reusing this pattern.
    String mainWhereClause = mainWhereClauses.notEmpty() ? " WHERE " + mainWhereClauses.makeString(" AND ")
            : "";
    String attrWhereClause = attrWhereClauses.notEmpty() ? " WHERE " + attrWhereClauses.makeString(" AND ")
            : "";
    String mainQuery = "SELECT * FROM " + platform.getSchemaPrefix(physicalSchema) + deployExecutionTableName
            + mainWhereClause;
    String attrQuery = "SELECT * FROM " + platform.getSchemaPrefix(physicalSchema)
            + deployExecutionAttributeTableName + " " + attrWhereClause;
    // First collect all attributes keyed by execution id so each execution row
    // can be joined to its attributes in memory.
    final MutableListMultimap<Long, DeployExecutionAttribute> attrsById = Multimaps.mutable.list.empty();
    for (Map<String, Object> attrResult : ListAdapter
            .adapt(jdbc.query(conn, attrQuery, new MapListHandler()))) {
        long id = platform.getLongValue(attrResult.get(deployExecutionIdColName)).longValue();
        DeployExecutionAttribute attr = new DeployExecutionAttributeImpl(
                (String) attrResult.get(attrNameColName), (String) attrResult.get(attrValueColName));
        attrsById.put(id, attr);
    }
    // Map each main-table row to a DeployExecution, attaching the attributes
    // gathered above.
    return ListAdapter.adapt(jdbc.query(conn, mainQuery, new MapListHandler()))
            .collect(new Function<Map<String, Object>, DeployExecution>() {
                @Override
                public DeployExecution valueOf(Map<String, Object> result) {
                    long id = platform.getLongValue(result.get(idColName)).longValue();
                    // Status is persisted as a single status-code character.
                    DeployExecutionStatus status = DeployExecutionStatus.IN_PROGRESS
                            .valueOfStatusCode(((String) result.get(statusColName)).charAt(0));
                    Timestamp deployTime = platform.getTimestampValue(result.get(deployTimeColName));
                    String executorId = (String) result.get(executorIdColName);
                    String toolVersion = (String) result.get(toolVersionColName);
                    // init/rollback flags are stored as integer 1/0 columns.
                    boolean init = platform.getIntegerValue(result.get(initCommandColName)).intValue() == 1;
                    boolean rollback = platform.getIntegerValue(result.get(rollbackCommandColName))
                            .intValue() == 1;
                    String requesterId = (String) result.get(requesterIdColName);
                    String reason = (String) result.get(reasonColName);
                    String productVersion = (String) result.get(productVersionColName);
                    ImmutableSet<DeployExecutionAttribute> deployExecutionAttributes = attrsById.get(id).toSet()
                            .toImmutable();
                    DeployExecutionImpl deployExecution = new DeployExecutionImpl(requesterId, executorId,
                            schema, toolVersion, deployTime, init, rollback, productVersion, reason,
                            deployExecutionAttributes);
                    deployExecution.setId(id);
                    deployExecution.setStatus(status);
                    return deployExecution;
                }
            }).toImmutable();
}
From source file:com.mirth.connect.client.ui.DashboardPanel.java
/**
 * (Re)installs the dashboard table highlighters: optional alternate row striping,
 * plus cell highlighters for queued messages, errors, revision-delta changes and
 * recently deployed channels.
 */
public synchronized void updateTableHighlighting() {
    // MIRTH-2301
    // Since we are using addHighlighter here instead of using setHighlighters, we
    // need to remove the old ones first.
    dashboardTable.setHighlighters();
    // Add the highlighters. Always add the error highlighter.
    if (Preferences.userNodeForPackage(Mirth.class).getBoolean("highlightRows", true)) {
        Highlighter highlighter = HighlighterFactory.createAlternateStriping(UIConstants.HIGHLIGHTER_COLOR,
                UIConstants.BACKGROUND_COLOR);
        dashboardTable.addHighlighter(highlighter);
    }
    // Highlight the queued-count cell when there are queued messages (> 0).
    HighlightPredicate queuedHighlighterPredicate = new HighlightPredicate() {
        public boolean isHighlighted(Component renderer, ComponentAdapter adapter) {
            if (adapter.column == dashboardTable.getColumnViewIndex(QUEUED_COLUMN_NAME)) {
                Long value = (Long) dashboardTable.getValueAt(adapter.row, adapter.column);
                if (value != null && value.longValue() > 0) {
                    return true;
                }
            }
            return false;
        }
    };
    dashboardTable.addHighlighter(new ColorHighlighter(queuedHighlighterPredicate, new Color(240, 230, 140),
            Color.BLACK, new Color(240, 230, 140), Color.BLACK));
    // Highlight the error-count cell in pink when there are errors (> 0).
    HighlightPredicate errorHighlighterPredicate = new HighlightPredicate() {
        public boolean isHighlighted(Component renderer, ComponentAdapter adapter) {
            if (adapter.column == dashboardTable.getColumnViewIndex(ERROR_COLUMN_NAME)) {
                Long value = (Long) dashboardTable.getValueAt(adapter.row, adapter.column);
                if (value != null && value.longValue() > 0) {
                    return true;
                }
            }
            return false;
        }
    };
    Highlighter errorHighlighter = new ColorHighlighter(errorHighlighterPredicate, Color.PINK, Color.BLACK,
            Color.PINK, Color.BLACK);
    dashboardTable.addHighlighter(errorHighlighter);
    // Highlight the revision-delta cell when the deployed revision is behind
    // (delta > 0) or, for channel nodes, when code templates changed since deploy.
    HighlightPredicate revisionDeltaHighlighterPredicate = new HighlightPredicate() {
        public boolean isHighlighted(Component renderer, ComponentAdapter adapter) {
            if (adapter.column == dashboardTable.getColumnViewIndex(DEPLOYED_REVISION_DELTA_COLUMN_NAME)) {
                Integer value = (Integer) dashboardTable.getValueAt(adapter.row, adapter.column);
                if (value != null && value.intValue() > 0) {
                    return true;
                }
                TreePath path = dashboardTable.getPathForRow(adapter.row);
                if (path != null) {
                    AbstractDashboardTableNode dashboardTableNode = (AbstractDashboardTableNode) path
                            .getLastPathComponent();
                    // Group nodes have no single channel status to inspect.
                    if (!dashboardTableNode.isGroupNode()) {
                        DashboardStatus status = dashboardTableNode.getDashboardStatus();
                        if (status.getCodeTemplatesChanged() != null && status.getCodeTemplatesChanged()) {
                            return true;
                        }
                    }
                }
            }
            return false;
        }
    };
    dashboardTable.addHighlighter(new ColorHighlighter(revisionDeltaHighlighterPredicate,
            new Color(255, 204, 0), Color.BLACK, new Color(255, 204, 0), Color.BLACK));
    // Highlight the last-deployed cell for channels deployed within the last two minutes.
    HighlightPredicate lastDeployedHighlighterPredicate = new HighlightPredicate() {
        public boolean isHighlighted(Component renderer, ComponentAdapter adapter) {
            if (adapter.column == dashboardTable.getColumnViewIndex(LAST_DEPLOYED_COLUMN_NAME)) {
                Calendar checkAfter = Calendar.getInstance();
                checkAfter.add(Calendar.MINUTE, -2);
                Object value = dashboardTable.getValueAt(adapter.row, adapter.column);
                if (value != null && value instanceof Calendar && ((Calendar) value).after(checkAfter)) {
                    return true;
                }
            }
            return false;
        }
    };
    dashboardTable.addHighlighter(new ColorHighlighter(lastDeployedHighlighterPredicate,
            new Color(240, 230, 140), Color.BLACK, new Color(240, 230, 140), Color.BLACK));
}
From source file:be.hogent.tarsos.util.histogram.Histogram.java
/** * Adds a value to the Histogram. Assigns the value to the right bin * automatically./*from ww w . j av a 2 s .c o m*/ * * @param value * The value to add. * @throws IllegalArgumentException * when the value is not in the range of the histogram. */ public final Histogram add(final double value) { if (!wraps && !ignoreValuesOutsideRange && !validValue(value)) { throw new IllegalArgumentException("Value not in the correct interval: " + value + " not between " + "[" + this.firstValidValue() + "," + this.lastValidValue() + "]."); } else if (!wraps && ignoreValuesOutsideRange && !validValue(value)) { LOG.info("Ignored value " + value + " (not between " + "[" + this.firstValidValue() + "," + this.lastValidValue() + "])."); } if (value > 0) { final double key = valueToKey(value); final Long count = freqTable.get(key); assert count != null : "All key values should be initialized, " + key + " is not."; if (count != null) { freqTable.put(key, Long.valueOf(count.longValue() + 1)); } } else { LOG.warning("Using values below zero in is not tested, " + "it can yield unexpected results. Values below zero are ignored!"); } valueAddedHook(value); return this; }
From source file:xx.tream.chengxin.ms.action.TrainBackAction.java
@RequestMapping({ "/toUpdatePaying" }) public String toUpdatePaying(ModelMap modelMap, HttpServletRequest request, HttpServletResponse response, FormMap formMap, Integer currentPage, Integer pageSize, Long id) { Map<String, Object> queryMap = new HashMap<String, Object>(); queryMap.put("payingId", id); Map<String, Object> user = ParamUtil.getSessionUser(request); queryMap.put("updateUser", user.get("id")); queryMap.put("updateStatus", "0"); List<Map<String, Object>> list = this.payingBackService.queryByParam(queryMap); if ((list != null) && (list.size() > 0)) { modelMap.put("msg", "??"); } else {/*from ww w . j a va 2 s . com*/ Map<String, Object> payingMap = this.payingService.find(id.longValue()); //if (!payingMap.get("status").equals("0")) // modelMap.put("msg", "??????"); //else { modelMap.put("payingMap", payingMap); //} } return "train/updatePaying"; }
From source file:xx.tream.chengxin.ms.action.TrainBackAction.java
@RequestMapping({ "/toUpdatePayout" }) public String toUpdatePayout(ModelMap modelMap, HttpServletRequest request, HttpServletResponse response, FormMap formMap, Integer currentPage, Integer pageSize, Long id) { Map<String, Object> queryMap = new HashMap<String, Object>(); queryMap.put("payoutId", id); Map<String, Object> user = ParamUtil.getSessionUser(request); queryMap.put("createUser", user.get("id")); queryMap.put("updateStatus", "0"); List<Map<String, Object>> list = this.payoutBackService.queryByParam(queryMap); if ((list != null) && (list.size() > 0)) { modelMap.put("msg", "??"); } else {/*w ww . j a va2 s .c o m*/ Map<String, Object> payoutMap = this.payoutService.find(id.longValue()); //if (!payoutMap.get("status").equals("0")) // modelMap.put("msg", "??????"); //else { modelMap.put("payoutMap", payoutMap); List<Map<String, Object>> payoutItemList = ParamUtil.getPayoutItemList(); modelMap.put("payoutItemList", payoutItemList); //} } return "train/updatePayout"; }
From source file:edu.harvard.iq.dvn.core.index.IndexServiceBean.java
public void updateStudiesInCollections() { long ioProblemCount = 0; boolean ioProblem = false; Indexer indexer = Indexer.getInstance(); String dvnIndexLocation = System.getProperty("dvn.index.location"); String lockFileName = dvnIndexLocation + "/IndexAll.lock"; File indexAllLockFile = new File(lockFileName); // Before we do anything else, check if the index directory is // locked for IndexAll: if (indexAllLockFile.exists()) { logger.info("Detected IndexAll in progress; skipping reindexing ofn collection-linked studies."); return;//from w w w . j a v a 2 s .c o m } logger.info("Starting batch reindex of collection-linked studies."); lockFileName = dvnIndexLocation + "/collReindex.lock"; File collReindexLockFile = new File(lockFileName); try { // Check for an existing lock file: if (collReindexLockFile.exists()) { String errorMessage = "Cannot reindex: collection reindexing already in progress;"; errorMessage += ("lock file " + lockFileName + ", created on " + (new Date(collReindexLockFile.lastModified())).toString() + "."); throw new IOException(errorMessage); } // Create a lock file: try { collReindexLockFile.createNewFile(); } catch (IOException ex) { String errorMessage = "Error: could not create lock file ("; errorMessage += (lockFileName + ")"); throw new IOException(errorMessage); } List<Long> vdcIdList = vdcService.findAllIds(); logger.fine("Found " + vdcIdList.size() + " dataverses."); Long maxStudyId = studyService.getMaxStudyTableId(); if (maxStudyId == null) { logger.fine("The database appears to be empty. Exiting."); return; } if (maxStudyId.intValue() != maxStudyId.longValue()) { logger.severe( "There appears to be more than 2^^31 objects in the study table; the subnetwork cross-indexing hack isn't going to work."); throw new IOException( "There appears to be more than 2^^31 objects in the study table; the subnetwork cross-indexing hack isn't going to work."); /* This is quite unlikely to happen, but still... 
*/ } ArrayList<VDCNetwork> subNetworks = getSubNetworksAsArray(); //vdcNetworkService.getVDCSubNetworks(); // This is an array of [sub]networks organized by *network id*; // i.e., if there are subnetworks with the ids 0, 2 and 5 the array // will contain {0, NULL, network_2, NULL, NULL, network_5} if (subNetworks == null || (subNetworks.size() < 1)) { // No subnetworks in this DV Network; nothing to do. logger.fine("There's only one network in the DVN; nothing to do. Exiting"); return; } int maxSubnetworkId = subNetworks.size() - 1; if (maxSubnetworkId > 63) { logger.severe( "There are more than 63 VDC (sub)networks. The subnetwork cross-indexing hack isn't going to work." + "(we are using longs as bitstrings to store network cross-linked status of a study)"); throw new IOException( "There are more than 63 VDC (sub)networks. The subnetwork cross-indexing hack isn't going to work." + "(we are using longs as bitstrings to store network cross-linked status of a study)"); /* Not very likely to happen either... */ } long linkedVdcNetworkMap[] = new long[maxStudyId.intValue() + 1]; Long vdcId = null; VDC vdc = null; List<Long> linkedStudyIds = null; Long vdcNetworkId = null; Long studyNetworkId = null; Study linkedStudy = null; for (Iterator it = vdcIdList.iterator(); it.hasNext();) { vdcId = (Long) it.next(); vdc = vdcService.findById(vdcId); if (vdc != null && vdc.getVdcNetwork() != null) { vdcNetworkId = vdc.getVdcNetwork().getId(); if (vdcNetworkId.longValue() > 0) { // We are not interested in the VDCs in the top-level // network (network id 0); because the top-level network // already contains all the studies in it. Whatever // studies the dynamic collections may be linking, they // are still in the same DVN. 
linkedStudyIds = indexer.findStudiesInCollections(vdc); if (linkedStudyIds != null) { logger.fine("Found " + linkedStudyIds.size() + " linked studies in VDC " + vdc.getId() + ", subnetwork " + vdcNetworkId.toString()); for (Long studyId : linkedStudyIds) { if (studyId.longValue() <= maxStudyId.longValue()) { // otherwise this is a new study, created since we // have started this process; we'll be skipping it, // this time around. try { linkedStudy = studyService.getStudy(studyId); } catch (Exception ex) { linkedStudy = null; } if (linkedStudy != null) { studyNetworkId = linkedStudy.getOwner().getVdcNetwork().getId(); if ((studyNetworkId != null) && (vdcNetworkId.compareTo(studyNetworkId) != 0)) { // this study is cross-linked from another VDC network! logger.fine("Study " + linkedStudy.getId() + " from subnetwork " + studyNetworkId + " is linked to this VDC (" + vdc.getId() + ")."); linkedVdcNetworkMap[linkedStudy.getId() .intValue()] |= (1 << vdcNetworkId.intValue()); } } linkedStudy = null; studyNetworkId = null; } } } linkedStudyIds = null; vdcNetworkId = null; } } vdcId = null; vdc = null; } // Now go through the list of studies and reindex those for which // the cross-linked status has changed: logger.fine("Checking the cross-linking status and reindexing the studies for which it has changed:"); List<Long> linkedToNetworkIds = null; boolean reindexNecessary = false; // Check for the studies that are no longer linked to any foreign // subnetworks: List<Long> existingLinkedStudies = studyService.getAllLinkedStudyIds(); Long sid = null; for (Iterator it = existingLinkedStudies.iterator(); it.hasNext();) { sid = (Long) it.next(); if (linkedVdcNetworkMap[sid.intValue()] == 0) { // study no longer linked to any subnetworks linkedVdcNetworkMap[sid.intValue()] = -1; } } // TODO: would be faster still to retrieve the entire map of crosslinks // from the db in a single query here, cook another array of bitstrings // and then just go and compare the 2, without making any 
further // queries... --L.A. List<VDCNetwork> currentCrossLinks = null; for (int i = 0; i < maxStudyId.intValue() + 1; i++) { if (linkedVdcNetworkMap[i] != 0) { logger.fine("study " + i + ": cross-linked outside of its network; (still need to check if we need to reindex it)"); try { linkedStudy = studyService.getStudy(new Long(i)); } catch (Exception ex) { linkedStudy = null; } reindexNecessary = false; if (linkedStudy != null) { // Only released studies get indexed. // (but studies that are no longer released may // need to be dropped from the crosslinking map, and // from the index) currentCrossLinks = linkedStudy.getLinkedToNetworks(); if (linkedVdcNetworkMap[i] == -1) { // If it's an "unlinked" study, // remove the existing links in the database: logger.fine("study " + i + " no longer cross-linked to any subnetworks."); //linkedStudy.setLinkedToNetworks(null); linkedStudy = studyService.setLinkedToNetworks(linkedStudy.getId(), null); reindexNecessary = true; } else if (linkedStudy.isReleased()) { // else find what subnetworks this study is already linked // to in the database: linkedToNetworkIds = linkedStudy.getLinkedToNetworkIds(); long linkedNetworkBitString = produceLinkedNetworksBitstring(linkedToNetworkIds); if (linkedNetworkBitString != linkedVdcNetworkMap[i]) { // This means the cross-linking status of the study has changed! 
logger.fine("study " + i + ": cross-linked status has changed; updating"); // Update it in the database: //linkedStudy.setLinkedToNetworks(newLinkedToNetworks(subNetworks, linkedVdcNetworkMap[i])); linkedStudy = studyService.setLinkedToNetworks(linkedStudy.getId(), newLinkedToNetworks(subNetworks, linkedVdcNetworkMap[i])); //studyService.updateStudy(linkedStudy); reindexNecessary = true; } } if (reindexNecessary) { // Re-index the study: indexer = Indexer.getInstance(); boolean indexSuccess = true; try { indexer.deleteDocumentCarefully(linkedStudy.getId()); } catch (IOException ioe) { indexSuccess = false; } if (indexSuccess) { try { //indexer.addDocument(linkedStudy); addDocument(linkedStudy); } catch (Exception ex) { ioProblem = true; ioProblemCount++; logger.severe("Caught exception attempting to re-index re-linked study " + linkedStudy.getId() + "; " + ex.getMessage()); ex.printStackTrace(); indexSuccess = false; } } else { logger.fine("Could not delete study " + linkedStudy.getId() + " from index; skipping reindexing."); } if (!indexSuccess) { // Make sure we leave the db linking status entry // in the same shape it was before the reindexing // attempt; so that it'll hopefully get caught // by the next reindexing now. //linkedStudy.setLinkedToNetworks(currentCrossLinks); linkedStudy = studyService.setLinkedToNetworks(linkedStudy.getId(), currentCrossLinks); } } } } } logger.info("Done reindexing collection-linked studies."); } catch (Exception ex) { ioProblem = true; ioProblemCount++; logger.severe("Caught exception while trying to update studies in collections: " + ex.getMessage()); ex.printStackTrace(); } finally { // delete the lock file: if (collReindexLockFile.exists()) { collReindexLockFile.delete(); } } handleIOProblems(ioProblem, ioProblemCount); }
From source file:edu.cornell.mannlib.vitro.webapp.dao.jena.JenaBaseDao.java
/**
 * Convenience method for use with functional datatype properties.
 *
 * Asserts {@code value} as an xsd:long literal on {@code res}. If the property
 * already carries the same long value, the model is left untouched; if it
 * carries a different value, all existing statements for the property are
 * removed before the new value is added.
 */
protected void updatePropertyLongValue(Resource res, Property dataprop, Long value, Model model) {
    if (dataprop == null) {
        return;
    }
    // Read the currently asserted literal value, if there is one.
    Long currentValue = null;
    Statement existingStmt = res.getProperty(dataprop);
    if (existingStmt != null) {
        RDFNode node = existingStmt.getObject();
        if (node != null && node.isLiteral()) {
            currentValue = ((Literal) node).getLong();
        }
    }
    if (currentValue == null) {
        model.add(res, dataprop, value.toString(), XSDDatatype.XSDlong);
    } else if (currentValue.longValue() != value) {
        // Functional property: drop every old statement before asserting the new value.
        model.removeAll(res, dataprop, null);
        model.add(res, dataprop, value.toString(), XSDDatatype.XSDlong);
    }
}
From source file:net.java.sip.communicator.plugin.notificationwiring.NotificationManager.java
/**
 * Informs the user what is the typing state of his chat contacts.
 *
 * Fires a proactive notification for a typing contact, but at most once per
 * contact per 30-second window, and never for the chat currently in focus.
 *
 * @param ev the event containing details on the typing notification
 */
public void typingNotificationReceived(TypingNotificationEvent ev) {
    try {
        Contact contact = ev.getSourceContact();
        // We don't care for proactive notifications different than typing;
        // sometimes after closing a chat we can see someone is "typing" —
        // it's just the server signalling the chat went inactive (STATE_STOPPED).
        if (ev.getTypingState() != OperationSetTypingNotifications.STATE_TYPING) {
            return;
        }
        // Check whether the current chat window shows the chat we received a
        // typing info for; in such case don't show notifications.
        UIService uiService = NotificationWiringActivator.getUIService();
        if (uiService != null) {
            Chat chat = uiService.getCurrentChat();
            if (chat != null) {
                MetaContact metaContact = uiService.getChatContact(chat);
                if ((metaContact != null) && metaContact.containsContact(contact) && chat.isChatFocused()) {
                    return;
                }
            }
        }
        long currentTime = System.currentTimeMillis();
        if (proactiveTimer.size() > 0) {
            // First remove contacts that have been here longer than the
            // 30-second timeout, to avoid memory leaks.
            Iterator<Map.Entry<Contact, Long>> entries = proactiveTimer.entrySet().iterator();
            while (entries.hasNext()) {
                Map.Entry<Contact, Long> entry = entries.next();
                Long lastNotificationDate = entry.getValue();
                if (lastNotificationDate.longValue() + 30000 < currentTime) {
                    // The entry is outdated
                    entries.remove();
                }
            }
            // Now, check if the contact is still in the map; if so, we already
            // notified about this contact within the window — throttle.
            if (proactiveTimer.containsKey(contact)) {
                return;
            }
        }
        // Record the notification time and fire the proactive notification.
        proactiveTimer.put(contact, currentTime);
        fireChatNotification(contact, PROACTIVE_NOTIFICATION, contact.getDisplayName(),
                NotificationWiringActivator.getResources()
                        .getI18NString("service.gui.PROACTIVE_NOTIFICATION"),
                null);
    } catch (Throwable t) {
        // Never swallow ThreadDeath; log everything else.
        if (t instanceof ThreadDeath)
            throw (ThreadDeath) t;
        else {
            logger.error("An error occurred while handling" + " a typing notification.", t);
        }
    }
}