List of usage examples for java.util LinkedList addLast
public void addLast(E e)
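addLast(E e) appends the given element to the tail of the list; for LinkedList it is equivalent to add(E) and never fails for capacity reasons, since the list is unbounded. A minimal self-contained sketch (class and variable names are illustrative, not from the examples below):

import java.util.LinkedList;

public class AddLastBasics {
    public static void main(String[] args) {
        LinkedList<String> queue = new LinkedList<String>();
        queue.addLast("first");   // same effect as add("first")
        queue.addLast("second");
        queue.addFirst("zeroth"); // head insertion, shown for contrast
        System.out.println(queue);            // [zeroth, first, second]
        System.out.println(queue.getFirst()); // zeroth
        System.out.println(queue.getLast());  // second
    }
}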
From source file:org.gcaldaemon.core.file.OnlineFileListener.java
public OnlineFileListener(ThreadGroup mainGroup, Configurator configurator) throws Exception {
    super(mainGroup, "File listener");
    this.configurator = configurator;

    // Get polling times
    long timeout = configurator.getConfigProperty(Configurator.FILE_POLLING_FILE, 10000L);
    if (timeout < 1000L) {
        log.warn("The fastest file polling period is '1 sec'!");
        timeout = 1000L;
    }
    filePollingTime = timeout;
    timeout = configurator.getConfigProperty(Configurator.FILE_POLLING_GOOGLE, 600000L);
    if (timeout < 180000L) {
        log.warn("The fastest Google Calendar polling period is '3 min'!");
        timeout = 180000L;
    }
    googlePollingTime = timeout;

    // Get reloader script's path
    reloaderScript = configurator.getConfigProperty(Configurator.FILE_RELOADER_SCRIPT, null);

    // Get parameters
    LinkedList iCalFileList = new LinkedList();
    LinkedList usernameList = new LinkedList();
    LinkedList passwordList = new LinkedList();
    LinkedList urlList = new LinkedList();
    String parameterPostfix;
    int gapCounter = 0;
    for (int j, i = 1;; i++) {

        // Create parameter postfix [..n]
        if (i == 1) {
            parameterPostfix = "";
        } else {
            parameterPostfix = Integer.toString(i);
        }
        if (configurator.getConfigProperty(Configurator.FILE_ICAL_PATH + parameterPostfix, null) == null) {
            if (gapCounter < MAX_INDEX_GAP) {
                gapCounter++;
                continue;
            }
            break;
        }
        gapCounter = 0;

        // Get local file path
        String filePath = configurator.getConfigProperty(Configurator.FILE_ICAL_PATH + parameterPostfix,
                "/google" + i + ".ics");
        if (filePath.startsWith("~")) {
            filePath = filePath.substring(1);
        }
        if (filePath.endsWith("/*.ics")) {
            filePath = filePath.substring(0, filePath.length() - 6);
        }
        log.info("Start listening file " + filePath + "...");
        File iCalFile = new File(filePath);

        // Get username
        String username = configurator.getConfigProperty(Configurator.FILE_GOOGLE_USERNAME + parameterPostfix, null);

        // Get password
        String password = null;
        if (configurator.getConfigProperty(Configurator.FILE_GOOGLE_PASSWORD + parameterPostfix, null) != null) {
            password = configurator.getPasswordProperty(Configurator.FILE_GOOGLE_PASSWORD + parameterPostfix);
        }

        // Get calendar URL
        String url = configurator.getConfigProperty(Configurator.FILE_PRIVATE_ICAL_URL + parameterPostfix, null);

        // Verify parameters
        if (url == null) {
            throw new NullPointerException("Missing private ICAL URL ("
                    + Configurator.FILE_PRIVATE_ICAL_URL + parameterPostfix + ")!");
        }
        if (!configurator.isFeedConverterEnabled()) {
            if (username == null) {
                throw new NullPointerException(
                        "Missing username (" + Configurator.FILE_GOOGLE_USERNAME + parameterPostfix + ")!");
            }
            if (password == null) {
                throw new NullPointerException(
                        "Missing password (" + Configurator.FILE_GOOGLE_PASSWORD + parameterPostfix + ")!");
            }
            j = url.indexOf("/calendar");
            if (j > 0) {
                url = url.substring(j);
            }
            if (url.charAt(0) != '/') {
                throw new NullPointerException("Invalid private ICAL URL ("
                        + Configurator.FILE_PRIVATE_ICAL_URL + parameterPostfix + ")!");
            }
            j = url.indexOf('@');
            if (j != -1) {
                url = url.substring(0, j) + "%40" + url.substring(j + 1);
            }
            j = url.indexOf("googlemail.com");
            if (j != -1) {
                url = url.substring(0, j) + "gmail.com" + url.substring(j + 14);
            }
        } else {
            if (url.startsWith("calendar/")) {
                url = '/' + url;
            }
        }

        // Add parameters to lists
        iCalFileList.addLast(iCalFile);
        usernameList.addLast(username);
        passwordList.addLast(password);
        urlList.addLast(url);
    }

    // Create object arrays
    iCalFiles = new File[iCalFileList.size()];
    usernames = new String[usernameList.size()];
    passwords = new String[passwordList.size()];
    urls = new String[urlList.size()];
    iCalFileList.toArray(iCalFiles);
    usernameList.toArray(usernames);
    passwordList.toArray(passwords);
    urlList.toArray(urls);
    log.info("File listener started successfully.");

    // Start listener
    start();
}
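The constructor above accumulates an unknown number of per-calendar settings with addLast and only converts them to typed arrays with toArray once the loop is done. A minimal sketch of that accumulate-then-toArray pattern; the input data and names here are illustrative stand-ins for the GCALDaemon configurator:

import java.io.File;
import java.util.LinkedList;

public class AccumulateThenToArray {
    public static void main(String[] args) {
        // Illustrative input; the real code reads file/username/url tuples from a configurator.
        String[][] entries = {
            {"/tmp/google1.ics", "user1@example.com"},
            {"/tmp/google2.ics", "user2@example.com"},
        };

        LinkedList<File> fileList = new LinkedList<File>();
        LinkedList<String> userList = new LinkedList<String>();
        for (String[] entry : entries) {
            fileList.addLast(new File(entry[0])); // addLast preserves insertion order
            userList.addLast(entry[1]);
        }

        // Convert once all entries are collected, as the listener constructor does.
        File[] files = fileList.toArray(new File[fileList.size()]);
        String[] users = userList.toArray(new String[userList.size()]);
        System.out.println(files.length + " files, " + users.length + " users");
    }
}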
From source file:net.spfbl.core.Analise.java
public static void processIP(String ip, StringBuilder builder, int timeout) {
    try {
        ip = Subnet.normalizeIP(ip);
        Distribution dist = SPF.getDistribution(ip, false);
        float probability = dist == null ? 0.0f : dist.getSpamProbability(ip);
        boolean ipv4 = SubnetIPv4.isValidIPv4(ip);
        Object response = null;
        Status statusIP;
        String tokenName;
        Status statusName = Status.NONE;
        LinkedList<String> nameList = new LinkedList<String>();
        try {
            for (String ptr : Reverse.getPointerSet(ip)) {
                nameList.add(ptr);
                if (Generic.containsDynamic(ptr)) {
                    statusName = Status.DYNAMIC;
                    break;
                } else if (Block.containsDomain(ptr, false)) {
                    statusName = Status.BLOCK;
                } else if (Block.containsREGEX(ptr)) {
                    statusName = Status.BLOCK;
                } else if (Block.containsWHOIS(ptr)) {
                    statusName = Status.BLOCK;
                } else if (Generic.containsGeneric(ptr)) {
                    statusName = Status.GENERIC;
                } else {
                    try {
                        if (Reverse.getAddressSet(ptr).contains(ip)) {
                            Distribution distPTR;
                            if (White.containsDomain(ptr)) {
                                statusName = Status.WHITE;
                                break;
                            } else if (Provider.containsDomain(ptr)) {
                                statusName = Status.PROVIDER;
                                break;
                            } else if (Ignore.contains(ptr)) {
                                statusName = Status.IGNORE;
                                break;
                            } else if ((distPTR = SPF.getDistribution(ptr, false)) == null) {
                                statusName = Status.GREEN;
                                break;
                            } else {
                                statusName = Status.valueOf(distPTR.getStatus(ptr).name());
                                break;
                            }
                        } else {
                            statusName = Status.INVALID;
                        }
                    } catch (NamingException ex) {
                        statusName = Status.NXDOMAIN;
                    }
                }
            }
        } catch (CommunicationException ex) {
            statusName = Status.TIMEOUT;
        } catch (ServiceUnavailableException ex) {
            statusName = Status.UNAVAILABLE;
        } catch (NamingException ex) {
            statusName = Status.NONE;
        }
        if (White.containsIP(ip)) {
            statusIP = Status.WHITE;
        } else if (Block.containsCIDR(ip)) {
            statusIP = Status.BLOCK;
        } else if (Provider.containsCIDR(ip)) {
            statusIP = Status.PROVIDER;
        } else if (Ignore.containsCIDR(ip)) {
            statusIP = Status.IGNORE;
        } else if (Block.containsDNSBL(ip)) {
            statusIP = Status.DNSBL;
        } else if (statusName == Status.TIMEOUT && hasAccessSMTP(ip)
                && (response = getResponseSMTP(ip, 25, timeout)) instanceof Status) {
            statusIP = (Status) response;
        } else if (statusName == Status.UNAVAILABLE && hasAccessSMTP(ip)
                && (response = getResponseSMTP(ip, 25, timeout)) instanceof Status) {
            statusIP = (Status) response;
        } else if (statusName == Status.NONE && hasAccessSMTP(ip)
                && (response = getResponseSMTP(ip, 25, timeout)) instanceof Status) {
            statusIP = (Status) response;
        } else if (dist == null) {
            statusIP = Status.GREEN;
        } else {
            statusIP = Status.valueOf(dist.getStatus(ip).name());
        }
        if (response instanceof String) {
            nameList.addLast((String) response);
        }
        if (statusName == Status.TIMEOUT) {
            tokenName = ip;
        } else if (statusName == Status.UNAVAILABLE) {
            tokenName = ip;
        } else if (nameList.isEmpty()) {
            tokenName = ip;
            statusName = Status.NONE;
        } else {
            tokenName = nameList.getFirst();
            statusName = Status.INVALID;
        }
        for (String name : nameList) {
            if (Generic.containsDynamic(name)) {
                tokenName = name;
                statusName = Status.DYNAMIC;
                break;
            } else if (Block.containsDomain(name, false)) {
                tokenName = name;
                statusName = Status.BLOCK;
                break;
            } else if (Block.containsREGEX(name)) {
                tokenName = name;
                statusName = Status.BLOCK;
                break;
            } else if (Block.containsWHOIS(name)) {
                tokenName = name;
                statusName = Status.BLOCK;
                break;
            } else if (Generic.containsGeneric(name)) {
                tokenName = name;
                statusName = Status.GENERIC;
                break;
            } else {
                try {
                    if (Reverse.getAddressSet(name).contains(ip)) {
                        if (White.containsDomain(name)) {
                            tokenName = name;
                            statusName = Status.WHITE;
                            break;
                        } else if (Provider.containsDomain(name)) {
                            tokenName = name;
                            statusName = Status.PROVIDER;
                            break;
                        } else if (Ignore.contains(name)) {
                            tokenName = name;
                            statusName = Status.IGNORE;
                            break;
                        } else {
                            tokenName = name;
                            Distribution distribution2 = SPF.getDistribution(name, false);
                            if (distribution2 == null) {
                                statusName = Status.GREEN;
                            } else {
                                statusName = Status.valueOf(distribution2.getStatus(name).name());
                            }
                        }
                    }
                } catch (NameNotFoundException ex) {
                    tokenName = name;
                    statusName = Status.NXDOMAIN;
                } catch (NamingException ex) {
                    // Do nothing.
                }
            }
        }
        if (statusName == Status.INVALID || statusName == Status.NXDOMAIN) {
            try {
                String domain = Domain.extractDomain(tokenName, true);
                if (!Reverse.hasValidNameServers(domain)) {
                    if (Block.addExact(domain)) {
                        statusName = Status.BLOCK;
                        Server.logDebug("new BLOCK '" + domain + "' added by 'NXDOMAIN'.");
                        Peer.sendBlockToAll(domain);
                    }
                }
            } catch (NamingException ex) {
                // Do nothing.
            } catch (ProcessException ex) {
                if (ex.isErrorMessage("RESERVED")) {
                    statusName = Status.RESERVED;
                } else {
                    Server.logError(ex);
                }
            }
        }
        if (statusIP != Status.BLOCK && statusName == Status.DYNAMIC) {
            String token = ip + (SubnetIPv4.isValidIPv4(ip) ? "/24" : "/48");
            String cidr = Subnet.normalizeCIDR(token);
            if (Block.tryOverlap(cidr)) {
                Server.logDebug("new BLOCK '" + token + "' added by '" + tokenName + ";" + statusName + "'.");
            } else if (Block.tryAdd(ip)) {
                Server.logDebug("new BLOCK '" + ip + "' added by '" + tokenName + ";" + statusName + "'.");
            }
            String previous = Subnet.getFirstIP(cidr);
            previous = Subnet.getPreviousIP(previous);
            previous = Subnet.getPreviousIP(previous);
            Analise.processToday(previous);
            String next = Subnet.getLastIP(cidr);
            next = Subnet.getNextIP(next);
            next = Subnet.getNextIP(next);
            Analise.processToday(next);
            statusIP = Status.BLOCK;
        } else if (statusIP != Status.BLOCK && statusName == Status.NONE) {
            String token = ip + (ipv4 ? "/32" : "/64");
            String cidr = Subnet.normalizeCIDR(token);
            if (Block.tryOverlap(cidr)) {
                Server.logDebug("new BLOCK '" + token + "' added by '" + tokenName + ";" + statusName + "'.");
            } else if (Block.tryAdd(ip)) {
                Server.logDebug("new BLOCK '" + ip + "' added by '" + tokenName + ";" + statusName + "'.");
            }
            if (ipv4) {
                cidr = Subnet.normalizeCIDR(ip + "/24");
                String next = Subnet.getFirstIP(cidr);
                for (int index = 0; index < 256; index++) {
                    if (!hasReverse(next) && Block.tryAdd(next)) {
                        Server.logDebug("new BLOCK '" + next + "' added by '" + next + ";" + statusName + "'.");
                    }
                    next = Subnet.getNextIP(next);
                }
            }
            statusIP = Status.BLOCK;
        } else if (statusIP != Status.BLOCK && (statusName == Status.BLOCK || statusName == Status.RESERVED
                || statusName == Status.NXDOMAIN)) {
            if (Block.tryAdd(ip)) {
                Server.logDebug("new BLOCK '" + ip + "' added by '" + tokenName + ";" + statusName + "'.");
            }
            statusIP = Status.BLOCK;
        } else if (statusIP != Status.BLOCK && statusIP != Status.IGNORE && statusName != Status.PROVIDER
                && statusName != Status.IGNORE && statusName != Status.GREEN && statusName != Status.WHITE
                && SubnetIPv6.isSLAAC(ip)) {
            String token = ip + "/64";
            String cidr = SubnetIPv6.normalizeCIDRv6(token);
            if (Block.tryOverlap(cidr)) {
                Server.logDebug("new BLOCK '" + token + "' added by 'SLAAC'.");
            } else if (Block.tryAdd(ip)) {
                Server.logDebug("new BLOCK '" + ip + "' added by 'SLAAC'.");
            }
            statusIP = Status.BLOCK;
//        } else if (statusIP != Status.BLOCK && statusIP != Status.IGNORE && statusIP != Status.WHITE
//                && statusName != Status.PROVIDER && statusName != Status.IGNORE && statusName != Status.WHITE
//                && isCusterRED(ip, null, tokenName)) {
//            if (Block.tryAdd(ip)) {
//                Server.logDebug("new BLOCK '" + ip + "' added by '" + tokenName + ";CLUSTER'.");
//            }
//            statusIP = Status.BLOCK;
        } else if (statusIP == Status.DNSBL && (statusName != Status.GREEN && statusName != Status.PROVIDER
                && statusName != Status.IGNORE && statusName != Status.WHITE)) {
            if (Block.tryAdd(ip)) {
                Server.logDebug("new BLOCK '" + ip + "' added by '" + tokenName + ";" + statusIP + "'.");
            }
            statusIP = Status.BLOCK;
        } else if (statusIP == Status.CLOSED && statusName == Status.RED) {
            if (Block.tryAdd(ip)) {
                Server.logDebug("new BLOCK '" + ip + "' added by '" + tokenName + ";" + statusIP + "'.");
            }
            statusIP = Status.BLOCK;
        } else if (statusIP != Status.BLOCK && statusName == Status.INVALID
                && Generic.containsGenericDomain(tokenName)) {
            if (Block.tryAdd(ip)) {
                Server.logDebug("new BLOCK '" + ip + "' added by '" + tokenName + ";" + statusName + "'.");
            }
            statusIP = Status.BLOCK;
        } else if ((statusName == Status.INVALID || statusName == Status.GENERIC)
                && (statusIP == Status.CLOSED || statusIP == Status.RED || statusIP == Status.YELLOW)) {
            if (Block.tryAdd(ip)) {
                Server.logDebug("new BLOCK '" + ip + "' added by '" + tokenName + ";" + statusName + "'.");
            }
            statusIP = Status.BLOCK;
        } else if (statusIP == Status.BLOCK && (statusName == Status.YELLOW || statusName == Status.RED)) {
            if (Block.tryAdd(tokenName)) {
                Server.logDebug("new BLOCK '" + tokenName + "' added by '" + tokenName + ";" + statusName + "'.");
            }
            statusName = Status.BLOCK;
        } else if (statusIP == Status.BLOCK && (statusName == Status.PROVIDER || statusName == Status.IGNORE
                || statusName == Status.WHITE)) {
            String cidr;
            int mask = SubnetIPv4.isValidIPv4(ip) ? 32 : 64;
            if ((cidr = Block.clearCIDR(ip, mask)) != null) {
                Server.logInfo("false positive BLOCK '" + cidr + "' detected by '" + tokenName + ";" + statusName + "'.");
            }
            if (Provider.containsCIDR(ip)) {
                statusIP = Status.PROVIDER;
            } else if (Ignore.containsCIDR(ip)) {
                statusIP = Status.IGNORE;
            } else if (Block.containsDNSBL(ip)) {
                statusIP = Status.DNSBL;
            } else if (hasAccessSMTP(ip) && (response = getResponseSMTP(ip, 25, timeout)) instanceof Status) {
                statusIP = (Status) response;
            } else if (dist == null) {
                statusIP = Status.GREEN;
            } else {
                statusIP = Status.valueOf(dist.getStatus(ip).name());
            }
        } else if (statusIP == Status.DNSBL && (statusName == Status.PROVIDER || statusName == Status.IGNORE
                || statusName == Status.WHITE)) {
            if (hasAccessSMTP(ip) && (response = getResponseSMTP(ip, 25, timeout)) instanceof Status) {
                statusIP = (Status) response;
            } else if (dist == null) {
                statusIP = Status.GREEN;
            } else {
                statusIP = Status.valueOf(dist.getStatus(ip).name());
            }
        }
        builder.append(statusIP);
        builder.append(' ');
        builder.append(tokenName);
        builder.append(' ');
        builder.append(statusName);
        builder.append(' ');
        builder.append(Core.DECIMAL_FORMAT.format(probability));
        builder.append(' ');
        builder.append(dist == null ? "UNDEFINED" : dist.getFrequencyLiteral());
        builder.append(' ');
        if (Subnet.isValidIP(tokenName)) {
            builder.append(Subnet.expandIP(tokenName));
        } else {
            builder.append(Domain.revert(tokenName));
            addCluster(convertHostToMask(tokenName), statusName, dist);
            addCluster(extractTLD(tokenName), statusName, dist);
            addCluster(getOwnerID(tokenName), statusName, dist);
        }
        addCluster(Subnet.normalizeCIDR(ip + (ipv4 ? "/24" : "/56")), statusIP, dist);
    } catch (Exception ex) {
        builder.append("ERROR");
        Server.logError(ex);
    }
}
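In the example above, addLast appends a hostname recovered from the SMTP banner to the tail of the reverse-DNS name list, so it is only evaluated after the PTR records, while getFirst still picks the primary token from the head. A minimal sketch of that tail-append-as-fallback idea, using illustrative hostnames rather than real lookups:

import java.util.LinkedList;

public class AppendFallbackCandidate {
    public static void main(String[] args) {
        // PTR names collected first (illustrative values).
        LinkedList<String> nameList = new LinkedList<String>();
        nameList.add("mail.example.com");
        nameList.add("mx2.example.com");

        // A hostname recovered from an SMTP banner is only a fallback,
        // so it is appended at the tail and examined after the PTR names.
        Object response = "smtp-banner.example.net";
        if (response instanceof String) {
            nameList.addLast((String) response);
        }

        // The primary token is still the first PTR name.
        String tokenName = nameList.isEmpty() ? "ip" : nameList.getFirst();
        System.out.println("primary = " + tokenName + ", candidates = " + nameList);
    }
}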
From source file:hudson.plugins.project_inheritance.projects.InheritanceProject.java
/**
 * This method returns the versions selected for this project and its
 * parents.
 *
 * @return
 */
public Map<String, Long> getAllVersionsFromCurrentState() {
    LinkedList<InheritanceProject> open = new LinkedList<InheritanceProject>();
    Set<String> closed = new HashSet<String>();
    Map<String, Long> out = new HashMap<String, Long>();
    //Adding ourselves as the first node
    open.add(this);
    while (!open.isEmpty()) {
        InheritanceProject ip = open.pop();
        //Fetching the user-requested version for the open node
        Long v = ip.getUserDesiredVersion();
        out.put(ip.getName(), v);
        //Then, adding this node to the closed set
        closed.add(ip.getName());
        //And adding the parent nodes to the open list
        for (AbstractProjectReference apr : ip.getParentReferences()) {
            if (closed.contains(apr.getName())) {
                continue;
            }
            InheritanceProject next = apr.getProject();
            if (next == null) {
                continue;
            }
            open.addLast(next);
        }
    }
    return out;
}
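Here the LinkedList acts as a FIFO work queue for walking the parent graph: pop() takes nodes from the head, addLast() appends newly discovered parents at the tail, and the closed set keeps shared ancestors from being visited twice. A minimal self-contained sketch of the same traversal; the Node class and values are illustrative stand-ins for InheritanceProject:

import java.util.*;

public class VersionWalk {
    // Illustrative stand-in for InheritanceProject: a name, a version, and parent nodes.
    static class Node {
        final String name;
        final long version;
        final List<Node> parents = new ArrayList<Node>();
        Node(String name, long version) { this.name = name; this.version = version; }
    }

    // FIFO traversal: pop() takes from the head, addLast() appends to the tail,
    // and the 'closed' set keeps diamond-shaped parent graphs from being revisited.
    static Map<String, Long> collectVersions(Node start) {
        LinkedList<Node> open = new LinkedList<Node>();
        Set<String> closed = new HashSet<String>();
        Map<String, Long> out = new HashMap<String, Long>();
        open.add(start);
        while (!open.isEmpty()) {
            Node n = open.pop();
            out.put(n.name, n.version);
            closed.add(n.name);
            for (Node parent : n.parents) {
                if (!closed.contains(parent.name)) {
                    open.addLast(parent);
                }
            }
        }
        return out;
    }

    public static void main(String[] args) {
        Node base = new Node("base", 1L);
        Node child = new Node("child", 7L);
        child.parents.add(base);
        System.out.println(collectVersions(child)); // e.g. {child=7, base=1}
    }
}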
From source file:org.apache.hadoop.hbase.util.RegionSplitter.java
static void rollingSplit(String tableName, SplitAlgorithm splitAlgo, Configuration conf)
        throws IOException, InterruptedException {
    final int minOS = conf.getInt("split.outstanding", 2);

    HTable table = new HTable(conf, tableName);

    // max outstanding splits. default == 50% of servers
    final int MAX_OUTSTANDING = Math.max(table.getConnection().getCurrentNrHRS() / 2, minOS);

    Path hbDir = FSUtils.getRootDir(conf);
    Path tableDir = FSUtils.getTableDir(hbDir, table.getName());
    Path splitFile = new Path(tableDir, "_balancedSplit");
    FileSystem fs = FileSystem.get(conf);

    // get a list of daughter regions to create
    LinkedList<Pair<byte[], byte[]>> tmpRegionSet = getSplits(table, splitAlgo);
    LinkedList<Pair<byte[], byte[]>> outstanding = Lists.newLinkedList();
    int splitCount = 0;
    final int origCount = tmpRegionSet.size();

    // all splits must compact & we have 1 compact thread, so 2 split
    // requests to the same RS can stall the outstanding split queue.
    // To fix, group the regions into an RS pool and round-robin through it
    LOG.debug("Bucketing regions by regionserver...");
    TreeMap<String, LinkedList<Pair<byte[], byte[]>>> daughterRegions = Maps.newTreeMap();
    for (Pair<byte[], byte[]> dr : tmpRegionSet) {
        String rsLocation = table.getRegionLocation(dr.getSecond()).getHostnamePort();
        if (!daughterRegions.containsKey(rsLocation)) {
            LinkedList<Pair<byte[], byte[]>> entry = Lists.newLinkedList();
            daughterRegions.put(rsLocation, entry);
        }
        daughterRegions.get(rsLocation).add(dr);
    }
    LOG.debug("Done with bucketing. Split time!");
    long startTime = System.currentTimeMillis();

    // open the split file and modify it as splits finish
    FSDataInputStream tmpIn = fs.open(splitFile);
    byte[] rawData = new byte[tmpIn.available()];
    tmpIn.readFully(rawData);
    tmpIn.close();
    FSDataOutputStream splitOut = fs.create(splitFile);
    splitOut.write(rawData);

    try {
        // *** split code ***
        while (!daughterRegions.isEmpty()) {
            LOG.debug(daughterRegions.size() + " RS have regions to splt.");

            // Get RegionServer : region count mapping
            final TreeMap<ServerName, Integer> rsSizes = Maps.newTreeMap();
            Map<HRegionInfo, ServerName> regionsInfo = table.getRegionLocations();
            for (ServerName rs : regionsInfo.values()) {
                if (rsSizes.containsKey(rs)) {
                    rsSizes.put(rs, rsSizes.get(rs) + 1);
                } else {
                    rsSizes.put(rs, 1);
                }
            }

            // sort the RS by the number of regions they have
            List<String> serversLeft = Lists.newArrayList(daughterRegions.keySet());
            Collections.sort(serversLeft, new Comparator<String>() {
                public int compare(String o1, String o2) {
                    return rsSizes.get(o1).compareTo(rsSizes.get(o2));
                }
            });

            // round-robin through the RS list. Choose the lightest-loaded servers
            // first to keep the master from load-balancing regions as we split.
            for (String rsLoc : serversLeft) {
                Pair<byte[], byte[]> dr = null;

                // find a region in the RS list that hasn't been moved
                LOG.debug("Finding a region on " + rsLoc);
                LinkedList<Pair<byte[], byte[]>> regionList = daughterRegions.get(rsLoc);
                while (!regionList.isEmpty()) {
                    dr = regionList.pop();

                    // get current region info
                    byte[] split = dr.getSecond();
                    HRegionLocation regionLoc = table.getRegionLocation(split);

                    // if this region moved locations
                    String newRs = regionLoc.getHostnamePort();
                    if (newRs.compareTo(rsLoc) != 0) {
                        LOG.debug("Region with " + splitAlgo.rowToStr(split) + " moved to " + newRs
                                + ". Relocating...");
                        // relocate it, don't use it right now
                        if (!daughterRegions.containsKey(newRs)) {
                            LinkedList<Pair<byte[], byte[]>> entry = Lists.newLinkedList();
                            daughterRegions.put(newRs, entry);
                        }
                        daughterRegions.get(newRs).add(dr);
                        dr = null;
                        continue;
                    }

                    // make sure this region wasn't already split
                    byte[] sk = regionLoc.getRegionInfo().getStartKey();
                    if (sk.length != 0) {
                        if (Bytes.equals(split, sk)) {
                            LOG.debug("Region already split on " + splitAlgo.rowToStr(split)
                                    + ". Skipping this region...");
                            ++splitCount;
                            dr = null;
                            continue;
                        }
                        byte[] start = dr.getFirst();
                        Preconditions.checkArgument(Bytes.equals(start, sk),
                                splitAlgo.rowToStr(start) + " != " + splitAlgo.rowToStr(sk));
                    }

                    // passed all checks! found a good region
                    break;
                }
                if (regionList.isEmpty()) {
                    daughterRegions.remove(rsLoc);
                }
                if (dr == null)
                    continue;

                // we have a good region, time to split!
                byte[] split = dr.getSecond();
                LOG.debug("Splitting at " + splitAlgo.rowToStr(split));
                HBaseAdmin admin = new HBaseAdmin(table.getConfiguration());
                admin.split(table.getTableName(), split);

                LinkedList<Pair<byte[], byte[]>> finished = Lists.newLinkedList();
                if (conf.getBoolean("split.verify", true)) {
                    // we need to verify and rate-limit our splits
                    outstanding.addLast(dr);
                    // with too many outstanding splits, wait for some to finish
                    while (outstanding.size() >= MAX_OUTSTANDING) {
                        finished = splitScan(outstanding, table, splitAlgo);
                        if (finished.isEmpty()) {
                            Thread.sleep(30 * 1000);
                        } else {
                            outstanding.removeAll(finished);
                        }
                    }
                } else {
                    finished.add(dr);
                }

                // mark each finished region as successfully split.
                for (Pair<byte[], byte[]> region : finished) {
                    splitOut.writeChars("- " + splitAlgo.rowToStr(region.getFirst()) + " "
                            + splitAlgo.rowToStr(region.getSecond()) + "\n");
                    splitCount++;
                    if (splitCount % 10 == 0) {
                        long tDiff = (System.currentTimeMillis() - startTime) / splitCount;
                        LOG.debug("STATUS UPDATE: " + splitCount + " / " + origCount + ". Avg Time / Split = "
                                + org.apache.hadoop.util.StringUtils.formatTime(tDiff));
                    }
                }
            }
        }
        if (conf.getBoolean("split.verify", true)) {
            while (!outstanding.isEmpty()) {
                LinkedList<Pair<byte[], byte[]>> finished = splitScan(outstanding, table, splitAlgo);
                if (finished.isEmpty()) {
                    Thread.sleep(30 * 1000);
                } else {
                    outstanding.removeAll(finished);
                    for (Pair<byte[], byte[]> region : finished) {
                        splitOut.writeChars("- " + splitAlgo.rowToStr(region.getFirst()) + " "
                                + splitAlgo.rowToStr(region.getSecond()) + "\n");
                    }
                }
            }
        }
        LOG.debug("All regions have been successfully split!");
    } finally {
        long tDiff = System.currentTimeMillis() - startTime;
        LOG.debug("TOTAL TIME = " + org.apache.hadoop.util.StringUtils.formatTime(tDiff));
        LOG.debug("Splits = " + splitCount);
        LOG.debug("Avg Time / Split = " + org.apache.hadoop.util.StringUtils.formatTime(tDiff / splitCount));
        splitOut.close();
        if (table != null) {
            table.close();
        }
    }
    fs.delete(splitFile, false);
}
From source file:com.funambol.foundation.items.dao.PIMCalendarDAO.java
/**
 * Retrieves the UID list of the calendars considered to be "twins" of a
 * given calendar.
 *
 * @param c the Calendar object representing the calendar whose twins
 *          need be found. In the present implementation, only the following
 *          data matter:
 *          <BR>for events <UL><LI>date start<LI>date end<LI>subject</UL>
 *          for tasks <UL><LI>date end<LI>subject</UL>
 * @throws DAOException
 * @return a List of UIDs (as String objects) that may be empty but not null
 */
public List getTwinItems(Calendar c) throws DAOException {
    if (log.isTraceEnabled()) {
        log.trace("PIMCalendarDAO getTwinItems begin");
    }

    LinkedList<String> twins = new LinkedList<String>();
    Connection con = null;
    PreparedStatement ps = null;
    ResultSet rs = null;

    if (!isTwinSearchAppliableOn(c)) {
        if (log.isTraceEnabled()) {
            log.trace("Item with no dtStart, dtEnd, summary: twin search skipped.");
        }
        return twins;
    }

    try {
        // Looks up the data source when the first connection is created
        con = getUserDataSource().getRoutedConnection(userId);
        con.setReadOnly(true);

        Date dtStart;
        Date dtEnd;
        Date dueTomorrowNoon = null;
        Date dueYesterdayNoon = null;
        dtStart = getDateFromString(c.getCalendarContent().isAllDay(),
                Property.stringFrom(c.getCalendarContent().getDtStart()), "000000");
        dtEnd = getDateFromString(c.getCalendarContent().isAllDay(),
                Property.stringFrom(c.getCalendarContent().getDtEnd()), "235900");

        if ((dtEnd != null) && (c.getCalendarContent() instanceof Task)) {
            java.util.Calendar noon = new GregorianCalendar(TimeZone.getTimeZone("UTC"));
            noon.setTime(dtEnd);
            noon.set(java.util.Calendar.HOUR_OF_DAY, 12);
            noon.set(java.util.Calendar.MINUTE, 0);
            noon.set(java.util.Calendar.MILLISECOND, 0);
            noon.add(java.util.Calendar.DATE, +1);
            dueTomorrowNoon = noon.getTime();
            noon.add(java.util.Calendar.DATE, -2); // go back and another -1
            dueYesterdayNoon = noon.getTime();
        }

        StringBuffer sqlGetCalendarTwinList = new StringBuffer(SQL_GET_FNBL_PIM_CALENDAR_ID_LIST_BY_USER);

        String subject = Property.stringFrom(c.getCalendarContent().getSummary(), true); // Empty implies null
        if ("null".equals(subject)) {
            subject = null;
        }
        if (subject == null) {
            sqlGetCalendarTwinList.append(SQL_AND_NO_SUBJECT_IS_SET);
        } else {
            sqlGetCalendarTwinList.append(SQL_AND_SUBJECT_EQUALS_QUESTIONMARK);
        }

        if (c.getCalendarContent() instanceof Event) {
            if (dtStart == null) {
                sqlGetCalendarTwinList.append(SQL_AND_NO_DSTART_IS_SET);
            } else {
                sqlGetCalendarTwinList.append(SQL_AND_DSTART_EQUALS_QUESTIONMARK);
            }
        }

        if (dtEnd == null) {
            // In method updateItems() while storing the Event in the db, if
            // the End Date is empty it is filled with the Start Date.
            // Filling the empty EndDate with the StartDate is done only for
            // Events and not for Tasks.
            // See "Fix for Siemens S56 end date issue" in method
            // updateItems().
            // So in order to find the twins, if the incoming Event has an
            // empty EndDate we seek into the db for Events with EndDate
            // equal to the StartDate value.
            if (c.getCalendarContent() instanceof Task) {
                sqlGetCalendarTwinList.append(SQL_AND_NO_DEND_IS_SET);
            } else {
                sqlGetCalendarTwinList.append(SQL_AND_DEND_EQUALS_QUESTIONMARK);
            }
        } else {
            if (c.getCalendarContent() instanceof Task) {
                sqlGetCalendarTwinList.append(SQL_AND_DEND_IN_INTERVAL);
            } else {
                sqlGetCalendarTwinList.append(SQL_AND_DEND_EQUALS_QUESTIONMARK);
            }
        }

        if (c.getCalendarContent() instanceof Event) {
            sqlGetCalendarTwinList.append(SQL_FILTER_BY_TYPE[CALENDAR_EVENT_TYPE]);
        } else {
            sqlGetCalendarTwinList.append(SQL_FILTER_BY_TYPE[CALENDAR_TASK_TYPE]);
        }

        //
        // If funambol is not in the debug mode it is not possible to print
        // the calendar info because it contains sensitive data.
        //
        if (Configuration.getConfiguration().isDebugMode()) {
            if (log.isTraceEnabled()) {
                StringBuilder sb = new StringBuilder(100);
                sb.append("Looking for items having: ");
                if (subject == null || subject.length() == 0) {
                    sb.append("\n> subject: <N/A>");
                } else {
                    sb.append("\n> subject: '").append(subject).append('\'');
                }
                if (c.getCalendarContent() instanceof Event) {
                    if (dtStart == null) {
                        sb.append("\n> start date: <N/A>");
                    } else {
                        sb.append("\n> start date: ").append(dtStart);
                    }
                    if (dtEnd == null) {
                        sb.append("\n> end date: <N/A>");
                    } else {
                        sb.append("\n> end date: ").append(dtEnd);
                    }
                } else { // It's a task
                    if (dtEnd == null) {
                        sb.append("\n> due date: <N/A>");
                    } else {
                        sb.append("\n> due date: between ").append(dueYesterdayNoon)
                                .append("\n> and ").append(dueTomorrowNoon)
                                .append(",\n> possibly ").append(dtEnd);
                    }
                }
                log.trace(sb.toString());
            }
        }

        sqlGetCalendarTwinList.append(SQL_ORDER_BY_ID);

        ps = con.prepareStatement(sqlGetCalendarTwinList.toString());

        int k = 1;
        ps.setString(k++, userId);

        if (subject != null) {
            ps.setString(k++, subject.toLowerCase(Locale.ENGLISH));
        }
        if (dtStart != null) {
            if (c.getCalendarContent() instanceof Event) {
                ps.setTimestamp(k++, new Timestamp(dtStart.getTime()));
            }
        }
        if (dtEnd != null) {
            if (c.getCalendarContent() instanceof Task) {
                ps.setTimestamp(k++, new Timestamp(dueYesterdayNoon.getTime()));
                ps.setTimestamp(k++, new Timestamp(dueTomorrowNoon.getTime()));
            } else {
                ps.setTimestamp(k++, new Timestamp(dtEnd.getTime()));
            }
        } else {
            // In method updateItems() while storing the Event in the db, if
            // the End Date is empty it is filled with the Start Date.
            // Filling the empty EndDate with the StartDate is done only for
            // Events and not for Tasks.
            // See "Fix for Siemens S56 end date issue" in method
            // updateItems().
            // So in order to find the twins, if the incoming Event has an
            // empty EndDate we seek into the db for Events with EndDate
            // equal to the StartDate value.
            if (c.getCalendarContent() instanceof Event) {
                ps.setTimestamp(k++, new Timestamp(dtStart.getTime()));
            }
        }

        rs = ps.executeQuery();

        long twinId;
        Timestamp twinDueDate;
        while (rs.next()) {
            if (c.getCalendarContent() instanceof Event) {
                twinId = rs.getLong(1); // dend is not relevant in this case
                if (log.isTraceEnabled()) {
                    log.trace("Twin event found: " + twinId);
                }
                twins.add(Long.toString(twinId));
            } else { // it's a Task
                twinId = rs.getLong(1);
                twinDueDate = rs.getTimestamp(2);
                if (log.isTraceEnabled()) {
                    log.trace("Twin task found: " + twinId);
                }
                if ((dtEnd != null) && (twinDueDate != null) && twinDueDate.getTime() == dtEnd.getTime()) {
                    twins.addFirst(Long.toString(twinId));
                    if (log.isTraceEnabled()) {
                        log.trace("Item " + twinId + " is an exact due-date match.");
                    }
                } else {
                    twins.addLast(Long.toString(twinId));
                }
            }
        }
    } catch (Exception e) {
        throw new DAOException("Error retrieving twin. ", e);
    } finally {
        DBTools.close(con, ps, rs);
    }

    if (log.isTraceEnabled()) {
        log.trace("PIMCalendarDAO getTwinItems end");
    }

    return twins;
}