List of usage examples for java.util TreeSet size
public int size()
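size() returns the number of elements in this set (its cardinality). Before the project examples below, a minimal self-contained sketch; the fruit strings are placeholders:

import java.util.TreeSet;

public class TreeSetSizeDemo {
    public static void main(String[] args) {
        TreeSet<String> set = new TreeSet<String>();
        set.add("apple");
        set.add("banana");
        set.add("apple"); // duplicate, ignored by the set
        // size() counts distinct elements, so this prints 2
        System.out.println(set.size());
    }
}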
From source file: org.commoncrawl.service.listcrawler.CrawlList.java

/**
 * Initialize a new CrawlList object from a given source file of urls
 *
 * @param manager - reference to the crawl history log manager
 * @param sourceURLFile - the file containing the list of urls that we should add to this list ...
 * @throws IOException
 */
public CrawlList(CrawlHistoryStorage manager, long listId, File sourceURLFile, int refreshInterval)
        throws IOException {
    _manager = manager;
    _listState = LoadState.REALLY_LOADING;
    // initialize a new list id
    _listId = listId;
    LOG.info("*** LIST:" + getListId() + " LOADING FROM SOURCE FILE:" + sourceURLFile.getAbsolutePath());
    // establish file names
    initializeListFileNames();
    sourceURLFile.renameTo(_listURLDataFile);
    FileInputStream urlInputStream = new FileInputStream(_listURLDataFile);
    try {
        // set we will use to hold all fingerprints generated
        TreeSet<URLFP> urlSet = new TreeSet<URLFP>();
        // create temp files ...
        File spillOutputFile = File.createTempFile("spillOut", Long.toString(_listId));
        // create mergesortspillwriter
        SequenceFileSpillWriter<URLFP, ProxyCrawlHistoryItem> spillwriter = new SequenceFileSpillWriter<URLFP, ProxyCrawlHistoryItem>(
                FileSystem.getLocal(CrawlEnvironment.getHadoopConfig()), CrawlEnvironment.getHadoopConfig(),
                new Path(spillOutputFile.getAbsolutePath()), URLFP.class, ProxyCrawlHistoryItem.class, null,
                false);
        try {
            MergeSortSpillWriter<URLFP, ProxyCrawlHistoryItem> merger = new MergeSortSpillWriter<URLFP, ProxyCrawlHistoryItem>(
                    CrawlEnvironment.getHadoopConfig(), spillwriter,
                    FileSystem.getLocal(CrawlEnvironment.getHadoopConfig()),
                    new Path(manager.getLocalDataDir().getAbsolutePath()), null,
                    new RawKeyValueComparator<URLFP, ProxyCrawlHistoryItem>() {

                        DataInputBuffer _key1Buffer = new DataInputBuffer();
                        DataInputBuffer _key2Buffer = new DataInputBuffer();

                        @Override
                        public int compareRaw(byte[] key1Data, int key1Offset, int key1Length, byte[] key2Data,
                                int key2Offset, int key2Length, byte[] value1Data, int value1Offset,
                                int value1Length, byte[] value2Data, int value2Offset, int value2Length)
                                throws IOException {
                            _key1Buffer.reset(key1Data, key1Offset, key1Length);
                            _key2Buffer.reset(key2Data, key2Offset, key2Length);

                            _key1Buffer.skip(2); // skip version, and 1 byte id
                            _key2Buffer.skip(2); // skip version, and 1 byte id

                            int domainHash1 = WritableUtils.readVInt(_key1Buffer);
                            int domainHash2 = WritableUtils.readVInt(_key2Buffer);

                            _key1Buffer.skip(1); // skip 1 byte id
                            _key2Buffer.skip(1); // skip 1 byte id

                            long fingerprint1 = WritableUtils.readVLong(_key1Buffer);
                            long fingerprint2 = WritableUtils.readVLong(_key2Buffer);

                            int result = ((Integer) domainHash1).compareTo(domainHash2);
                            if (result == 0) {
                                result = ((Long) fingerprint1).compareTo(fingerprint2);
                            }
                            return result;
                        }

                        @Override
                        public int compare(URLFP key1, ProxyCrawlHistoryItem value1, URLFP key2,
                                ProxyCrawlHistoryItem value2) {
                            return key1.compareTo(key2);
                        }
                    }, URLFP.class, ProxyCrawlHistoryItem.class, false, null);
            try {
                LOG.info("*** LIST:" + getListId() + " Starting Scan of URLS In List");
                BufferedReader reader = new BufferedReader(
                        new InputStreamReader(urlInputStream, Charset.forName("UTF-8")));

                String line = null;
                int lineNumber = 0;
                ProxyCrawlHistoryItem item = new ProxyCrawlHistoryItem();
                while ((line = reader.readLine()) != null) {
                    ++lineNumber;
                    if (line.length() != 0 && !line.startsWith("#")) {
                        URLFP fingerprint = URLUtils.getURLFPFromURL(line, true);
                        if (fingerprint != null) {
                            if (!urlSet.contains(fingerprint)) {
                                // and add fingerprint to set
                                urlSet.add(fingerprint);
                                // initialize item
                                item.clear();
                                item.setOriginalURL(line);
                                // and spill to merger / sorter ..
                                merger.spillRecord(fingerprint, item);
                            }
                        } else {
                            LOG.error("*** LIST:" + getListId() + " Invalid URL Encountered at Line:"
                                    + lineNumber + " URL" + line);
                        }
                    }
                }
                LOG.info("*** LIST:" + getListId() + " Completed Scan of:" + urlSet.size() + " URLS");
            } finally {
                merger.close();
            }
        } finally {
            if (spillwriter != null)
                spillwriter.close();
        }
        LOG.info("*** LIST:" + getListId() + " Generating BloomFilter for:" + urlSet.size() + " keys");
        // generate bloom filter ...
        _bloomFilter = new URLFPBloomFilter(urlSet.size(), 7, 10);
        for (URLFP fingerprint : urlSet) {
            _bloomFilter.add(fingerprint);
        }
        LOG.info("*** LIST:" + getListId() + " Serializing BloomFilter");
        // serialize it
        FileOutputStream bloomFilterStream = new FileOutputStream(_bloomFilterData);
        try {
            _bloomFilter.serialize(bloomFilterStream);
        } finally {
            bloomFilterStream.flush();
            bloomFilterStream.close();
        }
        LOG.info("*** LIST:" + getListId() + " Starting Read of Merged Sequence File:" + spillOutputFile);
        // now initialize value map and string maps based on output sequence file ...
        SequenceFile.Reader reader = new SequenceFile.Reader(
                FileSystem.getLocal(CrawlEnvironment.getHadoopConfig()),
                new Path(spillOutputFile.getAbsolutePath()), CrawlEnvironment.getHadoopConfig());

        LOG.info("*** LIST:" + getListId() + " PRE-ALLOCATING FIXED DATA BUFFER OF SIZE:"
                + (urlSet.size() * OnDiskCrawlHistoryItem.ON_DISK_SIZE));
        // OK, Allocate room for fixed data file upfront
        DataOutputBuffer valueStream = new DataOutputBuffer(
                urlSet.size() * OnDiskCrawlHistoryItem.ON_DISK_SIZE);
        LOG.info("*** LIST:" + getListId() + " ALLOCATION SUCCEEDED");

        try {
            //DataOutputStream valueStream = new DataOutputStream(new FileOutputStream(_fixedDataFile));
            RandomAccessFile stringsStream = new RandomAccessFile(_variableDataFile, "rw");
            try {
                URLFP urlFP = new URLFP();
                ProxyCrawlHistoryItem item = new ProxyCrawlHistoryItem();
                // read fingerprints ...
                while (reader.next(urlFP, item)) {
                    // write out fixed data structure and strings
                    writeInitialOnDiskItem(urlFP, item, valueStream, stringsStream);
                }
            } finally {
                //valueStream.flush();
                //valueStream.close();
                stringsStream.close();
            }
        } finally {
            reader.close();
        }
        LOG.info("*** LIST:" + getListId() + " Finished Writing Initial Values to Disk");
        LOG.info("*** LIST:" + getListId() + " FIXED DATA BUFFER OF SIZE:" + valueStream.getLength()
                + " EXPECTED SIZE:" + (urlSet.size() * OnDiskCrawlHistoryItem.ON_DISK_SIZE));
        if (valueStream.getLength() != (urlSet.size() * OnDiskCrawlHistoryItem.ON_DISK_SIZE)) {
            throw new IOException("Final FixedItemData Buffer Size:" + valueStream.getLength()
                    + " != URLSetSize:" + (urlSet.size() * OnDiskCrawlHistoryItem.ON_DISK_SIZE));
        }
        // initialize temp data buffer variables
        _tempFixedDataBuffer = valueStream.getData();
        _tempFixedDataBufferSize = valueStream.getLength();
        // update metadata
        _metadata.setRefreshInterval(refreshInterval);
        _metadata.setUrlCount(urlSet.size());
        // setup version
        _metadata.setVersion(1);
        // and write to disk
        writeMetadataToDisk();
        // mark state as loaded ...
        _listState = LoadState.LOADED;

        LOG.info("*** LIST:" + getListId() + " SYNCING");
        // reconcile with history log
        _manager.syncList(this.getListId(), urlSet, this);
        LOG.info("*** LIST:" + getListId() + " SYNC COMPLETE");
        // write metadata to disk again
        writeMetadataToDisk();

        LOG.info("*** LIST:" + getListId() + " FLUSHING FIXED DATA");
        // and finally flush fixed data to disk
        FileOutputStream finalDataStream = new FileOutputStream(_fixedDataFile);
        try {
            synchronized (this) {
                int blockSize = 1 << 20;
                long bytesCopied = 0;
                for (int offset = 0; offset < _tempFixedDataBufferSize; offset += blockSize) {
                    int bytesToCopy = Math.min(blockSize, _tempFixedDataBufferSize - offset);
                    finalDataStream.write(_tempFixedDataBuffer, offset, bytesToCopy);
                    bytesCopied += bytesToCopy;
                }
                // validate bytes copied
                if (bytesCopied != _tempFixedDataBufferSize) {
                    throw new IOException("Buffer Size:" + _tempFixedDataBufferSize
                            + " Does not Match BytesCopied:" + bytesCopied);
                }
                // ok release the buffer
                _tempFixedDataBuffer = null;
                _tempFixedDataBufferSize = 0;
                LOG.info("*** LIST:" + getListId() + " FIXED DATA FLUSH COMPLETE");
            }
        } finally {
            finalDataStream.flush();
            finalDataStream.close();
        }
        // load sub domain metadata from disk ...
        loadSubDomainMetadataFromDisk();
    } catch (IOException e) {
        LOG.error("*** LIST:" + getListId() + " Crawl List Initialization Failed With Exception:"
                + CCStringUtils.stringifyException(e));
        _fixedDataFile.delete();
        _variableDataFile.delete();
        _bloomFilterData.delete();
        _listState = LoadState.ERROR;
        throw e;
    } finally {
        urlInputStream.close();
    }
}
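The example above leans on size() twice: to dimension the bloom filter and to pre-allocate the fixed-record buffer (urlSet.size() * ON_DISK_SIZE) after the TreeSet has deduplicated the URLs. A stripped-down sketch of that pattern, not the CrawlList implementation; RECORD_SIZE and the hashCode-based fingerprints are hypothetical stand-ins for the OnDiskCrawlHistoryItem machinery:

import java.util.TreeSet;

public class PreSizedBuffer {
    static final int RECORD_SIZE = 64; // hypothetical fixed on-disk record size

    public static void main(String[] args) {
        TreeSet<Long> fingerprints = new TreeSet<Long>();
        for (String url : new String[] { "a.com", "b.com", "a.com" }) {
            fingerprints.add((long) url.hashCode()); // duplicates collapse in the set
        }
        // after dedup, size() gives the exact record count,
        // so the buffer can be allocated once, up front
        byte[] fixedData = new byte[fingerprints.size() * RECORD_SIZE];
        System.out.println(fixedData.length); // 2 records * 64 bytes = 128
    }
}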
From source file: net.spfbl.spf.SPF.java

/**
 * Processes the query and returns the result.
 *
 * @param query the query expression.
 * @return the result of the processing.
 */
protected static String processSPF(InetAddress ipAddress, Client client, User user, String query,
        LinkedList<User> userList) {
    try {
        String result = "";
        if (query.length() == 0) {
            return "INVALID QUERY\n";
        } else {
            String origin;
            if (client == null) {
                origin = ipAddress.getHostAddress();
            } else if (client.hasEmail()) {
                origin = ipAddress.getHostAddress() + " " + client.getDomain() + " " + client.getEmail();
            } else {
                origin = ipAddress.getHostAddress() + " " + client.getDomain();
            }
            StringTokenizer tokenizer = new StringTokenizer(query, " ");
            String firstToken = tokenizer.nextToken();
            if (firstToken.equals("SPAM") && tokenizer.countTokens() == 1) {
                String ticket = tokenizer.nextToken();
                TreeSet<String> tokenSet = addComplainURLSafe(origin, ticket, null);
                if (tokenSet == null) {
                    result = "DUPLICATE COMPLAIN\n";
                } else {
                    String userEmail;
                    try {
                        userEmail = SPF.getClientURLSafe(ticket);
                    } catch (Exception ex) {
                        userEmail = client == null ? null : client.getEmail();
                    }
                    user = User.get(userEmail);
                    if (user != null) {
                        userList.add(user);
                    }
                    String recipient;
                    try {
                        recipient = SPF.getRecipientURLSafe(ticket);
                    } catch (ProcessException ex) {
                        recipient = null;
                    }
                    result = "OK " + tokenSet + (recipient == null ? "" : " >" + recipient) + "\n";
                }
            } else if (firstToken.equals("ABUSE") && tokenizer.hasMoreTokens()) {
                String token = tokenizer.nextToken();
                if (token.startsWith("In-Reply-To:") && tokenizer.countTokens() == 1) {
                    token = tokenizer.nextToken();
                    if (token.startsWith("From:")) {
                        int index = token.indexOf(':') + 1;
                        String recipient = token.substring(index);
                        User recipientUser = User.get(recipient);
                        if (recipientUser == null) {
                            // If the query originates from a recipient with a registered postmaster,
                            // treat the postmaster itself as the user of the query.
                            index = recipient.indexOf('@');
                            String postmaster = "postmaster" + recipient.substring(index);
                            User postmasterUser = User.get(postmaster);
                            if (postmasterUser != null) {
                                userList.add(user = postmasterUser);
                            }
                        } else {
                            userList.add(user = recipientUser);
                        }
                        index = query.indexOf(':') + 1;
                        String messageID = query.substring(index);
                        result = "INVALID ID\n";
                        index = messageID.indexOf('<');
                        if (index >= 0) {
                            messageID = messageID.substring(index + 1);
                            index = messageID.indexOf('>');
                            if (index > 0) {
                                messageID = messageID.substring(0, index);
                                result = user.blockByMessageID(messageID) + '\n';
                            }
                        }
                    } else {
                        result = "INVALID FROM\n";
                    }
                } else {
                    result = "INVALID COMMAND\n";
                }
            } else if (firstToken.equals("HOLDING") && tokenizer.countTokens() == 1) {
                String ticket = tokenizer.nextToken();
                result = getHoldStatus(client, ticket, userList) + '\n';
            } else if (firstToken.equals("LINK") && tokenizer.hasMoreTokens()) {
                String ticketSet = tokenizer.nextToken();
                TreeSet<String> linkSet = new TreeSet<String>();
                while (tokenizer.hasMoreTokens()) {
                    linkSet.add(tokenizer.nextToken());
                }
                StringTokenizer tokenizerTicket = new StringTokenizer(ticketSet, ";");
                String unblockURL = null;
                boolean blocked = false;
                Action action = null;
                while (tokenizerTicket.hasMoreTokens()) {
                    String ticket = tokenizerTicket.nextToken();
                    String userEmail;
                    try {
                        userEmail = SPF.getClientURLSafe(ticket);
                    } catch (Exception ex) {
                        userEmail = client == null ? null : client.getEmail();
                    }
                    if ((user = User.get(userEmail)) != null) {
                        userList.add(user);
                        long dateTicket = SPF.getDateTicket(ticket);
                        User.Query queryTicket = user.getQuery(dateTicket);
                        if (queryTicket != null) {
                            if (queryTicket.setLinkSet(linkSet)) {
                                SPF.setSpam(dateTicket, queryTicket.getTokenSet());
                                if (!queryTicket.isWhite() && queryTicket.blockSender(dateTicket)) {
                                    Server.logDebug(
                                            "new BLOCK '" + queryTicket.getBlockSender() + "' added by LINK.");
                                }
                                action = client == null ? Action.REJECT : client.getActionBLOCK();
                                unblockURL = queryTicket.getUnblockURL();
                                blocked = true;
                            } else if (queryTicket.isAnyLinkRED()) {
                                action = client == null ? Action.FLAG : client.getActionRED();
                            }
                            if (action == Action.HOLD) {
                                queryTicket.setResult("HOLD");
                            } else if (action == Action.FLAG) {
                                queryTicket.setResult("FLAG");
                            } else if (action == Action.REJECT) {
                                queryTicket.setResult("REJECT");
                            }
                            User.storeDB(dateTicket, queryTicket);
                        }
                    }
                }
                if (unblockURL != null) {
                    result = "BLOCKED " + unblockURL + "\n";
                } else if (blocked) {
                    result = "BLOCKED\n";
                } else if (action == Action.HOLD) {
                    result = "HOLD\n";
                } else if (action == Action.FLAG) {
                    result = "FLAG\n";
                } else if (action == Action.REJECT) {
                    result = "REJECT\n";
                } else {
                    result = "CLEAR\n";
                }
            } else if (firstToken.equals("MALWARE") && tokenizer.hasMoreTokens()) {
                String ticketSet = tokenizer.nextToken();
                StringBuilder nameBuilder = new StringBuilder();
                while (tokenizer.hasMoreTokens()) {
                    if (nameBuilder.length() > 0) {
                        nameBuilder.append(' ');
                    }
                    nameBuilder.append(tokenizer.nextToken());
                }
                StringBuilder resultBuilder = new StringBuilder();
                StringTokenizer ticketTokenizer = new StringTokenizer(ticketSet, ";");
                while (ticketTokenizer.hasMoreTokens()) {
                    String ticket = ticketTokenizer.nextToken();
                    TreeSet<String> tokenSet = addComplainURLSafe(origin, ticket, "MALWARE");
                    if (tokenSet == null) {
                        resultBuilder.append("DUPLICATE COMPLAIN\n");
                    } else {
                        // Process the complaint.
                        String userEmail;
                        try {
                            userEmail = SPF.getClientURLSafe(ticket);
                        } catch (Exception ex) {
                            userEmail = client == null ? null : client.getEmail();
                        }
                        user = User.get(userEmail);
                        if (user != null) {
                            userList.add(user);
                            long dateTicket = getDateTicket(ticket);
                            User.Query userQuery = user.getQuery(dateTicket);
                            if (userQuery != null && userQuery.setMalware(nameBuilder.toString())) {
                                User.storeDB(dateTicket, userQuery);
                            }
                        }
                        String recipient;
                        try {
                            recipient = SPF.getRecipientURLSafe(ticket);
                        } catch (ProcessException ex) {
                            recipient = null;
                        }
                        // Automatically block all tokens
                        // with yellow or red reputation.
                        for (String token : tokenSet) {
                            String block;
                            Status status = SPF.getStatus(token);
                            if (status == Status.RED && (block = Block.add(token)) != null) {
                                Server.logDebug(
                                        "new BLOCK '" + block + "' added by '" + recipient + ";MALWARE'.");
                                Peer.sendBlockToAll(block);
                            }
                            if (status != Status.GREEN && !Subnet.isValidIP(token)
                                    && (block = Block.addIfNotNull(user, token)) != null) {
                                Server.logDebug(
                                        "new BLOCK '" + block + "' added by '" + recipient + ";MALWARE'.");
                            }
                        }
                        resultBuilder.append("OK ");
                        resultBuilder.append(tokenSet);
                        resultBuilder.append(recipient == null ? "" : " >" + recipient);
                        resultBuilder.append("\n");
                    }
                }
                result = resultBuilder.toString();
            } else if (firstToken.equals("HEADER") && tokenizer.hasMoreTokens()) {
                String ticketSet = tokenizer.nextToken();
                String key = null;
                String from = null;
                String replyto = null;
                String messageID = null;
                String unsubscribe = null;
                String subject = null;
                while (tokenizer.hasMoreTokens()) {
                    String token = tokenizer.nextToken();
                    if (token.startsWith("From:")) {
                        key = "From";
                        int index = token.indexOf(':');
                        from = token.substring(index + 1);
                    } else if (token.startsWith("ReplyTo:") || token.startsWith("Reply-To:")) {
                        key = "Reply-To";
                        int index = token.indexOf(':');
                        replyto = token.substring(index + 1);
                    } else if (token.startsWith("Message-ID:")) {
                        key = "Message-ID";
                        int index = token.indexOf(':');
                        messageID = token.substring(index + 1);
                    } else if (token.startsWith("List-Unsubscribe:")) {
                        key = "List-Unsubscribe";
                        int index = token.indexOf(':');
                        unsubscribe = token.substring(index + 1);
                    } else if (token.startsWith("Subject:")) {
                        key = "Subject";
                        int index = token.indexOf(':');
                        subject = token.substring(index + 1);
                    } else if (key == null) {
                        from = null;
                        replyto = null;
                        unsubscribe = null;
                        subject = null;
                        break;
                    } else if (key.equals("From")) {
                        from += ' ' + token;
                    } else if (key.equals("Reply-To")) {
                        replyto += ' ' + token;
                    } else if (key.equals("Message-ID")) {
                        messageID += ' ' + token;
                    } else if (key.equals("List-Unsubscribe")) {
                        unsubscribe += ' ' + token;
                    } else if (key.equals("Subject")) {
                        subject += ' ' + token;
                    }
                }
                if ((from == null || from.length() == 0) && (replyto == null || replyto.length() == 0)
                        && (messageID == null || messageID.length() == 0)
                        && (unsubscribe == null || unsubscribe.length() == 0)
                        && (subject == null || subject.length() == 0)) {
                    result = "INVALID COMMAND\n";
                } else {
                    boolean whitelisted = false;
                    boolean blocklisted = false;
                    TreeSet<String> unblockURLSet = new TreeSet<String>();
                    StringTokenizer ticketTokenizer = new StringTokenizer(ticketSet, ";");
                    int n = ticketTokenizer.countTokens();
                    ArrayList<User.Query> queryList = new ArrayList<User.Query>(n);
                    while (ticketTokenizer.hasMoreTokens()) {
                        String ticket = ticketTokenizer.nextToken();
                        String userEmail;
                        try {
                            userEmail = SPF.getClientURLSafe(ticket);
                        } catch (Exception ex) {
                            userEmail = client == null ? null : client.getEmail();
                        }
                        if ((user = User.get(userEmail)) != null) {
                            userList.add(user);
                            long dateTicket = SPF.getDateTicket(ticket);
                            User.Query queryTicket = user.getQuery(dateTicket);
                            if (queryTicket != null) {
                                queryList.add(queryTicket);
                                String resultLocal = queryTicket.setHeader(from, replyto, subject, messageID,
                                        unsubscribe);
                                if ("WHITE".equals(resultLocal)) {
                                    whitelisted = true;
                                } else if ("BLOCK".equals(resultLocal)) {
                                    blocklisted = true;
                                    String url = queryTicket.getUnblockURL();
                                    if (url != null) {
                                        unblockURLSet.add(url);
                                    }
                                }
                                User.storeDB(dateTicket, queryTicket);
                            }
                        }
                    }
                    if (whitelisted) {
                        for (User.Query queryTicket : queryList) {
                            queryTicket.setResult("WHITE");
                        }
                        result = "WHITE\n";
                    } else if (blocklisted) {
                        for (User.Query queryTicket : queryList) {
                            queryTicket.setResult("BLOCK");
                        }
                        if (unblockURLSet.size() == 1) {
                            result = "BLOCKED " + unblockURLSet.first() + "\n";
                        } else {
                            result = "BLOCKED\n";
                        }
                    } else {
                        result = "CLEAR\n";
                    }
                }
            } else if (firstToken.equals("HAM") && tokenizer.countTokens() == 1) {
                String ticket = tokenizer.nextToken();
                TreeSet<String> tokenSet = deleteComplainURLSafe(origin, ticket);
                if (tokenSet == null) {
                    result = "ALREADY REMOVED\n";
                } else {
                    String recipient;
                    try {
                        recipient = SPF.getRecipientURLSafe(ticket);
                    } catch (ProcessException ex) {
                        recipient = null;
                    }
                    result = "OK " + tokenSet + (recipient == null ? "" : " >" + recipient) + "\n";
                }
            } else if (firstToken.equals("REFRESH") && tokenizer.countTokens() == 1) {
                String address = tokenizer.nextToken();
                try {
                    if (CacheSPF.refresh(address, true)) {
                        result = "UPDATED\n";
                    } else {
                        result = "NOT LOADED\n";
                    }
                } catch (ProcessException ex) {
                    result = ex.getMessage() + "\n";
                }
            } else if ((firstToken.equals("SPF") && tokenizer.countTokens() >= 4)
                    || tokenizer.countTokens() == 2 || tokenizer.countTokens() == 1
                    || (firstToken.equals("CHECK") && tokenizer.countTokens() == 4)
                    || (firstToken.equals("CHECK") && tokenizer.countTokens() == 3)
                    || (firstToken.equals("CHECK") && tokenizer.countTokens() == 2)) {
                try {
                    String ip;
                    String sender;
                    String helo;
                    String recipient;
                    String origem;
                    String fluxo;
                    if (firstToken.equals("SPF")) {
                        // New query format.
                        ip = tokenizer.nextToken();
                        sender = tokenizer.nextToken();
                        while (!sender.endsWith("'") && tokenizer.hasMoreTokens()) {
                            sender += " " + tokenizer.nextToken();
                        }
                        helo = tokenizer.hasMoreTokens() ? tokenizer.nextToken() : "''";
                        recipient = tokenizer.hasMoreTokens() ? tokenizer.nextToken() : "''";
                        ip = ip.substring(1, ip.length() - 1);
                        sender = sender.substring(1, sender.length() - 1);
                        helo = helo.substring(1, helo.length() - 1);
                        if (recipient.equals("'")) {
                            recipient = tokenizer.hasMoreTokens() ? tokenizer.nextToken() : "";
                            if (recipient.endsWith("'")) {
                                recipient = recipient.substring(0, recipient.length() - 1);
                            }
                        } else {
                            recipient = recipient.substring(1, recipient.length() - 1);
                        }
                        if (sender.length() == 0) {
                            sender = null;
                        } else {
                            sender = sender.toLowerCase();
                        }
                        recipient = recipient.toLowerCase();
                        recipient = recipient.replace("\"", "");
                    } else if (firstToken.equals("CHECK") && tokenizer.countTokens() == 4) {
                        ip = tokenizer.nextToken().toLowerCase();
                        sender = tokenizer.nextToken().toLowerCase();
                        helo = tokenizer.nextToken();
                        recipient = tokenizer.nextToken().toLowerCase();
                        if (ip.startsWith("'") && ip.endsWith("'")) {
                            ip = ip.substring(1, ip.length() - 1);
                        }
                        if (sender.startsWith("'") && sender.endsWith("'")) {
                            sender = sender.substring(1, sender.length() - 1);
                        }
                        if (helo.startsWith("'") && helo.endsWith("'")) {
                            helo = helo.substring(1, helo.length() - 1);
                        }
                        if (recipient.startsWith("'") && recipient.endsWith("'")) {
                            recipient = recipient.substring(1, recipient.length() - 1);
                        }
                        if (ip.length() == 0) {
                            ip = null;
                        }
                        if (sender.length() == 0) {
                            sender = null;
                        }
                        if (!Domain.isHostname(helo)) {
                            helo = null;
                        }
                        if (recipient.length() == 0) {
                            recipient = null;
                        } else {
                            recipient = recipient.toLowerCase();
                        }
                    } else {
                        // Maintain compatibility with the old version.
                        // Obsolete version.
                        if (firstToken.equals("CHECK")) {
                            ip = tokenizer.nextToken();
                        } else {
                            ip = firstToken;
                        }
                        if (tokenizer.countTokens() == 2) {
                            sender = tokenizer.nextToken().toLowerCase();
                            helo = tokenizer.nextToken();
                        } else {
                            sender = null;
                            helo = tokenizer.nextToken();
                        }
                        recipient = null;
                        if (ip.startsWith("'") && ip.endsWith("'")) {
                            ip = ip.substring(1, ip.length() - 1);
                        }
                        if (sender != null && sender.startsWith("'") && sender.endsWith("'")) {
                            sender = sender.substring(1, sender.length() - 1);
                            if (sender.length() == 0) {
                                sender = null;
                            }
                        }
                        if (helo.startsWith("'") && helo.endsWith("'")) {
                            helo = helo.substring(1, helo.length() - 1);
                        }
                    }
                    if (!Subnet.isValidIP(ip)) {
                        return "INVALID\n";
                    } else if (sender != null && !Domain.isEmail(sender)) {
                        return "INVALID\n";
                    } else if (recipient != null && !Domain.isEmail(recipient)) {
                        return "INVALID\n";
                    } else if (Subnet.isReservedIP(ip)) {
                        // Message from LAN.
                        return "LAN\n";
                    } else if (client != null && client.containsFull(ip)) {
                        // Message from LAN.
                        return "LAN\n";
                    } else {
                        TreeSet<String> tokenSet = new TreeSet<String>();
                        ip = Subnet.normalizeIP(ip);
                        tokenSet.add(ip);
                        if (Domain.isValidEmail(recipient)) {
                            // If there is a valid recipient,
                            // add it to the ticket for tracking.
                            tokenSet.add('>' + recipient);
                        }
                        if (recipient != null) {
                            User recipientUser = User.get(recipient);
                            if (recipientUser == null) {
                                // If the query originates from a recipient with a registered postmaster,
                                // treat the postmaster itself as the user of the query.
                                int index = recipient.indexOf('@');
                                String postmaster = "postmaster" + recipient.substring(index);
                                User postmasterUser = User.get(postmaster);
                                if (postmasterUser != null) {
                                    user = postmasterUser;
                                }
                            } else {
                                user = recipientUser;
                            }
                        }
                        if (user != null) {
                            userList.add(user);
                            tokenSet.add(user.getEmail() + ':');
                        } else if (client != null && client.hasEmail()) {
                            tokenSet.add(client.getEmail() + ':');
                        }
                        // Start tracking all HELOs that point at the IP,
                        // as a new way of interpreting the data.
                        String hostname;
                        if (CacheHELO.match(ip, helo, false)) {
                            hostname = Domain.normalizeHostname(helo, true);
                        } else {
                            hostname = Reverse.getHostname(ip);
                            hostname = Domain.normalizeHostname(hostname, true);
                        }
                        if (hostname == null) {
                            Server.logDebug("no rDNS for " + ip + ".");
                        } else if (Domain.isOfficialTLD(hostname)) {
                            return "INVALID\n";
                        } else {
                            // Dual-stack check, to score on both stacks.
                            String ipv4 = CacheHELO.getUniqueIPv4(hostname);
                            String ipv6 = CacheHELO.getUniqueIPv6(hostname);
                            if (ip.equals(ipv6) && CacheHELO.match(ipv4, hostname, false)) {
                                // Dual-stack equivalence if the IPv4 is unique for the hostname.
                                tokenSet.add(ipv4);
                            } else if (ip.equals(ipv4) && CacheHELO.match(ipv6, hostname, false)) {
                                // Dual-stack equivalence if the IPv6 is unique for the hostname.
                                tokenSet.add(ipv6);
                            }
                        }
                        if (Generic.containsGenericSoft(hostname)) {
                            // When the reverse DNS is generic, do not consider it.
                            hostname = null;
                        } else if (hostname != null) {
                            tokenSet.add(hostname);
                        }
                        LinkedList<String> logList = null;
                        if (sender != null && firstToken.equals("CHECK")) {
                            int index = sender.lastIndexOf('@');
                            String domain = sender.substring(index + 1);
                            logList = new LinkedList<String>();
                            try {
                                CacheSPF.refresh(domain, false);
                            } catch (ProcessException ex) {
                                logList.add("Cannot refresh SPF registry: " + ex.getErrorMessage());
                                logList.add("Using cached SPF registry.");
                            }
                        }
                        SPF spf;
                        if (sender == null) {
                            spf = null;
                            result = "NONE";
                        } else if (Domain.isOfficialTLD(sender)) {
                            spf = null;
                            result = "NONE";
                        } else if (Generic.containsGeneric(sender)) {
                            spf = null;
                            result = "NONE";
                        } else if ((spf = CacheSPF.get(sender)) == null) {
                            result = "NONE";
                        } else if (spf.isInexistent()) {
                            result = "NONE";
                        } else {
                            result = spf.getResult(ip, sender, helo, logList);
                        }
                        String mx = Domain.extractHost(sender, true);
                        if (user != null && user.isLocal()) {
                            // Message from local user.
                            return "LAN\n";
                        } else if (recipient != null && result.equals("PASS")) {
                            if (recipient.endsWith(mx)) {
                                // Message from same domain.
                                return "LAN\n";
                            } else if (recipient.equals(Core.getAbuseEmail())
                                    && User.exists(sender, "postmaster" + mx)) {
                                // Message to abuse.
                                return "LAN\n";
                            }
                        }
                        if (result.equals("PASS") || (sender != null && Provider.containsHELO(ip, hostname))) {
                            // When the result is PASS, the domain has authorized sending from the IP,
                            // so its owner is responsible for the messages.
                            if (!Provider.containsExact(mx)) {
                                // Not a provider, so the MX must be listed.
                                tokenSet.add(mx);
                                origem = mx;
                            } else if (Domain.isValidEmail(sender)) {
                                // List only the sender if the hostname is an e-mail provider.
                                String userEmail = null;
                                String recipientEmail = null;
                                for (String token : tokenSet) {
                                    if (token.endsWith(":")) {
                                        userEmail = token;
                                    } else if (token.startsWith(">")) {
                                        recipientEmail = token;
                                    }
                                }
                                tokenSet.clear();
                                tokenSet.add(sender);
                                if (userEmail != null) {
                                    tokenSet.add(userEmail);
                                }
                                if (recipientEmail != null) {
                                    tokenSet.add(recipientEmail);
                                }
                                origem = sender;
                            } else {
                                origem = sender;
                            }
                            fluxo = origem + ">" + recipient;
                        } else if (hostname == null) {
                            origem = (sender == null ? "" : sender + '>') + ip;
                            fluxo = origem + ">" + recipient;
                        } else {
                            String dominio = Domain.extractDomain(hostname, true);
                            origem = (sender == null ? "" : sender + '>')
                                    + (dominio == null ? hostname : dominio.substring(1));
                            fluxo = origem + ">" + recipient;
                        }
                        Long recipientTrapTime = Trap.getTimeRecipient(client, user, recipient);
                        if (firstToken.equals("CHECK")) {
                            String results = "\nSPF resolution results:\n";
                            if (spf != null && spf.isInexistent()) {
                                results += "   NXDOMAIN\n";
                            } else if (logList == null || logList.isEmpty()) {
                                results += "   NONE\n";
                            } else {
                                for (String log : logList) {
                                    results += "   " + log + "\n";
                                }
                            }
                            String white;
                            String block;
                            if ((white = White.find(client, user, ip, sender, hostname, result,
                                    recipient)) != null) {
                                results += "\nFirst WHITE match: " + white + "\n";
                            } else if ((block = Block.find(client, user, ip, sender, hostname, result,
                                    recipient, false, true, true, false)) != null) {
                                results += "\nFirst BLOCK match: " + block + "\n";
                            }
                            TreeSet<String> graceSet = new TreeSet<String>();
                            if (Domain.isGraceTime(sender)) {
                                graceSet.add(Domain.extractDomain(sender, false));
                            }
                            if (Domain.isGraceTime(hostname)) {
                                graceSet.add(Domain.extractDomain(hostname, false));
                            }
                            if (!graceSet.isEmpty()) {
                                results += "\n";
                                results += "Domains in grace time:\n";
                                for (String grace : graceSet) {
                                    results += "   " + grace + "\n";
                                }
                            }
                            results += "\n";
                            results += "Considered identifiers and status:\n";
                            tokenSet = expandTokenSet(tokenSet);
                            TreeMap<String, Distribution> distributionMap = CacheDistribution.getMap(tokenSet);
                            int count = 0;
                            for (String token : tokenSet) {
                                if (!token.startsWith(">") && !token.endsWith(":")) {
                                    if (!Ignore.contains(token)) {
                                        float probability;
                                        Status status;
                                        if (distributionMap.containsKey(token)) {
                                            Distribution distribution = distributionMap.get(token);
                                            probability = distribution.getSpamProbability(token);
                                            status = distribution.getStatus(token);
                                        } else {
                                            probability = 0.0f;
                                            status = SPF.Status.GREEN;
                                        }
                                        results += "   " + token + " " + status.name() + " "
                                                + Core.DECIMAL_FORMAT.format(probability) + "\n";
                                        count++;
                                    }
                                }
                            }
                            if (count == 0) {
                                results += "   NONE\n";
                            }
                            results += "\n";
                            return results;
                        } else if (recipientTrapTime == null
                                && White.contains(client, user, ip, sender, hostname, result, recipient)) {
                            if (White.contains(client, user, ip, sender, hostname, result, null)) {
                                // Clear a possible false positive from the BLOCK list.
                                Block.clear(client, user, ip, sender, hostname, result, null);
                            }
                            // Compute query frequency.
                            String url = Core.getURL();
                            String ticket = SPF.addQueryHam(client, user, ip, helo, hostname, sender, result,
                                    recipient, tokenSet, "WHITE");
                            return "WHITE " + (url == null ? ticket : url + ticket) + "\n";
                        } else if (Block.contains(client, user, ip, sender, hostname, result, recipient, true,
                                true, true, true)) {
                            Action action = client == null ? Action.REJECT : client.getActionBLOCK();
                            if (action == Action.REJECT) {
                                // Compute query frequency.
                                long time = Server.getNewUniqueTime();
                                User.Query queryLocal = SPF.addQuerySpam(time, client, user, ip, helo, hostname,
                                        sender, result, recipient, tokenSet, "BLOCK");
                                action = client == null ? Action.FLAG : client.getActionRED();
                                if (action != Action.REJECT && queryLocal != null && queryLocal.needHeader()) {
                                    if (action == Action.FLAG) {
                                        queryLocal.setResult("FLAG");
                                        String url = Core.getURL();
                                        String ticket = SPF.createTicket(time, tokenSet);
                                        return "FLAG " + (url == null ? ticket : url + ticket) + "\n";
                                    } else if (action == Action.HOLD) {
                                        queryLocal.setResult("HOLD");
                                        String url = Core.getURL();
                                        String ticket = SPF.createTicket(time, tokenSet);
                                        return "HOLD " + (url == null ? ticket : url + ticket) + "\n";
                                    } else {
                                        return "ERROR: UNDEFINED ACTION\n";
                                    }
                                } else {
                                    String url = Core.getUnblockURL(client, user, ip, sender, hostname,
                                            recipient);
                                    if (url == null) {
                                        return "BLOCKED\n";
                                    } else {
                                        return "BLOCKED " + url + "\n";
                                    }
                                }
                            } else if (action == Action.FLAG) {
                                String url = Core.getURL();
                                String ticket = SPF.getTicket(client, user, ip, helo, hostname, sender, result,
                                        recipient, tokenSet, "FLAG");
                                return "FLAG " + (url == null ? ticket : url + ticket) + "\n";
                            } else if (action == Action.HOLD) {
                                String url = Core.getURL();
                                String ticket = SPF.getTicket(client, user, ip, helo, hostname, sender, result,
                                        recipient, tokenSet, "HOLD");
                                return "HOLD " + (url == null ? ticket : url + ticket) + "\n";
                            } else {
                                return "ERROR: UNDEFINED ACTION\n";
                            }
                        } else if (Generic.containsDynamicDomain(hostname)) {
                            // Automatically block the dynamic IP range.
                            String cidr = Subnet
                                    .normalizeCIDR(SubnetIPv4.isValidIPv4(ip) ? ip + "/24" : ip + "/48");
                            if (Block.tryOverlap(cidr)) {
                                Server.logDebug("new BLOCK '" + cidr + "' added by '" + hostname + ";DYNAMIC'.");
                            } else if (Block.tryAdd(ip)) {
                                Server.logDebug("new BLOCK '" + ip + "' added by '" + hostname + ";DYNAMIC'.");
                            }
                            SPF.addQuerySpam(client, user, ip, helo, hostname, sender, result, recipient,
                                    tokenSet, "INVALID");
                            return "INVALID\n";
                        } else if (spf != null && spf.isDefinitelyInexistent()) {
                            // Automatically block an IP with red reputation.
                            if (SPF.isRed(ip)) {
                                if (Block.tryAdd(ip)) {
                                    Server.logDebug("new BLOCK '" + ip + "' added by '" + mx + ";NXDOMAIN'.");
                                }
                            }
                            Analise.processToday(ip);
                            // The domain was reported as nonexistent numerous times.
                            // Reject and report the host, since the retries are abusive.
                            SPF.addQuerySpam(client, user, ip, helo, hostname, sender, result, recipient,
                                    tokenSet, "NXDOMAIN");
                            return "NXDOMAIN\n";
                        } else if (spf != null && spf.isInexistent()) {
                            Analise.processToday(ip);
                            SPF.addQuery(client, user, ip, helo, hostname, sender, result, recipient, tokenSet,
                                    "NXDOMAIN");
                            return "NXDOMAIN\n";
                        } else if (result.equals("FAIL")) {
                            // Automatically block an IP with red reputation.
                            if (SPF.isRed(ip)) {
                                if (Block.tryAdd(ip)) {
                                    Server.logDebug("new BLOCK '" + ip + "' added by '" + sender + ";FAIL'.");
                                }
                            }
                            Analise.processToday(ip);
                            SPF.addQuerySpam(client, user, ip, helo, hostname, sender, result, recipient,
                                    tokenSet, "FAIL");
                            // Return FAIL only if there is no
                            // literal release of the sender with FAIL.
                            return "FAIL\n";
                        } else if (sender != null && !Domain.isEmail(sender)) {
                            // Automatically block an IP with red reputation.
                            if (SPF.isRed(ip)) {
                                if (Block.tryAdd(ip)) {
                                    Server.logDebug(
                                            "new BLOCK '" + ip + "' added by '" + sender + ";INVALID'.");
                                }
                            }
                            Analise.processToday(ip);
                            SPF.addQuerySpam(client, user, ip, helo, hostname, sender, result, recipient,
                                    tokenSet, "INVALID");
                            return "INVALID\n";
                        } else if (sender != null && Domain.isOfficialTLD(sender)) {
                            // Automatically block an IP with red reputation.
                            if (SPF.isRed(ip)) {
                                if (Block.tryAdd(ip)) {
                                    Server.logDebug(
                                            "new BLOCK '" + ip + "' added by '" + sender + ";RESERVED'.");
                                }
                            }
                            Analise.processToday(ip);
                            SPF.addQuerySpam(client, user, ip, helo, hostname, sender, result, recipient,
                                    tokenSet, "INVALID");
                            return "INVALID\n";
                        } else if (sender == null && !CacheHELO.match(ip, hostname, false)) {
                            // Automatically block an IP with bad reputation.
                            if (SPF.isRed(ip)) {
                                if (Block.tryAdd(ip)) {
                                    Server.logDebug("new BLOCK '" + ip + "' added by 'INVALID'.");
                                }
                            }
                            Analise.processToday(ip);
                            SPF.addQuerySpam(client, user, ip, helo, hostname, sender, result, recipient,
                                    tokenSet, "INVALID");
                            // Invalid HELO without a sender.
                            return "INVALID\n";
                        } else if (hostname == null && Core.isReverseRequired()) {
                            if (Block.tryAdd(ip)) {
                                Server.logDebug("new BLOCK '" + ip + "' added by 'NONE'.");
                            }
                            Analise.processToday(ip);
                            SPF.addQuerySpam(client, user, ip, helo, hostname, sender, result, recipient,
                                    tokenSet, "INVALID");
                            // Require a valid HELO or reverse.
                            return "INVALID\n";
                        } else if (recipient != null && !Domain.isValidEmail(recipient)) {
                            Analise.processToday(ip);
                            Analise.processToday(mx);
                            SPF.getTicket(client, user, ip, helo, hostname, sender, result, recipient, tokenSet,
                                    "INEXISTENT");
                            return "INEXISTENT\n";
                        } else if (recipientTrapTime != null) {
                            if (System.currentTimeMillis() > recipientTrapTime) {
                                // Spamtrap
                                for (String token : tokenSet) {
                                    String block;
                                    Status status = SPF.getStatus(token);
                                    if (status == Status.RED && (block = Block.add(token)) != null) {
                                        Server.logDebug("new BLOCK '" + block + "' added by '" + recipient
                                                + ";SPAMTRAP'.");
                                        Peer.sendBlockToAll(block);
                                    }
                                    if (status != Status.GREEN && !Subnet.isValidIP(token)
                                            && (block = Block.addIfNotNull(user, token)) != null) {
                                        Server.logDebug("new BLOCK '" + block + "' added by '" + recipient
                                                + ";SPAMTRAP'.");
                                    }
                                }
                                Analise.processToday(ip);
                                Analise.processToday(mx);
                                // Compute query frequency.
                                SPF.addQuerySpam(client, user, ip, helo, hostname, sender, result, recipient,
                                        tokenSet, "TRAP");
                                return "SPAMTRAP\n";
                            } else {
                                // Inexistent
                                for (String token : tokenSet) {
                                    String block;
                                    Status status = SPF.getStatus(token);
                                    if (status == Status.RED && (block = Block.add(token)) != null) {
                                        Server.logDebug("new BLOCK '" + block + "' added by '" + recipient
                                                + ";INEXISTENT'.");
                                        Peer.sendBlockToAll(block);
                                    }
                                    if (status != Status.GREEN && !Subnet.isValidIP(token)
                                            && (block = Block.addIfNotNull(user, token)) != null) {
                                        Server.logDebug("new BLOCK '" + block + "' added by '" + recipient
                                                + ";INEXISTENT'.");
                                    }
                                }
                                Analise.processToday(ip);
                                Analise.processToday(mx);
                                SPF.getTicket(client, user, ip, helo, hostname, sender, result, recipient,
                                        tokenSet, "INEXISTENT");
                                return "INEXISTENT\n";
                            }
                        } else if (Defer.count(fluxo) > Core.getFloodMaxRetry()) {
                            Analise.processToday(ip);
                            Analise.processToday(mx);
                            // The origin reached the delay limit
                            // for release of the recipient.
                            long time = System.currentTimeMillis();
                            Defer.end(fluxo);
                            Server.logDefer(time, fluxo, "DEFER FLOOD");
                            SPF.addQuerySpam(client, user, ip, helo, hostname, sender, result, recipient,
                                    tokenSet, "REJECT");
                            return "BLOCKED\n";
                        } else if (!result.equals("PASS") && !CacheHELO.match(ip, hostname, false)) {
                            // Automatically block an IP with yellow reputation.
                            if (SPF.isRed(ip)) {
                                if (Block.tryAdd(ip)) {
                                    Server.logDebug(
                                            "new BLOCK '" + ip + "' added by '" + recipient + ";INVALID'.");
                                }
                            }
                            Analise.processToday(ip);
                            SPF.addQuerySpam(client, user, ip, helo, hostname, sender, result, recipient,
                                    tokenSet, "INVALID");
                            return "INVALID\n";
                        } else if (recipient != null && recipient.startsWith("postmaster@")) {
                            String url = Core.getURL();
                            String ticket = SPF.getTicket(client, user, ip, helo, hostname, sender, result,
                                    recipient, tokenSet, "ACCEPT");
                            return result + " "
                                    + (url == null ? ticket : url + URLEncoder.encode(ticket, "UTF-8")) + "\n";
                        } else if (result.equals("PASS")
                                && SPF.isGood(Provider.containsExact(mx) ? sender : mx)) {
                            // The sender is valid and has an excellent reputation,
                            // even if its provider has a bad reputation.
                            String url = Core.getURL();
                            String ticket = SPF.addQueryHam(client, user, ip, helo, hostname, sender, result,
                                    recipient, tokenSet, "ACCEPT");
                            return "PASS "
                                    + (url == null ? ticket : url + URLEncoder.encode(ticket, "UTF-8")) + "\n";
                        } else if (SPF.hasRed(tokenSet) || Analise.isCusterRED(ip, sender, hostname)) {
                            Analise.processToday(ip);
                            Analise.processToday(mx);
                            Action action = client == null ? Action.REJECT : client.getActionRED();
                            if (action == Action.REJECT) {
                                // Compute query frequency.
                                SPF.addQuerySpam(client, user, ip, helo, hostname, sender, result, recipient,
                                        tokenSet, "REJECT");
                                return "BLOCKED\n";
                            } else if (action == Action.DEFER) {
                                if (Defer.defer(fluxo, Core.getDeferTimeRED())) {
                                    String url = Core.getReleaseURL(fluxo);
                                    SPF.addQuery(client, user, ip, helo, hostname, sender, result, recipient,
                                            tokenSet, "LISTED");
                                    if (url == null || Defer.count(fluxo) > 1) {
                                        return "LISTED\n";
                                    } else if (result.equals("PASS")
                                            && enviarLiberacao(url, sender, recipient)) {
                                        // Send the release by e-mail
                                        // if the sender is validated by PASS.
                                        return "LISTED\n";
                                    } else {
                                        return "LISTED " + url + "\n";
                                    }
                                } else {
                                    // Compute query frequency.
                                    SPF.addQuerySpam(client, user, ip, helo, hostname, sender, result,
                                            recipient, tokenSet, "REJECT");
                                    return "BLOCKED\n";
                                }
                            } else if (action == Action.FLAG) {
                                String url = Core.getURL();
                                String ticket = SPF.getTicket(client, user, ip, helo, hostname, sender, result,
                                        recipient, tokenSet, "FLAG");
                                return "FLAG " + (url == null ? ticket : url + ticket) + "\n";
                            } else if (action == Action.HOLD) {
                                String url = Core.getURL();
                                String ticket = SPF.getTicket(client, user, ip, helo, hostname, sender, result,
                                        recipient, tokenSet, "HOLD");
                                return "HOLD " + (url == null ? ticket : url + ticket) + "\n";
                            } else {
                                return "ERROR: UNDEFINED ACTION\n";
                            }
                        } else if (Domain.isGraceTime(sender) || Domain.isGraceTime(hostname)) {
                            Server.logTrace("domain in grace time.");
                            for (String token : tokenSet) {
                                String block;
                                Status status = SPF.getStatus(token);
                                if (status == Status.RED && (block = Block.add(token)) != null) {
                                    Server.logDebug("new BLOCK '" + block + "' added by '" + status + "'.");
                                    Peer.sendBlockToAll(block);
                                }
                                if (status != Status.GREEN && !Subnet.isValidIP(token)
                                        && (block = Block.addIfNotNull(user, token)) != null) {
                                    Server.logDebug("new BLOCK '" + block + "' added by '" + status + "'.");
                                }
                            }
                            Analise.processToday(ip);
                            Analise.processToday(mx);
                            Action action = client == null ? Action.REJECT : client.getActionGRACE();
                            if (action == Action.REJECT) {
                                // Compute query frequency.
                                SPF.addQuerySpam(client, user, ip, helo, hostname, sender, result, recipient,
                                        tokenSet, "REJECT");
                                return "BLOCKED\n";
                            } else if (action == Action.DEFER) {
                                if (Defer.defer(fluxo, Core.getDeferTimeRED())) {
                                    String url = Core.getReleaseURL(fluxo);
                                    SPF.addQuery(client, user, ip, helo, hostname, sender, result, recipient,
                                            tokenSet, "LISTED");
                                    if (url == null || Defer.count(fluxo) > 1) {
                                        return "LISTED\n";
                                    } else if (result.equals("PASS")
                                            && enviarLiberacao(url, sender, recipient)) {
                                        // Send the release by e-mail
                                        // if the sender is validated by PASS.
                                        return "LISTED\n";
                                    } else {
                                        return "LISTED " + url + "\n";
                                    }
                                } else {
                                    // Compute query frequency.
                                    SPF.addQuerySpam(client, user, ip, helo, hostname, sender, result,
                                            recipient, tokenSet, "REJECT");
                                    return "BLOCKED\n";
                                }
                            } else if (action == Action.FLAG) {
                                String url = Core.getURL();
                                String ticket = SPF.getTicket(client, user, ip, helo, hostname, sender, result,
                                        recipient, tokenSet, "FLAG");
                                return "FLAG " + (url == null ? ticket : url + ticket) + "\n";
                            } else if (action == Action.HOLD) {
                                String url = Core.getURL();
                                String ticket = SPF.getTicket(client, user, ip, helo, hostname, sender, result,
                                        recipient, tokenSet, "HOLD");
                                return "HOLD " + (url == null ? ticket : url + ticket) + "\n";
                            } else {
                                return "ERROR: UNDEFINED ACTION\n";
                            }
                        } else if (SPF.hasYellow(tokenSet) && Defer.defer(fluxo, Core.getDeferTimeYELLOW())) {
                            Analise.processToday(ip);
                            Analise.processToday(mx);
                            Action action = client == null ? Action.DEFER : client.getActionYELLOW();
                            if (action == Action.DEFER) {
                                // At least one identifier in the set is
                                // greylisted, with a 10 minute delay.
                                SPF.addQuery(client, user, ip, helo, hostname, sender, result, recipient,
                                        tokenSet, "GREYLIST");
                                return "GREYLIST\n";
                            } else if (action == Action.HOLD) {
                                String url = Core.getURL();
                                String ticket = SPF.getTicket(client, user, ip, helo, hostname, sender, result,
                                        recipient, tokenSet, "HOLD");
                                return "HOLD " + (url == null ? ticket : url + ticket) + "\n";
                            } else {
                                return "ERROR: UNDEFINED ACTION\n";
                            }
                        } else if (SPF.isFlood(tokenSet) && !Provider.containsHELO(ip, hostname)
                                && Defer.defer(origem, Core.getDeferTimeFLOOD())) {
                            Analise.processToday(ip);
                            Analise.processToday(mx);
                            // At least one identifier exceeds the allowed frequency.
                            Server.logDebug("FLOOD " + tokenSet);
                            SPF.addQuery(client, user, ip, helo, hostname, sender, result, recipient, tokenSet,
                                    "GREYLIST");
                            return "GREYLIST\n";
                        } else if (result.equals("SOFTFAIL") && !Provider.containsHELO(ip, hostname)
                                && Defer.defer(fluxo, Core.getDeferTimeSOFTFAIL())) {
                            Analise.processToday(ip);
                            Analise.processToday(mx);
                            // SOFTFAIL, with a one minute delay.
                            SPF.addQuery(client, user, ip, helo, hostname, sender, result, recipient, tokenSet,
                                    "GREYLIST");
                            return "GREYLIST\n";
                        } else {
                            Analise.processToday(ip);
                            Analise.processToday(mx);
                            // Compute query frequency.
                            String url = Core.getURL();
                            String ticket = SPF.addQueryHam(client, user, ip, helo, hostname, sender, result,
                                    recipient, tokenSet, "ACCEPT");
                            return result + " "
                                    + (url == null ? ticket : url + URLEncoder.encode(ticket, "UTF-8")) + "\n";
                        }
                    }
                } catch (ProcessException ex) {
                    if (ex.isErrorMessage("HOST NOT FOUND")) {
                        return "NXDOMAIN\n";
                    } else {
                        throw ex;
                    }
                }
            } else {
                return "INVALID QUERY\n";
            }
        }
        return result;
    } catch (ProcessException ex) {
        Server.logError(ex);
        return ex.getMessage() + "\n";
    } catch (Exception ex) {
        Server.logError(ex);
        return "ERROR: FATAL\n";
    }
}
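One TreeSet detail worth pulling out of the HEADER branch above: the code only echoes an unblock URL when unblockURLSet.size() == 1, and falls back to a generic BLOCKED otherwise. A reduced sketch of that size()-then-first() idiom, with a placeholder URL:

import java.util.TreeSet;

public class SingleElementCheck {
    public static void main(String[] args) {
        TreeSet<String> unblockURLSet = new TreeSet<String>();
        unblockURLSet.add("http://example.com/unblock/1"); // placeholder URL
        // only report a URL when exactly one candidate was collected
        if (unblockURLSet.size() == 1) {
            System.out.println("BLOCKED " + unblockURLSet.first());
        } else {
            System.out.println("BLOCKED");
        }
    }
}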
From source file: org.wso2.carbon.service.mgt.ServiceAdmin.java

/**
 * List all the available services
 *
 * @param serviceTypeFilter   Service type of services to be returned
 * @param serviceSearchString Service name or part of a service name
 * @param pageNumber          The number of the page to be retrieved
 * @return The service metadata
 * @throws org.apache.axis2.AxisFault If an error occurs while retrieving services
 */
public ServiceMetaDataWrapper listServices(String serviceTypeFilter, String serviceSearchString, int pageNumber)
        throws AxisFault {
    if (serviceTypeFilter == null) {
        serviceTypeFilter = "ALL";
    }
    if (pageNumber < 0 || pageNumber == Integer.MAX_VALUE) {
        pageNumber = 0;
    }
    List<ServiceMetaData> serviceList = new ArrayList<ServiceMetaData>();
    TreeSet<String> serviceTypes = new TreeSet<String>();
    serviceTypes.add("axis2");

    HashMap<String, AxisService> axisServices = getAxisConfig().getServices();
    Set<String> axisFaultServices = (getAxisConfig().getFaultyServices()).keySet();
    List<AxisService> axisServicesList = new ArrayList<AxisService>();

    // we have to check services in transit ghost state as well..
    Map<String, AxisService> originalTransitGhosts = GhostDeployerUtils
            .getTransitGhostServicesMap(getAxisConfig());
    Map<String, AxisService> clonedTransitGhosts = new HashMap<String, AxisService>();
    clonedTransitGhosts.putAll(originalTransitGhosts);

    for (Map.Entry<String, AxisService> entry : axisServices.entrySet()) {
        AxisService axisService = entry.getValue();

        // Filtering the admin services
        if (SystemFilter.isAdminService(axisService) || SystemFilter.isHiddenService(axisService)) {
            continue; // No advancement of currentIndex
        }

        String serviceType = "axis2";
        Parameter serviceTypeParam;
        serviceTypeParam = axisService.getParameter(ServerConstants.SERVICE_TYPE);
        if (serviceTypeParam != null) {
            serviceType = (String) serviceTypeParam.getValue();
            serviceTypes.add(serviceType);
        }

        // Filter out client side services
        if (axisService.isClientSide()) {
            continue;
        }

        // Filter out services based on service type
        if (!serviceTypeFilter.equals("ALL") && !serviceTypeFilter.equals(serviceType)) {
            continue;
        }

        // Filter out services based on serviceSearchString
        if (serviceSearchString != null && serviceSearchString.trim().length() > 0
                && !isServiceSatisfySearchString(serviceSearchString, axisService.getName())) {
            continue;
        }
        axisServicesList.add(axisService);
        if (clonedTransitGhosts.containsKey(axisService.getName())) {
            clonedTransitGhosts.remove(axisService.getName());
        }
    }

    java.util.Collection<AxisService> transitGhosts = clonedTransitGhosts.values();
    for (AxisService transitGhost : transitGhosts) {
        axisServicesList.add(transitGhost);
    }

    if (axisServicesList.size() > 0) {
        Collections.sort(axisServicesList, new Comparator<AxisService>() {
            public int compare(AxisService arg0, AxisService arg1) {
                return arg0.getName().compareToIgnoreCase(arg1.getName());
            }
        });
    }

    //        String itemsPerPage = ServerConfiguration.getInstance().getFirstProperty("ItemsPerPage");
    //        int itemsPerPageInt = 10; // the default number of items per page
    //        if (itemsPerPage != null) {
    //            itemsPerPageInt = Integer.parseInt(itemsPerPage);
    //        }
    //        int startIndex = pageNumber * itemsPerPageInt;
    //        int endIndex = (pageNumber + 1) * itemsPerPageInt;

    // Get only required services for page.
    List<AxisService> axisServicesRequiredForPage = new ArrayList<AxisService>();
    //        for (int i = startIndex; i < endIndex && i < axisServicesList.size(); i++) {
    //            axisServicesRequiredForPage.add(axisServicesList.get(i));
    //        }
    for (AxisService anAxisServicesList : axisServicesList) {
        axisServicesRequiredForPage.add(anAxisServicesList);
    }

    for (AxisService axisService : axisServicesRequiredForPage) {
        String serviceType = "axis2";
        Parameter serviceTypeParam;
        serviceTypeParam = axisService.getParameter(ServerConstants.SERVICE_TYPE);
        if (serviceTypeParam != null) {
            serviceType = (String) serviceTypeParam.getValue();
            serviceTypes.add(serviceType);
        }

        ServiceMetaData service = new ServiceMetaData();
        String serviceName = axisService.getName();
        service.setName(serviceName);

        // extract service type
        serviceTypeParam = axisService.getParameter(ServerConstants.SERVICE_TYPE);
        if (serviceTypeParam != null) {
            serviceType = (String) serviceTypeParam.getValue();
        }
        service.setServiceType(serviceType);

        AxisConfiguration axisConfiguration = getAxisConfig();
        service.setWsdlURLs(Utils.getWsdlInformation(serviceName, axisConfiguration));
        service.setTryitURL(Utils.getTryitURL(serviceName, getConfigContext()));
        service.setActive(axisService.isActive());

        Parameter parameter = axisService.getParameter(ServiceAdmin.DISABLE_TRY_IT_PARAM);
        if (parameter != null && Boolean.TRUE.toString().equalsIgnoreCase((String) parameter.getValue())) {
            service.setDisableTryit(true);
        }

        parameter = axisService.getParameter(ServiceAdmin.DISABLE_DELETION_PARAM);
        if (parameter != null && Boolean.TRUE.toString().equalsIgnoreCase((String) parameter.getValue())) {
            service.setDisableDeletion(true);
        }

        service.setServiceGroupName(axisService.getAxisServiceGroup().getServiceGroupName());

        // find the current security scenario id
        if (GhostDeployerUtils.isGhostService(axisService)) {
            Parameter secParam = axisService.getParameter(CarbonConstants.GHOST_ATTR_SECURITY_SCENARIO);
            if (secParam != null) {
                service.setSecurityScenarioId((String) secParam.getValue());
            }
        } else {
            SecurityScenarioData securityScenario = getSecurityScenario(serviceName);
            if (securityScenario != null) {
                service.setSecurityScenarioId(securityScenario.getScenarioId());
            }
        }

        if (!axisFaultServices.contains(axisService.getName())) {
            serviceList.add(service);
        }
    }

    ServiceMetaDataWrapper wrapper;
    wrapper = new ServiceMetaDataWrapper();
    wrapper.setNumberOfCorrectServiceGroups(getNumberOfServiceGroups());
    wrapper.setNumberOfFaultyServiceGroups(getNumberOfFaultyServices());
    wrapper.setServiceTypes(serviceTypes.toArray(new String[serviceTypes.size()]));
    wrapper.setNumberOfActiveServices(getNumberOfActiveServices());

    //        DataPaginator.doPaging(pageNumber, axisServicesList, serviceList, wrapper);
    DataPaginator.doPaging(pageNumber, serviceList, wrapper);
    return wrapper;
}
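The wrapper population above uses the common serviceTypes.toArray(new String[serviceTypes.size()]) idiom, where size() pre-sizes the destination array for the sorted set. A minimal sketch of just that idiom, with placeholder service-type strings:

import java.util.TreeSet;

public class SetToArray {
    public static void main(String[] args) {
        TreeSet<String> serviceTypes = new TreeSet<String>();
        serviceTypes.add("axis2");
        serviceTypes.add("proxy"); // placeholder type
        // size() sizes the target array exactly, so toArray needs no reallocation
        String[] types = serviceTypes.toArray(new String[serviceTypes.size()]);
        System.out.println(types.length); // prints 2
    }
}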
From source file: com.jhh.hdb.sqlparser.MySemanticAnalyzer.java

private List<List<String>> getCommonGroupByDestGroups(QB qb,
        Map<String, Operator<? extends OperatorDesc>> inputs) throws SemanticException {
    QBParseInfo qbp = qb.getParseInfo();

    TreeSet<String> ks = new TreeSet<String>();
    ks.addAll(qbp.getClauseNames());

    List<List<String>> commonGroupByDestGroups = new ArrayList<List<String>>();

    // If this is a trivial query block return
    if (ks.size() <= 1) {
        List<String> oneList = new ArrayList<String>(1);
        if (ks.size() == 1) {
            oneList.add(ks.first());
        }
        commonGroupByDestGroups.add(oneList);
        return commonGroupByDestGroups;
    }

    List<Operator<? extends OperatorDesc>> inputOperators = new ArrayList<Operator<? extends OperatorDesc>>(
            ks.size());
    List<List<ExprNodeDesc>> sprayKeyLists = new ArrayList<List<ExprNodeDesc>>(ks.size());
    List<List<ExprNodeDesc>> distinctKeyLists = new ArrayList<List<ExprNodeDesc>>(ks.size());

    // Iterate over each clause
    for (String dest : ks) {
        Operator input = inputs.get(dest);
        RowResolver inputRR = opParseCtx.get(input).getRowResolver();

        List<ExprNodeDesc> distinctKeys = getDistinctExprs(qbp, dest, inputRR);
        List<ExprNodeDesc> sprayKeys = new ArrayList<ExprNodeDesc>();

        // Add the group by expressions
        List<ASTNode> grpByExprs = getGroupByForClause(qbp, dest);
        for (ASTNode grpByExpr : grpByExprs) {
            ExprNodeDesc exprDesc = genExprNodeDesc(grpByExpr, inputRR);
            if (ExprNodeDescUtils.indexOf(exprDesc, sprayKeys) < 0) {
                sprayKeys.add(exprDesc);
            }
        }

        // Loop through each of the lists of exprs, looking for a match
        boolean found = false;
        for (int i = 0; i < sprayKeyLists.size(); i++) {
            if (!input.equals(inputOperators.get(i))) {
                continue;
            }

            if (distinctKeys.isEmpty()) {
                // current dest has no distinct keys.
                List<ExprNodeDesc> combinedList = new ArrayList<ExprNodeDesc>();
                combineExprNodeLists(sprayKeyLists.get(i), distinctKeyLists.get(i), combinedList);
                if (!matchExprLists(combinedList, sprayKeys)) {
                    continue;
                } // else do the common code at the end.
            } else {
                if (distinctKeyLists.get(i).isEmpty()) {
                    List<ExprNodeDesc> combinedList = new ArrayList<ExprNodeDesc>();
                    combineExprNodeLists(sprayKeys, distinctKeys, combinedList);
                    if (!matchExprLists(combinedList, sprayKeyLists.get(i))) {
                        continue;
                    } else {
                        // we have found a match. insert this distinct clause to head.
                        distinctKeyLists.remove(i);
                        sprayKeyLists.remove(i);
                        distinctKeyLists.add(i, distinctKeys);
                        sprayKeyLists.add(i, sprayKeys);
                        commonGroupByDestGroups.get(i).add(0, dest);
                        found = true;
                        break;
                    }
                } else {
                    if (!matchExprLists(distinctKeyLists.get(i), distinctKeys)) {
                        continue;
                    }
                    if (!matchExprLists(sprayKeyLists.get(i), sprayKeys)) {
                        continue;
                    }
                    // else do common code
                }
            }

            // common code
            // A match was found, so add the clause to the corresponding list
            commonGroupByDestGroups.get(i).add(dest);
            found = true;
            break;
        }

        // No match was found, so create new entries
        if (!found) {
            inputOperators.add(input);
            sprayKeyLists.add(sprayKeys);
            distinctKeyLists.add(distinctKeys);
            List<String> destGroup = new ArrayList<String>();
            destGroup.add(dest);
            commonGroupByDestGroups.add(destGroup);
        }
    }

    return commonGroupByDestGroups;
}
From source file:edu.ku.brc.specify.conversion.GenericDBConversion.java
    /**
     * Converts the old Stratigraphy records into LithoStrat tree nodes, then creates a
     * PaleoContext row for each CollectionObject.
     *
     * @param treeDef the LithoStrat tree definition
     * @param earth the root node of the LithoStrat tree
     * @param tblWriter the conversion log writer
     * @param srcTableName the name of the source stratigraphy table
     * @throws SQLException
     */
    public void convertLithoStratGeneral(final LithoStratTreeDef treeDef, final LithoStrat earth,
                                         final TableWriter tblWriter, final String srcTableName) throws SQLException {
        Statement stmt = null;
        ResultSet rs   = null;

        try {
            // get a Hibernate session for saving the new records
            Session localSession = HibernateUtil.getCurrentSession();
            HibernateUtil.beginTransaction();

            int count = BasicSQLUtils.getCountAsInt(oldDBConn, "SELECT COUNT(*) FROM " + srcTableName);
            if (count < 1) return;

            if (hasFrame) {
                setProcess(0, count);
            }

            // create an ID mapper for the stratigraphy table (mainly for use in converting localities)
            IdHashMapper lithoStratIdMapper = IdMapperMgr.getInstance().addHashMapper("stratigraphy_StratigraphyID", true);
            if (lithoStratIdMapper == null) {
                UIRegistry.showError("The lithoStratIdMapper was null.");
                return;
            }

            IdMapperIFace gtpIdMapper = IdMapperMgr.getInstance().get("geologictimeperiod", "GeologicTimePeriodID");
            IdMapperIFace ceMapper    = IdMapperMgr.getInstance().get("collectingevent", "CollectingEventID");
            if (ceMapper == null) {
                ceMapper = IdMapperMgr.getInstance().addTableMapper("collectingevent", "CollectingEventID", null, false);
            }

            String sql = String.format(
                    "SELECT s.StratigraphyID, s.SuperGroup, s.Group, s.Formation, s.Member, s.Bed, Remarks, "
                            + "Text1, Text2, Number1, Number2, YesNo1, YesNo2, GeologicTimePeriodID FROM %s s "
                            + "ORDER BY s.StratigraphyID", srcTableName);

            stmt = oldDBConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
            stmt.setFetchSize(Integer.MIN_VALUE);

            rs = stmt.executeQuery(sql);

            Map<Integer, Pair<Integer, Integer>> stratHash = new HashMap<Integer, Pair<Integer, Integer>>();

            int stratsWithNoGTP       = 0;
            int stratsWithNoMappedGTP = 0;
            int missingCEMapping      = 0;
            int lithoCnt              = 0;
            int counter               = 0;

            // for each old record, convert the record
            while (rs.next()) {
                if (counter % 500 == 0) {
                    if (hasFrame) {
                        setProcess(counter);
                    } else {
                        log.info("Converted " + counter + " Stratigraphy records");
                    }
                }

                // grab the important data fields from the old record
                int     oldStratId = rs.getInt(1); // This is a one-to-one with CollectingEvent
                String  superGroup = rs.getString(2);
                String  lithoGroup = rs.getString(3);
                String  formation  = rs.getString(4);
                String  member     = rs.getString(5);
                String  bed        = rs.getString(6);
                String  remarks    = escapeStringLiterals(rs.getString(7));
                String  text1      = escapeStringLiterals(rs.getString(8));
                String  text2      = escapeStringLiterals(rs.getString(9));
                Double  number1    = rs.getObject(10) != null ? rs.getDouble(10) : null;
                Double  number2    = rs.getObject(11) != null ? rs.getDouble(11) : null;
                Boolean yesNo1     = rs.getObject(12) != null ? rs.getBoolean(12) : null;
                Boolean yesNo2     = rs.getObject(13) != null ? rs.getBoolean(13) : null;
                Integer oldGTPId   = rs.getObject(14) != null ? rs.getInt(14) : null;

                // Check to see if there is any Litho information OR a GTP id.
                // If both are missing, then skip the record.
                boolean hasLithoFields = isNotEmpty(superGroup) || isNotEmpty(lithoGroup) || isNotEmpty(formation)
                        || isNotEmpty(member);
                if (!hasLithoFields && oldGTPId == null) {
                    continue;
                }

                Integer gtpId = null;
                if (oldGTPId != null) {
                    gtpId = gtpIdMapper.get(oldGTPId);
                    if (gtpId == null) {
                        // log the old (unmapped) id rather than the null mapping result
                        tblWriter.logError("Old GTPID[" + oldGTPId
                                + "] in the Strat record could not be mapped for Old StratID[" + oldStratId + "]");
                        stratsWithNoMappedGTP++;
                    }
                } else {
                    stratsWithNoGTP++;
                }

                // There may not be any Litho information to add to the LithoStrat tree,
                // but the record did have GTP information if we got here.
                Integer lithoStratID = null;
                if (hasLithoFields) {
                    // create a new LithoStrat chain from the old data
                    LithoStrat[] newStrats = convertOldStratRecord(superGroup, lithoGroup, formation, member, bed,
                            remarks, text1, text2, number1, number2, yesNo1, yesNo2, earth, localSession);
                    LithoStrat newStrat = getLastLithoStrat(newStrats);
                    counter++;
                    lithoCnt += newStrats.length;

                    // Map the old LithoStrat id to the new tree id
                    if (newStrat != null) {
                        lithoStratID = newStrat.getLithoStratId();
                        lithoStratIdMapper.put(oldStratId, newStrat.getLithoStratId());
                    } else {
                        String msg = String.format("Strat Fields were all null for oldID %d", oldStratId);
                        tblWriter.logError(msg);
                        log.error(msg);
                        missingCEMapping++;
                    }
                }

                if (lithoStratID != null || gtpId != null) {
                    Integer newCEId = ceMapper.get(oldStratId);
                    if (newCEId == null) {
                        String msg = String.format("No CE mapping for Old StratId %d, when they are a one-to-one.",
                                oldStratId);
                        tblWriter.logError(msg);
                        log.error(msg);
                        missingCEMapping++;
                    } else {
                        stratHash.put(newCEId, new Pair<Integer, Integer>(gtpId, lithoStratID));
                    }
                }
            }
            stmt.close();

            System.out.println("lithoCnt: " + lithoCnt);

            if (hasFrame) {
                setProcess(counter);
            } else {
                log.info("Converted " + counter + " Stratigraphy records");
            }

            TreeHelper.fixFullnameForNodeAndDescendants(earth);
            earth.setNodeNumber(1);
            fixNodeNumbersFromRoot(earth);

            HibernateUtil.commitTransaction();

            log.info("Converted " + counter + " Stratigraphy records");

            rs.close();

            Statement updateStatement = newDBConn.createStatement();

            int ceCnt    = BasicSQLUtils.getCountAsInt(oldDBConn,
                    "SELECT Count(CollectingEventID) FROM collectingevent");
            int stratCnt = BasicSQLUtils.getCountAsInt(oldDBConn,
                    String.format("SELECT Count(CollectingEventID) FROM collectingevent "
                            + "INNER JOIN %s ON CollectingEventID = StratigraphyID", srcTableName));

            String msg = String.format("There are %d CE->Strat and %d CEs. The diff is %d", stratCnt, ceCnt,
                    (ceCnt - stratCnt));
            tblWriter.log(msg);
            log.debug(msg);

            // Create a PaleoContext for each ColObj
            stmt = newDBConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
            stmt.setFetchSize(Integer.MIN_VALUE);

            int processCnt = BasicSQLUtils
                    .getCountAsInt("SELECT COUNT(*) FROM collectionobject WHERE CollectingEventID IS NOT NULL");
            if (frame != null) {
                frame.setDesc("Converting PaleoContext...");
                frame.setProcess(0, processCnt);
            }

            TreeSet<Integer> missingStratIds = new TreeSet<Integer>();

            int missingStrat = 0;
            int missingGTP   = 0;
            int coUpdateCnt  = 0;
            int cnt          = 0;

            sql = "SELECT CollectionObjectID, CollectingEventID FROM collectionobject WHERE CollectingEventID IS NOT NULL ORDER BY CollectionObjectID";
            rs  = stmt.executeQuery(sql);
            while (rs.next()) {
                int     coId = rs.getInt(1); // New CO id
                Integer ceId = rs.getInt(2); // New CE id

                Pair<Integer, Integer> strat = stratHash.get(ceId);

                Integer newLithoId = null;
                Integer gtpId      = null;
                if (strat != null) {
                    gtpId      = strat.getFirst();
                    newLithoId = strat.getSecond();
                }

                if (newLithoId == null) {
                    missingStrat++;
                    missingStratIds.add(ceId);
                    if (gtpId == null) continue;
                }

                try {
                    String updateStr = "INSERT INTO paleocontext (TimestampCreated, TimestampModified, DisciplineID, Version, CreatedByAgentID, ModifiedByAgentID, LithoStratID, ChronosStratID) "
                            + "VALUES ('" + nowStr + "','" + nowStr + "'," + getDisciplineId() + ", 0, "
                            + getCreatorAgentId(null) + "," + getModifiedByAgentId(null) + ","
                            + (newLithoId != null ? newLithoId : "NULL") + "," + (gtpId != null ? gtpId : "NULL") + ")";
                    updateStatement.executeUpdate(updateStr, Statement.RETURN_GENERATED_KEYS);

                    Integer paleoContextID = getInsertedId(updateStatement);
                    if (paleoContextID == null) {
                        throw new RuntimeException("Couldn't get the PaleoContext's inserted ID");
                    }

                    String sqlUpdate = "UPDATE collectionobject SET PaleoContextID=" + paleoContextID
                            + " WHERE CollectionObjectID = " + coId;
                    updateStatement.executeUpdate(sqlUpdate);
                    coUpdateCnt++;

                } catch (SQLException e) {
                    e.printStackTrace();
                    log.error(e);
                    showError(e.getMessage());
                    throw new RuntimeException(e);
                }

                cnt++; // advance the per-row progress counter, not the precomputed total
                if (frame != null && cnt % 100 == 0) frame.setProcess(cnt);
            }
            rs.close();
            stmt.close();

            if (frame != null) frame.setProcess(processCnt);

            msg = String.format("There are %d unmappable Strat Records and %d unmappable GTP records.",
                    missingStrat, missingGTP);
            tblWriter.log(msg);
            log.debug(msg);

            msg = String.format("There are %d CO records updated.", coUpdateCnt);
            tblWriter.log(msg);
            log.debug(msg);

            updateStatement.close();

            msg = String.format("No CE mapping for Old StratId Count: %d", missingCEMapping);
            tblWriter.logError(msg);
            log.error(msg);

            msg = String.format("Strats with No GTP Count: %d", stratsWithNoGTP);
            tblWriter.logError(msg);
            log.error(msg);

            msg = String.format("Strats with missing Mapping to GTP Count: %d", stratsWithNoMappedGTP);
            tblWriter.logError(msg);
            log.error(msg);

            msg = String.format("Number of Old StratIds mapped to a new Strat ID Count: %d",
                    lithoStratIdMapper.size());
            tblWriter.logError(msg);
            log.error(msg);

            StringBuilder sb = new StringBuilder();
            sb.append("Missing New Strat: ");
            if (missingStratIds.size() == 0) sb.append("None");
            for (Integer id : missingStratIds) {
                sb.append(String.format("%d, ", id));
            }
            tblWriter.logError(sb.toString());
            log.error(sb.toString());

        } catch (Exception ex) {
            ex.printStackTrace();
        }
        // Now in this step we add the PaleoContext to the Collecting Events
    }
From source file:edu.ku.brc.specify.conversion.GenericDBConversion.java
    /**
     * Converts the old Stratigraphy records into LithoStrat tree nodes and creates a
     * PaleoContext row for each CollectionObject, optionally remapping GeologicTimePeriod ids.
     *
     * @param treeDef the LithoStrat tree definition
     * @param earth the root node of the LithoStrat tree
     * @param tblWriter the conversion log writer
     * @param srcTableName the name of the source stratigraphy table
     * @param doMapGTPIds whether GeologicTimePeriod ids should be remapped from the old records
     * @throws SQLException
     */
    public void convertLithoStratCustom(final LithoStratTreeDef treeDef, final LithoStrat earth,
                                        final TableWriter tblWriter, final String srcTableName,
                                        final boolean doMapGTPIds) throws SQLException {
        Statement stmt = null;
        ResultSet rs   = null;

        try {
            // get a Hibernate session for saving the new records
            Session localSession = HibernateUtil.getCurrentSession();
            HibernateUtil.beginTransaction();

            int count = BasicSQLUtils.getCountAsInt(oldDBConn, "SELECT COUNT(*) FROM " + srcTableName);
            if (count < 1) return;

            if (hasFrame) {
                setProcess(0, count);
            }

            // create an ID mapper for the stratigraphy table (mainly for use in converting localities)
            IdHashMapper lithoStratIdMapper = IdMapperMgr.getInstance().addHashMapper("stratigraphy_StratigraphyID", true);
            if (lithoStratIdMapper == null) {
                UIRegistry.showError("The lithoStratIdMapper was null.");
                return;
            }

            IdTableMapper gtpIdMapper = IdMapperMgr.getInstance().addTableMapper("geologictimeperiod",
                    "GeologicTimePeriodID", null, false);
            if (doMapGTPIds) {
                gtpIdMapper.clearRecords();
                gtpIdMapper.mapAllIds();
            }

            Hashtable<Integer, Integer> stratGTPIdHash = new Hashtable<Integer, Integer>();

            // maps a new CollectingEvent id to the new LithoStrat id
            // (backed by the "stratigraphy_StratigraphyID_2" hash mapper)
            IdHashMapper newCEIdToNewStratIdHash = IdMapperMgr.getInstance()
                    .addHashMapper("stratigraphy_StratigraphyID_2", true);
            newCEIdToNewStratIdHash.setShowLogErrors(false);

            IdMapperIFace ceMapper = IdMapperMgr.getInstance().get("collectingevent", "CollectingEventID");
            if (ceMapper == null) {
                ceMapper = IdMapperMgr.getInstance().addTableMapper("collectingevent", "CollectingEventID", null, false);
            }

            // get all of the old records
            String sql = String.format(
                    "SELECT s.StratigraphyID, s.SuperGroup, s.Group, s.Formation, s.Member, s.Bed, Remarks, "
                            + "Text1, Text2, Number1, Number2, YesNo1, YesNo2, GeologicTimePeriodID FROM %s s "
                            + "ORDER BY s.StratigraphyID", srcTableName);

            stmt = oldDBConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
            stmt.setFetchSize(Integer.MIN_VALUE);

            rs = stmt.executeQuery(sql);

            int stratsWithNoGTP       = 0;
            int stratsWithNoMappedGTP = 0;
            int missingCEMapping      = 0;
            int lithoCnt              = 0;
            int counter               = 0;

            // for each old record, convert the record
            while (rs.next()) {
                if (counter % 500 == 0) {
                    if (hasFrame) {
                        setProcess(counter);
                    } else {
                        log.info("Converted " + counter + " Stratigraphy records");
                    }
                }

                // grab the important data fields from the old record
                int     oldStratId = rs.getInt(1); // This is a one-to-one with CollectingEvent
                String  superGroup = rs.getString(2);
                String  lithoGroup = rs.getString(3);
                String  formation  = rs.getString(4);
                String  member     = rs.getString(5);
                String  bed        = rs.getString(6);
                String  remarks    = escapeStringLiterals(rs.getString(7));
                String  text1      = escapeStringLiterals(rs.getString(8));
                String  text2      = escapeStringLiterals(rs.getString(9));
                Double  number1    = rs.getObject(10) != null ? rs.getDouble(10) : null;
                Double  number2    = rs.getObject(11) != null ? rs.getDouble(11) : null;
                Boolean yesNo1     = rs.getObject(12) != null ? rs.getBoolean(12) : null;
                Boolean yesNo2     = rs.getObject(13) != null ? rs.getBoolean(13) : null;
                Integer oldGTPId   = rs.getObject(14) != null ? rs.getInt(14) : null;

                // Check to see if there is any Litho information OR a GTP id.
                // If both are missing, then skip the record.
                boolean hasLithoFields = isNotEmpty(superGroup) || isNotEmpty(lithoGroup) || isNotEmpty(formation)
                        || isNotEmpty(member);
                if (!hasLithoFields && oldGTPId == null) {
                    continue;
                }

                // Choose the id to run through the GTP mapper: the old GTP id when GTP ids were
                // remapped, otherwise the old Strat id (the two tables are one-to-one).
                Integer gtpIdToMap = null;
                if (doMapGTPIds) {
                    if (oldGTPId != null) {
                        gtpIdToMap = oldGTPId;
                    }
                } else {
                    gtpIdToMap = oldStratId;
                }

                Integer gtpId = null;
                if (gtpIdToMap != null) {
                    gtpId = gtpIdMapper.get(gtpIdToMap);
                    if (gtpId == null) {
                        tblWriter.logError("Old GTPID[" + gtpIdToMap
                                + "] in the Strat record could not be mapped for Old StratID[" + oldStratId + "]");
                        stratsWithNoMappedGTP++;
                    }
                } else {
                    stratsWithNoGTP++;
                }

                // There may not be any Litho information to add to the LithoStrat tree,
                // but the record did have GTP information if we got here.
                if (hasLithoFields) {
                    // create a new LithoStrat chain from the old data
                    LithoStrat[] newStrats = convertOldStratRecord(superGroup, lithoGroup, formation, member, bed,
                            remarks, text1, text2, number1, number2, yesNo1, yesNo2, earth, localSession);
                    LithoStrat newStrat = getLastLithoStrat(newStrats);
                    counter++;
                    lithoCnt += newStrats.length;

                    // Map the old LithoStrat id to the new tree id
                    if (newStrat != null) {
                        lithoStratIdMapper.put(oldStratId, newStrat.getLithoStratId());

                        // Convert the old CE id (StratID) to the new CE id, then map new CE id -> new Strat id
                        Integer newCEId = ceMapper.get(oldStratId);
                        if (newCEId != null) {
                            newCEIdToNewStratIdHash.put(newCEId, newStrat.getLithoStratId());
                        } else {
                            String msg = String.format("No CE mapping for Old StratId %d, when they are a one-to-one.",
                                    oldStratId);
                            tblWriter.logError(msg);
                            log.error(msg);
                            missingCEMapping++;
                        }

                        // Map the new Strat id to the new GTP id
                        if (gtpId != null && stratGTPIdHash.get(newStrat.getLithoStratId()) == null) {
                            stratGTPIdHash.put(newStrat.getLithoStratId(), gtpId); // new id to new id
                        }
                    } else {
                        String msg = String.format("Strat Fields were all null for oldID %d", oldStratId);
                        tblWriter.logError(msg);
                        log.error(msg);
                        missingCEMapping++;
                    }
                }
            }
            stmt.close();

            System.out.println("lithoCnt: " + lithoCnt);

            if (hasFrame) {
                setProcess(counter);
            } else {
                log.info("Converted " + counter + " Stratigraphy records");
            }

            TreeHelper.fixFullnameForNodeAndDescendants(earth);
            earth.setNodeNumber(1);
            fixNodeNumbersFromRoot(earth);

            HibernateUtil.commitTransaction();

            log.info("Converted " + counter + " Stratigraphy records");

            rs.close();

            Statement updateStatement = newDBConn.createStatement();

            int ceCnt    = BasicSQLUtils.getCountAsInt(oldDBConn,
                    "SELECT Count(CollectingEventID) FROM collectingevent");
            int stratCnt = BasicSQLUtils.getCountAsInt(oldDBConn,
                    String.format("SELECT Count(CollectingEventID) FROM collectingevent "
                            + "INNER JOIN %s ON CollectingEventID = StratigraphyID", srcTableName));

            String msg = String.format("There are %d CE->Strat and %d CEs. The diff is %d", stratCnt, ceCnt,
                    (ceCnt - stratCnt));
            tblWriter.log(msg);
            log.debug(msg);

            // Create a PaleoContext for each ColObj
            stmt = newDBConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
            stmt.setFetchSize(Integer.MIN_VALUE);

            int processCnt = BasicSQLUtils
                    .getCountAsInt("SELECT COUNT(*) FROM collectionobject WHERE CollectingEventID IS NOT NULL");
            if (frame != null) {
                frame.setDesc("Converting PaleoContext...");
                frame.setProcess(0, processCnt);
            }

            TreeSet<Integer> missingStratIds = new TreeSet<Integer>();

            int missingStrat = 0;
            int missingGTP   = 0;
            int coUpdateCnt  = 0;
            int cnt          = 0;

            sql = "SELECT CollectionObjectID, CollectingEventID FROM collectionobject WHERE CollectingEventID IS NOT NULL ORDER BY CollectionObjectID";
            rs  = stmt.executeQuery(sql);
            while (rs.next()) {
                int     coId = rs.getInt(1); // New CO id
                Integer ceId = rs.getInt(2); // New CE id

                // Use the new CE id to get the new Strat id
                Integer newLithoId = newCEIdToNewStratIdHash.get(ceId);
                Integer gtpId      = null;

                if (newLithoId == null) {
                    missingStrat++;
                    missingStratIds.add(ceId);

                    // Fall back: look up the GTP id directly from the old stratigraphy record
                    Integer oldStratID = ceMapper.reverseGet(ceId);
                    if (oldStratID != null) {
                        sql = "SELECT GeologicTimePeriodID FROM stratigraphy WHERE StratigraphyID = " + oldStratID;
                        // (getCount is used here simply to fetch a single integer value)
                        Integer oldGTPId = BasicSQLUtils.getCount(oldDBConn, sql);
                        if (oldGTPId != null) {
                            gtpId = gtpIdMapper.get(oldGTPId);
                        }
                    }
                    if (gtpId == null) continue;
                }

                // Use the new Strat id to get the new GTP id (ChronosStratigraphy)
                if (gtpId == null) {
                    gtpId = stratGTPIdHash.get(newLithoId);
                    if (gtpId == null) {
                        missingGTP++;
                        if (newLithoId == null) continue;
                    }
                }

                try {
                    String updateStr = "INSERT INTO paleocontext (TimestampCreated, TimestampModified, DisciplineID, Version, CreatedByAgentID, ModifiedByAgentID, LithoStratID, ChronosStratID) "
                            + "VALUES ('" + nowStr + "','" + nowStr + "'," + getDisciplineId() + ", 0, "
                            + getCreatorAgentId(null) + "," + getModifiedByAgentId(null) + ","
                            + (newLithoId != null ? newLithoId : "NULL") + "," + (gtpId != null ? gtpId : "NULL") + ")";
                    updateStatement.executeUpdate(updateStr, Statement.RETURN_GENERATED_KEYS);

                    Integer paleoContextID = getInsertedId(updateStatement);
                    if (paleoContextID == null) {
                        throw new RuntimeException("Couldn't get the PaleoContext's inserted ID");
                    }

                    String sqlUpdate = "UPDATE collectionobject SET PaleoContextID=" + paleoContextID
                            + " WHERE CollectionObjectID = " + coId;
                    updateStatement.executeUpdate(sqlUpdate);
                    coUpdateCnt++;

                } catch (SQLException e) {
                    e.printStackTrace();
                    log.error(e);
                    showError(e.getMessage());
                    throw new RuntimeException(e);
                }

                cnt++; // advance the per-row progress counter, not the precomputed total
                if (frame != null && cnt % 100 == 0) frame.setProcess(cnt);
            }
            rs.close();
            stmt.close();

            if (frame != null) frame.setProcess(processCnt);

            msg = String.format("There are %d unmappable Strat Records and %d unmappable GTP records.",
                    missingStrat, missingGTP);
            tblWriter.log(msg);
            log.debug(msg);

            msg = String.format("There are %d CO records updated.", coUpdateCnt);
            tblWriter.log(msg);
            log.debug(msg);

            updateStatement.close();

            msg = String.format("No CE mapping for Old StratId Count: %d", missingCEMapping);
            tblWriter.logError(msg);
            log.error(msg);

            msg = String.format("Strats with No GTP Count: %d", stratsWithNoGTP);
            tblWriter.logError(msg);
            log.error(msg);

            msg = String.format("Strats with missing Mapping to GTP Count: %d", stratsWithNoMappedGTP);
            tblWriter.logError(msg);
            log.error(msg);

            msg = String.format("Number of Old StratIds mapped to a new Strat ID Count: %d",
                    lithoStratIdMapper.size());
            tblWriter.logError(msg);
            log.error(msg);

            StringBuilder sb = new StringBuilder();
            sb.append("Missing New Strat: ");
            if (missingStratIds.size() == 0) sb.append("None");
            for (Integer id : missingStratIds) {
                sb.append(String.format("%d, ", id));
            }
            tblWriter.logError(sb.toString());
            log.error(sb.toString());

        } catch (Exception ex) {
            ex.printStackTrace();
        }
        // Now in this step we add the PaleoContext to the Collecting Events
    }