List of usage examples for java.util.TreeMap.get()

Method signature: public V get(Object key)

Returns the value to which the specified key is mapped, or null if the map contains no mapping for the key. Because TreeMap is a sorted map, the lookup walks the tree in O(log n) and locates keys with compareTo() (or the map's Comparator) rather than hashCode()/equals().
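Before the project excerpts, a minimal, self-contained sketch (not taken from any of the projects below) showing the basic get() contract: a hit returns the stored value, a miss returns null, and a null result is commonly followed by a put():

import java.util.TreeMap;

public class TreeMapGetBasics {
    public static void main(String[] args) {
        TreeMap<String, Integer> ages = new TreeMap<>();
        ages.put("alice", 31);
        ages.put("bob", 27);

        // get() returns the mapped value, or null when the key is absent.
        Integer a = ages.get("alice");   // 31
        Integer z = ages.get("zoe");     // null

        // A common follow-up to a null result is to create and store a default.
        Integer zoe = ages.get("zoe");
        if (zoe == null) {
            zoe = 0;
            ages.put("zoe", zoe);
        }
        System.out.println(a + " " + z + " " + zoe); // 31 null 0
    }
}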
From source file: org.apache.geode.internal.cache.GemFireCacheImpl.java
private TreeMap<String, Map<String, PartitionedRegion>> getPRTrees() {
    // prTrees maps each root region name to the sub-map of PRs under that root
    TreeMap<String, Map<String, PartitionedRegion>> prTrees = new TreeMap<>();
    TreeMap<String, PartitionedRegion> prMap = getPartitionedRegionMap();
    boolean hasColocatedRegion = false;
    for (PartitionedRegion pr : prMap.values()) {
        List<PartitionedRegion> childlist = ColocationHelper.getColocatedChildRegions(pr);
        if (childlist != null && childlist.size() > 0) {
            hasColocatedRegion = true;
            break;
        }
    }
    if (hasColocatedRegion) {
        LinkedHashMap<String, PartitionedRegion> orderedPrMap = orderByColocation(prMap);
        prTrees.put("ROOT", orderedPrMap);
    } else {
        for (PartitionedRegion pr : prMap.values()) {
            String rootName = pr.getRoot().getName();
            TreeMap<String, PartitionedRegion> prSubMap =
                    (TreeMap<String, PartitionedRegion>) prTrees.get(rootName);
            if (prSubMap == null) {
                prSubMap = new TreeMap<>();
                prTrees.put(rootName, prSubMap);
            }
            prSubMap.put(pr.getFullPath(), pr);
        }
    }
    return prTrees;
}
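The Geode method above relies on the classic get-then-put idiom: look up the sub-map for a root, and create it when get() returns null. A reduced sketch of the same idiom with made-up root names and plain strings standing in for PartitionedRegion; the final line shows the Java 8 computeIfAbsent() shorthand for the same null check:

import java.util.TreeMap;

public class GroupByRoot {
    public static void main(String[] args) {
        // Outer key: root name; inner key: full path (both kept in sorted order).
        TreeMap<String, TreeMap<String, String>> trees = new TreeMap<>();

        String[][] regions = { {"rootA", "/rootA/x"}, {"rootB", "/rootB/y"}, {"rootA", "/rootA/z"} };
        for (String[] r : regions) {
            // get() returns null for an unseen root, so create the sub-map lazily.
            TreeMap<String, String> subMap = trees.get(r[0]);
            if (subMap == null) {
                subMap = new TreeMap<>();
                trees.put(r[0], subMap);
            }
            subMap.put(r[1], r[1]);
        }

        // Java 8+ equivalent of the get()/put() dance above:
        trees.computeIfAbsent("rootC", k -> new TreeMap<>()).put("/rootC/w", "/rootC/w");

        System.out.println(trees);
    }
}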
From source file: com.if3games.chessonline.DroidFish.java
private final Dialog moveListMenuDialog() { final int EDIT_HEADERS = 0; final int EDIT_COMMENTS = 1; final int REMOVE_SUBTREE = 2; final int MOVE_VAR_UP = 3; final int MOVE_VAR_DOWN = 4; final int ADD_NULL_MOVE = 5; List<CharSequence> lst = new ArrayList<CharSequence>(); List<Integer> actions = new ArrayList<Integer>(); lst.add(getString(R.string.edit_headers)); actions.add(EDIT_HEADERS);/*from www. j a v a 2 s.c om*/ if (ctrl.humansTurn()) { lst.add(getString(R.string.edit_comments)); actions.add(EDIT_COMMENTS); } lst.add(getString(R.string.truncate_gametree)); actions.add(REMOVE_SUBTREE); if (ctrl.numVariations() > 1) { lst.add(getString(R.string.move_var_up)); actions.add(MOVE_VAR_UP); lst.add(getString(R.string.move_var_down)); actions.add(MOVE_VAR_DOWN); } boolean allowNullMove = gameMode.analysisMode() || (gameMode.playerWhite() && gameMode.playerBlack() && !gameMode.clocksActive()); if (allowNullMove) { lst.add(getString(R.string.add_null_move)); actions.add(ADD_NULL_MOVE); } final List<Integer> finalActions = actions; AlertDialog.Builder builder = new AlertDialog.Builder(this); builder.setTitle(R.string.edit_game); builder.setItems(lst.toArray(new CharSequence[lst.size()]), new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int item) { switch (finalActions.get(item)) { case EDIT_HEADERS: { final TreeMap<String, String> headers = new TreeMap<String, String>(); ctrl.getHeaders(headers); AlertDialog.Builder builder = new AlertDialog.Builder(DroidFish.this); builder.setTitle(R.string.edit_headers); View content = View.inflate(DroidFish.this, R.layout.edit_headers, null); builder.setView(content); final TextView event, site, date, round, white, black; event = (TextView) content.findViewById(R.id.ed_header_event); site = (TextView) content.findViewById(R.id.ed_header_site); date = (TextView) content.findViewById(R.id.ed_header_date); round = (TextView) content.findViewById(R.id.ed_header_round); white = (TextView) content.findViewById(R.id.ed_header_white); black = (TextView) content.findViewById(R.id.ed_header_black); event.setText(headers.get("Event")); site.setText(headers.get("Site")); date.setText(headers.get("Date")); round.setText(headers.get("Round")); white.setText(headers.get("White")); black.setText(headers.get("Black")); builder.setNegativeButton(R.string.cancel, null); builder.setPositiveButton(android.R.string.ok, new Dialog.OnClickListener() { public void onClick(DialogInterface dialog, int which) { headers.put("Event", event.getText().toString().trim()); headers.put("Site", site.getText().toString().trim()); headers.put("Date", date.getText().toString().trim()); headers.put("Round", round.getText().toString().trim()); headers.put("White", white.getText().toString().trim()); headers.put("Black", black.getText().toString().trim()); ctrl.setHeaders(headers); setBoardFlip(true); } }); builder.show(); break; } case EDIT_COMMENTS: { AlertDialog.Builder builder = new AlertDialog.Builder(DroidFish.this); builder.setTitle(R.string.edit_comments); View content = View.inflate(DroidFish.this, R.layout.edit_comments, null); builder.setView(content); DroidChessController.CommentInfo commInfo = ctrl.getComments(); final TextView preComment, moveView, nag, postComment; preComment = (TextView) content.findViewById(R.id.ed_comments_pre); moveView = (TextView) content.findViewById(R.id.ed_comments_move); nag = (TextView) content.findViewById(R.id.ed_comments_nag); postComment = (TextView) content.findViewById(R.id.ed_comments_post); 
preComment.setText(commInfo.preComment); postComment.setText(commInfo.postComment); moveView.setText(commInfo.move); String nagStr = Node.nagStr(commInfo.nag).trim(); if ((nagStr.length() == 0) && (commInfo.nag > 0)) nagStr = String.format(Locale.US, "%d", commInfo.nag); nag.setText(nagStr); builder.setNegativeButton(R.string.cancel, null); builder.setPositiveButton(android.R.string.ok, new Dialog.OnClickListener() { public void onClick(DialogInterface dialog, int which) { String pre = preComment.getText().toString().trim(); String post = postComment.getText().toString().trim(); int nagVal = Node.strToNag(nag.getText().toString()); DroidChessController.CommentInfo commInfo = new DroidChessController.CommentInfo(); commInfo.preComment = pre; commInfo.postComment = post; commInfo.nag = nagVal; ctrl.setComments(commInfo); } }); builder.show(); break; } case REMOVE_SUBTREE: ctrl.removeSubTree(); break; case MOVE_VAR_UP: ctrl.moveVariation(-1); break; case MOVE_VAR_DOWN: ctrl.moveVariation(1); break; case ADD_NULL_MOVE: ctrl.makeHumanNullMove(); break; } moveListMenuDlg = null; } }); AlertDialog alert = builder.create(); moveListMenuDlg = alert; return alert; }
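In the DroidFish excerpt, a TreeMap<String, String> carries PGN-style game headers between the controller and the dialog, and get() may legitimately return null for tags that were never set. A minimal sketch of that pattern with invented header values and no Android dependencies; getOrDefault() is shown as one way to supply a fallback:

import java.util.TreeMap;

public class PgnHeaders {
    public static void main(String[] args) {
        // TreeMap keeps the header tags in a stable, sorted order.
        TreeMap<String, String> headers = new TreeMap<>();
        headers.put("Event", "Casual game");
        headers.put("White", "Carlsen");
        headers.put("Black", "Nakamura");

        // get() returns null for tags that were never set ("Site" here),
        // so UI code reading the map must be prepared for null.
        System.out.println(headers.get("Event"));              // Casual game
        System.out.println(headers.get("Site"));               // null
        System.out.println(headers.getOrDefault("Site", "?")); // "?" fallback
    }
}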
From source file: uk.ac.leeds.ccg.andyt.projects.moses.process.RegressionReport_UK1.java
public void writeAggregateStatisticsForOptimisationConstraints_ISARHP_ISARCEP(String a_OutputDir_String)
        throws Exception {
    HashMap a_ID_RecordID_HashMap = _ISARDataHandler.get_ID_RecordID_HashMap();
    File optimisationConstraints_SARs = new File(a_OutputDir_String, "OptimisationConstraints_SARs.csv");
    FileOutputStream a_FileOutputStream = new FileOutputStream(optimisationConstraints_SARs);
    OutputDataHandler_OptimisationConstraints.writeHSARHP_ISARCEPHeader(a_FileOutputStream);
    a_FileOutputStream.flush();
    Object[] fitnessCounts;
    HashMap<String, Integer> a_SARCounts = null;
    TreeSet<String> a_LADCodes_TreeSet = _CASDataHandler.getLADCodes_TreeSet();
    String s2;
    String s1;
    Iterator<String> a_Iterator_String = a_LADCodes_TreeSet.iterator();
    while (a_Iterator_String.hasNext()) {
        // Need to reorder data for each LAD as OAs are not necessarily returned
        // in any order and an ordered result is wanted
        TreeMap<String, HashMap<String, Integer>> resultsForLAD = new TreeMap<String, HashMap<String, Integer>>();
        boolean setPrevious_OA_String = true;
        s1 = a_Iterator_String.next();
        s2 = s1.substring(0, 3);
        File resultsFile = new File(a_OutputDir_String + s2 + "/" + s1 + "/population.csv");
        // A few results are missing
        if (resultsFile.exists()) {
            System.out.println(resultsFile.toString() + " exists");
            String previous_OA_String = "";
            BufferedReader aBufferedReader = new BufferedReader(
                    new InputStreamReader(new FileInputStream(resultsFile)));
            StreamTokenizer aStreamTokenizer = new StreamTokenizer(aBufferedReader);
            Generic_StaticIO.setStreamTokenizerSyntax1(aStreamTokenizer);
            String line = "";
            int tokenType = aStreamTokenizer.nextToken();
            while (tokenType != StreamTokenizer.TT_EOF) {
                switch (tokenType) {
                case StreamTokenizer.TT_EOL:
                    //System.out.println(line);
                    String[] lineFields = line.split(",");
                    String a_OA_String = lineFields[0];
                    if (previous_OA_String.equalsIgnoreCase(a_OA_String)) {
                        if (lineFields[1].equalsIgnoreCase("HP")) {
                            //System.out.println("HP");
                            long a_ISARRecordID = (Long) a_ID_RecordID_HashMap.get(new Long(lineFields[2]));
                            ISARDataRecord a_ISARDataRecord = _ISARDataHandler.getISARDataRecord(a_ISARRecordID);
                            GeneticAlgorithm_ISARHP_ISARCEP.addToCountsHP(a_ISARDataRecord, a_SARCounts, _Random);
                            //System.out.println(a_HSARDataRecord.toString());
                        } else {
                            //System.out.println("CEP");
                            // From the id of the ISARDataRecord get the ISARRecordID.
                            long a_ISARRecordID = (Long) a_ID_RecordID_HashMap.get(new Long(lineFields[2]));
                            ISARDataRecord a_ISARDataRecord = _ISARDataHandler.getISARDataRecord(a_ISARRecordID);
                            GeneticAlgorithm_ISARHP_ISARCEP.addToCountsCEP(a_ISARDataRecord, a_SARCounts, _Random);
                        }
                    } else {
                        // Store result
                        if (setPrevious_OA_String) {
                            previous_OA_String = a_OA_String;
                            setPrevious_OA_String = false;
                        } else {
                            // Store
                            resultsForLAD.put(previous_OA_String, a_SARCounts);
                        }
                        // Initialise/Re-initialise
                        CASDataRecord a_CASDataRecord = (CASDataRecord) _CASDataHandler.getDataRecord(a_OA_String);
                        fitnessCounts = GeneticAlgorithm_ISARHP_ISARCEP.getFitnessCounts(a_CASDataRecord);
                        a_SARCounts = (HashMap<String, Integer>) fitnessCounts[1];
                        // Start a new aggregation
                        if (lineFields[1].equalsIgnoreCase("HP")) {
                            //System.out.println("HP");
                            long a_ISARRecordID = (Long) a_ID_RecordID_HashMap.get(new Long(lineFields[2]));
                            ISARDataRecord a_ISARDataRecord = _ISARDataHandler.getISARDataRecord(a_ISARRecordID);
                            GeneticAlgorithm_ISARHP_ISARCEP.addToCountsHP(a_ISARDataRecord, a_SARCounts, _Random);
                            //System.out.println(a_HSARDataRecord.toString());
                        } else {
                            //System.out.println("CEP");
                            // From the id of the ISARDataRecord get the ISARRecordID.
                            long a_ISARRecordID = (Long) a_ID_RecordID_HashMap.get(new Long(lineFields[2]));
                            ISARDataRecord a_ISARDataRecord = _ISARDataHandler.getISARDataRecord(a_ISARRecordID);
                            GeneticAlgorithm_ISARHP_ISARCEP.addToCountsCEP(a_ISARDataRecord, a_SARCounts, _Random);
                            //System.out.println(a_ISARDataRecord.toString());
                        }
                        //a_OA_String = lineFields[0];
                    }
                    previous_OA_String = a_OA_String;
                    break;
                case StreamTokenizer.TT_WORD:
                    line = aStreamTokenizer.sval;
                    break;
                }
                tokenType = aStreamTokenizer.nextToken();
            }
        } else {
            System.out.println(resultsFile.toString() + " !exists");
        }
        Iterator<String> string_Iterator = resultsForLAD.keySet().iterator();
        while (string_Iterator.hasNext()) {
            String oa_Code = string_Iterator.next();
            a_SARCounts = resultsForLAD.get(oa_Code);
            //GeneticAlgorithm_ISARHP_ISARCEP.addToCountsCEP(null, a_ID_RecordID_HashMap, _Random)
            OutputDataHandler_OptimisationConstraints.writeISARHP_ISARCEP(a_SARCounts, oa_Code, a_FileOutputStream);
        }
    }
    a_FileOutputStream.close();
}
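The method above uses TreeMap purely for ordering: per-OA results arrive in arbitrary order, are stored under their area code, and are then written out by iterating the sorted key set and calling get() for each key. A condensed sketch with invented area codes and integer counts standing in for the HashMap of SAR counts:

import java.util.TreeMap;

public class OrderedAggregation {
    public static void main(String[] args) {
        // Results arrive in arbitrary order; the TreeMap re-orders them by area code.
        TreeMap<String, Integer> resultsForLAD = new TreeMap<>();
        resultsForLAD.put("00AB0003", 42);
        resultsForLAD.put("00AB0001", 17);
        resultsForLAD.put("00AB0002", 99);

        // keySet() iterates in ascending key order, and get() retrieves each
        // stored aggregate so the output file is written in that order.
        for (String oaCode : resultsForLAD.keySet()) {
            Integer counts = resultsForLAD.get(oaCode);
            System.out.println(oaCode + "," + counts);
        }
        // (entrySet() would avoid the second lookup, but the get-per-key form mirrors the example.)
    }
}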
From source file: net.spfbl.spf.SPF.java
/** * Processa a consulta e retorna o resultado. * * @param query a expresso da consulta.//w w w .ja v a 2s. co m * @return o resultado do processamento. */ protected static String processSPF(InetAddress ipAddress, Client client, User user, String query, LinkedList<User> userList) { try { String result = ""; if (query.length() == 0) { return "INVALID QUERY\n"; } else { String origin; if (client == null) { origin = ipAddress.getHostAddress(); } else if (client.hasEmail()) { origin = ipAddress.getHostAddress() + " " + client.getDomain() + " " + client.getEmail(); } else { origin = ipAddress.getHostAddress() + " " + client.getDomain(); } StringTokenizer tokenizer = new StringTokenizer(query, " "); String firstToken = tokenizer.nextToken(); if (firstToken.equals("SPAM") && tokenizer.countTokens() == 1) { String ticket = tokenizer.nextToken(); TreeSet<String> tokenSet = addComplainURLSafe(origin, ticket, null); if (tokenSet == null) { result = "DUPLICATE COMPLAIN\n"; } else { String userEmail; try { userEmail = SPF.getClientURLSafe(ticket); } catch (Exception ex) { userEmail = client == null ? null : client.getEmail(); } user = User.get(userEmail); if (user != null) { userList.add(user); } String recipient; try { recipient = SPF.getRecipientURLSafe(ticket); } catch (ProcessException ex) { recipient = null; } result = "OK " + tokenSet + (recipient == null ? "" : " >" + recipient) + "\n"; } } else if (firstToken.equals("ABUSE") && tokenizer.hasMoreTokens()) { String token = tokenizer.nextToken(); if (token.startsWith("In-Reply-To:") && tokenizer.countTokens() == 1) { token = tokenizer.nextToken(); if (token.startsWith("From:")) { int index = token.indexOf(':') + 1; String recipient = token.substring(index); User recipientUser = User.get(recipient); if (recipientUser == null) { // Se a consulta originar de destinatrio com postmaster cadastrado, // considerar o prprio postmaster como usurio da consulta. index = recipient.indexOf('@'); String postmaster = "postmaster" + recipient.substring(index); User postmasterUser = User.get(postmaster); if (postmasterUser != null) { userList.add(user = postmasterUser); } } else { userList.add(user = recipientUser); } index = query.indexOf(':') + 1; String messageID = query.substring(index); result = "INVALID ID\n"; index = messageID.indexOf('<'); if (index >= 0) { messageID = messageID.substring(index + 1); index = messageID.indexOf('>'); if (index > 0) { messageID = messageID.substring(0, index); result = user.blockByMessageID(messageID) + '\n'; } } } else { result = "INVALID FROM\n"; } } else { result = "INVALID COMMAND\n"; } } else if (firstToken.equals("HOLDING") && tokenizer.countTokens() == 1) { String ticket = tokenizer.nextToken(); result = getHoldStatus(client, ticket, userList) + '\n'; } else if (firstToken.equals("LINK") && tokenizer.hasMoreTokens()) { String ticketSet = tokenizer.nextToken(); TreeSet<String> linkSet = new TreeSet<String>(); while (tokenizer.hasMoreTokens()) { linkSet.add(tokenizer.nextToken()); } StringTokenizer tokenizerTicket = new StringTokenizer(ticketSet, ";"); String unblockURL = null; boolean blocked = false; Action action = null; while (tokenizerTicket.hasMoreTokens()) { String ticket = tokenizerTicket.nextToken(); String userEmail; try { userEmail = SPF.getClientURLSafe(ticket); } catch (Exception ex) { userEmail = client == null ? 
null : client.getEmail(); } if ((user = User.get(userEmail)) != null) { userList.add(user); long dateTicket = SPF.getDateTicket(ticket); User.Query queryTicket = user.getQuery(dateTicket); if (queryTicket != null) { if (queryTicket.setLinkSet(linkSet)) { SPF.setSpam(dateTicket, queryTicket.getTokenSet()); if (!queryTicket.isWhite() && queryTicket.blockSender(dateTicket)) { Server.logDebug( "new BLOCK '" + queryTicket.getBlockSender() + "' added by LINK."); } action = client == null ? Action.REJECT : client.getActionBLOCK(); unblockURL = queryTicket.getUnblockURL(); blocked = true; } else if (queryTicket.isAnyLinkRED()) { action = client == null ? Action.FLAG : client.getActionRED(); } if (action == Action.HOLD) { queryTicket.setResult("HOLD"); } else if (action == Action.FLAG) { queryTicket.setResult("FLAG"); } else if (action == Action.REJECT) { queryTicket.setResult("REJECT"); } User.storeDB(dateTicket, queryTicket); } } } if (unblockURL != null) { result = "BLOCKED " + unblockURL + "\n"; } else if (blocked) { result = "BLOCKED\n"; } else if (action == Action.HOLD) { result = "HOLD\n"; } else if (action == Action.FLAG) { result = "FLAG\n"; } else if (action == Action.REJECT) { result = "REJECT\n"; } else { result = "CLEAR\n"; } } else if (firstToken.equals("MALWARE") && tokenizer.hasMoreTokens()) { String ticketSet = tokenizer.nextToken(); StringBuilder nameBuilder = new StringBuilder(); while (tokenizer.hasMoreTokens()) { if (nameBuilder.length() > 0) { nameBuilder.append(' '); } nameBuilder.append(tokenizer.nextToken()); } StringBuilder resultBuilder = new StringBuilder(); StringTokenizer ticketTokenizer = new StringTokenizer(ticketSet, ";"); while (ticketTokenizer.hasMoreTokens()) { String ticket = ticketTokenizer.nextToken(); TreeSet<String> tokenSet = addComplainURLSafe(origin, ticket, "MALWARE"); if (tokenSet == null) { resultBuilder.append("DUPLICATE COMPLAIN\n"); } else { // Processar reclamao. String userEmail; try { userEmail = SPF.getClientURLSafe(ticket); } catch (Exception ex) { userEmail = client == null ? null : client.getEmail(); } user = User.get(userEmail); if (user != null) { userList.add(user); long dateTicket = getDateTicket(ticket); User.Query userQuery = user.getQuery(dateTicket); if (userQuery != null && userQuery.setMalware(nameBuilder.toString())) { User.storeDB(dateTicket, userQuery); } } String recipient; try { recipient = SPF.getRecipientURLSafe(ticket); } catch (ProcessException ex) { recipient = null; } // Bloquear automaticamente todos // os tokens com reputao amarela ou vermelha. // Processar reclamao. for (String token : tokenSet) { String block; Status status = SPF.getStatus(token); if (status == Status.RED && (block = Block.add(token)) != null) { Server.logDebug( "new BLOCK '" + block + "' added by '" + recipient + ";MALWARE'."); Peer.sendBlockToAll(block); } if (status != Status.GREEN && !Subnet.isValidIP(token) && (block = Block.addIfNotNull(user, token)) != null) { Server.logDebug( "new BLOCK '" + block + "' added by '" + recipient + ";MALWARE'."); } } resultBuilder.append("OK "); resultBuilder.append(tokenSet); resultBuilder.append(recipient == null ? 
"" : " >" + recipient); resultBuilder.append("\n"); } } result = resultBuilder.toString(); } else if (firstToken.equals("HEADER") && tokenizer.hasMoreTokens()) { String ticketSet = tokenizer.nextToken(); String key = null; String from = null; String replyto = null; String messageID = null; String unsubscribe = null; String subject = null; while (tokenizer.hasMoreTokens()) { String token = tokenizer.nextToken(); if (token.startsWith("From:")) { key = "From"; int index = token.indexOf(':'); from = token.substring(index + 1); } else if (token.startsWith("ReplyTo:") || token.startsWith("Reply-To:")) { key = "Reply-To"; int index = token.indexOf(':'); replyto = token.substring(index + 1); } else if (token.startsWith("Message-ID:")) { key = "Message-ID"; int index = token.indexOf(':'); messageID = token.substring(index + 1); } else if (token.startsWith("List-Unsubscribe:")) { key = "List-Unsubscribe"; int index = token.indexOf(':'); unsubscribe = token.substring(index + 1); } else if (token.startsWith("Subject:")) { key = "Subject"; int index = token.indexOf(':'); subject = token.substring(index + 1); } else if (key == null) { from = null; replyto = null; unsubscribe = null; subject = null; break; } else if (key.equals("From")) { from += ' ' + token; } else if (key.equals("Reply-To")) { replyto += ' ' + token; } else if (key.equals("Message-ID")) { messageID += ' ' + token; } else if (key.equals("List-Unsubscribe")) { unsubscribe += ' ' + token; } else if (key.equals("Subject")) { subject += ' ' + token; } } if ((from == null || from.length() == 0) && (replyto == null || replyto.length() == 0) && (messageID == null || messageID.length() == 0) && (unsubscribe == null || unsubscribe.length() == 0) && (subject == null || subject.length() == 0)) { result = "INVALID COMMAND\n"; } else { boolean whitelisted = false; boolean blocklisted = false; TreeSet<String> unblockURLSet = new TreeSet<String>(); StringTokenizer ticketRokenizer = new StringTokenizer(ticketSet, ";"); int n = ticketRokenizer.countTokens(); ArrayList<User.Query> queryList = new ArrayList<User.Query>(n); while (ticketRokenizer.hasMoreTokens()) { String ticket = ticketRokenizer.nextToken(); String userEmail; try { userEmail = SPF.getClientURLSafe(ticket); } catch (Exception ex) { userEmail = client == null ? 
null : client.getEmail(); } if ((user = User.get(userEmail)) != null) { userList.add(user); long dateTicket = SPF.getDateTicket(ticket); User.Query queryTicket = user.getQuery(dateTicket); if (queryTicket != null) { queryList.add(queryTicket); String resultLocal = queryTicket.setHeader(from, replyto, subject, messageID, unsubscribe); if ("WHITE".equals(resultLocal)) { whitelisted = true; } else if ("BLOCK".equals(resultLocal)) { blocklisted = true; String url = queryTicket.getUnblockURL(); if (url != null) { unblockURLSet.add(url); } } User.storeDB(dateTicket, queryTicket); } } } if (whitelisted) { for (User.Query queryTicket : queryList) { queryTicket.setResult("WHITE"); } result = "WHITE\n"; } else if (blocklisted) { for (User.Query queryTicket : queryList) { queryTicket.setResult("BLOCK"); } if (unblockURLSet.size() == 1) { result = "BLOCKED " + unblockURLSet.first() + "\n"; } else { result = "BLOCKED\n"; } } else { result = "CLEAR\n"; } } } else if (firstToken.equals("HAM") && tokenizer.countTokens() == 1) { String ticket = tokenizer.nextToken(); TreeSet<String> tokenSet = deleteComplainURLSafe(origin, ticket); if (tokenSet == null) { result = "ALREADY REMOVED\n"; } else { String recipient; try { recipient = SPF.getRecipientURLSafe(ticket); } catch (ProcessException ex) { recipient = null; } result = "OK " + tokenSet + (recipient == null ? "" : " >" + recipient) + "\n"; } } else if (firstToken.equals("REFRESH") && tokenizer.countTokens() == 1) { String address = tokenizer.nextToken(); try { if (CacheSPF.refresh(address, true)) { result = "UPDATED\n"; } else { result = "NOT LOADED\n"; } } catch (ProcessException ex) { result = ex.getMessage() + "\n"; } } else if ((firstToken.equals("SPF") && tokenizer.countTokens() >= 4) || tokenizer.countTokens() == 2 || tokenizer.countTokens() == 1 || (firstToken.equals("CHECK") && tokenizer.countTokens() == 4) || (firstToken.equals("CHECK") && tokenizer.countTokens() == 3) || (firstToken.equals("CHECK") && tokenizer.countTokens() == 2)) { try { String ip; String sender; String helo; String recipient; String origem; String fluxo; if (firstToken.equals("SPF")) { // Nova formatao de consulta. ip = tokenizer.nextToken(); sender = tokenizer.nextToken(); while (!sender.endsWith("'") && tokenizer.hasMoreTokens()) { sender += " " + tokenizer.nextToken(); } helo = tokenizer.hasMoreTokens() ? tokenizer.nextToken() : "''"; recipient = tokenizer.hasMoreTokens() ? tokenizer.nextToken() : "''"; ip = ip.substring(1, ip.length() - 1); sender = sender.substring(1, sender.length() - 1); helo = helo.substring(1, helo.length() - 1); if (recipient.equals("'")) { recipient = tokenizer.hasMoreTokens() ? 
tokenizer.nextToken() : ""; if (recipient.endsWith("'")) { recipient = recipient.substring(0, recipient.length() - 1); } } else { recipient = recipient.substring(1, recipient.length() - 1); } if (sender.length() == 0) { sender = null; } else { sender = sender.toLowerCase(); } recipient = recipient.toLowerCase(); recipient = recipient.replace("\"", ""); } else if (firstToken.equals("CHECK") && tokenizer.countTokens() == 4) { ip = tokenizer.nextToken().toLowerCase(); sender = tokenizer.nextToken().toLowerCase(); helo = tokenizer.nextToken(); recipient = tokenizer.nextToken().toLowerCase(); if (ip.startsWith("'") && ip.endsWith("'")) { ip = ip.substring(1, ip.length() - 1); } if (sender.startsWith("'") && sender.endsWith("'")) { sender = sender.substring(1, sender.length() - 1); } if (helo.startsWith("'") && helo.endsWith("'")) { helo = helo.substring(1, helo.length() - 1); } if (recipient.startsWith("'") && recipient.endsWith("'")) { recipient = recipient.substring(1, recipient.length() - 1); } if (ip.length() == 0) { ip = null; } if (sender.length() == 0) { sender = null; } if (!Domain.isHostname(helo)) { helo = null; } if (recipient.length() == 0) { recipient = null; } else { recipient = recipient.toLowerCase(); } } else { // Manter compatibilidade da verso antiga. // Verso obsoleta. if (firstToken.equals("CHECK")) { ip = tokenizer.nextToken(); } else { ip = firstToken; } if (tokenizer.countTokens() == 2) { sender = tokenizer.nextToken().toLowerCase(); helo = tokenizer.nextToken(); } else { sender = null; helo = tokenizer.nextToken(); } recipient = null; if (ip.startsWith("'") && ip.endsWith("'")) { ip = ip.substring(1, ip.length() - 1); } if (sender != null && sender.startsWith("'") && sender.endsWith("'")) { sender = sender.substring(1, sender.length() - 1); if (sender.length() == 0) { sender = null; } } if (helo.startsWith("'") && helo.endsWith("'")) { helo = helo.substring(1, helo.length() - 1); } } if (!Subnet.isValidIP(ip)) { return "INVALID\n"; } else if (sender != null && !Domain.isEmail(sender)) { return "INVALID\n"; } else if (recipient != null && !Domain.isEmail(recipient)) { return "INVALID\n"; } else if (Subnet.isReservedIP(ip)) { // Message from LAN. return "LAN\n"; } else if (client != null && client.containsFull(ip)) { // Message from LAN. return "LAN\n"; } else { TreeSet<String> tokenSet = new TreeSet<String>(); ip = Subnet.normalizeIP(ip); tokenSet.add(ip); if (Domain.isValidEmail(recipient)) { // Se houver um remetente vlido, // Adicionar no ticket para controle. tokenSet.add('>' + recipient); } if (recipient != null) { User recipientUser = User.get(recipient); if (recipientUser == null) { // Se a consulta originar de destinatrio com postmaster cadastrado, // considerar o prprio postmaster como usurio da consulta. int index = recipient.indexOf('@'); String postmaster = "postmaster" + recipient.substring(index); User postmasterUser = User.get(postmaster); if (postmasterUser != null) { user = postmasterUser; } } else { user = recipientUser; } } if (user != null) { userList.add(user); tokenSet.add(user.getEmail() + ':'); } else if (client != null && client.hasEmail()) { tokenSet.add(client.getEmail() + ':'); } // Passar a acompanhar todos os // HELO quando apontados para o IP para // uma nova forma de interpretar dados. 
String hostname; if (CacheHELO.match(ip, helo, false)) { hostname = Domain.normalizeHostname(helo, true); } else { hostname = Reverse.getHostname(ip); hostname = Domain.normalizeHostname(hostname, true); } if (hostname == null) { Server.logDebug("no rDNS for " + ip + "."); } else if (Domain.isOfficialTLD(hostname)) { return "INVALID\n"; } else { // Verificao de pilha dupla, // para pontuao em ambas pilhas. String ipv4 = CacheHELO.getUniqueIPv4(hostname); String ipv6 = CacheHELO.getUniqueIPv6(hostname); if (ip.equals(ipv6) && CacheHELO.match(ipv4, hostname, false)) { // Equivalncia de pilha dupla se // IPv4 for nico para o hostname. tokenSet.add(ipv4); } else if (ip.equals(ipv4) && CacheHELO.match(ipv6, hostname, false)) { // Equivalncia de pilha dupla se // IPv6 for nico para o hostname. tokenSet.add(ipv6); } } if (Generic.containsGenericSoft(hostname)) { // Quando o reverso for // genrico, no consider-lo. hostname = null; } else if (hostname != null) { tokenSet.add(hostname); } LinkedList<String> logList = null; if (sender != null && firstToken.equals("CHECK")) { int index = sender.lastIndexOf('@'); String domain = sender.substring(index + 1); logList = new LinkedList<String>(); try { CacheSPF.refresh(domain, false); } catch (ProcessException ex) { logList.add("Cannot refresh SPF registry: " + ex.getErrorMessage()); logList.add("Using cached SPF registry."); } } SPF spf; if (sender == null) { spf = null; result = "NONE"; } else if (Domain.isOfficialTLD(sender)) { spf = null; result = "NONE"; } else if (Generic.containsGeneric(sender)) { spf = null; result = "NONE"; } else if ((spf = CacheSPF.get(sender)) == null) { result = "NONE"; } else if (spf.isInexistent()) { result = "NONE"; } else { result = spf.getResult(ip, sender, helo, logList); } String mx = Domain.extractHost(sender, true); if (user != null && user.isLocal()) { // Message from local user. return "LAN\n"; } else if (recipient != null && result.equals("PASS")) { if (recipient.endsWith(mx)) { // Message from same domain. return "LAN\n"; } else if (recipient.equals(Core.getAbuseEmail()) && User.exists(sender, "postmaster" + mx)) { // Message to abuse. return "LAN\n"; } } if (result.equals("PASS") || (sender != null && Provider.containsHELO(ip, hostname))) { // Quando fo PASS, significa que o domnio // autorizou envio pelo IP, portanto o dono dele // responsavel pelas mensagens. if (!Provider.containsExact(mx)) { // No um provedor ento // o MX deve ser listado. tokenSet.add(mx); origem = mx; } else if (Domain.isValidEmail(sender)) { // Listar apenas o remetente se o // hostname for um provedor de e-mail. String userEmail = null; String recipientEmail = null; for (String token : tokenSet) { if (token.endsWith(":")) { userEmail = token; } else if (token.startsWith(">")) { recipientEmail = token; } } tokenSet.clear(); tokenSet.add(sender); if (userEmail != null) { tokenSet.add(userEmail); } if (recipientEmail != null) { tokenSet.add(recipientEmail); } origem = sender; } else { origem = sender; } fluxo = origem + ">" + recipient; } else if (hostname == null) { origem = (sender == null ? "" : sender + '>') + ip; fluxo = origem + ">" + recipient; } else { String dominio = Domain.extractDomain(hostname, true); origem = (sender == null ? "" : sender + '>') + (dominio == null ? 
hostname : dominio.substring(1)); fluxo = origem + ">" + recipient; } Long recipientTrapTime = Trap.getTimeRecipient(client, user, recipient); if (firstToken.equals("CHECK")) { String results = "\nSPF resolution results:\n"; if (spf != null && spf.isInexistent()) { results += " NXDOMAIN\n"; } else if (logList == null || logList.isEmpty()) { results += " NONE\n"; } else { for (String log : logList) { results += " " + log + "\n"; } } String white; String block; if ((white = White.find(client, user, ip, sender, hostname, result, recipient)) != null) { results += "\nFirst WHITE match: " + white + "\n"; } else if ((block = Block.find(client, user, ip, sender, hostname, result, recipient, false, true, true, false)) != null) { results += "\nFirst BLOCK match: " + block + "\n"; } TreeSet<String> graceSet = new TreeSet<String>(); if (Domain.isGraceTime(sender)) { graceSet.add(Domain.extractDomain(sender, false)); } if (Domain.isGraceTime(hostname)) { graceSet.add(Domain.extractDomain(hostname, false)); } if (!graceSet.isEmpty()) { results += "\n"; results += "Domains in grace time:\n"; for (String grace : graceSet) { results += " " + grace + "\n"; } } results += "\n"; results += "Considered identifiers and status:\n"; tokenSet = expandTokenSet(tokenSet); TreeMap<String, Distribution> distributionMap = CacheDistribution.getMap(tokenSet); int count = 0; for (String token : tokenSet) { if (!token.startsWith(">") && !token.endsWith(":")) { if (!Ignore.contains(token)) { float probability; Status status; if (distributionMap.containsKey(token)) { Distribution distribution = distributionMap.get(token); probability = distribution.getSpamProbability(token); status = distribution.getStatus(token); } else { probability = 0.0f; status = SPF.Status.GREEN; } results += " " + token + " " + status.name() + " " + Core.DECIMAL_FORMAT.format(probability) + "\n"; count++; } } } if (count == 0) { results += " NONE\n"; } results += "\n"; return results; } else if (recipientTrapTime == null && White.contains(client, user, ip, sender, hostname, result, recipient)) { if (White.contains(client, user, ip, sender, hostname, result, null)) { // Limpa da lista BLOCK um possvel falso positivo. Block.clear(client, user, ip, sender, hostname, result, null); } // Calcula frequencia de consultas. String url = Core.getURL(); String ticket = SPF.addQueryHam(client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "WHITE"); return "WHITE " + (url == null ? ticket : url + ticket) + "\n"; } else if (Block.contains(client, user, ip, sender, hostname, result, recipient, true, true, true, true)) { Action action = client == null ? Action.REJECT : client.getActionBLOCK(); if (action == Action.REJECT) { // Calcula frequencia de consultas. long time = Server.getNewUniqueTime(); User.Query queryLocal = SPF.addQuerySpam(time, client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "BLOCK"); action = client == null ? Action.FLAG : client.getActionRED(); if (action != Action.REJECT && queryLocal != null && queryLocal.needHeader()) { if (action == Action.FLAG) { queryLocal.setResult("FLAG"); String url = Core.getURL(); String ticket = SPF.createTicket(time, tokenSet); return "FLAG " + (url == null ? ticket : url + ticket) + "\n"; } else if (action == Action.HOLD) { queryLocal.setResult("HOLD"); String url = Core.getURL(); String ticket = SPF.createTicket(time, tokenSet); return "HOLD " + (url == null ? 
ticket : url + ticket) + "\n"; } else { return "ERROR: UNDEFINED ACTION\n"; } } else { String url = Core.getUnblockURL(client, user, ip, sender, hostname, recipient); if (url == null) { return "BLOCKED\n"; } else { return "BLOCKED " + url + "\n"; } } } else if (action == Action.FLAG) { String url = Core.getURL(); String ticket = SPF.getTicket(client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "FLAG"); return "FLAG " + (url == null ? ticket : url + ticket) + "\n"; } else if (action == Action.HOLD) { String url = Core.getURL(); String ticket = SPF.getTicket(client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "HOLD"); return "HOLD " + (url == null ? ticket : url + ticket) + "\n"; } else { return "ERROR: UNDEFINED ACTION\n"; } } else if (Generic.containsDynamicDomain(hostname)) { // Bloquear automaticamente range de IP dinmico. String cidr = Subnet .normalizeCIDR(SubnetIPv4.isValidIPv4(ip) ? ip + "/24" : ip + "/48"); if (Block.tryOverlap(cidr)) { Server.logDebug( "new BLOCK '" + cidr + "' added by '" + hostname + ";DYNAMIC'."); } else if (Block.tryAdd(ip)) { Server.logDebug("new BLOCK '" + ip + "' added by '" + hostname + ";DYNAMIC'."); } SPF.addQuerySpam(client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "INVALID"); return "INVALID\n"; } else if (spf != null && spf.isDefinitelyInexistent()) { // Bloquear automaticamente IP com reputao vermelha. if (SPF.isRed(ip)) { if (Block.tryAdd(ip)) { Server.logDebug("new BLOCK '" + ip + "' added by '" + mx + ";NXDOMAIN'."); } } Analise.processToday(ip); // O domnio foi dado como inexistente inmeras vezes. // Rejeitar e denunciar o host pois h abuso de tentativas. SPF.addQuerySpam(client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "NXDOMAIN"); return "NXDOMAIN\n"; } else if (spf != null && spf.isInexistent()) { Analise.processToday(ip); SPF.addQuery(client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "NXDOMAIN"); return "NXDOMAIN\n"; } else if (result.equals("FAIL")) { // Bloquear automaticamente IP com reputao vermelha. if (SPF.isRed(ip)) { if (Block.tryAdd(ip)) { Server.logDebug("new BLOCK '" + ip + "' added by '" + sender + ";FAIL'."); } } Analise.processToday(ip); SPF.addQuerySpam(client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "FAIL"); // Retornar FAIL somente se no houver // liberao literal do remetente com FAIL. return "FAIL\n"; } else if (sender != null && !Domain.isEmail(sender)) { // Bloquear automaticamente IP com reputao vermelha. if (SPF.isRed(ip)) { if (Block.tryAdd(ip)) { Server.logDebug( "new BLOCK '" + ip + "' added by '" + sender + ";INVALID'."); } } Analise.processToday(ip); SPF.addQuerySpam(client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "INVALID"); return "INVALID\n"; } else if (sender != null && Domain.isOfficialTLD(sender)) { // Bloquear automaticamente IP com reputao vermelha. if (SPF.isRed(ip)) { if (Block.tryAdd(ip)) { Server.logDebug( "new BLOCK '" + ip + "' added by '" + sender + ";RESERVED'."); } } Analise.processToday(ip); SPF.addQuerySpam(client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "INVALID"); return "INVALID\n"; } else if (sender == null && !CacheHELO.match(ip, hostname, false)) { // Bloquear automaticamente IP com reputao ruim. 
if (SPF.isRed(ip)) { if (Block.tryAdd(ip)) { Server.logDebug("new BLOCK '" + ip + "' added by 'INVALID'."); } } Analise.processToday(ip); SPF.addQuerySpam(client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "INVALID"); // HELO invlido sem remetente. return "INVALID\n"; } else if (hostname == null && Core.isReverseRequired()) { if (Block.tryAdd(ip)) { Server.logDebug("new BLOCK '" + ip + "' added by 'NONE'."); } Analise.processToday(ip); SPF.addQuerySpam(client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "INVALID"); // Require a valid HELO or reverse. return "INVALID\n"; } else if (recipient != null && !Domain.isValidEmail(recipient)) { Analise.processToday(ip); Analise.processToday(mx); SPF.getTicket(client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "INEXISTENT"); return "INEXISTENT\n"; } else if (recipientTrapTime != null) { if (System.currentTimeMillis() > recipientTrapTime) { // Spamtrap for (String token : tokenSet) { String block; Status status = SPF.getStatus(token); if (status == Status.RED && (block = Block.add(token)) != null) { Server.logDebug("new BLOCK '" + block + "' added by '" + recipient + ";SPAMTRAP'."); Peer.sendBlockToAll(block); } if (status != Status.GREEN && !Subnet.isValidIP(token) && (block = Block.addIfNotNull(user, token)) != null) { Server.logDebug("new BLOCK '" + block + "' added by '" + recipient + ";SPAMTRAP'."); } } Analise.processToday(ip); Analise.processToday(mx); // Calcula frequencia de consultas. SPF.addQuerySpam(client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "TRAP"); return "SPAMTRAP\n"; } else { // Inexistent for (String token : tokenSet) { String block; Status status = SPF.getStatus(token); if (status == Status.RED && (block = Block.add(token)) != null) { Server.logDebug("new BLOCK '" + block + "' added by '" + recipient + ";INEXISTENT'."); Peer.sendBlockToAll(block); } if (status != Status.GREEN && !Subnet.isValidIP(token) && (block = Block.addIfNotNull(user, token)) != null) { Server.logDebug("new BLOCK '" + block + "' added by '" + recipient + ";INEXISTENT'."); } } Analise.processToday(ip); Analise.processToday(mx); SPF.getTicket(client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "INEXISTENT"); return "INEXISTENT\n"; } } else if (Defer.count(fluxo) > Core.getFloodMaxRetry()) { Analise.processToday(ip); Analise.processToday(mx); // A origem atingiu o limite de atraso // para liberao do destinatrio. long time = System.currentTimeMillis(); Defer.end(fluxo); Server.logDefer(time, fluxo, "DEFER FLOOD"); SPF.addQuerySpam(client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "REJECT"); return "BLOCKED\n"; } else if (!result.equals("PASS") && !CacheHELO.match(ip, hostname, false)) { // Bloquear automaticamente IP com reputao amarela. if (SPF.isRed(ip)) { if (Block.tryAdd(ip)) { Server.logDebug( "new BLOCK '" + ip + "' added by '" + recipient + ";INVALID'."); } } Analise.processToday(ip); SPF.addQuerySpam(client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "INVALID"); return "INVALID\n"; } else if (recipient != null && recipient.startsWith("postmaster@")) { String url = Core.getURL(); String ticket = SPF.getTicket(client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "ACCEPT"); return result + " " + (url == null ? ticket : url + URLEncoder.encode(ticket, "UTF-8")) + "\n"; } else if (result.equals("PASS") && SPF.isGood(Provider.containsExact(mx) ? 
sender : mx)) { // O remetente vlido e tem excelente reputao, // ainda que o provedor dele esteja com reputao ruim. String url = Core.getURL(); String ticket = SPF.addQueryHam(client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "ACCEPT"); return "PASS " + (url == null ? ticket : url + URLEncoder.encode(ticket, "UTF-8")) + "\n"; } else if (SPF.hasRed(tokenSet) || Analise.isCusterRED(ip, sender, hostname)) { Analise.processToday(ip); Analise.processToday(mx); Action action = client == null ? Action.REJECT : client.getActionRED(); if (action == Action.REJECT) { // Calcula frequencia de consultas. SPF.addQuerySpam(client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "REJECT"); return "BLOCKED\n"; } else if (action == Action.DEFER) { if (Defer.defer(fluxo, Core.getDeferTimeRED())) { String url = Core.getReleaseURL(fluxo); SPF.addQuery(client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "LISTED"); if (url == null || Defer.count(fluxo) > 1) { return "LISTED\n"; } else if (result.equals("PASS") && enviarLiberacao(url, sender, recipient)) { // Envio da liberao por e-mail se // houver validao do remetente por PASS. return "LISTED\n"; } else { return "LISTED " + url + "\n"; } } else { // Calcula frequencia de consultas. SPF.addQuerySpam(client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "REJECT"); return "BLOCKED\n"; } } else if (action == Action.FLAG) { String url = Core.getURL(); String ticket = SPF.getTicket(client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "FLAG"); return "FLAG " + (url == null ? ticket : url + ticket) + "\n"; } else if (action == Action.HOLD) { String url = Core.getURL(); String ticket = SPF.getTicket(client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "HOLD"); return "HOLD " + (url == null ? ticket : url + ticket) + "\n"; } else { return "ERROR: UNDEFINED ACTION\n"; } } else if (Domain.isGraceTime(sender) || Domain.isGraceTime(hostname)) { Server.logTrace("domain in grace time."); for (String token : tokenSet) { String block; Status status = SPF.getStatus(token); if (status == Status.RED && (block = Block.add(token)) != null) { Server.logDebug("new BLOCK '" + block + "' added by '" + status + "'."); Peer.sendBlockToAll(block); } if (status != Status.GREEN && !Subnet.isValidIP(token) && (block = Block.addIfNotNull(user, token)) != null) { Server.logDebug("new BLOCK '" + block + "' added by '" + status + "'."); } } Analise.processToday(ip); Analise.processToday(mx); Action action = client == null ? Action.REJECT : client.getActionGRACE(); if (action == Action.REJECT) { // Calcula frequencia de consultas. SPF.addQuerySpam(client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "REJECT"); return "BLOCKED\n"; } else if (action == Action.DEFER) { if (Defer.defer(fluxo, Core.getDeferTimeRED())) { String url = Core.getReleaseURL(fluxo); SPF.addQuery(client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "LISTED"); if (url == null || Defer.count(fluxo) > 1) { return "LISTED\n"; } else if (result.equals("PASS") && enviarLiberacao(url, sender, recipient)) { // Envio da liberao por e-mail se // houver validao do remetente por PASS. return "LISTED\n"; } else { return "LISTED " + url + "\n"; } } else { // Calcula frequencia de consultas. 
SPF.addQuerySpam(client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "REJECT"); return "BLOCKED\n"; } } else if (action == Action.FLAG) { String url = Core.getURL(); String ticket = SPF.getTicket(client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "FLAG"); return "FLAG " + (url == null ? ticket : url + ticket) + "\n"; } else if (action == Action.HOLD) { String url = Core.getURL(); String ticket = SPF.getTicket(client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "HOLD"); return "HOLD " + (url == null ? ticket : url + ticket) + "\n"; } else { return "ERROR: UNDEFINED ACTION\n"; } } else if (SPF.hasYellow(tokenSet) && Defer.defer(fluxo, Core.getDeferTimeYELLOW())) { Analise.processToday(ip); Analise.processToday(mx); Action action = client == null ? Action.DEFER : client.getActionYELLOW(); if (action == Action.DEFER) { // Pelo menos um identificador do conjunto est em greylisting com atrazo de 10min. SPF.addQuery(client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "GREYLIST"); return "GREYLIST\n"; } else if (action == Action.HOLD) { String url = Core.getURL(); String ticket = SPF.getTicket(client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "HOLD"); return "HOLD " + (url == null ? ticket : url + ticket) + "\n"; } else { return "ERROR: UNDEFINED ACTION\n"; } } else if (SPF.isFlood(tokenSet) && !Provider.containsHELO(ip, hostname) && Defer.defer(origem, Core.getDeferTimeFLOOD())) { Analise.processToday(ip); Analise.processToday(mx); // Pelo menos um identificador est com frequncia superior ao permitido. Server.logDebug("FLOOD " + tokenSet); SPF.addQuery(client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "GREYLIST"); return "GREYLIST\n"; } else if (result.equals("SOFTFAIL") && !Provider.containsHELO(ip, hostname) && Defer.defer(fluxo, Core.getDeferTimeSOFTFAIL())) { Analise.processToday(ip); Analise.processToday(mx); // SOFTFAIL com atrazo de 1min. SPF.addQuery(client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "GREYLIST"); return "GREYLIST\n"; } else { Analise.processToday(ip); Analise.processToday(mx); // Calcula frequencia de consultas. String url = Core.getURL(); String ticket = SPF.addQueryHam(client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "ACCEPT"); return result + " " + (url == null ? ticket : url + URLEncoder.encode(ticket, "UTF-8")) + "\n"; } } } catch (ProcessException ex) { if (ex.isErrorMessage("HOST NOT FOUND")) { return "NXDOMAIN\n"; } else { throw ex; } } } else { return "INVALID QUERY\n"; } } return result; } catch (ProcessException ex) { Server.logError(ex); return ex.getMessage() + "\n"; } catch (Exception ex) { Server.logError(ex); return "ERROR: FATAL\n"; } }
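Deep inside the CHECK branch of the SPF example, a TreeMap keyed by token is queried with containsKey()/get(), falling back to default values for unknown tokens. A small sketch of that lookup, using a hypothetical Reputation record instead of the project's Distribution class:

import java.util.TreeMap;

public class ReputationLookup {
    // Hypothetical stand-in for the project's Distribution class.
    record Reputation(float spamProbability, String status) {}

    public static void main(String[] args) {
        TreeMap<String, Reputation> distributionMap = new TreeMap<>();
        distributionMap.put("198.51.100.7", new Reputation(0.82f, "RED"));

        for (String token : new String[] {"198.51.100.7", "mail.example.org"}) {
            float probability;
            String status;
            if (distributionMap.containsKey(token)) {
                Reputation r = distributionMap.get(token); // known token: use stored statistics
                probability = r.spamProbability();
                status = r.status();
            } else {
                probability = 0.0f;                        // unknown token: optimistic default
                status = "GREEN";
            }
            System.out.println(token + " " + status + " " + probability);
        }
    }
}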
From source file: edu.hawaii.soest.kilonalu.adam.AdamSource.java
/** * A method that processes the data ByteBuffer passed in for the given IP * address of the ADAM sensor, parses the binary ADAM data, and flushes the * data to the DataTurbine given the sensor properties in the XMLConfiguration * passed in.//from w ww . j a va2s . c o m * * @param datagramAddress - the IP address of the datagram of this packet of data * @param xmlConfig - the XMLConfiguration object containing the list of * sensor properties * @param sampleBuffer - the binary data sample as a ByteBuffer */ protected boolean process(String datagramAddress, XMLConfiguration xmlConfig, ByteBuffer sampleBuffer) { logger.debug("AdamSource.process() called."); // do not execute the stream if there is no connection if (!isConnected()) return false; boolean failed = false; try { // add channels of data that will be pushed to the server. // Each sample will be sent to the Data Turbine as an rbnb frame. Information // on each channel is found in the XMLConfiguration file (sensors.properties.xml) // and the AdamParser object (to get the actual voltages for each ADAM channel) ChannelMap rbnbChannelMap = new ChannelMap(); // used to flush channels ChannelMap registerChannelMap = new ChannelMap(); // used to register channels int channelIndex = 0; this.adamParser = new AdamParser(sampleBuffer); logger.debug("\n" + "channelZero : " + this.adamParser.getChannelZero() + "\n" + "channelOne : " + this.adamParser.getChannelOne() + "\n" + "channelTwo : " + this.adamParser.getChannelTwo() + "\n" + "channelThree : " + this.adamParser.getChannelThree() + "\n" + "channelFour : " + this.adamParser.getChannelFour() + "\n" + "channelFive : " + this.adamParser.getChannelFive() + "\n" + "channelSix : " + this.adamParser.getChannelSix() + "\n" + "channelSeven : " + this.adamParser.getChannelSeven() + "\n" + "channelAverage : " + this.adamParser.getChannelAverage() + "\n" + "channelZeroMax : " + this.adamParser.getChannelZeroMax() + "\n" + "channelOneMax : " + this.adamParser.getChannelOneMax() + "\n" + "channelTwoMax : " + this.adamParser.getChannelTwoMax() + "\n" + "channelThreeMax : " + this.adamParser.getChannelThreeMax() + "\n" + "channelFourMax : " + this.adamParser.getChannelFourMax() + "\n" + "channelFiveMax : " + this.adamParser.getChannelFiveMax() + "\n" + "channelSixMax : " + this.adamParser.getChannelSixMax() + "\n" + "channelSevenMax : " + this.adamParser.getChannelSevenMax() + "\n" + "channelAverageMax : " + this.adamParser.getChannelAverageMax() + "\n" + "channelZeroMin : " + this.adamParser.getChannelZeroMin() + "\n" + "channelOneMin : " + this.adamParser.getChannelOneMin() + "\n" + "channelTwoMin : " + this.adamParser.getChannelTwoMin() + "\n" + "channelThreeMin : " + this.adamParser.getChannelThreeMin() + "\n" + "channelFourMin : " + this.adamParser.getChannelFourMin() + "\n" + "channelFiveMin : " + this.adamParser.getChannelFiveMin() + "\n" + "channelSixMin : " + this.adamParser.getChannelSixMin() + "\n" + "channelSevenMin : " + this.adamParser.getChannelSevenMin() + "\n" + "channelAverageMin : " + this.adamParser.getChannelAverageMin() + "\n" ); // create a TreeMap to hold the voltageChannel and its associated // RBNB ChannelMap channel string. 
When the RBNB ChannelMap is // populated, this TreeMap will be consulted TreeMap<Integer, String> voltageChannelTreeMap = new TreeMap<Integer, String>(); // create a character string to store characters from the voltage values StringBuilder decimalASCIISampleData = new StringBuilder(); // Create a list of sensors from the properties file, and iterate through // the list, matching the datagram IP address to the address in the // xml configuration file. If there is a match, find the correct voltage // channel to measurement mappings, create a corresponding RBNB channel // map, and flush the data to the DataTurbine. List sensorList = xmlConfig.getList("sensor.address"); // declare the properties that will be pulled from the // sensor.properties.xml file String address = ""; String sourceName = ""; String description = ""; String type = ""; String cacheSize = ""; String archiveSize = ""; String archiveChannel = ""; String portNumber = ""; String voltageChannel = ""; String measurement = ""; // evaluate each sensor listed in the sensor.properties.xml file for (Iterator sIterator = sensorList.iterator(); sIterator.hasNext();) { // get each property value of the sensor int index = sensorList.indexOf(sIterator.next()); address = (String) xmlConfig.getProperty("sensor(" + index + ").address"); sourceName = (String) xmlConfig.getProperty("sensor(" + index + ").name"); description = (String) xmlConfig.getProperty("sensor(" + index + ").description"); type = (String) xmlConfig.getProperty("sensor(" + index + ").type"); logger.debug("Sensor details:" + "\n\t\t\t\t\t\t\t\t\t\taddress : " + address + "\n\t\t\t\t\t\t\t\t\t\tname : " + sourceName + "\n\t\t\t\t\t\t\t\t\t\tdescription : " + description + "\n\t\t\t\t\t\t\t\t\t\ttype : " + type); // move to the next sensor if this doesn't match the RBNB source name if (!sourceName.equals(getRBNBClientName())) { continue; } List portList = xmlConfig.getList("sensor(" + index + ").ports.port[@number]"); // get each port of the sensor, along with the port properties for (Iterator pIterator = portList.iterator(); pIterator.hasNext();) { int pindex = portList.indexOf(pIterator.next()); // get the port number value portNumber = (String) xmlConfig .getProperty("sensor(" + index + ").ports.port(" + pindex + ")[@number]"); logger.debug("\tport " + portNumber + " details:"); List measurementList = xmlConfig .getList("sensor(" + index + ").ports.port(" + pindex + ").measurement[@label]"); // get each measurement and voltageChannel for the given port for (Iterator mIterator = measurementList.iterator(); mIterator.hasNext();) { int mindex = measurementList.indexOf(mIterator.next()); // build the property paths into the config file String voltagePath = "sensor(" + index + ").ports.port(" + pindex + ").measurement(" + mindex + ").voltageChannel"; String measurementPath = "sensor(" + index + ").ports.port(" + pindex + ").measurement(" + mindex + ")[@label]"; // get the voltageChannel and measurement label values voltageChannel = (String) xmlConfig.getProperty(voltagePath); measurement = (String) xmlConfig.getProperty(measurementPath); logger.debug("\t\t" + "voltageChannel: " + voltageChannel + "\n\t\t\t\t\t\t\t\t\t\t\t" + "measurement label: " + measurement); // Match the datagram address with the address in the xmlConfig file if (datagramAddress.equals(address)) { // and only add channel data for this class instance RBNB Source name if (sourceName.equals(getRBNBClientName())) { // create an Integer out of the voltageChannel Integer voltageChannelInt = new 
Integer(voltageChannel); // build the RBNB channel path string String channelPath = "port" + "/" + portNumber + "/" + measurement; voltageChannelTreeMap.put(voltageChannelInt, channelPath); } else { logger.debug("\t\tSource names don't match: " + sourceName + " != " + getRBNBClientName()); } // end sourceName if() statement } else { logger.debug("\t\tNo IP address match. " + datagramAddress + " != " + address); } //end IP address if() statement } // end for each channel } // end for each port // now that we've found the correct sensor, exit the sensor loop break; } // end for each sensor // Build the RBNB channel map from the entries in the tree map // by doing a lookup of the ADAM voltage channel values based // on the voltage channel number in the treemap. Also add the voltages // to the DecimalASCIISampleData string (and then channel) for (Iterator vcIterator = voltageChannelTreeMap.keySet().iterator(); vcIterator.hasNext();) { int voltageChannelFromMap = ((Integer) vcIterator.next()).intValue(); String channelPathFromMap = voltageChannelTreeMap.get(voltageChannelFromMap); float voltageValue = -9999.0f; // look up the voltage value from the AdamParser object based // on the voltage channel set in the xmlConfig file (via the treemap) switch (voltageChannelFromMap) { case 0: voltageValue = this.adamParser.getChannelZero(); break; case 1: voltageValue = this.adamParser.getChannelOne(); break; case 2: voltageValue = this.adamParser.getChannelTwo(); break; case 3: voltageValue = this.adamParser.getChannelThree(); break; case 4: voltageValue = this.adamParser.getChannelFour(); break; case 5: voltageValue = this.adamParser.getChannelFive(); break; case 6: voltageValue = this.adamParser.getChannelSix(); break; case 7: voltageValue = this.adamParser.getChannelSeven(); break; } // now add the channel and the voltage value to the RBNB channel maps channelIndex = registerChannelMap.Add(channelPathFromMap); registerChannelMap.PutUserInfo(channelIndex, "units=volts"); registerChannelMap.PutUserInfo(channelIndex, "description=" + description); logger.debug("Voltage Channel Tree Map: " + voltageChannelTreeMap.toString()); // then the channel and voltage channelIndex = rbnbChannelMap.Add(channelPathFromMap); rbnbChannelMap.PutMime(channelIndex, "application/octet-stream"); rbnbChannelMap.PutDataAsFloat32(channelIndex, new float[] { voltageValue }); decimalASCIISampleData.append(String.format("%05.3f", (Object) voltageValue) + ", "); } // and only flush data for this class instance RBNB Source name if (sourceName.equals(getRBNBClientName()) && datagramAddress.equals(address)) { // add the timestamp to the rbnb channel map registerChannelMap.PutTimeAuto("server"); rbnbChannelMap.PutTimeAuto("server"); // then add a timestamp to the end of the ASCII version of the sample DATE_FORMAT.setTimeZone(TZ); String sampleDateAsString = DATE_FORMAT.format(new Date()).toString(); decimalASCIISampleData.append(sampleDateAsString); decimalASCIISampleData.append("\n"); // add the DecimalASCIISampleData channel to the channelMap channelIndex = registerChannelMap.Add(getRBNBChannelName()); channelIndex = rbnbChannelMap.Add(getRBNBChannelName()); rbnbChannelMap.PutMime(channelIndex, "text/plain"); rbnbChannelMap.PutDataAsString(channelIndex, decimalASCIISampleData.toString()); // Now register the RBNB channels, and flush the rbnbChannelMap to the // DataTurbine getSource().Register(registerChannelMap); getSource().Flush(rbnbChannelMap); logger.info(getRBNBClientName() + " Sample sent to the DataTurbine: " + 
decimalASCIISampleData.toString()); registerChannelMap.Clear(); rbnbChannelMap.Clear(); sampleBuffer.clear(); } else { logger.debug("\t\tSource names don't match: " + sourceName + " != " + getRBNBClientName()); registerChannelMap.Clear(); rbnbChannelMap.Clear(); sampleBuffer.clear(); } } catch (SAPIException sapie) { // In the event of an RBNB communication exception, log the exception, // and allow execute() to return false, which will prompt a retry. failed = true; sapie.printStackTrace(); return !failed; } return !failed; }
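The ADAM source builds a TreeMap<Integer, String> from voltage-channel numbers to RBNB channel paths so that channels are later processed in ascending channel order. A reduced sketch of that iterate-keys-and-get() loop; the channel paths and voltage values here are invented:

import java.util.TreeMap;

public class ChannelMapping {
    public static void main(String[] args) {
        TreeMap<Integer, String> voltageChannelMap = new TreeMap<>();
        voltageChannelMap.put(3, "port/1/salinity");
        voltageChannelMap.put(0, "port/1/temperature");
        voltageChannelMap.put(1, "port/1/pressure");

        float[] voltages = {1.234f, 2.345f, 3.456f, 4.567f}; // indexed by channel number

        // keySet() iterates 0, 1, 3 (ascending), and get() resolves each channel path.
        for (Integer channel : voltageChannelMap.keySet()) {
            String channelPath = voltageChannelMap.get(channel);
            System.out.printf("%s = %05.3f%n", channelPath, voltages[channel]);
        }
    }
}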
From source file: fr.cirad.mgdb.exporting.individualoriented.PLinkExportHandler.java
@Override
public void exportData(OutputStream outputStream, String sModule, Collection<File> individualExportFiles,
        boolean fDeleteSampleExportFilesOnExit, ProgressIndicator progress, DBCursor markerCursor,
        Map<Comparable, Comparable> markerSynonyms, Map<String, InputStream> readyToExportFiles) throws Exception {
    File warningFile = File.createTempFile("export_warnings_", "");
    FileWriter warningFileWriter = new FileWriter(warningFile);
    ZipOutputStream zos = new ZipOutputStream(outputStream);
    if (readyToExportFiles != null)
        for (String readyToExportFile : readyToExportFiles.keySet()) {
            zos.putNextEntry(new ZipEntry(readyToExportFile));
            InputStream inputStream = readyToExportFiles.get(readyToExportFile);
            byte[] dataBlock = new byte[1024];
            int count = inputStream.read(dataBlock, 0, 1024);
            while (count != -1) {
                zos.write(dataBlock, 0, count);
                count = inputStream.read(dataBlock, 0, 1024);
            }
        }

    MongoTemplate mongoTemplate = MongoTemplateManager.get(sModule);
    int markerCount = markerCursor.count();
    String exportName = sModule + "_" + markerCount + "variants_" + individualExportFiles.size() + "individuals";
    zos.putNextEntry(new ZipEntry(exportName + ".ped"));

    TreeMap<Integer, Comparable> problematicMarkerIndexToNameMap = new TreeMap<Integer, Comparable>();
    short nProgress = 0, nPreviousProgress = 0;
    int i = 0;
    for (File f : individualExportFiles) {
        BufferedReader in = new BufferedReader(new FileReader(f));
        try {
            String individualId, line = in.readLine(); // read sample id
            if (line != null) {
                individualId = line;
                String population = getIndividualPopulation(sModule, line);
                String individualInfo = (population == null ? "." : population) + " " + individualId;
                zos.write((individualInfo + " 0 0 0 " + getIndividualGenderCode(sModule, individualId)).getBytes());
            } else
                throw new Exception("Unable to read first line of temp export file " + f.getName());

            int nMarkerIndex = 0;
            while ((line = in.readLine()) != null) {
                List<String> genotypes = MgdbDao.split(line, "|");
                HashMap<Object, Integer> genotypeCounts = new HashMap<Object, Integer>(); // will help us to keep track of missing genotypes
                int highestGenotypeCount = 0;
                String mostFrequentGenotype = null;
                for (String genotype : genotypes) {
                    if (genotype.length() == 0)
                        continue; /* skip missing genotypes */

                    int gtCount = 1 + MgdbDao.getCountForKey(genotypeCounts, genotype);
                    if (gtCount > highestGenotypeCount) {
                        highestGenotypeCount = gtCount;
                        mostFrequentGenotype = genotype;
                    }
                    genotypeCounts.put(genotype, gtCount);
                }

                if (genotypeCounts.size() > 1) {
                    warningFileWriter.write("- Dissimilar genotypes found for variant " + nMarkerIndex
                            + ", individual " + individualId + ". Exporting most frequent: " + mostFrequentGenotype + "\n");
                    problematicMarkerIndexToNameMap.put(nMarkerIndex, "");
                }

                String[] alleles = mostFrequentGenotype == null ? new String[0] : mostFrequentGenotype.split(" ");
                if (alleles.length > 2) {
                    warningFileWriter.write("- More than 2 alleles found for variant " + nMarkerIndex
                            + ", individual " + individualId + ". Exporting only the first 2 alleles.\n");
                    problematicMarkerIndexToNameMap.put(nMarkerIndex, "");
                }

                String all1 = alleles.length == 0 ? "0" : alleles[0];
                String all2 = alleles.length == 0 ? "0" : alleles[alleles.length == 1 ? 0 : 1];
                if (all1.length() != 1 || all2.length() != 1) {
                    warningFileWriter.write("- SNP expected, but alleles are not coded on a single char for variant "
                            + nMarkerIndex + ", individual " + individualId + ". Ignoring this genotype.\n");
                    problematicMarkerIndexToNameMap.put(nMarkerIndex, "");
                } else
                    zos.write((" " + all1 + " " + all2).getBytes());

                nMarkerIndex++;
            }
        } catch (Exception e) {
            LOG.error("Error exporting data", e);
            progress.setError("Error exporting data: " + e.getClass().getSimpleName()
                    + (e.getMessage() != null ? " - " + e.getMessage() : ""));
            return;
        } finally {
            in.close();
        }

        if (progress.hasAborted())
            return;

        nProgress = (short) (++i * 100 / individualExportFiles.size());
        if (nProgress > nPreviousProgress) {
            progress.setCurrentStepProgress(nProgress);
            nPreviousProgress = nProgress;
        }

        zos.write('\n');
        if (!f.delete()) {
            f.deleteOnExit();
            LOG.info("Unable to delete tmp export file " + f.getAbsolutePath());
        }
    }
    warningFileWriter.close();

    zos.putNextEntry(new ZipEntry(exportName + ".map"));

    int avgObjSize = (Integer) mongoTemplate.getCollection(mongoTemplate.getCollectionName(VariantRunData.class))
            .getStats().get("avgObjSize");
    int nChunkSize = nMaxChunkSizeInMb * 1024 * 1024 / avgObjSize;
    markerCursor.batchSize(nChunkSize);
    int nMarkerIndex = 0;
    while (markerCursor.hasNext()) {
        DBObject exportVariant = markerCursor.next();
        DBObject refPos = (DBObject) exportVariant.get(VariantData.FIELDNAME_REFERENCE_POSITION);
        Comparable markerId = (Comparable) exportVariant.get("_id");
        String chrom = (String) refPos.get(ReferencePosition.FIELDNAME_SEQUENCE);
        Long pos = ((Number) refPos.get(ReferencePosition.FIELDNAME_START_SITE)).longValue();
        if (chrom == null)
            LOG.warn("Chromosomal position not found for marker " + markerId);
        Comparable exportedId = markerSynonyms == null ? markerId : markerSynonyms.get(markerId);
        zos.write(((chrom == null ? "0" : chrom) + " " + exportedId + " " + 0 + " " + (pos == null ? 0 : pos)
                + LINE_SEPARATOR).getBytes());

        if (problematicMarkerIndexToNameMap.containsKey(nMarkerIndex)) {
            // we are going to need this marker's name for the warning file
            Comparable variantName = markerId;
            if (markerSynonyms != null) {
                Comparable syn = markerSynonyms.get(markerId);
                if (syn != null)
                    variantName = syn;
            }
            problematicMarkerIndexToNameMap.put(nMarkerIndex, variantName);
        }
        nMarkerIndex++;
    }

    if (warningFile.length() > 0) {
        zos.putNextEntry(new ZipEntry(exportName + "-REMARKS.txt"));
        int nWarningCount = 0;
        BufferedReader in = new BufferedReader(new FileReader(warningFile));
        String sLine;
        while ((sLine = in.readLine()) != null) {
            for (Integer aMarkerIndex : problematicMarkerIndexToNameMap.keySet())
                sLine = sLine.replaceAll("__" + aMarkerIndex + "__",
                        problematicMarkerIndexToNameMap.get(aMarkerIndex).toString());
            zos.write((sLine + "\n").getBytes());
            in.readLine();
            nWarningCount++;
        }
        LOG.info("Number of Warnings for export (" + exportName + "): " + nWarningCount);
        in.close();
    }
    warningFile.delete();

    zos.close();
    progress.setCurrentStepProgress((short) 100);
}
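Stripped of the export plumbing, the TreeMap usage above boils down to recording problematic marker indexes on a first pass and resolving their names with get() on a later pass. A minimal standalone sketch of that pattern (the class, variable and marker names below are illustrative, not taken from PLinkExportHandler):

import java.util.TreeMap;

public class ProblematicMarkerDemo {
    public static void main(String[] args) {
        // First pass: remember which marker indexes caused warnings (names unknown yet).
        TreeMap<Integer, String> problematicMarkerIndexToName = new TreeMap<Integer, String>();
        problematicMarkerIndexToName.put(7, "");
        problematicMarkerIndexToName.put(3, "");

        // Second pass: fill in the names once they become available.
        String[] markerNames = { "m0", "m1", "m2", "m3", "m4", "m5", "m6", "m7" };
        for (int idx = 0; idx < markerNames.length; idx++)
            if (problematicMarkerIndexToName.containsKey(idx))
                problematicMarkerIndexToName.put(idx, markerNames[idx]);

        // get() retrieves each resolved name; keySet() iterates in ascending key order.
        String warning = "Problem at __3__ and __7__";
        for (Integer idx : problematicMarkerIndexToName.keySet())
            warning = warning.replaceAll("__" + idx + "__", problematicMarkerIndexToName.get(idx));
        System.out.println(warning); // prints: Problem at m3 and m7
    }
}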
From source file:uk.ac.leeds.ccg.andyt.projects.moses.process.RegressionReport_UK1.java
public void writeAggregateStatisticsForOptimisationConstraints_HSARHP_ISARCEP(String a_OutputDir_String)
        throws Exception {
    HashMap a_HID_HSARDataRecordVector_HashMap = _HSARDataHandler.get_HID_HSARDataRecordVector_HashMap();
    HashMap a_ID_RecordID_HashMap = _ISARDataHandler.get_ID_RecordID_HashMap();
    File optimisationConstraints_SARs = new File(a_OutputDir_String, "OptimisationConstraints_SARs.csv");
    FileOutputStream a_FileOutputStream = new FileOutputStream(optimisationConstraints_SARs);
    OutputDataHandler_OptimisationConstraints.writeHSARHP_ISARCEPHeader(a_FileOutputStream);
    a_FileOutputStream.flush();
    HashMap<String, Integer> a_SARCounts = null;
    CASDataRecord a_CASDataRecord;
    TreeSet<String> a_LADCodes_TreeSet = _CASDataHandler.getLADCodes_TreeSet();
    String s2;
    String s1;
    Iterator<String> a_Iterator_String = a_LADCodes_TreeSet.iterator();
    while (a_Iterator_String.hasNext()) {
        // Need to reorder data for each LAD as OAs not necessarily returned
        // in any order and an ordered result is wanted
        TreeMap<String, HashMap<String, Integer>> resultsForLAD = new TreeMap<String, HashMap<String, Integer>>();
        boolean setPrevious_OA_String = true;
        s1 = a_Iterator_String.next();
        s2 = s1.substring(0, 3);
        File resultsFile = new File(a_OutputDir_String + s2 + "/" + s1 + "/population.csv");
        // A few results are missing
        if (resultsFile.exists()) {
            System.out.println(resultsFile.toString() + " exists");
            String previous_OA_String = "";
            BufferedReader aBufferedReader = new BufferedReader(
                    new InputStreamReader(new FileInputStream(resultsFile)));
            StreamTokenizer aStreamTokenizer = new StreamTokenizer(aBufferedReader);
            Generic_StaticIO.setStreamTokenizerSyntax1(aStreamTokenizer);
            String line = "";
            int tokenType = aStreamTokenizer.nextToken();
            while (tokenType != StreamTokenizer.TT_EOF) {
                switch (tokenType) {
                case StreamTokenizer.TT_EOL:
                    //System.out.println(line);
                    String[] lineFields = line.split(",");
                    String a_OA_String = lineFields[0];
                    if (previous_OA_String.equalsIgnoreCase(a_OA_String)) {
                        if (lineFields[1].equalsIgnoreCase("HP")) {
                            //System.out.println("HP");
                            // From the id of a household get a Vector
                            // of HSARDataRecords
                            Vector household = (Vector) a_HID_HSARDataRecordVector_HashMap
                                    .get(new Integer(lineFields[2]));
                            HSARDataRecord a_HSARDataRecord;
                            for (int i = 0; i < household.size(); i++) {
                                a_HSARDataRecord = (HSARDataRecord) household.elementAt(i);
                                GeneticAlgorithm_HSARHP_ISARCEP.addToCounts(a_HSARDataRecord, a_SARCounts, _Random);
                            }
                            //System.out.println(a_HSARDataRecord.toString());
                        } else {
                            //System.out.println("CEP");
                            // From the id of the ISARDataRecord get the
                            // ISARRecordID.
                            long a_ISARRecordID = (Long) a_ID_RecordID_HashMap.get(new Long(lineFields[2]));
                            ISARDataRecord a_ISARDataRecord = _ISARDataHandler.getISARDataRecord(a_ISARRecordID);
                            GeneticAlgorithm_HSARHP_ISARCEP.addToCountsCEP(a_ISARDataRecord, a_SARCounts, _Random);
                        }
                    } else {
                        // Store result
                        if (setPrevious_OA_String) {
                            previous_OA_String = a_OA_String;
                            setPrevious_OA_String = false;
                        } else {
                            // Store
                            resultsForLAD.put(previous_OA_String, a_SARCounts);
                        }
                        // Initialise/Re-initialise
                        a_CASDataRecord = (CASDataRecord) _CASDataHandler.getDataRecord(a_OA_String);
                        Object[] fitnessCounts = GeneticAlgorithm_HSARHP_ISARCEP.getFitnessCounts(a_CASDataRecord);
                        a_SARCounts = (HashMap<String, Integer>) fitnessCounts[1];
                        // Start a new aggregation
                        if (lineFields[1].equalsIgnoreCase("HP")) {
                            //System.out.println("HP");
                            // From the id of a household get a Vector
                            // of HSARDataRecords
                            Vector household = (Vector) a_HID_HSARDataRecordVector_HashMap
                                    .get(new Integer(lineFields[2]));
                            HSARDataRecord a_HSARDataRecord;
                            for (int i = 0; i < household.size(); i++) {
                                a_HSARDataRecord = (HSARDataRecord) household.elementAt(i);
                                GeneticAlgorithm_HSARHP_ISARCEP.addToCounts(a_HSARDataRecord, a_SARCounts, _Random);
                            }
                            //System.out.println(a_HSARDataRecord.toString());
                        } else {
                            //System.out.println("CEP");
                            // From the id of the ISARDataRecord get the
                            // ISARRecordID.
                            long a_ISARRecordID = (Long) a_ID_RecordID_HashMap.get(new Long(lineFields[2]));
                            ISARDataRecord a_ISARDataRecord = _ISARDataHandler.getISARDataRecord(a_ISARRecordID);
                            GeneticAlgorithm_HSARHP_ISARCEP.addToCountsCEP(a_ISARDataRecord, a_SARCounts, _Random);
                            //System.out.println(a_ISARDataRecord.toString());
                        }
                        //a_OA_String = lineFields[0];
                    }
                    previous_OA_String = a_OA_String;
                    break;
                case StreamTokenizer.TT_WORD:
                    line = aStreamTokenizer.sval;
                    break;
                }
                tokenType = aStreamTokenizer.nextToken();
            }
        } else {
            System.out.println(resultsFile.toString() + " !exists");
        }
        Iterator<String> string_Iterator = resultsForLAD.keySet().iterator();
        while (string_Iterator.hasNext()) {
            String oa_Code = string_Iterator.next();
            OutputDataHandler_OptimisationConstraints.writeHSARHP_ISARCEP(resultsForLAD.get(oa_Code), oa_Code,
                    a_FileOutputStream);
        }
    }
    a_FileOutputStream.close();
}
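The TreeMap in this example is used purely for re-ordering: per-area counts are put in whatever order the results file yields them, then written out in ascending key order via keySet() and get(). A stripped-down sketch of that idea (the area codes and count values below are made up for illustration):

import java.util.HashMap;
import java.util.TreeMap;

public class OrderedResultsDemo {
    public static void main(String[] args) {
        // Results arrive in arbitrary order; the TreeMap re-orders them by area code.
        TreeMap<String, HashMap<String, Integer>> resultsForLAD = new TreeMap<String, HashMap<String, Integer>>();
        resultsForLAD.put("00DAFA0002", countsOf("HP", 3));
        resultsForLAD.put("00DAFA0001", countsOf("CEP", 5));

        // keySet() iterates in ascending key order; get() fetches each area's counts.
        for (String oaCode : resultsForLAD.keySet())
            System.out.println(oaCode + " -> " + resultsForLAD.get(oaCode));
    }

    private static HashMap<String, Integer> countsOf(String key, int value) {
        HashMap<String, Integer> counts = new HashMap<String, Integer>();
        counts.put(key, value);
        return counts;
    }
}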
From source file:com.cmart.DB.CassandraDBQuery.java
public ArrayList<Purchase> getPurchases(long userID, long ts) {
    ArrayList<Purchase> purchases = new ArrayList<Purchase>();
    if (userID < 1)
        return purchases;

    int attemptsRemaining = SQL_RETRIES;
    do {
        Connection conn = this.getConnection();
        if (conn != null) {
            PreparedStatement statement = null;
            PreparedStatement itemstatement = null;
            try {
                // Get the purchases
                statement = conn.prepareStatement(
                        "SELECT itemid FROM purchased WHERE userid=" + userID + " AND ts>=" + ts);
                ResultSet rs = statement.executeQuery();

                // Get ids of items we need
                StringBuilder ids = new StringBuilder();
                ids.append("('0");
                while (rs.next()) {
                    ids.append("','");
                    ids.append(rs.getLong("itemid"));
                }
                ids.append("')");
                rs.close();
                statement.close();

                // Get the items
                itemstatement = conn.prepareStatement("SELECT * FROM olditems WHERE KEY IN " + ids.toString());
                ResultSet itemrs = itemstatement.executeQuery();
                TreeMap<Long, Item> items = new TreeMap<Long, Item>();
                while (itemrs.next()) {
                    String name = null;
                    try {
                        name = itemrs.getString("name");
                    } catch (Exception e) {
                    }

                    if (name != null) {
                        ArrayList<Image> images = this.getItemImages(itemrs.getLong("KEY"));
                        Item currentItem = new Item(itemrs.getLong("KEY"), name,
                                new String(itemrs.getString("description")),
                                new Integer((int) itemrs.getLong("quantity")), itemrs.getDouble("startprice"),
                                itemrs.getDouble("reserveprice"), itemrs.getDouble("buynowprice"),
                                itemrs.getDouble("curbid"), itemrs.getDouble("maxbid"),
                                new Integer((int) itemrs.getLong("noofbids")),
                                new Date(itemrs.getLong("startdate")), new Date(itemrs.getLong("enddate")),
                                itemrs.getLong("sellerid"), itemrs.getLong("categoryid"),
                                new String(itemrs.getString("thumbnail")), images);
                        items.put(itemrs.getLong("KEY"), currentItem);
                    }
                }
                itemrs.close();
                itemstatement.close();

                // Get the purchases and make them
                statement = conn
                        .prepareStatement("SELECT * FROM purchased WHERE userid=" + userID + " AND ts>=" + ts);
                rs = statement.executeQuery();
                while (rs.next()) {
                    // Make sure it is real
                    long pKey = 0;
                    try {
                        Long temp = rs.getLong("KEY");
                        pKey = temp;
                    } catch (Exception e) {
                    }

                    if (pKey > 0) {
                        purchases.add(new Purchase(rs.getLong("KEY"), items.get(rs.getLong("itemid")),
                                (int) rs.getLong("quantity"), rs.getDouble("price"),
                                new Boolean(rs.getString("paid"))));
                    }
                }
                rs.close();

                attemptsRemaining = 0;
            } catch (CommunicationsException e) {
                this.forceCloseConnection(conn);
                this.checkConnections();
            } catch (MySQLNonTransientConnectionException e) {
                this.forceCloseConnection(conn);
                this.checkConnections();
            } catch (Exception e) {
                System.out.println("MySQLQuery (getPurchases): Could not get the purchases");
                e.printStackTrace();
            } finally {
                this.closeSmt(statement);
                this.closeConnection(conn);
            }
        }
        attemptsRemaining--;
    } while (attemptsRemaining >= 0);

    return purchases;
}
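Here the TreeMap acts as an in-memory index: items are loaded once keyed by id, and each purchase row is then resolved with items.get(itemId). A small self-contained sketch of that join pattern (the item ids and names below are hypothetical, and get() returns null for an id that was never indexed):

import java.util.ArrayList;
import java.util.List;
import java.util.TreeMap;

public class PurchaseJoinDemo {
    public static void main(String[] args) {
        // Index items by id once...
        TreeMap<Long, String> itemsById = new TreeMap<Long, String>();
        itemsById.put(101L, "Road bike");
        itemsById.put(205L, "Helmet");

        // ...then resolve each purchased item id with get(); a missing id yields null.
        long[] purchasedItemIds = { 205L, 101L, 999L };
        List<String> purchaseDescriptions = new ArrayList<String>();
        for (long itemId : purchasedItemIds) {
            String item = itemsById.get(itemId);
            purchaseDescriptions.add(item != null ? item : "unknown item " + itemId);
        }
        System.out.println(purchaseDescriptions); // [Helmet, Road bike, unknown item 999]
    }
}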
From source file:com.dell.asm.asmcore.asmmanager.app.rest.ServiceTemplateService.java
/**
 * We will be parsing the network interfaces, most likely extracted from a device xml, and generating a json
 * string to set the value of the service template setting with it
 *
 * @param serverSettingNetwork
 * @param networkInterfaces
 */
protected void applyNetworkCustomization(ServiceTemplateSetting serverSettingNetwork,
        final TreeMap<String, Component> networkInterfaces, boolean is2x10gby2x1gNicType) {
    if (serverSettingNetwork == null || networkInterfaces == null)
        return;

    LOGGER.debug("Beginning to construct network from config xml");

    //Setup the little helpers we will be using later
    //Create the top level of networkconfiguration
    com.dell.asm.asmcore.asmmanager.client.networkconfiguration.NetworkConfiguration networkConfigurationResult = new com.dell.asm.asmcore.asmmanager.client.networkconfiguration.NetworkConfiguration();
    networkConfigurationResult.setId(UUID.randomUUID().toString());
    networkConfigurationResult.setInterfaces(new ArrayList<Fabric>());
    Fabric lastCard = new Fabric();
    Interface lastSubInterface = new Interface();
    // String lastSlot = "1";
    //Fabric lastFabric = new Fabric();
    //Interface lastFabricInterface = new Interface();

    //We use this comparator to ensure the fabrics show up in the right order in the UI
    //If we don't sort, the fabrics will show up in the order they show up in the config.xml
    ArrayList<String> sortedInterfaces = new ArrayList<String>();
    sortedInterfaces.addAll(networkInterfaces.keySet());
    Collections.sort(sortedInterfaces, new FabricComparator());

    //Loop through sortedInterfaces to map out which nics are in which fabric/slot.
    HashMap<String, String> fabricMap = new HashMap<>();
    int fabricIndex = -1;
    String lastFabricLocation = "0";
    String lastNicType = "";
    // For RACK Servers, 1 Integrated and 7 PCI slots
    String[] fabricIds = { "1", "2", "3", "4", "5", "6", "7", "8" };
    for (String nic : sortedInterfaces) {
        String thisLocation = getFabricLocation(nic);
        String thisNicType = nic.split("\\.\\d\\w*")[0];
        if (!thisLocation.equals(lastFabricLocation) || !lastNicType.equals(thisNicType)) {
            fabricIndex++;
            lastFabricLocation = thisLocation;
            lastNicType = thisNicType;
        }
        String thisFabricId = fabricIds[fabricIndex];
        fabricMap.put(nic, thisFabricId);
    }

    //Iterate over all of the nic configurations that were passed in sorted off of their keys
    int firstInterfaceCount = 0; // is2x10gby2x1gNicType can only be set for ONE interface, must make sure we do not set it for ALL Network Interfaces (Network Cards / Nics)
    for (String key : sortedInterfaces) {
        String regex = "^\\w+\\.\\w+";
        String min = "0";
        String max = "100";
        //Parse nics that are of the expected naming convention
        if (key.matches(regex + "\\.\\d\\w?-\\d(-\\d)?")) {
            LOGGER.debug("Found correctly formatted key: " + key);
            Component nic = networkInterfaces.get(key);
            //Get the values from the passed in nic configuration
            for (Attribute a : nic.getAttribute()) {
                if ("minbandwidth".equalsIgnoreCase(a.getName()))
                    min = a.getValue();
                else if ("maxbandwidth".equalsIgnoreCase(a.getName()))
                    max = a.getValue();
            }
            Pattern pattern = Pattern.compile(regex);
            Matcher m = pattern.matcher(key);
            m.find();
            String nicType = m.group();
            boolean isFcFabric = nicType.toLowerCase().startsWith("fc.");
            String numeric = key.split(regex + "\\.")[1];
            String[] numericSplit = numeric.split("-");

            //Now parse the numeric portion and construct a network configuration hierarchy from it
            //Top level under the network configuration
            String fIndex = fabricMap.get(nic.getFQDD());
            String topName = "Interface " + fIndex;
            if (!topName.equals(lastCard.getName())) {
                LOGGER.debug("Creating top level for: " + topName);
                lastCard = new Fabric();
                lastCard.setId(UUID.randomUUID().toString());
                lastCard.setName(topName);
                lastCard.setEnabled(true);
                if (is2x10gby2x1gNicType && firstInterfaceCount < 4) {
                    lastCard.setNictype(Interface.NIC_2_X_10GB_2_X_1GB);
                } else
                    lastCard.setNictype(Interface.NIC_2_X_10GB);
                lastCard.setInterfaces(new ArrayList<Interface>());
                if (isFcFabric) {
                    lastCard.setFabrictype(Fabric.FC_TYPE);
                } else {
                    lastCard.setFabrictype(Fabric.ETHERNET_TYPE);
                }
                //Only add the interfaces here if it is rack server
                //If not we will be constructing them anyway but just discarding them
                networkConfigurationResult.getInterfaces().add(lastCard);
            }

            //FC fabrics should have no port/partition configuration.
            if (!isFcFabric) {
                //Middle level
                String thisSlot = numericSplit[0];
                String portNumber = numericSplit[1];
                // Embedded/Integrated nics are weird, in that the nics could be labelled 1-1-1 and 2-1-1, but those are single port cards.
                // So we want to show double ports, in the same fabric.
                if ((key.contains("Embedded") || key.contains("Integrated")) && !thisSlot.equals("1")) {
                    portNumber = thisSlot;
                }
                String middleName = "Port " + portNumber;
                if (!middleName.equals(lastSubInterface.getName())) {
                    if (Integer.parseInt(numericSplit[1]) > 2) {
                        if (is2x10gby2x1gNicType && firstInterfaceCount < 4) {
                            lastCard.setNictype(Interface.NIC_2_X_10GB_2_X_1GB);
                        } else {
                            lastCard.setNictype(Interface.NIC_4_X_10GB);
                        }
                    }
                    LOGGER.debug("Creating middle level for: " + middleName);
                    lastSubInterface = new Interface();
                    lastSubInterface.setId(UUID.randomUUID().toString());
                    lastCard.getInterfaces().add(lastSubInterface);
                    lastSubInterface.setPartitions(new ArrayList<Partition>());
                    lastSubInterface.setName(middleName);
                    lastSubInterface.setPartitioned(true);
                }

                //Last level. Can be missed in FQDD.
                if (numericSplit.length > 2) {
                    Partition partition = new Partition();
                    partition.setId(UUID.randomUUID().toString());
                    partition.setName(numericSplit[2]);
                    partition.setMinimum(Integer.parseInt(min));
                    partition.setMaximum(Integer.parseInt(max));
                    partition.setNetworks(new ArrayList<String>());
                    lastSubInterface.getPartitions().add(partition);
                }
            }
        }
        firstInterfaceCount++;
    }
    fillMaxPortsAndPartitions(networkConfigurationResult.getInterfaces());
    serverSettingNetwork.setValue(ServiceTemplateUtil.serializeNetwork(networkConfigurationResult));
}
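The TreeMap.get aspect of the example above is the access pattern: the keys of the TreeMap<String, Component> are copied into a list, re-sorted with a custom comparator, and each Component is then fetched with networkInterfaces.get(key). A minimal sketch of that pattern (the key names, values and ordering rule below are invented for illustration):

import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.TreeMap;

public class CustomKeyOrderDemo {
    public static void main(String[] args) {
        // The map itself keeps keys in natural (lexicographic) order...
        TreeMap<String, String> nicSettings = new TreeMap<String, String>();
        nicSettings.put("NIC.Slot.2-1-1", "min=25,max=100");
        nicSettings.put("NIC.Integrated.1-1-1", "min=0,max=100");

        // ...but we can walk the entries in any order we like by sorting a copy
        // of the key set and looking each key up with get().
        ArrayList<String> sortedKeys = new ArrayList<String>(nicSettings.keySet());
        Collections.sort(sortedKeys, new Comparator<String>() {
            public int compare(String a, String b) {
                // Hypothetical rule: integrated NICs before slotted ones.
                return Boolean.compare(!a.contains("Integrated"), !b.contains("Integrated"));
            }
        });
        for (String key : sortedKeys)
            System.out.println(key + " -> " + nicSettings.get(key));
    }
}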