List of usage examples for java.util.LinkedList.isEmpty()
boolean isEmpty();
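Before the real-world examples below, here is a minimal, self-contained sketch of the method's behavior (class and variable names are illustrative only): isEmpty() returns true for a freshly created LinkedList, false once it holds at least one element, and true again after the list has been drained.

import java.util.LinkedList;

public class IsEmptyDemo {
    public static void main(String[] args) {
        LinkedList<String> tasks = new LinkedList<String>();
        System.out.println(tasks.isEmpty());   // true: no elements yet

        tasks.add("parse");
        tasks.add("render");
        System.out.println(tasks.isEmpty());   // false: two elements present

        // Typical drain loop: guard removeFirst() with isEmpty().
        while (!tasks.isEmpty()) {
            System.out.println("processing " + tasks.removeFirst());
        }
        System.out.println(tasks.isEmpty());   // true again after draining
    }
}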
From source file:net.spfbl.spf.SPF.java
/**
 * Updates the SPF record of a hostname.
 *
 * @throws ProcessException if processing fails.
 */
private synchronized void refresh(boolean load, boolean bgWhenUnavailable) throws ProcessException {
    long time = System.currentTimeMillis();
    LinkedList<String> registryList = getRegistrySPF(hostname, bgWhenUnavailable);
    if (registryList == null) {
        // Domain not found.
        this.mechanismList = null;
        this.all = null;
        this.redirect = null;
        this.explanation = null;
        this.error = false;
        CacheSPF.CHANGED = true;
        this.addInexistent();
        updateLastRefresh();
        Server.logLookupSPF(time, hostname, "NXDOMAIN");
    } else if (registryList.isEmpty()) {
        // No SPF record.
        this.mechanismList = new ArrayList<Mechanism>();
        this.all = null;
        this.redirect = null;
        this.explanation = null;
        this.error = false;
        CacheSPF.CHANGED = true;
        this.nxdomain = 0;
        updateLastRefresh();
        Server.logLookupSPF(time, hostname, "NO REGISTRY");
    } else {
        ArrayList<Mechanism> mechanismListIP = new ArrayList<Mechanism>();
        ArrayList<Mechanism> mechanismListDNS = new ArrayList<Mechanism>();
        ArrayList<Mechanism> mechanismListInclude = new ArrayList<Mechanism>();
        ArrayList<Mechanism> mechanismListPTR = new ArrayList<Mechanism>();
        TreeSet<String> visitedTokens = new TreeSet<String>();
        Qualifier allLocal = null;
        String redirectLocal = null;
        String explanationLocal = null;
        boolean errorQuery = false;
        String fixed;
        String result = null;
        for (String registry : registryList) {
            boolean errorRegistry = false;
            StringTokenizer tokenizer = new StringTokenizer(registry, " ");
            while (tokenizer.hasMoreTokens()) {
                String token = tokenizer.nextToken();
                if (visitedTokens.contains(token)) {
                    // Token already visited.
                } else if (token.equals("spf1")) {
                    // Nothing to do.
                } else if (token.equals("v=spf1")) {
                    // Nothing to do.
                } else if (token.equals("v=msv1")) {
                    // Nothing to do.
                } else if (token.equals("+")) {
                    // Ignore isolated qualifiers.
                } else if (token.startsWith("t=") && token.length() == 32) {
                    // Nothing to do.
                } else if (isMechanismAll(token)) {
                    // Do not allow permissive qualifiers for all.
                    switch (token.charAt(0)) {
                    case '-':
                        allLocal = Qualifier.FAIL;
                        break;
                    case '~':
                        allLocal = Qualifier.SOFTFAIL;
                        break;
                    default:
                        allLocal = Qualifier.NEUTRAL; // Default qualifier or all.
                    }
                } else if (isMechanismIPv4(token)) {
                    mechanismListIP.add(new MechanismIPv4(token));
                } else if (isMechanismIPv6(token)) {
                    mechanismListIP.add(new MechanismIPv6(token));
                } else if (isMechanismA(token)) {
                    mechanismListDNS.add(new MechanismA(token, load));
                } else if (isMechanismMX(token)) {
                    mechanismListDNS.add(new MechanismMX(token, load));
                } else if (isMechanismPTR(token)) {
                    mechanismListPTR.add(new MechanismPTR(token));
                } else if (isMechanismExistis(token)) {
                    mechanismListDNS.add(new MechanismExists(token));
                } else if (isMechanismInclude(token)) {
                    mechanismListInclude.add(new MechanismInclude(token));
                } else if (isModifierRedirect(token)) {
                    int index = token.indexOf("=") + 1;
                    redirectLocal = token.substring(index);
                } else if (isModifierExplanation(token)) {
                    int index = token.indexOf("=") + 1;
                    explanationLocal = token.substring(index);
                } else if ((fixed = extractIPv4CIDR(token)) != null) {
                    // Try to recover from a syntax error.
                    if (!visitedTokens.contains(token = "ip4:" + fixed)) {
                        mechanismListIP.add(new MechanismIPv4(token));
                    }
                    errorRegistry = true;
                } else if ((fixed = extractIPv6CIDR(token)) != null) {
                    // Try to recover from a syntax error.
                    if (!visitedTokens.contains(token = "ip4:" + fixed)) {
                        mechanismListIP.add(new MechanismIPv6(token));
                    }
                    errorRegistry = true;
                } else {
                    // An error was found during processing.
                    Server.logDebug("SPF token not defined: " + token);
                    errorRegistry = true;
                    errorQuery = true;
                }
                visitedTokens.add(token);
            }
            if (result == null) {
                result = (errorRegistry ? "ERR" : "OK") + " \"" + registry + "\"";
            } else {
                result += (errorRegistry ? "\\nERR" : "\\nOK") + " \"" + registry + "\"";
            }
        }
        // Consider the mechanisms in increasing order of processing complexity.
        ArrayList<Mechanism> mechanismListLocal = new ArrayList<Mechanism>();
        mechanismListLocal.addAll(mechanismListIP);
        mechanismListLocal.addAll(mechanismListDNS);
        mechanismListLocal.addAll(mechanismListInclude);
        mechanismListLocal.addAll(mechanismListPTR);
        // Assign the new values.
        this.mechanismList = mechanismListLocal;
        this.all = allLocal;
        this.redirect = redirectLocal;
        this.explanation = explanationLocal;
        this.error = errorQuery;
        CacheSPF.CHANGED = true;
        this.nxdomain = 0;
        updateLastRefresh();
        Server.logLookupSPF(time, hostname, result);
    }
}
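The example above uses two distinct states: a null list means the domain does not exist (NXDOMAIN), while an empty list means the domain exists but publishes no SPF record. A minimal sketch of that null-versus-empty convention follows; classify() and its return strings are hypothetical and not part of SPF.java.

import java.util.LinkedList;

// Hypothetical helper illustrating the convention used above.
static String classify(LinkedList<String> records) {
    if (records == null) {
        return "NXDOMAIN";      // lookup failed: the domain does not exist
    } else if (records.isEmpty()) {
        return "NO REGISTRY";   // the domain exists but published no record
    } else {
        return "OK (" + records.size() + " record(s))";
    }
}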
From source file:com.google.bitcoin.core.Wallet.java
/**
 * Checks if "tx" is spending any inputs of pending transactions. Not a general check, but it can work even if
 * the double spent inputs are not ours. Returns the pending tx that was double spent or null if none found.
 */
private boolean checkForDoubleSpendAgainstPending(Transaction tx, boolean takeAction) {
    checkState(lock.isHeldByCurrentThread());
    // Compile a set of outpoints that are spent by tx.
    HashSet<TransactionOutPoint> outpoints = new HashSet<TransactionOutPoint>();
    for (TransactionInput input : tx.getInputs()) {
        outpoints.add(input.getOutpoint());
    }
    // Now for each pending transaction, see if it shares any outpoints with this tx.
    LinkedList<Transaction> doubleSpentTxns = Lists.newLinkedList();
    for (Transaction p : pending.values()) {
        for (TransactionInput input : p.getInputs()) {
            // This relies on the fact that TransactionOutPoint equality is defined at the protocol not object
            // level - outpoints from two different inputs that point to the same output compare the same.
            TransactionOutPoint outpoint = input.getOutpoint();
            if (outpoints.contains(outpoint)) {
                // It does, it's a double spend against the pending pool, which makes it relevant.
                if (!doubleSpentTxns.isEmpty() && doubleSpentTxns.getLast() == p)
                    continue;
                doubleSpentTxns.add(p);
            }
        }
    }
    if (takeAction && !doubleSpentTxns.isEmpty()) {
        killTx(tx, doubleSpentTxns);
    }
    return !doubleSpentTxns.isEmpty();
}
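Note how the scan above only calls getLast() after confirming !doubleSpentTxns.isEmpty(); on an empty LinkedList, getLast() throws NoSuchElementException. A minimal sketch of that guard pattern, with illustrative names not taken from the Wallet source:

import java.util.LinkedList;

// Appends item only if it is not already the current tail; safe on an empty list.
static <T> void addIfNotLast(LinkedList<T> list, T item) {
    if (!list.isEmpty() && list.getLast() == item) {
        return; // already the most recently added element
    }
    list.add(item);
}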
From source file:org.hyperledger.fabric.sdk.Channel.java
private String registerChaincodeListenerProcessor() throws InvalidArgumentException {
    logger.debug(format("Channel %s registerChaincodeListenerProcessor starting", name));

    // Chaincode event listener is internal Block listener for chaincode events.
    return registerBlockListener(blockEvent -> {

        if (chainCodeListeners.isEmpty()) {
            return;
        }

        LinkedList<ChaincodeEvent> chaincodeEvents = new LinkedList<>();

        // Find the chaincode events in the transactions.
        for (TransactionEvent transactionEvent : blockEvent.getTransactionEvents()) {

            logger.debug(format("Channel %s got event for transaction %s ", name,
                    transactionEvent.getTransactionID()));

            for (BlockInfo.TransactionEnvelopeInfo.TransactionActionInfo info : transactionEvent
                    .getTransactionActionInfos()) {

                ChaincodeEvent event = info.getEvent();
                if (null != event) {
                    chaincodeEvents.add(event);
                }
            }
        }

        if (!chaincodeEvents.isEmpty()) {

            class MatchPair {
                final ChaincodeEventListenerEntry eventListener;
                final ChaincodeEvent event;

                MatchPair(ChaincodeEventListenerEntry eventListener, ChaincodeEvent event) {
                    this.eventListener = eventListener;
                    this.event = event;
                }
            }

            List<MatchPair> matches = new LinkedList<>();

            // Find matches.
            synchronized (chainCodeListeners) {
                for (ChaincodeEventListenerEntry chaincodeEventListenerEntry : chainCodeListeners.values()) {
                    for (ChaincodeEvent chaincodeEvent : chaincodeEvents) {
                        if (chaincodeEventListenerEntry.isMatch(chaincodeEvent)) {
                            matches.add(new MatchPair(chaincodeEventListenerEntry, chaincodeEvent));
                        }
                    }
                }
            }

            // Fire events.
            for (MatchPair match : matches) {
                ChaincodeEventListenerEntry chaincodeEventListenerEntry = match.eventListener;
                ChaincodeEvent ce = match.event;
                chaincodeEventListenerEntry.fire(blockEvent, ce);
            }
        }
    });
}
From source file:com.net2plan.interfaces.networkDesign.NetPlan.java
/**
 * <p>Checks if a sequence of links and resources traversed is valid, that is all the links follow a contiguous
 * path from the demand ingress node to the egress node, and the resources are traversed in the appropriate order.
 * If the sequence is not valid, an exception is thrown.</p>
 *
 * @param path Sequence of links
 * @param d Demand
 * @return see above
 */
Pair<List<Link>, List<Resource>> checkPathValidityForDemand(List<? extends NetworkElement> path, Demand d) {
    checkInThisNetPlan(d);
    checkInThisNetPlanAndLayer(path, d.layer);
    LinkedList<Link> links = new LinkedList<Link>();
    List<Resource> resources = new ArrayList<Resource>();
    for (NetworkElement e : path) {
        if (e == null)
            throw new Net2PlanException("A link/resource in the sequence is null");
        if (e instanceof Link)
            links.add((Link) e);
        else if (e instanceof Resource) {
            resources.add((Resource) e);
            if (links.isEmpty() && !((Resource) e).hostNode.equals(d.ingressNode))
                throw new Net2PlanException("Wrong resource node in the service chain");
            if (!links.isEmpty() && !((Resource) e).hostNode.equals(links.getLast().destinationNode))
                throw new Net2PlanException("Wrong resource node in the service chain");
        } else
            throw new Net2PlanException("A list of links and/or resources is expected");
    }
    checkContiguousPath(links, d.layer, d.ingressNode, d.egressNode);
    if (resources.size() != d.mandatorySequenceOfTraversedResourceTypes.size())
        throw new Net2PlanException(
                "The path does not follow the sequence of resources of the service chain request");
    for (int cont = 0; cont < resources.size(); cont++)
        if (!resources.get(cont).type.equals(d.mandatorySequenceOfTraversedResourceTypes.get(cont)))
            throw new Net2PlanException(
                    "The path does not follow the sequence of resources of the service chain request");
    return Pair.of(links, resources);
}
From source file:com.joliciel.talismane.TalismaneImpl.java
public void analyse(TalismaneConfig config) { try {//w w w . jav a 2 s . c om if (config.needsSentenceDetector()) { if (config.getSentenceDetector() == null) { throw new TalismaneException("Sentence detector not provided."); } } if (config.needsTokeniser()) { if (config.getTokeniser() == null) { throw new TalismaneException("Tokeniser not provided."); } } if (config.needsPosTagger()) { if (config.getPosTagger() == null) { throw new TalismaneException("Pos-tagger not provided."); } } if (config.needsParser()) { if (config.getParser() == null) { throw new TalismaneException("Parser not provided."); } } if (config.getEndModule().equals(Module.SentenceDetector)) { if (this.getSentenceProcessor() == null) { throw new TalismaneException( "No sentence processor provided with sentence detector end module, cannot generate output."); } } if (config.getEndModule().equals(Module.Tokeniser)) { if (this.getTokenSequenceProcessor() == null) { throw new TalismaneException( "No token sequence processor provided with tokeniser end module, cannot generate output."); } } if (config.getEndModule().equals(Module.PosTagger)) { if (this.getPosTagSequenceProcessor() == null) { throw new TalismaneException( "No postag sequence processor provided with pos-tagger end module, cannot generate output."); } } if (config.getEndModule().equals(Module.Parser)) { if (this.getParseConfigurationProcessor() == null) { throw new TalismaneException( "No parse configuration processor provided with parser end module, cannot generate output."); } } LinkedList<String> textSegments = new LinkedList<String>(); LinkedList<Sentence> sentences = new LinkedList<Sentence>(); TokenSequence tokenSequence = null; PosTagSequence posTagSequence = null; RollingSentenceProcessor rollingSentenceProcessor = this.getFilterService() .getRollingSentenceProcessor(config.getFileName(), config.isProcessByDefault()); Sentence leftover = null; if (config.getStartModule().equals(Module.SentenceDetector) || config.getStartModule().equals(Module.Tokeniser)) { // prime the sentence detector with two text segments, to ensure everything gets processed textSegments.addLast(""); textSegments.addLast(""); } StringBuilder stringBuilder = new StringBuilder(); boolean finished = false; int sentenceCount = 0; String prevProcessedText = ""; String processedText = ""; String nextProcessedText = ""; SentenceHolder prevSentenceHolder = null; int endBlockCharacterCount = 0; while (!finished) { if (config.getStartModule().equals(Module.SentenceDetector) || config.getStartModule().equals(Module.Tokeniser)) { // Note SentenceDetector and Tokeniser start modules treated identically, // except that for SentenceDetector we apply a probabilistic sentence detector // whereas for Tokeniser we assume all sentence breaks are marked by filters // read characters from the reader, one at a time char c; int r = -1; try { r = this.getReader().read(); } catch (IOException e) { LogUtils.logError(LOG, e); } if (r == -1) { finished = true; c = '\n'; } else { c = (char) r; } // Jump out if we have 3 consecutive end-block characters. if (c == config.getEndBlockCharacter()) { endBlockCharacterCount++; if (endBlockCharacterCount == 3) { LOG.info("Three consecutive end-block characters. 
Exiting."); finished = true; } } else { endBlockCharacterCount = 0; } // have sentence detector if (finished || (Character.isWhitespace(c) && stringBuilder.length() > config.getBlockSize()) || c == config.getEndBlockCharacter()) { if (c == config.getEndBlockCharacter()) stringBuilder.append(c); if (stringBuilder.length() > 0) { String textSegment = stringBuilder.toString(); stringBuilder = new StringBuilder(); textSegments.add(textSegment); } // is the current block > 0 characters? if (c == config.getEndBlockCharacter()) { textSegments.addLast(""); } } // is there a next block available? if (finished) { if (stringBuilder.length() > 0) { textSegments.addLast(stringBuilder.toString()); stringBuilder = new StringBuilder(); } textSegments.addLast(""); textSegments.addLast(""); textSegments.addLast(""); } if (c != config.getEndBlockCharacter()) stringBuilder.append(c); while (textSegments.size() >= 3) { String prevText = textSegments.removeFirst(); String text = textSegments.removeFirst(); String nextText = textSegments.removeFirst(); if (LOG.isTraceEnabled()) { LOG.trace("prevText: " + prevText); LOG.trace("text: " + text); LOG.trace("nextText: " + nextText); } Set<TextMarker> textMarkers = new TreeSet<TextMarker>(); for (TextMarkerFilter textMarkerFilter : config.getTextMarkerFilters()) { Set<TextMarker> result = textMarkerFilter.apply(prevText, text, nextText); textMarkers.addAll(result); } // push the text segments back onto the beginning of Deque textSegments.addFirst(nextText); textSegments.addFirst(text); SentenceHolder sentenceHolder = rollingSentenceProcessor.addNextSegment(text, textMarkers); prevProcessedText = processedText; processedText = nextProcessedText; nextProcessedText = sentenceHolder.getText(); if (LOG.isTraceEnabled()) { LOG.trace("prevProcessedText: " + prevProcessedText); LOG.trace("processedText: " + processedText); LOG.trace("nextProcessedText: " + nextProcessedText); } boolean reallyFinished = finished && textSegments.size() == 3; if (prevSentenceHolder != null) { if (config.getStartModule().equals(Module.SentenceDetector)) { List<Integer> sentenceBreaks = config.getSentenceDetector() .detectSentences(prevProcessedText, processedText, nextProcessedText); for (int sentenceBreak : sentenceBreaks) { prevSentenceHolder.addSentenceBoundary(sentenceBreak); } } List<Sentence> theSentences = prevSentenceHolder.getDetectedSentences(leftover); leftover = null; for (Sentence sentence : theSentences) { if (sentence.isComplete() || reallyFinished) { sentences.add(sentence); sentenceCount++; } else { LOG.debug("Setting leftover to: " + sentence.getText()); leftover = sentence; } } if (config.getMaxSentenceCount() > 0 && sentenceCount >= config.getMaxSentenceCount()) { finished = true; } } prevSentenceHolder = sentenceHolder; } // we have at least 3 text segments (should always be the case once we get started) } else if (config.getStartModule().equals(Module.PosTagger)) { if (config.getTokenCorpusReader().hasNextTokenSequence()) { tokenSequence = config.getTokenCorpusReader().nextTokenSequence(); } else { tokenSequence = null; finished = true; } } else if (config.getStartModule().equals(Module.Parser)) { if (config.getPosTagCorpusReader().hasNextPosTagSequence()) { posTagSequence = config.getPosTagCorpusReader().nextPosTagSequence(); } else { posTagSequence = null; finished = true; } } // which start module? 
boolean needToProcess = false; if (config.getStartModule().equals(Module.SentenceDetector) || config.getStartModule().equals(Module.Tokeniser)) needToProcess = !sentences.isEmpty(); else if (config.getStartModule().equals(Module.PosTagger)) needToProcess = tokenSequence != null; else if (config.getStartModule().equals(Module.Parser)) needToProcess = posTagSequence != null; while (needToProcess) { Sentence sentence = null; if (config.getStartModule().compareTo(Module.Tokeniser) <= 0 && config.getEndModule().compareTo(Module.SentenceDetector) >= 0) { sentence = sentences.poll(); LOG.debug("Sentence: " + sentence); if (this.getSentenceProcessor() != null) this.getSentenceProcessor().onNextSentence(sentence.getText(), this.getWriter()); } // need to read next sentence List<TokenSequence> tokenSequences = null; if (config.needsTokeniser()) { tokenSequences = config.getTokeniser().tokenise(sentence); tokenSequence = tokenSequences.get(0); if (this.getTokenSequenceProcessor() != null) { this.getTokenSequenceProcessor().onNextTokenSequence(tokenSequence, this.getWriter()); } } // need to tokenise ? List<PosTagSequence> posTagSequences = null; if (config.needsPosTagger()) { posTagSequence = null; if (tokenSequences == null || !config.isPropagateTokeniserBeam()) { tokenSequences = new ArrayList<TokenSequence>(); tokenSequences.add(tokenSequence); } if (config.getPosTagger() instanceof NonDeterministicPosTagger) { NonDeterministicPosTagger nonDeterministicPosTagger = (NonDeterministicPosTagger) config .getPosTagger(); posTagSequences = nonDeterministicPosTagger.tagSentence(tokenSequences); posTagSequence = posTagSequences.get(0); } else { posTagSequence = config.getPosTagger().tagSentence(tokenSequence); } if (posTagSequenceProcessor != null) { posTagSequenceProcessor.onNextPosTagSequence(posTagSequence, this.getWriter()); } tokenSequence = null; } // need to postag if (config.needsParser()) { if (posTagSequences == null || !config.isPropagatePosTaggerBeam()) { posTagSequences = new ArrayList<PosTagSequence>(); posTagSequences.add(posTagSequence); } ParseConfiguration parseConfiguration = null; List<ParseConfiguration> parseConfigurations = null; try { if (config.getParser() instanceof NonDeterministicParser) { NonDeterministicParser nonDeterministicParser = (NonDeterministicParser) config .getParser(); parseConfigurations = nonDeterministicParser.parseSentence(posTagSequences); parseConfiguration = parseConfigurations.get(0); } else { parseConfiguration = config.getParser().parseSentence(posTagSequence); } if (this.getParseConfigurationProcessor() != null) { this.getParseConfigurationProcessor().onNextParseConfiguration(parseConfiguration, this.getWriter()); } } catch (Exception e) { LOG.error(e); if (stopOnError) throw new RuntimeException(e); } posTagSequence = null; } // need to parse if (config.getStartModule().equals(Module.SentenceDetector) || config.getStartModule().equals(Module.Tokeniser)) needToProcess = !sentences.isEmpty(); else if (config.getStartModule().equals(Module.PosTagger)) needToProcess = tokenSequence != null; else if (config.getStartModule().equals(Module.Parser)) needToProcess = posTagSequence != null; } // next sentence } // next character } finally { if (this.getParseConfigurationProcessor() != null) { this.getParseConfigurationProcessor().onCompleteParse(); } try { this.getReader().close(); this.getWriter().flush(); this.getWriter().close(); } catch (IOException ioe2) { LOG.error(ioe2); throw new RuntimeException(ioe2); } } }
From source file:org.nuxeo.ecm.core.storage.sql.NXQLQueryMaker.java
public Query buildQuery(SQLInfo sqlInfo, Model model, Session session, String query, QueryFilter queryFilter, Object... params) throws StorageException { this.sqlInfo = sqlInfo; database = sqlInfo.database;//from w ww .ja va 2 s .c o m dialect = sqlInfo.dialect; this.model = model; this.session = session; // transform the query according to the transformers defined by the // security policies SQLQuery sqlQuery = SQLQueryParser.parse(query); for (SQLQuery.Transformer transformer : queryFilter.getQueryTransformers()) { sqlQuery = transformer.transform(queryFilter.getPrincipal(), sqlQuery); } /* * Find all relevant types and keys for the criteria. */ QueryAnalyzer info = new QueryAnalyzer(); try { info.visitQuery(sqlQuery); } catch (QueryCannotMatchException e) { // query cannot match return null; } catch (QueryMakerException e) { throw new StorageException(e.getMessage(), e); } /* * Find all the types to take into account (all concrete types being a * subtype of the passed types) based on the FROM list. */ Set<String> types = new HashSet<String>(); for (String typeName : info.fromTypes) { if ("document".equals(typeName)) { typeName = "Document"; } Set<String> subTypes = model.getDocumentSubTypes(typeName); if (subTypes == null) { throw new StorageException("Unknown type: " + typeName); } types.addAll(subTypes); } types.remove(model.ROOT_TYPE); /* * Restrict types based on toplevel ecm:primaryType and ecm:mixinType * predicates. */ types.removeAll(info.typesExcluded); if (!info.typesAnyRequired.isEmpty()) { types.retainAll(info.typesAnyRequired); } if (types.isEmpty()) { // conflicting types requirement, query cannot match return null; } /* * Merge facet filter into mixin clauses and immutable flag. */ FacetFilter facetFilter = queryFilter.getFacetFilter(); if (facetFilter == null) { facetFilter = FacetFilter.ALLOW; } info.mixinsExcluded.addAll(facetFilter.excluded); if (info.mixinsExcluded.remove(FacetNames.IMMUTABLE)) { if (info.immutableClause == Boolean.TRUE) { // conflict on immutable condition, query cannot match return null; } info.immutableClause = Boolean.FALSE; } info.mixinsAllRequired.addAll(facetFilter.required); if (info.mixinsAllRequired.remove(FacetNames.IMMUTABLE)) { if (info.immutableClause == Boolean.FALSE) { // conflict on immutable condition, query cannot match return null; } info.immutableClause = Boolean.TRUE; } /* * Find the relevant tables to join with. */ Set<String> fragmentNames = new HashSet<String>(); for (String prop : info.props) { PropertyInfo propertyInfo = model.getPropertyInfo(prop); if (propertyInfo == null) { throw new StorageException("Unknown field: " + prop); } fragmentNames.add(propertyInfo.fragmentName); } fragmentNames.remove(model.hierTableName); // Do we need to add the versions table too? if (info.needsVersionsTable || info.immutableClause != null) { fragmentNames.add(model.VERSION_TABLE_NAME); } /* * Build the FROM / JOIN criteria for each select. 
*/ DocKind[] docKinds; if (info.proxyClause == Boolean.TRUE) { if (info.immutableClause == Boolean.FALSE) { // proxy but not immutable: query cannot match return null; } docKinds = new DocKind[] { DocKind.PROXY }; } else if (info.proxyClause == Boolean.FALSE || info.immutableClause == Boolean.FALSE) { docKinds = new DocKind[] { DocKind.DIRECT }; } else { docKinds = new DocKind[] { DocKind.DIRECT, DocKind.PROXY }; } Table hier = database.getTable(model.hierTableName); boolean aliasColumns = docKinds.length > 1; Select select = null; String orderBy = null; List<String> statements = new ArrayList<String>(2); List<Serializable> selectParams = new LinkedList<Serializable>(); for (DocKind docKind : docKinds) { // The hierarchy table, which may be an alias table. Table hierTable; // Quoted id in the hierarchy. This is the id returned by the query. String hierId; // Quoted name in the hierarchy. This is the id returned by the query. String hierName; // The hierarchy table of the data. Table dataHierTable; // Quoted id attached to the data that matches. String dataHierId; List<String> joins = new LinkedList<String>(); LinkedList<String> leftJoins = new LinkedList<String>(); List<Serializable> leftJoinsParams = new LinkedList<Serializable>(); LinkedList<String> implicitJoins = new LinkedList<String>(); List<Serializable> implicitJoinsParams = new LinkedList<Serializable>(); List<String> whereClauses = new LinkedList<String>(); List<Serializable> whereParams = new LinkedList<Serializable>(); switch (docKind) { case DIRECT: hierTable = hier; hierId = hierTable.getColumn(model.MAIN_KEY).getFullQuotedName(); hierName = hierTable.getColumn(model.HIER_CHILD_NAME_KEY).getFullQuotedName(); dataHierTable = hierTable; dataHierId = hierId; joins.add(hierTable.getQuotedName()); break; case PROXY: hierTable = new TableAlias(hier, TABLE_HIER_ALIAS); String hierFrom = hier.getQuotedName() + " " + hierTable.getQuotedName(); // TODO use dialect hierId = hierTable.getColumn(model.MAIN_KEY).getFullQuotedName(); hierName = hierTable.getColumn(model.HIER_CHILD_NAME_KEY).getFullQuotedName(); // joined (data) dataHierTable = hier; dataHierId = hier.getColumn(model.MAIN_KEY).getFullQuotedName(); // proxies Table proxies = database.getTable(model.PROXY_TABLE_NAME); String proxiesid = proxies.getColumn(model.MAIN_KEY).getFullQuotedName(); String proxiestargetid = proxies.getColumn(model.PROXY_TARGET_KEY).getFullQuotedName(); // join all that joins.add(hierFrom); joins.add(String.format(JOIN_ON, proxies.getQuotedName(), hierId, proxiesid)); joins.add(String.format(JOIN_ON, dataHierTable.getQuotedName(), dataHierId, proxiestargetid)); break; default: throw new AssertionError(docKind); } // main data joins for (String fragmentName : fragmentNames) { Table table = database.getTable(fragmentName); // the versions table joins on the real hier table boolean useHier = model.VERSION_TABLE_NAME.equals(fragmentName); leftJoins.add(String.format(JOIN_ON, table.getQuotedName(), useHier ? hierId : dataHierId, table.getColumn(model.MAIN_KEY).getFullQuotedName())); } /* * Filter on facets and mixin types, and create the structural WHERE * clauses for the type. 
*/ List<String> typeStrings = new ArrayList<String>(types.size()); NEXT_TYPE: for (String type : types) { Set<String> facets = model.getDocumentTypeFacets(type); for (String facet : info.mixinsExcluded) { if (facets.contains(facet)) { continue NEXT_TYPE; } } for (String facet : info.mixinsAllRequired) { if (!facets.contains(facet)) { continue NEXT_TYPE; } } if (!info.mixinsAnyRequired.isEmpty()) { Set<String> intersection = new HashSet<String>(info.mixinsAnyRequired); intersection.retainAll(facets); if (intersection.isEmpty()) { continue NEXT_TYPE; } } // this type is good typeStrings.add("?"); whereParams.add(type); } if (typeStrings.isEmpty()) { return null; // mixins excluded all types, no match possible } whereClauses.add(String.format("%s IN (%s)", dataHierTable.getColumn(model.MAIN_PRIMARY_TYPE_KEY).getFullQuotedName(), StringUtils.join(typeStrings, ", "))); /* * Add clause for immutable match. */ if (docKind == DocKind.DIRECT && info.immutableClause != null) { String where = String.format("%s IS %s", database.getTable(model.VERSION_TABLE_NAME).getColumn(model.MAIN_KEY).getFullQuotedName(), info.immutableClause.booleanValue() ? "NOT NULL" : "NULL"); whereClauses.add(where); } /* * Parse the WHERE clause from the original query, and deduce from * it actual WHERE clauses and potential JOINs. */ WhereBuilder whereBuilder; try { whereBuilder = new WhereBuilder(database, session, hierTable, hierId, dataHierTable, dataHierId, docKind == DocKind.PROXY, aliasColumns); } catch (QueryMakerException e) { throw new StorageException(e.getMessage(), e); } if (info.wherePredicate != null) { info.wherePredicate.accept(whereBuilder); // JOINs added by fulltext queries leftJoins.addAll(whereBuilder.leftJoins); leftJoinsParams.addAll(whereBuilder.leftJoinsParams); implicitJoins.addAll(whereBuilder.implicitJoins); implicitJoinsParams.addAll(whereBuilder.implicitJoinsParams); // WHERE clause String where = whereBuilder.buf.toString(); if (where.length() != 0) { whereClauses.add(where); whereParams.addAll(whereBuilder.whereParams); } } /* * Security check. */ if (queryFilter.getPrincipals() != null) { Serializable principals = queryFilter.getPrincipals(); Serializable permissions = queryFilter.getPermissions(); if (!dialect.supportsArrays()) { principals = StringUtils.join((String[]) principals, '|'); permissions = StringUtils.join((String[]) permissions, '|'); } if (dialect.supportsReadAcl()) { /* optimized read acl */ whereClauses.add(dialect.getReadAclsCheckSql("r.acl_id")); whereParams.add(principals); joins.add(String.format("%s AS r ON %s = r.id", model.HIER_READ_ACL_TABLE_NAME, hierId)); } else { whereClauses.add(dialect.getSecurityCheckSql(hierId)); whereParams.add(principals); whereParams.add(permissions); } } /* * Columns on which to do ordering. */ String selectWhat = hierId; // always add the name, it will be used for intalio crm selectWhat += ", " + hierName; if (aliasColumns) { // UNION, so we need all orderable columns, aliased int n = 0; for (String key : info.orderKeys) { Column column = whereBuilder.findColumn(key, false, true); String qname = column.getFullQuotedName(); selectWhat += ", " + qname + " AS " + dialect.openQuote() + COL_ORDER_ALIAS_PREFIX + ++n + dialect.closeQuote(); } } /* * Order by. Compute it just once. May use just aliases. */ if (orderBy == null && sqlQuery.orderBy != null) { whereBuilder.buf.setLength(0); sqlQuery.orderBy.accept(whereBuilder); orderBy = whereBuilder.buf.toString(); } /* * Resulting select. 
*/ select = new Select(null); select.setWhat(selectWhat); leftJoins.addFirst(StringUtils.join(joins, " JOIN ")); String from = StringUtils.join(leftJoins, " LEFT JOIN "); if (!implicitJoins.isEmpty()) { implicitJoins.addFirst(from); from = StringUtils.join(implicitJoins, ", "); } select.setFrom(from); select.setWhere(StringUtils.join(whereClauses, " AND ")); selectParams.addAll(leftJoinsParams); selectParams.addAll(implicitJoinsParams); selectParams.addAll(whereParams); statements.add(select.getStatement()); } /* * Create the whole select. */ if (statements.size() > 1) { select = new Select(null); String selectWhat = hier.getColumn(model.MAIN_KEY).getQuotedName(); selectWhat = selectWhat + ", " + hier.getColumn(model.HIER_CHILD_NAME_KEY).getQuotedName(); select.setWhat(selectWhat); // note that Derby has bizarre restrictions on parentheses placement // around UNION, see http://issues.apache.org/jira/browse/DERBY-2374 String from = '(' + StringUtils.join(statements, " UNION ALL ") + ')'; if (dialect.needsAliasForDerivedTable()) { from += " AS " + dialect.openQuote() + UNION_ALIAS + dialect.closeQuote(); } select.setFrom(from); } select.setOrderBy(orderBy); List<Column> whatColumns = Collections.singletonList(hier.getColumn(model.MAIN_KEY)); Query q = new Query(); q.selectInfo = new SQLInfoSelect(select.getStatement(), whatColumns, null, null); q.selectParams = selectParams; return q; }
From source file:org.gcaldaemon.core.ldap.LDAPListener.java
private final ByteBuffer processRequest(LdapMessage request, boolean utf8) throws Exception { if (log.isDebugEnabled()) { try {/*from www .jav a2s .c o m*/ String command = request.getMessageTypeName(); if (command != null) { command = command.toLowerCase().replace('_', ' '); } log.debug("Processing " + command + "..."); } catch (Exception ignored) { log.warn("Processing unknown LDAP request..."); } } LinkedList list = new LinkedList(); switch (request.getMessageType()) { case LdapConstants.BIND_REQUEST: // Bind response BindResponse bind = new BindResponse(); bind.setMessageId(request.getMessageId()); LdapResult result = new LdapResult(); result.setResultCode(0); bind.setLdapResult(result); list.addLast(bind); break; case LdapConstants.UNBIND_REQUEST: // Unbind response LdapResponse unbind = new LdapResponse(); unbind.setMessageId(request.getMessageId()); result = new LdapResult(); result.setResultCode(0); unbind.setLdapResult(result); list.addLast(unbind); break; case LdapConstants.SEARCH_REQUEST: // Switch back encoding if (nativeCharsetLocked) { utf8 = false; } // Get search string SearchRequest search = request.getSearchRequest(); Filter filter = search.getTerminalFilter(); String key = null; if (filter == null) { filter = search.getFilter(); if (filter == null) { filter = search.getCurrentFilter(); } } if (filter != null) { if (filter instanceof SubstringFilter) { SubstringFilter substringFilter = (SubstringFilter) filter; ArrayList substrings = substringFilter.getAnySubstrings(); if (substrings != null && substrings.size() != 0) { key = (String) substrings.get(0); } } if (key == null) { key = filter.toString(); if (key != null) { if (key.charAt(0) == '*') { key = key.substring(1); } if (key.charAt(key.length() - 1) == '*') { key = key.substring(0, key.length() - 1); } if (key.indexOf('=') != -1) { key = key.substring(key.indexOf('=') + 1); } } } if (key != null) { if (key.length() == 0) { key = null; } else { // Decode UTF8 chars try { byte[] bytes = key.getBytes(PLATFORM_ENCODING); key = StringUtils.decodeToString(bytes, StringUtils.UTF_8); if (utf8) { bytes = key.getBytes(PLATFORM_ENCODING); key = StringUtils.decodeToString(bytes, StringUtils.UTF_8); } } catch (Exception ignored) { } if (log.isDebugEnabled()) { log.debug("LDAP search filter (" + key + ") readed."); } key = key.toLowerCase(); // All contacts requested if (key.equals("@")) { key = null; } } } } // Handle native charset lock if (key != null && !utf8) { nativeCharsetLocked = true; } // Find entry GmailContact[] contacts = loader.getContacts(); if (contacts != null) { GmailContact contact; for (int n = 0; n < contacts.length; n++) { contact = contacts[n]; if (key != null && contact.name.toLowerCase().indexOf(key) == -1) { continue; } // Add search entry SearchResultEntry entry = new SearchResultEntry(); entry.setMessageId(request.getMessageId()); LdapDN name; try { name = new LdapDN("CN=" + encode(contact.name, utf8)); } catch (Exception badDN) { log.debug(badDN); continue; } entry.setObjectName(name); BasicAttributes partialAttributeList = new BasicAttributes(true); partialAttributeList.put(new BasicAttribute("cn", encode(contact.name, utf8))); if (contact.email.length() != 0) { // first email partialAttributeList.put(new BasicAttribute("mail", encode(contact.email, utf8))); } if (contact.notes.length() != 0) { // notes partialAttributeList.put(new BasicAttribute("comment", encode(contact.notes, utf8))); partialAttributeList.put(new BasicAttribute("description", encode(contact.notes, utf8))); } String mobile = 
contact.mobile; if (mobile.length() == 0) { mobile = contact.phone; } if (mobile.length() != 0) { // mobile phone partialAttributeList.put(new BasicAttribute("telephonenumber", encode(mobile, utf8))); } if (contact.phone.length() != 0) { // homePhone partialAttributeList.put(new BasicAttribute("homePhone", encode(contact.phone, utf8))); } if (contact.mail.length() != 0) { // second email partialAttributeList .put(new BasicAttribute("mozillaSecondEmail", encode(contact.mail, utf8))); partialAttributeList .put(new BasicAttribute("mailAlternateAddress", encode(contact.mail, utf8))); } if (contact.address.length() != 0) { // postal address partialAttributeList .put(new BasicAttribute("postalAddress", encode(contact.address, utf8))); partialAttributeList .put(new BasicAttribute("homePostalAddress", encode(contact.address, utf8))); partialAttributeList.put(new BasicAttribute("homeStreet", encode(contact.address, utf8))); } if (contact.pager.length() != 0) { // pager partialAttributeList.put(new BasicAttribute("pager", encode(contact.pager, utf8))); } if (contact.fax.length() != 0) { // fax partialAttributeList .put(new BasicAttribute("facsimileTelephoneNumber", encode(contact.fax, utf8))); if (contact.pager.length() == 0) { partialAttributeList.put(new BasicAttribute("pager", encode(contact.fax, utf8))); } } if (contact.title.length() != 0) { // title partialAttributeList.put(new BasicAttribute("title", encode(contact.title, utf8))); } if (contact.company.length() != 0) { // company partialAttributeList.put(new BasicAttribute("company", encode(contact.company, utf8))); partialAttributeList.put(new BasicAttribute("o", encode(contact.company, utf8))); } entry.setPartialAttributeList(partialAttributeList); list.addLast(entry); } } // Search done if (log.isDebugEnabled()) { log.debug("Found " + list.size() + " contacts."); } SearchResultDone done = new SearchResultDone(); done.setMessageId(request.getMessageId()); result = new LdapResult(); result.setResultCode(0); done.setLdapResult(result); list.addLast(done); break; case LdapConstants.ABANDON_REQUEST: // Abandon command result = new LdapResult(); result.setResultCode(0); LdapResponse response = new LdapResponse(); response.setLdapResult(result); list.addLast(response); break; default: // Unsupported command log.debug("Unsupported LDAP command!"); result = new LdapResult(); result.setErrorMessage("Unsupported LDAP command!"); response = new LdapResponse(); response.setLdapResult(result); list.addLast(response); } log.debug("LDAP request processed."); if (!list.isEmpty()) { ByteArrayOutputStream out = new ByteArrayOutputStream(); Iterator responses = list.iterator(); while (responses.hasNext()) { LdapMessage response = (LdapMessage) responses.next(); response.setMessageId(request.getMessageId()); // Append LDAP response LdapMessage message = new LdapMessage(); message.setProtocolOP(response); message.setMessageId(request.getMessageId()); ByteBuffer bb = message.encode(null); byte[] a = bb.array(); out.write(a); } byte[] bytes = out.toByteArray(); return ByteBuffer.wrap(bytes); } return null; }
From source file:org.gldapdaemon.core.ldap.LDAPListener.java
private final ByteBuffer processRequest(LdapMessage request, boolean utf8) throws Exception { if (log.isDebugEnabled()) { try {/*from ww w .j a va 2 s.c o m*/ String command = request.getMessageTypeName(); if (command != null) { command = command.toLowerCase().replace('_', ' '); } log.debug("Processing " + command + "..."); } catch (Exception ignored) { log.warn("Processing unknown LDAP request..."); } } LinkedList list = new LinkedList(); switch (request.getMessageType()) { case LdapConstants.BIND_REQUEST: // Bind response BindResponse bind = new BindResponse(); bind.setMessageId(request.getMessageId()); LdapResult result = new LdapResult(); result.setResultCode(0); bind.setLdapResult(result); list.addLast(bind); break; case LdapConstants.UNBIND_REQUEST: // Unbind response LdapResponse unbind = new LdapResponse(); unbind.setMessageId(request.getMessageId()); result = new LdapResult(); result.setResultCode(0); unbind.setLdapResult(result); list.addLast(unbind); break; case LdapConstants.SEARCH_REQUEST: // Switch back encoding if (nativeCharsetLocked) { utf8 = false; } // Get search string SearchRequest search = request.getSearchRequest(); Filter filter = search.getTerminalFilter(); String key = null; if (filter == null) { filter = search.getFilter(); if (filter == null) { filter = search.getCurrentFilter(); } } if (filter != null) { if (filter instanceof SubstringFilter) { SubstringFilter substringFilter = (SubstringFilter) filter; ArrayList substrings = substringFilter.getAnySubstrings(); if (substrings != null && substrings.size() != 0) { key = (String) substrings.get(0); } } if (key == null) { key = filter.toString(); if (key != null) { if (key.charAt(0) == '*') { key = key.substring(1); } if (key.charAt(key.length() - 1) == '*') { key = key.substring(0, key.length() - 1); } if (key.indexOf('=') != -1) { key = key.substring(key.indexOf('=') + 1); } } } if (key != null) { if (key.length() == 0) { key = null; } else { // Decode UTF8 chars try { byte[] bytes = key.getBytes(PLATFORM_ENCODING); key = StringUtils.decodeToString(bytes, StringUtils.UTF_8); if (utf8) { bytes = key.getBytes(PLATFORM_ENCODING); key = StringUtils.decodeToString(bytes, StringUtils.UTF_8); } } catch (Exception ignored) { } if (log.isDebugEnabled()) { log.debug("LDAP search filter (" + key + ") received."); } key = key.toLowerCase(); // All contacts requested if (key.equals("@")) { key = null; } } } } // Handle native charset lock if (key != null && !utf8) { nativeCharsetLocked = true; } // Find entry ArrayList<GmailContact> contacts = loader.getContacts(); if (contacts != null) { GmailContact contact; for (int n = 0; n < contacts.size(); n++) { contact = contacts.get(n); String value = null; if (contact.name.toLowerCase().indexOf(key) >= 0 || contact.company.toLowerCase().indexOf(key) >= 0) { value = contact.name.length() > 0 ? 
contact.name : contact.company; } else if (key != null) { continue; } // Add search entry SearchResultEntry entry = new SearchResultEntry(); entry.setMessageId(request.getMessageId()); LdapDN name; try { name = new LdapDN("CN=" + encode(value, utf8)); } catch (Exception badDN) { log.debug(badDN); continue; } entry.setObjectName(name); BasicAttributes partialAttributeList = new BasicAttributes(true); partialAttributeList.put(new BasicAttribute("cn", encode(value, utf8))); if (contact.email.length() != 0) { // first email partialAttributeList.put(new BasicAttribute("mail", encode(contact.email, utf8))); } if (contact.notes.length() != 0) { // notes partialAttributeList.put(new BasicAttribute("comment", encode(contact.notes, utf8))); partialAttributeList.put(new BasicAttribute("description", encode(contact.notes, utf8))); } String mobile = contact.mobile; if (mobile.length() == 0) { mobile = contact.phone; } if (mobile.length() != 0) { // mobile phone partialAttributeList.put(new BasicAttribute("telephonenumber", encode(mobile, utf8))); } if (contact.phone.length() != 0) { // homePhone partialAttributeList.put(new BasicAttribute("homePhone", encode(contact.phone, utf8))); } if (contact.mail.length() != 0) { // second email partialAttributeList .put(new BasicAttribute("mozillaSecondEmail", encode(contact.mail, utf8))); partialAttributeList .put(new BasicAttribute("mailAlternateAddress", encode(contact.mail, utf8))); } if (contact.address.length() != 0) { // postal address partialAttributeList .put(new BasicAttribute("postalAddress", encode(contact.address, utf8))); partialAttributeList .put(new BasicAttribute("homePostalAddress", encode(contact.address, utf8))); partialAttributeList.put(new BasicAttribute("homeStreet", encode(contact.address, utf8))); } if (contact.pager.length() != 0) { // pager partialAttributeList.put(new BasicAttribute("pager", encode(contact.pager, utf8))); } if (contact.fax.length() != 0) { // fax partialAttributeList .put(new BasicAttribute("facsimileTelephoneNumber", encode(contact.fax, utf8))); if (contact.pager.length() == 0) { partialAttributeList.put(new BasicAttribute("pager", encode(contact.fax, utf8))); } } if (contact.title.length() != 0) { // title partialAttributeList.put(new BasicAttribute("title", encode(contact.title, utf8))); } if (contact.company.length() != 0) { // company partialAttributeList.put(new BasicAttribute("company", encode(contact.company, utf8))); partialAttributeList.put(new BasicAttribute("o", encode(contact.company, utf8))); } entry.setPartialAttributeList(partialAttributeList); list.addLast(entry); } } // Search done if (log.isDebugEnabled()) { log.debug("Found " + list.size() + " contacts."); } SearchResultDone done = new SearchResultDone(); done.setMessageId(request.getMessageId()); result = new LdapResult(); result.setResultCode(0); done.setLdapResult(result); list.addLast(done); break; case LdapConstants.ABANDON_REQUEST: // Abandon command result = new LdapResult(); result.setResultCode(0); LdapResponse response = new LdapResponse(); response.setLdapResult(result); list.addLast(response); break; default: // Unsupported command log.debug("Unsupported LDAP command!"); result = new LdapResult(); result.setErrorMessage("Unsupported LDAP command!"); response = new LdapResponse(); response.setLdapResult(result); list.addLast(response); } log.debug("LDAP request processed."); if (!list.isEmpty()) { ByteArrayOutputStream out = new ByteArrayOutputStream(); Iterator responses = list.iterator(); while (responses.hasNext()) { LdapMessage 
response = (LdapMessage) responses.next(); response.setMessageId(request.getMessageId()); // Append LDAP response LdapMessage message = new LdapMessage(); message.setProtocolOP(response); message.setMessageId(request.getMessageId()); ByteBuffer bb = message.encode(null); byte[] a = bb.array(); out.write(a); } byte[] bytes = out.toByteArray(); return ByteBuffer.wrap(bytes); } return null; }
From source file:elh.eus.absa.Features.java
/** * Function fills the attribute vectors for the instances existing in the corpus given. * Attribute vectors contain the features loaded by the creatFeatureSet() function. * // w ww . jav a 2 s. co m * @param boolean save : whether the Instances file should be saved to an arff file or not. * @return Weka Instances object containing the attribute vectors filled with the features specified * in the parameter file. */ public Instances loadInstances(boolean save, String prefix) throws IOException { String savePath = params.getProperty("fVectorDir") + File.separator + "arff" + File.separator + "train_" + prefix; HashMap<String, Opinion> trainExamples = corpus.getOpinions(); int trainExamplesNum = trainExamples.size(); int bowWin = 0; if (params.containsKey("window")) { bowWin = Integer.parseInt(params.getProperty("window")); savePath = savePath + "_w" + bowWin; } //Properties posProp = new Properties(); //eus.ixa.ixa.pipe.pos.Annotate postagger = new eus.ixa.ixa.pipe.pos.Annotate(posProp); if (params.containsKey("lemmaNgrams")) { Properties posProp = NLPpipelineWrapper.setPostaggerProperties(params.getProperty("pos-model"), corpus.getLang(), "3", "bin", "false"); postagger = new eus.ixa.ixa.pipe.pos.Annotate(posProp); } //System.out.println("train examples: "+trainExamplesNum); //Create the Weka object for the training set Instances rsltdata = new Instances("train", atts, trainExamplesNum); // setting class attribute (last attribute in train data. //traindata.setClassIndex(traindata.numAttributes() - 1); System.err.println("Features: loadInstances() - featNum: " + this.featNum + " - trainset attrib num -> " + rsltdata.numAttributes() + " - "); System.out.println("Features: loadInstances() - featNum: " + this.featNum + " - trainset attrib num -> " + rsltdata.numAttributes() + " - "); int instId = 1; // fill the vectors for each training example for (String oId : trainExamples.keySet()) { //System.err.println("sentence: "+ corpus.getOpinionSentence(o.getId())); //value vector double[] values = new double[featNum]; // first element is the instanceId values[rsltdata.attribute("instanceId").index()] = instId; // string normalization (emoticons, twitter grammar,...) String opNormalized = corpus.getOpinionSentence(oId); // compute uppercase ratio before normalization (if needed) double upRatio = 0.0; if (params.getProperty("upperCaseRatio", "no").equalsIgnoreCase("yes")) { String upper = opNormalized.replaceAll("[\\p{Ll}]", ""); upRatio = (double) upper.length() / (double) opNormalized.length(); values[rsltdata.attribute("upperCaseRation").index()] = upRatio; } // string normalization (emoticons, twitter grammar,...) if ((params.containsKey("wfngrams") || params.containsKey("lemmaNgrams")) && (!params.getProperty("normalization", "none").equalsIgnoreCase("noEmot"))) { opNormalized = normalize(opNormalized, params.getProperty("normalization", "none")); } //process the current instance with the NLP pipeline in order to get token and lemma|pos features KAFDocument nafinst = new KAFDocument("", ""); String nafname = trainExamples.get(oId).getsId().replace(':', '_'); String nafDir = params.getProperty("kafDir"); String nafPath = nafDir + File.separator + nafname + ".kaf"; //counter for opinion sentence token number. 
Used for computing relative values of the features int tokNum = 1; try { if (params.containsKey("lemmaNgrams")) //(lemmaNgrams != null) && (!lemmaNgrams.isEmpty())) { if (FileUtilsElh.checkFile(nafPath)) { nafinst = KAFDocument.createFromFile(new File(nafPath)); } else { nafinst = NLPpipelineWrapper.ixaPipesTokPos(opNormalized, corpus.getLang(), params.getProperty("pos-model"), postagger); Files.createDirectories(Paths.get(nafDir)); nafinst.save(nafPath); } tokNum = nafinst.getWFs().size(); //System.err.println("Features::loadInstances - postagging opinion sentence ("+oId+") - "+corpus.getOpinionSentence(oId)); } else { if (FileUtilsElh.checkFile(nafPath)) { nafinst = KAFDocument.createFromFile(new File(nafPath)); } else { nafinst = NLPpipelineWrapper.ixaPipesTok(opNormalized, corpus.getLang()); } tokNum = nafinst.getWFs().size(); //System.err.println("Features::loadInstances - tokenizing opinion sentence ("+oId+") - "+corpus.getOpinionSentence(oId)); } } catch (IOException | JDOMException e) { System.err.println("Features::loadInstances() - error when NLP processing the instance " + instId + "|" + oId + ") for filling the attribute vector"); e.printStackTrace(); System.exit(5); } LinkedList<String> ngrams = new LinkedList<String>(); int ngramDim; try { ngramDim = Integer.valueOf(params.getProperty("wfngrams")); } catch (Exception e) { ngramDim = 0; } boolean polNgrams = false; if (params.containsKey("polNgrams")) { polNgrams = params.getProperty("polNgrams").equalsIgnoreCase("yes"); } List<WF> window = nafinst.getWFs(); Integer end = corpus.getOpinion(oId).getTo(); // apply window if window active (>0) and if the target is not null (to=0) if ((bowWin > 0) && (end > 0)) { Integer start = corpus.getOpinion(oId).getFrom(); Integer to = window.size(); Integer from = 0; end++; for (int i = 0; i < window.size(); i++) { WF wf = window.get(i); if ((wf.getOffset() == start) && (i >= bowWin)) { from = i - bowWin; } else if (wf.getOffset() >= end) { if (i + bowWin < window.size()) { to = i + bowWin; } break; } } window = window.subList(from, to); //System.out.println("startTgt: "+start+" - from: "+from+" | endTrgt:"+(end-1)+" - to:"+to); } //System.out.println("Sentence: "+corpus.getOpinionSentence(oId)+" - target: "+corpus.getOpinion(oId).getTarget()+ // "\n window: from-> "+window.get(0).getForm()+" to-> "+window.get(window.size()-1)+" .\n"); List<String> windowWFIds = new ArrayList<String>(); // word form ngram related features for (WF wf : window) { windowWFIds.add(wf.getId()); String wfStr = wf.getForm(); if (params.containsKey("wfngrams") && ngramDim > 0) { if (!savePath.contains("_wf" + ngramDim)) { savePath = savePath + "_wf" + ngramDim; } //if the current word form is in the ngram list activate the feature in the vector if (ngrams.size() >= ngramDim) { ngrams.removeFirst(); } ngrams.add(wfStr); // add ngrams to the feature vector checkNgramFeatures(ngrams, values, "wf", 1, false); //toknum } // Clark cluster info corresponding to the current word form if (params.containsKey("clark") && attributeSets.get("ClarkCl").containsKey(wfStr)) { if (!savePath.contains("_cl")) { savePath = savePath + "_cl"; } values[rsltdata.attribute("ClarkClId_" + attributeSets.get("ClarkCl").get(wfStr)).index()]++; } // Clark cluster info corresponding to the current word form if (params.containsKey("brown") && attributeSets.get("BrownCl").containsKey(wfStr)) { if (!savePath.contains("_br")) { savePath = savePath + "_br"; } values[rsltdata.attribute("BrownClId_" + 
attributeSets.get("BrownCl").get(wfStr)).index()]++; } // Clark cluster info corresponding to the current word form if (params.containsKey("word2vec") && attributeSets.get("w2vCl").containsKey(wfStr)) { if (!savePath.contains("_w2v")) { savePath = savePath + "_w2v"; } values[rsltdata.attribute("w2vClId_" + attributeSets.get("w2vCl").get(wfStr)).index()]++; } } //empty ngram list and add remaining ngrams to the feature list checkNgramFeatures(ngrams, values, "wf", 1, true); //toknum // PoS tagger related attributes: lemmas and pos tags if (params.containsKey("lemmaNgrams") || (params.containsKey("pos") && !params.getProperty("pos").equalsIgnoreCase("0")) || params.containsKey("polarLexiconGeneral") || params.containsKey("polarLexiconDomain")) { ngrams = new LinkedList<String>(); if (params.containsKey("lemmaNgrams") && (!params.getProperty("lemmaNgrams").equalsIgnoreCase("0"))) { ngramDim = Integer.valueOf(params.getProperty("lemmaNgrams")); } else { ngramDim = 3; } LinkedList<String> posNgrams = new LinkedList<String>(); int posNgramDim = 0; if (params.containsKey("pos")) { posNgramDim = Integer.valueOf(params.getProperty("pos")); } for (Term t : nafinst.getTermsFromWFs(windowWFIds)) { //lemmas // && (!params.getProperty("lemmaNgrams").equalsIgnoreCase("0")) if ((params.containsKey("lemmaNgrams")) || params.containsKey("polarLexiconGeneral") || params.containsKey("polarLexiconDomain")) { if (!savePath.contains("_l" + ngramDim)) { savePath = savePath + "_l" + ngramDim; } String lemma = t.getLemma(); if (ngrams.size() >= ngramDim) { ngrams.removeFirst(); } ngrams.add(lemma); // add ngrams to the feature vector for (int i = 0; i < ngrams.size(); i++) { String ng = featureFromArray(ngrams.subList(0, i + 1), "lemma"); //if the current lemma is in the ngram list activate the feature in the vector if (params.containsKey("lemmaNgrams") && (!params.getProperty("lemmaNgrams").equalsIgnoreCase("0"))) { Attribute ngAtt = rsltdata.attribute(ng); if (ngAtt != null) { addNumericToFeatureVector(ng, values, 1); //tokNum } } ng = featureFromArray(ngrams.subList(0, i + 1), ""); if (params.containsKey("polarLexiconGeneral") || params.containsKey("polarLexiconDomain")) { checkPolarityLexicons(ng, values, tokNum, polNgrams); } //end polarity ngram checker } //end ngram checking } //pos tags if (params.containsKey("pos") && !params.getProperty("pos").equalsIgnoreCase("0")) { if (!savePath.contains("_p")) { savePath = savePath + "_p"; } if (posNgrams.size() >= posNgramDim) { posNgrams.removeFirst(); } posNgrams.add(t.getPos()); // add ngrams to the feature vector checkNgramFeatures(posNgrams, values, "pos", 1, false); } } //endFor //empty ngram list and add remaining ngrams to the feature list while (!ngrams.isEmpty()) { String ng = featureFromArray(ngrams, "lemma"); //if the current lemma is in the ngram list activate the feature in the vector if (rsltdata.attribute(ng) != null) { addNumericToFeatureVector(ng, values, 1); //tokNum } // polarity lexicons if (params.containsKey("polarLexiconGeneral") || params.containsKey("polarLexiconDomain")) { checkPolarityLexicons(ng, values, tokNum, polNgrams); } //end polarity ngram checker ngrams.removeFirst(); } //empty pos ngram list and add remaining pos ngrams to the feature list checkNgramFeatures(posNgrams, values, "pos", 1, true); } // add sentence length as a feature if (params.containsKey("sentenceLength") && (!params.getProperty("sentenceLength").equalsIgnoreCase("no"))) { values[rsltdata.attribute("sentenceLength").index()] = tokNum; } //create object for the 
current instance and associate it with the current train dataset. Instance inst = new SparseInstance(1.0, values); inst.setDataset(rsltdata); // add category attributte values String cat = trainExamples.get(oId).getCategory(); if (params.containsKey("categories") && params.getProperty("categories").compareTo("E&A") == 0) { if (cat.compareTo("NULL") == 0) { inst.setValue(rsltdata.attribute("entCat").index(), cat); inst.setValue(rsltdata.attribute("attCat").index(), cat); } else { String[] splitCat = cat.split("#"); inst.setValue(rsltdata.attribute("entCat").index(), splitCat[0]); inst.setValue(rsltdata.attribute("attCat").index(), splitCat[1]); } //inst.setValue(attIndexes.get("entAttCat"), cat); } else if (params.containsKey("categories") && params.getProperty("categories").compareTo("E#A") == 0) { inst.setValue(rsltdata.attribute("entAttCat").index(), cat); } if (params.containsKey("polarity") && params.getProperty("polarity").compareTo("yes") == 0) { // add class value as a double (Weka stores all values as doubles ) String pol = normalizePolarity(trainExamples.get(oId).getPolarity()); //System.err.println("Features::loadInstances - pol "+pol+" for oid "+oId+" - text:"+corpus.getOpinionSentence(oId)); if (pol != null && !pol.isEmpty()) { //System.err.println("polarity: _"+pol+"_"); inst.setValue(rsltdata.attribute("polarityCat"), pol); } else { inst.setMissing(rsltdata.attribute("polarityCat")); } } //add instance to train data rsltdata.add(inst); //store opinion Id and instance Id this.opInst.put(oId, instId); instId++; } System.err.println("Features : loadInstances() - training data ready total number of examples -> " + trainExamplesNum + " - " + rsltdata.numInstances()); if (save) { try { savePath = savePath + ".arff"; System.err.println("arff written to: " + savePath); ArffSaver saver = new ArffSaver(); saver.setInstances(rsltdata); saver.setFile(new File(savePath)); saver.writeBatch(); } catch (IOException e1) { e1.printStackTrace(); } catch (Exception e2) { e2.printStackTrace(); } } return rsltdata; }
From source file:net.spfbl.spf.SPF.java
/** * Processes the query and returns the result. * * @param query the query expression. * @return the result of the processing. */ protected static String processSPF(InetAddress ipAddress, Client client, User user, String query, LinkedList<User> userList) { try { String result = ""; if (query.length() == 0) { return "INVALID QUERY\n"; } else { String origin; if (client == null) { origin = ipAddress.getHostAddress(); } else if (client.hasEmail()) { origin = ipAddress.getHostAddress() + " " + client.getDomain() + " " + client.getEmail(); } else { origin = ipAddress.getHostAddress() + " " + client.getDomain(); } StringTokenizer tokenizer = new StringTokenizer(query, " "); String firstToken = tokenizer.nextToken(); if (firstToken.equals("SPAM") && tokenizer.countTokens() == 1) { String ticket = tokenizer.nextToken(); TreeSet<String> tokenSet = addComplainURLSafe(origin, ticket, null); if (tokenSet == null) { result = "DUPLICATE COMPLAIN\n"; } else { String userEmail; try { userEmail = SPF.getClientURLSafe(ticket); } catch (Exception ex) { userEmail = client == null ? null : client.getEmail(); } user = User.get(userEmail); if (user != null) { userList.add(user); } String recipient; try { recipient = SPF.getRecipientURLSafe(ticket); } catch (ProcessException ex) { recipient = null; } result = "OK " + tokenSet + (recipient == null ? "" : " >" + recipient) + "\n"; } } else if (firstToken.equals("ABUSE") && tokenizer.hasMoreTokens()) { String token = tokenizer.nextToken(); if (token.startsWith("In-Reply-To:") && tokenizer.countTokens() == 1) { token = tokenizer.nextToken(); if (token.startsWith("From:")) { int index = token.indexOf(':') + 1; String recipient = token.substring(index); User recipientUser = User.get(recipient); if (recipientUser == null) { // If the query originates from a recipient with a registered postmaster, // treat that postmaster as the user of the query. index = recipient.indexOf('@'); String postmaster = "postmaster" + recipient.substring(index); User postmasterUser = User.get(postmaster); if (postmasterUser != null) { userList.add(user = postmasterUser); } } else { userList.add(user = recipientUser); } index = query.indexOf(':') + 1; String messageID = query.substring(index); result = "INVALID ID\n"; index = messageID.indexOf('<'); if (index >= 0) { messageID = messageID.substring(index + 1); index = messageID.indexOf('>'); if (index > 0) { messageID = messageID.substring(0, index); result = user.blockByMessageID(messageID) + '\n'; } } } else { result = "INVALID FROM\n"; } } else { result = "INVALID COMMAND\n"; } } else if (firstToken.equals("HOLDING") && tokenizer.countTokens() == 1) { String ticket = tokenizer.nextToken(); result = getHoldStatus(client, ticket, userList) + '\n'; } else if (firstToken.equals("LINK") && tokenizer.hasMoreTokens()) { String ticketSet = tokenizer.nextToken(); TreeSet<String> linkSet = new TreeSet<String>(); while (tokenizer.hasMoreTokens()) { linkSet.add(tokenizer.nextToken()); } StringTokenizer tokenizerTicket = new StringTokenizer(ticketSet, ";"); String unblockURL = null; boolean blocked = false; Action action = null; while (tokenizerTicket.hasMoreTokens()) { String ticket = tokenizerTicket.nextToken(); String userEmail; try { userEmail = SPF.getClientURLSafe(ticket); } catch (Exception ex) { userEmail = client == null ? 
null : client.getEmail(); } if ((user = User.get(userEmail)) != null) { userList.add(user); long dateTicket = SPF.getDateTicket(ticket); User.Query queryTicket = user.getQuery(dateTicket); if (queryTicket != null) { if (queryTicket.setLinkSet(linkSet)) { SPF.setSpam(dateTicket, queryTicket.getTokenSet()); if (!queryTicket.isWhite() && queryTicket.blockSender(dateTicket)) { Server.logDebug( "new BLOCK '" + queryTicket.getBlockSender() + "' added by LINK."); } action = client == null ? Action.REJECT : client.getActionBLOCK(); unblockURL = queryTicket.getUnblockURL(); blocked = true; } else if (queryTicket.isAnyLinkRED()) { action = client == null ? Action.FLAG : client.getActionRED(); } if (action == Action.HOLD) { queryTicket.setResult("HOLD"); } else if (action == Action.FLAG) { queryTicket.setResult("FLAG"); } else if (action == Action.REJECT) { queryTicket.setResult("REJECT"); } User.storeDB(dateTicket, queryTicket); } } } if (unblockURL != null) { result = "BLOCKED " + unblockURL + "\n"; } else if (blocked) { result = "BLOCKED\n"; } else if (action == Action.HOLD) { result = "HOLD\n"; } else if (action == Action.FLAG) { result = "FLAG\n"; } else if (action == Action.REJECT) { result = "REJECT\n"; } else { result = "CLEAR\n"; } } else if (firstToken.equals("MALWARE") && tokenizer.hasMoreTokens()) { String ticketSet = tokenizer.nextToken(); StringBuilder nameBuilder = new StringBuilder(); while (tokenizer.hasMoreTokens()) { if (nameBuilder.length() > 0) { nameBuilder.append(' '); } nameBuilder.append(tokenizer.nextToken()); } StringBuilder resultBuilder = new StringBuilder(); StringTokenizer ticketTokenizer = new StringTokenizer(ticketSet, ";"); while (ticketTokenizer.hasMoreTokens()) { String ticket = ticketTokenizer.nextToken(); TreeSet<String> tokenSet = addComplainURLSafe(origin, ticket, "MALWARE"); if (tokenSet == null) { resultBuilder.append("DUPLICATE COMPLAIN\n"); } else { // Processar reclamao. String userEmail; try { userEmail = SPF.getClientURLSafe(ticket); } catch (Exception ex) { userEmail = client == null ? null : client.getEmail(); } user = User.get(userEmail); if (user != null) { userList.add(user); long dateTicket = getDateTicket(ticket); User.Query userQuery = user.getQuery(dateTicket); if (userQuery != null && userQuery.setMalware(nameBuilder.toString())) { User.storeDB(dateTicket, userQuery); } } String recipient; try { recipient = SPF.getRecipientURLSafe(ticket); } catch (ProcessException ex) { recipient = null; } // Bloquear automaticamente todos // os tokens com reputao amarela ou vermelha. // Processar reclamao. for (String token : tokenSet) { String block; Status status = SPF.getStatus(token); if (status == Status.RED && (block = Block.add(token)) != null) { Server.logDebug( "new BLOCK '" + block + "' added by '" + recipient + ";MALWARE'."); Peer.sendBlockToAll(block); } if (status != Status.GREEN && !Subnet.isValidIP(token) && (block = Block.addIfNotNull(user, token)) != null) { Server.logDebug( "new BLOCK '" + block + "' added by '" + recipient + ";MALWARE'."); } } resultBuilder.append("OK "); resultBuilder.append(tokenSet); resultBuilder.append(recipient == null ? 
"" : " >" + recipient); resultBuilder.append("\n"); } } result = resultBuilder.toString(); } else if (firstToken.equals("HEADER") && tokenizer.hasMoreTokens()) { String ticketSet = tokenizer.nextToken(); String key = null; String from = null; String replyto = null; String messageID = null; String unsubscribe = null; String subject = null; while (tokenizer.hasMoreTokens()) { String token = tokenizer.nextToken(); if (token.startsWith("From:")) { key = "From"; int index = token.indexOf(':'); from = token.substring(index + 1); } else if (token.startsWith("ReplyTo:") || token.startsWith("Reply-To:")) { key = "Reply-To"; int index = token.indexOf(':'); replyto = token.substring(index + 1); } else if (token.startsWith("Message-ID:")) { key = "Message-ID"; int index = token.indexOf(':'); messageID = token.substring(index + 1); } else if (token.startsWith("List-Unsubscribe:")) { key = "List-Unsubscribe"; int index = token.indexOf(':'); unsubscribe = token.substring(index + 1); } else if (token.startsWith("Subject:")) { key = "Subject"; int index = token.indexOf(':'); subject = token.substring(index + 1); } else if (key == null) { from = null; replyto = null; unsubscribe = null; subject = null; break; } else if (key.equals("From")) { from += ' ' + token; } else if (key.equals("Reply-To")) { replyto += ' ' + token; } else if (key.equals("Message-ID")) { messageID += ' ' + token; } else if (key.equals("List-Unsubscribe")) { unsubscribe += ' ' + token; } else if (key.equals("Subject")) { subject += ' ' + token; } } if ((from == null || from.length() == 0) && (replyto == null || replyto.length() == 0) && (messageID == null || messageID.length() == 0) && (unsubscribe == null || unsubscribe.length() == 0) && (subject == null || subject.length() == 0)) { result = "INVALID COMMAND\n"; } else { boolean whitelisted = false; boolean blocklisted = false; TreeSet<String> unblockURLSet = new TreeSet<String>(); StringTokenizer ticketRokenizer = new StringTokenizer(ticketSet, ";"); int n = ticketRokenizer.countTokens(); ArrayList<User.Query> queryList = new ArrayList<User.Query>(n); while (ticketRokenizer.hasMoreTokens()) { String ticket = ticketRokenizer.nextToken(); String userEmail; try { userEmail = SPF.getClientURLSafe(ticket); } catch (Exception ex) { userEmail = client == null ? 
null : client.getEmail(); } if ((user = User.get(userEmail)) != null) { userList.add(user); long dateTicket = SPF.getDateTicket(ticket); User.Query queryTicket = user.getQuery(dateTicket); if (queryTicket != null) { queryList.add(queryTicket); String resultLocal = queryTicket.setHeader(from, replyto, subject, messageID, unsubscribe); if ("WHITE".equals(resultLocal)) { whitelisted = true; } else if ("BLOCK".equals(resultLocal)) { blocklisted = true; String url = queryTicket.getUnblockURL(); if (url != null) { unblockURLSet.add(url); } } User.storeDB(dateTicket, queryTicket); } } } if (whitelisted) { for (User.Query queryTicket : queryList) { queryTicket.setResult("WHITE"); } result = "WHITE\n"; } else if (blocklisted) { for (User.Query queryTicket : queryList) { queryTicket.setResult("BLOCK"); } if (unblockURLSet.size() == 1) { result = "BLOCKED " + unblockURLSet.first() + "\n"; } else { result = "BLOCKED\n"; } } else { result = "CLEAR\n"; } } } else if (firstToken.equals("HAM") && tokenizer.countTokens() == 1) { String ticket = tokenizer.nextToken(); TreeSet<String> tokenSet = deleteComplainURLSafe(origin, ticket); if (tokenSet == null) { result = "ALREADY REMOVED\n"; } else { String recipient; try { recipient = SPF.getRecipientURLSafe(ticket); } catch (ProcessException ex) { recipient = null; } result = "OK " + tokenSet + (recipient == null ? "" : " >" + recipient) + "\n"; } } else if (firstToken.equals("REFRESH") && tokenizer.countTokens() == 1) { String address = tokenizer.nextToken(); try { if (CacheSPF.refresh(address, true)) { result = "UPDATED\n"; } else { result = "NOT LOADED\n"; } } catch (ProcessException ex) { result = ex.getMessage() + "\n"; } } else if ((firstToken.equals("SPF") && tokenizer.countTokens() >= 4) || tokenizer.countTokens() == 2 || tokenizer.countTokens() == 1 || (firstToken.equals("CHECK") && tokenizer.countTokens() == 4) || (firstToken.equals("CHECK") && tokenizer.countTokens() == 3) || (firstToken.equals("CHECK") && tokenizer.countTokens() == 2)) { try { String ip; String sender; String helo; String recipient; String origem; String fluxo; if (firstToken.equals("SPF")) { // Nova formatao de consulta. ip = tokenizer.nextToken(); sender = tokenizer.nextToken(); while (!sender.endsWith("'") && tokenizer.hasMoreTokens()) { sender += " " + tokenizer.nextToken(); } helo = tokenizer.hasMoreTokens() ? tokenizer.nextToken() : "''"; recipient = tokenizer.hasMoreTokens() ? tokenizer.nextToken() : "''"; ip = ip.substring(1, ip.length() - 1); sender = sender.substring(1, sender.length() - 1); helo = helo.substring(1, helo.length() - 1); if (recipient.equals("'")) { recipient = tokenizer.hasMoreTokens() ? 
tokenizer.nextToken() : ""; if (recipient.endsWith("'")) { recipient = recipient.substring(0, recipient.length() - 1); } } else { recipient = recipient.substring(1, recipient.length() - 1); } if (sender.length() == 0) { sender = null; } else { sender = sender.toLowerCase(); } recipient = recipient.toLowerCase(); recipient = recipient.replace("\"", ""); } else if (firstToken.equals("CHECK") && tokenizer.countTokens() == 4) { ip = tokenizer.nextToken().toLowerCase(); sender = tokenizer.nextToken().toLowerCase(); helo = tokenizer.nextToken(); recipient = tokenizer.nextToken().toLowerCase(); if (ip.startsWith("'") && ip.endsWith("'")) { ip = ip.substring(1, ip.length() - 1); } if (sender.startsWith("'") && sender.endsWith("'")) { sender = sender.substring(1, sender.length() - 1); } if (helo.startsWith("'") && helo.endsWith("'")) { helo = helo.substring(1, helo.length() - 1); } if (recipient.startsWith("'") && recipient.endsWith("'")) { recipient = recipient.substring(1, recipient.length() - 1); } if (ip.length() == 0) { ip = null; } if (sender.length() == 0) { sender = null; } if (!Domain.isHostname(helo)) { helo = null; } if (recipient.length() == 0) { recipient = null; } else { recipient = recipient.toLowerCase(); } } else { // Manter compatibilidade da verso antiga. // Verso obsoleta. if (firstToken.equals("CHECK")) { ip = tokenizer.nextToken(); } else { ip = firstToken; } if (tokenizer.countTokens() == 2) { sender = tokenizer.nextToken().toLowerCase(); helo = tokenizer.nextToken(); } else { sender = null; helo = tokenizer.nextToken(); } recipient = null; if (ip.startsWith("'") && ip.endsWith("'")) { ip = ip.substring(1, ip.length() - 1); } if (sender != null && sender.startsWith("'") && sender.endsWith("'")) { sender = sender.substring(1, sender.length() - 1); if (sender.length() == 0) { sender = null; } } if (helo.startsWith("'") && helo.endsWith("'")) { helo = helo.substring(1, helo.length() - 1); } } if (!Subnet.isValidIP(ip)) { return "INVALID\n"; } else if (sender != null && !Domain.isEmail(sender)) { return "INVALID\n"; } else if (recipient != null && !Domain.isEmail(recipient)) { return "INVALID\n"; } else if (Subnet.isReservedIP(ip)) { // Message from LAN. return "LAN\n"; } else if (client != null && client.containsFull(ip)) { // Message from LAN. return "LAN\n"; } else { TreeSet<String> tokenSet = new TreeSet<String>(); ip = Subnet.normalizeIP(ip); tokenSet.add(ip); if (Domain.isValidEmail(recipient)) { // Se houver um remetente vlido, // Adicionar no ticket para controle. tokenSet.add('>' + recipient); } if (recipient != null) { User recipientUser = User.get(recipient); if (recipientUser == null) { // Se a consulta originar de destinatrio com postmaster cadastrado, // considerar o prprio postmaster como usurio da consulta. int index = recipient.indexOf('@'); String postmaster = "postmaster" + recipient.substring(index); User postmasterUser = User.get(postmaster); if (postmasterUser != null) { user = postmasterUser; } } else { user = recipientUser; } } if (user != null) { userList.add(user); tokenSet.add(user.getEmail() + ':'); } else if (client != null && client.hasEmail()) { tokenSet.add(client.getEmail() + ':'); } // Passar a acompanhar todos os // HELO quando apontados para o IP para // uma nova forma de interpretar dados. 
String hostname; if (CacheHELO.match(ip, helo, false)) { hostname = Domain.normalizeHostname(helo, true); } else { hostname = Reverse.getHostname(ip); hostname = Domain.normalizeHostname(hostname, true); } if (hostname == null) { Server.logDebug("no rDNS for " + ip + "."); } else if (Domain.isOfficialTLD(hostname)) { return "INVALID\n"; } else { // Verificao de pilha dupla, // para pontuao em ambas pilhas. String ipv4 = CacheHELO.getUniqueIPv4(hostname); String ipv6 = CacheHELO.getUniqueIPv6(hostname); if (ip.equals(ipv6) && CacheHELO.match(ipv4, hostname, false)) { // Equivalncia de pilha dupla se // IPv4 for nico para o hostname. tokenSet.add(ipv4); } else if (ip.equals(ipv4) && CacheHELO.match(ipv6, hostname, false)) { // Equivalncia de pilha dupla se // IPv6 for nico para o hostname. tokenSet.add(ipv6); } } if (Generic.containsGenericSoft(hostname)) { // Quando o reverso for // genrico, no consider-lo. hostname = null; } else if (hostname != null) { tokenSet.add(hostname); } LinkedList<String> logList = null; if (sender != null && firstToken.equals("CHECK")) { int index = sender.lastIndexOf('@'); String domain = sender.substring(index + 1); logList = new LinkedList<String>(); try { CacheSPF.refresh(domain, false); } catch (ProcessException ex) { logList.add("Cannot refresh SPF registry: " + ex.getErrorMessage()); logList.add("Using cached SPF registry."); } } SPF spf; if (sender == null) { spf = null; result = "NONE"; } else if (Domain.isOfficialTLD(sender)) { spf = null; result = "NONE"; } else if (Generic.containsGeneric(sender)) { spf = null; result = "NONE"; } else if ((spf = CacheSPF.get(sender)) == null) { result = "NONE"; } else if (spf.isInexistent()) { result = "NONE"; } else { result = spf.getResult(ip, sender, helo, logList); } String mx = Domain.extractHost(sender, true); if (user != null && user.isLocal()) { // Message from local user. return "LAN\n"; } else if (recipient != null && result.equals("PASS")) { if (recipient.endsWith(mx)) { // Message from same domain. return "LAN\n"; } else if (recipient.equals(Core.getAbuseEmail()) && User.exists(sender, "postmaster" + mx)) { // Message to abuse. return "LAN\n"; } } if (result.equals("PASS") || (sender != null && Provider.containsHELO(ip, hostname))) { // Quando fo PASS, significa que o domnio // autorizou envio pelo IP, portanto o dono dele // responsavel pelas mensagens. if (!Provider.containsExact(mx)) { // No um provedor ento // o MX deve ser listado. tokenSet.add(mx); origem = mx; } else if (Domain.isValidEmail(sender)) { // Listar apenas o remetente se o // hostname for um provedor de e-mail. String userEmail = null; String recipientEmail = null; for (String token : tokenSet) { if (token.endsWith(":")) { userEmail = token; } else if (token.startsWith(">")) { recipientEmail = token; } } tokenSet.clear(); tokenSet.add(sender); if (userEmail != null) { tokenSet.add(userEmail); } if (recipientEmail != null) { tokenSet.add(recipientEmail); } origem = sender; } else { origem = sender; } fluxo = origem + ">" + recipient; } else if (hostname == null) { origem = (sender == null ? "" : sender + '>') + ip; fluxo = origem + ">" + recipient; } else { String dominio = Domain.extractDomain(hostname, true); origem = (sender == null ? "" : sender + '>') + (dominio == null ? 
hostname : dominio.substring(1)); fluxo = origem + ">" + recipient; } Long recipientTrapTime = Trap.getTimeRecipient(client, user, recipient); if (firstToken.equals("CHECK")) { String results = "\nSPF resolution results:\n"; if (spf != null && spf.isInexistent()) { results += " NXDOMAIN\n"; } else if (logList == null || logList.isEmpty()) { results += " NONE\n"; } else { for (String log : logList) { results += " " + log + "\n"; } } String white; String block; if ((white = White.find(client, user, ip, sender, hostname, result, recipient)) != null) { results += "\nFirst WHITE match: " + white + "\n"; } else if ((block = Block.find(client, user, ip, sender, hostname, result, recipient, false, true, true, false)) != null) { results += "\nFirst BLOCK match: " + block + "\n"; } TreeSet<String> graceSet = new TreeSet<String>(); if (Domain.isGraceTime(sender)) { graceSet.add(Domain.extractDomain(sender, false)); } if (Domain.isGraceTime(hostname)) { graceSet.add(Domain.extractDomain(hostname, false)); } if (!graceSet.isEmpty()) { results += "\n"; results += "Domains in grace time:\n"; for (String grace : graceSet) { results += " " + grace + "\n"; } } results += "\n"; results += "Considered identifiers and status:\n"; tokenSet = expandTokenSet(tokenSet); TreeMap<String, Distribution> distributionMap = CacheDistribution.getMap(tokenSet); int count = 0; for (String token : tokenSet) { if (!token.startsWith(">") && !token.endsWith(":")) { if (!Ignore.contains(token)) { float probability; Status status; if (distributionMap.containsKey(token)) { Distribution distribution = distributionMap.get(token); probability = distribution.getSpamProbability(token); status = distribution.getStatus(token); } else { probability = 0.0f; status = SPF.Status.GREEN; } results += " " + token + " " + status.name() + " " + Core.DECIMAL_FORMAT.format(probability) + "\n"; count++; } } } if (count == 0) { results += " NONE\n"; } results += "\n"; return results; } else if (recipientTrapTime == null && White.contains(client, user, ip, sender, hostname, result, recipient)) { if (White.contains(client, user, ip, sender, hostname, result, null)) { // Limpa da lista BLOCK um possvel falso positivo. Block.clear(client, user, ip, sender, hostname, result, null); } // Calcula frequencia de consultas. String url = Core.getURL(); String ticket = SPF.addQueryHam(client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "WHITE"); return "WHITE " + (url == null ? ticket : url + ticket) + "\n"; } else if (Block.contains(client, user, ip, sender, hostname, result, recipient, true, true, true, true)) { Action action = client == null ? Action.REJECT : client.getActionBLOCK(); if (action == Action.REJECT) { // Calcula frequencia de consultas. long time = Server.getNewUniqueTime(); User.Query queryLocal = SPF.addQuerySpam(time, client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "BLOCK"); action = client == null ? Action.FLAG : client.getActionRED(); if (action != Action.REJECT && queryLocal != null && queryLocal.needHeader()) { if (action == Action.FLAG) { queryLocal.setResult("FLAG"); String url = Core.getURL(); String ticket = SPF.createTicket(time, tokenSet); return "FLAG " + (url == null ? ticket : url + ticket) + "\n"; } else if (action == Action.HOLD) { queryLocal.setResult("HOLD"); String url = Core.getURL(); String ticket = SPF.createTicket(time, tokenSet); return "HOLD " + (url == null ? 
ticket : url + ticket) + "\n"; } else { return "ERROR: UNDEFINED ACTION\n"; } } else { String url = Core.getUnblockURL(client, user, ip, sender, hostname, recipient); if (url == null) { return "BLOCKED\n"; } else { return "BLOCKED " + url + "\n"; } } } else if (action == Action.FLAG) { String url = Core.getURL(); String ticket = SPF.getTicket(client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "FLAG"); return "FLAG " + (url == null ? ticket : url + ticket) + "\n"; } else if (action == Action.HOLD) { String url = Core.getURL(); String ticket = SPF.getTicket(client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "HOLD"); return "HOLD " + (url == null ? ticket : url + ticket) + "\n"; } else { return "ERROR: UNDEFINED ACTION\n"; } } else if (Generic.containsDynamicDomain(hostname)) { // Bloquear automaticamente range de IP dinmico. String cidr = Subnet .normalizeCIDR(SubnetIPv4.isValidIPv4(ip) ? ip + "/24" : ip + "/48"); if (Block.tryOverlap(cidr)) { Server.logDebug( "new BLOCK '" + cidr + "' added by '" + hostname + ";DYNAMIC'."); } else if (Block.tryAdd(ip)) { Server.logDebug("new BLOCK '" + ip + "' added by '" + hostname + ";DYNAMIC'."); } SPF.addQuerySpam(client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "INVALID"); return "INVALID\n"; } else if (spf != null && spf.isDefinitelyInexistent()) { // Bloquear automaticamente IP com reputao vermelha. if (SPF.isRed(ip)) { if (Block.tryAdd(ip)) { Server.logDebug("new BLOCK '" + ip + "' added by '" + mx + ";NXDOMAIN'."); } } Analise.processToday(ip); // O domnio foi dado como inexistente inmeras vezes. // Rejeitar e denunciar o host pois h abuso de tentativas. SPF.addQuerySpam(client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "NXDOMAIN"); return "NXDOMAIN\n"; } else if (spf != null && spf.isInexistent()) { Analise.processToday(ip); SPF.addQuery(client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "NXDOMAIN"); return "NXDOMAIN\n"; } else if (result.equals("FAIL")) { // Bloquear automaticamente IP com reputao vermelha. if (SPF.isRed(ip)) { if (Block.tryAdd(ip)) { Server.logDebug("new BLOCK '" + ip + "' added by '" + sender + ";FAIL'."); } } Analise.processToday(ip); SPF.addQuerySpam(client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "FAIL"); // Retornar FAIL somente se no houver // liberao literal do remetente com FAIL. return "FAIL\n"; } else if (sender != null && !Domain.isEmail(sender)) { // Bloquear automaticamente IP com reputao vermelha. if (SPF.isRed(ip)) { if (Block.tryAdd(ip)) { Server.logDebug( "new BLOCK '" + ip + "' added by '" + sender + ";INVALID'."); } } Analise.processToday(ip); SPF.addQuerySpam(client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "INVALID"); return "INVALID\n"; } else if (sender != null && Domain.isOfficialTLD(sender)) { // Bloquear automaticamente IP com reputao vermelha. if (SPF.isRed(ip)) { if (Block.tryAdd(ip)) { Server.logDebug( "new BLOCK '" + ip + "' added by '" + sender + ";RESERVED'."); } } Analise.processToday(ip); SPF.addQuerySpam(client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "INVALID"); return "INVALID\n"; } else if (sender == null && !CacheHELO.match(ip, hostname, false)) { // Bloquear automaticamente IP com reputao ruim. 
if (SPF.isRed(ip)) { if (Block.tryAdd(ip)) { Server.logDebug("new BLOCK '" + ip + "' added by 'INVALID'."); } } Analise.processToday(ip); SPF.addQuerySpam(client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "INVALID"); // HELO invlido sem remetente. return "INVALID\n"; } else if (hostname == null && Core.isReverseRequired()) { if (Block.tryAdd(ip)) { Server.logDebug("new BLOCK '" + ip + "' added by 'NONE'."); } Analise.processToday(ip); SPF.addQuerySpam(client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "INVALID"); // Require a valid HELO or reverse. return "INVALID\n"; } else if (recipient != null && !Domain.isValidEmail(recipient)) { Analise.processToday(ip); Analise.processToday(mx); SPF.getTicket(client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "INEXISTENT"); return "INEXISTENT\n"; } else if (recipientTrapTime != null) { if (System.currentTimeMillis() > recipientTrapTime) { // Spamtrap for (String token : tokenSet) { String block; Status status = SPF.getStatus(token); if (status == Status.RED && (block = Block.add(token)) != null) { Server.logDebug("new BLOCK '" + block + "' added by '" + recipient + ";SPAMTRAP'."); Peer.sendBlockToAll(block); } if (status != Status.GREEN && !Subnet.isValidIP(token) && (block = Block.addIfNotNull(user, token)) != null) { Server.logDebug("new BLOCK '" + block + "' added by '" + recipient + ";SPAMTRAP'."); } } Analise.processToday(ip); Analise.processToday(mx); // Calcula frequencia de consultas. SPF.addQuerySpam(client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "TRAP"); return "SPAMTRAP\n"; } else { // Inexistent for (String token : tokenSet) { String block; Status status = SPF.getStatus(token); if (status == Status.RED && (block = Block.add(token)) != null) { Server.logDebug("new BLOCK '" + block + "' added by '" + recipient + ";INEXISTENT'."); Peer.sendBlockToAll(block); } if (status != Status.GREEN && !Subnet.isValidIP(token) && (block = Block.addIfNotNull(user, token)) != null) { Server.logDebug("new BLOCK '" + block + "' added by '" + recipient + ";INEXISTENT'."); } } Analise.processToday(ip); Analise.processToday(mx); SPF.getTicket(client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "INEXISTENT"); return "INEXISTENT\n"; } } else if (Defer.count(fluxo) > Core.getFloodMaxRetry()) { Analise.processToday(ip); Analise.processToday(mx); // A origem atingiu o limite de atraso // para liberao do destinatrio. long time = System.currentTimeMillis(); Defer.end(fluxo); Server.logDefer(time, fluxo, "DEFER FLOOD"); SPF.addQuerySpam(client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "REJECT"); return "BLOCKED\n"; } else if (!result.equals("PASS") && !CacheHELO.match(ip, hostname, false)) { // Bloquear automaticamente IP com reputao amarela. if (SPF.isRed(ip)) { if (Block.tryAdd(ip)) { Server.logDebug( "new BLOCK '" + ip + "' added by '" + recipient + ";INVALID'."); } } Analise.processToday(ip); SPF.addQuerySpam(client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "INVALID"); return "INVALID\n"; } else if (recipient != null && recipient.startsWith("postmaster@")) { String url = Core.getURL(); String ticket = SPF.getTicket(client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "ACCEPT"); return result + " " + (url == null ? ticket : url + URLEncoder.encode(ticket, "UTF-8")) + "\n"; } else if (result.equals("PASS") && SPF.isGood(Provider.containsExact(mx) ? 
sender : mx)) { // O remetente vlido e tem excelente reputao, // ainda que o provedor dele esteja com reputao ruim. String url = Core.getURL(); String ticket = SPF.addQueryHam(client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "ACCEPT"); return "PASS " + (url == null ? ticket : url + URLEncoder.encode(ticket, "UTF-8")) + "\n"; } else if (SPF.hasRed(tokenSet) || Analise.isCusterRED(ip, sender, hostname)) { Analise.processToday(ip); Analise.processToday(mx); Action action = client == null ? Action.REJECT : client.getActionRED(); if (action == Action.REJECT) { // Calcula frequencia de consultas. SPF.addQuerySpam(client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "REJECT"); return "BLOCKED\n"; } else if (action == Action.DEFER) { if (Defer.defer(fluxo, Core.getDeferTimeRED())) { String url = Core.getReleaseURL(fluxo); SPF.addQuery(client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "LISTED"); if (url == null || Defer.count(fluxo) > 1) { return "LISTED\n"; } else if (result.equals("PASS") && enviarLiberacao(url, sender, recipient)) { // Envio da liberao por e-mail se // houver validao do remetente por PASS. return "LISTED\n"; } else { return "LISTED " + url + "\n"; } } else { // Calcula frequencia de consultas. SPF.addQuerySpam(client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "REJECT"); return "BLOCKED\n"; } } else if (action == Action.FLAG) { String url = Core.getURL(); String ticket = SPF.getTicket(client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "FLAG"); return "FLAG " + (url == null ? ticket : url + ticket) + "\n"; } else if (action == Action.HOLD) { String url = Core.getURL(); String ticket = SPF.getTicket(client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "HOLD"); return "HOLD " + (url == null ? ticket : url + ticket) + "\n"; } else { return "ERROR: UNDEFINED ACTION\n"; } } else if (Domain.isGraceTime(sender) || Domain.isGraceTime(hostname)) { Server.logTrace("domain in grace time."); for (String token : tokenSet) { String block; Status status = SPF.getStatus(token); if (status == Status.RED && (block = Block.add(token)) != null) { Server.logDebug("new BLOCK '" + block + "' added by '" + status + "'."); Peer.sendBlockToAll(block); } if (status != Status.GREEN && !Subnet.isValidIP(token) && (block = Block.addIfNotNull(user, token)) != null) { Server.logDebug("new BLOCK '" + block + "' added by '" + status + "'."); } } Analise.processToday(ip); Analise.processToday(mx); Action action = client == null ? Action.REJECT : client.getActionGRACE(); if (action == Action.REJECT) { // Calcula frequencia de consultas. SPF.addQuerySpam(client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "REJECT"); return "BLOCKED\n"; } else if (action == Action.DEFER) { if (Defer.defer(fluxo, Core.getDeferTimeRED())) { String url = Core.getReleaseURL(fluxo); SPF.addQuery(client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "LISTED"); if (url == null || Defer.count(fluxo) > 1) { return "LISTED\n"; } else if (result.equals("PASS") && enviarLiberacao(url, sender, recipient)) { // Envio da liberao por e-mail se // houver validao do remetente por PASS. return "LISTED\n"; } else { return "LISTED " + url + "\n"; } } else { // Calcula frequencia de consultas. 
SPF.addQuerySpam(client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "REJECT"); return "BLOCKED\n"; } } else if (action == Action.FLAG) { String url = Core.getURL(); String ticket = SPF.getTicket(client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "FLAG"); return "FLAG " + (url == null ? ticket : url + ticket) + "\n"; } else if (action == Action.HOLD) { String url = Core.getURL(); String ticket = SPF.getTicket(client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "HOLD"); return "HOLD " + (url == null ? ticket : url + ticket) + "\n"; } else { return "ERROR: UNDEFINED ACTION\n"; } } else if (SPF.hasYellow(tokenSet) && Defer.defer(fluxo, Core.getDeferTimeYELLOW())) { Analise.processToday(ip); Analise.processToday(mx); Action action = client == null ? Action.DEFER : client.getActionYELLOW(); if (action == Action.DEFER) { // Pelo menos um identificador do conjunto est em greylisting com atrazo de 10min. SPF.addQuery(client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "GREYLIST"); return "GREYLIST\n"; } else if (action == Action.HOLD) { String url = Core.getURL(); String ticket = SPF.getTicket(client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "HOLD"); return "HOLD " + (url == null ? ticket : url + ticket) + "\n"; } else { return "ERROR: UNDEFINED ACTION\n"; } } else if (SPF.isFlood(tokenSet) && !Provider.containsHELO(ip, hostname) && Defer.defer(origem, Core.getDeferTimeFLOOD())) { Analise.processToday(ip); Analise.processToday(mx); // Pelo menos um identificador est com frequncia superior ao permitido. Server.logDebug("FLOOD " + tokenSet); SPF.addQuery(client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "GREYLIST"); return "GREYLIST\n"; } else if (result.equals("SOFTFAIL") && !Provider.containsHELO(ip, hostname) && Defer.defer(fluxo, Core.getDeferTimeSOFTFAIL())) { Analise.processToday(ip); Analise.processToday(mx); // SOFTFAIL com atrazo de 1min. SPF.addQuery(client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "GREYLIST"); return "GREYLIST\n"; } else { Analise.processToday(ip); Analise.processToday(mx); // Calcula frequencia de consultas. String url = Core.getURL(); String ticket = SPF.addQueryHam(client, user, ip, helo, hostname, sender, result, recipient, tokenSet, "ACCEPT"); return result + " " + (url == null ? ticket : url + URLEncoder.encode(ticket, "UTF-8")) + "\n"; } } } catch (ProcessException ex) { if (ex.isErrorMessage("HOST NOT FOUND")) { return "NXDOMAIN\n"; } else { throw ex; } } } else { return "INVALID QUERY\n"; } } return result; } catch (ProcessException ex) { Server.logError(ex); return ex.getMessage() + "\n"; } catch (Exception ex) { Server.logError(ex); return "ERROR: FATAL\n"; } }
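In processSPF above, isEmpty() guards the optional log collected for a CHECK query: logList is only created when the query is a CHECK with a sender, and the "SPF resolution results" report falls back to "NONE" when the list is null or empty. Below is a rough sketch of that null-or-empty guard, assuming a hypothetical formatResolutionLog helper and example messages in place of the original's inline string building.

import java.util.LinkedList;

public class SpfLogReport {

    // Hypothetical helper mirroring the CHECK branch's report formatting.
    static String formatResolutionLog(LinkedList<String> logList) {
        StringBuilder results = new StringBuilder("\nSPF resolution results:\n");
        if (logList == null || logList.isEmpty()) {
            // nothing was logged while resolving the registry
            results.append("   NONE\n");
        } else {
            for (String log : logList) {
                results.append("   ").append(log).append('\n');
            }
        }
        return results.toString();
    }

    public static void main(String[] args) {
        System.out.print(formatResolutionLog(new LinkedList<String>())); // prints NONE

        LinkedList<String> logList = new LinkedList<String>();
        logList.add("Cannot refresh SPF registry: timeout"); // example messages
        logList.add("Using cached SPF registry.");
        System.out.print(formatResolutionLog(logList)); // prints both messages
    }
}

Treating null and empty the same way keeps the caller simple: the list is only allocated on the CHECK path, so every other path can pass null without needing a separate branch.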