Example usage for java.util ArrayList clear

List of usage examples for java.util ArrayList clear

Introduction

On this page you can find example usage for java.util.ArrayList.clear().

Prototype

public void clear() 

Source Link

Document

Removes all of the elements from this list.
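
For reference, here is a minimal, self-contained sketch of clear() on its own (the class and variable names below are illustrative, not taken from the examples that follow):

import java.util.ArrayList;
import java.util.List;

public class ClearExample {
    public static void main(String[] args) {
        List<String> names = new ArrayList<String>();
        names.add("alpha");
        names.add("beta");
        System.out.println(names.size());    // 2

        // Removes all of the elements from this list; the list is empty after this call returns.
        names.clear();

        System.out.println(names.size());    // 0
        System.out.println(names.isEmpty()); // true
    }
}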

Usage

From source file:org.alfresco.solr.query.Solr4QueryParser.java

@SuppressWarnings("unchecked")
protected Query getFieldQueryImpl(String field, String queryText, AnalysisMode analysisMode,
        LuceneFunction luceneFunction) throws ParseException, IOException {
    // make sure the field exists or return a dummy query so we have no error ....ACE-3231
    SchemaField schemaField = schema.getFieldOrNull(field);
    boolean isNumeric = false;
    if (schemaField == null) {
        return new TermQuery(new Term("_dummy_", "_miss_"));
    } else {
        isNumeric = (schemaField.getType().getNumericType() != null);
    }

    // Use the analyzer to get all the tokens, and then build a TermQuery,
    // PhraseQuery, or nothing based on the term count

    // TODO: Untokenised columns with functions require special handling

    if (luceneFunction != LuceneFunction.FIELD) {
        throw new UnsupportedOperationException(
                "Field queries are not supported on lucene functions (UPPER, LOWER, etc)");
    }

    // if the incoming string already has a language identifier we strip it off and add it back on again

    String localePrefix = "";

    String toTokenise = queryText;

    if (queryText.startsWith("{")) {
        int position = queryText.indexOf("}");
        if (position > 0) {
            String language = queryText.substring(0, position + 1);
            Locale locale = new Locale(queryText.substring(1, position));
            String token = queryText.substring(position + 1);
            boolean found = false;
            for (Locale current : Locale.getAvailableLocales()) {
                if (current.toString().equalsIgnoreCase(locale.toString())) {
                    found = true;
                    break;
                }
            }
            if (found) {
                localePrefix = language;
                toTokenise = token;
            } else {
                //toTokenise = token;
            }
        }
    }

    String testText = toTokenise;
    boolean requiresMLTokenDuplication = false;
    String localeString = null;
    if (isPropertyField(field) && (localePrefix.length() == 0)) {
        if ((queryText.length() > 0) && (queryText.charAt(0) == '\u0000')) {
            int position = queryText.indexOf("\u0000", 1);
            testText = queryText.substring(position + 1);
            requiresMLTokenDuplication = true;
            localeString = queryText.substring(1, position);

        }
    }

    // find the positions of any escaped * and ? and ignore them

    Set<Integer> wildcardPoistions = getWildcardPositions(testText);

    TokenStream source = null;
    ArrayList<org.apache.lucene.analysis.Token> list = new ArrayList<org.apache.lucene.analysis.Token>();
    boolean severalTokensAtSamePosition = false;
    org.apache.lucene.analysis.Token nextToken;
    int positionCount = 0;

    try {
        org.apache.lucene.analysis.Token reusableToken = new org.apache.lucene.analysis.Token();

        source = getAnalyzer().tokenStream(field, new StringReader(toTokenise));
        source.reset();
        while (source.incrementToken()) {
            CharTermAttribute cta = source.getAttribute(CharTermAttribute.class);
            OffsetAttribute offsetAtt = source.getAttribute(OffsetAttribute.class);
            TypeAttribute typeAtt = null;
            if (source.hasAttribute(TypeAttribute.class)) {
                typeAtt = source.getAttribute(TypeAttribute.class);
            }
            PositionIncrementAttribute posIncAtt = null;
            if (source.hasAttribute(PositionIncrementAttribute.class)) {
                posIncAtt = source.getAttribute(PositionIncrementAttribute.class);
            }
            nextToken = new Token(cta.buffer(), 0, cta.length(), offsetAtt.startOffset(),
                    offsetAtt.endOffset());
            if (typeAtt != null) {
                nextToken.setType(typeAtt.type());
            }
            if (posIncAtt != null) {
                nextToken.setPositionIncrement(posIncAtt.getPositionIncrement());
            }

            list.add(nextToken);
            if (nextToken.getPositionIncrement() != 0)
                positionCount += nextToken.getPositionIncrement();
            else
                severalTokensAtSamePosition = true;
        }
    } catch (SolrException e) {
        // MNT-15336
        // Text against a numeric field should fail silently rather than tell you it is not possible.
        if (isNumeric && e.getMessage() != null && e.getMessage().startsWith("Invalid Number:")) {
            // Generate a query that does not match any document - rather than nothing
            return createNoMatchQuery();
        } else {
            throw e;
        }
    } finally {
        try {
            if (source != null) {
                source.close();
            }
        } catch (IOException e) {
            // ignore
        }
    }

    // add any alpha numeric wildcards that have been missed
    // Fixes most stop word and wild card issues

    for (int index = 0; index < testText.length(); index++) {
        char current = testText.charAt(index);
        if (((current == '*') || (current == '?')) && wildcardPoistions.contains(index)) {
            StringBuilder pre = new StringBuilder(10);
            if (index == 0) {
                // "*" and "?" at the start

                boolean found = false;
                for (int j = 0; j < list.size(); j++) {
                    org.apache.lucene.analysis.Token test = list.get(j);
                    if ((test.startOffset() <= 0) && (0 < test.endOffset())) {
                        found = true;
                        break;
                    }
                }
                if (!found && (list.size() == 0)) {
                    // Add new token followed by * not given by the tokeniser
                    org.apache.lucene.analysis.Token newToken = new org.apache.lucene.analysis.Token("", 0, 0);
                    newToken.setType("ALPHANUM");
                    if (requiresMLTokenDuplication) {
                        Locale locale = I18NUtil.parseLocale(localeString);
                        MLTokenDuplicator duplicator = new MLTokenDuplicator(locale,
                                MLAnalysisMode.EXACT_LANGUAGE);
                        Iterator<org.apache.lucene.analysis.Token> it = duplicator.buildIterator(newToken);
                        if (it != null) {
                            int count = 0;
                            while (it.hasNext()) {
                                list.add(it.next());
                                count++;
                                if (count > 1) {
                                    severalTokensAtSamePosition = true;
                                }
                            }
                        }
                    }
                    // content
                    else {
                        list.add(newToken);
                    }
                }
            } else if (index > 0) {
                // Add * and ? back into any tokens from which they have been removed

                boolean tokenFound = false;
                for (int j = 0; j < list.size(); j++) {
                    org.apache.lucene.analysis.Token test = list.get(j);
                    if ((test.startOffset() <= index) && (index < test.endOffset())) {
                        if (requiresMLTokenDuplication) {
                            String termText = test.toString();
                            int position = termText.indexOf("}");
                            String language = termText.substring(0, position + 1);
                            String token = termText.substring(position + 1);
                            if (index >= test.startOffset() + token.length()) {
                                test.setEmpty();
                                test.append(language + token + current);
                            }
                        } else {
                            if (index >= test.startOffset() + test.length()) {
                                test.setEmpty();
                                test.append(test.toString() + current);
                            }
                        }
                        tokenFound = true;
                        break;
                    }
                }

                if (!tokenFound) {
                    for (int i = index - 1; i >= 0; i--) {
                        char c = testText.charAt(i);
                        if (Character.isLetterOrDigit(c)) {
                            boolean found = false;
                            for (int j = 0; j < list.size(); j++) {
                                org.apache.lucene.analysis.Token test = list.get(j);
                                if ((test.startOffset() <= i) && (i < test.endOffset())) {
                                    found = true;
                                    break;
                                }
                            }
                            if (found) {
                                break;
                            } else {
                                pre.insert(0, c);
                            }
                        } else {
                            break;
                        }
                    }
                    if (pre.length() > 0) {
                        // Add new token followed by * not given by the tokeniser
                        org.apache.lucene.analysis.Token newToken = new org.apache.lucene.analysis.Token(
                                pre.toString(), index - pre.length(), index);
                        newToken.setType("ALPHANUM");
                        if (requiresMLTokenDuplication) {
                            Locale locale = I18NUtil.parseLocale(localeString);
                            MLTokenDuplicator duplicator = new MLTokenDuplicator(locale,
                                    MLAnalysisMode.EXACT_LANGUAGE);
                            Iterator<org.apache.lucene.analysis.Token> it = duplicator.buildIterator(newToken);
                            if (it != null) {
                                int count = 0;
                                while (it.hasNext()) {
                                    list.add(it.next());
                                    count++;
                                    if (count > 1) {
                                        severalTokensAtSamePosition = true;
                                    }
                                }
                            }
                        }
                        // content
                        else {
                            list.add(newToken);
                        }
                    }
                }
            }

            StringBuilder post = new StringBuilder(10);
            if (index > 0) {
                for (int i = index + 1; i < testText.length(); i++) {
                    char c = testText.charAt(i);
                    if (Character.isLetterOrDigit(c)) {
                        boolean found = false;
                        for (int j = 0; j < list.size(); j++) {
                            org.apache.lucene.analysis.Token test = list.get(j);
                            if ((test.startOffset() <= i) && (i < test.endOffset())) {
                                found = true;
                                break;
                            }
                        }
                        if (found) {
                            break;
                        } else {
                            post.append(c);
                        }
                    } else {
                        break;
                    }
                }
                if (post.length() > 0) {
                    // Add new token followed by * not given by the tokeniser
                    org.apache.lucene.analysis.Token newToken = new org.apache.lucene.analysis.Token(
                            post.toString(), index + 1, index + 1 + post.length());
                    newToken.setType("ALPHANUM");
                    if (requiresMLTokenDuplication) {
                        Locale locale = I18NUtil.parseLocale(localeString);
                        MLTokenDuplicator duplicator = new MLTokenDuplicator(locale,
                                MLAnalysisMode.EXACT_LANGUAGE);
                        Iterator<org.apache.lucene.analysis.Token> it = duplicator.buildIterator(newToken);
                        if (it != null) {
                            int count = 0;
                            while (it.hasNext()) {
                                list.add(it.next());
                                count++;
                                if (count > 1) {
                                    severalTokensAtSamePosition = true;
                                }
                            }
                        }
                    }
                    // content
                    else {
                        list.add(newToken);
                    }
                }
            }

        }
    }

    // Put in real position increments as we treat them correctly

    int curentIncrement = -1;
    for (org.apache.lucene.analysis.Token c : list) {
        if (curentIncrement == -1) {
            curentIncrement = c.getPositionIncrement();
        } else if (c.getPositionIncrement() > 0) {
            curentIncrement = c.getPositionIncrement();
        } else {
            c.setPositionIncrement(curentIncrement);
        }
    }

    // Remove small bits already covered in larger fragments 
    list = getNonContained(list);

    Collections.sort(list, new Comparator<org.apache.lucene.analysis.Token>() {

        public int compare(Token o1, Token o2) {
            int dif = o1.startOffset() - o2.startOffset();
            return dif;

        }
    });

    // Combined * and ? based strings - should redo the tokeniser

    // Build tokens by position

    LinkedList<LinkedList<org.apache.lucene.analysis.Token>> tokensByPosition = new LinkedList<LinkedList<org.apache.lucene.analysis.Token>>();
    LinkedList<org.apache.lucene.analysis.Token> currentList = null;
    int lastStart = 0;
    for (org.apache.lucene.analysis.Token c : list) {
        if (c.startOffset() == lastStart) {
            if (currentList == null) {
                currentList = new LinkedList<org.apache.lucene.analysis.Token>();
                tokensByPosition.add(currentList);
            }
            currentList.add(c);
        } else {
            currentList = new LinkedList<org.apache.lucene.analysis.Token>();
            tokensByPosition.add(currentList);
            currentList.add(c);
        }
        lastStart = c.startOffset();
    }

    // Build all the token sequences and see which ones get strung together

    OrderedHashSet<LinkedList<org.apache.lucene.analysis.Token>> allTokenSequencesSet = new OrderedHashSet<LinkedList<org.apache.lucene.analysis.Token>>();
    for (LinkedList<org.apache.lucene.analysis.Token> tokensAtPosition : tokensByPosition) {
        OrderedHashSet<LinkedList<org.apache.lucene.analysis.Token>> positionalSynonymSequencesSet = new OrderedHashSet<LinkedList<org.apache.lucene.analysis.Token>>();

        OrderedHashSet<LinkedList<org.apache.lucene.analysis.Token>> newAllTokenSequencesSet = new OrderedHashSet<LinkedList<org.apache.lucene.analysis.Token>>();

        FOR_FIRST_TOKEN_AT_POSITION_ONLY: for (org.apache.lucene.analysis.Token t : tokensAtPosition) {
            org.apache.lucene.analysis.Token replace = new org.apache.lucene.analysis.Token(t, t.startOffset(),
                    t.endOffset());
            replace.setType(t.type());
            replace.setPositionIncrement(t.getPositionIncrement());

            boolean tokenFoundSequence = false;
            for (LinkedList<org.apache.lucene.analysis.Token> tokenSequence : allTokenSequencesSet) {
                LinkedList<org.apache.lucene.analysis.Token> newEntry = new LinkedList<org.apache.lucene.analysis.Token>();
                newEntry.addAll(tokenSequence);
                if ((newEntry.getLast().endOffset() == replace.endOffset())
                        && replace.type().equals(SynonymFilter.TYPE_SYNONYM)) {
                    if ((newEntry.getLast().startOffset() == replace.startOffset())
                            && newEntry.getLast().type().equals(SynonymFilter.TYPE_SYNONYM)) {
                        positionalSynonymSequencesSet.add(tokenSequence);
                        newEntry.add(replace);
                        tokenFoundSequence = true;
                    } else if (newEntry.getLast().type().equals(CommonGramsFilter.GRAM_TYPE)) {
                        if (newEntry.toString().endsWith(replace.toString())) {
                            // already in the gram
                            positionalSynonymSequencesSet.add(tokenSequence);
                            tokenFoundSequence = true;
                        } else {
                            // need to replace the synonym in the current gram
                            tokenFoundSequence = true;
                            StringBuffer old = new StringBuffer(newEntry.getLast().toString());
                            old.replace(replace.startOffset() - newEntry.getLast().startOffset(),
                                    replace.endOffset() - newEntry.getLast().startOffset(), replace.toString());
                            Token newToken = new org.apache.lucene.analysis.Token(old.toString(),
                                    newEntry.getLast().startOffset(), newEntry.getLast().endOffset());
                            newEntry.removeLast();
                            newEntry.add(newToken);
                        }
                    }
                } else if ((newEntry.getLast().startOffset() < replace.startOffset())
                        && (newEntry.getLast().endOffset() < replace.endOffset())) {
                    if (newEntry.getLast().type().equals(SynonymFilter.TYPE_SYNONYM)
                            && replace.type().equals(SynonymFilter.TYPE_SYNONYM)) {
                        positionalSynonymSequencesSet.add(tokenSequence);
                    }
                    newEntry.add(replace);
                    tokenFoundSequence = true;
                }
                newAllTokenSequencesSet.add(newEntry);
            }
            if (false == tokenFoundSequence) {
                for (LinkedList<org.apache.lucene.analysis.Token> tokenSequence : newAllTokenSequencesSet) {
                    LinkedList<org.apache.lucene.analysis.Token> newEntry = new LinkedList<org.apache.lucene.analysis.Token>();
                    newEntry.addAll(tokenSequence);
                    if ((newEntry.getLast().endOffset() == replace.endOffset())
                            && replace.type().equals(SynonymFilter.TYPE_SYNONYM)) {
                        if ((newEntry.getLast().startOffset() == replace.startOffset())
                                && newEntry.getLast().type().equals(SynonymFilter.TYPE_SYNONYM)) {
                            positionalSynonymSequencesSet.add(tokenSequence);
                            newEntry.add(replace);
                            tokenFoundSequence = true;
                        } else if (newEntry.getLast().type().equals(CommonGramsFilter.GRAM_TYPE)) {
                            if (newEntry.toString().endsWith(replace.toString())) {
                                // already in the gram
                                positionalSynonymSequencesSet.add(tokenSequence);
                                tokenFoundSequence = true;
                            } else {
                                // need to replace the synonym in the current gram
                                tokenFoundSequence = true;
                                StringBuffer old = new StringBuffer(newEntry.getLast().toString());
                                old.replace(replace.startOffset() - newEntry.getLast().startOffset(),
                                        replace.endOffset() - newEntry.getLast().startOffset(),
                                        replace.toString());
                                Token newToken = new org.apache.lucene.analysis.Token(old.toString(),
                                        newEntry.getLast().startOffset(), newEntry.getLast().endOffset());
                                newEntry.removeLast();
                                newEntry.add(newToken);
                                positionalSynonymSequencesSet.add(newEntry);
                            }
                        }
                    } else if ((newEntry.getLast().startOffset() < replace.startOffset())
                            && (newEntry.getLast().endOffset() < replace.endOffset())) {
                        if (newEntry.getLast().type().equals(SynonymFilter.TYPE_SYNONYM)
                                && replace.type().equals(SynonymFilter.TYPE_SYNONYM)) {
                            positionalSynonymSequencesSet.add(tokenSequence);
                            newEntry.add(replace);
                            tokenFoundSequence = true;
                        }
                    }
                }
            }
            if (false == tokenFoundSequence) {
                LinkedList<org.apache.lucene.analysis.Token> newEntry = new LinkedList<org.apache.lucene.analysis.Token>();
                newEntry.add(replace);
                newAllTokenSequencesSet.add(newEntry);
            }
            // Limit the max number of permutations we consider
            if (newAllTokenSequencesSet.size() > 64) {
                break FOR_FIRST_TOKEN_AT_POSITION_ONLY;
            }
        }
        allTokenSequencesSet = newAllTokenSequencesSet;
        allTokenSequencesSet.addAll(positionalSynonymSequencesSet);

    }

    LinkedList<LinkedList<org.apache.lucene.analysis.Token>> allTokenSequences = new LinkedList<LinkedList<org.apache.lucene.analysis.Token>>(
            allTokenSequencesSet);

    // build the unique

    LinkedList<LinkedList<org.apache.lucene.analysis.Token>> fixedTokenSequences = new LinkedList<LinkedList<org.apache.lucene.analysis.Token>>();
    for (LinkedList<org.apache.lucene.analysis.Token> tokenSequence : allTokenSequences) {
        LinkedList<org.apache.lucene.analysis.Token> fixedTokenSequence = new LinkedList<org.apache.lucene.analysis.Token>();
        fixedTokenSequences.add(fixedTokenSequence);
        org.apache.lucene.analysis.Token replace = null;
        for (org.apache.lucene.analysis.Token c : tokenSequence) {
            if (replace == null) {
                StringBuilder prefix = new StringBuilder();
                for (int i = c.startOffset() - 1; i >= 0; i--) {
                    char test = testText.charAt(i);
                    if (((test == '*') || (test == '?')) && wildcardPoistions.contains(i)) {
                        prefix.insert(0, test);
                    } else {
                        break;
                    }
                }
                String pre = prefix.toString();
                if (requiresMLTokenDuplication) {
                    String termText = c.toString();
                    int position = termText.indexOf("}");
                    String language = termText.substring(0, position + 1);
                    String token = termText.substring(position + 1);
                    replace = new org.apache.lucene.analysis.Token(language + pre + token,
                            c.startOffset() - pre.length(), c.endOffset());
                    replace.setType(c.type());
                    replace.setPositionIncrement(c.getPositionIncrement());
                } else {
                    String termText = c.toString();
                    replace = new org.apache.lucene.analysis.Token(pre + termText,
                            c.startOffset() - pre.length(), c.endOffset());
                    replace.setType(c.type());
                    replace.setPositionIncrement(c.getPositionIncrement());
                }
            } else {
                StringBuilder prefix = new StringBuilder();
                StringBuilder postfix = new StringBuilder();
                StringBuilder builder = prefix;
                for (int i = c.startOffset() - 1; i >= replace.endOffset(); i--) {
                    char test = testText.charAt(i);
                    if (((test == '*') || (test == '?')) && wildcardPoistions.contains(i)) {
                        builder.insert(0, test);
                    } else {
                        builder = postfix;
                        postfix.setLength(0);
                    }
                }
                String pre = prefix.toString();
                String post = postfix.toString();

                // Does it bridge?
                if ((pre.length() > 0) && (replace.endOffset() + pre.length()) == c.startOffset()) {
                    String termText = c.toString();
                    if (requiresMLTokenDuplication) {
                        int position = termText.indexOf("}");
                        @SuppressWarnings("unused")
                        String language = termText.substring(0, position + 1);
                        String token = termText.substring(position + 1);
                        int oldPositionIncrement = replace.getPositionIncrement();
                        String replaceTermText = replace.toString();
                        replace = new org.apache.lucene.analysis.Token(replaceTermText + pre + token,
                                replace.startOffset(), c.endOffset());
                        replace.setType(replace.type());
                        replace.setPositionIncrement(oldPositionIncrement);
                    } else {
                        int oldPositionIncrement = replace.getPositionIncrement();
                        String replaceTermText = replace.toString();
                        replace = new org.apache.lucene.analysis.Token(replaceTermText + pre + termText,
                                replace.startOffset(), c.endOffset());
                        replace.setType(replace.type());
                        replace.setPositionIncrement(oldPositionIncrement);
                    }
                } else {
                    String termText = c.toString();
                    if (requiresMLTokenDuplication) {
                        int position = termText.indexOf("}");
                        String language = termText.substring(0, position + 1);
                        String token = termText.substring(position + 1);
                        String replaceTermText = replace.toString();
                        org.apache.lucene.analysis.Token last = new org.apache.lucene.analysis.Token(
                                replaceTermText + post, replace.startOffset(),
                                replace.endOffset() + post.length());
                        last.setType(replace.type());
                        last.setPositionIncrement(replace.getPositionIncrement());
                        fixedTokenSequence.add(last);
                        replace = new org.apache.lucene.analysis.Token(language + pre + token,
                                c.startOffset() - pre.length(), c.endOffset());
                        replace.setType(c.type());
                        replace.setPositionIncrement(c.getPositionIncrement());
                    } else {
                        String replaceTermText = replace.toString();
                        org.apache.lucene.analysis.Token last = new org.apache.lucene.analysis.Token(
                                replaceTermText + post, replace.startOffset(),
                                replace.endOffset() + post.length());
                        last.setType(replace.type());
                        last.setPositionIncrement(replace.getPositionIncrement());
                        fixedTokenSequence.add(last);
                        replace = new org.apache.lucene.analysis.Token(pre + termText,
                                c.startOffset() - pre.length(), c.endOffset());
                        replace.setType(c.type());
                        replace.setPositionIncrement(c.getPositionIncrement());
                    }
                }
            }
        }
        // finish last
        if (replace != null) {
            StringBuilder postfix = new StringBuilder();
            if ((replace.endOffset() >= 0) && (replace.endOffset() < testText.length())) {
                for (int i = replace.endOffset(); i < testText.length(); i++) {
                    char test = testText.charAt(i);
                    if (((test == '*') || (test == '?')) && wildcardPoistions.contains(i)) {
                        postfix.append(test);
                    } else {
                        break;
                    }
                }
            }
            String post = postfix.toString();
            int oldPositionIncrement = replace.getPositionIncrement();
            String replaceTermText = replace.toString();
            replace = new org.apache.lucene.analysis.Token(replaceTermText + post, replace.startOffset(),
                    replace.endOffset() + post.length());
            replace.setType(replace.type());
            replace.setPositionIncrement(oldPositionIncrement);
            fixedTokenSequence.add(replace);
        }
    }

    // rebuild fixed list

    ArrayList<org.apache.lucene.analysis.Token> fixed = new ArrayList<org.apache.lucene.analysis.Token>();
    for (LinkedList<org.apache.lucene.analysis.Token> tokenSequence : fixedTokenSequences) {
        for (org.apache.lucene.analysis.Token token : tokenSequence) {
            fixed.add(token);
        }
    }

    // reorder by start position and increment

    Collections.sort(fixed, new Comparator<org.apache.lucene.analysis.Token>() {

        public int compare(Token o1, Token o2) {
            int dif = o1.startOffset() - o2.startOffset();
            if (dif != 0) {
                return dif;
            } else {
                return o1.getPositionIncrement() - o2.getPositionIncrement();
            }
        }
    });

    // make sure we remove any tokens we have duplicated

    @SuppressWarnings("rawtypes")
    OrderedHashSet unique = new OrderedHashSet();
    unique.addAll(fixed);
    fixed = new ArrayList<org.apache.lucene.analysis.Token>(unique);

    list = fixed;

    // add any missing locales back to the tokens

    if (localePrefix.length() > 0) {
        for (int j = 0; j < list.size(); j++) {
            org.apache.lucene.analysis.Token currentToken = list.get(j);
            String termText = currentToken.toString();
            currentToken.setEmpty();
            currentToken.append(localePrefix + termText);
        }
    }

    SchemaField sf = schema.getField(field);
    TokenizerChain tokenizerChain = (sf.getType().getQueryAnalyzer() instanceof TokenizerChain)
            ? ((TokenizerChain) sf.getType().getQueryAnalyzer())
            : null;
    boolean isShingled = false;
    if (tokenizerChain != null) {
        for (TokenFilterFactory factory : tokenizerChain.getTokenFilterFactories()) {
            if (factory instanceof ShingleFilterFactory) {
                isShingled = true;
                break;
            }
        }
    }
    AlfrescoAnalyzerWrapper analyzerWrapper = (sf.getType()
            .getQueryAnalyzer() instanceof AlfrescoAnalyzerWrapper)
                    ? ((AlfrescoAnalyzerWrapper) sf.getType().getQueryAnalyzer())
                    : null;
    if (analyzerWrapper != null) {
        // assume if there are no term positions it is shingled ....
        isShingled = true;
    }

    boolean forceConjuncion = rerankPhase == RerankPhase.QUERY_PHASE;

    if (list.size() == 0)
        return null;
    else if (list.size() == 1) {
        nextToken = list.get(0);
        String termText = nextToken.toString();
        if (!isNumeric && (termText.contains("*") || termText.contains("?"))) {
            return newWildcardQuery(new Term(field, termText));
        } else {
            return newTermQuery(new Term(field, termText));
        }
    } else {
        if (severalTokensAtSamePosition) {
            if (positionCount == 1) {
                // no phrase query:
                BooleanQuery q = newBooleanQuery(true);
                for (int i = 0; i < list.size(); i++) {
                    Query currentQuery;
                    nextToken = list.get(i);
                    String termText = nextToken.toString();
                    if (termText.contains("*") || termText.contains("?")) {
                        currentQuery = newWildcardQuery(new Term(field, termText));
                    } else {
                        currentQuery = newTermQuery(new Term(field, termText));
                    }
                    q.add(currentQuery, BooleanClause.Occur.SHOULD);
                }
                return q;
            } else if (forceConjuncion) {
                BooleanQuery or = new BooleanQuery();

                for (LinkedList<org.apache.lucene.analysis.Token> tokenSequence : fixedTokenSequences) {
                    BooleanQuery and = new BooleanQuery();
                    for (int i = 0; i < tokenSequence.size(); i++) {
                        nextToken = (org.apache.lucene.analysis.Token) tokenSequence.get(i);
                        String termText = nextToken.toString();

                        Term term = new Term(field, termText);
                        if ((termText != null) && (termText.contains("*") || termText.contains("?"))) {
                            org.apache.lucene.search.WildcardQuery wildQuery = new org.apache.lucene.search.WildcardQuery(
                                    term);
                            and.add(wildQuery, Occur.MUST);
                        } else {
                            TermQuery termQuery = new TermQuery(term);
                            and.add(termQuery, Occur.MUST);
                        }
                    }
                    if (and.clauses().size() > 0) {
                        or.add(and, Occur.SHOULD);
                    }
                }
                return or;
            }
            // shingle
            else if (sf.omitPositions() && isShingled) {

                ArrayList<org.apache.lucene.analysis.Token> nonContained = getNonContained(list);
                Query currentQuery;

                BooleanQuery weakPhrase = new BooleanQuery();
                for (org.apache.lucene.analysis.Token shingleToken : nonContained) {
                    String termText = shingleToken.toString();
                    Term term = new Term(field, termText);

                    if ((termText != null) && (termText.contains("*") || termText.contains("?"))) {
                        currentQuery = new org.apache.lucene.search.WildcardQuery(term);
                    } else {
                        currentQuery = new TermQuery(term);
                    }
                    weakPhrase.add(currentQuery, Occur.MUST);
                }

                return weakPhrase;

            }
            // Consider if we can use a multi-phrase query (e.g. for synonym use rather than WordDelimiterFilterFactory)
            else if (canUseMultiPhraseQuery(fixedTokenSequences)) {
                // phrase query:
                MultiPhraseQuery mpq = newMultiPhraseQuery();
                mpq.setSlop(internalSlop);
                ArrayList<Term> multiTerms = new ArrayList<Term>();
                int position = 0;
                for (int i = 0; i < list.size(); i++) {
                    nextToken = list.get(i);
                    String termText = nextToken.toString();

                    Term term = new Term(field, termText);
                    if ((termText != null) && (termText.contains("*") || termText.contains("?"))) {
                        throw new IllegalStateException("Wildcards are not allowed in multi phrase anymore");
                    } else {
                        multiTerms.add(term);
                    }

                    if (nextToken.getPositionIncrement() > 0 && multiTerms.size() > 0) {
                        if (getEnablePositionIncrements()) {
                            mpq.add(multiTerms.toArray(new Term[0]), position);
                        } else {
                            mpq.add(multiTerms.toArray(new Term[0]));
                        }
                        checkTermCount(field, queryText, mpq);
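                        // ArrayList.clear(): empty the accumulated terms so the next position starts fresh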
                        multiTerms.clear();
                    }
                    position += nextToken.getPositionIncrement();

                }
                if (getEnablePositionIncrements()) {
                    if (multiTerms.size() > 0) {
                        mpq.add(multiTerms.toArray(new Term[0]), position);
                    }
                    //                        else
                    //                        {
                    //                            mpq.add(new Term[] { new Term(field, "\u0000") }, position);
                    //                        }
                } else {
                    if (multiTerms.size() > 0) {
                        mpq.add(multiTerms.toArray(new Term[0]));
                    }
                    //                        else
                    //                        {
                    //                            mpq.add(new Term[] { new Term(field, "\u0000") });
                    //                        }
                }
                checkTermCount(field, queryText, mpq);
                return mpq;

            }
            // Word delimiter factory and other odd things generate complex token patterns
            // Smart-skip token sequences with small tokens that generate too many wildcards
            // Fall back to the larger pattern
            // e.g. Site1* will not do (S ite 1*) or (Site 1*) if 1* matches too much; (S ite1*) and (Site1*) will still be OK
            // If we skip all (for just 1* in the input) this is still an issue.
            else {

                return generateSpanOrQuery(field, fixedTokenSequences);

            }
        } else {
            if (forceConjuncion) {
                BooleanQuery or = new BooleanQuery();

                for (LinkedList<org.apache.lucene.analysis.Token> tokenSequence : fixedTokenSequences) {
                    BooleanQuery and = new BooleanQuery();
                    for (int i = 0; i < tokenSequence.size(); i++) {
                        nextToken = (org.apache.lucene.analysis.Token) tokenSequence.get(i);
                        String termText = nextToken.toString();

                        Term term = new Term(field, termText);
                        if ((termText != null) && (termText.contains("*") || termText.contains("?"))) {
                            org.apache.lucene.search.WildcardQuery wildQuery = new org.apache.lucene.search.WildcardQuery(
                                    term);
                            and.add(wildQuery, Occur.MUST);
                        } else {
                            TermQuery termQuery = new TermQuery(term);
                            and.add(termQuery, Occur.MUST);
                        }
                    }
                    if (and.clauses().size() > 0) {
                        or.add(and, Occur.SHOULD);
                    }
                }
                return or;
            } else {
                SpanQuery spanQuery = null;
                SpanOrQuery atSamePosition = new SpanOrQuery();
                int gap = 0;
                for (int i = 0; i < list.size(); i++) {
                    nextToken = list.get(i);
                    String termText = nextToken.toString();
                    Term term = new Term(field, termText);
                    if (getEnablePositionIncrements()) {
                        SpanQuery nextSpanQuery;
                        if ((termText != null) && (termText.contains("*") || termText.contains("?"))) {
                            org.apache.lucene.search.WildcardQuery wildQuery = new org.apache.lucene.search.WildcardQuery(
                                    term);
                            SpanMultiTermQueryWrapper wrapper = new SpanMultiTermQueryWrapper<>(wildQuery);
                            wrapper.setRewriteMethod(
                                    new TopTermsSpanBooleanQueryRewrite(topTermSpanRewriteLimit));
                            nextSpanQuery = wrapper;
                        } else {
                            nextSpanQuery = new SpanTermQuery(term);
                        }
                        if (gap == 0) {
                            atSamePosition.addClause(nextSpanQuery);
                        } else {
                            if (atSamePosition.getClauses().length == 0) {
                                if (spanQuery == null) {
                                    spanQuery = nextSpanQuery;
                                } else {
                                    spanQuery = new SpanNearQuery(new SpanQuery[] { spanQuery, nextSpanQuery },
                                            (gap - 1) + internalSlop, internalSlop < 2);
                                }
                                atSamePosition = new SpanOrQuery();
                            } else if (atSamePosition.getClauses().length == 1) {
                                if (spanQuery == null) {
                                    spanQuery = atSamePosition.getClauses()[0];
                                } else {
                                    spanQuery = new SpanNearQuery(
                                            new SpanQuery[] { spanQuery, atSamePosition.getClauses()[0] },
                                            (gap - 1) + internalSlop, internalSlop < 2);
                                }
                                atSamePosition = new SpanOrQuery();
                                atSamePosition.addClause(nextSpanQuery);
                            } else {
                                if (spanQuery == null) {
                                    spanQuery = atSamePosition;
                                } else {
                                    spanQuery = new SpanNearQuery(new SpanQuery[] { spanQuery, atSamePosition },
                                            (gap - 1) + internalSlop, internalSlop < 2);
                                }
                                atSamePosition = new SpanOrQuery();
                                atSamePosition.addClause(nextSpanQuery);
                            }
                        }
                        gap = nextToken.getPositionIncrement();
                    } else {
                        SpanQuery nextSpanQuery;
                        if ((termText != null) && (termText.contains("*") || termText.contains("?"))) {
                            org.apache.lucene.search.WildcardQuery wildQuery = new org.apache.lucene.search.WildcardQuery(
                                    term);
                            SpanMultiTermQueryWrapper wrapper = new SpanMultiTermQueryWrapper<>(wildQuery);
                            wrapper.setRewriteMethod(
                                    new TopTermsSpanBooleanQueryRewrite(topTermSpanRewriteLimit));
                            nextSpanQuery = wrapper;
                        } else {
                            nextSpanQuery = new SpanTermQuery(term);
                        }
                        if (spanQuery == null) {
                            spanQuery = new SpanOrQuery();
                            ((SpanOrQuery) spanQuery).addClause(nextSpanQuery);
                        } else {
                            ((SpanOrQuery) spanQuery).addClause(nextSpanQuery);
                        }
                    }
                }
                if (atSamePosition.getClauses().length == 0) {
                    return spanQuery;
                } else if (atSamePosition.getClauses().length == 1) {
                    if (spanQuery == null) {
                        spanQuery = atSamePosition.getClauses()[0];
                    } else {
                        spanQuery = new SpanNearQuery(
                                new SpanQuery[] { spanQuery, atSamePosition.getClauses()[0] },
                                (gap - 1) + internalSlop, internalSlop < 2);
                    }
                    return spanQuery;
                } else {
                    if (spanQuery == null) {
                        spanQuery = atSamePosition;
                    } else {
                        spanQuery = new SpanNearQuery(new SpanQuery[] { spanQuery, atSamePosition },
                                (gap - 1) + internalSlop, internalSlop < 2);
                    }
                    return spanQuery;
                }
            }
        }
    }
}

From source file:com.mimp.controllers.main.java

@RequestMapping(value = "/MainEditarEstudio", method = RequestMethod.POST)
public ModelAndView EditarEstudio(ModelMap map, HttpSession session, @RequestParam("orden") String orden) {
    Personal usuario = (Personal) session.getAttribute("usuario");
    if (usuario == null) {
        String mensaje = "La sesin ha finalizado. Favor identificarse nuevamente";
        map.addAttribute("mensaje", mensaje);
        return new ModelAndView("login", map);
    }

    ArrayList<EstudioCaso> listaEstudiosCaso = new ArrayList();
    listaEstudiosCaso = servicioEtapa.listaExpedientesDeEstudio(orden);

    map.put("df", df);

    ArrayList<Long> allID = new ArrayList();
    ArrayList<Nna> listaDeNna = new ArrayList();
    allID = servicioEtapa.listaNnaDeEstudio(orden);
    if (!allID.isEmpty()) {
        for (Long id : allID) {
            Nna nnaInfo = ServicioMain.getTodosDatosNna(id);
            listaDeNna.add(nnaInfo);
        }
    }
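    // ArrayList.clear(): the collected IDs have been consumed, so empty the list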
    allID.clear();
    map.put("listaNna", listaDeNna);
    map.put("listaEstudios", listaEstudiosCaso);
    return new ModelAndView("/Personal/nna/edit_estudio", map);

}

From source file:com.mimp.controllers.main.java

@RequestMapping(value = "/MainGuardarFechaSolicitud", method = RequestMethod.GET)
public ModelAndView MainGuardarFechaSolicitud_GET(ModelMap map, HttpSession session) {
    String orden = "";
    String fechaSolicitud = "";
    long idExpFam = 0;
    try {
        orden = (String) session.getAttribute("orden");
        fechaSolicitud = (String) session.getAttribute("fechaSolicitud");
        idExpFam = Long.parseLong(session.getAttribute("idExpFam").toString());
    } catch (Exception ex) {
        return new ModelAndView("redirect:/inicioper", map);
    }
    session.removeAttribute("orden");
    session.removeAttribute("fechaSolicitud");
    session.removeAttribute("idExpFam");

    Personal usuario = (Personal) session.getAttribute("usuario");
    if (usuario == null) {
        String mensaje = "La sesin ha finalizado. Favor identificarse nuevamente";
        map.addAttribute("mensaje", mensaje);
        return new ModelAndView("login", map);
    }

    ArrayList<Long> allID = new ArrayList();
    ArrayList<Nna> listaDeNna = new ArrayList();
    allID = servicioEtapa.listaNnaDeEstudio(orden);
    if (!allID.isEmpty()) {
        for (Long id : allID) {
            Nna nnaInfo = ServicioMain.getTodosDatosNna(id);
            listaDeNna.add(nnaInfo);
        }
    }

    allID.clear();
    for (Nna nna : listaDeNna) {
        EstudioCaso tempEst = ServicioMain.getEstudioCasoEspecifico(nna.getIdnna(), idExpFam, orden);
        if (fechaSolicitud != null && !fechaSolicitud.equals("")) {
            tempEst.setFechaSolAdop(df.stringToDate(fechaSolicitud));
        } else if (fechaSolicitud == null || fechaSolicitud.equals("")) {
            tempEst.setFechaSolAdop(null);
        }
        ServicioMain.updateEstudio(tempEst);
    }
    map.put("df", df);
    map.put("listaNna", listaDeNna);
    map.put("listaEstudios", servicioEtapa.listaExpedientesDeEstudio(orden));
    return new ModelAndView("/Personal/nna/edit_estudio", map);

}

From source file:com.sentaroh.android.SMBExplorer.SMBExplorerMain.java

public void scanRemoteNetworkDlg(final NotifyEvent p_ntfy, String port_number) {
    final Dialog dialog = new Dialog(mContext);
    dialog.requestWindowFeature(Window.FEATURE_NO_TITLE);
    dialog.setCanceledOnTouchOutside(false);
    dialog.setContentView(R.layout.scan_remote_ntwk_dlg);
    final Button btn_scan = (Button) dialog.findViewById(R.id.scan_remote_ntwk_btn_ok);
    final Button btn_cancel = (Button) dialog.findViewById(R.id.scan_remote_ntwk_btn_cancel);
    final TextView tvmsg = (TextView) dialog.findViewById(R.id.scan_remote_ntwk_msg);
    final TextView tv_result = (TextView) dialog.findViewById(R.id.scan_remote_ntwk_scan_result_title);
    tvmsg.setText(mContext.getString(R.string.msgs_scan_ip_address_press_scan_btn));
    tv_result.setVisibility(TextView.GONE);

    final String from = getLocalIpAddress();
    String subnet = from.substring(0, from.lastIndexOf("."));
    String subnet_o1, subnet_o2, subnet_o3;
    subnet_o1 = subnet.substring(0, subnet.indexOf("."));
    subnet_o2 = subnet.substring(subnet.indexOf(".") + 1, subnet.lastIndexOf("."));
    subnet_o3 = subnet.substring(subnet.lastIndexOf(".") + 1, subnet.length());
    final EditText baEt1 = (EditText) dialog.findViewById(R.id.scan_remote_ntwk_begin_address_o1);
    final EditText baEt2 = (EditText) dialog.findViewById(R.id.scan_remote_ntwk_begin_address_o2);
    final EditText baEt3 = (EditText) dialog.findViewById(R.id.scan_remote_ntwk_begin_address_o3);
    final EditText baEt4 = (EditText) dialog.findViewById(R.id.scan_remote_ntwk_begin_address_o4);
    final EditText eaEt4 = (EditText) dialog.findViewById(R.id.scan_remote_ntwk_end_address_o4);
    baEt1.setText(subnet_o1);
    baEt2.setText(subnet_o2);
    baEt3.setText(subnet_o3);
    baEt4.setText("1");
    baEt4.setSelection(1);
    eaEt4.setText("254");
    baEt4.requestFocus();

    final CheckBox cb_use_port_number = (CheckBox) dialog.findViewById(R.id.scan_remote_ntwk_use_port);
    final EditText et_port_number = (EditText) dialog.findViewById(R.id.scan_remote_ntwk_port_number);

    CommonDialog.setDlgBoxSizeLimit(dialog, true);

    if (port_number.equals("")) {
        et_port_number.setEnabled(false);
        cb_use_port_number.setChecked(false);
    } else {
        et_port_number.setEnabled(true);
        et_port_number.setText(port_number);
        cb_use_port_number.setChecked(true);
    }
    cb_use_port_number.setOnCheckedChangeListener(new OnCheckedChangeListener() {
        @Override
        public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
            et_port_number.setEnabled(isChecked);
        }
    });

    final NotifyEvent ntfy_lv_click = new NotifyEvent(mContext);
    ntfy_lv_click.setListener(new NotifyEventListener() {
        @Override
        public void positiveResponse(Context c, Object[] o) {
            dialog.dismiss();
            p_ntfy.notifyToListener(true, o);
        }

        @Override
        public void negativeResponse(Context c, Object[] o) {
        }
    });

    final ArrayList<ScanAddressResultListItem> ipAddressList = new ArrayList<ScanAddressResultListItem>();
    //      ScanAddressResultListItem li=new ScanAddressResultListItem();
    //      li.server_name=mContext.getString(R.string.msgs_ip_address_no_address);
    //      ipAddressList.add(li);
    final ListView lv = (ListView) dialog.findViewById(R.id.scan_remote_ntwk_scan_result_list);
    final AdapterScanAddressResultList adap = new AdapterScanAddressResultList(mContext,
            R.layout.scan_address_result_list_item, ipAddressList, ntfy_lv_click);
    lv.setAdapter(adap);
    lv.setScrollingCacheEnabled(false);
    lv.setScrollbarFadingEnabled(false);

    // SCAN button
    btn_scan.setOnClickListener(new View.OnClickListener() {
        public void onClick(View v) {
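            // ArrayList.clear(): discard any results from a previous scan before starting a new one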
            ipAddressList.clear();
            NotifyEvent ntfy = new NotifyEvent(mContext);
            ntfy.setListener(new NotifyEventListener() {
                @Override
                public void positiveResponse(Context c, Object[] o) {
                    if (ipAddressList.size() < 1) {
                        tvmsg.setText(mContext.getString(R.string.msgs_scan_ip_address_not_detected));
                        tv_result.setVisibility(TextView.GONE);
                    } else {
                        tvmsg.setText(mContext.getString(R.string.msgs_scan_ip_address_select_detected_host));
                        tv_result.setVisibility(TextView.VISIBLE);
                    }
                    //                   adap.clear();
                    //                   for (int i=0;i<ipAddressList.size();i++) 
                    //                      adap.add(ipAddressList.get(i));
                }

                @Override
                public void negativeResponse(Context c, Object[] o) {
                }

            });
            if (auditScanAddressRangeValue(dialog)) {
                tv_result.setVisibility(TextView.GONE);
                String ba1 = baEt1.getText().toString();
                String ba2 = baEt2.getText().toString();
                String ba3 = baEt3.getText().toString();
                String ba4 = baEt4.getText().toString();
                String ea4 = eaEt4.getText().toString();
                String subnet = ba1 + "." + ba2 + "." + ba3;
                int begin_addr = Integer.parseInt(ba4);
                int end_addr = Integer.parseInt(ea4);
                scanRemoteNetwork(dialog, lv, adap, ipAddressList, subnet, begin_addr, end_addr, ntfy);
            } else {
                //error
            }
        }
    });

    // CANCEL button
    btn_cancel.setOnClickListener(new View.OnClickListener() {
        public void onClick(View v) {
            dialog.dismiss();
            p_ntfy.notifyToListener(false, null);
        }
    });
    // Dialog cancel
    dialog.setOnCancelListener(new Dialog.OnCancelListener() {
        @Override
        public void onCancel(DialogInterface arg0) {
            btn_cancel.performClick();
        }
    });
    dialog.show();

}

From source file:com.mimp.controllers.main.java

@RequestMapping(value = "/MainGenerarDesignacionPrioritario", method = RequestMethod.GET)
public ModelAndView MainGenerarDesignacionPrioritario_GET(ModelMap map, HttpSession session) {
    String orden = "";
    String numDesig = "";
    long idExpFam = 0;
    String fechaPropuesta = "";
    try {
        orden = (String) session.getAttribute("orden");
        numDesig = (String) session.getAttribute("numDesig");
        idExpFam = Long.parseLong(session.getAttribute("idExpFam").toString());
        fechaPropuesta = (String) session.getAttribute("fechaPropuesta");
    } catch (Exception ex) {
        return new ModelAndView("redirect:/inicioper", map);
    }
    session.removeAttribute("orden");
    session.removeAttribute("numDesig");
    session.removeAttribute("idExpFam");
    session.removeAttribute("fechaPropuesta");

    Personal usuario = (Personal) session.getAttribute("usuario");
    if (usuario == null) {
        String mensaje = "La sesin ha finalizado. Favor identificarse nuevamente";
        map.addAttribute("mensaje", mensaje);
        return new ModelAndView("login", map);
    }

    if (numDesig == null || numDesig.equals("") || fechaPropuesta == null || fechaPropuesta.equals("")) {
        ArrayList<EstudioCaso> listaEstudiosCaso = new ArrayList();
        listaEstudiosCaso = servicioEtapa.listaExpedientesDeEstudio(orden);

        ArrayList<Long> allID = new ArrayList();
        ArrayList<Nna> listaDeNna = new ArrayList();
        allID = servicioEtapa.listaNnaDeEstudio(orden);
        if (!allID.isEmpty()) {
            for (Long id : allID) {
                Nna nnaInfo = ServicioMain.getTodosDatosNna(id);
                listaDeNna.add(nnaInfo);
            }
        }

        allID.clear();
        map.put("mensaje", "Debe llenar los datos correctamente");
        map.put("df", df);
        map.put("listaNna", listaDeNna);
        map.put("listaEstudios", listaEstudiosCaso);
        return new ModelAndView("/Personal/nna/edit_estudio", map);

    } else {
        ArrayList<Long> allID = new ArrayList<>();
        ArrayList<Nna> listaDeNna = new ArrayList<>();
        allID = servicioEtapa.listaNnaDeEstudio(orden);
        if (!allID.isEmpty()) {
            for (Long id : allID) {
                Nna nnaInfo = ServicioMain.getTodosDatosNna(id);
                listaDeNna.add(nnaInfo);
            }
        }

        allID.clear();

        for (Nna nna : listaDeNna) {
            EstudioCaso tempEst = ServicioMain.getEstudioCasoEspecifico(nna.getIdnna(), idExpFam, orden);
            long nsol = 0;
            tempEst.setNSolicitud(nsol);
            ServicioMain.updateEstudio(tempEst);
        }

        for (Nna nna : listaDeNna) {
            EstudioCaso tempEst = ServicioMain.getEstudioCasoEspecifico(nna.getIdnna(), idExpFam, orden);
            ExpedienteFamilia tempExp = tempEst.getExpedienteFamilia();
            Nna tempNna = nna;
            Designacion tempDesign = new Designacion();
            tempDesign.setExpedienteFamilia(tempExp);
            tempDesign.setNna(tempNna);
            tempDesign.setPersonal(usuario);
            tempDesign.setNDesignacion(numDesig);
            tempDesign.setTipoPropuesta("directa");
            tempDesign.setAceptacionConsejo(Short.parseShort("1"));
            Date fechaPropuestaDesig = df.stringToDate(fechaPropuesta);
            tempDesign.setFechaPropuesta(fechaPropuestaDesig);
            servicioEtapa.crearDesignacion(tempDesign);
            tempExp.setEstado("designado");
            servicioEtapa.updateExpedienteFamilia(tempExp);
            ExpedienteNna tempExpNna = ServicioNna.getExpNna(tempNna.getIdnna());
            tempExpNna.setEstado("desig");
            Date ahora = new Date();
            java.sql.Date sql = new java.sql.Date(ahora.getTime());
            tempExpNna.setFechaEstado(sql);
            ServicioNna.updateExpNna(tempExpNna);
        }
        map.put("listaDesignaciones", servicioEtapa.getListaDesignaciones());
        return new ModelAndView("/Personal/Buscador_etapa/etapa_designacion/etapa_designacion", map);
    }
}
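
The controller above fills allID with identifiers, expands each one into a full Nna via getTodosDatosNna, and then calls clear() on the ID list once the expanded objects are in listaDeNna. A minimal, self-contained sketch of that collect-expand-clear pattern; the Detail class and lookup method are hypothetical stand-ins, not part of the project above:

import java.util.ArrayList;
import java.util.Arrays;

public class CollectExpandClear {

    // Hypothetical detail object, standing in for Nna.
    static class Detail {
        final long id;
        Detail(long id) { this.id = id; }
    }

    // Hypothetical lookup, standing in for getTodosDatosNna(id).
    static Detail lookup(long id) {
        return new Detail(id);
    }

    public static void main(String[] args) {
        ArrayList<Long> ids = new ArrayList<>(Arrays.asList(1L, 2L, 3L));
        ArrayList<Detail> details = new ArrayList<>();

        for (Long id : ids) {
            details.add(lookup(id));
        }

        // The ids have served their purpose, so the temporary list is
        // emptied in place rather than left holding stale values.
        ids.clear();

        System.out.println(ids.isEmpty());  // true
        System.out.println(details.size()); // 3
    }
}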

From source file:carnero.cgeo.cgBase.java

public Long searchByGeocode(HashMap<String, String> parameters, int reason, boolean forceReload) {
    final cgSearch search = new cgSearch();
    String geocode = parameters.get("geocode");
    String guid = parameters.get("guid");

    if ((geocode == null || geocode.length() == 0) && ((guid == null || guid.length() == 0))) {
        Log.e(cgSettings.tag, "cgeoBase.searchByGeocode: No geocode nor guid given");
        return null;
    }

    if (!forceReload && reason == 0
            && (app.isOffline(geocode, guid) || app.isThere(geocode, guid, true, true))) {
        if ((geocode == null || geocode.length() == 0) && guid != null && guid.length() > 0) {
            geocode = app.getGeocode(guid);
        }

        ArrayList<cgCache> cacheList = new ArrayList<cgCache>();
        cacheList.add(app.getCacheByGeocode(geocode, true, true, true, true, true, true));
        search.addGeocode(geocode);

        app.addSearch(search, cacheList, false, reason);

        cacheList.clear();
        cacheList = null;

        return search.getCurrentId();
    }

    final String host = "www.geocaching.com";
    final String path = "/seek/cache_details.aspx";
    final String method = "GET";
    final HashMap<String, String> params = new HashMap<String, String>();
    if (geocode != null && geocode.length() > 0) {
        params.put("wp", geocode);
    } else if (guid != null && guid.length() > 0) {
        params.put("guid", guid);
    }
    params.put("decrypt", "y");
    params.put("log", "y"); // download logs (more than 5
    params.put("numlogs", "35"); // 35 logs

    String page = requestLogged(false, host, path, method, params, false, false, false);

    if (page == null || page.length() == 0) {
        if (app.isThere(geocode, guid, true, false)) {
            if ((geocode == null || geocode.length() == 0) && guid != null && guid.length() > 0) {
                Log.i(cgSettings.tag, "Loading old cache from cache.");

                geocode = app.getGeocode(guid);
            }

            final ArrayList<cgCache> cacheList = new ArrayList<cgCache>();
            cacheList.add(app.getCacheByGeocode(geocode));
            search.addGeocode(geocode);
            search.error = null;
            search.errorRetrieve = 0; // reset errors from previous failed request

            app.addSearch(search, cacheList, false, reason);

            cacheList.clear();

            return search.getCurrentId();
        }

        Log.e(cgSettings.tag, "cgeoBase.searchByGeocode: No data from server");
        return null;
    }

    final cgCacheWrap caches = parseCache(page, reason);
    if (caches == null || caches.cacheList == null || caches.cacheList.isEmpty()) {
        if (caches != null && caches.error != null && caches.error.length() > 0) {
            search.error = caches.error;
        }
        if (caches != null && caches.url != null && caches.url.length() > 0) {
            search.url = caches.url;
        }

        app.addSearch(search, null, true, reason);

        Log.e(cgSettings.tag, "cgeoBase.searchByGeocode: No cache parsed");
        return null;
    }

    if (app == null) {
        Log.e(cgSettings.tag, "cgeoBase.searchByGeocode: No application found");
        return null;
    }

    final ArrayList<cgCache> cacheList = new ArrayList<cgCache>();
    if (caches != null) {
        if (caches.error != null && caches.error.length() > 0) {
            search.error = caches.error;
        }
        if (caches.url != null && caches.url.length() > 0) {
            search.url = caches.url;
        }
        if (caches.viewstate != null && caches.viewstate.length() > 0) {
            search.viewstate = caches.viewstate;
        }
        if (caches.viewstate1 != null && caches.viewstate1.length() > 0) {
            search.viewstate1 = caches.viewstate1;
        }
        search.totalCnt = caches.totalCnt;

        for (cgCache cache : caches.cacheList) {
            search.addGeocode(cache.geocode);
            cacheList.add(cache);
        }
    }

    app.addSearch(search, cacheList, true, reason);

    page = null;
    cacheList.clear();

    return search.getCurrentId();
}
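
searchByGeocode builds a local cacheList, passes it to app.addSearch, and then calls clear() (and in one branch nulls the reference) so the method does not keep holding the caches it has already handed off. A minimal sketch of that hand-off-then-clear idiom, assuming the consumer copies the elements; the addSearch and store names here are placeholders, not the c:geo API:

import java.util.ArrayList;
import java.util.List;

public class HandOffThenClear {

    // Hypothetical consumer, standing in for app.addSearch(search, cacheList, ...).
    // It copies the elements, so clearing the caller's list afterwards is safe.
    static final List<String> store = new ArrayList<>();

    static void addSearch(List<String> results) {
        store.addAll(results);
    }

    public static void main(String[] args) {
        ArrayList<String> cacheList = new ArrayList<>();
        cacheList.add("GC0001");
        cacheList.add("GC0002");

        addSearch(cacheList);

        // Empty (and drop) the local buffer once the consumer has the data.
        cacheList.clear();
        cacheList = null;

        System.out.println(store.size()); // 2, unaffected by the clear()
    }
}

If the consumer kept a reference to the very same list instead of copying it, clear() would empty the consumer's view as well, so this idiom only works when the hand-off copies the elements.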

From source file:org.fhcrc.cpl.viewer.gui.MRMDialog.java

protected void createChartInPanelDaughterTasksOnly(XYPlot xyp) {
    XYSeries coloredDataset = transitionOnPlot.getCurrentDaughter().getGraphData();
    Paint daughterColor = Utils.paleColor((Color) transitionOnPlot.getCurrentDaughter().getGraphColor());
    ArrayList<XYLineAnnotation> coloredDaughters = new ArrayList<XYLineAnnotation>();
    //Trace calculated elution curves over data spikes
    if (transitionOnPlot.getElutionCurves() != null && !transitionOnPlot.getElutionCurves().isEmpty()) {
        MRMDaughter curDaughter = transitionOnPlot.getCurrentDaughter();
        ElutionCurveStrategy ecs = transitionOnPlot.getElutionCurves().get(curDaughter);
        //Is current daughter rejected?
        Boolean accepted = (Boolean) ((PeaksTableModel) peaksTable.getModel()).data[curDaughter
                .getElutionDataTableRow()][peaksData.Accept.colno];
        if (accepted == null || !accepted) {
            xyp.setBackgroundPaint(new Color(255, 230, 230));
        }
        List<ElutionCurve> ecl = ecs.getDaughterCurves();
        if (ecl != null) {
            for (ElutionCurve e : ecl) {
                List<Line2D.Double> ll2dd = e.getSegments();
                for (Line2D.Double l2dd : ll2dd) {
                    xyp.addAnnotation(Utils.line2Annotation(l2dd, new BasicStroke(2.0f),
                            ecs.isBestDaughterCurve(e) ? Color.BLACK : Color.LIGHT_GRAY));
                }
            }
        }
    }

    // If there is a valid "current" daughter draw the spikes in the daughter's color
    // as annotations (sensu JFree)

    if (coloredDataset != null) {
        int nOfPoints = coloredDataset.getItemCount();
        for (int i = 0; i < (nOfPoints - 1); i++) {
            XYDataItem p1 = coloredDataset.getDataItem(i);
            XYDataItem p2 = coloredDataset.getDataItem(i + 1);
            coloredDaughters.add(new XYLineAnnotation(p1.getX().doubleValue(), p1.getY().doubleValue(),
                    p2.getX().doubleValue(), p2.getY().doubleValue(), new BasicStroke(1.5f),
                    transitionOnPlot.getCurrentDaughter().getGraphColor())
            //                 new XYLineAnnotation(p1.getX().doubleValue(),p1.getY().doubleValue(),p2.getX().doubleValue(),p2.getY().doubleValue(),new BasicStroke(1.5f),daughterColor)
            );
        }
    }
    if (_traceAllFragments) {
        for (MRMDaughter d : transitionOnPlot.getDaughters().values()) {
            if (d == transitionOnPlot.getCurrentDaughter())
                continue;
            XYSeries curXYSeries = d.getContinDaughterData();
            if (curXYSeries == null || curXYSeries.getItemCount() == 0)
                continue;
            if (d.getBestElutionCurve() == null)
                continue;
            int nOfPoints = curXYSeries.getItemCount();
            for (int i = 0; i < (nOfPoints - 1); i++) {
                XYDataItem p1 = curXYSeries.getDataItem(i);
                XYDataItem p2 = curXYSeries.getDataItem(i + 1);
                coloredDaughters.add(
                        //                        new XYLineAnnotation(p1.getX().doubleValue(),p1.getY().doubleValue(),p2.getX().doubleValue(),p2.getY().doubleValue(),new BasicStroke(1f),Utils.paleColor((Color)d.getGraphColor()))
                        new XYLineAnnotation(p1.getX().doubleValue(), p1.getY().doubleValue(),
                                p2.getX().doubleValue(), p2.getY().doubleValue(), new BasicStroke(1f),
                                d.getGraphColor()));
            }
        }
    }
    if (coloredDaughters != null) {
        for (XYLineAnnotation xyla : coloredDaughters) {
            xyp.addAnnotation(xyla);
        }
    }
    coloredDaughters.clear();

    //Display L or H label in upper left hand corner
    Range xRange = xyp.getDomainAxis().getRange();
    Range yRange = xyp.getRangeAxis().getRange();
    XYTextAnnotation lab = new XYTextAnnotation(
            (String) ((PeaksTableModel) peaksTable.getModel()).data[transitionOnPlot.getCurrentDaughter()
                    .getElutionDataTableRow()][peaksData.Label.colno],
            xRange.getUpperBound() - (0.05 * xRange.getLength()),
            yRange.getUpperBound() - (0.05 * yRange.getLength()));
    lab.setFont(lab.getFont().deriveFont(Font.BOLD, 40.0F));
    xyp.addAnnotation(lab);

    XYTextAnnotation midMarker = new XYTextAnnotation("\u25BC",
            ((MRMTransition) transitionOnPlot).getCalcXatMaxYAllDaughters(),
            ((MRMTransition) transitionOnPlot).getCalcMaxYAllDaughters());
    midMarker.setPaint(Color.RED);
    midMarker.setFont(midMarker.getFont().deriveFont(Font.BOLD, 20F));
    XYTextAnnotation midMarkerOutline = new XYTextAnnotation("\u25BC",
            ((MRMTransition) transitionOnPlot).getCalcXatMaxYAllDaughters(),
            ((MRMTransition) transitionOnPlot).getCalcMaxYAllDaughters());
    midMarkerOutline.setPaint(Color.BLACK);
    midMarkerOutline.setFont(midMarker.getFont().deriveFont(Font.BOLD, 23F));
    xyp.addAnnotation(midMarkerOutline);
    xyp.addAnnotation(midMarker);
}
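
In createChartInPanelDaughterTasksOnly the annotations are accumulated in coloredDaughters, added one by one to the XYPlot, and then the list is cleared; the plot keeps its own references, so the annotations survive the clear(). A small sketch of that behaviour using plain strings and a second list in place of the JFreeChart plot (illustrative only):

import java.util.ArrayList;
import java.util.List;

public class ClearAfterTransfer {

    public static void main(String[] args) {
        ArrayList<String> buffer = new ArrayList<>();
        buffer.add("annotation-1");
        buffer.add("annotation-2");

        // Hand the elements to another container (the XYPlot in the
        // snippet above, a plain list here).
        List<String> plot = new ArrayList<>();
        for (String annotation : buffer) {
            plot.add(annotation);
        }

        // clear() empties only the buffer; the elements themselves are not
        // touched, so the receiving container still holds them.
        buffer.clear();

        System.out.println(buffer.size()); // 0
        System.out.println(plot.size());   // 2
    }
}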

From source file:com.mimp.controllers.main.java

@RequestMapping(value = "/MainActualizarEstudio", method = RequestMethod.GET)
public ModelAndView MainActualizarEstudio_GET(ModelMap map, HttpSession session) {
    String orden = "";
    Long[] idExpFam = null;
    String[] resultado = null;
    String[] fechaEst = null;
    int elegido = 0;
    try {
        orden = (String) session.getAttribute("orden");
        idExpFam = (Long[]) session.getAttribute("idExpFam");
        fechaEst = (String[]) session.getAttribute("fechaEst");
        resultado = (String[]) session.getAttribute("resultado");
        elegido = Integer.parseInt(session.getAttribute("elegido").toString());
    } catch (Exception ex) {
        return new ModelAndView("redirect:/inicioper", map);
    }
    session.removeAttribute("orden");
    session.removeAttribute("idExpFam");
    session.removeAttribute("resultado");
    session.removeAttribute("fechaEst");
    session.removeAttribute("elegido");

    Personal usuario = (Personal) session.getAttribute("usuario");
    if (usuario == null) {
        String mensaje = "La sesión ha finalizado. Favor identificarse nuevamente";
        map.addAttribute("mensaje", mensaje);
        return new ModelAndView("login", map);
    }

    ArrayList<Long> allID = new ArrayList<>();
    ArrayList<Nna> listaDeNna = new ArrayList<>();
    allID = servicioEtapa.listaNnaDeEstudio(orden);
    if (!allID.isEmpty()) {
        for (Long id : allID) {
            Nna nnaInfo = ServicioMain.getTodosDatosNna(id);
            listaDeNna.add(nnaInfo);
        }
    }
    allID.clear();

    if (resultado[elegido].equals("acep")) {

        for (Nna nna : listaDeNna) {
            EstudioCaso tempEst = ServicioMain.getEstudioCasoEspecifico(nna.getIdnna(), idExpFam[elegido],
                    orden);
            if (fechaEst[elegido] != null && !fechaEst[elegido].equals("")) {
                tempEst.setFechaEstudio(df.stringToDate(fechaEst[elegido]));
            } else if (fechaEst[elegido] == null || fechaEst[elegido].equals("")) {
                tempEst.setFechaEstudio(null);
            }
            tempEst.setResultado(resultado[elegido]);
            servicioEtapa.updateEstudioCaso(tempEst);

            String mensaje_log = "Se editó el estudio de caso con Orden: " + tempEst.getOrden() + " y ID: "
                    + String.valueOf(tempEst.getIdestudioCaso());
            String Tipo_registro = "Estu_Caso";

            //try{
            String Numero_registro = tempEst.getOrden();

            ServicioPersonal.InsertLog(usuario, Tipo_registro, Numero_registro, mensaje_log);
        }

        ArrayList<EstudioCaso> allEstudioCaso = new ArrayList<>();
        allEstudioCaso = servicioEtapa.getListaEstudioCasoOrden(orden);
        for (EstudioCaso estudioCaso : allEstudioCaso) {
            if (estudioCaso.getResultado() == null) {
                estudioCaso.setResultado("noobs");
                servicioEtapa.updateEstudioCaso(estudioCaso);
                ExpedienteFamilia tempExp = estudioCaso.getExpedienteFamilia();
                tempExp.setEstado("espera");
                servicioEtapa.updateExpedienteFamilia(tempExp);
            }
        }

        map.put("df", df);
        map.put("listaNna", listaDeNna);
        map.put("listaEstudios", servicioEtapa.listaExpedientesDeEstudio(orden));
        return new ModelAndView("/Personal/nna/edit_estudio", map);
    } else if (resultado[elegido].equals("noacep")) {
        for (Nna nna : listaDeNna) {
            EstudioCaso tempEst = ServicioMain.getEstudioCasoEspecifico(nna.getIdnna(), idExpFam[elegido],
                    orden);
            if (fechaEst[elegido] != null && !fechaEst[elegido].equals("")) {
                tempEst.setFechaEstudio(df.stringToDate(fechaEst[elegido]));
            } else if (fechaEst[elegido] == null || fechaEst[elegido].equals("")) {
                tempEst.setFechaEstudio(null);
            }

            tempEst.setResultado(resultado[elegido]);
            servicioEtapa.updateEstudioCaso(tempEst);
            ExpedienteFamilia tempExp = tempEst.getExpedienteFamilia();
            tempExp.setEstado("espera");
            servicioEtapa.updateExpedienteFamilia(tempExp);

            String mensaje_log = "Se editó el estudio de caso con Orden: " + tempEst.getOrden() + " y ID: "
                    + String.valueOf(tempEst.getIdestudioCaso());
            String Tipo_registro = "Estu_Caso";

            //try{
            String Numero_registro = tempEst.getOrden();

            ServicioPersonal.InsertLog(usuario, Tipo_registro, Numero_registro, mensaje_log);
        }
        map.put("df", df);
        map.put("listaNna", listaDeNna);
        map.put("listaEstudios", servicioEtapa.listaExpedientesDeEstudio(orden));
        return new ModelAndView("/Personal/nna/edit_estudio", map);
    } else if (resultado[elegido].equals("noobs")) {
        for (Nna nna : listaDeNna) {
            EstudioCaso tempEst = ServicioMain.getEstudioCasoEspecifico(nna.getIdnna(), idExpFam[elegido],
                    orden);
            tempEst.setResultado(resultado[elegido]);
            servicioEtapa.updateEstudioCaso(tempEst);
            ExpedienteFamilia tempExp = tempEst.getExpedienteFamilia();
            tempExp.setEstado("espera");
            servicioEtapa.updateExpedienteFamilia(tempExp);

            String mensaje_log = "Se editó el estudio de caso con Orden: " + tempEst.getOrden() + " y ID: "
                    + String.valueOf(tempEst.getIdestudioCaso());
            String Tipo_registro = "Estu_Caso";

            //try{
            String Numero_registro = tempEst.getOrden();

            ServicioPersonal.InsertLog(usuario, Tipo_registro, Numero_registro, mensaje_log);
        }
        map.put("df", df);
        map.put("listaNna", listaDeNna);
        map.put("listaEstudios", servicioEtapa.listaExpedientesDeEstudio(orden));
        return new ModelAndView("/Personal/nna/edit_estudio", map);
    } else {
        for (Nna nna : listaDeNna) {
            EstudioCaso tempEst = ServicioMain.getEstudioCasoEspecifico(nna.getIdnna(), idExpFam[elegido],
                    orden);
            if (fechaEst[elegido] != null && !fechaEst[elegido].equals("")) {
                tempEst.setFechaEstudio(df.stringToDate(fechaEst[elegido]));
            } else if (fechaEst[elegido] == null || fechaEst[elegido].equals("")) {
                tempEst.setFechaEstudio(null);
            }
            tempEst.setResultado(resultado[elegido]);
            servicioEtapa.updateEstudioCaso(tempEst);

            String mensaje_log = "Se editó el estudio de caso con Orden: " + tempEst.getOrden() + " y ID: "
                    + String.valueOf(tempEst.getIdestudioCaso());
            String Tipo_registro = "Estu_Caso";

            //try{
            String Numero_registro = tempEst.getOrden();

            ServicioPersonal.InsertLog(usuario, Tipo_registro, Numero_registro, mensaje_log);
        }
        map.put("df", df);
        map.put("listaNna", listaDeNna);
        map.put("listaEstudios", servicioEtapa.listaExpedientesDeEstudio(orden));
        return new ModelAndView("/Personal/nna/edit_estudio", map);
    }
}

From source file:com.mimp.controllers.main.java

@RequestMapping(value = "/MainEditarRevision", method = RequestMethod.POST)
public ModelAndView EditarRevision(ModelMap map, HttpSession session, @RequestParam("numero") String numero) {
    Personal usuario = (Personal) session.getAttribute("usuario");
    if (usuario == null) {
        String mensaje = "La sesión ha finalizado. Favor identificarse nuevamente";
        map.addAttribute("mensaje", mensaje);
        return new ModelAndView("login", map);
    }
    listaFamiliasEstudio.clear();
    ArrayList<Long> allID = new ArrayList<>();
    ArrayList<Nna> listaDeNna = new ArrayList<>();
    ArrayList<ExpedienteFamilia> listaDeExpedientes = new ArrayList<>();
    ArrayList<Entidad> listaDeEntidades = new ArrayList<>();
    allID = servicioEtapa.listaNnaDeRevision(numero);
    if (!allID.isEmpty()) {
        for (Long id : allID) {
            Nna nnaInfo = ServicioMain.getTodosDatosNna(id);
            listaDeNna.add(nnaInfo);
        }
    }
    allID.clear();
    allID = servicioEtapa.listaExpedientesDeRevision(numero);
    if (!allID.isEmpty()) {
        for (Long id : allID) {
            ExpedienteFamilia expInfo = ServicioMain.getInformacionRegistro(id);
            listaDeExpedientes.add(expInfo);
        }
    }
    listaFamiliasEstudio = listaDeExpedientes;
    allID.clear();

    allID = servicioEtapa.listaOrganismosDeRevision(numero);
    if (!allID.isEmpty()) {
        for (Long id : allID) {
            Entidad entidadInfo = ServicioPersonal.getEntidad(id);
            listaDeEntidades.add(entidadInfo);
        }
    }
    allID.clear();

    map.put("listaNna", listaDeNna);
    map.put("listaExpedientes", listaFamiliasEstudio);
    map.put("listaEntidades", listaDeEntidades);
    ArrayList<Revision> allRevisiones = new ArrayList<>();
    allRevisiones = ServicioMain.getListaRevisionesPorNumero(numero);
    map.put("listaRevisiones", allRevisiones);
    map.put("df", df);
    map.addAttribute("numero", numero);
    return new ModelAndView("/Personal/nna/edit_revision", map);

}
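
EditarRevision reuses a single allID list for three different queries, clearing it between fills so stale IDs from one query cannot bleed into the next. A compact sketch of that reuse pattern with hypothetical query methods in place of the servicioEtapa calls:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class ReuseWithClear {

    // Hypothetical stand-ins for the three id queries in the controller above.
    static List<Long> queryNnaIds()        { return Arrays.asList(1L, 2L); }
    static List<Long> queryExpedienteIds() { return Arrays.asList(10L, 11L, 12L); }
    static List<Long> queryEntidadIds()    { return Arrays.asList(20L); }

    public static void main(String[] args) {
        ArrayList<Long> ids = new ArrayList<>();

        ids.addAll(queryNnaIds());
        System.out.println("nna ids: " + ids);

        ids.clear(); // discard the previous batch before the next query
        ids.addAll(queryExpedienteIds());
        System.out.println("expediente ids: " + ids);

        ids.clear();
        ids.addAll(queryEntidadIds());
        System.out.println("entidad ids: " + ids);
    }
}

Note that the controller above actually reassigns allID to the result of each service call, so its intermediate clear() calls mostly document intent; in this sketch addAll refills the same list, which makes each clear() load-bearing.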

From source file:com.mimp.controllers.main.java

@RequestMapping(value = "/MainGuardarRevision", method = RequestMethod.GET)
public ModelAndView MainGuardarRevision(ModelMap map, HttpSession session) {
    String numero = "";
    String coments = "";
    try {
        numero = (String) session.getAttribute("numero");
        coments = (String) session.getAttribute("coments");
    } catch (Exception ex) {
        return new ModelAndView("redirect:/inicioper", map);
    }
    session.removeAttribute("numero");
    session.removeAttribute("coments");

    Personal usuario = (Personal) session.getAttribute("usuario");
    if (usuario == null) {
        String mensaje = "La sesión ha finalizado. Favor identificarse nuevamente";
        map.addAttribute("mensaje", mensaje);
        return new ModelAndView("login", map);
    }
    listaFamiliasEstudio.clear();
    ArrayList<Revision> allRevisiones = new ArrayList<>();
    allRevisiones = ServicioMain.getListaRevisionesPorNumero(numero);

    for (Revision revision : allRevisiones) {
        revision.setComentarios(coments);
        ServicioMain.crearRevision(revision);

        String mensaje_log = "El usuario, " + usuario.getUser() + " con ID: " + usuario.getIdpersonal()
                + ". Guard la " + "revisin con nmero: " + numero;
        String Tipo_registro = "Personal";

        try {
            String Numero_registro = String.valueOf(usuario.getIdpersonal());

            ServicioPersonal.InsertLog(usuario, Tipo_registro, Numero_registro, mensaje_log);
        } catch (Exception ex) {
        }
    }

    ArrayList<Long> allID = new ArrayList<>();
    ArrayList<Nna> listaDeNna = new ArrayList<>();
    ArrayList<ExpedienteFamilia> listaDeExpedientes = new ArrayList<>();
    ArrayList<Entidad> listaDeEntidades = new ArrayList<>();
    allID = servicioEtapa.listaNnaDeRevision(numero);
    if (!allID.isEmpty()) {
        for (Long id : allID) {
            Nna nnaInfo = ServicioMain.getTodosDatosNna(id);
            listaDeNna.add(nnaInfo);
        }
    }
    allID.clear();
    allID = servicioEtapa.listaExpedientesDeRevision(numero);
    if (!allID.isEmpty()) {
        for (Long id : allID) {
            ExpedienteFamilia expInfo = ServicioMain.getInformacionRegistro(id);
            listaDeExpedientes.add(expInfo);
        }
    }
    listaFamiliasEstudio = listaDeExpedientes;
    allID.clear();

    allID = servicioEtapa.listaOrganismosDeRevision(numero);
    if (!allID.isEmpty()) {
        for (Long id : allID) {
            Entidad entidadInfo = ServicioPersonal.getEntidad(id);
            listaDeEntidades.add(entidadInfo);
        }
    }
    allID.clear();

    map.put("listaNna", listaDeNna);
    map.put("listaExpedientes", listaFamiliasEstudio);
    map.put("listaEntidades", listaDeEntidades);
    allRevisiones = ServicioMain.getListaRevisionesPorNumero(numero);
    map.put("listaRevisiones", allRevisiones);
    map.put("df", df);
    map.addAttribute("numero", numero);
    return new ModelAndView("/Personal/nna/edit_revision", map);

}
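
Both EditarRevision and MainGuardarRevision start by calling listaFamiliasEstudio.clear() so that expedientes cached by a previous request are discarded before the field is repopulated. A minimal sketch of resetting a long-lived list before refilling it; the class and field names are illustrative, not taken from the controller:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class ResetSharedList {

    // Long-lived field, analogous to listaFamiliasEstudio in the controller.
    private final ArrayList<String> listaFamilias = new ArrayList<>();

    void handleRequest(List<String> freshData) {
        listaFamilias.clear();      // drop whatever the previous request left behind
        listaFamilias.addAll(freshData);
    }

    public static void main(String[] args) {
        ResetSharedList controller = new ResetSharedList();

        controller.handleRequest(Arrays.asList("exp-1", "exp-2"));
        System.out.println(controller.listaFamilias); // [exp-1, exp-2]

        controller.handleRequest(Arrays.asList("exp-3"));
        System.out.println(controller.listaFamilias); // [exp-3]
    }
}

Since a Spring MVC controller is normally a singleton, a mutable field shared across requests is not thread-safe; clear() resets the contents but does not make concurrent access safe.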