Example usage for java.util ArrayList clear

List of usage examples for java.util ArrayList clear

Introduction

On this page you can find usage examples for java.util.ArrayList.clear().

Prototype

public void clear() 

Document

Removes all of the elements from this list.
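
Before the real-world examples below, here is a minimal, self-contained sketch of the documented behavior (the class and variable names are illustrative only). Note that clear() empties the list in place, so the same instance can be reused without reallocating it.

import java.util.ArrayList;

public class ArrayListClearDemo {
    public static void main(String[] args) {
        ArrayList<String> items = new ArrayList<>();
        items.add("alpha");
        items.add("beta");
        System.out.println(items.size());    // 2

        // Removes all of the elements from this list.
        items.clear();

        System.out.println(items.size());    // 0
        System.out.println(items.isEmpty()); // true
    }
}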

Usage

From source file:com.ibm.bi.dml.lops.compile.Dag.java

/**
 * Method to generate MapReduce job instructions from a given set of nodes.
 *
 * @param execNodes
 * @param inst
 * @param writeinst
 * @param deleteinst
 * @param rmvarinst
 * @param jt
 * @throws LopsException
 * @throws DMLUnsupportedOperationException
 * @throws DMLRuntimeException
 */
@SuppressWarnings("unchecked")
public void generateMapReduceInstructions(ArrayList<N> execNodes, ArrayList<Instruction> inst,
        ArrayList<Instruction> writeinst, ArrayList<Instruction> deleteinst, ArrayList<Instruction> rmvarinst,
        JobType jt) throws LopsException, DMLUnsupportedOperationException, DMLRuntimeException {
    ArrayList<Byte> resultIndices = new ArrayList<Byte>();
    ArrayList<String> inputs = new ArrayList<String>();
    ArrayList<String> outputs = new ArrayList<String>();
    ArrayList<InputInfo> inputInfos = new ArrayList<InputInfo>();
    ArrayList<OutputInfo> outputInfos = new ArrayList<OutputInfo>();
    ArrayList<Long> numRows = new ArrayList<Long>();
    ArrayList<Long> numCols = new ArrayList<Long>();
    ArrayList<Long> numRowsPerBlock = new ArrayList<Long>();
    ArrayList<Long> numColsPerBlock = new ArrayList<Long>();
    ArrayList<String> mapperInstructions = new ArrayList<String>();
    ArrayList<String> randInstructions = new ArrayList<String>();
    ArrayList<String> recordReaderInstructions = new ArrayList<String>();
    int numReducers = 0;
    int replication = 1;
    ArrayList<String> inputLabels = new ArrayList<String>();
    ArrayList<String> outputLabels = new ArrayList<String>();
    ArrayList<Instruction> renameInstructions = new ArrayList<Instruction>();
    ArrayList<Instruction> variableInstructions = new ArrayList<Instruction>();
    ArrayList<Instruction> postInstructions = new ArrayList<Instruction>();
    ArrayList<Integer> MRJobLineNumbers = null;
    if (DMLScript.ENABLE_DEBUG_MODE) {
        MRJobLineNumbers = new ArrayList<Integer>();
    }

    ArrayList<Lop> inputLops = new ArrayList<Lop>();

    boolean cellModeOverride = false;

    /* Find the nodes that produce an output */
    ArrayList<N> rootNodes = new ArrayList<N>();
    getOutputNodes(execNodes, rootNodes, jt);
    if (LOG.isTraceEnabled())
        LOG.trace("# of root nodes = " + rootNodes.size());

    /* Remove transient writes that are simple copy of transient reads */
    if (jt == JobType.GMR || jt == JobType.GMRCELL) {
        ArrayList<N> markedNodes = new ArrayList<N>();
        // only keep data nodes that are results of some computation.
        for (int i = 0; i < rootNodes.size(); i++) {
            N node = rootNodes.get(i);
            if (node.getExecLocation() == ExecLocation.Data && ((Data) node).isTransient()
                    && ((Data) node).getOperationType() == OperationTypes.WRITE
                    && ((Data) node).getDataType() == DataType.MATRIX) {
                // no computation, just a copy
                if (node.getInputs().get(0).getExecLocation() == ExecLocation.Data
                        && ((Data) node.getInputs().get(0)).isTransient()
                        && node.getOutputParameters().getLabel()
                                .compareTo(node.getInputs().get(0).getOutputParameters().getLabel()) == 0) {
                    markedNodes.add(node);
                }
            }
        }
        // delete marked nodes
        rootNodes.removeAll(markedNodes);
        markedNodes.clear();
        if (rootNodes.isEmpty())
            return;
    }

    // structure that maps nodes to the indices that will be used in the instructions
    HashMap<N, Integer> nodeIndexMapping = new HashMap<N, Integer>();

    /* Determine all input data files */

    for (int i = 0; i < rootNodes.size(); i++) {
        getInputPathsAndParameters(rootNodes.get(i), execNodes, inputs, inputInfos, numRows, numCols,
                numRowsPerBlock, numColsPerBlock, nodeIndexMapping, inputLabels, inputLops, MRJobLineNumbers);
    }

    // In case of RAND job, instructions are defined in the input file
    if (jt == JobType.DATAGEN)
        randInstructions = inputs;

    int[] start_index = new int[1];
    start_index[0] = inputs.size();

    /* Get RecordReader Instructions */

    // currently, recordreader instructions are allowed only in GMR jobs
    if (jt == JobType.GMR || jt == JobType.GMRCELL) {
        for (int i = 0; i < rootNodes.size(); i++) {
            getRecordReaderInstructions(rootNodes.get(i), execNodes, inputs, recordReaderInstructions,
                    nodeIndexMapping, start_index, inputLabels, inputLops, MRJobLineNumbers);
            if (recordReaderInstructions.size() > 1)
                throw new LopsException("MapReduce job can only have a single recordreader instruction: "
                        + recordReaderInstructions.toString());
        }
    }

    /*
     * Handle cases when the job's output is FORCED to be in cell format.
     * - If there exists a cell input, then the output cannot be blocked.
     *   The only exceptions are jobType = REBLOCK/CSVREBLOCK (for obvious reasons),
     *   and jobType = RAND, since RandJob takes a special input file
     *   whose format should not be used to dictate the output format.
     * - If there exists a recordReader instruction.
     * - If jobType = GroupedAgg. This job can only run in cell mode.
     */

    if (jt != JobType.REBLOCK && jt != JobType.CSV_REBLOCK && jt != JobType.DATAGEN
            && jt != JobType.TRANSFORM) {
        for (int i = 0; i < inputInfos.size(); i++)
            if (inputInfos.get(i) == InputInfo.BinaryCellInputInfo
                    || inputInfos.get(i) == InputInfo.TextCellInputInfo)
                cellModeOverride = true;
    }

    if (!recordReaderInstructions.isEmpty() || jt == JobType.GROUPED_AGG)
        cellModeOverride = true;

    /* Get Mapper Instructions */

    for (int i = 0; i < rootNodes.size(); i++) {
        getMapperInstructions(rootNodes.get(i), execNodes, inputs, mapperInstructions, nodeIndexMapping,
                start_index, inputLabels, inputLops, MRJobLineNumbers);
    }

    if (LOG.isTraceEnabled()) {
        LOG.trace("    Input strings: " + inputs.toString());
        if (jt == JobType.DATAGEN)
            LOG.trace("    Rand instructions: " + getCSVString(randInstructions));
        if (jt == JobType.GMR)
            LOG.trace("    RecordReader instructions: " + getCSVString(recordReaderInstructions));
        LOG.trace("    Mapper instructions: " + getCSVString(mapperInstructions));
    }

    /* Get Shuffle and Reducer Instructions */

    ArrayList<String> shuffleInstructions = new ArrayList<String>();
    ArrayList<String> aggInstructionsReducer = new ArrayList<String>();
    ArrayList<String> otherInstructionsReducer = new ArrayList<String>();

    for (int i = 0; i < rootNodes.size(); i++) {
        N rn = rootNodes.get(i);
        int resultIndex = getAggAndOtherInstructions(rn, execNodes, shuffleInstructions, aggInstructionsReducer,
                otherInstructionsReducer, nodeIndexMapping, start_index, inputLabels, inputLops,
                MRJobLineNumbers);
        if (resultIndex == -1)
            throw new LopsException("Unexpected error in piggybacking!");

        if (rn.getExecLocation() == ExecLocation.Data
                && ((Data) rn).getOperationType() == Data.OperationTypes.WRITE && ((Data) rn).isTransient()
                && rootNodes.contains(rn.getInputs().get(0))) {
            // Both rn (a transient write) and its input are root nodes.
            // Instead of creating two copies of the data, simply generate a cpvar instruction 
            NodeOutput out = setupNodeOutputs(rootNodes.get(i), ExecType.MR, cellModeOverride, true);
            writeinst.addAll(out.getLastInstructions());
        } else {
            resultIndices.add(Byte.valueOf((byte) resultIndex));

            // setup output filenames and outputInfos and generate related instructions
            NodeOutput out = setupNodeOutputs(rootNodes.get(i), ExecType.MR, cellModeOverride, false);
            outputLabels.add(out.getVarName());
            outputs.add(out.getFileName());
            outputInfos.add(out.getOutInfo());
            if (LOG.isTraceEnabled()) {
                LOG.trace("    Output Info: " + out.getFileName() + ";"
                        + OutputInfo.outputInfoToString(out.getOutInfo()) + ";" + out.getVarName());
            }
            renameInstructions.addAll(out.getLastInstructions());
            variableInstructions.addAll(out.getPreInstructions());
            postInstructions.addAll(out.getPostInstructions());
        }

    }

    /* Determine if the output dimensions are known */

    byte[] resultIndicesByte = new byte[resultIndices.size()];
    for (int i = 0; i < resultIndicesByte.length; i++) {
        resultIndicesByte[i] = resultIndices.get(i).byteValue();
    }

    if (LOG.isTraceEnabled()) {
        LOG.trace("    Shuffle Instructions: " + getCSVString(shuffleInstructions));
        LOG.trace("    Aggregate Instructions: " + getCSVString(aggInstructionsReducer));
        LOG.trace("    Other instructions =" + getCSVString(otherInstructionsReducer));
        LOG.trace("    Output strings: " + outputs.toString());
        LOG.trace("    ResultIndices = " + resultIndices.toString());
    }

    /* Prepare the MapReduce job instruction */

    MRJobInstruction mr = new MRJobInstruction(jt);

    // check if this is a map-only job. If not, set the number of reducers
    if (!shuffleInstructions.isEmpty() || !aggInstructionsReducer.isEmpty()
            || !otherInstructionsReducer.isEmpty())
        numReducers = total_reducers;

    // set inputs, outputs, and other properties for the job 
    mr.setInputOutputLabels(getStringArray(inputLabels), getStringArray(outputLabels));
    mr.setOutputs(resultIndicesByte);
    mr.setDimsUnknownFilePrefix(getFilePath());

    mr.setNumberOfReducers(numReducers);
    mr.setReplication(replication);

    // set instructions for recordReader and mapper
    mr.setRecordReaderInstructions(getCSVString(recordReaderInstructions));
    mr.setMapperInstructions(getCSVString(mapperInstructions));

    //compute and set mapper memory requirements (for consistency of runtime piggybacking)
    if (jt == JobType.GMR) {
        double mem = 0;
        for (N n : execNodes)
            mem += computeFootprintInMapper(n);
        mr.setMemoryRequirements(mem);
    }

    if (jt == JobType.DATAGEN)
        mr.setRandInstructions(getCSVString(randInstructions));

    // set shuffle instructions
    mr.setShuffleInstructions(getCSVString(shuffleInstructions));

    // set reducer instruction
    mr.setAggregateInstructionsInReducer(getCSVString(aggInstructionsReducer));
    mr.setOtherInstructionsInReducer(getCSVString(otherInstructionsReducer));
    if (DMLScript.ENABLE_DEBUG_MODE) {
        // set line number information for each MR instruction
        mr.setMRJobInstructionsLineNumbers(MRJobLineNumbers);
    }

    /* Add the prepared instructions to output set */
    inst.addAll(variableInstructions);
    inst.add(mr);
    inst.addAll(postInstructions);
    deleteinst.addAll(renameInstructions);

    for (Lop l : inputLops) {
        if (DMLScript.ENABLE_DEBUG_MODE) {
            processConsumers((N) l, rmvarinst, deleteinst, (N) l);
        } else {
            processConsumers((N) l, rmvarinst, deleteinst, null);
        }
    }

}

From source file:com.amaze.carbonfilemanager.activities.MainActivity.java

/**
 * Returns all available SD-Cards in the system (including emulated).
 * <p>
 * Warning: Hack! Based on Android source code of version 4.3 (API 18),
 * because there is no standard way to get it.
 * TODO: Test on future Android versions 4.4+
 *
 * @return paths to all available SD-Cards in the system (including emulated)
 */
public synchronized ArrayList<String> getStorageDirectories() {
    // Final set of paths
    final ArrayList<String> rv = new ArrayList<>();
    // Primary physical SD-CARD (not emulated)
    final String rawExternalStorage = System.getenv("EXTERNAL_STORAGE");
    // All Secondary SD-CARDs (all exclude primary) separated by ":"
    final String rawSecondaryStoragesStr = System.getenv("SECONDARY_STORAGE");
    // Primary emulated SD-CARD
    final String rawEmulatedStorageTarget = System.getenv("EMULATED_STORAGE_TARGET");
    if (TextUtils.isEmpty(rawEmulatedStorageTarget)) {
        // Device has physical external storage; use plain paths.
        if (TextUtils.isEmpty(rawExternalStorage)) {
            // EXTERNAL_STORAGE undefined; falling back to default.
            rv.add("/storage/sdcard0");
        } else {
            rv.add(rawExternalStorage);
        }
    } else {
        // Device has emulated storage; external storage paths should have
        // userId burned into them.
        final String rawUserId;
        if (SDK_INT < Build.VERSION_CODES.JELLY_BEAN_MR1) {
            rawUserId = "";
        } else {
            final String path = Environment.getExternalStorageDirectory().getAbsolutePath();
            final String[] folders = DIR_SEPARATOR.split(path);
            final String lastFolder = folders[folders.length - 1];
            boolean isDigit = false;
            try {
                Integer.valueOf(lastFolder);
                isDigit = true;
            } catch (NumberFormatException ignored) {
            }
            rawUserId = isDigit ? lastFolder : "";
        }
        // /storage/emulated/0[1,2,...]
        if (TextUtils.isEmpty(rawUserId)) {
            rv.add(rawEmulatedStorageTarget);
        } else {
            rv.add(rawEmulatedStorageTarget + File.separator + rawUserId);
        }
    }
    // Add all secondary storages
    if (!TextUtils.isEmpty(rawSecondaryStoragesStr)) {
        // All Secondary SD-CARDs split into an array
        final String[] rawSecondaryStorages = rawSecondaryStoragesStr.split(File.pathSeparator);
        Collections.addAll(rv, rawSecondaryStorages);
    }
    if (SDK_INT >= Build.VERSION_CODES.M && checkStoragePermission())
        rv.clear();
    if (SDK_INT >= Build.VERSION_CODES.KITKAT) {
        String[] strings = FileUtil.getExtSdCardPathsForActivity(this);
        for (String s : strings) {
            File f = new File(s);
            if (!rv.contains(s) && Futils.canListFiles(f))
                rv.add(s);
        }
    }
    if (BaseActivity.rootMode)
        rv.add("/");
    File usb = getUsbDrive();
    if (usb != null && !rv.contains(usb.getPath()))
        rv.add(usb.getPath());

    if (SDK_INT >= Build.VERSION_CODES.KITKAT) {
        if (isUsbDeviceConnected())
            rv.add(OTGUtil.PREFIX_OTG + "/");
    }
    return rv;
}

From source file:com.common.log.newproxy.BizStaticFileAppender.java

/**
 * Publishes the previous day's check files: appends the name of any matching
 * ".bak" extension log plus an end marker to the check file, copies the files
 * next to the current log file, and renames the originals to mark them as
 * published.
 */
private void publishOldCheckFile() {
    String statPath = LogHelper.getLogRootPath() + FILE_SEP + "log" + FILE_SEP + CHECKFILE_PATH + FILE_SEP;
    File path = new File(statPath);

    File[] checkFiles = path.listFiles();
    if (checkFiles == null) { // no check files to publish
        return;
    }
    HashMap extLogMap = new HashMap();
    HashMap chkLogMap = new HashMap();
    ArrayList keyDate = new ArrayList();
    for (int i = 0; i < checkFiles.length; i++) {
        File oldChkFile = checkFiles[i];
        String tmpfileName = oldChkFile.getName();
        String currfilename = currFile.getName();
        if (!tmpfileName.startsWith(currfilename)) { // skip files unrelated to the current log file
            continue;
        }
        if (tmpfileName.endsWith(".bak")) { // rolled-over extension log
            //int index=tmpfileName.indexOf(".log");
            //String fileDate=tmpfileName.substring(index+4,index+4+10);
            //String bizName=tmpfileName.substring(0,index+4);
            int index = fileName.lastIndexOf(FILE_SEP);
            // the business log name is the last path segment of the configured file name
            String bizName = fileName.substring(index + 1);
            String fileDate = getUnidateFromFileName(tmpfileName, bizName, datePattern);
            extLogMap.put(bizName + fileDate, oldChkFile);
        } else if (tmpfileName.endsWith(".999999")) { // daily check file
            int index = fileName.lastIndexOf(FILE_SEP);
            // the business log name is the last path segment of the configured file name
            String bizName = fileName.substring(index + 1);
            String fileDate = getUnidateFromFileName(tmpfileName, bizName, stat_datepattern);
            keyDate.add(bizName + fileDate);
            // only the previous day's check file is published
            //String strCurrDate = StringUtils.toString(new Date(),"yyyyMMdd");
            String yesterday = getLastDate();
            if (yesterday.equals(fileDate)) {
                chkLogMap.put(bizName + fileDate, oldChkFile);
            }
        }
    }
    for (int i = 0; i < keyDate.size(); i++) { // publish each collected day
        String logDay = (String) keyDate.get(i);
        if (chkLogMap.containsKey(logDay)) { // only yesterday's check file was kept
            File checkFile = (File) chkLogMap.get(logDay);
            try {
                File targetFile = new File(currFile.getParentFile(), checkFile.getName());
                FileWriter fw = new FileWriter(checkFile, true);

                if (extLogMap.containsKey(logDay)) { // a matching extension log exists
                    File extFile = (File) extLogMap.get(logDay);
                    // record the extension log's name in the check file
                    fw.write(extFile.getName() + NEXT_LINE);
                    // copy the extension log next to the current log file
                    File targetExtFile = new File(currFile.getParentFile(), extFile.getName());
                    fileCopy(extFile, targetExtFile);

                    // rename the original extension log to mark it as published
                    File publishedExtFile = new File(extFile.getParentFile(),
                            extFile.getName() + PUBLISHED_CHECKFILE);
                    extFile.renameTo(publishedExtFile);

                }
                fw.write("999999" + NEXT_LINE);
                fw.flush();
                fw.close();
                fileCopy(checkFile, targetFile);
                // rename the original check file to mark it as published
                File publishedCheckFile = new File(checkFile.getParentFile(),
                        checkFile.getName() + PUBLISHED_CHECKFILE);
                checkFile.renameTo(publishedCheckFile);
            } catch (IOException ex) {
                ex.printStackTrace();
            }

        }
    }
    extLogMap.clear();
    extLogMap = null;
    chkLogMap.clear();
    chkLogMap = null;
    keyDate.clear();
    keyDate = null;

}

From source file:org.alfresco.repo.search.impl.lucene.AbstractLuceneQueryParser.java

@SuppressWarnings("unchecked")
protected Query getFieldQueryImpl(String field, String queryText, AnalysisMode analysisMode,
        LuceneFunction luceneFunction) throws ParseException {
    // Use the analyzer to get all the tokens, and then build a TermQuery,
    // PhraseQuery, or nothing based on the term count

    // TODO: Untokenised columns with functions require special handling

    if (luceneFunction != LuceneFunction.FIELD) {
        throw new UnsupportedOperationException(
                "Field queries are not supported on lucene functions (UPPER, LOWER, etc)");
    }

    // if the incoming string already has a language identifier we strip it off and add it back on again later

    String localePrefix = "";

    String toTokenise = queryText;

    if (queryText.startsWith("{")) {
        int position = queryText.indexOf("}");
        String language = queryText.substring(0, position + 1);
        Locale locale = new Locale(queryText.substring(1, position));
        String token = queryText.substring(position + 1);
        boolean found = false;
        if (!locale.toString().isEmpty()) {
            for (Locale current : Locale.getAvailableLocales()) {
                if (current.toString().equalsIgnoreCase(locale.toString())) {
                    found = true;
                    break;
                }
            }
        }
        if (found) {
            localePrefix = language;
            toTokenise = token;
        } else {
            toTokenise = token;
        }
    }

    String testText = toTokenise;
    boolean requiresMLTokenDuplication = false;
    String localeString = null;
    if (field.startsWith(PROPERTY_FIELD_PREFIX) && (localePrefix.length() == 0)) {
        if ((queryText.length() > 0) && (queryText.charAt(0) == '\u0000')) {
            int position = queryText.indexOf("\u0000", 1);
            testText = queryText.substring(position + 1);
            requiresMLTokenDuplication = true;
            localeString = queryText.substring(1, position);
        }
    }

    // find the positions of any escaped * and ? and ignore them

    Set<Integer> wildcardPoistions = getWildcardPositions(testText);

    TokenStream source;
    if ((localePrefix.length() == 0) || (wildcardPoistions.size() > 0)
            || (analysisMode == AnalysisMode.IDENTIFIER)) {
        source = getAnalyzer().tokenStream(field, new StringReader(toTokenise), analysisMode);
    } else {
        source = getAnalyzer().tokenStream(field, new StringReader(
                "\u0000" + localePrefix.substring(1, localePrefix.length() - 1) + "\u0000" + toTokenise),
                analysisMode);
        localePrefix = "";
    }

    ArrayList<org.apache.lucene.analysis.Token> list = new ArrayList<org.apache.lucene.analysis.Token>();
    org.apache.lucene.analysis.Token reusableToken = new org.apache.lucene.analysis.Token();
    org.apache.lucene.analysis.Token nextToken;
    int positionCount = 0;
    boolean severalTokensAtSamePosition = false;

    while (true) {
        try {
            nextToken = source.next(reusableToken);
        } catch (IOException e) {
            nextToken = null;
        }
        if (nextToken == null)
            break;
        list.add((org.apache.lucene.analysis.Token) nextToken.clone());
        if (nextToken.getPositionIncrement() != 0)
            positionCount += nextToken.getPositionIncrement();
        else
            severalTokensAtSamePosition = true;
    }
    try {
        source.close();
    } catch (IOException e) {
        // ignore
    }

    // add any alpha numeric wildcards that have been missed
    // Fixes most stop word and wild card issues

    for (int index = 0; index < testText.length(); index++) {
        char current = testText.charAt(index);
        if (((current == '*') || (current == '?')) && wildcardPoistions.contains(index)) {
            StringBuilder pre = new StringBuilder(10);
            if (index == 0) {
                // "*" and "?" at the start

                boolean found = false;
                for (int j = 0; j < list.size(); j++) {
                    org.apache.lucene.analysis.Token test = list.get(j);
                    if ((test.startOffset() <= 0) && (0 < test.endOffset())) {
                        found = true;
                        break;
                    }
                }
                if (!found && (testText.length() == 1)) {
                    // Add new token followed by * not given by the tokeniser
                    org.apache.lucene.analysis.Token newToken = new org.apache.lucene.analysis.Token(0, 0);
                    newToken.setTermBuffer("");
                    newToken.setType("ALPHANUM");
                    if (requiresMLTokenDuplication) {
                        Locale locale = I18NUtil.parseLocale(localeString);
                        MLAnalysisMode mlAnalysisMode = searchParameters.getMlAnalaysisMode() == null
                                ? defaultSearchMLAnalysisMode
                                : searchParameters.getMlAnalaysisMode();
                        MLTokenDuplicator duplicator = new MLTokenDuplicator(locale, mlAnalysisMode);
                        Iterator<org.apache.lucene.analysis.Token> it = duplicator.buildIterator(newToken);
                        if (it != null) {
                            int count = 0;
                            while (it.hasNext()) {
                                list.add(it.next());
                                count++;
                                if (count > 1) {
                                    severalTokensAtSamePosition = true;
                                }
                            }
                        }
                    }
                    // content
                    else {
                        list.add(newToken);
                    }
                }
            } else if (index > 0) {
                // Add * and ? back into any tokens from which it has been removed

                boolean tokenFound = false;
                for (int j = 0; j < list.size(); j++) {
                    org.apache.lucene.analysis.Token test = list.get(j);
                    if ((test.startOffset() <= index) && (index < test.endOffset())) {
                        if (requiresMLTokenDuplication) {
                            String termText = new String(test.termBuffer(), 0, test.termLength());
                            int position = termText.indexOf("}");
                            String language = termText.substring(0, position + 1);
                            String token = termText.substring(position + 1);
                            if (index >= test.startOffset() + token.length()) {
                                test.setTermBuffer(language + token + current);
                            }
                        } else {
                            if (index >= test.startOffset() + test.termLength()) {
                                test.setTermBuffer(test.term() + current);
                            }
                        }
                        tokenFound = true;
                        break;
                    }
                }

                if (!tokenFound) {
                    for (int i = index - 1; i >= 0; i--) {
                        char c = testText.charAt(i);
                        if (Character.isLetterOrDigit(c)) {
                            boolean found = false;
                            for (int j = 0; j < list.size(); j++) {
                                org.apache.lucene.analysis.Token test = list.get(j);
                                if ((test.startOffset() <= i) && (i < test.endOffset())) {
                                    found = true;
                                    break;
                                }
                            }
                            if (found) {
                                break;
                            } else {
                                pre.insert(0, c);
                            }
                        } else {
                            break;
                        }
                    }
                    if (pre.length() > 0) {
                        // Add new token followed by * not given by the tokeniser
                        org.apache.lucene.analysis.Token newToken = new org.apache.lucene.analysis.Token(
                                index - pre.length(), index);
                        newToken.setTermBuffer(pre.toString());
                        newToken.setType("ALPHANUM");
                        if (requiresMLTokenDuplication) {
                            Locale locale = I18NUtil.parseLocale(localeString);
                            MLAnalysisMode mlAnalysisMode = searchParameters.getMlAnalaysisMode() == null
                                    ? defaultSearchMLAnalysisMode
                                    : searchParameters.getMlAnalaysisMode();
                            MLTokenDuplicator duplicator = new MLTokenDuplicator(locale, mlAnalysisMode);
                            Iterator<org.apache.lucene.analysis.Token> it = duplicator.buildIterator(newToken);
                            if (it != null) {
                                int count = 0;
                                while (it.hasNext()) {
                                    list.add(it.next());
                                    count++;
                                    if (count > 1) {
                                        severalTokensAtSamePosition = true;
                                    }
                                }
                            }
                        }
                        // content
                        else {
                            list.add(newToken);
                        }
                    }
                }
            }

            StringBuilder post = new StringBuilder(10);
            if (index > 0) {
                for (int i = index + 1; i < testText.length(); i++) {
                    char c = testText.charAt(i);
                    if (Character.isLetterOrDigit(c)) {
                        boolean found = false;
                        for (int j = 0; j < list.size(); j++) {
                            org.apache.lucene.analysis.Token test = list.get(j);
                            if ((test.startOffset() <= i) && (i < test.endOffset())) {
                                found = true;
                                break;
                            }
                        }
                        if (found) {
                            break;
                        } else {
                            post.append(c);
                        }
                    } else {
                        break;
                    }
                }
                if (post.length() > 0) {
                    // Add new token followed by * not given by the tokeniser
                    org.apache.lucene.analysis.Token newToken = new org.apache.lucene.analysis.Token(index + 1,
                            index + 1 + post.length());
                    newToken.setTermBuffer(post.toString());
                    newToken.setType("ALPHANUM");
                    if (requiresMLTokenDuplication) {
                        Locale locale = I18NUtil.parseLocale(localeString);
                        MLAnalysisMode mlAnalysisMode = searchParameters.getMlAnalaysisMode() == null
                                ? defaultSearchMLAnalysisMode
                                : searchParameters.getMlAnalaysisMode();
                        MLTokenDuplicator duplicator = new MLTokenDuplicator(locale, mlAnalysisMode);
                        Iterator<org.apache.lucene.analysis.Token> it = duplicator.buildIterator(newToken);
                        if (it != null) {
                            int count = 0;
                            while (it.hasNext()) {
                                list.add(it.next());
                                count++;
                                if (count > 1) {
                                    severalTokensAtSamePosition = true;
                                }
                            }
                        }
                    }
                    // content
                    else {
                        list.add(newToken);
                    }
                }
            }

        }
    }

    Collections.sort(list, new Comparator<org.apache.lucene.analysis.Token>() {

        public int compare(Token o1, Token o2) {
            int dif = o1.startOffset() - o2.startOffset();
            if (dif != 0) {
                return dif;
            } else {
                return o2.getPositionIncrement() - o1.getPositionIncrement();
            }
        }
    });

    // Combined * and ? based strings - should redo the tokeniser

    // Build tokens by position

    LinkedList<LinkedList<org.apache.lucene.analysis.Token>> tokensByPosition = new LinkedList<LinkedList<org.apache.lucene.analysis.Token>>();
    LinkedList<org.apache.lucene.analysis.Token> currentList = null;
    for (org.apache.lucene.analysis.Token c : list) {
        if (c.getPositionIncrement() == 0) {
            if (currentList == null) {
                currentList = new LinkedList<org.apache.lucene.analysis.Token>();
                tokensByPosition.add(currentList);
            }
            currentList.add(c);
        } else {
            currentList = new LinkedList<org.apache.lucene.analysis.Token>();
            tokensByPosition.add(currentList);
            currentList.add(c);
        }
    }

    // Build all the token sequences and see which ones get strung together

    LinkedList<LinkedList<org.apache.lucene.analysis.Token>> allTokenSequences = new LinkedList<LinkedList<org.apache.lucene.analysis.Token>>();
    for (LinkedList<org.apache.lucene.analysis.Token> tokensAtPosition : tokensByPosition) {
        if (allTokenSequences.size() == 0) {
            for (org.apache.lucene.analysis.Token t : tokensAtPosition) {
                LinkedList<org.apache.lucene.analysis.Token> newEntry = new LinkedList<org.apache.lucene.analysis.Token>();
                newEntry.add(t);
                allTokenSequences.add(newEntry);
            }
        } else {
            LinkedList<LinkedList<org.apache.lucene.analysis.Token>> newAllTokeSequences = new LinkedList<LinkedList<org.apache.lucene.analysis.Token>>();

            FOR_FIRST_TOKEN_AT_POSITION_ONLY: for (org.apache.lucene.analysis.Token t : tokensAtPosition) {
                boolean tokenFoundSequence = false;
                for (LinkedList<org.apache.lucene.analysis.Token> tokenSequence : allTokenSequences) {
                    LinkedList<org.apache.lucene.analysis.Token> newEntry = new LinkedList<org.apache.lucene.analysis.Token>();
                    newEntry.addAll(tokenSequence);
                    if (newEntry.getLast().endOffset() <= t.startOffset()) {
                        newEntry.add(t);
                        tokenFoundSequence = true;
                    }
                    newAllTokeSequences.add(newEntry);
                }
                if (false == tokenFoundSequence) {
                    LinkedList<org.apache.lucene.analysis.Token> newEntry = new LinkedList<org.apache.lucene.analysis.Token>();
                    newEntry.add(t);
                    newAllTokeSequences.add(newEntry);
                }
                // Limit the max number of permutations we consider
                if (newAllTokeSequences.size() > 64) {
                    break FOR_FIRST_TOKEN_AT_POSITION_ONLY;
                }
            }
            allTokenSequences = newAllTokeSequences;
        }
    }

    // build the unique token sequences

    LinkedList<LinkedList<org.apache.lucene.analysis.Token>> fixedTokenSequences = new LinkedList<LinkedList<org.apache.lucene.analysis.Token>>();
    for (LinkedList<org.apache.lucene.analysis.Token> tokenSequence : allTokenSequences) {
        LinkedList<org.apache.lucene.analysis.Token> fixedTokenSequence = new LinkedList<org.apache.lucene.analysis.Token>();
        fixedTokenSequences.add(fixedTokenSequence);
        org.apache.lucene.analysis.Token replace = null;
        for (org.apache.lucene.analysis.Token c : tokenSequence) {
            if (replace == null) {
                StringBuilder prefix = new StringBuilder();
                for (int i = c.startOffset() - 1; i >= 0; i--) {
                    char test = testText.charAt(i);
                    if (((test == '*') || (test == '?')) && wildcardPoistions.contains(i)) {
                        prefix.insert(0, test);
                    } else {
                        break;
                    }
                }
                String pre = prefix.toString();
                if (requiresMLTokenDuplication) {
                    String termText = new String(c.termBuffer(), 0, c.termLength());
                    int position = termText.indexOf("}");
                    String language = termText.substring(0, position + 1);
                    String token = termText.substring(position + 1);
                    replace = new org.apache.lucene.analysis.Token(c.startOffset() - pre.length(),
                            c.endOffset());
                    replace.setTermBuffer(language + pre + token);
                    replace.setType(c.type());
                    replace.setPositionIncrement(c.getPositionIncrement());
                } else {
                    String termText = new String(c.termBuffer(), 0, c.termLength());
                    replace = new org.apache.lucene.analysis.Token(c.startOffset() - pre.length(),
                            c.endOffset());
                    replace.setTermBuffer(pre + termText);
                    replace.setType(c.type());
                    replace.setPositionIncrement(c.getPositionIncrement());
                }
            } else {
                StringBuilder prefix = new StringBuilder();
                StringBuilder postfix = new StringBuilder();
                StringBuilder builder = prefix;
                for (int i = c.startOffset() - 1; i >= replace.endOffset(); i--) {
                    char test = testText.charAt(i);
                    if (((test == '*') || (test == '?')) && wildcardPoistions.contains(i)) {
                        builder.insert(0, test);
                    } else {
                        builder = postfix;
                        postfix.setLength(0);
                    }
                }
                String pre = prefix.toString();
                String post = postfix.toString();

                // Does it bridge?
                if ((pre.length() > 0) && (replace.endOffset() + pre.length()) == c.startOffset()) {
                    String termText = new String(c.termBuffer(), 0, c.termLength());
                    if (requiresMLTokenDuplication) {
                        int position = termText.indexOf("}");
                        @SuppressWarnings("unused")
                        String language = termText.substring(0, position + 1);
                        String token = termText.substring(position + 1);
                        int oldPositionIncrement = replace.getPositionIncrement();
                        String replaceTermText = new String(replace.termBuffer(), 0, replace.termLength());
                        replace = new org.apache.lucene.analysis.Token(replace.startOffset(), c.endOffset());
                        replace.setTermBuffer(replaceTermText + pre + token);
                        replace.setType(replace.type());
                        replace.setPositionIncrement(oldPositionIncrement);
                    } else {
                        int oldPositionIncrement = replace.getPositionIncrement();
                        String replaceTermText = new String(replace.termBuffer(), 0, replace.termLength());
                        replace = new org.apache.lucene.analysis.Token(replace.startOffset(), c.endOffset());
                        replace.setTermBuffer(replaceTermText + pre + termText);
                        replace.setType(replace.type());
                        replace.setPositionIncrement(oldPositionIncrement);
                    }
                } else {
                    String termText = new String(c.termBuffer(), 0, c.termLength());
                    if (requiresMLTokenDuplication) {
                        int position = termText.indexOf("}");
                        String language = termText.substring(0, position + 1);
                        String token = termText.substring(position + 1);
                        String replaceTermText = new String(replace.termBuffer(), 0, replace.termLength());
                        org.apache.lucene.analysis.Token last = new org.apache.lucene.analysis.Token(
                                replace.startOffset(), replace.endOffset() + post.length());
                        last.setTermBuffer(replaceTermText + post);
                        last.setType(replace.type());
                        last.setPositionIncrement(replace.getPositionIncrement());
                        fixedTokenSequence.add(last);
                        replace = new org.apache.lucene.analysis.Token(c.startOffset() - pre.length(),
                                c.endOffset());
                        replace.setTermBuffer(language + pre + token);
                        replace.setType(c.type());
                        replace.setPositionIncrement(c.getPositionIncrement());
                    } else {
                        String replaceTermText = new String(replace.termBuffer(), 0, replace.termLength());
                        org.apache.lucene.analysis.Token last = new org.apache.lucene.analysis.Token(
                                replace.startOffset(), replace.endOffset() + post.length());
                        last.setTermBuffer(replaceTermText + post);
                        last.setType(replace.type());
                        last.setPositionIncrement(replace.getPositionIncrement());
                        fixedTokenSequence.add(last);
                        replace = new org.apache.lucene.analysis.Token(c.startOffset() - pre.length(),
                                c.endOffset());
                        replace.setTermBuffer(pre + termText);
                        replace.setType(c.type());
                        replace.setPositionIncrement(c.getPositionIncrement());
                    }
                }
            }
        }
        // finish last
        if (replace != null) {
            StringBuilder postfix = new StringBuilder();
            if ((replace.endOffset() >= 0) && (replace.endOffset() < testText.length())) {
                for (int i = replace.endOffset(); i < testText.length(); i++) {
                    char test = testText.charAt(i);
                    if (((test == '*') || (test == '?')) && wildcardPoistions.contains(i)) {
                        postfix.append(test);
                    } else {
                        break;
                    }
                }
            }
            String post = postfix.toString();
            int oldPositionIncrement = replace.getPositionIncrement();
            String replaceTermText = new String(replace.termBuffer(), 0, replace.termLength());
            replace = new org.apache.lucene.analysis.Token(replace.startOffset(),
                    replace.endOffset() + post.length());
            replace.setTermBuffer(replaceTermText + post);
            replace.setType(replace.type());
            replace.setPositionIncrement(oldPositionIncrement);
            fixedTokenSequence.add(replace);
        }
    }

    // rebuild fixed list

    ArrayList<org.apache.lucene.analysis.Token> fixed = new ArrayList<org.apache.lucene.analysis.Token>();
    for (LinkedList<org.apache.lucene.analysis.Token> tokenSequence : fixedTokenSequences) {
        for (org.apache.lucene.analysis.Token token : tokenSequence) {
            fixed.add(token);
        }
    }

    // reorder by start position and increment

    Collections.sort(fixed, new Comparator<org.apache.lucene.analysis.Token>() {

        public int compare(Token o1, Token o2) {
            int dif = o1.startOffset() - o2.startOffset();
            if (dif != 0) {
                return dif;
            } else {
                return o1.getPositionIncrement() - o2.getPositionIncrement();
            }
        }
    });

    // make sure we remove any tokens we have duplicated

    @SuppressWarnings("rawtypes")
    OrderedHashSet unique = new OrderedHashSet();
    unique.addAll(fixed);
    fixed = new ArrayList<org.apache.lucene.analysis.Token>(unique);

    list = fixed;

    // add any missing locales back to the tokens

    if (localePrefix.length() > 0) {
        for (int j = 0; j < list.size(); j++) {
            org.apache.lucene.analysis.Token currentToken = list.get(j);
            String termText = new String(currentToken.termBuffer(), 0, currentToken.termLength());
            currentToken.setTermBuffer(localePrefix + termText);
        }
    }

    if (list.size() == 0)
        return null;
    else if (list.size() == 1) {
        nextToken = list.get(0);
        String termText = new String(nextToken.termBuffer(), 0, nextToken.termLength());
        if (termText.contains("*") || termText.contains("?")) {
            return newWildcardQuery(
                    new Term(field, getLowercaseExpandedTerms() ? termText.toLowerCase() : termText));
        } else {
            return newTermQuery(new Term(field, termText));
        }
    } else {
        if (severalTokensAtSamePosition) {
            if (positionCount == 1) {
                // no phrase query:
                BooleanQuery q = newBooleanQuery(true);
                for (int i = 0; i < list.size(); i++) {
                    Query currentQuery;
                    nextToken = list.get(i);
                    String termText = new String(nextToken.termBuffer(), 0, nextToken.termLength());
                    if (termText.contains("*") || termText.contains("?")) {
                        currentQuery = newWildcardQuery(new Term(field,
                                getLowercaseExpandedTerms() ? termText.toLowerCase() : termText));
                    } else {
                        currentQuery = newTermQuery(new Term(field, termText));
                    }
                    q.add(currentQuery, BooleanClause.Occur.SHOULD);
                }
                return q;
            }
            // Consider if we can use a multi-phrase query (e.g. for synonym use rather than WordDelimiterFilterFactory)
            else if (canUseMultiPhraseQuery(fixedTokenSequences)) {
                // phrase query:
                MultiPhraseQuery mpq = newMultiPhraseQuery();
                mpq.setSlop(internalSlop);
                ArrayList<Term> multiTerms = new ArrayList<Term>();
                int position = 0;
                for (int i = 0; i < list.size(); i++) {
                    nextToken = list.get(i);
                    String termText = new String(nextToken.termBuffer(), 0, nextToken.termLength());

                    Term term = new Term(field, termText);
                    if ((termText != null) && (termText.contains("*") || termText.contains("?"))) {
                        addWildcardTerms(multiTerms, term);
                    } else {
                        multiTerms.add(term);
                    }

                    if (nextToken.getPositionIncrement() > 0 && multiTerms.size() > 0) {
                        if (getEnablePositionIncrements()) {
                            mpq.add(multiTerms.toArray(new Term[0]), position);
                        } else {
                            mpq.add(multiTerms.toArray(new Term[0]));
                        }
                        checkTermCount(field, queryText, mpq);
                        multiTerms.clear();
                    }
                    position += nextToken.getPositionIncrement();

                }
                if (getEnablePositionIncrements()) {
                    if (multiTerms.size() > 0) {
                        mpq.add(multiTerms.toArray(new Term[0]), position);
                    }
                    //                        else
                    //                        {
                    //                            mpq.add(new Term[] { new Term(field, "\u0000") }, position);
                    //                        }
                } else {
                    if (multiTerms.size() > 0) {
                        mpq.add(multiTerms.toArray(new Term[0]));
                    }
                    //                        else
                    //                        {
                    //                            mpq.add(new Term[] { new Term(field, "\u0000") });
                    //                        }
                }
                checkTermCount(field, queryText, mpq);
                return mpq;

            }
            // Word delimiter factory and other odd things generate complex token patterns.
            // Smart skip: token sequences with small tokens that generate too many wildcards
            // fall back to the larger pattern.
            // e.g. Site1* will not do (S ite 1*) or (Site 1*) if 1* matches too much; (S ite1*) and (Site1*) will still be OK.
            // If we skip all (for just 1* in the input) this is still an issue.
            else {
                boolean skippedTokens = false;
                BooleanQuery q = newBooleanQuery(true);
                TOKEN_SEQUENCE: for (LinkedList<org.apache.lucene.analysis.Token> tokenSequence : fixedTokenSequences) {
                    // phrase query:
                    MultiPhraseQuery mpq = newMultiPhraseQuery();
                    mpq.setSlop(internalSlop);
                    int position = 0;
                    for (int i = 0; i < tokenSequence.size(); i++) {
                        nextToken = (org.apache.lucene.analysis.Token) tokenSequence.get(i);
                        String termText = new String(nextToken.termBuffer(), 0, nextToken.termLength());

                        Term term = new Term(field, termText);

                        if (getEnablePositionIncrements()) {
                            if ((termText != null) && (termText.contains("*") || termText.contains("?"))) {
                                mpq.add(getMatchingTerms(field, term), position);
                            } else {
                                mpq.add(new Term[] { term }, position);
                            }
                            if (exceedsTermCount(mpq)) {
                                // We could duplicate the token sequence without the failing wildcard expansion and try again ??
                                skippedTokens = true;
                                continue TOKEN_SEQUENCE;
                            }
                            if (nextToken.getPositionIncrement() > 0) {
                                position += nextToken.getPositionIncrement();
                            } else {
                                position++;
                            }

                        } else {
                            if ((termText != null) && (termText.contains("*") || termText.contains("?"))) {
                                mpq.add(getMatchingTerms(field, term));
                            } else {
                                mpq.add(term);
                            }
                            if (exceedsTermCount(mpq)) {
                                skippedTokens = true;
                                continue TOKEN_SEQUENCE;
                            }
                        }
                    }
                    q.add(mpq, BooleanClause.Occur.SHOULD);
                }
                if (skippedTokens && (q.clauses().size() == 0)) {
                    throw new LuceneQueryParserException(
                            "Query skipped all token sequences as wildcards generated too many clauses: "
                                    + field + " " + queryText);
                }
                return q;
            }
        } else {
            MultiPhraseQuery q = new MultiPhraseQuery();
            q.setSlop(internalSlop);
            int position = 0;
            for (int i = 0; i < list.size(); i++) {
                nextToken = list.get(i);
                String termText = new String(nextToken.termBuffer(), 0, nextToken.termLength());
                Term term = new Term(field, termText);
                if (getEnablePositionIncrements()) {
                    if ((termText != null) && (termText.contains("*") || termText.contains("?"))) {
                        q.add(getMatchingTerms(field, term), position);
                    } else {
                        q.add(new Term[] { term }, position);
                    }
                    checkTermCount(field, queryText, q);
                    if (nextToken.getPositionIncrement() > 0) {
                        position += nextToken.getPositionIncrement();
                    } else {
                        position++;
                    }
                } else {
                    if ((termText != null) && (termText.contains("*") || termText.contains("?"))) {
                        q.add(getMatchingTerms(field, term));
                    } else {
                        q.add(term);
                    }
                    checkTermCount(field, queryText, q);
                }
            }
            return q;
        }
    }
}

From source file:com.ferdi2005.secondgram.NotificationsController.java

public void processDialogsUpdateRead(final HashMap<Long, Integer> dialogsToUpdate) {
    final ArrayList<MessageObject> popupArray = popupMessages.isEmpty() ? null : new ArrayList<>(popupMessages);
    notificationsQueue.postRunnable(new Runnable() {
        @Override
        public void run() {
            int old_unread_count = total_unread_count;
            SharedPreferences preferences = ApplicationLoader.applicationContext
                    .getSharedPreferences("Notifications", Context.MODE_PRIVATE);
            for (HashMap.Entry<Long, Integer> entry : dialogsToUpdate.entrySet()) {
                long dialog_id = entry.getKey();

                int notifyOverride = getNotifyOverride(preferences, dialog_id);
                if (notifyCheck) {
                    Integer override = pushDialogsOverrideMention.get(dialog_id);
                    if (override != null && override == 1) {
                        pushDialogsOverrideMention.put(dialog_id, 0);
                        notifyOverride = 1;
                    }
                }
                boolean canAddValue = !(notifyOverride == 2 || (!preferences.getBoolean("EnableAll", true)
                        || ((int) dialog_id < 0) && !preferences.getBoolean("EnableGroup", true))
                        && notifyOverride == 0);

                Integer currentCount = pushDialogs.get(dialog_id);
                Integer newCount = entry.getValue();
                if (newCount == 0) {
                    smartNotificationsDialogs.remove(dialog_id);
                }

                if (newCount < 0) {
                    if (currentCount == null) {
                        continue;
                    }
                    newCount = currentCount + newCount;
                }
                if (canAddValue || newCount == 0) {
                    if (currentCount != null) {
                        total_unread_count -= currentCount;
                    }
                }
                if (newCount == 0) {
                    pushDialogs.remove(dialog_id);
                    pushDialogsOverrideMention.remove(dialog_id);
                    for (int a = 0; a < pushMessages.size(); a++) {
                        MessageObject messageObject = pushMessages.get(a);
                        if (messageObject.getDialogId() == dialog_id) {
                            if (isPersonalMessage(messageObject)) {
                                personal_count--;
                            }
                            pushMessages.remove(a);
                            a--;
                            delayedPushMessages.remove(messageObject);
                            long mid = messageObject.messageOwner.id;
                            if (messageObject.messageOwner.to_id.channel_id != 0) {
                                mid |= ((long) messageObject.messageOwner.to_id.channel_id) << 32;
                            }
                            pushMessagesDict.remove(mid);
                            if (popupArray != null) {
                                popupArray.remove(messageObject);
                            }
                        }
                    }
                    if (popupArray != null && pushMessages.isEmpty() && !popupArray.isEmpty()) {
                        popupArray.clear();
                    }
                } else if (canAddValue) {
                    total_unread_count += newCount;
                    pushDialogs.put(dialog_id, newCount);
                }
            }
            if (popupArray != null) {
                AndroidUtilities.runOnUIThread(new Runnable() {
                    @Override
                    public void run() {
                        popupMessages = popupArray;
                    }
                });
            }
            if (old_unread_count != total_unread_count) {
                if (!notifyCheck) {
                    delayedPushMessages.clear();
                    showOrUpdateNotification(notifyCheck);
                } else {
                    scheduleNotificationDelay(
                            lastOnlineFromOtherDevice > ConnectionsManager.getInstance().getCurrentTime());
                }
            }
            notifyCheck = false;
            if (preferences.getBoolean("badgeNumber", true)) {
                setBadge(total_unread_count);
            }
        }
    });
}
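
The method above empties popupArray with clear() instead of assigning a new list, because popupMessages is later pointed at the very same list object on the UI thread; an in-place clear() is visible through every alias, while reassignment is not. A minimal sketch of the difference (class and variable names are illustrative, not part of the original code):

import java.util.ArrayList;
import java.util.List;

public class ClearVsReassign {
    public static void main(String[] args) {
        ArrayList<String> popupArray = new ArrayList<>();
        popupArray.add("message 1");

        // another component keeps its own reference to the same list
        List<String> popupMessages = popupArray;

        popupArray.clear();                        // empties the list in place
        System.out.println(popupMessages.size());  // prints 0: the alias sees the change

        popupArray.add("message 2");
        popupArray = new ArrayList<>();            // reassignment breaks the alias
        System.out.println(popupMessages.size());  // prints 1: the old list still holds "message 2"
    }
}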

From source file:carnero.cgeo.original.libs.Base.java

public Long searchByGeocode(HashMap<String, String> parameters, int reason, boolean forceReload) {
    final Search search = new Search();
    String geocode = parameters.get("geocode");
    String guid = parameters.get("guid");

    if (app == null) {
        Log.e(Settings.tag, "cgeoBase.searchByGeocode: No application found");
        return null;
    }

    if ((geocode == null || geocode.length() == 0) && (guid == null || guid.length() == 0)) {
        Log.e(Settings.tag, "cgeoBase.searchByGeocode: Neither geocode nor guid given");
        return null;
    }

    if (!forceReload && reason == 0
            && (app.isOffline(geocode, guid) || app.isThere(geocode, guid, true, true))) {
        if ((geocode == null || geocode.length() == 0) && guid != null && guid.length() > 0) {
            geocode = app.getGeocode(guid);
        }

        ArrayList<Cache> cacheList = new ArrayList<Cache>();
        cacheList.add(app.getCacheByGeocode(geocode, true, true, true, true, true, true));
        search.addGeocode(geocode);

        app.addSearch(search, cacheList, false, reason);

        cacheList.clear();
        cacheList = null;

        return search.getCurrentId();
    }

    final String host = "www.geocaching.com";
    final String path = "/seek/cache_details.aspx";
    final String method = "GET";
    final HashMap<String, String> params = new HashMap<String, String>();
    if (geocode != null && geocode.length() > 0) {
        params.put("wp", geocode);
    } else if (guid != null && guid.length() > 0) {
        params.put("guid", guid);
    }
    params.put("decrypt", "y");
    params.put("log", "y"); // download logs (more than 5
    params.put("numlogs", "35"); // 35 logs

    String page = requestLogged(false, host, path, method, params, false, false, false);

    if (page == null || page.length() == 0) {
        if (app.isThere(geocode, guid, true, false)) {
            if ((geocode == null || geocode.length() == 0) && guid != null && guid.length() > 0) {
                Log.i(Settings.tag, "Loading old cache from cache.");

                geocode = app.getGeocode(guid);
            }

            final ArrayList<Cache> cacheList = new ArrayList<Cache>();
            cacheList.add(app.getCacheByGeocode(geocode));
            search.addGeocode(geocode);
            search.error = null;
            search.errorRetrieve = 0; // reset errors from previous failed request

            app.addSearch(search, cacheList, false, reason);

            cacheList.clear();

            return search.getCurrentId();
        }

        Log.e(Settings.tag, "cgeoBase.searchByGeocode: No data from server");
        return null;
    }

    final CacheWrap caches = parseCache(page, reason);
    if (caches == null || caches.cacheList == null || caches.cacheList.isEmpty()) {
        if (caches != null && caches.error != null && caches.error.length() > 0) {
            search.error = caches.error;
        }
        if (caches != null && caches.url != null && caches.url.length() > 0) {
            search.url = caches.url;
        }

        app.addSearch(search, null, true, reason);

        Log.e(Settings.tag, "cgeoBase.searchByGeocode: No cache parsed");
        return null;
    }

    final ArrayList<Cache> cacheList = new ArrayList<Cache>();
    if (caches != null) {
        if (caches.error != null && caches.error.length() > 0) {
            search.error = caches.error;
        }
        if (caches.url != null && caches.url.length() > 0) {
            search.url = caches.url;
        }
        if (caches.viewstate != null && caches.viewstate.length() > 0) {
            search.viewstate = caches.viewstate;
        }
        if (caches.viewstate1 != null && caches.viewstate1.length() > 0) {
            search.viewstate1 = caches.viewstate1;
        }
        search.totalCnt = caches.totalCnt;

        for (Cache cache : caches.cacheList) {
            search.addGeocode(cache.geocode);
            cacheList.add(cache);
        }
    }

    app.addSearch(search, cacheList, true, reason);

    page = null;
    cacheList.clear();

    return search.getCurrentId();
}
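
Both cacheList.clear() calls in this method run just before the list goes out of scope (or is set to null), so they are not needed for memory reclamation: once the last reference to the list is dropped, its elements become unreachable anyway. clear() only matters when the list object itself remains referenced and is reused. A small sketch of the distinction (names are hypothetical):

import java.util.ArrayList;

public class ClearBeforeDiscard {
    public static void main(String[] args) {
        ArrayList<String> caches = new ArrayList<>();
        caches.add("GC1234");

        // redundant: clearing just before dropping the only reference
        caches.clear();
        caches = null; // the elements would have been collectable anyway

        // useful: clearing a list that is kept and refilled
        ArrayList<String> reusable = new ArrayList<>();
        for (int batch = 0; batch < 3; batch++) {
            reusable.add("item-" + batch);
            System.out.println(reusable);
            reusable.clear(); // same list object, emptied for the next batch
        }
    }
}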

From source file:com.ibm.bi.dml.lops.compile.Dag.java

/**
 * Method to group a vector of sorted lops.
 * 
 * @param sb
 * @param node_v
 * @throws LopsException
 * @throws IOException
 * @throws DMLUnsupportedOperationException
 * @throws DMLRuntimeException
 */

@SuppressWarnings("unchecked")
private ArrayList<Instruction> doGreedyGrouping(StatementBlock sb, ArrayList<N> node_v)
        throws LopsException, IOException, DMLRuntimeException, DMLUnsupportedOperationException {
    LOG.trace("Grouping DAG ============");

    // nodes to be executed in current iteration
    ArrayList<N> execNodes = new ArrayList<N>();
    // nodes that have already been processed
    ArrayList<N> finishedNodes = new ArrayList<N>();
    // nodes that are queued for the following iteration
    ArrayList<N> queuedNodes = new ArrayList<N>();

    ArrayList<ArrayList<N>> jobNodes = createNodeVectors(JobType.getNumJobTypes());

    // list of instructions
    ArrayList<Instruction> inst = new ArrayList<Instruction>();

    //ArrayList<Instruction> preWriteDeleteInst = new ArrayList<Instruction>();
    ArrayList<Instruction> writeInst = new ArrayList<Instruction>();
    ArrayList<Instruction> deleteInst = new ArrayList<Instruction>();
    ArrayList<Instruction> endOfBlockInst = new ArrayList<Instruction>();

    // delete transient variables that are no longer needed
    //deleteUnwantedTransientReadVariables(node_v, deleteInst);

    // remove files for transient reads that are updated.
    deleteUpdatedTransientReadVariables(sb, node_v, writeInst);

    generateRemoveInstructions(sb, endOfBlockInst);

    generateInstructionsForInputVariables(node_v, inst);

    boolean done = false;
    String indent = "    ";

    while (!done) {
        LOG.trace("Grouping nodes in DAG");

        execNodes.clear();
        queuedNodes.clear();
        clearNodeVectors(jobNodes);
        gmrMapperFootprint = 0;

        for (int i = 0; i < node_v.size(); i++) {
            N node = node_v.get(i);

            // finished nodes don't need to be processed

            if (finishedNodes.contains(node))
                continue;

            if (LOG.isTraceEnabled())
                LOG.trace("Processing node (" + node.getID() + ") " + node.toString() + " exec nodes size is "
                        + execNodes.size());

            //if node defines MR job, make sure it is compatible with all 
            //its children nodes in execNodes 
            if (node.definesMRJob() && !compatibleWithChildrenInExecNodes(execNodes, node)) {
                if (LOG.isTraceEnabled())
                    LOG.trace(indent + "Queueing node " + node.toString() + " (code 1)");

                queuedNodes.add(node);
                removeNodesForNextIteration(node, finishedNodes, execNodes, queuedNodes, jobNodes);
                continue;
            }

            // if child is queued, this node will be processed in the later
            // iteration
            if (hasChildNode(node, queuedNodes)) {

                if (LOG.isTraceEnabled())
                    LOG.trace(indent + "Queueing node " + node.toString() + " (code 2)");
                //for(N q: queuedNodes) {
                //   LOG.trace(indent + "  " + q.getType() + "," + q.getID());
                //}
                queuedNodes.add(node);

                // if node has more than two inputs,
                // remove children that will be needed in a future
                // iterations
                // may also have to remove parent nodes of these children
                removeNodesForNextIteration(node, finishedNodes, execNodes, queuedNodes, jobNodes);

                continue;
            }

            // if inputs come from different jobs, then queue
            if (node.getInputs().size() >= 2) {
                int jobid = Integer.MIN_VALUE;
                boolean queueit = false;
                for (int idx = 0; idx < node.getInputs().size(); idx++) {
                    int input_jobid = jobType(node.getInputs().get(idx), jobNodes);
                    if (input_jobid != -1) {
                        if (jobid == Integer.MIN_VALUE)
                            jobid = input_jobid;
                        else if (jobid != input_jobid) {
                            queueit = true;
                            break;
                        }
                    }
                }
                if (queueit) {
                    if (LOG.isTraceEnabled())
                        LOG.trace(indent + "Queueing node " + node.toString() + " (code 3)");
                    queuedNodes.add(node);
                    removeNodesForNextIteration(node, finishedNodes, execNodes, queuedNodes, jobNodes);
                    continue;
                }
            }

            /*if (node.getInputs().size() == 2) {
               int j1 = jobType(node.getInputs().get(0), jobNodes);
               int j2 = jobType(node.getInputs().get(1), jobNodes);
               if (j1 != -1 && j2 != -1 && j1 != j2) {
                  LOG.trace(indent + "Queueing node "
                   + node.toString() + " (code 3)");
                    
                  queuedNodes.add(node);
                    
                  removeNodesForNextIteration(node, finishedNodes,
                execNodes, queuedNodes, jobNodes);
                    
                  continue;
               }
            }*/

            // See if this lop can be eliminated
            // This check is for "aligner" lops (e.g., group)
            boolean eliminate = false;
            eliminate = canEliminateLop(node, execNodes);
            if (eliminate) {
                if (LOG.isTraceEnabled())
                    LOG.trace(indent + "Adding -" + node.toString());
                execNodes.add(node);
                finishedNodes.add(node);
                addNodeByJobType(node, jobNodes, execNodes, eliminate);
                continue;
            }

            // If the node defines a MR Job then make sure none of its
            // children that defines a MR Job are present in execNodes
            if (node.definesMRJob()) {
                if (hasMRJobChildNode(node, execNodes)) {
                    // "node" must NOT be queued when node=group and the child that defines job is Rand
                    // this is because "group" can be pushed into the "Rand" job.
                    if (!(node.getType() == Lop.Type.Grouping && checkDataGenAsChildNode(node, execNodes))) {
                        if (LOG.isTraceEnabled())
                            LOG.trace(indent + "Queueing node " + node.toString() + " (code 4)");

                        queuedNodes.add(node);

                        removeNodesForNextIteration(node, finishedNodes, execNodes, queuedNodes, jobNodes);

                        continue;
                    }
                }
            }

            // if "node" has more than one input, and has a descendant lop
            // in execNodes that is of type RecordReader
            // then all its inputs must be ancestors of RecordReader. If
            // not, queue "node"
            if (node.getInputs().size() > 1 && hasChildNode(node, execNodes, ExecLocation.RecordReader)) {
                // get the actual RecordReader lop
                N rr_node = getChildNode(node, execNodes, ExecLocation.RecordReader);

                // all inputs of "node" must be ancestors of rr_node
                boolean queue_it = false;
                for (int in = 0; in < node.getInputs().size(); in++) {
                    // each input should be ancestor of RecordReader lop
                    N n = (N) node.getInputs().get(in);
                    if (!n.equals(rr_node) && !isChild(rr_node, n, IDMap)) {
                        queue_it = true; // i.e., "node" must be queued
                        break;
                    }
                }

                if (queue_it) {
                    // queue node
                    if (LOG.isTraceEnabled())
                        LOG.trace(indent + "Queueing -" + node.toString() + " (code 5)");
                    queuedNodes.add(node);
                    // TODO: does this have to be modified to handle
                    // recordreader lops?
                    removeNodesForNextIteration(node, finishedNodes, execNodes, queuedNodes, jobNodes);
                    continue;
                }
                // otherwise fall through: the subsequent checks are performed on "node"
            }

            // data node, always add if child not queued
            // only write nodes are kept in execnodes
            if (node.getExecLocation() == ExecLocation.Data) {
                Data dnode = (Data) node;
                boolean dnode_queued = false;

                if (dnode.getOperationType() == OperationTypes.READ) {
                    if (LOG.isTraceEnabled())
                        LOG.trace(indent + "Adding Data -" + node.toString());

                    // TODO: avoid readScalar instruction, and read it on-demand just like the way Matrices are read in control program
                    if (node.getDataType() == DataType.SCALAR
                            //TODO: LEO check the following condition is still needed
                            && node.getOutputParameters().getFile_name() != null) {
                        // this lop corresponds to reading a scalar from HDFS file
                        // add it to execNodes so that "readScalar" instruction gets generated
                        execNodes.add(node);
                        // note: no need to add it to any job vector
                    }
                } else if (dnode.getOperationType() == OperationTypes.WRITE) {
                    // Skip the transient write <code>node</code> if the input is a 
                    // transient read with the same variable name. i.e., a dummy copy. 
                    // Hence, <code>node</code> can be avoided.
                    // TODO: this case should ideally be handled in the language layer 
                    //       prior to the construction of Hops Dag 
                    N input = (N) dnode.getInputs().get(0);
                    if (dnode.isTransient() && input.getExecLocation() == ExecLocation.Data
                            && ((Data) input).isTransient() && dnode.getOutputParameters().getLabel()
                                    .compareTo(input.getOutputParameters().getLabel()) == 0) {
                        // do nothing: <code>node</code> must not be processed any further
                    } else if (execNodes.contains(input) && !isCompatible(node, input)
                            && sendWriteLopToMR(node)) {
                        // input is in execNodes but it is not compatible with write lop. So, queue the write lop.
                        if (LOG.isTraceEnabled())
                            LOG.trace(indent + "Queueing -" + node.toString());
                        queuedNodes.add(node);
                        dnode_queued = true;
                    } else {
                        if (LOG.isTraceEnabled())
                            LOG.trace(indent + "Adding Data -" + node.toString());

                        execNodes.add(node);
                        if (sendWriteLopToMR(node)) {
                            addNodeByJobType(node, jobNodes, execNodes, false);
                        }
                    }
                }
                if (!dnode_queued)
                    finishedNodes.add(node);

                continue;
            }

            // map or reduce node, can always be piggybacked with parent
            if (node.getExecLocation() == ExecLocation.MapOrReduce) {
                if (LOG.isTraceEnabled())
                    LOG.trace(indent + "Adding -" + node.toString());
                execNodes.add(node);
                finishedNodes.add(node);
                addNodeByJobType(node, jobNodes, execNodes, false);

                continue;
            }

            // RecordReader node, add, if no parent needs reduce, else queue
            if (node.getExecLocation() == ExecLocation.RecordReader) {
                // "node" should not have any children in
                // execNodes .. it has to be the first one in the job!
                if (!hasChildNode(node, execNodes, ExecLocation.Map)
                        && !hasChildNode(node, execNodes, ExecLocation.MapAndReduce)) {
                    if (LOG.isTraceEnabled())
                        LOG.trace(indent + "Adding -" + node.toString());
                    execNodes.add(node);
                    finishedNodes.add(node);
                    addNodeByJobType(node, jobNodes, execNodes, false);
                } else {
                    if (LOG.isTraceEnabled())
                        LOG.trace(indent + "Queueing -" + node.toString() + " (code 6)");
                    queuedNodes.add(node);
                    removeNodesForNextIteration(node, finishedNodes, execNodes, queuedNodes, jobNodes);

                }
                continue;
            }

            // map node, add, if no parent needs reduce, else queue
            if (node.getExecLocation() == ExecLocation.Map) {
                boolean queueThisNode = false;
                int subcode = -1;
                if (node.usesDistributedCache()) {
                    // if an input to <code>node</code> comes from distributed cache
                    // then that input must get executed in one of the previous jobs.
                    int[] dcInputIndexes = node.distributedCacheInputIndex();
                    for (int dcInputIndex : dcInputIndexes) {
                        N dcInput = (N) node.getInputs().get(dcInputIndex - 1);
                        if ((dcInput.getType() != Lop.Type.Data && dcInput.getExecType() == ExecType.MR)
                                && execNodes.contains(dcInput)) {
                            queueThisNode = true;
                            subcode = 1;
                        }
                    }

                    // Limit the number of distributed cache inputs based on the available memory in mappers
                    double memsize = computeFootprintInMapper(node);
                    //gmrMapperFootprint += computeFootprintInMapper(node);
                    if (gmrMapperFootprint > 0 && !checkMemoryLimits(node, gmrMapperFootprint + memsize)) {
                        queueThisNode = true;
                        subcode = 2;
                    }
                    if (!queueThisNode)
                        gmrMapperFootprint += memsize;
                }
                if (!queueThisNode && !hasChildNode(node, execNodes, ExecLocation.MapAndReduce)
                        && !hasMRJobChildNode(node, execNodes)) {
                    if (LOG.isTraceEnabled())
                        LOG.trace(indent + "Adding -" + node.toString());
                    execNodes.add(node);
                    finishedNodes.add(node);
                    addNodeByJobType(node, jobNodes, execNodes, false);
                } else {
                    if (LOG.isTraceEnabled())
                        LOG.trace(indent + "Queueing -" + node.toString() + " (code 7 - " + "subcode " + subcode
                                + ")");
                    queuedNodes.add(node);
                    removeNodesForNextIteration(node, finishedNodes, execNodes, queuedNodes, jobNodes);

                }
                continue;
            }

            // reduce node, make sure no parent needs reduce, else queue
            if (node.getExecLocation() == ExecLocation.MapAndReduce) {

                // boolean eliminate = false;
                // eliminate = canEliminateLop(node, execNodes);
                // if (eliminate || (!hasChildNode(node, execNodes,
                // ExecLocation.MapAndReduce)) &&
                // !hasMRJobChildNode(node,execNodes)) {

                // TODO: statiko -- keep the middle condition
                // discuss about having a lop that is MapAndReduce but does
                // not define a job
                if (LOG.isTraceEnabled())
                    LOG.trace(indent + "Adding -" + node.toString());
                execNodes.add(node);
                finishedNodes.add(node);
                addNodeByJobType(node, jobNodes, execNodes, eliminate);

                // } else {
                // if (DEBUG)
                // System.out.println("Queueing -" + node.toString());
                // queuedNodes.add(node);
                // removeNodesForNextIteration(node, finishedNodes,
                // execNodes, queuedNodes, jobNodes);
                // }
                continue;
            }

            // aligned reduce, make sure a parent that is reduce exists
            if (node.getExecLocation() == ExecLocation.Reduce) {
                if (compatibleWithChildrenInExecNodes(execNodes, node)
                        && (hasChildNode(node, execNodes, ExecLocation.MapAndReduce)
                                || hasChildNode(node, execNodes, ExecLocation.Map))) {
                    if (LOG.isTraceEnabled())
                        LOG.trace(indent + "Adding -" + node.toString());
                    execNodes.add(node);
                    finishedNodes.add(node);
                    addNodeByJobType(node, jobNodes, execNodes, false);
                } else {
                    if (LOG.isTraceEnabled())
                        LOG.trace(indent + "Queueing -" + node.toString() + " (code 8)");
                    queuedNodes.add(node);
                    removeNodesForNextIteration(node, finishedNodes, execNodes, queuedNodes, jobNodes);
                }

                continue;

            }

            // add Scalar to execNodes if it has no child in exec nodes
            // that will be executed in a MR job.
            if (node.getExecLocation() == ExecLocation.ControlProgram) {
                for (int j = 0; j < node.getInputs().size(); j++) {
                    if (execNodes.contains(node.getInputs().get(j))
                            && !(node.getInputs().get(j).getExecLocation() == ExecLocation.Data)
                            && !(node.getInputs().get(j).getExecLocation() == ExecLocation.ControlProgram)) {
                        if (LOG.isTraceEnabled())
                            LOG.trace(indent + "Queueing -" + node.toString() + " (code 9)");

                        queuedNodes.add(node);
                        removeNodesForNextIteration(node, finishedNodes, execNodes, queuedNodes, jobNodes);
                        break;
                    }
                }

                if (queuedNodes.contains(node))
                    continue;
                if (LOG.isTraceEnabled())
                    LOG.trace(indent + "Adding - scalar" + node.toString());
                execNodes.add(node);
                addNodeByJobType(node, jobNodes, execNodes, false);
                finishedNodes.add(node);
                continue;
            }

        }

        // no work to do
        if (execNodes.isEmpty()) {

            if (!queuedNodes.isEmpty()) {
                throw new LopsException("Queued nodes should be empty at this point");
            }

            if (LOG.isTraceEnabled())
                LOG.trace("All done! queuedNodes = " + queuedNodes.size());

            done = true;
        } else {
            // work to do

            if (LOG.isTraceEnabled())
                LOG.trace("Generating jobs for group -- Node count=" + execNodes.size());

            // first process scalar instructions
            generateControlProgramJobs(execNodes, inst, writeInst, deleteInst);

            // copy unassigned lops in execnodes to gmrnodes
            for (int i = 0; i < execNodes.size(); i++) {
                N node = execNodes.get(i);
                if (jobType(node, jobNodes) == -1) {
                    if (isCompatible(node, JobType.GMR)) {
                        if (node.hasNonBlockedInputs()) {
                            jobNodes.get(JobType.GMRCELL.getId()).add(node);
                            addChildren(node, jobNodes.get(JobType.GMRCELL.getId()), execNodes);
                        } else {
                            jobNodes.get(JobType.GMR.getId()).add(node);
                            addChildren(node, jobNodes.get(JobType.GMR.getId()), execNodes);
                        }
                    } else {
                        if (LOG.isTraceEnabled())
                            LOG.trace(indent + "Queueing -" + node.toString() + " (code 10)");
                        execNodes.remove(i);
                        i--; // stay at the same index after the removal shifts elements left
                        finishedNodes.remove(node);
                        queuedNodes.add(node);
                        removeNodesForNextIteration(node, finishedNodes, execNodes, queuedNodes, jobNodes);
                    }
                }
            }

            // next generate MR instructions
            if (!execNodes.isEmpty())
                generateMRJobs(execNodes, inst, writeInst, deleteInst, jobNodes);

            handleSingleOutputJobs(execNodes, jobNodes, finishedNodes);

        }

    }

    // add write and delete inst at the very end.

    //inst.addAll(preWriteDeleteInst);
    inst.addAll(writeInst);
    inst.addAll(deleteInst);
    inst.addAll(endOfBlockInst);

    return inst;

}
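
The grouping loop above reuses the same execNodes, queuedNodes, and per-job-type lists across iterations, emptying them with clear() at the top of every pass rather than allocating fresh lists. A stripped-down sketch of that worklist pattern, with toy integer "nodes" standing in for the lop types (this is not the SystemML API, just an illustration of the loop shape):

import java.util.ArrayList;

public class WorklistLoop {
    public static void main(String[] args) {
        ArrayList<Integer> pending = new ArrayList<>();
        for (int i = 1; i <= 5; i++) pending.add(i);

        ArrayList<Integer> execNodes = new ArrayList<>();
        ArrayList<Integer> queuedNodes = new ArrayList<>();

        while (!pending.isEmpty()) {
            // reuse the same lists every iteration instead of reallocating
            execNodes.clear();
            queuedNodes.clear();

            for (Integer n : pending) {
                if (n % 2 == 1) execNodes.add(n);  // "compatible": process now
                else queuedNodes.add(n);           // defer to a later pass
            }
            System.out.println("processing " + execNodes);

            pending.clear();
            pending.addAll(queuedNodes);
            // toy progress step so deferred nodes eventually become eligible
            for (int i = 0; i < pending.size(); i++) pending.set(i, pending.get(i) + 1);
        }
    }
}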

From source file:com.krawler.spring.hrms.common.hrmsCommonController.java

public ModelAndView saveUser(HttpServletRequest request, HttpServletResponse response) {
    JSONObject jobj = new JSONObject();
    Integer codeid2 = null;
    KwlReturnObject result = null;
    String msg = "";
    int roleflag = 0;
    String employeeIdFormat = "";
    boolean isStadardEmpFormatWithIdAvilable = false;

    //Create transaction
    DefaultTransactionDefinition def = new DefaultTransactionDefinition();
    def.setName("JE_Tx");
    def.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRED);
    def.setIsolationLevel(TransactionDefinition.ISOLATION_READ_UNCOMMITTED);
    TransactionStatus status = txnManager.getTransaction(def);

    try {

        HashMap<String, Object> requestParams = new HashMap<String, Object>();
        ArrayList<String> filter_names = new ArrayList<String>();
        ArrayList<Object> filter_values = new ArrayList<Object>();

        HashMap newhm = new FileUploadHandler().getItems(request);
        HashMap<String, String> hm = new HashMap<String, String>();
        for (Object key : newhm.keySet()) {
            hm.put(key.toString(),
                    new String(newhm.get(key.toString()).toString().getBytes("iso-8859-1"), "UTF-8"));
        }
        String id = (String) hm.get("userid");
        //String lastname = (String) hm.get("lastname");
        //lastname = new String (lastname.getBytes ("iso-8859-1"), "UTF-8");

        if (!StringUtil.isNullOrEmpty((String) hm.get("employeeid"))) {
            String[] codeid = ((String) hm.get("employeeid")).split("-");

            for (int x = 0; x < codeid.length; x++) {
                if (codeid[x].matches("[0-9]*")) {
                    codeid2 = Integer.parseInt(codeid[x]);
                } else {
                    employeeIdFormat += (codeid[x] + "-");
                }
            }
            if (employeeIdFormat.length() > 0) {
                employeeIdFormat = employeeIdFormat.substring(0, employeeIdFormat.length() - 1);
            }
        }
        if (StringUtil.isNullOrEmpty(employeeIdFormat))
            employeeIdFormat = null;
        String companyid = sessionHandlerImplObj.getCompanyid(request);
        String pwd = null;

        if (!StringUtil.isNullOrEmpty(id)) {
            requestParams.clear();

            //                filter_names.add("employeeid");
            //                filter_values.add(codeid2);
            //
            //                filter_names.add("userID");
            //                filter_values.add(id);
            //
            //                filter_names.add("company.companyID");
            //                filter_values.add(companyid);
            //
            //                requestParams.put("filter_names", filter_names);
            //                requestParams.put("filter_values", filter_values);
            //
            //                result = hrmsCommonDAOObj.getUsers(requestParams);
            //                if (result.getEntityList().isEmpty()) {
            requestParams.put("employeeIdFormat", employeeIdFormat);
            requestParams.put("userID", id);
            requestParams.put("employeeid", codeid2);
            requestParams.put("request", request);
            isStadardEmpFormatWithIdAvilable = isStadardEmpFormatWithIdAvilable(requestParams);
            String standardEmpId = getStadardEmpFormat(requestParams);
            if (standardEmpId != null && employeeIdFormat != null && standardEmpId.equals(employeeIdFormat)) {
                employeeIdFormat = null;
            }
            requestParams.clear();
            filter_names.clear();
            filter_values.clear();

            filter_names.add("employeeid");
            filter_values.add(codeid2);

            if (employeeIdFormat == null) {
                filter_names.add("IS employeeIdFormat");
            } else {
                filter_names.add("employeeIdFormat");
                filter_values.add(employeeIdFormat);
            }
            filter_names.add("!userID");
            filter_values.add(id);

            filter_names.add("user.company.companyID");
            filter_values.add(companyid);

            requestParams.put("filter_names", filter_names);
            requestParams.put("filter_values", filter_values);

            result = hrmsCommonDAOObj.getUseraccount(requestParams);

            if (!result.getEntityList().isEmpty() || isStadardEmpFormatWithIdAvilable) {
                throw new Exception("Employee ID already present.");
            }
            //                }
            requestParams.clear();
            requestParams.put("id", id);
            if (((String) hm.get("formname")).equals("user")) {
                if (!StringUtil.isNullOrEmpty((String) hm.get("templateid"))) {
                    requestParams.put("templateid", (String) hm.get("templateid"));
                } else {
                    requestParams.put("templateid", " ");
                }
            }
        } else {
            requestParams.clear();
            filter_names.clear();
            filter_values.clear();

            filter_names.add("userLogin.userName");
            filter_values.add(hm.get("username"));

            requestParams.put("filter_names", filter_names);
            requestParams.put("filter_values", filter_values);

            result = hrmsCommonDAOObj.getUsers(requestParams);
            if (!result.getEntityList().isEmpty()) {
                throw new Exception("User name not available.");
            }
            requestParams.clear();
            filter_names.clear();
            filter_values.clear();

            filter_names.add("employeeid");
            filter_values.add(codeid2);

            filter_names.add("company.companyID");
            filter_values.add(companyid);

            requestParams.put("filter_names", filter_names);
            requestParams.put("filter_values", filter_values);

            result = hrmsCommonDAOObj.getUsers(requestParams);

            if (!result.getEntityList().isEmpty()) {
                throw new Exception("Employee ID already present.");
            }

            requestParams.clear();
            requestParams.put("username", hm.get("username"));
            pwd = AuthHandler.generateNewPassword();
            requestParams.put("pwd", AuthHandler.getSHA1(pwd));
            requestParams.put("companyid", companyid);
        }

        requestParams.put("fname", hm.get("firstname"));
        requestParams.put("lname", hm.get("lastname"));
        requestParams.put("emailid", hm.get("emailid"));
        requestParams.put("address", hm.get("address"));
        requestParams.put("contactno", hm.get("contactnumber"));
        requestParams.put("empid", codeid2);
        requestParams.put("employeeIdFormat", employeeIdFormat);
        requestParams.put("companyid", companyid);

        int histsave = 0;
        String histdept = "";
        String histdesig = "";
        String histsal = "";
        SimpleDateFormat fmt = new SimpleDateFormat("yyyy/MM/dd");
        // normalize to date-only precision without the deprecated Date(String) constructor
        Date saveDate = fmt.parse(fmt.format(new Date()));
        String updatedby = sessionHandlerImplObj.getUserid(request);
        Useraccount ua = (Useraccount) kwlCommonTablesDAOObj.getObject("com.krawler.common.admin.Useraccount",
                id);

        if (!StringUtil.isNullOrEmpty((String) hm.get("roleid"))
                && !hm.get("roleid").equals(ua.getRole().getID())) {
            if (ua.getRole().getID().equals("1") && hrmsCommonDAOObj.isCompanySuperAdmin(id, companyid)) {//Can't Edit role for super admin
                roleflag = 1;
            } else {

                String newRoleId = hm.get("roleid").toString();
                if (StringUtil.equal(newRoleId, Role.COMPANY_USER)) { // Check whether new Role is Company User/ Company Employee

                    List<Empprofile> childList = organizationChartDAOObj.getChildNode(id); // Check for child list before changing its role to Employee.

                    if (childList.size() > 0) {
                        roleflag = 2;
                    } else {
                        requestParams.put("roleid", newRoleId);
                    }
                } else {

                    requestParams.put("roleid", newRoleId);
                }

            }
        }
        if (!StringUtil.isNullOrEmpty((String) hm.get("designationid"))) {
            if ((MasterData) kwlCommonTablesDAOObj.getObject("com.krawler.hrms.master.MasterData",
                    (String) hm.get("designationid")) != ua.getDesignationid()) {
                histdesig = (ua.getDesignationid() == null) ? "" : ua.getDesignationid().getId();
                histsave = 1;
            }
            requestParams.put("designationid", hm.get("designationid"));
        }
        if (!StringUtil.isNullOrEmpty((String) hm.get("department"))) {
            if ((MasterData) kwlCommonTablesDAOObj.getObject("com.krawler.hrms.master.MasterData",
                    (String) hm.get("department")) != ua.getDepartment()) {
                histdept = (ua.getDepartment() == null) ? "" : ua.getDepartment().getId();
                if (histsave == 0) {
                    histdesig = (ua.getDesignationid() == null) ? "" : ua.getDesignationid().getId();
                }
                histsave = 2;
            }
            requestParams.put("department", hm.get("department"));
        }
        if (!StringUtil.isNullOrEmpty((String) hm.get("salary"))) {
            String tempsal = "0";
            if (((String) hm.get("salary")).length() > 0) {
                tempsal = hm.get("salary").toString();
            }
            if (!tempsal.equals(ua.getSalary())) {
                if (ua.getSalary() != null) {
                    histsal = ua.getSalary();
                }
            }
            requestParams.put("salary", tempsal);
        }

        if (!StringUtil.isNullOrEmpty((String) hm.get("accno"))) {
            if (((String) hm.get("accno")).length() > 0) {
                requestParams.put("accno", hm.get("accno"));
            } else {
                requestParams.put("accno", "0");
            }
        }

        if (!StringUtil.isNullOrEmpty((String) hm.get("formatid"))) {
            requestParams.put("formatid", hm.get("formatid"));
        }
        String diff = null;
        if (!StringUtil.isNullOrEmpty((String) hm.get("tzid"))) {
            KWLTimeZone timeZone = (KWLTimeZone) kwlCommonTablesDAOObj
                    .getObject("com.krawler.common.admin.KWLTimeZone", (String) hm.get("tzid"));
            diff = timeZone.getDifference();
            requestParams.put("tzid", hm.get("tzid"));
        }
        if (!StringUtil.isNullOrEmpty((String) hm.get("aboutuser"))) {
            requestParams.put("aboutuser", hm.get("aboutuser"));
        }
        String imageName = "";
        if (newhm.get("userimage") != null) {
            imageName = ((FileItem) (newhm.get("userimage"))).getName();
            if (!StringUtil.isNullOrEmpty(imageName)) {
                requestParams.put("userimage", hm.get("userimage"));
            }
        }

        result = hrmsCommonDAOObj.saveUser(requestParams, RequestContextUtils.getLocale(request));
        if (!StringUtil.isNullOrEmpty(imageName)) {
            User user = (User) result.getEntityList().get(0);
            String fileName = user.getImage().substring(user.getImage().lastIndexOf("/") + 1,
                    user.getImage().length());
            new FileUploadHandler().uploadImage((FileItem) (newhm.get("userimage")), fileName,
                    StorageHandler.GetProfileImgStorePath(), 100, 100, false, false);
        }
        msg = result.getMsg();
        requestParams.clear();
        if (histsave == 1) {
            histdept = ua.getDepartment().getId();
        }
        if (histsave == 1 || histsave == 2) {
            String latestUpdate = "";
            HashMap<String, Object> requestParams2 = new HashMap<String, Object>();
            requestParams2.put("id", id);
            requestParams2.put("cat", Emphistory.Emp_Desg_change);
            result = hrmsCommonDAOObj.getLastUpdatedHistory(requestParams2);
            latestUpdate = result.getEntityList().get(0).toString();
            if (!latestUpdate.equals("")) {
                latestUpdate = latestUpdate.replace("-", "/");
                requestParams.put("Joindate", fmt.parse(latestUpdate));
            }
            requestParams.put("Department", histdept);
            requestParams.put("Designation", histdesig);
            requestParams.put("Userid", id);
            requestParams.put("Empid", ua.getEmployeeid());
            requestParams.put("Updatedon", saveDate);
            requestParams.put("Updatedby", updatedby);
            requestParams.put("Category", Emphistory.Emp_Desg_change);
            result = hrmsCommonDAOObj.addEmphistory(requestParams);
        }
        if (!histsal.equals("")) {
            requestParams.clear();
            requestParams.put("Userid", id);
            requestParams.put("Salary", histsal);
            requestParams.put("Updatedon", saveDate);
            requestParams.put("Updatedby", updatedby);
            requestParams.put("Category", Emphistory.Emp_Salary);
            result = hrmsCommonDAOObj.addEmphistory(requestParams);
        }

        sessionHandlerImplObj.updatePreferences(request, null,
                (StringUtil.isNullOrEmpty((String) hm.get("formatid")) ? null : (String) hm.get("formatid")),
                (StringUtil.isNullOrEmpty((String) hm.get("tzid")) ? null : (String) hm.get("tzid")), diff);
        if (roleflag == 1) {
            msg = msg + " "
                    + messageSource.getMessage("hrms.common.Rolecannotbechangedforsuperadministrator", null,
                            "Role cannot be changed for Super Administrator.",
                            RequestContextUtils.getLocale(request));
            jobj.put("roleflag", roleflag);
        }
        if (roleflag == 2) {
            msg = msg + " <br><br><br>" + messageSource.getMessage(
                    "hrms.common.rolecannotbechangedtocompanyemployee", null,
                    "Note : Role cannot be changed to Company Employee. Please re-assign or remove its child node in Organization Chart before changing its role to Company Employee.",
                    RequestContextUtils.getLocale(request));
            jobj.put("roleflag", roleflag);
        }
        jobj.put("msg", msg);
        jobj.put("success", true);
        txnManager.commit(status);
    } catch (Exception e) {
        try {
            if ("Employee ID already present.".equals(e.getMessage())) {
                jobj.put("msg", e.getMessage());
            }
        } catch (Exception ex) {
            e.printStackTrace();
        }
        e.printStackTrace();
        txnManager.rollback(status);
    }
    return new ModelAndView("jsonView", "model", jobj.toString());
}
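
saveUser() builds several DAO queries out of a single requestParams map and one pair of filter lists, calling clear() on them between queries. This works because each DAO call consumes the collections before the next clear(); if a callee retained the reference instead, clearing would silently wipe its data. A hypothetical sketch of the reuse pattern (runQuery stands in for the DAO calls):

import java.util.ArrayList;
import java.util.HashMap;

public class ReuseBetweenQueries {
    // stand-in for a DAO call that reads the filters immediately
    static int runQuery(HashMap<String, Object> params) {
        ArrayList<?> names = (ArrayList<?>) params.get("filter_names");
        return names.size();
    }

    public static void main(String[] args) {
        HashMap<String, Object> requestParams = new HashMap<>();
        ArrayList<String> filterNames = new ArrayList<>();
        ArrayList<Object> filterValues = new ArrayList<>();

        // first query
        filterNames.add("employeeid");
        filterValues.add(42);
        requestParams.put("filter_names", filterNames);
        requestParams.put("filter_values", filterValues);
        System.out.println("filters used: " + runQuery(requestParams));

        // reset all three collections before building the second query
        requestParams.clear();
        filterNames.clear();
        filterValues.clear();

        filterNames.add("userLogin.userName");
        filterValues.add("jdoe");
        requestParams.put("filter_names", filterNames);
        requestParams.put("filter_values", filterValues);
        System.out.println("filters used: " + runQuery(requestParams));
    }
}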

From source file:och.front.service.FrontAppTest.java

private void test_billing_blockUserAccs() throws Exception {

     MailServiceStub mailService = new MailServiceStub(mailSender, props);

     // push all existing accounts' tariff dates far into the future so they are not billed during this test
     Date longFuture = parseStandartDateTime("02.09.2040 3:00:00");
     universal.update(new UpdateAllChatAccounts(new TariffStart(longFuture), new TariffLastPay(longFuture)));

     MapProps paypalProps = new MapProps();
     paypalProps.putVal(paypal_sync_debug_DisableTimer, true);

     PaypalPaymentsSynchService paySync = new PaypalPaymentsSynchService();
     paySync.setCacheServerContext(new CacheServerContext(paypalProps, cacheSever, db, mailService));
     paySync.setClient(paypalClient);
     paySync.init();

     BillingSyncService billingSync = new BillingSyncService();
     billingSync.setCacheServerContext(new CacheServerContext(props, cacheSever, db, mailService));
     billingSync.init();

     ArrayList<Pair<Long, Boolean>> blockReqs = new ArrayList<>();
     BillingOps.SEND_ACCS_BLOCKED_LISTENER = (ownerId, val) -> blockReqs
             .add(new Pair<Long, Boolean>(ownerId, val));

     pushToSecurityContext_SYSTEM_USER();
     try {

         int tariffId = 2;
         long userId = userId4;
         String accUid = "billing_blockUserAccs";

         List<String> oldAccs = db.chats.getOwnerAccs(userId);
         assertTrue(oldAccs.size() > 0);

         //create acc
         chats.createAcc(serverId1, accUid, userId, "test_monthBill", tariffId);
         chats.setOperatorForAcc(accUid, userId);
         assertEquals(1, chats.getAccOperators(accUid).size());

         // phase 1: first billing, the balance does not cover the tariff and the accounts get blocked
         {
             correctBalance(userId, new BigDecimal(4.99d));
             assertEquals("4.99", billing.getUserBalance(userId).toString());

             BigDecimal initBalance = billing.getUserBalance(userId);
             String expAmount = "-5.00";
             assertFalse(findBalance(universal, userId).accsBlocked);

             Date pastPay = parseStandartDateTime("01.08.2014 00:00:00");
             Date now = parseStandartDateTime("02.09.2014 3:00:00");
             universal.update(
                     new UpdateChatAccountByUid(accUid, new TariffStart(pastPay), new TariffLastPay(pastPay)));
             assertEquals(1, billingSync.doSyncWork(false, now));
             assertEquals(expAmount, getDeltaVal(userId, initBalance));

             // the balance went negative and the accounts are now blocked
             assertEquals("-0.01", billing.getUserBalance(userId).toString());
             assertTrue(findBalance(universal, userId).accsBlocked);

             // the account's last-pay date was advanced
             assertEquals(parseStandartDateTime("01.09.2014 00:00:00"),
                     universal.selectOne(new GetChatAccount(accUid)).tariffLastPay);

             // the other (old) accounts were not billed: their last-pay dates are still in the far future
             for (String uid : oldAccs)
                 assertEquals(longFuture, universal.selectOne(new GetChatAccount(uid)).tariffLastPay);

             // a "blocked" notification was sent
             assertEquals(1, blockReqs.size());
             assertTrue(blockReqs.get(0).second);
             blockReqs.clear();

             // operations on blocked accounts must fail
             pushToSecurityContext(new User(userId));
             try {

                 // changing the tariff is rejected
                 try {
                     chats.updateAccTariffByUser(accUid, tariffId);
                     fail_exception_expected();
                 } catch (ChatAccountBlockedException e) {
                     //ok
                 }

                 // creating a new account is rejected
                 try {
                     chats.createAccByUser("some");
                     fail_exception_expected();
                 } catch (ChatAccountBlockedException e) {
                     //ok
                 }

                 // pausing the account is rejected
                 try {
                     chats.pauseAccByUser(accUid);
                     fail_exception_expected();
                 } catch (ChatAccountBlockedException e) {
                     //ok
                 }

                 // unpausing the account is rejected
                 try {
                     chats.unpauseAccByUser(accUid);
                     fail_exception_expected();
                 } catch (ChatAccountBlockedException e) {
                     //ok
                 }

             } finally {
                 popUserFromSecurityContext();
             }

             // every owner account is marked as blocked in the cache
             for (String uid : db.chats.getOwnerAccs(userId)) {
                 assertEquals("accUid=" + uid, true, isAccBlockedFromCache(cacheClient, uid));
             }

             // a freshly initialized sync service sees the same blocked state
             {
                 Cache newCache = new CacheImpl(0);
                 BillingSyncService otherBillingSync = new BillingSyncService();
                 otherBillingSync
                         .setCacheServerContext(new CacheServerContext(props, newCache, db, mailService));
                 otherBillingSync.init();
                 for (String uid : db.chats.getOwnerAccs(userId)) {
                     assertEquals(true, isAccBlockedFromCache(newCache, uid));
                 }
             }

             // blocked accounts are visible to the operator
             pushToSecurityContext(new User(userId));
             try {
                 assertTrue(chats.getBlockedAccs().size() > 0);
                 for (ChatAccount acc : chats.getAccsForOperator(userId)) {
                     assertEquals(true, acc.blocked);
                 }
             } finally {
                 popUserFromSecurityContext();
             }

         }

         // phase 2: next billing cycle, the accounts are already blocked and nothing more is charged
         {
             assertTrue(findBalance(universal, userId).accsBlocked);
             BigDecimal initBalance = billing.getUserBalance(userId);

             Date now = parseStandartDateTime("01.10.2014 3:00:00");
             assertEquals(1, billingSync.doSyncWork(false, now));
             assertEquals("0.00", getDeltaVal(userId, initBalance));
             assertEquals("-0.01", billing.getUserBalance(userId).toString());

             // still blocked, but the last-pay date was advanced
             assertTrue(findBalance(universal, userId).accsBlocked);
             assertEquals(parseStandartDateTime("01.10.2014 00:00:00"),
                     universal.selectOne(new GetChatAccount(accUid)).tariffLastPay);

             // the other accounts were again untouched
             for (String uid : oldAccs)
                 assertEquals(longFuture, universal.selectOne(new GetChatAccount(uid)).tariffLastPay);

             // no new "blocked" notification was sent
             assertEquals(0, blockReqs.size());

             // the accounts are still marked as blocked in the cache
             for (String uid : db.chats.getOwnerAccs(userId)) {
                 assertEquals(true, BillingOps.isAccBlockedFromCache(cacheClient, uid));
             }
         }

         // phase 3: paying off the debt unblocks the accounts
         {
             Date now = parseStandartDateTime("12.10.2014 15:45:00");
             pushToSecurityContext(new User(userId));
             try {
                 paypalClient.payAmount = new BigDecimal("0.01");
                 billing.sendPayReq(paypal_key.strDefVal(), paypalClient.payAmount);
                 billing.paypal_preparePayConfirm(randomSimpleId(), STUB_TOKEN);
                 billing.paypal_finishPayment(now);
             } finally {
                 popUserFromSecurityContext();
             }
             assertEquals("0.00", billing.getUserBalance(userId).toString());
             assertFalse(findBalance(universal, userId).accsBlocked);
             assertEquals(now, universal.selectOne(new GetChatAccount(accUid)).tariffLastPay);

             // the other accounts were again untouched
             for (String uid : oldAccs)
                 assertEquals(longFuture, universal.selectOne(new GetChatAccount(uid)).tariffLastPay);

             // an "unblocked" notification was sent
             assertEquals(1, blockReqs.size());
             assertFalse(blockReqs.get(0).second);
             blockReqs.clear();

             // the tariff can be changed again
             pushToSecurityContext(new User(userId));
             try {
                 chats.updateAccTariffByUser(accUid, tariffId);
             } finally {
                 popUserFromSecurityContext();
             }

             // the accounts are no longer marked as blocked in the cache
             for (String uid : db.chats.getOwnerAccs(userId)) {
                 assertEquals(false, BillingOps.isAccBlockedFromCache(cacheClient, uid));
             }

             // a freshly initialized sync service sees the same unblocked state
             {
                 Cache newCache = new CacheImpl(0);
                 BillingSyncService otherBillingSync = new BillingSyncService();
                 otherBillingSync
                         .setCacheServerContext(new CacheServerContext(props, newCache, db, mailService));
                 otherBillingSync.init();
                 for (String uid : db.chats.getOwnerAccs(userId)) {
                     assertEquals(false, isAccBlockedFromCache(newCache, uid));
                 }
             }

             // no blocked accounts are visible to the operator
             pushToSecurityContext(new User(userId));
             try {
                 assertTrue(chats.getBlockedAccs().size() == 0);
                 for (ChatAccount acc : chats.getAccsForOperator(userId)) {
                     assertEquals(false, acc.blocked);
                 }
             } finally {
                 popUserFromSecurityContext();
             }
         }

         // phase 4: the next billing drives the balance negative again and the accounts are blocked
         {
             assertFalse(findBalance(universal, userId).accsBlocked);
             BigDecimal initBalance = billing.getUserBalance(userId);

             Date now = parseStandartDateTime("01.11.2014 5:15:00");
             assertEquals(1, billingSync.doSyncWork(false, now));
             assertEquals("-3.12", getDeltaVal(userId, initBalance));
             assertEquals("-3.12", billing.getUserBalance(userId).toString());

             // blocked again, and the last-pay date was advanced
             assertTrue(findBalance(universal, userId).accsBlocked);
             assertEquals(parseStandartDateTime("01.11.2014 00:00:00"),
                     universal.selectOne(new GetChatAccount(accUid)).tariffLastPay);

             // the other accounts were again untouched
             for (String uid : oldAccs)
                 assertEquals(longFuture, universal.selectOne(new GetChatAccount(uid)).tariffLastPay);

             // a "blocked" notification was sent
             assertEquals(1, blockReqs.size());
             assertTrue(blockReqs.get(0).second);
             blockReqs.clear();

             // changing the tariff is rejected again
             pushToSecurityContext(new User(userId));
             try {
                 chats.updateAccTariffByUser(accUid, tariffId);
                 fail_exception_expected();
             } catch (ChatAccountBlockedException e) {
                 //ok
             } finally {
                 popUserFromSecurityContext();
             }
         }

         // phase 5: a payment that does not cover the debt does not unblock the accounts
         {
             assertTrue(findBalance(universal, userId).accsBlocked);
             Date oldLastPay = universal.selectOne(new GetChatAccount(accUid)).tariffLastPay;

             Date now = parseStandartDateTime("03.11.2014 13:12:00");
             pushToSecurityContext(new User(userId));
             try {
                 paypalClient.payAmount = new BigDecimal("3.11");
                 billing.sendPayReq(paypal_key.strDefVal(), paypalClient.payAmount);
                 billing.paypal_preparePayConfirm(randomSimpleId(), STUB_TOKEN);
                 billing.paypal_finishPayment(now);
             } finally {
                 popUserFromSecurityContext();
             }

             // still in debt: the accounts remain blocked and the last-pay date is unchanged
             assertEquals("-0.01", billing.getUserBalance(userId).toString());
             assertTrue(findBalance(universal, userId).accsBlocked);
             assertEquals(oldLastPay, universal.selectOne(new GetChatAccount(accUid)).tariffLastPay);

             assertEquals(0, blockReqs.size());
         }

         // phase 6: an externally completed PayPal payment is picked up by the payment sync and unblocks the accounts
         {
             assertTrue(findBalance(universal, userId).accsBlocked);

             BigDecimal payAmount = new BigDecimal("5.01");
             Date prev = parseStandartDateTime("04.11.2014 11:00:00");
             Date now = parseStandartDateTime("05.11.2014 13:12:00");
             long payId = universal.nextSeqFor(payments);
             universal.update(new CreatePayment(
                     new PaymentExt(payId, userId, PAYPAL, "somePay", WAIT, prev, prev, payAmount)));

             paypalClient.payAmount = payAmount;
             paypalClient.paymentHistory = list(new PaymentBase("somePay", COMPLETED));
             paypalClient.paymentId = "somePay";
             paypalClient.payment = new PaymentBase(paypalClient.paymentId, COMPLETED);
             paySync.doSyncWork(now);

             // the payment is now marked completed
             assertEquals(COMPLETED, universal.selectOne(new GetPaymentById(payId)).paymentStatus);

             // the balance is restored and the accounts are unblocked
             assertEquals("5.00", billing.getUserBalance(userId).toString());
             assertFalse(findBalance(universal, userId).accsBlocked);
             assertEquals(now, universal.selectOne(new GetChatAccount(accUid)).tariffLastPay);

             // the other accounts were again untouched
             for (String uid : oldAccs)
                 assertEquals(longFuture, universal.selectOne(new GetChatAccount(uid)).tariffLastPay);

             // an "unblocked" notification was sent
             assertEquals(1, blockReqs.size());
             assertFalse(blockReqs.get(0).second);
             blockReqs.clear();

         }

     } finally {
         popUserFromSecurityContext();
     }

     // restore tariff dates so other tests are unaffected
     Date now = new Date();
     universal.update(new UpdateAllChatAccounts(new TariffStart(now), new TariffLastPay(now)));
 }
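
Throughout the test, block/unblock notifications are captured in the blockReqs list and blockReqs.clear() is called at the end of each phase, so every assertEquals(1, blockReqs.size()) checks only the events raised by the current phase. A minimal sketch of that recording-listener pattern (types and names are hypothetical; run with -ea to enable the assertions):

import java.util.ArrayList;

public class RecordingListenerDemo {
    interface BlockListener { void onBlocked(long ownerId, boolean blocked); }

    public static void main(String[] args) {
        ArrayList<Boolean> blockReqs = new ArrayList<>();
        BlockListener listener = (ownerId, blocked) -> blockReqs.add(blocked);

        // phase 1: something triggers a block
        listener.onBlocked(7L, true);
        assert blockReqs.size() == 1 && blockReqs.get(0);
        blockReqs.clear(); // start the next phase with a clean slate

        // phase 2: something triggers an unblock
        listener.onBlocked(7L, false);
        assert blockReqs.size() == 1 && !blockReqs.get(0);
    }
}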

From source file:com.ntua.cosmos.hackathonplanneraas.Planner.java

/**
 * Method to search for Cases similar with those of the input.
 * @param w A List of doubles containing weights for Case problem parameters. If null, no special weight given to parameters.
 * @param prob A string arraylist with the values of the problem parameters.
 * @param params A string arraylist with the names of problem parameters as they appear in the ontology.
 * @param solutionat A string arraylist with the names of the solution parameters for the expected case.
 * @param thr A double containing the similarity threshold for Cases retrieval.
 * @param shareable A parameter indicating whether the result is destined for sharing purposes or not.
 * @return An arraylist with solution attribute values as well as the URI of the
 * recommended service and any message that may be contained.
 */
public ArrayList<String> searchSimilarCase(List<Double> w, ArrayList<String> prob, ArrayList<String> params,
        ArrayList<String> solutionat, double thr, boolean shareable) {
    System.out.println("****************************************************");
    System.out.println("We now begin the actual search for a similar problem");
    System.out.println("****************************************************\n\n");
    StoredPaths pan = new StoredPaths();
    if (w == null) {
        // no weights supplied: weight all problem parameters uniformly
        w = new ArrayList<>();
        double fraction = 1.0 / params.size();
        for (String param : params) {
            w.add(fraction);
        }
    }
    double prevsim = 0.0;
    boolean hasresult = false;
    ArrayList<String> solution = new ArrayList<>();
    ArrayList<String> tempbest = new ArrayList<>();
    String URI = "None", message = "None";
    int[] answer = new int[solutionat.size()];
    //Begin the initialisation process.
    OntModelSpec s = new OntModelSpec(PelletReasonerFactory.THE_SPEC);
    OntDocumentManager dm = OntDocumentManager.getInstance();
    dm.setFileManager(FileManager.get());
    s.setDocumentManager(dm);
    OntModel m = ModelFactory.createOntologyModel(s, null);
    InputStream in = FileManager.get().open(StoredPaths.casebasepath);
    if (in == null) {
        throw new IllegalArgumentException("File: " + StoredPaths.casebasepath + " not found");
    }
    // read the file
    m.read(in, null);
    //begin building query string.
    String queryString = pan.prefixrdf + pan.prefixowl + pan.prefixxsd + pan.prefixrdfs + pan.prefixCasePath;
    //add answer params to query string select
    queryString += "\nselect distinct ";
    for (int z = 0; z < params.size(); z++) {
        queryString += "?param" + z + " ";
    }
    for (int z = 0; z < solutionat.size(); z++) {
        queryString += "?answer" + z + " ";
    }
    //add URI and message params to query string select
    queryString += "?URI ?message where{";//expected return values, must all exist in ontology even if blank
    //add problem params and values to query
    for (int z = 0; z < params.size(); z++) {
        if (w.get(z) >= 0.1)
            queryString += "?prob base:" + params.get(z) + " ?param" + z + " . ";
        else
            queryString += "OPTIONAL{?prob base:" + params.get(z) + " ?param" + z + "} . ";
    }
    if (shareable)
        queryString += "?prob base:isShareable true . ";
    //connect problem and solution
    queryString += "?solution base:solves ?prob . ";
    //add solution params to query body
    for (int z = 0; z < solutionat.size(); z++) {
        queryString += "?solution base:" + solutionat.get(z) + " ?answer" + z + " . ";
    }
    //add params for uri and message
    queryString += "?solution base:URI ?URI . ?solution base:exposesMessage ?message}";
    System.out.println("********************************************************************");
    System.out.println("This is the dynamicaly generated query String for problem retrieval.");
    System.out.println(queryString);//print query string for checking purposes.
    System.out.println("********************************************************************\n\n");
    try {
        Query query = QueryFactory.create(queryString);
        QueryExecution qe = QueryExecutionFactory.create(query, m);
        ResultSet results = qe.execSelect();
        while (results.hasNext()) {
            double similarity = 0.0;
            QuerySolution soln = results.nextSolution();
            // Access variables: soln.get("x");
            Literal lit;
            ArrayList<Literal> literlistparam = new ArrayList<>();
            ArrayList<Literal> literlistsol = new ArrayList<>();
            for (int z = 0; z < params.size(); z++) {
                lit = soln.getLiteral("param" + z);// Get a result variable by name.
                literlistparam.add(lit);
            }
            List<Double> answers = new ArrayList<>();
            List<Double> a = new ArrayList<>();
            List<Double> b = new ArrayList<>();
            double length = 0.0;
            for (int z = 0; z < literlistparam.size(); z++) {
                a.add(Double.valueOf(prob.get(z)));
                // OPTIONAL query variables may be unbound, so guard against null literals
                Literal p = literlistparam.get(z);
                if (p != null && !p.getString().equalsIgnoreCase(""))
                    b.add(Double.valueOf(p.getString()));
                else
                    b.add(0.0);
            }
            for (int z = 0; z < literlistparam.size(); z++) {
                length += a.get(z) + b.get(z);
            }
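            // Per-parameter similarity below is 1 - |a - b| / length, where length is the
            // sum of all stored and queried parameter values; the weighted sum of these
            // scores (using w) then gives the total similarity of the candidate case.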

            System.out.println("********************************");
            System.out.println("Problem parameter similarities: ");
            for (int z = 0; z < literlistparam.size(); z++) {
                answers.add(1.0 - Math.abs(a.get(z) - b.get(z)) / length);
                System.out.println(answers.get(z));
            }
            System.out.println("********************************");

            for (int z = 0; z < answers.size(); z++) {
                similarity += w.get(z) * answers.get(z);
            }
            System.out.println("Total similarity from previous parameters: " + similarity);
            if (similarity > prevsim && similarity >= thr) {
                hasresult = true;
                for (int z = 0; z < solutionat.size(); z++) {
                    lit = soln.getLiteral("answer" + z);// Get a result variable by name.
                    literlistsol.add(lit);
                }
                Literal lit2 = soln.getLiteral("URI");
                Literal lit3 = soln.getLiteral("message");

                tempbest.clear();
                for (int z = 0; z < solutionat.size(); z++) {
                    if (literlistsol.get(z).isLiteral())
                        tempbest.add(literlistsol.get(z).getString());
                }
                if (lit2.isLiteral())
                    URI = lit2.getLexicalForm();
                if (lit3.isLiteral()) {
                    if (!lit3.getString().equalsIgnoreCase(""))
                        message = lit3.getString();
                } else {
                    message = "None";
                }
            }
            if (similarity > prevsim)
                prevsim = similarity;
        }
        qe.close();
    } catch (NumberFormatException e) {
        // thrown by Double.valueOf when a parameter value is not numeric
        System.out.println("Non-numeric parameter value encountered during similarity computation.");
    }
    m.close();
    if (!hasresult)
        solution.add("NOANSWER");
    else
        solution.addAll(tempbest);
    solution.add(String.valueOf(prevsim));
    solution.add(URI);
    solution.add(message);
    return solution;
}
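
A hedged usage sketch for the method above. The property names, problem values and threshold
are hypothetical, invented for illustration; real calls must use properties that exist in the
ontology loaded from StoredPaths.casebasepath (java.util.ArrayList and java.util.Arrays are
assumed to be imported):

Planner planner = new Planner();
ArrayList<String> prob = new ArrayList<>(Arrays.asList("2.0", "4.0"));          // problem parameter values
ArrayList<String> params = new ArrayList<>(Arrays.asList("hasCPU", "hasRAM"));  // hypothetical ontology properties
ArrayList<String> solutionat = new ArrayList<>(Arrays.asList("hasVM"));         // hypothetical solution property
// null weights -> uniform weighting; 0.8 similarity threshold; result not meant for sharing
ArrayList<String> result = planner.searchSimilarCase(null, prob, params, solutionat, 0.8, false);
// result holds the solution values (or "NOANSWER"), the best similarity, the URI and the message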