Example usage for java.util LinkedList removeFirst

Introduction

This page collects usage examples for java.util.LinkedList.removeFirst() from real-world source files.

Prototype

public E removeFirst() 

Document

Removes and returns the first element from this list.
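
Example

Before the longer real-world examples below, here is a minimal, self-contained sketch of the method; the class and variable names are illustrative only:

import java.util.LinkedList;

public class RemoveFirstDemo {
    public static void main(String[] args) {
        LinkedList<String> queue = new LinkedList<String>();
        queue.addLast("first");
        queue.addLast("second");

        // removeFirst() removes and returns the head (index 0) of the list
        String head = queue.removeFirst(); // "first"
        System.out.println(head + " / remaining: " + queue);

        // Calling removeFirst() on an empty list throws
        // java.util.NoSuchElementException, so guard with isEmpty()
        // when the list may be empty.
        while (!queue.isEmpty()) {
            System.out.println(queue.removeFirst());
        }
    }
}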

Usage

From source file:android.net.http.Connection.java

/**
 * Process requests in queue
 * pipelines requests
 */
void processRequests(Request firstRequest) {
    Request req = null;
    boolean empty;
    int error = EventHandler.OK;
    Exception exception = null;

    LinkedList<Request> pipe = new LinkedList<Request>();

    int minPipe = MIN_PIPE, maxPipe = MAX_PIPE;
    int state = SEND;

    while (state != DONE) {
        if (HttpLog.LOGV)
            HttpLog.v(states[state] + " pipe " + pipe.size());

        /* If a request was cancelled, give other cancel requests
           some time to go through so we don't uselessly restart
           connections */
        if (mActive == STATE_CANCEL_REQUESTED) {
            try {
                Thread.sleep(100);
            } catch (InterruptedException x) {
                /* ignore */
            }
            mActive = STATE_NORMAL;
        }

        switch (state) {
        case SEND: {
            if (pipe.size() == maxPipe) {
                state = READ;
                break;
            }
            /* get a request */
            if (firstRequest == null) {
                req = mRequestFeeder.getRequest(mHost);
            } else {
                req = firstRequest;
                firstRequest = null;
            }
            if (req == null) {
                state = DRAIN;
                break;
            }
            req.setConnection(this);

            /* Don't work on cancelled requests. */
            if (req.mCancelled) {
                if (HttpLog.LOGV)
                    HttpLog.v("processRequests(): skipping cancelled request " + req);
                req.complete();
                break;
            }

            if (mHttpClientConnection == null || !mHttpClientConnection.isOpen()) {
                /* If this call fails, the address is bad or
                   the net is down.  Punt for now.
                        
                   FIXME: blow out entire queue here on
                   connection failure if net up? */

                if (!openHttpConnection(req)) {
                    state = DONE;
                    break;
                }
            }

            try {
                /* FIXME: don't increment failure count if old
                   connection?  There should not be a penalty for
                   attempting to reuse an old connection */
                req.sendRequest(mHttpClientConnection);
            } catch (HttpException e) {
                exception = e;
                error = EventHandler.ERROR;
            } catch (IOException e) {
                exception = e;
                error = EventHandler.ERROR_IO;
            } catch (IllegalStateException e) {
                exception = e;
                error = EventHandler.ERROR_IO;
            }
            if (exception != null) {
                if (httpFailure(req, error, exception) && !req.mCancelled) {
                    /* retry request if not permanent failure
                       or cancelled */
                    pipe.addLast(req);
                }
                exception = null;
                state = clearPipe(pipe) ? DONE : SEND;
                minPipe = maxPipe = 1;
                break;
            }

            pipe.addLast(req);
            if (!mCanPersist)
                state = READ;
            break;

        }
        case DRAIN:
        case READ: {
            empty = !mRequestFeeder.haveRequest(mHost);
            int pipeSize = pipe.size();
            if (state != DRAIN && pipeSize < minPipe && !empty && mCanPersist) {
                state = SEND;
                break;
            } else if (pipeSize == 0) {
                /* Done if no other work to do */
                state = empty ? DONE : SEND;
                break;
            }

            req = (Request) pipe.removeFirst();
            if (HttpLog.LOGV)
                HttpLog.v("processRequests() reading " + req);

            try {
                req.readResponse(mHttpClientConnection);
            } catch (ParseException e) {
                exception = e;
                error = EventHandler.ERROR_IO;
            } catch (IOException e) {
                exception = e;
                error = EventHandler.ERROR_IO;
            } catch (IllegalStateException e) {
                exception = e;
                error = EventHandler.ERROR_IO;
            }
            if (exception != null) {
                if (httpFailure(req, error, exception) && !req.mCancelled) {
                    /* retry request if not permanent failure
                       or cancelled */
                    req.reset();
                    pipe.addFirst(req);
                }
                exception = null;
                mCanPersist = false;
            }
            if (!mCanPersist) {
                if (HttpLog.LOGV)
                    HttpLog.v("processRequests(): no persist, closing " + mHost);

                closeConnection();

                mHttpContext.removeAttribute(HTTP_CONNECTION);
                clearPipe(pipe);
                minPipe = maxPipe = 1;
                state = SEND;
            }
            break;
        }
        }
    }
}
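
In the example above, the LinkedList acts as a FIFO pipe of in-flight requests: each sent request is appended with addLast(), responses are consumed in send order with removeFirst(), and a retryable failure is pushed back with addFirst(). A stripped-down sketch of that queue discipline follows; the Request class and the retry flag here are placeholders, not the Android APIs:

import java.util.LinkedList;

public class PipeSketch {
    static class Request {
        final String name;
        Request(String name) { this.name = name; }
    }

    public static void main(String[] args) {
        LinkedList<Request> pipe = new LinkedList<Request>();

        // SEND: enqueue requests in the order they were written
        pipe.addLast(new Request("a"));
        pipe.addLast(new Request("b"));

        // READ: responses arrive in send order, so consume from the head
        Request req = pipe.removeFirst();

        // On a retryable failure, push the request back onto the head so
        // it is re-read before anything that was sent after it.
        boolean retryable = true; // assumed for the sketch
        if (retryable) {
            pipe.addFirst(req);
        }

        System.out.println("head of pipe: " + pipe.getFirst().name);
    }
}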

From source file:org.mycard.net.network.Connection.java

/***
 * Process requests in queue
 * pipelines requests
 */
void processRequests(Request firstRequest) {
    Request req = null;
    boolean empty;
    int error = EventHandler.OK;
    Exception exception = null;

    LinkedList<Request> pipe = new LinkedList<Request>();

    int minPipe = MIN_PIPE, maxPipe = MAX_PIPE;
    int state = SEND;

    while (state != DONE) {
        /** If a request was cancelled, give other cancel requests
           some time to go through so we don't uselessly restart
           connections */
        if (mActive == STATE_CANCEL_REQUESTED) {
            try {
                Thread.sleep(100);
            } catch (InterruptedException x) {
                /** ignore */
            }
            mActive = STATE_NORMAL;
        }

        switch (state) {
        case SEND: {
            if (pipe.size() == maxPipe) {
                state = READ;
                break;
            }
            /** get a request */
            if (firstRequest == null) {
                req = mRequestFeeder.getRequest(mHost);
            } else {
                req = firstRequest;
                firstRequest = null;
            }
            if (req == null) {
                state = DRAIN;
                break;
            }
            req.setConnection(this);

            /** Don't work on cancelled requests. */
            if (req.mCancelled) {
                req.complete();
                break;
            }

            if (mHttpClientConnection == null || !mHttpClientConnection.isOpen()) {
                /** If this call fails, the address is bad or
                   the net is down.  Punt for now.
                        
                   FIXME: blow out entire queue here on
                   connection failure if net up? */

                if (!openHttpConnection(req)) {
                    state = DONE;
                    break;
                }
            }

            /** we have a connection, let the event handler
             * know of any associated certificate,
             * potentially none.
             */
            //req.mEventHandler.certificate(mCertificate);

            try {
                /** FIXME: don't increment failure count if old
                   connection?  There should not be a penalty for
                   attempting to reuse an old connection */
                req.sendRequest(mHttpClientConnection);
            } catch (HttpException e) {
                exception = e;
                error = EventHandler.ERROR;
            } catch (IOException e) {
                exception = e;
                error = EventHandler.ERROR_IO;
            } catch (IllegalStateException e) {
                exception = e;
                error = EventHandler.ERROR_IO;
            }
            if (exception != null) {
                if (httpFailure(req, error, exception) && !req.mCancelled) {
                    /** retry request if not permanent failure
                       or cancelled */
                    pipe.addLast(req);
                }
                exception = null;
                state = clearPipe(pipe) ? DONE : SEND;
                minPipe = maxPipe = 1;
                break;
            }

            pipe.addLast(req);
            if (!mCanPersist)
                state = READ;
            break;

        }
        case DRAIN:
        case READ: {
            empty = !mRequestFeeder.haveRequest(mHost);
            int pipeSize = pipe.size();
            if (state != DRAIN && pipeSize < minPipe && !empty && mCanPersist) {
                state = SEND;
                break;
            } else if (pipeSize == 0) {
                /** Done if no other work to do */
                state = empty ? DONE : SEND;
                break;
            }

            req = (Request) pipe.removeFirst();

            try {
                req.readResponse(mHttpClientConnection);
            } catch (ParseException e) {
                exception = e;
                error = EventHandler.ERROR_IO;
            } catch (IOException e) {
                exception = e;
                error = EventHandler.ERROR_IO;
            } catch (IllegalStateException e) {
                exception = e;
                error = EventHandler.ERROR_IO;
            }
            if (exception != null) {
                if (httpFailure(req, error, exception) && !req.mCancelled) {
                    /** retry request if not permanent failure
                       or cancelled */
                    req.reset();
                    pipe.addFirst(req);
                }
                exception = null;
                mCanPersist = false;
            }
            if (!mCanPersist) {

                closeConnection();

                mHttpContext.removeAttribute(HTTP_CONNECTION);
                clearPipe(pipe);
                minPipe = maxPipe = 1;
                state = SEND;
            }
            break;
        }
        }
    }
}

From source file:elh.eus.absa.Features.java

/**
 *   Function fills the attribute vectors for the instances existing in the corpus given. 
 *   Attribute vectors contain the features loaded by the creatFeatureSet() function.
 *
 * @param boolean save : whether the Instances file should be saved to an arff file or not.
 * @return Weka Instances object containing the attribute vectors filled with the features specified
 *          in the parameter file.
 */
public Instances loadInstances(boolean save, String prefix) throws IOException {
    String savePath = params.getProperty("fVectorDir") + File.separator + "arff" + File.separator + "train_"
            + prefix;
    HashMap<String, Opinion> trainExamples = corpus.getOpinions();

    int trainExamplesNum = trainExamples.size();

    int bowWin = 0;
    if (params.containsKey("window")) {
        bowWin = Integer.parseInt(params.getProperty("window"));
        savePath = savePath + "_w" + bowWin;
    }

    //Properties posProp = new Properties();
    //eus.ixa.ixa.pipe.pos.Annotate postagger = new eus.ixa.ixa.pipe.pos.Annotate(posProp);      
    if (params.containsKey("lemmaNgrams")) {
        Properties posProp = NLPpipelineWrapper.setPostaggerProperties(params.getProperty("pos-model"),
                corpus.getLang(), "3", "bin", "false");

        postagger = new eus.ixa.ixa.pipe.pos.Annotate(posProp);
    }

    //System.out.println("train examples: "+trainExamplesNum);
    //Create the Weka object for the training set
    Instances rsltdata = new Instances("train", atts, trainExamplesNum);

    // setting class attribute (last attribute in train data)
    //traindata.setClassIndex(traindata.numAttributes() - 1);

    System.err.println("Features: loadInstances() - featNum: " + this.featNum + " - trainset attrib num -> "
            + rsltdata.numAttributes() + " - ");
    System.out.println("Features: loadInstances() - featNum: " + this.featNum + " - trainset attrib num -> "
            + rsltdata.numAttributes() + " - ");

    int instId = 1;
    // fill the vectors for each training example
    for (String oId : trainExamples.keySet()) {
        //System.err.println("sentence: "+ corpus.getOpinionSentence(o.getId()));

        //value vector
        double[] values = new double[featNum];

        // first element is the instanceId         
        values[rsltdata.attribute("instanceId").index()] = instId;

        // string normalization (emoticons, twitter grammar,...)
        String opNormalized = corpus.getOpinionSentence(oId);

        // compute uppercase ratio before normalization (if needed)      
        double upRatio = 0.0;
        if (params.getProperty("upperCaseRatio", "no").equalsIgnoreCase("yes")) {
            String upper = opNormalized.replaceAll("[\\p{Ll}]", "");
            upRatio = (double) upper.length() / (double) opNormalized.length();
            values[rsltdata.attribute("upperCaseRation").index()] = upRatio;
        }

        // string normalization (emoticons, twitter grammar,...)
        if ((params.containsKey("wfngrams") || params.containsKey("lemmaNgrams"))
                && (!params.getProperty("normalization", "none").equalsIgnoreCase("noEmot"))) {
            opNormalized = normalize(opNormalized, params.getProperty("normalization", "none"));
        }

        //process the current instance with the NLP pipeline in order to get token and lemma|pos features
        KAFDocument nafinst = new KAFDocument("", "");
        String nafname = trainExamples.get(oId).getsId().replace(':', '_');
        String nafDir = params.getProperty("kafDir");
        String nafPath = nafDir + File.separator + nafname + ".kaf";
        //counter for opinion sentence token number. Used for computing relative values of the features
        int tokNum = 1;
        try {
            if (params.containsKey("lemmaNgrams")) //(lemmaNgrams != null) && (!lemmaNgrams.isEmpty()))
            {
                if (FileUtilsElh.checkFile(nafPath)) {
                    nafinst = KAFDocument.createFromFile(new File(nafPath));
                } else {
                    nafinst = NLPpipelineWrapper.ixaPipesTokPos(opNormalized, corpus.getLang(),
                            params.getProperty("pos-model"), postagger);
                    Files.createDirectories(Paths.get(nafDir));
                    nafinst.save(nafPath);
                }
                tokNum = nafinst.getWFs().size();
                //System.err.println("Features::loadInstances - postagging opinion sentence ("+oId+") - "+corpus.getOpinionSentence(oId));
            } else {
                if (FileUtilsElh.checkFile(nafPath)) {
                    nafinst = KAFDocument.createFromFile(new File(nafPath));
                } else {
                    nafinst = NLPpipelineWrapper.ixaPipesTok(opNormalized, corpus.getLang());
                }
                tokNum = nafinst.getWFs().size();
                //System.err.println("Features::loadInstances - tokenizing opinion sentence ("+oId+") - "+corpus.getOpinionSentence(oId));

            }
        } catch (IOException | JDOMException e) {
            System.err.println("Features::loadInstances() - error when NLP processing the instance " + instId
                    + "|" + oId + ") for filling the attribute vector");
            e.printStackTrace();
            System.exit(5);
        }

        LinkedList<String> ngrams = new LinkedList<String>();
        int ngramDim;
        try {
            ngramDim = Integer.valueOf(params.getProperty("wfngrams"));
        } catch (Exception e) {
            ngramDim = 0;
        }

        boolean polNgrams = false;
        if (params.containsKey("polNgrams")) {
            polNgrams = params.getProperty("polNgrams").equalsIgnoreCase("yes");
        }

        List<WF> window = nafinst.getWFs();
        Integer end = corpus.getOpinion(oId).getTo();
        // apply window if window active (>0) and if the target is not null (to=0)
        if ((bowWin > 0) && (end > 0)) {
            Integer start = corpus.getOpinion(oId).getFrom();
            Integer to = window.size();
            Integer from = 0;
            end++;
            for (int i = 0; i < window.size(); i++) {
                WF wf = window.get(i);
                if ((wf.getOffset() == start) && (i >= bowWin)) {
                    from = i - bowWin;
                } else if (wf.getOffset() >= end) {
                    if (i + bowWin < window.size()) {
                        to = i + bowWin;
                    }
                    break;
                }
            }
            window = window.subList(from, to);
            //System.out.println("startTgt: "+start+" - from: "+from+" | endTrgt:"+(end-1)+" - to:"+to);
        }

        //System.out.println("Sentence: "+corpus.getOpinionSentence(oId)+" - target: "+corpus.getOpinion(oId).getTarget()+
        //      "\n window: from-> "+window.get(0).getForm()+" to-> "+window.get(window.size()-1)+" .\n");

        List<String> windowWFIds = new ArrayList<String>();

        // word form ngram related features
        for (WF wf : window) {
            windowWFIds.add(wf.getId());

            String wfStr = wf.getForm();
            if (params.containsKey("wfngrams") && ngramDim > 0) {
                if (!savePath.contains("_wf" + ngramDim)) {
                    savePath = savePath + "_wf" + ngramDim;
                }
                //if the current word form is in the ngram list activate the feature in the vector
                if (ngrams.size() >= ngramDim) {
                    ngrams.removeFirst();
                }
                ngrams.add(wfStr);

                // add ngrams to the feature vector
                checkNgramFeatures(ngrams, values, "wf", 1, false); //toknum

            }
            // Clark cluster info corresponding to the current word form
            if (params.containsKey("clark") && attributeSets.get("ClarkCl").containsKey(wfStr)) {
                if (!savePath.contains("_cl")) {
                    savePath = savePath + "_cl";
                }
                values[rsltdata.attribute("ClarkClId_" + attributeSets.get("ClarkCl").get(wfStr)).index()]++;
            }

            // Clark cluster info corresponding to the current word form
            if (params.containsKey("brown") && attributeSets.get("BrownCl").containsKey(wfStr)) {
                if (!savePath.contains("_br")) {
                    savePath = savePath + "_br";
                }
                values[rsltdata.attribute("BrownClId_" + attributeSets.get("BrownCl").get(wfStr)).index()]++;
            }

            // Clark cluster info corresponding to the current word form
            if (params.containsKey("word2vec") && attributeSets.get("w2vCl").containsKey(wfStr)) {
                if (!savePath.contains("_w2v")) {
                    savePath = savePath + "_w2v";
                }
                values[rsltdata.attribute("w2vClId_" + attributeSets.get("w2vCl").get(wfStr)).index()]++;
            }

        }

        //empty ngram list and add remaining ngrams to the feature list
        checkNgramFeatures(ngrams, values, "wf", 1, true); //toknum

        // PoS tagger related attributes: lemmas and pos tags
        if (params.containsKey("lemmaNgrams")
                || (params.containsKey("pos") && !params.getProperty("pos").equalsIgnoreCase("0"))
                || params.containsKey("polarLexiconGeneral") || params.containsKey("polarLexiconDomain")) {
            ngrams = new LinkedList<String>();
            if (params.containsKey("lemmaNgrams")
                    && (!params.getProperty("lemmaNgrams").equalsIgnoreCase("0"))) {
                ngramDim = Integer.valueOf(params.getProperty("lemmaNgrams"));
            } else {
                ngramDim = 3;
            }
            LinkedList<String> posNgrams = new LinkedList<String>();
            int posNgramDim = 0;
            if (params.containsKey("pos")) {
                posNgramDim = Integer.valueOf(params.getProperty("pos"));
            }

            for (Term t : nafinst.getTermsFromWFs(windowWFIds)) {
                //lemmas // && (!params.getProperty("lemmaNgrams").equalsIgnoreCase("0"))
                if ((params.containsKey("lemmaNgrams")) || params.containsKey("polarLexiconGeneral")
                        || params.containsKey("polarLexiconDomain")) {
                    if (!savePath.contains("_l" + ngramDim)) {
                        savePath = savePath + "_l" + ngramDim;
                    }

                    String lemma = t.getLemma();

                    if (ngrams.size() >= ngramDim) {
                        ngrams.removeFirst();
                    }
                    ngrams.add(lemma);

                    // add ngrams to the feature vector
                    for (int i = 0; i < ngrams.size(); i++) {
                        String ng = featureFromArray(ngrams.subList(0, i + 1), "lemma");
                        //if the current lemma is in the ngram list activate the feature in the vector
                        if (params.containsKey("lemmaNgrams")
                                && (!params.getProperty("lemmaNgrams").equalsIgnoreCase("0"))) {
                            Attribute ngAtt = rsltdata.attribute(ng);
                            if (ngAtt != null) {
                                addNumericToFeatureVector(ng, values, 1); //tokNum                     
                            }
                        }

                        ng = featureFromArray(ngrams.subList(0, i + 1), "");
                        if (params.containsKey("polarLexiconGeneral")
                                || params.containsKey("polarLexiconDomain")) {
                            checkPolarityLexicons(ng, values, tokNum, polNgrams);
                        } //end polarity ngram checker
                    } //end ngram checking                                      
                }
                //pos tags
                if (params.containsKey("pos") && !params.getProperty("pos").equalsIgnoreCase("0")) {
                    if (!savePath.contains("_p")) {
                        savePath = savePath + "_p";
                    }

                    if (posNgrams.size() >= posNgramDim) {
                        posNgrams.removeFirst();
                    }
                    posNgrams.add(t.getPos());

                    // add ngrams to the feature vector
                    checkNgramFeatures(posNgrams, values, "pos", 1, false);
                }
            } //endFor

            //empty ngram list and add remaining ngrams to the feature list
            while (!ngrams.isEmpty()) {
                String ng = featureFromArray(ngrams, "lemma");

                //if the current lemma is in the ngram list activate the feature in the vector
                if (rsltdata.attribute(ng) != null) {
                    addNumericToFeatureVector(ng, values, 1); //tokNum
                }

                // polarity lexicons
                if (params.containsKey("polarLexiconGeneral") || params.containsKey("polarLexiconDomain")) {
                    checkPolarityLexicons(ng, values, tokNum, polNgrams);
                } //end polarity ngram checker

                ngrams.removeFirst();
            }

            //empty pos ngram list and add remaining pos ngrams to the feature list
            checkNgramFeatures(posNgrams, values, "pos", 1, true);

        }

        // add sentence length as a feature
        if (params.containsKey("sentenceLength")
                && (!params.getProperty("sentenceLength").equalsIgnoreCase("no"))) {
            values[rsltdata.attribute("sentenceLength").index()] = tokNum;
        }

        //create object for the current instance and associate it with the current train dataset.         
        Instance inst = new SparseInstance(1.0, values);
        inst.setDataset(rsltdata);

        // add category attributte values
        String cat = trainExamples.get(oId).getCategory();

        if (params.containsKey("categories") && params.getProperty("categories").compareTo("E&A") == 0) {
            if (cat.compareTo("NULL") == 0) {
                inst.setValue(rsltdata.attribute("entCat").index(), cat);
                inst.setValue(rsltdata.attribute("attCat").index(), cat);
            } else {
                String[] splitCat = cat.split("#");
                inst.setValue(rsltdata.attribute("entCat").index(), splitCat[0]);
                inst.setValue(rsltdata.attribute("attCat").index(), splitCat[1]);
            }

            //inst.setValue(attIndexes.get("entAttCat"), cat);
        } else if (params.containsKey("categories") && params.getProperty("categories").compareTo("E#A") == 0) {
            inst.setValue(rsltdata.attribute("entAttCat").index(), cat);
        }

        if (params.containsKey("polarity") && params.getProperty("polarity").compareTo("yes") == 0) {
            // add class value as a double (Weka stores all values as doubles )
            String pol = normalizePolarity(trainExamples.get(oId).getPolarity());
            //System.err.println("Features::loadInstances - pol "+pol+" for oid "+oId+" - text:"+corpus.getOpinionSentence(oId));
            if (pol != null && !pol.isEmpty()) {
                //System.err.println("polarity: _"+pol+"_");
                inst.setValue(rsltdata.attribute("polarityCat"), pol);
            } else {
                inst.setMissing(rsltdata.attribute("polarityCat"));
            }
        }

        //add instance to train data
        rsltdata.add(inst);

        //store opinion Id and instance Id
        this.opInst.put(oId, instId);
        instId++;
    }

    System.err.println("Features : loadInstances() - training data ready total number of examples -> "
            + trainExamplesNum + " - " + rsltdata.numInstances());

    if (save) {
        try {
            savePath = savePath + ".arff";
            System.err.println("arff written to: " + savePath);
            ArffSaver saver = new ArffSaver();

            saver.setInstances(rsltdata);

            saver.setFile(new File(savePath));
            saver.writeBatch();
        } catch (IOException e1) {
            e1.printStackTrace();
        } catch (Exception e2) {
            e2.printStackTrace();
        }
    }
    return rsltdata;
}
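
The recurring removeFirst() idiom in this example is a bounded sliding window over the token stream: once the list holds ngramDim tokens, the oldest is evicted from the front before the next token is appended, and at the end the window is drained from the front to flush the remaining n-grams. A minimal sketch of that idiom; the tokens and window size are invented for illustration:

import java.util.LinkedList;

public class NgramWindowSketch {
    public static void main(String[] args) {
        int ngramDim = 3; // n-gram order, illustrative
        String[] tokens = { "the", "quick", "brown", "fox", "jumps" };

        LinkedList<String> ngrams = new LinkedList<String>();
        for (String tok : tokens) {
            // evict the oldest token once the window is full
            if (ngrams.size() >= ngramDim) {
                ngrams.removeFirst();
            }
            ngrams.add(tok);
            System.out.println("window: " + ngrams);
        }

        // flush: shrink the window from the front, emitting the shorter
        // n-grams that remain, as loadInstances() does at the end
        while (!ngrams.isEmpty()) {
            System.out.println("flush: " + ngrams);
            ngrams.removeFirst();
        }
    }
}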

From source file:com.joliciel.talismane.TalismaneImpl.java

public void analyse(TalismaneConfig config) {
    try {
        if (config.needsSentenceDetector()) {
            if (config.getSentenceDetector() == null) {
                throw new TalismaneException("Sentence detector not provided.");
            }
        }
        if (config.needsTokeniser()) {
            if (config.getTokeniser() == null) {
                throw new TalismaneException("Tokeniser not provided.");
            }
        }
        if (config.needsPosTagger()) {
            if (config.getPosTagger() == null) {
                throw new TalismaneException("Pos-tagger not provided.");
            }
        }
        if (config.needsParser()) {
            if (config.getParser() == null) {
                throw new TalismaneException("Parser not provided.");
            }
        }

        if (config.getEndModule().equals(Module.SentenceDetector)) {
            if (this.getSentenceProcessor() == null) {
                throw new TalismaneException(
                        "No sentence processor provided with sentence detector end module, cannot generate output.");
            }
        }
        if (config.getEndModule().equals(Module.Tokeniser)) {
            if (this.getTokenSequenceProcessor() == null) {
                throw new TalismaneException(
                        "No token sequence processor provided with tokeniser end module, cannot generate output.");
            }
        }
        if (config.getEndModule().equals(Module.PosTagger)) {
            if (this.getPosTagSequenceProcessor() == null) {
                throw new TalismaneException(
                        "No postag sequence processor provided with pos-tagger end module, cannot generate output.");
            }
        }
        if (config.getEndModule().equals(Module.Parser)) {
            if (this.getParseConfigurationProcessor() == null) {
                throw new TalismaneException(
                        "No parse configuration processor provided with parser end module, cannot generate output.");
            }
        }

        LinkedList<String> textSegments = new LinkedList<String>();
        LinkedList<Sentence> sentences = new LinkedList<Sentence>();
        TokenSequence tokenSequence = null;
        PosTagSequence posTagSequence = null;

        RollingSentenceProcessor rollingSentenceProcessor = this.getFilterService()
                .getRollingSentenceProcessor(config.getFileName(), config.isProcessByDefault());
        Sentence leftover = null;
        if (config.getStartModule().equals(Module.SentenceDetector)
                || config.getStartModule().equals(Module.Tokeniser)) {
            // prime the sentence detector with two text segments, to ensure everything gets processed
            textSegments.addLast("");
            textSegments.addLast("");
        }

        StringBuilder stringBuilder = new StringBuilder();
        boolean finished = false;
        int sentenceCount = 0;

        String prevProcessedText = "";
        String processedText = "";
        String nextProcessedText = "";
        SentenceHolder prevSentenceHolder = null;

        int endBlockCharacterCount = 0;

        while (!finished) {
            if (config.getStartModule().equals(Module.SentenceDetector)
                    || config.getStartModule().equals(Module.Tokeniser)) {
                // Note SentenceDetector and Tokeniser start modules treated identically,
                // except that for SentenceDetector we apply a probabilistic sentence detector
                // whereas for Tokeniser we assume all sentence breaks are marked by filters

                // read characters from the reader, one at a time
                char c;
                int r = -1;
                try {
                    r = this.getReader().read();
                } catch (IOException e) {
                    LogUtils.logError(LOG, e);
                }

                if (r == -1) {
                    finished = true;
                    c = '\n';
                } else {
                    c = (char) r;
                }

                // Jump out if we have 3 consecutive end-block characters.
                if (c == config.getEndBlockCharacter()) {
                    endBlockCharacterCount++;
                    if (endBlockCharacterCount == 3) {
                        LOG.info("Three consecutive end-block characters. Exiting.");
                        finished = true;
                    }
                } else {
                    endBlockCharacterCount = 0;
                }

                // have sentence detector
                if (finished || (Character.isWhitespace(c) && stringBuilder.length() > config.getBlockSize())
                        || c == config.getEndBlockCharacter()) {
                    if (c == config.getEndBlockCharacter())
                        stringBuilder.append(c);
                    if (stringBuilder.length() > 0) {
                        String textSegment = stringBuilder.toString();
                        stringBuilder = new StringBuilder();

                        textSegments.add(textSegment);
                    } // is the current block > 0 characters?
                    if (c == config.getEndBlockCharacter()) {
                        textSegments.addLast("");
                    }
                } // is there a next block available?

                if (finished) {
                    if (stringBuilder.length() > 0) {
                        textSegments.addLast(stringBuilder.toString());
                        stringBuilder = new StringBuilder();
                    }
                    textSegments.addLast("");
                    textSegments.addLast("");
                    textSegments.addLast("");
                }

                if (c != config.getEndBlockCharacter())
                    stringBuilder.append(c);

                while (textSegments.size() >= 3) {
                    String prevText = textSegments.removeFirst();
                    String text = textSegments.removeFirst();
                    String nextText = textSegments.removeFirst();
                    if (LOG.isTraceEnabled()) {
                        LOG.trace("prevText: " + prevText);
                        LOG.trace("text: " + text);
                        LOG.trace("nextText: " + nextText);
                    }

                    Set<TextMarker> textMarkers = new TreeSet<TextMarker>();
                    for (TextMarkerFilter textMarkerFilter : config.getTextMarkerFilters()) {
                        Set<TextMarker> result = textMarkerFilter.apply(prevText, text, nextText);
                        textMarkers.addAll(result);
                    }

                    // push the text segments back onto the beginning of Deque
                    textSegments.addFirst(nextText);
                    textSegments.addFirst(text);

                    SentenceHolder sentenceHolder = rollingSentenceProcessor.addNextSegment(text, textMarkers);
                    prevProcessedText = processedText;
                    processedText = nextProcessedText;
                    nextProcessedText = sentenceHolder.getText();

                    if (LOG.isTraceEnabled()) {
                        LOG.trace("prevProcessedText: " + prevProcessedText);
                        LOG.trace("processedText: " + processedText);
                        LOG.trace("nextProcessedText: " + nextProcessedText);
                    }

                    boolean reallyFinished = finished && textSegments.size() == 3;

                    if (prevSentenceHolder != null) {
                        if (config.getStartModule().equals(Module.SentenceDetector)) {
                            List<Integer> sentenceBreaks = config.getSentenceDetector()
                                    .detectSentences(prevProcessedText, processedText, nextProcessedText);
                            for (int sentenceBreak : sentenceBreaks) {
                                prevSentenceHolder.addSentenceBoundary(sentenceBreak);
                            }
                        }

                        List<Sentence> theSentences = prevSentenceHolder.getDetectedSentences(leftover);
                        leftover = null;
                        for (Sentence sentence : theSentences) {
                            if (sentence.isComplete() || reallyFinished) {
                                sentences.add(sentence);
                                sentenceCount++;
                            } else {
                                LOG.debug("Setting leftover to: " + sentence.getText());
                                leftover = sentence;
                            }
                        }
                        if (config.getMaxSentenceCount() > 0 && sentenceCount >= config.getMaxSentenceCount()) {
                            finished = true;
                        }
                    }
                    prevSentenceHolder = sentenceHolder;
                } // we have at least 3 text segments (should always be the case once we get started)
            } else if (config.getStartModule().equals(Module.PosTagger)) {
                if (config.getTokenCorpusReader().hasNextTokenSequence()) {
                    tokenSequence = config.getTokenCorpusReader().nextTokenSequence();
                } else {
                    tokenSequence = null;
                    finished = true;
                }
            } else if (config.getStartModule().equals(Module.Parser)) {
                if (config.getPosTagCorpusReader().hasNextPosTagSequence()) {
                    posTagSequence = config.getPosTagCorpusReader().nextPosTagSequence();
                } else {
                    posTagSequence = null;
                    finished = true;
                }
            } // which start module?

            boolean needToProcess = false;
            if (config.getStartModule().equals(Module.SentenceDetector)
                    || config.getStartModule().equals(Module.Tokeniser))
                needToProcess = !sentences.isEmpty();
            else if (config.getStartModule().equals(Module.PosTagger))
                needToProcess = tokenSequence != null;
            else if (config.getStartModule().equals(Module.Parser))
                needToProcess = posTagSequence != null;

            while (needToProcess) {
                Sentence sentence = null;
                if (config.getStartModule().compareTo(Module.Tokeniser) <= 0
                        && config.getEndModule().compareTo(Module.SentenceDetector) >= 0) {
                    sentence = sentences.poll();
                    LOG.debug("Sentence: " + sentence);
                    if (this.getSentenceProcessor() != null)
                        this.getSentenceProcessor().onNextSentence(sentence.getText(), this.getWriter());
                } // need to read next sentence

                List<TokenSequence> tokenSequences = null;
                if (config.needsTokeniser()) {
                    tokenSequences = config.getTokeniser().tokenise(sentence);
                    tokenSequence = tokenSequences.get(0);

                    if (this.getTokenSequenceProcessor() != null) {
                        this.getTokenSequenceProcessor().onNextTokenSequence(tokenSequence, this.getWriter());
                    }
                } // need to tokenise ?

                List<PosTagSequence> posTagSequences = null;
                if (config.needsPosTagger()) {
                    posTagSequence = null;
                    if (tokenSequences == null || !config.isPropagateTokeniserBeam()) {
                        tokenSequences = new ArrayList<TokenSequence>();
                        tokenSequences.add(tokenSequence);
                    }

                    if (config.getPosTagger() instanceof NonDeterministicPosTagger) {
                        NonDeterministicPosTagger nonDeterministicPosTagger = (NonDeterministicPosTagger) config
                                .getPosTagger();
                        posTagSequences = nonDeterministicPosTagger.tagSentence(tokenSequences);
                        posTagSequence = posTagSequences.get(0);
                    } else {
                        posTagSequence = config.getPosTagger().tagSentence(tokenSequence);
                    }

                    if (posTagSequenceProcessor != null) {
                        posTagSequenceProcessor.onNextPosTagSequence(posTagSequence, this.getWriter());
                    }

                    tokenSequence = null;
                } // need to postag

                if (config.needsParser()) {
                    if (posTagSequences == null || !config.isPropagatePosTaggerBeam()) {
                        posTagSequences = new ArrayList<PosTagSequence>();
                        posTagSequences.add(posTagSequence);
                    }

                    ParseConfiguration parseConfiguration = null;
                    List<ParseConfiguration> parseConfigurations = null;
                    try {
                        if (config.getParser() instanceof NonDeterministicParser) {
                            NonDeterministicParser nonDeterministicParser = (NonDeterministicParser) config
                                    .getParser();
                            parseConfigurations = nonDeterministicParser.parseSentence(posTagSequences);
                            parseConfiguration = parseConfigurations.get(0);
                        } else {
                            parseConfiguration = config.getParser().parseSentence(posTagSequence);
                        }

                        if (this.getParseConfigurationProcessor() != null) {
                            this.getParseConfigurationProcessor().onNextParseConfiguration(parseConfiguration,
                                    this.getWriter());
                        }
                    } catch (Exception e) {
                        LOG.error(e);
                        if (stopOnError)
                            throw new RuntimeException(e);
                    }
                    posTagSequence = null;
                } // need to parse

                if (config.getStartModule().equals(Module.SentenceDetector)
                        || config.getStartModule().equals(Module.Tokeniser))
                    needToProcess = !sentences.isEmpty();
                else if (config.getStartModule().equals(Module.PosTagger))
                    needToProcess = tokenSequence != null;
                else if (config.getStartModule().equals(Module.Parser))
                    needToProcess = posTagSequence != null;
            } // next sentence
        } // next character
    } finally {
        if (this.getParseConfigurationProcessor() != null) {
            this.getParseConfigurationProcessor().onCompleteParse();
        }

        try {
            this.getReader().close();
            this.getWriter().flush();
            this.getWriter().close();
        } catch (IOException ioe2) {
            LOG.error(ioe2);
            throw new RuntimeException(ioe2);
        }

    }
}
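
In analyse(), the LinkedList of text segments is used as a deque: three segments are popped with removeFirst() to form a (previous, current, next) window, and the last two are immediately pushed back with addFirst(), so each pass through the loop consumes exactly one segment. A reduced sketch of that windowing trick; the segment contents are invented:

import java.util.LinkedList;

public class SegmentWindowSketch {
    public static void main(String[] args) {
        LinkedList<String> textSegments = new LinkedList<String>();
        // prime with an empty segment, then add the real text blocks
        textSegments.addLast("");
        textSegments.addLast("seg1");
        textSegments.addLast("seg2");
        textSegments.addLast("seg3");

        while (textSegments.size() >= 3) {
            // pop a three-segment window off the front...
            String prevText = textSegments.removeFirst();
            String text = textSegments.removeFirst();
            String nextText = textSegments.removeFirst();
            System.out.println(prevText + " | " + text + " | " + nextText);

            // ...then push two back, so only one segment is consumed
            textSegments.addFirst(nextText);
            textSegments.addFirst(text);
        }
    }
}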

From source file:org.hammurapi.HammurapiTask.java

public void execute() throws BuildException {
    long started = System.currentTimeMillis();

    if (!suppressLogo) {
        log("Hammurapi 3.18.4 Copyright (C) 2004 Hammurapi Group");
    }

    File archiveTmpDir = processArchive();

    try {
        Logger logger = new AntLogger(this);

        final VisitorStack[] visitorStack = { null };
        final VisitorStackSource visitorStackSource = new VisitorStackSource() {
            public VisitorStack getVisitorStack() {
                return visitorStack[0];
            }
        };

        final SessionImpl reviewSession = new SessionImpl();

        InspectorSet inspectorSet = new InspectorSet(new InspectorContextFactory() {
            public InspectorContext newContext(InspectorDescriptor descriptor, Logger logger) {
                return new InspectorContextImpl(descriptor, logger, visitorStackSource, reviewSession,
                        violationFilters);
            }
        }, logger);

        if (embeddedInspectors) {
            log("Loading embedded inspectors", Project.MSG_VERBOSE);
            loadEmbeddedInspectors(inspectorSet);
        }

        log("Loading inspectors", Project.MSG_VERBOSE);
        Iterator it = inspectors.iterator();
        while (it.hasNext()) {
            Object o = it.next();
            if (o instanceof InspectorSource) {
                ((InspectorSource) o).loadInspectors(inspectorSet);
            } else {
                InspectorEntry inspectorEntry = (InspectorEntry) o;
                inspectorSet.addDescriptor(inspectorEntry);
                inspectorSet.addInspectorSourceInfo(
                        new InspectorSourceInfo("Inline inspector " + inspectorEntry.getName(),
                                "Build file: " + inspectorEntry.getLocation().toString(), ""));
            }
        }

        log("Inspectors loaded: " + inspectorSet.size(), Project.MSG_VERBOSE);

        log("Loading waivers", Project.MSG_VERBOSE);
        Date now = new Date();
        WaiverSet waiverSet = new WaiverSet();
        it = waivers.iterator();
        while (it.hasNext()) {
            ((WaiverSource) it.next()).loadWaivers(waiverSet, now);
        }

        log("Waivers loaded: " + waiverSet.size(), Project.MSG_VERBOSE);

        log("Loading listeners", Project.MSG_VERBOSE);
        List listeners = new LinkedList();
        it = listenerEntries.iterator();
        while (it.hasNext()) {
            listeners.add(((ListenerEntry) it.next()).getObject(null));
        }

        //Outputs
        listeners.addAll(outputs);
        listeners.add(new ReviewToLogListener(project));

        log("Loading source files", Project.MSG_VERBOSE);

        RepositoryConfig config = new RepositoryConfig();
        if (classPath != null) {
            log("Loading class files to repository", Project.MSG_DEBUG);
            config.setClassLoader(new AntClassLoader(project, classPath, false));
            reviewSession.setClassPath(classPath.list());
        }

        config.setLogger(logger);
        config.setCalculateDependencies(calculateDependencies);
        config.setStoreSource(storeSource);

        it = srcFileSets.iterator();
        while (it.hasNext()) {
            HammurapiFileSet fs = (HammurapiFileSet) it.next();
            fs.setDefaultIncludes();
            DirectoryScanner scanner = fs.getDirectoryScanner(project);
            config.addFile(scanner.getBasedir(), scanner.getIncludedFiles());
        }

        /**
         * For command-line interface
         */
        it = srcFiles.iterator();
        while (it.hasNext()) {
            config.addFile((File) it.next());
        }

        config.setName(title);

        if (revisionMapper != null) {
            config.setRevisionMapper((RevisionMapper) revisionMapper.getObject(null));
        }

        ConnectionPerThreadDataSource dataSource = createDataSource(reviewSession);

        reviewSession.setDatasource(dataSource);

        final LinkedList repoWarnings = new LinkedList();
        config.setWarningSink(new WarningSink() {
            public void consume(final String source, final String message) {
                repoWarnings.add(new Violation() {
                    public String getMessage() {
                        return message;
                    }

                    public InspectorDescriptor getDescriptor() {
                        return null;
                    }

                    SourceMarker sm = new SimpleSourceMarker(0, 0, source, null);

                    public SourceMarker getSource() {
                        return sm;
                    }

                    public int compareTo(Object obj) {
                        if (obj instanceof Violation) {
                            Violation v = (Violation) obj;
                            int c = SourceMarkerComparator._compare(getSource(), v.getSource());
                            return c == 0 ? getMessage().compareTo(v.getMessage()) : c;
                        }

                        return hashCode() - obj.hashCode();
                    }
                });
            }
        });

        config.setDataSource(dataSource);
        final SQLProcessor sqlProcessor = new SQLProcessor(dataSource, null);
        sqlProcessor.setTimeIntervalCategory(tic);

        DbRepositoryImpl repositoryImpl = new DbRepositoryImpl(config);
        Repository repository = wrap ? (Repository) repositoryImpl.getProxy() : repositoryImpl;

        //new SimpleResultsFactory(waiverSet).install();

        ResultsFactoryConfig rfConfig = new ResultsFactoryConfig();
        rfConfig.setInspectorSet(inspectorSet);
        rfConfig.setName(title);
        rfConfig.setReportNumber(repository.getScanNumber());
        rfConfig.setRepository(repository);
        rfConfig.setSqlProcessor(sqlProcessor);
        rfConfig.setHostId(hostId);
        rfConfig.setBaseLine(baseLine);
        rfConfig.setDescription(reviewDescription);

        try {
            rfConfig.setHostName(InetAddress.getLocalHost().getHostName());
        } catch (Exception e) {
            log("Cannot resolve host name: " + e);
        }

        CompositeStorage storage = new CompositeStorage();
        storage.addStorage("jdbc", new JdbcStorage(sqlProcessor));
        storage.addStorage("file",
                new FileStorage(new File(System.getProperties().getProperty("java.io.tmpdir"))));
        storage.addStorage("memory", new MemoryStorage());

        rfConfig.setStorage(storage);
        rfConfig.setWaiverSet(waiverSet);

        ResultsFactory resultsFactory = new ResultsFactory(rfConfig);
        resultsFactory.install();

        CompositeResults summary = ResultsFactory.getInstance().newCompositeResults(title);
        ResultsFactory.getInstance().setSummary(summary);
        ResultsFactory.pushThreadResults(summary);

        Collection inspectorsPerSe = new LinkedList(inspectorSet.getInspectors());
        reviewSession.setInspectors(inspectorSet);
        Iterator inspectorsIt = inspectorsPerSe.iterator();
        log("Inspectors mapping", Project.MSG_VERBOSE);
        while (inspectorsIt.hasNext()) {
            Inspector inspector = (Inspector) inspectorsIt.next();
            log("\t" + inspector.getContext().getDescriptor().getName() + " -> "
                    + inspector.getClass().getName(), Project.MSG_VERBOSE);
        }

        // Initializes listeners
        it = listeners.iterator();
        while (it.hasNext()) {
            ((Listener) it.next()).onBegin(inspectorSet);
        }

        Iterator vfit = violationFilters.iterator();
        while (vfit.hasNext()) {
            Object vf = vfit.next();
            if (vf instanceof DataAccessObject) {
                ((DataAccessObject) vf).setSQLProcessor(sqlProcessor);
            }
        }

        ResultsCollector collector = new ResultsCollector(this, inspectorSet, waiverSet, summary, listeners);
        inspectorsPerSe.add(collector);

        // Storing repo warnings 
        while (!repoWarnings.isEmpty()) {
            collector.getSummary().addWarning((Violation) repoWarnings.removeFirst());
        }

        log("Reviewing", Project.MSG_VERBOSE);

        inspectorsPerSe.add(new ViolationFilterVisitor());

        SimpleReviewEngine rengine = new SimpleReviewEngine(inspectorsPerSe, this);
        reviewSession.setVisitor(rengine.getVisitor());
        visitorStack[0] = rengine.getVisitorStack();

        rengine.review(repository);

        writeWaiverStubs(waiverSet.getRejectedRequests());

        ResultsFactory.getInstance().commit(System.currentTimeMillis() - started);

        if (cleanup) {
            repositoryImpl.cleanupOldScans();
            resultsFactory.cleanupOldReports();
        }

        repositoryImpl.shutdown();
        reviewSession.shutdown();
        resultsFactory.shutdown();
        dataSource.shutdown();

        //log("SQL metrics:\n"+resultsFactory.getSQLMetrics(),Project.MSG_VERBOSE);

        if (hadExceptions) {
            throw new BuildException("There have been exceptions during execution. Check log output.");
        }
    } catch (JselException e) {
        throw new BuildException(e);
    } catch (HammurapiException e) {
        throw new BuildException(e);
    } catch (ConfigurationException e) {
        throw new BuildException(e);
    } catch (FileNotFoundException e) {
        throw new BuildException(e);
    } catch (ClassNotFoundException e) {
        throw new BuildException(e);
    } catch (IOException e) {
        throw new BuildException(e);
    } catch (SQLException e) {
        throw new BuildException(e);
    } catch (RenderingException e) {
        throw new BuildException(e);
    } finally {
        if (archiveTmpDir != null) {
            deleteFile(archiveTmpDir);
        }
    }
}
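
The example above uses removeFirst() in its simplest role: draining a LinkedList front-to-back (the repoWarnings loop) until isEmpty() reports the queue empty. A minimal, self-contained sketch of that drain idiom, with a plain String queue standing in for the Violation list:

import java.util.LinkedList;

public class DrainWarnings {
    public static void main(String[] args) {
        LinkedList<String> warnings = new LinkedList<String>();
        warnings.add("duplicate scan entry");
        warnings.add("stale repository metadata");

        // removeFirst() both returns and removes the head,
        // so the loop terminates once the list is empty.
        while (!warnings.isEmpty()) {
            System.out.println("warning: " + warnings.removeFirst());
        }
    }
}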

From source file:elh.eus.absa.Features.java

/**
 *   Fills the attribute vectors for the instances in the given CoNLL tab-separated corpus.
 *   Attribute vectors contain the features loaded by the creatFeatureSet() function.
 *
 * @param save whether the Instances should be saved to an arff file or not.
 * @param prefix prefix used when building the name of the saved arff file.
 * @return Weka Instances object containing the attribute vectors filled with the features specified
 *          in the parameter file.
 */
public Instances loadInstancesTAB(boolean save, String prefix) {
    String savePath = params.getProperty("fVectorDir") + File.separator + "arff" + File.separator + "train_"
            + prefix;
    HashMap<String, Opinion> trainExamples = corpus.getOpinions();

    int trainExamplesNum = trainExamples.size();

    int bowWin = 0;
    if (params.containsKey("window")) {
        bowWin = Integer.parseInt(params.getProperty("window"));
        savePath = savePath + "_w" + bowWin;
    }

    //System.out.println("train examples: "+trainExamplesNum);
    //Create the Weka object for the training set
    Instances rsltdata = new Instances("train", atts, trainExamplesNum);

    // setting class attribute (last attribute in train data).
    //traindata.setClassIndex(traindata.numAttributes() - 1);

    System.err.println("Features: loadInstancesTAB() - featNum: " + this.featNum + " - trainset attrib num -> "
            + rsltdata.numAttributes() + " - ");
    System.out.println("Features: loadInstancesTAB() - featNum: " + this.featNum + " - trainset attrib num -> "
            + rsltdata.numAttributes() + " - ");

    int instId = 1;
    // fill the vectors for each training example
    for (String oId : trainExamples.keySet()) {
        //System.err.println("sentence: "+ corpus.getOpinionSentence(o.getId()));

        //value vector
        double[] values = new double[featNum];

        // first element is the instanceId         
        values[rsltdata.attribute("instanceId").index()] = instId;

        LinkedList<String> ngrams = new LinkedList<String>();
        int ngramDim;
        try {
            ngramDim = Integer.valueOf(params.getProperty("wfngrams"));
        } catch (Exception e) {
            ngramDim = 0;
        }

        boolean polNgrams = false;
        if (params.containsKey("polNgrams")) {
            polNgrams = params.getProperty("polNgrams").equalsIgnoreCase("yes");
        }

        String[] noWindow = corpus.getOpinionSentence(oId).split("\n");

        // number of tokens in the opinion sentence, used for computing relative feature values
        int tokNum = noWindow.length;

        List<String> window = Arrays.asList(noWindow);
        Integer end = corpus.getOpinion(oId).getTo();
        // apply the window if it is active (>0) and the target is not null (to=0 means no target)
        if ((bowWin > 0) && (end > 0)) {
            Integer start = corpus.getOpinion(oId).getFrom();
            Integer from = start - bowWin;
            if (from < 0) {
                from = 0;
            }
            Integer to = end + bowWin;
            if (to > noWindow.length - 1) {
                to = noWindow.length - 1;
            }
            window = Arrays.asList(Arrays.copyOfRange(noWindow, from, to));
        }

        //System.out.println("Sentence: "+corpus.getOpinionSentence(oId)+" - target: "+corpus.getOpinion(oId).getTarget()+
        //      "\n window: from-> "+window.get(0).getForm()+" to-> "+window.get(window.size()-1)+" .\n");

        //System.err.println(Arrays.toString(window.toArray()));

        // word form ngram related features
        for (String wf : window) {
            // A blank line marks a sentence end: flush the n-gram list and reinitialize.
            if (wf.equals("")) {
                // add the accumulated ngrams to the feature vector
                checkNgramFeatures(ngrams, values, "", 1, true); //toknum

                // since wf is empty there is no need to check for clusters and other features.
                continue;
            }
            String[] fields = wf.split("\t");
            String wfStr = normalize(fields[0], params.getProperty("normalization", "none"));

            if (params.containsKey("wfngrams") && ngramDim > 0) {
                if (!savePath.contains("_wf" + ngramDim)) {
                    savePath = savePath + "_wf" + ngramDim;
                }
                // keep a sliding window of at most ngramDim word forms:
                // evict the oldest form before appending the current one
                if (ngrams.size() >= ngramDim) {
                    ngrams.removeFirst();
                }
                ngrams.add(wfStr);

                // add ngrams to the feature vector
                checkNgramFeatures(ngrams, values, "", 1, false); //toknum
            }
            // Clark cluster info corresponding to the current word form
            if (params.containsKey("clark") && attributeSets.get("ClarkCl").containsKey(wfStr)) {
                if (!savePath.contains("_cl")) {
                    savePath = savePath + "_cl";
                }
                values[rsltdata.attribute("ClarkClId_" + attributeSets.get("ClarkCl").get(wfStr)).index()]++;
            }

            // Brown cluster info corresponding to the current word form
            if (params.containsKey("brown") && attributeSets.get("BrownCl").containsKey(wfStr)) {
                if (!savePath.contains("_br")) {
                    savePath = savePath + "_br";
                }
                values[rsltdata.attribute("BrownClId_" + attributeSets.get("BrownCl").get(wfStr)).index()]++;
            }

            // word2vec cluster info corresponding to the current word form
            if (params.containsKey("word2vec") && attributeSets.get("w2vCl").containsKey(wfStr)) {
                if (!savePath.contains("_w2v")) {
                    savePath = savePath + "_w2v";
                }
                values[rsltdata.attribute("w2vClId_" + attributeSets.get("w2vCl").get(wfStr)).index()]++;
            }

        }

        //empty ngram list and add remaining ngrams to the feature list
        checkNgramFeatures(ngrams, values, "", 1, true); //toknum

        // PoS tagger related attributes: lemmas and pos tags
        if (params.containsKey("lemmaNgrams")
                || (params.containsKey("pos") && !params.getProperty("pos").equalsIgnoreCase("0"))
                || params.containsKey("polarLexiconGeneral") || params.containsKey("polarLexiconDomain")) {
            ngrams = new LinkedList<String>();
            if (params.containsKey("lemmaNgrams")
                    && (!params.getProperty("lemmaNgrams").equalsIgnoreCase("0"))) {
                ngramDim = Integer.valueOf(params.getProperty("lemmaNgrams"));
            } else {
                ngramDim = 3;
            }
            LinkedList<String> posNgrams = new LinkedList<String>();
            int posNgramDim = 0;
            if (params.containsKey("pos")) {
                posNgramDim = Integer.valueOf(params.getProperty("pos"));
            }

            for (String t : window) {
                //lemmas // && (!params.getProperty("lemmaNgrams").equalsIgnoreCase("0"))
                if ((params.containsKey("lemmaNgrams")) || params.containsKey("polarLexiconGeneral")
                        || params.containsKey("polarLexiconDomain")) {
                    if (!savePath.contains("_l" + ngramDim)) {
                        savePath = savePath + "_l" + ngramDim;
                    }

                    // A blank line marks a sentence end: flush the n-gram list and reinitialize.
                    if (t.equals("")) {
                        // check both lemma n-grams and polarity lexicons, and add values to the feature vector
                        checkNgramsAndPolarLexicons(ngrams, values, "lemma", 1, tokNum, true, polNgrams); //toknum

                        // since t is empty no need to check for clusters and other features.
                        continue;
                    }

                    String[] fields = t.split("\t");
                    if (fields.length < 2) {
                        continue;
                    }
                    String lemma = normalize(fields[1], params.getProperty("normalization", "none"));

                    if (ngrams.size() >= ngramDim) {
                        ngrams.removeFirst();
                    }
                    ngrams.add(lemma);

                    // check both lemma n-grams and polarity lexicons, and add values to the feature vector
                    checkNgramsAndPolarLexicons(ngrams, values, "lemma", 1, tokNum, false, polNgrams);

                }

                //pos tags
                if (params.containsKey("pos") && !params.getProperty("pos").equalsIgnoreCase("0")) {
                    if (!savePath.contains("_p")) {
                        savePath = savePath + "_p";
                    }

                    if (posNgrams.size() >= posNgramDim) {
                        posNgrams.removeFirst();
                    }

                    String[] fields = t.split("\t");
                    if (fields.length < 3) {
                        continue;
                    }
                    String pos = fields[2];

                    posNgrams.add(pos);

                    // add ngrams to the feature vector
                    checkNgramFeatures(posNgrams, values, "pos", 1, false);
                }
            } //endFor

            //empty ngram list and add remaining ngrams to the feature list
            // check both lemma n-grams and polarity lexicons, and add values to the feature vector
            checkNgramsAndPolarLexicons(ngrams, values, "", 1, tokNum, true, polNgrams);

            //empty pos ngram list and add remaining pos ngrams to the feature list
            checkNgramFeatures(posNgrams, values, "pos", 1, true);

        }

        // add sentence length as a feature
        if (params.containsKey("sentenceLength")
                && (!params.getProperty("sentenceLength").equalsIgnoreCase("no"))) {
            values[rsltdata.attribute("sentenceLength").index()] = tokNum;
        }

        // compute uppercase ratio before normalization (if needed)      
        //double upRatio =0.0;
        //if (params.getProperty("upperCaseRatio", "no").equalsIgnoreCase("yes"))
        //{
        //   String upper = opNormalized.replaceAll("[a-z]", "");
        //   upRatio = (double)upper.length() / (double)opNormalized.length();
        //   values[rsltdata.attribute("upperCaseRation").index()] = upRatio;
        //}

        //create object for the current instance and associate it with the current train dataset.         
        Instance inst = new SparseInstance(1.0, values);
        inst.setDataset(rsltdata);

        // add category attribute values
        String cat = trainExamples.get(oId).getCategory();

        if (params.containsKey("categories") && params.getProperty("categories").compareTo("E&A") == 0) {
            if (cat.compareTo("NULL") == 0) {
                inst.setValue(rsltdata.attribute("entCat").index(), cat);
                inst.setValue(rsltdata.attribute("attCat").index(), cat);
            } else {
                String[] splitCat = cat.split("#");
                inst.setValue(rsltdata.attribute("entCat").index(), splitCat[0]);
                inst.setValue(rsltdata.attribute("attCat").index(), splitCat[1]);
            }

            //inst.setValue(attIndexes.get("entAttCat"), cat);
        } else if (params.containsKey("categories") && params.getProperty("categories").compareTo("E#A") == 0) {
            inst.setValue(rsltdata.attribute("entAttCat").index(), cat);
        }

        if (params.containsKey("polarity") && params.getProperty("polarity").compareTo("yes") == 0) {
            // add class value as a double (Weka stores all values as doubles)
            String pol = normalizePolarity(trainExamples.get(oId).getPolarity());
            if (pol != null && !pol.isEmpty()) {
                inst.setValue(rsltdata.attribute("polarityCat"), pol);
            } else {
                //System.err.println("polarity: _"+pol+"_");
                inst.setMissing(rsltdata.attribute("polarityCat"));
            }
        }

        //add instance to train data
        rsltdata.add(inst);

        //store opinion Id and instance Id
        this.opInst.put(oId, instId);
        instId++;
    }

    System.err.println("Features : loadInstancesTAB() - training data ready total number of examples -> "
            + trainExamplesNum + " - " + rsltdata.numInstances());

    if (save) {
        try {
            savePath = savePath + ".arff";
            System.err.println("arff written to: " + savePath);
            ArffSaver saver = new ArffSaver();

            saver.setInstances(rsltdata);

            saver.setFile(new File(savePath));
            saver.writeBatch();
        } catch (IOException e1) {
            e1.printStackTrace();
        } catch (Exception e2) {
            e2.printStackTrace();
        }
    }

    return rsltdata;
}
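
In loadInstancesTAB() the LinkedList serves as a bounded sliding window over the token stream: once the list holds ngramDim elements, removeFirst() evicts the oldest token before the next one is appended, so the list always contains the most recent n-gram context. A standalone sketch of that idiom (the token array and window size are invented for illustration):

import java.util.LinkedList;

public class SlidingNgramWindow {
    public static void main(String[] args) {
        int ngramDim = 3; // maximum n-gram length
        LinkedList<String> ngrams = new LinkedList<String>();

        for (String token : new String[] { "the", "quick", "brown", "fox", "jumps" }) {
            // Evict the oldest token so the window never exceeds ngramDim.
            if (ngrams.size() >= ngramDim) {
                ngrams.removeFirst();
            }
            ngrams.add(token);
            System.out.println("window: " + ngrams);
        }
    }
}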

From source file:de.uni_koblenz.jgralab.utilities.rsa.Rsa2Tg.java

private List<EdgeClass> getEdgeClassesInTopologicalOrder() {
    List<EdgeClass> result = new ArrayList<EdgeClass>();
    Map<EdgeClass, Integer> numberOfPredecessors = new HashMap<EdgeClass, Integer>();
    LinkedList<EdgeClass> zeroValued = new LinkedList<EdgeClass>();

    for (EdgeClass ec : sg.getEdgeClassVertices()) {
        if (!BinaryEdgeClass.class.isInstance(ec)) {
            int numberOfPred = ec.getDegree(SpecializesTypedElementClass_subclass.class);
            if (numberOfPred == 0) {
                zeroValued.add(ec);
            } else {
                numberOfPredecessors.put(ec, numberOfPred);
            }
        }
    }

    while (!zeroValued.isEmpty()) {
        EdgeClass current = zeroValued.removeFirst();
        result.add(current);
        for (SpecializesEdgeClass sec : current.getIncidentEdges(SpecializesEdgeClass.class,
                de.uni_koblenz.jgralab.Direction.EDGE_TO_VERTEX)) {
            EdgeClass otherEnd = (EdgeClass) sec.getAlpha();
            Integer numberOfPred = numberOfPredecessors.get(otherEnd);
            if (numberOfPred != null) {
                if (numberOfPred == 1) {
                    numberOfPredecessors.remove(otherEnd);
                    zeroValued.add(otherEnd);
                } else {
                    numberOfPredecessors.put(otherEnd, --numberOfPred);
                }
            }
        }
    }

    if (numberOfPredecessors.isEmpty()) {
        return result;
    } else {
        return null;
    }
}
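
getEdgeClassesInTopologicalOrder() is a topological sort in the style of Kahn's algorithm: the zeroValued LinkedList acts as a FIFO work queue, removeFirst() dequeues the next vertex with no remaining predecessors, and a null return signals a cycle. A generic sketch of the same structure over a simple successor map (the graph representation is an assumption, not the jgralab API):

import java.util.*;

public class KahnTopoSort {
    /** Returns a topological order, or null if the graph contains a cycle. */
    static <T> List<T> topoSort(Map<T, List<T>> successors) {
        Map<T, Integer> inDegree = new HashMap<T, Integer>();
        for (T v : successors.keySet()) {
            inDegree.putIfAbsent(v, 0);
        }
        for (List<T> succs : successors.values()) {
            for (T s : succs) {
                inDegree.merge(s, 1, Integer::sum);
            }
        }

        LinkedList<T> zeroValued = new LinkedList<T>();
        for (Map.Entry<T, Integer> e : inDegree.entrySet()) {
            if (e.getValue() == 0) {
                zeroValued.add(e.getKey());
            }
        }

        List<T> result = new ArrayList<T>();
        while (!zeroValued.isEmpty()) {
            T current = zeroValued.removeFirst(); // dequeue from the head
            result.add(current);
            for (T s : successors.getOrDefault(current, Collections.<T>emptyList())) {
                if (inDegree.merge(s, -1, Integer::sum) == 0) {
                    zeroValued.add(s);
                }
            }
        }
        // Vertices with leftover predecessors indicate a cycle, mirroring the null return above.
        return result.size() == inDegree.size() ? result : null;
    }

    public static void main(String[] args) {
        Map<String, List<String>> g = new HashMap<String, List<String>>();
        g.put("a", Arrays.asList("b", "c"));
        g.put("b", Arrays.asList("c"));
        g.put("c", Collections.<String>emptyList());
        System.out.println(topoSort(g)); // [a, b, c]
    }
}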

From source file:com.cognizant.trumobi.PersonaLauncher.java

private void bindAppWidgets(PersonaLauncher.DesktopBinder binder,
        LinkedList<PersonaLauncherAppWidgetInfo> appWidgets) {

    final PersonaWorkspace personaWorkspace = mWorkspace;
    final boolean desktopLocked = mDesktopLocked;

    if (!appWidgets.isEmpty()) {
        final PersonaLauncherAppWidgetInfo item = appWidgets.removeFirst();

        final int appWidgetId = item.appWidgetId;
        final AppWidgetProviderInfo appWidgetInfo = mAppWidgetManager.getAppWidgetInfo(appWidgetId);
        item.hostView = mAppWidgetHost.createView(this, appWidgetId, appWidgetInfo);

        if (LOGD) {
            PersonaLog.d(LOG_TAG,
                    String.format("about to setAppWidget for id=%d, info=%s", appWidgetId, appWidgetInfo));
        }

        item.hostView.setAppWidget(appWidgetId, appWidgetInfo);
        item.hostView.setTag(item);

        personaWorkspace.addInScreen(item.hostView, item.screen, item.cellX, item.cellY, item.spanX, item.spanY,
                !desktopLocked);

        personaWorkspace.requestLayout();
        // finish load a widget, send it an intent
        if (appWidgetInfo != null)
            appwidgetReadyBroadcast(appWidgetId, appWidgetInfo.provider, new int[] { item.spanX, item.spanY });
    }

    if (appWidgets.isEmpty()) {
        if (PROFILE_ROTATE) {
            android.os.Debug.stopMethodTracing();
        }
    } else {
        binder.obtainMessage(DesktopBinder.MESSAGE_BIND_APPWIDGETS).sendToTarget();
    }
}
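
Here removeFirst() supports incremental work on the UI thread: each call to bindAppWidgets() binds exactly one widget from the head of the list, and while items remain, obtainMessage(...).sendToTarget() posts a message so the next call continues the job without blocking the thread. The same chunked-draining shape, sketched without the Android classes (the ScheduledExecutorService stands in for the Handler message loop):

import java.util.Arrays;
import java.util.LinkedList;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

public class ChunkedBinder {
    static final ScheduledExecutorService loop = Executors.newSingleThreadScheduledExecutor();

    // Bind one item per invocation, then reschedule while work remains.
    static void bindNext(LinkedList<String> widgets) {
        if (!widgets.isEmpty()) {
            System.out.println("bound: " + widgets.removeFirst());
        }
        if (widgets.isEmpty()) {
            loop.shutdown(); // all work done
        } else {
            loop.schedule(() -> bindNext(widgets), 10, TimeUnit.MILLISECONDS);
        }
    }

    public static void main(String[] args) {
        LinkedList<String> widgets = new LinkedList<String>(Arrays.asList("clock", "weather", "calendar"));
        loop.execute(() -> bindNext(widgets));
    }
}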

From source file:net.spfbl.spf.SPF.java

/**
 * Merges the SPF fix-up lists.
 *
 * @param midleList list of the central (middle) mechanisms.
 * @param errorList list of the mechanisms with syntax errors.
 */
private static void mergeMechanism(LinkedList<String> midleList, LinkedList<String> errorList) {
    while (!errorList.isEmpty()) {
        boolean fixed = false;
        if (errorList.size() > 1) {
            for (int index = 1; index < errorList.size(); index++) {
                String tokenFix = errorList.getFirst();
                for (String tokenError : errorList.subList(1, index + 1)) {
                    tokenFix += tokenError;
                }
                if (isMechanismMiddle(tokenFix)) {
                    midleList.add(tokenFix);
                    // drop the index+1 fragments that were merged into tokenFix
                    int k = 0;
                    while (k++ <= index) {
                        errorList.removeFirst();
                    }
                    fixed = true;
                    break;
                }
            }

        }
        if (!fixed) {
            // Could not fix the error.
            midleList.add(errorList.removeFirst());
        }
    }
}
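
mergeMechanism() uses removeFirst() for token repair: it concatenates the head of the error list with successive fragments until the result parses as a valid mechanism, then pops every consumed fragment off the front; if nothing parses, the head is moved to the fixed list as-is. A standalone sketch of the same shape (the isValid() check and the sample fragments are invented; the real code delegates to isMechanismMiddle()):

import java.util.Arrays;
import java.util.LinkedList;

public class MergeFragments {
    // Hypothetical validity check standing in for isMechanismMiddle().
    static boolean isValid(String token) {
        return token.matches("ip4:\\d+\\.\\d+\\.\\d+\\.\\d+/\\d+");
    }

    public static void main(String[] args) {
        LinkedList<String> fixed = new LinkedList<String>();
        LinkedList<String> errors = new LinkedList<String>(
                Arrays.asList("ip4", ":192.0", ".2.0/24", "bogus"));

        while (!errors.isEmpty()) {
            // Glue successive fragments onto the head until the result parses.
            StringBuilder candidate = new StringBuilder(errors.getFirst());
            int consumed = 1;
            for (int i = 1; i < errors.size() && !isValid(candidate.toString()); i++) {
                candidate.append(errors.get(i));
                consumed = i + 1;
            }
            if (isValid(candidate.toString())) {
                fixed.add(candidate.toString());
                while (consumed-- > 0) {
                    errors.removeFirst(); // pop every fragment we consumed
                }
            } else {
                fixed.add(errors.removeFirst()); // could not repair this fragment
            }
        }
        System.out.println(fixed); // [ip4:192.0.2.0/24, bogus]
    }
}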