Example usage for java.util Queue add

List of usage examples for java.util Queue add

Introduction

On this page you can find example usages of java.util.Queue.add.

Prototype

boolean add(E e);

Document

Inserts the specified element into this queue if it is possible to do so immediately without violating capacity restrictions, returning true upon success and throwing an IllegalStateException if no space is currently available.
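
Below is a minimal, self-contained sketch of this contract (the capacity of 2 and the string values are illustrative): on a bounded queue such as ArrayBlockingQueue, add succeeds while space remains and throws IllegalStateException once the queue is full, whereas offer simply returns false.

import java.util.Queue;
import java.util.concurrent.ArrayBlockingQueue;

public class QueueAddDemo {
    public static void main(String[] args) {
        // A bounded queue with room for two elements.
        Queue<String> queue = new ArrayBlockingQueue<String>(2);

        System.out.println(queue.add("a")); // true
        System.out.println(queue.add("b")); // true

        try {
            queue.add("c"); // the queue is full
        } catch (IllegalStateException e) {
            System.out.println("add failed: " + e);
        }

        // offer is the non-throwing alternative for bounded queues.
        System.out.println(queue.offer("c")); // false
    }
}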

Usage

From source file:org.ohmage.query.impl.CampaignQueries.java

public void createCampaign(final Campaign campaign, final Collection<String> classIds,
        final String creatorUsername) throws DataAccessException {

    // Create the transaction.
    DefaultTransactionDefinition def = new DefaultTransactionDefinition();
    def.setName("Creating a new campaign.");

    try {
        // Begin the transaction.
        PlatformTransactionManager transactionManager = new DataSourceTransactionManager(getDataSource());
        TransactionStatus status = transactionManager.getTransaction(def);

        String iconUrlString = null;
        URL iconUrl = campaign.getIconUrl();
        if (iconUrl != null) {
            iconUrlString = iconUrl.toString();
        }

        String xml;
        try {
            xml = campaign.getXml();
        } catch (DomainException e) {
            transactionManager.rollback(status);
            throw new DataAccessException("The XML could not be saved.");
        }

        // Create the campaign.
        try {
            getJdbcTemplate().update(SQL_INSERT_CAMPAIGN,
                    new Object[] { campaign.getId(), campaign.getName(), xml, campaign.getDescription(),
                            iconUrlString, campaign.getAuthoredBy(), campaign.getRunningState().toString(),
                            campaign.getPrivacyState().toString() });
        } catch (org.springframework.dao.DataAccessException e) {
            transactionManager.rollback(status);
            throw new DataAccessException("Error executing SQL '" + SQL_INSERT_CAMPAIGN + "' with parameters: "
                    + campaign.getId() + ", " + campaign.getName() + ", " + xml + ", "
                    + campaign.getDescription() + ", " + iconUrlString + ", " + campaign.getAuthoredBy() + ", "
                    + campaign.getRunningState().toString() + ", " + campaign.getPrivacyState().toString(), e);
        }

        // Create the set of survey and prompt IDs for this campaign.
        final Set<String> surveyIds = new HashSet<String>();
        final Set<String> promptIds = new HashSet<String>();

        // Loop through all of the surveys and add the survey and prompt
        // IDs.
        for (Survey survey : campaign.getSurveys().values()) {
            // Get this survey's ID.
            surveyIds.add(survey.getId());

            Queue<SurveyItem> surveyItems = new LinkedList<SurveyItem>();
            surveyItems.addAll(survey.getSurveyItems().values());
            while (surveyItems.size() > 0) {
                SurveyItem surveyItem = surveyItems.poll();

                if (surveyItem instanceof RepeatableSet) {
                    RepeatableSet repeatableSet = (RepeatableSet) surveyItem;

                    for (SurveyItem rsSurveyItem : repeatableSet.getSurveyItems().values()) {
                        surveyItems.add(rsSurveyItem);
                    }
                } else if (surveyItem instanceof Prompt) {
                    promptIds.add(((Prompt) surveyItem).getId());
                }
            }
        }

        // Get the campaign's ID.
        final String campaignId = campaign.getId();

        // Compile the list of parameters for the survey ID lookup table.
        List<Object[]> surveyParameters = new ArrayList<Object[]>(surveyIds.size());
        for (String surveyId : surveyIds) {
            Object[] params = new Object[2];
            params[0] = surveyId;
            params[1] = campaignId;
            surveyParameters.add(params);
        }

        // The SQL to write the data.
        final String surveyIdLookupBatchSql = "INSERT INTO " + "campaign_survey_lookup(survey_id, campaign_id) "
                + "VALUES (?, (SELECT id FROM campaign WHERE urn = ?))";

        // Add the survey IDs to the lookup table.
        try {
            getJdbcTemplate().batchUpdate(surveyIdLookupBatchSql, surveyParameters);
        } catch (org.springframework.dao.DataAccessException e) {
            transactionManager.rollback(status);
            throw new DataAccessException("Error executing SQL '" + surveyIdLookupBatchSql + "'.", e);
        }

        // Compile the list of parameters for the prompt ID lookup table.
        List<Object[]> promptParameters = new ArrayList<Object[]>(promptIds.size());
        for (String promptId : promptIds) {
            Object[] params = new Object[2];
            params[0] = promptId;
            params[1] = campaignId;
            promptParameters.add(params);
        }

        // The SQL to write the data.
        final String promptIdLookupBatchSql = "INSERT INTO " + "campaign_prompt_lookup(prompt_id, campaign_id) "
                + "VALUES (?, (SELECT id FROM campaign WHERE urn = ?))";

        // Add the prompt IDs to the lookup table.
        try {
            getJdbcTemplate().batchUpdate(promptIdLookupBatchSql, promptParameters);
        } catch (org.springframework.dao.DataAccessException e) {
            transactionManager.rollback(status);
            throw new DataAccessException("Error executing SQL '" + promptIdLookupBatchSql + "'.", e);
        }

        // Add each of the classes to the campaign.
        for (String classId : classIds) {
            associateCampaignAndClass(transactionManager, status, campaign.getId(), classId);
        }

        // Add the requesting user as the author. This may have already 
        // happened above.
        try {
            getJdbcTemplate().update(SQL_INSERT_USER_ROLE_CAMPAIGN, creatorUsername, campaign.getId(),
                    Campaign.Role.AUTHOR.toString());
        } catch (org.springframework.dao.DataIntegrityViolationException e) {
            // The user was already an author of this campaign implying 
            // that it's one of the default campaign roles based on a class
            // role that the 'creatorUsername' has.
            e.printStackTrace();
        } catch (org.springframework.dao.DataAccessException e) {
            transactionManager.rollback(status);
            throw new DataAccessException("Error executing SQL '" + SQL_INSERT_USER_ROLE_CAMPAIGN
                    + "' with parameters: " + creatorUsername + ", " + campaign.getId() + ", "
                    + Campaign.Role.AUTHOR.toString(), e);
        }

        // Commit the transaction.
        try {
            transactionManager.commit(status);
        } catch (TransactionException e) {
            transactionManager.rollback(status);
            throw new DataAccessException("Error while committing the transaction.", e);
        }
    } catch (TransactionException e) {
        throw new DataAccessException("Error while attempting to rollback the transaction.", e);
    }
}
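
The nested-item walk above is a plain breadth-first flattening: children of each RepeatableSet are pushed back onto the same queue that is being drained. A self-contained sketch of the idiom, where Node and the sample tree are hypothetical stand-ins for SurveyItem, RepeatableSet, and Prompt:

import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
import java.util.Queue;

public class FlattenDemo {

    // Hypothetical stand-in for SurveyItem; a Node with children
    // plays the role of a RepeatableSet, a leaf that of a Prompt.
    static class Node {
        final String id;
        final List<Node> children;

        Node(String id, Node... children) {
            this.id = id;
            this.children = Arrays.asList(children);
        }
    }

    public static void main(String[] args) {
        Node root = new Node("set1", new Node("p1"), new Node("set2", new Node("p2")));

        Queue<Node> work = new LinkedList<Node>();
        work.add(root);
        while (!work.isEmpty()) {
            Node item = work.poll();
            if (item.children.isEmpty()) {
                System.out.println("prompt: " + item.id); // p1, then p2
            } else {
                work.addAll(item.children); // descend into the set
            }
        }
    }
}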

From source file:org.apache.pdfbox.pdfparser.COSParser.java

/**
 * Adds newObject to toBeParsedList if it is not a COSObject, or if this
 * COSObject has not already been added (checked via addedObjects).
 */
private void addNewToList(final Queue<COSBase> toBeParsedList, final COSBase newObject,
        final Set<Long> addedObjects) {
    if (newObject instanceof COSObject) {
        final long objId = getObjectId((COSObject) newObject);
        if (!addedObjects.add(objId)) {
            return;
        }
    }
    toBeParsedList.add(newObject);
}
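
The dedup trick here is worth isolating: Set.add returns false when the element is already present, so it doubles as the membership test and each id is enqueued at most once. A minimal sketch (the long ids are illustrative):

import java.util.HashSet;
import java.util.LinkedList;
import java.util.Queue;
import java.util.Set;

public class DedupEnqueueDemo {
    public static void main(String[] args) {
        Queue<Long> toBeParsed = new LinkedList<Long>();
        Set<Long> addedObjects = new HashSet<Long>();

        for (long objId : new long[] { 7L, 42L, 7L, 13L, 42L }) {
            // Set.add reports whether the set changed, so duplicates
            // are filtered out and each id is enqueued on first sight only.
            if (addedObjects.add(objId)) {
                toBeParsed.add(objId);
            }
        }
        System.out.println(toBeParsed); // [7, 42, 13]
    }
}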

From source file:edu.brown.hstore.dtxn.LocalTransaction.java

/**
 * Queues up a WorkFragment for this txn
 * If the return value is true, then the WorkFragment is blocked waiting for dependencies.
 * If the return value is false, then the WorkFragment can be executed immediately (either locally or at a remote partition).
 * @param fragment
 */
public boolean addWorkFragment(WorkFragment fragment) {
    assert (this.round_state[hstore_site
            .getLocalPartitionOffset(this.base_partition)] == RoundState.INITIALIZED) : String.format(
                    "Invalid round state %s for %s at partition %d",
                    this.round_state[hstore_site.getLocalPartitionOffset(this.base_partition)], this,
                    this.base_partition);

    // The partition that this task is being sent to for execution
    boolean blocked = false;
    final int partition = fragment.getPartitionId();
    final int num_fragments = fragment.getFragmentIdCount();

    if (d)
        LOG.debug(String.format("%s - Adding %s for partition %d with %d fragments", this,
                fragment.getClass().getSimpleName(), partition, num_fragments));

    // PAVLO: 2011-12-10
    // We moved updating the exec_touchedPartitions histogram into the
    // BatchPlanner so that we won't increase the counter for a partition
    // if we read from a replicated table at the local partition
    // this.state.exec_touchedPartitions.put(partition, num_fragments);

    // PAVLO 2011-12-20
    // I don't know why, but before this loop used to be synchronized
    // It definitely does not need to be because this is only invoked by the
    // transaction's base partition PartitionExecutor
    for (int i = 0; i < num_fragments; i++) {
        int stmt_index = fragment.getStmtIndex(i);
        //            int param_index = fragment.getParamIndex(i);

        // If this task produces output dependencies, then we need to make 
        // sure that the txn wait for it to arrive first
        int output_dep_id = fragment.getOutputDepId(i);
        if (output_dep_id != HStoreConstants.NULL_DEPENDENCY_ID) {
            DependencyInfo dinfo = this.getOrCreateDependencyInfo(stmt_index, output_dep_id);
            dinfo.addPartition(partition);
            if (d)
                LOG.debug(String.format(
                        "%s - Adding new DependencyInfo %s for PlanFragment %d at Partition %d [ctr=%d]\n%s",
                        this, debugStmtDep(stmt_index, output_dep_id), fragment.getFragmentId(i),
                        partition, this.state.dependency_ctr, dinfo.toString()));
            this.state.dependency_ctr++;

            // Store the stmt_index of when this dependency will show up
            Integer key_idx = this.state.createPartitionDependencyKey(partition, output_dep_id);
            Queue<Integer> rest_stmt_ctr = this.state.results_dependency_stmt_ctr.get(key_idx);
            if (rest_stmt_ctr == null) {
                rest_stmt_ctr = new LinkedList<Integer>();
                this.state.results_dependency_stmt_ctr.put(key_idx, rest_stmt_ctr);
            }
            rest_stmt_ctr.add(stmt_index);
            if (t)
                LOG.trace(String.format("%s - Set Dependency Statement Counters for <%d %d>: %s", this,
                        partition, output_dep_id, rest_stmt_ctr));
        } // IF

        // If this WorkFragment needs an input dependency, then we need to make sure it arrives at
        // the executor before it is allowed to start executing
        WorkFragment.InputDependency input_dep_ids = fragment.getInputDepId(i);
        if (input_dep_ids.getIdsCount() > 0) {
            for (int dependency_id : input_dep_ids.getIdsList()) {
                if (dependency_id != HStoreConstants.NULL_DEPENDENCY_ID) {
                    DependencyInfo dinfo = this.getOrCreateDependencyInfo(stmt_index, dependency_id);
                    dinfo.addBlockedWorkFragment(fragment);
                    dinfo.markInternal();
                    if (blocked == false) {
                        this.state.blocked_tasks.add(fragment);
                        blocked = true;
                    }
                    if (d)
                        LOG.debug(String.format(
                                "%s - Created internal input dependency %d for PlanFragment %d\n%s", this,
                                dependency_id, fragment.getFragmentId(i), dinfo.toString()));
                }
            } // FOR
        }

        // *********************************** DEBUG ***********************************
        if (t) {
            StringBuilder sb = new StringBuilder();
            int output_ctr = 0;
            int dep_ctr = 0;
            for (DependencyInfo dinfo : this.state.dependencies.values()) {
                if (dinfo.getStatementIndex() == stmt_index)
                    dep_ctr++;
                if (dinfo.isInternal() == false) {
                    output_ctr++;
                    sb.append("  Output -> " + dinfo.toString());
                }
            } // FOR
            LOG.trace(String.format("%s - Number of Output Dependencies for StmtIndex #%d: %d out of %d\n%s",
                    this, stmt_index, output_ctr, dep_ctr, sb));
        }
        // *********************************** DEBUG ***********************************

    } // FOR

    // *********************************** DEBUG ***********************************
    if (d) {
        CatalogType catalog_obj = null;
        if (catalog_proc.getSystemproc()) {
            catalog_obj = catalog_proc;
        } else {
            for (int i = 0; i < num_fragments; i++) {
                int frag_id = fragment.getFragmentId(i);
                PlanFragment catalog_frag = CatalogUtil.getPlanFragment(catalog_proc, frag_id);
                catalog_obj = catalog_frag.getParent();
                if (catalog_obj != null)
                    break;
            } // FOR
        }
        LOG.debug(String.format("%s - Queued up %s WorkFragment for partition %d and marked as %s [fragIds=%s]",
                this, catalog_obj, partition, (blocked ? "blocked" : "not blocked"),
                fragment.getFragmentIdList()));
        if (t)
            LOG.trace("WorkFragment Contents for txn #" + this.txn_id + ":\n" + fragment);
    }
    // *********************************** DEBUG ***********************************

    return (blocked);
}
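
The lazy creation of rest_stmt_ctr above (get, null check, new LinkedList, put) is the classic pre-Java-8 idiom for a multimap-like structure; on Java 8+ the same three steps collapse into computeIfAbsent. A minimal sketch with illustrative key and values:

import java.util.HashMap;
import java.util.LinkedList;
import java.util.Map;
import java.util.Queue;

public class LazyQueueDemo {
    public static void main(String[] args) {
        Map<Integer, Queue<Integer>> stmtCounters = new HashMap<Integer, Queue<Integer>>();

        int keyIdx = 5;    // stands in for the partition/dependency key
        int stmtIndex = 2; // stands in for the statement index

        // Creates the queue on first use and reuses it afterwards.
        stmtCounters.computeIfAbsent(keyIdx, k -> new LinkedList<Integer>()).add(stmtIndex);
        stmtCounters.computeIfAbsent(keyIdx, k -> new LinkedList<Integer>()).add(3);

        System.out.println(stmtCounters); // {5=[2, 3]}
    }
}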

From source file:edu.emory.cci.aiw.umls.UMLSDatabaseConnection.java

@Override
public int getDistBF(ConceptUID cui1, ConceptUID cui2, String rela, SAB sab, int maxR)
        throws UMLSQueryException {
    Queue<ConceptUID> cuiQue = new LinkedList<ConceptUID>();
    Set<ConceptUID> visited = new HashSet<ConceptUID>();
    Map<Integer, Integer> radiusIdx = new HashMap<Integer, Integer>();
    int queIdx = 0;
    int r = 0;
    radiusIdx.put(r, 0);

    if (maxR <= 0) {
        maxR = 3;
    }

    try {
        setupConn();
        cuiQue.add(cui1);
        visited.add(cui1);

        List<UMLSQuerySearchUID> params = new ArrayList<UMLSQuerySearchUID>();
        StringBuilder sql = new StringBuilder(
                "select distinct(CUI2) from MRREL where CUI1 = ? and (rel='PAR' or rel='CHD')");
        params.add(ConceptUID.EMPTY_CUI);
        if (sab != null) {
            sql.append(" and SAB = ?");
            params.add(sab);
        }
        if (rela != null && !rela.equals("")) {
            sql.append(" and RELA = ?");
            params.add(UMLSQueryStringValue.fromString(rela));
        }

        while (!cuiQue.isEmpty()) {
            ConceptUID node = cuiQue.remove();
            params.set(0, node);
            if (node.equals(cui2)) {
                return r;
            }

            List<ConceptUID> adjNodes = new ArrayList<ConceptUID>();

            ResultSet rs = executeAndLogQuery(substParams(sql.toString(), params));
            while (rs.next()) {
                ConceptUID c2 = ConceptUID.fromString(rs.getString(1));
                if (!visited.contains(c2)) {
                    adjNodes.add(c2);
                }
            }

            // Track the queue index at which each BFS radius ends. The count
            // for the next radius accumulates across all nodes of this radius.
            if (!radiusIdx.containsKey(r + 1)) {
                radiusIdx.put(r + 1, radiusIdx.get(r));
            }
            radiusIdx.put(r + 1, radiusIdx.get(r + 1) + adjNodes.size());

            if (queIdx == radiusIdx.get(r)) {
                r++;
            }
            queIdx++;

            for (ConceptUID c : adjNodes) {
                visited.add(c);
                cuiQue.add(c);
            }
            if (r > maxR) {
                return r;
            }
        }
    } catch (SQLException sqle) {
        throw new UMLSQueryException(sqle);
    } catch (MalformedUMLSUniqueIdentifierException muuie) {
        throw new UMLSQueryException(muuie);
    } finally {
        tearDownConn();
    }

    log(Level.FINEST, "Returning -1");
    return -1;
}
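
The radius bookkeeping above tracks level boundaries by queue index; an arguably simpler BFS idiom drains the queue one full level per pass, so the depth counter needs no auxiliary map. A self-contained sketch over an illustrative adjacency list:

import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.Set;

public class BfsDistanceDemo {

    static int distance(Map<String, List<String>> adj, String start, String goal) {
        Queue<String> queue = new LinkedList<String>();
        Set<String> visited = new HashSet<String>();
        queue.add(start);
        visited.add(start);

        for (int depth = 0; !queue.isEmpty(); depth++) {
            // Drain exactly one BFS level per pass.
            for (int i = queue.size(); i > 0; i--) {
                String node = queue.remove();
                if (node.equals(goal)) {
                    return depth;
                }
                for (String next : adj.getOrDefault(node, Collections.<String>emptyList())) {
                    if (visited.add(next)) { // enqueue each node at most once
                        queue.add(next);
                    }
                }
            }
        }
        return -1; // unreachable
    }

    public static void main(String[] args) {
        Map<String, List<String>> adj = new HashMap<String, List<String>>();
        adj.put("A", Arrays.asList("B", "C"));
        adj.put("B", Arrays.asList("D"));
        adj.put("C", Arrays.asList("D"));

        System.out.println(distance(adj, "A", "D")); // 2
    }
}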

From source file:org.apache.hadoop.hive.ql.QueryPlan.java

/**
 * Generate the operator graph and operator list for the given task based on
 * the operators corresponding to that task.
 *
 * @param task
 *          api.Task which needs its operator graph populated
 * @param topOps
 *          the set of top operators from which the operator graph for the
 *          task is hanging
 */
private void populateOperatorGraph(org.apache.hadoop.hive.ql.plan.api.Task task,
        Collection<Operator<? extends OperatorDesc>> topOps) {

    task.setOperatorGraph(new org.apache.hadoop.hive.ql.plan.api.Graph());
    task.getOperatorGraph().setNodeType(NodeType.OPERATOR);

    Queue<Operator<? extends OperatorDesc>> opsToVisit = new LinkedList<Operator<? extends OperatorDesc>>();
    Set<Operator<? extends OperatorDesc>> opsVisited = new HashSet<Operator<? extends OperatorDesc>>();
    opsToVisit.addAll(topOps);
    while (opsToVisit.peek() != null) {
        Operator<? extends OperatorDesc> op = opsToVisit.remove();
        // Skip an operator already processed via another parent.
        if (!opsVisited.add(op)) {
            continue;
        }
        // populate the operator
        org.apache.hadoop.hive.ql.plan.api.Operator operator = new org.apache.hadoop.hive.ql.plan.api.Operator();
        operator.setOperatorId(op.getOperatorId());
        operator.setOperatorType(op.getType());
        task.addToOperatorList(operator);
        // done processing the operator
        if (op.getChildOperators() != null) {
            org.apache.hadoop.hive.ql.plan.api.Adjacency entry = new org.apache.hadoop.hive.ql.plan.api.Adjacency();
            entry.setAdjacencyType(AdjacencyType.CONJUNCTIVE);
            entry.setNode(op.getOperatorId());
            for (Operator<? extends OperatorDesc> childOp : op.getChildOperators()) {
                entry.addToChildren(childOp.getOperatorId());
                if (!opsVisited.contains(childOp)) {
                    opsToVisit.add(childOp);
                }
            }
            task.getOperatorGraph().addToAdjacencyList(entry);
        }
    }
}

From source file:org.geoserver.wms.legendgraphic.ColorMapLegendCreator.java

private Queue<BufferedImage> createFooter() {

    // creating a backbuffer image on which we should draw the bkgColor for this colormap element
    final BufferedImage image = ImageUtils.createImage(1, 1, (IndexColorModel) null, transparent);
    final Map<Key, Object> hintsMap = new HashMap<Key, Object>();
    final Graphics2D graphics = ImageUtils.prepareTransparency(transparent, backgroundColor, image, hintsMap);

    // list where we store the rows for the footer
    final Queue<BufferedImage> queue = new LinkedList<BufferedImage>();
    // //the height is already fixed
    // final int rowHeight=(int)Math.round(rowH);
    final int rowWidth = (int) Math.round(footerW);
    // final Rectangle clipboxA=new Rectangle(0,0,rowWidth,rowHeight);
    // footer
    //
    //
    // draw the various bodyCells
    for (Cell cell : footerRows) {

        // get dim
        final Dimension dim = cell.getPreferredDimension(graphics);
        // final int rowWidth=(int)Math.round(dim.getWidth());
        final int rowHeight = (int) Math.round(dim.getHeight());
        final Rectangle clipboxA = new Rectangle(0, 0, rowWidth, rowHeight);

        // draw it
        final BufferedImage colorCellLegend = new BufferedImage(rowWidth, rowHeight,
                BufferedImage.TYPE_INT_ARGB);
        Graphics2D rlg = colorCellLegend.createGraphics();
        rlg.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
        cell.draw(rlg, clipboxA, border);
        rlg.dispose();

        queue.add(colorCellLegend);

    }

    graphics.dispose();

    return queue;// mergeRows(queue);
}

From source file:org.apache.gobblin.example.wikipedia.WikipediaExtractor.java

private Queue<JsonElement> retrievePageRevisions(Map<String, String> query)
        throws IOException, URISyntaxException {

    Queue<JsonElement> retrievedRevisions = new LinkedList<>();

    JsonElement jsonElement = performHttpQuery(this.rootUrl, query);

    if (jsonElement == null || !jsonElement.isJsonObject()) {
        return retrievedRevisions;
    }

    JsonObject jsonObj = jsonElement.getAsJsonObject();
    if (jsonObj == null || !jsonObj.has(JSON_MEMBER_QUERY)) {
        return retrievedRevisions;
    }

    JsonObject queryObj = jsonObj.getAsJsonObject(JSON_MEMBER_QUERY);
    if (!queryObj.has(JSON_MEMBER_PAGES)) {
        return retrievedRevisions;
    }

    JsonObject pagesObj = queryObj.getAsJsonObject(JSON_MEMBER_PAGES);
    if (pagesObj.entrySet().isEmpty()) {
        return retrievedRevisions;
    }

    JsonObject pageIdObj = pagesObj.getAsJsonObject(pagesObj.entrySet().iterator().next().getKey());
    if (!pageIdObj.has(JSON_MEMBER_REVISIONS)) {
        return retrievedRevisions;
    }

    //retrieve revisions of the current pageTitle
    JsonArray jsonArr = pageIdObj.getAsJsonArray(JSON_MEMBER_REVISIONS);
    for (JsonElement revElement : jsonArr) {
        JsonObject revObj = revElement.getAsJsonObject();

        /*'pageid' and 'title' are associated with the parent object
         * of all revisions. Add them to each individual revision.
         */
        if (pageIdObj.has(JSON_MEMBER_PAGEID)) {
            revObj.add(JSON_MEMBER_PAGEID, pageIdObj.get(JSON_MEMBER_PAGEID));
        }
        if (pageIdObj.has(JSON_MEMBER_TITLE)) {
            revObj.add(JSON_MEMBER_TITLE, pageIdObj.get(JSON_MEMBER_TITLE));
        }
        retrievedRevisions.add(revObj);
    }

    LOG.info(retrievedRevisions.size() + " record(s) retrieved for title " + this.requestedTitle);
    return retrievedRevisions;
}

From source file:org.kuali.rice.krad.uif.lifecycle.ViewLifecyclePhaseBase.java

/**
 * {@inheritDoc}
 */
@Override
public String toString() {
    StringBuilder sb = new StringBuilder();
    Queue<ViewLifecyclePhase> toPrint = new LinkedList<ViewLifecyclePhase>();
    toPrint.offer(this);
    while (!toPrint.isEmpty()) {
        ViewLifecyclePhase tp = toPrint.poll();

        if (tp.getElement() == null) {
            sb.append("\n      ");
            sb.append(tp.getClass().getSimpleName());
            sb.append(" (recycled)");
            continue;
        }

        String indent;
        if (tp == this) {
            sb.append("\nProcessed? ");
            sb.append(processed);
            indent = "\n";
        } else {
            indent = "\n    ";
        }
        sb.append(indent);

        sb.append(tp.getClass().getSimpleName());
        sb.append(" ");
        sb.append(System.identityHashCode(tp));
        sb.append(" ");
        sb.append(tp.getViewPath());
        sb.append(" ");
        sb.append(tp.getElement().getClass().getSimpleName());
        sb.append(" ");
        sb.append(tp.getElement().getId());
        sb.append(" ");
        sb.append(pendingSuccessors);

        if (tp == this) {
            sb.append("\nPredecessor Phases:");
        }

        ViewLifecyclePhase tpredecessor = tp.getPredecessor();
        if (tpredecessor != null) {
            toPrint.add(tpredecessor);
        }
    }
    return sb.toString();
}

From source file:org.apache.synapse.transport.nhttp.HttpCoreNIOListener.java

private void startEndpoints() throws AxisFault {
    Queue<ListenerEndpoint> endpoints = new LinkedList<ListenerEndpoint>();

    Set<InetSocketAddress> addressSet = new HashSet<InetSocketAddress>();
    addressSet.addAll(connFactory.getBindAddresses());
    if (NHttpConfiguration.getInstance().getMaxActiveConnections() != -1) {
        addMaxConnectionCountController(NHttpConfiguration.getInstance().getMaxActiveConnections());
    }
    if (listenerContext.getBindAddress() != null) {
        addressSet.add(new InetSocketAddress(listenerContext.getBindAddress(), listenerContext.getPort()));
    }
    if (addressSet.isEmpty()) {
        addressSet.add(new InetSocketAddress(listenerContext.getPort()));
    }

    // Ensure simple but stable order
    List<InetSocketAddress> addressList = new ArrayList<InetSocketAddress>(addressSet);
    Collections.sort(addressList, new Comparator<InetSocketAddress>() {

        public int compare(InetSocketAddress a1, InetSocketAddress a2) {
            String s1 = a1.toString();
            String s2 = a2.toString();
            return s1.compareTo(s2);
        }

    });
    for (InetSocketAddress address : addressList) {
        endpoints.add(ioReactor.listen(address));
    }

    // Wait for the endpoint to become ready, i.e. for the listener to start accepting
    // requests.
    while (!endpoints.isEmpty()) {
        ListenerEndpoint endpoint = endpoints.remove();
        try {
            endpoint.waitFor();
            if (log.isInfoEnabled()) {
                InetSocketAddress address = (InetSocketAddress) endpoint.getAddress();
                if (!address.isUnresolved()) {
                    log.info(name + " started on " + address.getHostName() + ":" + address.getPort());
                } else {
                    log.info(name + " started on " + address);
                }
            }
        } catch (InterruptedException e) {
            log.warn("Listener startup was interrupted");
            break;
        }
    }
}

From source file:it.geosolutions.geobatch.unredd.script.reprocess.ReprocessAction.java

/**
 * Main loop on input files. Single file processing is called on execute(File xmlFile)
 */
public Queue<FileSystemEvent> execute(Queue<FileSystemEvent> events) throws ActionException {

    if (getTempDir() == null) {
        throw new IllegalStateException("temp dir has not been initialized");
    }
    if (!getTempDir().exists()) {
        throw new IllegalStateException("temp dir does not exist");
    }

    geoStoreUtil = new GeoStoreUtil(conf.getGeoStoreConfig(), getTempDir());

    //        initComponents(properties);

    final Queue<FileSystemEvent> ret = new LinkedList<FileSystemEvent>();

    while (!events.isEmpty()) {
        final FileSystemEvent ev = events.remove();

        try {
            if (ev != null) {
                if (LOGGER.isTraceEnabled()) {
                    LOGGER.trace("Processing incoming event: " + ev.getSource());
                }
                File xmlFile = ev.getSource(); // this is the input xml file

                /*
                 * The reprocessing flow recomputes statistics and charts. It is
                 * needed when data in the staging area change, e.g. when vector
                 * data are edited, chart scripts are modified or inserted, or
                 * new statistics are added. Each reprocessXXX method handles
                 * one of these cases.
                 */
                ReprocessRequest request = RequestReader.load(xmlFile);
                if (request == null) {
                    throw new ActionException(this, "Could not parse input file:" + xmlFile.getName());
                }

                if (request instanceof ReprocessLayerRequest) {
                    reprocessLayer((ReprocessLayerRequest) request);

                } else if (request instanceof ReprocessChartRequest) {
                    reprocessChart((ReprocessChartRequest) request);

                } else if (request instanceof ReprocessStatsRequest) {
                    reprocessStats((ReprocessStatsRequest) request);

                }

                ret.add(new FileSystemEvent(xmlFile, FileSystemEventType.FILE_ADDED));

            } else {
                LOGGER.error("Encountered a null event: skipping event");
                continue;
            }

        } catch (ActionException ex) {
            LOGGER.error(ex.getMessage());
            listenerForwarder.failed(ex);
            throw ex;

        } catch (Exception ex) {
            LOGGER.error(ex.getMessage(), ex);
            listenerForwarder.failed(ex);
            throw new ActionException(this, ex.getMessage(), ex);
        }
    }

    return ret;
}
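
Finally, the drain-and-collect shape of this action (remove events from the incoming queue until it is empty, add results to a fresh outgoing queue) is worth seeing in isolation; the event names and the handling step below are illustrative:

import java.util.LinkedList;
import java.util.Queue;

public class DrainDemo {
    public static void main(String[] args) {
        Queue<String> incoming = new LinkedList<String>();
        incoming.add("layer.xml");
        incoming.add("chart.xml");

        Queue<String> outgoing = new LinkedList<String>();
        while (!incoming.isEmpty()) {
            String event = incoming.remove();
            // Real processing would happen here; we just tag the event.
            outgoing.add(event + " [processed]");
        }
        System.out.println(outgoing); // [layer.xml [processed], chart.xml [processed]]
    }
}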