Example usage for java.util Queue addAll

List of usage examples for java.util Queue addAll

Introduction

On this page you can find example usage for java.util Queue addAll.

Prototype

boolean addAll(Collection<? extends E> c);

Document

Adds all of the elements in the specified collection to this collection (optional operation).
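
Most of the examples below follow the same pattern: a Queue drives a breadth-first traversal, and addAll enqueues a whole batch of child elements at once. The following is a minimal, self-contained sketch of that pattern; the TreeNode class and its fields are purely illustrative and do not come from any of the projects quoted below.

import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.List;
import java.util.Queue;

public class QueueAddAllExample {

    // Illustrative node type with an arbitrary number of children.
    static class TreeNode {
        final String name;
        final List<TreeNode> children = new ArrayList<>();

        TreeNode(String name) {
            this.name = name;
        }
    }

    public static void main(String[] args) {
        TreeNode root = new TreeNode("root");
        TreeNode a = new TreeNode("a");
        TreeNode b = new TreeNode("b");
        root.children.add(a);
        root.children.add(b);
        a.children.add(new TreeNode("a1"));

        // Breadth-first traversal: poll one node, then addAll of its children.
        Queue<TreeNode> queue = new ArrayDeque<>();
        queue.add(root);
        while (!queue.isEmpty()) {
            TreeNode current = queue.poll();
            System.out.println(current.name);
            queue.addAll(current.children);
        }
        // Prints: root, a, b, a1
    }
}

Note that addAll throws a NullPointerException if the supplied collection is null, and ArrayDeque additionally rejects null elements, so callers typically guard or filter before enqueueing.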

Usage

From source file:nl.b3p.viewer.admin.stripes.GeoServiceActionBean.java

public Resolution generateSld() throws Exception {

    DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
    dbf.setNamespaceAware(true);
    DocumentBuilder db = dbf.newDocumentBuilder();
    Document sldDoc = db.newDocument();

    Element sldEl = sldDoc.createElementNS(NS_SLD, "StyledLayerDescriptor");
    sldDoc.appendChild(sldEl);
    sldEl.setAttributeNS(NS_SLD, "version", "1.0.0");
    sldEl.setAttributeNS("http://www.w3.org/2001/XMLSchema-instance", "xsi:schemaLocation",
            "http://www.opengis.net/sld http://schemas.opengis.net/sld/1.0.0/StyledLayerDescriptor.xsd");
    sldEl.setAttribute("xmlns:ogc", NS_OGC);
    sldEl.setAttribute("xmlns:gml", NS_GML);
    service.loadLayerTree();

    Queue<Layer> layerStack = new LinkedList<>();
    Layer l = service.getTopLayer();
    while (l != null) {
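        // Enqueue this layer's children so the whole layer tree is walked breadth-first.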
        layerStack.addAll(service.getLayerChildrenCache(l));

        if (l.getName() != null) {
            Element nlEl = sldDoc.createElementNS(NS_SLD, "NamedLayer");
            sldEl.appendChild(nlEl);
            String title = l.getTitleAlias() != null ? l.getTitleAlias() : l.getTitle();
            if (title != null) {
                nlEl.appendChild(sldDoc.createComment(" Layer '" + title + "' "));
            }
            Element nEl = sldDoc.createElementNS(NS_SLD, "Name");
            nEl.setTextContent(l.getName());
            nlEl.appendChild(nEl);

            if (l.getFeatureType() != null) {
                String protocol = "";
                if (l.getFeatureType().getFeatureSource() != null) {
                    protocol = " (protocol " + l.getFeatureType().getFeatureSource().getProtocol() + ")";
                }

                String ftComment = " This layer has a feature type" + protocol
                        + " you can use in a FeatureTypeConstraint element as follows:\n";
                ftComment += "            <LayerFeatureConstraints>\n";
                ftComment += "                <FeatureTypeConstraint>\n";
                ftComment += "                    <FeatureTypeName>" + l.getFeatureType().getTypeName()
                        + "</FeatureTypeName>\n";
                ftComment += "                    Add ogc:Filter or Extent element here. ";
                if (l.getFeatureType().getAttributes().isEmpty()) {
                    ftComment += " No feature type attributes are known.\n";
                } else {
                    ftComment += " You can use the following feature type attributes in ogc:PropertyName elements:\n";
                    for (AttributeDescriptor ad : l.getFeatureType().getAttributes()) {
                        ftComment += "                    <ogc:PropertyName>" + ad.getName()
                                + "</ogc:PropertyName>";
                        if (ad.getAlias() != null) {
                            ftComment += " (" + ad.getAlias() + ")";
                        }
                        if (ad.getType() != null) {
                            ftComment += " (type: " + ad.getType() + ")";
                        }
                        ftComment += "\n";
                    }
                }
                ftComment += "                </FeatureTypeConstraint>\n";
                ftComment += "            </LayerFeatureConstraints>\n";
                ftComment += "        ";
                nlEl.appendChild(sldDoc.createComment(ftComment));
            }

            nlEl.appendChild(sldDoc.createComment(" Add a UserStyle or NamedStyle element here "));
            String styleComment = " (no server-side named styles are known other than 'default') ";
            ClobElement styleDetail = l.getDetails().get(Layer.DETAIL_WMS_STYLES);
            if (styleDetail != null) {
                try {
                    JSONArray styles = new JSONArray(styleDetail.getValue());

                    if (styles.length() > 0) {
                        styleComment = " The following NamedStyles are available according to the capabilities: \n";

                        for (int i = 0; i < styles.length(); i++) {
                            JSONObject jStyle = styles.getJSONObject(i);

                            styleComment += "            <NamedStyle><Name>" + jStyle.getString("name")
                                    + "</Name></NamedStyle>";
                            if (jStyle.has("title")) {
                                styleComment += " (" + jStyle.getString("title") + ")";
                            }
                            styleComment += "\n";
                        }
                    }

                } catch (JSONException e) {
                    // Ignore malformed style details and keep the default style comment.
                }
                styleComment += "        ";
            }
            nlEl.appendChild(sldDoc.createComment(styleComment));
        }

        l = layerStack.poll();
    }

    TransformerFactory tf = TransformerFactory.newInstance();
    Transformer t = tf.newTransformer();
    t.setOutputProperty(OutputKeys.INDENT, "yes");
    t.setOutputProperty("{http://xml.apache.org/xslt}indent-amount", "4");
    t.setOutputProperty(OutputKeys.ENCODING, "UTF-8");

    DOMSource source = new DOMSource(sldDoc);
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    StreamResult result = new StreamResult(bos);
    t.transform(source, result);
    generatedSld = new String(bos.toByteArray(), "UTF-8");

    // indent doesn't add newline after XML declaration
    generatedSld = generatedSld.replaceFirst("\"\\?><StyledLayerDescriptor", "\"?>\n<StyledLayerDescriptor");
    return new ForwardResolution(JSP_EDIT_SLD);
}

From source file:org.eclipse.skalli.core.search.LuceneIndex.java

private List<IndexEntry> indexEntity(T entity) {
    List<IndexEntry> fields = new LinkedList<IndexEntry>();

    Queue<EntityBase> queue = new LinkedList<EntityBase>();
    queue.add(entity);

    while (!queue.isEmpty()) {
        EntityBase currentEntity = queue.poll();

        for (ExtensionService<?> extensionService : ExtensionServices.getAll()) {
            if (currentEntity.getClass().equals(extensionService.getExtensionClass())) {
                Indexer<?> indexer = extensionService.getIndexer();
                if (indexer != null) {
                    indexer.indexEntity(fields, currentEntity);
                }
            }
        }

        if (currentEntity instanceof ExtensibleEntityBase) {
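            // Also index the entity's extensions by adding them to the traversal queue.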
            queue.addAll(((ExtensibleEntityBase) currentEntity).getAllExtensions());
        }
    }
    return fields;
}

From source file:alluxio.proxy.s3.S3RestServiceHandler.java

private List<URIStatus> listObjects(AlluxioURI uri, ListBucketOptions listBucketOptions)
        throws FileDoesNotExistException, IOException, AlluxioException {
    List<URIStatus> objects = new ArrayList<>();
    Queue<URIStatus> traverseQueue = new ArrayDeque<>();

    List<URIStatus> children;
    String prefix = listBucketOptions.getPrefix();
    if (prefix != null && prefix.contains(AlluxioURI.SEPARATOR)) {
        AlluxioURI prefixDirUri = new AlluxioURI(uri.getPath() + AlluxioURI.SEPARATOR
                + prefix.substring(0, prefix.lastIndexOf(AlluxioURI.SEPARATOR)));
        children = mFileSystem.listStatus(prefixDirUri);
    } else {
        children = mFileSystem.listStatus(uri);
    }
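    // Seed the traversal queue with the direct children of the listing root.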
    traverseQueue.addAll(children);
    while (!traverseQueue.isEmpty()) {
        URIStatus cur = traverseQueue.remove();
        if (!cur.isFolder()) {
            // Alluxio file is an object.
            objects.add(cur);
        } else if (!cur.getName().endsWith(Constants.S3_MULTIPART_TEMPORARY_DIR_SUFFIX)) {
            // The directory is not a temporary directory of multipart upload, list recursively.
            List<URIStatus> curChildren = mFileSystem.listStatus(new AlluxioURI(cur.getPath()));
            if (curChildren.isEmpty()) {
                // An empty Alluxio directory is considered as a valid object.
                objects.add(cur);
            } else {
                traverseQueue.addAll(curChildren);
            }
        }
    }
    return objects;
}

From source file:org.commonjava.maven.ext.manip.rest.DefaultVersionTranslator.java

/**
 * Translate the versions.
 * There may be a lot of them, possibly causing timeouts or other issues.
 * This is mitigated by splitting them into smaller chunks when an error occurs and retrying.
 */
public Map<ProjectVersionRef, String> translateVersions(List<ProjectVersionRef> projects) {
    final Map<ProjectVersionRef, String> result = new HashMap<>();
    final Queue<Task> queue = new ArrayDeque<>();
    queue.add(new Task(pvrm, projects, endpointUrl));

    while (!queue.isEmpty()) {
        Task task = queue.remove();
        task.executeTranslate();
        if (task.isSuccess()) {
            result.putAll(task.getResult());
        } else {
            if (task.canSplit()) {
                if (task.getStatus() < 0) {
                    logger.debug("Caught exception calling server with message {}", task.getErrorMessage());
                } else {
                    logger.debug("Did not get status {} but received {}", SC_OK, task.getStatus());
                }

                List<Task> tasks = task.split();

                logger.warn(
                        "Failed to translate versions for task @{}, splitting and retrying. Chunk size was: {} and new chunk size {} in {} segments.",
                        task.hashCode(), task.getChunkSize(), tasks.get(0).getChunkSize(), tasks.size());
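                // Re-enqueue the smaller chunks so they are retried.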
                queue.addAll(tasks);
            } else {
                logger.debug("Cannot split and retry anymore.");
                if (task.getStatus() > 0) {
                    throw new RestException("Received response status " + task.getStatus() + " with message: "
                            + task.getErrorMessage());
                } else {
                    throw new RestException("Received response status " + task.getStatus() + " with message "
                            + task.getErrorMessage());
                }
            }
        }
    }
    return result;
}

From source file:org.jboss.qa.phaser.Executor.java

public void execute() throws Exception {
    final List<ErrorReport> throwAtEnd = new LinkedList<>();
    invokeJobMethods(BeforeJob.class);

    final Queue<ExecutionNode> nodeQueue = new LinkedList<>(roots);
    boolean finalizeState = false;
    while (!nodeQueue.isEmpty()) {
        final ExecutionNode node = nodeQueue.poll();

        final ExecutionError err = node.execute(finalizeState);

        if (err != null) {
            final ExceptionHandling eh = err.getExceptionHandling();
            final ErrorReport errorReport = new ErrorReport("Exception thrown by phase execution:",
                    err.getThrowable());
            switch (eh.getReport()) {
            case THROW_AT_END:
                throwAtEnd.add(errorReport);
                break;
            case LOG:
                ErrorReporter.report(errorReport);
                break;
            default:
                log.debug("Exception by phase execution, continue.");
            }

            if (eh.getExecution() == ExceptionHandling.Execution.IMMEDIATELY_STOP) {
                break;
            } else if (eh.getExecution() == ExceptionHandling.Execution.FINALIZE) {
                finalizeState = true;
            }
        }
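        // Continue the traversal with this node's children.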
        nodeQueue.addAll(node.getChildNodes());
    }

    invokeJobMethods(AfterJob.class);
    ErrorReporter.finalErrorReport(throwAtEnd);
}

From source file:org.phenotips.data.permissions.internal.DefaultPatientAccessHelper.java

@Override
public AccessLevel getAccessLevel(Patient patient, EntityReference user) {
    AccessLevel result = this.manager.resolveAccessLevel("none");
    if (patient == null || user == null) {
        return result;
    }
    try {
        EntityReference owner = getOwner(patient).getUser();
        Collection<Collaborator> collaborators = getCollaborators(patient);
        Set<DocumentReference> processedEntities = new HashSet<DocumentReference>();
        Queue<DocumentReference> entitiesToCheck = new LinkedList<DocumentReference>();
        entitiesToCheck.add((DocumentReference) user);
        AccessLevel currentItemAccess = null;
        DocumentReference currentItem;
        XWikiContext context = getXWikiContext();
        XWikiGroupService groupService = context.getWiki().getGroupService(context);
        while (!entitiesToCheck.isEmpty()) {
            currentItem = entitiesToCheck.poll();
            currentItemAccess = getAccessLevel(currentItem, owner, collaborators);
            if (currentItemAccess.compareTo(result) > 0) {
                result = currentItemAccess;
            }
            processedEntities.add(currentItem);
            Collection<DocumentReference> groups = groupService.getAllGroupsReferencesForMember(currentItem, 0,
                    0, context);
            groups.removeAll(processedEntities);
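            // Also check the groups this entity belongs to; already-processed ones were removed above.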
            entitiesToCheck.addAll(groups);
        }
    } catch (XWikiException ex) {
        this.logger.warn("Failed to compute access level for [{}] on [{}]: {}", user, patient.getId(),
                ex.getMessage());
    }
    return result;
}

From source file:org.openengsb.labs.paxexam.karaf.container.internal.KarafTestContainer.java

/**
 * Since we might get quite deep, use a simple breadth-first search algorithm.
 */
private File searchKarafBase(File targetFolder) {
    Queue<File> searchNext = new LinkedList<File>();
    searchNext.add(targetFolder);
    while (!searchNext.isEmpty()) {
        File head = searchNext.poll();
        if (!head.isDirectory()) {
            continue;
        }
        boolean system = false;
        boolean etc = false;
        for (File file : head.listFiles()) {
            if (file.isDirectory() && file.getName().equals("system")) {
                system = true;
            }
            if (file.isDirectory() && file.getName().equals("etc")) {
                etc = true;
            }
        }
        if (system && etc) {
            return head;
        }
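        // Not the Karaf base dir; enqueue this directory's entries and keep searching.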
        searchNext.addAll(Arrays.asList(head.listFiles()));
    }
    throw new IllegalStateException("No karaf base dir found in extracted distribution.");
}

From source file:org.apache.karaf.tooling.exam.container.internal.KarafTestContainer.java

/**
 * Since we might get quite deep, use a simple breadth-first search algorithm.
 */
private File searchKarafBase(File _targetFolder) {
    Queue<File> searchNext = new LinkedList<File>();
    searchNext.add(_targetFolder);
    while (!searchNext.isEmpty()) {
        File head = searchNext.poll();
        if (!head.isDirectory()) {
            continue;
        }
        boolean isSystem = false;
        boolean etc = false;
        for (File file : head.listFiles()) {
            if (file.isDirectory() && file.getName().equals("system")) {
                isSystem = true;
            }
            if (file.isDirectory() && file.getName().equals("etc")) {
                etc = true;
            }
        }
        if (isSystem && etc) {
            return head;
        }
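        // Descend into this directory by enqueueing its entries for the next iterations.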
        searchNext.addAll(Arrays.asList(head.listFiles()));
    }
    throw new IllegalStateException("No karaf base dir found in extracted distribution.");
}

From source file:org.paxle.core.io.temp.impl.CommandTempReleaser.java

private void releaseCommandFiles(final ICommand cmd, final Long id) {
    try {
        File file;
        final ICrawlerDocument cdoc = cmd.getCrawlerDocument();
        if (cdoc != null && (file = cdoc.getContent()) != null) {
            if (tfm.isKnown(file)) {
                try {
                    tfm.releaseTempFile(file);
                } catch (FileNotFoundException e) {
                    this.logger.warn("downloaded crawler-data not available for release");
                }
            } else {
                this.logger.debug(String.format("Crawlerdoc tempfile %s not managed by tempfilemanager",
                        file.toString()));
            }
        }

        final Queue<Map.Entry<String, IParserDocument>> pdocs = new LinkedList<Map.Entry<String, IParserDocument>>();

        IParserDocument pdoc = cmd.getParserDocument();
        Map.Entry<String, IParserDocument> entry = null;
        if (pdoc != null) {
            do {
                if (entry != null) {
                    pdoc = entry.getValue();
                }

                if ((file = pdoc.getTextFile()) != null) {
                    if (tfm.isKnown(file)) {
                        try {
                            tfm.releaseTempFile(file);
                        } catch (FileNotFoundException e) {
                            final String msg = (entry == null) ? "parser-document"
                                    : "sub parser-document '" + entry.getKey() + "'";
                            logger.warn(String.format("data of %s of cmd [%06d] not available for release", msg,
                                    id));
                        }
                    } else {
                        this.logger.debug(String.format("Parserdoc tempfile %s not managed by tempfilemanager",
                                file.toString()));
                    }
                }

                pdocs.addAll(pdoc.getSubDocs().entrySet());
            } while ((entry = pdocs.poll()) != null);
        }

    } catch (Throwable e) {
        this.logger.error(String.format("Unexpected '%s' while releasing temporary files of command '%s'.",
                e.getClass().getName(), cmd.getLocation()), e);
    }
}

From source file:org.ohmage.query.impl.CampaignQueries.java

public void createCampaign(final Campaign campaign, final Collection<String> classIds,
        final String creatorUsername) throws DataAccessException {

    // Create the transaction.
    DefaultTransactionDefinition def = new DefaultTransactionDefinition();
    def.setName("Creating a new campaign.");

    try {
        // Begin the transaction.
        PlatformTransactionManager transactionManager = new DataSourceTransactionManager(getDataSource());
        TransactionStatus status = transactionManager.getTransaction(def);

        String iconUrlString = null;
        URL iconUrl = campaign.getIconUrl();
        if (iconUrl != null) {
            iconUrlString = iconUrl.toString();
        }

        String xml;
        try {
            xml = campaign.getXml();
        } catch (DomainException e) {
            transactionManager.rollback(status);
            throw new DataAccessException("The XML could not be saved.");
        }

        // Create the campaign.
        try {
            getJdbcTemplate().update(SQL_INSERT_CAMPAIGN,
                    new Object[] { campaign.getId(), campaign.getName(), xml, campaign.getDescription(),
                            iconUrlString, campaign.getAuthoredBy(), campaign.getRunningState().toString(),
                            campaign.getPrivacyState().toString() });
        } catch (org.springframework.dao.DataAccessException e) {
            transactionManager.rollback(status);
            throw new DataAccessException("Error executing SQL '" + SQL_INSERT_CAMPAIGN + "' with parameters: "
                    + campaign.getId() + ", " + campaign.getName() + ", " + xml + ", "
                    + campaign.getDescription() + ", " + iconUrlString + ", " + campaign.getAuthoredBy() + ", "
                    + campaign.getRunningState().toString() + ", " + campaign.getPrivacyState().toString(), e);
        }

        // Create the set of survey and prompt IDs for this campaign.
        final Set<String> surveyIds = new HashSet<String>();
        final Set<String> promptIds = new HashSet<String>();

        // Loop through all of the surveys and add the survey and prompt
        // IDs.
        for (Survey survey : campaign.getSurveys().values()) {
            // Get this survey's ID.
            surveyIds.add(survey.getId());

            Queue<SurveyItem> surveyItems = new LinkedList<SurveyItem>();
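            // Seed the queue with the survey's top-level items; repeatable sets contribute their children below.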
            surveyItems.addAll(survey.getSurveyItems().values());
            while (surveyItems.size() > 0) {
                SurveyItem surveyItem = surveyItems.poll();

                if (surveyItem instanceof RepeatableSet) {
                    RepeatableSet repeatableSet = (RepeatableSet) surveyItem;

                    for (SurveyItem rsSurveyItem : repeatableSet.getSurveyItems().values()) {
                        surveyItems.add(rsSurveyItem);
                    }
                } else if (surveyItem instanceof Prompt) {
                    promptIds.add(((Prompt) surveyItem).getId());
                }
            }
        }

        // Get the campaign's ID.
        final String campaignId = campaign.getId();

        // Compile the list of parameters for the survey ID lookup table.
        List<Object[]> surveyParameters = new ArrayList<Object[]>(surveyIds.size());
        for (String surveyId : surveyIds) {
            Object[] params = new Object[2];
            params[0] = surveyId;
            params[1] = campaignId;
            surveyParameters.add(params);
        }

        // The SQL to write the data.
        final String surveyIdLookupBatchSql = "INSERT INTO " + "campaign_survey_lookup(survey_id, campaign_id) "
                + "VALUES (?, (SELECT id FROM campaign WHERE urn = ?))";

        // Add the survey IDs to the lookup table.
        try {
            getJdbcTemplate().batchUpdate(surveyIdLookupBatchSql, surveyParameters);
        } catch (org.springframework.dao.DataAccessException e) {
            transactionManager.rollback(status);
            throw new DataAccessException("Error executing SQL '" + surveyIdLookupBatchSql + "'.", e);
        }

        // Compile the list of parameters for the prompt ID lookup table.
        List<Object[]> promptParameters = new ArrayList<Object[]>(promptIds.size());
        for (String promptId : promptIds) {
            Object[] params = new Object[2];
            params[0] = promptId;
            params[1] = campaignId;
            promptParameters.add(params);
        }

        // The SQL to write the data.
        final String promptIdLookupBatchSql = "INSERT INTO " + "campaign_prompt_lookup(prompt_id, campaign_id) "
                + "VALUES (?, (SELECT id FROM campaign WHERE urn = ?))";

        // Add the prompt IDs to the lookup table.
        try {
            getJdbcTemplate().batchUpdate(promptIdLookupBatchSql, promptParameters);
        } catch (org.springframework.dao.DataAccessException e) {
            transactionManager.rollback(status);
            throw new DataAccessException("Error executing SQL '" + promptIdLookupBatchSql + "'.", e);
        }

        // Add each of the classes to the campaign.
        for (String classId : classIds) {
            associateCampaignAndClass(transactionManager, status, campaign.getId(), classId);
        }

        // Add the requesting user as the author. This may have already 
        // happened above.
        try {
            getJdbcTemplate().update(SQL_INSERT_USER_ROLE_CAMPAIGN, creatorUsername, campaign.getId(),
                    Campaign.Role.AUTHOR.toString());
        } catch (org.springframework.dao.DataIntegrityViolationException e) {
            // The user was already an author of this campaign implying 
            // that it's one of the default campaign roles based on a class
            // role that the 'creatorUsername' has.
            e.printStackTrace();
        } catch (org.springframework.dao.DataAccessException e) {
            transactionManager.rollback(status);
            throw new DataAccessException("Error executing SQL '" + SQL_INSERT_USER_ROLE_CAMPAIGN
                    + "' with parameters: " + creatorUsername + ", " + campaign.getId() + ", "
                    + Campaign.Role.AUTHOR.toString(), e);
        }

        // Commit the transaction.
        try {
            transactionManager.commit(status);
        } catch (TransactionException e) {
            transactionManager.rollback(status);
            throw new DataAccessException("Error while committing the transaction.", e);
        }
    } catch (TransactionException e) {
        throw new DataAccessException("Error while attempting to rollback the transaction.", e);
    }
}