Example usage for java.util Queue add

Introduction

On this page you can find example usages of java.util Queue add.

Prototype

boolean add(E e);

Document

Inserts the specified element into this queue if it is possible to do so immediately without violating capacity restrictions, returning true upon success and throwing an IllegalStateException if no space is currently available.
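
A minimal, self-contained sketch of this contract: on an unbounded queue such as LinkedList, add always succeeds, while on a capacity-restricted queue such as ArrayBlockingQueue it throws an IllegalStateException once the queue is full; offer reports the same failure with a return value instead.

import java.util.LinkedList;
import java.util.Queue;
import java.util.concurrent.ArrayBlockingQueue;

public class QueueAddDemo {
    public static void main(String[] args) {
        // LinkedList is unbounded: add never fails for capacity reasons.
        Queue<String> unbounded = new LinkedList<>();
        unbounded.add("a");
        unbounded.add("b");
        System.out.println(unbounded); // [a, b]

        // ArrayBlockingQueue is capacity-restricted: add throws when the queue is full.
        Queue<String> bounded = new ArrayBlockingQueue<>(1);
        bounded.add("a");
        try {
            bounded.add("b");
        } catch (IllegalStateException e) {
            System.out.println("add failed: " + e.getMessage()); // add failed: Queue full
        }

        // offer returns false instead of throwing.
        System.out.println(bounded.offer("b")); // false
    }
}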

Usage

From source file: org.structr.core.entity.SchemaMethod.java

private void determineSignature(final Map<String, SchemaNode> schemaNodes,
        final AbstractSchemaNode schemaEntity, final ActionEntry entry, final String methodName)
        throws FrameworkException {

    final App app = StructrApp.getInstance();
    final Set<String> visitedTypes = new LinkedHashSet<>();
    final Queue<String> typeQueue = new LinkedList<>();
    final String structrPackage = "org.structr.dynamic.";

    // initial type
    addType(typeQueue, schemaEntity);

    while (!typeQueue.isEmpty()) {

        final String typeName = typeQueue.poll();
        String shortTypeName = typeName;

        if (typeName != null && !visitedTypes.contains(typeName)) {

            visitedTypes.add(typeName);

            if (typeName.startsWith(structrPackage)) {
                shortTypeName = typeName.substring(structrPackage.length());
            }

            // try to find schema node for the given type
            final SchemaNode typeNode = schemaNodes.get(shortTypeName);
            if (typeNode != null && !typeNode.equals(schemaEntity)) {

                // try to identify overridden schema method from database
                final SchemaMethod superMethod = app.nodeQuery(SchemaMethod.class)
                        .and(SchemaMethod.schemaNode, typeNode).and(SchemaMethod.name, methodName).getFirst();

                if (superMethod != null) {

                    final ActionEntry superEntry = superMethod.getActionEntry(schemaNodes, typeNode);

                    entry.copy(superEntry);

                    // done
                    return;
                }

                // next type in queue
                addType(typeQueue, typeNode);

            } else {

                // no schema node for the given type found, try internal types
                final Class internalType = SchemaHelper.classForName(typeName);
                if (internalType != null) {

                    if (getSignature(internalType, methodName, entry)) {

                        return;
                    }

                    final Class superclass = internalType.getSuperclass();
                    if (superclass != null) {

                        // examine superclass as well
                        typeQueue.add(superclass.getName());

                        // collect interfaces
                        for (final Class iface : internalType.getInterfaces()) {
                            typeQueue.add(iface.getName());
                        }
                    }
                }
            }
        }
    }
}

From source file: ubic.gemma.core.search.SearchServiceImpl.java

private Collection<SearchResult> characteristicExpressionExperimentSearch(final SearchSettings settings) {

    Collection<Class<?>> classToSearch = new ArrayList<>(1); // this is a collection because of the API
    // for characteristicService; could add
    // findByUri(Class<?>...)

    // order matters if we hit the limits
    Queue<Class<?>> orderedClassesToSearch = new LinkedList<>();
    orderedClassesToSearch.add(ExpressionExperiment.class);
    orderedClassesToSearch.add(FactorValue.class);
    orderedClassesToSearch.add(BioMaterial.class);

    Collection<SearchResult> results = new HashSet<>();

    StopWatch watch = new StopWatch();
    watch.start();

    while (results.size() < SearchServiceImpl.SUFFICIENT_EXPERIMENT_RESULTS_FROM_CHARACTERISTICS
            && !orderedClassesToSearch.isEmpty()) {
        classToSearch.clear();
        classToSearch.add(orderedClassesToSearch.poll());
        // We handle the OR clauses here.
        String[] subclauses = settings.getQuery().split(" OR ");
        for (String subclause : subclauses) {
            /*
             * Note that the AND is applied only within one entity type. The fix would be to apply AND at this
             * level.
             */
            Collection<SearchResult> classResults = this.characteristicSearchWithChildren(classToSearch,
                    subclause);
            if (!classResults.isEmpty()) {
                String msg = "Found " + classResults.size() + " "
                        + classToSearch.iterator().next().getSimpleName()
                        + " results from characteristic search.";
                if (results.size() >= SearchServiceImpl.SUFFICIENT_EXPERIMENT_RESULTS_FROM_CHARACTERISTICS) {
                    msg += " Total found > "
                            + SearchServiceImpl.SUFFICIENT_EXPERIMENT_RESULTS_FROM_CHARACTERISTICS
                            + ", will not search for more entities.";
                }
                SearchServiceImpl.log.info(msg);
            }
            results.addAll(classResults);
        }

    }

    SearchServiceImpl.log.debug("ExpressionExperiment search: " + settings + " -> " + results.size()
            + " characteristic hits " + watch.getTime() + " ms");

    // Note that if we do this earlier (within each query) the limit SUFFICIENT_EXPERIMENT_RESULTS_FROM_CHARACTERISTICS has
    // more meaning. We would have to unroll the loop above
    return filterExperimentHitsByTaxon(results, settings.getTaxon());
}

From source file: org.apache.hadoop.corona.SchedulerForType.java

/**
 * Try to match one request to one node
 *
 * @param nodeWait Time to wait for node locality
 * @param rackWait Time to wait for rack locality
 * @return The pair contains a session id and a granted resource
 *         or null when no task can be scheduled
 */
private ScheduledPair scheduleOneTask(long nodeWait, long rackWait) {
    if (!nodeManager.existRunnableNodes(type)) {
        return null;
    }

    Queue<PoolGroupSchedulable> poolGroupQueue = poolGroupManager.getScheduleQueue();
    while (!poolGroupQueue.isEmpty()) {
        PoolGroupSchedulable poolGroup = poolGroupQueue.poll();
        if (poolGroup.reachedMaximum()) {
            continue;
        }
        // Get the appropriate pool from the pool group to schedule, then
        // schedule the best session
        Queue<PoolSchedulable> poolQueue = poolGroup.getScheduleQueue();
        while (!poolQueue.isEmpty()) {
            PoolSchedulable pool = poolQueue.poll();
            if (pool.reachedMaximum()) {
                continue;
            }
            Queue<SessionSchedulable> sessionQueue = pool.getScheduleQueue();
            while (!sessionQueue.isEmpty()) {
                SessionSchedulable schedulable = sessionQueue.poll();
                Session session = schedulable.getSession();
                long now = ClusterManager.clock.getTime();
                MatchedPair pair = doMatch(schedulable, now, nodeWait, rackWait);
                synchronized (session) {
                    if (session.isDeleted()) {
                        continue;
                    }
                    if (pair != null) {
                        ResourceGrant grant = commitMatchedResource(session, pair);
                        if (grant != null) {
                            poolGroup.incGranted(1);
                            pool.incGranted(1);
                            schedulable.incGranted(1);
                            // Put back to the queue only if we scheduled successfully
                            poolGroupQueue.add(poolGroup);
                            poolQueue.add(pool);
                            sessionQueue.add(schedulable);
                            return new ScheduledPair(session.getSessionId().toString(), grant);
                        }
                    }
                }
            }
        }
    }
    return null;
}

From source file: tachyon.master.MasterInfo.java

/**
 * Get absolute paths of all in-memory files.
 *
 * @return absolute paths of all in-memory files.
 */
public List<TachyonURI> getInMemoryFiles() {
    List<TachyonURI> ret = new ArrayList<TachyonURI>();
    LOG.info("getInMemoryFiles()");
    Queue<Pair<InodeFolder, TachyonURI>> nodesQueue = new LinkedList<Pair<InodeFolder, TachyonURI>>();
    synchronized (mRootLock) {
        // TODO: Verify we want to use absolute path.
        nodesQueue.add(new Pair<InodeFolder, TachyonURI>(mRoot, new TachyonURI(TachyonURI.SEPARATOR)));
        while (!nodesQueue.isEmpty()) {
            Pair<InodeFolder, TachyonURI> tPair = nodesQueue.poll();
            InodeFolder tFolder = tPair.getFirst();
            TachyonURI curUri = tPair.getSecond();

            Set<Inode> children = tFolder.getChildren();
            for (Inode tInode : children) {
                TachyonURI newUri = curUri.join(tInode.getName());
                if (tInode.isDirectory()) {
                    nodesQueue.add(new Pair<InodeFolder, TachyonURI>((InodeFolder) tInode, newUri));
                } else if (((InodeFile) tInode).isFullyInMemory()) {
                    ret.add(newUri);
                }
            }
        }
    }
    return ret;
}

From source file: it.geosolutions.geobatch.geotiff.publish.GeotiffGeoServerAction.java

public Queue<FileSystemEvent> execute(Queue<FileSystemEvent> events) throws ActionException {
    try {
        listenerForwarder.started();
        final GeoServerActionConfiguration configuration = getConfiguration();
        // //
        // data flow configuration and dataStore name must not be null.
        // //
        if (configuration == null) {
            final String message = "DataFlowConfig is null.";
            if (LOGGER.isErrorEnabled())
                LOGGER.error(message);
            throw new IllegalStateException(message);
        }

        if (events == null) {
            final String message = "Incoming events queue is null.";
            if (LOGGER.isErrorEnabled())
                LOGGER.error(message);
            throw new IllegalStateException(message);
        }

        // returning queue
        final Queue<FileSystemEvent> ret = new LinkedList<FileSystemEvent>();

        // for each incoming file
        for (FileSystemEvent event : events) {

            final File inputFile = event.getSource();

            // checks on input file
            if (!inputFile.exists()) {
                // ERROR or LOG since it does not exist
                if (!configuration.isFailIgnored())
                    throw new IllegalStateException(
                            "File: " + inputFile.getAbsolutePath() + " does not exist!");
                else {
                    if (LOGGER.isWarnEnabled()) {
                        LOGGER.warn("File: " + inputFile.getAbsolutePath() + " does not exist!");
                    }
                }
            }
            // check if is File
            if (!inputFile.isFile()) {
                // ERROR or LOG
                if (!configuration.isFailIgnored())
                    throw new IllegalStateException("File: " + inputFile.getAbsolutePath() + " is not a file!");
                else {
                    if (LOGGER.isWarnEnabled()) {
                        LOGGER.warn("File: " + inputFile.getAbsolutePath() + " is not a file!");
                    }
                }
            }

            // check if we can read it
            if (!inputFile.canRead()) {
                // ERROR or LOG
                if (!configuration.isFailIgnored())
                    throw new IllegalStateException(
                            "File: " + inputFile.getAbsolutePath() + " is not readable!");
                else {
                    if (LOGGER.isWarnEnabled()) {
                        LOGGER.warn("File: " + inputFile.getAbsolutePath() + " is not readablet!");
                    }
                }
            }

            // do your magic
            listenerForwarder.setTask("Publishing: " + inputFile);
            // try to publish on geoserver
            if (publishGeoTiff(inputFile, configuration)) {
                // if success add the geotiff to the output queue
                ret.add(event);
            }
        }
        listenerForwarder.completed();
        return ret;
    } catch (Throwable t) {
        final String message = "FATAL -> " + t.getLocalizedMessage();
        if (LOGGER.isErrorEnabled()) {
            LOGGER.error(message, t); // no need to
        }
        listenerForwarder.failed(t);
        throw new ActionException(this, message, t);
    }
}

From source file: org.apache.hadoop.corona.SchedulerForType.java

/**
 * Find the most over-scheduled session in the most over-scheduled pool.
 * Kill tasks from this session.
 * @param maxToPreemt The maximum number of tasks to kill
 * @param maxRunningTime The killed task cannot be older than this time
 * @return The number of tasks actually killed
 */
private int preemptOneSession(int maxToPreemt, long maxRunningTime) {
    Queue<PoolGroupSchedulable> poolGroupQueue = poolGroupManager.getPreemptQueue();
    while (!poolGroupQueue.isEmpty()) {
        PoolGroupSchedulable poolGroup = poolGroupQueue.poll();
        poolGroup.distributeShare();
        Queue<PoolSchedulable> poolQueue = poolGroup.getPreemptQueue();
        while (!poolQueue.isEmpty()) {
            PoolSchedulable pool = poolQueue.poll();
            pool.distributeShare();
            if (!pool.isPreemptable()) {
                continue;
            }
            Queue<SessionSchedulable> sessionQueue = pool.getPreemptQueue();
            while (!sessionQueue.isEmpty()) {
                SessionSchedulable schedulable = sessionQueue.poll();
                try {
                    int overScheduled = (int) (schedulable.getGranted() - schedulable.getShare());
                    if (overScheduled <= 0) {
                        continue;
                    }
                    maxToPreemt = Math.min(maxToPreemt, overScheduled);
                    LOG.info("Trying to preempt " + maxToPreemt + " " + type + " from "
                            + schedulable.getSession().getHandle());
                    int preempted = preemptSession(schedulable, maxToPreemt, maxRunningTime);
                    poolGroup.incGranted(-1 * preempted);
                    pool.incGranted(-1 * preempted);
                    schedulable.incGranted(-1 * preempted);
                    return preempted;
                } catch (InvalidSessionHandle e) {
                    LOG.warn("Invalid session handle:" + schedulable.getSession().getHandle()
                            + " Session may be removed");
                } finally {
                    // Add the queue back so it can be further preempted for
                    // other sessions.
                    poolGroupQueue.add(poolGroup);
                    poolQueue.add(pool);
                }
            }
        }
    }
    return 0;
}

From source file: ubic.gemma.search.SearchServiceImpl.java

/**
 * @param settings
 */
private Collection<SearchResult> characteristicExpressionExperimentSearch(final SearchSettings settings) {

    Collection<SearchResult> results = new HashSet<SearchResult>();

    Collection<Class<?>> classToSearch = new ArrayList<Class<?>>(1);
    Queue<Class<?>> orderedClassesToSearch = new LinkedList<Class<?>>();
    orderedClassesToSearch.add(ExpressionExperiment.class);
    orderedClassesToSearch.add(FactorValue.class);
    orderedClassesToSearch.add(BioMaterial.class);
    orderedClassesToSearch.add(Treatment.class);

    Collection<SearchResult> characterSearchResults = new HashSet<SearchResult>();

    while (characterSearchResults.size() < SUFFICIENT_EXPERIMENT_RESULTS_FROM_CHARACTERISTICS
            && !orderedClassesToSearch.isEmpty()) {
        classToSearch.clear();
        classToSearch.add(orderedClassesToSearch.poll());
        Collection<SearchResult> classResults = ontologySearchAnnotatedObject(classToSearch, settings);
        characterSearchResults.addAll(classResults);

        String msg = "Found " + classResults.size() + " " + classToSearch.iterator().next().getSimpleName()
                + " results from characteristic search.";
        if (characterSearchResults.size() >= SUFFICIENT_EXPERIMENT_RESULTS_FROM_CHARACTERISTICS) {
            msg += " Total found > " + SUFFICIENT_EXPERIMENT_RESULTS_FROM_CHARACTERISTICS
                    + ", will not search for more entities.";
        }
        log.info(msg);
    }

    StopWatch watch = new StopWatch();
    watch.start();

    // filter and get parents...
    int numEEs = 0;
    Collection<BioMaterial> biomaterials = new HashSet<BioMaterial>();
    Collection<FactorValue> factorValues = new HashSet<FactorValue>();
    Collection<Treatment> treatments = new HashSet<Treatment>();

    for (SearchResult sr : characterSearchResults) {
        Class<?> resultClass = sr.getResultClass();
        if (ExpressionExperiment.class.isAssignableFrom(resultClass)) {
            sr.setHighlightedText(sr.getHighlightedText() + " (characteristic)");
            results.add(sr);
            numEEs++;
        } else if (BioMaterial.class.isAssignableFrom(resultClass)) {
            biomaterials.add((BioMaterial) sr.getResultObject());
        } else if (FactorValue.class.isAssignableFrom(resultClass)) {
            factorValues.add((FactorValue) sr.getResultObject());
        } else if (Treatment.class.isAssignableFrom(resultClass)) {
            treatments.add((Treatment) sr.getResultObject());
        }
    }

    /*
     * Much faster to batch it...
     */
    if (biomaterials.size() > 0) {
        Collection<ExpressionExperiment> ees = expressionExperimentService.findByBioMaterials(biomaterials);
        for (ExpressionExperiment ee : ees) {
            results.add(new SearchResult(ee, INDIRECT_DB_HIT_PENALTY, "BioMaterial characteristic"));
        }
    }

    if (factorValues.size() > 0) {
        Collection<ExpressionExperiment> ees = expressionExperimentService.findByFactorValues(factorValues);
        for (ExpressionExperiment ee : ees) {
            if (log.isDebugEnabled())
                log.debug(ee);
            results.add(new SearchResult(ee, INDIRECT_DB_HIT_PENALTY, "Factor characteristic"));
        }
    }

    if (treatments.size() > 0) {
        log.info("Not processing treatments, but hits were found");
        // Collection<ExpressionExperiment> ees = expressionExperimentService.findByTreatments( treatments );
        // for ( ExpressionExperiment ee : ees ) {
        // if ( !results.contains( ee ) ) {
        // results.add( new SearchResult( ee, INDIRECT_DB_HIT_PENALTY, "Treatment" ) );
        // }
        // }
    }

    if (log.isDebugEnabled()) {
        log.debug(
                "ExpressionExperiment search: " + settings + " -> " + results.size() + " characteristic hits");
    }

    if (watch.getTime() > 1000) {
        log.info("Retrieving " + results.size() + " experiments from " + characterSearchResults.size()
                + " retrieved characteristics took " + watch.getTime() + " ms");
        log.info("Breakdown: " + numEEs + " via direct association with EE; " + biomaterials.size()
                + " via association with Biomaterial; " + factorValues.size() + " via experimental design");
    }

    return results;
}

From source file: at.alladin.rmbt.statisticServer.OpenTestSearchResource.java

/**
 * Transforms the given parameters into a psql where-clause, starting with "AND"
 * @param attr the attribute name from the GET request; it is replaced with the real column name
 * @param value the value the column given in 'attr' should have
 * @param comperator the comparator, e.g. '=', '>=', '<=', 'LIKE'
 * @param negate true if the results should NOT match the criteria
 * @param type the type of the column (numeric, string, uuid, date)
 * @param queue the queue into which the transformed value should be put
 * @return the formatted AND-clause for the prepared statement (AND xxx = ?)
 */
private String formatWhereClause(String attr, String value, String comperator, boolean negate, FieldType type,
        Queue<Map.Entry<String, FieldType>> queue) {
    //if it is a array => remove the brackets
    if (attr.endsWith("[]")) {
        attr = attr.substring(0, attr.length() - 2);
    }

    //because we use aliases, some modifications have to be made
    if (attr.equals("download_kbit")) {
        attr = "t.speed_download";
        //histogram
        if (comperator.equals(">="))
            this.histogramInfo.min_download = Long.parseLong(value);
        else if (comperator.equals("<="))
            this.histogramInfo.max_download = Long.parseLong(value);
    } else if (attr.equals("upload_kbit")) {
        attr = "t.speed_upload";
        //histogram
        if (comperator.equals(">="))
            this.histogramInfo.min_upload = Long.parseLong(value);
        else if (comperator.equals("<="))
            this.histogramInfo.max_upload = Long.parseLong(value);
    } else if (attr.equals("ping_ms")) {
        attr = "t.ping_median";
        Double v = Double.parseDouble(value) * 1000000;
        //histogram
        if (comperator.equals(">="))
            this.histogramInfo.min_ping = Double.parseDouble(value);
        else if (comperator.equals("<="))
            this.histogramInfo.max_ping = Double.parseDouble(value);
        value = v.toString();
    } else if (attr.equals("time")) {
        attr = "t.time";
    } else if (attr.equals("cat_technology")) {
        attr = "nt.group_name";
    } else if (attr.equals("client_version")) {
        attr = "client_software_version";
    } else if (attr.equals("model")) {
        attr = "(adm.fullname ILIKE ? OR t.model ILIKE ?)";
        queue.add(new AbstractMap.SimpleEntry<>(value, type));
        queue.add(new AbstractMap.SimpleEntry<>(value, type));
        if (!negate) {
            return " AND " + attr;
        } else {
            return " AND NOT " + attr;
        }
    } else if (attr.equals("provider_name")) {
        attr = "(mprov.name ILIKE ? OR (mprov.name IS NULL AND  prov.name ILIKE ?))";
        queue.add(new AbstractMap.SimpleEntry<>(value, type));
        queue.add(new AbstractMap.SimpleEntry<>(value, type));
        if (!negate) {
            return " AND " + attr;
        } else {
            return " AND NOT " + attr;
        }
    } else if (attr.equals("mobile_provider_name")) {
        attr = "mprov.name";
    } else if (attr.equals("network_name")) {
        attr = "network_operator_name";
    } else if (attr.equals("network_type")) {
        attr = "t.network_group_type";
    } else if (attr.equals("cursor") || attr.equals("max_results")) {
        return "";
    } else if (attr.equals("platform")) {
        attr = "(t.plattform ILIKE ? OR (t.plattform IS NULL AND t.client_name ILIKE ?))";
        queue.add(new AbstractMap.SimpleEntry<>(value, type));
        queue.add(new AbstractMap.SimpleEntry<>(value, type));
        if (!negate) {
            return " AND " + attr;
        } else {
            return " AND NOT " + attr;
        }
    } else if (attr.equals("signal_strength")) {
        attr = "t.signal_strength";
    } else if (attr.equals("open_uuid")) {
        attr = "t.open_uuid";
    } else if (attr.equals("lat")) {
        attr = "t.geo_lat";
    } else if (attr.equals("long")) {
        attr = "t.geo_long";
    } else if (attr.equals("sim_mcc_mnc")) {
        attr = "network_sim_operator";
    } else if (attr.equals("sim_country")) {
        attr = "network_sim_country";
    } else if (attr.equals("asn")) {
        attr = "public_ip_asn";
    } else if (attr.equals("implausible")) {
        //if false -> also allow null
        if (value.toLowerCase().equals("true")) {
            this.excludeImplausible = false;
            //return " AND (t.implausible = FALSE or t.implausible IS NULL)";
        }
    } else if (attr.equals("loc_accuracy")) {
        attr = "t.geo_accuracy";

        //special case: if value > threshold -> ignore and find no results (?)
        if (comperator.equals(">=") || comperator.equals("=")) {
            long val = Long.parseLong(value);
            if (val > Double.parseDouble(getSetting("rmbt_geo_accuracy_detail_limit"))) {
                return " AND 1=0";
            }
        }

        //special case: if (-1) than NULL values should be found
        if (value.equals("-1")) {
            return " AND t.geo_accuracy IS NULL";
        }
    } else if (attr.equals("ip_anonym")) {
        attr = "client_public_ip_anonymized";
    } else if (attr.equals("ip_version")) {
        attr = "family(client_public_ip::inet)";
    }

    //, zip_code are not renamed

    queue.add(new AbstractMap.SimpleEntry<>(value, type));
    if (negate) {
        return " AND NOT " + attr + " " + comperator + " ?";
    } else {
        return " AND " + attr + " " + comperator + " ?";
    }
}

From source file: org.aksw.simba.cetus.yago.YagoBasedTypeSearcher.java

protected void searchDolceSuperClasses(Set<Resource> types) {
    Queue<Resource> queue = new LinkedList<Resource>(types);
    Resource classResource, superClass;
    RDFNode node;
    NodeIterator nodeIterator;
    Set<Resource> yagoSuperClasses = new HashSet<Resource>();
    Set<Resource> dolceSuperClasses = new HashSet<Resource>();
    boolean dolceClassFound = false;
    while (!queue.isEmpty()) {
        classResource = queue.poll();
        // If this resource is a DOLCE resource
        if (dolceClassModel.containsResource(classResource)) {
            dolceClassFound = true;
        } else {
            nodeIterator = classesModel.listObjectsOfProperty(classResource, RDFS.subClassOf);
            yagoSuperClasses.clear();
            dolceSuperClasses.clear();
            while (nodeIterator.hasNext()) {
                node = nodeIterator.next();
                if (node.isResource()) {
                    superClass = node.asResource();
                    if (dolceClassModel.containsResource(superClass)) {
                        dolceSuperClasses.add(superClass);
                    } else {
                        yagoSuperClasses.add(superClass);
                    }
                } else {
                    LOGGER.error("Expected a resource in the statement (" + classResource
                            + ", rdfs:subClassOf, " + node + "). Ignoring this statement.");
                }
            }

            // If a DOLCE class has been found
            if (dolceSuperClasses.size() > 0) {
                // add only the DOLCE classes and discard all others
                types.addAll(dolceSuperClasses);
                dolceClassFound = true;
                if (LOGGER.isDebugEnabled()) {
                    LOGGER.debug("Added super classes of " + classResource.getURI() + " --> "
                            + Arrays.toString(dolceSuperClasses.toArray()));
                }
            } else {
                for (Resource r : yagoSuperClasses) {
                    // If they have not been found before (and thus have not
                    // already been added to the queue)
                    if (!types.contains(r)) {
                        types.add(r);
                        queue.add(r);
                    }
                    if (LOGGER.isDebugEnabled()) {
                        LOGGER.debug("Added super classes of " + classResource.getURI() + " --> "
                                + Arrays.toString(yagoSuperClasses.toArray()));
                    }
                }
            }
        }
    }
    if (!dolceClassFound) {
        LOGGER.warn("Couldn't find a DOLCE class for the following list of types: "
                + Arrays.toString(types.toArray()));
    }
}

From source file: it.geosolutions.geobatch.actions.xstream.XstreamAction.java

public Queue<EventObject> execute(Queue<EventObject> events) throws ActionException {

    // the output
    final Queue<EventObject> ret = new LinkedList<EventObject>();
    listenerForwarder.started();
    while (events.size() > 0) {
        final EventObject event = events.remove();
        if (event == null) {
            final String message = "The passed event object is null";
            if (LOGGER.isWarnEnabled())
                LOGGER.warn(message);
            if (conf.isFailIgnored()) {
                continue;
            } else {
                final ActionException e = new ActionException(this, message);
                listenerForwarder.failed(e);
                throw e;
            }
        }

        if (event instanceof FileSystemEvent) {
            // generate an object
            final File sourceFile = File.class.cast(event.getSource());
            if (!sourceFile.exists() || !sourceFile.canRead()) {
                final String message = "XstreamAction.adapter(): The passed FileSystemEvent "
                        + "reference to a not readable or not existent file: " + sourceFile.getAbsolutePath();
                if (LOGGER.isWarnEnabled())
                    LOGGER.warn(message);
                if (conf.isFailIgnored()) {
                    continue;
                } else {
                    final ActionException e = new ActionException(this, message);
                    listenerForwarder.failed(e);
                    throw e;
                }
            }
            FileInputStream inputStream = null;
            try {
                inputStream = new FileInputStream(sourceFile);
                final Map<String, String> aliases = conf.getAlias();
                if (aliases != null && aliases.size() > 0) {
                    for (String alias : aliases.keySet()) {
                        final Class<?> clazz = Class.forName(aliases.get(alias));
                        xstream.alias(alias, clazz);
                    }
                }

                listenerForwarder.setTask("Converting file to a java object");

                // deserialize
                final Object res = xstream.fromXML(inputStream);
                // generate event
                final EventObject eo = new EventObject(res);
                // append to the output
                ret.add(eo);

            } catch (XStreamException e) {
                // the object cannot be deserialized
                if (LOGGER.isErrorEnabled())
                    LOGGER.error("The passed FileSystemEvent reference to a not deserializable file: "
                            + sourceFile.getAbsolutePath(), e);
                if (conf.isFailIgnored()) {
                    continue;
                } else {
                    listenerForwarder.failed(e);
                    throw new ActionException(this, e.getLocalizedMessage());
                }
            } catch (Throwable e) {
                // the object cannot be deserialized
                if (LOGGER.isErrorEnabled())
                    LOGGER.error("XstreamAction.adapter(): " + e.getLocalizedMessage(), e);
                if (conf.isFailIgnored()) {
                    continue;
                } else {
                    listenerForwarder.failed(e);
                    throw new ActionException(this, e.getLocalizedMessage());
                }
            } finally {
                IOUtils.closeQuietly(inputStream);
            }

        } else {

            // try to serialize
            // build the output absolute file name
            File outputDir;
            try {
                outputDir = new File(conf.getOutput());
                // the output
                if (!outputDir.isAbsolute())
                    outputDir = it.geosolutions.tools.commons.file.Path.findLocation(outputDir, getTempDir());

                if (!outputDir.exists()) {
                    if (!outputDir.mkdirs()) {
                        final String message = "Unable to create the ouptut dir named: " + outputDir.toString();
                        if (LOGGER.isInfoEnabled())
                            LOGGER.info(message);
                        if (conf.isFailIgnored()) {
                            continue;
                        } else {
                            final ActionException e = new ActionException(this, message);
                            listenerForwarder.failed(e);
                            throw e;
                        }
                    }
                }
                if (LOGGER.isInfoEnabled()) {
                    LOGGER.info("Output dir name: " + outputDir.toString());
                }

            } catch (NullPointerException npe) {
                final String message = "Unable to get the output file path from :" + conf.getOutput();
                if (LOGGER.isErrorEnabled())
                    LOGGER.error(message, npe);
                if (conf.isFailIgnored()) {
                    continue;
                } else {
                    listenerForwarder.failed(npe);
                    throw new ActionException(this, npe.getLocalizedMessage());
                }
            }

            final File outputFile;
            try {
                outputFile = File.createTempFile(conf.getOutput(), null, outputDir);
            } catch (IOException ioe) {
                final String message = "Unable to build the output file writer: " + ioe.getLocalizedMessage();
                if (LOGGER.isErrorEnabled())
                    LOGGER.error(message, ioe);
                if (conf.isFailIgnored()) {
                    continue;
                } else {
                    listenerForwarder.failed(ioe);
                    throw new ActionException(this, ioe.getLocalizedMessage());
                }
            }

            // try to open the file to write into
            FileWriter fw = null;
            try {
                listenerForwarder.setTask("Serializing java object to " + outputFile);
                fw = new FileWriter(outputFile);

                final Map<String, String> aliases = conf.getAlias();
                if (aliases != null && aliases.size() > 0) {
                    for (String alias : aliases.keySet()) {
                        final Class<?> clazz = Class.forName(aliases.get(alias));
                        xstream.alias(alias, clazz);
                    }
                }
                xstream.toXML(event.getSource(), fw);

            } catch (XStreamException e) {
                if (LOGGER.isErrorEnabled())
                    LOGGER.error(
                            "The passed event object cannot be serialized to: " + outputFile.getAbsolutePath(),
                            e);
                if (conf.isFailIgnored()) {
                    continue;
                } else {
                    listenerForwarder.failed(e);
                    throw new ActionException(this, e.getLocalizedMessage());
                }
            } catch (Throwable e) {
                // the object cannot be deserialized
                if (LOGGER.isErrorEnabled())
                    LOGGER.error(e.getLocalizedMessage(), e);
                if (conf.isFailIgnored()) {
                    continue;
                } else {
                    listenerForwarder.failed(e);
                    throw new ActionException(this, e.getLocalizedMessage());
                }
            } finally {
                IOUtils.closeQuietly(fw);
            }

            // add the file to the queue
            ret.add(new FileSystemEvent(outputFile.getAbsoluteFile(), FileSystemEventType.FILE_ADDED));

        }
    }
    listenerForwarder.completed();
    return ret;
}