Example usage for java.util Queue add

List of usage examples for java.util Queue add

Introduction

This page lists usage examples for java.util.Queue.add, collected from open-source projects.

Prototype

boolean add(E e);

Document

Inserts the specified element into this queue if it is possible to do so immediately without violating capacity restrictions, returning true upon success and throwing an IllegalStateException if no space is currently available.
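
As a minimal, self-contained sketch (illustrative only, not taken from the projects below): on a capacity-restricted queue such as java.util.concurrent.ArrayBlockingQueue, offer reports a full queue by returning false, while add throws IllegalStateException. On unbounded queues such as LinkedList, the capacity check never fails and add simply returns true.

import java.util.Queue;
import java.util.concurrent.ArrayBlockingQueue;

public class QueueAddDemo {
    public static void main(String[] args) {
        // A bounded queue with room for exactly two elements
        Queue<String> queue = new ArrayBlockingQueue<>(2);

        queue.add("a"); // returns true
        queue.add("b"); // returns true

        // offer(...) signals the full queue by returning false
        System.out.println(queue.offer("c")); // prints: false

        // add(...) signals the full queue by throwing
        try {
            queue.add("c");
        } catch (IllegalStateException e) {
            System.out.println("Rejected: " + e.getMessage()); // prints: Rejected: Queue full
        }
    }
}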

Usage

From source file:org.aksw.simba.cetus.yago.YagoBasedTypeSearcher.java

protected static void addHigherIncludedDolceClass(Set<Resource> classes, Model dolceClassModel,
        Set<Resource> unlinkedDolceClasses) {
    Queue<Resource> queue = new LinkedList<Resource>(classes);
    Resource classResource, superClass, subClass;
    RDFNode node;
    NodeIterator nodeIterator;
    ResIterator resIterator;
    boolean addClass;
    while (!queue.isEmpty()) {
        classResource = queue.poll();
        if (dolceClassModel.containsResource(classResource)) {
            nodeIterator = dolceClassModel.listObjectsOfProperty(classResource, RDFS.subClassOf);
            while (nodeIterator.hasNext()) {
                node = nodeIterator.next();
                if (node.isResource()) {
                    superClass = node.asResource();
                    // get all sub classes of this class
                    resIterator = dolceClassModel.listSubjectsWithProperty(RDFS.subClassOf, superClass);
                    addClass = true;
                    // Check that all sub classes of this super class are already
                    // in the list of classes or are marked as unlinked classes
                    while (resIterator.hasNext() && addClass) {
                        subClass = resIterator.next();
                        addClass = classes.contains(subClass) || unlinkedDolceClasses.contains(subClass);
                    }
                    if (addClass) {
                        classes.add(superClass);
                        queue.add(superClass);
                        if (LOGGER.isDebugEnabled()) {
                            LOGGER.debug("Added " + superClass.getURI());
                        }
                    }
                } else {
                    LOGGER.error("Expected a resource in the statement (" + classResource
                            + ", rdfs:subClassOf, " + node + "). Ignoring this statement.");
                }
            }
        }
    }
}

From source file:info.raack.appliancelabeler.machinelearning.appliancedetection.algorithms.ApplianceEnergyConsumptionDetectionAlgorithm.java

protected Queue<EnergyTimestep> getEnergyTimesteps(Calendar firstMeasurementDate,
        Calendar lastMeasurementDate) {
    Queue<EnergyTimestep> timesteps = new LinkedBlockingQueue<EnergyTimestep>();

    Calendar startDate = dateUtils.getPreviousFiveMinuteIncrement(firstMeasurementDate);
    Calendar endDate = dateUtils.getPreviousFiveMinuteIncrement(lastMeasurementDate);

    while (startDate.getTimeInMillis() < endDate.getTimeInMillis()) {
        EnergyTimestep timestep = new EnergyTimestep();

        timestep.setStartTime(startDate.getTime());

        startDate.add(Calendar.MINUTE, 5);
        startDate.add(Calendar.SECOND, -1);
        timestep.setEndTime(startDate.getTime());

        startDate.add(Calendar.SECOND, 1);

        //logger.debug("Adding timestep " + timestep.getStartTime() + " - " + timestep.getEndTime());
        timesteps.add(timestep);
    }

    return timesteps;
}

From source file:org.eclipse.tracecompass.statesystem.core.tests.perf.historytree.HistoryTreeBackendBenchmark.java

/**
 * Benchmarks creating, single querying and full querying the state system
 */
@Test
public void testBenchmark() {
    /* Check arguments */
    long totalTime = this.fNbAvgIntervals * INTERVAL_AVG_TIME;

    Performance perf = Performance.getDefault();
    PerformanceMeter pmBuild = perf.createPerformanceMeter(TEST_PREFIX + TEST_BUILDING_ID + fName);
    perf.tagAsSummary(pmBuild, TEST_BUILDING_ID + fShortName, Dimension.CPU_TIME);

    PerformanceMeter pmSingleQuery = perf.createPerformanceMeter(TEST_PREFIX + TEST_SINGLE_QUERY_ID + fName);
    perf.tagAsSummary(pmSingleQuery, TEST_SINGLE_QUERY_ID + fShortName, Dimension.CPU_TIME);

    PerformanceMeter pmFullQuery = perf.createPerformanceMeter(TEST_PREFIX + TEST_FULL_QUERY_ID + fName);
    perf.tagAsSummary(pmFullQuery, TEST_FULL_QUERY_ID + fShortName, Dimension.CPU_TIME);

    PerformanceMeter pmRangeQuery = perf.createPerformanceMeter(TEST_PREFIX + TEST_QUERY_RANGE_ID + fName);
    perf.tagAsSummary(pmRangeQuery, TEST_QUERY_RANGE_ID + fShortName, Dimension.CPU_TIME);

    for (int i = 0; i < fNbLoops; i++) {
        try {
            /* Create the state system */
            createFile();
            IStateHistoryBackend backend = StateHistoryBackendFactory.createHistoryTreeBackendNewFile(
                    TEST_BUILDING_ID, NonNullUtils.checkNotNull(fTempFile), 1, 1, QUEUE_SIZE);
            ITmfStateSystemBuilder ss = StateSystemFactory.newStateSystem(backend);

            /* Initialize the attributes */
            Queue<QuarkEvent> quarkEvents = new PriorityQueue<>(fNbAttrib);
            Random randomGenerator = new Random(SEED);
            int rootQuark = ss.getQuarkAbsoluteAndAdd(ROOT_NODE);

            /* Create all attributes before testing */
            for (int j = 0; j < fNbAttrib; j++) {
                int quark = ss.getQuarkRelativeAndAdd(rootQuark, String.valueOf(j));
                quarkEvents.add(new QuarkEvent(quark,
                        (Math.abs(randomGenerator.nextLong()) % INTERVAL_AVG_TIME) + 1, fValues.getValues()));
            }

            /* Adds random intervals to the state system */
            pmBuild.start();
            while (true) {
                QuarkEvent quarkEvent = quarkEvents.poll();
                if (quarkEvent == null) {
                    break;
                }
                long eventTime = quarkEvent.getNextEventTime();
                ss.modifyAttribute(eventTime, quarkEvent.getNextValue(), quarkEvent.getQuark());
                long nextDelta = fDistributionMethod.getNextEndTime(randomGenerator, INTERVAL_AVG_TIME);
                long nextEndTime = eventTime + nextDelta;
                if (nextEndTime <= totalTime) {
                    quarkEvent.setNextEventTime(nextEndTime);
                    quarkEvents.add(quarkEvent);
                }
            }
            ss.closeHistory(totalTime);
            pmBuild.stop();

            /*
             * Benchmark the single queries: for each random timestamp,
             * query a random attribute
             */
            List<Integer> subAttributes = ss.getSubAttributes(rootQuark, false);
            pmSingleQuery.start();
            for (int j = 0; j < QUERY_COUNT; j++) {
                long ts = getNextRandomValue(randomGenerator, totalTime);
                int attrib = (int) getNextRandomValue(randomGenerator, subAttributes.size());
                ss.querySingleState(ts, attrib);
            }
            pmSingleQuery.stop();

            /* Benchmark the history range query of 10 attributes */
            pmRangeQuery.start();
            for (int j = 0; j < 10; j++) {
                int attrib = (int) getNextRandomValue(randomGenerator, subAttributes.size());
                StateSystemUtils.queryHistoryRange(ss, attrib, ss.getStartTime(), ss.getCurrentEndTime());
            }
            pmRangeQuery.stop();

            /* Benchmark the full queries */
            pmFullQuery.start();
            for (int j = 0; j < QUERY_COUNT; j++) {
                long ts = getNextRandomValue(randomGenerator, totalTime);
                ss.queryFullState(ts);
            }
            pmFullQuery.stop();

            /* Output some data on the file */
            if (i == 0) {
                if (backend instanceof HistoryTreeBackend) {
                    HistoryTreeBackend htBackend = (HistoryTreeBackend) backend;
                    System.out.println("History tree file size: "
                            + FileUtils.byteCountToDisplaySize(htBackend.getFileSize()));
                    System.out.println("Average node usage: " + htBackend.getAverageNodeUsage());
                }
            }
            deleteFile();
        } catch (IOException | StateValueTypeException | AttributeNotFoundException
                | StateSystemDisposedException e) {
            fail(e.getMessage());
        } finally {
            deleteFile();
        }
    }
    pmBuild.commit();
    pmSingleQuery.commit();
    pmFullQuery.commit();
    pmRangeQuery.commit();
}

From source file:org.glassfish.jersey.examples.sseitemstore.jersey.JerseyItemStoreResourceTest.java

/**
 * Test the {@link EventSource} reconnect feature.
 *
 * @throws Exception in case of a test failure.
 */
@Test
public void testEventSourceReconnect() throws Exception {
    final WebTarget itemsTarget = target("items");
    final CountDownLatch latch = new CountDownLatch(MAX_ITEMS * MAX_LISTENERS * 2); // countdown only on new item events
    final List<Queue<String>> receivedQueues = new ArrayList<>(MAX_LISTENERS);
    final EventSource[] sources = new EventSource[MAX_LISTENERS];

    for (int i = 0; i < MAX_LISTENERS; i++) {
        final int id = i;
        final EventSource es = EventSource.target(itemsTarget.path("events")).named("SOURCE " + id).build();
        sources[id] = es;

        final Queue<String> received = new ConcurrentLinkedQueue<>();
        receivedQueues.add(received);

        es.register(inboundEvent -> {
            try {
                if (inboundEvent.getName() == null) {
                    latch.countDown();
                    final String data = inboundEvent.readData();
                    LOGGER.info("[-i-] SOURCE " + id + ": Received event id=" + inboundEvent.getId() + " data="
                            + data);
                    received.add(data);
                }
            } catch (Exception ex) {
                LOGGER.log(Level.SEVERE, "[-x-] SOURCE " + id + ": Error getting event data.", ex);
                received.add("[data processing error]");
            }
        });
    }

    final String[] postedItems = new String[MAX_ITEMS * 2];
    try {
        open(sources);

        for (int i = 0; i < MAX_ITEMS; i++) {
            final String item = String.format("round-1-%02d", i);
            postItem(itemsTarget, item);
            postedItems[i] = item;
            sendCommand(itemsTarget, "disconnect");
            Thread.sleep(100);
        }

        final int reconnectDelay = 1;
        sendCommand(itemsTarget, "reconnect " + reconnectDelay);
        sendCommand(itemsTarget, "disconnect");

        Thread.sleep(reconnectDelay * 1000);

        for (int i = 0; i < MAX_ITEMS; i++) {
            final String item = String.format("round-2-%02d", i);
            postedItems[i + MAX_ITEMS] = item;
            postItem(itemsTarget, item);
        }

        sendCommand(itemsTarget, "reconnect now");

        assertTrue("Waiting to receive all events has timed out.",
                latch.await(
                        (1 + MAX_LISTENERS * (MAX_ITEMS + 1) * reconnectDelay) * getAsyncTimeoutMultiplier(),
                        TimeUnit.SECONDS));

        // need to force disconnect on server in order for EventSource.close(...) to succeed with HttpUrlConnection
        sendCommand(itemsTarget, "disconnect");
    } finally {
        close(sources);
    }

    final String storedItems = itemsTarget.request().get(String.class);
    for (String item : postedItems) {
        assertThat("Posted item '" + item + "' stored on server", storedItems, containsString(item));
    }

    int sourceId = 0;
    for (Queue<String> queue : receivedQueues) {
        assertThat("Received events in source " + sourceId, queue, describedAs("Collection containing %0",
                hasItems(postedItems), Arrays.asList(postedItems).toString()));
        assertThat("Size of received queue for source " + sourceId, queue.size(), equalTo(postedItems.length));
        sourceId++;
    }
}

From source file:org.opencron.server.service.ExecuteService.java

/**
 * Executes the given command on every specified agent, running each job in its own thread.
 */
public void batchExecuteJob(final Long userId, String command, String agentIds) {
    final Queue<JobVo> jobQueue = new LinkedBlockingQueue<JobVo>();

    String[] arrayIds = agentIds.split(";");
    for (String agentId : arrayIds) {
        Agent agent = agentService.getAgent(Long.parseLong(agentId));
        JobVo jobVo = new JobVo(userId, command, agent);
        jobQueue.add(jobVo);
    }

    Thread jobThread = new Thread(new Runnable() {
        @Override
        public void run() {
            for (final JobVo jobVo : jobQueue) {
                // Execute each job in its own thread so the agents run in parallel
                Thread thread = new Thread(new Runnable() {
                    public void run() {
                        executeSingleJob(jobVo, userId);
                    }
                });
                thread.start();
            }
        }
    });
    jobThread.start();
}

From source file:org.wso2.carbon.andes.event.core.internal.subscription.registry.TopicManagerServiceImpl.java

/**
 * {@inheritDoc}
 */
@Override
public Subscription[] getSubscriptions(String topicName, boolean withChildren) throws EventBrokerException {

    List<Subscription> subscriptions = new ArrayList<Subscription>();
    Queue<String> pathsQueue = new LinkedList<String>();
    String resourcePath = JavaUtil.getResourcePath(topicName, this.topicStoragePath);

    pathsQueue.add(resourcePath);
    while (!pathsQueue.isEmpty()) {
        addSubscriptions(pathsQueue.remove(), subscriptions, pathsQueue, withChildren);
    }

    return subscriptions.toArray(new Subscription[subscriptions.size()]);

}

From source file:org.apache.camel.component.jpa.JpaConsumer.java

@Override
protected int poll() throws Exception {
    // must reset for each poll
    shutdownRunningTask = null;
    pendingExchanges = 0;

    Object messagePolled = template.execute(new JpaCallback() {
        public Object doInJpa(EntityManager entityManager) throws PersistenceException {
            Queue<DataHolder> answer = new LinkedList<DataHolder>();

            Query query = getQueryFactory().createQuery(entityManager);
            configureParameters(query);
            List<Object> results = CastUtils.cast(query.getResultList());
            for (Object result : results) {
                DataHolder holder = new DataHolder();
                holder.manager = entityManager;
                holder.result = result;
                holder.exchange = createExchange(result);
                answer.add(holder);
            }

            int messagePolled;
            try {
                messagePolled = processBatch(CastUtils.cast(answer));
            } catch (Exception e) {
                throw new PersistenceException(e);
            }

            entityManager.flush();
            return messagePolled;
        }
    });

    return endpoint.getCamelContext().getTypeConverter().convertTo(int.class, messagePolled);
}

From source file:it.geosolutions.geobatch.actions.ds2ds.Ds2dsAction.java

/**
 * Gets the list of received file events, filtering out those not correct for this action.
 *
 * @param events the queue of received file system events
 * @return the subset of events accepted for processing by this action
 */
private Queue<FileSystemEvent> acceptableFiles(Queue<FileSystemEvent> events) {
    updateTask("Recognize file type");
    Queue<FileSystemEvent> accepted = new LinkedList<FileSystemEvent>();
    for (FileSystemEvent event : events) {
        String fileType = getFileType(event);
        if (ACCEPTED_FILE_TYPES.contains(fileType) && !configuration.getSkippedTypes().contains(fileType)) {
            if (LOGGER.isTraceEnabled()) {
                LOGGER.trace("Accepted file: " + event.getSource().getName());
            }
            accepted.add(event);
        } else {
            if (LOGGER.isTraceEnabled()) {
                LOGGER.trace("Skipped file: " + event.getSource().getName());
            }
        }
    }
    return accepted;
}

From source file:sadl.models.pdrta.PDRTA.java

public void toDOTLang(Appendable ap, double minP, boolean withInput, StateColoring sc) {

    // Write transitions with high probability
    final StringBuilder sb = new StringBuilder();
    final Queue<PDRTAState> q = new ArrayDeque<>();
    final Set<PDRTAState> found = new HashSet<>();
    q.add(root);
    found.add(root);
    while (!q.isEmpty()) {
        final PDRTAState s = q.remove();
        for (int i = 0; i < input.getAlphSize(); i++) {
            final Set<Entry<Integer, Interval>> ins = s.getIntervals(i).entrySet();
            for (final Entry<Integer, Interval> eIn : ins) {
                final Interval in = eIn.getValue();
                final double p = s.getStat().getTransProb(i, in);
                final PDRTAState t = in.getTarget();
                if (t != null && p >= minP) {
                    if (!found.contains(t)) {
                        q.add(t);
                        found.add(t);
                    }
                    // Write transition
                    sb.append(s.getIndex());
                    sb.append(" -> ");
                    sb.append(t.getIndex());
                    sb.append(" [ label = \"");
                    sb.append(getSymbol(i));
                    sb.append(" [");
                    sb.append(in.getBegin());
                    sb.append(", ");
                    sb.append(in.getEnd());
                    sb.append("] p=");
                    sb.append(p);
                    if (withInput) {
                        sb.append(" n=");
                        sb.append(in.getTails().size());
                    }
                    sb.append("\" ];\n");
                }
            }
        }
    }

    try {
        writeStatData(ap, found);

        // Write automaton in DOT language
        ap.append("digraph PDRTA {\n");
        ap.append("rankdir=LR;\n");
        ap.append("node[style = filled, fillcolor = white, shape = circle];\n");
        ap.append("\"\"[style = invis, shape = none, margin = 0, width = 0, heigth = 0];\n");
        ap.append("\"\" -> 0;\n");

        // Write states
        for (final PDRTAState s : states.valueCollection()) {
            if (found.contains(s)) {
                ap.append(Integer.toString(s.getIndex()));
                ap.append(" [ xlabel = \"");
                ap.append(Double.toString(s.getStat().getTailEndProb()));
                ap.append("\"");
                if (sc != null) {
                    if (sc.isRed(s)) {
                        ap.append(", fillcolor = \"#FFA9A9\"");
                    } else if (sc.isBlue(s)) {
                        ap.append(", fillcolor = \"#A9D1FF\"");
                    }
                }
                ap.append(" ];\n");
            }
        }

        // Add transitions
        ap.append(sb.toString());

        ap.append("}");
    } catch (final IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
}

From source file:com.datumbox.common.persistentstorage.factories.MongoDBStructureFactory.java

@Override
public void preSave(BigDataStructureContainer learnedParameters, MemoryConfiguration memoryConfiguration) {

    boolean usesInMemoryStructures = memoryConfiguration.getMapType().isInMemory()
            || memoryConfiguration.getSetType().isInMemory() || memoryConfiguration.getQueueType().isInMemory()
            || memoryConfiguration.getCollectionType().isInMemory();

    //If in-memory structures are used to speed up the execution of the algorithm,
    //those fields are marked as Transient and thus will not be stored by Morphia.
    //To avoid losing this information, we inspect the fields of the ModelParameter
    //object and look for those marked as both Transient and BigDataStructureMarker
    //(a custom annotation). If such a field is found, we store its contents in the
    //database in a collection named after the field.
    if (usesInMemoryStructures) {
        Queue<BigDataStructureContainer> learnableObjects = new LinkedList<>();
        Set<BigDataStructureContainer> alreadyChecked = new HashSet<>(); //This set uses the default equals() which means that it compares memory addresses. This behavior is desired

        learnableObjects.add(learnedParameters);

        while (learnableObjects.size() > 0) {
            //get the next object from the queue
            BigDataStructureContainer obj = learnableObjects.poll();

            //mark it as examined
            alreadyChecked.add(obj);

            //get all the fields from all the inherited classes
            for (Field field : getAllFields(new LinkedList<>(), obj.getClass())) {
                handleBigDataStructureField(field, obj, memoryConfiguration);

                Class<?> fieldClass = field.getType();
                //if this object can be learned and is not already checked add it in the Queue
                if (BigDataStructureContainer.class.isAssignableFrom(fieldClass)) {
                    field.setAccessible(true);
                    BigDataStructureContainer fieldValue;
                    try {
                        fieldValue = (BigDataStructureContainer) field.get(obj);
                    } catch (IllegalArgumentException | IllegalAccessException ex) {
                        throw new RuntimeException(ex);
                    }

                    if (!alreadyChecked.contains(fieldValue)) {
                        learnableObjects.add(fieldValue);
                    }
                }
            }
        }

    }
}