Example usage for java.util Queue add

List of usage examples for java.util Queue add

Introduction

On this page you can find example usages for java.util.Queue.add.

Prototype

boolean add(E e);

Document

Inserts the specified element into this queue if it is possible to do so immediately without violating capacity restrictions, returning true upon success and throwing an IllegalStateException if no space is currently available.
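
The useful contrast is with offer, which on a capacity-restricted queue returns false where add throws. A minimal sketch of the difference, using a bounded ArrayBlockingQueue of capacity 1 (JDK classes only):

import java.util.Queue;
import java.util.concurrent.ArrayBlockingQueue;

public class QueueAddDemo {
    public static void main(String[] args) {
        Queue<String> bounded = new ArrayBlockingQueue<>(1);
        bounded.add("first");                         // succeeds, returns true
        System.out.println(bounded.offer("second"));  // false: queue is full
        try {
            bounded.add("second");                    // no space available
        } catch (IllegalStateException e) {
            System.out.println("add threw: " + e);    // IllegalStateException: Queue full
        }
    }
}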

Usage

From source file:it.geosolutions.geobatch.opensdi.csvingest.CSVIngestActionTest.java

@Test
public void loadAll() throws Exception {

    createCropDescriptors();
    createUnitOfMeasures();

    Queue<EventObject> events = new LinkedList<EventObject>();
    File dir = loadFile("all");
    assertNotNull(dir);
    assertTrue(dir.isDirectory());

    CSVIngestAction action = new CSVIngestAction(new CSVIngestConfiguration(null, null, null));
    //        action.setCropDataDao(cropDataDAO);
    //        action.setCropDescriptorDao(cropDescriptorDAO);
    //        action.setAgrometDao(agrometDAO);
    //        action.setCropStatusDao(cropStatusDAO);
    action.setUnitOfMeasureService(unitOfMeasureService);
    action.afterPropertiesSet();

    for (File file : FileUtils.listFiles(dir, new String[] { "csv" }, true)) {
        LOGGER.info("Loading " + file);
        FileSystemEvent event = new FileSystemEvent(file, FileSystemEventType.FILE_ADDED);
        events.add(event);
        action.addListener(new DummyProgressListener());
        action.execute(events);
    }
    checkSampleData();

}

From source file:com.baifendian.swordfish.common.utils.graph.Graph.java

/**
 * Breadth-first traversal of the graph, starting from its start vertices.
 *
 * @return the vertices in breadth-first order
 * @throws Exception if the traversal cannot reach every vertex
 */
public List<VK> broadFirstSearch() throws Exception {
    List<VK> visit = new ArrayList<>();
    Queue<VK> q = new LinkedList<>();
    Set<VK> hasVisited = new HashSet<>();

    synchronized (this) {
        // enqueue and mark all start vertices
        for (VK key : getStartVertex()) {
            q.add(key);
            hasVisited.add(key);
            visit.add(key);
        }

        while (!q.isEmpty()) {
            VK key = q.poll();

            // visit the successors of the current vertex
            for (VK postKey : getPostNode(key)) {
                if (!hasVisited.contains(postKey)) {
                    q.add(postKey);
                    hasVisited.add(postKey);
                    visit.add(postKey);
                }
            }
        }

        // verify that the traversal covered every vertex
        if (visit.size() != getVertexNumber()) {
            throw new Exception("Broad first search can't search complete.");
        }
    }

    return visit;
}

From source file:org.jboss.errai.ioc.rebind.ioc.graph.impl.DependencyGraphBuilderImpl.java

private Injectable resolveDependency(final BaseDependency dep, final Injectable concrete,
        final Collection<String> problems, final Map<String, Injectable> customProvidedInjectables) {
    if (dep.injectable.resolution != null) {
        return dep.injectable.resolution;
    }

    final Multimap<ResolutionPriority, ConcreteInjectable> resolvedByPriority = HashMultimap.create();
    final Queue<AbstractInjectable> resolutionQueue = new LinkedList<AbstractInjectable>();
    resolutionQueue.add(dep.injectable);
    resolutionQueue.add(addMatchingExactTypeInjectables(dep.injectable));

    processResolutionQueue(resolutionQueue, resolvedByPriority);

    // Iterates through priorities from highest to lowest.
    for (final ResolutionPriority priority : ResolutionPriority.values()) {
        if (resolvedByPriority.containsKey(priority)) {
            final Collection<ConcreteInjectable> resolved = resolvedByPriority.get(priority);
            if (resolved.size() > 1) {
                problems.add(
                        ambiguousDependencyMessage(dep, concrete, new ArrayList<ConcreteInjectable>(resolved)));
                return null;
            } else {
                Injectable injectable = resolved.iterator().next();
                if (injectable.isExtension()) {
                    final ExtensionInjectable providedInjectable = (ExtensionInjectable) injectable;
                    final Collection<Injectable> otherResolvedInjectables = new ArrayList<Injectable>(
                            resolvedByPriority.values());
                    otherResolvedInjectables.remove(injectable);

                    final InjectionSite site = new InjectionSite(concrete.getInjectedType(), getAnnotated(dep),
                            otherResolvedInjectables);
                    injectable = providedInjectable.provider.getInjectable(site, nameGenerator);
                    customProvidedInjectables.put(injectable.getFactoryName(), injectable);
                    dep.injectable = copyAbstractInjectable(dep.injectable);
                }
                return (dep.injectable.resolution = injectable);
            }
        }
    }

    problems.add(unsatisfiedDependencyMessage(dep, concrete));
    return null;
}

From source file:org.amanzi.neo.services.impl.statistics.PropertyStatisticsService.java

protected void updatePropertyVault(final Node propertyVault, final PropertyVault vault)
        throws ServiceException {
    nodeService.updateProperty(propertyVault, statisticsNodeProperties.getClassProperty(),
            vault.getClassName());

    int size = nodeService.getNodeProperty(propertyVault, getGeneralNodeProperties().getSizeProperty(),
            NumberUtils.INTEGER_ZERO, false);

    Map<Object, Integer> values = new HashMap<Object, Integer>(vault.getValuesMap());

    Queue<Integer> removedIndexes = new LinkedList<Integer>();
    Stack<Integer> processedIndex = new Stack<Integer>();

    if (size > 0) {
        for (int i = 0; i < size; i++) {
            Object property = nodeService.getNodeProperty(propertyVault,
                    statisticsNodeProperties.getValuePrefix() + i, null, true);

            Integer newCount = values.remove(property);
            if (newCount != null) {
                nodeService.updateProperty(propertyVault, statisticsNodeProperties.getCountPrefix() + i,
                        newCount);
            } else {
                removedIndexes.add(i);
            }
            processedIndex.add(i);
        }
    }

    // remove old values
    for (Integer index : removedIndexes) {
        nodeService.removeNodeProperty(propertyVault, statisticsNodeProperties.getValuePrefix() + index, false);
        nodeService.removeNodeProperty(propertyVault, statisticsNodeProperties.getCountPrefix() + index, false);
    }

    int counter = size;
    for (Entry<Object, Integer> statEntry : values.entrySet()) {
        counter = removedIndexes.isEmpty() ? counter : removedIndexes.remove();

        nodeService.updateProperty(propertyVault, statisticsNodeProperties.getValuePrefix() + counter,
                statEntry.getKey());
        nodeService.updateProperty(propertyVault, statisticsNodeProperties.getCountPrefix() + counter,
                statEntry.getValue());

        counter++;
    }

    for (Integer newIndex : removedIndexes) {
        int oldIndex = processedIndex.pop();
        nodeService.renameNodeProperty(propertyVault, statisticsNodeProperties.getValuePrefix() + oldIndex,
                statisticsNodeProperties.getValuePrefix() + newIndex, false);
        nodeService.renameNodeProperty(propertyVault, statisticsNodeProperties.getCountPrefix() + oldIndex,
                statisticsNodeProperties.getCountPrefix() + newIndex, false);
    }

    nodeService.updateProperty(propertyVault, getGeneralNodeProperties().getSizeProperty(), values.size());
    nodeService.updateProperty(propertyVault, statisticsNodeProperties.getDefaultValueProperty(),
            vault.getDefaultValue());
}

From source file:org.unitime.timetable.backup.SessionBackup.java

@Override
public void backup(OutputStream out, Progress progress, Long sessionId) throws IOException {
    iOut = CodedOutputStream.newInstance(out);
    iProgress = progress;
    iSessionId = sessionId;
    iHibSession = new _RootDAO().createNewSession();
    iHibSession.setCacheMode(CacheMode.IGNORE);
    iHibSessionFactory = iHibSession.getSessionFactory();
    try {
        iProgress.setStatus("Exporting Session");
        iProgress.setPhase("Loading Model", 3);
        TreeSet<ClassMetadata> allMeta = new TreeSet<ClassMetadata>(new Comparator<ClassMetadata>() {
            @Override
            public int compare(ClassMetadata m1, ClassMetadata m2) {
                return m1.getEntityName().compareTo(m2.getEntityName());
            }
        });
        allMeta.addAll(iHibSessionFactory.getAllClassMetadata().values());
        iProgress.incProgress();

        Queue<QueueItem> queue = new LinkedList<QueueItem>();

        queue.add(new QueueItem(iHibSessionFactory.getClassMetadata(Session.class), null, "uniqueId",
                Relation.None));

        Set<String> avoid = new HashSet<String>();
        // avoid following relations
        avoid.add(TimetableManager.class.getName() + ".departments");
        avoid.add(TimetableManager.class.getName() + ".solverGroups");
        avoid.add(DistributionType.class.getName() + ".departments");
        avoid.add(LastLikeCourseDemand.class.getName() + ".student");
        avoid.add(Student.class.getName() + ".lastLikeCourseDemands");

        Set<String> disallowedNotNullRelations = new HashSet<String>();
        disallowedNotNullRelations.add(Assignment.class.getName() + ".datePattern");
        disallowedNotNullRelations.add(Assignment.class.getName() + ".timePattern");
        disallowedNotNullRelations.add(LastLikeCourseDemand.class.getName() + ".student");
        disallowedNotNullRelations.add(OnlineSectioningLog.class.getName() + ".session");

        Map<String, List<QueueItem>> data = new HashMap<String, List<QueueItem>>();
        List<QueueItem> sessions = new ArrayList<QueueItem>();
        sessions.add(queue.peek());
        data.put(queue.peek().name(), sessions);

        QueueItem item = null;
        while ((item = queue.poll()) != null) {
            if (item.size() == 0)
                continue;
            for (ClassMetadata meta : allMeta) {
                if (meta.hasSubclasses())
                    continue;
                for (int i = 0; i < meta.getPropertyNames().length; i++) {
                    String property = meta.getPropertyNames()[i];
                    if (disallowedNotNullRelations.contains(meta.getEntityName() + "." + property)
                            || meta.getPropertyNullability()[i])
                        continue;
                    Type type = meta.getPropertyTypes()[i];
                    if (type instanceof EntityType && type.getReturnedClass().equals(item.clazz())) {
                        QueueItem qi = new QueueItem(meta, item, property, Relation.Parent);
                        if (!data.containsKey(qi.name())) {
                            List<QueueItem> items = new ArrayList<QueueItem>();
                            data.put(qi.name(), items);
                            queue.add(qi);
                            items.add(qi);
                            if (qi.size() > 0)
                                iProgress.info("Parent: " + qi);
                        }
                    }
                }
            }
        }
        iProgress.incProgress();

        for (List<QueueItem> list : data.values())
            queue.addAll(list);

        // The following part is needed to ensure that instructor distribution preferences are saved including their distribution types 
        List<QueueItem> distributions = new ArrayList<QueueItem>();
        for (QueueItem instructor : data.get(DepartmentalInstructor.class.getName())) {
            QueueItem qi = new QueueItem(iHibSessionFactory.getClassMetadata(DistributionPref.class),
                    instructor, "owner", Relation.Parent);
            distributions.add(qi);
            queue.add(qi);
            if (qi.size() > 0)
                iProgress.info("Extra: " + qi);
        }
        data.put(DistributionPref.class.getName(), distributions);

        while ((item = queue.poll()) != null) {
            if (item.size() == 0)
                continue;
            for (int i = 0; i < item.meta().getPropertyNames().length; i++) {
                String property = item.meta().getPropertyNames()[i];
                Type type = item.meta().getPropertyTypes()[i];
                if (type instanceof EntityType) {
                    if (avoid.contains(item.name() + "." + property))
                        continue;

                    ClassMetadata meta = iHibSessionFactory.getClassMetadata(type.getReturnedClass());
                    if (item.contains(meta.getEntityName()))
                        continue;

                    QueueItem qi = new QueueItem(meta, item, property, Relation.One);
                    List<QueueItem> items = data.get(qi.name());
                    if (items == null) {
                        items = new ArrayList<QueueItem>();
                        data.put(qi.name(), items);
                    }
                    queue.add(qi);
                    items.add(qi);

                    if (qi.size() > 0)
                        iProgress.info("One: " + qi);
                }
                if (type instanceof CollectionType) {
                    if (avoid.contains(item.name() + "." + property))
                        continue;

                    ClassMetadata meta = iHibSessionFactory.getClassMetadata(((CollectionType) type)
                            .getElementType((SessionFactoryImplementor) iHibSessionFactory).getReturnedClass());
                    if (meta == null || item.contains(meta.getEntityName()))
                        continue;

                    QueueItem qi = new QueueItem(meta, item, property, Relation.Many);
                    List<QueueItem> items = data.get(qi.name());
                    if (items == null) {
                        items = new ArrayList<QueueItem>();
                        data.put(qi.name(), items);
                    }
                    queue.add(qi);
                    items.add(qi);

                    if (qi.size() > 0)
                        iProgress.info("Many: " + qi);
                }
            }
        }
        iProgress.incProgress();

        Map<String, Set<Serializable>> allExportedIds = new HashMap<String, Set<Serializable>>();
        for (String name : new TreeSet<String>(data.keySet())) {
            List<QueueItem> list = data.get(name);
            Map<String, TableData.Table.Builder> tables = new HashMap<String, TableData.Table.Builder>();
            for (QueueItem current : list) {
                if (current.size() == 0)
                    continue;
                iProgress.info("Loading " + current);
                List<Object> objects = current.list();
                if (objects == null || objects.isEmpty())
                    continue;
                iProgress.setPhase(current.abbv() + " [" + objects.size() + "]", objects.size());
                objects: for (Object object : objects) {
                    iProgress.incProgress();

                    // Get meta data (check for sub-classes)
                    ClassMetadata meta = iHibSessionFactory.getClassMetadata(object.getClass());
                    if (meta == null)
                        meta = current.meta();
                    if (meta.hasSubclasses()) {
                        for (Iterator i = iHibSessionFactory.getAllClassMetadata().entrySet().iterator(); i
                                .hasNext();) {
                            Map.Entry entry = (Map.Entry) i.next();
                            ClassMetadata classMetadata = (ClassMetadata) entry.getValue();
                            if (classMetadata.getMappedClass().isInstance(object)
                                    && !classMetadata.hasSubclasses()) {
                                meta = classMetadata;
                                break;
                            }
                        }
                    }

                    // Get unique identifier
                    Serializable id = meta.getIdentifier(object, (SessionImplementor) iHibSession);

                    // Check if already exported
                    Set<Serializable> exportedIds = allExportedIds.get(meta.getEntityName());
                    if (exportedIds == null) {
                        exportedIds = new HashSet<Serializable>();
                        allExportedIds.put(meta.getEntityName(), exportedIds);
                    }
                    if (!exportedIds.add(id))
                        continue;

                    // Check relation to an academic session (if exists)
                    for (String property : meta.getPropertyNames()) {
                        Type type = meta.getPropertyType(property);
                        if (type instanceof EntityType && type.getReturnedClass().equals(Session.class)) {
                            Session s = (Session) meta.getPropertyValue(object, property);
                            if (s != null && !s.getUniqueId().equals(iSessionId)) {
                                iProgress.warn(meta.getEntityName()
                                        .substring(meta.getEntityName().lastIndexOf('.') + 1) + "@" + id
                                        + " belongs to a different academic session (" + s + ")");
                                continue objects; // wrong session
                            }
                        }
                    }

                    // Get appropriate table
                    TableData.Table.Builder table = tables.get(meta.getEntityName());
                    if (table == null) {
                        table = TableData.Table.newBuilder();
                        tables.put(meta.getEntityName(), table);
                        table.setName(meta.getEntityName());
                    }

                    // Export object
                    TableData.Record.Builder record = TableData.Record.newBuilder();
                    record.setId(id.toString());
                    for (String property : meta.getPropertyNames()) {
                        Type type = meta.getPropertyType(property);
                        Object value = meta.getPropertyValue(object, property);
                        if (value == null)
                            continue;
                        TableData.Element.Builder element = TableData.Element.newBuilder();
                        element.setName(property);
                        if (type instanceof PrimitiveType) {
                            element.addValue(((PrimitiveType) type).toString(value));
                        } else if (type instanceof StringType) {
                            element.addValue(((StringType) type).toString((String) value));
                        } else if (type instanceof BinaryType) {
                            element.addValueBytes(ByteString.copyFrom((byte[]) value));
                        } else if (type instanceof TimestampType) {
                            element.addValue(((TimestampType) type).toString((Date) value));
                        } else if (type instanceof DateType) {
                            element.addValue(((DateType) type).toString((Date) value));
                        } else if (type instanceof EntityType) {
                            List<Object> ids = current.relation(property, id, false);
                            if (ids != null)
                                for (Object i : ids)
                                    element.addValue(i.toString());
                            iHibSession.evict(value);
                        } else if (type instanceof CustomType && value instanceof Document) {
                            if (object instanceof CurriculumClassification && property.equals("students"))
                                continue;
                            StringWriter w = new StringWriter();
                            XMLWriter x = new XMLWriter(w, OutputFormat.createCompactFormat());
                            x.write((Document) value);
                            x.flush();
                            x.close();
                            element.addValue(w.toString());
                        } else if (type instanceof CollectionType) {
                            List<Object> ids = current.relation(property, id, false);
                            if (ids != null)
                                for (Object i : ids)
                                    element.addValue(i.toString());
                        } else if (type instanceof EmbeddedComponentType
                                && property.equalsIgnoreCase("uniqueCourseNbr")) {
                            continue;
                        } else {
                            iProgress.warn("Unknown data type: " + type + " (property " + meta.getEntityName()
                                    + "." + property + ", class " + value.getClass() + ")");
                            continue;
                        }
                        record.addElement(element.build());

                    }
                    table.addRecord(record.build());
                    iHibSession.evict(object);
                }
                current.clearCache();
            }

            for (TableData.Table.Builder table : tables.values()) {
                add(table.build());
            }
        }

        /*
        // Skip ConstraintInfo
        if (!iData.containsKey(ConstraintInfo.class.getName()))
           iData.put(ConstraintInfo.class.getName(), new QueueItem(iHibSessionFactory.getClassMetadata(ConstraintInfo.class), null, null, Relation.Empty));
                
        for (String name: items)
           export(iData.get(name));
                    
        while (true) {
         List<Object> objects = new ArrayList<Object>();
         ClassMetadata meta = null;
         for (Entity e: iObjects) {
        if (e.exported()) continue;
        if (objects.isEmpty() || meta.getEntityName().equals(e.name())) {
           meta = e.meta();
           objects.add(e.object());
           e.notifyExported();
        }
         }
         if (objects.isEmpty()) break;
         export(meta, objects, null);
        }
        */
        iProgress.setStatus("All done.");
    } finally {
        iHibSession.close();
    }
}

From source file:com.linkedin.pinot.core.startree.OffHeapStarTreeBuilder.java

/**
 * Helper method that visits each leaf node and does the following:
 * - Re-orders the doc-ids corresponding to the leaf node with respect to the time column.
 * - Creates child nodes for each time value under this leaf node.
 * - Adds a new record with aggregated data for this leaf node.
 * @throws Exception
 */
private void splitLeafNodesOnTimeColumn() throws Exception {
    Queue<StarTreeIndexNode> nodes = new LinkedList<>();
    nodes.add(starTreeRootIndexNode);
    StarTreeDataSorter dataSorter = new StarTreeDataSorter(dataFile, dimensionSizeBytes, metricSizeBytes);
    while (!nodes.isEmpty()) {
        StarTreeIndexNode node = nodes.remove();
        if (node.isLeaf()) {
            // If we have a time column, split on it; this helps with time-based filtering
            if (timeColumnName != null) {
                int level = node.getLevel();
                int[] newSortOrder = moveColumnInSortOrder(timeColumnName, getSortOrder(), level);

                int startDocId = node.getStartDocumentId();
                int endDocId = node.getEndDocumentId();
                dataSorter.sort(startDocId, endDocId, newSortOrder);
                int timeColIndex = dimensionNameToIndexMap.get(timeColumnName);
                Map<Integer, IntPair> timeColumnRangeMap = dataSorter.groupByIntColumnCount(startDocId,
                        endDocId, timeColIndex);

                node.setChildDimensionName(timeColIndex);
                node.setChildren(new HashMap<Integer, StarTreeIndexNode>());

                for (int timeValue : timeColumnRangeMap.keySet()) {
                    IntPair range = timeColumnRangeMap.get(timeValue);
                    StarTreeIndexNode child = new StarTreeIndexNode();
                    child.setDimensionName(timeColIndex);
                    child.setDimensionValue(timeValue);
                    child.setParent(node);
                    child.setLevel(node.getLevel() + 1);
                    child.setStartDocumentId(range.getLeft());
                    child.setEndDocumentId(range.getRight());
                    node.addChild(child, timeValue);
                }
            }
        } else {
            Iterator<StarTreeIndexNode> childrenIterator = node.getChildrenIterator();
            while (childrenIterator.hasNext()) {
                nodes.add(childrenIterator.next());
            }
        }
    }
    dataSorter.close();
}

From source file:org.apache.gobblin.runtime.TaskStateCollectorService.java

/**
 * Collect output {@link TaskState}s of tasks of the job launched.
 *
 * <p>
 *   This method collects all available output {@link TaskState} files at the time it is called. It
 *   uses a {@link ParallelRunner} to deserialize the {@link TaskState}s. Each {@link TaskState}
 *   file gets deleted after the {@link TaskState} it stores is successfully collected.
 * </p>
 *
 * @throws IOException if it fails to collect the output {@link TaskState}s
 */
private void collectOutputTaskStates() throws IOException {
    List<String> taskStateNames = taskStateStore.getTableNames(outputTaskStateDir.getName(),
            new Predicate<String>() {
                @Override
                public boolean apply(String input) {
                    return input.endsWith(AbstractJobLauncher.TASK_STATE_STORE_TABLE_SUFFIX)
                            && !input.startsWith(FsStateStore.TMP_FILE_PREFIX);
                }
            });

    if (taskStateNames == null || taskStateNames.size() == 0) {
        LOGGER.debug("No output task state files found in " + this.outputTaskStateDir);
        return;
    }

    final Queue<TaskState> taskStateQueue = Queues.newConcurrentLinkedQueue();
    try (ParallelRunner stateSerDeRunner = new ParallelRunner(this.stateSerDeRunnerThreads, null)) {
        for (final String taskStateName : taskStateNames) {
            LOGGER.debug("Found output task state file " + taskStateName);
            // Deserialize the TaskState and delete the file
            stateSerDeRunner.submitCallable(new Callable<Void>() {
                @Override
                public Void call() throws Exception {
                    TaskState taskState = taskStateStore.getAll(outputTaskStateDir.getName(), taskStateName)
                            .get(0);
                    taskStateQueue.add(taskState);
                    taskStateStore.delete(outputTaskStateDir.getName(), taskStateName);
                    return null;
                }
            }, "Deserialize state for " + taskStateName);
        }
    } catch (IOException ioe) {
        LOGGER.warn("Could not read all task state files.");
    }

    LOGGER.info(String.format("Collected task state of %d completed tasks", taskStateQueue.size()));

    // Add the TaskStates of completed tasks to the JobState so when the control
    // returns to the launcher, it sees the TaskStates of all completed tasks.
    for (TaskState taskState : taskStateQueue) {
        taskState.setJobState(this.jobState);
        this.jobState.addTaskState(taskState);
    }

    // Finish any additional steps defined in the handler at the driver level.
    // Currently only a Hive registration handler is implemented.
    if (optionalTaskCollectorHandler.isPresent()) {
        LOGGER.info(
                "Execute Pipelined TaskStateCollectorService Handler for " + taskStateQueue.size() + " tasks");

        try {
            optionalTaskCollectorHandler.get().handle(taskStateQueue);
        } catch (Throwable t) {
            if (isJobProceedOnCollectorServiceFailure) {
                log.error("Failed to commit dataset while job proceeds", t);
                SafeDatasetCommit.setTaskFailureException(taskStateQueue, t);
            } else {
                throw new RuntimeException("Hive Registration as the TaskStateCollectorServiceHandler failed.",
                        t);
            }
        }
    }

    // Notify the listeners for the completion of the tasks
    this.eventBus.post(new NewTaskCompletionEvent(ImmutableList.copyOf(taskStateQueue)));
}

From source file:com.demandware.vulnapp.challenge.impl.XSSChallenge.java

/**
 * Puts a message on a user's message queue.
 */
private void addMessageForUser(DIVAServletRequestWrapper req) {
    Queue<String> messageList;
    String recipient = req.getParameter(XSSChallenge.RECIPIENT_PARAM);
    String message = req.getParameter(XSSChallenge.MESSAGE_PARAM);
    boolean sendFlag = Helpers.isTruthy(req.getParameter(XSSChallenge.SEND_FLAG_PARAM));
    if (isAdminSession(req) && sendFlag) {
        message += generateFlagForRequestedUser(recipient);
    }
    if (this.messageMap.containsKey(recipient)) {
        messageList = this.messageMap.get(recipient);
    } else {
        messageList = EvictingQueue.create(XSSChallenge.MESSAGE_QUEUE_SIZE);
        this.messageMap.put(recipient, messageList);
    }
    messageList.add(message);
}
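
Note that messageList here is a Guava EvictingQueue, whose add never throws when the queue is at capacity; it silently evicts the oldest element instead. A minimal sketch of that behavior (assuming Guava is on the classpath):

import java.util.Queue;
import com.google.common.collect.EvictingQueue;

public class EvictingQueueDemo {
    public static void main(String[] args) {
        Queue<String> recent = EvictingQueue.create(2); // keep only the 2 newest elements
        recent.add("a");
        recent.add("b");
        recent.add("c");             // "a" is evicted; add still returns true
        System.out.println(recent);  // prints [b, c]
    }
}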

From source file:io.hops.ha.common.TransactionStateImpl.java

private void persistApplicationStateToRemove() throws StorageException {
    if (!applicationsStateToRemove.isEmpty()) {
        ApplicationStateDataAccess DA = (ApplicationStateDataAccess) RMStorageFactory
                .getDataAccess(ApplicationStateDataAccess.class);
        Queue<ApplicationState> appToRemove = new ConcurrentLinkedQueue<ApplicationState>();
        for (ApplicationId appId : applicationsStateToRemove) {
            appToRemove.add(new ApplicationState(appId.toString()));
        }
        DA.removeAll(appToRemove);
        //TODO remove appattempts
    }
}

From source file:com.liveramp.hank.partition_server.UpdateManager.java

@Override
public void update() throws IOException {
    HankTimer timer = new HankTimer();
    try {

        // Delete unknown files
        deleteUnknownFiles();
        // Perform update
        Semaphore concurrentUpdatesSemaphore = new Semaphore(configurator.getNumConcurrentUpdates());
        List<Throwable> encounteredThrowables = new ArrayList<Throwable>();
        PartitionUpdateTaskStatisticsAggregator partitionUpdateTaskStatisticsAggregator = new PartitionUpdateTaskStatisticsAggregator();
        Map<String, Queue<PartitionUpdateTask>> dataDirectoryToUpdateTasks = new HashMap<String, Queue<PartitionUpdateTask>>();
        List<PartitionUpdateTask> allUpdateTasks = buildPartitionUpdateTasks(
                partitionUpdateTaskStatisticsAggregator, encounteredThrowables);
        // Build and organize update tasks per data directory
        for (PartitionUpdateTask updateTask : allUpdateTasks) {
            String dataDirectory = updateTask.getDataDirectory();
            Queue<PartitionUpdateTask> updateTasks = dataDirectoryToUpdateTasks.get(dataDirectory);
            if (updateTasks == null) {
                updateTasks = new LinkedList<PartitionUpdateTask>();
                dataDirectoryToUpdateTasks.put(dataDirectory, updateTasks);
            }
            updateTasks.add(updateTask);
        }

        // Logging
        LOG.info("Number of update tasks: " + allUpdateTasks.size());
        for (Map.Entry<String, Queue<PartitionUpdateTask>> entry : dataDirectoryToUpdateTasks.entrySet()) {
            LOG.info("Number of update tasks scheduled in " + entry.getKey() + ": " + entry.getValue().size());
        }

        // Build executor services
        Map<String, ExecutorService> dataDirectoryToExecutorService = new HashMap<String, ExecutorService>();
        for (String dataDirectory : dataDirectoryToUpdateTasks.keySet()) {
            dataDirectoryToExecutorService.put(dataDirectory,
                    new UpdateThreadPoolExecutor(configurator.getMaxConcurrentUpdatesPerDataDirectory(),
                            new UpdaterThreadFactory(dataDirectory), concurrentUpdatesSemaphore));
        }

        LOG.info("Submitting update tasks for " + dataDirectoryToUpdateTasks.size() + " directories.");

        // Execute tasks. We execute one task for each data directory and loop around so that the tasks
        // attempt to acquire the semaphore in a reasonable order.
        boolean remaining = true;
        while (remaining) {
            remaining = false;
            for (Map.Entry<String, Queue<PartitionUpdateTask>> entry : dataDirectoryToUpdateTasks.entrySet()) {
                // Pop next task
                Queue<PartitionUpdateTask> partitionUpdateTasks = entry.getValue();
                if (!partitionUpdateTasks.isEmpty()) {
                    PartitionUpdateTask partitionUpdateTask = partitionUpdateTasks.remove();
                    // Execute task
                    dataDirectoryToExecutorService.get(entry.getKey()).execute(partitionUpdateTask);
                }
                if (!partitionUpdateTasks.isEmpty()) {
                    remaining = true;
                }
            }
        }

        LOG.info("All update tasks submitted, shutting down executor services");

        // Shutdown executors
        for (ExecutorService executorService : dataDirectoryToExecutorService.values()) {
            executorService.shutdown();
        }

        LOG.info("Waiting for executors to finish.");

        // Wait for executors to finish
        for (Map.Entry<String, ExecutorService> entry : dataDirectoryToExecutorService.entrySet()) {
            String directory = entry.getKey();
            ExecutorService executorService = entry.getValue();

            boolean keepWaiting = true;
            while (keepWaiting) {
                try {
                    LOG.info("Waiting for updates to complete on data directory: " + directory);
                    boolean terminated = executorService.awaitTermination(
                            UPDATE_EXECUTOR_TERMINATION_CHECK_TIMEOUT_VALUE,
                            UPDATE_EXECUTOR_TERMINATION_CHECK_TIMEOUT_UNIT);
                    if (terminated) {
                        // We finished executing all tasks
                        LOG.info("Finished updates for directory: " + directory);
                        keepWaiting = false;
                    }
                    // Otherwise, the timeout elapsed and the current thread was not interrupted: keep waiting.
                    // Record update ETA
                    Hosts.setUpdateETA(host, partitionUpdateTaskStatisticsAggregator.computeETA());
                } catch (InterruptedException e) {
                    // Received interruption (stop request).
                    // Swallow the interrupted state and ask the executor to shutdown immediately. Also, keep waiting.
                    LOG.info(
                            "The update manager was interrupted. Stopping the update process (stop executing new partition update tasks"
                                    + " and wait for those that were running to finish).");
                    // Shutdown all executors
                    for (ExecutorService otherExecutorService : dataDirectoryToExecutorService.values()) {
                        otherExecutorService.shutdownNow();
                    }
                    // Record failed update exception (we need to keep waiting)
                    encounteredThrowables.add(
                            new IOException("Failed to complete update: update interruption was requested."));
                }
            }
        }

        LOG.info("All executors have finished updates");

        // Shutdown all executors
        for (ExecutorService executorService : dataDirectoryToExecutorService.values()) {
            executorService.shutdownNow();
        }

        LOG.info("Finished with " + encounteredThrowables.size() + " errors.");

        // Detect failures
        if (!encounteredThrowables.isEmpty()) {
            LOG.error(String.format("%d exceptions encountered while running partition update tasks:",
                    encounteredThrowables.size()));
            int i = 0;
            for (Throwable t : encounteredThrowables) {
                LOG.error(String.format("Exception %d/%d:", ++i, encounteredThrowables.size()), t);
            }
            throw new IOException(String.format(
                    "Failed to complete update: %d exceptions encountered while running partition update tasks.",
                    encounteredThrowables.size()));
        }

        // Garbage collect useless host domains
        garbageCollectHostDomains(host);

        // Log statistics
        partitionUpdateTaskStatisticsAggregator.logStats();

    } catch (IOException e) {
        LOG.info("Update failed and took " + FormatUtils.formatSecondsDuration(timer.getDurationMs() / 1000));
        throw e;
    }
    LOG.info("Update succeeded and took " + FormatUtils.formatSecondsDuration(timer.getDurationMs() / 1000));
}