Example usage for java.util.Queue.isEmpty()

A list of usage examples for java.util.Queue.isEmpty()

Introduction

On this page you can find usage examples for the java.util.Queue method isEmpty().

Prototype

boolean isEmpty();

Document

Returns true if this collection contains no elements.
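
Before the project examples below, here is a minimal, self-contained sketch (not taken from any of the projects listed) showing the most common pattern: isEmpty() as the termination test for a loop that drains a queue.

import java.util.LinkedList;
import java.util.Queue;

public class QueueIsEmptyDemo {
    public static void main(String[] args) {
        Queue<String> queue = new LinkedList<>();
        queue.add("first");
        queue.add("second");
        queue.add("third");

        // Drain the queue: isEmpty() is the usual loop condition for a
        // single-threaded consumer, with poll() removing the head element.
        while (!queue.isEmpty()) {
            System.out.println(queue.poll());
        }
    }
}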

Usage

From source file: tachyon.master.MasterInfo.java

/**
 * Get absolute paths of all in memory files.
 *
 * @return absolute paths of all in memory files.
 */
public List<TachyonURI> getInMemoryFiles() {
    List<TachyonURI> ret = new ArrayList<TachyonURI>();
    LOG.info("getInMemoryFiles()");
    Queue<Pair<InodeFolder, TachyonURI>> nodesQueue = new LinkedList<Pair<InodeFolder, TachyonURI>>();
    synchronized (mRootLock) {
        // TODO: Verify we want to use absolute path.
        nodesQueue.add(new Pair<InodeFolder, TachyonURI>(mRoot, new TachyonURI(TachyonURI.SEPARATOR)));
        while (!nodesQueue.isEmpty()) {
            Pair<InodeFolder, TachyonURI> tPair = nodesQueue.poll();
            InodeFolder tFolder = tPair.getFirst();
            TachyonURI curUri = tPair.getSecond();

            Set<Inode> children = tFolder.getChildren();
            for (Inode tInode : children) {
                TachyonURI newUri = curUri.join(tInode.getName());
                if (tInode.isDirectory()) {
                    nodesQueue.add(new Pair<InodeFolder, TachyonURI>((InodeFolder) tInode, newUri));
                } else if (((InodeFile) tInode).isFullyInMemory()) {
                    ret.add(newUri);
                }
            }
        }
    }
    return ret;
}

From source file: com.comphenix.xp.parser.text.ItemParser.java

@Override
public Query parse(String text) throws ParsingException {

    if (text.length() == 0)
        // Empty names are not legal in YAML, so this shouldn't be possible 
        throw new IllegalArgumentException("Key must have some characters.");

    Queue<String> tokens = getParameterQueue(text);

    List<Integer> itemIDs = Utility.getElementList((Integer) null);
    List<Integer> durabilities = Utility.getElementList((Integer) null);

    ParsingException errorReason = null;
    Integer first = null;

    boolean isPotion = false;
    boolean sameCategory = false;

    try {
        // Get item IDs
        itemIDs = Utility.flatten(itemNameParser.parse(tokens));
        first = null;

        // Get the first element
        if (!itemIDs.isEmpty()) {
            first = itemIDs.get(0);
            isPotion = itemIDs.contains(Material.POTION.getId());
        }

        /* We may have a slight problem here. Durabilities may require the item ID to properly decode the name,
         * but if we have multiple item IDs that may conflict with each other. 
         *
         * Therefore, only items of the same category may have multiple durabilities.
         */
        sameCategory = ItemDurabilityParser.inSameCategory(itemIDs);

        // Set the first item or null
        elementDurability.setItemID(first);
        elementDurability.setUsedName(false);

        // Get list of durabilities
        durabilities = durabilityParser.parse(tokens);

        // Check for multiple items and named durabilities
        if (!sameCategory && elementDurability.isUsedName())
            throw ParsingException.fromFormat(
                    "Cannot use named durabilities (%s) with items of different data categories.",
                    StringUtils.join(durabilities, ", "));

        // Negative items or durabilities are not legal
        if (hasNegativeIntegers(itemIDs) || hasNegativeIntegers(durabilities))
            throw new ParsingException("Item ID or durability cannot contain negative numbers");

    } catch (ParsingException ex) {

        // Potion? Try again.
        if (isPotion)
            return parseAsPotion(text);

        // Check for named categories
        if (!sameCategory && elementDurability.isUsedName())
            throw new ParsingException("Named durabilities with different data categories.");
        else
            // Try more
            errorReason = ex;
    }

    // Scan for the "player creation" option
    List<Boolean> playerCreation = playerParser.parseAny(tokens);

    // Still more tokens? Something is wrong.
    if (!tokens.isEmpty()) {
        if (isPotion)
            return parseAsPotion(text);
        else if (errorReason == null)
            throw ParsingException.fromFormat("Unknown item tokens: ", StringUtils.join(tokens, ", "));
        else
            throw errorReason;
    }

    // Return universal potion query
    if (isPotion && durabilities.isEmpty()) {
        return PotionQuery.fromAny();
    }

    // At this point we have all we need to know
    return new ItemQuery(itemIDs, durabilities, playerCreation);
}

From source file: org.apache.giraph.worker.BspServiceSource.java

/**
 * Save the edges using the user-defined EdgeOutputFormat from our
 * vertexArray based on the split.
 *
 * @throws InterruptedException
 */
private void saveEdges() throws IOException, InterruptedException {
    final ImmutableClassesGiraphConfiguration<I, V, E> conf = getConfiguration();

    if (conf.getEdgeOutputFormatClass() == null) {
        LOG.warn("saveEdges: " + GiraphConstants.EDGE_OUTPUT_FORMAT_CLASS
                + "Make sure that the EdgeOutputFormat is not required.");
        return;
    }

    final int numPartitions = getPartitionStore().getNumPartitions();
    int numThreads = Math.min(conf.getNumOutputThreads(), numPartitions);
    LoggerUtils.setStatusAndLog(getContext(), LOG, Level.INFO,
            "saveEdges: Starting to save the edges using " + numThreads + " threads");
    final EdgeOutputFormat<I, V, E> edgeOutputFormat = conf.createWrappedEdgeOutputFormat();

    final Queue<Integer> partitionIdQueue = (numPartitions == 0) ? new LinkedList<Integer>()
            : new ArrayBlockingQueue<Integer>(numPartitions);
    Iterables.addAll(partitionIdQueue, getPartitionStore().getPartitionIds());

    CallableFactory<Void> callableFactory = new CallableFactory<Void>() {
        @Override
        public Callable<Void> newCallable(int callableId) {
            return new Callable<Void>() {
                @Override
                public Void call() throws Exception {
                    EdgeWriter<I, V, E> edgeWriter = edgeOutputFormat.createEdgeWriter(getContext());
                    edgeWriter.setConf(conf);
                    edgeWriter.initialize(getContext());

                    long nextPrintVertices = 0;
                    long nextPrintMsecs = System.currentTimeMillis() + 15000;
                    int partitionIndex = 0;
                    int numPartitions = getPartitionStore().getNumPartitions();
                    while (!partitionIdQueue.isEmpty()) {
                        Integer partitionId = partitionIdQueue.poll();
                        if (partitionId == null) {
                            break;
                        }

                        Partition<I, V, E> partition = getPartitionStore().getOrCreatePartition(partitionId);
                        long vertices = 0;
                        long edges = 0;
                        long partitionEdgeCount = partition.getEdgeCount();
                        for (Vertex<I, V, E> vertex : partition) {
                            for (Edge<I, E> edge : vertex.getEdges()) {
                                edgeWriter.writeEdge(vertex.getId(), vertex.getValue(), edge);
                                ++edges;
                            }
                            ++vertices;

                            // Update status at most every 250k vertices or 15 seconds
                            if (vertices > nextPrintVertices && System.currentTimeMillis() > nextPrintMsecs) {
                                LoggerUtils.setStatusAndLog(getContext(), LOG, Level.INFO,
                                        "saveEdges: Saved " + edges + " edges out of " + partitionEdgeCount
                                                + " partition edges, on partition " + partitionIndex
                                                + " out of " + numPartitions);
                                nextPrintMsecs = System.currentTimeMillis() + 15000;
                                nextPrintVertices = vertices + 250000;
                            }
                        }
                        getPartitionStore().putPartition(partition);
                        ++partitionIndex;
                    }
                    edgeWriter.close(getContext()); // the temp results are saved now
                    return null;
                }
            };
        }
    };
    ProgressableUtils.getResultsWithNCallables(callableFactory, numThreads, "save-edges-%d", getContext());

    LoggerUtils.setStatusAndLog(getContext(), LOG, Level.INFO, "saveEdges: Done saving edges.");
    // YARN: must complete the commit of the "task" output, Hadoop isn't there.
    if (conf.isPureYarnJob() && conf.getEdgeOutputFormatClass() != null) {
        try {
            OutputCommitter outputCommitter = edgeOutputFormat.getOutputCommitter(getContext());
            if (outputCommitter.needsTaskCommit(getContext())) {
                LoggerUtils.setStatusAndLog(getContext(), LOG, Level.INFO,
                        "OutputCommitter: committing task output.");
                // transfer from temp dirs to "task commit" dirs to prep for
                // the master's OutputCommitter#commitJob(context) call to finish.
                outputCommitter.commitTask(getContext());
            }
        } catch (InterruptedException ie) {
            LOG.error("Interrupted while attempting to obtain " + "OutputCommitter.", ie);
        } catch (IOException ioe) {
            LOG.error("Master task's attempt to commit output has " + "FAILED.", ioe);
        }
    }
}
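
One detail worth noting in this example: partitionIdQueue may be an ArrayBlockingQueue shared by several output threads, so the queue can become empty between the isEmpty() check and the poll() call; that is why the code also null-checks poll()'s result. A minimal sketch of the same idiom, with hypothetical names not taken from the Giraph source:

import java.util.Queue;
import java.util.concurrent.ArrayBlockingQueue;

public class SharedQueueDrainDemo {
    public static void main(String[] args) throws InterruptedException {
        Queue<Integer> work = new ArrayBlockingQueue<>(100);
        for (int i = 0; i < 100; i++) {
            work.add(i);
        }

        // Hypothetical worker loop, run concurrently by both threads below.
        Runnable drainer = () -> {
            while (!work.isEmpty()) {
                // Another thread may have emptied the queue after the
                // isEmpty() check, so poll()'s null result is the
                // authoritative "nothing left" signal.
                Integer item = work.poll();
                if (item == null) {
                    break;
                }
                System.out.println(Thread.currentThread().getName() + " took " + item);
            }
        };

        Thread t1 = new Thread(drainer);
        Thread t2 = new Thread(drainer);
        t1.start();
        t2.start();
        t1.join();
        t2.join();
    }
}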

From source file: ubic.gemma.core.search.SearchServiceImpl.java

private Collection<SearchResult> characteristicExpressionExperimentSearch(final SearchSettings settings) {

    Collection<Class<?>> classToSearch = new ArrayList<>(1); // this is a collection because of the API
    // for characteristicService; could add
    // findByUri(Class<?>...)

    // order matters if we hit the limits
    Queue<Class<?>> orderedClassesToSearch = new LinkedList<>();
    orderedClassesToSearch.add(ExpressionExperiment.class);
    orderedClassesToSearch.add(FactorValue.class);
    orderedClassesToSearch.add(BioMaterial.class);

    Collection<SearchResult> results = new HashSet<>();

    StopWatch watch = new StopWatch();
    watch.start();

    while (results.size() < SearchServiceImpl.SUFFICIENT_EXPERIMENT_RESULTS_FROM_CHARACTERISTICS
            && !orderedClassesToSearch.isEmpty()) {
        classToSearch.clear();
        classToSearch.add(orderedClassesToSearch.poll());
        // We handle the OR clauses here.
        String[] subclauses = settings.getQuery().split(" OR ");
        for (String subclause : subclauses) {
            /*
             * Note that the AND is applied only within one entity type. The fix would be to apply AND at this
             * level.
             */
            Collection<SearchResult> classResults = this.characteristicSearchWithChildren(classToSearch,
                    subclause);
            if (!classResults.isEmpty()) {
                String msg = "Found " + classResults.size() + " "
                        + classToSearch.iterator().next().getSimpleName()
                        + " results from characteristic search.";
                if (results.size() >= SearchServiceImpl.SUFFICIENT_EXPERIMENT_RESULTS_FROM_CHARACTERISTICS) {
                    msg += " Total found > "
                            + SearchServiceImpl.SUFFICIENT_EXPERIMENT_RESULTS_FROM_CHARACTERISTICS
                            + ", will not search for more entities.";
                }
                SearchServiceImpl.log.info(msg);
            }
            results.addAll(classResults);
        }

    }

    SearchServiceImpl.log.debug("ExpressionExperiment search: " + settings + " -> " + results.size()
            + " characteristic hits " + watch.getTime() + " ms");

    // Note that if we do this earlier (within each query) the limit SUFFICIENT_EXPERIMENT_RESULTS_FROM_CHARACTERISTICS has
    // more meaning. We would have to unroll the loop above
    return filterExperimentHitsByTaxon(results, settings.getTaxon());
}

From source file: org.gvnix.flex.entity.ActionScriptEntityMetadataProvider.java

private void createActionScriptMirrorClass(String asEntityId, ActionScriptType asType, JavaType javaType) {
    Queue<TypeMapping> relatedTypes = new LinkedList<TypeMapping>();

    List<MetaTagAttributeValue<?>> attributes = new ArrayList<MetaTagAttributeValue<?>>();
    attributes.add(new StringAttributeValue(new ActionScriptSymbolName(ALIAS_ATTR),
            javaType.getFullyQualifiedTypeName()));
    ASMetaTagMetadata remoteClassTag = new DefaultASMetaTagMetadata(REMOTE_CLASS_TAG, attributes);
    List<ASMetaTagMetadata> typeMetaTags = new ArrayList<ASMetaTagMetadata>();
    typeMetaTags.add(remoteClassTag);

    // TODO - for now we will only handle classes...interfaces could come
    // later but would add complexity (i.e., need
    // to find all implementations and mirror those as well)

    List<ASFieldMetadata> declaredFields = new ArrayList<ASFieldMetadata>();
    MemberDetails memberDetails = getMemberDetails(javaType);
    for (MethodMetadata method : MemberFindingUtils.getMethods(memberDetails)) {
        if (BeanInfoUtils.isAccessorMethod(method)) {
            JavaSymbolName propertyName = BeanInfoUtils.getPropertyNameForJavaBeanMethod(method);
            FieldMetadata javaField = BeanInfoUtils.getFieldForPropertyName(memberDetails, propertyName);

            // TODO - We don't add any meta-tags and we set the field to
            // public - any other choice?
            ASFieldMetadata asField = ActionScriptMappingUtils.toASFieldMetadata(asEntityId, javaField, true);
            relatedTypes.addAll(findRequiredMappings(javaField, asField));
            declaredFields.add(asField);
        }
    }

    ASClassOrInterfaceTypeDetails asDetails = new DefaultASClassOrInterfaceTypeDetails(asEntityId, asType,
            ASPhysicalTypeCategory.CLASS, declaredFields, null, null, null, null, null, typeMetaTags);
    // new DefaultASClassOrInterfaceTypeDetails(declaredByMetadataId, name,
    // physicalTypeCategory, declaredFields,
    // declaredConstructor, declaredMethods, superClass, extendsTypes,
    // implementsTypes, typeMetaTags);
    ASPhysicalTypeMetadata asMetadata = new DefaultASPhysicalTypeMetadata(asEntityId,
            getPhysicalLocationCanonicalPath(asEntityId), asDetails);
    getAsPhysicalTypeProvider().createPhysicalType(asMetadata);

    // Now trigger the creation of any related types
    while (!relatedTypes.isEmpty()) {
        TypeMapping mapping = relatedTypes.poll();
        createActionScriptMirrorClass(mapping.getMetadataId(), mapping.getAsType(), mapping.getJavaType());
    }
}

From source file: co.paralleluniverse.galaxy.core.Cache.java

private void receiveShortCircuit() {
    Queue<Message> ms = this.shortCircuitMessage.get();
    if (ms != null) {
        while (!ms.isEmpty()) {
            Message m = ms.remove();
            receive1(m);
        }
    }
    this.shortCircuitMessage.remove();
}
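
This example drains with remove() rather than poll(). remove() throws NoSuchElementException on an empty queue, which is safe here only because the isEmpty() check and the remove() operate on the same queue, apparently held per-thread (note the matching get()/remove() pair on shortCircuitMessage). A minimal sketch, not from the galaxy source, contrasting the two methods:

import java.util.LinkedList;
import java.util.NoSuchElementException;
import java.util.Queue;

public class RemoveVsPollDemo {
    public static void main(String[] args) {
        Queue<String> q = new LinkedList<>();
        // poll() signals emptiness by returning null.
        System.out.println(q.poll());
        try {
            // remove() signals emptiness by throwing.
            q.remove();
        } catch (NoSuchElementException e) {
            System.out.println("remove() on an empty queue throws " + e.getClass().getSimpleName());
        }
    }
}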

From source file: ubic.gemma.search.SearchServiceImpl.java

/**
 * @param settings
 */
private Collection<SearchResult> characteristicExpressionExperimentSearch(final SearchSettings settings) {

    Collection<SearchResult> results = new HashSet<SearchResult>();

    Collection<Class<?>> classToSearch = new ArrayList<Class<?>>(1);
    Queue<Class<?>> orderedClassesToSearch = new LinkedList<Class<?>>();
    orderedClassesToSearch.add(ExpressionExperiment.class);
    orderedClassesToSearch.add(FactorValue.class);
    orderedClassesToSearch.add(BioMaterial.class);
    orderedClassesToSearch.add(Treatment.class);

    Collection<SearchResult> characterSearchResults = new HashSet<SearchResult>();

    while (characterSearchResults.size() < SUFFICIENT_EXPERIMENT_RESULTS_FROM_CHARACTERISTICS
            && !orderedClassesToSearch.isEmpty()) {
        classToSearch.clear();
        classToSearch.add(orderedClassesToSearch.poll());
        Collection<SearchResult> classResults = ontologySearchAnnotatedObject(classToSearch, settings);
        characterSearchResults.addAll(classResults);

        String msg = "Found " + classResults.size() + " " + classToSearch.iterator().next().getSimpleName()
                + " results from characteristic search.";
        if (characterSearchResults.size() >= SUFFICIENT_EXPERIMENT_RESULTS_FROM_CHARACTERISTICS) {
            msg += " Total found > " + SUFFICIENT_EXPERIMENT_RESULTS_FROM_CHARACTERISTICS
                    + ", will not search for more entities.";
        }
        log.info(msg);
    }

    StopWatch watch = new StopWatch();
    watch.start();

    // filter and get parents...
    int numEEs = 0;
    Collection<BioMaterial> biomaterials = new HashSet<BioMaterial>();
    Collection<FactorValue> factorValues = new HashSet<FactorValue>();
    Collection<Treatment> treatments = new HashSet<Treatment>();

    for (SearchResult sr : characterSearchResults) {
        Class<?> resultClass = sr.getResultClass();
        if (ExpressionExperiment.class.isAssignableFrom(resultClass)) {
            sr.setHighlightedText(sr.getHighlightedText() + " (characteristic)");
            results.add(sr);
            numEEs++;
        } else if (BioMaterial.class.isAssignableFrom(resultClass)) {
            biomaterials.add((BioMaterial) sr.getResultObject());
        } else if (FactorValue.class.isAssignableFrom(resultClass)) {
            factorValues.add((FactorValue) sr.getResultObject());
        } else if (Treatment.class.isAssignableFrom(resultClass)) {
            treatments.add((Treatment) sr.getResultObject());
        }
    }

    /*
     * Much faster to batch it...
     */
    if (biomaterials.size() > 0) {
        Collection<ExpressionExperiment> ees = expressionExperimentService.findByBioMaterials(biomaterials);
        for (ExpressionExperiment ee : ees) {
            results.add(new SearchResult(ee, INDIRECT_DB_HIT_PENALTY, "BioMaterial characteristic"));
        }
    }

    if (factorValues.size() > 0) {
        Collection<ExpressionExperiment> ees = expressionExperimentService.findByFactorValues(factorValues);
        for (ExpressionExperiment ee : ees) {
            if (log.isDebugEnabled())
                log.debug(ee);
            results.add(new SearchResult(ee, INDIRECT_DB_HIT_PENALTY, "Factor characteristic"));
        }
    }

    if (treatments.size() > 0) {
        log.info("Not processing treatments, but hits were found");
        // Collection<ExpressionExperiment> ees = expressionExperimentService.findByTreatments( treatments );
        // for ( ExpressionExperiment ee : ees ) {
        // if ( !results.contains( ee ) ) {
        // results.add( new SearchResult( ee, INDIRECT_DB_HIT_PENALTY, "Treatment" ) );
        // }
        // }
    }

    if (log.isDebugEnabled()) {
        log.debug(
                "ExpressionExperiment search: " + settings + " -> " + results.size() + " characteristic hits");
    }

    if (watch.getTime() > 1000) {
        log.info("Retrieving " + results.size() + " experiments from " + characterSearchResults.size()
                + " retrieved characteristics took " + watch.getTime() + " ms");
        log.info("Breakdown: " + numEEs + " via direct association with EE; " + biomaterials.size()
                + " via association with Biomaterial; " + factorValues.size() + " via experimental design");
    }

    return results;
}

From source file: de.uni_koblenz.jgralab.utilities.rsa2tg.Rsa2Tg.java

private void checkAttributes() {
    GraphClass graphClass = sg.getFirstGraphClass();
    Map<String, AttributedElementClass> definedAttributes = new HashMap<>();
    for (Attribute a : graphClass.get_attributes()) {
        if (definedAttributes.containsKey(a.get_name())) {
            throw new RuntimeException(
                    "Attribute " + a.get_name() + " at " + graphClass.get_qualifiedName() + " is duplicate.");
        }
        definedAttributes.put(a.get_name(), graphClass);
    }

    for (GraphElementClass gec : sg.getGraphElementClassVertices()) {
        boolean isVertexClass = gec.isInstanceOf(VertexClass.VC);
        definedAttributes = new HashMap<>();
        BooleanGraphMarker alreadyChecked = new BooleanGraphMarker(sg);
        Queue<GraphElementClass> queue = new LinkedList<>();
        queue.add(gec);
        while (!queue.isEmpty()) {
            GraphElementClass current = queue.poll();
            if (alreadyChecked.isMarked(current)) {
                continue;
            }
            for (Attribute att : current.get_attributes()) {
                if (definedAttributes.containsKey(att.get_name())) {
                    AttributedElementClass childClass = definedAttributes.get(att.get_name());
                    throw new RuntimeException("The name of the "
                            + ((childClass == gec) && (current != gec) ? "" : "inherited ") + "attribute "
                            + att.get_name() + " of " + (isVertexClass ? "VertexClass" : "EdgeClass") + " "
                            + childClass.get_qualifiedName()
                            + (current == gec ? " is duplicate"
                                    : (" is the same name as the inherited attribute of "
                                            + (isVertexClass ? "VertexClass" : "EdgeClass") + " "
                                            + current.get_qualifiedName()))
                            + ".");
                } else {
                    definedAttributes.put(att.get_name(), current);
                }
            }
            alreadyChecked.mark(current);
            for (Edge toSuperClass : current.incidences(
                    isVertexClass ? SpecializesVertexClass.EC : SpecializesEdgeClass.EC, EdgeDirection.OUT)) {
                GraphElementClass superClass = (GraphElementClass) toSuperClass.getThat();
                if (!alreadyChecked.isMarked(superClass)) {
                    queue.add(superClass);
                }
            }
        }
    }
}
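
The loop above pairs isEmpty() with a visited marker (alreadyChecked) so that a class reachable through more than one specialization path is processed only once. The same shape reduced to a generic sketch, with a HashSet standing in for the graph marker and hypothetical node names:

import java.util.ArrayDeque;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.Set;

public class VisitedBfsDemo {
    // Walks a graph given as adjacency lists; each node is processed once
    // even if several edges lead to it.
    static void bfs(String start, Map<String, List<String>> edges) {
        Queue<String> queue = new ArrayDeque<>();
        Set<String> visited = new HashSet<>();
        queue.add(start);
        while (!queue.isEmpty()) {
            String current = queue.poll();
            if (!visited.add(current)) {
                continue; // already processed via another path
            }
            System.out.println("visiting " + current);
            for (String next : edges.getOrDefault(current, List.of())) {
                if (!visited.contains(next)) {
                    queue.add(next);
                }
            }
        }
    }

    public static void main(String[] args) {
        bfs("A", Map.of(
                "A", List.of("B", "C"),
                "B", List.of("D"),
                "C", List.of("D")));
    }
}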

From source file: org.apache.giraph.worker.BspServiceSource.java

/**
 * Save the vertices using the user-defined VertexOutputFormat from our
 * vertexArray based on the split.
 *
 * @param numLocalVertices Number of local vertices
 * @throws InterruptedException
 */
private void saveVertices(long numLocalVertices) throws IOException, InterruptedException {
    ImmutableClassesGiraphConfiguration<I, V, E> conf = getConfiguration();

    if (conf.getVertexOutputFormatClass() == null) {
        LOG.warn("saveVertices: " + GiraphConstants.VERTEX_OUTPUT_FORMAT_CLASS
                + " not specified -- there will be no saved output");
        return;
    }
    if (conf.doOutputDuringComputation()) {
        if (LOG.isInfoEnabled()) {
            LOG.info("saveVertices: The option for doing output during "
                    + "computation is selected, so there will be no saving of the "
                    + "output in the end of application");
        }
        return;
    }

    final int numPartitions = getPartitionStore().getNumPartitions();
    int numThreads = Math.min(getConfiguration().getNumOutputThreads(), numPartitions);
    LoggerUtils.setStatusAndLog(getContext(), LOG, Level.INFO, "saveVertices: Starting to save "
            + numLocalVertices + " vertices " + "using " + numThreads + " threads");
    final VertexOutputFormat<I, V, E> vertexOutputFormat = getConfiguration().createWrappedVertexOutputFormat();

    final Queue<Integer> partitionIdQueue = (numPartitions == 0) ? new LinkedList<Integer>()
            : new ArrayBlockingQueue<Integer>(numPartitions);
    Iterables.addAll(partitionIdQueue, getPartitionStore().getPartitionIds());

    long verticesToStore = 0;
    PartitionStore<I, V, E> partitionStore = getPartitionStore();
    for (int partitionId : partitionStore.getPartitionIds()) {
        Partition<I, V, E> partition = partitionStore.getOrCreatePartition(partitionId);
        verticesToStore += partition.getVertexCount();
        partitionStore.putPartition(partition);
    }
    WorkerProgress.get().startStoring(verticesToStore, getPartitionStore().getNumPartitions());

    CallableFactory<Void> callableFactory = new CallableFactory<Void>() {
        @Override
        public Callable<Void> newCallable(int callableId) {
            return new Callable<Void>() {
                /** How often to update WorkerProgress */
                private static final long VERTICES_TO_UPDATE_PROGRESS = 100000;

                @Override
                public Void call() throws Exception {
                    VertexWriter<I, V, E> vertexWriter = vertexOutputFormat.createVertexWriter(getContext());
                    vertexWriter.setConf(getConfiguration());
                    vertexWriter.initialize(getContext());
                    long nextPrintVertices = 0;
                    long nextUpdateProgressVertices = VERTICES_TO_UPDATE_PROGRESS;
                    long nextPrintMsecs = System.currentTimeMillis() + 15000;
                    int partitionIndex = 0;
                    int numPartitions = getPartitionStore().getNumPartitions();
                    while (!partitionIdQueue.isEmpty()) {
                        Integer partitionId = partitionIdQueue.poll();
                        if (partitionId == null) {
                            break;
                        }

                        Partition<I, V, E> partition = getPartitionStore().getOrCreatePartition(partitionId);
                        long verticesWritten = 0;
                        for (Vertex<I, V, E> vertex : partition) {
                            vertexWriter.writeVertex(vertex);
                            ++verticesWritten;

                            // Update status at most every 250k vertices or 15 seconds
                            if (verticesWritten > nextPrintVertices
                                    && System.currentTimeMillis() > nextPrintMsecs) {
                                LoggerUtils.setStatusAndLog(getContext(), LOG, Level.INFO,
                                        "saveVertices: Saved " + verticesWritten + " out of "
                                                + partition.getVertexCount() + " partition vertices, "
                                                + "on partition " + partitionIndex + " out of "
                                                + numPartitions);
                                nextPrintMsecs = System.currentTimeMillis() + 15000;
                                nextPrintVertices = verticesWritten + 250000;
                            }

                            if (verticesWritten >= nextUpdateProgressVertices) {
                                WorkerProgress.get().addVerticesStored(VERTICES_TO_UPDATE_PROGRESS);
                                nextUpdateProgressVertices += VERTICES_TO_UPDATE_PROGRESS;
                            }
                        }
                        getPartitionStore().putPartition(partition);
                        ++partitionIndex;
                        WorkerProgress.get().addVerticesStored(verticesWritten % VERTICES_TO_UPDATE_PROGRESS);
                        WorkerProgress.get().incrementPartitionsStored();
                    }
                    vertexWriter.close(getContext()); // the temp results are saved now
                    return null;
                }
            };
        }
    };
    ProgressableUtils.getResultsWithNCallables(callableFactory, numThreads, "save-vertices-%d", getContext());

    LoggerUtils.setStatusAndLog(getContext(), LOG, Level.INFO, "saveVertices: Done saving vertices.");
    // YARN: must complete the commit of the "task" output, Hadoop isn't there.
    if (getConfiguration().isPureYarnJob() && getConfiguration().getVertexOutputFormatClass() != null) {
        try {
            OutputCommitter outputCommitter = vertexOutputFormat.getOutputCommitter(getContext());
            if (outputCommitter.needsTaskCommit(getContext())) {
                LoggerUtils.setStatusAndLog(getContext(), LOG, Level.INFO,
                        "OutputCommitter: committing task output.");
                // transfer from temp dirs to "task commit" dirs to prep for
                // the master's OutputCommitter#commitJob(context) call to finish.
                outputCommitter.commitTask(getContext());
            }
        } catch (InterruptedException ie) {
            LOG.error("Interrupted while attempting to obtain " + "OutputCommitter.", ie);
        } catch (IOException ioe) {
            LOG.error("Master task's attempt to commit output has " + "FAILED.", ioe);
        }
    }
}

From source file: org.gvnix.flex.entity.ActionScriptEntityMetadataProvider.java

private void processJavaTypeChanged(String javaEntityId) {
    Queue<TypeMapping> relatedTypes = new LinkedList<TypeMapping>();
    List<ASFieldMetadata> processedProperties = new ArrayList<ASFieldMetadata>();

    JavaType javaType = PhysicalTypeIdentifier.getJavaType(javaEntityId);

    ActionScriptType asType = ActionScriptMappingUtils.toActionScriptType(javaType);
    String asEntityId = ASPhysicalTypeIdentifier.createIdentifier(asType, "src/main/flex");

    ASMutableClassOrInterfaceTypeDetails asTypeDetails = getASClassDetails(asEntityId);

    if (asTypeDetails == null) {
        return;
    }

    // Verify that the ActionScript class is enabled for remoting
    if (!isRemotingClass(javaType, asTypeDetails)) {
        return;
    }

    List<ASFieldMetadata> declaredFields = asTypeDetails.getDeclaredFields();

    MemberDetails memberDetails = getMemberDetails(javaType);

    if (memberDetails == null) {
        return;
    }

    for (MethodMetadata method : MemberFindingUtils.getMethods(memberDetails)) {
        if (BeanInfoUtils.isMutatorMethod(method)) {
            JavaSymbolName propertyName = BeanInfoUtils.getPropertyNameForJavaBeanMethod(method);
            FieldMetadata javaField = BeanInfoUtils.getFieldForPropertyName(memberDetails, propertyName);

            // TODO - We don't add any meta-tags and we set the field to
            // public - any other choice? Probably not until
            // we potentially add some sort of support for AS getters and
            // setters
            ASFieldMetadata asField = ActionScriptMappingUtils.toASFieldMetadata(asEntityId, javaField, true);

            int existingIndex = declaredFields.indexOf(asField);
            if (existingIndex > -1) {
                // Field already exists...does it need to be updated? Should
                // we even do this, or just assume if the
                // type is different that the user changed it intentionally.
                ASFieldMetadata existingField = declaredFields.get(existingIndex);
                if (!existingField.getFieldType().equals(asField.getFieldType())) {
                    asTypeDetails.updateField(asField, false);
                }
            } else {
                asTypeDetails.addField(asField, false);
            }

            relatedTypes.addAll(findRequiredMappings(javaField, asField));

            processedProperties.add(asField);
        }
    }

    // TODO - how should we handle fields that don't exist in the Java
    // object? For now we will just remove...should
    // add some way to turn this off later.
    for (ASFieldMetadata asField : asTypeDetails.getDeclaredFields()) {
        if (!processedProperties.contains(asField)) {
            asTypeDetails.removeField(asField.getFieldName());
        }
    }

    asTypeDetails.commit();

    // Now trigger the creation of any newly added related types
    while (!relatedTypes.isEmpty()) {
        TypeMapping mapping = relatedTypes.poll();
        createActionScriptMirrorClass(mapping.getMetadataId(), mapping.getAsType(), mapping.getJavaType());
    }
}