Example usage for java.util Queue addAll

Introduction

This page lists example usages of java.util Queue addAll, collected from open-source projects.

Prototype

boolean addAll(Collection<? extends E> c);

Document

Adds all of the elements in the specified collection to this collection (optional operation).
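
Before the project examples below, here is a minimal, self-contained sketch of the call (class and variable names are illustrative only): addAll appends every element of the argument collection to the queue, in the collection's iteration order, and returns true if the queue changed.

import java.util.ArrayDeque;
import java.util.Arrays;
import java.util.Queue;

public class QueueAddAllExample {
    public static void main(String[] args) {
        // Start with a queue holding a single element.
        Queue<String> queue = new ArrayDeque<String>();
        queue.add("first");

        // addAll appends the collection's elements in iteration order.
        boolean changed = queue.addAll(Arrays.asList("second", "third"));

        System.out.println(changed); // true - the queue was modified
        System.out.println(queue);   // [first, second, third]
    }
}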

Usage

From source file:org.apache.hadoop.hive.ql.MultiDriver_BAK.java

private CommandProcessorResponse runInternal(ArrayList<Pair<String, Configuration>> multiCmds)
        throws CommandNeedRetryException {
    errorMessage = null;
    SQLState = null;
    downstreamError = null;

    if (!validateConfVariables()) {
        return new CommandProcessorResponse(12, errorMessage, SQLState);
    }

    // Reset the perf logger
    PerfLogger perfLogger = PerfLogger.getPerfLogger(true);
    perfLogger.PerfLogBegin(LOG, PerfLogger.MULTIDRIVER_RUN);
    perfLogger.PerfLogBegin(LOG, PerfLogger.TIME_TO_SUBMIT);

    int ret;
    synchronized (compileMonitor) {
        ret = multiCompile(multiCmds);
    }

    if (ret != 0) {
        for (int key = 0; key < multiPctx.size(); key++) {
            Context ctx = multiPctx.get(key).getContext();
            releaseLocks(ctx.getHiveLocks());
        }
        return new CommandProcessorResponse(ret, errorMessage, SQLState);
    }

    boolean requireLock = false;
    boolean ckLock = checkLockManager();

    if (ckLock) {
        boolean lockOnlyMapred = HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_LOCK_MAPRED_ONLY);
        if (lockOnlyMapred) {
            Queue<Task<? extends Serializable>> taskQueue = new LinkedList<Task<? extends Serializable>>();
            taskQueue.addAll(plan.getRootTasks());
            while (taskQueue.peek() != null) {
                Task<? extends Serializable> tsk = taskQueue.remove();
                requireLock = requireLock || tsk.requireLock();
                if (requireLock) {
                    break;
                }
                if (tsk instanceof ConditionalTask) {
                    taskQueue.addAll(((ConditionalTask) tsk).getListTasks());
                }
                if (tsk.getChildTasks() != null) {
                    taskQueue.addAll(tsk.getChildTasks());
                }
                // Backup tasks are not added here, because a backup task
                // should be the same type as the original task.
            }
        } else {
            requireLock = true;
        }
    }

    if (requireLock) {
        ret = acquireReadWriteLocks();
        if (ret != 0) {
            releaseLocks(ctx.getHiveLocks());
            //  return new CommandProcessorResponse(ret, errorMessage, SQLState);
        }
    }
    boolean isexplain = ctx.getExplain();
    // if(isexplain){
    // multiOutputexplain();
    // }else{
    // reserved function
    ret = multiExecute();

    if (ret != 0) {
        //if needRequireLock is false, the release here will do nothing because there is no lock
        releaseLocks(ctx.getHiveLocks());
        return new CommandProcessorResponse(ret, errorMessage, SQLState);
    }
    multiOutputResult();

    //if needRequireLock is false, the release here will do nothing because there is no lock
    releaseLocks(ctx.getHiveLocks());

    for (int key = 0; key < multiPctx.size(); key++) {
        Context ctx = multiPctx.get(key).getContext();
        releaseLocks(ctx.getHiveLocks());
    }

    multiPctx.clear();
    perfLogger.PerfLogEnd(LOG, PerfLogger.MULTIDRIVER_RUN);
    perfLogger.close(LOG, plan);

    return new CommandProcessorResponse(ret);
}

From source file:org.kuali.rice.krad.service.impl.DictionaryValidationServiceImpl.java

/**
 * process constraints for the provided value using the provided constraint processors
 *
 * @param result - used to store the validation results
 * @param value - the object on which constraints are to be processed - a collection or the value of an attribute
 * @param definition - a Data Dictionary definition e.g. {@code ComplexAttributeDefinition} or {@code
 * CollectionDefinition}
 * @param attributeValueReader - a class that encapsulates access to both dictionary metadata and object field
 * values
 * @param doOptionalProcessing - true if the validation should do optional validation, false otherwise
 */
@SuppressWarnings("unchecked")
private void processConstraints(DictionaryValidationResult result,
        List<? extends ConstraintProcessor> constraintProcessors, Object value, Constrainable definition,
        AttributeValueReader attributeValueReader, boolean doOptionalProcessing, String validationState,
        StateMapping stateMapping) {
    //TODO: Implement custom validators

    if (constraintProcessors != null) {
        Constrainable selectedDefinition = definition;
        AttributeValueReader selectedAttributeValueReader = attributeValueReader;

        // First - take the constrainable definition and get its constraints

        Queue<Constraint> constraintQueue = new LinkedList<Constraint>();

        // Using a for loop to iterate through constraint processors because ordering is important
        for (ConstraintProcessor<Object, Constraint> processor : constraintProcessors) {

            // Let the calling method opt out of any optional processing
            if (!doOptionalProcessing && processor.isOptional()) {
                result.addSkipped(attributeValueReader, processor.getName());
                continue;
            }

            Class<? extends Constraint> constraintType = processor.getConstraintType();

            // Add all of the constraints for this constraint type for all providers to the queue
            for (ConstraintProvider constraintProvider : constraintProviders) {
                if (constraintProvider.isSupported(selectedDefinition)) {
                    Collection<Constraint> constraintList = constraintProvider
                            .getConstraints(selectedDefinition, constraintType);
                    if (constraintList != null) {
                        constraintQueue.addAll(constraintList);
                    }
                }
            }

            // If there are no constraints provided for this definition, then just skip it
            if (constraintQueue.isEmpty()) {
                result.addSkipped(attributeValueReader, processor.getName());
                continue;
            }

            Collection<Constraint> additionalConstraints = new LinkedList<Constraint>();

            // This loop is functionally identical to a for loop, but it has the advantage of letting us keep the queue around
            // and populate it with any new constraints contributed by the processor
            while (!constraintQueue.isEmpty()) {

                Constraint constraint = constraintQueue.poll();

                // If this constraint is not one that this processor handles, then skip it and add it to the queue for the next processor;
                // obviously this would be redundant (we're only looking at constraints that this processor can process) except that
                // the previous processor might have stuck a new constraint (or constraints) on the queue
                if (!constraintType.isInstance(constraint)) {
                    result.addSkipped(attributeValueReader, processor.getName());
                    additionalConstraints.add(constraint);
                    continue;
                }

                constraint = ConstraintStateUtils.getApplicableConstraint(constraint, validationState,
                        stateMapping);

                if (constraint != null) {
                    ProcessorResult processorResult = processor.process(result, value, constraint,
                            selectedAttributeValueReader);

                    Collection<Constraint> processorResultConstraints = processorResult.getConstraints();
                    if (processorResultConstraints != null && !processorResultConstraints.isEmpty()) {
                        constraintQueue.addAll(processorResultConstraints);
                    }

                    // Change the selected definition to whatever was returned from the processor
                    if (processorResult.isDefinitionProvided()) {
                        selectedDefinition = processorResult.getDefinition();
                    }
                    // Change the selected attribute value reader to whatever was returned from the processor
                    if (processorResult.isAttributeValueReaderProvided()) {
                        selectedAttributeValueReader = processorResult.getAttributeValueReader();
                    }
                }
            }

            // After iterating through all the constraints for this processor, add the ones that weren't consumed by it back to the queue
            constraintQueue.addAll(additionalConstraints);
        }
    }
}
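
The processor loop above treats the queue as a work list that may grow while it is being drained: each poll() can cause new constraints to be added back with addAll. A stripped-down sketch of that idiom (the Task type and its process() method are hypothetical, not part of the Kuali API):

import java.util.List;
import java.util.Queue;

public class WorkListSketch {
    interface Task {
        // Processing one task may produce follow-up tasks.
        List<Task> process();
    }

    static void drain(Queue<Task> workList) {
        while (!workList.isEmpty()) {
            Task task = workList.poll();
            // Follow-up work is appended and handled by a later iteration,
            // just like constraintQueue.addAll(...) in the example above.
            workList.addAll(task.process());
        }
    }
}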

From source file:org.apache.hadoop.hive.ql.Driver.java

private boolean requiresLock() {
    if (!checkConcurrency()) {
        return false;
    }
    // Lock operations themselves don't require the lock.
    if (isExplicitLockOperation()) {
        return false;
    }
    if (!HiveConf.getBoolVar(conf, ConfVars.HIVE_LOCK_MAPRED_ONLY)) {
        return true;
    }
    Queue<Task<? extends Serializable>> taskQueue = new LinkedList<Task<? extends Serializable>>();
    taskQueue.addAll(plan.getRootTasks());
    while (taskQueue.peek() != null) {
        Task<? extends Serializable> tsk = taskQueue.remove();
        if (tsk.requireLock()) {
            return true;
        }
        if (tsk instanceof ConditionalTask) {
            taskQueue.addAll(((ConditionalTask) tsk).getListTasks());
        }
        if (tsk.getChildTasks() != null) {
            taskQueue.addAll(tsk.getChildTasks());
        }
        // Backup tasks are not added here, because a backup task
        // should be the same type as the original task.
    }
    return false;
}
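
requiresLock() above is a breadth-first walk of the task graph: the root tasks seed the queue, and each visited task's children (or a ConditionalTask's alternatives) are appended with addAll. The same traversal in isolation, against a hypothetical Node type rather than Hive's Task:

import java.util.ArrayDeque;
import java.util.Collections;
import java.util.List;
import java.util.Queue;

public class TaskWalkSketch {
    static class Node {
        boolean requiresLock;
        List<Node> children = Collections.emptyList();
    }

    static boolean anyRequiresLock(List<Node> roots) {
        Queue<Node> queue = new ArrayDeque<Node>();
        queue.addAll(roots);
        while (!queue.isEmpty()) {
            Node node = queue.remove();
            if (node.requiresLock) {
                return true; // short-circuit as soon as one node needs a lock
            }
            queue.addAll(node.children); // enqueue children for a later visit
        }
        return false;
    }
}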

From source file:tachyon.master.MasterInfo.java

/**
 * Get the id of the file at the given path. If recursive, it scans the subdirectories as well.
 *
 * @param path The path to start looking at
 * @param recursive If true, recursively scan the subdirectories at the given path as well
 * @return the list of the inode id's at the path
 * @throws InvalidPathException
 * @throws FileDoesNotExistException
 */
public List<Integer> listFiles(TachyonURI path, boolean recursive)
        throws InvalidPathException, FileDoesNotExistException {
    List<Integer> ret = new ArrayList<Integer>();
    synchronized (mRootLock) {
        Inode inode = getInode(path);
        if (inode == null) {
            throw new FileDoesNotExistException(path.toString());
        }

        if (inode.isFile()) {
            ret.add(inode.getId());
        } else if (recursive) {
            Queue<Inode> queue = new LinkedList<Inode>();
            queue.addAll(((InodeFolder) inode).getChildren());

            while (!queue.isEmpty()) {
                Inode qinode = queue.poll();
                if (qinode.isDirectory()) {
                    queue.addAll(((InodeFolder) qinode).getChildren());
                } else {
                    ret.add(qinode.getId());
                }
            }
        } else {
            for (Inode child : ((InodeFolder) inode).getChildren()) {
                ret.add(child.getId());
            }
        }
    }

    return ret;
}
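
All of the project examples on this page instantiate the Queue as a LinkedList. When the elements are never null and only FIFO behavior is needed, ArrayDeque is usually the better choice; the trade-off to remember is that ArrayDeque.addAll rejects null elements while LinkedList accepts them. A small runnable comparison:

import java.util.ArrayDeque;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.Queue;

public class QueueChoiceSketch {
    public static void main(String[] args) {
        // LinkedList tolerates nulls but allocates a node per element.
        Queue<String> linked = new LinkedList<String>();
        linked.addAll(Arrays.asList("a", null, "b")); // fine

        // ArrayDeque is typically faster for queue use, but rejects nulls.
        Queue<String> deque = new ArrayDeque<String>();
        deque.addAll(Arrays.asList("a", "b"));        // fine
        // deque.addAll(Arrays.asList("a", null));    // would throw NullPointerException
    }
}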

From source file:org.apache.hadoop.hive.ql.MultiDriver.java

private int createcachetable(ArrayList<Pair<String, Configuration>> multiCmds)
        throws CommandNeedRetryException {

    int ret;
    synchronized (compileMonitor) {
        ret = CreateTableCompile(multiCmds, true);
    }

    boolean requireLock = false;
    boolean ckLock = checkLockManager();

    if (ckLock) {
        boolean lockOnlyMapred = HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_LOCK_MAPRED_ONLY);
        if (lockOnlyMapred) {
            Queue<Task<? extends Serializable>> taskQueue = new LinkedList<Task<? extends Serializable>>();
            taskQueue.addAll(plan.getRootTasks());
            while (taskQueue.peek() != null) {
                Task<? extends Serializable> tsk = taskQueue.remove();
                requireLock = requireLock || tsk.requireLock();
                if (requireLock) {
                    break;
                }
                if (tsk instanceof ConditionalTask) {
                    taskQueue.addAll(((ConditionalTask) tsk).getListTasks());
                }
                if (tsk.getChildTasks() != null) {
                    taskQueue.addAll(tsk.getChildTasks());
                }
                // Backup tasks are not added here, because a backup task
                // should be the same type as the original task.
            }
        } else {
            requireLock = true;
        }
    }

    if (requireLock) {
        ret = acquireReadWriteLocks();
        if (ret != 0) {
            releaseLocks(ctx.getHiveLocks());
            //  return new CommandProcessorResponse(ret, errorMessage, SQLState);
        }
    }

    ret = multiExecute();

    return ret;

}

From source file:org.apache.hadoop.hive.ql.MultiDriver.java

private CommandProcessorResponse runInternal(ArrayList<Pair<String, Configuration>> multiCmds)
        throws CommandNeedRetryException {
    errorMessage = null;
    SQLState = null;
    downstreamError = null;

    if (!validateConfVariables()) {
        return new CommandProcessorResponse(12, errorMessage, SQLState);
    }

    // Reset the perf logger
    PerfLogger perfLogger = PerfLogger.getPerfLogger(true);
    perfLogger.PerfLogBegin(LOG, PerfLogger.MULTIDRIVER_RUN);
    perfLogger.PerfLogBegin(LOG, PerfLogger.TIME_TO_SUBMIT);
    //createcachetable(multiCmds);

    int ret;
    synchronized (compileMonitor) {
        ret = multiCompile(multiCmds);
    }

    if (ret != 0) {
        for (int key = 0; key < multiPctx.size(); key++) {
            Context ctx = multiPctx.get(key).getContext();
            releaseLocks(ctx.getHiveLocks());
        }
        return new CommandProcessorResponse(ret, errorMessage, SQLState);
    }

    boolean requireLock = false;
    boolean ckLock = checkLockManager();

    if (ckLock) {
        boolean lockOnlyMapred = HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_LOCK_MAPRED_ONLY);
        if (lockOnlyMapred) {
            Queue<Task<? extends Serializable>> taskQueue = new LinkedList<Task<? extends Serializable>>();
            taskQueue.addAll(plan.getRootTasks());
            while (taskQueue.peek() != null) {
                Task<? extends Serializable> tsk = taskQueue.remove();
                requireLock = requireLock || tsk.requireLock();
                if (requireLock) {
                    break;
                }
                if (tsk instanceof ConditionalTask) {
                    taskQueue.addAll(((ConditionalTask) tsk).getListTasks());
                }
                if (tsk.getChildTasks() != null) {
                    taskQueue.addAll(tsk.getChildTasks());
                }
                // Backup tasks are not added here, because a backup task
                // should be the same type as the original task.
            }
        } else {
            requireLock = true;
        }
    }

    if (requireLock) {
        ret = acquireReadWriteLocks();
        if (ret != 0) {
            releaseLocks(ctx.getHiveLocks());
            //  return new CommandProcessorResponse(ret, errorMessage, SQLState);
        }
    }
    boolean isexplain = ctx.getExplain();
    // if(isexplain){
    // multiOutputexplain();
    // }else{
    // reserved function
    ret = multiExecute();

    if (ret != 0) {
        //if needRequireLock is false, the release here will do nothing because there is no lock
        releaseLocks(ctx.getHiveLocks());
        return new CommandProcessorResponse(ret, errorMessage, SQLState);
    }
    multiOutputResult();

    //if needRequireLock is false, the release here will do nothing because there is no lock
    releaseLocks(ctx.getHiveLocks());

    for (int key = 0; key < multiPctx.size(); key++) {
        Context ctx = multiPctx.get(key).getContext();
        releaseLocks(ctx.getHiveLocks());
    }

    multiPctx.clear();
    perfLogger.PerfLogEnd(LOG, PerfLogger.MULTIDRIVER_RUN);
    perfLogger.close(LOG, plan);

    return new CommandProcessorResponse(ret);
}

From source file:org.apache.hadoop.hive.ql.MultiDriver.java

private int multipreoptimizetest() throws CommandNeedRetryException {
    int i;
    PerfLogger perfLogger = PerfLogger.getPerfLogger();

    for (i = 0; i < cmds.size(); i++) {
        TaskFactory.resetId();
        ParseContext pCtx = multiPctx.get(i);
        //  conf=(HiveConf)confs.get(i);
        conf = pCtx.getConf();
        ctx = pCtx.getContext();

        LOG.info("Before  MultidoPhase2forTest Optree:\n" + Operator.toString(pCtx.getTopOps().values()));
        // do Optimizer  gen MR task
        SemanticAnalyzer sem;
        try {
            sem = new SemanticAnalyzer(conf);
            sem.MultidoPhase2forTest(pCtx);
            sem.validate();

            plan = new QueryPlan(cmds.get(i), sem, perfLogger.getStartTime(PerfLogger.DRIVER_RUN));

            if (false) {
                String queryPlanFileName = ctx.getLocalScratchDir(true) + Path.SEPARATOR_CHAR + "queryplan.xml";
                LOG.info("query plan = " + queryPlanFileName);
                queryPlanFileName = new Path(queryPlanFileName).toUri().getPath();

                // serialize the queryPlan
                FileOutputStream fos = new FileOutputStream(queryPlanFileName);
                Utilities.serializeObject(plan, fos);
                fos.close();
            }

            // initialize FetchTask right here
            if (plan.getFetchTask() != null) {
                plan.getFetchTask().initialize(conf, plan, null);
            }

            // get the output schema
            schema = schemas.get(i);

        } catch (Exception e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }

        boolean requireLock = false;
        boolean ckLock = checkLockManager();

        if (ckLock) {
            boolean lockOnlyMapred = HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_LOCK_MAPRED_ONLY);
            if (lockOnlyMapred) {
                Queue<Task<? extends Serializable>> taskQueue = new LinkedList<Task<? extends Serializable>>();
                taskQueue.addAll(plan.getRootTasks());
                while (taskQueue.peek() != null) {
                    Task<? extends Serializable> tsk = taskQueue.remove();
                    requireLock = requireLock || tsk.requireLock();
                    if (requireLock) {
                        break;
                    }
                    if (tsk instanceof ConditionalTask) {
                        taskQueue.addAll(((ConditionalTask) tsk).getListTasks());
                    }
                    if (tsk.getChildTasks() != null) {
                        taskQueue.addAll(tsk.getChildTasks());
                    }
                    // Backup tasks are not added here, because a backup task
                    // should be the same type as the original task.
                }
            } else {
                requireLock = true;
            }
        }
        int ret;
        if (requireLock) {
            ret = acquireReadWriteLocks();
            if (ret != 0) {
                releaseLocks(ctx.getHiveLocks());
                //  return new CommandProcessorResponse(ret, errorMessage, SQLState);
            }
        }

        ret = execute();
        if (ret != 0) {
            //if needRequireLock is false, the release here will do nothing because there is no lock
            releaseLocks(ctx.getHiveLocks());
            //  return new CommandProcessorResponse(ret, errorMessage, SQLState);
        }

        //if needRequireLock is false, the release here will do nothing because there is no lock
        releaseLocks(ctx.getHiveLocks());

        //test output
        SessionState ss = SessionState.get();
        PrintStream out = ss.out;
        ArrayList<String> res = new ArrayList<String>();
        LOG.info("Output the result of query ID(" + i + "):");
        printHeader(this, out);
        int counter = 0;
        try {
            while (this.getResults(res)) {
                for (String r : res) {
                    out.println(r);
                }
                counter += res.size();
                res.clear();
                if (out.checkError()) {
                    break;
                }
            }
        } catch (IOException e) {
            console.printError("Failed with exception " + e.getClass().getName() + ":" + e.getMessage(),
                    "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
            ret = 1;
        }

    }

    return 0;
}

From source file:org.gvnix.flex.entity.ActionScriptEntityMetadataProvider.java

private void processJavaTypeChanged(String javaEntityId) {
    Queue<TypeMapping> relatedTypes = new LinkedList<TypeMapping>();
    List<ASFieldMetadata> processedProperties = new ArrayList<ASFieldMetadata>();

    JavaType javaType = PhysicalTypeIdentifier.getJavaType(javaEntityId);

    ActionScriptType asType = ActionScriptMappingUtils.toActionScriptType(javaType);
    String asEntityId = ASPhysicalTypeIdentifier.createIdentifier(asType, "src/main/flex");

    ASMutableClassOrInterfaceTypeDetails asTypeDetails = getASClassDetails(asEntityId);

    if (asTypeDetails == null) {
        return;
    }

    // Verify that the ActionScript class is enabled for remoting
    if (!isRemotingClass(javaType, asTypeDetails)) {
        return;
    }

    List<ASFieldMetadata> declaredFields = asTypeDetails.getDeclaredFields();

    MemberDetails memberDetails = getMemberDetails(javaType);

    if (memberDetails == null) {
        return;
    }

    for (MethodMetadata method : MemberFindingUtils.getMethods(memberDetails)) {
        if (BeanInfoUtils.isMutatorMethod(method)) {
            JavaSymbolName propertyName = BeanInfoUtils.getPropertyNameForJavaBeanMethod(method);
            FieldMetadata javaField = BeanInfoUtils.getFieldForPropertyName(memberDetails, propertyName);

            // TODO - We don't add any meta-tags and we set the field to
            // public - any other choice? Probably not until
            // we potentially add some sort of support for AS getters and
            // setters
            ASFieldMetadata asField = ActionScriptMappingUtils.toASFieldMetadata(asEntityId, javaField, true);

            int existingIndex = declaredFields.indexOf(asField);
            if (existingIndex > -1) {
                // Field already exists... does it need to be updated? Should we
                // even do this, or just assume that if the type is different the
                // user changed it intentionally?
                ASFieldMetadata existingField = declaredFields.get(existingIndex);
                if (!existingField.getFieldType().equals(asField.getFieldType())) {
                    asTypeDetails.updateField(asField, false);
                }
            } else {
                asTypeDetails.addField(asField, false);
            }

            relatedTypes.addAll(findRequiredMappings(javaField, asField));

            processedProperties.add(asField);
        }
    }

    // TODO - how should we handle fields that don't exist in the Java object?
    // For now we will just remove them; we should add some way to turn this
    // off later.
    for (ASFieldMetadata asField : asTypeDetails.getDeclaredFields()) {
        if (!processedProperties.contains(asField)) {
            asTypeDetails.removeField(asField.getFieldName());
        }
    }

    asTypeDetails.commit();

    // Now trigger the creation of any newly added related types
    while (!relatedTypes.isEmpty()) {
        TypeMapping mapping = relatedTypes.poll();
        createActionScriptMirrorClass(mapping.getMetadataId(), mapping.getAsType(), mapping.getJavaType());
    }
}

From source file:org.unitime.timetable.backup.SessionBackup.java

@Override
public void backup(OutputStream out, Progress progress, Long sessionId) throws IOException {
    iOut = CodedOutputStream.newInstance(out);
    iProgress = progress;
    iSessionId = sessionId;
    iHibSession = new _RootDAO().createNewSession();
    iHibSession.setCacheMode(CacheMode.IGNORE);
    iHibSessionFactory = iHibSession.getSessionFactory();
    try {
        iProgress.setStatus("Exporting Session");
        iProgress.setPhase("Loading Model", 3);
        TreeSet<ClassMetadata> allMeta = new TreeSet<ClassMetadata>(new Comparator<ClassMetadata>() {
            @Override
            public int compare(ClassMetadata m1, ClassMetadata m2) {
                return m1.getEntityName().compareTo(m2.getEntityName());
            }
        });
        allMeta.addAll(iHibSessionFactory.getAllClassMetadata().values());
        iProgress.incProgress();

        Queue<QueueItem> queue = new LinkedList<QueueItem>();

        queue.add(new QueueItem(iHibSessionFactory.getClassMetadata(Session.class), null, "uniqueId",
                Relation.None));

        Set<String> avoid = new HashSet<String>();
        // avoid following relations
        avoid.add(TimetableManager.class.getName() + ".departments");
        avoid.add(TimetableManager.class.getName() + ".solverGroups");
        avoid.add(DistributionType.class.getName() + ".departments");
        avoid.add(LastLikeCourseDemand.class.getName() + ".student");
        avoid.add(Student.class.getName() + ".lastLikeCourseDemands");

        Set<String> disallowedNotNullRelations = new HashSet<String>();
        disallowedNotNullRelations.add(Assignment.class.getName() + ".datePattern");
        disallowedNotNullRelations.add(Assignment.class.getName() + ".timePattern");
        disallowedNotNullRelations.add(LastLikeCourseDemand.class.getName() + ".student");
        disallowedNotNullRelations.add(OnlineSectioningLog.class.getName() + ".session");

        Map<String, List<QueueItem>> data = new HashMap<String, List<QueueItem>>();
        List<QueueItem> sessions = new ArrayList<QueueItem>();
        sessions.add(queue.peek());
        data.put(queue.peek().name(), sessions);

        QueueItem item = null;
        while ((item = queue.poll()) != null) {
            if (item.size() == 0)
                continue;
            for (ClassMetadata meta : allMeta) {
                if (meta.hasSubclasses())
                    continue;
                for (int i = 0; i < meta.getPropertyNames().length; i++) {
                    String property = meta.getPropertyNames()[i];
                    if (disallowedNotNullRelations.contains(meta.getEntityName() + "." + property)
                            || meta.getPropertyNullability()[i])
                        continue;
                    Type type = meta.getPropertyTypes()[i];
                    if (type instanceof EntityType && type.getReturnedClass().equals(item.clazz())) {
                        QueueItem qi = new QueueItem(meta, item, property, Relation.Parent);
                        if (!data.containsKey(qi.name())) {
                            List<QueueItem> items = new ArrayList<QueueItem>();
                            data.put(qi.name(), items);
                            queue.add(qi);
                            items.add(qi);
                            if (qi.size() > 0)
                                iProgress.info("Parent: " + qi);
                        }
                    }
                }
            }
        }
        iProgress.incProgress();

        for (List<QueueItem> list : data.values())
            queue.addAll(list);

        // The following part is needed to ensure that instructor distribution preferences are saved including their distribution types 
        List<QueueItem> distributions = new ArrayList<QueueItem>();
        for (QueueItem instructor : data.get(DepartmentalInstructor.class.getName())) {
            QueueItem qi = new QueueItem(iHibSessionFactory.getClassMetadata(DistributionPref.class),
                    instructor, "owner", Relation.Parent);
            distributions.add(qi);
            queue.add(qi);
            if (qi.size() > 0)
                iProgress.info("Extra: " + qi);
        }
        data.put(DistributionPref.class.getName(), distributions);

        while ((item = queue.poll()) != null) {
            if (item.size() == 0)
                continue;
            for (int i = 0; i < item.meta().getPropertyNames().length; i++) {
                String property = item.meta().getPropertyNames()[i];
                Type type = item.meta().getPropertyTypes()[i];
                if (type instanceof EntityType) {
                    if (avoid.contains(item.name() + "." + property))
                        continue;

                    ClassMetadata meta = iHibSessionFactory.getClassMetadata(type.getReturnedClass());
                    if (item.contains(meta.getEntityName()))
                        continue;

                    QueueItem qi = new QueueItem(meta, item, property, Relation.One);
                    List<QueueItem> items = data.get(qi.name());
                    if (items == null) {
                        items = new ArrayList<QueueItem>();
                        data.put(qi.name(), items);
                    }
                    queue.add(qi);
                    items.add(qi);

                    if (qi.size() > 0)
                        iProgress.info("One: " + qi);
                }
                if (type instanceof CollectionType) {
                    if (avoid.contains(item.name() + "." + property))
                        continue;

                    ClassMetadata meta = iHibSessionFactory.getClassMetadata(((CollectionType) type)
                            .getElementType((SessionFactoryImplementor) iHibSessionFactory).getReturnedClass());
                    if (meta == null || item.contains(meta.getEntityName()))
                        continue;

                    QueueItem qi = new QueueItem(meta, item, property, Relation.Many);
                    List<QueueItem> items = data.get(qi.name());
                    if (items == null) {
                        items = new ArrayList<QueueItem>();
                        data.put(qi.name(), items);
                    }
                    queue.add(qi);
                    items.add(qi);

                    if (qi.size() > 0)
                        iProgress.info("Many: " + qi);
                }
            }
        }
        iProgress.incProgress();

        Map<String, Set<Serializable>> allExportedIds = new HashMap<String, Set<Serializable>>();
        for (String name : new TreeSet<String>(data.keySet())) {
            List<QueueItem> list = data.get(name);
            Map<String, TableData.Table.Builder> tables = new HashMap<String, TableData.Table.Builder>();
            for (QueueItem current : list) {
                if (current.size() == 0)
                    continue;
                iProgress.info("Loading " + current);
                List<Object> objects = current.list();
                if (objects == null || objects.isEmpty())
                    continue;
                iProgress.setPhase(current.abbv() + " [" + objects.size() + "]", objects.size());
                objects: for (Object object : objects) {
                    iProgress.incProgress();

                    // Get meta data (check for sub-classes)
                    ClassMetadata meta = iHibSessionFactory.getClassMetadata(object.getClass());
                    if (meta == null)
                        meta = current.meta();
                    if (meta.hasSubclasses()) {
                        for (Iterator i = iHibSessionFactory.getAllClassMetadata().entrySet().iterator(); i
                                .hasNext();) {
                            Map.Entry entry = (Map.Entry) i.next();
                            ClassMetadata classMetadata = (ClassMetadata) entry.getValue();
                            if (classMetadata.getMappedClass().isInstance(object)
                                    && !classMetadata.hasSubclasses()) {
                                meta = classMetadata;
                                break;
                            }
                        }
                    }

                    // Get unique identifier
                    Serializable id = meta.getIdentifier(object, (SessionImplementor) iHibSession);

                    // Check if already exported
                    Set<Serializable> exportedIds = allExportedIds.get(meta.getEntityName());
                    if (exportedIds == null) {
                        exportedIds = new HashSet<Serializable>();
                        allExportedIds.put(meta.getEntityName(), exportedIds);
                    }
                    if (!exportedIds.add(id))
                        continue;

                    // Check relation to an academic session (if exists)
                    for (String property : meta.getPropertyNames()) {
                        Type type = meta.getPropertyType(property);
                        if (type instanceof EntityType && type.getReturnedClass().equals(Session.class)) {
                            Session s = (Session) meta.getPropertyValue(object, property);
                            if (s != null && !s.getUniqueId().equals(iSessionId)) {
                                iProgress.warn(meta.getEntityName()
                                        .substring(meta.getEntityName().lastIndexOf('.') + 1) + "@" + id
                                        + " belongs to a different academic session (" + s + ")");
                                continue objects; // wrong session
                            }
                        }
                    }

                    // Get appropriate table
                    TableData.Table.Builder table = tables.get(meta.getEntityName());
                    if (table == null) {
                        table = TableData.Table.newBuilder();
                        tables.put(meta.getEntityName(), table);
                        table.setName(meta.getEntityName());
                    }

                    // Export object
                    TableData.Record.Builder record = TableData.Record.newBuilder();
                    record.setId(id.toString());
                    for (String property : meta.getPropertyNames()) {
                        Type type = meta.getPropertyType(property);
                        Object value = meta.getPropertyValue(object, property);
                        if (value == null)
                            continue;
                        TableData.Element.Builder element = TableData.Element.newBuilder();
                        element.setName(property);
                        if (type instanceof PrimitiveType) {
                            element.addValue(((PrimitiveType) type).toString(value));
                        } else if (type instanceof StringType) {
                            element.addValue(((StringType) type).toString((String) value));
                        } else if (type instanceof BinaryType) {
                            element.addValueBytes(ByteString.copyFrom((byte[]) value));
                        } else if (type instanceof TimestampType) {
                            element.addValue(((TimestampType) type).toString((Date) value));
                        } else if (type instanceof DateType) {
                            element.addValue(((DateType) type).toString((Date) value));
                        } else if (type instanceof EntityType) {
                            List<Object> ids = current.relation(property, id, false);
                            if (ids != null)
                                for (Object i : ids)
                                    element.addValue(i.toString());
                            iHibSession.evict(value);
                        } else if (type instanceof CustomType && value instanceof Document) {
                            if (object instanceof CurriculumClassification && property.equals("students"))
                                continue;
                            StringWriter w = new StringWriter();
                            XMLWriter x = new XMLWriter(w, OutputFormat.createCompactFormat());
                            x.write((Document) value);
                            x.flush();
                            x.close();
                            element.addValue(w.toString());
                        } else if (type instanceof CollectionType) {
                            List<Object> ids = current.relation(property, id, false);
                            if (ids != null)
                                for (Object i : ids)
                                    element.addValue(i.toString());
                        } else if (type instanceof EmbeddedComponentType
                                && property.equalsIgnoreCase("uniqueCourseNbr")) {
                            continue;
                        } else {
                            iProgress.warn("Unknown data type: " + type + " (property " + meta.getEntityName()
                                    + "." + property + ", class " + value.getClass() + ")");
                            continue;
                        }
                        record.addElement(element.build());

                    }
                    table.addRecord(record.build());
                    iHibSession.evict(object);
                }
                current.clearCache();
            }

            for (TableData.Table.Builder table : tables.values()) {
                add(table.build());
            }
        }

        /*
        // Skip ConstraintInfo
        if (!iData.containsKey(ConstraintInfo.class.getName()))
            iData.put(ConstraintInfo.class.getName(), new QueueItem(
                    iHibSessionFactory.getClassMetadata(ConstraintInfo.class), null, null, Relation.Empty));

        for (String name : items)
            export(iData.get(name));

        while (true) {
            List<Object> objects = new ArrayList<Object>();
            ClassMetadata meta = null;
            for (Entity e : iObjects) {
                if (e.exported()) continue;
                if (objects.isEmpty() || meta.getEntityName().equals(e.name())) {
                    meta = e.meta();
                    objects.add(e.object());
                    e.notifyExported();
                }
            }
            if (objects.isEmpty()) break;
            export(meta, objects, null);
        }
        */
        iProgress.setStatus("All done.");
    } finally {
        iHibSession.close();
    }
}

From source file:replicatorg.app.gcode.GCodeParser.java

private void buildGCodes(GCodeCommand gcode, Queue<DriverCommand> commands) throws GCodeException {
    if (!gcode.hasCode('G')) {
        throw new GCodeException("Not a G code!");
    }

    // start us off at our current position...
    Point5d pos = driver.getCurrentPosition(false);

    // initialize our points, etc.
    double iVal = convertToMM(gcode.getCodeValue('I'), units); // X offset for arcs
    double jVal = convertToMM(gcode.getCodeValue('J'), units); // Y offset for arcs
    @SuppressWarnings("unused")
    double kVal = convertToMM(gcode.getCodeValue('K'), units); // Z offset for arcs
    @SuppressWarnings("unused")
    double qVal = convertToMM(gcode.getCodeValue('Q'), units); // feed increment for G83
    double rVal = convertToMM(gcode.getCodeValue('R'), units); // arc radius
    double xVal = convertToMM(gcode.getCodeValue('X'), units); // X units
    double yVal = convertToMM(gcode.getCodeValue('Y'), units); // Y units
    double zVal = convertToMM(gcode.getCodeValue('Z'), units); // Z units
    double aVal = convertToMM(gcode.getCodeValue('A'), units); // A units
    double bVal = convertToMM(gcode.getCodeValue('B'), units); // B units
    // Note: The E axis is treated internally as the A or B axis
    double eVal = convertToMM(gcode.getCodeValue('E'), units); // E units

    // adjust for our offsets
    xVal += currentOffset.x;
    yVal += currentOffset.y;
    zVal += currentOffset.z;

    // absolute just specifies the new position
    if (absoluteMode) {
        if (gcode.hasCode('X'))
            pos.setX(xVal);
        if (gcode.hasCode('Y'))
            pos.setY(yVal);
        if (gcode.hasCode('Z'))
            pos.setZ(zVal);
        if (gcode.hasCode('A'))
            pos.setA(aVal);
        if (gcode.hasCode('E')) {
            // can't assume tool 0 == a, it's configurable in machine.xml!
            if ("B".equals(driver.getMachine().getTool(tool).getMotorStepperAxis().name())) {
                // Base.logger.warning("Mapping axis E to axis: " + driver.getMachine().getTool(tool).getMotorStepperAxis().name());
                pos.setB(eVal);
            } else {
                // Base.logger.warning("Mapping axis E to axis: " + driver.getMachine().getTool(tool).getMotorStepperAxis().name());
                pos.setA(eVal);
            }
        }
        if (gcode.hasCode('B'))
            pos.setB(bVal);
    }
    // relative specifies a delta
    else {
        if (gcode.hasCode('X'))
            pos.setX(pos.x() + xVal);
        if (gcode.hasCode('Y'))
            pos.setY(pos.y() + yVal);
        if (gcode.hasCode('Z'))
            pos.setZ(pos.z() + zVal);
        if (gcode.hasCode('A'))
            pos.setA(pos.a() + aVal);
        if (gcode.hasCode('E')) {
            // can't assume tool 0 == a, it's configurable in machine.xml!
            if ("B".equals(driver.getMachine().getTool(tool).getMotorStepperAxis().name())) {
                // Base.logger.warning("Mapping axis E to axis: " + driver.getMachine().getTool(tool).getMotorStepperAxis().name());
                pos.setB(pos.b() + eVal);
            } else {
                // Base.logger.warning("Mapping axis E to axis: " + driver.getMachine().getTool(tool).getMotorStepperAxis().name());
                pos.setA(pos.a() + eVal);
            }
        }
        if (gcode.hasCode('B'))
            pos.setB(pos.b() + bVal);
    }

    // Get feedrate if supplied
    if (gcode.hasCode('F')) {
        // Read feedrate in mm/min.
        feedrate = gcode.getCodeValue('F');

        // TODO: Why do we do this here, and not in individual commands?
        commands.add(new replicatorg.drivers.commands.SetFeedrate(feedrate));
    }

    GCodeEnumeration codeEnum = GCodeEnumeration.getGCode("G", (int) gcode.getCodeValue('G'));

    // handle unrecognized GCode
    if (codeEnum == null) {
        String message = "Unrecognized GCode! G" + (int) gcode.getCodeValue('G');
        Base.logger.log(Level.SEVERE, message);
        throw new GCodeException(message);
    }

    switch (codeEnum) {
    // these are basically the same thing, but G0 is supposed to do it as quickly as possible.
    // Rapid Positioning
    case G0:
        if (gcode.hasCode('F')) {
            // Allow user to explicitly override G0 feedrate if they so desire.
            commands.add(new replicatorg.drivers.commands.SetFeedrate(feedrate));
        } else {
            // Compute the most rapid possible rate for this move.
            Point5d diff = driver.getCurrentPosition(false);
            diff.sub(pos);
            diff.absolute();
            double length = diff.length();
            double selectedFR = Double.MAX_VALUE;
            Point5d maxFR = driver.getMaximumFeedrates();
            // Compute the feedrate assuming maximum feed along each axis,
            // and select the slowest option.
            for (int idx = 0; idx < 3; idx++) {
                double axisMove = diff.get(idx);
                if (axisMove == 0) {
                    continue;
                }
                double candidate = maxFR.get(idx) * length / axisMove;
                if (candidate < selectedFR) {
                    selectedFR = candidate;
                }
            }
            // Add a sane default for the null move, just in case.
            if (selectedFR == Double.MAX_VALUE) {
                selectedFR = maxFR.get(0);
            }
            commands.add(new replicatorg.drivers.commands.SetFeedrate(selectedFR));
        }
        commands.add(new replicatorg.drivers.commands.QueuePoint(pos));
        break;
    // Linear Interpolation
    case G1:
        // set our target.
        commands.add(new replicatorg.drivers.commands.SetFeedrate(feedrate));
        commands.add(new replicatorg.drivers.commands.QueuePoint(pos));
        break;
    // Clockwise arc
    case G2:
        // Counterclockwise arc
    case G3: {
        // call our arc drawing function.
        // Note: We don't support 5D
        if (gcode.hasCode('I') || gcode.hasCode('J')) {
            // our centerpoint
            Point5d center = new Point5d();
            Point5d current = driver.getCurrentPosition(false);
            center.setX(current.x() + iVal);
            center.setY(current.y() + jVal);

            // Get the points for the arc
            if (codeEnum == GCodeEnumeration.G2)
                commands.addAll(drawArc(center, pos, true));
            else
                commands.addAll(drawArc(center, pos, false));
        }
        // or we want a radius based one
        else if (gcode.hasCode('R')) {
            throw new GCodeException("G02/G03 arcs with (R)adius parameter are not supported yet.");
        }
    }
        break;
    // dwell
    case G4:
        commands.add(new replicatorg.drivers.commands.Delay((long) gcode.getCodeValue('P')));
        break;
    case G10:
        if (gcode.hasCode('P')) {
            int offsetSystemNum = ((int) gcode.getCodeValue('P'));
            if (offsetSystemNum >= 1 && offsetSystemNum <= 6) {
                if (gcode.hasCode('X'))
                    commands.add(new replicatorg.drivers.commands.SetAxisOffset(AxisId.X, offsetSystemNum,
                            gcode.getCodeValue('X')));
                if (gcode.hasCode('Y'))
                    commands.add(new replicatorg.drivers.commands.SetAxisOffset(AxisId.Y, offsetSystemNum,
                            gcode.getCodeValue('Y')));
                if (gcode.hasCode('Z'))
                    commands.add(new replicatorg.drivers.commands.SetAxisOffset(AxisId.Z, offsetSystemNum,
                            gcode.getCodeValue('Z')));
            }
        } else
            Base.logger.warning("No coordinate system indicated; use G10 Pn, where n is 1-6.");
        break;
    // Inches for Units
    case G20:
    case G70:
        units = UNITS_INCHES;
        curveSection = curveSectionInches;
        break;
    // mm for Units
    case G21:
    case G71:
        units = UNITS_MM;
        curveSection = curveSectionMM;
        break;
    // This should be "return to home".  We need to introduce new GCodes for homing.
    //replaced by G161, G162
    case G28: {
        // home all axes?
        EnumSet<AxisId> axes = getAxes(gcode);

        if (gcode.hasCode('F')) {
            commands.add(new replicatorg.drivers.commands.HomeAxes(axes, LinearDirection.POSITIVE, feedrate));
        } else {
            commands.add(new replicatorg.drivers.commands.HomeAxes(axes, LinearDirection.POSITIVE));
        }
    }
        break;
    // home negative.
    case G161: {
        // home all axes?
        EnumSet<AxisId> axes = getAxes(gcode);

        if (gcode.hasCode('F')) {
            commands.add(new replicatorg.drivers.commands.HomeAxes(axes, LinearDirection.NEGATIVE, feedrate));
        } else {
            commands.add(new replicatorg.drivers.commands.HomeAxes(axes, LinearDirection.NEGATIVE));
        }
    }
        break;
    // home positive.
    case G162: {
        // home all axes?
        EnumSet<AxisId> axes = getAxes(gcode);
        if (gcode.hasCode('F')) {
            commands.add(new replicatorg.drivers.commands.HomeAxes(axes, LinearDirection.POSITIVE, feedrate));
        } else {
            commands.add(new replicatorg.drivers.commands.HomeAxes(axes, LinearDirection.POSITIVE));
        }
    }
        break;
    // master offset
    case G53:
        currentOffset = driver.getOffset(0);
        break;
    // fixture offset 1
    case G54:
        currentOffset = driver.getOffset(1);
        break;
    // fixture offset 2
    case G55:
        currentOffset = driver.getOffset(2);
        break;
    // fixture offset 3
    case G56:
        currentOffset = driver.getOffset(3);
        break;
    // fixture offset 4
    case G57:
        currentOffset = driver.getOffset(4);
        break;
    // fixture offset 5
    case G58:
        currentOffset = driver.getOffset(5);
        break;
    // fixture offset 6
    case G59:
        currentOffset = driver.getOffset(6);
        break;
    // Absolute Positioning
    case G90:
        absoluteMode = true;
        break;
    // Incremental Positioning
    case G91:
        absoluteMode = false;
        break;
    // Set position
    case G92:
        Point5d current = driver.getCurrentPosition(false);

        if (gcode.hasCode('X'))
            current.setX(xVal);
        if (gcode.hasCode('Y'))
            current.setY(yVal);
        if (gcode.hasCode('Z'))
            current.setZ(zVal);
        if (gcode.hasCode('A'))
            current.setA(aVal);
        if (gcode.hasCode('E')) {
            // can't assume tool 0 == a, it's configurable in machine.xml!
            if ("B".equals(driver.getMachine().getTool(tool).getMotorStepperAxis().name())) {
                // Base.logger.warning("Resetting position of axis E to axis: " + driver.getMachine().getTool(tool).getMotorStepperAxis().name());
                current.setB(eVal);
            } else {
                // Base.logger.warning("Resetting position of axis E to axis: " + driver.getMachine().getTool(tool).getMotorStepperAxis().name());
                current.setA(eVal);
            }
        }
        if (gcode.hasCode('B'))
            current.setB(bVal);

        commands.add(new replicatorg.drivers.commands.SetCurrentPosition(current));
        break;
    // feed rate mode
    // case G93: // inverse time feed rate
    // case G94: // IPM feed rate (our default)
    // case G95: // IPR feed rate
    //     TODO: make this work.
    //     break;
    // spindle speed rate
    case G97:
        commands.add(new replicatorg.drivers.commands.SetSpindleRPM(gcode.getCodeValue('S')));
        break;
    case G130:
        /// TODO:  axis ids should not be hard coded
        if (gcode.hasCode('X'))
            commands.add(new replicatorg.drivers.commands.SetStepperVoltage(0, (int) gcode.getCodeValue('X')));
        if (gcode.hasCode('Y'))
            commands.add(new replicatorg.drivers.commands.SetStepperVoltage(1, (int) gcode.getCodeValue('Y')));
        if (gcode.hasCode('Z'))
            commands.add(new replicatorg.drivers.commands.SetStepperVoltage(2, (int) gcode.getCodeValue('Z')));
        if (gcode.hasCode('A'))
            commands.add(new replicatorg.drivers.commands.SetStepperVoltage(3, (int) gcode.getCodeValue('A')));
        if (gcode.hasCode('B'))
            commands.add(new replicatorg.drivers.commands.SetStepperVoltage(4, (int) gcode.getCodeValue('B')));
        break;
    // error, error!
    default:
        throw new GCodeException("Unknown G code: G" + (int) gcode.getCodeValue('G'));
    }
}
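
buildGCodes above follows a producer pattern: the caller supplies the Queue, the method adds single commands with add, and addAll merges whole batches returned by helpers such as drawArc. A generic sketch of that shape (Command, Move, and approximateArc are made-up stand-ins, not ReplicatorG types):

import java.util.ArrayDeque;
import java.util.Arrays;
import java.util.List;
import java.util.Queue;

public class CommandProducerSketch {
    interface Command {
    }

    static class Move implements Command {
        final double x, y;

        Move(double x, double y) {
            this.x = x;
            this.y = y;
        }
    }

    // A helper that expands one logical operation into several commands,
    // the way drawArc expands an arc into line segments.
    static List<Command> approximateArc() {
        return Arrays.<Command>asList(new Move(1, 0), new Move(1, 1), new Move(0, 1));
    }

    // The caller owns the queue; the producer only appends to it.
    static void build(Queue<Command> commands) {
        commands.add(new Move(0, 0));      // a single command
        commands.addAll(approximateArc()); // a whole batch from a helper
    }

    public static void main(String[] args) {
        Queue<Command> commands = new ArrayDeque<Command>();
        build(commands);
        System.out.println(commands.size()); // 4
    }
}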