Example usage for java.util.Deque.isEmpty()

Introduction

This page lists usage examples for java.util.Deque.isEmpty(), collected from open-source projects.

Prototype

boolean isEmpty();

Document

Returns true if this collection contains no elements.
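
Before the project examples below, here is a minimal, self-contained sketch of the typical pattern: use a Deque as a LIFO stack and drain it with isEmpty() as the loop condition. The class name and values are hypothetical, for illustration only.

import java.util.ArrayDeque;
import java.util.Deque;

public class DequeIsEmptyExample {
    public static void main(String[] args) {
        // Treat the deque as a LIFO stack.
        Deque<String> stack = new ArrayDeque<>();
        stack.push("first");
        stack.push("second");
        stack.push("third");

        // isEmpty() is the usual termination check when draining a deque;
        // calling pop() on an empty deque would throw NoSuchElementException.
        while (!stack.isEmpty()) {
            System.out.println(stack.pop()); // prints: third, second, first
        }

        System.out.println(stack.isEmpty()); // true once drained
    }
}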

Usage

From source file:org.rhq.core.pc.inventory.InventoryManager.java

private void syncDriftDefinitionsRecursively(Resource resource) {
    if (resource.getInventoryStatus() != InventoryStatus.COMMITTED) {
        return;
    }

    Deque<Resource> resources = new LinkedList<Resource>();
    resources.push(resource);

    Set<Integer> resourceIds = new HashSet<Integer>();

    // Depth-first traversal of the resource tree: pop a resource, record it if it
    // supports drift management, and push its children for later processing.
    while (!resources.isEmpty()) {
        Resource r = resources.pop();
        if (supportsDriftManagement(r)) {
            resourceIds.add(r.getId());
        }
        for (Resource child : r.getChildResources()) {
            resources.push(child);
        }
    }

    DriftSyncManager driftSyncMgr = createDriftSyncManager();
    driftSyncMgr.syncWithServer(resourceIds);
}

From source file:de.interactive_instruments.ShapeChange.Target.ArcGISWorkspace.ArcGISWorkspace.java

private int establishEAPackageHierarchy(ClassInfo ci, int mainWorkspaceSubPkgId) throws EAException {

    // get path up to but not including the application schema package
    Deque<PackageInfo> pathToAppSchemaAsStack = new ArrayDeque<PackageInfo>();

    if (ci.pkg() != this.appSchemaPkg) {

        PackageInfo pkg = ci.pkg();

        while (pkg != null && pkg != this.appSchemaPkg) {

            pathToAppSchemaAsStack.addFirst(pkg);

            pkg = pkg.owner();
        }
    }

    if (pathToAppSchemaAsStack.isEmpty()) {

        // class is situated in app schema package and thus shall be created
        // in main workspace sub-package
        return mainWorkspaceSubPkgId;

    } else {

        // walk down the path, create packages as needed

        Map<PackageInfo, Integer> eaPkgIdByModelPkg = eaPkgIdByModelPkg_byWorkspaceSubPkgId
                .get(mainWorkspaceSubPkgId);

        Integer eaParentPkgId = mainWorkspaceSubPkgId;
        Integer eaPkgId = null;

        while (!pathToAppSchemaAsStack.isEmpty()) {

            PackageInfo pi = pathToAppSchemaAsStack.removeFirst();

            if (eaPkgIdByModelPkg.containsKey(pi)) {

                eaPkgId = eaPkgIdByModelPkg.get(pi);

            } else {

                // create the EA package
                eaPkgId = EAModelUtil.createEAPackage(rep, pi, eaParentPkgId);
                eaPkgIdByModelPkg.put(pi, eaPkgId);
            }

            eaParentPkgId = eaPkgId;
        }

        return eaPkgId;
    }
}

From source file:org.apache.asterix.metadata.utils.TypeUtil.java

/**
 * Merges typed index fields with the specified recordType, allowing indexed fields to be optional.
 * E.g., the type { "personId": int32, "name": string, "address": { "street": string } } with typed indexes
 * on age:int32 and address.state:string will be merged into the type { "personId": int32, "name": string,
 * "age": int32?, "address": { "street": string, "state": string? } }. Used by open indexes to enforce
 * the type of an indexed record.
 */
public static Pair<ARecordType, ARecordType> createEnforcedType(ARecordType recordType, ARecordType metaType,
        List<Index> indexes) throws AlgebricksException {
    ARecordType enforcedRecordType = recordType;
    ARecordType enforcedMetaType = metaType;
    for (Index index : indexes) {
        if (!index.isSecondaryIndex() || !index.isEnforcingKeyFileds()) {
            continue;
        }
        if (index.hasMetaFields()) {
            throw new AlgebricksException("Indexing an open field is only supported on the record part");
        }
        for (int i = 0; i < index.getKeyFieldNames().size(); i++) {
            Deque<Pair<ARecordType, String>> nestedTypeStack = new ArrayDeque<>();
            List<String> splits = index.getKeyFieldNames().get(i);
            ARecordType nestedFieldType = enforcedRecordType;
            boolean openRecords = false;
            String bridgeName = nestedFieldType.getTypeName();
            int j;
            // Build the stack for the enforced type
            for (j = 1; j < splits.size(); j++) {
                nestedTypeStack.push(new Pair<>(nestedFieldType, splits.get(j - 1)));
                bridgeName = nestedFieldType.getTypeName();
                nestedFieldType = (ARecordType) enforcedRecordType.getSubFieldType(splits.subList(0, j));
                if (nestedFieldType == null) {
                    openRecords = true;
                    break;
                }
            }
            if (openRecords) {
                // create the smallest record
                enforcedRecordType = new ARecordType(splits.get(splits.size() - 2),
                        new String[] { splits.get(splits.size() - 1) },
                        new IAType[] { AUnionType.createUnknownableType(index.getKeyFieldTypes().get(i)) },
                        true);
                // create the open part of the nested field
                for (int k = splits.size() - 3; k > (j - 2); k--) {
                    enforcedRecordType = new ARecordType(splits.get(k), new String[] { splits.get(k + 1) },
                            new IAType[] { AUnionType.createUnknownableType(enforcedRecordType) }, true);
                }
                // Bridge the gap
                Pair<ARecordType, String> gapPair = nestedTypeStack.pop();
                ARecordType parent = gapPair.first;

                IAType[] parentFieldTypes = ArrayUtils.addAll(parent.getFieldTypes().clone(),
                        new IAType[] { AUnionType.createUnknownableType(enforcedRecordType) });
                enforcedRecordType = new ARecordType(bridgeName,
                        ArrayUtils.addAll(parent.getFieldNames(), enforcedRecordType.getTypeName()),
                        parentFieldTypes, true);
            } else {
                //Schema is closed all the way to the field
                //enforced fields are either null or strongly typed
                Map<String, IAType> recordNameTypesMap = TypeUtil.createRecordNameTypeMap(nestedFieldType);
                // if an enforced field already exists and the type is correct
                IAType enforcedFieldType = recordNameTypesMap.get(splits.get(splits.size() - 1));
                if (enforcedFieldType != null && enforcedFieldType.getTypeTag() == ATypeTag.UNION
                        && ((AUnionType) enforcedFieldType).isUnknownableType()) {
                    enforcedFieldType = ((AUnionType) enforcedFieldType).getActualType();
                }
                if (enforcedFieldType != null && !ATypeHierarchy.canPromote(enforcedFieldType.getTypeTag(),
                        index.getKeyFieldTypes().get(i).getTypeTag())) {
                    throw new AlgebricksException("Cannot enforce field " + index.getKeyFieldNames().get(i)
                            + " to have type " + index.getKeyFieldTypes().get(i));
                }
                if (enforcedFieldType == null) {
                    recordNameTypesMap.put(splits.get(splits.size() - 1),
                            AUnionType.createUnknownableType(index.getKeyFieldTypes().get(i)));
                }
                enforcedRecordType = new ARecordType(nestedFieldType.getTypeName(),
                        recordNameTypesMap.keySet().toArray(new String[recordNameTypesMap.size()]),
                        recordNameTypesMap.values().toArray(new IAType[recordNameTypesMap.size()]),
                        nestedFieldType.isOpen());
            }

            // Create the enforced type for the nested fields in the schema, from the ground up
            if (!nestedTypeStack.isEmpty()) {
                while (!nestedTypeStack.isEmpty()) {
                    Pair<ARecordType, String> nestedTypePair = nestedTypeStack.pop();
                    ARecordType nestedRecType = nestedTypePair.first;
                    IAType[] nestedRecTypeFieldTypes = nestedRecType.getFieldTypes().clone();
                    nestedRecTypeFieldTypes[nestedRecType
                            .getFieldIndex(nestedTypePair.second)] = enforcedRecordType;
                    enforcedRecordType = new ARecordType(nestedRecType.getTypeName() + "_enforced",
                            nestedRecType.getFieldNames(), nestedRecTypeFieldTypes, nestedRecType.isOpen());
                }
            }
        }
    }
    return new Pair<>(enforcedRecordType, enforcedMetaType);
}

From source file:specminers.evaluation.RandoopGeneratedTestParser.java

private void loadTestMethods() {
    this.testMethods = new LinkedList<>();
    this.testMethodDetails = new HashMap<>();
    String testMethodRegularExpressionDeclaration = "^public\\svoid\\stest(\\d+)\\(\\).+$";

    // Each '{' encountered pushes onto openBraces and each '}' pops; once the
    // deque isEmpty() again, the current test method's body has been fully parsed.
    Deque<String> openBraces = new ArrayDeque<>();
    String currentMethod = null;
    List<String> statementsBeforeTryCatch = null;
    String currentClass = "";
    boolean foundTryCatchForCurrentTestMethod = false;
    String firstVarFound = "";
    String varRegex = "var\\d+\\W";
    Pattern p = Pattern.compile(varRegex);

    for (int i = 0; i < lines.size(); i++) {
        String line = lines.get(i);

        if (line.matches(testMethodRegularExpressionDeclaration)) {
            openBraces.add("{");
            currentMethod = StringHelper.extractSingleValueWithRegex(line,
                    testMethodRegularExpressionDeclaration, 1);
            statementsBeforeTryCatch = new LinkedList<>();
            foundTryCatchForCurrentTestMethod = false;
        } else {
            if (currentMethod != null) {
                if (line.contains("try {")) {
                    foundTryCatchForCurrentTestMethod = true;
                }

                if (!line.contains("if (debug) { System.out.println();")) {
                    Matcher m = p.matcher(line);

                    if (m.find()) {
                        if (!foundTryCatchForCurrentTestMethod) {
                            if (StringUtils.isEmpty(firstVarFound)) {
                                firstVarFound = m.group(0).trim();
                            }

                            if (line.contains(firstVarFound)) {

                                if (line.contains(firstVarFound + " = new java.")) {
                                    int startIndex = line.indexOf("new") + 3;
                                    int endIndex = line.indexOf("(", startIndex);
                                    try {
                                        currentClass = line.substring(startIndex, endIndex);
                                        statementsBeforeTryCatch.add((currentClass + ".<init>()").trim());
                                    } catch (StringIndexOutOfBoundsException ex) {
                                        System.out.println("Error parsing line " + line + " startIndex "
                                                + startIndex + " endIndex " + endIndex + " from java file "
                                                + javaFile.getAbsolutePath() + " current test method test"
                                                + currentMethod);
                                    }
                                } else {
                                    if (line.contains(firstVarFound + ".")) {
                                        int startIndex = line.indexOf(firstVarFound + ".") + 4;
                                        int endIndex = line.lastIndexOf("(");
                                        String calledMethod = "";
                                        calledMethod = line.substring(startIndex, endIndex);
                                        statementsBeforeTryCatch.add(currentClass + calledMethod
                                                + (calledMethod.endsWith("(") ? "" : "(") + ")");

                                    }
                                }
                            }
                        }
                    }
                    for (int j = 0; j < line.length(); j++) {
                        if (line.charAt(j) == '{') {
                            openBraces.add("{");
                        }
                        if (line.charAt(j) == '}') {
                            if (!openBraces.isEmpty()) {
                                openBraces.pop();
                            }
                        }
                    }
                }

                if (openBraces.isEmpty()) {
                    String testMethodStatements = statementsBeforeTryCatch.stream().map(st -> st.trim())
                            .collect(Collectors.joining(""));
                    Map<String, String> currentTestDetails = new HashMap<>();
                    currentTestDetails.put("foundTryCatch", foundTryCatchForCurrentTestMethod + "");
                    currentTestDetails.put("statementsBeforeTryCatch", testMethodStatements);

                    if (StringUtils.isNotBlank(currentMethod)) {
                        testMethodDetails.put(currentMethod, currentTestDetails);
                    }

                    currentMethod = null;
                    statementsBeforeTryCatch.clear();
                    foundTryCatchForCurrentTestMethod = false;
                    firstVarFound = null;
                    // Prepare for new method
                }
            }
        }
    }

}

From source file:org.apache.tez.dag.api.DAG.java

@Private
public synchronized DAGPlan createDag(Configuration tezConf, Credentials extraCredentials,
        Map<String, LocalResource> tezJarResources, LocalResource binaryConfig, boolean tezLrsAsArchive,
        ServicePluginsDescriptor servicePluginsDescriptor, JavaOptsChecker javaOptsChecker) {
    Deque<String> topologicalVertexStack = verify(true);
    verifyLocalResources(tezConf);

    DAGPlan.Builder dagBuilder = DAGPlan.newBuilder();
    dagBuilder.setName(this.name);

    if (this.callerContext != null) {
        dagBuilder.setCallerContext(DagTypeConverters.convertCallerContextToProto(callerContext));
    }
    if (this.dagInfo != null && !this.dagInfo.isEmpty()) {
        dagBuilder.setDagInfo(this.dagInfo);
    }

    // Setup default execution context.
    VertexExecutionContext defaultContext = getDefaultExecutionContext();
    verifyExecutionContext(defaultContext, servicePluginsDescriptor, "DAGDefault");
    if (defaultContext != null) {
        DAGProtos.VertexExecutionContextProto contextProto = DagTypeConverters.convertToProto(defaultContext);
        dagBuilder.setDefaultExecutionContext(contextProto);
    }

    if (!vertexGroups.isEmpty()) {
        for (VertexGroup av : vertexGroups) {
            GroupInfo groupInfo = av.getGroupInfo();
            PlanVertexGroupInfo.Builder groupBuilder = PlanVertexGroupInfo.newBuilder();
            groupBuilder.setGroupName(groupInfo.getGroupName());
            for (Vertex v : groupInfo.getMembers()) {
                groupBuilder.addGroupMembers(v.getName());
            }
            groupBuilder.addAllOutputs(groupInfo.outputs);
            for (Map.Entry<String, InputDescriptor> entry : groupInfo.edgeMergedInputs.entrySet()) {
                groupBuilder.addEdgeMergedInputs(
                        PlanGroupInputEdgeInfo.newBuilder().setDestVertexName(entry.getKey())
                                .setMergedInput(DagTypeConverters.convertToDAGPlan(entry.getValue())));
            }
            dagBuilder.addVertexGroups(groupBuilder);
        }
    }

    Credentials dagCredentials = new Credentials();
    if (extraCredentials != null) {
        dagCredentials.mergeAll(extraCredentials);
    }
    dagCredentials.mergeAll(credentials);
    if (!commonTaskLocalFiles.isEmpty()) {
        dagBuilder.addAllLocalResource(DagTypeConverters.convertToDAGPlan(commonTaskLocalFiles));
    }

    Preconditions.checkArgument(topologicalVertexStack.size() == vertices.size(),
            "size of topologicalVertexStack is:" + topologicalVertexStack.size() + " while size of vertices is:"
                    + vertices.size() + ", make sure they are the same in order to sort the vertices");
    // Drain the topologically sorted vertex stack; the loop ends once isEmpty() is true.
    while (!topologicalVertexStack.isEmpty()) {
        Vertex vertex = vertices.get(topologicalVertexStack.pop());
        // infer credentials, resources and parallelism from data source
        Resource vertexTaskResource = vertex.getTaskResource();
        if (vertexTaskResource == null) {
            vertexTaskResource = Resource.newInstance(
                    tezConf.getInt(TezConfiguration.TEZ_TASK_RESOURCE_MEMORY_MB,
                            TezConfiguration.TEZ_TASK_RESOURCE_MEMORY_MB_DEFAULT),
                    tezConf.getInt(TezConfiguration.TEZ_TASK_RESOURCE_CPU_VCORES,
                            TezConfiguration.TEZ_TASK_RESOURCE_CPU_VCORES_DEFAULT));
        }
        Map<String, LocalResource> vertexLRs = Maps.newHashMap();
        vertexLRs.putAll(vertex.getTaskLocalFiles());
        List<DataSourceDescriptor> dataSources = vertex.getDataSources();
        for (DataSourceDescriptor dataSource : dataSources) {
            if (dataSource.getCredentials() != null) {
                dagCredentials.addAll(dataSource.getCredentials());
            }
            if (dataSource.getAdditionalLocalFiles() != null) {
                TezCommonUtils.addAdditionalLocalResources(dataSource.getAdditionalLocalFiles(), vertexLRs,
                        "Vertex " + vertex.getName());
            }
        }
        if (tezJarResources != null) {
            TezCommonUtils.addAdditionalLocalResources(tezJarResources, vertexLRs,
                    "Vertex " + vertex.getName());
        }
        if (binaryConfig != null) {
            vertexLRs.put(TezConstants.TEZ_PB_BINARY_CONF_NAME, binaryConfig);
        }
        int vertexParallelism = vertex.getParallelism();
        VertexLocationHint vertexLocationHint = vertex.getLocationHint();
        if (dataSources.size() == 1) {
            DataSourceDescriptor dataSource = dataSources.get(0);
            if (vertexParallelism == -1 && dataSource.getNumberOfShards() > -1) {
                vertexParallelism = dataSource.getNumberOfShards();
            }
            if (vertexLocationHint == null && dataSource.getLocationHint() != null) {
                vertexLocationHint = dataSource.getLocationHint();
            }
        }
        if (vertexParallelism == -1) {
            Preconditions.checkState(vertexLocationHint == null,
                    "Cannot specify vertex location hint without specifying vertex parallelism. Vertex: "
                            + vertex.getName());
        } else if (vertexLocationHint != null) {
            Preconditions.checkState(vertexParallelism == vertexLocationHint.getTaskLocationHints().size(),
                    "vertex task location hint must equal vertex parallelism. Vertex: " + vertex.getName());
        }
        for (DataSinkDescriptor dataSink : vertex.getDataSinks()) {
            if (dataSink.getCredentials() != null) {
                dagCredentials.addAll(dataSink.getCredentials());
            }
        }

        VertexPlan.Builder vertexBuilder = VertexPlan.newBuilder();
        vertexBuilder.setName(vertex.getName());
        vertexBuilder.setType(PlanVertexType.NORMAL); // vertex type is implicitly NORMAL until TEZ-46.
        vertexBuilder
                .setProcessorDescriptor(DagTypeConverters.convertToDAGPlan(vertex.getProcessorDescriptor()));

        // Vertex ExecutionContext setup
        VertexExecutionContext execContext = vertex.getVertexExecutionContext();
        verifyExecutionContext(execContext, servicePluginsDescriptor, vertex.getName());
        if (execContext != null) {
            DAGProtos.VertexExecutionContextProto contextProto = DagTypeConverters.convertToProto(execContext);
            vertexBuilder.setExecutionContext(contextProto);
        }
        // End of VertexExecutionContext setup.

        if (vertex.getInputs().size() > 0) {
            for (RootInputLeafOutput<InputDescriptor, InputInitializerDescriptor> input : vertex.getInputs()) {
                vertexBuilder.addInputs(DagTypeConverters.convertToDAGPlan(input));
            }
        }
        if (vertex.getOutputs().size() > 0) {
            for (RootInputLeafOutput<OutputDescriptor, OutputCommitterDescriptor> output : vertex
                    .getOutputs()) {
                vertexBuilder.addOutputs(DagTypeConverters.convertToDAGPlan(output));
            }
        }

        if (vertex.getConf() != null && vertex.getConf().size() > 0) {
            ConfigurationProto.Builder confBuilder = ConfigurationProto.newBuilder();
            TezUtils.populateConfProtoFromEntries(vertex.getConf().entrySet(), confBuilder);
            vertexBuilder.setVertexConf(confBuilder);
        }

        //task config
        PlanTaskConfiguration.Builder taskConfigBuilder = PlanTaskConfiguration.newBuilder();
        taskConfigBuilder.setNumTasks(vertexParallelism);
        taskConfigBuilder.setMemoryMb(vertexTaskResource.getMemory());
        taskConfigBuilder.setVirtualCores(vertexTaskResource.getVirtualCores());

        try {
            taskConfigBuilder.setJavaOpts(TezClientUtils
                    .addDefaultsToTaskLaunchCmdOpts(vertex.getTaskLaunchCmdOpts(), tezConf, javaOptsChecker));
        } catch (TezException e) {
            throw new TezUncheckedException(
                    "Invalid TaskLaunchCmdOpts defined for Vertex " + vertex.getName() + " : " + e.getMessage(),
                    e);
        }

        taskConfigBuilder.setTaskModule(vertex.getName());
        if (!vertexLRs.isEmpty()) {
            taskConfigBuilder.addAllLocalResource(DagTypeConverters.convertToDAGPlan(vertexLRs));
        }

        Map<String, String> taskEnv = Maps.newHashMap(vertex.getTaskEnvironment());
        TezYARNUtils.setupDefaultEnv(taskEnv, tezConf, TezConfiguration.TEZ_TASK_LAUNCH_ENV,
                TezConfiguration.TEZ_TASK_LAUNCH_ENV_DEFAULT,
                TezConfiguration.TEZ_TASK_LAUNCH_CLUSTER_DEFAULT_ENV,
                TezConfiguration.TEZ_TASK_LAUNCH_CLUSTER_DEFAULT_ENV_DEFAULT, tezLrsAsArchive);
        for (Map.Entry<String, String> entry : taskEnv.entrySet()) {
            PlanKeyValuePair.Builder envSettingBuilder = PlanKeyValuePair.newBuilder();
            envSettingBuilder.setKey(entry.getKey());
            envSettingBuilder.setValue(entry.getValue());
            taskConfigBuilder.addEnvironmentSetting(envSettingBuilder);
        }

        if (vertexLocationHint != null) {
            if (vertexLocationHint.getTaskLocationHints() != null) {
                for (TaskLocationHint hint : vertexLocationHint.getTaskLocationHints()) {
                    PlanTaskLocationHint.Builder taskLocationHintBuilder = PlanTaskLocationHint.newBuilder();
                    // we can allow this later on if needed
                    if (hint.getAffinitizedTask() != null) {
                        throw new TezUncheckedException(
                                "Task based affinity may not be specified via the DAG API");
                    }

                    if (hint.getHosts() != null) {
                        taskLocationHintBuilder.addAllHost(hint.getHosts());
                    }
                    if (hint.getRacks() != null) {
                        taskLocationHintBuilder.addAllRack(hint.getRacks());
                    }

                    vertexBuilder.addTaskLocationHint(taskLocationHintBuilder);
                }
            }
        }

        if (vertex.getVertexManagerPlugin() != null) {
            vertexBuilder.setVertexManagerPlugin(
                    DagTypeConverters.convertToDAGPlan(vertex.getVertexManagerPlugin()));
        }

        for (Edge inEdge : vertex.getInputEdges()) {
            vertexBuilder.addInEdgeId(inEdge.getId());
        }

        for (Edge outEdge : vertex.getOutputEdges()) {
            vertexBuilder.addOutEdgeId(outEdge.getId());
        }

        vertexBuilder.setTaskConfig(taskConfigBuilder);
        dagBuilder.addVertex(vertexBuilder);
    }

    for (Edge edge : edges) {
        EdgePlan.Builder edgeBuilder = EdgePlan.newBuilder();
        edgeBuilder.setId(edge.getId());
        edgeBuilder.setInputVertexName(edge.getInputVertex().getName());
        edgeBuilder.setOutputVertexName(edge.getOutputVertex().getName());
        edgeBuilder.setDataMovementType(
                DagTypeConverters.convertToDAGPlan(edge.getEdgeProperty().getDataMovementType()));
        edgeBuilder.setDataSourceType(
                DagTypeConverters.convertToDAGPlan(edge.getEdgeProperty().getDataSourceType()));
        edgeBuilder.setSchedulingType(
                DagTypeConverters.convertToDAGPlan(edge.getEdgeProperty().getSchedulingType()));
        edgeBuilder.setEdgeSource(DagTypeConverters.convertToDAGPlan(edge.getEdgeProperty().getEdgeSource()));
        edgeBuilder.setEdgeDestination(
                DagTypeConverters.convertToDAGPlan(edge.getEdgeProperty().getEdgeDestination()));
        if (edge.getEdgeProperty().getDataMovementType() == DataMovementType.CUSTOM) {
            if (edge.getEdgeProperty().getEdgeManagerDescriptor() != null) {
                edgeBuilder.setEdgeManager(
                        DagTypeConverters.convertToDAGPlan(edge.getEdgeProperty().getEdgeManagerDescriptor()));
            } // else the AM will deal with this.
        }
        dagBuilder.addEdge(edgeBuilder);
    }

    if (dagAccessControls != null) {
        dagBuilder.setAclInfo(DagTypeConverters.convertDAGAccessControlsToProto(dagAccessControls));
    }

    ConfigurationProto.Builder confProtoBuilder = ConfigurationProto.newBuilder();
    if (!this.dagConf.isEmpty()) {
        TezUtils.populateConfProtoFromEntries(this.dagConf.entrySet(), confProtoBuilder);
    }
    // Copy historyLogLevel from tezConf into dagConf if it's not overridden in dagConf.
    String logLevel = this.dagConf.get(TezConfiguration.TEZ_HISTORY_LOGGING_LOGLEVEL);
    if (logLevel != null) {
        // The config is from dagConf, we have already added it to the proto above, just check if
        // the value is valid.
        if (!HistoryLogLevel.validateLogLevel(logLevel)) {
            throw new IllegalArgumentException("Config: " + TezConfiguration.TEZ_HISTORY_LOGGING_LOGLEVEL
                    + " is set to invalid value: " + logLevel);
        }
    } else {
        // Validate and set value from tezConf.
        logLevel = tezConf.get(TezConfiguration.TEZ_HISTORY_LOGGING_LOGLEVEL);
        if (logLevel != null) {
            if (!HistoryLogLevel.validateLogLevel(logLevel)) {
                throw new IllegalArgumentException("Config: " + TezConfiguration.TEZ_HISTORY_LOGGING_LOGLEVEL
                        + " is set to invalid value: " + logLevel);
            }
            PlanKeyValuePair.Builder kvp = PlanKeyValuePair.newBuilder();
            kvp.setKey(TezConfiguration.TEZ_HISTORY_LOGGING_LOGLEVEL);
            kvp.setValue(logLevel);
            confProtoBuilder.addConfKeyValues(kvp);
        }
    }
    dagBuilder.setDagConf(confProtoBuilder);

    if (dagCredentials != null) {
        dagBuilder.setCredentialsBinary(DagTypeConverters.convertCredentialsToProto(dagCredentials));
        TezCommonUtils.logCredentials(LOG, dagCredentials, "dag");
    }

    return dagBuilder.build();
}

From source file:hudson.plugins.project_inheritance.projects.InheritanceProject.java

public SCM getScm(IMode mode) {
    InheritanceGovernor<SCM> gov = new InheritanceGovernor<SCM>("scm", SELECTOR.MISC, this) {
        @Override
        protected SCM castToDestinationType(Object o) {
            return (o instanceof SCM) ? (SCM) o : null;
        }

        @Override
        public SCM getRawField(InheritanceProject ip) {
            return ip.getRawScm();
        }

        @Override
        protected SCM reduceFromFullInheritance(Deque<SCM> list) {
            if (list == null || list.isEmpty()) {
                return new NullSCM();
            }
            //Return the SCM that was defined last and is not a NullSCM
            Iterator<SCM> iter = list.descendingIterator();
            while (iter.hasNext()) {
                SCM scm = iter.next();
                if (scm != null && !(scm instanceof NullSCM)) {
                    return scm;
                }
            }
            //All SCMs are NullSCMs, so it does not matter which one we return
            return list.peekLast();
        }
    };

    SCM scm = gov.retrieveFullyDerivedField(this, mode);

    //We may not return null directly
    return (scm == null) ? new NullSCM() : scm;
}

From source file:hudson.plugins.project_inheritance.projects.InheritanceProject.java

public Label getAssignedLabel(IMode mode) {
    InheritanceGovernor<Label> gov = new InheritanceGovernor<Label>("assignedLabel", SELECTOR.MISC, this) {
        @Override
        protected Label castToDestinationType(Object o) {
            if (o instanceof Label) {
                return (Label) o;
            }
            return null;
        }

        @Override
        public Label getRawField(InheritanceProject ip) {
            return ip.getRawAssignedLabel();
        }

        @Override
        protected Label reduceFromFullInheritance(Deque<Label> list) {
            //We simply join the labels via the AND operator
            Label out = null;
            if (list == null || list.isEmpty()) {
                return out;
            }
            for (Label l : list) {
                if (l == null) {
                    continue;
                }
                out = (out == null) ? l : out.and(l);
            }
            return out;
        }
    };

    //Generate the label on this node and the optional "magic" label restriction
    Label lbl = gov.retrieveFullyDerivedField(this, mode);
    Label magic = ProjectCreationEngine.instance.getMagicNodeLabelForTestingValue();

    //Check if the magic label needs to be applied (only when building)
    if (magic != null && !magic.isEmpty() && InheritanceGovernor.inheritanceLookupRequired(this)) {
        if (lbl != null) {
            String labelExpr = lbl.getName();
            String magicExpr = magic.getName();
            if (!labelExpr.contains(magicExpr)) {
                //We need to add the magic to the label
                lbl = lbl.and(magic.not());
            }
        } else {
            //No label present, just use magic value as-is
            lbl = magic.not();
        }
    }

    if (lbl == null) {
        return null;
    }

    /* The labels stored in versioning are essentially cached; which means
     * that their "applicable nodes" list is out-of-date.
     * 
     * As such, we will use Jenkins' caching mechanism to update the labels,
     * as it will "know" when to refresh labels and when not.
     * Unfortunately, Jenkins is braindead and "unquotes" the strings
     * aggressively, by just stripping out the outermost and innermost
     * quote sign; EVEN if the quotes do not belong to each other.
     * 
     * E.g.:
     *       "os:linux"&&"role:foobar"
     * will be turned into:
     *       os:linux"&&"role:foobar
     * 
     * We "solve" this by adding a pointless quote around the label's
     * string representation
     */
    return Jenkins.getInstance().getLabel(String.format("\"%s\"", lbl.getName()));
}

From source file:org.apache.asterix.app.translator.QueryTranslator.java

private static ARecordType createEnforcedType(ARecordType initialType, List<Index> indexes)
        throws AlgebricksException {
    ARecordType enforcedType = initialType;
    for (Index index : indexes) {
        if (!index.isSecondaryIndex() || !index.isEnforcingKeyFileds()) {
            continue;
        }
        if (index.hasMetaFields()) {
            throw new AlgebricksException("Indexing an open field is only supported on the record part");
        }
        for (int i = 0; i < index.getKeyFieldNames().size(); i++) {
            Deque<Pair<ARecordType, String>> nestedTypeStack = new ArrayDeque<>();
            List<String> splits = index.getKeyFieldNames().get(i);
            ARecordType nestedFieldType = enforcedType;
            boolean openRecords = false;
            String bridgeName = nestedFieldType.getTypeName();
            int j;
            // Build the stack for the enforced type
            for (j = 1; j < splits.size(); j++) {
                nestedTypeStack.push(new Pair<ARecordType, String>(nestedFieldType, splits.get(j - 1)));
                bridgeName = nestedFieldType.getTypeName();
                nestedFieldType = (ARecordType) enforcedType.getSubFieldType(splits.subList(0, j));
                if (nestedFieldType == null) {
                    openRecords = true;
                    break;
                }
            }
            if (openRecords) {
                // create the smallest record
                enforcedType = new ARecordType(splits.get(splits.size() - 2),
                        new String[] { splits.get(splits.size() - 1) },
                        new IAType[] { AUnionType.createUnknownableType(index.getKeyFieldTypes().get(i)) },
                        true);
                // create the open part of the nested field
                for (int k = splits.size() - 3; k > (j - 2); k--) {
                    enforcedType = new ARecordType(splits.get(k), new String[] { splits.get(k + 1) },
                            new IAType[] { AUnionType.createUnknownableType(enforcedType) }, true);
                }
                // Bridge the gap
                Pair<ARecordType, String> gapPair = nestedTypeStack.pop();
                ARecordType parent = gapPair.first;

                IAType[] parentFieldTypes = ArrayUtils.addAll(parent.getFieldTypes().clone(),
                        new IAType[] { AUnionType.createUnknownableType(enforcedType) });
                enforcedType = new ARecordType(bridgeName,
                        ArrayUtils.addAll(parent.getFieldNames(), enforcedType.getTypeName()), parentFieldTypes,
                        true);
            } else {
                //Schema is closed all the way to the field
                //enforced fields are either null or strongly typed
                LinkedHashMap<String, IAType> recordNameTypesMap = createRecordNameTypeMap(nestedFieldType);
                // if an enforced field already exists and the type is correct
                IAType enforcedFieldType = recordNameTypesMap.get(splits.get(splits.size() - 1));
                if (enforcedFieldType != null && enforcedFieldType.getTypeTag() == ATypeTag.UNION
                        && ((AUnionType) enforcedFieldType).isUnknownableType()) {
                    enforcedFieldType = ((AUnionType) enforcedFieldType).getActualType();
                }
                if (enforcedFieldType != null && !ATypeHierarchy.canPromote(enforcedFieldType.getTypeTag(),
                        index.getKeyFieldTypes().get(i).getTypeTag())) {
                    throw new AlgebricksException("Cannot enforce field " + index.getKeyFieldNames().get(i)
                            + " to have type " + index.getKeyFieldTypes().get(i));
                }
                if (enforcedFieldType == null) {
                    recordNameTypesMap.put(splits.get(splits.size() - 1),
                            AUnionType.createUnknownableType(index.getKeyFieldTypes().get(i)));
                }
                enforcedType = new ARecordType(nestedFieldType.getTypeName(),
                        recordNameTypesMap.keySet().toArray(new String[recordNameTypesMap.size()]),
                        recordNameTypesMap.values().toArray(new IAType[recordNameTypesMap.size()]),
                        nestedFieldType.isOpen());
            }

            // Create the enforced type for the nested fields in the schema, from the ground up
            if (!nestedTypeStack.isEmpty()) {
                while (!nestedTypeStack.isEmpty()) {
                    Pair<ARecordType, String> nestedTypePair = nestedTypeStack.pop();
                    ARecordType nestedRecType = nestedTypePair.first;
                    IAType[] nestedRecTypeFieldTypes = nestedRecType.getFieldTypes().clone();
                    nestedRecTypeFieldTypes[nestedRecType.getFieldIndex(nestedTypePair.second)] = enforcedType;
                    enforcedType = new ARecordType(nestedRecType.getTypeName() + "_enforced",
                            nestedRecType.getFieldNames(), nestedRecTypeFieldTypes, nestedRecType.isOpen());
                }
            }
        }
    }
    return enforcedType;
}

From source file:org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.java

public void processPositionAlias(ASTNode ast) throws SemanticException {
    boolean isBothByPos = HiveConf.getBoolVar(conf, ConfVars.HIVE_GROUPBY_ORDERBY_POSITION_ALIAS);
    boolean isGbyByPos = isBothByPos || HiveConf.getBoolVar(conf, ConfVars.HIVE_GROUPBY_POSITION_ALIAS);
    boolean isObyByPos = isBothByPos || HiveConf.getBoolVar(conf, ConfVars.HIVE_ORDERBY_POSITION_ALIAS);

    Deque<ASTNode> stack = new ArrayDeque<ASTNode>();
    stack.push(ast);

    while (!stack.isEmpty()) {
        ASTNode next = stack.pop();

        if (next.getChildCount() == 0) {
            continue;
        }

        boolean isAllCol;
        ASTNode selectNode = null;
        ASTNode groupbyNode = null;
        ASTNode orderbyNode = null;

        // get node type
        int child_count = next.getChildCount();
        for (int child_pos = 0; child_pos < child_count; ++child_pos) {
            ASTNode node = (ASTNode) next.getChild(child_pos);
            int type = node.getToken().getType();
            if (type == HiveParser.TOK_SELECT) {
                selectNode = node;
            } else if (type == HiveParser.TOK_GROUPBY) {
                groupbyNode = node;
            } else if (type == HiveParser.TOK_ORDERBY) {
                orderbyNode = node;
            }
        }

        if (selectNode != null) {
            int selectExpCnt = selectNode.getChildCount();

            // replace each of the position alias in GROUPBY with the actual column name
            if (groupbyNode != null) {
                for (int child_pos = 0; child_pos < groupbyNode.getChildCount(); ++child_pos) {
                    ASTNode node = (ASTNode) groupbyNode.getChild(child_pos);
                    if (node.getToken().getType() == HiveParser.Number) {
                        if (isGbyByPos) {
                            int pos = Integer.parseInt(node.getText());
                            if (pos > 0 && pos <= selectExpCnt) {
                                groupbyNode.setChild(child_pos, selectNode.getChild(pos - 1).getChild(0));
                            } else {
                                throw new SemanticException(ErrorMsg.INVALID_POSITION_ALIAS_IN_GROUPBY
                                        .getMsg("Position alias: " + pos + " does not exist\n"
                                                + "The Select List is indexed from 1 to " + selectExpCnt));
                            }
                        } else {
                            warn("Using constant number  " + node.getText()
                                    + " in group by. If you try to use position alias when hive.groupby.position.alias is false, the position alias will be ignored.");
                        }
                    }
                }
            }

            // orderby position will be processed in genPlan
        }

        for (int i = next.getChildren().size() - 1; i >= 0; i--) {
            stack.push((ASTNode) next.getChildren().get(i));
        }
    }
}