Example usage for com.google.common.base Optional orNull

Introduction

On this page you can find example usages of com.google.common.base Optional orNull.

Prototype

@Nullable
public abstract T orNull();

Document

Returns the contained instance if it is present; null otherwise.
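
As a minimal sketch of the behavior described above (the class and variable names are illustrative, not taken from the projects below), orNull() yields the wrapped value for a present Optional and null for an absent one:

import com.google.common.base.Optional;

public class OrNullExample {
    public static void main(String[] args) {
        // Present Optional: orNull() returns the contained instance.
        Optional<String> present = Optional.of("hello");
        System.out.println(present.orNull()); // prints "hello"

        // Absent Optional: orNull() returns null instead of throwing.
        Optional<String> absent = Optional.absent();
        System.out.println(absent.orNull()); // prints "null"
    }
}

The real-world examples below follow the same pattern, using orNull() to hand a nullable reference to APIs that do not accept an Optional.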

Usage

From source file:ua.utility.kfsdbupgrade.MaintainableXMLConversionServiceImpl.java

/**
 * For each child of <code>node</code>, applies property renames, date rules, and recursive transformation
 * based on the supplied mappings.
 * @param document
 * @param node
 * @param currentClass
 * @param propertyMappings
 * @throws ClassNotFoundException
 * @throws XPathExpressionException
 * @throws IllegalAccessException
 * @throws InvocationTargetException
 * @throws NoSuchMethodException
 * @throws InstantiationException
 */
private void transformNode(Document document, Node node, Class<?> currentClass,
        Map<String, String> propertyMappings) throws ClassNotFoundException, XPathExpressionException,
        IllegalAccessException, InvocationTargetException, NoSuchMethodException, InstantiationException {
    LOGGER.trace("Transforming node: " + node.getBaseURI() + "/" + node.getNodeName());
    for (Node childNode = node.getFirstChild(); childNode != null;) {
        Node nextChild = childNode.getNextSibling();
        String propertyName = childNode.getNodeName();
        if (childNode.hasAttributes()) {
            XPath xpath = XPathFactory.newInstance().newXPath();
            Node serializationAttribute = childNode.getAttributes().getNamedItem(SERIALIZATION_ATTRIBUTE);
            if (serializationAttribute != null
                    && StringUtils.equals(serializationAttribute.getNodeValue(), "custom")) {
                Node classAttribute = childNode.getAttributes().getNamedItem(CLASS_ATTRIBUTE);
                if (classAttribute != null && StringUtils.equals(classAttribute.getNodeValue(),
                        "org.kuali.rice.kns.util.TypedArrayList")) {
                    handleTypedArrayList(document, xpath, (Element) childNode);
                } else if (isTargetEffortCertificationReportPositionsNode(childNode)) {
                    // Need to skip over ECRD positions list due to needing serialization attr
                    // that otherwise was getting stripped on line 924. This also avoids a child
                    // list node from getting errantly pruned off ECRD doc types
                    deleteAllNoneListProxyChildren(childNode);
                } else {
                    ((Element) childNode).removeAttribute(SERIALIZATION_ATTRIBUTE);

                    XPathExpression mapContentsExpression = xpath.compile("//" + propertyName + "/map/string");
                    NodeList mapContents = (NodeList) mapContentsExpression.evaluate(childNode,
                            XPathConstants.NODESET);
                    List<Node> nodesToAdd = new ArrayList<Node>();
                    if (mapContents.getLength() > 0 && mapContents.getLength() % 2 == 0) {
                        for (int i = 0; i < mapContents.getLength(); i++) {
                            Node keyNode = mapContents.item(i);
                            Node valueNode = mapContents.item(++i);
                            Node entryNode = document.createElement("entry");
                            entryNode.appendChild(keyNode);
                            entryNode.appendChild(valueNode);
                            nodesToAdd.add(entryNode);
                        }
                    }
                    for (Node removeNode = childNode.getFirstChild(); removeNode != null;) {
                        Node nextRemoveNode = removeNode.getNextSibling();
                        childNode.removeChild(removeNode);
                        removeNode = nextRemoveNode;
                    }
                    for (Node nodeToAdd : nodesToAdd) {
                        childNode.appendChild(nodeToAdd);
                    }
                }
            }
        }
        if (propertyMappings != null && propertyMappings.containsKey(propertyName)) {
            String newPropertyName = propertyMappings.get(propertyName);
            if (StringUtils.isNotBlank(newPropertyName)) {
                document.renameNode(childNode, null, newPropertyName);
                propertyName = newPropertyName;
            } else {
                // If there is no replacement name then the element needs
                // to be removed and skip all other processing
                node.removeChild(childNode);
                childNode = nextChild;
                continue;
            }
        }

        if (dateRuleMap != null && dateRuleMap.containsKey(propertyName)) {
            String newDateValue = dateRuleMap.get(propertyName);
            if (StringUtils.isNotBlank(newDateValue)) {
                if (childNode.getTextContent().length() == 10) {
                    childNode.setTextContent(childNode.getTextContent() + " " + newDateValue);

                }
            }
        }

        if ((currentClass != null) && isValidClass(currentClass)) {
            if (childNode.hasChildNodes() && !(Collection.class.isAssignableFrom(currentClass)
                    || Map.class.isAssignableFrom(currentClass))) {
                PropertyClassKey key = new PropertyClassKey(currentClass, propertyName);
                Optional<Class<?>> propertyClass = propertyClassCache.getUnchecked(key);
                if (propertyClass.isPresent()
                        && classPropertyRuleMap.containsKey(propertyClass.get().getName())) {
                    transformNode(document, childNode, propertyClass.get(),
                            this.classPropertyRuleMap.get(propertyClass.get().getName()));
                }
                transformNode(document, childNode, propertyClass.orNull(), classPropertyRuleMap.get("*"));
            }
        }
        childNode = nextChild;
    }
}

From source file:org.auraframework.impl.system.MasterDefRegistryImpl.java

/**
 * Fill a compiling def for a descriptor.
 *
 * This makes sure that we can get a registry for a given def, then tries to get the def from the global cache;
 * if that fails, it retrieves the def from the registry and marks it as locally built.
 * 
 * @param compiling the current compiling def (if there is one).
 * @throws QuickFixException if validateDefinition caused a quickfix.
 */
private <D extends Definition> boolean fillCompilingDef(CompilingDef<D> compiling, AuraContext context)
        throws QuickFixException {
    assert compiling.def == null;

    //
    // First, check our local cached defs to see if we have a fully compiled version.
    // in this case, we don't care about caching, since we are done.
    //
    if (hasLocalDef(compiling.descriptor)) {
        D localDef = getLocalDef(compiling.descriptor);
        if (localDef != null) {
            compiling.def = localDef;
            // I think this is no longer possible.
            compiling.built = !localDef.isValid();
            if (compiling.built) {
                localDef.validateDefinition();
            }
            return true;
        } else {
            return false;
        }
    }

    //
    // If there is no local cache, we must first check to see if there is a registry, as we may not have
    // a registry (depending on configuration). In the case that we don't find one, we are done here.
    //
    DefRegistry<D> registry = getRegistryFor(compiling.descriptor);
    if (registry == null) {
        defs.put(compiling.descriptor, null);
        return false;
    }

    //
    // Now, check if we can cache the def later, as we won't have the registry to check at a later time.
    // If we can cache, look it up in the cache. If we find it, we have a built definition.
    // Currently, static registries are neither cached, nor do they affect dependency caching
    //
    if (!registry.isStatic()) {
        if (registry.isCacheable() && shouldCache(compiling.descriptor)) {
            compiling.cacheable = true;

            @SuppressWarnings("unchecked")
            Optional<D> opt = (Optional<D>) defsCache.getIfPresent(compiling.descriptor);
            if (opt != null) {
                D cachedDef = opt.orNull();

                if (cachedDef != null) {
                    @SuppressWarnings("unchecked")
                    DefDescriptor<D> canonical = (DefDescriptor<D>) cachedDef.getDescriptor();

                    compiling.def = cachedDef;
                    compiling.descriptor = canonical;
                    compiling.built = false;
                    return true;
                } else {
                    return false;
                }
            }
        } else {
            // if not a cacheable registry or not shouldCache, test other exceptions that might still
            // allow dependency caching (if it's from static registry, it can't affect our decision on
            // depsCaching) test for special cases: compounds and static apex types
            boolean qualified = isOkForDependencyCaching(compiling.descriptor);

            currentCC.shouldCacheDependencies = qualified;
        }
    }

    //
    // The last case. This is our first compile or the def is uncacheable.
    // In this case, we make sure that the initial validation is called, and put
    // the def in the 'built' set.
    //
    compiling.def = registry.getDef(compiling.descriptor);
    if (compiling.def == null) {
        return false;
    }
    @SuppressWarnings("unchecked")
    DefDescriptor<D> canonical = (DefDescriptor<D>) compiling.def.getDescriptor();
    compiling.descriptor = canonical;
    currentCC.loggingService.incrementNum(LoggingService.DEF_COUNT);
    context.pushCallingDescriptor(canonical);
    try {
        compiling.def.validateDefinition();
    } finally {
        context.popCallingDescriptor();
    }
    compiling.built = true;
    return true;
}

From source file:google.registry.model.registry.label.PremiumList.java

/**
 * Persists a PremiumList object to Datastore.
 *
 * <p> The flow here is: save the new premium list entries parented on that revision entity,
 * save/update the PremiumList, and then delete the old premium list entries associated with the
 * old revision.
 */
public PremiumList saveAndUpdateEntries() {
    final Optional<PremiumList> oldPremiumList = get(name);
    // Only update entries if there's actually a new revision of the list to save (which there will
    // be if the list content changes, vs just the description/metadata).
    boolean entriesToUpdate = !oldPremiumList.isPresent()
            || !Objects.equals(oldPremiumList.get().revisionKey, this.revisionKey);
    // If needed, save the new child entities in a series of transactions.
    if (entriesToUpdate) {
        for (final List<PremiumListEntry> batch : partition(premiumListMap.values(), TRANSACTION_BATCH_SIZE)) {
            ofy().transactNew(new VoidWork() {
                @Override
                public void vrun() {
                    ofy().save().entities(batch);
                }
            });
        }
    }
    // Save the new PremiumList itself.
    PremiumList updated = ofy().transactNew(new Work<PremiumList>() {
        @Override
        public PremiumList run() {
            DateTime now = ofy().getTransactionTime();
            // Assert that the premium list hasn't been changed since we started this process.
            checkState(
                    Objects.equals(ofy().load().type(PremiumList.class).parent(getCrossTldKey()).id(name).now(),
                            oldPremiumList.orNull()),
                    "PremiumList was concurrently edited");
            PremiumList newList = PremiumList.this.asBuilder().setLastUpdateTime(now)
                    .setCreationTime(oldPremiumList.isPresent() ? oldPremiumList.get().creationTime : now)
                    .build();
            ofy().save().entity(newList);
            return newList;
        }
    });
    // Update the cache.
    PremiumList.cache.put(name, updated);
    // If needed and there are any, delete the entities under the old PremiumList.
    if (entriesToUpdate && oldPremiumList.isPresent()) {
        oldPremiumList.get().deleteEntries();
    }
    return updated;
}

From source file:ua.utility.kfsdbupgrade.App.java

private static boolean isIngestWorkflow(Optional<String> workflowIndicator) {
    return "ingestWorkflow".equalsIgnoreCase(workflowIndicator.orNull());
}

From source file:org.apache.aurora.scheduler.state.StateManagerImpl.java

private StateChangeResult updateTaskAndExternalState(TaskStore.Mutable taskStore, String taskId,
        // Note: This argument should be used with caution.
        // This is because using the captured value within the storage operation below is
        // highly-risky, since it doesn't necessarily represent the value in storage.
        // As a result, it would be easy to accidentally clobber mutations.
        Optional<IScheduledTask> task, Optional<ScheduleStatus> targetState,
        Optional<String> transitionMessage) {

    if (task.isPresent()) {
        Preconditions.checkArgument(taskId.equals(task.get().getAssignedTask().getTaskId()));
    }

    List<PubsubEvent> events = Lists.newArrayList();

    TaskStateMachine stateMachine = task.isPresent() ? new TaskStateMachine(task.get())
            : new TaskStateMachine(taskId);

    TransitionResult result = stateMachine.updateState(targetState);

    for (SideEffect sideEffect : ACTION_ORDER.sortedCopy(result.getSideEffects())) {
        Optional<IScheduledTask> upToDateTask = taskStore.fetchTask(taskId);

        switch (sideEffect.getAction()) {
        case INCREMENT_FAILURES:
            taskStore.mutateTask(taskId, task1 -> IScheduledTask
                    .build(task1.newBuilder().setFailureCount(task1.getFailureCount() + 1)));
            break;

        case SAVE_STATE:
            Preconditions.checkState(upToDateTask.isPresent(), "Operation expected task %s to be present.",
                    taskId);

            Optional<IScheduledTask> mutated = taskStore.mutateTask(taskId, task1 -> {
                ScheduledTask mutableTask = task1.newBuilder();
                mutableTask.setStatus(targetState.get());
                mutableTask.addToTaskEvents(new TaskEvent().setTimestamp(clock.nowMillis())
                        .setStatus(targetState.get()).setMessage(transitionMessage.orNull())
                        .setScheduler(LOCAL_HOST_SUPPLIER.get()));
                return IScheduledTask.build(mutableTask);
            });
            events.add(TaskStateChange.transition(mutated.get(), stateMachine.getPreviousState()));
            break;

        case RESCHEDULE:
            Preconditions.checkState(upToDateTask.isPresent(), "Operation expected task %s to be present.",
                    taskId);
            LOG.info("Task being rescheduled: " + taskId);

            ScheduleStatus newState;
            String auditMessage;
            long flapPenaltyMs = rescheduleCalculator.getFlappingPenaltyMs(upToDateTask.get());
            if (flapPenaltyMs > 0) {
                newState = THROTTLED;
                auditMessage = String.format("Rescheduled, penalized for %s ms for flapping", flapPenaltyMs);
            } else {
                newState = PENDING;
                auditMessage = "Rescheduled";
            }

            IScheduledTask newTask = IScheduledTask
                    .build(createTask(upToDateTask.get().getAssignedTask().getInstanceId(),
                            upToDateTask.get().getAssignedTask().getTask()).newBuilder()
                                    .setFailureCount(upToDateTask.get().getFailureCount())
                                    .setAncestorId(taskId));
            taskStore.saveTasks(ImmutableSet.of(newTask));
            updateTaskAndExternalState(taskStore, Tasks.id(newTask), Optional.of(newTask),
                    Optional.of(newState), Optional.of(auditMessage));
            break;

        case KILL:
            driver.killTask(taskId);
            break;

        case DELETE:
            Preconditions.checkState(upToDateTask.isPresent(), "Operation expected task %s to be present.",
                    taskId);

            PubsubEvent.TasksDeleted event = createDeleteEvent(taskStore, ImmutableSet.of(taskId));
            taskStore.deleteTasks(event.getTasks().stream().map(Tasks::id).collect(Collectors.toSet()));
            events.add(event);
            break;

        default:
            throw new IllegalStateException("Unrecognized side-effect " + sideEffect.getAction());
        }
    }

    // Note (AURORA-138): Delaying events until after the write operation is somewhat futile, since
    // the state may actually not be written to durable store
    // (e.g. if this is a nested transaction). Ideally, Storage would add a facility to attach
    // side-effects that are performed after the outer-most transaction completes (meaning state
    // has been durably persisted).
    for (PubsubEvent event : events) {
        eventSink.post(event);
    }

    return result.getResult();
}

From source file:com.google.security.zynamics.binnavi.Database.MockClasses.MockSqlProvider.java

@Override
public void updateMember(final TypeMember member, final String newName, final BaseType newBaseType,
        final Optional<Integer> newOffset, final Optional<Integer> newNumberOfElements,
        final Optional<Integer> newArgumentIndex, final INaviModule module) {
    final RawTypeMember rawMember = findMember(member, module);
    if (rawMember != null) {
        members.remove(module, rawMember);
        members.put(module,
                new RawTypeMember(rawMember.getId(), newName, newBaseType.getId(), rawMember.getParentId(),
                        newOffset.orNull(), newArgumentIndex.orNull(), newNumberOfElements.orNull()));
    } else {
        throw new IllegalStateException("Trying to update non-existing member.");
    }
}

From source file:org.sosy_lab.cpachecker.cfa.postprocessing.global.singleloop.CFASingleLoopTransformation.java

/**
 * Copies the given control flow edge using the given new predecessor and
 * successor. Any additionally required nodes are taken from the given
 * mapping by using the corresponding node of the old edge as a key or, if
 * no node is mapped to this key, by copying the key and recording the result
 * in the mapping.
 *
 * @param pEdge the edge to copy.
 * @param pNewPredecessor the new predecessor.
 * @param pNewSuccessor the new successor.
 * @param pNewToOldMapping a mapping of old nodes to new nodes.
 *
 * @return a new edge with the given predecessor and successor.
 */
private CFAEdge copyCFAEdgeWithNewNodes(CFAEdge pEdge, CFANode pNewPredecessor, CFANode pNewSuccessor,
        final Map<CFANode, CFANode> pNewToOldMapping) {
    String rawStatement = pEdge.getRawStatement();
    FileLocation fileLocation = pEdge.getFileLocation();
    switch (pEdge.getEdgeType()) {
    case AssumeEdge:
        CAssumeEdge assumeEdge = (CAssumeEdge) pEdge;
        return new CAssumeEdge(rawStatement, fileLocation, pNewPredecessor, pNewSuccessor,
                assumeEdge.getExpression(), assumeEdge.getTruthAssumption());
    case BlankEdge:
        return new BlankEdge(rawStatement, fileLocation, pNewPredecessor, pNewSuccessor,
                pEdge.getDescription());
    case DeclarationEdge:
        CDeclarationEdge declarationEdge = (CDeclarationEdge) pEdge;
        return new CDeclarationEdge(rawStatement, fileLocation, pNewPredecessor, pNewSuccessor,
                declarationEdge.getDeclaration());
    case FunctionCallEdge: {
        if (!(pNewSuccessor instanceof FunctionEntryNode)) {
            throw new IllegalArgumentException(
                    "The successor of a function call edge must be a function entry node.");
        }
        CFunctionCallEdge functionCallEdge = (CFunctionCallEdge) pEdge;
        FunctionSummaryEdge oldSummaryEdge = functionCallEdge.getSummaryEdge();
        CFunctionSummaryEdge functionSummaryEdge = (CFunctionSummaryEdge) copyCFAEdgeWithNewNodes(
                oldSummaryEdge, pNewPredecessor,
                getOrCreateNewFromOld(oldSummaryEdge.getSuccessor(), pNewToOldMapping), pNewToOldMapping);
        addToNodes(functionSummaryEdge);
        Optional<CFunctionCall> cFunctionCall = functionCallEdge.getRawAST();
        return new CFunctionCallEdge(rawStatement, fileLocation, pNewPredecessor,
                (CFunctionEntryNode) pNewSuccessor, cFunctionCall.orNull(), functionSummaryEdge);
    }
    case FunctionReturnEdge:
        if (!(pNewPredecessor instanceof FunctionExitNode)) {
            throw new IllegalArgumentException(
                    "The predecessor of a function return edge must be a function exit node.");
        }
        CFunctionReturnEdge functionReturnEdge = (CFunctionReturnEdge) pEdge;
        CFunctionSummaryEdge oldSummaryEdge = functionReturnEdge.getSummaryEdge();
        CFANode functionCallPred = oldSummaryEdge.getPredecessor();
        CFANode functionSummarySucc = oldSummaryEdge.getSuccessor();
        // If there is a conflicting summary edge, never use the one stored with the function return edge
        if (oldSummaryEdge != functionCallPred.getLeavingSummaryEdge()
                && functionCallPred.getLeavingSummaryEdge() != null) {
            oldSummaryEdge = (CFunctionSummaryEdge) functionCallPred.getLeavingSummaryEdge();
        } else if (oldSummaryEdge != functionSummarySucc.getEnteringSummaryEdge()
                && functionSummarySucc.getEnteringSummaryEdge() != null) {
            oldSummaryEdge = (CFunctionSummaryEdge) functionSummarySucc.getEnteringSummaryEdge();
        }
        CFunctionSummaryEdge functionSummaryEdge = (CFunctionSummaryEdge) copyCFAEdgeWithNewNodes(
                oldSummaryEdge, pNewToOldMapping);
        addToNodes(functionSummaryEdge);
        return new CFunctionReturnEdge(fileLocation, (FunctionExitNode) pNewPredecessor, pNewSuccessor,
                functionSummaryEdge);
    case MultiEdge:
        MultiEdge multiEdge = (MultiEdge) pEdge;
        return new MultiEdge(pNewPredecessor, pNewSuccessor,
                from(multiEdge.getEdges()).transform(new Function<CFAEdge, CFAEdge>() {

                    @Override
                    @Nullable
                    public CFAEdge apply(@Nullable CFAEdge pOldEdge) {
                        if (pOldEdge == null) {
                            return null;
                        }
                        return copyCFAEdgeWithNewNodes(pOldEdge, pNewToOldMapping);
                    }

                }).toList());
    case ReturnStatementEdge:
        if (!(pNewSuccessor instanceof FunctionExitNode)) {
            throw new IllegalArgumentException(
                    "The successor of a return statement edge must be a function exit node.");
        }
        CReturnStatementEdge returnStatementEdge = (CReturnStatementEdge) pEdge;
        Optional<CReturnStatement> cReturnStatement = returnStatementEdge.getRawAST();
        return new CReturnStatementEdge(rawStatement, cReturnStatement.orNull(), fileLocation, pNewPredecessor,
                (FunctionExitNode) pNewSuccessor);
    case StatementEdge:
        CStatementEdge statementEdge = (CStatementEdge) pEdge;
        if (statementEdge instanceof CFunctionSummaryStatementEdge) {
            CFunctionSummaryStatementEdge functionStatementEdge = (CFunctionSummaryStatementEdge) pEdge;
            return new CFunctionSummaryStatementEdge(rawStatement, statementEdge.getStatement(), fileLocation,
                    pNewPredecessor, pNewSuccessor, functionStatementEdge.getFunctionCall(),
                    functionStatementEdge.getFunctionName());
        }
        return new CStatementEdge(rawStatement, statementEdge.getStatement(), fileLocation, pNewPredecessor,
                pNewSuccessor);
    case CallToReturnEdge:
        CFunctionSummaryEdge cFunctionSummaryEdge = (CFunctionSummaryEdge) pEdge;
        return new CFunctionSummaryEdge(rawStatement, fileLocation, pNewPredecessor, pNewSuccessor,
                cFunctionSummaryEdge.getExpression(),
                (CFunctionEntryNode) getOrCreateNewFromOld(cFunctionSummaryEdge.getFunctionEntry(),
                        pNewToOldMapping));
    default:
        throw new IllegalArgumentException("Unsupported edge type: " + pEdge.getEdgeType());
    }
}

From source file:com.github.jsdossier.Config.java

/**
 * Creates a new runtime configuration.
 *
 *
 * @param srcs The list of compiler input sources.
 * @param modules The list of CommonJS compiler input sources.
 * @param externs The list of extern files for the Closure compiler.
 * @param excludes The list of excluded files.
 * @param typeFilters The list of types to filter from generated output.
 * @param output Path to the output directory.
 * @param isZipOutput Whether the output directory belongs to a zip file system.
 * @param readme Path to a markdown file to include in the main index.
 * @param customPages Custom markdown files to include in the generated documentation.
 * @param modulePrefix Prefix to strip from each module path when rendering documentation.
 * @param strict Whether to enable all type checks.
 * @param useMarkdown Whether to use markdown for the comment parser.
 * @param language The JavaScript dialect the sources must conform to.
 * @param outputStream The stream to use for standard output.
 * @param errorStream The stream to use for error output.
 * @throws IllegalStateException If any of the source, module, and extern sets intersect, or if the
 *     output path is not a directory.
 */
private Config(ImmutableSet<Path> srcs, ImmutableSet<Path> modules, ImmutableSet<Path> externs,
        ImmutableSet<Path> excludes, ImmutableSet<Pattern> typeFilters, boolean isZipOutput, Path output,
        Optional<Path> readme, List<Page> customPages, Optional<Path> modulePrefix, boolean strict,
        boolean useMarkdown, Language language, PrintStream outputStream, PrintStream errorStream,
        FileSystem fileSystem) {
    checkArgument(!srcs.isEmpty() || !modules.isEmpty(), "There must be at least one input source or module");
    checkArgument(intersection(srcs, externs).isEmpty(),
            "The sources and externs inputs must be disjoint:\n  sources: %s\n  externs: %s", srcs, externs);
    checkArgument(intersection(srcs, modules).isEmpty(),
            "The sources and modules inputs must be disjoint:\n  sources: %s\n  modules: %s", srcs, modules);
    checkArgument(intersection(modules, externs).isEmpty(),
            "The sources and modules inputs must be disjoint:\n  modules: %s\n  externs: %s", modules, externs);
    checkArgument(!exists(output) || isDirectory(output), "Output path, %s, is not a directory", output);
    checkArgument(!readme.isPresent() || exists(readme.get()), "README path, %s, does not exist",
            readme.orNull());
    for (Page page : customPages) {
        checkArgument(exists(page.getPath()), "For custom page \"%s\", file does not exist: %s", page.getName(),
                page.getPath());
    }

    this.srcs = srcs;
    this.modules = modules;
    this.srcPrefix = getSourcePrefixPath(fileSystem, srcs, modules);
    this.modulePrefix = getModulePreixPath(fileSystem, modulePrefix, modules);
    this.externs = externs;
    this.excludes = excludes;
    this.typeFilters = typeFilters;
    this.output = output;
    this.isZipOutput = isZipOutput;
    this.readme = readme;
    this.customPages = ImmutableList.copyOf(customPages);
    this.strict = strict;
    this.useMarkdown = useMarkdown;
    this.language = language;
    this.outputStream = outputStream;
    this.errorStream = errorStream;
    this.fileSystem = fileSystem;
}

From source file:com.github.filosganga.geogson.gson.PositionsAdapter.java

private Positions parsePositions(JsonReader in) throws IOException {

    Optional<Positions> parsed = Optional.absent();

    if (in.peek() != JsonToken.BEGIN_ARRAY) {
        throw new IllegalArgumentException("The given json is not a valid positions");
    }

    in.beginArray();
    if (in.peek() == JsonToken.NUMBER) {
        parsed = Optional.of(parseSinglePosition(in));
    } else if (in.peek() == JsonToken.BEGIN_ARRAY) {
        while (in.hasNext()) {
            Positions thisPositions = parsePositions(in);
            // fix bug #30: according to the recursion (i.e. the array structure;
            // recognize that we came from a recursion because parsed has no
            // value yet): convert the already parsed Positions to the
            // LinearPositions/AreaPositions matching the recursion level
            if (parsed.equals(Optional.absent()) && thisPositions instanceof LinearPositions) {
                AreaPositions areaPositions = new AreaPositions(
                        ImmutableList.of((LinearPositions) thisPositions));
                parsed = Optional.of((Positions) areaPositions);
            } else if (parsed.equals(Optional.absent()) && thisPositions instanceof AreaPositions) {
                MultiDimensionalPositions multiPositions = new MultiDimensionalPositions(
                        ImmutableList.of((AreaPositions) thisPositions));
                parsed = Optional.of((Positions) multiPositions);
            } else {
                // mergeFn() does all the rest, if parsed has a value
                parsed = parsed.transform(mergeFn(thisPositions)).or(Optional.of(thisPositions));
            }

        }
    }

    in.endArray();

    return parsed.orNull();
}

From source file:org.opencms.xml.containerpage.CmsFormatterConfiguration.java

/**
 * Returns the formatter from this configuration that is to be used for the preview in the ADE gallery GUI,
 * or <code>null</code> if there is no preview formatter configured.<p>
 *
 * @return the formatter from this configuration that is to be used for the preview in the ADE gallery GUI,
 * or <code>null</code> if there is no preview formatter configured
 */
public I_CmsFormatterBean getPreviewFormatter() {

    Optional<I_CmsFormatterBean> result;
    result = Iterables.tryFind(m_allFormatters, new Predicate<I_CmsFormatterBean>() {

        public boolean apply(I_CmsFormatterBean formatter) {

            return formatter.isPreviewFormatter();
        }
    });
    if (!result.isPresent()) {
        result = Iterables.tryFind(m_allFormatters, new Predicate<I_CmsFormatterBean>() {

            public boolean apply(I_CmsFormatterBean formatter) {

                if (formatter.isTypeFormatter()) {
                    return formatter.getContainerTypes().contains(CmsFormatterBean.PREVIEW_TYPE);
                } else {
                    return (formatter.getMinWidth() <= CmsFormatterBean.PREVIEW_WIDTH)
                            && (CmsFormatterBean.PREVIEW_WIDTH <= formatter.getMaxWidth());
                }
            }
        });
    }
    if (!result.isPresent()) {
        result = Iterables.tryFind(m_allFormatters, new Predicate<I_CmsFormatterBean>() {

            public boolean apply(I_CmsFormatterBean formatter) {

                return !formatter.isTypeFormatter()
                        && (formatter.getMaxWidth() >= CmsFormatterBean.PREVIEW_WIDTH);

            }
        });
    }
    if (!result.isPresent() && !m_allFormatters.isEmpty()) {
        result = Optional.fromNullable(m_allFormatters.iterator().next());
    }
    return result.orNull();
}