List of usage examples for com.google.common.collect ImmutableMap isEmpty
@Override public boolean isEmpty()
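The method returns true only when the map contains no entries. A minimal, self-contained sketch of the call (the class name and values here are illustrative, not taken from any of the source files that follow):

import com.google.common.collect.ImmutableMap;

public class ImmutableMapIsEmptyDemo {
    public static void main(String[] args) {
        ImmutableMap<String, Integer> empty = ImmutableMap.of();
        ImmutableMap<String, Integer> populated = ImmutableMap.of("answer", 42);

        System.out.println(empty.isEmpty());     // true
        System.out.println(populated.isEmpty()); // false

        // Typical guard, as in the examples below: only do work when something was collected.
        if (!populated.isEmpty()) {
            populated.forEach((key, value) -> System.out.println(key + " -> " + value));
        }
    }
}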
From source file:org.openqa.selenium.remote.session.InternetExplorerFilter.java
@Override
public Map<String, Object> apply(Map<String, Object> unmodifiedCaps) {
    ImmutableMap<String, Object> caps = unmodifiedCaps.entrySet().parallelStream()
            .filter(entry -> ("browserName".equals(entry.getKey())
                    && "internet explorer".equals(entry.getValue()))
                    || "browserAttachTimeout".equals(entry.getKey())
                    || "enableElementCacheCleanup".equals(entry.getKey())
                    || "enablePersistentHover".equals(entry.getKey())
                    || "extractPath".equals(entry.getKey())
                    || "host".equals(entry.getKey())
                    || "ignoreZoomSetting".equals(entry.getKey())
                    || "initialBrowserZoom".equals(entry.getKey())
                    || "logFile".equals(entry.getKey())
                    || "logLevel".equals(entry.getKey())
                    || "requireWindowFocus".equals(entry.getKey())
                    || "se:ieOptions".equals(entry.getKey())
                    || "silent".equals(entry.getKey())
                    || entry.getKey().startsWith("ie."))
            .distinct()
            .filter(entry -> Objects.nonNull(entry.getValue()))
            .collect(ImmutableMap.toImmutableMap(Map.Entry::getKey, Map.Entry::getValue));

    return caps.isEmpty() ? null : caps;
}
From source file:com.facebook.buck.config.BuckConfig.java
public ImmutableMap<String, ImmutableMap<String, String>> getRawConfigForParser() {
    ImmutableMap<String, ImmutableMap<String, String>> rawSections = config.getSectionToEntries();

    // If the raw config doesn't have sections which have ignored fields, then just return it as-is.
    ImmutableSet<String> sectionsWithIgnoredFields = IGNORE_FIELDS_FOR_DAEMON_RESTART.keySet();
    if (Sets.intersection(rawSections.keySet(), sectionsWithIgnoredFields).isEmpty()) {
        return rawSections;
    }

    // Otherwise, iterate through the config to do finer-grain filtering.
    ImmutableMap.Builder<String, ImmutableMap<String, String>> filtered = ImmutableMap.builder();
    for (Map.Entry<String, ImmutableMap<String, String>> sectionEnt : rawSections.entrySet()) {
        String sectionName = sectionEnt.getKey();

        // If this section doesn't have a corresponding ignored section, then just add it as-is.
        if (!sectionsWithIgnoredFields.contains(sectionName)) {
            filtered.put(sectionEnt);
            continue;
        }

        // If none of this section's entries are ignored, then add it as-is.
        ImmutableMap<String, String> fields = sectionEnt.getValue();
        ImmutableSet<String> ignoredFieldNames = IGNORE_FIELDS_FOR_DAEMON_RESTART.getOrDefault(sectionName,
                ImmutableSet.of());
        if (Sets.intersection(fields.keySet(), ignoredFieldNames).isEmpty()) {
            filtered.put(sectionEnt);
            continue;
        }

        // Otherwise, filter out the ignored fields.
        ImmutableMap<String, String> remainingKeys = ImmutableMap
                .copyOf(Maps.filterKeys(fields, Predicates.not(ignoredFieldNames::contains)));
        if (!remainingKeys.isEmpty()) {
            filtered.put(sectionName, remainingKeys);
        }
    }

    return filtered.build();
}
From source file:org.locationtech.geogig.storage.postgresql.PGObjectStore.java
@SuppressWarnings({ "rawtypes", "unchecked" }) @Override// w w w. j ava 2 s.co m public <T extends RevObject> Iterator<T> getAll(Iterable<ObjectId> ids, BulkOpListener listener, Class<T> type) { checkNotNull(ids, "ids is null"); checkNotNull(listener, "listener is null"); checkNotNull(type, "type is null"); checkState(isOpen(), "Database is closed"); config.checkRepositoryExists(); final Set<ObjectId> queryIds = ids instanceof Set ? (Set<ObjectId>) ids : Sets.newHashSet(ids); ImmutableMap<ObjectId, byte[]> cached = byteCache.getAllPresent(queryIds); Iterator<T> hits = Collections.emptyIterator(); Iterator<T> stream = Collections.emptyIterator(); if (!cached.isEmpty()) { Map<ObjectId, T> cachedObjects = Maps.transformEntries(cached, (id, bytes) -> { RevObject o = encoder.decode(id, bytes); if (type.isAssignableFrom(o.getClass())) { listener.found(id, Integer.valueOf(bytes.length)); return type.cast(o); } listener.notFound(id); return null; }); hits = Iterators.filter(cachedObjects.values().iterator(), Predicates.notNull()); } if (queryIds.size() > cached.size()) { Set<ObjectId> misses = Sets.difference(queryIds, cached.keySet()); stream = new GetAllIterator(dataSource, misses.iterator(), type, listener, this); } return Iterators.concat(hits, stream); }
From source file:com.google.javascript.jscomp.FunctionTypeBuilder.java
/**
 * Infer the template type from the doc info.
 */
FunctionTypeBuilder inferTemplateTypeName(@Nullable JSDocInfo info, JSType ownerType) {
    // NOTE: these template type names may override a list
    // of inherited ones from an overridden function.
    if (info != null) {
        ImmutableList.Builder<TemplateType> builder = ImmutableList.builder();
        ImmutableList<String> infoTemplateTypeNames = info.getTemplateTypeNames();
        ImmutableMap<String, Node> infoTypeTransformations = info.getTypeTransformations();
        if (!infoTemplateTypeNames.isEmpty()) {
            for (String key : infoTemplateTypeNames) {
                if (typeRegistry.isIObjectValueKey(fnName, key)) {
                    builder.add(typeRegistry.getIObjectValueKey());
                } else {
                    builder.add(typeRegistry.createTemplateType(key));
                }
            }
        }
        if (!infoTypeTransformations.isEmpty()) {
            for (Entry<String, Node> entry : infoTypeTransformations.entrySet()) {
                builder.add(
                        typeRegistry.createTemplateTypeWithTransformation(entry.getKey(), entry.getValue()));
            }
        }
        if (!infoTemplateTypeNames.isEmpty() || !infoTypeTransformations.isEmpty()) {
            templateTypeNames = builder.build();
        }
    }

    ImmutableList<TemplateType> keys = templateTypeNames;
    if (ownerType != null) {
        ImmutableList<TemplateType> ownerTypeKeys = ownerType.getTemplateTypeMap().getTemplateKeys();
        if (!ownerTypeKeys.isEmpty()) {
            ImmutableList.Builder<TemplateType> builder = ImmutableList.builder();
            builder.addAll(templateTypeNames);
            builder.addAll(ownerTypeKeys);
            keys = builder.build();
        }
    }

    if (!keys.isEmpty()) {
        typeRegistry.setTemplateTypeNames(keys);
    }
    return this;
}
From source file:org.kiji.scoring.impl.InternalFreshKijiTableReader.java
/** {@inheritDoc} */
@Override
public KijiRowData get(final EntityId entityId, final KijiDataRequest dataRequest,
        final FreshRequestOptions options) throws IOException {
    requireState(LifecycleState.OPEN);

    // Get the start time for the request.
    final long startTime = System.nanoTime();
    final String id = String.format("%s#%s", mReaderUID, mUniqueIdGenerator.getNextUniqueId());
    LOG.debug("{} starting with EntityId: {} data request: {} request options: {}", id, entityId,
            dataRequest, options);
    final KijiTableReader requestReader = ScoringUtils.getPooledReader(mReaderPool);
    try {
        final ImmutableList<KijiColumnName> requestColumns = removeDisabledColumns(
                getColumnsFromRequest(dataRequest), options.getDisabledColumns());

        final ImmutableMap<KijiColumnName, Freshener> fresheners;
        final ImmutableMap<KijiColumnName, KijiFreshenerRecord> records;
        // Get a retained snapshot of the rereadable state.
        final RereadableState rereadableState = getRereadableState();
        try {
            // Collect the Fresheners and Records applicable to this request.
            fresheners = filterFresheners(requestColumns, rereadableState.mFresheners);
            records = filterRecords(rereadableState.mFreshenerRecords, requestColumns);
            // If there are no Fresheners attached to the requested columns, return the requested data.
            if (fresheners.isEmpty()) {
                return requestReader.get(entityId, dataRequest);
            } else {
                // Retain the Fresheners so that they cannot be cleaned up while in use.
                for (Map.Entry<KijiColumnName, Freshener> freshenerEntry : fresheners.entrySet()) {
                    freshenerEntry.getValue().retain();
                }
            }
        } finally {
            rereadableState.release();
        }
        LOG.debug("{} will run Fresheners: {}", id, fresheners.values());

        final Future<KijiRowData> clientDataFuture = ScoringUtils.getFuture(mExecutorService,
                new TableReadCallable(mReaderPool, entityId, dataRequest));

        final FresheningRequestContext requestContext = new FresheningRequestContext(id, startTime,
                fresheners, options.getParameters(), records, mReaderPool, entityId, dataRequest,
                clientDataFuture, mBufferedWriter, mAllowPartial, mStatisticGatheringMode, mStatisticsQueue,
                mExecutorService);

        final ImmutableList<Future<Boolean>> futures = requestContext.getFuturesForFresheners();

        final Future<List<Boolean>> superFuture = ScoringUtils.getFuture(mExecutorService,
                new FutureAggregatingCallable<Boolean>(futures));

        // If the options specify timeout of -1 this indicates we should use the configured timeout.
        final long timeout = (-1 == options.getTimeout()) ? mTimeout : options.getTimeout();
        try {
            if (ScoringUtils.getFromFuture(superFuture, timeout).contains(true)) {
                // If all Fresheners return in time and at least one has written a new value, read from
                // the table.
                LOG.debug("{} completed on time and data was written.", id);
                return requestReader.get(entityId, dataRequest);
            } else {
                // If all Fresheners return in time, but none have written new values, do not read from
                // the table.
                LOG.debug("{} completed on time and no data was written.", id);
                try {
                    return ScoringUtils.getFromFuture(clientDataFuture, 0L);
                } catch (TimeoutException te) {
                    // If client data is not immediately available, read from the table.
                    return requestReader.get(entityId, dataRequest);
                }
            }
        } catch (TimeoutException te) {
            requestContext.timeOut();
            // If superFuture times out, read partially freshened data from the table or return the
            // cached data based on whether partial freshness is allowed.
            LOG.debug("{} timed out, checking for partial writes.", id);
            return requestContext.checkAndRead();
        }
    } finally {
        // Return the reader to the pool.
        requestReader.close();
    }
}
From source file:com.aitorvs.autoparcel.internal.codegen.AutoParcelProcessor.java
private String generateClass(TypeElement type, String className, String classToExtend, boolean isFinal) {
    if (type == null) {
        mErrorReporter.abortWithError("generateClass was invoked with null type", type);
    }
    if (className == null) {
        mErrorReporter.abortWithError("generateClass was invoked with null class name", type);
    }
    if (classToExtend == null) {
        mErrorReporter.abortWithError("generateClass was invoked with null parent class", type);
    }
    List<VariableElement> nonPrivateFields = getParcelableFieldsOrError(type);
    if (nonPrivateFields.isEmpty()) {
        mErrorReporter.abortWithError("generateClass error, all fields are declared PRIVATE", type);
    }

    // get the properties
    ImmutableList<Property> properties = buildProperties(nonPrivateFields);

    // get the type adapters
    ImmutableMap<TypeMirror, FieldSpec> typeAdapters = getTypeAdapters(properties);

    // get the parcel version
    //noinspection ConstantConditions
    int version = type.getAnnotation(AutoParcel.class).version();

    // Generate the AutoParcel_??? class
    String pkg = TypeUtil.packageNameOf(type);
    TypeName classTypeName = ClassName.get(pkg, className);
    TypeSpec.Builder subClass = TypeSpec.classBuilder(className)
            // Add the version
            .addField(TypeName.INT, "version", PRIVATE)
            // Class must be always final
            .addModifiers(FINAL)
            // extends from original abstract class
            .superclass(ClassName.get(pkg, classToExtend))
            // Add the DEFAULT constructor
            .addMethod(generateConstructor(properties))
            // Add the private constructor
            .addMethod(generateConstructorFromParcel(processingEnv, properties, typeAdapters))
            // overrides describeContents()
            .addMethod(generateDescribeContents())
            // static final CREATOR
            .addField(generateCreator(processingEnv, properties, classTypeName, typeAdapters))
            // overrides writeToParcel()
            .addMethod(generateWriteToParcel(version, processingEnv, properties, typeAdapters)); // generate writeToParcel()

    if (!ancestoIsParcelable(processingEnv, type)) {
        // Implement android.os.Parcelable if the ancestor does not do it.
        subClass.addSuperinterface(ClassName.get("android.os", "Parcelable"));
    }

    if (!typeAdapters.isEmpty()) {
        typeAdapters.values().forEach(subClass::addField);
    }

    JavaFile javaFile = JavaFile.builder(pkg, subClass.build()).build();
    return javaFile.toString();
}
From source file:com.google.devtools.build.lib.skyframe.SkyframeBuildView.java
/**
 * Analyzes the specified targets using Skyframe as the driving framework.
 *
 * @return the configured targets that should be built along with a WalkableGraph of the analysis.
 */
public SkyframeAnalysisResult configureTargets(EventHandler eventHandler, List<ConfiguredTargetKey> values,
        List<AspectValueKey> aspectKeys, EventBus eventBus, boolean keepGoing, int numThreads)
        throws InterruptedException, ViewCreationFailedException {
    enableAnalysis(true);
    EvaluationResult<ActionLookupValue> result;
    try {
        result = skyframeExecutor.configureTargets(eventHandler, values, aspectKeys, keepGoing, numThreads);
    } finally {
        enableAnalysis(false);
    }
    ImmutableMap<ActionAnalysisMetadata, ConflictException> badActions = skyframeExecutor
            .findArtifactConflicts();

    Collection<AspectValue> goodAspects = Lists.newArrayListWithCapacity(values.size());
    NestedSetBuilder<Package> packages = NestedSetBuilder.stableOrder();
    for (AspectValueKey aspectKey : aspectKeys) {
        AspectValue value = (AspectValue) result.get(aspectKey.getSkyKey());
        if (value == null) {
            // Skip aspects that couldn't be applied to targets.
            continue;
        }
        goodAspects.add(value);
        packages.addTransitive(value.getTransitivePackages());
    }

    // Filter out all CTs that have a bad action and convert to a list of configured targets. This
    // code ensures that the resulting list of configured targets has the same order as the incoming
    // list of values, i.e., that the order is deterministic.
    Collection<ConfiguredTarget> goodCts = Lists.newArrayListWithCapacity(values.size());
    for (ConfiguredTargetKey value : values) {
        ConfiguredTargetValue ctValue = (ConfiguredTargetValue) result.get(ConfiguredTargetValue.key(value));
        if (ctValue == null) {
            continue;
        }
        goodCts.add(ctValue.getConfiguredTarget());
        packages.addTransitive(ctValue.getTransitivePackages());
    }

    ImmutableMap<PackageIdentifier, Path> packageRoots = LoadingPhaseRunner
            .collectPackageRoots(packages.build().toCollection());

    if (!result.hasError() && badActions.isEmpty()) {
        return new SkyframeAnalysisResult(/*hasLoadingError=*/false, /*hasAnalysisError=*/false,
                ImmutableList.copyOf(goodCts), result.getWalkableGraph(), ImmutableList.copyOf(goodAspects),
                packageRoots);
    }

    // --nokeep_going so we fail with an exception for the first error.
    // TODO(bazel-team): We might want to report the other errors through the event bus but
    // for keeping this code in parity with legacy we just report the first error for now.
    if (!keepGoing) {
        for (Map.Entry<ActionAnalysisMetadata, ConflictException> bad : badActions.entrySet()) {
            ConflictException ex = bad.getValue();
            try {
                ex.rethrowTyped();
            } catch (MutableActionGraph.ActionConflictException ace) {
                ace.reportTo(eventHandler);
                String errorMsg = "Analysis of target '" + bad.getKey().getOwner().getLabel()
                        + "' failed; build aborted";
                throw new ViewCreationFailedException(errorMsg);
            } catch (ArtifactPrefixConflictException apce) {
                eventHandler.handle(Event.error(apce.getMessage()));
            }
            throw new ViewCreationFailedException(ex.getMessage());
        }

        Map.Entry<SkyKey, ErrorInfo> error = result.errorMap().entrySet().iterator().next();
        SkyKey topLevel = error.getKey();
        ErrorInfo errorInfo = error.getValue();
        assertSaneAnalysisError(errorInfo, topLevel);
        skyframeExecutor.getCyclesReporter().reportCycles(errorInfo.getCycleInfo(), topLevel, eventHandler);
        Throwable cause = errorInfo.getException();
        Preconditions.checkState(cause != null || !Iterables.isEmpty(errorInfo.getCycleInfo()), errorInfo);
        String errorMsg = null;
        if (topLevel.argument() instanceof ConfiguredTargetKey) {
            errorMsg = "Analysis of target '" + ConfiguredTargetValue.extractLabel(topLevel)
                    + "' failed; build aborted";
        } else if (topLevel.argument() instanceof AspectValueKey) {
            AspectValueKey aspectKey = (AspectValueKey) topLevel.argument();
            errorMsg = "Analysis of aspect '" + aspectKey.getDescription() + "' failed; build aborted";
        } else {
            assert false;
        }
        if (cause instanceof ActionConflictException) {
            ((ActionConflictException) cause).reportTo(eventHandler);
        }
        throw new ViewCreationFailedException(errorMsg);
    }

    boolean hasLoadingError = false;
    // --keep_going : We notify the error and return a ConfiguredTargetValue
    for (Map.Entry<SkyKey, ErrorInfo> errorEntry : result.errorMap().entrySet()) {
        // Only handle errors of configured targets, not errors of top-level aspects.
        // TODO(ulfjack): this is quadratic - if there are a lot of CTs, this could be rather slow.
        if (!values.contains(errorEntry.getKey().argument())) {
            continue;
        }
        SkyKey errorKey = errorEntry.getKey();
        ConfiguredTargetKey label = (ConfiguredTargetKey) errorKey.argument();
        Label topLevelLabel = label.getLabel();
        ErrorInfo errorInfo = errorEntry.getValue();
        assertSaneAnalysisError(errorInfo, errorKey);
        skyframeExecutor.getCyclesReporter().reportCycles(errorInfo.getCycleInfo(), errorKey, eventHandler);
        Exception cause = errorInfo.getException();
        Label analysisRootCause = null;
        if (cause instanceof ConfiguredValueCreationException) {
            ConfiguredValueCreationException ctCause = (ConfiguredValueCreationException) cause;
            for (Label rootCause : ctCause.getRootCauses()) {
                hasLoadingError = true;
                eventBus.post(new LoadingFailureEvent(topLevelLabel, rootCause));
            }
            analysisRootCause = ctCause.getAnalysisRootCause();
        } else if (!Iterables.isEmpty(errorInfo.getCycleInfo())) {
            analysisRootCause = maybeGetConfiguredTargetCycleCulprit(topLevelLabel, errorInfo.getCycleInfo());
        } else if (cause instanceof ActionConflictException) {
            ((ActionConflictException) cause).reportTo(eventHandler);
        }
        eventHandler.handle(Event.warn(
                "errors encountered while analyzing target '" + topLevelLabel + "': it will not be built"));
        if (analysisRootCause != null) {
            eventBus.post(new AnalysisFailureEvent(
                    LabelAndConfiguration.of(topLevelLabel, label.getConfiguration()), analysisRootCause));
        }
    }

    Collection<Exception> reportedExceptions = Sets.newHashSet();
    for (Map.Entry<ActionAnalysisMetadata, ConflictException> bad : badActions.entrySet()) {
        ConflictException ex = bad.getValue();
        try {
            ex.rethrowTyped();
        } catch (MutableActionGraph.ActionConflictException ace) {
            ace.reportTo(eventHandler);
            eventHandler.handle(Event.warn("errors encountered while analyzing target '"
                    + bad.getKey().getOwner().getLabel() + "': it will not be built"));
        } catch (ArtifactPrefixConflictException apce) {
            if (reportedExceptions.add(apce)) {
                eventHandler.handle(Event.error(apce.getMessage()));
            }
        }
    }

    if (!badActions.isEmpty()) {
        // In order to determine the set of configured targets transitively error free from action
        // conflict issues, we run a post-processing update() that uses the bad action map.
        EvaluationResult<PostConfiguredTargetValue> actionConflictResult = skyframeExecutor
                .postConfigureTargets(eventHandler, values, keepGoing, badActions);

        goodCts = Lists.newArrayListWithCapacity(values.size());
        for (ConfiguredTargetKey value : values) {
            PostConfiguredTargetValue postCt = actionConflictResult.get(PostConfiguredTargetValue.key(value));
            if (postCt != null) {
                goodCts.add(postCt.getCt());
            }
        }
    }

    return new SkyframeAnalysisResult(hasLoadingError, result.hasError() || !badActions.isEmpty(),
            ImmutableList.copyOf(goodCts), result.getWalkableGraph(), ImmutableList.copyOf(goodAspects),
            packageRoots);
}
From source file:org.zanata.client.commands.push.RawPushCommand.java
/**
 * Return map of validated DocumentType to set of corresponding extensions, by applying the user's
 * file type options to the server's accepted file types.
 *
 * Validate user input file types against server accepted file types
 *
 * Valid input - properties_utf8,properties[txt],plain_text[md;markdown]
 *
 * - Each file type must appear only once - e.g.
 *   - valid: "html,properties,txt"
 *   - invalid: "html,properties,html"
 * - Same file extension must not appear in multiple file types - e.g. plain_text[txt],properties[txt]
 *
 * @param serverFileTypes
 * @param fileTypesSpec
 */
public ImmutableList<FileTypeInfo> getActualFileTypes(List<FileTypeInfo> serverFileTypes,
        List<String> fileTypesSpec) {
    // cumulative list of activated types
    ImmutableList.Builder<FileTypeInfo> docTypeMappings = new ImmutableList.Builder<>();
    // types which have been specified by the user so far
    Set<FileTypeName> seenUserDocTypes = new HashSet<>();
    // extensions which have been specified by the user so far
    Set<String> seenUserExtensions = new HashSet<>();

    if (fileTypesSpec.isEmpty()) {
        return ImmutableList.of();
    }

    for (String fileTypeSpec : fileTypesSpec) {
        @Nullable
        FileTypeName userType = extractFileTypeName(fileTypeSpec);
        ImmutableMap<String, String> userExtensions;
        if (userType == null) {
            // try parameter as a list of file extensions: ZNTA-1248
            String[] exts = fileTypeSpec.split(",");
            ImmutableMap.Builder<String, String> builder = new ImmutableMap.Builder<>();
            for (String ext : exts) {
                builder.put(ext, ext);
            }
            userExtensions = builder.build();
        } else {
            userExtensions = extractExtensions(fileTypeSpec);
        }
        validateFileExtensions(userType, userExtensions, serverFileTypes);
        assert userType != null;

        @Nullable
        FileTypeInfo fileTypeInfo = serverFileTypes.stream()
                .filter((FileTypeInfo info) -> info.getType().equals(userType)).findAny().orElse(null);

        // throw error if file type is not supported by server
        if (fileTypeInfo == null) {
            String msg = Messages.format("file.type.typeNotSupported", userType);
            throw new InvalidUserInputException(msg);
        }

        if (!seenUserDocTypes.add(userType)) {
            // throw error if file type is listed more than once
            String msg = Messages.format("file.type.duplicateFileType", userType);
            log.error(msg);
            throw new RuntimeException(msg);
        }

        for (String srcExt : userExtensions.keySet()) {
            // throw error if same file extension found in multiple file types
            if (!seenUserExtensions.add(srcExt)) {
                String msg = Messages.format("file.type.conflictExtension", srcExt, userType);
                log.error(msg);
                throw new RuntimeException(msg);
            }
        }

        // Use the extensions from docTypeMappingSpec if specified,
        // otherwise use the extensions from server.
        Map<String, String> filteredExtensions = userExtensions.isEmpty() ? fileTypeInfo.getExtensions()
                : userExtensions;
        docTypeMappings.add(new FileTypeInfo(userType, filteredExtensions));
    }
    return docTypeMappings.build();
}
From source file:com.facebook.buck.features.apple.project.ProjectGenerator.java
private void createHeaderSymlinkTree(Map<Path, SourcePath> contents, ImmutableMap<Path, Path> nonSourcePaths,
        Optional<String> moduleName, Path headerSymlinkTreeRoot, boolean shouldCreateHeadersSymlinks,
        boolean shouldCreateHeaderMap, boolean shouldGenerateUmbrellaHeaderIfMissing) throws IOException {
    if (!shouldCreateHeaderMap && !shouldCreateHeadersSymlinks) {
        return;
    }
    LOG.verbose("Building header symlink tree at %s with contents %s", headerSymlinkTreeRoot, contents);
    ImmutableSortedMap.Builder<Path, Path> resolvedContentsBuilder = ImmutableSortedMap.naturalOrder();
    for (Map.Entry<Path, SourcePath> entry : contents.entrySet()) {
        Path link = headerSymlinkTreeRoot.resolve(entry.getKey());
        Path existing = projectFilesystem.resolve(resolveSourcePath(entry.getValue()));
        resolvedContentsBuilder.put(link, existing);
    }
    for (Map.Entry<Path, Path> entry : nonSourcePaths.entrySet()) {
        Path link = headerSymlinkTreeRoot.resolve(entry.getKey());
        resolvedContentsBuilder.put(link, entry.getValue());
    }
    ImmutableSortedMap<Path, Path> resolvedContents = resolvedContentsBuilder.build();
    Path headerMapLocation = getHeaderMapLocationFromSymlinkTreeRoot(headerSymlinkTreeRoot);
    Path hashCodeFilePath = headerSymlinkTreeRoot.resolve(".contents-hash");
    Optional<String> currentHashCode = projectFilesystem.readFileIfItExists(hashCodeFilePath);
    String newHashCode = getHeaderSymlinkTreeHashCode(resolvedContents, moduleName,
            shouldCreateHeadersSymlinks, shouldCreateHeaderMap).toString();
    if (Optional.of(newHashCode).equals(currentHashCode)) {
        LOG.debug("Symlink tree at %s is up to date, not regenerating (key %s).", headerSymlinkTreeRoot,
                newHashCode);
    } else {
        LOG.debug("Updating symlink tree at %s (old key %s, new key %s).", headerSymlinkTreeRoot,
                currentHashCode, newHashCode);
        projectFilesystem.deleteRecursivelyIfExists(headerSymlinkTreeRoot);
        projectFilesystem.mkdirs(headerSymlinkTreeRoot);
        if (shouldCreateHeadersSymlinks) {
            for (Map.Entry<Path, Path> entry : resolvedContents.entrySet()) {
                Path link = entry.getKey();
                Path existing = entry.getValue();
                projectFilesystem.createParentDirs(link);
                projectFilesystem.createSymLink(link, existing, /* force */ false);
            }
        }
        projectFilesystem.writeContentsToPath(newHashCode, hashCodeFilePath);

        if (shouldCreateHeaderMap) {
            HeaderMap.Builder headerMapBuilder = new HeaderMap.Builder();
            for (Map.Entry<Path, SourcePath> entry : contents.entrySet()) {
                if (shouldCreateHeadersSymlinks) {
                    headerMapBuilder.add(entry.getKey().toString(),
                            getHeaderMapRelativeSymlinkPathForEntry(entry, headerSymlinkTreeRoot));
                } else {
                    headerMapBuilder.add(entry.getKey().toString(),
                            projectFilesystem.resolve(resolveSourcePath(entry.getValue())));
                }
            }
            for (Map.Entry<Path, Path> entry : nonSourcePaths.entrySet()) {
                if (shouldCreateHeadersSymlinks) {
                    headerMapBuilder.add(entry.getKey().toString(),
                            getHeaderMapRelativeSymlinkPathForEntry(entry, headerSymlinkTreeRoot));
                } else {
                    headerMapBuilder.add(entry.getKey().toString(), entry.getValue());
                }
            }
            projectFilesystem.writeBytesToPath(headerMapBuilder.build().getBytes(), headerMapLocation);
        }
        if (moduleName.isPresent() && resolvedContents.size() > 0) {
            if (shouldGenerateUmbrellaHeaderIfMissing) {
                writeUmbrellaHeaderIfNeeded(moduleName.get(), resolvedContents.keySet(),
                        headerSymlinkTreeRoot);
            }
            boolean containsSwift = !nonSourcePaths.isEmpty();
            if (containsSwift) {
                projectFilesystem.writeContentsToPath(
                        new ModuleMap(moduleName.get(), ModuleMap.SwiftMode.INCLUDE_SWIFT_HEADER).render(),
                        headerSymlinkTreeRoot.resolve(moduleName.get()).resolve("module.modulemap"));
                projectFilesystem.writeContentsToPath(
                        new ModuleMap(moduleName.get(), ModuleMap.SwiftMode.EXCLUDE_SWIFT_HEADER).render(),
                        headerSymlinkTreeRoot.resolve(moduleName.get()).resolve("objc.modulemap"));

                Path absoluteModuleRoot = projectFilesystem.getRootPath()
                        .resolve(headerSymlinkTreeRoot.resolve(moduleName.get()));
                VFSOverlay vfsOverlay = new VFSOverlay(
                        ImmutableSortedMap.of(absoluteModuleRoot.resolve("module.modulemap"),
                                absoluteModuleRoot.resolve("objc.modulemap")));
                projectFilesystem.writeContentsToPath(vfsOverlay.render(),
                        getObjcModulemapVFSOverlayLocationFromSymlinkTreeRoot(headerSymlinkTreeRoot));
            } else {
                projectFilesystem.writeContentsToPath(
                        new ModuleMap(moduleName.get(), ModuleMap.SwiftMode.NO_SWIFT).render(),
                        headerSymlinkTreeRoot.resolve(moduleName.get()).resolve("module.modulemap"));
            }
            Path absoluteModuleRoot = projectFilesystem.getRootPath()
                    .resolve(headerSymlinkTreeRoot.resolve(moduleName.get()));
            VFSOverlay vfsOverlay = new VFSOverlay(
                    ImmutableSortedMap.of(absoluteModuleRoot.resolve("module.modulemap"),
                            absoluteModuleRoot.resolve("testing.modulemap")));
            projectFilesystem.writeContentsToPath(vfsOverlay.render(),
                    getTestingModulemapVFSOverlayLocationFromSymlinkTreeRoot(headerSymlinkTreeRoot));
            projectFilesystem.writeContentsToPath("", // empty modulemap to allow non-modular imports for testing
                    headerSymlinkTreeRoot.resolve(moduleName.get()).resolve("testing.modulemap"));
        }
    }
    headerSymlinkTrees.add(headerSymlinkTreeRoot);
}