Example usage for com.google.common.collect ImmutableSet size

List of usage examples for com.google.common.collect ImmutableSet size

Introduction

On this page you can find example usage for the com.google.common.collect.ImmutableSet.size() method.

Prototype

int size();

Document

Returns the number of elements in this set (its cardinality).
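
A minimal, self-contained sketch of this behavior (class and variable names are illustrative):

import com.google.common.collect.ImmutableSet;

public class ImmutableSetSizeSketch {
    public static void main(String[] args) {
        // Duplicates are dropped at construction time, so they do not count towards size().
        ImmutableSet<String> colors = ImmutableSet.of("red", "green", "blue", "red");
        System.out.println(colors.size()); // prints 3

        // The empty set has cardinality 0.
        System.out.println(ImmutableSet.of().size()); // prints 0
    }
}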

Usage

From source file: com.google.caliper.runner.ExperimentingCaliperRun.java

@Override
public void run() throws InvalidBenchmarkException {
    ImmutableSet<Experiment> allExperiments = selector.selectExperiments();
    // TODO(lukes): move this standard-out handling into the ConsoleOutput class?
    stdout.println("Experiment selection: ");
    stdout.println("  Benchmark Methods:   "
            + FluentIterable.from(allExperiments).transform(new Function<Experiment, String>() {
                @Override
                public String apply(Experiment experiment) {
                    return experiment.instrumentation().benchmarkMethod().getName();
                }
            }).toSet());
    stdout.println("  Instruments:   "
            + FluentIterable.from(selector.instruments()).transform(new Function<Instrument, String>() {
                @Override
                public String apply(Instrument instrument) {
                    return instrument.name();
                }
            }));
    stdout.println("  User parameters:   " + selector.userParameters());
    stdout.println("  Virtual machines:  "
            + FluentIterable.from(selector.vms()).transform(new Function<VirtualMachine, String>() {
                @Override
                public String apply(VirtualMachine vm) {
                    return vm.name;
                }
            }));
    stdout.println("  Selection type:    " + selector.selectionType());
    stdout.println();

    if (allExperiments.isEmpty()) {
        throw new InvalidBenchmarkException(
                "There were no experiments to be performed for the class %s using the instruments %s",
                benchmarkClass.benchmarkClass().getSimpleName(), instruments);
    }

    stdout.format("This selection yields %s experiments.%n", allExperiments.size());
    stdout.flush();

    // always dry run first.
    ImmutableSet<Experiment> experimentsToRun = dryRun(allExperiments);
    if (experimentsToRun.size() != allExperiments.size()) {
        stdout.format("%d experiments were skipped.%n", allExperiments.size() - experimentsToRun.size());
    }

    if (experimentsToRun.isEmpty()) {
        throw new InvalidBenchmarkException("All experiments were skipped.");
    }

    if (options.dryRun()) {
        return;
    }

    stdout.flush();

    int totalTrials = experimentsToRun.size() * options.trialsPerScenario();
    Stopwatch stopwatch = Stopwatch.createStarted();
    List<ScheduledTrial> trials = createScheduledTrials(experimentsToRun, totalTrials);

    final ListeningExecutorService executor = executorProvider.get();
    List<ListenableFuture<TrialResult>> pendingTrials = scheduleTrials(trials, executor);
    ConsoleOutput output = new ConsoleOutput(stdout, totalTrials, stopwatch);
    try {
        // Process results as they complete.
        for (ListenableFuture<TrialResult> trialFuture : inCompletionOrder(pendingTrials)) {
            try {
                TrialResult result = trialFuture.get();
                output.processTrial(result);
                for (ResultProcessor resultProcessor : resultProcessors) {
                    resultProcessor.processTrial(result.getTrial());
                }
            } catch (ExecutionException e) {
                if (e.getCause() instanceof TrialFailureException) {
                    output.processFailedTrial((TrialFailureException) e.getCause());
                } else {
                    for (ListenableFuture<?> toCancel : pendingTrials) {
                        toCancel.cancel(true);
                    }
                    throw Throwables.propagate(e.getCause());
                }
            } catch (InterruptedException e) {
                // be responsive to interruption, cancel outstanding work and exit
                for (ListenableFuture<?> toCancel : pendingTrials) {
                    // N.B. TrialRunLoop is responsive to interruption.
                    toCancel.cancel(true);
                }
                throw new RuntimeException(e);
            }
        }
    } finally {
        executor.shutdown();
        output.close();
    }

    for (ResultProcessor resultProcessor : resultProcessors) {
        try {
            resultProcessor.close();
        } catch (IOException e) {
            logger.log(WARNING, "Could not close a result processor: " + resultProcessor, e);
        }
    }
}
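
The dry-run passage above compares the size() of two ImmutableSets to report how many experiments were filtered out. A minimal sketch of that pattern (names are illustrative):

import com.google.common.collect.ImmutableSet;

public class SkippedCount {
    static void reportSkipped(ImmutableSet<String> all, ImmutableSet<String> toRun) {
        // If the filtered set is smaller, the size difference is the number of skipped items.
        if (toRun.size() != all.size()) {
            System.out.format("%d experiments were skipped.%n", all.size() - toRun.size());
        }
    }

    public static void main(String[] args) {
        reportSkipped(ImmutableSet.of("a", "b", "c"), ImmutableSet.of("a", "c"));
    }
}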

From source file: org.apache.hadoop.hdfs.DataStreamer.java

/**
 * Open a DataStreamer to a DataNode so that it can be written to.
 * This happens when a file is created and each time a new block is allocated.
 * Must get block ID and the IDs of the destinations from the namenode.
 * Returns the list of target datanodes.
 */
private LocatedBlock nextBlockOutputStream() throws IOException {
    LocatedBlock lb = null;
    DatanodeInfo[] nodes = null;
    StorageType[] storageTypes = null;
    int count = dfsClient.getConf().nBlockWriteRetry;
    boolean success = false;
    ExtendedBlock oldBlock = block;
    do {
        hasError = false;
        lastException.set(null);
        errorIndex = -1;
        success = false;

        DatanodeInfo[] excluded;
        long startTime = Time.now();

        if (erasureCodingSourceStream && currentBlockIndex % stripeLength == 0) {
            usedNodes.clear();
            LOG.info("Stripe length " + stripeLength + " parity length " + parityLength);
            LOG.info("Source write block index " + currentBlockIndex);
        }

        if (erasureCodingParityStream && currentBlockIndex % parityLength == 0) {
            usedNodes.clear();
            stripeNodes.clear();
            int stripe = (int) Math.ceil(currentBlockIndex / (float) parityLength);
            int index = stripe * stripeLength;
            LOG.info("Stripe length " + stripeLength + " parity length " + parityLength);
            LOG.info("Parity write block index " + currentBlockIndex + " found index " + index + " end "
                    + (index + stripeLength));
            for (int j = index; j < sourceBlocks.size() && j < index + stripeLength; j++) {
                DatanodeInfo[] nodeInfos = sourceBlocks.get(j).getLocations();
                Collections.addAll(stripeNodes, nodeInfos);
            }
        }

        if (erasureCodingSourceStream || erasureCodingParityStream) {
            ImmutableSet<DatanodeInfo> excludedSet = excludedNodes.getAllPresent(excludedNodes.asMap().keySet())
                    .keySet();

            excluded = new DatanodeInfo[excludedSet.size() + usedNodes.size() + stripeNodes.size()
                    + parityStripeNodes.size()];
            int i = 0;
            for (DatanodeInfo node : excludedSet) {
                excluded[i] = node;
                LOG.info("Excluding node " + node);
                i++;
            }
            for (DatanodeInfo node : usedNodes) {
                excluded[i] = node;
                LOG.info((erasureCodingSourceStream ? "Source stream: " : " Parity stream: ") + "Block "
                        + currentBlockIndex + " excluding used node " + node);
                i++;
            }
            for (DatanodeInfo node : stripeNodes) {
                excluded[i] = node;
                LOG.info((erasureCodingSourceStream ? "Source stream: " : " Parity stream: ") + "Block "
                        + currentBlockIndex + " excluding stripe node " + node);
                i++;
            }
            for (DatanodeInfo node : parityStripeNodes) {
                excluded[i] = node;
                LOG.info((erasureCodingSourceStream ? "Source stream: " : " Parity stream: ") + "Block "
                        + currentBlockIndex + " excluding parity node " + node);
                i++;
            }
            currentBlockIndex++;
        } else {
            excluded = excludedNodes.getAllPresent(excludedNodes.asMap().keySet()).keySet()
                    .toArray(new DatanodeInfo[0]);
        }
        block = oldBlock;
        lb = locateFollowingBlock(excluded.length > 0 ? excluded : null);
        block = lb.getBlock();
        block.setNumBytes(0);
        bytesSent = 0;
        accessToken = lb.getBlockToken();
        nodes = lb.getLocations();
        storageTypes = lb.getStorageTypes();

        //
        // Connect to first DataNode in the list.
        //
        success = createBlockOutputStream(nodes, storageTypes, 0L, false);

        if (!success) {
            DFSClient.LOG.info("Abandoning " + block);
            dfsClient.namenode.abandonBlock(block, stat.getFileId(), src, dfsClient.clientName);
            block = null;
            DFSClient.LOG.info("Excluding datanode " + nodes[errorIndex]);
            excludedNodes.put(nodes[errorIndex], nodes[errorIndex]);
        }
    } while (!success && --count >= 0);

    if (!success) {
        throw new IOException("Unable to create new block.");
    }

    if (erasureCodingSourceStream || erasureCodingParityStream) {
        Collections.addAll(usedNodes, nodes);
    }

    return lb;
}
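
The HDFS example above sums several size() calls to pre-size an exclusion array and then fills it by hand. A hedged alternative sketch using ImmutableSet.Builder (String stands in for DatanodeInfo here; note that, unlike the hand-indexed array, the builder de-duplicates, so the resulting size() may be smaller than the sum of the inputs):

import com.google.common.collect.ImmutableSet;
import java.util.Set;

public class ExclusionUnion {
    static String[] buildExcluded(Set<String> excluded, Set<String> used, Set<String> stripe) {
        ImmutableSet<String> union = ImmutableSet.<String>builder()
                .addAll(excluded)
                .addAll(used)
                .addAll(stripe)
                .build();
        // union.size() gives the de-duplicated count of nodes to exclude.
        return union.toArray(new String[0]);
    }
}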

From source file: org.apache.hadoop.hive.druid.DruidStorageHandler.java

public void publishSegments(Table table, boolean overwrite) throws MetaException {
    if (MetaStoreUtils.isExternalTable(table)) {
        return;
    }
    Lifecycle lifecycle = new Lifecycle();
    LOG.info("Committing table {} to the druid metastore", table.getDbName());
    final Path tableDir = getSegmentDescriptorDir();
    try {
        List<DataSegment> segmentList = DruidStorageHandlerUtils.getPublishedSegments(tableDir, getConf());
        LOG.info("Found {} segments under path {}", segmentList.size(), tableDir);
        final String dataSourceName = table.getParameters().get(Constants.DRUID_DATA_SOURCE);
        final String segmentDirectory = table.getParameters().get(Constants.DRUID_SEGMENT_DIRECTORY) != null
                ? table.getParameters().get(Constants.DRUID_SEGMENT_DIRECTORY)
                : HiveConf.getVar(getConf(), HiveConf.ConfVars.DRUID_SEGMENT_DIRECTORY);
        DruidStorageHandlerUtils.publishSegments(connector, druidMetadataStorageTablesConfig, dataSourceName,
                segmentList, overwrite, segmentDirectory, getConf());
        final String coordinatorAddress = HiveConf.getVar(getConf(),
                HiveConf.ConfVars.HIVE_DRUID_COORDINATOR_DEFAULT_ADDRESS);
        int maxTries = HiveConf.getIntVar(getConf(), HiveConf.ConfVars.HIVE_DRUID_MAX_TRIES);
        LOG.info("checking load status from coordinator {}", coordinatorAddress);

        String coordinatorResponse = null;
        try {
            coordinatorResponse = RetryUtils.retry(new Callable<String>() {
                @Override
                public String call() throws Exception {
                    return DruidStorageHandlerUtils.getURL(getHttpClient(),
                            new URL(String.format("http://%s/status", coordinatorAddress)));
                }
            }, new Predicate<Throwable>() {
                @Override
                public boolean apply(@Nullable Throwable input) {
                    return input instanceof IOException;
                }
            }, maxTries);
        } catch (Exception e) {
            console.printInfo("Will skip waiting for data loading");
            return;
        }
        if (Strings.isNullOrEmpty(coordinatorResponse)) {
            console.printInfo("Will skip waiting for data loading");
            return;
        }
        console.printInfo(String.format("Waiting for the loading of [%s] segments", segmentList.size()));
        long passiveWaitTimeMs = HiveConf.getLongVar(getConf(), HiveConf.ConfVars.HIVE_DRUID_PASSIVE_WAIT_TIME);
        ImmutableSet<URL> setOfUrls = FluentIterable.from(segmentList)
                .transform(new Function<DataSegment, URL>() {
                    @Override
                    public URL apply(DataSegment dataSegment) {
                        try {
                            //Need to make sure that we are using UTC since most of the druid cluster use UTC by default
                            return new URL(
                                    String.format("http://%s/druid/coordinator/v1/datasources/%s/segments/%s",
                                            coordinatorAddress, dataSourceName,
                                            DataSegment.makeDataSegmentIdentifier(dataSegment.getDataSource(),
                                                    new DateTime(dataSegment.getInterval().getStartMillis(),
                                                            DateTimeZone.UTC),
                                                    new DateTime(dataSegment.getInterval().getEndMillis(),
                                                            DateTimeZone.UTC),
                                                    dataSegment.getVersion(), dataSegment.getShardSpec())));
                        } catch (MalformedURLException e) {
                            Throwables.propagate(e);
                        }
                        return null;
                    }
                }).toSet();

        int numRetries = 0;
        while (numRetries++ < maxTries && !setOfUrls.isEmpty()) {
            setOfUrls = ImmutableSet.copyOf(Sets.filter(setOfUrls, new Predicate<URL>() {
                @Override
                public boolean apply(URL input) {
                    try {
                        String result = DruidStorageHandlerUtils.getURL(getHttpClient(), input);
                        LOG.debug("Checking segment {} response is {}", input, result);
                        return Strings.isNullOrEmpty(result);
                    } catch (IOException e) {
                        LOG.error(String.format("Error while checking URL [%s]", input), e);
                        return true;
                    }
                }
            }));

            try {
                if (!setOfUrls.isEmpty()) {
                    Thread.sleep(passiveWaitTimeMs);
                }
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt(); // restore the interrupt status instead of clearing it
                Throwables.propagate(e);
            }
        }
        if (!setOfUrls.isEmpty()) {
            // We are not Throwing an exception since it might be a transient issue that is blocking loading
            console.printError(
                    String.format("Wait time exhausted and we have [%s] out of [%s] segments not loaded yet",
                            setOfUrls.size(), segmentList.size()));
        }
    } catch (IOException e) {
        LOG.error("Exception while commit", e);
        Throwables.propagate(e);
    } finally {
        cleanWorkingDir();
        lifecycle.stop();
    }
}
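
The segment-loading loop above repeatedly shrinks an ImmutableSet of URLs with Sets.filter plus ImmutableSet.copyOf, using isEmpty() to drive the wait and size() to report progress. A hedged sketch of one filtering round (the status check is a stub):

import com.google.common.base.Predicate;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;

public class LoadPolling {
    static ImmutableSet<String> stillPending(ImmutableSet<String> pending) {
        // Sets.filter returns a live view; copyOf snapshots it into a new ImmutableSet
        // whose size() reflects only the segments that are still loading.
        return ImmutableSet.copyOf(Sets.filter(pending, new Predicate<String>() {
            @Override
            public boolean apply(String segment) {
                return isStillLoading(segment);
            }
        }));
    }

    static boolean isStillLoading(String segment) {
        return false; // stub: a real implementation would query the coordinator
    }
}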

From source file: org.xolstice.maven.plugin.protobuf.AbstractProtocMojo.java

/**
 * Executes the mojo.
 */
@Override
public void execute() throws MojoExecutionException, MojoFailureException {

    if (skipMojo()) {
        return;
    }

    checkParameters();
    final File protoSourceRoot = getProtoSourceRoot();
    if (protoSourceRoot.exists()) {
        try {
            final ImmutableSet<File> protoFiles = findProtoFilesInDirectory(protoSourceRoot);
            final File outputDirectory = getOutputDirectory();
            final ImmutableSet<File> outputFiles = findGeneratedFilesInDirectory(getOutputDirectory());

            if (protoFiles.isEmpty()) {
                getLog().info("No proto files to compile.");
            } else if (!hasDelta(protoFiles)) {
                getLog().info("Skipping compilation because build context has no changes.");
                doAttachFiles();
            } else if (checkStaleness && checkFilesUpToDate(protoFiles, outputFiles)) {
                getLog().info("Skipping compilation because target directory newer than sources.");
                doAttachFiles();
            } else {
                final ImmutableSet<File> derivedProtoPathElements = makeProtoPathFromJars(
                        temporaryProtoFileDirectory, getDependencyArtifactFiles());
                FileUtils.mkdir(outputDirectory.getAbsolutePath());

                if (clearOutputDirectory) {
                    cleanDirectory(outputDirectory);
                }

                if (writeDescriptorSet) {
                    final File descriptorSetOutputDirectory = getDescriptorSetOutputDirectory();
                    FileUtils.mkdir(descriptorSetOutputDirectory.getAbsolutePath());
                    if (clearOutputDirectory) {
                        cleanDirectory(descriptorSetOutputDirectory);
                    }
                }

                if (protocPlugins != null) {
                    createProtocPlugins();
                }

                //get toolchain from context
                final Toolchain tc = toolchainManager.getToolchainFromBuildContext("protobuf", session); //NOI18N
                if (tc != null) {
                    getLog().info("Toolchain in protobuf-maven-plugin: " + tc);
                    //when the executable to use is explicitly set by user in mojo's parameter, ignore toolchains.
                    if (protocExecutable != null) {
                        getLog().warn("Toolchains are ignored, 'protocExecutable' parameter is set to "
                                + protocExecutable);
                    } else {
                        //assign the path to executable from toolchains
                        protocExecutable = tc.findTool("protoc"); //NOI18N
                    }
                }
                if (protocExecutable == null && protocArtifact != null) {
                    final Artifact artifact = createDependencyArtifact(protocArtifact);
                    final File file = resolveBinaryArtifact(artifact);
                    protocExecutable = file.getAbsolutePath();
                }
                if (protocExecutable == null) {
                    // Try to fall back to 'protoc' in $PATH
                    getLog().warn("No 'protocExecutable' parameter is configured, using the default: 'protoc'");
                    protocExecutable = "protoc";
                }

                final Protoc.Builder protocBuilder = new Protoc.Builder(protocExecutable)
                        .addProtoPathElement(protoSourceRoot).addProtoPathElements(derivedProtoPathElements)
                        .addProtoPathElements(asList(additionalProtoPathElements)).addProtoFiles(protoFiles);
                addProtocBuilderParameters(protocBuilder);
                final Protoc protoc = protocBuilder.build();

                if (getLog().isDebugEnabled()) {
                    getLog().debug("Proto source root:");
                    getLog().debug(" " + protoSourceRoot);

                    if (derivedProtoPathElements != null && !derivedProtoPathElements.isEmpty()) {
                        getLog().debug("Derived proto paths:");
                        for (final File path : derivedProtoPathElements) {
                            getLog().debug(" " + path);
                        }
                    }

                    if (additionalProtoPathElements != null && additionalProtoPathElements.length > 0) {
                        getLog().debug("Additional proto paths:");
                        for (final File path : additionalProtoPathElements) {
                            getLog().debug(" " + path);
                        }
                    }
                }
                protoc.logExecutionParameters(getLog());

                getLog().info(format("Compiling %d proto file(s) to %s", protoFiles.size(), outputDirectory));

                final int exitStatus = protoc.execute();
                if (StringUtils.isNotBlank(protoc.getOutput())) {
                    getLog().info("PROTOC: " + protoc.getOutput());
                }
                if (exitStatus != 0) {
                    getLog().error("PROTOC FAILED: " + protoc.getError());
                    for (File pf : protoFiles) {
                        buildContext.removeMessages(pf);
                        buildContext.addMessage(pf, 0, 0, protoc.getError(), BuildContext.SEVERITY_ERROR, null);
                    }
                    throw new MojoFailureException(
                            "protoc did not exit cleanly. Review output for more information.");
                } else if (StringUtils.isNotBlank(protoc.getError())) {
                    getLog().warn("PROTOC: " + protoc.getError());
                }
                doAttachFiles();
            }
        } catch (IOException e) {
            throw new MojoExecutionException("An IO error occurred", e);
        } catch (IllegalArgumentException e) {
            throw new MojoFailureException("protoc failed to execute because: " + e.getMessage(), e);
        } catch (CommandLineException e) {
            throw new MojoExecutionException("An error occurred while invoking protoc.", e);
        }
    } else {
        getLog().info(format("%s does not exist. Review the configuration or consider disabling the plugin.",
                protoSourceRoot));
    }
}
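
In the mojo above, isEmpty() gates the control flow while size() appears only in the progress message. A minimal sketch of that division of labor (names are illustrative):

import com.google.common.collect.ImmutableSet;
import java.io.File;

public class CompileReport {
    static void report(ImmutableSet<File> protoFiles, File outputDirectory) {
        // Guard control flow with isEmpty(); reserve size() for reporting.
        if (protoFiles.isEmpty()) {
            System.out.println("No proto files to compile.");
            return;
        }
        System.out.println(String.format("Compiling %d proto file(s) to %s",
                protoFiles.size(), outputDirectory));
    }
}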

From source file: edu.buaa.satla.analysis.core.predicate.PredicateAbstractionManager.java

/**
 * Compute an abstraction of the conjunction of an AbstractionFormula and
 * a PathFormula. The AbstractionFormula will be used in its instantiated form,
 * so the indices there should match those from the PathFormula.
 * @param abstractionFormula An AbstractionFormula that is used as input.
 * @param pathFormula A PathFormula that is used as input.
 * @param predicates The set of predicates used for abstraction.
 * @return An AbstractionFormula instance representing an abstraction of
 *          "abstractionFormula & pathFormula" with pathFormula as the block formula.
 * @throws InterruptedException
 */
public AbstractionFormula buildAbstraction(CFANode location, AbstractionFormula abstractionFormula,
        PathFormula pathFormula, Collection<AbstractionPredicate> pPredicates)
        throws SolverException, InterruptedException {

    stats.numCallsAbstraction++;

    logger.log(Level.FINEST, "Computing abstraction", stats.numCallsAbstraction, "with", pPredicates.size(),
            "predicates");
    logger.log(Level.ALL, "Old abstraction:", abstractionFormula.asFormula());
    logger.log(Level.ALL, "Path formula:", pathFormula);
    logger.log(Level.ALL, "Predicates:", pPredicates);

    BooleanFormula absFormula = abstractionFormula.asInstantiatedFormula();
    BooleanFormula symbFormula = buildFormula(pathFormula.getFormula());
    BooleanFormula f = bfmgr.and(absFormula, symbFormula);
    final SSAMap ssa = pathFormula.getSsa();

    ImmutableSet<AbstractionPredicate> predicates = getRelevantPredicates(pPredicates, f, ssa);

    // Try to reuse stored abstractions
    if (reuseAbstractionsFrom != null && !abstractionReuseDisabledBecauseOfAmbiguity) {
        stats.abstractionReuseTime.start();
        ProverEnvironment reuseEnv = solver.newProverEnvironment();
        try {
            reuseEnv.push(f);

            Deque<Pair<Integer, Integer>> tryReuseBasedOnPredecessors = new ArrayDeque<>();
            Set<Integer> idsOfStoredAbstractionReused = abstractionFormula.getIdsOfStoredAbstractionReused();
            for (Integer id : idsOfStoredAbstractionReused) {
                tryReuseBasedOnPredecessors.add(Pair.of(id, 0));
            }

            if (tryReuseBasedOnPredecessors.isEmpty()) {
                tryReuseBasedOnPredecessors.add(Pair.of(abstractionStorage.getRootAbstractionId(), 0));
            }

            while (!tryReuseBasedOnPredecessors.isEmpty()) {
                final Pair<Integer, Integer> tryBasedOn = tryReuseBasedOnPredecessors.pop();
                final int tryBasedOnAbstractionId = tryBasedOn.getFirst();
                final int tryLevel = tryBasedOn.getSecond();

                if (tryLevel > maxAbstractionReusePrescan) {
                    continue;
                }

                Set<AbstractionNode> candidateAbstractions = getSuccessorsInAbstractionTree(
                        tryBasedOnAbstractionId);
                Preconditions.checkNotNull(candidateAbstractions);

                //logger.log(Level.WARNING, "Raw candidates based on", tryBasedOnAbstractionId, ":", candidateAbstractions);

                Iterator<AbstractionNode> candidateIterator = candidateAbstractions.iterator();
                while (candidateIterator.hasNext()) {
                    AbstractionNode an = candidateIterator.next();
                    Preconditions.checkNotNull(an);
                    tryReuseBasedOnPredecessors.add(Pair.of(an.getId(), tryLevel + 1));

                    if (bfmgr.isTrue(an.getFormula())) {
                        candidateIterator.remove();
                        continue;
                    }

                    if (an.getLocationId().isPresent()) {
                        if (location.getNodeNumber() != an.getLocationId().get()) {
                            candidateIterator.remove();
                            continue;
                        }
                    }
                }

                //logger.log(Level.WARNING, "Filtered candidates", "location", location.getNodeNumber(), "abstraction", tryBasedOnAbstractionId, ":", candidateAbstractions);

                if (candidateAbstractions.size() > 1) {
                    logger.log(Level.WARNING, "Too many abstraction candidates on location", location,
                            "for abstraction", tryBasedOnAbstractionId, ". Disabling abstraction reuse!");
                    this.abstractionReuseDisabledBecauseOfAmbiguity = true;
                    tryReuseBasedOnPredecessors.clear();
                    continue;
                }

                Set<Integer> reuseIds = Sets.newTreeSet();
                BooleanFormula reuseFormula = bfmgr.makeBoolean(true);
                for (AbstractionNode an : candidateAbstractions) {
                    reuseFormula = bfmgr.and(reuseFormula, an.getFormula());
                    abstractionStorage.markAbstractionBeingReused(an.getId());
                    reuseIds.add(an.getId());
                }
                BooleanFormula instantiatedReuseFormula = fmgr.instantiate(reuseFormula, ssa);

                stats.abstractionReuseImplicationTime.start();
                reuseEnv.push(bfmgr.not(instantiatedReuseFormula));
                boolean implication = reuseEnv.isUnsat();
                reuseEnv.pop();
                stats.abstractionReuseImplicationTime.stop();

                if (implication) {
                    stats.numAbstractionReuses++;

                    Region reuseFormulaRegion = buildRegionFromFormula(reuseFormula);
                    return new AbstractionFormula(fmgr, reuseFormulaRegion, reuseFormula,
                            instantiatedReuseFormula, pathFormula, reuseIds);
                }
            }
        } finally {
            reuseEnv.close();
            stats.abstractionReuseTime.stop();
        }
    }
    // <-- End of reuse

    // Shortcut if the precision is empty
    if (pPredicates.isEmpty()) {
        logger.log(Level.FINEST, "Abstraction", stats.numCallsAbstraction, "with empty precision is true");
        stats.numSymbolicAbstractions++;
        return makeTrueAbstractionFormula(pathFormula);
    }

    // caching
    Pair<BooleanFormula, ImmutableSet<AbstractionPredicate>> absKey = null;
    if (useCache) {
        absKey = Pair.of(f, predicates);
        AbstractionFormula result = abstractionCache.get(absKey);

        if (result != null) {
            // create new abstraction object to have a unique abstraction id

            // instantiate the formula with the current indices
            BooleanFormula stateFormula = result.asFormula();
            BooleanFormula instantiatedFormula = fmgr.instantiate(stateFormula, ssa);

            result = new AbstractionFormula(fmgr, result.asRegion(), stateFormula, instantiatedFormula,
                    pathFormula, result.getIdsOfStoredAbstractionReused());
            logger.log(Level.FINEST, "Abstraction", stats.numCallsAbstraction, "was cached");
            logger.log(Level.ALL, "Abstraction result is", result.asFormula());
            stats.numCallsAbstractionCached++;
            return result;
        }

        boolean unsatisfiable = unsatisfiabilityCache.contains(symbFormula)
                || unsatisfiabilityCache.contains(f);
        if (unsatisfiable) {
            // block is infeasible
            logger.log(Level.FINEST, "Block feasibility of abstraction", stats.numCallsAbstraction,
                    "was cached and is false.");
            stats.numCallsAbstractionCached++;
            return new AbstractionFormula(fmgr, rmgr.makeFalse(), bfmgr.makeBoolean(false),
                    bfmgr.makeBoolean(false), pathFormula, noAbstractionReuse);
        }
    }

    // We update statistics here because we want to ignore calls
    // where the result was in the cache.
    stats.numTotalPredicates += pPredicates.size();
    stats.maxPredicates = Math.max(stats.maxPredicates, pPredicates.size());
    stats.numIrrelevantPredicates += pPredicates.size() - predicates.size();

    // Compute result for those predicates
    // where we can trivially identify their truthness in the result
    Region abs = rmgr.makeTrue();
    if (identifyTrivialPredicates) {
        stats.trivialPredicatesTime.start();
        abs = identifyTrivialPredicates(predicates, abstractionFormula, pathFormula);

        // Calculate the set of predicates we still need to use for abstraction.
        predicates = from(predicates).filter(not(in(amgr.extractPredicates(abs)))).toSet();
        stats.trivialPredicatesTime.stop();
    }

    try (ProverEnvironment thmProver = solver.newProverEnvironment()) {
        thmProver.push(f);

        if (predicates.isEmpty()) {
            stats.numSatCheckAbstractions++;

            stats.abstractionSolveTime.start();
            boolean feasibility;
            try {
                feasibility = !thmProver.isUnsat();
            } finally {
                stats.abstractionSolveTime.stop();
            }

            if (!feasibility) {
                abs = rmgr.makeFalse();
            }

        } else {
            if (abstractionType != AbstractionType.BOOLEAN) {
                // First do cartesian abstraction if desired
                stats.cartesianAbstractionTime.start();
                try {
                    abs = rmgr.makeAnd(abs, buildCartesianAbstraction(f, ssa, thmProver, predicates));
                } finally {
                    stats.cartesianAbstractionTime.stop();
                }
            }

            if (abstractionType == AbstractionType.COMBINED) {
                // Calculate the set of predicates that cartesian abstraction couldn't handle.
                predicates = from(predicates).filter(not(in(amgr.extractPredicates(abs)))).toSet();
            }

            if (abstractionType != AbstractionType.CARTESIAN && !predicates.isEmpty()) {
                // Last do boolean abstraction if desired and necessary
                stats.numBooleanAbsPredicates += predicates.size();
                stats.booleanAbstractionTime.start();
                try {
                    abs = rmgr.makeAnd(abs, buildBooleanAbstraction(location, ssa, thmProver, predicates));
                } finally {
                    stats.booleanAbstractionTime.stop();
                }

                // Warning:
                // buildBooleanAbstraction() does not clean up thmProver, so do not use it here.
            }
        }
    }

    AbstractionFormula result = makeAbstractionFormula(abs, ssa, pathFormula);

    if (useCache) {
        abstractionCache.put(absKey, result);

        if (result.isFalse()) {
            unsatisfiabilityCache.add(f);
        }
    }

    long abstractionTime = TimeSpan.sum(stats.abstractionSolveTime.getLengthOfLastInterval(),
            stats.abstractionEnumTime.getLengthOfLastOuterInterval()).asMillis();
    logger.log(Level.FINEST, "Computing abstraction took", abstractionTime, "ms");
    logger.log(Level.ALL, "Abstraction result is", result.asFormula());

    if (dumpHardAbstractions && abstractionTime > 10000) {
        // we want to dump "hard" problems...
        Path dumpFile;

        dumpFile = fmgr.formatFormulaOutputFile("abstraction", stats.numCallsAbstraction, "input", 0);
        fmgr.dumpFormulaToFile(f, dumpFile);

        dumpFile = fmgr.formatFormulaOutputFile("abstraction", stats.numCallsAbstraction, "predicates", 0);
        try (Writer w = dumpFile.asCharSink(StandardCharsets.UTF_8).openBufferedStream()) {
            Joiner.on('\n').appendTo(w, predicates);
        } catch (IOException e) {
            logger.logUserException(Level.WARNING, e, "Failed to write predicates to file");
        }

        dumpFile = fmgr.formatFormulaOutputFile("abstraction", stats.numCallsAbstraction, "result", 0);
        fmgr.dumpFormulaToFile(result.asInstantiatedFormula(), dumpFile);
    }

    return result;
}
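
A recurring pattern here is diffing the size() of the incoming predicate collection against the filtered ImmutableSet to count how many predicates were irrelevant. A hedged sketch (valid only because the filtered set is a subset of the input, as with getRelevantPredicates above):

import com.google.common.collect.ImmutableSet;
import java.util.Collection;

public class PredicateStats {
    static int countIrrelevant(Collection<String> all, ImmutableSet<String> relevant) {
        // relevant is assumed to be a subset of all, so the difference is never negative.
        return all.size() - relevant.size();
    }
}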

From source file: org.sosy_lab.cpachecker.cpa.predicate.PredicateAbstractionManager.java

/**
 * Compute an abstraction of the conjunction of an AbstractionFormula and
 * a PathFormula. The AbstractionFormula will be used in its instantiated form,
 * so the indices there should match those from the PathFormula.
 * @param abstractionFormula An AbstractionFormula that is used as input.
 * @param pathFormula A PathFormula that is used as input.
 * @param predicates The set of predicates used for abstraction.
 * @return An AbstractionFormula instance representing an abstraction of
 *          "abstractionFormula & pathFormula" with pathFormula as the block formula.
 * @throws InterruptedException
 */
public AbstractionFormula buildAbstraction(CFANode location, AbstractionFormula abstractionFormula,
        PathFormula pathFormula, Collection<AbstractionPredicate> pPredicates)
        throws SolverException, InterruptedException {

    stats.numCallsAbstraction++;

    logger.log(Level.FINEST, "Computing abstraction", stats.numCallsAbstraction, "with", pPredicates.size(),
            "predicates");
    logger.log(Level.ALL, "Old abstraction:", abstractionFormula.asFormula());
    logger.log(Level.ALL, "Path formula:", pathFormula);
    logger.log(Level.ALL, "Predicates:", pPredicates);

    BooleanFormula absFormula = abstractionFormula.asInstantiatedFormula();
    BooleanFormula symbFormula = buildFormula(pathFormula.getFormula());
    BooleanFormula f = bfmgr.and(absFormula, symbFormula);
    final SSAMap ssa = pathFormula.getSsa();

    ImmutableSet<AbstractionPredicate> predicates = getRelevantPredicates(pPredicates, f, ssa);

    // Try to reuse stored abstractions
    if (reuseAbstractionsFrom != null && !abstractionReuseDisabledBecauseOfAmbiguity) {
        stats.abstractionReuseTime.start();
        ProverEnvironment reuseEnv = solver.newProverEnvironment();
        try {
            reuseEnv.push(f);

            Deque<Pair<Integer, Integer>> tryReuseBasedOnPredecessors = new ArrayDeque<>();
            Set<Integer> idsOfStoredAbstractionReused = abstractionFormula.getIdsOfStoredAbstractionReused();
            for (Integer id : idsOfStoredAbstractionReused) {
                tryReuseBasedOnPredecessors.add(Pair.of(id, 0));
            }

            if (tryReuseBasedOnPredecessors.isEmpty()) {
                tryReuseBasedOnPredecessors.add(Pair.of(abstractionStorage.getRootAbstractionId(), 0));
            }

            while (!tryReuseBasedOnPredecessors.isEmpty()) {
                final Pair<Integer, Integer> tryBasedOn = tryReuseBasedOnPredecessors.pop();
                final int tryBasedOnAbstractionId = tryBasedOn.getFirst();
                final int tryLevel = tryBasedOn.getSecond();

                if (tryLevel > maxAbstractionReusePrescan) {
                    continue;
                }

                Set<AbstractionNode> candidateAbstractions = getSuccessorsInAbstractionTree(
                        tryBasedOnAbstractionId);
                Preconditions.checkNotNull(candidateAbstractions);

                //logger.log(Level.WARNING, "Raw candidates based on", tryBasedOnAbstractionId, ":", candidateAbstractions);

                Iterator<AbstractionNode> candidateIterator = candidateAbstractions.iterator();
                while (candidateIterator.hasNext()) {
                    AbstractionNode an = candidateIterator.next();
                    Preconditions.checkNotNull(an);
                    tryReuseBasedOnPredecessors.add(Pair.of(an.getId(), tryLevel + 1));

                    if (bfmgr.isTrue(an.getFormula())) {
                        candidateIterator.remove();
                        continue;
                    }

                    if (an.getLocationId().isPresent()) {
                        if (location.getNodeNumber() != an.getLocationId().get()) {
                            candidateIterator.remove();
                            continue;
                        }
                    }
                }

                //logger.log(Level.WARNING, "Filtered candidates", "location", location.getNodeNumber(), "abstraction", tryBasedOnAbstractionId, ":", candidateAbstractions);

                if (candidateAbstractions.size() > 1) {
                    logger.log(Level.WARNING, "Too many abstraction candidates on location", location,
                            "for abstraction", tryBasedOnAbstractionId, ". Disabling abstraction reuse!");
                    this.abstractionReuseDisabledBecauseOfAmbiguity = true;
                    tryReuseBasedOnPredecessors.clear();
                    continue;
                }

                Set<Integer> reuseIds = Sets.newTreeSet();
                BooleanFormula reuseFormula = bfmgr.makeBoolean(true);
                for (AbstractionNode an : candidateAbstractions) {
                    reuseFormula = bfmgr.and(reuseFormula, an.getFormula());
                    abstractionStorage.markAbstractionBeingReused(an.getId());
                    reuseIds.add(an.getId());
                }
                BooleanFormula instantiatedReuseFormula = fmgr.instantiate(reuseFormula, ssa);

                stats.abstractionReuseImplicationTime.start();
                reuseEnv.push(bfmgr.not(instantiatedReuseFormula));
                boolean implication = reuseEnv.isUnsat();
                reuseEnv.pop();
                stats.abstractionReuseImplicationTime.stop();

                if (implication) {
                    stats.numAbstractionReuses++;

                    Region reuseFormulaRegion = buildRegionFromFormula(reuseFormula);
                    return new AbstractionFormula(fmgr, reuseFormulaRegion, reuseFormula,
                            instantiatedReuseFormula, pathFormula, reuseIds);
                }
            }
        } finally {
            reuseEnv.close();
            stats.abstractionReuseTime.stop();
        }
    }
    // <-- End of reuse

    // Shortcut if the precision is empty
    if (pPredicates.isEmpty() && (abstractionType != AbstractionType.ELIMINATION)) {
        logger.log(Level.FINEST, "Abstraction", stats.numCallsAbstraction, "with empty precision is true");
        stats.numSymbolicAbstractions++;
        return makeTrueAbstractionFormula(pathFormula);
    }

    // caching
    Pair<BooleanFormula, ImmutableSet<AbstractionPredicate>> absKey = null;
    if (useCache) {
        absKey = Pair.of(f, predicates);
        AbstractionFormula result = abstractionCache.get(absKey);

        if (result != null) {
            // create new abstraction object to have a unique abstraction id

            // instantiate the formula with the current indices
            BooleanFormula stateFormula = result.asFormula();
            BooleanFormula instantiatedFormula = fmgr.instantiate(stateFormula, ssa);

            result = new AbstractionFormula(fmgr, result.asRegion(), stateFormula, instantiatedFormula,
                    pathFormula, result.getIdsOfStoredAbstractionReused());
            logger.log(Level.FINEST, "Abstraction", stats.numCallsAbstraction, "was cached");
            logger.log(Level.ALL, "Abstraction result is", result.asFormula());
            stats.numCallsAbstractionCached++;
            return result;
        }

        boolean unsatisfiable = unsatisfiabilityCache.contains(symbFormula)
                || unsatisfiabilityCache.contains(f);
        if (unsatisfiable) {
            // block is infeasible
            logger.log(Level.FINEST, "Block feasibility of abstraction", stats.numCallsAbstraction,
                    "was cached and is false.");
            stats.numCallsAbstractionCached++;
            return new AbstractionFormula(fmgr, rmgr.makeFalse(), bfmgr.makeBoolean(false),
                    bfmgr.makeBoolean(false), pathFormula, noAbstractionReuse);
        }
    }

    // We update statistics here because we want to ignore calls
    // where the result was in the cache.
    stats.numTotalPredicates += pPredicates.size();
    stats.maxPredicates = Math.max(stats.maxPredicates, pPredicates.size());
    stats.numIrrelevantPredicates += pPredicates.size() - predicates.size();

    // Compute result for those predicates
    // where we can trivially identify their truthness in the result
    Region abs = rmgr.makeTrue();
    if (identifyTrivialPredicates) {
        stats.trivialPredicatesTime.start();
        abs = identifyTrivialPredicates(predicates, abstractionFormula, pathFormula);

        // Calculate the set of predicates we still need to use for abstraction.
        predicates = from(predicates).filter(not(in(amgr.extractPredicates(abs)))).toSet();
        stats.trivialPredicatesTime.stop();
    }

    try (ProverEnvironment thmProver = solver.newProverEnvironment()) {
        thmProver.push(f);

        if (predicates.isEmpty() && (abstractionType != AbstractionType.ELIMINATION)) {
            stats.numSatCheckAbstractions++;

            stats.abstractionSolveTime.start();
            boolean feasibility;
            try {
                feasibility = !thmProver.isUnsat();
            } finally {
                stats.abstractionSolveTime.stop();
            }

            if (!feasibility) {
                abs = rmgr.makeFalse();
            }

        } else if (abstractionType == AbstractionType.ELIMINATION) {
            stats.quantifierEliminationTime.start();
            try {
                abs = rmgr.makeAnd(abs,
                        eliminateIrrelevantVariablePropositions(f, location, ssa, thmProver, predicates));
            } finally {
                stats.quantifierEliminationTime.stop();
            }
        } else {
            if (abstractionType != AbstractionType.BOOLEAN) {
                // First do cartesian abstraction if desired
                stats.cartesianAbstractionTime.start();
                try {
                    abs = rmgr.makeAnd(abs, buildCartesianAbstraction(f, ssa, thmProver, predicates));
                } finally {
                    stats.cartesianAbstractionTime.stop();
                }
            }

            if (abstractionType == AbstractionType.COMBINED) {
                // Calculate the set of predicates that cartesian abstraction couldn't handle.
                predicates = from(predicates).filter(not(in(amgr.extractPredicates(abs)))).toSet();
            }

            if (abstractionType != AbstractionType.CARTESIAN && !predicates.isEmpty()) {
                // Last do boolean abstraction if desired and necessary
                stats.numBooleanAbsPredicates += predicates.size();
                stats.booleanAbstractionTime.start();
                try {
                    abs = rmgr.makeAnd(abs, buildBooleanAbstraction(ssa, thmProver, predicates));
                } finally {
                    stats.booleanAbstractionTime.stop();
                }

                // Warning:
                // buildBooleanAbstraction() does not clean up thmProver, so do not use it here.
            }
        }
    }

    AbstractionFormula result = makeAbstractionFormula(abs, ssa, pathFormula);

    if (useCache) {
        abstractionCache.put(absKey, result);

        if (result.isFalse()) {
            unsatisfiabilityCache.add(f);
        }
    }

    long abstractionTime = TimeSpan.sum(stats.abstractionSolveTime.getLengthOfLastInterval(),
            stats.abstractionEnumTime.getLengthOfLastOuterInterval()).asMillis();
    logger.log(Level.FINEST, "Computing abstraction took", abstractionTime, "ms");
    logger.log(Level.ALL, "Abstraction result is", result.asFormula());

    if (dumpHardAbstractions && abstractionTime > 10000) {
        // we want to dump "hard" problems...
        Path dumpFile;

        dumpFile = fmgr.formatFormulaOutputFile("abstraction", stats.numCallsAbstraction, "input", 0);
        fmgr.dumpFormulaToFile(f, dumpFile);

        dumpFile = fmgr.formatFormulaOutputFile("abstraction", stats.numCallsAbstraction, "predicates", 0);
        try (Writer w = dumpFile.asCharSink(StandardCharsets.UTF_8).openBufferedStream()) {
            Joiner.on('\n').appendTo(w, predicates);
        } catch (IOException e) {
            logger.logUserException(Level.WARNING, e, "Failed to write predicates to file");
        }

        dumpFile = fmgr.formatFormulaOutputFile("abstraction", stats.numCallsAbstraction, "result", 0);
        fmgr.dumpFormulaToFile(result.asInstantiatedFormula(), dumpFile);
    }

    return result;
}

From source file: com.facebook.buck.features.apple.project.ProjectGenerator.java

private ImmutableMap<String, String> getFrameworkAndLibrarySearchPathConfigs(
        TargetNode<? extends CxxLibraryDescription.CommonArg> node, boolean includeFrameworks) {
    HashSet<String> frameworkSearchPaths = new HashSet<>();
    frameworkSearchPaths.add("$BUILT_PRODUCTS_DIR");
    HashSet<String> librarySearchPaths = new HashSet<>();
    librarySearchPaths.add("$BUILT_PRODUCTS_DIR");
    HashSet<String> iOSLdRunpathSearchPaths = new HashSet<>();
    HashSet<String> macOSLdRunpathSearchPaths = new HashSet<>();

    FluentIterable<TargetNode<?>> depTargetNodes = collectRecursiveLibraryDepTargets(node);
    ImmutableSet<PBXFileReference> swiftDeps = filterRecursiveLibraryDependenciesWithSwiftSources(
            depTargetNodes);

    Stream.concat(
            // Collect all the nodes that contribute to linking
            // ... Which the node includes itself
            Stream.of(node),
            // ... And recursive dependencies that gets linked in
            AppleBuildRules.getRecursiveTargetNodeDependenciesOfTypes(xcodeDescriptions, targetGraph,
                    Optional.of(dependenciesCache), AppleBuildRules.RecursiveDependenciesMode.LINKING, node,
                    ImmutableSet.of(AppleLibraryDescription.class, CxxLibraryDescription.class,
                            PrebuiltAppleFrameworkDescription.class, PrebuiltCxxLibraryDescription.class))
                    .stream())
            .map(castedNode -> {
                // If the item itself is a prebuilt library, add it to framework_search_paths.
                // This is needed for prebuilt framework's headers to be reference-able.
                TargetNodes.castArg(castedNode, PrebuiltCxxLibraryDescriptionArg.class).ifPresent(prebuilt -> {
                    SourcePath path = null;
                    if (prebuilt.getConstructorArg().getSharedLib().isPresent()) {
                        path = prebuilt.getConstructorArg().getSharedLib().get();
                    } else if (prebuilt.getConstructorArg().getStaticLib().isPresent()) {
                        path = prebuilt.getConstructorArg().getStaticLib().get();
                    } else if (prebuilt.getConstructorArg().getStaticPicLib().isPresent()) {
                        path = prebuilt.getConstructorArg().getStaticPicLib().get();
                    }
                    if (path != null) {
                        librarySearchPaths.add("$REPO_ROOT/" + resolveSourcePath(path).getParent());
                    }
                });
                return castedNode;
            })
            // Keep only the ones that may have frameworks and libraries fields.
            .flatMap(input -> RichStream.from(TargetNodes.castArg(input, HasSystemFrameworkAndLibraries.class)))
            // Then for each of them
            .forEach(castedNode -> {
                // ... Add the framework path strings.
                castedNode.getConstructorArg().getFrameworks().stream().filter(x -> !x.isSDKROOTFrameworkPath())
                        .map(frameworkPath -> FrameworkPath.getUnexpandedSearchPath(this::resolveSourcePath,
                                pathRelativizer::outputDirToRootRelative, frameworkPath).toString())
                        .forEach(frameworkSearchPaths::add);

                // ... And do the same for libraries.
                castedNode.getConstructorArg().getLibraries().stream()
                        .map(libraryPath -> FrameworkPath.getUnexpandedSearchPath(this::resolveSourcePath,
                                pathRelativizer::outputDirToRootRelative, libraryPath).toString())
                        .forEach(librarySearchPaths::add);

                // If the item itself is a prebuilt framework, add it to framework_search_paths.
                // This is needed for prebuilt framework's headers to be reference-able.
                TargetNodes.castArg(castedNode, PrebuiltAppleFrameworkDescriptionArg.class)
                        .ifPresent(prebuilt -> {
                            frameworkSearchPaths.add("$REPO_ROOT/"
                                    + resolveSourcePath(prebuilt.getConstructorArg().getFramework())
                                            .getParent());
                            if (prebuilt.getConstructorArg()
                                    .getPreferredLinkage() != NativeLinkable.Linkage.STATIC) {
                                // Frameworks that are copied into the binary.
                                iOSLdRunpathSearchPaths.add("@loader_path/Frameworks");
                                iOSLdRunpathSearchPaths.add("@executable_path/Frameworks");
                                macOSLdRunpathSearchPaths.add("@loader_path/../Frameworks");
                                macOSLdRunpathSearchPaths.add("@executable_path/../Frameworks");
                            }
                        });
            });

    if (includeFrameworks && swiftDeps.size() > 0) {
        // When Xcode compiles static Swift libs, it will include linker commands (LC_LINKER_OPTION)
        // that will be carried over for the final binary to link to the appropriate Swift overlays
        // and libs. This means that the final binary must be able to locate the Swift libs in the
        // library search path. If an Xcode target includes Swift, Xcode will automatically append
        // the Swift lib folder when invoking the linker. Unfortunately, this will not happen if
        // we have a plain apple_binary that has Swift deps. So we're manually doing exactly what
        // Xcode does to make sure binaries link successfully if they use Swift directly or
        // transitively.
        librarySearchPaths.add("$DT_TOOLCHAIN_DIR/usr/lib/swift/$PLATFORM_NAME");
    }

    if (swiftDeps.size() > 0 || projGenerationStateCache.targetContainsSwiftSourceCode(node)) {
        iOSLdRunpathSearchPaths.add("@executable_path/Frameworks");
        iOSLdRunpathSearchPaths.add("@loader_path/Frameworks");
        macOSLdRunpathSearchPaths.add("@executable_path/../Frameworks");
        macOSLdRunpathSearchPaths.add("@loader_path/../Frameworks");
    }

    ImmutableMap.Builder<String, String> results = ImmutableMap.<String, String>builder()
            .put("FRAMEWORK_SEARCH_PATHS", Joiner.on(' ').join(frameworkSearchPaths))
            .put("LIBRARY_SEARCH_PATHS", Joiner.on(' ').join(librarySearchPaths));
    if (!iOSLdRunpathSearchPaths.isEmpty()) {
        results.put("LD_RUNPATH_SEARCH_PATHS[sdk=iphoneos*]", Joiner.on(' ').join(iOSLdRunpathSearchPaths));
        results.put("LD_RUNPATH_SEARCH_PATHS[sdk=iphonesimulator*]",
                Joiner.on(' ').join(iOSLdRunpathSearchPaths));
    }
    if (!macOSLdRunpathSearchPaths.isEmpty()) {
        results.put("LD_RUNPATH_SEARCH_PATHS[sdk=macosx*]", Joiner.on(' ').join(macOSLdRunpathSearchPaths));
    }
    return results.build();
}
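
The project generator above checks swiftDeps.size() > 0 several times. For ImmutableSet, where size() is constant-time, the two forms are equivalent, but !isEmpty() states the intent more directly; a one-line sketch:

import com.google.common.collect.ImmutableSet;

public class EmptinessCheck {
    static boolean hasSwiftDeps(ImmutableSet<String> swiftDeps) {
        return !swiftDeps.isEmpty(); // equivalent to swiftDeps.size() > 0
    }
}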