Example usage for java.util Set toString

Introduction

This page lists usage examples for the java.util Set toString() method, collected from open source projects.

Prototype

public String toString() 

Document

Returns a string representation of the object.
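
Before the project examples below, a minimal, self-contained sketch (the class name is hypothetical) of what toString() typically produces for the standard JDK Set implementations; iteration order, and therefore the exact output, is unspecified for HashSet:

import java.util.HashSet;
import java.util.Set;

public class SetToStringDemo {
    public static void main(String[] args) {
        Set<Integer> ports = new HashSet<Integer>();
        ports.add(6700);
        ports.add(6701);

        // The standard JDK collections inherit AbstractCollection#toString,
        // which renders the elements in brackets, e.g. "[6700, 6701]".
        System.out.println(ports.toString());

        // String concatenation invokes toString() implicitly, so the explicit
        // call seen in several of the examples below is optional.
        System.out.println("Scheduling ports: " + ports);
    }
}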

Usage

From source file:org.apache.storm.daemon.nimbus.NimbusUtils.java

@ClojureClass(className = "backtype.storm.daemon.nimbus#compute-new-topology->executor->node+port")
public static Map<String, Map<ExecutorDetails, WorkerSlot>> computeNewTopologyToExecutorToNodeport(
        NimbusData nimbusData, Map<String, Assignment> existingAssignments, Topologies topologies,
        String scratchTopologyID) throws Exception {
    // topology->executors
    Map<String, Set<ExecutorInfo>> topologyToExecutor = computeTopologyToExecutors(nimbusData,
            existingAssignments.keySet());

    // update the executors' heartbeats first.
    updateAllHeartbeats(nimbusData, existingAssignments, topologyToExecutor);

    // topology->alive-executors
    Map<String, Set<ExecutorInfo>> topologyToAliveExecutors = computeTopologyToAliveExecutors(nimbusData,
            existingAssignments, topologies, topologyToExecutor, scratchTopologyID);

    // supervisor->dead-ports
    Map<String, Set<Integer>> supervisorToDeadPorts = computeSupervisorToDeadports(nimbusData,
            existingAssignments, topologyToExecutor, topologyToAliveExecutors);

    // topology->scheduler-assignment
    Map<String, SchedulerAssignmentImpl> topologyToSchedulerAssignment = computeTopologyToSchedulerAssignment(
            nimbusData, existingAssignments, topologyToAliveExecutors);

    // missing-assignment-topologies
    List<String> missingAssignmentTopologies = mkMissingAssignmentTopologies(topologies, topologyToExecutor,
            topologyToAliveExecutors, topologyToSchedulerAssignment);

    // all-scheduling-slots
    Collection<WorkerSlot> allSchedulingSlotsList = allSchedulingSlots(nimbusData, topologies,
            missingAssignmentTopologies);

    Map<String, Set<Integer>> allSchedulingSlots = new HashMap<String, Set<Integer>>();
    for (WorkerSlot workerSlot : allSchedulingSlotsList) {
        String nodeId = workerSlot.getNodeId();
        Integer port = (Integer) workerSlot.getPort();
        if (allSchedulingSlots.containsKey(nodeId)) {
            Set<Integer> portsSet = allSchedulingSlots.get(nodeId);
            portsSet.add(port);
        } else {
            Set<Integer> portsSet = new HashSet<Integer>();
            portsSet.add(port);
            allSchedulingSlots.put(nodeId, portsSet);
        }
    }

    // supervisors
    Map<String, SupervisorDetails> supervisors = readAllSupervisorDetails(nimbusData, allSchedulingSlots,
            supervisorToDeadPorts);

    backtype.storm.scheduler.Cluster cluster = new backtype.storm.scheduler.Cluster(nimbusData.getInimubs(),
            supervisors, topologyToSchedulerAssignment);

    // call scheduler.schedule to schedule all the topologies
    // the new assignments for all the topologies are in the cluster object.
    nimbusData.getScheduler().schedule(topologies, cluster);

    // new-scheduler-assignments
    Map<String, SchedulerAssignment> newSchedulerAssignments = cluster.getAssignments();

    // add more information to convert SchedulerAssignment to Assignment
    Map<String, Map<ExecutorDetails, WorkerSlot>> newTopologyToExecutorToNodeport = computeTopologyToExecutorToNodeport(
            newSchedulerAssignments);
    // print some useful information
    for (Entry<String, Map<ExecutorDetails, WorkerSlot>> entry : newTopologyToExecutorToNodeport.entrySet()) {
        Map<ExecutorDetails, WorkerSlot> reassignment = new HashMap<ExecutorDetails, WorkerSlot>();
        String tid = entry.getKey();
        Map<ExecutorDetails, WorkerSlot> executorToNodeport = entry.getValue();

        Map<ExecutorInfo, WorkerSlot> oldExecutorToNodeport = new HashMap<ExecutorInfo, WorkerSlot>();
        if (existingAssignments.containsKey(tid)) {
            oldExecutorToNodeport = existingAssignments.get(tid).getExecutorToNodeport();
        }

        for (Entry<ExecutorDetails, WorkerSlot> item : executorToNodeport.entrySet()) {
            ExecutorDetails executorDetails = item.getKey();
            WorkerSlot workerSlot = item.getValue();
            ExecutorInfo execute = new ExecutorInfo(executorDetails.getStartTask(),
                    executorDetails.getEndTask());
            if (oldExecutorToNodeport.containsKey(execute)
                    && !workerSlot.equals(oldExecutorToNodeport.get(execute))) {
                reassignment.put(executorDetails, workerSlot);
            }
        }
        if (!reassignment.isEmpty()) {
            int newSlotCnt = executorToNodeport.values().size();
            Set<ExecutorDetails> reassignExecutors = reassignment.keySet();

            LOG.info("Reassigning " + tid + " to " + newSlotCnt + " slots");
            LOG.info("Reassign executors:  " + reassignExecutors.toString());
        }
    }
    return newTopologyToExecutorToNodeport;

}

From source file:com.jayway.maven.plugins.android.phase05compile.NdkBuildMojo.java

private Set<Artifact> findNativeLibraryDependencies() throws MojoExecutionException {
    final NativeHelper nativeHelper = getNativeHelper();
    final Set<Artifact> staticLibraryArtifacts = nativeHelper.getNativeDependenciesArtifacts(this,
            getUnpackedLibsDirectory(), false);
    final Set<Artifact> sharedLibraryArtifacts = nativeHelper.getNativeDependenciesArtifacts(this,
            getUnpackedLibsDirectory(), true);

    final Set<Artifact> mergedArtifacts = new LinkedHashSet<Artifact>();
    filterNativeDependencies(mergedArtifacts, staticLibraryArtifacts);
    filterNativeDependencies(mergedArtifacts, sharedLibraryArtifacts);

    getLog().debug("findNativeLibraryDependencies found " + mergedArtifacts.size() + ": "
            + mergedArtifacts.toString());

    return mergedArtifacts;
}

From source file:com.jayway.maven.plugins.android.phase05compile.NdkBuildMojo.java

private void compileForArchitecture(String architecture)
        throws MojoExecutionException, IOException, ExecutionException {
    getLog().debug("Resolving for NDK architecture : " + architecture);

    // Start setting up the command line to be executed
    final CommandExecutor executor = CommandExecutor.Factory.createDefaultCommmandExecutor();
    // Add an error listener to the build - this allows the build to conditionally fail
    // depending on a) the output of the build, b) whether build errors (output on stderr)
    // should be ignored, and c) whether the pattern matches
    executor.setErrorListener(getNdkErrorListener());

    final Set<Artifact> nativeLibraryArtifacts = findNativeLibraryDependencies();

    // If there are any static libraries the code needs to link to, include those in the make file
    final Set<Artifact> resolveNativeLibraryArtifacts = getArtifactResolverHelper()
            .resolveArtifacts(nativeLibraryArtifacts);

    getLog().debug("resolveArtifacts found " + resolveNativeLibraryArtifacts.size() + ": "
            + resolveNativeLibraryArtifacts.toString());

    final File buildFolder = new File(ndkBuildDirectory, architecture);
    buildFolder.mkdirs();

    final File androidMavenMakefile = new File(buildFolder, "android_maven_plugin_makefile.mk");
    final MakefileHelper makefileHelper = new MakefileHelper(getLog(), getArtifactResolverHelper(),
            harArtifactHandler, getUnpackedLibsDirectory());

    final MakefileHelper.MakefileHolder makefileHolder = makefileHelper.createMakefileFromArtifacts(
            resolveNativeLibraryArtifacts, architecture, "armeabi", useHeaderArchives);

    final FileOutputStream output = new FileOutputStream(androidMavenMakefile);
    try {
        IOUtil.copy(makefileHolder.getMakeFile(), output);
    } finally {
        output.close();
    }

    // Add the path to the generated makefile - this is picked up by the build (by an include from the user)
    executor.addEnvironment("ANDROID_MAVEN_PLUGIN_MAKEFILE", androidMavenMakefile.getAbsolutePath());

    setupNativeLibraryEnvironment(makefileHelper, executor, resolveNativeLibraryArtifacts, architecture);

    // Add the location of the makefile capture file - after the build, this file will contain
    // things like header files, flags, etc.  It is processed after the build to retrieve the
    // headers and the captured flags.
    final File makefileCaptureFile = File.createTempFile("android_maven_plugin_makefile_captures", ".tmp");
    makefileCaptureFile.deleteOnExit();
    executor.addEnvironment(MakefileHelper.MAKEFILE_CAPTURE_FILE, makefileCaptureFile.getAbsolutePath());

    // Add any defined system properties
    if (systemProperties != null && !systemProperties.isEmpty()) {
        for (Map.Entry<String, String> entry : systemProperties.entrySet()) {
            executor.addEnvironment(entry.getKey(), entry.getValue());
        }
    }
    executor.setLogger(this.getLog());
    // Set up the command line for make
    final List<String> commands = new ArrayList<String>();
    // Set up the build directory (it defaults to the current directory but may differ
    // depending on user configuration)
    commands.add("-C");
    commands.add(project.getBasedir().getAbsolutePath());

    // If the build should use a custom makefile, validate that it exists before
    // adding it to the command line
    if (makefile != null) {
        File makeFile = new File(project.getBasedir(), makefile);
        if (!makeFile.exists()) {
            getLog().error("Specified makefile " + makeFile + " does not exist");
            throw new MojoExecutionException("Specified makefile " + makeFile + " does not exist");
        }
        commands.add("-f");
        commands.add(makefile);
    }

    configureApplicationMakefile(commands);
    configureMaxJobs(commands);
    configureNdkToolchain(architecture, commands);
    configureAdditionalCommands(commands);

    // If a build target is specified, tack it onto the command line as the very last parameter
    if (target != null) {
        commands.add(target);
    } else /*if ( Const.ArtifactType.NATIVE_IMPLEMENTATION_ARCHIVE.equals( project.getPackaging() ) )*/
    {
        commands.add(project.getArtifactId());
    }

    final String ndkBuildPath = resolveNdkBuildExecutable();
    getLog().debug(ndkBuildPath + " " + commands.toString());
    getLog().info("Executing NDK " + architecture + " make at : " + ndkBuildDirectory);

    executor.setCaptureStdOut(true);
    executor.executeCommand(ndkBuildPath, commands, ndkBuildDirectory, true);
    getLog().debug("Executed NDK " + architecture + " make at : " + ndkBuildDirectory);

    // Where the NDK build creates the libs.
    final File nativeLibOutputDirectory = new File(nativeLibrariesOutputDirectory, architecture);
    nativeLibOutputDirectory.mkdirs();

    // Move the built native libs into the packaging folder.
    // We don't create them there to start with because the NDK build seems determined to create them in the root.
    final File destinationDirectory = new File(ndkOutputDirectory, architecture);
    FileUtils.moveDirectory(nativeLibOutputDirectory, destinationDirectory);

    // Attempt to attach the native library if the project is defined as a "pure" native Android library
    // (packaging is 'so' or 'a') or if the plugin has been configured to attach the native library to the build
    if (Const.ArtifactType.NATIVE_SYMBOL_OBJECT.equals(project.getPackaging())
            || Const.ArtifactType.NATIVE_IMPLEMENTATION_ARCHIVE.equals(project.getPackaging())
            || attachNativeArtifacts) {
        attachNativeLib(destinationDirectory, architecture);
    }

    // Conditionally process any headers to include in the header archive file
    if (attachHeaderFiles) {
        attachHeaderFiles(makefileCaptureFile, architecture);
    }

    // If we created a makefile for the build we should be polite and remove any extracted include
    // directories after we're done
    getLog().info("Cleaning up extracted include directories used for build");
    MakefileHelper.cleanupAfterBuild(makefileHolder);
}

From source file:org.apache.heron.streamlet.impl.StreamletImpl.java

/**
 * Set the id of the stream to be used by the children nodes.
 * Usage (assuming source is a Streamlet object with two output streams: stream1 and stream2):
 *   source.withStream("stream1").filter(...).log();
 *   source.withStream("stream2").filter(...).log();
 * @param streamId The specified stream id
 * @return the Streamlet with the changed stream id
 */
@SuppressWarnings("HiddenField")
@Override
public Streamlet<R> withStream(String streamId) {
    checkNotBlank(streamId, "streamId can't be empty");

    Set<String> availableIds = getAvailableStreamIds();
    if (availableIds.contains(streamId)) {
        return new StreamletShadow<R>(this) {
            @Override
            public String getStreamId() {
                return streamId;
            }
        };
    } else {
        throw new RuntimeException(String.format("Stream id %s is not available in %s. Available ids are: %s.",
                streamId, getName(), availableIds.toString()));
    }
}

From source file:ai.grakn.test.graql.reasoner.AtomicTest.java

@Test
public void testUnification_MatchAllParentAtom() {
    GraknGraph graph = snbGraph.graph();
    String parentString = "{$r($a, $x);}";
    Relation parent = (Relation) ReasonerQueries.atomic(conjunction(parentString, graph), graph).getAtom();

    PatternAdmin body = graph.graql()
            .parsePattern("(recommended-customer: $z, recommended-product: $b) isa recommendation").admin();
    PatternAdmin head = graph.graql()
            .parsePattern("(recommended-customer: $z, recommended-product: $b) isa recommendation").admin();
    InferenceRule rule = new InferenceRule(graph.admin().getMetaRuleInference().putRule(body, head), graph);

    Unifier unifier = rule.getUnifier(parent);
    Set<Var> vars = rule.getHead().getAtom().getVarNames();
    Set<Var> correctVars = Sets.newHashSet(Graql.var("r"), Graql.var("a"), Graql.var("x"));
    assertTrue(!vars.contains(Graql.var("")));
    assertTrue("Variables not in subset relation:\n" + correctVars.toString() + "\n" + vars.toString(),
            unifier.values().containsAll(correctVars));
}

From source file:org.apache.lens.cube.parse.join.AutoJoinContext.java

private void pruneEmptyPaths(Map<Aliased<Dimension>, List<JoinPath>> allPaths) throws LensException {
    Iterator<Map.Entry<Aliased<Dimension>, List<JoinPath>>> iter = allPaths.entrySet().iterator();
    Set<Dimension> noPathDims = new HashSet<>();
    while (iter.hasNext()) {
        Map.Entry<Aliased<Dimension>, List<JoinPath>> entry = iter.next();
        if (entry.getValue().isEmpty()) {
            noPathDims.add(entry.getKey().getObject());
            iter.remove();
        }
    }
    noPathDims.retainAll(requiredDimensions);

    if (!noPathDims.isEmpty()) {
        throw new LensException(LensCubeErrorCode.NO_JOIN_PATH.getLensErrorInfo(), autoJoinTarget.getName(),
                noPathDims.toString());
    }
}

From source file:com.evolveum.midpoint.model.common.stringpolicy.ValuePolicyProcessor.java

private void testMustBeFirst(StringLimitType stringLimitation, OperationResult result,
        List<LocalizableMessage> messages, String value, Set<String> validFirstChars) {
    if (StringUtils.isNotEmpty(value) && isTrue(stringLimitation.isMustBeFirst())
            && !validFirstChars.contains(value.substring(0, 1))) {
        LocalizableMessage msg = new LocalizableMessageBuilder().key("ValuePolicy.firstCharacterNotAllowed")
                .arg(validFirstChars.toString()).build();
        result.addSubresult(
                new OperationResult("Check valid first char", OperationResultStatus.FATAL_ERROR, msg));
        messages.add(msg);
    }
}

From source file:org.marketcetera.marketdata.core.provider.AbstractMarketDataProvider.java

@Override
public void requestMarketData(MarketDataRequestToken inRequestToken) {
    if (!isRunning()) {
        throw new MarketDataProviderNotAvailable();
    }
    Set<MarketDataRequestAtom> atoms = explodeRequest(inRequestToken.getRequest());
    totalRequests += atoms.size();
    SLF4JLoggerProxy.debug(this, "Received market data request {}, exploded to {}", //$NON-NLS-1$
            inRequestToken, atoms);
    Lock marketdataRequestLock = marketdataLock.writeLock();
    try {
        marketdataRequestLock.lockInterruptibly();
    } catch (InterruptedException e) {
        org.marketcetera.marketdata.core.Messages.UNABLE_TO_ACQUIRE_LOCK.error(this);
        stop();
        throw new MarketDataRequestFailed(e);
    }
    SLF4JLoggerProxy.trace(this, "Acquired lock"); //$NON-NLS-1$
    try {
        mapRequestToInstruments(inRequestToken);
        for (MarketDataRequestAtom atom : atoms) {
            if (requestsByAtom.containsKey(atom)) {
                SLF4JLoggerProxy.debug(this, "Already requested {}, adding to reference count", atom);
                Instrument snapshotInstrument = instrumentsBySymbol.get(atom.getSymbol());
                if (snapshotInstrument == null) {
                    SLF4JLoggerProxy.warn(this, "Symbol {} not yet mapped, cannot send snapshot",
                            atom.getSymbol());
                } else {
                    Event snapshotEvent = getSnapshot(snapshotInstrument, atom.getContent());
                    if (snapshotEvent instanceof HasEventType) {
                        HasEventType eventTypeSnapshot = (HasEventType) snapshotEvent;
                        eventTypeSnapshot.setEventType(EventType.SNAPSHOT_FINAL);
                    }
                    if (snapshotEvent != null) {
                        SLF4JLoggerProxy.debug(this, "Sending snapshot: {}", snapshotEvent);
                        if (inRequestToken.getSubscriber() != null) {
                            inRequestToken.getSubscriber().publishTo(snapshotEvent);
                        }
                    } else {
                        SLF4JLoggerProxy.debug(this, "No snapshot for {}", atom);
                    }
                }
                requestsByAtom.put(atom, inRequestToken);
                requestsBySymbol.put(atom.getSymbol(), inRequestToken);
            } else {
                Capability requiredCapability = necessaryCapabilities.get(atom.getContent());
                if (requiredCapability == null) {
                    org.marketcetera.marketdata.core.Messages.UNKNOWN_MARKETDATA_CONTENT.error(this,
                            atom.getContent());
                    throw new UnsupportedOperationException(
                            org.marketcetera.marketdata.core.Messages.UNKNOWN_MARKETDATA_CONTENT
                                    .getText(atom.getContent()));
                }
                Set<Capability> capabilities = getCapabilities();
                if (!capabilities.contains(requiredCapability)) {
                    org.marketcetera.marketdata.core.Messages.UNSUPPORTED_MARKETDATA_CONTENT.error(this,
                            atom.getContent(), capabilities.toString());
                    throw new MarketDataRequestFailed(new I18NBoundMessage2P(
                            org.marketcetera.marketdata.core.Messages.UNSUPPORTED_MARKETDATA_CONTENT,
                            atom.getContent(), capabilities.toString()));
                }
                requestsByAtom.put(atom, inRequestToken);
                requestsBySymbol.put(atom.getSymbol(), inRequestToken);
                SLF4JLoggerProxy.debug(this, "Requesting {}", atom);
                doMarketDataRequest(inRequestToken.getRequest(), atom);
            }
        }
    } catch (Exception e) {
        try {
            cancelMarketDataRequest(inRequestToken);
        } catch (Exception ignored) {
        }
        org.marketcetera.marketdata.core.Messages.MARKETDATA_REQUEST_FAILED.warn(this, e);
        if (e instanceof MarketDataException) {
            throw (MarketDataException) e;
        }
        throw new MarketDataRequestFailed(e);
    } finally {
        marketdataRequestLock.unlock();
        SLF4JLoggerProxy.trace(this, "Lock released"); //$NON-NLS-1$
    }
}

From source file:org.squashtest.tm.service.internal.requirement.VerifiedRequirementsManagerServiceImpl.java

@Override
public List<VerifiedRequirement> findAllVerifiedRequirementsByTestCaseId(long testCaseId) {
    LOGGER.debug("Looking for verified requirements of TestCase[id:{}]", testCaseId);

    Set<Long> calleesIds = callTreeFinder.getTestCaseCallTree(testCaseId);

    calleesIds.add(testCaseId);

    LOGGER.debug("Fetching Requirements verified by TestCases {}", calleesIds.toString());

    List<RequirementVersion> pagedVersionVerifiedByCalles = requirementVersionCoverageDao
            .findDistinctRequirementVersionsByTestCases(calleesIds);

    TestCase mainTestCase = testCaseDao.findById(testCaseId);

    return buildVerifiedRequirementList(mainTestCase, pagedVersionVerifiedByCalles);
}

From source file:net.ymate.platform.mvc.support.RequestExecutor.java

/**
 * @return the resulting view
 * @throws Exception if request execution fails
 */
public IView execute() throws Exception {
    _LOG.info(I18N.formatMessage(YMP.__LSTRING_FILE, null, null, "ymp.mvc.request_executor_startup",
            this.requestMeta.getRequestMapping()));
    IView _view = null;
    if (chain != null) {
        _view = chain.doChain(this.requestMeta);
    }
    if (_view == null) {
        try {
            Object[] _params = null;
            if (this.requestMeta.getRequestMethodHandler() != null) {
                _params = this.requestMeta.getRequestMethodHandler().getMethodParams();
            } else {
                _params = this.getMethodParams();
            }
            if (hasValidation()) {
                Set<ValidateResult> _results = null;
                if (this.requestMeta.getRequestMethodHandler() != null) {
                    _results = this.requestMeta.getRequestMethodHandler()
                            .doValidation(this.requestMeta.getMethod(), _params);
                } else {
                    _results = Validates.execute(validateRuleConf.getKey(), validateRuleConf.getValue(),
                            validateFieldValues);
                }
                if (_results != null && !_results.isEmpty()) {
                    if (WebMVC.getConfig().getErrorHandlerClassImpl() != null) {
                        _view = WebMVC.getConfig().getErrorHandlerClassImpl().onValidation(_results);
                    }
                    if (_view == null) {
                        throw new ValidationException(_results.toString());
                    }
                }
            }
            if (_view == null) {
                Object _result = this.requestMeta.getMethod().invoke(this.requestMeta.getTarget(), _params);
                _view = this.processMethodResultToView(_result);
            }
        } finally {
            validateFieldValues.clear();
        }
    }
    _LOG.info(I18N.formatMessage(YMP.__LSTRING_FILE, null, null, "ymp.mvc.request_executor_stop",
            this.requestMeta.getRequestMapping()));
    return _view;
}