Example usage for java.util Set toString

Introduction

On this page you can find example usages of the java.util.Set toString() method.

Prototype

public String toString() 

Document

Returns a string representation of the object. The standard Set implementations inherit AbstractCollection#toString, so the result lists the elements in iteration order, separated by ", " and enclosed in square brackets (for example, [a, b, c]).
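
A minimal, self-contained sketch of the behavior (the class name SetToStringDemo is illustrative; LinkedHashSet is used so the iteration order, and therefore the output, is predictable, which a plain HashSet does not guarantee):

import java.util.LinkedHashSet;
import java.util.Set;

public class SetToStringDemo {
    public static void main(String[] args) {
        Set<String> colors = new LinkedHashSet<String>();
        colors.add("red");
        colors.add("green");
        colors.add("blue");

        // AbstractCollection#toString renders the elements in iteration
        // order, separated by ", " and enclosed in square brackets.
        String s = colors.toString();
        System.out.println(s); // prints: [red, green, blue]

        // String concatenation and println(Object) call toString() implicitly,
        // so logging a set, as the examples below do, relies on the same method.
        System.out.println("colors = " + colors); // prints: colors = [red, green, blue]
    }
}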

Usage

From source file:org.apache.accumulo.examples.wikisearch.parser.RangeCalculator.java

/**
 * Queries the global index and builds the concrete scan ranges for each term.
 *
 * @param indexRanges
 *          map from query term (field name and value) to its set of index ranges
 * @param specificFieldName
 *          optional column family (field name) to restrict the index scan to
 * @param tableName
 *          name of the global index table to query
 * @param isReverse
 *          switch that determines whether or not to reverse the results
 * @param override
 *          mapKey for wildcard and range queries that specifies which mapKey to use in the results
 * @param typeFilter
 *          optional set of datatypes; entries of other datatypes are skipped
 * @return map from the original query MapKey to the TermRange found in the index
 * @throws TableNotFoundException
 */
protected Map<MapKey, TermRange> queryGlobalIndex(Map<MapKey, Set<Range>> indexRanges, String specificFieldName,
        String tableName, boolean isReverse, MapKey override, Set<String> typeFilter)
        throws TableNotFoundException {

    // The results map where the key is the field name and field value and the
    // value is a set of ranges. The mapkey will always be the field name
    // and field value that was passed in the original query. The TermRange
    // will contain the field name and field value found in the index.
    Map<MapKey, TermRange> results = new HashMap<MapKey, TermRange>();

    // Seed the results map and create the range set for the batch scanner
    Set<Range> rangeSuperSet = new HashSet<Range>();
    for (Entry<MapKey, Set<Range>> entry : indexRanges.entrySet()) {
        rangeSuperSet.addAll(entry.getValue());
        TermRange tr = new TermRange(entry.getKey().getFieldName(), entry.getKey().getFieldValue());
        if (null == override)
            results.put(entry.getKey(), tr);
        else
            results.put(override, tr);
    }

    if (log.isDebugEnabled())
        log.debug("Querying global index table: " + tableName + ", range: " + rangeSuperSet.toString()
                + " colf: " + specificFieldName);
    BatchScanner bs = this.c.createBatchScanner(tableName, this.auths, this.queryThreads);
    bs.setRanges(rangeSuperSet);
    if (null != specificFieldName) {
        bs.fetchColumnFamily(new Text(specificFieldName));
    }

    for (Entry<Key, Value> entry : bs) {
        if (log.isDebugEnabled()) {
            log.debug("Index entry: " + entry.getKey().toString());
        }
        String fieldValue = null;
        if (!isReverse) {
            fieldValue = entry.getKey().getRow().toString();
        } else {
            StringBuilder buf = new StringBuilder(entry.getKey().getRow().toString());
            fieldValue = buf.reverse().toString();
        }

        String fieldName = entry.getKey().getColumnFamily().toString();
        // Get the shard id and datatype from the colq
        String colq = entry.getKey().getColumnQualifier().toString();
        int separator = colq.indexOf(EvaluatingIterator.NULL_BYTE_STRING);
        String shardId = null;
        String datatype = null;
        if (separator != -1) {
            shardId = colq.substring(0, separator);
            datatype = colq.substring(separator + 1);
        } else {
            shardId = colq;
        }
        // Skip this entry if the type is not correct
        if (null != datatype && null != typeFilter && !typeFilter.contains(datatype))
            continue;
        // Parse the UID.List object from the value
        Uid.List uidList = null;
        try {
            uidList = Uid.List.parseFrom(entry.getValue().get());
        } catch (InvalidProtocolBufferException e) {
            // Don't add UID information; at least we know which shards
            // the term is located in.
        }

        // Add the count for this shard to the total count for the term.
        // Guard against uidList being null if the protobuf parse above failed.
        long shardCount = (null == uidList) ? 0 : uidList.getCOUNT();
        Long storedCount = termCardinalities.get(fieldName);
        long count = (null == storedCount) ? shardCount : shardCount + storedCount;
        termCardinalities.put(fieldName, count);
        this.indexEntries.put(fieldName, fieldValue);

        if (null == override)
            this.indexValues.put(fieldValue, fieldValue);
        else
            this.indexValues.put(fieldValue, override.getOriginalQueryValue());

        // Create the keys
        Text shard = new Text(shardId);
        if (null == uidList || uidList.getIGNORE()) {
            // No usable UID list (ignore flag set, or the protobuf parse above
            // failed): create a scan range that is the entire shard
            if (null == override)
                results.get(new MapKey(fieldName, fieldValue)).add(new Range(shard));
            else
                results.get(override).add(new Range(shard));
        } else {
            // We should have UUIDs, create event ranges
            for (String uuid : uidList.getUIDList()) {
                Text cf = new Text(datatype);
                TextUtil.textAppend(cf, uuid);
                Key startKey = new Key(shard, cf);
                Key endKey = new Key(shard, new Text(cf.toString() + EvaluatingIterator.NULL_BYTE_STRING));
                Range eventRange = new Range(startKey, true, endKey, false);
                if (null == override)
                    results.get(new MapKey(fieldName, fieldValue)).add(eventRange);
                else
                    results.get(override).add(eventRange);
            }
        }
    }
    bs.close();
    return results;
}

From source file:adalid.commons.velocity.Writer.java

private Object invoke(File templatePropertiesFile, ForEachVariable variable, Object object) {
    String pattern = "failed to get a valid getter for for-each variable \"{0}\"";
    pattern += HINT
            + "a valid getter is a public zero-arguments method that returns either an object or a collection";
    String message;
    if (variable.getter == null) {
        Set<String> getters = gettersOf(variable.token);
        String strip = StringUtils.strip(getters.toString(), "[]");
        pattern += HINT + object.getClass() + " doesn''t implement any of these: " + strip;
        pattern += HINT + "add property \"{0}.getter\" to file \"{1}\"";
        message = MessageFormat.format(pattern, variable.token, templatePropertiesFile);
        return invoke(object, getters, message);
    } else {
        pattern += HINT + "check property \"{0}.getter\" at file \"{1}\"";
        message = MessageFormat.format(pattern, variable.token, templatePropertiesFile);
        return invoke(object, variable.getter, message);
    }
}

From source file:com.netflix.curator.framework.recipes.queue.TestDistributedQueue.java

@Test
public void testCustomExecutor() throws Exception {
    final int ITERATIONS = 1000;

    Timing timing = new Timing();
    DistributedQueue<String> queue = null;
    final CuratorFramework client = CuratorFrameworkFactory.newClient(server.getConnectString(),
            timing.session(), timing.connection(), new RetryOneTime(1));
    client.start();
    try {
        final CountDownLatch latch = new CountDownLatch(ITERATIONS);
        QueueConsumer<String> consumer = new QueueConsumer<String>() {
            @Override
            public void consumeMessage(String message) throws Exception {
                latch.countDown();
            }

            @Override
            public void stateChanged(CuratorFramework client, ConnectionState newState) {
            }
        };
        QueueSerializer<String> serializer = new QueueSerializer<String>() {
            @Override
            public byte[] serialize(String item) {
                return item.getBytes();
            }

            @Override
            public String deserialize(byte[] bytes) {
                return new String(bytes);
            }
        };

        Executor executor = Executors.newCachedThreadPool();

        final Set<String> used = Sets.newHashSet();
        final Set<String> doubleUsed = Sets.newHashSet();
        queue = new DistributedQueue<String>(client, consumer, serializer, QUEUE_PATH,
                QueueBuilder.defaultThreadFactory, executor, Integer.MAX_VALUE, false, "/lock",
                QueueBuilder.NOT_SET, true, 5000) {
            @SuppressWarnings("SimplifiableConditionalExpression")
            @Override
            protected boolean processWithLockSafety(String itemNode, DistributedQueue.ProcessType type)
                    throws Exception {
                if (used.contains(itemNode)) {
                    doubleUsed.add(itemNode);
                } else {
                    used.add(itemNode);
                }
                return (client.getState() == CuratorFrameworkState.STARTED)
                        ? super.processWithLockSafety(itemNode, type)
                        : false;
            }
        };
        queue.start();

        for (int i = 0; i < ITERATIONS; ++i) {
            queue.put(Integer.toString(i));
        }

        Assert.assertTrue(timing.awaitLatch(latch));

        Assert.assertTrue(doubleUsed.size() == 0, doubleUsed.toString());
    } finally {
        IOUtils.closeQuietly(queue);
        IOUtils.closeQuietly(client);
    }
}

From source file:org.dkpro.lab.engine.impl.BatchTaskEngine.java

@Override
public String run(Task aConfiguration) throws ExecutionException, LifeCycleException {
    if (!(aConfiguration instanceof BatchTask)) {
        throw new ExecutionException("This engine can only execute [" + BatchTask.class.getName() + "]");
    }

    // Create persistence service for injection into analysis components
    TaskContext ctx = null;
    try {
        ctx = contextFactory.createContext(aConfiguration);

        // Now the setup is complete
        ctx.getLifeCycleManager().initialize(ctx, aConfiguration);

        // Start recording
        ctx.getLifeCycleManager().begin(ctx, aConfiguration);

        try {
            BatchTask cfg = (BatchTask) aConfiguration;

            ParameterSpace parameterSpace = cfg.getParameterSpace();

            // Try to calculate the parameter space size.
            int estimatedSize = 1;
            for (Dimension<?> d : parameterSpace.getDimensions()) {
                if (d instanceof FixedSizeDimension) {
                    FixedSizeDimension fsd = (FixedSizeDimension) d;
                    if (fsd.size() > 0) {
                        estimatedSize *= fsd.size();
                    }
                }
            }

            // A subtask execution may apply to multiple parameter space coordinates!
            Set<String> executedSubtasks = new LinkedHashSet<String>();

            ProgressMeter progress = new ProgressMeter(estimatedSize);
            for (Map<String, Object> config : parameterSpace) {
                if (cfg.getConfiguration() != null) {
                    for (Entry<String, Object> e : cfg.getConfiguration().entrySet()) {
                        if (!config.containsKey(e.getKey())) {
                            config.put(e.getKey(), e.getValue());
                        }
                    }
                }

                log.info("== Running new configuration [" + ctx.getId() + "] ==");
                List<String> keys = new ArrayList<String>(config.keySet());
                for (String key : keys) {
                    log.info("[" + key + "]: ["
                            + StringUtils.abbreviateMiddle(Util.toString(config.get(key)), "", 150) + "]");
                }

                executeConfiguration(cfg, ctx, config, executedSubtasks);

                progress.next();
                log.info("Completed configuration " + progress);
            }

            // Set the subtask property and persist again, so the property is available to reports
            cfg.setAttribute(SUBTASKS_KEY, executedSubtasks.toString());
            cfg.persist(ctx);
        } catch (LifeCycleException e) {
            ctx.getLifeCycleManager().fail(ctx, aConfiguration, e);
            throw e;
        } catch (UnresolvedImportException e) {
            // HACK - pass unresolved import exceptions up to the outer batch task
            ctx.getLifeCycleManager().fail(ctx, aConfiguration, e);
            throw e;
        } catch (Throwable e) {
            ctx.getLifeCycleManager().fail(ctx, aConfiguration, e);
            throw new ExecutionException(e);
        }

        // End recording (here the reports will be done)
        ctx.getLifeCycleManager().complete(ctx, aConfiguration);

        return ctx.getId();
    } finally {
        if (ctx != null) {
            ctx.getLifeCycleManager().destroy(ctx, aConfiguration);
        }
    }
}

From source file:com.wizecommerce.hecuba.hector.HectorBasedHecubaClientManager.java

@Override
public CassandraResultSet<K, String> readColumns(Set<K> keys, List<String> columnNames) throws Exception {
    if (CollectionUtils.isNotEmpty(columnNames)) {
        ColumnFamilyResult<K, String> queriedColumns;
        try {
            queriedColumns = getColumnFamily().queryColumns(keys, columnNames, null);
            if (isClientAdapterDebugMessagesEnabled) {
                log.info(columnNames.size() + " columns retrieved from Cassandra [Hector] for " + keys.size()
                        + " keys . Exec Time (micro-sec) = " + queriedColumns.getExecutionTimeMicro()
                        + ", Host used = " + queriedColumns.getHostUsed());
            }
            return new HectorResultSet<K, String>(queriedColumns);
        } catch (HectorException e) {
            log.info("HecubaClientManager error while retrieving " + columnNames.size() + " columns. keys = "
                    + keys.toString());
            if (log.isDebugEnabled()) {
                log.debug("Caught Exception", e);
            }
            throw e;
        }
    } else {
        return readAllColumns(keys);
    }
}

From source file:com.wizecommerce.hecuba.hector.HectorBasedHecubaClientManager.java

/**
 * Retrieves all the columns for the given set of keys.
 *
 * @param keys - set of keys to retrieve
 */
@Override
public CassandraResultSet<K, String> readAllColumns(Set<K> keys) throws HectorException {
    try {
        if (maxColumnCount > 0) {
            return readColumnSlice(keys, null, null, false);
        } else {
            ColumnFamilyResult<K, String> queriedColumns = getColumnFamily().queryColumns(keys);
            if (isClientAdapterDebugMessagesEnabled) {
                log.info("Rows retrieved from Cassandra [Hector] (for " + keys.size() + " keys). Exec Time "
                        + "(micro-sec) = " + queriedColumns.getExecutionTimeMicro() + ", Host used = "
                        + queriedColumns.getHostUsed());
            }
            return new HectorResultSet<K, String>(queriedColumns);
        }
    } catch (HectorException e) {
        log.warn("HecubaClientManager [Hector] error while reading multiple keys. Number of keys = "
                + keys.size() + ", keys = " + keys.toString());
        if (log.isDebugEnabled()) {
            log.debug("Caught Exception while reading for multiple keys", e);
        }
        throw e;
    }
}

From source file:org.apache.geode.management.internal.cli.commands.QueryCommand.java

private DataCommandResult select(String query) {
    InternalCache cache = (InternalCache) CacheFactory.getAnyInstance();
    DataCommandResult dataResult;

    if (StringUtils.isEmpty(query)) {
        dataResult = DataCommandResult.createSelectInfoResult(null, null, -1, null,
                CliStrings.QUERY__MSG__QUERY_EMPTY, false);
        return dataResult;
    }

    boolean limitAdded = false;

    if (!StringUtils.containsIgnoreCase(query, " limit") && !StringUtils.containsIgnoreCase(query, " count(")) {
        query = query + " limit " + CommandExecutionContext.getShellFetchSize();
        limitAdded = true;
    }

    @SuppressWarnings("deprecation")
    QCompiler compiler = new QCompiler();
    Set<String> regionsInQuery;
    try {
        CompiledValue compiledQuery = compiler.compileQuery(query);
        Set<String> regions = new HashSet<>();
        compiledQuery.getRegionsInQuery(regions, null);

        // authorize data read on these regions
        for (String region : regions) {
            cache.getSecurityService().authorize(Resource.DATA, Operation.READ, region);
        }

        regionsInQuery = Collections.unmodifiableSet(regions);
        if (regionsInQuery.size() > 0) {
            Set<DistributedMember> members = DataCommandsUtils.getQueryRegionsAssociatedMembers(regionsInQuery,
                    cache, false);
            if (members != null && members.size() > 0) {
                DataCommandFunction function = new DataCommandFunction();
                DataCommandRequest request = new DataCommandRequest();
                request.setCommand(CliStrings.QUERY);
                request.setQuery(query);
                Subject subject = cache.getSecurityService().getSubject();
                if (subject != null) {
                    request.setPrincipal(subject.getPrincipal());
                }
                dataResult = callFunctionForRegion(request, function, members);
                dataResult.setInputQuery(query);
                if (limitAdded) {
                    dataResult.setLimit(CommandExecutionContext.getShellFetchSize());
                }
                return dataResult;
            } else {
                return DataCommandResult.createSelectInfoResult(null, null, -1, null,
                        CliStrings.format(CliStrings.QUERY__MSG__REGIONS_NOT_FOUND, regionsInQuery.toString()),
                        false);
            }
        } else {
            return DataCommandResult.createSelectInfoResult(null, null, -1, null,
                    CliStrings.format(CliStrings.QUERY__MSG__INVALID_QUERY,
                            "Region mentioned in query probably missing /"),
                    false);
        }
    } catch (QueryInvalidException qe) {
        logger.error("{} Failed Error {}", query, qe.getMessage(), qe);
        return DataCommandResult.createSelectInfoResult(null, null, -1, null,
                CliStrings.format(CliStrings.QUERY__MSG__INVALID_QUERY, qe.getMessage()), false);
    }
}

From source file:uk.ac.ox.oucs.vle.CourseSignupServiceImpl.java

@Override
public List<CourseComponentExport> exportComponentSignups(String componentId, Set<Status> statuses,
        Integer year) {
    List<Map> componentSignups = dao.findComponentSignups(componentId, statuses, year);
    // These should be ordered already
    List<CourseComponentExport> exports = new ArrayList<>();
    CourseComponentExport export = null;
    for (Map map : componentSignups) {
        CourseSignupDAO signupDAO = (CourseSignupDAO) map.get("signup");
        CourseComponentDAO componentDAO = (CourseComponentDAO) map.get("this");
        CourseGroupDAO groupDAO = (CourseGroupDAO) map.get("group");
        if (componentDAO == null) {
            // We don't bail out here so that we still get a partial export.
            log.error(String.format("Failed to get the complete data when exporting %s, %s, %d", componentId,
                    statuses.toString(), year));
            continue;
        }
        CourseComponent component = new CourseComponentImpl(componentDAO);
        if (isAdministrator(componentDAO)) {
            if (export == null || !export.getComponent().equals(component)) {
                export = new CourseComponentExport(component);
                exports.add(export);
            }
            if (signupDAO != null && groupDAO != null) {
                CourseSignup signup = new CourseSignupImpl(signupDAO, this);
                CourseGroup group = new CourseGroupImpl(groupDAO, this);
                export.addSignup(new CourseSignupExport(signup, group));
            }
        }
    }
    return exports;
}

From source file:org.lockss.plugin.springer.link.BaseSpringerLinkCrawlSeed.java

/**
 * <p>
 * Makes a URL fetcher for the given API request that will parse the result
 * using the given {@link SpringerLinkPamLinkExtractor} instance.
 * </p>
 * 
 * @param ple
 *          A {@link SpringerLinkPamLinkExtractor} instance to parse the API
 *          response with.
 * @param url
 *          A query URL.
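 * @param loggerUrl
 *          The URL to report in log and error messages, used in place of the
 *          fetched URL.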
 * @return A URL fetcher for the given query URL.
 * @since 1.67.5
 */
protected UrlFetcher makeApiUrlFetcher(final SpringerLinkPamLinkExtractor ple, final String url,
        final String loggerUrl) {
    // Make a URL fetcher
    UrlFetcher uf = facade.makeUrlFetcher(url);

    // Set refetch flag
    BitSet permFetchFlags = uf.getFetchFlags();
    permFetchFlags.set(UrlCacher.REFETCH_FLAG);
    uf.setFetchFlags(permFetchFlags);

    // Set custom crawl rate limiter
    uf.setCrawlRateLimiter(API_CRAWL_RATE_LIMITER);

    // Set custom URL consumer factory
    uf.setUrlConsumerFactory(new UrlConsumerFactory() {
        @Override
        public UrlConsumer createUrlConsumer(CrawlerFacade ucfFacade, FetchedUrlData ucfFud) {
            // Make custom URL consumer
            return new SimpleUrlConsumer(ucfFacade, ucfFud) {
                @Override
                public void consume() throws IOException {
                    // Apply link extractor to URL and output results into a list
                    final Set<String> partial = new HashSet<String>();
                    try {
                        String au_cset = AuUtil.getCharsetOrDefault(fud.headers);
                        String cset = CharsetUtil.guessCharsetFromStream(fud.input, au_cset);
                        //FIXME 1.69 
                        // Once guessCharsetFromStream correctly uses the hint instead of returning null
                        // this local bit won't be needed.
                        if (cset == null) {
                            cset = au_cset;
                        }
                        //
                        ple.extractUrls(au, fud.input, cset, loggerUrl, // rather than fud.origUrl
                                new Callback() {
                                    @Override
                                    public void foundLink(String url) {
                                        partial.add(url);
                                    }
                                });
                    } catch (IOException ioe) {
                        log.debug2("Link extractor threw", ioe);
                        throw new IOException("Error while parsing PAM response for " + loggerUrl, ioe);
                    } finally {
                        // Logging
                        log.debug2(String.format("Step ending with %d URLs", partial.size()));
                        if (log.isDebug3()) {
                            log.debug3("URLs from step: " + partial.toString());
                        }
                        // Output accumulated URLs to start URL list
                        urlList.addAll(convertDoisToUrls(partial));
                    }
                }
            };
        }
    });
    return uf;
}