Example usage for java.util.Set.toString()

Introduction

On this page you can find example usages of java.util.Set.toString(), collected from open-source projects.

Prototype

public String toString() 

Document

Returns a string representation of the object.
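
For the common JDK implementations (HashSet, LinkedHashSet, TreeSet), the format is inherited from AbstractCollection.toString(): the elements in iteration order, enclosed in square brackets ("[]") and separated by ", ". A minimal sketch of what a caller sees:

import java.util.Arrays;
import java.util.LinkedHashSet;
import java.util.Set;

public class SetToStringDemo {
    public static void main(String[] args) {
        // LinkedHashSet preserves insertion order, so the output is deterministic
        Set<String> colors = new LinkedHashSet<>(Arrays.asList("red", "green", "blue"));
        System.out.println(colors); // prints: [red, green, blue]
    }
}

HashSet makes no ordering guarantee, so code that parses this string (as some of the examples below do) is safer with an order-preserving implementation or an explicit joiner.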

Usage

From source file:registry.ExtensionRegistryController.java

private JsonNode createExt(ObjectNode node, String url) {
    ObjectNode result = json.newObject();
    //map our incoming json to the
    Extension extension = json.mapper().convertValue(node, Extension.class);
    Set<ConstraintViolation<Extension>> errors = validator.validate(extension);
    if (!errors.isEmpty()) {
        Iterator<ConstraintViolation<Extension>> i = errors.iterator();
        String msg = "";
        while (i.hasNext()) {
            msg = msg + i.next().getPropertyPath() + " ";
        }
        result.put("error", "Error while adding extension").put("reason", msg + "need(s) to be set");
        logger().error("Something really bad happened...");
        logger().error(errors.toString());
        return result;
    }

    List<Extension> omg = extensionCrud.query(new OSQLSynchQuery<Extension>(
            "select * from Extension where name like '" + extension.getName() + "'"));

    if (!omg.isEmpty()) {
        updateExtInDB(extension, url, omg.get(0));
        node.put("updated", true);
    } else {
        addExtToDB(extension, url);
        node.put("updated", false);
    }
    return node;
}

From source file:org.apache.ctakes.ytex.kernel.IntrinsicInfoContentEvaluatorImpl.java

public Set<String> getAllLeaves(ConceptGraph cg, BufferedWriter w) throws IOException {
    Set<String> leafSet = new HashSet<String>();
    for (Map.Entry<String, ConcRel> con : cg.getConceptMap().entrySet()) {
        if (con.getValue().isLeaf()) {
            leafSet.add(con.getValue().getConceptID());
        }
    }
    if (w != null) {
        w.write(Integer.toString(leafSet.size()));
        w.write("\t");
        w.write(leafSet.toString());
        w.newLine();
    }
    return leafSet;
}
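
For illustration: for a hypothetical concept graph whose only leaves are C0001 and C0002, the line this method writes would look like the sample below (element order is not guaranteed, since leafSet is a HashSet):

2	[C0001, C0002]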

From source file:org.alfresco.repo.content.metadata.TikaPoweredMetadataExtracter.java

private String getMetadataValue(Metadata metadata, String key) {
    if (metadata.isMultiValued(key)) {
        String[] parts = metadata.getValues(key);

        // use Set to prevent duplicates
        Set<String> value = new LinkedHashSet<String>(parts.length);

        for (int i = 0; i < parts.length; i++) {
            value.add(parts[i]);
        }

        String valueStr = value.toString();

        // remove leading/trailing braces []
        return valueStr.substring(1, valueStr.length() - 1);
    } else {
        return metadata.get(key);
    }
}
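
The substring call above works because AbstractCollection.toString() encloses the elements in "[" and "]"; it would break for a Set implementation with a different toString(). On Java 8+ the same comma-separated result can be produced without that dependency. A minimal alternative sketch, not part of the Alfresco source:

// join the de-duplicated values directly instead of trimming Set.toString() output
String valueStr = String.join(", ", value);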

From source file:storm.mesos.schedulers.DefaultScheduler.java

@Override
public List<WorkerSlot> allSlotsAvailableForScheduling(RotatingMap<Protos.OfferID, Protos.Offer> offers,
        Collection<SupervisorDetails> existingSupervisors, Topologies topologies,
        Set<String> topologiesMissingAssignments) {
    if (topologiesMissingAssignments.isEmpty()) {
        log.info("Declining all offers that are currently buffered because no topologies need assignments");
        // TODO(ksoundararaj): Do we need to clear offers now that we consolidate resources?
        offers.clear();
        return new ArrayList<>();
    }

    log.info("Topologies that need assignments: {}", topologiesMissingAssignments.toString());

    List<WorkerSlot> allSlots = new ArrayList<>();
    Map<String, AggregatedOffers> aggregatedOffersPerNode = MesosCommon.getAggregatedOffersPerNode(offers);

    for (String currentTopology : topologiesMissingAssignments) {
        TopologyDetails topologyDetails = topologies.getById(currentTopology);
        int slotsNeeded = topologyDetails.getNumWorkers();

        log.info("Trying to find {} slots for {}", slotsNeeded, topologyDetails.getId());
        if (slotsNeeded <= 0) {
            continue;
        }

        Set<String> nodesWithExistingSupervisors = new HashSet<>();
        for (String currentNode : aggregatedOffersPerNode.keySet()) {
            if (SchedulerUtils.supervisorExists(currentNode, existingSupervisors, currentTopology)) {
                nodesWithExistingSupervisors.add(currentNode);
            }
        }

        List<MesosWorkerSlot> mesosWorkerSlotList = getMesosWorkerSlots(aggregatedOffersPerNode,
                nodesWithExistingSupervisors, topologyDetails);
        for (MesosWorkerSlot mesosWorkerSlot : mesosWorkerSlotList) {
            String slotId = String.format("%s:%s", mesosWorkerSlot.getNodeId(), mesosWorkerSlot.getPort());
            mesosWorkerSlotMap.put(slotId, mesosWorkerSlot);
            allSlots.add(mesosWorkerSlot);
        }

        log.info("Number of available slots for {}: {}", topologyDetails.getId(), mesosWorkerSlotList.size());
    }

    List<String> slotsStrings = new ArrayList<String>();
    for (WorkerSlot slot : allSlots) {
        slotsStrings.add("" + slot.getNodeId() + ":" + slot.getPort());
    }
    log.info("allSlotsAvailableForScheduling: {} available slots: [{}]", allSlots.size(),
            StringUtils.join(slotsStrings, ", "));
    return allSlots;
}
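
A side note on the log calls above: with parameterized logging (the "{}" placeholder style, which suggests an SLF4J-like logger), the explicit toString() is redundant, because the framework calls toString() on the argument itself, and only when the log level is enabled:

log.info("Topologies that need assignments: {}", topologiesMissingAssignments);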

From source file:org.eurekastreams.server.service.opensocial.spi.ActivityServiceImpl.java

/**
 * Shindig implementation for retrieving activities from a set of users.
 *
 * @param userIds
 *            - set of userIds to retrieve activities for.
 * @param groupId
 *            - //TODO not sure about this one yet.
 * @param appId
 *            - id of the application requesting the activities.
 * @param fields
 *            - set of fields to retrieve.
 * @param options
 *            - collection of options for retrieving activities.
 * @param token
 *            - the security token for the request.
 *
 * @return collection of activities.
 */
@SuppressWarnings("unchecked")
public Future<RestfulCollection<Activity>> getActivities(final Set<UserId> userIds, final GroupId groupId,
        final String appId, final Set<String> fields, final CollectionOptions options,
        final SecurityToken token) {
    log.trace("Entering getActivities");

    List<Activity> osActivities = new ArrayList<Activity>();

    try {
        Set<String> userIdList = new HashSet<String>();
        for (UserId currentUserId : userIds) {
            if (!currentUserId.getUserId(token).equals("null")) {
                userIdList.add(currentUserId.getUserId(token));
            }
        }

        log.debug("Sending getActivities userIdList to action: " + userIdList.toString());

        GetUserActivitiesRequest currentRequest = new GetUserActivitiesRequest(new ArrayList<Long>(),
                userIdList);
        ServiceActionContext currentContext = new ServiceActionContext(currentRequest,
                openSocialPrincipalPopulator.getPrincipal(token.getViewerId()));

        LinkedList<ActivityDTO> activities = (LinkedList<ActivityDTO>) serviceActionController
                .execute(currentContext, getUserActivitiesAction);

        log.debug("Retrieved " + activities.size() + " activities from action");

        for (ActivityDTO currentActivity : activities) {
            osActivities.add(convertActivityFromEurekaActivityDTOToOS(currentActivity));
        }
    } catch (Exception ex) {
        log.error("Error occurred retrieving activities ", ex);
        throw new ProtocolException(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, ex.getMessage());
    }

    return ImmediateFuture.newInstance(new RestfulCollection<Activity>(osActivities));
}

From source file:org.fcrepo.auth.roles.basic.BasicRolesPEP.java

@Override
public boolean hasModeShapePermission(final Path absPath, final String[] actions,
        final Set<Principal> allPrincipals, final Principal userPrincipal) {
    final boolean newNode = false;
    Set<String> roles = null;
    try {
        final Session session = sessionFactory.getInternalSession();
        final Node realNode = findRealNode(absPath, session);
        log.debug("using real node: " + realNode);
        roles = this.getRoles(session, allPrincipals, realNode);
        log.debug("roles for this request: " + roles);
    } catch (final RepositoryException e) {
        throw new Error("Cannot look up node information on " + absPath + " for permissions check.", e);
    }

    if (clog.isDebugEnabled()) {
        final StringBuilder msg = new StringBuilder();
        msg.append(roles.toString()).append("\t").append(Arrays.toString(actions)).append("\t")
                .append(newNode ? "NEW" : "OLD").append("\t")
                .append((absPath == null ? absPath : absPath.toString()));
        clog.debug(msg.toString());
        if (actions.length > 1) { // have yet to see more than one
            clog.debug("FOUND MULTIPLE ACTIONS: " + Arrays.toString(actions));
        }
    }

    if (roles.isEmpty()) {
        log.debug("A caller without content roles can do nothing in the repository.");
        return false;
    }
    if (roles.contains("admin")) {
        log.debug("Granting an admin role permission to perform any action.");
        return true;
    }
    if (roles.contains("writer")) {
        if (absPath.toString().contains(AUTHZ_DETECTION)) {
            log.debug("Denying writer role permission to perform an action on an ACL node.");
            return false;
        } else {
            log.debug("Granting writer role permission to perform any action on a non-ACL nodes.");
            return true;
        }
    }
    if (roles.contains("reader")) {
        if (actions.length == 1 && "read".equals(actions[0])) {
            log.debug("Granting reader role permission to perform a read action.");
            return true;
        } else {
            log.debug("Denying reader role permission to perform a non-read action.");
            return false;
        }
    }
    log.error("There are roles in session that aren't recognized by this PEP: " + roles);
    return false;
}

From source file:org.apache.whirr.service.cassandra.integration.CassandraServiceTest.java

@Test(timeout = TestConstants.ITEST_TIMEOUT)
public void testInstances() throws Exception {
    Set<String> endPoints = Sets.newLinkedHashSet();
    for (Instance instance : cluster.getInstances()) {
        Cassandra.Client client = client(instance);
        Map<String, List<String>> tr = client.describe_schema_versions();
        for (List<String> version : tr.values()) {
            endPoints.addAll(version);
        }
        client.getOutputProtocol().getTransport().close();
    }
    LOG.info("List of endpoints: " + endPoints);

    for (Instance instance : cluster.getInstances()) {
        String address = instance.getPrivateAddress().getHostAddress();
        assertTrue(address + " not in cluster!", endPoints.remove(address));
    }
    assertTrue("Unknown node returned: " + endPoints.toString(), endPoints.isEmpty());
}

From source file:com.fluidops.iwb.widget.admin.WikiManagementWidget.java

protected static FButton createSelectPresetButton(final WikiStorageBulkServiceImpl wsApi,
        final WikiPageSelectionTable wpTable) {

    return new FButton("b" + Rand.getIncrementalFluidUUID(), "Select preset") {
        @Override
        public void onClick() {
            final FTextArea input = new FTextArea("inp");
            input.cols = 35;
            input.rows = 10;

            StringBuilder currentPreset = new StringBuilder();
            for (WikiPageMeta w : wpTable.getSelectedObjects()) {
                currentPreset.append(w.getPageUri().stringValue()).append("\n");
            }
            input.value = currentPreset.toString();
            FPopupWindow p = getPage().getPopupWindowInstance(
                    "Please configure your preset by adding one valid wiki page URI per line:");

            p.add(input);
            p.addButton("Ok", new Runnable() {
                @Override
                public void run() {
                    Set<String> selectedPreset = Sets.newHashSet(input.getText().split("\r?\n"));
                    List<WikiPageMeta> wm = wsApi
                            .getAllWikipages(new WikiStorageBulkServiceImpl.StringSetFilter(selectedPreset));

                    // retrieve table model to select all
                    FSelectableTableModel<WikiPageMeta> tm = wpTable.getModelSafe();
                    tm.setSelection(wm);

                    wpTable.setSortColumn(0, FTable.SORT_DESCENDING);
                    wpTable.populateView();

                    // the preset contains invalid URIs; inform the user
                    List<WikiPageMeta> selectedAfter = wpTable.getSelectedObjects();
                    if (selectedPreset.size() != selectedAfter.size()) {
                        for (WikiPageMeta w : selectedAfter)
                            selectedPreset.remove(w.getPageUri().stringValue());
                        throw new IllegalStateException(
                                "Preset contains URIs that are not known to the system: "
                                        + StringEscapeUtils.escapeHtml(selectedPreset.toString())
                                        + ". Please check your preset.");
                    }
                }
            });
            p.addCloseButton("Cancel");
            p.populateAndShow();

        }
    };
}

From source file:org.deri.iris.performance.IRISPerformanceTest.java

/**
 * Executes a set of datalog queries using the given configuration
 * @param queries The set of Datalog queries
 * @param config The configuration for the test suite
 * @return a list of IRISTestCase objects with the result of the test campaign
 */
public List<IRISTestCase> executeTests(final List<String> queries, final TestConfiguration config) {

    // Get the logger
    LOGGER = Logger.getLogger(IRISPerformanceTest.class.getName());

    // Construct a valid IRIS+- program using the queries and the configuration file
    String program = "";

    // add the query and its IRIS execution command to the program
    program += "/// Query ///\n";
    for (final String s : queries) {
        program += s + "\n";
        program += "?-" + s.substring(0, s.indexOf(":-")) + ".\n";
    }
    program += "\n";

    // If reasoning is enabled, add the TBOX to the program
    program += "/// TBox ///\n";
    if (config.getReasoning()) {
        String tboxPath = config.getTestHomePath() + "/" + config.getDataset() + "/tbox";
        if (config.getExpressiveness().compareTo("RDFS") == 0) {
            tboxPath += "/rdfs";
        }
        if (config.getExpressiveness().compareTo("OWL-QL") == 0) {
            tboxPath += "/owlql";
        }
        final String tbox = loadFile(tboxPath + "/" + config.getDataset() + ".dtg");
        program += tbox + "\n";
    } else {
        program += "/// EMPTY ///\n";
    }

    // Add the SBox
    program += "/// SBox ///\n";
    String sboxPath = config.getTestHomePath() + "/" + config.getDataset() + "/sbox";
    if (config.getExpressiveness().compareTo("RDFS") == 0) {
        sboxPath += "/rdfs";
    }
    if (config.getExpressiveness().compareTo("OWL-QL") == 0) {
        sboxPath += "/owlql";
    }
    final String sbox = loadFile(sboxPath + "/" + config.getDataset() + ".dtg");
    program += sbox + "\n\n";

    LOGGER.debug(program);

    // Get the parser
    final Parser parser = new Parser();

    // Parse the program
    try {
        parser.parse(program);
    } catch (final ParserException e) {
        e.printStackTrace();
    }

    // Get the TGDs from the set of rules
    final List<IRule> tgds = RewritingUtils.getTGDs(parser.getRules(), parser.getQueries());

    // Get the query bodies
    final List<IRule> bodies = new ArrayList<IRule>(parser.getRules());
    final List<IRule> datalogQueries = RewritingUtils.getQueries(bodies, parser.getQueries());

    // Get the constraints from the set of rules
    final Set<IRule> constraints = RewritingUtils.getConstraints(parser.getRules(), parser.getQueries());

    // Get the SBox rules from the set of rules
    final List<IRule> storageRules = RewritingUtils.getSBoxRules(parser.getRules(), parser.getQueries());

    // Check that the TBox is FO-reducible
    IRuleSafetyProcessor ruleProc = new LinearReducibleRuleSafetyProcessor();
    try {
        ruleProc.process(tgds);
    } catch (final RuleUnsafeException e) {
        e.printStackTrace();
    }

    // Check that the SBox rules are Safe Datalog
    ruleProc = new StandardRuleSafetyProcessor();
    try {
        ruleProc.process(storageRules);
    } catch (final RuleUnsafeException e) {
        e.printStackTrace();
    }

    // Connect to the storage
    StorageManager.getInstance();
    try {
        StorageManager.connect(config.getDBVendor(), config.getDBProtocol(), config.getDBHost(),
                config.getDBPort(), config.getDBName(), config.getSchemaName(), config.getDBUsername(),
                config.getDBPassword());
    } catch (final SQLException e) {
        e.printStackTrace();
    }

    // Evaluate the queries
    final List<IRISTestCase> output = new LinkedList<IRISTestCase>();
    for (final IQuery q : parser.getQueries()) {
        // Generate a new test-case
        final IRISTestCase currentTest = new IRISTestCase();
        int nTask = -10;

        // Get the Factories
        final IRelationFactory rf = new RelationFactory();

        // Get the Rewriter Engine
        final ParallelRewriter rewriter = new ParallelRewriter(DecompositionStrategy.DECOMPOSE,
                RewritingLanguage.UCQ, SubCheckStrategy.TAIL, NCCheck.TAIL);

        // Get and log the rule corresponding to the query
        final IRule ruleQuery = getRuleQuery(q, datalogQueries);
        currentTest.setQuery(ruleQuery);

        final Map<Pair<IPosition, IPosition>, Set<List<IRule>>> deps = DepGraphUtils
                .computePositionDependencyGraph(tgds);

        final Set<Expressivity> exprs = RewritingUtils.getExpressivity(tgds);

        // Compute and log the FO-Rewriting
        LOGGER.info("Computing TBox Rewriting");
        float duration = -System.nanoTime();
        final Set<IRule> rewriting = rewriter.getRewriting(ruleQuery, tgds, constraints, deps, exprs);
        duration = ((duration + System.nanoTime()) / 1000000);
        currentTest.getTasks()
                .add(new Task(nTask++, "TBox Rewriting", duration, 0, 0, "ms", rewriting.toString()));
        LOGGER.info("done.");
        int count = 0;
        for (final IRule r : rewriting) {
            LOGGER.debug("(Qr" + ++count + ")" + r);
        }

        // Produce the rewriting according to the Nyaya Data Model
        final IQueryRewriter ndmRewriter = new NDMRewriter(storageRules);

        // Create a buffer for the output
        final IRelation outRelation = rf.createRelation();

        // Get the SBox rewriting
        try {
            LOGGER.info("Computing SBox Rewriting");
            final Set<IRule> sboxRewriting = new LinkedHashSet<IRule>();
            duration = -System.nanoTime();
            for (final IRule pr : rewriting) {
                sboxRewriting.addAll(ndmRewriter.getRewriting(pr));
            }
            duration = ((duration + System.nanoTime()) / 1000000);
            currentTest.getTasks()
                    .add(new Task(nTask++, "SBox Rewriting", duration, 0, 0, "ms", sboxRewriting.toString()));
            LOGGER.info("done.");
            count = 0;
            for (final IRule n : sboxRewriting) {
                LOGGER.debug("(Qn" + ++count + ")" + n);
            }

            // Produce the SQL rewriting for each query in the program
            final SQLRewriter sqlRewriter = new SQLRewriter(sboxRewriting);

            // Get the SQL rewriting as Union of Conjunctive Queries (UCQ)
            LOGGER.info("Computing SQL Rewriting");
            duration = -System.nanoTime();
            final List<String> ucqSQLRewriting = new LinkedList<String>();
            ucqSQLRewriting.add(sqlRewriter.getUCQSQLRewriting("", 10000, 0));
            duration = ((duration + System.nanoTime()) / 1000000);
            currentTest.getTasks()
                    .add(new Task(nTask++, "SQL Rewriting", duration, 0, 0, "ms", ucqSQLRewriting.toString()));
            LOGGER.info("done.");
            count = 0;
            for (final String s : ucqSQLRewriting) {
                LOGGER.debug("(Qs" + ++count + ") " + s);
            }

            // Execute the UCQ
            LOGGER.info("Executing SQL");

            // float ansConstructOverall = 0;

            // The synchronized structure to store the output tuples
            final Set<ITuple> result = Collections.synchronizedSet(new HashSet<ITuple>());

            /*
             * Prepare a set of runnable objects representing each partial rewriting to be executed in parallel
             */
            final List<RunnableQuery> rql = new LinkedList<RunnableQuery>();
            for (final String cq : ucqSQLRewriting) {
                // Construct a Runnable Query
                rql.add(new RunnableQuery(cq, result, currentTest.getTasks()));
            }

            // A fixed thread pool sized to the available processors would allow true
            // parallelism; a single-threaded executor is used here instead.
            // ExecutorService queryExecutor =
            //         Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors() * 5);
            final ExecutorService queryExecutor = Executors.newSingleThreadScheduledExecutor();

            // Submit all the partial rewritings to the executor
            float ucqExecOverall = -System.nanoTime();
            for (final RunnableQuery rq : rql) {
                queryExecutor.execute(rq);
            }
            queryExecutor.shutdown();
            if (queryExecutor.awaitTermination(1, TimeUnit.DAYS)) {
                LOGGER.info("done.");
            } else {
                throw new InterruptedException("Timeout occurred");
            }
            ucqExecOverall = ((ucqExecOverall + System.nanoTime()) / 1000000);
            StorageManager.disconnect();

            // begin addition
            float minTime = System.nanoTime();
            float maxTime = 0;
            float avgTime = 0;
            int n = 0;
            for (final Task t : currentTest.getTasks()) {
                if (t.getName().contains("Execution")) {
                    avgTime += (t.getFinalTime() - t.getInitTime()) / 1000000;
                    n++;
                    if (t.getFinalTime() > maxTime) {
                        maxTime = t.getFinalTime();
                    }
                    if (t.getInitTime() < minTime) {
                        minTime = t.getInitTime();
                    }
                }
            }
            ucqExecOverall = (maxTime - minTime) / 1000000;
            // end addition

            currentTest.getTasks()
                    .add(new Task(nTask++, "UCQ Overall Execution Time", ucqExecOverall, 0, 0, "ms"));

            // begin addition
            avgTime = (avgTime / n);
            System.out.println(n);
            currentTest.getTasks().add(new Task(nTask++, "UCQ Average Execution Time", avgTime, 0, 0, "ms"));
            Collections.sort(currentTest.getTasks());
            // end addition

            for (final ITuple t : result) {
                outRelation.add(t);
            }

        } catch (final SQLException e) {
            e.printStackTrace();
        } catch (final EvaluationException e) {
            e.printStackTrace();
        } catch (final InterruptedException e) {
            e.printStackTrace();
        }
        currentTest.setAnswer(outRelation);
        output.add(currentTest);
    }
    return output;
}

From source file:org.everit.jira.hr.admin.SchemeUsersComponent.java

private void processSave(final HttpServletRequest req, final HttpServletResponse resp) {
    long schemeId = Long.parseLong(req.getParameter("schemeId"));
    String userName = req.getParameter("user");
    Date startDate = Date.valueOf(req.getParameter("start-date"));
    Date endDate = Date.valueOf(req.getParameter("end-date"));
    Date endDateExcluded = DateUtil.addDays(endDate, 1);

    Long userId = getUserId(userName);
    if (userId == null) {
        renderAlert("User does not exist", "error", resp);
        resp.setStatus(HttpServletResponse.SC_BAD_REQUEST);
        return;
    }

    if (startDate.compareTo(endDate) > 0) {
        renderAlert("Start date must not be after end date", "error", resp);
        resp.setStatus(HttpServletResponse.SC_BAD_REQUEST);
        return;
    }

    Set<String> schemeNamesWithOverlappingTimeRange = getSchemeNamesWithOverlappingTimeRange(userId, startDate,
            endDateExcluded, null);
    if (!schemeNamesWithOverlappingTimeRange.isEmpty()) {
        renderAlert(
                "The user is assigned, overlapping the specified date range, to the"
                        + " following scheme(s): " + schemeNamesWithOverlappingTimeRange.toString(),
                "error", resp);
        resp.setStatus(HttpServletResponse.SC_BAD_REQUEST);
        return;
    }

    save(schemeId, userName, startDate, endDateExcluded);
    Long userCount = schemeUserCount(String.valueOf(schemeId));
    try (PartialResponseBuilder prb = new PartialResponseBuilder(resp)) {
        renderAlertOnPrb("Assiging user successful", "info", prb, resp.getLocale());
        prb.replace("#scheme-user-table", render(req, resp.getLocale(), "scheme-user-table"));
        prb.replace("#delete-schema-validation-dialog", (writer) -> {
            DeleteSchemaValidationComponent.INSTANCE.render(writer, resp.getLocale(), userCount);
        });
    }
}