Example usage for java.util Set toString

Introduction

This page collects usage examples for java.util Set.toString().

Prototype

public String toString() 

Document

Returns a string representation of the set. For the standard implementations (which inherit this behavior from AbstractCollection), the result lists the set's elements in the order returned by its iterator, enclosed in square brackets ("[]") and separated by ", " (comma and space).
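
The usage examples below all rely on this behavior to render a Set inside log and assertion messages. As a minimal, self-contained sketch (the class name SetToStringDemo is illustrative; a LinkedHashSet is used so the element order is predictable, whereas a plain HashSet gives no ordering guarantee):

import java.util.LinkedHashSet;
import java.util.Set;

public class SetToStringDemo {
    public static void main(String[] args) {
        // LinkedHashSet keeps insertion order, so the printed output is predictable.
        Set<String> names = new LinkedHashSet<>();
        names.add("alice");
        names.add("bob");

        // toString() joins the elements with ", " and wraps them in square brackets.
        System.out.println(names.toString());          // prints: [alice, bob]

        // Typical use, as in the examples below: embed the set in a diagnostic message.
        System.out.println("duplicate messages: " + names);
    }
}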

Usage

From source file:com.netflix.curator.framework.recipes.queue.TestDistributedQueue.java

@Test
public void testNoDuplicateProcessing() throws Exception {
    final int itemQty = 1000;
    final int consumerQty = 4;

    Timing timing = new Timing();

    CuratorFramework client = CuratorFrameworkFactory.newClient(server.getConnectString(), timing.session(),
            timing.connection(), new ExponentialBackoffRetry(100, 3));
    client.start();
    try {
        DistributedQueue<TestQueueItem> producerQueue = QueueBuilder
                .builder(client, null, serializer, QUEUE_PATH).buildQueue();
        try {
            producerQueue.start();
            for (int i = 0; i < itemQty; ++i) {
                TestQueueItem item = new TestQueueItem(Integer.toString(i));
                producerQueue.put(item);
            }
            producerQueue.flushPuts(timing.multiple(2).seconds(), TimeUnit.SECONDS);
        } finally {
            producerQueue.close();
        }
    } finally {
        client.close();
    }

    final Set<String> consumedMessages = Sets.newHashSet();
    final Set<String> duplicateMessages = Sets.newHashSet();

    final CountDownLatch latch = new CountDownLatch(itemQty);
    List<DistributedQueue<TestQueueItem>> consumers = Lists.newArrayList();
    List<CuratorFramework> consumerClients = Lists.newArrayList();
    try {
        final QueueConsumer<TestQueueItem> ourQueue = new QueueConsumer<TestQueueItem>() {
            @Override
            public void consumeMessage(TestQueueItem message) {
                synchronized (consumedMessages) {
                    if (consumedMessages.contains(message.str)) {
                        duplicateMessages.add(message.str);
                    }
                    consumedMessages.add(message.str);
                }
                latch.countDown();
            }

            @Override
            public void stateChanged(CuratorFramework client, ConnectionState newState) {
            }
        };

        for (int i = 0; i < consumerQty; ++i) {
            CuratorFramework thisClient = CuratorFrameworkFactory.newClient(server.getConnectString(),
                    new RetryOneTime(1));
            consumerClients.add(thisClient);
            thisClient.start();

            DistributedQueue<TestQueueItem> thisConsumer = QueueBuilder
                    .builder(thisClient, ourQueue, serializer, QUEUE_PATH).lockPath("/a/locks").buildQueue();
            consumers.add(thisConsumer);
        }
        for (DistributedQueue<TestQueueItem> consumer : consumers) {
            consumer.start();
        }

        timing.awaitLatch(latch);
        Assert.assertTrue(duplicateMessages.size() == 0, duplicateMessages.toString());
    } finally {
        for (DistributedQueue<TestQueueItem> consumer : consumers) {
            IOUtils.closeQuietly(consumer);
        }
        for (CuratorFramework curatorFramework : consumerClients) {
            IOUtils.closeQuietly(curatorFramework);
        }
    }
}

From source file:fr.landel.utils.assertor.predicate.PredicateAssertorIterableTest.java

/**
 * Test method for {@link AssertorIterable#contains}.
 *
 * @throws IOException
 *             On not contain
 */
@Test
public void testContainsIterable() throws IOException {
    final String el1 = "element1";
    final String el2 = "element2";

    final Set<String> set = new HashSet<>();
    final Set<String> set2 = new HashSet<>();
    set.add(el1);
    set2.add(el1);

    Assertor.<Set<String>, String>ofIterable().containsAll(set2).that(set)
            .orElseThrow("iterable doesn't contain the list %s*");
    Assertor.<Set<String>, String>ofIterable().containsAny(set2).that(set)
            .orElseThrow("iterable doesn't contain the list %s*");
    Assertor.<Set<String>, String>ofIterable(EnumAnalysisMode.STREAM).containsAll(set2).that(set)
            .orElseThrow("iterable doesn't contain the list %s*");
    Assertor.<Set<String>, String>ofIterable(EnumAnalysisMode.STREAM).containsAny(set2).that(set)
            .orElseThrow("iterable doesn't contain the list %s*");
    Assertor.<Set<String>, String>ofIterable(EnumAnalysisMode.PARALLEL).containsAll(set2).that(set)
            .orElseThrow("iterable doesn't contain the list %s*");
    Assertor.<Set<String>, String>ofIterable(EnumAnalysisMode.PARALLEL).containsAny(set2).that(set)
            .orElseThrow("iterable doesn't contain the list %s*");

    set2.add(el2);
    Assertor.<Set<String>, String>ofIterable().containsAny(set2).that(set)
            .orElseThrow("iterable doesn't contain the list %s*");

    assertException(() -> {
        Assertor.<Set<String>, String>ofIterable().containsAll(set2).that(set)
                .orElseThrow("iterable doesn't contain the list %2$s*");
        fail(ERROR);
    }, IllegalArgumentException.class, "iterable doesn't contain the list " + set2.toString());

    assertException(() -> {
        Assertor.<Set<String>, String>ofIterable().containsAll(set2).that(set).orElseThrow(new IOException(),
                true);
        fail(ERROR);
    }, IOException.class);

    assertException(() -> {
        Assertor.<Set<String>, String>ofIterable().containsAll((Iterable<String>) null).that(set).orElseThrow();
        fail(ERROR);
    }, IllegalArgumentException.class, "neither iterables can be null or empty");

    assertException(() -> {
        Assertor.<Set<String>, String>ofIterable().containsAny((Iterable<String>) null).that(set).orElseThrow();
        fail(ERROR);
    }, IllegalArgumentException.class, "neither iterables can be null or empty");

    set.clear();

    assertException(() -> {
        Assertor.<Set<String>, String>ofIterable().containsAll(set2).that(set).orElseThrow();
        fail(ERROR);
    }, IllegalArgumentException.class);

    assertException(() -> {
        Assertor.<Set<String>, String>ofIterable().containsAll(set2).that(set).orElseThrow();
        fail(ERROR);
    }, IllegalArgumentException.class, "neither iterables can be null or empty");

    assertException(() -> {
        Assertor.<Iterable<String>, String>ofIterable().contains(el1).that((Iterable<String>) null)
                .orElseThrow();
        fail(ERROR);
    }, IllegalArgumentException.class, "the iterable cannot be null or empty");

    assertException(() -> {
        Assertor.<Iterable<String>, String>ofIterable().containsAny(set2).that((Iterable<String>) null)
                .orElseThrow();
        fail(ERROR);
    }, IllegalArgumentException.class, "neither iterables can be null or empty");

    assertException(() -> {
        Assertor.<Set<String>, String>ofIterable().containsAll((Iterable<String>) null).that(set).orElseThrow();
        fail(ERROR);
    }, IllegalArgumentException.class, "neither iterables can be null or empty");

    set.add(null);
    Assertor.<Set<String>, String>ofIterable().contains(null).that(set).orElseThrow();
}

From source file:com.google.enterprise.connector.sharepoint.spiimpl.SharepointConnectorType.java

/**
 * Validates the values filled-in by the user at the connector's configuration
 * page.
 */
private boolean validateConfigMap(final Map<String, String> configData, final ErrorDignostics ed) {
    if (configData == null) {
        LOGGER.warning("configData map is not found");
        return false;
    }
    LOGGER.info("push acls validate :" + configData.get("pushAcls"));
    if (!configData.containsKey(SPConstants.USE_CACHE_TO_STORE_LDAP_USER_GROUPS_MEMBERSHIP)) {
        this.useCacheToStoreLdapUserGroupsMembership = SPConstants.OFF;
    }
    if (null == configData.get(SPConstants.PUSH_ACLS)) {
        this.pushAcls = SPConstants.OFF;
    }

    FeedType feedType = null;
    String kdcServer = configData.get(SPConstants.KDC_SERVER);

    if (!Strings.isNullOrEmpty(kdcServer)) {
        kerberosSetUp(configData);
    } else {
        unregisterKerberosSetUp(configData);
    }

    // Check boxes (like PUSH_ACLS, USE_SP_SEARCH_VISIBILITY, etc) are
    // specified by the connector manager as either being set to "on" or being
    // nonexistent in configData. These need to be modified to "true" and
    // "false", respectively, so that they are interpreted correctly by the
    // connector. This must be done prior to the rest of the checks being
    // performed, or else if the save fails and the form is presented again,
    // they will be presented with the opposite value that they were saved
    // with.
    convertCheckBoxes(configData);

    for (String key : CONFIG_FIELDS) {
        final String val = configData.get(key);

        if (isRequired(key)) {
            if ((val == null) || val.equals(SPConstants.BLANK_STRING) || (val.length() == 0)) {
                ed.set(key,
                        rb.getString(SPConstants.REQ_FIELDS_MISSING) + SPConstants.SPACE + rb.getString(key));
                return false;
            } else if (key.equals(SPConstants.SHAREPOINT_URL)) {
                if (!isURL(val)) {
                    ed.set(key, rb.getString(SPConstants.MALFORMED_URL));
                    return false;
                }
                if (!isInFQDN(val)) {
                    ed.set(key, rb.getString(SPConstants.REQ_FQDN_URL));
                    return false;
                }
            } else if (key.equals(SPConstants.INCLUDED_URLS)) {
                final Set<String> invalidSet = validatePatterns(val);
                if (invalidSet != null) {
                    ed.set(SPConstants.INCLUDED_URLS,
                            rb.getString(SPConstants.INVALID_INCLUDE_PATTERN) + invalidSet.toString());
                    return false;
                }
            }
        } else if (key.equals(SPConstants.ALIAS_MAP) && (val != null)
                && !val.equals(SPConstants.BLANK_STRING)) {
            final Set<String> wrongEntries = new HashSet<String>();
            final String message = parseAlias(val, wrongEntries);
            if (message != null) {
                ed.set(SPConstants.ALIAS_MAP, rb.getString(message) + " " + wrongEntries);
                return false;
            }
        } else if (key.equals(SPConstants.EXCLUDED_URLS)) {
            final Set<String> invalidSet = validatePatterns(val);
            if (invalidSet != null) {
                ed.set(SPConstants.EXCLUDED_URLS,
                        rb.getString(SPConstants.INVALID_EXCLUDE_PATTERN) + invalidSet.toString());
                LOGGER.warning("Invalid Exclude pattern:" + val);
                return false;
            }
        } else if (key.equals(SPConstants.AUTHORIZATION)) {
            feedType = FeedType.getFeedType(val);
        } else if (!Strings.isNullOrEmpty(kdcServer) && key.equals(SPConstants.KDC_SERVER)) {
            boolean isFQDN = false;
            if (!Util.isFQDN(kdcServer)) {
                ed.set(SPConstants.KDC_SERVER, rb.getString(SPConstants.KERBEROS_KDC_HOST_BLANK));
                return false;
            } else {
                try {
                    Integer.parseInt(kdcServer.substring(0, kdcServer.indexOf(".")));
                } catch (NumberFormatException nfe) {
                    isFQDN = true;
                }
                if (!isFQDN && !validateIPAddress(kdcServer)) {
                    ed.set(SPConstants.KDC_SERVER, rb.getString(SPConstants.KERBEROS_KDC_HOST_BLANK));
                    return false;
                }
            }
        } else if (key.equals(SPConstants.SOCIAL_OPTION)) {
            if ((val != null)) {
                String option = val.trim();
                if ((!option.equalsIgnoreCase(SPConstants.BLANK_STRING))
                        && (!option.equalsIgnoreCase(SPConstants.SOCIAL_OPTION_YES))
                        && (!option.equalsIgnoreCase(SPConstants.SOCIAL_OPTION_NO))
                        && (!option.equalsIgnoreCase(SPConstants.SOCIAL_OPTION_ONLY))) {
                    LOGGER.warning("Invalid social option " + val);
                    ed.set(SPConstants.SOCIAL_OPTION, rb.getString(SPConstants.SOCIAL_OPTION_INVALID));
                    return false;
                }
                if (!option.equalsIgnoreCase(SPConstants.SOCIAL_OPTION_NO)) {
                    // Validate collection name is well-formed.
                    String collectionName = configData.get(SPConstants.SOCIAL_USER_PROFILE_COLLECTION);
                    if (!(SocialCollectionHandler.validateCollectionName(collectionName))) {
                        ed.set(SPConstants.SOCIAL_USER_PROFILE_COLLECTION,
                                rb.getString(SPConstants.SOCIAL_COLLECTION_INVALID));
                        return false;
                    }
                }
            }
        }
        setSharepointCredentials(key, val);
    }

    if ((username != null) && ((username.indexOf("@") != -1) || (username.indexOf("\\") != -1))
            && (domain != null) && !domain.equals(SPConstants.BLANK_STRING)) {
        ed.set(SPConstants.USERNAME, rb.getString(SPConstants.DUPLICATE_DOMAIN));
        return false;
    }

    try {
        sharepointClientContext = new SharepointClientContext(clientFactory, sharepointUrl, domain, kdcServer,
                username, password, "", "", "", includeURL, excludeURL, mySiteUrl, "", feedType,
                Boolean.parseBoolean(useSPSearchVisibility));
    } catch (final Exception e) {
        LOGGER.log(Level.SEVERE,
                "Failed to create SharePointClientContext with the received configuration values. ");
    }
    String status = checkPattern(sharepointUrl);
    if (status != null) {
        ed.set(null, rb.getString(SPConstants.SHAREPOINT_URL) + " " + status);
        return false;
    }
    status = null;

    if (FeedType.CONTENT_FEED == feedType) {
        status = checkGSConnectivity(sharepointUrl);
        if (!SPConstants.CONNECTIVITY_SUCCESS.equalsIgnoreCase(status)) {
            ed.set(null, rb.getString(SPConstants.BULKAUTH_ERROR_CRAWL_URL) + rb.getString(SPConstants.REASON)
                    + status);
            return false;
        }
    }
    status = null;

    status = checkConnectivity(sharepointUrl);
    if (!SPConstants.CONNECTIVITY_SUCCESS.equalsIgnoreCase(status)) {
        ed.set(null, rb.getString(SPConstants.CANNOT_CONNECT) + rb.getString(SPConstants.REASON) + status);
        return false;
    }
    status = null;

    if (!validateFeedAclsRelatedHtmlControls(ed)) {
        return false;
    }

    final SPType SPVersion = sharepointClientContext.checkSharePointType(sharepointUrl);
    if (SPType.SP2007 == SPVersion && mySiteUrl != null && !mySiteUrl.equals(SPConstants.BLANK_STRING)) {
        if (!isURL(mySiteUrl)) {
            ed.set(SPConstants.MYSITE_BASE_URL, rb.getString(SPConstants.MALFORMED_MYSITE_URL));
            return false;
        }
        if (!isInFQDN(mySiteUrl)) {
            ed.set(SPConstants.MYSITE_BASE_URL, rb.getString(SPConstants.REQ_FQDN_MYSITE_URL));
            return false;
        }

        status = checkPattern(mySiteUrl);
        if (status != null) {
            ed.set(null, rb.getString(SPConstants.MYSITE_BASE_URL) + " " + status);
            return false;
        }
        status = null;

        status = checkConnectivity(mySiteUrl);
        if (!SPConstants.CONNECTIVITY_SUCCESS.equalsIgnoreCase(status)) {
            ed.set(SPConstants.MYSITE_BASE_URL, rb.getString(SPConstants.CANNOT_CONNECT_MYSITE)
                    + rb.getString(SPConstants.REASON) + status);
            return false;
        }

        if (FeedType.CONTENT_FEED == feedType) {
            status = checkGSConnectivity(mySiteUrl);
            if (!SPConstants.CONNECTIVITY_SUCCESS.equalsIgnoreCase(status)) {
                ed.set(SPConstants.MYSITE_BASE_URL, rb.getString(SPConstants.BULKAUTH_ERROR_MYSITE_URL)
                        + rb.getString(SPConstants.REASON) + status);
                return false;
            }
        }
    }
    return true;
}

From source file:org.jactr.eclipse.runtime.launching.ACTRLaunchConfigurationUtils.java

@SuppressWarnings("unchecked")
static public void computeBundleDependencies(ILaunchConfigurationWorkingCopy configuration,
        Set<String> workspaceBundles, Set<String> targetBundles) throws CoreException {
    IWorkspaceRoot root = ResourcesPlugin.getWorkspace().getRoot();
    String projectName = configuration.getAttribute(IJavaLaunchConfigurationConstants.ATTR_PROJECT_NAME, "");
    IProject sourceProject = null;

    if (projectName.length() != 0)
        sourceProject = root.getProject(projectName);

    // IProject project = root.getProject(configuration.getAttribute(
    // LaunchConfigurationConstants.ACTR_PROJECT, ""));

    Collection<String> appDependencies = null;

    if (configuration.getAttribute(ACTRLaunchConstants.ATTR_ITERATIONS, 0) == 0)
        appDependencies = BundleUtilities.getDependencies(ACTRLaunchConstants.DEFAULT_APPLICATION_BUNDLE);
    else
        appDependencies = BundleUtilities.getDependencies(ACTRLaunchConstants.ITERATIVE_APPLICATION_BUNDLE);

    Collection<String> currentDependencies = Collections.EMPTY_SET;

    if (sourceProject != null && sourceProject.exists())
        currentDependencies = BundleUtilities.getDependencies(sourceProject);

    Collection<String> uniqueDependencies = new TreeSet<String>();
    for (String bundleId : appDependencies)
        uniqueDependencies.add(bundleId);

    for (String bundleId : currentDependencies)
        uniqueDependencies.add(bundleId);

    /*
     * now for the sensors
     */
    for (SensorDescriptor sensor : getRequiredSensors(configuration))
        for (String bundleId : BundleUtilities.getDependencies(sensor.getContributor()))
            uniqueDependencies.add(bundleId);

    /*
     * and instruments
     */
    for (InstrumentDescriptor instrument : getRequiredInstruments(configuration))
        for (String bundleId : BundleUtilities.getDependencies(instrument.getContributor()))
            uniqueDependencies.add(bundleId);

    /*
     * now we determine where they are coming from, we preference workspace
     * plugins over installed ones so that you can self-host
     */
    for (IPluginModelBase modelBase : PluginRegistry.getWorkspaceModels()) {
        String pluginId = modelBase.getPluginBase(true).getId();

        // not entirely clear how to get the project from the model..
        // this matters because if the project is closed, we shouldn't use it
        // IProject requiredProject = root.getProject();
        // if (requiredProject.isAccessible())
        if (pluginId != null && uniqueDependencies.remove(pluginId))
            workspaceBundles.add(pluginId);
    }

    /*
     * and the rest we assume are targets
     */
    targetBundles.addAll(uniqueDependencies);

    if (LOGGER.isDebugEnabled()) {
        LOGGER.debug("workspace : " + workspaceBundles.toString());
        LOGGER.debug("target : " + targetBundles.toString());
    }
}

From source file:org.apache.pig.builtin.TestAvroStorage.java

private void verifyResults(String outPath, String expectedOutpath, String expectedCodec) throws IOException {
    FileSystem fs = FileSystem.getLocal(new Configuration());

    /* read in expected results*/
    Set<GenericData.Record> expected = getExpected(expectedOutpath);

    /* read in output results and compare */
    Path output = new Path(outPath);
    assertTrue("Output dir does not exists!", fs.exists(output) && fs.getFileStatus(output).isDir());

    Path[] paths = FileUtil.stat2Paths(fs.listStatus(output, hiddenPathFilter));
    assertTrue("Split field dirs not found!", paths != null);

    for (Path path : paths) {
        Path[] files = FileUtil.stat2Paths(fs.listStatus(path, hiddenPathFilter));
        assertTrue("No files found for path: " + path.toUri().getPath(), files != null);
        for (Path filePath : files) {
            assertTrue("This shouldn't be a directory", fs.isFile(filePath));

            GenericDatumReader<GenericData.Record> reader = new GenericDatumReader<GenericData.Record>();

            DataFileStream<GenericData.Record> in = new DataFileStream<GenericData.Record>(fs.open(filePath),
                    reader);
            assertEquals("codec", expectedCodec, in.getMetaString("avro.codec"));
            int count = 0;
            while (in.hasNext()) {
                GenericData.Record obj = in.next();
                assertTrue(
                        "Avro result object found that's not expected: Found "
                                + (obj != null ? obj.getSchema() : "null") + ", " + obj.toString()
                                + "\nExpected " + (expected != null ? expected.toString() : "null") + "\n",
                        expected.contains(obj));
                count++;
            }
            in.close();
            assertEquals(expected.size(), count);
        }
    }
}

From source file:org.apache.ranger.plugin.model.validation.RangerPolicyValidator.java

boolean isValidResourceNames(final RangerPolicy policy, final List<ValidationFailureDetails> failures,
        final RangerServiceDef serviceDef) {

    if (LOG.isDebugEnabled()) {
        LOG.debug(String.format("==> RangerPolicyValidator.isValidResourceNames(%s, %s, %s)", policy, failures,
                serviceDef));
    }

    boolean valid = true;
    Set<String> policyResources = getPolicyResources(policy);

    RangerServiceDefHelper defHelper = new RangerServiceDefHelper(serviceDef);
    Set<List<RangerResourceDef>> hierarchies = defHelper.getResourceHierarchies(policy.getPolicyType()); // this can be empty but not null!
    if (hierarchies.isEmpty()) {
        LOG.warn(
                "RangerPolicyValidator.isValidResourceNames: serviceDef does not have any resource hierarchies, possibly due to a old/migrated service def!  Skipping this check!");
    } else {
        /*
         * A policy is for a single hierarchy however, it doesn't specify which one.  So we have to guess which hierarchy(s) it possibly be for.  First, see if the policy could be for
         * any of the known hierarchies?  A candidate hierarchy is one whose resource levels are a superset of those in the policy.
         * Why?  What we want to catch at this stage is policies that straddles multiple hierarchies, e.g. db, udf and column for a hive policy.
         * This has the side effect of catch spurious levels specified on the policy, e.g. having a level "blah" on a hive policy.
         */
        Set<List<RangerResourceDef>> candidateHierarchies = filterHierarchies_hierarchyHasAllPolicyResources(
                policyResources, hierarchies, defHelper);
        if (candidateHierarchies.isEmpty()) {
            if (LOG.isDebugEnabled()) {
                LOG.debug(String.format(
                        "No compatible resource hierarchies found: resource[%s], service-def[%s], valid-resource-hierarchies[%s]",
                        policyResources.toString(), serviceDef.getName(),
                        toStringHierarchies_all(hierarchies, defHelper)));
            }
            ValidationErrorCode error;
            if (hierarchies.size() == 1) { // we can give a simpler message for single hierarchy service-defs which is the majority of cases
                error = ValidationErrorCode.POLICY_VALIDATION_ERR_INVALID_RESOURCE_NO_COMPATIBLE_HIERARCHY_SINGLE;
            } else {
                error = ValidationErrorCode.POLICY_VALIDATION_ERR_INVALID_RESOURCE_NO_COMPATIBLE_HIERARCHY;
            }
            failures.add(new ValidationFailureDetailsBuilder().field("policy resources")
                    .subField("incompatible").isSemanticallyIncorrect()
                    .becauseOf(error.getMessage(serviceDef.getName(),
                            toStringHierarchies_all(hierarchies, defHelper)))
                    .errorCode(error.getErrorCode()).build());
            valid = false;
        } else {
            if (LOG.isDebugEnabled()) {
                LOG.debug("isValidResourceNames: Found [" + candidateHierarchies.size()
                        + "] compatible hierarchies: "
                        + toStringHierarchies_all(candidateHierarchies, defHelper));
            }
            /*
             * Among the candidate hierarchies there should be at least one for which policy specifies all of the mandatory resources.  Note that there could be multiple
             * hierarchies that meet that criteria, e.g. a hive policy that specified only DB.  It is not clear if it belongs to DB->UDF or DB->TBL->COL hierarchy.
             * However, if both UDF and TBL were required then we can detect that policy does not specify mandatory levels for any of the candidate hierarchies.
             */
            Set<List<RangerResourceDef>> validHierarchies = filterHierarchies_mandatoryResourcesSpecifiedInPolicy(
                    policyResources, candidateHierarchies, defHelper);
            if (validHierarchies.isEmpty()) {
                ValidationErrorCode error;
                if (candidateHierarchies.size() == 1) { // we can provide better message if there is a single candidate hierarchy
                    error = ValidationErrorCode.POLICY_VALIDATION_ERR_INVALID_RESOURCE_MISSING_MANDATORY_SINGLE;
                } else {
                    error = ValidationErrorCode.POLICY_VALIDATION_ERR_INVALID_RESOURCE_MISSING_MANDATORY;
                }
                failures.add(new ValidationFailureDetailsBuilder().field("policy resources")
                        .subField("missing mandatory").isSemanticallyIncorrect()
                        .becauseOf(error.getMessage(serviceDef.getName(),
                                toStringHierarchies_mandatory(candidateHierarchies, defHelper)))
                        .errorCode(error.getErrorCode()).build());
                valid = false;
            } else {
                if (LOG.isDebugEnabled()) {
                    LOG.debug("isValidResourceNames: Found hierarchies with all mandatory fields specified: "
                            + toStringHierarchies_mandatory(validHierarchies, defHelper));
                }
            }
        }
    }

    if (LOG.isDebugEnabled()) {
        LOG.debug(String.format("<== RangerPolicyValidator.isValidResourceNames(%s, %s, %s): %s", policy,
                failures, serviceDef, valid));
    }
    return valid;
}

From source file:io.snappydata.hydra.cluster.SnappyTest.java

public static synchronized void HydraTask_copyDiskFiles_gemToSnappyCluster() {
    Set<File> myDirList = getDirList("dirName_");
    if (diskDirExists)
        return;
    else {
        String dirName = snappyTest.generateLogDirName();
        File destDir = new File(dirName);
        String[] splitedName = RemoteTestModule.getMyClientName().split("snappy");
        String newName = splitedName[1];
        File currentDir = new File(".");
        for (File srcFile1 : currentDir.listFiles()) {
            if (!doneCopying) {
                if (srcFile1.getAbsolutePath().contains(newName)
                        && srcFile1.getAbsolutePath().contains("_disk")) {
                    if (myDirList.contains(srcFile1)) {
                        Log.getLogWriter().info("List contains entry for the file... " + myDirList.toString());
                    } else {
                        SnappyBB.getBB().getSharedMap().put(
                                "dirName_" + RemoteTestModule.getMyPid() + "_" + snappyTest.getMyTid(),
                                srcFile1);
                        File dir = new File(srcFile1.getAbsolutePath());
                        Log.getLogWriter().info("Match found for file: " + srcFile1.getAbsolutePath());
                        for (File srcFile : dir.listFiles()) {
                            try {
                                if (srcFile.isDirectory()) {
                                    FileUtils.copyDirectoryToDirectory(srcFile, destDir);
                                    Log.getLogWriter().info("Done copying diskDirFile directory from ::"
                                            + srcFile + "to " + destDir);
                                } else {
                                    FileUtils.copyFileToDirectory(srcFile, destDir);
                                    Log.getLogWriter().info(
                                            "Done copying diskDirFile from ::" + srcFile + "to " + destDir);
                                }
                                doneCopying = true;
                            } catch (IOException e) {
                                throw new TestException("Error occurred while copying data from file: "
                                        + srcFile + "\n " + e.getMessage());
                            }
                        }
                    }
                }
            }
        }
        diskDirExists = true;
    }
}

From source file:com.rappsantiago.weighttracker.profile.setup.ProfileSetupActivity.java

private boolean saveAndValidatePageData(int currentPage) {

    Fragment currentFragment = mPagerAdapter.getItem(currentPage);

    if (currentFragment instanceof PageWithData) {
        PageWithData currentFragmentData = (PageWithData) currentFragment;
        Bundle pageData = currentFragmentData.getProfileData();

        if (null == pageData) {
            return false;
        }

        Set<String> errors = new HashSet<>();

        // validate entries
        for (String key : pageData.keySet()) {
            Object obj = pageData.get(key);

            if (null == obj) {
                errors.add(key);
                continue;
            }

            if (obj instanceof String) { // name
                if (((String) obj).trim().isEmpty()) {
                    errors.add(key);
                }
            } else if (obj instanceof Double) { // weight, body fat index, height

                // body fat index is optional
                if (WeightHeightFragment.KEY_BODY_FAT_INDEX == key
                        || SetGoalsFragment.KEY_TARGET_BODY_FAT_INDEX == key) {
                    continue;
                }

                // inches is allowed to be 0 if foot is not less than or equal to 0
                if (WeightHeightFragment.KEY_HEIGHT_INCHES == key) {
                    if (errors.contains(WeightHeightFragment.KEY_HEIGHT)) {
                        if (0 >= ((Double) obj).doubleValue()) {
                            errors.add(key);
                        } else {
                            errors.remove(WeightHeightFragment.KEY_HEIGHT);
                        }
                    } else {
                        continue;
                    }
                }

                if (0 >= ((Double) obj).doubleValue()) {
                    errors.add(key);
                }
            } else if (obj instanceof Long) { // birthday, due date

                // due date is optional
                if (SetGoalsFragment.KEY_DUE_DATE == key) {
                    continue;
                }

                if (0 >= ((Long) obj).longValue()) {
                    errors.add(key);
                }
            }
        }

        boolean withNoErrors = errors.isEmpty();

        if (withNoErrors) {
            Log.d(TAG, "pageData = " + pageData);
            mProfileData.putAll(pageData);
        } else {
            Log.d(TAG, errors.toString());
            currentFragmentData.showErrorMessage(errors);
        }

        return withNoErrors;
    } else {
        return true;
    }
}

From source file:org.jahia.services.templates.JahiaTemplateManagerService.java

/**
 * Checks if the specified template is available either in one of the requested template sets or modules.
 *
 * @param templateName     the path of the template to be checked
 * @param templateSetNames the set of template sets and modules we should check for the presence of the specified template
 * @return <code>true</code> if the specified template is present; <code>false</code> otherwise
 */
public boolean isTemplatePresent(final String templateName, final Set<String> templateSetNames) {

    long timer = System.currentTimeMillis();
    if (logger.isDebugEnabled()) {
        logger.debug("Checking presense of the template {} in modules {}", templateName, templateSetNames);
    }

    if (StringUtils.isEmpty(templateName)) {
        throw new IllegalArgumentException("Template path is either null or empty");
    }
    if (templateSetNames == null || templateSetNames.isEmpty()) {
        throw new IllegalArgumentException("The template/module set to check is empty");
    }

    boolean present = true;
    try {
        present = JCRTemplate.getInstance().doExecuteWithSystemSession(new JCRCallback<Boolean>() {

            @Override
            public Boolean doInJCR(JCRSessionWrapper session) throws RepositoryException {
                return isTemplatePresent(templateName, templateSetNames, session);
            }
        });
    } catch (RepositoryException e) {
        logger.error("Unable to check presence of the template '" + templateName + "' in the modules '"
                + templateSetNames + "'. Cause: " + e.getMessage(), e);
    }

    if (logger.isDebugEnabled()) {
        logger.debug("Template {} {} in modules {} in {} ms",
                new String[] { templateName, present ? "found" : "cannot be found", templateSetNames.toString(),
                        String.valueOf(System.currentTimeMillis() - timer) });
    }

    return present;
}

From source file:org.apache.zeppelin.socket.NotebookServer.java

void permissionError(NotebookSocket conn, String op, String userName, Set<String> userAndRoles,
        Set<String> allowed) throws IOException {
    LOG.info("Cannot {}. Connection readers {}. Allowed readers {}", op, userAndRoles, allowed);

    conn.send(serializeMessage(new Message(OP.AUTH_INFO).put("info",
            "Insufficient privileges to " + op + "note.\n\n" + "Allowed users or roles: " + allowed.toString()
                    + "\n\n" + "But the user " + userName + " belongs to: " + userAndRoles.toString())));
}