List of usage examples for org.apache.hadoop.conf Configuration setBoolean
public void setBoolean(String name, boolean value)
Set the value of the `name` property to a boolean.
. From source file:com.google.cloud.hadoop.fs.gcs.GoogleHadoopFileSystemIntegrationTest.java
License:Open Source License
/**
 * Validates that we correctly build our Options object from a Hadoop config.
 *
 * <p>First verifies the defaults produced by {@code loadConfig} (auto-repair on,
 * infer off), then flips both booleans and sets the metadata-cache ages, rebuilds
 * the options, and checks every value round-trips.
 */
@Test
public void testBuildOptionsFromConfig() throws IOException {
    GoogleHadoopFileSystem fs = new GoogleHadoopFileSystem();
    // Fixed typo in the fixture string: "priveKeyFile" -> "privateKeyFile".
    // loadConfig only asserts non-null, so this is a readability fix, not behavioral.
    Configuration config = loadConfig("projectId", "serviceAccount", "privateKeyFile");

    GoogleCloudStorageFileSystemOptions.Builder optionsBuilder =
        fs.createOptionsBuilderFromConfig(config);
    GoogleCloudStorageFileSystemOptions options = optionsBuilder.build();
    GoogleCloudStorageOptions gcsOptions = options.getCloudStorageOptions();

    // Defaults from loadConfig: repair enabled, infer disabled.
    Assert.assertTrue(gcsOptions.isAutoRepairImplicitDirectoriesEnabled());
    Assert.assertFalse(gcsOptions.isInferImplicitDirectoriesEnabled());

    // Invert both booleans and set explicit cache ages, then rebuild.
    config.setBoolean(GoogleHadoopFileSystemBase.GCS_ENABLE_REPAIR_IMPLICIT_DIRECTORIES_KEY, false);
    config.setBoolean(GoogleHadoopFileSystemBase.GCS_ENABLE_INFER_IMPLICIT_DIRECTORIES_KEY, true);
    config.setLong(GoogleHadoopFileSystemBase.GCS_METADATA_CACHE_MAX_ENTRY_AGE_KEY, 2222L);
    config.setLong(GoogleHadoopFileSystemBase.GCS_METADATA_CACHE_MAX_INFO_AGE_KEY, 1111L);

    optionsBuilder = fs.createOptionsBuilderFromConfig(config);
    options = optionsBuilder.build();
    Assert.assertEquals(2222L, options.getCacheMaxEntryAgeMillis());
    Assert.assertEquals(1111L, options.getCacheMaxInfoAgeMillis());

    gcsOptions = options.getCloudStorageOptions();
    Assert.assertFalse(gcsOptions.isAutoRepairImplicitDirectoriesEnabled());
    Assert.assertTrue(gcsOptions.isInferImplicitDirectoriesEnabled());
}
From source file:com.google.cloud.hadoop.fs.gcs.GoogleHadoopFileSystemIntegrationTest.java
License:Open Source License
@Test public void testInitializeThrowsWhenNoProjectIdConfigured() throws URISyntaxException, IOException { // Verify that incomplete config raises exception. String existingBucket = bucketName; Configuration config = new Configuration(); URI gsUri = new URI("gs://foobar/"); config.setBoolean(GoogleHadoopFileSystemBase.ENABLE_GCE_SERVICE_ACCOUNT_AUTH_KEY, false); config.setBoolean(HadoopCredentialConfiguration.BASE_KEY_PREFIX + HadoopCredentialConfiguration.ENABLE_NULL_CREDENTIAL_SUFFIX, true); config.set(GoogleHadoopFileSystemBase.GCS_SYSTEM_BUCKET_KEY, existingBucket); // project ID is not set. expectedException.expect(IOException.class); expectedException.expectMessage(GoogleHadoopFileSystemBase.GCS_PROJECT_ID_KEY); new GoogleHadoopFileSystem().initialize(gsUri, config); }
From source file:com.google.cloud.hadoop.fs.gcs.GoogleHadoopFileSystemIntegrationTest.java
License:Open Source License
@Test public void testInitializeThrowsWhenCredentialsNotFound() throws URISyntaxException, IOException { String fakeClientId = "fooclient"; String existingBucket = bucketName; URI gsUri = new URI("gs://foobar/"); String fakeProjectId = "123456"; Configuration config = new Configuration(); config.setBoolean(GoogleHadoopFileSystemBase.ENABLE_GCE_SERVICE_ACCOUNT_AUTH_KEY, false); // Set project ID and client ID but no client secret. config.set(GoogleHadoopFileSystemBase.GCS_PROJECT_ID_KEY, fakeProjectId); config.set(GoogleHadoopFileSystemBase.GCS_CLIENT_ID_KEY, fakeClientId); config.set(GoogleHadoopFileSystemBase.GCS_SYSTEM_BUCKET_KEY, existingBucket); expectedException.expect(IllegalStateException.class); expectedException.expectMessage("No valid credential configuration discovered"); new GoogleHadoopFileSystem().initialize(gsUri, config); }
From source file:com.google.cloud.hadoop.fs.gcs.GoogleHadoopFileSystemTestBase.java
License:Open Source License
/**
 * Builds a GHFS Configuration from explicit values rather than the environment.
 *
 * @param projectId      GCS project ID (must be non-null).
 * @param serviceAccount service-account email for auth (must be non-null).
 * @param privateKeyFile path to the service account's private key file (must be non-null).
 * @return a Configuration populated with auth, system-bucket, and
 *         implicit-directory settings for tests.
 */
protected static Configuration loadConfig(
        String projectId, String serviceAccount, String privateKeyFile) {
    Assert.assertNotNull(
        "Expected value for env var " + TestConfiguration.GCS_TEST_PROJECT_ID, projectId);
    Assert.assertNotNull(
        "Expected value for env var " + TestConfiguration.GCS_TEST_SERVICE_ACCOUNT, serviceAccount);
    Assert.assertNotNull(
        "Expected value for env var " + TestConfiguration.GCS_TEST_PRIVATE_KEYFILE, privateKeyFile);

    Configuration conf = new Configuration();
    conf.set(GoogleHadoopFileSystemBase.GCS_PROJECT_ID_KEY, projectId);
    conf.set(GoogleHadoopFileSystemBase.SERVICE_ACCOUNT_AUTH_EMAIL_KEY, serviceAccount);
    conf.set(GoogleHadoopFileSystemBase.SERVICE_ACCOUNT_AUTH_KEYFILE_KEY, privateKeyFile);

    // A uniquely-named system bucket, created on demand.
    conf.set(GoogleHadoopFileSystemBase.GCS_SYSTEM_BUCKET_KEY,
        ghfsHelper.getUniqueBucketName("-system-bucket"));
    conf.setBoolean(GoogleHadoopFileSystemBase.GCS_CREATE_SYSTEM_BUCKET_KEY, true);

    // Implicit-directory handling: repair on, infer off.
    conf.setBoolean(GoogleHadoopFileSystemBase.GCS_ENABLE_REPAIR_IMPLICIT_DIRECTORIES_KEY, true);
    conf.setBoolean(GoogleHadoopFileSystemBase.GCS_ENABLE_INFER_IMPLICIT_DIRECTORIES_KEY, false);
    return conf;
}
From source file:com.google.cloud.hadoop.fs.gcs.GoogleHadoopFileSystemTestBase.java
License:Open Source License
/**
 * Exercises ParentTimestampUpdateIncludePredicate across five configurations:
 * updates disabled, include/exclude both "/", specific includes with global
 * exclude, defaults plus plain job-history paths, and defaults plus
 * gs://-scheme job-history paths.
 */
@Test
public void testIncludedParentPathPredicates() throws URISyntaxException {
    Configuration configuration = new Configuration();

    // 1. Updates disabled entirely: nothing matches, even with include = "/".
    configuration.setBoolean(GoogleHadoopFileSystemBase.GCS_PARENT_TIMESTAMP_UPDATE_ENABLE_KEY, false);
    configuration.set(GoogleHadoopFileSystemBase.GCS_PARENT_TIMESTAMP_UPDATE_INCLUDES_KEY, "/");
    configuration.set(GoogleHadoopFileSystemBase.GCS_PARENT_TIMESTAMP_UPDATE_EXCLUDES_KEY, "");
    TimestampUpdatePredicate predicate =
        GoogleHadoopFileSystemBase.ParentTimestampUpdateIncludePredicate.create(configuration);
    Assert.assertFalse("Should be ignored", predicate.shouldUpdateTimestamp(new URI("/foobar")));
    Assert.assertFalse("Should be ignored", predicate.shouldUpdateTimestamp(new URI("")));

    // 2. Updates enabled, include and exclude both "/": include wins.
    configuration.setBoolean(GoogleHadoopFileSystemBase.GCS_PARENT_TIMESTAMP_UPDATE_ENABLE_KEY, true);
    configuration.set(GoogleHadoopFileSystemBase.GCS_PARENT_TIMESTAMP_UPDATE_INCLUDES_KEY, "/");
    configuration.set(GoogleHadoopFileSystemBase.GCS_PARENT_TIMESTAMP_UPDATE_EXCLUDES_KEY, "/");
    predicate = GoogleHadoopFileSystemBase.ParentTimestampUpdateIncludePredicate.create(configuration);
    Assert.assertTrue("Should be included", predicate.shouldUpdateTimestamp(new URI("/foobar")));
    Assert.assertTrue("Should be included", predicate.shouldUpdateTimestamp(new URI("")));

    // 3. Specific include paths with a global exclude.
    configuration.set(GoogleHadoopFileSystemBase.GCS_PARENT_TIMESTAMP_UPDATE_INCLUDES_KEY, "/foobar,/baz");
    configuration.set(GoogleHadoopFileSystemBase.GCS_PARENT_TIMESTAMP_UPDATE_EXCLUDES_KEY, "/");
    predicate = GoogleHadoopFileSystemBase.ParentTimestampUpdateIncludePredicate.create(configuration);
    Assert.assertTrue("Should be included", predicate.shouldUpdateTimestamp(new URI("asdf/foobar")));
    Assert.assertTrue("Should be included", predicate.shouldUpdateTimestamp(new URI("asdf/baz")));
    Assert.assertFalse("Should be ignored", predicate.shouldUpdateTimestamp(new URI("/anythingElse")));
    Assert.assertFalse("Should be ignored", predicate.shouldUpdateTimestamp(new URI("/")));

    // 4. Defaults, plus plain (scheme-less) job-history directories.
    configuration.set(GoogleHadoopFileSystemBase.GCS_PARENT_TIMESTAMP_UPDATE_INCLUDES_KEY,
        GoogleHadoopFileSystemBase.GCS_PARENT_TIMESTAMP_UPDATE_INCLUDES_DEFAULT);
    configuration.set(GoogleHadoopFileSystemBase.GCS_PARENT_TIMESTAMP_UPDATE_EXCLUDES_KEY,
        GoogleHadoopFileSystemBase.GCS_PARENT_TIMESTAMP_UPDATE_EXCLUDES_DEFAULT);
    configuration.set(GoogleHadoopFileSystemBase.MR_JOB_HISTORY_DONE_DIR_KEY, "/tmp/hadoop-yarn/done");
    configuration.set(GoogleHadoopFileSystemBase.MR_JOB_HISTORY_INTERMEDIATE_DONE_DIR_KEY,
        "/tmp/hadoop-yarn/staging/done");
    predicate = GoogleHadoopFileSystemBase.ParentTimestampUpdateIncludePredicate.create(configuration);
    // create() folds the job-history dirs into the include list.
    Assert.assertEquals("/tmp/hadoop-yarn/staging/done,/tmp/hadoop-yarn/done",
        configuration.get(GoogleHadoopFileSystemBase.GCS_PARENT_TIMESTAMP_UPDATE_INCLUDES_KEY));
    Assert.assertTrue("Should be included",
        predicate.shouldUpdateTimestamp(new URI("gs://bucket/tmp/hadoop-yarn/staging/done/")));
    Assert.assertTrue("Should be included",
        predicate.shouldUpdateTimestamp(new URI("gs://bucket/tmp/hadoop-yarn/done/")));
    Assert.assertFalse("Should be ignored", predicate.shouldUpdateTimestamp(new URI("asdf/baz")));
    Assert.assertFalse("Should be ignored", predicate.shouldUpdateTimestamp(new URI("/anythingElse")));
    Assert.assertFalse("Should be ignored", predicate.shouldUpdateTimestamp(new URI("/")));

    // 5. Defaults, plus gs://-scheme job-history directories.
    configuration.set(GoogleHadoopFileSystemBase.GCS_PARENT_TIMESTAMP_UPDATE_INCLUDES_KEY,
        GoogleHadoopFileSystemBase.GCS_PARENT_TIMESTAMP_UPDATE_INCLUDES_DEFAULT);
    configuration.set(GoogleHadoopFileSystemBase.GCS_PARENT_TIMESTAMP_UPDATE_EXCLUDES_KEY,
        GoogleHadoopFileSystemBase.GCS_PARENT_TIMESTAMP_UPDATE_EXCLUDES_DEFAULT);
    configuration.set(GoogleHadoopFileSystemBase.MR_JOB_HISTORY_DONE_DIR_KEY,
        "gs://foo-bucket/tmp/hadoop-yarn/done");
    configuration.set(GoogleHadoopFileSystemBase.MR_JOB_HISTORY_INTERMEDIATE_DONE_DIR_KEY,
        "gs://foo-bucket/tmp/hadoop-yarn/staging/done");
    predicate = GoogleHadoopFileSystemBase.ParentTimestampUpdateIncludePredicate.create(configuration);
    Assert.assertEquals("gs://foo-bucket/tmp/hadoop-yarn/staging/done,gs://foo-bucket/tmp/hadoop-yarn/done",
        configuration.get(GoogleHadoopFileSystemBase.GCS_PARENT_TIMESTAMP_UPDATE_INCLUDES_KEY));
    Assert.assertTrue("Should be included",
        predicate.shouldUpdateTimestamp(new URI("gs://foo-bucket/tmp/hadoop-yarn/staging/done/")));
    Assert.assertTrue("Should be included",
        predicate.shouldUpdateTimestamp(new URI("gs://foo-bucket/tmp/hadoop-yarn/done/")));
    Assert.assertFalse("Should be ignored", predicate.shouldUpdateTimestamp(new URI("asdf/baz")));
    Assert.assertFalse("Should be ignored", predicate.shouldUpdateTimestamp(new URI("/anythingElse")));
    Assert.assertFalse("Should be ignored", predicate.shouldUpdateTimestamp(new URI("/")));
}
From source file:com.google.cloud.hadoop.fs.gcs.GoogleHadoopFileSystemTestHelper.java
License:Open Source License
/** * Helper for plumbing through an initUri and creating the proper Configuration object. * Calls FileSystem.initialize on {@code ghfs}. */// www. jav a 2 s . c o m private static void initializeInMemoryFileSystem(FileSystem ghfs, String initUriString) throws IOException { URI initUri; try { initUri = new URI(initUriString); } catch (URISyntaxException e) { throw new IllegalArgumentException(e); } String systemBucketName = "fake-test-system-bucket"; Configuration config = new Configuration(); config.set(GoogleHadoopFileSystemBase.GCS_SYSTEM_BUCKET_KEY, systemBucketName); config.setBoolean(GoogleHadoopFileSystemBase.GCS_CREATE_SYSTEM_BUCKET_KEY, true); ghfs.initialize(initUri, config); }
From source file:com.google.cloud.hadoop.fs.gcs.GoogleHadoopGlobalRootedFileSystemIntegrationTest.java
License:Open Source License
/** * Helper to load all the GHFS-specific config values from environment variables, such as those * needed for setting up the credentials of a real GoogleCloudStorage. *///from w ww . j av a 2s . co m protected static Configuration loadConfig() throws IOException { // Supply client-id, client-secret and project-id to GHFS // through a Configuration object instance. // TODO(user) : add helper to get multiple env vars in one // call and produce a friendlier message if value(s) are missing. String serviceAccount = TestConfiguration.getInstance().getServiceAccount(); String privateKey = TestConfiguration.getInstance().getPrivateKeyFile(); String projectId = TestConfiguration.getInstance().getProjectId(); Assert.assertNotNull(serviceAccount); Assert.assertNotNull(privateKey); Assert.assertNotNull(projectId); Configuration config = new Configuration(); config.set(GoogleHadoopFileSystemBase.GCS_PROJECT_ID_KEY, projectId); config.set(GoogleHadoopFileSystemBase.SERVICE_ACCOUNT_AUTH_EMAIL_KEY, serviceAccount); config.set(GoogleHadoopFileSystemBase.SERVICE_ACCOUNT_AUTH_KEYFILE_KEY, privateKey); String systemBucketName = ghfsHelper.getUniqueBucketName("-system-bucket"); config.set(GoogleHadoopFileSystemBase.GCS_SYSTEM_BUCKET_KEY, systemBucketName); config.setBoolean(GoogleHadoopFileSystemBase.GCS_CREATE_SYSTEM_BUCKET_KEY, true); config.setBoolean(GoogleHadoopFileSystemBase.GCS_ENABLE_REPAIR_IMPLICIT_DIRECTORIES_KEY, true); config.setBoolean(GoogleHadoopFileSystemBase.GCS_ENABLE_INFER_IMPLICIT_DIRECTORIES_KEY, false); return config; }
From source file:com.google.cloud.hadoop.fs.gcs.GoogleHadoopGlobalRootedFileSystemIntegrationTest.java
License:Open Source License
@Test public void testInitializeThrowsWhenNoProjectIdConfigured() throws URISyntaxException, IOException { // Verify that incomplete config raises exception. String existingBucket = bucketName; Configuration config = new Configuration(); URI gsUri = new URI("gsg://foobar/"); config.setBoolean(GoogleHadoopFileSystemBase.ENABLE_GCE_SERVICE_ACCOUNT_AUTH_KEY, false); config.setBoolean(HadoopCredentialConfiguration.BASE_KEY_PREFIX + HadoopCredentialConfiguration.ENABLE_NULL_CREDENTIAL_SUFFIX, true); config.set(GoogleHadoopFileSystemBase.GCS_SYSTEM_BUCKET_KEY, existingBucket); // project ID is not set. expectedException.expect(IOException.class); expectedException.expectMessage(GoogleHadoopFileSystemBase.GCS_PROJECT_ID_KEY); new GoogleHadoopGlobalRootedFileSystem().initialize(gsUri, config); }
From source file:com.google.cloud.hadoop.fs.gcs.GoogleHadoopGlobalRootedFileSystemIntegrationTest.java
License:Open Source License
@Test public void testInitializeThrowsWhenCredentialsNotFound() throws URISyntaxException, IOException { String fakeClientId = "fooclient"; String existingBucket = bucketName; Configuration config = new Configuration(); URI gsUri = new URI("gsg://foobar/"); String fakeProjectId = "123456"; config = new Configuration(); config.setBoolean(GoogleHadoopFileSystemBase.ENABLE_GCE_SERVICE_ACCOUNT_AUTH_KEY, false); // Set project ID and client ID but no client secret. config.set(GoogleHadoopFileSystemBase.GCS_PROJECT_ID_KEY, fakeProjectId); config.set(GoogleHadoopFileSystemBase.GCS_CLIENT_ID_KEY, fakeClientId); config.set(GoogleHadoopFileSystemBase.GCS_SYSTEM_BUCKET_KEY, existingBucket); expectedException.expect(IllegalStateException.class); expectedException.expectMessage("No valid credential configuration discovered"); new GoogleHadoopGlobalRootedFileSystem().initialize(gsUri, config); }
From source file:com.google.cloud.hadoop.fs.gcs.InMemoryGoogleHadoopFileSystem.java
License:Open Source License
/** * Returns a sample Hadoop job configuration to be used for testing. * * @return a sample Hadoop Configuration for in-memory GHFS. * @throws IOException on IO Error./*from ww w. j av a 2s . c o m*/ */ public static Configuration getSampleConfiguration() throws IOException { // Set the Hadoop job configuration. Configuration config = new Configuration(); config.set("fs.gs.impl", InMemoryGoogleHadoopFileSystem.class.getName()); String systemBucketName = "test-system-bucket"; config.set(GoogleHadoopFileSystemBase.GCS_SYSTEM_BUCKET_KEY, systemBucketName); config.setBoolean(GoogleHadoopFileSystemBase.GCS_CREATE_SYSTEM_BUCKET_KEY, true); return config; }