Example usage for junit.framework Assert assertNull

List of usage examples for junit.framework Assert assertNull

Introduction

On this page you can find example usages of junit.framework Assert assertNull.

Prototype

static public void assertNull(Object object) 

Source Link

Document

Asserts that an object is null.

Usage

From source file:org.apache.ambari.server.configuration.ConfigurationTest.java

@Test
public void testGetLdapServerProperties_WrongManagerPassword() throws Exception {
    // A manager password that is neither a credential-store alias nor a
    // readable password file must be ignored by the configuration layer.
    final Properties props = new Properties();
    props.setProperty(Configuration.LDAP_MANAGER_PASSWORD_KEY, "somePassword");

    final LdapServerProperties ldapProps = new Configuration(props).getLdapServerProperties();
    Assert.assertNull(ldapProps.getManagerPassword());
}

From source file:org.apache.ambari.server.security.encryption.CredentialStoreServiceTest.java

@Test
public void testGetCredential() throws Exception {
    // Back the credential store with a master key persisted under the test keystore dir.
    final String masterKey = "ThisissomeSecretPassPhrasse";
    final String masterKeyLocation = keystore_dir.getAbsolutePath() + "/master";
    credentialStoreService.setMasterKeyService(new MasterKeyServiceImpl(masterKey, masterKeyLocation, false));

    // Empty and null aliases resolve to no credential.
    Assert.assertNull(credentialStoreService.getCredential(""));
    Assert.assertNull(credentialStoreService.getCredential(null));

    // A stored credential round-trips through the store intact.
    final String password = "mypassword";
    credentialStoreService.addCredential("myalias", password);
    Assert.assertEquals(password, new String(credentialStoreService.getCredential("myalias")));
}

From source file:org.apache.ambari.server.serveraction.kerberos.KerberosOperationHandlerTest.java

@Test
public void testEscapeCharacters() throws KerberosOperationException {
    KerberosOperationHandler handler = createHandler();

    // The characters that need escaping in Kerberos principal names.
    HashSet<Character> specials = new HashSet<Character>();
    Collections.addAll(specials, '/', ',', '\\', '#', '+', '<', '>', ';', '"', '=', ' ');

    // Every special character gets a leading backslash.
    Assert.assertEquals("\\/\\,\\\\\\#\\+\\<\\>\\;\\\"\\=\\ ",
            handler.escapeCharacters("/,\\#+<>;\"= ", specials, '\\'));
    // Null input passes through as null; empty input stays empty.
    Assert.assertNull(handler.escapeCharacters(null, specials, '\\'));
    Assert.assertEquals("", handler.escapeCharacters("", specials, '\\'));
    // Strings without special characters are returned unchanged.
    Assert.assertEquals("nothing_special_here",
            handler.escapeCharacters("nothing_special_here", specials, '\\'));
    Assert.assertEquals("\\/\\,\\\\\\#\\+\\<\\>\\;\\\"\\=\\ ",
            handler.escapeCharacters("/,\\#+<>;\"= ", specials, '\\'));

    // A null, empty, or non-matching special-character set escapes nothing.
    Assert.assertEquals("nothing<>special#here!",
            handler.escapeCharacters("nothing<>special#here!", null, '\\'));
    Assert.assertEquals("nothing<>special#here!",
            handler.escapeCharacters("nothing<>special#here!", Collections.<Character>emptySet(), '\\'));
    Assert.assertEquals("nothing<>special#here!",
            handler.escapeCharacters("nothing<>special#here!", Collections.singleton('?'), '\\'));
    // The caller-supplied set fully controls what is considered special.
    Assert.assertEquals("\\A's are special!",
            handler.escapeCharacters("A's are special!", Collections.singleton('A'), '\\'));
}

From source file:org.apache.camel.converter.soap.name.ServiceInterfaceStrategyTest.java

@Test
public void testServiceInterfaceStrategyWithClient() {
    ServiceInterfaceStrategy strategy = new ServiceInterfaceStrategy(CustomerService.class, true);

    // Resolution by message type alone (no SOAP action supplied).
    QName byType = strategy.findQNameForSoapActionOrType("", GetCustomersByName.class);
    Assert.assertEquals("http://customerservice.example.com/", byType.getNamespaceURI());
    Assert.assertEquals("getCustomersByName", byType.getLocalPart());

    // Resolution with a matching SOAP action yields the same QName.
    QName byAction = strategy.findQNameForSoapActionOrType("getCustomersByName", GetCustomersByName.class);
    Assert.assertEquals("http://customerservice.example.com/", byAction.getNamespaceURI());
    Assert.assertEquals("getCustomersByName", byAction.getLocalPart());

    // SOAP action found but the in type is null -> no QName.
    Assert.assertNull(strategy.findQNameForSoapActionOrType(
            "http://customerservice.example.com/getAllCustomers", null));

    // An unknown action with an unrelated type must raise RuntimeCamelException.
    try {
        strategy.findQNameForSoapActionOrType("test", Class.class);
        Assert.fail();
    } catch (RuntimeCamelException e) {
        LOG.debug("Caught expected message: " + e.getMessage());
    }
}

From source file:org.apache.cloudstack.network.opendaylight.api.test.NeutronNodeAdapterTest.java

@Test
public <T> void gsonNeutronPortUnmarshalingNullTest() throws NeutronRestApiException {
    // Deserializing a null JSON document must yield a null wrapper rather than throw.
    NeutronNodeWrapper wrapper = (NeutronNodeWrapper) gsonNeutronNode.fromJson((String) null,
            TypeToken.get(NeutronNodeWrapper.class).getType());
    Assert.assertNull(wrapper);
}

From source file:org.apache.hadoop.hbase.client.crosssite.TestCrossSiteHBaseTable.java

@Test
public void testPutAndScan() throws Exception {
    // Create the cross-site table with a single column family.
    CrossSiteHBaseAdmin admin = new CrossSiteHBaseAdmin(TEST_UTIL.getConfiguration());
    String tableName = "testPutAndScan";
    HTableDescriptor desc = new HTableDescriptor(tableName);
    desc.addFamily(new HColumnDescriptor("col1"));
    admin.createTable(desc);
    admin.close();

    CrossSiteHTable crossSiteHTable = new CrossSiteHTable(admin.getConfiguration(), tableName);
    try {
        // Write two rows addressed to the "hbase1" cluster.
        Put p = new Put(Bytes.toBytes("hbase1,china"));
        p.add(Bytes.toBytes("col1"), Bytes.toBytes("q1"), Bytes.toBytes("100"));
        crossSiteHTable.put(p);

        p = new Put(Bytes.toBytes("hbase1,india"));
        p.add(Bytes.toBytes("col1"), Bytes.toBytes("q2"), Bytes.toBytes("101"));
        crossSiteHTable.put(p);

        // A point get returns the value just written.
        Get get = new Get(Bytes.toBytes("hbase1,india"));
        Result result = crossSiteHTable.get(get);
        byte[] value = result.getValue(Bytes.toBytes("col1"), Bytes.toBytes("q2"));
        Assert.assertTrue(Bytes.equals(value, Bytes.toBytes("101")));

        // A scan with caching=1 sees exactly the two rows, then is exhausted.
        Scan s = new Scan();
        s.setCaching(1);
        ResultScanner scanner = crossSiteHTable.getScanner(s);
        try {
            Assert.assertNotNull(scanner.next());
            Assert.assertNotNull(scanner.next());
            Assert.assertNull(scanner.next());
        } finally {
            // The original leaked the scanner; always release it.
            scanner.close();
        }
    } finally {
        // Close the table even when an assertion above fails.
        crossSiteHTable.close();
    }
}

From source file:org.apache.hadoop.hbase.client.crosssite.TestCrossSiteHTableFailover.java

@Test
public void testGetAndScanFailover() throws Exception {
    // Register cluster "hbase2" and create a table whose family is
    // replication-enabled (scope 1).
    this.admin.addCluster("hbase2", TEST_UTIL1.getClusterKey());
    String tableName = "testGetAndScanFailover";
    HTableDescriptor desc = new HTableDescriptor(tableName);
    desc.addFamily(new HColumnDescriptor("col1").setScope(1));
    this.admin.createTable(desc);

    // Add two peers for "hbase2" so replication can start.
    Pair<String, String> peer1 = new Pair<String, String>("peer1hbase2", TEST_UTIL2.getClusterKey());
    Pair<String, String> peer2 = new Pair<String, String>("peer2hbase2", TEST_UTIL3.getClusterKey());
    this.admin.addPeer("hbase2", peer1);
    this.admin.addPeer("hbase2", peer2);
    CrossSiteHTable crossSiteHTable = new CrossSiteHTable(this.admin.getConfiguration(), tableName);

    // Write two rows addressed to the "hbase2" cluster.
    Put p = new Put(Bytes.toBytes("hbase2,china"));
    p.add(Bytes.toBytes("col1"), Bytes.toBytes("q1"), Bytes.toBytes("100"));
    crossSiteHTable.put(p);

    p = new Put(Bytes.toBytes("hbase2,india"));
    p.add(Bytes.toBytes("col1"), Bytes.toBytes("q2"), Bytes.toBytes("101"));
    crossSiteHTable.put(p);

    // Point get and scan both work while the primary cluster is up.
    Get get = new Get(Bytes.toBytes("hbase2,india"));
    Result result = crossSiteHTable.get(get);
    byte[] value = result.getValue(Bytes.toBytes("col1"), Bytes.toBytes("q2"));
    Assert.assertTrue(Bytes.equals(value, Bytes.toBytes("101")));

    Scan s = new Scan();
    s.setCaching(1);
    ResultScanner scanner = crossSiteHTable.getScanner(s);
    Result next = scanner.next();
    Assert.assertNotNull(next);
    next = scanner.next();
    Assert.assertNotNull(next);
    next = scanner.next();
    Assert.assertNull(next);

    // Wait until both rows have been replicated to the peer cluster.
    HTable table = new HTable(TEST_UTIL2.getConfiguration(), Bytes.toBytes(tableName + "_hbase2"));
    try {
        while (true) {
            s = new Scan();
            scanner = table.getScanner(s);
            Result[] results = scanner.next(2);
            if ((results != null && results.length == 2)) {
                break;
            }
            Thread.sleep(500);
        }
    } finally {
        table.close();
    }

    // Take down the primary; reads must now be served from a peer.
    TEST_UTIL1.shutdownMiniCluster();
    TEST_UTIL2.shutdownMiniCluster();
    result = crossSiteHTable.get(get);
    value = result.getValue(Bytes.toBytes("col1"), Bytes.toBytes("q2"));
    Assert.assertTrue(Bytes.equals(value, Bytes.toBytes("101")));

    scanner = crossSiteHTable.getScanner(s);
    next = scanner.next();
    Assert.assertNotNull(next);
    next = scanner.next();
    Assert.assertNotNull(next);
    next = scanner.next();
    Assert.assertNull(next);
    crossSiteHTable.close();
}

From source file:org.apache.hadoop.hbase.client.crosssite.TestCrossSiteHTableWithPeers.java

@Test
public void testAddPeersFollowedWithPutAndScan() throws Exception {
    // Register cluster "hbase2" and create a table whose family is
    // replication-enabled (scope 1).
    this.admin.addCluster("hbase2", TEST_UTIL1.getClusterKey());
    String tableName = "testAddPeersFollowedWithPutAndScan";
    HTableDescriptor desc = new HTableDescriptor(tableName);
    desc.addFamily(new HColumnDescriptor("col1").setScope(1));
    this.admin.createTable(desc);

    // Add a peer for "hbase2" so replication can start.
    Pair<String, String> peer = new Pair<String, String>("peerhbase2", TEST_UTIL2.getClusterKey());
    this.admin.addPeer("hbase2", peer);
    CrossSiteHTable crossSiteHTable = new CrossSiteHTable(this.admin.getConfiguration(), tableName);

    // Write two rows addressed to the "hbase2" cluster.
    Put p = new Put(Bytes.toBytes("hbase2,china"));
    p.add(Bytes.toBytes("col1"), Bytes.toBytes("q1"), Bytes.toBytes("100"));
    crossSiteHTable.put(p);

    p = new Put(Bytes.toBytes("hbase2,india"));
    p.add(Bytes.toBytes("col1"), Bytes.toBytes("q2"), Bytes.toBytes("101"));
    crossSiteHTable.put(p);

    // Point get and scan both work while the primary cluster is up.
    Get get = new Get(Bytes.toBytes("hbase2,india"));
    Result result = crossSiteHTable.get(get);
    byte[] value = result.getValue(Bytes.toBytes("col1"), Bytes.toBytes("q2"));
    Assert.assertTrue(Bytes.equals(value, Bytes.toBytes("101")));

    Scan s = new Scan();
    s.setCaching(1);
    ResultScanner scanner = crossSiteHTable.getScanner(s);
    Result next = scanner.next();
    Assert.assertNotNull(next);
    next = scanner.next();
    Assert.assertNotNull(next);
    next = scanner.next();
    Assert.assertNull(next);

    // Wait until both rows have been replicated to the peer cluster.
    HTable table = new HTable(TEST_UTIL2.getConfiguration(), Bytes.toBytes(tableName + "_hbase2"));
    try {
        while (true) {
            s = new Scan();
            scanner = table.getScanner(s);
            Result[] results = scanner.next(2);
            if ((results != null && results.length == 2)) {
                break;
            }
            Thread.sleep(500);
        }
    } finally {
        table.close();
    }

    // Take down the primary; reads must now be served from the peer.
    TEST_UTIL1.shutdownMiniCluster();
    result = crossSiteHTable.get(get);
    value = result.getValue(Bytes.toBytes("col1"), Bytes.toBytes("q2"));
    Assert.assertTrue(Bytes.equals(value, Bytes.toBytes("101")));

    scanner = crossSiteHTable.getScanner(s);
    next = scanner.next();
    Assert.assertNotNull(next);
    next = scanner.next();
    Assert.assertNotNull(next);
    next = scanner.next();
    Assert.assertNull(next);
    crossSiteHTable.close();
}

From source file:org.apache.hadoop.hdfs.TestFileInputStreamCache.java

@Test
public void testExpiry() throws Exception {
    // Cache with capacity 1 and a short expiry window.
    FileInputStreamCache cache = new FileInputStreamCache(1, 10);
    DatanodeID dnId = new DatanodeID("127.0.0.1", "localhost", "xyzzy", 8080, 9090, 7070, 6060);
    ExtendedBlock block = new ExtendedBlock("poolid", 123);
    TestFileDescriptorPair pair = new TestFileDescriptorPair();
    cache.put(dnId, block, pair.getFileInputStreams());

    // Sleep well past the expiry window; the entry must be gone.
    Thread.sleep(cache.getExpiryTimeMs() * 100);
    Assert.assertNull(cache.get(dnId, block));

    pair.close();
    cache.close();
}

From source file:org.apache.hadoop.hdfs.TestFileInputStreamCache.java

@Test
public void testEviction() throws Exception {
    // Capacity-1 cache with an effectively infinite expiry, so only
    // size-based eviction can remove entries.
    FileInputStreamCache cache = new FileInputStreamCache(1, 10000000);
    ExtendedBlock block = new ExtendedBlock("poolid", 123);

    DatanodeID firstNode = new DatanodeID("127.0.0.1", "localhost", "xyzzy", 8080, 9090, 7070, 6060);
    TestFileDescriptorPair firstPair = new TestFileDescriptorPair();
    cache.put(firstNode, block, firstPair.getFileInputStreams());

    // Inserting a second entry into the capacity-1 cache evicts the first.
    DatanodeID secondNode = new DatanodeID("127.0.0.1", "localhost", "xyzzy", 8081, 9091, 7071, 6061);
    TestFileDescriptorPair secondPair = new TestFileDescriptorPair();
    cache.put(secondNode, block, secondPair.getFileInputStreams());

    // First entry is evicted; second is still retrievable and intact.
    Assert.assertNull(cache.get(firstNode, block));
    Assert.assertTrue(secondPair.compareWith(cache.get(secondNode, block)));

    firstPair.close();
    cache.close();
}