Example usage for com.rabbitmq.client.Channel.close()

List of usage examples for com.rabbitmq.client.Channel.close()

Introduction

On this page you can find example usage for com.rabbitmq.client.Channel.close().

Prototype

@Override
void close() throws IOException, TimeoutException;

Source Link

Document

Close this channel with the com.rabbitmq.client.AMQP#REPLY_SUCCESS close code and message 'OK'.
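As a quick illustration of the call described above (a sketch only, assuming a recent RabbitMQ Java client in which Connection and Channel implement AutoCloseable, a broker listening on localhost, and a hypothetical queue name), a channel can be closed implicitly via try-with-resources:

import java.io.IOException;
import java.util.concurrent.TimeoutException;

import com.rabbitmq.client.Channel;
import com.rabbitmq.client.Connection;
import com.rabbitmq.client.ConnectionFactory;

public class ChannelCloseSketch {

    public static void main(String[] args) throws IOException, TimeoutException {
        ConnectionFactory factory = new ConnectionFactory();
        factory.setHost("localhost"); // assumes a broker running on localhost

        // try-with-resources closes the channel first (REPLY_SUCCESS, "OK"), then the connection.
        try (Connection connection = factory.newConnection();
             Channel channel = connection.createChannel()) {
            channel.queueDeclare("demo-queue", true, false, false, null); // hypothetical queue name
            channel.basicPublish("", "demo-queue", null, "hello".getBytes());
        }
    }
}

When close() is called manually, as in the examples below, it is common to guard it with channel.isOpen() so that closing an already-closed channel does not raise AlreadyClosedException.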

Usage

From source file: org.ballerinalang.messaging.rabbitmq.util.ChannelUtils.java

License: Open Source License

/**
 * Closes the channel.
 *
 * @param channel RabbitMQ Channel object.
 * @throws IOException      If an I/O problem is encountered.
 * @throws TimeoutException Thrown if the operation times out.
 */
public static void close(Channel channel) throws IOException, TimeoutException {
    channel.close();
}

From source file: org.elasticsearch.river.rabbitmq.RabbitMQIntegrationTest.java

License: Apache License

private void launchTest(XContentBuilder river, final int numMessages, final int numDocsPerMessage,
        InjectorHook injectorHook, boolean delete, boolean update) throws Exception {

    final String dbName = getDbName();
    logger.info(" --> create index [{}]", dbName);
    try {
        client().admin().indices().prepareDelete(dbName).get();
    } catch (IndexMissingException e) {
        // No worries.
    }
    try {
        createIndex(dbName);
    } catch (IndexMissingException e) {
        // No worries.
    }
    ensureGreen(dbName);

    logger.info("  -> Checking rabbitmq running");
    // We try to connect to RabbitMQ.
    // If it's not running, we abort with an explicit message rather than failing obscurely.
    Channel channel = null;
    Connection connection = null;
    try {
        logger.info(" --> connecting to rabbitmq");
        ConnectionFactory factory = new ConnectionFactory();
        factory.setHost("localhost");
        factory.setPort(AMQP.PROTOCOL.PORT);
        connection = factory.newConnection();
    } catch (ConnectException ce) {
        throw new Exception("RabbitMQ service is not launched on localhost:" + AMQP.PROTOCOL.PORT
                + ". Can not start Integration test. " + "Launch `rabbitmq-server`.", ce);
    }

    try {
        logger.info("  -> Creating [{}] channel", dbName);
        channel = connection.createChannel();

        logger.info("  -> Creating queue [{}]", dbName);
        channel.queueDeclare(getDbName(), true, false, false, null);

        // We purge the queue in case something is still in it
        logger.info("  -> Purging [{}] channel", dbName);
        channel.queuePurge(getDbName());

        logger.info("  -> Put [{}] messages with [{}] documents each = [{}] docs", numMessages,
                numDocsPerMessage, numMessages * numDocsPerMessage);
        final Set<String> removed = new HashSet<String>();
        int nbUpdated = 0;
        for (int i = 0; i < numMessages; i++) {
            StringBuffer message = new StringBuffer();

            for (int j = 0; j < numDocsPerMessage; j++) {
                if (logger.isTraceEnabled()) {
                    logger.trace("  -> Indexing document [{}] - [{}][{}]", i + "_" + j, i, j);
                }
                message.append("{ \"index\" : { \"_index\" : \"" + dbName
                        + "\", \"_type\" : \"typex\", \"_id\" : \"" + i + "_" + j + "\" } }\n");
                message.append("{ \"field\" : \"" + i + "_" + j + "\",\"numeric\" : " + i * j + " }\n");

                // Sometimes we update a document
                if (update && rarely()) {
                    String id = between(0, i) + "_" + between(0, j);
                    // We can only update if it has not been removed :)
                    if (!removed.contains(id)) {
                        logger.debug("  -> Updating document [{}] - [{}][{}]", id, i, j);
                        message.append("{ \"update\" : { \"_index\" : \"" + dbName
                                + "\", \"_type\" : \"typex\", \"_id\" : \"" + id + "\" } }\n");
                        message.append(
                                "{ \"doc\": { \"foo\" : \"bar\", \"field2\" : \"" + i + "_" + j + "\" }}\n");
                        nbUpdated++;
                    }
                }

                // Sometimes we delete a document
                if (delete && rarely()) {
                    String id = between(0, i) + "_" + between(0, j);
                    if (!removed.contains(id)) {
                        logger.debug("  -> Removing document [{}] - [{}][{}]", id, i, j);
                        message.append("{ \"delete\" : { \"_index\" : \"" + dbName
                                + "\", \"_type\" : \"typex\", \"_id\" : \"" + id + "\" } }\n");
                        removed.add(id);
                    }
                }
            }

            channel.basicPublish("", dbName, null, message.toString().getBytes());
        }

        logger.info("  -> We removed [{}] docs and updated [{}] docs", removed.size(), nbUpdated);

        if (injectorHook != null) {
            logger.info("  -> Injecting extra data");
            injectorHook.inject();
        }

        logger.info(" --> create river");
        IndexResponse indexResponse = index("_river", dbName, "_meta", river);
        assertTrue(indexResponse.isCreated());

        logger.info("-->  checking that river [{}] was created", dbName);
        assertThat(awaitBusy(new Predicate<Object>() {
            public boolean apply(Object obj) {
                GetResponse response = client()
                        .prepareGet(RiverIndexName.Conf.DEFAULT_INDEX_NAME, dbName, "_status").get();
                return response.isExists();
            }
        }, 5, TimeUnit.SECONDS), equalTo(true));

        // Check that docs are still processed by the river
        logger.info(" --> waiting for expected number of docs: [{}]",
                numDocsPerMessage * numMessages - removed.size());
        assertThat(awaitBusy(new Predicate<Object>() {
            public boolean apply(Object obj) {
                try {
                    refresh();
                    int expected = numDocsPerMessage * numMessages - removed.size();
                    CountResponse response = client().prepareCount(dbName).get();
                    logger.debug("  -> got {} docs, expected {}", response.getCount(), expected);
                    return response.getCount() == expected;
                } catch (IndexMissingException e) {
                    return false;
                }
            }
        }, 20, TimeUnit.SECONDS), equalTo(true));
    } finally {
        if (channel != null && channel.isOpen()) {
            channel.close();
        }
        if (connection != null && connection.isOpen()) {
            connection.close();
        }

        // Deletes the river
        GetResponse response = client().prepareGet(RiverIndexName.Conf.DEFAULT_INDEX_NAME, dbName, "_status")
                .get();
        if (response.isExists()) {
            client().prepareDelete(RiverIndexName.Conf.DEFAULT_INDEX_NAME, dbName, "_meta").get();
            client().prepareDelete(RiverIndexName.Conf.DEFAULT_INDEX_NAME, dbName, "_status").get();
        }

        assertThat(awaitBusy(new Predicate<Object>() {
            public boolean apply(Object obj) {
                GetResponse response = client()
                        .prepareGet(RiverIndexName.Conf.DEFAULT_INDEX_NAME, dbName, "_status").get();
                return response.isExists();
            }
        }, 5, TimeUnit.SECONDS), equalTo(false));
    }
}

From source file: org.elasticsearch.river.rabbitmq.RabbitMQRiverBothScriptTest.java

License: Apache License

public static void main(String[] args) throws Exception {
    Settings settings = ImmutableSettings.settingsBuilder().put("gateway.type", "none")
            .put("index.number_of_shards", 1).put("index.number_of_replicas", 0)
            .put("script.native.mock_script.type", MockScriptFactory.class).build();
    Node node = NodeBuilder.nodeBuilder().settings(settings).node();

    node.client().prepareIndex("_river", "test1", "_meta")
            .setSource(jsonBuilder().startObject().field("type", "rabbitmq").startObject("script_filter")
                    .field("script", "ctx.type1.field1 += param1").field("script_lang", "mvel")
                    .startObject("script_params").field("param1", 1).endObject().endObject()
                    .startObject("bulk_script_filter").field("script", "mock_script")
                    .field("script_lang", "native").endObject().endObject())
            .execute().actionGet();

    ConnectionFactory cfconn = new ConnectionFactory();
    cfconn.setHost("localhost");
    cfconn.setPort(AMQP.PROTOCOL.PORT);
    Connection conn = cfconn.newConnection();

    Channel ch = conn.createChannel();
    ch.exchangeDeclare("elasticsearch", "direct", true);
    ch.queueDeclare("elasticsearch", true, false, false, null);

    String message = "{ \"index\" :  { \"_index\" : \"test\", \"_type\" : \"type1\", \"_id\" : \"1\" } }\n"
            + "{ \"type1\" :  { \"field1\" : 1 } }\n"
            + "{ \"delete\" : { \"_index\" : \"test\", \"_type\" : \"type1\", \"_id\" : \"2\" } }\n"
            + "{ \"create\" : { \"_index\" : \"test\", \"_type\" : \"type1\", \"_id\" : \"3\" } }\n"
            + "{ \"type1\" :  { \"field1\" : 2 } }" + "";

    ch.basicPublish("elasticsearch", "elasticsearch", null, message.getBytes());

    ch.close();
    conn.close();

    Thread.sleep(10000);
}

From source file: org.elasticsearch.river.rabbitmq.RabbitMQRiverHeartbeatTest.java

License: Apache License

public static void main(String[] args) throws Exception {

    Node node = NodeBuilder.nodeBuilder()
            .settings(ImmutableSettings.settingsBuilder().put("gateway.type", "none")).node();

    node.client().prepareIndex("_river", "test1", "_meta").setSource(jsonBuilder().startObject()
            .field("type", "rabbitmq").startObject("rabbitmq").field("heartbeat", "1s").endObject().endObject())
            .execute().actionGet();

    ConnectionFactory cfconn = new ConnectionFactory();
    cfconn.setHost("localhost");
    cfconn.setPort(AMQP.PROTOCOL.PORT);
    Connection conn = cfconn.newConnection();

    Channel ch = conn.createChannel();
    ch.exchangeDeclare("elasticsearch", "direct", true);
    ch.queueDeclare("elasticsearch", true, false, false, null);

    String message = "{ \"index\" : { \"_index\" : \"test\", \"_type\" : \"type1\", \"_id\" : \"1\" } }\n"
            + "{ \"type1\" : { \"field1\" : \"value1\" } }\n"
            + "{ \"delete\" : { \"_index\" : \"test\", \"_type\" : \"type1\", \"_id\" : \"2\" } }\n"
            + "{ \"create\" : { \"_index\" : \"test\", \"_type\" : \"type1\", \"_id\" : \"1\" }\n"
            + "{ \"type1\" : { \"field1\" : \"value1\" } }";

    ch.basicPublish("elasticsearch", "elasticsearch", null, message.getBytes());

    ch.close();
    conn.close();

    Thread.sleep(100000);
}

From source file: org.elasticsearch.river.rabbitmq.RabbitMQRiverScriptTest.java

License: Apache License

public static void main(String[] args) throws Exception {
    Settings settings = ImmutableSettings.settingsBuilder().put("gateway.type", "none")
            .put("index.number_of_shards", 1).put("index.number_of_replicas", 0)
            .put("script.native.mock_script.type", MockScriptFactory.class).build();
    Node node = NodeBuilder.nodeBuilder().settings(settings).node();

    node.client().prepareIndex("_river", "test1", "_meta")
            .setSource(jsonBuilder().startObject().field("type", "rabbitmq").startObject("bulk_script_filter")
                    .field("script", "mock_script").field("script_lang", "native").endObject().endObject())
            .execute().actionGet();

    ConnectionFactory cfconn = new ConnectionFactory();
    cfconn.setHost("localhost");
    cfconn.setPort(AMQP.PROTOCOL.PORT);
    Connection conn = cfconn.newConnection();

    Channel ch = conn.createChannel();
    ch.exchangeDeclare("elasticsearch", "direct", true);
    ch.queueDeclare("elasticsearch", true, false, false, null);

    String message = "{ \"index\" :  { \"_index\" : \"test\", \"_type\" : \"type1\", \"_id\" : \"1\" } }\n"
            + "{ \"type1\" :  { \"field1\" : \"value1\" } }\n"
            + "{ \"delete\" : { \"_index\" : \"test\", \"_type\" : \"type1\", \"_id\" : \"2\" } }\n"
            + "{ \"create\" : { \"_index\" : \"test\", \"_type\" : \"type1\", \"_id\" : \"3\" } }\n"
            + "{ \"type1\" :  { \"field3\" : \"value3\" } }" + "";

    ch.basicPublish("elasticsearch", "elasticsearch", null, message.getBytes());

    ch.close();
    conn.close();

    Thread.sleep(100000);
}

From source file: org.elasticsearch.river.rabbitmq.RabbitMQRiverSingleLineScriptTest.java

License: Apache License

public static void main(String[] args) throws Exception {
    Settings settings = ImmutableSettings.settingsBuilder().put("gateway.type", "none")
            .put("index.number_of_shards", 1).put("index.number_of_replicas", 0).build();
    Node node = NodeBuilder.nodeBuilder().settings(settings).node();

    node.client().prepareIndex("_river", "test1", "_meta")
            .setSource(jsonBuilder().startObject().field("type", "rabbitmq").startObject("script_filter")
                    .field("script", "ctx.type1.field1 += param1").field("script_lang", "mvel")
                    .startObject("script_params").field("param1", 1).endObject().endObject().endObject())
            .execute().actionGet();

    ConnectionFactory cfconn = new ConnectionFactory();
    cfconn.setHost("localhost");
    cfconn.setPort(AMQP.PROTOCOL.PORT);
    Connection conn = cfconn.newConnection();

    Channel ch = conn.createChannel();
    ch.exchangeDeclare("elasticsearch", "direct", true);
    ch.queueDeclare("elasticsearch", true, false, false, null);

    String message = "{ \"index\" :  { \"_index\" : \"test\", \"_type\" : \"type1\", \"_id\" : \"1\" } }\n"
            + "{ \"type1\" :  { \"field1\" : 1 } }\n"
            + "{ \"delete\" : { \"_index\" : \"test\", \"_type\" : \"type1\", \"_id\" : \"2\" } }\n"
            + "{ \"create\" : { \"_index\" : \"test\", \"_type\" : \"type1\", \"_id\" : \"3\" } }\n"
            + "{ \"type1\" :  { \"field1\" : 2 } }" + "";

    ch.basicPublish("elasticsearch", "elasticsearch", null, message.getBytes());

    ch.close();
    conn.close();

    Thread.sleep(10000);
}

From source file: org.elasticsearch.river.rabbitmq.RabbitMQRiverTest.java

License: Apache License

public static void main(String[] args) throws Exception {

    Node node = NodeBuilder.nodeBuilder()
            .settings(ImmutableSettings.settingsBuilder().put("gateway.type", "none")).node();

    node.client().prepareIndex("_river", "test1", "_meta")
            .setSource(jsonBuilder().startObject().field("type", "rabbitmq").endObject()).execute().actionGet();

    ConnectionFactory cfconn = new ConnectionFactory();
    cfconn.setHost("localhost");
    cfconn.setPort(AMQP.PROTOCOL.PORT);
    Connection conn = cfconn.newConnection();

    Channel ch = conn.createChannel();
    ch.exchangeDeclare("elasticsearch", "direct", true);
    ch.queueDeclare("elasticsearch", true, false, false, null);

    String message = "{ \"index\" : { \"_index\" : \"test\", \"_type\" : \"type1\", \"_id\" : \"1\" } }\n"
            + "{ \"type1\" : { \"field1\" : \"value1\" } }\n"
            + "{ \"delete\" : { \"_index\" : \"test\", \"_type\" : \"type1\", \"_id\" : \"2\" } }\n"
            + "{ \"create\" : { \"_index\" : \"test\", \"_type\" : \"type1\", \"_id\" : \"1\" }\n"
            + "{ \"type1\" : { \"field1\" : \"value1\" } }";

    ch.basicPublish("elasticsearch", "elasticsearch", null, message.getBytes());

    ch.close();
    conn.close();

    Thread.sleep(100000);
}

From source file: org.elasticsearch.river.rabbitmq.RabbitMQRiverWithCustomActionsTest.java

License: Apache License

public static void main(String[] args) throws Exception {

    String rabbitHost = "rabbit-qa1";
    Node node = NodeBuilder.nodeBuilder().settings(
            ImmutableSettings.settingsBuilder().put("gateway.type", "none").put("cluster.name", "es-mqtest"))
            .node();

    node.client().prepareIndex("_river", "mqtest1", "_meta")
            .setSource(jsonBuilder().startObject().field("type", "rabbitmq").startObject("rabbitmq")
                    .field("host", rabbitHost).endObject().startObject("index").field("ordered", "true")
                    .field("warnOnBulkErrors", "false").endObject().endObject())
            .execute().actionGet();

    ConnectionFactory cfconn = new ConnectionFactory();
    cfconn.setHost(rabbitHost);
    cfconn.setPort(AMQP.PROTOCOL.PORT);
    Connection conn = cfconn.newConnection();

    Channel ch = conn.createChannel();
    ch.exchangeDeclare("elasticsearch", "direct", true);
    ch.queueDeclare("elasticsearch", true, false, false, null);

    Thread.sleep(5000);
    String message = "{ \"index\" : { \"_index\" : \"mqtest\", \"_type\" : \"type1\", \"_id\" : \"1\" , \"_version\" : \"2\"} }\n"
            + "{ \"type1\" : { \"field1\" : \"value1\" } }\n"
            + "{ \"delete\" : { \"_index\" : \"mqtest\", \"_type\" : \"type1\", \"_id\" : \"2\" } }\n"
            + "{ \"create\" : { \"_index\" : \"mqtest\", \"_type\" : \"type1\", \"_id\" : \"1\" , \"_version\" : \"2\"} }\n"
            + "{ \"create\" : { \"_index\" : \"mqtest\", \"_type\" : \"type1\", \"_id\" : \"1\" , \"_version\" : \"1\"} }\n"
            + "{ \"type1\" : { \"field1\" : \"value1\" } }";

    String messageWithWrongIndex = "{ \"index\" : { \"_index\" : \"mqtest\", \"_type\" : \"type1\", \"_id\" : \"1\" , \"_version\" : \"2\"} }\n"
            + "{ \"type1\" : { \"field1\" : \"value1\" } }\n"
            + "{ \"delete\" : { \"_index\" : \"This.Is.An.Invalid.Name\", \"_type\" : \"type1\", \"_id\" : \"2\" } }\n"
            + "{ \"create\" : { \"_index\" : \"mqtest\", \"_type\" : \"type1\", \"_id\" : \"1\" , \"_version\" : \"2\"} }\n"
            + "{ \"create\" : { \"_index\" : \"mqtest\", \"_type\" : \"type1\", \"_id\" : \"1\" , \"_version\" : \"1\"} }\n"
            + "{ \"type1\" : { \"field1\" : \"value1\" } }";

    String mapping = "{ \"type2\" : { \"properties\" : {\"data\" : {\"dynamic\" : true,\"properties\" : {\"myString\" : {\"type\" : \"string\",\"boost\" : 1.0,\"index\" : \"not_analyzed\",\"store\" : \"no\"},\"myText\" : {\"type\" : \"string\",\"include_in_all\" : true,\"index\" : \"analyzed\",\"store\" : \"no\"}}}}}}";
    String mappingMessage = "{ \"_index\" : \"mqtest\", \"_type\" : \"type2\"}\n" + mapping;
    String partialmappingMessage = "{ \"_index\" : \"mqtest\", \"_type\" : \"type2\"}";
    String deleteByQuery = "{ \"_index\" : \"mqtest\", \"_type\" : \"type1\", \"_queryString\" : \"_id:1\"}\n";

    ch.basicPublish("elasticsearch", "elasticsearch", null, message.getBytes());

    HashMap<String, Object> headers = new HashMap<String, Object>();
    headers.put("X-ES-Command", "mapping");
    BasicProperties props = MessageProperties.MINIMAL_BASIC;
    props = props.builder().headers(headers).build();
    ch.basicPublish("elasticsearch", "elasticsearch", props, mappingMessage.getBytes());
    headers.put("X-ES-Command", "deleteByQuery");
    props = props.builder().headers(headers).build();
    ch.basicPublish("elasticsearch", "elasticsearch", props, deleteByQuery.getBytes());
    Thread.sleep(5000);
    ch.basicPublish("elasticsearch", "elasticsearch", null, message.getBytes());
    ch.basicPublish("elasticsearch", "elasticsearch", null, messageWithWrongIndex.getBytes());
    ch.basicPublish("elasticsearch", "elasticsearch", null, message.getBytes());
    ch.basicPublish("elasticsearch", "elasticsearch", props, deleteByQuery.getBytes());
    Thread.sleep(5000);
    ch.close();
    conn.close();

    Thread.sleep(5000);
    Boolean exists = node.client().get(new GetRequest("mqtest").id("1")).get().isExists();
    ClusterState state = node.client().admin().cluster()
            .state(new ClusterStateRequest().filteredIndices("mqtest")).get().getState();
    ImmutableOpenMap<String, MappingMetaData> mappings = state.getMetaData().index("mqtest").mappings();
    MappingMetaData typeMap = mappings.get("type2");
    if (null != typeMap) {
        String gotMapping = typeMap.source().toString();
    }
}

From source file: org.elasticsearch.river.rabbitmq.RabbitMQTestRunner.java

License: Apache License

@Test
public void test_all_messages_are_consumed() throws Exception {

    // We try to connect to RabbitMQ.
    // If it's not running, we abort with an explicit message rather than failing obscurely.
    try {
        logger.info(" --> connecting to rabbitmq");
        ConnectionFactory cfconn = new ConnectionFactory();
        cfconn.setHost("localhost");
        cfconn.setPort(AMQP.PROTOCOL.PORT);
        Connection conn = cfconn.newConnection();

        Channel ch = conn.createChannel();
        ch.exchangeDeclare("elasticsearch", "direct", true);
        AMQP.Queue.DeclareOk queue = ch.queueDeclare("elasticsearch", true, false, false, null);

        // We purge the queue in case something is still in it
        ch.queuePurge("elasticsearch");

        logger.info(" --> sending messages");
        pushMessages(ch);

        logger.info(" --> create river");
        createIndex(INDEX);

        index("_river", "test", "_meta", river());

        // At some point we need to check whether the river has consumed everything
        int steps = timeout();
        long count = 0;

        while (true) {
            // We wait for one second
            Thread.sleep(1000);

            CountResponse response = client().prepareCount("test").execute().actionGet();
            count = response.getCount();

            steps--;
            if (steps < 0 || count == expectedDocuments()) {
                break;
            }
        }

        ch.close();
        conn.close();

        postInjectionTests();
    } catch (ConnectException e) {
        throw new Exception("RabbitMQ service is not launched on localhost:" + AMQP.PROTOCOL.PORT
                + ". Can not start Integration test. " + "Launch `rabbitmq-server`.", e);
    }
}

From source file: org.graylog2.messagehandlers.amqp.AMQPSubscriberThread.java

License: Open Source License

/**
 * Run the thread. Runs forever!
 */
@Override
public void run() {
    while (true) {
        Connection connection = null;
        Channel channel = null;
        QueueingConsumer consumer;

        try {
            connection = broker.getConnection();
            channel = connection.createChannel();
            // The consumer must wrap the live channel, so create it only after the channel exists.
            consumer = new QueueingConsumer(channel);
            channel.basicConsume(this.queue.getName(), false, consumer);

            LOG.info("Successfully connected to queue '" + this.queue.getName() + "'");
        } catch (Exception e) {
            LOG.error("AMQP queue '" + this.queue.getName()
                    + "': Could not connect to AMQP broker or channel (make sure that "
                    + "the queue exists). Retrying in " + SLEEP_INTERVAL + " seconds. (" + e.getMessage() + ")");

            // Retry after waiting for SLEEP_INTERVAL seconds.
            try {
                Thread.sleep(SLEEP_INTERVAL * 1000);
            } catch (InterruptedException foo) {
            }
            continue;
        }

        while (true) {
            try {
                QueueingConsumer.Delivery delivery;
                try {
                    delivery = consumer.nextDelivery();
                } catch (InterruptedException ie) {
                    continue;
                }

                // Handle the message. (Store in MongoDB etc)
                try {
                    handleMessage(delivery.getBody());
                } catch (Exception e) {
                    LOG.error("Could not handle AMQP message: " + e.toString());
                }

                try {
                    channel.basicAck(delivery.getEnvelope().getDeliveryTag(), false);
                } catch (IOException e) {
                    LOG.error("Could not ack AMQP message: " + e.toString());
                }
            } catch (Exception e) {
                // Error while receiving, e.g. when the AMQP broker breaks down.
                LOG.error("AMQP queue '" + this.queue.getName()
                        + "': Error while subscribed (rebuilding connection " + "in " + SLEEP_INTERVAL
                        + " seconds. (" + e.getMessage() + ")");

                // Better close connection stuff if it is still active.
                try {
                    channel.close();
                    connection.close();
                } catch (IOException ex) {
                    // I don't care.
                } catch (AlreadyClosedException ex) {
                    // I don't care.
                }

                // Retry after waiting for SLEEP_INTERVAL seconds.
                try {
                    Thread.sleep(SLEEP_INTERVAL * 1000);
                } catch (InterruptedException foo) {
                }
                break;
            }
        }
    }
}