Example usage for com.rabbitmq.client Channel queueDeclare

List of usage examples for com.rabbitmq.client Channel queueDeclare

Introduction

On this page you can find example usages of com.rabbitmq.client Channel queueDeclare.

Prototype

Queue.DeclareOk queueDeclare(String queue, boolean durable, boolean exclusive, boolean autoDelete,
        Map<String, Object> arguments) throws IOException;

Document

Declare a queue
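Before the per-project listings below, here is a minimal, self-contained sketch of the overload shown in the prototype. It assumes a broker running on localhost with default credentials; the queue name and message body are placeholder values chosen for illustration.

import com.rabbitmq.client.Channel;
import com.rabbitmq.client.Connection;
import com.rabbitmq.client.ConnectionFactory;

public class QueueDeclareExample {
    public static void main(String[] args) throws Exception {
        ConnectionFactory factory = new ConnectionFactory();
        factory.setHost("localhost"); // assumes a local broker with default credentials

        Connection connection = factory.newConnection();
        Channel channel = connection.createChannel();
        try {
            // durable = true, exclusive = false, autoDelete = false, no extra arguments
            channel.queueDeclare("example-queue", true, false, false, null);
            channel.basicPublish("", "example-queue", null, "hello".getBytes("UTF-8"));
        } finally {
            channel.close();
            connection.close();
        }
    }
}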

Usage

From source file:org.apache.nutch.indexer.history.HistoryIndexer.java

License:Apache License

private void sendEvent(final String message) {

    ConnectionFactory factory = new ConnectionFactory();
    final String host = conf.get("history.rabbitmq.host", "localhost");
    factory.setHost(host);

    final String user = conf.get("history.rabbitmq.user");
    factory.setUsername(user);

    final String password = conf.get("history.rabbitmq.password");
    factory.setPassword(password);

    Connection connection = null;
    Channel channel = null;
    try {
        connection = factory.newConnection();
        channel = connection.createChannel();
    } catch (IOException | TimeoutException e) {
        final String errMsg = "failed opening connection or channel";
        LOG.error(errMsg, e);
        closeChannelAndConnection(channel, true);
        throw new RuntimeException(errMsg, e);
    }

    final String queueNamesStr = conf.get("history.rabbitmq.queue.names", "nlp,langdetector");
    final String[] queueNames = queueNamesStr.split(",");
    for (final String singleQueueName : queueNames) {
        try {
            channel.queueDeclare(singleQueueName, true, false, false, null);
            channel.basicPublish("", singleQueueName, null, message.getBytes());
            LOG.debug(" [x] Sent '" + message + "'");
        } catch (IOException e) {
            final String errMsg = String.format("failed sending message [%s] to queue [%s]", message,
                    singleQueueName);
            LOG.error(errMsg, e);
        }
    }

    closeChannelAndConnection(channel, false);
}

From source file:org.apache.synapse.message.store.impl.rabbitmq.RabbitMQStore.java

License:Open Source License

/**
 * Create a queue to store messages; this uses the queueName parameter.
 *
 * @throws IOException if it is unable to create the queue or exchange
 */
private void setQueue() throws IOException {
    Channel channel = null;
    try {
        channel = producerConnection.createChannel();
        try {
            channel.queueDeclarePassive(queueName);
        } catch (java.io.IOException e) {
            logger.info("Queue :" + queueName + " not found.Declaring queue.");
            if (!channel.isOpen()) {
                channel = producerConnection.createChannel();
            }
            //A Map of argument names and values can be used in place of the null argument,
            //e.g. to add a dead letter exchange to the queue (see the sketch after this method)
            //params: (queueName, durable, exclusive, autoDelete, paramMap)
            channel.queueDeclare(queueName, true, false, false, null);
        }
        //declaring exchange
        if (exchangeName != null) {
            try {
                channel.exchangeDeclarePassive(exchangeName);
            } catch (java.io.IOException e) {
                logger.info("Exchange :" + exchangeName + " not found. Declaring exchange.");
                if (!channel.isOpen()) {
                    channel = producerConnection.createChannel();
                }
                //params : ( exchangeName, exchangeType, durable, autoDelete, paramMap )
                channel.exchangeDeclare(exchangeName, "direct", true, false, null);
            }
            channel.queueBind(queueName, exchangeName, routeKey);
        }
    } finally {
        if (channel != null) {
            channel.close();
        }
    }
}
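The comment in the example above notes that a map of arguments can replace the null parameter, for instance to attach a dead-letter exchange. The following sketch illustrates that idea; the queue name, exchange name, and TTL value are placeholders (it assumes an exchange named orders-dlx already exists on the broker), while x-dead-letter-exchange and x-message-ttl are standard RabbitMQ queue arguments.

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

import com.rabbitmq.client.Channel;

class QueueArgumentsSketch {
    // Declares a durable queue whose rejected or expired messages are routed to
    // the "orders-dlx" exchange and whose messages expire after 60 seconds.
    static void declareWithArguments(Channel channel) throws IOException {
        Map<String, Object> arguments = new HashMap<>();
        arguments.put("x-dead-letter-exchange", "orders-dlx"); // placeholder exchange name
        arguments.put("x-message-ttl", 60000);                 // TTL in milliseconds
        channel.queueDeclare("orders", true, false, false, arguments);
    }
}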

From source file:org.apache.synapse.tranport.amqp.AMQPTwoWayProducerClient.java

License:Apache License

private static void produceAndConsume(String message, Channel channel, String requestQueueName,
        String replyQueueName) throws IOException, InterruptedException {

    AMQP.BasicProperties.Builder builder = new AMQP.BasicProperties().builder();
    String restCoID = Math.random() + "";
    builder.correlationId(restCoID);
    System.out.println("Request correlation Id : " + restCoID);
    builder.replyTo(replyQueueName);
    channel.basicPublish("", requestQueueName, builder.build(), message.getBytes());
    System.out.println("[x] sent to '" + requestQueueName + "' the message '\n" + message + "'");

    channel.queueDeclare(replyQueueName, false, false, false, null);
    QueueingConsumer consumer = new QueueingConsumer(channel);
    channel.basicConsume(replyQueueName, true, consumer);
    System.out.println("Waiting for message on queue '" + replyQueueName + "'");
    while (true) {
        QueueingConsumer.Delivery delivery = consumer.nextDelivery();
        String replyMessage = new String(delivery.getBody());
        System.out.println("[x] received '" + replyMessage + "'");
        System.out.println("Response correlation Id : " + delivery.getProperties().getCorrelationId());
        break;

    }
}

From source file:org.ballerinalang.messaging.rabbitmq.util.ChannelUtils.java

License:Open Source License

/**
 * Declare a queue.
 *
 * @param channel    RabbitMQ Channel object.
 * @param queueName  The name of the queue.
 * @param durable    True if we are declaring a durable queue (the queue will survive a server restart).
 * @param exclusive  True if we are declaring an exclusive queue (restricted to this connection).
 * @param autoDelete True if we are declaring an autodelete queue (server will delete it when no longer in use).
 */
public static void queueDeclare(Channel channel, String queueName, boolean durable, boolean exclusive,
        boolean autoDelete) {
    try {
        channel.queueDeclare(queueName, durable, exclusive, autoDelete, null);
    } catch (IOException exception) {
        String errorMessage = "An error occurred while declaring the queue: ";
        throw new RabbitMQConnectorException(errorMessage + exception.getMessage(), exception);
    }
}

From source file:org.elasticsearch.river.rabbitmq.RabbitMQIntegrationTest.java

License:Apache License

private void launchTest(XContentBuilder river, final int numMessages, final int numDocsPerMessage,
        InjectorHook injectorHook, boolean delete, boolean update) throws Exception {

    final String dbName = getDbName();
    logger.info(" --> create index [{}]", dbName);
    try {
        client().admin().indices().prepareDelete(dbName).get();
    } catch (IndexMissingException e) {
        // No worries.
    }
    try {
        createIndex(dbName);
    } catch (IndexMissingException e) {
        // No worries.
    }
    ensureGreen(dbName);

    logger.info("  -> Checking rabbitmq running");
    // We try to connect to RabbitMQ.
    // If it's not running, the test fails with an explicit message
    Channel channel = null;
    Connection connection = null;
    try {
        logger.info(" --> connecting to rabbitmq");
        ConnectionFactory factory = new ConnectionFactory();
        factory.setHost("localhost");
        factory.setPort(AMQP.PROTOCOL.PORT);
        connection = factory.newConnection();
    } catch (ConnectException ce) {
        throw new Exception("RabbitMQ service is not launched on localhost:" + AMQP.PROTOCOL.PORT
                + ". Can not start Integration test. " + "Launch `rabbitmq-server`.", ce);
    }

    try {
        logger.info("  -> Creating [{}] channel", dbName);
        channel = connection.createChannel();

        logger.info("  -> Creating queue [{}]", dbName);
        channel.queueDeclare(getDbName(), true, false, false, null);

        // We purge the queue in case of something is remaining there
        logger.info("  -> Purging [{}] channel", dbName);
        channel.queuePurge(getDbName());

        logger.info("  -> Put [{}] messages with [{}] documents each = [{}] docs", numMessages,
                numDocsPerMessage, numMessages * numDocsPerMessage);
        final Set<String> removed = new HashSet<String>();
        int nbUpdated = 0;
        for (int i = 0; i < numMessages; i++) {
            StringBuffer message = new StringBuffer();

            for (int j = 0; j < numDocsPerMessage; j++) {
                if (logger.isTraceEnabled()) {
                    logger.trace("  -> Indexing document [{}] - [{}][{}]", i + "_" + j, i, j);
                }
                message.append("{ \"index\" : { \"_index\" : \"" + dbName
                        + "\", \"_type\" : \"typex\", \"_id\" : \"" + i + "_" + j + "\" } }\n");
                message.append("{ \"field\" : \"" + i + "_" + j + "\",\"numeric\" : " + i * j + " }\n");

                // Sometime we update a document
                if (update && rarely()) {
                    String id = between(0, i) + "_" + between(0, j);
                    // We can only update if it has not been removed :)
                    if (!removed.contains(id)) {
                        logger.debug("  -> Updating document [{}] - [{}][{}]", id, i, j);
                        message.append("{ \"update\" : { \"_index\" : \"" + dbName
                                + "\", \"_type\" : \"typex\", \"_id\" : \"" + id + "\" } }\n");
                        message.append(
                                "{ \"doc\": { \"foo\" : \"bar\", \"field2\" : \"" + i + "_" + j + "\" }}\n");
                        nbUpdated++;
                    }
                }

                // Sometime we delete a document
                if (delete && rarely()) {
                    String id = between(0, i) + "_" + between(0, j);
                    if (!removed.contains(id)) {
                        logger.debug("  -> Removing document [{}] - [{}][{}]", id, i, j);
                        message.append("{ \"delete\" : { \"_index\" : \"" + dbName
                                + "\", \"_type\" : \"typex\", \"_id\" : \"" + id + "\" } }\n");
                        removed.add(id);
                    }
                }
            }

            channel.basicPublish("", dbName, null, message.toString().getBytes());
        }

        logger.info("  -> We removed [{}] docs and updated [{}] docs", removed.size(), nbUpdated);

        if (injectorHook != null) {
            logger.info("  -> Injecting extra data");
            injectorHook.inject();
        }

        logger.info(" --> create river");
        IndexResponse indexResponse = index("_river", dbName, "_meta", river);
        assertTrue(indexResponse.isCreated());

        logger.info("-->  checking that river [{}] was created", dbName);
        assertThat(awaitBusy(new Predicate<Object>() {
            public boolean apply(Object obj) {
                GetResponse response = client()
                        .prepareGet(RiverIndexName.Conf.DEFAULT_INDEX_NAME, dbName, "_status").get();
                return response.isExists();
            }
        }, 5, TimeUnit.SECONDS), equalTo(true));

        // Check that docs are still processed by the river
        logger.info(" --> waiting for expected number of docs: [{}]",
                numDocsPerMessage * numMessages - removed.size());
        assertThat(awaitBusy(new Predicate<Object>() {
            public boolean apply(Object obj) {
                try {
                    refresh();
                    int expected = numDocsPerMessage * numMessages - removed.size();
                    CountResponse response = client().prepareCount(dbName).get();
                    logger.debug("  -> got {} docs, expected {}", response.getCount(), expected);
                    return response.getCount() == expected;
                } catch (IndexMissingException e) {
                    return false;
                }
            }
        }, 20, TimeUnit.SECONDS), equalTo(true));
    } finally {
        if (channel != null && channel.isOpen()) {
            channel.close();
        }
        if (connection != null && connection.isOpen()) {
            connection.close();
        }

        // Deletes the river
        GetResponse response = client().prepareGet(RiverIndexName.Conf.DEFAULT_INDEX_NAME, dbName, "_status")
                .get();
        if (response.isExists()) {
            client().prepareDelete(RiverIndexName.Conf.DEFAULT_INDEX_NAME, dbName, "_meta").get();
            client().prepareDelete(RiverIndexName.Conf.DEFAULT_INDEX_NAME, dbName, "_status").get();
        }

        assertThat(awaitBusy(new Predicate<Object>() {
            public boolean apply(Object obj) {
                GetResponse response = client()
                        .prepareGet(RiverIndexName.Conf.DEFAULT_INDEX_NAME, dbName, "_status").get();
                return response.isExists();
            }
        }, 5, TimeUnit.SECONDS), equalTo(false));
    }
}

From source file:org.elasticsearch.river.rabbitmq.RabbitMQRiverBothScriptTest.java

License:Apache License

public static void main(String[] args) throws Exception {
    Settings settings = ImmutableSettings.settingsBuilder().put("gateway.type", "none")
            .put("index.number_of_shards", 1).put("index.number_of_replicas", 0)
            .put("script.native.mock_script.type", MockScriptFactory.class).build();
    Node node = NodeBuilder.nodeBuilder().settings(settings).node();

    node.client().prepareIndex("_river", "test1", "_meta")
            .setSource(jsonBuilder().startObject().field("type", "rabbitmq").startObject("script_filter")
                    .field("script", "ctx.type1.field1 += param1").field("script_lang", "mvel")
                    .startObject("script_params").field("param1", 1).endObject().endObject()
                    .startObject("bulk_script_filter").field("script", "mock_script")
                    .field("script_lang", "native").endObject().endObject())
            .execute().actionGet();

    ConnectionFactory cfconn = new ConnectionFactory();
    cfconn.setHost("localhost");
    cfconn.setPort(AMQP.PROTOCOL.PORT);
    Connection conn = cfconn.newConnection();

    Channel ch = conn.createChannel();
    ch.exchangeDeclare("elasticsearch", "direct", true);
    ch.queueDeclare("elasticsearch", true, false, false, null);

    String message = "{ \"index\" :  { \"_index\" : \"test\", \"_type\" : \"type1\", \"_id\" : \"1\" } }\n"
            + "{ \"type1\" :  { \"field1\" : 1 } }\n"
            + "{ \"delete\" : { \"_index\" : \"test\", \"_type\" : \"type1\", \"_id\" : \"2\" } }\n"
            + "{ \"create\" : { \"_index\" : \"test\", \"_type\" : \"type1\", \"_id\" : \"3\" } }\n"
            + "{ \"type1\" :  { \"field1\" : 2 } }" + "";

    ch.basicPublish("elasticsearch", "elasticsearch", null, message.getBytes());

    ch.close();
    conn.close();

    Thread.sleep(10000);
}

From source file:org.elasticsearch.river.rabbitmq.RabbitMQRiverHeartbeatTest.java

License:Apache License

public static void main(String[] args) throws Exception {

    Node node = NodeBuilder.nodeBuilder()
            .settings(ImmutableSettings.settingsBuilder().put("gateway.type", "none")).node();

    node.client().prepareIndex("_river", "test1", "_meta").setSource(jsonBuilder().startObject()
            .field("type", "rabbitmq").startObject("rabbitmq").field("heartbeat", "1s").endObject().endObject())
            .execute().actionGet();

    ConnectionFactory cfconn = new ConnectionFactory();
    cfconn.setHost("localhost");
    cfconn.setPort(AMQP.PROTOCOL.PORT);
    Connection conn = cfconn.newConnection();

    Channel ch = conn.createChannel();
    ch.exchangeDeclare("elasticsearch", "direct", true);
    ch.queueDeclare("elasticsearch", true, false, false, null);

    String message = "{ \"index\" : { \"_index\" : \"test\", \"_type\" : \"type1\", \"_id\" : \"1\" } }\n"
            + "{ \"type1\" : { \"field1\" : \"value1\" } }\n"
            + "{ \"delete\" : { \"_index\" : \"test\", \"_type\" : \"type1\", \"_id\" : \"2\" } }\n"
            + "{ \"create\" : { \"_index\" : \"test\", \"_type\" : \"type1\", \"_id\" : \"1\" }\n"
            + "{ \"type1\" : { \"field1\" : \"value1\" } }";

    ch.basicPublish("elasticsearch", "elasticsearch", null, message.getBytes());

    ch.close();
    conn.close();

    Thread.sleep(100000);
}

From source file:org.elasticsearch.river.rabbitmq.RabbitMQRiverScriptTest.java

License:Apache License

public static void main(String[] args) throws Exception {
    Settings settings = ImmutableSettings.settingsBuilder().put("gateway.type", "none")
            .put("index.number_of_shards", 1).put("index.number_of_replicas", 0)
            .put("script.native.mock_script.type", MockScriptFactory.class).build();
    Node node = NodeBuilder.nodeBuilder().settings(settings).node();

    node.client().prepareIndex("_river", "test1", "_meta")
            .setSource(jsonBuilder().startObject().field("type", "rabbitmq").startObject("bulk_script_filter")
                    .field("script", "mock_script").field("script_lang", "native").endObject().endObject())
            .execute().actionGet();

    ConnectionFactory cfconn = new ConnectionFactory();
    cfconn.setHost("localhost");
    cfconn.setPort(AMQP.PROTOCOL.PORT);
    Connection conn = cfconn.newConnection();

    Channel ch = conn.createChannel();
    ch.exchangeDeclare("elasticsearch", "direct", true);
    ch.queueDeclare("elasticsearch", true, false, false, null);

    String message = "{ \"index\" :  { \"_index\" : \"test\", \"_type\" : \"type1\", \"_id\" : \"1\" } }\n"
            + "{ \"type1\" :  { \"field1\" : \"value1\" } }\n"
            + "{ \"delete\" : { \"_index\" : \"test\", \"_type\" : \"type1\", \"_id\" : \"2\" } }\n"
            + "{ \"create\" : { \"_index\" : \"test\", \"_type\" : \"type1\", \"_id\" : \"3\" } }\n"
            + "{ \"type1\" :  { \"field3\" : \"value3\" } }" + "";

    ch.basicPublish("elasticsearch", "elasticsearch", null, message.getBytes());

    ch.close();
    conn.close();

    Thread.sleep(100000);
}

From source file:org.elasticsearch.river.rabbitmq.RabbitMQRiverSingleLineScriptTest.java

License:Apache License

public static void main(String[] args) throws Exception {
    Settings settings = ImmutableSettings.settingsBuilder().put("gateway.type", "none")
            .put("index.number_of_shards", 1).put("index.number_of_replicas", 0).build();
    Node node = NodeBuilder.nodeBuilder().settings(settings).node();

    node.client().prepareIndex("_river", "test1", "_meta")
            .setSource(jsonBuilder().startObject().field("type", "rabbitmq").startObject("script_filter")
                    .field("script", "ctx.type1.field1 += param1").field("script_lang", "mvel")
                    .startObject("script_params").field("param1", 1).endObject().endObject().endObject())
            .execute().actionGet();

    ConnectionFactory cfconn = new ConnectionFactory();
    cfconn.setHost("localhost");
    cfconn.setPort(AMQP.PROTOCOL.PORT);
    Connection conn = cfconn.newConnection();

    Channel ch = conn.createChannel();
    ch.exchangeDeclare("elasticsearch", "direct", true);
    ch.queueDeclare("elasticsearch", true, false, false, null);

    String message = "{ \"index\" :  { \"_index\" : \"test\", \"_type\" : \"type1\", \"_id\" : \"1\" } }\n"
            + "{ \"type1\" :  { \"field1\" : 1 } }\n"
            + "{ \"delete\" : { \"_index\" : \"test\", \"_type\" : \"type1\", \"_id\" : \"2\" } }\n"
            + "{ \"create\" : { \"_index\" : \"test\", \"_type\" : \"type1\", \"_id\" : \"3\" } }\n"
            + "{ \"type1\" :  { \"field1\" : 2 } }" + "";

    ch.basicPublish("elasticsearch", "elasticsearch", null, message.getBytes());

    ch.close();
    conn.close();

    Thread.sleep(10000);
}

From source file:org.elasticsearch.river.rabbitmq.RabbitMQRiverTest.java

License:Apache License

public static void main(String[] args) throws Exception {

    Node node = NodeBuilder.nodeBuilder()
            .settings(ImmutableSettings.settingsBuilder().put("gateway.type", "none")).node();

    node.client().prepareIndex("_river", "test1", "_meta")
            .setSource(jsonBuilder().startObject().field("type", "rabbitmq").endObject()).execute().actionGet();

    ConnectionFactory cfconn = new ConnectionFactory();
    cfconn.setHost("localhost");
    cfconn.setPort(AMQP.PROTOCOL.PORT);
    Connection conn = cfconn.newConnection();

    Channel ch = conn.createChannel();
    ch.exchangeDeclare("elasticsearch", "direct", true);
    ch.queueDeclare("elasticsearch", true, false, false, null);

    String message = "{ \"index\" : { \"_index\" : \"test\", \"_type\" : \"type1\", \"_id\" : \"1\" } }\n"
            + "{ \"type1\" : { \"field1\" : \"value1\" } }\n"
            + "{ \"delete\" : { \"_index\" : \"test\", \"_type\" : \"type1\", \"_id\" : \"2\" } }\n"
            + "{ \"create\" : { \"_index\" : \"test\", \"_type\" : \"type1\", \"_id\" : \"1\" }\n"
            + "{ \"type1\" : { \"field1\" : \"value1\" } }";

    ch.basicPublish("elasticsearch", "elasticsearch", null, message.getBytes());

    ch.close();
    conn.close();

    Thread.sleep(100000);
}