Example usage for com.rabbitmq.client ConnectionFactory setPort

List of usage examples for com.rabbitmq.client ConnectionFactory setPort

Introduction

On this page you can find example usage of com.rabbitmq.client ConnectionFactory setPort.

Prototype

public void setPort(int port) 

Source Link

Document

Set the target port.
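
For orientation, here is a minimal sketch (not taken from the examples below) that sets an explicit port on a ConnectionFactory before opening a connection. It assumes a broker running on localhost and uses AMQP.PROTOCOL.PORT (5672), the constant the examples below also use, as the port value.

import com.rabbitmq.client.AMQP;
import com.rabbitmq.client.Channel;
import com.rabbitmq.client.Connection;
import com.rabbitmq.client.ConnectionFactory;

public class SetPortExample {
    public static void main(String[] args) throws Exception {
        ConnectionFactory factory = new ConnectionFactory();
        factory.setHost("localhost");
        // Explicitly set the target port; AMQP.PROTOCOL.PORT is the default AMQP port (5672)
        factory.setPort(AMQP.PROTOCOL.PORT);

        Connection connection = factory.newConnection();
        try {
            Channel channel = connection.createChannel();
            channel.queueDeclare("example-queue", true, false, false, null);
            channel.basicPublish("", "example-queue", null, "hello".getBytes());
            channel.close();
        } finally {
            connection.close();
        }
    }
}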

Usage

From source file:org.belio.mq.RabbitPublisher.java

@Override
public void open() throws IOException {
    synchronized (RabbitPublisher.class) {
        try {
            // Create a connection factory and configure it from the MQ properties
            hosts = mqproperties.getProperty("host").split(",");

            ConnectionFactory factory = new ConnectionFactory();
            factory.setHost(mqproperties.getProperty("host"));
            factory.setVirtualHost(mqproperties.getProperty("vhost"));
            factory.setUsername(mqproperties.getProperty("username"));
            factory.setPassword(mqproperties.getProperty("password"));
            factory.setPort(Integer.parseInt(mqproperties.getProperty("port")));
            // Create a new connection to MQ
            connection = factory.newConnection();
            // Create a new channel and declare its queue and exchange as well
            channel = connection.createChannel();
            channel.queueDeclare(queueType.name().concat(QUEUE_SUFFIX), true, false, false, null);
            channel.exchangeDeclare(queueType.name().concat(EXCHANGE_SUFFIX), mqproperties.getProperty("type"));
            channel.queueBind(queueType.name().concat(QUEUE_SUFFIX), queueType.name().concat(EXCHANGE_SUFFIX),
                    "");
        } catch (Exception ex) {
            ex.printStackTrace();
        }
    }
}
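
The example above parses the port with Integer.parseInt, which throws if the "port" property is missing or malformed. A hedged sketch of the same property-driven configuration with a fallback to the default AMQP port (the property names mirror the example; the helper class itself is hypothetical):

import com.rabbitmq.client.AMQP;
import com.rabbitmq.client.ConnectionFactory;
import java.util.Properties;

// Sketch: build a ConnectionFactory from a Properties object,
// falling back to the default AMQP port when "port" is absent or not a number.
public final class FactoryConfig {
    public static ConnectionFactory fromProperties(Properties props) {
        ConnectionFactory factory = new ConnectionFactory();
        factory.setHost(props.getProperty("host", "localhost"));
        factory.setVirtualHost(props.getProperty("vhost", "/"));
        factory.setUsername(props.getProperty("username", "guest"));
        factory.setPassword(props.getProperty("password", "guest"));

        int port = AMQP.PROTOCOL.PORT; // default 5672
        String raw = props.getProperty("port");
        if (raw != null) {
            try {
                port = Integer.parseInt(raw.trim());
            } catch (NumberFormatException ignored) {
                // keep the default port if the property is not a number
            }
        }
        factory.setPort(port);
        return factory;
    }
}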

From source file:org.elasticsearch.river.rabbitmq.RabbitMQIntegrationTest.java

License:Apache License

private void launchTest(XContentBuilder river, final int numMessages, final int numDocsPerMessage,
        InjectorHook injectorHook, boolean delete, boolean update) throws Exception {

    final String dbName = getDbName();
    logger.info(" --> create index [{}]", dbName);
    try {
        client().admin().indices().prepareDelete(dbName).get();
    } catch (IndexMissingException e) {
        // No worries.
    }
    try {
        createIndex(dbName);
    } catch (IndexMissingException e) {
        // No worries.
    }
    ensureGreen(dbName);

    logger.info("  -> Checking rabbitmq running");
    // We try to connect to RabbitMQ.
    // If it's not launched, we don't fail the test but only log it
    Channel channel = null;
    Connection connection = null;
    try {
        logger.info(" --> connecting to rabbitmq");
        ConnectionFactory factory = new ConnectionFactory();
        factory.setHost("localhost");
        factory.setPort(AMQP.PROTOCOL.PORT);
        connection = factory.newConnection();
    } catch (ConnectException ce) {
        throw new Exception("RabbitMQ service is not launched on localhost:" + AMQP.PROTOCOL.PORT
                + ". Can not start Integration test. " + "Launch `rabbitmq-server`.", ce);
    }

    try {
        logger.info("  -> Creating [{}] channel", dbName);
        channel = connection.createChannel();

        logger.info("  -> Creating queue [{}]", dbName);
        channel.queueDeclare(getDbName(), true, false, false, null);

        // We purge the queue in case anything is left in it
        logger.info("  -> Purging [{}] channel", dbName);
        channel.queuePurge(getDbName());

        logger.info("  -> Put [{}] messages with [{}] documents each = [{}] docs", numMessages,
                numDocsPerMessage, numMessages * numDocsPerMessage);
        final Set<String> removed = new HashSet<String>();
        int nbUpdated = 0;
        for (int i = 0; i < numMessages; i++) {
            StringBuffer message = new StringBuffer();

            for (int j = 0; j < numDocsPerMessage; j++) {
                if (logger.isTraceEnabled()) {
                    logger.trace("  -> Indexing document [{}] - [{}][{}]", i + "_" + j, i, j);
                }
                message.append("{ \"index\" : { \"_index\" : \"" + dbName
                        + "\", \"_type\" : \"typex\", \"_id\" : \"" + i + "_" + j + "\" } }\n");
                message.append("{ \"field\" : \"" + i + "_" + j + "\",\"numeric\" : " + i * j + " }\n");

                // Sometimes we update a document
                if (update && rarely()) {
                    String id = between(0, i) + "_" + between(0, j);
                    // We can only update if it has not been removed :)
                    if (!removed.contains(id)) {
                        logger.debug("  -> Updating document [{}] - [{}][{}]", id, i, j);
                        message.append("{ \"update\" : { \"_index\" : \"" + dbName
                                + "\", \"_type\" : \"typex\", \"_id\" : \"" + id + "\" } }\n");
                        message.append(
                                "{ \"doc\": { \"foo\" : \"bar\", \"field2\" : \"" + i + "_" + j + "\" }}\n");
                        nbUpdated++;
                    }
                }

                // Sometimes we delete a document
                if (delete && rarely()) {
                    String id = between(0, i) + "_" + between(0, j);
                    if (!removed.contains(id)) {
                        logger.debug("  -> Removing document [{}] - [{}][{}]", id, i, j);
                        message.append("{ \"delete\" : { \"_index\" : \"" + dbName
                                + "\", \"_type\" : \"typex\", \"_id\" : \"" + id + "\" } }\n");
                        removed.add(id);
                    }
                }
            }

            channel.basicPublish("", dbName, null, message.toString().getBytes());
        }

        logger.info("  -> We removed [{}] docs and updated [{}] docs", removed.size(), nbUpdated);

        if (injectorHook != null) {
            logger.info("  -> Injecting extra data");
            injectorHook.inject();
        }

        logger.info(" --> create river");
        IndexResponse indexResponse = index("_river", dbName, "_meta", river);
        assertTrue(indexResponse.isCreated());

        logger.info("-->  checking that river [{}] was created", dbName);
        assertThat(awaitBusy(new Predicate<Object>() {
            public boolean apply(Object obj) {
                GetResponse response = client()
                        .prepareGet(RiverIndexName.Conf.DEFAULT_INDEX_NAME, dbName, "_status").get();
                return response.isExists();
            }
        }, 5, TimeUnit.SECONDS), equalTo(true));

        // Check that docs are still processed by the river
        logger.info(" --> waiting for expected number of docs: [{}]",
                numDocsPerMessage * numMessages - removed.size());
        assertThat(awaitBusy(new Predicate<Object>() {
            public boolean apply(Object obj) {
                try {
                    refresh();
                    int expected = numDocsPerMessage * numMessages - removed.size();
                    CountResponse response = client().prepareCount(dbName).get();
                    logger.debug("  -> got {} docs, expected {}", response.getCount(), expected);
                    return response.getCount() == expected;
                } catch (IndexMissingException e) {
                    return false;
                }
            }
        }, 20, TimeUnit.SECONDS), equalTo(true));
    } finally {
        if (channel != null && channel.isOpen()) {
            channel.close();
        }
        if (connection != null && connection.isOpen()) {
            connection.close();
        }

        // Deletes the river
        GetResponse response = client().prepareGet(RiverIndexName.Conf.DEFAULT_INDEX_NAME, dbName, "_status")
                .get();
        if (response.isExists()) {
            client().prepareDelete(RiverIndexName.Conf.DEFAULT_INDEX_NAME, dbName, "_meta").get();
            client().prepareDelete(RiverIndexName.Conf.DEFAULT_INDEX_NAME, dbName, "_status").get();
        }

        assertThat(awaitBusy(new Predicate<Object>() {
            public boolean apply(Object obj) {
                GetResponse response = client()
                        .prepareGet(RiverIndexName.Conf.DEFAULT_INDEX_NAME, dbName, "_status").get();
                return response.isExists();
            }
        }, 5, TimeUnit.SECONDS), equalTo(false));
    }
}

From source file:org.elasticsearch.river.rabbitmq.RabbitMQRiverBothScriptTest.java

License:Apache License

public static void main(String[] args) throws Exception {
    Settings settings = ImmutableSettings.settingsBuilder().put("gateway.type", "none")
            .put("index.number_of_shards", 1).put("index.number_of_replicas", 0)
            .put("script.native.mock_script.type", MockScriptFactory.class).build();
    Node node = NodeBuilder.nodeBuilder().settings(settings).node();

    node.client().prepareIndex("_river", "test1", "_meta")
            .setSource(jsonBuilder().startObject().field("type", "rabbitmq").startObject("script_filter")
                    .field("script", "ctx.type1.field1 += param1").field("script_lang", "mvel")
                    .startObject("script_params").field("param1", 1).endObject().endObject()
                    .startObject("bulk_script_filter").field("script", "mock_script")
                    .field("script_lang", "native").endObject().endObject())
            .execute().actionGet();

    ConnectionFactory cfconn = new ConnectionFactory();
    cfconn.setHost("localhost");
    cfconn.setPort(AMQP.PROTOCOL.PORT);
    Connection conn = cfconn.newConnection();

    Channel ch = conn.createChannel();
    ch.exchangeDeclare("elasticsearch", "direct", true);
    ch.queueDeclare("elasticsearch", true, false, false, null);

    String message = "{ \"index\" :  { \"_index\" : \"test\", \"_type\" : \"type1\", \"_id\" : \"1\" } }\n"
            + "{ \"type1\" :  { \"field1\" : 1 } }\n"
            + "{ \"delete\" : { \"_index\" : \"test\", \"_type\" : \"type1\", \"_id\" : \"2\" } }\n"
            + "{ \"create\" : { \"_index\" : \"test\", \"_type\" : \"type1\", \"_id\" : \"3\" } }\n"
            + "{ \"type1\" :  { \"field1\" : 2 } }" + "";

    ch.basicPublish("elasticsearch", "elasticsearch", null, message.getBytes());

    ch.close();
    conn.close();

    Thread.sleep(10000);
}

From source file:org.elasticsearch.river.rabbitmq.RabbitMQRiverHeartbeatTest.java

License:Apache License

public static void main(String[] args) throws Exception {

    Node node = NodeBuilder.nodeBuilder()
            .settings(ImmutableSettings.settingsBuilder().put("gateway.type", "none")).node();

    node.client().prepareIndex("_river", "test1", "_meta").setSource(jsonBuilder().startObject()
            .field("type", "rabbitmq").startObject("rabbitmq").field("heartbeat", "1s").endObject().endObject())
            .execute().actionGet();

    ConnectionFactory cfconn = new ConnectionFactory();
    cfconn.setHost("localhost");
    cfconn.setPort(AMQP.PROTOCOL.PORT);
    Connection conn = cfconn.newConnection();

    Channel ch = conn.createChannel();
    ch.exchangeDeclare("elasticsearch", "direct", true);
    ch.queueDeclare("elasticsearch", true, false, false, null);

    String message = "{ \"index\" : { \"_index\" : \"test\", \"_type\" : \"type1\", \"_id\" : \"1\" } }\n"
            + "{ \"type1\" : { \"field1\" : \"value1\" } }\n"
            + "{ \"delete\" : { \"_index\" : \"test\", \"_type\" : \"type1\", \"_id\" : \"2\" } }\n"
            + "{ \"create\" : { \"_index\" : \"test\", \"_type\" : \"type1\", \"_id\" : \"1\" }\n"
            + "{ \"type1\" : { \"field1\" : \"value1\" } }";

    ch.basicPublish("elasticsearch", "elasticsearch", null, message.getBytes());

    ch.close();
    conn.close();

    Thread.sleep(100000);
}

From source file:org.elasticsearch.river.rabbitmq.RabbitMQRiverScriptTest.java

License:Apache License

public static void main(String[] args) throws Exception {
    Settings settings = ImmutableSettings.settingsBuilder().put("gateway.type", "none")
            .put("index.number_of_shards", 1).put("index.number_of_replicas", 0)
            .put("script.native.mock_script.type", MockScriptFactory.class).build();
    Node node = NodeBuilder.nodeBuilder().settings(settings).node();

    node.client().prepareIndex("_river", "test1", "_meta")
            .setSource(jsonBuilder().startObject().field("type", "rabbitmq").startObject("bulk_script_filter")
                    .field("script", "mock_script").field("script_lang", "native").endObject().endObject())
            .execute().actionGet();

    ConnectionFactory cfconn = new ConnectionFactory();
    cfconn.setHost("localhost");
    cfconn.setPort(AMQP.PROTOCOL.PORT);
    Connection conn = cfconn.newConnection();

    Channel ch = conn.createChannel();
    ch.exchangeDeclare("elasticsearch", "direct", true);
    ch.queueDeclare("elasticsearch", true, false, false, null);

    String message = "{ \"index\" :  { \"_index\" : \"test\", \"_type\" : \"type1\", \"_id\" : \"1\" } }\n"
            + "{ \"type1\" :  { \"field1\" : \"value1\" } }\n"
            + "{ \"delete\" : { \"_index\" : \"test\", \"_type\" : \"type1\", \"_id\" : \"2\" } }\n"
            + "{ \"create\" : { \"_index\" : \"test\", \"_type\" : \"type1\", \"_id\" : \"3\" } }\n"
            + "{ \"type1\" :  { \"field3\" : \"value3\" } }" + "";

    ch.basicPublish("elasticsearch", "elasticsearch", null, message.getBytes());

    ch.close();
    conn.close();

    Thread.sleep(100000);
}

From source file:org.elasticsearch.river.rabbitmq.RabbitMQRiverSingleLineScriptTest.java

License:Apache License

public static void main(String[] args) throws Exception {
    Settings settings = ImmutableSettings.settingsBuilder().put("gateway.type", "none")
            .put("index.number_of_shards", 1).put("index.number_of_replicas", 0).build();
    Node node = NodeBuilder.nodeBuilder().settings(settings).node();

    node.client().prepareIndex("_river", "test1", "_meta")
            .setSource(jsonBuilder().startObject().field("type", "rabbitmq").startObject("script_filter")
                    .field("script", "ctx.type1.field1 += param1").field("script_lang", "mvel")
                    .startObject("script_params").field("param1", 1).endObject().endObject().endObject())
            .execute().actionGet();

    ConnectionFactory cfconn = new ConnectionFactory();
    cfconn.setHost("localhost");
    cfconn.setPort(AMQP.PROTOCOL.PORT);
    Connection conn = cfconn.newConnection();

    Channel ch = conn.createChannel();
    ch.exchangeDeclare("elasticsearch", "direct", true);
    ch.queueDeclare("elasticsearch", true, false, false, null);

    String message = "{ \"index\" :  { \"_index\" : \"test\", \"_type\" : \"type1\", \"_id\" : \"1\" } }\n"
            + "{ \"type1\" :  { \"field1\" : 1 } }\n"
            + "{ \"delete\" : { \"_index\" : \"test\", \"_type\" : \"type1\", \"_id\" : \"2\" } }\n"
            + "{ \"create\" : { \"_index\" : \"test\", \"_type\" : \"type1\", \"_id\" : \"3\" } }\n"
            + "{ \"type1\" :  { \"field1\" : 2 } }" + "";

    ch.basicPublish("elasticsearch", "elasticsearch", null, message.getBytes());

    ch.close();
    conn.close();

    Thread.sleep(10000);
}

From source file:org.elasticsearch.river.rabbitmq.RabbitMQRiverTest.java

License:Apache License

public static void main(String[] args) throws Exception {

    Node node = NodeBuilder.nodeBuilder()
            .settings(ImmutableSettings.settingsBuilder().put("gateway.type", "none")).node();

    node.client().prepareIndex("_river", "test1", "_meta")
            .setSource(jsonBuilder().startObject().field("type", "rabbitmq").endObject()).execute().actionGet();

    ConnectionFactory cfconn = new ConnectionFactory();
    cfconn.setHost("localhost");
    cfconn.setPort(AMQP.PROTOCOL.PORT);
    Connection conn = cfconn.newConnection();

    Channel ch = conn.createChannel();
    ch.exchangeDeclare("elasticsearch", "direct", true);
    ch.queueDeclare("elasticsearch", true, false, false, null);

    String message = "{ \"index\" : { \"_index\" : \"test\", \"_type\" : \"type1\", \"_id\" : \"1\" } }\n"
            + "{ \"type1\" : { \"field1\" : \"value1\" } }\n"
            + "{ \"delete\" : { \"_index\" : \"test\", \"_type\" : \"type1\", \"_id\" : \"2\" } }\n"
            + "{ \"create\" : { \"_index\" : \"test\", \"_type\" : \"type1\", \"_id\" : \"1\" }\n"
            + "{ \"type1\" : { \"field1\" : \"value1\" } }";

    ch.basicPublish("elasticsearch", "elasticsearch", null, message.getBytes());

    ch.close();
    conn.close();

    Thread.sleep(100000);
}

From source file:org.elasticsearch.river.rabbitmq.RabbitMQRiverWithCustomActionsTest.java

License:Apache License

public static void main(String[] args) throws Exception {

    String rabbitHost = "rabbit-qa1";
    Node node = NodeBuilder.nodeBuilder().settings(
            ImmutableSettings.settingsBuilder().put("gateway.type", "none").put("cluster.name", "es-mqtest"))
            .node();

    node.client().prepareIndex("_river", "mqtest1", "_meta")
            .setSource(jsonBuilder().startObject().field("type", "rabbitmq").startObject("rabbitmq")
                    .field("host", rabbitHost).endObject().startObject("index").field("ordered", "true")
                    .field("warnOnBulkErrors", "false").endObject().endObject())
            .execute().actionGet();

    ConnectionFactory cfconn = new ConnectionFactory();
    cfconn.setHost(rabbitHost);
    cfconn.setPort(AMQP.PROTOCOL.PORT);
    Connection conn = cfconn.newConnection();

    Channel ch = conn.createChannel();
    ch.exchangeDeclare("elasticsearch", "direct", true);
    ch.queueDeclare("elasticsearch", true, false, false, null);

    Thread.sleep(5000);
    String message = "{ \"index\" : { \"_index\" : \"mqtest\", \"_type\" : \"type1\", \"_id\" : \"1\" , \"_version\" : \"2\"} }\n"
            + "{ \"type1\" : { \"field1\" : \"value1\" } }\n"
            + "{ \"delete\" : { \"_index\" : \"mqtest\", \"_type\" : \"type1\", \"_id\" : \"2\" } }\n"
            + "{ \"create\" : { \"_index\" : \"mqtest\", \"_type\" : \"type1\", \"_id\" : \"1\" , \"_version\" : \"2\"} }\n"
            + "{ \"create\" : { \"_index\" : \"mqtest\", \"_type\" : \"type1\", \"_id\" : \"1\" , \"_version\" : \"1\"} }\n"
            + "{ \"type1\" : { \"field1\" : \"value1\" } }";

    String messageWithWrongIndex = "{ \"index\" : { \"_index\" : \"mqtest\", \"_type\" : \"type1\", \"_id\" : \"1\" , \"_version\" : \"2\"} }\n"
            + "{ \"type1\" : { \"field1\" : \"value1\" } }\n"
            + "{ \"delete\" : { \"_index\" : \"This.Is.An.Invalid.Name\", \"_type\" : \"type1\", \"_id\" : \"2\" } }\n"
            + "{ \"create\" : { \"_index\" : \"mqtest\", \"_type\" : \"type1\", \"_id\" : \"1\" , \"_version\" : \"2\"} }\n"
            + "{ \"create\" : { \"_index\" : \"mqtest\", \"_type\" : \"type1\", \"_id\" : \"1\" , \"_version\" : \"1\"} }\n"
            + "{ \"type1\" : { \"field1\" : \"value1\" } }";

    String mapping = "{ \"type2\" : { \"properties\" : {\"data\" : {\"dynamic\" : true,\"properties\" : {\"myString\" : {\"type\" : \"string\",\"boost\" : 1.0,\"index\" : \"not_analyzed\",\"store\" : \"no\"},\"myText\" : {\"type\" : \"string\",\"include_in_all\" : true,\"index\" : \"analyzed\",\"store\" : \"no\"}}}}}}";
    String mappingMessage = "{ \"_index\" : \"mqtest\", \"_type\" : \"type2\"}\n" + mapping;
    String partialmappingMessage = "{ \"_index\" : \"mqtest\", \"_type\" : \"type2\"}";
    String deleteByQuery = "{ \"_index\" : \"mqtest\", \"_type\" : \"type1\", \"_queryString\" : \"_id:1\"}\n";

    ch.basicPublish("elasticsearch", "elasticsearch", null, message.getBytes());

    HashMap<String, Object> headers = new HashMap<String, Object>();
    headers.put("X-ES-Command", "mapping");
    BasicProperties props = MessageProperties.MINIMAL_BASIC;
    props = props.builder().headers(headers).build();
    ch.basicPublish("elasticsearch", "elasticsearch", props, mappingMessage.getBytes());
    headers.put("X-ES-Command", "deleteByQuery");
    props = props.builder().headers(headers).build();
    ch.basicPublish("elasticsearch", "elasticsearch", props, deleteByQuery.getBytes());
    Thread.sleep(5000);
    ch.basicPublish("elasticsearch", "elasticsearch", null, message.getBytes());
    ch.basicPublish("elasticsearch", "elasticsearch", null, messageWithWrongIndex.getBytes());
    ch.basicPublish("elasticsearch", "elasticsearch", null, message.getBytes());
    ch.basicPublish("elasticsearch", "elasticsearch", props, deleteByQuery.getBytes());
    Thread.sleep(5000);
    ch.close();
    conn.close();

    Thread.sleep(5000);
    Boolean exists = node.client().get(new GetRequest("mqtest").id("1")).get().isExists();
    ClusterState state = node.client().admin().cluster()
            .state(new ClusterStateRequest().filteredIndices("mqtest")).get().getState();
    ImmutableOpenMap<String, MappingMetaData> mappings = state.getMetaData().index("mqtest").mappings();
    MappingMetaData typeMap = mappings.get("type2");
    if (null != typeMap) {
        String gotMapping = typeMap.source().toString();
    }
}

From source file:org.elasticsearch.river.rabbitmq.RabbitMQTestRunner.java

License:Apache License

@Test
public void test_all_messages_are_consumed() throws Exception {

    // We try to connect to RabbitMQ.
    // If it's not launched, we don't fail the test but only log it
    try {
        logger.info(" --> connecting to rabbitmq");
        ConnectionFactory cfconn = new ConnectionFactory();
        cfconn.setHost("localhost");
        cfconn.setPort(AMQP.PROTOCOL.PORT);
        Connection conn = cfconn.newConnection();

        Channel ch = conn.createChannel();
        ch.exchangeDeclare("elasticsearch", "direct", true);
        AMQP.Queue.DeclareOk queue = ch.queueDeclare("elasticsearch", true, false, false, null);

        // We purge the queue in case anything is left in it
        ch.queuePurge("elasticsearch");

        logger.info(" --> sending messages");
        pushMessages(ch);

        logger.info(" --> create river");
        createIndex(INDEX);

        index("_river", "test", "_meta", river());

        // At some point we need to check whether the river has consumed everything
        int steps = timeout();
        long count = 0;

        while (true) {
            // We wait for one second
            Thread.sleep(1000);

            CountResponse response = client().prepareCount("test").execute().actionGet();
            count = response.getCount();

            steps--;
            if (steps < 0 || count == expectedDocuments()) {
                break;
            }
        }

        ch.close();
        conn.close();

        postInjectionTests();
    } catch (ConnectException e) {
        throw new Exception("RabbitMQ service is not launched on localhost:" + AMQP.PROTOCOL.PORT
                + ". Can not start Integration test. " + "Launch `rabbitmq-server`.", e);
    }
}

From source file:org.graylog2.inputs.amqp.AMQPConsumer.java

License:Open Source License

private Channel connect() throws IOException {
    ConnectionFactory factory = new ConnectionFactory();
    factory.setUsername(server.getConfiguration().getAmqpUsername());
    factory.setPassword(server.getConfiguration().getAmqpPassword());
    factory.setVirtualHost(server.getConfiguration().getAmqpVirtualhost());
    factory.setHost(server.getConfiguration().getAmqpHost());
    factory.setPort(server.getConfiguration().getAmqpPort());

    connection = factory.newConnection(Executors.newCachedThreadPool(
            new ThreadFactoryBuilder().setNameFormat("amqp-consumer-" + queueConfig.getId() + "-%d").build()));

    return connection.createChannel();
}