List of usage examples for com.rabbitmq.client Delivery.getBody()
public byte[] getBody()
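For orientation, here is a minimal, self-contained sketch of how getBody() is typically called before the project-specific examples below. It is an illustrative sketch, not taken from any of the listed projects: it assumes a broker running on localhost, a throwaway queue named "example-queue" (hypothetical name), and the pre-5.x amqp-client QueueingConsumer API that the examples below also use.

import java.nio.charset.StandardCharsets;

import com.rabbitmq.client.Channel;
import com.rabbitmq.client.Connection;
import com.rabbitmq.client.ConnectionFactory;
import com.rabbitmq.client.QueueingConsumer;
import com.rabbitmq.client.QueueingConsumer.Delivery;

public class GetBodyExample {
    public static void main(String[] args) throws Exception {
        ConnectionFactory factory = new ConnectionFactory();
        factory.setHost("localhost"); // assumed broker location
        Connection connection = factory.newConnection();
        Channel channel = connection.createChannel();

        // "example-queue" is a hypothetical, auto-deleted queue used only for illustration.
        channel.queueDeclare("example-queue", false, false, true, null);
        QueueingConsumer consumer = new QueueingConsumer(channel);
        channel.basicConsume("example-queue", true, consumer);

        // Block until a message arrives, then read its payload as raw bytes.
        Delivery delivery = consumer.nextDelivery();
        byte[] body = delivery.getBody();
        System.out.println("Received " + body.length + " bytes: "
                + new String(body, StandardCharsets.UTF_8));

        channel.close();
        connection.close();
    }
}

The payload is always a raw byte array; how it is decoded (UTF-8 string, Thrift, protobuf, JSON, compressed binary) is entirely up to the application, which is exactly what the examples below demonstrate.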
From source file:at.ac.tuwien.dsg.cloud.utilities.messaging.lightweight.rabbitMq.channel.ReceivingChannelTest.java
@Test
public void testLazyBindType() throws Exception {
    Discovery discovery = Mockito.mock(Discovery.class);
    Serializer serializer = Mockito.mock(Serializer.class);
    RabbitMqFactory rabbitMqFactory = Mockito.mock(RabbitMqFactory.class);
    ConnectionFactory factory = Mockito.mock(ConnectionFactory.class);
    Connection connection = Mockito.mock(Connection.class);
    Channel channel = Mockito.mock(Channel.class);
    DeclareOk declareOk = Mockito.mock(DeclareOk.class);
    QueueingConsumer consumer = Mockito.mock(QueueingConsumer.class);
    Delivery delivery = Mockito.mock(Delivery.class);
    RabbitMqMessage msg = Mockito.mock(RabbitMqMessage.class);
    String expectedQueue = "testQueue";

    Mockito.when(discovery.discoverHost()).thenReturn("localhost");
    Mockito.when(rabbitMqFactory.getConnectionFactory()).thenReturn(factory);
    Mockito.when(factory.newConnection()).thenReturn(connection);
    Mockito.when(rabbitMqFactory.getQueueingConsumer(channel)).thenReturn(consumer);
    Mockito.when(connection.createChannel()).thenReturn(channel);
    Mockito.when(channel.queueDeclare()).thenReturn(declareOk);
    Mockito.when(declareOk.getQueue()).thenReturn(expectedQueue);
    Mockito.when(consumer.getChannel()).thenReturn(channel);
    Mockito.when(consumer.nextDelivery()).thenReturn(delivery);
    Mockito.when(delivery.getBody()).thenReturn("test".getBytes());
    Mockito.when(serializer.deserilize("test".getBytes(), RabbitMqMessage.class)).thenReturn(msg);

    ReceivingChannel subject = new ReceivingChannel(discovery, serializer, rabbitMqFactory);
    String expectedRoutingKey1 = "testType1";
    String expectedRoutingKey2 = "testType2";
    subject.bindType(expectedRoutingKey1);
    subject.bindType(expectedRoutingKey2);

    // due to lazy startup, binding should not yet have been triggered
    Mockito.verify(channel, Mockito.never()).queueBind(expectedQueue, ARabbitChannel.EXCHANGE_NAME, expectedRoutingKey1);
    Mockito.verify(channel, Mockito.never()).queueBind(expectedQueue, ARabbitChannel.EXCHANGE_NAME, expectedRoutingKey2);

    // this call should trigger the binding to the queues
    RabbitMqMessage msgActual = subject.getDelivery();
    Mockito.verify(channel, Mockito.times(1)).queueBind(expectedQueue, ARabbitChannel.EXCHANGE_NAME, expectedRoutingKey1);
    Mockito.verify(channel, Mockito.times(1)).queueBind(expectedQueue, ARabbitChannel.EXCHANGE_NAME, expectedRoutingKey2);
    Assert.assertEquals(msg, msgActual);
}
From source file:bankreplyaggrigator.Aggregator.java
private static void handleMessage() throws IOException, InterruptedException {
    while (true) {
        Delivery delivery = consumer.nextDelivery();
        String message = new String(delivery.getBody());
        String correlationID = delivery.getProperties().getCorrelationId();
        System.out.println("Message received: " + message);

        Aggregate aggregate = (Aggregate) activeAggregates.get(correlationID);
        if (aggregate == null) {
            aggregate = new LoanAggregate(new BankLoan());
            aggregate.addMessage(message);
            activeAggregates.put(correlationID, aggregate);
        } else {
            aggregate.addMessage(message);
        }
        publishResult(aggregate, correlationID);
    }
}
From source file:com.hopped.running.rabbitmq.rpc.ARPCServer.java
License:Open Source License
/**
 * @param delivery
 * @return
 */
public byte[] processRequest(Delivery delivery) {
    try {
        RPCMessage invoker = RPCMessage.parseFrom(delivery.getBody());
        String name = invoker.getMethod();
        Object request = invoker.getRequestObject();
        Method method = protocol.getMethod(name, request.getClass());
        Object result = method.invoke(instance, request);
        return objectToByteArray(result);
    } catch (Exception e) {
        e.printStackTrace();
        return null;
    }
}
From source file:com.nesscomputing.event.amqp.AmqpEventReceiver.java
License:Apache License
@Override
public boolean withDelivery(final Delivery delivery) throws IOException {
    if (delivery != null) {
        try {
            final NessEvent event = mapper.readValue(delivery.getBody(), NessEvent.class);
            eventsReceived.incrementAndGet();
            eventDispatcher.dispatch(event);
        } catch (Exception e) {
            Throwables.propagateIfInstanceOf(e, IOException.class);
            // Make sure that we catch all possible exceptions here that could
            // be thrown by the deserializer. Otherwise, e.g. an IllegalArgumentException might
            // kill the receiver thread.
            LOG.warnDebug(e, "Could not parse message '%s', ignoring!", delivery);
        }
    }
    return true;
}
From source file:com.surgeplay.visage.master.VisageMaster.java
License:Open Source License
@Override
public void run() {
    try {
        Log.setLog(new LogShim(Visage.log));
        long total = Runtime.getRuntime().totalMemory();
        long max = Runtime.getRuntime().maxMemory();
        if (Visage.debug)
            Visage.log.finer("Current heap size: " + humanReadableByteCount(total, false));
        if (Visage.debug)
            Visage.log.finer("Max heap size: " + humanReadableByteCount(max, false));
        if (total < max) {
            Visage.log.warning(
                    "You have set your minimum heap size (Xms) lower than the maximum heap size (Xmx) - this can cause GC thrashing. It is strongly recommended to set them both to the same value.");
        }
        if (max < (1000 * 1000 * 1000)) {
            Visage.log.warning(
                    "The heap size (Xmx) is less than one gigabyte; it is recommended to run Visage with a gigabyte or more. Use -Xms1G and -Xmx1G to do this.");
        }

        Visage.log.info("Setting up Jetty");
        Server server = new Server(
                new InetSocketAddress(config.getString("http.bind"), config.getInt("http.port")));

        List<String> expose = config.getStringList("expose");
        String poweredBy;
        if (expose.contains("server")) {
            if (expose.contains("version")) {
                poweredBy = "Visage v" + Visage.VERSION;
            } else {
                poweredBy = "Visage";
            }
        } else {
            poweredBy = null;
        }

        ResourceHandler resource = new ResourceHandler();
        resource.setResourceBase(config.getString("http.static"));
        resource.setDirectoriesListed(false);
        resource.setWelcomeFiles(new String[] { "index.html" });
        resource.setHandler(new VisageHandler(this));

        if (!"/dev/null".equals(config.getString("log"))) {
            new File(config.getString("log")).getParentFile().mkdirs();
            server.setRequestLog(new AsyncNCSARequestLog(config.getString("log")));
        }
        GzipHandler gzip = new GzipHandler();
        gzip.setHandler(new HeaderHandler("X-Powered-By", poweredBy, resource));
        server.setHandler(gzip);

        String redisHost = config.getString("redis.host");
        int redisPort = config.getInt("redis.port");
        Visage.log.info("Connecting to Redis at " + redisHost + ":" + redisPort);
        resolverNum = config.getInt("redis.resolver-db");
        skinNum = config.getInt("redis.skin-db");
        JedisPoolConfig jpc = new JedisPoolConfig();
        jpc.setMaxIdle(config.getInt("redis.max-idle-connections"));
        jpc.setMaxTotal(config.getInt("redis.max-total-connections"));
        jpc.setMinIdle(config.getInt("redis.min-idle-connections"));
        if (config.hasPath("redis.password")) {
            password = config.getString("redis.password");
        }
        pool = new JedisPool(jpc, redisHost, redisPort);

        Visage.log.info("Connecting to RabbitMQ at " + config.getString("rabbitmq.host") + ":"
                + config.getInt("rabbitmq.port"));
        ConnectionFactory factory = new ConnectionFactory();
        factory.setHost(config.getString("rabbitmq.host"));
        factory.setPort(config.getInt("rabbitmq.port"));
        factory.setRequestedHeartbeat(10);
        if (config.hasPath("rabbitmq.user")) {
            factory.setUsername(config.getString("rabbitmq.user"));
            factory.setPassword(config.getString("rabbitmq.password"));
        }
        String queue = config.getString("rabbitmq.queue");

        Closer closer = Closer.create();
        steve = ByteStreams.toByteArray(closer.register(ClassLoader.getSystemResourceAsStream("steve.png")));
        alex = ByteStreams.toByteArray(closer.register(ClassLoader.getSystemResourceAsStream("alex.png")));
        closer.close();

        conn = factory.newConnection();
        channel = conn.createChannel();
        if (Visage.debug)
            Visage.log.finer("Setting up queue '" + queue + "'");
        channel.queueDeclare(queue, false, false, true, null);
        channel.basicQos(1);

        if (Visage.debug)
            Visage.log.finer("Setting up reply queue");
        replyQueue = channel.queueDeclare().getQueue();
        consumer = new QueueingConsumer(channel);
        channel.basicConsume(replyQueue, consumer);

        if (config.getBoolean("slave.enable")) {
            Visage.log.info("Starting fallback slave");
            fallback = new VisageSlave(
                    config.getConfig("slave").withValue("rabbitmq", config.getValue("rabbitmq")));
            fallback.start();
        }
        Visage.log.info("Starting Jetty");
        server.start();

        Visage.log.info("Listening for finished jobs");
        try {
            while (run) {
                Delivery delivery = consumer.nextDelivery();
                if (Visage.trace)
                    Visage.log.finest("Got delivery");
                try {
                    String corrId = delivery.getProperties().getCorrelationId();
                    if (queuedJobs.containsKey(corrId)) {
                        if (Visage.trace)
                            Visage.log.finest("Valid");
                        responses.put(corrId, delivery.getBody());
                        Runnable run = queuedJobs.get(corrId);
                        queuedJobs.remove(corrId);
                        if (Visage.trace)
                            Visage.log.finest("Removed from queue");
                        run.run();
                        if (Visage.trace)
                            Visage.log.finest("Ran runnable");
                        channel.basicAck(delivery.getEnvelope().getDeliveryTag(), false);
                        if (Visage.trace)
                            Visage.log.finest("Ack'd");
                    } else {
                        Visage.log.warning("Unknown correlation ID?");
                        channel.basicNack(delivery.getEnvelope().getDeliveryTag(), false, false);
                    }
                } catch (Exception e) {
                    Visage.log.log(Level.WARNING,
                            "An unexpected error occured while attempting to process a response.", e);
                }
            }
        } catch (InterruptedException e) {
        } catch (Exception e) {
            Visage.log.log(Level.SEVERE, "An unexpected error occured in the master run loop.", e);
            System.exit(2);
        }
        try {
            Visage.log.info("Shutting down master");
            server.stop();
            pool.destroy();
            conn.close(5000);
        } catch (Exception e) {
            Visage.log.log(Level.SEVERE, "A fatal error has occurred while shutting down the master.", e);
        }
    } catch (Exception e) {
        Visage.log.log(Level.SEVERE, "An unexpected error occured while initializing the master.", e);
        System.exit(1);
    }
}
From source file:com.surgeplay.visage.slave.RenderThread.java
License:Open Source License
private void processDelivery(Delivery delivery) throws Exception {
    BasicProperties props = delivery.getProperties();
    BasicProperties replyProps = new BasicProperties.Builder().correlationId(props.getCorrelationId()).build();
    DataInputStream data = new DataInputStream(
            new InflaterInputStream(new ByteArrayInputStream(delivery.getBody())));
    RenderMode mode = RenderMode.values()[data.readUnsignedByte()];
    int width = data.readUnsignedShort();
    int height = data.readUnsignedShort();
    int supersampling = data.readUnsignedByte();
    GameProfile profile = Profiles.readGameProfile(data);
    Map<String, String[]> params = Maps.newHashMap();
    int len = data.readUnsignedShort();
    for (int i = 0; i < len; i++) {
        String key = data.readUTF();
        String[] val = new String[data.readUnsignedByte()];
        for (int v = 0; v < val.length; v++) {
            val[v] = data.readUTF();
        }
        params.put(key, val);
    }
    byte[] skinData = new byte[data.readInt()];
    data.readFully(skinData);
    BufferedImage skinRaw = new PngImage().read(new ByteArrayInputStream(skinData), false);
    BufferedImage skin = Images.toARGB(skinRaw);
    Visage.log.info("Received a job to render a " + width + "x" + height + " " + mode.name().toLowerCase()
            + " (" + supersampling + "x supersampling) for " + (profile == null ? "null" : profile.getName()));

    byte[] pngBys = draw(mode, width, height, supersampling, profile, skin, params);
    if (Visage.trace)
        Visage.log.finest("Got png bytes");
    parent.channel.basicPublish("", props.getReplyTo(), replyProps, buildResponse(0, pngBys));
    if (Visage.trace)
        Visage.log.finest("Published response");
    parent.channel.basicAck(delivery.getEnvelope().getDeliveryTag(), false);
    if (Visage.trace)
        Visage.log.finest("Ack'd message");
}
From source file:it.polimi.hegira.adapters.cassandra.Cassandra.java
License:Apache License
@Override
protected AbstractDatabase fromMyModel(Metamodel mm) {
    log.debug(Thread.currentThread().getName() + " Cassandra consumer started ");
    // Thrift deserializer
    TDeserializer deserializer = new TDeserializer(new TBinaryProtocol.Factory());
    // retrieve the thread number
    int thread_id = 0;
    if (TWTs_NO != 0) {
        thread_id = (int) (Thread.currentThread().getId() % TWTs_NO);
    }
    // instantiate the Cassandra transformer;
    // the consistency level is not needed: entities are inserted with eventual consistency
    CassandraTransformer transformer = new CassandraTransformer();
    // instantiate the TablesManager
    TablesManager tablesManager = TablesManager.getTablesManager();
    while (true) {
        log.debug(Thread.currentThread().getName() + " Extracting from the taskQueue" + thread_id
                + " TWTs_NO: " + TWTs_NO);
        try {
            // extract from the task queue
            Delivery delivery = taskQueues.get(thread_id).getConsumer().nextDelivery();
            if (delivery != null) {
                // deserialize and retrieve the metamodel
                Metamodel metaModel = new Metamodel();
                deserializer.deserialize(metaModel, delivery.getBody());
                // retrieve the Cassandra model
                CassandraModel cassandraModel = transformer.fromMyModel(metaModel);
                // retrieve the table and try to perform the insert
                try {
                    tablesManager.getTable(cassandraModel.getTable()).insert(cassandraModel);
                } catch (ConnectException ex) {
                    log.error(Thread.currentThread().getName() + " - Not able to connect to Cassandra", ex);
                    // nack
                    taskQueues.get(thread_id).sendNack(delivery);
                    log.info("Sending Nack!! for entity(/ies)");
                } catch (ClassNotFoundException ex) {
                    log.error(Thread.currentThread().getName() + " - Error during the insertion -", ex);
                    // nack
                    taskQueues.get(thread_id).sendNack(delivery);
                    log.info("Sending Nack!! for entity(/ies)");
                }
                // send ack
                taskQueues.get(thread_id).sendAck(delivery);
            } else {
                log.debug(Thread.currentThread().getName() + " - The queue "
                        + TaskQueue.getDefaultTaskQueueName() + " is empty");
                return null;
            }
        } catch (ShutdownSignalException | ConsumerCancelledException | InterruptedException ex) {
            log.error(Thread.currentThread().getName() + " - Cannot read next delivery from the queue "
                    + TaskQueue.getDefaultTaskQueueName(), ex);
        } catch (TException ex) {
            log.error(Thread.currentThread().getName() + " - Error deserializing message ", ex);
        } catch (QueueException ex) {
            log.error(Thread.currentThread().getName() + " - Error sending an acknowledgment to the queue "
                    + TaskQueue.getDefaultTaskQueueName(), ex);
        }
    }
}
From source file:it.polimi.hegira.adapters.datastore.Datastore.java
License:Apache License
@Override
protected AbstractDatabase fromMyModel(Metamodel mm) {
    // TWC
    //log.debug(Thread.currentThread().getName()+" Hi I'm the GAE consumer!");
    List<Entity> batch = new ArrayList<Entity>();
    TDeserializer deserializer = new TDeserializer(new TBinaryProtocol.Factory());
    long k = 0;
    int thread_id = 0;
    if (TWTs_NO != 0)
        thread_id = (int) (Thread.currentThread().getId() % TWTs_NO);
    while (true) {
        log.debug(Thread.currentThread().getName() + " Extracting from the taskQueue" + thread_id
                + " TWTs_NO: " + TWTs_NO);
        try {
            Delivery delivery = taskQueues.get(thread_id).getConsumer().nextDelivery(2000);
            if (delivery != null) {
                Metamodel myModel = new Metamodel();
                deserializer.deserialize(myModel, delivery.getBody());
                DatastoreTransformer dt = new DatastoreTransformer(connectionList.get(thread_id).ds);
                DatastoreModel fromMyModel = dt.fromMyModel(myModel);
                batch.add(fromMyModel.getEntity());
                batch.add(fromMyModel.getFictitiousEntity());
                taskQueues.get(thread_id).sendAck(delivery.getEnvelope().getDeliveryTag());
                k++;
                if (k % 100 == 0) {
                    putBatch(batch);
                    log.debug(Thread.currentThread().getName() + " ===>100 entities. putting normal batch");
                    batch = new ArrayList<Entity>();
                } else {
                    if (k > 0) {
                        //log.debug(Thread.currentThread().getName()+" ===>Nothing in the queue for me!");
                        putBatch(batch);
                        log.debug(Thread.currentThread().getName()
                                + " ===>less than 100 entities. putting short batch");
                        batch = new ArrayList<Entity>();
                        k = 0;
                    }
                }
            }
        } catch (ShutdownSignalException | ConsumerCancelledException | InterruptedException e) {
            log.error("Error consuming from the queue " + TaskQueue.getDefaultTaskQueueName(), e);
        } catch (TException e) {
            log.error("Error deserializing", e);
        } catch (QueueException e) {
            log.error("Couldn't send the ack to the queue " + TaskQueue.getDefaultTaskQueueName(), e);
        }
    }
}
From source file:it.polimi.hegira.adapters.tables.Tables.java
License:Apache License
@Override
protected AbstractDatabase fromMyModel(Metamodel mm) {
    // TWC
    log.debug(Thread.currentThread().getName() + " Hi I'm the AZURE consumer!");
    // Instantiate the Thrift deserializer
    TDeserializer deserializer = new TDeserializer(new TBinaryProtocol.Factory());
    int thread_id = (int) (Thread.currentThread().getId() % TWTs_NO);
    while (true) {
        try {
            log.debug(Thread.currentThread().getName() + " - getting taskQueue with id: " + thread_id);
            Delivery delivery = taskQueues.get(thread_id).getConsumer().nextDelivery();
            if (delivery != null) {
                Metamodel myModel = new Metamodel();
                deserializer.deserialize(myModel, delivery.getBody());
                AzureTablesTransformer att = new AzureTablesTransformer();
                AzureTablesModel fromMyModel = att.fromMyModel(myModel);
                List<DynamicTableEntity> entities = fromMyModel.getEntities();
                String tableName = fromMyModel.getTableName();
                CloudTable tbl = createTable(tableName);
                if (tbl == null) {
                    taskQueues.get(thread_id).sendNack(delivery);
                    log.info("Sending Nack!! for entity(/ies)");
                    return null;
                }
                for (DynamicTableEntity entity : entities) {
                    TableResult ie = insertEntity(tableName, entity);
                    if (ie == null) {
                        taskQueues.get(thread_id).sendNack(delivery);
                        log.info("Sending Nack!! for entity(/ies)");
                        return null;
                    }
                    count++;
                    if (count % 2000 == 0)
                        log.debug(Thread.currentThread().getName() + " Inserted " + count + " entities");
                }
                taskQueues.get(thread_id).sendAck(delivery);
            } else {
                log.debug(Thread.currentThread().getName() + " - The queue "
                        + TaskQueue.getDefaultTaskQueueName() + " is empty");
            }
        } catch (ShutdownSignalException | ConsumerCancelledException | InterruptedException e) {
            log.error(Thread.currentThread().getName() + " - Cannot read next delivery from the queue "
                    + TaskQueue.getDefaultTaskQueueName(), e);
        } catch (TException e) {
            log.error(Thread.currentThread().getName() + " - Error deserializing message ", e);
        } catch (QueueException e) {
            log.error(Thread.currentThread().getName() + " - Error sending an acknowledgment to the queue "
                    + TaskQueue.getDefaultTaskQueueName(), e);
        } catch (URISyntaxException e) {
            log.error(Thread.currentThread().getName() + " - Error operating on Azure Tables ", e);
        } catch (StorageException e) {
            log.error(Thread.currentThread().getName() + " - Error storing data on Azure Tables ", e);
        }
    }
}
From source file:it.polimi.hegira.adapters.tables.Tables.java
License:Apache License
@Override
protected AbstractDatabase fromMyModelPartitioned(Metamodel mm) {
    // TWC
    //log.debug(Thread.currentThread().getName()+" Hi I'm the AZURE consumer!");
    // Instantiate the Thrift deserializer
    TDeserializer deserializer = new TDeserializer(new TBinaryProtocol.Factory());
    int thread_id = Integer.parseInt(Thread.currentThread().getName());
    while (true) {
        try {
            //log.debug(Thread.currentThread().getName() +
            //        " - getting taskQueue with id: "+thread_id);
            Delivery delivery = taskQueues.get(thread_id).getConsumer().nextDelivery();
            if (delivery != null) {
                Metamodel myModel = new Metamodel();
                deserializer.deserialize(myModel, delivery.getBody());
                AzureTablesTransformer att = new AzureTablesTransformer();
                AzureTablesModel fromMyModel = att.fromMyModel(myModel);
                List<DynamicTableEntity> entities = fromMyModel.getEntities();
                String tableName = fromMyModel.getTableName();
                CloudTable tbl = createTable(tableName);
                if (tbl == null) {
                    taskQueues.get(thread_id).sendNack(delivery);
                    log.info(Thread.currentThread().getName() + " - Sending Nack!! for entity(/ies)");
                    //return null;
                }
                for (DynamicTableEntity entity : entities) {
                    TableResult ie = insertEntity(tableName, entity);
                    if (ie == null) {
                        //taskQueues.get(thread_id).sendNack(delivery);
                        //log.info(Thread.currentThread().getName()+" - Sending Nack!! for entity(/ies)");
                        //return null;
                    }
                    count++;
                    //if(count%100==0)
                    //log.debug(Thread.currentThread().getName()+" Inserted "+count+" entities");
                }
                taskQueues.get(thread_id).sendAck(delivery);
                // incrementing the VDPsCounters
                updateVDPsCounters(myModel);
            } else {
                log.debug(Thread.currentThread().getName() + " - The queue "
                        + TaskQueue.getDefaultTaskQueueName() + " is empty");
            }
        } catch (ShutdownSignalException | ConsumerCancelledException | InterruptedException e) {
            log.error(Thread.currentThread().getName() + " - Cannot read next delivery from the queue "
                    + TaskQueue.getDefaultTaskQueueName(), e);
        } catch (TException e) {
            log.error(Thread.currentThread().getName() + " - Error deserializing message ", e);
        } catch (QueueException e) {
            log.error(Thread.currentThread().getName() + " - Error sending an acknowledgment to the queue "
                    + TaskQueue.getDefaultTaskQueueName(), e);
        } catch (URISyntaxException e) {
            log.error(Thread.currentThread().getName() + " - Error operating on Azure Tables ", e);
        } catch (StorageException e) {
            log.error(Thread.currentThread().getName() + " - Error storing data on Azure Tables ", e);
        }
    }
}