List of usage examples for io.netty.buffer Unpooled copiedBuffer
public static ByteBuf copiedBuffer(ByteBuffer... buffers)
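The examples below exercise several overloads, mostly copiedBuffer(byte[]) and copiedBuffer(ByteBuf). For the ByteBuffer varargs overload named above, a minimal standalone sketch looks like this (class name and string contents are illustrative, not taken from any of the projects below):

import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

public class CopiedBufferVarargsExample {
    public static void main(String[] args) {
        // two NIO buffers whose remaining bytes are merged into a single copy
        ByteBuffer first = ByteBuffer.wrap("Hello, ".getBytes(StandardCharsets.UTF_8));
        ByteBuffer second = ByteBuffer.wrap("Netty".getBytes(StandardCharsets.UTF_8));

        // copiedBuffer copies the content, so mutating 'first' or 'second' afterwards
        // does not affect 'merged'
        ByteBuf merged = Unpooled.copiedBuffer(first, second);
        System.out.println(merged.toString(StandardCharsets.UTF_8)); // prints "Hello, Netty"

        merged.release(); // release the copy when done
    }
}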
From source file:com.microsoft.Malmo.MissionHandlers.RewardForCollectingItemImplementation.java
License:Open Source License
@Override
public void onMessage(MalmoMessageType messageType, Map<String, String> data) {
    String bufstring = data.get("message");
    ByteBuf buf = Unpooled.copiedBuffer(DatatypeConverter.parseBase64Binary(bufstring));
    ItemStack itemStack = ByteBufUtils.readItemStack(buf);
    if (itemStack != null && itemStack.getItem() != null) {
        accumulateReward(this.params.getDimension(), itemStack);
    } else {
        System.out.println("Error - couldn't understand the itemstack we received.");
    }
}
From source file:com.mobius.software.mqtt.parser.test.MqttJsonTest.java
License:Open Source License
@BeforeClass
public static void init() {
    parser = new MQJsonParser();
    topic = new Topic(new Text("lookup"), QoS.AT_LEAST_ONCE);
    content = "John: i'll be back".getBytes();
    buffContent = Unpooled.copiedBuffer(content);
    topics = new Topic[] { topic, topic, topic };
    topicsName = new Text[] { new Text("some topic"), new Text("new topic"), new Text("my topic") };
    returnCodes = new ArrayList<SubackCode>();
    returnCodes.add(SubackCode.ACCEPTED_QOS0);
    returnCodes.add(SubackCode.ACCEPTED_QOS2);
    returnCodes.add(SubackCode.ACCEPTED_QOS1);
}
From source file:com.mobius.software.mqtt.parser.test.TestPublish.java
License:Open Source License
@Test
public void testPositiveByteContent() throws UnsupportedEncodingException, MalformedMessageException {
    ByteBuf buf = MQParser.encode(expected);
    MQMessage actual = MQParser.decode(Unpooled.copiedBuffer(buf));
    assertTrue("Invalid binary content", ByteBufUtil.equals(buf, MQParser.encode(actual)));
}
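The round-trip check above relies on copiedBuffer(ByteBuf) producing an independent copy of the readable bytes, so decoding the copy does not move the reader index of the original buffer. A minimal standalone sketch of that behaviour (names and payload are illustrative):

import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import java.nio.charset.StandardCharsets;

public class CopiedBufferByteBufExample {
    public static void main(String[] args) {
        ByteBuf original = Unpooled.copiedBuffer("payload".getBytes(StandardCharsets.UTF_8));

        // deep copy of the readable bytes of 'original'
        ByteBuf copy = Unpooled.copiedBuffer(original);

        // consuming the copy leaves the original untouched
        copy.skipBytes(copy.readableBytes());
        System.out.println(original.readableBytes()); // still 7
        System.out.println(copy.readableBytes());     // 0

        original.release();
        copy.release();
    }
}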
From source file:com.mylearn.netty.sample.websocket.client.WebSocketClient.java
License:Apache License
public void run() throws Exception {
    EventLoopGroup group = new NioEventLoopGroup();
    try {
        Bootstrap b = new Bootstrap();
        String protocol = uri.getScheme();
        if (!"ws".equals(protocol)) {
            throw new IllegalArgumentException("Unsupported protocol: " + protocol);
        }
        HttpHeaders customHeaders = new DefaultHttpHeaders();
        customHeaders.add("MyHeader", "MyValue");

        // Connect with V13 (RFC 6455 aka HyBi-17). You can change it to V08 or V00.
        // If you change it to V00, ping is not supported and remember to change
        // HttpResponseDecoder to WebSocketHttpResponseDecoder in the pipeline.
        final WebSocketClientHandler handler = new WebSocketClientHandler(WebSocketClientHandshakerFactory
                .newHandshaker(uri, WebSocketVersion.V13, null, false, customHeaders));

        b.group(group).channel(NioSocketChannel.class).handler(new ChannelInitializer<SocketChannel>() {
            @Override
            public void initChannel(SocketChannel ch) throws Exception {
                ChannelPipeline pipeline = ch.pipeline();
                pipeline.addLast("http-codec", new HttpClientCodec());
                pipeline.addLast("aggregator", new HttpObjectAggregator(8192));
                pipeline.addLast("ws-handler", handler);
            }
        });

        System.out.println("WebSocket Client connecting");
        Channel ch = b.connect(uri.getHost(), uri.getPort()).sync().channel();
        handler.handshakeFuture().sync();

        // Send 10 messages and wait for responses
        System.out.println("WebSocket Client sending message");
        for (int i = 0; i < 10; i++) {
            ch.writeAndFlush(new TextWebSocketFrame("Message #" + i));
        }

        // Ping
        System.out.println("WebSocket Client sending ping");
        ch.writeAndFlush(new PingWebSocketFrame(Unpooled.copiedBuffer(new byte[] { 1, 2, 3, 4, 5, 6 })));

        // Close
        System.out.println("WebSocket Client sending close");
        ch.writeAndFlush(new CloseWebSocketFrame());

        // WebSocketClientHandler will close the connection when the server
        // responds to the CloseWebSocketFrame.
        ch.closeFuture().sync();
    } finally {
        group.shutdownGracefully();
    }
}
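The ping frame above carries a copied buffer, so the source byte array could be reused or mutated after the write without affecting the queued frame; Unpooled.wrappedBuffer, by contrast, shares the backing array. A small standalone sketch of that difference (class name and values are illustrative):

import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;

public class CopiedVersusWrappedExample {
    public static void main(String[] args) {
        byte[] payload = { 1, 2, 3, 4, 5, 6 };

        ByteBuf copied = Unpooled.copiedBuffer(payload);   // independent copy of the array
        ByteBuf wrapped = Unpooled.wrappedBuffer(payload); // shares the array

        payload[0] = 99;

        System.out.println(copied.getByte(0));  // still 1
        System.out.println(wrapped.getByte(0)); // 99

        copied.release();
        wrapped.release();
    }
}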
From source file:com.net.ServerHandler.java
License:Apache License
@Override
public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
    System.out.println("nodedataService");
    ByteBuf buf = (ByteBuf) msg;
    byte[] req = new byte[buf.readableBytes()];
    buf.readBytes(req);
    System.out.println(
            DateUtils.getDateTime() + "++" + req[0] + "+" + req[1] + (req[0] == -21 && req[1] == -112));
    NetUtil.print16String(req);
    if (req[0] == -21 && req[1] == 96) { // "EB 60" header: frame carrying the segment and node id
        byte[] segment = new byte[4];
        segment[0] = 0;
        segment[1] = 0;
        segment[2] = req[2];
        segment[3] = req[3];
        byte[] id = new byte[4];
        id[0] = 0;
        id[1] = 0;
        id[2] = req[4];
        id[3] = req[5];
        String nodeid = NetUtil.bytesToInt(segment) + "_" + NetUtil.bytesToInt(id);
        this.setNodeId(nodeid);
        // look up the payload queued for this node and echo it back as a copied buffer
        byte[] bb = (byte[]) JedisUtils.getObject(ZTConstants.jedisKey.send_data_byte + nodeid);
        ByteBuf resp = Unpooled.copiedBuffer(bb);
        ctx.write(resp);
    } else if (req[0] == -21 && req[1] == -112) { // "EB 90" header: data frame
        byte[] segment = new byte[4];
        segment[3] = 0;
        segment[2] = 0;
        segment[1] = req[2];
        segment[0] = req[3];
        byte[] id = new byte[4];
        id[3] = 0;
        id[2] = 0;
        id[1] = req[40];
        id[0] = req[41];
        // identifier of the node that sent the data
        String data_nodeid = NetUtil.bytesToInt(segment) + "_" + NetUtil.bytesToInt(id);
        redisTemplate.convertAndSend(ZTConstants.channel.reciveData, req);
    } else if (req[0] == 81) { // 0x51 header: report success to the data log channel
        redisTemplate.convertAndSend(ZTConstants.channel.dataLog,
                ZTConstants.resultCode.success + ":" + nodeId);
    }
}
From source file:com.netflix.client.netty.http.NettyClientTest.java
License:Apache License
@Test
public void testPostWithByteBuf() throws Exception {
    Person myPerson = new Person("netty", 5);
    ObjectMapper mapper = new ObjectMapper();
    byte[] raw = mapper.writeValueAsBytes(myPerson);
    ByteBuf buffer = Unpooled.copiedBuffer(raw);
    HttpClientRequest<ByteBuf> request = HttpClientRequest.createPost(SERVICE_URI + "testAsync/person")
            .withHeader("Content-type", "application/json")
            .withHeader("Content-length", String.valueOf(raw.length))
            .withContent(buffer);
    NettyHttpClient<ByteBuf, ByteBuf> observableClient = (NettyHttpClient<ByteBuf, ByteBuf>) RibbonTransport
            .newHttpClient(DefaultClientConfigImpl.getClientConfigWithDefaultValues()
                    .set(CommonClientConfigKey.ReadTimeout, 10000));
    Observable<HttpClientResponse<ByteBuf>> response = observableClient.submit(request);
    Person person = getPersonObservable(response).toBlocking().single();
    assertEquals(myPerson, person);
}
From source file:com.netflix.ribbon.transport.netty.http.NettyClientTest.java
License:Apache License
@Test
public void testPostWithByteBuf() throws Exception {
    Person myPerson = new Person("netty", 5);
    ObjectMapper mapper = new ObjectMapper();
    byte[] raw = mapper.writeValueAsBytes(myPerson);
    ByteBuf buffer = Unpooled.copiedBuffer(raw);
    HttpClientRequest<ByteBuf> request = HttpClientRequest.createPost(SERVICE_URI + "testAsync/person")
            .withHeader("Content-type", "application/json")
            .withHeader("Content-length", String.valueOf(raw.length))
            .withContent(buffer);
    LoadBalancingHttpClient<ByteBuf, ByteBuf> observableClient = RibbonTransport
            .newHttpClient(DefaultClientConfigImpl.getClientConfigWithDefaultValues()
                    .set(CommonClientConfigKey.ReadTimeout, 10000));
    Observable<HttpClientResponse<ByteBuf>> response = observableClient.submit(request);
    Person person = getPersonObservable(response).toBlocking().single();
    assertEquals(myPerson, person);
}
From source file:com.ottogroup.bi.spqr.websocket.kafka.KafkaTopicWebSocketEmitter.java
License:Apache License
/**
 * @see java.lang.Runnable#run()
 */
public void run() {
    // prepare topic consumer settings and initialize the client
    Map<String, String> settings = new HashMap<>();
    settings.put(KafkaTopicConsumer.CFG_OPT_KAFKA_GROUP_ID, groupId);
    settings.put(KafkaTopicConsumer.CFG_OPT_KAFKA_ZOOKEEPER_CONNECT, zookeeperConnect);
    settings.put(KafkaTopicConsumer.CFG_OPT_KAFKA_TOPIC, topicId);
    this.consumer = new KafkaTopicConsumer(this.messages, this.messageWaitStrategy, this.executorService);
    try {
        this.consumer.initialize(settings);
    } catch (RequiredInputMissingException e) {
        throw new RuntimeException(e);
    }

    // keep on running until externally halted (set running to 'false')
    this.running = true;
    while (this.running) {
        // fetch message from queue via provided wait strategy
        byte[] message = null;
        try {
            message = this.messageWaitStrategy.waitFor(messages);
        } catch (InterruptedException e) {
            //
        }

        // if the byte array contains anything, forward it to the web socket,
        // otherwise try to fetch a new message from the queue
        if (message != null) {
            try {
                websocketChannel.writeAndFlush(new TextWebSocketFrame(Unpooled.copiedBuffer(message)));
            } catch (Exception e) {
                logger.error("Failed to write message to websocket. Error: " + e.getMessage());
            }
        }
    }

    // shut down consumer and websocket channel
    this.consumer.shutdown();
    this.websocketChannel.close();
    if (logger.isDebugEnabled())
        logger.debug("kafka topic to websocket emitter shut down");
}
From source file:com.ottogroup.bi.spqr.websocket.server.SPQRWebSocketServerHandler.java
License:Apache License
private void handleWebSocketFrame(ChannelHandlerContext ctx, WebSocketFrame frame) {
    // if the client requests to close the socket connection: shut it down ;-)
    if (frame instanceof CloseWebSocketFrame) {
        handshaker.close(ctx.channel(), (CloseWebSocketFrame) frame.retain());
        return;
    }

    // not of expected type: return an error
    if (!(frame instanceof TextWebSocketFrame)) {
        throw new UnsupportedOperationException(
                String.format("%s frame types not supported", frame.getClass().getName()));
    }

    // only the first request is forwarded to the kafka topic emitter. if the client wishes to change
    // the topic, a new connection must be established - connection migration is not supported for now
    // TODO add support for connection migration
    if (this.emitter == null) {

        // extract content from request and ensure that it is neither null nor empty
        String requestContent = ((TextWebSocketFrame) frame).text();
        if (StringUtils.isBlank(requestContent)) {
            ctx.channel().writeAndFlush(new TextWebSocketFrame(
                    "Invalid request received. Please see the documentation for further information"));
            return;
        }

        // convert content into topic request representation and check if it contains a non-null and non-empty topic identifier
        KafkaTopicRequest topicRequest = null;
        try {
            topicRequest = this.jsonMapper.readValue(requestContent, KafkaTopicRequest.class);
        } catch (Exception e) {
            ctx.channel().writeAndFlush(new TextWebSocketFrame(Unpooled.copiedBuffer(
                    "Invalid request format. Please see the documentation for further information".getBytes())));
            return;
        }
        if (StringUtils.isBlank(topicRequest.getTopicId())) {
            ctx.channel().writeAndFlush(new TextWebSocketFrame(Unpooled.copiedBuffer(
                    "Invalid request, missing topic identifier. Please see the documentation for further information"
                            .getBytes())));
            return;
        }

        // establish connection with kafka topic and start emitting content to websocket
        this.emitter = new KafkaTopicWebSocketEmitter(ctx.channel(), 1024, executorService, "localhost:2181",
                "client-id-" + System.currentTimeMillis(), topicRequest.getTopicId());
        executorService.submit(this.emitter);
    }
}
From source file:com.phei.netty.basic.TimeServerHandlerTwo.java
License:Apache License
@Override
public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
    System.out.println("receive:" + msg);
    String currentTime = "QUERY TIME ORDER".equalsIgnoreCase(String.valueOf(msg))
            ? new java.util.Date(System.currentTimeMillis()).toString() + "\n"
            : "BAD ORDER\n";
    ByteBuf resp = Unpooled.copiedBuffer(currentTime.getBytes());
    ctx.write(resp);
}
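When the payload starts out as a String, as in the handler above, Unpooled.copiedBuffer(CharSequence, Charset) encodes and copies in one step and avoids the intermediate byte array. A minimal standalone sketch (class name and output are illustrative):

import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import java.nio.charset.StandardCharsets;

public class CopiedBufferCharSequenceExample {
    public static void main(String[] args) {
        String currentTime = new java.util.Date(System.currentTimeMillis()).toString() + "\n";

        // encodes the string and copies it into a new buffer in one step
        ByteBuf resp = Unpooled.copiedBuffer(currentTime, StandardCharsets.UTF_8);
        System.out.println(resp.toString(StandardCharsets.UTF_8));

        resp.release();
    }
}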