List of usage examples for the java.io.DataOutputStream constructor
public DataOutputStream(OutputStream out)
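Before the project-specific examples below, a minimal self-contained sketch of the constructor in isolation; the file name and values are illustrative only, not taken from any of the sources listed here:

    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.FileInputStream;
    import java.io.FileOutputStream;
    import java.io.IOException;

    public class DataOutputStreamSketch {
        public static void main(String[] args) throws IOException {
            // The constructor wraps any OutputStream; here a file (name is illustrative).
            try (DataOutputStream out = new DataOutputStream(new FileOutputStream("data.bin"))) {
                out.writeInt(42);       // 4 bytes, big-endian
                out.writeUTF("hello");  // 2-byte length prefix + modified UTF-8
            }
            // Read the values back in the same order with the mirror-image class.
            try (DataInputStream in = new DataInputStream(new FileInputStream("data.bin"))) {
                System.out.println(in.readInt());  // 42
                System.out.println(in.readUTF());  // hello
            }
        }
    }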
From source file:com.machinepublishers.jbrowserdriver.diagnostics.HttpServer.java
public static void launch(int port) {
    if (loop.compareAndSet(false, true)) {
        new Thread(new Runnable() {
            @Override
            public void run() {
                try (ServerSocket serverSocket = new ServerSocket(port, 50, InetAddress.getLoopbackAddress())) {
                    listener.set(serverSocket);
                    while (loop.get()) {
                        try (Socket socket = serverSocket.accept();
                                DataOutputStream output = new DataOutputStream(socket.getOutputStream());
                                BufferedReader reader = new BufferedReader(
                                        new InputStreamReader(socket.getInputStream()))) {
                            List<String> request = new ArrayList<String>();
                            for (String line; (line = reader.readLine()) != null;) {
                                request.add(line);
                                if (line.startsWith("GET / ")) {
                                    output.write(indexContent, 0, indexContent.length);
                                    output.write(indexBody, 0, indexBody.length);
                                } else if (line.startsWith("POST / ")) {
                                    output.write(postContent, 0, postContent.length);
                                    output.write(postBody, 0, postBody.length);
                                } else if (line.startsWith("GET /iframe.htm")) {
                                    output.write(iframeContent, 0, iframeContent.length);
                                    output.write(iframeBody, 0, iframeBody.length);
                                } else if (line.startsWith("GET /redirect/site1 ")) {
                                    output.write(redirectContent);
                                } else if (line.startsWith("GET /redirect/site2 ")) {
                                    output.write(iframeContent, 0, iframeContent.length);
                                    output.write(iframeBody, 0, iframeBody.length);
                                } else if (line.startsWith("GET /wait-forever ")) {
                                    synchronized (HttpServer.class) {
                                        HttpServer.class.wait();
                                    }
                                } else if (line.startsWith("GET /image.png")) {
                                    output.write(imageContent, 0, imageContent.length);
                                    output.write(imageBody, 0, imageBody.length);
                                }
                            }
                            previousRequest.set(request);
                            previousRequestId.incrementAndGet();
                        }
                    }
                } catch (Throwable t) {
                    // Swallowed: any error (or interruption) ends the server thread silently.
                }
            }
        }).start();
    }
}
From source file:junit.org.rapidpm.microservice.demo.ServletTest.java
@Test
public void testServletPostRequest() throws Exception {
    URL obj = new URL(url);
    HttpURLConnection con = (HttpURLConnection) obj.openConnection();

    // add request header
    con.setRequestMethod("POST");
    con.setRequestProperty("User-Agent", USER_AGENT);
    con.setRequestProperty("Accept-Language", "en-US,en;q=0.5");

    String urlParameters = "sn=C02G8416DRJM&cn=&locale=&caller=&num=12345";

    // Send post request
    con.setDoOutput(true);
    DataOutputStream wr = new DataOutputStream(con.getOutputStream());
    wr.writeBytes(urlParameters);
    wr.flush();
    wr.close();

    int responseCode = con.getResponseCode();
    System.out.println("\nSending 'POST' request to URL : " + url);
    System.out.println("Post parameters : " + urlParameters);
    System.out.println("Response Code : " + responseCode);

    BufferedReader in = new BufferedReader(new InputStreamReader(con.getInputStream()));
    String inputLine;
    StringBuffer response = new StringBuffer();
    while ((inputLine = in.readLine()) != null) {
        response.append(inputLine);
    }
    in.close();

    // print result
    Assert.assertEquals("Hello World CDI Service", response.toString());
}
From source file:jetbrains.exodus.entitystore.FileSystemBlobVaultOld.java
protected FileSystemBlobVaultOld(@NotNull final String parentDirectory, @NotNull final String blobsDirectory,
        @NotNull final String blobExtension, @NotNull final BlobHandleGenerator blobHandleGenerator,
        final int expectedVersion) throws IOException {
    this.blobsDirectory = blobsDirectory;
    this.blobExtension = blobExtension;
    location = new File(parentDirectory, blobsDirectory);
    this.blobHandleGenerator = blobHandleGenerator;
    size = new AtomicLong(UNKNOWN_SIZE);
    //noinspection ResultOfMethodCallIgnored
    location.mkdirs();
    // load version
    final File versionFile = new File(location, VERSION_FILE);
    if (versionFile.exists()) {
        try (DataInputStream input = new DataInputStream(new FileInputStream(versionFile))) {
            version = input.readInt();
        }
        if (expectedVersion != version) {
            throw new UnexpectedBlobVaultVersionException("Unexpected FileSystemBlobVault version: " + version);
        }
    } else {
        final File[] files = location.listFiles();
        final boolean hasFiles = files != null && files.length > 0;
        if (!hasFiles) {
            version = expectedVersion;
        } else {
            version = EXPECTED_VERSION;
            if (expectedVersion != version) {
                throw new UnexpectedBlobVaultVersionException(
                        "Unexpected FileSystemBlobVault version: " + version);
            }
        }
        try (DataOutputStream output = new DataOutputStream(new FileOutputStream(versionFile))) {
            output.writeInt(expectedVersion);
        }
    }
}
From source file:gaffer.accumulostore.operation.spark.handler.GetJavaRDDOfElementsHandlerTest.java
@Test
public void checkGetCorrectElementsInJavaRDDForEntitySeed() throws OperationException, IOException {
    final Graph graph1 = new Graph.Builder()
            .addSchema(getClass().getResourceAsStream("/schema/dataSchema.json"))
            .addSchema(getClass().getResourceAsStream("/schema/dataTypes.json"))
            .addSchema(getClass().getResourceAsStream("/schema/storeTypes.json"))
            .storeProperties(getClass().getResourceAsStream("/store.properties")).build();

    final List<Element> elements = new ArrayList<>();
    for (int i = 0; i < 10; i++) {
        final Entity entity = new Entity(ENTITY_GROUP);
        entity.setVertex("" + i);
        final Edge edge1 = new Edge(EDGE_GROUP);
        edge1.setSource("" + i);
        edge1.setDestination("B");
        edge1.setDirected(false);
        edge1.putProperty("count", 2);
        final Edge edge2 = new Edge(EDGE_GROUP);
        edge2.setSource("" + i);
        edge2.setDestination("C");
        edge2.setDirected(false);
        edge2.putProperty("count", 4);
        elements.add(edge1);
        elements.add(edge2);
        elements.add(entity);
    }
    final User user = new User();
    graph1.execute(new AddElements(elements), user);

    final SparkConf sparkConf = new SparkConf().setMaster("local")
            .setAppName("testCheckGetCorrectElementsInJavaRDDForEntitySeed")
            .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
            .set("spark.kryo.registrator", "gaffer.serialisation.kryo.Registrator")
            .set("spark.driver.allowMultipleContexts", "true");
    final JavaSparkContext sparkContext = new JavaSparkContext(sparkConf);

    // Create Hadoop configuration and serialise to a string
    final Configuration configuration = new Configuration();
    final ByteArrayOutputStream baos = new ByteArrayOutputStream();
    configuration.write(new DataOutputStream(baos));
    final String configurationString = new String(baos.toByteArray(), CommonConstants.UTF_8);

    // Check get correct edges for "1"
    GetJavaRDDOfElements<EntitySeed> rddQuery = new GetJavaRDDOfElements.Builder<EntitySeed>()
            .javaSparkContext(sparkContext).seeds(Collections.singleton(new EntitySeed("1"))).build();
    rddQuery.addOption(AbstractGetRDDOperationHandler.HADOOP_CONFIGURATION_KEY, configurationString);
    JavaRDD<Element> rdd = graph1.execute(rddQuery, user);
    if (rdd == null) {
        fail("No RDD returned");
    }
    final Set<Element> results = new HashSet<>(rdd.collect());

    final Set<Element> expectedElements = new HashSet<>();
    final Entity entity1 = new Entity(ENTITY_GROUP);
    entity1.setVertex("1");
    final Edge edge1B = new Edge(EDGE_GROUP);
    edge1B.setSource("1");
    edge1B.setDestination("B");
    edge1B.setDirected(false);
    edge1B.putProperty("count", 2);
    final Edge edge1C = new Edge(EDGE_GROUP);
    edge1C.setSource("1");
    edge1C.setDestination("C");
    edge1C.setDirected(false);
    edge1C.putProperty("count", 4);
    expectedElements.add(entity1);
    expectedElements.add(edge1B);
    expectedElements.add(edge1C);
    assertEquals(expectedElements, results);

    // Check get correct edges for "1" when specify entities only
    rddQuery = new GetJavaRDDOfElements.Builder<EntitySeed>().javaSparkContext(sparkContext)
            .seeds(Collections.singleton(new EntitySeed("1")))
            .view(new View.Builder().entity(ENTITY_GROUP).build()).build();
    rddQuery.addOption(AbstractGetRDDOperationHandler.HADOOP_CONFIGURATION_KEY, configurationString);
    rdd = graph1.execute(rddQuery, user);
    if (rdd == null) {
        fail("No RDD returned");
    }
    results.clear();
    results.addAll(rdd.collect());
    expectedElements.clear();
    expectedElements.add(entity1);
    assertEquals(expectedElements, results);

    // Check get correct edges for "1" when specify edges only
    rddQuery = new GetJavaRDDOfElements.Builder<EntitySeed>().javaSparkContext(sparkContext)
            .seeds(Collections.singleton(new EntitySeed("1")))
            .view(new View.Builder().edge(EDGE_GROUP).build()).build();
    rddQuery.addOption(AbstractGetRDDOperationHandler.HADOOP_CONFIGURATION_KEY, configurationString);
    rdd = graph1.execute(rddQuery, user);
    if (rdd == null) {
        fail("No RDD returned");
    }
    results.clear();
    results.addAll(rdd.collect());
    expectedElements.clear();
    expectedElements.add(edge1B);
    expectedElements.add(edge1C);
    assertEquals(expectedElements, results);

    // Check get correct edges for "1" and "5"
    Set<EntitySeed> seeds = new HashSet<>();
    seeds.add(new EntitySeed("1"));
    seeds.add(new EntitySeed("5"));
    rddQuery = new GetJavaRDDOfElements.Builder<EntitySeed>().javaSparkContext(sparkContext).seeds(seeds)
            .build();
    rddQuery.addOption(AbstractGetRDDOperationHandler.HADOOP_CONFIGURATION_KEY, configurationString);
    rdd = graph1.execute(rddQuery, user);
    if (rdd == null) {
        fail("No RDD returned");
    }
    results.clear();
    results.addAll(rdd.collect());
    final Entity entity5 = new Entity(ENTITY_GROUP);
    entity5.setVertex("5");
    final Edge edge5B = new Edge(EDGE_GROUP);
    edge5B.setSource("5");
    edge5B.setDestination("B");
    edge5B.setDirected(false);
    edge5B.putProperty("count", 2);
    final Edge edge5C = new Edge(EDGE_GROUP);
    edge5C.setSource("5");
    edge5C.setDestination("C");
    edge5C.setDirected(false);
    edge5C.putProperty("count", 4);
    expectedElements.clear();
    expectedElements.add(entity1);
    expectedElements.add(edge1B);
    expectedElements.add(edge1C);
    expectedElements.add(entity5);
    expectedElements.add(edge5B);
    expectedElements.add(edge5C);
    assertEquals(expectedElements, results);
    sparkContext.stop();
}
From source file:com.cloudera.recordbreaker.analyzer.UnknownTextSchemaDescriptor.java
public byte[] getPayload() {
    // Serialize the parser, return the resulting string
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(baos);
    try {
        this.typeTree.write(out);
        out.close();
    } catch (IOException iex) {
        return new byte[0];
    }
    return baos.toByteArray();
}
From source file:gaffer.accumulostore.operation.spark.handler.GetRDDOfElementsHandlerTest.java
@Test
public void checkGetCorrectElementsInRDDForEntitySeed() throws OperationException, IOException {
    final Graph graph1 = new Graph.Builder()
            .addSchema(getClass().getResourceAsStream("/schema/dataSchema.json"))
            .addSchema(getClass().getResourceAsStream("/schema/dataTypes.json"))
            .addSchema(getClass().getResourceAsStream("/schema/storeTypes.json"))
            .storeProperties(getClass().getResourceAsStream("/store.properties")).build();

    final List<Element> elements = new ArrayList<>();
    for (int i = 0; i < 10; i++) {
        final Entity entity = new Entity(ENTITY_GROUP);
        entity.setVertex("" + i);
        final Edge edge1 = new Edge(EDGE_GROUP);
        edge1.setSource("" + i);
        edge1.setDestination("B");
        edge1.setDirected(false);
        edge1.putProperty("count", 2);
        final Edge edge2 = new Edge(EDGE_GROUP);
        edge2.setSource("" + i);
        edge2.setDestination("C");
        edge2.setDirected(false);
        edge2.putProperty("count", 4);
        elements.add(edge1);
        elements.add(edge2);
        elements.add(entity);
    }
    final User user = new User();
    graph1.execute(new AddElements(elements), user);

    final SparkConf sparkConf = new SparkConf().setMaster("local")
            .setAppName("testCheckGetCorrectElementsInRDDForEntitySeed")
            .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
            .set("spark.kryo.registrator", "gaffer.serialisation.kryo.Registrator")
            .set("spark.driver.allowMultipleContexts", "true");
    final SparkContext sparkContext = new SparkContext(sparkConf);

    // Create Hadoop configuration and serialise to a string
    final Configuration configuration = new Configuration();
    final ByteArrayOutputStream baos = new ByteArrayOutputStream();
    configuration.write(new DataOutputStream(baos));
    final String configurationString = new String(baos.toByteArray(), CommonConstants.UTF_8);

    // Check get correct edges for "1"
    GetRDDOfElements<EntitySeed> rddQuery = new GetRDDOfElements.Builder<EntitySeed>()
            .sparkContext(sparkContext).seeds(Collections.singleton(new EntitySeed("1"))).build();
    rddQuery.addOption(AbstractGetRDDOperationHandler.HADOOP_CONFIGURATION_KEY, configurationString);
    RDD<Element> rdd = graph1.execute(rddQuery, user);
    if (rdd == null) {
        fail("No RDD returned");
    }
    Set<Element> results = new HashSet<>();
    // NB: IDE suggests the cast in the following line is unnecessary but compilation fails without it
    Element[] returnedElements = (Element[]) rdd.collect();
    for (int i = 0; i < returnedElements.length; i++) {
        results.add(returnedElements[i]);
    }
    final Set<Element> expectedElements = new HashSet<>();
    final Entity entity1 = new Entity(ENTITY_GROUP);
    entity1.setVertex("1");
    final Edge edge1B = new Edge(EDGE_GROUP);
    edge1B.setSource("1");
    edge1B.setDestination("B");
    edge1B.setDirected(false);
    edge1B.putProperty("count", 2);
    final Edge edge1C = new Edge(EDGE_GROUP);
    edge1C.setSource("1");
    edge1C.setDestination("C");
    edge1C.setDirected(false);
    edge1C.putProperty("count", 4);
    expectedElements.add(entity1);
    expectedElements.add(edge1B);
    expectedElements.add(edge1C);
    assertEquals(expectedElements, results);

    // Check get correct edges for "1" when specify entities only
    rddQuery = new GetRDDOfElements.Builder<EntitySeed>().sparkContext(sparkContext)
            .seeds(Collections.singleton(new EntitySeed("1")))
            .view(new View.Builder().entity(ENTITY_GROUP).build()).build();
    rddQuery.addOption(AbstractGetRDDOperationHandler.HADOOP_CONFIGURATION_KEY, configurationString);
    rdd = graph1.execute(rddQuery, user);
    if (rdd == null) {
        fail("No RDD returned");
    }
    results.clear();
    returnedElements = (Element[]) rdd.collect();
    for (int i = 0; i < returnedElements.length; i++) {
        results.add(returnedElements[i]);
    }
    expectedElements.clear();
    expectedElements.add(entity1);
    assertEquals(expectedElements, results);

    // Check get correct edges for "1" when specify edges only
    rddQuery = new GetRDDOfElements.Builder<EntitySeed>().sparkContext(sparkContext)
            .seeds(Collections.singleton(new EntitySeed("1")))
            .view(new View.Builder().edge(EDGE_GROUP).build()).build();
    rddQuery.addOption(AbstractGetRDDOperationHandler.HADOOP_CONFIGURATION_KEY, configurationString);
    rdd = graph1.execute(rddQuery, user);
    if (rdd == null) {
        fail("No RDD returned");
    }
    results.clear();
    returnedElements = (Element[]) rdd.collect();
    for (int i = 0; i < returnedElements.length; i++) {
        results.add(returnedElements[i]);
    }
    expectedElements.clear();
    expectedElements.add(edge1B);
    expectedElements.add(edge1C);
    assertEquals(expectedElements, results);

    // Check get correct edges for "1" and "5"
    Set<EntitySeed> seeds = new HashSet<>();
    seeds.add(new EntitySeed("1"));
    seeds.add(new EntitySeed("5"));
    rddQuery = new GetRDDOfElements.Builder<EntitySeed>().sparkContext(sparkContext).seeds(seeds).build();
    rddQuery.addOption(AbstractGetRDDOperationHandler.HADOOP_CONFIGURATION_KEY, configurationString);
    rdd = graph1.execute(rddQuery, user);
    if (rdd == null) {
        fail("No RDD returned");
    }
    results.clear();
    returnedElements = (Element[]) rdd.collect();
    for (int i = 0; i < returnedElements.length; i++) {
        results.add(returnedElements[i]);
    }
    final Entity entity5 = new Entity(ENTITY_GROUP);
    entity5.setVertex("5");
    final Edge edge5B = new Edge(EDGE_GROUP);
    edge5B.setSource("5");
    edge5B.setDestination("B");
    edge5B.setDirected(false);
    edge5B.putProperty("count", 2);
    final Edge edge5C = new Edge(EDGE_GROUP);
    edge5C.setSource("5");
    edge5C.setDestination("C");
    edge5C.setDirected(false);
    edge5C.putProperty("count", 4);
    expectedElements.clear();
    expectedElements.add(entity1);
    expectedElements.add(edge1B);
    expectedElements.add(edge1C);
    expectedElements.add(entity5);
    expectedElements.add(edge5B);
    expectedElements.add(edge5C);
    assertEquals(expectedElements, results);
    sparkContext.stop();
}
From source file:net.minecraftforge.fml.repackage.com.nothome.delta.GDiffPatcher.java
/**
 * Patches to an output stream.
 */
public void patch(SeekableSource source, InputStream patch, OutputStream out) throws IOException {
    DataOutputStream outOS = new DataOutputStream(out);
    DataInputStream patchIS = new DataInputStream(patch);

    // the magic string is 'd1 ff d1 ff' + the version number
    if (patchIS.readUnsignedByte() != 0xd1 || patchIS.readUnsignedByte() != 0xff
            || patchIS.readUnsignedByte() != 0xd1 || patchIS.readUnsignedByte() != 0xff
            || patchIS.readUnsignedByte() != 0x04) {
        throw new PatchException("magic string not found, aborting!");
    }

    while (true) {
        int command = patchIS.readUnsignedByte();
        if (command == EOF)
            break;
        int length;
        int offset;
        if (command <= DATA_MAX) {
            append(command, patchIS, outOS);
            continue;
        }
        switch (command) {
        case DATA_USHORT: // ushort, n bytes following; append
            length = patchIS.readUnsignedShort();
            append(length, patchIS, outOS);
            break;
        case DATA_INT: // int, n bytes following; append
            length = patchIS.readInt();
            append(length, patchIS, outOS);
            break;
        case COPY_USHORT_UBYTE:
            offset = patchIS.readUnsignedShort();
            length = patchIS.readUnsignedByte();
            copy(offset, length, source, outOS);
            break;
        case COPY_USHORT_USHORT:
            offset = patchIS.readUnsignedShort();
            length = patchIS.readUnsignedShort();
            copy(offset, length, source, outOS);
            break;
        case COPY_USHORT_INT:
            offset = patchIS.readUnsignedShort();
            length = patchIS.readInt();
            copy(offset, length, source, outOS);
            break;
        case COPY_INT_UBYTE:
            offset = patchIS.readInt();
            length = patchIS.readUnsignedByte();
            copy(offset, length, source, outOS);
            break;
        case COPY_INT_USHORT:
            offset = patchIS.readInt();
            length = patchIS.readUnsignedShort();
            copy(offset, length, source, outOS);
            break;
        case COPY_INT_INT:
            offset = patchIS.readInt();
            length = patchIS.readInt();
            copy(offset, length, source, outOS);
            break;
        case COPY_LONG_INT:
            long loffset = patchIS.readLong();
            length = patchIS.readInt();
            copy(loffset, length, source, outOS);
            break;
        default:
            throw new IllegalStateException("command " + command);
        }
    }
    outOS.flush();
}
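As a side note, a hedged sketch of how a patch header matching the check above could be emitted with a DataOutputStream. The byte values mirror the magic-string comparison in the patcher; the trailing EOF command value (0) is assumed from the GDIFF specification rather than taken from this snippet, and the helper name is hypothetical:

    // Hypothetical helper: writes an empty GDIFF v4 patch (magic + version + EOF).
    // Assumes java.io.OutputStream/DataOutputStream/IOException are imported as above.
    static void writeEmptyPatch(OutputStream out) throws IOException {
        DataOutputStream dos = new DataOutputStream(out);
        dos.writeByte(0xd1);
        dos.writeByte(0xff);
        dos.writeByte(0xd1);
        dos.writeByte(0xff);
        dos.writeByte(0x04); // version number, matching the readUnsignedByte() check above
        dos.writeByte(0x00); // EOF command (assumed, per the GDIFF spec)
        dos.flush();
    }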
From source file:hd3gtv.embddb.network.DataBlock.java
byte[] getBytes(Protocol protocol) throws IOException {
    checkIfNotEmpty();
    ByteArrayOutputStream byte_array_out_stream = new ByteArrayOutputStream(Protocol.BUFFER_SIZE);
    DataOutputStream dos = new DataOutputStream(byte_array_out_stream);
    dos.write(Protocol.APP_SOCKET_HEADER_TAG);
    dos.writeInt(Protocol.VERSION);

    /**
     * Start header name
     */
    dos.writeByte(0);
    byte[] request_name_data = request_name.getBytes(Protocol.UTF8);
    dos.writeInt(request_name_data.length);
    dos.write(request_name_data);

    /**
     * Start datas payload
     */
    dos.writeByte(1);

    /**
     * Get datas from zip
     */
    ZipOutputStream zos = new ZipOutputStream(dos);
    zos.setLevel(3);
    entries.forEach(entry -> {
        try {
            entry.toZip(zos);
        } catch (IOException e) {
            log.error("Can't add to zip", e);
        }
    });
    zos.flush();
    zos.finish();
    zos.close();
    dos.flush();
    dos.close();

    byte[] result = byte_array_out_stream.toByteArray();
    if (log.isTraceEnabled()) {
        log.trace("Make raw datas for " + request_name + Hexview.LINESEPARATOR + Hexview.tracelog(result));
    }
    return result;
}
From source file:com.ict.dtube.tools.command.message.QueryMsgByIdSubCommand.java
private static String createBodyFile(MessageExt msg) throws IOException {
    DataOutputStream dos = null;
    try {
        String bodyTmpFilePath = "/tmp/dtube/msgbodys";
        File file = new File(bodyTmpFilePath);
        if (!file.exists()) {
            file.mkdirs();
        }
        bodyTmpFilePath = bodyTmpFilePath + "/" + msg.getMsgId();
        dos = new DataOutputStream(new FileOutputStream(bodyTmpFilePath));
        dos.write(msg.getBody());
        return bodyTmpFilePath;
    } finally {
        if (dos != null) {
            dos.close();
        }
    }
}
From source file:it.jnrpe.net.JNRPEProtocolPacket.java
/**
 * Validates the packet CRC.
 *
 * @throws BadCRCException
 *             If the CRC can't be validated
 */
public void validate() throws BadCRCException {
    ByteArrayOutputStream bout = new ByteArrayOutputStream();
    DataOutputStream dout = new DataOutputStream(bout);
    try {
        dout.writeShort(packetVersion);
        dout.writeShort(packetTypeCode);
        dout.writeInt(0); // NO CRC
        dout.writeShort(resultCode);
        dout.write(byteBufferAry);
        dout.write(dummyBytesAry);
        dout.close();

        byte[] vBytes = bout.toByteArray();
        CRC32 crcAlg = new CRC32();
        crcAlg.update(vBytes);

        if (!(((int) crcAlg.getValue()) == crcValue)) {
            throw new BadCRCException("Bad CRC");
        }
    } catch (IOException e) {
        // Never happens...
        throw new IllegalStateException(e.getMessage(), e);
    }
}
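The sending side of this packet is not shown on this page; purely as an illustration, a sketch of how the CRC that validate() checks could be computed, mirroring the field order above. The helper name and parameters are hypothetical, and java.util.zip.CRC32 plus the java.io classes are assumed imported as in the class above:

    // Hypothetical sketch: serialize with 0 in the CRC slot, then CRC32 the bytes.
    // Field order mirrors validate() above; all names here are assumptions.
    static int computeCrc(short version, short typeCode, short resultCode,
            byte[] buffer, byte[] padding) throws IOException {
        ByteArrayOutputStream bout = new ByteArrayOutputStream();
        DataOutputStream dout = new DataOutputStream(bout);
        dout.writeShort(version);
        dout.writeShort(typeCode);
        dout.writeInt(0); // CRC slot is zero while computing
        dout.writeShort(resultCode);
        dout.write(buffer);
        dout.write(padding);
        dout.close();
        CRC32 crc = new CRC32();
        crc.update(bout.toByteArray());
        return (int) crc.getValue();
    }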