List of usage examples for java.nio.file Files copy
public static long copy(Path source, OutputStream out) throws IOException
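The signature above is one of three overloads; the examples below mostly use Files.copy(InputStream, Path, CopyOption...) and Files.copy(Path, Path, CopyOption...). A minimal, self-contained sketch of all three in isolation (the file names are placeholders, not taken from any example below):

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;

public class FilesCopyDemo {
    public static void main(String[] args) throws IOException {
        Path source = Paths.get("source.txt");   // placeholder input file
        Path target = Paths.get("target.txt");   // placeholder destination

        // Path -> Path: returns the target path; fails if the target exists
        // unless REPLACE_EXISTING is given.
        Files.copy(source, target, StandardCopyOption.REPLACE_EXISTING);

        // Path -> OutputStream: returns the number of bytes copied.
        try (OutputStream out = Files.newOutputStream(Paths.get("copy-to-stream.txt"))) {
            long bytes = Files.copy(source, out);
            System.out.println("Copied " + bytes + " bytes");
        }

        // InputStream -> Path: also returns the number of bytes copied.
        try (InputStream in = Files.newInputStream(source)) {
            Files.copy(in, Paths.get("copy-from-stream.txt"),
                    StandardCopyOption.REPLACE_EXISTING);
        }
    }
}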
From source file:com.hortonworks.streamline.streams.actions.storm.topology.StormTopologyActionsImpl.java
private Path addArtifactsToJar(Path artifactsLocation) throws Exception {
    Path jarFile = Paths.get(stormJarLocation);
    if (artifactsLocation.toFile().isDirectory()) {
        File[] artifacts = artifactsLocation.toFile().listFiles();
        if (artifacts != null && artifacts.length > 0) {
            Path newJar = Files.copy(jarFile, artifactsLocation.resolve(jarFile.getFileName()));
            List<String> artifactFileNames = Arrays.stream(artifacts).filter(File::isFile).map(File::getName)
                    .collect(toList());
            List<String> commands = new ArrayList<>();
            commands.add(javaJarCommand);
            commands.add("uf");
            commands.add(newJar.toString());
            artifactFileNames.stream().forEachOrdered(name -> {
                commands.add("-C");
                commands.add(artifactsLocation.toString());
                commands.add(name);
            });
            ShellProcessResult shellProcessResult = executeShellProcess(commands);
            if (shellProcessResult.exitValue != 0) {
                LOG.error("Adding artifacts to jar command failed - exit code: {} / output: {}",
                        shellProcessResult.exitValue, shellProcessResult.stdout);
                throw new RuntimeException(
                        "Topology could not be deployed " + "successfully: fail to add artifacts to jar");
            }
            LOG.debug("Added files {} to jar {}", artifactFileNames, jarFile);
            return newJar;
        }
    } else {
        LOG.debug("Artifacts directory {} does not exist, not adding any artifacts to jar", artifactsLocation);
    }
    return jarFile;
}
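The artifactsLocation.resolve(jarFile.getFileName()) call above is the idiom to note: unlike a shell "cp file dir/", Files.copy(Path, Path) treats the target as the complete destination path, so copying into a directory means resolving the file name against that directory first. A minimal sketch with placeholder paths:

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

public class CopyIntoDirectory {
    public static void main(String[] args) throws IOException {
        Path jar = Paths.get("topology.jar");   // placeholder source file
        Path dir = Paths.get("artifacts");      // placeholder directory

        // Files.copy(jar, dir) would try to create a file literally named
        // "artifacts" (and fail if the directory already exists); resolving
        // the file name yields "artifacts/topology.jar" instead.
        Path copied = Files.copy(jar, dir.resolve(jar.getFileName()));
        System.out.println("Copied to " + copied);
    }
}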
From source file:eu.itesla_project.dymola.DymolaImpactAnalysis.java
private List<String> writeDymolaInputs(Path workingDir, List<Contingency> contingencies) throws IOException {
    LOGGER.info(" Start writing dymola inputs");
    List<String> retList = new ArrayList<>();
    DdbConfig ddbConfig = DdbConfig.load();
    String jbossHost = ddbConfig.getJbossHost();
    String jbossPort = ddbConfig.getJbossPort();
    String jbossUser = ddbConfig.getJbossUser();
    String jbossPassword = ddbConfig.getJbossPassword();
    Path dymolaExportPath = workingDir.resolve(MO_EXPORT_DIRECTORY);
    if (!Files.exists(dymolaExportPath)) {
        Files.createDirectory(dymolaExportPath);
    }
    //retrieve modelica export parameters from configuration
    String modelicaVersion = config.getModelicaVersion();
    String sourceEngine = config.getSourceEngine();
    String sourceVersion = config.getSourceEngineVersion();
    Path modelicaPowerSystemLibraryPath = Paths.get(config.getModelicaPowerSystemLibraryFile());
    //write the modelica events file, to feed the modelica exporter
    Path eventsPath = workingDir.resolve(MODELICA_EVENTS_CSV_FILENAME);
    writeModelicaExporterContingenciesFile(eventsPath, contingencies);
    //these are only optional params needed if the source is eurostag
    Path modelicaLibPath = null;
    String slackId = config.getSlackId();
    if ("".equals(slackId)) {
        slackId = null; // null when not specified ()
    }
    LoadFlowFactory loadFlowFactory;
    try {
        loadFlowFactory = config.getLoadFlowFactoryClass().newInstance();
    } catch (InstantiationException | IllegalAccessException e) {
        throw new RuntimeException(e);
    }
    LOGGER.info("Exporting modelica data for network {}, working state-id {} ", network,
            network.getStateManager().getWorkingStateId());
    ModelicaMainExporter exporter = new ModelicaMainExporter(network, slackId, jbossHost, jbossPort, jbossUser,
            jbossPassword, modelicaVersion, sourceEngine, sourceVersion, modelicaLibPath, loadFlowFactory);
    exporter.export(dymolaExportPath);
    ModEventsExport eventsExporter = new ModEventsExport(
            dymolaExportPath.resolve(network.getId() + ".mo").toFile(), eventsPath.toFile());
    eventsExporter.export(dymolaExportPath);
    LOGGER.info(" modelica data exported.");
    // now assemble the input files to feed dymola
    // one .zip per contingency; in the zip, the .mo file and the powersystem library
    //TODO here it is assumed that contingencies ids in csv file start from 0 (i.e. 0 is the first contingency);
    //     id should be decoupled from the implementation
    try (final Stream<Path> pathStream = Files.walk(dymolaExportPath)) {
        pathStream.filter((p) -> !p.toFile().isDirectory() && p.toFile().getAbsolutePath().contains("events_")
                && p.toFile().getAbsolutePath().endsWith(".mo")).forEach(p -> {
                    GenericArchive archive = ShrinkWrap.createDomain().getArchiveFactory()
                            .create(GenericArchive.class);
                    try (FileSystem fileSystem = ShrinkWrapFileSystems.newFileSystem(archive)) {
                        Path rootDir = fileSystem.getPath("/");
                        Files.copy(modelicaPowerSystemLibraryPath,
                                rootDir.resolve(modelicaPowerSystemLibraryPath.getFileName()));
                        Files.copy(Paths.get(p.toString()),
                                rootDir.resolve(DymolaUtil.DYMOLA_SIM_MODEL_INPUT_PREFIX + ".mo"));
                        String[] c = p.getFileName().toString().replace(".mo", "").split("_");
                        try (OutputStream os = Files.newOutputStream(dymolaExportPath.getParent().resolve(
                                DymolaUtil.DYMOLAINPUTZIPFILENAMEPREFIX + "_" + c[c.length - 1] + ".zip"))) {
                            archive.as(ZipExporter.class).exportTo(os);
                            retList.add(new String(c[c.length - 1]));
                        } catch (IOException e) {
                            //e.printStackTrace();
                            throw new RuntimeException(e);
                        }
                    } catch (IOException e) {
                        throw new RuntimeException(e);
                    }
                });
    }
    retList.sort(Comparator.<String>naturalOrder());
    //prepare param inputs for indexes from indexes properties file
    LOGGER.info("writing input indexes parameters in .mat format - start ");
    try {
        Path baseWp43ConfigFile = PlatformConfig.CONFIG_DIR.resolve(WP43_CONFIG_FILE_NAME);
        HierarchicalINIConfiguration configuration = new HierarchicalINIConfiguration(
                baseWp43ConfigFile.toFile());
        //fix params for smallsignal index (cfr EurostagImpactAnalysis sources)
        SubnodeConfiguration node = configuration.getSection("smallsignal");
        node.setProperty("f_instant", Double.toString(parameters.getFaultEventInstant()));
        for (int i = 0; i < contingencies.size(); i++) {
            Contingency contingency = contingencies.get(i);
            if (contingency.getElements().isEmpty()) {
                throw new AssertionError("Empty contingency " + contingency.getId());
            }
            Iterator<ContingencyElement> it = contingency.getElements().iterator();
            // compute the maximum fault duration
            double maxDuration = getFaultDuration(it.next());
            while (it.hasNext()) {
                maxDuration = Math.max(maxDuration, getFaultDuration(it.next()));
            }
            node.setProperty("f_duration", Double.toString(maxDuration));
        }
        DymolaAdaptersMatParamsWriter writer = new DymolaAdaptersMatParamsWriter(configuration);
        for (String cId : retList) {
            String parFileNamePrefix = DymolaUtil.DYMOLA_SIM_MAT_OUTPUT_PREFIX + "_" + cId + "_wp43_";
            String parFileNameSuffix = "_pars.mat";
            String zippedParFileNameSuffix = "_pars.zip";
            try (OutputStream os = Files.newOutputStream(dymolaExportPath.getParent()
                    .resolve(DymolaUtil.DYMOLAINPUTZIPFILENAMEPREFIX + "_" + cId + zippedParFileNameSuffix))) {
                JavaArchive archive = ShrinkWrap.create(JavaArchive.class);
                Path sfile1 = ShrinkWrapFileSystems.newFileSystem(archive).getPath("/");
                Arrays.asList(config.getIndexesNames()).forEach(indexName -> writer.write(indexName,
                        sfile1.resolve(parFileNamePrefix + indexName + parFileNameSuffix)));
                archive.as(ZipExporter.class).exportTo(os);
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        }
    } catch (ConfigurationException exc) {
        throw new RuntimeException(exc);
    }
    LOGGER.info("writing input indexes parameters in .mat format - end - {}", retList);
    return retList;
}
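In the example above, the ShrinkWrap archive is mounted as a java.nio.file.FileSystem, so Files.copy writes straight from the default file system into the in-memory archive. The same cross-provider copy works with the JDK's built-in zip file system; a sketch with placeholder paths (bundle.zip and model.mo are not from the example above):

import java.io.IOException;
import java.net.URI;
import java.nio.file.FileSystem;
import java.nio.file.FileSystems;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.HashMap;
import java.util.Map;

public class CopyIntoZipFileSystem {
    public static void main(String[] args) throws IOException {
        Path source = Paths.get("model.mo");   // placeholder input file
        URI zipUri = URI.create("jar:" + Paths.get("bundle.zip").toUri());
        Map<String, String> env = new HashMap<>();
        env.put("create", "true");   // create the archive if it does not exist
        try (FileSystem zipFs = FileSystems.newFileSystem(zipUri, env)) {
            // Files.copy works across providers: the source lives on the default
            // file system, the target inside the zip file system.
            Files.copy(source, zipFs.getPath("/model.mo"));
        }
    }
}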
From source file:com.linkedin.pinot.controller.helix.core.realtime.PinotLLCRealtimeSegmentManager.java
/**
 * Extract the segment metadata file from the tar-zipped segment file that is expected to be in the
 * directory for the table.
 * Segment tar-zipped file path: DATADIR/rawTableName/segmentName
 * We extract the metadata into a file in the same level, as in: DATADIR/rawTableName/segmentName.metadata
 * @param rawTableName Name of the table (not including the REALTIME extension)
 * @param segmentNameStr Name of the segment
 * @return SegmentMetadataImpl if it is able to extract the metadata file from the tar-zipped segment file.
 */
protected SegmentMetadataImpl extractSegmentMetadata(final String rawTableName, final String segmentNameStr) {
    final String baseDir = StringUtil.join("/", _controllerConf.getDataDir(), rawTableName);
    final String segFileName = StringUtil.join("/", baseDir, segmentNameStr);
    final File segFile = new File(segFileName);
    SegmentMetadataImpl segmentMetadata;
    Path metadataPath = null;
    try {
        InputStream is = TarGzCompressionUtils.unTarOneFile(new FileInputStream(segFile),
                V1Constants.MetadataKeys.METADATA_FILE_NAME);
        metadataPath = FileSystems.getDefault().getPath(baseDir, segmentNameStr + ".metadata");
        Files.copy(is, metadataPath);
        segmentMetadata = new SegmentMetadataImpl(new File(metadataPath.toString()));
    } catch (Exception e) {
        throw new RuntimeException("Exception extacting and reading segment metadata for " + segmentNameStr, e);
    } finally {
        if (metadataPath != null) {
            FileUtils.deleteQuietly(new File(metadataPath.toString()));
        }
    }
    return segmentMetadata;
}
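One detail worth noting: Files.copy(InputStream, Path) leaves closing the stream to the caller, and the is stream above is never closed. A minimal sketch of the same copy-read-delete pattern with try-with-resources around the stream (file names are placeholders, and REPLACE_EXISTING is added so re-runs do not fail):

import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;

public class CopyStreamToFile {
    public static void main(String[] args) throws IOException {
        Path target = Paths.get("segment.metadata");   // placeholder target file
        try (InputStream in = Files.newInputStream(Paths.get("segment.tar.gz"))) {
            // copy the stream contents to the target, overwriting any stale copy
            Files.copy(in, target, StandardCopyOption.REPLACE_EXISTING);
            System.out.println("metadata size: " + Files.size(target));
        } finally {
            Files.deleteIfExists(target);   // mirror the deleteQuietly() cleanup above
        }
    }
}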
From source file:io.personium.core.model.impl.fs.DavCmpFsImpl.java
/**
 * Overwrite resources.
 * @param contentType ContentType of the update file
 * @param inputStream Stream of update file
 * @param etag Etag
 * @return ResponseBuilder
 */
private ResponseBuilder doPutForUpdate(final String contentType, final InputStream inputStream, String etag) {
    // timestamp used for the updated metadata
    long now = new Date().getTime();
    // reload the latest state before updating
    this.load();
    // if the WebDAV resource no longer exists, respond with 404
    if (!this.exists()) {
        throw getNotFoundException().params(getUrl());
    }
    // if an ETag other than "*" is given and does not match, reject the update
    if (etag != null && !"*".equals(etag) && !matchesETag(etag)) {
        throw PersoniumCoreException.Dav.ETAG_NOT_MATCH;
    }
    try {
        // Update Content
        InputStream input = inputStream;
        if (PersoniumUnitConfig.isDavEncryptEnabled()) {
            // Perform encryption.
            DataCryptor cryptor = new DataCryptor(getCellId());
            input = cryptor.encode(inputStream);
        }
        BufferedInputStream bufferedInput = new BufferedInputStream(input);
        File tmpFile = new File(getTempContentFilePath());
        File contentFile = new File(getContentFilePath());
        Files.copy(bufferedInput, tmpFile.toPath());
        Files.delete(contentFile.toPath());
        Files.move(tmpFile.toPath(), contentFile.toPath());
        long writtenBytes = contentFile.length();
        String encryptionType = DataCryptor.ENCRYPTION_TYPE_NONE;
        if (PersoniumUnitConfig.isDavEncryptEnabled()) {
            writtenBytes = ((CipherInputStream) input).getReadLengthBeforEncryption();
            encryptionType = DataCryptor.ENCRYPTION_TYPE_AES;
        }
        // Update Metadata
        this.metaFile.setUpdated(now);
        this.metaFile.setContentType(contentType);
        this.metaFile.setContentLength(writtenBytes);
        this.metaFile.setEncryptionType(encryptionType);
        this.metaFile.save();
    } catch (IOException ex) {
        throw PersoniumCoreException.Dav.FS_INCONSISTENCY_FOUND.reason(ex);
    }
    // response
    return javax.ws.rs.core.Response.ok().status(HttpStatus.SC_NO_CONTENT).header(HttpHeaders.ETAG, getEtag());
}
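The update above never writes into the live content file directly: the stream is copied to a temporary file, the old file is deleted, and the temporary file is moved into place, so a failed copy cannot corrupt the existing content. A minimal sketch of that stage-then-move pattern, using REPLACE_EXISTING on the move instead of the explicit delete (paths are placeholders, not Personium's file layout):

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;

public class ReplaceFileContents {
    static void overwrite(InputStream newContent, Path contentFile) throws IOException {
        // stage next to the target so the final move is a same-directory rename
        Path tmp = contentFile.resolveSibling(contentFile.getFileName() + ".tmp");
        Files.copy(newContent, tmp, StandardCopyOption.REPLACE_EXISTING);
        Files.move(tmp, contentFile, StandardCopyOption.REPLACE_EXISTING);
    }

    public static void main(String[] args) throws IOException {
        Path file = Paths.get("content.dat");   // placeholder content file
        Files.write(file, "old".getBytes());
        overwrite(new ByteArrayInputStream("new".getBytes()), file);
        System.out.println(new String(Files.readAllBytes(file)));   // prints "new"
    }
}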
From source file:io.personium.core.model.impl.fs.DavCmpFsImplTest.java
/**
 * Test get().
 * normal.
 * DavEncryptEnabled is false.
 * @throws Exception Unintended exception in test
 */
@Test
public void get_Normal_encrypt_false() throws Exception {
    String contentPath = TEST_DIR_PATH + CONTENT_FILE;
    InputStream inputStream = null;
    File contentFile = new File(contentPath);
    try {
        inputStream = getSystemResourceAsStream("davFile/file01.txt");
        Files.copy(inputStream, contentFile.toPath());

        // --------------------
        // Test method args
        // --------------------
        String rangeHeaderField = null;

        // --------------------
        // Mock settings
        // --------------------
        davCmpFsImpl = PowerMockito.spy(DavCmpFsImpl.create("", null));
        Whitebox.setInternalState(davCmpFsImpl, "fsPath", TEST_DIR_PATH);
        doReturn("text/plain").when(davCmpFsImpl).getContentType();
        doReturn(98L).when(davCmpFsImpl).getContentLength();
        doReturn(DataCryptor.ENCRYPTION_TYPE_NONE).when(davCmpFsImpl).getEncryptionType();
        doReturn(CELL_ID).when(davCmpFsImpl).getCellId();
        doReturn("\"1-1487652733383\"").when(davCmpFsImpl).getEtag();

        // --------------------
        // Expected result
        // --------------------
        String sourceFileMD5 = md5Hex(getSystemResourceAsStream("davFile/file01.txt"));
        ResponseBuilder expected = Response.ok().header(HttpHeaders.CONTENT_LENGTH, 98L)
                .header(HttpHeaders.CONTENT_TYPE, "text/plain").header(ETAG, "\"1-1487652733383\"")
                .header(PersoniumCoreUtils.HttpHeaders.ACCEPT_RANGES, RangeHeaderHandler.BYTES_UNIT);

        // --------------------
        // Run method
        // --------------------
        ResponseBuilder actual = davCmpFsImpl.get(rangeHeaderField);

        // --------------------
        // Confirm result
        // --------------------
        assertThat(actual.build().getStatus(), is(expected.build().getStatus()));
        assertThat(actual.build().getMetadata().toString(), is(expected.build().getMetadata().toString()));
        StreamingOutputForDavFile entity = (StreamingOutputForDavFile) actual.build().getEntity();
        ByteArrayOutputStream output = new ByteArrayOutputStream();
        entity.write(output);
        assertThat(md5Hex(output.toByteArray()), is(sourceFileMD5));
    } finally {
        if (inputStream != null) {
            inputStream.close();
        }
        contentFile.delete();
    }
}
From source file:gob.dp.simco.registro.controller.RegistroController.java
private String uploadArchiveImage() {
    String nameArchive = getFilename(file1);
    String extencion = "";
    if (StringUtils.isNoneBlank(nameArchive)) {
        switch (file1.getContentType()) {
        case "image/png":
            extencion = ".png";
            break;
        case "image/jpeg":
            extencion = ".jpg";
            break;
        case "image/gif":
            extencion = ".gif";
            break;
        }
        DateFormat fechaHora = new SimpleDateFormat("yyyyMMddHHmmss");
        String formato = fechaHora.format(new Date());
        String ruta = formato + extencion;
        File file = new File(ConstantesUtil.FILE_SYSTEM + ruta);
        try (InputStream input = file1.getInputStream()) {
            Files.copy(input, file.toPath());
        } catch (IOException ex) {
            log.error("ERROR - uploadArchiveImage()" + ex);
        }
        return ruta;
    }
    return null;
}
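The upload above is stored under a name derived from a timestamp with one-second resolution, so two uploads in the same second would make the second Files.copy throw FileAlreadyExistsException. A sketch of a variant that adds a random suffix and checks the byte count returned by Files.copy (the uploads directory and names are placeholders, not the controller's configuration):

import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.UUID;

public class SaveUpload {
    static String save(InputStream upload, String extension) throws IOException {
        // a timestamp alone can collide; a random suffix keeps the name unique
        String name = System.currentTimeMillis() + "-" + UUID.randomUUID() + extension;
        Path target = Paths.get("uploads").resolve(name);   // placeholder directory
        Files.createDirectories(target.getParent());
        long written = Files.copy(upload, target);          // returns bytes copied
        if (written == 0) {
            Files.deleteIfExists(target);
            throw new IOException("empty upload");
        }
        return name;
    }
}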
From source file:com.twosigma.beaker.core.rest.PluginServiceLocatorRest.java
private String generateNginxConfig() throws IOException, InterruptedException {
    java.nio.file.Path confDir = Paths.get(this.nginxServDir, "conf");
    java.nio.file.Path logDir = Paths.get(this.nginxServDir, "logs");
    java.nio.file.Path nginxClientTempDir = Paths.get(this.nginxServDir, "client_temp");

    if (Files.notExists(confDir)) {
        confDir.toFile().mkdirs();
        Files.copy(Paths.get(this.nginxDir + "/mime.types"), Paths.get(confDir.toString() + "/mime.types"));
    }
    if (Files.notExists(logDir)) {
        logDir.toFile().mkdirs();
    }
    if (Files.notExists(nginxClientTempDir)) {
        nginxClientTempDir.toFile().mkdirs();
    }

    String restartId = RandomStringUtils.random(12, false, true);
    String nginxConfig = this.nginxTemplate;
    StringBuilder pluginSection = new StringBuilder();
    for (PluginConfig pConfig : this.plugins.values()) {
        String auth = encoder.encodeBase64String(("beaker:" + pConfig.getPassword()).getBytes());
        String nginxRule = pConfig.getNginxRules();
        if (this.nginxPluginRules.containsKey(nginxRule)) {
            nginxRule = this.nginxPluginRules.get(nginxRule);
        } else {
            if (nginxRule.equals("rest"))
                nginxRule = REST_RULES;
            else if (nginxRule.equals("ipython1"))
                nginxRule = IPYTHON1_RULES;
            else if (nginxRule.equals("ipython2"))
                nginxRule = IPYTHON2_RULES;
            else {
                throw new RuntimeException("unrecognized nginx rule: " + nginxRule);
            }
        }
        nginxRule = nginxRule.replace("%(port)s", Integer.toString(pConfig.getPort())).replace("%(auth)s", auth)
                .replace("%(base_url)s", (urlHash.isEmpty() ? "" : "/" + urlHash + "/") + pConfig.getBaseUrl());
        pluginSection.append(nginxRule + "\n\n");
    }
    String auth = encoder.encodeBase64String(("beaker:" + this.corePassword).getBytes());
    String listenSection;
    String authCookieRule;
    String startPage;
    String hostName = "none"; // XXX hack
    try {
        // XXX should allow name to be set by user in bkConfig
        hostName = InetAddress.getLocalHost().getHostName();
    } catch (UnknownHostException e) {
        logger.warn("warning: UnknownHostException from InetAddress.getLocalHost().getHostName(), ignored");
    }
    if (this.listenInterface != null && !this.listenInterface.equals("*")) {
        hostName = this.listenInterface;
    }
    if (this.publicServer) {
        if (this.listenInterface != null && !this.listenInterface.equals("*")) {
            listenSection = "listen " + this.listenInterface + ":" + this.portBase + " ssl;\n";
        } else {
            listenSection = "listen " + this.portBase + " ssl;\n";
        }
        listenSection += "server_name " + hostName + ";\n";
        if (this.useHttpsCert == null || this.useHttpsKey == null) {
            listenSection += "ssl_certificate " + this.nginxServDir + "/ssl_cert.pem;\n";
            listenSection += "ssl_certificate_key " + this.nginxServDir + "/ssl_cert.pem;\n";
        } else {
            listenSection += "ssl_certificate " + this.useHttpsCert + ";\n";
            listenSection += "ssl_certificate_key " + this.useHttpsKey + ";\n";
        }
        authCookieRule = "if ($http_cookie !~ \"BeakerAuth=" + this.authCookie + "\") {return 403;}";
        startPage = "/login/login.html";
    } else {
        if (this.listenInterface != null) {
            if (this.listenInterface.equals("*")) {
                listenSection = "listen " + this.servPort + ";\n";
            } else {
                listenSection = "listen " + this.listenInterface + ":" + this.servPort + ";\n";
            }
        } else {
            listenSection = "listen 127.0.0.1:" + this.servPort + ";\n";
        }
        if (this.requirePassword) {
            authCookieRule = "if ($http_cookie !~ \"BeakerAuth=" + this.authCookie + "\") {return 403;}";
            startPage = "/login/login.html";
        } else {
            authCookieRule = "";
            startPage = "/beaker/";
        }
    }
    nginxConfig = nginxConfig.replace("%(plugin_section)s", pluginSection.toString());
    nginxConfig = nginxConfig.replace("%(extra_rules)s", this.nginxExtraRules);
    nginxConfig = nginxConfig.replace("%(catch_outdated_requests_rule)s",
            this.showZombieLogging ? "" : this.CATCH_OUTDATED_REQUESTS_RULE);
    nginxConfig = nginxConfig.replace("%(user_folder)s", this.userFolder);
    nginxConfig = nginxConfig.replace("%(host)s", hostName);
    nginxConfig = nginxConfig.replace("%(port_main)s", Integer.toString(this.portBase));
    nginxConfig = nginxConfig.replace("%(port_beaker)s", Integer.toString(this.corePort));
    nginxConfig = nginxConfig.replace("%(port_clear)s", Integer.toString(this.servPort));
    nginxConfig = nginxConfig.replace("%(listen_on)s", this.publicServer ? "*" : "127.0.0.1");
    nginxConfig = nginxConfig.replace("%(listen_section)s", listenSection);
    nginxConfig = nginxConfig.replace("%(auth_cookie_rule)s", authCookieRule);
    nginxConfig = nginxConfig.replace("%(start_page)s", startPage);
    nginxConfig = nginxConfig.replace("%(port_restart)s", Integer.toString(this.restartPort));
    nginxConfig = nginxConfig.replace("%(auth)s", auth);
    nginxConfig = nginxConfig.replace("%(sessionauth)s", this.authToken);
    nginxConfig = nginxConfig.replace("%(restart_id)s", restartId);
    nginxConfig = nginxConfig.replace("%(urlhash)s", urlHash.isEmpty() ? "" : urlHash + "/");
    nginxConfig = nginxConfig.replace("%(static_dir)s", this.nginxStaticDir.replaceAll("\\\\", "/"));
    nginxConfig = nginxConfig.replace("%(nginx_dir)s", this.nginxServDir.replaceAll("\\\\", "/"));
    // Apparently on windows our jetty backends network stack can be
    // in a state where the spin/probe connection from the client gets
    // stuck and it does not fail until it times out.
    nginxConfig = nginxConfig.replace("%(proxy_connect_timeout)s", windows() ? "1" : "90");

    java.nio.file.Path targetFile = Paths.get(this.nginxServDir, "conf/nginx.conf");
    writePrivateFile(targetFile, nginxConfig);
    return restartId;
}
From source file:io.personium.core.model.impl.fs.DavCmpFsImplTest.java
/**
 * Test get().
 * normal.
 * DavEncryptEnabled is true.
 * @throws Exception Unintended exception in test
 */
@Test
public void get_Normal_encrypt_true() throws Exception {
    String contentPath = TEST_DIR_PATH + CONTENT_FILE;
    InputStream inputStream = null;
    File contentFile = new File(contentPath);
    try {
        inputStream = getSystemResourceAsStream("davFile/encrypt01.txt");
        Files.copy(inputStream, contentFile.toPath());

        // --------------------
        // Test method args
        // --------------------
        String rangeHeaderField = null;

        // --------------------
        // Mock settings
        // --------------------
        davCmpFsImpl = PowerMockito.spy(DavCmpFsImpl.create("", null));
        Whitebox.setInternalState(davCmpFsImpl, "fsPath", TEST_DIR_PATH);
        doReturn("text/plain").when(davCmpFsImpl).getContentType();
        doReturn(98L).when(davCmpFsImpl).getContentLength();
        doReturn(DataCryptor.ENCRYPTION_TYPE_AES).when(davCmpFsImpl).getEncryptionType();
        DataCryptor.setKeyString(AES_KEY);
        doReturn(CELL_ID).when(davCmpFsImpl).getCellId();
        doReturn("\"1-1487652733383\"").when(davCmpFsImpl).getEtag();

        // --------------------
        // Expected result
        // --------------------
        String sourceFileMD5 = md5Hex(getSystemResourceAsStream("davFile/decrypt01.txt"));
        ResponseBuilder expected = Response.ok().header(HttpHeaders.CONTENT_LENGTH, 98L)
                .header(HttpHeaders.CONTENT_TYPE, "text/plain").header(ETAG, "\"1-1487652733383\"")
                .header(PersoniumCoreUtils.HttpHeaders.ACCEPT_RANGES, RangeHeaderHandler.BYTES_UNIT);

        // --------------------
        // Run method
        // --------------------
        ResponseBuilder actual = davCmpFsImpl.get(rangeHeaderField);

        // --------------------
        // Confirm result
        // --------------------
        assertThat(actual.build().getStatus(), is(expected.build().getStatus()));
        assertThat(actual.build().getMetadata().toString(), is(expected.build().getMetadata().toString()));
        StreamingOutputForDavFile entity = (StreamingOutputForDavFile) actual.build().getEntity();
        ByteArrayOutputStream output = new ByteArrayOutputStream();
        entity.write(output);
        assertThat(md5Hex(output.toByteArray()), is(sourceFileMD5));
    } finally {
        if (inputStream != null) {
            inputStream.close();
        }
        contentFile.delete();
    }
}
From source file:io.personium.core.model.impl.fs.DavCmpFsImplTest.java
/**
 * Test get().
 * normal.
 * Range specification.
 * DavEncryptEnabled is false.
 * @throws Exception Unintended exception in test
 */
@Test
public void get_Normal_range_encrypt_false() throws Exception {
    String contentPath = TEST_DIR_PATH + CONTENT_FILE;
    InputStream inputStream = null;
    File contentFile = new File(contentPath);
    try {
        inputStream = getSystemResourceAsStream("davFile/decrypt01.txt");
        Files.copy(inputStream, contentFile.toPath());

        // --------------------
        // Test method args
        // --------------------
        String rangeHeaderField = "bytes=10-40";

        // --------------------
        // Mock settings
        // --------------------
        davCmpFsImpl = PowerMockito.spy(DavCmpFsImpl.create("", null));
        Whitebox.setInternalState(davCmpFsImpl, "fsPath", TEST_DIR_PATH);
        doReturn("text/plain").when(davCmpFsImpl).getContentType();
        doReturn(98L).when(davCmpFsImpl).getContentLength();
        doReturn(DataCryptor.ENCRYPTION_TYPE_NONE).when(davCmpFsImpl).getEncryptionType();
        doReturn(CELL_ID).when(davCmpFsImpl).getCellId();
        doReturn("\"1-1487652733383\"").when(davCmpFsImpl).getEtag();

        // --------------------
        // Expected result
        // --------------------
        String sourceFileMD5 = md5Hex(getSystemResourceAsStream("davFile/range01.txt"));
        ResponseBuilder expected = Response.status(HttpStatus.SC_PARTIAL_CONTENT)
                .header(PersoniumCoreUtils.HttpHeaders.CONTENT_RANGE, "bytes 10-40/98")
                .header(HttpHeaders.CONTENT_LENGTH, 31L).header(HttpHeaders.CONTENT_TYPE, "text/plain")
                .header(ETAG, "\"1-1487652733383\"")
                .header(PersoniumCoreUtils.HttpHeaders.ACCEPT_RANGES, RangeHeaderHandler.BYTES_UNIT);

        // --------------------
        // Run method
        // --------------------
        ResponseBuilder actual = davCmpFsImpl.get(rangeHeaderField);

        // --------------------
        // Confirm result
        // --------------------
        assertThat(actual.build().getStatus(), is(expected.build().getStatus()));
        assertThat(actual.build().getMetadata().toString(), is(expected.build().getMetadata().toString()));
        StreamingOutputForDavFileWithRange entity = (StreamingOutputForDavFileWithRange) actual.build()
                .getEntity();
        ByteArrayOutputStream output = new ByteArrayOutputStream();
        entity.write(output);
        assertThat(md5Hex(output.toByteArray()), is(sourceFileMD5));
    } finally {
        if (inputStream != null) {
            inputStream.close();
        }
        contentFile.delete();
    }
}
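All three DavCmpFsImplTest cases above share the same fixture setup: a classpath resource is streamed into the content file with Files.copy and the file is removed in the finally block. A minimal, framework-free sketch of that setup (the directory and resource names are placeholders):

import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

public class FixtureFromClasspath {
    public static void main(String[] args) throws IOException {
        Path contentFile = Paths.get("testdir/file01.txt");   // placeholder fixture path
        Files.createDirectories(contentFile.getParent());
        try (InputStream in = FixtureFromClasspath.class
                .getResourceAsStream("/davFile/file01.txt")) {   // classpath resource, as in the tests
            if (in == null) {
                throw new IOException("resource not found on classpath");
            }
            Files.copy(in, contentFile);
            // ... exercise the code under test against contentFile ...
        } finally {
            Files.deleteIfExists(contentFile);   // mirror the contentFile.delete() cleanup
        }
    }
}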