List of usage examples for java.nio.file Path getParent
Path getParent();
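Before the project examples below, here is a minimal, self-contained sketch (not taken from any of the listed sources) illustrating the basic contract of getParent(): it returns the path without its last element, or null when the path has no parent. The concrete paths are illustrative and assume a Unix-style file system.

import java.nio.file.Path;
import java.nio.file.Paths;

public class GetParentDemo {
    public static void main(String[] args) {
        Path file = Paths.get("/var/log/app/server.log");
        // getParent() drops the last element, yielding /var/log/app
        System.out.println(file.getParent());

        // A relative single-element path has no parent, so getParent() returns null
        Path bare = Paths.get("server.log");
        System.out.println(bare.getParent()); // null
    }
}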
From source file:com.hpe.caf.worker.testing.validation.ReferenceDataValidator.java
@Override
public boolean isValid(Object testedPropertyValue, Object validatorPropertyValue) {
    if (testedPropertyValue == null && validatorPropertyValue == null)
        return true;
    ObjectMapper mapper = new ObjectMapper();
    ContentFileTestExpectation expectation = mapper.convertValue(validatorPropertyValue,
            ContentFileTestExpectation.class);
    ReferencedData referencedData = mapper.convertValue(testedPropertyValue, ReferencedData.class);
    InputStream dataStream;

    if (expectation.getExpectedContentFile() == null && expectation.getExpectedSimilarityPercentage() == 0) {
        return true;
    }

    try {
        System.out.println("About to retrieve content for " + referencedData.toString());
        dataStream = ContentDataHelper.retrieveReferencedData(dataStore, codec, referencedData);
        System.out.println("Finished retrieving content for " + referencedData.toString());
    } catch (DataSourceException e) {
        e.printStackTrace();
        System.err.println("Failed to acquire referenced data.");
        e.printStackTrace();
        TestResultHelper.testFailed("Failed to acquire referenced data. Exception message: " + e.getMessage(), e);
        return false;
    }
    try {
        String contentFileName = expectation.getExpectedContentFile();
        Path contentFile = Paths.get(contentFileName);
        if (Files.notExists(contentFile) && !Strings.isNullOrEmpty(testSourcefileBaseFolder)) {
            contentFile = Paths.get(testSourcefileBaseFolder, contentFileName);
        }
        if (Files.notExists(contentFile)) {
            contentFile = Paths.get(testDataFolder, contentFileName);
        }
        byte[] expectedFileBytes = Files.readAllBytes(contentFile);

        if (expectation.getComparisonType() == ContentComparisonType.TEXT) {
            String actualText = IOUtils.toString(dataStream, StandardCharsets.UTF_8);
            String expectedText = new String(expectedFileBytes, StandardCharsets.UTF_8);
            if (expectation.getExpectedSimilarityPercentage() == 100) {
                boolean equals = actualText.equals(expectedText);
                if (!equals) {
                    String message = "Expected and actual texts were different.\n\n*** Expected Text ***\n"
                            + expectedText + "\n\n*** Actual Text ***\n" + actualText;
                    System.err.println(message);
                    if (throwOnValidationFailure)
                        TestResultHelper.testFailed(message);
                    return false;
                }
                return true;
            }
            double similarity = ContentComparer.calculateSimilarityPercentage(expectedText, actualText);
            System.out.println("Compared text similarity:" + similarity + "%");
            if (similarity < expectation.getExpectedSimilarityPercentage()) {
                String message = "Expected similarity of " + expectation.getExpectedSimilarityPercentage()
                        + "% but actual similarity was " + similarity + "%";
                System.err.println(message);
                if (throwOnValidationFailure)
                    TestResultHelper.testFailed(message);
                return false;
            }
        } else {
            byte[] actualDataBytes = IOUtils.toByteArray(dataStream);
            boolean equals = Arrays.equals(actualDataBytes, expectedFileBytes);
            if (!equals) {
                String actualContentFileName = contentFile.getFileName() + "_actual";
                Path actualFilePath = Paths.get(contentFile.getParent().toString(), actualContentFileName);
                Files.deleteIfExists(actualFilePath);
                Files.write(actualFilePath, actualDataBytes, StandardOpenOption.CREATE);
                String message = "Data returned was different than expected for file: " + contentFileName
                        + "\nActual content saved in file: " + actualFilePath.toString();
                System.err.println(message);
                if (throwOnValidationFailure)
                    TestResultHelper.testFailed(message);
                return false;
            }
        }
    } catch (IOException e) {
        e.printStackTrace();
        TestResultHelper.testFailed("Error while processing reference data! " + e.getMessage(), e);
        return false;
    }
    return true;
}
From source file:org.eclipse.winery.repository.importing.CSARImporter.java
/**
 * @param basePath the base path where to resolve files from. This is the directory of the Definitions
 * @param imports the list of imports to import. SIDE EFFECT: this list is modified. After this
 *        method has run, the list contains the imports to be put into the wrapper element
 */
private void importImports(Path basePath, TOSCAMetaFile tmf, List<TImport> imports, final List<String> errors,
        boolean overwrite, final boolean asyncWPDParsing) throws IOException {
    for (Iterator<TImport> iterator = imports.iterator(); iterator.hasNext();) {
        TImport imp = iterator.next();
        String importType = imp.getImportType();
        String namespace = imp.getNamespace();
        String loc = imp.getLocation();

        if (namespace == null) {
            errors.add("not namespace-qualified imports are not supported.");
            continue;
        }

        if (loc == null) {
            errors.add("Empty location imports are not supported.");
        } else {
            if (importType.equals(Namespaces.TOSCA_NAMESPACE)) {
                if (!Util.isRelativeURI(loc)) {
                    errors.add("Absolute URIs for definitions import not supported.");
                    continue;
                }

                // URIs are encoded
                loc = Util.URLdecode(loc);

                Path defsPath = basePath.resolve(loc);
                // fallback for older CSARs, where the location is given from the root
                if (!Files.exists(defsPath)) {
                    defsPath = basePath.getParent().resolve(loc);
                    // the real existence check is done in importDefinitions
                }

                this.importDefinitions(tmf, defsPath, errors, overwrite, asyncWPDParsing);
                // imports of definitions don't have to be kept as these are managed by Winery
                iterator.remove();
            } else {
                this.importOtherImport(basePath, imp, errors, importType, overwrite);
            }
        }
    }
}
From source file:gov.noaa.pfel.coastwatch.util.FileVisitorDNLS.java
/** Invoked for a file in a directory. */
public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
    int oSize = directoryPA.size();
    try {
        String name = file.getFileName().toString();
        if (!fileNamePattern.matcher(name).matches()) {
            if (debugMode)
                String2.log(">> fileName doesn't match: name=" + name + " regex=" + fileNameRegex);
            return FileVisitResult.CONTINUE;
        }

        //getParent returns \\ or /, without trailing /
        String ttDir = String2.replaceAll(file.getParent().toString(), fromSlash, toSlash) + toSlash;
        if (debugMode)
            String2.log(">> add fileName: " + ttDir + name);
        directoryPA.add(ttDir);
        namePA.add(name);
        lastModifiedPA.add(attrs.lastModifiedTime().toMillis());
        sizePA.add(attrs.size());
        //for debugging only:
        //String2.log(ttDir + name +
        //    " mod=" + attrs.lastModifiedTime().toMillis() +
        //    " size=" + attrs.size());
    } catch (Throwable t) {
        if (directoryPA.size() > oSize)
            directoryPA.remove(oSize);
        if (namePA.size() > oSize)
            namePA.remove(oSize);
        if (lastModifiedPA.size() > oSize)
            lastModifiedPA.remove(oSize);
        if (sizePA.size() > oSize)
            sizePA.remove(oSize);
        String2.log(MustBe.throwableToString(t));
    }
    return FileVisitResult.CONTINUE;
}
From source file:org.roda.core.model.ModelService.java
public File renameFolder(File folder, String newName, boolean replaceExisting, boolean reindexResources)
        throws AlreadyExistsException, GenericException, NotFoundException, RequestNotValidException,
        AuthorizationDeniedException {
    Path basePath = RodaCoreFactory.getStoragePath();
    StoragePath fileStoragePath = ModelUtils.getFileStoragePath(folder);
    Path fullPath = basePath.resolve(FSUtils.getStoragePathAsString(fileStoragePath, false));

    if (FSUtils.exists(fullPath)) {
        FSUtils.move(fullPath, fullPath.getParent().resolve(newName), replaceExisting);

        if (reindexResources) {
            notifyAipUpdated(folder.getAipId());
        }

        return retrieveFile(folder.getAipId(), folder.getRepresentationId(), folder.getPath(), newName);
    } else {
        throw new NotFoundException("Folder was moved or does not exist");
    }
}
From source file:org.apache.nifi.controller.repository.StandardProcessSession.java
@Override
public FlowFile importFrom(final Path source, final boolean keepSourceFile, final FlowFile destination) {
    validateRecordState(destination);
    // TODO: find a better solution. With Windows 7 and Java 7 (very early update, at least),
    // Files.isWritable(source.getParent()) returns false, even when it should be true.
    if (!keepSourceFile && !Files.isWritable(source.getParent()) && !source.getParent().toFile().canWrite()) {
        // If we do NOT want to keep the file, ensure that we can delete it, or else error.
        throw new FlowFileAccessException("Cannot write to path " + source.getParent().toFile().getAbsolutePath()
                + " so cannot delete file; will not import.");
    }

    final StandardRepositoryRecord record = records.get(destination);
    final ContentClaim newClaim;
    final long claimOffset;

    try {
        newClaim = context.getContentRepository().create(context.getConnectable().isLossTolerant());
        claimLog.debug("Creating ContentClaim {} for 'importFrom' for {}", newClaim, destination);
    } catch (final IOException e) {
        throw new FlowFileAccessException("Unable to create ContentClaim due to " + e.toString(), e);
    }

    claimOffset = 0L;
    long newSize = 0L;
    try {
        newSize = context.getContentRepository().importFrom(source, newClaim);
        bytesWritten += newSize;
        bytesRead += newSize;
    } catch (final Throwable t) {
        destroyContent(newClaim);
        throw new FlowFileAccessException(
                "Failed to import data from " + source + " for " + destination + " due to " + t.toString(), t);
    }

    removeTemporaryClaim(record);

    final FlowFileRecord newFile = new StandardFlowFileRecord.Builder().fromFlowFile(record.getCurrent())
            .contentClaim(newClaim).contentClaimOffset(claimOffset).size(newSize)
            .addAttribute(CoreAttributes.FILENAME.key(), source.toFile().getName()).build();
    record.setWorking(newFile, CoreAttributes.FILENAME.key(), source.toFile().getName());
    if (!keepSourceFile) {
        deleteOnCommit.put(newFile, source);
    }
    return newFile;
}
From source file:eu.itesla_project.dymola.DymolaImpactAnalysis.java
private List<String> writeDymolaInputs(Path workingDir, List<Contingency> contingencies) throws IOException {
    LOGGER.info(" Start writing dymola inputs");

    List<String> retList = new ArrayList<>();

    DdbConfig ddbConfig = DdbConfig.load();
    String jbossHost = ddbConfig.getJbossHost();
    String jbossPort = ddbConfig.getJbossPort();
    String jbossUser = ddbConfig.getJbossUser();
    String jbossPassword = ddbConfig.getJbossPassword();

    Path dymolaExportPath = workingDir.resolve(MO_EXPORT_DIRECTORY);
    if (!Files.exists(dymolaExportPath)) {
        Files.createDirectory(dymolaExportPath);
    }

    //retrieve modelica export parameters from configuration
    String modelicaVersion = config.getModelicaVersion();
    String sourceEngine = config.getSourceEngine();
    String sourceVersion = config.getSourceEngineVersion();
    Path modelicaPowerSystemLibraryPath = Paths.get(config.getModelicaPowerSystemLibraryFile());

    //write the modelica events file, to feed the modelica exporter
    Path eventsPath = workingDir.resolve(MODELICA_EVENTS_CSV_FILENAME);
    writeModelicaExporterContingenciesFile(eventsPath, contingencies);

    //these are only optional params needed if the source is eurostag
    Path modelicaLibPath = null;

    String slackId = config.getSlackId();
    if ("".equals(slackId)) {
        slackId = null; // null when not specified ()
    }

    LoadFlowFactory loadFlowFactory;
    try {
        loadFlowFactory = config.getLoadFlowFactoryClass().newInstance();
    } catch (InstantiationException | IllegalAccessException e) {
        throw new RuntimeException(e);
    }

    LOGGER.info("Exporting modelica data for network {}, working state-id {} ", network,
            network.getStateManager().getWorkingStateId());
    ModelicaMainExporter exporter = new ModelicaMainExporter(network, slackId, jbossHost, jbossPort, jbossUser,
            jbossPassword, modelicaVersion, sourceEngine, sourceVersion, modelicaLibPath, loadFlowFactory);
    exporter.export(dymolaExportPath);
    ModEventsExport eventsExporter = new ModEventsExport(
            dymolaExportPath.resolve(network.getId() + ".mo").toFile(), eventsPath.toFile());
    eventsExporter.export(dymolaExportPath);
    LOGGER.info(" modelica data exported.");

    // now assemble the input files to feed dymola
    // one .zip per contingency; in the zip, the .mo file and the powersystem library
    //TODO here it is assumed that contingencies ids in csv file start from 0 (i.e. 0 is the first contingency);
    //     id should be decoupled from the implementation
    try (final Stream<Path> pathStream = Files.walk(dymolaExportPath)) {
        pathStream.filter((p) -> !p.toFile().isDirectory() && p.toFile().getAbsolutePath().contains("events_")
                && p.toFile().getAbsolutePath().endsWith(".mo")).forEach(p -> {
                    GenericArchive archive = ShrinkWrap.createDomain().getArchiveFactory()
                            .create(GenericArchive.class);
                    try (FileSystem fileSystem = ShrinkWrapFileSystems.newFileSystem(archive)) {
                        Path rootDir = fileSystem.getPath("/");
                        Files.copy(modelicaPowerSystemLibraryPath,
                                rootDir.resolve(modelicaPowerSystemLibraryPath.getFileName()));
                        Files.copy(Paths.get(p.toString()),
                                rootDir.resolve(DymolaUtil.DYMOLA_SIM_MODEL_INPUT_PREFIX + ".mo"));

                        String[] c = p.getFileName().toString().replace(".mo", "").split("_");
                        try (OutputStream os = Files.newOutputStream(dymolaExportPath.getParent().resolve(
                                DymolaUtil.DYMOLAINPUTZIPFILENAMEPREFIX + "_" + c[c.length - 1] + ".zip"))) {
                            archive.as(ZipExporter.class).exportTo(os);
                            retList.add(new String(c[c.length - 1]));
                        } catch (IOException e) {
                            //e.printStackTrace();
                            throw new RuntimeException(e);
                        }
                    } catch (IOException e) {
                        throw new RuntimeException(e);
                    }
                });
    }
    retList.sort(Comparator.<String>naturalOrder());

    //prepare param inputs for indexes from indexes properties file
    LOGGER.info("writing input indexes parameters in .mat format - start ");
    try {
        Path baseWp43ConfigFile = PlatformConfig.CONFIG_DIR.resolve(WP43_CONFIG_FILE_NAME);
        HierarchicalINIConfiguration configuration = new HierarchicalINIConfiguration(
                baseWp43ConfigFile.toFile());

        //fix params for smallsignal index (cfr EurostagImpactAnalysis sources)
        SubnodeConfiguration node = configuration.getSection("smallsignal");
        node.setProperty("f_instant", Double.toString(parameters.getFaultEventInstant()));
        for (int i = 0; i < contingencies.size(); i++) {
            Contingency contingency = contingencies.get(i);
            if (contingency.getElements().isEmpty()) {
                throw new AssertionError("Empty contingency " + contingency.getId());
            }
            Iterator<ContingencyElement> it = contingency.getElements().iterator();
            // compute the maximum fault duration
            double maxDuration = getFaultDuration(it.next());
            while (it.hasNext()) {
                maxDuration = Math.max(maxDuration, getFaultDuration(it.next()));
            }
            node.setProperty("f_duration", Double.toString(maxDuration));
        }

        DymolaAdaptersMatParamsWriter writer = new DymolaAdaptersMatParamsWriter(configuration);
        for (String cId : retList) {
            String parFileNamePrefix = DymolaUtil.DYMOLA_SIM_MAT_OUTPUT_PREFIX + "_" + cId + "_wp43_";
            String parFileNameSuffix = "_pars.mat";
            String zippedParFileNameSuffix = "_pars.zip";
            try (OutputStream os = Files.newOutputStream(dymolaExportPath.getParent()
                    .resolve(DymolaUtil.DYMOLAINPUTZIPFILENAMEPREFIX + "_" + cId + zippedParFileNameSuffix))) {
                JavaArchive archive = ShrinkWrap.create(JavaArchive.class);
                Path sfile1 = ShrinkWrapFileSystems.newFileSystem(archive).getPath("/");
                Arrays.asList(config.getIndexesNames()).forEach(indexName -> writer.write(indexName,
                        sfile1.resolve(parFileNamePrefix + indexName + parFileNameSuffix)));
                archive.as(ZipExporter.class).exportTo(os);
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        }
    } catch (ConfigurationException exc) {
        throw new RuntimeException(exc);
    }

    LOGGER.info("writing input indexes parameters in .mat format - end - {}", retList);

    return retList;
}
From source file:org.domainmath.gui.MainFrame.java
private void runFile2(Path path) {
    File file_selected = path.toFile();
    String f = file_selected.getName();
    String config_name = file_selected.getName().substring(0, f.lastIndexOf(".")) + ".rcfg";
    File cfg = new File(file_selected.getParent() + File.separator + config_name);
    try {
        JAXBContext jaxbContext = JAXBContext.newInstance(RunConfigurations.class);
        Unmarshaller jaxbUnmarshaller = jaxbContext.createUnmarshaller();
        RunConfigurations runConfig = (RunConfigurations) jaxbUnmarshaller.unmarshal(cfg);
        int selectedIndex = runConfig.getIndex();
        List<RunConfiguration> runConfiguration = runConfig.getRunConfiguration();

        ExternalEngine externalEngine = new ExternalEngine(this.octavePath + " --interactive");
        externalEngine.setTitle(octavePath);
        externalEngine.setLocationRelativeTo(this);
        externalEngine.setVisible(true);
        externalEngine.execute("cd '" + path.getParent() + "'\n");
        externalEngine.execute(runConfiguration.get(selectedIndex).getConfig());
    } catch (JAXBException ex) {
        String name = path.getFileName().toString();
        String ext = name.substring(name.lastIndexOf("."));

        if (ext.equalsIgnoreCase(".m")) {
            String m_file_name = name.substring(0, name.indexOf(".m"));
            ExternalEngine externalEngine = new ExternalEngine(this.octavePath + " --interactive");
            externalEngine.setTitle(octavePath);
            externalEngine.setLocationRelativeTo(this);
            externalEngine.setVisible(true);
            externalEngine.execute("cd '" + path.getParent() + "'\n");
            externalEngine.execute(m_file_name + "()");
        }
    }
}
From source file:at.tfr.securefs.xnio.MessageHandlerImpl.java
@Override
public void handleMessage(String json, MessageSender messageSender) throws IOException {
    log.debug("handleMessage: " + json);

    final Message message = objectMapper.readValue(json, Message.class);
    Path path = configuration.getBasePath().resolve(message.getPath());
    if (!path.relativize(configuration.getBasePath()).toString().equals("..")) {
        throw new SecurityException("invalid path spec: " + message.getPath());
    }

    try {
        final String uniqueKey = message.getUniqueKey();
        // find the Channel for this data stream:
        StreamInfo<ChannelPipe<StreamSourceChannel, StreamSinkChannel>> info = activeStreams.getStreams()
                .get(uniqueKey);
        if (message.getType() == MessageType.OPEN && info != null) {
            log.warn("illegal state on Open stream: " + message);
            IoUtils.safeClose(info.getStream().getRightSide());
            messageSender.send(new Message(MessageType.ERROR, message.getPath()).key(uniqueKey));
        }

        switch (message.getType()) {
        case ERROR:
            log.info("error from Client: " + json);
        case CLOSE: {
            if (info != null) {
                IoUtils.safeClose(info.getStream().getRightSide());
            }
        }
            break;

        case OPEN: {
            switch (message.getSubType()) {
            case READ: {
                final InputStream is = Files.newInputStream(path, StandardOpenOption.READ);
                final InputStream cis = new CipherInputStream(is, getCipher(message, Cipher.DECRYPT_MODE));
                final ChannelPipe<StreamSourceChannel, StreamSinkChannel> pipe = xnioWorker
                        .createHalfDuplexPipe();

                pipe.getLeftSide().getReadSetter().set(new SecureChannelWriterBase(message) {
                    @Override
                    protected void write(Message message) {
                        try {
                            messageSender.send(message);
                        } catch (Exception e) {
                            log.warn("cannot write message=" + message + " : " + e, e);
                        }
                    }
                });
                pipe.getLeftSide().getCloseSetter().set(new ChannelListener<StreamSourceChannel>() {
                    @Override
                    public void handleEvent(StreamSourceChannel channel) {
                        activeStreams.getStreams().remove(uniqueKey);
                        messageSender.send(new Message(MessageType.CLOSE, message.getPath()).key(uniqueKey));
                    }
                });
                pipe.getRightSide().getWriteSetter().set(new ChannelListener<StreamSinkChannel>() {
                    private byte[] bytes = new byte[Constants.BUFFER_SIZE];

                    @Override
                    public void handleEvent(StreamSinkChannel channel) {
                        try {
                            int count = 0;
                            while ((count = cis.read(bytes, 0, bytes.length)) > 0) {
                                if (count > 0) {
                                    Channels.writeBlocking(pipe.getRightSide(),
                                            ByteBuffer.wrap(bytes, 0, count));
                                }
                                if (count < 0) {
                                    pipe.getRightSide().close();
                                } else {
                                    channel.resumeWrites();
                                }
                            }
                        } catch (Exception e) {
                            log.warn("cannot read from cypher: " + e, e);
                            IoUtils.safeClose(channel);
                        }
                    }
                });

                activeStreams.getStreams().put(uniqueKey,
                        new StreamInfo<ChannelPipe<StreamSourceChannel, StreamSinkChannel>>(pipe,
                                message.getPath()));

                // start sending data:
                pipe.getLeftSide().resumeReads();
                pipe.getRightSide().resumeWrites();
            }
                break;

            case WRITE: {
                Files.createDirectories(path.getParent());
                OutputStream os = Files.newOutputStream(path, StandardOpenOption.CREATE,
                        StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.WRITE);
                OutputStream cos = new CipherOutputStream(os, getCipher(message, Cipher.ENCRYPT_MODE));
                ChannelPipe<StreamSourceChannel, StreamSinkChannel> pipe = xnioWorker.createHalfDuplexPipe();

                pipe.getLeftSide().getReadSetter().set(new SecureChannelReaderBase() {
                    @Override
                    public void handleEvent(StreamSourceChannel channel) {
                        readChannel(message, cos, pipe, channel);
                    }
                });
                pipe.getLeftSide().getCloseSetter().set(new SecureChannelReaderBase() {
                    @Override
                    public void handleEvent(StreamSourceChannel channel) {
                        try {
                            cos.close();
                            activeStreams.getStreams().remove(pipe.toString());
                            messageSender
                                    .send(new Message(MessageType.CLOSE, message.getPath()).key(uniqueKey));
                            log.info("closed channel: " + pipe.toString());
                        } catch (IOException e) {
                            log.warn("cannot close stream: message=" + message + " : " + e, e);
                        }
                    }
                });

                activeStreams.getStreams().put(uniqueKey,
                        new StreamInfo<ChannelPipe<StreamSourceChannel, StreamSinkChannel>>(pipe,
                                message.getPath()));

                // start receiving data:
                pipe.getLeftSide().resumeReads();
            }
                break;

            default:
                messageSender.send(new Message(MessageType.ERROR, message.getPath()).key(uniqueKey));
                break;
            }
        }
            break;

        case DATA: {
            if (info != null) {
                Channels.writeBlocking(info.getStream().getRightSide(), ByteBuffer.wrap(message.getBytes()));
            } else {
                messageSender.send(new Message(MessageType.ERROR, message.getPath()).key(uniqueKey));
            }
        }
            break;
        }
    } catch (IOException e) {
        log.warn("cannot handle message: " + message + " : " + e, e);
        throw e;
    } catch (Exception e) {
        log.warn("cannot handle message: " + message + " : " + e, e);
        throw new IOException("cannot handle message: " + message + " : " + e, e);
    }
}
From source file:org.tallison.cc.CCGetter.java
private void fetch(CCIndexRecord r, Path rootDir, BufferedWriter writer) throws IOException {
    Path targFile = rootDir.resolve(r.getDigest().substring(0, 2) + "/" + r.getDigest());
    if (Files.isRegularFile(targFile)) {
        writeStatus(r, FETCH_STATUS.ALREADY_IN_REPOSITORY, writer);
        logger.info("already retrieved:" + targFile.toAbsolutePath());
        return;
    }

    String url = AWS_BASE + r.getFilename();
    URI uri = null;
    try {
        uri = new URI(url);
    } catch (URISyntaxException e) {
        logger.warn("Bad url: " + url);
        writeStatus(r, FETCH_STATUS.BAD_URL, writer);
        return;
    }

    CloseableHttpClient httpClient = HttpClients.createDefault();
    HttpHost target = new HttpHost(uri.getHost());
    String urlPath = uri.getRawPath();
    if (uri.getRawQuery() != null) {
        urlPath += "?" + uri.getRawQuery();
    }
    HttpGet httpGet = null;
    try {
        httpGet = new HttpGet(urlPath);
    } catch (Exception e) {
        logger.warn("bad path " + uri.toString(), e);
        writeStatus(r, FETCH_STATUS.BAD_URL, writer);
        return;
    }
    if (proxyHost != null && proxyPort > -1) {
        HttpHost proxy = new HttpHost(proxyHost, proxyPort, "http");
        RequestConfig requestConfig = RequestConfig.custom().setProxy(proxy).build();
        httpGet.setConfig(requestConfig);
    }

    httpGet.addHeader("Range", r.getOffsetHeader());
    HttpCoreContext coreContext = new HttpCoreContext();
    CloseableHttpResponse httpResponse = null;
    URI lastURI = null;
    try {
        httpResponse = httpClient.execute(target, httpGet, coreContext);
        RedirectLocations redirectLocations = (RedirectLocations) coreContext
                .getAttribute(DefaultRedirectStrategy.REDIRECT_LOCATIONS);
        if (redirectLocations != null) {
            for (URI redirectURI : redirectLocations.getAll()) {
                lastURI = redirectURI;
            }
        } else {
            lastURI = httpGet.getURI();
        }
    } catch (IOException e) {
        logger.warn("IOException for " + uri.toString(), e);
        writeStatus(r, FETCH_STATUS.FETCHED_IO_EXCEPTION, writer);
        return;
    }
    lastURI = uri.resolve(lastURI);

    if (httpResponse.getStatusLine().getStatusCode() != 200
            && httpResponse.getStatusLine().getStatusCode() != 206) {
        logger.warn("Bad status for " + uri.toString() + " : " + httpResponse.getStatusLine().getStatusCode());
        writeStatus(r, FETCH_STATUS.FETCHED_NOT_200, writer);
        return;
    }

    Path tmp = null;
    Header[] headers = null;
    boolean isTruncated = false;
    try {
        //this among other parts is plagiarized from centic9's CommonCrawlDocumentDownload
        //probably saved me hours. Thank you, Dominik!
        tmp = Files.createTempFile("cc-getter", "");
        try (InputStream is = new GZIPInputStream(httpResponse.getEntity().getContent())) {
            WARCRecord warcRecord = new WARCRecord(new FastBufferedInputStream(is), "", 0);
            ArchiveRecordHeader archiveRecordHeader = warcRecord.getHeader();
            if (archiveRecordHeader.getHeaderFields().containsKey(WARCConstants.HEADER_KEY_TRUNCATED)) {
                isTruncated = true;
            }
            headers = LaxHttpParser.parseHeaders(warcRecord, "UTF-8");
            Files.copy(warcRecord, tmp, StandardCopyOption.REPLACE_EXISTING);
        }
    } catch (IOException e) {
        writeStatus(r, null, headers, 0L, isTruncated, FETCH_STATUS.FETCHED_IO_EXCEPTION_READING_ENTITY, writer);
        deleteTmp(tmp);
        return;
    }

    String digest = null;
    long tmpLength = 0l;
    try (InputStream is = Files.newInputStream(tmp)) {
        digest = base32.encodeAsString(DigestUtils.sha1(is));
        tmpLength = Files.size(tmp);
    } catch (IOException e) {
        writeStatus(r, null, headers, tmpLength, isTruncated, FETCH_STATUS.FETCHED_IO_EXCEPTION_SHA1, writer);
        logger.warn("IOException during digesting: " + tmp.toAbsolutePath());
        deleteTmp(tmp);
        return;
    }

    if (Files.exists(targFile)) {
        writeStatus(r, digest, headers, tmpLength, isTruncated, FETCH_STATUS.ALREADY_IN_REPOSITORY, writer);
        deleteTmp(tmp);
        return;
    }
    try {
        Files.createDirectories(targFile.getParent());
        Files.copy(tmp, targFile);
    } catch (IOException e) {
        writeStatus(r, digest, headers, tmpLength, isTruncated,
                FETCH_STATUS.FETCHED_EXCEPTION_COPYING_TO_REPOSITORY, writer);
        deleteTmp(tmp);
    }

    writeStatus(r, digest, headers, tmpLength, isTruncated, FETCH_STATUS.ADDED_TO_REPOSITORY, writer);
    deleteTmp(tmp);
}