List of usage examples for java.nio.file.Files.size(Path)
public static long size(Path path) throws IOException
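Before the project-specific examples, here is a minimal self-contained sketch of the call itself. It is not taken from any of the listed projects; the file name example.txt is a placeholder for illustration only.

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

public class FilesSizeExample {
    public static void main(String[] args) throws IOException {
        Path path = Paths.get("example.txt"); // placeholder path, replace with a real file
        if (Files.exists(path)) {
            // Files.size returns the file size in bytes and throws IOException
            // (e.g. NoSuchFileException) if the file cannot be read.
            long bytes = Files.size(path);
            System.out.println(path + " is " + bytes + " bytes");
        }
    }
}

As the examples below show, callers typically either guard the call with Files.exists or catch IOException, since the file may disappear between the check and the call.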
From source file: net.mozq.picto.core.ProcessCore.java

public static void findFiles(ProcessCondition processCondition, Consumer<ProcessData> processDataSetter,
        BooleanSupplier processStopper) throws IOException {

    Set<FileVisitOption> fileVisitOptionSet;
    if (processCondition.isFollowLinks()) {
        fileVisitOptionSet = EnumSet.of(FileVisitOption.FOLLOW_LINKS);
    } else {
        fileVisitOptionSet = Collections.emptySet();
    }

    Files.walkFileTree(processCondition.getSrcRootPath(), fileVisitOptionSet, processCondition.getDept(),
            new SimpleFileVisitor<Path>() {

        @Override
        public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException {
            if (processStopper.getAsBoolean()) {
                return FileVisitResult.TERMINATE;
            }
            return FileVisitResult.CONTINUE;
        }

        @Override
        public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
            if (attrs.isDirectory()) {
                return FileVisitResult.SKIP_SUBTREE;
            }
            if (processStopper.getAsBoolean()) {
                return FileVisitResult.TERMINATE;
            }
            if (!processCondition.getPathFilter().accept(file, attrs)) {
                return FileVisitResult.SKIP_SUBTREE;
            }

            Path rootRelativeSubPath = processCondition.getSrcRootPath().relativize(file.getParent());
            ImageMetadata imageMetadata = getImageMetadata(file);

            Date baseDate;
            if (processCondition.isChangeFileCreationDate() || processCondition.isChangeFileModifiedDate()
                    || processCondition.isChangeFileAccessDate() || processCondition.isChangeExifDate()) {
                baseDate = getBaseDate(processCondition, file, attrs, imageMetadata);
            } else {
                baseDate = null;
            }

            String destSubPathname = processCondition.getDestSubPathFormat().format(varName -> {
                try {
                    switch (varName) {
                    case "Now": return new Date();
                    case "ParentSubPath": return rootRelativeSubPath.toString();
                    case "FileName": return file.getFileName().toString();
                    case "BaseName": return FileUtilz.getBaseName(file.getFileName().toString());
                    case "Extension": return FileUtilz.getExt(file.getFileName().toString());
                    case "Size": return Long.valueOf(Files.size(file));
                    case "CreationDate": return (processCondition.isChangeFileCreationDate()) ? baseDate : new Date(attrs.creationTime().toMillis());
                    case "ModifiedDate": return (processCondition.isChangeFileModifiedDate()) ? baseDate : new Date(attrs.lastModifiedTime().toMillis());
                    case "AccessDate": return (processCondition.isChangeFileAccessDate()) ? baseDate : new Date(attrs.lastAccessTime().toMillis());
                    case "PhotoTakenDate": return (processCondition.isChangeExifDate()) ? baseDate : getPhotoTakenDate(file, imageMetadata);
                    case "Width": return getEXIFIntValue(imageMetadata, ExifTagConstants.EXIF_TAG_EXIF_IMAGE_WIDTH);
                    case "Height": return getEXIFIntValue(imageMetadata, ExifTagConstants.EXIF_TAG_EXIF_IMAGE_LENGTH);
                    case "FNumber": return getEXIFDoubleValue(imageMetadata, ExifTagConstants.EXIF_TAG_FNUMBER);
                    case "Aperture": return getEXIFDoubleValue(imageMetadata, ExifTagConstants.EXIF_TAG_APERTURE_VALUE);
                    case "MaxAperture": return getEXIFDoubleValue(imageMetadata, ExifTagConstants.EXIF_TAG_MAX_APERTURE_VALUE);
                    case "ISO": return getEXIFIntValue(imageMetadata, ExifTagConstants.EXIF_TAG_ISO);
                    case "FocalLength": return getEXIFDoubleValue(imageMetadata, ExifTagConstants.EXIF_TAG_FOCAL_LENGTH); // ?
                    case "FocalLength35mm": return getEXIFDoubleValue(imageMetadata, ExifTagConstants.EXIF_TAG_FOCAL_LENGTH_IN_35MM_FORMAT);
                    case "ShutterSpeed": return getEXIFDoubleValue(imageMetadata, ExifTagConstants.EXIF_TAG_SHUTTER_SPEED_VALUE);
                    case "Exposure": return getEXIFStringValue(imageMetadata, ExifTagConstants.EXIF_TAG_EXPOSURE);
                    // case "ExposureTime": return getEXIFDoubleValue(imageMetadata, ExifTagConstants.EXIF_TAG_EXPOSURE_TIME);
                    // case "ExposureMode": return getEXIFIntValue(imageMetadata, ExifTagConstants.EXIF_TAG_EXPOSURE_MODE);
                    case "ExposureProgram": return getEXIFIntValue(imageMetadata, ExifTagConstants.EXIF_TAG_EXPOSURE_PROGRAM);
                    case "Brightness": return getEXIFDoubleValue(imageMetadata, ExifTagConstants.EXIF_TAG_BRIGHTNESS_VALUE);
                    case "WhiteBalance": return getEXIFIntValue(imageMetadata, ExifTagConstants.EXIF_TAG_WHITE_BALANCE_1);
                    case "LightSource": return getEXIFIntValue(imageMetadata, ExifTagConstants.EXIF_TAG_LIGHT_SOURCE);
                    case "Lens": return getEXIFStringValue(imageMetadata, ExifTagConstants.EXIF_TAG_LENS);
                    case "LensMake": return getEXIFStringValue(imageMetadata, ExifTagConstants.EXIF_TAG_LENS_MAKE);
                    case "LensModel": return getEXIFStringValue(imageMetadata, ExifTagConstants.EXIF_TAG_LENS_MODEL);
                    case "LensSerialNumber": return getEXIFStringValue(imageMetadata, ExifTagConstants.EXIF_TAG_LENS_SERIAL_NUMBER);
                    case "Make": return getEXIFStringValue(imageMetadata, TiffTagConstants.TIFF_TAG_MAKE);
                    case "Model": return getEXIFStringValue(imageMetadata, TiffTagConstants.TIFF_TAG_MODEL);
                    case "SerialNumber": return getEXIFStringValue(imageMetadata, ExifTagConstants.EXIF_TAG_SERIAL_NUMBER);
                    case "Software": return getEXIFStringValue(imageMetadata, ExifTagConstants.EXIF_TAG_SOFTWARE);
                    case "ProcessingSoftware": return getEXIFStringValue(imageMetadata, ExifTagConstants.EXIF_TAG_PROCESSING_SOFTWARE);
                    case "OwnerName": return getEXIFStringValue(imageMetadata, ExifTagConstants.EXIF_TAG_OWNER_NAME);
                    case "CameraOwnerName": return getEXIFStringValue(imageMetadata, ExifTagConstants.EXIF_TAG_CAMERA_OWNER_NAME);
                    case "GPSLat": return getEXIFGpsLat(imageMetadata);
                    case "GPSLatDeg": return getEXIFDoubleValue(imageMetadata, GpsTagConstants.GPS_TAG_GPS_LATITUDE, 0);
                    case "GPSLatMin": return getEXIFDoubleValue(imageMetadata, GpsTagConstants.GPS_TAG_GPS_LATITUDE, 1);
                    case "GPSLatSec": return getEXIFDoubleValue(imageMetadata, GpsTagConstants.GPS_TAG_GPS_LATITUDE, 2);
                    case "GPSLatRef": return getEXIFStringValue(imageMetadata, GpsTagConstants.GPS_TAG_GPS_LATITUDE_REF);
                    case "GPSLon": return getEXIFGpsLon(imageMetadata);
                    case "GPSLonDeg": return getEXIFDoubleValue(imageMetadata, GpsTagConstants.GPS_TAG_GPS_LONGITUDE, 0);
                    case "GPSLonMin": return getEXIFDoubleValue(imageMetadata, GpsTagConstants.GPS_TAG_GPS_LONGITUDE, 1);
                    case "GPSLonSec": return getEXIFDoubleValue(imageMetadata, GpsTagConstants.GPS_TAG_GPS_LONGITUDE, 2);
                    case "GPSLonRef": return getEXIFStringValue(imageMetadata, GpsTagConstants.GPS_TAG_GPS_LONGITUDE_REF);
                    case "GPSAlt": return getEXIFDoubleValue(imageMetadata, GpsTagConstants.GPS_TAG_GPS_ALTITUDE);
                    case "GPSAltRef": return getEXIFIntValue(imageMetadata, GpsTagConstants.GPS_TAG_GPS_ALTITUDE_REF);
                    default:
                        throw new PictoInvalidDestinationPathException(Messages
                                .getString("message.warn.invalid.destSubPath.varName", varName));
                    }
                } catch (PictoException e) {
                    throw e;
                } catch (Exception e) {
                    throw new PictoInvalidDestinationPathException(
                            Messages.getString("message.warn.invalid.destSubPath.pattern"), e);
                }
            });

            Path destSubPath = processCondition.getDestRootPath().resolve(destSubPathname).normalize();
            if (!destSubPath.startsWith(processCondition.getDestRootPath())) {
                throw new PictoInvalidDestinationPathException(
                        Messages.getString("message.warn.invalid.destination.path", destSubPath));
            }

            ProcessData processData = new ProcessData();
            processData.setSrcPath(file);
            processData.setSrcFileAttributes(attrs);
            processData.setDestPath(destSubPath);
            processData.setBaseDate(baseDate);

            processDataSetter.accept(processData);

            return FileVisitResult.CONTINUE;
        }
    });
}
From source file: de.digiway.rapidbreeze.server.model.download.Download.java

/**
 * Starts this {@linkplain Download}.
 */
void start() {
    switch (statusHandler.getCurrentStatus()) {
    case RUNNING:
        return;
    case PAUSE:
        statusHandler.newStatus(DownloadStatus.RUNNING);
        return;
    }

    try {
        long startAt = 0;
        if (Files.exists(tempFile)) {
            try {
                startAt = Files.size(tempFile);
            } catch (IOException ex) {
                // File might be removed in the meantime
                startAt = 0;
            }
        }

        StorageProviderDownloadClient storageDownload = getDownloadClient();
        throttledInputStream = new ThrottledInputStream(storageDownload.start(url, startAt));
        throttledInputStream.setThrottle(throttleMaxBytesPerSecond);
        sourceChannel = Channels.newChannel(throttledInputStream);
        targetChannel = FileChannel.open(tempFile, StandardOpenOption.WRITE, StandardOpenOption.APPEND,
                StandardOpenOption.CREATE);
        targetChannel.position(startAt);
    } catch (IOException | RuntimeException ex) {
        LOG.log(Level.SEVERE, "An exception occurred during data transfer setup for "
                + Download.class.getSimpleName() + ":" + this, ex);
        closeChannels();
        cachedUrlStatus = null;
        statusHandler.newException(ex);
        return;
    }

    done = false;
    statusHandler.newStatus(DownloadStatus.RUNNING);
}
From source file: org.elasticsearch.xpack.qa.sql.security.SqlSecurityTestCase.java

@Before
public void setInitialAuditLogOffset() {
    SecurityManager sm = System.getSecurityManager();
    if (sm != null) {
        sm.checkPermission(new SpecialPermission());
    }
    AccessController.doPrivileged((PrivilegedAction<Void>) () -> {
        if (false == Files.exists(AUDIT_LOG_FILE)) {
            auditLogWrittenBeforeTestStart = 0;
            return null;
        }
        if (false == Files.isRegularFile(AUDIT_LOG_FILE)) {
            throw new IllegalStateException(
                    "expected tests.audit.logfile [" + AUDIT_LOG_FILE + "] to be a plain file but wasn't");
        }
        try {
            auditLogWrittenBeforeTestStart = Files.size(AUDIT_LOG_FILE);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
        return null;
    });
}
From source file: fr.ortolang.diffusion.store.binary.BinaryStoreServiceBean.java

@Override
@TransactionAttribute(TransactionAttributeType.SUPPORTS)
public long size(String identifier) throws BinaryStoreServiceException, DataNotFoundException {
    Path path = getPathForIdentifier(identifier);
    if (!Files.exists(path)) {
        throw new DataNotFoundException("Unable to find an object with id [" + identifier + "] in the storage");
    }
    try {
        return Files.size(path);
    } catch (Exception e) {
        throw new BinaryStoreServiceException(e);
    }
}
From source file: com.sastix.cms.server.services.content.impl.HashedDirectoryServiceImpl.java

@Override
public long getFileSize(String resourceURI, String tenantID) throws IOException {
    return Files.size(Paths.get(getAbsolutePath(resourceURI, tenantID)));
}
From source file: com.facebook.buck.jvm.java.DefaultJavaLibraryIntegrationTest.java

@Test
public void testBuildJavaLibraryWithoutSrcsAndVerifyAbi() throws IOException, CompressorException {
    setUpProjectWorkspaceForScenario("abi");
    workspace.enableDirCache();

    // Run `buck build`.
    BuildTarget target = BuildTargetFactory.newInstance("//:no_srcs");
    ProcessResult buildResult = workspace.runBuckCommand("build", target.getFullyQualifiedName());
    buildResult.assertSuccess("Successful build should exit with 0.");
    Path outputPath = CompilerOutputPaths.of(target, filesystem).getOutputJarPath().get();
    Path outputFile = workspace.getPath(outputPath);
    assertTrue(Files.exists(outputFile));
    // TODO(mbolin): When we produce byte-for-byte identical JAR files across builds, do:
    //
    //   HashCode hashOfOriginalJar = Files.hash(outputFile, Hashing.sha1());
    //
    // And then compare that to the output when //:no_srcs is built again with --no-cache.
    long sizeOfOriginalJar = Files.size(outputFile);

    // This verifies that the ABI key was written correctly.
    workspace.verify();

    // Verify the build cache.
    Path buildCache = workspace.getPath(filesystem.getBuckPaths().getCacheDir());
    assertTrue(Files.isDirectory(buildCache));

    ArtifactCache dirCache = TestArtifactCaches.createDirCacheForTest(workspace.getDestPath(), buildCache);
    int totalArtifactsCount = DirArtifactCacheTestUtil.getAllFilesInCache(dirCache).size();
    assertEquals("There should be two entries (a zip and metadata) per rule key type (default and input-"
            + "based) in the build cache.", 4, totalArtifactsCount);

    Sha1HashCode ruleKey = workspace.getBuildLog().getRuleKey(target.getFullyQualifiedName());

    // Run `buck clean`.
    ProcessResult cleanResult = workspace.runBuckCommand("clean", "--keep-cache");
    cleanResult.assertSuccess("Successful clean should exit with 0.");

    totalArtifactsCount = getAllFilesInPath(buildCache).size();
    assertEquals("The build cache should still exist.", 4, totalArtifactsCount);

    // Corrupt the build cache!
    Path artifactZip = DirArtifactCacheTestUtil.getPathForRuleKey(dirCache, new RuleKey(ruleKey.asHashCode()),
            Optional.empty());
    HashMap<String, byte[]> archiveContents = new HashMap<>(TarInspector.readTarZst(artifactZip));
    archiveContents.put(outputPath.toString(), emptyJarFile());
    writeTarZst(artifactZip, archiveContents);

    // Run `buck build` again.
    ProcessResult buildResult2 = workspace.runBuckCommand("build", target.getFullyQualifiedName());
    buildResult2.assertSuccess("Successful build should exit with 0.");
    assertTrue(Files.isRegularFile(outputFile));

    ZipFile outputZipFile = new ZipFile(outputFile.toFile());
    assertEquals("The output file will be an empty zip if it is read from the build cache.", 0,
            outputZipFile.stream().count());
    outputZipFile.close();

    // Run `buck clean` followed by `buck build` yet again, but this time, specify `--no-cache`.
    ProcessResult cleanResult2 = workspace.runBuckCommand("clean", "--keep-cache");
    cleanResult2.assertSuccess("Successful clean should exit with 0.");
    ProcessResult buildResult3 = workspace.runBuckCommand("build", "--no-cache", target.getFullyQualifiedName());
    buildResult3.assertSuccess();

    outputZipFile = new ZipFile(outputFile.toFile());
    assertNotEquals("The contents of the file should no longer be pulled from the corrupted build cache.", 0,
            outputZipFile.stream().count());
    outputZipFile.close();

    assertEquals("We cannot do a byte-for-byte comparison with the original JAR because timestamps might "
            + "have changed, but we verify that they are the same size, as a proxy.",
            sizeOfOriginalJar, Files.size(outputFile));
}
From source file: org.commonwl.view.cwl.CWLService.java

/**
 * Gets the Workflow object from internal parsing
 * @param workflowFile The workflow file to be parsed
 * @param packedWorkflowId The ID of the workflow object if the file is packed
 * @return The constructed workflow object
 */
public Workflow parseWorkflowNative(Path workflowFile, String packedWorkflowId) throws IOException {

    // Check file size limit before parsing
    long fileSizeBytes = Files.size(workflowFile);
    if (fileSizeBytes <= singleFileSizeLimit) {

        // Parse file as yaml
        JsonNode cwlFile = yamlStringToJson(readFileToString(workflowFile.toFile()));

        // Check packed workflow occurs
        if (packedWorkflowId != null) {
            boolean found = false;
            if (cwlFile.has(DOC_GRAPH)) {
                for (JsonNode jsonNode : cwlFile.get(DOC_GRAPH)) {
                    if (extractProcess(jsonNode) == CWLProcess.WORKFLOW) {
                        String currentId = jsonNode.get(ID).asText();
                        if (currentId.startsWith("#")) {
                            currentId = currentId.substring(1);
                        }
                        if (currentId.equals(packedWorkflowId)) {
                            cwlFile = jsonNode;
                            found = true;
                            break;
                        }
                    }
                }
            }
            if (!found)
                throw new WorkflowNotFoundException();
        } else {
            // Check the current json node is a workflow
            if (extractProcess(cwlFile) != CWLProcess.WORKFLOW) {
                throw new WorkflowNotFoundException();
            }
        }

        // Use filename for label if there is no defined one
        String label = extractLabel(cwlFile);
        if (label == null) {
            label = workflowFile.getFileName().toString();
        }

        // Construct the rest of the workflow model
        Workflow workflowModel = new Workflow(label, extractDoc(cwlFile), getInputs(cwlFile),
                getOutputs(cwlFile), getSteps(cwlFile));
        workflowModel.setCwltoolVersion(cwlTool.getVersion());

        // Generate DOT graph
        StringWriter graphWriter = new StringWriter();
        ModelDotWriter dotWriter = new ModelDotWriter(graphWriter);
        try {
            dotWriter.writeGraph(workflowModel);
            workflowModel.setVisualisationDot(graphWriter.toString());
        } catch (IOException ex) {
            logger.error("Failed to create DOT graph for workflow: " + ex.getMessage());
        }

        return workflowModel;

    } else {
        throw new IOException("File '" + workflowFile.getFileName() + "' is over singleFileSizeLimit - "
                + FileUtils.byteCountToDisplaySize(fileSizeBytes) + "/"
                + FileUtils.byteCountToDisplaySize(singleFileSizeLimit));
    }
}
From source file: io.undertow.server.handlers.SenderTestCase.java

@Test
public void testAsyncTransfer() throws Exception {
    StringBuilder sb = new StringBuilder(TXS);
    for (int i = 0; i < TXS; ++i) {
        sb.append("a");
    }
    HttpGet get = new HttpGet(DefaultServer.getDefaultServerURL() + "/transfer?blocking=false");
    TestHttpClient client = new TestHttpClient();
    try {
        HttpResponse result = client.execute(get);
        Assert.assertEquals(StatusCodes.OK, result.getStatusLine().getStatusCode());
        Path file = Paths
                .get(SenderTestCase.class.getResource(SenderTestCase.class.getSimpleName() + ".class").toURI());
        long length = Files.size(file);
        byte[] data = new byte[(int) length * TXS];
        for (int i = 0; i < TXS; i++) {
            try (DataInputStream is = new DataInputStream(Files.newInputStream(file))) {
                is.readFully(data, (int) (i * length), (int) length);
            }
        }
        Assert.assertArrayEquals(data, HttpClientUtils.readRawResponse(result));
    } finally {
        client.getConnectionManager().shutdown();
    }
}
From source file: com.disney.opa.util.AttachmentUtils.java

/**
 * Creates an attachment object and verifies that the file exists on disk.
 *
 * @param fileName
 * @param fileLabel
 * @param productId
 * @param productStateID
 * @param userID
 * @return an attachment
 */
public Attachment getAttachment(String fileName, String fileLabel, int productId, int productStateID,
        int userID) {
    Attachment attachment = new Attachment();
    try {
        attachment.setProductID(productId);
        attachment.setProductStateID(productStateID);
        attachment.setName(fileLabel);
        attachment.setFilename(fileName);
        attachment.setUserID(userID);
        attachment.setMimeType(AttachmentUtils.getMimeType(fileName));

        String imageFile = getRelativeOriginalFilePath(productId) + File.separator + fileName;
        attachment.setFilePath(imageFile);
        attachment.setThumbnailFilePath(null);

        Path path = Paths.get(getRootAttachmentPath() + imageFile);
        if (Files.exists(path)) {
            attachment.setFileSize(Files.size(path));
        }
    } catch (Exception e) {
        log.error("Error in getAttachment, fileName: " + fileName + ", fileLabel: " + fileLabel
                + ", productId: " + productId + ", userID: " + userID, e);
    }
    return attachment;
}