Usage examples for java.nio.file.Files.createTempFile
Signature: public static Path createTempFile(String prefix, String suffix, FileAttribute<?>... attrs) throws IOException
From source file:edu.harvard.iq.dataverse.ingest.IngestServiceBean.java
/**
 * Saves the uploaded stream to a temp location, determines its MIME type, and creates one or
 * more DataFile objects from it. Special handling: gzipped FITS files are uncompressed and
 * ingested as regular FITS; zip archives are unpacked into multiple DataFiles; zipped
 * shapefiles are re-zipped into complete sets. Otherwise a single DataFile is created.
 *
 * @param version the dataset version the new files will belong to
 * @param inputStream the uploaded content; copied to a temp file immediately
 * @param fileName the file name supplied with the upload
 * @param suppliedContentType the MIME type supplied by the client (may be empty or unreliable)
 * @return the list of created DataFiles, or null on failure
 * @throws IOException if the temp directory is not configured or the initial save fails
 */
public List<DataFile> createDataFiles(DatasetVersion version, InputStream inputStream, String fileName,
        String suppliedContentType) throws IOException {
    List<DataFile> datafiles = new ArrayList<DataFile>();
    String warningMessage = null;
    // Save the file in a temporary location for now. The "temporary" location is the key here;
    // this is why we are not using the DataStore framework for this - the assumption is that
    // temp files will always be stored on the local filesystem. -- L.A. Jul. 2014
    Path tempFile = null;
    if (getFilesTempDirectory() != null) {
        tempFile = Files.createTempFile(Paths.get(getFilesTempDirectory()), "tmp", "upload");
        logger.fine("Will attempt to save the file as: " + tempFile.toString());
        Files.copy(inputStream, tempFile, StandardCopyOption.REPLACE_EXISTING);
    } else {
        throw new IOException("Temp directory is not configured.");
    }
    logger.fine("mime type supplied: " + suppliedContentType);
    // Try our own utilities (Jhove, etc.) to determine the file type; the type supplied by the
    // browser (or a harvest source) may or may not be better than what we detect. -- L.A.
    String recognizedType = null;
    String finalType = null;
    try {
        recognizedType = FileUtil.determineFileType(tempFile.toFile(), fileName);
        logger.fine("File utility recognized the file as " + recognizedType);
        if (recognizedType != null && !recognizedType.equals("")) {
            // Prefer the recognized type over the supplied one when:
            // 1. the supplied type is missing or some form of "unknown";
            // 2. the supplied type is an ingestable type we never trust the browser on
            //    (Stata, SPSS, R) - though we DO trust it for the CSV and XLSX types;
            // 3. our utility recognized an ingestable or special type
            //    (tabular, FITS-gzipped, shapefile, zip).
            if (suppliedContentType == null || suppliedContentType.equals("")
                    || suppliedContentType.equalsIgnoreCase(MIME_TYPE_UNDETERMINED_DEFAULT)
                    || suppliedContentType.equalsIgnoreCase(MIME_TYPE_UNDETERMINED_BINARY)
                    || (ingestableAsTabular(suppliedContentType)
                            && !suppliedContentType.equalsIgnoreCase(MIME_TYPE_CSV)
                            && !suppliedContentType.equalsIgnoreCase(MIME_TYPE_CSV_ALT)
                            && !suppliedContentType.equalsIgnoreCase(MIME_TYPE_XLSX))
                    || ingestableAsTabular(recognizedType)
                    || recognizedType.equals("application/fits-gzipped")
                    || recognizedType.equalsIgnoreCase(ShapefileHandler.SHAPEFILE_FILE_TYPE)
                    || recognizedType.equals(MIME_TYPE_ZIP)) {
                finalType = recognizedType;
            }
        }
    } catch (Exception ex) {
        logger.warning("Failed to run the file utility mime type check on file " + fileName);
    }
    if (finalType == null) {
        finalType = (suppliedContentType == null || suppliedContentType.equals(""))
                ? MIME_TYPE_UNDETERMINED_DEFAULT
                : suppliedContentType;
    }
    // Special case 1: a gzipped FITS file is uncompressed and ingested as a regular FITS file.
    if (finalType.equals("application/fits-gzipped")) {
        InputStream uncompressedIn = null;
        String finalFileName = fileName;
        // strip the ".gz" extension, since we are going to uncompress the content:
        if (fileName != null && fileName.matches(".*\\.gz$")) {
            finalFileName = fileName.replaceAll("\\.gz$", "");
        }
        DataFile datafile = null;
        try {
            uncompressedIn = new GZIPInputStream(new FileInputStream(tempFile.toFile()));
            datafile = createSingleDataFile(version, uncompressedIn, finalFileName,
                    MIME_TYPE_UNDETERMINED_DEFAULT);
        } catch (IOException ioex) {
            // On failure, fall through and create a DataFile from the original,
            // still-compressed file further down.
            datafile = null;
        } finally {
            if (uncompressedIn != null) {
                try {
                    uncompressedIn.close();
                } catch (IOException e) {
                }
            }
        }
        if (datafile != null) {
            // remove the compressed temp file (failure here is very non-fatal):
            try {
                tempFile.toFile().delete();
            } catch (SecurityException ex) {
                logger.warning("Failed to delete temporary file " + tempFile.toString());
            }
            datafiles.add(datafile);
            return datafiles;
        }
    // Special case 2: a ZIP file is unpacked and multiple DataFiles are created
    // from its contents.
    } else if (finalType.equals("application/zip")) {
        ZipInputStream unZippedIn = null;
        ZipEntry zipEntry = null;
        int fileNumberLimit = systemConfig.getZipUploadFilesLimit();
        try {
            Charset charset = null;
            // TODO: (?) consider letting the user specify the charset of the file names in the
            // zip (like the SPSS ingest language-encoding pulldown) - ZipInputStream bails out
            // on entry names that are not valid in the current charset. -- L.A. 4.0 beta12
            if (charset != null) {
                unZippedIn = new ZipInputStream(new FileInputStream(tempFile.toFile()), charset);
            } else {
                unZippedIn = new ZipInputStream(new FileInputStream(tempFile.toFile()));
            }
            while (true) {
                try {
                    zipEntry = unZippedIn.getNextEntry();
                } catch (IllegalArgumentException iaex) {
                    // ZipInputStream documentation doesn't even mention that getNextEntry()
                    // throws IllegalArgumentException - but that's what happens when an entry
                    // name is not valid in the current charset. -- L.A.
                    warningMessage = "Failed to unpack Zip file. (Unknown Character Set used in a file name?) Saving the file as is.";
                    logger.warning(warningMessage);
                    throw new IOException();
                }
                if (zipEntry == null) {
                    break;
                }
                // Some zip entries may be directories - we simply skip them:
                if (!zipEntry.isDirectory()) {
                    // NOTE(review): ">" admits fileNumberLimit + 1 files; confirm whether
                    // the limit is meant to be inclusive.
                    if (datafiles.size() > fileNumberLimit) {
                        logger.warning("Zip upload - too many files.");
                        warningMessage = "The number of files in the zip archive is over the limit ("
                                + fileNumberLimit
                                + "); please upload a zip archive with fewer files, if you want them to be ingested "
                                + "as individual DataFiles.";
                        throw new IOException();
                    }
                    String fileEntryName = zipEntry.getName();
                    logger.fine("ZipEntry, file: " + fileEntryName);
                    if (fileEntryName != null && !fileEntryName.equals("")) {
                        String shortName = fileEntryName.replaceFirst("^.*[\\/]", "");
                        // Skip "fake" entries that MacOS X creates for filesystem
                        // elements (they start with "._"):
                        if (!shortName.startsWith("._") && !shortName.startsWith(".DS_Store")
                                && !"".equals(shortName)) {
                            // OK, this seems like a valid file entry - read it and
                            // create a DataFile with it:
                            DataFile datafile = createSingleDataFile(version, unZippedIn, shortName,
                                    MIME_TYPE_UNDETERMINED_DEFAULT, false);
                            // NOTE(review): datafile is dereferenced here before the
                            // "datafile != null" check below - NPE risk if
                            // createSingleDataFile can return null.
                            if (!fileEntryName.equals(shortName)) {
                                // Use the entry's directory path as a file category:
                                String categoryName = fileEntryName.replaceFirst("[\\/][^\\/]*$", "");
                                if (!"".equals(categoryName)) {
                                    logger.fine("setting category to " + categoryName);
                                    datafile.getFileMetadata()
                                            .addCategoryByName(categoryName.replaceAll("[\\/]", "-"));
                                }
                            }
                            if (datafile != null) {
                                // We created this datafile with the mime type "unknown"; now
                                // that it is saved in a temp location, try to determine its
                                // real type:
                                String tempFileName = getFilesTempDirectory() + "/"
                                        + datafile.getStorageIdentifier();
                                try {
                                    recognizedType = FileUtil.determineFileType(new File(tempFileName),
                                            shortName);
                                    logger.fine("File utility recognized unzipped file as " + recognizedType);
                                    if (recognizedType != null && !recognizedType.equals("")) {
                                        datafile.setContentType(recognizedType);
                                    }
                                } catch (Exception ex) {
                                    logger.warning("Failed to run the file utility mime type check on file "
                                            + fileName);
                                }
                                datafiles.add(datafile);
                            }
                        }
                    }
                }
                unZippedIn.closeEntry();
            }
        } catch (IOException ioex) {
            // Clear the datafiles list and let ingest default to creating a single
            // DataFile out of the zip file, saved as is.
            logger.warning("Unzipping failed; rolling back to saving the file as is.");
            if (warningMessage == null) {
                warningMessage = "Failed to unzip the file. Saving the file as is.";
            }
            datafiles.clear();
        } finally {
            if (unZippedIn != null) {
                try {
                    unZippedIn.close();
                } catch (Exception zEx) {
                }
            }
        }
        if (datafiles.size() > 0) {
            // Link the unpacked data files to the dataset/version:
            Iterator<DataFile> itf = datafiles.iterator();
            while (itf.hasNext()) {
                DataFile datafile = itf.next();
                datafile.setOwner(version.getDataset());
                if (version.getFileMetadatas() == null) {
                    version.setFileMetadatas(new ArrayList());
                }
                version.getFileMetadatas().add(datafile.getFileMetadata());
                datafile.getFileMetadata().setDatasetVersion(version);
                version.getDataset().getFiles().add(datafile);
            }
            // remove the uploaded zip file:
            try {
                Files.delete(tempFile);
            } catch (IOException ioex) {
                // do nothing - it's just a temp file.
                logger.warning("Could not remove temp file " + tempFile.getFileName().toString());
            }
            return datafiles;
        }
    // Special case 3: shape files may have to be split into multiple files,
    // one zip archive per each complete set of shape files.
    } else if (finalType.equalsIgnoreCase(ShapefileHandler.SHAPEFILE_FILE_TYPE)) {
        File rezipFolder = this.getShapefileUnzipTempDirectory();
        IngestServiceShapefileHelper shpIngestHelper;
        shpIngestHelper = new IngestServiceShapefileHelper(tempFile.toFile(), rezipFolder);
        boolean didProcessWork = shpIngestHelper.processFile();
        if (!(didProcessWork)) {
            logger.severe("Processing of zipped shapefile failed.");
            return null;
        }
        for (File finalFile : shpIngestHelper.getFinalRezippedFiles()) {
            FileInputStream finalFileInputStream = new FileInputStream(finalFile);
            finalType = this.getContentType(finalFile);
            if (finalType == null) {
                logger.warning("Content type is null; but should default to 'MIME_TYPE_UNDETERMINED_DEFAULT'");
                // NOTE(review): finalFileInputStream is not closed on this path - stream leak.
                continue;
            }
            DataFile new_datafile = createSingleDataFile(version, finalFileInputStream, finalFile.getName(),
                    finalType);
            if (new_datafile != null) {
                datafiles.add(new_datafile);
            } else {
                logger.severe("Could not add part of rezipped shapefile. new_datafile was null: "
                        + finalFile.getName());
            }
            finalFileInputStream.close();
        }
        if (datafiles.size() > 0) {
            return datafiles;
        } else {
            logger.severe("No files added from directory of rezipped shapefiles");
        }
        return null;
    }
    // Finally, if none of the special cases above applied (or if unpacking failed), create and
    // return a single DataFile. (Note: passing null for the InputStream, because the file is
    // already saved; we just need to rename it, below.)
    DataFile datafile = createSingleDataFile(version, null, fileName, finalType);
    if (datafile != null) {
        fileService.generateStorageIdentifier(datafile);
        if (!tempFile.toFile()
                .renameTo(new File(getFilesTempDirectory() + "/" + datafile.getStorageIdentifier()))) {
            return null;
        }
        // MD5:
        MD5Checksum md5Checksum = new MD5Checksum();
        try {
            datafile.setmd5(
                    md5Checksum.CalculateMD5(getFilesTempDirectory() + "/" + datafile.getStorageIdentifier()));
        } catch (Exception md5ex) {
            logger.warning("Could not calculate MD5 signature for new file " + fileName);
        }
        // Attach any warning accumulated above (e.g. a failed unzip) as an ingest report:
        if (warningMessage != null) {
            createIngestFailureReport(datafile, warningMessage);
            datafile.SetIngestProblem();
        }
        datafiles.add(datafile);
        return datafiles;
    }
    return null;
}
From source file:info.pancancer.arch3.worker.WorkerRunnable.java
/**
 * Writes the content of the job object to an INI file which will be used by the workflow.
 *
 * @param job the job object which must contain a HashMap, which will be used to write an INI file.
 * @return a Path object pointing to the new file.
 * @throws IOException if the temp file cannot be created or written.
 */
private Path writeINIFile(Job job) throws IOException {
    log.info("INI is: " + job.getIniStr());
    // NOTE(review): OTHERS_WRITE makes the INI world-writable; confirm the workflow runner
    // really needs that, otherwise tighten to owner/group permissions only.
    EnumSet<PosixFilePermission> perms = EnumSet.of(PosixFilePermission.OWNER_READ,
            PosixFilePermission.OWNER_WRITE, PosixFilePermission.OWNER_EXECUTE, PosixFilePermission.GROUP_READ,
            PosixFilePermission.GROUP_WRITE, PosixFilePermission.OTHERS_READ, PosixFilePermission.OTHERS_WRITE);
    FileAttribute<?> attrs = PosixFilePermissions.asFileAttribute(perms);
    Path pathToINI = Files.createTempFile("seqware_", ".ini", attrs);
    log.info("INI file: " + pathToINI.toString());
    // Files.newBufferedWriter replaces the hand-built FileOutputStream/OutputStreamWriter
    // chain; close() flushes, so no explicit flush() is needed.
    try (BufferedWriter bw = Files.newBufferedWriter(pathToINI, StandardCharsets.UTF_8)) {
        bw.write(job.getIniStr());
    }
    return pathToINI;
}
From source file:com.github.podd.resources.UploadArtifactResourceImpl.java
/**
 * Parses a multipart upload request, spools the uploaded file to a temp file, determines its
 * RDF serialization format, and loads it into PODD as an artifact.
 *
 * @param entity the request entity (unused directly; the request is re-parsed via Restlet)
 * @return the ontology ID of the loaded artifact
 * @throws ResourceException with CLIENT_ERROR_BAD_REQUEST if parsing/saving the upload fails
 *         or no file was submitted, or SERVER_ERROR_INTERNAL on I/O failure while loading
 */
private InferredOWLOntologyID uploadFileAndLoadArtifactIntoPodd(final Representation entity)
        throws ResourceException {
    List<FileItem> items;
    Path filePath = null;
    String contentType = null;
    // 1: Create a factory for disk-based file items
    final DiskFileItemFactory factory = new DiskFileItemFactory(1000240, this.tempDirectory.toFile());
    // 2: Create a new file upload handler
    final RestletFileUpload upload = new RestletFileUpload(factory);
    final Map<String, String> props = new HashMap<String, String>();
    try {
        // 3: Request is parsed by the handler which generates a list of FileItems
        items = upload.parseRequest(this.getRequest());
        for (final FileItem fi : items) {
            final String name = fi.getName();
            if (name == null) {
                // A simple form field (no filename): keep it as a property.
                props.put(fi.getFieldName(), new String(fi.get(), StandardCharsets.UTF_8));
            } else {
                // FIXME: Strip everything up to the last '.' out of the filename so that the
                // filename can be used for content type determination where possible.
                try {
                    contentType = fi.getContentType();
                    props.put("Content-Type", fi.getContentType());
                    // NOTE(review): the client-supplied name is used as the temp-file suffix;
                    // confirm createTempFile rejects path separators here (path-traversal).
                    filePath = Files.createTempFile(this.tempDirectory, "ontologyupload-", name);
                    final File file = filePath.toFile();
                    file.deleteOnExit();
                    fi.write(file);
                } catch (final IOException ioe) {
                    throw ioe;
                } catch (final Exception e) {
                    // avoid throwing a generic exception just because the apache commons
                    // library throws Exception
                    throw new IOException(e);
                }
            }
        }
    } catch (final IOException | FileUploadException e) {
        throw new ResourceException(Status.CLIENT_ERROR_BAD_REQUEST, e);
    }
    this.log.info("props={}", props.toString());
    if (filePath == null) {
        throw new ResourceException(Status.CLIENT_ERROR_BAD_REQUEST, "Did not submit a valid file and filename");
    }
    this.log.info("filename={}", filePath.toAbsolutePath().toString());
    this.log.info("contentType={}", contentType);
    RDFFormat format = null;
    // If the content type was application/octet-stream then use the file name instead;
    // browsers attach this content type when they are not sure what the real type is.
    if (MediaType.APPLICATION_OCTET_STREAM.getName().equals(contentType)) {
        format = Rio.getParserFormatForFileName(filePath.getFileName().toString());
        this.log.info("octet-stream contentType filename format={}", format);
    }
    // Otherwise use the content type directly in preference to using the filename
    else if (contentType != null) {
        format = Rio.getParserFormatForMIMEType(contentType);
        this.log.info("non-octet-stream contentType format={}", format);
    }
    // If the content type choices failed to resolve the type, then try the filename
    if (format == null) {
        format = Rio.getParserFormatForFileName(filePath.getFileName().toString());
        this.log.info("non-content-type filename format={}", format);
    }
    // Or fall back to RDF/XML, which at minimum is able to detect when the document is
    // structurally invalid
    if (format == null) {
        this.log.warn("Could not determine RDF format from request so falling back to RDF/XML");
        format = RDFFormat.RDFXML;
    }
    try (final InputStream inputStream = new BufferedInputStream(
            Files.newInputStream(filePath, StandardOpenOption.READ));) {
        return this.uploadFileAndLoadArtifactIntoPodd(inputStream, format, DanglingObjectPolicy.REPORT,
                DataReferenceVerificationPolicy.DO_NOT_VERIFY);
    } catch (final IOException e) {
        throw new ResourceException(Status.SERVER_ERROR_INTERNAL, "File IO error occurred", e);
    }
}
From source file:net.mozq.picto.core.ProcessCore.java
/**
 * Copies or moves one file from its source to its destination, optionally verifying a digest
 * and/or rewriting EXIF metadata, then propagates file timestamps.
 *
 * When digest-check or EXIF rewriting is enabled, the result is first written to a temp file
 * next to the destination and only then atomically moved into place; otherwise the file is
 * copied/moved directly. On a name collision the {@code overwriteConfirm} callback decides
 * whether to overwrite.
 *
 * @param processCondition options controlling digest check, EXIF edits, and operation type
 * @param processData source/destination paths, base date, and source file attributes
 * @param overwriteConfirm callback consulted when the destination already exists
 * @return the resulting status (Success, or whatever confirmOverwrite returned)
 * @throws IOException on any file system failure
 */
private static ProcessDataStatus process(ProcessCondition processCondition, ProcessData processData,
        Function<ProcessData, ProcessDataStatus> overwriteConfirm) throws IOException {
    ProcessDataStatus status;
    Path destParentPath = processData.getDestPath().getParent();
    if (destParentPath != null) {
        Files.createDirectories(destParentPath);
    }
    // The "staged" path: anything that rewrites bytes (digest check, EXIF edits) goes through
    // a temp file in the destination directory first.
    if (processCondition.isCheckDigest()
            || (processCondition.isChangeExifDate() && processData.getBaseDate() != null)
            || processCondition.isRemveExifTagsGps() || processCondition.isRemveExifTagsAll()) {
        Path destTempPath = null;
        try {
            destTempPath = Files.createTempFile(processData.getDestPath().getParent(),
                    processData.getDestPath().getFileName().toString(), null);
            if (processCondition.isCheckDigest()) {
                // Copy the source while digesting it, then re-read the copy and compare
                // digests to verify the copy is byte-identical.
                String algorithm = FILE_DIGEST_ALGORITHM;
                MessageDigest srcMD = newMessageDigest(algorithm);
                try (InputStream is = new DigestInputStream(
                        new BufferedInputStream(Files.newInputStream(processData.getSrcPath())), srcMD)) {
                    Files.copy(is, destTempPath, OPTIONS_COPY_REPLACE);
                }
                byte[] srcDigest = srcMD.digest();
                MessageDigest destMD = newMessageDigest(algorithm);
                try (InputStream is = new DigestInputStream(
                        new BufferedInputStream(Files.newInputStream(destTempPath)), destMD)) {
                    // drain the stream purely to feed the digest
                    byte[] b = new byte[1024];
                    while (is.read(b) != -1) {
                    }
                }
                byte[] destDigest = destMD.digest();
                if (!isSame(srcDigest, destDigest)) {
                    throw new PictoFileDigestMismatchException(
                            Messages.getString("message.error.digest.mismatch"));
                }
            } else if (processCondition.isRemveExifTagsAll()) {
                // Strip all EXIF metadata while writing to the temp file.
                ExifRewriter exifRewriter = new ExifRewriter();
                try (OutputStream os = new BufferedOutputStream(Files.newOutputStream(destTempPath))) {
                    exifRewriter.removeExifMetadata(processData.getSrcPath().toFile(), os);
                } catch (ImageReadException | ImageWriteException e) {
                    throw new PictoFileChangeException(Messages.getString("message.error.edit.file"), e);
                }
            } else if (processCondition.isChangeExifDate() || processCondition.isRemveExifTagsGps()) {
                ImageMetadata imageMetadata = getImageMetadata(processData.getSrcPath());
                TiffOutputSet outputSet = getOutputSet(imageMetadata);
                if (outputSet == null) {
                    // No EXIF to rewrite - plain copy.
                    Files.copy(processData.getSrcPath(), destTempPath, OPTIONS_COPY_REPLACE);
                } else {
                    if (processCondition.isChangeExifDate()) {
                        // Format the base date in EXIF date format (in the configured time
                        // zone) plus a two-digit sub-second component.
                        SimpleDateFormat exifDateFormat = new SimpleDateFormat(EXIF_DATE_PATTERN);
                        exifDateFormat.setTimeZone(processCondition.getTimeZone());
                        String exifBaseDate = exifDateFormat.format(processData.getBaseDate());
                        DecimalFormat exifSubsecFormat = new DecimalFormat(EXIF_SUBSEC_PATTERN);
                        String exifBaseSubsec = exifSubsecFormat
                                .format((int) (processData.getBaseDate().getTime() / 10) % 100);
                        try {
                            // Replace (remove + add) all date/sub-second fields in both the
                            // root and EXIF directories.
                            TiffOutputDirectory rootDirectory = outputSet.getRootDirectory();
                            TiffOutputDirectory exifDirectory = outputSet.getExifDirectory();
                            if (rootDirectory != null) {
                                rootDirectory.removeField(TiffTagConstants.TIFF_TAG_DATE_TIME);
                                rootDirectory.add(TiffTagConstants.TIFF_TAG_DATE_TIME, exifBaseDate);
                            }
                            if (exifDirectory != null) {
                                exifDirectory.removeField(ExifTagConstants.EXIF_TAG_SUB_SEC_TIME);
                                exifDirectory.add(ExifTagConstants.EXIF_TAG_SUB_SEC_TIME, exifBaseSubsec);
                                exifDirectory.removeField(ExifTagConstants.EXIF_TAG_DATE_TIME_ORIGINAL);
                                exifDirectory.add(ExifTagConstants.EXIF_TAG_DATE_TIME_ORIGINAL, exifBaseDate);
                                exifDirectory.removeField(ExifTagConstants.EXIF_TAG_SUB_SEC_TIME_ORIGINAL);
                                exifDirectory.add(ExifTagConstants.EXIF_TAG_SUB_SEC_TIME_ORIGINAL,
                                        exifBaseSubsec);
                                exifDirectory.removeField(ExifTagConstants.EXIF_TAG_DATE_TIME_DIGITIZED);
                                exifDirectory.add(ExifTagConstants.EXIF_TAG_DATE_TIME_DIGITIZED, exifBaseDate);
                                exifDirectory.removeField(ExifTagConstants.EXIF_TAG_SUB_SEC_TIME_DIGITIZED);
                                exifDirectory.add(ExifTagConstants.EXIF_TAG_SUB_SEC_TIME_DIGITIZED,
                                        exifBaseSubsec);
                            }
                        } catch (ImageWriteException e) {
                            throw new PictoFileChangeException(Messages.getString("message.error.edit.file"),
                                    e);
                        }
                    }
                    if (processCondition.isRemveExifTagsGps()) {
                        outputSet.removeField(ExifTagConstants.EXIF_TAG_GPSINFO);
                    }
                    // Lossless rewrite of the image with the modified EXIF output set.
                    ExifRewriter exifRewriter = new ExifRewriter();
                    try (OutputStream os = new BufferedOutputStream(Files.newOutputStream(destTempPath))) {
                        exifRewriter.updateExifMetadataLossless(processData.getSrcPath().toFile(), os,
                                outputSet);
                    } catch (ImageReadException | ImageWriteException e) {
                        throw new PictoFileChangeException(Messages.getString("message.error.edit.file"), e);
                    }
                }
            }
            // For Overwrite operations the rewritten temp file replaces the SOURCE;
            // otherwise it is moved to the destination.
            Path destPath;
            if (processCondition.getOperationType() == OperationType.Overwrite) {
                destPath = processData.getSrcPath();
            } else {
                destPath = processData.getDestPath();
            }
            try {
                Files.move(destTempPath, destPath, OPTIONS_MOVE);
                if (processCondition.getOperationType() == OperationType.Move) {
                    Files.deleteIfExists(processData.getSrcPath());
                }
                status = ProcessDataStatus.Success;
            } catch (FileAlreadyExistsException e) {
                // Destination exists: ask the caller; Processing means "go ahead, overwrite".
                status = confirmOverwrite(processCondition, processData, overwriteConfirm);
                if (status == ProcessDataStatus.Processing) {
                    // Overwrite
                    Files.move(destTempPath, destPath, OPTIONS_MOVE_REPLACE);
                    if (processCondition.getOperationType() == OperationType.Move) {
                        Files.deleteIfExists(processData.getSrcPath());
                    }
                    status = ProcessDataStatus.Success;
                }
            }
        } finally {
            // Clean up the staging file if it was not moved into place.
            if (destTempPath != null) {
                Files.deleteIfExists(destTempPath);
            }
        }
    } else {
        // The "direct" path: no byte rewriting needed - plain copy/move.
        switch (processCondition.getOperationType()) {
        case Copy:
            try {
                Files.copy(processData.getSrcPath(), processData.getDestPath(), OPTIONS_COPY);
                status = ProcessDataStatus.Success;
            } catch (FileAlreadyExistsException e) {
                status = confirmOverwrite(processCondition, processData, overwriteConfirm);
                if (status == ProcessDataStatus.Processing) {
                    Files.copy(processData.getSrcPath(), processData.getDestPath(), OPTIONS_COPY_REPLACE);
                    status = ProcessDataStatus.Success;
                }
            }
            break;
        case Move:
            try {
                Files.move(processData.getSrcPath(), processData.getDestPath(), OPTIONS_MOVE);
                status = ProcessDataStatus.Success;
            } catch (FileAlreadyExistsException e) {
                status = confirmOverwrite(processCondition, processData, overwriteConfirm);
                if (status == ProcessDataStatus.Processing) {
                    Files.move(processData.getSrcPath(), processData.getDestPath(), OPTIONS_MOVE_REPLACE);
                    status = ProcessDataStatus.Success;
                }
            }
            break;
        case Overwrite:
            // NOP - overwrite with no digest/EXIF work means the file stays as it is.
            status = ProcessDataStatus.Success;
            break;
        default:
            throw new IllegalStateException(processCondition.getOperationType().toString());
        }
    }
    if (status == ProcessDataStatus.Success) {
        // Propagate the source timestamps to the destination, optionally substituting the
        // base date for the creation/modified/access times as configured.
        FileTime creationFileTime = processData.getSrcFileAttributes().creationTime();
        FileTime modifiedFileTime = processData.getSrcFileAttributes().lastModifiedTime();
        FileTime accessFileTime = processData.getSrcFileAttributes().lastAccessTime();
        if (processCondition.isChangeFileCreationDate() || processCondition.isChangeFileModifiedDate()
                || processCondition.isChangeFileAccessDate()) {
            if (processData.getBaseDate() != null) {
                FileTime baseFileTime = FileTime.fromMillis(processData.getBaseDate().getTime());
                if (processCondition.isChangeFileCreationDate()) {
                    creationFileTime = baseFileTime;
                }
                if (processCondition.isChangeFileModifiedDate()) {
                    modifiedFileTime = baseFileTime;
                }
                if (processCondition.isChangeFileAccessDate()) {
                    accessFileTime = baseFileTime;
                }
            }
        }
        BasicFileAttributeView attributeView = Files.getFileAttributeView(processData.getDestPath(),
                BasicFileAttributeView.class);
        attributeView.setTimes(modifiedFileTime, accessFileTime, creationFileTime);
    }
    return status;
}
From source file:ddf.catalog.test.TestCatalog.java
@Test public void testContentDirectoryMonitor() throws Exception { startFeature(true, "content-core-directorymonitor"); final String TMP_PREFIX = "tcdm_"; Path tmpDir = Files.createTempDirectory(TMP_PREFIX); tmpDir.toFile().deleteOnExit();//from w ww . j av a2 s . c o m Path tmpFile = Files.createTempFile(tmpDir, TMP_PREFIX, "_tmp.xml"); tmpFile.toFile().deleteOnExit(); Files.copy(this.getClass().getClassLoader().getResourceAsStream("metacard5.xml"), tmpFile, StandardCopyOption.REPLACE_EXISTING); Map<String, Object> cdmProperties = new HashMap<>(); cdmProperties.putAll(getMetatypeDefaults("content-core-directorymonitor", "ddf.content.core.directorymonitor.ContentDirectoryMonitor")); cdmProperties.put("monitoredDirectoryPath", tmpDir.toString() + "/"); // Must end with / cdmProperties.put("directive", "STORE_AND_PROCESS"); createManagedService("ddf.content.core.directorymonitor.ContentDirectoryMonitor", cdmProperties); long startTime = System.nanoTime(); ValidatableResponse response = null; do { response = executeOpenSearch("xml", "q=*SysAdmin*"); if (response.extract().xmlPath().getList("metacards.metacard").size() == 1) { break; } try { TimeUnit.MILLISECONDS.sleep(50); } catch (InterruptedException e) { } } while (TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startTime) < TimeUnit.MINUTES.toMillis(1)); response.body("metacards.metacard.size()", equalTo(1)); }
From source file:com.eucalyptus.blockstorage.ceph.CephRbdProvider.java
/**
 * Produces an incremental snapshot delta by shelling out to "rbd export-diff", diffing the
 * given snapshot point against the previous snapshot.
 *
 * @param volumeId the volume the snapshots belong to (logging only)
 * @param snapshotId the newer snapshot; also names the resulting FileResource
 * @param snapPointId the rbd snapshot point to export (pool/image@snapshot)
 * @param prevSnapshotId the older snapshot id, used when no explicit prev point is given
 * @param prevSnapPointId the older snapshot point; may be blank or pool/image@snapshot form
 * @return a FileResource wrapping the diff file written under /var/tmp
 * @throws EucalyptusCloudException if the prev point is malformed or the rbd command fails
 */
@Override
public StorageResource generateSnapshotDelta(String volumeId, String snapshotId, String snapPointId,
        String prevSnapshotId, String prevSnapPointId) throws EucalyptusCloudException {
    LOG.info("Generating snapshot delta volumeId=" + volumeId + ", snapshot=" + snapshotId
            + ", snapshotPointId=" + snapPointId + ", prevSnapshotId=" + prevSnapshotId + ", prevSnapPointId="
            + prevSnapPointId);
    String diffName = null;
    try {
        // Resolve the "--from-snap" argument: derive it from the prev snapshot id, parse it
        // out of a pool/image@snapshot identifier, or use the given value verbatim.
        String prevSnapPoint = null;
        if (StringUtils.isBlank(prevSnapPointId)) {
            prevSnapPoint = CephRbdInfo.SNAPSHOT_FOR_PREFIX + prevSnapshotId;
        } else if (prevSnapPointId.contains(CephRbdInfo.POOL_IMAGE_DELIMITER)
                && prevSnapPointId.contains(CephRbdInfo.IMAGE_SNAPSHOT_DELIMITER)) {
            CanonicalRbdObject prevSnap = CanonicalRbdObject.parse(prevSnapPointId);
            if (prevSnap != null && !Strings.isNullOrEmpty(prevSnap.getSnapshot())) {
                prevSnapPoint = prevSnap.getSnapshot();
            } else {
                throw new EucalyptusCloudException(
                        "Invalid snapshotPointId, expected pool/image@snapshot format but got "
                                + prevSnapPointId);
            }
        } else {
            prevSnapPoint = prevSnapPointId;
        }
        // createTempFile is used only to reserve a unique name; the file itself must be
        // deleted before invoking rbd - rbd does not like the file being present.
        Path diffPath = Files.createTempFile(Paths.get("/var/tmp"), snapshotId + "_" + prevSnapshotId + "_",
                ".diff");
        Files.deleteIfExists(diffPath);
        diffName = diffPath.toString();
        String[] cmd = new String[] { StorageProperties.EUCA_ROOT_WRAPPER, "rbd", "--id",
                cachedConfig.getCephUser(), "--keyring", cachedConfig.getCephKeyringFile(), "export-diff",
                snapPointId, diffName, "--from-snap", prevSnapPoint };
        LOG.debug("Executing: " + Joiner.on(" ").skipNulls().join(cmd));
        CommandOutput output = SystemUtil.runWithRawOutput(cmd);
        if (output != null) {
            LOG.debug("Dump from rbd command:\nreturn=" + output.returnValue + "\nstdout=" + output.output
                    + "\nstderr=" + output.error);
            if (output.returnValue != 0) {
                throw new EucalyptusCloudException("Unable to execute rbd command. return=" + output.returnValue
                        + ", stdout=" + output.output + ", stderr=" + output.error);
            }
        }
        return new FileResource(snapshotId, diffName);
    } catch (Exception e) {
        LOG.warn("Failed to generate snapshot delta between " + snapshotId + " and " + prevSnapshotId, e);
        // Best-effort cleanup of the partially written diff file before rethrowing.
        try {
            if (!Strings.isNullOrEmpty(diffName)) {
                LOG.debug("Deleting file " + diffName);
                new File(diffName).delete();
            }
        } catch (Exception ie) {
            LOG.warn("Failed to delete file " + diffName, ie);
        }
        throw new EucalyptusCloudException(
                "Failed to generate snapshot delta between " + snapshotId + " and " + prevSnapshotId, e);
    }
}
From source file:com.mxhero.plugin.cloudstorage.onedrive.api.Items.java
/** * Simple upload stream./*from w w w. jav a 2 s.co m*/ * * @param path the path * @param inputStream the input stream * @param conflictBehavior the conflict behavior * @return the item */ private Item simpleUploadStream(final String path, final InputStream inputStream, final ConflictBehavior conflictBehavior) { File tmpFile = null; try { tmpFile = Files .createTempFile(new File(ApiEnviroment.tempUploadFolder.getValue()).toPath(), "odtmp", ".data") .toFile(); FileUtils.copyInputStreamToFile(inputStream, tmpFile); return simpleUpload(path, tmpFile, conflictBehavior); } catch (IOException e) { throw new IllegalArgumentException("couldnt read or store inputstream"); } finally { if (tmpFile != null) { FileUtils.deleteQuietly(tmpFile); } } }
From source file:com.google.cloud.dataflow.sdk.io.TextIOTest.java
License:asdf
private TextSource<String> prepareSource(byte[] data) throws IOException { Path path = Files.createTempFile(tempFolder, "tempfile", "ext"); Files.write(path, data);// ww w. j a va 2 s. c om return new TextSource<>(path.toString(), StringUtf8Coder.of()); }
From source file:ddf.test.itests.catalog.TestCatalog.java
/**
 * Verifies that the content directory monitor picks up a file dropped into its monitored
 * directory and that the resulting metacard becomes searchable within one minute.
 */
@Test
public void testContentDirectoryMonitor() throws Exception {
    final String TMP_PREFIX = "tcdm_";
    Path tmpDir = Files.createTempDirectory(TMP_PREFIX);
    tmpDir.toFile().deleteOnExit();
    Path tmpFile = Files.createTempFile(tmpDir, TMP_PREFIX, "_tmp.xml");
    tmpFile.toFile().deleteOnExit();
    Files.copy(this.getClass().getClassLoader().getResourceAsStream("metacard5.xml"), tmpFile,
            StandardCopyOption.REPLACE_EXISTING);
    Map<String, Object> cdmProperties = new HashMap<>();
    cdmProperties.putAll(getMetatypeDefaults("content-core-directorymonitor",
            "org.codice.ddf.catalog.content.monitor.ContentDirectoryMonitor"));
    cdmProperties.put("monitoredDirectoryPath", tmpDir.toString() + "/");
    createManagedService("org.codice.ddf.catalog.content.monitor.ContentDirectoryMonitor", cdmProperties);
    // Poll the OpenSearch endpoint for up to one minute until the ingested metacard shows up.
    long startTime = System.nanoTime();
    ValidatableResponse response = null;
    do {
        response = executeOpenSearch("xml", "q=*SysAdmin*");
        if (response.extract().xmlPath().getList("metacards.metacard").size() == 1) {
            break;
        }
        try {
            TimeUnit.MILLISECONDS.sleep(50);
        } catch (InterruptedException e) {
            // Fix: do not swallow interruption - restore the interrupt flag and stop
            // polling; the final assertion below will then report the observed state.
            Thread.currentThread().interrupt();
            break;
        }
    } while (TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startTime) < TimeUnit.MINUTES.toMillis(1));
    response.body("metacards.metacard.size()", equalTo(1));
}
From source file:org.apache.beam.sdk.io.TextIOTest.java
License:asdf
private TextSource prepareSource(byte[] data) throws IOException { Path path = Files.createTempFile(tempFolder, "tempfile", "ext"); Files.write(path, data);// w w w . j a v a 2 s . c om return new TextSource(ValueProvider.StaticValueProvider.of(path.toString())); }