List of usage examples for java.nio.file.Files.delete
public static void delete(Path path) throws IOException
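Files.delete removes the file or empty directory at the given path; it throws NoSuchFileException if the target is missing, DirectoryNotEmptyException if the target is a non-empty directory, and IOException for other failures. A minimal usage sketch (the path below is purely illustrative):

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

public class FilesDeleteExample {
    public static void main(String[] args) throws IOException {
        Path target = Paths.get("build", "tmp", "report.txt"); // hypothetical file

        // delete(Path) throws NoSuchFileException when the target does not exist
        if (Files.exists(target)) {
            Files.delete(target);
        }

        // deleteIfExists(Path) returns false instead of throwing when the target is absent
        boolean removed = Files.deleteIfExists(target);
        System.out.println("removed: " + removed);
    }
}

The examples below come from open-source projects and show the same call used to clean up temporary files, empty directories before removing them, and delete whole trees via FileVisitor.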
From source file:org.codice.ddf.security.migratable.impl.SecurityMigratableTest.java
/** * Verify that when the PDP file, policy files, and CRL do not exist, the version upgrade import * succeeds. */ @Test public void testDoVersionUpgradeImportWhenNoFiles() throws IOException { // Setup export Path exportDir = tempDir.getRoot().toPath().toRealPath(); // Comment out CRL in etc/ws-security/server/encryption.properties Path serverEncptProps = ddfHome.resolve(Paths.get("etc", "ws-security", "server", "encryption.properties")); String tag = String.format(DDF_EXPORTED_TAG_TEMPLATE, DDF_EXPORTED_HOME); writeProperties(serverEncptProps, "_" + CRL_PROP_KEY, CRL.toString(), String.format("%s&%s", serverEncptProps.toRealPath().toString(), tag)); // Remove PDP file Path xacmlPolicy = ddfHome.resolve(PDP_POLICIES_DIR).resolve(XACML_POLICY); Files.delete(xacmlPolicy); // Remove policy files for (final String file : POLICY_FILES) { Path policy = ddfHome.resolve(SECURITY_POLICIES_DIR).resolve(file); Files.delete(policy); } // Perform export doExport(exportDir); // Setup import setup(DDF_IMPORTED_HOME, DDF_IMPORTED_TAG, IMPORTING_PRODUCT_VERSION); SecurityMigratable iSecurityMigratable = spy(new SecurityMigratable()); when(iSecurityMigratable.getVersion()).thenReturn("3.0"); List<Migratable> iMigratables = Arrays.asList(iSecurityMigratable); ConfigurationMigrationManager iConfigurationMigrationManager = new ConfigurationMigrationManager( iMigratables, systemService); MigrationReport importReport = iConfigurationMigrationManager.doImport(exportDir, this::print); // Verify import verify(iSecurityMigratable).doVersionUpgradeImport(any(ImportMigrationContext.class)); assertThat("The import report has errors.", importReport.hasErrors(), is(false)); assertThat("The import report has warnings.", importReport.hasWarnings(), is(false)); assertThat("Import was not successful.", importReport.wasSuccessful(), is(true)); }
From source file:org.tinymediamanager.core.movie.MovieRenamer.java
/** * Rename movie. * * @param movie * the movie */ public static void renameMovie(Movie movie) { // FIXME: what? when? boolean posterRenamed = false; boolean fanartRenamed = false; boolean downloadMissingArtworks = false; // check if a datasource is set if (StringUtils.isEmpty(movie.getDataSource())) { LOGGER.error("no Datasource set"); return; } // if (!movie.isScraped()) { if (movie.getTitle().isEmpty()) { LOGGER.error("won't rename movie '" + movie.getPathNIO() + "' / '" + movie.getTitle() + "' not even title is set?"); return; } // all the good & needed mediafiles ArrayList<MediaFile> needed = new ArrayList<>(); ArrayList<MediaFile> cleanup = new ArrayList<>(); LOGGER.info("Renaming movie: " + movie.getTitle()); LOGGER.debug("movie year: " + movie.getYear()); LOGGER.debug("movie path: " + movie.getPathNIO()); LOGGER.debug("movie isDisc?: " + movie.isDisc()); LOGGER.debug("movie isMulti?: " + movie.isMultiMovieDir()); if (movie.getMovieSet() != null) { LOGGER.debug("movieset: " + movie.getMovieSet().getTitle()); } LOGGER.debug("path expression: " + MovieModuleManager.MOVIE_SETTINGS.getMovieRenamerPathname()); LOGGER.debug("file expression: " + MovieModuleManager.MOVIE_SETTINGS.getMovieRenamerFilename()); String newPathname = createDestinationForFoldername( MovieModuleManager.MOVIE_SETTINGS.getMovieRenamerPathname(), movie); String oldPathname = movie.getPathNIO().toString(); if (!newPathname.isEmpty()) { newPathname = movie.getDataSource() + File.separator + newPathname; Path srcDir = movie.getPathNIO(); Path destDir = Paths.get(newPathname); if (!srcDir.toAbsolutePath().equals(destDir.toAbsolutePath())) { boolean newDestIsMultiMovieDir = false; // re-evaluate multiMovieDir based on renamer settings // folder MUST BE UNIQUE, we need at least a T/E-Y combo or IMDBid // so if renaming just to a fixed pattern (eg "$S"), movie will downgrade to a MMD if (!isFolderPatternUnique(MovieModuleManager.MOVIE_SETTINGS.getMovieRenamerPathname())) { // FIXME: if we already in a normal dir - keep it? newDestIsMultiMovieDir = true; } // FIXME: add warning to GUI if downgrade!!!!!! LOGGER.debug("movie willBeMulti?: " + newDestIsMultiMovieDir); // ###################################################################### // ## 1) old = separate movie dir, and new too -> move folder // ###################################################################### if (!movie.isMultiMovieDir() && !newDestIsMultiMovieDir) { boolean ok = false; try { ok = Utils.moveDirectorySafe(srcDir, destDir); if (ok) { movie.setMultiMovieDir(false); movie.updateMediaFilePath(srcDir, destDir); movie.setPath(newPathname); movie.saveToDb(); // since we moved already, save it } } catch (Exception e) { LOGGER.error("error moving folder: ", e); MessageManager.instance.pushMessage(new Message(MessageLevel.ERROR, srcDir, "message.renamer.failedrename", new String[] { ":", e.getLocalizedMessage() })); } if (!ok) { // FIXME: when we were not able to rename folder, display error msg and abort!!!
LOGGER.error("Could not move to destination '" + destDir + "' - NOT renaming folder"); return; } } else if (movie.isMultiMovieDir() && !newDestIsMultiMovieDir) { // ###################################################################### // ## 2) MMD movie -> normal movie (upgrade) // ###################################################################### LOGGER.trace("Upgrading movie into it's own dir :) " + newPathname); try { Files.createDirectories(destDir); } catch (Exception e) { LOGGER.error("Could not create destination '" + destDir + "' - NOT renaming folder ('upgrade' movie)"); // well, better not to rename return; } movie.setMultiMovieDir(false); downloadMissingArtworks = true; // yay - we upgraded our movie, so we could try to get additional artworks :) } else { // ###################################################################### // ## Can be // ## 3) MMD movie -> MMD movie (but foldername possible changed) // ## 4) normal movie -> MMD movie (downgrade) // ## either way - check & create dest folder // ###################################################################### LOGGER.trace("New movie path is a MMD :( " + newPathname); if (!Files.exists(destDir)) { // if existent, all is good -> MMD (FIXME: kinda, we *might* have another full movie in there) try { Files.createDirectories(destDir); } catch (Exception e) { LOGGER.error("Could not create destination '" + destDir + "' - NOT renaming folder ('MMD' movie)"); // well, better not to rename return; } } movie.setMultiMovieDir(true); } } // src == dest } // folder pattern empty else { LOGGER.info("Folder rename settings were empty - NOT renaming folder"); // set it to current for file renaming newPathname = movie.getPathNIO().toString(); } // ###################################################################### // ## mark ALL existing and known files for cleanup (clone!!) // ###################################################################### for (MovieNfoNaming s : MovieNfoNaming.values()) { String nfoFilename = movie.getNfoFilename(s); if (StringUtils.isBlank(nfoFilename)) { continue; } // mark all known variants for cleanup MediaFile del = new MediaFile(movie.getPathNIO().resolve(nfoFilename), MediaFileType.NFO); cleanup.add(del); } for (MoviePosterNaming s : MoviePosterNaming.values()) { MediaFile del = new MediaFile( movie.getPathNIO() .resolve(replaceInvalidCharacters(MovieArtworkHelper.getPosterFilename(s, movie))), MediaFileType.POSTER); cleanup.add(del); } for (MovieFanartNaming s : MovieFanartNaming.values()) { MediaFile del = new MediaFile( movie.getPathNIO() .resolve(replaceInvalidCharacters(MovieArtworkHelper.getFanartFilename(s, movie))), MediaFileType.FANART); cleanup.add(del); } // cleanup ALL MFs for (MediaFile del : movie.getMediaFiles()) { cleanup.add(new MediaFile(del)); } cleanup.removeAll(Collections.singleton(null)); // remove all NULL ones! // update movie path at end of renaming - we need the old one here!! 
// movie.setPath(newPathname); // movie.saveToDb(); // BASENAME String newVideoBasename = ""; if (!isFilePatternValid()) { // Template empty or not even title set, so we are NOT renaming any files // we keep the same name on renaming ;) newVideoBasename = movie.getVideoBasenameWithoutStacking(); LOGGER.warn("Filepattern is not valid - NOT renaming files!"); } else { // since we rename, generate the new basename MediaFile ftr = generateFilename(movie, movie.getMediaFiles(MediaFileType.VIDEO).get(0), newVideoBasename).get(0); // there can be only one newVideoBasename = FilenameUtils.getBaseName(ftr.getFilenameWithoutStacking()); } LOGGER.debug("Our new basename for renaming: " + newVideoBasename); // unneeded / more reliable with with java 7? // // ###################################################################### // // ## test VIDEO rename // // ###################################################################### // for (MediaFile vid : movie.getMediaFiles(MediaFileType.VIDEO)) { // LOGGER.debug("testing file " + vid.getFileAsPath()); // Path f = vid.getFileAsPath(); // boolean testRenameOk = false; // for (int i = 0; i < 5; i++) { // testRenameOk = f.renameTo(f); // haahaa, try to rename to itself :P // if (testRenameOk) { // break; // ok it worked, step out // } // // we had the case, that the renaemoTo didn't work, // // and even the exists did not work! // // so we skip this additional check, which results in not removing the movie file // // if (!f.exists()) { // // LOGGER.debug("Hmmm... file " + f + " does not even exists; delete from DB"); // // // delete from MF or ignore for later cleanup (but better now!) // // movie.removeFromMediaFiles(vid); // // testRenameOk = true; // we "tested" this ok // // break; // // } // try { // LOGGER.debug("rename did not work - sleep a while and try again..."); // Thread.sleep(1000); // } // catch (InterruptedException e) { // LOGGER.warn("I'm so excited - could not sleep"); // } // } // if (!testRenameOk) { // LOGGER.warn("File " + vid.getFileAsPath() + " is not accessible!"); // MessageManager.instance.pushMessage(new Message(MessageLevel.ERROR, vid.getFilename(), "message.renamer.failedrename")); // return; // } // } // ###################################################################### // ## rename VIDEO (move 1:1) // ###################################################################### for (MediaFile vid : movie.getMediaFiles(MediaFileType.VIDEO)) { LOGGER.trace("Rename 1:1 " + vid.getType() + " " + vid.getFileAsPath()); MediaFile newMF = generateFilename(movie, vid, newVideoBasename).get(0); // there can be only one boolean ok = moveFile(vid.getFileAsPath(), newMF.getFileAsPath()); if (ok) { vid.setFile(newMF.getFileAsPath()); // update } needed.add(vid); // add vid, since we're updating existing MF object } // ###################################################################### // ## rename POSTER, FANART (copy 1:N) // ###################################################################### // we can have multiple ones, just get the newest one and copy(overwrite) them to all needed ArrayList<MediaFile> mfs = new ArrayList<>(); mfs.add(movie.getNewestMediaFilesOfType(MediaFileType.FANART)); mfs.add(movie.getNewestMediaFilesOfType(MediaFileType.POSTER)); mfs.removeAll(Collections.singleton(null)); // remove all NULL ones! 
for (MediaFile mf : mfs) { LOGGER.trace("Rename 1:N " + mf.getType() + " " + mf.getFileAsPath()); ArrayList<MediaFile> newMFs = generateFilename(movie, mf, newVideoBasename); // 1:N for (MediaFile newMF : newMFs) { posterRenamed = true; fanartRenamed = true; boolean ok = copyFile(mf.getFileAsPath(), newMF.getFileAsPath()); if (ok) { needed.add(newMF); } } } // ###################################################################### // ## rename NFO (copy 1:N) - only TMM NFOs // ###################################################################### // we need to find the newest, valid TMM NFO MediaFile nfo = new MediaFile(); for (MediaFile mf : movie.getMediaFiles(MediaFileType.NFO)) { if (mf.getFiledate() >= nfo.getFiledate() && MovieConnectors.isValidNFO(mf.getFileAsPath())) { nfo = new MediaFile(mf); } } if (nfo.getFiledate() > 0) { // one valid found? copy our NFO to all variants ArrayList<MediaFile> newNFOs = generateFilename(movie, nfo, newVideoBasename); // 1:N if (newNFOs.size() > 0) { // ok, at least one has been set up for (MediaFile newNFO : newNFOs) { boolean ok = copyFile(nfo.getFileAsPath(), newNFO.getFileAsPath()); if (ok) { needed.add(newNFO); } } } else { // list was empty, so even remove this NFO cleanup.add(nfo); } } else { LOGGER.trace("No valid NFO found for this movie"); } // now iterate over all non-tmm NFOs, and add them for cleanup or not for (MediaFile mf : movie.getMediaFiles(MediaFileType.NFO)) { if (MovieConnectors.isValidNFO(mf.getFileAsPath())) { cleanup.add(mf); } else { if (MovieModuleManager.MOVIE_SETTINGS.isMovieRenamerNfoCleanup()) { cleanup.add(mf); } else { needed.add(mf); } } } // ###################################################################### // ## rename all other types (copy 1:1) // ###################################################################### mfs = new ArrayList<>(); mfs.addAll(movie.getMediaFilesExceptType(MediaFileType.VIDEO, MediaFileType.NFO, MediaFileType.POSTER, MediaFileType.FANART, MediaFileType.SUBTITLE)); mfs.removeAll(Collections.singleton(null)); // remove all NULL ones! for (MediaFile other : mfs) { LOGGER.trace("Rename 1:1 " + other.getType() + " " + other.getFileAsPath()); ArrayList<MediaFile> newMFs = generateFilename(movie, other, newVideoBasename); // 1:N newMFs.removeAll(Collections.singleton(null)); // remove all NULL ones! for (MediaFile newMF : newMFs) { boolean ok = copyFile(other.getFileAsPath(), newMF.getFileAsPath()); if (ok) { needed.add(newMF); } else { // FIXME: what to do? not copied/exception... keep it for now... 
needed.add(other); } } } // ###################################################################### // ## rename subtitles later, but ADD it to not clean up // ###################################################################### needed.addAll(movie.getMediaFiles(MediaFileType.SUBTITLE)); // ###################################################################### // ## invalidade image cache // ###################################################################### for (MediaFile gfx : movie.getMediaFiles()) { if (gfx.isGraphic()) { ImageCache.invalidateCachedImage(gfx.getFileAsPath()); } } // remove duplicate MediaFiles Set<MediaFile> newMFs = new LinkedHashSet<>(needed); needed.clear(); needed.addAll(newMFs); movie.removeAllMediaFiles(); movie.addToMediaFiles(needed); movie.setPath(newPathname); // update .actors for (MovieActor actor : movie.getActors()) { actor.setEntityRoot(newPathname); } movie.saveToDb(); // cleanup & rename subtitle files renameSubtitles(movie); movie.gatherMediaFileInformation(false); // rewrite NFO if it's a MP NFO and there was a change with poster/fanart if (MovieModuleManager.MOVIE_SETTINGS.getMovieConnector() == MovieConnectors.MP && (posterRenamed || fanartRenamed)) { movie.writeNFO(); } movie.saveToDb(); // ###################################################################### // ## CLEANUP - delete all files marked for cleanup, which are not "needed" // ###################################################################### LOGGER.info("Cleanup..."); for (int i = cleanup.size() - 1; i >= 0; i--) { // cleanup files which are not needed if (!needed.contains(cleanup.get(i))) { MediaFile cl = cleanup.get(i); if (cl.getFileAsPath().equals(Paths.get(movie.getDataSource())) || cl.getFileAsPath().equals(movie.getPathNIO()) || cl.getFileAsPath().equals(Paths.get(oldPathname))) { LOGGER.warn("Wohoo! We tried to remove complete datasource / movie folder. Nooo way...! " + cl.getType() + ": " + cl.getFileAsPath()); // happens when iterating eg over the getNFONaming and we return a "" string. // then the path+filename = movie path and we want to delete :/ // do not show an error anylonger, just silently ignore... // MessageManager.instance.pushMessage(new Message(MessageLevel.ERROR, cl.getFile(), "message.renamer.failedrename")); // return; // rename failed continue; } if (Files.exists(cl.getFileAsPath())) { // unneeded, but for not displaying wrong deletes in logger... LOGGER.debug("Deleting " + cl.getFileAsPath()); Utils.deleteFileWithBackup(cl.getFileAsPath(), movie.getDataSource()); } try (DirectoryStream<Path> directoryStream = Files .newDirectoryStream(cl.getFileAsPath().getParent())) { if (!directoryStream.iterator().hasNext()) { // no iterator = empty LOGGER.debug("Deleting empty Directory " + cl.getFileAsPath().getParent()); Files.delete(cl.getFileAsPath().getParent()); // do not use recursive her } } catch (IOException ex) { } } } if (downloadMissingArtworks) { LOGGER.debug("Yay - movie upgrade :) download missing artworks"); MovieArtworkHelper.downloadMissingArtwork(movie); } }
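Files.delete only removes empty directories, so the cleanup pass above first checks, via an empty DirectoryStream, that nothing is left in the parent folder before deleting it. A condensed sketch of that idiom (assuming the usual java.nio.file imports; the directory name is made up for illustration):

Path parent = Paths.get("movies", "Old Title (1999)"); // hypothetical folder left behind after renaming
try (DirectoryStream<Path> stream = Files.newDirectoryStream(parent)) {
    if (!stream.iterator().hasNext()) { // no entries means the directory is empty
        Files.delete(parent); // a non-empty directory would throw DirectoryNotEmptyException
    }
} catch (IOException ex) {
    // the renamer above swallows this and simply leaves the directory in place
}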
From source file:edu.harvard.iq.dataverse.ingest.IngestServiceBean.java
public List<DataFile> createDataFiles(DatasetVersion version, InputStream inputStream, String fileName, String suppliedContentType) throws IOException { List<DataFile> datafiles = new ArrayList<DataFile>(); String warningMessage = null; // save the file, in the temporary location for now: Path tempFile = null; if (getFilesTempDirectory() != null) { tempFile = Files.createTempFile(Paths.get(getFilesTempDirectory()), "tmp", "upload"); // "temporary" location is the key here; this is why we are not using // the DataStore framework for this - the assumption is that // temp files will always be stored on the local filesystem. // -- L.A. Jul. 2014 logger.fine("Will attempt to save the file as: " + tempFile.toString()); Files.copy(inputStream, tempFile, StandardCopyOption.REPLACE_EXISTING); } else { throw new IOException("Temp directory is not configured."); } logger.fine("mime type supplied: " + suppliedContentType); // Let's try our own utilities (Jhove, etc.) to determine the file type // of the uploaded file. (We may already have a mime type supplied for this // file - maybe the type that the browser recognized on upload; or, if // it's a harvest, maybe the remote server has already given us the type // for this file... with our own type utility we may or may not do better // than the type supplied: // -- L.A. String recognizedType = null; String finalType = null; try { recognizedType = FileUtil.determineFileType(tempFile.toFile(), fileName); logger.fine("File utility recognized the file as " + recognizedType); if (recognizedType != null && !recognizedType.equals("")) { // is it any better than the type that was supplied to us, // if any? // This is not as trivial a task as one might expect... // We may need a list of "good" mime types, that should always // be chosen over other choices available. Maybe it should // even be a weighed list... as in, "application/foo" should // be chosen over "application/foo-with-bells-and-whistles". // For now the logic will be as follows: // // 1. If the contentType supplied (by the browser, most likely) // is some form of "unknown", we always discard it in favor of // whatever our own utilities have determined; // 2. We should NEVER trust the browser when it comes to the // following "ingestable" types: Stata, SPSS, R; // 2a. We are willing to TRUST the browser when it comes to // the CSV and XSLX ingestable types. // 3. We should ALWAYS trust our utilities when it comes to // ingestable types. if (suppliedContentType == null || suppliedContentType.equals("") || suppliedContentType.equalsIgnoreCase(MIME_TYPE_UNDETERMINED_DEFAULT) || suppliedContentType.equalsIgnoreCase(MIME_TYPE_UNDETERMINED_BINARY) || (ingestableAsTabular(suppliedContentType) && !suppliedContentType.equalsIgnoreCase(MIME_TYPE_CSV) && !suppliedContentType.equalsIgnoreCase(MIME_TYPE_CSV_ALT) && !suppliedContentType.equalsIgnoreCase(MIME_TYPE_XLSX)) || ingestableAsTabular(recognizedType) || recognizedType.equals("application/fits-gzipped") || recognizedType.equalsIgnoreCase(ShapefileHandler.SHAPEFILE_FILE_TYPE) || recognizedType.equals(MIME_TYPE_ZIP)) { finalType = recognizedType; } } } catch (Exception ex) { logger.warning("Failed to run the file utility mime type check on file " + fileName); } if (finalType == null) { finalType = (suppliedContentType == null || suppliedContentType.equals("")) ?
MIME_TYPE_UNDETERMINED_DEFAULT : suppliedContentType; } // A few special cases: // if this is a gzipped FITS file, we'll uncompress it, and ingest it as // a regular FITS file: if (finalType.equals("application/fits-gzipped")) { InputStream uncompressedIn = null; String finalFileName = fileName; // if the file name had the ".gz" extension, remove it, // since we are going to uncompress it: if (fileName != null && fileName.matches(".*\\.gz$")) { finalFileName = fileName.replaceAll("\\.gz$", ""); } DataFile datafile = null; try { uncompressedIn = new GZIPInputStream(new FileInputStream(tempFile.toFile())); datafile = createSingleDataFile(version, uncompressedIn, finalFileName, MIME_TYPE_UNDETERMINED_DEFAULT); } catch (IOException ioex) { datafile = null; } finally { if (uncompressedIn != null) { try { uncompressedIn.close(); } catch (IOException e) { } } } // If we were able to produce an uncompressed file, we'll use it // to create and return a final DataFile; if not, we're not going // to do anything - and then a new DataFile will be created further // down, from the original, uncompressed file. if (datafile != null) { // remove the compressed temp file: try { tempFile.toFile().delete(); } catch (SecurityException ex) { // (this is very non-fatal) logger.warning("Failed to delete temporary file " + tempFile.toString()); } datafiles.add(datafile); return datafiles; } // If it's a ZIP file, we are going to unpack it and create multiple // DataFile objects from its contents: } else if (finalType.equals("application/zip")) { ZipInputStream unZippedIn = null; ZipEntry zipEntry = null; int fileNumberLimit = systemConfig.getZipUploadFilesLimit(); try { Charset charset = null; /* TODO: (?) We may want to investigate somehow letting the user specify the charset for the filenames in the zip file... - otherwise, ZipInputStream bails out if it encounteres a file name that's not valid in the current charest (i.e., UTF-8, in our case). It would be a bit trickier than what we're doing for SPSS tabular ingests - with the lang. encoding pulldown menu - because this encoding needs to be specified *before* we upload and attempt to unzip the file. -- L.A. 4.0 beta12 logger.info("default charset is "+Charset.defaultCharset().name()); if (Charset.isSupported("US-ASCII")) { logger.info("charset US-ASCII is supported."); charset = Charset.forName("US-ASCII"); if (charset != null) { logger.info("was able to obtain charset for US-ASCII"); } } */ if (charset != null) { unZippedIn = new ZipInputStream(new FileInputStream(tempFile.toFile()), charset); } else { unZippedIn = new ZipInputStream(new FileInputStream(tempFile.toFile())); } while (true) { try { zipEntry = unZippedIn.getNextEntry(); } catch (IllegalArgumentException iaex) { // Note: // ZipInputStream documentation doesn't even mention that // getNextEntry() throws an IllegalArgumentException! // but that's what happens if the file name of the next // entry is not valid in the current CharSet. // -- L.A. warningMessage = "Failed to unpack Zip file. (Unknown Character Set used in a file name?) 
Saving the file as is."; logger.warning(warningMessage); throw new IOException(); } if (zipEntry == null) { break; } // Note that some zip entries may be directories - we // simply skip them: if (!zipEntry.isDirectory()) { if (datafiles.size() > fileNumberLimit) { logger.warning("Zip upload - too many files."); warningMessage = "The number of files in the zip archive is over the limit (" + fileNumberLimit + "); please upload a zip archive with fewer files, if you want them to be ingested " + "as individual DataFiles."; throw new IOException(); } String fileEntryName = zipEntry.getName(); logger.fine("ZipEntry, file: " + fileEntryName); if (fileEntryName != null && !fileEntryName.equals("")) { String shortName = fileEntryName.replaceFirst("^.*[\\/]", ""); // Check if it's a "fake" file - a zip archive entry // created for a MacOS X filesystem element: (these // start with "._") if (!shortName.startsWith("._") && !shortName.startsWith(".DS_Store") && !"".equals(shortName)) { // OK, this seems like an OK file entry - we'll try // to read it and create a DataFile with it: DataFile datafile = createSingleDataFile(version, unZippedIn, shortName, MIME_TYPE_UNDETERMINED_DEFAULT, false); if (!fileEntryName.equals(shortName)) { String categoryName = fileEntryName.replaceFirst("[\\/][^\\/]*$", ""); if (!"".equals(categoryName)) { logger.fine("setting category to " + categoryName); //datafile.getFileMetadata().setCategory(categoryName.replaceAll("[\\/]", "-")); datafile.getFileMetadata() .addCategoryByName(categoryName.replaceAll("[\\/]", "-")); } } if (datafile != null) { // We have created this datafile with the mime type "unknown"; // Now that we have it saved in a temporary location, // let's try and determine its real type: String tempFileName = getFilesTempDirectory() + "/" + datafile.getStorageIdentifier(); try { recognizedType = FileUtil.determineFileType(new File(tempFileName), shortName); logger.fine("File utility recognized unzipped file as " + recognizedType); if (recognizedType != null && !recognizedType.equals("")) { datafile.setContentType(recognizedType); } } catch (Exception ex) { logger.warning("Failed to run the file utility mime type check on file " + fileName); } datafiles.add(datafile); } } } } unZippedIn.closeEntry(); } } catch (IOException ioex) { // just clear the datafiles list and let // ingest default to creating a single DataFile out // of the unzipped file. logger.warning("Unzipping failed; rolling back to saving the file as is."); if (warningMessage == null) { warningMessage = "Failed to unzip the file. Saving the file as is."; } datafiles.clear(); } finally { if (unZippedIn != null) { try { unZippedIn.close(); } catch (Exception zEx) { } } } if (datafiles.size() > 0) { // link the data files to the dataset/version: Iterator<DataFile> itf = datafiles.iterator(); while (itf.hasNext()) { DataFile datafile = itf.next(); datafile.setOwner(version.getDataset()); if (version.getFileMetadatas() == null) { version.setFileMetadatas(new ArrayList()); } version.getFileMetadatas().add(datafile.getFileMetadata()); datafile.getFileMetadata().setDatasetVersion(version); /* TODO!! // re-implement this in some way that does not use the // deprecated .getCategory() on FileMeatadata: if (datafile.getFileMetadata().getCategory() != null) { datafile.getFileMetadata().addCategoryByName(datafile.getFileMetadata().getCategory()); datafile.getFileMetadata().setCategory(null); -- done? see above? 
} */ version.getDataset().getFiles().add(datafile); } // remove the uploaded zip file: try { Files.delete(tempFile); } catch (IOException ioex) { // do nothing - it's just a temp file. logger.warning("Could not remove temp file " + tempFile.getFileName().toString()); } // and return: return datafiles; } } else if (finalType.equalsIgnoreCase(ShapefileHandler.SHAPEFILE_FILE_TYPE)) { // Shape files may have to be split into multiple files, // one zip archive per each complete set of shape files: //File rezipFolder = new File(this.getFilesTempDirectory()); File rezipFolder = this.getShapefileUnzipTempDirectory(); IngestServiceShapefileHelper shpIngestHelper; shpIngestHelper = new IngestServiceShapefileHelper(tempFile.toFile(), rezipFolder); boolean didProcessWork = shpIngestHelper.processFile(); if (!(didProcessWork)) { logger.severe("Processing of zipped shapefile failed."); return null; } for (File finalFile : shpIngestHelper.getFinalRezippedFiles()) { FileInputStream finalFileInputStream = new FileInputStream(finalFile); finalType = this.getContentType(finalFile); if (finalType == null) { logger.warning("Content type is null; but should default to 'MIME_TYPE_UNDETERMINED_DEFAULT'"); continue; } DataFile new_datafile = createSingleDataFile(version, finalFileInputStream, finalFile.getName(), finalType); if (new_datafile != null) { datafiles.add(new_datafile); } else { logger.severe("Could not add part of rezipped shapefile. new_datafile was null: " + finalFile.getName()); } finalFileInputStream.close(); } // Delete the temp directory used for unzipping /* logger.fine("Delete temp shapefile unzip directory: " + rezipFolder.getAbsolutePath()); FileUtils.deleteDirectory(rezipFolder); // Delete rezipped files for (File finalFile : shpIngestHelper.getFinalRezippedFiles()){ if (finalFile.isFile()){ finalFile.delete(); } } */ if (datafiles.size() > 0) { return datafiles; } else { logger.severe("No files added from directory of rezipped shapefiles"); } return null; } // Finally, if none of the special cases above were applicable (or // if we were unable to unpack an uploaded file, etc.), we'll just // create and return a single DataFile: // (Note that we are passing null for the InputStream; that's because // we already have the file saved; we'll just need to rename it, below) DataFile datafile = createSingleDataFile(version, null, fileName, finalType); if (datafile != null) { fileService.generateStorageIdentifier(datafile); if (!tempFile.toFile() .renameTo(new File(getFilesTempDirectory() + "/" + datafile.getStorageIdentifier()))) { return null; } // MD5: MD5Checksum md5Checksum = new MD5Checksum(); try { datafile.setmd5( md5Checksum.CalculateMD5(getFilesTempDirectory() + "/" + datafile.getStorageIdentifier())); } catch (Exception md5ex) { logger.warning("Could not calculate MD5 signature for new file " + fileName); } if (warningMessage != null) { createIngestFailureReport(datafile, warningMessage); datafile.SetIngestProblem(); } datafiles.add(datafile); return datafiles; } return null; }
From source file:com.fratello.longevity.smooth.AppGUI.java
private void delStuff() { int totalSize = MasterList.size(); LabelMaxSize = 2 * totalSize - 1; int progCounter = 0; SpecialFile[] list = new SpecialFile[totalSize]; SentinelProgressLabel.setText("Analyzing file data"); SentinelProgressBar.setValue(0); SentinelProgressBar.setMaximum(2 * totalSize - 1); for (int i = 0; i < totalSize; i++) { list[i] = MasterList.get(i); list[i].setCompareFilter(cf); if (GUI_Stop || GUI_Pause) { GUI_Pause = true; if (GUI_Stop) { clearFields(); GUI_Stop = false; } GUI_Start = false; break; } publish(++progCounter); } for (int i = 0; i < totalSize - 1; i++) { if (list[i].equals(list[i + 1])) { try { Files.delete(list[i].getPath()); } catch (Exception e) { e.printStackTrace(); } } if (GUI_Stop || GUI_Pause) { GUI_Pause = true; if (GUI_Stop) { clearFields(); GUI_Stop = false; } GUI_Start = false; break; } publish(++progCounter); } }
From source file:com.spectralogic.ds3client.integration.Smoke_Test.java
@Test public void testRecoverReadJob() throws IOException, XmlProcessingException, JobRecoveryException, URISyntaxException { final String bucketName = "test_recover_read_job_bucket"; final String book1 = "beowulf.txt"; final String book2 = "ulysses.txt"; final Path objPath1 = ResourceUtils.loadFileResource(RESOURCE_BASE_NAME + book1); final Path objPath2 = ResourceUtils.loadFileResource(RESOURCE_BASE_NAME + book2); final Ds3Object obj1 = new Ds3Object(book1, Files.size(objPath1)); final Ds3Object obj2 = new Ds3Object(book2, Files.size(objPath2)); final Path dirPath = FileSystems.getDefault().getPath("output"); if (!Files.exists(dirPath)) { Files.createDirectory(dirPath); } try { HELPERS.ensureBucketExists(bucketName, envDataPolicyId); final Ds3ClientHelpers.Job putJob = HELPERS.startWriteJob(bucketName, Lists.newArrayList(obj1, obj2)); putJob.transfer(new ResourceObjectPutter(RESOURCE_BASE_NAME)); final FileChannel channel1 = FileChannel.open(dirPath.resolve(book1), StandardOpenOption.WRITE, StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING); final Ds3ClientHelpers.Job readJob = HELPERS.startReadJob(bucketName, Lists.newArrayList(obj1, obj2)); final GetObjectResponse readResponse1 = client .getObject(new GetObjectRequest(bucketName, book1, channel1, readJob.getJobId().toString(), 0)); assertThat(readResponse1, is(notNullValue())); assertThat(readResponse1.getStatusCode(), is(equalTo(200))); // Interruption... final Ds3ClientHelpers.Job recoverJob = HELPERS.recoverReadJob(readJob.getJobId()); final FileChannel channel2 = FileChannel.open(dirPath.resolve(book2), StandardOpenOption.WRITE, StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING); final GetObjectResponse readResponse2 = client.getObject( new GetObjectRequest(bucketName, book2, channel2, recoverJob.getJobId().toString(), 0)); assertThat(readResponse2, is(notNullValue())); assertThat(readResponse2.getStatusCode(), is(equalTo(200))); } finally { deleteAllContents(client, bucketName); for (final Path tempFile : Files.newDirectoryStream(dirPath)) { Files.delete(tempFile); } Files.delete(dirPath); } }
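The finally block above empties the output directory entry by entry and only then deletes the directory itself, again because Files.delete succeeds only on an empty directory. A stripped-down version of that teardown (assuming the usual java.nio.file imports and a directory containing only regular files):

Path dirPath = FileSystems.getDefault().getPath("output");
try (DirectoryStream<Path> entries = Files.newDirectoryStream(dirPath)) {
    for (Path entry : entries) {
        Files.delete(entry); // remove each file first
    }
}
Files.delete(dirPath); // the directory is now empty, so this succeeds

Unlike the loop in the test, this sketch opens the DirectoryStream in try-with-resources so the stream is closed.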
From source file:com.upplication.s3fs.util.AmazonS3ClientMock.java
@Override public void deleteBucket(String bucketName) throws AmazonClientException { try { Path bucket = base.resolve(bucketName); Files.walkFileTree(bucket, new SimpleFileVisitor<Path>() { @Override public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException { Files.delete(dir); return FileVisitResult.CONTINUE; } @Override public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { Files.delete(file); return FileVisitResult.CONTINUE; } }); } catch (IOException e) { throw new AmazonClientException(e); } }
From source file:org.schedulesdirect.grabber.Grabber.java
private void updateZip(NetworkEpgClient clnt) throws IOException, JSONException, JsonParseException { Set<String> completedListings = new HashSet<String>(); LOG.debug(String.format("Using %d worker threads", globalOpts.getMaxThreads())); pool = createThreadPoolExecutor(); start = System.currentTimeMillis(); File dest = grabOpts.getTarget(); cachedSeriesIds = new HashSet<String>(); boolean rmDest = false; if (dest.exists()) { ZipEpgClient zipClnt = null; try { zipClnt = new ZipEpgClient(dest); if (!zipClnt.getUserStatus().getLastServerRefresh() .before(clnt.getUserStatus().getLastServerRefresh())) { LOG.info( "Current cache file contains latest data from Schedules Direct server; use --force-download to force a new download from server."); boolean force = grabOpts.isForce(); if (!force) return; else LOG.warn("Forcing an update of data with the server due to user request!"); } } catch (Exception e) { if (grabOpts.isKeep()) { LOG.error("Existing cache is invalid, keeping by user request!", e); return; } else { LOG.warn("Existing cache is invalid, deleting it; use --keep-bad-cache to keep existing cache!", e); rmDest = true; } } finally { if (zipClnt != null) try { zipClnt.close(); } catch (IOException e) { } if (rmDest && !dest.delete()) throw new IOException("Unable to delete " + dest); } } freshZip = !dest.exists(); try (FileSystem vfs = FileSystems.newFileSystem(new URI(String.format("jar:%s", dest.toURI())), Collections.singletonMap("create", "true"))) { if (freshZip) { Path target = vfs.getPath(ZipEpgClient.ZIP_VER_FILE); Files.write(target, Integer.toString(ZipEpgClient.ZIP_VER).getBytes(ZipEpgClient.ZIP_CHARSET)); } ProgramCache progCache = ProgramCache.get(vfs); Path lineups = vfs.getPath("lineups.txt"); Files.deleteIfExists(lineups); Path scheds = vfs.getPath("/schedules/"); if (!Files.isDirectory(scheds)) Files.createDirectory(scheds); Path maps = vfs.getPath("/maps/"); PathUtils.removeDirectory(maps); Files.createDirectory(maps); Path progs = vfs.getPath("/programs/"); if (!Files.isDirectory(progs)) Files.createDirectory(progs); Path logos = vfs.getPath("/logos/"); if (!Files.isDirectory(logos)) Files.createDirectory(logos); Path md5s = vfs.getPath("/md5s/"); if (!Files.isDirectory(md5s)) Files.createDirectory(md5s); Path cache = vfs.getPath(LOGO_CACHE); if (Files.exists(cache)) { String cacheData = new String(Files.readAllBytes(cache), ZipEpgClient.ZIP_CHARSET); logoCache = Config.get().getObjectMapper().readValue(cacheData, JSONObject.class); } else logoCache = new JSONObject(); Path seriesInfo = vfs.getPath("/seriesInfo/"); if (!Files.isDirectory(seriesInfo)) Files.createDirectories(seriesInfo); loadSeriesInfoIds(seriesInfo); missingSeriesIds = Collections.synchronizedSet(new HashSet<String>()); loadRetryIds(vfs.getPath(SERIES_INFO_DATA)); JSONObject resp = Config.get().getObjectMapper().readValue( factory.get(DefaultJsonRequest.Action.GET, RestNouns.LINEUPS, clnt.getHash(), clnt.getUserAgent(), globalOpts.getUrl().toString()).submitForJson(null), JSONObject.class); if (!JsonResponseUtils.isErrorResponse(resp)) Files.write(lineups, resp.toString(3).getBytes(ZipEpgClient.ZIP_CHARSET)); else LOG.error("Received error response when requesting lineup data!"); for (Lineup l : clnt.getLineups()) { buildStationList(); JSONObject o = Config.get().getObjectMapper() .readValue( factory.get(DefaultJsonRequest.Action.GET, l.getUri(), clnt.getHash(), clnt.getUserAgent(), globalOpts.getUrl().toString()).submitForJson(null), JSONObject.class);
Files.write(vfs.getPath("/maps", ZipEpgClient.scrubFileName(String.format("%s.txt", l.getId()))), o.toString(3).getBytes(ZipEpgClient.ZIP_CHARSET)); JSONArray stations = o.getJSONArray("stations"); JSONArray ids = new JSONArray(); for (int i = 0; i < stations.length(); ++i) { JSONObject obj = stations.getJSONObject(i); String sid = obj.getString("stationID"); if (stationList != null && !stationList.contains(sid)) LOG.debug(String.format("Skipped %s; not listed in station file", sid)); else if (completedListings.add(sid)) { ids.put(sid); if (!grabOpts.isNoLogos()) { if (logoCacheInvalid(obj)) pool.execute(new LogoTask(obj, vfs, logoCache)); else if (LOG.isDebugEnabled()) LOG.debug(String.format("Skipped logo for %s; already cached!", obj.optString("callsign", null))); } else if (!logosWarned) { logosWarned = true; LOG.warn("Logo downloads disabled by user request!"); } } else LOG.debug(String.format("Skipped %s; already downloaded.", sid)); //pool.setMaximumPoolSize(5); // Processing these new schedules takes all kinds of memory! if (ids.length() == grabOpts.getMaxSchedChunk()) { pool.execute(new ScheduleTask(ids, vfs, clnt, progCache, factory)); ids = new JSONArray(); } } if (ids.length() > 0) pool.execute(new ScheduleTask(ids, vfs, clnt, progCache, factory)); } pool.shutdown(); try { LOG.debug("Waiting for SchedLogoExecutor to terminate..."); if (pool.awaitTermination(15, TimeUnit.MINUTES)) LOG.debug("SchedLogoExecutor: Terminated successfully."); else { failedTask = true; LOG.warn( "SchedLogoExecutor: Termination timed out; some tasks probably didn't finish properly!"); } } catch (InterruptedException e) { failedTask = true; LOG.warn( "SchedLogoExecutor: Termination interrupted); some tasks probably didn't finish properly!"); } Files.write(cache, logoCache.toString(3).getBytes(ZipEpgClient.ZIP_CHARSET), StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.WRITE, StandardOpenOption.CREATE); ScheduleTask.commit(vfs); pool = createThreadPoolExecutor(); //pool.setMaximumPoolSize(5); // Again, we've got memory problems String[] dirtyPrograms = progCache.getDirtyIds(); progCache.markAllClean(); progCache = null; LOG.info(String.format("Identified %d program ids requiring an update!", dirtyPrograms.length)); Collection<String> progIds = new ArrayList<String>(); for (String progId : dirtyPrograms) { progIds.add(progId); if (progIds.size() == grabOpts.getMaxProgChunk()) { pool.execute(new ProgramTask(progIds, vfs, clnt, factory, missingSeriesIds, "programs", null, false)); progIds.clear(); } } if (progIds.size() > 0) pool.execute( new ProgramTask(progIds, vfs, clnt, factory, missingSeriesIds, "programs", null, false)); pool.shutdown(); try { LOG.debug("Waiting for ProgramExecutor to terminate..."); if (pool.awaitTermination(15, TimeUnit.MINUTES)) { LOG.debug("ProgramExecutor: Terminated successfully."); Iterator<String> itr = missingSeriesIds.iterator(); while (itr.hasNext()) { String id = itr.next(); if (cachedSeriesIds.contains(id)) itr.remove(); } if (missingSeriesIds.size() > 0) { LOG.info(String.format("Grabbing %d series info programs!", missingSeriesIds.size())); Set<String> retrySet = new HashSet<>(); try { new ProgramTask(missingSeriesIds, vfs, clnt, factory, missingSeriesIds, "seriesInfo", retrySet, true).run(); } catch (RuntimeException e) { LOG.error("SeriesInfo task failed!", e); Grabber.failedTask = true; } Path seriesInfoData = vfs.getPath(SERIES_INFO_DATA); if (retrySet.size() > 0) { StringBuilder sb = new StringBuilder(); for (String id : retrySet) sb.append(id + "\n"); 
Files.write(seriesInfoData, sb.toString().getBytes(ZipEpgClient.ZIP_CHARSET), StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.CREATE); } else if (Files.exists(seriesInfoData)) Files.delete(seriesInfoData); } } else { failedTask = true; LOG.warn("ProgramExecutor: Termination timed out; some tasks probably didn't finish properly!"); } } catch (InterruptedException e) { failedTask = true; LOG.warn("ProgramExecutor: Termination interrupted); some tasks probably didn't finish properly!"); } String userData = clnt.getUserStatus().toJson(); if (failedTask) { LOG.error("One or more tasks failed! Resetting last data refresh timestamp to zero."); SimpleDateFormat fmt = Config.get().getDateTimeFormat(); String exp = fmt.format(new Date(0L)); JSONObject o = Config.get().getObjectMapper().readValue(userData, JSONObject.class); o.put("lastDataUpdate", exp); userData = o.toString(2); } Path p = vfs.getPath(USER_DATA); Files.write(p, userData.getBytes(ZipEpgClient.ZIP_CHARSET), StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.CREATE); removeIgnoredStations(vfs); } catch (URISyntaxException e1) { throw new RuntimeException(e1); } finally { Runtime rt = Runtime.getRuntime(); LOG.info(String.format("MemStats:%n\tFREE: %s%n\tUSED: %s%n\t MAX: %s", FileUtils.byteCountToDisplaySize(rt.freeMemory()), FileUtils.byteCountToDisplaySize(rt.totalMemory()), FileUtils.byteCountToDisplaySize(rt.maxMemory()))); } }
From source file:org.roda.core.storage.fedora.FedoraStorageService.java
@Override public DirectResourceAccess getDirectAccess(final StoragePath storagePath) { return new DirectResourceAccess() { Path temp = null; @Override public Path getPath() throws GenericException, RequestNotValidException, AuthorizationDeniedException, NotFoundException { Class<? extends Entity> entity = getEntity(storagePath); Path path; try { temp = Files.createTempDirectory("temp", getTempDirFilePermissions()); if (entity.equals(Container.class) || entity.equals(Directory.class)) { StorageService tempStorage = new FileStorageService(temp); tempStorage.copy(FedoraStorageService.this, storagePath, storagePath); path = temp; } else { path = temp.resolve(entity.getName()); Binary binary = getBinary(storagePath); ContentPayload payload = binary.getContent(); InputStream inputStream = payload.createInputStream(); Files.copy(inputStream, path); IOUtils.closeQuietly(inputStream); } } catch (IOException | AlreadyExistsException e) { throw new GenericException(e); } return path; } @Override public void close() throws IOException { if (temp != null) { Files.delete(temp); temp = null; } } }; }
From source file:com.upplication.s3fs.util.AmazonS3ClientMock.java
public void clear() { try { Files.walkFileTree(base, new SimpleFileVisitor<Path>() { @Override public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException { if (dir != base) Files.delete(dir); return FileVisitResult.CONTINUE; } @Override public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { Files.delete(file); return FileVisitResult.CONTINUE; } }); } catch (IOException e) { e.printStackTrace(); } }
From source file:org.eclipse.cdt.arduino.core.internal.board.ArduinoManager.java
public static void recursiveDelete(Path directory) throws IOException { Files.walkFileTree(directory, new SimpleFileVisitor<Path>() { @Override public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { Files.delete(file); return FileVisitResult.CONTINUE; } @Override public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException { Files.delete(dir); return FileVisitResult.CONTINUE; } }); }
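On Java 8 and newer the same recursive delete can be written with Files.walk, sorting the stream in reverse order so that children are deleted before their parent directories. A minimal alternative sketch, not taken from the project above (requires java.nio.file, java.util.Comparator, java.util.stream, and java.io imports):

public static void recursiveDelete(Path directory) throws IOException {
    try (Stream<Path> paths = Files.walk(directory)) { // stream must be closed
        paths.sorted(Comparator.reverseOrder()).forEach(path -> {
            try {
                Files.delete(path); // children sort after parents, so they are removed first
            } catch (IOException e) {
                throw new UncheckedIOException(e); // surface failures from inside the lambda
            }
        });
    }
}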