List of usage examples for java.io.File.setLastModified
public boolean setLastModified(long time)
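The method returns true if the last-modified time was updated and false if it could not be changed (for example on a read-only file system); it throws IllegalArgumentException for a negative time. Most of the examples below call it to propagate a source file's timestamp onto a freshly written copy. A minimal sketch of that pattern, using hypothetical file names and assuming both files already exist:

import java.io.File;

public class SetLastModifiedDemo {
    public static void main(String[] args) {
        // Hypothetical paths, for illustration only
        File source = new File("source.txt");
        File copy = new File("copy.txt");

        // Propagate the source timestamp to the copy; the call returns
        // false rather than throwing if the timestamp cannot be set.
        boolean ok = copy.setLastModified(source.lastModified());
        if (!ok) {
            System.err.println("Could not update last-modified time of " + copy);
        }
    }
}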
From source file:org.isatools.isatab.export.sra.SraExporter.java
/**
 * The procedure to call for triggering the export
 */
public void export() {
    for (final Study study : store.valuesOfType(Study.class)) {

        // Go ahead only if there is some SRA assay
        boolean haveSra = false;
        for (Assay assay : study.getAssays()) {
            if ("sra".equals(AssayTypeEntries.getDispatchTargetIdFromLabels(assay))) {
                haveSra = true;
                break;
            }
        }
        if (!haveSra) {
            continue;
        }

        final Investigation investigation = study.getUniqueInvestigation();
        if (investigation != null) {
            ndc.pushObject(investigation);
        }
        ndc.pushObject(study);

        String studyAcc = study.getAcc();
        log.trace("SraExporter, Working on study " + studyAcc);

        // Prepare the submission
        SubmissionType xsubmission = SubmissionType.Factory.newInstance();

        centerName = StringUtils.trimToNull(study.getSingleAnnotationValue("comment:SRA Center Name"));
        if (centerName == null) {
            throw new TabMissingValueException(MessageFormat.format(
                    "The study ''{0}'' has no 'SRA Center Name', cannot export to SRA format", studyAcc));
        }
        xsubmission.setCenterName(centerName);

        String submissionId = StringUtils.trimToNull(study.getSingleAnnotationValue("comment:SRA Submission ID"));
        if (submissionId == null) {
            log.warn(MessageFormat.format("The study ''{0}'' has no 'SRA Submission ID'", study.getAcc()));
        } else {
            xsubmission.setSubmissionId(submissionId);
        }

        brokerName = StringUtils.trimToNull(study.getSingleAnnotationValue("comment:SRA Broker Name"));
        if (brokerName == null) {
            log.warn(MessageFormat.format("The study ''{0}'' has no 'SRA Broker Name'", study.getAcc()));
        } else {
            xsubmission.setBrokerName(brokerName);
        }

        String labName = StringUtils.trimToNull(study.getSingleAnnotationValue("comment:SRA Lab Name"));
        if (labName == null) {
            log.warn(MessageFormat.format("The study ''{0}'' has no 'SRA Lab Name'", study.getAcc()));
        } else {
            xsubmission.setLabName(labName);
        }

        Date subDate = study.getSubmissionDate();
        if (subDate != null) {
            Calendar xsubDate = Calendar.getInstance();
            xsubDate.setTime(subDate);
            xsubmission.setSubmissionDate(xsubDate);
        }

        buildExportedSubmissionContacts(xsubmission, study);
        buildStudyActions(xsubmission, study);

        FILES xsubFiles = FILES.Factory.newInstance();
        RunSetType runSet = RunSetType.Factory.newInstance();
        ExperimentSetType expSet = ExperimentSetType.Factory.newInstance();
        SampleSetType sampleSet = SampleSetType.Factory.newInstance();

        final int minFiles = xsubFiles.sizeOfFILEArray();

        STUDYDocument xstudyDoc = null;
        boolean isAssayOk = true;

        // Map<String, Set<String>> assayToBarcodes = getBarcodesByAssay(study);
        //
        // for (String materialName : assayToBarcodes.keySet()) {
        //     System.out.println(materialName);
        //     for (String barcode : assayToBarcodes.get(materialName)) {
        //         System.out.println("\t" + barcode);
        //     }
        // }

        for (Assay assay : study.getAssays()) {
            // todo investigate
            if ("sra".equals(AssayTypeEntries.getDispatchTargetIdFromLabels(assay))) {
                // Prepare the study
                xstudyDoc = buildExportedStudy(study);

                ndc.pushObject(assay);
                log.trace("SraExporter, Working on assay " + assay.getAcc());

                // Adds up the information built with the assay
                if (!buildExportedAssay(assay, xsubFiles, runSet, expSet, sampleSet)) {
                    isAssayOk = false;
                    // Skip all the assay file if only a single assay is wrong, a partial export is too dangerous
                    break;
                }

                ndc.popObject(); // assay
            }
        } // for Assay

        // If there is at least one run, we have to write the files in the export path, we need one
        // submission per study
        //
        if (isAssayOk && xstudyDoc != null) {
            int xfileSz = xsubFiles.sizeOfFILEArray();

            // It doesn't make sense that we don't have at least one data file. This should prevent
            // any case of missing experimental elements (e.g.: only the samples are reported).
            //
            if (xfileSz <= minFiles) {
                log.warn(MessageFormat.format(
                        "No data file defined for the study {0}, no SRA exporting done for this study",
                        study.getAcc()));
            } else {
                try {
                    xsubmission.setFILES(xsubFiles);

                    String xSubmissionPath = exportPath + "/" + DataLocationManager.accession2FileName(studyAcc);
                    File xsubmissionDir = new File(xSubmissionPath);
                    if (!xsubmissionDir.exists()) {
                        FileUtils.forceMkdir(xsubmissionDir);
                    }

                    log.debug("SRA exporter: writing SRA XML files for study " + studyAcc);

                    SUBMISSIONDocument xsubmissionDoc = SUBMISSIONDocument.Factory.newInstance();
                    xsubmissionDoc.setSUBMISSION(xsubmission);

                    FileUtils.writeStringToFile(new File(xSubmissionPath + "/submission.xml"),
                            xsubmissionDoc.toString());
                    FileUtils.writeStringToFile(new File(xSubmissionPath + "/study.xml"), xstudyDoc.toString());

                    EXPERIMENTSETDocument expSetDoc = EXPERIMENTSETDocument.Factory.newInstance();
                    expSetDoc.setEXPERIMENTSET(expSet);

                    // A modification is made on the XML to be output to remove any tags required for injection
                    // of elements into the DOM during conversion. These tags are usually found as <INJECTED_TAG>.
                    // The SRAUtils.removeInjectedTags method finds and replaces these tags with empty spaces.
                    FileUtils.writeStringToFile(new File(xSubmissionPath + "/experiment_set.xml"),
                            SRAUtils.removeInjectedTags(expSetDoc.toString()));

                    RUNSETDocument runSetDoc = RUNSETDocument.Factory.newInstance();
                    runSetDoc.setRUNSET(runSet);
                    FileUtils.writeStringToFile(new File(xSubmissionPath + "/run_set.xml"), runSetDoc.toString());

                    SAMPLESETDocument sampleSetDoc = SAMPLESETDocument.Factory.newInstance();
                    sampleSetDoc.setSAMPLESET(sampleSet);
                    FileUtils.writeStringToFile(new File(xSubmissionPath + "/sample_set.xml"),
                            sampleSetDoc.toString());

                    log.debug("SRA exporter: copying SRA data files for study " + studyAcc);
                    for (int i = 4; i < xfileSz; i++) {
                        FILE xfile = xsubFiles.getFILEArray(i);
                        String fileName = xfile.getFilename();
                        String filePath = sourcePath + "/" + fileName;
                        File srcFile = new File(filePath);
                        if (!srcFile.exists()) {
                            log.debug("WARNING: source data file '" + filePath + "' not found, ignoring");
                            continue;
                        }

                        String targetFilePath = xSubmissionPath + "/" + fileName;
                        File targetFile = new File(targetFilePath);
                        log.trace("Copying the file '" + filePath + "' to '" + targetFilePath + "'...");
                        FileUtils.copyFile(srcFile, targetFile, true);
                        // needed, there's a bug in the previous function
                        targetFile.setLastModified(srcFile.lastModified());
                        log.trace("...done");
                    }
                } catch (IOException ex) {
                    throw new TabIOException(MessageFormat.format("Error during SRA export of study {0}: {1}",
                            studyAcc, ex.getMessage()), ex);
                }
            }
        } // is assay OK

        ndc.popObject(); // study
        if (investigation != null) {
            ndc.popObject();
        } // investigation
    } // for ( study )

    // Finally, output some messages...
    if (nonRepeatedMessages.size() > 0) {
        log.warn("SRA export completed with the following warnings:");
        for (String msg : nonRepeatedMessages) {
            log.warn(msg);
        }
    }
}
From source file:ch.entwine.weblounge.contentrepository.impl.PreviewGeneratorWorker.java
/**
 * {@inheritDoc}
 *
 * @see java.lang.Runnable#run()
 */
public void run() {
    ResourceURI resourceURI = resource.getURI();
    String resourceType = resourceURI.getType();
    try {
        // Find the resource serializer
        ResourceSerializer<?, ?> serializer = contentRepository.getSerializerByType(resourceType);
        if (serializer == null) {
            logger.warn("Unable to index resources of type '{}': no resource serializer found", resourceType);
            return;
        }

        // Does the serializer come with a preview generator?
        PreviewGenerator previewGenerator = serializer.getPreviewGenerator(resource);
        if (previewGenerator == null) {
            logger.debug("Resource type '{}' does not support previews", resourceType);
            return;
        }

        // Create the scaled images
        String mimeType = "image/" + format;
        ResourceSerializer<?, ?> s = contentRepository.getSerializerByMimeType(mimeType);
        if (s == null) {
            logger.warn("No resource serializer is capable of dealing with resources of format '{}'", mimeType);
            return;
        } else if (!(s instanceof ImageResourceSerializer)) {
            logger.warn("Resource serializer lookup for format '{}' returned {}", format, s.getClass());
            return;
        }

        // Find us an image serializer
        ImageResourceSerializer irs = (ImageResourceSerializer) s;
        ImagePreviewGenerator imagePreviewGenerator = (ImagePreviewGenerator) irs.getPreviewGenerator(format);
        if (imagePreviewGenerator == null) {
            logger.warn("Image resource serializer {} does not provide support for '{}'", irs, format);
            return;
        }

        // Now scale the original preview according to the existing styles
        for (Language l : languages) {
            if (!resource.supportsContentLanguage(l))
                continue;

            // Have we been told to stop doing work in the meantime?
            if (canceled)
                return;

            // Create the original preview image for every language
            File originalPreview = null;
            if (!resource.supportsContentLanguage(l))
                continue;
            originalPreview = createPreview(resource, null, l, previewGenerator, format);
            if (originalPreview == null || !originalPreview.exists() || originalPreview.length() == 0) {
                logger.warn("Preview generation for {} failed", resource);
                return;
            }

            long resourceLastModified = ResourceUtils.getModificationDate(resource, l).getTime();

            // Create the remaining styles
            for (ImageStyle style : styles) {

                // Have we been told to stop doing work in the meantime?
                if (canceled)
                    return;

                // The original has been produced already
                if (ImageScalingMode.None.equals(style.getScalingMode()))
                    continue;

                FileInputStream fis = null;
                FileOutputStream fos = null;
                try {
                    File scaledFile = ImageStyleUtils.createScaledFile(resource, l, style);

                    // Create the file if it doesn't exist or if it is out dated. Note
                    // that the last modified date of a file has a precision of seconds
                    if (!scaledFile.isFile() || FileUtils.isFileOlder(scaledFile, new Date(resourceLastModified))) {
                        logger.info("Creating preview at {}", scaledFile.getAbsolutePath());
                        fis = new FileInputStream(originalPreview);
                        fos = new FileOutputStream(scaledFile);
                        imagePreviewGenerator.createPreview(originalPreview, environment, l, style, format, fis, fos);
                        scaledFile.setLastModified(Math.max(new Date().getTime(), resourceLastModified));

                        // Store the style definition used while creating the preview
                        File baseDir = ImageStyleUtils.getDirectory(resource.getURI().getSite(), style);
                        File definitionFile = new File(baseDir, "style.xml");
                        if (!definitionFile.isFile()) {
                            logger.debug("Storing style definition at {}", definitionFile);
                            definitionFile.createNewFile();
                            FileUtils.copyInputStreamToFile(IOUtils.toInputStream(style.toXml(), "UTF-8"),
                                    definitionFile);
                        }
                    } else {
                        logger.debug("Skipping creation of existing '{}' preview of {}", style, resource);
                    }
                } catch (Throwable t) {
                    logger.error("Error scaling {}: {}", originalPreview, t.getMessage());
                    continue;
                } finally {
                    IOUtils.closeQuietly(fis);
                    IOUtils.closeQuietly(fos);
                }
            }
        }
    } finally {
        if (canceled)
            logger.debug("Preview operation for {} has been canceled", resource.getIdentifier());
        contentRepository.previewCreated(resource);
    }
}
From source file:org.alfresco.solr.AlfrescoCoreAdminHandler.java
private static void copyFile(File srcFile, File destFile, boolean preserveFileDate) throws IOException {
    try {
        if (destFile.exists()) {
            throw new IOException("File shoud not exist " + destFile);
        }
        FileInputStream input = new FileInputStream(srcFile);
        try {
            FileOutputStream output = new FileOutputStream(destFile);
            try {
                copy(input, output);
            } finally {
                try {
                    output.close();
                } catch (IOException io) {
                }
            }
        } finally {
            try {
                input.close();
            } catch (IOException io) {
            }
        }

        // check copy
        if (srcFile.length() != destFile.length()) {
            throw new IOException("Failed to copy full from '" + srcFile + "' to '" + destFile + "'");
        }

        if (preserveFileDate) {
            destFile.setLastModified(srcFile.lastModified());
        }
    } catch (FileNotFoundException fnfe) {
        fnfe.printStackTrace();
    }
}
From source file:org.apache.jackrabbit.core.data.FileDataStore.java
/**
 * Creates a new data record.
 * The stream is first consumed and the contents are saved in a temporary file
 * and the SHA-1 message digest of the stream is calculated. If a
 * record with the same SHA-1 digest (and length) is found then it is
 * returned. Otherwise the temporary file is moved in place to become
 * the new data record that gets returned.
 *
 * @param input binary stream
 * @return data record that contains the given stream
 * @throws DataStoreException if the record could not be created
 */
public DataRecord addRecord(InputStream input) throws DataStoreException {
    File temporary = null;
    try {
        temporary = newTemporaryFile();
        DataIdentifier tempId = new DataIdentifier(temporary.getName());
        usesIdentifier(tempId);
        // Copy the stream to the temporary file and calculate the
        // stream length and the message digest of the stream
        long length = 0;
        MessageDigest digest = MessageDigest.getInstance(DIGEST);
        OutputStream output = new DigestOutputStream(new FileOutputStream(temporary), digest);
        try {
            length = IOUtils.copyLarge(input, output);
        } finally {
            output.close();
        }
        DataIdentifier identifier = new DataIdentifier(digest.digest());
        File file;

        synchronized (this) {
            // Check if the same record already exists, or
            // move the temporary file in place if needed
            usesIdentifier(identifier);
            file = getFile(identifier);
            File parent = file.getParentFile();
            if (!parent.isDirectory()) {
                parent.mkdirs();
            }
            if (!file.exists()) {
                temporary.renameTo(file);
                if (!file.exists()) {
                    throw new IOException("Can not rename " + temporary.getAbsolutePath() + " to "
                            + file.getAbsolutePath() + " (media read only?)");
                }
            } else {
                long now = System.currentTimeMillis();
                if (file.lastModified() < now) {
                    file.setLastModified(now);
                }
            }

            // Sanity checks on the record file. These should never fail,
            // but better safe than sorry...
            if (!file.isFile()) {
                throw new IOException("Not a file: " + file);
            }
            if (file.length() != length) {
                throw new IOException(DIGEST + " collision: " + file);
            }
        }
        // this will also make sure that
        // tempId is not garbage collected until here
        inUse.remove(tempId);
        return new FileDataRecord(identifier, file);
    } catch (NoSuchAlgorithmException e) {
        throw new DataStoreException(DIGEST + " not available", e);
    } catch (IOException e) {
        throw new DataStoreException("Could not add record", e);
    } finally {
        if (temporary != null) {
            temporary.delete();
        }
    }
}
From source file:org.cosmo.common.util.Util.java
public static File newFile(File file) throws IOException {
    String fileName = file.getAbsolutePath();
    if (file.exists()) {
        Date fileDate = new Date(file.lastModified());
        SimpleDateFormat format = new SimpleDateFormat("MM.dd_hh.mm.ss");
        String dateStr = format.format(fileDate);
        int dot = fileName.lastIndexOf('.');
        String backupFileName = (dot > 0)
                ? fileName.substring(0, dot) + "." + dateStr + fileName.substring(dot, fileName.length())
                : fileName + "." + dateStr;
        File backupFile = (DumpInTmpDir) ? new File(TmpDir, new File(backupFileName).getName())
                : new File(backupFileName);

        // if the backup file exists then recursive call this with
        // new modified date, ie another backup file name
        if (backupFile.exists()) {
            file.setLastModified(file.lastModified() + 1000);
            return newFile(fileName);
        } else {
            // rename the current file to backupfile
            boolean successRename = file.renameTo(backupFile);
            for (int i = 0; !successRename && i < 10; i++) {
                try {
                    Thread.sleep(500);
                } catch (InterruptedException e) {
                }
                successRename = file.renameTo(backupFile);
            }
            if (!successRename) {
                throw new IOException(Fmt.S(
                        "File [%s] already exists. Fail to back up to [%s]. check for process holding this file handle.",
                        file.getName(), backupFile));
            }
        }
    }
    if (file.getParentFile() != null && !file.getParentFile().exists()) {
        file.getParentFile().mkdirs();
    }
    return new File(fileName);
}
From source file:de.undercouch.gradle.tasks.download.DownloadAction.java
/**
 * Save an HTTP response to a file
 * @param response the response to save
 * @param destFile the destination file
 * @throws IOException if the response could not be downloaded
 */
private void performDownload(HttpResponse response, File destFile) throws IOException {
    HttpEntity entity = response.getEntity();
    if (entity == null) {
        return;
    }

    //get content length
    long contentLength = entity.getContentLength();
    if (contentLength >= 0) {
        size = toLengthText(contentLength);
    }

    processedBytes = 0;
    loggedKb = 0;

    //open stream and start downloading
    InputStream is = entity.getContent();
    stream(is, destFile);

    long newTimestamp = parseLastModified(response);
    if (onlyIfNewer && newTimestamp > 0) {
        destFile.setLastModified(newTimestamp);
    }
}
From source file:org.apache.archiva.converter.artifact.LegacyToDefaultConverterTest.java
@Test
public void testForcedUnmodifiedArtifact() throws Exception {
    // test unmodified artifact is still converted when set to force
    artifactConverter = applicationContext.getBean("artifactConverter#force-repository-converter",
            ArtifactConverter.class);

    Artifact artifact = createArtifact("test", "unmodified-artifact", "1.0.0");
    Artifact pomArtifact = createPomArtifact(artifact);

    File sourceFile = new File(sourceRepository.getBasedir(), sourceRepository.pathOf(artifact));
    File sourcePomFile = new File(sourceRepository.getBasedir(), sourceRepository.pathOf(pomArtifact));
    File targetFile = new File(targetRepository.getBasedir(), targetRepository.pathOf(artifact));
    File targetPomFile = new File(targetRepository.getBasedir(), targetRepository.pathOf(pomArtifact));

    SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd", Locale.getDefault());
    long origTime = dateFormat.parse("2006-03-03").getTime();
    targetFile.setLastModified(origTime);
    targetPomFile.setLastModified(origTime);

    sourceFile.setLastModified(dateFormat.parse("2006-01-01").getTime());
    sourcePomFile.setLastModified(dateFormat.parse("2006-02-02").getTime());

    artifactConverter.convert(artifact, targetRepository);
    checkSuccess(artifactConverter);

    compareFiles(sourceFile, targetFile);
    compareFiles(sourcePomFile, targetPomFile);

    assertFalse("Check modified", origTime == targetFile.lastModified());
    assertFalse("Check modified", origTime == targetPomFile.lastModified());

    ArtifactRepositoryMetadata metadata = new ArtifactRepositoryMetadata(artifact);
    File metadataFile = new File(targetRepository.getBasedir(),
            targetRepository.pathOfRemoteRepositoryMetadata(metadata));
    assertTrue("Check metadata created", metadataFile.exists());
}
From source file:meme.singularsyntax.mojo.JavaflowEnhanceMojo.java
private void enhanceClassFiles(String outputDir, File backupDir, List<String> classFileNames)
        throws MojoExecutionException {

    Log log = getLog();
    ResourceTransformer transformer = new AsmClassTransformer();

    for (String classFileName : classFileNames) {
        try {
            File source = new File(outputDir, classFileName);
            File destination = new File(String.format(CLASSFILE_REWRITE_TEMPLATE, source.getAbsolutePath()));
            File backupClassFile = new File(backupDir, classFileName);

            if (backupClassFile.exists() && (source.lastModified() <= backupClassFile.lastModified())) {
                log.info(source + " is up to date");
                continue;
            }

            log.info(String.format("Enhancing class file bytecode for Javaflow: %s", source));
            RewritingUtils.rewriteClassFile(source, transformer, destination);

            if (backupClassFile.exists()) {
                log.debug(String.format("Backup for original class file %s already exists - removing it",
                        backupClassFile));
                backupClassFile.delete();
            }

            log.debug(String.format("Renaming original class file from %s to %s", source, backupClassFile));
            FileUtils.moveFile(source, backupClassFile);

            log.debug(String.format("Renaming rewritten class file from %s to %s", destination, source));
            FileUtils.moveFile(destination, source);

            backupClassFile.setLastModified(source.lastModified());
        } catch (IOException e) {
            throw new MojoExecutionException(e.getMessage());
        }
    }
}
From source file:com.parse.ParseFileUtils.java
/**
 * Internal copy file method.
 * This caches the original file length, and throws an IOException
 * if the output file length is different from the current input file length.
 * So it may fail if the file changes size.
 * It may also fail with "IllegalArgumentException: Negative size" if the input file is truncated part way
 * through copying the data and the new file size is less than the current position.
 *
 * @param srcFile the validated source file, must not be {@code null}
 * @param destFile the validated destination file, must not be {@code null}
 * @param preserveFileDate whether to preserve the file date
 * @throws IOException if an error occurs
 * @throws IOException if the output file length is not the same as the input file length after the copy completes
 * @throws IllegalArgumentException "Negative size" if the file is truncated so that the size is less than the position
 */
private static void doCopyFile(final File srcFile, final File destFile, final boolean preserveFileDate)
        throws IOException {
    if (destFile.exists() && destFile.isDirectory()) {
        throw new IOException("Destination '" + destFile + "' exists but is a directory");
    }

    FileInputStream fis = null;
    FileOutputStream fos = null;
    FileChannel input = null;
    FileChannel output = null;
    try {
        fis = new FileInputStream(srcFile);
        fos = new FileOutputStream(destFile);
        input = fis.getChannel();
        output = fos.getChannel();
        final long size = input.size(); // TODO See IO-386
        long pos = 0;
        long count = 0;
        while (pos < size) {
            final long remain = size - pos;
            count = remain > FILE_COPY_BUFFER_SIZE ? FILE_COPY_BUFFER_SIZE : remain;
            final long bytesCopied = output.transferFrom(input, pos, count);
            if (bytesCopied == 0) { // IO-385 - can happen if file is truncated after caching the size
                break; // ensure we don't loop forever
            }
            pos += bytesCopied;
        }
    } finally {
        ParseIOUtils.closeQuietly(output);
        ParseIOUtils.closeQuietly(fos);
        ParseIOUtils.closeQuietly(input);
        ParseIOUtils.closeQuietly(fis);
    }

    final long srcLen = srcFile.length(); // TODO See IO-386
    final long dstLen = destFile.length(); // TODO See IO-386
    if (srcLen != dstLen) {
        throw new IOException("Failed to copy full contents from '" + srcFile + "' to '" + destFile
                + "' Expected length: " + srcLen + " Actual: " + dstLen);
    }
    if (preserveFileDate) {
        destFile.setLastModified(srcFile.lastModified());
    }
}
From source file:com.mediaworx.intellij.opencmsplugin.sync.VfsAdapter.java
/**
 * pulls a VFS file to the RFS
 * @param syncEntity the sync entity representing the file to be pulled
 */
public void pullFile(SyncEntity syncEntity) {
    if (!connected) {
        LOG.info("not connected");
        return;
    }
    Document document = (Document) syncEntity.getVfsObject();

    LOG.info("Pulling " + syncEntity.getVfsPath() + " to " + syncEntity.getOcmsModule().getLocalVfsRoot());

    InputStream is = document.getContentStream().getStream();
    File rfsFile = createRealFile(syncEntity);
    OutputStream os = null;
    try {
        os = new FileOutputStream(rfsFile);
        byte[] buffer = new byte[4096];
        for (int n; (n = is.read(buffer)) != -1;) {
            os.write(buffer, 0, n);
        }
    } catch (IOException e) {
        LOG.info("There was an Exception writing to the local file " + syncEntity.getRfsPath() + ": " + e
                + "\n" + e.getMessage());
    } finally {
        try {
            is.close();
        } catch (IOException e) {
            // Do nothing
        }
        if (os != null) {
            try {
                os.close();
            } catch (IOException e) {
                // Do nothing
            }
        }
        if (!rfsFile.setLastModified(document.getLastModificationDate().getTimeInMillis())) {
            LOG.info("there was an error setting the modification date for " + syncEntity.getRfsPath());
        }
    }
}