List of usage examples for java.io.File.createTempFile(String prefix, String suffix)
public static File createTempFile(String prefix, String suffix) throws IOException
From source file:edu.umn.cs.spatialHadoop.util.FileUtil.java
/** * Copies a part of a file from a remote file system (e.g., HDFS) to a local * file. Returns a path to a local temporary file. * // w w w . j a v a2s .com * @param conf * @param split * @return * @throws IOException */ public static String copyFileSplit(Configuration conf, FileSplit split) throws IOException { FileSystem fs = split.getPath().getFileSystem(conf); // Special case of a local file. Skip copying the file if (fs instanceof LocalFileSystem && split.getStart() == 0) return split.getPath().toUri().getPath(); File destFile = File.createTempFile(split.getPath().getName(), "tmp"); // Special handling for HTTP files for more efficiency /*if (fs instanceof HTTPFileSystem && split.getStart() == 0) { URL website = split.getPath().toUri().toURL(); ReadableByteChannel rbc = Channels.newChannel(website.openStream()); FileOutputStream fos = new FileOutputStream(destFile); fos.getChannel().transferFrom(rbc, 0, Long.MAX_VALUE); fos.close(); return destFile.getAbsolutePath(); }*/ // Length of input file. We do not depend on split.length because it is // not // set by input format for performance reason. Setting it in the input // format would cost a lot of time because it runs on the client machine // while the record reader runs on slave nodes in parallel long length = fs.getFileStatus(split.getPath()).getLen(); FSDataInputStream in = fs.open(split.getPath()); in.seek(split.getStart()); ReadableByteChannel rbc = Channels.newChannel(in); // Prepare output file for write FileOutputStream out = new FileOutputStream(destFile); out.getChannel().transferFrom(rbc, 0, length); in.close(); out.close(); return destFile.getAbsolutePath(); }
From source file:io.druid.indexer.HdfsClasspathSetupTest.java
@BeforeClass public static void setupStatic() throws IOException, ClassNotFoundException { hdfsTmpDir = File.createTempFile("hdfsClasspathSetupTest", "dir"); hdfsTmpDir.deleteOnExit();//from w w w . j av a2s .com if (!hdfsTmpDir.delete()) { throw new IOException(String.format("Unable to delete hdfsTmpDir [%s]", hdfsTmpDir.getAbsolutePath())); } conf = new Configuration(true); conf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, hdfsTmpDir.getAbsolutePath()); miniCluster = new MiniDFSCluster.Builder(conf).build(); }
From source file:net.lmxm.ute.utils.FileSystemUtilsTest.java
/** * Test convert to file objects./*from w ww . j a v a2 s. c om*/ * * @throws IOException Signals that an I/O exception has occurred. */ @Test public void testConvertToFileObjects() throws IOException { // Create temp file that we can locate final File tempFile1 = File.createTempFile("UTE", ".TESTFILE"); FileUtils.touch(tempFile1); tempFile1.deleteOnExit(); final File tempFile2 = File.createTempFile("ute", ".testfile"); FileUtils.touch(tempFile2); tempFile2.deleteOnExit(); // Run the test final FileReference fileReference = new FileReference(); fileReference.setName("UTE*.TESTFILE"); final List<FileReference> fileReferences = new ArrayList<FileReference>(); fileReferences.add(fileReference); final List<File> files = FileSystemUtils.convertToFileObjects(TMP_DIR, fileReferences); assertNotNull(files); assertTrue(files.size() == 1); // Delete the temp file we created for this test tempFile1.delete(); tempFile2.delete(); }
From source file:Exporters.DocxService.java
public InputStream mergeDocx(final List<InputStream> streams) throws Docx4JException, IOException { WordprocessingMLPackage target = null; final File generated = File.createTempFile("generated", ".docx"); int chunkId = 0; Iterator<InputStream> it = streams.iterator(); while (it.hasNext()) { InputStream is = it.next(); if (is != null) { if (target == null) { // Copy first (master) document OutputStream os = new FileOutputStream(generated); os.write(IOUtils.toByteArray(is)); os.close();//from ww w . ja v a2 s .co m target = WordprocessingMLPackage.load(generated); } else { // Attach the others (Alternative input parts) insertDocx(target.getMainDocumentPart(), IOUtils.toByteArray(is), chunkId++); } } } if (target != null) { target.save(generated); return new FileInputStream(generated); } else { return null; } }
From source file:eu.udig.omsbox.view.actions.OmsScriptGenerationAction.java
/**
 * Generates an OMS script for the module currently selected in the OmsBox
 * view and opens it with the system program registered for .txt files;
 * when no such program exists, falls back to prompting the user for a
 * save location.
 *
 * @param action the action proxy (unused)
 */
public void run(IAction action) {
    if (view instanceof OmsBoxView) {
        OmsBoxView omsView = (OmsBoxView) view;
        try {
            String script = omsView.generateScriptForSelectedModule();
            if (script == null) {
                // nothing selected / nothing generated — silently bail out
                return;
            }
            // Program registered for plain-text files, if any.
            Program program = Program.findProgram(".txt");
            if (program != null) {
                File tempFile = File.createTempFile("omsbox_", ".oms");
                if (tempFile == null || !tempFile.exists() || tempFile.getAbsolutePath() == null) {
                    // try with user's home folder
                    String ts = new DateTime().toString(OmsBoxConstants.dateTimeFormatterYYYYMMDDHHMMSS);
                    String userHomePath = System.getProperty("user.home"); //$NON-NLS-1$
                    File userHomeFile = new File(userHomePath);
                    if (!userHomeFile.exists()) {
                        // no writable location at all — report and give up
                        String message = "Unable to create the oms script both in the temp folder and user home. Check your permissions.";
                        ExceptionDetailsDialog.openError(null, message, IStatus.ERROR, OmsBoxPlugin.PLUGIN_ID,
                                new RuntimeException());
                        return;
                    }
                    // timestamped name avoids collisions in the home folder
                    tempFile = new File(userHomeFile, "omsbox_" + ts + ".oms");
                }
                FileUtils.writeStringToFile(tempFile, script);
                // open the script with the registered text editor
                program.execute(tempFile.getAbsolutePath());
                // cleanup when leaving uDig
                // tempFile.deleteOnExit();
            } else {
                // make it the good old way prompting
                FileDialog fileDialog = new FileDialog(view.getSite().getShell(), SWT.SAVE);
                String path = fileDialog.open();
                if (path == null || path.length() < 1) {
                    // user cancelled the dialog
                    return;
                }
                FileUtils.writeStringToFile(new File(path), script);
            }
        } catch (Exception e) {
            e.printStackTrace();
            String message = "An error ocurred while generating the script.";
            ExceptionDetailsDialog.openError(null, message, IStatus.ERROR, OmsBoxPlugin.PLUGIN_ID, e);
        }
    }
}
From source file:com.wavemaker.commons.util.utils.ClassLoaderUtilsTest.java
@Test public void tempClassLoader_getResourceTest() throws Exception { File sourceJar = new ClassPathResource("com/wavemaker/commons/foojar.jar").getFile(); File jar = File.createTempFile("tempClassLoader_getClassTest", ".jar"); jar.deleteOnExit();//www. ja v a2 s . c o m FileUtils.copyFile(sourceJar, jar); try { ClassLoader cl = ClassLoaderUtils.getTempClassLoaderForFile(jar); InputStream is = ClassLoaderUtils.getResourceAsStream("foo/bar/baz/JarType.java", cl); assertNotNull(is); assertTrue(is.available() > 0); is.close(); } finally { jar.delete(); } }
From source file:io.druid.storage.hdfs.HdfsDataSegmentPullerTest.java
/**
 * Boots a MiniDFSCluster in a reserved temp directory and seeds it with the
 * fixture bytes before any test in this class runs.
 *
 * @throws IOException if local staging or the HDFS upload fails
 */
@BeforeClass
public static void setupStatic() throws IOException {
    // Reserve a unique path, then remove the placeholder file so the
    // mini-cluster can create its base directory there.
    hdfsTmpDir = File.createTempFile("hdfsHandlerTest", "dir");
    if (!hdfsTmpDir.delete()) {
        throw new IOE("Unable to delete hdfsTmpDir [%s]", hdfsTmpDir.getAbsolutePath());
    }

    conf = new Configuration(true);
    conf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, hdfsTmpDir.getAbsolutePath());
    miniCluster = new MiniDFSCluster.Builder(conf).build();
    uriBase = miniCluster.getURI(0);

    // Stage the fixture bytes in a local file, upload them into HDFS, and
    // remove the staging file no matter what.
    final File stagingFile = File.createTempFile("hdfsHandlerTest", ".data");
    stagingFile.delete();
    try {
        Files.copy(new ByteArrayInputStream(pathByteContents), stagingFile.toPath());
        try (OutputStream stream = miniCluster.getFileSystem().create(filePath)) {
            Files.copy(stagingFile.toPath(), stream);
        }
    } finally {
        stagingFile.delete();
    }
}
From source file:edu.unc.lib.dl.IngestProcessorFactoryTest.java
/**
 * Verifies that the SIP processor factory can resolve a processor for a
 * METS package SIP without throwing.
 */
@Test
public void testFindMETSPackageProcessor() {
    try {
        File test = File.createTempFile("test", ".txt");
        // clean up the scratch file on JVM exit (the original leaked it)
        test.deleteOnExit();
        METSPackageSIP foo = new METSPackageSIP(new PID("test:1"), test, false);
        this.getSipProcessorFactory().getSIPProcessor(foo);
    } catch (IOException e) {
        log.debug(e);
        fail(e.getMessage());
    } catch (IngestException e) {
        log.debug(e);
        fail(e.getMessage());
    }
}
From source file:eu.linda.analytics.formats.RDFInputFormat.java
@Override public AbstractList importData4weka(String query_id, boolean isForRDFOutput, Analytics analytics) { String queryURI = connectionController.getQueryURI(query_id); helpfulFunctions.nicePrintMessage("import data from uri " + queryURI); Instances data = null;// w w w. j a v a 2s .co m try { float timeToGetQuery = 0; long startTimeToGetQuery = System.currentTimeMillis(); URL url = new URL(queryURI); if (!helpfulFunctions.isURLResponsive(url)) { return null; } File tmpfile4lindaquery = File.createTempFile("tmpfile4lindaquery" + query_id, ".tmp"); FileUtils.copyURLToFile(url, tmpfile4lindaquery); System.out.println("Downloaded File Query: " + tmpfile4lindaquery); CSVLoader loader = new CSVLoader(); loader.setSource(tmpfile4lindaquery); if (isForRDFOutput) { loader.setStringAttributes("1,2"); } loader.setFieldSeparator(","); data = loader.getDataSet(); data.setClassIndex(data.numAttributes() - 1); FileInputStream fis = null; try { fis = new FileInputStream(tmpfile4lindaquery); System.out.println("fis.getChannel().size() " + fis.getChannel().size()); analytics.setData_size(analytics.getData_size() + fis.getChannel().size()); } finally { fis.close(); } // Get elapsed time in milliseconds long elapsedTimeToGetQueryMillis = System.currentTimeMillis() - startTimeToGetQuery; // Get elapsed time in seconds timeToGetQuery = elapsedTimeToGetQueryMillis / 1000F; analytics.setTimeToGet_data(analytics.getTimeToGet_data() + timeToGetQuery); System.out.println("timeToGetQuery" + timeToGetQuery); connectionController.updateLindaAnalyticsInputDataPerformanceTime(analytics); } catch (Exception ex) { Logger.getLogger(ArffInputFormat.class.getName()).log(Level.SEVERE, null, ex); } return data; }
From source file:com.stimulus.archiva.extraction.RTFExtractor.java
public Reader getText(InputStream is, Charset charset, IndexInfo indexInfo) throws ExtractionException { Reader reader = null;//from w w w . java2s. c om FileWriter writer = null; File file = null; try { reader = new InputStreamReader(is); file = File.createTempFile("extract_rtf", ".tmp"); indexInfo.addDeleteFile(file); writer = new FileWriter(file); DefaultStyledDocument doc = new DefaultStyledDocument(); new RTFEditorKit().read(reader, doc, 0); writer.write(doc.getText(0, doc.getLength())); } catch (Throwable ioe) { throw new ExtractionException("failed to parse rtf document", ioe, logger); } finally { if (reader != null) { try { reader.close(); } catch (IOException ioe) { } } if (writer != null) { try { writer.close(); } catch (IOException ioe) { } } } try { Reader outReader = new FileReader(file); indexInfo.addReader(outReader); return outReader; } catch (Exception ex) { throw new ExtractionException("failed to extract text from powerpoint document", ex, logger, ChainedException.Level.DEBUG); } }