Usage examples for org.apache.hadoop.fs.FileSystem create(Path)
public FSDataOutputStream create(Path f) throws IOException
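A minimal sketch of calling this overload directly, assuming the default Configuration and an illustrative path; the class name CreateExample and the path /tmp/example/output.txt are made up for illustration and do not come from the examples below. create(Path) overwrites any existing file at that path and returns a stream to write to.

import java.io.IOException;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class CreateExample {
    public static void main(String[] args) throws IOException {
        // Illustrative target path; replace with a real HDFS or local path.
        Path target = new Path("/tmp/example/output.txt");
        FileSystem fs = FileSystem.get(new Configuration());
        // create(Path) returns an FSDataOutputStream; try-with-resources closes it.
        try (FSDataOutputStream out = fs.create(target)) {
            out.write("hello".getBytes(StandardCharsets.UTF_8));
        }
    }
}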
From source file: com.pinterest.secor.io.FileReaderWriterFactoryTest.java
License: Apache License
private void mockDelimitedTextFileWriter(boolean isCompressed) throws Exception {
    PowerMockito.mockStatic(FileSystem.class);
    FileSystem fs = Mockito.mock(FileSystem.class);
    Mockito.when(FileSystem.get(Mockito.any(URI.class), Mockito.any(Configuration.class))).thenReturn(fs);

    Path fsPath = (!isCompressed) ? new Path(PATH) : new Path(PATH_GZ);

    GzipCodec codec = PowerMockito.mock(GzipCodec.class);
    PowerMockito.whenNew(GzipCodec.class).withNoArguments().thenReturn(codec);

    FSDataInputStream fileInputStream = Mockito.mock(FSDataInputStream.class);
    FSDataOutputStream fileOutputStream = Mockito.mock(FSDataOutputStream.class);

    // Stub open() and create() so the writer under test never touches a real file system.
    Mockito.when(fs.open(fsPath)).thenReturn(fileInputStream);
    Mockito.when(fs.create(fsPath)).thenReturn(fileOutputStream);

    CompressionInputStream inputStream = Mockito.mock(CompressionInputStream.class);
    CompressionOutputStream outputStream = Mockito.mock(CompressionOutputStream.class);

    Mockito.when(codec.createInputStream(Mockito.any(InputStream.class))).thenReturn(inputStream);
    Mockito.when(codec.createOutputStream(Mockito.any(OutputStream.class))).thenReturn(outputStream);
}
From source file: com.pinterest.secor.io.FileReaderWriterTest.java
License: Apache License
private void mockDelimitedTextFileReaderWriter(boolean isCompressed) throws Exception {
    PowerMockito.mockStatic(FileSystem.class);
    FileSystem fs = Mockito.mock(FileSystem.class);
    Mockito.when(FileSystem.get(Mockito.any(URI.class), Mockito.any(Configuration.class))).thenReturn(fs);

    Path fsPath = (!isCompressed) ? new Path(PATH) : new Path(PATH_GZ);

    GzipCodec codec = PowerMockito.mock(GzipCodec.class);
    PowerMockito.whenNew(GzipCodec.class).withNoArguments().thenReturn(codec);

    FSDataInputStream fileInputStream = Mockito.mock(FSDataInputStream.class);
    FSDataOutputStream fileOutputStream = Mockito.mock(FSDataOutputStream.class);

    // Stub open() and create() so the reader/writer under test never touches a real file system.
    Mockito.when(fs.open(fsPath)).thenReturn(fileInputStream);
    Mockito.when(fs.create(fsPath)).thenReturn(fileOutputStream);

    CompressionInputStream inputStream = Mockito.mock(CompressionInputStream.class);
    CompressionOutputStream outputStream = Mockito.mock(CompressionOutputStream.class);

    Mockito.when(codec.createInputStream(Mockito.any(InputStream.class))).thenReturn(inputStream);
    Mockito.when(codec.createOutputStream(Mockito.any(OutputStream.class))).thenReturn(outputStream);
}
From source file: com.pinterest.secor.io.impl.DelimitedTextFileReaderWriter.java
License: Apache License
public DelimitedTextFileReaderWriter(LogFilePath path, CompressionCodec codec, FileReaderWriter.Type type)
        throws FileNotFoundException, IOException {
    Path fsPath = new Path(path.getLogFilePath());
    FileSystem fs = FileUtil.getFileSystem(path.getLogFilePath());
    if (type == FileReaderWriter.Type.Reader) {
        InputStream inputStream = fs.open(fsPath);
        this.mReader = (codec == null) ? new BufferedInputStream(inputStream)
                : new BufferedInputStream(codec.createInputStream(inputStream));
        this.mOffset = path.getOffset();
        this.mCountingStream = null;
        this.mWriter = null;
    } else if (type == FileReaderWriter.Type.Writer) {
        // Wrap the stream returned by create() so the number of bytes written can be tracked.
        this.mCountingStream = new CountingOutputStream(fs.create(fsPath));
        this.mWriter = (codec == null) ? new BufferedOutputStream(this.mCountingStream)
                : new BufferedOutputStream(codec.createOutputStream(this.mCountingStream));
        this.mReader = null;
    } else {
        throw new IllegalArgumentException("Undefined File Type: " + type);
    }
}
From source file: com.pinterest.secor.util.FileUtil.java
License: Apache License
public static void touch(String path) throws IOException {
    FileSystem fs = getFileSystem(path);
    Path fsPath = new Path(path);
    fs.create(fsPath).close();
}
From source file: com.practicalHadoop.outputformat.MultpleDirectories.FileOutputCommitter.java
License: Apache License
private void markOutputDirSuccessful(JobContext context, String path) throws IOException {
    if (outputPath != null) {
        Path p = (path == null) ? outputPath : new Path(outputPath, path);
        LOG.warn("Mark Output success " + p);
        FileSystem fileSys = outputPath.getFileSystem(context.getConfiguration());
        if (fileSys.exists(outputPath)) {
            // create a file in the folder to mark it
            Path filePath = new Path(p, SUCCEEDED_FILE_NAME);
            fileSys.create(filePath).close();
        }
    }
}
From source file: com.redsqirl.workflow.server.connect.jdbc.JdbcStore.java
License: Open Source License
public static String writePassword(String connectionName, JdbcDetails details) {
    String passwordPathStr = "/user/" + System.getProperty("user.name") + "/.redsqirl/jdbc_password/password_"
            + connectionName;
    Path passwordPath = new Path(passwordPathStr);
    try {
        FileSystem fileSystem = NameNodeVar.getFS();
        if (fileSystem.exists(passwordPath)) {
            BufferedReader br = new BufferedReader(new InputStreamReader(fileSystem.open(passwordPath)));
            String line = br.readLine();
            if (line == null || !line.equals(details.getPassword())) {
                fileSystem.delete(passwordPath, false);
            }
            br.close();
        }
        if (!fileSystem.exists(passwordPath) && details.getPassword() != null) {
            if (!fileSystem.exists(passwordPath.getParent())) {
                fileSystem.mkdirs(passwordPath.getParent());
                fileSystem.setPermission(passwordPath.getParent(), new FsPermission("700"));
            }
            FSDataOutputStream out = fileSystem.create(passwordPath);
            out.write(details.getPassword().getBytes());
            out.close();
            fileSystem.setPermission(passwordPath, new FsPermission("400"));
        }
    } catch (Exception e) {
        logger.error(e, e);
    }
    return passwordPathStr;
}
From source file: com.redsqirl.workflow.server.datatype.PigTestUtils.java
License: Open Source License
public static void createHDFSFile(Path p, String containt) throws IOException {
    FileSystem fileSystem = NameNodeVar.getFS();
    // Check if the file already exists
    if (fileSystem.exists(p)) {
        if (fileSystem.listStatus(p).length > 0) {
            logger.warn("File " + p.toString() + " already exists");
            return;
        }
    } else {
        fileSystem.mkdirs(p);
    }
    // Create a new file and write data to it.
    FSDataOutputStream out = fileSystem.create(new Path(p, "part-0000"));
    out.write(containt.getBytes());
    out.close();
    fileSystem.close();
}
From source file: com.redsqirl.workflow.server.datatype.PigTestUtils.java
License: Open Source License
public static void writeContent(Path p, String file, String content) throws IOException {
    FileSystem fs = NameNodeVar.getFS();
    if (fs.exists(p)) {
        FSDataOutputStream out = fs.create(new Path(p, file));
        out.write(content.getBytes());
        out.close();
        fs.close();
    }
}
From source file: com.redsqirl.workflow.test.TestUtils.java
License: Open Source License
public static void createHDFSFile(Path p, String containt) throws IOException {
    FileSystem fileSystem = NameNodeVar.getFS();
    // Check if the file already exists
    if (fileSystem.exists(p)) {
        logger.warn("File " + p.toString() + " already exists");
        return;
    }
    // Create a new file and write data to it.
    fileSystem.mkdirs(p);
    FSDataOutputStream out = fileSystem.create(new Path(p, "part-0000"));
    out.write(containt.getBytes());
    out.close();
    fileSystem.close();
}
From source file: com.ricemap.spateDB.core.SpatialSite.java
License: Apache License
public static void setCells(JobConf job, CellInfo[] cellsInfo) throws IOException {
    Path tempFile;
    FileSystem fs = FileSystem.get(job);
    do {
        tempFile = new Path(job.getJobName() + "_" + (int) (Math.random() * 1000000) + ".cells");
    } while (fs.exists(tempFile));
    FSDataOutputStream out = fs.create(tempFile);
    out.writeInt(cellsInfo.length);
    for (CellInfo cell : cellsInfo) {
        cell.write(out);
    }
    out.close();
    fs.deleteOnExit(tempFile);
    DistributedCache.addCacheFile(tempFile.toUri(), job);
    job.set(OUTPUT_CELLS, tempFile.getName());
    LOG.info("Partitioning file into " + cellsInfo.length + " cells");
}