Example usage for org.apache.hadoop.fs FileSystem delete

List of usage examples for org.apache.hadoop.fs FileSystem delete

Introduction

On this page you can find example usages of org.apache.hadoop.fs.FileSystem#delete(Path, boolean).

Prototype

public abstract boolean delete(Path f, boolean recursive) throws IOException;

Document

Deletes a file or directory. If f is a directory and recursive is true, the directory and its contents are deleted; if recursive is false, deleting a non-empty directory throws an IOException. Returns true if the delete succeeded, false otherwise.
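
Below is a minimal, self-contained sketch of the call pattern the usage examples on this page follow. The path and configuration are illustrative assumptions, not values taken from any of the quoted sources.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class DeleteExample {
    public static void main(String[] args) throws IOException {
        // Hypothetical target path; substitute the file or directory to remove.
        Path target = new Path("/tmp/example-output");

        // Resolve the FileSystem that owns the path from a default Configuration.
        FileSystem fs = target.getFileSystem(new Configuration());

        // Check existence first, then delete recursively so a non-empty
        // directory is removed along with its contents.
        if (fs.exists(target) && fs.delete(target, true)) {
            System.out.println("Deleted " + target);
        } else {
            System.out.println("Nothing deleted at " + target);
        }
    }
}

With recursive set to false, the same call removes only a single file or an empty directory.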

Usage

From source file:com.practicalHadoop.outputformat.MultpleDirectories.FileOutputCommitter.java

License:Apache License

public void cleanupJob(JobContext context, String path) throws IOException {
    if (outputPath != null) {
        Path p = (path == null) ? outputPath : new Path(outputPath, path);
        Path tmpDir = new Path(p, FileOutputCommitter.TEMP_DIR_NAME);
        LOG.warn("Cleanup on " + tmpDir);
        FileSystem fileSys = tmpDir.getFileSystem(context.getConfiguration());
        if (fileSys.exists(tmpDir)) {
            fileSys.delete(tmpDir, true);
        }
    } else {
        LOG.warn("Output path is null in cleanup");
    }
}

From source file:com.practicalHadoop.outputformat.MultpleDirectories.FileOutputCommitter.java

License:Apache License

/**
 * Move all of the files from the work directory to the final output
 * @param context the task context
 * @param fs the output file system
 * @param jobOutputDir the final output directory
 * @param taskOutput the work path
 * @throws IOException
 */
private void moveTaskOutputs(TaskAttemptContext context, FileSystem fs, Path jobOutputDir, Path taskOutput)
        throws IOException {
    TaskAttemptID attemptId = context.getTaskAttemptID();
    context.progress();
    if (fs.isFile(taskOutput)) {
        Path finalOutputPath = getFinalPath(jobOutputDir, taskOutput, workPath);
        if (!fs.rename(taskOutput, finalOutputPath)) {
            if (!fs.delete(finalOutputPath, true)) {
                throw new IOException("Failed to delete earlier output of task: " + attemptId);
            }
            if (!fs.rename(taskOutput, finalOutputPath)) {
                throw new IOException("Failed to save output of task: " + attemptId);
            }
        }
        LOG.debug("Moved " + taskOutput + " to " + finalOutputPath);
    } else if (fs.getFileStatus(taskOutput).isDir()) {
        FileStatus[] paths = fs.listStatus(taskOutput);
        Path finalOutputPath = getFinalPath(jobOutputDir, taskOutput, workPath);
        fs.mkdirs(finalOutputPath);
        if (paths != null) {
            for (FileStatus path : paths) {
                moveTaskOutputs(context, fs, jobOutputDir, path.getPath());
            }
        }
    }
}

From source file:com.project.test.parquet.TestParquetTBaseScheme.java

License:Apache License

@Test
public void testWrite() throws Exception {
    Path path = new Path(parquetOutputPath);
    final FileSystem fs = path.getFileSystem(new Configuration());
    if (fs.exists(path))
        fs.delete(path, true);

    Scheme sourceScheme = new TextLine(new Fields("first", "last"));
    Tap source = new Hfs(sourceScheme, txtInputPath);

    Scheme sinkScheme = new ParquetTBaseScheme(Name.class);
    Tap sink = new Hfs(sinkScheme, parquetOutputPath);

    Pipe assembly = new Pipe("namecp");
    assembly = new Each(assembly, new PackThriftFunction());
    Flow flow = new HadoopFlowConnector().connect("namecp", source, sink, assembly);

    flow.complete();
}

From source file:com.project.test.parquet.TestParquetTBaseScheme.java

License:Apache License

private void doRead(Scheme sourceScheme) throws Exception {
    createFileForRead();

    Path path = new Path(txtOutputPath);
    final FileSystem fs = path.getFileSystem(new Configuration());
    if (fs.exists(path))
        fs.delete(path, true);

    Tap source = new Hfs(sourceScheme, parquetInputPath);

    Scheme sinkScheme = new TextLine(new Fields("first", "last"));
    Tap sink = new Hfs(sinkScheme, txtOutputPath);

    Pipe assembly = new Pipe("namecp");
    assembly = new Each(assembly, new UnpackThriftFunction());
    Flow flow = new HadoopFlowConnector().connect("namecp", source, sink, assembly);

    flow.complete();
    String result = FileUtils.readFileToString(new File(txtOutputPath + "/part-00000"));
    assertEquals("Alice\tPractice\nBob\tHope\nCharlie\tHorse\n", result);
}

From source file:com.project.test.parquet.TestParquetTBaseScheme.java

License:Apache License

private void createFileForRead() throws Exception {
    final Path fileToCreate = new Path(parquetInputPath + "/names.parquet");

    final Configuration conf = new Configuration();
    final FileSystem fs = fileToCreate.getFileSystem(conf);
    if (fs.exists(fileToCreate))
        fs.delete(fileToCreate, true);

    TProtocolFactory protocolFactory = new TCompactProtocol.Factory();
    TaskAttemptID taskId = new TaskAttemptID("local", 0, true, 0, 0);
    ThriftToParquetFileWriter w = new ThriftToParquetFileWriter(fileToCreate,
            ContextUtil.newTaskAttemptContext(conf, taskId), protocolFactory, Name.class);

    final ByteArrayOutputStream baos = new ByteArrayOutputStream();
    final TProtocol protocol = protocolFactory.getProtocol(new TIOStreamTransport(baos));

    Name n1 = new Name();
    n1.setFirst_name("Alice");
    n1.setLast_name("Practice");
    Name n2 = new Name();
    n2.setFirst_name("Bob");
    n2.setLast_name("Hope");
    Name n3 = new Name();
    n3.setFirst_name("Charlie");
    n3.setLast_name("Horse");

    n1.write(protocol);
    w.write(new BytesWritable(baos.toByteArray()));
    baos.reset();
    n2.write(protocol);
    w.write(new BytesWritable(baos.toByteArray()));
    baos.reset();
    n3.write(protocol);
    w.write(new BytesWritable(baos.toByteArray()));
    w.close();
}

From source file:com.project.test.parquet.TestParquetTupleScheme.java

License:Apache License

@Test
public void testFieldProjection() throws Exception {
    createFileForRead();

    Path path = new Path(txtOutputPath);
    final FileSystem fs = path.getFileSystem(new Configuration());
    if (fs.exists(path))
        fs.delete(path, true);

    Scheme sourceScheme = new ParquetTupleScheme(new Fields("last_name"));
    Tap source = new Hfs(sourceScheme, parquetInputPath);

    Scheme sinkScheme = new TextLine(new Fields("last_name"));
    Tap sink = new Hfs(sinkScheme, txtOutputPath);

    Pipe assembly = new Pipe("namecp");
    assembly = new Each(assembly, new ProjectedTupleFunction());
    Flow flow = new HadoopFlowConnector().connect("namecp", source, sink, assembly);

    flow.complete();
    String result = FileUtils.readFileToString(new File(txtOutputPath + "/part-00000"));
    assertEquals("Practice\nHope\nHorse\n", result);
}

From source file:com.project.test.parquet.TestParquetTupleScheme.java

License:Apache License

public void testReadWrite(String inputPath) throws Exception {
    createFileForRead();

    Path path = new Path(txtOutputPath);
    final FileSystem fs = path.getFileSystem(new Configuration());
    if (fs.exists(path))
        fs.delete(path, true);

    Scheme sourceScheme = new ParquetTupleScheme(new Fields("first_name", "last_name"));
    Tap source = new Hfs(sourceScheme, inputPath);

    Scheme sinkScheme = new TextLine(new Fields("first", "last"));
    Tap sink = new Hfs(sinkScheme, txtOutputPath);

    Pipe assembly = new Pipe("namecp");
    assembly = new Each(assembly, new UnpackTupleFunction());
    Flow flow = new HadoopFlowConnector().connect("namecp", source, sink, assembly);

    flow.complete();
    String result = FileUtils.readFileToString(new File(txtOutputPath + "/part-00000"));
    assertEquals("Alice\tPractice\nBob\tHope\nCharlie\tHorse\n", result);
}

From source file:com.rapleaf.hank.hadoop.DomainBuilderOutputCommitter.java

License:Apache License

public static void cleanupJob(String domainName, JobConf conf) throws IOException {
    Path tmpOutputPath = new Path(DomainBuilderProperties.getTmpOutputPath(domainName, conf));

    // Delete temporary output path
    FileSystem fs = tmpOutputPath.getFileSystem(conf);
    if (fs.exists(tmpOutputPath)) {
        LOG.info("Deleting temporary output path " + tmpOutputPath);
        fs.delete(tmpOutputPath, true);
    }
}

From source file:com.redsqirl.workflow.server.connect.HDFSInterface.java

License:Open Source License

/**
 * Delete Path from HDFS
 * 
 * @param path
 * @return Error Message
 * @throws RemoteException
 */
@Override
public String delete(String path) throws RemoteException {
    String error = null;
    boolean ok;
    HdfsFileChecker fCh = new HdfsFileChecker(path);
    if (fCh.isFile() || fCh.isDirectory()) {
        try {
            FileSystem fs = NameNodeVar.getFS();
            ok = fs.delete(new Path(path), true);
            // fs.close();
            if (!ok) {
                error = LanguageManagerWF.getText("HdfsInterface.deletefail", new Object[] { path });
            }
        } catch (IOException e) {
            ok = false;
            error = LanguageManagerWF.getText("HdfsInterface.cannotdelete", new Object[] { path });
            logger.error(error);
            logger.error(e.getMessage());
        }
    } else {
        error = LanguageManagerWF.getText("HdfsInterface.notdir", new Object[] { path });
    }
    // fCh.close();
    if (error != null) {
        logger.debug(error);
    }
    return error;
}

From source file:com.redsqirl.workflow.server.connect.jdbc.JdbcStore.java

License:Open Source License

public static String writePassword(String connectionName, JdbcDetails details) {
    String passwordPathStr = "/user/" + System.getProperty("user.name") + "/.redsqirl/jdbc_password/password_"
            + connectionName;
    Path passwordPath = new Path(passwordPathStr);

    try {
        FileSystem fileSystem = NameNodeVar.getFS();
        if (fileSystem.exists(passwordPath)) {
            BufferedReader br = new BufferedReader(new InputStreamReader(fileSystem.open(passwordPath)));
            String line = br.readLine();
            if (line == null || !line.equals(details.getPassword())) {
                fileSystem.delete(passwordPath, false);
            }
            br.close();
        }
        if (!fileSystem.exists(passwordPath) && details.getPassword() != null) {
            if (!fileSystem.exists(passwordPath.getParent())) {
                fileSystem.mkdirs(passwordPath.getParent());
                fileSystem.setPermission(passwordPath.getParent(), new FsPermission("700"));
            }
            FSDataOutputStream out = fileSystem.create(passwordPath);
            out.write(details.getPassword().getBytes());
            out.close();
            fileSystem.setPermission(passwordPath, new FsPermission("400"));
        }
    } catch (Exception e) {
        logger.error(e, e);
    }
    return passwordPathStr;
}