Example usage for org.apache.hadoop.fs FileSystem close

Introduction

This page shows example usage of org.apache.hadoop.fs.FileSystem close().

Prototype

@Override
public void close() throws IOException 

Document

Close this FileSystem instance.
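
One caution that applies to every example below: FileSystem.get(conf) returns a cached instance that is shared JVM-wide by default, so calling close() on it also invalidates the copy held by any other caller. Where code needs a private instance whose lifetime it controls, FileSystem.newInstance(conf) bypasses the cache; and because FileSystem implements java.io.Closeable, try-with-resources (Java 7+) can issue the close(). A minimal sketch, assuming only a default Configuration:

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class FileSystemCloseSketch {
    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        // newInstance() bypasses the JVM-wide FileSystem cache, so closing
        // this instance cannot break other code that used FileSystem.get().
        try (FileSystem fs = FileSystem.newInstance(conf)) {
            System.out.println(fs.exists(new Path("/tmp")));
        } // close() runs automatically here
    }
}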

Usage

From source file:com.bizosys.hsearch.console.ui.SearchServlet.java

License:Apache License

public void actionCreate(HttpServletRequest req, String projectName, String webappsClassDirPath,
        String schemaFileLoc) throws FileNotFoundException, UnsupportedEncodingException, IOException {
    String schemaXmlContent = req.getParameter("schema");
    schemaXmlContent = (null == schemaXmlContent) ? StringUtils.Empty : schemaXmlContent.trim();
    if (schemaXmlContent.length() == 0)
        throw new RuntimeException("Schema content is missing.");

    File schemaFile = new File(schemaFileLoc);
    PrintWriter writer = null;
    FileWriter fw = null;
    try {
        writer = new PrintWriter(schemaFile, "UTF-8");
        writer.write(schemaXmlContent);
        writer.flush();

        SetupServlet.create(projectName);

        File projectFile = new File(webappsClassDirPath + "/projects.txt");
        fw = new FileWriter(projectFile, true);
        fw.write(projectName);
        fw.write('\n');
    } finally {
        if (null != writer)
            writer.close();

        if (null != fw) {
            fw.flush();
            fw.close();
        }
    }

    if (schemaFile.exists()) {

        FileSystem fs = null;
        FSDataOutputStream hdfsFile = null;
        try {
            fs = FileSystem.get(conf);
            Path schemaHdfsFilePath = new Path(schemaFile.getName());

            hdfsFile = fs.create(schemaHdfsFilePath, fs.exists(schemaHdfsFilePath));
            hdfsFile.write(FileReaderUtil.getBytes(new File(schemaFile.getAbsolutePath())));
        } catch (Exception ex) {
            throw new IOException(
                    "Unable to create file on HDFS. Please check DFS permissions: " + schemaFile.getName(), ex);
        } finally {
            if (null != hdfsFile)
                hdfsFile.close();
            if (null != fs)
                fs.close();
        }

    }
}
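
The finally-based cleanup above predates try-with-resources. A hedged sketch of just the HDFS upload step, assuming schemaFile, conf, and the project's FileReaderUtil helper are in scope as in the original method:

    // Sketch: passing true as the overwrite flag matches the original's
    // intent of passing fs.exists(path); resources close in reverse order.
    Path schemaHdfsFilePath = new Path(schemaFile.getName());
    try (FileSystem fs = FileSystem.get(conf);
            FSDataOutputStream hdfsFile = fs.create(schemaHdfsFilePath, true)) {
        hdfsFile.write(FileReaderUtil.getBytes(schemaFile));
    }

Keep in mind that FileSystem.get(conf) hands back the shared cached instance, so closing it inside a servlet can disturb concurrent requests; FileSystem.newInstance(conf) avoids that.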

From source file:com.bizosys.hsearch.console.ui.SearchServlet.java

License:Apache License

private String getFileData(String path, int readLineCount) throws IOException {
    StringBuilder sb = new StringBuilder();
    BufferedReader br = null;
    FileSystem fs = null;
    int lineNo = 1;
    try {
        Path hadoopPath = new Path(path);
        fs = FileSystem.get(conf);
        if (fs.exists(hadoopPath)) {
            br = new BufferedReader(new InputStreamReader(fs.open(hadoopPath)));
            String line = null;
            boolean first = true;
            while ((line = br.readLine()) != null) {
                if (lineNo > readLineCount)
                    break;
                if (first)
                    first = false;
                else
                    sb.append('\n');
                sb.append(line);
                lineNo++;
            }
        }
    } catch (FileNotFoundException fex) {
        System.err.println("Cannot read from path " + path);
        throw new IOException(fex);
    } catch (Exception pex) {
        System.err.println("Error : " + path);
        throw new IOException(pex);
    } finally {
        if (null != br)
            try {
                br.close();
            } catch (Exception e) {
            }
        if (null != fs)
            try {
                fs.close();
            } catch (Exception e) {
            }
    }
    return sb.toString();
}
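
On Java 8 and later, the same line-limited read (used here and in the variant that follows) can be written with the Stream API. A sketch under the same assumptions, plus imports for java.nio.charset.StandardCharsets and java.util.stream.Collectors; unlike the original, it lets a FileNotFoundException propagate for a missing path:

    try (FileSystem fs = FileSystem.get(conf);
            BufferedReader br = new BufferedReader(new InputStreamReader(
                    fs.open(new Path(path)), StandardCharsets.UTF_8))) {
        // lines() is lazy, so limit() stops reading after readLineCount lines
        return br.lines().limit(readLineCount).collect(Collectors.joining("\n"));
    }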

From source file:com.bizosys.hsearch.console.ui.SearchServlet.java

License:Apache License

private String getFileData(String path) throws IOException {
    StringBuilder sb = new StringBuilder();
    BufferedReader br = null;
    FileSystem fs = null;
    try {
        Path hadoopPath = new Path(path);
        fs = FileSystem.get(conf);
        if (fs.exists(hadoopPath)) {
            br = new BufferedReader(new InputStreamReader(fs.open(hadoopPath)));
            String line = null;
            boolean first = true;
            while ((line = br.readLine()) != null) {
                if (first)
                    first = false;
                else
                    sb.append('\n');
                sb.append(line);
            }
        }
    } catch (FileNotFoundException fex) {
        System.err.println("Cannot read from path " + path);
        throw new IOException(fex);
    } catch (Exception pex) {
        System.err.println("Error : " + path);
        throw new IOException(pex);
    } finally {
        if (null != br)
            try {
                br.close();
            } catch (Exception e) {
            }
        if (null != fs)
            try {
                fs.close();
            } catch (Exception e) {
            }
    }
    return sb.toString();
}

From source file:com.chinamobile.bcbsp.bspstaff.BSPStaff.java

License:Apache License

private void writeMigratePartition(HashMap<String, Integer> migrateMap, StaffSSControllerInterface sssc)
        throws IOException {
    String migratePartitionString = this.migrateDirBase + "/"
            + String.valueOf(this.currentSuperStepCounter / Constants.K) + "/"
            + String.valueOf(this.getPartition());
    LOG.info("ljn test : writeMigratePartition " + migratePartitionString);
    this.migratePartitionDir = this.migrateDirBase + "/"
            + String.valueOf(this.currentSuperStepCounter / Constants.K);
    Path migratePartitionPath = new Path(migratePartitionString);
    FileSystem fsFileSystem = FileSystem.get(this.getConf().getConf());
    FSDataOutputStream fsOutput = fsFileSystem.create(migratePartitionPath);
    for (String vid : migrateMap.keySet()) {
        // LOG.info("ljn test : write " + vid + " :" + migrateMap.get(vid));
        String str = vid + ":" + migrateMap.get(vid) + "\n";
        fsOutput.write(str.getBytes("UTF-8"));
        // Text a = new Text(vid + ":" + migrateMap.get(vid));
        // a.write(fsOutput);
    }
    fsFileSystem.close();
}
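
Note that fsOutput is never closed above; only the FileSystem is, so bytes still buffered in the stream may never reach HDFS. A sketch of the write with both resources closed, assuming the same fields are in scope:

    // Sketch: try-with-resources closes fsOutput first, then the FileSystem,
    // guaranteeing buffered data is flushed even if a write throws.
    try (FileSystem fsFileSystem = FileSystem.get(this.getConf().getConf());
            FSDataOutputStream fsOutput = fsFileSystem.create(migratePartitionPath)) {
        for (String vid : migrateMap.keySet()) {
            fsOutput.write((vid + ":" + migrateMap.get(vid) + "\n").getBytes("UTF-8"));
        }
    }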

From source file:com.cip.crane.agent.utils.TaskHelper.java

License:Open Source License

private void writeFileToHdfs(String srcFile, String destFile) throws IOException {
    File file = new File(srcFile);
    if (!file.exists()) {
        throw new FileNotFoundException("File not found");
    }
    byte[] buf = new byte[BUFFER_SIZE];
    FileInputStream input = new FileInputStream(file);
    FileSystem fs = FileSystem.get(URI.create(destFile), conf);
    Path destPath = new Path(destFile);
    if (fs.exists(destPath)) {
        fs.delete(destPath, true);
    }
    FSDataOutputStream hdfsoutput = fs.create(destPath, (short) 2);
    int num = input.read(buf);
    while (num != (-1)) { // read until end of the local file
        hdfsoutput.write(buf, 0, num); // write the chunk to HDFS
        hdfsoutput.flush();
        num = input.read(buf); // refill the buffer
    }
    input.close();
    hdfsoutput.close();
    fs.close();
}
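
Hadoop ships a helper that replaces the manual buffer loop. A sketch of the same copy using org.apache.hadoop.io.IOUtils, with BUFFER_SIZE and conf assumed as in the original class:

    // Sketch: copyBytes closes both streams when its last argument is true;
    // try-with-resources then closes the FileSystem itself.
    try (FileSystem fs = FileSystem.get(URI.create(destFile), conf)) {
        IOUtils.copyBytes(new FileInputStream(srcFile),
                fs.create(new Path(destFile), (short) 2), BUFFER_SIZE, true);
    }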

From source file:com.cip.crane.agent.utils.TaskHelper.java

License:Open Source License

@SuppressWarnings("unused")
private void readFileFromHdfs(String srcFile, String destFile) throws IOException, FileNotFoundException {
    File file = new File(destFile);
    if (file.exists()) {
        file.delete();
    }
    byte[] buf = new byte[BUFFER_SIZE];
    FileOutputStream fos = new FileOutputStream(file);
    FileSystem fs;
    FSDataInputStream hdfsInput;
    try {
        fs = FileSystem.get(URI.create(srcFile), conf);
        hdfsInput = fs.open(new Path(srcFile));
        int num = hdfsInput.read(buf);
        while (num != (-1)) { // read until end of the HDFS file
            fos.write(buf, 0, num); // write the chunk to the local file
            fos.flush();
            num = hdfsInput.read(buf); // refill the buffer
        }
        hdfsInput.close();
        fos.close();
        fs.close();
    } catch (IOException e) {
        if (file.exists()) {
            file.delete();
        }
        throw e;
    }
}
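
The method above closes its streams only on the success path; if the copy loop throws, fs, hdfsInput, and fos all leak. A sketch with guaranteed cleanup (IOUtils as in the previous note):

    // Sketch: all three resources close even when the copy fails partway,
    // so the catch block only has to delete the partial local file.
    try (FileSystem fs = FileSystem.get(URI.create(srcFile), conf);
            FSDataInputStream hdfsInput = fs.open(new Path(srcFile));
            FileOutputStream fos = new FileOutputStream(destFile)) {
        IOUtils.copyBytes(hdfsInput, fos, BUFFER_SIZE, false);
    }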

From source file:com.cloudera.circus.test.TestXTest.java

License:Open Source License

@Test
@TestHadoop
public void testHadoopMinicluster() throws Exception {
    JobConf conf = getHadoopConf();
    Assert.assertNotNull(conf);
    FileSystem fs = FileSystem.get(conf);
    Assert.assertNotNull(fs);
    Assert.assertEquals(fs.getUri().getScheme(), "hdfs");
    Assert.assertTrue(fs.exists(getHadoopTestDir()));
    fs.close();
    JobClient jobClient = new JobClient(conf);
    Assert.assertNotNull(jobClient);
    jobClient.close();
}
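
Because fs came from FileSystem.get(conf), the close() here evicts the shared cached instance, which can surprise later tests in the same JVM. Where the Hadoop version supports the fs.<scheme>.impl.disable.cache key, one hedged workaround is:

    // Sketch: with caching disabled for hdfs://, every get() returns a fresh
    // instance, so this test's close() cannot invalidate anyone else's copy.
    JobConf conf = getHadoopConf();
    conf.setBoolean("fs.hdfs.impl.disable.cache", true);
    FileSystem fs = FileSystem.get(conf);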

From source file:com.cloudera.circus.test.TestXTest.java

License:Open Source License

@Test
@TestHadoop
public void testHadoopFileSystem() throws Exception {
    JobConf conf = getHadoopConf();
    FileSystem fs = FileSystem.get(conf);
    try {
        OutputStream os = fs.create(new Path(getHadoopTestDir(), "foo"));
        os.write(new byte[] { 1 });
        os.close();
        InputStream is = fs.open(new Path(getHadoopTestDir(), "foo"));
        Assert.assertEquals(is.read(), 1);
        Assert.assertEquals(is.read(), -1);
        is.close();
    } finally {
        fs.close();
    }
}

From source file:com.cloudera.circus.test.TestXTest.java

License:Open Source License

@Test
@TestHadoop
public void testHadoopMapReduce() throws Exception {
    JobConf conf = getHadoopConf();
    FileSystem fs = FileSystem.get(conf);
    JobClient jobClient = new JobClient(conf);
    try {
        Path inputDir = new Path(getHadoopTestDir(), "input");
        Path outputDir = new Path(getHadoopTestDir(), "output");

        fs.mkdirs(inputDir);
        Writer writer = new OutputStreamWriter(fs.create(new Path(inputDir, "data.txt")));
        writer.write("a\n");
        writer.write("b\n");
        writer.write("c\n");
        writer.close();

        JobConf jobConf = getHadoopConf();
        jobConf.setInt("mapred.map.tasks", 1);
        jobConf.setInt("mapred.map.max.attempts", 1);
        jobConf.setInt("mapred.reduce.max.attempts", 1);
        jobConf.set("mapred.input.dir", inputDir.toString());
        jobConf.set("mapred.output.dir", outputDir.toString());
        final RunningJob runningJob = jobClient.submitJob(jobConf);
        waitFor(60 * 1000, true, new Predicate() {
            @Override
            public boolean evaluate() throws Exception {
                return runningJob.isComplete();
            }
        });
        Assert.assertTrue(runningJob.isSuccessful());
        Assert.assertTrue(fs.exists(new Path(outputDir, "part-00000")));
        BufferedReader reader = new BufferedReader(
                new InputStreamReader(fs.open(new Path(outputDir, "part-00000"))));
        Assert.assertTrue(reader.readLine().trim().endsWith("a"));
        Assert.assertTrue(reader.readLine().trim().endsWith("b"));
        Assert.assertTrue(reader.readLine().trim().endsWith("c"));
        Assert.assertNull(reader.readLine());
        reader.close();
    } finally {
        fs.close();
        jobClient.close();
    }
}

From source file:com.cloudera.flume.handlers.hdfs.TestDFSWrite.java

License:Apache License

@Test
public void testDirectWrite() throws IOException {
    FlumeConfiguration conf = FlumeConfiguration.get();

    Path path = new Path("file:///tmp/testfile");
    FileSystem hdfs = path.getFileSystem(conf);
    hdfs.deleteOnExit(path);

    String STRING = "Hello World";

    // writing
    FSDataOutputStream dos = hdfs.create(path);
    dos.writeUTF(STRING);
    dos.close();

    // reading
    FSDataInputStream dis = hdfs.open(path);
    String s = dis.readUTF();
    System.out.println(s);
    assertEquals(STRING, s);

    dis.close();

    hdfs.close();
}
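
For a file:/// URI, path.getFileSystem(conf) resolves to the local FileSystem, so this test needs no cluster. A compact try-with-resources sketch of the same round trip (FlumeConfiguration extends Hadoop's Configuration, and assertEquals is assumed statically imported as in the original test):

    // Sketch: writeUTF/readUTF round trip with every resource auto-closed.
    Path path = new Path("file:///tmp/testfile");
    try (FileSystem fs = path.getFileSystem(FlumeConfiguration.get())) {
        try (FSDataOutputStream out = fs.create(path)) {
            out.writeUTF("Hello World");
        }
        try (FSDataInputStream in = fs.open(path)) {
            assertEquals("Hello World", in.readUTF());
        }
    }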