Example usage for java.io.FileInputStream.getChannel()

Introduction

This page collects example usages of java.io.FileInputStream.getChannel().

Prototype

public FileChannel getChannel() 

Document

Returns the unique java.nio.channels.FileChannel object associated with this file input stream.
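
As a minimal, self-contained sketch (the file name "data.bin" is a placeholder used only for illustration), the channel returned by getChannel() shares its position with the stream, so reads and repositioning through either one affect the other:

import java.io.FileInputStream;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;

public class GetChannelSketch {
    public static void main(String[] args) throws Exception {
        // "data.bin" is a hypothetical path; any readable file will do
        try (FileInputStream in = new FileInputStream("data.bin")) {
            FileChannel channel = in.getChannel(); // same file descriptor as the stream
            System.out.println("size = " + channel.size());

            ByteBuffer buffer = ByteBuffer.allocate(16);
            channel.read(buffer);  // advance the position through the channel
            channel.position(0);   // rewind; the stream observes the new position
            System.out.println("first byte via stream = " + in.read());
        }
    }
}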

Usage

From source file:org.apache.hadoop.io.nativeio.TestSharedFileDescriptorFactory.java
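
In this Hadoop test, a byte written through a FileOutputStream sharing the same descriptor is read back after rewinding the stream with inStream.getChannel().position(0).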

@Test(timeout = 10000)
public void testReadAndWrite() throws Exception {
    File path = new File(TEST_BASE, "testReadAndWrite");
    path.mkdirs();
    SharedFileDescriptorFactory factory = SharedFileDescriptorFactory.create("woot_",
            new String[] { path.getAbsolutePath() });
    FileInputStream inStream = factory.createDescriptor("testReadAndWrite", 4096);
    FileOutputStream outStream = new FileOutputStream(inStream.getFD());
    outStream.write(101);
    inStream.getChannel().position(0);
    Assert.assertEquals(101, inStream.read());
    inStream.close();
    outStream.close();
    FileUtil.fullyDelete(path);
}

From source file:com.mgmtp.perfload.perfalyzer.util.BinnedFilesMerger.java
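
Here a Scanner is built on top of fis.getChannel() to read each input file line by line, while per-key output channels obtained from FileOutputStreams receive the re-tokenized lines.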

public void mergeFiles() throws IOException {
    if (!inputDir.isDirectory()) {
        throw new IllegalArgumentException("The input File must be a directory");
    }

    StrTokenizer tokenizer = StrTokenizer.getCSVInstance();
    tokenizer.setDelimiterChar(DELIMITER);
    Map<String, FileChannel> destChannels = newHashMap();
    List<OutputStream> outputStreams = newArrayList();
    File[] filesInInputDirectory = inputDir.listFiles();

    try {
        for (File file : filesInInputDirectory) {
            FileInputStream fis = null;
            try {
                fis = new FileInputStream(file);
                for (Scanner scanner = new Scanner(fis.getChannel(), Charsets.UTF_8.name()); scanner
                        .hasNext();) {
                    String line = scanner.nextLine();
                    tokenizer.reset(line);

                    List<String> tokenList = tokenizer.getTokenList();
                    String key = tokenList.get(sortCriteriaColumn);
                    FileChannel destChannel = destChannels.get(key);
                    if (destChannel == null) {
                        FileOutputStream fos = new FileOutputStream(
                                new File(outputDir, FILE_TYPE + "_" + key + ".out"));
                        outputStreams.add(fos);
                        destChannel = fos.getChannel();
                        destChannels.put(key, destChannel);

                        //Write the Header...... Has to be improved
                        IoUtilities.writeLineToChannel(destChannel, getHeader(), Charsets.UTF_8);
                    }

                    StrBuilder outputLine = new StrBuilder();
                    for (String s : tokenList) {
                        StrBuilderUtils.appendEscapedAndQuoted(outputLine, DELIMITER, s);
                    }
                    IoUtilities.writeLineToChannel(destChannel, outputLine.toString(), Charsets.UTF_8);
                }
            } finally {
                closeQuietly(fis);
            }
        }
    } finally {
        outputStreams.forEach(IOUtils::closeQuietly);
    }

}

From source file:fr.acxio.tools.agia.alfresco.ContentFileDeleteWriterTest.java
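
This test takes a shared FileLock via aInputStream.getChannel().lock(...) to simulate a file that cannot be deleted while the writer runs.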

@Test
public void testWriteCannotDelete() throws Exception {
    ContentFileDeleteWriter aWriter = new ContentFileDeleteWriter();

    File aOriginFile = new File("src/test/resources/testFiles/content1.pdf");
    File aDestinationFile1 = new File("target/content4.pdf");
    FileCopyUtils.copy(aOriginFile, aDestinationFile1);

    List<NodeList> aData = new ArrayList<NodeList>();
    aData.add(createNodeList(aDestinationFile1.getAbsolutePath()));

    assertTrue(aDestinationFile1.exists());

    FileInputStream aInputStream = new FileInputStream(aDestinationFile1);
    FileLock aLock = aInputStream.getChannel().lock(0L, Long.MAX_VALUE, true); // shared lock

    aWriter.write(aData);

    aLock.release();
    aInputStream.close();

    assertTrue(aDestinationFile1.exists());
}

From source file:com.joyent.manta.client.multipart.AbstractMultipartManager.java
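
If the supplied stream is a FileInputStream, fin.getChannel().size() provides the content length so the part can be uploaded with a known size.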

@Override
public PART uploadPart(final UPLOAD upload, final int partNumber, final InputStream inputStream)
        throws IOException {
    Validate.notNull(inputStream, "InputStream must not be null");

    if (inputStream.getClass().equals(FileInputStream.class)) {
        final FileInputStream fin = (FileInputStream) inputStream;
        final long contentLength = fin.getChannel().size();
        return uploadPart(upload, partNumber, contentLength, inputStream);
    }

    HttpEntity entity = new MantaInputStreamEntity(inputStream, ContentType.APPLICATION_OCTET_STREAM);

    return uploadPart(upload, partNumber, entity, null);
}

From source file:fr.acxio.tools.agia.alfresco.ContentFileDeleteWriterTest.java
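
A stricter variant of the previous test: with ignoreErrors disabled, the shared lock obtained through getChannel() must make the write fail with an IOException.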

@Test
public void testWriteCannotDeleteThrowException() throws Exception {
    ContentFileDeleteWriter aWriter = new ContentFileDeleteWriter();
    aWriter.setIgnoreErrors(false);

    File aOriginFile = new File("src/test/resources/testFiles/content1.pdf");
    File aDestinationFile1 = new File("target/content5.pdf");
    FileCopyUtils.copy(aOriginFile, aDestinationFile1);

    List<NodeList> aData = new ArrayList<NodeList>();
    aData.add(createNodeList(aDestinationFile1.getAbsolutePath()));

    assertTrue(aDestinationFile1.exists());

    FileInputStream aInputStream = new FileInputStream(aDestinationFile1);
    FileLock aLock = aInputStream.getChannel().lock(0L, Long.MAX_VALUE, true); // shared lock

    try {
        aWriter.write(aData);
        assertTrue(aDestinationFile1.exists());
        fail("Must throw an exception");
    } catch (IOException e) {
        // Fall through
    } finally {
        aLock.release();
        aInputStream.close();
    }
}

From source file:org.apache.tajo.storage.text.DelimitedLineReader.java
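
For uncompressed local files, the reader takes the channel of a FileInputStream and positions it at the fragment's start offset before wrapping it in a ByteBufLineReader.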

public void init() throws IOException {
    if (is != null) {
        throw new IOException(this.getClass() + " was already initialized.");
    }

    if (fs == null) {
        fs = FileScanner.getFileSystem((TajoConf) conf, fragment.getPath());
    }

    pos = startOffset = fragment.getStartKey();
    end = fragment.getEndKey();

    if (codec != null) {
        fis = fs.open(fragment.getPath());

        decompressor = CodecPool.getDecompressor(codec);
        is = new DataInputStream(codec.createInputStream(fis, decompressor));

        ByteBuf buf = BufferPool.directBuffer(bufferSize);
        lineReader = new ByteBufLineReader(new ByteBufInputChannel(is), buf);
    } else {
        if (fs instanceof LocalFileSystem) {
            File file;
            try {
                if (fragment.getPath().toUri().getScheme() != null) {
                    file = new File(fragment.getPath().toUri());
                } else {
                    file = new File(fragment.getPath().toString());
                }
            } catch (IllegalArgumentException iae) {
                throw new IOException(iae);
            }
            FileInputStream inputStream = new FileInputStream(file);
            FileChannel channel = inputStream.getChannel();
            channel.position(startOffset);
            is = inputStream;
            lineReader = new ByteBufLineReader(new LocalFileInputChannel(inputStream),
                    BufferPool.directBuffer((int) Math.min(bufferSize, fragment.getLength())));
        } else {
            fis = fs.open(fragment.getPath());
            fis.seek(startOffset);
            is = fis;
            lineReader = new ByteBufLineReader(new FSDataInputChannel(fis),
                    BufferPool.directBuffer((int) Math.min(bufferSize, fragment.getLength())));
        }
    }
    eof = false;
}

From source file:org.apache.solr.core.CoreContainer.java
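
The copy is done channel to channel: fcin.transferTo moves the data to fcout in 32 MB chunks.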

/** Copies a src file to a dest file:
 *  used to circumvent the platform discrepancies regarding renaming files.
 */
public static void fileCopy(File src, File dest) throws IOException {
    IOException xforward = null;
    FileInputStream fis = null;
    FileOutputStream fos = null;
    FileChannel fcin = null;
    FileChannel fcout = null;
    try {
        fis = new FileInputStream(src);
        fos = new FileOutputStream(dest);
        fcin = fis.getChannel();
        fcout = fos.getChannel();
        // do the file copy 32Mb at a time
        final int MB32 = 32 * 1024 * 1024;
        long size = fcin.size();
        long position = 0;
        while (position < size) {
            position += fcin.transferTo(position, MB32, fcout);
        }
    } catch (IOException xio) {
        xforward = xio;
    } finally {
        if (fis != null)
            try {
                fis.close();
                fis = null;
            } catch (IOException xio) {
            }
        if (fos != null)
            try {
                fos.close();
                fos = null;
            } catch (IOException xio) {
            }
        if (fcin != null && fcin.isOpen())
            try {
                fcin.close();
                fcin = null;
            } catch (IOException xio) {
            }
        if (fcout != null && fcout.isOpen())
            try {
                fcout.close();
                fcout = null;
            } catch (IOException xio) {
            }
    }
    if (xforward != null) {
        throw xforward;
    }
}

From source file:net.librec.data.convertor.appender.SocialDataAppender.java
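
Each data file is read through its channel into a ByteBuffer; the accumulated text is split into lines that populate the user-user rating table.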

/**
 * Read data from the data file. Note that we didn't take care of the
 * duplicated lines.
 *
 * @param inputDataPath
 *            the path of the data file
 * @throws IOException if I/O error occurs during reading
 */
private void readData(String inputDataPath) throws IOException {
    // Table {row-id, col-id, rate}
    Table<Integer, Integer, Double> dataTable = HashBasedTable.create();
    // Map {col-id, multiple row-id}: used to fast build a rating matrix
    Multimap<Integer, Integer> colMap = HashMultimap.create();
    // BiMap {raw id, inner id} userIds, itemIds
    final List<File> files = new ArrayList<File>();
    final ArrayList<Long> fileSizeList = new ArrayList<Long>();
    SimpleFileVisitor<Path> finder = new SimpleFileVisitor<Path>() {
        @Override
        public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
            fileSizeList.add(file.toFile().length());
            files.add(file.toFile());
            return super.visitFile(file, attrs);
        }
    };
    Files.walkFileTree(Paths.get(inputDataPath), finder);
    long allFileSize = 0;
    for (Long everyFileSize : fileSizeList) {
        allFileSize = allFileSize + everyFileSize.longValue();
    }
    // loop every dataFile collecting from walkFileTree
    for (File dataFile : files) {
        FileInputStream fis = new FileInputStream(dataFile);
        FileChannel fileRead = fis.getChannel();
        ByteBuffer buffer = ByteBuffer.allocate(BSIZE);
        int len;
        String bufferLine = new String();
        byte[] bytes = new byte[BSIZE];
        while ((len = fileRead.read(buffer)) != -1) {
            buffer.flip();
            buffer.get(bytes, 0, len);
            bufferLine = bufferLine.concat(new String(bytes, 0, len)).replaceAll("\r", "\n");
            String[] bufferData = bufferLine.split("(\n)+");
            boolean isComplete = bufferLine.endsWith("\n");
            int loopLength = isComplete ? bufferData.length : bufferData.length - 1;
            for (int i = 0; i < loopLength; i++) {
                String line = new String(bufferData[i]);
                String[] data = line.trim().split("[ \t,]+");
                String userA = data[0];
                String userB = data[1];
                Double rate = (data.length >= 3) ? Double.valueOf(data[2]) : 1.0;
                if (userIds.containsKey(userA) && userIds.containsKey(userB)) {
                    int row = userIds.get(userA);
                    int col = userIds.get(userB);
                    dataTable.put(row, col, rate);
                    colMap.put(col, row);
                }
            }
            if (!isComplete) {
                bufferLine = bufferData[bufferData.length - 1];
            }
            buffer.clear();
        }
        fileRead.close();
        fis.close();
    }
    int numRows = userIds.size(), numCols = userIds.size();
    // build rating matrix
    userSocialMatrix = new SparseMatrix(numRows, numCols, dataTable, colMap);
    // release memory of data table
    dataTable = null;
}

From source file:com.mgmtp.perfload.perfalyzer.normalization.Normalizer.java
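
As in the merger above, a Scanner over fis.getChannel() reads the source file, and each normalized line is written to a lazily opened per-key output channel.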

public void normalize(final File file) throws IOException {
    checkState(!file.isAbsolute(), "'file' must be relative");

    String filePath = file.getPath();
    String[] pathElements = split(getPath(filePath), SystemUtils.FILE_SEPARATOR); // strip out dir

    StrBuilder sb = new StrBuilder();
    for (int i = 0; i < pathElements.length; ++i) {
        if (i == 1) {
            continue; // strip out dir, e. g. perfmon-logs, measuring-logs
        }
        sb.appendSeparator(SystemUtils.FILE_SEPARATOR);
        sb.append(pathElements[i]);
    }
    String dirPath = sb.toString();

    Map<String, FileChannel> channels = newHashMap();
    List<OutputStream> outputStreams = newArrayList();
    FileInputStream fis = null;
    try {
        fis = new FileInputStream(new File(sourceDir, filePath)); //relative to source dir
        for (Scanner scanner = new Scanner(fis.getChannel(), Charsets.UTF_8.name()); scanner.hasNext();) {
            String line = scanner.nextLine();
            if (trimToNull(line) == null || line.startsWith("#")) {
                continue;
            }
            List<ChannelData> channelDataList = normalizingStrategy.normalizeLine(line);
            for (ChannelData channelData : channelDataList) {

                FileChannel channel = channels.get(channelData.getChannelKey());
                if (channel == null) {
                    String baseName = channelData.getChannelBaseName();
                    String key = channelData.getChannelKey();
                    String fileName = new File(dirPath, String.format("[%s][%s].csv", baseName, key)).getPath();
                    File destFile = new File(destDir, fileName);
                    destFile.getParentFile().mkdirs();
                    FileOutputStream fos = new FileOutputStream(destFile);
                    outputStreams.add(fos);
                    channel = fos.getChannel();
                    channels.put(channelData.getChannelKey(), channel);
                }

                writeLineToChannel(channel, channelData.getValue(), Charsets.UTF_8);
            }
        }
    } finally {
        outputStreams.forEach(IOUtils::closeQuietly);
        closeQuietly(fis);
    }
}

From source file:fr.acxio.tools.agia.tasks.FileCopyTaskletTest.java
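
This test holds a shared lock obtained from getChannel() on the file to be deleted, so the tasklet's delete-origin step must fail with a FileCopyException.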

@Test
public void testCannotDeleteOrigin() throws Exception {
    FileCopyTasklet aTasklet = new FileCopyTasklet();
    aTasklet.setOrigin(new FileSystemResource("src/test/resources/testFiles/input.csv"));
    aTasklet.setDestination(new FileSystemResource("target/input-copy4.csv"));
    aTasklet.execute(null, null);

    File aOrigin = aTasklet.getDestination().getFile();
    assertTrue(aOrigin.exists());
    FileInputStream aInputStream = new FileInputStream(aOrigin);
    FileLock aLock = aInputStream.getChannel().lock(0L, Long.MAX_VALUE, true); // shared lock

    aTasklet.setDeleteOrigin(true);
    aTasklet.setOrigin(new FileSystemResource("target/input-copy4.csv"));
    aTasklet.setDestination(new FileSystemResource("target/input-copy5.csv"));
    try {
        aTasklet.execute(null, null);
        fail("Must throw a FileCopyException");
    } catch (FileCopyException e) {
        // Fallthrough
    } finally {
        aLock.release();
        aInputStream.close();
    }
}