Example usage for java.nio.channels FileChannel close

List of usage examples for java.nio.channels FileChannel close

Introduction

On this page you can find example usages of java.nio.channels.FileChannel.close().

Prototype

public final void close() throws IOException 

Source Link

Document

Closes this channel.
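A minimal sketch of close() in action is shown below (not taken from any of the projects listed on this page; the file name is a placeholder). The channel is closed explicitly in a finally block; in modern code a try-with-resources statement achieves the same effect because FileChannel implements AutoCloseable.

import java.io.IOException;
import java.nio.channels.FileChannel;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;

public class FileChannelCloseExample {
    public static void main(String[] args) throws IOException {
        FileChannel fc = FileChannel.open(Paths.get("example.dat"), StandardOpenOption.READ);
        try {
            System.out.println("File size: " + fc.size() + " bytes");
        } finally {
            // Always close the channel, even if size() throws an IOException.
            fc.close();
        }
    }
}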

Usage

From source file:gephi.spade.panel.fcsFile.java

/**
 * readFile ---
 * <p>
 * A helper function to read all the fields in the TEXT segment of the FCS
 * file.
 * </p>
 *
 * <p>
 * This helper function should only be called once by the constructor as it
 * is quite expensive.
 * </p>
 *
 * @param extractEventsP
 *            boolean flag indicating whether to extract events in the
 *            underlying file.
 * @throws java.io.FileNotFoundException if the file is not found.
 * @throws java.io.IOException if an IO exception occurred.
 */
private void readFile(boolean extractEventsP) throws FileNotFoundException, IOException {
    // Open a file input stream to the file
    FileInputStream fis = new FileInputStream(file);

    // Create a byte array to hold the version
    byte[] versionArray = new byte[VERSION_SIZE];

    // Read the version into the byte array
    int numRead = fis.read(versionArray);

    if (numRead < VERSION_SIZE) {
        // If the number of bytes read is less than the number of bytes in
        // the version string, then the file is too small to be an FCS file.
        isFCSP = false;

        // Close the file input stream
        fis.close();

        // Quit
        return;
    }

    // Decode the version using the default encoding
    version = new String(versionArray);

    // Determine whether the file is an FCS file by whether the version
    // string starts with the FCS_PREFIX
    isFCSP = version.startsWith(FCS_PREFIX);

    if (!isFCSP) {
        // If the file is not an FCS file, then close the file and quit.
        // Close the file input stream
        fis.close();

        // Quit
        return;
    }

    /**
     * At this point, we are pretty sure that the file is an FCS file. So,
     * we parse it.
     */
    /**
     * Get the standard HEADER stuff
     */
    // Skip 4 bytes to get to byte 10
    fis.skip(4);

    // Create a byte array to hold the HEADER
    byte[] headerArray = new byte[48];

    // Read the header into the byte array
    numRead = fis.read(headerArray);

    if (numRead < 48) {
        // If the number of bytes read is less than 48, then the file is too
        // small to be an FCS file.
        isFCSP = false;

        // Close the file input stream
        fis.close();

        // Quit
        return;
    }

    try {
        // Try to parse the TEXT segment start and end and DATA segment
        // start and end
        textStart = Integer.parseInt((new String(headerArray, 0, 8)).trim());
        textEnd = Integer.parseInt((new String(headerArray, 8, 8)).trim());
        dataStart = Integer.parseInt((new String(headerArray, 16, 8)).trim());
        dataEnd = Integer.parseInt((new String(headerArray, 24, 8)).trim());
    } catch (NumberFormatException nfe) {
        // If a NumberFormatException occurred, then quit because there's
        // nothing we can do without the TEXT or DATA segment.
        // Close the file input stream
        fis.close();

        return;
    }

    /**
     * Get the ANALYSIS segment limits
     */
    try {
        // Try to parse the analysisStart and analysisEnd
        analysisStart = Integer.parseInt((new String(headerArray, 32, 8)).trim());
        analysisEnd = Integer.parseInt((new String(headerArray, 40, 8)).trim());
    } catch (NumberFormatException nfe) {
        // If a NumberFormatException occurred, then set the ANALYSIS start
        // and end to 0 since this segment is optional.
        analysisStart = 0;
        analysisEnd = 0;
    }

    /**
     * Use NIO to read the OTHER and TEXT segments
     */
    // Get the channel for the input file
    FileChannel fc = fis.getChannel();

    // Move the channel's position back to 0
    fc.position(0);

    // Map the TEXT segment to memory
    MappedByteBuffer mbb = fc.map(FileChannel.MapMode.READ_ONLY, 0, textEnd + 1);

    /**
     * Create the character decoder for parsing characters
     */
    decoder = charset.newDecoder();

    /**
     * Get the OTHER segment
     */
    mbb.limit(textStart);
    mbb.position(58);
    CharBuffer other = decoder.decode(mbb.slice());

    /**
     * Get the TEXT segment
     */
    mbb.limit(textEnd + 1);
    mbb.position(textStart);
    text = decoder.decode(mbb.slice()).toString();

    /**
     * Close the file since we have the string version of the TEXT segment
     */
    // Close the file channel
    fc.close();

    // Close the file input stream
    fis.close();

    /**
     * Decode the TEXT segment
     */
    // The first character of the primary TEXT segment contains the
    // delimiter character
    delimiter = text.charAt(0);

    /**
     * Key/Value Pairs
     */
    // Generate all the pairs
    String[] pairs;

    if (delimiter == '\\') {
        // If the delimiter character is a backslash, then we have to escape
        // it in the regular expression.
        pairs = text.split("[\\\\]");
    } else {
        // Otherwise, we can just split it normally by using the character
        // in the regular expression.
        pairs = text.split("[" + Character.toString(delimiter) + "]");
    }

    /**
     * Calculate the number of pairs --- The number of pairs is the length
     * of the pairs array minus 1 divided by 2. The one is due to the empty
     * first element from the Java split above.
     */
    int numPairs = (pairs.length - 1) / 2;

    // Create a mapping for each key and its value
    settings = new Properties();

    // Loop through the TEXT segment we just split to get the keys and
    // values
    // The key is in (i * 2) + 1 to account for the empty first element.
    // The value is in (i * 2) + 2 to account for the empty first element.
    for (int i = 0; i < numPairs; i++) {
        settings.setProperty(pairs[(i * 2) + 1].trim(), pairs[(i * 2) + 2].trim());
    }

    // Go through all the key/value pairs and parse them
    parseSettings();

    /**
     * Extract Events
     */
    if (extractEventsP) {
        // If we are extracting data, then do so.
        extractEvents();
    }
}
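One detail worth noting in the readFile example above: the MappedByteBuffer obtained from fc.map(...) remains valid after fc.close() and fis.close(), because a mapping, once established, does not depend on the channel that created it. A minimal sketch of that behavior (the file name is a placeholder):

import java.io.IOException;
import java.nio.MappedByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;

public class MapThenCloseSketch {
    public static void main(String[] args) throws IOException {
        MappedByteBuffer mapped;
        try (FileChannel fc = FileChannel.open(Paths.get("example.fcs"), StandardOpenOption.READ)) {
            // Map (at most) the first 64 bytes of the file.
            mapped = fc.map(FileChannel.MapMode.READ_ONLY, 0, Math.min(fc.size(), 64));
        } // the channel is closed here ...
        // ... but the mapping can still be read afterwards
        while (mapped.hasRemaining()) {
            System.out.printf("%02x ", mapped.get());
        }
    }
}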

From source file:edu.harvard.iq.dvn.core.web.servlet.FileDownloadServlet.java

public void streamData(FileChannel in, WritableByteChannel out, String varHeader) {

    long position = 0;
    long howMany = 32 * 1024;

    try {
        // If we are streaming a TAB-delimited file, we will need to add the
        // variable header line:

        if (varHeader != null) {
            ByteBuffer varHeaderByteBuffer = ByteBuffer.wrap(varHeader.getBytes());
            out.write(varHeaderByteBuffer);
        }

        while (position < in.size()) {
            // transferTo may move fewer bytes than requested, so advance
            // the position by the number of bytes actually transferred.
            position += in.transferTo(position, howMany, out);
        }

        in.close();
        out.close();
    } catch (IOException ex) {
        // whatever. we don't care at this point.
    }

}

From source file:org.sakaiproject.search.index.impl.JDBCClusterIndexStore.java

/**
 * Update the DB: save this local patch segment into the database.
 *
 * @param connection
 */
protected void updateDBPatchFilesystem(Connection connection) throws SQLException, IOException {

    PreparedStatement segmentUpdate = null;
    PreparedStatement segmentInsert = null;
    FileChannel packetStream = null;
    FileInputStream packetFIS = null;
    FileChannel sharedStream = null;
    FileOutputStream sharedFOS = null;

    File packetFile = null;
    File sharedFinalFile = null;
    File sharedTempFile = null;
    long newVersion = System.currentTimeMillis();
    try {
        sharedTempFile = new File(getSharedTempFileName(INDEX_PATCHNAME));
        sharedFinalFile = new File(getSharedFileName(INDEX_PATCHNAME, sharedStructuredStorage));
        packetFile = clusterStorage.packPatch();
        if (packetFile.exists()) {
            packetFIS = new FileInputStream(packetFile);
            packetStream = packetFIS.getChannel();
            File sharedTempFileParent = sharedTempFile.getParentFile();
            if (!sharedTempFileParent.exists() && !sharedTempFileParent.mkdirs()) {
                log.warn("couldn't create " + sharedTempFileParent.getPath());
            }
            sharedFOS = new FileOutputStream(sharedTempFile);
            sharedStream = sharedFOS.getChannel();

            doBlockedStream(packetStream, sharedStream);

            packetStream.close();
            sharedStream.close();

            segmentUpdate = connection
                    .prepareStatement("update search_segments set  version_ = ?, size_ = ? where name_ = ? ");
            segmentInsert = connection
                    .prepareStatement("insert into search_segments ( name_, version_, size_ ) values ( ?,?,?)");

            segmentUpdate.clearParameters();
            segmentUpdate.setLong(1, newVersion);
            segmentUpdate.setLong(2, packetFile.length());
            segmentUpdate.setString(3, INDEX_PATCHNAME);
            if (segmentUpdate.executeUpdate() != 1) {
                segmentInsert.clearParameters();
                segmentInsert.setString(1, INDEX_PATCHNAME);
                segmentInsert.setLong(2, newVersion);
                segmentInsert.setLong(3, packetFile.length());
                if (segmentInsert.executeUpdate() != 1) {
                    throw new SQLException(" Failed to add patch packet  ");
                }
            }

            long st = System.currentTimeMillis();
            if (!sharedTempFile.renameTo(sharedFinalFile)) {
                log.warn("Couldn't rename file " + sharedTempFile.getPath() + " to "
                        + sharedFinalFile.getPath());
            }
            if (searchService.hasDiagnostics()) {
                log.info("Renamed " + sharedTempFile.getPath() + " to " + sharedFinalFile.getPath() + " in "
                        + (System.currentTimeMillis() - st) + "ms");
            }
        } else {
            log.warn("Packet file does not exist " + packetFile.getPath());
        }

    } finally {

        try {
            if (packetStream != null) {
                packetStream.close();
                packetFIS.close();
            }
        } catch (Exception ex) {
            log.debug(ex);
        }
        try {
            packetFile.delete();
        } catch (Exception ex) {
            log.debug(ex);
        }
        try {
            if (sharedStream != null) {
                sharedStream.close();
                sharedFOS.close();
            }
        } catch (Exception ex) {
            log.debug(ex);
        }
        try {
            sharedTempFile.delete();
        } catch (Exception ex) {
            log.debug(ex);
        }
        try {
            segmentUpdate.close();
        } catch (Exception ex) {
            log.debug(ex);
        }
        try {
            segmentInsert.close();
        } catch (Exception ex) {
            log.debug(ex);
        }
    }

}
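The actual copy above is delegated to doBlockedStream(packetStream, sharedStream), whose implementation is not shown on this page. Assuming it simply copies the source channel into the destination channel in fixed-size blocks, a sketch of such a helper could look like the following (the class name, method name and block size are assumptions, not the Sakai implementation):

import java.io.IOException;
import java.nio.channels.FileChannel;

public class BlockedStreamSketch {
    // Copies the entire source channel into the destination channel in blocks.
    static void copyBlocked(FileChannel from, FileChannel to) throws IOException {
        final long blockSize = 1024 * 1024; // assumed block size
        long size = from.size();
        long position = 0;
        while (position < size) {
            // transferFrom returns the number of bytes actually transferred,
            // which may be less than requested.
            position += to.transferFrom(from, position, Math.min(blockSize, size - position));
        }
    }
}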

From source file:org.sakaiproject.search.index.impl.JDBCClusterIndexStore.java

/**
 * Update the DB: save this local segment into the database.
 *
 * @param connection
 * @param addsi
 */
protected void updateDBSegmentFilesystem(Connection connection, SegmentInfo addsi)
        throws SQLException, IOException {

    PreparedStatement segmentUpdate = null;
    PreparedStatement segmentInsert = null;
    FileChannel packetStream = null;
    FileInputStream packetFIS = null;
    FileChannel sharedStream = null;
    FileOutputStream sharedFOS = null;
    File packetFile = null;
    File sharedFinalFile = null;
    File sharedTempFile = null;
    long newVersion = System.currentTimeMillis();
    try {
        sharedTempFile = new File(getSharedTempFileName(addsi.getName()));
        sharedFinalFile = new File(getSharedFileName(addsi.getName(), sharedStructuredStorage));
        packetFile = clusterStorage.packSegment(addsi, newVersion);
        if (packetFile.exists()) {
            packetFIS = new FileInputStream(packetFile);
            packetStream = packetFIS.getChannel();
            File parentFile = sharedTempFile.getParentFile();
            if (!parentFile.exists() && !parentFile.mkdirs()) {
                log.warn("Unable to create directory " + sharedTempFile.getParentFile().getPath());
            }
            sharedFOS = new FileOutputStream(sharedTempFile);
            sharedStream = sharedFOS.getChannel();

            // Copy file contents from source to destination
            doBlockedStream(packetStream, sharedStream);

            packetStream.close();
            sharedStream.close();

            segmentUpdate = connection.prepareStatement(
                    "update search_segments set  version_ = ?, size_ = ? where name_ = ? and version_ = ?");
            segmentInsert = connection
                    .prepareStatement("insert into search_segments ( name_, version_, size_ ) values ( ?,?,?)");
            if (addsi.isInDb()) {
                segmentUpdate.clearParameters();
                segmentUpdate.setLong(1, newVersion);
                segmentUpdate.setLong(2, packetFile.length());
                segmentUpdate.setString(3, addsi.getName());
                segmentUpdate.setLong(4, addsi.getVersion());
                if (segmentUpdate.executeUpdate() != 1) {
                    throw new SQLException(" ant Find packet to update " + addsi);
                }
            } else {
                segmentInsert.clearParameters();
                segmentInsert.setString(1, addsi.getName());
                segmentInsert.setLong(2, newVersion);
                segmentInsert.setLong(3, packetFile.length());
                if (segmentInsert.executeUpdate() != 1) {
                    throw new SQLException(" Failed to insert packet  " + addsi);
                }
            }
            addsi.setVersion(newVersion);
            File sharedParentFile = sharedFinalFile.getParentFile();
            if (!sharedParentFile.exists() && !sharedParentFile.mkdirs()) {
                log.warn("Couln't create directory " + sharedParentFile.getPath());
            }
            long st = System.currentTimeMillis();
            if (!sharedTempFile.renameTo(sharedFinalFile)) {
                log.warn("Couldn't rename " + sharedTempFile.getPath() + " to " + sharedFinalFile.getPath());

            }
            if (searchService.hasDiagnostics()) {
                log.info("Renamed " + sharedTempFile.getPath() + " to " + sharedFinalFile.getPath() + " in "
                        + (System.currentTimeMillis() - st) + "ms");
            }

            log.info("DB Updated " + addsi);
        } else {
            log.warn("Packet file does not exist " + packetFile.getPath());
        }

    } finally {
        try {
            packetStream.close();
            packetFIS.close();
        } catch (Exception ex) {
            log.debug(ex);
        }
        try {
            packetFile.delete();
        } catch (Exception ex) {
            log.debug(ex);
        }
        try {
            sharedStream.close();
            sharedFOS.close();
        } catch (Exception ex) {
            log.debug(ex);
        }
        try {
            sharedTempFile.delete();
        } catch (Exception ex) {
            log.debug(ex);
        }
        try {
            segmentUpdate.close();
        } catch (Exception ex) {
            log.debug(ex);
        }
        try {
            segmentInsert.close();
        } catch (Exception ex) {
            log.debug(ex);
        }
    }

}
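Both Sakai methods close every channel and stream individually inside the finally block and deliberately swallow exceptions thrown by close(). That boilerplate is often factored into a small quiet-close helper; a sketch of one (not part of the Sakai code) is shown below. Since FileChannel, FileInputStream and FileOutputStream all implement Closeable, the same helper covers the channels and streams used above.

import java.io.Closeable;
import java.io.IOException;

public class CloseQuietlySketch {
    // Closes a resource and ignores any IOException, mirroring the
    // "close in finally, log and continue" pattern used above.
    static void closeQuietly(Closeable resource) {
        if (resource == null) {
            return;
        }
        try {
            resource.close();
        } catch (IOException e) {
            // intentionally ignored; nothing useful can be done here
        }
    }
}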

From source file:net.librec.data.convertor.TextDataConvertor.java

/**
 * Read data from the data file. Note that duplicate lines are not
 * handled.
 *
 * @param dataColumnFormat
 *            the format of input data file
 * @param inputDataPath
 *            the path of input data file
 * @param binThold
 *            the threshold used to binarize a rating. If a rating is greater
 *            than the threshold, the value becomes 1; otherwise 0. To
 *            disable this behavior, i.e., keep the original rating value,
 *            set the threshold to a negative value.
 * @throws IOException
 *            if the <code>inputDataPath</code> is not valid.
 */
private void readData(String dataColumnFormat, String inputDataPath, double binThold) throws IOException {
    LOG.info(String.format("Dataset: %s", StringUtil.last(inputDataPath, 38)));
    // Table {row-id, col-id, rate}
    Table<Integer, Integer, Double> dataTable = HashBasedTable.create();
    // Table {row-id, col-id, timestamp}
    Table<Integer, Integer, Long> timeTable = null;
    // Map {col-id, multiple row-id}: used to fast build a rating matrix
    Multimap<Integer, Integer> colMap = HashMultimap.create();
    // BiMap {raw id, inner id} userIds, itemIds
    if (this.userIds == null) {
        this.userIds = HashBiMap.create();
    }
    if (this.itemIds == null) {
        this.itemIds = HashBiMap.create();
    }
    final List<File> files = new ArrayList<File>();
    final ArrayList<Long> fileSizeList = new ArrayList<Long>();
    SimpleFileVisitor<Path> finder = new SimpleFileVisitor<Path>() {
        @Override
        public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
            fileSizeList.add(file.toFile().length());
            files.add(file.toFile());
            return super.visitFile(file, attrs);
        }
    };
    Files.walkFileTree(Paths.get(inputDataPath), finder);
    LOG.info("All dataset files " + files.toString());
    long allFileSize = 0;
    for (Long everyFileSize : fileSizeList) {
        allFileSize = allFileSize + everyFileSize.longValue();
    }
    LOG.info("All dataset files size " + Long.toString(allFileSize));
    int readingFileCount = 0;
    long loadAllFileByte = 0;
    // loop every dataFile collecting from walkFileTree
    for (File dataFile : files) {
        LOG.info("Now loading dataset file " + dataFile.toString().substring(
                dataFile.toString().lastIndexOf(File.separator) + 1, dataFile.toString().lastIndexOf(".")));
        readingFileCount += 1;
        loadFilePathRate = readingFileCount / (float) files.size();
        long readingOneFileByte = 0;
        FileInputStream fis = new FileInputStream(dataFile);
        FileChannel fileRead = fis.getChannel();
        ByteBuffer buffer = ByteBuffer.allocate(BSIZE);
        int len;
        String bufferLine = new String();
        byte[] bytes = new byte[BSIZE];
        while ((len = fileRead.read(buffer)) != -1) {
            readingOneFileByte += len;
            loadDataFileRate = readingOneFileByte / (float) fileRead.size();
            loadAllFileByte += len;
            loadAllFileRate = loadAllFileByte / (float) allFileSize;
            buffer.flip();
            buffer.get(bytes, 0, len);
            bufferLine = bufferLine.concat(new String(bytes, 0, len));
            bufferLine = bufferLine.replaceAll("\r", "\n");
            String[] bufferData = bufferLine.split("(\n)+");
            boolean isComplete = bufferLine.endsWith("\n");
            int loopLength = isComplete ? bufferData.length : bufferData.length - 1;
            for (int i = 0; i < loopLength; i++) {
                String line = new String(bufferData[i]);
                String[] data = line.trim().split("[ \t,]+");
                String user = data[0];
                String item = data[1];
                Double rate = ((dataColumnFormat.equals("UIR") || dataColumnFormat.equals("UIRT"))
                        && data.length >= 3) ? Double.valueOf(data[2]) : 1.0;

                // binarize the rating for item recommendation task
                if (binThold >= 0) {
                    rate = rate > binThold ? 1.0 : 0.0;
                }

                // inner id starting from 0
                int row = userIds.containsKey(user) ? userIds.get(user) : userIds.size();
                userIds.put(user, row);

                int col = itemIds.containsKey(item) ? itemIds.get(item) : itemIds.size();
                itemIds.put(item, col);

                dataTable.put(row, col, rate);
                colMap.put(col, row);
                // record rating's issuing time
                if (StringUtils.equals(dataColumnFormat, "UIRT") && data.length >= 4) {
                    if (timeTable == null) {
                        timeTable = HashBasedTable.create();
                    }
                    // convert to milliseconds
                    long mms = 0L;
                    try {
                        mms = Long.parseLong(data[3]); // cannot format
                        // 9.7323480e+008
                    } catch (NumberFormatException e) {
                        mms = (long) Double.parseDouble(data[3]);
                    }
                    long timestamp = timeUnit.toMillis(mms);
                    timeTable.put(row, col, timestamp);
                }
            }
            if (!isComplete) {
                bufferLine = bufferData[bufferData.length - 1];
            }
            buffer.clear();
        }
        fileRead.close();
        fis.close();
    }
    int numRows = numUsers(), numCols = numItems();
    // build rating matrix
    preferenceMatrix = new SparseMatrix(numRows, numCols, dataTable, colMap);
    if (timeTable != null)
        datetimeMatrix = new SparseMatrix(numRows, numCols, timeTable, colMap);
    // release memory of data table
    dataTable = null;
    timeTable = null;
}
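The readData method above demonstrates the standard FileChannel read loop: read into a ByteBuffer, flip() it before consuming the bytes, clear() it before the next read, and close the channel and the stream once read() returns -1. A stripped-down sketch of that loop (file name and charset are placeholders), using try-with-resources so that close() is called even if the loop fails:

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.charset.StandardCharsets;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;

public class ReadLoopSketch {
    public static void main(String[] args) throws IOException {
        StringBuilder content = new StringBuilder();
        try (FileChannel channel = FileChannel.open(Paths.get("ratings.txt"), StandardOpenOption.READ)) {
            ByteBuffer buffer = ByteBuffer.allocate(8192);
            while (channel.read(buffer) != -1) {
                buffer.flip(); // switch the buffer from writing to reading
                // Caveat: chunk-wise decoding can split multi-byte characters at
                // buffer boundaries (the example above shares this limitation);
                // it is fine for plain ASCII data files.
                content.append(StandardCharsets.UTF_8.decode(buffer));
                buffer.clear(); // make room for the next read
            }
        } // channel.close() has been invoked here
        System.out.println("Read " + content.length() + " characters");
    }
}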

From source file:com.cerema.cloud2.lib.resources.files.ChunkedUploadRemoteFileOperation.java

@Override
protected int uploadFile(OwnCloudClient client) throws IOException {
    int status = -1;

    FileChannel channel = null;
    RandomAccessFile raf = null;
    try {
        File file = new File(mLocalPath);
        raf = new RandomAccessFile(file, "r");
        channel = raf.getChannel();
        mEntity = new ChunkFromFileChannelRequestEntity(channel, mMimeType, CHUNK_SIZE, file);
        synchronized (mDataTransferListeners) {
            ((ProgressiveDataTransferer) mEntity).addDatatransferProgressListeners(mDataTransferListeners);
        }

        long offset = 0;
        String uriPrefix = client.getWebdavUri() + WebdavUtils.encodePath(mRemotePath) + "-chunking-"
                + Math.abs((new Random()).nextInt(9000) + 1000) + "-";
        long totalLength = file.length();
        long chunkCount = (long) Math.ceil((double) totalLength / CHUNK_SIZE);
        String chunkSizeStr = String.valueOf(CHUNK_SIZE);
        String totalLengthStr = String.valueOf(file.length());
        for (int chunkIndex = 0; chunkIndex < chunkCount; chunkIndex++, offset += CHUNK_SIZE) {
            if (chunkIndex == chunkCount - 1) {
                chunkSizeStr = String.valueOf(CHUNK_SIZE * chunkCount - totalLength);
            }
            if (mPutMethod != null) {
                mPutMethod.releaseConnection(); // release the connection so other
                                                // methods can reuse it
            }
            mPutMethod = new PutMethod(uriPrefix + chunkCount + "-" + chunkIndex);
            if (mRequiredEtag != null && mRequiredEtag.length() > 0) {
                mPutMethod.addRequestHeader(IF_MATCH_HEADER, "\"" + mRequiredEtag + "\"");
            }
            mPutMethod.addRequestHeader(OC_CHUNKED_HEADER, OC_CHUNKED_HEADER);
            mPutMethod.addRequestHeader(OC_CHUNK_SIZE_HEADER, chunkSizeStr);
            mPutMethod.addRequestHeader(OC_TOTAL_LENGTH_HEADER, totalLengthStr);
            ((ChunkFromFileChannelRequestEntity) mEntity).setOffset(offset);
            mPutMethod.setRequestEntity(mEntity);
            if (mCancellationRequested.get()) {
                mPutMethod.abort();
                // next method will throw an exception
            }
            status = client.executeMethod(mPutMethod);

            if (status == 400) {
                InvalidCharacterExceptionParser xmlParser = new InvalidCharacterExceptionParser();
                InputStream is = new ByteArrayInputStream(mPutMethod.getResponseBodyAsString().getBytes());
                try {
                    mForbiddenCharsInServer = xmlParser.parseXMLResponse(is);

                } catch (Exception e) {
                    mForbiddenCharsInServer = false;
                    Log_OC.e(TAG, "Exception reading exception from server", e);
                }
            }

            client.exhaustResponse(mPutMethod.getResponseBodyAsStream());
            Log_OC.d(TAG, "Upload of " + mLocalPath + " to " + mRemotePath + ", chunk index " + chunkIndex
                    + ", count " + chunkCount + ", HTTP result status " + status);

            if (!isSuccess(status))
                break;
        }

    } finally {
        if (channel != null)
            channel.close();
        if (raf != null)
            raf.close();
        if (mPutMethod != null)
            mPutMethod.releaseConnection(); // release the connection so other methods can reuse it
    }
    return status;
}
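In the chunked upload above the FileChannel is wrapped in a ChunkFromFileChannelRequestEntity that reads one CHUNK_SIZE slice per PUT request, and the finally block closes the channel, the RandomAccessFile and the HTTP connection whatever the outcome (closing the RandomAccessFile would also close the channel obtained from it). As an illustration of how a single chunk can be read at a given offset without moving the channel's position, here is a sketch using positional reads; the class name and chunk size are placeholders, not the ownCloud API:

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;

public class ChunkReadSketch {
    private static final int CHUNK_SIZE = 1024 * 1024; // assumed chunk size

    // Reads up to CHUNK_SIZE bytes starting at 'offset', using positional reads
    // so the channel's own position is left untouched.
    static byte[] readChunk(FileChannel channel, long offset) throws IOException {
        long remaining = Math.max(channel.size() - offset, 0);
        ByteBuffer buffer = ByteBuffer.allocate((int) Math.min(CHUNK_SIZE, remaining));
        while (buffer.hasRemaining()) {
            if (channel.read(buffer, offset + buffer.position()) == -1) {
                break; // reached end of file early
            }
        }
        buffer.flip();
        byte[] chunk = new byte[buffer.remaining()];
        buffer.get(chunk);
        return chunk;
    }
}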

From source file:gephi.spade.panel.fcsFile.java

/**
 * extractEvents ---
 * <p>
 * Extracts the events from the FCS file using NIO.
 * </p>
 *
 * @throws java.io.FileNotFoundException if the file is not found.
 * @throws java.io.IOException if an IO exception occurred.
 */
private void extractEvents() throws FileNotFoundException, IOException {
    if ((dataStart >= dataEnd) || (totalEvents <= 0)) {
        // If the byte offset of the start of the DATA segment is greater
        // than or equal to the end of the DATA segment, or the number of
        // events is less than or equal to 0, then create an empty array of events.
        eventList = new double[0][parameters];

        return;
    }

    // Open a file input stream to the file
    FileInputStream fis = new FileInputStream(file);

    // Get the channel for the file
    FileChannel fc = fis.getChannel();

    // Map the DATA segment to memory
    MappedByteBuffer data;

    try {
        data = fc.map(FileChannel.MapMode.READ_ONLY, dataStart, dataEnd - dataStart + 1);
    } catch (Throwable t) {
        // Try again with a workaround to see if we can compensate for off-by-one errors that
        // some FCS files have been known to incorporate in the ENDDATA property.
        data = fc.map(FileChannel.MapMode.READ_ONLY, dataStart, dataEnd - dataStart);
    }

    /**
     * We don't need to worry about endian-ness here since ASCII is one
     * byte, and float and double are IEEE standards.
     */
    if (dataType != null) {
        if (dataType.equalsIgnoreCase("I")) {
            // If the data type is "I", then it is binary integer.
            readBinIntData(data);
        } else if (dataType.equalsIgnoreCase("F")) {
            // If the data type is "F", then it is floating point.
            readFloatData(data);
        } else if (dataType.equalsIgnoreCase("D")) {
            // If the data type is "D", then it is double precision floating
            // point
            readDoubleData(data);
        } else if (dataType.equalsIgnoreCase("A")) {
            // If the data type is "A", then it is ASCII.
            readASCIIData(data);
        }
    }

    // Close the file channel
    fc.close();

    // Close the file input stream
    fis.close();
}

From source file:com.healthmarketscience.jackcess.Database.java

/**
 * Create a new database by reading it in from a FileChannel.
 * @param file the File to which the channel is connected 
 * @param channel File channel of the database.  This needs to be a
 *    FileChannel instead of a ReadableByteChannel because we need to
 *    randomly jump around to various points in the file.
 * @param autoSync whether or not to enable auto-syncing on write.  if
 *                 {@code true}, writes will be immediately flushed to disk.
 *                 This leaves the database in a (fairly) consistent state
 *                 on each write, but can be very inefficient for many
 *                 updates.  if {@code false}, flushing to disk happens at
 *                 the jvm's leisure, which can be much faster, but may
 *                 leave the database in an inconsistent state if failures
 *                 are encountered during writing.
 * @param fileFormat version of new database (if known)
 * @param charset Charset to use, if {@code null}, uses default
 * @param timeZone TimeZone to use, if {@code null}, uses default
 */
protected Database(File file, FileChannel channel, boolean autoSync, FileFormat fileFormat, Charset charset,
        TimeZone timeZone, CodecProvider provider) throws IOException {
    boolean success = false;
    try {
        _file = file;
        _format = JetFormat.getFormat(channel);
        _charset = ((charset == null) ? getDefaultCharset(_format) : charset);
        _columnOrder = getDefaultColumnOrder();
        _fileFormat = fileFormat;
        _pageChannel = new PageChannel(channel, _format, autoSync);
        _timeZone = ((timeZone == null) ? getDefaultTimeZone() : timeZone);
        if (provider == null) {
            provider = DefaultCodecProvider.INSTANCE;
        }
        // note, it's slightly sketchy to pass ourselves along partially
        // constructed, but only our _format and _pageChannel refs should be
        // needed
        _pageChannel.initialize(this, provider);
        _buffer = _pageChannel.createPageBuffer();
        readSystemCatalog();
        success = true;

    } finally {
        if (!success && (channel != null)) {
            // something blew up, shutdown the channel (quietly)
            try {
                channel.close();
            } catch (Exception ignored) {
                // we don't care
            }
        }
    }
}

From source file:com.android.mms.transaction.NotificationTransaction.java

public int checkPduResult() {
    if (!mPduFile.exists()) {
        Log.e(MmsApp.TXN_TAG, "checkPduResult MMS Fail, no pduFile = " + mPduFile);
        return SmsManager.MMS_ERROR_UNSPECIFIED;
    }
    FileChannel channel = null;
    FileInputStream fs = null;
    RetrieveConf retrieveConf;
    try {
        fs = new FileInputStream(mPduFile);
        channel = fs.getChannel();
        ByteBuffer byteBuffer = ByteBuffer.allocate((int) channel.size());
        while ((channel.read(byteBuffer)) > 0) {
            // do nothing
            // System.out.println("reading");
        }
        final GenericPdu pdu = (new PduParser(byteBuffer.array(),
                PduParserUtil.shouldParseContentDisposition(mSubId))).parse();
        if (pdu == null || !(pdu instanceof RetrieveConf)) {
            Log.e(MmsApp.TXN_TAG, "checkPduResult: invalid parsed PDU");
            return SmsManager.MMS_ERROR_UNSPECIFIED;
        }
        retrieveConf = (RetrieveConf) pdu;

        // Store the downloaded message
        PduPersister persister = PduPersister.getPduPersister(mContext);
        Uri messageUri = persister.persist(pdu, Telephony.Mms.Inbox.CONTENT_URI, true/*createThreadId*/,
                true/*groupMmsEnabled*/, null/*preOpenedFiles*/);
        if (messageUri == null) {
            Log.e(MmsApp.TXN_TAG, "checkPduResult: can not persist message");
            return SmsManager.MMS_ERROR_UNSPECIFIED;
        }
        mMessageUri = messageUri.toString();
        // Update some of the properties of the message
        final ContentValues values = new ContentValues();
        values.put(Telephony.Mms.DATE, System.currentTimeMillis() / 1000L);
        values.put(Telephony.Mms.READ, 0);
        values.put(Telephony.Mms.SEEN, 0);
        String creator = ActivityThread.currentPackageName();
        if (!TextUtils.isEmpty(creator)) {
            values.put(Telephony.Mms.CREATOR, creator);
        }
        values.put(Telephony.Mms.SUBSCRIPTION_ID, mSubId);
        if (SqliteWrapper.update(mContext, mContext.getContentResolver(), messageUri, values, null/*where*/,
                null/*selectionArg*/) != 1) {
            Log.e(MmsApp.TXN_TAG, "persistIfRequired: can not update message");
        }
        // Delete the corresponding NotificationInd
        SqliteWrapper.delete(mContext, mContext.getContentResolver(), Telephony.Mms.CONTENT_URI,
                LOCATION_SELECTION,
                new String[] { Integer.toString(PduHeaders.MESSAGE_TYPE_NOTIFICATION_IND), mContentLocation });
        return Activity.RESULT_OK;
    } catch (IOException e) {
        e.printStackTrace();
        return SmsManager.MMS_ERROR_UNSPECIFIED;
    } catch (MmsException e) {
        e.printStackTrace();
        return SmsManager.MMS_ERROR_UNSPECIFIED;
    } catch (SQLiteException e) {
        e.printStackTrace();
        return SmsManager.MMS_ERROR_UNSPECIFIED;
    } catch (RuntimeException e) {
        e.printStackTrace();
        return SmsManager.MMS_ERROR_UNSPECIFIED;
    } finally {
        if (mPduFile != null) {
            mPduFile.delete();
        }
        try {
            if (channel != null) {
                channel.close();
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
        try {
            if (fs != null) {
                fs.close();
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}

From source file:com.clustercontrol.agent.job.PublicKeyThread.java

/**
 * Deletes the given public key from the Authorized_key file.<BR>
 * 
 * @param publicKey
 * @return true if the key was deleted successfully, false otherwise
 */
private synchronized boolean deleteKey(String publicKey) {
    m_log.debug("delete key start");

    if (SKIP_KEYFILE_UPDATE) {
        m_log.info("skipped deleting publicKey");
        return true;
    }

    Charset charset = Charset.forName("UTF-8");
    CharsetEncoder encoder = charset.newEncoder();
    CharsetDecoder decoder = charset.newDecoder();

    // Get the path of the authorized_keys file from the agent properties
    String fileName = AgentProperties.getProperty(execUser.toLowerCase() + AUTHORIZED_KEY_PATH);
    if (fileName == null || fileName.length() == 0)
        return false;

    // Create a File object for the authorized_keys file
    File fi = new File(fileName);

    RandomAccessFile randomAccessFile = null;
    FileChannel channel = null;
    FileLock lock = null;
    boolean delete = false;
    try {
        // Open the file with a RandomAccessFile in read/write mode
        randomAccessFile = new RandomAccessFile(fi, "rw");
        // Get the FileChannel from the RandomAccessFile
        channel = randomAccessFile.getChannel();

        // Try to acquire the file lock, retrying until the timeout expires
        for (int i = 0; i < (FILELOCK_TIMEOUT / FILELOCK_WAIT); i++) {
            if (null != (lock = channel.tryLock())) {
                break;
            }
            m_log.info("waiting for locked file... [" + (i + 1) + "/" + (FILELOCK_TIMEOUT / FILELOCK_WAIT)
                    + " : " + fileName + "]");
            Thread.sleep(FILELOCK_WAIT);
        }
        if (null == lock) {
            m_log.warn("file locking timeout.");
            return false;
        }

        // Rewrite the key file contents (exclusive access)
        synchronized (authKeyLock) {
            // Allocate a buffer the size of the file
            ByteBuffer buffer = ByteBuffer.allocate((int) channel.size());

            // Read the file contents into the buffer
            channel.read(buffer);

            // Flip the buffer so it can be read from position 0
            buffer.flip();

            // Decode the buffer contents into a String
            String contents = decoder.decode(buffer).toString();

            // Log the contents for debugging
            m_log.debug("contents " + contents.length() + " : " + contents);

            // Split the contents into individual key lines
            List<String> keyCheck = new ArrayList<String>();
            StringTokenizer tokenizer = new StringTokenizer(contents, "\n");
            while (tokenizer.hasMoreTokens()) {
                keyCheck.add(tokenizer.nextToken());
            }

            // Find the public key to be deleted
            int s = keyCheck.lastIndexOf(publicKey);
            if (s != -1) {
                // Remove the matching key
                m_log.debug("remove key : " + keyCheck.get(s));
                keyCheck.remove(s);
            }

            // Re-encode the remaining keys into the buffer
            encoder.reset();
            buffer.clear();

            int i;
            if (keyCheck.size() > 0) {
                for (i = 0; i < keyCheck.size() - 1; i++) {
                    encoder.encode(CharBuffer.wrap(keyCheck.get(i) + "\n"), buffer, false);
                }
                encoder.encode(CharBuffer.wrap(keyCheck.get(i)), buffer, true);
            }

            // Write the updated contents back to the file
            buffer.flip();
            channel.truncate(0);
            channel.position(0);
            channel.write(buffer);
        }

        delete = true;
    } catch (IOException e) {
        m_log.error(e.getMessage(), e);
    } catch (RuntimeException e) {
        m_log.error(e.getMessage(), e);
    } catch (InterruptedException e) {
        m_log.error(e.getMessage(), e);
    } finally {
        try {
            if (channel != null) {
                channel.close();
            }
            if (randomAccessFile != null) {
                randomAccessFile.close();
            }
            // Release the file lock
            if (lock != null) {
                lock.release();
            }
        } catch (Exception e) {
        }
    }

    return delete;
}
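In the deleteKey example the finally block closes the channel before calling lock.release(); since closing a FileChannel releases any locks acquired through it, the explicit release is mostly defensive. Because FileLock implements AutoCloseable (Java 7+), the lock and the channel can also be managed with try-with-resources; a sketch of that pattern (file name and contents are placeholders):

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.channels.FileLock;
import java.nio.charset.StandardCharsets;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;

public class LockedRewriteSketch {
    public static void main(String[] args) throws IOException {
        try (FileChannel channel = FileChannel.open(Paths.get("authorized_keys.sample"),
                StandardOpenOption.READ, StandardOpenOption.WRITE, StandardOpenOption.CREATE);
             FileLock lock = channel.lock()) {
            // Rewrite the whole file while holding the exclusive lock.
            ByteBuffer content = StandardCharsets.UTF_8.encode("ssh-rsa AAAA... user@host\n");
            channel.truncate(0);
            channel.write(content, 0);
        } // the lock is released and the channel is closed here, in that order
    }
}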