Usage examples for java.io.RandomAccessFile.close()
public void close() throws IOException
From source file:org.apache.hadoop.hdfs.server.namenode.TestEditLog.java
/** * Test edit log failover from a corrupt edit log */// w w w. j ava 2s.com @Test public void testEditLogFailOverFromCorrupt() throws IOException { File f1 = new File(TEST_DIR + "/failover0"); File f2 = new File(TEST_DIR + "/failover1"); List<URI> editUris = ImmutableList.of(f1.toURI(), f2.toURI()); NNStorage storage = setupEdits(editUris, 3); final long startErrorTxId = 1 * TXNS_PER_ROLL + 1; final long endErrorTxId = 2 * TXNS_PER_ROLL; File[] files = new File(f1, "current").listFiles(new FilenameFilter() { public boolean accept(File dir, String name) { if (name.startsWith(NNStorage.getFinalizedEditsFileName(startErrorTxId, endErrorTxId))) { return true; } return false; } }); assertEquals(1, files.length); long fileLen = files[0].length(); LOG.debug("Corrupting Log File: " + files[0] + " len: " + fileLen); RandomAccessFile rwf = new RandomAccessFile(files[0], "rw"); rwf.seek(fileLen - 4); // seek to checksum bytes int b = rwf.readInt(); rwf.seek(fileLen - 4); rwf.writeInt(b + 1); rwf.close(); FSEditLog editlog = getFSEditLog(storage); editlog.initJournalsForWrite(); long startTxId = 1; Collection<EditLogInputStream> streams = null; try { streams = editlog.selectInputStreams(startTxId, 4 * TXNS_PER_ROLL); readAllEdits(streams, startTxId); } catch (IOException e) { LOG.error("edit log failover didn't work", e); fail("Edit log failover didn't work"); } finally { IOUtils.cleanup(null, streams.toArray(new EditLogInputStream[0])); } }
From source file:com.frand.easyandroid.http.FFFileRespHandler.java
/**
 * Copies all remaining bytes from {@code input} to the current end of
 * {@code out} (resume-style append), stopping early when {@code interrupt}
 * is set by another caller.
 *
 * While {@code networkSpeed} reads as 0, a stall timer runs; if it exceeds
 * {@code TIME_OUT} milliseconds a ConnectTimeoutException is thrown.
 *
 * @param input source stream; may be null (returns -1)
 * @param out   destination file, opened by the caller; closed here on exit
 *              (original contract — callers must not reuse it)
 * @return number of bytes copied, or -1 when either argument is null
 * @throws IOException on read/write failure or stall timeout
 */
public int copy(InputStream input, RandomAccessFile out) throws IOException {
    interrupt = false;
    if (input == null || out == null) {
        return -1;
    }
    byte[] buffer = new byte[BUFFER_SIZE];
    BufferedInputStream in = new BufferedInputStream(input, BUFFER_SIZE);
    int count = 0, n = 0;
    long errorBlockTimePreviousTime = -1, expireTime = 0;
    try {
        // Append: seek to the end so partially-downloaded files are resumed.
        out.seek(out.length());
        previousTime = System.currentTimeMillis();
        while (!interrupt) {
            n = in.read(buffer, 0, BUFFER_SIZE);
            if (n == -1) {
                break;
            }
            out.write(buffer, 0, n);
            count += n;
            if (networkSpeed == 0) {
                // Speed reads zero: start (or check) the stall timer.
                if (errorBlockTimePreviousTime > 0) {
                    expireTime = System.currentTimeMillis() - errorBlockTimePreviousTime;
                    if (expireTime > TIME_OUT) {
                        throw new ConnectTimeoutException("connection time out.");
                    }
                } else {
                    errorBlockTimePreviousTime = System.currentTimeMillis();
                }
            } else {
                // Data is flowing again; reset the stall timer.
                expireTime = 0;
                errorBlockTimePreviousTime = -1;
            }
        }
    } finally {
        // Fix: the BufferedInputStream wrapper was never closed; close both
        // ends, best-effort, preserving the original close-out behavior.
        try {
            in.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
        try {
            out.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
    return count;
}
From source file:de.blinkt.openvpn.ActivityDashboard.java
private String loadLogFromFile() { String log = ""; try {// ww w.j a v a 2 s . c om RandomAccessFile fp = new RandomAccessFile(getCacheDir() + "/vpnlog.txt", "r"); // RandomAccessFile fp = new RandomAccessFile(Environment.getExternalStorageDirectory().getAbsolutePath() + "/vpnlog.txt", "r"); while (true) { byte[] buf = new byte[0x1000]; int size = fp.read(buf); if (size <= 0) break; log += new String(buf); } fp.close(); } catch (IOException e) { e.printStackTrace(); } return log; }
From source file:edu.rit.flick.DefaultFlickFile.java
@Override public File inflate(final Configuration configuration, final File fileIn, final File fileOut) { try {/*from ww w . j a va 2s . co m*/ if (!getDefaultDeflatedExtension().endsWith(Files.getFileExtension(fileIn.getName()))) { final File decompressedFile = archiveFile(fileIn, true); if (decompressedFile != null && configuration.getFlag(DELETE_FLAG)) if (!FileUtils.deleteQuietly(fileIn)) System.err.printf(FILE_COULD_NOT_BE_DELETED_WARNING_FORMAT, fileIn.getPath()); return decompressedFile; } final LongAdder unzippedContentsSize = new LongAdder(); final long inputFileSize = FileUtils.sizeOf(fileIn); final long t0 = System.currentTimeMillis(); flickFile.extractAll(fileOut.getPath()); final RandomAccessFile raf = new RandomAccessFile(flickFile.getFile(), InternalZipConstants.READ_MODE); final HeaderReader hr = new HeaderReader(raf); final ZipModel zm = hr.readAllHeaders(); final CentralDirectory centralDirectory = zm.getCentralDirectory(); @SuppressWarnings("unchecked") final List<FileHeader> fhs = Collections.checkedList(centralDirectory.getFileHeaders(), FileHeader.class); final List<File> files = fhs.stream().map(fh -> { final File file = FileUtils.getFile(fileOut.getPath(), File.separator, fh.getFileName()); unzippedContentsSize.add(file.length()); return file; }).collect(Collectors.toList()); if (!configuration.getFlag(KEEP_ZIPPED_FLAG)) // Traverse directory and look for files to decompress for (final File file : files) { File decompressedFile = null; if (!file.isDirectory()) decompressedFile = archiveFile(file, false); if (decompressedFile != null) { unzippedContentsSize.add(-FileUtils.sizeOf(file)); unzippedContentsSize.add(FileUtils.sizeOf(decompressedFile)); file.delete(); } } raf.close(); if (configuration.getFlag(DELETE_FLAG)) if (!FileUtils.deleteQuietly(fileIn)) System.err.printf(FILE_COULD_NOT_BE_DELETED_WARNING_FORMAT, fileIn.getPath()); final double overallTime = (System.currentTimeMillis() - t0) / 1000d; if (configuration.getFlag(VERBOSE_FLAG)) 
{ // Get the percent deflation on the compressed file final double percDeflated = 100 * unzippedContentsSize.doubleValue() / inputFileSize; System.out.printf(VERBOSE_DECOMPRESSION_INFO_FORMAT, fileIn.getName(), overallTime, percDeflated); } } catch (final Exception e) { e.printStackTrace(); } return fileOut; }
From source file:org.apache.hadoop.hdfs.server.namenode.TestEditLog.java
@Test public void testEditChecksum() throws Exception { // start a cluster Configuration conf = new HdfsConfiguration(); MiniDFSCluster cluster = null;//from w w w .j a va2 s . c o m FileSystem fileSys = null; cluster = new MiniDFSCluster.Builder(conf).numDataNodes(NUM_DATA_NODES).build(); cluster.waitActive(); fileSys = cluster.getFileSystem(); final FSNamesystem namesystem = cluster.getNamesystem(); FSImage fsimage = namesystem.getFSImage(); final FSEditLog editLog = fsimage.getEditLog(); fileSys.mkdirs(new Path("/tmp")); Iterator<StorageDirectory> iter = fsimage.getStorage().dirIterator(NameNodeDirType.EDITS); LinkedList<StorageDirectory> sds = new LinkedList<StorageDirectory>(); while (iter.hasNext()) { sds.add(iter.next()); } editLog.close(); cluster.shutdown(); for (StorageDirectory sd : sds) { File editFile = NNStorage.getFinalizedEditsFile(sd, 1, 3); assertTrue(editFile.exists()); long fileLen = editFile.length(); LOG.debug("Corrupting Log File: " + editFile + " len: " + fileLen); RandomAccessFile rwf = new RandomAccessFile(editFile, "rw"); rwf.seek(fileLen - 4); // seek to checksum bytes int b = rwf.readInt(); rwf.seek(fileLen - 4); rwf.writeInt(b + 1); rwf.close(); } try { cluster = new MiniDFSCluster.Builder(conf).numDataNodes(NUM_DATA_NODES).format(false).build(); fail("should not be able to start"); } catch (IOException e) { // expected assertNotNull("Cause of exception should be ChecksumException", e.getCause()); assertEquals("Cause of exception should be ChecksumException", ChecksumException.class, e.getCause().getClass()); } }
From source file:org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl.java
static private void truncateBlock(File blockFile, File metaFile, long oldlen, long newlen) throws IOException { LOG.info("truncateBlock: blockFile=" + blockFile + ", metaFile=" + metaFile + ", oldlen=" + oldlen + ", newlen=" + newlen); if (newlen == oldlen) { return;/* w w w. j a v a2 s. co m*/ } if (newlen > oldlen) { throw new IOException( "Cannot truncate block to from oldlen (=" + oldlen + ") to newlen (=" + newlen + ")"); } DataChecksum dcs = BlockMetadataHeader.readHeader(metaFile).getChecksum(); int checksumsize = dcs.getChecksumSize(); int bpc = dcs.getBytesPerChecksum(); long n = (newlen - 1) / bpc + 1; long newmetalen = BlockMetadataHeader.getHeaderSize() + n * checksumsize; long lastchunkoffset = (n - 1) * bpc; int lastchunksize = (int) (newlen - lastchunkoffset); byte[] b = new byte[Math.max(lastchunksize, checksumsize)]; RandomAccessFile blockRAF = new RandomAccessFile(blockFile, "rw"); try { //truncate blockFile blockRAF.setLength(newlen); //read last chunk blockRAF.seek(lastchunkoffset); blockRAF.readFully(b, 0, lastchunksize); } finally { blockRAF.close(); } //compute checksum dcs.update(b, 0, lastchunksize); dcs.writeValue(b, 0, false); //update metaFile RandomAccessFile metaRAF = new RandomAccessFile(metaFile, "rw"); try { metaRAF.setLength(newmetalen); metaRAF.seek(newmetalen - checksumsize); metaRAF.write(b, 0, checksumsize); } finally { metaRAF.close(); } }
From source file:captureplugin.CapturePlugin.java
/**
 * Check the programs after data update.
 *
 * Walks every configured device, removes programs that disappeared from the
 * TV data (auto-delete where the device allows it), and — when anything was
 * removed — shows a non-modal dialog listing the removed programs with an
 * option to export the list to a text file.
 */
public void handleTvDataUpdateFinished() {
    mNeedsUpdate = true;
    if (mAllowedToShowDialog) {
        mNeedsUpdate = false;
        DeviceIf[] devices = mConfig.getDeviceArray();

        // Read-only table model for the "deleted programs" overview.
        final DefaultTableModel model = new DefaultTableModel() {
            public boolean isCellEditable(int row, int column) {
                return false;
            }
        };
        model.setColumnCount(5);
        model.setColumnIdentifiers(new String[] { mLocalizer.msg("device", "Device"),
                Localizer.getLocalization(Localizer.I18N_CHANNEL), mLocalizer.msg("date", "Date"),
                ProgramFieldType.START_TIME_TYPE.getLocalizedName(),
                ProgramFieldType.TITLE_TYPE.getLocalizedName() });
        JTable table = new JTable(model);
        table.getTableHeader().setReorderingAllowed(false);
        table.getTableHeader().setResizingAllowed(false);

        // Devices configured to auto-delete are rendered in red (unless selected).
        table.getColumnModel().getColumn(0).setCellRenderer(new DefaultTableCellRenderer() {
            public Component getTableCellRendererComponent(JTable renderTable, Object value, boolean isSelected,
                    boolean hasFocus, int row, int column) {
                Component c = super.getTableCellRendererComponent(renderTable, value, isSelected, hasFocus, row,
                        column);
                if (value instanceof DeviceIf) {
                    if (((DeviceIf) value).getDeleteRemovedProgramsAutomatically() && !isSelected) {
                        c.setForeground(Color.red);
                    }
                }
                return c;
            }
        });

        // Seed column widths from the header labels (plus 10px padding).
        int[] columnWidth = new int[5];
        for (int i = 0; i < columnWidth.length; i++) {
            columnWidth[i] = UiUtilities.getStringWidth(table.getFont(), model.getColumnName(i)) + 10;
        }

        for (DeviceIf device : devices) {
            Program[] deleted = device.checkProgramsAfterDataUpdateAndGetDeleted();
            if (deleted != null && deleted.length > 0) {
                for (Program p : deleted) {
                    // Auto-remove only programs that are neither expired nor
                    // currently on air; otherwise drop them without execution.
                    if (device.getDeleteRemovedProgramsAutomatically() && !p.isExpired() && !p.isOnAir()) {
                        device.remove(UiUtilities.getLastModalChildOf(getParentFrame()), p);
                    } else {
                        device.removeProgramWithoutExecution(p);
                    }
                    if (!p.isExpired()) {
                        Object[] o = new Object[] { device, p.getChannel().getName(), p.getDateString(),
                                p.getTimeString(), p.getTitle() };
                        // Widen columns to fit this row's rendered text.
                        for (int i = 0; i < columnWidth.length; i++) {
                            columnWidth[i] = Math.max(columnWidth[i],
                                    UiUtilities.getStringWidth(table.getFont(), o[i].toString()) + 10);
                        }
                        model.addRow(o);
                    }
                }
            }
            device.getProgramList();
        }

        if (model.getRowCount() > 0) {
            int sum = 0;
            for (int i = 0; i < columnWidth.length; i++) {
                table.getColumnModel().getColumn(i).setPreferredWidth(columnWidth[i]);
                if (i < columnWidth.length - 1) {
                    table.getColumnModel().getColumn(i).setMaxWidth(columnWidth[i]);
                }
                sum += columnWidth[i];
            }
            JScrollPane scrollPane = new JScrollPane(table);
            scrollPane.setPreferredSize(new Dimension(450, 250));

            // If the table is wider than the dialog, disable auto-resize so a
            // horizontal scrollbar appears instead of squeezing the columns.
            if (sum > 500) {
                table.setAutoResizeMode(JTable.AUTO_RESIZE_OFF);
                scrollPane.getViewport().setPreferredSize(
                        new Dimension(sum, scrollPane.getViewport().getPreferredSize().height));
            }

            JButton export = new JButton(mLocalizer.msg("exportList", "Export list"));
            export.addActionListener(new ActionListener() {
                public void actionPerformed(ActionEvent e) {
                    JFileChooser chooser = new JFileChooser();
                    chooser.setDialogType(JFileChooser.SAVE_DIALOG);
                    chooser.setFileFilter(new FileFilter() {
                        public boolean accept(File f) {
                            return f.isDirectory() || f.toString().toLowerCase().endsWith(".txt");
                        }

                        public String getDescription() {
                            return "*.txt";
                        }
                    });
                    chooser.setSelectedFile(new File("RemovedPrograms.txt"));
                    if (chooser.showSaveDialog(
                            UiUtilities.getLastModalChildOf(getParentFrame())) == JFileChooser.APPROVE_OPTION) {
                        if (chooser.getSelectedFile() != null) {
                            String file = chooser.getSelectedFile().getAbsolutePath();
                            if (!file.toLowerCase().endsWith(".txt") && file.indexOf('.') == -1) {
                                file = file + ".txt";
                            }
                            if (file.indexOf('.') != -1) {
                                try {
                                    // NOTE(review): `write` is not closed in a finally
                                    // block (leaks on a mid-write exception) and the
                                    // catch below silently swallows all errors —
                                    // candidates for a follow-up fix.
                                    RandomAccessFile write = new RandomAccessFile(file, "rw");
                                    write.setLength(0);
                                    // LF on unixoid platforms, CRLF otherwise.
                                    String eolStyle = File.separator.equals("/") ? "\n" : "\r\n";
                                    for (int i = 0; i < model.getRowCount(); i++) {
                                        StringBuilder line = new StringBuilder();
                                        for (int j = 0; j < model.getColumnCount(); j++) {
                                            line.append(model.getValueAt(i, j)).append(' ');
                                        }
                                        line.append(eolStyle);
                                        write.writeBytes(line.toString());
                                    }
                                    write.close();
                                } catch (Exception ee) {
                                }
                            }
                        }
                    }
                }
            });

            Object[] message = {
                    mLocalizer.msg("deletedText", "The data was changed and the following programs were deleted:"),
                    scrollPane, export };
            JOptionPane pane = new JOptionPane();
            pane.setMessage(message);
            pane.setMessageType(JOptionPane.PLAIN_MESSAGE);

            final JDialog d = pane.createDialog(UiUtilities.getLastModalChildOf(getParentFrame()),
                    mLocalizer.msg("CapturePlugin", "CapturePlugin") + " - "
                            + mLocalizer.msg("deletedTitle", "Deleted programs"));
            d.setResizable(true);
            d.setModal(false);

            // Show the dialog from the EDT without blocking this method.
            SwingUtilities.invokeLater(new Runnable() {
                public void run() {
                    d.setVisible(true);
                }
            });
        }
    }
}
From source file:org.chililog.server.workbench.StaticFileRequestHandler.java
/**
 * Process the message: serve a static file over HTTP (GET only).
 *
 * Performs path validation, If-Modified-Since cache validation, then writes
 * the file either as chunks (SSL) or as a single fully-buffered response.
 */
@Override
public void processMessage(ChannelHandlerContext ctx, MessageEvent e) throws Exception {
    HttpRequest request = (HttpRequest) e.getMessage();

    // We don't handle 100 Continue because we only allow GET method.
    if (request.getMethod() != HttpMethod.GET) {
        sendError(ctx, e, METHOD_NOT_ALLOWED, null);
        return;
    }

    // Check: null means the URI did not map to an allowed physical path.
    final String filePath = convertUriToPhysicalFilePath(request.getUri());
    if (filePath == null) {
        sendError(ctx, e, FORBIDDEN, null);
        return;
    }
    File file = new File(filePath);
    if (file.isHidden() || !file.exists()) {
        sendError(ctx, e, NOT_FOUND, String.format("%s not exist", file.getCanonicalPath()));
        return;
    }
    if (!file.isFile()) {
        sendError(ctx, e, FORBIDDEN, String.format("%s not a file", file.getCanonicalPath()));
        return;
    }

    // Cache Validation
    String ifModifiedSince = request.getHeader(HttpHeaders.Names.IF_MODIFIED_SINCE);
    if (!StringUtils.isBlank(ifModifiedSince)) {
        SimpleDateFormat dateFormatter = new SimpleDateFormat(HTTP_DATE_FORMAT, Locale.US);
        Date ifModifiedSinceDate = dateFormatter.parse(ifModifiedSince);

        // Only compare up to the second because the datetime format we send
        // to the client does not have milliseconds.
        long ifModifiedSinceDateSeconds = ifModifiedSinceDate.getTime() / 1000;
        long fileLastModifiedSeconds = file.lastModified() / 1000;
        if (ifModifiedSinceDateSeconds == fileLastModifiedSeconds) {
            sendNotModified(ctx, e);
            return;
        }
    }

    // Open file for sending back
    RandomAccessFile raf;
    try {
        raf = new RandomAccessFile(file, "r");
    } catch (FileNotFoundException fnfe) {
        sendError(ctx, e, NOT_FOUND, null);
        return;
    }
    long fileLength = raf.length();

    // Log
    writeLogEntry(e, OK, null);

    // Create the response
    HttpResponse response = new DefaultHttpResponse(HTTP_1_1, OK);
    setContentLength(response, fileLength);
    setContentTypeHeader(response, file);
    setDateAndCacheHeaders(response, file);

    // Write the content.
    Channel ch = e.getChannel();
    ChannelFuture writeFuture;
    if (AppProperties.getInstance().getWorkbenchSslEnabled()) {
        // Cannot use zero-copy with HTTPS
        // Write the initial line and the header.
        ch.write(response);

        // Write chunks.
        // NOTE(review): raf's ownership passes to ChunkedFile here and it is
        // never closed in this method — presumably the chunked-write machinery
        // closes it when the transfer completes; verify, since a failure
        // before/during the write would leak the file handle.
        writeFuture = ch.write(new ChunkedFile(raf, 0, fileLength, 8192));
    } else {
        // Now that we are using Execution Handlers, we cannot do zero-copy.
        // Do as per with compression (which is what most browser will ask for).
        // The whole file is buffered in memory before writing.
        byte[] buffer = new byte[(int) fileLength];
        raf.readFully(buffer);
        raf.close();

        response.setContent(ChannelBuffers.copiedBuffer(buffer));
        writeFuture = ch.write(response);

        /*
         * // No encryption - use zero-copy. // However zero-copy does not seem to work with compression // Only use
         * zero-copy for large files like movies and music // Write the initial line and the header. *
         * ch.write(response); // Zero-copy final FileRegion region = new DefaultFileRegion(raf.getChannel(), 0, *
         * fileLength); writeFuture = ch.write(region); writeFuture.addListener(new ChannelFutureProgressListener()
         * { public void operationComplete(ChannelFuture future) { region.releaseExternalResources(); } public void
         * operationProgressed(ChannelFuture future, long amount, long current, long total) {
         * _logger.debug("Zero-Coping file %s: %d / %d (+%d) bytes", filePath, current, total, amount); } });
         */
    }

    // Decide whether to close the connection or not.
    if (!isKeepAlive(request)) {
        // Close the connection when the whole content is written out.
        writeFuture.addListener(ChannelFutureListener.CLOSE);
    }
}
From source file:com.limegroup.gnutella.metadata.MP3DataEditor.java
public int commitMetaData(String filename) { if (LOG.isDebugEnabled()) LOG.debug("committing mp3 file"); if (!LimeXMLUtils.isMP3File(filename)) return LimeXMLReplyCollection.INCORRECT_FILETYPE; File f = null;/*from w w w .java 2s . c o m*/ RandomAccessFile file = null; try { try { f = new File(filename); FileUtils.setWriteable(f); file = new RandomAccessFile(f, "rw"); } catch (IOException e) { return LimeXMLReplyCollection.FILE_DEFECTIVE; } long length = 0; try { length = file.length(); if (length < 128) //could not write - file too small return LimeXMLReplyCollection.FILE_DEFECTIVE; file.seek(length - 128); } catch (IOException ee) { return LimeXMLReplyCollection.RW_ERROR; } //1. Try to write out the ID3v2 data first int ret = -1; try { ret = writeID3V2DataToDisk(f); } catch (IOException iox) { return LimeXMLReplyCollection.RW_ERROR; } catch (ID3v2Exception e) { //catches both ID3v2 related exceptions ret = writeID3V1DataToDisk(file); } return ret; } finally { if (file != null) { try { file.close(); } catch (IOException ignored) { } } } }
From source file:org.apache.flume.channel.file.TestFlumeEventQueue.java
@Test(expected = BadCheckpointException.class) public void testCorruptInflightPuts() throws Exception { RandomAccessFile inflight = null; try {//from ww w . j a v a 2 s. c o m queue = new FlumeEventQueue(backingStore, backingStoreSupplier.getInflightTakes(), backingStoreSupplier.getInflightPuts()); long txnID1 = new Random().nextInt(Integer.MAX_VALUE - 1); long txnID2 = txnID1 + 1; queue.addWithoutCommit(new FlumeEventPointer(1, 1), txnID1); queue.addWithoutCommit(new FlumeEventPointer(2, 1), txnID1); queue.addWithoutCommit(new FlumeEventPointer(2, 2), txnID2); queue.checkpoint(true); TimeUnit.SECONDS.sleep(3L); inflight = new RandomAccessFile(backingStoreSupplier.getInflightPuts(), "rw"); inflight.seek(0); inflight.writeInt(new Random().nextInt()); queue = new FlumeEventQueue(backingStore, backingStoreSupplier.getInflightTakes(), backingStoreSupplier.getInflightPuts()); SetMultimap<Long, Long> deserializedMap = queue.deserializeInflightPuts(); Assert.assertTrue(deserializedMap.get(txnID1).contains(new FlumeEventPointer(1, 1).toLong())); Assert.assertTrue(deserializedMap.get(txnID1).contains(new FlumeEventPointer(2, 1).toLong())); Assert.assertTrue(deserializedMap.get(txnID2).contains(new FlumeEventPointer(2, 2).toLong())); } finally { inflight.close(); } }