Example usage for java.io RandomAccessFile writeInt

Introduction

This page collects usage examples for the java.io.RandomAccessFile.writeInt method, drawn from open source projects.

Prototype

public final void writeInt(int v) throws IOException 

Document

Writes an int to the file as four bytes, high byte first.
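
To make "four bytes, high byte first" concrete, here is a minimal self-contained sketch (the file name demo.bin is arbitrary): it writes one int, then reads the raw bytes back, which come out in big-endian order.

import java.io.IOException;
import java.io.RandomAccessFile;

public class WriteIntDemo {
    public static void main(String[] args) throws IOException {
        RandomAccessFile raf = new RandomAccessFile("demo.bin", "rw");
        try {
            raf.writeInt(0x01020304); // stored as four bytes, high byte first
            raf.seek(0);
            for (int i = 0; i < 4; i++) {
                System.out.printf("%02x ", raf.read()); // prints: 01 02 03 04
            }
        } finally {
            raf.close();
        }
    }
}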

Usage

From source file: org.commoncrawl.service.crawler.CrawlSegmentLog.java

public static void writeHeader(File logFilePath, int recordCount) throws IOException {
    RandomAccessFile stream = new RandomAccessFile(logFilePath, "rw");
    try {
        stream.seek(0);
        stream.writeInt(LogFileHeaderBytes);
        stream.writeInt(recordCount);
    } finally {
        // stream.getFD().sync();
        stream.close();
    }
}
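
A hypothetical companion reader (not part of the original source) would read the two ints back in the same order, using the LogFileHeaderBytes constant from the class above to validate the header:

public static int readRecordCount(File logFilePath) throws IOException {
    RandomAccessFile stream = new RandomAccessFile(logFilePath, "r");
    try {
        stream.seek(0);
        // LogFileHeaderBytes is the magic value written by writeHeader above
        if (stream.readInt() != LogFileHeaderBytes) {
            throw new IOException("Bad log file header");
        }
        return stream.readInt(); // the record count
    } finally {
        stream.close();
    }
}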

From source file: org.apache.hadoop.hdfs.TestRaidDfs.java

public static void corruptBlock(Path file, Block blockNum, int numDataNodes, long offset,
        MiniDFSCluster cluster) throws IOException {
    long id = blockNum.getBlockId();

    // Now deliberately remove/truncate data blocks from the block.
    for (int i = 0; i < numDataNodes; i++) {
        File[] dirs = getDataNodeDirs(i, cluster);

        for (int j = 0; j < dirs.length; j++) {
            File[] blocks = dirs[j].listFiles();
            assertTrue("Blocks do not exist in data-dir", (blocks != null) && (blocks.length >= 0));
            for (int idx = 0; idx < blocks.length; idx++) {
                if (blocks[idx].getName().startsWith("blk_" + id) && !blocks[idx].getName().endsWith(".meta")) {
                    // Corrupt
                    File f = blocks[idx];
                    RandomAccessFile raf = new RandomAccessFile(f, "rw");
                    raf.seek(offset);
                    int data = raf.readInt();
                    raf.seek(offset);
                    raf.writeInt(data + 1);
                    raf.close(); // avoid leaking the file handle
                    LOG.info("Corrupted block " + blocks[idx]);
                }
            }
        }
    }
}
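
The seek/readInt/seek/writeInt sequence above is a common idiom for corrupting exactly four bytes in place without changing the file length. Factored into a hypothetical helper for illustration (the name flipIntAt is not from the Hadoop source):

static void flipIntAt(File f, long offset) throws IOException {
    RandomAccessFile raf = new RandomAccessFile(f, "rw");
    try {
        raf.seek(offset);
        int data = raf.readInt(); // read the current value
        raf.seek(offset);         // rewind; readInt advanced the file pointer
        raf.writeInt(data + 1);   // +1 guarantees the stored value changes
    } finally {
        raf.close();
    }
}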

From source file: org.apache.hadoop.hdfs.TestRaidDfs.java

public static void corruptBlock(Path file, Block blockNum, int numDataNodes, boolean delete,
        MiniDFSCluster cluster) throws IOException {
    long id = blockNum.getBlockId();

    // Now deliberately remove/truncate data blocks from the block.
    int numDeleted = 0;
    int numCorrupted = 0;
    for (int i = 0; i < numDataNodes; i++) {
        File[] dirs = getDataNodeDirs(i, cluster);

        for (int j = 0; j < dirs.length; j++) {
            File[] blocks = dirs[j].listFiles();
            assertTrue("Blocks do not exist in data-dir", (blocks != null) && (blocks.length >= 0));
            for (int idx = 0; idx < blocks.length; idx++) {
                LOG.info("block file: " + blocks[idx]);
                if (blocks[idx].getName().startsWith("blk_" + id) && !blocks[idx].getName().endsWith(".meta")) {
                    if (delete) {
                        blocks[idx].delete();
                        LOG.info("Deleted block " + blocks[idx]);
                        numDeleted++;
                    } else {
                        // Corrupt
                        File f = blocks[idx];
                        long seekPos = f.length() / 2;
                        RandomAccessFile raf = new RandomAccessFile(f, "rw");
                        raf.seek(seekPos);
                        int data = raf.readInt();
                        raf.seek(seekPos);
                        raf.writeInt(data + 1);
                        raf.close(); // avoid leaking the file handle
                        LOG.info("Corrupted block " + blocks[idx]);
                        numCorrupted++;
                    }
                }
            }
        }
    }
    assertTrue("Nothing corrupted or deleted", (numCorrupted + numDeleted) > 0);
}

From source file: WordList.java

public static void writeWords(String filename, String[] words) throws IOException {
    // Open the file for read/write access ("rw"). We only need to write,
    // but have to request read access as well
    RandomAccessFile f = new RandomAccessFile(filename, "rw");

    // This array will hold the positions of each word in the file
    long wordPositions[] = new long[words.length];

    // Reserve space at the start of the file for the wordPositions array
    // and the length of that array. 4 bytes for length plus 8 bytes for
    // each long value in the array.
    f.seek(4L + (8 * words.length));

    // Now, loop through the words and write them out to the file,
    // recording the start position of each word. Note that the
    // text is written in the UTF-8 encoding, which uses 1, 2, or 3 bytes
    // per character, so we can't assume that the string length equals
    // the string size on the disk. Also note that the writeUTF() method
    // records the length of the string so it can be read by readUTF().
    for (int i = 0; i < words.length; i++) {
        wordPositions[i] = f.getFilePointer(); // record file position
        f.writeUTF(words[i]); // write word
    }

    // Now go back to the beginning of the file and write the positions
    f.seek(0L); // Start at beginning
    f.writeInt(wordPositions.length); // Write array length
    for (int i = 0; i < wordPositions.length; i++)
        // Loop through array
        f.writeLong(wordPositions[i]); // Write array element
    f.close(); // Close the file when done.
}
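
The layout written above (a 4-byte count, the table of long positions, then the UTF-8 encoded words) can be read back with the matching read methods. A minimal sketch, not part of the excerpt above:

public static String[] readWords(String filename) throws IOException {
    RandomAccessFile f = new RandomAccessFile(filename, "r");
    try {
        int n = f.readInt();             // array length written by writeInt()
        long[] positions = new long[n];
        for (int i = 0; i < n; i++)
            positions[i] = f.readLong(); // the position table
        String[] words = new String[n];
        for (int i = 0; i < n; i++) {
            f.seek(positions[i]);        // jump to each word
            words[i] = f.readUTF();      // readUTF() pairs with writeUTF()
        }
        return words;
    } finally {
        f.close();
    }
}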

From source file: com.btoddb.fastpersitentqueue.JournalFileTest.java

@Test
public void testInitForReadingThenClose() throws IOException {
    UUID id = new UUID();
    RandomAccessFile raFile = new RandomAccessFile(theFile, "rw");
    raFile.writeInt(1);
    Utils.writeUuidToFile(raFile, id);
    raFile.writeLong(123);
    raFile.close();

    JournalFile jf1 = new JournalFile(theFile);
    jf1.initForReading();
    assertThat(jf1.getVersion(), is(JournalFile.VERSION));
    assertThat(jf1.getId(), is(id));
    assertThat(jf1.getNumberOfEntries(), is(123L));

    assertThat(jf1.isOpen(), is(true));
    assertThat(jf1.isWriteMode(), is(false));
    assertThat(jf1.getFilePosition(), is((long) JournalFile.HEADER_SIZE));
    jf1.close();

    assertThat(jf1.isOpen(), is(false));
}

From source file: org.apache.hadoop.hdfs.server.namenode.TestEditLogFileInputStream.java

/**
 * Regression test for HDFS-8965 which verifies that
 * FSEditLogFileInputStream#scanOp verifies Op checksums.
 */
@Test(timeout = 60000)
public void testScanCorruptEditLog() throws Exception {
    Configuration conf = new Configuration();
    File editLog = new File(System.getProperty("test.build.data", "/tmp"), "testCorruptEditLog");

    LOG.debug("Creating test edit log file: " + editLog);
    EditLogFileOutputStream elos = new EditLogFileOutputStream(conf, editLog.getAbsoluteFile(), 8192);
    elos.create(NameNodeLayoutVersion.CURRENT_LAYOUT_VERSION);
    FSEditLogOp.OpInstanceCache cache = new FSEditLogOp.OpInstanceCache();
    FSEditLogOp.MkdirOp mkdirOp = FSEditLogOp.MkdirOp.getInstance(cache);
    mkdirOp.reset();
    mkdirOp.setRpcCallId(123);
    mkdirOp.setTransactionId(1);
    mkdirOp.setInodeId(789L);
    mkdirOp.setPath("/mydir");
    PermissionStatus perms = PermissionStatus.createImmutable("myuser", "mygroup",
            FsPermission.createImmutable((short) 0777));
    mkdirOp.setPermissionStatus(perms);
    elos.write(mkdirOp);
    mkdirOp.reset();
    mkdirOp.setRpcCallId(456);
    mkdirOp.setTransactionId(2);
    mkdirOp.setInodeId(123L);
    mkdirOp.setPath("/mydir2");
    perms = PermissionStatus.createImmutable("myuser", "mygroup", FsPermission.createImmutable((short) 0666));
    mkdirOp.setPermissionStatus(perms);
    elos.write(mkdirOp);
    elos.setReadyToFlush();
    elos.flushAndSync(false);
    elos.close();
    long fileLen = editLog.length();

    LOG.debug("Corrupting last 4 bytes of edit log file " + editLog + ", whose length is " + fileLen);
    RandomAccessFile rwf = new RandomAccessFile(editLog, "rw");
    rwf.seek(fileLen - 4);
    int b = rwf.readInt();
    rwf.seek(fileLen - 4);
    rwf.writeInt(b + 1);
    rwf.close();

    EditLogFileInputStream elis = new EditLogFileInputStream(editLog);
    Assert.assertEquals(NameNodeLayoutVersion.CURRENT_LAYOUT_VERSION, elis.getVersion(true));
    Assert.assertEquals(1, elis.scanNextOp());
    LOG.debug("Read transaction 1 from " + editLog);
    try {
        elis.scanNextOp();
        Assert.fail("Expected scanNextOp to fail when op checksum was corrupt.");
    } catch (IOException e) {
        LOG.debug("Caught expected checksum error when reading corrupt " + "transaction 2", e);
        GenericTestUtils.assertExceptionContains("Transaction is corrupt.", e);
    }
    elis.close();
}

From source file: au.org.ala.layers.grid.GridClassBuilder.java

public static HashMap<Integer, GridClass> buildFromGrid(String filePath) throws IOException {
    File wktDir = new File(filePath);
    wktDir.mkdirs();

    int[] wktMap = null;

    //track values for the SLD
    ArrayList<Integer> maxValues = new ArrayList<Integer>();
    ArrayList<String> labels = new ArrayList<String>();

    HashMap<Integer, GridClass> classes = new HashMap<Integer, GridClass>();
    Properties p = new Properties();
    p.load(new FileReader(filePath + ".txt"));

    boolean mergeProperties = false;

    Map<String, Set<Integer>> groupedKeys = new HashMap<String, Set<Integer>>();
    Map<Integer, Integer> translateKeys = new HashMap<Integer, Integer>();
    Map<String, Integer> translateValues = new HashMap<String, Integer>();
    ArrayList<Integer> keys = new ArrayList<Integer>();
    for (String key : p.stringPropertyNames()) {
        try {
            int k = Integer.parseInt(key);
            keys.add(k);

            //grouping of property file keys by value
            String value = p.getProperty(key);
            Set<Integer> klist = groupedKeys.get(value);
            if (klist == null)
                klist = new HashSet<Integer>();
            else
                mergeProperties = true;
            klist.add(k);
            groupedKeys.put(value, klist);

            if (!translateValues.containsKey(value))
                translateValues.put(value, translateValues.size() + 1);
            translateKeys.put(k, translateValues.get(value));

        } catch (NumberFormatException e) {
            logger.info("Excluding shape key '" + key + "'");
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
        }
    }

    java.util.Collections.sort(keys);

    Grid g = new Grid(filePath);
    boolean generateWkt = false; //((long) g.nrows) * ((long) g.ncols) < (long) Integer.MAX_VALUE;

    if (mergeProperties) {
        g.replaceValues(translateKeys);

        if (!new File(filePath + ".txt.old").exists())
            FileUtils.moveFile(new File(filePath + ".txt"), new File(filePath + ".txt.old"));

        StringBuilder sb = new StringBuilder();
        for (String value : translateValues.keySet()) {
            sb.append(translateValues.get(value)).append("=").append(value).append('\n');
        }
        FileUtils.writeStringToFile(new File(filePath + ".txt"), sb.toString());

        return buildFromGrid(filePath);
    }

    if (generateWkt) {
        for (String name : groupedKeys.keySet()) {
            try {
                Set<Integer> klist = groupedKeys.get(name);

                String key = klist.iterator().next().toString();
                int k = Integer.parseInt(key);

                GridClass gc = new GridClass();
                gc.setName(name);
                gc.setId(k);

                if (klist.size() == 1)
                    klist = null;

                logger.info("getting wkt for " + filePath + " > " + key);

                Map wktIndexed = Envelope.getGridSingleLayerEnvelopeAsWktIndexed(
                        filePath + "," + key + "," + key, klist, wktMap);

                //write class wkt
                File zipFile = new File(filePath + File.separator + key + ".wkt.zip");
                ZipOutputStream zos = null;
                try {
                    zos = new ZipOutputStream(new FileOutputStream(zipFile));
                    zos.putNextEntry(new ZipEntry(key + ".wkt"));
                    zos.write(((String) wktIndexed.get("wkt")).getBytes());
                    zos.flush();
                } catch (Exception e) {
                    logger.error(e.getMessage(), e);
                } finally {
                    if (zos != null) {
                        try {
                            zos.close();
                        } catch (Exception e) {
                            logger.error(e.getMessage(), e);
                        }
                    }
                }
                BufferedOutputStream bos = null;
                try {
                    bos = new BufferedOutputStream(
                            new FileOutputStream(filePath + File.separator + key + ".wkt"));
                    bos.write(((String) wktIndexed.get("wkt")).getBytes());
                    bos.flush();
                } catch (Exception e) {
                    logger.error(e.getMessage(), e);
                } finally {
                    if (bos != null) {
                        try {
                            bos.close();
                        } catch (Exception e) {
                            logger.error(e.getMessage(), e);
                        }
                    }
                }
                logger.info("wkt written to file");
                gc.setArea_km(SpatialUtil.calculateArea((String) wktIndexed.get("wkt")) / 1000.0 / 1000.0);

                //store map
                wktMap = (int[]) wktIndexed.get("map");

                //write wkt index
                FileWriter fw = null;
                try {
                    fw = new FileWriter(filePath + File.separator + key + ".wkt.index");
                    fw.append((String) wktIndexed.get("index"));
                    fw.flush();
                } catch (Exception e) {
                    logger.error(e.getMessage(), e);
                } finally {
                    if (fw != null) {
                        try {
                            fw.close();
                        } catch (Exception e) {
                            logger.error(e.getMessage(), e);
                        }
                    }
                }
                //write wkt index a binary, include extents (minx, miny, maxx, maxy) and area (sq km)
                int minPolygonNumber = 0;
                int maxPolygonNumber = 0;

                RandomAccessFile raf = null;
                try {
                    raf = new RandomAccessFile(filePath + File.separator + key + ".wkt.index.dat", "rw");

                    String[] index = ((String) wktIndexed.get("index")).split("\n");

                    for (int i = 0; i < index.length; i++) {
                        if (index[i].length() > 1) {
                            String[] cells = index[i].split(",");
                            int polygonNumber = Integer.parseInt(cells[0]);
                            raf.writeInt(polygonNumber); //polygon number
                            int polygonStart = Integer.parseInt(cells[1]);
                            raf.writeInt(polygonStart); //character offset

                            if (i == 0) {
                                minPolygonNumber = polygonNumber;
                            } else if (i == index.length - 1) {
                                maxPolygonNumber = polygonNumber;
                            }
                        }
                    }
                } catch (Exception e) {
                    logger.error(e.getMessage(), e);
                } finally {
                    if (raf != null) {
                        try {
                            raf.close();
                        } catch (Exception e) {
                            logger.error(e.getMessage(), e);
                        }
                    }
                }

                //for SLD
                maxValues.add(gc.getMaxShapeIdx());
                labels.add(name.replace("\"", "'"));
                gc.setMinShapeIdx(minPolygonNumber);
                gc.setMaxShapeIdx(maxPolygonNumber);

                logger.info("getting multipolygon for " + filePath + " > " + key);
                MultiPolygon mp = Envelope.getGridEnvelopeAsMultiPolygon(filePath + "," + key + "," + key);
                gc.setBbox(mp.getEnvelope().toText().replace(" (", "(").replace(", ", ","));

                classes.put(k, gc);

                try {
                    //write class kml
                    zos = null;
                    try {
                        zos = new ZipOutputStream(
                                new FileOutputStream(filePath + File.separator + key + ".kml.zip"));

                        zos.putNextEntry(new ZipEntry(key + ".kml"));
                        Encoder encoder = new Encoder(new KMLConfiguration());
                        encoder.setIndenting(true);
                        encoder.encode(mp, KML.Geometry, zos);
                        zos.flush();
                    } catch (Exception e) {
                        logger.error(e.getMessage(), e);
                    } finally {
                        if (zos != null) {
                            try {
                                zos.close();
                            } catch (Exception e) {
                                logger.error(e.getMessage(), e);
                            }
                        }
                    }
                    logger.info("kml written to file");

                    final SimpleFeatureType TYPE = DataUtilities.createType("class",
                            "the_geom:MultiPolygon,id:Integer,name:String");
                    FeatureJSON fjson = new FeatureJSON();
                    SimpleFeatureBuilder featureBuilder = new SimpleFeatureBuilder(TYPE);
                    SimpleFeature sf = featureBuilder.buildFeature(null);

                    //write class geojson
                    zos = null;
                    try {
                        zos = new ZipOutputStream(
                                new FileOutputStream(filePath + File.separator + key + ".geojson.zip"));
                        zos.putNextEntry(new ZipEntry(key + ".geojson"));
                        featureBuilder.add(mp);
                        featureBuilder.add(k);
                        featureBuilder.add(name);

                        fjson.writeFeature(sf, zos);
                        zos.flush();
                    } catch (Exception e) {
                        logger.error(e.getMessage(), e);
                    } finally {
                        if (zos != null) {
                            try {
                                zos.close();
                            } catch (Exception e) {
                                logger.error(e.getMessage(), e);
                            }
                        }
                    }
                    logger.info("geojson written to file");

                    //write class shape file
                    File newFile = new File(filePath + File.separator + key + ".shp");
                    ShapefileDataStoreFactory dataStoreFactory = new ShapefileDataStoreFactory();
                    Map<String, Serializable> params = new HashMap<String, Serializable>();
                    params.put("url", newFile.toURI().toURL());
                    params.put("create spatial index", Boolean.FALSE);
                    ShapefileDataStore newDataStore = null;
                    try {
                        newDataStore = (ShapefileDataStore) dataStoreFactory.createNewDataStore(params);
                        newDataStore.createSchema(TYPE);
                        newDataStore.forceSchemaCRS(DefaultGeographicCRS.WGS84);
                        Transaction transaction = new DefaultTransaction("create");
                        String typeName = newDataStore.getTypeNames()[0];
                        SimpleFeatureSource featureSource = newDataStore.getFeatureSource(typeName);
                        SimpleFeatureStore featureStore = (SimpleFeatureStore) featureSource;
                        featureStore.setTransaction(transaction);
                        List<SimpleFeature> features = new ArrayList<SimpleFeature>();

                        DefaultFeatureCollection collection = new DefaultFeatureCollection();
                        collection.addAll(features);
                        featureStore.setTransaction(transaction);

                        features.add(sf);
                        featureStore.addFeatures(collection);
                        transaction.commit();
                        transaction.close();
                    } catch (Exception e) {
                        logger.error(e.getMessage(), e);
                    } finally {
                        if (newDataStore != null) {
                            try {
                                newDataStore.dispose();
                            } catch (Exception e) {
                                logger.error(e.getMessage(), e);
                            }
                        }
                    }

                    zos = null;
                    try {
                        zos = new ZipOutputStream(
                                new FileOutputStream(filePath + File.separator + key + ".shp.zip"));
                        //add .dbf .shp .shx .prj
                        String[] exts = { ".dbf", ".shp", ".shx", ".prj" };
                        for (String ext : exts) {
                            zos.putNextEntry(new ZipEntry(key + ext));
                            FileInputStream fis = null;
                            try {
                                fis = new FileInputStream(filePath + File.separator + key + ext);
                                byte[] buffer = new byte[1024];
                                int size;
                                while ((size = fis.read(buffer)) > 0) {
                                    zos.write(buffer, 0, size);
                                }
                            } catch (Exception e) {
                                logger.error(e.getMessage(), e);
                            } finally {
                                if (fis != null) {
                                    try {
                                        fis.close();
                                    } catch (Exception e) {
                                        logger.error(e.getMessage(), e);
                                    }
                                }
                            }
                            //remove unzipped files
                            new File(filePath + File.separator + key + ext).delete();
                        }
                        zos.flush();
                    } catch (Exception e) {
                        logger.error(e.getMessage(), e);
                    } finally {
                        if (zos != null) {
                            try {
                                zos.close();
                            } catch (Exception e) {
                                logger.error(e.getMessage(), e);
                            }
                        }
                    }
                    logger.info("shape file written to zip");
                } catch (Exception e) {
                    logger.error(e.getMessage(), e);
                }
            } catch (Exception e) {
                logger.error(e.getMessage(), e);
            }
        }

        //write polygon mapping
        g.writeGrid(filePath + File.separator + "polygons", wktMap, g.xmin, g.ymin, g.xmax, g.ymax, g.xres,
                g.yres, g.nrows, g.ncols);

        //copy the header file to get it exactly the same, but change the data type
        copyHeaderAsInt(filePath + ".grd", filePath + File.separator + "polygons.grd");
    } else {
        //build classes without generating polygons
        Map<Float, float[]> info = new HashMap<Float, float[]>();
        for (int j = 0; j < keys.size(); j++) {
            info.put(keys.get(j).floatValue(), new float[] { 0, Float.NaN, Float.NaN, Float.NaN, Float.NaN });
        }

        g.getClassInfo(info);

        for (int j = 0; j < keys.size(); j++) {
            int k = keys.get(j);
            String key = String.valueOf(k);

            String name = p.getProperty(key);

            GridClass gc = new GridClass();
            gc.setName(name);
            gc.setId(k);

            //for SLD
            maxValues.add(Integer.valueOf(key));
            labels.add(name.replace("\"", "'"));
            gc.setMinShapeIdx(Integer.valueOf(key));
            gc.setMaxShapeIdx(Integer.valueOf(key));

            float[] stats = info.get(keys.get(j).floatValue());

            //only include if area > 0
            if (stats[0] > 0) {
                gc.setBbox("POLYGON((" + stats[1] + " " + stats[2] + "," + stats[1] + " " + stats[4] + ","
                        + stats[3] + " " + stats[4] + "," + stats[3] + " " + stats[2] + "," + stats[1] + " "
                        + stats[2] + "))");

                gc.setArea_km((double) stats[0]);
                classes.put(k, gc);
            }
        }
    }

    //write sld
    exportSLD(filePath + File.separator + "polygons.sld", new File(filePath + ".txt").getName(), maxValues,
            labels);

    writeProjectionFile(filePath + File.separator + "polygons.prj");

    //write .classes.json
    ObjectMapper mapper = new ObjectMapper();
    mapper.writeValue(new File(filePath + ".classes.json"), classes);

    return classes;
}

From source file: name.martingeisse.stackd.server.section.storage.FolderBasedSectionStorage.java

/**
 * Writes the section data to the end of the file and updates its table-of-contents entry.
 */
private void saveSectionToFile(final InputStream in, final RandomAccessFile access, final int tocIndex)
        throws IOException {

    // write the section to the end of the file
    final int dataAddress = (int) access.length();
    access.seek(dataAddress);
    final byte[] compressedCubeData = IOUtils.toByteArray(in);
    access.write(compressedCubeData);

    // update the ToC entry
    access.seek(tocIndex * 12);
    access.writeInt(dataAddress);
    access.writeInt(compressedCubeData.length);
    access.writeInt(0);

}
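
Each table-of-contents entry written here occupies exactly 12 bytes (three 4-byte ints), which is why the method seeks to tocIndex * 12. A hypothetical reader for one entry, assuming the same layout:

private long[] readTocEntry(final RandomAccessFile access, final int tocIndex) throws IOException {
    access.seek(tocIndex * 12);               // each entry is three 4-byte ints
    final int dataAddress = access.readInt(); // where the section data starts
    final int dataLength = access.readInt();  // compressed length in bytes
    access.readInt();                         // third field, written as zero above
    return new long[] { dataAddress, dataLength };
}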

From source file: org.apache.hadoop.hdfs.server.namenode.TestFileJournalManager.java

/** 
 * Corrupt an edit log file after the start segment transaction
 */
private void corruptAfterStartSegment(File f) throws IOException {
    RandomAccessFile raf = new RandomAccessFile(f, "rw");
    raf.seek(0x20); // skip version and first transaction and a bit of next transaction
    for (int i = 0; i < 1000; i++) {
        raf.writeInt(0xdeadbeef);
    }
    raf.close();
}

From source file: org.apache.flume.tools.TestFileChannelIntegrityTool.java

public void doTestFixCorruptEvents(boolean withCheckpoint) throws Exception {
    Set<String> corruptFiles = new HashSet<String>();
    File[] files = dataDir.listFiles(new FilenameFilter() {
        @Override
        public boolean accept(File dir, String name) {
            if (name.contains("lock") || name.contains("meta")) {
                return false;
            }
            return true;
        }
    });
    Random random = new Random();
    int corrupted = 0;
    for (File dataFile : files) {
        LogFile.SequentialReader reader = new LogFileV3.SequentialReader(dataFile, null);
        RandomAccessFile handle = new RandomAccessFile(dataFile, "rw");
        long eventPosition1 = reader.getPosition();
        LogRecord rec = reader.next();
        //No point corrupting commits, so ignore them
        if (rec == null || rec.getEvent().getClass().getName().equals("org.apache.flume.channel.file.Commit")) {
            handle.close();
            reader.close();
            continue;
        }
        long eventPosition2 = reader.getPosition();
        rec = reader.next();
        handle.seek(eventPosition1 + 100);
        handle.writeInt(random.nextInt());
        corrupted++;
        corruptFiles.add(dataFile.getName());
        if (rec == null || rec.getEvent().getClass().getName().equals("org.apache.flume.channel.file.Commit")) {
            handle.close();
            reader.close();
            continue;
        }
        handle.seek(eventPosition2 + 100);
        handle.writeInt(random.nextInt());
        corrupted++;
        handle.close();
        reader.close();
    }
    FileChannelIntegrityTool tool = new FileChannelIntegrityTool();
    tool.run(new String[] { "-l", dataDir.toString() });
    FileChannel channel = new FileChannel();
    channel.setName("channel");
    String cp;
    if (withCheckpoint) {
        cp = origCheckpointDir.toString();
    } else {
        FileUtils.deleteDirectory(checkpointDir);
        Assert.assertTrue(checkpointDir.mkdirs());
        cp = checkpointDir.toString();
    }
    ctx.put(FileChannelConfiguration.CHECKPOINT_DIR, cp);
    ctx.put(FileChannelConfiguration.DATA_DIRS, dataDir.toString());
    channel.configure(ctx);
    channel.start();
    Transaction tx = channel.getTransaction();
    tx.begin();
    int i = 0;
    while (channel.take() != null) {
        i++;
    }
    tx.commit();
    tx.close();
    channel.stop();
    Assert.assertEquals(25 - corrupted, i);
    files = dataDir.listFiles(new FilenameFilter() {
        @Override
        public boolean accept(File dir, String name) {
            if (name.contains(".bak")) {
                return true;
            }
            return false;
        }
    });
    Assert.assertEquals(corruptFiles.size(), files.length);
    for (File file : files) {
        String name = file.getName();
        name = name.replaceAll(".bak", "");
        Assert.assertTrue(corruptFiles.remove(name));
    }
    Assert.assertTrue(corruptFiles.isEmpty());
}