Example usage for java.io DataInputStream close

List of usage examples for java.io DataInputStream close

Introduction

This page lists example usages of java.io.DataInputStream.close().

Prototype

public void close() throws IOException 

Document

Closes this input stream and releases any system resources associated with the stream.
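
Since Java 7, DataInputStream implements AutoCloseable (via Closeable), so close() is usually invoked implicitly through try-with-resources rather than called by hand. A minimal sketch; the file name data.bin is hypothetical:

import java.io.BufferedInputStream;
import java.io.DataInputStream;
import java.io.FileInputStream;
import java.io.IOException;

public class CloseExample {
    public static void main(String[] args) throws IOException {
        // try-with-resources calls close() automatically, even if an
        // exception is thrown while reading
        try (DataInputStream in = new DataInputStream(
                new BufferedInputStream(new FileInputStream("data.bin")))) {
            System.out.println("First int: " + in.readInt());
        } // in.close() has already run at this point
    }
}

Most of the examples below predate try-with-resources and therefore call close() explicitly, sometimes from a finally block.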

Usage

From source file:org.apache.giraph.partition.DiskBackedOnlineComputePartitionStore.java

/**
 * Load a partition from disk. It deletes the files after the load, except
 * for the edges, if the graph is static.
 *
 * @param id
 *            The id of the partition to load
 * @param numVertices
 *            The number of vertices contained on disk
 * @return The partition
 * @throws IOException
 */
private Partition<I, V, E, M> loadPartition(Integer id, int numVertices) throws IOException {
    Partition<I, V, E, M> partition = conf.createPartition(id, context);
    File file = new File(getVerticesPath(id));
    //      if (LOG.isInfoEnabled()) {
    //         LOG.info("loadPartition: reading partition vertices "
    //               + partition.getId() + ", size=" + file.length() + " from "
    //               + file.getAbsolutePath());
    //      }
    DataInputStream inputStream = new DataInputStream(new BufferedInputStream(new FileInputStream(file)));
    for (int i = 0; i < numVertices; ++i) {
        Vertex<I, V, E, M> vertex = conf.createVertex();
        readVertexData(inputStream, vertex);
        partition.putVertex(vertex);
    }
    inputStream.close();
    file.delete();
    // if(!withEdges && hotPartitionIds!=null &&
    // hotPartitionIds.contains(id)){
    // return partition;
    // }
    file = new File(getEdgesPath(id));
    inputStream = new DataInputStream(new BufferedInputStream(new FileInputStream(file)));
    for (int i = 0; i < numVertices; ++i) {
        readOutEdges(inputStream, partition);
    }
    inputStream.close();
    /*
     * If the graph is static, keep the file around.
     */
    if (!conf.isStaticGraph()) {
        file.delete();
    }
    return partition;
}
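
Note that closing the outermost DataInputStream also closes the wrapped BufferedInputStream and FileInputStream, because FilterInputStream.close() delegates to the underlying stream. If readVertexData or readOutEdges were to throw, however, the stream would never be closed; a try/finally or try-with-resources around each read loop would be safer.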

From source file:org.apache.giraph.graph.BspServiceWorker.java

@Override
public void loadCheckpoint(long superstep) {
    // Algorithm:
    // Examine all the partition owners and load the ones
    // that match my hostname and id from the master designated checkpoint
    // prefixes.
    long startPos = 0;
    int loadedPartitions = 0;
    for (PartitionOwner partitionOwner : workerGraphPartitioner.getPartitionOwners()) {
        if (partitionOwner.getWorkerInfo().equals(getWorkerInfo())) {
            String metadataFile = partitionOwner.getCheckpointFilesPrefix() + CHECKPOINT_METADATA_POSTFIX;
            String partitionsFile = partitionOwner.getCheckpointFilesPrefix() + CHECKPOINT_VERTICES_POSTFIX;
            try {
                int partitionId = -1;
                DataInputStream metadataStream = getFs().open(new Path(metadataFile));
                int partitions = metadataStream.readInt();
                for (int i = 0; i < partitions; ++i) {
                    startPos = metadataStream.readLong();
                    partitionId = metadataStream.readInt();
                    if (partitionId == partitionOwner.getPartitionId()) {
                        break;
                    }
                }
                if (partitionId != partitionOwner.getPartitionId()) {
                    throw new IllegalStateException("loadCheckpoint: " + partitionOwner + " not found!");
                }
                metadataStream.close();
                Partition<I, V, E, M> partition = new Partition<I, V, E, M>(getConfiguration(), partitionId);
                DataInputStream partitionsStream = getFs().open(new Path(partitionsFile));
                if (partitionsStream.skip(startPos) != startPos) {
                    throw new IllegalStateException(
                            "loadCheckpoint: Failed to skip " + startPos + " on " + partitionsFile);
                }
                partition.readFields(partitionsStream);
                partitionsStream.close();
                if (LOG.isInfoEnabled()) {
                    LOG.info("loadCheckpoint: Loaded partition " + partition);
                }
                if (getPartitionMap().put(partitionId, partition) != null) {
                    throw new IllegalStateException(
                            "loadCheckpoint: Already has partition owner " + partitionOwner);
                }
                ++loadedPartitions;
            } catch (IOException e) {
                throw new RuntimeException("loadCheckpoing: Failed to get partition owner " + partitionOwner,
                        e);
            }
        }
    }
    if (LOG.isInfoEnabled()) {
        LOG.info("loadCheckpoint: Loaded " + loadedPartitions + " partitions of out "
                + workerGraphPartitioner.getPartitionOwners().size() + " total.");
    }
    // Communication service needs to setup the connections prior to
    // processing vertices
    commService.setup();
}

From source file:it.classhidra.core.controller.bsController.java

public static String getPropertyMultipart(String key, HttpServletRequest req) {

    try {
        String file = (String) req.getAttribute("multipart/form-data");
        DataInputStream in = null;
        String contentType = req.getContentType();
        if (contentType != null && contentType.indexOf("multipart/form-data") != -1) {
            if (file == null) {
                in = new DataInputStream(req.getInputStream());
                int formDataLength = req.getContentLength();
                byte dataBytes[] = new byte[formDataLength];
                int bytesRead = 0;
                int totalBytesRead = 0;
                while (totalBytesRead < formDataLength) {
                    // read() may return fewer bytes than requested; cap the
                    // length at the bytes still missing so the buffer is not
                    // overrun, and stop on end of stream
                    bytesRead = in.read(dataBytes, totalBytesRead, formDataLength - totalBytesRead);
                    if (bytesRead < 0)
                        break;
                    totalBytesRead += bytesRead;
                }
                file = new String(dataBytes, 0, totalBytesRead, "ASCII");
                in.close();
                req.setAttribute("multipart/form-data", file);
            }

            String check = "Content-Disposition: form-data; name=\"" + key + "\"";

            int pos = file.indexOf(check);

            if (pos > -1) {
                int pos1 = file.indexOf("-----------------------------", pos);
                if (pos1 > -1) {
                    String result = file.substring(pos + check.length(), pos1);
                    result = result.replace('\n', ' ').replace('\r', ' ');
                    return result.trim();
                }
            }
        }

    } catch (Exception e) {
        new bsControllerException(e, iStub.log_DEBUG);

        return null;
    }
    return null;
}
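
The manual read loop above can be replaced by DataInputStream.readFully, which blocks until exactly the requested number of bytes has been read or throws EOFException if the stream ends early. A minimal sketch, reusing the names from the example above:

    byte dataBytes[] = new byte[req.getContentLength()];
    in.readFully(dataBytes);

This also removes the need for the bytesRead/totalBytesRead bookkeeping.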

From source file:it.unimi.dsi.sux4j.io.ChunkedHashStore.java

/** Returns an iterator over the chunks of this chunked hash store.
 *
 * @return an iterator over the chunks of this chunked hash store.
 */

public Iterator<Chunk> iterator() {
    if (closed)
        throw new IllegalStateException("This " + getClass().getSimpleName() + " has been closed ");
    for (DataOutputStream d : dos)
        try {
            d.flush();
        } catch (IOException e) {
            throw new RuntimeException(e);
        }

    int m = 0;
    for (int i = 0; i < virtualDiskChunks; i++) {
        int s = 0;
        for (int j = 0; j < diskChunkStep; j++)
            s += count[i * diskChunkStep + j];
        if (s > m)
            m = s;
    }

    final int maxCount = m;

    return new AbstractObjectIterator<Chunk>() {
        private int chunk;
        private FastBufferedInputStream fbis;
        private int last;
        private int chunkSize;
        private final long[] buffer0 = new long[maxCount];
        private final long[] buffer1 = new long[maxCount];
        private final long[] buffer2 = new long[maxCount];
        private final long[] data = hashMask != 0 ? null : new long[maxCount];

        public boolean hasNext() {
            return chunk < chunks;
        }

        @SuppressWarnings("unchecked")
        public Chunk next() {
            if (!hasNext())
                throw new NoSuchElementException();
            final long[] buffer0 = this.buffer0;

            if (chunk % (chunks / virtualDiskChunks) == 0) {
                final int diskChunk = (int) (chunk / (chunks / virtualDiskChunks));
                final long[] buffer1 = this.buffer1, buffer2 = this.buffer2;

                chunkSize = 0;
                try {
                    if (diskChunkStep == 1) {
                        fbis = new FastBufferedInputStream(new FileInputStream(file[diskChunk]));
                        chunkSize = count[diskChunk];
                    } else {
                        final FileInputStream[] fis = new FileInputStream[diskChunkStep];
                        for (int i = 0; i < fis.length; i++) {
                            fis[i] = new FileInputStream(file[diskChunk * diskChunkStep + i]);
                            chunkSize += count[diskChunk * diskChunkStep + i];
                        }
                        fbis = new FastBufferedInputStream(new SequenceInputStream(
                                new IteratorEnumeration(Arrays.asList(fis).iterator())));
                    }
                    final DataInputStream dis = new DataInputStream(fbis);

                    final long triple[] = new long[3];
                    int count = 0;
                    for (int j = 0; j < chunkSize; j++) {
                        triple[0] = dis.readLong();
                        triple[1] = dis.readLong();
                        triple[2] = dis.readLong();

                        if (DEBUG)
                            System.err.println("From disk: " + Arrays.toString(triple));

                        if (filter == null || filter.evaluate(triple)) {
                            buffer0[count] = triple[0];
                            buffer1[count] = triple[1];
                            buffer2[count] = triple[2];
                            if (hashMask == 0)
                                data[count] = dis.readLong();
                            count++;
                        } else if (hashMask == 0)
                            dis.readLong(); // Discard data
                    }

                    chunkSize = count;
                    dis.close();
                } catch (IOException e) {
                    throw new RuntimeException(e);
                }

                it.unimi.dsi.fastutil.Arrays.quickSort(0, chunkSize, new AbstractIntComparator() {
                    private static final long serialVersionUID = 0L;

                    public int compare(final int x, final int y) {
                        int t = Long.signum(buffer0[x] - buffer0[y]);
                        if (t != 0)
                            return t;
                        t = Long.signum(buffer1[x] - buffer1[y]);
                        if (t != 0)
                            return t;
                        return Long.signum(buffer2[x] - buffer2[y]);
                    }
                }, new Swapper() {
                    public void swap(final int x, final int y) {
                        final long e0 = buffer0[x], e1 = buffer1[x], e2 = buffer2[x];
                        buffer0[x] = buffer0[y];
                        buffer1[x] = buffer1[y];
                        buffer2[x] = buffer2[y];
                        buffer0[y] = e0;
                        buffer1[y] = e1;
                        buffer2[y] = e2;
                        if (hashMask == 0) {
                            final long v = data[x];
                            data[x] = data[y];
                            data[y] = v;
                        }
                    }
                });

                if (DEBUG) {
                    for (int i = 0; i < chunkSize; i++)
                        System.err.println(buffer0[i] + ", " + buffer1[i] + ", " + buffer2[i]);
                }

                if (!checkedForDuplicates && chunkSize > 1)
                    for (int i = chunkSize - 1; i-- != 0;)
                        if (buffer0[i] == buffer0[i + 1] && buffer1[i] == buffer1[i + 1]
                                && buffer2[i] == buffer2[i + 1])
                            throw new ChunkedHashStore.DuplicateException();
                if (chunk == chunks - 1)
                    checkedForDuplicates = true;
                last = 0;
            }

            final int start = last;
            while (last < chunkSize && (chunkShift == Long.SIZE ? 0 : buffer0[last] >>> chunkShift) == chunk)
                last++;
            chunk++;

            return new Chunk(buffer0, buffer1, buffer2, data, hashMask, start, last);
        }
    };
}

From source file:com.intel.xdk.device.Device.java

public void getRemoteData(String requestUrl, String requestMethod, String requestBody, String successCallback,
        String errorCallback) {
    Log.d("getRemoteData", "url: " + requestUrl + ", method: " + requestMethod + ", body: " + requestBody);

    try {
        URL url = new URL(requestUrl);
        connection = (HttpURLConnection) url.openConnection();

        connection.setDoInput(true);
        connection.setDoOutput(true);
        connection.setUseCaches(false);

        connection.setRequestMethod(requestMethod);

        //Write requestBody
        DataOutputStream outputStream = new DataOutputStream(connection.getOutputStream());
        outputStream.writeBytes(requestBody);
        outputStream.flush();
        outputStream.close();

        //Get response code and response message
        int responseCode = connection.getResponseCode();
        String responseMessage = connection.getResponseMessage();

        //Get response Message
        DataInputStream inputStream = new DataInputStream(connection.getInputStream());
        if (responseCode == 200) {
            String temp;
            String responseBody = "";
            while ((temp = inputStream.readLine()) != null) {
                responseBody += temp;
            }
            //callbackContext.success(responseBody);
            String js = "javascript:" + successCallback + "('" + responseBody + "');";
            injectJS(js);
        } else {
            //callbackContext.error("Fail to get the response, response code: " + responseCode + ", response message: " + responseMessage);
            String js = "javascript:" + errorCallback + "(" + "'response code :" + responseCode + "');";
            injectJS(js);
        }

        inputStream.close();
    } catch (IOException e) {
        Log.d("request", e.getMessage());
    }
}
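
DataInputStream.readLine(), used above to collect the response body, has been deprecated since JDK 1.1 because it does not convert bytes to characters correctly. A minimal sketch of the usual replacement, wrapping the connection's stream in a BufferedReader (the UTF-8 charset is an assumption; use whatever the server actually sends):

    BufferedReader reader = new BufferedReader(
            new InputStreamReader(connection.getInputStream(), "UTF-8"));
    StringBuilder responseBody = new StringBuilder();
    String line;
    while ((line = reader.readLine()) != null) {
        responseBody.append(line);
    }
    reader.close();

The same applies to getRemoteDataWithID further down this page.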

From source file:org.apache.sshd.server.sftp.SftpSubsystem.java

public void run() {
    DataInputStream dis = null;
    try {
        dis = new DataInputStream(in);
        while (true) {
            int length = dis.readInt();
            if (length < 5) {
                throw new IllegalArgumentException();
            }
            Buffer buffer = new Buffer(length + 4);
            buffer.putInt(length);
            int nb = length;
            while (nb > 0) {
                int l = dis.read(buffer.array(), buffer.wpos(), nb);
                if (l < 0) {
                    throw new IllegalArgumentException();
                }
                buffer.wpos(buffer.wpos() + l);
                nb -= l;
            }
            process(buffer);
        }
    } catch (Throwable t) {
        if (!closed && !(t instanceof EOFException)) { // ignore EOF and anything thrown once closed
            log.error("Exception caught in SFTP subsystem", t);
        }
    } finally {
        if (dis != null) {
            try {
                dis.close();
            } catch (IOException ioe) {
                log.error("Could not close DataInputStream", ioe);
            }
        }

        if (handles != null) {
            for (Map.Entry<String, Handle> entry : handles.entrySet()) {
                Handle handle = entry.getValue();
                try {
                    handle.close();
                } catch (IOException ioe) {
                    log.error("Could not close open handle: " + entry.getKey(), ioe);
                }
            }
        }
        dis = null;

        callback.onExit(0);
    }
}

From source file:org.apache.jsp.fileUploader_jsp.java

private String UpdateToShare(byte[] bytes, String mimeType, String title, String description, String prevId,
        Set<String> communities, boolean isJson, String type, boolean newShare, HttpServletRequest request,
        HttpServletResponse response) {
    String charset = "UTF-8";
    String url = "";
    try {
        if (isJson) {
            //first check if bytes are actually json
            try {
                new JsonParser().parse(new String(bytes));
            } catch (Exception ex) {
                return "Failed, file was not valid JSON";
            }
            if (newShare)
                url = API_ROOT + "social/share/add/json/" + URLEncoder.encode(type, charset) + "/"
                        + URLEncoder.encode(title, charset) + "/" + URLEncoder.encode(description, charset)
                        + "/";
            else
                url = API_ROOT + "social/share/update/json/" + prevId + "/" + URLEncoder.encode(type, charset)
                        + "/" + URLEncoder.encode(title, charset) + "/"
                        + URLEncoder.encode(description, charset) + "/";
        } else {
            if (newShare)
                url = API_ROOT + "social/share/add/binary/" + URLEncoder.encode(title, charset) + "/"
                        + URLEncoder.encode(description, charset) + "/";
            else
                url = API_ROOT + "social/share/update/binary/" + prevId + "/"
                        + URLEncoder.encode(title, charset) + "/" + URLEncoder.encode(description, charset)
                        + "/";
        }

        if (localCookie)
            CookieHandler.setDefault(cm);
        URLConnection connection = new URL(url).openConnection();
        connection.setDoOutput(true);
        connection.setRequestProperty("Accept-Charset", charset);
        String cookieVal = getBrowserInfiniteCookie(request);
        if (cookieVal != null) {
            connection.addRequestProperty("Cookie", "infinitecookie=" + cookieVal);
            connection.setDoInput(true);
            connection.setDoOutput(true);
            connection.setRequestProperty("Accept-Charset", "UTF-8");
        }
        if (mimeType != null && mimeType.length() > 0)
            connection.setRequestProperty("Content-Type", mimeType + ";charset=" + charset);
        DataOutputStream output = new DataOutputStream(connection.getOutputStream());
        output.write(bytes);
        DataInputStream responseStream = new DataInputStream(connection.getInputStream());

        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        int nRead;
        byte[] data = new byte[16384];
        while ((nRead = responseStream.read(data, 0, data.length)) != -1) {
            buffer.write(data, 0, nRead);
        }

        String json = buffer.toString();
        String newCookie = getConnectionInfiniteCookie(connection);
        if (newCookie != null && response != null) {
            setBrowserInfiniteCookie(response, newCookie, request.getServerPort());
        }
        buffer.flush();
        buffer.close();
        output.close();
        responseStream.close();

        if (isJson) {
            jsonResponse jr = new Gson().fromJson(json, jsonResponse.class);
            if (jr == null) {
                return "Failed: " + json;
            }
            if (jr.response.success == true) {
                if (jr.data != null && jr.data._id != null) {
                    addRemoveCommunities(jr.data._id, communities, request, response);
                    return jr.data._id; //When a new upload, jr.data._id contains the ShareID for the upload
                }
            }
            return "Upload Failed: " + jr.response.message;
        } else {
            modResponse mr = new Gson().fromJson(json, modResponse.class);
            if (mr == null) {
                return "Failed: " + json;
            }
            if (mr.response.success == true) {
                if (prevId != null && mr.data == null) {
                    addRemoveCommunities(prevId, communities, request, response);
                    return prevId;
                } else {
                    addRemoveCommunities(mr.data, communities, request, response);
                    return mr.data; //When a new upload, mr.data contains the ShareID for the upload
                }
            } else {
                return "Upload Failed: " + mr.response.message;
            }
        }
    } catch (IOException e) {
        e.printStackTrace();
        return "Upload Failed: " + e.getMessage();
    }
}

From source file:com.intel.xdk.device.Device.java

public void getRemoteDataWithID(String requestUrl, String requestMethod, String requestBody, int uuid,
        String successCallback, String errorCallback) {
    try {
        URL url = new URL(requestUrl);
        connection = (HttpURLConnection) url.openConnection();

        connection.setDoInput(true);
        connection.setDoOutput(true);
        connection.setUseCaches(false);

        connection.setRequestMethod(requestMethod);

        //Write requestBody
        DataOutputStream outputStream = new DataOutputStream(connection.getOutputStream());
        outputStream.writeBytes(requestBody);
        outputStream.writeBytes("&uuid=" + uuid);
        outputStream.flush();
        outputStream.close();

        //Get response code and response message
        int responseCode = connection.getResponseCode();
        String responseMessage = connection.getResponseMessage();

        //Get response Message
        DataInputStream inputStream = new DataInputStream(connection.getInputStream());
        if (responseCode == 200) {
            String temp;
            String responseBody = "";
            while ((temp = inputStream.readLine()) != null) {
                responseBody += temp;
            }
            //callbackContext.success(responseBody);
            String js = "javascript:" + successCallback + "(" + uuid + ", '" + responseBody + "');";
            injectJS(js);
        } else {
            //callbackContext.error("Fail to get the response, response code: " + responseCode + ", response message: " + responseMessage);
            String js = "javascript:" + errorCallback + "(" + uuid + ", '" + "Fail to get the response" + "');";
            injectJS(js);
        }

        inputStream.close();
    } catch (IOException e) {
        Log.d("request", e.getMessage());
    }
}

From source file:org.apache.hadoop.fs.dfsioe.TestDFSIOEnh.java

@Deprecated
protected static void analyzeResult(FileSystem fs, int testType, long execTime, String resFileName, int nrFiles,
        long fileSize, long tStart, int plotInterval, long sampleUnit, int threshold, String tputResFileName,
        boolean tputReportEach, boolean tputReportTotal) throws IOException {

    //the original report
    //TestDFSIO.analyzeResult(fs,testType,execTime,resFileName);

    long tasks = 0;
    long size = 0;
    long time = 0;
    float rate = 0;
    float sqrate = 0;

    Path reduceFile;
    if (testType == TEST_TYPE_WRITE)
        reduceFile = new Path(DfsioeConfig.getInstance().getWriteDir(fsConfig), "part-00000");
    else
        reduceFile = new Path(DfsioeConfig.getInstance().getReadDir(fsConfig), "part-00000");

    //long time = 0;
    float loggingTime = 0;
    String line;
    ArrayList<String> wrSamples = new ArrayList<String>();

    int maxslot = (int) (execTime / plotInterval) + 1;
    int[] concurrency = new int[maxslot + 1];
    for (int i = 0; i < maxslot + 1; i++)
        concurrency[i] = 0;

    DataInputStream in = null;
    BufferedReader lines = null;
    try {
        in = new DataInputStream(fs.open(reduceFile));
        lines = new BufferedReader(new InputStreamReader(in));
        while ((line = lines.readLine()) != null) {
            StringTokenizer tokens = new StringTokenizer(line, " \t\n\r\f%");
            String attr = tokens.nextToken();
            if (attr.endsWith(":time")) {
                time = Long.parseLong(tokens.nextToken());
            } else if (attr.endsWith(":logging_time")) {
                loggingTime = Float.parseFloat(tokens.nextToken());
            } else if (attr.endsWith(":tput_samples")) {
                String[] tags = attr.split(":");
                wrSamples.add(tags[1]);
                wrSamples.add(tokens.nextToken());
            } else if (attr.endsWith(":io_start_end")) {
                String[] t = tokens.nextToken().split(";");
                int start = (int) ((Long.parseLong(t[0]) - tStart) / plotInterval) + 1;
                int end = (int) ((Long.parseLong(t[1]) - tStart) / plotInterval) - 1;
                if (start < 0)
                    start = 0;
                for (int i = start; i <= end; i++) {
                    if (i > concurrency.length - 1)
                        break;
                    concurrency[i]++;
                }
            } else if (attr.endsWith(":tasks")) {
                tasks = Long.parseLong(tokens.nextToken());
            } else if (attr.endsWith(":size")) {
                size = Long.parseLong(tokens.nextToken());
            } else if (attr.endsWith(":rate")) {
                rate = Float.parseFloat(tokens.nextToken());
            } else if (attr.endsWith(":sqrate")) {
                sqrate = Float.parseFloat(tokens.nextToken());
            }
        }
    } finally {
        if (in != null)
            in.close();
        if (lines != null)
            lines.close();
    }

    double med = rate / 1000 / tasks;
    double stdDev = Math.sqrt(Math.abs(sqrate / 1000 / tasks - med * med));
    String resultLines[] = { "----- TestDFSIO ----- : "
            + ((testType == TEST_TYPE_WRITE) ? "write" : (testType == TEST_TYPE_READ) ? "read" : "unknown"),
            "           Date & time: " + new Date(System.currentTimeMillis()),
            "       Number of files: " + tasks, "Total MBytes processed: " + size / MEGA,
            "     Throughput mb/sec: " + size * 1000.0 / (time * MEGA), "Average IO rate mb/sec: " + med,
            " IO rate std deviation: " + stdDev, "    Test exec time sec: " + (float) execTime / 1000, "" };

    String[] tputResultLines = analyzeTputSamples(wrSamples, nrFiles, fileSize, tStart, execTime, concurrency,
            plotInterval, sampleUnit, threshold, tputResFileName, tputReportTotal, tputReportEach);

    String enhResultLines[] = { "-- Extended Metrics --   : "
            + ((testType == TEST_TYPE_WRITE) ? "write" : (testType == TEST_TYPE_READ) ? "read" : "unknown"),
            "Result file name         : " + tputResFileName,
            "Sampling overhead        : " + (loggingTime / time) * 100 + "%",
            "Reference Start Time     : " + String.valueOf(tStart) };

    PrintStream res = new PrintStream(new FileOutputStream(new File(resFileName), true));

    for (int i = 0; i < resultLines.length; i++) {
        LOG.info(resultLines[i]);
        res.println(resultLines[i]);
    }

    for (int i = 0; i < enhResultLines.length; i++) {
        LOG.info(enhResultLines[i]);
        res.println(enhResultLines[i]);
    }

    for (int j = 0; j < tputResultLines.length; j++) {
        LOG.info(tputResultLines[j]);
        res.println(tputResultLines[j]);
    }
    res.close();
}

From source file:com.cohesionforce.dis.BinaryConverter.java

public void run() {
    int count = 0;

    System.out.println("Opening file to convert: " + inputFile);
    FileInputStream fis;
    try {
        fis = new FileInputStream(inputFile);
    } catch (FileNotFoundException e1) {
        e1.printStackTrace();
        return;
    }

    DataInputStream dis = new DataInputStream(fis);
    startWriters();

    System.out.println("Starting to convert PDUs");

    while (done == false) {
        byte buffer[] = new byte[MAX_PDU_SIZE];

        byte pduType;
        try {
            pduType = dis.readByte();
            int pduSize = dis.readInt();
            // read() may return fewer bytes than requested, so the original
            // asserts could fail spuriously; readFully blocks until the
            // requested bytes arrive or throws EOFException
            dis.readFully(buffer, 0, 19); // remainder of the PDU header
            dis.readFully(buffer, 0, pduSize);
            ++count;

            // Convert the byte array to an object
            Object object;
            object = unmarshaller.getPdu(buffer);
            if (object != null) {
                logPdu(object, pduType);
            }
        } catch (EOFException e) {
            done = true;
        } catch (IOException e) {
            e.printStackTrace();
        } catch (Exception e) {
            done = true;
            e.printStackTrace();
        }

        if (count % 100000 == 0) {
            System.out.println("Converted " + count + " PDUs");
        }
    } // end loop
    try {
        dis.close();
    } catch (IOException e1) {
        e1.printStackTrace();
    }
    System.out.print("Waiting on writers to clear their queues");

    boolean emptyQueue = false;
    while (!emptyQueue) {
        emptyQueue = true;
        for (LogWriter<?> writer : writers) {
            // If any queue is not empty, sleep and check again
            if (!writer.getQueue().isEmpty()) {
                try {
                    emptyQueue = false;
                    System.out.print(".");
                    Thread.sleep(1000);
                    break;
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }
            }
        }
    }

    System.out.println("");
    System.out.println("PDUs converted: " + count);
    System.out.println("Shutting down logging threads");
    threadGroup.interrupt();
    int tries = 0;
    while (threadGroup.activeCount() > 0 && tries < 10) {
        try {
            Thread.sleep(2000);
        } catch (InterruptedException e) {
        }
        ++tries;
    }
    System.out.println("Completed logging threads shutdown");

}