Example usage for java.io DataInputStream close

List of usage examples for java.io DataInputStream close

Introduction

On this page you can find example usages of java.io.DataInputStream.close().

Prototype

public void close() throws IOException 

Document

Closes this input stream and releases any system resources associated with the stream.
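
Before the harvested examples below, here is a minimal, self-contained sketch of the two usual ways to make sure close() runs: an explicit close() in a finally block, and the equivalent try-with-resources form available since Java 7, which calls close() automatically because DataInputStream implements Closeable. The class name CloseExample and the file name data.bin are placeholders, not taken from any project below.

import java.io.DataInputStream;
import java.io.FileInputStream;
import java.io.IOException;

public class CloseExample {

    // Classic pattern: close() in a finally block so the stream is
    // released even when reading fails.
    static int readFirstIntClassic(String path) throws IOException {
        DataInputStream in = null;
        try {
            in = new DataInputStream(new FileInputStream(path));
            return in.readInt();
        } finally {
            if (in != null) {
                try {
                    in.close();
                } catch (IOException e) {
                    // ignore failure while closing the stream
                }
            }
        }
    }

    // Java 7+: try-with-resources calls close() automatically.
    static int readFirstInt(String path) throws IOException {
        try (DataInputStream in = new DataInputStream(new FileInputStream(path))) {
            return in.readInt();
        }
    }

    public static void main(String[] args) throws IOException {
        System.out.println(readFirstInt("data.bin")); // data.bin is a placeholder
    }
}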

Usage

From source file:org.kurento.repository.test.RangePutTests.java

protected void uploadFileWithSeqPUTs(RepositoryHttpRecorder recorder, File fileToUpload,
        RepositoryItem repositoryItem) throws Exception {

    recorder.setAutoTerminationTimeout(500000);
    String url = recorder.getURL();

    DataInputStream is = null;

    try {

        is = new DataInputStream(new FileInputStream(fileToUpload));

        int sentBytes = 0;
        byte[] info = new byte[40000];
        int readBytes;
        int numRequest = 0;

        while ((readBytes = is.read(info)) != -1) {

            ResponseEntity<String> response = putContent(url, Arrays.copyOf(info, readBytes), sentBytes);
            sentBytes += readBytes;

            log.info(numRequest + ": " + response.toString());
            assertEquals("Returned response: " + response.getBody(), HttpStatus.OK, response.getStatusCode());

            if (numRequest == 3) {

                // Simulating retry
                response = putContent(url, Arrays.copyOf(info, readBytes), sentBytes - readBytes);

                log.info(numRequest + ": " + response.toString());
                assertEquals("Returned response: " + response.getBody(), HttpStatus.OK,
                        response.getStatusCode());

            } else if (numRequest == 4) {

                // Simulating retry with new data
                byte[] newInfo = new byte[500];
                int newReadBytes = is.read(newInfo);

                response = putContent(url,
                        concat(Arrays.copyOf(info, readBytes), Arrays.copyOf(newInfo, newReadBytes)),
                        sentBytes - readBytes);
                sentBytes += newReadBytes;

                log.info(numRequest + ": " + response.toString());
                assertEquals("Returned response: " + response.getBody(), HttpStatus.OK,
                        response.getStatusCode());

            } else if (numRequest == 5) {

                // Simulating send ahead data
                response = putContent(url, Arrays.copyOf(info, readBytes), sentBytes + 75000);

                log.info(numRequest + ": " + response.toString());
                assertEquals("Returned response: " + response.getBody(), HttpStatus.NOT_IMPLEMENTED,
                        response.getStatusCode());
            }

            numRequest++;
        }

    } finally {
        if (is != null) {
            try {
                is.close();
            } catch (IOException e) {
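                // ignore failure while closing the stream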
            }
        }

        recorder.stop();
    }
}

From source file:org.apache.hadoop.mapred.CoronaJobInProgress.java

/**
 * Read input splits and create a map per split.
 */
public void initTasks() throws IOException {
    // log job info
    jobHistory.logSubmitted(jobFile.toString(), this.startTime, this.jobTrackerId);
    JobClient.RawSplit[] splits = JobClient.getAndRemoveCachedSplits(jobId);
    if (splits == null) {
        FileSystem fs = jobFile.getFileSystem(jobConf);
        Path splitFile = new Path(jobFile.getParent(), "job.split");
        LOG.info("Reading splits from " + splitFile);
        DataInputStream splitFileIn = fs.open(splitFile);
        try {
            splits = JobClient.readSplitFile(splitFileIn);
        } finally {
            splitFileIn.close();
        }
    }
    initTasksFromSplits(splits);
    jobHistory.logInited(this.launchTime, numMapTasks, numReduceTasks);
}

From source file:org.cloudata.core.tabletserver.TabletMapFile.java

public boolean loadIndex() throws IOException {
    DataInputStream in = null;

    GPath idxPath = new GPath(filePath, IDX_FILE);
    GPath dataPath = new GPath(filePath, DATA_FILE);
    try {
        int fileCount = 0;
        if (fs.exists(idxPath))
            fileCount++;
        if (fs.exists(dataPath))
            fileCount++;
        if (fileCount == 0) {
            return false;
        } else if (fileCount == 1) {
            LOG.error("data or idx file does not exist. tabletInfo=" + tabletInfo + ", column=" + columnName
                    + ", fileId=" + fileId);
            return false;
        }

        long dataFileLength = fs.getLength(new GPath(filePath, DATA_FILE));
        boolean idxFileError = false;
        long lastIndexOffset = 0;

        //LOG.debug("loadIndex:" + idxPath);
        in = new DataInputStream(fs.open(idxPath));
        MapFileIndexRecord mapFileIndexRecord = null;
        try {
            while (true) {
                mapFileIndexRecord = new MapFileIndexRecord();
                mapFileIndexRecord.read(in);
                if (mapFileIndexRecord.getOffset() > dataFileLength) {
                    idxFileError = true;
                    lastIndexOffset = mapFileIndexRecord.getOffset();
                    break;
                }
                mapFileIndexRecords.add(mapFileIndexRecord);
            }
        } catch (EOFException eof) {
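            // end of the index file reached; all index records have been read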

        }

        if (idxFileError) {
            throw new IOException("Can't load map file. index offset(" + lastIndexOffset
                    + ") greater than data file length(" + dataFileLength + ").[path=" + filePath + "]");
        }
    } catch (Exception e) {
        throw new IOException(e);
    } finally {
        if (in != null)
            in.close();
    }

    //    try {
    //      synchronized(this) {
    //        dfsBlockInfos = fs.getBlockInfos(dataPath);
    //      }
    //    } catch (Exception e) {
    //      e.printStackTrace();
    //    }
    return true;
}

From source file:au.edu.usq.fascinator.harvester.callista.CallistaHarvester.java

/**
 * Harvest the next set of files, and return their Object IDs
 *
 * @return Set<String> The set of object IDs just harvested
 * @throws HarvesterException if there are errors
 */
@Override
public Set<String> getObjectIdList() throws HarvesterException {
    Set<String> fileObjectIdList = new HashSet<String>();

    // Data streams - get CSV data
    FileInputStream fstream;
    try {
        fstream = new FileInputStream(csvData);
    } catch (FileNotFoundException ex) {
        // We tested for this earlier
        throw new HarvesterException("Could not find file", ex);
    }
    DataInputStream in = new DataInputStream(fstream);
    BufferedReader br = new BufferedReader(new InputStreamReader(in));

    int i = 0;
    int j = 0;
    boolean stop = false;
    // Line by line from buffered reader
    String line;
    try {
        while ((line = br.readLine()) != null && !stop) {
            // Parse the CSV for this line
            String[][] values;
            try {
                values = CSVParser.parse(new StringReader(line));
            } catch (IOException ex) {
                log.error("Error parsing CSV file", ex);
                throw new HarvesterException("Error parsing CSV file", ex);
            }

            for (String[] columns : values) {
                // Ignore the header row
                if (columns[0].equals("RESEARCHER_ID")) {
                    for (String column : columns) {
                        // Print if debugging
                        //log.debug("HEADING {}: '{}'", j, column);
                        j++;
                    }
                    j = 0;

                    // Store normal data rows
                } else {
                    i++;
                    if (i % 500 == 0) {
                        log.info("Parsing row {}", i);
                    }
                    String rId = columns[0];
                    if (!parsedData.containsKey(rId)) {
                        // New researcher, add an empty list
                        parsedData.put(rId, new ArrayList());
                    }
                    parsedData.get(rId).add(columns);
                }
            }

            // Check our record limit if debugging
            if (limit != -1 && i >= limit) {
                stop = true;
                log.debug("Stopping at debugging limit");
            }
        }
    } catch (IOException ex) {
        log.error("Error reading from CSV file", ex);
        throw new HarvesterException("Error reading from CSV file", ex);
    } finally {
        try {
            in.close();
        } catch (IOException ex) {
            // ignore failure while closing the stream
        }
    }
    log.info("Parse complete: {} rows", i);

    // Process parsed data
    i = 0;
    for (String key : parsedData.keySet()) {
        // Create the new record
        JsonConfigHelper json = new JsonConfigHelper();
        JsonConfigHelper packageJson = new JsonConfigHelper();
        packageJson.set("viewId", "default");
        packageJson.set("packageType", "name-authority");
        json.set("id", key);
        json.set("step", "pending");
        json.set("modified", "false");

        List<JsonConfigHelper> authors = new ArrayList();
        for (String[] columns : parsedData.get(key)) {
            try {
                // IDS
                store("studentId", columns[1], json);
                store("employeeId", columns[2], json);
                // Preferred Name exists
                String pName = null;
                if (columns[5] != null && !columns[5].equals("")) {
                    pName = columns[3] + " " + columns[5] + " " + columns[7];
                }
                store("preferedName", pName, json);
                // Name title
                store("nameTitle", columns[3], json);
                // First name
                store("firstName", columns[4], json);
                // Second name
                if (columns[6] != null) {
                    store("secondName", columns[6], json);
                }
                // Surname
                store("surname", columns[7], json);
                // Full name
                String fName = null;
                if (columns[6] != null) {
                    // We have a middle name
                    fName = columns[3] + " " + columns[4] + " " + columns[6] + " " + columns[7];
                } else {
                    fName = columns[3] + " " + columns[4] + " " + columns[7];
                }
                store("fullName", fName, json);
                store("title", fName, json);

                json.set("pageTitle", fName);

                store("description", "Authority record for '" + fName + "'", json);
                packageJson.set("title", fName);
                packageJson.set("description", "Authority record for '" + fName + "'");
                // Email
                store("email", columns[8], json);

                // Author data used in publication
                JsonConfigHelper auth = new JsonConfigHelper();
                auth.set("author", columns[9]);
                auth.set("orgUnitId", columns[11]);
                auth.set("orgUnit", columns[12]);
                auth.set("expiry", columns[13]);
                authors.add(auth);

                // Catch any data mismatches during storage
            } catch (Exception ex) {
                log.error("line: {}", Arrays.toString(columns));
                log.error("Error parsing record '{}'", key, ex);
            }
        }

        // Add author data
        if (!authors.isEmpty()) {
            // TODO: Work-around for #656
            json.set("authors", "===REPLACE=ME===");
            String list = "[" + StringUtils.join(authors, ",") + "]";
            String jsonString = json.toString();
            jsonString = jsonString.replace("\"===REPLACE=ME===\"", list);
            try {
                json = new JsonConfigHelper(jsonString);
            } catch (IOException ex) {
                json = null;
                log.error("Error parsing json '{}': ", jsonString);
            }
        }

        // Add an empty package manifest
        // TODO: Work-around for #656
        packageJson.set("manifest", "===REPLACE=ME===");
        String jsonString = packageJson.toString();
        jsonString = jsonString.replace("\"===REPLACE=ME===\"", "{}");
        try {
            packageJson = new JsonConfigHelper(jsonString);
        } catch (IOException ex) {
            packageJson = null;
            log.error("Error parsing json '{}': ", jsonString);
        }

        i++;
        if (i % 500 == 0) {
            log.info("Object count: {}", i);
        }
        if (json != null && packageJson != null) {
            try {
                String oid = storeJson(json.toString(), packageJson.toString(), key);
                fileObjectIdList.add(oid);
            } catch (StorageException ex) {
                log.error("Error during storage: ", ex);
            }
        }
    }
    log.info("Object creation complete: {} objects", i);

    return fileObjectIdList;
}

From source file:com.cloudmaster.cmp.util.AlarmSystem.transfer.HttpSender.java

public ResponseObject send(Object object, Map<String, String> paramMap) throws Exception {
    ResponseObject rs = new ResponseObject();
    DataInputStream dIs = null;
    HttpClient client;
    PostMethod meth = null;
    byte[] rawData;
    try {
        client = new HttpClient();
        client.setConnectionTimeout(this.timeout);
        client.setTimeout(this.datatimeout);
        client.setHttpConnectionFactoryTimeout(this.timeout);

        meth = new PostMethod(paramMap.get("SERVER_URL"));
        // meth = new UTF8PostMethod(url);
        meth.getParams().setParameter(HttpMethodParams.HTTP_CONTENT_CHARSET, ENCODING);
        // meth.addParameter(SERVER_ARGS, new String(rawData,"UTF-8"));
        meth.setRequestBody(object.toString());
        System.out.println(object.toString());

        /*
         * Request headers: "type"="ruleSync" marks an XML rule-synchronization
         * request, "syncType" selects the synchronization mode (1, 2 or 3),
         * and "ruleName" identifies the XML rule being synchronized.
         */
        meth.addRequestHeader("type", paramMap.get("type"));
        meth.addRequestHeader("syncType", paramMap.get("syncType"));
        meth.addRequestHeader("ruleName", URLEncoder.encode(paramMap.get("ruleName"), "UTF-8"));
        client.getParams().setParameter(HttpMethodParams.RETRY_HANDLER,
                new DefaultHttpMethodRetryHandler(1, false));

        client.executeMethod(meth);

        dIs = new DataInputStream(meth.getResponseBodyAsStream());

        if (meth.getStatusCode() == HttpStatus.SC_OK) {

            Header errHeader = meth.getResponseHeader(HDR_ERROR);

            if (errHeader != null) {
                rs.setError(meth.getResponseBodyAsString());
                return rs;
            }

            rs = ResponseObject.fromStream(dIs);

            return rs;
        } else {
            meth.releaseConnection();
            throw new IOException("Connection failure: " + meth.getStatusLine().toString());
        }
    } finally {
        if (meth != null) {
            meth.releaseConnection();
        }
        if (dIs != null) {
            dIs.close();
        }
    }
}

From source file:net.timewalker.ffmq4.storage.data.impl.AbstractBlockBasedDataStore.java

private final void loadAllocationTable() throws DataStoreException {
    log.debug(
            "[" + descriptor.getName() + "] Loading allocation table " + allocationTableFile.getAbsolutePath());
    DataInputStream in = null;
    try {
        in = new DataInputStream(new BufferedInputStream(new FileInputStream(allocationTableFile), 16384));

        this.blockCount = in.readInt();
        this.blockSize = in.readInt();
        this.firstBlock = in.readInt();

        this.flags = new byte[blockCount];
        this.allocatedSize = new int[blockCount];
        this.previousBlock = new int[blockCount];
        this.nextBlock = new int[blockCount];
        this.blocksInUse = 0;
        int msgCount = 0;
        for (int n = 0; n < blockCount; n++) {
            flags[n] = in.readByte();
            allocatedSize[n] = in.readInt();
            previousBlock[n] = in.readInt();
            nextBlock[n] = in.readInt();

            if (allocatedSize[n] != -1) {
                blocksInUse++;

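                // a block flagged as a start block begins one stored message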
                if ((flags[n] & FLAG_START_BLOCK) > 0)
                    msgCount++;
            }
        }
        this.locks = new FastBitSet(blockCount);
        this.size = msgCount;

        log.debug("[" + descriptor.getName() + "] " + msgCount + " entries found");
    } catch (EOFException e) {
        throw new DataStoreException("Allocation table is truncated : " + allocationTableFile.getAbsolutePath(),
                e);
    } catch (IOException e) {
        throw new DataStoreException(
                "Cannot initialize allocation table : " + allocationTableFile.getAbsolutePath(), e);
    } finally {
        if (in != null) {
            try {
                in.close();
            } catch (IOException e) {
                log.error("[" + descriptor.getName() + "] Could not close file input stream", e);
            }
        }
    }
}

From source file:org.mwc.cmap.xyplot.views.XYPlotView.java

private void rtfToClipboard(final String fName, final Dimension dim) {
    // Issue #520 - Copy WMF embedded in RTF
    ByteArrayOutputStream os = null;
    DataInputStream dis = null;
    try {
        os = new ByteArrayOutputStream();
        RTFWriter writer = new RTFWriter(os);
        File file = new File(fName);
        byte[] data = new byte[(int) file.length()];
        dis = new DataInputStream(new FileInputStream(file));
        dis.readFully(data);
        writer.writeHeader();
        writer.writeEmfPicture(data, dim.getWidth(), dim.getHeight());
        writer.writeTail();

        RTFTransfer rtfTransfer = RTFTransfer.getInstance();
        Clipboard clipboard = new Clipboard(Display.getDefault());
        Object[] rtfData = new Object[] { os.toString() };
        clipboard.setContents(rtfData, new Transfer[] { rtfTransfer });
    } catch (final Exception e1) {
        IStatus status = new Status(IStatus.ERROR, PlotViewerPlugin.PLUGIN_ID, e1.getLocalizedMessage(), e1);
        XYPlotPlugin.getDefault().getLog().log(status);
    } finally {
        if (os != null) {
            try {
                os.close();
            } catch (IOException e1) {
                // ignore
            }
        }
        if (dis != null) {
            try {
                dis.close();
            } catch (IOException e1) {
                // ignore
            }
        }
    }

}

From source file:mp.teardrop.PlaybackService.java

/**
 * Initializes the service state, loading songs saved from the disk into the
 * song timeline.
 *
 * @return The loaded value for mState.
 */
public int loadState() {
    int state = 0;

    DataInputStream in = null;
    try {
        in = new DataInputStream(openFileInput(STATE_FILE));

        if (in.readLong() == STATE_FILE_MAGIC && in.readInt() == STATE_VERSION) {
            mPendingSeek = in.readInt();
            mPendingSeekSong = in.readLong();
            mTimeline.readState(getSharedPreferences(PREFS_SAVED_SONGS, 0));
            state |= mTimeline.getShuffleMode() << SHIFT_SHUFFLE;
            state |= mTimeline.getFinishAction() << SHIFT_FINISH;
        }

    } catch (EOFException e) {
        Log.w("OrchidMP", "Failed to load state", e);
    } catch (IOException e) {
        Log.w("OrchidMP", "Failed to load state", e);
    } catch (JSONException e) {
        Log.w("OrchidMP", "Failed to load state", e);
    } finally {
        if (in != null) {
            try {
                in.close();
            } catch (IOException e) {
                // ignore failure while closing the stream
            }
        }
    }

    return state;
}

From source file:org.dcm4che3.tool.jpg2dcm.Jpg2Dcm.java

public void convert(CommandLine cl, File jpgFile, File dcmFile) throws IOException {
    jpgHeaderLen = 0;
    jpgLen = (int) jpgFile.length();
    DataInputStream jpgInput = new DataInputStream(new BufferedInputStream(new FileInputStream(jpgFile)));
    try {
        Attributes attrs = new Attributes();
        try {
            if (cl.hasOption("c"))
                attrs = SAXReader.parse(cl.getOptionValue("c"));
        } catch (Exception e) {
            throw new FileNotFoundException("Configuration XML file not found");
        }
        attrs.setString(Tag.SpecificCharacterSet, VR.CS, charset);
        if (noAPPn || missingRowsColumnsSamplesPMI(attrs)) {
            readHeader(attrs, jpgInput);
        }
        ensureUS(attrs, Tag.BitsAllocated, 8);
        ensureUS(attrs, Tag.BitsStored,
                attrs.getInt(Tag.BitsAllocated, (buffer[jpgHeaderLen] & 0xff) > 8 ? 16 : 8));
        ensureUS(attrs, Tag.HighBit, attrs.getInt(Tag.BitsStored, (buffer[jpgHeaderLen] & 0xff)) - 1);
        ensureUS(attrs, Tag.PixelRepresentation, 0);
        ensureUID(attrs, Tag.StudyInstanceUID);
        ensureUID(attrs, Tag.SeriesInstanceUID);
        ensureUID(attrs, Tag.SOPInstanceUID);
        Date now = new Date();
        attrs.setDate(Tag.InstanceCreationDate, VR.DA, now);
        attrs.setDate(Tag.InstanceCreationTime, VR.TM, now);
        Attributes fmi = attrs.createFileMetaInformation(transferSyntax);
        DicomOutputStream dos = new DicomOutputStream(dcmFile);
        try {
            dos.writeDataset(fmi, attrs);
            dos.writeHeader(Tag.PixelData, VR.OB, -1);
            if (!cl.hasOption("mpeg")) {
                dos.writeHeader(Tag.Item, null, 0);
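                // DICOM item lengths must be even, so round the JPEG length up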
                dos.writeHeader(Tag.Item, null, (jpgLen + 1) & ~1);
                dos.write(buffer, 0, jpgHeaderLen);
            }
            int r;
            while ((r = jpgInput.read(buffer)) > 0) {
                dos.write(buffer, 0, r);
            }
            if (!cl.hasOption("mpeg")) {
                if ((jpgLen & 1) != 0) {
                    dos.write(0);
                }
            }
            dos.writeHeader(Tag.SequenceDelimitationItem, null, 0);
        } finally {
            dos.close();
        }
    } finally {
        jpgInput.close();
    }
}

From source file:org.opendedup.sdfs.filestore.cloud.BatchAwsS3ChunkStore.java

private String[] getStrings(S3Object sobj) throws IOException {
    this.s3clientLock.readLock().lock();
    try {
        boolean encrypt = false;
        boolean compress = false;
        boolean lz4compress = false;

        int cl = (int) sobj.getObjectMetadata().getContentLength();

        byte[] data = new byte[cl];
        DataInputStream in = null;
        try {
            in = new DataInputStream(sobj.getObjectContent());
            in.readFully(data);

        } catch (Exception e) {
            throw new IOException(e);
        } finally {
            try {
                if (in != null)
                    in.close();
            } catch (Exception e) {
                // ignore failure while closing the stream
            }
        }
        Map<String, String> mp = this.getUserMetaData(sobj.getObjectMetadata());
        if (mp.containsKey("md5sum")) {
            try {
                byte[] shash = BaseEncoding.base64().decode(mp.get("md5sum"));
                byte[] chash;
                chash = ServiceUtils.computeMD5Hash(data);
                if (!Arrays.equals(shash, chash))
                    throw new IOException("download corrupt at " + sobj.getKey());
            } catch (NoSuchAlgorithmException e) {
                throw new IOException(e);
            }
        }
        int size = Integer.parseInt((String) mp.get("size"));
        if (mp.containsKey("encrypt")) {
            encrypt = Boolean.parseBoolean((String) mp.get("encrypt"));
        }
        if (mp.containsKey("compress")) {
            compress = Boolean.parseBoolean((String) mp.get("compress"));
        } else if (mp.containsKey("lz4compress")) {

            lz4compress = Boolean.parseBoolean((String) mp.get("lz4compress"));
        }
        byte[] ivb = null;
        if (mp.containsKey("ivspec"))
            ivb = BaseEncoding.base64().decode(mp.get("ivspec"));
        if (encrypt) {
            if (ivb != null)
                data = EncryptUtils.decryptCBC(data, new IvParameterSpec(ivb));
            else
                data = EncryptUtils.decryptCBC(data);
        }
        if (compress)
            data = CompressionUtils.decompressZLIB(data);
        else if (lz4compress) {
            data = CompressionUtils.decompressLz4(data, size);
        }
        String hast = new String(data);
        SDFSLogger.getLog().debug("reading hashes " + (String) mp.get("hashes") + " from " + sobj.getKey());
        String[] st = hast.split(",");
        return st;
    } finally {
        this.s3clientLock.readLock().unlock();
    }
}