Example usage for java.io DataInputStream close

List of usage examples for java.io DataInputStream close

Introduction

On this page you can find example usage for java.io DataInputStream close.

Prototype

public void close() throws IOException 

Document

Closes this input stream and releases any system resources associated with the stream.
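
Before the full examples, here is a minimal sketch of the basic pattern, assuming a placeholder file name data.bin: close() can be called explicitly, or the stream can be declared in a try-with-resources block (Java 7 and later) so that close() runs automatically even when reading throws.

import java.io.DataInputStream;
import java.io.FileInputStream;
import java.io.IOException;

public class DataInputStreamCloseSketch {
    public static void main(String[] args) throws IOException {
        // try-with-resources closes the DataInputStream (and the wrapped
        // FileInputStream) automatically when the block exits.
        try (DataInputStream in = new DataInputStream(new FileInputStream("data.bin"))) {
            int first = in.readInt();
            System.out.println("First int: " + first);
        }
    }
}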

Usage

From source file:com.openhr.company.UploadCompLicenseFile.java

@Override
public ActionForward execute(ActionMapping map, ActionForm form, HttpServletRequest request,
        HttpServletResponse response) throws Exception {
    // checks if the request actually contains upload file
    if (!ServletFileUpload.isMultipartContent(request)) {
        PrintWriter writer = response.getWriter();
        writer.println("Request does not contain upload data");
        writer.flush();
        return map.findForward("HRHome");
    }

    // configures upload settings
    DiskFileItemFactory factory = new DiskFileItemFactory();
    factory.setSizeThreshold(THRESHOLD_SIZE);
    factory.setRepository(new File(System.getProperty("java.io.tmpdir")));

    ServletFileUpload upload = new ServletFileUpload(factory);
    upload.setSizeMax(MAX_REQUEST_SIZE);

    // constructs the directory path to store upload file
    String uploadPath = UPLOAD_DIRECTORY;
    // creates the directory if it does not exist
    File uploadDir = new File(uploadPath);
    if (!uploadDir.exists()) {
        uploadDir.mkdir();
    }

    try {
        // parses the request's content to extract file data
        List formItems = upload.parseRequest(request);
        Iterator iter = formItems.iterator();

        // iterates over form's fields
        while (iter.hasNext()) {
            FileItem item = (FileItem) iter.next();
            // processes only fields that are not form fields
            if (!item.isFormField()) {
                String fileName = new File(item.getName()).getName();
                String filePath = uploadPath + File.separator + fileName;
                File storeFile = new File(filePath);

                // saves the file on disk
                item.write(storeFile);

                // Read the file object contents and parse it and store it in the repos
                FileInputStream fstream = new FileInputStream(storeFile);
                DataInputStream in = new DataInputStream(fstream);
                BufferedReader br = new BufferedReader(new InputStreamReader(in));
                String strLine;

                //Read File Line By Line
                while ((strLine = br.readLine()) != null) {
                    System.out.print("Processing line - " + strLine);

                    String[] lineColumns = strLine.split(COMMA);

                    if (lineColumns.length < 8) {
                        br.close();
                        in.close();
                        fstream.close();
                        throw new Exception("The required columns are missing in the line - " + strLine);
                    }

                    // Format is - CompID,CompName,Branch,Address,From,To,LicenseKey,FinStartMonth
                    String companyId = lineColumns[0];
                    String companyName = lineColumns[1];
                    String branchName = lineColumns[2];
                    String address = lineColumns[3];
                    String fromDateStr = lineColumns[4];
                    String toDateStr = lineColumns[5];
                    String licenseKey = lineColumns[6];
                    String finStartMonthStr = lineColumns[7];

                    Date fromDate = new SimpleDateFormat("yyyy-MM-dd hh:mm:ss", Locale.ENGLISH)
                            .parse(fromDateStr);
                    Date toDate = new SimpleDateFormat("yyyy-MM-dd hh:mm:ss", Locale.ENGLISH).parse(toDateStr);
                    address = address.replace(";", ",");

                    List<Company> eComp = CompanyFactory.findByName(companyName);
                    if (eComp == null || eComp.isEmpty()) {
                        Company company = new Company();
                        company.setCompanyId(companyId);
                        company.setName(companyName);
                        company.setFystart(Integer.parseInt(finStartMonthStr));

                        Branch branch = new Branch();
                        branch.setAddress(address);
                        branch.setCompanyId(company);
                        branch.setName(branchName);

                        Licenses license = new Licenses();
                        license.setActive(1);
                        license.setCompanyId(company);
                        license.setFromdate(fromDate);
                        license.setTodate(toDate);
                        license.formLicenseKey();

                        System.out.println("License key formed - " + license.getLicensekey());
                        System.out.println("License key given - " + licenseKey);
                        if (license.getLicensekey().equalsIgnoreCase(licenseKey)) {
                            CompanyFactory.insert(company);
                            BranchFactory.insert(branch);
                            LicenseFactory.insert(license);
                        } else {
                            br.close();
                            in.close();
                            fstream.close();

                            throw new Exception("License is tampared. Contact Support.");
                        }
                    } else {
                        // Company is present, so update it.
                        Company company = eComp.get(0);
                        List<Licenses> licenses = LicenseFactory.findByCompanyId(company.getId());

                        Licenses newLicense = new Licenses();
                        newLicense.setActive(1);
                        newLicense.setCompanyId(company);
                        newLicense.setFromdate(fromDate);
                        newLicense.setTodate(toDate);
                        newLicense.formLicenseKey();

                        System.out.println("License key formed - " + newLicense.getLicensekey());
                        System.out.println("License key given - " + licenseKey);

                        if (newLicense.getLicensekey().equalsIgnoreCase(licenseKey)) {
                            for (Licenses lic : licenses) {
                                if (lic.getActive().compareTo(1) == 0) {
                                    lic.setActive(0);
                                    LicenseFactory.update(lic);
                                }
                            }

                            LicenseFactory.insert(newLicense);
                        } else {
                            br.close();
                            in.close();
                            fstream.close();

                            throw new Exception("License is tampared. Contact Support.");
                        }
                    }
                }

                //Close the input stream
                br.close();
                in.close();
                fstream.close();
            }
        }
        System.out.println("Upload has been done successfully!");
    } catch (Exception ex) {
        System.out.println("There was an error: " + ex.getMessage());
        ex.printStackTrace();
    }

    return map.findForward("CompLicHome");
}
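
In the example above, br, in and fstream are closed individually before every throw and again after the loop. Since closing a BufferedReader also closes the streams it wraps, a try-with-resources block over the outermost reader would remove the repeated close() calls; a sketch of that simplification, reusing storeFile from the example:

// Sketch only: closing the BufferedReader also closes the wrapped
// DataInputStream and FileInputStream, even if an exception is thrown.
try (BufferedReader br = new BufferedReader(new InputStreamReader(
        new DataInputStream(new FileInputStream(storeFile))))) {
    String strLine;
    while ((strLine = br.readLine()) != null) {
        // parse and validate the line as in the example above
    }
}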

From source file:edu.msu.cme.rdp.readseq.readers.core.SFFCore.java

public ReadBlock readReadBlock() throws IOException {
    try {
        DataInput seqFile = super.getDataInput();

        ReadBlock ret = new ReadBlock();

        /*
         * READ BLOCK HEADER
         */

        ret.headerLength = seqFile.readShort();
        ret.nameLength = seqFile.readShort();

        int tmp = (ret.headerLength << 16) | ret.nameLength;
        if (tmp == mftMagicNumber) { //We ended up in the index...certainly possible
            return null;
        }

        ret.numBases = seqFile.readInt();
        ret.clipQualLeft = seqFile.readUnsignedShort();
        ret.clipQualRight = seqFile.readUnsignedShort();
        ret.clipAdapterLeft = seqFile.readUnsignedShort();
        ret.clipAdapterRight = seqFile.readUnsignedShort();

        byte[] readName = new byte[ret.nameLength];
        super.read(readName);

        int dataOffset = ret.headerLength - (ret.nameLength + READ_BLOCK_STATIC_SIZE);
        if (dataOffset < 0) {
            throw new IOException("Illegal ReadBlock header length (" + ret.headerLength
                    + "), it would have me seek back in to the readblock");
        }

        seqFile.skipBytes(dataOffset);

        /*
         * READ BLOCK DATA
         */

        byte[] flowgramIndex = new byte[ret.numBases];
        byte[] bases = new byte[ret.numBases];
        byte[] quality = new byte[ret.numBases];

        byte[] homopolymerStretchEstimates = new byte[(commonHeader.flowLength) * 2];

        super.read(homopolymerStretchEstimates);
        super.read(flowgramIndex);
        super.read(bases);
        super.read(quality);

        DataInputStream flowgramStream = new DataInputStream(
                new ByteArrayInputStream(homopolymerStretchEstimates));

        short[] flowgrams = new short[commonHeader.flowLength];
        for (int index = 0; index < commonHeader.flowLength; index++) {
            flowgrams[index] = flowgramStream.readShort();
        }
        flowgramStream.close();

        ret.name = new String(readName);
        ret.flowgrams = flowgrams;
        ret.flowIndex = flowgramIndex;
        ret.seq = new String(bases);
        ret.qual = quality;
        int bytesRead = homopolymerStretchEstimates.length + flowgramIndex.length + bases.length
                + quality.length;

        alignToBoundary(bytesRead);

        return ret;
    } catch (EOFException e) {
        return null;
    }
}
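
The flowgram decoding above wraps a byte array in a ByteArrayInputStream and reads fixed-width big-endian shorts through a DataInputStream. Closing a ByteArrayInputStream releases no system resources (its close() is documented to have no effect), but closing the DataInputStream keeps the code consistent with file-backed streams. A self-contained sketch of the same decoding step, with a hypothetical decodeShorts helper:

import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;

public class ShortDecoder {
    // Decodes consecutive big-endian shorts from a raw byte array,
    // the way the flowgram values are read in the example above.
    static short[] decodeShorts(byte[] raw) throws IOException {
        short[] values = new short[raw.length / 2];
        try (DataInputStream in = new DataInputStream(new ByteArrayInputStream(raw))) {
            for (int i = 0; i < values.length; i++) {
                values[i] = in.readShort();
            }
        }
        return values;
    }
}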

From source file:com.datatorrent.contrib.hdht.HDHTWalManager.java

/**
 * Copy old WAL files to current location from startPosition to End Position in old WAL.
 * @param startPosition
 * @param endPosition
 * @param oldWalKey
 */
public void copyWALFiles(WalPosition startPosition, WalPosition endPosition, long oldWalKey) {
    try {
        for (long i = startPosition.fileId; i < endPosition.fileId; i++) {
            if (bfs.exists(oldWalKey, WAL_FILE_PREFIX + i)) {
                DataInputStream in = bfs.getInputStream(oldWalKey, WAL_FILE_PREFIX + i);
                DataOutputStream out = bfs.getOutputStream(walKey, WAL_FILE_PREFIX + walFileId);

                IOUtils.copyLarge(in, out);
                in.close();
                out.close();
                walFileId++;
            }
        }
        // Copy last file up to end position offset
        copyWalPart(startPosition, endPosition, oldWalKey);
        if (maxWalFileSize > 0 && walSize > maxWalFileSize) {
            writer.close();
            writer = null;
            walFileId++;
            walSize = 0;
        }
    } catch (Exception e) {
        throw Throwables.propagate(e);
    }
}

From source file:edu.cornell.med.icb.goby.alignments.perms.PermutationReader.java

private void makeIndex(FastBufferedInputStream inputStream) throws IOException {
    input.position(0);
    final ObjectArrayList<Block> blocks = new ObjectArrayList<Block>();

    final DataInputStream dataInput = new DataInputStream(
            new FastBufferedInputStream(new FileInputStream(basename + ".perm")));
    try {
        long offset = 0;

        while (dataInput.available() > 0) {

            final Block block = new Block();
            block.offset = offset;
            block.n = dataInput.readInt();
            block.firstSmallIndex = dataInput.readInt();
            dataInput.skip(block.n * 4L);
            blocks.add(block);
            offset += block.n * 4L + 8L;
        }
        Collections.sort(blocks, SMALL_INDEX_COMPARATOR);
        indexBlocks = blocks.toArray(new Block[blocks.size()]);
    } finally {
        dataInput.close();
    }
}

From source file:org.apache.hadoop.hdfs.tools.offlineImageViewer.TestOfflineImageViewer.java

private void changeLayoutVersion(File src, File dest, int newVersion) throws IOException {
    DataInputStream in = null;
    DataOutputStream out = null;

    try {
        in = new DataInputStream(new FileInputStream(src));
        out = new DataOutputStream(new FileOutputStream(dest));

        in.readInt();
        out.writeInt(newVersion);

        byte[] b = new byte[1024];
        int numBytes;
        // write only the bytes actually read, so a short final read
        // does not append stale buffer contents
        while ((numBytes = in.read(b)) > 0) {
            out.write(b, 0, numBytes);
        }
    } finally {
        if (in != null)
            in.close();
        if (out != null)
            out.close();
    }
}
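
The null checks in the finally block above are the pre-Java-7 idiom for closing two streams. With try-with-resources both streams are declared in the try header and closed automatically in reverse order; a sketch of the same method body under that idiom (src, dest and newVersion as in the example):

try (DataInputStream in = new DataInputStream(new FileInputStream(src));
     DataOutputStream out = new DataOutputStream(new FileOutputStream(dest))) {
    in.readInt();              // consume the old layout version
    out.writeInt(newVersion);  // write the replacement version

    byte[] b = new byte[1024];
    int numBytes;
    while ((numBytes = in.read(b)) > 0) {
        out.write(b, 0, numBytes);
    }
}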

From source file:org.apache.hadoop.hive.ql.io.TestRCFile.java

public void testRCFileHeader(char[] expected, Configuration conf) throws IOException, SerDeException {
    writeTest(fs, 10000, file, bytesArray, conf);
    DataInputStream di = fs.open(file, 10000);
    byte[] bytes = new byte[3];
    di.read(bytes);
    for (int i = 0; i < expected.length; i++) {
        assertTrue("Headers did not match", bytes[i] == expected[i]);
    }
    di.close();
}

From source file:ReadWriteStreams.java

public void readStream() {
    try {
        // Careful: Make sure this is big enough!
        // Better yet, test and reallocate if necessary      
        byte[] recData = new byte[50];

        // Read from the specified byte array
        ByteArrayInputStream strmBytes = new ByteArrayInputStream(recData);

        // Read Java data types from the above byte array
        DataInputStream strmDataType = new DataInputStream(strmBytes);

        for (int i = 1; i <= rs.getNumRecords(); i++) {
            // Get data into the byte array
            rs.getRecord(i, recData, 0);

            // Read back the data types      
            System.out.println("Record #" + i);
            System.out.println("UTF: " + strmDataType.readUTF());
            System.out.println("Boolean: " + strmDataType.readBoolean());
            System.out.println("Int: " + strmDataType.readInt());
            System.out.println("--------------------");

            // Reset so read starts at beginning of array 
            strmBytes.reset();
        }

        strmBytes.close();
        strmDataType.close();

    } catch (Exception e) {
        db(e.toString());
    }
}

From source file:com.phonegap.FileTransfer.java

/**
 * Uploads the specified file to the server URL provided using an HTTP 
 * multipart request.
 * @param file      Full path of the file on the file system
 * @param server        URL of the server to receive the file
 * @param fileKey       Name of file request parameter
 * @param fileName      File name to be used on server
 * @param mimeType      Describes file content type
 * @param params        key:value pairs of user-defined parameters
 * @return FileUploadResult containing result of upload request
 */
public FileUploadResult upload(String file, String server, final String fileKey, final String fileName,
        final String mimeType, JSONObject params, boolean trustEveryone) throws IOException, SSLException {
    // Create return object
    FileUploadResult result = new FileUploadResult();

    // Get an input stream of the file on the phone
    InputStream fileInputStream = getPathFromUri(file);

    HttpURLConnection conn = null;
    DataOutputStream dos = null;

    int bytesRead, bytesAvailable, bufferSize;
    long totalBytes;
    byte[] buffer;
    int maxBufferSize = 8096;

    //------------------ CLIENT REQUEST
    // open a URL connection to the server 
    URL url = new URL(server);

    // Open a HTTP connection to the URL based on protocol 
    if (url.getProtocol().toLowerCase().equals("https")) {
        // Using standard HTTPS connection. Will not allow self signed certificate
        if (!trustEveryone) {
            conn = (HttpsURLConnection) url.openConnection();
        }
        // Use our HTTPS connection that blindly trusts everyone.
        // This should only be used in debug environments
        else {
            // Setup the HTTPS connection class to trust everyone
            trustAllHosts();
            HttpsURLConnection https = (HttpsURLConnection) url.openConnection();
            // Save the current hostnameVerifier
            defaultHostnameVerifier = https.getHostnameVerifier();
            // Setup the connection not to verify hostnames 
            https.setHostnameVerifier(DO_NOT_VERIFY);
            conn = https;
        }
    }
    // Return a standard HTTP connection
    else {
        conn = (HttpURLConnection) url.openConnection();
    }

    // Allow Inputs
    conn.setDoInput(true);

    // Allow Outputs
    conn.setDoOutput(true);

    // Don't use a cached copy.
    conn.setUseCaches(false);

    // Use a post method.
    conn.setRequestMethod("POST");
    conn.setRequestProperty("Connection", "Keep-Alive");
    conn.setRequestProperty("Content-Type", "multipart/form-data;boundary=" + BOUNDRY);

    // Set the cookies on the response
    String cookie = CookieManager.getInstance().getCookie(server);
    if (cookie != null) {
        conn.setRequestProperty("Cookie", cookie);
    }

    dos = new DataOutputStream(conn.getOutputStream());

    // Send any extra parameters
    try {
        for (Iterator iter = params.keys(); iter.hasNext();) {
            Object key = iter.next();
            dos.writeBytes(LINE_START + BOUNDRY + LINE_END);
            dos.writeBytes("Content-Disposition: form-data; name=\"" + key.toString() + "\"; ");
            dos.writeBytes(LINE_END + LINE_END);
            dos.writeBytes(params.getString(key.toString()));
            dos.writeBytes(LINE_END);
        }
    } catch (JSONException e) {
        Log.e(LOG_TAG, e.getMessage(), e);
    }

    dos.writeBytes(LINE_START + BOUNDRY + LINE_END);
    dos.writeBytes("Content-Disposition: form-data; name=\"" + fileKey + "\";" + " filename=\"" + fileName
            + "\"" + LINE_END);
    dos.writeBytes("Content-Type: " + mimeType + LINE_END);
    dos.writeBytes(LINE_END);

    // create a buffer of maximum size
    bytesAvailable = fileInputStream.available();
    bufferSize = Math.min(bytesAvailable, maxBufferSize);
    buffer = new byte[bufferSize];

    // read file and write it into form...
    bytesRead = fileInputStream.read(buffer, 0, bufferSize);
    totalBytes = 0;

    while (bytesRead > 0) {
        totalBytes += bytesRead;
        result.setBytesSent(totalBytes);
        dos.write(buffer, 0, bufferSize);
        bytesAvailable = fileInputStream.available();
        bufferSize = Math.min(bytesAvailable, maxBufferSize);
        bytesRead = fileInputStream.read(buffer, 0, bufferSize);
    }

    // send multipart form data necessary after file data...
    dos.writeBytes(LINE_END);
    dos.writeBytes(LINE_START + BOUNDRY + LINE_START + LINE_END);

    // close streams
    fileInputStream.close();
    dos.flush();
    dos.close();

    //------------------ read the SERVER RESPONSE
    StringBuffer responseString = new StringBuffer("");
    DataInputStream inStream = new DataInputStream(conn.getInputStream());
    String line;
    while ((line = inStream.readLine()) != null) {
        responseString.append(line);
    }
    Log.d(LOG_TAG, "got response from server");
    Log.d(LOG_TAG, responseString.toString());

    // send request and retrieve response
    result.setResponseCode(conn.getResponseCode());
    result.setResponse(responseString.toString());

    inStream.close();
    conn.disconnect();

    // Revert back to the proper verifier and socket factories
    if (trustEveryone && url.getProtocol().toLowerCase().equals("https")) {
        ((HttpsURLConnection) conn).setHostnameVerifier(defaultHostnameVerifier);
        HttpsURLConnection.setDefaultSSLSocketFactory(defaultSSLSocketFactory);
    }

    return result;
}
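
DataInputStream.readLine(), used above to collect the server response, has been deprecated since JDK 1.1 because it does not convert bytes to characters correctly. The replacement recommended by its Javadoc is a BufferedReader over an InputStreamReader; a sketch of that variant for the response-reading step (conn and responseString as in the example, with UTF-8 assumed as the response charset):

StringBuffer responseString = new StringBuffer();
// BufferedReader handles the byte-to-character conversion that
// DataInputStream.readLine() does not; the charset is an assumption.
try (BufferedReader reader = new BufferedReader(
        new InputStreamReader(conn.getInputStream(), "UTF-8"))) {
    String line;
    while ((line = reader.readLine()) != null) {
        responseString.append(line);
    }
}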

From source file:com.alphabetbloc.accessmrs.services.SyncAdapter.java

/**
 * Downloads a stream of Patient Table and Obs Table from OpenMRS, stores it
 * to temp file
 * 
 * @param httpclient
 * 
 * @return the temporary file
 */
private File downloadObsStream(HttpClient client, SyncResult syncResult) {

    // No accurate download size on stream, so hack periodic update
    SyncManager.sLoopCount.set(10);
    SyncManager.sLoopProgress.set(0);
    mExecutor.schedule(new Runnable() {
        public void run() {
            // increase 1%/7s (i.e. slower than 1min timeout)
            SyncManager.sLoopProgress.getAndIncrement();
            if (SyncManager.sLoopProgress.get() < 9)
                mExecutor.schedule(this, 7000, TimeUnit.MILLISECONDS);
        }
    }, 1000, TimeUnit.MILLISECONDS);

    // Download File
    File tempFile = null;
    try {
        tempFile = File.createTempFile(".omrs", "-stream", mContext.getFilesDir());
        BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(tempFile));
        DataInputStream dis = NetworkUtils.getOdkStream(client, NetworkUtils.getPatientDownloadUrl());

        if (App.DEBUG)
            Log.v("SYNC BENCHMARK", "Download with buffer size=\n" + 8192);
        if (dis != null) {

            byte[] buffer = new byte[8192]; // increasing this from 4096 to
            // improve performance (testing)
            int count = 0;
            while ((count = dis.read(buffer)) > 0) {
                bos.write(buffer, 0, count);
            }

            bos.close();
            dis.close();
        }

    } catch (Exception e) {
        FileUtils.deleteFile(tempFile.getAbsolutePath());
        e.printStackTrace();
        ++syncResult.stats.numIoExceptions;
        return null;
    }

    return tempFile;
}
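
The manual 8 KB buffer loop above runs on any Java or Android version. Where Java 9 or newer (Android API 33+) is available, InputStream.transferTo can replace the loop and try-with-resources closes both streams; a sketch under that assumption, reusing dis and tempFile from the example:

try (DataInputStream dis = NetworkUtils.getOdkStream(client, NetworkUtils.getPatientDownloadUrl());
     BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(tempFile))) {
    if (dis != null) {
        dis.transferTo(bos); // Java 9+ / Android API 33+; otherwise keep the buffer loop
    }
}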

From source file:org.apache.hama.bsp.JobInProgress.java

public synchronized void initTasks() throws IOException {
    if (tasksInited) {
        return;
    }

    Path sysDir = new Path(this.master.getSystemDir());
    FileSystem fs = sysDir.getFileSystem(conf);
    if (jobSplit != null) {
        DataInputStream splitFile = fs.open(new Path(this.jobSplit));

        BSPJobClient.RawSplit[] splits;
        try {
            splits = BSPJobClient.readSplitFile(splitFile);
        } finally {
            splitFile.close();
        }
        LOG.debug("numBSPTasks: " + numBSPTasks + ", splits.length: " + splits.length);

        // adjust number of BSP tasks to actual number of splits
        this.tasks = new TaskInProgress[numBSPTasks];
        for (int i = 0; i < splits.length; i++) {
            tasks[i] = new TaskInProgress(getJobID(), this.jobFile.toString(), splits[i], this.conf, this, i);
        }

        for (int i = splits.length; i < numBSPTasks; i++) {
            tasks[i] = new TaskInProgress(getJobID(), this.jobFile.toString(), null, this.conf, this, i);
        }

    } else {
        this.tasks = new TaskInProgress[numBSPTasks];
        for (int i = 0; i < numBSPTasks; i++) {
            tasks[i] = new TaskInProgress(getJobID(), this.jobFile.toString(), null, this.conf, this, i);
        }
    }
    this.taskToGroomMap = new HashMap<Task, GroomServerStatus>(2 * tasks.length);

    this.taskCountInGroomMap = new HashMap<GroomServerStatus, Integer>();

    this.recoveryTasks = new HashSet<TaskInProgress>(2 * tasks.length);

    // Update job status
    this.status = new JobStatus(this.status.getJobID(), this.profile.getUser(), 0L, 0L, JobStatus.RUNNING,
            counters);

    // delete all nodes belonging to that job before start
    MasterSyncClient syncClient = master.getSyncClient();
    syncClient.registerJob(this.getJobID().toString());

    tasksInited = true;

    Class<?> taskAllocatorClass = conf.getClass(Constants.TASK_ALLOCATOR_CLASS,
            BestEffortDataLocalTaskAllocator.class, TaskAllocationStrategy.class);
    this.taskAllocationStrategy = (TaskAllocationStrategy) ReflectionUtils.newInstance(taskAllocatorClass,
            new Object[0]);

    if (conf.getBoolean(Constants.FAULT_TOLERANCE_FLAG, false)) {

        Class<?> ftClass = conf.getClass(Constants.FAULT_TOLERANCE_CLASS, AsyncRcvdMsgCheckpointImpl.class,
                BSPFaultTolerantService.class);
        if (ftClass != null) {
            try {
                faultToleranceService = ((BSPFaultTolerantService<?>) ReflectionUtils.newInstance(ftClass,
                        new Object[0])).constructMasterFaultTolerance(jobId, maxTaskAttempts, tasks, conf,
                                master.getSyncClient(), taskAllocationStrategy);
                LOG.info("Initialized fault tolerance service with " + ftClass.getCanonicalName());
            } catch (Exception e) {
                throw new IOException(e);
            }
        }
    }

    LOG.info("Job is initialized.");
}