Example usage for java.io DataInputStream close


Introduction

On this page you can find usage examples for java.io.DataInputStream.close().

Prototype

public void close() throws IOException 


Document

Closes this input stream and releases any system resources associated with the stream.
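
As a minimal, self-contained sketch (the class name is illustrative and not taken from the usage examples below), close() is commonly invoked implicitly through try-with-resources, which guarantees the stream is closed even when a read fails:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class DataInputStreamCloseExample {
    public static void main(String[] args) throws IOException {
        // Write a single int into an in-memory buffer so the example needs no external file.
        ByteArrayOutputStream bout = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(bout);
        out.writeInt(42);
        out.close();

        // try-with-resources calls DataInputStream.close() automatically, even if
        // readInt() throws, and closing the wrapper also closes the wrapped stream.
        try (DataInputStream in = new DataInputStream(new ByteArrayInputStream(bout.toByteArray()))) {
            System.out.println(in.readInt()); // prints 42
        }
    }
}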

Usage

From source file:org.cloudata.core.tablet.ColumnValue.java

public ColumnValue copyColumnValue() {
    try {
        ColumnValue columnValue = new ColumnValue();

        // Serialize this value into an in-memory buffer, then read it back into the copy.
        ByteArrayOutputStream bout = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(bout);
        this.write(out);

        byte[] buf = bout.toByteArray();
        DataInputStream in = new DataInputStream(new ByteArrayInputStream(buf));

        columnValue.readFields(in);

        out.close();
        in.close();
        return columnValue;
    } catch (Exception e) {
        LOG.error(e);
        return null;
    }
}

From source file:com.adaptris.security.StdOutput.java

/**
 * Split this encrypted payload into its constituent parts.
 *
 * @see #readEncryptedMesage(byte[])
 * @throws IOException if we can't read the message
 * @throws Base64Exception if the message is not correctly encoded
 */
private void split() throws IOException {

    ByteArrayInputStream byteStream = new ByteArrayInputStream(Base64.decodeBase64(message));
    DataInputStream in = new DataInputStream(byteStream);
    setSessionVector(read(in));
    setSessionKey(read(in));
    setEncryptedData(read(in));
    setSignature(read(in));
    in.close();
    byteStream.close();

}

From source file:ZipExploder.java

/**
 * Copy a single entry from the archive.
 * 
 * @param destDir destination directory for the extracted entry
 * @param zf the zip file being read
 * @param ze the zip entry to copy
 * @throws IOException if the entry cannot be read or written
 */
public void copyFileEntry(String destDir, ZipFile zf, ZipEntry ze) throws IOException {
    DataInputStream dis = new DataInputStream(new BufferedInputStream(zf.getInputStream(ze)));
    try {
        copyFileEntry(destDir, ze.isDirectory(), ze.getName(), dis);
    } finally {
        try {
            dis.close();
        } catch (IOException ioe) {
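            // Ignore failures while closing the entry stream.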
        }
    }
}

From source file:ImageLoaderMIDlet.java

public Image loadImage(String url) throws IOException {
    HttpConnection hpc = null;
    DataInputStream dis = null;
    try {
        hpc = (HttpConnection) Connector.open(url);
        int length = (int) hpc.getLength();
        if (length <= 0) {
            throw new IOException("Content length unavailable for " + url);
        }
        byte[] data = new byte[length];
        dis = new DataInputStream(hpc.openInputStream());
        dis.readFully(data);
        return Image.createImage(data, 0, data.length);
    } finally {
        // Close the stream first, then the underlying connection.
        if (dis != null)
            dis.close();
        if (hpc != null)
            hpc.close();
    }
}

From source file:net.urosk.reportEngine.ReportsServlet.java

public void handleRequest(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {

    String reportDesign = request.getParameter("__report");
    String type = request.getParameter("__format");
    String outputFilename = request.getParameter("__filename");
    String attachment = request.getParameter("attachment");

    //check parameters
    StringBuffer msg = new StringBuffer();

    // checkers
    if (isEmpty(reportDesign)) {
        msg.append("<BR>__report can not be empty");
    }

    OutputType outputType = null;

    try {
        outputType = OutputType.valueOf(type.toUpperCase());
    } catch (Exception e) {
        msg.append("Undefined report __format: " + type + ". Set __format=" + OutputType.values());
    }

    // checkers
    if (isEmpty(outputFilename)) {
        msg.append("<BR>__filename can not be empty");
    }

    try {

        ServletOutputStream out = response.getOutputStream();
        ServletContext context = request.getSession().getServletContext();

        // output error
        if (StringUtils.isNotEmpty(msg.toString())) {
            out.print(msg.toString());
            return;
        }

        ReportDef def = new ReportDef();
        def.setDesignFileName(reportDesign);
        def.setOutputType(outputType);

        @SuppressWarnings("unchecked")
        Map<String, String[]> params = request.getParameterMap();
        Iterator<String> i = params.keySet().iterator();

        while (i.hasNext()) {
            String key = i.next();
            String value = params.get(key)[0];
            def.getParameters().put(key, value);
        }

        try {

            String createdFile = birtReportEngine.createReport(def);

            File file = new File(createdFile);

            String mimetype = context.getMimeType(file.getAbsolutePath());

            String inlineOrAttachment = (attachment != null) ? "attachment" : "inline";

            response.setContentType((mimetype != null) ? mimetype : "application/octet-stream");
            response.setContentLength((int) file.length());
            response.setHeader("Content-Disposition",
                    inlineOrAttachment + "; filename=\"" + outputFilename + "\"");

            DataInputStream in = new DataInputStream(new FileInputStream(file));

            byte[] bbuf = new byte[1024];
            int length;
            while ((in != null) && ((length = in.read(bbuf)) != -1)) {
                out.write(bbuf, 0, length);
            }

            in.close();

        } catch (Exception e) {

            logger.error(e, e);
            out.print(e.getMessage());
        } finally {
            out.flush();
            out.close();
        }

        logger.info("Free memory: " + (Runtime.getRuntime().freeMemory() / 1024L * 1024L));

    } catch (Exception e) {
        logger.error(e, e);
    }

}

From source file:com.linkedin.pinot.index.writer.FixedByteWidthRowColDataFileWriterTest.java

@Test
public void testMultiCol() throws Exception {

    File file = new File("test_single_col_writer.dat");
    file.delete();
    int rows = 100;
    int cols = 2;
    int[] columnSizes = new int[] { 4, 4 };
    FixedByteSingleValueMultiColWriter writer = new FixedByteSingleValueMultiColWriter(file, rows, cols,
            columnSizes);
    int[][] data = new int[rows][cols];
    Random r = new Random();
    for (int i = 0; i < rows; i++) {
        for (int j = 0; j < cols; j++) {
            data[i][j] = r.nextInt();
            writer.setInt(i, j, data[i][j]);
        }
    }
    writer.close();
    DataInputStream dis = new DataInputStream(new FileInputStream(file));
    for (int i = 0; i < rows; i++) {
        for (int j = 0; j < cols; j++) {
            Assert.assertEquals(dis.readInt(), data[i][j]);
        }
    }
    dis.close();
    file.delete();
}

From source file:com.nemesis.admin.UploadServlet.java

/**
 * @param request
 * @param response
 * @throws javax.servlet.ServletException
 * @throws java.io.IOException
 * @see HttpServlet#doGet(HttpServletRequest request, HttpServletResponse
 * response)
 *
 */
@Override
protected void doGet(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {

    if (request.getParameter("getfile") != null && !request.getParameter("getfile").isEmpty()) {
        File file = getFile(request, request.getParameter("getfile"));
        if (file.exists()) {
            int bytes = 0;
            try (ServletOutputStream op = response.getOutputStream()) {
                response.setContentType(getMimeType(file));
                response.setContentLength((int) file.length());
                response.setHeader("Content-Disposition", "inline; filename=\"" + file.getName() + "\"");

                byte[] bbuf = new byte[1024];
                DataInputStream in = new DataInputStream(new FileInputStream(file));

                while ((in != null) && ((bytes = in.read(bbuf)) != -1)) {
                    op.write(bbuf, 0, bytes);
                }

                in.close();
                op.flush();
            }
        }
    } else if (request.getParameter("delfile") != null && !request.getParameter("delfile").isEmpty()) {
        File file = getFile(request, request.getParameter("delfile"));
        if (file.exists()) {
            file.delete(); // TODO:check and report success
        }
    } else if (request.getParameter("getthumb") != null && !request.getParameter("getthumb").isEmpty()) {
        File file = getFile(request, request.getParameter("getthumb"));
        if (file.exists()) {
            System.out.println(file.getAbsolutePath());
            String mimetype = getMimeType(file);
            if (mimetype.endsWith("png") || mimetype.endsWith("jpeg") || mimetype.endsWith("jpg")
                    || mimetype.endsWith("gif")) {
                BufferedImage im = ImageIO.read(file);
                if (im != null) {
                    int newWidth = 75;
                    if (request.getParameter("w") != null) {
                        try {
                            newWidth = Integer.parseInt(request.getParameter("w"));
                        } catch (Exception e) {
                            // Keep the default width of 75.
                        }
                    }

                    BufferedImage thumb = Scalr.resize(im, newWidth);
                    if (request.getParameter("h") != null) {
                        try {
                            thumb = Scalr.crop(thumb, newWidth, Integer.parseInt(request.getParameter("h")));
                        } catch (IllegalArgumentException | ImagingOpException e) {
                            // Keep the original proportions.
                        }
                    }

                    ByteArrayOutputStream os = new ByteArrayOutputStream();
                    if (mimetype.endsWith("png")) {
                        ImageIO.write(thumb, "PNG", os);
                        response.setContentType("image/png");
                    } else if (mimetype.endsWith("jpeg")) {
                        ImageIO.write(thumb, "jpg", os);
                        response.setContentType("image/jpeg");
                    } else if (mimetype.endsWith("jpg")) {
                        ImageIO.write(thumb, "jpg", os);
                        response.setContentType("image/jpeg");
                    } else {
                        ImageIO.write(thumb, "GIF", os);
                        response.setContentType("image/gif");
                    }
                    try (ServletOutputStream srvos = response.getOutputStream()) {
                        response.setContentLength(os.size());
                        response.setHeader("Content-Disposition",
                                "inline; filename=\"" + file.getName() + "\"");
                        os.writeTo(srvos);
                        srvos.flush();
                    }
                }
            }
        } // TODO: check and report success
    } else {
        PrintWriter writer = response.getWriter();
        writer.write("call POST with multipart form data");
    }
}

From source file:com.acmeair.reporter.parser.ResultParser.java

protected <E> IndividualChartResults getData(String fileName) {
    IndividualChartResults results = new IndividualChartResults();
    try {
        FileInputStream fstream = new FileInputStream(fileName);
        // Wrap the file stream in a DataInputStream and a BufferedReader.
        DataInputStream in = new DataInputStream(fstream);
        BufferedReader br = new BufferedReader(new InputStreamReader(in));
        String strLine;

        while ((strLine = br.readLine()) != null) {
            processLine(results, strLine);
        }
        in.close();
    } catch (Exception e) {
        System.err.println("Error: " + e.getMessage());
    }

    addUp(results.getInputList());
    overallResults.setAllTimeList(results.getTimeList());
    return results;
}

From source file:mvm.rya.accumulo.pig.AccumuloStorage.java

@Override
public WritableComparable<?> getSplitComparable(InputSplit inputSplit) throws IOException {
    //cannot get access to the range directly
    AccumuloInputFormat.RangeInputSplit rangeInputSplit = (AccumuloInputFormat.RangeInputSplit) inputSplit;
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(baos);
    rangeInputSplit.write(out);
    out.close();
    DataInputStream stream = new DataInputStream(new ByteArrayInputStream(baos.toByteArray()));
    Range range = new Range();
    range.readFields(stream);
    stream.close();
    return range;
}

From source file:com.acme.io.JsonLoader.java

/**
 * Get a schema for the data to be loaded.  
 * @param location Location as returned by 
 * {@link LoadFunc#relativeToAbsolutePath(String, org.apache.hadoop.fs.Path)}
 * @param job The {@link Job} object - this should be used only to obtain 
 * cluster properties through {@link Job#getConfiguration()} and not to
 * set/query any runtime job information.  
 * @return schema for the data to be loaded. This schema should represent
 * all tuples of the returned data.  If the schema is unknown or it is
 * not possible to return a schema that represents all returned data,
 * then null should be returned. The schema should not be affected by
 * pushProjection, i.e. getSchema should always return the original schema
 * even after pushProjection.
 * @throws IOException if an exception occurs while determining the schema
 */
public ResourceSchema getSchema(String location, Job job) throws IOException {
    // Open the schema file and read the schema
    // Get an HDFS handle.
    FileSystem fs = FileSystem.get(job.getConfiguration());
    DataInputStream in = fs.open(new Path(location + "/_schema"));
    String line = in.readLine();
    in.close();

    // Parse the schema
    ResourceSchema s = new ResourceSchema(Utils.getSchemaFromString(line));
    if (s == null) {
        throw new IOException("Unable to parse schema found in file " + location + "/_schema");
    }

    // Now that we have determined the schema, store it in our
    // UDFContext properties object so we have it when we need it on the
    // backend
    UDFContext udfc = UDFContext.getUDFContext();
    Properties p = udfc.getUDFProperties(this.getClass(), new String[] { udfcSignature });
    p.setProperty("pig.jsonloader.schema", line);

    return s;
}