Example usage for java.io DataOutputStream close

List of usage examples for java.io DataOutputStream close

Introduction

On this page you can find an example usage for java.io DataOutputStream close.

Prototype

@Override
public void close() throws IOException 

Source Link

Document

Closes this output stream and releases any system resources associated with the stream.

Usage

From source file:com.kylinolap.common.persistence.ResourceStore.java

/**
 * Check-and-set write of a resource: serializes {@code obj} and stores it under
 * {@code resPath}, using the object's previous timestamp for optimistic
 * concurrency control.
 *
 * @param resPath    resource path (normalized before use)
 * @param obj        entity to persist; its lastModified field is updated to the
 *                   store-confirmed timestamp on success, rolled back on failure
 * @param serializer serializer used to turn {@code obj} into bytes
 * @throws IOException if the underlying store rejects or fails the write
 */
final public <T extends RootPersistentEntity> void putResource(String resPath, T obj, Serializer<T> serializer)
        throws IOException {
    resPath = norm(resPath);
    logger.debug("Saving resource " + resPath + " (Store " + kylinConfig.getMetadataUrl() + ")");

    long oldTS = obj.getLastModified();
    long newTS = System.currentTimeMillis();
    obj.setLastModified(newTS);

    try {
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        // try-with-resources guarantees the stream is flushed and closed even
        // when serialization throws
        try (DataOutputStream dout = new DataOutputStream(buf)) {
            serializer.serialize(obj, dout);
        }

        newTS = checkAndPutResourceImpl(resPath, buf.toByteArray(), oldTS, newTS);
        obj.setLastModified(newTS); // update again with the store-confirmed TS

    } catch (IOException | RuntimeException e) {
        // the two original catch blocks were identical — collapsed to multi-catch
        obj.setLastModified(oldTS); // roll back TS when the write fails
        throw e;
    }
}

From source file:com.cbagroup.sit.Transact.java

/**
 * Sends a hard-coded Transact POST request to the CBA sandbox API, echoes the
 * response and selected JSON fields to stdout, and returns the raw response body.
 *
 * @return the raw response body as a single concatenated string
 * @throws IOException              if the connection or read fails
 * @throws NoSuchAlgorithmException declared for interface compatibility
 */
public String sendPOST() throws IOException, NoSuchAlgorithmException {

    String key = "api_key=cbatest123";
    String url = "http://developer.cbagroup.com/api/Transact?" + key;
    URL object = new URL(url);
    HttpURLConnection con = (HttpURLConnection) object.openConnection();

    // add request headers
    con.setRequestMethod("POST");
    con.setRequestProperty("Accept", "application/json");

    String urlParameters = "Country=Kenya&" + "TranType=Transact&" + "Reference=COMBAPI&" + "Currency=kes&"
            + "Account=1.2.2013&" + "Amount=10.25&" + "Narration=pay&" + "TransactionDate=1.2.2013&";

    // Send the post body; try-with-resources closes the stream even if the write fails.
    con.setDoOutput(true);
    try (DataOutputStream wr = new DataOutputStream(con.getOutputStream())) {
        wr.writeBytes(urlParameters);
        wr.flush();
    }

    int responseCode = con.getResponseCode();
    System.out.println("\nSending 'POST' request to URL : " + url);
    System.out.println("Post parameters : " + urlParameters);
    System.out.println("Response Code : " + responseCode);

    StringBuilder response = new StringBuilder();
    try (BufferedReader in = new BufferedReader(new InputStreamReader(con.getInputStream()))) {
        String inputLine;
        while ((inputLine = in.readLine()) != null) {
            response.append(inputLine);
        }
    }

    // print raw result
    System.out.println(response.toString());

    String result = response.toString();
    JSONParser parser = new JSONParser();

    try {
        JSONObject jsonObject = (JSONObject) parser.parse(result);

        // NOTE(review): unboxing NPEs if the payload lacks "Response Code" — TODO confirm contract
        long resCode = (long) jsonObject.get("Response Code");
        System.out.println();
        System.out.println("Response Code : " + resCode);
        System.out.println();

        System.out.println("#########################################################");
        // The two original branches printed identical details except this one line.
        System.out.println(resCode == 1 ? "Fred hack Fail" : "Fred hack Success");
        System.out.println();

        long resCode1 = (long) jsonObject.get("Response Code");
        System.out.println();
        System.out.println("Response Code : " + resCode1);
        System.out.println();

        String ref = (String) jsonObject.get("Reference");
        System.out.println();
        System.out.println("Reference : " + ref);
        System.out.println();

        String des = (String) jsonObject.get("Description");
        System.out.println();
        System.out.println("Description : " + des);
        System.out.println();

    } catch (ParseException e) {
        e.printStackTrace();
    }

    return response.toString();
}

From source file:edu.umn.cs.spatialHadoop.nasa.StockQuadTree.java

/**
 * Constructs an aggregate quad tree for an input HDF file on a selected
 * dataset identified by its name in the file.
 * @param conf The system configuration which can contain user-defined parameters.
 * @param inFile The path of the input HDF file to read
 * @param datasetName The name of the dataset to index in the HDF file
 * @param outFile The path to the index file to write
 * @throws IOException If an error happens while reading the input or writing the output
 */
public static void build(Configuration conf, Path inFile, String datasetName, Path outFile) throws IOException {
    FileSystem inFs = inFile.getFileSystem(conf);
    if (inFs instanceof HTTPFileSystem) {
        // HDF files are really bad to read over HTTP due to seeks; fetch a local copy first
        inFile = new Path(FileUtil.copyFile(conf, inFile));
        inFs = FileSystem.getLocal(conf);
    }
    HDFFile hdfFile = null;
    try {
        hdfFile = new HDFFile(inFs.open(inFile));
        DDVGroup dataGroup = hdfFile.findGroupByName(datasetName);

        if (dataGroup == null)
            throw new RuntimeException("Cannot find dataset '" + datasetName + "' in file " + inFile);

        // fillValue defaults to 0 when the HDF header carries no _FillValue entry
        short fillValue = 0;
        short[] values = null;
        for (DataDescriptor dd : dataGroup.getContents()) {
            if (dd instanceof DDNumericDataGroup) {
                values = (short[]) ((DDNumericDataGroup) dd).getAsTypedArray();
            } else if (dd instanceof DDVDataHeader) {
                DDVDataHeader vheader = (DDVDataHeader) dd;
                if (vheader.getName().equals("_FillValue")) {
                    fillValue = (short) (int) (Integer) vheader.getEntryAt(0);
                }
            }
        }

        // Retrieve metadata blocks that describe the dataset
        String archiveMetadata = (String) hdfFile.findHeaderByName("ArchiveMetadata.0").getEntryAt(0);
        String coreMetadata = (String) hdfFile.findHeaderByName("CoreMetadata.0").getEntryAt(0);
        NASADataset nasaDataset = new NASADataset(coreMetadata, archiveMetadata);

        if (values == null) {
            // BUG FIX: the old error path called Array.get(values, 0), which threw
            // NullPointerException when the dataset had no numeric data group.
            throw new RuntimeException(
                    "No indexable numeric data found for dataset '" + datasetName + "' in file " + inFile);
        }
        FileSystem outFs = outFile.getFileSystem(conf);
        // try-with-resources closes the index file even if the inner build throws
        try (DataOutputStream out = new DataOutputStream(
                new RandomCompressedOutputStream(outFs.create(outFile, false)))) {
            build(nasaDataset, values, fillValue, out);
        }
    } finally {
        if (hdfFile != null)
            hdfFile.close();
    }
}

From source file:edu.umn.cs.spatialHadoop.nasa.StockQuadTree.java

/**
 * Merges a list of aggregate trees (in the same order) and writes an output tree
 * that combines all input trees. Input trees must have been built using one of the
 * {@link #build} methods. The merged tree can be further merged using this method.
 *
 * @param conf    system configuration
 * @param inFiles paths of the input trees to merge
 * @param outFile path of the merged output tree
 * @throws IOException if opening, reading, or writing any tree fails
 */
public static void merge(Configuration conf, Path[] inFiles, Path outFile) throws IOException {
    DataInputStream[] inTrees = new DataInputStream[inFiles.length];
    try {
        for (int i = 0; i < inFiles.length; i++) {
            FileSystem inFs = inFiles[i].getFileSystem(conf);
            inTrees[i] = new FSDataInputStream(new RandomCompressedInputStream(inFs, inFiles[i]));
        }

        FileSystem outFs = outFile.getFileSystem(conf);
        // try-with-resources closes the output even if the inner merge throws
        try (DataOutputStream outTree = new DataOutputStream(
                new RandomCompressedOutputStream(outFs.create(outFile, false)))) {
            merge(inTrees, outTree);
        }
    } finally {
        // Close every input that was successfully opened, even on failure
        // (the original leaked all streams when any open or the merge threw).
        for (DataInputStream in : inTrees) {
            if (in != null)
                in.close();
        }
    }
}

From source file:edu.uci.ics.hyracks.hdfs.dataflow.DataflowTest.java

/**
 * Start the HDFS cluster and setup the data files
 * /*from   w ww  .j a v a 2  s.  co m*/
 * @throws IOException
 */
private void startHDFS() throws IOException {
    conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/core-site.xml"));
    conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/mapred-site.xml"));
    conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/hdfs-site.xml"));

    FileSystem lfs = FileSystem.getLocal(new Configuration());
    lfs.delete(new Path("build"), true);
    System.setProperty("hadoop.log.dir", "logs");
    dfsCluster = new MiniDFSCluster(conf, numberOfNC, true, null);
    FileSystem dfs = FileSystem.get(conf);
    Path src = new Path(DATA_PATH);
    Path dest = new Path(HDFS_INPUT_PATH);
    Path result = new Path(HDFS_OUTPUT_PATH);
    dfs.mkdirs(dest);
    dfs.mkdirs(result);
    dfs.copyFromLocalFile(src, dest);

    DataOutputStream confOutput = new DataOutputStream(new FileOutputStream(new File(HADOOP_CONF_PATH)));
    conf.writeXml(confOutput);
    confOutput.flush();
    confOutput.close();
}

From source file:admincommands.Unishell.java

/**
 * Admin command handler for the "unishell" account store: adds users to the
 * password file or lists the currently authorized users. Requires access
 * level 3 or higher.
 */
@Override
public void executeCommand(Player admin, String[] params) {
    if (admin.getAccessLevel() < 3) {
        PacketSendUtility.sendMessage(admin, "You dont have enough rights to execute this command");
        return;
    }

    if (params.length < 2) {
        PacketSendUtility.sendMessage(admin, "Syntax: //unishell <useradd|show> <values...>");
        PacketSendUtility.sendMessage(admin, "//unishell useradd username password");
        PacketSendUtility.sendMessage(admin, "//unishell show users");
        return;
    }

    // BUG FIX: the help text above advertises "useradd" but the code only
    // accepted "adduser"; accept both spellings for backward compatibility.
    if (params[0].equals("useradd") || params[0].equals("adduser")) {
        if (params.length < 3) {
            PacketSendUtility.sendMessage(admin, "Syntax; //unishell useradd username password");
            return;
        }

        String username = params[1];
        String password = params[2];
        String hashedPassword = CryptoHelper.encodeSHA1(password);

        // Reject duplicates (map lookup replaces the original linear key scan)
        if (AuthorizedKeys.loadAuthorizedKeys().containsKey(username)) {
            PacketSendUtility.sendMessage(admin, "Error: username already exists.");
            return;
        }

        // Append "username:hash" to the password file; try-with-resources closes
        // the stream even when the write fails (the original leaked it).
        try (DataOutputStream out = new DataOutputStream(
                new FileOutputStream("./config/network/unishell.passwd", true))) {
            out.writeBytes(username + ":" + hashedPassword + "\n");
            out.flush();
            PacketSendUtility.sendMessage(admin, "Unishell user '" + username + "' successfully added !");
        } catch (FileNotFoundException fnfe) {
            log.error("Cannot open unishell password file for writing at ./config/network/unishell.passwd",
                    fnfe);
            PacketSendUtility.sendMessage(admin, "Error: cannot open password file.");
        } catch (IOException ioe) {
            log.error("Cannot write to unishell password file for writing at ./config/network/unishell.passwd",
                    ioe);
            PacketSendUtility.sendMessage(admin, "Error: cannot write to password file.");
        }

    } else if (params[0].equals("show")) {
        if (params.length < 2) {
            PacketSendUtility.sendMessage(admin, "Syntax: //unishell show users");
            return;
        }

        if (params[1].equals("users")) {
            for (Entry<String, String> current : AuthorizedKeys.loadAuthorizedKeys().entrySet()) {
                PacketSendUtility.sendMessage(admin,
                        "user: " + current.getKey() + " | password: " + current.getValue());
            }
        }

    } else {
        PacketSendUtility.sendMessage(admin, "Syntax: //unishell <useradd|> <values...>");
        PacketSendUtility.sendMessage(admin, "//unishell useradd username password");
    }
}

From source file:org.openxdata.server.servlet.WMDownloadServlet.java

/**
 * Streams the list of studies to the client as a simple XML document.
 * The service writes a binary format (count byte, then repeated int-id/UTF-name
 * pairs) which this method re-reads and converts to {@code <study .../>} elements.
 *
 * @param response servlet response the XML is written to
 * @throws Exception if serialization or the service call fails
 */
private void downloadStudies(HttpServletResponse response) throws Exception {
    // Serialize the study list into an in-memory buffer first.
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    try (DataOutputStream dos = new DataOutputStream(baos)) {
        formDownloadService.downloadStudies(dos, "", "");
        dos.flush();
    }
    byte[] data = baos.toByteArray();

    PrintWriter out = response.getWriter();
    out.println("<StudyList>");

    // Re-read the binary stream and translate each (id, name) pair to XML.
    try (DataInputStream dis = new DataInputStream(new ByteArrayInputStream(data))) {
        dis.readByte(); // study count; unused — the loop runs to end-of-stream instead
        while (true) {
            out.println("<study id=\"" + dis.readInt() + "\" name=\"" + dis.readUTF() + "\"/>");
        }
    } catch (EOFException ignored) {
        // EOF is the expected loop terminator, not an error
    }
    out.println("</StudyList>");
    out.flush();
}

From source file:org.openxdata.server.servlet.WMDownloadServlet.java

/**
 * Streams the forms of one study to the client as a simple XML document.
 * The service writes a binary format (count byte, then repeated UTF strings)
 * which this method re-reads and converts to {@code <form>...</form>} elements.
 *
 * @param response servlet response the XML is written to
 * @param studyId  identifier of the study whose forms are downloaded
 * @throws Exception if serialization or the service call fails
 */
private void downloadForms(HttpServletResponse response, int studyId) throws Exception {
    // Serialize the forms into an in-memory buffer first.
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    try (DataOutputStream dos = new DataOutputStream(baos)) {
        formDownloadService.downloadForms(studyId, dos, "", "");
        dos.flush();
    }
    byte[] data = baos.toByteArray();

    PrintWriter out = response.getWriter();
    out.println("<study>");

    // Re-read the binary stream and wrap each form definition in XML.
    try (DataInputStream dis = new DataInputStream(new ByteArrayInputStream(data))) {
        dis.readByte(); // form count; unused — the loop runs to end-of-stream instead
        while (true) {
            out.println("<form>" + dis.readUTF() + "</form>");
        }
    } catch (EOFException ignored) {
        // EOF is the expected loop terminator, not an error
    }

    out.println("</study>");
    out.flush();
}

From source file:edu.uci.ics.pregelix.example.jobrun.RunJobTestSuite.java

/**
 * Start the HDFS mini cluster, stage the three test data sets, and dump the
 * effective Hadoop configuration to {@code HADOOP_CONF_PATH}.
 *
 * @throws IOException if the cluster cannot start or a file copy fails
 */
private void startHDFS() throws IOException {
    conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/core-site.xml"));
    conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/mapred-site.xml"));
    conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/hdfs-site.xml"));
    FileSystem lfs = FileSystem.getLocal(new Configuration());
    lfs.delete(new Path("build"), true);
    System.setProperty("hadoop.log.dir", "logs");
    dfsCluster = new MiniDFSCluster(conf, numberOfNC, true, null);
    FileSystem dfs = FileSystem.get(conf);
    Path src = new Path(DATA_PATH);
    Path dest = new Path(HDFS_PATH);
    dfs.mkdirs(dest);
    dfs.copyFromLocalFile(src, dest);

    src = new Path(DATA_PATH2);
    dest = new Path(HDFS_PATH2);
    dfs.mkdirs(dest);
    dfs.copyFromLocalFile(src, dest);

    src = new Path(DATA_PATH3);
    dest = new Path(HDFS_PATH3);
    dfs.mkdirs(dest);
    dfs.copyFromLocalFile(src, dest);

    // try-with-resources closes the conf dump even if writeXml throws
    try (DataOutputStream confOutput = new DataOutputStream(new FileOutputStream(new File(HADOOP_CONF_PATH)))) {
        conf.writeXml(confOutput);
        confOutput.flush();
    }
}

From source file:org.wso2.carbon.appmgt.sample.deployer.http.HttpHandler.java

/**
 * This method is used to do a http post request
 *
 * @param url         request url
 * @param payload     Content of the post request
 * @param sessionId   sessionId for authentication
 * @param contentType content type of the post request
 * @return the extracted session id, cookie value, or response body depending on
 *         {@code sessionId}; {@code null} when the response code is not 200
 * @throws java.io.IOException - Throws this when failed to fulfill a http post request
 */
public String doPostHttp(String url, String payload, String sessionId, String contentType) throws IOException {
    URL obj = new URL(url);
    HttpURLConnection con = (HttpURLConnection) obj.openConnection();

    // add request headers
    con.setRequestMethod("POST");
    if (!sessionId.equals("") && !sessionId.equals("none")) {
        con.setRequestProperty("Cookie", "JSESSIONID=" + sessionId);
    }
    con.setRequestProperty("Content-Type", contentType);
    con.setDoOutput(true);

    // try-with-resources closes the request stream even if the write fails
    try (DataOutputStream wr = new DataOutputStream(con.getOutputStream())) {
        wr.writeBytes(payload);
        wr.flush();
    }

    int responseCode = con.getResponseCode();
    if (responseCode == 200) {
        StringBuilder response = new StringBuilder();
        try (BufferedReader in = new BufferedReader(new InputStreamReader(con.getInputStream()))) {
            String inputLine;
            while ((inputLine = in.readLine()) != null) {
                response.append(inputLine);
            }
        }
        if (sessionId.equals("")) {
            // session id sits between the last ":" and the trailing "}" of the JSON body
            return response.substring(response.lastIndexOf(":") + 3, response.lastIndexOf("}") - 2);
        } else if (sessionId.equals("appmSamlSsoTokenId")) {
            return con.getHeaderField("Set-Cookie").split(";")[0].split("=")[1];
        } else if (sessionId.equals("header")) {
            return con.getHeaderField("Set-Cookie").split("=")[1].split(";")[0];
        } else {
            return response.toString();
        }
    }
    // Non-200 responses yield null (kept for caller compatibility)
    return null;
}