Example usage for java.io DataOutputStream write

Introduction

This page collects usage examples for the java.io.DataOutputStream.write method.

Prototype

public synchronized void write(int b) throws IOException 

Document

Writes the specified byte (the low eight bits of the argument b) to the underlying output stream.
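
Example

A minimal, self-contained sketch (not taken from any of the projects below) illustrating the low-eight-bits behavior of write(int b); the class name WriteByteExample is just for illustration:

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class WriteByteExample {
    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        DataOutputStream dos = new DataOutputStream(bos);
        dos.write(0x41);  // writes one byte: 'A'
        dos.write(0x1FF); // only the low eight bits are kept, so this writes 0xFF
        dos.flush();
        byte[] bytes = bos.toByteArray();
        // prints "2 bytes: 41 ff"
        System.out.println(bytes.length + " bytes: " + Integer.toHexString(bytes[0] & 0xFF) + " "
                + Integer.toHexString(bytes[1] & 0xFF));
    }
}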

Usage

From source file:org.apache.fontbox.ttf.TTFSubFont.java

private byte[] buildNameTable() throws IOException {
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    DataOutputStream dos = new DataOutputStream(bos);

    LOG.debug("Building table [name]...");

    NamingTable n = this.baseTTF.getNaming();
    List<NameRecord> nameRecords = null;
    if (n != null) {
        nameRecords = n.getNameRecords();
    } else {
        // sometimes there is no naming table in an embedded subfont;
        // create some dummy records
        nameRecords = new ArrayList<NameRecord>();
        NameRecord nr = new NameRecord();
        nr.setPlatformId(NameRecord.PLATFORM_WINDOWS);
        nr.setPlatformEncodingId(NameRecord.PLATFORM_ENCODING_WINDOWS_UNICODE);
        nr.setLanguageId(0);
        nr.setNameId(NameRecord.NAME_FONT_FAMILY_NAME);
        nr.setString("PDFBox-Dummy-Familyname");
        nameRecords.add(nr);
        nr = new NameRecord();
        nr.setPlatformId(NameRecord.PLATFORM_WINDOWS);
        nr.setPlatformEncodingId(NameRecord.PLATFORM_ENCODING_WINDOWS_UNICODE);
        nr.setLanguageId(0);
        nr.setNameId(NameRecord.NAME_FULL_FONT_NAME);
        nr.setString("PDFBox-Dummy-Fullname");
        nameRecords.add(nr);
    }
    int numberOfRecords = nameRecords.size();
    int nrep = 0;
    for (int i = 0; i < numberOfRecords; ++i) {
        NameRecord nr = nameRecords.get(i);
        if (replicateNameRecord(nr)) {
            LOG.debug("Writing name record [" + nr.getNameId() + "], [" + nr.getString() + "],");
            ++nrep;
        }
    }
    writeUint16(dos, 0); // format selector
    writeUint16(dos, nrep); // count of name records
    writeUint16(dos, 2 * 3 + (2 * 6) * nrep); // offset to string storage: 6-byte header plus 12 bytes per record

    byte[][] names = new byte[nrep][];
    int j = 0;
    for (int i = 0; i < numberOfRecords; ++i) {
        NameRecord nr = nameRecords.get(i);
        if (replicateNameRecord(nr)) {
            int platform = nr.getPlatformId();
            int encoding = nr.getPlatformEncodingId();
            String charset = "ISO-8859-1";
            if (platform == 3 && encoding == 1) {
                charset = "UTF-16BE";
            } else if (platform == 2) {
                if (encoding == 0) {
                    charset = "US-ASCII";
                } else if (encoding == 1) {
                    // not sure if this is correct
                    charset = "UTF-16BE";
                } else if (encoding == 2) {
                    charset = "ISO-8859-1";
                }
            }
            String value = nr.getString();
            if (nr.getNameId() == 6 && this.nameSuffix != null) {
                value += this.nameSuffix;
            }
            names[j] = value.getBytes(charset);
            ++j;
        }
    }

    int offset = 0;
    j = 0;
    for (int i = 0; i < numberOfRecords; ++i) {
        NameRecord nr = nameRecords.get(i);
        if (replicateNameRecord(nr)) {
            writeUint16(dos, nr.getPlatformId());
            writeUint16(dos, nr.getPlatformEncodingId());
            writeUint16(dos, nr.getLanguageId());
            writeUint16(dos, nr.getNameId());
            writeUint16(dos, names[j].length);
            writeUint16(dos, offset);
            offset += names[j].length;
            ++j;
        }
    }

    for (int i = 0; i < nrep; ++i) {
        dos.write(names[i]);
    }
    dos.flush();
    LOG.debug("Finished table [name].");
    return bos.toByteArray();
}

From source file:org.apache.hadoop.hive.ql.exec.DDLTask.java

private int showConf(Hive db, ShowConfDesc showConf) throws Exception {
    ConfVars conf = HiveConf.getConfVars(showConf.getConfName());
    if (conf == null) {
        throw new HiveException("invalid configuration name " + showConf.getConfName());
    }
    String description = conf.getDescription();
    String defaultValue = conf.getDefaultValue();
    DataOutputStream output = getOutputStream(showConf.getResFile());
    try {
        if (defaultValue != null) {
            output.write(defaultValue.getBytes());
        }
        output.write(separator);
        output.write(conf.typeString().getBytes());
        output.write(separator);
        if (description != null) {
            output.write(description.replaceAll(" *\n *", " ").getBytes());
        }
        output.write(terminator);
    } finally {
        output.close();
    }
    return 0;
}

From source file:org.apache.hadoop.hive.ql.exec.DDLTask.java

private int showTxns(Hive db, ShowTxnsDesc desc) throws HiveException {
    // Call the metastore to get the currently open transactions.
    GetOpenTxnsInfoResponse rsp = db.showTransactions();

    // Write the results into the file
    DataOutputStream os = getOutputStream(desc.getResFile());
    try {
        // Write a header
        os.writeBytes("Transaction ID");
        os.write(separator);
        os.writeBytes("Transaction State");
        os.write(separator);
        os.writeBytes("Started Time");
        os.write(separator);
        os.writeBytes("Last Heartbeat Time");
        os.write(separator);
        os.writeBytes("User");
        os.write(separator);
        os.writeBytes("Hostname");
        os.write(terminator);

        for (TxnInfo txn : rsp.getOpen_txns()) {
            os.writeBytes(Long.toString(txn.getId()));
            os.write(separator);
            os.writeBytes(txn.getState().toString());
            os.write(separator);
            os.writeBytes(Long.toString(txn.getStartedTime()));
            os.write(separator);
            os.writeBytes(Long.toString(txn.getLastHeartbeatTime()));
            os.write(separator);
            os.writeBytes(txn.getUser());
            os.write(separator);
            os.writeBytes(txn.getHostname());
            os.write(terminator);
        }
    } catch (IOException e) {
        LOG.warn("show transactions: " + stringifyException(e));
        return 1;
    } finally {
        IOUtils.closeStream(os);
    }
    return 0;
}

From source file:org.apache.hadoop.hive.ql.exec.DDLTask.java

private int showCompactions(Hive db, ShowCompactionsDesc desc) throws HiveException {
    // Call the metastore to get the status of all known compactions (completed ones are eventually purged)
    ShowCompactResponse rsp = db.showCompactions();

    // Write the results into the file
    final String noVal = " --- ";

    DataOutputStream os = getOutputStream(desc.getResFile());
    try {
        // Write a header
        os.writeBytes("Database");
        os.write(separator);
        os.writeBytes("Table");
        os.write(separator);
        os.writeBytes("Partition");
        os.write(separator);
        os.writeBytes("Type");
        os.write(separator);
        os.writeBytes("State");
        os.write(separator);
        os.writeBytes("Worker");
        os.write(separator);
        os.writeBytes("Start Time");
        os.write(separator);
        os.writeBytes("Duration(ms)");
        os.write(separator);
        os.writeBytes("HadoopJobId");
        os.write(terminator);

        if (rsp.getCompacts() != null) {
            for (ShowCompactResponseElement e : rsp.getCompacts()) {
                os.writeBytes(e.getDbname());
                os.write(separator);
                os.writeBytes(e.getTablename());
                os.write(separator);
                String part = e.getPartitionname();
                os.writeBytes(part == null ? noVal : part);
                os.write(separator);
                os.writeBytes(e.getType().toString());
                os.write(separator);
                os.writeBytes(e.getState());
                os.write(separator);
                String wid = e.getWorkerid();
                os.writeBytes(wid == null ? noVal : wid);
                os.write(separator);
                os.writeBytes(e.isSetStart() ? Long.toString(e.getStart()) : noVal);
                os.write(separator);
                os.writeBytes(e.isSetEndTime() ? Long.toString(e.getEndTime() - e.getStart()) : noVal);
                os.write(separator);
                os.writeBytes(e.isSetHadoopJobId() ? e.getHadoopJobId() : noVal);
                os.write(terminator);
            }
        }
    } catch (IOException e) {
        LOG.warn("show compactions: " + stringifyException(e));
        return 1;
    } finally {
        IOUtils.closeStream(os);
    }
    return 0;
}

From source file:org.apache.hadoop.hive.ql.exec.DDLTask.java

/**
 * Write a list of indexes to a file.
 *
 * @param db
 *          The database in question.
 * @param showIndexes
 *          These are the indexes we're interested in.
 * @return Returns 0 when execution succeeds and above 0 if it fails.
 * @throws HiveException
 *           Throws this exception if an unexpected error occurs.
 */
private int showIndexes(Hive db, ShowIndexesDesc showIndexes) throws HiveException {
    // get the indexes for the table and populate the output
    String tableName = showIndexes.getTableName();
    Table tbl = null;
    List<Index> indexes = null;

    tbl = db.getTable(tableName);

    indexes = db.getIndexes(tbl.getDbName(), tbl.getTableName(), (short) -1);

    // In case the query is served by HiveServer2, don't pad it with spaces,
    // as HiveServer2 output is consumed by JDBC/ODBC clients.
    boolean isOutputPadded = !SessionState.get().isHiveServerQuery();

    // write the results in the file
    DataOutputStream outStream = getOutputStream(showIndexes.getResFile());
    try {
        if (showIndexes.isFormatted()) {
            // column headers
            outStream.write(MetaDataFormatUtils.getIndexColumnsHeader().getBytes(StandardCharsets.UTF_8));
            outStream.write(terminator);
            outStream.write(terminator);
        }

        for (Index index : indexes) {
            outStream.write(MetaDataFormatUtils.getIndexInformation(index, isOutputPadded)
                    .getBytes(StandardCharsets.UTF_8));
        }
    } catch (FileNotFoundException e) {
        LOG.info("show indexes: " + stringifyException(e));
        throw new HiveException(e.toString());
    } catch (IOException e) {
        LOG.info("show indexes: " + stringifyException(e));
        throw new HiveException(e.toString());
    } catch (Exception e) {
        throw new HiveException(e.toString());
    } finally {
        IOUtils.closeStream(outStream);
    }

    return 0;
}

From source file:org.apache.hadoop.hive.ql.exec.DDLTask.java

/**
 * Write a list of the user defined functions to a file.
 * @param db
 *
 * @param showFuncs
 *          are the functions we're interested in.
 * @return Returns 0 when execution succeeds and above 0 if it fails.
 * @throws HiveException
 *           Throws this exception if an unexpected error occurs.
 */
private int showFunctions(Hive db, ShowFunctionsDesc showFuncs) throws HiveException {
    // get the functions for the desired pattern - populate the output stream
    Set<String> funcs = null;
    if (showFuncs.getPattern() != null) {
        LOG.info("pattern: " + showFuncs.getPattern());
        if (showFuncs.getIsLikePattern()) {
            funcs = FunctionRegistry.getFunctionNamesByLikePattern(showFuncs.getPattern());
        } else {
            console.printInfo("SHOW FUNCTIONS is deprecated, please use SHOW FUNCTIONS LIKE instead.");
            funcs = FunctionRegistry.getFunctionNames(showFuncs.getPattern());
        }
        LOG.info("results : " + funcs.size());
    } else {
        funcs = FunctionRegistry.getFunctionNames();
    }

    // write the results in the file
    DataOutputStream outStream = getOutputStream(showFuncs.getResFile());
    try {
        SortedSet<String> sortedFuncs = new TreeSet<String>(funcs);
        // To remove the primitive types
        sortedFuncs.removeAll(serdeConstants.PrimitiveTypes);
        Iterator<String> iterFuncs = sortedFuncs.iterator();

        while (iterFuncs.hasNext()) {
            // create a row per table name
            outStream.writeBytes(iterFuncs.next());
            outStream.write(terminator);
        }
    } catch (FileNotFoundException e) {
        LOG.warn("show function: " + stringifyException(e));
        return 1;
    } catch (IOException e) {
        LOG.warn("show function: " + stringifyException(e));
        return 1;
    } catch (Exception e) {
        throw new HiveException(e);
    } finally {
        IOUtils.closeStream(outStream);
    }
    return 0;
}

From source file:org.apache.fontbox.ttf.TTFSubFont.java

private byte[] buildPostTable() throws IOException {
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    DataOutputStream dos = new DataOutputStream(bos);
    LOG.debug("Building table [post]...");
    PostScriptTable p = this.baseTTF.getPostScript();
    if (p == null) {
        // sometimes there is no post table in an embedded subfont
        // create a dummy
        p = new PostScriptTable();
    }
    String[] glyphNames = p.getGlyphNames();
    /*
    Fixed    format    Format of this table
    Fixed    italicAngle    Italic angle in degrees
    FWord    underlinePosition    Underline position
    FWord    underlineThickness    Underline thickness
    uint32    isFixedPitch    Font is monospaced; set to 1 if the font is monospaced and 0 otherwise 
    (N.B., to maintain compatibility with older versions of the TrueType spec, accept any non-zero value
     as meaning that the font is monospaced)
    uint32    minMemType42    Minimum memory usage when a TrueType font is downloaded as a Type 42 font
    uint32    maxMemType42    Maximum memory usage when a TrueType font is downloaded as a Type 42 font
    uint32    minMemType1    Minimum memory usage when a TrueType font is downloaded as a Type 1 font
    uint32    maxMemType1    Maximum memory usage when a TrueType font is downloaded as a Type 1 font
    uint16    numberOfGlyphs    number of glyphs
    uint16    glyphNameIndex[numberOfGlyphs]    Ordinal number of this glyph in 'post' string tables. 
    This is not an offset.
    Pascal string    names[numberNewGlyphs]  glyph names with length bytes [variable] (a Pascal string)
     */
    writeFixed(dos, 2.0);
    writeFixed(dos, p.getItalicAngle());
    writeSint16(dos, p.getUnderlinePosition());
    writeSint16(dos, p.getUnderlineThickness());
    writeUint32(dos, p.getIsFixedPitch());
    writeUint32(dos, p.getMinMemType42());
    writeUint32(dos, p.getMaxMemType42());
    writeUint32(dos, p.getMimMemType1());
    writeUint32(dos, p.getMaxMemType1());
    writeUint16(dos, baseTTF.getHorizontalHeader().getNumberOfHMetrics());

    List<String> additionalNames = new ArrayList<String>();
    Map<String, Integer> additionalNamesIndices = new HashMap<String, Integer>();

    if (glyphNames == null) {
        Encoding enc = MacRomanEncoding.INSTANCE;
        int[] gidToUC = this.baseCmap.getGlyphIdToCharacterCode();
        for (Integer glyphId : this.glyphIds) {
            int uc = gidToUC[glyphId.intValue()];
            String name = null;
            if (uc < 0x8000) {
                try {
                    name = enc.getNameFromCharacter((char) uc);
                } catch (IOException e) {
                    // TODO
                }
            }
            if (name == null) {
                name = String.format(Locale.ENGLISH, "uni%04X", uc);
            }
            Integer macId = Encoding.MAC_GLYPH_NAMES_INDICES.get(name);
            if (macId == null) {
                Integer idx = additionalNamesIndices.get(name);
                if (idx == null) {
                    idx = additionalNames.size();
                    additionalNames.add(name);
                    additionalNamesIndices.put(name, idx);
                }
                writeUint16(dos, idx.intValue() + 258);
            } else {
                writeUint16(dos, macId.intValue());
            }
        }
    } else {
        for (Integer glyphId : this.glyphIds) {
            String name = glyphNames[glyphId.intValue()];
            Integer macId = Encoding.MAC_GLYPH_NAMES_INDICES.get(name);
            if (macId == null) {
                Integer idx = additionalNamesIndices.get(name);
                if (idx == null) {
                    idx = additionalNames.size();
                    additionalNames.add(name);
                    additionalNamesIndices.put(name, idx);
                }
                writeUint16(dos, idx.intValue() + 258);
            } else {
                writeUint16(dos, macId.intValue());
            }
        }
    }

    for (String name : additionalNames) {
        LOG.debug("additionalName=[" + name + "].");
        byte[] buf = name.getBytes("US-ASCII");
        writeUint8(dos, buf.length);
        dos.write(buf);
    }
    dos.flush();
    LOG.debug("Finished table [post].");
    return bos.toByteArray();
}

From source file:org.apache.hadoop.hive.ql.exec.DDLTask.java

private int showCreateDatabase(Hive db, DataOutputStream outStream, String databaseName) throws Exception {
    Database database = db.getDatabase(databaseName);

    StringBuilder createDb_str = new StringBuilder();
    createDb_str.append("CREATE DATABASE `").append(database.getName()).append("`\n");
    if (database.getDescription() != null) {
        createDb_str.append("COMMENT\n  '");
        createDb_str.append(HiveStringUtils.escapeHiveCommand(database.getDescription())).append("'\n");
    }
    createDb_str.append("LOCATION\n  '");
    createDb_str.append(database.getLocationUri()).append("'\n");
    String propertiesToString = propertiesToString(database.getParameters(), null);
    if (!propertiesToString.isEmpty()) {
        createDb_str.append("WITH DBPROPERTIES (\n");
        createDb_str.append(propertiesToString).append(")\n");
    }

    outStream.write(createDb_str.toString().getBytes("UTF-8"));
    return 0;
}

From source file:com.yeahka.android.lepos.Device.java

/**
 * Uploads a photo file to the QuickenLoans web server as a multipart/form-data POST.
 *
 * @param actionUrl the URL to post the file to
 * @param file      the file to upload
 * @return the parsed server response, or a network-failure result
 * @author "Char"
 * @create_date 2015-8-18
 */
private ResultModel sendPhotoToQuickenLoansWebServer(String actionUrl, File file) {
    try {
        int TIME_OUT = 10 * 1000; // connect/read timeout in milliseconds
        String CHARSET = "utf-8"; // request charset
        String BOUNDARY = UUID.randomUUID().toString(); // unique multipart boundary
        String PREFIX = "--", LINE_END = "\r\n";
        String CONTENT_TYPE = "multipart/form-data"; // request content type

        URL url = new URL(actionUrl);
        //            HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        HttpURLConnection conn = MyHttps.getHttpURLConnection(url);
        conn.setReadTimeout(TIME_OUT);
        conn.setConnectTimeout(TIME_OUT);
        conn.setDoInput(true); // read the response
        conn.setDoOutput(true); // write a request body
        conn.setUseCaches(false); // no caching for an upload
        conn.setRequestMethod("POST");
        conn.setRequestProperty("Charset", CHARSET);
        conn.setRequestProperty("connection", "keep-alive");
        conn.setRequestProperty("Content-Type", CONTENT_TYPE + ";boundary=" + BOUNDARY);

        if (file != null) {
            // write the multipart header for the file part
            DataOutputStream dos = new DataOutputStream(conn.getOutputStream());
            StringBuffer sb = new StringBuffer();
            sb.append(PREFIX);
            sb.append(BOUNDARY);
            sb.append(LINE_END);
            // "name" is the form-field key the server expects;
            // "filename" is the name of the file being uploaded

            sb.append("Content-Disposition: form-data; name=\"file\"; filename=\"" + file.getName() + "\""
                    + LINE_END);
            sb.append("Content-Type: application/octet-stream; charset=" + CHARSET + LINE_END);
            sb.append(LINE_END);
            dos.write(sb.toString().getBytes());
            InputStream is = new FileInputStream(file);
            byte[] bytes = new byte[1024];
            int len = 0;
            while ((len = is.read(bytes)) != -1) {
                dos.write(bytes, 0, len);
            }
            is.close();
            dos.write(LINE_END.getBytes());
            byte[] end_data = (PREFIX + BOUNDARY + PREFIX + LINE_END).getBytes();
            dos.write(end_data);
            dos.flush();
        }
        // read the server response
        int res = conn.getResponseCode();
        InputStream in = conn.getInputStream();
        InputStreamReader isReader = new InputStreamReader(in);
        BufferedReader bufReader = new BufferedReader(isReader);
        String line = null;
        String data = "";

        if (res == 200) {
            while ((line = bufReader.readLine()) != null) {
                data += line;
            }
        } else {
            return new ResultModel(Device.TRANSACTION_NET_FAIL);
        }
        in.close();
        conn.disconnect();
        return new ResultModel(data);
    } catch (MalformedURLException e) {
        return new ResultModel(Device.TRANSACTION_NET_FAIL, e);
    } catch (IOException e) {
        return new ResultModel(Device.TRANSACTION_NET_FAIL, e);
    } catch (NoSuchAlgorithmException e) {
        e.printStackTrace();
        return new ResultModel(Device.TRANSACTION_NET_FAIL, e);
    } catch (KeyManagementException e) {
        e.printStackTrace();
        return new ResultModel(Device.TRANSACTION_NET_FAIL, e);
    }
}

From source file:org.apache.hadoop.hive.ql.exec.DDLTask.java

private int showCreateTable(Hive db, DataOutputStream outStream, String tableName) throws HiveException {
    final String EXTERNAL = "external";
    final String TEMPORARY = "temporary";
    final String LIST_COLUMNS = "columns";
    final String TBL_COMMENT = "tbl_comment";
    final String LIST_PARTITIONS = "partitions";
    final String SORT_BUCKET = "sort_bucket";
    final String SKEWED_INFO = "tbl_skewedinfo";
    final String ROW_FORMAT = "row_format";
    final String TBL_LOCATION = "tbl_location";
    final String TBL_PROPERTIES = "tbl_properties";
    boolean needsLocation = true;
    StringBuilder createTab_str = new StringBuilder();

    Table tbl = db.getTable(tableName, false);
    List<String> duplicateProps = new ArrayList<String>();
    try {
        needsLocation = doesTableNeedLocation(tbl);

        if (tbl.isView()) {
            String createTab_stmt = "CREATE VIEW `" + tableName + "` AS " + tbl.getViewExpandedText();
            outStream.write(createTab_stmt.getBytes(StandardCharsets.UTF_8));
            return 0;
        }

        createTab_str.append("CREATE <" + TEMPORARY + "><" + EXTERNAL + ">TABLE `");
        createTab_str.append(tableName + "`(\n");
        createTab_str.append("<" + LIST_COLUMNS + ">)\n");
        createTab_str.append("<" + TBL_COMMENT + ">\n");
        createTab_str.append("<" + LIST_PARTITIONS + ">\n");
        createTab_str.append("<" + SORT_BUCKET + ">\n");
        createTab_str.append("<" + SKEWED_INFO + ">\n");
        createTab_str.append("<" + ROW_FORMAT + ">\n");
        if (needsLocation) {
            createTab_str.append("LOCATION\n");
            createTab_str.append("<" + TBL_LOCATION + ">\n");
        }
        createTab_str.append("TBLPROPERTIES (\n");
        createTab_str.append("<" + TBL_PROPERTIES + ">)\n");
        ST createTab_stmt = new ST(createTab_str.toString());

        // For cases where the table is temporary
        String tbl_temp = "";
        if (tbl.isTemporary()) {
            duplicateProps.add("TEMPORARY");
            tbl_temp = "TEMPORARY ";
        }
        // For cases where the table is external
        String tbl_external = "";
        if (tbl.getTableType() == TableType.EXTERNAL_TABLE) {
            duplicateProps.add("EXTERNAL");
            tbl_external = "EXTERNAL ";
        }

        // Columns
        String tbl_columns = "";
        List<FieldSchema> cols = tbl.getCols();
        List<String> columns = new ArrayList<String>();
        for (FieldSchema col : cols) {
            String columnDesc = "  `" + col.getName() + "` " + col.getType();
            if (col.getComment() != null) {
                columnDesc = columnDesc + " COMMENT '" + HiveStringUtils.escapeHiveCommand(col.getComment())
                        + "'";
            }
            columns.add(columnDesc);
        }
        tbl_columns = StringUtils.join(columns, ", \n");

        // Table comment
        String tbl_comment = "";
        String tabComment = tbl.getProperty("comment");
        if (tabComment != null) {
            duplicateProps.add("comment");
            tbl_comment = "COMMENT '" + HiveStringUtils.escapeHiveCommand(tabComment) + "'";
        }

        // Partitions
        String tbl_partitions = "";
        List<FieldSchema> partKeys = tbl.getPartitionKeys();
        if (partKeys.size() > 0) {
            tbl_partitions += "PARTITIONED BY ( \n";
            List<String> partCols = new ArrayList<String>();
            for (FieldSchema partKey : partKeys) {
                String partColDesc = "  `" + partKey.getName() + "` " + partKey.getType();
                if (partKey.getComment() != null) {
                    partColDesc = partColDesc + " COMMENT '"
                            + HiveStringUtils.escapeHiveCommand(partKey.getComment()) + "'";
                }
                partCols.add(partColDesc);
            }
            tbl_partitions += StringUtils.join(partCols, ", \n");
            tbl_partitions += ")";
        }

        // Clusters (Buckets)
        String tbl_sort_bucket = "";
        List<String> buckCols = tbl.getBucketCols();
        if (buckCols.size() > 0) {
            duplicateProps.add("SORTBUCKETCOLSPREFIX");
            tbl_sort_bucket += "CLUSTERED BY ( \n  ";
            tbl_sort_bucket += StringUtils.join(buckCols, ", \n  ");
            tbl_sort_bucket += ") \n";
            List<Order> sortCols = tbl.getSortCols();
            if (sortCols.size() > 0) {
                tbl_sort_bucket += "SORTED BY ( \n";
                // Order
                List<String> sortKeys = new ArrayList<String>();
                for (Order sortCol : sortCols) {
                    String sortKeyDesc = "  " + sortCol.getCol() + " ";
                    if (sortCol.getOrder() == BaseSemanticAnalyzer.HIVE_COLUMN_ORDER_ASC) {
                        sortKeyDesc = sortKeyDesc + "ASC";
                    } else if (sortCol.getOrder() == BaseSemanticAnalyzer.HIVE_COLUMN_ORDER_DESC) {
                        sortKeyDesc = sortKeyDesc + "DESC";
                    }
                    sortKeys.add(sortKeyDesc);
                }
                tbl_sort_bucket += StringUtils.join(sortKeys, ", \n");
                tbl_sort_bucket += ") \n";
            }
            tbl_sort_bucket += "INTO " + tbl.getNumBuckets() + " BUCKETS";
        }

        // Skewed Info
        StringBuilder tbl_skewedinfo = new StringBuilder();
        SkewedInfo skewedInfo = tbl.getSkewedInfo();
        if (skewedInfo != null && !skewedInfo.getSkewedColNames().isEmpty()) {
            tbl_skewedinfo
                    .append("SKEWED BY (" + StringUtils.join(skewedInfo.getSkewedColNames(), ",") + ")\n");
            tbl_skewedinfo.append("  ON (");
            List<String> colValueList = new ArrayList<String>();
            for (List<String> colValues : skewedInfo.getSkewedColValues()) {
                colValueList.add("('" + StringUtils.join(colValues, "','") + "')");
            }
            tbl_skewedinfo.append(StringUtils.join(colValueList, ",") + ")");
            if (tbl.isStoredAsSubDirectories()) {
                tbl_skewedinfo.append("\n  STORED AS DIRECTORIES");
            }
        }

        // Row format (SerDe)
        StringBuilder tbl_row_format = new StringBuilder();
        StorageDescriptor sd = tbl.getTTable().getSd();
        SerDeInfo serdeInfo = sd.getSerdeInfo();
        Map<String, String> serdeParams = serdeInfo.getParameters();
        tbl_row_format.append("ROW FORMAT SERDE \n");
        tbl_row_format
                .append("  '" + HiveStringUtils.escapeHiveCommand(serdeInfo.getSerializationLib()) + "' \n");
        if (tbl.getStorageHandler() == null) {
            // If the serialization.format property has the default value, it will not be included in
            // the SERDE properties
            if (MetaStoreUtils.DEFAULT_SERIALIZATION_FORMAT
                    .equals(serdeParams.get(serdeConstants.SERIALIZATION_FORMAT))) {
                serdeParams.remove(serdeConstants.SERIALIZATION_FORMAT);
            }
            if (!serdeParams.isEmpty()) {
                appendSerdeParams(tbl_row_format, serdeParams).append(" \n");
            }
            tbl_row_format.append("STORED AS INPUTFORMAT \n  '"
                    + HiveStringUtils.escapeHiveCommand(sd.getInputFormat()) + "' \n");
            tbl_row_format.append(
                    "OUTPUTFORMAT \n  '" + HiveStringUtils.escapeHiveCommand(sd.getOutputFormat()) + "'");
        } else {
            duplicateProps.add(META_TABLE_STORAGE);
            tbl_row_format.append("STORED BY \n  '"
                    + HiveStringUtils.escapeHiveCommand(tbl.getParameters().get(META_TABLE_STORAGE)) + "' \n");
            // SerDe Properties
            if (!serdeParams.isEmpty()) {
                appendSerdeParams(tbl_row_format, serdeInfo.getParameters());
            }
        }
        String tbl_location = "  '" + HiveStringUtils.escapeHiveCommand(sd.getLocation()) + "'";

        // Table properties
        duplicateProps.addAll(Arrays.asList(StatsSetupConst.TABLE_PARAMS_STATS_KEYS));
        String tbl_properties = propertiesToString(tbl.getParameters(), duplicateProps);

        createTab_stmt.add(TEMPORARY, tbl_temp);
        createTab_stmt.add(EXTERNAL, tbl_external);
        createTab_stmt.add(LIST_COLUMNS, tbl_columns);
        createTab_stmt.add(TBL_COMMENT, tbl_comment);
        createTab_stmt.add(LIST_PARTITIONS, tbl_partitions);
        createTab_stmt.add(SORT_BUCKET, tbl_sort_bucket);
        createTab_stmt.add(SKEWED_INFO, tbl_skewedinfo);
        createTab_stmt.add(ROW_FORMAT, tbl_row_format);
        // Table location should not be printed for HBase-backed tables
        if (needsLocation) {
            createTab_stmt.add(TBL_LOCATION, tbl_location);
        }
        createTab_stmt.add(TBL_PROPERTIES, tbl_properties);

        outStream.write(createTab_stmt.render().getBytes(StandardCharsets.UTF_8));
    } catch (IOException e) {
        LOG.info("show create table: " + stringifyException(e));
        return 1;
    }

    return 0;
}