Example usage for org.apache.commons.lang3 StringUtils repeat

List of usage examples for org.apache.commons.lang3 StringUtils repeat

Introduction

In this page you can find the example usage for org.apache.commons.lang3 StringUtils repeat.

Prototype

public static String repeat(final char ch, final int repeat) 

Source Link

Document

<p>Returns padding using the specified delimiter repeated to a given length.</p> <pre> StringUtils.repeat('e', 0) = "" StringUtils.repeat('e', 3) = "eee" StringUtils.repeat('e', -2) = "" </pre> <p>Note: this method does not support padding with <a href="http://www.unicode.org/glossary/#supplementary_character">Unicode Supplementary Characters</a> as they require a pair of chars to be represented.

Usage

From source file:org.apache.flex.utilities.converter.retrievers.utils.ProgressBar.java

/**
 * Renders a 50-segment textual progress bar to stdout, overwriting the
 * current console line via a leading carriage return.
 *
 * @param current number of units transferred so far, compared against {@code total}
 */
protected void drawOutput(long current) {
    // Whole-number percentage complete; each bar segment represents 2%.
    final int percent = (int) Math.round(((double) current / (double) total) * (double) 100);
    final int filled = percent / 2;
    final int remaining = 50 - filled;
    // The ">" head consumes one of the remaining segments, so pad with one less space.
    final String head = (remaining > 0) ? ">" + StringUtils.repeat(" ", remaining - 1) : "";
    System.out.print("\r" + String.format(" %3d", percent) + "% ["
            + StringUtils.repeat("=", filled) + head + "] ");
}

From source file:org.apache.flink.api.java.Utils.java

/**
 * Builds an indented, human-readable tree describing the structure of the
 * given type information. Composite types are expanded field by field,
 * generic types are further expanded via reflection, and everything else is
 * printed on a single line.
 *
 * @param ti     the type information to render
 * @param indent number of leading spaces for the current nesting level
 * @return a multi-line description of {@code ti}, each line newline-terminated
 */
private static <T> String getSerializerTree(TypeInformation<T> ti, int indent) {
    StringBuilder out = new StringBuilder();
    if (ti instanceof CompositeType) {
        out.append(StringUtils.repeat(' ', indent)).append(ti.getClass().getSimpleName()).append("\n");
        CompositeType<T> composite = (CompositeType<T>) ti;
        String[] names = composite.getFieldNames();
        for (int field = 0; field < composite.getArity(); field++) {
            TypeInformation<?> fieldType = composite.getTypeAt(field);
            // NOTE(review): the recursive call reuses the current indent even though
            // the field label itself is indented two further — preserved as-is.
            out.append(StringUtils.repeat(' ', indent + 2)).append(names[field]).append(":")
                    .append(getSerializerTree(fieldType, indent));
        }
    } else if (ti instanceof GenericTypeInfo) {
        out.append(StringUtils.repeat(' ', indent)).append("GenericTypeInfo (")
                .append(ti.getTypeClass().getSimpleName()).append(")\n");
        out.append(getGenericTypeTree(ti.getTypeClass(), indent + 4));
    } else {
        out.append(StringUtils.repeat(' ', indent)).append(ti.toString()).append("\n");
    }
    return out.toString();
}

From source file:org.apache.flink.api.java.Utils.java

/**
 * Recursively lists the serializable instance fields of {@code type}, one
 * per line, indented by {@code indent} spaces; non-primitive field types are
 * expanded four spaces deeper.
 *
 * @param type   the class whose declared fields are listed
 * @param indent number of leading spaces for this nesting level
 * @return a multi-line field listing, each line newline-terminated
 */
private static String getGenericTypeTree(Class<?> type, int indent) {
    StringBuilder out = new StringBuilder();
    for (Field field : type.getDeclaredFields()) {
        final int mods = field.getModifiers();
        // Static and transient fields are not part of serialized state.
        if (Modifier.isStatic(mods) || Modifier.isTransient(mods)) {
            continue;
        }
        out.append(StringUtils.repeat(' ', indent)).append(field.getName()).append(":")
                .append(field.getType().getName())
                .append(field.getType().isEnum() ? " (is enum)" : "").append("\n");
        if (!field.getType().isPrimitive()) {
            out.append(getGenericTypeTree(field.getType(), indent + 4));
        }
    }
    return out.toString();
}

From source file:org.apache.hadoop.hbase.KeyValueTestUtil.java

/**
 * Formats a KeyValue as one aligned line: optional zero-padded length
 * metadata, then row / family / qualifier / timestamp / type / value columns
 * padded out to the supplied maximum widths.
 */
protected static String toStringWithPadding(final KeyValue kv, final int maxRowLength, int maxFamilyLength,
        int maxQualifierLength, int maxTimestampLength, boolean includeMeta) {
    StringBuilder meta = new StringBuilder();
    String familyLength = kv.getFamilyLength() + " ";
    if (includeMeta) {
        // Fixed-width, zero-padded key/value/row lengths, each followed by a space.
        meta.append(Strings.padFront(kv.getKeyLength() + "", '0', 4)).append(" ");
        meta.append(Strings.padFront(kv.getValueLength() + "", '0', 4)).append(" ");
        meta.append(Strings.padFront(kv.getRowLength() + "", '0', 2)).append(" ");
    }
    // Gap widths so each column lines up with the widest entry in its column.
    final int rowGap = maxRowLength - getRowString(kv).length() + 2;
    final int familyGap = maxFamilyLength - getFamilyString(kv).length() + 2;
    final int qualifierGap = maxQualifierLength - getQualifierString(kv).length() + 1;
    final int timestampGap = maxTimestampLength - String.valueOf(kv.getTimestamp()).length() + 1;
    return meta + getRowString(kv) + StringUtils.repeat(' ', rowGap) + familyLength
            + getFamilyString(kv) + StringUtils.repeat(' ', familyGap) + getQualifierString(kv)
            + StringUtils.repeat(' ', qualifierGap) + getTimestampString(kv)
            + StringUtils.repeat(' ', timestampGap) + getTypeString(kv) + " " + getValueString(kv);
}

From source file:org.apache.hadoop.hbase.util.Strings.java

/**
 * Right-aligns {@code input} within a field of {@code length} characters by
 * prepending repeated {@code padding} characters (usually a space or '0').
 *
 * @param input the string to pad
 * @param padding the character to repeat to the left of the input string
 * @param length the desired total length including the padding
 * @return padding characters + input
 * @throws IllegalArgumentException if {@code input} is already longer than {@code length}
 */
public static String padFront(String input, char padding, int length) {
    if (input.length() > length) {
        // Fix: report the actual parameter name; the old message referred to a
        // nonexistent "maxLength" parameter.
        throw new IllegalArgumentException("input \"" + input + "\" longer than length=" + length);
    }
    int numPaddingCharacters = length - input.length();
    // repeat(char, 0) yields "", so an exact-length input is returned unchanged.
    return StringUtils.repeat(padding, numPaddingCharacters) + input;
}

From source file:org.apache.hadoop.hive.ql.parse.ASTNode.java

/**
 * Renders this AST node and all of its descendants into {@code sb}, one node
 * per line, indented three spaces per tree depth (iterative pre-order walk
 * using each node's {@code visited} flag instead of recursion).
 *
 * @param sb the builder to append to
 * @return the same builder, for chaining
 */
private StringBuilder dump(StringBuilder sb) {
    Deque<ASTNode> stack = new ArrayDeque<ASTNode>();
    stack.push(this);
    int tabLength = 0;

    while (!stack.isEmpty()) {
        ASTNode next = stack.peek(); // peek, not pop: the node is revisited on the way back up

        if (!next.visited) {
            // First visit: emit this node, then queue its children.
            sb.append(StringUtils.repeat(" ", tabLength * 3));
            sb.append(next.toString());
            sb.append("\n");

            if (next.children != null) {
                // Push in reverse so children are printed in their original order.
                for (int i = next.children.size() - 1; i >= 0; i--) {
                    stack.push((ASTNode) next.children.get(i));
                }
            }

            tabLength++;
            next.visited = true;
        } else {
            // Second visit: all children emitted — unwind one indentation level
            // and reset the flag so a later dump() starts clean.
            tabLength--;
            next.visited = false;
            stack.pop();
        }
    }

    return sb;
}

From source file:org.apache.impala.analysis.AnalyzeDDLTest.java

/**
 * Exercises analysis of CREATE TABLE statements: column and partition types
 * and their limits, table/serde properties, file formats, row-format
 * delimiters, name validation, comment length, location URIs, caching
 * clauses, and tables produced by external data sources.
 */
@Test
public void TestCreateTable() throws AnalysisException {
    // Basic statements, IF NOT EXISTS, and collisions with existing tables.
    AnalyzesOk("create table functional.new_table (i int)");
    AnalyzesOk("create table if not exists functional.alltypes (i int)");
    AnalysisError("create table functional.alltypes", "Table already exists: functional.alltypes");
    AnalysisError("create table functional.alltypes (i int)", "Table already exists: functional.alltypes");
    AnalyzesOk("create table functional.new_table (i int) row format delimited fields " + "terminated by '|'");

    // DECIMAL columns and partition columns, including the precision limit.
    AnalyzesOk("create table new_table (i int) PARTITIONED BY (d decimal)");
    AnalyzesOk("create table new_table (i int) PARTITIONED BY (d decimal(3,1))");
    AnalyzesOk("create table new_table(d1 decimal, d2 decimal(10), d3 decimal(5, 2))");
    AnalysisError("create table new_table (i int) PARTITIONED BY (d decimal(40,1))",
            "Decimal precision must be <= 38: 40");

    // VARCHAR/CHAR size limits (1..65535 and 1..255 respectively).
    AnalyzesOk("create table new_table(s1 varchar(1), s2 varchar(32672), " + "s3 varchar(65535))");
    AnalysisError("create table new_table(s1 varchar(0))", "Varchar size must be > 0: 0");
    AnalysisError("create table new_table(s1 varchar(65536))", "Varchar size must be <= 65535: 65536");
    AnalysisError("create table new_table(s1 char(0))", "Char size must be > 0: 0");
    AnalysisError("create table new_table(s1 Char(256))", "Char size must be <= 255: 256");
    AnalyzesOk("create table new_table (i int) PARTITIONED BY (s varchar(3))");
    AnalyzesOk("create table functional.new_table (c char(250))");
    AnalyzesOk("create table new_table (i int) PARTITIONED BY (c char(3))");

    {
        // Check that long_properties fail at the analysis layer
        String long_property_key = "";
        for (int i = 0; i < MetaStoreUtil.MAX_PROPERTY_KEY_LENGTH; ++i) {
            long_property_key += 'k';
        }
        String long_property_value = "";
        for (int i = 0; i < MetaStoreUtil.MAX_PROPERTY_VALUE_LENGTH; ++i) {
            long_property_value += 'v';
        }

        // At this point long_property_{key_value} are actually not quite long enough to
        // cause analysis to fail.

        AnalyzesOk("create table new_table (i int) " + "with serdeproperties ('" + long_property_key + "'='"
                + long_property_value + "') " + "tblproperties ('" + long_property_key + "'='"
                + long_property_value + "')");

        long_property_key += 'X';
        long_property_value += 'X';
        // Now that long_property_{key,value} are one character longer, they are too long
        // for the analyzer.

        AnalysisError("create table new_table (i int) " + "tblproperties ('" + long_property_key + "'='value')",
                "Property key length must be <= " + MetaStoreUtil.MAX_PROPERTY_KEY_LENGTH + ": "
                        + (MetaStoreUtil.MAX_PROPERTY_KEY_LENGTH + 1));

        AnalysisError("create table new_table (i int) " + "tblproperties ('key'='" + long_property_value + "')",
                "Property value length must be <= " + MetaStoreUtil.MAX_PROPERTY_VALUE_LENGTH + ": "
                        + (MetaStoreUtil.MAX_PROPERTY_VALUE_LENGTH + 1));

        AnalysisError(
                "create table new_table (i int) " + "with serdeproperties ('" + long_property_key
                        + "'='value')",
                "Serde property key length must be <= " + MetaStoreUtil.MAX_PROPERTY_KEY_LENGTH + ": "
                        + (MetaStoreUtil.MAX_PROPERTY_KEY_LENGTH + 1));

        AnalysisError(
                "create table new_table (i int) " + "with serdeproperties ('key'='" + long_property_value
                        + "')",
                "Serde property value length must be <= " + MetaStoreUtil.MAX_PROPERTY_VALUE_LENGTH + ": "
                        + (MetaStoreUtil.MAX_PROPERTY_VALUE_LENGTH + 1));
    }

    // Supported file formats. Exclude Avro since it is tested separately.
    String[] fileFormats = { "TEXTFILE", "SEQUENCEFILE", "PARQUET", "PARQUETFILE", "RCFILE" };
    String[] fileFormatsStr = { "TEXT", "SEQUENCE_FILE", "PARQUET", "PARQUET", "RC_FILE" };
    int formatIndx = 0;
    for (String format : fileFormats) {
        for (String create : ImmutableList.of("create table", "create external table")) {
            AnalyzesOk(String.format(
                    "%s new_table (i int) " + "partitioned by (d decimal) comment 'c' stored as %s", create,
                    format));
            // No column definitions.
            AnalysisError(String.format("%s new_table " + "partitioned by (d decimal) comment 'c' stored as %s",
                    create, format), "Table requires at least 1 column");
        }
        AnalysisError(String.format("create table t (i int primary key) stored as %s", format),
                String.format("Unsupported column options for file format " + "'%s': 'i INT PRIMARY KEY'",
                        fileFormatsStr[formatIndx]));
        AnalysisError(String.format("create table t (i int, primary key(i)) stored as %s", format),
                "Only Kudu tables can specify a PRIMARY KEY");
        formatIndx++;
    }

    // Note: Backslashes need to be escaped twice - once for Java and once for Impala.
    // For example, if this were a real query the value '\' would be stored in the
    // metastore for the ESCAPED BY field.
    AnalyzesOk("create table functional.new_table (i int) row format delimited fields "
            + "terminated by '\\t' escaped by '\\\\' lines terminated by '\\n'");
    AnalyzesOk("create table functional.new_table (i int) row format delimited fields "
            + "terminated by '\\001' escaped by '\\002' lines terminated by '\\n'");
    AnalyzesOk("create table functional.new_table (i int) row format delimited fields "
            + "terminated by '-2' escaped by '-3' lines terminated by '\\n'");
    AnalyzesOk("create table functional.new_table (i int) row format delimited fields "
            + "terminated by '-128' escaped by '127' lines terminated by '40'");

    // Delimiters must be a single character or a decimal byte value in [-128:127].
    AnalysisError(
            "create table functional.new_table (i int) row format delimited "
                    + "fields terminated by '-2' escaped by '128' lines terminated by '\\n'",
            "ESCAPED BY values and LINE/FIELD terminators must be specified as a single "
                    + "character or as a decimal value in the range [-128:127]: 128");
    AnalysisError(
            "create table functional.new_table (i int) row format delimited "
                    + "fields terminated by '-2' escaped by '127' lines terminated by '255'",
            "ESCAPED BY values and LINE/FIELD terminators must be specified as a single "
                    + "character or as a decimal value in the range [-128:127]: 255");
    AnalysisError(
            "create table functional.new_table (i int) row format delimited "
                    + "fields terminated by '-129' escaped by '127' lines terminated by '\\n'",
            "ESCAPED BY values and LINE/FIELD terminators must be specified as a single "
                    + "character or as a decimal value in the range [-128:127]: -129");
    AnalysisError(
            "create table functional.new_table (i int) row format delimited "
                    + "fields terminated by '||' escaped by '\\\\' lines terminated by '\\n'",
            "ESCAPED BY values and LINE/FIELD terminators must be specified as a single "
                    + "character or as a decimal value in the range [-128:127]: ||");

    // IMPALA-2251: it should not be possible to create text tables with the same
    // delimiter character used for multiple purposes.
    AnalysisError(
            "create table functional.broken_text_table (c int) "
                    + "row format delimited fields terminated by '\001' lines terminated by '\001'",
            "Field delimiter and line delimiter have same value: byte 1");
    AnalysisError(
            "create table functional.broken_text_table (c int) "
                    + "row format delimited lines terminated by '\001'",
            "Field delimiter and line delimiter have same value: byte 1");
    AnalysisError(
            "create table functional.broken_text_table (c int) "
                    + "row format delimited fields terminated by '\012'",
            "Field delimiter and line delimiter have same value: byte 10");
    AnalyzesOk("create table functional.broken_text_table (c int) " + "row format delimited escaped by '\001'",
            "Field delimiter and escape character have same value: byte 1. "
                    + "Escape character will be ignored");
    AnalyzesOk(
            "create table functional.broken_text_table (c int) "
                    + "row format delimited escaped by 'x' lines terminated by 'x'",
            "Line delimiter and escape character have same value: byte 120. "
                    + "Escape character will be ignored");

    // Nonexistent database and duplicate column names (case-insensitive).
    AnalysisError("create table db_does_not_exist.new_table (i int)",
            "Database does not exist: db_does_not_exist");
    AnalysisError("create table new_table (i int, I string)", "Duplicate column name: i");
    AnalysisError("create table new_table (c1 double, col2 int, c1 double, c4 string)",
            "Duplicate column name: c1");
    AnalysisError("create table new_table (i int, s string) PARTITIONED BY (i int)",
            "Duplicate column name: i");
    AnalysisError("create table new_table (i int) PARTITIONED BY (C int, c2 int, c int)",
            "Duplicate column name: c");

    // Unsupported partition-column types.
    AnalysisError("create table new_table (i int) PARTITIONED BY (t timestamp)",
            "Type 'TIMESTAMP' is not supported as partition-column type in column: t");

    // Caching ops
    AnalyzesOk("create table cached_tbl(i int) partitioned by(j int) " + "cached in 'testPool'");
    AnalyzesOk("create table cached_tbl(i int) partitioned by(j int) uncached");
    AnalyzesOk("create table cached_tbl(i int) partitioned by(j int) "
            + "location '/test-warehouse/' cached in 'testPool'");
    AnalyzesOk(
            "create table cached_tbl(i int) partitioned by(j int) " + "location '/test-warehouse/' uncached");
    AnalysisError(
            "create table cached_tbl(i int) location "
                    + "'file:///test-warehouse/cache_tbl' cached in 'testPool'",
            "Location 'file:/test-warehouse/cache_tbl' cannot be cached. "
                    + "Please retry without caching: CREATE TABLE ... UNCACHED");

    // Invalid database name.
    AnalysisError("create table `???`.new_table (x int) PARTITIONED BY (y int)", "Invalid database name: ???");
    // Invalid table/view name.
    AnalysisError("create table functional.`^&*` (x int) PARTITIONED BY (y int)",
            "Invalid table/view name: ^&*");
    // Invalid column names.
    AnalysisError("create table new_table (`???` int) PARTITIONED BY (i int)",
            "Invalid column/field name: ???");
    AnalysisError("create table new_table (i int) PARTITIONED BY (`^&*` int)",
            "Invalid column/field name: ^&*");
    // Test HMS constraint on comment length.
    AnalyzesOk(String.format("create table t (i int comment '%s')",
            StringUtils.repeat("c", MetaStoreUtil.CREATE_MAX_COMMENT_LENGTH)));
    AnalysisError(
            String.format("create table t (i int comment '%s')",
                    StringUtils.repeat("c", MetaStoreUtil.CREATE_MAX_COMMENT_LENGTH + 1)),
            "Comment of column 'i' exceeds maximum length of 256 characters:");

    // Valid URI values.
    AnalyzesOk("create table tbl (i int) location '/test-warehouse/new_table'");
    AnalyzesOk("create table tbl (i int) location " + "'hdfs://localhost:20500/test-warehouse/new_table'");
    AnalyzesOk("create table tbl (i int) location " + "'file:///test-warehouse/new_table'");
    AnalyzesOk("create table tbl (i int) location " + "'s3n://bucket/test-warehouse/new_table'");
    AnalyzesOk("ALTER TABLE functional_seq_snap.alltypes SET LOCATION " + "'file://test-warehouse/new_table'");

    // Invalid URI values.
    AnalysisError("create table functional.foo (x int) location " + "'foofs://test-warehouse/new_table'",
            "No FileSystem for scheme: foofs");
    AnalysisError("create table functional.foo (x int) location " + "'  '", "URI path cannot be empty.");
    AnalysisError(
            "ALTER TABLE functional_seq_snap.alltypes SET LOCATION " + "'foofs://test-warehouse/new_table'",
            "No FileSystem for scheme: foofs");
    AnalysisError("ALTER TABLE functional_seq_snap.alltypes SET LOCATION " + "'  '",
            "URI path cannot be empty.");

    // Create table PRODUCED BY DATA SOURCE
    final String DATA_SOURCE_NAME = "TestDataSource1";
    catalog_.addDataSource(new DataSource(DATA_SOURCE_NAME, "/foo.jar", "foo.Bar", "V1"));
    AnalyzesOk("CREATE TABLE DataSrcTable1 (x int) PRODUCED BY DATA SOURCE " + DATA_SOURCE_NAME);
    AnalyzesOk("CREATE TABLE DataSrcTable1 (x int) PRODUCED BY DATA SOURCE " + DATA_SOURCE_NAME.toLowerCase());
    AnalyzesOk("CREATE TABLE DataSrcTable1 (x int) PRODUCED BY DATA SOURCE " + DATA_SOURCE_NAME + "(\"\")");
    AnalyzesOk("CREATE TABLE DataSrcTable1 (a tinyint, b smallint, c int, d bigint, "
            + "e float, f double, g boolean, h string) PRODUCED BY DATA SOURCE " + DATA_SOURCE_NAME);
    AnalysisError("CREATE TABLE DataSrcTable1 (x int) PRODUCED BY DATA SOURCE " + "not_a_data_src(\"\")",
            "Data source does not exist");
    // Every primitive type NOT supported by data-source tables must be rejected.
    for (Type t : Type.getSupportedTypes()) {
        PrimitiveType type = t.getPrimitiveType();
        if (DataSourceTable.isSupportedPrimitiveType(type) || t.isNull())
            continue;
        String typeSpec = type.name();
        if (type == PrimitiveType.CHAR || type == PrimitiveType.DECIMAL || type == PrimitiveType.VARCHAR) {
            typeSpec += "(10)";
        }
        AnalysisError(
                "CREATE TABLE DataSrcTable1 (x " + typeSpec + ") PRODUCED " + "BY DATA SOURCE "
                        + DATA_SOURCE_NAME,
                "Tables produced by an external data source do not support the column type: " + type.name());
    }
}

From source file:org.apache.impala.catalog.ArrayType.java

@Override
protected String prettyPrint(int lpad) {
    final String pad = StringUtils.repeat(' ', lpad);
    // Scalar item types fit on a single line.
    if (itemType_.isScalarType()) {
        return pad + toSql();
    }
    // Render the nested item type with the same padding so inner fields line
    // up correctly, then strip that padding from the top level only.
    final String nested = itemType_.prettyPrint(lpad).substring(lpad);
    return String.format("%sARRAY<%s>", pad, nested);
}

From source file:org.apache.impala.catalog.MapType.java

@Override
protected String prettyPrint(int lpad) {
    final String pad = StringUtils.repeat(' ', lpad);
    // Scalar value types fit on a single line.
    if (valueType_.isScalarType()) {
        return pad + toSql();
    }
    // Render the nested value type with the same padding so inner fields line
    // up correctly, then strip that padding from the top level only.
    final String nested = valueType_.prettyPrint(lpad).substring(lpad);
    return String.format("%sMAP<%s,%s>", pad, keyType_.toSql(), nested);
}

From source file:org.apache.impala.catalog.StructType.java

@Override
protected String prettyPrint(int lpad) {
    final String pad = StringUtils.repeat(' ', lpad);
    // Each field is rendered two spaces deeper than the STRUCT header.
    ArrayList<String> fieldLines = new ArrayList<String>();
    for (StructField field : fields_) {
        fieldLines.add(field.prettyPrint(lpad + 2));
    }
    return String.format("%sSTRUCT<\n%s\n%s>", pad, Joiner.on(",\n").join(fieldLines), pad);
}