Example usage for org.apache.commons.lang3 StringUtils repeat

List of usage examples for org.apache.commons.lang3 StringUtils repeat

Introduction

On this page you can find example usages of org.apache.commons.lang3 StringUtils.repeat.

Prototype

public static String repeat(final String str, final String separator, final int repeat) 

Source Link

Document

Repeats the string str a given number of times (repeat) to form a new String, with the separator String injected between each repetition.

Usage

From source file:annis.administration.DefaultAdministrationDao.java

/**
 * Lists the definitions of indexes on the given tables, filtered by usage.
 *
 * @param used   if {@code true}, only indexes that have been scanned at least once
 *               are returned; if {@code false}, only never-scanned indexes
 * @param tables table names whose indexes should be inspected
 * @return index definition strings as reported by pg_get_indexdef
 */
public List<String> listIndexDefinitions(boolean used, List<String> tables) {
    // Used indexes have a non-zero scan count; unused ones compare equal to zero.
    final String comparison = used ? "!=" : "=";
    final String placeholders = StringUtils.repeat("?", ",", tables.size());
    StringBuilder query = new StringBuilder();
    query.append("SELECT pg_get_indexdef(x.indexrelid) AS indexdef ");
    query.append("FROM pg_index x, pg_class c ");
    query.append("WHERE x.indexrelid = c.oid ");
    query.append("AND c.relname IN ( ").append(placeholders).append(") ");
    query.append("AND pg_stat_get_numscans(x.indexrelid) ").append(comparison).append(" 0");
    return jdbcTemplate.query(query.toString(), tables.toArray(), stringRowMapper());
}

From source file:annis.administration.DefaultAdministrationDao.java

/**
 * Lists the definitions of all indexes belonging to the given tables.
 *
 * @param tables table names whose index definitions should be returned
 * @return index definition strings as reported by pg_get_indexdef
 */
public List<String> listIndexDefinitions(String... tables) {
    // One bind placeholder per table name for the IN clause.
    final String placeholders = StringUtils.repeat("?", ",", tables.length);
    StringBuilder query = new StringBuilder("SELECT pg_get_indexdef(x.indexrelid) AS indexdef ");
    query.append("FROM pg_index x, pg_class c, pg_indexes i ");
    query.append("WHERE x.indexrelid = c.oid ");
    query.append("AND c.relname = i.indexname ");
    query.append("AND i.tablename IN ( ").append(placeholders).append(" )");
    return jdbcTemplate.query(query.toString(), tables, new ParameterizedSingleColumnRowMapper<String>());
}

From source file:annis.administration.DefaultAdministrationDao.java

/**
 * Lists the definitions of indexes on the given tables that have been
 * scanned at least once (i.e. indexes actually used by queries).
 *
 * @param tables table names whose used indexes should be returned
 * @return index definition strings as reported by pg_get_indexdef
 */
public List<String> listUsedIndexes(String... tables) {
    final String placeholders = StringUtils.repeat("?", ",", tables.length);
    StringBuilder query = new StringBuilder("SELECT pg_get_indexdef(x.indexrelid) AS indexdef ");
    query.append("FROM pg_index x, pg_class c, pg_indexes i ");
    query.append("WHERE x.indexrelid = c.oid ");
    query.append("AND c.relname = i.indexname ");
    query.append("AND i.tablename IN ( ").append(placeholders).append(" ) ");
    // Non-zero scan count marks the index as used.
    query.append("AND pg_stat_get_numscans(x.indexrelid) != 0");
    return jdbcTemplate.query(query.toString(), tables, new ParameterizedSingleColumnRowMapper<String>());
}

From source file:annis.administration.AdministrationDao.java

/**
 * Lists the names of indexes on the given tables, excluding indexes that
 * back primary-key or unique constraints.
 *
 * @param tables table names to inspect
 * @return names of plain (non-constraint) indexes
 */
private List<String> listIndexesOnTables(List<String> tables) {
    final String placeholders = StringUtils.repeat("?", ",", tables.size());
    StringBuilder query = new StringBuilder("SELECT indexname ");
    query.append("FROM pg_indexes ");
    query.append("WHERE tablename IN (").append(placeholders).append(") ");
    // Exclude indexes created implicitly for primary-key ('p') and unique ('u') constraints.
    query.append("AND lower(indexname) NOT IN ");
    query.append("   (SELECT lower(conname) FROM pg_constraint WHERE contype in ('p', 'u'))");

    return getJdbcTemplate().query(query.toString(), tables.toArray(), stringRowMapper());
}

From source file:annis.administration.AdministrationDao.java

/**
 * Lists the definitions of indexes on the given tables, filtered by usage.
 *
 * @param used   if {@code true}, only indexes with at least one recorded scan
 *               are returned; if {@code false}, only never-scanned indexes
 * @param tables table names whose indexes should be inspected
 * @return index definition strings as reported by pg_get_indexdef
 */
public List<String> listIndexDefinitions(boolean used, List<String> tables) {
    // Pick the comparison against the scan counter depending on the flag.
    final String comparison = used ? "!=" : "=";
    final String placeholders = StringUtils.repeat("?", ",", tables.size());
    final String query = "SELECT pg_get_indexdef(x.indexrelid) AS indexdef "
            + "FROM pg_index x, pg_class c "
            + "WHERE x.indexrelid = c.oid "
            + "AND c.relname IN ( " + placeholders + ") "
            + "AND pg_stat_get_numscans(x.indexrelid) " + comparison + " 0";
    return getJdbcTemplate().query(query, tables.toArray(), stringRowMapper());
}

From source file:annis.administration.AdministrationDao.java

/**
 * Lists the definitions of all indexes belonging to the given tables.
 *
 * @param tables table names whose index definitions should be returned
 * @return index definition strings as reported by pg_get_indexdef
 */
public List<String> listIndexDefinitions(String... tables) {
    // One bind placeholder per table name for the IN clause.
    final String placeholders = StringUtils.repeat("?", ",", tables.length);
    final String query = "SELECT pg_get_indexdef(x.indexrelid) AS indexdef "
            + "FROM pg_index x, pg_class c, pg_indexes i "
            + "WHERE x.indexrelid = c.oid "
            + "AND c.relname = i.indexname "
            + "AND i.tablename IN ( " + placeholders + " )";
    return getJdbcTemplate().query(query, tables, new ParameterizedSingleColumnRowMapper<String>());
}

From source file:annis.administration.AdministrationDao.java

/**
 * Lists the definitions of indexes on the given tables that have been
 * scanned at least once (i.e. indexes actually used by queries).
 *
 * @param tables table names whose used indexes should be returned
 * @return index definition strings as reported by pg_get_indexdef
 */
public List<String> listUsedIndexes(String... tables) {
    final String placeholders = StringUtils.repeat("?", ",", tables.length);
    final String query = "SELECT pg_get_indexdef(x.indexrelid) AS indexdef "
            + "FROM pg_index x, pg_class c, pg_indexes i "
            + "WHERE x.indexrelid = c.oid "
            + "AND c.relname = i.indexname "
            + "AND i.tablename IN ( " + placeholders + " ) "
            // A non-zero scan count marks the index as used.
            + "AND pg_stat_get_numscans(x.indexrelid) != 0";
    return getJdbcTemplate().query(query, tables, new ParameterizedSingleColumnRowMapper<String>());
}

From source file:org.apache.falcon.catalog.CatalogPartitionHandler.java

/**
 * Reconciles the catalog partitions under the given static partition path with the
 * directories actually present on HDFS: partitions no longer on HDFS are dropped,
 * newly appeared directories are added, and partitions present in both are updated.
 *
 * @param conf            Hadoop configuration used to obtain the (proxied) file system
 * @param storage         catalog storage describing the target table
 * @param staticPath      HDFS path corresponding to the static partition values
 * @param staticPartition static partition values; dynamic values are discovered on HDFS
 * @throws FalconException if any HDFS interaction fails (IOException is wrapped)
 */
private void registerPartitions(Configuration conf, CatalogStorage storage, Path staticPath,
        List<String> staticPartition) throws FalconException {
    try {
        FileSystem fs = HadoopClientFactory.get().createProxiedFileSystem(conf);
        if (!fs.exists(staticPath)) {
            // Do nothing if the output path doesn't exist
            return;
        }

        // Columns beyond the static values must be discovered from directory names.
        List<String> partitionColumns = getPartitionColumns(conf, storage);
        int dynamicPartCols = partitionColumns.size() - staticPartition.size();
        Path searchPath = staticPath;
        if (dynamicPartCols > 0) {
            // One "*" glob segment per dynamic partition column, joined with "/".
            searchPath = new Path(staticPath, StringUtils.repeat("*", "/", dynamicPartCols));
        }

        // Figure out the dynamic partitions from the directories on hdfs;
        // map full partition value list -> HDFS location of that partition.
        FileStatus[] files = fs.globStatus(searchPath, PATH_FILTER);
        Map<List<String>, String> partitions = new HashMap<List<String>, String>();
        for (FileStatus file : files) {
            List<String> dynamicParts = getDynamicPartitions(file.getPath(), staticPath);
            List<String> partitionValues = new ArrayList<String>(staticPartition);
            partitionValues.addAll(dynamicParts);
            LOG.debug("Final partition - " + partitionValues);
            partitions.put(partitionValues, file.getPath().toString());
        }

        // Diff the catalog's view against HDFS to decide drop/add/update sets.
        List<List<String>> existPartitions = listPartitions(conf, storage, staticPartition);
        Collection<List<String>> targetPartitions = partitions.keySet();

        Collection<List<String>> partitionsForDrop = CollectionUtils.subtract(existPartitions,
                targetPartitions);
        Collection<List<String>> partitionsForAdd = CollectionUtils.subtract(targetPartitions, existPartitions);
        Collection<List<String>> partitionsForUpdate = CollectionUtils.intersection(existPartitions,
                targetPartitions);

        // Drops first, then adds, then updates of partitions present on both sides.
        for (List<String> partition : partitionsForDrop) {
            dropPartitions(conf, storage, partition);
        }

        for (List<String> partition : partitionsForAdd) {
            addPartition(conf, storage, partition, partitions.get(partition));
        }

        for (List<String> partition : partitionsForUpdate) {
            updatePartition(conf, storage, partition, partitions.get(partition));
        }
    } catch (IOException e) {
        throw new FalconException(e);
    }
}

From source file:org.apache.nifi.processors.standard.PutDatabaseRecord.java

/**
 * Builds a parameterized INSERT statement for the given record schema against the
 * given table, mapping record fields to table columns by (optionally translated)
 * name. Only fields that map to a column are included; their record indices are
 * returned alongside the SQL so values can be bound in the right order.
 *
 * @param recordSchema schema of the incoming record
 * @param tableName    name of the target table
 * @param tableSchema  schema of the target table (columns, required columns, quoting)
 * @param settings     DML behavior flags (name translation, quoting, unmapped-field handling)
 * @return the INSERT SQL with '?' placeholders plus the record-field indices included in it
 * @throws IllegalArgumentException if a required column has no matching record field
 *                                  and settings.failUnmappedColumns is set
 * @throws SQLException             if a field cannot be mapped (SQLDataException) or no
 *                                  field maps to any column
 */
SqlAndIncludedColumns generateInsert(final RecordSchema recordSchema, final String tableName,
        final TableSchema tableSchema, final DMLSettings settings)
        throws IllegalArgumentException, SQLException {

    final Set<String> normalizedFieldNames = getNormalizedColumnNames(recordSchema,
            settings.translateFieldNames);

    // Verify every required column is covered by some record field; depending on
    // settings this is a hard error, a warning, or silently ignored.
    for (final String requiredColName : tableSchema.getRequiredColumnNames()) {
        final String normalizedColName = normalizeColumnName(requiredColName, settings.translateFieldNames);
        if (!normalizedFieldNames.contains(normalizedColName)) {
            String missingColMessage = "Record does not have a value for the Required column '"
                    + requiredColName + "'";
            if (settings.failUnmappedColumns) {
                getLogger().error(missingColMessage);
                throw new IllegalArgumentException(missingColMessage);
            } else if (settings.warningUnmappedColumns) {
                getLogger().warn(missingColMessage);
            }
        }
    }

    final StringBuilder sqlBuilder = new StringBuilder();
    sqlBuilder.append("INSERT INTO ");
    if (settings.quoteTableName) {
        sqlBuilder.append(tableSchema.getQuotedIdentifierString()).append(tableName)
                .append(tableSchema.getQuotedIdentifierString());
    } else {
        sqlBuilder.append(tableName);
    }
    sqlBuilder.append(" (");

    // iterate over all of the fields in the record, building the SQL statement by adding the column names
    List<String> fieldNames = recordSchema.getFieldNames();
    final List<Integer> includedColumns = new ArrayList<>();
    if (fieldNames != null) {
        int fieldCount = fieldNames.size();
        // Counts mapped fields; also used to decide when a comma separator is needed.
        AtomicInteger fieldsFound = new AtomicInteger(0);

        for (int i = 0; i < fieldCount; i++) {
            RecordField field = recordSchema.getField(i);
            String fieldName = field.getFieldName();

            final ColumnDescription desc = tableSchema.getColumns()
                    .get(normalizeColumnName(fieldName, settings.translateFieldNames));
            if (desc == null && !settings.ignoreUnmappedFields) {
                throw new SQLDataException(
                        "Cannot map field '" + fieldName + "' to any column in the database");
            }

            if (desc != null) {
                // Comma before every column except the first mapped one.
                if (fieldsFound.getAndIncrement() > 0) {
                    sqlBuilder.append(", ");
                }

                if (settings.escapeColumnNames) {
                    sqlBuilder.append(tableSchema.getQuotedIdentifierString()).append(desc.getColumnName())
                            .append(tableSchema.getQuotedIdentifierString());
                } else {
                    sqlBuilder.append(desc.getColumnName());
                }
                // Remember the record-field index so the caller binds values in order.
                includedColumns.add(i);
            }
        }

        // complete the SQL statements by adding ?'s for all of the values to be escaped.
        sqlBuilder.append(") VALUES (");
        sqlBuilder.append(StringUtils.repeat("?", ",", includedColumns.size()));
        sqlBuilder.append(")");

        if (fieldsFound.get() == 0) {
            throw new SQLDataException("None of the fields in the record map to the columns defined by the "
                    + tableName + " table");
        }
    }
    return new SqlAndIncludedColumns(sqlBuilder.toString(), includedColumns);
}

From source file:org.kontalk.xmppserver.presence.JDBCPresenceRepository.java

@Override
public void initRepository(String resource_uri, Map<String, String> params) throws DBInitException {
    if (initialized) {
        return;/*from  w  w  w  .  ja v a2s. co m*/
    }

    super.initRepository(resource_uri, params);

    initialized = true;
    log.log(Level.INFO, "Initializing message repository: {0}", resource_uri);

    try {
        DataRepository data_repo = getRepository();

        String extraSql;

        Map<String, Object> props = XMPPServer.getConfigurator().getProperties("message-router");
        adminUsers = (String[]) props.get(Configurable.ADMINS_PROP_KEY);
        if (adminUsers != null && adminUsers.length > 0) {
            String placeholders = StringUtils.repeat("?", ",", adminUsers.length);
            extraSql = String.format(EXPIRED_USERS_EXTRA_SQL, placeholders);
        } else {
            extraSql = "";
        }

        data_repo.initPreparedStatement(GET_EXPIRED_USERS_QUERY_ID,
                String.format(GET_EXPIRED_USERS_QUERY_SQL, extraSql));
        data_repo.initPreparedStatement(GET_LOGOUT_QUERY_ID, GET_LOGOUT_QUERY_SQL);
    } catch (Exception e) {
        log.log(Level.WARNING, "Error initializing message repository", e);
    }
}