Example usage for java.lang StringBuffer deleteCharAt

Introduction

On this page you can find example usages of java.lang.StringBuffer.deleteCharAt(int).

Prototype

@Override
public synchronized StringBuffer deleteCharAt(int index) 
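
As a minimal illustration before the real-world examples below (the class name DeleteCharAtDemo is made up for this sketch, not taken from any source file), deleteCharAt is most often used to drop the trailing separator after building a delimited list:

public class DeleteCharAtDemo {
    public static void main(String[] args) {
        // build a comma-separated list, then drop the trailing separator
        StringBuffer buf = new StringBuffer();
        for (String item : new String[] { "a", "b", "c" }) {
            buf.append(item).append(",");
        }
        if (buf.length() > 0) {
            buf.deleteCharAt(buf.length() - 1); // removes the final ','
        }
        System.out.println(buf); // prints: a,b,c
    }
}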

Usage

From source file:cx.fbn.nevernote.threads.IndexRunner.java

private String removeTags(String text) {
    StringBuffer buffer = new StringBuffer(text);
    boolean inTag = false;
    // scan backwards so deletions do not shift the indices still to be visited
    for (int i = buffer.length() - 1; i >= 0; i--) {
        if (buffer.charAt(i) == '>')
            inTag = true;
        if (buffer.charAt(i) == '<')
            inTag = false;
        // delete everything between '<' and '>', including the '<' itself
        if (inTag || buffer.charAt(i) == '<')
            buffer.deleteCharAt(i);
    }
    }

    return buffer.toString();
}

From source file:com.wabacus.system.dataset.select.report.value.RelationalDBReportDataSetValueProvider.java

private String parseStatiSqlWithoutCondition(List<StatisticItemBean> lstStatitemBeansWithoutCondition,
        String sql) {
    if (lstStatitemBeansWithoutCondition == null || lstStatitemBeansWithoutCondition.size() == 0)
        return "";
    StringBuffer statisticColumnsBuf = new StringBuffer();
    for (StatisticItemBean statItemBeanTmp : lstStatitemBeansWithoutCondition) {
        statisticColumnsBuf.append(statItemBeanTmp.getValue()).append(" as ")
                .append(statItemBeanTmp.getProperty()).append(",");
    }
    if (statisticColumnsBuf.length() > 0
            && statisticColumnsBuf.charAt(statisticColumnsBuf.length() - 1) == ',') {
        statisticColumnsBuf.deleteCharAt(statisticColumnsBuf.length() - 1);
    }
    String sqlStati = "";
    if (statisticColumnsBuf.length() > 0) {
        sqlStati = "select " + statisticColumnsBuf.toString() + " from (" + sql + ") wx_tableStati";
    }
    return sqlStati;
}

From source file:com.flexive.sqlParser.FxStatement.java

/**
 * Generates a debug string.
 *
 * @return a debug string of the statement
 * @throws com.flexive.shared.exceptions.FxSqlSearchException
 *          if the function failed
 */
public String printDebug() throws FxSqlSearchException {
    try {
        StringBuffer result = new StringBuffer(1024);
        if (this.rootBrace == null) {
            return String.valueOf(this.getType());
        } else {
            printDebug(result, this.rootBrace);
            result = result.deleteCharAt(0);
        }
        result.append(("\n##################################################\n"));
        int pos = 0;
        for (SelectedValue v : getSelectedValues()) {
            result.append(("Selected[" + (pos++) + "]: " + v.toString() + "\n"));
        }

        pos = 0;
        result.append("Order by: ");
        for (Value v : getOrderByValues()) {
            if ((pos++) > 0)
                result.append(",");
            result.append(v.toString());
        }
        if (pos == 0) {
            result.append(("Order: n/a\n"));
        }
        result.append("\n");

        result.append("Search Languages: ");
        pos = 0;
        for (String v : getTableByType(Table.TYPE.CONTENT).getSearchLanguages()) {
            if ((pos++) > 0)
                result.append(",");
            result.append(v);
        }
        result.append("\n");
        result.append("Cache Key: ");
        try {
            result.append(getCacheKey());
        } catch (Throwable t) {
            result.append(t.getMessage());
        }
        result.append("\n");
        result.append(("Parser execution time: " + this.getParserExecutionTime() + " ms\n"));
        return result.toString();
    } catch (Throwable t) {
        throw new FxSqlSearchException(LOG, t, "ex.sqlSearch.printDebugFailed");
    }
}

From source file:org.talend.components.salesforce.runtime.SalesforceWriter.java

private void handleReject(IndexedRecord input, Error[] resultErrors, String[] changedItemKeys, int batchIdx)
        throws IOException {
    String changedItemKey = null;
    if (batchIdx < changedItemKeys.length) {
        if (changedItemKeys[batchIdx] != null) {
            changedItemKey = changedItemKeys[batchIdx];
        } else {
            changedItemKey = String.valueOf(batchIdx + 1);
        }
    } else {
        changedItemKey = "Batch index out of bounds";
    }
    StringBuilder errors = SalesforceRuntime.addLog(resultErrors, changedItemKey, logWriter);
    if (exceptionForErrors) {
        if (errors.toString().length() > 0) {
            if (logWriter != null) {
                logWriter.close();
            }
            throw new IOException(errors.toString());
        }
    } else {
        rejectCount++;
        Schema outSchema = sprops.schemaReject.schema.getValue();
        if (outSchema == null || outSchema.getFields().size() == 0) {
            return;
        }
        if (input.getSchema().equals(outSchema)) {
            rejectedWrites.add(input);
        } else {
            IndexedRecord reject = new GenericData.Record(outSchema);
            for (Schema.Field outField : reject.getSchema().getFields()) {
                Object outValue = null;
                Schema.Field inField = input.getSchema().getField(outField.name());
                if (inField != null) {
                    outValue = input.get(inField.pos());
                } else if (resultErrors.length > 0) {
                    Error error = resultErrors[0];
                    if (TSalesforceOutputProperties.FIELD_ERROR_CODE.equals(outField.name())) {
                        outValue = error.getStatusCode() != null ? error.getStatusCode().toString() : null;
                    } else if (TSalesforceOutputProperties.FIELD_ERROR_FIELDS.equals(outField.name())) {
                        StringBuffer fields = new StringBuffer();
                        for (String field : error.getFields()) {
                            fields.append(field);
                            fields.append(",");
                        }
                        if (fields.length() > 0) {
                            fields.deleteCharAt(fields.length() - 1);
                        }
                        outValue = fields.toString();
                    } else if (TSalesforceOutputProperties.FIELD_ERROR_MESSAGE.equals(outField.name())) {
                        outValue = error.getMessage();
                    }
                }
                reject.put(outField.pos(), outValue);
            }
            rejectedWrites.add(reject);
        }
        Property<OutputAction> outputAction = sprops.outputAction;
        LOGGER.info(MESSAGES.getMessage("info.rejectedRecord",
                sprops.outputAction.getPossibleValuesDisplayName(outputAction.getValue()).toLowerCase(),
                dataCount));
    }
}

From source file:org.talend.components.salesforce.SalesforceRuntime.java

protected void populateResultMessage(Map<String, String> resultMessage, Error[] errors) {
    for (Error error : errors) {
        if (error.getStatusCode() != null) {
            resultMessage.put("StatusCode", error.getStatusCode().toString());
        }
        if (error.getFields() != null) {
            StringBuffer fields = new StringBuffer();
            for (String field : error.getFields()) {
                fields.append(field);
                fields.append(",");
            }
            if (fields.length() > 0) {
                fields.deleteCharAt(fields.length() - 1);
            }
            resultMessage.put("Fields", fields.toString());
        }
        resultMessage.put("Message", error.getMessage());
    }
}

From source file:com.amazonaws.services.kinesis.aggregators.StreamAggregator.java

public void initialize(String shardId) throws Exception {
    // Set System properties to allow entity expansion of unlimited items in
    // response documents from AWS API
    // see https://blogs.oracle.com/joew/entry/jdk_7u45_aws_issue_123 for
    // more information
    System.setProperty("entityExpansionLimit", "0");
    System.setProperty("jdk.xml.entityExpansionLimit", "0");

    this.shardId = shardId;

    // establish we are running on the lowest shard on the basis of hash
    // range
    AmazonKinesisClient kinesisClient = new AmazonKinesisClient(this.config.getKinesisCredentialsProvider());
    if (this.config.getRegionName() != null) {
        region = Region.getRegion(Regions.fromName(this.config.getRegionName()));
        kinesisClient.setRegion(region);
    }

    try {
        if (this.shardId
                .equals(StreamAggregatorUtils.getFirstShardName(kinesisClient, this.config.getStreamName()))) {
            this.isFirstShardWorker = true;
            logInfo("Aggregator taking Primary Thread Responsibility");
        }
    } catch (Exception e) {
        logWarn("Unable to establish if Worker Thread is Primary");
    }

    validateConfig();

    // set the default aggregator type
    if (this.aggregatorType == null) {
        this.aggregatorType = AggregatorType.COUNT;
    }

    if (this.dataExtractor == null)
        throw new InvalidConfigurationException(
                "Unable to create Aggregator Instance without a configured IDataStore");

    // set the aggregator type on the data extractor
    this.dataExtractor.setAggregatorType(this.aggregatorType);
    this.dataExtractor.validate();

    // create connections to dynamo and kinesis
    ClientConfiguration clientConfig = new ClientConfiguration().withSocketTimeout(60000);
    this.dynamoClient = new AmazonDynamoDBAsyncClient(this.config.getDynamoDBCredentialsProvider(),
            clientConfig);
    if (region != null)
        this.dynamoClient.setRegion(region);

    this.kinesisClient = new AmazonKinesisClient(this.config.getKinesisCredentialsProvider());
    if (region != null)
        this.kinesisClient.setRegion(region);

    inventory = new InventoryModel(this.dynamoClient);

    // get the latest sequence number checkpointed for this named aggregator
    // on this shard
    InventoryStatus lastUpdate = inventory.getLastUpdate(this.streamName, this.applicationName, this.namespace,
            this.shardId);
    if (lastUpdate != null && lastUpdate.getHighSeq() != null) {
        // set the current high sequence to the last high sequence
        this.highSeq = new BigInteger(lastUpdate.getHighSeq());
    }

    // log that we are now starting up
    inventory.update(this.streamName, this.applicationName, this.namespace, this.shardId, null, null,
            System.currentTimeMillis(), InventoryModel.STATE.STARTING);

    // set the table name we will use for aggregated values
    if (this.tableName == null) {
        this.tableName = StreamAggregatorUtils.getTableName(config.getApplicationName(), this.getNamespace());
    }

    if (this.environment != null && !this.environment.equals(""))
        this.tableName = String.format("%s.%s", this.environment, this.tableName);

    // resolve the basic data being aggregated
    String labelColumn = StreamAggregatorUtils.methodToColumn(dataExtractor.getAggregateLabelName());
    String dateColumn = dataExtractor.getDateValueName() == null ? DEFAULT_DATE_VALUE
            : dataExtractor.getDateValueName();

    // configure the default dynamo data store
    if (this.dataStore == null) {
        this.dataStore = new DynamoDataStore(this.dynamoClient, this.kinesisClient, this.aggregatorType,
                this.streamName, this.tableName, labelColumn, dateColumn).withStorageCapacity(this.readCapacity,
                        this.writeCapacity);
        this.dataStore.setRegion(region);
    }
    this.dataStore.initialise();

    // configure the cache so it can do its work
    cache = new AggregateCache(this.shardId).withCredentials(this.config.getKinesisCredentialsProvider())
            .withAggregateType(this.aggregatorType).withTableName(this.tableName).withLabelColumn(labelColumn)
            .withDateColumn(dateColumn).withDataStore(this.dataStore);

    // create a cloudwatch client for the cache to publish against if needed
    if (this.publishMetrics && this.metricsEmitter == null) {
        this.metricsEmitter = new CloudWatchMetricsEmitter(this.tableName,
                this.config.getCloudWatchCredentialsProvider());
    }

    if (this.metricsEmitter != null) {
        if (this.config.getRegionName() != null)
            this.metricsEmitter.setRegion(region);
    }
    // add the metrics publisher to the cache if we are bound to the lowest
    // shard
    if (this.metricsEmitter != null) {
        cache.withMetricsEmitter(this.metricsEmitter);
    }
    cache.initialise();

    // set the user agent
    StringBuilder userAgent = new StringBuilder(ClientConfiguration.DEFAULT_USER_AGENT);
    userAgent.append(" ");
    userAgent.append(this.AWSApplication);
    userAgent.append("/");
    userAgent.append(this.version);
    this.config.getKinesisClientConfiguration().setUserAgent(userAgent.toString());

    // log startup state
    StringBuffer sb = new StringBuffer();
    for (TimeHorizon t : timeHorizons) {
        sb.append(String.format("%s,", t.name()));
    }
    sb.deleteCharAt(sb.length() - 1);

    logInfo(String.format(
            "Amazon Kinesis Stream Aggregator Online\nStream: %s\nApplication: %s\nNamespace: %s\nWorker: %s\nGranularity: %s\nContent Extracted With: %s",
            streamName, applicationName, this.namespace, this.config.getWorkerIdentifier(), sb.toString(),
            dataExtractor.getClass().getName()));
    if (this.highSeq != null)
        logInfo(String.format("Processing Data from Seq: %s", this.highSeq));
    online = true;
}

From source file:org.jsweet.input.typescriptdef.ast.Scanner.java

protected String getCurrentDeclarationName() {
    StringBuffer sb = new StringBuffer();
    for (Visitable v : getStack()) {
        if (v instanceof Declaration) {
            sb.append(((Declaration) v).getName());
            sb.append('.');
        }
    }
    if (!getStack().isEmpty() && !(sb.length() == 0)) {
        sb.deleteCharAt(sb.length() - 1);
    }
    return sb.toString();
}

From source file:org.jvnet.hudson.plugins.thinbackup.backup.BackupSet.java

@Override
public String toString() {
    final StringBuffer strBuf = new StringBuffer();

    strBuf.append("[FULL backup: ");
    if (fullBackupName != null) {
        strBuf.append(fullBackupName);
    } else {
        strBuf.append("NONE");
    }
    strBuf.append("; DIFF backups: ");
    boolean hasDiffs = false;
    if (diffBackupsNames != null) {
        for (final String diffBackup : diffBackupsNames) {
            strBuf.append(diffBackup);
            strBuf.append(",");
        }
        if (diffBackupsNames.size() > 0) {
            strBuf.deleteCharAt(strBuf.length() - 1);
            hasDiffs = true;
        }
    }
    if (!hasDiffs) {
        strBuf.append("NONE");
    }
    strBuf.append("]");

    return strBuf.toString();
}

From source file:com.fmguler.ven.QueryGenerator.java

/**
 * Generates update query for the specified object
 * @param object the object to generate update query for
 * @return the update SQL query
 */
public String generateUpdateQuery(Object object) throws VenException {
    BeanWrapper wr = new BeanWrapperImpl(object);
    String objectName = Convert.toSimpleName(object.getClass().getName());
    String tableName = Convert.toDB(objectName);
    PropertyDescriptor[] pdArr = wr.getPropertyDescriptors();

    StringBuffer query = new StringBuffer("update " + tableName + " set ");
    for (int i = 0; i < pdArr.length; i++) {
        Class fieldClass = pdArr[i].getPropertyType(); //field class
        String columnName = Convert.toDB(pdArr[i].getName()); //column name
        String fieldName = pdArr[i].getName(); //field name
        if (dbClasses.contains(fieldClass)) { //direct database field (Integer,String,Date, etc)
            query.append(columnName.equals("order") ? "\"order\"" : columnName).append("=:").append(fieldName);
            query.append(",");
        }
        if (fieldClass.getPackage() != null && domainPackages.contains(fieldClass.getPackage().getName())) { //object
            query.append(columnName).append("_id=:").append(fieldName).append(".id");
            query.append(",");
        }
    }
    query.deleteCharAt(query.length() - 1);
    query.append(" where id = :id ;");
    return query.toString();
}