List of usage examples for org.apache.commons.lang StringUtils capitalize
public static String capitalize(String str)
Capitalizes a String, changing the first letter to title case as per Character#toTitleCase(char). No other letters are changed.
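Before the project examples, a minimal self-contained sketch of the call's behavior (expected outputs follow the commons-lang Javadoc; the CapitalizeDemo class and main method exist only for illustration):

import org.apache.commons.lang.StringUtils;

public class CapitalizeDemo {
    public static void main(String[] args) {
        // Only the first character is affected; the rest of the String is left as-is.
        System.out.println(StringUtils.capitalize("cat"));    // "Cat"
        System.out.println(StringUtils.capitalize("cAt"));    // "CAt"
        System.out.println(StringUtils.capitalize(""));       // ""
        System.out.println(StringUtils.capitalize(null));     // null
        // uncapitalize is the inverse operation, used by several of the examples below.
        System.out.println(StringUtils.uncapitalize("Cat"));  // "cat"
    }
}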
From source file:org.apache.cayenne.dbsync.merge.builders.DefaultBuilder.java
public String getRandomJavaName() {
    int count = dataFactory.getNumberBetween(1, 5);
    StringBuilder res = new StringBuilder();
    for (int i = 0; i < count; i++) {
        res.append(StringUtils.capitalize(dataFactory.getRandomWord()));
    }
    return StringUtils.uncapitalize(res.toString());
}
From source file:org.apache.cayenne.dbsync.merge.builders.ObjEntityBuilder.java
@Override
public ObjEntity build() {
    if (obj.getName() == null) {
        obj.setName(StringUtils.capitalize(getRandomJavaName()));
    }
    return obj;
}
From source file:org.apache.cayenne.migration.MigrationGenerator.java
private String className(DataMap map) {
    return StringUtils.capitalize(map.getName()) + "0";
}
From source file:org.apache.cayenne.migration.MigrationGenerator.java
protected void createTable(DbEntity entity) {
    String tableName = tableName(entity);
    buffer.append("\t\tMigrationTableNew " + tableName + " = db.createTable(\""
            + fullyQualifiedTableName(entity) + "\");\n");

    for (DbAttribute attribute : entity.getAttributes()) {
        String type = nameForJdbcType(attribute.getType());
        type = StringUtils.capitalize(type);

        buffer.append("\t\t");
        if (type == null) {
            // fixed point
            if (attribute.getScale() >= 0) {
                buffer.append(String.format("%s.addColumn(\"%s\", %d, %d, %d", tableName,
                        attribute.getName(), attribute.getType(), attribute.getMaxLength(),
                        attribute.getScale()));
            // character
            } else if (attribute.getMaxLength() >= 0) {
                buffer.append(String.format("%s.addColumn(\"%s\", %d, %d", tableName,
                        attribute.getName(), attribute.getType(), attribute.getMaxLength()));
            // other
            } else {
                buffer.append(String.format("%s.addColumn(\"%s\", %d", tableName,
                        attribute.getName(), attribute.getType()));
            }
        } else {
            // fixed point
            if (isFixedPoint(attribute.getType()) && attribute.getScale() >= 0) {
                buffer.append(String.format("%s.add%sColumn(\"%s\", %d, %d", tableName, type,
                        attribute.getName(), attribute.getMaxLength(), attribute.getScale()));
            // character
            } else if (hasLength(attribute.getType()) && attribute.getMaxLength() >= 0) {
                buffer.append(String.format("%s.add%sColumn(\"%s\", %d", tableName, type,
                        attribute.getName(), attribute.getMaxLength()));
            // other
            } else {
                buffer.append(String.format("%s.add%sColumn(\"%s\"", tableName, type,
                        attribute.getName()));
            }
        }

        if (attribute.isMandatory()) {
            buffer.append(", MANDATORY, null");
        }
        buffer.append(");\n");
    }

    for (DbAttribute attribute : entity.getPrimaryKeys()) {
        buffer.append(String.format("\t\t%s.addPrimaryKey(\"%s\");\n", tableName, attribute.getName()));
    }
    buffer.append("\n");
}
From source file:org.apache.cayenne.migration.Migrator.java
Migration createMigrationClassForVersion(DataMap map, int version) {
    String className = migrationsPackage + "." + StringUtils.capitalize(map.getName()) + version;

    Class<?> clazz;
    try {
        clazz = Class.forName(className);
        Migration instance = (Migration) clazz.getConstructor(DataNode.class).newInstance(node);
        return instance;
    } catch (Exception e) {
        //log.debug("Migration class not found: " + className + "; stopping at version " + (version-1) + ".");
        return null;
    }
}
From source file:org.apache.cloudstack.spring.lifecycle.registry.ExtensionRegistry.java
@PostConstruct
public void init() {
    if (name == null) {
        for (String part : beanName.replaceAll("([A-Z])", " $1").split("\\s+")) {
            part = StringUtils.capitalize(part.toLowerCase());
            name = name == null ? part : name + " " + part;
        }
    }

    if (preRegistered != null) {
        for (Object o : preRegistered) {
            register(o);
        }
    }
}
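The name-derivation loop above can be hard to read inline: it splits a camelCase bean name on upper-case letters and rebuilds a capitalized, space-separated display name. A standalone sketch of just that transformation (the bean name "storagePoolAllocator" is a made-up example):

import org.apache.commons.lang.StringUtils;

public class BeanNameDemo {
    public static void main(String[] args) {
        String beanName = "storagePoolAllocator"; // hypothetical Spring bean name
        String name = null;
        // Insert a space before every upper-case letter, then capitalize each lower-cased word.
        for (String part : beanName.replaceAll("([A-Z])", " $1").split("\\s+")) {
            part = StringUtils.capitalize(part.toLowerCase());
            name = name == null ? part : name + " " + part;
        }
        System.out.println(name); // "Storage Pool Allocator"
    }
}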
From source file:org.apache.hadoop.hbase.metrics.impl.HBaseMetrics2HadoopMetricsAdapter.java
/**
 * Iterates over the MetricRegistry and adds them to the {@code builder}.
 *
 * @param builder A record builder
 */
public void snapshotAllMetrics(MetricRegistry metricRegistry, MetricsRecordBuilder builder) {
    Map<String, Metric> metrics = metricRegistry.getMetrics();

    for (Map.Entry<String, Metric> e : metrics.entrySet()) {
        // Always capitalize the name
        String name = StringUtils.capitalize(e.getKey());
        Metric metric = e.getValue();

        if (metric instanceof Gauge) {
            addGauge(name, (Gauge<?>) metric, builder);
        } else if (metric instanceof Counter) {
            addCounter(name, (Counter) metric, builder);
        } else if (metric instanceof Histogram) {
            addHistogram(name, (Histogram) metric, builder);
        } else if (metric instanceof Meter) {
            addMeter(name, (Meter) metric, builder);
        } else if (metric instanceof Timer) {
            addTimer(name, (Timer) metric, builder);
        } else {
            LOG.info("Ignoring unknown Metric class " + metric.getClass().getName());
        }
    }
}
From source file:org.apache.hadoop.mapreduce.v2.app.webapp.AppController.java
/**
 * Render the /tasks page
 */
public void tasks() {
    try {
        requireJob();
    } catch (Exception e) {
        renderText(e.getMessage());
        return;
    }
    if (app.getJob() != null) {
        try {
            String tt = $(TASK_TYPE);
            tt = tt.isEmpty() ? "All" : StringUtils.capitalize(
                    org.apache.hadoop.util.StringUtils.toLowerCase(MRApps.taskType(tt).toString()));
            setTitle(join(tt, " Tasks for ", $(JOB_ID)));
        } catch (Exception e) {
            LOG.error("Failed to render tasks page with task type : " + $(TASK_TYPE)
                    + " for job id : " + $(JOB_ID), e);
            badRequest(e.getMessage());
        }
    }
    render(tasksPage());
}
From source file:org.apache.hadoop.metrics2.lib.MethodMetric.java
static String nameFrom(Method method) {
    String methodName = method.getName();
    if (methodName.startsWith("get")) {
        return StringUtils.capitalize(methodName.substring(3));
    }
    return StringUtils.capitalize(methodName);
}
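For reference, a small hedged illustration of what nameFrom produces for a couple of reflected methods; the Sample interface and its methods are invented purely for the demo:

import java.lang.reflect.Method;
import org.apache.commons.lang.StringUtils;

public class NameFromDemo {
    // Hypothetical methods used only to obtain java.lang.reflect.Method objects.
    interface Sample {
        long getBytesRead();
        int cacheHits();
    }

    static String nameFrom(Method method) {
        String methodName = method.getName();
        // Strip a leading "get" so getBytesRead and a bare metric method map to the same naming scheme.
        if (methodName.startsWith("get")) {
            return StringUtils.capitalize(methodName.substring(3));
        }
        return StringUtils.capitalize(methodName);
    }

    public static void main(String[] args) throws NoSuchMethodException {
        System.out.println(nameFrom(Sample.class.getMethod("getBytesRead"))); // "BytesRead"
        System.out.println(nameFrom(Sample.class.getMethod("cacheHits")));    // "CacheHits"
    }
}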
From source file:org.apache.hadoop.metrics2.lib.MetricMutableQuantiles.java
/**
 * Instantiates a new {@link MetricMutableQuantiles} for a metric that rolls itself over on the
 * specified time interval.
 *
 * @param name of the metric
 * @param description long-form textual description of the metric
 * @param sampleName type of items in the stream (e.g., "Ops")
 * @param valueName type of the values
 * @param interval rollover interval (in seconds) of the estimator
 */
public MetricMutableQuantiles(String name, String description, String sampleName, String valueName,
        int interval) {
    String ucName = StringUtils.capitalize(name);
    String usName = StringUtils.capitalize(sampleName);
    String uvName = StringUtils.capitalize(valueName);
    String desc = StringUtils.uncapitalize(description);
    String lsName = StringUtils.uncapitalize(sampleName);
    String lvName = StringUtils.uncapitalize(valueName);

    numInfo = info(ucName + "Num" + usName,
            String.format("Number of %s for %s with %ds interval", lsName, desc, interval));

    // Construct the MetricsInfos for the quantiles, converting to percentiles
    quantileInfos = new MetricsInfo[quantiles.length];
    String nameTemplate = "%s%dthPercentile%dsInterval%s";
    String descTemplate = "%d percentile %s with %d second interval for %s";
    for (int i = 0; i < quantiles.length; i++) {
        int percentile = (int) (100 * quantiles[i].quantile);
        quantileInfos[i] = info(String.format(nameTemplate, ucName, percentile, interval, uvName),
                String.format(descTemplate, percentile, lvName, interval, desc));
    }

    estimator = new MetricSampleQuantiles(quantiles);

    executor = new MetricsExecutorImpl();
    this.interval = interval;
    executor.getExecutor().scheduleAtFixedRate(new RolloverSample(this), interval, interval, TimeUnit.SECONDS);
}