List of usage examples for org.apache.commons.lang.StringUtils#upperCase
public static String upperCase(String str)
Converts a String to upper case as per String#toUpperCase(). A null input String returns null.
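A minimal standalone sketch of the call (the class name UpperCaseExample and the sample inputs are illustrative only, not taken from the source files below), showing the null-safe behaviour described above:

import org.apache.commons.lang.StringUtils;

// Illustrative sketch only: upperCase on a normal, an empty, and a null input.
public class UpperCaseExample {
    public static void main(String[] args) {
        System.out.println(StringUtils.upperCase("aBc")); // ABC
        System.out.println(StringUtils.upperCase(""));    // "" (empty string)
        System.out.println(StringUtils.upperCase(null));  // null
    }
}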
From source file:reconf.client.elements.FullPropertyElement.java
public static String from(String product, String component, String name) {
    return StringUtils.upperCase(StringUtils.defaultString(product)) + "/"
            + StringUtils.upperCase(StringUtils.defaultString(component)) + "/"
            + StringUtils.upperCase(StringUtils.defaultString(name));
}
From source file:reconf.client.setup.DatabaseManager.java
public String get(String fullProperty, Method method) {
    Connection conn = null;
    PreparedStatement stmt = null;
    ResultSet rs = null;
    try {
        conn = getConnection();
        stmt = conn.prepareStatement("SELECT VALUE "
                + "FROM PUBLIC.CLS_METHOD_PROP_VALUE_V2 "
                + "WHERE FULL_PROP = ? "
                + "AND NAM_CLASS = ? "
                + "AND NAM_METHOD = ? ");
        stmt.setString(1, StringUtils.upperCase(fullProperty));
        stmt.setString(2, method.getDeclaringClass().getName());
        stmt.setString(3, method.getName());
        rs = stmt.executeQuery();
        if (rs.next()) {
            return rs.getClob(1) == null ? null
                    : rs.getClob(1).getCharacterStream() == null ? null
                            : IOUtils.toString(rs.getClob(1).getCharacterStream());
        }
        return null;
    } catch (Exception e) {
        LoggerHolder.getLog().warn(msg.format("error.db", "get"), e);
        throw new RuntimeException(e);
    } finally {
        close(rs);
        close(stmt);
        close(conn);
    }
}
From source file:reconf.client.setup.DatabaseManager.java
public boolean temporaryUpsert(String fullProperty, Method method, String value) {
    Connection conn = null;
    PreparedStatement stmt = null;
    try {
        conn = getConnection();
        if (needToInsert(fullProperty, method)) {
            stmt = conn.prepareStatement(TEMPORARY_INSERT);
            stmt.setString(1, method.getDeclaringClass().getName());
            stmt.setString(2, method.getName());
            stmt.setString(3, StringUtils.upperCase(fullProperty));
            stmt.setString(4, value);
            stmt.setTimestamp(5, new Timestamp(System.currentTimeMillis()));
        } else {
            stmt = conn.prepareStatement(TEMPORARY_UPDATE);
            stmt.setString(1, value);
            stmt.setTimestamp(2, new Timestamp(System.currentTimeMillis()));
            stmt.setString(3, StringUtils.upperCase(fullProperty));
            stmt.setString(4, method.getDeclaringClass().getName());
            stmt.setString(5, method.getName());
            stmt.setString(6, value);
        }
        boolean result = 0 != stmt.executeUpdate();
        return result;
    } catch (Exception e) {
        LoggerHolder.getLog().warn(msg.format("error.db", "temporaryUpsert"), e);
        throw new RuntimeException(e);
    } finally {
        close(stmt);
        close(conn);
    }
}
From source file:reconf.client.setup.DatabaseManager.java
public boolean isNew(String fullProperty, Method method, String value) {
    Connection conn = null;
    PreparedStatement stmt = null;
    ResultSet rs = null;
    try {
        conn = getConnection();
        if (needToInsert(fullProperty, method)) {
            return true;
        }
        stmt = conn.prepareStatement(CHECK_IS_NEW);
        stmt.setString(1, StringUtils.upperCase(fullProperty));
        stmt.setString(2, method.getDeclaringClass().getName());
        stmt.setString(3, method.getName());
        stmt.setString(4, value);
        rs = stmt.executeQuery();
        return rs.next();
    } catch (Exception e) {
        LoggerHolder.getLog().warn(msg.format("error.db", "isNew"), e);
        throw new RuntimeException(e);
    } finally {
        close(rs);
        close(stmt);
        close(conn);
    }
}
From source file:reconf.client.setup.DatabaseManager.java
private boolean needToInsert(String fullProperty, Method method) {
    Connection conn = null;
    PreparedStatement stmt = null;
    ResultSet rs = null;
    try {
        conn = getConnection();
        stmt = conn.prepareStatement("SELECT 1 "
                + "FROM PUBLIC.CLS_METHOD_PROP_VALUE_V2 "
                + "WHERE FULL_PROP = ? "
                + "AND NAM_CLASS = ? "
                + "AND NAM_METHOD = ? ");
        stmt.setString(1, StringUtils.upperCase(fullProperty));
        stmt.setString(2, method.getDeclaringClass().getName());
        stmt.setString(3, method.getName());
        rs = stmt.executeQuery();
        return rs.next() ? false : true;
    } catch (Exception e) {
        LoggerHolder.getLog().warn(msg.format("error.db", "needToInsert"), e);
        throw new RuntimeException(e);
    } finally {
        close(rs);
        close(stmt);
        close(conn);
    }
}
From source file:reconf.client.setup.DatabaseManager.java
private Collection<String> toUpper(Collection<String> arg) {
    Set<String> result = new LinkedHashSet<String>();
    for (String str : arg) {
        result.add("'" + StringUtils.upperCase(str) + "'");
    }
    return result;
}
From source file:solidbase.core.plugins.DumpJSON.java
public boolean execute(CommandProcessor processor, Command command, boolean skip) throws SQLException {
    if (!triggerPattern.matcher(command.getCommand()).matches())
        return false;

    if (command.isTransient()) {
        /* DUMP JSON DATE_CREATED ON | OFF */
        SQLTokenizer tokenizer = new SQLTokenizer(
                SourceReaders.forString(command.getCommand(), command.getLocation()));
        // TODO Maybe DUMP JSON CONFIG or DUMP JSON SET
        // TODO What about other configuration settings?
        tokenizer.get("DUMP");
        tokenizer.get("JSON");
        tokenizer.get("DATE_CREATED"); // FIXME This should be CREATED_DATE
        Token t = tokenizer.get("ON", "OFF");
        tokenizer.get((String) null);
        // TODO I think we should have a scope that is restricted to the current file and a scope that gets inherited when running or including another file.
        AbstractScope scope = processor.getContext().getScope();
        scope.set("solidbase.dump_json.dateCreated", t.eq("ON")); // TODO Make this a constant
        return true;
    }

    if (skip)
        return true;

    Parsed parsed = parse(command);

    AbstractScope scope = processor.getContext().getScope();
    Object object = scope.get("solidbase.dump_json.dateCreated");
    boolean dateCreated = object == null || object instanceof Boolean && (Boolean) object;

    Resource jsvResource = new FileResource(new File(parsed.fileName)); // Relative to current folder

    try {
        OutputStream out = jsvResource.getOutputStream();
        if (parsed.gzip)
            out = new BufferedOutputStream(new GZIPOutputStream(out, 65536), 65536); // TODO Ctrl-C, close the outputstream?

        JSONWriter jsonWriter = new JSONWriter(out);
        try {
            Statement statement = processor.createStatement();
            try {
                ResultSet result = statement.executeQuery(parsed.query);
                ResultSetMetaData metaData = result.getMetaData();

                // Define locals
                int columns = metaData.getColumnCount();
                int[] types = new int[columns];
                String[] names = new String[columns];
                boolean[] ignore = new boolean[columns];
                FileSpec[] fileSpecs = new FileSpec[columns];
                String schemaNames[] = new String[columns];
                String tableNames[] = new String[columns];

                // Analyze metadata
                for (int i = 0; i < columns; i++) {
                    int col = i + 1;
                    String name = metaData.getColumnName(col).toUpperCase();
                    types[i] = metaData.getColumnType(col);
                    if (types[i] == Types.DATE && parsed.dateAsTimestamp)
                        types[i] = Types.TIMESTAMP;
                    names[i] = name;
                    if (parsed.columns != null) {
                        ColumnSpec columnSpec = parsed.columns.get(name);
                        if (columnSpec != null)
                            if (columnSpec.skip)
                                ignore[i] = true;
                            else
                                fileSpecs[i] = columnSpec.toFile;
                    }
                    if (parsed.coalesce != null && parsed.coalesce.notFirst(name))
                        ignore[i] = true;
                    // TODO STRUCT serialize
                    // TODO This must be optional and not the default
                    else if (types[i] == 2002 || JDBCSupport.toTypeName(types[i]) == null)
                        ignore[i] = true;
                    tableNames[i] = StringUtils.upperCase(StringUtils.defaultIfEmpty(metaData.getTableName(col), null));
                    schemaNames[i] = StringUtils.upperCase(StringUtils.defaultIfEmpty(metaData.getSchemaName(col), null));
                }

                if (parsed.coalesce != null)
                    parsed.coalesce.bind(names);

                // Write header
                JSONObject properties = new JSONObject();
                properties.set("version", "1.0");
                properties.set("format", "record-stream");
                properties.set("description", "SolidBase JSON Data Dump File");
                properties.set("createdBy", new JSONObject("product", "SolidBase", "version", "2.0.0"));
                if (dateCreated) {
                    SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
                    properties.set("createdDate", format.format(new Date()));
                }

                if (parsed.binaryFileName != null) {
                    // TODO FIXME Should be wrapped in a SourceException: solidbase.solidstack.io.FatalURISyntaxException: java.net.URISyntaxException: Illegal character in path at index 1: &{folder}/JIADHOCCH
                    Resource binResource = Resources.getResource(parsed.binaryFileName);
                    Resource resource = Resources.getResource(parsed.fileName);
                    properties.set("binaryFile", binResource.getPathFrom(resource).toString());
                }

                JSONArray fields = new JSONArray();
                properties.set("fields", fields);
                for (int i = 0; i < columns; i++)
                    if (!ignore[i]) {
                        JSONObject field = new JSONObject();
                        field.set("schemaName", schemaNames[i]);
                        field.set("tableName", tableNames[i]);
                        field.set("name", names[i]);
                        field.set("type", JDBCSupport.toTypeName(types[i])); // TODO Better error message when type is not recognized, for example Oracle's 2007 for a user type
                        FileSpec spec = fileSpecs[i];
                        if (spec != null && !spec.generator.isDynamic()) {
                            Resource fileResource = new FileResource(spec.generator.fileName);
                            field.set("file", fileResource.getPathFrom(jsvResource).toString());
                        }
                        fields.add(field);
                    }

                FileSpec binaryFile = parsed.binaryFileName != null ? new FileSpec(true, parsed.binaryFileName, 0) : null;

                jsonWriter.writeFormatted(properties, 120);
                jsonWriter.getWriter().write('\n');

                Counter counter = null;
                if (parsed.logRecords > 0)
                    counter = new FixedCounter(parsed.logRecords);
                else if (parsed.logSeconds > 0)
                    counter = new TimedCounter(parsed.logSeconds);

                try {
                    while (result.next()) {
                        Object[] values = new Object[columns];
                        for (int i = 0; i < values.length; i++)
                            values[i] = JDBCSupport.getValue(result, types, i);

                        if (parsed.coalesce != null)
                            parsed.coalesce.coalesce(values);

                        JSONArray array = new JSONArray();
                        for (int i = 0; i < columns; i++)
                            if (!ignore[i]) {
                                Object value = values[i];
                                if (value == null) {
                                    array.add(null);
                                    continue;
                                }
                                // TODO 2 columns can't be written to the same dynamic filename
                                FileSpec spec = fileSpecs[i];
                                if (spec != null) { // The column is redirected to its own file
                                    String relFileName = null;
                                    int startIndex;
                                    if (spec.binary) {
                                        if (spec.generator.isDynamic()) {
                                            String fileName = spec.generator.generateFileName(result);
                                            Resource fileResource = new FileResource(fileName);
                                            spec.out = fileResource.getOutputStream();
                                            spec.index = 0;
                                            relFileName = fileResource.getPathFrom(jsvResource).toString();
                                        } else if (spec.out == null) {
                                            String fileName = spec.generator.generateFileName(result);
                                            Resource fileResource = new FileResource(fileName);
                                            spec.out = fileResource.getOutputStream();
                                        }
                                        if (value instanceof Blob) {
                                            InputStream in = ((Blob) value).getBinaryStream();
                                            startIndex = spec.index;
                                            byte[] buf = new byte[4096];
                                            for (int read = in.read(buf); read >= 0; read = in.read(buf)) {
                                                spec.out.write(buf, 0, read);
                                                spec.index += read;
                                            }
                                            in.close();
                                        } else if (value instanceof byte[]) {
                                            startIndex = spec.index;
                                            spec.out.write((byte[]) value);
                                            spec.index += ((byte[]) value).length;
                                        } else
                                            throw new SourceException(names[i] + " (" + value.getClass().getName()
                                                    + ") is not a binary column. Only binary columns like BLOB, RAW, BINARY VARYING can be written to a binary file",
                                                    command.getLocation());
                                        if (spec.generator.isDynamic()) {
                                            spec.out.close();
                                            JSONObject ref = new JSONObject();
                                            ref.set("file", relFileName);
                                            ref.set("size", spec.index - startIndex);
                                            array.add(ref);
                                        } else {
                                            JSONObject ref = new JSONObject();
                                            ref.set("index", startIndex);
                                            ref.set("length", spec.index - startIndex);
                                            array.add(ref);
                                        }
                                    } else {
                                        if (spec.generator.isDynamic()) {
                                            String fileName = spec.generator.generateFileName(result);
                                            Resource fileResource = new FileResource(fileName);
                                            spec.writer = new DeferringWriter(spec.threshold, fileResource, jsonWriter.getEncoding());
                                            spec.index = 0;
                                            relFileName = fileResource.getPathFrom(jsvResource).toString();
                                        } else if (spec.writer == null) {
                                            String fileName = spec.generator.generateFileName(result);
                                            Resource fileResource = new FileResource(fileName);
                                            spec.writer = new OutputStreamWriter(fileResource.getOutputStream(), jsonWriter.getEncoding());
                                        }
                                        if (value instanceof Blob || value instanceof byte[])
                                            throw new SourceException(names[i]
                                                    + " is a binary column. Binary columns like BLOB, RAW, BINARY VARYING cannot be written to a text file",
                                                    command.getLocation());
                                        if (value instanceof Clob) {
                                            Reader in = ((Clob) value).getCharacterStream();
                                            startIndex = spec.index;
                                            char[] buf = new char[4096];
                                            for (int read = in.read(buf); read >= 0; read = in.read(buf)) {
                                                spec.writer.write(buf, 0, read);
                                                spec.index += read;
                                            }
                                            in.close();
                                        } else {
                                            String val = value.toString();
                                            startIndex = spec.index;
                                            spec.writer.write(val);
                                            spec.index += val.length();
                                        }
                                        if (spec.generator.isDynamic()) {
                                            DeferringWriter writer = (DeferringWriter) spec.writer;
                                            if (writer.isBuffered())
                                                array.add(writer.clearBuffer());
                                            else {
                                                JSONObject ref = new JSONObject();
                                                ref.set("file", relFileName);
                                                ref.set("size", spec.index - startIndex);
                                                array.add(ref);
                                            }
                                            writer.close();
                                        } else {
                                            JSONObject ref = new JSONObject();
                                            ref.set("index", startIndex);
                                            ref.set("length", spec.index - startIndex);
                                            array.add(ref);
                                        }
                                    }
                                } else if (value instanceof Clob)
                                    array.add(((Clob) value).getCharacterStream());
                                else if (binaryFile != null && (value instanceof Blob || value instanceof byte[])) {
                                    if (binaryFile.out == null) {
                                        String fileName = binaryFile.generator.generateFileName(null);
                                        Resource fileResource = new FileResource(fileName);
                                        binaryFile.out = fileResource.getOutputStream();
                                        if (parsed.binaryGzip)
                                            binaryFile.out = new BufferedOutputStream(new GZIPOutputStream(binaryFile.out, 65536), 65536); // TODO Ctrl-C, close the outputstream?
                                    }
                                    int startIndex = binaryFile.index;
                                    if (value instanceof Blob) {
                                        InputStream in = ((Blob) value).getBinaryStream();
                                        byte[] buf = new byte[4096];
                                        for (int read = in.read(buf); read >= 0; read = in.read(buf)) {
                                            binaryFile.out.write(buf, 0, read);
                                            binaryFile.index += read;
                                        }
                                        in.close();
                                    } else {
                                        binaryFile.out.write((byte[]) value);
                                        binaryFile.index += ((byte[]) value).length;
                                    }
                                    JSONObject ref = new JSONObject();
                                    ref.set("index", startIndex);
                                    ref.set("length", binaryFile.index - startIndex);
                                    array.add(ref);
                                } else
                                    array.add(value);
                            }

                        for (ListIterator<Object> i = array.iterator(); i.hasNext();) {
                            Object value = i.next();
                            if (value instanceof java.sql.Date || value instanceof java.sql.Time
                                    || value instanceof java.sql.Timestamp || value instanceof java.sql.RowId)
                                i.set(value.toString());
                        }
                        jsonWriter.write(array);
                        jsonWriter.getWriter().write('\n');

                        if (counter != null && counter.next())
                            processor.getProgressListener().println("Exported " + counter.total() + " records.");
                    }
                    if (counter != null && counter.needFinal())
                        processor.getProgressListener().println("Exported " + counter.total() + " records.");
                } finally {
                    // Close files that have been left open
                    for (FileSpec fileSpec : fileSpecs)
                        if (fileSpec != null) {
                            if (fileSpec.out != null)
                                fileSpec.out.close();
                            if (fileSpec.writer != null)
                                fileSpec.writer.close();
                        }
                    if (binaryFile != null && binaryFile.out != null)
                        binaryFile.out.close();
                }
            } finally {
                processor.closeStatement(statement, true);
            }
        } finally {
            jsonWriter.close();
        }
    } catch (IOException e) {
        throw new SystemException(e);
    }
    return true;
}
From source file:stormy.pythian.model.instance.FeatureType.java
public static FeatureType from(String name) {
    return FeatureType.valueOf(StringUtils.upperCase(name));
}
From source file:su.opencode.shuffler.RenamingVisitor.java
protected String constantName(List<String> name) {
    StringBuilder sb = new StringBuilder();
    for (String part : name) {
        if (StringUtils.isBlank(part))
            continue;
        sb.append("_").append(StringUtils.upperCase(part));
    }
    sb.deleteCharAt(0);
    return sb.toString();
}
From source file:tk.mybatis.springboot.controller.CommonController.java
private String generateServiceByTbName(String tbname) {
    String[] partNames = tbname.split("_");
    String returnServiceName = "";
    if (partNames != null && partNames.length > 1) {
        for (int i = 1; i < partNames.length; i++) {
            returnServiceName += StringUtils.upperCase(partNames[i].substring(0, 1))
                    .concat(partNames[i].substring(1));
        }
        return partNames[0] + returnServiceName;
    } else if (partNames != null && partNames.length == 1) {
        return partNames[0];
    }
    return null;
}