List of usage examples for java.lang.StringBuffer.setLength
@Override public synchronized void setLength(int newLength)
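setLength(int newLength) truncates the buffer's character sequence when newLength is smaller than the current length, pads it with '\u0000' characters when it is larger, and throws IndexOutOfBoundsException if newLength is negative. Calling setLength(0) is the usual idiom for clearing a buffer so it can be reused, as most of the examples below do. A minimal standalone sketch of this behavior (not taken from any of the source files listed below):

StringBuffer buf = new StringBuffer("hello world");

buf.setLength(5);                  // truncate: buffer now holds "hello"
System.out.println(buf);           // hello

buf.setLength(7);                  // grow: padded with two '\u0000' characters
System.out.println(buf.length());  // 7

buf.setLength(0);                  // clear the buffer for reuse
buf.append("reused");
System.out.println(buf);           // reused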
From source file:org.apache.hadoop.mapred.TestFixedLengthInputFormat.java
private ArrayList<String> createFile(Path targetFile, CompressionCodec codec, int recordLen, int numRecords)
        throws IOException {
    ArrayList<String> recordList = new ArrayList<String>(numRecords);
    OutputStream ostream = localFs.create(targetFile);
    if (codec != null) {
        ostream = codec.createOutputStream(ostream);
    }
    Writer writer = new OutputStreamWriter(ostream);
    try {
        StringBuffer sb = new StringBuffer();
        for (int i = 0; i < numRecords; i++) {
            for (int j = 0; j < recordLen; j++) {
                sb.append(chars[charRand.nextInt(chars.length)]);
            }
            String recordData = sb.toString();
            recordList.add(recordData);
            writer.write(recordData);
            sb.setLength(0);
        }
    } finally {
        writer.close();
    }
    return recordList;
}
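In this example a single StringBuffer is shared across all generated records; sb.setLength(0) clears it at the end of each iteration so the next record is built in the same buffer instead of a newly allocated one.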
From source file:ome.services.blitz.repo.CheckedPath.java
/**
 * Returns a new {@link CheckedPath} that has the given path appended
 * to the end of this instance's path. A check is made that the name does
 * not contain "/" (i.e. subpaths) and that it is not ".." or ".".
 * {@link CheckedPath}s generated with this method always return a
 * <code>null</code> hash.
 *
 * @param name
 * @return
 */
public CheckedPath child(String name) throws ValidationException {
    if (name == null || "".equals(name)) {
        throw new ValidationException(null, null, "null or empty name");
    } else if (SPECIAL_DIRS.contains(name)) {
        final StringBuffer message = new StringBuffer();
        message.append("Only proper child name is allowed, not ");
        for (final String dir : SPECIAL_DIRS) {
            message.append('\'');
            message.append(dir);
            message.append('\'');
            message.append(", ");
        }
        message.setLength(message.length() - 2); // remove trailing ", "
        message.append('.');
        throw new ValidationException(null, null, message.toString());
    } else if (name.indexOf(FsFile.separatorChar) >= 0) {
        throw new ValidationException(null, null,
                "No subpaths allowed. Path contains '" + FsFile.separatorChar + "'");
    }
    final FsFile fullChild = FsFile.concatenate(this.fsFile, new FsFile(name));
    return new CheckedPath(new File(original, name), fullChild);
}
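Here message.setLength(message.length() - 2) trims the trailing ", " left over from the last loop iteration before the closing '.' is appended to the error message.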
From source file:org.apache.ddlutils.TestBase.java
/**
 * Compresses the whitespaces in the given string to a single space. Also
 * recognizes special delimiter chars and removes whitespaces before them.
 *
 * @param original The original string
 * @return The resulting string
 */
private String compressWhitespaces(String original) {
    StringBuffer result = new StringBuffer();
    char oldChar = ' ';
    char curChar;

    for (int idx = 0; idx < original.length(); idx++) {
        curChar = original.charAt(idx);
        if (Character.isWhitespace(curChar)) {
            if (oldChar != ' ') {
                oldChar = ' ';
                result.append(oldChar);
            }
        } else {
            if ((curChar == ',') || (curChar == ';') || (curChar == '(') || (curChar == ')')) {
                if ((oldChar == ' ') && (result.length() > 0)) {
                    // we're removing whitespaces before commas/semicolons
                    result.setLength(result.length() - 1);
                }
            }
            if ((oldChar == ',') || (oldChar == ';')) {
                // we're adding a space after commas/semicolons if necessary
                result.append(' ');
            }
            result.append(curChar);
            oldChar = curChar;
        }
    }
    return result.toString();
}
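result.setLength(result.length() - 1) discards the single space that was appended for a preceding whitespace run once the current character turns out to be a comma, semicolon, or parenthesis.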
From source file:org.kxml.parser.XmlParser.java
ParseEvent parseCData() throws IOException {
    final StringBuffer buf = readTo('[', new StringBuffer());
    if (!buf.toString().equals("CDATA")) {
        throw new DefaultParserException("Invalid CDATA start!", null);
    }
    buf.setLength(0);
    readChar(); // skip '['

    int c0 = readChar();
    int c1 = readChar();

    while (true) {
        int c2 = readChar();
        if (c2 == -1) {
            throw new DefaultParserException(UNEXPECTED_EOF, null);
        }
        if (c0 == ']' && c1 == ']' && c2 == '>') {
            break;
        }
        buf.append((char) c0);
        c0 = c1;
        c1 = c2;
    }
    return new ParseEvent(Xml.TEXT, buf.toString());
}
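buf.setLength(0) empties the buffer that held the "CDATA" marker so the same buffer can then collect the characters of the CDATA section itself.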
From source file:org.apache.hadoop.hive.ql.dataToDB.BaseDBExternalDataLoad.java
private void insertSingleFile(Path file_path, Statement stat, String delimiter) throws HiveException {
    try {
        FSDataInputStream fdis = fs.open(file_path);
        Object reader;
        if (newinsert) {
            reader = new MyLineReader(fdis);
        } else {
            reader = new BufferedReader(new InputStreamReader(fdis));
        }
        String line = "";
        String deli = delimiter;
        if (deli == null || deli.isEmpty()) {
            deli = new String(new char[] { '\01' });
        }
        List<FieldSchema> fss = this.cols;
        int recordcnt = 1;
        String basesql = "insert into " + config.getDbTable() + " values ";
        int insertsize = HiveConf.getIntVar(config.getConf(), HiveConf.ConfVars.HIVEBIROWNUMPERINSERT);
        if (insertsize <= 0 || insertsize >= 100000) {
            insertsize = 10000;
        }
        ArrayList<String> valuelist = new ArrayList<String>(insertsize);
        StringBuffer lineBuffer = new StringBuffer();
        while (readLine(reader, lineBuffer)) {
            line = lineBuffer.toString();
            lineBuffer.setLength(0);
            ArrayList<Integer> arrays = getIndexes(line, deli);
            String values = "(";
            int m = arrays.get(0);
            int n = arrays.get(1);
            int count = 0;
            for (int j = 1; j < arrays.size(); j++) {
                String c = "";
                n = arrays.get(j);
                if (n == (m + 1)) {
                    c = toSQLInsertStr(fss.get(count).getType(), "");
                } else {
                    c = toSQLInsertStr(fss.get(count).getType(), line.substring(m + 1, n));
                }
                m = n;
                if (count == 0)
                    values += c;
                else
                    values += "," + c;
                count++;
            }
            values += ")";
            valuelist.add(values);
            if (recordcnt % insertsize == 0) {
                insertValues(stat, basesql, valuelist);
                valuelist.clear();
            }
            if (recordcnt % 10000 == 0 && SessionState.get() != null)
                SessionState.get().ssLog("Load reocord to postgre:" + recordcnt);
            recordcnt++;
        }
        if (!valuelist.isEmpty()) {
            insertValues(stat, basesql, valuelist);
            valuelist.clear();
        }
        closestream(reader);
    } catch (IOException e) {
        LOG.debug(e.getMessage());
        throw new HiveException(e.getMessage());
    } catch (SQLException e) {
        LOG.debug(e.getMessage());
        throw new HiveException(e.getMessage());
    }
}
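lineBuffer.setLength(0) resets the shared line buffer right after its contents have been copied into a String, so readLine can refill the same buffer on the next iteration.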
From source file:org.apache.hadoop.hive.ql.dataToDB.BaseDBExternalDataLoad.java
private void insertSingleFile(Path file_path, Statement stat) throws HiveException {
    try {
        FSDataInputStream fdis = fs.open(file_path);
        Object reader;
        if (newinsert) {
            reader = new MyLineReader(fdis);
        } else {
            reader = new BufferedReader(new InputStreamReader(fdis));
        }
        String line = "";
        String deli = config.getTable().getSerdeParam(Constants.FIELD_DELIM);
        if (deli == null || deli.isEmpty()) {
            deli = new String(new char[] { '\01' });
        }
        List<FieldSchema> fss = config.getTable().getCols();
        int recordcnt = 1;
        String basesql = "insert into " + config.getDbTable() + " values ";
        int insertsize = HiveConf.getIntVar(config.getConf(), HiveConf.ConfVars.HIVEBIROWNUMPERINSERT);
        if (insertsize <= 0 || insertsize >= 100000) {
            insertsize = 10000;
        }
        ArrayList<String> valuelist = new ArrayList<String>(insertsize);
        StringBuffer lineBuffer = new StringBuffer();
        while (readLine(reader, lineBuffer)) {
            line = lineBuffer.toString();
            lineBuffer.setLength(0);
            ArrayList<Integer> arrays = getIndexes(line, deli);
            String values = "(";
            int m = arrays.get(0);
            int n = arrays.get(1);
            int count = 0;
            for (int j = 1; j < arrays.size(); j++) {
                String c = "";
                n = arrays.get(j);
                if (n == (m + 1)) {
                    c = toSQLInsertStr(fss.get(count).getType(), "");
                } else {
                    c = toSQLInsertStr(fss.get(count).getType(), line.substring(m + 1, n));
                }
                m = n;
                if (count == 0)
                    values += c;
                else
                    values += "," + c;
                count++;
            }
            values += ")";
            valuelist.add(values);
            if (recordcnt % insertsize == 0) {
                insertValues(stat, basesql, valuelist);
                valuelist.clear();
            }
            if (recordcnt % 10000 == 0 && SessionState.get() != null)
                SessionState.get().ssLog("Load reocord to postgre:" + recordcnt);
            recordcnt++;
        }
        if (!valuelist.isEmpty()) {
            insertValues(stat, basesql, valuelist);
            valuelist.clear();
        }
        closestream(reader);
    } catch (IOException e) {
        LOG.debug(e.getMessage());
        throw new HiveException(e.getMessage());
    } catch (SQLException e) {
        LOG.debug(e.getMessage());
        throw new HiveException(e.getMessage());
    }
}
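This overload follows the same pattern as the previous method: the line buffer is cleared with setLength(0) once each line has been converted to a String.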
From source file:eu.earthobservatory.org.StrabonEndpoint.client.SPARQLEndpoint.java
/**
 * Executes a SPARQL query on the Endpoint and gets the results
 * in the format specified by stSPARQLQueryResultFormat, which is
 * an instance of {@link TupleQueryResultFormat} (or a subclass).
 *
 * @param sparqlQuery
 * @param format
 * @return
 * @throws IOException
 */
public EndpointResult query(String sparqlQuery, stSPARQLQueryResultFormat format) throws IOException {
    assert (format != null);

    // create a post method to execute
    HttpPost method = new HttpPost(getConnectionURL());

    // set the query parameter
    List<NameValuePair> params = new ArrayList<NameValuePair>();
    params.add(new BasicNameValuePair("query", sparqlQuery));
    UrlEncodedFormEntity encodedEntity = new UrlEncodedFormEntity(params, Charset.defaultCharset());
    method.setEntity(encodedEntity);

    // set the content type
    method.setHeader("Content-Type", "application/x-www-form-urlencoded");

    // set the accept format
    method.addHeader("Accept", format.getDefaultMIMEType());

    try {
        // response that will be filled next
        String responseBody = "";

        // execute the method
        HttpResponse response = hc.execute(method);
        int statusCode = response.getStatusLine().getStatusCode();

        // If the response does not enclose an entity, there is no need
        // to worry about connection release
        HttpEntity entity = response.getEntity();
        if (entity != null) {
            InputStream instream = entity.getContent();
            try {
                BufferedReader reader = new BufferedReader(new InputStreamReader(instream));
                StringBuffer strBuf = new StringBuffer();

                // do something useful with the response
                String nextLine;
                while ((nextLine = reader.readLine()) != null) {
                    strBuf.append(nextLine + "\n");
                }

                // remove last newline character
                if (strBuf.length() > 0) {
                    strBuf.setLength(strBuf.length() - 1);
                }

                responseBody = strBuf.toString();
            } catch (IOException ex) {
                // In case of an IOException the connection will be released
                // back to the connection manager automatically
                throw ex;
            } catch (RuntimeException ex) {
                // In case of an unexpected exception you may want to abort
                // the HTTP request in order to shut down the underlying
                // connection and release it back to the connection manager.
                method.abort();
                throw ex;
            } finally {
                // Closing the input stream will trigger connection release
                instream.close();
            }
        }

        return new EndpointResult(statusCode, response.getStatusLine().getReasonPhrase(), responseBody);
    } catch (IOException e) {
        throw e;
    } finally {
        // release the connection.
        method.releaseConnection();
    }
}
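strBuf.setLength(strBuf.length() - 1) strips the final newline that the read loop appended after the last line of the HTTP response body.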
From source file:com.silverpeas.tags.navigation.MenuTag.java
/**
 * Builds the tabs of the menu.
 * @param out
 * @param rootTopic
 * @param level
 */
private void browse(JspWriter out, NodeDetail rootTopic, int level, boolean display) {
    try {
        List<NodeDetail> themes = getSubTopics(themetracker, rootTopic.getId());
        Iterator<NodeDetail> iTheme = themes.iterator();
        if (!themes.isEmpty()) {
            if (level == 1) {
                print(out, "<ul id='" + id + "'>", display);
            } else {
                print(out, "<ul id='" + buildId(PARENT_TOPIC_ID_PREFIX, rootTopic) + "'>", display);
            }
        }
        StringBuffer html = new StringBuffer();
        while (iTheme.hasNext()) {
            NodeDetail theme = (NodeDetail) iTheme.next();
            if (excludeTopicsNamed == null || theme.getName().equalsIgnoreCase(excludeTopicsNamed) == false) {
                html.setLength(0);
                html.append("<li id='");
                html.append(buildId(TOPIC_ID_PREFIX, theme));
                html.append("' class='");
                html.append(getClassNameByLevel(theme));
                html.append("'>");
                html.append("<a href='");
                html.append(generateFullSemanticPath(theme, getPrefixIdByLevel(theme)));
                html.append("' title='");
                html.append(StringEscapeUtils.escapeHtml(theme.getDescription()));
                html.append("'><span>");
                html.append(theme.getName());
                html.append("</span></a>");
                print(out, html.toString(), display);
                if ((maxDeepLevel != null && Integer.valueOf(maxDeepLevel) > level) || maxDeepLevel == null) {
                    if (display) {
                        browse(out, theme, level + 1, true);
                    } else {
                        boolean d = idTopicSubRoot.equalsIgnoreCase(String.valueOf(rootTopic.getId()))
                                || idTopicSubRoot.equalsIgnoreCase(String.valueOf(theme.getId()));
                        browse(out, theme, level + 1, d);
                    }
                }
                print(out, "</li>", display);
                if (classNameSeparator != null && level == 1 && iTheme.hasNext())
                    print(out, "<li class='" + classNameSeparator + "'></li>", display);
            }
        }
        if (!themes.isEmpty()) {
            print(out, "</ul>", display);
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}
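html.setLength(0) at the start of each loop iteration resets the buffer before the markup for the next topic is assembled.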
From source file:com.pureinfo.dolphin.query.QueryEntity.java
/**
 * @see java.lang.Object#toString()
 */
public String toString() {
    StringBuffer sbuff = new StringBuffer();
    try {
        sbuff.append("alias=").append(m_sAlias);
        sbuff.append(", type=").append(m_metadata.getName());
        sbuff.append(", join-type=").append(m_nJoinType);
        sbuff.append(", join-condition=").append(m_sJoinCondition);
        if (m_depends != null) {
            sbuff.append(", depends=").append(StringUtils.join(m_depends, ','));
        }
        sbuff.append(", joined=").append(m_bJoined);
        return sbuff.toString();
    } finally {
        sbuff.setLength(0);
    }
}
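The setLength(0) call sits in the finally block, so it runs only after sbuff.toString() has produced the return value; it simply empties the buffer once the result string has been built.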
From source file:org.eredlab.g4.ccl.net.tftp.TFTPRequestPacket.java
/***
 * Creates a request packet of a given type based on a received
 * datagram. Assumes the datagram is at least length 4, else an
 * ArrayIndexOutOfBoundsException may be thrown.
 * <p>
 * @param type The type of the request (either TFTPPacket.READ_REQUEST or
 *             TFTPPacket.WRITE_REQUEST).
 * @param datagram The datagram containing the received request.
 * @throws TFTPPacketException If the datagram isn't a valid TFTP
 *         request packet of the appropriate type.
 ***/
TFTPRequestPacket(int type, DatagramPacket datagram) throws TFTPPacketException {
    super(type, datagram.getAddress(), datagram.getPort());

    byte[] data;
    int index, length;
    String mode;
    StringBuffer buffer;

    data = datagram.getData();

    if (getType() != data[1])
        throw new TFTPPacketException("TFTP operator code does not match type.");

    buffer = new StringBuffer();

    index = 2;
    length = datagram.getLength();

    while (index < length && data[index] != 0) {
        buffer.append((char) data[index]);
        ++index;
    }

    _filename = buffer.toString();

    if (index >= length)
        throw new TFTPPacketException("Bad filename and mode format.");

    buffer.setLength(0);
    ++index; // need to advance beyond the end of string marker

    while (index < length && data[index] != 0) {
        buffer.append((char) data[index]);
        ++index;
    }

    mode = buffer.toString().toLowerCase();
    length = _modeStrings.length;

    for (index = 0; index < length; index++) {
        if (mode.equals(_modeStrings[index])) {
            _mode = index;
            break;
        }
    }

    if (index >= length) {
        throw new TFTPPacketException("Unrecognized TFTP transfer mode: " + mode);
        // May just want to default to binary mode instead of throwing
        // exception.
        //_mode = TFTP.OCTET_MODE;
    }
}
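buffer.setLength(0) clears the buffer after the filename has been read so it can be reused to collect the transfer mode string that follows.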