List of usage examples for java.lang.StringBuffer.lastIndexOf
@Override public int lastIndexOf(String str)
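For quick reference, lastIndexOf(String) returns the index of the rightmost occurrence of its argument within the buffer, or -1 if the argument does not occur. The following minimal sketch illustrates this behaviour; the class name LastIndexOfDemo and the buffer contents are illustrative only and do not come from the source files listed below.

public class LastIndexOfDemo {
    public static void main(String[] args) {
        StringBuffer sb = new StringBuffer("a,b,c,");
        int last = sb.lastIndexOf(",");           // 5: index of the trailing comma
        if (last == sb.length() - 1) {
            sb.deleteCharAt(last);                // common idiom: drop a trailing separator
        }
        System.out.println(sb);                   // prints "a,b,c"
        System.out.println(sb.lastIndexOf("x"));  // prints -1 (not found)
    }
}

Most of the examples below use the same idea: trimming a trailing separator, or splicing text in front of the last "." in a filename.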
From source file:org.deeplearning4j.hadoop.util.HdfsUtils.java
/**
 * Adapted from
 * http://terrier.org/docs/v3.5/javadoc/org/terrier/utility/io/HadoopUtility.html#saveClassPathToJob%28org.apache.hadoop.mapred.JobConf%29
 * @param jobConf
 * @throws IOException
 */
public static List<Path> saveClassPathToJob(JobConf jobConf) throws Exception {
    String hdfs = getHost(jobConf);
    HdfsLock lock = new HdfsLock(hdfs);
    String hdfs2 = getHdfs(jobConf);
    if (jobConf.get(HDFS_HOST) != null) {
        if (lock.isLocked()) {
            List<Path> ret = lock.getPaths();
            StringBuffer files = new StringBuffer();
            StringBuffer classPath = new StringBuffer();
            for (Path path : ret) {
                files.append(hdfs2 + path.toString());
                files.append(",");
                classPath.append(hdfs2 + path.toString());
                classPath.append(":");
                jobConf.addResource(path.toUri().toURL());
            }
            String classPathToSet = classPath.toString().substring(0, classPath.lastIndexOf(":"));
            String filesToSet = files.toString().substring(0, files.lastIndexOf(","));
            log.info("Setting class path " + classPathToSet);
            log.info("Using files " + filesToSet);
            jobConf.set("mapred.cache.files", filesToSet);
            jobConf.set("mapred.job.classpath.files", classPathToSet);
            return ret;
        }
    }
    List<Path> paths = new ArrayList<Path>();
    log.info("Copying classpath to job");
    final String[] jars = findJarFiles(new String[] { System.getenv().get("CLASSPATH"),
            System.getProperty("java.class.path"), System.getProperty("surefire.test.class.path") });
    final FileSystem defFS = FileSystem.get(jobConf);
    int numFilesWritten = 0;
    for (String jarFile : jars) {
        // class path issues
        if (jarFile.contains("hadoop-client")) {
            log.info("Skipping hadoop-client");
            continue;
        } else if (jarFile.contains("mapreduce-run")) {
            log.info("Skipping map reduce run");
            continue;
        }
        Path srcJarFilePath = new Path("file:///" + jarFile);
        String filename = srcJarFilePath.getName();
        Path tmpJarFilePath = makeFile(jobConf, filename);
        log.info("Uploading " + jarFile + " to " + tmpJarFilePath.toString());
        try {
            defFS.copyFromLocalFile(srcJarFilePath, tmpJarFilePath);
            jobConf.addResource(tmpJarFilePath);
            paths.add(tmpJarFilePath);
            numFilesWritten++;
        } catch (Exception e) {
            for (Path path : paths) {
                if (defFS.exists(path))
                    defFS.delete(path, true);
            }
            lock.close();
            log.error(String.format("Exception writing to hdfs; rolling back %d jar files ", numFilesWritten), e);
            throw new IOException("Couldn't write jar file " + jarFile);
        }
    }
    try {
        lock.create(paths);
    } catch (KeeperException.SessionExpiredException e) {
        lock = new HdfsLock(hdfs);
        lock.create(paths);
    }
    lock.close();
    // resolve any differences by removing clashing names in the files (archives are removed from files)
    Set<Path> remove = new HashSet<Path>();
    for (Path path : paths) {
        boolean exists = false;
        try {
            exists = defFS.exists(path);
        } catch (IllegalArgumentException e) {
            exists = false;
        }
        if (!exists)
            remove.add(path);
    }
    paths.removeAll(remove);
    return paths;
}
From source file:org.apache.hadoop.hbase.backup.master.FullTableBackupProcedure.java
/**
 * Get backup request meta data dir as string.
 * @param backupInfo backup context
 * @return meta data dir
 */
private static String obtainBackupMetaDataStr(BackupInfo backupInfo) {
    StringBuffer sb = new StringBuffer();
    sb.append("type=" + backupInfo.getType() + ",tablelist=");
    for (TableName table : backupInfo.getTables()) {
        sb.append(table + ";");
    }
    if (sb.lastIndexOf(";") > 0) {
        sb.delete(sb.lastIndexOf(";"), sb.lastIndexOf(";") + 1);
    }
    sb.append(",targetRootDir=" + backupInfo.getTargetRootDir());
    return sb.toString();
}
From source file:org.jboss.test.bpel.ws.consumption.partner.resource.ResourceDictionaryFactory.java
protected String getBaseName(String sourceLanguage) {
    StringBuffer baseName = new StringBuffer(getClass().getName());
    baseName.setLength(baseName.lastIndexOf(".") + 1);
    baseName.append(sourceLanguage);
    return baseName.toString();
}
From source file:hydrograph.ui.expression.editor.evaluate.EvaluateExpression.java
private String getTargetException(String error) {
    String tagetException = "Target exception:";
    StringBuffer buffer = new StringBuffer(error);
    if (buffer.lastIndexOf(tagetException) > -1)
        buffer.delete(0, buffer.lastIndexOf(tagetException));
    return buffer.toString();
}
From source file:control.UploadFile.java
private File checkExist(String fileName) {
    fileName = "new" + fileName.substring(fileName.lastIndexOf('.'));
    File f = new File(saveFile + "/" + fileName);
    if (f.exists()) {
        StringBuffer sb = new StringBuffer(fileName);
        sb.insert(sb.lastIndexOf("."), "-" + new Date().getTime());
        f = new File(saveFile + "/" + sb.toString());
    }
    return f;
}
From source file:web.UploadFile.java
private File checkExist(String filename) {
    File f = new File(filePath + "/" + filename);
    if (f.exists()) {
        StringBuffer sb = new StringBuffer(filename);
        sb.insert(sb.lastIndexOf("."), "-" + new Date().getTime());
        f = new File(filePath + "/" + sb.toString());
    }
    return f;
}
From source file:hydrograph.ui.expression.editor.datastructure.MethodDetails.java
private void createPlaceHolderFromSource(IMethod iMethod, String className) throws JavaModelException {
    StringBuffer buffer = new StringBuffer(iMethod.getSource());
    int indexOfPlaceHolder = buffer.lastIndexOf("@see");
    if (indexOfPlaceHolder != -1 && iMethod.getParameterNames() != null
            && iMethod.getParameterNames().length > 0) {
        buffer = buffer.delete(0, indexOfPlaceHolder + 4);
        buffer = buffer.delete(buffer.indexOf("\n") + 1, buffer.capacity());
        if (StringUtils.contains(buffer.toString(), className + Constants.DOT + iMethod.getElementName())) {
            placeHolder = StringUtils.trim(buffer.toString());
        } else
            placeHolder = createDefaultPlaceHolder(iMethod, className);
    } else {
        placeHolder = createDefaultPlaceHolder(iMethod, className);
    }
}
From source file:org.exoplatform.wiki.rendering.internal.parser.confluence.ConfluenceLinkReferenceParser.java
private String divideAfterLast(StringBuffer buffer, char divider) {
    if (buffer.length() == 0) {
        return null;
    }
    return divideAfter(buffer, buffer.lastIndexOf(Character.toString(divider)));
}
From source file:com.ts.control.UploadFile.java
private File checkExist(String fileName) { File f = new File(saveFile + "/" + fileName); if (f.exists()) { StringBuffer sb = new StringBuffer(fileName); sb.insert(sb.lastIndexOf("."), "-" + new Date().getTime()); f = new File(saveFile + "/" + sb.toString()); }/* ww w .ja va 2s . c o m*/ return f; }
From source file:by.iharkaratkou.TestServlet.java
private File checkExist(String fileName) { File f = new File(saveFile + "/" + fileName); // File f = new File("d:\\eclipse_workspace\\upload\\LICENSE"); System.out.println("fileName: " + saveFile + "/" + fileName); System.out.println("f.exists(): " + f.exists()); if (f.exists()) { StringBuffer sb = new StringBuffer(fileName); System.out.println("sb: " + sb); sb.insert(sb.lastIndexOf("."), "-" + new Date().getTime()); f = new File(saveFile + "/" + sb.toString()); System.out.println("sb.toString(): " + sb.toString()); }/*from w w w. java2s . co m*/ return f; }