List of usage examples for org.apache.commons.lang.StringUtils.join
public static String join(Collection<?> collection, String separator)
Joins the elements of the provided Collection into a single String, with the elements separated by the given separator. No delimiter is added before or after the list; a null separator is treated as an empty String ("").
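A minimal usage sketch (the variable names and list contents here are illustrative, not taken from the examples below):

    import java.util.Arrays;
    import java.util.List;

    import org.apache.commons.lang.StringUtils;

    public class JoinExample {
        public static void main(String[] args) {
            List<String> hosts = Arrays.asList("node1", "node2", "node3");
            // Join the collection with a comma separator, producing "node1,node2,node3"
            String joined = StringUtils.join(hosts, ",");
            System.out.println(joined);
        }
    }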
From source file:com.spotify.hdfs2cass.CassandraPartitionerTest.java
@Test
public void testGetPartition() throws Exception {
    final int maxNodes = 5;

    final List<String> tokenRanges = new ArrayList<String>();
    BigInteger start = BigInteger.ZERO;
    BigInteger step = RandomPartitioner.MAXIMUM.divide(BigInteger.valueOf(maxNodes));
    for (int i = 0; i < maxNodes - 1; i++) {
        BigInteger end = start.add(step);
        tokenRanges.add(String.format("%d:%d", start, end));
        start = end.add(BigInteger.ONE);
    }
    tokenRanges.add(String.format("%d:0", start));

    final JobConf conf = new JobConf();
    conf.set(ClusterInfo.SPOTIFY_CASSANDRA_TOKENS_PARAM, StringUtils.join(tokenRanges, ","));
    conf.set(ClusterInfo.SPOTIFY_CASSANDRA_PARTITIONER_PARAM, "org.apache.cassandra.dht.RandomPartitioner");

    CassandraPartitioner instance = new CassandraPartitioner();
    instance.configure(conf);

    Text key = new Text("foobar");
    assertEquals(2, instance.getPartition(key, null, 5));

    key = new Text("someotherkey");
    assertEquals(1, instance.getPartition(key, null, 5));

    key = new Text("1ce5cf4b861941f4aa799ae39ac9daa4");
    assertEquals(4, instance.getPartition(key, null, 5));
}
From source file:com.delphix.session.module.rmi.impl.RmiMethodOrdering.java
public RmiMethodOrdering(Class<?> clazz) {
    Map<String, Method> methodMap = Maps.newHashMap();

    for (Method m : clazz.getMethods()) {
        List<String> paramNames = Lists.newArrayList();
        for (Class<?> paramType : m.getParameterTypes()) {
            paramNames.add(paramType.getCanonicalName());
        }
        String str = String.format("%s(%s)", m.getName(), StringUtils.join(paramNames, ", "));
        methodMap.put(str, m);
    }

    List<String> sortedNames = new ArrayList<String>(methodMap.keySet());
    Collections.sort(sortedNames);

    for (String name : sortedNames) {
        Method m = methodMap.get(name);
        placement.put(m, methods.size());
        methods.add(m);
    }
}
From source file:mitm.common.util.RegExprUtilsTest.java
@Test
public void testSplitRegExp() {
    assertEquals("test,1234", StringUtils.join(RegExprUtils.splitRegExp("/test/ 1234"), ","));
    assertEquals(" test , 1234 ", StringUtils.join(RegExprUtils.splitRegExp(" / test / 1234 "), ","));
    assertEquals(",", StringUtils.join(RegExprUtils.splitRegExp("//"), ","));
    assertEquals("test,", StringUtils.join(RegExprUtils.splitRegExp("/test/"), ","));
    assertEquals("test,/1234/", StringUtils.join(RegExprUtils.splitRegExp("/test/ /1234/"), ","));
}
From source file:com.github.hexocraft.random.items.command.RiCommandDelete.java
public RiCommandDelete(RandomItemsPlugin plugin) {
    super("delete", plugin);
    this.setAliases(Lists.newArrayList("d"));
    this.setDescription(StringUtils.join(plugin.messages.cDelete, "\n"));
    this.setPermission(Permissions.CREATE.toString());
    this.addArgument(new CommandArgument<String>("name", ArgTypeRandomPool.get(), true, true,
            plugin.messages.cDeleteArgName));
}
From source file:com.healthcit.cacure.test.AbstractIntegrationTestCase.java
protected boolean existsInDb(final String tableName, final Long... id) {
    return 0 < simpleJdbcTemplate.queryForInt(
            "select count(*) from " + tableName + " where id in (" + StringUtils.join(id, ",") + ")");
}
From source file:com.netflix.paas.cassandra.keys.KeyspaceKey.java
public KeyspaceKey(ClusterKey clusterKey, String keyspaceName) {
    this.clusterKey = clusterKey;
    this.keyspaceName = keyspaceName;
    this.schemaName = StringUtils.join(new String[] { clusterKey.getClusterName(), keyspaceName }, ".");
}
From source file:com.amazonaws.mturk.addon.HITDataWriter.java
protected synchronized void writeLinePrivate(String[] fieldValues) throws IOException {
    if (fieldValues == null || fieldValues.length == 0) {
        throw new IllegalArgumentException("fieldValues is empty.");
    }

    String output = StringUtils.join(fieldValues, delim);
    getFile().saveString(output + "\n", true); // append
}
From source file:com.bigdata.dastor.streaming.StreamOut.java
/**
 * Split out files for all tables on disk locally for each range and then stream them to the target endpoint.
 */
public static void transferRanges(InetAddress target, String tableName, Collection<Range> ranges,
        Runnable callback) {
    assert ranges.size() > 0;

    logger.debug("Beginning transfer process to " + target + " for ranges " + StringUtils.join(ranges, ", "));

    /*
     * (1) dump all the memtables to disk.
     * (2) anticompaction -- split out the keys in the range specified
     * (3) transfer the data.
     */
    try {
        Table table = Table.open(tableName);
        logger.info("Flushing memtables for " + tableName + "...");
        for (Future f : table.flush()) {
            try {
                f.get();
            } catch (InterruptedException e) {
                throw new RuntimeException(e);
            } catch (ExecutionException e) {
                throw new RuntimeException(e);
            }
        }
        logger.info("Performing anticompaction ...");
        /* Get the list of files that need to be streamed */
        transferSSTables(target, table.forceAntiCompaction(ranges, target), tableName);
        // SSTR GC deletes the file when done
    } catch (IOException e) {
        throw new IOError(e);
    } finally {
        StreamOutManager.remove(target);
    }

    if (callback != null)
        callback.run();
}
From source file:com.baifendian.swordfish.execserver.job.storm.StormSubmitArgsUtil.java
private static List<String> jarArgs(StormJarParam stormJarParam) {
    List<String> args = new ArrayList<>();

    args.add(JAR);

    // add mainJar
    args.add(stormJarParam.getMainJar().getRes());

    // add mainClass
    args.add(stormJarParam.getMainClass());

    // add jars
    List<ResourceInfo> jars = stormJarParam.getJars();
    if (CollectionUtils.isNotEmpty(jars)) {
        args.add(JARS);
        args.add(StringUtils.join(jars.stream().map(p -> p.getRes()).toArray(), ","));
    }

    if (StringUtils.isNotEmpty(stormJarParam.getArgs())) {
        args.add(stormJarParam.getArgs());
    }

    if (StringUtils.isNotEmpty(stormJarParam.getArtifacts())) {
        args.add(ARTIFACTS);
        args.add(stormJarParam.getArtifacts());
    }

    if (StringUtils.isNotEmpty(stormJarParam.getArtifactRepositories())) {
        args.add(ARTIFACTREPOSITORIES);
        args.add(stormJarParam.getArtifactRepositories());
    }

    return args;
}
From source file:azkaban.flow.GroupedFlow.java
@Override
public String getName() {
    return StringUtils.join(Iterables.transform(Arrays.asList(flows), new Function<Flow, String>() {
        @Override
        public String apply(Flow flow) {
            return flow.getName();
        }
    }).iterator(), " + ");
}