List of usage examples for org.apache.commons.lang StringUtils removeEnd
public static String removeEnd(String str, String remove)
Removes a substring only if it is at the end of a source string, otherwise returns the source string.
From source file:org.apache.rya.indexing.accumulo.freetext.AccumuloFreeTextIndexer.java
/** * Checks to see if the provided term appears in other documents. * @param term the term to search for.// w w w .j a v a 2 s . co m * @param currentDocId the current document ID that the search term exists in. * @return {@code true} if the term was found in other documents. {@code false} otherwise. */ private boolean doesTermExistInOtherDocs(final String term, final int currentDocId, final Text docIdText) { try { final String freeTextDocTableName = getFreeTextDocTablename(conf); final Scanner scanner = getScanner(freeTextDocTableName); final String t = StringUtils.removeEnd(term, "*").toLowerCase(); final Text queryTerm = ColumnPrefixes.getTermColFam(t); // perform query and read results scanner.fetchColumnFamily(queryTerm); for (final Entry<Key, Value> entry : scanner) { final Key key = entry.getKey(); final Text row = key.getRow(); final int rowId = Integer.parseInt(row.toString()); // We only want to check other documents from the one we're deleting if (rowId != currentDocId) { final Text columnFamily = key.getColumnFamily(); final String columnFamilyValue = columnFamily.toString(); // Check that the value has the term prefix if (columnFamilyValue.startsWith(ColumnPrefixes.TERM_CF_PREFIX.toString())) { final Text text = ColumnPrefixes.removePrefix(columnFamily); final String value = text.toString(); if (value.equals(term)) { return true; } } } } } catch (final IOException e) { logger.error("Error searching for the existance of the term in other documents", e); } return false; }
From source file:org.apache.rya.mongodb.document.util.DisjunctiveNormalFormConverter.java
/**
 * Converts a document visibility boolean expression string into Disjunctive
 * Normal Form (DNF). Expressions use this format in DNF:<pre>
 * (P1 &amp; P2 &amp; P3 ... Pn) | (Q1 &amp; Q2 ... Qm) ...
 * </pre>
 * The conversion is done by brute force: every possible true/false assignment
 * of the expression's terms is tried against the original expression, and the
 * assignments that grant access become the OR'd groups of the DNF result.
 * @param documentVisibility the {@link DocumentVisibility}.
 * @return a new {@link DocumentVisibility} with its expression in DNF.
 */
public static DocumentVisibility convertToDisjunctiveNormalForm(final DocumentVisibility documentVisibility) {
    // Find all the terms used in the expression
    final List<String> terms = findNodeTerms(documentVisibility.getParseTree(), documentVisibility.getExpression());
    // Create an appropriately sized truth table that has the correct 0's
    // and 1's in place based on the number of terms.
    // This size should be [numberOfTerms][2 ^ numberOfTerms].
    final byte[][] truthTable = createTruthTableInputs(terms);
    // Go through each row in the truth table.
    // If the row has a 1 for the term then create an Authorization for it
    // and test if it works.
    // If the row passes then that means all the terms that were a 1 and
    // were used can be AND'ed together to pass the expression.
    // All the rows that pass can be OR'd together.
    // Disjunction Normal Form: (P1 & P2 & P3 ... Pn) | (Q1 & Q2 ... Qm) ...
    final List<List<String>> termRowsThatPass = new ArrayList<>();
    for (final byte[] row : truthTable) {
        final List<String> termRowToCheck = new ArrayList<>();
        // If the truth table input is a 1 then include the corresponding
        // term that it matches.
        for (int i = 0; i < row.length; i++) {
            final byte entry = row[i];
            if (entry == 1) {
                termRowToCheck.add(terms.get(i));
            }
        }
        // Strip surrounding double quotes from quoted terms before building
        // Authorizations, which expects bare authorization strings.
        final List<String> authList = new ArrayList<>();
        for (final String auth : termRowToCheck) {
            String formattedAuth = auth;
            formattedAuth = StringUtils.removeStart(formattedAuth, "\"");
            formattedAuth = StringUtils.removeEnd(formattedAuth, "\"");
            authList.add(formattedAuth);
        }
        final Authorizations auths = new Authorizations(authList.toArray(new String[0]));
        // Evaluate the original expression under this assignment of terms.
        final boolean hasAccess = DocumentVisibilityUtil.doesUserHaveDocumentAccess(auths, documentVisibility,
                false);
        if (hasAccess) {
            boolean alreadyCoveredBySimplerTerms = false;
            // If one 'AND' group is (A&C) and another is (A&B&C) then we
            // can drop (A&B&C) since it is already covered by simpler terms
            // (it's a subset)
            for (final List<String> existingTermRowThatPassed : termRowsThatPass) {
                alreadyCoveredBySimplerTerms = termRowToCheck.containsAll(existingTermRowThatPassed);
                if (alreadyCoveredBySimplerTerms) {
                    break;
                }
            }
            if (!alreadyCoveredBySimplerTerms) {
                termRowsThatPass.add(termRowToCheck);
            }
        }
    }
    // Rebuild the term rows that passed as a document visibility boolean
    // expression string.
    final StringBuilder sb = new StringBuilder();
    boolean isFirst = true;
    final boolean hasMultipleGroups = termRowsThatPass.size() > 1;
    for (final List<String> termRowThatPassed : termRowsThatPass) {
        if (isFirst) {
            isFirst = false;
        } else {
            sb.append("|");
        }
        // Parenthesize an AND group only when there are multiple groups and
        // the group itself has more than one term.
        if (hasMultipleGroups && termRowThatPassed.size() > 1) {
            sb.append("(");
        }
        sb.append(Joiner.on("&").join(termRowThatPassed));
        if (hasMultipleGroups && termRowThatPassed.size() > 1) {
            sb.append(")");
        }
    }
    log.trace(sb.toString());
    final DocumentVisibility dnfDv = new DocumentVisibility(sb.toString());
    return dnfDv;
}
From source file:org.apache.sling.resourceresolver.impl.legacy.LegacyResourceProviderWhiteboard.java
/**
 * Normalizes a legacy resource provider root path: removes a single trailing
 * slash and ensures the path starts with a leading slash.
 * <p>
 * The original implementation contained a dead store ({@code result = path;}
 * immediately overwritten by a call that read {@code path} again); this
 * version computes the result directly with the standard library while
 * preserving the original behavior, including {@code null} pass-through and
 * {@code ""} normalizing to {@code "/"}.
 *
 * @param path the raw path, may be {@code null}.
 * @return the normalized path, or {@code null} if {@code path} was {@code null}.
 */
private static String normalizePath(final String path) {
    if (path == null) {
        return null;
    }
    // Strip exactly one trailing "/" — mirrors StringUtils.removeEnd(path, "/").
    String result = path.endsWith("/") ? path.substring(0, path.length() - 1) : path;
    if (!result.startsWith("/")) {
        result = "/" + result;
    }
    return result;
}
From source file:org.apache.solr.handler.component.AlfrescoHttpShardHandlerFactory.java
/**
 * Initializes this shard handler factory from plugin configuration: reads
 * tuning parameters, builds the inter-shard thread pool, and creates the
 * default HTTP client and load balancer.
 */
@Override
public void init(PluginInfo info) {
    NamedList args = info.initArgs;
    this.soTimeout = getParameter(args, HttpClientUtil.PROP_SO_TIMEOUT, soTimeout);
    this.scheme = getParameter(args, INIT_URL_SCHEME, null);
    // A configured scheme like "http://" is stored without the "://" suffix.
    if (StringUtils.endsWith(this.scheme, "://")) {
        this.scheme = StringUtils.removeEnd(this.scheme, "://");
    }
    this.connectionTimeout = getParameter(args, HttpClientUtil.PROP_CONNECTION_TIMEOUT, connectionTimeout);
    this.maxConnectionsPerHost = getParameter(args, HttpClientUtil.PROP_MAX_CONNECTIONS_PER_HOST,
            maxConnectionsPerHost);
    this.corePoolSize = getParameter(args, INIT_CORE_POOL_SIZE, corePoolSize);
    this.maximumPoolSize = getParameter(args, INIT_MAX_POOL_SIZE, maximumPoolSize);
    this.keepAliveTime = getParameter(args, MAX_THREAD_IDLE_TIME, keepAliveTime);
    this.queueSize = getParameter(args, INIT_SIZE_OF_QUEUE, queueSize);
    this.accessPolicy = getParameter(args, INIT_FAIRNESS_POLICY, accessPolicy);

    // magic sysprop to make tests reproducible: set by SolrTestCaseJ4.
    String v = System.getProperty("tests.shardhandler.randomSeed");
    if (v != null) {
        r.setSeed(Long.parseLong(v));
    }

    // queueSize == -1 means "no queue": hand tasks straight to threads.
    BlockingQueue<Runnable> blockingQueue = (this.queueSize == -1)
            ? new SynchronousQueue<Runnable>(this.accessPolicy)
            : new ArrayBlockingQueue<Runnable>(this.queueSize, this.accessPolicy);
    this.commExecutor = new ThreadPoolExecutor(this.corePoolSize, this.maximumPoolSize, this.keepAliveTime,
            TimeUnit.SECONDS, blockingQueue, new DefaultSolrThreadFactory("httpShardExecutor"));

    // Build the shared HTTP client used for inter-shard requests.
    ModifiableSolrParams clientParams = new ModifiableSolrParams();
    clientParams.set(HttpClientUtil.PROP_MAX_CONNECTIONS_PER_HOST, maxConnectionsPerHost);
    clientParams.set(HttpClientUtil.PROP_MAX_CONNECTIONS, 10000);
    clientParams.set(HttpClientUtil.PROP_SO_TIMEOUT, soTimeout);
    clientParams.set(HttpClientUtil.PROP_CONNECTION_TIMEOUT, connectionTimeout);
    clientParams.set(HttpClientUtil.PROP_USE_RETRY, false);
    this.defaultClient = HttpClientUtil.createClient(clientParams);
    this.loadbalancer = createLoadbalancer(defaultClient);
}
From source file:org.apache.solr.handler.component.HttpShardHandlerFactory.java
/**
 * Initializes this shard handler factory from plugin configuration: reads
 * tuning parameters (logging each via {@code sb}), resolves the metric name
 * strategy, builds the inter-shard thread pool, and creates the instrumented
 * HTTP client and load balancer.
 */
@Override
public void init(PluginInfo info) {
    StringBuilder sb = new StringBuilder();
    NamedList args = info.initArgs;
    this.soTimeout = getParameter(args, HttpClientUtil.PROP_SO_TIMEOUT, soTimeout, sb);
    this.scheme = getParameter(args, INIT_URL_SCHEME, null, sb);
    // A configured scheme like "http://" is stored without the "://" suffix.
    if (StringUtils.endsWith(this.scheme, "://")) {
        this.scheme = StringUtils.removeEnd(this.scheme, "://");
    }

    // Fail fast on an unknown metric naming strategy so misconfiguration is
    // caught at startup rather than at metric-report time.
    String strategy = getParameter(args, "metricNameStrategy", UpdateShardHandlerConfig.DEFAULT_METRICNAMESTRATEGY,
            sb);
    this.metricNameStrategy = KNOWN_METRIC_NAME_STRATEGIES.get(strategy);
    if (this.metricNameStrategy == null) {
        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Unknown metricNameStrategy: " + strategy
                + " found. Must be one of: " + KNOWN_METRIC_NAME_STRATEGIES.keySet());
    }

    this.connectionTimeout = getParameter(args, HttpClientUtil.PROP_CONNECTION_TIMEOUT, connectionTimeout, sb);
    this.maxConnectionsPerHost = getParameter(args, HttpClientUtil.PROP_MAX_CONNECTIONS_PER_HOST,
            maxConnectionsPerHost, sb);
    this.maxConnections = getParameter(args, HttpClientUtil.PROP_MAX_CONNECTIONS, maxConnections, sb);
    this.corePoolSize = getParameter(args, INIT_CORE_POOL_SIZE, corePoolSize, sb);
    this.maximumPoolSize = getParameter(args, INIT_MAX_POOL_SIZE, maximumPoolSize, sb);
    this.keepAliveTime = getParameter(args, MAX_THREAD_IDLE_TIME, keepAliveTime, sb);
    this.queueSize = getParameter(args, INIT_SIZE_OF_QUEUE, queueSize, sb);
    this.accessPolicy = getParameter(args, INIT_FAIRNESS_POLICY, accessPolicy, sb);
    log.debug("created with {}", sb);

    // magic sysprop to make tests reproducible: set by SolrTestCaseJ4.
    String v = System.getProperty("tests.shardhandler.randomSeed");
    if (v != null) {
        r.setSeed(Long.parseLong(v));
    }

    // queueSize == -1 means "no queue": hand tasks straight to threads.
    BlockingQueue<Runnable> blockingQueue = (this.queueSize == -1)
            ? new SynchronousQueue<Runnable>(this.accessPolicy)
            : new ArrayBlockingQueue<Runnable>(this.queueSize, this.accessPolicy);
    this.commExecutor = new ExecutorUtil.MDCAwareThreadPoolExecutor(this.corePoolSize, this.maximumPoolSize,
            this.keepAliveTime, TimeUnit.SECONDS, blockingQueue, new DefaultSolrThreadFactory("httpShardExecutor"));

    // Build the instrumented HTTP client used for inter-shard requests.
    ModifiableSolrParams clientParams = getClientParams();
    httpRequestExecutor = new InstrumentedHttpRequestExecutor(this.metricNameStrategy);
    clientConnectionManager = new InstrumentedPoolingHttpClientConnectionManager(
            HttpClientUtil.getSchemaRegisteryProvider().getSchemaRegistry());
    this.defaultClient = HttpClientUtil.createClient(clientParams, clientConnectionManager, false,
            httpRequestExecutor);
    this.loadbalancer = createLoadbalancer(defaultClient);
}
From source file:org.apache.storm.verify.VerifyUtils.java
public static void verifyHbase(String hBaseUrl, String tableName, String columnFamily, Map<String, Class> columnNamesToTypes, List<String> expectedRows) throws Exception { List<String> lines = new ArrayList<String>(); final Configuration hbConfig = HBaseConfiguration.create(); hbConfig.set("hbase.rootdir", hBaseUrl); HTable table = new HTable(hbConfig, tableName); ResultScanner results = table.getScanner(columnFamily.getBytes()); for (Result result = results.next(); (result != null); result = results.next()) { String line = ""; for (Map.Entry<String, Class> columnNameAndType : columnNamesToTypes.entrySet()) { byte[] bytes = result.getValue(columnFamily.getBytes(), columnNameAndType.getKey().getBytes()); String colVal = toString(bytes, columnNameAndType.getValue()); line += colVal + FIELD_SEPERATOR; }// ww w. j a va2 s . c om line = StringUtils.removeEnd(line, FIELD_SEPERATOR); lines.add(line); } Collections.sort(lines); Collections.sort(expectedRows); assert lines.equals(expectedRows) : "expectedRows = " + expectedRows + " actualRows = " + lines; }
From source file:org.apache.storm.verify.VerifyUtils.java
/**
 * Verifies that a Cassandra column family contains exactly the expected rows.
 * <p>
 * Reads every row through Astyanax's {@code AllRowsReader}, renders each row
 * as its column values joined by {@code FIELD_SEPERATOR}, sorts both actual
 * and expected lists, and compares them with {@code assert} (assertions must
 * be enabled).
 * <p>
 * NOTE(review): {@code context.start()} is called but the context is never
 * shut down, even on failure — confirm whether a {@code finally
 * context.shutdown()} is needed here.
 *
 * @param cassandraUrl the Cassandra seed host(s).
 * @param keyspaceName the keyspace to read from.
 * @param columnFamily the column family to read.
 * @param columnNamesToTypes column name to value-type mapping; iteration
 *        order determines column order in each rendered row.
 * @param expectedRows the expected rendered rows (sorted in place).
 * @throws Exception on any Cassandra access failure.
 */
public static void verifyCassandra(String cassandraUrl, String keyspaceName, String columnFamily,
        final Map<String, Class> columnNamesToTypes, List<String> expectedRows) throws Exception {
    ColumnFamily<String, String> CF_STANDARD1 = ColumnFamily.newColumnFamily(columnFamily,
            StringSerializer.get(), StringSerializer.get());
    AstyanaxContext<Keyspace> context = new AstyanaxContext.Builder().forCluster("ClusterName")
            .forKeyspace(keyspaceName)
            .withAstyanaxConfiguration(
                    new AstyanaxConfigurationImpl().setDiscoveryType(NodeDiscoveryType.RING_DESCRIBE))
            .withConnectionPoolConfiguration(new ConnectionPoolConfigurationImpl("MyConnectionPool")
                    .setMaxConnsPerHost(1).setSeeds(cassandraUrl))
            .withConnectionPoolMonitor(new CountingConnectionPoolMonitor())
            .buildKeyspace(ThriftFamilyFactory.getInstance());
    context.start();
    Keyspace keyspace = context.getClient();

    final List<String> actualLines = Lists.newArrayList();
    boolean result = new AllRowsReader.Builder<String, String>(keyspace, CF_STANDARD1).withPageSize(100) // Read 100 rows at a time
            .withConcurrencyLevel(1) // just use one thread.
            .withPartitioner(null) // this will use keyspace's partitioner
            .forEachRow(new Function<Row<String, String>, Boolean>() {
                @Override
                public Boolean apply(@Nullable Row<String, String> row) {
                    ColumnList<String> columns = row.getColumns();
                    String line = "";
                    for (Map.Entry<String, Class> entry : columnNamesToTypes.entrySet()) {
                        String columnName = entry.getKey();
                        line += VerifyUtils.toString(columns.getByteArrayValue(columnName, null),
                                entry.getValue());
                        line += FIELD_SEPERATOR;
                    }
                    // Drop the single trailing separator appended by the loop.
                    actualLines.add(StringUtils.removeEnd(line, FIELD_SEPERATOR));
                    return true;
                }
            }).build().call();
    Collections.sort(actualLines);
    Collections.sort(expectedRows);
    assert actualLines.equals(expectedRows) : "expectedRows = " + expectedRows + " actualRows = " + actualLines;
}
From source file:org.apache.stratos.cartridge.agent.test.JavaCartridgeAgentTest.java
/** * Get current folder path/*from w ww. ja v a2 s. co m*/ * * @return */ private static String getResourcesFolderPath() { return StringUtils.removeEnd(JavaCartridgeAgentTest.class.getResource("/").getPath(), File.separator); }
From source file:org.apache.stratos.integration.common.rest.RestClient.java
/**
 * Gets the filesystem path of the test resources folder, without a trailing
 * slash.
 * <p>
 * Fixed: the original stripped {@code File.separator}, but
 * {@code URL.getPath()} always uses {@code "/"} as its separator regardless
 * of platform — on Windows ({@code File.separator == "\\"}) the strip never
 * matched and the trailing slash was kept.
 *
 * @return the resource path.
 */
private String getResourcesFolderPath() {
    String path = getClass().getResource("/").getPath();
    // URL paths are always "/"-separated, so strip "/" — not File.separator.
    return StringUtils.removeEnd(path, "/");
}
From source file:org.apache.stratos.integration.tests.SampleApplicationTests.java
/**
 * Test-suite setup: points {@code jndi.properties.dir} at the test resources
 * folder and installs a capturing log appender at INFO level on the root
 * logger.
 * <p>
 * Fixed: the original stripped {@code File.separator} from the resource
 * path, but {@code URL.getPath()} always uses {@code "/"} regardless of
 * platform — on Windows the trailing slash was never removed.
 */
@BeforeClass
public void setUp() {
    String path = getClass().getResource("/").getPath();
    // URL paths are always "/"-separated, so strip "/" — not File.separator.
    path = StringUtils.removeEnd(path, "/");
    System.setProperty("jndi.properties.dir", path);

    testLogAppender = new TestLogAppender();
    Logger.getRootLogger().addAppender(testLogAppender);
    Logger.getRootLogger().setLevel(Level.INFO);
}