Usage examples for org.apache.commons.lang3 StringUtils.countMatches
public static int countMatches(final CharSequence str, final char ch)
Counts how many times the char appears in the given string.
A null or empty ("") String input returns 0.

StringUtils.countMatches(null, *)     = 0
StringUtils.countMatches("", *)       = 0
StringUtils.countMatches("abba", 0)   = 0
StringUtils.countMatches("abba", 'a') = 2
StringUtils.countMatches("abba", 'b') = 2
StringUtils.countMatches("abba", 'x') = 0
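For quick reference, the snippet below is a minimal, self-contained sketch (the class name is illustrative only, not taken from any project listed on this page). It shows the char overload documented above together with the companion CharSequence overload, countMatches(CharSequence str, CharSequence sub), which most of the real-world examples below use.

import org.apache.commons.lang3.StringUtils;

// Minimal sketch of both countMatches overloads; class name is illustrative only.
public class CountMatchesExample {

    public static void main(String[] args) {
        // char overload: counts occurrences of a single character (null/empty input returns 0)
        System.out.println(StringUtils.countMatches("abba", 'a'));          // 2
        System.out.println(StringUtils.countMatches(null, 'a'));            // 0

        // CharSequence overload: counts occurrences of a substring (also null/empty safe)
        System.out.println(StringUtils.countMatches("one,two,three", ",")); // 2
        System.out.println(StringUtils.countMatches("", "abc"));            // 0
    }
}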
From source file:org.apache.drill.common.expression.fn.JodaDateValidator.java
/**
 * Replaces all postgres patterns from {@param pattern},
 * available in postgresToJodaMap keys to jodaTime equivalents.
 *
 * @param pattern date pattern in postgres format
 * @return date pattern with replaced patterns in joda format
 */
public static String toJodaFormat(String pattern) {
    // replaces escape character for text delimiter
    StringBuilder builder = new StringBuilder(
            pattern.replaceAll(POSTGRES_ESCAPE_CHARACTER, JODA_ESCAPE_CHARACTER));

    int start = 0; // every time search of postgres token in pattern will start from this index.
    int minPos; // min position of the longest postgres token
    do {
        // finds first value with max length
        minPos = builder.length();
        PostgresDateTimeConstant firstMatch = null;
        for (PostgresDateTimeConstant postgresPattern : postgresToJodaMap.keySet()) {
            // keys sorted in length decreasing
            // at first search longer tokens to consider situation where some tokens are the parts of large tokens
            // example: if pattern contains a token "DDD", token "DD" would be skipped, as a part of "DDD".
            int pos;
            // some tokens can't be in upper camel casing, so we ignore them here.
            // example: DD, DDD, MM, etc.
            if (postgresPattern.hasCamelCasing()) {
                // finds postgres tokens in upper camel casing
                // example: Month, Mon, Day, Dy, etc.
                pos = builder.indexOf(StringUtils.capitalize(postgresPattern.getName()), start);
                if (pos >= 0 && pos < minPos) {
                    firstMatch = postgresPattern;
                    minPos = pos;
                    if (minPos == start) {
                        break;
                    }
                }
            }
            // finds postgres tokens in lower casing
            pos = builder.indexOf(postgresPattern.getName().toLowerCase(), start);
            if (pos >= 0 && pos < minPos) {
                firstMatch = postgresPattern;
                minPos = pos;
                if (minPos == start) {
                    break;
                }
            }
            // finds postgres tokens in upper casing
            pos = builder.indexOf(postgresPattern.getName().toUpperCase(), start);
            if (pos >= 0 && pos < minPos) {
                firstMatch = postgresPattern;
                minPos = pos;
                if (minPos == start) {
                    break;
                }
            }
        }
        // replaces postgres token, if found and it does not escape character
        if (minPos < builder.length() && firstMatch != null) {
            String jodaToken = postgresToJodaMap.get(firstMatch);
            // checks that token is not a part of escape sequence
            if (StringUtils.countMatches(builder.subSequence(0, minPos), JODA_ESCAPE_CHARACTER) % 2 == 0) {
                int offset = minPos + firstMatch.getName().length();
                builder.replace(minPos, offset, jodaToken);
                start = minPos + jodaToken.length();
            } else {
                int endEscapeCharacter = builder.indexOf(JODA_ESCAPE_CHARACTER, minPos);
                if (endEscapeCharacter >= 0) {
                    start = endEscapeCharacter;
                } else {
                    break;
                }
            }
        }
    } while (minPos < builder.length());
    return builder.toString();
}
From source file:org.apache.drill.exec.physical.impl.TestLocalExchange.java
private static void testHelper(boolean isMuxOn, boolean isDeMuxOn, String query, int expectedNumMuxes,
        int expectedNumDeMuxes, String[] baselineColumns, List<Object[]> baselineValues) throws Exception {
    setupHelper(isMuxOn, isDeMuxOn);

    String plan = getPlanInString("EXPLAIN PLAN FOR " + query, JSON_FORMAT);
    System.out.println("Plan: " + plan);

    if (isMuxOn) {
        // # of hash exchanges should be = # of mux exchanges + # of demux exchanges
        assertEquals("HashExpr on the hash column should not happen",
                2 * expectedNumMuxes + expectedNumDeMuxes, StringUtils.countMatches(plan, HASH_EXPR_NAME));
        jsonExchangeOrderChecker(plan, isDeMuxOn, expectedNumMuxes, "castint\\(hash64asdouble\\(.*\\) \\) ");
    } else {
        assertEquals("HashExpr on the hash column should not happen", 0,
                StringUtils.countMatches(plan, HASH_EXPR_NAME));
    }

    // Make sure the plan has mux and demux exchanges (TODO: currently testing is rudimentary,
    // need to move it to sophisticated testing once we have better planning test tools are available)
    assertEquals("Wrong number of MuxExchanges are present in the plan", expectedNumMuxes,
            StringUtils.countMatches(plan, MUX_EXCHANGE));
    assertEquals("Wrong number of DeMuxExchanges are present in the plan", expectedNumDeMuxes,
            StringUtils.countMatches(plan, DEMUX_EXCHANGE));

    // Run the query and verify the output
    TestBuilder testBuilder = testBuilder().sqlQuery(query).unOrdered().baselineColumns(baselineColumns);
    for (Object[] baselineRecord : baselineValues) {
        testBuilder.baselineValues(baselineRecord);
    }
    testBuilder.go();

    testHelperVerifyPartitionSenderParallelization(plan, isMuxOn, isDeMuxOn);
}
From source file:org.apache.hive.jdbc.TestJdbcWithMiniHS2ErasureCoding.java
/**
 * Check that the expected string occurs correctly in the output string.
 * @param output string to probe
 * @param expectedString string to find in output
 * @param expectedCount the expected number of occurrences of the expected string
 */
private void assertMatchAndCount(String output, String expectedString, int expectedCount) {
    assertTrue("Did not find expected '" + expectedString + "' in text " + output,
            output.contains(expectedString));
    assertEquals("wrong count of matches of '" + expectedString + "' in text " + output,
            expectedCount, StringUtils.countMatches(output, expectedString));
}
From source file:org.apache.nifi.fingerprint.FingerprintFactoryTest.java
@Test
public void testResourceValueInFingerprint() throws IOException {
    final String fingerprint = fingerprinter.createFingerprint(
            getResourceBytes("/nifi/fingerprint/flow1a.xml"), null);

    assertEquals(3, StringUtils.countMatches(fingerprint, "success"));
    assertTrue(fingerprint.contains("In Connection"));
}
From source file:org.apache.nifi.processors.elasticsearch.TestScrollElasticsearchHttp.java
private void runAndVerifySuccess() {
    runner.enqueue("".getBytes(), new HashMap<String, String>() {
        {
            put("identifier", "28039652140");
        }
    });

    // Must run once for each of the 3 pages
    runner.run(3, true, true);

    runner.assertAllFlowFilesTransferred(ScrollElasticsearchHttp.REL_SUCCESS, 2);
    final MockFlowFile out = runner.getFlowFilesForRelationship(ScrollElasticsearchHttp.REL_SUCCESS).get(0);
    assertNotNull(out);

    int numHits = runner.getFlowFilesForRelationship(ScrollElasticsearchHttp.REL_SUCCESS).stream().map(ff -> {
        String page = new String(ff.toByteArray());
        return StringUtils.countMatches(page, "{\"timestamp\"");
    }).reduce((a, b) -> a + b).get();
    Assert.assertEquals(3, numHits);
}
From source file:org.apache.nifi.processors.hive.PutHiveQL.java
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    final int batchSize = context.getProperty(BATCH_SIZE).asInteger();
    final List<FlowFile> flowFiles = session.get(batchSize);

    if (flowFiles.isEmpty()) {
        return;
    }

    final long startNanos = System.nanoTime();
    final Charset charset = Charset.forName(context.getProperty(CHARSET).getValue());
    final HiveDBCPService dbcpService = context.getProperty(HIVE_DBCP_SERVICE)
            .asControllerService(HiveDBCPService.class);
    final String statementDelimiter = context.getProperty(STATEMENT_DELIMITER).getValue();

    try (final Connection conn = dbcpService.getConnection()) {

        for (FlowFile flowFile : flowFiles) {
            try {
                final String script = getHiveQL(session, flowFile, charset);
                String regex = "(?<!\\\\)" + Pattern.quote(statementDelimiter);

                String[] hiveQLs = script.split(regex);

                int loc = 1;
                for (String hiveQL : hiveQLs) {
                    getLogger().debug("HiveQL: {}", new Object[] { hiveQL });

                    if (!StringUtils.isEmpty(hiveQL.trim())) {
                        final PreparedStatement stmt = conn.prepareStatement(hiveQL.trim());

                        // Get ParameterMetadata
                        // Hive JDBC Doesn't support this yet:
                        // ParameterMetaData pmd = stmt.getParameterMetaData();
                        // int paramCount = pmd.getParameterCount();
                        int paramCount = StringUtils.countMatches(hiveQL, "?");

                        if (paramCount > 0) {
                            loc = setParameters(loc, stmt, paramCount, flowFile.getAttributes());
                        }

                        // Execute the statement
                        stmt.execute();
                    }
                }

                // Emit a Provenance SEND event
                final long transmissionMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos);

                session.getProvenanceReporter().send(flowFile, dbcpService.getConnectionURL(),
                        transmissionMillis, true);
                session.transfer(flowFile, REL_SUCCESS);
            } catch (final SQLException e) {
                if (e instanceof SQLNonTransientException) {
                    getLogger().error("Failed to update Hive for {} due to {}; routing to failure",
                            new Object[] { flowFile, e });
                    session.transfer(flowFile, REL_FAILURE);
                } else {
                    getLogger().error(
                            "Failed to update Hive for {} due to {}; it is possible that retrying the operation will succeed, so routing to retry",
                            new Object[] { flowFile, e });
                    flowFile = session.penalize(flowFile);
                    session.transfer(flowFile, REL_RETRY);
                }
            }
        }
    } catch (final SQLException sqle) {
        // There was a problem getting the connection, yield and retry the flowfiles
        getLogger().error(
                "Failed to get Hive connection due to {}; it is possible that retrying the operation will succeed, so routing to retry",
                new Object[] { sqle });
        session.transfer(flowFiles, REL_RETRY);
        context.yield();
    }
}
From source file:org.apache.nifi.processors.standard.TestUpdateRecord.java
@Test
public void testUpdateComplexArrays() throws InitializationException, IOException {
    final JsonTreeReader jsonReader = new JsonTreeReader();
    runner.addControllerService("reader", jsonReader);

    final String inputSchemaText = new String(
            Files.readAllBytes(Paths.get("src/test/resources/TestUpdateRecord/schema/multi-arrays.avsc")));
    final String outputSchemaText = new String(
            Files.readAllBytes(Paths.get("src/test/resources/TestUpdateRecord/schema/multi-arrays.avsc")));

    runner.setProperty(jsonReader, SchemaAccessUtils.SCHEMA_ACCESS_STRATEGY, SchemaAccessUtils.SCHEMA_TEXT_PROPERTY);
    runner.setProperty(jsonReader, SchemaAccessUtils.SCHEMA_TEXT, inputSchemaText);
    runner.enableControllerService(jsonReader);

    final JsonRecordSetWriter jsonWriter = new JsonRecordSetWriter();
    runner.addControllerService("writer", jsonWriter);
    runner.setProperty(jsonWriter, SchemaAccessUtils.SCHEMA_ACCESS_STRATEGY, SchemaAccessUtils.SCHEMA_TEXT_PROPERTY);
    runner.setProperty(jsonWriter, SchemaAccessUtils.SCHEMA_TEXT, outputSchemaText);
    runner.setProperty(jsonWriter, "Pretty Print JSON", "true");
    runner.setProperty(jsonWriter, "Schema Write Strategy", "full-schema-attribute");
    runner.setProperty(UpdateRecord.REPLACEMENT_VALUE_STRATEGY, UpdateRecord.RECORD_PATH_VALUES);
    runner.enableControllerService(jsonWriter);

    runner.enqueue(Paths.get("src/test/resources/TestUpdateRecord/input/multi-arrays.json"));
    runner.setProperty("/peoples[*]", "/peoples[3]");
    runner.run();
    runner.assertAllFlowFilesTransferred(UpdateRecord.REL_SUCCESS, 1);
    String content = new String(runner.getFlowFilesForRelationship(UpdateRecord.REL_SUCCESS).get(0).toByteArray());
    int count = StringUtils.countMatches(content, "Mary Doe");
    assertEquals(4, count);
    runner.removeProperty("/peoples[*]");
    runner.clearTransferState();

    runner.enqueue(Paths.get("src/test/resources/TestUpdateRecord/input/multi-arrays.json"));
    runner.setProperty("/peoples[1]", "/peoples[3]");
    runner.run();
    runner.assertAllFlowFilesTransferred(UpdateRecord.REL_SUCCESS, 1);
    content = new String(runner.getFlowFilesForRelationship(UpdateRecord.REL_SUCCESS).get(0).toByteArray());
    count = StringUtils.countMatches(content, "Mary Doe");
    assertEquals(2, count);
    runner.removeProperty("/peoples[1]");
    runner.clearTransferState();

    runner.enqueue(Paths.get("src/test/resources/TestUpdateRecord/input/multi-arrays.json"));
    runner.setProperty("/peoples[0..1]", "/peoples[3]");
    runner.run();
    runner.assertAllFlowFilesTransferred(UpdateRecord.REL_SUCCESS, 1);
    String expectedOutput = new String(Files.readAllBytes(
            Paths.get("src/test/resources/TestUpdateRecord/output/updateArrays/multi-arrays-0and1.json")));
    runner.getFlowFilesForRelationship(UpdateRecord.REL_SUCCESS).get(0).assertContentEquals(expectedOutput);
    runner.removeProperty("/peoples[0..1]");
    runner.clearTransferState();

    runner.enqueue(Paths.get("src/test/resources/TestUpdateRecord/input/multi-arrays.json"));
    runner.setProperty("/peoples[0,2]", "/peoples[3]");
    runner.run();
    runner.assertAllFlowFilesTransferred(UpdateRecord.REL_SUCCESS, 1);
    expectedOutput = new String(Files.readAllBytes(
            Paths.get("src/test/resources/TestUpdateRecord/output/updateArrays/multi-arrays-0and2.json")));
    runner.getFlowFilesForRelationship(UpdateRecord.REL_SUCCESS).get(0).assertContentEquals(expectedOutput);
    runner.removeProperty("/peoples[0,2]");
    runner.clearTransferState();

    runner.enqueue(Paths.get("src/test/resources/TestUpdateRecord/input/multi-arrays.json"));
    runner.setProperty("/peoples[0,1..2]", "/peoples[3]");
    runner.run();
    runner.assertAllFlowFilesTransferred(UpdateRecord.REL_SUCCESS, 1);
    content = new String(runner.getFlowFilesForRelationship(UpdateRecord.REL_SUCCESS).get(0).toByteArray());
    count = StringUtils.countMatches(content, "Mary Doe");
    assertEquals(4, count);
    runner.removeProperty("/peoples[0,1..2]");
    runner.clearTransferState();

    runner.enqueue(Paths.get("src/test/resources/TestUpdateRecord/input/multi-arrays.json"));
    runner.setProperty("/peoples[0..-1][./name != 'Mary Doe']", "/peoples[3]");
    runner.run();
    runner.assertAllFlowFilesTransferred(UpdateRecord.REL_SUCCESS, 1);
    content = new String(runner.getFlowFilesForRelationship(UpdateRecord.REL_SUCCESS).get(0).toByteArray());
    count = StringUtils.countMatches(content, "Mary Doe");
    assertEquals(4, count);
    runner.removeProperty("/peoples[0..-1][./name != 'Mary Doe']");
    runner.clearTransferState();

    runner.enqueue(Paths.get("src/test/resources/TestUpdateRecord/input/multi-arrays.json"));
    runner.setProperty("/peoples[0..-1][./name != 'Mary Doe']/addresses[*]", "/peoples[3]/addresses[0]");
    runner.run();
    runner.assertAllFlowFilesTransferred(UpdateRecord.REL_SUCCESS, 1);
    content = new String(runner.getFlowFilesForRelationship(UpdateRecord.REL_SUCCESS).get(0).toByteArray());
    count = StringUtils.countMatches(content, "1 nifi road");
    assertEquals(13, count);
    runner.removeProperty("/peoples[0..-1][./name != 'Mary Doe']/addresses[*]");
    runner.clearTransferState();

    runner.enqueue(Paths.get("src/test/resources/TestUpdateRecord/input/multi-arrays.json"));
    runner.setProperty("/peoples[0..-1][./name != 'Mary Doe']/addresses[0,1..2]", "/peoples[3]/addresses[0]");
    runner.run();
    runner.assertAllFlowFilesTransferred(UpdateRecord.REL_SUCCESS, 1);
    expectedOutput = new String(Files.readAllBytes(
            Paths.get("src/test/resources/TestUpdateRecord/output/updateArrays/multi-arrays-streets.json")));
    runner.getFlowFilesForRelationship(UpdateRecord.REL_SUCCESS).get(0).assertContentEquals(expectedOutput);
    runner.removeProperty("/peoples[0..-1][./name != 'Mary Doe']/addresses[0,1..2]");
    runner.clearTransferState();

    runner.enqueue(Paths.get("src/test/resources/TestUpdateRecord/input/multi-arrays.json"));
    runner.setProperty("/peoples[0..-1][./name != 'Mary Doe']/addresses[0,1..2]/city", "newCity");
    runner.setProperty(UpdateRecord.REPLACEMENT_VALUE_STRATEGY, UpdateRecord.LITERAL_VALUES);
    runner.run();
    runner.assertAllFlowFilesTransferred(UpdateRecord.REL_SUCCESS, 1);
    content = new String(runner.getFlowFilesForRelationship(UpdateRecord.REL_SUCCESS).get(0).toByteArray());
    count = StringUtils.countMatches(content, "newCity");
    assertEquals(9, count);
    runner.removeProperty("/peoples[0..-1][./name != 'Mary Doe']/addresses[0,1..2]/city");
}
From source file:org.apache.nutch.fetcher.JsoupUtil.java
public static String generateItemName(String strUrl) {
    String output = "";
    int countSlash = StringUtils.countMatches(strUrl, "/");
    if (countSlash > 2) {
        output = generateItemName(strUrl.substring(0, strUrl.lastIndexOf('/')));
    } else {
        output = strUrl.substring(strUrl.lastIndexOf('/') + 1, strUrl.length());
    }
    return output;
}
From source file:org.apache.sling.testing.mock.jcr.AbstractItem.java
@Override
public int getDepth() throws RepositoryException {
    if (StringUtils.equals("/", getPath())) {
        return 0;
    } else {
        return StringUtils.countMatches(getPath(), "/");
    }
}
From source file:org.apache.stratos.cli.StratosApplication.java
/**
 * @return {@code true} if required properties are loaded
 */
private boolean loadRequiredProperties() {
    if (logger.isDebugEnabled()) {
        logger.debug("Loading properties...");
    }

    // Load properties
    String stratosURL = null;
    String username = null;
    String password = null;

    stratosURL = System.getenv(CliConstants.STRATOS_URL_ENV_PROPERTY);
    username = System.getenv(CliConstants.STRATOS_USERNAME_ENV_PROPERTY);
    password = System.getenv(CliConstants.STRATOS_PASSWORD_ENV_PROPERTY);

    int slashCount = StringUtils.countMatches(stratosURL, "/");
    int colonCount = StringUtils.countMatches(stratosURL, ":");
    if (!(colonCount == 2 && (slashCount == 3 || slashCount == 2))) {
        if (logger.isDebugEnabled()) {
            logger.debug("Invalid STRATOS_URL");
        }
        System.out.println("Invalid STRATOS_URL. Please enter correct STRATOS_URL");
        return false;
    }

    if (StringUtils.isBlank(stratosURL)) {
        if (logger.isDebugEnabled()) {
            logger.debug("Required configuration not found.");
        }
        // Stratos Controller details are not set.
        System.out.format("Could not find required \"%s\" variable in your environment.%n",
                CliConstants.STRATOS_URL_ENV_PROPERTY);
        return false;
    } else {
        if (logger.isDebugEnabled()) {
            logger.debug("Required configuration found. Validating {}", stratosURL);
        }

        UrlValidator urlValidator = new UrlValidator(new String[] { "https" }, UrlValidator.ALLOW_LOCAL_URLS);
        if (!urlValidator.isValid(stratosURL)) {
            if (logger.isDebugEnabled()) {
                logger.debug("Stratos Controller URL {} is not valid", stratosURL);
            }
            System.out.format(
                    "The \"%s\" variable in your environment is not a valid URL. You have provided \"%s\".%n"
                            + "Please provide the Stratos Controller URL as follows%nhttps://<host>:<port>%n",
                    CliConstants.STRATOS_URL_ENV_PROPERTY, stratosURL);
            return false;
        }

        if (logger.isDebugEnabled()) {
            logger.debug("Stratos Controller URL {} is valid.", stratosURL);
            logger.debug("Adding the values to context.");
        }
        context.put(CliConstants.STRATOS_URL_ENV_PROPERTY, stratosURL);
        context.put(CliConstants.STRATOS_USERNAME_ENV_PROPERTY, username);
        context.put(CliConstants.STRATOS_PASSWORD_ENV_PROPERTY, password);
        return true;
    }
}