Example usage for org.apache.commons.lang3 StringEscapeUtils unescapeJava

Introduction

On this page you can find example usage of org.apache.commons.lang3 StringEscapeUtils.unescapeJava.

Prototype

public static final String unescapeJava(final String input) 

Document

Unescapes any Java literals found in the String.
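
A minimal standalone sketch of what this does (assuming commons-lang3 is on the classpath; the class and variable names here are illustrative):

import org.apache.commons.lang3.StringEscapeUtils;

public class UnescapeJavaDemo {
    public static void main(String[] args) {
        // The source literal "col1\\tcol2\\nrow2" holds the two-character sequences \t and \n;
        // unescapeJava turns them into a real tab and a real newline.
        System.out.println(StringEscapeUtils.unescapeJava("col1\\tcol2\\nrow2"));

        // Unicode escapes are resolved as well: the sequence \u00e9 becomes 'é'.
        System.out.println(StringEscapeUtils.unescapeJava("caf\\u00e9")); // prints "café"
    }
}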

Usage

From source file:org.apache.hadoop.hive.druid.io.DruidQueryBasedInputFormat.java

@SuppressWarnings("deprecation")
private HiveDruidSplit[] getInputSplits(Configuration conf) throws IOException {
    String address = HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_DRUID_BROKER_DEFAULT_ADDRESS);
    if (StringUtils.isEmpty(address)) {
        throw new IOException("Druid broker address not specified in configuration");
    }
    String druidQuery = StringEscapeUtils.unescapeJava(conf.get(Constants.DRUID_QUERY_JSON));
    String druidQueryType;
    if (StringUtils.isEmpty(druidQuery)) {
        // Empty, maybe because CBO did not run; we fall back to
        // full Select query
        if (LOG.isWarnEnabled()) {
            LOG.warn("Druid query is empty; creating Select query");
        }
        String dataSource = conf.get(Constants.DRUID_DATA_SOURCE);
        if (dataSource == null) {
            throw new IOException("Druid data source cannot be empty");
        }
        druidQuery = createSelectStarQuery(dataSource);
        druidQueryType = Query.SELECT;
    } else {
        druidQueryType = conf.get(Constants.DRUID_QUERY_TYPE);
        if (druidQueryType == null) {
            throw new IOException("Druid query type not recognized");
        }
    }

    // hive depends on FileSplits
    Job job = new Job(conf);
    JobContext jobContext = ShimLoader.getHadoopShims().newJobContext(job);
    Path[] paths = FileInputFormat.getInputPaths(jobContext);

    // We need to deserialize and serialize query so intervals are written in the JSON
    // Druid query with user timezone, as this is default Hive time semantics.
    // Then, create splits with the Druid queries.
    switch (druidQueryType) {
    case Query.TIMESERIES:
    case Query.TOPN:
    case Query.GROUP_BY:
        return new HiveDruidSplit[] {
                new HiveDruidSplit(deserializeSerialize(druidQuery), paths[0], new String[] { address }) };
    case Query.SELECT:
        SelectQuery selectQuery = DruidStorageHandlerUtils.JSON_MAPPER.readValue(druidQuery, SelectQuery.class);
        boolean distributed = HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_DRUID_SELECT_DISTRIBUTE);
        if (distributed) {
            return distributeSelectQuery(conf, address, selectQuery, paths[0]);
        } else {
            return splitSelectQuery(conf, address, selectQuery, paths[0]);
        }
    default:
        throw new IOException("Druid query type not recognized");
    }
}
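
The call relevant to this page is the conf.get: the Druid query JSON is stored Java-escaped in the job Configuration and must be unescaped before it can be parsed. A minimal sketch of that round trip, assuming a Hadoop Configuration and using a made-up property key in place of Constants.DRUID_QUERY_JSON:

import org.apache.commons.lang3.StringEscapeUtils;
import org.apache.hadoop.conf.Configuration;

public class EscapedConfRoundTrip {
    public static void main(String[] args) {
        String druidQuery = "{\"queryType\":\"timeseries\",\"dataSource\":\"wikipedia\"}";

        // Writer side: escape the JSON so quotes survive being stored as a plain config value.
        Configuration conf = new Configuration();
        conf.set("example.druid.query.json", StringEscapeUtils.escapeJava(druidQuery));

        // Reader side (what getInputSplits does): unescape before handing it to the JSON parser.
        String restored = StringEscapeUtils.unescapeJava(conf.get("example.druid.query.json"));
        System.out.println(druidQuery.equals(restored)); // true
    }
}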

From source file:org.apache.james.jmap.methods.integration.cucumber.GetMessagesMethodStepdefs.java

@Then("^the textBody of the message is \"([^\"]*)\"$")
public void assertTextBodyOfTheFirstMessage(String textBody) throws Throwable {
    assertThat(jsonPath.<String>read(FIRST_MESSAGE + ".textBody"))
            .isEqualTo(StringEscapeUtils.unescapeJava(textBody));
}

From source file:org.apache.james.jmap.methods.integration.cucumber.GetMessagesMethodStepdefs.java

@Then("^the htmlBody of the message is \"([^\"]*)\"$")
public void assertHtmlBodyOfTheFirstMessage(String htmlBody) throws Throwable {
    assertThat(jsonPath.<String>read(FIRST_MESSAGE + ".htmlBody"))
            .isEqualTo(StringEscapeUtils.unescapeJava(htmlBody));
}

From source file:org.apache.james.jmap.methods.integration.cucumber.GetMessagesMethodStepdefs.java

@Then("^the preview of the message is \"([^\"]*)\"$")
public void assertPreviewOfTheFirstMessage(String preview) throws Throwable {
    String actual = jsonPath.<String>read(FIRST_MESSAGE + ".preview").replace("\n", " ");
    assertThat(actual).isEqualToIgnoringWhitespace(StringEscapeUtils.unescapeJava(preview));
}

From source file:org.apache.jmeter.functions.UnEscape.java

/** {@inheritDoc} */
@Override
public String execute(SampleResult previousResult, Sampler currentSampler) throws InvalidVariableException {

    String rawString = ((CompoundVariable) values[0]).execute();
    return StringEscapeUtils.unescapeJava(rawString);

}

From source file:org.apache.lucene.analysis.kr.test.KoreanAnalyzerTest.java

public void testJavaEscape() throws Exception {

    String str = StringEscapeUtils.unescapeHtml4("&#48085;");
    System.out.println(str);

    //??
    String han = StringEscapeUtils.unescapeJava("0x3400");
    han = StringEscapeUtils.escapeJava("?");

    System.out.println(han);

}

From source file:org.apache.marmotta.ldclient.provider.freebase.FreebaseProvider.java

/**
 * Fixes Freebase deficiencies in its Turtle serialization, doing
 * some dirty things that may be semantically wrong.
 *
 * @param is stream with the raw data
 * @return fixed stream
 */
private InputStream fix(InputStream is, String encoding) throws IOException {
    BufferedReader br = new BufferedReader(new InputStreamReader(is));
    StringBuffer sb = new StringBuffer();
    String line;
    while ((line = br.readLine()) != null) {
        Matcher literalMatcher = FREEBASE_LITERAL_PATTERN.matcher(line);
        if (literalMatcher.matches()) {
            //literal found
            try {
                final String literal = literalMatcher.group(2);
                final String fixed = fixLiteral(literal);
                log.debug("literal: --{}--{}", literal, fixed);
                String triple = literalMatcher.group(1) + "    \"" + fixed + "\"";
                if (literalMatcher.group(3) != null) {
                    triple += literalMatcher.group(3);
                }
                log.debug("new triple: {}", triple);
                sb.append("    " + triple + literalMatcher.group(5));
                sb.append(("\n"));
            } catch (Exception e) {
                log.debug("Error fixing line, so triple ignored: {}", e.getMessage());
                log.trace("error on line: {}", line);
                warrantyClosing(sb, line);
            }
        } else {
            Matcher tripleMatcher = FREEBASE_TRIPLE_PATTERN.matcher(line);
            if (tripleMatcher.matches()) {
                String p = tripleMatcher.group(1);
                if (p.indexOf("..") >= 0) {
                    log.debug("ignoring line due wrong property: {}", p);
                    warrantyClosing(sb, line);
                } else {
                    String o = tripleMatcher.group(2);
                    if (o.charAt(0) == '<') {
                        try {
                            URI uri = URI.create(o.substring(1, o.length() - 1));
                            sb.append("    " + p + "    <" + uri.toString() + ">" + tripleMatcher.group(3));
                            sb.append("\n");
                        } catch (RuntimeException e) {
                            log.debug("Object uri not valid: {}", o.substring(1, o.length() - 1));
                            warrantyClosing(sb, line);
                        }
                    } else {
                        if (o.contains("$")) {
                            o = o.replaceAll(Pattern.quote("$"), Matcher.quoteReplacement("\\$"));
                        } else if (o.contains("\\u")) {
                            o = StringEscapeUtils.unescapeJava(o);
                        } else if (o.contains("\\x")) {
                            o = org.apache.marmotta.commons.util.StringUtils.fixLatin1(o);
                        }
                        sb.append("    " + p + "    " + o + tripleMatcher.group(3));
                        sb.append("\n");
                    }
                }
            } else {
                log.debug("default fallback");
                sb.append(line);
                sb.append("\n");
            }
        }
    }
    //System.out.println(sb.toString());
    return new ByteArrayInputStream(sb.toString().getBytes());
}

From source file:org.apache.marmotta.ldclient.provider.freebase.FreebaseProvider.java

private String fixLiteral(String literal) throws UnsupportedEncodingException {

    //non-escaped quotes
    literal = literal.replaceAll("\"", "'");

    //wrong charset
    if (literal.contains("\\x")) {
        literal = org.apache.marmotta.commons.util.StringUtils.fixLatin1(literal);
    }

    //wrong unicode encoding
    if (literal.contains("\\u")) {
        literal = StringEscapeUtils.unescapeJava(literal);
    }

    return literal;
}

From source file:org.apache.nifi.csv.CSVUtils.java

private static char getUnescapedChar(final PropertyContext context, final PropertyDescriptor property) {
    return StringEscapeUtils.unescapeJava(context.getProperty(property).getValue()).charAt(0);
}
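
This helper uses unescapeJava to turn the textual escape sequence a user typed into a processor property (for example the two characters \ and t) into the single character it denotes. The same idea without the NiFi PropertyContext, as a hedged standalone sketch:

import org.apache.commons.lang3.StringEscapeUtils;

public class UnescapedCharDemo {
    public static void main(String[] args) {
        // A configured value such as "\t" arrives as two characters: '\' and 't'.
        String configured = "\\t";

        // unescapeJava collapses it to the real tab character; charAt(0) picks it out.
        char delimiter = StringEscapeUtils.unescapeJava(configured).charAt(0);
        System.out.println((int) delimiter); // prints 9
    }
}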

From source file:org.apache.nifi.csv.TestCSVRecordReader.java

@Test
public void testMultipleRecordsEscapedWithSpecialChar() throws IOException, MalformedRecordException {

    char delimiter = StringEscapeUtils.unescapeJava("\u0001").charAt(0);

    final CSVFormat format = CSVFormat.DEFAULT.withFirstRecordAsHeader().withTrim().withQuote('"')
            .withDelimiter(delimiter);
    final List<RecordField> fields = getDefaultFields();
    fields.replaceAll(f -> f.getFieldName().equals("balance") ? new RecordField("balance", doubleDataType) : f);

    final RecordSchema schema = new SimpleRecordSchema(fields);

    try (final InputStream fis = new FileInputStream(
            new File("src/test/resources/csv/multi-bank-account_escapedchar.csv"));
            final CSVRecordReader reader = createReader(fis, schema, format)) {

        final Object[] firstRecord = reader.nextRecord().getValues();
        final Object[] firstExpectedValues = new Object[] { "1", "John Doe", 4750.89D, "123 My Street",
                "My City", "MS", "11111", "USA" };
        Assert.assertArrayEquals(firstExpectedValues, firstRecord);

        final Object[] secondRecord = reader.nextRecord().getValues();
        final Object[] secondExpectedValues = new Object[] { "2", "Jane Doe", 4820.09D, "321 Your Street",
                "Your City", "NY", "33333", "USA" };
        Assert.assertArrayEquals(secondExpectedValues, secondRecord);

        assertNull(reader.nextRecord());
    }
}