List of usage examples for java.nio.charset StandardCharsets UTF_16
Charset UTF_16
To view the source code for java.nio.charset StandardCharsets UTF_16, click the Source Link below each example.
From source file:org.sonarsource.sonarlint.core.container.analysis.filesystem.FileMetadataTest.java
/** Line counting and original line offsets must be correct for UTF-16 content containing a non-ASCII character. */
@Test
public void non_ascii_utf_16() throws Exception {
    File file = temp.newFile();
    FileUtils.write(file, "fo\r\nbr\r\n\u1D11Ebaz\r\n", StandardCharsets.UTF_16, true);

    FileMetadata.Metadata result = new FileMetadata().readMetadata(file, StandardCharsets.UTF_16);

    // Trailing CRLF produces a fourth (empty) line; offsets are character-based.
    assertThat(result.lines).isEqualTo(4);
    assertThat(result.originalLineOffsets).containsOnly(0, 5, 10, 18);
}
From source file:org.sonar.scanner.scan.filesystem.ByteCharsetDetectorTest.java
/** When only UTF-16 validation succeeds but no ANSI fallback applies, detection must return no charset. */
@Test
public void failAnsii() {
    when(validation.isUTF8(any(byte[].class), anyBoolean()))
        .thenReturn(new Result(Validation.MAYBE, null));
    when(validation.isUTF16(any(byte[].class), anyBoolean()))
        .thenReturn(Result.newValid(StandardCharsets.UTF_16));
    when(validation.isValidUTF16(any(byte[].class), anyBoolean())).thenReturn(true);

    assertThat(charsets.detect(new byte[1])).isNull();
}
From source file:com.joyent.manta.client.MantaClientPutIT.java
/** A string uploaded as UTF-16 must round-trip unchanged and report the matching content type. */
@Test
public final void testPutWithStringUTF16() throws IOException {
    final String objectName = UUID.randomUUID().toString();
    final String path = testPathPrefix + objectName;

    MantaObject response = mantaClient.put(path, TEST_DATA, StandardCharsets.UTF_16);
    Assert.assertEquals(response.getContentType(), "text/plain; charset=UTF-16",
            "Content type wasn't detected correctly");

    // Download the object again and verify the decoded payload.
    try (MantaObjectInputStream object = mantaClient.getAsInputStream(path)) {
        String downloaded = IOUtils.toString(object, StandardCharsets.UTF_16);
        Assert.assertEquals(downloaded, TEST_DATA, "Uploaded string didn't match expectation");
    }
}
From source file:org.sonar.scanner.scan.filesystem.ByteCharsetDetectorTest.java
/** A user-configured ANSI charset (ISO-8859-1) must win when the bytes decode cleanly with it, even though UTF-16 validation also succeeded. */
@Test public void tryUserAnsii() {
    // UTF-8 is inconclusive, UTF-16 looks valid…
    when(validation.isUTF8(any(byte[].class), anyBoolean())).thenReturn(new Result(Validation.MAYBE, null));
    when(validation.isUTF16(any(byte[].class), anyBoolean()))
            .thenReturn(Result.newValid(StandardCharsets.UTF_16));
    when(validation.isValidUTF16(any(byte[].class), anyBoolean())).thenReturn(true);
    // …but the bytes also decode with the user charset.
    when(validation.tryDecode(any(byte[].class), eq(StandardCharsets.ISO_8859_1))).thenReturn(true);
    // Detector configured with ISO-8859-1 as the user-preferred charset.
    charsets = new ByteCharsetDetector(validation, StandardCharsets.ISO_8859_1);
    assertThat(charsets.detect(new byte[1])).isEqualTo(StandardCharsets.ISO_8859_1);
}
From source file:org.sonar.api.batch.fs.internal.FileMetadataTest.java
/**
 * Metadata for UTF-16 content with a non-ASCII character: line count,
 * non-blank line count, hash (computed over the UTF-8 bytes of the
 * line-feed-normalized text) and original line offsets.
 */
@Test
public void non_ascii_utf_16() throws Exception {
    File tempFile = temp.newFile();
    FileUtils.write(tempFile, "fo\r\nbr\r\n\u1D11Ebaz\r\n", StandardCharsets.UTF_16, true);

    // try-with-resources: the original version leaked this FileInputStream.
    Metadata metadata;
    try (FileInputStream is = new FileInputStream(tempFile)) {
        metadata = new FileMetadata().readMetadata(is, StandardCharsets.UTF_16, tempFile.getName());
    }

    assertThat(metadata.lines()).isEqualTo(4);
    assertThat(metadata.nonBlankLines()).isEqualTo(3);
    assertThat(metadata.hash())
            .isEqualTo(md5Hex("fo\nbr\n\u1D11Ebaz\n".getBytes(StandardCharsets.UTF_8)));
    assertThat(metadata.originalLineOffsets()).containsOnly(0, 5, 10, 18);
}
From source file:org.sonarsource.sonarlint.core.mediumtest.StandaloneIssueMediumTest.java
/** Analysis must honor the encoding declared for the input file (UTF-16 here) and still find all issues. */
@Test
public void fileEncoding() throws IOException {
    ClientInputFile inputFile = prepareInputFile("foo.xoo",
            "function xoo() {\n" + "  var xoo1, xoo2;\n" + "  var xoo; //NOSONAR\n" + "}",
            false, StandardCharsets.UTF_16);

    final List<Issue> issues = new ArrayList<>();
    sonarlint.analyze(
            new StandaloneAnalysisConfiguration(baseDir.toPath(), temp.newFolder().toPath(),
                    Arrays.asList(inputFile), ImmutableMap.of()),
            issues::add);

    assertThat(issues).extracting("ruleKey", "startLine", "startLineOffset", "inputFile.path")
            .containsOnly(
                    tuple("xoo:HasTag", 1, 9, inputFile.getPath()),
                    tuple("xoo:HasTag", 2, 6, inputFile.getPath()),
                    tuple("xoo:HasTag", 2, 12, inputFile.getPath()));
}
From source file:org.apache.nifi.security.util.crypto.HashService.java
/**
 * Returns the raw {@code byte[]} hash of the specified value.
 *
 * @param algorithm the hash algorithm to use
 * @param value     the value to hash (cannot be {@code null} but can be an empty String)
 * @param charset   the charset to use
 * @return the hash value in bytes
 */
public static byte[] hashValueRaw(HashAlgorithm algorithm, String value, Charset charset) {
    if (value == null) {
        throw new IllegalArgumentException("The value cannot be null");
    }
    /** See the note on {@link HashServiceTest#testHashValueShouldHandleUTF16BOMIssue()} */
    // Compare with equals() rather than ==: the canonical StandardCharsets instance
    // usually makes reference comparison work, but equals() also matches any equal
    // Charset obtained another way (e.g. via Charset.forName on another provider).
    if (StandardCharsets.UTF_16.equals(charset)) {
        logger.warn(
                "The charset provided was UTF-16, but Java will insert a Big Endian BOM in the decoded message before hashing, so switching to UTF-16BE");
        charset = StandardCharsets.UTF_16BE;
    }
    return hashValueRaw(algorithm, value.getBytes(charset));
}
From source file:org.polymap.p4.data.importer.prompts.CharsetPrompt.java
private void initCharsets() { charsets = new TreeMap<String, Charset>(); for (Charset charset : Charset.availableCharsets().values()) { charsets.put(displayName(charset), charset); }// w ww.j a va 2s. c o m displayNames = new ListOrderedSet<String>(); // add all defaults on top displayNames.add(displayName(StandardCharsets.ISO_8859_1)); displayNames.add(displayName(StandardCharsets.US_ASCII)); displayNames.add(displayName(StandardCharsets.UTF_8)); displayNames.add(displayName(StandardCharsets.UTF_16)); displayNames.add(displayName(StandardCharsets.UTF_16BE)); displayNames.add(displayName(StandardCharsets.UTF_16LE)); // a separator charsets.put(SEPARATOR, selection); displayNames.add(SEPARATOR); // add the rest for (String displayName : charsets.keySet()) { displayNames.add(displayName); } }
From source file:org.sonar.api.batch.fs.internal.FileMetadata.java
/**
 * For testing purpose
 */
public Metadata readMetadata(Reader reader) {
    // NOTE(review): UTF-16 is hard-coded for the synthetic "fromString" source — confirm intent.
    LineCounter lines = new LineCounter("fromString", StandardCharsets.UTF_16);
    FileHashComputer hasher = new FileHashComputer("fromString");
    LineOffsetCounter offsets = new LineOffsetCounter();

    try {
        read(reader, new CharHandler[] { lines, hasher, offsets });
    } catch (IOException e) {
        throw new IllegalStateException("Should never occur", e);
    }

    return new Metadata(lines.lines(), lines.nonBlankLines(), hasher.getHash(),
            offsets.getOriginalLineOffsets(), offsets.getLastValidOffset());
}
From source file:it.uniud.ailab.dcore.launchers.Launcher.java
/** * Load the document trying different charsets. The charset tried, are, in * order:/*from w ww . j a v a 2 s. com*/ * <ul> * <li>UTF-16;</li> * <li>UTF-8;</li> * <li>US-ASCII.</li> * </ul> * * @param filePath the path of the document * @return the text of the document * @throws IOException if the charset is not supported */ private static String loadDocument(File filePath) throws IOException { String document = ""; IOException exception = null; // try different charsets. if none is recognized, throw the // exception detected when reading. try { document = String.join(" ", Files.readAllLines(filePath.toPath(), StandardCharsets.UTF_8)); } catch (java.nio.charset.MalformedInputException e) { exception = e; } if (exception != null) { try { exception = null; document = String.join(" ", Files.readAllLines(filePath.toPath(), StandardCharsets.UTF_16)); } catch (java.nio.charset.MalformedInputException e) { exception = e; } } if (exception != null) { try { exception = null; document = String.join(" ", Files.readAllLines(filePath.toPath(), StandardCharsets.US_ASCII)); } catch (java.nio.charset.MalformedInputException e) { exception = e; } } // no charset has been recognized if (exception != null) { throw exception; } return document; }