List of usage examples for java.nio.charset.StandardCharsets.UTF_16BE
Charset UTF_16BE: sixteen-bit UCS Transformation Format, big-endian byte order.
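Before the project-specific examples, a minimal self-contained sketch (the class name Utf16BeDemo is ours, not taken from any of the projects below) shows the basic behaviour of the constant: encoding produces two big-endian bytes per BMP character with no byte order mark, and decoding with the same charset round-trips the text.

import java.nio.charset.StandardCharsets;
import java.util.Arrays;

public class Utf16BeDemo {
    public static void main(String[] args) {
        // 'A' (U+0041) and the euro sign (U+20AC) each become two big-endian bytes, no BOM
        byte[] encoded = "A\u20AC".getBytes(StandardCharsets.UTF_16BE);
        System.out.println(Arrays.toString(encoded)); // [0, 65, 32, -84]

        // Decoding with the same charset round-trips the original text
        System.out.println(new String(encoded, StandardCharsets.UTF_16BE));
    }
}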
From source file:org.apache.nifi.security.util.crypto.HashService.java
/**
 * Returns a {@link List} of supported {@link Charset}s on this platform. This is not a complete
 * list, as only the charsets in {@link StandardCharsets} are returned to be consistent across
 * JVM instances.
 *
 * @return the list of charsets
 */
public static List<Charset> getSupportedCharsets() {
    return Arrays.asList(StandardCharsets.US_ASCII,
            StandardCharsets.ISO_8859_1,
            StandardCharsets.UTF_8,
            StandardCharsets.UTF_16BE,
            StandardCharsets.UTF_16LE,
            StandardCharsets.UTF_16);
}
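The restriction to StandardCharsets matters because Charset.availableCharsets() varies between JVMs and platforms, while the six StandardCharsets constants are guaranteed to be present on every conforming Java implementation. A small sketch (class name is ours) makes the contrast visible:

import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;

public class CharsetAvailabilityDemo {
    public static void main(String[] args) {
        // Platform-dependent: the size and contents differ between JVM installations
        System.out.println("available charsets: " + Charset.availableCharsets().size());

        // Always present on every Java implementation
        System.out.println(StandardCharsets.UTF_16BE.name()); // UTF-16BE
    }
}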
From source file:org.sonar.scanner.scan.filesystem.CharsetValidationTest.java
@Test
public void detectUTF16Ascii() throws CharacterCodingException {
    String text = "some text to test";
    byte[] utf16be = encode(text, StandardCharsets.UTF_16BE);
    byte[] utf16le = encode(text, StandardCharsets.UTF_16LE);
    byte[] utf8 = encode(text, StandardCharsets.UTF_8);
    byte[] iso88591 = encode(text, StandardCharsets.ISO_8859_1);
    byte[] utf32 = encode(text, Charset.forName("UTF-32LE"));

    assertThat(charsets.isUTF16(utf16le, true).charset()).isEqualTo(StandardCharsets.UTF_16LE);
    assertThat(charsets.isUTF16(utf16be, true).charset()).isEqualTo(StandardCharsets.UTF_16BE);

    // not enough nulls -> we don't know
    assertThat(charsets.isUTF16(iso88591, true).valid()).isEqualTo(Validation.MAYBE);
    assertThat(charsets.isUTF16(utf8, true).valid()).isEqualTo(Validation.MAYBE);

    // fail based on double nulls
    assertThat(charsets.isUTF16(utf32, true).valid()).isEqualTo(Validation.NO);
}
From source file:org.apache.nifi.security.util.crypto.HashService.java
/**
 * Returns the raw {@code byte[]} hash of the specified value.
 *
 * @param algorithm the hash algorithm to use
 * @param value     the value to hash (cannot be {@code null} but can be an empty String)
 * @param charset   the charset to use
 * @return the hash value in bytes
 */
public static byte[] hashValueRaw(HashAlgorithm algorithm, String value, Charset charset) {
    if (value == null) {
        throw new IllegalArgumentException("The value cannot be null");
    }
    /** See the note on {@link HashServiceTest#testHashValueShouldHandleUTF16BOMIssue()} */
    if (charset == StandardCharsets.UTF_16) {
        logger.warn("The charset provided was UTF-16, but Java will insert a Big Endian BOM in the decoded message before hashing, so switching to UTF-16BE");
        charset = StandardCharsets.UTF_16BE;
    }
    return hashValueRaw(algorithm, value.getBytes(charset));
}
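The warning above stems from how Java's UTF-16 encoder behaves: getBytes(StandardCharsets.UTF_16) prepends a big-endian BOM (0xFE 0xFF), while UTF_16BE does not, so hashing the two encodings of the same string gives different digests. A small sketch (the class name is ours) illustrates the mismatch the method guards against.

import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.util.Base64;

public class Utf16BomHashDemo {
    public static void main(String[] args) throws Exception {
        // Java's UTF-16 encoder prepends a big-endian BOM; UTF-16BE does not
        byte[] withBom = "abc".getBytes(StandardCharsets.UTF_16);      // FE FF 00 61 00 62 00 63
        byte[] withoutBom = "abc".getBytes(StandardCharsets.UTF_16BE); // 00 61 00 62 00 63

        MessageDigest sha256 = MessageDigest.getInstance("SHA-256");
        System.out.println(Base64.getEncoder().encodeToString(sha256.digest(withBom)));
        System.out.println(Base64.getEncoder().encodeToString(sha256.digest(withoutBom)));
        // The digests differ only because of the two BOM bytes, which is the
        // mismatch hashValueRaw() avoids by switching UTF-16 to UTF-16BE.
    }
}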
From source file:com.github.anba.es6draft.chakra.ChakraTest.java
private static Charset charsetFor(BOMInputStream bis, Charset defaultCharset) throws IOException {
    ByteOrderMark bom = bis.getBOM();
    if (ByteOrderMark.UTF_8.equals(bom)) {
        return StandardCharsets.UTF_8;
    }
    if (ByteOrderMark.UTF_16LE.equals(bom)) {
        return StandardCharsets.UTF_16LE;
    }
    if (ByteOrderMark.UTF_16BE.equals(bom)) {
        return StandardCharsets.UTF_16BE;
    }
    return defaultCharset;
}
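Commons IO's BOMInputStream only reports marks it was told to look for, so the caller has to list the expected BOMs when constructing the stream. The sketch below (the class name and the "test.js" file name are assumptions for illustration) shows the same detection using getBOMCharsetName() so it stands alone without the private method above.

import java.io.FileInputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import org.apache.commons.io.ByteOrderMark;
import org.apache.commons.io.input.BOMInputStream;

public class BomDetectDemo {
    public static void main(String[] args) throws Exception {
        // List every BOM that getBOM()/getBOMCharsetName() should be able to report
        try (BOMInputStream bis = new BOMInputStream(new FileInputStream("test.js"),
                ByteOrderMark.UTF_8, ByteOrderMark.UTF_16LE, ByteOrderMark.UTF_16BE)) {
            String bomName = bis.getBOMCharsetName(); // null when no BOM was found
            Charset charset = bomName != null ? Charset.forName(bomName) : StandardCharsets.UTF_8;
            try (Reader reader = new InputStreamReader(bis, charset)) {
                // read the rest of the file with the detected charset ...
            }
        }
    }
}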
From source file:org.polymap.p4.data.importer.prompts.CharsetPrompt.java
private void initCharsets() {
    charsets = new TreeMap<String, Charset>();
    for (Charset charset : Charset.availableCharsets().values()) {
        charsets.put(displayName(charset), charset);
    }
    displayNames = new ListOrderedSet<String>();
    // add all defaults on top
    displayNames.add(displayName(StandardCharsets.ISO_8859_1));
    displayNames.add(displayName(StandardCharsets.US_ASCII));
    displayNames.add(displayName(StandardCharsets.UTF_8));
    displayNames.add(displayName(StandardCharsets.UTF_16));
    displayNames.add(displayName(StandardCharsets.UTF_16BE));
    displayNames.add(displayName(StandardCharsets.UTF_16LE));
    // a separator
    charsets.put(SEPARATOR, selection);
    displayNames.add(SEPARATOR);
    // add the rest
    for (String displayName : charsets.keySet()) {
        displayNames.add(displayName);
    }
}
From source file:com.microsoft.azure.management.datalake.store.uploader.StringExtensionsTests.java
@Test
public void StringExtensions_FindNewLine_UTF16BigEndian() {
    for (Triple<String, Integer, Integer> t : TestDataUTF16) {
        byte[] exactBuffer = t.getLeft().getBytes(StandardCharsets.UTF_16BE);
        byte[] largerBuffer = new byte[exactBuffer.length + 100];
        System.arraycopy(exactBuffer, 0, largerBuffer, 0, exactBuffer.length);

        int forwardInExactBuffer = StringExtensions.findNewline(exactBuffer, 0, exactBuffer.length, false,
                StandardCharsets.UTF_16BE, null);
        Assert.assertEquals(t.getMiddle().intValue(), forwardInExactBuffer);

        int forwardInLargeBuffer = StringExtensions.findNewline(largerBuffer, 0, exactBuffer.length, false,
                StandardCharsets.UTF_16BE, null);
        Assert.assertEquals(t.getMiddle().intValue(), forwardInLargeBuffer);

        int reverseInExactBuffer = StringExtensions.findNewline(exactBuffer, Math.max(0, exactBuffer.length - 1),
                exactBuffer.length, true, StandardCharsets.UTF_16BE, null);
        Assert.assertEquals(t.getRight().intValue(), reverseInExactBuffer);

        int reverseInLargeBuffer = StringExtensions.findNewline(largerBuffer, Math.max(0, exactBuffer.length - 1),
                exactBuffer.length, true, StandardCharsets.UTF_16BE, null);
        Assert.assertEquals(t.getRight().intValue(), reverseInLargeBuffer);
    }

    for (Triple<String, Integer, Integer> t : TestDataUTF16CustomDelim) {
        byte[] exactBuffer = t.getLeft().getBytes(StandardCharsets.UTF_16BE);
        byte[] largerBuffer = new byte[exactBuffer.length + 100];
        System.arraycopy(exactBuffer, 0, largerBuffer, 0, exactBuffer.length);

        int forwardInExactBuffer = StringExtensions.findNewline(exactBuffer, 0, exactBuffer.length, false,
                StandardCharsets.UTF_16BE, customDelim);
        Assert.assertEquals(t.getMiddle().intValue(), forwardInExactBuffer);

        int forwardInLargeBuffer = StringExtensions.findNewline(largerBuffer, 0, exactBuffer.length, false,
                StandardCharsets.UTF_16BE, customDelim);
        Assert.assertEquals(t.getMiddle().intValue(), forwardInLargeBuffer);

        int reverseInExactBuffer = StringExtensions.findNewline(exactBuffer, Math.max(0, exactBuffer.length - 1),
                exactBuffer.length, true, StandardCharsets.UTF_16BE, customDelim);
        Assert.assertEquals(t.getRight().intValue(), reverseInExactBuffer);

        int reverseInLargeBuffer = StringExtensions.findNewline(largerBuffer, Math.max(0, exactBuffer.length - 1),
                exactBuffer.length, true, StandardCharsets.UTF_16BE, customDelim);
        Assert.assertEquals(t.getRight().intValue(), reverseInLargeBuffer);
    }
}
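The test depends on the newline search being charset-aware: in UTF-16BE a line feed is the two-byte sequence 0x00 0x0A, so a naive scan for the single byte 0x0A could report an offset in the middle of a code unit. A quick check (class name is ours) of what the delimiters look like in this encoding:

import java.nio.charset.StandardCharsets;
import java.util.Arrays;

public class NewlineBytesDemo {
    public static void main(String[] args) {
        System.out.println(Arrays.toString("\n".getBytes(StandardCharsets.UTF_16BE)));   // [0, 10]
        System.out.println(Arrays.toString("\r\n".getBytes(StandardCharsets.UTF_16BE))); // [0, 13, 0, 10]
        // Each delimiter character occupies two bytes, so any split point must stay
        // aligned to the two-byte code-unit boundary.
    }
}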
From source file:com.sonymobile.android.media.internal.ISOBMFFParser.java
private String readID3String(int frameSize) {
    String metadataString = null;
    try {
        int encoding = mDataSource.readByte();
        if (frameSize > 1) {
            if (encoding == 0) {
                // ISO 8859-1
                byte[] buffer = new byte[frameSize - 1];
                mDataSource.read(buffer);
                metadataString = new String(buffer, StandardCharsets.ISO_8859_1);
            } else if (encoding == 2) {
                // UTF-16
                int bom = mDataSource.readShort();
                byte[] buffer = new byte[frameSize - 3];
                short little_endian = (short) 0xFFFE;
                mDataSource.read(buffer);
                if (bom == little_endian) {
                    metadataString = new String(buffer, StandardCharsets.UTF_16LE);
                } else {
                    metadataString = new String(buffer, StandardCharsets.UTF_16BE);
                }
            } else if (encoding == 3) {
                // UTF-8
                byte[] buffer = new byte[frameSize - 1];
                mDataSource.read(buffer);
                metadataString = new String(buffer, StandardCharsets.UTF_8);
            } else {
                // UCS-2
                int bom = mDataSource.readShort();
                byte[] buffer = new byte[frameSize - 3];
                mDataSource.read(buffer);
                short little_endian = (short) 0xFFFE;
                if (bom == little_endian) {
                    // swap each byte pair into big-endian order before decoding
                    for (int i = 0; i < buffer.length; i += 2) {
                        byte tempByte = buffer[i];
                        buffer[i] = buffer[i + 1];
                        buffer[i + 1] = tempByte;
                    }
                }
                metadataString = new String(buffer, "UCS-2");
            }
        }
    } catch (IOException e) {
        if (LOGS_ENABLED) Log.e(TAG, "IOException reading ID3 string", e);
    }
    return metadataString;
}
From source file:com.sonymobile.android.media.internal.ISOBMFFParser.java
private boolean parseDataBox(BoxHeader header) {
    try {
        if (mCurrentMetaDataKey != null) {
            String metaDataValue;
            if (mCurrentMetaDataKey == KEY_DISC_NUMBER) {
                mDataSource.skipBytes(4); // skip type
                mDataSource.skipBytes(4); // skip locale
                mDataSource.skipBytes(2); // skip album id type and album id len
                int diskNumber = mDataSource.readShort();
                mDataSource.skipBytes(2); // skip total number of disks
                metaDataValue = Integer.toString(diskNumber);
            } else {
                byte[] typefield = new byte[4];
                mDataSource.read(typefield);
                int type = typefield[0];
                int flag = (typefield[1] | typefield[2] | typefield[3]);
                Charset encoding = StandardCharsets.UTF_8;
                if (type == 2) {
                    encoding = StandardCharsets.UTF_16BE;
                }
                mDataSource.skipBytes(4); // skip locale for now
                byte[] data = new byte[(int) header.boxDataSize - 8];
                mDataSource.read(data);
                if (mCurrentMetaDataKey == KEY_TRACK_NUMBER) {
                    int trackNumber = (data[2] << 8) & 0xFF | data[3];
                    int trackTotalNumber = (data[4] << 8) & 0xFF | data[5];
                    metaDataValue = trackNumber + "/" + trackTotalNumber;
                } else if (mCurrentMetaDataKey == KEY_COMPILATION) {
                    metaDataValue = Byte.toString(data[0]);
                } else if (mCurrentMetaDataKey == KEY_GENRE) {
                    if (type == 0 && flag == 1) {
                        metaDataValue = new String(data, encoding);
                    } else {
                        int genre = data[data.length - 1];
                        genre--;
                        if (genre < 0) {
                            genre = 255;
                        }
                        metaDataValue = Integer.toString(genre);
                    }
                } else {
                    metaDataValue = new String(data, encoding);
                }
            }
            mMetaDataValues.put(mCurrentMetaDataKey, metaDataValue);
        }
    } catch (IOException e) {
        if (LOGS_ENABLED) Log.e(TAG, "could not read data", e);
        return false;
    }
    return true;
}