List of usage examples for java.nio.charset StandardCharsets US_ASCII
Charset US_ASCII: seven-bit ASCII, also known as ISO646-US (the Basic Latin block of the Unicode character set).
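Before the framework-specific examples below, here is a minimal standalone sketch of the constant itself, using only the JDK. It encodes a string to seven-bit ASCII bytes, decodes them back, and shows that characters outside the ASCII range are replaced with '?' by String.getBytes(Charset).

import java.nio.charset.StandardCharsets;
import java.util.Arrays;

public class AsciiRoundTrip {
    public static void main(String[] args) {
        // Encode: every character below U+0080 maps to exactly one byte.
        byte[] ascii = "Hello, ASCII".getBytes(StandardCharsets.US_ASCII);
        System.out.println(Arrays.toString(ascii));

        // Decode the bytes back into a String using the same charset.
        System.out.println(new String(ascii, StandardCharsets.US_ASCII));

        // Characters outside US-ASCII are replaced with '?' by getBytes(Charset).
        byte[] lossy = "naïve".getBytes(StandardCharsets.US_ASCII);
        System.out.println(new String(lossy, StandardCharsets.US_ASCII)); // prints "na?ve"
    }
}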
From source file:org.apache.hc.client5.http.impl.auth.BasicScheme.java
/**
 * @since 4.3
 */
public BasicScheme(final Charset charset) {
    this.paramMap = new HashMap<>();
    this.charset = charset != null ? charset : StandardCharsets.US_ASCII;
    this.complete = false;
}
From source file:org.apache.hc.client5.http.impl.auth.BasicScheme.java
public BasicScheme() { this(StandardCharsets.US_ASCII); }
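Taken together, the two constructors above mean that credentials are encoded as US-ASCII unless a different charset is supplied. A short sketch, assuming the HttpClient 5 classes shown above are on the classpath (the UTF-8 variant is only an illustration):

// Default construction: header values are encoded with US-ASCII.
BasicScheme asciiScheme = new BasicScheme();

// Explicit charset, e.g. for servers that expect UTF-8 encoded credentials.
BasicScheme utf8Scheme = new BasicScheme(StandardCharsets.UTF_8);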
From source file:org.apache.nifi.security.util.crypto.HashService.java
/**
 * Returns a {@link List} of supported {@link Charset}s on this platform. This is not a complete
 * list, as only the charsets in {@link StandardCharsets} are returned to be consistent across
 * JVM instances.
 *
 * @return the list of charsets
 */
public static List<Charset> getSupportedCharsets() {
    return Arrays.asList(StandardCharsets.US_ASCII,
            StandardCharsets.ISO_8859_1,
            StandardCharsets.UTF_8,
            StandardCharsets.UTF_16BE,
            StandardCharsets.UTF_16LE,
            StandardCharsets.UTF_16);
}
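A possible caller-side sketch, assuming NiFi's HashService is on the classpath; it simply lists the charsets the method promises to keep stable across JVM instances:

for (Charset charset : HashService.getSupportedCharsets()) {
    // Prints US-ASCII, ISO-8859-1, UTF-8, UTF-16BE, UTF-16LE, UTF-16
    System.out.println(charset.name());
}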
From source file:org.apache.tika.parser.dbf.DBFFileHeader.java
private static DBFColumnHeader readCol(InputStream is) throws IOException, TikaException {
    byte[] fieldRecord = new byte[32];
    IOUtils.readFully(is, fieldRecord);
    DBFColumnHeader col = new DBFColumnHeader();
    col.name = new byte[11];
    System.arraycopy(fieldRecord, 0, col.name, 0, 10);
    int colType = fieldRecord[11] & 0xFF;
    if (colType < 0) {
        throw new IOException("File truncated before coltype in header");
    }
    col.setType(colType);
    col.fieldLength = fieldRecord[16] & 0xFF;
    if (col.fieldLength < 0) {
        throw new TikaException(
                "Field length for column " + col.getName(StandardCharsets.US_ASCII) + " is < 0");
    } else if (col.fieldLength > DBFReader.MAX_FIELD_LENGTH) {
        throw new TikaException("Field length (" + col.fieldLength
                + ") is greater than DBReader.MAX_FIELD_LENGTH (" + DBFReader.MAX_FIELD_LENGTH + ")");
    }
    col.decimalCount = fieldRecord[17] & 0xFF;
    return col;
}
From source file:com.joyent.manta.serialization.EncryptedMultipartUploadSerializationHelperTest.java
public void canConcatenateByteArrays() {
    byte[] array1 = "This is the first array. ".getBytes(StandardCharsets.US_ASCII);
    byte[] array2 = "This is the second array. ".getBytes(StandardCharsets.US_ASCII);
    byte[] array3 = "This is the third array.".getBytes(StandardCharsets.US_ASCII);
    String expected = "This is the first array. This is the second array. This is the third array.";
    byte[] concatenated = EncryptedMultipartUploadSerializationHelper.addAll(array1, array2, array3);
    String actual = new String(concatenated, StandardCharsets.US_ASCII);
    Assert.assertEquals(actual, expected);
}
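The addAll helper tested above belongs to the Manta SDK; an equivalent concatenation can be sketched with only the JDK, using ByteArrayOutputStream (the class and method names in this sketch are made up for illustration):

import java.io.ByteArrayOutputStream;
import java.nio.charset.StandardCharsets;

public class ByteConcat {
    static byte[] concat(byte[]... arrays) {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        for (byte[] array : arrays) {
            out.write(array, 0, array.length); // write(byte[], int, int) does not throw IOException
        }
        return out.toByteArray();
    }

    public static void main(String[] args) {
        byte[] joined = concat("foo ".getBytes(StandardCharsets.US_ASCII),
                               "bar".getBytes(StandardCharsets.US_ASCII));
        // Decoding with the same charset reproduces the concatenated text.
        System.out.println(new String(joined, StandardCharsets.US_ASCII)); // foo bar
    }
}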
From source file:fixio.netty.codec.FixMessageDecoderTest.java
private List<Object> decode(String message) throws Exception {
    String[] tags = message.split("\u0001");
    List<Object> result = new ArrayList<>();
    for (String tag : tags) {
        decoder.decode(null, Unpooled.wrappedBuffer(tag.getBytes(StandardCharsets.US_ASCII)), result);
    }
    return result;
}
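Standard tag/value FIX encoding is SOH-delimited (\u0001) and ASCII-only, which is why the test encodes each fragment with US_ASCII. A minimal sketch of preparing such a byte buffer with Netty's Unpooled and the JDK; the tag values in the sample message are made up and the fixio decoder itself is left out:

import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import java.nio.charset.StandardCharsets;

public class FixBytes {
    public static void main(String[] args) {
        // Hypothetical fragment: tag=value pairs separated by SOH (\u0001).
        String message = "8=FIX.4.2\u00019=12\u000135=0\u000110=000\u0001";
        ByteBuf buffer = Unpooled.wrappedBuffer(message.getBytes(StandardCharsets.US_ASCII));
        System.out.println(buffer.readableBytes() + " bytes ready for a decoder");
    }
}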
From source file:com.github.sebhoss.identifier.service.SuppliedIdentifiers.java
private static byte[] getBytes(final String value) { return value.getBytes(StandardCharsets.US_ASCII); }
From source file:csv.sorting.PrepareWeatherData.java
private static void writeSortedFileByIndices(Path csvFileIn, List<Integer> indices, Path csvFileOut) {
    try {
        List<String> csvDataList = new ArrayList<>();
        // Naive approach: read the entire CSV file into memory, skipping the header line.
        try (Stream<String> lines = Files.lines(csvFileIn, StandardCharsets.US_ASCII).skip(1)) {
            csvDataList = lines.collect(Collectors.toList());
        }
        // Now write the rows in the sorted order given by the indices:
        try (BufferedWriter writer = Files.newBufferedWriter(csvFileOut)) {
            for (Integer index : indices) {
                writer.write(csvDataList.get(index));
                writer.newLine();
            }
        }
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
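One detail worth noting: the read above pins the charset to US_ASCII, but Files.newBufferedWriter(Path, OpenOption...) encodes with UTF-8 by default. For pure-ASCII rows the output bytes are identical either way, but the charset can be passed explicitly to make the intent visible. A small standalone sketch of that alternative (the class and method names here are illustrative):

import java.io.BufferedWriter;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;

public class AsciiCsvWriter {
    // Writes the given rows with an explicit US-ASCII charset instead of the UTF-8 default.
    static void writeAscii(Path out, List<String> rows) throws Exception {
        try (BufferedWriter writer = Files.newBufferedWriter(out, StandardCharsets.US_ASCII)) {
            for (String row : rows) {
                writer.write(row);
                writer.newLine();
            }
        }
    }
}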
From source file:org.apache.tika.parser.dbf.DBFCell.java
private String getFormattedDate() {
    byte[] dateBytes = getBytes();
    if (dateBytes.length < 8) {
        return "";
    }
    String year = new String(dateBytes, 0, 4, StandardCharsets.US_ASCII);
    String month = new String(dateBytes, 4, 2, StandardCharsets.US_ASCII);
    String day = new String(dateBytes, 6, 2, StandardCharsets.US_ASCII);
    // test to see that these values make any sense
    for (String s : new String[] { year, month, day }) {
        try {
            Integer.parseInt(s);
        } catch (NumberFormatException e) {
            return "";
        }
    }
    return String.format(Locale.ROOT, "%s/%s/%s", month, day, year);
}
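The pattern above, decoding fixed-width slices of a byte array with new String(bytes, offset, length, charset), is generally useful for ASCII record formats. A standalone sketch with a made-up YYYYMMDD buffer:

import java.nio.charset.StandardCharsets;

public class FixedWidthDate {
    public static void main(String[] args) {
        byte[] dateBytes = "20240131".getBytes(StandardCharsets.US_ASCII); // hypothetical record field

        String year  = new String(dateBytes, 0, 4, StandardCharsets.US_ASCII);
        String month = new String(dateBytes, 4, 2, StandardCharsets.US_ASCII);
        String day   = new String(dateBytes, 6, 2, StandardCharsets.US_ASCII);

        System.out.println(month + "/" + day + "/" + year); // 01/31/2024
    }
}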
From source file:com.google.cloud.runtimes.builder.buildsteps.docker.StageDockerArtifactBuildStep.java
@Override
protected void doBuild(Path directory, Map<String, String> metadata) throws BuildStepException {
    try {
        // TODO wrap this in a try block and log a more friendly message if not found
        Path artifact = getArtifact(directory, metadata);
        logger.info("Found artifact {}", artifact);

        // make staging dir
        Path stagingDir = directory.resolve(DOCKER_STAGING_DIR);
        if (Files.exists(stagingDir)) {
            logger.info("Found a docker staging directory in provided sources. Cleaning {}",
                    stagingDir.toString());
            FileUtils.deleteDirectory(stagingDir.toFile());
        }
        Files.createDirectory(stagingDir);
        metadata.put(BuildStepMetadataConstants.DOCKER_STAGING_PATH, stagingDir.toString());
        logger.info("Preparing docker files in {}", stagingDir);

        // copy the artifact into the staging dir
        Files.copy(artifact, stagingDir.resolve(artifact.getFileName()));

        // copy the .dockerignore file into staging dir, if it exists
        Path dockerIgnoreFile = directory.resolve(DOCKER_IGNORE_FILE);
        if (Files.isRegularFile(dockerIgnoreFile)) {
            Files.copy(dockerIgnoreFile, stagingDir.resolve(DOCKER_IGNORE_FILE));
        }

        // Generate dockerfile and write it out as US-ASCII
        String dockerfile = dockerfileGenerator.generateDockerfile(artifact.getFileName());
        Path dockerFileDest = stagingDir.resolve("Dockerfile");
        try (BufferedWriter writer = Files.newBufferedWriter(dockerFileDest, StandardCharsets.US_ASCII)) {
            writer.write(dockerfile);
        }
    } catch (IOException | ArtifactNotFoundException | TooManyArtifactsException e) {
        throw new BuildStepException(e);
    }
}
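Generated Dockerfiles are expected to contain only ASCII, so writing them with US_ASCII is a reasonable guard. If there is any doubt about the generated content, it can be checked up front with a CharsetEncoder; this is a hedged addition, not part of the original build step, and the sample content is hypothetical:

import java.nio.charset.StandardCharsets;

public class AsciiCheck {
    public static void main(String[] args) {
        String dockerfile = "FROM openjdk:8\nCOPY app.jar /app.jar\n"; // hypothetical generated content
        // canEncode(CharSequence) reports whether every character fits in seven-bit ASCII.
        boolean pureAscii = StandardCharsets.US_ASCII.newEncoder().canEncode(dockerfile);
        System.out.println("ASCII-only Dockerfile: " + pureAscii);
    }
}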