Example usage for java.nio.charset StandardCharsets US_ASCII

Introduction

On this page you can find example usage of java.nio.charset StandardCharsets US_ASCII.

Prototype

Charset US_ASCII

Document

Seven-bit ASCII, also known as ISO646-US, also known as the Basic Latin block of the Unicode character set.

Usage
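
Before the project-specific examples below, here is a minimal, self-contained sketch (the class name UsAsciiExample is illustrative) of the two most common uses of the constant: encoding a String into seven-bit ASCII bytes and decoding those bytes back into a String.

import java.nio.charset.StandardCharsets;
import java.util.Arrays;

public class UsAsciiExample {
    public static void main(String[] args) {
        // Encode a String into its seven-bit ASCII byte representation.
        byte[] bytes = "Basic realm=\"test\"".getBytes(StandardCharsets.US_ASCII);

        // Decode the bytes back into a String using the same charset.
        String decoded = new String(bytes, StandardCharsets.US_ASCII);

        System.out.println(Arrays.toString(bytes)); // [66, 97, 115, 105, 99, ...]
        System.out.println(decoded);                // Basic realm="test"
    }
}

Note that String.getBytes(Charset) silently replaces characters that US_ASCII cannot represent with the charset's default replacement byte; if strict failure on unmappable characters is required, a CharsetEncoder configured to report errors should be used instead.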

From source file:io.undertow.server.security.SpnegoAuthenticationTestCase.java

@Test
public void testSpnegoSuccess() throws Exception {

    final TestHttpClient client = new TestHttpClient();
    HttpGet get = new HttpGet(DefaultServer.getDefaultServerURL());
    HttpResponse result = client.execute(get);
    assertEquals(StatusCodes.UNAUTHORIZED, result.getStatusLine().getStatusCode());
    Header[] values = result.getHeaders(WWW_AUTHENTICATE.toString());
    String header = getAuthHeader(NEGOTIATE, values);
    assertEquals(NEGOTIATE.toString(), header);
    HttpClientUtils.readResponse(result);

    Subject clientSubject = login("jduke", "theduke".toCharArray());

    Subject.doAs(clientSubject, new PrivilegedExceptionAction<Void>() {

        @Override
        public Void run() throws Exception {
            GSSManager gssManager = GSSManager.getInstance();
            GSSName serverName = gssManager
                    .createName("HTTP/" + DefaultServer.getDefaultServerAddress().getHostString(), null);

            GSSContext context = gssManager.createContext(serverName, SPNEGO, null,
                    GSSContext.DEFAULT_LIFETIME);

            byte[] token = new byte[0];

            boolean gotOur200 = false;
            while (!context.isEstablished()) {
                token = context.initSecContext(token, 0, token.length);

                if (token != null && token.length > 0) {
                    HttpGet get = new HttpGet(DefaultServer.getDefaultServerURL());
                    get.addHeader(AUTHORIZATION.toString(),
                            NEGOTIATE + " " + FlexBase64.encodeString(token, false));
                    HttpResponse result = client.execute(get);

                    Header[] headers = result.getHeaders(WWW_AUTHENTICATE.toString());
                    if (headers.length > 0) {
                        String header = getAuthHeader(NEGOTIATE, headers);

                        byte[] headerBytes = header.getBytes(StandardCharsets.US_ASCII);
                        // FlexBase64.decode() returns a ByteBuffer whose backing array can be larger than the
                        // decoded data; calling array() on it returns the underlying array including trailing
                        // zero bytes, which makes the token invalid. The Base64 MIME decoder is used instead,
                        // as it returns a byte[] of exactly the right size.
                        token = Base64.getMimeDecoder().decode(ArrayUtils.subarray(headerBytes,
                                NEGOTIATE.toString().length() + 1, headerBytes.length));
                    }

                    if (result.getStatusLine().getStatusCode() == StatusCodes.OK) {
                        Header[] values = result.getHeaders("ProcessedBy");
                        assertEquals(1, values.length);
                        assertEquals("ResponseHandler", values[0].getValue());
                        HttpClientUtils.readResponse(result);
                        assertSingleNotificationType(EventType.AUTHENTICATED);
                        gotOur200 = true;
                    } else if (result.getStatusLine().getStatusCode() == StatusCodes.UNAUTHORIZED) {
                        assertTrue("We did get a header.", headers.length > 0);

                        HttpClientUtils.readResponse(result);

                    } else {
                        fail(String.format("Unexpected status code %d",
                                result.getStatusLine().getStatusCode()));
                    }
                }
            }

            assertTrue(gotOur200);
            assertTrue(context.isEstablished());
            return null;
        }
    });
}

From source file:org.apache.hc.client5.http.impl.auth.TestBasicScheme.java

@Test
public void testBasicAuthentication() throws Exception {
    final AuthChallenge authChallenge = parse("Basic realm=\"test\"");

    final BasicScheme authscheme = new BasicScheme();
    authscheme.processChallenge(authChallenge, null);

    final HttpHost host = new HttpHost("somehost", 80);
    final AuthScope authScope = new AuthScope(host, "test", null);
    final UsernamePasswordCredentials creds = new UsernamePasswordCredentials("testuser",
            "testpass".toCharArray());
    final BasicCredentialsProvider credentialsProvider = new BasicCredentialsProvider();
    credentialsProvider.setCredentials(authScope, creds);

    final HttpRequest request = new BasicHttpRequest("GET", "/");
    Assert.assertTrue(authscheme.isResponseReady(host, credentialsProvider, null));
    final String authResponse = authscheme.generateAuthResponse(host, request, null);

    final String expected = "Basic "
            + new String(Base64.encodeBase64("testuser:testpass".getBytes(StandardCharsets.US_ASCII)),
                    StandardCharsets.US_ASCII);
    Assert.assertEquals(expected, authResponse);
    Assert.assertEquals("test", authscheme.getRealm());
    Assert.assertTrue(authscheme.isChallengeComplete());
    Assert.assertFalse(authscheme.isConnectionBased());
}

From source file:org.apache.tika.parser.apple.AppleSingleFileParser.java

private long processFieldEntries(InputStream stream, List<FieldInfo> fieldInfoList, Metadata embeddedMetadata,
        long bytesRead) throws IOException, TikaException {
    byte[] buffer = null;
    for (FieldInfo f : fieldInfoList) {
        long diff = f.offset - bytesRead;
        //just in case
        IOUtils.skipFully(stream, diff);
        bytesRead += diff;
        if (f.entryId == REAL_NAME) {
            if (f.length > Integer.MAX_VALUE) {
                throw new TikaException("File name length can't be > integer max");
            }
            buffer = new byte[(int) f.length];
            IOUtils.readFully(stream, buffer);
            bytesRead += f.length;
            String originalFileName = new String(buffer, 0, buffer.length, StandardCharsets.US_ASCII);
            embeddedMetadata.set(TikaCoreProperties.ORIGINAL_RESOURCE_NAME, originalFileName);
        } else if (f.entryId != DATA_FORK) {
            IOUtils.skipFully(stream, f.length);
            bytesRead += f.length;
        }
    }
    return bytesRead;
}

From source file:org.sejda.sambox.output.DefaultCOSWriterTest.java

@Test
public void visitCOSNull() throws Exception {
    victim.visit(COSNull.NULL);
    verify(writer).write("null".getBytes(StandardCharsets.US_ASCII));
}

From source file:org.sejda.sambox.pdmodel.graphics.color.PDICCBased.java

/**
 * Returns true if the given profile represents sRGB.
 */
private boolean is_sRGB(ICC_Profile profile) {
    byte[] bytes = Arrays.copyOfRange(profile.getData(ICC_Profile.icSigHead), ICC_Profile.icHdrModel,
            ICC_Profile.icHdrModel + 7);
    String deviceModel = new String(bytes, StandardCharsets.US_ASCII).trim();
    return deviceModel.equals("sRGB");
}

From source file:org.asynchttpclient.ntlm.NtlmTest.java

@Test(expectedExceptions = NtlmEngineException.class)
public void testGenerateType3MsgThworsExceptionWhenType2IndicatorNotPresent() throws IOException {
    ByteArrayOutputStream buf = new ByteArrayOutputStream();
    buf.write("NTLMSSP".getBytes(StandardCharsets.US_ASCII));
    buf.write(0);
    // type 2 indicator
    buf.write(3);
    buf.write(0);
    buf.write(0);
    buf.write(0);
    buf.write("challenge".getBytes());
    NtlmEngine engine = new NtlmEngine();
    engine.generateType3Msg("username", "password", "localhost", "workstation",
            Base64.encode(buf.toByteArray()));
    buf.close();
    fail("An NtlmEngineException must have occurred as type 2 indicator is incorrect");
}

From source file:de.micromata.genome.logging.spi.ifiles.IndexDirectory.java

public void renameFile(File oldFile, File newFile) throws IOException {
    String oldName = getDirectoryNameFromFile(oldFile);

    indexByteBuffer.position(HEADER_SIZE);
    byte[] nameBuffer = new byte[LOG_FILE_NAME_SIZE];
    while (indexByteBuffer.position() + ROW_SIZE < indexChannel.size()) {
        indexByteBuffer.getInt();
        indexByteBuffer.get(nameBuffer);
        String trimmed = new String(nameBuffer).trim();
        String fnt = StringUtils.substring(oldName, 0, LOG_FILE_NAME_SIZE);
        if (StringUtils.equalsIgnoreCase(trimmed, fnt)) {
            String lwwrite = fileToStoredName(newFile);
            indexByteBuffer.position(indexByteBuffer.position() - LOG_FILE_NAME_SIZE);
            indexByteBuffer.put(lwwrite.getBytes(StandardCharsets.US_ASCII));
            break;
        }

    }
}

From source file:org.apache.hc.client5.http.impl.auth.BasicScheme.java

@Override
public String generateAuthResponse(final HttpHost host, final HttpRequest request, final HttpContext context)
        throws AuthenticationException {
    if (this.buffer == null) {
        this.buffer = new ByteArrayBuilder(64).charset(this.charset);
    } else {
        this.buffer.reset();
    }
    this.buffer.append(this.username).append(":").append(this.password);
    if (this.base64codec == null) {
        this.base64codec = new Base64(0);
    }
    final byte[] encodedCreds = this.base64codec.encode(this.buffer.toByteArray());
    this.buffer.reset();
    return "Basic " + new String(encodedCreds, 0, encodedCreds.length, StandardCharsets.US_ASCII);
}

From source file:com.st.maven.debian.DebianPackageMojo.java

@Override
public void execute() throws MojoExecutionException {

    validate();
    fillDefaults();

    freemarkerConfig.setDefaultEncoding("UTF-8");
    freemarkerConfig.setTemplateExceptionHandler(TemplateExceptionHandler.RETHROW_HANDLER);
    freemarkerConfig.setClassForTemplateLoading(DebianPackageMojo.class, "/");
    freemarkerConfig.setTimeZone(TimeZone.getTimeZone("GMT"));

    Config config = new Config();
    config.setArtifactId(project.getArtifactId());
    config.setDescription(project.getDescription());
    config.setGroup(unixGroupId);
    config.setUser(unixUserId);
    SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss");
    sdf.setTimeZone(TimeZone.getTimeZone("GMT"));
    config.setVersion(sdf.format(new Date()));
    Developer dev = project.getDevelopers().get(0);
    String maintainer = dev.getName() + " <" + dev.getEmail() + ">";
    config.setMaintainer(maintainer);
    config.setName(project.getName());
    config.setDescription(project.getDescription());
    config.setDepends(formatDependencies(osDependencies));
    config.setInstallDir("/home/" + unixUserId + "/" + project.getArtifactId());
    if (section == null || section.trim().length() == 0) {
        config.setSection("java");
    } else {
        config.setSection(section);
    }
    if (arch == null || arch.trim().length() == 0) {
        config.setArch("all");
    } else {
        config.setArch(arch);
    }
    if (priority == null || priority.trim().length() == 0) {
        config.setPriority(Priority.STANDARD);
    } else {
        config.setPriority(Priority.valueOf(priority.toUpperCase(Locale.UK)));
    }

    ArFileOutputStream aros = null;
    try {
        File debFile = new File(project.getBuild().getDirectory() + File.separator + project.getArtifactId()
                + "-" + config.getVersion() + ".deb");
        getLog().info("Building deb: " + debFile.getAbsolutePath());
        aros = new ArFileOutputStream(debFile.getAbsolutePath());
        aros.putNextEntry(createEntry("debian-binary"));
        aros.write("2.0\n".getBytes(StandardCharsets.US_ASCII));
        aros.closeEntry();
        aros.putNextEntry(createEntry("control.tar.gz"));
        fillControlTar(config, aros);
        aros.closeEntry();
        aros.putNextEntry(createEntry("data.tar.gz"));
        fillDataTar(config, aros);
        aros.closeEntry();
        if (attachArtifact) {
            VersionableAttachedArtifact artifact = new VersionableAttachedArtifact(project.getArtifact(), "deb",
                    config.getVersion());
            artifact.setFile(debFile);
            artifact.setResolved(true);
            project.addAttachedArtifact(artifact);
        }
    } catch (Exception e) {
        throw new MojoExecutionException("unable to create .deb file", e);
    } finally {
        if (aros != null) {
            try {
                aros.close();
            } catch (IOException e) {
                throw new MojoExecutionException("unable to close .deb file", e);
            }
        }
    }
}

From source file:ai.susi.json.JsonDataset.java

/**
 * Define a data set: an indexed JsonDump where the index is held in RAM.
 * @param dump_dir the path where the subdirectories for this data set shall be stored
 * @param dump_file_prefix a prefix for the file names
 * @param columns the json property keys whose content shall be indexed, each with a case-sensitivity flag
 * @param mode the indexing mode, either completely in RAM with Mode.COMPRESSED or with file handles with Mode.REWRITABLE
 * @throws IOException
 */
public JsonDataset(File dump_dir, String dump_file_prefix, Column[] columns, String dateFieldName,
        String dateFieldFormat, JsonRepository.Mode mode, final boolean dailyDump, int count)
        throws IOException {

    // initialize class objects
    int concurrency = Runtime.getRuntime().availableProcessors();
    this.indexDump = new JsonRepository(dump_dir, dump_file_prefix, null, mode, dailyDump, concurrency);
    this.index = new ConcurrentHashMap<>();
    this.minifier = new JsonMinifier();
    this.columns = new HashMap<>();
    this.dateFieldName = dateFieldName == null ? "" : dateFieldName;
    this.dateFieldFormat = this.dateFieldName.length() == 0 ? null : new SimpleDateFormat(dateFieldFormat);
    for (Column column : columns)
        this.columns.put(column.key, column.caseInsensitive);

    // assign for each index key one JsonFactory index
    for (Column col : columns)
        this.index.put(col.key, new JsonFactoryIndex());

    // start reading of the JsonDump
    final Collection<File> dumps = indexDump.getOwnDumps(count);

    // for each reader one thread is started which does the Json parsing and indexing
    if (dumps != null)
        for (final File dump : dumps) {
            final JsonReader reader = indexDump.getDumpReader(dump);
            DAO.log("loading " + reader.getName());
            Thread[] indexerThreads = new Thread[concurrency];
            for (int i = 0; i < concurrency; i++) {
                indexerThreads[i] = new Thread() {
                    public void run() {
                        JsonFactory jsonHandle;
                        try {
                            while ((jsonHandle = reader.take()) != JsonStreamReader.POISON_JSON_MAP) {
                                JSONObject op = jsonHandle.getJSON();
                                JsonFactory jsonFactory;
                                if (jsonHandle instanceof JsonRandomAccessFile.JsonHandle) {
                                    JsonRandomAccessFile.JsonHandle handle = (JsonRandomAccessFile.JsonHandle) jsonHandle;
                                    assert reader instanceof JsonRandomAccessFile;
                                    // create the file json handle which does not contain the json any more
                                    // but only the file handle
                                    jsonFactory = ((JsonRandomAccessFile) reader)
                                            .getJsonFactory(handle.getIndex(), handle.getLength());
                                } else {
                                    assert JsonDataset.this.indexDump
                                            .getMode() == JsonRepository.COMPRESSED_MODE;
                                    // create the json minifier object which contains the json in minified version
                                    // before we create the minifier, we remove the meta keys from the json to further minify it
                                    for (byte[] meta_key : JsonRepository.META_KEYS) {
                                        op.remove(new String(meta_key, StandardCharsets.US_ASCII));
                                    }
                                    jsonFactory = JsonDataset.this.minifier.minify(op);
                                }
                                // the resulting json factory is written to each search index
                                for (Map.Entry<String, Boolean> column : JsonDataset.this.columns.entrySet()) {
                                    String searchKey = column.getKey();
                                    boolean case_insensitive = column.getValue();
                                    JsonFactoryIndex factoryIndex = JsonDataset.this.index.get(searchKey);
                                    Object searchValue = op.has(searchKey) ? op.get(searchKey) : null;
                                    if (searchValue != null) {
                                        if (searchValue instanceof String) {
                                            factoryIndex
                                                    .put(case_insensitive ? ((String) searchValue).toLowerCase()
                                                            : (String) searchValue, jsonFactory);
                                        } else {
                                            factoryIndex.put(searchValue, jsonFactory);
                                        }
                                    }
                                }
                            }
                        } catch (InterruptedException e) {
                            Log.getLog().warn(e);
                        } catch (IOException e) {
                            Log.getLog().warn(e);
                        }
                    }
                };
                indexerThreads[i].start();
            }
            // wait for the completion of each task
            for (int i = 0; i < concurrency; i++) {
                try {
                    indexerThreads[i].join();
                } catch (InterruptedException e) {
                }
            }
        }
}