Example usage for javax.crypto KeyGenerator generateKey

List of usage examples for javax.crypto KeyGenerator generateKey

Introduction

On this page you can find example usages of javax.crypto.KeyGenerator.generateKey().

Prototype

public final SecretKey generateKey() 

Document

Generates a secret key.
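
Below is a minimal, self-contained example of the call; the AES algorithm, 128-bit key size, and the follow-up Cipher use are illustrative choices, not part of generateKey() itself.

import javax.crypto.Cipher;
import javax.crypto.KeyGenerator;
import javax.crypto.SecretKey;

import java.nio.charset.StandardCharsets;
import java.util.Base64;

public class GenerateKeyExample {

    public static void main(String[] args) throws Exception {
        // Generate a random 128-bit AES key
        KeyGenerator keyGen = KeyGenerator.getInstance("AES");
        keyGen.init(128);
        SecretKey key = keyGen.generateKey();

        // Use the key to encrypt a short message.
        // "AES" alone means AES/ECB/PKCS5Padding; prefer an authenticated mode such as GCM in real code.
        Cipher cipher = Cipher.getInstance("AES");
        cipher.init(Cipher.ENCRYPT_MODE, key);
        byte[] ciphertext = cipher.doFinal("hello".getBytes(StandardCharsets.UTF_8));

        System.out.println("key (Base64):        " + Base64.getEncoder().encodeToString(key.getEncoded()));
        System.out.println("ciphertext (Base64): " + Base64.getEncoder().encodeToString(ciphertext));
    }
}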

Usage

From source file:org.openintents.safe.CryptoHelper.java

/**
 * encrypt a file using a random session key
 *
 * @param contentResolver is used to be able to read the stream
 * @param fileUri         is the stream or file to read from
 * @return Uri of the created encrypted file
 * @throws CryptoHelperException
 * @author Peli
 */
public Uri encryptFileWithSessionKey(ContentResolver contentResolver, Uri fileUri)
        throws CryptoHelperException {
    if (debug) {
        Log.d(TAG, "Encrypt with session key");
    }
    status = false; // assume failure
    if (password == null) {
        String msg = "Must call setPassword before runing encrypt.";
        throw new CryptoHelperException(msg);
    }

    String outputPath = "";
    try {
        InputStream is;
        if (fileUri.getScheme().equals("file")) {
            is = new java.io.FileInputStream(fileUri.getPath());
            outputPath = fileUri.getPath() + OISAFE_EXTENSION;
        } else {
            is = contentResolver.openInputStream(fileUri);
            outputPath = getTemporaryFileName();
        }

        FileOutputStream os = new FileOutputStream(outputPath);

        byte[] cipherSessionKey = {};
        //         byte[] ciphertext = {};

        // First create a session key
        SecretKey sessionKey = null;
        byte[] sessionKeyEncoded = null;
        //         String sessionKeyString = null;
        try {
            KeyGenerator keygen;
            keygen = KeyGenerator.getInstance("AES");
            keygen.init(256); // 256-bit AES session key
            //keygen.init(128); // 128-bit AES session key
            sessionKey = keygen.generateKey();
            sessionKeyEncoded = sessionKey.getEncoded();
            //            sessionKeyString = new String(sessionKeyEncoded);
        } catch (NoSuchAlgorithmException e) {
            Log.e(TAG, "generateMasterKey(): " + e.toString());
            return null;
        }

        // Encrypt the session key using the master key
        try {
            pbeCipher.init(Cipher.ENCRYPT_MODE, pbeKey, pbeParamSpec);
            cipherSessionKey = pbeCipher.doFinal(sessionKeyEncoded);
            status = true;
        } catch (IllegalBlockSizeException | BadPaddingException | InvalidAlgorithmParameterException
                | InvalidKeyException e) {
            Log.e(TAG, "encryptWithSessionKey(): " + e.toString());
        }
        if (!status) {
            return null;
        }
        status = false;

        String stringCipherVersion = "A";
        byte[] bytesCipherVersion = stringCipherVersion.getBytes();
        os.write(bytesCipherVersion, 0, bytesCipherVersion.length);

        os.write(cipherSessionKey, 0, cipherSessionKey.length);

        if (debug) {
            Log.d(TAG, "bytesCipherVersion.length: " + bytesCipherVersion.length);
        }
        if (debug) {
            Log.d(TAG, "cipherSessionKey.length: " + cipherSessionKey.length);
        }

        Trivium tri = new Trivium();
        try {
            tri.setupKey(Trivium.MODE_ENCRYPT, sessionKeyEncoded, 0);
            tri.setupNonce(sessionKeyEncoded, 10);

            // Create the byte array to hold the data
            final int bytesLen = 4096; // buffer length
            byte[] bytesIn = new byte[bytesLen];
            byte[] bytesOut = new byte[bytesLen];

            int offset = 0;
            int numRead = 0;
            while ((numRead = is.read(bytesIn, 0, bytesLen)) >= 0) {
                tri.process(bytesIn, 0, bytesOut, 0, numRead);

                os.write(bytesOut, 0, numRead);
                offset += numRead;
            }

            // Ensure all the bytes have been read in
            if (offset < is.available()) {
                throw new IOException("Could not completely read file ");
            }

            // Close the input stream and return bytes
            is.close();
            os.close();

            // Securely delete the original file:
            SecureDelete.delete(new File(fileUri.getPath()));
            status = true;

        } catch (ESJException e) {
            Log.e(TAG, "Error encrypting file", e);
        }
    } catch (FileNotFoundException e) {
        Log.e(TAG, "File not found", e);
    } catch (IOException e) {
        Log.e(TAG, "IO Exception", e);
    }

    if (!status) {
        return null;
    }
    return Uri.fromFile(new File(outputPath)); //Uri.parse("file://" + outputPath); // TODO: UUEncode
}
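
The method above encrypts a random AES session key with a password-based cipher before writing it to the output stream. A stand-alone sketch of that wrap/unwrap idea using standard PBE classes follows; the PBE algorithm, salt, and iteration count are illustrative and do not reproduce OI Safe's actual file format.

import javax.crypto.Cipher;
import javax.crypto.KeyGenerator;
import javax.crypto.SecretKey;
import javax.crypto.SecretKeyFactory;
import javax.crypto.spec.PBEKeySpec;
import javax.crypto.spec.PBEParameterSpec;
import javax.crypto.spec.SecretKeySpec;

import java.util.Arrays;

public class SessionKeyWrapSketch {

    public static void main(String[] args) throws Exception {
        // Random AES session key, as in encryptFileWithSessionKey()
        KeyGenerator keygen = KeyGenerator.getInstance("AES");
        keygen.init(128);
        SecretKey sessionKey = keygen.generateKey();

        // Password-based key and parameters (password, salt and iteration count are illustrative)
        char[] password = "master-password".toCharArray();
        byte[] salt = { 1, 2, 3, 4, 5, 6, 7, 8 };
        PBEParameterSpec pbeParamSpec = new PBEParameterSpec(salt, 1000);
        SecretKey pbeKey = SecretKeyFactory.getInstance("PBEWithMD5AndDES")
                .generateSecret(new PBEKeySpec(password));

        // Wrap: encrypt the encoded session key with the PBE cipher
        Cipher pbeCipher = Cipher.getInstance("PBEWithMD5AndDES");
        pbeCipher.init(Cipher.ENCRYPT_MODE, pbeKey, pbeParamSpec);
        byte[] cipherSessionKey = pbeCipher.doFinal(sessionKey.getEncoded());

        // Unwrap: decrypt the bytes and rebuild the AES key
        pbeCipher.init(Cipher.DECRYPT_MODE, pbeKey, pbeParamSpec);
        SecretKey recovered = new SecretKeySpec(pbeCipher.doFinal(cipherSessionKey), "AES");

        System.out.println("round trip ok: " + Arrays.equals(sessionKey.getEncoded(), recovered.getEncoded()));
    }
}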

From source file:com.cloud.server.ConfigurationServerImpl.java

private void updateSSOKey() {
    try {
        String encodedKey = null;

        // Algorithm for SSO keys is HmacSHA1; should this be configurable?
        KeyGenerator generator = KeyGenerator.getInstance("HmacSHA1");
        SecretKey key = generator.generateKey();
        encodedKey = Base64.encodeBase64URLSafeString(key.getEncoded());

        _configDao.update(Config.SSOKey.key(), Config.SSOKey.getCategory(), encodedKey);
    } catch (NoSuchAlgorithmException ex) {
        s_logger.error("error generating sso key", ex);
    }
}
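
The generated HmacSHA1 key is only stored Base64-URL-encoded here; the sketch below shows how such a key could later be fed to javax.crypto.Mac to sign a value. The signing step is an assumption for illustration and is not part of the source above.

import javax.crypto.KeyGenerator;
import javax.crypto.Mac;
import javax.crypto.SecretKey;

import java.nio.charset.StandardCharsets;
import java.util.Base64;

public class SsoKeySketch {

    public static void main(String[] args) throws Exception {
        // Generate and encode the key, as in updateSSOKey()
        SecretKey key = KeyGenerator.getInstance("HmacSHA1").generateKey();
        String encodedKey = Base64.getUrlEncoder().withoutPadding().encodeToString(key.getEncoded());
        System.out.println("sso key: " + encodedKey);

        // Later, sign a value with the same key (illustrative usage, not shown in the source above)
        Mac mac = Mac.getInstance("HmacSHA1");
        mac.init(key);
        byte[] signature = mac.doFinal("user=admin&ts=1234".getBytes(StandardCharsets.UTF_8));
        System.out.println("signature: " + Base64.getEncoder().encodeToString(signature));
    }
}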

From source file:com.feilong.tools.security.symmetric.SymmetricEncryption.java

/**
 * Builds the symmetric Key from the given key string.
 * 
 * @param _keyString
 *            the key string used to seed key generation
 * @return Key
 * @throws NoSuchAlgorithmException
 *             the no such algorithm exception
 * @see <a href="http://blog.csdn.net/hbcui1984/article/details/5753083">AES under Linux (Chinese)</a>
 * @see KeyGenerator
 * @see SecureRandom
 */
private Key getKey(String _keyString) throws NoSuchAlgorithmException {
    // KeyGenerator provides the functionality of a symmetric (secret) key generator for the given algorithm
    KeyGenerator keyGenerator = KeyGenerator.getInstance(algorithm);

    // SHA1PRNG: It is just ensuring the random number generated is as close to "truly random" as possible.
    // Easily guessable random numbers break encryption.

    // Obtain a random number generator (RNG) and seed it with the key string
    //TODO
    SecureRandom secureRandom = SecureRandom.getInstance("SHA1PRNG");

    // SecureRandom output depends on the provider, even when setSeed is called right after getInstance:
    // the same seed produced identical keys on Windows but different ones on Linux, which showed up as
    // javax.crypto.BadPaddingException: Given final block not properly padded
    secureRandom.setSeed(_keyString.getBytes());

    keyGenerator.init(secureRandom);

    Key _key = keyGenerator.generateKey();
    keyGenerator = null;
    return _key;
}
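
Seeding SHA1PRNG with the key string makes the generated key depend on the SecureRandom provider, which is exactly the Windows/Linux mismatch the comments warn about. A portable alternative is to derive the AES key deterministically with PBKDF2, as in the hedged sketch below; the salt, iteration count, and key length are illustrative.

import javax.crypto.SecretKeyFactory;
import javax.crypto.spec.PBEKeySpec;
import javax.crypto.spec.SecretKeySpec;

import java.nio.charset.StandardCharsets;
import java.security.Key;

public class DeriveKeySketch {

    // Derive a 128-bit AES key from the key string; the same input gives the same key on every JVM and OS
    static Key deriveAesKey(String keyString) throws Exception {
        byte[] salt = "application-wide-salt".getBytes(StandardCharsets.UTF_8); // illustrative
        PBEKeySpec spec = new PBEKeySpec(keyString.toCharArray(), salt, 65536, 128);
        SecretKeyFactory factory = SecretKeyFactory.getInstance("PBKDF2WithHmacSHA1");
        byte[] keyBytes = factory.generateSecret(spec).getEncoded();
        return new SecretKeySpec(keyBytes, "AES");
    }

    public static void main(String[] args) throws Exception {
        System.out.println(deriveAesKey("my-secret").getAlgorithm());
    }
}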

From source file:org.apache.hadoop.mapreduce.JobSubmitter.java

/**
 * Internal method for submitting jobs to the system.
 * 
 * <p>The job submission process involves:
 * <ol>
 *   <li>
 *   Checking the input and output specifications of the job.
 *   </li>
 *   <li>
 *   Computing the {@link InputSplit}s for the job.
 *   </li>
 *   <li>
 *   Setup the requisite accounting information for the 
 *   {@link DistributedCache} of the job, if necessary.
 *   </li>
 *   <li>
 *   Copying the job's jar and configuration to the map-reduce system
 *   directory on the distributed file-system. 
 *   </li>
 *   <li>
 *   Submitting the job to the <code>JobTracker</code> and optionally
 *   monitoring its status.
 *   </li>
 * </ol></p>
 * @param job the configuration to submit
 * @param cluster the handle to the Cluster
 * @throws ClassNotFoundException
 * @throws InterruptedException
 * @throws IOException
 */
JobStatus submitJobInternal(Job job, Cluster cluster)
        throws ClassNotFoundException, InterruptedException, IOException {

    //validate the jobs output specs 
    checkSpecs(job);

    Configuration conf = job.getConfiguration();
    addMRFrameworkToDistributedCache(conf);

    Path jobStagingArea = JobSubmissionFiles.getStagingDir(cluster, conf);
    //configure the command line options correctly on the submitting dfs
    InetAddress ip = InetAddress.getLocalHost();
    if (ip != null) {
        submitHostAddress = ip.getHostAddress();
        submitHostName = ip.getHostName();
        conf.set(MRJobConfig.JOB_SUBMITHOST, submitHostName);
        conf.set(MRJobConfig.JOB_SUBMITHOSTADDR, submitHostAddress);
    }
    JobID jobId = submitClient.getNewJobID();
    job.setJobID(jobId);
    Path submitJobDir = new Path(jobStagingArea, jobId.toString());
    JobStatus status = null;
    try {
        conf.set(MRJobConfig.USER_NAME, UserGroupInformation.getCurrentUser().getShortUserName());
        conf.set("hadoop.http.filter.initializers",
                "org.apache.hadoop.yarn.server.webproxy.amfilter.AmFilterInitializer");
        conf.set(MRJobConfig.MAPREDUCE_JOB_DIR, submitJobDir.toString());
        LOG.debug("Configuring job " + jobId + " with " + submitJobDir + " as the submit dir");
        // get delegation token for the dir
        TokenCache.obtainTokensForNamenodes(job.getCredentials(), new Path[] { submitJobDir }, conf);

        populateTokenCache(conf, job.getCredentials());

        // generate a secret to authenticate shuffle transfers
        if (TokenCache.getShuffleSecretKey(job.getCredentials()) == null) {
            KeyGenerator keyGen;
            try {

                int keyLen = CryptoUtils.isShuffleEncrypted(conf)
                        ? conf.getInt(MRJobConfig.MR_ENCRYPTED_INTERMEDIATE_DATA_KEY_SIZE_BITS,
                                MRJobConfig.DEFAULT_MR_ENCRYPTED_INTERMEDIATE_DATA_KEY_SIZE_BITS)
                        : SHUFFLE_KEY_LENGTH;
                keyGen = KeyGenerator.getInstance(SHUFFLE_KEYGEN_ALGORITHM);
                keyGen.init(keyLen);
            } catch (NoSuchAlgorithmException e) {
                throw new IOException("Error generating shuffle secret key", e);
            }
            SecretKey shuffleKey = keyGen.generateKey();
            TokenCache.setShuffleSecretKey(shuffleKey.getEncoded(), job.getCredentials());
        }

        copyAndConfigureFiles(job, submitJobDir);

        Path submitJobFile = JobSubmissionFiles.getJobConfPath(submitJobDir);

        // Create the splits for the job
        LOG.debug("Creating splits at " + jtFs.makeQualified(submitJobDir));
        int maps = writeSplits(job, submitJobDir);
        conf.setInt(MRJobConfig.NUM_MAPS, maps);
        LOG.info("number of splits:" + maps);

        // write "queue admins of the queue to which job is being submitted"
        // to job file.
        String queue = conf.get(MRJobConfig.QUEUE_NAME, JobConf.DEFAULT_QUEUE_NAME);
        AccessControlList acl = submitClient.getQueueAdmins(queue);
        conf.set(toFullPropertyName(queue, QueueACL.ADMINISTER_JOBS.getAclName()), acl.getAclString());

        // removing jobtoken referrals before copying the jobconf to HDFS
        // as the tasks don't need this setting, actually they may break
        // because of it if present as the referral will point to a
        // different job.
        TokenCache.cleanUpTokenReferral(conf);

        if (conf.getBoolean(MRJobConfig.JOB_TOKEN_TRACKING_IDS_ENABLED,
                MRJobConfig.DEFAULT_JOB_TOKEN_TRACKING_IDS_ENABLED)) {
            // Add HDFS tracking ids
            ArrayList<String> trackingIds = new ArrayList<String>();
            for (Token<? extends TokenIdentifier> t : job.getCredentials().getAllTokens()) {
                trackingIds.add(t.decodeIdentifier().getTrackingId());
            }
            conf.setStrings(MRJobConfig.JOB_TOKEN_TRACKING_IDS,
                    trackingIds.toArray(new String[trackingIds.size()]));
        }

        // Set reservation info if it exists
        ReservationId reservationId = job.getReservationId();
        if (reservationId != null) {
            conf.set(MRJobConfig.RESERVATION_ID, reservationId.toString());
        }

        // Write job file to submit dir
        writeConf(conf, submitJobFile);
        Limits.reset(conf);

        //
        // Now, actually submit the job (using the submit name)
        //
        printTokens(jobId, job.getCredentials());
        status = submitClient.submitJob(jobId, submitJobDir.toString(), job.getCredentials());
        if (status != null) {
            return status;
        } else {
            throw new IOException("Could not launch job");
        }
    } finally {
        if (status == null) {
            LOG.info("Cleaning up the staging area " + submitJobDir);
            if (jtFs != null && submitJobDir != null)
                jtFs.delete(submitJobDir, true);

        }
    }
}
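
The shuffle secret is stored as raw bytes in the job credentials; consumers later rebuild a MAC key from those bytes to authenticate shuffle requests. The sketch below illustrates that generate-store-rebuild-sign round trip with plain JCE calls; the key length and the signed URL are illustrative, and Hadoop's actual shuffle-auth helpers are not shown here.

import javax.crypto.KeyGenerator;
import javax.crypto.Mac;
import javax.crypto.SecretKey;
import javax.crypto.spec.SecretKeySpec;

import java.nio.charset.StandardCharsets;
import java.util.Base64;

public class ShuffleSecretSketch {

    public static void main(String[] args) throws Exception {
        // Submission side: generate the shuffle secret (HmacSHA1, as in JobSubmitter)
        KeyGenerator keyGen = KeyGenerator.getInstance("HmacSHA1");
        keyGen.init(64); // key length in bits; JobSubmitter uses its SHUFFLE_KEY_LENGTH constant here
        byte[] secretBytes = keyGen.generateKey().getEncoded(); // what setShuffleSecretKey() stores

        // Consumer side: rebuild a MAC key from the stored bytes and sign a request
        SecretKey rebuilt = new SecretKeySpec(secretBytes, "HmacSHA1");
        Mac mac = Mac.getInstance("HmacSHA1");
        mac.init(rebuilt);
        byte[] hash = mac.doFinal("/mapOutput?job=job_1&reduce=0".getBytes(StandardCharsets.UTF_8));
        System.out.println("request hash: " + Base64.getEncoder().encodeToString(hash));
    }
}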

From source file:org.structr.util.StructrLicenseManager.java

private boolean checkVolumeLicense(final Map<String, String> properties, final String serversString) {

    try {

        final KeyGenerator kgen = KeyGenerator.getInstance("AES");
        final byte[] data = write(properties).getBytes("utf-8");
        final String name = properties.get(NameKey);
        final byte[] expected = name.getBytes("utf-8");

        kgen.init(128);

        for (final String part : serversString.split("[, ]+")) {

            final String address = part.trim();

            if (StringUtils.isNotBlank(address)) {

                try {

                    logger.info("Trying to verify volume license with server {}", address);

                    final long t0 = System.currentTimeMillis();
                    final SecretKey aesKey = kgen.generateKey(); // symmetric stream key
                    final byte[] ivspec = RandomUtils.nextBytes(16); // initialization vector for stream cipher
                    final byte[] key = encryptSessionKey(aesKey.getEncoded());
                    final byte[] encryptedIV = encryptSessionKey(ivspec);
                    final byte[] encryptedData = encryptData(data, aesKey, ivspec);
                    final byte[] response = sendAndReceive(address, key, encryptedIV, encryptedData);
                    final boolean result = verify(expected, response);

                    if (result) {
                        logger.info("License verified in {} ms", System.currentTimeMillis() - t0);
                    }

                    return result;

                } catch (Throwable t) {
                    logger.warn("Unable to verify volume license: {}", t.getMessage());
                }
            }
        }

    } catch (Throwable t) {
        t.printStackTrace();
    }

    return false;
}
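
encryptSessionKey() and encryptData() are not shown in the excerpt above. The sketch below shows one common shape for such a hybrid scheme, RSA-encrypting the AES key and AES/CBC-encrypting the payload; the locally generated RSA key pair and the payload are illustrative stand-ins, not Structr's actual protocol.

import javax.crypto.Cipher;
import javax.crypto.KeyGenerator;
import javax.crypto.SecretKey;
import javax.crypto.spec.IvParameterSpec;

import java.nio.charset.StandardCharsets;
import java.security.KeyPair;
import java.security.KeyPairGenerator;
import java.security.SecureRandom;

public class HybridEncryptSketch {

    public static void main(String[] args) throws Exception {
        // A locally generated RSA key pair stands in for the license server's public key
        KeyPair serverKeys = KeyPairGenerator.getInstance("RSA").generateKeyPair();

        // Symmetric session key and IV, as in checkVolumeLicense()
        KeyGenerator kgen = KeyGenerator.getInstance("AES");
        kgen.init(128);
        SecretKey aesKey = kgen.generateKey();
        byte[] ivspec = new byte[16];
        new SecureRandom().nextBytes(ivspec);

        // "encryptSessionKey": RSA-encrypt the AES key bytes for the server
        Cipher rsa = Cipher.getInstance("RSA/ECB/PKCS1Padding");
        rsa.init(Cipher.ENCRYPT_MODE, serverKeys.getPublic());
        byte[] wrappedKey = rsa.doFinal(aesKey.getEncoded());

        // "encryptData": AES/CBC-encrypt the payload with the session key and IV
        Cipher aes = Cipher.getInstance("AES/CBC/PKCS5Padding");
        aes.init(Cipher.ENCRYPT_MODE, aesKey, new IvParameterSpec(ivspec));
        byte[] encryptedData = aes.doFinal("name=Example Corp".getBytes(StandardCharsets.UTF_8));

        System.out.println("wrapped key: " + wrappedKey.length + " bytes, payload: " + encryptedData.length + " bytes");
    }
}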

From source file:com.mirth.connect.server.controllers.DefaultConfigurationController.java

/**
 * Instantiates the encryptor and digester using the configuration properties. If the properties
 * are not found, reasonable defaults are used.
 * 
 * @param provider
 *            The provider to use (ex. BC)
 * @param keyStore
 *            The keystore from which to load the secret encryption key
 * @param keyPassword
 *            The secret key password
 * @throws Exception
 */
private void configureEncryption(Provider provider, KeyStore keyStore, char[] keyPassword) throws Exception {
    SecretKey secretKey = null;

    if (!keyStore.containsAlias(SECRET_KEY_ALIAS)) {
        logger.debug("encryption key not found, generating new one");
        KeyGenerator keyGenerator = KeyGenerator.getInstance(encryptionConfig.getEncryptionAlgorithm(),
                provider);
        keyGenerator.init(encryptionConfig.getEncryptionKeyLength());
        secretKey = keyGenerator.generateKey();
        KeyStore.SecretKeyEntry entry = new KeyStore.SecretKeyEntry(secretKey);
        keyStore.setEntry(SECRET_KEY_ALIAS, entry, new KeyStore.PasswordProtection(keyPassword));
    } else {
        logger.debug("found encryption key in keystore");
        secretKey = (SecretKey) keyStore.getKey(SECRET_KEY_ALIAS, keyPassword);
    }

    /*
     * Now that we have a secret key, store it in the encryption settings so that we can use it
     * to encrypt things client side.
     */
    encryptionConfig.setSecretKey(secretKey.getEncoded());

    encryptor = new KeyEncryptor();
    encryptor.setProvider(provider);
    encryptor.setKey(secretKey);
    encryptor.setFormat(Output.BASE64);

    digester = new Digester();
    digester.setProvider(provider);
    digester.setAlgorithm(encryptionConfig.getDigestAlgorithm());
    digester.setFormat(Output.BASE64);
}
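
A self-contained sketch of the generate-or-load pattern used above, with an in-memory JCEKS keystore; the alias, password, algorithm, and key length are illustrative.

import javax.crypto.KeyGenerator;
import javax.crypto.SecretKey;

import java.security.KeyStore;

public class SecretKeyStoreSketch {

    public static void main(String[] args) throws Exception {
        char[] keyPassword = "key-password".toCharArray();
        String alias = "encryption-key";

        // JCEKS keystores can hold SecretKeyEntry instances; start with an empty, in-memory one
        KeyStore keyStore = KeyStore.getInstance("JCEKS");
        keyStore.load(null, null);

        if (!keyStore.containsAlias(alias)) {
            // No key yet: generate one and store it under the alias
            KeyGenerator keyGenerator = KeyGenerator.getInstance("AES");
            keyGenerator.init(128);
            SecretKey secretKey = keyGenerator.generateKey();
            keyStore.setEntry(alias, new KeyStore.SecretKeyEntry(secretKey),
                    new KeyStore.PasswordProtection(keyPassword));
        }

        // Either way, load the key back out of the keystore
        SecretKey loaded = (SecretKey) keyStore.getKey(alias, keyPassword);
        System.out.println("loaded " + loaded.getAlgorithm() + " key, " + loaded.getEncoded().length * 8 + " bits");
    }
}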

From source file:org.apache.hadoop.mapreduce.v2.app.MRAppMaster.java

/**
 * Obtain the tokens needed by the job and put them in the UGI
 * @param conf
 */
protected void initJobCredentialsAndUGI(Configuration conf) {

    try {
        this.currentUser = UserGroupInformation.getCurrentUser();
        this.jobCredentials = ((JobConf) conf).getCredentials();
        if (CryptoUtils.isEncryptedSpillEnabled(conf)) {
            int keyLen = conf.getInt(MRJobConfig.MR_ENCRYPTED_INTERMEDIATE_DATA_KEY_SIZE_BITS,
                    MRJobConfig.DEFAULT_MR_ENCRYPTED_INTERMEDIATE_DATA_KEY_SIZE_BITS);
            KeyGenerator keyGen = KeyGenerator.getInstance(INTERMEDIATE_DATA_ENCRYPTION_ALGO);
            keyGen.init(keyLen);
            encryptedSpillKey = keyGen.generateKey().getEncoded();
        } else {
            encryptedSpillKey = new byte[] { 0 };
        }
    } catch (IOException e) {
        throw new YarnRuntimeException(e);
    } catch (NoSuchAlgorithmException e) {
        throw new YarnRuntimeException(e);
    }
}

From source file:com.datatorrent.lib.io.fs.AbstractFileOutputOperatorTest.java

@Test
public void testChainFilters() throws NoSuchAlgorithmException, IOException {
    EvenOddHDFSExactlyOnceWriter writer = new EvenOddHDFSExactlyOnceWriter();
    KeyGenerator keygen = KeyGenerator.getInstance("AES");
    keygen.init(128);
    final SecretKey secretKey = keygen.generateKey();
    byte[] iv = "TestParam16bytes".getBytes();
    final IvParameterSpec ivps = new IvParameterSpec(iv);
    FilterStreamProvider.FilterChainStreamProvider<FilterOutputStream, OutputStream> chainStreamProvider = new FilterStreamProvider.FilterChainStreamProvider<FilterOutputStream, OutputStream>();
    chainStreamProvider.addStreamProvider(new FilterStreamCodec.GZipFilterStreamProvider());

    // The filter is to keep track of the offsets to handle multi member gzip issue with openjdk
    // http://bugs.java.com/bugdatabase/view_bug.do?bug_id=4691425
    final CounterFilterStreamContext evenCounterContext = new CounterFilterStreamContext();
    final CounterFilterStreamContext oddCounterContext = new CounterFilterStreamContext();
    chainStreamProvider.addStreamProvider(
            new FilterStreamProvider.SimpleFilterReusableStreamProvider<CounterFilterOutputStream, OutputStream>() {
                @Override
                protected FilterStreamContext<CounterFilterOutputStream> createFilterStreamContext(
                        OutputStream outputStream) throws IOException {
                    if (evenCounterContext.isDoInit()) {
                        evenCounterContext.init(outputStream);
                        return evenCounterContext;
                    } else {
                        oddCounterContext.init(outputStream);
                        return oddCounterContext;
                    }
                }
            });
    chainStreamProvider.addStreamProvider(
            new FilterStreamProvider.SimpleFilterReusableStreamProvider<CipherOutputStream, OutputStream>() {
                @Override
                protected FilterStreamContext<CipherOutputStream> createFilterStreamContext(
                        OutputStream outputStream) throws IOException {
                    try {
                        Cipher cipher = Cipher.getInstance("AES/CBC/PKCS5Padding");
                        cipher.init(Cipher.ENCRYPT_MODE, secretKey, ivps);
                        return new FilterStreamCodec.CipherFilterStreamContext(outputStream, cipher);
                    } catch (Exception e) {
                        throw new IOException(e);
                    }
                }
            });
    writer.setFilterStreamProvider(chainStreamProvider);

    File evenFile = new File(testMeta.getDir(), EVEN_FILE);
    File oddFile = new File(testMeta.getDir(), ODD_FILE);

    List<Long> evenOffsets = new ArrayList<Long>();
    List<Long> oddOffsets = new ArrayList<Long>();

    writer.setFilePath(testMeta.getDir());
    writer.setAlwaysWriteToTmp(false);
    writer.setup(testMeta.testOperatorContext);

    for (int i = 0; i < 10; ++i) {
        writer.beginWindow(i);
        for (int j = 0; j < 1000; ++j) {
            writer.input.put(i);
        }
        writer.endWindow();
        if ((i % 2) == 1) {
            writer.beforeCheckpoint(i);
            evenOffsets.add(evenCounterContext.getCounter());
            oddOffsets.add(oddCounterContext.getCounter());
        }
    }

    writer.teardown();

    /*
    evenOffsets.add(evenFile.length());
    oddOffsets.add(oddFile.length());
    */

    checkCompressedFile(evenFile, evenOffsets, 0, 5, 1000, secretKey, iv);
    checkCompressedFile(oddFile, oddOffsets, 1, 5, 1000, secretKey, iv);
}
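
The test writes GZIP-compressed, AES/CBC-encrypted output, so reading it back means reversing the filters. The sketch below shows such a compress-encrypt round trip with plain java.util.zip and javax.crypto streams; the filter order and the sample data are illustrative and may differ from the provider chain in the test.

import javax.crypto.Cipher;
import javax.crypto.CipherInputStream;
import javax.crypto.CipherOutputStream;
import javax.crypto.KeyGenerator;
import javax.crypto.SecretKey;
import javax.crypto.spec.IvParameterSpec;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.nio.charset.StandardCharsets;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;

public class CompressEncryptRoundTrip {

    public static void main(String[] args) throws Exception {
        KeyGenerator keygen = KeyGenerator.getInstance("AES");
        keygen.init(128);
        SecretKey secretKey = keygen.generateKey();
        IvParameterSpec ivps = new IvParameterSpec("TestParam16bytes".getBytes(StandardCharsets.UTF_8));

        // Write path: GZIP-compress, then encrypt the compressed bytes
        Cipher encrypt = Cipher.getInstance("AES/CBC/PKCS5Padding");
        encrypt.init(Cipher.ENCRYPT_MODE, secretKey, ivps);
        ByteArrayOutputStream sink = new ByteArrayOutputStream();
        GZIPOutputStream out = new GZIPOutputStream(new CipherOutputStream(sink, encrypt));
        out.write("hello filter chain".getBytes(StandardCharsets.UTF_8));
        out.close();

        // Read path: decrypt, then decompress
        Cipher decrypt = Cipher.getInstance("AES/CBC/PKCS5Padding");
        decrypt.init(Cipher.DECRYPT_MODE, secretKey, ivps);
        GZIPInputStream in = new GZIPInputStream(
                new CipherInputStream(new ByteArrayInputStream(sink.toByteArray()), decrypt));
        ByteArrayOutputStream plain = new ByteArrayOutputStream();
        byte[] buf = new byte[4096];
        int numRead;
        while ((numRead = in.read(buf)) != -1) {
            plain.write(buf, 0, numRead);
        }
        in.close();

        System.out.println(plain.toString("UTF-8"));
    }
}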

From source file:com.cloud.user.AccountManagerImpl.java

private String createUserSecretKey(long userId) {
    try {
        UserVO updatedUser = _userDao.createForUpdate();
        String encodedKey = null;
        int retryLimit = 10;
        UserVO userBySecretKey = null;
        do {
            KeyGenerator generator = KeyGenerator.getInstance("HmacSHA1");
            SecretKey key = generator.generateKey();
            encodedKey = Base64.encodeBase64URLSafeString(key.getEncoded());
            userBySecretKey = _userDao.findUserBySecretKey(encodedKey);
            retryLimit--;
        } while ((userBySecretKey != null) && (retryLimit >= 0));

        if (userBySecretKey != null) {
            return null;
        }

        updatedUser.setSecretKey(encodedKey);
        _userDao.update(userId, updatedUser);
        return encodedKey;
    } catch (NoSuchAlgorithmException ex) {
        s_logger.error("error generating secret key for user id=" + userId, ex);
    }
    return null;
}

From source file:com.cloud.user.AccountManagerImpl.java

private String createUserApiKey(long userId) {
    try {
        UserVO updatedUser = _userDao.createForUpdate();

        String encodedKey = null;
        Pair<User, Account> userAcct = null;
        int retryLimit = 10;
        do {
            // FIXME: what algorithm should we use for API keys?
            KeyGenerator generator = KeyGenerator.getInstance("HmacSHA1");
            SecretKey key = generator.generateKey();
            encodedKey = Base64.encodeBase64URLSafeString(key.getEncoded());
            userAcct = _accountDao.findUserAccountByApiKey(encodedKey);
            retryLimit--;
        } while ((userAcct != null) && (retryLimit >= 0));

        if (userAcct != null) {
            return null;
        }
        updatedUser.setApiKey(encodedKey);
        _userDao.update(userId, updatedUser);
        return encodedKey;
    } catch (NoSuchAlgorithmException ex) {
        s_logger.error("error generating secret key for user id=" + userId, ex);
    }
    return null;
}
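
Both AccountManagerImpl methods use an HmacSHA1 KeyGenerator purely as a source of random bytes for a URL-safe token. The sketch below shows the same retry-until-unique loop written directly against SecureRandom; the in-memory set stands in for the DAO lookups and the key length is illustrative.

import java.security.SecureRandom;
import java.util.Base64;
import java.util.HashSet;
import java.util.Set;

public class ApiKeySketch {

    // Stand-in for _userDao.findUserBySecretKey() / _accountDao.findUserAccountByApiKey()
    private static final Set<String> existingKeys = new HashSet<String>();

    static String createKey() {
        SecureRandom random = new SecureRandom();
        byte[] bytes = new byte[20]; // illustrative key length
        String encodedKey;
        int retryLimit = 10;
        do {
            random.nextBytes(bytes);
            encodedKey = Base64.getUrlEncoder().withoutPadding().encodeToString(bytes);
            retryLimit--;
        } while (existingKeys.contains(encodedKey) && retryLimit >= 0);

        if (existingKeys.contains(encodedKey)) {
            return null; // gave up: every candidate collided
        }
        existingKeys.add(encodedKey);
        return encodedKey;
    }

    public static void main(String[] args) {
        System.out.println(createKey());
    }
}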