Example usage for com.amazonaws.auth InstanceProfileCredentialsProvider InstanceProfileCredentialsProvider

Introduction

On this page you can find example usage for the com.amazonaws.auth InstanceProfileCredentialsProvider constructor, InstanceProfileCredentialsProvider().

Prototype

@Deprecated
public InstanceProfileCredentialsProvider() 
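
A note on the prototype: this no-argument constructor is deprecated in recent 1.x SDK releases in favor of the shared factory method InstanceProfileCredentialsProvider.getInstance(). A minimal sketch of both styles follows; the AmazonS3Client shown stands in for any service client that accepts an AWSCredentialsProvider.

// Deprecated, but still common in the examples below
AWSCredentialsProvider legacy = new InstanceProfileCredentialsProvider();

// Preferred in newer 1.x releases: a shared, lazily refreshed instance
AWSCredentialsProvider shared = InstanceProfileCredentialsProvider.getInstance();

// Any AWS service client that accepts a provider will work; S3 is purely illustrative
AmazonS3Client s3 = new AmazonS3Client(shared);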

Usage

From source file:com.erudika.para.utils.Config.java

License:Apache License

private static String[] getAwsCredentials() {
    InstanceProfileCredentialsProvider ipcp = new InstanceProfileCredentialsProvider();
    try {
        ipcp.refresh();
        return new String[] { ipcp.getCredentials().getAWSAccessKeyId(),
                ipcp.getCredentials().getAWSSecretKey() };
    } catch (Exception e) {
        return new String[] { "", "" };
    }
}
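
A hedged follow-up sketch of how the returned key pair might be consumed, assuming it is called from inside Config; the S3 client is used purely for illustration.

String[] keys = getAwsCredentials();
if (!keys[0].isEmpty() && !keys[1].isEmpty()) {
    BasicAWSCredentials awsCreds = new BasicAWSCredentials(keys[0], keys[1]);
    // Any client accepting AWSCredentials would do here
    AmazonS3Client s3 = new AmazonS3Client(awsCreds);
}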

From source file:com.eucalyptus.simpleworkflow.common.client.WorkflowClientStandalone.java

License:Open Source License

private AWSCredentialsProvider getCredentialsProvider() {
    AWSCredentialsProvider provider = null;
    if (this.credentialPropertyFile != null) {
        provider = new AWSCredentialsProvider() {
            private String accessKey = null;
            private String secretAccessKey = null;

            private void readProperty() throws FileNotFoundException, IOException {
                final FileInputStream stream = new FileInputStream(new File(credentialPropertyFile));
                try {
                    Properties credentialProperties = new Properties();
                    credentialProperties.load(stream);

                    if (credentialProperties.getProperty("accessKey") == null
                            || credentialProperties.getProperty("secretKey") == null) {
                        throw new IllegalArgumentException("The specified file (" + credentialPropertyFile
                                + ") doesn't contain the expected properties 'accessKey' "
                                + "and 'secretKey'.");
                    }
                    accessKey = credentialProperties.getProperty("accessKey");
                    secretAccessKey = credentialProperties.getProperty("secretKey");
                } finally {
                    try {
                        stream.close();
                    } catch (final IOException e) {
                        // best-effort close; ignore secondary failures
                    }
                }
            }

            @Override
            public AWSCredentials getCredentials() {
                if (this.accessKey == null || this.secretAccessKey == null) {
                    try {
                        readProperty();
                    } catch (final Exception ex) {
                        throw new RuntimeException("Failed to read credentials file", ex);
                    }
                }
                return new BasicAWSCredentials(accessKey, secretAccessKey);
            }

            @Override
            public void refresh() {
                // Nulling the access key forces readProperty() on the next getCredentials() call
                this.accessKey = null;
            }
        };
    } else {
        provider = new InstanceProfileCredentialsProvider();
    }

    return provider;
}
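
The provider chosen above would then be handed to a service client. A hedged sketch, assuming the standalone client wraps AmazonSimpleWorkflowClient; the endpoint value is a placeholder.

AWSCredentialsProvider provider = getCredentialsProvider();
AmazonSimpleWorkflowClient swfClient = new AmazonSimpleWorkflowClient(provider);
swfClient.setEndpoint("http://localhost:8773/services/SimpleWorkflow"); // placeholder endpoint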

From source file:com.facebook.presto.dynamodb.DynamodbConfig.java

License:Apache License

public AWSCredentialsProvider getCredentials() {
    if (accessKey == null && secretAccessKey == null) {
        return new InstanceProfileCredentialsProvider();
    }
    return new StaticCredentialsProvider(new BasicAWSCredentials(getAccessKey(), getSecretAccessKey()));
}
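
A minimal sketch of how the provider returned above is typically consumed when building the DynamoDB client; the no-argument DynamodbConfig construction is an assumption.

DynamodbConfig config = new DynamodbConfig(); // assumed no-arg constructor
AmazonDynamoDBClient dynamoDb = new AmazonDynamoDBClient(config.getCredentials());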

From source file:com.facebook.presto.hive.s3.PrestoS3FileSystem.java

License:Apache License

private AWSCredentialsProvider getAwsCredentialsProvider(URI uri, Configuration conf) {
    Optional<AWSCredentials> credentials = getAwsCredentials(uri, conf);
    if (credentials.isPresent()) {
        return new AWSStaticCredentialsProvider(credentials.get());
    }

    if (useInstanceCredentials) {
        return new InstanceProfileCredentialsProvider();
    }

    String providerClass = conf.get(S3_CREDENTIALS_PROVIDER);
    if (!isNullOrEmpty(providerClass)) {
        return getCustomAWSCredentialsProvider(uri, conf, providerClass);
    }

    throw new RuntimeException("S3 credentials not configured");
}
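
For completeness, a sketch of what getCustomAWSCredentialsProvider might look like; the (URI, Configuration) constructor convention is an assumption and is not taken from the snippet above.

private static AWSCredentialsProvider getCustomAWSCredentialsProvider(URI uri, Configuration conf,
        String providerClass) {
    try {
        // Load the configured class and instantiate it via an assumed (URI, Configuration) constructor
        Class<? extends AWSCredentialsProvider> providerType =
                conf.getClassByName(providerClass).asSubclass(AWSCredentialsProvider.class);
        return providerType.getConstructor(URI.class, Configuration.class).newInstance(uri, conf);
    } catch (ReflectiveOperationException e) {
        throw new RuntimeException("Error creating an instance of " + providerClass, e);
    }
}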

From source file:com.getcake.sparkjdbc.SparkJDBCServer.java

License:Apache License

public void start(String[] args) throws Exception {
    int sparkWebMinThreads, sparkWebMaxThreads, sparkWebIdleTimeoutMillis, sparkWebPort;

    try {
        if (args == null || args.length != 1) {
            log("=== SparkJDBCServer " + CODE_VERSION + " - usage: <spark config file>");
            System.exit(-1);
        }
        sparkConfigFileName = args[0];
        log("=== SparkJDBCServer " + CODE_VERSION + " - sparkConfigFileName: " + sparkConfigFileName);

        geoController = GeoController.getInstance();
        // properties = AwsUtil.loadProperties(sparkConfigFileName);       
        properties = geoController.initWithMsSqlDao(sparkConfigFileName);
        sparkMaster = properties.getProperty("spark.master");
        appName = properties.getProperty("spark.app.name");

        sparkConf = new SparkConf();
        if ("local".equalsIgnoreCase(sparkMaster)) {
            sparkConf.setMaster(sparkMaster);
            sparkConf.setAppName(appName);
            sparkConf.set("spark.executor.memory", properties.getProperty("spark.executor.memory"));
            sparkConf.set("spark.driver.memory", properties.getProperty("spark.driver.memory"));
        }

        log("sparkMaster: " + sparkMaster);
        log("spark.executor.memory: " + sparkConf.get("spark.executor.memory"));
        log("spark.driver.memory: " + sparkConf.get("spark.driver.memory"));

        javaSparkContext = new JavaSparkContext(sparkConf);

        if ("ProfileCredentialsProvider".equalsIgnoreCase(properties.getProperty("credentialsProvider"))) {
            log("credentialsProvider: ProfileCredentialsProvider");
            credentialsProvider = new ProfileCredentialsProvider();
        } else {
            log("credentialsProvider: InstanceProfileCredentialsProvider");
            credentialsProvider = new InstanceProfileCredentialsProvider();
        }
        hadoopConf = javaSparkContext.sc().hadoopConfiguration();

        hadoopConf.set("fs.s3.awsAccessKeyId", credentialsProvider.getCredentials().getAWSAccessKeyId());
        hadoopConf.set("fs.s3.awsSecretAccessKey", credentialsProvider.getCredentials().getAWSSecretKey());
        hadoopConf.set("fs.s3.impl", "org.apache.hadoop.fs.s3native.NativeS3FileSystem");

        hadoopConf.set("fs.s3n.awsAccessKeyId", credentialsProvider.getCredentials().getAWSAccessKeyId());
        hadoopConf.set("fs.s3n.awsSecretAccessKey", credentialsProvider.getCredentials().getAWSSecretKey());
        hadoopConf.set("fs.s3n.impl", "org.apache.hadoop.fs.s3native.NativeS3FileSystem");

        hadoopConf.set("fs.s3a.awsxxAccessKeyId", credentialsProvider.getCredentials().getAWSAccessKeyId());
        hadoopConf.set("fs.s3a.awsSecretAccessKey", credentialsProvider.getCredentials().getAWSSecretKey());
        hadoopConf.set("fs.s3a.impl", "org.apache.hadoop.fs.s3a.S3AFileSystem");
        hadoopConf.set("fs.s3a.connection.ssl.enabled", "false");
        hadoopConf.set("fs.s3a.connection.maximum", "false");

        hiveContext = new HiveContext(javaSparkContext.sc());

        // hiveContext.sparkContext().addSparkListener(listener);
        // javaSparkContext.add

        // DataFrame citiesDF = hiveContext.read().load("C:/Projects/GeoServices/geodata/cities.csv");
        // citiesDF.registerTempTable("cities");

        // hiveContext.sql("CREATE TABLE IF NOT EXISTS cities (locationId INT, country STRING, region String, city String, latitude float, longitude float, metroCode String )");
        // hiveContext.sql("LOAD DATA LOCAL INPATH 'C:/Projects/GeoServices/geodata/cities.csv' INTO TABLE cities");
        // hiveContext.sql("CREATE TABLE IF NOT EXISTS cities (country STRING)");
        // hiveContext.sql("LOAD DATA LOCAL INPATH 'C:/Projects/GeoServices/geodata/cities-sample.csv' INTO TABLE cities");
        // log ("HiveThriftServer2.startWithContext loaded table cities");
        // HiveThriftServer2.listener_$eq(arg0);

        topLevelDataPath = properties.getProperty("topLevelDataPath");
        region = properties.getProperty("region");
        if (region != null) {
            topLevelDataPath += "-" + region;
        }

        initGeoData(topLevelDataPath);

        HiveThriftServer2.startWithContext(hiveContext);
        log("=== Spark JDBC Server started");

        sparkWebMinThreads = Integer.parseInt(properties.getProperty("sparkWebMinThreads"));
        sparkWebMaxThreads = Integer.parseInt(properties.getProperty("sparkWebMaxThreads"));
        sparkWebIdleTimeoutMillis = Integer.parseInt(properties.getProperty("sparkWebIdleTimeoutMillis"));
        sparkWebPort = Integer.parseInt(properties.getProperty("sparkWebPort"));
        Spark.port(sparkWebPort);
        Spark.threadPool(sparkWebMaxThreads, sparkWebMinThreads, sparkWebIdleTimeoutMillis);

        get("/geoservices/status", (request, response) -> {
            return "Spark JDBC Server Working";
        });

        post("/sparksql/geosourcefilepath", (request, response) -> {
            geoSourceFilePath = request.queryParams("geosourcefilepath");
            return "geoSourceFilePath set to " + geoSourceFilePath;
        });

        get("/sparksql/geosourcefilepath", (request, response) -> {
            return geoSourceFilePath;
        });

        post("/sparksql/geodataversion", (request, response) -> {
            geoDataVersion = request.queryParams("geodataversion");
            return geoDataVersion;
        });

        get("/sparksql/geodataversion", (request, response) -> {
            return geoDataVersion;
        });

        post("/sparksql/geodata", (request, response) -> {
            try {
                geoDataVersion = request.queryParams("geodataversion");
                return loadGeoDataByVersion(geoDataVersion);
            } catch (Throwable exc) {
                response.status(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
                exc.printStackTrace();
                log("/sparksql/geodata", exc);
                return exc.getLocalizedMessage();
            }
        });

        delete("/sparksql/geodata", (request, response) -> {
            try {
                geoDataVersion = request.queryParams("geodataversion");
                return unloadGeoDataByVersion(geoDataVersion);
            } catch (Throwable exc) {
                response.status(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
                exc.printStackTrace();
                log("/sparksql/geodata", exc);
                return exc.getLocalizedMessage();
            }
        });

        get("/sparksql/geodata", (request, response) -> {
            return geoDataVersion;
        });

        post("/sparksql/geotable", (request, response) -> {
            String tableName = null, fullPathTableName = null, respMsg;

            try {
                tableName = request.queryParams("tablename");
                fullPathTableName = geoSourceFilePath + geoDataVersion + "/" + tableName + geoDataVersion
                        + ".csv";
                return loadTable(tableName + geoDataVersion, fullPathTableName);
            } catch (Throwable exc) {
                response.status(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
                respMsg = "error loading table: " + fullPathTableName + " - err:" + exc.getLocalizedMessage();
                exc.printStackTrace();
                log("/sparksql/loadtable", exc);
                return respMsg;
            }
        });

        // new 
        post("/sparksql/table", (request, response) -> {
            String tableName = null, fullPathTableName = null, respMsg, fileName, metaFileName, fileListName;
            Boolean tmpheaderInCSVFileFlag = headerInCSVFileFlag;

            try {
                tableName = request.queryParams("tablename");
                metaFileName = request.queryParams("metafilename");
                fileListName = request.queryParams("filelistname");

                fileName = request.queryParams("filename");
                /* headerInCSVFileStr = request.queryParams("headerincsvfile");
                if (headerInCSVFileStr != null) {
                   headerInCSVFileFlag = Boolean.parseBoolean(headerInCSVFileStr);
                } */
                fullPathTableName = geoSourceFilePath + "/" + fileName;
                metaFileName = geoSourceFilePath + "/" + metaFileName;
                return loadFilesWithMeta(tableName, fullPathTableName, metaFileName, fileListName);
            } catch (Throwable exc) {
                response.status(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
                respMsg = "error loading table: " + fullPathTableName + " - err:" + exc.getLocalizedMessage();
                exc.printStackTrace();
                log("/sparksql/loadtable", exc);
                return respMsg;
            } finally {
                // headerInCSVFileFlag = tmpheaderInCSVFileFlag;
            }
        });

        delete("/sparksql/table", (request, response) -> {
            String tableName = null, fullPathTableName = "N/A", respMsg, fileName;

            try {
                tableName = request.queryParams("tablename");
                return unloadTable(tableName, fullPathTableName);
            } catch (Throwable exc) {
                response.status(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
                respMsg = "error loading table: " + fullPathTableName + " - err:" + exc.getLocalizedMessage();
                exc.printStackTrace();
                log("/sparksql/loadtable", exc);
                return respMsg;
            }
        });

        post("/sparksql/mssqldata", (request, response) -> {
            StringBuilder respMsg;

            try {
                respMsg = new StringBuilder();
                geoDataVersion = request.queryParams("geodataversion");
                respMsg.append(geoController.exportMsSqlGeoData(geoDataVersion));
                respMsg.append(System.getProperty("line.separator"));
                return respMsg.append(loadGeoDataByVersion(geoDataVersion));
            } catch (Throwable exc) {
                response.status(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
                exc.printStackTrace();
                log("/sparksql/geodata", exc);
                return exc.getLocalizedMessage();
            }
        });

        post("/sparksql/mssqlversioncheck", (request, response) -> {
            StringBuilder respMsg;
            MsSqlExportCheckResp msSqlExportCheckResp;
            ObjectMapper jsonMapper;

            try {
                jsonMapper = new ObjectMapper();
                respMsg = new StringBuilder();
                msSqlExportCheckResp = geoController.exportMsSqlGeoData();
                if (msSqlExportCheckResp.newIpVersion == null
                        || msSqlExportCheckResp.newIpVersion.trim().length() == 0) {
                    return jsonMapper.writeValueAsString(msSqlExportCheckResp);
                }
                respMsg.append(msSqlExportCheckResp.detailMsg);
                respMsg.append(System.getProperty("line.separator"));
                respMsg.append(loadGeoDataByVersion(msSqlExportCheckResp.newIpVersion));
                msSqlExportCheckResp.detailMsg = respMsg.toString();
                return jsonMapper.writeValueAsString(msSqlExportCheckResp);
            } catch (Throwable exc) {
                response.status(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
                log("", exc);
                return exc.getLocalizedMessage();
            }
        });

        log("=== Spark JDBC Web Services started");
    } catch (Throwable exc) {
        log("main", exc);
    }
}

From source file:com.github.scizeron.logback.appender.SqsAppender.java

License:Apache License

/**
 * @return a credentials provider chain that tries appender-supplied credentials first,
 *         then system properties, environment variables, the default profile, and
 *         finally the EC2 instance profile
 */
private AWSCredentialsProvider getCredentials() {
    return new AWSCredentialsProviderChain(new StaticCredentialsProvider(new AppenderCredentials()),
            new SystemPropertiesCredentialsProvider(), new EnvironmentVariableCredentialsProvider(),
            new ProfileCredentialsProvider(), new InstanceProfileCredentialsProvider());
}
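
The chain is evaluated in order, so explicit appender credentials win over system properties, environment variables, the default profile, and finally the instance profile. A hedged usage sketch; the queue URL is illustrative.

AmazonSQSClient sqs = new AmazonSQSClient(getCredentials());
// Illustrative queue URL and message body
sqs.sendMessage("https://sqs.us-east-1.amazonaws.com/123456789012/example-queue", "log event");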

From source file:com.gu.logback.appender.kinesis.helpers.CustomCredentialsProviderChain.java

License:Open Source License

public CustomCredentialsProviderChain() {
    super(new ClasspathPropertiesFileCredentialsProvider(), new InstanceProfileCredentialsProvider(),
            new SystemPropertiesCredentialsProvider(), new EnvironmentVariableCredentialsProvider());
}

From source file:com.hazelcast.samples.amazon.elasticbeanstalk.HazelcastInstanceFactory.java

License:Open Source License

public HazelcastInstanceFactory() {
    try {
        amazonEC2 = new AmazonEC2Client(new InstanceProfileCredentialsProvider());
    } catch (AmazonClientException ace) {
        LOGGER.error("Couldn't authenticate with AWS; local cluster configuration will be used.", ace);
    }
}

From source file:com.ipcglobal.awscdh.util.Utils.java

License:Apache License

/**
 * Initializes the AWS credentials provider.
 *
 * @param credentialsFileName
 *            the credentials file name
 * @return the AWS credentials provider
 */
public static AWSCredentialsProvider initCredentials(String credentialsFileName) {
    // Get credentials from IMDS. If unsuccessful, get them from the
    // credential profiles file.
    AWSCredentialsProvider credentialsProvider = null;
    try {
        credentialsProvider = new InstanceProfileCredentialsProvider();
        // Verify we can fetch credentials from the provider
        credentialsProvider.getCredentials();
        log.info("Obtained credentials from the IMDS.");
    } catch (AmazonClientException e) {
        log.warn("Unable to obtain credentials from the IMDS, trying credentialsFileName");
        // If the credentialsFileName contains a path, assume it contains
        // the entire drive:/path/name.ext
        // this is necessary when running as a service on Windows
        String credentialsPathNameExt = null;
        // safest to check for both
        if (credentialsFileName.indexOf("/") > -1 || credentialsFileName.indexOf("\\") > -1)
            credentialsPathNameExt = credentialsFileName;
        else
            credentialsPathNameExt = System.getProperty("user.home") + System.getProperty("file.separator")
                    + ".aws" + System.getProperty("file.separator") + credentialsFileName;
        credentialsProvider = new PropertiesFileCredentialsProvider(credentialsPathNameExt);
        log.info("Obtained credentials from the credentialsFileName file.");
    }
    return credentialsProvider;
}
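
A hedged sketch of how the returned provider might be used; the file name and the choice of EC2 client are illustrative.

AWSCredentialsProvider provider = Utils.initCredentials("credentials.properties");
AmazonEC2Client ec2 = new AmazonEC2Client(provider);
DescribeInstancesResult result = ec2.describeInstances();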

From source file:com.netflix.eureka.aws.AwsAsgUtil.java

License:Apache License

private Credentials initializeStsSession(String asgAccount) {
    AWSSecurityTokenService sts = new AWSSecurityTokenServiceClient(new InstanceProfileCredentialsProvider());
    String region = clientConfig.getRegion();
    if (!region.equals("us-east-1")) {
        sts.setEndpoint("sts." + region + ".amazonaws.com");
    }

    String roleName = serverConfig.getListAutoScalingGroupsRoleName();
    String roleArn = "arn:aws:iam::" + asgAccount + ":role/" + roleName;

    AssumeRoleResult assumeRoleResult = sts.assumeRole(
            new AssumeRoleRequest().withRoleArn(roleArn).withRoleSessionName("sts-session-" + asgAccount));

    return assumeRoleResult.getCredentials();
}
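
The session Credentials returned above are typically wrapped in BasicSessionCredentials so a client can act in the assumed-role account. A minimal sketch; the Auto Scaling client is illustrative.

Credentials stsCredentials = initializeStsSession(asgAccount);
BasicSessionCredentials sessionCredentials = new BasicSessionCredentials(
        stsCredentials.getAccessKeyId(),
        stsCredentials.getSecretAccessKey(),
        stsCredentials.getSessionToken());
AmazonAutoScalingClient autoScaling = new AmazonAutoScalingClient(sessionCredentials);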