List of usage examples for the com.amazonaws.auth.InstanceProfileCredentialsProvider constructor (InstanceProfileCredentialsProvider()).
@Deprecated
public InstanceProfileCredentialsProvider()
From source file:org.eluder.logback.ext.aws.core.AwsSupport.java
License:Open Source License
/**
 * Builds a credentials provider chain that is consulted in order: environment
 * variables, JVM system properties, the statically supplied credentials (or a
 * placeholder {@code NullCredentials} when none were given), the shared
 * profile file, and finally the EC2 instance profile.
 *
 * @param credentials explicit credentials to insert into the chain; may be {@code null}
 * @return the assembled {@link AWSCredentialsProviderChain}
 */
public AWSCredentialsProvider getCredentials(AWSCredentials credentials) {
    return new AWSCredentialsProviderChain(
            new EnvironmentVariableCredentialsProvider(),
            new SystemPropertiesCredentialsProvider(),
            new StaticCredentialsProvider(credentials == null ? new NullCredentials() : credentials),
            new ProfileCredentialsProvider(),
            // FIX: getInstance() replaces the deprecated no-arg constructor and
            // shares one cached provider instead of creating one per call
            // (requires AWS SDK >= 1.11.16).
            InstanceProfileCredentialsProvider.getInstance());
}
From source file:org.kuali.maven.wagon.auth.MavenAwsCredentialsProviderChain.java
License:Educational Community License
private static AWSCredentialsProvider[] getProviders(Optional<AuthenticationInfo> auth) { List<AWSCredentialsProvider> providers = new ArrayList<AWSCredentialsProvider>(); // System properties always win providers.add(new SystemPropertiesCredentialsProvider()); // Then fall through to environment variables providers.add(new EnvironmentVariableCredentialsProvider()); // Then fall through to settings.xml providers.add(new AuthenticationInfoCredentialsProvider(auth)); // Then fall through to Amazon's EC2 Instance Metadata Service // http://docs.aws.amazon.com/AWSSdkDocsJava/latest/DeveloperGuide/java-dg-roles.html // This allows you setup an IAM role, attach that role to an EC2 Instance at launch time, // and thus automatically provide the wagon with the credentials it needs providers.add(new InstanceProfileCredentialsProvider()); return providers.toArray(new AWSCredentialsProvider[providers.size()]); }
From source file:org.lambadaframework.wagon.AuthenticationInfoAWSCredentialsProviderChain.java
License:Apache License
/**
 * Credentials chain consulted in order: shared profile file, environment
 * variables, JVM system properties, and finally the EC2 instance profile.
 *
 * <p>BUG FIX: {@code InstanceProfileCredentialsProvider} was previously listed
 * twice — both first and last. Listing it first made the (slow, network-backed)
 * EC2 metadata service shadow environment variables and system properties; the
 * duplicate leading entry is removed so it is the fallback of last resort.
 *
 * <p>NOTE(review): {@code authenticationInfo} is accepted but never used here —
 * confirm whether a settings.xml-backed provider was meant to be in the chain.
 */
AuthenticationInfoAWSCredentialsProviderChain(AuthenticationInfo authenticationInfo) {
    super(new ProfileCredentialsProvider(),
            new EnvironmentVariableCredentialsProvider(),
            new SystemPropertiesCredentialsProvider(),
            // getInstance() replaces the deprecated no-arg constructor.
            InstanceProfileCredentialsProvider.getInstance());
}
From source file:org.nuxeo.ecm.core.storage.sql.S3BinaryManager.java
License:Apache License
@Override protected void setupCloudClient() throws IOException { // Get settings from the configuration bucketName = getProperty(BUCKET_NAME_PROPERTY); bucketNamePrefix = MoreObjects.firstNonNull(getProperty(BUCKET_PREFIX_PROPERTY), StringUtils.EMPTY); String bucketRegion = getProperty(BUCKET_REGION_PROPERTY); if (isBlank(bucketRegion)) { bucketRegion = DEFAULT_BUCKET_REGION; }// ww w. ja v a2 s .com String awsID = getProperty(AWS_ID_PROPERTY); String awsSecret = getProperty(AWS_SECRET_PROPERTY); String proxyHost = Framework.getProperty(Environment.NUXEO_HTTP_PROXY_HOST); String proxyPort = Framework.getProperty(Environment.NUXEO_HTTP_PROXY_PORT); String proxyLogin = Framework.getProperty(Environment.NUXEO_HTTP_PROXY_LOGIN); String proxyPassword = Framework.getProperty(Environment.NUXEO_HTTP_PROXY_PASSWORD); int maxConnections = getIntProperty(CONNECTION_MAX_PROPERTY); int maxErrorRetry = getIntProperty(CONNECTION_RETRY_PROPERTY); int connectionTimeout = getIntProperty(CONNECTION_TIMEOUT_PROPERTY); int socketTimeout = getIntProperty(SOCKET_TIMEOUT_PROPERTY); String keystoreFile = getProperty(KEYSTORE_FILE_PROPERTY); String keystorePass = getProperty(KEYSTORE_PASS_PROPERTY); String privkeyAlias = getProperty(PRIVKEY_ALIAS_PROPERTY); String privkeyPass = getProperty(PRIVKEY_PASS_PROPERTY); String endpoint = getProperty(ENDPOINT_PROPERTY); String sseprop = getProperty(SERVERSIDE_ENCRYPTION_PROPERTY); if (isNotBlank(sseprop)) { userServerSideEncryption = Boolean.parseBoolean(sseprop); } // Fallback on default env keys for ID and secret if (isBlank(awsID)) { awsID = System.getenv(AWS_ID_ENV); } if (isBlank(awsSecret)) { awsSecret = System.getenv(AWS_SECRET_ENV); } if (isBlank(bucketName)) { throw new RuntimeException("Missing conf: " + BUCKET_NAME_PROPERTY); } if (!isBlank(bucketNamePrefix) && !bucketNamePrefix.endsWith("/")) { log.warn(String.format("%s %s S3 bucket prefix should end by '/' " + ": added automatically.", BUCKET_PREFIX_PROPERTY, bucketNamePrefix)); 
bucketNamePrefix += "/"; } // set up credentials if (isBlank(awsID) || isBlank(awsSecret)) { awsCredentialsProvider = new InstanceProfileCredentialsProvider(); try { awsCredentialsProvider.getCredentials(); } catch (AmazonClientException e) { throw new RuntimeException("Missing AWS credentials and no instance role found"); } } else { awsCredentialsProvider = new BasicAWSCredentialsProvider(awsID, awsSecret); } // set up client configuration clientConfiguration = new ClientConfiguration(); if (isNotBlank(proxyHost)) { clientConfiguration.setProxyHost(proxyHost); } if (isNotBlank(proxyPort)) { clientConfiguration.setProxyPort(Integer.parseInt(proxyPort)); } if (isNotBlank(proxyLogin)) { clientConfiguration.setProxyUsername(proxyLogin); } if (proxyPassword != null) { // could be blank clientConfiguration.setProxyPassword(proxyPassword); } if (maxConnections > 0) { clientConfiguration.setMaxConnections(maxConnections); } if (maxErrorRetry >= 0) { // 0 is allowed clientConfiguration.setMaxErrorRetry(maxErrorRetry); } if (connectionTimeout >= 0) { // 0 is allowed clientConfiguration.setConnectionTimeout(connectionTimeout); } if (socketTimeout >= 0) { // 0 is allowed clientConfiguration.setSocketTimeout(socketTimeout); } // set up encryption encryptionMaterials = null; if (isNotBlank(keystoreFile)) { boolean confok = true; if (keystorePass == null) { // could be blank log.error("Keystore password missing"); confok = false; } if (isBlank(privkeyAlias)) { log.error("Key alias missing"); confok = false; } if (privkeyPass == null) { // could be blank log.error("Key password missing"); confok = false; } if (!confok) { throw new RuntimeException("S3 Crypto configuration incomplete"); } try { // Open keystore File ksFile = new File(keystoreFile); FileInputStream ksStream = new FileInputStream(ksFile); KeyStore keystore = KeyStore.getInstance(KeyStore.getDefaultType()); keystore.load(ksStream, keystorePass.toCharArray()); ksStream.close(); // Get keypair for alias if 
(!keystore.isKeyEntry(privkeyAlias)) { throw new RuntimeException("Alias " + privkeyAlias + " is missing or not a key alias"); } PrivateKey privKey = (PrivateKey) keystore.getKey(privkeyAlias, privkeyPass.toCharArray()); Certificate cert = keystore.getCertificate(privkeyAlias); PublicKey pubKey = cert.getPublicKey(); KeyPair keypair = new KeyPair(pubKey, privKey); // Get encryptionMaterials from keypair encryptionMaterials = new EncryptionMaterials(keypair); cryptoConfiguration = new CryptoConfiguration(); } catch (IOException | GeneralSecurityException e) { throw new RuntimeException("Could not read keystore: " + keystoreFile + ", alias: " + privkeyAlias, e); } } isEncrypted = encryptionMaterials != null; // Try to create bucket if it doesn't exist if (!isEncrypted) { amazonS3 = new AmazonS3Client(awsCredentialsProvider, clientConfiguration); } else { amazonS3 = new AmazonS3EncryptionClient(awsCredentialsProvider, new StaticEncryptionMaterialsProvider(encryptionMaterials), clientConfiguration, cryptoConfiguration); } if (isNotBlank(endpoint)) { amazonS3.setEndpoint(endpoint); } // Set region explicitely for regions that reguire Version 4 signature ArrayList<String> V4_ONLY_REGIONS = new ArrayList<String>(); V4_ONLY_REGIONS.add("eu-central-1"); V4_ONLY_REGIONS.add("ap-northeast-2"); if (V4_ONLY_REGIONS.contains(bucketRegion)) { amazonS3.setRegion(Region.getRegion(Regions.fromName(bucketRegion))); } try { if (!amazonS3.doesBucketExist(bucketName)) { amazonS3.createBucket(bucketName, bucketRegion); amazonS3.setBucketAcl(bucketName, CannedAccessControlList.Private); } } catch (AmazonClientException e) { throw new IOException(e); } // compat for NXP-17895, using "downloadfroms3", to be removed // these two fields have already been initialized by the base class initialize() // using standard property "directdownload" String dd = getProperty(DIRECTDOWNLOAD_PROPERTY_COMPAT); if (dd != null) { directDownload = Boolean.parseBoolean(dd); } int dde = 
getIntProperty(DIRECTDOWNLOAD_EXPIRE_PROPERTY_COMPAT); if (dde >= 0) { directDownloadExpire = dde; } transferManager = new TransferManager(amazonS3); abortOldUploads(); }
From source file:org.opendedup.sdfs.filestore.cloud.BatchAwsS3ChunkStore.java
License:Open Source License
@Override public void init(Element config) throws IOException { this.name = Main.cloudBucket.toLowerCase(); this.staged_sync_location.mkdirs(); try {/*from w ww . ja va2 s . com*/ if (config.hasAttribute("default-bucket-location")) { bucketLocation = RegionUtils.getRegion(config.getAttribute("default-bucket-location")); } if (config.hasAttribute("connection-check-interval")) { this.checkInterval = Integer.parseInt(config.getAttribute("connection-check-interval")); } if (config.hasAttribute("block-size")) { int sz = (int) StringUtils.parseSize(config.getAttribute("block-size")); HashBlobArchive.MAX_LEN = sz; } if (config.hasAttribute("allow-sync")) { HashBlobArchive.allowSync = Boolean.parseBoolean(config.getAttribute("allow-sync")); if (config.hasAttribute("sync-check-schedule")) { try { new SyncFSScheduler(config.getAttribute("sync-check-schedule")); } catch (Exception e) { SDFSLogger.getLog().error("unable to start sync scheduler", e); } } } if (config.hasAttribute("upload-thread-sleep-time")) { int tm = Integer.parseInt(config.getAttribute("upload-thread-sleep-time")); HashBlobArchive.THREAD_SLEEP_TIME = tm; } if (config.hasAttribute("cache-writes")) { HashBlobArchive.cacheWrites = Boolean.parseBoolean(config.getAttribute("cache-writes")); } if (config.hasAttribute("cache-reads")) { HashBlobArchive.cacheReads = Boolean.parseBoolean(config.getAttribute("cache-reads")); } if (config.hasAttribute("sync-files")) { boolean syncf = Boolean.parseBoolean(config.getAttribute("sync-files")); if (syncf) { new FileReplicationService(this); } } int rsp = 0; int wsp = 0; if (config.hasAttribute("read-speed")) { rsp = Integer.parseInt(config.getAttribute("read-speed")); } if (config.hasAttribute("write-speed")) { wsp = Integer.parseInt(config.getAttribute("write-speed")); } if (config.hasAttribute("local-cache-size")) { long sz = StringUtils.parseSize(config.getAttribute("local-cache-size")); HashBlobArchive.setLocalCacheSize(sz); } if (config.hasAttribute("metadata-version")) 
{ this.mdVersion = Integer.parseInt(config.getAttribute("metadata-version")); } if (config.hasAttribute("map-cache-size")) { int sz = Integer.parseInt(config.getAttribute("map-cache-size")); HashBlobArchive.MAP_CACHE_SIZE = sz; } if (config.hasAttribute("io-threads")) { int sz = Integer.parseInt(config.getAttribute("io-threads")); Main.dseIOThreads = sz; } if (config.hasAttribute("clustered")) { this.clustered = Boolean.parseBoolean(config.getAttribute("clustered")); } if (config.hasAttribute("delete-unclaimed")) { this.deleteUnclaimed = Boolean.parseBoolean(config.getAttribute("delete-unclaimed")); } if (config.hasAttribute("glacier-archive-days")) { this.glacierDays = Integer.parseInt(config.getAttribute("glacier-archive-days")); if (this.glacierDays > 0) Main.checkArchiveOnRead = true; } if (config.hasAttribute("infrequent-access-days")) { this.infrequentAccess = Integer.parseInt(config.getAttribute("infrequent-access-days")); } if (config.hasAttribute("simple-s3")) { EncyptUtils.baseEncode = Boolean.parseBoolean(config.getAttribute("simple-s3")); this.simpleS3 = true; } if (config.hasAttribute("md5-sum")) { this.md5sum = Boolean.parseBoolean(config.getAttribute("md5-sum")); if (!this.md5sum) { System.setProperty("com.amazonaws.services.s3.disableGetObjectMD5Validation", "true"); System.setProperty("com.amazonaws.services.s3.disablePutObjectMD5Validation", "true"); } } ClientConfiguration clientConfig = new ClientConfiguration(); if (config.hasAttribute("use-v4-signer")) { boolean v4s = Boolean.parseBoolean(config.getAttribute("use-v4-signer")); if (v4s) { clientConfig.setSignerOverride("AWSS3V4SignerType"); } } if (config.hasAttribute("use-basic-signer")) { boolean v4s = Boolean.parseBoolean(config.getAttribute("use-basic-signer")); if (v4s) { clientConfig.setSignerOverride("S3SignerType"); } } clientConfig.setMaxConnections(Main.dseIOThreads * 2); clientConfig.setConnectionTimeout(10000); clientConfig.setSocketTimeout(10000); String s3Target = null; if 
(config.getElementsByTagName("connection-props").getLength() > 0) { Element el = (Element) config.getElementsByTagName("connection-props").item(0); if (el.hasAttribute("connection-timeout")) clientConfig.setConnectionTimeout(Integer.parseInt(el.getAttribute("connection-timeout"))); if (el.hasAttribute("socket-timeout")) clientConfig.setSocketTimeout(Integer.parseInt(el.getAttribute("socket-timeout"))); if (el.hasAttribute("local-address")) clientConfig.setLocalAddress(InetAddress.getByName(el.getAttribute("local-address"))); if (el.hasAttribute("max-retry")) clientConfig.setMaxErrorRetry(Integer.parseInt(el.getAttribute("max-retry"))); if (el.hasAttribute("protocol")) { String pr = el.getAttribute("protocol"); if (pr.equalsIgnoreCase("http")) clientConfig.setProtocol(Protocol.HTTP); else clientConfig.setProtocol(Protocol.HTTPS); } if (el.hasAttribute("s3-target")) { s3Target = el.getAttribute("s3-target"); } if (el.hasAttribute("proxy-host")) { clientConfig.setProxyHost(el.getAttribute("proxy-host")); } if (el.hasAttribute("proxy-domain")) { clientConfig.setProxyDomain(el.getAttribute("proxy-domain")); } if (el.hasAttribute("proxy-password")) { clientConfig.setProxyPassword(el.getAttribute("proxy-password")); } if (el.hasAttribute("proxy-port")) { clientConfig.setProxyPort(Integer.parseInt(el.getAttribute("proxy-port"))); } if (el.hasAttribute("proxy-username")) { clientConfig.setProxyUsername(el.getAttribute("proxy-username")); } } if (s3Target != null && s3Target.toLowerCase().startsWith("https")) { TrustStrategy acceptingTrustStrategy = new TrustStrategy() { @Override public boolean isTrusted(X509Certificate[] certificate, String authType) { return true; } }; SSLSocketFactory sf = new SSLSocketFactory(acceptingTrustStrategy, SSLSocketFactory.ALLOW_ALL_HOSTNAME_VERIFIER); clientConfig.getApacheHttpClientConfig().withSslSocketFactory(sf); } if (awsCredentials != null) s3Service = new AmazonS3Client(awsCredentials, clientConfig); else s3Service = new 
AmazonS3Client(new InstanceProfileCredentialsProvider(), clientConfig); if (bucketLocation != null) { s3Service.setRegion(bucketLocation); System.out.println("bucketLocation=" + bucketLocation.toString()); } if (s3Target != null) { s3Service.setEndpoint(s3Target); System.out.println("target=" + s3Target); } if (config.hasAttribute("disableDNSBucket")) { s3Service.setS3ClientOptions(new S3ClientOptions() .withPathStyleAccess(Boolean.parseBoolean(config.getAttribute("disableDNSBucket"))) .disableChunkedEncoding()); System.out.println( "disableDNSBucket=" + Boolean.parseBoolean(config.getAttribute("disableDNSBucket"))); } if (!s3Service.doesBucketExist(this.name)) { s3Service.createBucket(this.name); SDFSLogger.getLog().info("created new store " + name); ObjectMetadata md = new ObjectMetadata(); md.addUserMetadata("currentsize", "0"); md.addUserMetadata("currentcompressedsize", "0"); md.addUserMetadata("clustered", "true"); md.addUserMetadata("lastupdate", Long.toString(System.currentTimeMillis())); md.addUserMetadata("hostname", InetAddress.getLocalHost().getHostName()); md.addUserMetadata("port", Integer.toString(Main.sdfsCliPort)); this.clustered = true; byte[] sz = Long.toString(System.currentTimeMillis()).getBytes(); if (md5sum) { String mds = BaseEncoding.base64().encode(ServiceUtils.computeMD5Hash(sz)); md.setContentMD5(mds); } md.setContentLength(sz.length); this.binm = "bucketinfo/" + EncyptUtils.encHashArchiveName(Main.DSEID, Main.chunkStoreEncryptionEnabled); s3Service.putObject(this.name, binm, new ByteArrayInputStream(sz), md); } else { Map<String, String> obj = null; ObjectMetadata omd = null; try { omd = s3Service.getObjectMetadata(this.name, binm); obj = omd.getUserMetadata(); obj.get("currentsize"); } catch (Exception e) { omd = null; SDFSLogger.getLog().debug("unable to find bucketinfo object", e); } if (omd == null) { try { this.binm = "bucketinfo/" + EncyptUtils.encHashArchiveName(Main.DSEID, Main.chunkStoreEncryptionEnabled); omd = 
s3Service.getObjectMetadata(this.name, binm); obj = omd.getUserMetadata(); obj.get("currentsize"); } catch (Exception e) { omd = null; SDFSLogger.getLog().debug("unable to find bucketinfo object", e); } } if (omd == null) { ObjectMetadata md = new ObjectMetadata(); md.addUserMetadata("currentsize", "0"); md.addUserMetadata("currentcompressedsize", "0"); md.addUserMetadata("clustered", "true"); md.addUserMetadata("lastupdate", Long.toString(System.currentTimeMillis())); md.addUserMetadata("hostname", InetAddress.getLocalHost().getHostName()); md.addUserMetadata("port", Integer.toString(Main.sdfsCliPort)); this.clustered = true; this.binm = "bucketinfo/" + EncyptUtils.encHashArchiveName(Main.DSEID, Main.chunkStoreEncryptionEnabled); byte[] sz = Long.toString(System.currentTimeMillis()).getBytes(); if (md5sum) { String mds = BaseEncoding.base64().encode(ServiceUtils.computeMD5Hash(sz)); md.setContentMD5(mds); } md.setContentLength(sz.length); s3Service.putObject(this.name, binm, new ByteArrayInputStream(sz), md); } else { if (obj.containsKey("currentsize")) { long cl = Long.parseLong((String) obj.get("currentsize")); if (cl >= 0) { HashBlobArchive.currentLength.set(cl); } else SDFSLogger.getLog().warn("The S3 objectstore DSE did not close correctly len=" + cl); } else { SDFSLogger.getLog().warn( "The S3 objectstore DSE did not close correctly. Metadata tag currentsize was not added"); } if (obj.containsKey("currentcompressedsize")) { long cl = Long.parseLong((String) obj.get("currentcompressedsize")); if (cl >= 0) { HashBlobArchive.compressedLength.set(cl); } else SDFSLogger.getLog().warn("The S3 objectstore DSE did not close correctly clen=" + cl); } else { SDFSLogger.getLog().warn( "The S3 objectstore DSE did not close correctly. 
Metadata tag currentsize was not added"); } if (obj.containsKey("clustered")) { this.clustered = Boolean.parseBoolean(obj.get("clustered")); } else this.clustered = false; obj.put("clustered", Boolean.toString(this.clustered)); omd.setUserMetadata(obj); try { updateObject(binm, omd); } catch (Exception e) { SDFSLogger.getLog().warn("unable to update bucket info in init", e); SDFSLogger.getLog().info("created new store " + name); ObjectMetadata md = new ObjectMetadata(); md.addUserMetadata("currentsize", "0"); md.addUserMetadata("lastupdate", Long.toString(System.currentTimeMillis())); md.addUserMetadata("currentcompressedsize", "0"); md.addUserMetadata("clustered", Boolean.toString(this.clustered)); md.addUserMetadata("hostname", InetAddress.getLocalHost().getHostName()); md.addUserMetadata("port", Integer.toString(Main.sdfsCliPort)); byte[] sz = Long.toString(System.currentTimeMillis()).getBytes(); if (md5sum) { String mds = BaseEncoding.base64().encode(ServiceUtils.computeMD5Hash(sz)); md.setContentMD5(mds); } md.setContentLength(sz.length); s3Service.putObject(this.name, binm, new ByteArrayInputStream(sz), md); } } } ArrayList<Transition> trs = new ArrayList<Transition>(); if (this.glacierDays > 0 && s3Target == null) { Transition transToArchive = new Transition().withDays(this.glacierDays) .withStorageClass(StorageClass.Glacier); trs.add(transToArchive); } if (this.infrequentAccess > 0 && s3Target == null) { Transition transToArchive = new Transition().withDays(this.infrequentAccess) .withStorageClass(StorageClass.StandardInfrequentAccess); trs.add(transToArchive); } if (trs.size() > 0) { BucketLifecycleConfiguration.Rule ruleArchiveAndExpire = new BucketLifecycleConfiguration.Rule() .withId("SDFS Automated Archive Rule for Block Data").withPrefix("blocks/") .withTransitions(trs).withStatus(BucketLifecycleConfiguration.ENABLED.toString()); List<BucketLifecycleConfiguration.Rule> rules = new ArrayList<BucketLifecycleConfiguration.Rule>(); 
rules.add(ruleArchiveAndExpire); BucketLifecycleConfiguration configuration = new BucketLifecycleConfiguration().withRules(rules); // Save configuration. s3Service.setBucketLifecycleConfiguration(this.name, configuration); } else if (s3Target == null) { s3Service.deleteBucketLifecycleConfiguration(this.name); } HashBlobArchive.init(this); HashBlobArchive.setReadSpeed(rsp); HashBlobArchive.setWriteSpeed(wsp); Thread th = new Thread(this); th.start(); } catch (Exception e) { SDFSLogger.getLog().error("unable to start service", e); throw new IOException(e); } }
From source file:org.opendedup.sdfs.filestore.cloud.BatchAwsS3ChunkStore.java
License:Open Source License
/**
 * Uploads a (potentially large) object using the SDK's multipart
 * {@link TransferManager} and blocks until the transfer finishes.
 * The manager is always shut down afterwards, even on failure.
 *
 * @param req the put request to execute
 */
private void multiPartUpload(PutObjectRequest req)
        throws AmazonServiceException, AmazonClientException, InterruptedException {
    TransferManager tx = null;
    try {
        if (awsCredentials != null)
            tx = new TransferManager(awsCredentials);
        else
            // FIX: getInstance() replaces the deprecated no-arg constructor
            // (shared, cached provider; requires AWS SDK >= 1.11.16).
            tx = new TransferManager(InstanceProfileCredentialsProvider.getInstance());
        Upload myUpload = tx.upload(req);
        myUpload.waitForCompletion();
    } finally {
        if (tx != null)
            tx.shutdownNow();
    }
}
From source file:org.opendedup.sdfs.filestore.cloud.BatchAwsS3ChunkStore.java
License:Open Source License
private void multiPartDownload(String keyName, File f) throws AmazonServiceException, AmazonClientException, InterruptedException { TransferManager tx = null;//from w w w. j a va 2 s.co m try { if (awsCredentials != null) tx = new TransferManager(awsCredentials); else tx = new TransferManager(new InstanceProfileCredentialsProvider()); Download myDownload = tx.download(this.name, keyName, f); myDownload.waitForCompletion(); } finally { if (tx != null) tx.shutdownNow(); } }
From source file:org.rdswitchboard.harvesters.pmh.Harvester.java
License:Open Source License
/** * Harvester constructor/*from ww w. j a va2 s . c o m*/ * * @param repoUrl : The Repository URL * @param folderBase : The address of the folder, there received data must be saved. * @throws JAXBException * @throws IOException * @throws Exception */ public Harvester(final Properties properties) throws Exception { repoUrl = properties.getProperty("url"); if (StringUtils.isNullOrEmpty(repoUrl)) throw new IllegalArgumentException("The OAI:PMH Repository URL can not be empty"); repoPrefix = properties.getProperty("name"); if (StringUtils.isNullOrEmpty(repoPrefix)) throw new IllegalArgumentException("The OAI:PMH Repository Prefix can not be empty"); metadataPrefix = properties.getProperty("metadata"); String accessKey = properties.getProperty("aws.access.key"); String secretKey = properties.getProperty("aws.secret.key"); if (StringUtils.isNullOrEmpty(accessKey) || StringUtils.isNullOrEmpty(secretKey)) s3client = new AmazonS3Client(new InstanceProfileCredentialsProvider()); else s3client = new AmazonS3Client(new BasicAWSCredentials(accessKey, secretKey)); bucketName = properties.getProperty("s3.bucket"); folderName = properties.getProperty("folder"); if (StringUtils.isNullOrEmpty(bucketName) && StringUtils.isNullOrEmpty(folderName)) throw new IllegalArgumentException( "Please enter either local folder name or AWS S3 Bucket name to store the harvested files"); if (!StringUtils.isNullOrEmpty(bucketName) && !StringUtils.isNullOrEmpty(folderName)) throw new IllegalArgumentException( "S3 bucket and local folder parameters can not be used at the same time. 
Please disable one in the configuration file."); try { File fileBlackList = new File(properties.getProperty("black.list")); if (fileBlackList.isFile()) { List<String> list = FileUtils.readLines(fileBlackList); blackList = new HashSet<String>(); for (String l : list) { String s = l.trim(); if (!s.isEmpty()) blackList.add(s); } } } catch (Exception e) { blackList = null; } try { File fileWhiteList = new File(properties.getProperty("white.list")); if (fileWhiteList.isFile()) { List<String> list = FileUtils.readLines(fileWhiteList); whiteList = new HashSet<String>(); for (String l : list) { String s = l.trim(); if (!s.isEmpty()) whiteList.add(s); } } } catch (Exception e) { whiteList = null; } if (null != blackList && !blackList.isEmpty() && null != whiteList && !whiteList.isEmpty()) throw new Exception( "The black and the withe list parameters can not be set at the same time. Please disable one in the configuration file."); connectionTimeout = Integer.parseInt(properties.getProperty("conn.timeout", "0")); readTimeout = Integer.parseInt(properties.getProperty("read.timeout", "0")); maxAttempts = Integer.parseInt(properties.getProperty("max.attempts", "0")); attemptDelay = Integer.parseInt(properties.getProperty("attempt.delay", "0")); failOnError = Boolean.parseBoolean(properties.getProperty("fail.on.error", "true")); }
From source file:org.rdswitchboard.importers.browser.s3.App.java
License:Open Source License
public static void main(String[] args) { try {/*from w w w . j ava 2 s . com*/ if (args.length == 0 || StringUtils.isNullOrEmpty(args[0])) throw new Exception("Please provide properties file"); String propertiesFile = args[0]; Properties properties = new Properties(); try (InputStream in = new FileInputStream(propertiesFile)) { properties.load(in); } String source = properties.getProperty("data.source.id"); if (StringUtils.isNullOrEmpty(source)) throw new IllegalArgumentException("Source can not be empty"); System.out.println("Source: " + source); String baseUrl = properties.getProperty("base.url"); if (StringUtils.isNullOrEmpty(baseUrl)) throw new IllegalArgumentException("Base URL can not be empty"); System.out.println("Base URL: " + baseUrl); String sessionId = properties.getProperty("session.id"); if (StringUtils.isNullOrEmpty(sessionId)) throw new IllegalArgumentException("Session Id can not be empty"); System.out.println("Session Id: " + sessionId); String accessKey = properties.getProperty("aws.access.key"); String secretKey = properties.getProperty("aws.secret.key"); String bucket = properties.getProperty("s3.bucket"); if (StringUtils.isNullOrEmpty(bucket)) throw new IllegalArgumentException("AWS S3 Bucket can not be empty"); System.out.println("S3 Bucket: " + bucket); String prefix = properties.getProperty("s3.prefix"); if (StringUtils.isNullOrEmpty(prefix)) throw new IllegalArgumentException("AWS S3 Prefix can not be empty"); System.out.println("S3 Prefix: " + prefix); String crosswalk = properties.getProperty("crosswalk"); Templates template = null; if (!StringUtils.isNullOrEmpty(crosswalk)) { System.out.println("Crosswalk: " + crosswalk); template = TransformerFactory.newInstance() .newTemplates(new StreamSource(new FileInputStream(crosswalk))); } ObjectMapper mapper = new ObjectMapper(); Client client = Client.create(); Cookie cookie = new Cookie("PHPSESSID", properties.getProperty("session")); AmazonS3 s3client; if 
(!StringUtils.isNullOrEmpty(accessKey) && !StringUtils.isNullOrEmpty(secretKey)) { System.out.println( "Connecting to AWS via Access and Secret Keys. This is not safe practice, consider to use IAM Role instead."); AWSCredentials awsCredentials = new BasicAWSCredentials(accessKey, secretKey); s3client = new AmazonS3Client(awsCredentials); } else { System.out.println("Connecting to AWS via Instance Profile Credentials"); s3client = new AmazonS3Client(new InstanceProfileCredentialsProvider()); } //String file = "rda/rif/class:collection/54800.xml"; ListObjectsRequest listObjectsRequest; ObjectListing objectListing; String file = prefix + "/latest.txt"; S3Object object = s3client.getObject(new GetObjectRequest(bucket, file)); String latest; try (InputStream txt = object.getObjectContent()) { latest = prefix + "/" + IOUtils.toString(txt, StandardCharsets.UTF_8).trim() + "/"; } System.out.println("S3 Repository: " + latest); listObjectsRequest = new ListObjectsRequest().withBucketName(bucket).withPrefix(latest); do { objectListing = s3client.listObjects(listObjectsRequest); for (S3ObjectSummary objectSummary : objectListing.getObjectSummaries()) { file = objectSummary.getKey(); System.out.println("Processing file: " + file); object = s3client.getObject(new GetObjectRequest(bucket, file)); String xml = null; if (null != template) { Source reader = new StreamSource(object.getObjectContent()); StringWriter writer = new StringWriter(); Transformer transformer = template.newTransformer(); transformer.transform(reader, new StreamResult(writer)); xml = writer.toString(); } else { InputStream is = object.getObjectContent(); xml = IOUtils.toString(is, ENCODING); } URL url = new URL(baseUrl + "/registry/import/import_s3/"); StringBuilder sb = new StringBuilder(); addParam(sb, "id", source); addParam(sb, "xml", xml); //System.out.println(sb.toString()); WebResource webResource = client.resource(url.toString()); ClientResponse response = webResource .header("User-Agent", 
"Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:38.0) Gecko/20100101 Firefox/38.0") .accept(MediaType.APPLICATION_JSON, "*/*").acceptLanguage("en-US", "en") .type(MediaType.APPLICATION_FORM_URLENCODED).cookie(cookie) .post(ClientResponse.class, sb.toString()); if (response.getStatus() != 200) { throw new RuntimeException("Failed : HTTP error code : " + response.getStatus()); } String output = response.getEntity(String.class); Result result = mapper.readValue(output, Result.class); if (!result.getStatus().equals("OK")) { System.err.println(result.getMessage()); break; } else System.out.println(result.getMessage()); } listObjectsRequest.setMarker(objectListing.getNextMarker()); } while (objectListing.isTruncated()); } catch (Exception e) { e.printStackTrace(); } }
From source file:org.rdswitchboard.tests.crosswalk.App.java
License:Open Source License
public static void main(String[] args) { try {//from www. j a va 2s . c o m String propertiesFile = PROPERTIES_FILE; if (args.length != 0 && !StringUtils.isNullOrEmpty(args[0])) propertiesFile = args[0]; Properties properties = new Properties(); try (InputStream in = new FileInputStream(propertiesFile)) { properties.load(in); } String accessKey = properties.getProperty("aws.access.key"); String secretKey = properties.getProperty("aws.secret.key"); String bucket = properties.getProperty("s3.bucket"); if (StringUtils.isNullOrEmpty(bucket)) throw new IllegalArgumentException("AWS S3 Bucket can not be empty"); System.out.println("S3 Bucket: " + bucket); String key = properties.getProperty("s3.key"); if (StringUtils.isNullOrEmpty(key)) throw new IllegalArgumentException("AWS S3 Key can not be empty"); System.out.println("S3 Key: " + key); String crosswalk = properties.getProperty("crosswalk"); if (StringUtils.isNullOrEmpty(crosswalk)) throw new IllegalArgumentException("Crosswalk can not be empty"); System.out.println("Crosswalk: " + crosswalk); String outFileName = properties.getProperty("out", OUT_FILE_NAME); System.out.println("Out: " + outFileName); AmazonS3 s3client; if (!StringUtils.isNullOrEmpty(accessKey) && !StringUtils.isNullOrEmpty(secretKey)) { System.out.println( "Connecting to AWS via Access and Secret Keys. 
This is not safe practice, consider to use IAM Role instead."); AWSCredentials awsCredentials = new BasicAWSCredentials(accessKey, secretKey); s3client = new AmazonS3Client(awsCredentials); } else { System.out.println("Connecting to AWS via Instance Profile Credentials"); s3client = new AmazonS3Client(new InstanceProfileCredentialsProvider()); } S3Object object = s3client.getObject(new GetObjectRequest(bucket, key)); Templates template = TransformerFactory.newInstance() .newTemplates(new StreamSource(new FileInputStream(crosswalk))); StreamSource reader = new StreamSource(object.getObjectContent()); StreamResult result = (StringUtils.isNullOrEmpty(outFileName) || outFileName.equals("stdout")) ? new StreamResult(System.out) : new StreamResult(new FileOutputStream(outFileName)); Transformer transformer = template.newTransformer(); transformer.transform(reader, result); /* DocumentBuilderFactory dFactory = DocumentBuilderFactory.newInstance(); TransformerFactory tFactory = TransformerFactory.newInstance(); XPath xPath = XPathFactory.newInstance().newXPath(); DocumentBuilder builder = dFactory.newDocumentBuilder(); Document document = builder.parse(object.getObjectContent()); Transformer transformer1 = tFactory.newTemplates( new StreamSource(new FileInputStream(crosswalk))).newTransformer(); Transformer transformer2 = tFactory.newTransformer(); NodeList metadata = (NodeList)xPath.evaluate("/OAI-PMH/ListRecords/record/metadata", document.getDocumentElement(), XPathConstants.NODESET); for (int i = 0; i < metadata.getLength(); ++i) { System.out.println("Converting node: " + i); Element e = (Element) metadata.item(i); Node mets = e.getElementsByTagName("mets").item(0); Node rifcs = document.createElement("registryObjects"); DOMSource xmlSource = new DOMSource(mets); DOMResult xmlResult = new DOMResult(rifcs); transformer1.transform(xmlSource, xmlResult); e.removeChild(mets); e.appendChild(xmlResult.getNode()); // e.replaceChild(rifcs, xmlResult.getNode()); } StreamResult 
result = (StringUtils.isNullOrEmpty(outFileName) || outFileName.equals("stdout")) ? new StreamResult(System.out) : new StreamResult(new FileOutputStream(outFileName)); transformer2.transform(new DOMSource(document), result); */ } catch (Exception e) { e.printStackTrace(); } }