Usage examples for com.amazonaws.event.ProgressListener
ProgressListener
From source file:aws.example.s3.XferMgrProgress.java
License:Open Source License
public static void uploadFileWithListener(String file_path, String bucket_name, String key_prefix, boolean pause) { System.out.println("file: " + file_path + (pause ? " (pause)" : "")); String key_name = null;/*from w w w. java2s .c o m*/ if (key_prefix != null) { key_name = key_prefix + '/' + file_path; } else { key_name = file_path; } File f = new File(file_path); TransferManager xfer_mgr = new TransferManager(); try { Upload u = xfer_mgr.upload(bucket_name, key_name, f); // print an empty progress bar... printProgressBar(0.0); u.addProgressListener(new ProgressListener() { public void progressChanged(ProgressEvent e) { double pct = e.getBytesTransferred() * 100.0 / e.getBytes(); eraseProgressBar(); printProgressBar(pct); } }); // block with Transfer.waitForCompletion() XferMgrProgress.waitForCompletion(u); // print the final state of the transfer. TransferState xfer_state = u.getState(); System.out.println(": " + xfer_state); } catch (AmazonServiceException e) { System.err.println(e.getErrorMessage()); System.exit(1); } xfer_mgr.shutdownNow(); }
From source file:com.streamsets.datacollector.lib.emr.S3Manager.java
License:Apache License
String uploadToS3(String name, File file) throws IOException { long start = System.currentTimeMillis(); long fileLength = file.length() / (1000 * 1000); String bucket = getBucket(pipelineEmrConfigs.getS3StagingUri()); String path = getPath(pipelineEmrConfigs.getS3StagingUri()) + "/" + pipelineId + "/" + uniquePrefix; String s3Uri = "s3://" + bucket + "/" + path + "/" + name; try {/* w ww.java 2s. c o m*/ // Upload PutObjectRequest putObjectRequest = new PutObjectRequest(bucket, path + "/" + name, file); putObjectRequest.setGeneralProgressListener(new ProgressListener() { long counter; long tick = -1; @Override public void progressChanged(ProgressEvent progressEvent) { counter += progressEvent.getBytesTransferred(); if (counter / (100 * 1000000) > tick) { tick++; LOG.debug("Uploading '{}' {}/{} MB, {} secs", s3Uri, counter / (1000 * 1000), fileLength, (System.currentTimeMillis() - start) / 1000); } } }); getS3TransferManager().upload(putObjectRequest).waitForCompletion(); LOG.info("Uploaded file at: {}", s3Uri); return s3Uri; } catch (SdkBaseException | InterruptedException ex) { throw new IOException(ex); } }
From source file:com.vmware.photon.controller.model.adapters.awsadapter.AWSCostStatsService.java
License:Open Source License
private void downloadAndParse(AWSCostStatsCreationContext statsData, String awsBucketname, int year, int month, AWSCostStatsCreationStages next) throws IOException { // Creating a working directory for downloanding and processing the bill final Path workingDirPath = Paths.get(System.getProperty(TEMP_DIR_LOCATION), UUID.randomUUID().toString()); Files.createDirectories(workingDirPath); String accountId = statsData.computeDesc.customProperties.getOrDefault(AWSConstants.AWS_ACCOUNT_ID_KEY, null);//from ww w . ja va2 s. c o m AWSCsvBillParser parser = new AWSCsvBillParser(); final String csvBillZipFileName = parser.getCsvBillFileName(month, year, accountId, true); Path csvBillZipFilePath = Paths.get(workingDirPath.toString(), csvBillZipFileName); GetObjectRequest getObjectRequest = new GetObjectRequest(awsBucketname, csvBillZipFileName); Download download = statsData.s3Client.download(getObjectRequest, csvBillZipFilePath.toFile()); final StatelessService service = this; download.addProgressListener(new ProgressListener() { @Override public void progressChanged(ProgressEvent progressEvent) { try { ProgressEventType eventType = progressEvent.getEventType(); if (ProgressEventType.TRANSFER_COMPLETED_EVENT.equals(eventType)) { LocalDate monthDate = new LocalDate(year, month, 1); statsData.accountDetailsMap = parser.parseDetailedCsvBill(statsData.ignorableInvoiceCharge, csvBillZipFilePath, monthDate); deleteTempFiles(); OperationContext.restoreOperationContext(statsData.opContext); statsData.stage = next; handleCostStatsCreationRequest(statsData); } else if (ProgressEventType.TRANSFER_FAILED_EVENT.equals(eventType)) { deleteTempFiles(); throw new IOException("Download of AWS CSV Bill '" + csvBillZipFileName + "' failed."); } } catch (IOException e) { logSevere(e); AdapterUtils.sendFailurePatchToProvisioningTask(service, statsData.statsRequest.taskReference, e); } } private void deleteTempFiles() { try { Files.deleteIfExists(csvBillZipFilePath); 
Files.deleteIfExists(workingDirPath); } catch (IOException e) { // Ignore IO exception while cleaning files. } } }); }
From source file:com.zahdoo.android.extension.GCM.DownloadModel.java
License:Open Source License
/**
 * Transfer model representing a single S3 download for the GCM file-transfer
 * service. Registers a ProgressListener that forwards integer-percent
 * progress and terminal events (success/failure) to the AIR runtime via
 * FileTransferFunction.gcmCon.dispatchStatusEventAsync, then stops the
 * hosting service on completion or failure.
 *
 * @param context      hosting service context; cast to FileTransferService to stop it
 * @param key          S3 key of the file; also recorded as the file name
 * @param downloadType "THUMB_DOWNLOAD" for thumbnails, anything else for
 *                     regular files (selects which status event is dispatched)
 */
public DownloadModel(Context context, String key, String downloadType) {
    super(context, Uri.parse(key));
    this.context = context;
    strFileName = key; // key is the name of the file
    fileDownloadType = downloadType;
    mStatus = Status.IN_PROGRESS;
    mListener = new ProgressListener() {
        @Override
        public void progressChanged(ProgressEvent event) {
            Log.d("CADIE S3", "Download Progress - "
                    + (int) Global.mDownload.getProgress().getPercentTransferred());
            try {
                // if(!fileDownloadType.contentEquals("THUMB_DOWNLOAD"))
                // {
                // Dispatch TRANSFER_PROGRESS only when the integer percentage
                // is non-zero and has actually changed since the last event.
                if ((int) Global.mDownload.getProgress().getPercentTransferred() != 0
                        && progressVal != (int) Global.mDownload.getProgress().getPercentTransferred()) {
                    progressVal = (int) Global.mDownload.getProgress().getPercentTransferred();
                    Log.d("CADIE S3", "Download Progress Event Dispatch - " + progressVal);
                    try {
                        FileTransferFunction.gcmCon.dispatchStatusEventAsync("REGISTERED",
                                "TRANSFER_PROGRESS^" + progressVal);
                    } catch (Exception e) {
                        Log.d("CADIE S3", "Download err - " + e.toString());
                    }
                }
                //}
                if (event.getEventCode() == ProgressEvent.COMPLETED_EVENT_CODE) {
                    // Terminal success: detach listener, notify AIR, stop service.
                    Global.mDownload.removeProgressListener(mListener);
                    mStatus = Status.COMPLETED;
                    if (fileDownloadType.contentEquals("THUMB_DOWNLOAD")) {
                        Log.d("CADIE S3", "Thumb Downloaded");
                        try {
                            FileTransferFunction.gcmCon.dispatchStatusEventAsync("REGISTERED", "THUMB_DOWNLOAD");
                        } catch (Exception e) {
                            Log.d("CADIE S3", "Thumb Download err - " + e.toString());
                        }
                    } else {
                        Log.d("CADIE S3", "File Downloaded");
                        try {
                            FileTransferFunction.gcmCon.dispatchStatusEventAsync("REGISTERED", "DOWNLOAD_SUCCESSFUL");
                        } catch (Exception e) {
                            Log.d("CADIE S3", "File Download err - " + e.toString());
                        }
                    }
                    ((FileTransferService) DownloadModel.this.context).stopSelf();
                } else if (event.getEventCode() == ProgressEvent.FAILED_EVENT_CODE) {
                    // Terminal failure: surface the transfer exception, notify
                    // AIR, tear down the shared TransferManager, stop service.
                    Global.mDownload.removeProgressListener(mListener);
                    try {
                        AmazonClientException e = Global.mDownload.waitForException();
                        // NOTE(review): the log literal below contains a line
                        // break introduced by source extraction; reproduced
                        // as-is — restore the original separator when fixing.
                        Log.e("CADIE ", "CADIE S3 Exception - " + e.toString() + " 
" + event.getBytesTransferred());
                        try {
                            FileTransferFunction.gcmCon.dispatchStatusEventAsync("REGISTERED", "ERROR ");
                        } catch (Exception e1) {
                            Log.d("CADIE S3", "CADIE S3 Exception 100 - " + e1.toString());
                        }
                        Global.mTransferManager.shutdownNow();
                        Global.mTransferManager = null;
                        ((FileTransferService) DownloadModel.this.context).stopSelf();
                    } catch (InterruptedException e) {
                        // NOTE(review): interrupt is swallowed here; consider
                        // Thread.currentThread().interrupt() to restore the flag.
                    }
                }
            } catch (Exception e) {
                Log.d("CADIE S3", "S3 Download Exc - " + e.toString());
            }
        }
    };
}
From source file:com.zahdoo.android.extension.GCM.UploadModel.java
License:Open Source License
/**
 * Transfer model representing a single S3 upload for the GCM file-transfer
 * service. Resolves the local file path from the upload type, then registers
 * a ProgressListener that forwards progress and terminal events to the AIR
 * runtime via FileTransferFunction.gcmCon.dispatchStatusEventAsync and stops
 * the hosting service when the transfer ends.
 *
 * @param context        hosting service context; cast to FileTransferService to stop it
 * @param uri            destination URI passed to the superclass
 * @param fileName       name of the file to upload; its extension is recorded
 * @param uploadType     "THUMB" for thumbnails, "ALL_FILES" for regular files
 * @param serverResponse caret-separated server reply, e.g. "FILE_UPLOADED^id"
 */
public UploadModel(Context context, Uri uri, String fileName, final String uploadType, final String serverResponse) {
    super(context, uri);
    this.context = context;
    strFileName = fileName;
    isTempFile = false;
    fileUploadType = uploadType;
    try {
        // Resolve the on-disk location: thumbnails live in the app's Local
        // Store; regular files are looked up on external storage first.
        if (uploadType.contentEquals("THUMB")) {
            Log.d("CADIE GCM", "UPLOADING THUMB");
            filePath = "/data/data/air.com.zahdoo.cadie/com.zahdoo.cadie/Local Store/thumbnails/" + fileName;
        } else {
            filePath = Environment.getExternalStorageDirectory() + "/cadie/" + fileName;
            if (!(new File(filePath).exists())) {
                filePath = "/data/data/air.com.zahdoo.cadie/com.zahdoo.cadie/Local Store/cadie/" + fileName;
            }
        }
        mFile = new File(filePath);
        if (mFile.exists()) {
            Log.d("CADIE S3", "File Exists");
        } else {
            // NOTE(review): the flag is set when the file is MISSING; it later
            // triggers mFile.delete() after upload — confirm intended meaning.
            isTempFile = true;
            Log.d("CADIE S3", "File does not exist");
        }
        int i = fileName.lastIndexOf('.');
        if (i >= 0) {
            mExtension = fileName.substring(i + 1);
        }
        Log.d("CADIE S3", "File Extension - " + mExtension);
        mListener = new ProgressListener() {
            @Override
            public void progressChanged(ProgressEvent event) {
                Log.d("CADIE S3", "Upload Progress - "
                        + (int) Global.mUpload.getProgress().getPercentTransferred());
                if (fileUploadType.contentEquals("ALL_FILES")) {
                    // Dispatch TRANSFER_PROGRESS only when the integer
                    // percentage is non-zero and has changed.
                    if ((int) Global.mUpload.getProgress().getPercentTransferred() != 0
                            && progressVal != (int) Global.mUpload.getProgress().getPercentTransferred()) {
                        progressVal = (int) Global.mUpload.getProgress().getPercentTransferred();
                        Log.d("CADIE S3", "Upload Progress Event Dispatch - " + progressVal);
                        try {
                            FileTransferFunction.gcmCon.dispatchStatusEventAsync("REGISTERED",
                                    "TRANSFER_PROGRESS^" + progressVal);
                        } catch (Exception e) {
                            Log.d("CADIE S3", "Upload Progress Event Dispatch Error - " + e.toString());
                        }
                    }
                }
                if (event.getEventCode() == ProgressEvent.COMPLETED_EVENT_CODE) {
                    // Terminal success: detach listener, optionally delete the
                    // temp file, notify AIR, stop the hosting service.
                    Log.d("CADIE S3", "File Uploaded");
                    Global.mUpload.removeProgressListener(mListener);
                    if (mFile != null) {
                        if (isTempFile)
                            mFile.delete();
                        Log.d("CADIE S3", "File Deleted");
                        if (fileUploadType.contentEquals("ALL_FILES")) {
                            String[] vStrings = serverResponse.split("\\^");
                            try {
                                if (vStrings[0].contentEquals("FILE_UPLOADED")) {
                                    FileTransferFunction.gcmCon.dispatchStatusEventAsync("REGISTERED",
                                            "FILE_UPLOADED^" + vStrings[1]);
                                } else {
                                    FileTransferFunction.gcmCon.dispatchStatusEventAsync("REGISTERED", "ERROR ");
                                }
                            } catch (Exception e) {
                                Log.d("CADIE S3", "File Upload Error - " + e.toString());
                            }
                        } else //THUMB
                        {
                            Log.d(CommonUtilities.TAG, "THUMB UPLOADED");
                            try {
                                FileTransferFunction.gcmCon.dispatchStatusEventAsync("REGISTERED", "THUMB_UPLOADED");
                            } catch (Exception e) {
                                Log.d(CommonUtilities.TAG, "THUMB UPLOADED Error - " + e.toString());
                            }
                        }
                        //Global.mTransferManager.shutdownNow(false);
                        //Global.mTransferManager = null;
                        ((FileTransferService) UploadModel.this.context).stopSelf();
                    }
                } else if (event.getEventCode() == ProgressEvent.FAILED_EVENT_CODE) {
                    // Terminal failure: surface the transfer exception, notify
                    // AIR, tear down the shared TransferManager, stop service.
                    Global.mUpload.removeProgressListener(mListener);
                    //upload();
                    try {
                        AmazonClientException e = Global.mUpload.waitForException();
                        Log.e("CADIE ", "CADIE S3 Exception - " + e.toString() + " " + event.getBytesTransferred());
                        try {
                            FileTransferFunction.gcmCon.dispatchStatusEventAsync("REGISTERED", "ERROR ");
                        } catch (Exception e1) {
                            Log.d(CommonUtilities.TAG, "CADIE S3 Exception - " + e1.toString());
                        }
                        Global.mTransferManager.shutdownNow(false);
                        Global.mTransferManager = null;
                        ((FileTransferService) UploadModel.this.context).stopSelf();
                    } catch (InterruptedException e) {
                        // NOTE(review): interrupt is swallowed here; consider
                        // Thread.currentThread().interrupt() to restore the flag.
                    }
                }
            }
        };
    } catch (Exception e) {
        Log.d(CommonUtilities.TAG, "UPLOAD EXCEPTIOn - " + e.toString());
    }
}
From source file:io.dockstore.common.FileProvisioning.java
License:Apache License
public void provisionOutputFile(FileInfo file, String cwlOutputPath) { File sourceFile = new File(cwlOutputPath); long inputSize = sourceFile.length(); if (file.getUrl().startsWith("s3://")) { AmazonS3 s3Client = FileProvisioning.getAmazonS3Client(config); String trimmedPath = file.getUrl().replace("s3://", ""); List<String> splitPathList = Lists.newArrayList(trimmedPath.split("/")); String bucketName = splitPathList.remove(0); PutObjectRequest putObjectRequest = new PutObjectRequest(bucketName, Joiner.on("/").join(splitPathList), sourceFile);// ww w .j a va 2s . c o m putObjectRequest.setGeneralProgressListener(new ProgressListener() { ProgressPrinter printer = new ProgressPrinter(); long runningTotal = 0; @Override public void progressChanged(ProgressEvent progressEvent) { if (progressEvent.getEventType() == ProgressEventType.REQUEST_BYTE_TRANSFER_EVENT) { runningTotal += progressEvent.getBytesTransferred(); } printer.handleProgress(runningTotal, inputSize); } }); try { s3Client.putObject(putObjectRequest); } finally { System.out.println(); } } else { try { FileSystemManager fsManager; // trigger a copy from the URL to a local file path that's a UUID to avoid collision fsManager = VFS.getManager(); // check for a local file path FileObject dest = fsManager.resolveFile(file.getUrl()); FileObject src = fsManager.resolveFile(sourceFile.getAbsolutePath()); copyFromInputStreamToOutputStream(src.getContent().getInputStream(), inputSize, dest.getContent().getOutputStream()); } catch (IOException e) { throw new RuntimeException("Could not provision output files", e); } } }
From source file:jp.classmethod.aws.gradle.s3.AmazonS3ProgressiveFileUploadTask.java
License:Apache License
@TaskAction public void upload() throws InterruptedException { // to enable conventionMappings feature String bucketName = getBucketName(); String key = getKey();/*from ww w . ja v a2 s . co m*/ File file = getFile(); if (bucketName == null) { throw new GradleException("bucketName is not specified"); } if (key == null) { throw new GradleException("key is not specified"); } if (file == null) { throw new GradleException("file is not specified"); } if (file.isFile() == false) { throw new GradleException("file must be regular file"); } AmazonS3PluginExtension ext = getProject().getExtensions().getByType(AmazonS3PluginExtension.class); AmazonS3 s3 = ext.getClient(); TransferManager s3mgr = TransferManagerBuilder.standard().withS3Client(s3).build(); getLogger().info("Uploading... s3://{}/{}", bucketName, key); Upload upload = s3mgr.upload( new PutObjectRequest(getBucketName(), getKey(), getFile()).withMetadata(getObjectMetadata())); upload.addProgressListener(new ProgressListener() { public void progressChanged(ProgressEvent event) { getLogger().info(" {}% uploaded", upload.getProgress().getPercentTransferred()); } }); upload.waitForCompletion(); setResourceUrl(s3.getUrl(bucketName, key).toString()); getLogger().info("Upload completed: {}", getResourceUrl()); }
From source file:msv_upload_tool.FXMLDocumentController.java
private void uploadObject() { final Long max = file.length(); task = new Task<Void>() { @Override// w ww . j ava 2 s . c o m protected Void call() { boolean doLoop = true; long total = 0; while (doLoop) { lock.readLock().lock(); try { total = totalBytes; } finally { lock.readLock().unlock(); } updateProgress(total, max); if (total == max) doLoop = false; try { Thread.sleep(50); //1000 milliseconds is one second. } catch (InterruptedException ex) { Thread.currentThread().interrupt(); } } updateProgress(-1, max); this.succeeded(); return null; } }; uploadProgress.progressProperty().bind(task.progressProperty()); task.setOnSucceeded(new EventHandler() { @Override public void handle(Event event) { label.setText(""); label2.setText(""); button.setDisable(true); button2.setDisable(false); } }); Thread th = new Thread(task); th.setDaemon(true); //disable the buttons button.setDisable(true); button2.setDisable(true); th.start(); String existingBucketName = "mstargeneralfiles"; String keyName = "duh/" + file.getName(); String filePath = file.getAbsolutePath(); TransferManager tm = new TransferManager(new ProfileCredentialsProvider()); // For more advanced uploads, you can create a request object // and supply additional request parameters (ex: progress listeners, // canned ACLs, etc.) PutObjectRequest request = new PutObjectRequest(existingBucketName, keyName, new File(filePath)); // You can ask the upload for its progress, or you can // add a ProgressListener to your request to receive notifications // when bytes are transferred. request.setGeneralProgressListener(new ProgressListener() { @Override public void progressChanged(ProgressEvent progressEvent) { System.out.println(progressEvent.toString()); lock.writeLock().lock(); try { totalBytes += progressEvent.getBytesTransferred(); } finally { lock.writeLock().unlock(); } } }); // TransferManager processes all transfers asynchronously, // so this call will return immediately. Upload upload = tm.upload(request); }
From source file:org.apache.hadoop.fs.s3a.S3AFileSystem.java
License:Apache License
/** * The src file is on the local disk. Add it to FS at * the given dst name./*from w w w .j a v a 2 s . c om*/ * * This version doesn't need to create a temporary file to calculate the md5. Sadly this doesn't seem to be * used by the shell cp :( * * delSrc indicates if the source should be removed * @param delSrc whether to delete the src * @param overwrite whether to overwrite an existing file * @param src path * @param dst path */ @Override public void copyFromLocalFile(boolean delSrc, boolean overwrite, Path src, Path dst) throws IOException { String key = pathToKey(dst); if (!overwrite && exists(dst)) { throw new IOException(dst + " already exists"); } LOG.info("Copying local file from " + src + " to " + dst); // Since we have a local file, we don't need to stream into a temporary file LocalFileSystem local = getLocal(getConf()); File srcfile = local.pathToFile(src); TransferManagerConfiguration transferConfiguration = new TransferManagerConfiguration(); transferConfiguration.setMinimumUploadPartSize(partSize); transferConfiguration.setMultipartUploadThreshold(partSizeThreshold); TransferManager transfers = new TransferManager(s3); transfers.setConfiguration(transferConfiguration); final ObjectMetadata om = new ObjectMetadata(); if (StringUtils.isNotBlank(serverSideEncryptionAlgorithm)) { om.setServerSideEncryption(serverSideEncryptionAlgorithm); } PutObjectRequest putObjectRequest = new PutObjectRequest(bucket, key, srcfile); putObjectRequest.setCannedAcl(cannedACL); putObjectRequest.setMetadata(om); ProgressListener progressListener = new ProgressListener() { public void progressChanged(ProgressEvent progressEvent) { switch (progressEvent.getEventCode()) { case ProgressEvent.PART_COMPLETED_EVENT_CODE: statistics.incrementWriteOps(1); break; } } }; Upload up = transfers.upload(putObjectRequest); up.addProgressListener(progressListener); try { up.waitForUploadResult(); statistics.incrementWriteOps(1); } catch (InterruptedException e) { throw new 
IOException("Got interrupted, cancelling"); } finally { transfers.shutdownNow(false); } // This will delete unnecessary fake parent directories finishedWrite(key); if (delSrc) { local.delete(src, false); } }
From source file:org.apache.hadoop.fs.s3a.S3AFileSystem.java
License:Apache License
private void copyFile(String srcKey, String dstKey) throws IOException { if (LOG.isDebugEnabled()) { LOG.debug("copyFile " + srcKey + " -> " + dstKey); }//from w ww . j av a 2 s .co m TransferManagerConfiguration transferConfiguration = new TransferManagerConfiguration(); transferConfiguration.setMultipartCopyPartSize(partSize); TransferManager transfers = new TransferManager(s3); transfers.setConfiguration(transferConfiguration); ObjectMetadata srcom = s3.getObjectMetadata(bucket, srcKey); final ObjectMetadata dstom = srcom.clone(); if (StringUtils.isNotBlank(serverSideEncryptionAlgorithm)) { dstom.setServerSideEncryption(serverSideEncryptionAlgorithm); } CopyObjectRequest copyObjectRequest = new CopyObjectRequest(bucket, srcKey, bucket, dstKey); copyObjectRequest.setCannedAccessControlList(cannedACL); copyObjectRequest.setNewObjectMetadata(dstom); ProgressListener progressListener = new ProgressListener() { public void progressChanged(ProgressEvent progressEvent) { switch (progressEvent.getEventCode()) { case ProgressEvent.PART_COMPLETED_EVENT_CODE: statistics.incrementWriteOps(1); break; } } }; Copy copy = transfers.copy(copyObjectRequest); copy.addProgressListener(progressListener); try { copy.waitForCopyResult(); statistics.incrementWriteOps(1); } catch (InterruptedException e) { throw new IOException("Got interrupted, cancelling"); } finally { transfers.shutdownNow(false); } }