List of usage examples for org.apache.commons.io.IOUtils#copyLarge.
Signature: public static long copyLarge(Reader input, Writer output) throws IOException
Copies all characters from a Reader to a Writer.
. From source file:hydrograph.engine.spark.datasource.utils.AWSS3Util.java
public void download(RunFileTransferEntity runFileTransferEntity) { log.debug("Start AWSS3Util download"); File filecheck = new File(runFileTransferEntity.getLocalPath()); if (runFileTransferEntity.getFailOnError()) if (!(filecheck.exists() && filecheck.isDirectory()) && !(runFileTransferEntity.getLocalPath().contains("hdfs://"))) { throw new AWSUtilException("Invalid local path"); }/* w w w. j a v a2 s.co m*/ boolean fail_if_exist = false; int retryAttempt = 0; int i; String amazonFileUploadLocationOriginal = null; String keyName = null; if (runFileTransferEntity.getRetryAttempt() == 0) retryAttempt = 1; else retryAttempt = runFileTransferEntity.getRetryAttempt(); for (i = 0; i < retryAttempt; i++) { log.info("connection attempt: " + (i + 1)); try { AmazonS3 s3Client = null; ClientConfiguration clientConf = new ClientConfiguration(); clientConf.setProtocol(Protocol.HTTPS); if (runFileTransferEntity.getCrediationalPropertiesFile() == null) { BasicAWSCredentials creds = new BasicAWSCredentials(runFileTransferEntity.getAccessKeyID(), runFileTransferEntity.getSecretAccessKey()); s3Client = AmazonS3ClientBuilder.standard().withClientConfiguration(clientConf) .withRegion(runFileTransferEntity.getRegion()) .withCredentials(new AWSStaticCredentialsProvider(creds)).build(); } else { File securityFile = new File(runFileTransferEntity.getCrediationalPropertiesFile()); PropertiesCredentials creds = new PropertiesCredentials(securityFile); s3Client = AmazonS3ClientBuilder.standard().withClientConfiguration(clientConf) .withRegion(runFileTransferEntity.getRegion()) .withCredentials(new AWSStaticCredentialsProvider(creds)).build(); } String s3folderName = null; String filepath = runFileTransferEntity.getFolder_name_in_bucket(); if (filepath.lastIndexOf("/") != -1) { s3folderName = filepath.substring(0, filepath.lastIndexOf("/")); keyName = filepath.substring(filepath.lastIndexOf("/") + 1); } else { keyName = filepath; } log.debug("keyName is: " + keyName); log.debug("bucket name 
is:" + runFileTransferEntity.getBucketName()); log.debug("Folder Name is" + runFileTransferEntity.getFolder_name_in_bucket()); if (s3folderName != null) { amazonFileUploadLocationOriginal = runFileTransferEntity.getBucketName() + "/" + s3folderName; } else { amazonFileUploadLocationOriginal = runFileTransferEntity.getBucketName(); } if (runFileTransferEntity.getLocalPath().contains("hdfs://")) { String outputPath = runFileTransferEntity.getLocalPath(); String s1 = outputPath.substring(7, outputPath.length()); String s2 = s1.substring(0, s1.indexOf("/")); File f = new File("/tmp"); if (!f.exists()) f.mkdir(); GetObjectRequest request = new GetObjectRequest(amazonFileUploadLocationOriginal, keyName); S3Object object = s3Client.getObject(request); if (runFileTransferEntity.getEncoding() != null) object.getObjectMetadata().setContentEncoding(runFileTransferEntity.getEncoding()); File fexist = new File(runFileTransferEntity.getLocalPath() + File.separatorChar + keyName); if (runFileTransferEntity.getOverwrite().trim().equalsIgnoreCase("Overwrite If Exists")) { S3ObjectInputStream objectContent = object.getObjectContent(); IOUtils.copyLarge(objectContent, new FileOutputStream("/tmp/" + keyName)); } else { if (!(fexist.exists() && !fexist.isDirectory())) { S3ObjectInputStream objectContent = object.getObjectContent(); IOUtils.copyLarge(objectContent, new FileOutputStream( runFileTransferEntity.getLocalPath() + File.separatorChar + keyName)); } else { fail_if_exist = true; Log.error("File already exists"); throw new AWSUtilException("File already exists"); } } Configuration conf = new Configuration(); conf.set("fs.defaultFS", "hdfs://" + s2); FileSystem hdfsFileSystem = FileSystem.get(conf); String s = outputPath.substring(7, outputPath.length()); String hdfspath = s.substring(s.indexOf("/"), s.length()); Path local = new Path("/tmp/" + keyName); Path hdfs = new Path(hdfspath); hdfsFileSystem.copyFromLocalFile(local, hdfs); } else { GetObjectRequest request = new 
GetObjectRequest(amazonFileUploadLocationOriginal, keyName); S3Object object = s3Client.getObject(request); if (runFileTransferEntity.getEncoding() != null) object.getObjectMetadata().setContentEncoding(runFileTransferEntity.getEncoding()); File fexist = new File(runFileTransferEntity.getLocalPath() + File.separatorChar + keyName); if (runFileTransferEntity.getOverwrite().trim().equalsIgnoreCase("Overwrite If Exists")) { S3ObjectInputStream objectContent = object.getObjectContent(); IOUtils.copyLarge(objectContent, new FileOutputStream( runFileTransferEntity.getLocalPath() + File.separatorChar + keyName)); } else { if (!(fexist.exists() && !fexist.isDirectory())) { S3ObjectInputStream objectContent = object.getObjectContent(); IOUtils.copyLarge(objectContent, new FileOutputStream( runFileTransferEntity.getLocalPath() + File.separatorChar + keyName)); } else { fail_if_exist = true; Log.error("File already exists"); throw new AWSUtilException("File already exists"); } } } } catch (AmazonServiceException e) { log.error("Amazon Service Exception", e); if (e.getStatusCode() == 403 || e.getStatusCode() == 404) { if (runFileTransferEntity.getFailOnError()) { Log.error("Incorrect details provided.Please provide correct details", e); throw new AWSUtilException("Incorrect details provided"); } else { Log.error("Unknown amezon exception occured", e); } } { try { Thread.sleep(runFileTransferEntity.getRetryAfterDuration()); } catch (Exception e1) { Log.error("Exception occured while sleeping the thread"); } continue; } } catch (Error e) { Log.error("Error occured while sleeping the thread"); throw new AWSUtilException(e); } catch (Exception e) { log.error("error while transfering file", e); try { Thread.sleep(runFileTransferEntity.getRetryAfterDuration()); } catch (Exception e1) { } catch (Error err) { Log.error("Error occured while downloading"); throw new AWSUtilException(err); } continue; } done = true; break; } if (runFileTransferEntity.getFailOnError() && !done) { 
log.error("File transfer failed"); throw new AWSUtilException("File transfer failed"); } else if (!done) { log.error("File transfer failed but mentioned fail on error as false"); } if (i == runFileTransferEntity.getRetryAttempt()) { if (runFileTransferEntity.getFailOnError()) { throw new AWSUtilException("File transfer failed"); } } log.debug("Finished AWSS3Util download"); }
From source file:com.manydesigns.portofino.pageactions.text.TextAction.java
protected void commonUploadAttachment() throws IOException { logger.debug("Uploading attachment"); viewAttachmentUrl = null;//www .j a v a 2 s . c o m InputStream attachmentStream = upload.getInputStream(); String attachmentId = RandomUtil.createRandomId(); File dataFile = RandomUtil.getCodeFile(pageInstance.getDirectory(), ATTACHMENT_FILE_NAME_PATTERN, attachmentId); // copy the data FileOutputStream fileOutputStream = new FileOutputStream(dataFile); IOUtils.copyLarge(attachmentStream, fileOutputStream); if (textConfiguration == null) { textConfiguration = new TextConfiguration(); } Attachment attachment = TextLogic.createAttachment(textConfiguration, attachmentId, upload.getFileName(), upload.getContentType(), upload.getSize()); attachment.setDownloadable(uploadDownloadable); viewAttachmentUrl = generateViewAttachmentUrl(attachmentId); saveConfiguration(textConfiguration); logger.info("Attachment uploaded: " + upload.getFileName() + " (" + attachmentId + ")"); IOUtils.closeQuietly(attachmentStream); IOUtils.closeQuietly(fileOutputStream); upload.delete(); logger.debug("Upload resources cleaned"); }
From source file:com.linkedin.pinot.common.utils.FileUploadDownloadClient.java
/** * Download a file using default settings. * * @param uri URI/* w w w . jav a 2s.c o m*/ * @param socketTimeoutMs Socket timeout in milliseconds * @param dest File destination * @return Response status code * @throws IOException * @throws HttpErrorStatusException */ public int downloadFile(URI uri, int socketTimeoutMs, File dest) throws IOException, HttpErrorStatusException { HttpUriRequest request = getDownloadFileRequest(uri, socketTimeoutMs); try (CloseableHttpResponse response = _httpClient.execute(request)) { StatusLine statusLine = response.getStatusLine(); int statusCode = statusLine.getStatusCode(); if (statusCode >= 300) { throw new HttpErrorStatusException(getErrorMessage(request, response), statusCode); } HttpEntity entity = response.getEntity(); try (InputStream inputStream = response.getEntity().getContent(); OutputStream outputStream = new BufferedOutputStream(new FileOutputStream(dest))) { IOUtils.copyLarge(inputStream, outputStream); } // Verify content length if known long contentLength = entity.getContentLength(); if (contentLength >= 0L) { long fileLength = dest.length(); Preconditions.checkState(fileLength == contentLength, String.format( "While downloading file with uri: %s, file length: %d does not match content length: %d", uri, fileLength, contentLength)); } return statusCode; } }
From source file:com.hipu.bdb.util.FileUtils.java
/** * Read the entire stream to EOF into the passed file. * Closes <code>is</code> when done or if an exception. * @param is Stream to read./*from w ww. j a v a 2 s . co m*/ * @param toFile File to write to. * @throws IOException */ public static long readFullyToFile(InputStream is, File toFile) throws IOException { OutputStream os = org.apache.commons.io.FileUtils.openOutputStream(toFile); try { return IOUtils.copyLarge(is, os); } finally { IOUtils.closeQuietly(os); IOUtils.closeQuietly(is); } }
From source file:eu.planets_project.services.utils.DigitalObjectUtils.java
private static File getZipAsFile(DigitalObject digOb) { String folderName = randomizeFileName(getFolderNameFromDigObject(digOb)); File tmpFolder = new File(utils_tmp, folderName); File zip = null;/*from ww w . j a v a2 s. c o m*/ try { FileUtils.forceMkdir(tmpFolder); zip = new File(tmpFolder, getFileNameFromDigObject(digOb, null)); FileOutputStream out = new FileOutputStream(zip); IOUtils.copyLarge(digOb.getContent().getInputStream(), out); out.close(); } catch (FileNotFoundException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } return zip; }
From source file:com.dnanexus.DXFile.java
/** * Downloads the specified byte range of the file into an OutputStream. * * @param os output stream downloaded file contents are written into * @param start first byte of the range within the file to be downloaded. The start byte is * inclusive in the range, and 0 is indexed as the first byte in the file. * @param end last byte of the range within the file to be downloaded. The end byte is exclusive * (not included in the range). An input of -1 specifies the end of the file. * * @throws IOException//from w w w . j a v a2 s . c o m */ public void downloadToOutputStream(OutputStream os, long start, long end) throws IOException { InputStream is = getDownloadStream(start, end); IOUtils.copyLarge(is, os); }
From source file:com.tct.email.LegacyConversions.java
/**
 * Save the body part of a single attachment, to a file in the attachments directory.
 *
 * Copies the part's body stream into the per-account attachment directory,
 * then records the resulting size and content URI both on the in-memory
 * {@code localAttachment} and in the attachment provider row. When the part
 * has no body, only the content URI is recorded.
 *
 * NOTE(review): on a MessagingException mid-copy the partial file is cleaned
 * up via deleteDirtyAttachment and the method returns silently — callers
 * apparently treat that as a soft failure; confirm against call sites.
 */
public static void saveAttachmentBody(final Context context, final Part part,
        final Attachment localAttachment, long accountId) throws MessagingException, IOException {
    if (part.getBody() != null) {
        final long attachmentId = localAttachment.mId;
        // Ensure the per-account attachment directory exists.
        final File saveIn = AttachmentUtilities.getAttachmentDirectory(context, accountId);
        if (!saveIn.isDirectory() && !saveIn.mkdirs()) {
            throw new IOException("Could not create attachment directory");
        }
        final File saveAs = AttachmentUtilities.getAttachmentFilename(context, accountId, attachmentId);
        InputStream in = null;
        FileOutputStream out = null;
        final long copySize;
        try {
            in = part.getBody().getInputStream();
            out = new FileOutputStream(saveAs);
            copySize = IOUtils.copyLarge(in, out);
            //TS: Gantao 2016-02-18 EMAIL BUGFIX_1595378 ADD_S
        } catch (MessagingException me) {
            // Fetching the body failed: remove the partial file and bail out quietly.
            LogUtils.e(LogUtils.TAG, "Get the attachment %d failed", attachmentId);
            deleteDirtyAttachment(context, attachmentId);
            return;
            //TS: Gantao 2016-02-18 EMAIL BUGFIX_1595378 ADD_E
        } finally {
            if (in != null) {
                in.close();
            }
            if (out != null) {
                out.close();
            }
        }
        // update the attachment with the extra information we now know
        final String contentUriString = AttachmentUtilities.getAttachmentUri(accountId, attachmentId)
                .toString();
        localAttachment.mSize = copySize;
        localAttachment.setContentUri(contentUriString);
        // update the attachment in the database as well
        final ContentValues cv = new ContentValues(3);
        cv.put(AttachmentColumns.SIZE, copySize);
        cv.put(AttachmentColumns.CONTENT_URI, contentUriString);
        cv.put(AttachmentColumns.UI_STATE, UIProvider.AttachmentState.SAVED);
        final Uri uri = ContentUris.withAppendedId(Attachment.CONTENT_URI, attachmentId);
        context.getContentResolver().update(uri, cv, null, null);
    } //TS: wenggangjin 2014-12-10 EMAIL BUGFIX_852100 MOD_S
    else {
        // No body: still record the content URI so the attachment row is consistent.
        String contentUriString = AttachmentUtilities.getAttachmentUri(accountId,
                localAttachment.mId).toString();
        localAttachment.setContentUri(contentUriString);
    }
    //TS: wenggangjin 2014-12-10 EMAIL BUGFIX_852100 MOD_E
}
From source file:com.dnanexus.DXFile.java
/** * Uploads data from the specified stream to the file. * * <p>// w ww.j av a 2 s .c o m * The file must be in the "open" state. This method assumes exclusive access to the file: the * file must have no parts uploaded before this call is made, and no other clients may upload * data to the same file concurrently. * </p> * * @param data stream containing data to be uploaded * * @throws IOException if an error occurs while uploading the data */ public void upload(InputStream data) throws IOException { Preconditions.checkNotNull(data, "data may not be null"); try (OutputStream uploadOutputStream = this.getUploadStream()) { IOUtils.copyLarge(data, uploadOutputStream); } }
From source file:eu.planets_project.services.utils.ZipUtils.java
private static FileEntry writeEntry(Zip64File zip64File, FileEntry targetPath, File toWrite, boolean compress) { InputStream in = null;//www . ja v a2 s.c o m EntryOutputStream out = null; processAndCreateFolderEntries(zip64File, parseTargetPath(targetPath.getName(), toWrite), compress); try { if (!compress) { out = zip64File.openEntryOutputStream(targetPath.getName(), FileEntry.iMETHOD_STORED, getFileDate(toWrite)); } else { out = zip64File.openEntryOutputStream(targetPath.getName(), FileEntry.iMETHOD_DEFLATED, getFileDate(toWrite)); } if (!targetPath.isDirectory()) { in = new FileInputStream(toWrite); IOUtils.copyLarge(in, out); in.close(); } out.flush(); out.close(); if (targetPath.isDirectory()) { log.info("[createZip] Written folder entry to zip: " + targetPath.getName()); } else { log.info("[createZip] Written file entry to zip: " + targetPath.getName()); } } catch (FileNotFoundException e1) { e1.printStackTrace(); } catch (ZipException e1) { e1.printStackTrace(); } catch (IOException e1) { e1.printStackTrace(); } return targetPath; }
From source file:com.gargoylesoftware.htmlunit.HttpWebConnection.java
/** * Reads the content of the stream and saves it in memory or on the file system. * @param is the stream to read//from w w w .j a v a 2 s .c o m * @param maxInMemory the maximumBytes to store in memory, after which save to a local file * @return a wrapper around the downloaded content * @throws IOException in case of read issues */ public static DownloadedContent downloadContent(final InputStream is, final int maxInMemory) throws IOException { if (is == null) { return new DownloadedContent.InMemory(new byte[] {}); } final ByteArrayOutputStream bos = new ByteArrayOutputStream(); final byte[] buffer = new byte[1024]; int nbRead; try { while ((nbRead = is.read(buffer)) != -1) { bos.write(buffer, 0, nbRead); if (bos.size() > maxInMemory) { // we have exceeded the max for memory, let's write everything to a temporary file final File file = File.createTempFile("htmlunit", ".tmp"); file.deleteOnExit(); try (final FileOutputStream fos = new FileOutputStream(file)) { bos.writeTo(fos); // what we have already read IOUtils.copyLarge(is, fos); // what remains from the server response } return new DownloadedContent.OnFile(file, true); } } } catch (final ConnectionClosedException e) { LOG.warn("Connection was closed while reading from stream.", e); return new DownloadedContent.InMemory(bos.toByteArray()); } catch (final IOException e) { // this might happen with broken gzip content LOG.warn("Exception while reading from stream.", e); return new DownloadedContent.InMemory(bos.toByteArray()); } finally { IOUtils.closeQuietly(is); } return new DownloadedContent.InMemory(bos.toByteArray()); }