List of usage examples for java.io.BufferedOutputStream.flush()
@Override public synchronized void flush() throws IOException
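Before the project examples below, here is a minimal, self-contained sketch of the typical pattern (not taken from any of the projects listed; the file name "example.txt" and the message string are placeholders): wrap a FileOutputStream in a BufferedOutputStream, write, then call flush() to push the buffered bytes to the underlying stream.

import java.io.BufferedOutputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

public class FlushExample {
    public static void main(String[] args) throws IOException {
        // try-with-resources closes (and therefore flushes) the stream automatically
        try (BufferedOutputStream out = new BufferedOutputStream(new FileOutputStream("example.txt"))) {
            out.write("hello".getBytes(StandardCharsets.UTF_8));
            out.flush(); // force the buffered bytes out to the FileOutputStream now
        }
    }
}

Note that BufferedOutputStream.close() already flushes the buffer, so an explicit flush(), as in several of the examples below, matters mainly when the stream stays open after writing or when the bytes must reach the underlying stream before further work is done.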
From source file: org.opendedup.sdfs.filestore.cloud.BatchAwsS3ChunkStore.java

@Override
public void uploadFile(File f, String to, String pp) throws IOException {
    this.s3clientLock.readLock().lock();
    try {
        InputStream in = null;
        while (to.startsWith(File.separator))
            to = to.substring(1);
        String pth = pp + "/" + EncyptUtils.encString(to, Main.chunkStoreEncryptionEnabled);
        SDFSLogger.getLog().info("uploading " + f.getPath() + " to " + to + " pth " + pth);
        boolean isDir = false;
        boolean isSymlink = false;
        if (!OSValidator.isWindows()) {
            isDir = Files.readAttributes(f.toPath(), PosixFileAttributes.class, LinkOption.NOFOLLOW_LINKS)
                    .isDirectory();
            isSymlink = Files.readAttributes(f.toPath(), PosixFileAttributes.class, LinkOption.NOFOLLOW_LINKS)
                    .isSymbolicLink();
        } else {
            isDir = f.isDirectory();
        }
        if (isSymlink) {
            try {
                HashMap<String, String> metaData = new HashMap<String, String>();
                metaData.put("encrypt", Boolean.toString(Main.chunkStoreEncryptionEnabled));
                metaData.put("lastmodified", Long.toString(f.lastModified()));
                String slp = EncyptUtils.encString(Files.readSymbolicLink(f.toPath()).toFile().getPath(),
                        Main.chunkStoreEncryptionEnabled);
                metaData.put("symlink", slp);
                ObjectMetadata md = new ObjectMetadata();
                md.setContentType("binary/octet-stream");
                md.setContentLength(pth.getBytes().length);
                md.setUserMetadata(metaData);
                PutObjectRequest req = new PutObjectRequest(this.name, pth,
                        new ByteArrayInputStream(pth.getBytes()), md);
                s3Service.putObject(req);
                if (this.isClustered())
                    this.checkoutFile(pth);
            } catch (Exception e1) {
                throw new IOException(e1);
            }
        } else if (isDir) {
            HashMap<String, String> metaData = FileUtils.getFileMetaData(f, Main.chunkStoreEncryptionEnabled);
            metaData.put("encrypt", Boolean.toString(Main.chunkStoreEncryptionEnabled));
            metaData.put("lastmodified", Long.toString(f.lastModified()));
            metaData.put("directory", "true");
            ObjectMetadata md = new ObjectMetadata();
            md.setContentType("binary/octet-stream");
            md.setContentLength(pth.getBytes().length);
            md.setUserMetadata(metaData);
            try {
                PutObjectRequest req = new PutObjectRequest(this.name, pth,
                        new ByteArrayInputStream(pth.getBytes()), md);
                s3Service.putObject(req);
                if (this.isClustered())
                    this.checkoutFile(pth);
            } catch (Exception e1) {
                SDFSLogger.getLog().error("error uploading", e1);
                throw new IOException(e1);
            }
        } else {
            String rnd = RandomGUID.getGuid();
            File p = new File(this.staged_sync_location, rnd);
            File z = new File(this.staged_sync_location, rnd + ".z");
            File e = new File(this.staged_sync_location, rnd + ".e");
            while (z.exists()) {
                rnd = RandomGUID.getGuid();
                p = new File(this.staged_sync_location, rnd);
                z = new File(this.staged_sync_location, rnd + ".z");
                e = new File(this.staged_sync_location, rnd + ".e");
            }
            try {
                BufferedInputStream is = new BufferedInputStream(new FileInputStream(f));
                BufferedOutputStream os = new BufferedOutputStream(new FileOutputStream(p));
                IOUtils.copy(is, os);
                os.flush();
                os.close();
                is.close();
                if (Main.compress) {
                    CompressionUtils.compressFile(p, z);
                    p.delete();
                    p = z;
                }
                byte[] ivb = null;
                if (Main.chunkStoreEncryptionEnabled) {
                    try {
                        ivb = PassPhrase.getByteIV();
                        EncryptUtils.encryptFile(p, e, new IvParameterSpec(ivb));
                    } catch (Exception e1) {
                        throw new IOException(e1);
                    }
                    p.delete();
                    p = e;
                }
                String objName = pth;
                ObjectMetadata md = new ObjectMetadata();
                Map<String, String> umd = FileUtils.getFileMetaData(f, Main.chunkStoreEncryptionEnabled);
                md.setUserMetadata(umd);
                md.addUserMetadata("lz4compress", Boolean.toString(Main.compress));
                md.addUserMetadata("encrypt", Boolean.toString(Main.chunkStoreEncryptionEnabled));
                if (ivb != null)
                    md.addUserMetadata("ivspec", BaseEncoding.base64().encode(ivb));
                md.addUserMetadata("lastmodified", Long.toString(f.lastModified()));
                if (simpleS3) {
                    md.setContentType("binary/octet-stream");
                    in = new BufferedInputStream(new FileInputStream(p), 32768);
                    try {
                        if (md5sum) {
                            byte[] md5Hash = ServiceUtils.computeMD5Hash(in);
                            in.close();
                            String mds = BaseEncoding.base64().encode(md5Hash);
                            md.setContentMD5(mds);
                            md.addUserMetadata("md5sum", mds);
                        }
                    } catch (NoSuchAlgorithmException e2) {
                        SDFSLogger.getLog().error("while hashing", e2);
                        throw new IOException(e2);
                    }
                    in = new FileInputStream(p);
                    md.setContentLength(p.length());
                    try {
                        PutObjectRequest req = new PutObjectRequest(this.name, objName, in, md);
                        s3Service.putObject(req);
                        if (this.isClustered())
                            this.checkoutFile(pth);
                        SDFSLogger.getLog().debug(
                                "uploaded=" + f.getPath() + " lm=" + md.getUserMetadata().get("lastmodified"));
                    } catch (AmazonS3Exception e1) {
                        if (e1.getStatusCode() == 409) {
                            try {
                                s3Service.deleteObject(this.name, objName);
                                this.uploadFile(f, to, pp);
                                return;
                            } catch (Exception e2) {
                                throw new IOException(e2);
                            }
                        } else {
                            throw new IOException(e1);
                        }
                    } catch (Exception e1) {
                        // SDFSLogger.getLog().error("error uploading", e1);
                        throw new IOException(e1);
                    }
                } else {
                    try {
                        md.setContentType("binary/octet-stream");
                        in = new BufferedInputStream(new FileInputStream(p), 32768);
                        byte[] md5Hash = ServiceUtils.computeMD5Hash(in);
                        in.close();
                        String mds = BaseEncoding.base64().encode(md5Hash);
                        md.setContentMD5(mds);
                        md.addUserMetadata("md5sum", mds);
                        in = new BufferedInputStream(new FileInputStream(p), 32768);
                        md.setContentLength(p.length());
                        PutObjectRequest req = new PutObjectRequest(this.name, objName, in, md);
                        multiPartUpload(req);
                        if (this.isClustered())
                            this.checkoutFile(pth);
                    } catch (AmazonS3Exception e1) {
                        if (e1.getStatusCode() == 409) {
                            try {
                                s3Service.deleteObject(this.name, objName);
                                this.uploadFile(f, to, pp);
                                return;
                            } catch (Exception e2) {
                                throw new IOException(e2);
                            }
                        } else {
                            throw new IOException(e1);
                        }
                    } catch (Exception e1) {
                        // SDFSLogger.getLog().error("error uploading", e1);
                        throw new IOException(e1);
                    }
                }
            } finally {
                try {
                    if (in != null)
                        in.close();
                } finally {
                    p.delete();
                    z.delete();
                    e.delete();
                }
            }
        }
    } finally {
        this.s3clientLock.readLock().unlock();
    }
}
From source file: org.webservice.fotolia.FotoliaApi.java

/**
 * Download a media and write it to a file if necessary
 *
 * @param download_url URL as returned by getMedia()
 * @param output_file if null the downloaded file will be echoed on standard output
 */
public void downloadMedia(final String download_url, final String output_file)
        throws FileNotFoundException, IOException, FotoliaApiException {
    BufferedOutputStream stream;
    DefaultHttpClient client;
    HttpResponse response;
    StatusLine statusLine;
    HttpEntity entity;
    JSONObject obj;
    String error_msg;
    int error_code;

    if (output_file == null) {
        stream = new BufferedOutputStream(new BufferedOutputStream(System.out));
    } else {
        stream = new BufferedOutputStream(new FileOutputStream(output_file));
    }

    client = this._getHttpClient(true);
    response = client.execute(new HttpGet(download_url));
    statusLine = response.getStatusLine();
    entity = response.getEntity();
    if (statusLine.getStatusCode() != 200) {
        if (entity == null) {
            throw new FotoliaApiException(statusLine.getStatusCode(), statusLine.getReasonPhrase());
        } else {
            obj = (JSONObject) JSONValue.parse(EntityUtils.toString(entity));
            error_msg = (String) obj.get("error");
            if (obj.get("code") != null) {
                error_code = Integer.parseInt((String) obj.get("code"));
            } else {
                error_code = statusLine.getStatusCode();
            }
            throw new FotoliaApiException(error_code, error_msg);
        }
    }

    stream.write(EntityUtils.toByteArray(entity));
    stream.flush();
    if (output_file != null) {
        stream.close();
    }
}
From source file: okuyama.imdst.util.FileBaseDataMap.java

/**
 * Initializes the cache and the on-disk data files, attempting to recover
 * any data file whose length is not a multiple of lineDataSize.
 *
 * @param renewData when true, existing data files are deleted
 */
public void init(boolean renewData) {
    this.innerCache = new InnerCache(this.innerCacheSize);
    this.totalSize = new AtomicInteger(0);
    this.dataFileList = new File[numberOfDataFiles];

    try {
        this.fileDirs = new String[this.baseFileDirs.length * this.dataDirsFactor];
        int counter = 0;

        for (int idx = 0; idx < this.baseFileDirs.length; idx++) {
            for (int idx2 = 0; idx2 < dataDirsFactor; idx2++) {
                fileDirs[counter] = baseFileDirs[idx] + idx2 + "/";
                File dir = new File(fileDirs[counter]);
                if (!dir.exists())
                    dir.mkdirs();
                counter++;
            }
        }

        for (int i = 0; i < numberOfDataFiles; i++) {
            File file = new File(this.fileDirs[i % this.fileDirs.length] + i + ".data");

            if (file.length() > 0 && (file.length() % lineDataSize) != 0) {
                if (renewData) {
                    if (file.exists()) {
                        file.delete();
                    }
                } else if ((file.length() / lineDataSize) == 0) {
                    if (file.exists()) {
                        file.delete();
                    }
                } else {
                    System.out.println(file.getAbsolutePath() + " = This file has broken, it restores.");

                    // copy only the whole records into a temporary recovery file
                    File recoverFile = new File(this.fileDirs[i % this.fileDirs.length] + i + ".recover");
                    BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(recoverFile));
                    BufferedInputStream bis = new BufferedInputStream(new FileInputStream(file));

                    byte[] recoverData = new byte[new Long(((file.length() / lineDataSize) * lineDataSize))
                            .intValue()];
                    bis.read(recoverData);
                    bos.write(recoverData);
                    bos.flush();
                    bos.close();
                    bis.close();

                    file = new File(this.fileDirs[i % this.fileDirs.length] + i + ".data");
                    recoverFile.delete();
                }
            } else {
                if (renewData) {
                    if (file.exists()) {
                        file.delete();
                    }
                }
            }

            dataFileList[i] = file;
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}
From source file: info.ajaxplorer.synchro.SyncJob.java

protected void uriContentToFile(URI uri, File targetFile, File uploadFile) throws Exception {
    RestRequest rest = new RestRequest();
    int postedProgress = 0;
    int buffersize = 16384;
    int count = 0;
    HttpEntity entity = rest.getNotConsumedResponseEntity(uri, null, uploadFile);
    long fullLength = entity.getContentLength();
    Logger.getRootLogger().info("Downloaded " + fullLength + " bytes");

    InputStream input = entity.getContent();
    BufferedInputStream in = new BufferedInputStream(input, buffersize);

    FileOutputStream output = new FileOutputStream(targetFile.getPath());
    BufferedOutputStream out = new BufferedOutputStream(output);

    byte data[] = new byte[buffersize];
    int total = 0;

    long startTime = System.nanoTime();
    long lastTime = startTime;
    int lastTimeTotal = 0;

    long secondLength = 1000000000;
    long interval = (long) 2 * secondLength;

    while ((count = in.read(data)) != -1) {
        long duration = System.nanoTime() - lastTime;

        int tmpTotal = total + count;
        // publishing the progress....
        int tmpProgress = (int) (tmpTotal * 100 / fullLength);
        if (tmpProgress - postedProgress > 0 || duration > secondLength) {
            if (duration > interval) {
                lastTime = System.nanoTime();
                long lastTimeBytes = (long) ((tmpTotal - lastTimeTotal) * secondLength / 1024 / 1000);
                long speed = (lastTimeBytes / (duration));
                double bytesleft = (double) (((double) fullLength - (double) tmpTotal) / 1024);
                @SuppressWarnings("unused")
                double ETC = bytesleft / (speed * 10);
            }
            if (tmpProgress != postedProgress) {
                logChange(Manager.getMessage("job_log_downloading"),
                        targetFile.getName() + " - " + tmpProgress + "%");
            }
            postedProgress = tmpProgress;
        }
        out.write(data, 0, count);
        total = tmpTotal;
        if (this.interruptRequired) {
            break;
        }
    }
    out.flush();
    if (out != null)
        out.close();
    if (in != null)
        in.close();
    if (this.interruptRequired) {
        rest.release();
        throw new InterruptedException();
    }
    rest.release();
}
From source file: org.codelabor.system.file.web.controller.xplatform.FileController.java

@RequestMapping("/download")
public String download(Model model, @RequestHeader("User-Agent") String userAgent,
        @RequestParam("fileId") String fileId, HttpServletResponse response) throws Exception {
    FileDTO fileDTO = fileManager.selectFileByFileId(fileId);
    logger.debug("fileDTO: {}", fileDTO);

    String repositoryPath = fileDTO.getRepositoryPath();
    String uniqueFilename = fileDTO.getUniqueFilename();
    String realFilename = fileDTO.getRealFilename();

    InputStream inputStream = null;
    BufferedInputStream bufferdInputStream = null;
    ServletOutputStream servletOutputStream = null;
    BufferedOutputStream bufferedOutputStream = null;

    StringBuilder sb = new StringBuilder();
    DataSetList outputDataSetList = new DataSetList();
    VariableList outputVariableList = new VariableList();

    try {
        if (StringUtils.isNotEmpty(repositoryPath)) {
            // FILE_SYSTEM
            sb.append(repositoryPath);
            if (!repositoryPath.endsWith(File.separator)) {
                sb.append(File.separator);
            }
            sb.append(uniqueFilename);
            File file = new File(sb.toString());
            inputStream = new FileInputStream(file);
        } else {
            // DATABASE
            byte[] bytes = new byte[] {};
            if (fileDTO.getFileSize() > 0) {
                bytes = fileDTO.getBytes();
            }
            inputStream = new ByteArrayInputStream(bytes);
        }

        // set response contenttype, header
        String encodedRealFilename = URLEncoder.encode(realFilename, "UTF-8");
        logger.debug("realFilename: {}", realFilename);
        logger.debug("encodedRealFilename: {}", encodedRealFilename);

        response.setContentType(org.codelabor.system.file.FileConstants.CONTENT_TYPE);
        sb.setLength(0);
        if (userAgent.indexOf("MSIE5.5") > -1) {
            sb.append("filename=");
        } else {
            sb.append("attachment; filename=");
        }
        sb.append(encodedRealFilename);
        response.setHeader(HttpResponseHeaderConstants.CONTENT_DISPOSITION, sb.toString());

        logger.debug("header: {}", sb.toString());
        logger.debug("character encoding: {}", response.getCharacterEncoding());
        logger.debug("content type: {}", response.getContentType());
        logger.debug("bufferSize: {}", response.getBufferSize());
        logger.debug("locale: {}", response.getLocale());

        bufferdInputStream = new BufferedInputStream(inputStream);
        servletOutputStream = response.getOutputStream();
        bufferedOutputStream = new BufferedOutputStream(servletOutputStream);

        int bytesRead;
        byte buffer[] = new byte[2048];
        while ((bytesRead = bufferdInputStream.read(buffer)) != -1) {
            bufferedOutputStream.write(buffer, 0, bytesRead);
        }
        // flush stream
        bufferedOutputStream.flush();

        XplatformUtils.setSuccessMessage(
                messageSource.getMessage("info.success", new Object[] {}, forcedLocale), outputVariableList);
    } catch (Exception e) {
        e.printStackTrace();
        logger.error(e.getMessage());
        throw new XplatformException(
                messageSource.getMessage("error.failure", new Object[] {}, forcedLocale), e);
    } finally {
        // close stream
        inputStream.close();
        bufferdInputStream.close();
        servletOutputStream.close();
        bufferedOutputStream.close();
    }

    model.addAttribute(OUTPUT_DATA_SET_LIST, outputDataSetList);
    model.addAttribute(OUTPUT_VARIABLE_LIST, outputVariableList);
    return VIEW_NAME;
}
From source file: edu.harvard.iq.dataverse.ingest.IngestServiceBean.java

private DataFile createSingleDataFile(DatasetVersion version, InputStream inputStream, String fileName,
        String contentType, boolean addToDataset) {

    DataFile datafile = new DataFile(contentType);
    datafile.setModificationTime(new Timestamp(new Date().getTime()));
    /**
     * @todo Think more about when permissions on files are modified.
     * Obviously, here at create time files have some sort of permissions,
     * even if these permissions are *implied*, by ViewUnpublishedDataset at
     * the dataset level, for example.
     */
    datafile.setPermissionModificationTime(new Timestamp(new Date().getTime()));
    FileMetadata fmd = new FileMetadata();

    fmd.setLabel(checkForDuplicateFileNames(version, fileName));

    if (addToDataset) {
        datafile.setOwner(version.getDataset());
    }
    fmd.setDataFile(datafile);
    datafile.getFileMetadatas().add(fmd);
    if (addToDataset) {
        if (version.getFileMetadatas() == null) {
            version.setFileMetadatas(new ArrayList());
        }
        version.getFileMetadatas().add(fmd);
        fmd.setDatasetVersion(version);
        version.getDataset().getFiles().add(datafile);
    }

    // And save the file - but only if the InputStream is not null;
    // (the temp file may be saved already - if this is a single
    // file upload case - and in that case this method gets called
    // with null for the inputStream)
    if (inputStream != null) {
        fileService.generateStorageIdentifier(datafile);
        BufferedOutputStream outputStream = null;
        // Once again, at this point we are dealing with *temp*
        // files only; these are always stored on the local filesystem,
        // so we are using FileInput/Output Streams to read and write
        // these directly, instead of going through the Data Access
        // framework.
        // -- L.A.
        try {
            outputStream = new BufferedOutputStream(
                    new FileOutputStream(getFilesTempDirectory() + "/" + datafile.getStorageIdentifier()));

            byte[] dataBuffer = new byte[8192];
            int i = 0;

            while ((i = inputStream.read(dataBuffer)) > 0) {
                outputStream.write(dataBuffer, 0, i);
                outputStream.flush();
            }
        } catch (IOException ioex) {
            datafile = null;
        } finally {
            try {
                outputStream.close();
            } catch (IOException ioex) {
            }
        }

        // MD5:
        if (datafile != null) {
            MD5Checksum md5Checksum = new MD5Checksum();
            try {
                datafile.setmd5(md5Checksum
                        .CalculateMD5(getFilesTempDirectory() + "/" + datafile.getStorageIdentifier()));
            } catch (Exception md5ex) {
                logger.warning("Could not calculate MD5 signature for new file " + fileName);
            }
        }
    }

    return datafile;
}
From source file: net.sf.jftp.net.WebdavConnection.java

private void work(String file, String outfile) {
    Log.out("transfer started\nfile: " + file + "\noutfile: " + outfile);

    BufferedInputStream in = null;
    BufferedOutputStream out = null;

    try {
        if (outfile.startsWith("http://")) {
            //out = new BufferedOutputStream(new FileOutputStream(new WebdavResource(new HttpURL(file)).getMethodData());
            //new WebdavFile(new URL(outfile), user, pass)));
            //in = new BufferedInputStream(new FileInputStream(file));
            String resPath = outfile.substring(0, outfile.lastIndexOf("/") + 1);
            String name = outfile.substring(outfile.lastIndexOf("/") + 1);

            Log.debug("Uploading " + file + " to " + resPath + " as " + name);

            //HttpURL url = getURL(resPath);
            WebdavResource res = getResource(resPath); //new WebdavResource(url);

            /*
            if(res.checkinMethod()) Log.debug("Checkin OK");
            else Log.debug("Checkin FAILED");

            Enumeration e = res.getAllowedMethods();
            while(e != null && e.hasMoreElements()) {
                Log.debug("Method: " + e.nextElement().toString());
            }
            */
            if (res.putMethod(new File(file))) {
                fireProgressUpdate(file, DataConnection.FINISHED, -1);
            } else {
                Log.debug("Upload failed.");
                fireProgressUpdate(file, DataConnection.FAILED, -1);
            }

            return;
        }

        Log.debug("Downloading " + file + " to " + outfile);

        out = new BufferedOutputStream(new FileOutputStream(outfile));
        in = new BufferedInputStream(getResource(file).getMethodData());
        //new WebdavResource(getURL(file)).getMethodData());

        byte[] buf = new byte[webdavBuffer];

        int len = 0;
        int reallen = 0;

        //System.out.println(file+":"+getLocalPath()+outfile);
        while (true) {
            len = in.read(buf);
            //System.out.print(".");
            if (len == StreamTokenizer.TT_EOF) {
                break;
            }

            out.write(buf, 0, len);
            reallen += len;

            fireProgressUpdate(StringUtils.getFile(file), DataConnection.GET, reallen);
        }

        fireProgressUpdate(file, DataConnection.FINISHED, -1);
    } catch (IOException ex) {
        Log.debug("Error with file IO (" + ex + ")!");
        ex.printStackTrace();
        fireProgressUpdate(file, DataConnection.FAILED, -1);
    } finally {
        try {
            out.flush();
            out.close();
            in.close();
        } catch (Exception ex) {
            ex.printStackTrace();
        }
    }
}
From source file: cn.lhfei.fu.service.impl.ThesisBaseServiceImpl.java

@Override
public boolean update(ThesisBaseModel model, String userType) throws Exception {
    OutputStream out = null;
    BufferedOutputStream bf = null;
    Date currentTime = new Date();
    boolean result = false;

    List<MultipartFile> files = model.getFiles();

    try {
        boolean modelIsValid = this.updateThesis(model);
        if (!modelIsValid) {
            return result;
        }

        int num = 1;
        for (MultipartFile file : files) { // save archive file
            if (file.getSize() > 0) {
                String filePath = filePathBuilder.buildThesisFullPath(model, model.getStudentName());
                String fileName = filePathBuilder.buildThesisFileName(model, model.getStudentName(), num);
                String[] names = file.getOriginalFilename().split("[.]");
                String fileType = names[names.length - 1];
                String fullPath = filePath + File.separator + fileName + "." + fileType;

                out = new FileOutputStream(new File(fullPath));
                bf = new BufferedOutputStream(out);

                IOUtils.copyLarge(file.getInputStream(), bf);

                ThesisArchive archive = new ThesisArchive();
                archive.setThesisBaseId(model.getBaseId());
                archive.setStudentId(model.getStudentId());
                archive.setArchiveName(fileName);
                archive.setArchivePath(fullPath);
                archive.setCreateTime(currentTime);
                archive.setModifyTime(currentTime);
                archive.setStudentBaseId(model.getStudentBaseId());
                archive.setThesisTitle(model.getThesisTitle());
                archive.setExtend(model.getThesisEnTitle());
                // archive.setExtend1(model.getThesisType());

                // set the approval status according to the user type
                if (userType != null && userType.equals(UserTypeEnum.STUDENT.getCode())) {
                    archive.setStatus("" + ApproveStatusEnum.DSH.getCode());
                } else if (userType != null && userType.equals(UserTypeEnum.TEACHER.getCode())) {
                    archive.setStatus("" + ApproveStatusEnum.DSH.getCode());
                } else if (userType != null && userType.equals(UserTypeEnum.ADMIN.getCode())) {
                    archive.setStatus("" + ApproveStatusEnum.YSH.getCode());
                }

                thesisArchiveDAO.save(archive);

                // auto increment archives number.
                num++;
            }
        }
        result = true;
    } catch (IOException e) {
        log.error(e.getMessage(), e);
        throw new IOException(e.getMessage(), e);
    } catch (NullPointerException e) {
        log.error("File name arguments missed.", e);
        throw new NullPointerException(e.getMessage());
    } finally {
        if (out != null) {
            try {
                out.flush();
                out.close();
            } catch (IOException e) {
                log.error(e.getMessage(), e);
            }
        }
        if (bf != null) {
            try {
                bf.flush();
                bf.close();
            } catch (IOException e) {
                log.error(e.getMessage(), e);
            }
        }
    }

    return result;
}
From source file: cn.lhfei.fu.service.impl.HomeworkBaseServiceImpl.java

/**
 * @see cn.lhfei.fu.service.HomeworkBaseService#update(cn.lhfei.fu.web.model.HomeworkBaseModel)
 */
@Override
public boolean update(HomeworkBaseModel model, String userType) throws NullPointerException {
    OutputStream out = null;
    BufferedOutputStream bf = null;
    Date currentTime = new Date();

    List<MultipartFile> files = model.getFiles();

    try {
        HomeworkBase base = homeworkBaseDAO.find(model.getBaseId());
        base.setModifyTime(currentTime);
        base.setActionType("" + OperationTypeEnum.SC.getCode());
        base.setOperationTime(currentTime);
        homeworkBaseDAO.save(base); // update homework_base info

        int num = 1;
        for (MultipartFile file : files) { // save archive file
            if (file.getSize() > 0) {
                String filePath = filePathBuilder.buildFullPath(model, model.getStudentName());
                String fileName = filePathBuilder.buildFileName(model, model.getStudentName(), num);
                String[] names = file.getOriginalFilename().split("[.]");
                String fileType = names[names.length - 1];
                String fullPath = filePath + File.separator + fileName + "." + fileType;

                out = new FileOutputStream(new File(fullPath));
                bf = new BufferedOutputStream(out);

                IOUtils.copyLarge(file.getInputStream(), bf);

                HomeworkArchive archive = new HomeworkArchive();
                archive.setArchiveName(fileName);
                archive.setArchivePath(fullPath);
                archive.setCreateTime(currentTime);
                archive.setModifyTime(currentTime);
                archive.setName(model.getName());
                archive.setStudentBaseId(model.getStudentBaseId());
                archive.setHomeworkBaseId(model.getBaseId());
                /*archive.setHomeworkBase(base);*/
                archive.setStudentName(model.getStudentName());
                archive.setStudentId(model.getStudentId());

                // set the approval status according to the user type
                if (userType != null && userType.equals(UserTypeEnum.STUDENT.getCode())) {
                    archive.setStatus("" + ApproveStatusEnum.DSH.getCode());
                } else if (userType != null && userType.equals(UserTypeEnum.TEACHER.getCode())) {
                    archive.setStatus("" + ApproveStatusEnum.DSH.getCode());
                } else if (userType != null && userType.equals(UserTypeEnum.ADMIN.getCode())) {
                    archive.setStatus("" + ApproveStatusEnum.YSH.getCode());
                }

                homeworkArchiveDAO.save(archive);

                // auto increment archives number.
                num++;
            }
        }
    } catch (IOException e) {
        log.error(e.getMessage(), e);
    } catch (NullPointerException e) {
        log.error("File name arguments missed.", e);
        throw new NullPointerException(e.getMessage());
    } finally {
        if (out != null) {
            try {
                out.flush();
                out.close();
            } catch (IOException e) {
                log.error(e.getMessage(), e);
            }
        }
        if (bf != null) {
            try {
                bf.flush();
                bf.close();
            } catch (IOException e) {
                log.error(e.getMessage(), e);
            }
        }
    }

    return false;
}