List of usage examples for java.nio.channels.FileChannel.close()
public final void close() throws IOException
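Closing a channel releases the underlying file handle; once closed, further I/O throws ClosedChannelException, while a second close() is a harmless no-op. Before the project snippets below, here is a minimal, self-contained sketch (the path /tmp/example.bin and the class name are illustrative, not taken from any of the sources) showing the usual Java 7+ pattern, where try-with-resources calls FileChannel.close() automatically even if a write fails:

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;

public class CloseExample {
    public static void main(String[] args) throws IOException {
        Path target = Paths.get("/tmp/example.bin"); // illustrative path
        // try-with-resources invokes FileChannel.close() automatically,
        // even when write() throws; a later explicit close() would be a no-op.
        try (FileChannel channel = FileChannel.open(target,
                StandardOpenOption.CREATE, StandardOpenOption.WRITE)) {
            ByteBuffer data = ByteBuffer.wrap("hello".getBytes());
            while (data.hasRemaining()) {
                channel.write(data); // a single write() may not drain the buffer
            }
        } // channel.close() has already run here
    }
}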
From source file:org.apache.hadoop.yarn.server.nodemanager.containermanager.TestContainerManager.java
private void writeByteBufferToFile(File target, ByteBuffer data) throws IOException {
    FileChannel fileChannel = new FileOutputStream(target, false).getChannel();
    fileChannel.write(data);
    fileChannel.close();
}
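The Hadoop test helper above closes the channel only on the success path: if write() throws, the FileOutputStream wrapped by the channel is never closed, and a single write() call is not guaranteed to drain the buffer. A hedged rework of the same helper (an editor's sketch, not code from the Hadoop sources) that addresses both points:

// Sketch only: try-with-resources closes the channel, which also closes the
// underlying FileOutputStream, even when write() throws.
private void writeByteBufferToFile(File target, ByteBuffer data) throws IOException {
    try (FileChannel fileChannel = new FileOutputStream(target, false).getChannel()) {
        while (data.hasRemaining()) {
            fileChannel.write(data); // a single write() may leave bytes in the buffer
        }
    }
}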
From source file:com.l2jfree.gameserver.geodata.pathfinding.geonodes.GeoPathFinding.java
private void LoadPathNodeFile(byte rx, byte ry) {
    String fname = "./data/pathnode/" + rx + "_" + ry + ".pn";
    short regionoffset = getRegionOffset(rx, ry);
    _log.info("PathFinding Engine: - Loading: " + fname + " -> region offset: " + regionoffset + "X: " + rx + " Y: " + ry);
    File Pn = new File(fname);
    int node = 0, size, index = 0;
    FileChannel roChannel = null;

    try {
        // Create a read-only memory-mapped file
        roChannel = new RandomAccessFile(Pn, "r").getChannel();
        size = (int) roChannel.size();

        MappedByteBuffer nodes;

        if (Config.FORCE_GEODATA)
            // Force the O/S to load this buffer's content into physical memory.
            // This is not guaranteed, because the underlying operating system may have
            // paged out some of the buffer's data.
            nodes = roChannel.map(FileChannel.MapMode.READ_ONLY, 0, size).load();
        else
            nodes = roChannel.map(FileChannel.MapMode.READ_ONLY, 0, size);

        // Indexing pathnode files, so we will know where each block starts
        IntBuffer indexs = IntBuffer.allocate(65536);

        while (node < 65536) {
            byte layer = nodes.get(index);
            indexs.put(node++, index);
            index += layer * 10 + 1;
        }

        _pathNodesIndex.set(regionoffset, indexs);
        _pathNodes.set(regionoffset, nodes);
    } catch (Exception e) {
        _log.warn("Failed to Load PathNode File: " + fname + "\n", e);
    } finally {
        try {
            if (roChannel != null)
                roChannel.close();
        } catch (Exception e) {
        }
    }
}
From source file:de.tobiasroeser.maven.featurebuilder.FeatureBuilder.java
private void copyJarsAsBundles(final List<Bundle> bundles, final String copyJarsAsBundlesTo) {
    final File dir = new File(copyJarsAsBundlesTo);

    if (dir.exists()) {
        if (!dir.isDirectory()) {
            log.error(dir.getAbsolutePath() + " is not a directory.");
            return;
        }
    } else {
        dir.mkdirs();
    }

    log.info("Copying " + bundles.size() + " bundles into: " + dir.getAbsolutePath());

    for (final Bundle bundle : bundles) {
        final File target = new File(dir, bundle.getSymbolicName() + "_" + bundle.getVersion() + ".jar");
        FileChannel in = null;
        FileChannel out = null;

        try {
            in = new FileInputStream(bundle.getJarLocation()).getChannel();
            out = new FileOutputStream(target).getChannel();

            // According to http://www.rgagnon.com/javadetails/java-0064.html
            // a single transferTo call cannot copy chunks greater than 64 MB on
            // Windows, so transfer at most 64 MB - 32 KB per call.
            final int maxCount = (64 * 1024 * 1024) - (32 * 1024);
            final long size = in.size();
            long position = 0;

            while (position < size) {
                position += in.transferTo(position, maxCount, out);
            }
        } catch (final IOException e) {
            log.error("Error while copying '" + bundle.getJarLocation().getAbsolutePath() + "' to '"
                    + target.getAbsolutePath() + "'", e);
            return;
        } finally {
            try {
                if (in != null) {
                    in.close();
                }
                if (out != null) {
                    out.close();
                }
            } catch (final IOException e) {
                throw new RuntimeException("Could not recover from error.", e);
            }
        }
    }
}
From source file:com.odoo.core.orm.OModel.java
public void exportDB() {
    FileChannel source;
    FileChannel destination;
    String currentDBPath = getDatabaseLocalPath();
    String backupDBPath = OStorageUtils.getDirectoryPath("file") + "/" + getDatabaseName();
    File currentDB = new File(currentDBPath);
    File backupDB = new File(backupDBPath);

    try {
        source = new FileInputStream(currentDB).getChannel();
        destination = new FileOutputStream(backupDB).getChannel();
        destination.transferFrom(source, 0, source.size());
        source.close();
        destination.close();

        String subject = "Database Export: " + getDatabaseName();
        Uri uri = Uri.fromFile(backupDB);
        Intent intent = new Intent(Intent.ACTION_SEND);
        intent.putExtra(Intent.EXTRA_STREAM, uri);
        intent.putExtra(Intent.EXTRA_SUBJECT, subject);
        intent.setType("message/rfc822");
        mContext.startActivity(intent);
    } catch (IOException e) {
        e.printStackTrace();
    }
}
From source file:fr.paris.lutece.plugins.directory.web.action.ExportDirectoryAction.java
/**
 * {@inheritDoc}
 */
@Override
public IPluginActionResult process(HttpServletRequest request, HttpServletResponse response, AdminUser adminUser,
        DirectoryAdminSearchFields searchFields) throws AccessDeniedException {
    DefaultPluginActionResult result = new DefaultPluginActionResult();
    String strIdDirectory = request.getParameter(PARAMETER_ID_DIRECTORY);
    int nIdDirectory = DirectoryUtils.convertStringToInt(strIdDirectory);
    Directory directory = DirectoryHome.findByPrimaryKey(nIdDirectory, getPlugin());
    String strIdDirectoryXsl = request.getParameter(PARAMETER_ID_DIRECTORY_XSL);
    int nIdDirectoryXsl = DirectoryUtils.convertStringToInt(strIdDirectoryXsl);
    WorkflowService workflowService = WorkflowService.getInstance();
    boolean bWorkflowServiceEnable = workflowService.isAvailable();
    String strShotExportFinalOutPut = null;
    DirectoryXsl directoryXsl = DirectoryXslHome.findByPrimaryKey(nIdDirectoryXsl, getPlugin());

    // -----------------------------------------------------------------------
    if ((directory == null) || (directoryXsl == null) || !RBACService.isAuthorized(Directory.RESOURCE_TYPE,
            strIdDirectory, DirectoryResourceIdService.PERMISSION_MANAGE_RECORD, adminUser)) {
        throw new AccessDeniedException(
                I18nService.getLocalizedString(MESSAGE_ACCESS_DENIED, request.getLocale()));
    }

    String strFileExtension = directoryXsl.getExtension();
    String strFileName = directory.getTitle() + "." + strFileExtension;
    strFileName = UploadUtil.cleanFileName(strFileName);

    boolean bIsCsvExport = strFileExtension.equals(EXPORT_CSV_EXT);
    boolean bDisplayDateCreation = directory.isDateShownInExport();
    boolean bDisplayDateModification = directory.isDateModificationShownInExport();

    List<Integer> listResultRecordId = new ArrayList<Integer>();

    if (request.getParameter(PARAMETER_BUTTON_EXPORT_SEARCH) != null) {
        String[] selectedRecords = request.getParameterValues(PARAMETER_SELECTED_RECORD);
        List<String> listSelectedRecords;

        if (selectedRecords != null) {
            listSelectedRecords = Arrays.asList(selectedRecords);

            if ((listSelectedRecords != null) && (listSelectedRecords.size() > 0)) {
                for (String strRecordId : listSelectedRecords) {
                    listResultRecordId.add(Integer.parseInt(strRecordId));
                }
            }
        } else {
            // sort order and sort entry are not needed in export
            listResultRecordId = DirectoryUtils.getListResults(request, directory, bWorkflowServiceEnable, true,
                    null, RecordFieldFilter.ORDER_NONE, searchFields, adminUser, adminUser.getLocale());
        }
    } else {
        // sort order and sort entry are not needed in export
        listResultRecordId = DirectoryUtils.getListResults(request, directory, bWorkflowServiceEnable, false,
                null, RecordFieldFilter.ORDER_NONE, searchFields, adminUser, adminUser.getLocale());
    }

    EntryFilter entryFilter = new EntryFilter();
    entryFilter.setIdDirectory(directory.getIdDirectory());
    entryFilter.setIsGroup(EntryFilter.FILTER_FALSE);
    entryFilter.setIsComment(EntryFilter.FILTER_FALSE);
    entryFilter.setIsShownInExport(EntryFilter.FILTER_TRUE);

    List<IEntry> listEntryResultSearch = EntryHome.getEntryList(entryFilter, getPlugin());
    Map<Integer, Field> hashFields = DirectoryUtils.getMapFieldsOfListEntry(listEntryResultSearch, getPlugin());

    StringBuffer strBufferListRecordXml = null;
    java.io.File tmpFile = null;
    BufferedWriter bufferedWriter = null;
    OutputStreamWriter outputStreamWriter = null;
    File fileTemplate = null;
    String strFileOutPut = DirectoryUtils.EMPTY_STRING;

    if (directoryXsl.getFile() != null) {
        fileTemplate = FileHome.findByPrimaryKey(directoryXsl.getFile().getIdFile(), getPlugin());
    }

    XmlTransformerService xmlTransformerService = null;
    PhysicalFile physicalFile = null;
    String strXslId = null;

    if ((fileTemplate != null) && (fileTemplate.getPhysicalFile() != null)) {
        fileTemplate.setPhysicalFile(PhysicalFileHome
                .findByPrimaryKey(fileTemplate.getPhysicalFile().getIdPhysicalFile(), getPlugin()));
        xmlTransformerService = new XmlTransformerService();
        physicalFile = fileTemplate.getPhysicalFile();
        strXslId = XSL_UNIQUE_PREFIX_ID + physicalFile.getIdPhysicalFile();
    }

    int nSize = listResultRecordId.size();
    boolean bIsBigExport = (nSize > EXPORT_RECORD_STEP);

    // Encoding export
    String strEncoding = StringUtils.EMPTY;

    if (bIsCsvExport) {
        strEncoding = DirectoryParameterService.getService().getExportCSVEncoding();
    } else {
        strEncoding = DirectoryParameterService.getService().getExportXMLEncoding();
    }

    if (bIsBigExport) {
        try {
            String strPath = AppPathService.getWebAppPath() + AppPropertiesService.getProperty(PROPERTY_PATH_TMP);
            java.io.File tmpDir = new java.io.File(strPath);
            tmpFile = java.io.File.createTempFile(EXPORT_TMPFILE_PREFIX, EXPORT_TMPFILE_SUFIX, tmpDir);
        } catch (IOException e) {
            AppLogService.error("Unable to create temp file in webapp tmp dir");

            try {
                tmpFile = java.io.File.createTempFile(EXPORT_TMPFILE_PREFIX, EXPORT_TMPFILE_SUFIX);
            } catch (IOException e1) {
                AppLogService.error(e1);
            }
        }

        try {
            tmpFile.deleteOnExit();
            outputStreamWriter = new OutputStreamWriter(new FileOutputStream(tmpFile), strEncoding);
            bufferedWriter = new BufferedWriter(outputStreamWriter);
        } catch (IOException e) {
            AppLogService.error(e);
        }
    }

    Plugin plugin = this.getPlugin();
    Locale locale = request.getLocale();

    // ---------------------------------------------------------------------
    StringBuffer strBufferListEntryXml = new StringBuffer();

    if (bDisplayDateCreation && bIsCsvExport) {
        Map<String, String> model = new HashMap<String, String>();
        model.put(Entry.ATTRIBUTE_ENTRY_ID, "0");
        XmlUtil.beginElement(strBufferListEntryXml, Entry.TAG_ENTRY, model);

        String strDateCreation = I18nService.getLocalizedString(PROPERTY_ENTRY_TYPE_DATE_CREATION_TITLE, locale);
        XmlUtil.addElementHtml(strBufferListEntryXml, Entry.TAG_TITLE, strDateCreation);
        XmlUtil.endElement(strBufferListEntryXml, Entry.TAG_ENTRY);
    }

    if (bDisplayDateModification && bIsCsvExport) {
        Map<String, String> model = new HashMap<String, String>();
        model.put(Entry.ATTRIBUTE_ENTRY_ID, "0");
        XmlUtil.beginElement(strBufferListEntryXml, Entry.TAG_ENTRY, model);

        String strDateModification = I18nService.getLocalizedString(PROPERTY_ENTRY_TYPE_DATE_MODIFICATION_TITLE,
                locale);
        XmlUtil.addElementHtml(strBufferListEntryXml, Entry.TAG_TITLE, strDateModification);
        XmlUtil.endElement(strBufferListEntryXml, Entry.TAG_ENTRY);
    }

    for (IEntry entry : listEntryResultSearch) {
        entry.getXml(plugin, locale, strBufferListEntryXml);
    }

    Map<String, String> model = new HashMap<String, String>();

    if ((directory.getIdWorkflow() != DirectoryUtils.CONSTANT_ID_NULL) && bWorkflowServiceEnable) {
        model.put(TAG_DISPLAY, TAG_YES);
    } else {
        model.put(TAG_DISPLAY, TAG_NO);
    }

    XmlUtil.addEmptyElement(strBufferListEntryXml, TAG_STATUS, model);

    StringBuilder strBufferDirectoryXml = new StringBuilder();
    strBufferDirectoryXml.append(XmlUtil.getXmlHeader());

    if (bIsBigExport) {
        strBufferDirectoryXml.append(directory.getXml(plugin, locale, new StringBuffer(), strBufferListEntryXml));
        strBufferListRecordXml = new StringBuffer(EXPORT_STRINGBUFFER_INITIAL_SIZE);
        strFileOutPut = xmlTransformerService.transformBySourceWithXslCache(strBufferDirectoryXml.toString(),
                physicalFile.getValue(), strXslId, null, null);

        String strFinalOutPut = null;

        if (!bIsCsvExport) {
            int pos = strFileOutPut.indexOf(EXPORT_XSL_EMPTY_LIST_RECORD);
            strFinalOutPut = strFileOutPut.substring(0, pos) + EXPORT_XSL_BEGIN_LIST_RECORD;
        } else {
            strFinalOutPut = strFileOutPut;
        }

        try {
            bufferedWriter.write(strFinalOutPut);
        } catch (IOException e) {
            AppLogService.error(e);
        }
    } else {
        strBufferListRecordXml = new StringBuffer();
    }

    // -----------------------------------------------------------------------
    List<Integer> nTmpListId = new ArrayList<Integer>();
    int idWorflow = directory.getIdWorkflow();
    IRecordService recordService = SpringContextService.getBean(RecordService.BEAN_SERVICE);

    if (bIsBigExport) {
        int nXmlHeaderLength = XmlUtil.getXmlHeader().length() - 1;
        int max = nSize / EXPORT_RECORD_STEP;
        int max1 = nSize - EXPORT_RECORD_STEP;

        for (int i = 0; i < max1; i += EXPORT_RECORD_STEP) {
            AppLogService.debug("Directory export progress : " + (((float) i / nSize) * 100) + "%");

            nTmpListId = new ArrayList<Integer>();

            int k = i + EXPORT_RECORD_STEP;

            for (int j = i; j < k; j++) {
                nTmpListId.add(listResultRecordId.get(j));
            }

            List<Record> nTmpListRecords = recordService.loadListByListId(nTmpListId, plugin);

            for (Record record : nTmpListRecords) {
                State state = workflowService.getState(record.getIdRecord(), Record.WORKFLOW_RESOURCE_TYPE,
                        idWorflow, Integer.valueOf(directory.getIdDirectory()));

                if (bIsCsvExport) {
                    strBufferListRecordXml.append(record.getXmlForCsvExport(plugin, locale, false, state,
                            listEntryResultSearch, false, false, true, bDisplayDateCreation,
                            bDisplayDateModification, hashFields));
                } else {
                    strBufferListRecordXml.append(record.getXml(plugin, locale, false, state,
                            listEntryResultSearch, false, false, true, bDisplayDateCreation,
                            bDisplayDateModification, hashFields));
                }
            }

            strBufferListRecordXml = this.appendPartialContent(strBufferListRecordXml, bufferedWriter,
                    physicalFile, bIsCsvExport, strXslId, nXmlHeaderLength, xmlTransformerService);
        }

        // -----------------------------------------------------------------------
        int max2 = EXPORT_RECORD_STEP * max;
        nTmpListId = new ArrayList<Integer>();

        for (int i = max2; i < nSize; i++) {
            nTmpListId.add(listResultRecordId.get(i));
        }

        List<Record> nTmpListRecords = recordService.loadListByListId(nTmpListId, plugin);

        for (Record record : nTmpListRecords) {
            State state = workflowService.getState(record.getIdRecord(), Record.WORKFLOW_RESOURCE_TYPE,
                    idWorflow, Integer.valueOf(directory.getIdDirectory()));

            if (bIsCsvExport) {
                strBufferListRecordXml.append(record.getXmlForCsvExport(plugin, locale, false, state,
                        listEntryResultSearch, false, false, true, bDisplayDateCreation,
                        bDisplayDateModification, hashFields));
            } else {
                strBufferListRecordXml.append(record.getXml(plugin, locale, false, state, listEntryResultSearch,
                        false, false, true, bDisplayDateCreation, bDisplayDateModification, hashFields));
            }
        }

        strBufferListRecordXml = this.appendPartialContent(strBufferListRecordXml, bufferedWriter, physicalFile,
                bIsCsvExport, strXslId, nXmlHeaderLength, xmlTransformerService);

        strBufferListRecordXml.insert(0, EXPORT_XSL_BEGIN_PARTIAL_EXPORT);
        strBufferListRecordXml.insert(0, XmlUtil.getXmlHeader());
        strBufferListRecordXml.append(EXPORT_XSL_END_PARTIAL_EXPORT);
        strFileOutPut = xmlTransformerService.transformBySourceWithXslCache(strBufferListRecordXml.toString(),
                physicalFile.getValue(), strXslId, null, null);

        try {
            if (bIsCsvExport) {
                bufferedWriter.write(strFileOutPut);
            } else {
                bufferedWriter.write(strFileOutPut.substring(nXmlHeaderLength));
                bufferedWriter.write(EXPORT_XSL_END_LIST_RECORD + EXPORT_XSL_NEW_LINE + EXPORT_XSL_END_DIRECTORY);
            }
        } catch (IOException e) {
            AppLogService.error(e);
        } finally {
            IOUtils.closeQuietly(bufferedWriter);
            IOUtils.closeQuietly(outputStreamWriter);
        }
    } else {
        List<Record> nTmpListRecords = recordService.loadListByListId(listResultRecordId, plugin);

        for (Record record : nTmpListRecords) {
            State state = workflowService.getState(record.getIdRecord(), Record.WORKFLOW_RESOURCE_TYPE,
                    idWorflow, Integer.valueOf(directory.getIdDirectory()));

            if (bIsCsvExport) {
                strBufferListRecordXml.append(record.getXmlForCsvExport(plugin, locale, false, state,
                        listEntryResultSearch, false, false, true, bDisplayDateCreation,
                        bDisplayDateModification, hashFields));
            } else {
                strBufferListRecordXml.append(record.getXml(plugin, locale, false, state, listEntryResultSearch,
                        false, false, true, bDisplayDateCreation, bDisplayDateModification, hashFields));
            }
        }

        strBufferDirectoryXml.append(directory.getXml(plugin, locale, strBufferListRecordXml, strBufferListEntryXml));
        strShotExportFinalOutPut = xmlTransformerService.transformBySourceWithXslCache(
                strBufferDirectoryXml.toString(), physicalFile.getValue(), strXslId, null, null);
    }

    // -----------------------------------------------------------------------
    DirectoryUtils.addHeaderResponse(request, response, strFileName);
    response.setCharacterEncoding(strEncoding);

    if (bIsCsvExport) {
        response.setContentType(CONSTANT_MIME_TYPE_CSV);
    } else {
        String strMimeType = FileSystemUtil.getMIMEType(strFileName);

        if (strMimeType != null) {
            response.setContentType(strMimeType);
        } else {
            response.setContentType(CONSTANT_MIME_TYPE_OCTETSTREAM);
        }
    }

    if (bIsBigExport) {
        FileChannel in = null;
        WritableByteChannel writeChannelOut = null;
        OutputStream out = null;

        try {
            in = new FileInputStream(tmpFile).getChannel();
            out = response.getOutputStream();
            writeChannelOut = Channels.newChannel(out);
            response.setContentLength(Long.valueOf(in.size()).intValue());
            in.transferTo(0, in.size(), writeChannelOut);
            response.getOutputStream().close();
        } catch (IOException e) {
            AppLogService.error(e);
        } finally {
            if (in != null) {
                try {
                    in.close();
                } catch (IOException e) {
                    AppLogService.error(e.getMessage(), e);
                }
            }

            IOUtils.closeQuietly(out);
            tmpFile.delete();
        }
    } else {
        PrintWriter out = null;

        try {
            out = response.getWriter();
            out.print(strShotExportFinalOutPut);
        } catch (IOException e) {
            AppLogService.error(e.getMessage(), e);
        } finally {
            if (out != null) {
                out.flush();
                out.close();
            }
        }
    }

    result.setNoop(true);

    return result;
}
From source file:org.pentaho.platform.repository.solution.SolutionRepositoryBase.java
/**
 * @return int possible values: ISolutionRepository.FILE_ADD_SUCCESSFUL ISolutionRepository.FILE_EXISTS
 *         ISolutionRepository.FILE_ADD_FAILED
 *
 *         TODO mlowery Why can't this delegate to the other addSolutionFile?
 */
public int addSolutionFile(final String baseUrl, String path, final String fileName, final File f,
        boolean overwrite) {
    if (!path.endsWith("/") && !path.endsWith("\\")) { //$NON-NLS-1$ //$NON-NLS-2$
        path += File.separator;
    }

    File fNew = new File(baseUrl + path + fileName);
    int status = ISolutionRepository.FILE_ADD_SUCCESSFUL;

    if (fNew.exists() && !overwrite) {
        status = ISolutionRepository.FILE_EXISTS;
    } else {
        FileChannel in = null, out = null;

        try {
            in = new FileInputStream(f).getChannel();
            out = new FileOutputStream(fNew).getChannel();
            out.transferFrom(in, 0, in.size());
            resetRepository();
        } catch (Exception e) {
            SolutionRepositoryBase.logger.error(e.toString());
            status = ISolutionRepository.FILE_ADD_FAILED;
        } finally {
            try {
                if (in != null) {
                    in.close();
                }
                if (out != null) {
                    out.close();
                }
            } catch (Exception e) {
                // TODO, we should probably log the error, and return a failure status
            }
        }
    }

    return status;
}
From source file:com.healthmarketscience.jackcess.Database.java
/**
 * Open an existing Database. If the existing file is not writeable or the
 * readOnly flag is <code>true</code>, the file will be opened read-only.
 * @param mdbFile File containing the database
 * @param readOnly iff <code>true</code>, force opening file in read-only mode
 * @param autoSync whether or not to enable auto-syncing on write. if
 *                 {@code true}, writes will be immediately flushed to disk.
 *                 This leaves the database in a (fairly) consistent state
 *                 on each write, but can be very inefficient for many
 *                 updates. if {@code false}, flushing to disk happens at
 *                 the jvm's leisure, which can be much faster, but may
 *                 leave the database in an inconsistent state if failures
 *                 are encountered during writing.
 * @param charset Charset to use, if {@code null}, uses default
 * @param timeZone TimeZone to use, if {@code null}, uses default
 * @param provider CodecProvider for handling page encoding/decoding, may be
 *                 {@code null} if no special encoding is necessary
 * @usage _intermediate_method_
 */
public static Database open(File mdbFile, boolean readOnly, boolean autoSync, Charset charset, TimeZone timeZone,
        CodecProvider provider) throws IOException {
    if (!mdbFile.exists() || !mdbFile.canRead()) {
        throw new FileNotFoundException("given file does not exist: " + mdbFile);
    }

    // force read-only for non-writable files
    readOnly |= !mdbFile.canWrite();

    // open file channel
    FileChannel channel = openChannel(mdbFile, readOnly);

    if (!readOnly) {
        // verify that format supports writing
        JetFormat jetFormat = JetFormat.getFormat(channel);

        if (jetFormat.READ_ONLY) {
            // shutdown the channel (quietly)
            try {
                channel.close();
            } catch (Exception ignored) {
                // we don't care
            }

            throw new IOException("jet format '" + jetFormat + "' does not support writing");
        }
    }

    return new Database(mdbFile, channel, autoSync, null, charset, timeZone, provider);
}
From source file:configuration.Util.java
/** Fast & simple file copy. */
public static void copy(File source, File dest) throws IOException {
    FileChannel in = null, out = null;

    try {
        in = new FileInputStream(source).getChannel();
        out = new FileOutputStream(dest).getChannel();

        long size = in.size();
        MappedByteBuffer buf = in.map(FileChannel.MapMode.READ_ONLY, 0, size);
        out.write(buf);
    } catch (Exception e) {
        System.out.println("Copy File Directory Failed!");
        System.out.println(e);
    }

    if (in != null)
        in.close();
    if (out != null)
        out.close();
}
From source file:eu.medsea.mimeutil.detector.OpendesktopMimeDetector.java
private void init(final String mimeCacheFile) {
    String cacheFile = mimeCacheFile;

    if (!new File(cacheFile).exists()) {
        cacheFile = internalMimeCacheFile;
    }

    // Map the mime.cache file as a memory mapped file
    FileChannel rCh = null;

    try {
        RandomAccessFile raf = null;
        raf = new RandomAccessFile(cacheFile, "r");
        rCh = (raf).getChannel();
        content = rCh.map(FileChannel.MapMode.READ_ONLY, 0, rCh.size());

        // Read all of the MIME types from the Alias list
        initMimeTypes();

        if (log.isDebugEnabled()) {
            log.debug("Registering a FileWatcher for [" + cacheFile + "]");
        }

        TimerTask task = new FileWatcher(new File(cacheFile)) {
            protected void onChange(File file) {
                initMimeTypes();
            }
        };

        timer = new Timer();
        // repeat the check every 10 seconds
        timer.schedule(task, new Date(), 10000);
    } catch (Exception e) {
        throw new MimeException(e);
    } finally {
        if (rCh != null) {
            try {
                rCh.close();
            } catch (Exception e) {
                log.error(e.getLocalizedMessage(), e);
            }
        }
    }
}