List of usage examples for java.io FileInputStream getChannel
public FileChannel getChannel()
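getChannel() returns the unique java.nio.channels.FileChannel associated with the stream: the channel's position reflects the bytes already read, and closing either the stream or the channel closes the other. Before the project examples below, here is a minimal, self-contained sketch of the two most common uses of the channel, querying the file size and copying channel-to-channel. The file names demo.txt and demo-copy.txt are placeholders for illustration only.

import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.channels.FileChannel;

public class GetChannelDemo {
    public static void main(String[] args) throws IOException {
        // try-with-resources closes the streams, which also closes their channels
        try (FileInputStream in = new FileInputStream("demo.txt");
             FileOutputStream out = new FileOutputStream("demo-copy.txt")) {
            FileChannel inChannel = in.getChannel();
            long size = inChannel.size(); // length of the underlying file in bytes
            System.out.println("file size: " + size);

            // copy the whole file channel-to-channel
            inChannel.transferTo(0, size, out.getChannel());
        }
    }
}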
From source file:com.taobao.common.tfs.DefaultTfsManager.java
public String saveTfsFile(String localFileName, String tfsFileName, String tfsSuffix) {
    if (localFileName == null) {
        log.error("localfile name is null");
        return null;
    }
    FileInputStream input = null;
    try {
        input = new FileInputStream(localFileName);
        long length = input.getChannel().size();
        return saveFileEx(input, tfsFileName, tfsSuffix, checkFileType(length), localFileName, false);
    } catch (TfsException e) {
        log.error("save fail: " + localFileName + "=>" + tfsFileName + "," + tfsSuffix, e);
    } catch (FileNotFoundException e) {
        log.error("local file: " + localFileName + " not exist,", e);
    } catch (IOException e) {
        log.error("local file: " + localFileName + " IO failed,", e);
    } finally {
        try {
            if (input != null) {
                input.close();
            }
        } catch (IOException e) {
            log.error("close local file fail ", e);
        }
    }
    return null;
}
From source file:org.opennms.install.Installer.java
/**
 * <p>copyFile</p>
 *
 * @param source a {@link java.lang.String} object.
 * @param destination a {@link java.lang.String} object.
 * @param description a {@link java.lang.String} object.
 * @param recursive a boolean.
 * @throws java.lang.Exception if any.
 */
public void copyFile(String source, String destination, String description, boolean recursive)
        throws Exception {
    File sourceFile = new File(source);
    File destinationFile = new File(destination);
    if (!sourceFile.exists()) {
        throw new Exception("source file (" + source + ") does not exist!");
    }
    if (!sourceFile.isFile()) {
        throw new Exception("source file (" + source + ") is not a file!");
    }
    if (!sourceFile.canRead()) {
        throw new Exception("source file (" + source + ") is not readable!");
    }
    if (destinationFile.exists()) {
        System.out.print(" - " + destination + " exists, removing... ");
        if (destinationFile.delete()) {
            System.out.println("REMOVED");
        } else {
            System.out.println("FAILED");
            throw new Exception("unable to delete existing file: " + sourceFile);
        }
    }
    System.out.print(" - copying " + source + " to " + destination + "... ");
    if (!destinationFile.getParentFile().exists()) {
        if (!destinationFile.getParentFile().mkdirs()) {
            throw new Exception("unable to create directory: " + destinationFile.getParent());
        }
    }
    if (!destinationFile.createNewFile()) {
        throw new Exception("unable to create file: " + destinationFile);
    }
    FileChannel from = null;
    FileInputStream fisFrom = null;
    FileChannel to = null;
    FileOutputStream fisTo = null;
    try {
        fisFrom = new FileInputStream(sourceFile);
        from = fisFrom.getChannel();
        fisTo = new FileOutputStream(destinationFile);
        to = fisTo.getChannel();
        to.transferFrom(from, 0, from.size());
    } catch (FileNotFoundException e) {
        throw new Exception("unable to copy " + sourceFile + " to " + destinationFile, e);
    } finally {
        IOUtils.closeQuietly(fisTo);
        IOUtils.closeQuietly(to);
        IOUtils.closeQuietly(fisFrom);
        IOUtils.closeQuietly(from);
    }
    System.out.println("DONE");
}
From source file:org.wso2.esb.integration.common.utils.ESBTestCaseUtils.java
/**
 * Copy the given source file to the given destination
 *
 * @param sourceUri source file location
 * @param destUri destination file location
 * @throws IOException
 */
public static void copyFile(String sourceUri, String destUri) throws IOException {
    File sourceFile = new File(sourceUri);
    File destFile = new File(destUri);
    if (destFile.exists()) {
        destFile.delete();
    }
    destFile.createNewFile();
    FileInputStream fileInputStream = null;
    FileOutputStream fileOutputStream = null;
    try {
        fileInputStream = new FileInputStream(sourceFile);
        fileOutputStream = new FileOutputStream(destFile);
        FileChannel source = fileInputStream.getChannel();
        FileChannel destination = fileOutputStream.getChannel();
        destination.transferFrom(source, 0, source.size());
    } finally {
        IOUtils.closeQuietly(fileInputStream);
        IOUtils.closeQuietly(fileOutputStream);
    }
}
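The two copy examples above obtain channels from the streams and call FileChannel.transferFrom, closing everything in finally blocks. On Java 7 and later the same channel-to-channel copy can be written with try-with-resources; the sketch below is an illustrative variant under that assumption, not code from either quoted project, and it loops because transferFrom may copy fewer bytes than requested.

import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.channels.FileChannel;

public final class ChannelCopy {
    /** Copies sourceUri to destUri using FileChannel.transferFrom. */
    public static void copyFile(String sourceUri, String destUri) throws IOException {
        try (FileInputStream in = new FileInputStream(sourceUri);
             FileOutputStream out = new FileOutputStream(destUri)) {
            FileChannel source = in.getChannel();
            FileChannel destination = out.getChannel();
            long size = source.size();
            long position = 0;
            // transferFrom may transfer fewer bytes than requested, so loop until done
            while (position < size) {
                position += destination.transferFrom(source, position, size - position);
            }
        }
    }
}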
From source file:com.taobao.common.tfs.DefaultTfsManager.java
public String saveUniqueFile(String localFileName, String tfsFileName, String tfsSuffix, boolean simpleName) {
    if (localFileName == null) {
        log.error("localfile name is null");
        return null;
    }
    if (!checkUniqueStoreConfig()) {
        log.error("unique server parameter not config error: serverList, groupName, namespace must be configed");
        return null;
    }
    FileInputStream input = null;
    byte[] data = null;
    try {
        input = new FileInputStream(localFileName);
        data = new byte[(int) input.getChannel().size()];
        int ret = input.read(data);
        if (ret != data.length) {
            log.error("read file:" + localFileName + " failed.");
            return null;
        }
        checkUniqueStoreInit();
        TfsFile file = filePool.getFile();
        return file.saveUnique(uniqueStore, tfsFileName, tfsSuffix, data, 0, data.length, simpleName);
    } catch (FileNotFoundException e) {
        log.error("file not found.", e);
    } catch (IOException e) {
        log.error("io error.", e);
    } catch (RuntimeException e) {
        // tair may throw runtimeexception if first init fail.
        // must reinit again
        uniqueStore = null;
        log.warn("saveUniqueFile fail, just saveFile. runtime exception: ", e);
        return saveFile(tfsFileName, tfsSuffix, data, 0, data.length, simpleName);
    } catch (Exception e) {
        log.warn("saveUniqueFile fail, just saveFile.", e);
        return saveFile(tfsFileName, tfsSuffix, data, 0, data.length, simpleName);
    } finally {
        if (input != null) {
            try {
                input.close();
            } catch (IOException e) {
                log.error("close local file fail.");
            }
        }
    }
    return null;
}
From source file:org.crs4.entando.innomanager.aps.system.services.layer.LayerManager.java
@Override
public List<String> getShapeFileAttributes(File dbfFile) {
    List<String> attributes = new ArrayList<String>();
    try {
        FileInputStream fis = new FileInputStream(dbfFile);
        DbaseFileReader dbfReader = new DbaseFileReader(fis.getChannel(), false, Charset.forName("UTF8"));
        DbaseFileHeader dbfHeader = dbfReader.getHeader();
        int n = dbfHeader.getNumFields();
        for (int i = 0; i < n; i++)
            attributes.add(dbfHeader.getFieldName(i) + ": " + dbfHeader.getFieldClass(i).getSimpleName());
        return attributes;
    } catch (Exception e) {
        ApsSystemUtils.getLogger().debug(e.getMessage());
    }
    return attributes;
}
From source file:adept.io.Reader.java
/**
 * Reads specified file into a string.
 *
 * @param path the path
 * @return the string
 */
public String readFileIntoString(String path) {
    try {
        String absolutePath = path;
        // String absolutePath = getAbsolutePathFromClasspathOrFileSystem(path);
        FileInputStream stream = new FileInputStream(new File(absolutePath));
        try {
            FileChannel fc = stream.getChannel();
            MappedByteBuffer bb = fc.map(FileChannel.MapMode.READ_ONLY, 0, fc.size());
            /* Instead of using default, pass in a decoder. */
            return Charset.forName("UTF-8").decode(bb).toString();
        } finally {
            stream.close();
        }
    } catch (IOException e) {
        e.printStackTrace();
    }
    return null;
}
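The Reader example above maps the whole file into memory with FileChannel.map and decodes the MappedByteBuffer as UTF-8. Below is a compact sketch of the same mapping idiom using try-with-resources and StandardCharsets; it is an illustrative rewrite, not code from adept.io.Reader, and the readme.txt path in main is a made-up placeholder.

import java.io.FileInputStream;
import java.io.IOException;
import java.nio.MappedByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.charset.StandardCharsets;

public class MappedRead {
    public static String readFileIntoString(String path) throws IOException {
        try (FileInputStream stream = new FileInputStream(path)) {
            FileChannel fc = stream.getChannel();
            // map the whole file read-only; the bytes are not copied onto the Java heap
            MappedByteBuffer bb = fc.map(FileChannel.MapMode.READ_ONLY, 0, fc.size());
            return StandardCharsets.UTF_8.decode(bb).toString();
        }
    }

    public static void main(String[] args) throws IOException {
        System.out.println(readFileIntoString("readme.txt"));
    }
}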
From source file:com.datafibers.kafka.connect.SchemaedFileSourceTask.java
private List<SourceRecord> pollFromFile() throws InterruptedException {
    log.trace("pollFromFile");
    CsvSchema bootstrapCsv;
    CsvMapper csvMapper = new CsvMapper();
    ObjectMapper jsonMapper = new ObjectMapper();
    MappingIterator<Map<?, ?>> mappingIterator;
    ArrayList<SourceRecord> records = null;
    long currentTime = System.currentTimeMillis();
    long recordsPerPoll;

    // TODO: Improve ExceptionOnEof logic.
    // The code below only works when each pass through
    // poll() reads all available records (not a given).
    if (config.getExceptionOnEof() && streamOffset != null) {
        throw new ConnectException("No more deta available on FileInputStream");
    }

    // Initialize the bootstrapCsv schema if necessary
    if (recordSchema == null || inputType.equalsIgnoreCase("json")) {
        log.trace("Constructing csvSchema from emptySchema");
        bootstrapCsv = config.getCsvHeaders() ? CsvSchema.emptySchema().withHeader()
                : CsvSchema.emptySchema().withoutHeader();
    } else {
        // We've seen a schema, so we'll assume headers from the recordSchema
        log.trace("Constructing csvSchema from recordSchema");
        CsvSchema.Builder builder = new CsvSchema.Builder();
        builder.setUseHeader(false);
        builder.setColumnSeparator(',');
        for (Field f : recordSchema.fields()) {
            log.trace("adding column {}", f.name());
            builder.addColumn(f.name());
        }
        bootstrapCsv = builder.build();
    }

    try {
        if (stream == null)
            openFileStream();
        if (reader == null)
            reader = new BufferedReader(new InputStreamReader(stream));

        if (inputType.equalsIgnoreCase("json")) {
            mappingIterator = jsonMapper.readerFor(Map.class).readValues(reader);
        } else if (inputType.equalsIgnoreCase("csv")) {
            mappingIterator = csvMapper.readerWithSchemaFor(Map.class).with(bootstrapCsv).readValues(reader);
        } else {
            log.error("Unsupported file input type specified ({})", inputType);
            return null;
        }
    } catch (FileNotFoundException fnf) {
        log.warn("Couldn't find file {} for SchemaedFileSourceTask, sleeping to wait for it to be created",
                logFilename());
        synchronized (this) {
            this.wait(1000);
        }
        return null;
    } catch (IOException e) {
        // IOException thrown when no more records in stream
        log.warn("Processed all available data from {}; sleeping to wait additional records", logFilename());
        // Close reader and stream; swallowing exceptions ... we're about to throw a Retry
        try {
            reader.close();
        } catch (Exception nested) {
        } finally {
            reader = null;
        }

        if (stream != System.in) {
            try {
                stream.close();
            } catch (Exception nested) {
            } finally {
                stream = null;
            }
        }

        synchronized (this) {
            this.wait(1000);
        }
        return null;
    }
    log.debug("mappingIterator of type {} created; begin reading data file",
            mappingIterator.getClass().toString());

    // The csvMapper class is really screwy; can't figure out why it
    // won't return a rational Schema ... so we'll extract it from the
    // the first object later.
    if (recordSchema == null && inputType.equalsIgnoreCase("csv") && csvMapper.schema().size() > 0) {
        recordSchema = ConvertMappingSchema(csvMapper.schemaWithHeader());
        log.trace("recordSchema created from csvMapper; type {}", recordSchema.type().toString());
    }

    try {
        FileInputStream fstream = (FileInputStream) stream;
        Long lastElementOffset = streamOffset;
        recordsPerPoll = 3;

        while (mappingIterator.hasNext()) {
            Map<?, ?> element = mappingIterator.next();
            Long elementOffset, iteratorOffset;
            recordCount++;
            recordsPerPoll--;

            iteratorOffset = mappingIterator.getCurrentLocation().getByteOffset(); // never works !!!
            if (iteratorOffset < 0) {
                // The stream channel will CLOSE on the last clean record
                // seen by mapping Iterator, so we have be careful here
                // Additionally, when parsing CSV files, there seems to be a
                // lot of Bad File Descriptor errors; ignore them.
                try {
                    elementOffset = fstream.getChannel().position();
                } catch (java.nio.channels.ClosedChannelException e) {
                    log.trace("getChannel.position threw {}", e.toString());
                    elementOffset = lastElementOffset;
                } catch (IOException e) {
                    log.trace("getChannel.position threw {}", e.toString());
                    elementOffset = lastElementOffset;
                }
            } else {
                log.trace("mappingIterator.getCurrentLocation() returns {}", iteratorOffset.toString());
                elementOffset = iteratorOffset;
            }
            log.trace("Next input record: {} (class {}) from file position {}", element.toString(),
                    element.getClass().toString(), elementOffset.toString());

            if (recordSchema == null) {
                recordSchema = ConvertMappingSchema(element.keySet());
                log.trace("recordSchema created from element; type {}", recordSchema.type().toString());
            }

            if (records == null)
                records = new ArrayList<>();
            records.add(new SourceRecord(offsetKey(filename), offsetValue(elementOffset), topic, recordSchema,
                    ConvertMappingElement(recordSchema, (HashMap<?, ?>) element)));
            streamOffset = lastElementOffset = elementOffset;
        }
    } catch (Exception e) {
        throw new ConnectException(e);
    }

    lastPollTime = currentTime;
    return records;
}
From source file:com.eucalyptus.blockstorage.DASManager.java
public void dupFile(String oldFileName, String newFileName) {
    FileOutputStream fileOutputStream = null;
    FileChannel out = null;
    FileInputStream fileInputStream = null;
    FileChannel in = null;
    try {
        fileOutputStream = new FileOutputStream(new File(newFileName));
        out = fileOutputStream.getChannel();
        fileInputStream = new FileInputStream(new File(oldFileName));
        in = fileInputStream.getChannel();
        in.transferTo(0, in.size(), out);
    } catch (Exception ex) {
        ex.printStackTrace();
    } finally {
        if (fileOutputStream != null) {
            try {
                out.close();
                fileOutputStream.close();
            } catch (IOException e) {
                LOG.error(e);
            }
        }
        if (fileInputStream != null) {
            try {
                in.close();
                fileInputStream.close();
            } catch (IOException e) {
                LOG.error(e);
            }
        }
    }
}
From source file:com.servoy.j2db.util.Utils.java
public static byte[] readFile(File f, long size) {
    if (f != null && f.exists()) {
        FileInputStream fis = null;
        try {
            int length = (int) f.length();
            fis = new FileInputStream(f);
            FileChannel fc = fis.getChannel();
            if (size > length || size < 0) size = length;
            ByteBuffer bb = ByteBuffer.allocate((int) size);
            fc.read(bb);
            bb.rewind();
            byte[] bytes = null;
            if (bb.hasArray()) {
                bytes = bb.array();
            } else {
                bytes = new byte[(int) size];
                bb.get(bytes, 0, (int) size);
            }
            return bytes;
        } catch (Exception e) {
            Debug.error("Error reading file: " + f, e); //$NON-NLS-1$
        } finally {
            try {
                if (fis != null) fis.close();
            } catch (Exception ex) {
            }
        }

        // ByteArrayOutputStream sb = new ByteArrayOutputStream();
        // try
        // {
        //     FileInputStream is = new FileInputStream(f);
        //     BufferedInputStream bis = new BufferedInputStream(is);
        //     streamCopy(bis, sb);
        //     closeInputStream(bis);
        // }
        // catch (Exception e)
        // {
        //     Debug.error(e);
        // }
        // return sb.toByteArray();
    }
    return null;
}
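readFile above sizes a ByteBuffer from File.length() (or the requested size) and fills it with a single FileChannel.read call; a single read is not guaranteed to fill the buffer, so a defensive variant loops until the buffer is full or the channel reaches end-of-stream. The sketch below shows that loop as a standalone helper under those assumptions; it is not Servoy code, and the class and method names are invented for the example.

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;

public final class ReadBytes {
    /** Reads at most maxBytes from the start of f, looping until the buffer is full or EOF. */
    public static byte[] readFully(File f, int maxBytes) throws IOException {
        try (FileInputStream fis = new FileInputStream(f)) {
            FileChannel fc = fis.getChannel();
            int size = (int) Math.min(maxBytes, fc.size());
            ByteBuffer bb = ByteBuffer.allocate(size);
            while (bb.hasRemaining() && fc.read(bb) != -1) {
                // keep reading; read() may return fewer bytes than remain in the buffer
            }
            bb.flip();
            byte[] bytes = new byte[bb.remaining()];
            bb.get(bytes);
            return bytes;
        }
    }
}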
From source file:com.servoy.j2db.util.Utils.java
public static String getTXTFileContent(File f, Charset charset) {
    if (f != null /* && f.exists() */) {
        if (Thread.currentThread().isInterrupted()) {
            Thread.interrupted(); // reset interrupted flag of current thread, FileChannel.read() will throw an exception for it.
        }
        FileInputStream fis = null;
        try {
            int length = (int) f.length();
            if (f.exists()) {
                fis = new FileInputStream(f);
                FileChannel fc = fis.getChannel();
                ByteBuffer bb = ByteBuffer.allocate(length);
                fc.read(bb);
                bb.rewind();
                CharBuffer cb = charset.decode(bb);
                return cb.toString();
            }
        } catch (Exception e) {
            Debug.error("Error reading txt file: " + f, e); //$NON-NLS-1$
        } finally {
            closeInputStream(fis);
        }
    }
    return null;
}