List of usage examples for java.io FileInputStream getChannel
public FileChannel getChannel()
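The returned channel is connected to the stream: they share a single file position (reading from the stream advances the channel, and repositioning the channel moves where the next stream read starts), and closing the stream closes the channel. A minimal sketch of the seek-then-read pattern that recurs in the examples below; the file name app.log and the 4 KB tail size are illustrative:

import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.channels.FileChannel;
import java.nio.charset.StandardCharsets;

public class GetChannelDemo {
    public static void main(String[] args) throws IOException {
        try (FileInputStream in = new FileInputStream("app.log")) {
            FileChannel ch = in.getChannel();
            long size = ch.size();
            // Seek to (at most) the last 4 KB; subsequent stream reads start there.
            ch.position(Math.max(0, size - 4096));
            BufferedReader reader = new BufferedReader(
                    new InputStreamReader(in, StandardCharsets.UTF_8));
            String line;
            while ((line = reader.readLine()) != null) {
                System.out.println(line);
            }
        }
    }
}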
From source file:org.archive.crawler.framework.CrawlJob.java
/**
 * Refresh knowledge of total launched and last launch by scanning
 * the job.log.
 */
protected void scanJobLog() {
    File jobLog = getJobLog();
    launchCount = 0;
    if (!jobLog.exists())
        return;
    try {
        Pattern launchLine = Pattern.compile("(\\S+) (\\S+) Job launched");
        long startPosition = 0;
        if (jobLog.length() > FileUtils.ONE_KB * 100) {
            isLaunchInfoPartial = true;
            startPosition = jobLog.length() - (FileUtils.ONE_KB * 100);
        }
        FileInputStream jobLogIn = new FileInputStream(jobLog);
        // Seek via the channel so reading starts in the last 100 KB of a large log.
        jobLogIn.getChannel().position(startPosition);
        BufferedReader jobLogReader = new BufferedReader(new InputStreamReader(jobLogIn));
        String line;
        while ((line = jobLogReader.readLine()) != null) {
            Matcher m = launchLine.matcher(line);
            if (m.matches()) {
                launchCount++;
                lastLaunch = new DateTime(m.group(1));
            }
        }
        jobLogReader.close();
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
}
From source file:com.yobidrive.diskmap.buckets.BucketTableManager.java
private void initializeBucketTableFromLastCommittedBucketFile() throws BucketTableManagerException {
    FileInputStream tableStream = null;
    FileChannel fileChannel = null;
    try {
        File latestCommittedFile = getLatestCommitedFile();
        if (latestCommittedFile != null) {
            tableStream = new FileInputStream(latestCommittedFile);
            fileChannel = tableStream.getChannel();
            ByteBuffer buffer = ByteBuffer.allocate(HEADERSIZE);
            fileChannel.position(0L);
            int read = fileChannel.read(buffer);
            if (read < HEADERSIZE) {
                fileChannel.close();
                throw new BucketTableManagerException(
                        "Wrong bucket table header size: " + read + "/" + HEADERSIZE);
            }
            // Check content of header. Start with big endian (the Java default).
            buffer.rewind();
            byteOrder = ByteOrder.BIG_ENDIAN;
            buffer.order(byteOrder);
            int magic = buffer.getInt();
            if (magic == MAGICSTART_BADENDIAN) {
                byteOrder = ByteOrder.LITTLE_ENDIAN;
                buffer.order(byteOrder);
            } else if (magic != MAGICSTART) {
                fileChannel.close();
                throw new BucketTableManagerException("Bad header in bucket table file");
            }
            // Read number of buckets
            long headerMapSize = buffer.getLong();
            // Read checkpoint
            NeedlePointer includedCheckpoint = new NeedlePointer();
            includedCheckpoint.getNeedlePointerFromBuffer(buffer);
            // Read second magic number
            magic = buffer.getInt();
            if (magic != MAGICEND) {
                fileChannel.close();
                throw new BucketTableManagerException("Bad header in bucket table file");
            }
            // Check number of buckets against requested map size
            if (headerMapSize != mapSize) {
                // Map size does not match
                fileChannel.close();
                throw new BucketTableManagerException(
                        "Requested map size " + mapSize + " does not match header map size " + headerMapSize);
            }
            // Set initial checkpoint
            bucketTable.setInitialCheckPoint(includedCheckpoint);
            // Now read all entries
            logger.info("Hot start: loading buckets...");
            for (int i = 0; i < nbBuffers; i++) {
                bucketTable.prepareBufferForReading(i);
                read = fileChannel.read(bucketTable.getBuffer(i));
                if (read < bucketTable.getBuffer(i).limit())
                    throw new BucketTableManagerException("Incomplete bucket table file "
                            + latestCommittedFile.getName() + ", expected " + mapSize + HEADERSIZE);
            }
            // Check second magic marker
            buffer = ByteBuffer.allocate(NeedleLogInfo.INFOSIZE);
            buffer.rewind();
            buffer.limit(INTSIZE);
            if (fileChannel.read(buffer) < INTSIZE)
                throw new BucketTableManagerException(
                        "Incomplete bucket table file, missing second magic number "
                                + latestCommittedFile.getName());
            buffer.rewind();
            magic = buffer.getInt();
            if (magic != MAGICSTART) {
                fileChannel.close();
                throw new BucketTableManagerException("Bad header in bucket table file");
            }
            // Now read clean counters
            while (true) {
                buffer.rewind();
                buffer.limit(NeedleLogInfo.INFOSIZE);
                read = fileChannel.read(buffer);
                if (read > 0 && read < NeedleLogInfo.INFOSIZE)
                    throw new BucketTableManagerException("Incomplete bucket table file, log info too short "
                            + latestCommittedFile.getName() + ", expected " + mapSize + HEADERSIZE);
                if (read <= 0)
                    break;
                NeedleLogInfo nli = new NeedleLogInfo(useAverage);
                buffer.rewind();
                nli.getNeedleLogInfo(buffer);
                logInfoPerLogNumber.put(Integer.valueOf(nli.getNeedleFileNumber()), nli);
            }
            logger.info("Hot start: loaded " + (nbBuffers * entriesPerBuffer) + " buckets");
        } else {
            // Empty file
            bucketTable.setInitialCheckPoint(new NeedlePointer());
            bucketTable.format();
        }
    } catch (IOException ie) {
        throw new BucketTableManagerException("Failed initializing bucket table", ie);
    } catch (BufferUnderflowException bue) {
        throw new BucketTableManagerException("Bucket table too short", bue);
    } finally {
        if (fileChannel != null) {
            try {
                fileChannel.close();
            } catch (IOException ex) {
                throw new BucketTableManagerException("Error while closing file channel", ex);
            }
        }
    }
}
From source file:edu.harvard.iq.dvn.ingest.dsb.DSBWrapper.java
public String ingest(StudyFileEditBean file) throws IOException {
    dbgLog.fine("***** DSBWrapper: ingest(): start *****\n");
    String ddi = null;
    BufferedInputStream infile = null; // ingest-source file
    File tempFile = new File(file.getTempSystemFileLocation());
    SDIOData sd = null;
    if (file.getControlCardSystemFileLocation() == null) {
        // A "classic", 1-file ingest:
        String mime_type = file.getStudyFile().getFileType();
        infile = new BufferedInputStream(new FileInputStream(tempFile));
        dbgLog.info("\nfile mimeType=" + mime_type + "\n\n");
        // Get the available FileReaders for this MIME type
        Iterator<StatDataFileReader> itr = StatDataIO.getStatDataFileReadersByMIMEType(mime_type);
        if (itr.hasNext()) {
            // Use the first subsettable data reader
            StatDataFileReader sdioReader = itr.next();
            dbgLog.info("reader class name=" + sdioReader.getClass().getName());
            if (mime_type != null) {
                String requestedCharacterEncoding = file.getDataLanguageEncoding();
                if (requestedCharacterEncoding != null) {
                    dbgLog.fine("Will try to process the file assuming that the character strings are "
                            + "encoded in " + requestedCharacterEncoding);
                    sdioReader.setDataLanguageEncoding(requestedCharacterEncoding);
                }
                sd = sdioReader.read(infile, null);
            } else {
                // Fail-safe block if mime_type is null:
                // check the format type again and then read the file
                dbgLog.info("mime-type was null: use the back-up method");
                sd = StatDataIO.read(infile, null);
            }
        } else {
            throw new IllegalArgumentException(
                    "No FileReader class found for this mime type=" + mime_type);
        }
    } else {
        // This is a 2-file ingest.
        // As of now, there are 2 supported methods:
        //   1. CSV raw data file + SPSS control card;
        //   2. TAB raw data file + DDI control card.
        // NOTE that "POR file with the Extended Labels" is NOT a 2-file,
        // control card-based ingest! Rather, we ingest the file as a regular
        // SPSS/POR dataset, then modify the variable labels in the resulting
        // TabularFile.
        File rawDataFile = tempFile;
        infile = new BufferedInputStream(new FileInputStream(file.getControlCardSystemFileLocation()));
        String controlCardType = file.getControlCardType();
        if (controlCardType == null || controlCardType.equals("")) {
            dbgLog.info("No Control Card Type supplied.");
            throw new IllegalArgumentException("No Control Card Type supplied.");
        }
        Iterator<StatDataFileReader> itr = StatDataIO.getStatDataFileReadersByFormatName(controlCardType);
        if (!itr.hasNext()) {
            dbgLog.info("No FileReader class found for " + controlCardType + ".");
            throw new IllegalArgumentException("No FileReader class found for " + controlCardType + ".");
        }
        StatDataFileReader sdioReader = itr.next();
        dbgLog.info("reader class name=" + sdioReader.getClass().getName());
        sd = sdioReader.read(infile, rawDataFile);
    }
    if (sd != null) {
        SDIOMetadata smd = sd.getMetadata();
        // tab-file: source file
        String tabDelimitedDataFileLocation = smd.getFileInformation().get("tabDelimitedDataFileLocation")
                .toString();
        dbgLog.fine("tabDelimitedDataFileLocation=" + tabDelimitedDataFileLocation);
        dbgLog.fine("data file (tempFile): abs path:\n" + file.getTempSystemFileLocation());
        dbgLog.fine("mimeType:\n" + file.getStudyFile().getFileType());
        if (infile != null) {
            infile.close();
        }
        // Parse the response
        StudyFile f = file.getStudyFile();
        // First, check the dir: create a sub-directory "ingested"
        File newDir = new File(tempFile.getParentFile(), "ingested");
        if (!newDir.exists()) {
            newDir.mkdirs();
        }
        dbgLog.fine("newDir: abs path:\n" + newDir.getAbsolutePath());
        // tab-file case: destination
        File newFile = new File(newDir, tempFile.getName());
        // NIO-based file-copying idiom
        FileInputStream fis = new FileInputStream(tabDelimitedDataFileLocation);
        FileOutputStream fos = new FileOutputStream(newFile);
        FileChannel fcin = fis.getChannel();
        FileChannel fcout = fos.getChannel();
        fcin.transferTo(0, fcin.size(), fcout);
        fcin.close();
        fcout.close();
        fis.close();
        fos.close();
        dbgLog.fine("newFile: abs path:\n" + newFile.getAbsolutePath());
        // Store the tab-file location
        file.setIngestedSystemFileLocation(newFile.getAbsolutePath());
        // Finally, if we have an extended variable map, replace the
        // labels that have been found in the data file:
        if (file.getExtendedVariableLabelMap() != null) {
            for (String varName : file.getExtendedVariableLabelMap().keySet()) {
                if (smd.getVariableLabel().containsKey(varName)) {
                    smd.getVariableLabel().put(varName, file.getExtendedVariableLabelMap().get(varName));
                }
            }
        }
        DDIWriter dw = new DDIWriter(smd);
        ddi = dw.generateDDI();
        return ddi;
    }
    return null;
}
From source file:org.apache.cordova.core.FileUtils.java
/**
 * Moved this code into its own method so moveTo could use it when the
 * move is across file systems.
 */
private void copyAction(File srcFile, File destFile) throws FileNotFoundException, IOException {
    FileInputStream istream = new FileInputStream(srcFile);
    FileOutputStream ostream = new FileOutputStream(destFile);
    FileChannel input = istream.getChannel();
    FileChannel output = ostream.getChannel();
    try {
        input.transferTo(0, input.size(), output);
    } finally {
        istream.close();
        ostream.close();
        input.close();
        output.close();
    }
}
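One caveat with this idiom: transferTo is not guaranteed to move all requested bytes in a single call, so a robust copy loops until the full size has been transferred. A sketch of the same copy hardened with try-with-resources (Java 7+); the helper name copyFileNio is hypothetical:

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.channels.FileChannel;

class NioCopy {
    // Hypothetical helper; transferTo may move fewer bytes than asked for,
    // hence the loop. The streams (and their channels) close automatically.
    static void copyFileNio(File src, File dest) throws IOException {
        try (FileInputStream in = new FileInputStream(src);
             FileOutputStream out = new FileOutputStream(dest)) {
            FileChannel input = in.getChannel();
            FileChannel output = out.getChannel();
            long size = input.size();
            long done = 0;
            while (done < size) {
                done += input.transferTo(done, size - done, output);
            }
        }
    }
}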
From source file:org.h2gis.drivers.geojson.GeoJsonReaderDriver.java
/**
 * Parses the first GeoJSON feature to create the PreparedStatement.
 *
 * @throws SQLException
 * @throws IOException
 */
private boolean parseMetadata() throws SQLException, IOException {
    FileInputStream fis = null;
    StringBuilder metadataBuilder = new StringBuilder();
    try {
        fis = new FileInputStream(fileName);
        this.fc = fis.getChannel();
        this.fileSize = fc.size();
        // Given the file size and an average node size, compute how many
        // nodes to skip so that progression is updated at a step of 1%.
        readFileSizeEachNode = Math.max(1, (this.fileSize / AVERAGE_NODE_SIZE) / 100);
        nodeCountProgress = 0;
        JsonParser jp = jsFactory.createParser(fis);
        metadataBuilder.append("CREATE TABLE ");
        metadataBuilder.append(tableLocation);
        metadataBuilder.append(" (");
        jp.nextToken(); // START_OBJECT
        jp.nextToken(); // field_name (type)
        jp.nextToken(); // value_string (FeatureCollection)
        String geomType = jp.getText();
        if (geomType.equalsIgnoreCase(GeoJsonField.FEATURECOLLECTION)) {
            jp.nextToken(); // FIELD_NAME features
            String firstParam = jp.getText();
            // Read the CRS
            if (firstParam.equalsIgnoreCase(GeoJsonField.CRS)) {
                parsedSRID = readCRS(jp);
                readFeatures(jp, geomType, metadataBuilder);
            } else if (firstParam.equalsIgnoreCase(GeoJsonField.FEATURES)) {
                readFeatures(jp, geomType, metadataBuilder);
            } else {
                throw new SQLException(
                        "Malformed GeoJSON file. Expected 'features', found '" + firstParam + "'");
            }
        } else {
            throw new SQLException(
                    "Malformed GeoJSON file. Expected 'FeatureCollection', found '" + geomType + "'");
        }
        jp.close();
    } catch (FileNotFoundException ex) {
        throw new SQLException(ex);
    } finally {
        try {
            if (fis != null) {
                fis.close();
            }
        } catch (IOException ex) {
            throw new IOException(ex);
        }
    }
    // Now create the table if there is at least one geometry field.
    if (hasGeometryField) {
        Statement stmt = connection.createStatement();
        stmt.execute(metadataBuilder.toString());
        stmt.close();
        if (fieldIndex > 0) {
            StringBuilder insert = new StringBuilder("INSERT INTO ").append(tableLocation)
                    .append(" VALUES ( ?");
            for (int i = 1; i < fieldIndex; i++) {
                insert.append(",?");
            }
            insert.append(");");
            preparedStatement = connection.prepareStatement(insert.toString());
            return true;
        }
    } else {
        throw new SQLException("The first feature must contain a geometry field.");
    }
    return false;
}
From source file:org.apache.hadoop.hdfs.tools.offlineImageViewer.LsrPBImage.java
public void visit(RandomAccessFile file) throws IOException {
    if (!FSImageUtil.checkFileFormat(file)) {
        throw new IOException("Unrecognized FSImage");
    }
    FileSummary summary = FSImageUtil.loadSummary(file);
    FileInputStream fin = null;
    try {
        fin = new FileInputStream(file.getFD());
        ArrayList<FileSummary.Section> sections = Lists.newArrayList(summary.getSectionsList());
        Collections.sort(sections, new Comparator<FileSummary.Section>() {
            @Override
            public int compare(FileSummary.Section s1, FileSummary.Section s2) {
                SectionName n1 = SectionName.fromString(s1.getName());
                SectionName n2 = SectionName.fromString(s2.getName());
                if (n1 == null) {
                    return n2 == null ? 0 : -1;
                } else if (n2 == null) {
                    return -1;
                } else {
                    return n1.ordinal() - n2.ordinal();
                }
            }
        });
        for (FileSummary.Section s : sections) {
            fin.getChannel().position(s.getOffset());
            InputStream is = FSImageUtil.wrapInputStreamForCompression(conf, summary.getCodec(),
                    new BufferedInputStream(new LimitInputStream(fin, s.getLength())));
            switch (SectionName.fromString(s.getName())) {
            case STRING_TABLE:
                loadStringTable(is);
                break;
            case INODE:
                loadINodeSection(is);
                break;
            case INODE_REFERENCE:
                loadINodeReferenceSection(is);
                break;
            case INODE_DIR:
                loadINodeDirectorySection(is);
                break;
            default:
                break;
            }
        }
        list("", INodeId.ROOT_INODE_ID);
    } finally {
        IOUtils.cleanup(null, fin);
    }
}
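The pattern above — seek the shared file position via getChannel().position(offset), then wrap the stream so each section is read under a byte limit — does not depend on Hadoop. A minimal sketch with Guava's ByteStreams.limit standing in for Hadoop's LimitInputStream; the file name, offset, and length are illustrative, and readAllBytes needs Java 9+:

import com.google.common.io.ByteStreams;
import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;

class SectionReader {
    static byte[] readSection(String path, long offset, long length) throws IOException {
        try (FileInputStream fin = new FileInputStream(path)) {
            // Seeking the channel repositions the stream as well.
            fin.getChannel().position(offset);
            // Cap reads so we cannot run past the end of the section.
            InputStream section = ByteStreams.limit(new BufferedInputStream(fin), length);
            return section.readAllBytes();
        }
    }
}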
From source file:com.yifanlu.PSXperiaTool.PSXperiaTool.java
private void generateImage() throws IOException {
    nextStep("Generating PSImage.");
    FileInputStream in = new FileInputStream(mInputFile);
    FileOutputStream out = new FileOutputStream(new File(mTempDir, "/ZPAK/data/image.ps"));
    FileOutputStream tocOut = new FileOutputStream(new File(mTempDir, "/image_ps_toc.bin"));
    PSImageCreate ps = new PSImageCreate(in);
    PSImage.ProgressCallback progress = new PSImage.ProgressCallback() {
        int mBytesRead = 0, mBytesWritten = 0;

        public void bytesReadChanged(int delta) {
            mBytesRead += delta;
            jump(mBytesRead);
            Logger.verbose("Image bytes read: %d", mBytesRead);
        }

        public void bytesWrittenChanged(int delta) {
            mBytesWritten += delta;
            Logger.verbose("Compressed PSImage bytes written: %d", mBytesWritten);
        }
    };
    // Progress management
    int oldSteps = getSteps();
    setTotalSteps((int) in.getChannel().size());
    jump(0);
    ps.setCallback(progress);
    ps.compress(out);
    ps.writeTocTable(tocOut);
    out.close();
    tocOut.close();
    in.close();
    setTotalSteps(TOTAL_STEPS);
    jump(oldSteps);
    Logger.debug("Done generating PSImage");
    Logger.debug("Deleting temporary patched game.");
    FileUtils.deleteQuietly(new File(mTempDir, "game.iso"));
    Logger.info("Generating ZPAK.");
    File zpakDirectory = new File(mTempDir, "/ZPAK");
    File zpakFile = new File(mTempDir, "/" + mProperties.getProperty("KEY_TITLE_ID") + ".zpak");
    FileOutputStream zpakOut = new FileOutputStream(zpakFile);
    ZpakCreate zcreate = new ZpakCreate(zpakOut, zpakDirectory);
    zcreate.create(true);
    FileUtils.deleteDirectory(zpakDirectory);
    Logger.debug("Done generating ZPAK at %s", zpakFile.getPath());
}
From source file:org.commoncrawl.service.listcrawler.HDFSFileIndex.java
private void loadIndexFromLocalFile() throws IOException {
    LOG.info("Loading Index from Local File:" + _localIndexFilePath);
    // Now open an input stream to the local file ...
    FileInputStream fileInputStream = new FileInputStream(_localIndexFilePath);
    DataInputStream dataStream = new DataInputStream(fileInputStream);
    try {
        // Deserialize bloom filter
        _bloomFilter = BloomFilter.serializer().deserialize(dataStream);
        _indexHintCount = dataStream.readInt();
        int indexHintDataSize = _indexHintCount * INDEX_HINT_SIZE;
        // ... and deserialize index hints
        _indexHints = ByteBuffer.allocate(indexHintDataSize);
        dataStream.readFully(_indexHints.array());
        // Load index data buffer size
        _indexDataSize = dataStream.readInt();
        // ... and capture offset information
        _indexDataOffset = (int) fileInputStream.getChannel().position();
    } finally {
        if (fileInputStream != null) {
            fileInputStream.close();
        }
    }
    LOG.info("Successfully loaded Index");
}
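Capturing the offset with getChannel().position() works here only because DataInputStream adds no buffering of its own: the channel's position is exactly the number of bytes the header reads have consumed, whereas an interposed BufferedInputStream would read ahead and make the position overshoot. A minimal sketch of the same offset-capture trick, with a hypothetical file name and header layout:

import java.io.DataInputStream;
import java.io.FileInputStream;
import java.io.IOException;

class OffsetCapture {
    public static void main(String[] args) throws IOException {
        try (FileInputStream fis = new FileInputStream("index.bin")) {
            DataInputStream in = new DataInputStream(fis); // no buffering layer
            int hintCount = in.readInt(); // consume a 4-byte header field
            // Channel position == bytes consumed so far (4 here): the offset
            // where the data after the header begins.
            long dataOffset = fis.getChannel().position();
            System.out.println(hintCount + " hints; data starts at byte " + dataOffset);
        }
    }
}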
From source file:com.polyvi.xface.extension.filetransfer.XFileTransferExt.java
/**
 * Uploads a file to a remote server.
 *
 * @param appWorkspace the application workspace directory
 * @param source       the path of the file to upload
 * @param target       the server URL to upload the file to
 * @param args         the JSONArray of extension arguments
 * @param callbackCtx  the native-to-JS callback context
 *
 * args[2] fileKey       form element name under which the file is posted; defaults to "file"
 * args[3] fileName      file name to submit to the server; defaults to "image.jpg"
 * args[4] mimeType      MIME type of the data; defaults to "image/jpeg"
 * args[5] params        additional key/value pairs to send in the HTTP request
 * args[6] trustEveryone whether to accept any SSL certificate
 * args[7] chunkedMode   whether to stream the upload in chunked mode; defaults to true
 * @return a FileUploadResult wrapped in an XExtensionResult
 */
private XExtensionResult upload(String appWorkspace, String source, String target, JSONArray args,
        XCallbackContext callbackCtx) {
    XLog.d(CLASS_NAME, "upload " + source + " to " + target);
    HttpURLConnection conn = null;
    try {
        String fileKey = getArgument(args, 2, "file");
        String fileName = getArgument(args, 3, "image.jpg");
        String mimeType = getArgument(args, 4, "image/jpeg");
        JSONObject params = args.optJSONObject(5);
        if (params == null) {
            params = new JSONObject();
        }
        boolean trustEveryone = args.optBoolean(6);
        boolean chunkedMode = args.optBoolean(7) || args.isNull(7);
        JSONObject headers = args.optJSONObject(8);
        if (headers == null && params != null) {
            headers = params.optJSONObject("headers");
        }
        String objectId = args.getString(9);

        URL url = new URL(target);
        conn = getURLConnection(url, trustEveryone);
        conn.setDoInput(true);
        conn.setDoOutput(true);
        conn.setUseCaches(false);
        conn.setRequestMethod("POST");
        conn.setRequestProperty("Connection", "Keep-Alive");
        conn.setRequestProperty("Content-Type", "multipart/form-data;boundary=" + BOUNDARY);
        setCookieProperty(conn, target);
        // Set any custom request headers
        handleRequestHeader(headers, conn);
        byte[] extraBytes = extraBytesFromParams(params, fileKey);
        String midParams = "\"" + LINE_END + "Content-Type: " + mimeType + LINE_END + LINE_END;
        String tailParams = LINE_END + LINE_START + BOUNDARY + LINE_START + LINE_END;
        byte[] fileNameBytes = fileName.getBytes(ENCODING_TYPE);
        FileInputStream fileInputStream = (FileInputStream) getPathFromUri(appWorkspace, source);
        int maxBufferSize = XConstant.BUFFER_LEN;
        if (chunkedMode) {
            conn.setChunkedStreamingMode(maxBufferSize);
        } else {
            int stringLength = extraBytes.length + midParams.length() + tailParams.length()
                    + fileNameBytes.length;
            XLog.d(CLASS_NAME, "String Length: " + stringLength);
            // The channel's size gives the exact byte count for the fixed-length body.
            int fixedLength = (int) fileInputStream.getChannel().size() + stringLength;
            XLog.d(CLASS_NAME, "Content Length: " + fixedLength);
            conn.setFixedLengthStreamingMode(fixedLength);
        }
        // Write the multipart body
        OutputStream ouputStream = conn.getOutputStream();
        DataOutputStream dos = new DataOutputStream(ouputStream);
        dos.write(extraBytes);
        dos.write(fileNameBytes);
        dos.writeBytes(midParams);
        XFileUploadResult result = new XFileUploadResult();
        FileTransferProgress progress = new FileTransferProgress();
        int bytesAvailable = fileInputStream.available();
        int bufferSize = Math.min(bytesAvailable, maxBufferSize);
        byte[] buffer = new byte[bufferSize];
        int bytesRead = fileInputStream.read(buffer, 0, bufferSize);
        long totalBytes = 0;
        while (bytesRead > 0) {
            totalBytes += bytesRead;
            result.setBytesSent(totalBytes);
            dos.write(buffer, 0, bytesRead);
            bytesRead = fileInputStream.read(buffer, 0, bufferSize);
            if (objectId != null) {
                // Report progress back to JS when an object ID was supplied
                progress.setTotal(bytesAvailable);
                XLog.d(CLASS_NAME, "total=" + bytesAvailable);
                progress.setLoaded(totalBytes);
                progress.setLengthComputable(true);
                XExtensionResult progressResult = new XExtensionResult(XExtensionResult.Status.OK,
                        progress.toJSONObject());
                progressResult.setKeepCallback(true);
                callbackCtx.sendExtensionResult(progressResult);
            }
            synchronized (abortTriggered) {
                if (objectId != null && abortTriggered.contains(objectId)) {
                    abortTriggered.remove(objectId);
                    throw new AbortException(ABORT_EXCEPTION_UPLOAD_ABORTED);
                }
            }
        }
        dos.writeBytes(tailParams);
        fileInputStream.close();
        dos.flush();
        dos.close();
        checkConnection(conn);
        setUploadResult(result, conn);
        // Restore the default hostname verifier and SSL socket factory
        if (trustEveryone && url.getProtocol().toLowerCase().equals("https")) {
            ((HttpsURLConnection) conn).setHostnameVerifier(mDefaultHostnameVerifier);
            HttpsURLConnection.setDefaultSSLSocketFactory(mDefaultSSLSocketFactory);
        }
        XLog.d(CLASS_NAME, "****** About to return a result from upload");
        return new XExtensionResult(XExtensionResult.Status.OK, result.toJSONObject());
    } catch (AbortException e) {
        JSONObject error = createFileTransferError(ABORTED_ERR, source, target, conn);
        return new XExtensionResult(XExtensionResult.Status.ERROR, error);
    } catch (FileNotFoundException e) {
        JSONObject error = createFileTransferError(FILE_NOT_FOUND_ERR, source, target, conn);
        XLog.e(CLASS_NAME, error.toString());
        return new XExtensionResult(XExtensionResult.Status.ERROR, error);
    } catch (MalformedURLException e) {
        JSONObject error = createFileTransferError(INVALID_URL_ERR, source, target, conn);
        XLog.e(CLASS_NAME, error.toString());
        return new XExtensionResult(XExtensionResult.Status.ERROR, error);
    } catch (IOException e) {
        JSONObject error = createFileTransferError(CONNECTION_ERR, source, target, conn);
        XLog.e(CLASS_NAME, error.toString());
        return new XExtensionResult(XExtensionResult.Status.IO_EXCEPTION, error);
    } catch (JSONException e) {
        XLog.e(CLASS_NAME, e.getMessage());
        return new XExtensionResult(XExtensionResult.Status.JSON_EXCEPTION);
    } catch (Throwable t) {
        JSONObject error = createFileTransferError(CONNECTION_ERR, source, target, conn);
        XLog.e(CLASS_NAME, error.toString());
        return new XExtensionResult(XExtensionResult.Status.IO_EXCEPTION, error);
    } finally {
        if (conn != null) {
            conn.disconnect();
        }
    }
}
From source file:com.stfalcon.contentmanager.ContentManager.java
private void handleMediaContent(final Intent data) {
    pickContentListener.onStartContentLoading();
    new Thread(new Runnable() {
        public void run() {
            try {
                Uri contentVideoUri = data.getData();
                FileInputStream in = (FileInputStream) activity.getContentResolver()
                        .openInputStream(contentVideoUri);
                if (targetFile == null) {
                    targetFile = createFile(savedContent);
                }
                FileOutputStream out = new FileOutputStream(targetFile);
                FileChannel inChannel = in.getChannel();
                FileChannel outChannel = out.getChannel();
                inChannel.transferTo(0, inChannel.size(), outChannel);
                in.close();
                out.close();
                handler.post(new Runnable() {
                    @Override
                    public void run() {
                        pickContentListener.onContentLoaded(Uri.fromFile(targetFile), savedContent.toString());
                    }
                });
            } catch (final Exception e) {
                handler.post(new Runnable() {
                    @Override
                    public void run() {
                        pickContentListener.onError(e.getMessage());
                    }
                });
            }
        }
    }).start();
}