List of usage examples for java.io.File.length()
public long length()
From source file:com.bigdata.dastor.db.commitlog.CommitLog.java
public static void recover(File[] clogs) throws IOException { Set<Table> tablesRecovered = new HashSet<Table>(); List<Future<?>> futures = new ArrayList<Future<?>>(); for (File file : clogs) { int bufferSize = (int) Math.min(file.length(), 32 * 1024 * 1024); BufferedRandomAccessFile reader = new BufferedRandomAccessFile(file.getAbsolutePath(), "r", bufferSize); try {/* w ww . j a v a2 s .c o m*/ final CommitLogHeader clHeader; try { clHeader = CommitLogHeader.readCommitLogHeader(reader); } catch (EOFException eofe) { logger.info( "Attempted to recover an incomplete CommitLogHeader. Everything is ok, don't panic."); continue; } /* seek to the lowest position where any CF has non-flushed data */ int lowPos = CommitLogHeader.getLowestPosition(clHeader); if (lowPos == 0) continue; reader.seek(lowPos); if (logger.isDebugEnabled()) logger.debug("Replaying " + file + " starting at " + lowPos); /* read the logs populate RowMutation and apply */ while (!reader.isEOF()) { if (logger.isDebugEnabled()) logger.debug("Reading mutation at " + reader.getFilePointer()); long claimedCRC32; byte[] bytes; try { bytes = new byte[(int) reader.readLong()]; // readlong can throw EOFException too reader.readFully(bytes); claimedCRC32 = reader.readLong(); } catch (EOFException e) { // last CL entry didn't get completely written. that's ok. break; } ByteArrayInputStream bufIn = new ByteArrayInputStream(bytes); Checksum checksum = new CRC32(); checksum.update(bytes, 0, bytes.length); if (claimedCRC32 != checksum.getValue()) { // this part of the log must not have been fsynced. probably the rest is bad too, // but just in case there is no harm in trying them. 
continue; } /* deserialize the commit log entry */ final RowMutation rm = RowMutation.serializer().deserialize(new DataInputStream(bufIn)); if (logger.isDebugEnabled()) logger.debug(String.format("replaying mutation for %s.%s: %s", rm.getTable(), rm.key(), "{" + StringUtils.join(rm.getColumnFamilies(), ", ") + "}")); final Table table = Table.open(rm.getTable()); tablesRecovered.add(table); final Collection<ColumnFamily> columnFamilies = new ArrayList<ColumnFamily>( rm.getColumnFamilies()); final long entryLocation = reader.getFilePointer(); Runnable runnable = new WrappedRunnable() { public void runMayThrow() throws IOException { /* remove column families that have already been flushed before applying the rest */ for (ColumnFamily columnFamily : columnFamilies) { int id = table.getColumnFamilyId(columnFamily.name()); if (!clHeader.isDirty(id) || entryLocation <= clHeader.getPosition(id)) { rm.removeColumnFamily(columnFamily); } } if (!rm.isEmpty()) { Table.open(rm.getTable()).apply(rm, null, false); } } }; futures.add(StageManager.getStage(StageManager.MUTATION_STAGE).submit(runnable)); if (futures.size() > MAX_OUTSTANDING_REPLAY_COUNT) { FBUtilities.waitOnFutures(futures); futures.clear(); } } } finally { reader.close(); logger.info("Finished reading " + file); } } // wait for all the writes to finish on the mutation stage FBUtilities.waitOnFutures(futures); logger.debug("Finished waiting on mutations from recovery"); // flush replayed tables futures.clear(); for (Table table : tablesRecovered) futures.addAll(table.flush()); FBUtilities.waitOnFutures(futures); }
From source file:com.github.wolfposd.jdpkg.deb.DpkgDeb.java
public static void fileEntryToDestination(File source, ArchiveOutputStream archive, boolean atRoot) throws IOException { TarArchiveEntry entry;//w w w. j a v a 2 s. co m if (atRoot) { entry = new TarArchiveEntry(source.getName()); } else { entry = new TarArchiveEntry(source.getPath().replace(BuildFile, "")); } entry.setSize(source.length()); entry.setMode(TarArchiveEntry.DEFAULT_FILE_MODE); archive.putArchiveEntry(entry); BufferedInputStream input = new BufferedInputStream(new FileInputStream(source)); IOUtils.copy(input, archive); input.close(); archive.closeArchiveEntry(); }
From source file:net.semanticmetadata.lire.utils.FileUtils.java
/**
 * Reads a whole file into a byte array using a memory-mapped buffer (java.nio).
 *
 * @param file the file to read; must be smaller than 2 GB (length is cast to int)
 * @return the file's contents
 * @throws IOException if the file cannot be opened or mapped
 */
public static byte[] readFileToByteArray(File file) throws IOException {
    int length = (int) file.length();
    byte[] result = new byte[length];
    // try-with-resources closes the stream (and its channel) — the original
    // leaked the FileInputStream.
    try (FileInputStream stream = new FileInputStream(file);
            FileChannel channel = stream.getChannel()) {
        MappedByteBuffer in = channel.map(FileChannel.MapMode.READ_ONLY, 0, length);
        in.get(result); // bulk transfer instead of a byte-at-a-time loop
    }
    return result;
}
From source file:com.ibm.jaggr.core.test.TestUtils.java
/**
 * Sums the sizes (in bytes) of the files in {@code directory} that match
 * {@code filter}. Non-recursive.
 *
 * @param directory the directory to list
 * @param filter    filter selecting which entries to count
 * @return total size of matching entries, or 0 if the directory cannot be
 *         listed (not a directory, or an I/O error)
 */
static public long getDirListSize(File directory, FileFilter filter) {
    File[] files = directory.listFiles(filter);
    // listFiles returns null for non-directories or I/O errors; the original
    // would throw a NullPointerException here.
    if (files == null) {
        return 0;
    }
    long result = 0;
    for (File file : files) {
        result += file.length();
    }
    return result;
}
From source file:com.jaspersoft.studio.community.RESTCommunityHelper.java
/**
 * Uploads the specified file to the community site. The returned identifier
 * can be used later when composing other requests.
 *
 * @param httpclient the http client
 * @param attachment the file to attach
 * @param authCookie the session cookie to use for authentication purpose
 * @return the identifier of the file uploaded, <code>null</code> otherwise
 * @throws CommunityAPIException if the upload fails or the response cannot be read
 */
public static String uploadFile(CloseableHttpClient httpclient, File attachment, Cookie authCookie)
        throws CommunityAPIException {
    FileInputStream fin = null;
    CloseableHttpResponse resp = null;
    try {
        fin = new FileInputStream(attachment);
        byte fileContent[] = new byte[(int) attachment.length()];
        // A single read() is not guaranteed to fill the buffer; loop until the
        // whole file is in memory (the original could silently truncate).
        int read = 0;
        while (read < fileContent.length) {
            int n = fin.read(fileContent, read, fileContent.length - read);
            if (n < 0) {
                throw new IOException("Unexpected end of file: " + attachment.getName());
            }
            read += n;
        }
        byte[] encodedFileContent = Base64.encodeBase64(fileContent);
        FileUploadRequest uploadReq = new FileUploadRequest(attachment.getName(), encodedFileContent);
        HttpPost fileuploadPOST = new HttpPost(CommunityConstants.FILE_UPLOAD_URL);
        EntityBuilder fileUploadEntity = EntityBuilder.create();
        fileUploadEntity.setText(uploadReq.getAsJSON());
        fileUploadEntity.setContentType(ContentType.create(CommunityConstants.JSON_CONTENT_TYPE));
        fileUploadEntity.setContentEncoding(CommunityConstants.REQUEST_CHARSET);
        fileuploadPOST.setEntity(fileUploadEntity.build());
        resp = httpclient.execute(fileuploadPOST);
        int httpRetCode = resp.getStatusLine().getStatusCode();
        String responseBodyAsString = EntityUtils.toString(resp.getEntity());
        if (HttpStatus.SC_OK == httpRetCode) {
            ObjectMapper mapper = new ObjectMapper();
            // The endpoint returns loosely-formatted JSON; be lenient.
            mapper.configure(JsonParser.Feature.ALLOW_UNQUOTED_FIELD_NAMES, true);
            mapper.configure(JsonParser.Feature.ALLOW_SINGLE_QUOTES, true);
            mapper.configure(JsonParser.Feature.ALLOW_COMMENTS, true);
            JsonNode jsonRoot = mapper.readTree(responseBodyAsString);
            String fid = jsonRoot.get("fid").asText(); //$NON-NLS-1$
            return fid;
        } else {
            CommunityAPIException ex = new CommunityAPIException(Messages.RESTCommunityHelper_FileUploadError);
            ex.setHttpStatusCode(httpRetCode);
            ex.setResponseBodyAsString(responseBodyAsString);
            throw ex;
        }
    } catch (FileNotFoundException e) {
        JSSCommunityActivator.getDefault().logError(Messages.RESTCommunityHelper_FileNotFoundError, e);
        throw new CommunityAPIException(Messages.RESTCommunityHelper_FileUploadError, e);
    } catch (UnsupportedEncodingException e) {
        JSSCommunityActivator.getDefault().logError(Messages.RESTCommunityHelper_EncodingNotValidError, e);
        throw new CommunityAPIException(Messages.RESTCommunityHelper_FileUploadError, e);
    } catch (IOException e) {
        JSSCommunityActivator.getDefault().logError(Messages.RESTCommunityHelper_PostMethodIOError, e);
        throw new CommunityAPIException(Messages.RESTCommunityHelper_FileUploadError, e);
    } finally {
        // Close the response too — the original leaked the connection.
        IOUtils.closeQuietly(resp);
        IOUtils.closeQuietly(fin);
    }
}
From source file:de.nrw.hbz.deepzoomer.fileUtil.FileUtil.java
public static String loadFileIntoString(File file) { String fString = null;//from w w w . ja va 2 s . c o m FileInputStream fis = null; try { fis = new FileInputStream(file); int i = (int) file.length(); byte[] b = new byte[i]; fis.read(b); ByteArrayOutputStream bfos = new ByteArrayOutputStream(); bfos.write(b); fString = bfos.toString("UTF-8"); } catch (Exception e) { // TODO Auto-generated catch block log.error(e); } finally { if (fis != null) { try { fis.close(); } catch (IOException ioExc) { log.error(ioExc); } } } return fString; }
From source file:com.isa.utiles.Utiles.java
public static byte[] loadFile(File file) throws IOException { InputStream is = new FileInputStream(file); long length = file.length(); if (length > Integer.MAX_VALUE) { // File is too large }/*from w w w.j av a 2s.c o m*/ byte[] bytes = new byte[(int) length]; int offset = 0; int numRead = 0; while (offset < bytes.length && (numRead = is.read(bytes, offset, bytes.length - offset)) >= 0) { offset += numRead; } if (offset < bytes.length) { throw new IOException("Could not completely read file " + file.getName()); } is.close(); return bytes; }
From source file:com.flurry.proguard.UploadMapping.java
/** * Get the payload for creating the Upload in the metadata service * * @param zippedFile the archive to upload * @param projectId the project's ID/*from w ww. j av a 2 s . c o m*/ * @return a JSON string to be sent to the metadata service */ private static String getUploadJson(File zippedFile, String projectId, String uploadType) { return getUploadTemplate().replace("UPLOAD_TYPE", uploadType) .replace("UPLOAD_SIZE", Long.toString(zippedFile.length())).replace("PROJECT_ID", projectId); }
From source file:com.halseyburgund.rwframework.core.RWHttpManager.java
/**
 * Uploads a file to the server as a multipart POST request, together with the
 * given properties as string parts.
 *
 * @param page       base URL of the server page to post to
 * @param properties key/value pairs sent as string multiparts; the "operation"
 *                   entry is also appended to the request URL as a query param
 * @param fileParam  name of the multipart field carrying the file
 * @param file       path of the file to upload
 * @param timeOutSec connection and socket timeout, in seconds
 * @return the server's response body on HTTP 200
 * @throws Exception on I/O failure, or HttpException for any non-200 status
 */
public static String uploadFile(String page, Properties properties, String fileParam, String file,
        int timeOutSec) throws Exception {
    if (D) {
        Log.d(TAG, "Starting upload of file: " + file, null);
    }
    // build GET-like page name that includes the RW operation
    Enumeration<Object> enumProps = properties.keys();
    StringBuilder uriBuilder = new StringBuilder(page).append('?');
    while (enumProps.hasMoreElements()) {
        String key = enumProps.nextElement().toString();
        String value = properties.get(key).toString();
        // only the "operation" property goes into the URL; the rest travel
        // as multipart fields below
        if ("operation".equals(key)) {
            uriBuilder.append(key);
            uriBuilder.append('=');
            uriBuilder.append(java.net.URLEncoder.encode(value));
            break;
        }
    }
    if (D) {
        Log.d(TAG, "GET request: " + uriBuilder.toString(), null);
    }
    // same timeout is used for connect and socket reads
    HttpParams httpParams = new BasicHttpParams();
    HttpConnectionParams.setConnectionTimeout(httpParams, timeOutSec * 1000);
    HttpConnectionParams.setSoTimeout(httpParams, timeOutSec * 1000);
    HttpClient httpClient = new DefaultHttpClient(httpParams);
    HttpPost request = new HttpPost(uriBuilder.toString());
    RWMultipartEntity entity = new RWMultipartEntity();
    // every property becomes a string part of the multipart body
    Iterator<Map.Entry<Object, Object>> i = properties.entrySet().iterator();
    while (i.hasNext()) {
        Map.Entry<Object, Object> entry = (Map.Entry<Object, Object>) i.next();
        String key = (String) entry.getKey();
        String val = (String) entry.getValue();
        entity.addPart(key, val);
        if (D) {
            Log.d(TAG, "Added StringBody multipart for: '" + key + "' = '" + val + "'", null);
        }
    }
    File upload = new File(file);
    entity.addPart(fileParam, upload);
    if (D) {
        String msg = "Added FileBody multipart for: '" + fileParam + "' =" + " <'" + upload.getAbsolutePath()
                + ", " + "size: " + upload.length() + " bytes >'";
        Log.d(TAG, msg, null);
    }
    request.setEntity(entity);
    if (D) {
        Log.d(TAG, "Sending HTTP request...", null);
    }
    HttpResponse response = httpClient.execute(request);
    int st = response.getStatusLine().getStatusCode();
    if (st == HttpStatus.SC_OK) {
        // accumulate the response body line by line
        StringBuffer sbResponse = new StringBuffer();
        InputStream content = response.getEntity().getContent();
        BufferedReader reader = new BufferedReader(new InputStreamReader(content));
        String line;
        while ((line = reader.readLine()) != null) {
            sbResponse.append(line);
        }
        content.close(); // this will also close the connection
        if (D) {
            Log.d(TAG, "Upload successful (HTTP code: " + st + ")", null);
            Log.d(TAG, "Server response: " + sbResponse.toString(), null);
        }
        return sbResponse.toString();
    } else {
        // on failure, dump the request entity for diagnostics before throwing
        ByteArrayOutputStream ostream = new ByteArrayOutputStream();
        entity.writeTo(ostream);
        Log.e(TAG, "Upload failed (http code: " + st + ")", null);
        Log.e(TAG, "Server response: " + ostream.toString(), null);
        throw new HttpException(String.valueOf(st));
    }
}
From source file:edu.cmu.cs.lti.util.general.BasicConvenience.java
/**
 * Reads a file by mapping it completely into memory.
 *
 * @param file the file to read; must be smaller than 2 GB
 * @return the bytes contained in the file
 * @throws IOException if the file cannot be opened or mapped
 */
public static byte[] fastReadFile(File file) throws IOException {
    // try-with-resources replaces the manual finally/close of the original
    try (FileInputStream stream = new FileInputStream(file)) {
        // read the length once so the mapped region and the array agree
        // (the original called file.length() twice, racing with concurrent writes)
        int length = (int) file.length();
        MappedByteBuffer buffer = stream.getChannel().map(MapMode.READ_ONLY, 0, length);
        byte[] bytes = new byte[length];
        buffer.get(bytes);
        return bytes;
    }
}