List of usage examples for java.io.File.getUsableSpace()
public long getUsableSpace()
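Returns the number of bytes available to this virtual machine on the partition named by this abstract pathname (0 if the path does not name a partition). Before the project examples below, here is a minimal self-contained sketch of the call, assuming only the JDK; the temp-directory path is an arbitrary illustration, and the related getFreeSpace()/getTotalSpace() calls are shown for contrast:

import java.io.File;

public class UsableSpaceDemo {
    public static void main(String[] args) {
        // Any existing file or directory works; the partition it lives on is what gets queried.
        File dir = new File(System.getProperty("java.io.tmpdir"));

        long usableBytes = dir.getUsableSpace(); // bytes available to this JVM (0 if unknown)
        long freeBytes = dir.getFreeSpace();     // unallocated bytes, ignoring quotas and permissions
        long totalBytes = dir.getTotalSpace();   // size of the partition

        System.out.printf("Usable: %d MB, free: %d MB, total: %d MB%n",
                usableBytes >> 20, freeBytes >> 20, totalBytes >> 20);
    }
}

Note that the returned value is only an estimate: other processes can consume or release space at any moment, so the examples below use it for threshold checks rather than as a guarantee.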
From source file:org.apache.bookkeeper.bookie.BookieInitializationTest.java
/**
 * Check disk full. Expected to throw NoWritableLedgerDirException
 * during bookie initialisation.
 */
@Test(timeout = 30000)
public void testWithDiskFull() throws Exception {
    File tempDir = createTempDir("DiskCheck", "test");
    long usableSpace = tempDir.getUsableSpace();
    long totalSpace = tempDir.getTotalSpace();
    final ServerConfiguration conf = new ServerConfiguration()
            .setZkServers(zkUtil.getZooKeeperConnectString())
            .setZkTimeout(5000)
            .setJournalDirName(tempDir.getPath())
            .setLedgerDirNames(new String[] { tempDir.getPath() });
    conf.setDiskUsageThreshold((1f - ((float) usableSpace / (float) totalSpace)) - 0.05f);
    conf.setDiskUsageWarnThreshold((1f - ((float) usableSpace / (float) totalSpace)) - 0.25f);
    try {
        new Bookie(conf).initialize();
        fail("Should fail with NoWritableLedgerDirException");
    } catch (NoWritableLedgerDirException nlde) {
        // expected
    }
}
From source file:com.thoughtworks.go.server.service.BackupService.java
public String availableDiskSpace() {
    File artifactsDir = artifactsDirHolder.getArtifactsDir();
    return FileUtils.byteCountToDisplaySize(artifactsDir.getUsableSpace());
}
From source file:org.apache.hadoop.yarn.server.nodemanager.DirectoryCollection.java
private boolean isDiskFreeSpaceUnderLimit(File dir) {
    long freeSpace = dir.getUsableSpace() / (1024 * 1024);
    return freeSpace < this.diskUtilizationSpaceCutoff;
}
From source file:org.apache.hadoop.yarn.server.nodemanager.DirectoryCollection.java
private boolean isDiskUsageOverPercentageLimit(File dir, float diskUtilizationPercentageCutoff) {
    float freePercentage = 100 * (dir.getUsableSpace() / (float) dir.getTotalSpace());
    float usedPercentage = 100.0F - freePercentage;
    return (usedPercentage > diskUtilizationPercentageCutoff || usedPercentage >= 100.0F);
}
From source file:tajo.master.MockupWorker.java
@Override
public ServerStatusProto getServerStatus(PrimitiveProtos.NullProto request) {
    // serverStatus builder
    ServerStatusProto.Builder serverStatus = ServerStatusProto.newBuilder();

    // TODO: compute the available number of task slots
    serverStatus.setAvailableTaskSlotNum(taskQueue.size());

    // system (CPU, memory) status builder
    ServerStatusProto.System.Builder systemStatus = ServerStatusProto.System.newBuilder();
    systemStatus.setAvailableProcessors(Runtime.getRuntime().availableProcessors());
    systemStatus.setFreeMemory(Runtime.getRuntime().freeMemory());
    systemStatus.setMaxMemory(Runtime.getRuntime().maxMemory());
    systemStatus.setTotalMemory(Runtime.getRuntime().totalMemory());
    serverStatus.setSystem(systemStatus);

    // disk status builder
    File[] roots = File.listRoots();
    for (File root : roots) {
        ServerStatusProto.Disk.Builder diskStatus = ServerStatusProto.Disk.newBuilder();
        diskStatus.setAbsolutePath(root.getAbsolutePath());
        diskStatus.setTotalSpace(root.getTotalSpace());
        diskStatus.setFreeSpace(root.getFreeSpace());
        diskStatus.setUsableSpace(root.getUsableSpace());
        serverStatus.addDisk(diskStatus);
    }

    return serverStatus.build();
}
From source file:org.dspace.util.SolrImportExport.java
/**
 * Reindexes the specified core.
 *
 * @param indexName the name of the core to reindex
 * @param exportDirName the name of the directory to use for export. If this directory doesn't exist, it will be created.
 * @param keepExport whether to keep the contents of the exportDir after the reindex. If keepExport is false and the
 *                   export directory was created by this method, the export directory will be deleted at the end of the reimport.
 * @param overwrite allow export files to be overwritten during re-index
 */
private static void reindex(String indexName, String exportDirName, boolean keepExport, boolean overwrite)
        throws IOException, SolrServerException, SolrImportExportException {
    String tempIndexName = indexName + "-temp";

    String origSolrUrl = makeSolrUrl(indexName);
    String baseSolrUrl = StringUtils.substringBeforeLast(origSolrUrl, "/"); // need to get non-core solr URL
    String tempSolrUrl = baseSolrUrl + "/" + tempIndexName;

    // The configuration details for the statistics shards reside within the "statistics" folder
    String instanceIndexName = indexName.startsWith("statistics-") ? "statistics" : indexName;
    String solrInstanceDir = ConfigurationManager.getProperty("dspace.dir") + File.separator + "solr"
            + File.separator + instanceIndexName;

    // the [dspace]/solr/[indexName]/conf directory needs to be available on the local machine for this to work
    // -- we need access to the schema.xml and solrconfig.xml file, plus files referenced from there
    // if this directory can't be found, output an error message and skip this index
    File solrInstance = new File(solrInstanceDir);
    if (!solrInstance.exists() || !solrInstance.canRead() || !solrInstance.isDirectory()) {
        throw new SolrImportExportException("Directory " + solrInstanceDir
                + "/conf/ doesn't exist or isn't readable."
                + " The reindexing process requires the Solr configuration directory for this index to be present on the local machine"
                + " even if Solr is running on a different host. Not reindexing index " + indexName);
    }

    String timeField = makeTimeField(indexName);

    // Ensure the export directory exists and is writable
    File exportDir = new File(exportDirName);
    boolean createdExportDir = exportDir.mkdirs();
    if (!createdExportDir && !exportDir.exists()) {
        throw new SolrImportExportException("Could not create export directory " + exportDirName);
    }
    if (!exportDir.canWrite()) {
        throw new SolrImportExportException("Can't write to export directory " + exportDirName);
    }

    try {
        HttpSolrServer adminSolr = new HttpSolrServer(baseSolrUrl);

        // try to find out size of core and compare with free space in export directory
        CoreAdminResponse status = CoreAdminRequest.getStatus(indexName, adminSolr);
        Object coreSizeObj = status.getCoreStatus(indexName).get("sizeInBytes");
        long coreSize = coreSizeObj != null ? Long.valueOf(coreSizeObj.toString()) : -1;
        long usableExportSpace = exportDir.getUsableSpace();
        if (coreSize >= 0 && usableExportSpace < coreSize) {
            System.err.println("Not enough space in export directory " + exportDirName
                    + "; need at least as much space as the index ("
                    + FileUtils.byteCountToDisplaySize(coreSize)
                    + ") but usable space in export directory is only "
                    + FileUtils.byteCountToDisplaySize(usableExportSpace)
                    + ". Not continuing with reindex, please use the " + DIRECTORY_OPTION
                    + " option to specify an alternative export directory with sufficient space.");
            return;
        }

        // Create a temp directory to store temporary core data
        File tempDataDir = new File(ConfigurationManager.getProperty("dspace.dir") + File.separator + "temp"
                + File.separator + "solr-data");
        boolean createdTempDataDir = tempDataDir.mkdirs();
        if (!createdTempDataDir && !tempDataDir.exists()) {
            throw new SolrImportExportException(
                    "Could not create temporary data directory " + tempDataDir.getCanonicalPath());
        }
        if (!tempDataDir.canWrite()) {
            throw new SolrImportExportException(
                    "Can't write to temporary data directory " + tempDataDir.getCanonicalPath());
        }

        try {
            // create a temporary core to hold documents coming in during the reindex
            CoreAdminRequest.Create createRequest = new CoreAdminRequest.Create();
            createRequest.setInstanceDir(solrInstanceDir);
            createRequest.setDataDir(tempDataDir.getCanonicalPath());
            createRequest.setCoreName(tempIndexName);
            createRequest.process(adminSolr).getStatus();
        } catch (SolrServerException e) {
            // try to continue -- it may just be that the core already existed from a previous, failed attempt
            System.err.println("Caught exception when trying to create temporary core: " + e.getMessage()
                    + "; trying to recover.");
            e.printStackTrace(System.err);
        }

        // swap actual core with temporary one
        CoreAdminRequest swapRequest = new CoreAdminRequest();
        swapRequest.setCoreName(indexName);
        swapRequest.setOtherCoreName(tempIndexName);
        swapRequest.setAction(CoreAdminParams.CoreAdminAction.SWAP);
        swapRequest.process(adminSolr);

        try {
            // export from the actual core (from temp core name, actual data dir)
            exportIndex(indexName, exportDir, tempSolrUrl, timeField, overwrite);

            // clear actual core (temp core name, clearing actual data dir) & import
            importIndex(indexName, exportDir, tempSolrUrl, true);
        } catch (Exception e) {
            // we ran into some problems with the export/import -- keep going to try and restore the solr cores
            System.err.println("Encountered problem during reindex: " + e.getMessage()
                    + ", will attempt to restore Solr cores");
            e.printStackTrace(System.err);
        }

        // commit changes
        HttpSolrServer origSolr = new HttpSolrServer(origSolrUrl);
        origSolr.commit();

        // swap back (statistics now going to actual core name in actual data dir)
        swapRequest = new CoreAdminRequest();
        swapRequest.setCoreName(tempIndexName);
        swapRequest.setOtherCoreName(indexName);
        swapRequest.setAction(CoreAdminParams.CoreAdminAction.SWAP);
        swapRequest.process(adminSolr);

        // export all docs from now-temp core into export directory -- this won't cause name collisions with the actual export
        // because the core name for the temporary export has -temp in it while the actual core doesn't
        exportIndex(tempIndexName, exportDir, tempSolrUrl, timeField, overwrite);

        // ...and import them into the now-again-actual core *without* clearing
        importIndex(tempIndexName, exportDir, origSolrUrl, false);

        // commit changes
        origSolr.commit();

        // unload now-temp core (temp core name)
        CoreAdminRequest.unloadCore(tempIndexName, false, false, adminSolr);

        // clean up temporary data dir if this method created it
        if (createdTempDataDir && tempDataDir.exists()) {
            FileUtils.deleteDirectory(tempDataDir);
        }
    } finally {
        // clean up export dir if appropriate
        if (!keepExport && createdExportDir && exportDir.exists()) {
            FileUtils.deleteDirectory(exportDir);
        }
    }
}
From source file:org.apache.hadoop.yarn.server.nodemanager.DirectoryCollection.java
private void setGoodDirsDiskUtilizationPercentage() {
    long totalSpace = 0;
    long usableSpace = 0;

    for (String dir : localDirs) {
        File f = new File(dir);
        if (!f.isDirectory()) {
            continue;
        }
        totalSpace += f.getTotalSpace();
        usableSpace += f.getUsableSpace();
    }
    if (totalSpace != 0) {
        long tmp = ((totalSpace - usableSpace) * 100) / totalSpace;
        if (Integer.MIN_VALUE < tmp && Integer.MAX_VALUE > tmp) {
            goodDirsDiskUtilizationPercentage = (int) tmp;
        }
    } else {
        // got no good dirs
        goodDirsDiskUtilizationPercentage = 0;
    }
}
From source file:tajo.worker.Worker.java
@Override
public ServerStatusProto getServerStatus(NullProto request) {
    // serverStatus builder
    ServerStatusProto.Builder serverStatus = ServerStatusProto.newBuilder();

    // TODO: compute the available number of task slots
    serverStatus.setAvailableTaskSlotNum(MAX_TASK_NUM - tasks.size());

    // system (CPU, memory) status builder
    ServerStatusProto.System.Builder systemStatus = ServerStatusProto.System.newBuilder();
    systemStatus.setAvailableProcessors(Runtime.getRuntime().availableProcessors());
    systemStatus.setFreeMemory(Runtime.getRuntime().freeMemory());
    systemStatus.setMaxMemory(Runtime.getRuntime().maxMemory());
    systemStatus.setTotalMemory(Runtime.getRuntime().totalMemory());
    serverStatus.setSystem(systemStatus);

    // disk status builder
    File[] roots = File.listRoots();
    for (File root : roots) {
        ServerStatusProto.Disk.Builder diskStatus = ServerStatusProto.Disk.newBuilder();
        diskStatus.setAbsolutePath(root.getAbsolutePath());
        diskStatus.setTotalSpace(root.getTotalSpace());
        diskStatus.setFreeSpace(root.getFreeSpace());
        diskStatus.setUsableSpace(root.getUsableSpace());
        serverStatus.addDisk(diskStatus);
    }

    return serverStatus.build();
}
From source file:org.apache.flink.runtime.taskmanager.TaskManager.java
/**
 * Checks whether the given strings describe existing directories that are writable. If that is not
 * the case, an exception is raised.
 *
 * @param tempDirs An array of strings which are checked to be paths to writable directories.
 * @throws Exception Thrown, if any of the mentioned checks fails.
 */
private static final void checkTempDirs(final String[] tempDirs) throws Exception {
    for (int i = 0; i < tempDirs.length; ++i) {
        final String dir = checkNotNull(tempDirs[i], "Temporary file directory #" + (i + 1) + " is null.");
        final File f = new File(dir);

        checkArgument(f.exists(), "Temporary file directory '" + f.getAbsolutePath() + "' does not exist.");
        checkArgument(f.isDirectory(), "Temporary file directory '" + f.getAbsolutePath() + "' is not a directory.");
        checkArgument(f.canWrite(), "Temporary file directory '" + f.getAbsolutePath() + "' is not writable.");

        if (LOG.isInfoEnabled()) {
            long totalSpaceGb = f.getTotalSpace() >> 30;
            long usableSpaceGb = f.getUsableSpace() >> 30;
            double usablePercentage = ((double) usableSpaceGb) / totalSpaceGb * 100;

            LOG.info(String.format("Temporary file directory '%s': total %d GB, usable %d GB [%.2f%% usable]",
                    f.getAbsolutePath(), totalSpaceGb, usableSpaceGb, usablePercentage));
        }
    }
}
From source file:it.dfa.unict.CodeRadePortlet.java
private String processInputFile(UploadPortletRequest uploadRequest, String username, String timestamp,
        AppInput appInput) throws CodeRadePortletException, IOException {
    String createdFile = "";
    String fileInputName = "fileupload"; // Input field name in view.jsp

    // Get the uploaded file as a file.
    File uploadedFile = uploadRequest.getFile(fileInputName);
    String sourceFileName = uploadRequest.getFileName(fileInputName);
    String modelName = FilenameUtils.removeExtension(sourceFileName);
    appInput.setModelName(modelName);
    String extension = FilenameUtils.getExtension(sourceFileName);
    long sizeInBytes = uploadRequest.getSize(fileInputName);

    if (uploadRequest.getSize(fileInputName) == 0) {
        throw new CodeRadePortletException("empty-file");
    }

    _log.debug("Uploading file: " + sourceFileName + " ...");
    File folder = new File(ROOT_FOLDER_NAME);

    // Check minimum storage space to save new files...
    if (folder.getUsableSpace() < UPLOAD_LIMIT) {
        throw new CodeRadePortletException("error-disk-space");
    } else if (sizeInBytes > UPLOAD_LIMIT) {
        throw new CodeRadePortletException("error-limit-exceeded");
    } else {
        // This is our final file path.
        File filePath = new File(folder.getAbsolutePath() + File.separator + username + "_" + modelName + "_"
                + timestamp + "." + extension);
        // Move the existing temporary file to new location.
        FileUtils.copyFile(uploadedFile, filePath);
        _log.debug("File created: " + filePath);
        createdFile = filePath.getAbsolutePath();
    }
    return createdFile;
}