List of usage examples for java.io.File.toString()
public String toString()
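Before the examples below, a minimal sketch of the basic behavior: File.toString() simply returns the abstract pathname as a string, the same value returned by getPath(). The file name used here is purely illustrative.

import java.io.File;

public class FileToStringExample {
    public static void main(String[] args) {
        // Hypothetical path used only for illustration.
        File f = new File("reports" + File.separator + "summary.txt");

        // toString() returns the pathname string, equivalent to getPath().
        System.out.println(f.toString()); // e.g. reports/summary.txt on Unix-like systems
        System.out.println(f.getPath());  // same value
    }
}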
From source file:net.okjsp.imageloader.ImageFetcher.java
/**
 * The main process method, which will be called by the ImageWorker in the AsyncTask background
 * thread.
 *
 * @param data The data to load the bitmap, in this case, a regular http URL
 * @return The downloaded and resized bitmap
 */
private Bitmap processBitmap(String data) {
    if (BuildConfig.DEBUG && DEBUG_LOG) {
        Log.d(TAG, "processBitmap - " + data);
    }

    // Download a bitmap, write it to a file
    final File f = downloadBitmap(mContext, data);

    if (f != null) {
        // Return a sampled down version
        return decodeSampledBitmapFromFile(f.toString(), mImageWidth, mImageHeight);
    }
    return null;
}
From source file:bjerne.gallery.service.impl.VideoConversionServiceImpl.java
private void cleanupFailure(Process pr, File newVideo) {
    LOG.debug("Cleaning up failing conversion job. Killing process {}", pr);
    pr.destroy();
    LOG.debug("Trying to remove new file (if any): {}", newVideo.toString());
    newVideo.delete();
}
From source file:com.genericdemo.enginedata.Application.java
@Override
public void populateDAG(DAG dag, Configuration conf) {
    String eventSchema = SchemaUtils.jarResourceFileToString(EVENT_SCHEMA);
    String dataSchema = SchemaUtils.jarResourceFileToString(DATA_SCHEMA);
    String pojoSchema = SchemaUtils.jarResourceFileToString("schema.json");
    String pojoClass = "com.genericdemo.enginedata.EngineDataEvent";

    try {
        EngineDataGenerator generator = dag.addOperator("Generator", new EngineDataGenerator());

        CsvParser parser = dag.addOperator("Parser", new CsvParser());
        //parser.setClazz(EngineDataEvent.class);
        parser.setSchema(pojoSchema);
        dag.setOutputPortAttribute(parser.out, PortContext.TUPLE_CLASS, EngineDataEvent.class);

        POJOEnrichmentOperator enricher = dag.addOperator("Enricher", new POJOEnrichmentOperator());
        //InputStream origIs = this.getClass().getResourceAsStream("/ErrorCodes.txt");
        //File errorFile = new File("/tmp/blah1.txt");
        //FileUtils.deleteQuietly(new File(errorFile.toString()));
        //FileUtils.copyInputStreamToFile(origIs, errorFile);
        FSLoader store = new FSLoader();
        store.setFileName("/user/ashwin/ErrorCodes.txt");
        enricher.setLookupKeyStr("errorCode");
        enricher.setStore(store);
        //enricher.setInputClassStr(pojoClass);
        //enricher.setOutputClassStr(pojoClass);
        enricher.setTupleFieldsToCopyFromInputToOutputStr("time,errorCode,model,gear,temperature,speed,rpm");
        enricher.setFieldToAddToOutputTupleStr("description");
        dag.setOutputPortAttribute(enricher.outputPojo, PortContext.TUPLE_CLASS, EngineDataEvent.class);
        dag.setInputPortAttribute(enricher.inputPojo, PortContext.TUPLE_CLASS, EngineDataEvent.class);

        AggregationModule aggregator = dag.addModule("Aggregations", new AggregationModule());
        //aggregator.setComputationalSchema(eventSchema);
        InputStream schemaIs = this.getClass().getResourceAsStream("/engineDataEventSchema.json");
        File schemaFile = new File("/tmp/blah.txt");
        FileUtils.deleteQuietly(new File(schemaFile.toString()));
        FileUtils.copyInputStreamToFile(schemaIs, schemaFile);
        aggregator.setComputationSchemaFilePath(schemaFile.getAbsolutePath());
        aggregator.setPojoSchema(pojoClass);
        aggregator.setTimeFieldName("time");
        //aggregator.setStorePartitionCount(4);
        //aggregator.setDimensionComputationPartitionCount(4);

        //ConsoleOutputOperator console = dag.addOperator("Console", new ConsoleOutputOperator());

        HiveStore hiveStore = new HiveStore();
        hiveStore.setDatabaseUrl("jdbc:hive2://localhost:10000");
        hiveStore.setDatabaseDriver("org.apache.hive.jdbc.HiveDriver");
        hiveStore.setUserName("ashwin");
        hiveStore.setFilepath("/user/ashwin/hive/");

        ArrayList<String> hivePartitionColumns = new ArrayList<String>();
        //hivePartitionColumns.add("model");
        ArrayList<FIELD_TYPE> partitiontypes = new ArrayList<FIELD_TYPE>();
        partitiontypes.add(FIELD_TYPE.STRING);
        ArrayList<FIELD_TYPE> fieldtypes = new ArrayList<FIELD_TYPE>();
        fieldtypes.add(FIELD_TYPE.LONG);
        fieldtypes.add(FIELD_TYPE.STRING);
        fieldtypes.add(FIELD_TYPE.STRING);
        fieldtypes.add(FIELD_TYPE.INTEGER);
        fieldtypes.add(FIELD_TYPE.DOUBLE);
        fieldtypes.add(FIELD_TYPE.DOUBLE);
        fieldtypes.add(FIELD_TYPE.DOUBLE);
        fieldtypes.add(FIELD_TYPE.STRING);
        ArrayList<String> hiveColumns = new ArrayList<String>();
        hiveColumns.add("time");
        hiveColumns.add("errorCode");
        hiveColumns.add("model");
        hiveColumns.add("gear");
        hiveColumns.add("temperature");
        hiveColumns.add("speed");
        hiveColumns.add("rpm");
        hiveColumns.add("description");

        FSPojoToHiveOperator fsRolling = dag.addOperator("HdfsFileWriter", new FSPojoToHiveOperator());
        fsRolling.setFilePath("/user/ashwin/hive/");
        fsRolling.setHiveColumns(hiveColumns);
        fsRolling.setHiveColumnDataTypes(fieldtypes);
        fsRolling.setHivePartitionColumnDataTypes(partitiontypes);
        fsRolling.setHivePartitionColumns(hivePartitionColumns);

        ArrayList<String> expressions = new ArrayList<String>();
        expressions.add("getTime()");
        expressions.add("getErrorCode()");
        expressions.add("getModel()");
        expressions.add("getGear()");
        expressions.add("getTemperature()");
        expressions.add("getSpeed()");
        expressions.add("getRpm()");
        expressions.add("getDescription()");
        ArrayList<String> expressionsPartitions = new ArrayList<String>();
        //expressionsPartitions.add("getModel()");
        fsRolling.setMaxLength(1000000);
        //fsRolling.setRotationWindows(60);
        fsRolling.setAlwaysWriteToTmp(false);
        fsRolling.setExpressionsForHiveColumns(expressions);
        fsRolling.setExpressionsForHivePartitionColumns(expressionsPartitions);

        HiveOperator hiveOperator = dag.addOperator("HiveOperator", new HiveOperator());
        hiveOperator.setHivestore(hiveStore);
        hiveOperator.setTablename("errorcodes");
        hiveOperator.setHivePartitionColumns(hivePartitionColumns);

        dag.addStream("rawdata", generator.output, parser.in);
        dag.addStream("parsed", parser.out, enricher.inputPojo);
        dag.addStream("aggregate", enricher.outputPojo, aggregator.inputPOJO, fsRolling.input);
        dag.addStream("toHive", fsRolling.outputPort, hiveOperator.input);
    } catch (Exception ex) {
        DTThrowable.rethrow(ex);
    }
}
From source file:de.teamgrit.grit.preprocess.tokenize.GeneralTokenizer.java
/**
 * On the submission level, this function gathers all relevant files and
 * returns them.
 *
 * @param location
 *            Where to look for files
 * @return Files matching suffixRegex in the given folder
 */
private List<Path> extractSubmissionFiles(Path location) {
    List<Path> submissionFiles = new LinkedList<>();
    m_log.info("Extracting files.");

    // Check if our location has any submission files (as recognized by
    // their suffix). If so, note them.
    for (File currentFile : location.toFile().listFiles()) {
        // unpack archives
        if (currentFile.toString().matches(m_archiveRegex)) {
            try {
                // the number indicates to which level of a zipfile nested
                // zipfiles will be extracted
                ArchiveHandler unzipper = new ZipfileHandler(5, Paths.get(".").toFile());
                Path unzippedDir = Paths.get(FilenameUtils.removeExtension(currentFile.toString()));
                unzipper.extractZip(currentFile, unzippedDir.toFile());
                submissionFiles.add(unzippedDir);
            } catch (FileNotFoundException | ZipException e) {
                m_log.info("Error while unzipping ilias submission" + e.getMessage());
            } catch (NoProperParameterException e) {
                m_log.info("Bad parameters for zip." + e.getMessage());
            }
        } else if (currentFile.toString().matches(m_sourceSuffixRegex)) {
            m_log.info("Found: " + currentFile.toString());
            if (!submissionFiles.contains(location)) {
                submissionFiles.add(location);
            }
        } else {
            m_log.info("found invalid file: " + currentFile.toString());
        }
    }
    return submissionFiles;
}
From source file:com.gistlabs.mechanize.AbstractResource.java
@Override
public void saveTo(final File file) {
    if (file.exists())
        throw new IllegalArgumentException("File '" + file.toString() + "' already exists.");
    try {
        saveTo(new FileOutputStream(file));
    } catch (FileNotFoundException e) {
        throw MechanizeExceptionFactory.newException(e);
    }
}
From source file:de.teamgrit.grit.preprocess.tokenize.GeneralTokenizer.java
/**
 * Traverses a directory tree, only considering directories that match the
 * ones specified in {@link SubmissionStructure}. When reaching the lowest
 * level, submissions are gathered.
 *
 * @param structure
 *            A StructureObj containing a description of how folders
 *            containing the submissions are arranged.
 * @param level
 *            how deep we have descended into the hierarchy. This starts
 *            out with 1 and is then used internally to track the depth of
 *            folders. Also used to ensure a maximum recursion depth.
 * @param location
 *            Which directory is to be scanned.
 * @return A list of all matching files/folders we have found.
 * @throws MaximumDirectoryDepthExceededException
 *             when maxDirectoryDepth is exceeded while traversing
 *             directories.
 */
private List<Path> traverse(List<String> structure, int level, Path location)
        throws MaximumDirectoryDepthExceededException {
    m_log.info("traverse: " + location.toString());

    // If we went too deep, we abort here in order to avoid exploding our
    // stack space
    if (level >= m_maxDirectoryDepth) {
        throw new MaximumDirectoryDepthExceededException("Encountered more than " + m_maxDirectoryDepth
                + " Directories in " + location.toString() + "\nProgram returned Cthulhu.");
    }

    // If we have reached the bottom, we can scan for files.
    if ("SUBMISSION".equals(structure.get(level))) {
        // look for files.
        m_log.config("Bottomed out in " + location);
        List<Path> submission = extractSubmissionFiles(location);
        // log students without a submission
        if (submission.isEmpty()) {
            m_log.info("Nothing found in " + location);
            m_emptyLocations.add(location);
        }
        return submission;
    }

    List<Path> foundSubmissions = new LinkedList<>();

    // ensure that empty dirs are handled properly
    if ((location.toFile().listFiles() == null) || (location.toFile().listFiles().length == 0)) {
        m_log.info("No files in " + location.toString());
        return new LinkedList<>();
    }

    // If we are not too deep and not in the final level, go through all
    // directories here and go one level deeper.
    for (File currentFile : location.toFile().listFiles()) {
        m_log.info("looking at " + currentFile.toString());
        if (currentFile.isDirectory()) {
            // does this directory match our structure spec?
            if (currentFile.getName().matches(structure.get(level))) {
                // if so, traverse it and collect everything it returns.
                foundSubmissions.addAll(traverse(structure, (level + 1), currentFile.toPath()));
            } else {
                m_log.info("Unexpected file: " + currentFile.toString() + " to " + structure.get(level));
            }
        }
    }
    return foundSubmissions;
}
From source file:uk.ac.bbsrc.tgac.browser.web.UploadController.java
public void uploadFile(Class type, String qualifier, MultipartFile fileItem) throws IOException {
    log.info("uploadfile 1.1");
    File dir = new File(filesManager.getFileStorageDirectory() + File.separator
            + type.getSimpleName().toLowerCase() + File.separator + qualifier);
    if (filesManager.checkDirectory(dir, true)) {
        log.info("Attempting to store " + dir.toString() + File.separator + fileItem.getOriginalFilename());
        fileItem.transferTo(
                new File(dir + File.separator + fileItem.getOriginalFilename().replaceAll("\\s", "_")));
    } else {
        throw new IOException(
                "Cannot upload file - check that the directory specified in miso.properties exists and is writable");
    }
    log.info("uploadfile 1.2");
}
From source file:de.tarent.maven.plugins.pkg.Utils.java
/**
 * Copies the <code>AuxFile</code> instances contained within the set. It
 * takes the <code>srcAuxFilesDir</code> and <code>auxFileDstDir</code>
 * arguments into account to specify the parent source and destination
 * directory of the files.
 *
 * By default files are copied into directories. If the <code>rename</code>
 * property of the <code>AuxFile</code> instance is set however the file is
 * copied and renamed to the last part of the path.
 *
 * The return value is the amount of copied bytes.
 *
 * @param l
 * @param srcAuxFilesDir
 * @param dstDir
 * @param auxFiles
 * @param makeExecutable
 * @return
 * @throws MojoExecutionException
 */
public static long copyFiles(Log l, File srcDir, File dstDir, List<? extends AuxFile> auxFiles, String type,
        boolean makeExecutable) throws MojoExecutionException {
    long size = 0;

    Iterator<? extends AuxFile> ite = auxFiles.iterator();
    while (ite.hasNext()) {
        AuxFile af = (AuxFile) ite.next();
        File from = new File(srcDir, af.from);
        File to = new File(dstDir, af.to);

        l.info("copying " + type + ": " + from.toString());
        l.info("destination: " + to.toString());

        if (!from.exists()) {
            throw new MojoExecutionException("File to copy does not exist: " + from.toString());
        }
        createParentDirs(to, type);

        try {
            if (from.isDirectory()) {
                to = new File(to, from.getName());
                FileUtils.copyDirectory(from, to, FILTER);
                for (final Iterator<File> files = FileUtils.iterateFiles(from, FILTER, FILTER); files.hasNext();) {
                    final File nextFile = files.next();
                    size += nextFile.length();
                }
            } else if (af.isRename()) {
                FileUtils.copyFile(from, to);
                size += from.length();
                if (makeExecutable) {
                    makeExecutable(l, to.getAbsolutePath());
                }
            } else {
                FileUtils.copyFileToDirectory(from, to);
                size += from.length();
                if (makeExecutable) {
                    makeExecutable(l, to.getAbsolutePath() + File.separator + from.getName());
                }
            }
        } catch (IOException ioe) {
            throw new MojoExecutionException("IOException while copying " + type, ioe);
        }
    }
    return size;
}
From source file:com.qspin.qtaste.testsuite.impl.JythonTestScript.java
public static List<String> getAdditionalPythonPath(File file) {
    List<String> pythonlibs = new ArrayList<>();

    // add libraries referenced by the environment variable
    for (String additionnalPath : StaticConfiguration.JYTHON_LIB.split(File.pathSeparator)) {
        File directory = new File(additionnalPath);
        pythonlibs.add(directory.toString());
    }

    if (!file.getAbsolutePath().contains("TestSuites")) {
        return pythonlibs;
    }

    try {
        File directory = file.getAbsoluteFile().getCanonicalFile();
        while (!directory.getName().equals("TestSuites")) {
            //File testSuitesDirectory = new File("TestSuites").getAbsoluteFile().getCanonicalFile();
            //do {
            directory = directory.getParentFile();
            pythonlibs.add(directory + File.separator + "pythonlib");
        }
        //while (!directory.equals(testSuitesDirectory));
    } catch (IOException e) {
        logger.error("Error while getting pythonlib directories: " + e.getMessage());
    }
    return pythonlibs;
}