List of usage examples for java.io File toString
public String toString()
From source file:org.balloon_project.overflight.task.importer.ImporterFileListener.java
@Override public void run() { // TODO initial import start // initial import of existing file logger.info("Scanning for files to import"); File importDir = new File(configuration.getDatabaseImportDirectory()); if (importDir.exists() && importDir.isDirectory()) { for (File file : importDir.listFiles()) { if (file.isFile() && file.getPath().endsWith(IndexingTask.N_TRIPLES_EXTENSION)) { logger.info("File event: Adding " + file.toString() + " to importer queue"); importer.startImporting(file); }//from w w w. j av a2 s . c o m } } // starting file watch service for future files try { String path = configuration.getDatabaseImportDirectory(); logger.info("Starting import file listener for path " + path); Path tmpPath = Paths.get(path); WatchService watchService = FileSystems.getDefault().newWatchService(); tmpPath.register(watchService, StandardWatchEventKinds.ENTRY_CREATE); for (;;) { WatchKey key = watchService.take(); for (WatchEvent event : key.pollEvents()) { if (event.kind().name() == "OVERFLOW") { continue; } else { WatchEvent<Path> ev = (WatchEvent<Path>) event; Path filename = ev.context(); logger.info("File event: Adding " + filename.toString() + " to importer queue"); importer.startImporting(tmpPath.resolve(filename).toFile()); } } // Reset the key -- this step is critical if you want to // receive further watch events. If the key is no longer valid, // the directory is inaccessible so exit the loop. boolean valid = key.reset(); if (!valid) { break; } } } catch (IOException | InterruptedException e) { e.printStackTrace(); //To change body of catch statement use File | Settings | File Templates. } finally { logger.debug("Stopping import file listener"); } }
From source file:com.mpobjects.rtcalltree.report.xml.XmlReporter.java
/** * @param aCreationDate/*from w ww . j av a 2 s . c om*/ * @param aThreadName * @return */ protected Writer createWriter(Date aCreationDate, String aThreadName) { String filename = MessageFormat.format(filenamePattern, aCreationDate, aThreadName); filename = filename.replaceAll("[: _]+", "_"); File outfile = new File(destination, filename); try { FileUtils.forceMkdir(outfile.getParentFile()); LOG.debug("Writing XML report to {}", outfile.toString()); return new OutputStreamWriter(new FileOutputStream(outfile), Charsets.UTF_8); } catch (IOException e) { LOG.error("Unable to create file output stream for: " + outfile.toString() + ". " + e.getMessage(), e); return null; } }
From source file:de.tudarmstadt.ukp.dkpro.keyphrases.core.evaluator.KeyphraseEvaluatorTest.java
/**
 * End-to-end check of {@link KeyphraseEvaluator}: annotates a small
 * document with three candidate keyphrases, runs the evaluator at N=3 in
 * token mode, and verifies that the result file gets written.
 *
 * @throws Exception on any UIMA or I/O failure
 */
@Test
public void testKeyphraseEvaluator() throws Exception {
    String documentText = "The added keyphrases should be found in this text. With the exception of one example.";
    File tmpFile = workdir.newFile(resultFileName);
    // Evaluate the top 3 keyphrases at token granularity; gold entries are
    // consumed (removed) once matched so they can't match twice.
    AnalysisEngine ae = AnalysisEngineFactory.createEngine(KeyphraseEvaluator.class,
            KeyphraseEvaluator.PARAM_N, 3,
            KeyphraseEvaluator.PARAM_RESULT_FILE, tmpFile.toString(),
            KeyphraseEvaluator.PARAM_EVAL_TYPE, EvaluatorType.Token.toString(),
            KeyphraseEvaluator.PARAM_REMOVE_GOLD_AFTER_MATCH, true);
    JCas jcas = ae.newJCas();
    jcas.setDocumentText(documentText);
    // The evaluator resolves the gold standard from the document URI.
    File testFile = new File("src/test/resources/keyphrase/evaluator/test.txt");
    DocumentMetaData meta = DocumentMetaData.create(jcas);
    meta.setDocumentUri(testFile.toURI().toString());
    // Three candidates with descending scores; the last one intentionally
    // does not occur in the document text.
    Keyphrase k1 = new Keyphrase(jcas);
    k1.setKeyphrase("keyphrases");
    k1.setScore(0.7);
    k1.addToIndexes();
    Keyphrase k2 = new Keyphrase(jcas);
    k2.setKeyphrase("exception");
    k2.setScore(0.5);
    k2.addToIndexes();
    Keyphrase k3 = new Keyphrase(jcas);
    k3.setKeyphrase("not_a_valid_keyphrase");
    k3.setScore(0.3);
    k3.addToIndexes();
    ae.process(jcas);
    // Triggers the evaluator to flush its results to PARAM_RESULT_FILE.
    ae.collectionProcessComplete();
    if (!tmpFile.exists()) {
        fail("Result file not correctly written.");
    }
    System.out.println(FileUtils.readFileToString(tmpFile, "UTF-8"));
}
From source file:it.geosolutions.geoserver.rest.ConfigTest.java
@Test public void insertStyles() throws FileNotFoundException, IOException { if (!enabled()) { LOGGER.info("Skipping test " + "insertStyles" + "for class:" + this.getClass().getSimpleName()); return;//from ww w . j av a 2s. com } deleteAllStyles(); File sldDir = new ClassPathResource("testdata").getFile(); for (File sldFile : sldDir.listFiles((FilenameFilter) new SuffixFileFilter(".sld"))) { LOGGER.info("Existing styles: " + reader.getStyles().getNames()); String basename = FilenameUtils.getBaseName(sldFile.toString()); LOGGER.info("Publishing style " + sldFile + " as " + basename); assertTrue("Cound not publish " + sldFile, publisher.publishStyle(sldFile, basename)); } }
From source file:com.jaspersoft.jasperserver.test.SampleDataImportTestTestNG.java
/**
 * Imports every sample bundle found under the exported-resources test
 * directory: each sub-directory is run as an exploded import, each
 * {@code .zip} as a zipped import; any other file is skipped.
 *
 * @throws Exception rethrown so the test fails on the first import error
 */
@Test()
public void doDemoSampleImportTest() throws Exception {
    m_logger.info("SampleDataImportTestTestNG => doDemoSampleImportTest() called");
    File importDirFile = new File(
            TEST_BASE_DIR + FILE_SEPARATOR + "test-classes" + FILE_SEPARATOR + "exportedResources");
    // Load all folders and ZIPs as individual imports and ZIPs
    File[] files = importDirFile.listFiles();
    if (files != null) {
        for (File f : files) {
            // Fresh parameter set per entry so imports don't leak into each other.
            Parameters importParams = createParameters();
            try {
                if (f.isDirectory()) {
                    importParams.addParameterValue(PARAM_IMPORT_DIR, f.toString());
                    m_logger.info("processing directory: " + f);
                } else if (f.getName().endsWith(".zip")) {
                    importParams.addParameterValue(PARAM_IMPORT_ZIP, f.toString());
                    m_logger.info("processing zip: " + f);
                } else {
                    m_logger.info("skipping file: " + f);
                }
                // Only run the import when one of the branches above set a parameter.
                if (importParams.getParameterNames().hasNext()) {
                    performImport(importParams);
                }
            } catch (Exception e) {
                m_logger.error("Error importing file " + f, e);
                throw e;
            }
        }
    } else {
        // listFiles() returns null when the directory is missing or unreadable.
        m_logger.warn("no files in: " + importDirFile);
    }
}
From source file:ninja.eivind.hotsreplayuploader.repositories.DataSourceFactoryBean.java
private JdbcDataSource getOrmLiteDataSource(File database) { final JdbcDataSource dataSource = new JdbcDataSource(); final String databaseName; if (releaseManager == null || releaseManager.getCurrentVersion().equals("Development")) { databaseName = database.toString() + "-dev"; } else {//from ww w . jav a 2 s . co m databaseName = database.toString(); } final String url = "jdbc:h2:" + databaseName; LOG.info("Setting up DataSource with URL " + url); dataSource.setUrl(url); return dataSource; }
From source file:com.tobedevoured.solrsail.SolrConfig.java
/**
 * Installs the Solr config to the local file system by extracting the
 * {@code solr/} tree from the SolrSail jar into {@link #getSolrHome()}.
 *
 * @param jar the jar file containing the {@code solr/} resources
 * @throws IOException if the jar cannot be read or a file cannot be written
 */
public void installFromJar(File jar) throws IOException {
    logger.info("Installing config from Jar to {}", this.getSolrHome());
    logger.debug("Opening Jar {}", jar.toString());
    // try-with-resources: the original never closed the JarFile handle.
    try (JarFile jarFile = new JarFile(jar)) {
        for (Enumeration<JarEntry> enumeration = jarFile.entries(); enumeration.hasMoreElements();) {
            JarEntry entry = enumeration.nextElement();
            // Extract only entries below solr/, stripping that prefix from the destination.
            if (!entry.getName().equals("solr/") && entry.getName().startsWith("solr/")) {
                StringBuilder dest = new StringBuilder(getSolrHome()).append(File.separator)
                        .append(entry.getName().replaceFirst("solr/", ""));
                File file = new File(dest.toString());
                if (entry.isDirectory()) {
                    file.mkdirs();
                } else {
                    if (file.getParentFile() != null) {
                        file.getParentFile().mkdirs();
                    }
                    logger.debug("Copying {} to {}", entry.getName(), dest.toString());
                    // Byte-for-byte copy. The original decoded/encoded through the
                    // platform charset (InputStream -> FileWriter), which can corrupt
                    // non-text resources, and leaked both streams on failure.
                    try (InputStream input = jarFile.getInputStream(entry);
                            OutputStream output = new FileOutputStream(file.getAbsoluteFile())) {
                        IOUtils.copy(input, output);
                    }
                }
            }
        }
    }
}
From source file:java.it.geosolutions.geoserver.rest.ConfigTest.java
@Test public void insertStyles() throws FileNotFoundException, IOException { if (!enabled()) { LOGGER.info("Skipping test " + "insertStyles" + "for class:" + this.getClass().getSimpleName()); return;//w ww. j av a2 s . c o m } deleteAllStyles(); File sldDir = new ClassPathResource("testdata").getFile(); for (File sldFile : sldDir.listFiles((FilenameFilter) new SuffixFileFilter(".sld"))) { LOGGER.info("Existing styles: " + reader.getStyles().getNames()); String basename = FilenameUtils.getBaseName(sldFile.toString()); LOGGER.info("Publishing style " + sldFile + " as " + basename); assertTrue("Could not publish " + sldFile, publisher.publishStyle(sldFile, basename)); } }
From source file:com.btoddb.fastpersitentqueue.MemorySegmentSerializer.java
/**
 * Deletes the on-disk paging file backing the given memory segment.
 * A missing file is treated as normal (the segment may never have been
 * paged out) and logged at debug level; any other I/O failure is logged
 * as an error. This method never throws.
 *
 * @param segment segment whose paging file (named by its id) is removed
 */
public void removePagingFile(MemorySegment segment) {
    File theFile = new File(directory, segment.getId().toString());
    try {
        FileUtils.forceDelete(theFile);
        logger.debug("removed memory paging file {}", theFile.toString());
    } catch (FileNotFoundException e) {
        // Inner try: getCanonicalFile() can itself throw IOException.
        try {
            logger.debug("File not found (this is normal) while removing memory paging file, {}",
                    theFile.getCanonicalFile());
        } catch (IOException e1) {
            // ignore: only the log message is lost
        }
    } catch (IOException e) {
        try {
            logger.error("exception while removing memory paging file, {}", theFile.getCanonicalFile(), e);
        } catch (IOException e1) {
            // ignore the canonicalization failure but still report the
            // original delete error without the path
            logger.error("another exception while removing memory paging file", e);
        }
    }
}
From source file:io.druid.segment.loading.LocalDataSegmentPusher.java
/**
 * Pushes a segment directory into local-filesystem deep storage.
 * The segment is first compressed into an intermediate directory and then
 * atomically moved to its final location, so concurrent pushes of the
 * same segment resolve to a single winner.
 *
 * @param dataSegmentFile directory containing the segment files to push
 * @param segment segment metadata used to derive the storage location
 * @return the segment with load spec, size and binary version filled in
 * @throws IOException on compression, descriptor or filesystem failure
 */
@Override
public DataSegment push(File dataSegmentFile, DataSegment segment) throws IOException {
    final String storageDir = this.getStorageDir(segment);
    final File baseStorageDir = config.getStorageDirectory();
    final File outDir = new File(baseStorageDir, storageDir);
    log.info("Copying segment[%s] to local filesystem at location[%s]", segment.getIdentifier(),
            outDir.toString());
    if (dataSegmentFile.equals(outDir)) {
        // Segment already sits at its final location: just sum the file
        // sizes and (re)write the descriptor without copying anything.
        long size = 0;
        for (File file : dataSegmentFile.listFiles()) {
            size += file.length();
        }
        return createDescriptorFile(segment.withLoadSpec(makeLoadSpec(outDir.toURI())).withSize(size)
                .withBinaryVersion(SegmentUtils.getVersionFromDir(dataSegmentFile)), outDir);
    }
    // Zip into an intermediate directory first so the final destination
    // only ever appears fully written.
    final File tmpOutDir = new File(baseStorageDir, intermediateDirFor(storageDir));
    log.info("Creating intermediate directory[%s] for segment[%s]", tmpOutDir.toString(),
            segment.getIdentifier());
    final long size = compressSegment(dataSegmentFile, tmpOutDir);
    final DataSegment dataSegment = createDescriptorFile(
            segment.withLoadSpec(makeLoadSpec(new File(outDir, "index.zip").toURI())).withSize(size)
                    .withBinaryVersion(SegmentUtils.getVersionFromDir(dataSegmentFile)),
            tmpOutDir);
    // moving the temporary directory to the final destination, once success the potentially concurrent push operations
    // will be failed and will read the descriptor.json created by current push operation directly
    FileUtils.forceMkdir(outDir.getParentFile());
    try {
        Files.move(tmpOutDir.toPath(), outDir.toPath());
    } catch (FileAlreadyExistsException e) {
        // A concurrent push won the race: discard our temp output and return
        // the descriptor the winner wrote.
        log.warn("Push destination directory[%s] exists, ignore this message if replication is configured.",
                outDir);
        FileUtils.deleteDirectory(tmpOutDir);
        return jsonMapper.readValue(new File(outDir, "descriptor.json"), DataSegment.class);
    }
    return dataSegment;
}