List of usage examples for java.io.File.toString()
public String toString()
From source file:edu.cornell.med.icb.goby.modes.TestCompactToFastaMode.java
/** * Test conversion of a compact file with no quality scores to Illumina/FASTQ format. * @throws IOException if the files cannot be read/written properly *///from w w w . j av a 2s. c o m @Test public void toFastqIlluminaWithQuality() throws IOException { final String inputFilename = "test-data/compact-reads/five-with-quality.compact-reads"; final File fastqFile = createTempFile("llumina", ".fastq"); final String outputFilename = fastqFile.toString(); final CompactToFastaMode compactToFastaMode = new CompactToFastaMode(); compactToFastaMode.setInputFilename(inputFilename); compactToFastaMode.setOutputFilename(outputFilename); compactToFastaMode.setOutputFormat(CompactToFastaMode.OutputFormat.FASTQ); compactToFastaMode.setQualityEncoding(QualityEncoding.ILLUMINA); compactToFastaMode.execute(); final FastXReader reader = new FastXReader(outputFilename); assertEquals("File should be in FASTQ format", "fq", reader.getFileType()); int index = 0; for (final FastXEntry entry : reader) { assertEquals("Entry " + index + "symbol is not correct", '@', entry.getHeaderSymbol()); assertEquals("Seqence for entry " + index + " is not correct", expectedSequence[index], entry.getSequence().toString()); final MutableString quality = entry.getQuality(); assertNotNull("Quality string should never be null", quality); assertEquals("There should be some quality values", 38, quality.length()); // check quality scores for (int i = 0; i < 38; i++) { final char qualityCharacter = quality.charAt(i); assertEquals("Entry " + index + " has incorrect quality score at index " + i, expectedQualityScores[index][i], QualityEncoding.ILLUMINA.asciiEncodingToPhredQualityScore(qualityCharacter)); } assertTrue("Entry " + index + " is not complete", entry.isEntryComplete()); index++; } assertEquals(5, index); reader.close(); }
From source file:edu.cornell.med.icb.goby.modes.TestCompactToFastaMode.java
/** * Test conversion of a compact file with no quality scores to Sanger/FASTQ format. * @throws IOException if the files cannot be read/written properly *//*from w ww . ja v a 2s . c om*/ @Test public void toFastqSangerWithQuality() throws IOException { final String inputFilename = "test-data/compact-reads/five-with-quality.compact-reads"; final File fastqFile = createTempFile("llumina", ".fastq"); final String outputFilename = fastqFile.toString(); final CompactToFastaMode compactToFastaMode = new CompactToFastaMode(); compactToFastaMode.setInputFilename(inputFilename); compactToFastaMode.setOutputFilename(outputFilename); compactToFastaMode.setOutputFormat(CompactToFastaMode.OutputFormat.FASTQ); compactToFastaMode.setQualityEncoding(QualityEncoding.SANGER); compactToFastaMode.execute(); final FastXReader reader = new FastXReader(outputFilename); assertEquals("File should be in FASTQ format", "fq", reader.getFileType()); int index = 0; for (final FastXEntry entry : reader) { assertEquals("Entry " + index + "symbol is not correct", '@', entry.getHeaderSymbol()); assertEquals("Seqence for entry " + index + " is not correct", expectedSequence[index], entry.getSequence().toString()); final MutableString quality = entry.getQuality(); assertNotNull("Quality string should never be null", quality); assertEquals("There should be some quality values", 38, quality.length()); // check quality scores for (int i = 0; i < 38; i++) { final char qualityCharacter = quality.charAt(i); assertEquals("Entry " + index + " has incorrect quality score at index " + i, expectedQualityScores[index][i], QualityEncoding.SANGER.asciiEncodingToPhredQualityScore(qualityCharacter)); } assertTrue("Entry " + index + " is not complete", entry.isEntryComplete()); index++; } assertEquals(5, index); reader.close(); }
From source file:gool.generator.android.AndroidCodePrinter.java
/**
 * Recursively walks {@code mainFolder} and adds every file whose path ends
 * with "Activity.java" to {@code mainClassFiles}. Results are accumulated
 * into the supplied list; nothing is returned.
 *
 * @param mainFolder     directory to scan recursively
 * @param mainClassFiles output list that matching files are appended to
 */
private void populateMainMethodClasses(File mainFolder, List<File> mainClassFiles) {
    final File[] children = mainFolder.listFiles();
    // listFiles() returns null when the path is not a directory or an I/O
    // error occurs; the original code would NPE here.
    if (children == null) {
        return;
    }
    for (final File child : children) {
        if (child.isDirectory()) {
            populateMainMethodClasses(child, mainClassFiles);
        } else if (child.toString().endsWith("Activity.java")) {
            mainClassFiles.add(child);
        }
    }
}
From source file:jobhunter.persistence.Persistence.java
/**
 * Records the file's last-modified timestamp and recomputes its MD5 checksum.
 * On a read failure the error is logged and the previous checksum (possibly
 * null) is left untouched.
 *
 * @param file the file whose modification time and checksum are captured
 */
private void updateLastMod(final File file) {
    this.lastModification = file.lastModified();
    try (InputStream in = new FileInputStream(file)) {
        this.md5sum = DigestUtils.md5(in);
        // Log only on success: the original logged unconditionally and called
        // md5sum.toString() after a failed read, which NPEs when md5sum was
        // never initialized and otherwise reports a stale checksum.
        l.debug("File was last modified on {} with MD5 {}", lastModification, this.md5sum);
    } catch (IOException e) {
        l.error("Failed to read MD5 checksum from {}", file, e);
    }
}
From source file:com.cloudera.sqoop.hive.HiveImport.java
/**
 * Perform the import of data from an HDFS path to a Hive table.
 *
 * Generates the CREATE TABLE (and optionally LOAD DATA) HQL into a temporary
 * script file, executes it via the Hive child process (unless generate-only
 * mode is set), and removes the script afterwards.
 *
 * @param inputTableName the name of the table as loaded into HDFS
 * @param outputTableName the name of the table to create in Hive.
 * @param createOnly if true, run the CREATE TABLE statement but not
 * LOAD DATA.
 */
public void importTable(String inputTableName, String outputTableName, boolean createOnly) throws IOException {

    if (!isGenerateOnly()) {
        removeTempLogs(inputTableName);
        LOG.info("Loading uploaded data into Hive");
    }

    // default the Hive table name to the HDFS table name
    if (null == outputTableName) {
        outputTableName = inputTableName;
    }
    LOG.debug("Hive.inputTable: " + inputTableName);
    LOG.debug("Hive.outputTable: " + outputTableName);

    // For testing purposes against our mock hive implementation,
    // if the sysproperty "expected.script" is set, we set the EXPECTED_SCRIPT
    // environment variable for the child hive process. We also disable
    // timestamp comments so that we have deterministic table creation scripts.
    String expectedScript = System.getProperty("expected.script");
    List<String> env = Executor.getCurEnvpStrings();
    boolean debugMode = expectedScript != null;
    if (debugMode) {
        env.add("EXPECTED_SCRIPT=" + expectedScript);
        env.add("TMPDIR=" + options.getTempDir());
    }

    // generate the HQL statements to run.
    TableDefWriter tableWriter = new TableDefWriter(options, connManager, inputTableName, outputTableName,
            configuration, !debugMode);
    String createTableStr = tableWriter.getCreateTableStmt() + ";\n";
    String loadDataStmtStr = tableWriter.getLoadDataStmt() + ";\n";

    // write them to a script file.
    File scriptFile = getScriptFile(outputTableName);
    try {
        String filename = scriptFile.toString();
        BufferedWriter w = null;
        try {
            FileOutputStream fos = new FileOutputStream(scriptFile);
            w = new BufferedWriter(new OutputStreamWriter(fos));
            w.write(createTableStr, 0, createTableStr.length());
            if (!createOnly) {
                w.write(loadDataStmtStr, 0, loadDataStmtStr.length());
            }
        } catch (IOException ioe) {
            // log and rethrow: the caller is expected to handle the failure
            LOG.error("Error writing Hive load-in script: " + ioe.toString());
            ioe.printStackTrace();
            throw ioe;
        } finally {
            if (null != w) {
                try {
                    w.close();
                } catch (IOException ioe) {
                    // close failure is non-fatal; the script content was already flushed or the
                    // write path threw above
                    LOG.warn("IOException closing stream to Hive script: " + ioe.toString());
                }
            }
        }

        if (!isGenerateOnly()) {
            executeScript(filename, env);

            LOG.info("Hive import complete.");
        }
    } finally {
        if (!isGenerateOnly()) {
            // User isn't interested in saving the DDL. Remove the file.
            if (!scriptFile.delete()) {
                LOG.warn("Could not remove temporary file: " + scriptFile.toString());
                // try to delete the file later.
                scriptFile.deleteOnExit();
            }
        }
    }
}
From source file:net.sf.eclipsecs.core.config.configtypes.ConfigurationType.java
/** * Gets the property resolver for this configuration type used to expand * property values within the checkstyle configuration. * /*from w ww. ja v a 2 s.c o m*/ * @param checkConfiguration * the actual check configuration * @return the property resolver * @throws IOException * error creating the property resolver */ protected PropertyResolver getPropertyResolver(ICheckConfiguration config, CheckstyleConfigurationFile configFile) throws IOException { MultiPropertyResolver multiResolver = new MultiPropertyResolver(); multiResolver.addPropertyResolver(new ResolvablePropertyResolver(config)); File f = FileUtils.toFile(configFile.getResolvedConfigFileURL()); if (f != null) { multiResolver.addPropertyResolver(new StandardPropertyResolver(f.toString())); } else { multiResolver.addPropertyResolver( new StandardPropertyResolver(configFile.getResolvedConfigFileURL().toString())); } multiResolver.addPropertyResolver(new ClasspathVariableResolver()); multiResolver.addPropertyResolver(new SystemPropertyResolver()); if (configFile.getAdditionalPropertiesBundleStream() != null) { ResourceBundle bundle = new PropertyResourceBundle(configFile.getAdditionalPropertiesBundleStream()); multiResolver.addPropertyResolver(new ResourceBundlePropertyResolver(bundle)); } return multiResolver; }
From source file:org.encog.workbench.util.graph.EncogChartPanel.java
/** * Opens a file chooser and gives the user an opportunity to save the chart * in PNG format./*from w w w . java 2 s .c o m*/ * * @throws IOException if there is an I/O error. */ public void doSaveAs() throws IOException { SaveImageDialog dialog = new SaveImageDialog(EncogWorkBench.getInstance().getMainWindow()); dialog.getImageWidth().setValue(640); dialog.getImageHeight().setValue(480); if (dialog.process()) { File filename = new File(dialog.getTargetFile().getValue()); int width = dialog.getImageWidth().getValue(); int height = dialog.getImageHeight().getValue(); switch (dialog.getFileType().getSelectedIndex()) { case 0: filename = new File(FileUtil.forceExtension(filename.toString(), "png")); ChartUtilities.saveChartAsPNG(filename, this.getChart(), width, height); break; case 1: filename = new File(FileUtil.forceExtension(filename.toString(), "jpg")); ChartUtilities.saveChartAsPNG(filename, this.getChart(), width, height); break; case 2: filename = new File(FileUtil.forceExtension(filename.toString(), "pdf")); DocumentPDF.savePDF(filename, getChart(), width, height); break; case 3: filename = new File(FileUtil.forceExtension(filename.toString(), "svg")); DocumentSVG.saveSVG(filename, getChart(), width, height); break; } } }
From source file:com.linkedin.pinot.core.segment.index.converter.SegmentV1V2ToV3FormatConverter.java
@Override public void convert(File v2SegmentDirectory) throws Exception { Preconditions.checkNotNull(v2SegmentDirectory, "Segment directory should not be null"); Preconditions.checkState(v2SegmentDirectory.exists() && v2SegmentDirectory.isDirectory(), "Segment directory: " + v2SegmentDirectory.toString() + " must exist and should be a directory"); LOGGER.info("Converting segment: {} to v3 format", v2SegmentDirectory); // check existing segment version SegmentMetadataImpl v2Metadata = new SegmentMetadataImpl(v2SegmentDirectory); SegmentVersion oldVersion = SegmentVersion.valueOf(v2Metadata.getVersion()); Preconditions.checkState(oldVersion != SegmentVersion.v3, "Segment {} is already in v3 format but at wrong path", v2Metadata.getName()); Preconditions.checkArgument(oldVersion == SegmentVersion.v1 || oldVersion == SegmentVersion.v2, "Can not convert segment version: {} at path: {} ", oldVersion, v2SegmentDirectory); deleteStaleConversionDirectories(v2SegmentDirectory); File v3TempDirectory = v3ConversionTempDirectory(v2SegmentDirectory); setDirectoryPermissions(v3TempDirectory); createMetadataFile(v2SegmentDirectory, v3TempDirectory); copyCreationMetadata(v2SegmentDirectory, v3TempDirectory); copyIndexData(v2SegmentDirectory, v2Metadata, v3TempDirectory); File newLocation = SegmentDirectoryPaths.segmentDirectoryFor(v2SegmentDirectory, SegmentVersion.v3); LOGGER.info("v3 segment location for segment: {} is {}", v2Metadata.getName(), newLocation); v3TempDirectory.renameTo(newLocation); }
From source file:io.druid.segment.IndexMaker.java
/**
 * Appends the given indexable adapters into a single index under
 * {@code outDir}. Convenience overload that delegates to the main
 * {@code append} with a {@link LoggingProgressIndicator} constructed from the
 * output directory's path.
 *
 * @param adapters the source adapters to append
 * @param outDir the directory the merged index is written to
 * @param indexSpec index creation options
 * @return the output directory containing the appended index
 * @throws IOException if the append fails
 */
public static File append(final List<IndexableAdapter> adapters, final File outDir, final IndexSpec indexSpec)
        throws IOException {
    return append(adapters, outDir, new LoggingProgressIndicator(outDir.toString()), indexSpec);
}
From source file:com.dmsl.anyplace.tasks.UploadRSSLogTask.java
/**
 * Uploads the RSS log file to the Anyplace radio-map upload endpoint as a
 * multipart POST (parts: "radiomap" = the file, "json" = credentials),
 * reporting upload progress via publishProgress and returning a user-facing
 * status message. Sets {@code exceptionOccured} on any failure.
 */
@Override
protected String doInBackground(Void... params) {
    try {
        // credentials payload sent alongside the file
        JSONObject j;
        j = new JSONObject();
        j.put("username", username);
        j.put("password", password);
        String json = j.toString();

        File rsslog = new File(this.file);
        if (rsslog.exists() == false) {
            exceptionOccured = true;
            return "File not found";
        }
        Log.d("radio upload", rsslog.toString());

        String response;
        HttpClient httpclient = new DefaultHttpClient();
        httppost = new HttpPost(AnyplaceAPI.getRadioUploadUrl());

        MultipartEntity entity = new MultipartEntity();
        entity.addPart("radiomap", new FileBody(rsslog));
        entity.addPart("json", new StringBody(json));

        // forward byte-level progress to publishProgress, de-duplicating
        // repeated integer percentages
        ProgressCallback progressCallback = new ProgressCallback() {

            @Override
            public void progress(float progress) {
                if (currentProgress != (int) (progress)) {
                    currentProgress = (int) progress;
                    publishProgress(currentProgress);
                }
            }
        };

        httppost.setEntity(new ProgressHttpEntityWrapper(entity, progressCallback));
        HttpResponse httpresponse = httpclient.execute(httppost);
        HttpEntity resEntity = httpresponse.getEntity();
        response = EntityUtils.toString(resEntity);
        Log.d("radio upload", "response: " + response);

        // server replies with a JSON status envelope
        j = new JSONObject(response);
        if (j.getString("status").equalsIgnoreCase("error")) {
            exceptionOccured = true;
            return "Error: " + j.getString("message");
        }
    } catch (JSONException e) {
        exceptionOccured = true;
        Log.d("upload rss log", e.getMessage());
        return "Cannot upload RSS log. JSONException occurred[ " + e.getMessage() + " ]";
    } catch (ParseException e) {
        exceptionOccured = true;
        Log.d("upload rss log", e.getMessage());
        return "Cannot upload RSS log. ParseException occurred[ " + e.getMessage() + " ]";
    } catch (IOException e) {
        exceptionOccured = true;
        Log.d("upload rss log", e.getMessage());
        // an aborted request means the user cancelled the upload, not a failure
        if (httppost != null && httppost.isAborted()) {
            return "Uploading cancelled!";
        } else {
            return "Cannot upload RSS log. IOException occurred[ " + e.getMessage() + " ]";
        }
    }
    return "Successfully uploaded RSS log!";
}