List of usage examples for java.io FileInputStream available
public int available() throws IOException
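available() returns an estimate of the number of bytes that can be read (or skipped over) from the stream without blocking. The following is a minimal, self-contained sketch of typical usage; the file path "example.txt" is a placeholder. Note that several of the examples below use the return value as the total file size, which only works reliably for small local files because the value is documented as an estimate.

import java.io.FileInputStream;
import java.io.IOException;

public class AvailableExample {
    public static void main(String[] args) throws IOException {
        // "example.txt" is a placeholder path used for illustration only
        try (FileInputStream in = new FileInputStream("example.txt")) {
            // Estimate of how many bytes can be read without blocking;
            // for a plain local file this is usually the remaining length,
            // but the contract only promises an estimate.
            int estimate = in.available();
            System.out.println("available() estimate: " + estimate);

            // Read to end-of-stream instead of trusting the estimate
            byte[] buffer = new byte[8192];
            long total = 0;
            int n;
            while ((n = in.read(buffer)) != -1) {
                total += n;
            }
            System.out.println("bytes actually read: " + total);
        }
    }
}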
From source file:gov.nih.nci.rembrandt.web.taglib.PCAPlotTag.java
public int doStartTag() {
    chart = null;
    pcaResults = null;
    pcaData.clear();
    ServletRequest request = pageContext.getRequest();
    HttpSession session = pageContext.getSession();
    Object o = request.getAttribute(beanName);
    JspWriter out = pageContext.getOut();
    ServletResponse response = pageContext.getResponse();
    try {
        // retrieve the Finding from cache and build the list of PCAData points
        PrincipalComponentAnalysisFinding principalComponentAnalysisFinding = (PrincipalComponentAnalysisFinding) businessTierCache
                .getSessionFinding(session.getId(), taskId);
        Collection<ClinicalFactorType> clinicalFactors = new ArrayList<ClinicalFactorType>();
        List<String> sampleIds = new ArrayList<String>();
        Map<String, PCAresultEntry> pcaResultMap = new HashMap<String, PCAresultEntry>();
        if (principalComponentAnalysisFinding != null) {
            pcaResults = principalComponentAnalysisFinding.getResultEntries();
            for (PCAresultEntry pcaEntry : pcaResults) {
                sampleIds.add(pcaEntry.getSampleId());
                pcaResultMap.put(pcaEntry.getSampleId(), pcaEntry);
            }
            Collection<SampleResultset> validatedSampleResultset = ClinicalDataValidator
                    .getValidatedSampleResultsetsFromSampleIDs(sampleIds, clinicalFactors);
            if (validatedSampleResultset != null) {
                String id;
                PCAresultEntry entry;
                for (SampleResultset rs : validatedSampleResultset) {
                    id = rs.getBiospecimen().getSpecimenName();
                    entry = pcaResultMap.get(id);
                    PrincipalComponentAnalysisDataPoint pcaPoint = new PrincipalComponentAnalysisDataPoint(id,
                            entry.getPc1(), entry.getPc2(), entry.getPc3());
                    String diseaseName = rs.getDisease().getValueObject();
                    if (diseaseName != null) {
                        pcaPoint.setDiseaseName(diseaseName);
                    } else {
                        pcaPoint.setDiseaseName(DiseaseType.NON_TUMOR.name());
                    }
                    GenderDE genderDE = rs.getGenderCode();
                    if (genderDE != null && genderDE.getValue() != null) {
                        String gt = genderDE.getValueObject().trim();
                        if (gt != null) {
                            GenderType genderType = GenderType.valueOf(gt);
                            if (genderType != null) {
                                pcaPoint.setGender(genderType);
                            }
                        }
                    }
                    Long survivalLength = rs.getSurvivalLength();
                    if (survivalLength != null) {
                        // survival length is stored in days in the DB so divide by 30 to get the
                        // approx survival in months
                        double survivalInMonths = survivalLength.doubleValue() / 30.0;
                        pcaPoint.setSurvivalInMonths(survivalInMonths);
                    }
                    pcaData.add(pcaPoint);
                }
            }
            PCAcomponent pone = PCAcomponent.PC1;
            PCAcomponent ptwo = PCAcomponent.PC2;
            // check the components to see which graph to get
            if (components.equalsIgnoreCase("PC1vsPC2")) {
                pone = PCAcomponent.PC2;
                ptwo = PCAcomponent.PC1;
                //chart = (JFreeChart) CaIntegratorChartFactory.getPrincipalComponentAnalysisGraph(pcaData, PCAcomponent.PC2, PCAcomponent.PC1, PCAcolorByType.valueOf(PCAcolorByType.class, colorBy));
            }
            if (components.equalsIgnoreCase("PC1vsPC3")) {
                pone = PCAcomponent.PC3;
                ptwo = PCAcomponent.PC1;
                //chart = (JFreeChart) CaIntegratorChartFactory.getPrincipalComponentAnalysisGraph(pcaData, PCAcomponent.PC3, PCAcomponent.PC1, PCAcolorByType.valueOf(PCAcolorByType.class, colorBy));
            }
            if (components.equalsIgnoreCase("PC2vsPC3")) {
                pone = PCAcomponent.PC2;
                ptwo = PCAcomponent.PC3;
                //chart = (JFreeChart) CaIntegratorChartFactory.getPrincipalComponentAnalysisGraph(pcaData, PCAcomponent.PC3, PCAcomponent.PC2, PCAcolorByType.valueOf(PCAcolorByType.class, colorBy));
            }
            PrincipalComponentAnalysisPlot plot = new RBTPrincipalComponentAnalysisPlot(pcaData, pone, ptwo,
                    PCAcolorByType.valueOf(PCAcolorByType.class, colorBy));
            if (plot != null) {
                chart = (JFreeChart) plot.getChart();
            }
            RembrandtImageFileHandler imageHandler = new RembrandtImageFileHandler(session.getId(), "png", 650, 600);
            // The final complete path to be used by the webapplication
            String finalPath = imageHandler.getSessionTempFolder();
            String finalURLpath = imageHandler.getFinalURLPath();
            /*
             * Create the actual charts, writing it to the session temp folder
             */
            ChartRenderingInfo info = new ChartRenderingInfo(new StandardEntityCollection());
            String mapName = imageHandler.createUniqueMapName();
            //PrintWriter writer = new PrintWriter(new FileWriter(mapName));
            ChartUtilities.writeChartAsPNG(new FileOutputStream(finalPath), chart, 650, 600, info);
            //ImageMapUtil.writeBoundingRectImageMap(writer, "PCAimageMap", info, true);
            //writer.close();
            /* This is here to put the thread into a loop while it waits for the
             * image to be available. It has an unsophisticated timer but at
             * least it is something to avoid an endless loop.
             **/
            boolean imageReady = false;
            int timeout = 1000;
            FileInputStream inputStream = null;
            while (!imageReady) {
                timeout--;
                try {
                    inputStream = new FileInputStream(finalPath);
                    inputStream.available();
                    imageReady = true;
                    inputStream.close();
                } catch (IOException ioe) {
                    imageReady = false;
                    if (inputStream != null) {
                        inputStream.close();
                    }
                }
                if (timeout <= 1) {
                    break;
                }
            }
            out.print(ImageMapUtil.getBoundingRectImageMapTag(mapName, false, info));
            finalURLpath = finalURLpath.replace("\\", "/");
            long randomness = System.currentTimeMillis(); // prevent image caching
            out.print("<img id=\"geneChart\" name=\"geneChart\" alt=\"geneChart\" src=\"" + finalURLpath + "?"
                    + randomness + "\" usemap=\"#" + mapName + "\" border=\"0\" />");
            //(imageHandler.getImageTag(mapFileName));
        }
    } catch (IOException e) {
        logger.error(e);
    } catch (Exception e) {
        logger.error(e);
    } catch (Throwable t) {
        logger.error(t);
    }
    return EVAL_BODY_INCLUDE;
}
From source file:net.urlgrey.mythpodcaster.transcode.FeedFileAccessorImpl.java
/**
 * @param series
 * @param program
 * @param channel
 * @param feed
 * @param transcoderProfile
 */
@SuppressWarnings({ "unchecked", "rawtypes" })
public void addProgramToFeed(RecordedSeries series, RecordedProgram program, Channel channel, SyndFeed feed,
        String transcodingProfileId) {
    LOGGER.info("Transcoding new feed entry: programId[" + program.getProgramId() + "], key[" + program.getKey()
            + "], title[" + program.getTitle() + "], channel[" + (channel != null ? channel.getName() : "")
            + "], transcodingProfileId[" + transcodingProfileId + "]");
    final SyndEntryImpl entry = new SyndEntryImpl();
    entry.setUri(program.getKey());
    entry.setPublishedDate(program.getRecordedProgramKey().getStartTime());
    // set author info from the channel if available
    if (channel != null) {
        entry.setAuthor(channel.getName());
    }
    entry.setTitle(program.getProgramTitle());
    final SyndContentImpl description = new SyndContentImpl();
    description.setType("text/plain");
    description.setValue(program.getDescription());
    entry.setDescription(description);
    // apply thumbnail for clip to the feed
    final String feedImageUrl;
    final File originalClipThumbnail = clipLocator.locateThumbnailForOriginalClip(program.getFilename());
    if (originalClipThumbnail != null) {
        final String seriesId = series.getSeriesId();
        final File encodingDirectory = new File(feedFilePath, transcodingProfileId);
        final File feedThumbnailFile = new File(encodingDirectory, seriesId + PNG_EXTENSION);
        feedImageUrl = this.applicationURL + PATH_SEPARATOR + transcodingProfileId + PATH_SEPARATOR + seriesId
                + PNG_EXTENSION;
        try {
            FileOperations.copy(originalClipThumbnail, feedThumbnailFile);
            final SyndImageImpl feedImage = new SyndImageImpl();
            feedImage.setUrl(feedImageUrl);
            feedImage.setTitle(series.getTitle());
            feed.setImage(feedImage);
            // include iTunes-specific metadata
            final Module module = feed.getModule("http://www.itunes.com/dtds/podcast-1.0.dtd");
            final FeedInformation itunesFeedMetadata;
            if (module == null) {
                itunesFeedMetadata = new FeedInformationImpl();
                feed.getModules().add(itunesFeedMetadata);
            } else {
                itunesFeedMetadata = (FeedInformation) module;
            }
            itunesFeedMetadata.setImage(new URL(feedImageUrl));
            LOGGER.info("Applied clip thumbnail to feed: thumbnail[" + feedThumbnailFile.getAbsolutePath()
                    + "], url[" + feedImage.getUrl() + "]");
        } catch (IOException e) {
            if (feedThumbnailFile.canWrite()) {
                feedThumbnailFile.delete();
                feed.setImage(null);
            }
        }
    } else {
        feedImageUrl = null;
    }
    // transcode
    final File originalClip = clipLocator.locateOriginalClip(program.getFilename());
    if (originalClip != null) {
        final TranscodingProfile profile = transcodingProfilesDao.findAllProfiles().get(transcodingProfileId);
        if (profile != null) {
            final File outputFile = profile.generateOutputFilePath(feedFilePath, program.getKey());
            try {
                LOGGER.info("Transcode STARTING: profile[" + profile.getId() + "]");
                transcodingController.transcode(profile, program.getKey(), series.getTitle(),
                        program.getProgramTitle(), originalClip, outputFile);
                LOGGER.info("Transcode FINISHED: profile[" + profile.getId() + "]");
                if (outputFile.canRead()) {
                    // get the file-size in bytes
                    FileInputStream in = new FileInputStream(outputFile);
                    final int fileSize = in.available();
                    in.close();
                    final String link = profile.generateOutputFileURL(this.applicationURL, outputFile);
                    final SyndEnclosure enclosure = new SyndEnclosureImpl();
                    enclosure.setUrl(link);
                    enclosure.setType(profile.getEncodingMimeType());
                    enclosure.setLength(fileSize);
                    final List enclosures = new ArrayList();
                    enclosures.add(enclosure);
                    entry.setEnclosures(enclosures);
                    // include iTunes-specific metadata
                    final Module itunesModule = entry.getModule("http://www.itunes.com/dtds/podcast-1.0.dtd");
                    if (itunesModule != null) {
                        entry.getModules().remove(itunesModule);
                    }
                    final EntryInformation itunesEntryMetadata = new EntryInformationImpl();
                    final Duration duration = new Duration(program.getEndTime().getTime()
                            - program.getRecordedProgramKey().getStartTime().getTime());
                    itunesEntryMetadata.setDuration(duration);
                    itunesEntryMetadata.setSummary(program.getDescription());
                    if (program.getCategory() != null) {
                        itunesEntryMetadata.setKeywords(new String[] { program.getCategory() });
                    }
                    if (channel != null) {
                        itunesEntryMetadata.setAuthor(channel.getName());
                    }
                    entry.getModules().add(itunesEntryMetadata);
                    // include Media RSS metadata
                    final MediaContent[] contents = new MediaContent[1];
                    final MediaContent mrssContent = new MediaContent(new UrlReference(link));
                    mrssContent.setFileSize(Long.valueOf(fileSize));
                    mrssContent.setType(profile.getEncodingMimeType());
                    final long durationInSeconds = (long) Math.ceil(duration.getMilliseconds() / 1000);
                    mrssContent.setDuration(durationInSeconds);
                    contents[0] = mrssContent;
                    Metadata md = new Metadata();
                    if (feedImageUrl != null) {
                        Thumbnail[] thumbs = new Thumbnail[1];
                        thumbs[0] = new Thumbnail(new URI(feedImageUrl));
                        md.setThumbnail(thumbs);
                    }
                    md.setDescription(program.getDescription());
                    md.setCategories(new Category[] { new Category(program.getCategory()) });
                    mrssContent.setMetadata(md);
                    MediaEntryModuleImpl mrssModule = new MediaEntryModuleImpl();
                    mrssModule.setMediaContents(contents);
                    entry.getModules().add(mrssModule);
                } else {
                    LOGGER.warn("Transcoded output file cannot be read, setting link to null: path["
                            + outputFile.getAbsolutePath() + "]");
                    entry.setLink(null);
                }
            } catch (Exception e) {
                LOGGER.error("Error while transcoding, setting link to null", e);
                entry.setLink(null);
                if (outputFile != null && outputFile.canWrite()) {
                    outputFile.delete();
                }
            }
        } else {
            final String msg = "Unable to locate transcoding profile with given id: [" + transcodingProfileId + "]";
            LOGGER.error(msg);
        }
    } else {
        LOGGER.warn("Original clip could not be found in content paths");
        entry.setLink(null);
    }
    feed.getEntries().add(entry);
}
From source file:org.geoserver.wfs.response.SpatiaLiteOutputFormatDev.java
protected void write(List<SimpleFeatureCollection> collections, Charset charset, OutputStream output,
        GetFeatureType request) throws IOException, ServiceException {
    Connection conn = null;
    /**
     * Get the necessary JDBC object.
     */
    try {
        Class.forName(this.driverClassName);
    } catch (ClassNotFoundException e) {
        System.out.println(e);
    }
    /**
     * base location to temporally store spatialite database files
     */
    File tempDir = File.createTempFile("spatialitemp", ".sqlite");
    /**
     * enables load extension
     */
    SQLiteConfig config = new SQLiteConfig();
    config.enableLoadExtension(true);
    /**
     * the Url for the temporally sqlite file
     */
    String JDBCFileUrl = tempDir.getAbsolutePath();
    try {
        // create a connection to database
        conn = DriverManager.getConnection("jdbc:sqlite:" + JDBCFileUrl, config.toProperties());
        Statement stmt = conn.createStatement();
        stmt.setQueryTimeout(30);
        /**
         * A string to store the statements to run to create the Spatialite DataBase
         */
        String sql = null;
        conn.setAutoCommit(false);
        // FIXME: we are now working on PATH - we will define different JAR files to change this as required by platform
        sql = "SELECT load_extension('/tmp/libspatialite-2-4.dll');";
        stmt.execute(sql);
        sql = "SELECT InitSpatialMetaData();";
        stmt.execute(sql);
        conn.commit();
        /**
         * A string to store the names of the columns that will be used to populate the table
         */
        String column_names = null;
        // We might get multiple feature collections in our response (multiple queries?) so we need to
        // write out multiple tables, one for each query response.
        for (SimpleFeatureCollection fc : collections) {
            // get the current feature
            SimpleFeatureType ft = fc.getSchema();
            // To check if the current feature has a geometry.
            String the_geom = null;
            if (ft.getGeometryDescriptor() != null) {
                the_geom = ft.getGeometryDescriptor().getLocalName();
            }
            // Get the table name for the current feature
            String tbl_name = ft.getName().getLocalPart();
            /**
             * Create the table for the current feature as follows:
             * - first get the statement for create the table
             * - execute the statement
             * - second get the statement for add the geometry (if has one)
             * - execute the statement
             */
            // Initialize the "create table" query.
            column_names = "";
            int column_cnt = 0;
            sql = "CREATE TABLE " + tbl_name;
            sql += " ( PK_UID INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT";
            // Get the columns names for the table tbl_name
            for (int i = 0; i < ft.getAttributeCount(); i++) {
                AttributeDescriptor ad = ft.getDescriptor(i);
                if (ad.getLocalName() != the_geom) {
                    sql += ", " + prepareColumnHeader(ad);
                    column_names += ad.getLocalName();
                    column_cnt++;
                    if (i < ft.getAttributeCount() - 1) {
                        column_names += ", ";
                    }
                }
            }
            sql += ");"; // Finish creating the table
            System.out.println(sql);
            stmt.execute(sql);
            conn.commit();
            int srid = 0;
            // If the table "tbl_name" has a geometry, then write the sql to add the geometry
            if (the_geom != null) {
                sql = "SELECT AddGeometryColumn('" + tbl_name + "', ";
                // get the geometry type
                sql += "'" + the_geom + "', ";
                // get the SRID.
                srid = getSpatialSRID(ft.getCoordinateReferenceSystem());
                sql += srid + ", ";
                // get the Geometry type.
                String geom_type = getSpatialGeometryType(ft);
                if (geom_type == null) {
                    throw new WFSException("Error while adding the geometry column in table " + tbl_name
                            + ", unrecognized geometry type");
                }
                sql += "'" + geom_type + "', ";
                // get Dimensions, we only work with 2 dimensions.
                String dimension = "XY";
                sql += "'" + dimension + "'";
                sql += " );";
            } // finish creating the geometry column.
            System.out.println(sql);
            stmt.execute(sql);
            conn.commit();
            /**
             * Populates the table for the current feature as follows:
             * For each row
             * - first: configure the statement with the appropriate fields.
             * - second: add to the statement the field the_geom if it has a geometry.
             * - third: configure the statement with the appropriate values
             *   (if it has a geometry, add that value).
             * - execute the statement
             * Finally commit.
             */
            // Start populating the table: tbl_name.
            SimpleFeatureIterator i = fc.features();
            try {
                while (i.hasNext()) {
                    SimpleFeature row = i.next();
                    sql = "INSERT INTO " + tbl_name + " (" + column_names;
                    // if it has a geometry, add the field the_geom.
                    if (the_geom != null)
                        if (column_cnt > 0) {
                            sql += ", " + the_geom + " ) ";
                        } else {
                            sql += the_geom + ") ";
                        }
                    else {
                        sql += ") ";
                    }
                    // Store the default geometry value, so it can be omitted here and added at the end.
                    Object geom_data = row.getDefaultGeometry();
                    sql += "VALUES (";
                    for (int j = 0; j < row.getAttributeCount(); j++) {
                        Object rowAtt = row.getAttribute(j);
                        if (!rowAtt.equals(geom_data)) {
                            if (rowAtt != null) {
                                // We just transform all content to String.
                                sql += "'" + rowAtt.toString() + "'";
                            }
                            if (j < row.getAttributeCount() - 1) {
                                sql += ", ";
                            }
                        }
                    }
                    // Finally, if it has a geometry, insert the geometry data.
                    if (the_geom != null) {
                        if (column_cnt > 0) {
                            sql += ", ";
                        }
                        sql += "GeomFromText('" + prepareGeom(geom_data.toString()) + "', " + srid + ")";
                    }
                    sql += ");";
                    System.out.println(sql);
                    stmt.executeUpdate(sql);
                }
                conn.commit();
            } finally {
                fc.close(i);
            }
        }
    } catch (SQLException e) {
        System.out.println(e);
    }
    /**
     * A FileInputStream to read the tempDir into a byte array
     * so it can be written to the OutputStream output and flushed.
     */
    FileInputStream JDBCIn = new FileInputStream(tempDir);
    int longitud = JDBCIn.available();
    byte[] datos = new byte[longitud];
    JDBCIn.read(datos);
    output.write(datos);
    System.out.println(tempDir.getAbsolutePath());
}
From source file:gov.nih.nci.rembrandt.web.taglib.ClinicalPlotTag.java
public int doStartTag() {
    chart = null;
    clinicalData.clear();
    ServletRequest request = pageContext.getRequest();
    HttpSession session = pageContext.getSession();
    Object o = request.getAttribute(beanName);
    JspWriter out = pageContext.getOut();
    ServletResponse response = pageContext.getResponse();
    try {
        // retrieve the Finding from cache and build the list of Clinical Data points
        //ClinicalFinding clinicalFinding = (ClinicalFinding) businessTierCache.getSessionFinding(session.getId(), taskId);
        ReportBean clincalReportBean = presentationTierCache.getReportBean(session.getId(), taskId);
        Resultant clinicalResultant = clincalReportBean.getResultant();
        ResultsContainer resultsContainer = clinicalResultant.getResultsContainer();
        SampleViewResultsContainer sampleViewContainer = null;
        if (resultsContainer instanceof DimensionalViewContainer) {
            DimensionalViewContainer dimensionalViewContainer = (DimensionalViewContainer) resultsContainer;
            sampleViewContainer = dimensionalViewContainer.getSampleViewResultsContainer();
        }
        if (sampleViewContainer != null) {
            Collection<ClinicalFactorType> clinicalFactors = new ArrayList<ClinicalFactorType>();
            clinicalFactors.add(ClinicalFactorType.AgeAtDx);
            //clinicalFactors.add(ClinicalFactorType.Survival);
            Collection<SampleResultset> samples = sampleViewContainer.getSampleResultsets();
            if (samples != null) {
                int numDxvsKa = 0;
                int numDxvsSl = 0;
                for (SampleResultset rs : samples) {
                    //String id = rs.getBiospecimen().getValueObject();
                    String id = rs.getSampleIDDE().getValueObject();
                    ClinicalDataPoint clinicalDataPoint = new ClinicalDataPoint(id);
                    String diseaseName = rs.getDisease().getValueObject();
                    if (diseaseName != null) {
                        clinicalDataPoint.setDiseaseName(diseaseName);
                    } else {
                        clinicalDataPoint.setDiseaseName(DiseaseType.NON_TUMOR.name());
                    }
                    Long sl = rs.getSurvivalLength();
                    double survivalDays = -1.0;
                    double survivalMonths = -1.0;
                    if (sl != null) {
                        survivalDays = sl.doubleValue();
                        survivalMonths = survivalDays / 30.0;
                        //if ((survivalMonths > 0.0) && (survivalMonths < 1000.0)) {
                        clinicalDataPoint.setSurvival(survivalDays);
                        //}
                    }
                    Long dxAge = rs.getAge();
                    if (dxAge != null) {
                        clinicalDataPoint.setAgeAtDx(dxAge.doubleValue());
                    }
                    KarnofskyClinicalEvalDE ka = rs.getKarnofskyClinicalEvalDE();
                    if (ka != null) {
                        String kaStr = ka.getValueObject();
                        if (kaStr != null) {
                            if (kaStr.contains("|")) {
                                kaStr = kaStr.trim();
                                String[] kaStrArray = kaStr.split("\\|");
                                for (int i = 0; i < kaStrArray.length; i++) {
                                    if (i == 0) {
                                        // first score is baseline, just use this for now;
                                        // later we will need to use all scores in a series for each patient
                                        double kaVal = Double.parseDouble(kaStrArray[i].trim());
                                        clinicalDataPoint.setKarnofskyScore(kaVal);
                                    }
                                }
                            } else {
                                double kaVal = Double.parseDouble(kaStr);
                                clinicalDataPoint.setKarnofskyScore(kaVal);
                            }
                        }
                    }
                    if ((dxAge != null) && (ka != null)) {
                        numDxvsKa++;
                    }
                    if ((dxAge != null) && (sl != null)) {
                        numDxvsSl++;
                    }
                    // Object dx = rs.getAgeGroup();
                    // if (sl != null && dx != null) {
                    //     clinicalDataPoint.setSurvival(new Double(sl.toString()));
                    //     clinicalDataPoint.setAgeAtDx(new Double(dx.toString()));
                    // }
                    // Object ks = rs.getKarnofskyClinicalEvalDE();
                    // Object dx = rs.getAgeGroup();
                    // if (ks != null && dx != null) {
                    //     clinicalDataPoint.setNeurologicalAssessment(new Double(ks.toString()));
                    //     clinicalDataPoint.setAgeAtDx(new Double(dx.toString()));
                    // }
                    clinicalData.add(clinicalDataPoint);
                }
            }
        }
        System.out.println("Done creating points!");
        //-------------------------------------------------------------
        // GET THE CLINICAL DATA AND POPULATE THE clinicalData list
        // Note the ClinicalFinding is currently an empty class
        //----------------------------------------------------------
        // check the components to see which graph to get
        if (components.equalsIgnoreCase("SurvivalvsAgeAtDx")) {
            chart = (JFreeChart) CaIntegratorChartFactory.getClinicalGraph(clinicalData,
                    ClinicalFactorType.SurvivalLength, "Survival Length (Months)", ClinicalFactorType.AgeAtDx,
                    "Age At Diagnosis (Years)");
        }
        if (components.equalsIgnoreCase("KarnofskyScorevsAgeAtDx")) {
            chart = (JFreeChart) CaIntegratorChartFactory.getClinicalGraph(clinicalData,
                    ClinicalFactorType.KarnofskyAssessment, "Karnofsky Score", ClinicalFactorType.AgeAtDx,
                    "Age At Diagnosis (Years)");
        }
        RembrandtImageFileHandler imageHandler = new RembrandtImageFileHandler(session.getId(), "png", 600, 500);
        // The final complete path to be used by the webapplication
        String finalPath = imageHandler.getSessionTempFolder();
        String finalURLpath = imageHandler.getFinalURLPath();
        /*
         * Create the actual charts, writing it to the session temp folder
         */
        ChartRenderingInfo info = new ChartRenderingInfo(new StandardEntityCollection());
        String mapName = imageHandler.createUniqueMapName();
        ChartUtilities.writeChartAsPNG(new FileOutputStream(finalPath), chart, 600, 500, info);
        /* This is here to put the thread into a loop while it waits for the
         * image to be available. It has an unsophisticated timer but at
         * least it is something to avoid an endless loop.
         **/
        boolean imageReady = false;
        int timeout = 1000;
        FileInputStream inputStream = null;
        while (!imageReady) {
            timeout--;
            try {
                inputStream = new FileInputStream(finalPath);
                inputStream.available();
                imageReady = true;
                inputStream.close();
            } catch (IOException ioe) {
                imageReady = false;
                if (inputStream != null) {
                    inputStream.close();
                }
            }
            if (timeout <= 1) {
                break;
            }
        }
        out.print(ImageMapUtil.getBoundingRectImageMapTag(mapName, false, info));
        //finalURLpath = finalURLpath.replace("\\", "/");
        finalURLpath = finalURLpath.replace("\\", "/");
        long randomness = System.currentTimeMillis(); // prevent image caching
        out.print("<img id=\"geneChart\" alt=\"geneChart\" name=\"geneChart\" src=\"" + finalURLpath + "?"
                + randomness + "\" usemap=\"#" + mapName + "\" border=\"0\" />");
        //out.print("<img id=\"geneChart\" name=\"geneChart\" src=\"" + finalURLpath + "\" usemap=\"#" + mapName + "\" border=\"0\" />");
    } catch (IOException e) {
        logger.error(e);
    } catch (Exception e) {
        logger.error(e);
    } catch (Throwable t) {
        logger.error(t);
    }
    return EVAL_BODY_INCLUDE;
}
From source file:com.digitalpersona.onetouch.ui.swing.sample.Enrollment.Dashboard.java
private void verify() {
    FileInputStream stream = null;
    try {
        File file = new File("C:\\Users\\Guinness\\Documents\\10.jpg");
        stream = new FileInputStream(file);
        byte[] data = new byte[stream.available()];
        stream.read(data);
        stream.close();
        DPFPTemplate t = DPFPGlobal.getTemplateFactory().createTemplate();
        t.deserialize(data);
        setTemplate(t);
        init();
        DPFPFeatureSet features = extractFeatures(aw.getSample(), DPFPDataPurpose.DATA_PURPOSE_VERIFICATION);
        // Check quality of the sample and start verification if it's good
        if (features != null) {
            // Compare the feature set with our template
            DPFPVerificationResult result = verificator.verify(features, t);
            if (result.isVerified()) {
                System.out.println("Verified!..");
                makeReport("The fingerprint was VERIFIED.");
            } else {
                System.out.println("Did not match");
                makeReport("The fingerprint was NOT VERIFIED.");
            }
        }
    } catch (FileNotFoundException ex) {
        Logger.getLogger(Dlg_test.class.getName()).log(Level.SEVERE, null, ex);
    } catch (IOException ex) {
        Logger.getLogger(Dlg_test.class.getName()).log(Level.SEVERE, null, ex);
    } finally {
        try {
            stream.close();
        } catch (IOException ex) {
            Logger.getLogger(Dlg_test.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
}
From source file:net.sourceforge.squirrel_sql.fw.datasetviewer.cellcomponent.DataTypeBigDecimal.java
/**
 * Read a file and construct a valid object from its contents.
 * Errors are returned by throwing an IOException containing the
 * cause of the problem as its message.
 * <P>
 * DataType is responsible for validating that the imported
 * data can be converted to an object, and then must return
 * a text string that can be used in the Popup window text area.
 * This object-to-text conversion is the same as is done by
 * the DataType object internally in the getJTextArea() method.
 *
 * <P>
 * The file is assumed to be an ASCII string of digits
 * representing a value of this data type.
 */
public String importObject(FileInputStream inStream) throws IOException {
    InputStreamReader inReader = new InputStreamReader(inStream);
    int fileSize = inStream.available();
    char charBuf[] = new char[fileSize];
    int count = inReader.read(charBuf, 0, fileSize);
    if (count != fileSize)
        throw new IOException(
                "Could read only " + count + " chars from a total file size of " + fileSize + ". Import failed.");
    // convert file text into a string
    // Special case: some systems tack a newline at the end of
    // the text read. Assume that if the last char is a newline,
    // we want everything else in the line.
    String fileText;
    if (charBuf[count - 1] == KeyEvent.VK_ENTER)
        fileText = new String(charBuf, 0, count - 1);
    else
        fileText = new String(charBuf);
    // test that the string is valid by converting it into an
    // object of this data type
    StringBuffer messageBuffer = new StringBuffer();
    validateAndConvertInPopup(fileText, null, messageBuffer);
    if (messageBuffer.length() > 0) {
        // convert number conversion issue into IO issue for consistency
        throw new IOException(
                "Text does not represent data of type " + getClassName() + ". Text was:\n" + fileText);
    }
    // return the text from the file since it does
    // represent a valid value of this data type
    return fileText;
}
From source file:org.sogrey.frame.utils.FileUtil.java
/**
 * Gets the size of the given file in bytes, creating an empty file if it does not exist.
 * (The original descriptive comment and log messages were mis-encoded; the description here
 * is reconstructed from the method body, and the garbled log strings are left as found.)
 *
 * @param file the file to measure
 * @return the file size in bytes
 * @throws Exception if the file cannot be created
 */
private static long getFileSize(File file) throws Exception {
    long size = 0;
    if (file.exists()) {
        FileInputStream fis = null;
        fis = new FileInputStream(file);
        size = fis.available();
    } else {
        file.createNewFile();
        LogUtil.e("??", "?!");
    }
    return size;
}
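Since available() is only an estimate, a hedged alternative for this kind of size check is to ask the File object directly. The sketch below is not the original FileUtil code; the method name getFileSizeByLength is made up for illustration, and java.io.File and java.io.IOException are assumed to be imported.

// Hypothetical variant of the method above: File.length() reports the size
// in bytes without opening a stream, and avoids leaking a FileInputStream.
private static long getFileSizeByLength(File file) throws IOException {
    if (!file.exists()) {
        file.createNewFile();
        return 0L;
    }
    return file.length();
}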
From source file:org.nuxeo.ecm.automation.server.test.AbstractAutomationClientTest.java
@Test
public void testUploadSmallFile() throws Exception {
    DigestMockInputStream source = new DigestMockInputStream(100);
    FileInputStream in = new UploadFileSupport(session, automationTestFolder.getPath()).testUploadFile(source);
    byte[] sentSum = source.digest.digest();
    while (in.available() > 0) {
        source.digest.update((byte) in.read());
    }
    byte[] receivedSum = source.digest.digest();
    assertTrue("Expected (sent) bytes array: " + Arrays.toString(sentSum) + " - Actual (received) bytes array: "
            + Arrays.toString(receivedSum), MessageDigest.isEqual(sentSum, receivedSum));
}
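The while (in.available() > 0) loop works in this test because the downloaded file is already fully written locally, but in general available() returning 0 does not mean end of stream. A read-until-EOF variant of the same digest loop (same variable names as above, shown only as a sketch) would be:

int b;
while ((b = in.read()) != -1) {
    source.digest.update((byte) b);
}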
From source file:com.krawler.portal.tools.FileImpl.java
public String read(File file, boolean raw) throws IOException {
    FileInputStream fis = new FileInputStream(file);
    byte[] bytes = new byte[fis.available()];
    fis.read(bytes);
    fis.close();
    String s = new String(bytes, StringPool.UTF8);
    if (raw) {
        return s;
    } else {
        return StringUtil.replace(s, StringPool.RETURN_NEW_LINE, StringPool.NEW_LINE);
    }
}
From source file:com.freecast.LudoCast.MainActivity.java
public void readFileData(String fileName) {
    String res = "";
    try {
        FileInputStream fin = openFileInput(fileName);
        int length = fin.available();
        byte[] buffer = new byte[length];
        fin.read(buffer);
        res = EncodingUtils.getString(buffer, "UTF-8");
        fin.close();
    } catch (Exception e) {
        e.printStackTrace();
    }
    if (res.equals("")) {
    } else {
        editText.setText(res);
        username = editText.getText().toString();
    }
}