List of usage examples for java.lang OutOfMemoryError getMessage
public String getMessage()
From source file:com.zimbra.cs.redolog.util.PlaybackUtil.java
/**
 * Replays every redo log file listed in {@code mParams.logfiles} through
 * {@code mPlayer}, honouring the configured time window and the optional
 * single-mailbox filter. The player is always shut down, even on failure.
 *
 * @throws Throwable rethrown from the log scan when {@code mParams.stopOnError} is set
 */
public void playback() throws Throwable {
    try {
        for (File logFile : mParams.logfiles) {
            System.out.println("Processing log file: " + logFile.getAbsolutePath());
            // Bump the upper bound by one so the configured end time is inclusive,
            // unless it is already at the maximum representable value.
            long endTime = mParams.toTime;
            if (endTime < Long.MAX_VALUE) {
                endTime++;
            }
            try {
                // Restrict replay to a single mailbox when one was requested.
                Map<Integer, Integer> mailboxFilter = null;
                if (mParams.mboxId != Params.MBOX_ID_UNSET) {
                    mailboxFilter = new HashMap<Integer, Integer>(1);
                    mailboxFilter.put(mParams.mboxId, mParams.mboxId);
                }
                mPlayer.scanLog(logFile, true, mailboxFilter, mParams.fromTime, endTime);
            } catch (OutOfMemoryError oome) {
                // Unrecoverable: bring the whole process down cleanly.
                Zimbra.halt("OutOfMemoryError while replaying redolog: " + oome.getMessage(), oome);
            } catch (Throwable err) {
                if (mParams.stopOnError) {
                    throw err;
                }
                ZimbraLog.redolog.warn("Ignoring error and moving on: " + err.getMessage(), err);
            }
        }
    } finally {
        mPlayer.shutdown();
    }
}
From source file:im.vector.activity.ImageWebViewActivity.java
private String computeCss(String mediaUrl, int thumbnailWidth, int thumbnailHeight, int rotationAngle) { String css = "body { background-color: #000; height: 100%; width: 100%; margin: 0px; padding: 0px; }" + ".wrap { position: absolute; left: 0px; right: 0px; width: 100%; height: 100%; " + "display: -webkit-box; -webkit-box-pack: center; -webkit-box-align: center; " + "display: box; box-pack: center; box-align: center; } "; mRotationAngle = rotationAngle;/*from w w w . j a va 2s .c o m*/ // the rotation angle must be retrieved from the exif metadata if (rotationAngle == Integer.MAX_VALUE) { if (null != mediaUrl) { mRotationAngle = ImageUtils.getRotationAngleForBitmap(this, Uri.parse(mediaUrl)); } } if (mRotationAngle != 0) { // get the image size to scale it to fill in the device screen. int imageWidth = thumbnailWidth; int imageHeight = thumbnailHeight; try { Uri uri = Uri.parse(mHighResUri); FileInputStream imageStream = new FileInputStream(new File(uri.getPath())); BitmapFactory.Options options = new BitmapFactory.Options(); options.inJustDecodeBounds = true; options.inPreferredConfig = Bitmap.Config.ARGB_8888; options.outWidth = -1; options.outHeight = -1; // get the full size bitmap Bitmap fullSizeBitmap = null; try { fullSizeBitmap = BitmapFactory.decodeStream(imageStream, null, options); } catch (OutOfMemoryError e) { Log.e(LOG_TAG, "Onclick BitmapFactory.decodeStream : " + e.getMessage()); } imageWidth = options.outWidth; imageHeight = options.outHeight; imageStream.close(); fullSizeBitmap.recycle(); } catch (Exception e) { } String cssRotation = calcCssRotation(mRotationAngle, imageWidth, imageHeight); css += "#image { " + cssRotation + " } "; css += "#thumbnail { " + cssRotation + " } "; } return css; }
From source file:net.sf.ehcache.constructs.blocking.SelfPopulatingCacheTest.java
/** * Shows the effect of jamming large amounts of puts into a cache that overflows to disk. * The DiskStore should cause puts to back off and avoid an out of memory error. *///from www.j ava 2 s. c o m public void testBehaviourOnDiskStoreBackUp() throws Exception { Cache cache = new Cache("testGetMemoryStoreSize", 10, true, false, 100, 200, false, 0); manager.addCache(cache); assertEquals(0, cache.getMemoryStoreSize()); Element a = null; int i = 0; try { for (; i < 200000; i++) { String key = i + ""; String value = key; a = new Element(key, value + "DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD"); cache.put(a); } } catch (OutOfMemoryError e) { //the disk store backs up on the laptop. LOG.info("OutOfMemoryError: " + e.getMessage() + " " + i); fail(); } }
From source file:org.apache.cxf.maven_plugin.Java2WSMojo.java
private void processJavaClass(List<String> args) throws MojoExecutionException { if (!fork) {/*from w w w . j a v a 2 s . c om*/ try { CommandInterfaceUtils.commandCommonMain(); JavaToWS j2w = new JavaToWS(args.toArray(new String[args.size()])); j2w.run(); } catch (OutOfMemoryError e) { getLog().debug(e); StringBuilder msg = new StringBuilder(); msg.append(e.getMessage()).append('\n'); msg.append("Try to run this goal using the <fork>true</fork> and " + "<additionalJvmArgs>-Xms128m -Xmx128m</additionalJvmArgs> parameters."); throw new MojoExecutionException(msg.toString(), e); } catch (Throwable e) { getLog().debug(e); throw new MojoExecutionException(e.getMessage(), e); } } else { getLog().info("Running java2ws in fork mode..."); Commandline cmd = new Commandline(); cmd.getShell().setQuotedArgumentsEnabled(true); // for JVM args cmd.setWorkingDirectory(project.getBuild().getDirectory()); try { cmd.setExecutable(getJavaExecutable().getAbsolutePath()); } catch (IOException e) { getLog().debug(e); throw new MojoExecutionException(e.getMessage(), e); } cmd.addArguments(args.toArray(new String[args.size()])); CommandLineUtils.StringStreamConsumer err = new CommandLineUtils.StringStreamConsumer(); CommandLineUtils.StringStreamConsumer out = new CommandLineUtils.StringStreamConsumer(); int exitCode; try { exitCode = CommandLineUtils.executeCommandLine(cmd, out, err); } catch (CommandLineException e) { getLog().debug(e); throw new MojoExecutionException(e.getMessage(), e); } String output = StringUtils.isEmpty(out.getOutput()) ? 
null : '\n' + out.getOutput().trim(); String cmdLine = CommandLineUtils.toString(cmd.getCommandline()); if (exitCode != 0) { if (StringUtils.isNotEmpty(output)) { getLog().info(output); } StringBuilder msg = new StringBuilder("\nExit code: "); msg.append(exitCode); if (StringUtils.isNotEmpty(err.getOutput())) { msg.append(" - ").append(err.getOutput()); } msg.append('\n'); msg.append("Command line was: ").append(cmdLine).append('\n').append('\n'); throw new MojoExecutionException(msg.toString()); } if (StringUtils.isNotEmpty(err.getOutput()) && err.getOutput().contains("JavaToWS Error")) { StringBuilder msg = new StringBuilder(); msg.append(err.getOutput()); msg.append('\n'); msg.append("Command line was: ").append(cmdLine).append('\n').append('\n'); throw new MojoExecutionException(msg.toString()); } } // Attach the generated wsdl file to the artifacts that get deployed // with the enclosing project if (attachWsdl && outputFile != null) { File wsdlFile = new File(outputFile); if (wsdlFile.exists()) { if (classifier != null) { projectHelper.attachArtifact(project, wsdlFile.getName(), classifier, wsdlFile); } else { projectHelper.attachArtifact(project, wsdlFile.getName(), wsdlFile); } boolean hasWsdlAttached = false; for (Artifact a : project.getAttachedArtifacts()) { if ("wsdl".equals(a.getType())) { hasWsdlAttached = true; } } if (!hasWsdlAttached) { if (classifier != null) { projectHelper.attachArtifact(project, "wsdl", classifier, wsdlFile); } else { projectHelper.attachArtifact(project, "wsdl", wsdlFile); } } } } }
From source file:net.pandoragames.far.ui.UIBean.java
/** * {@inheritDoc}/*from ww w. j a v a 2 s . co m*/ */ public void replace(ReplacementDescriptor criteriaForm, List<TargetFile> fileList) { if (criteriaForm == null) throw new NullPointerException("Replace criteria must not be null"); ReplacementDescriptor criteria = (ReplacementDescriptor) criteriaForm.clone(); wasAborted = false; int counter = 0; try { FileMatcher matcher = new FileMatcher(criteria.getContentPatternAsRegex()); matcher.setBaseDirectory(criteria.getBaseDirectory()); matcher.setDoBackup(criteria.isDoBackup()); if (criteria.isDoBackup()) matcher.setBackUpDirectory(criteria.getBackupDirectory()); int matchCounter = 0; int selectCounter = 0; for (int i = 0; i < fileList.size(); i++) { if (fileList.get(i).isSelected()) selectCounter++; } notifyStarted(OperationType.REPLACE); operationInProgress = OperationType.REPLACE; lastReplaceForm = (ReplaceForm) criteria.clone(); lastFileSet = new ArrayList<TargetFile>(); logger.info("Applying " + criteria.getContentPatternAsRegex().pattern() + " --> " + criteria.getReplacementString() + " on " + selectCounter + " files "); for (TargetFile file : fileList) { counter++; if ((!wasAborted) && file.isSelected()) { file.clear(); lastFileSet.add(file); try { matcher.setCharacterSet(file.getCharacterset()); // TODO: use file dependent line break // like so: // if( criteria.patternContainsLineBreak() ) { // String lb = findLineBreakUsed( file ); // } int numberOfMatches = matcher.apply(file.getFile(), criteria.getReplacementPattern(LINEBREAK)); file.setIncluded(numberOfMatches > 0); if (numberOfMatches > 0) { matchCounter++; file.info(localizer.localize("message.replacement-count", new Integer[] { numberOfMatches })); logger.debug("Replacement pattern applied to " + file.getFile().getName()); } } catch (Exception x) { logger.error(x.getClass().getName() + ": " + x.getMessage(), x); file.error(x.getMessage()); } } else { file.setIncluded(false); } notifyCount(counter, selectCounter, OperationType.REPLACE); } 
notifyTerminated(OperationType.REPLACE); logger.info(matchCounter + " files changed"); messageBox.info( localizer.localize("message.update-count", new Integer[] { Integer.valueOf(matchCounter) })); } catch (PatternSyntaxException px) { abort(); logger.error("PatternSyntaxException: " + px.getMessage(), px); messageBox.clear(); messageBox.error(localizer.localize("message.syntax-error", new Object[] { px.getMessage() })); } catch (Exception x) { abort(); logger.error(x.getClass().getName() + ": " + x.getMessage(), x); messageBox.clear(); messageBox.error(localizer.localize("message.file-processing-error", new Object[] { fileList.get(counter - 1).getName(), x.getMessage() })); } catch (OutOfMemoryError omu) { abort(); String message = "OutOfMemoryError: " + omu.getMessage(); logger.error(message, omu); messageBox.clear(); messageBox.error(localizer.localize("message.document-too-large-for-processing", new Object[] { fileList.get(counter - 1).getName(), omu.getMessage() })); } wasAborted = false; operationInProgress = OperationType.NONE; }
From source file:uk.bl.wa.solr.TikaExtractor.java
/** * //ww w .j av a2 s. c o m * @param solr * @param is * @param url * @return * @throws IOException */ @SuppressWarnings("deprecation") public SolrRecord extract(SolrRecord solr, InputStream is, String url) throws IOException { // Set up the TikaInputStream: TikaInputStream tikainput = null; if (this.maxBytesToParser > 0) { tikainput = TikaInputStream .get(new BoundedInputStream(new CloseShieldInputStream(is), maxBytesToParser)); } else { tikainput = TikaInputStream.get(new CloseShieldInputStream(is)); } // Also pass URL as metadata to allow extension hints to work: Metadata metadata = new Metadata(); if (url != null) metadata.set(Metadata.RESOURCE_NAME_KEY, url); final long detectStart = System.nanoTime(); StringBuilder detected = new StringBuilder(); try { DetectRunner detect = new DetectRunner(tika, tikainput, detected, metadata); Thread detectThread = new Thread(detect, Long.toString(System.currentTimeMillis())); detectThread.start(); detectThread.join(10000L); detectThread.interrupt(); } catch (NoSuchFieldError e) { // TODO Is this an Apache POI version issue? 
log.error("Tika.detect(): " + e.getMessage()); addExceptionMetadata(metadata, new Exception("detect threw " + e.getClass().getCanonicalName())); } catch (Exception e) { log.error("Tika.detect(): " + e.getMessage()); addExceptionMetadata(metadata, e); } Instrument.timeRel("WARCPayloadAnalyzers.analyze#tikasolrextract", "TikaExtractor.extract#detect", detectStart); // Only proceed if we have a suitable type: if (!this.checkMime(detected.toString())) { if ("".equals(detected.toString())) { solr.addField(SolrFields.SOLR_CONTENT_TYPE, MediaType.APPLICATION_OCTET_STREAM.toString()); } else { solr.addField(SolrFields.SOLR_CONTENT_TYPE, detected.toString()); } return solr; } // Context ParseContext context = new ParseContext(); StringWriter content = new StringWriter(); // Override the recursive parsing: if (embedded == null) embedded = new NonRecursiveEmbeddedDocumentExtractor(context); context.set(EmbeddedDocumentExtractor.class, embedded); try { final long parseStart = System.nanoTime(); ParseRunner runner = new ParseRunner(tika.getParser(), tikainput, this.getHandler(content), metadata, context); Thread parseThread = new Thread(runner, Long.toString(System.currentTimeMillis())); try { parseThread.start(); parseThread.join(this.parseTimeout); parseThread.interrupt(); parseThread.join(this.parseTimeout); } catch (OutOfMemoryError o) { log.error("TikaExtractor.parse() - OutOfMemoryError: " + o.getMessage()); addExceptionMetadata(metadata, new Exception("OutOfMemoryError")); } catch (RuntimeException r) { log.error("TikaExtractor.parse() - RuntimeException: " + r.getMessage()); addExceptionMetadata(metadata, r); } Instrument.timeRel("WARCPayloadAnalyzers.analyze#tikasolrextract", "TikaExtractor.extract#parse", parseStart); // If there was a parse error, report it: solr.addField(SolrFields.PARSE_ERROR, metadata.get(TikaExtractor.TIKA_PARSE_EXCEPTION)); final long extractStart = System.nanoTime(); // Copy the body text, forcing a UTF-8 encoding: String output = new 
String(content.toString().getBytes("UTF-8")); if (runner.complete || !output.equals("")) { if (output.length() > this.max_text_length) { output = output.substring(0, this.max_text_length); } log.debug("Extracted text from: " + url); log.debug("Extracted text: " + StringUtils.left(output, 300)); solr.setField(SolrFields.SOLR_EXTRACTED_TEXT, output); solr.setField(SolrFields.SOLR_EXTRACTED_TEXT_LENGTH, Integer.toString(output.length())); } else { //log.debug("Failed to extract any text from: "+url); } // Noisily report all metadata properties: /* * for( String m : metadata.names() ) { * log.info("For "+url.substring(url.length() - (int) * Math.pow(url.length(),0.85))+": "+m+" -> "+metadata.get(m)); } */ // Attempt to record all metadata discovered: if (this.extractAllMetadata) { for (String m : metadata.names()) { // Ignore these as they are not very interesting: if (Metadata.RESOURCE_NAME_KEY.equalsIgnoreCase(m) || "dc:title".equalsIgnoreCase(m) || "title".equalsIgnoreCase(m) || "description".equalsIgnoreCase(m) || "keywords".equalsIgnoreCase(m) || Metadata.CONTENT_ENCODING.equalsIgnoreCase(m) || Metadata.CONTENT_LOCATION.equalsIgnoreCase(m) || "ACTINICTITLE".equalsIgnoreCase(m) || Metadata.CONTENT_TYPE.equalsIgnoreCase(m)) { continue; } // Record in the document, but trim big ones: String value = metadata.get(m); if (value != null && value.length() > 100) { value = value.substring(0, 100); } solr.addField(SolrFields.SOLR_TIKA_METADATA, m + "=" + value); } } // Also Pick out particular metadata: String contentType = metadata.get(Metadata.CONTENT_TYPE); solr.addField(SolrFields.SOLR_CONTENT_TYPE, contentType); solr.addField(SolrFields.SOLR_TITLE, metadata.get(DublinCore.TITLE)); solr.addField(SolrFields.SOLR_DESCRIPTION, metadata.get(DublinCore.DESCRIPTION)); solr.addField(SolrFields.SOLR_KEYWORDS, metadata.get("keywords")); solr.addField(SolrFields.SOLR_AUTHOR, metadata.get(DublinCore.CREATOR)); solr.addField(SolrFields.CONTENT_ENCODING, 
metadata.get(Metadata.CONTENT_ENCODING)); // Parse out any embedded date that can act as a created/modified date. String date = null; if (metadata.get(Metadata.CREATION_DATE) != null) date = metadata.get(Metadata.CREATION_DATE); if (metadata.get(Metadata.DATE) != null) date = metadata.get(Metadata.DATE); if (metadata.get(Metadata.MODIFIED) != null) date = metadata.get(Metadata.MODIFIED); if (date != null) { DateTimeFormatter df = ISODateTimeFormat.dateTimeParser(); DateTime edate = null; try { edate = df.parseDateTime(date); } catch (IllegalArgumentException e) { log.error("Could not parse: " + date); } if (edate == null) { Date javadate = Times.extractDate(date); if (javadate != null) edate = new org.joda.time.DateTime(javadate); } if (edate != null) { solr.addField(SolrFields.LAST_MODIFIED_YEAR, "" + edate.getYear()); DateTimeFormatter iso_df = ISODateTimeFormat.dateTimeNoMillis().withZone(DateTimeZone.UTC); // solr.getSolrDocument().setField(SolrFields.LAST_MODIFIED, // edate); solr.setField(SolrFields.LAST_MODIFIED, iso_df.print(edate)); } } // Also look to record the software identifiers: // Look for generic xmp:CreatorTool solr.addField(SolrFields.GENERATOR, metadata.get("xmp:CreatorTool")); // For PDF, support other metadata tags: //solr.addField(SolrFields.GENERATOR, metadata.get( "creator" )); // This appears to be dc:creator i.e. author. solr.addField(SolrFields.GENERATOR, metadata.get("producer")); solr.addField(SolrFields.GENERATOR, metadata.get(Metadata.SOFTWARE)); solr.addField(SolrFields.GENERATOR, metadata.get("generator")); solr.addField(SolrFields.GENERATOR, metadata.get("Software")); // Application ID, MS Office only AFAICT, and the VERSION is only doc String software = null; if (metadata.get(Metadata.APPLICATION_NAME) != null) software = metadata.get(Metadata.APPLICATION_NAME); if (metadata.get(Metadata.APPLICATION_VERSION) != null) software += " " + metadata.get(Metadata.APPLICATION_VERSION); // Images, e.g. 
JPEG and TIFF, can have 'Software', 'tiff:Software', // PNGs have a 'tEXt tEXtEntry: keyword=Software, value=GPL Ghostscript 8.71' String png_textentry = metadata.get("tEXt tEXtEntry"); if (png_textentry != null && png_textentry.contains("keyword=Software, value=")) software = png_textentry.replace("keyword=Software, value=", ""); /* Some JPEGs have this: Jpeg Comment: CREATOR: gd-jpeg v1.0 (using IJG JPEG v62), default quality comment: CREATOR: gd-jpeg v1.0 (using IJG JPEG v62), default quality */ if (software != null) { solr.addField(SolrFields.GENERATOR, software); } Instrument.timeRel("WARCPayloadAnalyzers.analyze#tikasolrextract", "TikaExtractor.extract#extract", extractStart); } catch (Exception e) { log.error("TikaExtractor.extract(): " + e.getMessage()); } // TODO: This should probably be wrapped in a method-spanning try-finally to guarantee close if (tikainput != null) { try { tikainput.close(); } catch (IOException e) { log.warn("Exception closing TikaInputStream. This leaves tmp-files: " + e.getMessage()); } } return solr; }
From source file:info.magnolia.cms.exchange.simple.SimpleExchangeServlet.java
/** * @param request/*from w w w . j a v a2s . co m*/ * @param response * @throws javax.servlet.ServletException * @throws java.io.IOException */ public void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { String statusMessage = ""; String status = ""; try { validateRequest(request); initializeContext(request); applyLock(request); receive(request); // remove cached files if successful this.cacheManager.flushAll(); status = SimpleSyndicator.ACTIVATION_SUCCESSFUL; } catch (OutOfMemoryError e) { Runtime rt = Runtime.getRuntime(); log.error("---------\nOutOfMemoryError caught during activation. Total memory = " //$NON-NLS-1$ + rt.totalMemory() + ", free memory = " //$NON-NLS-1$ + rt.freeMemory() + "\n---------"); //$NON-NLS-1$ statusMessage = e.getMessage(); status = SimpleSyndicator.ACTIVATION_FAILED; } catch (PathNotFoundException e) { log.error(e.getMessage(), e); statusMessage = "Parent not found (not yet activated): " + e.getMessage(); status = SimpleSyndicator.ACTIVATION_FAILED; } catch (Throwable e) { log.error(e.getMessage(), e); statusMessage = e.getMessage(); status = SimpleSyndicator.ACTIVATION_FAILED; } finally { cleanUp(request); response.setHeader(SimpleSyndicator.ACTIVATION_ATTRIBUTE_STATUS, status); response.setHeader(SimpleSyndicator.ACTIVATION_ATTRIBUTE_MESSAGE, statusMessage); } }
From source file:com.nnm.smsviet.Message.java
/** * Default constructor.//from w w w . j a v a 2 s . c o m * * @param context * {@link Context} to spawn the {@link SmileyParser}. * @param cursor * {@link Cursor} to read the data */ private Message(final Context context, final Cursor cursor) { this.id = cursor.getLong(INDEX_ID); this.threadId = cursor.getLong(INDEX_THREADID); this.date = cursor.getLong(INDEX_DATE); if (this.date < ConversationListActivity.MIN_DATE) { this.date *= ConversationListActivity.MILLIS; } if (cursor.getColumnIndex(PROJECTION_JOIN[INDEX_TYPE]) >= 0) { this.address = cursor.getString(INDEX_ADDRESS); this.body = cursor.getString(INDEX_BODY); if (ConversationListActivity.showEmoticons && this.body != null) { this.body = SmileyParser.getInstance(context).addSmileySpans(this.body); } } else { this.body = null; this.address = null; } this.type = cursor.getInt(INDEX_TYPE); this.read = cursor.getInt(INDEX_READ); if (this.body == null) { this.isMms = true; try { this.fetchMmsParts(context); } catch (OutOfMemoryError e) { Log.e(TAG, "error loading parts", e); try { Toast.makeText(context, e.getMessage(), Toast.LENGTH_LONG).show(); } catch (Exception e1) { Log.e(TAG, "error creating Toast", e1); } } } else { this.isMms = false; } try { this.subject = cursor.getString(INDEX_SUBJECT); } catch (IllegalStateException e) { this.subject = null; } try { if (cursor.getColumnCount() > INDEX_MTYPE) { final int t = cursor.getInt(INDEX_MTYPE); if (t != 0) { this.type = t; } } } catch (IllegalStateException e) { this.subject = null; } Log.d(TAG, "threadId: " + this.threadId); Log.d(TAG, "address: " + this.address); // Log.d(TAG, "subject: " + this.subject); // Log.d(TAG, "body: " + this.body); // Log.d(TAG, "type: " + this.type); }
From source file:com.rokolabs.app.common.image.ImageCache.java
public Bitmap getBitmapFromDiskCache(String data, int width, int height) { if (data == null) return null; final String key = hashKeyForDisk(data); synchronized (mDiskCacheLock) { while (mDiskCacheStarting) { try { mDiskCacheLock.wait();/*from w ww. java2s. c o m*/ } catch (InterruptedException e) { } } if (mDiskLruCache != null) { InputStream inputStream = null; try { DiskLruCache.Snapshot snapshot = mDiskLruCache.get(key); if (snapshot != null) { if (Utils.DEBUG) { Log.d(TAG, "Disk cache hit"); } inputStream = snapshot.getInputStream(DISK_CACHE_INDEX); if (inputStream != null) { final BitmapFactory.Options options = new BitmapFactory.Options(); if (width > 0 && height > 0) { options.inJustDecodeBounds = true; BitmapFactory.decodeStream(inputStream, null, options); options.inSampleSize = ImageResizer.calculateInSampleSize(options, width, height); snapshot.close(); snapshot = mDiskLruCache.get(key); inputStream = snapshot.getInputStream(DISK_CACHE_INDEX); } // Decode bitmap with inSampleSize set options.inJustDecodeBounds = false; options.inPurgeable = true; options.inInputShareable = true; return BitmapFactory.decodeStream(inputStream, null, options); } } } catch (OutOfMemoryError error) { Logger.e(error.getMessage(), error); Logger.printMemory("OOM in load bitmap from disk"); SharedImageFetcher.clearMemoryCache(); } catch (final IOException e) { Log.e(TAG, "getBitmapFromDiskCache - " + e); } finally { try { if (inputStream != null) { inputStream.close(); } } catch (IOException e) { } } } return null; } }
From source file:org.appcelerator.titanium.util.TiUIHelper.java
/**
 * Creates and returns a density scaled Bitmap from an InputStream.
 *
 * @param stream an InputStream to read bitmap data.
 * @return a new bitmap instance, or null when decoding runs out of memory.
 */
public static Bitmap createDensityScaledBitmap(final InputStream stream) {
    try {
        return BitmapFactory.decodeResourceStream(null, null, stream, null, getScaledBitmapOptions());
    } catch (OutOfMemoryError e) {
        Log.e(TAG, "Unable to load bitmap. Not enough memory: " + e.getMessage());
        return null;
    }
}