List of usage examples for java.awt Graphics2D fillRect
public abstract void fillRect(int x, int y, int width, int height);
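fillRect fills the rectangle whose top-left corner is at (x, y) and whose size is width by height pixels, using the current color or paint. Below is a minimal, self-contained sketch of the pattern most of the examples on this page share (create an off-screen image, set a color, fill, dispose); the class name and the output file name are placeholders for illustration only.

import java.awt.Color;
import java.awt.Graphics2D;
import java.awt.image.BufferedImage;
import java.io.File;
import javax.imageio.ImageIO;

public class FillRectDemo {
    public static void main(String[] args) throws Exception {
        // Off-screen image to draw on.
        BufferedImage image = new BufferedImage(200, 100, BufferedImage.TYPE_INT_ARGB);
        Graphics2D g2 = image.createGraphics();
        // Fill the whole image with a white background.
        g2.setColor(Color.WHITE);
        g2.fillRect(0, 0, image.getWidth(), image.getHeight());
        // Fill a 100x50 red rectangle with its top-left corner at (20, 20).
        g2.setColor(Color.RED);
        g2.fillRect(20, 20, 100, 50);
        // Release the graphics context when done.
        g2.dispose();
        ImageIO.write(image, "png", new File("output.png")); // placeholder output path
    }
}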
From source file:fr.ign.cogit.geoxygene.appli.layer.LayerViewAwtPanel.java
public void saveAsImage(String fileName, int width, int height, boolean doSaveWorldFile, boolean drawOverlay) {
    Color bg = this.getBackground();
    BufferedImage outImage = new BufferedImage(width, height, BufferedImage.TYPE_INT_ARGB);
    Graphics2D graphics = outImage.createGraphics(); // TEMP
    graphics.setColor(bg);
    graphics.fillRect(0, 0, width, height);
    int tmpw = this.getWidth();
    int tmph = this.getHeight();
    // We save the old extent to force the Viewport to keep the old
    // window in world coordinates.
    IEnvelope env = this.getViewport().getEnvelopeInModelCoordinates();
    // Artificially resize the canvas to the image dimensions.
    this.setSize(width, height);
    try {
        // We zoom to the old extent in the resized canvas.
        this.getViewport().zoom(env);
    } catch (NoninvertibleTransformException e2) {
        logger.error("In Image Export : failed to zoom in the correct extent.");
        e2.printStackTrace();
    }
    this.renderingManager.renderAll();
    long time = System.currentTimeMillis();
    long twaited = 0;
    while (this.renderingManager.isRendering() && twaited < 15000) {
        // Wait for the rendering to end for a maximum of 15s. If the
        // rendering is not finished after this delay, we give up.
        twaited = System.currentTimeMillis() - time;
    }
    if (this.renderingManager.isRendering()) {
        logger.error("Export to image : waited 15s but the rendering is still not finished. Abort.");
        return;
    }
    // We have to impose a bbox !!!
    this.getRenderingManager().copyTo(graphics);
    if (drawOverlay) {
        this.paintOverlays(graphics);
    }
    graphics.dispose();
    try {
        ImgUtil.saveImage(outImage, fileName);
    } catch (IOException e1) {
        e1.printStackTrace();
    }
    if (doSaveWorldFile) {
        String wld = FilenameUtils.removeExtension(fileName) + ".wld";
        try {
            AffineTransform t = this.getViewport().getModelToViewTransform();
            fr.ign.cogit.geoxygene.util.conversion.WorldFileWriter.write(new File(wld), t.getScaleX(),
                    t.getScaleY(), this.getViewport().getViewOrigin().getX(),
                    this.getViewport().getViewOrigin().getY(), this.getHeight());
        } catch (NoninvertibleTransformException e) {
            logger.error("Failed to save the world file associated with the image file " + fileName);
            e.printStackTrace();
        }
    }
    // Finally, rollback the canvas to its original size.
    this.setSize(tmpw, tmph);
    try {
        // Zoom back to the "normal" extent.
        this.getViewport().zoom(env);
    } catch (NoninvertibleTransformException e2) {
        logger.error("In Image Export : failed to zoom back to the original LayerViewPanel extent.");
        e2.printStackTrace();
        return;
    }
}
From source file:com.celements.photo.image.GenerateThumbnail.java
BufferedImage convertImageToBufferedImage(Image thumbImg, String watermark, String copyright, Color defaultBg) {
    BufferedImage thumb = new BufferedImage(thumbImg.getWidth(null), thumbImg.getHeight(null),
            BufferedImage.TYPE_INT_ARGB);
    Graphics2D g2d = thumb.createGraphics();
    if (defaultBg != null) {
        g2d.setColor(defaultBg);
        g2d.fillRect(0, 0, thumbImg.getWidth(null), thumbImg.getHeight(null));
    }
    g2d.drawImage(thumbImg, 0, 0, null);
    if ((watermark != null) && (!watermark.equals(""))) {
        drawWatermark(watermark, g2d, thumb.getWidth(), thumb.getHeight());
    }
    if ((copyright != null) && (!copyright.equals(""))) {
        drawCopyright(copyright, g2d, thumb.getWidth(), thumb.getHeight());
    }
    mLogger.info("thumbDimensions: " + thumb.getHeight() + "x" + thumb.getWidth());
    return thumb;
}
From source file:org.evors.rs.ui.sandpit.TrialViewer.java
public void drawText(Graphics2D g2, String text) {
    FontMetrics fm = g2.getFontMetrics();
    List<String> strings = Splitter.on("\n").splitToList(text);
    float x1 = 40, y = 40;
    int dx = 0;
    for (String s : strings) {
        int width = fm.stringWidth(s);
        if (width > dx) {
            dx = width;
        }
    }
    g2.setColor(new Color(0.6f, 0.6f, 0.6f, 0.6f));
    g2.fillRect((int) x1, (int) y - fm.getHeight(), dx + 5, fm.getHeight() * strings.size() + 5);
    g2.setColor(Color.red);
    for (String s : strings) {
        g2.drawString(s, 40, y);
        y += fm.getHeight();
    }
}
From source file:org.tsho.dmc2.core.chart.CowebRenderer.java
private void drawPoint(Graphics2D g2, int x, int y) {
    CoreStatusEvent statusEv = new CoreStatusEvent(this);
    statusEv.setType(CoreStatusEvent.REPAINT);
    if (delay >= 0) {
        try {
            g2.fillRect(x, y, 1, 1);
            g2blink.drawRect(x - 8, y - 8, 16, 16);
            Thread.sleep(delay / 3);
        } catch (final InterruptedException e) {
        } finally {
            g2blink.drawRect(x - 8, y - 8, 16, 16);
        }
    } else {
        g2.fillRect(x, y, 1, 1);
    }
}
From source file:org.nekorp.workflow.desktop.servicio.reporte.orden.servicio.OrdenServicioDataFactory.java
private String generaImagenCombustible(double porcentaje) {
    try {
        int width = 186 * 3;
        int height = 15 * 3;
        IndicadorBarraGraphicsView view = new IndicadorBarraGraphicsView();
        view.setWidthBar(width);
        view.setHeightBar(height);
        view.setPorcentaje(porcentaje);
        BufferedImage off_Image = new BufferedImage(width, height, BufferedImage.TYPE_INT_RGB);
        Graphics2D g2 = off_Image.createGraphics();
        g2.setColor(Color.WHITE);
        g2.fillRect(0, 0, width, height);
        view.paint(g2);
        File file = new File("data/nivelCombustible.jpg");
        saveJPG(off_Image, file);
        return file.getCanonicalPath();
    } catch (IOException ex) {
        throw new RuntimeException(ex);
    }
}
From source file:org.kalypsodeegree_impl.graphics.displayelements.RasterDisplayElement_Impl.java
/**
 * Paints one cell in the form of an envelope.<br>
 * This is used instead of reusing PolygoneDisplayElement or similar, as for the grid it is crucial that the
 * border of two cells is painted without intersection or gap.
 */
private static void paintEnvelope(final Graphics2D g, final GeoTransform projection,
        final Envelope currentCellEnv, final Color color) {
    // We assume the envelope is normalized here, so we can safely switch minY and maxY.
    final double paintMinX = projection.getDestX(currentCellEnv.getMinX());
    final double paintMinY = projection.getDestY(currentCellEnv.getMinY());
    final double paintMaxX = projection.getDestX(currentCellEnv.getMaxX());
    final double paintMaxY = projection.getDestY(currentCellEnv.getMaxY());

    final int x1 = (int) Math.ceil(paintMinX);
    final int y1 = (int) Math.ceil(paintMaxY);
    final int x2 = (int) Math.ceil(paintMaxX);
    final int y2 = (int) Math.ceil(paintMinY);

    final int width = x2 - x1;
    final int height = y2 - y1;

    g.setColor(color);
    g.fillRect(x1, y1, width, height);
}
From source file:edu.ku.brc.specify.utilapps.ERDTable.java
/**
 * Returns the BufferedImage of a background shadow. It creates a rectangle larger than the original image.
 * @return the BufferedImage of the background shadow
 */
private BufferedImage getBackgroundImageBuffer() {
    if (shadowBuffer == null) {
        ShadowFactory factory = new ShadowFactory(SHADOW_SIZE, 0.17f, Color.BLACK);
        Dimension size = inner.getSize();
        BufferedImage image = new BufferedImage(size.width, size.height, BufferedImage.TYPE_INT_ARGB);
        Graphics2D g2 = image.createGraphics();
        g2.setColor(Color.WHITE);
        g2.fillRect(0, 0, image.getWidth(), image.getHeight());
        g2.dispose();
        shadowBuffer = factory.createShadow(image);
    }
    return shadowBuffer;
}
From source file:com.stanley.captioner.Transcriber.java
public void start() {
    // Create stream speech recognizer.
    StreamSpeechRecognizer recognizer = null;
    try {
        recognizer = new StreamSpeechRecognizer(config);
    } catch (IOException e) {
        System.out.println("Failed to create recognizer.");
    }

    // Open print writer for writing text output.
    PrintWriter writer = null;
    try {
        writer = new PrintWriter(textOut);
    } catch (FileNotFoundException e) {
        System.out.println("Failed to create print writer.");
    }

    // Open stream for first pass.
    InputStream stream = null;
    try {
        stream = new FileInputStream(audio);
    } catch (FileNotFoundException e) {
        System.out.println("Failed to stream file.");
    }

    // Initialize loop variables.
    SpeechResult result;
    int resultCount = 0;
    Stats stats = recognizer.createStats(1);

    // Start recognizer for first pass.
    recognizer.startRecognition(stream);
    System.out.println("First pass (stats collection) started.");

    // First pass loop to collect statistics for model adaptation.
    while ((result = recognizer.getResult()) != null) {
        try {
            stats.collect(result);
        } catch (Exception e) {
            System.out.println("Failed to collect stats.");
        }
        resultCount++;

        // Toggle for testing.
        if (quickTest && resultCount > 5) {
            break;
        }
    }

    // Close recognizer (end of first pass).
    recognizer.stopRecognition();
    System.out.println("Stats collection stopped.");

    // Transform model using model adaptation.
    Transform transform = stats.createTransform();
    recognizer.setTransform(transform);

    // Reopen stream for second pass.
    stream = null;
    try {
        stream = new FileInputStream(audio);
    } catch (FileNotFoundException e) {
        System.out.println("Failed to stream file.");
    }

    // Start recognizer for second pass.
    recognizer.startRecognition(stream);
    System.out.println("Second pass started.");

    // Create output text file header.
    writer.printf("%-20s", "WORD:");
    writer.printf("%20s", "CONFIDENCE:");
    writer.printf("%20s", "START TIME:");
    writer.printf("%20s", "END_TIME:");
    writer.println();
    for (int i = 0; i < 80; i++) {
        writer.print("-");
    }
    writer.println();

    // Initialize loop variables.
    int wordCount = 0;
    String sentence = "";
    int sentenceLength = 0;
    long sentenceStart = 0;
    long sentenceEnd = 0;
    ArrayList<Sentence> sentences = new ArrayList<>();

    // Second pass loop to calculate sentences.
    RECOG: while ((result = recognizer.getResult()) != null) {
        for (WordResult wordResult : result.getWords()) {
            wordCount++;
            String word = wordResult.getWord().toString();
            double confidence = wordResult.getConfidence();
            long startTime = wordResult.getTimeFrame().getStart();
            long endTime = wordResult.getTimeFrame().getEnd();
            writer.printf("%-20s", word);
            writer.printf("%20.1f", confidence);
            writer.printf("%20d", startTime);
            writer.printf("%20d", endTime);
            writer.println();

            if (sentenceLength + word.length() < 40) {
                // Add to current sentence.
                sentence += " " + word;
                sentenceLength += word.length();
                sentenceEnd = endTime;
            } else {
                // End of current sentence, store and start a new one.
                sentences.add(new Sentence(sentence, sentenceStart, sentenceEnd));
                sentenceStart = sentenceEnd;
                sentence = "";
                sentenceLength = 0;
            }

            // Toggle for testing.
            if (quickTest && wordCount > 50) {
                break RECOG;
            }
        }
    }

    // Close print writer and recognizer (end of second pass).
    writer.close();
    recognizer.stopRecognition();
    System.out.println("Second pass stopped.");

    // Create folder for caption images.
    String imageDirPath = FilenameUtils.concat(textOut.getParent(),
            FilenameUtils.getBaseName(textOut.getAbsolutePath()));
    System.out.println(imageDirPath);
    File imageDir = new File(imageDirPath);
    if (!imageDir.exists()) {
        // Create the folder if it doesn't already exist.
        imageDir.mkdir();
    }

    // Calculate video output path.
    String videoOutPath = FilenameUtils.concat(textOut.getParent(),
            FilenameUtils.getBaseName(textOut.getAbsolutePath()) + ".mp4");
    System.out.println(videoOutPath);

    // Initialize a command string for overlaying the captions.
    String commandString = String.format("%s -y -loglevel quiet -i %s", new Converter().getFFmpegPath(),
            videoIn.getAbsolutePath());
    System.out.println(commandString);

    // Initialize a complex filter for overlaying the captions.
    String filterString = "-filter_complex";

    // Acquire a probe object for collecting video details.
    Converter converter = new Converter();
    FFprobe ffprobe = null;
    try {
        ffprobe = new FFprobe(converter.getFFprobePath());
    } catch (IOException e) {
        System.out.println("Failed to find ffprobe.");
    }

    // Probe the video for details.
    FFmpegProbeResult probeResult = null;
    try {
        probeResult = ffprobe.probe(videoIn.getAbsolutePath());
    } catch (IOException e) {
        System.out.println("Failed to probe video file.");
    }

    // Get the width and height of the video.
    FFmpegStream videoStream = probeResult.getStreams().get(0);
    int videoWidth = videoStream.width;
    int videoHeight = videoStream.height;

    // Calculate the x and y coordinates of the captions.
    int captionX = (videoWidth / 2) - 220;
    int captionY = videoHeight - 25 - 10;

    // Loop over the sentences, generate captions, and build command string.
    int k = 0;
    for (Sentence s : sentences) {
        // Create caption image from sentence.
        BufferedImage bi = new BufferedImage(440, 50, BufferedImage.TYPE_INT_ARGB);
        Graphics2D g = bi.createGraphics();
        g.setPaint(new Color(0, 0, 0, 128));
        g.fillRect(0, 0, 440, 50);
        g.setPaint(new Color(255, 255, 255, 255));
        g.setFont(new Font("Serif", Font.BOLD, 20));
        FontMetrics fm = g.getFontMetrics();
        int x = bi.getWidth() - fm.stringWidth(s.text) - 5;
        int y = fm.getHeight() - 5;
        g.drawString(s.text, x, y);
        g.dispose();

        // Write the image to file for future reference.
        String suffix = String.format("caption-%03d.png", k);
        String imagePath = FilenameUtils.concat(imageDirPath, suffix);
        try {
            File imageFile = new File(imagePath);
            ImageIO.write(bi, "png", imageFile);
        } catch (IOException e) {
            System.out.println("Failed to write caption image to file.");
        }

        // Add the caption image path to the command string.
        commandString += " -i " + imagePath;

        // Add an entry to the complex filter with the caption timeframe.
        if (k == 0) {
            filterString += String.format(" \"[0:v][1:v] overlay=%d:%d:enable='between(t,%d,%d)'%s", captionX,
                    captionY, s.startTime / 1000, s.endTime / 1000,
                    (k == sentences.size() - 1) ? "\"" : " [tmp];");
        } else {
            filterString += String.format(" [tmp][%d:v] overlay=%d:%d:enable='between(t,%d,%d)'%s", k + 1,
                    captionX, captionY, s.startTime / 1000, s.endTime / 1000,
                    (k == sentences.size() - 1) ? "\"" : " [tmp];");
        }
        k++;
    }

    // Build final command string.
    String finalCommand = String.format("%s %s -codec:a copy %s", commandString, filterString, videoOutPath);
    System.out.println(finalCommand);

    // Attempt to run the final command string to embed the captions.
    try {
        Process p = Runtime.getRuntime().exec(finalCommand);
        try {
            if (p.waitFor() != 0) {
                // Embedding the captions failed.
                System.out.println("Image overlay failed.");
            }
        } catch (InterruptedException e) {
            // Embedding the captions was interrupted.
            System.out.println("Interrupted image overlay.");
        }
    } catch (IOException e) {
        // Command string failed to execute.
        System.out.println("Failed to execute image overlay.");
    }

    // Delete intermediate audio file.
    audio.delete();
    System.out.println("........................CAPTIONING COMPLETE........................");
}
From source file:org.tsho.dmc2.core.chart.TrajectoryRenderer.java
private void drawItem(Graphics2D g2, int item, int x, int y) {
    if (connectWithLines) {
        if (item > 0) {
            g2.drawLine(x, y, prevX, prevY);
        }
        prevX = x;
        prevY = y;
    }
    if (bigDots) {
        g2.fillRect(x - 1, y - 1, 3, 3);
    } else {
        g2.fillRect(x, y, 1, 1);
    }
}
From source file:edworld.pdfreader4humans.PDFReader.java
public BufferedImage createPageImage(int pageNumber, int scaling, Color inkColor, Color backgroundColor,
        boolean showStructure) throws IOException {
    Map<String, Font> fonts = new HashMap<String, Font>();
    PDRectangle cropBox = getPageCropBox(pageNumber);
    BufferedImage image = new BufferedImage(Math.round(cropBox.getWidth() * scaling),
            Math.round(cropBox.getHeight() * scaling), BufferedImage.TYPE_INT_ARGB);
    Graphics2D graphics = image.createGraphics();
    graphics.setBackground(backgroundColor);
    graphics.clearRect(0, 0, image.getWidth(), image.getHeight());
    graphics.setColor(backgroundColor);
    graphics.fillRect(0, 0, image.getWidth(), image.getHeight());
    graphics.setColor(inkColor);
    graphics.scale(scaling, scaling);
    for (Component component : getFirstLevelComponents(pageNumber))
        draw(component, graphics, inkColor, backgroundColor, showStructure, fonts);
    graphics.dispose();
    return image;
}