List of usage examples for java.awt FontMetrics stringWidth
public int stringWidth(String str)
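Before the full examples below, a minimal sketch of the typical call pattern (the class name, image size, font, and sample text here are illustrative, not taken from the examples): a FontMetrics object is obtained from a Graphics context after setting the desired Font, and stringWidth returns the advance width of the text in pixels for that font.

import java.awt.Font;
import java.awt.FontMetrics;
import java.awt.Graphics2D;
import java.awt.image.BufferedImage;

public class StringWidthDemo {
    public static void main(String[] args) {
        // Any Graphics2D works; an off-screen image is used so the sketch is self-contained.
        BufferedImage img = new BufferedImage(200, 50, BufferedImage.TYPE_INT_ARGB);
        Graphics2D g2 = img.createGraphics();
        g2.setFont(new Font(Font.SANS_SERIF, Font.PLAIN, 14));
        FontMetrics fm = g2.getFontMetrics();
        int width = fm.stringWidth("Hello, world"); // advance width in pixels
        int height = fm.getHeight();                // line height for the same font
        System.out.println(width + " x " + height);
        g2.dispose();
    }
}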
From source file:PeerPanel.java
private int truncatePoint(FontMetrics fontMetrics, String string, int boundsWidth) {
    // Recursively drop trailing characters until the string fits within boundsWidth,
    // then return the number of characters that fit.
    int stringWidth = fontMetrics.stringWidth(string);
    if (stringWidth > boundsWidth)
        return truncatePoint(fontMetrics, string.substring(0, string.length() - 1), boundsWidth);
    return string.length();
}
From source file:org.zaproxy.zap.extension.callgraph.CallGraphFrame.java
public Dimension getTextDimension(String text, FontMetrics metrics) {
    int hgt = metrics.getHeight();
    int adv = metrics.stringWidth(text);
    Dimension size = new Dimension(adv + 5, hgt + 5);
    return size;
}
From source file:HostPanel.java
private int truncatePoint(FontMetrics fontMetrics, String string, String suffix, int boundsWidth) {
    int stringWidth = fontMetrics.stringWidth(string + suffix);
    if (stringWidth > boundsWidth)
        return truncatePoint(fontMetrics, string.substring(0, string.length() - 1), suffix, boundsWidth);
    return string.length();
}
From source file:savant.view.tracks.TrackRenderer.java
public void drawFeatureLabel(Graphics2D g2, String geneName, double startXPos, double y) {
    FontMetrics fm = g2.getFontMetrics();
    double stringstartx = startXPos - fm.stringWidth(geneName) - 5;
    if (stringstartx <= 0) {
        // Label would run off the left edge; draw it at the margin on a translucent
        // rounded background so it stays readable over the track.
        Rectangle2D r = fm.getStringBounds(geneName, g2);
        int b = 2;
        Color textColor = g2.getColor();
        g2.setColor(new Color(255, 255, 255, 200));
        g2.fill(new RoundRectangle2D.Double(3.0, y - (fm.getHeight() - fm.getDescent()) - b,
                r.getWidth() + 2 * b, r.getHeight() + 2 * b, 8.0, 8.0));
        g2.setColor(textColor);
        g2.drawString(geneName, 5.0F, (float) y);
    } else {
        g2.drawString(geneName, (float) stringstartx, (float) y);
    }
}
From source file:com.fluidops.iwb.deepzoom.ImageLoader.java
private void generateIDCard(URI uri, Map<URI, Set<Value>> facets, String url, File file) {
    int width = 200;
    int height = 200;
    BufferedImage bi = new BufferedImage(width, height, BufferedImage.TYPE_INT_ARGB);
    Graphics2D ig2 = bi.createGraphics();
    ig2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
    ig2.setRenderingHint(RenderingHints.KEY_RENDERING, RenderingHints.VALUE_RENDER_QUALITY);

    /* Special ID card handling for certain entity types */
    /* TODO: special images based on type
    if (facets.containsKey(RDF.TYPE)) {
        Set<Value> facet = facets.get(RDF.TYPE);
        for (Value v : facet) {
            if (v.equals(Vocabulary.DCAT_DATASET)) {
                Image img = null;
                try {
                    img = ImageIO.read(new File("webapps/ROOT/images/rdf.jpg"));
                } catch (MalformedURLException e) {
                    logger.error(e.getMessage(), e);
                } catch (IOException e) {
                    logger.error("Could not get image");
                }
                ig2.drawImage(img, 0, 0, null);
                break;
            }
        }
    }
    */

    String label = EndpointImpl.api().getDataManager().getLabel(uri);
    Font font = new Font(Font.SANS_SERIF, Font.BOLD, 20);
    ig2.setFont(font);
    FontMetrics fontMetrics = ig2.getFontMetrics();
    int labelwidth = fontMetrics.stringWidth(label);
    if (labelwidth >= width) {
        // Scale the font size down so the label fits within the image width.
        int fontsize = 20 * width / labelwidth;
        font = new Font(Font.SANS_SERIF, Font.BOLD, fontsize);
        ig2.setFont(font);
        fontMetrics = ig2.getFontMetrics();
    }
    // Center the label horizontally and vertically.
    int x = (width - fontMetrics.stringWidth(label)) / 2;
    int y = (fontMetrics.getAscent() + (height - (fontMetrics.getAscent() + fontMetrics.getDescent())) / 2);
    ig2.setPaint(Color.black);
    ig2.drawString(label, x, y);

    BufferedOutputStream out;
    try {
        out = new BufferedOutputStream(new FileOutputStream(file));
        ImageIO.write(bi, "PNG", out);
        out.flush();
        out.close();
    } catch (IOException e) {
        logger.error(e.getMessage(), e);
    }
}
From source file:ucar.unidata.idv.control.chart.WayPoint.java
/**
 * Draws the annotation.
 *
 * @param g2 the graphics device.
 * @param plot the plot.
 * @param dataArea the data area.
 * @param domainAxis the domain axis.
 * @param rangeAxis the range axis.
 * @param rendererIndex the renderer index.
 * @param info an optional info object that will be populated with
 *             entity information.
 */
public void draw(Graphics2D g2, XYPlot plot, Rectangle2D dataArea, ValueAxis domainAxis, ValueAxis rangeAxis,
        int rendererIndex, PlotRenderingInfo info) {
    super.setGraphicsState(g2);
    if (!getPlotWrapper().okToDraw(this)) {
        return;
    }
    g2.setStroke(new BasicStroke());
    if (false && getSelected()) {
        g2.setColor(COLOR_SELECTED);
    } else {
        g2.setColor(getColor());
    }
    x = getXFromValue(dataArea, domainAxis);
    int width2 = (int) (ANNOTATION_WIDTH / 2);
    int bottom = (int) (dataArea.getY() + dataArea.getHeight());
    y = bottom;
    int[] xs = { x - width2, x + width2, x, x - width2 };
    int[] ys = { bottom - ANNOTATION_WIDTH, bottom - ANNOTATION_WIDTH, bottom, bottom - ANNOTATION_WIDTH };
    g2.fillPolygon(xs, ys, xs.length);
    if ((getName() != null) && !isForAnimation) {
        // Center the way point's name above the marker using its measured width.
        FontMetrics fm = g2.getFontMetrics();
        int width = fm.stringWidth(getName());
        int textLeft = x - width / 2;
        g2.drawString(getName(), textLeft, bottom - ANNOTATION_WIDTH - 2);
    }
    if (getSelected()) {
        g2.setColor(COLOR_SELECTED);
        g2.drawPolygon(xs, ys, xs.length);
    }
    if (getPropertyListeners().hasListeners(PROP_WAYPOINTVALUE) || isForAnimation) {
        g2.setColor(Color.gray);
        g2.drawLine(x, y - ANNOTATION_WIDTH, x, (int) dataArea.getY());
    }
    boolean playSound = canPlaySound();
    if (isForAnimation) {
        if (clockImage == null) {
            clockImage = GuiUtils.getImage("/auxdata/ui/icons/clock.gif");
        }
        if (playSound) {
            g2.drawImage(clockImage, x - 8, (int) dataArea.getY() + 1, null);
        } else {
            g2.drawImage(clockImage, x - 8, (int) dataArea.getY() + 1, null);
        }
    }
    if (canPlaySound()) {
        if (noteImage == null) {
            noteImage = GuiUtils.getImage("/auxdata/ui/icons/note.gif");
        }
        if (isForAnimation) {
            g2.drawImage(noteImage, x + 8, (int) dataArea.getY() + 1, null);
        } else {
            g2.drawImage(noteImage, x, (int) dataArea.getY() + 1, null);
        }
    }
    if (minutesSpan > 0.0) {
        int left = (int) domainAxis.valueToJava2D(domainValue - (minutesSpan * 60000) / 2, dataArea,
                RectangleEdge.BOTTOM);
        int right = (int) domainAxis.valueToJava2D(domainValue + (minutesSpan * 60000) / 2, dataArea,
                RectangleEdge.BOTTOM);
        g2.setPaint(Color.black);
        g2.setStroke(new BasicStroke(2.0f));
        g2.drawLine(left, y, right, y);
    }
}
From source file:net.sourceforge.processdash.ui.web.CGIChartBase.java
protected Font getFontSizeToFit(Graphics2D g, Font baseFont, String text, int width) {
    // Starting from baseFont, shrink the font by 5% per iteration (at most 100 times)
    // until the rendered text fits within the requested width.
    FontMetrics m = g.getFontMetrics(baseFont);
    int currentWidth = m.stringWidth(text);
    Font result = baseFont;
    int maxIter = 100;
    while (currentWidth > width && maxIter-- > 0) {
        result = result.deriveFont(result.getSize2D() * 0.95f);
        m = g.getFontMetrics(result);
        currentWidth = m.stringWidth(text);
    }
    return result;
}
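A minimal usage sketch for the method above (g2, title, availableWidth, titleX, and titleY are hypothetical caller-side values, not part of the original source):

// Hypothetical caller: shrink a chart title until it fits the available width, then draw it.
Font fitted = getFontSizeToFit(g2, g2.getFont(), title, availableWidth);
g2.setFont(fitted);
g2.drawString(title, titleX, titleY);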
From source file:nz.ac.massey.cs.gql4jung.browser.resultviews.GraphBasedResultView.java
private void configureRenderer(RenderContext context, final MotifInstance instance) {
    final Map<String, Color> colMap = createColorMap(instance);
    context.setEdgeDrawPaintTransformer(new Transformer<VisualEdge, Paint>() {
        @Override
        public Paint transform(VisualEdge e) {
            return e.isInMotif() ? new Color(0, 0, 0, 100) : new Color(100, 100, 100, 50);
        }
    });
    context.setVertexLabelTransformer(new Transformer<VisualVertex, String>() {
        @Override
        public String transform(VisualVertex v) {
            String role = v.getRole();
            StringBuffer b = new StringBuffer().append("<html>");
            if (role != null) {
                b.append("<<").append(role).append(">>").append("<br/>");
            }
            b.append(v.getName()).append("</html>");
            return b.toString();
        }
    });
    context.setEdgeLabelTransformer(new Transformer<VisualEdge, String>() {
        @Override
        public String transform(VisualEdge e) {
            return "<<" + e.getType() + ">>";
        }
    });
    context.setVertexFillPaintTransformer(new Transformer<VisualVertex, Paint>() {
        @Override
        public Paint transform(VisualVertex v) {
            Color c = colMap.get(v.getNamespace());
            if (c != null)
                return c;
            else
                return Color.white;
        }
    });
    context.setVertexDrawPaintTransformer(new Transformer<VisualVertex, Paint>() {
        @Override
        public Paint transform(VisualVertex v) {
            return v.isInMotif() ? Color.black : Color.gray;
        }
    });
    context.setVertexStrokeTransformer(new Transformer<VisualVertex, Stroke>() {
        public Stroke transform(VisualVertex v) {
            if (v.isInMotif()) {
                if (v.getRole() != null)
                    return GraphRendererConstants.STROKE_BOLD;
                else
                    return GraphRendererConstants.STROKE_NORMAL;
            } else {
                return GraphRendererConstants.STROKE_NONE;
            }
        }
    });
    context.setVertexShapeTransformer(new Transformer<VisualVertex, Shape>() {
        @Override
        public Shape transform(VisualVertex v) {
            // Size each vertex box to its label width (plus padding), with a configurable minimum.
            String longLabel = v.getName();
            Font f = settings.getFont4Participants();
            FontMetrics FM = GraphBasedResultView.this.getGraphics().getFontMetrics(f);
            int W = Math.max(settings.getMinBoxWidth(), FM.stringWidth(longLabel) + 10);
            int H = v.getRole() != null ? settings.getBoxHeight4Participants()
                    : settings.getBoxHeight4NonParticipants();
            return new Rectangle2D.Float(-W / 2, -H / 2, W, H);
        }
    });
    context.setVertexFontTransformer(new Transformer<VisualVertex, Font>() {
        @Override
        public Font transform(VisualVertex v) {
            return v.getRole() != null ? settings.getFont4Participants() : settings.getFont4NonParticipants();
        }
    });
    context.setEdgeFontTransformer(new Transformer<VisualEdge, Font>() {
        @Override
        public Font transform(VisualEdge e) {
            return settings.getFont4Edges();
        }
    });
}
From source file:net.java.sip.communicator.impl.osdependent.jdic.SystrayServiceJdicImpl.java
private BufferedImage createOverlayImage(String text) {
    int size = 16;
    BufferedImage image = new BufferedImage(size, size, BufferedImage.TYPE_INT_ARGB);
    Graphics2D g = image.createGraphics();
    g.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);

    // background
    g.setPaint(new Color(0, 0, 0, 102));
    g.fillRoundRect(0, 0, size, size, size, size);

    // filling
    int mainRadius = 14;
    g.setPaint(new Color(255, 98, 89));
    g.fillRoundRect(size / 2 - mainRadius / 2, size / 2 - mainRadius / 2, mainRadius, mainRadius, size, size);

    // text
    Font font = g.getFont();
    g.setFont(new Font(font.getName(), Font.BOLD, 9));
    FontMetrics fontMetrics = g.getFontMetrics();
    int textWidth = fontMetrics.stringWidth(text);
    g.setColor(Color.white);
    g.drawString(text, size / 2 - textWidth / 2,
            size / 2 - fontMetrics.getHeight() / 2 + fontMetrics.getAscent());
    return image;
}
From source file:com.stanley.captioner.Transcriber.java
public void start() {
    // Create stream speech recognizer.
    StreamSpeechRecognizer recognizer = null;
    try {
        recognizer = new StreamSpeechRecognizer(config);
    } catch (IOException e) {
        System.out.println("Failed to create recognizer.");
    }

    // Open print writer for writing text output.
    PrintWriter writer = null;
    try {
        writer = new PrintWriter(textOut);
    } catch (FileNotFoundException e) {
        System.out.println("Failed to create print writer.");
    }

    // Open stream for first pass.
    InputStream stream = null;
    try {
        stream = new FileInputStream(audio);
    } catch (FileNotFoundException e) {
        System.out.println("Failed to stream file.");
    }

    // Initialize loop variables.
    SpeechResult result;
    int resultCount = 0;
    Stats stats = recognizer.createStats(1);

    // Start recognizer for first pass.
    recognizer.startRecognition(stream);
    System.out.println("First pass (stats collection) started.");

    // First pass loop to collect statistics for model adaptation.
    while ((result = recognizer.getResult()) != null) {
        try {
            stats.collect(result);
        } catch (Exception e) {
            System.out.println("Failed to collect stats.");
        }
        resultCount++;
        // Toggle for testing.
        if (quickTest && resultCount > 5) {
            break;
        }
    }

    // Close recognizer (end of first pass).
    recognizer.stopRecognition();
    System.out.println("Stats collection stopped.");

    // Transform model using model adaptation.
    Transform transform = stats.createTransform();
    recognizer.setTransform(transform);

    // Reopen stream for second pass.
    stream = null;
    try {
        stream = new FileInputStream(audio);
    } catch (FileNotFoundException e) {
        System.out.println("Failed to stream file.");
    }

    // Start recognizer for second pass.
    recognizer.startRecognition(stream);
    System.out.println("Second pass started.");

    // Create output text file header.
    writer.printf("%-20s", "WORD:");
    writer.printf("%20s", "CONFIDENCE:");
    writer.printf("%20s", "START TIME:");
    writer.printf("%20s", "END_TIME:");
    writer.println();
    for (int i = 0; i < 80; i++) {
        writer.print("-");
    }
    writer.println();

    // Initialize loop variables.
    int wordCount = 0;
    String sentence = "";
    int sentenceLength = 0;
    long sentenceStart = 0;
    long sentenceEnd = 0;
    ArrayList<Sentence> sentences = new ArrayList<>();

    // Second pass loop to calculate sentences.
    RECOG: while ((result = recognizer.getResult()) != null) {
        for (WordResult wordResult : result.getWords()) {
            wordCount++;
            String word = wordResult.getWord().toString();
            double confidence = wordResult.getConfidence();
            long startTime = wordResult.getTimeFrame().getStart();
            long endTime = wordResult.getTimeFrame().getEnd();
            writer.printf("%-20s", word);
            writer.printf("%20.1f", confidence);
            writer.printf("%20d", startTime);
            writer.printf("%20d", endTime);
            writer.println();
            if (sentenceLength + word.length() < 40) {
                // Add to current sentence.
                sentence += " " + word;
                sentenceLength += word.length();
                sentenceEnd = endTime;
            } else {
                // End of current sentence, store and start a new one.
                sentences.add(new Sentence(sentence, sentenceStart, sentenceEnd));
                sentenceStart = sentenceEnd;
                sentence = "";
                sentenceLength = 0;
            }
            // Toggle for testing.
            if (quickTest && wordCount > 50) {
                break RECOG;
            }
        }
    }

    // Close print writer and recognizer (end of second pass).
    writer.close();
    recognizer.stopRecognition();
    System.out.println("Second pass stopped.");

    // Create folder for caption images.
    String imageDirPath = FilenameUtils.concat(textOut.getParent(),
            FilenameUtils.getBaseName(textOut.getAbsolutePath()));
    System.out.println(imageDirPath);
    File imageDir = new File(imageDirPath);
    if (!imageDir.exists()) {
        // Create the folder if it doesn't already exist.
        imageDir.mkdir();
    }

    // Calculate video output path.
    String videoOutPath = FilenameUtils.concat(textOut.getParent(),
            FilenameUtils.getBaseName(textOut.getAbsolutePath()) + ".mp4");
    System.out.println(videoOutPath);

    // Initialize a command string for overlaying the captions.
    String commandString = String.format("%s -y -loglevel quiet -i %s", new Converter().getFFmpegPath(),
            videoIn.getAbsolutePath());
    System.out.println(commandString);

    // Initialize a complex filter for overlaying the captions.
    String filterString = "-filter_complex";

    // Acquire a probe object for collecting video details.
    Converter converter = new Converter();
    FFprobe ffprobe = null;
    try {
        ffprobe = new FFprobe(converter.getFFprobePath());
    } catch (IOException e) {
        System.out.println("Failed to find ffprobe.");
    }

    // Probe the video for details.
    FFmpegProbeResult probeResult = null;
    try {
        probeResult = ffprobe.probe(videoIn.getAbsolutePath());
    } catch (IOException e) {
        System.out.println("Failed to probe video file.");
    }

    // Get the width and height of the video.
    FFmpegStream videoStream = probeResult.getStreams().get(0);
    int videoWidth = videoStream.width;
    int videoHeight = videoStream.height;

    // Calculate the x and y coordinates of the captions.
    int captionX = (videoWidth / 2) - 220;
    int captionY = videoHeight - 25 - 10;

    // Loop over the sentences, generate captions, and build command string.
    int k = 0;
    for (Sentence s : sentences) {
        // Create caption image from sentence.
        BufferedImage bi = new BufferedImage(440, 50, BufferedImage.TYPE_INT_ARGB);
        Graphics2D g = bi.createGraphics();
        g.setPaint(new Color(0, 0, 0, 128));
        g.fillRect(0, 0, 440, 50);
        g.setPaint(new Color(255, 255, 255, 255));
        g.setFont(new Font("Serif", Font.BOLD, 20));
        FontMetrics fm = g.getFontMetrics();
        int x = bi.getWidth() - fm.stringWidth(s.text) - 5;
        int y = fm.getHeight() - 5;
        g.drawString(s.text, x, y);
        g.dispose();

        // Write the image to file for future reference.
        String suffix = String.format("caption-%03d.png", k);
        String imagePath = FilenameUtils.concat(imageDirPath, suffix);
        try {
            File imageFile = new File(imagePath);
            ImageIO.write(bi, "png", imageFile);
        } catch (IOException e) {
            System.out.println("Failed to write caption image to file.");
        }

        // Add the caption image path to the command string.
        commandString += " -i " + imagePath;

        // Add an entry to the complex filter with the caption timeframe.
        if (k == 0) {
            filterString += String.format(" \"[0:v][1:v] overlay=%d:%d:enable='between(t,%d,%d)'%s", captionX,
                    captionY, s.startTime / 1000, s.endTime / 1000,
                    (k == sentences.size() - 1) ? "\"" : " [tmp];");
        } else {
            filterString += String.format(" [tmp][%d:v] overlay=%d:%d:enable='between(t,%d,%d)'%s", k + 1,
                    captionX, captionY, s.startTime / 1000, s.endTime / 1000,
                    (k == sentences.size() - 1) ? "\"" : " [tmp];");
        }
        k++;
    }

    // Build final command string.
    String finalCommand = String.format("%s %s -codec:a copy %s", commandString, filterString, videoOutPath);
    System.out.println(finalCommand);

    // Attempt to run the final command string to embed the captions.
    try {
        Process p = Runtime.getRuntime().exec(finalCommand);
        try {
            if (p.waitFor() != 0) {
                // Embedding the captions failed.
                System.out.println("Image overlay failed.");
            }
        } catch (InterruptedException e) {
            // Embedding the captions was interrupted.
            System.out.println("Interrupted image overlay.");
        }
    } catch (IOException e) {
        // Command string failed to execute.
        System.out.println("Failed to execute image overlay.");
    }

    // Delete intermediate audio file.
    audio.delete();
    System.out.println("........................CAPTIONING COMPLETE........................");
}