List of usage examples for java.awt Rectangle getWidth
public double getWidth()

Returns the width of the bounding Rectangle in double precision.
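Before the project examples below, a minimal self-contained sketch (class and variable names are illustrative, not taken from any of the projects) showing that getWidth() returns the int-backed width as a double, plus the cast-back-to-int pattern that recurs throughout the examples:

import java.awt.Rectangle;

public class RectangleGetWidthDemo {
    public static void main(String[] args) {
        Rectangle bounds = new Rectangle(10, 20, 300, 150);
        // width/height are stored as public int fields, but getWidth()/getHeight() return double
        double w = bounds.getWidth();   // 300.0
        double h = bounds.getHeight();  // 150.0
        // common pattern in the examples below: cast back to int for pixel arithmetic
        int pixelWidth = (int) bounds.getWidth();
        System.out.println(w + " x " + h + " (" + pixelWidth + " px wide)");
    }
}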
From source file:org.apache.fop.render.ps.PSImageHandlerEPS.java
/** {@inheritDoc} */
public void handleImage(RenderingContext context, Image image, Rectangle pos) throws IOException {
    PSRenderingContext psContext = (PSRenderingContext) context;
    PSGenerator gen = psContext.getGenerator();
    ImageRawEPS eps = (ImageRawEPS) image;
    float x = (float) pos.getX() / 1000f;
    float y = (float) pos.getY() / 1000f;
    float w = (float) pos.getWidth() / 1000f;
    float h = (float) pos.getHeight() / 1000f;
    ImageInfo info = image.getInfo();
    Rectangle2D bbox = eps.getBoundingBox();
    if (bbox == null) {
        bbox = new Rectangle2D.Double();
        bbox.setFrame(new Point2D.Double(), info.getSize().getDimensionPt());
    }
    InputStream in = eps.createInputStream();
    try {
        String resourceName = info.getOriginalURI();
        if (resourceName == null) {
            resourceName = "inline image";
        }
        PSImageUtils.renderEPS(in, resourceName, new Rectangle2D.Float(x, y, w, h), bbox, gen);
    } finally {
        IOUtils.closeQuietly(in);
    }
}
From source file:org.eclipse.jubula.rc.javafx.tester.adapter.TreeTableOperationContext.java
@Override
public Rectangle getNodeBounds(final Object node) {
    scrollNodeToVisible(node);
    return EventThreadQueuerJavaFXImpl.invokeAndWait("getNodeBounds", //$NON-NLS-1$
            new Callable<Rectangle>() {
                @Override
                public Rectangle call() throws Exception {
                    TreeTableView<?> treeTable = getTree();
                    treeTable.layout();
                    TreeItem<?> item = (TreeItem<?>) node;
                    List<TreeTableCell> cells = new NodeTraverseHelper<TreeTableCell>()
                            .getInstancesOf(treeTable, TreeTableCell.class);
                    for (TreeTableCell<?, ?> cell : cells) {
                        // Null checks because the virtual flow creates cells
                        // which might not be associated with a row or an item
                        TreeTableRow<?> ttRow = cell.getTreeTableRow();
                        if (ttRow == null) {
                            continue;
                        }
                        TreeItem<?> checkItem = ttRow.getTreeItem();
                        if (checkItem == null) {
                            continue;
                        }
                        if (item != null && checkItem.equals(item)
                                && treeTable.getColumns().indexOf(cell.getTableColumn()) == m_column) {
                            Rectangle b = NodeBounds.getAbsoluteBounds(cell);
                            Rectangle treeB = NodeBounds.getAbsoluteBounds(treeTable);
                            return new Rectangle(Math.abs(treeB.x - b.x), Math.abs(treeB.y - b.y),
                                    Rounding.round(b.getWidth()), Rounding.round(b.getHeight()));
                        }
                    }
                    return null;
                }
            });
}
From source file:org.eclipse.jubula.rc.swing.listener.RecordActions.java
/**
 * creates CAP for Click in Component
 * @param id IComponentIdentifier
 * @param me MouseEvent
 * @param source Component
 */
protected void clickInComponent(IComponentIdentifier id, MouseEvent me, Component source) {
    int clickcount = me.getClickCount();
    if (clickcount < 1) {
        clickcount = 1;
    }
    String clCount = (new Integer(clickcount).toString());
    String mbutton = (new Integer(me.getButton()).toString());
    Action a = m_recordHelper.compSysToAction(id, "CompSystem.ClickDirect"); //$NON-NLS-1$
    Rectangle bounds = me.getComponent().getBounds();
    int percentX = (int) (me.getX() / bounds.getWidth() * 100);
    String percentXString = new Integer(percentX).toString();
    int percentY = (int) (me.getY() / bounds.getHeight() * 100);
    String percentYString = new Integer(percentY).toString();
    String units = Constants.REC_UNITS;
    List parValues = new LinkedList();
    parValues.add(clCount);
    parValues.add(mbutton);
    parValues.add(percentXString);
    parValues.add(units);
    parValues.add(percentYString);
    parValues.add(units);
    String logName = createLogicalName(source, id);
    createCAP(a, id, parValues, logName);
}
From source file:org.freecine.filmscan.ScanStrip.java
/**
 * Try to find perforation corners using a (modified) Hough transform. After the Hough
 * transform, matching pairs of top and bottom corners are found and clustered into the
 * pointClusters list.
 */
void houghTransform() {
    // Sobel transform of stripImage
    KernelJAI sxKernel = new KernelJAI(3, 3,
            new float[] { -1.0f, 0.0f, 1.0f, -2.0f, 0.0f, 2.0f, -1.0f, 0.0f, 1.0f });
    KernelJAI syKernel = new KernelJAI(3, 3,
            new float[] { -1.0f, -2.0f, -1.0f, 0.0f, 0.0f, 0.0f, 1.0f, 2.0f, 1.0f });
    RenderedImage dblImg = FormatDescriptor.create(stripImage, DataBuffer.TYPE_DOUBLE, null);
    RenderedImage sxImg = ConvolveDescriptor.create(dblImg, sxKernel, null);
    RenderedImage syImg = ConvolveDescriptor.create(dblImg, syKernel, null);
    SampleModel sm = sxImg.getSampleModel();
    int nbands = sm.getNumBands();
    double[] sxPixel = new double[nbands];
    double[] syPixel = new double[nbands];
    /* We are interested only in the left side of the strip as the perforations are there */
    Rectangle perfArea = new Rectangle(0, 0, stripImage.getWidth() / 4, stripImage.getHeight());
    RectIter sxIter = RectIterFactory.create(sxImg, perfArea);
    RectIter syIter = RectIterFactory.create(syImg, perfArea);
    int width = (int) perfArea.getWidth();
    int height = (int) perfArea.getHeight();
    /*
     * We use 2 accumulators - one for detecting the upper right corner, one for the lower
     * right corner. As the original is huge and the details we are looking for are tiny,
     * we use a sliding window that stores only the relevant part of the accumulator.
     */
    int accumHeight = (int) maxCornerRadius + 2;
    int[][] startAccum = new int[(int) (maxCornerRadius - minCornerRadius)][width * accumHeight];
    int[][] endAccum = new int[(int) (maxCornerRadius - minCornerRadius)][width * accumHeight];
    List<Point> startCorners = new ArrayList<Point>();
    List<Point> endCorners = new ArrayList<Point>();
    int y = 0;
    int maxVal = 0;
    if (analysisListener != null) {
        analysisListener.scanAnalysisProgress(0, height);
    }
    while (!sxIter.nextLineDone() && !syIter.nextLineDone()) {
        if (y % 1000 == 0 && y > 0) {
            System.out.println("" + y + " lines analyzed");
        }
        sxIter.startPixels();
        syIter.startPixels();
        int x = 0;
        while (!sxIter.nextPixelDone() && !syIter.nextPixelDone()) {
            sxIter.getPixel(sxPixel);
            syIter.getPixel(syPixel);
            double isq = sxPixel[0] * sxPixel[0] + syPixel[0] * syPixel[0];
            if (isq > EDGE_MIN_GRADIENT * EDGE_MIN_GRADIENT) {
                // This seems like a border
                if (syPixel[0] <= 0 && sxPixel[0] >= 0) {
                    // Upper right corner candidate
                    double intensity = Math.sqrt(isq);
                    for (double r = minCornerRadius; r < maxCornerRadius; r += 1.0) {
                        double cx = (double) x - r * sxPixel[0] / intensity;
                        double cy = (double) y - r * syPixel[0] / intensity;
                        if (cx > 0.0) {
                            int accumLine = (int) cy % accumHeight;
                            startAccum[(int) (r - minCornerRadius)][(int) cx + width * accumLine]++;
                            if (startAccum[(int) (r - minCornerRadius)][(int) cx + width * accumLine] > maxVal) {
                                maxVal = startAccum[(int) (r - minCornerRadius)][(int) cx + width * accumLine];
                            }
                        }
                    }
                }
                if (syPixel[0] >= 0 && sxPixel[0] >= 0) {
                    // Lower right corner candidate
                    double intensity = Math.sqrt(isq);
                    for (double r = minCornerRadius; r < maxCornerRadius; r += 1.0) {
                        double cx = (double) x - r * sxPixel[0] / intensity;
                        double cy = (double) y - r * syPixel[0] / intensity;
                        if (cx > 0.0 && cy > 0.0) {
                            int accumLine = (int) cy % accumHeight;
                            endAccum[(int) (r - minCornerRadius)][(int) cx + width * accumLine]++;
                            if (endAccum[(int) (r - minCornerRadius)][(int) cx + width * accumLine] > maxVal) {
                                maxVal = endAccum[(int) (r - minCornerRadius)][(int) cx + width * accumLine];
                            }
                        }
                    }
                }
            }
            x++;
        }
        y++;
        /*
         * 1 line processed - check if there are corner candidates in the accumulator line
         * we are going to overwrite
         */
        int y2 = y - accumHeight;
        int l = y % accumHeight;
        if (y2 > 0) {
            for (int n = 0; n < perfArea.getWidth(); n++) {
                for (int r = 0; r < (int) (maxCornerRadius - minCornerRadius); r++) {
                    if (startAccum[r][n + width * l] >= CORNER_MIN_HOUGH) {
                        // Is this a local maximum?
                        int val = startAccum[r][n + width * l];
                        if (val == getLocalMaxima(startAccum, r, n, y, width)) {
                            startCorners.add(new Point(n, y));
                            System.out.println(String.format("Found corner, quality = %d, r = %d, (%d, %d)",
                                    val, r, n, y));
                            // imageDataSingleArray[n+width*y] = (byte) 0xff;
                        }
                    }
                    if (endAccum[r][n + width * l] > CORNER_MIN_HOUGH) {
                        // Is this a local maximum?
                        int val = endAccum[r][n + width * l];
                        if (val == getLocalMaxima(endAccum, r, n, y2, width)) {
                            endCorners.add(new Point(n, y2));
                            System.out.println(String.format("Found end corner, quality = %d, r = %d, (%d, %d)",
                                    val, r, n, y2));
                            // imageDataSingleArray[n+width*y2] = (byte) 0x80;
                        }
                    }
                }
            }
        }
        // Zero the line just analyzed - it will be reused for the next line
        for (int n = 0; n < perfArea.getWidth(); n++) {
            for (int r = 0; r < (int) (maxCornerRadius - minCornerRadius); r++) {
                startAccum[r][n + width * (y % accumHeight)] = 0;
                endAccum[r][n + width * (y % accumHeight)] = 0;
            }
        }
        if ((y % 100 == 1) && analysisListener != null) {
            analysisListener.scanAnalysisProgress(y - 1, height);
        }
    }
    if (analysisListener != null) {
        analysisListener.scanAnalysisProgress(height, height);
    }
    /*
     * Find perforations, i.e. pairs of start and end corners that are within the specified
     * range from each other
     */
    for (Point sp : startCorners) {
        for (Point ep : endCorners) {
            if (ep.y - sp.y > CC_MAX_DIST) {
                break;
            }
            if (Math.abs(ep.x - sp.x) < 10 && ep.y - sp.y > CC_MIN_DIST) {
                Perforation p = new Perforation();
                p.x = (ep.x + sp.x) >> 1;
                p.y = (ep.y + sp.y) >> 1;
                // imageDataSingleArray[p.x+width*p.y] = (byte) 0x40;
                addPointToCluster(p.x, p.y);
            }
        }
    }
    System.out.println(String.format("%d clusters:", pointClusters.size()));
    for (PointCluster c : pointClusters) {
        System.out.println(String.format("  (%d, %d) %d points",
                c.getCentroidX(), c.getCentroidY(), c.getPointCount()));
        // imageDataSingleArray[c.getCentroidX()+width*c.getCentroidY()] = (byte) 0xff;
    }
}
From source file:org.mitre.mpf.wfm.camel.operations.detection.trackmerging.TrackMergingProcessor.java
private boolean intersects(Track track1, Track track2, double segMinOverlap) {
    if (!StringUtils.equalsIgnoreCase(track1.getType(), track2.getType())) {
        // Tracks of different types should not be candidates for merger.
        // Ex: It would make no sense to merge a motion and a speech track.
        return false;
    } else if (StringUtils.equalsIgnoreCase(track1.getType(), "SPEECH")) {
        // Speech tracks should not be candidates for merger.
        return false;
    }
    Detection track1End = track1.getDetections().last();
    Detection track2End = track2.getDetections().first();
    Detection first = (track1End.getMediaOffsetFrame() < track2End.getMediaOffsetFrame()) ? track1End : track2End;
    Detection second = (first == track1End) ? track2End : track1End;
    Rectangle rectangle1 = new Rectangle(first.getX(), first.getY(), first.getWidth(), first.getHeight());
    Rectangle rectangle2 = new Rectangle(second.getX(), second.getY(), second.getWidth(), second.getHeight());
    if (rectangle1.getWidth() == 0 || rectangle2.getWidth() == 0
            || rectangle1.getHeight() == 0 || rectangle2.getHeight() == 0) {
        return false;
    }
    Rectangle intersection = rectangle1.intersection(rectangle2);
    if (intersection.isEmpty()) {
        return false;
    }
    double intersectArea = intersection.getHeight() * intersection.getWidth();
    double unionArea = (rectangle2.getHeight() * rectangle2.getWidth())
            + (rectangle1.getHeight() * rectangle1.getWidth()) - intersectArea;
    double percentOverlap = intersectArea / unionArea;
    return percentOverlap > segMinOverlap;
}
From source file:org.netbeans.jbatch.modeler.specification.model.job.util.JobUtil.java
public static void updateDiagramFlowElement(BatchPlane plane, Widget widget) {
    //Diagram Model
    if (widget instanceof NodeWidget) { //reverse ref
        NodeWidget nodeWidget = (NodeWidget) widget;
        Rectangle rec = nodeWidget.getSceneViewBound();
        BatchShape shape = new BatchShape();
        shape.setBounds(new Bounds(rec)); //(new Bounds(flowNodeWidget.getBounds()));
        shape.setBatchElement(((BaseElementWidget) nodeWidget).getId());
        shape.setId(((BaseElementWidget) nodeWidget).getId() + "_gui");
        if (nodeWidget.getLabelManager() != null && nodeWidget.getLabelManager().isVisible()
                && nodeWidget.getLabelManager().getLabel() != null
                && !nodeWidget.getLabelManager().getLabel().trim().isEmpty()) {
            Rectangle bound = nodeWidget.getLabelManager().getLabelWidget().getParentWidget()
                    .getPreferredBounds();
            bound = nodeWidget.getLabelManager().getLabelWidget().getParentWidget().convertLocalToScene(bound);
            Rectangle rec_label = new Rectangle(bound.x, bound.y, (int) bound.getWidth(),
                    (int) bound.getHeight());
            BatchLabel label = new BatchLabel();
            label.setBounds(new Bounds(rec_label));
            shape.setBatchLabel(label);
        }
        plane.addDiagramElement(shape);
        ShapeDesign shapeDesign = null; // BatchShapeDesign XML Location Change Here
        if (nodeWidget instanceof FlowNodeWidget) {
            FlowNode flowNode = (FlowNode) ((FlowNodeWidget) nodeWidget).getBaseElementSpec();
            if (flowNode.getExtensionElements() == null) {
                flowNode.setExtensionElements(new ExtensionElements());
            }
            ExtensionElements extensionElements = flowNode.getExtensionElements();
            for (Object obj : extensionElements.getAny()) {
                if (obj instanceof Element) { //first time save
                    Element element = (Element) obj;
                    if ("ShapeDesign".equals(element.getNodeName())) {
                        shapeDesign = getBatchShapeDesign(nodeWidget);
                        extensionElements.getAny().remove(obj);
                        extensionElements.getAny().add(shapeDesign);
                        break;
                    }
                } else if (obj instanceof ShapeDesign) {
                    shapeDesign = getBatchShapeDesign(nodeWidget);
                    extensionElements.getAny().remove(obj);
                    extensionElements.getAny().add(shapeDesign);
                    break;
                }
            }
        }
        if (shapeDesign == null) {
            if (nodeWidget instanceof FlowNodeWidget) {
                FlowNode flowNode = (FlowNode) ((FlowNodeWidget) nodeWidget).getBaseElementSpec();
                ExtensionElements extensionElements = flowNode.getExtensionElements();
                shapeDesign = getBatchShapeDesign(nodeWidget);
                extensionElements.getAny().add(shapeDesign);
            }
        }
        // if (nodeWidget instanceof SubProcessWidget) { //Sub_Commented
        //     SubProcessWidget subProcessWidget = (SubProcessWidget) nodeWidget;
        //     for (FlowElementWidget flowElementChildrenWidget : subProcessWidget.getFlowElements()) {
        //         updateDiagramFlowElement(plane, (Widget) flowElementChildrenWidget);
        //     }
        // }
    } else if (widget instanceof EdgeWidget) {
        EdgeWidget edgeWidget = (EdgeWidget) widget;
        BatchEdge edge = new BatchEdge();
        for (java.awt.Point point : edgeWidget.getControlPoints()) {
            edge.addWaypoint(point);
        }
        edge.setBatchElement(((BaseElementWidget) edgeWidget).getId());
        edge.setId(((BaseElementWidget) edgeWidget).getId() + "_gui");
        if (widget instanceof SequenceFlowWidget) {
            if (edgeWidget.getLabelManager() != null && edgeWidget.getLabelManager().isVisible()
                    && edgeWidget.getLabelManager().getLabel() != null
                    && !edgeWidget.getLabelManager().getLabel().trim().isEmpty()) {
                Rectangle bound = edgeWidget.getLabelManager().getLabelWidget().getParentWidget()
                        .getPreferredBounds();
                bound = edgeWidget.getLabelManager().getLabelWidget().getParentWidget()
                        .convertLocalToScene(bound);
                Rectangle rec = new Rectangle(bound.x, bound.y, (int) bound.getWidth(),
                        (int) bound.getHeight());
                BatchLabel label = new BatchLabel();
                label.setBounds(new Bounds(rec));
                edge.setBatchLabel(label);
            }
        }
        plane.addDiagramElement(edge);
    } else {
        throw new InvalidElmentException("Invalid Batch Element");
    }
}
From source file:org.netbeans.jbpmn.modeler.specification.bpmn.model.conversation.util.BPMNConversationUtil.java
public static void updateDiagramFlowElement(BPMNPlane plane, Widget widget) {
    //Diagram Model
    if (widget instanceof NodeWidget) { //reverse ref
        NodeWidget nodeWidget = (NodeWidget) widget;
        Rectangle rec = nodeWidget.getSceneViewBound();
        BPMNShape shape = new BPMNShape();
        shape.setBounds(new Bounds(rec)); //(new Bounds(flowNodeWidget.getBounds()));
        shape.setBpmnElement(((BaseElementWidget) nodeWidget).getId());
        shape.setId(((BaseElementWidget) nodeWidget).getId() + "_gui");
        if (nodeWidget.getLabelManager() != null && nodeWidget.getLabelManager().isVisible()
                && nodeWidget.getLabelManager().getLabel() != null
                && !nodeWidget.getLabelManager().getLabel().trim().isEmpty()) {
            Rectangle bound = nodeWidget.getLabelManager().getLabelWidget().getParentWidget()
                    .getPreferredBounds();
            bound = nodeWidget.getLabelManager().getLabelWidget().getParentWidget().convertLocalToScene(bound);
            Rectangle rec_label = new Rectangle(bound.x, bound.y, (int) bound.getWidth(),
                    (int) bound.getHeight());
            BPMNLabel label = new BPMNLabel();
            label.setBounds(new Bounds(rec_label));
            shape.setBPMNLabel(label);
        }
        plane.addDiagramElement(shape);
        ShapeDesign shapeDesign = null; // BPMNShapeDesign XML Location Change Here
        if (nodeWidget instanceof CollaborationNodeWidget) {
            TCollaborationNode collaborationNode = (TCollaborationNode) ((CollaborationNodeWidget) nodeWidget)
                    .getBaseElementSpec();
            if (collaborationNode.getExtensionElements() == null) {
                collaborationNode.setExtensionElements(new TExtensionElements());
            }
            TExtensionElements extensionElements = collaborationNode.getExtensionElements();
            for (Object obj : extensionElements.getAny()) {
                if (obj instanceof Element) { //first time save
                    Element element = (Element) obj;
                    if ("ShapeDesign".equals(element.getNodeName())) {
                        shapeDesign = getBPMNShapeDesign(nodeWidget);
                        extensionElements.getAny().remove(obj);
                        extensionElements.getAny().add(shapeDesign);
                        break;
                    }
                } else if (obj instanceof ShapeDesign) {
                    shapeDesign = getBPMNShapeDesign(nodeWidget);
                    extensionElements.getAny().remove(obj);
                    extensionElements.getAny().add(shapeDesign);
                    break;
                }
            }
        }
        if (shapeDesign == null) {
            if (nodeWidget instanceof CollaborationNodeWidget) {
                TCollaborationNode collaborationNode = (TCollaborationNode) ((CollaborationNodeWidget) nodeWidget)
                        .getBaseElementSpec();
                TExtensionElements extensionElements = collaborationNode.getExtensionElements();
                shapeDesign = getBPMNShapeDesign(nodeWidget);
                extensionElements.getAny().add(shapeDesign);
            }
        }
        // shape.setBpmnShapeDesign(getBPMNShapeDesign(nodeWidget));
        // if (nodeWidget instanceof SubProcessWidget) {
        //     SubProcessWidget subProcessWidget = (SubProcessWidget) nodeWidget;
        //     for (FlowElementWidget flowElementChildrenWidget : subProcessWidget.getFlowElements()) {
        //         updateDiagramFlowElement(plane, (Widget) flowElementChildrenWidget);
        //     }
        // }
    } else if (widget instanceof EdgeWidget) {
        EdgeWidget edgeWidget = (EdgeWidget) widget;
        BPMNEdge edge = new BPMNEdge();
        for (java.awt.Point point : edgeWidget.getControlPoints()) {
            edge.addWaypoint(point);
        }
        edge.setBpmnElement(((BaseElementWidget) edgeWidget).getId());
        edge.setId(((BaseElementWidget) edgeWidget).getId() + "_gui");
        if (widget instanceof ConversationLinkWidget || widget instanceof MessageFlowWidget) {
            // AssociationWidget has no label
            if (edgeWidget.getLabelManager() != null && edgeWidget.getLabelManager().isVisible()
                    && edgeWidget.getLabelManager().getLabel() != null
                    && !edgeWidget.getLabelManager().getLabel().trim().isEmpty()) {
                Rectangle bound = edgeWidget.getLabelManager().getLabelWidget().getParentWidget()
                        .getPreferredBounds();
                bound = edgeWidget.getLabelManager().getLabelWidget().getParentWidget()
                        .convertLocalToScene(bound);
                Rectangle rec = new Rectangle(bound.x, bound.y, (int) bound.getWidth(),
                        (int) bound.getHeight());
                BPMNLabel label = new BPMNLabel();
                label.setBounds(new Bounds(rec));
                edge.setBPMNLabel(label);
            }
        }
        plane.addDiagramElement(edge);
    } else {
        throw new InvalidElmentException("Invalid BPMN Element");
    }
}
From source file:org.netbeans.jbpmn.modeler.specification.bpmn.model.process.util.BPMNProcessUtil.java
public static void updateDiagramFlowElement(BPMNPlane plane, Widget widget) {
    //Diagram Model
    if (widget instanceof NodeWidget) { //reverse ref
        NodeWidget nodeWidget = (NodeWidget) widget;
        Rectangle rec = nodeWidget.getSceneViewBound();
        BPMNShape shape = new BPMNShape();
        shape.setBounds(new Bounds(rec)); //(new Bounds(flowNodeWidget.getBounds()));
        shape.setBpmnElement(((BaseElementWidget) nodeWidget).getId());
        shape.setId(((BaseElementWidget) nodeWidget).getId() + "_gui");
        if (nodeWidget.getLabelManager() != null && nodeWidget.getLabelManager().isVisible()
                && nodeWidget.getLabelManager().getLabel() != null
                && !nodeWidget.getLabelManager().getLabel().trim().isEmpty()) {
            Rectangle bound = nodeWidget.getLabelManager().getLabelWidget().getParentWidget()
                    .getPreferredBounds();
            bound = nodeWidget.getLabelManager().getLabelWidget().getParentWidget().convertLocalToScene(bound);
            Rectangle rec_label = new Rectangle(bound.x, bound.y, (int) bound.getWidth(),
                    (int) bound.getHeight());
            BPMNLabel label = new BPMNLabel();
            label.setBounds(new Bounds(rec_label));
            shape.setBPMNLabel(label);
        }
        plane.addDiagramElement(shape);
        ShapeDesign shapeDesign = null; // BPMNShapeDesign XML Location Change Here
        if (nodeWidget instanceof FlowNodeWidget) {
            TFlowNode flowNode = (TFlowNode) ((FlowNodeWidget) nodeWidget).getBaseElementSpec();
            if (flowNode.getExtensionElements() == null) {
                flowNode.setExtensionElements(new TExtensionElements());
            }
            TExtensionElements extensionElements = flowNode.getExtensionElements();
            for (Object obj : extensionElements.getAny()) {
                if (obj instanceof Element) { //first time save
                    Element element = (Element) obj;
                    if ("ShapeDesign".equals(element.getNodeName())) {
                        shapeDesign = getBPMNShapeDesign(nodeWidget);
                        extensionElements.getAny().remove(obj);
                        extensionElements.getAny().add(shapeDesign);
                        break;
                    }
                } else if (obj instanceof ShapeDesign) {
                    shapeDesign = getBPMNShapeDesign(nodeWidget);
                    extensionElements.getAny().remove(obj);
                    extensionElements.getAny().add(shapeDesign);
                    break;
                }
            }
        }
        if (shapeDesign == null) {
            if (nodeWidget instanceof FlowNodeWidget) {
                TFlowNode flowNode = (TFlowNode) ((FlowNodeWidget) nodeWidget).getBaseElementSpec();
                TExtensionElements extensionElements = flowNode.getExtensionElements();
                shapeDesign = getBPMNShapeDesign(nodeWidget);
                extensionElements.getAny().add(shapeDesign);
            }
        }
        // shape.setBpmnShapeDesign(getBPMNShapeDesign(nodeWidget));
        if (nodeWidget instanceof SubProcessWidget) {
            SubProcessWidget subProcessWidget = (SubProcessWidget) nodeWidget;
            for (FlowElementWidget flowElementChildrenWidget : subProcessWidget.getFlowElements()) {
                updateDiagramFlowElement(plane, (Widget) flowElementChildrenWidget);
            }
        }
    } else if (widget instanceof EdgeWidget) {
        EdgeWidget edgeWidget = (EdgeWidget) widget;
        BPMNEdge edge = new BPMNEdge();
        for (java.awt.Point point : edgeWidget.getControlPoints()) {
            edge.addWaypoint(point);
        }
        edge.setBpmnElement(((BaseElementWidget) edgeWidget).getId());
        edge.setId(((BaseElementWidget) edgeWidget).getId() + "_gui");
        if (widget instanceof SequenceFlowWidget) {
            if (edgeWidget.getLabelManager() != null && edgeWidget.getLabelManager().isVisible()
                    && edgeWidget.getLabelManager().getLabel() != null
                    && !edgeWidget.getLabelManager().getLabel().trim().isEmpty()) {
                Rectangle bound = edgeWidget.getLabelManager().getLabelWidget().getParentWidget()
                        .getPreferredBounds();
                bound = edgeWidget.getLabelManager().getLabelWidget().getParentWidget()
                        .convertLocalToScene(bound);
                Rectangle rec = new Rectangle(bound.x, bound.y, (int) bound.getWidth(),
                        (int) bound.getHeight());
                BPMNLabel label = new BPMNLabel();
                label.setBounds(new Bounds(rec));
                edge.setBPMNLabel(label);
            }
        }
        plane.addDiagramElement(edge);
    } else {
        throw new InvalidElmentException("Invalid BPMN Element");
    }
}
From source file:org.opencastproject.metadata.mpeg7.Mpeg7Test.java
/**
 * Tests the contents of the sample catalog mpeg7.xml.
 */
@SuppressWarnings("unchecked")
protected void testContent(Mpeg7Catalog mpeg7) {
    // Check presence of content
    assertTrue(mpeg7.hasAudioContent());
    assertTrue(mpeg7.hasVideoContent());
    assertFalse(mpeg7.hasAudioVisualContent());

    // Check content size
    assertTrue(mpeg7.getMultimediaContent(MultimediaContent.Type.AudioType).size() == 1);
    assertTrue(mpeg7.getMultimediaContent(MultimediaContent.Type.VideoType).size() == 2);

    // Check tracks
    assertNotNull(mpeg7.getAudioById("track-1"));
    assertNotNull(mpeg7.getVideoById("track-2"));
    assertNotNull(mpeg7.getVideoById("track-3"));

    //
    // Check audio track (track-1)
    //

    MultimediaContentType track1 = mpeg7.getAudioById("track-1");
    MediaTime audioMediaTime = track1.getMediaTime();

    // Media locator
    assertEquals(track1.getMediaLocator().getMediaURI(), URI.create("file:tracks/audio.pcm"));
    // Media time point
    assertEquals(0, audioMediaTime.getMediaTimePoint().getDay());
    assertEquals(0, audioMediaTime.getMediaTimePoint().getHour());
    assertEquals(0, audioMediaTime.getMediaTimePoint().getMinutes());
    assertEquals(0, audioMediaTime.getMediaTimePoint().getSeconds());
    assertEquals(25, audioMediaTime.getMediaTimePoint().getFractionsPerSecond());
    assertEquals(0, audioMediaTime.getMediaTimePoint().getNFractions());
    // Media duration
    assertEquals(0, audioMediaTime.getMediaDuration().getDays());
    assertEquals(1, audioMediaTime.getMediaDuration().getHours());
    assertEquals(30, audioMediaTime.getMediaDuration().getMinutes());
    assertEquals(0, audioMediaTime.getMediaDuration().getSeconds());
    // Segments
    assertFalse(track1.getTemporalDecomposition().segments().hasNext());

    //
    // Check video track (track-2)
    //

    MultimediaContentType track2 = mpeg7.getVideoById("track-2");
    MediaTime v1MediaTime = track2.getMediaTime();

    // Media locator
    assertEquals(track2.getMediaLocator().getMediaURI(), URI.create("file:tracks/presentation.mp4"));
    // Media time point
    assertEquals(0, v1MediaTime.getMediaTimePoint().getDay());
    assertEquals(0, v1MediaTime.getMediaTimePoint().getHour());
    assertEquals(0, v1MediaTime.getMediaTimePoint().getMinutes());
    assertEquals(0, v1MediaTime.getMediaTimePoint().getSeconds());
    assertEquals(25, v1MediaTime.getMediaTimePoint().getFractionsPerSecond());
    assertEquals(0, v1MediaTime.getMediaTimePoint().getNFractions());
    // Media duration
    assertEquals(0, v1MediaTime.getMediaDuration().getDays());
    assertEquals(1, v1MediaTime.getMediaDuration().getHours());
    assertEquals(30, v1MediaTime.getMediaDuration().getMinutes());
    assertEquals(0, v1MediaTime.getMediaDuration().getSeconds());
    // Segments
    TemporalDecomposition<VideoSegment> v1Decomposition = (TemporalDecomposition<VideoSegment>) track2
            .getTemporalDecomposition();
    assertFalse(v1Decomposition.hasGap());
    assertFalse(v1Decomposition.isOverlapping());
    assertEquals(v1Decomposition.getCriteria(), TemporalDecomposition.DecompositionCriteria.Temporal);
    assertTrue(v1Decomposition.segments().hasNext());

    // Segment track-2.segment-1
    VideoSegment v1Segment1 = v1Decomposition.getSegmentById("track-2.segment-1");
    assertNotNull(v1Segment1);
    MediaTime segment1MediaTime = v1Segment1.getMediaTime();
    // Media time point
    assertEquals(0, segment1MediaTime.getMediaTimePoint().getDay());
    assertEquals(0, segment1MediaTime.getMediaTimePoint().getHour());
    assertEquals(0, segment1MediaTime.getMediaTimePoint().getMinutes());
    assertEquals(0, segment1MediaTime.getMediaTimePoint().getSeconds());
    assertEquals(25, segment1MediaTime.getMediaTimePoint().getFractionsPerSecond());
    assertEquals(0, segment1MediaTime.getMediaTimePoint().getNFractions());
    // Media duration
    assertEquals(0, segment1MediaTime.getMediaDuration().getDays());
    assertEquals(1, segment1MediaTime.getMediaDuration().getHours());
    assertEquals(7, segment1MediaTime.getMediaDuration().getMinutes());
    assertEquals(35, segment1MediaTime.getMediaDuration().getSeconds());
    // Text annotations
    assertTrue(v1Segment1.hasTextAnnotations());
    assertTrue(v1Segment1.hasTextAnnotations(0.4f, 0.5f));
    assertFalse(v1Segment1.hasTextAnnotations(0.8f, 0.8f));
    assertTrue(v1Segment1.hasTextAnnotations("de"));
    assertFalse(v1Segment1.hasTextAnnotations("fr"));
    // Keywords
    TextAnnotation textAnnotation = v1Segment1.textAnnotations().next();
    assertEquals("Armin", textAnnotation.keywordAnnotations().next().getKeyword());
    assertEquals("Hint Armin", textAnnotation.freeTextAnnotations().next().getText());
    // Spatiotemporal decomposition
    SpatioTemporalDecomposition stdecomposition = v1Segment1.getSpatioTemporalDecomposition();
    assertNotNull(stdecomposition);
    assertTrue(stdecomposition.hasGap());
    assertFalse(stdecomposition.isOverlapping());
    // VideoText
    assertEquals(1, stdecomposition.getVideoText().length);
    VideoText videoText = stdecomposition.getVideoText("text1");
    assertNotNull(videoText);
    SpatioTemporalLocator locator = videoText.getSpatioTemporalLocator();
    assertNotNull(locator);
    MediaTime locatorMediaTime = locator.getMediaTime();
    assertNotNull(locatorMediaTime);
    assertEquals(MediaRelTimePointImpl.parseTimePoint("T00:00:00:0F25"), locatorMediaTime.getMediaTimePoint());
    assertEquals(MediaDurationImpl.parseDuration("PT01H07M35S"), locatorMediaTime.getMediaDuration());
    Textual textual = videoText.getText();
    assertNotNull(textual);
    assertEquals("Text", textual.getText());
    assertEquals("en", textual.getLanguage());
    Rectangle boundingBox = videoText.getBoundary();
    assertNotNull(boundingBox);
    assertEquals(10, (int) boundingBox.getX());
    assertEquals(150, (int) boundingBox.getWidth());
    assertEquals(20, (int) boundingBox.getY());
    assertEquals(15, (int) boundingBox.getHeight());

    //
    // Check video track (track-3)
    //

    MultimediaContentType track3 = mpeg7.getVideoById("track-3");
    MediaTime v2MediaTime = track3.getMediaTime();

    // Media locator
    assertEquals(track3.getMediaLocator().getMediaURI(), URI.create("file:tracks/presenter.mpg"));
    // Media time point
    assertEquals(0, v2MediaTime.getMediaTimePoint().getDay());
    assertEquals(0, v2MediaTime.getMediaTimePoint().getHour());
    assertEquals(0, v2MediaTime.getMediaTimePoint().getMinutes());
    assertEquals(0, v2MediaTime.getMediaTimePoint().getSeconds());
    assertEquals(25, v2MediaTime.getMediaTimePoint().getFractionsPerSecond());
    assertEquals(0, v2MediaTime.getMediaTimePoint().getNFractions());
    // Media duration
    assertEquals(0, v2MediaTime.getMediaDuration().getDays());
    assertEquals(1, v2MediaTime.getMediaDuration().getHours());
    assertEquals(30, v2MediaTime.getMediaDuration().getMinutes());
    assertEquals(0, v2MediaTime.getMediaDuration().getSeconds());
    // Segments
    TemporalDecomposition<VideoSegment> v2Decomposition = (TemporalDecomposition<VideoSegment>) track3
            .getTemporalDecomposition();
    assertFalse(v2Decomposition.segments().hasNext());
}
From source file:org.opencms.loader.CmsImageScaler.java
/**
 * Returns a scaled version of the given image byte content according to this image scaler's parameters.<p>
 *
 * @param content the image byte content to scale
 * @param rootPath the root path of the image file in the VFS
 *
 * @return a scaled version of the given image byte content according to the provided scaler parameters
 */
public byte[] scaleImage(byte[] content, String rootPath) {
    byte[] result = content;
    // flag for processed image
    boolean imageProcessed = false;
    // initialize image crop area
    initCropArea();
    RenderSettings renderSettings;
    if ((m_renderMode == 0) && (m_quality == 0)) {
        // use default render mode and quality
        renderSettings = new RenderSettings(Simapi.RENDER_QUALITY);
    } else {
        // use special render mode and/or quality
        renderSettings = new RenderSettings(m_renderMode);
        if (m_quality != 0) {
            renderSettings.setCompressionQuality(m_quality / 100f);
        }
    }
    // set max blur size
    renderSettings.setMaximumBlurSize(m_maxBlurSize);
    // now create the scaler
    Simapi scaler = new Simapi(renderSettings);
    // calculate a valid image type supported by the imaging library (e.g. "JPEG", "GIF")
    String imageType = Simapi.getImageType(rootPath);
    if (imageType == null) {
        // no type given, maybe the name got mixed up
        String mimeType = OpenCms.getResourceManager().getMimeType(rootPath, null, null);
        // check if this is another known MIME type, if so DON'T use it (images should not be named *.pdf)
        if (mimeType == null) {
            // no MIME type found, use JPEG format to write images to the cache
            imageType = Simapi.TYPE_JPEG;
        }
    }
    if (imageType == null) {
        // unknown type, unable to scale the image
        if (LOG.isDebugEnabled()) {
            LOG.debug(Messages.get().getBundle().key(Messages.ERR_UNABLE_TO_SCALE_IMAGE_2, rootPath, toString()));
        }
        return result;
    }
    try {
        BufferedImage image = Simapi.read(content);
        if (isCropping()) {
            // check if the crop width / height are not larger than the source image
            if ((m_cropHeight > image.getHeight()) || (m_cropWidth > image.getWidth())) {
                // crop height / width is outside of image - return image unchanged
                return result;
            }
        }
        Color color = getColor();
        if (!m_filters.isEmpty()) {
            Iterator<String> i = m_filters.iterator();
            while (i.hasNext()) {
                String filter = i.next();
                if (FILTER_GRAYSCALE.equals(filter)) {
                    // add a gray scale filter
                    GrayscaleFilter grayscaleFilter = new GrayscaleFilter();
                    renderSettings.addImageFilter(grayscaleFilter);
                } else if (FILTER_SHADOW.equals(filter)) {
                    // add a drop shadow filter
                    ShadowFilter shadowFilter = new ShadowFilter();
                    shadowFilter.setXOffset(5);
                    shadowFilter.setYOffset(5);
                    shadowFilter.setOpacity(192);
                    shadowFilter.setBackgroundColor(color.getRGB());
                    color = Simapi.COLOR_TRANSPARENT;
                    renderSettings.setTransparentReplaceColor(Simapi.COLOR_TRANSPARENT);
                    renderSettings.addImageFilter(shadowFilter);
                }
            }
        }
        if (isCropping()) {
            // image crop operation
            image = scaler.cropToSize(image, m_cropX, m_cropY, m_cropWidth, m_cropHeight, getWidth(),
                    getHeight(), color);
            imageProcessed = true;
        } else {
            // only rescale the image if the width and height differ from the target size
            int imageWidth = image.getWidth();
            int imageHeight = image.getHeight();
            // image rescale operation
            switch (getType()) {
                // select the "right" method of scaling according to the "t" parameter
                case 1:
                    // thumbnail generation mode (like 0 but no image enlargement)
                    image = scaler.resize(image, getWidth(), getHeight(), color, getPosition(), false);
                    imageProcessed = true;
                    break;
                case 2:
                    // scale to exact target size, crop what does not fit
                    if (((imageWidth != getWidth()) || (imageHeight != getHeight()))) {
                        image = scaler.resize(image, getWidth(), getHeight(), getPosition());
                        imageProcessed = true;
                    }
                    break;
                case 3:
                    // scale and keep image proportions, target size variable
                    if (((imageWidth != getWidth()) || (imageHeight != getHeight()))) {
                        image = scaler.resize(image, getWidth(), getHeight(), true);
                        imageProcessed = true;
                    }
                    break;
                case 4:
                    // don't keep image proportions, use exact target size
                    if (((imageWidth != getWidth()) || (imageHeight != getHeight()))) {
                        image = scaler.resize(image, getWidth(), getHeight(), false);
                        imageProcessed = true;
                    }
                    break;
                case 5:
                    // scale and keep image proportions, target size variable, include maxWidth / maxHeight option
                    // image proportions have already been calculated so should not be a problem, use
                    // 'false' to make sure image size exactly matches height and width attributes of generated tag
                    if (((imageWidth != getWidth()) || (imageHeight != getHeight()))) {
                        image = scaler.resize(image, getWidth(), getHeight(), false);
                        imageProcessed = true;
                    }
                    break;
                default:
                    // scale to exact target size with background padding
                    image = scaler.resize(image, getWidth(), getHeight(), color, getPosition(), true);
                    imageProcessed = true;
            }
        }
        if (!m_filters.isEmpty()) {
            Rectangle targetSize = scaler.applyFilterDimensions(getWidth(), getHeight());
            image = scaler.resize(image, (int) targetSize.getWidth(), (int) targetSize.getHeight(),
                    Simapi.COLOR_TRANSPARENT, Simapi.POS_CENTER);
            image = scaler.applyFilters(image);
            imageProcessed = true;
        }
        // get the byte result for the scaled image if some changes have been made,
        // otherwise use the original image
        if (imageProcessed) {
            result = scaler.getBytes(image, imageType);
        }
    } catch (Exception e) {
        if (LOG.isDebugEnabled()) {
            LOG.debug(Messages.get().getBundle().key(Messages.ERR_UNABLE_TO_SCALE_IMAGE_2, rootPath, toString()), e);
        }
    }
    return result;
}