Example usage for org.opencv.core Mat Mat

Introduction

On this page you can find example usage for the org.opencv.core Mat constructor.

Prototype

public Mat(int rows, int cols, int type) 
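
A minimal standalone sketch of this constructor (assuming the OpenCV native library is on the java.library.path; the class name is illustrative):

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;

public class MatConstructorDemo {
    public static void main(String[] args) {
        // The native library must be loaded before any Mat is created.
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        // Allocates a 4x6 single-channel 8-bit matrix; the contents are uninitialized.
        Mat m = new Mat(4, 6, CvType.CV_8UC1);
        m.setTo(new Scalar(0)); // zero it explicitly if a defined value is needed
        System.out.println(m.rows() + "x" + m.cols() + ", type=" + CvType.typeToString(m.type()));
    }
}

The examples below also use the related overloads Mat(), Mat(int rows, int cols, int type, Scalar s) and Mat(Mat m, Range rowRange, Range colRange): Mat() creates an empty header, the Scalar overload additionally fills every pixel, and the Range overload creates a view that shares data with m.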

Usage

From source file:com.astrocytes.core.operationsengine.CoreOperations.java

License:Open Source License

/**
 * Converts a source color image to a gray image.
 *
 * @param src - BGR image.
 * @return gray image.
 */
public static Mat grayscale(Mat src) {
    if (src.channels() < 3)
        return src;
    Mat dest = new Mat(src.rows(), src.cols(), CvType.CV_8UC1);
    cvtColor(src, dest, COLOR_BGR2GRAY);
    return dest;
}
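
A possible call site for grayscale (a sketch; the file names are hypothetical, and Imgcodecs comes from org.opencv.imgcodecs):

Mat src = Imgcodecs.imread("cells.png");      // imread loads color images as 8-bit BGR
Mat gray = CoreOperations.grayscale(src);     // CV_8UC1 result
Imgcodecs.imwrite("cells_gray.png", gray);

Note that when src already has fewer than three channels the method returns src itself rather than a copy, so callers should not release the returned Mat unconditionally.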

From source file:com.astrocytes.core.operationsengine.OperationsImpl.java

License:Open Source License

private void detectLayers() {
    Mat equalizedImage = CoreOperations.invert(CoreOperations.equalize(sourceImage));

    int halfColumnWidth = 50;
    Mat density = new Mat(equalizedImage.rows(), equalizedImage.cols(), CvType.CV_32F);
    int rows = density.rows();
    int cols = density.cols();

    // takes over a minute as written; see the sliding-window sketch after this method
    for (int i = 0; i < rows; i++) {
        double p;
        int leftBoundInterval, rightBoundInterval, intervalLength;
        for (int j = 0; j < cols; j++) {
            p = 0.0;
            leftBoundInterval = Math.max(j - halfColumnWidth, 0);
            rightBoundInterval = Math.min(cols - 1, j + halfColumnWidth);
            intervalLength = rightBoundInterval - leftBoundInterval + 1;

            for (int s = leftBoundInterval; s <= rightBoundInterval; s++) {
                p += equalizedImage.get(i, s)[0];
            }

            density.put(i, j, p / intervalLength);
        }
    }

    // ~3 seconds: normalize each column so its density values sum to 1
    for (int j = 0; j < cols; j++) {
        double intensity = 0.0;

        for (int i = 0; i < rows; i++) {
            intensity += density.get(i, j)[0];
        }

        for (int i = 0; i < rows; i++) {
            density.put(i, j, density.get(i, j)[0] / intensity);
        }
    }

    double ndlAverage = 1.0 / (double) rows;

    layerBounds = new Mat(6, cols, CvType.CV_32F);
    double k1 = 0.56E-4;
    double k2 = 1.3E-4;

    /*float[] data = new float[density.rows() * (int) density.elemSize()];
    density.get(0, 10, data);*/

    Mat upperBoundExact = new Mat(1, cols, CvType.CV_32F);
    Mat lowerBoundExact = new Mat(1, cols, CvType.CV_32F);

    for (int j = 0; j < cols; j++) {
        int upperBound = 0;
        int lowerBound = 0;

        for (int i = 0; i < rows; i++) {
            if (density.get(i, j)[0] > ndlAverage + k1) {
                upperBound = i;
                break;
            }
        }
        for (int i = rows - 1; i >= 0; i--) {
            if (density.get(i, j)[0] > ndlAverage + k2) {
                lowerBound = i;
                break;
            }
        }

        upperBoundExact.put(0, j, upperBound);
        lowerBoundExact.put(0, j, lowerBound);
    }

    //moving average for bounds
    int movingAverage = 300;
    for (int i = 0; i < upperBoundExact.cols(); i++) {
        int leftBoundInterval = Math.max(i - movingAverage, 0);
        int rightBoundInterval = Math.min(cols - 1, i + movingAverage);
        int intervalLength = rightBoundInterval - leftBoundInterval + 1;
        int upperBoundAverage = 0;
        int lowerBoundAverage = 0;

        for (int j = leftBoundInterval; j <= rightBoundInterval; j++) {
            upperBoundAverage += upperBoundExact.get(0, j)[0];
            lowerBoundAverage += lowerBoundExact.get(0, j)[0];
        }

        upperBoundAverage /= intervalLength;
        lowerBoundAverage /= intervalLength;
        int columnHeight = lowerBoundAverage - upperBoundAverage;
        layerBounds.put(0, i, upperBoundAverage);
        for (int h = 1; h < 5; h++) {
            layerBounds.put(h, i, upperBoundAverage + BRODMANN_COEFFS[h - 1] * columnHeight);
        }
        layerBounds.put(5, i, lowerBoundAverage);
    }
}
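
The first loop above recomputes the horizontal window sum from scratch for every pixel, which is what makes it so slow. A sliding-window sketch of the same density computation (an illustration, not code from this project; it assumes equalizedImage is CV_8UC1):

for (int i = 0; i < rows; i++) {
    // seed the window for column 0: indices 0 .. min(halfColumnWidth, cols - 1)
    double windowSum = 0.0;
    for (int s = 0; s <= Math.min(halfColumnWidth, cols - 1); s++) {
        windowSum += equalizedImage.get(i, s)[0];
    }
    for (int j = 0; j < cols; j++) {
        int left = Math.max(j - halfColumnWidth, 0);
        int right = Math.min(cols - 1, j + halfColumnWidth);
        density.put(i, j, windowSum / (right - left + 1));
        if (right + 1 < cols) {
            windowSum += equalizedImage.get(i, right + 1)[0]; // next column gains one element on the right
        }
        if (j - halfColumnWidth >= 0) {
            windowSum -= equalizedImage.get(i, left)[0];      // and loses the leftmost element
        }
    }
}

This drops the per-row cost from O(cols * halfColumnWidth) to O(cols).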

From source file:com.astrocytes.core.operationsengine.OperationsImpl.java

License:Open Source License

private Mat applyRayCastingSegmentation() {
    //Mat cannyEdges = CoreOperations.cannyFilter(sourceImage, 26, 58);
    Mat contours = new Mat(preparedImage.rows(), preparedImage.cols(), CvType.CV_32S);
    int contoursCount = /*neurons.size();*/ CoreOperations
            .drawAllContours(CoreOperations.erode(preparedImage, 5), contours);
    Mat result = new Mat(preparedImage.rows(), preparedImage.cols(), preparedImage.type());//CoreOperations.or(CoreOperations.and(cannyEdges, CoreOperations.grayscale(preparedImage)), contours);
    //cannyEdges.release();

    //Mat markers = new Mat(contours.rows(), contours.cols(), CvType.CV_32S);
    //contours.copyTo(markers);
    contours.convertTo(contours, CvType.CV_32S);

    for (Neuron neuron : neurons) {
        int x = (int) neuron.getCenter().x;
        int y = (int) neuron.getCenter().y;
        int color = (int) preparedImage.get(y, x)[0];
        /*contours.put(y, x, color);
        contours.put(y - 2, x, color);
        contours.put(y + 2, x, color);
        contours.put(y, x - 2, color);
        contours.put(y, x + 2, color);*/
        Imgproc.circle(contours, neuron.getCenter(), (int) (0.4f * neuron.getRadius()), new Scalar(color), -1);
    }

    Imgproc.watershed(sourceImage, contours);

    for (int i = 0; i < contours.rows(); i++) {
        for (int j = 0; j < contours.cols(); j++) {
            int index = (int) contours.get(i, j)[0];
            if (index == -1) {
                result.put(i, j, 0, 0, 0);
            } else if (index <= 0 || index > contoursCount) {
                result.put(i, j, 0, 0, 0);
            } else {
                if (index == 255) {
                    result.put(i, j, 0, 0, 0/*sourceImage.get(i, j)*/);
                } else {
                    result.put(i, j, index, index, index);
                }
            }
        }
    }

    result = CoreOperations.erode(result, 2);
    result = CoreOperations.dilate(result, 3);

    contours.release();

    contours = sourceImage.clone();
    CoreOperations.drawAllContours(result, contours);

    return contours;
}
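
Imgproc.watershed has two hard requirements that the method above satisfies: the image must be 8-bit 3-channel, and the markers matrix must be CV_32SC1 (hence the convertTo call). A minimal, self-contained sketch of that contract (file name and seed positions are hypothetical):

Mat image = Imgcodecs.imread("input.png");                          // 8UC3, as watershed requires
Mat markers = Mat.zeros(image.size(), CvType.CV_32SC1);             // one integer label per pixel
Imgproc.circle(markers, new Point(50, 50), 5, new Scalar(1), -1);   // seed for region 1
Imgproc.circle(markers, new Point(150, 120), 5, new Scalar(2), -1); // seed for region 2
Imgproc.watershed(image, markers);
// markers now holds a region label per pixel; -1 marks watershed boundaries,
// which is exactly the index == -1 case handled in the loop above.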

From source file:com.davidmiguel.gobees.monitoring.algorithm.processors.ContoursFinder.java

License:Open Source License

@Override
public Mat process(@NonNull Mat frame) {
    if (frame.empty()) {
        Log.e("Invalid input frame.");
        return null;
    }
    Mat tmp = frame.clone();
    // Finding outer contours
    contourList.clear();
    Imgproc.findContours(tmp, contourList, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);
    // Filter bees
    Mat contours = new Mat(tmp.rows(), tmp.cols(), CvType.CV_8UC3);
    tmp.release();
    double area;
    Scalar color;
    numBees = 0;
    for (int i = 0; i < contourList.size(); i++) {
        area = Imgproc.contourArea(contourList.get(i));
        if (area > minArea && area < maxArea) {
            color = GREEN;
            numBees++;
        } else {
            color = RED;
        }
        // Draw contour
        Imgproc.drawContours(contours, contourList, i, color, -1);
    }
    return contours;
}
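
A hypothetical call site (the ContoursFinder constructor and its minArea/maxArea thresholds are not part of this listing, so the setup shown here is assumed):

ContoursFinder finder = new ContoursFinder();                              // assumed no-arg constructor
Mat mask = Imgcodecs.imread("frame_mask.png", Imgcodecs.IMREAD_GRAYSCALE); // binary CV_8UC1 input, hypothetical file
Mat annotated = finder.process(mask);                                      // GREEN = bee-sized blob, RED = rejected

Cloning the frame before findContours is deliberate: in OpenCV 2.x and 3.0 findContours modifies its input image, and the clone keeps the caller's frame intact.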

From source file:com.davidmiguel.gobees.monitoring.camera.AndroidCameraImpl.java

License:Open Source License

/**
 * Starts capturing and converting preview frames.
 */
@SuppressWarnings("ConstantConditions")
void initCamera() {
    // Get camera instance
    camera = getCameraInstance(cameraIndex, maxFrameWidth, maxFrameHeight, zoomRatio);
    if (camera == null) {
        return;
    }
    // Save frame size
    Camera.Parameters params = camera.getParameters();
    int mFrameWidth = params.getPreviewSize().width;
    int mFrameHeight = params.getPreviewSize().height;
    // Create frame mat
    Mat mFrame = new Mat(mFrameHeight + (mFrameHeight / 2), mFrameWidth, CvType.CV_8UC1);
    cameraFrame = new CameraFrame(mFrame, mFrameWidth, mFrameHeight);
    // Config texture
    if (this.texture != null) {
        this.texture.release();
    }
    this.texture = new SurfaceTexture(0);
    // Call onCameraStart
    user.onCameraStarted(mFrameWidth, mFrameHeight);
    // Set camera callbacks and start capturing
    try {
        camera.setPreviewTexture(texture);
        camera.startPreview();
        timer.scheduleAtFixedRate(takePhotoTask, initialDelay, frameRate);
    } catch (Exception e) {
        Log.d(TAG, "Error starting camera preview: " + e.getMessage(), e);
    }
}
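
The mFrameHeight + mFrameHeight / 2 rows encode an NV21 (YUV 4:2:0) preview frame: a full-height Y plane followed by half a frame of interleaved VU chroma. A sketch of the conversion such a buffer typically feeds once onPreviewFrame has filled it (the conversion call itself is not shown in this listing):

Mat rgba = new Mat();
Imgproc.cvtColor(mFrame, rgba, Imgproc.COLOR_YUV2RGBA_NV21);
// rgba is now mFrameHeight x mFrameWidth, CV_8UC4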

From source file:com.dtcristo.virtucane.ImageProcessor.java

License:Apache License

public ImageProcessor(Context context, SurfaceHolder holder, int w, int h) {
    Log.i(TAG, "ImageProcessor()");

    mHandler = new OutputHandler(context);
    mHolder = holder;

    mWidth = w;
    mHeight = h;

    mYuv = new Mat(mHeight + mHeight / 2, mWidth, CvType.CV_8UC1);
    mGraySubmat = mYuv.submat(0, mHeight, 0, mWidth);
    mRgba = new Mat();
}
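
mGraySubmat is a view, not a copy: it aliases the Y plane at the top of mYuv. The Range-taking Mat constructor expresses the same view explicitly (an equivalent sketch; Range is org.opencv.core.Range):

// Same view as mYuv.submat(0, mHeight, 0, mWidth); no pixels are copied,
// so writing through graySubmat writes into mYuv.
Mat graySubmat = new Mat(mYuv, new Range(0, mHeight), new Range(0, mWidth));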

From source file:com.example.sarthuak.opencv.MainActivity.java

public void onCameraViewStarted(int width, int height) {

    mRgba = new Mat(height, width, CvType.CV_8UC4);
    mRgbaF = new Mat(height, width, CvType.CV_8UC4);
    mRgbaT = new Mat(width, height, CvType.CV_8UC4); // transposed frame is width x height
    hierarchy = new Mat();
}
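
These mats support the usual Android portrait-rotation pattern in onCameraFrame. A sketch of that pattern (an assumption; onCameraFrame is not shown in this listing, and inputFrame is its CvCameraViewFrame parameter):

mRgba = inputFrame.rgba();
Core.transpose(mRgba, mRgbaT);                   // 90-degree rotation, step 1: transpose
Imgproc.resize(mRgbaT, mRgbaF, mRgbaF.size(), 0, 0, 0);
Core.flip(mRgbaF, mRgba, 1);                     // step 2: horizontal flip completes the rotation
return mRgba;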

From source file:com.example.thibautg.libreaudioview.VideoProcessing.java

License:Open Source License

/**
 *
 * @param audioOutput
 */
public VideoProcessing(AudioOutput audioOutput) {
    mInputMat320240 = new Mat(Globals.acquisitionFrameHeight + Globals.acquisitionFrameHeight / 2,
            Globals.acquisitionFrameWidth, CvType.CV_8UC1);
    mInputGray320240 = new Mat(Globals.acquisitionFrameHeight, Globals.acquisitionFrameWidth, CvType.CV_8UC1);

    mInputMat = new Mat(height + height / 2, width, CvType.CV_8UC1);
    mRgba = new Mat(height, width, CvType.CV_8U, new Scalar(4));
    mPreviousMat = new Mat(height, width, CvType.CV_8UC1);
    mDiffMat2 = new Mat(height, width, CvType.CV_8UC1);
    mInputGray = new Mat(height, width, CvType.CV_8UC1);
    mOutputGrayMat = new Mat(height, width, CvType.CV_8UC1);
    mSonifier = new Sonifier(audioOutput);
}
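
The buffer names suggest a frame-differencing pipeline. A sketch of the step these mats would support (an assumption; the actual per-frame processing lives elsewhere in the project):

Core.absdiff(mInputGray, mPreviousMat, mDiffMat2);                            // per-pixel |current - previous|
Imgproc.threshold(mDiffMat2, mOutputGrayMat, 25, 255, Imgproc.THRESH_BINARY); // keep strong changes only
mInputGray.copyTo(mPreviousMat);                                              // current frame becomes "previous"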

From source file:com.example.yannic.remotefacedetection.agent.FaceDetectionAgent.java

License:Open Source License

public static Mat bufferedImageToMat(BufferedImage bi) {
    Mat mat = new Mat(bi.getHeight(), bi.getWidth(), CvType.CV_8UC3);
    byte[] data = ((DataBufferByte) bi.getRaster().getDataBuffer()).getData();
    mat.put(0, 0, data);
    return mat;
}
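
This helper assumes bi is TYPE_3BYTE_BGR, so its DataBuffer is a byte array with the same BGR byte layout as CV_8UC3. The reverse helper, for completeness (a sketch under the same assumption):

public static BufferedImage matToBufferedImage(Mat mat) {
    BufferedImage bi = new BufferedImage(mat.cols(), mat.rows(), BufferedImage.TYPE_3BYTE_BGR);
    byte[] data = ((DataBufferByte) bi.getRaster().getDataBuffer()).getData();
    mat.get(0, 0, data); // copy the mat's pixels into the image's backing array
    return bi;
}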

From source file:com.github.rosjava_catkin_package_a.ARLocROS.ARLoc.java

License:Apache License

@Override
public void onStart(final ConnectedNode connectedNode) {
    // load OpenCV shared library
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

    // read configuration variables from the ROS Runtime (configured in the
    // launch file)
    log = connectedNode.getLog();
    log.info("Reading parameters");
    this.parameter = Parameter.createFromParameterTree(connectedNode.getParameterTree());

    // Read Marker Config
    markerConfig = MarkerConfig.createFromConfig(parameter.markerConfigFile(), parameter.patternDirectory());

    // setup rotation vector and translation vector storing output of the
    // localization
    rvec = new Mat(3, 1, CvType.CV_64F);
    tvec = new MatOfDouble(1.0, 1.0, 1.0);

    camp = getCameraInfo(connectedNode, parameter);

    // start to listen to transform messages in /tf in order to feed the
    // Transformer and lookup transforms
    final TransformationService transformationService = TransformationService.create(connectedNode);

    // Subscribe to Image
    Subscriber<sensor_msgs.Image> subscriberToImage = connectedNode.newSubscriber(parameter.cameraImageTopic(),
            sensor_msgs.Image._TYPE);

    ComputePose computePose = null;
    try {
        final Mat cameraMatrix = CameraParams.getCameraMatrix(camp);
        final MatOfDouble distCoeffs = CameraParams.getDistCoeffs(camp);
        computePose = ComputePose.create(markerConfig, new Size(camp.width(), camp.height()), cameraMatrix,
                distCoeffs, this.parameter.visualization());
    } catch (NyARException e) {
        logger.info("Cannot initialize ComputePose", e);
    } catch (FileNotFoundException e) {
        logger.info("Cannot find file when initialize ComputePose", e);
    }
    final ComputePose poseProcessor = computePose;
    subscriberToImage.addMessageListener(new MessageListener<sensor_msgs.Image>() {

        @Override
        public void onNewMessage(sensor_msgs.Image message) {
            //
            if (!message.getEncoding().toLowerCase().equals("rgb8")) {
                log.error("Sorry, " + message.getEncoding() + " Image encoding is not supported! EXITING");
                System.exit(-1);
            }
            if (camp != null) {
                try {
                    //
                    image = Utils.matFromImage(message);
                    // uncomment to add more contrast to the image
                    //Utils.tresholdContrastBlackWhite(image, 600);
                    Imgproc.threshold(image, image, 200, 255, Imgproc.THRESH_BINARY);
                    // Mat cannyimg = new Mat(image.height(), image.width(),
                    // CvType.CV_8UC3);
                    // Imgproc.Canny(image, cannyimg, 10, 100);
                    // Imshow.show(cannyimg);

                    // image.convertTo(image, -1, 1.5, 0);
                    // setup camera matrix and return vectors
                    // compute pose
                    if (poseProcessor.computePose(rvec, tvec, image)) {
                        // notify publisher threads (pose and tf, see below)
                        synchronized (tvec) {
                            tvec.notifyAll();
                        }
                    }

                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        }
    });

    // publish tf CAMERA_FRAME_NAME --> MARKER_FRAME_NAME
    final Publisher<tf2_msgs.TFMessage> tfPublisherCamToMarker = connectedNode.newPublisher("tf",
            tf2_msgs.TFMessage._TYPE);
    connectedNode.executeCancellableLoop(new CancellableLoop() {

        @Override
        protected void loop() throws InterruptedException {

            synchronized (tvec) {
                tvec.wait();
            }

            QuaternionHelper q = new QuaternionHelper();

            /*
             * http://euclideanspace.com/maths/geometry/rotations/
             * conversions/matrixToEuler/index.htm
             * http://stackoverflow.com/questions/12933284/rodrigues-into-
             * eulerangles-and-vice-versa
             * 
             * heading = atan2(-m20,m00) attitude = asin(m10) bank =
             * atan2(-m12,m11)
             */
            // convert output rotation vector rvec to rotation matrix R
            Mat R = new Mat(3, 3, CvType.CV_32FC1);
            Calib3d.Rodrigues(rvec, R);
            // get rotations around X,Y,Z from rotation matrix R
            double bankX = Math.atan2(-R.get(1, 2)[0], R.get(1, 1)[0]);
            double headingY = Math.atan2(-R.get(2, 0)[0], R.get(0, 0)[0]);
            double attitudeZ = Math.asin(R.get(1, 0)[0]);
            // convert Euler angles to quaternion
            q.setFromEuler(bankX, headingY, attitudeZ);

            // set information to message
            TFMessage tfmessage = tfPublisherCamToMarker.newMessage();
            TransformStamped posestamped = connectedNode.getTopicMessageFactory()
                    .newFromType(geometry_msgs.TransformStamped._TYPE);
            Transform transform = posestamped.getTransform();

            Quaternion orientation = transform.getRotation();
            Vector3 point = transform.getTranslation();
            point.setX(tvec.get(0, 0)[0]);
            point.setY(tvec.get(1, 0)[0]);
            point.setZ(tvec.get(2, 0)[0]);

            orientation.setW(q.getW());
            orientation.setX(q.getX());
            orientation.setY(q.getY());
            orientation.setZ(q.getZ());
            posestamped.getHeader().setFrameId(parameter.cameraFrameName());
            posestamped.setChildFrameId(parameter.markerFrameName());
            posestamped.getHeader().setStamp(connectedNode.getCurrentTime());
            // frame_id too
            tfmessage.getTransforms().add(posestamped);
            tfPublisherCamToMarker.publish(tfmessage);
        }
    });

    // publish Markers
    final Publisher<visualization_msgs.Marker> markerPublisher = connectedNode.newPublisher("markers",
            visualization_msgs.Marker._TYPE);
    connectedNode.executeCancellableLoop(new CancellableLoop() {

        @Override
        protected void loop() throws InterruptedException {
            // publish markers every 500ms
            Thread.sleep(500);
            // get marker points from markerConfig, each marker has 4
            // vertices
            List<Point3> points3dlist = markerConfig.getUnordered3DPointList();
            int i = 0;
            for (Point3 p : points3dlist) {
                Marker markermessage = markerPublisher.newMessage();
                // FIXME If the markers are published into an existing frame
                // (e.g. map or odom) the node will consume very high CPU
                // and will fail after a short time. The markers are
                // probably published in the wrong way.
                markermessage.getHeader().setFrameId(parameter.markerFrameName());
                markermessage.setId(i);
                i++;
                markermessage.setType(visualization_msgs.Marker.SPHERE);
                markermessage.setAction(visualization_msgs.Marker.ADD);
                // position
                double x = p.x;
                markermessage.getPose().getPosition().setX(x);
                double y = p.y;
                markermessage.getPose().getPosition().setY(y);
                double z = p.z;
                markermessage.getPose().getPosition().setZ(z);
                // orientation
                markermessage.getPose().getOrientation().setX(0);
                markermessage.getPose().getOrientation().setY(0);
                markermessage.getPose().getOrientation().setZ(0);
                markermessage.getPose().getOrientation().setW(1);
                // pattern size
                markermessage.getScale().setX(0.1);
                markermessage.getScale().setY(0.1);
                markermessage.getScale().setZ(0.1);
                // color
                markermessage.getColor().setA(1);
                markermessage.getColor().setR(1);
                markermessage.getColor().setG(0);
                markermessage.getColor().setB(0);

                markerPublisher.publish(markermessage);
            }
        }
    });

    // publish tf map --> odom
    final Publisher<tf2_msgs.TFMessage> tfPublisherMapToOdom = connectedNode.newPublisher("tf",
            tf2_msgs.TFMessage._TYPE);
    connectedNode.executeCancellableLoop(new CancellableLoop() {

        @Override
        protected void loop() throws InterruptedException {

            // since this is an infinite loop, wait to be notified if new
            // image was processed
            synchronized (tvec) {
                tvec.wait();
            }

            // compute transform map to odom from map to
            // camera_rgb_optical_frame and odom to camera_rgb_optical_frame

            // map to camera_rgb_optical_frame
            Mat tvec_map_cam = new MatOfDouble(1.0, 1.0, 1.0);
            QuaternionHelper q = new QuaternionHelper();
            // get rotation matrix R from solvepnp output rotation vector
            // rvec
            Mat R = new Mat(3, 3, CvType.CV_32FC1);
            Calib3d.Rodrigues(rvec, R);
            // transpose R, because we need the transformation from
            // world(map) to camera
            R = R.t();
            // get rotation around X,Y,Z from R in radiants
            double bankX = Math.atan2(-R.get(1, 2)[0], R.get(1, 1)[0]);
            double headingY = Math.atan2(-R.get(2, 0)[0], R.get(0, 0)[0]);
            double attitudeZ = Math.asin(R.get(1, 0)[0]);
            q.setFromEuler(bankX, headingY, attitudeZ);
            // compute translation vector from world (map) to cam
            // tvec_map_cam
            Core.multiply(R, new Scalar(-1), R); // R=-R
            Core.gemm(R, tvec, 1, new Mat(), 0, tvec_map_cam, 0); // tvec_map_cam=R*tvec

            org.ros.rosjava_geometry.Quaternion rotation = new org.ros.rosjava_geometry.Quaternion(q.getX(),
                    q.getY(), q.getZ(), q.getW());
            double x = tvec_map_cam.get(0, 0)[0];
            double y = tvec_map_cam.get(1, 0)[0];
            double z = tvec_map_cam.get(2, 0)[0];
            // create a Transform Object that hold the transform map to cam
            org.ros.rosjava_geometry.Vector3 translation = new org.ros.rosjava_geometry.Vector3(x, y, z);
            org.ros.rosjava_geometry.Transform transform_map_cam = new org.ros.rosjava_geometry.Transform(
                    translation, rotation);

            // odom to camera_rgb_optical_frame
            GraphName sourceFrame = GraphName.of(parameter.cameraFrameName());
            GraphName targetFrame = GraphName.of("odom");
            org.ros.rosjava_geometry.Transform transform_cam_odom = null;
            if (transformationService.canTransform(targetFrame, sourceFrame)) {
                try {
                    transform_cam_odom = transformationService.lookupTransform(targetFrame, sourceFrame);
                } catch (Exception e) {
                    e.printStackTrace();
                    log.info("Cloud not get transformation from " + parameter.cameraFrameName() + " to "
                            + "odom! " + "However, " + "will continue..");
                    return;
                }
            } else {
                log.info("Cloud not get transformation from " + parameter.cameraFrameName() + " to " + "odom! "
                        + "However, will " + "continue..");
                // cancel this loop..no result can be computed
                return;
            }
            // multiply results
            org.ros.rosjava_geometry.Transform result = org.ros.rosjava_geometry.Transform.identity();
            result = result.multiply(transform_map_cam);
            result = result.multiply(transform_cam_odom);

            // set information to ROS message
            TFMessage tfMessage = tfPublisherMapToOdom.newMessage();
            TransformStamped transformStamped = connectedNode.getTopicMessageFactory()
                    .newFromType(geometry_msgs.TransformStamped._TYPE);
            Transform transform = transformStamped.getTransform();

            Quaternion orientation = transform.getRotation();
            Vector3 vector = transform.getTranslation();
            vector.setX(result.getTranslation().getX());
            vector.setY(result.getTranslation().getY());
            vector.setZ(result.getTranslation().getZ());

            orientation.setW(result.getRotationAndScale().getW());
            orientation.setX(result.getRotationAndScale().getX());
            orientation.setY(result.getRotationAndScale().getY());
            orientation.setZ(result.getRotationAndScale().getZ());
            transformStamped.getHeader().setFrameId("map");
            transformStamped.setChildFrameId("odom");
            transformStamped.getHeader().setStamp(connectedNode.getCurrentTime());
            // frame_id too
            tfMessage.getTransforms().add(transformStamped);
            tfPublisherMapToOdom.publish(tfMessage);
            // System.exit(0);
        }
    });

    // Publish Pose

    final Publisher<geometry_msgs.PoseStamped> posePublisher = connectedNode
            .newPublisher(parameter.poseTopicName(), geometry_msgs.PoseStamped._TYPE);

    connectedNode.executeCancellableLoop(new CancellableLoop() {

        @Override
        protected void loop() throws InterruptedException {

            // since this is an infinite loop, wait here to be notified if
            // new image was processed
            synchronized (tvec) {
                tvec.wait();
            }
            final QuaternionHelper q = new QuaternionHelper();

            // convert rotation vector result of solvepnp to rotation matrix
            Mat R = new Mat(3, 3, CvType.CV_32FC1);
            Calib3d.Rodrigues(rvec, R);
            // see publishers before for documentation
            final Mat tvec_map_cam = new MatOfDouble(1.0, 1.0, 1.0);
            R = R.t();
            final double bankX = Math.atan2(-R.get(1, 2)[0], R.get(1, 1)[0]);
            final double headingY = Math.atan2(-R.get(2, 0)[0], R.get(0, 0)[0]);
            final double attitudeZ = Math.asin(R.get(1, 0)[0]);
            q.setFromEuler(bankX, headingY, attitudeZ);
            Core.multiply(R, new Scalar(-1), R);
            Core.gemm(R, tvec, 1, new Mat(), 0, tvec_map_cam, 0);
            final org.ros.rosjava_geometry.Quaternion rotation = new org.ros.rosjava_geometry.Quaternion(
                    q.getX(), q.getY(), q.getZ(), q.getW());
            final double x = tvec_map_cam.get(0, 0)[0];
            final double y = tvec_map_cam.get(1, 0)[0];
            final double z = tvec_map_cam.get(2, 0)[0];

            final org.ros.rosjava_geometry.Vector3 translation = new org.ros.rosjava_geometry.Vector3(x, y, z);
            final org.ros.rosjava_geometry.Transform transform_map_cam = new org.ros.rosjava_geometry.Transform(
                    translation, rotation);

            // odom to camera_rgb_optical_frame
            final GraphName sourceFrame = GraphName.of(parameter.cameraFrameName());
            final GraphName targetFrame = GraphName.of("base_link");
            org.ros.rosjava_geometry.Transform transform_cam_base = null;

            if (transformationService.canTransform(targetFrame, sourceFrame)) {
                try {
                    transform_cam_base = transformationService.lookupTransform(targetFrame, sourceFrame);
                } catch (Exception e) {
                    e.printStackTrace();
                    log.info("Cloud not get transformation from " + parameter.cameraFrameName() + " to "
                            + "base_link! " + "However, will continue..");
                    // cancel this loop..no result can be computed
                    return;
                }
            } else {
                log.info("Cloud not get transformation from " + parameter.cameraFrameName() + " to "
                        + "base_link!" + " However, " + "will continue..");
                // cancel this loop..no result can be computed
                return;
            }

            // multiply results
            org.ros.rosjava_geometry.Transform current_pose = org.ros.rosjava_geometry.Transform.identity();
            current_pose = current_pose.multiply(transform_map_cam);
            current_pose = current_pose.multiply(transform_cam_base);

            // check for plausibility of the pose by checking if movement
            // exceeds max speed (defined) of the robot
            if (parameter.badPoseReject()) {
                Time current_timestamp = connectedNode.getCurrentTime();
                // TODO Unfortunately, we do not have the tf timestamp at
                // hand here. So we can only use the current timestamp.
                double maxspeed = 5;
                boolean goodpose = false;
                // if (current_pose != null && current_timestamp != null) {
                if (last_pose != null && last_timestamp != null) {
                    // check speed of movement between last and current pose
                    double distance = PoseCompare.distance(current_pose, last_pose);
                    double timedelta = PoseCompare.timedelta(current_timestamp, last_timestamp);
                    if ((distance / timedelta) < maxspeed) {
                        if (smoothing) {
                            double xold = last_pose.getTranslation().getX();
                            double yold = last_pose.getTranslation().getY();
                            double zold = last_pose.getTranslation().getZ();
                            double xnew = current_pose.getTranslation().getX();
                            double ynew = current_pose.getTranslation().getY();
                            double znew = current_pose.getTranslation().getZ();
                            final org.ros.rosjava_geometry.Vector3 smoothTranslation = new org.ros.rosjava_geometry.Vector3(
                                    (xold * 2 + xnew) / 3, (yold * 2 + ynew) / 3, (zold * 2 + znew) / 3);
                            current_pose = new org.ros.rosjava_geometry.Transform(smoothTranslation,
                                    current_pose.getRotationAndScale());
                            last_pose = current_pose;
                        }
                        last_pose = current_pose;
                        last_timestamp = current_timestamp;
                        goodpose = true;
                    } else {
                        log.info("distance " + distance + " time: " + timedelta + " --> Pose rejected");
                    }

                } else {
                    last_pose = current_pose;
                    last_timestamp = current_timestamp;
                }
                // }
                // bad pose rejection
                if (!goodpose) {
                    return;
                }
            }

            // set information to message
            geometry_msgs.PoseStamped posestamped = posePublisher.newMessage();
            Pose pose = posestamped.getPose();
            Quaternion orientation = pose.getOrientation();
            Point point = pose.getPosition();

            point.setX(current_pose.getTranslation().getX());

            point.setY(current_pose.getTranslation().getY());

            point.setZ(current_pose.getTranslation().getZ());

            orientation.setW(current_pose.getRotationAndScale().getW());
            orientation.setX(current_pose.getRotationAndScale().getX());
            orientation.setY(current_pose.getRotationAndScale().getY());
            orientation.setZ(current_pose.getRotationAndScale().getZ());

            // frame_id too
            posestamped.getHeader().setFrameId("map");
            posestamped.getHeader().setStamp(connectedNode.getCurrentTime());
            posePublisher.publish(posestamped);

        }
    });

}
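
The listing above runs the same rvec-to-Euler conversion in three publisher loops. A self-contained sketch of that recurring step, using the angle convention documented in the comment block (heading = atan2(-m20, m00), attitude = asin(m10), bank = atan2(-m12, m11)):

import org.opencv.calib3d.Calib3d;
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;

public class RvecToEuler {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        Mat rvec = new Mat(3, 1, CvType.CV_64F);
        rvec.put(0, 0, 0.1, 0.2, 0.3);           // example rotation vector (radians)
        Mat R = new Mat(3, 3, CvType.CV_64F);
        Calib3d.Rodrigues(rvec, R);              // rotation vector -> 3x3 rotation matrix
        double bankX = Math.atan2(-R.get(1, 2)[0], R.get(1, 1)[0]);
        double headingY = Math.atan2(-R.get(2, 0)[0], R.get(0, 0)[0]);
        double attitudeZ = Math.asin(R.get(1, 0)[0]);
        System.out.printf("bank=%.3f heading=%.3f attitude=%.3f%n", bankX, headingY, attitudeZ);
    }
}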