Example usage for org.opencv.core Mat Mat

Introduction

On this page you can find example usage of the org.opencv.core.Mat no-argument constructor, Mat().

Prototype

public Mat() 
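
A Mat created with the no-argument constructor owns no pixel data; it is typically passed as the destination argument of an OpenCV call, which allocates it to the required size and type. Below is a minimal sketch of that pattern, assuming OpenCV 3.x; the class name and file paths are placeholders rather than code taken from the examples on this page.

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;

public class MatDefaultConstructorExample {
    public static void main(String[] args) {
        // Load the native OpenCV library, as the examples below also do.
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // Read a source image (placeholder path).
        Mat src = Imgcodecs.imread("input.jpg");

        // Empty Mat from the default constructor; cvtColor allocates it
        // to the matching size and a single-channel type.
        Mat gray = new Mat();
        Imgproc.cvtColor(src, gray, Imgproc.COLOR_BGR2GRAY);

        Imgcodecs.imwrite("gray.jpg", gray);
    }
}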

Usage

From source file:bikecalibration.ROIDetection.java

/**
 * This function processes the image that contains the ROIs. It returns the
 * array of nodes found in the image.
 *
 * @param image
 * @return Array of nodes
 */
public Node[] processImage(Mat image) {
    Node[] outputNodes = new Node[ROIs.size()];

    // convert the scene mat to gray scale
    Mat grayImage = new Mat();
    Imgproc.cvtColor(image, grayImage, Imgproc.COLOR_BGR2GRAY);

    // create a feature detector
    FeatureDetector detector = FeatureDetector.create(FeatureDetector.SURF);

    List<MatOfKeyPoint> keypoints_objects = new ArrayList<>();
    MatOfKeyPoint keypoints_scene = new MatOfKeyPoint();

    detector.detect(ROIs, keypoints_objects);
    detector.detect(grayImage, keypoints_scene);

    // create a descriptor extractor
    DescriptorExtractor extractor = DescriptorExtractor.create(DescriptorExtractor.SURF);

    List<Mat> descriptor_objects = new ArrayList<>();
    Mat descriptor_scene = new Mat();

    extractor.compute(ROIs, keypoints_objects, descriptor_objects);
    extractor.compute(grayImage, keypoints_scene, descriptor_scene);

    // create a descriptor matcher
    DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.FLANNBASED);
    List<MatOfDMatch> matches = new ArrayList<>();

    descriptor_objects.stream().map((descriptor_object) -> {
        MatOfDMatch match = new MatOfDMatch();
        matcher.match(descriptor_object, descriptor_scene, match);
        return match;
    }).forEach((match) -> {
        matches.add(match);
    });

    ArrayList<ArrayList<DMatch>> matchesList = new ArrayList<>();
    matches.stream().forEach((match) -> {
        matchesList.add((ArrayList<DMatch>) match.toList());
    });

    // distance bounds for filtering matches by descriptor distance
    double max_dist = 100;
    double min_dist = 0;

    return null;
}

From source file:binarythresold.BinaryThresold.java

/**
 * @param args the command line arguments
 */
public BinaryThresold(String[] args) {
    // TODO code application logic here
    MyListArgs Param;
    BufferedImage image;
    String ConfigFile;

    Param = new MyListArgs(args);
    ConfigFile = Param.ValueArgsAsString("-CONFIG", "");

    if (!ConfigFile.equals("")) {
        Param.AddArgsFromFile(ConfigFile);
    } // end if

    String Sintaxis = "-IN:str -OUT:str [-UMBRAL:int] [-SENS:int]";
    //System.out.println(Sintaxis);
    //MySintaxis Review = new MySintaxis(Sintaxis, Param);

    String IN = Param.ValueArgsAsString("-IN", "");
    String OUT = Param.ValueArgsAsString("-OUT", "");
    int UMBRAL = Param.ValueArgsAsInteger("-UMBRAL", 15);
    int SENS = Param.ValueArgsAsInteger("-SENS", 32);

    System.out.println("IN: " + IN);
    File tempDIR = new File(OUT.substring(0, OUT.lastIndexOf(File.separator)));
    //System.out.println("DIR OUT: "+OUT.substring(0, OUT.lastIndexOf(File.separator)));
    if (!tempDIR.exists()) {
        tempDIR.mkdirs();
        //System.out.println("DIR: "+tempDIR.getPath());
    }

    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    //System.loadLibrary("opencv3.1");
    //System.out.println(System.getProperty("java.library.path"));
    //System.out.println("BINARIZE THRESOLD COMPONENTE");
    //System.out.println("IN : "+IN);
    //System.out.println("OUT:"+OUT);
    //System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    //try{
    Mat gray = imread(IN, IMREAD_GRAYSCALE);
    Mat result = new Mat();
    adaptiveThreshold(gray, result, 255, ADAPTIVE_THRESH_MEAN_C, THRESH_BINARY, UMBRAL, SENS);
    //System.out.println("save: "+OUT);
    imwrite((OUT), result);
    //}catch(Exception e){System.out.println("error OPENCV!!!!!");}
}

From source file:bollettini.BullettinCompiler.java

public void show() {
    //resize to show
    Size size = new Size(1100, 335);
    Mat resize = new Mat();
    Imgproc.resize(bullettin, resize, size);

    //create image
    int type = BufferedImage.TYPE_BYTE_GRAY;
    int bufferSize = resize.channels() * resize.cols() * resize.rows();
    byte[] b = new byte[bufferSize];
    resize.get(0, 0, b); // get all the pixels
    BufferedImage image = new BufferedImage(resize.cols(), resize.rows(), type);
    final byte[] targetPixels = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
    System.arraycopy(b, 0, targetPixels, 0, b.length);

    ImageIcon icon = new ImageIcon(image);

    //create image and show
    View view = new View();
    view.init(this);
    view.setIcon(icon);
    view.setVisible(true);
    view.setDefaultCloseOperation(JFrame.DISPOSE_ON_CLOSE);
}

From source file:br.cefetmg.lsi.opencv.multipleObjectTracking.processing.MultipleObjectTracking.java

License:Open Source License

public void startTracking() throws Exception {
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

    mountFrames();

    // Matrices for image processing.
    Mat image = new Mat();
    Mat thresholdedImage = new Mat();
    Mat hsvImage = new Mat();

    // Opens camera capture flow.
    VideoCapture capture = null;
    String imagesource = PropertiesLoaderImpl.getValor("multipleObjectTracking.imagesource");
    if (imagesource.equalsIgnoreCase("webcam")) {
        capture = new VideoCapture(0);
    } else {

        if (imagesource.equalsIgnoreCase("ipcam")) {
            String ipcamAddress = PropertiesLoaderImpl
                    .getValor("multipleObjectTracking.imagesource.ipcam.address");
            capture = new VideoCapture(ipcamAddress);
        }

    }

    if (capture == null) {
        throw new Exception("Could not connect to camera.");
    }

    // Captures one image, for starting the process.
    try {
        capture.read(image);
    } catch (Exception e) {
        throw new Exception("Could not read from camera. Maybe the URL is not correct.");
    }

    setFramesSizes(image);

    if (capture.isOpened()) {

        while (true) {
            capture.read(image);

            if (!image.empty()) {
                Imgproc.cvtColor(image, hsvImage, Imgproc.COLOR_BGR2HSV);

                if (calibrationMode) {
                    thresholdedImage = processImage(hsvImage,
                            new Scalar(calibrationWindow.getMinHValue(), calibrationWindow.getMinSValue(),
                                    calibrationWindow.getMinVValue()),
                            new Scalar(calibrationWindow.getMaxHValue(), calibrationWindow.getMaxSValue(),
                                    calibrationWindow.getMaxVValue()));
                    trackFilteredObject(null, thresholdedImage, image);
                    updateFrames(image, thresholdedImage);
                } else {
                    Ball redBall = new Ball(Ball.Colours.RED);
                    Ball greenBall = new Ball(Ball.Colours.GREEN);
                    Ball blueBall = new Ball(Ball.Colours.BLUE);

                    ArrayList<Ball> balls = new ArrayList<Ball>();
                    balls.add(redBall);
                    balls.add(greenBall);
                    balls.add(blueBall);

                    for (Ball ball : balls) {
                        thresholdedImage = processImage(hsvImage, ball.getHsvMin(), ball.getHsvMax());
                        trackFilteredObject(ball, thresholdedImage, image);
                        updateFrames(image, thresholdedImage);
                    }

                }

            } else {
                throw new Exception("Could not read camera image.");
            }

        }

    } else {
        throw new Exception("Could not read from camera.");
    }

}

From source file:br.cefetmg.lsi.opencv.multipleObjectTracking.processing.MultipleObjectTracking.java

License:Open Source License

private Mat processImage(Mat hsvImage, Scalar hsvMin, Scalar hsvMax) {
    Mat thresholdedImage = new Mat();
    Core.inRange(hsvImage, hsvMin, hsvMax, thresholdedImage);
    morphOps(thresholdedImage);

    return thresholdedImage;
}

From source file:br.cefetmg.lsi.opencv.multipleObjectTracking.processing.MultipleObjectTracking.java

License:Open Source License

private void trackFilteredObject(Ball theBall, Mat threshold, Mat cameraFeed) {
    List<Ball> balls = new ArrayList<Ball>();

    Mat temp = new Mat();
    threshold.copyTo(temp);

    // The two variables below are the return of "findContours" processing.
    List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
    Mat hierarchy = new Mat();

    // find contours of filtered image using openCV findContours function      
    Imgproc.findContours(temp, contours, hierarchy, Imgproc.RETR_CCOMP, Imgproc.CHAIN_APPROX_SIMPLE);

    // use moments method to find our filtered object
    boolean objectFound = false;

    if (contours.size() > 0) {
        int numObjects = contours.size();

        //if number of objects greater than MAX_NUM_OBJECTS we have a noisy filter
        if (numObjects < MAX_NUM_OBJECTS) {

            for (int i = 0; i < contours.size(); i++) {
                Moments moment = Imgproc.moments(contours.get(i));
                double area = moment.get_m00();

                //if the area is less than 20px by 20px then it is probably just noise
                //if the area is about 3/2 of the image size, it is probably just a bad filter
                //we only want the object with the largest area, so we save a reference area each
                //iteration and compare it to the area in the next iteration.
                if (area > MIN_OBJECT_AREA) {
                    Ball ball = new Ball();
                    ball.setXPos((int) (moment.get_m10() / area));
                    ball.setYPos((int) (moment.get_m01() / area));

                    if (theBall != null) {
                        ball.setType(theBall.getType());
                        ball.setColour(theBall.getColour());
                    }

                    balls.add(ball);

                    objectFound = true;
                } else {
                    objectFound = false;
                }

            }

            //let user know you found an object
            if (objectFound) {
                //draw object location on screen
                drawObject(balls, cameraFeed);
            }

        } else {
            Core.putText(cameraFeed, "TOO MUCH NOISE! ADJUST FILTER", new Point(0, 50), 1, 2,
                    new Scalar(0, 0, 255), 2);
        }

    }

}

From source file:by.zuyeu.deyestracker.core.detection.detector.BaseDetector.java

protected Rect[] detectWithClassifier(final Mat inputframe, final CascadeClassifier classifier) {
    LOG.debug("detectWithClassifier - start;");

    final Mat mRgba = new Mat();
    final Mat mGrey = new Mat();
    final MatOfRect detectedObjects = new MatOfRect();
    inputframe.copyTo(mRgba);
    inputframe.copyTo(mGrey);
    Imgproc.cvtColor(mRgba, mGrey, Imgproc.COLOR_BGR2GRAY);
    Imgproc.equalizeHist(mGrey, mGrey);
    classifier.detectMultiScale(mGrey, detectedObjects);

    LOG.debug("detectWithClassifier - end;");
    return detectedObjects.toArray();
}

From source file:by.zuyeu.deyestracker.core.video.capture.CameraFrameCapture.java

private void startCapturing() {
    LOG.trace("startCapturing() - start;");
    while (!isCanceled) {
        final Mat webcamImage = new Mat();
        capture.read(webcamImage);
        if (!webcamImage.empty()) {
            safeAddCapture(webcamImage);
        }
    }
    LOG.trace("startCapturing() - end;");
}

From source file:carmelo.CameraTask.java

@Override
protected Image call() throws Exception {

    // skip if the camera is not open
    if (!capture.isOpened()) {
        return null;
    }

    // grab a capture from the camera and store it in the frame
    Mat frame = new Mat();
    capture.read(frame);

    // check whether the capture is valid
    if (!frame.empty()) {
        // process and convert the image
        Mat dst = imgproc.apply(frame);
        return createImageFromMat(dst);
    }

    return null;
}

From source file:carmelo.JavaFXOpenCV.java

private Mat procesarImagen(Mat src) {
    Mat dst = new Mat();

    Imgproc.cvtColor(src, dst, Imgproc.COLOR_BGR2GRAY);
    Imgproc.GaussianBlur(dst, dst, new Size(7, 7), 1.5, 1.5);
    Imgproc.Canny(dst, dst, 0, 30, 3, false);

    return dst;
}