Example usage for org.opencv.core Mat setTo

List of usage examples for org.opencv.core Mat setTo

Introduction

On this page you can find example usages of org.opencv.core Mat.setTo.

Prototype

public Mat setTo(Scalar value)
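
Every example on this page passes a Scalar. As a quick orientation, here is a minimal, self-contained sketch (the class name SetToSketch is just for illustration; it assumes the OpenCV Java bindings and their native library are installed and loadable via Core.NATIVE_LIBRARY_NAME) showing a whole-matrix fill and a masked fill:

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;

public class SetToSketch {
    public static void main(String[] args) {
        // Assumes the OpenCV native library is on java.library.path
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // Fill a whole single-channel matrix with one value
        Mat m = new Mat(3, 3, CvType.CV_8UC1);
        m.setTo(new Scalar(7));

        // Masked fill: only elements with a non-zero mask entry are overwritten
        Mat mask = Mat.zeros(3, 3, CvType.CV_8UC1);
        mask.put(1, 1, 255);
        m.setTo(new Scalar(0), mask); // only the center element becomes 0

        System.out.println(m.dump());
    }
}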

Usage

From source file:logic.localizator.EyeBrowsLocalizator.java

private boolean detectEyeBrowBoundRect(MatContainer mc) {
    int eyePairW = mc.eyePairRect.width;
    int eyePairH = mc.eyePairRect.height;

    //contains eyebrow bounding rectangles
    Rect boundRectArr[] = new Rect[2];

    //for each eyebrow
    Mat binMat = new Mat();
    for (int i = 0; i < 2; ++i) {
        mc.eyeBrowMatArr[i] = mc.grayFrame.submat(mc.eyeBrowRectArr[i]);
        Scalar meanScalar = Core.mean(mc.eyeBrowMatArr[i]);
        //negate image
        Core.convertScaleAbs(mc.eyeBrowMatArr[i], mc.eyeBrowMatArr[i], 1, 255 - meanScalar.val[0]);
        Imgproc.equalizeHist(mc.eyeBrowMatArr[i], mc.eyeBrowMatArr[i]);
        Imgproc.blur(mc.eyeBrowMatArr[i], mc.eyeBrowMatArr[i], new Size(4, 4));

        //obtain binary image
        Imgproc.threshold(mc.eyeBrowMatArr[i], binMat, 70, 255, Imgproc.THRESH_BINARY_INV);

        Imgproc.morphologyEx(binMat, binMat, Imgproc.MORPH_OPEN,
                Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(4, 4)));

        //find contours
        List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
        Imgproc.findContours(binMat, contours, new Mat(), Imgproc.RETR_TREE, Imgproc.CHAIN_APPROX_SIMPLE);

        //find the biggest contour
        int maxSize = -1;
        int tmpSize = -1;
        int index = -1;

        if (contours.size() != 0) {
            maxSize = contours.get(0).toArray().length;
            tmpSize = 0;
            index = 0;
        }

        //find max contour
        for (int j = 0; j < contours.size(); ++j) {
            //if contour is vertical, exclude it 
            Rect boundRect = Imgproc.boundingRect(contours.get(j));
            if (boundRect.height > boundRect.width)
                continue;

            if ((double) boundRect.height
                    / (double) mc.eyeBrowRectArr[i].height > Parameters.eyebrowBoundRectThresh) {
                LOG.warn("Reset brow rect");
                mc.eyeBrowBoundRectArr[i] = null;
                return false;
            }

            tmpSize = contours.get(j).toArray().length;

            LOG.info("Contour " + j + "; size = " + tmpSize);

            if (tmpSize > maxSize) {
                maxSize = tmpSize;
                index = j;
            }
        }

        binMat.setTo(new Scalar(0));
        boundRectArr[i] = Imgproc.boundingRect(contours.get(index));

        //save eyebrow bounding rectangle
        mc.eyeBrowBoundRectArr[i] = new Rect(mc.eyeBrowRectArr[i].x + boundRectArr[i].x,
                mc.eyeBrowRectArr[i].y + boundRectArr[i].y, boundRectArr[i].width, boundRectArr[i].height);

        //save binary eyebrow Mat for further FP detection (skeletonization)
        mc.eyeBrowBinMatArr[0] = binMat;

        //define tracking template for eyebrow
        mc.eyeBrowTrackingTemplateArr[i] = mc.grayFrame.submat(mc.eyeBrowBoundRectArr[i]); //local rectangle
    }

    //compute eyebrow interocular distance
    mc.eyeBrowBaseDst = Math.abs(mc.eyeBrowBoundRectArr[0].x + mc.eyeBrowBoundRectArr[0].width / 2
            - (mc.eyeBrowBoundRectArr[1].x + mc.eyeBrowBoundRectArr[1].width / 2));

    LOG.info("eyeBrowBaseDst = " + mc.eyeBrowBaseDst);

    //define new bound rect centers for tracking template
    mc.eyeBrowCentersPointsArr = new Point[2];

    //save eyebrow centers
    //left-right
    Point p1 = new Point(
            mc.eyePairGlobalRect.x + mc.eyeBrowBoundRectArr[0].x + mc.eyeBrowBoundRectArr[0].width / 2,
            mc.eyePairGlobalRect.y + mc.eyeBrowBoundRectArr[0].y + mc.eyeBrowBoundRectArr[0].height / 2);

    Point p2 = new Point(
            mc.eyePairGlobalRect.x + mc.eyeBrowBoundRectArr[1].x + mc.eyeBrowBoundRectArr[1].width / 2,
            mc.eyePairGlobalRect.y + mc.eyeBrowBoundRectArr[1].y + mc.eyeBrowBoundRectArr[1].height / 2);

    Point[] pointArr = new Point[2];
    pointArr[0] = p1;
    pointArr[1] = p2;

    mc.features.eyeBrowCenterPointArr = pointArr;

    return true;
}
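
In this example setTo(new Scalar(0)) resets the shared binMat buffer to zero once the contours of the current eyebrow have been extracted; the bounding rectangle itself is computed from the contour list rather than from the cleared matrix.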

From source file:qupath.opencv.classify.NeuralNetworksClassifier.java

License:Open Source License

@Override
protected void createAndTrainClassifier() {
    // Create the required Mats
    int nMeasurements = measurements.size();
    Mat matTraining = new Mat(arrayTraining.length / nMeasurements, nMeasurements, CvType.CV_32FC1);
    matTraining.put(0, 0, arrayTraining);

    // Parse parameters
    ParameterList params = getParameterList();
    int nHidden = Math.max(2, params.getIntParameterValue("nHidden"));
    int termIterations = params.getIntParameterValue("termCritMaxIterations");
    double termEPS = params.getDoubleParameterValue("termCritEPS");
    TermCriteria crit = createTerminationCriteria(termIterations, termEPS);

    // Create & train the classifier
    classifier = createClassifier();
    ANN_MLP nnet = (ANN_MLP) classifier;
    System.out.println(nnet.getLayerSizes());
    Mat layers = new Mat(3, 1, CvType.CV_32F);
    int n = arrayTraining.length / nMeasurements;
    //      layers.put(0, 0, new float[]{nMeasurements, nHidden, pathClasses.size()});
    layers.put(0, 0, nMeasurements);
    layers.put(1, 0, nHidden); // Number of nodes in the hidden layer
    layers.put(2, 0, pathClasses.size());
    if (crit != null)
        nnet.setTermCriteria(crit);
    else
        crit = nnet.getTermCriteria();
    nnet.setLayerSizes(layers);
    //         matResponses.convertTo(matResponses, CvType.CV_32F);
    Mat matResponses = new Mat(n, pathClasses.size(), CvType.CV_32F);
    matResponses.setTo(new Scalar(0));
    for (int i = 0; i < n; i++) {
        matResponses.put(i, arrayResponses[i], 1);
    }
    nnet.setActivationFunction(ANN_MLP.SIGMOID_SYM, 1, 1);
    nnet.train(matTraining, Ml.ROW_SAMPLE, matResponses);

    //      lastDescription = getName() + "\n\nMain parameters:\n  " + DefaultPluginWorkflowStep.getParameterListJSON(params, "\n  ") + "\n\nTermination criteria:\n  " + crit.toString();
}
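
Here setTo(new Scalar(0)) zero-initialises the response matrix before the loop writes a 1 into the column matching each training sample's class, producing the one-hot target rows used to train the ANN_MLP.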

From source file:samples.SimpleSample.java

public static void main(String[] args) {

    System.load("C:\\opencv\\build\\java\\x64\\opencv_java310.dll");
    System.out.println(System.getProperty("java.library.path"));
    System.out.println("Welcome to OpenCV " + Core.VERSION);
    Mat m = new Mat(5, 10, CvType.CV_8UC1, new Scalar(0));
    System.out.println("OpenCV Mat: " + m);
    Mat mr1 = m.row(1);
    mr1.setTo(new Scalar(1));
    Mat mc5 = m.col(5);
    mc5.setTo(new Scalar(5));
    System.out.println("OpenCV Mat data:\n" + m.dump());
}
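
Because row(1) and col(5) return headers that share data with m, calling setTo on these views writes straight into the original matrix, so the final dump shows a row of 1s and a column of 5s on an otherwise zero-filled Mat.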