Example usage for org.opencv.core.Mat.empty()

List of usage examples for org.opencv.core.Mat.empty()

Introduction

On this page you can find example usages of org.opencv.core.Mat.empty().

Prototype

public boolean empty() 
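
Before the project examples below, here is a minimal, self-contained sketch of the typical pattern: empty() checks whether a Mat actually holds data, most commonly right after Imgcodecs.imread, which returns an empty Mat when the file cannot be read. The image path below is hypothetical.

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.imgcodecs.Imgcodecs;

public class MatEmptyExample {
    public static void main(String[] args) {
        // Load the native OpenCV library before using any Mat functionality
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // Hypothetical path; imread returns an empty Mat if the file cannot be read
        Mat img = Imgcodecs.imread("/path/to/image.png");

        if (img.empty()) {
            System.err.println("Could not load image, skipping further processing.");
            return;
        }
        System.out.println("Loaded image: " + img.rows() + "x" + img.cols());
    }
}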

Usage

From source file:ch.zhaw.facerecognition.Activities.TrainingActivity.java

License:Open Source License

@Override
public void onResume() {
    super.onResume();

    final Handler handler = new Handler(Looper.getMainLooper());
    thread = new Thread(new Runnable() {
        public void run() {
            if (!Thread.currentThread().isInterrupted()) {
                PreProcessorFactory ppF = new PreProcessorFactory();
                String algorithm = PreferencesHelper.getClassificationMethod();

                FileHelper fileHelper = new FileHelper();
                fileHelper.createDataFolderIfNotExsiting();
                final File[] persons = fileHelper.getTrainingList();
                if (persons.length > 0) {
                    Recognition rec = RecognitionFactory.getRecognitionAlgorithm(Recognition.TRAINING,
                            algorithm);
                    for (File person : persons) {
                        if (person.isDirectory()) {
                            File[] files = person.listFiles();
                            int counter = 1;
                            for (File file : files) {
                                if (FileHelper.isFileAnImage(file)) {
                                    Mat imgRgb = Imgcodecs.imread(file.getAbsolutePath());
                                    Imgproc.cvtColor(imgRgb, imgRgb, Imgproc.COLOR_BGRA2RGBA);
                                    Mat processedImage = new Mat();
                                    imgRgb.copyTo(processedImage);
                                    List<Mat> images = ppF.getProcessedImage(processedImage);
                                    if (images == null || images.size() > 1) {
                                        // No face or more than one face detected --> cannot use this file for training
                                        continue;
                                    } else {
                                        processedImage = images.get(0);
                                    }
                                    if (processedImage.empty()) {
                                        continue;
                                    }
                                    // The last token is the name --> Folder name = Person name
                                    String[] tokens = file.getParent().split("/");
                                    final String name = tokens[tokens.length - 1];

                                    MatName m = new MatName("processedImage", processedImage);
                                    fileHelper.saveMatToImage(m, FileHelper.DATA_PATH);

                                    rec.addImage(processedImage, name, false);

                                    //                                      fileHelper.saveCroppedImage(imgRgb, ppF, file, name, counter);

                                    // Update screen to show the progress
                                    final int counterPost = counter;
                                    final int filesLength = files.length;
                                    progress.post(new Runnable() {
                                        @Override
                                        public void run() {
                                            progress.append("Image " + counterPost + " of " + filesLength
                                                    + " from " + name + " imported.\n");
                                        }
                                    });

                                    counter++;
                                }
                            }
                        }
                    }
                    final Intent intent = new Intent(getApplicationContext(), MainActivity.class);
                    intent.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
                    if (rec.train()) {
                        intent.putExtra("training", "Training successful");
                    } else {
                        intent.putExtra("training", "Training failed");
                    }
                    handler.post(new Runnable() {
                        @Override
                        public void run() {
                            startActivity(intent);
                        }
                    });
                } else {
                    Thread.currentThread().interrupt();
                }
            }
        }
    });
    thread.start();
}

From source file:com.davidmiguel.gobees.monitoring.algorithm.processors.BackgroundSubtractor.java

License:Open Source License

@Override
public Mat process(@NonNull Mat frame) {
    if (frame.empty()) {
        Log.e("Invalid input frame.");
        return null;
    }
    Mat foreground = new Mat();
    // Apply background subtraction
    mog.apply(frame, foreground);
    return foreground;
}

From source file:com.davidmiguel.gobees.monitoring.algorithm.processors.Blur.java

License:Open Source License

@Override
public Mat process(@NonNull Mat frame) {
    if (frame.empty()) {
        Log.e("Invalid input frame.");
        return null;
    }
    Mat tmp = frame.clone();
    // Apply Gaussian blur
    for (int i = 0; i < REPETITIONS; i++) {
        Imgproc.GaussianBlur(tmp, tmp, new Size(KERNEL_SIZE, KERNEL_SIZE), 0);
    }
    return tmp;
}

From source file:com.davidmiguel.gobees.monitoring.algorithm.processors.ContoursFinder.java

License:Open Source License

@Override
public Mat process(@NonNull Mat frame) {
    if (frame.empty()) {
        Log.e("Invalid input frame.");
        return null;
    }
    Mat tmp = frame.clone();
    // Finding outer contours
    contourList.clear();
    Imgproc.findContours(tmp, contourList, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);
    // Filter bees
    Mat contours = new Mat(tmp.rows(), tmp.cols(), CvType.CV_8UC3);
    tmp.release();
    double area;
    Scalar color;
    numBees = 0;
    for (int i = 0; i < contourList.size(); i++) {
        area = Imgproc.contourArea(contourList.get(i));
        if (area > minArea && area < maxArea) {
            color = GREEN;
            numBees++;
        } else {
            color = RED;
        }
        // Draw contour
        Imgproc.drawContours(contours, contourList, i, color, -1);
    }
    return contours;
}

From source file:com.davidmiguel.gobees.monitoring.algorithm.processors.Morphology.java

License:Open Source License

@Override
public Mat process(@NonNull Mat frame) {
    if (frame.empty()) {
        Log.e("Invalid input frame.");
        return null;
    }
    Mat tmp = frame.clone();
    // Step 1: erode to remove legs
    Imgproc.erode(tmp, tmp, KERNEL3);
    // Step 2: dilate to join bodies and heads
    Imgproc.dilate(tmp, tmp, KERNEL2);
    for (int i = 0; i < REPETITIONS_DILATE; i++) {
        Imgproc.dilate(tmp, tmp, kernelDilate);
    }
    // Step 3: erode to recover original size
    Imgproc.erode(tmp, tmp, KERNEL1);
    for (int i = 0; i < REPETITIONS_ERODE; i++) {
        Imgproc.erode(tmp, tmp, kernelErode);
    }
    return tmp;
}

From source file:com.ibm.streamsx.edgevideo.device.NonEdgentFaceDetectApp.java

License:Open Source License

/**
 * Do the continuous face detection processing and render images.
 * @throws Exception
 */
@Override
protected void runFaceDetection() throws Exception {

    while (true) {

        // Grab a frame
        stats.getFrame.markStart();
        Mat rawRgbFrame = camera.grabFrame();
        stats.getFrame.markEnd();

        // Process it
        if (!rawRgbFrame.empty()) {

            stats.imgProcess.markStart();
            FacesData facesData = faceDetector.detectFaces(rawRgbFrame);
            stats.imgProcess.markEnd();

            //System.out.println(now()+" - Detected faces : "+data.faces.size());

            // render images
            stats.render.markStart();
            renderImages(facesData);
            stats.render.markEnd();

            // Note: lacks publish data to Enterprise IoT hub
        }

        stats.reportFrameProcessed();

        // Note: lacks ability to dynamically control the poll rate
        // Note the following yields "with fixed delay" vs Topology.poll()'s "at fixed rate" 
        Thread.sleep(TimeUnit.MILLISECONDS.convert(sensorPollValue, sensorPollUnit));
    }

}

From source file:com.ibm.streamsx.edgevideo.device.wipRecognition.WIP_NonEdgentFaceDetectApp.java

License:Open Source License

/**
 * Do the continuous face detection processing and render images.
 * @throws Exception
 */
@Override
protected void runFaceDetection() throws Exception {

    while (true) {

        // Grab a frame
        Mat rawRgbFrame = camera.grabFrame();

        // Process it
        if (!rawRgbFrame.empty()) {

            FacesData data = faceDetector.detectFaces(rawRgbFrame);

            //System.out.println(now()+" - Detected faces : "+data.faces.size());

            doFaceRecognition(data);

            // render images
            renderImages(data.rgbFrame, data.faceRects, data.faces);

            // Note: lacks publish data to Enterprise IoT hub
        }

        // Note: lacks ability to dynamically control the poll rate
        Thread.sleep(TimeUnit.MILLISECONDS.convert(sensorPollValue, sensorPollUnit));
    }

}

From source file:com.joravasal.keyface.PCAfaceRecog.java

License:Open Source License

/**
 * Adds the last image (when numerically ordered) to the array of images
 * and recalculates the PCA subspace.
 *
 * PCA won't work properly if newimage is true.
 * 
 * @return A boolean that specifies if everything went fine.
 * */
public boolean updateData(boolean newimage) {
    if (newimage) { // There is a bug in this branch when newimage is true.
        // It is probably the matrix.create() call: once PCA is done, the projection comes out wrong.
        // So this branch is never used at the moment, and newimage should always be passed as false.
        // The other branch uses more instructions, but it must stay this way until a fix is found.
        numImages++;
        try {
            File directory = new File(imagesDir);
            if (!directory.exists()) {
                throw new IOException("Path to file could not be opened.");
            }
            String lfile = imagesDir + "/Face" + (numImages - 1) + ".png";
            Mat img = Highgui.imread(lfile, 0);
            if (img.empty())
                throw new IOException("Opening image number " + (numImages - 1) + " failed.");
            //we adapt the old matrices to new sizes
            sum.create(numImages, imgLength, CvType.CV_32FC1);
            projectedTraining.create(numImages, numImages, CvType.CV_32FC1);

            //and add the new image to the array of images
            img.reshape(1, 1).convertTo(sum.row(numImages - 1), CvType.CV_32FC1);

        } catch (IOException e) {
            System.err.println(e.getMessage());
            return false;
        }
    } else {
        numImages = KeyFaceActivity.prefs.getInt("savedFaces", numImages);
        sum = new Mat(numImages, imgLength, CvType.CV_32FC1);
        projectedTraining = new Mat(numImages, numImages, CvType.CV_32FC1);

        for (int i = 0; i < numImages; i++) { // opens each image and appends it as a row in the matrix sum
            String lfile = imagesDir + "/Face" + i + ".png";
            try {
                Mat img = Highgui.imread(lfile, 0);
                //Other way of loading image data
                //Mat img = Utils.bitmapToMat(BitmapFactory.decodeFile(lfile));
                if (img.empty())
                    throw new IOException("Opening image number " + i + " failed.");
                //We add the image to the corresponding row in the matrix of images (sum)
                img.reshape(1, 1).convertTo(sum.row(i), CvType.CV_32FC1);
            } catch (IOException e) {
                System.err.println(e.getMessage());
                return false;
            }
        }
    }

    if (numImages > 1) {
        average = new Mat();
        eigenfaces = new Mat();
        Core.PCACompute(sum, average, eigenfaces);
        for (int i = 0; i < numImages; i++) {
            Core.PCAProject(sum.row(i), average, eigenfaces, projectedTraining.row(i));
        }
    }

    return true;
}

From source file:com.oetermann.imageclassifier.MatchFinderWrapper.java

License:Open Source License

public int bestMatch(Mat queryDescriptors, int minMatches) {
    queryDescriptors.convertTo(queryDescriptors, CvType.CV_32F);
    MatOfDMatch matches = new MatOfDMatch();
    matcher.match(queryDescriptors, matches);
    queryDescriptors.empty(); // Attempt to stop GC from releasing mat
    Arrays.fill(matchesPerImage, 0);
    DMatch[] matchesArray = matches.toArray();
    for (DMatch match : matchesArray) {
        //            match.distance;
        if (match.distance > 1) {
            match.distance = match.distance / 1000;
        }
        if (match.distance < 1) {
            matchesPerImage[match.imgIdx] += 1 - match.distance;
        }
        //            matchesPerImage[match.imgIdx] += 1;
        //            System.out.println("MatchDistance: "+match.distance + "\t\tImage: "+ imageNames[match.imgIdx]);
    }
    int index = 0;
    for (int i = 0; i < matchesPerImage.length; i++) {
        //            System.out.println(matchesPerImage[i] + "\t\tmatches for image " + imageNames[i]);
        if (matchesPerImage[i] > matchesPerImage[index]) {
            index = i;
        }
    }
    //        System.out.println("Total Matches: "+matches.size());
    if (matchesPerImage[index] >= minMatches) {
        return index;
    }
    return -1;
}

From source file:com.opencv.mouse.MouseMainFrame.java

private void jToggleButton1ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jToggleButton1ActionPerformed
    try {
        robot = new Robot();
    } catch (AWTException e) {
    }
    t = new Thread() {
        public void run() {

            MatToBufImg matToBufferedImageConverter = new MatToBufImg(); //Utility class to convert Mat to Java's BufferedImage

            webCam = new VideoCapture(0);
            if (!webCam.isOpened()) {
                System.out.println("Kamera Ak Deil..!");
            } else
                System.out.println("Kamera Ald --> " + webCam.toString());

            Mat webcam_image = new Mat(480, 640, CvType.CV_8UC3);
            Mat hsv_image = new Mat(webcam_image.cols(), webcam_image.rows(), CvType.CV_8UC3);
            thresholded = new Mat(webcam_image.cols(), webcam_image.rows(), CvType.CV_8UC3,
                    new Scalar(255, 255, 255));
            if (webCam.isOpened()) {
                try {
                    Thread.sleep(1000);
                } catch (InterruptedException ex) {

                }

                while (true) {
                    try {
                        webCam.read(webcam_image);
                    } catch (Exception e) {
                        System.out.println("Web Cam Kapal !");
                    }

                    if (!webcam_image.empty()) {
                        try {
                            Thread.sleep(10);
                        } catch (InterruptedException ex) {

                        }
                        // Mat inRangeResim = webcam_image.clone();
                        /*
                        Mat inRangeResim = webcam_image.clone();
                        matToBufferedImageConverter.setMatrix(inRangeResim, ".jpg");
                        image =matToBufferedImageConverter.getBufferedImage();
                        Highgui.imwrite("D:\\bitirme.jpg", inRangeResim);
                        */

                        //       MatOfRect faceDetections = new MatOfRect();
                        Imgproc.cvtColor(webcam_image, hsv_image, Imgproc.COLOR_BGR2HSV);
                        //black hsv range 0 0 0 - 180 45 100
                        //hsv blue   Core.inRange(webcam_image, new Scalar(75,63,40), new Scalar(118,255,255), webcam_image);
                        //rgb blue        // Core.inRange(webcam_image, new Scalar(50,0,0), new Scalar(255,0,0), webcam_image);
                        //orange hsv      Core.inRange(webcam_image, new Scalar(5,50,50), new Scalar(15,255,255), webcam_image);
                        //Core.inRange(webcam_image, new Scalar(80,50,50), new Scalar(140,255,255), webcam_image);
                        //        Core.inRange(webcam_image, new Scalar(29,0,24), new Scalar(30,155,155), webcam_image);

                        //hsv blue
                        //                       jSliderHmin.setValue(75);
                        //                       jSliderSmin.setValue(63);
                        //                       jSliderVmin.setValue(40);
                        //                       jSliderHmax.setValue(118);
                        //                       jSliderSmax.setValue(255);
                        //                       jSliderVmax.setValue(255);
                        //
                        //                       jSliderHmin.setValue(0);
                        //                       jSliderSmin.setValue(0);
                        //                       jSliderVmin.setValue(0);
                        //                       jSliderHmax.setValue(179);
                        //                       jSliderSmax.setValue(39);
                        //                       jSliderVmax.setValue(120);
                        Core.inRange(hsv_image, new Scalar(100, 97, 206), new Scalar(120, 255, 255),
                                thresholded);
                        Imgproc.dilate(thresholded, thresholded, element);

                        Imgproc.erode(thresholded, thresholded, element);
                        Imgproc.dilate(thresholded, thresholded, element);

                        Imgproc.erode(thresholded, thresholded, element);

                        List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
                        Imgproc.findContours(thresholded, contours, new Mat(), Imgproc.RETR_LIST,
                                Imgproc.CHAIN_APPROX_SIMPLE, new Point(0, 0));
                        Imgproc.drawContours(thresholded, contours, -1, new Scalar(255.0, 255.0, 255.0), 5);

                        for (int i = 0; i < contours.size(); i++) {
                            //  System.out.println(Imgproc.contourArea(contours.get(i)));
                            //    if (Imgproc.contourArea(contours.get(i)) > 1 ){
                            Rect rect = Imgproc.boundingRect(contours.get(i));
                            kesit = thresholded.submat(rect);
                            //System.out.println(rect.height);
                            // if (rect.height > 20 && rect.height <30 && rect.width < 30 && rect.width >20){
                            //  System.out.println(rect.x +","+rect.y+","+rect.height+","+rect.width);
                            Core.rectangle(webcam_image, new Point(rect.x, rect.y),
                                    new Point(rect.x + rect.width, rect.y + rect.height),
                                    new Scalar(0, 0, 255));

                            //}
                            //}
                            if (rect.height > 15 && rect.width > 15) {
                                System.out.println(rect.x + "\n" + rect.y);
                                Core.circle(webcam_image, new Point(rect.x, rect.y), i, new Scalar(0, 255, 0));
                                robot.mouseMove((int) (rect.x * 3), (int) (rect.y * 2.25));
                            }

                        }

                        //   Imgproc.cvtColor(webcam_image, webcam_image, Imgproc.COLOR_HSV2BGR);
                        //  hsv_image.convertTo(hsv_image, CvType.CV_32F);

                        //   Imgproc.Canny(thresholded, thresholded, 10, 20);
                        //   Core.bitwise_and(thresholded, webcam_image, webcam_image);

                        //this works

                        //    Imgproc.cvtColor(thresholded, thresholded, Imgproc.COLOR_GRAY2BGR);
                        //  Core.bitwise_and(thresholded, webcam_image, webcam_image);

                        //    webcam_image.copyTo(hsv_image, thresholded);
                        //                            System.out.println("<------------------------------>");
                        //                            System.out.println("BGR: " +webcam_image.channels()+"  Size : "+webcam_image.size());
                        //                            System.out.println("HSV :"+hsv_image.channels()+"  Size: "+hsv_image.size());
                        //                            System.out.println("Thresold :"+thresholded.channels()+"  Size : "+thresholded.size());
                        //                            System.out.println("<------------------------------>");
                        //
                        matToBufferedImageConverter.setMatrix(webcam_image, ".jpg");

                        image = matToBufferedImageConverter.getBufferedImage();
                        g.drawImage(image, 0, 0, webcam_image.cols(), webcam_image.rows(), null);

                    } else {

                        System.out.println("Grnt yok!");
                        break;
                    }
                }
                //           webCam.release();
            }

        }
    };
    threadDurum = true;
    t.start();
}