Example usage for org.opencv.core.Mat Mat()

Introduction

On this page you can find example usage of the default constructor of org.opencv.core.Mat, Mat().

Prototype

public Mat() 
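
The default constructor creates an empty matrix: no rows, no columns and no data buffer. It is typically used as an output container that a later OpenCV call allocates and fills. Below is a minimal, self-contained sketch of this pattern (it assumes the OpenCV Java bindings and the native library are available; the class name MatExample is only illustrative):

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;
import org.opencv.imgproc.Imgproc;

public class MatExample {
    public static void main(String[] args) {
        // Load the native OpenCV library before using any Mat functionality.
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // new Mat() creates an empty matrix: 0 rows, 0 columns, no data.
        Mat gray = new Mat();
        System.out.println("empty() = " + gray.empty()); // prints true

        // The empty Mat is then filled by an OpenCV call that allocates the output.
        Mat color = new Mat(4, 4, CvType.CV_8UC3, new Scalar(0, 128, 255));
        Imgproc.cvtColor(color, gray, Imgproc.COLOR_BGR2GRAY);
        System.out.println("after cvtColor: " + gray.rows() + "x" + gray.cols());
    }
}

Most of the examples below follow the same pattern: a fresh Mat() is passed as the destination of calls such as Imgproc.cvtColor, Imgproc.GaussianBlur or Core.PCAProject.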

Usage

From source file:car_counter.counting.opencv.OpencvCarCounter.java

License:Apache License

public OpencvCarCounter() {
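    // Create empty Mats up front; they are filled later when frames are processed.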
    image = new Mat();
    foregroundMask = new Mat();
    maskedImage = new Mat();
    backgroundSubtractor = new BackgroundSubtractorMOG();
}

From source file:cctvanalization.FXMLDocumentController.java

@Override
public void initialize(URL url, ResourceBundle rb) {
    videoCapture = new VideoCapture();
    grabbedFramesPrev = new ArrayList<>();
    grabbedFramesNext = new ArrayList<>();
    grabbedFramesTemp = new ArrayList<>();
    currentFrame = new Mat();
    previousFrame = new Mat();

    Timeline timeline = new Timeline();
    KeyFrame keyFrame = new KeyFrame(Duration.millis(10), event -> {
        if (grabbedFramesPrev.size() > 1) {
            System.out.println("Frames found");
            currentFrame = grabbedFramesPrev.get(grabbedFramesPrev.size() - 1);
            previousFrame = grabbedFramesPrev.get(grabbedFramesPrev.size() - 2);

            //            Mat subtraction = new Mat();
            //            System.out.println("Mat created");

            // Compare the two most recent frames pixel by pixel (only channel 0 is used).
            for (int r = 0; r < currentFrame.height(); r++) {
                for (int c = 0; c < currentFrame.width(); c++) {
                    double currentValue[] = currentFrame.get(r, c);
                    double previousValue[] = previousFrame.get(r, c);
                    double newValue = currentValue[0] - previousValue[0];
                    if (newValue < 0) {
                        newValue = 0;
                    }
                    //    System.out.printf("%f   ", newValue);
                    //         subtraction.put(r, c, newValue);

                    // A noticeable change at this pixel: count changed pixels in the 10x10 neighborhood above and to the left of it.
                    if (newValue > 10) {
                        int changes = 0;
                        for (int i = r - 10; i <= r; i++) {
                            for (int j = c - 10; j <= c; j++) {
                                if (i >= 0 && j >= 0) {
                                    double currCheckValue[] = currentFrame.get(i, j);
                                    double prevCheckValue[] = previousFrame.get(i, j);
                                    double checkValue = currCheckValue[0] - prevCheckValue[0];
                                    if (checkValue < 0) {
                                        checkValue = 0;
                                    }
                                    if (checkValue > 10) {
                                        changes = changes + 1;
                                    }
                                }
                            }
                        }
                        if (changes > 40) {
                            System.out.println("ChangeFound");
                            if (alarmCont == 0) {
                                alarmCont = 1;
                            }
                            if (alarmCont == 1) {
                                ringAlarm();
                                saveFrame(currentFrame, "changedFrame", "png");
                                alarmCont = 2;
                            }
                            break;
                        }
                    }
                }
                //   System.out.printf("\n");
            }
        }
    });
    timeline.getKeyFrames().add(keyFrame);
    timeline.setCycleCount(Timeline.INDEFINITE);
    timeline.play();
}

From source file:cctvanalization.FXMLDocumentController.java

private Image grabFrame() {
    if (applicationShouldClose) {
        if (videoCapture.isOpened()) {
            videoCapture.release();
        }
        scheduledExecutorService.shutdown();
    }

    Image imageToShow = null;
    Mat frame = new Mat();
    //     Mat prevFrame = new Mat(grabbedImagesPrev.get(grabbedImagesPrev.size() - 1));
    int frameNum = 0;
    if (videoCapture.isOpened()) {
        try {
            videoCapture.read(frame);

            if (!frame.empty()) {
                Imgproc.cvtColor(frame, frame, Imgproc.COLOR_BGR2GRAY);

                MatOfByte buffer = new MatOfByte();
                Imgcodecs.imencode(".png", frame, buffer);
                imageToShow = new Image(new ByteArrayInputStream(buffer.toArray()));

                // Keep a sliding window of at most 10 frames: drop the oldest frame before appending the new one.
                grabbedFramesTemp.removeAll(grabbedFramesTemp);
                if (grabbedFramesPrev.size() < 10) {
                    grabbedFramesPrev.add(frame);
                } else {
                    for (int i = 1; i < grabbedFramesPrev.size(); i++) {
                        grabbedFramesTemp.add(grabbedFramesPrev.get(i));
                    }
                    grabbedFramesPrev.removeAll(grabbedFramesPrev);
                    for (int i = 0; i < grabbedFramesTemp.size(); i++) {
                        grabbedFramesPrev.add(grabbedFramesTemp.get(i));
                    }
                    grabbedFramesPrev.add(frame);
                }
            }

        } catch (Exception e) {
            System.err.println(e);
        }
    }

    return imageToShow;
}

From source file:ch.zhaw.facerecognition.Activities.AddPersonPreviewActivity.java

License:Open Source License

@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    Mat imgRgba = inputFrame.rgba();
    Mat imgCopy = new Mat();
    imgRgba.copyTo(imgCopy);
    // Selfie / Mirror mode
    if (front_camera) {
        Core.flip(imgRgba, imgRgba, 1);
    }

    long time = new Date().getTime();
    if ((method == MANUALLY) || (method == TIME) && (lastTime + timerDiff < time)) {
        lastTime = time;

        // Check that exactly one face is found; skip if none or more than one are found.
        Mat img = ppF.getCroppedImage(imgCopy);
        if (img != null) {
            Rect[] faces = ppF.getFacesForRecognition();
            // Only proceed if exactly 1 face has been detected; ignore if 0 or more than 1 faces have been detected
            if ((faces != null) && (faces.length == 1)) {
                faces = MatOperation.rotateFaces(imgRgba, faces, ppF.getAngleForRecognition());
                if (((method == MANUALLY) && capturePressed) || (method == TIME)) {
                    MatName m = new MatName(name + "_" + total, img);
                    if (folder.equals("Test")) {
                        String wholeFolderPath = fh.TEST_PATH + name + "/" + subfolder;
                        new File(wholeFolderPath).mkdirs();
                        fh.saveMatToImage(m, wholeFolderPath + "/");
                    } else {
                        String wholeFolderPath = fh.TRAINING_PATH + name;
                        new File(wholeFolderPath).mkdirs();
                        fh.saveMatToImage(m, wholeFolderPath + "/");
                    }

                    for (int i = 0; i < faces.length; i++) {
                        MatOperation.drawRectangleAndLabelOnPreview(imgRgba, faces[i], String.valueOf(total),
                                front_camera);
                    }

                    total++;

                    // Stop after numberOfPictures (settings option)
                    if (total >= numberOfPictures) {
                        Intent intent = new Intent(getApplicationContext(), AddPersonActivity.class);
                        intent.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
                        startActivity(intent);
                    }
                    capturePressed = false;
                } else {
                    for (int i = 0; i < faces.length; i++) {
                        MatOperation.drawRectangleOnPreview(imgRgba, faces[i], front_camera);
                    }
                }
            }
        }
    }

    return imgRgba;
}

From source file:ch.zhaw.facerecognition.Activities.RecognitionActivity.java

License:Open Source License

public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    Mat imgRgba = inputFrame.rgba();
    Mat img = new Mat();
    imgRgba.copyTo(img);
    List<Mat> images = ppF.getProcessedImage(img);
    Rect[] faces = ppF.getFacesForRecognition();
    // Selfie / Mirror mode
    if (front_camera) {
        Core.flip(imgRgba, imgRgba, 1);
    }
    if (images == null || images.size() == 0 || faces == null || faces.length == 0
            || !(images.size() == faces.length)) {
        // skip
        return imgRgba;
    } else {
        faces = MatOperation.rotateFaces(imgRgba, faces, ppF.getAngleForRecognition());
        for (int i = 0; i < faces.length; i++) {
            MatOperation.drawRectangleAndLabelOnPreview(imgRgba, faces[i], rec.recognize(images.get(i), ""),
                    front_camera);
        }
        return imgRgba;
    }
}

From source file:ch.zhaw.facerecognition.Activities.TrainingActivity.java

License:Open Source License

@Override
public void onResume() {
    super.onResume();

    final Handler handler = new Handler(Looper.getMainLooper());
    thread = new Thread(new Runnable() {
        public void run() {
            if (!Thread.currentThread().isInterrupted()) {
                PreProcessorFactory ppF = new PreProcessorFactory();
                String algorithm = PreferencesHelper.getClassificationMethod();

                FileHelper fileHelper = new FileHelper();
                fileHelper.createDataFolderIfNotExsiting();
                final File[] persons = fileHelper.getTrainingList();
                if (persons.length > 0) {
                    Recognition rec = RecognitionFactory.getRecognitionAlgorithm(Recognition.TRAINING,
                            algorithm);
                    for (File person : persons) {
                        if (person.isDirectory()) {
                            File[] files = person.listFiles();
                            int counter = 1;
                            for (File file : files) {
                                if (FileHelper.isFileAnImage(file)) {
                                    Mat imgRgb = Imgcodecs.imread(file.getAbsolutePath());
                                    Imgproc.cvtColor(imgRgb, imgRgb, Imgproc.COLOR_BGRA2RGBA);
                                    Mat processedImage = new Mat();
                                    imgRgb.copyTo(processedImage);
                                    List<Mat> images = ppF.getProcessedImage(processedImage);
                                    if (images == null || images.size() > 1) {
                                        // No face or more than one face detected --> cannot use this file for training
                                        continue;
                                    } else {
                                        processedImage = images.get(0);
                                    }
                                    if (processedImage.empty()) {
                                        continue;
                                    }
                                    // The last token is the name --> Folder name = Person name
                                    String[] tokens = file.getParent().split("/");
                                    final String name = tokens[tokens.length - 1];

                                    MatName m = new MatName("processedImage", processedImage);
                                    fileHelper.saveMatToImage(m, FileHelper.DATA_PATH);

                                    rec.addImage(processedImage, name, false);

                                    //                                      fileHelper.saveCroppedImage(imgRgb, ppF, file, name, counter);

                                    // Update screen to show the progress
                                    final int counterPost = counter;
                                    final int filesLength = files.length;
                                    progress.post(new Runnable() {
                                        @Override
                                        public void run() {
                                            progress.append("Image " + counterPost + " of " + filesLength
                                                    + " from " + name + " imported.\n");
                                        }
                                    });

                                    counter++;
                                }
                            }
                        }
                    }
                    final Intent intent = new Intent(getApplicationContext(), MainActivity.class);
                    intent.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
                    if (rec.train()) {
                        intent.putExtra("training", "Training successful");
                    } else {
                        intent.putExtra("training", "Training failed");
                    }
                    handler.post(new Runnable() {
                        @Override
                        public void run() {
                            startActivity(intent);
                        }
                    });
                } else {
                    Thread.currentThread().interrupt();
                }
            }
        }
    });
    thread.start();
}

From source file:ch.zhaw.facerecognitionlibrary.Helpers.FileHelper.java

License:Open Source License

public void saveCroppedImage(Mat img, PreProcessorFactory ppF, File file, String name, int number) {
    // Save cropped image if not already existing
    File croppedFile = new File(file.getParentFile().getAbsolutePath() + "/cropped/" + name + "_" + number);
    if (!croppedFile.exists()) {
        // Create folder if not existing
        File croppedFolder = new File(file.getParentFile().getAbsolutePath() + "/cropped");
        croppedFolder.mkdir();
        Mat copy = new Mat();
        img.copyTo(copy);
        copy = ppF.getCroppedImage(copy);
        MatName mat = new MatName(name + "_" + number, copy);
        saveMatToImage(mat, file.getParentFile().getAbsolutePath() + "/cropped/");
    }
}

From source file:ch.zhaw.facerecognitionlibrary.PreProcessor.Contours.DifferenceOfGaussian.java

License:Open Source License

public PreProcessor preprocessImage(PreProcessor preProcessor) {
    List<Mat> images = preProcessor.getImages();
    List<Mat> processed = new ArrayList<Mat>();
    for (Mat img : images) {
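        // Difference of Gaussians: blur the image with two different Gaussian kernels and take the absolute difference.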
        Mat gauss1 = new Mat();
        Mat gauss2 = new Mat();
        Imgproc.GaussianBlur(img, gauss1, size1, sigma1);
        Imgproc.GaussianBlur(img, gauss2, size2, sigma2);
        Core.absdiff(gauss1, gauss2, img);
        processed.add(img);
    }
    preProcessor.setImages(processed);
    return preProcessor;
}

From source file:ch.zhaw.facerecognitionlibrary.Recognition.Eigenfaces.java

License:Open Source License

private void computePhi() {
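    // Mean-centre the data matrix: Phi = Gamma - Psi, with Psi repeated once per row of Gamma.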
    Mat Psi_repeated = new Mat();
    Core.repeat(Psi, Gamma.rows(), 1, Psi_repeated);
    Core.subtract(Gamma, Psi_repeated, Phi);
}

From source file:ch.zhaw.facerecognitionlibrary.Recognition.Eigenfaces.java

License:Open Source License

public Mat getFeatureVector(Mat original) {
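    // Core.PCAProject subtracts the mean (Psi) and projects the sample onto the eigenvector basis.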
    Mat projected = new Mat();
    Core.PCAProject(original, Psi, eigVectors, projected);
    return projected;
}