Example usage for org.opencv.core Mat Mat

List of usage examples for org.opencv.core Mat Mat

Introduction

In this page you can find the example usage for org.opencv.core Mat Mat.

Prototype

public Mat() 

Source Link

Usage

From source file:com.example.colordetector.CamMainActivity.java

License:Apache License

/**
 * Camera-preview start callback: allocates the working Mats, caches the frame
 * dimensions, reads the user preferences (shooting method, auto-shoot mode,
 * selected color) and installs the HSV threshold range for that color.
 *
 * @param width  preview frame width in pixels
 * @param height preview frame height in pixels
 */
public void onCameraViewStarted(int width, int height) {
    // Initialize the matrices that will hold the working frames.
    rgbaFrame = new Mat();
    rgbFrame = new Mat();
    hsvFrame = new Mat();
    filteredFrame = new Mat();
    inRangeMask = new Mat();
    hChannel = new Mat();

    // Cache the frame dimensions.
    camHeight = height;
    camWidth = width;
    frameDim = height * width;

    // Variables for countdown in automatic shoot setting.
    timeToElapse = 24; // 24 wait cycles of 75ms before taking the picture
    osdSecond = ""; // string containing the countdown display screen

    // Getting preferences list.
    SharedPreferences sharedPref = PreferenceManager.getDefaultSharedPreferences(this);
    String colorSelected = sharedPref.getString(KEY_PREF_COLOR, "");

    // Shooting method chosen by the user; manual mode shows the shutter button.
    if (sharedPref.getString(KEY_PREF_METHOD, "").compareTo("manual") == 0) {
        methodAuto = false;
        button.setVisibility(SurfaceView.VISIBLE);
    } else {
        methodAuto = true;
        button.setVisibility(SurfaceView.GONE);
        // Automatic capture sub-mode: countdown or immediate.
        countDown = sharedPref.getString(KEY_PREF_AUTOMATIC_SHOOT, "").compareTo("countdown") == 0;
    }

    // Hue bounds per color; OpenCV hue is 0-179 (color wheel degrees / 2).
    // Unknown/empty selection leaves thresMin/thresMax untouched, as before.
    switch (colorSelected) {
    case "red": // H = 0 | 360
        setHueRange(0, 5);
        break;
    case "yellow": // H = 60 / 2 = 30
        setHueRange(25, 35);
        break;
    case "green": // H = 120 / 2 = 60
        setHueRange(40, 89);
        break;
    case "lightBlue": // H = 180 / 2 = 90
        setHueRange(90, 109);
        break;
    case "blue": // H = 240 / 2 = 120
        setHueRange(110, 128);
        break;
    case "magenta": // H = 300 / 2 = 150
        setHueRange(140, 170);
        break;
    default:
        // no color selected: keep existing thresholds
        break;
    }
}

/**
 * Sets the HSV threshold fields to the range [hMin, 50, 50]..[hMax, 255, 255],
 * the saturation/value bounds shared by every supported color.
 *
 * @param hMin lower hue bound (OpenCV 0-179 scale)
 * @param hMax upper hue bound (OpenCV 0-179 scale)
 */
private void setHueRange(int hMin, int hMax) {
    double[] thresArray = new double[3];
    setColorValues(thresArray, hMin, 50, 50); // Min
    thresMin.set(thresArray);
    setColorValues(thresArray, hMax, 255, 255); // Max
    thresMax.set(thresArray);
}

From source file:com.example.root.dipproj.MainActivity.java

@Override
/**
 * Handles results from the two image-source flows:
 * requestCode 1 — camera capture: locates "temp.jpg" on external storage,
 * displays it, deletes the temp file and re-saves it as a timestamped JPEG;
 * requestCode 2 — gallery pick: resolves the content URI to a file path,
 * runs the edge-extraction pipeline (gray -> blur -> erode/dilate -> Sobel ->
 * absdiff -> Otsu threshold) and displays/saves the result.
 */
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    if (resultCode == RESULT_OK) {
        if (requestCode == 1) {
            File f = new File(Environment.getExternalStorageDirectory().toString());
            File[] children = f.listFiles();
            if (children != null) { // listFiles() returns null on I/O error
                for (File temp : children) {
                    if (temp.getName().equals("temp.jpg")) {
                        f = temp;
                        break;
                    }
                }
            }
            try {
                BitmapFactory.Options bitmapOptions = new BitmapFactory.Options();
                Bitmap bitmap = BitmapFactory.decodeFile(f.getAbsolutePath(), bitmapOptions);
                viewImage.setImageBitmap(bitmap);
                String path = android.os.Environment.getExternalStorageDirectory() + File.separator + "Phoenix"
                        + File.separator + "default";
                if (!f.delete()) {
                    Log.w("onActivityResult", "could not delete " + f.getAbsolutePath());
                }
                File file = new File(path, String.valueOf(System.currentTimeMillis()) + ".jpg");
                // try-with-resources guarantees the stream is closed even when
                // compress()/flush() throws (the original leaked it on failure).
                try (OutputStream outFile = new FileOutputStream(file)) {
                    bitmap.compress(Bitmap.CompressFormat.JPEG, 85, outFile);
                    outFile.flush();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            } catch (Exception e) {
                e.printStackTrace();
            }
        } else if (requestCode == 2) {
            Uri selectedImage = data.getData();
            String[] filePath = { MediaStore.Images.Media.DATA };
            Cursor c = getContentResolver().query(selectedImage, filePath, null, null, null);
            if (c == null) {
                return; // content provider returned no cursor
            }
            String picturePath;
            try {
                c.moveToFirst();
                int columnIndex = c.getColumnIndex(filePath[0]);
                picturePath = c.getString(columnIndex);
            } finally {
                c.close(); // always release the cursor (original leaked it on exception)
            }
            Bitmap thumbnail = (BitmapFactory.decodeFile(picturePath));
            Log.w("path of image", picturePath + "");
            Mat imgMat = new Mat();
            Mat imgMat2 = new Mat();
            Mat imgMat3 = new Mat();
            Utils.bitmapToMat(thumbnail, imgMat);
            Imgproc.cvtColor(imgMat, imgMat, Imgproc.COLOR_RGB2GRAY);
            org.opencv.core.Size s = new Size(3, 3);
            // NOTE: the original called Imgproc.createCLAHE() and discarded the
            // result — a no-op, removed here.
            Imgproc.GaussianBlur(imgMat, imgMat, s, 2);
            Imgproc.erode(imgMat, imgMat2, Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(2, 2)));
            Imgproc.dilate(imgMat2, imgMat3, Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(2, 2)));
            Imgproc.Sobel(imgMat, imgMat, CvType.CV_8UC1, 1, 0);
            Core.absdiff(imgMat, imgMat3, imgMat);
            Imgproc.threshold(imgMat, imgMat, 123, 255, Imgproc.THRESH_OTSU);
            Utils.matToBitmap(imgMat, thumbnail);
            viewImage.setImageBitmap(thumbnail);
            saveBitmaptoSDCard(thumbnail);
        }
    }
}

From source file:com.example.yannic.remotefacedetection.agent.FaceDetectionAgent.java

License:Open Source License

/**
 * Recognizes the face in {@code inputFace}: decodes the JPEG bytes, resizes
 * to the recognizer's expected 92x112 format, writes it to disk for the
 * recognizer, and returns the matching training image paired with the
 * request id. On I/O failure the returned face bytes are empty.
 *
 * @param id        caller-supplied request id, echoed back in the result
 * @param inputFace JPEG-encoded face image bytes
 * @return future holding (recognized training-image bytes, request id)
 */
public Tuple2Future<byte[], Integer> recognizeFace(int id, byte[] inputFace) {
    // NOTE(review): c is a local reset to 0 on every call, so the c++ below is
    // ineffective and every request reuses "0-test.jpg" — confirm intended.
    int c = 0;
    long startTime = System.currentTimeMillis();

    byte[] recognizedFace = new byte[0];
    System.out.println(id);
    BufferedImage bi;
    try {
        bi = ImageIO.read(new ByteArrayInputStream(inputFace));

        Mat input = bufferedImageToMat(bi);

        // Normalize to the 92x112 size the recognizer was trained on.
        Mat resizedImg = new Mat();
        Size size = new Size(92, 112);
        Imgproc.resize(input, resizedImg, size);
        bi = matToBufferedImage(resizedImg, bi);
        ImageIO.write(bi, "jpg", new File("C:\\inputPicturesFace\\" + c + "-test.jpg"));

        int label = mFaceRecognizer.recognizeFace("C:\\inputPicturesFace\\" + c + "-test.jpg");

        c++;
        recognizedFace = Files
                .readAllBytes(new File("C:\\trainingPictures\\(" + label + ")-test.jpg").toPath());
    } catch (IOException e) {
        // best-effort: log and fall through with empty face bytes
        e.printStackTrace();
    }
    // Parameterized type (the original used a raw Tuple2Future — unchecked).
    Tuple2Future<byte[], Integer> fut = new Tuple2Future<>();
    fut.setFirstResult(recognizedFace);
    fut.setSecondResult(id);
    long endTime = System.currentTimeMillis();
    System.out.println("Dauer Result: " + (endTime - startTime) + " milliseconds");

    return fut;
}

From source file:com.github.mbillingr.correlationcheck.ImageProcessor.java

License:Open Source License

/**
 * Converts an OpenCV Mat to an Android Bitmap, rotating it 90 degrees
 * (transpose + horizontal flip) on the way. A null input yields an empty
 * 0x0 ARGB_8888 bitmap.
 */
Bitmap matToBitmap(Mat input) {
    if (input == null) {
        return Bitmap.createBitmap(0, 0, Bitmap.Config.ARGB_8888);
    }
    // Bring the pixels into RGB order regardless of source channel layout.
    int conversion = input.channels() == 1 ? Imgproc.COLOR_GRAY2RGB : Imgproc.COLOR_BGR2RGB;
    Mat rgb = new Mat();
    Imgproc.cvtColor(input, rgb, conversion);

    // Rotate 90 degrees: transpose, then flip around the vertical axis.
    Core.transpose(rgb, rgb);
    Core.flip(rgb, rgb, 1);

    Bitmap result = Bitmap.createBitmap(rgb.cols(), rgb.rows(), Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(rgb, result);
    return result;
}

From source file:com.github.mbillingr.correlationcheck.ImageProcessor.java

License:Open Source License

/**
 * Returns the current working image resized and converted to a Bitmap.
 *
 * @param w requested width
 * @param h requested height
 * @return the resized working image as a Bitmap
 */
public Bitmap getImageBitmap(int w, int h) {
    Mat tmp = new Mat();
    // NOTE(review): OpenCV Size is (width, height) but (h, w) is passed here.
    // This looks deliberate because matToBitmap() transposes the image,
    // swapping the axes back — confirm before "fixing".
    Imgproc.resize(working_image, tmp, new Size(h, w));
    return matToBitmap(tmp);
}

From source file:com.github.mbillingr.correlationcheck.ImageProcessor.java

License:Open Source License

/**
 * Detects small blob-like contours in the transformed input image and returns
 * their centers as points normalized to [0, 1].
 *
 * Pipeline: grayscale -> Gaussian blur -> inverted adaptive threshold ->
 * morphological close/open -> contour detection -> size/aspect filtering ->
 * area-sorted acceptance (each accepted blob must be at least half the area
 * of the previously accepted one).
 *
 * Side effects: replaces working_image with a clone of the loaded frame and
 * writes a debug snapshot of every stage via debugsave().
 *
 * @return centers of the accepted contours, normalized against
 *         work_width/work_height with both axes flipped
 */
public List<Point> extractPoints() {
    Mat gray = new Mat(); // grayscale copy of the frame
    Mat binary = new Mat(); // thresholded (binary) frame

    // 3x3 all-ones structuring element shared by the close/open steps below.
    Mat kernel = Mat.ones(3, 3, CvType.CV_8UC1);

    debugreset();

    Mat image = load_transformed();
    working_image = image.clone();
    debugsave(image, "source");

    Imgproc.cvtColor(image, gray, Imgproc.COLOR_RGB2GRAY);
    debugsave(gray, "grayscale");

    // Heavy 15x15 blur suppresses fine texture before thresholding.
    Imgproc.GaussianBlur(gray, gray, new Size(15, 15), 0);
    debugsave(gray, "blurred");

    //Imgproc.equalizeHist(gray, gray);
    //debugsave(gray, "equalized");

    // Inverted adaptive threshold: darker-than-neighborhood pixels become
    // white foreground. Block size 129 (must be odd), constant offset 5.
    Imgproc.adaptiveThreshold(gray, binary, 255, Imgproc.ADAPTIVE_THRESH_GAUSSIAN_C, Imgproc.THRESH_BINARY_INV,
            129, 5);
    //Imgproc.threshold(gray, binary, 0, 255, Imgproc.THRESH_BINARY_INV + Imgproc.THRESH_OTSU);
    //Imgproc.threshold(gray, binary, 128, 255, Imgproc.THRESH_BINARY_INV);
    debugsave(binary, "binary");

    // Close fills small holes inside blobs...
    Imgproc.morphologyEx(binary, binary, Imgproc.MORPH_CLOSE, kernel);
    debugsave(binary, "closed");

    // ...then open removes isolated speckle noise.
    Imgproc.morphologyEx(binary, binary, Imgproc.MORPH_OPEN, kernel);
    debugsave(binary, "opened");

    List<MatOfPoint> contours = new ArrayList<>();
    Mat hierarchy = new Mat();
    Imgproc.findContours(binary, contours, hierarchy, Imgproc.RETR_TREE, Imgproc.CHAIN_APPROX_SIMPLE); // note: findContours modifies `binary` in place
    Imgproc.drawContours(image, contours, -1, new Scalar(0, 0, 255), 3);
    debugsave(image, "contours");

    List<PointAndArea> points = new ArrayList<>();

    for (MatOfPoint cnt : contours) {
        // Fit a rotated bounding rect to judge the contour's shape.
        MatOfPoint2f c2f = new MatOfPoint2f();
        c2f.fromArray(cnt.toArray());
        RotatedRect rr = Imgproc.minAreaRect(c2f);

        double area = Imgproc.contourArea(cnt);

        // Keep contours that are roughly compact (aspect ratio < 3:1),
        // smaller than 64x64, with area strictly between 9 and 10000.
        if (rr.size.width / rr.size.height < 3 && rr.size.height / rr.size.width < 3 && rr.size.width < 64
                && rr.size.height < 64 && area > 9 && area < 10000) {
            points.add(new PointAndArea((int) area, rr.center));
        }
    }

    List<Point> final_points = new ArrayList<>();

    // Largest areas first; accept a blob only while its area stays within a
    // factor of 2 of the previously accepted one (drops the tail of tiny blobs).
    Collections.sort(points);
    Collections.reverse(points);
    int prev = -1;
    for (PointAndArea p : points) {
        Log.i("area", Integer.toString(p.area));
        if (prev == -1 || p.area >= prev / 2) {
            prev = p.area;
            Imgproc.circle(image, p.point, 10, new Scalar(0, 255, 0), 5);
            // Normalize to [0,1] with axes swapped and flipped — presumably to
            // map pixel coordinates into a rotated display frame; confirm.
            final_points.add(new Point(1 - p.point.y / work_height, 1 - p.point.x / work_width));
        }
    }
    debugsave(image, "circles");

    return final_points;
}

From source file:com.github.rosjava_catkin_package_a.ARLocROS.ComputePose.java

License:Apache License

/**
 * Detects AR markers in the camera frame and estimates the camera pose with
 * RANSAC-based solvePnP over the detected marker corners.
 *
 * @param rvec   output rotation vector, written by solvePnPRansac
 * @param tvec   output translation vector, written by solvePnPRansac
 * @param image2 camera frame; detected markers get a red rectangle drawn on it
 * @return true if at least one marker was detected and the pose estimate has
 *         inliers; false when nothing was detected or localization failed
 */
public boolean computePose(Mat rvec, Mat tvec, Mat image2) throws NyARException, FileNotFoundException {
    // convert image to NyAR style for processing
    final INyARRgbRaster imageRaster = NyARImageHelper.createFromMat(image2);

    // create new marker system configuration
    i_config = new NyARMarkerSystemConfig(i_param);
    markerSystemState = new NyARMarkerSystem(i_config);
    // Create wrapper that passes cam pictures to marker system
    cameraSensorWrapper = new NyARSensor(i_screen_size);
    ids = new int[markerPatterns.size()];
    patternmap = new HashMap<>();
    for (int i = 0; i < markerPatterns.size(); i++) {
        // create marker description from pattern file and add to marker
        // system
        ids[i] = markerSystemState.addARMarker(arCodes.get(i), 25, markerConfig.getMarkerSize());
        patternmap.put(ids[i], markerPatterns.get(i));
    }

    cameraSensorWrapper.update(imageRaster);
    markerSystemState.update(cameraSensorWrapper);

    // init 3D point list
    final List<Point3> points3dlist = new ArrayList<>();
    final List<Point> points2dlist = new ArrayList<>();

    for (final int id : ids) {
        // process only if this marker has been detected with good confidence
        if (markerSystemState.isExistMarker(id) && markerSystemState.getConfidence(id) > 0.7) {
            // read and add the four 2D corner points
            final NyARIntPoint2d[] vertex2d = markerSystemState.getMarkerVertex2D(id);
            Point p = new Point(vertex2d[0].x, vertex2d[0].y);
            points2dlist.add(p);
            // BUGFIX: original used vertex2d[2].y here (copy-paste error);
            // corner 1 must pair its own x and y.
            p = new Point(vertex2d[1].x, vertex2d[1].y);
            points2dlist.add(p);
            p = new Point(vertex2d[2].x, vertex2d[2].y);
            points2dlist.add(p);
            p = new Point(vertex2d[3].x, vertex2d[3].y);
            points2dlist.add(p);

            // (removed dead code: a MatOfPoint/pts list was built here every
            // iteration and never used)

            // read and add corresponding 3D points
            points3dlist.addAll(markerConfig.create3dpointlist(patternmap.get(id)));
            // draw red rectangle around detected marker
            Core.rectangle(image2, new Point(vertex2d[0].x, vertex2d[0].y),
                    new Point(vertex2d[2].x, vertex2d[2].y), new Scalar(0, 0, 255));
        }

    }
    // load 2D and 3D points to Mats for solvePNP
    final MatOfPoint3f objectPoints = new MatOfPoint3f();
    objectPoints.fromList(points3dlist);
    final MatOfPoint2f imagePoints = new MatOfPoint2f();
    imagePoints.fromList(points2dlist);

    if (visualization) {
        // show image with markers detected
        Imshow.show(image2);
    }

    // do not call solvePnP with empty input data (no markers detected)
    if (points2dlist.isEmpty()) {
        return false;
    }

    // RANSAC-based pose estimation (more robust to outlier corners)
    Mat inliers = new Mat();
    Calib3d.solvePnPRansac(objectPoints, imagePoints, cameraMatrix, distCoeffs, rvec, tvec, false, 300, 5, 16,
            inliers, Calib3d.CV_P3P);
    ARLoc.getLog().info("Points detected: " + points2dlist.size() + " inliers: " + inliers.size());
    // avoid publishing a zero pose if localization failed
    if (inliers.rows() == 0) {
        return false;
    }

    return true;
}

From source file:com.ibm.streamsx.edgevideo.device.Camera.java

License:Open Source License

/**
 * Grab a frame from the camera. Calls {@link #open()} if needed.
 * @return the frame//from  ww w  .j ava2s  . co m
 */
/**
 * Grabs a single frame from the camera, lazily calling {@link #open()} the
 * first time.
 *
 * @return the captured frame
 */
public Mat grabFrame() {
    if (!initialized) {
        open(); // lazy one-time camera setup
    }
    Mat captured = new Mat();
    camera.read(captured);
    return captured;
}

From source file:com.ibm.streamsx.edgevideo.device.edgent.JsonMat.java

License:Open Source License

/**
 * Encodes a Mat's raw pixel bytes as a Base64 MIME string so it can be
 * packaged inside a JsonObject. When the uncompressed payload would exceed
 * 50 KiB the image is first downscaled by half.
 */
private static String base64MimeEncodeMat(Mat mat) {
    int cols = mat.width();
    int rows = mat.height();
    int nChannels = mat.channels();

    // Shrink oversized frames to keep the payload small. With the initial
    // resize factor of 4 and being within 2' of the MBP camera, a face image
    // seems to be on the order of 15Kb.
    if (cols * rows * nChannels > 50 * 1024) {
        int resizeFactor = 2;
        Mat smallerFace = new Mat();
        Imgproc.resize(mat, smallerFace, new Size(cols / resizeFactor, rows / resizeFactor));
        mat = smallerFace;
        cols = mat.width();
        rows = mat.height();
        nChannels = mat.channels();
    }

    // Copy the raw pixel data out of the Mat...
    byte[] pixels = new byte[cols * rows * nChannels];
    mat.get(0, 0, pixels);

    // ...and Base64-MIME encode it (java.util.Base64, JDK 8+).
    return Base64.getMimeEncoder().encodeToString(pixels);
}

From source file:com.ibm.streamsx.edgevideo.device.FaceDetector.java

License:Open Source License

/**
 * Returns a copy of the frame shrunk by the configured resizeFactor on both
 * axes; the input Mat is left untouched.
 */
public Mat resize(Mat frame) {
    Size target = new Size(frame.width() / resizeFactor, frame.height() / resizeFactor);
    Mat shrunk = new Mat();
    Imgproc.resize(frame, shrunk, target);
    return shrunk;
}