Example usage for org.opencv.core Mat dump

List of usage examples for org.opencv.core Mat dump

Introduction

On this page you can find example usage for org.opencv.core Mat.dump().

Prototype

public String dump() 
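
Mat.dump() returns the contents of a Mat as a human-readable string, which is handy for logging and for inspecting small matrices while debugging. Below is a minimal sketch, assuming the OpenCV Java bindings are installed and the native library is on java.library.path; the class name MatDumpExample is only illustrative.

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;

public class MatDumpExample {
    public static void main(String[] args) {
        // the native library must be loaded before any other OpenCV call
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // 3x3 identity matrix of unsigned 8-bit values
        Mat m = Mat.eye(3, 3, CvType.CV_8UC1);

        // dump() stringifies every element, so it is best suited to small matrices
        System.out.println("m = " + m.dump());
    }
}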

Usage

From source file:com.android.cts.verifier.sensors.RVCVXCheckAnalyzer.java

License:Apache License

/**
 * Analyze video frames using a computer vision approach and generate an ArrayList<AttitudeRec>
 *
 * @param recs  output ArrayList of AttitudeRec
 * @return total number of frames in the video
 */
private int analyzeVideo(ArrayList<AttitudeRec> recs) {
    VideoMetaInfo meta = new VideoMetaInfo(new File(mPath, "videometa.json"));

    int decimation = 1;
    boolean use_timestamp = true;

    // roughly determine if decimation is necessary
    if (meta.fps > DECIMATION_FPS_TARGET) {
        decimation = (int) (meta.fps / DECIMATION_FPS_TARGET);
        meta.fps /= decimation;
    }

    VideoDecoderForOpenCV videoDecoder = new VideoDecoderForOpenCV(new File(mPath, "video.mp4"), decimation);

    Mat frame;
    Mat gray = new Mat();
    int i = -1;

    Size frameSize = videoDecoder.getSize();

    if (frameSize.width != meta.frameWidth || frameSize.height != meta.frameHeight) {
        // this is very unlikely
        return -1;
    }

    if (TRACE_VIDEO_ANALYSIS) {
        Debug.startMethodTracing("cvprocess");
    }

    Size patternSize = new Size(4, 11);

    float fc = (float) (meta.frameWidth / 2.0 / Math.tan(meta.fovWidth / 2.0));
    Mat camMat = cameraMatrix(fc, new Size(frameSize.width / 2, frameSize.height / 2));
    MatOfDouble coeff = new MatOfDouble(); // dummy

    MatOfPoint2f centers = new MatOfPoint2f();
    MatOfPoint3f grid = asymmetricalCircleGrid(patternSize);
    Mat rvec = new MatOfFloat();
    Mat tvec = new MatOfFloat();

    MatOfPoint2f reprojCenters = new MatOfPoint2f();

    if (LOCAL_LOGV) {
        Log.v(TAG, "Camera Mat = \n" + camMat.dump());
    }

    long startTime = System.nanoTime();
    long[] ts = new long[1];

    while ((frame = videoDecoder.getFrame(ts)) != null) {
        if (LOCAL_LOGV) {
            Log.v(TAG, "got a frame " + i);
        }

        if (use_timestamp && ts[0] == -1) {
            use_timestamp = false;
        }

        // has to be incremented up front, as later code may skip the rest of
        // this loop iteration with continue
        i++;

        // convert to gray manually as by default findCirclesGridDefault uses COLOR_BGR2GRAY
        Imgproc.cvtColor(frame, gray, Imgproc.COLOR_RGB2GRAY);

        boolean foundPattern = Calib3d.findCirclesGrid(gray, patternSize, centers,
                Calib3d.CALIB_CB_ASYMMETRIC_GRID);

        if (!foundPattern) {
            // skip to next frame
            continue;
        }

        if (OUTPUT_DEBUG_IMAGE) {
            Calib3d.drawChessboardCorners(frame, patternSize, centers, true);
        }

        // figure out the extrinsic parameters using the real ground-truth 3D points and the
        // pixel positions of the blobs found by findCirclesGrid; an estimated camera matrix
        // and zero distortion are assumed.
        boolean foundSolution = Calib3d.solvePnP(grid, centers, camMat, coeff, rvec, tvec, false,
                Calib3d.CV_ITERATIVE);

        if (!foundSolution) {
            // skip to next frame
            if (LOCAL_LOGV) {
                Log.v(TAG, "cannot find pnp solution in frame " + i + ", skipped.");
            }
            continue;
        }

        // reproject the points to evaluate the accuracy of the solvePnP result
        Calib3d.projectPoints(grid, rvec, tvec, camMat, coeff, reprojCenters);

        // the error is evaluated with the L2 norm, i.e. the real error in pixel distance / sqrt(2)
        double error = Core.norm(centers, reprojCenters, Core.NORM_L2);

        if (LOCAL_LOGV) {
            Log.v(TAG, "Found attitude, re-projection error = " + error);
        }

        // if the error is reasonable, add the result. The threshold is a ratio of the frame
        // height so that higher-definition videos are not penalized
        if (error < REPROJECTION_THREASHOLD_RATIO * frameSize.height) {
            double[] rv = new double[3];
            double timestamp;

            rvec.get(0, 0, rv);
            if (use_timestamp) {
                timestamp = (double) ts[0] / 1e6;
            } else {
                timestamp = (double) i / meta.fps;
            }
            if (LOCAL_LOGV)
                Log.v(TAG, String.format("Added frame %d  ts = %f", i, timestamp));
            recs.add(new AttitudeRec(timestamp, rodr2rpy(rv)));
        }

        if (OUTPUT_DEBUG_IMAGE) {
            Calib3d.drawChessboardCorners(frame, patternSize, reprojCenters, true);
            Imgcodecs.imwrite(Environment.getExternalStorageDirectory().getPath() + "/RVCVRecData/DebugCV/img"
                    + i + ".png", frame);
        }
    }

    if (LOCAL_LOGV) {
        Log.v(TAG, "Finished decoding");
    }

    if (TRACE_VIDEO_ANALYSIS) {
        Debug.stopMethodTracing();
    }

    if (LOCAL_LOGV) {
        // time analysis
        double totalTime = (System.nanoTime() - startTime) / 1e9;
        Log.i(TAG, "Total time: " + totalTime + "s, Per frame time: " + totalTime / i);
    }
    return i;
}

From source file:contador_de_moedas.PosProcessamento.java

public void salvaImagem(Mat output, boolean criaBase, Point center, int raio) {
    if (criaBase) {
        FileWriter arq = null;
        try {
            String nome = "X" + (int) center.x + "Y" + (int) center.y + "R" + (int) raio;
            arq = new FileWriter("baseConhecimento/" + nome + ".txt");
            PrintWriter gravarArq = new PrintWriter(arq);
            gravarArq.print(output.dump());
            gravarArq.close(); // flush the PrintWriter before the underlying FileWriter is closed
            // Imgcodecs.imwrite("baseConhecimento/" + nome + ".yml", output);
        } catch (IOException ex) {
            Logger.getLogger(Circulo.class.getName()).log(Level.SEVERE, null, ex);
        } finally {
            try {
                if (arq != null) {
                    arq.close();
                }
            } catch (IOException ex) {
                Logger.getLogger(Circulo.class.getName()).log(Level.SEVERE, null, ex);
            }
        }
    }
}

From source file:cx.uni.jk.mms.iaip.tools.SimpleBrushTool.java

License:Open Source License

@Override
public Rect apply(Mat mat, BrushModel brush, int x, int y, boolean inverseEffect) {

    Rect changedArea = null;

    try {
        this.logger.finer(String.format("apply mode=\"%s\" inverse=%s, size=%d, strength=%d", brush.getMode(),
                inverseEffect, brush.getSize(), brush.getValue()));

        this.logger.finest("mat    = " + mat.toString());

        /** where is brush going to work? this may reach outside the mat! */
        int brushColStart = x - (brush.getSize() - 1) / 2;
        int brushColEnd = x + brush.getSize() / 2;
        int brushRowStart = y - (brush.getSize() - 1) / 2;
        int brushRowEnd = y + brush.getSize() / 2;

        if (brushColEnd >= 0 && brushColStart < mat.cols() && brushRowEnd >= 0 && brushRowStart < mat.rows()) {

            /** calculate bounds for roiMat to fit into original mat */
            int subColStart = Math.max(0, brushColStart);
            int subColEnd = Math.min(brushColEnd, mat.cols() - 1);
            int subRowStart = Math.max(0, brushRowStart);
            int subRowEnd = Math.min(brushRowEnd, mat.rows() - 1);

            /**
             * The caller may want to know the changed area. Note that the
             * Rect constructor treats the second point as lying outside the
             * Rect: a one-pixel rectangle Rect(Point(a,b), Point(a+1,b+1))
             * has height and width 1. See
             * http://docs.opencv.org/java/org/opencv/core/Rect.html
             */
            changedArea = new Rect(new Point(subColStart, subRowStart),
                    new Point(subColEnd + 1, subRowEnd + 1));

            /**
             * get the part of the original mat which is going to be affected
             * by the change
             */
            Mat roiMat = mat.submat(subRowStart, subRowEnd + 1, subColStart, subColEnd + 1);
            this.logger.finest("matRoi = " + roiMat.toString());

            /** does the brush fit into the roiMat we shall work on ? */
            boolean brushFits = brushColStart == subColStart && brushColEnd == subColEnd
                    && brushRowStart == subRowStart && brushRowEnd == subRowEnd;

            this.logger.finest("brush fits = " + brushFits);

            /**
             * make sure to have a working mat which matches the full brush
             * size
             */
            Mat workMat, workRoi = null;
            if (brushFits) {
                /** just work in the original mat area defined by roi */
                workMat = roiMat;
            } else {
                /** create a new mat as big as the brush */
                workMat = Mat.zeros(brush.getSize(), brush.getSize(), MatModel.MAT_TYPE);
                this.logger.finest("workMat= " + workMat.toString());
                /**
                 * create an ROI in the workMat as big as the subMat, with the
                 * offset corrected so the brush is applied in the middle
                 */
                int roiColStart = subColStart - brushColStart;
                int roiColEnd = roiColStart + roiMat.cols();
                int roiRowStart = subRowStart - brushRowStart;
                int roiRowEnd = roiRowStart + roiMat.rows();

                workRoi = workMat.submat(roiRowStart, roiRowEnd, roiColStart, roiColEnd);
                this.logger.finest("workRoi= " + workRoi.toString());
                roiMat.copyTo(workRoi);
                this.logger.finest("workRoi= " + workRoi.toString());

                // workRoi.put(0, 0, 1333.0d);
                this.logger.finest("roiMat  dump1 " + roiMat.dump());
                this.logger.finest("workRoi dump1 " + workRoi.dump());
                this.logger.finest("workMat dump1 " + workMat.dump());
            }

            /** the real action */
            this.applyToWorkMat(brush, inverseEffect, workMat);

            this.logger.finest("workMat dump2 " + workMat.dump());
            this.logger.finest("matRoi  dump2 " + roiMat.dump());

            if (brushFits) {
                /**
                 * nothing to do, we have been working directly in original
                 * mat
                 */
            } else {
                /** copy workMat back into original mat */
                this.logger.finest("workRoi dump2 " + workRoi.dump());
                // workRoi.put(0, 0, 1338);
                this.logger.finest("workRoi dump3 " + workRoi.dump());
                /**
                 * copy roi of changed workmat back into roi of original mat
                 */
                this.logger.finest("matRoi = " + roiMat.toString());
                workRoi.copyTo(roiMat);
                this.logger.finest("matRoi = " + roiMat.toString());
            }
            this.logger.finest("matRoi  dump3 " + roiMat.dump());
        }

    } catch (CvException e) {
        /** never mind if the user does not notice */
        this.logger.fine(e.toString()); // getStackTrace().toString() would only log the array reference
    }

    /** let the caller know which area has potentially been changed */
    return changedArea;
}

From source file:gov.nasa.jpl.memex.pooledtimeseries.healthcheck.CheckOpenCV.java

License:Apache License

public static void main(String[] args) {
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    Mat mat = Mat.eye(3, 3, CvType.CV_8UC1);
    System.out.println("mat = " + mat.dump());

    String filename = args[0];

    System.out.println("opening video file " + filename);
    VideoCapture capture = new VideoCapture(filename.toString());

    if (!capture.isOpened()) {
        System.out.println("video file " + filename + " could not be opened.");

    }
}

From source file:javaapplication1.JavaApplication1.java

public static void main(String[] args) {
    // you must load the OpenCV library like this before trying to do
    // anything with OpenCV!
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

    // Print OpenCV version
    System.out.println("Welcome to OpenCV " + Core.VERSION);

    // In OpenCV, the most important data type is the Matrix, Mat.
    // Here, we create a matrix that has 5 rows and 10 columns.  It
    // stores an 8-bit type with a single channel.  In other words, a
    // matrix of bytes.  We'll initialize every element to 0.
    Mat m = new Mat(5, 10, CvType.CV_8UC1, new Scalar(0));

    // Dump information about the matrix
    System.out.println("OpenCV Mat: " + m);

    // set row 1 to be all 1s, and then column 5 to be all 5s
    Mat mr1 = m.row(1);
    mr1.setTo(new Scalar(1));
    Mat mc5 = m.col(5);
    mc5.setTo(new Scalar(5));

    // Dump the actual matrix contents
    System.out.println("OpenCV Mat data:\n" + m.dump());

    Ocv ocv = new Ocv();
    ocv.getFilePath();

    /**
     * Find faces in an image.
     *
     * @param filter Path to the xml face finding filter to use
     * @param input Path to the input image file
     * @param output Path to the output image file
     */
    //ocv.findFaces("lbpcascade_frontalface.xml", "C:\\Users\\Wellesley\\Documents\\GitHub\\CSE398\\opencvTutorial\\JavaApplication1\\src\\javaapplication1\\lena.png", "../javaapplication1");
    ocv.setOutput("step2.png");
    ocv.findFaces("", "", "");
    ocv.setOutput("step3.png");
    ocv.cropEachFace("", "");
    ocv.setOutput("step4.png");
    ocv.resizeEachFace("", "");
    ocv.setOutput("step6.png");
    ocv.makeFacesGray("", "", "");
    ocv.setOutput("step8.png");
    ocv.blendWithGray50("", "");
    ocv.setOutput("step10.png");
    ocv.doSobel("", "");
    ocv.setOutput("step11.png");
    ocv.directManip("", "");
}

From source file:javacv.JavaCV.java

/**
 * @param args the command line arguments
 */
public static void main(String[] args) {
    // TODO code application logic here

    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    Mat mat = Mat.eye(3, 3, CvType.CV_8UC1);
    System.out.println("mat = " + mat.dump());

    CascadeClassifier faceDetector = new CascadeClassifier("./data/lbpcascade_frontalface.xml");
    //CascadeClassifier faceDetector = new CascadeClassifier();

    JFrame frame = new JFrame("BasicPanel");
    frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
    frame.setSize(400, 400);
    JavaCV panel = new JavaCV();
    frame.setContentPane(panel);
    frame.setVisible(true);
    Mat webcam_image = new Mat();
    BufferedImage temp;
    VideoCapture capture;
    capture = new VideoCapture(0);

    if (capture.isOpened()) {
        while (true) {
            capture.read(webcam_image);
            if (!webcam_image.empty()) {
                frame.setSize(webcam_image.width() + 40, webcam_image.height() + 60);

                MatOfRect faceDetections = new MatOfRect();
                faceDetector.detectMultiScale(webcam_image, faceDetections);

                //System.out.println(String.format("Detected %s faces", faceDetections.toArray().length));

                // Draw a bounding box around each face.
                for (Rect rect : faceDetections.toArray()) {
                    Core.rectangle(webcam_image, new Point(rect.x, rect.y),
                            new Point(rect.x + rect.width, rect.y + rect.height), new Scalar(0, 255, 0));
                }

                temp = matToBufferedImage(webcam_image);
                panel.setimage(temp);
                panel.repaint();
            } else {
                System.out.println(" --(!) No captured frame -- Break!");
                break;
            }
        }
    }
    return;

}

From source file:karthik.Barcode.Barcode.java

License:Open Source License

protected static void write_Mat(String filename, Mat img) {
    // write the contents of a Mat object to disk
    try {
        PrintStream original = new PrintStream(System.out);
        PrintStream printStream = new PrintStream(new FileOutputStream(new File(filename)));
        System.setOut(printStream);
        System.out.println(img.dump());
        System.setOut(original);
    } catch (IOException ioe) {
    }

}

From source file:neuroimagingdataportal.EdgedetectProcessing.java

public String process(int value1, int value2) {

    this.threshold1 = value1;
    this.threshold2 = value2;
    System.out.println("Welcome to OpenCV " + Core.VERSION);
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    Mat m = Mat.eye(3, 3, CvType.CV_8UC1);
    System.out.println("m = " + m.dump());

    Mat src1;

    //if ("null".equals(this.url)) {
    //    JOptionPane.showMessageDialog(null, "Please select image to process", "WARNING_MESSAGE", JOptionPane.WARNING_MESSAGE);
    //    return null;
    //} else {
    src1 = imread(url, CV_LOAD_IMAGE_COLOR);
    System.out.println(" read the file" + url);

    // Mat gray, edge, draw;
    gray = new Mat();
    cvtColor(src1, gray, COLOR_BGR2GRAY);

    edge = new Mat();
    draw = new Mat();
    /*
     void Canny(InputArray image, OutputArray edges, double threshold1, double threshold2, int apertureSize=3, bool L2gradient=false)
     Parameters:
     image        - single-channel 8-bit input image.
     edges        - output edge map; it has the same size and type as image.
     threshold1   - first threshold for the hysteresis procedure.
     threshold2   - second threshold for the hysteresis procedure.
     apertureSize - aperture size for the Sobel() operator.
     L2gradient   - a flag indicating whether the more accurate L2 norm should be used to
                    compute the image gradient magnitude, or whether the default L1 norm is enough.
     */

    Canny(gray, edge, threshold1, threshold2, 3, false);
    edge.convertTo(draw, CV_8U);

    // the draw Mat could also be converted to a BufferedImage here if needed
    //BufferedImage drawImage = ImageIO.read( (Image) draw);

    saveUrl = currentDirectory + "\\" + count + "converted.jpg";

    boolean written = imwrite(saveUrl, draw);

    System.out.println(" wrote the image to " + saveUrl + ": " + written);

    return saveUrl;

    //}
}

From source file:opencvdemos.OpenCVDemos.java

/**
 * @param args the command line arguments
 */
public static void main(String[] args) {
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    Mat mat = Mat.eye(3, 3, CvType.CV_8UC1);
    System.out.println("mat = " + mat.dump());
}

From source file:opencvtuto1.Opencvtuto1.java

/**
 * @param args the command line arguments
 */
public static void main(String[] args) {
    // TODO code application logic here

    System.out.println("hola mundo opencv version" + Core.VERSION);
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    Mat m = Mat.eye(8, 8, CvType.CV_8UC1);

    System.out.println("m=" + m.dump());

}