Example usage for org.opencv.core Mat height

List of usage examples for org.opencv.core Mat height

Introduction

On this page you can find usage examples for org.opencv.core Mat.height(), which returns the number of rows of the matrix (for an image, its height in pixels).

Prototype

public int height() 
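
A minimal sketch of the call itself, assuming OpenCV 3.x or later with the native library already loaded; the image path is a placeholder:

Mat img = Imgcodecs.imread("/path/to/image.jpg");
if (!img.empty()) {
    int width = img.width();   // same as img.cols()
    int height = img.height(); // same as img.rows()
    System.out.println("w= " + width + "  h= " + height);
}
img.release();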


Usage

From source file:com.ibm.streamsx.edgevideo.device.MyPanel.java

License:Open Source License

public boolean matToBufferedImage(Mat mat, int bufferedImageType) {
    int width = mat.width(), height = mat.height(), channels = mat.channels();
    byte[] sourcePixels = new byte[width * height * channels];
    mat.get(0, 0, sourcePixels);
    // create new image and get reference to backing data  
    image = new BufferedImage(width, height, bufferedImageType);
    final byte[] targetPixels = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
    System.arraycopy(sourcePixels, 0, targetPixels, 0, sourcePixels.length);
    return true;
}
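
A hypothetical call site for this helper (myPanel and frame are placeholder names; the Mat is expected to be a 3-channel BGR frame when TYPE_3BYTE_BGR is used) might look like:

myPanel.matToBufferedImage(frame, BufferedImage.TYPE_3BYTE_BGR); // channel count must match the BufferedImage type
myPanel.repaint();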

From source file:com.joravasal.keyface.CameraAccessView.java

License:Open Source License

public Mat correctCameraImage(Mat image) {
    //Log.i(tag, "Correcting image rotation");
    //Check rotation of device
    int rotation = ((KeyFaceActivity) this.getContext()).getWindowManager().getDefaultDisplay().getRotation();
    switch (rotation) {
    case Surface.ROTATION_0:
        int degrees = 90;
        //Rotate in every case; mirror afterwards depending on which camera is active
        Mat imageResult = new Mat();
        //Centering the 90-degree rotation at (width/2, width/2) maps the WxH source onto the HxW destination
        Point center = new Point(image.width() / 2, image.width() / 2);
        Mat transform = Imgproc.getRotationMatrix2D(center, degrees, 1.0);
        try {
            Imgproc.warpAffine(image, imageResult, transform, new Size(image.height(), image.width()));
        } catch (CvException e) {
            System.err.println(e.getMessage());
        }
        if (KeyFaceActivity.cameraRearActive)
            Core.flip(imageResult, imageResult, -1);
        else
            Core.flip(imageResult, imageResult, 1);
        return imageResult;
    case Surface.ROTATION_90:
        //Mirror on y axis if front camera
        if (!KeyFaceActivity.cameraRearActive)
            Core.flip(image, image, 1);
        break;
    case Surface.ROTATION_180:
        //Never gets here but just in case:
        break;
    case Surface.ROTATION_270:
        //Mirror on the x axis if rear camera, both axis if front camera
        if (KeyFaceActivity.cameraRearActive)
            Core.flip(image, image, -1);
        else
            Core.flip(image, image, 0);
        break;
    default:
        break;
    }

    return image;
}

From source file:com.mitzuli.core.ocr.OcrPreprocessor.java

License:Open Source License

/**
 * Binarizes and cleans the input image for OCR, saving debugging images in the given directory.
 *
 * @param input the input image, which is recycled by this method, so the caller should make a defensive copy of it if necessary.
 * @param debugDir the directory to write the debugging images to, or null to disable debugging.
 * @return the preprocessed image.
 */
static Image preprocess(final Image input, final File debugDir) {
    // TODO Temporary workaround to allow manually enabling debugging (the global final variable should be used instead)
    boolean DEBUG = debugDir != null;

    // Initialization
    final Mat mat = input.toGrayscaleMat();
    final Mat debugMat = DEBUG ? input.toRgbMat() : null;
    input.recycle();
    final Mat aux = new Mat(mat.size(), CvType.CV_8UC1);
    final Mat binary = new Mat(mat.size(), CvType.CV_8UC1);
    if (DEBUG)
        Image.fromMat(mat).write(new File(debugDir, "1_input.jpg"));

    // Binarize the input image in mat through adaptive Gaussian thresholding
    Imgproc.adaptiveThreshold(mat, binary, 255, Imgproc.ADAPTIVE_THRESH_GAUSSIAN_C, Imgproc.THRESH_BINARY, 51,
            13);
    // Imgproc.adaptiveThreshold(mat, binary, 255, Imgproc.ADAPTIVE_THRESH_GAUSSIAN_C, Imgproc.THRESH_BINARY, 31, 7);

    // Edge detection
    Imgproc.morphologyEx(mat, mat, Imgproc.MORPH_OPEN, KERNEL_3X3); // Open
    Imgproc.morphologyEx(mat, aux, Imgproc.MORPH_CLOSE, KERNEL_3X3); // Close
    Core.addWeighted(mat, 0.5, aux, 0.5, 0, mat); // Average
    Imgproc.morphologyEx(mat, mat, Imgproc.MORPH_GRADIENT, KERNEL_3X3); // Gradient
    Imgproc.threshold(mat, mat, 0, 255, Imgproc.THRESH_BINARY | Imgproc.THRESH_OTSU); // Edge map
    if (DEBUG)
        Image.fromMat(mat).write(new File(debugDir, "2_edges.jpg"));

    // Extract word level connected-components from the dilated edge map
    Imgproc.dilate(mat, mat, KERNEL_3X3);
    if (DEBUG)
        Image.fromMat(mat).write(new File(debugDir, "3_dilated_edges.jpg"));
    final List<MatOfPoint> wordCCs = new ArrayList<MatOfPoint>();
    Imgproc.findContours(mat, wordCCs, new Mat(), Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

    // Filter word level connected-components individually and calculate their average attributes
    final List<MatOfPoint> individuallyFilteredWordCCs = new ArrayList<MatOfPoint>();
    final List<MatOfPoint> removedWordCCs = new ArrayList<MatOfPoint>();
    double avgWidth = 0, avgHeight = 0, avgArea = 0;
    for (MatOfPoint cc : wordCCs) {
        final Rect boundingBox = Imgproc.boundingRect(cc);
        if (boundingBox.height >= 6 // bounding box height >= 6
                && boundingBox.area() >= 50 // bounding box area >= 50
                && (double) boundingBox.width / (double) boundingBox.height >= 0.25 // bounding box aspect ratio >= 1:4
                && boundingBox.width <= 0.75 * mat.width() // bounding box width <= 0.75 image width
                && boundingBox.height <= 0.75 * mat.height()) // bounding box height <= 0.75 image height
        {
            individuallyFilteredWordCCs.add(cc);
            avgWidth += boundingBox.width;
            avgHeight += boundingBox.height;
            avgArea += boundingBox.area();
        } else {
            if (DEBUG)
                removedWordCCs.add(cc);
        }
    }
    wordCCs.clear();
    avgWidth /= individuallyFilteredWordCCs.size();
    avgHeight /= individuallyFilteredWordCCs.size();
    avgArea /= individuallyFilteredWordCCs.size();
    if (DEBUG) {
        Imgproc.drawContours(debugMat, removedWordCCs, -1, BLUE, -1);
        removedWordCCs.clear();
    }

    // Filter word level connected-components in relation to their average attributes
    final List<MatOfPoint> filteredWordCCs = new ArrayList<MatOfPoint>();
    for (MatOfPoint cc : individuallyFilteredWordCCs) {
        final Rect boundingBox = Imgproc.boundingRect(cc);
        if (boundingBox.width >= 0.125 * avgWidth // bounding box width >= 0.125 average width
                && boundingBox.width <= 8 * avgWidth // bounding box width <= 8 average width
                && boundingBox.height >= 0.25 * avgHeight // bounding box height >= 0.25 average height
                && boundingBox.height <= 4 * avgHeight) // bounding box height <= 4 average height
        {
            filteredWordCCs.add(cc);
        } else {
            if (DEBUG)
                removedWordCCs.add(cc);
        }
    }
    individuallyFilteredWordCCs.clear();
    if (DEBUG) {
        Imgproc.drawContours(debugMat, filteredWordCCs, -1, GREEN, -1);
        Imgproc.drawContours(debugMat, removedWordCCs, -1, PURPLE, -1);
        removedWordCCs.clear();
    }

    // Extract paragraph level connected-components
    mat.setTo(BLACK);
    Imgproc.drawContours(mat, filteredWordCCs, -1, WHITE, -1);
    final List<MatOfPoint> paragraphCCs = new ArrayList<MatOfPoint>();
    Imgproc.morphologyEx(mat, aux, Imgproc.MORPH_CLOSE, KERNEL_30X30);
    Imgproc.findContours(aux, paragraphCCs, new Mat(), Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

    // Filter paragraph level connected-components according to the word level connected-components inside
    final List<MatOfPoint> textCCs = new ArrayList<MatOfPoint>();
    for (MatOfPoint paragraphCC : paragraphCCs) {
        final List<MatOfPoint> wordCCsInParagraphCC = new ArrayList<MatOfPoint>();
        aux.setTo(BLACK);
        Imgproc.drawContours(aux, Collections.singletonList(paragraphCC), -1, WHITE, -1);
        Core.bitwise_and(mat, aux, aux);
        Imgproc.findContours(aux, wordCCsInParagraphCC, new Mat(), Imgproc.RETR_EXTERNAL,
                Imgproc.CHAIN_APPROX_SIMPLE);
        final Rect boundingBox = Imgproc.boundingRect(paragraphCC);
        final double center = mat.size().width / 2;
        final double distToCenter = center > boundingBox.x + boundingBox.width
                ? center - boundingBox.x - boundingBox.width
                : center < boundingBox.x ? boundingBox.x - center : 0.0;
        if (DEBUG) {
            System.err.println("****************************************");
            System.err.println("\tArea:                " + boundingBox.area());
            System.err.println("\tDistance to center:  " + distToCenter);
            System.err.println("\tCCs inside:          " + wordCCsInParagraphCC.size());
        }
        if ((wordCCsInParagraphCC.size() >= 10 || wordCCsInParagraphCC.size() >= 0.3 * filteredWordCCs.size())
                && mat.size().width / distToCenter >= 4) {
            textCCs.addAll(wordCCsInParagraphCC);
            if (DEBUG) {
                System.err.println("\tText:                YES");
                Imgproc.drawContours(debugMat, Collections.singletonList(paragraphCC), -1, DARK_GREEN, 5);
            }
        } else {
            if (DEBUG) {
                System.err.println("\tText:                NO");
                Imgproc.drawContours(debugMat, Collections.singletonList(paragraphCC), -1, DARK_RED, 5);
            }
        }
    }
    filteredWordCCs.clear();
    paragraphCCs.clear();
    mat.setTo(WHITE);
    Imgproc.drawContours(mat, textCCs, -1, BLACK, -1);
    textCCs.clear();
    if (DEBUG)
        Image.fromMat(debugMat).write(new File(debugDir, "4_filtering.jpg"));

    // Obtain the final text mask from the filtered connected-components
    Imgproc.erode(mat, mat, KERNEL_15X15);
    Imgproc.morphologyEx(mat, mat, Imgproc.MORPH_OPEN, KERNEL_30X30);
    if (DEBUG)
        Image.fromMat(mat).write(new File(debugDir, "5_text_mask.jpg"));

    // Apply the text mask to the binarized image
    if (DEBUG)
        Image.fromMat(binary).write(new File(debugDir, "6_binary.jpg"));
    binary.setTo(WHITE, mat);
    if (DEBUG)
        Image.fromMat(binary).write(new File(debugDir, "7_binary_text.jpg"));

    // Dewarp the text using Leptonica
    Pix pixs = Image.fromMat(binary).toGrayscalePix();
    Pix pixsDewarp = Dewarp.dewarp(pixs, 0, Dewarp.DEFAULT_SAMPLING, 5, true);
    final Image result = Image.fromGrayscalePix(pixsDewarp);
    if (DEBUG)
        result.write(new File(debugDir, "8_dewarp.jpg"));

    // Clean up
    pixs.recycle();
    mat.release();
    aux.release();
    binary.release();
    if (debugMat != null)
        debugMat.release();

    return result;
}
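
A brief usage sketch (hypothetical caller; since preprocess is package-private it assumes code in the same package, and capturedImage is a placeholder for an Image wrapping the camera frame):

Image cleaned = OcrPreprocessor.preprocess(capturedImage, new File(cacheDir, "ocr_debug")); // capturedImage is recycled by the call
// pass null as debugDir to skip writing the numbered debugging images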

From source file:com.mycompany.mavenproject1.GenericResource.java

/**
 * Retrieves representation of an instance of com.mycompany.mavenproject1.GenericResource
 * @return an instance of java.lang.String
 */
@GET
@Produces(MediaType.APPLICATION_JSON)
public JsonObject getXml() {
    //TODO return proper representation object
    JsonBuilderFactory factory = Json.createBuilderFactory(null);

    String filename = "D:\\1Downloads\\Firefox downloads\\stock.jpg";
    Mat img;
    img = Imgcodecs.imread(filename);
    int width = img.width();
    int height = img.height();
    System.out.println("w= " + width + "  h= " + height);

    Analyzer analyzer = new Analyzer(filename);
    analyzer.setFrame(0, 0, width, height);

    Individual ind = new Individual(analyzer);
    ind.generateIndividual(width, height);
    System.out.println("\n\nTHE RESULT: " + analyzer.calcCombinedAestheticScore());

    Population myPop = new Population(200, true, width, height, analyzer);

    long startTime = System.currentTimeMillis();

    int generationCount = 0;
    while (generationCount < 500) {
        generationCount++;
        System.out.println(
                "Generation: " + generationCount + " Fittest: " + myPop.getFittest().getAestheticScore());
        //myPop = CropAlgorithm.evolvePopulation(myPop);
        CropAlgorithm cropAlg = new CropAlgorithm(analyzer);
        myPop = cropAlg.evolvePopulation(myPop, width, height);
        System.out.println("-------------------------------------------------ACTUAL BEST:  "
                + myPop.getFittest().getAestheticScore() + "    " + myPop.getFittest().toString());
    }
    System.out.println("Solution found!");
    System.out.println("Generation: " + generationCount);
    System.out.println("Genes:");
    Individual fittest = myPop.getFittest();
    System.out.println(fittest);
    double score = fittest.getAestheticScore();
    System.out.println(fittest.getAestheticScore());

    int x = (int) fittest.getX();
    int y = (int) fittest.getY();
    int w = (int) fittest.getWidth();
    int h = (int) fittest.getHeight();

    Rect roi = new Rect(x, y, w, h);
    Mat cropped = new Mat(img, roi);

    Imgcodecs.imwrite("D:\\1Downloads\\Firefox downloads\\cropped\\torocko.jpg", cropped);

    long estimatedTime = System.currentTimeMillis() - startTime;
    System.out.println("IDOOO==== " + estimatedTime);

    JsonObject value = factory.createObjectBuilder().add("value", score).add("x", x).add("y", y).add("width", w)
            .add("height", h).build();

    System.out.println("-------------------------------");

    return value;
}

From source file:com.randhirkumar.webcam.MainFrameForm.java

public void displayScreen() {

    Mat webcamImage = new Mat();
    VideoCapture videoCapture = new VideoCapture(0);

    if (videoCapture.isOpened()) {

        while (true) {

            videoCapture.read(webcamImage);

            if (!webcamImage.empty()) {
                setSize(webcamImage.width() + 50, webcamImage.height() + 70);
                webcamImage = processor.detect(webcamImage);
                cameraPanel.convertMatToImage(webcamImage);
                cameraPanel.repaint();
            } else {
                System.out.println("Problem");
                break;
            }
        }
    }
}
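
The loop above never releases the capture device when it ends. A hedged variant of the same loop (a sketch only, using the same processor and cameraPanel collaborators) adds a try/finally for cleanup:

public void displayScreen() {
    VideoCapture videoCapture = new VideoCapture(0);
    try {
        Mat webcamImage = new Mat();
        while (videoCapture.isOpened() && videoCapture.read(webcamImage) && !webcamImage.empty()) {
            setSize(webcamImage.width() + 50, webcamImage.height() + 70);
            cameraPanel.convertMatToImage(processor.detect(webcamImage));
            cameraPanel.repaint();
        }
    } finally {
        videoCapture.release(); // free the camera once the loop exits
    }
}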

From source file:com.shootoff.camera.Camera.java

License:Open Source License

public static BufferedImage matToBufferedImage(Mat matBGR) {
    BufferedImage image = new BufferedImage(matBGR.width(), matBGR.height(), BufferedImage.TYPE_3BYTE_BGR);
    final byte[] targetPixels = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
    matBGR.get(0, 0, targetPixels);

    return image;
}

From source file:com.sikulix.api.Image.java

License:Open Source License

private Mat get() {
    Mat mContent = new Mat();
    if (urlImg != null) {
        File imgFile = new File(urlImg.getPath());
        mContent = Highgui.imread(imgFile.getAbsolutePath());
        if (!mContent.empty()) {
            log.debug("get: loaded: (%dx%s) %s", mContent.width(), mContent.height(), urlImg);
        } else {
            log.error("get: not loaded: %s", urlImg);
        }
    }
    return mContent;
}
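
Highgui.imread is the OpenCV 2.4 Java binding; in OpenCV 3.x and later the same function lives in org.opencv.imgcodecs.Imgcodecs, so under that assumption the load-and-check step would read:

Mat mContent = Imgcodecs.imread(imgFile.getAbsolutePath()); // OpenCV 3.x/4.x equivalent of Highgui.imread
if (mContent.empty()) {
    log.error("get: not loaded: %s", urlImg);
}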

From source file:com.sikulix.api.Image.java

License:Open Source License

private static long getMatSize(Mat mat) {
    return mat.channels() * mat.width() * mat.height();
}
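
The helper above counts channel values (channels * width * height), which equals the size in bytes only for 8-bit matrices. A byte-accurate variant (a hypothetical helper, not part of the original class) could be:

private static long getMatByteSize(Mat mat) {
    // total() = rows * cols; elemSize() = bytes per element, including all channels
    return mat.total() * mat.elemSize();
}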

From source file:com.trandi.opentld.TLDView.java

License:Apache License

@Override
public Mat onCameraFrame(Mat originalFrame) {
    try {
        // Image is too big and this requires too much CPU for a phone, so scale everything down...
        Imgproc.resize(originalFrame, _workingFrame, WORKING_FRAME_SIZE);
        final Size workingRatio = new Size(originalFrame.width() / WORKING_FRAME_SIZE.width,
                originalFrame.height() / WORKING_FRAME_SIZE.height);
        // useful to see what we're actually working with...
        _workingFrame.copyTo(originalFrame.submat(originalFrame.rows() - _workingFrame.rows(),
                originalFrame.rows(), 0, _workingFrame.cols()));

        if (_trackedBox != null) {
            if (_tld == null) { // run the 1st time only
                Imgproc.cvtColor(_workingFrame, _lastGray, Imgproc.COLOR_RGB2GRAY);
                _tld = new Tld(_tldProperties);
                final Rect scaledDownTrackedBox = scaleDown(_trackedBox, workingRatio);
                Log.i(Util.TAG, "Working Ration: " + workingRatio + " / Tracking Box: " + _trackedBox
                        + " / Scaled down to: " + scaledDownTrackedBox);
                try {
                    _tld.init(_lastGray, scaledDownTrackedBox);
                } catch (Exception eInit) {
                    // start from scratch, you have to select an init box again!
                    _trackedBox = null;
                    _tld = null;
                    throw eInit; // re-throw it as it will be dealt with later
                }
            } else {
                Imgproc.cvtColor(_workingFrame, _currentGray, Imgproc.COLOR_RGB2GRAY);

                _processFrameStruct = _tld.processFrame(_lastGray, _currentGray);
                drawPoints(originalFrame, _processFrameStruct.lastPoints, workingRatio, new Scalar(255, 0, 0));
                drawPoints(originalFrame, _processFrameStruct.currentPoints, workingRatio,
                        new Scalar(0, 255, 0));
                drawBox(originalFrame, scaleUp(_processFrameStruct.currentBBox, workingRatio),
                        new Scalar(0, 0, 255));

                _currentGray.copyTo(_lastGray);

                // overlay the current positive examples on the real image (needs converting at the same time)
                //copyTo(_tld.getPPatterns(), originalFrame);
            }
        }
    } catch (Exception e) {
        _errMessage = e.getClass().getSimpleName() + " / " + e.getMessage();
        Log.e(Util.TAG, "TLDView PROBLEM", e);
    }

    if (_errMessage != null) {
        Imgproc.putText(originalFrame, _errMessage, new Point(0, 300), Core.FONT_HERSHEY_PLAIN, 1.3d,
                new Scalar(255, 0, 0), 2);
    }

    return originalFrame;
}

From source file:com.wallerlab.compcellscope.calcDPCTask.java

License:BSD License

protected Long doInBackground(Mat... matrix_list) {
    //int count = urls.length;
    Mat in1 = matrix_list[0];
    Mat in2 = matrix_list[1];
    Mat outputMat = matrix_list[2];

    // Mat(rows, cols, type): pass height() as rows and width() as cols
    Mat Mat1 = new Mat(in1.height(), in1.width(), in1.type());
    Mat Mat2 = new Mat(in2.height(), in2.width(), in2.type());
    in1.copyTo(Mat1);
    in2.copyTo(Mat2);

    Imgproc.cvtColor(Mat1, Mat1, Imgproc.COLOR_RGBA2GRAY, 1);
    Imgproc.cvtColor(Mat2, Mat2, Imgproc.COLOR_RGBA2GRAY, 1);

    Mat output = new Mat(Mat1.height(), Mat1.width(), CvType.CV_8UC4);
    Mat dpcSum = new Mat(Mat1.height(), Mat1.width(), CvType.CV_32FC1);
    Mat dpcDifference = new Mat(Mat1.height(), Mat1.width(), CvType.CV_32FC1);
    Mat dpcImgF = new Mat(Mat1.height(), Mat1.width(), CvType.CV_32FC1);

    /*
    Log.d(TAG,String.format("Mat1 format is %.1f-%.1f, type: %d",Mat1.size().width,Mat1.size().height,Mat1.type()));
    Log.d(TAG,String.format("Mat2 format is %.1f-%.1f, type: %d",Mat2.size().width,Mat2.size().height,Mat2.type()));
    */

    // Convert to Floats
    Mat1.convertTo(Mat1, CvType.CV_32FC1);
    Mat2.convertTo(Mat2, CvType.CV_32FC1);
    Core.add(Mat1, Mat2, dpcSum);
    Core.subtract(Mat1, Mat2, dpcDifference);
    Core.divide(dpcDifference, dpcSum, dpcImgF);
    Core.add(dpcImgF, new Scalar(1.0), dpcImgF); // Shift from [-1, 1] to [0, 2]
    Core.multiply(dpcImgF, new Scalar(110), dpcImgF); // Scale to roughly [0, 220] for 8-bit display
    dpcImgF.convertTo(output, CvType.CV_8UC1); // Convert back to 8-bit grayscale
    Imgproc.cvtColor(output, output, Imgproc.COLOR_GRAY2RGBA, 4);

    dpcSum.release();
    dpcDifference.release();
    dpcImgF.release();
    Mat1.release();
    Mat2.release();

    Mat maskedImg = Mat.zeros(output.rows(), output.cols(), CvType.CV_8UC4);
    int radius = maskedImg.width() / 2 + 25;
    Core.circle(maskedImg, new Point(maskedImg.width() / 2, maskedImg.height() / 2), radius,
            new Scalar(255, 255, 255), -1, 8, 0);
    output.copyTo(outputMat, maskedImg);
    output.release();
    maskedImg.release();
    return null;
}
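
Given the varargs signature, a hypothetical invocation (assuming calcDPCTask extends AsyncTask<Mat, ?, Long>, as the doInBackground override suggests; topFrame and bottomFrame are placeholder names for the two half-illumination captures) passes the inputs and a pre-allocated output Mat in that order:

Mat dpcResult = new Mat();
new calcDPCTask().execute(topFrame, bottomFrame, dpcResult); // matrix_list[0], [1], [2] respectively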