Example usage for org.opencv.core Mat cols

Introduction

On this page you can find example usages of org.opencv.core.Mat.cols().

Prototype

public int cols() 
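
Mat.cols() returns the number of columns in the matrix (the image width in pixels); Mat.rows() returns the number of rows (the height). A minimal sketch of querying both, assuming the OpenCV Java bindings are installed and using a placeholder input path:

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.imgcodecs.Imgcodecs;

public class MatColsExample {
    public static void main(String[] args) {
        // Load the OpenCV native library before any Mat operations.
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // "input.jpg" is a placeholder path used for illustration.
        Mat image = Imgcodecs.imread("input.jpg");
        if (image.empty()) {
            System.out.println("Could not read image.");
            return;
        }

        int width = image.cols();  // number of columns (image width)
        int height = image.rows(); // number of rows (image height)
        System.out.println("Image size: " + width + " x " + height);
    }
}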

Usage

From source file:com.Linguist.model.sharpeningClass.java

public File imagePreprocessing(String imgeNme, String extnsn) {
    File sharpen = null;
    try {
        // System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        Mat source = Imgcodecs.imread(
                "C:\\Users\\User\\Documents\\GitHub\\Linguist\\web\\uploadedImage\\" + imgeNme,
                Imgcodecs.CV_LOAD_IMAGE_GRAYSCALE);
        Mat destination = new Mat(source.rows(), source.cols(), source.type());

        Imgproc.equalizeHist(source, destination);
        Imgcodecs.imwrite("C:\\Users\\User\\Documents\\GitHub\\Linguist\\web\\uploadedImage\\contrast.jpg",
                destination);
        sharpen = new File("C:\\Users\\User\\Documents\\GitHub\\Linguist\\web\\uploadedImage\\contrast.jpg");
    } catch (Exception e) {
        System.out.println("error: " + e.getMessage());
    }
    return sharpen;
}

From source file:com.louislepper.waveform.MainActivity.java

License:Apache License

@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    Mat currentMat = inputFrame.rgba();

    Imgproc.cvtColor(currentMat, currentMat, Imgproc.COLOR_RGBA2GRAY);
    Imgproc.GaussianBlur(currentMat, currentMat, new Size(5, 5), 2, 2);
    Imgproc.threshold(currentMat, currentMat, LIGHT_THRESH, LIGHT_THRESH, Imgproc.THRESH_TRUNC);
    Imgproc.GaussianBlur(currentMat, currentMat, new Size(5, 5), 2, 2);
    Imgproc.GaussianBlur(currentMat, currentMat, new Size(9, 9), 0, 0);
    Mat kernel = Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(5, 5));
    Imgproc.dilate(currentMat, currentMat, kernel);

    //Canny edge detection
    Imgproc.Canny(currentMat, currentMat, CANNY_LOW, CANNY_HIGH);

    if (soundData == null || soundData.length != currentMat.cols()) {
        soundData = new short[currentMat.cols()];
    }

    imageArrayToSoundArray(new ArrayMat(currentMat), soundData);

    SampleInterpolator.StartAndEnd startAndEnd = SampleInterpolator.interpolateInvalidSamples(soundData);

    if (smoothing) {
        soundData = SampleCrossfader.crossfade(soundData, startAndEnd.getStart(), startAndEnd.getLength());
    }

    if (audioThread == null || !audioThread.isAlive()) {
        audioThread = new AudioThread();
        keyboardView.setMidiListener(audioThread);
        //This should maybe get its value from preferences. Not sure if number picker will have been set in time.
        audioThread.setOctave(numberPicker.getValue());
        if (currentScreen.equals(KEYBOARD)) {
            audioThread.keyboardOn();
        } else {
            audioThread.keyboardOff();
        }
        audioThread.start();
    }

    //Send array here.
    audioThread.setWaveform(soundData, startAndEnd);

    if (lineFeedback) {
        Imgproc.cvtColor(currentMat, currentMat, Imgproc.COLOR_GRAY2RGBA);

        soundArrayToImage(soundData, currentMat);
    }

    return currentMat;
}

From source file:com.louislepper.waveform.MainActivity.java

License:Apache License

private Mat soundArrayToImage(short[] array, Mat image) {
    final double[] red = new double[] { 255.0, 0.0, 0.0, 0.0 };
    for (int x = 0; x < image.cols(); x++) {
        if (array[x] != -1) {
            image.put(array[x], x, red);
        }
    }
    return image;
}

From source file:com.minio.io.alice.MatVideoWriter.java

License:Open Source License

private byte[] captureBitmap(Mat mat) {
    Bitmap bitmap;
    try {
        bitmap = Bitmap.createBitmap(mat.cols(), mat.rows(), Bitmap.Config.ARGB_8888);
        Utils.matToBitmap(mat, bitmap);

        ByteArrayOutputStream byteStream = new ByteArrayOutputStream();
        bitmap.compress(Bitmap.CompressFormat.JPEG, 100, byteStream);

        // Convert ByteArrayOutputStream to byte array. Close stream.
        matByteArray = byteStream.toByteArray();
        byteStream.close();
        return matByteArray;

    } catch (Exception ex) {
        System.out.println(ex.getMessage());
    }
    return null;
}

From source file:com.mycompany.analyzer.Analyzer.java

public BufferedImage mat2BufferedImage(Mat m) {
    int type = BufferedImage.TYPE_BYTE_GRAY;
    if (m.channels() > 1) {
        type = BufferedImage.TYPE_3BYTE_BGR;
    }
    int bufferSize = m.channels() * m.cols() * m.rows();
    byte[] b = new byte[bufferSize];
    m.get(0, 0, b); // get all the pixels
    BufferedImage image = new BufferedImage(m.cols(), m.rows(), type);
    final byte[] targetPixels = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
    System.arraycopy(b, 0, targetPixels, 0, b.length);
    return image;
}

From source file:com.oetermann.imageclassifier.DescriptorExtractorWrapper.java

License:Open Source License

public List<Mat> readImages(List<String> files, boolean grayscale) {
    List<Mat> images = new ArrayList<>();
    Mat mat;

    for (ListIterator<String> it = files.listIterator(); it.hasNext();) {
        String file = it.next();
        mat = Imgcodecs.imread(file);
        if (mat.dims() > 0 && mat.cols() > 0 && mat.rows() > 0) {
            if (grayscale) {
                Imgproc.cvtColor(mat, mat, Imgproc.COLOR_RGB2GRAY);
            }
            images.add(mat);
        } else {
            it.remove();
            System.out.println("Cannot read file: " + file);
        }
    }
    return images;
}

From source file:com.oetermann.imageclassifier.Util.java

License:Open Source License

public static void saveMat(String path, Mat mat) {
    File file = new File(path).getAbsoluteFile();
    file.getParentFile().mkdirs();
    try {
        int rows = mat.rows();
        int cols = mat.cols();
        int type = mat.type();
        Object data;
        switch (mat.type()) {
        case CvType.CV_8S:
        case CvType.CV_8U:
            data = new byte[(int) mat.total() * mat.channels()];
            mat.get(0, 0, (byte[]) data);
            break;
        case CvType.CV_16S:
        case CvType.CV_16U:
            data = new short[(int) mat.total() * mat.channels()];
            mat.get(0, 0, (short[]) data);
            break;
        case CvType.CV_32S:
            data = new int[(int) mat.total() * mat.channels()];
            mat.get(0, 0, (int[]) data);
            break;
        case CvType.CV_32F:
            data = new float[(int) mat.total() * mat.channels()];
            mat.get(0, 0, (float[]) data);
            break;
        case CvType.CV_64F:
            data = new double[(int) mat.total() * mat.channels()];
            mat.get(0, 0, (double[]) data);
            break;
        default:
            data = null;
        }
        try (ObjectOutputStream oos = new ObjectOutputStream(new FileOutputStream(path))) {
            oos.writeObject(rows);
            oos.writeObject(cols);
            oos.writeObject(type);
            oos.writeObject(data);
        }
    } catch (IOException | ClassCastException ex) {
        System.err.println("ERROR: Could not save mat to file: " + path);
        //            Logger.getLogger(ImageClassifier.class.getName()).log(Level.SEVERE, null, ex);
    }
}

From source file:com.opencv.mouse.MouseMainFrame.java

private void jToggleButton1ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jToggleButton1ActionPerformed
    try {
        robot = new Robot();
    } catch (AWTException e) {
    }
    t = new Thread() {
        public void run() {

            MatToBufImg matToBufferedImageConverter = new MatToBufImg(); //Utility class to convert Mat to Java's BufferedImage

            webCam = new VideoCapture(0);
            if (!webCam.isOpened()) {
                System.out.println("Kamera Ak Deil..!");
            } else
                System.out.println("Kamera Ald --> " + webCam.toString());

            Mat webcam_image = new Mat(480, 640, CvType.CV_8UC3);
            // Mat(rows, cols, type): pass rows() first, then cols()
            Mat hsv_image = new Mat(webcam_image.rows(), webcam_image.cols(), CvType.CV_8UC3);
            thresholded = new Mat(webcam_image.rows(), webcam_image.cols(), CvType.CV_8UC3,
                    new Scalar(255, 255, 255));
            if (webCam.isOpened()) {
                try {
                    Thread.sleep(1000);
                } catch (InterruptedException ex) {

                }

                while (true) {
                    try {
                        webCam.read(webcam_image);
                    } catch (Exception e) {
                        System.out.println("Web Cam Kapal !");
                    }

                    if (!webcam_image.empty()) {
                        try {
                            Thread.sleep(10);
                        } catch (InterruptedException ex) {

                        }
                        // Mat inRangeResim = webcam_image.clone();
                        /*
                        Mat inRangeResim = webcam_image.clone();
                        matToBufferedImageConverter.setMatrix(inRangeResim, ".jpg");
                        image =matToBufferedImageConverter.getBufferedImage();
                        Highgui.imwrite("D:\\bitirme.jpg", inRangeResim);
                        */

                        //       MatOfRect faceDetections = new MatOfRect();
                        Imgproc.cvtColor(webcam_image, hsv_image, Imgproc.COLOR_BGR2HSV);
                        //black HSV range 0 0 0 - 180 45 100
                        //HSV blue   Core.inRange(webcam_image, new Scalar(75,63,40), new Scalar(118,255,255), webcam_image);
                        //RGB blue        // Core.inRange(webcam_image, new Scalar(50,0,0), new Scalar(255,0,0), webcam_image);
                        //orange HSV      Core.inRange(webcam_image, new Scalar(5,50,50), new Scalar(15,255,255), webcam_image);
                        //Core.inRange(webcam_image, new Scalar(80,50,50), new Scalar(140,255,255), webcam_image);
                        //        Core.inRange(webcam_image, new Scalar(29,0,24), new Scalar(30,155,155), webcam_image);

                        //HSV blue
                        //                       jSliderHmin.setValue(75);
                        //                       jSliderSmin.setValue(63);
                        //                       jSliderVmin.setValue(40);
                        //                       jSliderHmax.setValue(118);
                        //                       jSliderSmax.setValue(255);
                        //                       jSliderVmax.setValue(255);
                        //
                        //                       jSliderHmin.setValue(0);
                        //                       jSliderSmin.setValue(0);
                        //                       jSliderVmin.setValue(0);
                        //                       jSliderHmax.setValue(179);
                        //                       jSliderSmax.setValue(39);
                        //                       jSliderVmax.setValue(120);
                        Core.inRange(hsv_image, new Scalar(100, 97, 206), new Scalar(120, 255, 255),
                                thresholded);
                        Imgproc.dilate(thresholded, thresholded, element);

                        Imgproc.erode(thresholded, thresholded, element);
                        Imgproc.dilate(thresholded, thresholded, element);

                        Imgproc.erode(thresholded, thresholded, element);

                        List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
                        Imgproc.findContours(thresholded, contours, new Mat(), Imgproc.RETR_LIST,
                                Imgproc.CHAIN_APPROX_SIMPLE, new Point(0, 0));
                        Imgproc.drawContours(thresholded, contours, -1, new Scalar(255.0, 255.0, 255.0), 5);

                        for (int i = 0; i < contours.size(); i++) {
                            //  System.out.println(Imgproc.contourArea(contours.get(i)));
                            //    if (Imgproc.contourArea(contours.get(i)) > 1 ){
                            Rect rect = Imgproc.boundingRect(contours.get(i));
                            kesit = thresholded.submat(rect);
                            //System.out.println(rect.height);
                            // if (rect.height > 20 && rect.height <30 && rect.width < 30 && rect.width >20){
                            //  System.out.println(rect.x +","+rect.y+","+rect.height+","+rect.width);
                            Core.rectangle(webcam_image, new Point(rect.x, rect.y),
                                    new Point(rect.x + rect.width, rect.y + rect.height),
                                    new Scalar(0, 0, 255));

                            //}
                            //}
                            if (rect.height > 15 && rect.width > 15) {
                                System.out.println(rect.x + "\n" + rect.y);
                                Core.circle(webcam_image, new Point(rect.x, rect.y), i, new Scalar(0, 255, 0));
                                robot.mouseMove((int) (rect.x * 3), (int) (rect.y * 2.25));
                            }

                        }

                        //   Imgproc.cvtColor(webcam_image, webcam_image, Imgproc.COLOR_HSV2BGR);
                        //  hsv_image.convertTo(hsv_image, CvType.CV_32F);

                        //   Imgproc.Canny(thresholded, thresholded, 10, 20);
                        //   Core.bitwise_and(thresholded, webcam_image, webcam_image);

                        //this works

                        //    Imgproc.cvtColor(thresholded, thresholded, Imgproc.COLOR_GRAY2BGR);
                        //  Core.bitwise_and(thresholded, webcam_image, webcam_image);

                        //    webcam_image.copyTo(hsv_image, thresholded);
                        //                            System.out.println("<------------------------------>");
                        //                            System.out.println("BGR: " +webcam_image.channels()+"  Size : "+webcam_image.size());
                        //                            System.out.println("HSV :"+hsv_image.channels()+"  Size: "+hsv_image.size());
                        //                            System.out.println("Thresold :"+thresholded.channels()+"  Size : "+thresholded.size());
                        //                            System.out.println("<------------------------------>");
                        //
                        matToBufferedImageConverter.setMatrix(webcam_image, ".jpg");

                        image = matToBufferedImageConverter.getBufferedImage();
                        g.drawImage(image, 0, 0, webcam_image.cols(), webcam_image.rows(), null);

                    } else {

                        System.out.println("Grnt yok!");
                        break;
                    }
                }
                //           webCam.release();
            }

        }
    };
    threadDurum = true;
    t.start();
}

From source file:com.orange.documentare.core.image.opencv.OpenCvImage.java

License:Open Source License

private static byte[] computeImageBytesCount(Mat image, boolean raw) {
    int rawBytesCount = image.channels() * image.rows() * image.cols();
    int simDocExtra = raw ? image.rows() : 0;
    return new byte[rawBytesCount + simDocExtra];
}

From source file:com.orange.documentare.core.image.opencv.OpenCvImage.java

License:Open Source License

private static void fillByteArray(byte[] byteArray, Mat image, boolean raw) {
    int colsNb = image.cols();
    int bytesPerPixel = image.channels();
    int bytesPerRow = colsNb * bytesPerPixel + (raw ? 1 : 0);
    byte[] pixel = new byte[bytesPerPixel];
    int magicNumberOffset = 0;
    for (int y = 0; y < image.rows(); y++) {
        for (int x = 0; x < colsNb; x++) {
            image.get(y, x, pixel);
            for (int z = 0; z < bytesPerPixel; z++) {
                byteArray[magicNumberOffset + y * bytesPerRow + x * bytesPerPixel + z] = pixel[z];
            }
        }
        if (raw) {
            byteArray[magicNumberOffset + y * bytesPerRow + colsNb * bytesPerPixel] = SIMDOC_LINE_TERMINATION;
        }
    }
}