Example usage for org.opencv.core Mat rows

List of usage examples for org.opencv.core Mat rows

Introduction

On this page you can find example usages of the org.opencv.core Mat.rows() method.

Prototype

public int rows() 

Source Link

Usage

From source file:com.github.rosjava_catkin_package_a.ARLocROS.Imshow.java

License:Apache License

/**
 * Displays an OpenCV {@link Mat} in a reusable Swing window, converting it to
 * a {@link BufferedImage} first. The window is created lazily on the first
 * call and reused for all subsequent frames.
 *
 * @param opencvImage the image to display; 1-channel data is rendered as
 *                    grayscale, multi-channel as 3-byte BGR
 */
public static void show(Mat opencvImage) {

    // NOTE(review): Dimension(width, height) is given rows() (the height)
    // first and cols() (the width) second — likely swapped; confirm against
    // the Imshow constructor's expected argument order before changing.
    Dimension frameSize = new Dimension(opencvImage.rows(), opencvImage.cols());
    if (frame == null) {
        frame = new Imshow("", frameSize.height, frameSize.width);
        frame.Window.setVisible(true);

        // Closing the image window terminates the entire application.
        frame.Window.setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE);
        if (frame.SizeCustom) {
            // NOTE(review): OpenCV Size takes (width, height) but Height is
            // passed first here — verify this matches the intent.
            Imgproc.resize(opencvImage, opencvImage, new Size(frame.Height, frame.Width));
        }
    }
    BufferedImage bufImage = null;
    try {

        // Single-channel Mats map to grayscale, anything else to 3-byte BGR.
        int type = BufferedImage.TYPE_BYTE_GRAY;
        if (opencvImage.channels() > 1) {
            type = BufferedImage.TYPE_3BYTE_BGR;
        }
        // Copy the raw pixel data out of the Mat in one bulk get()...
        int bufferSize = opencvImage.channels() * opencvImage.cols() * opencvImage.rows();
        byte[] b = new byte[bufferSize];
        opencvImage.get(0, 0, b);
        BufferedImage bufferedImage = new BufferedImage(opencvImage.cols(), opencvImage.rows(), type);
        // ...and straight into the BufferedImage's backing byte buffer.
        final byte[] targetPixels = ((DataBufferByte) bufferedImage.getRaster().getDataBuffer()).getData();
        System.arraycopy(b, 0, targetPixels, 0, b.length);
        bufImage = bufferedImage;
        frame.image.setImage(bufImage);
        frame.Window.pack();
        frame.label.updateUI();
        //frame.Window.setVisible(true);
    } catch (Exception e) {
        e.printStackTrace();
    }
}

From source file:com.jeremydyer.nifi.ZoomImageProcessor.java

License:Apache License

/**
 * NiFi processor trigger: reads the incoming FlowFile as an image, scales it
 * up by {@code ZOOMING_FACTOR} with nearest-neighbour resizing, and writes
 * the result back as PNG. The untouched input is cloned to REL_ORIGINAL.
 */
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    final FlowFile original = session.get();
    if (original == null) {
        return; // nothing queued — yield until the next trigger
    }

    // Preserve the unmodified input on its own relationship.
    session.transfer(session.clone(original), REL_ORIGINAL);

    FlowFile ff = session.write(original, new StreamCallback() {
        @Override
        public void process(InputStream inputStream, OutputStream outputStream) throws IOException {
            try {
                int zoomingFactor = context.getProperty(ZOOMING_FACTOR).asInteger();

                BufferedImage image = ImageIO.read(inputStream);
                byte[] pixels = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
                // NOTE(review): the third Mat constructor argument must be a
                // CvType, not an imread flag. CV_LOAD_IMAGE_GRAYSCALE happens
                // to equal 0 (== CvType.CV_8UC1), so this only works by
                // coincidence and only for single-channel pixel data — should
                // be CvType.CV_8UC1 (or derived from image.getType()).
                Mat source = new Mat(image.getHeight(), image.getWidth(), Imgcodecs.CV_LOAD_IMAGE_GRAYSCALE);
                source.put(0, 0, pixels);

                // Destination is zoomed in both dimensions; INTER_NEAREST
                // keeps hard pixel edges (no interpolation blur).
                Mat destination = new Mat(source.rows() * zoomingFactor, source.cols() * zoomingFactor,
                        source.type());
                Imgproc.resize(source, destination, destination.size(), zoomingFactor, zoomingFactor,
                        Imgproc.INTER_NEAREST);

                // Re-encode as PNG and stream the bytes into the FlowFile.
                MatOfByte bytemat = new MatOfByte();
                Imgcodecs.imencode(".png", destination, bytemat);
                pixels = bytemat.toArray();
                outputStream.write(pixels);

            } catch (Exception ex) {
                // Best-effort: log and continue rather than failing the
                // session; the FlowFile may end up empty or truncated.
                getLogger().error(ex.getMessage());
                ex.printStackTrace();
            }
        }
    });

    session.transfer(ff, REL_SUCCESS);

}

From source file:com.jiminger.image.mjpeg.MJPEGWriter.java

License:Open Source License

/**
 * Appends one image file as a frame to the MJPEG stream. Frame dimensions
 * are captured lazily from the first file appended and reused afterwards.
 */
static public boolean appendFile(final String filename) {
    if (height == -1) {
        // First frame: read it once just to learn the stream dimensions.
        final Mat firstFrame = Imgcodecs.imread(filename);
        width = firstFrame.cols();
        height = firstFrame.rows();
    }
    return doappendFile(filename, width, height);
}

From source file:com.joravasal.keyface.EigenFacesActivity.java

License:Open Source License

/**
 * Builds the "Eigenfaces" screen: shows the average face followed by every
 * eigenface produced by the PCA recognizer, each normalized to grayscale and
 * converted to an RGBA bitmap.
 */
@Override
public void onCreate(Bundle savedInstanceState) {
    Log.i("eigenFacesActivity::", "OnCreate");
    super.onCreate(savedInstanceState);

    setContentView(R.layout.eigenfaces);
    setTitle("Eigenfaces");
    Mat aver = ((PCAfaceRecog) KeyFaceActivity.recogAlgorithm).getAverage();
    Mat faces = ((PCAfaceRecog) KeyFaceActivity.recogAlgorithm).getEigenFaces();

    // FIX: Integer.parseInt instead of the deprecated boxing constructor
    // new Integer(String); the parsed value is the saved face edge length.
    int size = Integer.parseInt(KeyFaceActivity.prefs.getString("savedFaceSize", "200"));
    Mat aux = new Mat();

    // The average face is stored as a single row; reshape it to size x size.
    aver = aver.reshape(1, size);
    aver = toGrayscale(aver);
    average = Bitmap.createBitmap(size, size, Bitmap.Config.ARGB_8888);
    Imgproc.cvtColor(aver, aux, Imgproc.COLOR_GRAY2RGBA, 4);
    Utils.matToBitmap(aux, average);
    LinearLayout layout = (LinearLayout) findViewById(id.eigenFacesHorizontalLayout);

    TextView avrgImgTV = new TextView(getApplicationContext());
    avrgImgTV.setText("Average image:");
    avrgImgTV.setPadding(5, 10, 10, 20);
    avrgImgTV.setGravity(Gravity.CENTER);

    TextView eigenfacesImgsTV = new TextView(getApplicationContext());
    eigenfacesImgsTV.setText("Eigenfaces:");
    eigenfacesImgsTV.setPadding(5, 10, 10, 20);
    eigenfacesImgsTV.setGravity(Gravity.CENTER);

    ImageView imgV = new ImageView(getApplicationContext());

    imgV.setClickable(false);
    imgV.setVisibility(0); // 0 == View.VISIBLE
    imgV.setPadding(0, 10, 10, 20);
    imgV.setImageBitmap(average);

    layout.addView(avrgImgTV);
    layout.addView(imgV);
    layout.addView(eigenfacesImgsTV);

    // One ImageView and one Bitmap per eigenface row.
    LinkedList<ImageView> variables = new LinkedList<ImageView>();
    eigenfacesList = new LinkedList<Bitmap>();
    for (int i = 0; i < faces.rows(); i++) {
        // Hoisted: avoids repeated variables.get(i) lookups on a LinkedList.
        ImageView faceView = new ImageView(getApplicationContext());
        variables.add(faceView);
        eigenfacesList.add(Bitmap.createBitmap(size, size, Bitmap.Config.ARGB_8888));

        // Each eigenface is stored as one row; reshape, rescale to 0..255,
        // then expand to RGBA for the bitmap conversion.
        // (FIX: dropped the dead `aux = new Mat()` that was immediately
        // overwritten on the next line.)
        aux = faces.row(i).reshape(1, size);
        aux = toGrayscale(aux);
        Mat auxGreyC4 = new Mat();
        Imgproc.cvtColor(aux, auxGreyC4, Imgproc.COLOR_GRAY2RGBA, 4);
        Utils.matToBitmap(auxGreyC4, eigenfacesList.get(i));

        faceView.setClickable(false);
        faceView.setVisibility(0); // 0 == View.VISIBLE
        faceView.setPadding(0, 10, 10, 20);
        faceView.setImageBitmap(eigenfacesList.get(i));
        layout.addView(faceView);
    }

    Button save = (Button) findViewById(id.saveEigenfacesB);
    save.setOnClickListener(this);
}

From source file:com.joravasal.keyface.EigenFacesActivity.java

License:Open Source License

/**
 * Converts a matrix with any values into a matrix with correct values (between 0 and 255, both included) to be shown as an image.
 * @param mat: The matrix to convert/* w w  w.jav a  2 s  .  c o  m*/
 * @return A matrix that can be used as an image
 */
private Mat toGrayscale(Mat mat) {
    Mat res = new Mat(mat.rows(), mat.cols(), CvType.CV_8UC1);
    double min, max;
    MinMaxLocResult minmax = Core.minMaxLoc(mat);
    min = minmax.minVal;
    max = minmax.maxVal;
    for (int row = 0; row < mat.rows(); row++) {
        for (int col = 0; col < mat.cols(); col++) {
            res.put(row, col, 255 * ((mat.get(row, col)[0] - min) / (max - min)));
        }
    }
    return res;
}

From source file:com.Linguist.model.AdaptiveThresholdClass.java

/**
 * Binarizes the uploaded image with a locally adaptive (mean) threshold and
 * writes the result to the upload folder as adaptive.jpg.
 */
@Override
public File imagePreprocessing(String imageFile, String extnsn) {
    final String uploadDir = "C:\\Users\\User\\Documents\\GitHub\\Linguist\\web\\uploadedImage\\";
    final String outputPath = uploadDir + "adaptive.jpg";

    Mat graySource = imread(uploadDir + imageFile, IMREAD_GRAYSCALE);
    Mat thresholded = new Mat(graySource.rows(), graySource.cols(), graySource.type());

    // 15x15 neighbourhood mean, offset by 40, binary output.
    adaptiveThreshold(graySource, thresholded, 255, ADAPTIVE_THRESH_MEAN_C, THRESH_BINARY, 15, 40);
    Imgcodecs.imwrite(outputPath, thresholded);
    return new File(outputPath);
}

From source file:com.Linguist.model.grayscaleClass.java

/**
 * Converts the named uploaded image to grayscale via OpenCV and writes it
 * back to the upload folder as {@code grayscale.<ext>}.
 *
 * @param image  file name of the uploaded image (relative to the upload dir)
 * @param extnsn original file extension, including the leading dot
 * @return the grayscale output file, or {@code null} if conversion failed
 */
@Override
public File imagePreprocessing(String image, String extnsn) {

    BufferedImage bImge = null;
    BufferedImage bImage2 = null;
    File grayscle = null;

    // FIX: try-with-resources — the FileInputStream was previously never
    // closed (resource leak on every call).
    try (InputStream input = new FileInputStream(
            "C:\\Users\\User\\Documents\\GitHub\\Linguist\\web\\uploadedImage\\" + image)) {

        bImge = ImageIO.read(input);
        byte[] imgeByte = ((DataBufferByte) bImge.getRaster().getDataBuffer()).getData();
        // NOTE(review): a BufferedImage raster is typically BGR-ordered while
        // COLOR_RGB2GRAY assumes RGB, so the channel weights may be swapped —
        // confirm against expected output.
        Mat mat1 = new Mat(bImge.getHeight(), bImge.getWidth(), CvType.CV_8UC3);
        mat1.put(0, 0, imgeByte);
        Mat mat2 = new Mat(bImge.getHeight(), bImge.getWidth(), CvType.CV_8UC1);
        Imgproc.cvtColor(mat1, mat2, Imgproc.COLOR_RGB2GRAY);
        // Copy the grayscale pixels back into a Java image.
        byte[] imageData = new byte[mat2.rows() * mat2.cols() * (int) (mat2.elemSize())];
        mat2.get(0, 0, imageData);
        bImage2 = new BufferedImage(mat2.cols(), mat2.rows(), BufferedImage.TYPE_BYTE_GRAY);
        bImage2.getRaster().setDataElements(0, 0, mat2.cols(), mat2.rows(), imageData);

        // Map the incoming extension to the one used in the output name.
        String extn;
        switch (extnsn) {
        case ".jpg":
            extn = "jpg";
            break;
        case ".png":
            extn = "png";
            break;
        case ".pdf":
            extn = "pdf";
            break;
        case ".tiff":
            extn = "tif";
            break;
        default:
            // FIX: unknown extensions previously left extn null and produced
            // a file literally named "grayscale.null".
            extn = "jpg";
            break;
        }
        // Writing the grayscale image to the folder. Note: the payload is
        // always JPEG-encoded regardless of the chosen file extension.
        grayscle = new File(
                "C:\\Users\\User\\Documents\\GitHub\\Linguist\\web\\uploadedImage\\grayscale" + "." + extn);
        ImageIO.write(bImage2, "jpg", grayscle);
    } catch (IOException ex) {
        System.out.println("" + ex.getMessage());
    } catch (Exception ex) {
        Logger.getLogger(grayscaleClass.class.getName()).log(Level.SEVERE, null, ex);
    }
    return grayscle;

}

From source file:com.Linguist.model.sharpeningClass.java

/**
 * Boosts contrast via histogram equalization (despite the enclosing class
 * name, no sharpening kernel is involved) and saves the result as
 * contrast.jpg in the upload folder.
 *
 * @return the written output file, or {@code null} if processing failed
 */
public File imagePreprocessing(String imgeNme, String extnsn) {
    File sharpen = null;
    try {
        final String uploadDir = "C:\\Users\\User\\Documents\\GitHub\\Linguist\\web\\uploadedImage\\";
        Mat source = Imgcodecs.imread(uploadDir + imgeNme, Imgcodecs.CV_LOAD_IMAGE_GRAYSCALE);
        Mat equalized = new Mat(source.rows(), source.cols(), source.type());

        Imgproc.equalizeHist(source, equalized);
        Imgcodecs.imwrite(uploadDir + "contrast.jpg", equalized);
        sharpen = new File(uploadDir + "contrast.jpg");
    } catch (Exception e) {
        System.out.println("error: " + e.getMessage());
    }
    return sharpen;
}

From source file:com.minio.io.alice.MatVideoWriter.java

License:Open Source License

/**
 * Renders a Mat to a Bitmap and JPEG-encodes it at full quality.
 *
 * @return the encoded bytes, or {@code null} if conversion failed
 */
private byte[] captureBitmap(Mat mat) {
    try {
        Bitmap rendered = Bitmap.createBitmap(mat.cols(), mat.rows(), Bitmap.Config.ARGB_8888);
        Utils.matToBitmap(mat, rendered);

        ByteArrayOutputStream encoded = new ByteArrayOutputStream();
        rendered.compress(Bitmap.CompressFormat.JPEG, 100, encoded);

        // Snapshot the encoded bytes; closing a ByteArrayOutputStream is a
        // documented no-op but mirrors the original flow.
        matByteArray = encoded.toByteArray();
        encoded.close();
        return matByteArray;

    } catch (Exception ex) {
        System.out.println(ex.getMessage());
    }
    return null;
}

From source file:com.mycompany.analyzer.Analyzer.java

/**
 * Copies an OpenCV Mat's raw pixel buffer into a BufferedImage: grayscale
 * for single-channel input, 3-byte BGR otherwise.
 */
public BufferedImage mat2BufferedImage(Mat m) {
    final int imageType = m.channels() > 1 ? BufferedImage.TYPE_3BYTE_BGR : BufferedImage.TYPE_BYTE_GRAY;

    // Pull every pixel out of the Mat in one bulk call.
    final byte[] pixels = new byte[m.channels() * m.cols() * m.rows()];
    m.get(0, 0, pixels);

    // Write straight into the image's backing byte buffer.
    final BufferedImage image = new BufferedImage(m.cols(), m.rows(), imageType);
    final byte[] target = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
    System.arraycopy(pixels, 0, target, 0, pixels.length);
    return image;
}