Example usage for org.opencv.core Mat get

List of usage examples for org.opencv.core Mat get

Introduction

In this page you can find the example usage for org.opencv.core Mat get.

Prototype

public int get(int row, int col, double[] data) 

Source Link

Usage

From source file:qupath.opencv.DetectCytokeratinCV.java

License: Open Source License

/**
 * Recursively accumulates contours into an Area, adding outer contours and
 * subtracting holes according to nesting depth.
 *
 * @param contours  all detected contours
 * @param hierarchy OpenCV contour hierarchy Mat; each entry encodes
 *                  [next sibling, previous sibling, first child, parent]
 * @param area      the Area to modify in place
 * @param row       index of the first contour at the current hierarchy level
 * @param depth     nesting depth; even depths add, odd depths subtract
 */
public static void updateArea(final List<MatOfPoint> contours, final Mat hierarchy, final Area area, int row,
        int depth) {
    final int[] info = new int[4];
    while (row >= 0) {
        hierarchy.get(0, row, info);
        final MatOfPoint contour = contours.get(row);

        // Ignore isolated pixels; otherwise alternate add/subtract by depth
        if (contour.height() > 2) {
            final Area shape = new Area(getContour(contour));
            if (depth % 2 == 0)
                area.add(shape);
            else
                area.subtract(shape);
        }

        // Descend into the first child contour, one level deeper
        if (info[2] >= 0)
            updateArea(contours, hierarchy, area, info[2], depth + 1);

        // Advance to the next sibling at this level (-1 terminates the loop)
        row = info[0];
    }
}

From source file:qupath.opencv.processing.PixelImageCV.java

License: Open Source License

/**
 * Wraps an OpenCV Mat as a float pixel image, converting to CV_32F if needed.
 * Assumes a single-channel Mat (pixels array is sized by mat.total() only) —
 * TODO confirm callers never pass multi-channel input.
 *
 * @param mat source Mat; not modified
 */
public PixelImageCV(Mat mat) {
    // Extract dimensions and pixels
    this.width = (int) mat.size().width;
    this.height = (int) mat.size().height;

    pixels = new float[(int) mat.total()];
    if (mat.depth() == CvType.CV_32F)
        mat.get(0, 0, pixels);
    else {
        // Convert through a temporary Mat; release it to free its native
        // memory (the original leaked it — Mats are not garbage-collected)
        Mat mat2 = new Mat();
        try {
            mat.convertTo(mat2, CvType.CV_32F);
            mat2.get(0, 0, pixels);
        } finally {
            mat2.release();
        }
    }
}

From source file:qupath.opencv.TissueSegmentationCommand.java

License: Open Source License

/**
 * Retrains the tissue classifier whenever the object hierarchy changes and
 * paints a per-pixel classification overlay.
 *
 * Annotations classed as whitespace become background (label 1), all other
 * annotations become foreground (label 2). A label mask is rasterized, an
 * RTrees model is trained on the feature vectors under labeled pixels, and
 * every pixel of the downsampled image is classified.
 *
 * @param event the hierarchy event; transient (isChanging) events are ignored
 */
@Override
public void hierarchyChanged(PathObjectHierarchyEvent event) {
    // Guard against re-entrancy: we fire a hierarchy event ourselves below
    if (img == null || isChanging || event.isChanging())
        return;

    // Partition annotations into background (whitespace) and foreground
    List<PathObject> annotations = hierarchy.getObjects(null, PathAnnotationObject.class);
    if (annotation != null)
        annotations.remove(annotation);
    List<PathObject> background = new ArrayList<>();
    List<PathObject> foreground = new ArrayList<>();
    PathClass whitespaceClass = PathClassFactory.getDefaultPathClass(PathClasses.WHITESPACE);
    for (PathObject a : annotations) {
        if (a == annotation)
            continue;
        if (a.getPathClass() == whitespaceClass)
            background.add(a);
        else
            foreground.add(a);
    }

    // Need at least one example of each class to train
    if (background.isEmpty() || foreground.isEmpty())
        return;

    // Create labels: GRAY = background, WHITE = foreground, BLACK = unlabeled.
    // NOTE(review): draw() paints outlines only, so training samples come from
    // annotation boundaries rather than interiors — confirm this is intended
    // (fill() would label the full region).
    Graphics2D g2d = imgMask.createGraphics();
    g2d.setColor(Color.BLACK);
    g2d.fillRect(0, 0, img.getWidth(), img.getHeight());
    g2d.scale((double) img.getWidth() / imageData.getServer().getWidth(),
            (double) img.getHeight() / imageData.getServer().getHeight());
    g2d.setColor(Color.GRAY);
    for (PathObject a : background) {
        g2d.draw(PathROIToolsAwt.getShape(a.getROI()));
    }
    g2d.setColor(Color.WHITE);
    for (PathObject a : foreground) {
        g2d.draw(PathROIToolsAwt.getShape(a.getROI()));
    }
    g2d.dispose();

    // Gather training samples: every non-black mask pixel contributes its
    // feature vector and a class label (1 = background, 2 = foreground)
    RTrees trees = RTrees.create();

    byte[] bytes = ((DataBufferByte) imgMask.getRaster().getDataBuffer()).getData();
    int n = 0;
    for (int i = 0; i < bytes.length; i++) {
        byte b = bytes[i];
        if (b == (byte) 0)
            continue;
        if (b == (byte) 255) {
            trainingResponses[n] = 2;
        } else {
            trainingResponses[n] = 1;
        }
        for (int k = 0; k < featureStride; k++)
            training[n * featureStride + k] = features[i * featureStride + k];
        n++;
    }

    Mat matTraining = new Mat(n, featureStride, CvType.CV_32FC1);
    matTraining.put(0, 0, Arrays.copyOf(training, n * featureStride));
    Mat matResponses = new Mat(n, 1, CvType.CV_32SC1);
    matResponses.put(0, 0, Arrays.copyOf(trainingResponses, n));

    trees.train(matTraining, Ml.ROW_SAMPLE, matResponses);

    matTraining.release();
    matResponses.release();

    // Classify every pixel, then release the Mats — the original leaked
    // samples/results (native memory), inconsistent with the training Mats
    Mat samples = new Mat(buf.length, featureStride, CvType.CV_32FC1);
    samples.put(0, 0, features);
    Mat results = new Mat(buf.length, 1, CvType.CV_32SC1);
    trees.predict(samples, results, RTrees.PREDICT_AUTO);
    BufferedImage imgOutput = new BufferedImage(img.getWidth(), img.getHeight(), BufferedImage.TYPE_INT_RGB);
    float[] resultsArray = new float[buf.length];
    results.get(0, 0, resultsArray);
    samples.release();
    results.release();

    // Paint the overlay: red = background class, white = foreground class
    for (int i = 0; i < resultsArray.length; i++) {
        if (resultsArray[i] == 1f)
            imgOutput.setRGB(i % img.getWidth(), i / img.getWidth(), ColorTools.makeRGB(255, 0, 0));
        else if (resultsArray[i] == 2f)
            imgOutput.setRGB(i % img.getWidth(), i / img.getWidth(), ColorTools.makeRGB(255, 255, 255));
    }

    // Notify listeners without re-triggering ourselves
    isChanging = true;
    hierarchy.fireHierarchyChangedEvent(this);
    isChanging = false;
}

From source file:sanntidvideo.Main.java

/**
 * Converts an OpenCV Mat into a BufferedImage by copying its pixel buffer.
 * Assumes an 8-bit Mat (CV_8UC1 or CV_8UC3) — TODO confirm callers.
 *
 * @param m source Mat; not modified
 * @return a new BufferedImage (TYPE_BYTE_GRAY for 1 channel, TYPE_3BYTE_BGR otherwise)
 */
public BufferedImage toBufferedImage(Mat m) {
    final int type = m.channels() > 1 ? BufferedImage.TYPE_3BYTE_BGR : BufferedImage.TYPE_BYTE_GRAY;

    // Copy every pixel out of OpenCV's native memory into a Java array
    final byte[] pixels = new byte[m.channels() * m.cols() * m.rows()];
    m.get(0, 0, pixels);

    // Blit directly into the image's backing raster
    final BufferedImage image = new BufferedImage(m.cols(), m.rows(), type);
    final byte[] raster = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
    System.arraycopy(pixels, 0, raster, 0, pixels.length);
    return image;
}

From source file:sanntidvideo.VideoCap.java

/**
 * Converts an OpenCV Mat into an AWT Image by copying its pixel buffer.
 * Assumes an 8-bit Mat (CV_8UC1 or CV_8UC3) — TODO confirm callers.
 *
 * @param m source Mat; not modified
 * @return a new BufferedImage (TYPE_BYTE_GRAY for 1 channel, TYPE_3BYTE_BGR otherwise)
 */
public Image toBufferedImage(Mat m) {
    int imageType;
    if (m.channels() > 1)
        imageType = BufferedImage.TYPE_3BYTE_BGR;
    else
        imageType = BufferedImage.TYPE_BYTE_GRAY;

    // Pull all pixels from native memory in a single call
    byte[] data = new byte[m.channels() * m.cols() * m.rows()];
    m.get(0, 0, data);

    // Copy into the backing array of a freshly created image
    BufferedImage result = new BufferedImage(m.cols(), m.rows(), imageType);
    byte[] dest = ((DataBufferByte) result.getRaster().getDataBuffer()).getData();
    System.arraycopy(data, 0, dest, 0, data.length);
    return result;
}

From source file:servershootingstar.BallDetector.java

/**
 * Converts an OpenCV Mat to a BufferedImage.
 * Adapted from: http://answers.opencv.org/question/10344/opencv-java-load-image-to-gui/
 * Assumes an 8-bit Mat (CV_8UC1 or CV_8UC3) — TODO confirm callers.
 *
 * @param m source Mat; not modified
 * @return a new BufferedImage (TYPE_BYTE_GRAY for 1 channel, TYPE_3BYTE_BGR otherwise)
 */
public static BufferedImage Mat2BufferedImage(Mat m) {
    final boolean multiChannel = m.channels() > 1;
    final int type = multiChannel ? BufferedImage.TYPE_3BYTE_BGR : BufferedImage.TYPE_BYTE_GRAY;

    // Read the whole Mat out of native memory in one call
    final byte[] src = new byte[m.channels() * m.cols() * m.rows()];
    m.get(0, 0, src);

    // Copy into the raster backing the new image
    final BufferedImage out = new BufferedImage(m.cols(), m.rows(), type);
    final byte[] dst = ((DataBufferByte) out.getRaster().getDataBuffer()).getData();
    System.arraycopy(src, 0, dst, 0, src.length);
    return out;
}

From source file:src.main.java.org.roomwatcher.watcher.Watcher.java

/**
 * Converts/writes a Mat into a BufferedImage, stored in the {@code image} field.
 *
 * @param matBGR Mat of type CV_8UC3 (BGR) or CV_8UC1 (grayscale)
 * @return true on success
 */
public boolean MatToBufferedImage(Mat matBGR) {
    int width = matBGR.width(), height = matBGR.height(), channels = matBGR.channels();
    byte[] sourcePixels = new byte[width * height * channels];
    matBGR.get(0, 0, sourcePixels);

    // Match the image type to the channel count. The original hard-coded
    // TYPE_3BYTE_BGR, which corrupted single-channel input even though the
    // Javadoc promises CV_8UC1 support.
    int type = channels > 1 ? BufferedImage.TYPE_3BYTE_BGR : BufferedImage.TYPE_BYTE_GRAY;

    // Create new image and copy pixels into its backing data
    image = new BufferedImage(width, height, type);
    final byte[] targetPixels = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
    System.arraycopy(sourcePixels, 0, targetPixels, 0, sourcePixels.length);
    return true;
}

From source file:syncleus.dann.data.video.TLDUtil.java

License: Apache License

/**
 * Reads the single byte stored at (row, col) in a CV_8UC1 Mat.
 *
 * @param row row index of the element
 * @param col column index of the element
 * @param mat source Mat; must be of type CV_8UC1
 * @return the byte value at that position
 * @throws IllegalArgumentException if the Mat type is not CV_8UC1
 */
public static byte getByte(final int row, final int col, final Mat mat) {
    if (mat.type() != CvType.CV_8UC1)
        throw new IllegalArgumentException(
                "Expected type is CV_8UC1, we found: " + CvType.typeToString(mat.type()));

    // Read the element into the shared single-byte scratch buffer
    mat.get(row, col, _byteBuff1);
    return _byteBuff1[0];
}

From source file:syncleus.dann.data.video.TLDUtil.java

License: Apache License

/**
 * Extracts the full contents of a CV_8UC1 Mat as a byte array.
 *
 * The corresponding Java primitive array type depends on the Mat type:
 * CV_8U and CV_8S -> byte[]
 * CV_16U and CV_16S -> short[]
 * CV_32S -> int[]
 * CV_32F -> float[]
 * CV_64F -> double[]
 *
 * @param mat source Mat; must be of type CV_8UC1
 * @return the shared scratch buffer holding the Mat's data (overwritten on each call)
 * @throws IllegalArgumentException if the Mat type is not CV_8UC1
 */
public static byte[] getByteArray(final Mat mat) {
    if (mat.type() != CvType.CV_8UC1)
        throw new IllegalArgumentException(
                "Expected type is CV_8UC1, we found: " + CvType.typeToString(mat.type()));

    // Resize the shared buffer only when the element count changes
    final int size = (int) (mat.total() * mat.channels());
    if (size != _byteBuff.length)
        _byteBuff = new byte[size];

    // Passing row=0, col=0 copies the whole matrix in one call
    mat.get(0, 0, _byteBuff);
    return _byteBuff;
}

From source file:syncleus.dann.data.video.TLDUtil.java

License: Apache License

/**
 * Extracts the full contents of a CV_32SC1 Mat as an int array.
 *
 * @param mat source Mat; must be of type CV_32SC1
 * @return the shared scratch buffer holding the Mat's data (overwritten on each call)
 * @throws IllegalArgumentException if the Mat type is not CV_32SC1
 */
public static int[] getIntArray(final Mat mat) {
    if (mat.type() != CvType.CV_32SC1)
        throw new IllegalArgumentException(
                "Expected type is CV_32SC1, we found: " + CvType.typeToString(mat.type()));

    // Resize the shared buffer only when the element count changes
    final int size = (int) (mat.total() * mat.channels());
    if (size != _intBuff.length)
        _intBuff = new int[size];

    // Passing row=0, col=0 copies the whole matrix in one call
    mat.get(0, 0, _intBuff);
    return _intBuff;
}