Example usage for org.opencv.core Mat put

List of usage examples for org.opencv.core Mat put

Introduction

On this page you can find example usages for org.opencv.core Mat put.

Prototype

public int put(int row, int col, byte[] data) 
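
The put method copies values from a Java array into the matrix starting at the given row and column and returns the number of bytes written. A minimal sketch with hypothetical values, assuming the OpenCV Java bindings (org.opencv.core.Mat, org.opencv.core.CvType) are on the classpath and the native library is already loaded:

Mat img = new Mat(2, 2, CvType.CV_8UC1);            // 2x2, one byte per element
byte[] pixels = { 0, 64, (byte) 128, (byte) 255 };  // element values in row-major order
int bytesWritten = img.put(0, 0, pixels);           // copies the whole array starting at (0, 0)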

Usage

From source file:com.astrocytes.core.operationsengine.OperationsImpl.java

License:Open Source License

private void detectLayers() {
    Mat equalizedImage = CoreOperations.invert(CoreOperations.equalize(sourceImage));

    int halfColumnWidth = 50;
    Mat density = new Mat(equalizedImage.rows(), equalizedImage.cols(), CvType.CV_32F);
    int rows = density.rows();
    int cols = density.cols();

    // > 1 min
    for (int i = 0; i < rows; i++) {
        double p;
        int leftBoundInterval, rightBoundInterval, intervalLength;
        for (int j = 0; j < cols; j++) {
            p = 0.0;
            leftBoundInterval = Math.max(j - halfColumnWidth, 0);
            rightBoundInterval = Math.min(cols - 1, j + halfColumnWidth);
            intervalLength = rightBoundInterval - leftBoundInterval + 1;

            for (int s = leftBoundInterval; s <= rightBoundInterval; s++) {
                p += equalizedImage.get(i, s)[0];
            }

            density.put(i, j, p / intervalLength);
        }
    }

    //3 seconds
    for (int j = 0; j < cols; j++) {
        double intensity = 0.0;

        for (int i = 0; i < rows; i++) {
            intensity += density.get(i, j)[0];
        }

        for (int i = 0; i < rows; i++) {
            density.put(i, j, density.get(i, j)[0] / intensity);
        }
    }

    double ndlAverage = 1.0 / (double) rows;

    layerBounds = new Mat(6, cols, CvType.CV_32F);
    double k1 = 0.56E-4;
    double k2 = 1.3E-4;

    /*float[] data = new float[density.rows() * (int) density.elemSize()];
    density.get(0, 10, data);*/

    Mat upperBoundExact = new Mat(1, cols, CvType.CV_32F);
    Mat lowerBoundExact = new Mat(1, cols, CvType.CV_32F);

    for (int j = 0; j < cols; j++) {
        int upperBound = 0;
        int lowerBound = 0;

        for (int i = 0; i < rows; i++) {
            if (density.get(i, j)[0] > ndlAverage + k1) {
                upperBound = i;
                break;
            }
        }
        for (int i = rows - 1; i >= 0; i--) {
            if (density.get(i, j)[0] > ndlAverage + k2) {
                lowerBound = i;
                break;
            }
        }

        upperBoundExact.put(0, j, upperBound);
        lowerBoundExact.put(0, j, lowerBound);
    }

    //moving average for bounds
    int movingAverage = 300;
    for (int i = 0; i < upperBoundExact.cols(); i++) {
        int leftBoundInterval = Math.max(i - movingAverage, 0);
        int rightBoundInterval = Math.min(cols - 1, i + movingAverage);
        int intervalLength = rightBoundInterval - leftBoundInterval + 1;
        int upperBoundAverage = 0;
        int lowerBoundAverage = 0;

        for (int j = leftBoundInterval; j <= rightBoundInterval; j++) {
            upperBoundAverage += upperBoundExact.get(0, j)[0];
            lowerBoundAverage += lowerBoundExact.get(0, j)[0];
        }

        upperBoundAverage /= intervalLength;
        lowerBoundAverage /= intervalLength;
        int columnHeight = lowerBoundAverage - upperBoundAverage;
        layerBounds.put(0, i, upperBoundAverage);
        for (int h = 1; h < 5; h++) {
            layerBounds.put(h, i, upperBoundAverage + BRODMANN_COEFFS[h - 1] * columnHeight);
        }
        layerBounds.put(5, i, lowerBoundAverage);
    }
}

From source file:com.carver.paul.truesight.ImageRecognition.RecognitionModel.java

License:Open Source License

private static void adjustXPosOfLines(Mat lines, int xPosAdjustment) {
    if (xPosAdjustment == 0)
        return;

    for (int i = 0; i < lines.rows(); i++) {
        double[] line = lines.get(i, 0);
        line[0] += xPosAdjustment;
        line[2] += xPosAdjustment;
        lines.put(i, 0, line);
    }
}

From source file:com.example.yannic.remotefacedetection.agent.FaceDetectionAgent.java

License:Open Source License

public static Mat bufferedImageToMat(BufferedImage bi) {
    Mat mat = new Mat(bi.getHeight(), bi.getWidth(), CvType.CV_8UC3);
    byte[] data = ((DataBufferByte) bi.getRaster().getDataBuffer()).getData();
    mat.put(0, 0, data);
    return mat;
}

From source file:com.github.rosjava_catkin_package_a.ARLocROS.Utils.java

License:Apache License

static public void tresholdContrastBlackWhite(Mat image2, double d) {
    int width = image2.width();
    int height = image2.height();
    for (int i = 0; i < width; i++)
        for (int j = 0; j < height; j++) {
            double[] rgb = image2.get(j, i);
            double[] rgbnew = new double[rgb.length];
            if (rgb[0] + rgb[1] + rgb[2] < d)
                rgbnew[0] = rgbnew[1] = rgbnew[2] = 0.0;
            else
                rgbnew[0] = rgbnew[1] = rgbnew[2] = 255.0;
            image2.put(j, i, rgbnew);
        }
}

From source file:com.ibm.streamsx.edgevideo.device.edgent.JsonMat.java

License:Open Source License

private static Mat base64MimeDecodeMat(int width, int height, int type, String base64MimeMatStr) {
    // java.utils.Base64 since 1.8, otherwise use Apache Commons
    Decoder decoder = Base64.getMimeDecoder();
    byte[] sourcePixels = decoder.decode(base64MimeMatStr);

    //System.out.println(String.format("base64DecodeMat: width=%d height=%d type=%d", width, height, type));

    Mat mat = new Mat(height, width, type);
    mat.put(0, 0, sourcePixels);

    return mat;
}

From source file:com.jeremydyer.nifi.ZoomImageProcessor.java

License:Apache License

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    final FlowFile original = session.get();
    if (original == null) {
        return;
    }

    session.transfer(session.clone(original), REL_ORIGINAL);

    FlowFile ff = session.write(original, new StreamCallback() {
        @Override
        public void process(InputStream inputStream, OutputStream outputStream) throws IOException {
            try {
                int zoomingFactor = context.getProperty(ZOOMING_FACTOR).asInteger();

                BufferedImage image = ImageIO.read(inputStream);
                byte[] pixels = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
                Mat source = new Mat(image.getHeight(), image.getWidth(), Imgcodecs.CV_LOAD_IMAGE_GRAYSCALE);
                source.put(0, 0, pixels);

                Mat destination = new Mat(source.rows() * zoomingFactor, source.cols() * zoomingFactor,
                        source.type());
                Imgproc.resize(source, destination, destination.size(), zoomingFactor, zoomingFactor,
                        Imgproc.INTER_NEAREST);

                MatOfByte bytemat = new MatOfByte();
                Imgcodecs.imencode(".png", destination, bytemat);
                pixels = bytemat.toArray();
                outputStream.write(pixels);

            } catch (Exception ex) {
                getLogger().error(ex.getMessage());
                ex.printStackTrace();
            }
        }
    });

    session.transfer(ff, REL_SUCCESS);

}

From source file:com.jiminger.image.houghspace.internal.Mask.java

License:Open Source License

/**
* Generate an OpenCV Mat image that contains a view of the mask.
*/
public Mat getMaskImage() {
    final Mat m = new Mat(mheight, mwidth, CvType.CV_8UC1);
    m.put(0, 0, mask);
    return m;
}

From source file:com.joravasal.keyface.EigenFacesActivity.java

License:Open Source License

/**
 * Converts a matrix with arbitrary values into a matrix whose values lie between 0 and 255 (inclusive) so it can be shown as an image.
 * @param mat The matrix to convert
 * @return A matrix that can be used as an image
 */
private Mat toGrayscale(Mat mat) {
    Mat res = new Mat(mat.rows(), mat.cols(), CvType.CV_8UC1);
    double min, max;
    MinMaxLocResult minmax = Core.minMaxLoc(mat);
    min = minmax.minVal;
    max = minmax.maxVal;
    for (int row = 0; row < mat.rows(); row++) {
        for (int col = 0; col < mat.cols(); col++) {
            res.put(row, col, 255 * ((mat.get(row, col)[0] - min) / (max - min)));
        }
    }
    return res;
}

From source file:com.kunato.imagestitching.SphereObject.java

License:Apache License

public void draw(float[] viewMatrix, float[] projectionMatrix) {
    int xh = GLES20.glGetUniformLocation(mProgram, "img_x");
    int yh = GLES20.glGetUniformLocation(mProgram, "img_y");
    int widthh = GLES20.glGetUniformLocation(mProgram, "img_width");
    int heighth = GLES20.glGetUniformLocation(mProgram, "img_height");

    if (mTexRequireUpdate) {
        Log.i("GLSphere", "Bitmap updated,Return to normal activity.");
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, this.mTextures[0]);
        GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, mQueueBitmap, 0);
        GLES20.glGenerateMipmap(GLES20.GL_TEXTURE_2D);
        mQueueBitmap.recycle();
        mTexRequireUpdate = false;
    }
    GLES20.glUseProgram(mProgram);
    //Attrib
    mPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
    mTextureCoordinateHandle = GLES20.glGetAttribLocation(mProgram, "a_TexCoordinate");
    mSphereBuffer.position(0);
    GLES20.glEnableVertexAttribArray(mPositionHandle);
    GLES20.glVertexAttribPointer(mPositionHandle, 3, GLES20.GL_FLOAT, false, mSphereShape.getVeticesStride(),
            mSphereBuffer);

    mSphereBuffer.position(3);
    GLES20.glEnableVertexAttribArray(mTextureCoordinateHandle);
    GLES20.glVertexAttribPointer(mTextureCoordinateHandle, 2, GLES20.GL_FLOAT, false,
            mSphereShape.getVeticesStride(), mSphereBuffer);
    //Uniform
    mTextureHandle = GLES20.glGetUniformLocation(mProgram, "sTexture");
    GLES20.glUniform1i(mTextureHandle, 0);
    //Area
    GLES20.glUniform1f(xh, mArea[0]);
    GLES20.glUniform1f(yh, mArea[1]);
    GLES20.glUniform1f(widthh, mArea[2]);
    GLES20.glUniform1f(heighth, mArea[3]);

    mViewMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uViewMatrix");
    mProjectionMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uProjectionMatrix");
    GLES20.glUniformMatrix4fv(mViewMatrixHandle, 1, false, viewMatrix, 0);
    GLES20.glUniformMatrix4fv(mProjectionMatrixHandle, 1, false, projectionMatrix, 0);
    GLES20.glDrawElements(GLES20.GL_TRIANGLES, mSphereShape.getNumIndices()[0], GLES20.GL_UNSIGNED_SHORT,
            mIndexBuffer);
    GLES20.glDisableVertexAttribArray(mPositionHandle);
    GLES20.glDisableVertexAttribArray(mTextureCoordinateHandle);

    if (readPixel) {
        Log.d("GL", "ReadPixel");
        mScreenBuffer = ByteBuffer.allocateDirect(glRenderer.mHeight * glRenderer.mWidth * 4);
        mScreenBuffer.order(ByteOrder.nativeOrder());
        GLES20.glReadPixels(0, 0, glRenderer.mWidth, glRenderer.mHeight, GLES20.GL_RGBA,
                GLES20.GL_UNSIGNED_BYTE, mScreenBuffer);
        Log.d("mScreenBuffer", "Remaining " + mScreenBuffer.remaining());
        mScreenBuffer.rewind();
        byte pixelsBuffer[] = new byte[4 * glRenderer.mHeight * glRenderer.mWidth];
        mScreenBuffer.get(pixelsBuffer);
        Mat mat = new Mat(glRenderer.mHeight, glRenderer.mWidth, CvType.CV_8UC4);
        mat.put(0, 0, pixelsBuffer);
        Mat m = new Mat();
        Imgproc.cvtColor(mat, m, Imgproc.COLOR_RGBA2BGR);
        Core.flip(m, mat, 0);
        Highgui.imwrite("/sdcard/stitch/readpixel.jpg", mat);

    }
}

From source file:com.lauszus.facerecognitionapp.TinyDB.java

License:Apache License

public ArrayList<Mat> getListMat(String key) {
    ArrayList<String> objStrings = getListString(key);
    ArrayList<Mat> objects = new ArrayList<Mat>();

    for (String jObjString : objStrings) {
        byte[] data = Base64.decode(jObjString, Base64.DEFAULT);
        Mat mat = new Mat(data.length, 1, CvType.CV_8U);
        mat.put(0, 0, data);
        objects.add(mat);
    }
    return objects;
}