Example usage for org.opencv.core Mat Mat

Introduction

On this page you can find example usage of the org.opencv.core.Mat constructor.

Prototype

public Mat(int rows, int cols, int type)
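
A minimal, self-contained sketch of this constructor (assuming the OpenCV native library is already loaded, e.g. via System.loadLibrary(Core.NATIVE_LIBRARY_NAME)):

import org.opencv.core.CvType;
import org.opencv.core.Mat;

public class MatConstructorExample {
    public static void main(String[] args) {
        // Allocates a 480-row by 640-column single-channel 8-bit matrix
        Mat m = new Mat(480, 640, CvType.CV_8UC1);
        System.out.println("rows=" + m.rows() + " cols=" + m.cols()
                + " type=" + CvType.typeToString(m.type()));
    }
}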

Usage

From source file:com.ibm.streamsx.edgevideo.device.edgent.JsonMat.java

License:Open Source License

private static Mat base64MimeDecodeMat(int width, int height, int type, String base64MimeMatStr) {
    // java.util.Base64 is available since Java 8; otherwise use Apache Commons Codec
    Decoder decoder = Base64.getMimeDecoder();
    byte[] sourcePixels = decoder.decode(base64MimeMatStr);

    //System.out.println(String.format("base64DecodeMat: width=%d height=%d type=%d", width, height, type));

    Mat mat = new Mat(height, width, type);
    mat.put(0, 0, sourcePixels);

    return mat;
}
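
For context, the inverse direction (serializing a Mat to a Base64 MIME string) could look like the sketch below; the helper name base64MimeEncodeMat is hypothetical and not part of the original source:

private static String base64MimeEncodeMat(Mat mat) {
    // For an 8-bit Mat, total() * channels() is the byte count of the pixel data
    byte[] pixels = new byte[(int) (mat.total() * mat.channels())];
    mat.get(0, 0, pixels);
    return Base64.getMimeEncoder().encodeToString(pixels);
}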

From source file:com.jeremydyer.nifi.ZoomImageProcessor.java

License:Apache License

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    final FlowFile original = session.get();
    if (original == null) {
        return;
    }

    session.transfer(session.clone(original), REL_ORIGINAL);

    FlowFile ff = session.write(original, new StreamCallback() {
        @Override
        public void process(InputStream inputStream, OutputStream outputStream) throws IOException {
            try {
                int zoomingFactor = context.getProperty(ZOOMING_FACTOR).asInteger();

                BufferedImage image = ImageIO.read(inputStream);
                byte[] pixels = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
                // CV_LOAD_IMAGE_GRAYSCALE is an imread flag, not a Mat type;
                // CV_8UC1 is the correct type for 8-bit single-channel pixel data
                Mat source = new Mat(image.getHeight(), image.getWidth(), CvType.CV_8UC1);
                source.put(0, 0, pixels);

                Mat destination = new Mat(source.rows() * zoomingFactor, source.cols() * zoomingFactor,
                        source.type());
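                // INTER_NEAREST preserves hard pixel edges, which suits a blocky zoom effect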
                Imgproc.resize(source, destination, destination.size(), zoomingFactor, zoomingFactor,
                        Imgproc.INTER_NEAREST);

                MatOfByte bytemat = new MatOfByte();
                Imgcodecs.imencode(".png", destination, bytemat);
                pixels = bytemat.toArray();
                outputStream.write(pixels);

            } catch (Exception ex) {
                getLogger().error(ex.getMessage());
                ex.printStackTrace();
            }
        }
    });

    session.transfer(ff, REL_SUCCESS);

}

From source file:com.jiminger.image.houghspace.internal.Mask.java

License:Open Source License

/**
* Generate an OpenCV Mat image that contains a view of the mask.
 */
public Mat getMaskImage() {
    final Mat m = new Mat(mheight, mwidth, CvType.CV_8UC1);
    m.put(0, 0, mask);
    return m;
}

From source file:com.jonwohl.Attention.java

License:Open Source License

private Mat warpPerspective(ArrayList<PVector> inputPoints, int w, int h) {
    Mat transform = getPerspectiveTransformation(inputPoints, w, h);
    // Mat takes (rows, cols), i.e. (height, width)
    Mat unWarpedMarker = new Mat(h, w, CvType.CV_8UC1);
    Imgproc.warpPerspective(ocv.getColor(), unWarpedMarker, transform, new Size(w, h));
    return unWarpedMarker;
}
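
The getPerspectiveTransformation helper is not shown in this excerpt; a plausible sketch (an assumption, mapping four corner points given as Processing PVector objects to a w-by-h rectangle via Imgproc.getPerspectiveTransform) might be:

private Mat getPerspectiveTransformation(ArrayList<PVector> inputPoints, int w, int h) {
    // Source corners (assumed order: top-left, top-right, bottom-right, bottom-left)
    Point[] src = new Point[4];
    for (int i = 0; i < 4; i++) {
        src[i] = new Point(inputPoints.get(i).x, inputPoints.get(i).y);
    }
    // Destination corners: the axis-aligned w x h output rectangle
    Point[] dst = { new Point(0, 0), new Point(w, 0), new Point(w, h), new Point(0, h) };
    return Imgproc.getPerspectiveTransform(new MatOfPoint2f(src), new MatOfPoint2f(dst));
}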

From source file:com.joravasal.keyface.EigenFacesActivity.java

License:Open Source License

/**
 * Normalizes a matrix with arbitrary values into a matrix with values between 0 and 255
 * (both inclusive) so it can be shown as an image.
 * @param mat The matrix to convert
 * @return A matrix that can be used as an image
 */
private Mat toGrayscale(Mat mat) {
    Mat res = new Mat(mat.rows(), mat.cols(), CvType.CV_8UC1);
    double min, max;
    MinMaxLocResult minmax = Core.minMaxLoc(mat);
    min = minmax.minVal;
    max = minmax.maxVal;
    for (int row = 0; row < mat.rows(); row++) {
        for (int col = 0; col < mat.cols(); col++) {
            res.put(row, col, 255 * ((mat.get(row, col)[0] - min) / (max - min)));
        }
    }
    return res;
}
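
The element-by-element loop above works, but OpenCV can do the same min-max scaling in one native call; a minimal equivalent sketch (the name toGrayscaleFast is illustrative):

private Mat toGrayscaleFast(Mat mat) {
    Mat res = new Mat();
    // Min-max normalize into the 0..255 range and convert to 8-bit in one call
    Core.normalize(mat, res, 0, 255, Core.NORM_MINMAX, CvType.CV_8UC1);
    return res;
}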

From source file:com.joravasal.keyface.PCAfaceRecog.java

License:Open Source License

/**
 * Given a Mat (OpenCV's matrix data structure) containing a face,
 * tries to determine whether the face matches any of the saved data.
 * The face is resized to match the size of the saved faces.
 *
 * @return An AlgorithmReturnValue indicating which saved vector, if any, matches the given Mat
 * */
public AlgorithmReturnValue recognizeFace(Mat face) {
    if (numImages < 2) {
        return new AlgorithmReturnValue();
    }
    Imgproc.resize(face, face, imageSize); //Size must be equal to the size of the saved faces 

    Mat analyze = new Mat(1, imgLength, CvType.CV_32FC1);
    Mat X = analyze.row(0);
    try {
        face.reshape(1, 1).convertTo(X, CvType.CV_32FC1);
    } catch (CvException e) {
        return new AlgorithmReturnValue();
    }
    Mat res = new Mat();
    Core.PCAProject(analyze, average, eigenfaces, res);
    return findClosest(res);
}

From source file:com.joravasal.keyface.PCAfaceRecog.java

License:Open Source License

/**
 * Adds the last image (when numerically ordered) to the array of images
 * and recomputes the PCA subspace.
 *
 * Note: PCA won't work properly if newimage is true (see below).
 *
 * @return A boolean indicating whether everything went fine.
 * */
public boolean updateData(boolean newimage) {
    if (newimage) { // Known issue: this branch misbehaves when newimage is true,
        // probably because of the matrix create() calls; the subsequent PCA
        // projection comes out wrong. Until a fix is found, callers should always
        // pass newimage as false, even though that path does more work.
        numImages++;
        try {
            File directory = new File(imagesDir);
            if (!directory.exists()) {
                throw new IOException("Path to file could not be opened.");
            }
            String lfile = imagesDir + "/Face" + (numImages - 1) + ".png";
            Mat img = Highgui.imread(lfile, 0);
            if (img.empty())
                throw new IOException("Opening image number " + (numImages - 1) + " failed.");
            //we adapt the old matrices to new sizes
            sum.create(numImages, imgLength, CvType.CV_32FC1);
            projectedTraining.create(numImages, numImages, CvType.CV_32FC1);

            //and add the new image to the array of images
            img.reshape(1, 1).convertTo(sum.row(numImages - 1), CvType.CV_32FC1);

        } catch (IOException e) {
            System.err.println(e.getMessage());
            return false;
        }
    } else {
        numImages = KeyFaceActivity.prefs.getInt("savedFaces", numImages);
        sum = new Mat(numImages, imgLength, CvType.CV_32FC1);
        projectedTraining = new Mat(numImages, numImages, CvType.CV_32FC1);

        for (int i = 0; i < numImages; i++) { // opens each image and appends it as a row in the matrix sum
            String lfile = imagesDir + "/Face" + i + ".png";
            try {
                Mat img = Highgui.imread(lfile, 0);
                //Other way of loading image data
                //Mat img = Utils.bitmapToMat(BitmapFactory.decodeFile(lfile));
                if (img.empty())
                    throw new IOException("Opening image number " + i + " failed.");
                //We add the image to the correspondent row in the matrix of images (sum)
                img.reshape(1, 1).convertTo(sum.row(i), CvType.CV_32FC1);
            } catch (IOException e) {
                System.err.println(e.getMessage());
                return false;
            }
        }
    }

    if (numImages > 1) {
        average = new Mat();
        eigenfaces = new Mat();
        Core.PCACompute(sum, average, eigenfaces);
        for (int i = 0; i < numImages; i++) {
            Core.PCAProject(sum.row(i), average, eigenfaces, projectedTraining.row(i));
        }
    }

    return true;
}

From source file:com.kunato.imagestitching.SphereObject.java

License:Apache License

public void draw(float[] viewMatrix, float[] projectionMatrix) {
    int xh = GLES20.glGetUniformLocation(mProgram, "img_x");
    int yh = GLES20.glGetUniformLocation(mProgram, "img_y");
    int widthh = GLES20.glGetUniformLocation(mProgram, "img_width");
    int heighth = GLES20.glGetUniformLocation(mProgram, "img_height");

    if (mTexRequireUpdate) {
        Log.i("GLSphere", "Bitmap updated,Return to normal activity.");
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, this.mTextures[0]);
        GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, mQueueBitmap, 0);
        GLES20.glGenerateMipmap(GLES20.GL_TEXTURE_2D);
        mQueueBitmap.recycle();
        mTexRequireUpdate = false;
    }
    GLES20.glUseProgram(mProgram);
    //Attrib
    mPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
    mTextureCoordinateHandle = GLES20.glGetAttribLocation(mProgram, "a_TexCoordinate");
    mSphereBuffer.position(0);
    GLES20.glEnableVertexAttribArray(mPositionHandle);
    GLES20.glVertexAttribPointer(mPositionHandle, 3, GLES20.GL_FLOAT, false, mSphereShape.getVeticesStride(),
            mSphereBuffer);

    mSphereBuffer.position(3);
    GLES20.glEnableVertexAttribArray(mTextureCoordinateHandle);
    GLES20.glVertexAttribPointer(mTextureCoordinateHandle, 2, GLES20.GL_FLOAT, false,
            mSphereShape.getVeticesStride(), mSphereBuffer);
    //Uniform
    mTextureHandle = GLES20.glGetUniformLocation(mProgram, "sTexture");
    GLES20.glUniform1i(mTextureHandle, 0);
    //Area
    GLES20.glUniform1f(xh, mArea[0]);
    GLES20.glUniform1f(yh, mArea[1]);
    GLES20.glUniform1f(widthh, mArea[2]);
    GLES20.glUniform1f(heighth, mArea[3]);

    mViewMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uViewMatrix");
    mProjectionMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uProjectionMatrix");
    GLES20.glUniformMatrix4fv(mViewMatrixHandle, 1, false, viewMatrix, 0);
    GLES20.glUniformMatrix4fv(mProjectionMatrixHandle, 1, false, projectionMatrix, 0);
    GLES20.glDrawElements(GLES20.GL_TRIANGLES, mSphereShape.getNumIndices()[0], GLES20.GL_UNSIGNED_SHORT,
            mIndexBuffer);
    GLES20.glDisableVertexAttribArray(mPositionHandle);
    GLES20.glDisableVertexAttribArray(mTextureCoordinateHandle);

    if (readPixel) {
        Log.d("GL", "ReadPixel");
        mScreenBuffer = ByteBuffer.allocateDirect(glRenderer.mHeight * glRenderer.mWidth * 4);
        mScreenBuffer.order(ByteOrder.nativeOrder());
        GLES20.glReadPixels(0, 0, glRenderer.mWidth, glRenderer.mHeight, GLES20.GL_RGBA,
                GLES20.GL_UNSIGNED_BYTE, mScreenBuffer);
        Log.d("mScreenBuffer", "Remaining " + mScreenBuffer.remaining());
        mScreenBuffer.rewind();
        byte pixelsBuffer[] = new byte[4 * glRenderer.mHeight * glRenderer.mWidth];
        mScreenBuffer.get(pixelsBuffer);
        Mat mat = new Mat(glRenderer.mHeight, glRenderer.mWidth, CvType.CV_8UC4);
        mat.put(0, 0, pixelsBuffer);
        Mat m = new Mat();
        Imgproc.cvtColor(mat, m, Imgproc.COLOR_RGBA2BGR);
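        // OpenGL's framebuffer origin is bottom-left, so flip vertically before saving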
        Core.flip(m, mat, 0);
        Highgui.imwrite("/sdcard/stitch/readpixel.jpg", mat);

    }
}

From source file:com.lauszus.facerecognitionapp.TinyDB.java

License:Apache License

public ArrayList<Mat> getListMat(String key) {
    ArrayList<String> objStrings = getListString(key);
    ArrayList<Mat> objects = new ArrayList<Mat>();

    for (String jObjString : objStrings) {
        byte[] data = Base64.decode(jObjString, Base64.DEFAULT);
        Mat mat = new Mat(data.length, 1, CvType.CV_8U);
        mat.put(0, 0, data);
        objects.add(mat);
    }
    return objects;
}
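
A matching store path would serialize each Mat back to Base64; the sketch below assumes TinyDB also has a putListString counterpart to getListString (the name putListMat is hypothetical):

public void putListMat(String key, ArrayList<Mat> objects) {
    ArrayList<String> objStrings = new ArrayList<String>();
    for (Mat mat : objects) {
        // Mirrors getListMat above: assumes an 8-bit single-channel Mat
        byte[] data = new byte[(int) mat.total()];
        mat.get(0, 0, data);
        objStrings.add(Base64.encodeToString(data, Base64.DEFAULT));
    }
    putListString(key, objStrings);
}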

From source file:com.Linguist.model.AdaptiveThresholdClass.java

@Override
public File imagePreprocessing(String imageFile, String extnsn) {
    //  System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    Mat graySource = imread("C:\\Users\\User\\Documents\\GitHub\\Linguist\\web\\uploadedImage\\" + imageFile,
            IMREAD_GRAYSCALE);
    Mat destintn = new Mat(graySource.rows(), graySource.cols(), graySource.type());

    adaptiveThreshold(graySource, destintn, 255, ADAPTIVE_THRESH_MEAN_C, THRESH_BINARY, 15, 40);
    Imgcodecs.imwrite("C:\\Users\\User\\Documents\\GitHub\\Linguist\\web\\uploadedImage\\adaptive.jpg",
            destintn);
    File outputImage = new File(
            "C:\\Users\\User\\Documents\\GitHub\\Linguist\\web\\uploadedImage\\adaptive.jpg");
    return outputImage;
}