Example usage for org.opencv.core Mat put

List of usage examples for org.opencv.core Mat put

Introduction

On this page you can find example usages of org.opencv.core Mat.put.

Prototype

public int put(int row, int col, byte[] data) 

Source Link

Usage

From source file:com.Linguist.model.grayscaleClass.java

@Override
public File imagePreprocessing(String image, String extnsn) {

    // Converts the uploaded image to grayscale with OpenCV and writes the
    // result next to the upload. Returns the written file, or null when
    // reading or conversion failed.
    // NOTE(review): paths are hard-coded to a developer machine — move to
    // configuration.
    BufferedImage bImge = null;
    BufferedImage bImage2 = null;
    File grayscle = null;

    // try-with-resources: the original leaked the FileInputStream.
    try (InputStream input = new FileInputStream(
            "C:\\Users\\User\\Documents\\GitHub\\Linguist\\web\\uploadedImage\\" + image)) {

        bImge = ImageIO.read(input);
        if (bImge == null) {
            // ImageIO.read returns null for unreadable/unsupported data.
            System.out.println("Could not decode image: " + image);
            return null;
        }
        byte[] imgeByte = ((DataBufferByte) bImge.getRaster().getDataBuffer()).getData();
        Mat mat1 = new Mat(bImge.getHeight(), bImge.getWidth(), CvType.CV_8UC3);
        mat1.put(0, 0, imgeByte);
        Mat mat2 = new Mat(bImge.getHeight(), bImge.getWidth(), CvType.CV_8UC1);
        Imgproc.cvtColor(mat1, mat2, Imgproc.COLOR_RGB2GRAY);
        byte[] imageData = new byte[mat2.rows() * mat2.cols() * (int) (mat2.elemSize())];
        mat2.get(0, 0, imageData);
        bImage2 = new BufferedImage(mat2.cols(), mat2.rows(), BufferedImage.TYPE_BYTE_GRAY);
        bImage2.getRaster().setDataElements(0, 0, mat2.cols(), mat2.rows(), imageData);

        // Map the input extension to an output extension. The original
        // switch had no default, so an unknown extension left extn null and
        // produced a file literally named "grayscale.null".
        String extn;
        switch (extnsn) {
        case ".jpg":
            extn = "jpg";
            break;
        case ".png":
            extn = "png";
            break;
        case ".pdf":
            extn = "pdf";
            break;
        case ".tiff":
            extn = "tif";
            break;
        default:
            extn = "jpg"; // safe fallback instead of "grayscale.null"
            break;
        }
        // Writing the grayscale image to the folder. The content is always
        // JPEG, as in the original implementation — TODO confirm that is
        // intended for .png/.pdf/.tiff uploads (extension and format differ).
        grayscle = new File(
                "C:\\Users\\User\\Documents\\GitHub\\Linguist\\web\\uploadedImage\\grayscale" + "." + extn);
        ImageIO.write(bImage2, "jpg", grayscle);
    } catch (IOException ex) {
        System.out.println("" + ex.getMessage());
    } catch (Exception ex) {
        Logger.getLogger(grayscaleClass.class.getName()).log(Level.SEVERE, null, ex);
    }
    return grayscle;

}

From source file:com.louislepper.waveform.MainActivity.java

License:Apache License

/**
 * Plots a waveform onto the given image: for each column, paints the pixel
 * at the row stored in {@code array} (presumably red — a 4-element
 * {255, 0, 0, 0} colour; channel order depends on the Mat's format — TODO
 * confirm).
 *
 * @param array per-column row index of the sample; -1 marks "no sample"
 * @param image destination image, mutated in place
 * @return the same image instance, for chaining
 */
private Mat soundArrayToImage(short[] array, Mat image) {
    final double[] red = new double[] { 255.0, 0.0, 0.0, 0.0 };
    final int width = image.cols();
    for (int col = 0; col < width; col++) {
        final short row = array[col];
        if (row != -1) {
            image.put(row, col, red);
        }
    }
    return image;
}

From source file:com.nekomeshi312.whiteboardcorrection.WhiteBoardDetect.java

License:Open Source License

/**
 * Computes the four corner points of the detected whiteboard by intersecting
 * each pair of boundary-line equations, accumulates their centroid into
 * mPointCenterX/mPointCenterY, sorts the corners, and optionally draws them
 * on a debug image. (The original javadoc was garbled Japanese; intent
 * reconstructed from the code — TODO confirm against upstream source.)
 *
 * @param lineEq line equations of the form ax + by = 1, indexed [angle][section]
 * @param points output list that receives the four intersection points
 * @param img optional debug image; corners are drawn when non-null
 * @return true on success, false when a line pair cannot be intersected
 */
private boolean calcSquare(StraightLineEquation lineEq[][], ArrayList<Point> points, Mat img) {
    // 2x2 coefficient matrix holding one line equation per row.
    Mat mat = new Mat(2, 2, CvType.CV_32F);
    mPointCenterX = 0.0f;
    mPointCenterY = 0.0f;
    int counter = 0;
    for (int ang0sec = 0; ang0sec < 2; ang0sec++) {
        mat.put(0, 0, lineEq[0][ang0sec].a);
        mat.put(0, 1, lineEq[0][ang0sec].b);
        for (int ang1sec = 0; ang1sec < 2; ang1sec++) {
            mat.put(1, 0, lineEq[1][ang1sec].a);
            mat.put(1, 1, lineEq[1][ang1sec].b);
            Mat matAns;
            try {
                // Inverting the coefficient matrix solves the 2x2 system;
                // a singular matrix (parallel lines) means no intersection.
                matAns = mat.inv();
                if (matAns == null)
                    return false;
            } catch (Exception e) { // inversion failed — lines do not intersect
                e.printStackTrace();
                return false;
            }
            // Intersection point, shifted back from centred coordinates.
            float x = (float) (matAns.get(0, 0)[0] + matAns.get(0, 1)[0] + mCenterX);
            float y = (float) (matAns.get(1, 0)[0] + matAns.get(1, 1)[0] + mCenterY);
            Point p = new Point(x, y);
            points.add(p);
            mPointCenterX += x;
            mPointCenterY += y;
            counter++;
        }
    }
    mPointCenterX /= (float) counter;
    mPointCenterY /= (float) counter;
    // Sort the corners into a consistent order — semantics depend on
    // PointComparator (not visible here); TODO confirm ordering.
    Collections.sort(points, new PointComparator());
    if (img != null) {
        Scalar color[] = new Scalar[4];
        color[0] = new Scalar(0xff, 0x00, 0x00);
        color[1] = new Scalar(0x00, 0xff, 0x00);
        color[2] = new Scalar(0x00, 0x00, 0xff);
        color[3] = new Scalar(0xff, 0x00, 0xff);

        // Draw each corner in a distinct colour for visual debugging.
        for (int i = 0; i < 4; i++) {
            Core.circle(img, points.get(i), 30, color[i], 5);
        }
    }
    if (MyDebug.DEBUG) {
        for (int i = 0; i < 4; i++) {
            Log.d(LOG_TAG, "point(" + i + ") = " + points.get(i).x + ":" + points.get(i).y);
        }
    }

    return true;
}

From source file:com.oetermann.imageclassifier.Util.java

License:Open Source License

/**
 * Loads a Mat previously serialized as (rows, cols, type, data array) via
 * Java object serialization.
 *
 * @param path file to read
 * @return the reconstructed Mat, or null when the file cannot be read or
 *         its contents are not in the expected format
 */
public static Mat loadMat(String path) {
    try {
        int rows, cols, type;
        Object data;
        try (ObjectInputStream ois = new ObjectInputStream(new FileInputStream(path))) {
            rows = (int) ois.readObject();
            cols = (int) ois.readObject();
            type = (int) ois.readObject();
            data = ois.readObject();
        }
        Mat mat = new Mat(rows, cols, type);
        // Switch on the depth, not the full type: for multi-channel mats
        // (e.g. CV_8UC3) the raw type value does not equal the depth
        // constant, so the original switch matched nothing and silently
        // returned an uninitialised Mat.
        switch (CvType.depth(type)) {
        case CvType.CV_8S:
        case CvType.CV_8U:
            mat.put(0, 0, (byte[]) data);
            break;
        case CvType.CV_16S:
        case CvType.CV_16U:
            mat.put(0, 0, (short[]) data);
            break;
        case CvType.CV_32S:
            mat.put(0, 0, (int[]) data);
            break;
        case CvType.CV_32F:
            mat.put(0, 0, (float[]) data);
            break;
        case CvType.CV_64F:
            mat.put(0, 0, (double[]) data);
            break;
        default:
            // Unknown depth — surface it instead of returning a silently
            // empty Mat.
            System.err.println("ERROR: Unsupported Mat depth " + CvType.depth(type) + " in file: " + path);
            break;
        }
        return mat;
    } catch (IOException | ClassNotFoundException | ClassCastException ex) {
        // Include the cause instead of discarding it.
        System.err.println("ERROR: Could not load mat from file: " + path + " (" + ex + ")");
    }
    return null;
}

From source file:com.orange.documentare.core.image.opencv.OpenCvImage.java

License:Open Source License

/**
 * Wraps a raw grayscale byte buffer into a single-channel Mat.
 *
 * @param bytes pixel data, row-major; when {@code raw} each row carries one
 *              extra trailing byte (SimDoc line padding — TODO confirm) that
 *              is skipped
 * @param rows image height
 * @param columns image width
 * @param raw whether the buffer has the extra byte per row
 * @return a new CV_8UC1 Mat containing the pixel data
 */
private static Mat bytesToMat(byte[] bytes, int rows, int columns, boolean raw) {
    final int stride = columns + (raw ? 1 : 0);
    Mat mat = new Mat(rows, columns, CvType.CV_8UC1);
    // Copy a full row per JNI call instead of one byte per call — the
    // original made rows*columns Mat.put calls, which is needlessly slow.
    byte[] row = new byte[columns];
    for (int y = 0; y < rows; y++) {
        System.arraycopy(bytes, y * stride, row, 0, columns);
        mat.put(y, 0, row);
    }
    return mat;
}

From source file:com.seleniumtests.util.imaging.ImageDetector.java

License:Apache License

/**
 * Computes the rectangle where the searched picture is located in the scene
 * and the rotation angle between both images, using SURF keypoints and a
 * FLANN-based matcher. Results are recorded through side effects
 * (checkRotationAngle / recordDetectedRectangle).
 * Throws {@link ImageSearchException} if the picture is not found.
 *
 * @deprecated Kept here for information, but OpenCV 3 does not include SURF
 *             anymore for the Java build.
 */
public void detectCorrespondingZone() {
    Mat objectImageMat = Imgcodecs.imread(objectImage.getAbsolutePath(), Imgcodecs.CV_LOAD_IMAGE_COLOR);
    Mat sceneImageMat = Imgcodecs.imread(sceneImage.getAbsolutePath(), Imgcodecs.CV_LOAD_IMAGE_COLOR);
    FeatureDetector surf = FeatureDetector.create(FeatureDetector.SURF);

    MatOfKeyPoint objectKeyPoints = new MatOfKeyPoint();
    MatOfKeyPoint sceneKeyPoints = new MatOfKeyPoint();

    surf.detect(objectImageMat, objectKeyPoints);
    surf.detect(sceneImageMat, sceneKeyPoints);

    // SURF descriptors for every detected keypoint.
    DescriptorExtractor surfExtractor = DescriptorExtractor.create(DescriptorExtractor.SURF);
    Mat objectDescriptor = new Mat();
    Mat sceneDescriptor = new Mat();
    surfExtractor.compute(objectImageMat, objectKeyPoints, objectDescriptor);
    surfExtractor.compute(sceneImageMat, sceneKeyPoints, sceneDescriptor);

    // Dump the object keypoints to a temp file for visual inspection.
    try {
        Mat outImage = new Mat();
        Features2d.drawKeypoints(objectImageMat, objectKeyPoints, outImage);
        String tempFile = File.createTempFile("img", ".png").getAbsolutePath();
        writeComparisonPictureToFile(tempFile, outImage);
    } catch (IOException e) {
        // NOTE(review): exception silently swallowed — debug dump failure
        // is non-fatal, but this should at least be logged.
    }

    // http://stackoverflow.com/questions/29828849/flann-for-opencv-java
    DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.FLANNBASED);
    MatOfDMatch matches = new MatOfDMatch();

    if (objectKeyPoints.toList().isEmpty()) {
        throw new ImageSearchException("No keypoints in object to search, check it's not uniformly coloured: "
                + objectImage.getAbsolutePath());
    }
    if (sceneKeyPoints.toList().isEmpty()) {
        throw new ImageSearchException(
                "No keypoints in scene, check it's not uniformly coloured: " + sceneImage.getAbsolutePath());
    }
    // FLANN matcher requires CV_32F descriptors.
    if (objectDescriptor.type() != CvType.CV_32F) {
        objectDescriptor.convertTo(objectDescriptor, CvType.CV_32F);
    }
    if (sceneDescriptor.type() != CvType.CV_32F) {
        sceneDescriptor.convertTo(sceneDescriptor, CvType.CV_32F);
    }

    matcher.match(objectDescriptor, sceneDescriptor, matches);

    // Min/max match distances, logged for tuning detectionThreshold.
    double maxDist = 0;
    double minDist = 10000;

    for (int i = 0; i < objectDescriptor.rows(); i++) {
        double dist = matches.toList().get(i).distance;
        if (dist < minDist) {
            minDist = dist;
        }
        if (dist > maxDist) {
            maxDist = dist;
        }
    }

    logger.debug("-- Max dist : " + maxDist);
    logger.debug("-- Min dist : " + minDist);

    // Keep only matches below the configured distance threshold.
    LinkedList<DMatch> goodMatches = new LinkedList<>();
    MatOfDMatch gm = new MatOfDMatch();

    for (int i = 0; i < objectDescriptor.rows(); i++) {
        if (matches.toList().get(i).distance < detectionThreshold) {
            goodMatches.addLast(matches.toList().get(i));
        }
    }
    gm.fromList(goodMatches);

    Features2d.drawMatches(objectImageMat, objectKeyPoints, sceneImageMat, sceneKeyPoints, gm, imgMatch,
            Scalar.all(-1), Scalar.all(-1), new MatOfByte(), Features2d.NOT_DRAW_SINGLE_POINTS);

    if (goodMatches.isEmpty()) {
        throw new ImageSearchException("Cannot find matching zone");
    }

    // Collect the matched point pairs (object -> scene) for homography.
    LinkedList<Point> objList = new LinkedList<>();
    LinkedList<Point> sceneList = new LinkedList<>();

    List<KeyPoint> objectKeyPointsList = objectKeyPoints.toList();
    List<KeyPoint> sceneKeyPointsList = sceneKeyPoints.toList();

    for (int i = 0; i < goodMatches.size(); i++) {
        objList.addLast(objectKeyPointsList.get(goodMatches.get(i).queryIdx).pt);
        sceneList.addLast(sceneKeyPointsList.get(goodMatches.get(i).trainIdx).pt);
    }

    MatOfPoint2f obj = new MatOfPoint2f();
    obj.fromList(objList);

    MatOfPoint2f scene = new MatOfPoint2f();
    scene.fromList(sceneList);

    // Calib3d.RANSAC could be used instead of 0
    Mat hg = Calib3d.findHomography(obj, scene, 0, 5);

    // Project the object's corner points into scene coordinates.
    Mat objectCorners = new Mat(4, 1, CvType.CV_32FC2);
    Mat sceneCorners = new Mat(4, 1, CvType.CV_32FC2);

    objectCorners.put(0, 0, new double[] { 0, 0 });
    objectCorners.put(1, 0, new double[] { objectImageMat.cols(), 0 });
    objectCorners.put(2, 0, new double[] { objectImageMat.cols(), objectImageMat.rows() });
    objectCorners.put(3, 0, new double[] { 0, objectImageMat.rows() });

    Core.perspectiveTransform(objectCorners, sceneCorners, hg);

    // points of object
    Point po1 = new Point(objectCorners.get(0, 0));
    Point po2 = new Point(objectCorners.get(1, 0));
    Point po3 = new Point(objectCorners.get(2, 0));
    Point po4 = new Point(objectCorners.get(3, 0));

    // point of object in scene
    Point p1 = new Point(sceneCorners.get(0, 0)); // top left
    Point p2 = new Point(sceneCorners.get(1, 0)); // top right
    Point p3 = new Point(sceneCorners.get(2, 0)); // bottom right
    Point p4 = new Point(sceneCorners.get(3, 0)); // bottom left

    logger.debug(po1);
    logger.debug(po2);
    logger.debug(po3);
    logger.debug(po4);
    logger.debug(p1); // top left
    logger.debug(p2); // top right
    logger.debug(p3); // bottom right
    logger.debug(p4); // bottom left

    if (debug) {
        try {
            // translate corners so they overlay the scene half of imgMatch
            p1.set(new double[] { p1.x + objectImageMat.cols(), p1.y });
            p2.set(new double[] { p2.x + objectImageMat.cols(), p2.y });
            p3.set(new double[] { p3.x + objectImageMat.cols(), p3.y });
            p4.set(new double[] { p4.x + objectImageMat.cols(), p4.y });

            Imgproc.line(imgMatch, p1, p2, new Scalar(0, 255, 0), 1);
            Imgproc.line(imgMatch, p2, p3, new Scalar(0, 255, 0), 1);
            Imgproc.line(imgMatch, p3, p4, new Scalar(0, 255, 0), 1);
            Imgproc.line(imgMatch, p4, p1, new Scalar(0, 255, 0), 1);

            showResultingPicture(imgMatch);
        } catch (IOException e) {
            // NOTE(review): exception silently swallowed — debug display
            // failure is non-fatal, but this should at least be logged.
        }
    }

    // check rotation angles
    checkRotationAngle(p1, p2, p3, p4, po1, po2, po3, po4);

    // rework on scene points as new, we are sure the object rotation is 0, 90, 180 or 270
    reworkOnScenePoints(p1, p2, p3, p4);

    // check that aspect ratio of the detected height and width are the same
    checkDetectionZoneAspectRatio(p1, p2, p4, po1, po2, po4);

    recordDetectedRectangle(p1, p2, p3, p4);
}

From source file:com.shootoff.camera.autocalibration.AutoCalibrationManager.java

License:Open Source License

/**
 * Detects whether the projected pattern visibly changed in this frame by
 * comparing the frame-delay pixel's brightness against the stored reference.
 * The first call only initialises the reference pixel.
 *
 * @param frame current camera frame
 * @return elapsed time between arming the change and the current frame
 *         timestamp, or empty when no change was detected (or on the
 *         initialising call)
 */
private Optional<Long> checkForFrameChange(BufferedImage frame) {
    Mat mat;

    // NOTE(review): synchronizes on the method parameter — this only guards
    // anything if all users share the same BufferedImage instance; confirm.
    synchronized (frame) {
        undistortFrame(frame);
        mat = Camera.bufferedImageToMat(frame);
    }

    final double[] pixel = getFrameDelayPixel(mat);

    // Initialize
    if (patternLuminosity[0] == -1) {
        patternLuminosity = pixel;
        return Optional.empty();
    }

    // Put reference and current pixel side by side so one cvtColor call
    // converts both to HSV.
    final Mat tempMat = new Mat(1, 2, CvType.CV_8UC3);
    tempMat.put(0, 0, patternLuminosity);
    tempMat.put(0, 1, pixel);

    Imgproc.cvtColor(tempMat, tempMat, Imgproc.COLOR_BGR2HSV);

    // Change detected when the current pixel's value (V channel, index 2)
    // dropped below 90% of the reference.
    if (tempMat.get(0, 1)[2] < .9 * tempMat.get(0, 0)[2]) {
        return Optional.of(cameraManager.getCurrentFrameTimestamp() - frameTimestampBeforeFrameChange);
    }

    return Optional.empty();
}

From source file:com.shootoff.camera.Camera.java

License:Open Source License

/**
 * Converts a BufferedImage into a 3-channel 8-bit OpenCV Mat.
 *
 * @param frame source image (any type; normalised to 3-byte BGR first)
 * @return a new CV_8UC3 Mat containing the frame's pixels
 */
public static Mat bufferedImageToMat(BufferedImage frame) {
    // Normalise to 3-byte BGR so the raster's backing array maps directly
    // onto the Mat's expected layout.
    final BufferedImage bgrFrame = ConverterFactory.convertToType(frame, BufferedImage.TYPE_3BYTE_BGR);
    final byte[] pixelData = ((DataBufferByte) bgrFrame.getRaster().getDataBuffer()).getData();

    final Mat result = new Mat(frame.getHeight(), frame.getWidth(), CvType.CV_8UC3);
    result.put(0, 0, pixelData);
    return result;
}

From source file:com.shootoff.camera.shotdetection.JavaShotDetector.java

License:Open Source License

/**
 * Uses an HSV copy of the current camera frame to detect shots and a BGR
 * copy to draw bright pixels as red and high-motion pixels as blue. The BGR
 * copy is what ShootOFF shows to the user.
 *
 * @param frameBGR a blue, green, red copy of the current frame for drawing
 *                 bright/high-motion pixels (mutated in place)
 * @param detectShots whether or not to detect a shot
 */
@Override
public void processFrame(final Mat frameBGR, final boolean detectShots) {
    updateMovingAveragePeriod();

    // Must reset before every updateFilter loop
    brightPixels.clear();

    // Create a hue, saturation, value copy of the current frame used to
    // detect the shots. The BGR version is just used by this implementation
    // to show the user where bright/high motion pixels are.
    final Mat frameHSV = new Mat();
    Imgproc.cvtColor(frameBGR, frameHSV, Imgproc.COLOR_BGR2HSV);

    final Set<Pixel> thresholdPixels = findThresholdPixelsAndUpdateFilter(frameHSV,
            (detectShots && filtersInitialized));

    int thresholdPixelsSize = thresholdPixels.size();

    if (logger.isTraceEnabled()) {
        if (thresholdPixelsSize >= 1)
            logger.trace("thresholdPixels {} getMinimumShotDimension {}", thresholdPixelsSize,
                    getMinimumShotDimension());

        for (final Pixel pixel : thresholdPixels) {
            logger.trace("thresholdPixel {} {} - from array {} from pixel cur {} avg {}", pixel.x, pixel.y,
                    lumsMovingAverage[pixel.x][pixel.y], pixel.getCurrentLum(), pixel.getLumAverage());
        }
    }

    if (!filtersInitialized)
        filtersInitialized = checkIfInitialized();

    if (detectShots && filtersInitialized) {
        updateAvgThresholdPixels(thresholdPixelsSize);

        updateAvgBrightPixels(brightPixels.size());

        if (shouldShowBrightnessWarning()) {
            cameraManager.showBrightnessWarning();
        }

        // Only cluster and detect when there are enough threshold pixels
        // and motion is not excessive.
        if (thresholdPixelsSize >= getMinimumShotDimension() && !isExcessiveMotion(thresholdPixelsSize)) {
            final Set<PixelCluster> clusters = pixelClusterManager.clusterPixels(thresholdPixels,
                    getMinimumShotDimension());

            if (logger.isTraceEnabled()) {
                logger.trace("thresholdPixels {}", thresholdPixelsSize);
                logger.trace("clusters {}", clusters.size());
            }

            detectShots(frameHSV, clusters);
        }

        // Moved to after detectShots because otherwise we'll have changed
        // pixels in the frame that's being checked for shots
        else if (isExcessiveMotion(thresholdPixelsSize)) {
            if (shouldShowMotionWarning(thresholdPixelsSize))
                cameraManager.showMotionWarning();

            // Paint high-motion pixels blue in the user-visible frame.
            for (final Pixel pixel : thresholdPixels) {
                frameBGR.put(pixel.y, pixel.x, BLUE_MAT_PIXEL);
            }
        }

        if (shouldShowBrightnessWarningBool && !brightPixels.isEmpty()) {
            // Make the feed pixels red so the user can easily see what the
            // problem pixels are
            for (final Pixel pixel : brightPixels) {
                frameBGR.put(pixel.y, pixel.x, RED_MAT_PIXEL);
            }
        }
    }
}

From source file:com.shootoff.camera.shotdetection.JavaShotDetector.java

License:Open Source License

/**
 * Converts a detected pixel cluster into a shot: determines the shot colour
 * from the HSV working frame, registers the shot with the superclass, and,
 * when debug recording is enabled, writes an original and an annotated
 * debug frame to disk.
 *
 * @param workingFrame HSV copy of the current frame (converted back to BGR
 *                     for the debug images)
 * @param pc the pixel cluster representing the candidate shot
 */
private void addShot(Mat workingFrame, PixelCluster pc) {
    final Optional<Color> color = pc.getColor(workingFrame, colorDistanceFromRed);

    if (!color.isPresent()) {
        if (logger.isDebugEnabled())
            logger.debug("Processing Shot: Shot Rejected By Lack Of Color Density");
        return;
    }

    final double x = pc.centerPixelX;
    final double y = pc.centerPixelY;

    if (super.addShot(color.get(), x, y, true) && config.isDebugShotsRecordToFiles()) {
        final Mat debugFrame = new Mat();
        Imgproc.cvtColor(workingFrame, debugFrame, Imgproc.COLOR_HSV2BGR);

        // First write the unannotated frame ("..._orig.png")...
        String filename = String.format("shot-%d-%d-%d_orig.png", cameraManager.getFrameCount(),
                (int) pc.centerPixelX, (int) pc.centerPixelY);
        final File file = new File(filename);
        filename = file.toString();
        Highgui.imwrite(filename, debugFrame);

        // ...then paint the cluster pixels (BGR green for green shots,
        // BGR red otherwise) and write the annotated copy.
        for (final Pixel p : pc) {
            if (javafx.scene.paint.Color.GREEN.equals(color.get())) {
                final double[] greenColor = { 0, 255, 0 };
                debugFrame.put(p.y, p.x, greenColor);
            } else {
                final double[] redColor = { 0, 0, 255 };
                debugFrame.put(p.y, p.x, redColor);
            }
        }

        File outputfile = new File(String.format("shot-%d-%d-%d.png", cameraManager.getFrameCount(),
                (int) pc.centerPixelX, (int) pc.centerPixelY));
        filename = outputfile.toString();
        Highgui.imwrite(filename, debugFrame);
    }
}