List of usage examples for org.opencv.core.Mat#row(int)
public Mat row(int y)
From source file: ch.zhaw.facerecognitionlibrary.Recognition.Eigenfaces.java
License: Open Source License
public String recognize(Mat img, String expectedLabel) { // Ignore//ww w . j ava2s. c o m img = img.reshape(1, 1); // Subtract mean img.convertTo(img, CvType.CV_32F); Core.subtract(img, Psi, img); // Project to subspace Mat projected = getFeatureVector(img); // Save all points of image for tSNE img.convertTo(img, CvType.CV_8U); addImage(projected, expectedLabel, true); //addImage(projected, expectedLabel); Mat distance = new Mat(Omega.rows(), 1, CvType.CV_64FC1); for (int i = 0; i < Omega.rows(); i++) { double dist = Core.norm(projected.row(0), Omega.row(i), Core.NORM_L2); distance.put(i, 0, dist); } Mat sortedDist = new Mat(Omega.rows(), 1, CvType.CV_8UC1); Core.sortIdx(distance, sortedDist, Core.SORT_EVERY_COLUMN + Core.SORT_ASCENDING); // Give back the name of the found person int index = (int) (sortedDist.get(0, 0)[0]); return labelMap.getKey(labelList.get(index)); }
From source file: com.joravasal.keyface.EigenFacesActivity.java
License: Open Source License
@Override public void onCreate(Bundle savedInstanceState) { Log.i("eigenFacesActivity::", "OnCreate"); super.onCreate(savedInstanceState); setContentView(R.layout.eigenfaces); setTitle("Eigenfaces"); Mat aver = ((PCAfaceRecog) KeyFaceActivity.recogAlgorithm).getAverage(); Mat faces = ((PCAfaceRecog) KeyFaceActivity.recogAlgorithm).getEigenFaces(); int size = new Integer(KeyFaceActivity.prefs.getString("savedFaceSize", "200")); Mat aux = new Mat(); aver = aver.reshape(1, size);//from w ww . ja va 2 s. c o m //aver.convertTo(aux, ); aver = toGrayscale(aver); average = Bitmap.createBitmap(size, size, Bitmap.Config.ARGB_8888); Imgproc.cvtColor(aver, aux, Imgproc.COLOR_GRAY2RGBA, 4); Utils.matToBitmap(aux, average); LinearLayout layout = (LinearLayout) findViewById(id.eigenFacesHorizontalLayout); TextView avrgImgTV = new TextView(getApplicationContext()); avrgImgTV.setText("Average image:"); avrgImgTV.setPadding(5, 10, 10, 20); avrgImgTV.setGravity(Gravity.CENTER); TextView eigenfacesImgsTV = new TextView(getApplicationContext()); eigenfacesImgsTV.setText("Eigenfaces:"); eigenfacesImgsTV.setPadding(5, 10, 10, 20); eigenfacesImgsTV.setGravity(Gravity.CENTER); ImageView imgV = new ImageView(getApplicationContext()); imgV.setClickable(false); imgV.setVisibility(0); imgV.setPadding(0, 10, 10, 20); imgV.setImageBitmap(average); layout.addView(avrgImgTV); layout.addView(imgV); layout.addView(eigenfacesImgsTV); LinkedList<ImageView> variables = new LinkedList<ImageView>(); eigenfacesList = new LinkedList<Bitmap>(); for (int i = 0; i < faces.rows(); i++) { variables.add(new ImageView(getApplicationContext())); eigenfacesList.add(Bitmap.createBitmap(size, size, Bitmap.Config.ARGB_8888)); aux = new Mat(); aux = faces.row(i).reshape(1, size); aux = toGrayscale(aux); Mat auxGreyC4 = new Mat(); Imgproc.cvtColor(aux, auxGreyC4, Imgproc.COLOR_GRAY2RGBA, 4); Utils.matToBitmap(auxGreyC4, eigenfacesList.get(i)); variables.get(i).setClickable(false); variables.get(i).setVisibility(0); 
variables.get(i).setPadding(0, 10, 10, 20); variables.get(i).setImageBitmap(eigenfacesList.get(i)); layout.addView(variables.get(i)); } Button save = (Button) findViewById(id.saveEigenfacesB); save.setOnClickListener(this); }
From source file: com.joravasal.keyface.PCAfaceRecog.java
License: Open Source License
/** * Given a Mat object (data structure from OpenCV) with a face on it, * it will try to find if the face is recognized from the data saved. * It applies a change in size to match the one needed. * // w ww. j a v a 2 s . c om * @return An integer that specifies which vector is recognized with the given Mat * */ public AlgorithmReturnValue recognizeFace(Mat face) { if (numImages < 2) { return new AlgorithmReturnValue(); } Imgproc.resize(face, face, imageSize); //Size must be equal to the size of the saved faces Mat analyze = new Mat(1, imgLength, CvType.CV_32FC1); Mat X = analyze.row(0); try { face.reshape(1, 1).convertTo(X, CvType.CV_32FC1); } catch (CvException e) { return new AlgorithmReturnValue(); } Mat res = new Mat(); Core.PCAProject(analyze, average, eigenfaces, res); return findClosest(res); }
From source file: javaapplication1.JavaApplication1.java
public static void main(String[] args) { // you must load the OpenCV library like this before trying to do // anything with OpenCV! System.loadLibrary(Core.NATIVE_LIBRARY_NAME); // Print OpenCV version System.out.println("Welcome to OpenCV " + Core.VERSION); // In OpenCV, the most important data type is the Matrix, Mat. ///*from w w w.j a v a 2 s.co m*/ // Here, we create a matrix that has 5 rows and 10 columns. It // stores an 8-bit type with a single channel. In other words, a // matrix of bytes. We'll initialize every element to 0. Mat m = new Mat(5, 10, CvType.CV_8UC1, new Scalar(0)); // Dump information about the matrix System.out.println("OpenCV Mat: " + m); // set row 1 to be all 1s, and then column 5 to be all 5s Mat mr1 = m.row(1); mr1.setTo(new Scalar(1)); Mat mc5 = m.col(5); mc5.setTo(new Scalar(5)); // Dump the actual matrix contents System.out.println("OpenCV Mat data:\n" + m.dump()); Ocv ocv = new Ocv(); ocv.getFilePath(); /** * Find faces in an image. * * @param filter Path to the xml face finding filter to use * @param input Path to the input image file * @param output Path to the output image file */ //ocv.findFaces("lbpcascade_frontalface.xml", "C:\\Users\\Wellesley\\Documents\\GitHub\\CSE398\\opencvTutorial\\JavaApplication1\\src\\javaapplication1\\lena.png", "../javaapplication1"); ocv.setOutput("step2.png"); ocv.findFaces("", "", ""); ocv.setOutput("step3.png"); ocv.cropEachFace("", ""); ocv.setOutput("step4.png"); ocv.resizeEachFace("", ""); ocv.setOutput("step6.png"); ocv.makeFacesGray("", "", ""); ocv.setOutput("step8.png"); ocv.blendWithGray50("", ""); ocv.setOutput("step10.png"); ocv.doSobel("", ""); ocv.setOutput("step11.png"); ocv.directManip("", ""); }
From source file: net.semanticmetadata.lire.imageanalysis.features.local.opencvfeatures.CvSiftExtractor.java
License: Open Source License
@Override public void extract(BufferedImage img) { MatOfKeyPoint keypoints = new MatOfKeyPoint(); Mat descriptors = new Mat(); List<KeyPoint> myKeys;/*from w w w . j av a2s .c om*/ // Mat img_object = Highgui.imread(image, 0); //0 = CV_LOAD_IMAGE_GRAYSCALE // detector.detect(img_object, keypoints); byte[] data = ((DataBufferByte) img.getRaster().getDataBuffer()).getData(); Mat matRGB = new Mat(img.getHeight(), img.getWidth(), CvType.CV_8UC3); matRGB.put(0, 0, data); Mat matGray = new Mat(img.getHeight(), img.getWidth(), CvType.CV_8UC1); Imgproc.cvtColor(matRGB, matGray, Imgproc.COLOR_BGR2GRAY); //TODO: RGB or BGR? byte[] dataGray = new byte[matGray.rows() * matGray.cols() * (int) (matGray.elemSize())]; matGray.get(0, 0, dataGray); detector.detect(matGray, keypoints); extractor.compute(matGray, keypoints, descriptors); myKeys = keypoints.toList(); features = new LinkedList<CvSiftFeature>(); KeyPoint key; CvSiftFeature feat; double[] desc; int cols, rows = myKeys.size(); for (int i = 0; i < rows; i++) { cols = (descriptors.row(i)).cols(); desc = new double[cols]; key = myKeys.get(i); for (int j = 0; j < cols; j++) { desc[j] = descriptors.get(i, j)[0]; } feat = new CvSiftFeature(key.pt.x, key.pt.y, key.size, desc); features.add(feat); } }
From source file: net.semanticmetadata.lire.imageanalysis.features.local.opencvfeatures.CvSurfExtractor.java
License: Open Source License
@Override public void extract(BufferedImage img) { MatOfKeyPoint keypoints = new MatOfKeyPoint(); Mat descriptors = new Mat(); List<KeyPoint> myKeys;/*from ww w. j av a 2 s . c om*/ // Mat img_object = Highgui.imread(image, 0); //0 = CV_LOAD_IMAGE_GRAYSCALE // detector.detect(img_object, keypoints); byte[] data = ((DataBufferByte) img.getRaster().getDataBuffer()).getData(); Mat matRGB = new Mat(img.getHeight(), img.getWidth(), CvType.CV_8UC3); matRGB.put(0, 0, data); Mat matGray = new Mat(img.getHeight(), img.getWidth(), CvType.CV_8UC1); Imgproc.cvtColor(matRGB, matGray, Imgproc.COLOR_BGR2GRAY); //TODO: RGB or BGR? byte[] dataGray = new byte[matGray.rows() * matGray.cols() * (int) (matGray.elemSize())]; matGray.get(0, 0, dataGray); detector.detect(matGray, keypoints); extractor.compute(matGray, keypoints, descriptors); myKeys = keypoints.toList(); features = new LinkedList<CvSurfFeature>(); KeyPoint key; CvSurfFeature feat; double[] desc; int cols, rows = myKeys.size(); for (int i = 0; i < rows; i++) { cols = (descriptors.row(i)).cols(); desc = new double[cols]; key = myKeys.get(i); for (int j = 0; j < cols; j++) { desc[j] = descriptors.get(i, j)[0]; } feat = new CvSurfFeature(key.pt.x, key.pt.y, key.size, desc); features.add(feat); } }
From source file: samples.SimpleSample.java
public static void main(String[] args) { System.load("C:\\opencv\\build\\java\\x64\\opencv_java310.dll"); System.out.println(System.getProperty("java.library.path")); System.out.println("Welcome to OpenCV " + Core.VERSION); Mat m = new Mat(5, 10, CvType.CV_8UC1, new Scalar(0)); System.out.println("OpenCV Mat: " + m); Mat mr1 = m.row(1); mr1.setTo(new Scalar(1)); Mat mc5 = m.col(5);//from w ww. j a v a 2 s . co m mc5.setTo(new Scalar(5)); System.out.println("OpenCV Mat data:\n" + m.dump()); }