List of usage examples for the org.opencv.core.Mat constructor
public Mat(Mat m, Range rowRange, Range colRange)
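The examples below come from several projects and exercise a number of Mat constructors. As a starting point, here is a minimal, self-contained sketch (not taken from any of the projects below) of the Range-based constructor shown above; the file name "lena.png" is only a placeholder.

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.Range;
import org.opencv.imgcodecs.Imgcodecs;

public class SubmatrixExample {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        Mat src = Imgcodecs.imread("lena.png");
        if (src.empty()) {
            return; // placeholder image not found
        }
        // Rows 10..99 and columns 20..199 of src; no pixel data is copied,
        // the new Mat is a header that shares src's buffer.
        Mat roi = new Mat(src, new Range(10, 100), new Range(20, 200));
        // clone() when an independent copy is needed
        Mat copy = roi.clone();
        System.out.println("ROI size: " + copy.size());
    }
}

Note that Imgcodecs is the OpenCV 3 API; some of the snippets below use the older Highgui (OpenCV 2.4).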
From source file:at.ac.tuwien.caa.docscan.camera.CameraPreview.java
License:Open Source License
private Mat byte2Mat(byte[] pixels) {
    // NV21 preview buffer: full-resolution Y plane plus interleaved VU plane -> height * 1.5 rows
    Mat yuv = new Mat((int) (mFrameHeight * 1.5), mFrameWidth, CvType.CV_8UC1);
    yuv.put(0, 0, pixels);
    // convert to a 3-channel RGB Mat of the original frame size
    Mat result = new Mat(mFrameHeight, mFrameWidth, CvType.CV_8UC3);
    Imgproc.cvtColor(yuv, result, Imgproc.COLOR_YUV2RGB_NV21);
    return result;
}
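The height * 1.5 factor comes from the NV21 layout: a full-resolution Y plane followed by a half-size interleaved VU plane, i.e. 12 bits per pixel. A small sketch, not part of CameraPreview.java, of a guard that checks the buffer length before wrapping it in a Mat:

// Minimal sketch (not from CameraPreview.java): verify an NV21 buffer size
// before handing it to byte2Mat-style code.
static boolean isValidNv21Buffer(byte[] pixels, int width, int height) {
    // Y plane (width * height bytes) + interleaved VU plane (width * height / 2 bytes)
    return pixels != null && pixels.length == width * height * 3 / 2;
}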
From source file:Beans.Imagen.java
public Mat getMatFotografia() {
    byte[] pixels = ((DataBufferByte) fotografia.getRaster().getDataBuffer()).getData();
    // Create a matrix the same size as the image (alto = height, ancho = width)
    Mat image = new Mat(alto, ancho, CvType.CV_8UC3);
    // Fill the matrix with the image bytes
    image.put(0, 0, pixels);
    Imgproc.resize(image, image, new Size(480, 640));
    return image;
}
From source file:br.com.prj.TelaPrincipal.java
private void btnProcurarActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnProcurarActionPerformed
    // remove all labels from the panel
    jPanel1.removeAll();
    // reset the position of the first label
    boundX = 12;
    boundY = 22;
    CascadeClassifier faceDetector = new CascadeClassifier(URL_LIB_FACE);
    imagemCarregada = Imgcodecs.imread(selectedFile.getAbsolutePath(), Imgcodecs.CV_LOAD_IMAGE_COLOR);
    // image that will hold the rectangles of the detected faces
    imagemDest = new Mat(imagemCarregada.rows(), imagemCarregada.cols(), imagemCarregada.type());
    MatOfRect faceDetections = new MatOfRect();
    faceDetector.detectMultiScale(imagemCarregada, faceDetections);
    // try to verify whether the detected rect contains eyes
    Rect[] faceEncontrada = new Rect[faceDetections.toArray().length];
    int i = 0;
    for (Rect rect : faceDetections.toArray()) {
        faceEncontrada[i] = new Rect(
                new Point(rect.x - PAD_LATERAL + 5, rect.y - PAD_SUPERIOR + 5),
                new Point(rect.x + rect.width + PAD_LATERAL, (rect.y + rect.height + PAD_SUPERIOR) - 5));
        // crop the face region (Mat(Mat, Rect) shares data with imagemCarregada) and add it as a label
        adicionarLabel(convertMatToImage(new Mat(imagemCarregada, faceEncontrada[i])), faceEncontrada[i]);
        // DRAW THE FACE RECTANGLE ON THE IMAGE
        // Imgproc.rectangle(imagemDest,
        //         new Point(rect.x - PAD_LATERAL, rect.y - PAD_SUPERIOR),
        //         new Point(rect.x + rect.width + PAD_LATERAL, (rect.y + rect.height + PAD_SUPERIOR) - 5),
        //         new Scalar(0, 255, 0));
        i++;
    }
    if (faceDetections.toArray().length == 0) {
        totalRostos.setText("Não foi possível identificar nenhum rosto na imagem selecionada.");
    } else {
        totalRostos.setText("Identificamos " + faceDetections.toArray().length + " rosto(s) na imagem carregada.");
    }
}
From source file:br.com.prj.TelaPrincipal.java
/**
 * Converts an Image into a Mat.
 *
 * @param img
 * @return new Mat()
 */
public Mat convertImageToMat(Image img) {
    BufferedImage image = (BufferedImage) img;
    byte[] data = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
    Mat mat = new Mat(image.getHeight(), image.getWidth(), CvType.CV_8UC3);
    mat.put(0, 0, data);
    return mat;
}
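The previous example calls a convertMatToImage helper whose body is not shown on this page. As a sketch of what that opposite conversion could look like (an assumption, not the project's actual implementation; it presumes a 3-channel 8-bit BGR Mat like the one built above):

// Minimal sketch (not from TelaPrincipal.java): copy a CV_8UC3 Mat into a BufferedImage.
public static BufferedImage convertMatToBufferedImage(Mat mat) {
    BufferedImage image = new BufferedImage(mat.cols(), mat.rows(), BufferedImage.TYPE_3BYTE_BGR);
    byte[] data = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
    mat.get(0, 0, data); // copy the Mat's pixels into the BufferedImage buffer
    return image;
}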
From source file:ch.hslu.pren.t37.camera.BildAuswertungKorb.java
public int bildAuswerten() {
    // image that is searched in
    String inFile = "../camera.jpg";
    // the template that is searched for inside inFile
    String templateFile = "../Bilder/korb.jpg";
    // the result is rendered into this image
    String outFile = "../LoesungsBild.jpg";
    // set the matching method
    int match_method = Imgproc.TM_CCOEFF_NORMED;
    // load the original image and the template
    Mat img = Highgui.imread(inFile, Highgui.CV_LOAD_IMAGE_COLOR);
    Mat templ = Highgui.imread(templateFile, Highgui.CV_LOAD_IMAGE_COLOR);
    // create the result matrix
    int result_cols = img.cols() - templ.cols() + 1;
    int result_rows = img.rows() - templ.rows() + 1;
    Mat result = new Mat(result_rows, result_cols, CvType.CV_32FC1);
    // match and normalize
    Imgproc.matchTemplate(img, templ, result, match_method);
    Core.normalize(result, result, 0, 1, Core.NORM_MINMAX, -1, new Mat());
    // locate the best match with min/max logic
    Core.MinMaxLocResult mmr = Core.minMaxLoc(result);
    Point matchLoc;
    if (match_method == Imgproc.TM_SQDIFF || match_method == Imgproc.TM_SQDIFF_NORMED) {
        matchLoc = mmr.minLoc;
    } else {
        matchLoc = mmr.maxLoc;
    }
    // draw the match
    Core.rectangle(img, matchLoc, new Point(matchLoc.x + templ.cols(), matchLoc.y + templ.rows()),
            new Scalar(0, 255, 0), 10);
    // store all four corner points
    Point topLeft = new Point(matchLoc.x, matchLoc.y);
    Point topRight = new Point(matchLoc.x + templ.cols(), matchLoc.y);
    Point downLeft = new Point(matchLoc.x, matchLoc.y + templ.rows());
    Point downRight = new Point(matchLoc.x + templ.cols(), matchLoc.y + templ.rows());
    // save the result image
    Highgui.imwrite(outFile, img);
    // compute the centre offset
    double mittePicture;
    double mitteKorb;
    double differnez;
    Mat sol = Highgui.imread(outFile, Highgui.CV_LOAD_IMAGE_COLOR);
    mittePicture = sol.width() / 2;
    mitteKorb = (topRight.x - topLeft.x) / 2;
    mitteKorb = topLeft.x + mitteKorb;
    differnez = mitteKorb - mittePicture;
    logger.log(PrenLogger.LogLevel.DEBUG, "Mitte Korb: " + mitteKorb);
    logger.log(PrenLogger.LogLevel.DEBUG, "Mitte Bild: " + mittePicture);
    logger.log(PrenLogger.LogLevel.DEBUG, "Differenz: " + differnez + "\nWenn Differenz negativ, nach rechts drehen");
    return (int) differnez;
}
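One thing the method above cannot do is reject frames where the basket is absent: Core.normalize rescales the result to [0, 1], so the maximum is always 1 afterwards. A minimal sketch of a confidence check on the raw TM_CCOEFF_NORMED score, assuming img and templ are loaded as above; the 0.7 threshold is an arbitrary assumption, not from the source:

Mat score = new Mat();
Imgproc.matchTemplate(img, templ, score, Imgproc.TM_CCOEFF_NORMED);
Core.MinMaxLocResult raw = Core.minMaxLoc(score);
if (raw.maxVal < 0.7) {
    // template probably not present in the image
}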
From source file:ch.zhaw.facerecognitionlibrary.Helpers.MatOperation.java
License:Open Source License
public static Rect[] rotateFaces(Mat img, Rect[] faces, int angle) {
    Point center = new Point(img.cols() / 2, img.rows() / 2);
    Mat rotMat = Imgproc.getRotationMatrix2D(center, angle, 1);
    rotMat.convertTo(rotMat, CvType.CV_32FC1);
    // cast to float to avoid integer division when computing the aspect ratio
    float scale = (float) img.cols() / img.rows();
    for (Rect face : faces) {
        // homogeneous coordinates of the face's top-left corner
        Mat m = new Mat(3, 1, CvType.CV_32FC1);
        m.put(0, 0, face.x);
        m.put(1, 0, face.y);
        m.put(2, 0, 1);
        Mat res = Mat.zeros(2, 1, CvType.CV_32FC1);
        // res = rotMat * m
        Core.gemm(rotMat, m, 1, new Mat(), 0, res, 0);
        face.x = (int) res.get(0, 0)[0];
        face.y = (int) res.get(1, 0)[0];
        if (angle == 270 || angle == -90) {
            face.x = (int) (face.x * scale - face.width);
            face.x = face.x + face.width / 4;
            face.y = face.y + face.height / 4;
        } else if (angle == 180 || angle == -180) {
            face.x = face.x - face.width;
            face.y = face.y - face.height;
        } else if (angle == 90 || angle == -270) {
            face.y = (int) (face.y * scale - face.height);
            face.x = face.x - face.width / 4;
            face.y = face.y - face.height / 4;
        }
    }
    return faces;
}
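The helper above only moves the face rectangles; the frame itself stays unrotated. A minimal sketch (not from MatOperation.java) of rotating the image by the same angle so the rotated rectangles can be drawn on a matching frame:

// Minimal sketch (assumption, not part of the library): rotate an image
// around its centre with the same getRotationMatrix2D call used above.
public static Mat rotateImage(Mat img, int angle) {
    Point center = new Point(img.cols() / 2.0, img.rows() / 2.0);
    Mat rotMat = Imgproc.getRotationMatrix2D(center, angle, 1);
    Mat rotated = new Mat();
    Imgproc.warpAffine(img, rotated, rotMat, img.size());
    return rotated;
}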
From source file:ch.zhaw.facerecognitionlibrary.Helpers.MatXml.java
License:Open Source License
public Mat readMat(String tag) {
    if (isWrite) {
        System.err.println("Trying to read from a file that was opened with write flags");
        return null;
    }
    NodeList nodelist = doc.getElementsByTagName(tag);
    Mat readMat = null;
    for (int i = 0; i < nodelist.getLength(); i++) {
        Node node = nodelist.item(i);
        if (node.getNodeType() == Node.ELEMENT_NODE) {
            Element element = (Element) node;
            String type_id = element.getAttribute("type_id");
            if ("opencv-matrix".equals(type_id) == false) {
                System.out.println("Wrong type_id");
            }
            String rowsStr = element.getElementsByTagName("rows").item(0).getTextContent();
            String colsStr = element.getElementsByTagName("cols").item(0).getTextContent();
            String dtStr = element.getElementsByTagName("dt").item(0).getTextContent();
            String dataStr = element.getElementsByTagName("data").item(0).getTextContent();
            int rows = Integer.parseInt(rowsStr);
            int cols = Integer.parseInt(colsStr);
            int type = CvType.CV_8U;
            Scanner s = new Scanner(dataStr);
            if ("f".equals(dtStr)) {
                // 32-bit float matrix
                type = CvType.CV_32F;
                readMat = new Mat(rows, cols, type);
                float fs[] = new float[1];
                for (int r = 0; r < rows; r++) {
                    for (int c = 0; c < cols; c++) {
                        if (s.hasNextFloat()) {
                            fs[0] = s.nextFloat();
                        } else {
                            fs[0] = 0;
                            System.err.println("Unmatched number of float values at rows=" + r + " cols=" + c);
                        }
                        readMat.put(r, c, fs);
                    }
                }
            } else if ("i".equals(dtStr)) {
                // 32-bit signed integer matrix
                type = CvType.CV_32S;
                readMat = new Mat(rows, cols, type);
                int is[] = new int[1];
                for (int r = 0; r < rows; r++) {
                    for (int c = 0; c < cols; c++) {
                        if (s.hasNextInt()) {
                            is[0] = s.nextInt();
                        } else {
                            is[0] = 0;
                            System.err.println("Unmatched number of int values at rows=" + r + " cols=" + c);
                        }
                        readMat.put(r, c, is);
                    }
                }
            } else if ("s".equals(dtStr)) {
                // 16-bit signed short matrix
                type = CvType.CV_16S;
                readMat = new Mat(rows, cols, type);
                short ss[] = new short[1];
                for (int r = 0; r < rows; r++) {
                    for (int c = 0; c < cols; c++) {
                        if (s.hasNextShort()) {
                            ss[0] = s.nextShort();
                        } else {
                            ss[0] = 0;
                            System.err.println("Unmatched number of short values at rows=" + r + " cols=" + c);
                        }
                        readMat.put(r, c, ss);
                    }
                }
            } else if ("b".equals(dtStr)) {
                // 8-bit unsigned byte matrix (the default type)
                readMat = new Mat(rows, cols, type);
                byte bs[] = new byte[1];
                for (int r = 0; r < rows; r++) {
                    for (int c = 0; c < cols; c++) {
                        if (s.hasNextByte()) {
                            bs[0] = s.nextByte();
                        } else {
                            bs[0] = 0;
                            System.err.println("Unmatched number of byte values at rows=" + r + " cols=" + c);
                        }
                        readMat.put(r, c, bs);
                    }
                }
            }
        }
    }
    return readMat;
}
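readMat expects an opencv-matrix XML node containing rows, cols, dt and a whitespace-separated data block. As a rough, hypothetical counterpart (not part of MatXml.java, which has its own write path), this sketch builds a CV_32F Mat in that layout so readMat could parse it back:

// Minimal sketch (assumption, not the library's writer): serialize a CV_32F Mat
// into the opencv-matrix XML layout that readMat() above understands.
static String matToXml(String tag, Mat mat) {
    StringBuilder sb = new StringBuilder();
    sb.append("<").append(tag).append(" type_id=\"opencv-matrix\">");
    sb.append("<rows>").append(mat.rows()).append("</rows>");
    sb.append("<cols>").append(mat.cols()).append("</cols>");
    sb.append("<dt>f</dt><data>");
    float[] row = new float[mat.cols()];
    for (int r = 0; r < mat.rows(); r++) {
        mat.get(r, 0, row); // copy one row of floats out of the Mat
        for (float v : row) {
            sb.append(v).append(" ");
        }
    }
    sb.append("</data></").append(tag).append(">");
    return sb.toString();
}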
From source file:ch.zhaw.facerecognitionlibrary.PreProcessor.Contours.LocalBinaryPattern.java
License:Open Source License
@Override
public PreProcessor preprocessImage(PreProcessor preProcessor) {
    List<Mat> images = preProcessor.getImages();
    List<Mat> processed = new ArrayList<Mat>();
    for (Mat img : images) {
        // Resize for performance enhancement
        Size size = new Size(preProcessor.getN(), preProcessor.getN());
        Imgproc.resize(img, img, size);
        // the LBP image is 2 pixels smaller: border pixels have no full 8-neighbourhood
        Mat lbp = new Mat(img.rows() - 2, img.cols() - 2, img.type());
        for (int i = 1; i < img.rows() - 1; i++) {
            for (int j = 1; j < img.cols() - 1; j++) {
                // compare the 8 neighbours against the centre pixel
                BitSet out = new BitSet(8);
                double cen = img.get(i, j)[0];
                if (img.get(i - 1, j - 1)[0] > cen) out.set(0);
                if (img.get(i - 1, j)[0] > cen) out.set(1);
                if (img.get(i - 1, j + 1)[0] > cen) out.set(2);
                if (img.get(i, j + 1)[0] > cen) out.set(3);
                if (img.get(i + 1, j + 1)[0] > cen) out.set(4);
                if (img.get(i + 1, j)[0] > cen) out.set(5);
                if (img.get(i + 1, j - 1)[0] > cen) out.set(6);
                if (img.get(i, j - 1)[0] > cen) out.set(7);
                // turn the set bits into a decimal LBP code
                int value = 0;
                for (int k = 0; k < out.length(); k++) {
                    int index = out.nextSetBit(k);
                    value += Math.pow(2, out.length() - 1 - index);
                    k = index;
                }
                lbp.put(i - 1, j - 1, value);
            }
        }
        processed.add(lbp);
    }
    preProcessor.setImages(processed);
    return preProcessor;
}
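LBP codes are usually summarised as a histogram before being fed to a classifier. A minimal sketch (not part of this library's LocalBinaryPattern class, and assuming the lbp image is 8-bit single-channel) that turns the result above into an L1-normalised 256-bin histogram:

// Minimal sketch (assumption, not library code): 256-bin histogram of an
// 8-bit single-channel LBP image.
static Mat lbpHistogram(Mat lbp) {
    Mat hist = new Mat();
    Imgproc.calcHist(java.util.Arrays.asList(lbp), new MatOfInt(0), new Mat(),
            hist, new MatOfInt(256), new MatOfFloat(0f, 256f));
    Core.normalize(hist, hist, 1, 0, Core.NORM_L1);
    return hist;
}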
From source file:ch.zhaw.facerecognitionlibrary.Recognition.Eigenfaces.java
License:Open Source License
public String recognize(Mat img, String expectedLabel) {
    // flatten the image to a single row vector
    img = img.reshape(1, 1);
    // Subtract mean
    img.convertTo(img, CvType.CV_32F);
    Core.subtract(img, Psi, img);
    // Project to subspace
    Mat projected = getFeatureVector(img);
    // Save all points of image for tSNE
    img.convertTo(img, CvType.CV_8U);
    addImage(projected, expectedLabel, true);
    //addImage(projected, expectedLabel);
    // L2 distance to every training projection in Omega
    Mat distance = new Mat(Omega.rows(), 1, CvType.CV_64FC1);
    for (int i = 0; i < Omega.rows(); i++) {
        double dist = Core.norm(projected.row(0), Omega.row(i), Core.NORM_L2);
        distance.put(i, 0, dist);
    }
    Mat sortedDist = new Mat(Omega.rows(), 1, CvType.CV_8UC1);
    Core.sortIdx(distance, sortedDist, Core.SORT_EVERY_COLUMN + Core.SORT_ASCENDING);
    // Give back the name of the found person (closest training sample)
    int index = (int) (sortedDist.get(0, 0)[0]);
    return labelMap.getKey(labelList.get(index));
}
From source file:ch.zhaw.facerecognitionlibrary.Recognition.KNearestNeighbor.java
License:Open Source License
@Override
public void loadFromFile() {
    MatName mtrainingList = new MatName("TrainingList", trainingList);
    List<MatName> listMat = new ArrayList<MatName>();
    listMat.add(mtrainingList);
    labelList = fh.loadIntegerList(fh.createLabelFile(fh.KNN_PATH, "train"));
    labelMap = fh.getLabelMapFromFile(fh.KNN_PATH);
    trainingList = fh.getMatListFromXml(listMat, fh.KNN_PATH, trainingFile).get(0).getMat();
    // one label per training row
    labels = new Mat(labelList.size(), 1, CvType.CV_8UC1);
    for (int i = 0; i < labelList.size(); i++) {
        Integer label = labelList.get(i);
        labels.put(i, 0, label);
    }
    labels.convertTo(labels, CvType.CV_32F);
    SharedPreferences sharedPref = PreferenceManager
            .getDefaultSharedPreferences((context.getApplicationContext()));
    k = Integer.valueOf(sharedPref.getString("key_K", "20"));
    knn = KNearest.create();
    knn.setIsClassifier(true);
    // 0 == Ml.ROW_SAMPLE: each row of trainingList is one training sample
    knn.train(trainingList, 0, labels);
}
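Once the model is loaded and trained, classifying a new sample would typically go through KNearest.findNearest. A minimal sketch (not from KNearestNeighbor.java; the sample is assumed to be a CV_32F row vector shaped like the training rows):

// Minimal sketch (assumption, not library code): classify one sample with the
// trained KNearest model; returns the majority label among the k neighbours.
static float classify(KNearest knn, Mat sample, int k) {
    Mat results = new Mat();
    return knn.findNearest(sample, k, results);
}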