List of usage examples for the org.opencv.core Mat constructor
public Mat(Mat m, Range rowRange, Range colRange)
From source file:com.seleniumtests.util.imaging.ImageDetector.java
License:Apache License
private MinMaxLocResult getBestTemplateMatching(int matchMethod, Mat sceneImageMat, Mat objectImageMat) { // / Create the result matrix int resultCols = sceneImageMat.cols() - objectImageMat.cols() + 1; int resultRows = sceneImageMat.rows() - objectImageMat.rows() + 1; Mat result = new Mat(resultRows, resultCols, CvType.CV_32FC1); // / Do the Matching and Normalize Imgproc.matchTemplate(sceneImageMat, objectImageMat, result, matchMethod); // / Localizing the best match with minMaxLoc return Core.minMaxLoc(result); }
From source file:com.shootoff.camera.autocalibration.AutoCalibrationManager.java
License:Open Source License
/**
 * Checks whether the projected calibration pattern has visibly changed between frames
 * by comparing the luminosity (HSV value channel) of a probe pixel against the stored
 * reference pattern luminosity.
 *
 * @param frame the current camera frame; locked while undistorting and converting
 * @return the elapsed time since the frame before the change, or empty if no change
 *         was detected (or if this call only initialized the reference luminosity)
 */
private Optional<Long> checkForFrameChange(BufferedImage frame) {
    Mat mat;
    // NOTE(review): synchronizes on a method parameter — callers presumably share this
    // BufferedImage across threads; confirm all accessors lock on the same object.
    synchronized (frame) {
        undistortFrame(frame);
        mat = Camera.bufferedImageToMat(frame);
    }
    final double[] pixel = getFrameDelayPixel(mat);
    // First call: record the reference luminosity and report no change.
    if (patternLuminosity[0] == -1) {
        patternLuminosity = pixel;
        return Optional.empty();
    }
    // Pack reference pixel (col 0) and current pixel (col 1) into a 1x2 BGR Mat
    // so both can be converted to HSV in a single call.
    final Mat tempMat = new Mat(1, 2, CvType.CV_8UC3);
    tempMat.put(0, 0, patternLuminosity);
    tempMat.put(0, 1, pixel);
    Imgproc.cvtColor(tempMat, tempMat, Imgproc.COLOR_BGR2HSV);
    // Index [2] is the HSV value (brightness); a drop below 90% of the reference
    // is treated as a frame change.
    if (tempMat.get(0, 1)[2] < .9 * tempMat.get(0, 0)[2]) {
        return Optional.of(cameraManager.getCurrentFrameTimestamp() - frameTimestampBeforeFrameChange);
    }
    return Optional.empty();
}
From source file:com.shootoff.camera.Camera.java
License:Open Source License
public static Mat bufferedImageToMat(BufferedImage frame) { BufferedImage transformedFrame = ConverterFactory.convertToType(frame, BufferedImage.TYPE_3BYTE_BGR); byte[] pixels = ((DataBufferByte) transformedFrame.getRaster().getDataBuffer()).getData(); Mat mat = new Mat(frame.getHeight(), frame.getWidth(), CvType.CV_8UC3); mat.put(0, 0, pixels);//from ww w . j a v a 2s. co m return mat; }
From source file:com.sikulix.core.Visual.java
License:Open Source License
/**
 * Converts a {@link BufferedImage} into an OpenCV BGR {@code Mat}.
 * Only TYPE_INT_RGB is fully handled: its packed int pixels are re-interpreted as
 * 4-byte channels and remapped to BGR via {@code Core.mixChannels}. Other types are
 * only logged as errors.
 *
 * @param bImg the source image
 * @return a CV_8UC3 BGR Mat for INT_RGB input; {@code null} for unsupported types
 */
protected static Mat makeMat(BufferedImage bImg) {
    Mat aMat = null;
    if (bImg.getType() == BufferedImage.TYPE_INT_RGB) {
        vLog.trace("makeMat: INT_RGB (%dx%d)", bImg.getWidth(), bImg.getHeight());
        // Reinterpret the packed ARGB ints as raw bytes (4 bytes per pixel).
        int[] data = ((DataBufferInt) bImg.getRaster().getDataBuffer()).getData();
        ByteBuffer byteBuffer = ByteBuffer.allocate(data.length * 4);
        IntBuffer intBuffer = byteBuffer.asIntBuffer();
        intBuffer.put(data);
        aMat = new Mat(bImg.getHeight(), bImg.getWidth(), CvType.CV_8UC4);
        aMat.put(0, 0, byteBuffer.array());
        Mat oMatBGR = new Mat(bImg.getHeight(), bImg.getWidth(), CvType.CV_8UC3);
        Mat oMatA = new Mat(bImg.getHeight(), bImg.getWidth(), CvType.CV_8UC1);
        List<Mat> mixIn = new ArrayList<Mat>(Arrays.asList(new Mat[] { aMat }));
        List<Mat> mixOut = new ArrayList<Mat>(Arrays.asList(new Mat[] { oMatA, oMatBGR }));
        // Channel remap: A 0 - R 1 - G 2 - B 3 -> A 0 - B 1 - G 2 - R 3
        // (pairs are fromChannel,toChannel; alpha goes to oMatA, RGB reversed into oMatBGR).
        Core.mixChannels(mixIn, mixOut, new MatOfInt(0, 0, 1, 3, 2, 2, 3, 1));
        return oMatBGR;
    } else if (bImg.getType() == BufferedImage.TYPE_3BYTE_BGR) {
        // NOTE(review): 3BYTE_BGR is logged as an error and falls through to return null —
        // looks unfinished; confirm whether this type should be supported.
        vLog.error("makeMat: 3BYTE_BGR (%dx%d)", bImg.getWidth(), bImg.getHeight());
    } else {
        vLog.error("makeMat: Type not supported: %d (%dx%d)", bImg.getType(), bImg.getWidth(), bImg.getHeight());
    }
    return aMat;
}
From source file:com.team.formal.eyeshopping.ActivityFindingResults.java
License:Open Source License
/**
 * Sends the given bitmap to the Google Cloud Vision WEB_DETECTION API, derives keyword
 * combinations from the returned page URLs, queries the Naver shop-search API with those
 * keywords, filters the hits by AKAZE feature matching against the user-selected image,
 * and finally populates the results grid.
 *
 * Structure: an outer AsyncTask does the Vision request; its onPostExecute builds the
 * keyword combinations and launches an inner AsyncTask for the Naver queries + matching.
 *
 * @param bitmap the user-selected image to analyze
 * @throws IOException declared for the Vision client setup (work happens on background threads)
 */
private void callCloudVision(final Bitmap bitmap) throws IOException {
    // Do the real work in an async task, because we need to use the network anyway
    new AsyncTask<Object, Void, ArrayList<String>>() {
        final ProgressDialog asyncDialog = new ProgressDialog(ActivityFindingResults.this);

        @Override
        protected void onPreExecute() {
            super.onPreExecute();
            asyncDialog.setProgressStyle(ProgressDialog.STYLE_SPINNER);
            asyncDialog.setMessage("Loading Products ...");
            asyncDialog.show();
        }

        @Override
        protected ArrayList<String> doInBackground(Object... params) {
            try {
                HttpTransport httpTransport = AndroidHttp.newCompatibleTransport();
                JsonFactory jsonFactory = GsonFactory.getDefaultInstance();
                VisionRequestInitializer requestInitializer = new VisionRequestInitializer(
                        CLOUD_VISION_API_KEY) {
                    /**
                     * We override this so we can inject important identifying fields into the HTTP
                     * headers. This enables use of a restricted cloud platform API key.
                     */
                    @Override
                    protected void initializeVisionRequest(VisionRequest<?> visionRequest) throws IOException {
                        super.initializeVisionRequest(visionRequest);
                        String packageName = getPackageName();
                        visionRequest.getRequestHeaders().set(ANDROID_PACKAGE_HEADER, packageName);
                        String sig = PackageManagerUtils.getSignature(getPackageManager(), packageName);
                        visionRequest.getRequestHeaders().set(ANDROID_CERT_HEADER, sig);
                    }
                };
                Vision.Builder builder = new Vision.Builder(httpTransport, jsonFactory, null);
                builder.setVisionRequestInitializer(requestInitializer);
                Vision vision = builder.build();
                BatchAnnotateImagesRequest batchAnnotateImagesRequest = new BatchAnnotateImagesRequest();
                batchAnnotateImagesRequest.setRequests(new ArrayList<AnnotateImageRequest>() {
                    {
                        AnnotateImageRequest annotateImageRequest = new AnnotateImageRequest();
                        // Add the image
                        Image base64EncodedImage = new Image();
                        // Convert the bitmap to a JPEG
                        // Just in case it's a format that Android understands but Cloud Vision
                        ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
                        bitmap.compress(Bitmap.CompressFormat.JPEG, 90, byteArrayOutputStream);
                        byte[] imageBytes = byteArrayOutputStream.toByteArray();
                        // Base64 encode the JPEG
                        base64EncodedImage.encodeContent(imageBytes);
                        annotateImageRequest.setImage(base64EncodedImage);
                        // add the features we want
                        annotateImageRequest.setFeatures(new ArrayList<Feature>() {
                            {
                                Feature webDetection = new Feature();
                                webDetection.setType("WEB_DETECTION");
                                webDetection.setMaxResults(20);
                                add(webDetection);
                            }
                        });
                        // Add the list of one thing to the request
                        add(annotateImageRequest);
                    }
                });
                Vision.Images.Annotate annotateRequest = vision.images().annotate(batchAnnotateImagesRequest);
                // Due to a bug: requests to Vision API containing large images fail when GZipped.
                annotateRequest.setDisableGZipContent(true);
                Log.d(TAG, "created Cloud Vision request object, sending request");
                BatchAnnotateImagesResponse response = annotateRequest.execute();
                return convertResponseToString(response);
            } catch (GoogleJsonResponseException e) {
                Log.d(TAG, "failed to make API request because " + e.getContent());
            } catch (IOException e) {
                Log.d(TAG, "failed to make API request because of other IOException " + e.getMessage());
            }
            return null;
        }

        @Override
        protected void onPostExecute(ArrayList<String> urls) {
            super.onPostExecute(urls);
            // Log the page URLs returned by web detection.
            for (int i = 0; i < urls.size(); i++) {
                Log.d("pages", urls.get(i));
            }
            ArrayList<String[]> parsedUrl = urlParsing(urls);
            Map<String, Integer> map = new HashMap<String, Integer>();
            // Count how often each URL token appears across all parsed URLs.
            for (int i = 0; i < parsedUrl.size(); i++) {
                for (int j = 0; j < parsedUrl.get(i).length; j++) {
                    System.out.println(parsedUrl.get(i)[j]);
                    Integer count = map.get(parsedUrl.get(i)[j]);
                    map.put(parsedUrl.get(i)[j], (count == null) ? 1 : count + 1);
                }
                System.out.println("");
            }
            // Tokens seen at least 4 times become candidate keywords for the Naver shop API.
            ArrayList<String> rankCount = new ArrayList<>();
            ArrayList<ArrayList<String>> resultArr = new ArrayList<ArrayList<String>>(); // NOTE(review): never read
            for (Map.Entry<String, Integer> entry : map.entrySet()) {
                if (entry.getValue() >= 4) {
                    System.out.println("keyword : " + entry.getKey() + " Count : " + entry.getValue());
                    rankCount.add(entry.getKey());
                }
            }
            final ArrayList<String> coreKeywords = new ArrayList<>();
            if (!rankCount.isEmpty()) {
                // Build 7 random keyword combinations, "%20"-joined for use in query URLs.
                for (int k = 0; k < 7; k++) {
                    int randomCount = randomRange(1, rankCount.size());
                    boolean checkDuplicate[] = new boolean[rankCount.size()];
                    String combiw = "";
                    for (int i = 0; i < randomCount; i++) {
                        int rand1;
                        // Spin until an unused keyword index is drawn (no duplicates in one combination).
                        while (checkDuplicate[(rand1 = randomRange(0, rankCount.size() - 1))]) {
                        }
                        combiw += rankCount.get(rand1) + "%20";
                        Log.d("combi", combiw);
                        checkDuplicate[rand1] = true;
                    }
                    coreKeywords.add(combiw);
                }
                for (int i = 0; i < coreKeywords.size(); i++) {
                    Log.d("coreKey", coreKeywords.get(i));
                }
                // (A commented-out experimental variant of the keyword-combination logic
                // was removed here for clarity.)
            } // end of isEmpty()
            new AsyncTask<Object, Void, List<Shop>>() {
                @Override
                protected List<Shop> doInBackground(Object... params) {
                    List<Shop> results = new ArrayList<>();
                    // NOTE(review): results is empty here, so this trim can never trigger.
                    if (results.size() > 5)
                        results = results.subList(0, 5);
                    for (int i = 0; i < coreKeywords.size(); i++) {
                        System.out.println(coreKeywords.get(i).toString().replaceAll(", ", "%20"));
                        Log.d("uri", coreKeywords.get(i).toString().replaceAll(", ", "%20"));
                        final String xmlRaw = coreKeywords.get(i).toString().replaceAll(", ", "%20");
                        // Step 1: query the Naver shop-search API with this keyword combination.
                        URL url = null;
                        try {
                            url = new URL("https://openapi.naver.com/v1/search/shop.xml?query=" + xmlRaw + "&display=50");
                        } catch (MalformedURLException e) {
                            e.printStackTrace();
                        }
                        HttpURLConnection urlConnection = null;
                        try {
                            urlConnection = (HttpURLConnection) url.openConnection();
                            urlConnection.setRequestProperty("X-Naver-Client-ID", clientID);
                            urlConnection.setRequestProperty("X-Naver-Client-Secret", clientSecret);
                            urlConnection.setRequestProperty("User-Agent", "Mozilla/5.0 ( compatible ) ");
                            urlConnection.setRequestProperty("Accept", "*/*");
                        } catch (IOException e) {
                            e.printStackTrace();
                        }
                        InputStream in = null;
                        try {
                            in = new BufferedInputStream(urlConnection.getInputStream());
                        } catch (IOException e) {
                            e.printStackTrace();
                        }
                        String data = "";
                        String msg = null;
                        BufferedReader br = null;
                        try {
                            if (in != null) {
                                br = new BufferedReader(new InputStreamReader(in, "UTF-8"));
                            }
                        } catch (UnsupportedEncodingException e) {
                            e.printStackTrace();
                        }
                        try {
                            if (br != null) {
                                while ((msg = br.readLine()) != null) {
                                    data += msg;
                                }
                            }
                        } catch (IOException e) {
                            e.printStackTrace();
                        }
                        Log.i("msg of br: ", data);
                        // Step 2: parse the XML, keep the first few shops, and feature-match thumbnails.
                        String shopResult = data;
                        try {
                            List<Shop> parsingResult = parsingShopResultXml(shopResult);
                            if (parsingResult.size() > 5)
                                parsingResult = parsingResult.subList(0, 5);
                            for (final Shop shop : parsingResult) {
                                Bitmap thumbImg = getBitmapFromURL(shop.getImage());
                                if (thumbImg != null) {
                                    ArrayList<String> keywords = new ArrayList<>(Arrays.asList(coreKeywords
                                            .get(i).replace("[", "").replace("]", "").split("%20")));
                                    String combinationKeyword = coreKeywords.get(i).replace("[", "")
                                            .replace("]", "").replaceAll("%20", " ");
                                    shop.setThumbBmp(thumbImg);
                                    shop.setCombinationKeyword(combinationKeyword);
                                    shop.setKeywords(keywords);
                                    results.add(shop);
                                }
                            }
                            if (results.size() > 10) // must be
                                results = results.subList(0, 10);
                            for (Shop dummyShop : results) {
                                mNaverPrImg = dummyShop.getThumbBmp();
                                Mat userSelImgTarget = new Mat(userSelImg.width(), userSelImg.height(), CvType.CV_8UC4);
                                Mat naverPrImgTarget = new Mat(mNaverPrImg.getWidth(), mNaverPrImg.getHeight(), CvType.CV_8UC4);
                                Utils.bitmapToMat(mNaverPrImg, naverPrImgTarget);
                                Imgproc.cvtColor(userSelImg, userSelImgTarget, Imgproc.COLOR_BGR2RGB);
                                Imgproc.cvtColor(naverPrImgTarget, naverPrImgTarget, Imgproc.COLOR_RGBA2RGB);
                                // Native AKAZE matcher: returns 1 when the thumbnail matches the user image.
                                int ret = AkazeFeatureMatching(userSelImg.getNativeObjAddr(), naverPrImgTarget.getNativeObjAddr());
                                if (ret == 1) { // find one!
                                    DecimalFormat df = new DecimalFormat("#,###");
                                    String num = df.format(dummyShop.getLprice());
                                    // De-duplicate by product title before adding to the grid items.
                                    int exist_flag = 0;
                                    for (int ii = 0; ii < findingItems.size(); ii++) {
                                        if (findingItems.get(ii).getProductName()
                                                .equals(dummyShop.getTitle())) {
                                            exist_flag = 1;
                                            break;
                                        }
                                    }
                                    if (exist_flag == 0) {
                                        findingItems.add(new Results_GridItem(dummyShop.getTitle(), mNaverPrImg,
                                                " " + num + "?", dummyShop.getLprice(), dummyShop.getLink(),
                                                dummyShop.getKeywords(), dummyShop.getCombinationKeyword(),
                                                dummyShop.getImage()));
                                    }
                                }
                            }
                        } catch (Exception e) {
                            e.printStackTrace();
                        }
                    } // end of for
                    return results;
                } // end of doinbackground

                @Override
                protected void onPostExecute(List<Shop> shops) {
                    super.onPostExecute(shops);
                    TextView t = (TextView) findViewById(R.id.loadingText);
                    t.setVisibility(View.GONE);
                    GridView g = (GridView) findViewById(R.id.list_view);
                    g.setVisibility(View.VISIBLE);
                    if (findingItems.size() == 0) {
                        // No matches: show the (empty-result) message instead of the grid.
                        TextView tLoad = (TextView) findViewById(R.id.loadingText);
                        tLoad.setText(" .");
                        tLoad.setVisibility(View.VISIBLE);
                        gridView.setVisibility(View.GONE);
                    } else {
                        Log.d(TAG, "finding Size!!!!" + Integer.toString(findingItems.size()));
                        // Sort results ascending by price before displaying.
                        Collections.sort(findingItems, new Comparator<Results_GridItem>() {
                            @Override
                            public int compare(Results_GridItem o1, Results_GridItem o2) {
                                if (o1.getPrice() > o2.getPrice()) {
                                    return 1;
                                } else if (o1.getPrice() < o2.getPrice()) {
                                    return -1;
                                } else {
                                    return 0;
                                }
                            }
                        });
                        for (int i = 0; i < findingItems.size(); i++) {
                            Log.d(TAG, "List !! " + Integer.toString(findingItems.get(i).getPrice()));
                        }
                        Log.d(TAG, "finding Size!!!!" + Integer.toString(findingItems.size()));
                        gridViewAdapter = new GridViewAdapter(getApplicationContext(), findingItems);
                        gridView.setAdapter(gridViewAdapter);
                    }
                    asyncDialog.dismiss();
                } // end of PostExcute
            }.execute();
        }
    }.execute();
}
From source file:com.trandi.opentld.tld.Tld.java
License:Apache License
/**
 * Structure the classifier into 3 stages:
 * a) patch variance
 * b) ensemble of ferns classifier
 * c) nearest neighbour
 *
 * @param frame the current frame to scan with the sliding-window grid
 * @return a pair of (all fern-classifier detections, the subset confirmed by the NN
 *         classifier), or {@code null} when the fern stage finds nothing
 */
private Pair<List<DetectionStruct>, List<DetectionStruct>> detect(final Mat frame) {
    Log.i(Util.TAG, "[DETECT]");
    final List<DetectionStruct> fernClassDetected = new ArrayList<Tld.DetectionStruct>(); // dt
    final List<DetectionStruct> nnMatches = new ArrayList<Tld.DetectionStruct>(); // dbb
    // 0. Cleaning
    _boxClusterMap.clear();
    // 1. DETECTION
    final Mat img = new Mat(frame.rows(), frame.cols(), CvType.CV_8U);
    updateIntegralImgs(frame);
    Imgproc.GaussianBlur(frame, img, new Size(9, 9), 1.5);
    // Apply the Variance filter  TODO : Bottleneck
    int a = 0;
    for (BoundingBox box : _grid) {
        // a) speed up by doing the features/ferns check ONLY if the variance is high enough !
        if (Util.getVar(box, _iisumJava, _iisqsumJava, _iiCols) >= _var) {
            a++;
            final Mat patch = img.submat(box);
            final int[] allFernsHashCodes = _classifierFern.getAllFernsHashCodes(patch, box.scaleIdx);
            final double averagePosterior = _classifierFern.averagePosterior(allFernsHashCodes);
            _fernDetectionNegDataForLearning.put(box, allFernsHashCodes); // store for later use in learning
            // b) keep the box only if the fern ensemble's average posterior clears the threshold
            if (averagePosterior > _classifierFern.getFernPosThreshold()) {
                fernClassDetected.add(new DetectionStruct(box, allFernsHashCodes, averagePosterior, patch));
            }
        }
    }
    Log.i(Util.TAG, a + " Bounding boxes passed the variance filter (" + _var + ")");
    Log.i(Util.TAG, fernClassDetected.size() + " Initial detected from Fern Classifier");
    if (fernClassDetected.size() == 0) {
        Log.i(Util.TAG, "[DETECT END]");
        return null;
    }
    // keep only the best MAX_DETECTED boxes, ranked by average posterior
    Util.keepBestN(fernClassDetected, MAX_DETECTED, new Comparator<DetectionStruct>() {
        @Override
        public int compare(DetectionStruct detS1, DetectionStruct detS2) {
            return Double.compare(detS1.averagePosterior, detS2.averagePosterior);
        }
    });
    // 2. MATCHING using the NN classifier c)
    for (DetectionStruct detStruct : fernClassDetected) {
        // update detStruct.patch to params.patch_size and normalise it
        Mat pattern = new Mat();
        resizeZeroMeanStdev(detStruct.patch, pattern, _params.patch_size);
        detStruct.nnConf = _classifierNN.nnConf(pattern);
        Log.i(Util.TAG, "NNConf: " + detStruct.nnConf.relativeSimilarity + " / "
                + detStruct.nnConf.conservativeSimilarity + " Threshold: " + _classifierNN.getNNThreshold());
        // only keep valid boxes
        if (detStruct.nnConf.relativeSimilarity > _classifierNN.getNNThreshold()) {
            nnMatches.add(detStruct);
        }
    }
    Log.i(Util.TAG, "[DETECT END]");
    return new Pair<List<DetectionStruct>, List<DetectionStruct>>(fernClassDetected, nnMatches);
}
From source file:com.trandi.opentld.tld.Tld.java
License:Apache License
/**
 * Agglomeratively clusters the detected bounding boxes held in {@code _boxClusterMap}
 * by overlap distance (1 - overlap) and writes each box's cluster index back into
 * the map.
 *
 * @return total clusters count ({@code _boxClusterMap} is both INPUT and OUTPUT)
 */
private int clusterBB() {
    final int size = _boxClusterMap.size();
    // need the data in arrays
    final DetectionStruct[] dbb = _boxClusterMap.keySet().toArray(new DetectionStruct[size]);
    final int[] indexes = new int[size];
    for (int i = 0; i < size; i++) {
        indexes[i] = _boxClusterMap.get(dbb[i]);
    }
    // 1. Build proximity matrix (distance = 1 - bounding-box overlap; symmetric)
    final float[] data = new float[size * size];
    for (int i = 0; i < size; i++) {
        for (int j = 0; j < size; j++) {
            final float d = 1 - dbb[i].detectedBB.calcOverlap(dbb[j].detectedBB);
            data[i * size + j] = d;
            data[j * size + i] = d;
        }
    }
    // NOTE(review): D mirrors `data` but only its rows()/cols() are read below —
    // the distances themselves are always taken from the `data` array.
    Mat D = new Mat(size, size, CvType.CV_32F);
    D.put(0, 0, data);
    // 2. Initialise disjoint clustering: every box starts as its own cluster
    final int[] belongs = new int[size];
    int m = size;
    for (int i = 0; i < size; i++) {
        belongs[i] = i;
    }
    for (int it = 0; it < size - 1; it++) {
        // 3. Find nearest neighbour pair across different clusters
        float min_d = 1;
        int node_a = -1, node_b = -1;
        for (int i = 0; i < D.rows(); i++) {
            for (int j = i + 1; j < D.cols(); j++) {
                if (data[i * size + j] < min_d && belongs[i] != belongs[j]) {
                    min_d = data[i * size + j];
                    node_a = i;
                    node_b = j;
                }
            }
        }
        // are we done ? (closest remaining pair overlaps less than 50%)
        if (min_d > 0.5) {
            int max_idx = 0;
            for (int j = 0; j < size; j++) {
                boolean visited = false;
                for (int i = 0; i < 2 * size - 1; i++) {
                    if (belongs[j] == i) {
                        // populate the correct / aggregated cluster
                        indexes[j] = max_idx;
                        visited = true;
                    }
                }
                if (visited) {
                    max_idx++;
                }
            }
            // update the main map before going back
            for (int i = 0; i < size; i++) {
                _boxClusterMap.put(dbb[i], indexes[i]);
            }
            return max_idx;
        }
        // 4. Merge clusters and assign level
        if (node_a >= 0 && node_b >= 0) { // this should always BE true, otherwise we would have returned
            for (int k = 0; k < size; k++) {
                if (belongs[k] == belongs[node_a] || belongs[k] == belongs[node_b])
                    belongs[k] = m;
            }
            m++;
        }
    }
    // there seem to be only 1 cluster
    for (int i = 0; i < size; i++) {
        _boxClusterMap.put(dbb[i], 0);
    }
    return 1;
}
From source file:com.untref.bordes.HoughCirculos.java
/**
 * Detects circles in the given image with the Hough gradient transform and returns a
 * copy of the image with each detected circle (and its center) drawn on it.
 *
 * @param screen     the input image; its raster bytes are copied into an 8UC3 Mat
 * @param acumulador accumulator threshold passed to HoughCircles (param2)
 * @param radioMin   minimum circle radius
 * @param radioMax   maximum circle radius
 * @return a new BufferedImage with the detected circles drawn
 */
public static BufferedImage implementarCiculos(BufferedImage screen, int acumulador, int radioMin, int radioMax) {
    Mat source = new Mat(screen.getHeight(), screen.getWidth(), CvType.CV_8UC3);
    byte[] data = ((DataBufferByte) screen.getRaster().getDataBuffer()).getData();
    source.put(0, 0, data);
    // NOTE(review): raster bytes from a BufferedImage are typically BGR, but the
    // conversion below uses COLOR_RGB2GRAY — grayscale weighting may be slightly off;
    // confirm the intended channel order.
    Mat destination = new Mat(source.rows(), source.cols(), source.type());
    Imgproc.cvtColor(source, destination, Imgproc.COLOR_RGB2GRAY);
    // Light blur to suppress noise before the Hough transform.
    Imgproc.GaussianBlur(destination, destination, new Size(3, 3), 0, 0);
    Mat circles = new Mat();
    Imgproc.HoughCircles(destination, circles, Imgproc.CV_HOUGH_GRADIENT, 1, 30, 10, acumulador, radioMin, radioMax);
    int radius;
    org.opencv.core.Point pt;
    // Each column of `circles` holds one detection: (x, y, radius).
    for (int x = 0; x < circles.cols(); x++) {
        double vCircle[] = circles.get(0, x);
        if (vCircle == null) {
            break;
        }
        pt = new org.opencv.core.Point(Math.round(vCircle[0]), Math.round(vCircle[1]));
        radius = (int) Math.round(vCircle[2]);
        // draw the found circle and mark its center
        Core.circle(source, pt, radius, new Scalar(150, 0, 0), 2);
        Core.circle(source, pt, 1, new Scalar(0, 0, 0), 2);
    }
    BufferedImage res = matToBufferedImage(source);
    return res;
}
From source file:com.wallerlab.compcellscope.calcDPCTask.java
License:BSD License
/**
 * Computes a differential phase contrast (DPC) image from two input frames:
 * DPC = (A - B) / (A + B), shifted and scaled into 8-bit range, converted back to
 * RGBA, then masked to a centered circle and copied into the caller-supplied output Mat.
 *
 * @param matrix_list [0] = first input frame, [1] = second input frame,
 *                    [2] = output Mat written in place (via copyTo with a circular mask)
 * @return always {@code null}; the result is delivered through matrix_list[2]
 */
protected Long doInBackground(Mat... matrix_list) {
    Mat in1 = matrix_list[0];
    Mat in2 = matrix_list[1];
    Mat outputMat = matrix_list[2];
    // NOTE(review): Mat(rows, cols) is (height, width), so width/height are swapped
    // here — copyTo below reallocates to the source size, so it works, but confirm
    // the intent. Same swap applies to the Mats created after the gray conversion.
    Mat Mat1 = new Mat(in1.width(), in1.height(), in1.type());
    Mat Mat2 = new Mat(in2.width(), in2.height(), in2.type());
    in1.copyTo(Mat1);
    in2.copyTo(Mat2);
    Imgproc.cvtColor(Mat1, Mat1, Imgproc.COLOR_RGBA2GRAY, 1);
    Imgproc.cvtColor(Mat2, Mat2, Imgproc.COLOR_RGBA2GRAY, 1);
    Mat output = new Mat(Mat1.width(), Mat1.height(), CvType.CV_8UC4);
    Mat dpcSum = new Mat(Mat1.width(), Mat1.height(), CvType.CV_32FC1);
    Mat dpcDifference = new Mat(Mat1.width(), Mat1.height(), CvType.CV_32FC1);
    Mat dpcImgF = new Mat(Mat1.width(), Mat1.height(), CvType.CV_32FC1);
    // Convert to Floats
    Mat1.convertTo(Mat1, CvType.CV_32FC1);
    Mat2.convertTo(Mat2, CvType.CV_32FC1);
    Core.add(Mat1, Mat2, dpcSum);
    Core.subtract(Mat1, Mat2, dpcDifference);
    Core.divide(dpcDifference, dpcSum, dpcImgF);
    Core.add(dpcImgF, new Scalar(1.0), dpcImgF); // Normalize to 0-2.0
    Core.multiply(dpcImgF, new Scalar(110), dpcImgF); // Normalize to 0-255
    dpcImgF.convertTo(output, CvType.CV_8UC1);
    // Convert back into RGB
    Imgproc.cvtColor(output, output, Imgproc.COLOR_GRAY2RGBA, 4);
    // Release intermediates (native memory) as soon as they are no longer needed.
    dpcSum.release();
    dpcDifference.release();
    dpcImgF.release();
    Mat1.release();
    Mat2.release();
    // Mask the result to a centered circle slightly larger than half the width.
    Mat maskedImg = Mat.zeros(output.rows(), output.cols(), CvType.CV_8UC4);
    int radius = maskedImg.width() / 2 + 25;
    Core.circle(maskedImg, new Point(maskedImg.width() / 2, maskedImg.height() / 2), radius,
            new Scalar(255, 255, 255), -1, 8, 0);
    output.copyTo(outputMat, maskedImg);
    output.release();
    maskedImg.release();
    return null;
}
From source file:com.wallerlab.compcellscope.MultiModeView.java
License:BSD License
/**
 * Opens the first back-facing camera, configures its preview (size, format, focus mode,
 * callback buffers), allocates the NV21 frame Mats, and starts the preview.
 *
 * Width/height are deliberately swapped in several places because this view runs the
 * camera in portrait while the hardware delivers landscape frames.
 *
 * @param width  desired surface width
 * @param height desired surface height
 * @return true if the camera was opened and the preview started, false otherwise
 */
protected boolean initializeCamera(int width, int height) {
    Log.d(TAG, "Initialize java camera");
    boolean result = true;
    synchronized (this) {
        mCamera = null;
        boolean connected = false;
        int numberOfCameras = android.hardware.Camera.getNumberOfCameras();
        android.hardware.Camera.CameraInfo cameraInfo = new android.hardware.Camera.CameraInfo();
        // Probe cameras until a back-facing one opens successfully.
        for (int i = 0; i < numberOfCameras; i++) {
            android.hardware.Camera.getCameraInfo(i, cameraInfo);
            if (cameraInfo.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_BACK) {
                try {
                    mCamera = Camera.open(i);
                    mCameraId = i;
                    connected = true;
                } catch (RuntimeException e) {
                    Log.e(TAG, "Camera #" + i + "failed to open: " + e.getMessage());
                }
                if (connected)
                    break;
            }
        }
        if (mCamera == null)
            return false;
        /* Now set camera parameters */
        try {
            Camera.Parameters params = mCamera.getParameters();
            Log.d(TAG, "getSupportedPreviewSizes()");
            List<Camera.Size> sizes = params.getSupportedPreviewSizes();
            if (sizes != null) {
                /* Select the size that fits surface considering maximum size allowed */
                // use turn around values here to get the correct prev size for portrait mode
                Size frameSize = calculateCameraFrameSize(sizes, new JavaCameraSizeAccessor(), height, width);
                params.setPreviewFormat(ImageFormat.NV21);
                Log.d(TAG, "Set preview size to " + Integer.valueOf((int) frameSize.width) + "x"
                        + Integer.valueOf((int) frameSize.height));
                params.setPreviewSize((int) frameSize.width, (int) frameSize.height);
                if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH)
                    params.setRecordingHint(true);
                List<String> FocusModes = params.getSupportedFocusModes();
                if (FocusModes != null && FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
                    params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
                }
                mCamera.setParameters(params);
                params = mCamera.getParameters();
                // the frame width and height of the super class are used to generate the cached
                // bitmap and they need to be the size of the resulting (portrait) frame
                mFrameWidth = params.getPreviewSize().height;
                mFrameHeight = params.getPreviewSize().width;
                // the real width and height are the width and height of the frame received
                // in onPreviewFrame (landscape)
                int realWidth = mFrameHeight;
                int realHeight = mFrameWidth;
                if ((getLayoutParams().width == LayoutParams.MATCH_PARENT)
                        && (getLayoutParams().height == LayoutParams.MATCH_PARENT))
                    mScale = Math.min(((float) height) / mFrameHeight, ((float) width) / mFrameWidth);
                else
                    mScale = 0;
                if (mFpsMeter != null) {
                    mFpsMeter.setResolution(mFrameWidth, mFrameHeight);
                }
                // NV21 buffer size: pixels * bits-per-pixel / 8.
                int size = mFrameWidth * mFrameHeight;
                size = size * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8;
                mBuffer = new byte[size];
                mCamera.addCallbackBuffer(mBuffer);
                mCamera.setPreviewCallbackWithBuffer(this);
                mFrameChain = new Mat[2];
                // the frame chain is still in landscape; rows = height * 3/2 for NV21 (Y + interleaved UV)
                mFrameChain[0] = new Mat(realHeight + (realHeight / 2), realWidth, CvType.CV_8UC1);
                mFrameChain[1] = new Mat(realHeight + (realHeight / 2), realWidth, CvType.CV_8UC1);
                AllocateCache();
                mCameraFrame = new JavaCameraFrame[2];
                // the camera frame is in portrait
                mCameraFrame[0] = new JavaCameraFrame(mFrameChain[0], mFrameWidth, mFrameHeight);
                mCameraFrame[1] = new JavaCameraFrame(mFrameChain[1], mFrameWidth, mFrameHeight);
                if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
                    mSurfaceTexture = new SurfaceTexture(MAGIC_TEXTURE_ID);
                    mCamera.setPreviewTexture(mSurfaceTexture);
                } else
                    mCamera.setPreviewDisplay(null);
                /* Finally we are ready to start the preview */
                Log.d(TAG, "startPreview");
                mCamera.startPreview();
            } else
                result = false;
        } catch (Exception e) {
            result = false;
            e.printStackTrace();
        }
    }
    return result;
}