List of usage examples for android.graphics.RectF.offset
public void offset(float dx, float dy)
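Before the project examples, here is a minimal self-contained sketch (class name and values are illustrative, not taken from any of the projects below) of the basic contract: offset(dx, dy) adds dx to left and right and dy to top and bottom, translating the rectangle without changing its size.

import android.graphics.RectF;

final class RectFOffsetDemo {
    static RectF demo() {
        RectF r = new RectF(10f, 20f, 110f, 220f); // 100 x 200
        r.offset(5f, -20f);                        // move right by 5, up by 20
        // r is now (15.0, 0.0, 115.0, 200.0); width and height are unchanged.
        return r;
    }
}

When the target position is absolute rather than relative, offsetTo(newLeft, newTop) moves the rectangle so its top-left corner lands at the given coordinates.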
From source file: com.ionesmile.variousdemo.view.HorizontalPicker.java
@Override
protected void onDraw(Canvas canvas) {
    super.onDraw(canvas);
    int saveCount = canvas.getSaveCount();
    canvas.save();
    int selectedItem = this.selectedItem;
    float itemWithPadding = itemWidth + dividerSize;
    // translate horizontally to center
    canvas.translate(itemWithPadding * sideItems, 0);
    if (values != null) {
        for (int i = 0; i < values.length; i++) {
            // set text color for item
            textPaint.setColor(getTextColor(i));
            textPaint.setTextSize(getTextSize(i));
            // get text layout
            BoringLayout layout = layouts[i];
            int saveCountHeight = canvas.getSaveCount();
            canvas.save();
            float x = 0;
            float lineWidth = layout.getLineWidth(0);
            if (lineWidth > itemWidth) {
                if (isRtl(values[i])) {
                    x += (lineWidth - itemWidth) / 2;
                } else {
                    x -= (lineWidth - itemWidth) / 2;
                }
            }
            if (marquee != null && i == selectedItem) {
                x += marquee.getScroll();
            }
            // translate vertically to center
            canvas.translate(-x, (canvas.getHeight() - layout.getHeight()) / 2);
            RectF clipBounds;
            if (x == 0) {
                clipBounds = itemClipBounds;
            } else {
                clipBounds = itemClipBoundsOffset;
                clipBounds.set(itemClipBounds);
                clipBounds.offset(x, 0);
            }
            canvas.clipRect(clipBounds);
            layout.draw(canvas);
            if (marquee != null && i == selectedItem && marquee.shouldDrawGhost()) {
                canvas.translate(marquee.getGhostOffset(), 0);
                layout.draw(canvas);
            }
            // restore vertical translation
            canvas.restoreToCount(saveCountHeight);
            // translate horizontally by one item
            canvas.translate(itemWithPadding, 0);
        }
    }
    // restore horizontal translation
    canvas.restoreToCount(saveCount);
    drawEdgeEffect(canvas, leftEdgeEffect, 270);
    drawEdgeEffect(canvas, rightEdgeEffect, 90);
}
From source file: im.ene.lab.design.widget.coverflow.FeatureCoverFlow.java
private void getScrolledTransformedChildRectangle(View child, RectF r) {
    transformChildHitRectangle(child, r);
    final int offset = child.getLeft() - getScrollX();
    r.offset(offset, child.getTop());
}
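The FeatureCoverFlow example scrolls only horizontally, so it subtracts getScrollX() on the x-axis and uses the child's top directly. A hedged, standalone sketch of the same pattern (hypothetical helper and names), generalized to both axes:

import android.graphics.RectF;
import android.view.View;

final class ScrolledChildBounds {
    // Start from the child's local bounds, then translate them by the child's
    // position minus the parent's current scroll.
    static void childRectInParent(View child, int scrollX, int scrollY, RectF out) {
        out.set(0f, 0f, child.getWidth(), child.getHeight());
        out.offset(child.getLeft() - scrollX, child.getTop() - scrollY);
    }
}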
From source file: com.appunite.scroll.ScaleImageView.java
private void getImageRect(RectF rect) {
    rect.set(mSrcRect);
    final float width = Math.max(mSrcRect.width() * mScale,
            mContentRect.width() - mMarginLeft - mMarginRight);
    final float height = Math.max(mSrcRect.height() * mScale,
            mContentRect.height() - mMarginTop - mMarginBottom);
    final float width2 = width / 2;
    final float height2 = height / 2;
    if (mUsedAlignType == ALIGN_BOTTOM) {
        rect.set(-width2, -height, width2, 0);
    } else if (mUsedAlignType == ALIGN_TOP) {
        rect.set(-width2, 0, width2, height);
    } else if (mUsedAlignType == ALIGN_CENTER_VERTICAL || mUsedAlignType == ALIGN_CENTER_HORIZONTAL) {
        rect.set(-width2, -height2, width2, height2);
    } else if (mUsedAlignType == ALIGN_LEFT) {
        rect.set(0, -height2, width, height2);
    } else if (mUsedAlignType == ALIGN_RIGHT) {
        rect.set(-width, -height2, 0, height2);
    }
    getRealTranslation(mTranslation, mRealTranslation);
    rect.offset(mRealTranslation.x, mRealTranslation.y);
}
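The example above builds a rectangle around the origin according to the alignment and then pans it into place with offset(). A minimal hypothetical sketch of that idea:

import android.graphics.RectF;

final class CenteredRectSketch {
    // Build a rect of the given size centered on the origin, then move it so
    // its center lands at (cx, cy).
    static RectF centeredAt(float cx, float cy, float width, float height) {
        RectF r = new RectF(-width / 2f, -height / 2f, width / 2f, height / 2f);
        r.offset(cx, cy);
        return r;
    }
}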
From source file: com.ape.camera2raw.Camera2RawFragment.java
/**
 * Configure the necessary {@link Matrix} transformation to `mTextureView`,
 * and start/restart the preview capture session if necessary.
 * <p/>
 * This method should be called after the camera state has been initialized in
 * setUpCameraOutputs.
 *
 * @param viewWidth  The width of `mTextureView`
 * @param viewHeight The height of `mTextureView`
 */
private void configureTransform(int viewWidth, int viewHeight) {
    Activity activity = getActivity();
    synchronized (mCameraStateLock) {
        if (null == mTextureView || null == activity) {
            return;
        }
        StreamConfigurationMap map = mCharacteristics
                .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        // For still image captures, we always use the largest available size.
        Size largestJpeg = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)),
                new CompareSizesByArea());
        // Find the rotation of the device relative to the native device orientation.
        int deviceRotation = activity.getWindowManager().getDefaultDisplay().getRotation();
        Point displaySize = new Point();
        activity.getWindowManager().getDefaultDisplay().getSize(displaySize);
        // Find the rotation of the device relative to the camera sensor's orientation.
        int totalRotation = sensorToDeviceRotation(mCharacteristics, deviceRotation);
        // Swap the view dimensions for calculation as needed if they are rotated relative to
        // the sensor.
        boolean swappedDimensions = totalRotation == 90 || totalRotation == 270;
        int rotatedViewWidth = viewWidth;
        int rotatedViewHeight = viewHeight;
        int maxPreviewWidth = displaySize.x;
        int maxPreviewHeight = displaySize.y;
        if (swappedDimensions) {
            rotatedViewWidth = viewHeight;
            rotatedViewHeight = viewWidth;
            maxPreviewWidth = displaySize.y;
            maxPreviewHeight = displaySize.x;
        }
        // Preview should not be larger than display size and 1080p.
        if (maxPreviewWidth > MAX_PREVIEW_WIDTH) {
            maxPreviewWidth = MAX_PREVIEW_WIDTH;
        }
        if (maxPreviewHeight > MAX_PREVIEW_HEIGHT) {
            maxPreviewHeight = MAX_PREVIEW_HEIGHT;
        }
        // Find the best preview size for these view dimensions and configured JPEG size.
        Size previewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class),
                rotatedViewWidth, rotatedViewHeight, maxPreviewWidth, maxPreviewHeight,
                largestJpeg);
        if (swappedDimensions) {
            mTextureView.setAspectRatio(previewSize.getHeight(), previewSize.getWidth());
        } else {
            mTextureView.setAspectRatio(previewSize.getWidth(), previewSize.getHeight());
        }
        // Find rotation of device in degrees (reverse device orientation for front-facing
        // cameras).
        int rotation = (mCharacteristics.get(CameraCharacteristics.LENS_FACING)
                == CameraCharacteristics.LENS_FACING_FRONT)
                ? (360 + ORIENTATIONS.get(deviceRotation)) % 360
                : (360 - ORIENTATIONS.get(deviceRotation)) % 360;
        Matrix matrix = new Matrix();
        RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
        RectF bufferRect = new RectF(0, 0, previewSize.getHeight(), previewSize.getWidth());
        float centerX = viewRect.centerX();
        float centerY = viewRect.centerY();
        // Initially, output stream images from the Camera2 API will be rotated to the native
        // device orientation from the sensor's orientation, and the TextureView will default to
        // scaling these buffers to fill its view bounds. If the aspect ratios and relative
        // orientations are correct, this is fine.
        //
        // However, if the device orientation has been rotated relative to its native
        // orientation so that the TextureView's dimensions are swapped relative to the
        // native device orientation, we must do the following to ensure the output stream
        // images are not incorrectly scaled by the TextureView:
        // - Undo the scale-to-fill from the output buffer's dimensions (i.e. its dimensions
        //   in the native device orientation) to the TextureView's dimensions.
        // - Apply a scale-to-fill from the output buffer's rotated dimensions
        //   (i.e. its dimensions in the current device orientation) to the TextureView's
        //   dimensions.
        // - Apply the rotation from the native device orientation to the current device
        //   rotation.
        if (Surface.ROTATION_90 == deviceRotation || Surface.ROTATION_270 == deviceRotation) {
            bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
            matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
            float scale = Math.max((float) viewHeight / previewSize.getHeight(),
                    (float) viewWidth / previewSize.getWidth());
            matrix.postScale(scale, scale, centerX, centerY);
        }
        matrix.postRotate(rotation, centerX, centerY);
        mTextureView.setTransform(matrix);
        // Start or restart the active capture session if the preview was initialized or
        // if its aspect ratio changed significantly.
        if (mPreviewSize == null || !checkAspectsEqual(previewSize, mPreviewSize)) {
            mPreviewSize = previewSize;
            if (mState != STATE_CLOSED) {
                createCameraPreviewSessionLocked();
            }
        }
    }
}
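The key offset() call in this example centers bufferRect on viewRect before setRectToRect computes the un-scaling transform. As a hypothetical standalone helper (names are illustrative), that idiom looks like:

import android.graphics.RectF;

final class CenterOnSketch {
    // Translate src so its center coincides with target's center; size unchanged.
    static void centerOn(RectF src, RectF target) {
        src.offset(target.centerX() - src.centerX(), target.centerY() - src.centerY());
    }
}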