Example usage for java.util Collections max

List of usage examples for java.util Collections max

Introduction

On this page you can find example usage of the java.util.Collections.max method.

Prototype

@SuppressWarnings({ "unchecked", "rawtypes" })
public static <T> T max(Collection<? extends T> coll, Comparator<? super T> comp) 

Document

Returns the maximum element of the given collection, according to the order induced by the specified comparator.
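
For orientation, here is a minimal, self-contained sketch of how Collections.max can be combined with a Comparator. It is not taken from any of the projects below; the class and variable names are illustrative only.

import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;

public class CollectionsMaxDemo {
    public static void main(String[] args) {
        List<String> words = Arrays.asList("pear", "banana", "fig", "apricot");

        // Maximum by a custom ordering: here, the longest string.
        String longest = Collections.max(words, Comparator.comparingInt(String::length));
        System.out.println(longest); // prints "apricot"

        // The single-argument overload uses natural (lexicographic) ordering instead.
        String last = Collections.max(words);
        System.out.println(last); // prints "pear"
    }
}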

Usage

From source file:com.example.camera2apidemo.Camera2BasicFragment.java

/**
 * Sets up member variables related to camera.
 *
 * @param width  The width of available size for camera preview
 * @param height The height of available size for camera preview
 */
private void setUpCameraOutputs(int width, int height) {
    Activity activity = getActivity();
    CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
    try {
        for (String cameraId : manager.getCameraIdList()) {
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);

            checkCamera2Support(characteristics);
            // We don't use a front facing camera in this sample.
            Integer facing = characteristics.get(LENS_FACING);
            if (facing != null && facing == LENS_FACING_FRONT) {
                continue;
            }

            StreamConfigurationMap map = characteristics.get(SCALER_STREAM_CONFIGURATION_MAP);
            if (map == null) {
                continue;
            }
            // For still image captures, we use the largest available size.
            // maxImages limits how many images the ImageReader can hold open at once.
            Size largest = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)),
                    new CompareSizesByArea());
            mImageReader = ImageReader.newInstance(largest.getWidth(), largest.getHeight(), ImageFormat.JPEG,
                    /*maxImages*/2);
            mImageReader.setOnImageAvailableListener(mOnImageAvailableListener, mBackgroundHandler);

            // Find out if we need to swap dimension to get the preview size relative to sensor
            // coordinate.
            int displayRotation = activity.getWindowManager().getDefaultDisplay().getRotation();
            //noinspection ConstantConditions
            mSensorOrientation = characteristics.get(SENSOR_ORIENTATION);
            boolean swappedDimensions = false;
            switch (displayRotation) {
            case Surface.ROTATION_0:
            case Surface.ROTATION_180:
                if (mSensorOrientation == 90 || mSensorOrientation == 270) {
                    swappedDimensions = true;
                }
                break;
            case Surface.ROTATION_90:
            case Surface.ROTATION_270:
                if (mSensorOrientation == 0 || mSensorOrientation == 180) {
                    swappedDimensions = true;
                }
                break;
            default:
                Log.e(TAG, "Display rotation is invalid: " + displayRotation);
            }
            Point displaySize = new Point();
            activity.getWindowManager().getDefaultDisplay().getSize(displaySize);
            int rotatedPreviewWidth = width;
            int rotatedPreviewHeight = height;
            int maxPreviewWidth = displaySize.x;
            int maxPreviewHeight = displaySize.y;

            if (swappedDimensions) {
                rotatedPreviewWidth = height;
                rotatedPreviewHeight = width;
                maxPreviewWidth = displaySize.y;
                maxPreviewHeight = displaySize.x;
            }
            if (maxPreviewWidth > MAX_PREVIEW_WIDTH) {
                maxPreviewWidth = MAX_PREVIEW_WIDTH;
            }

            if (maxPreviewHeight > MAX_PREVIEW_HEIGHT) {
                maxPreviewHeight = MAX_PREVIEW_HEIGHT;
            }

            // Danger, W.R.! Attempting to use too large a preview size could  exceed the camera
            // bus' bandwidth limitation, resulting in gorgeous previews but the storage of
            // garbage capture data.
            // Note: map.getOutputSizes(SurfaceTexture.class) lists the sizes supported for SurfaceTexture output.
            mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), rotatedPreviewWidth,
                    rotatedPreviewHeight, maxPreviewWidth, maxPreviewHeight, largest);

            // We fit the aspect ratio of TextureView to the size of preview we picked.
            int orientation = getResources().getConfiguration().orientation;
            if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
                mTextureView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight());
            } else {
                mTextureView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
            }

            // Check if the flash is supported.
            Boolean available = characteristics.get(FLASH_INFO_AVAILABLE);
            mFlashSupported = available == null ? false : available;

            mCameraId = cameraId;
            return;
        }
    } catch (CameraAccessException e) {
        e.printStackTrace();
    } catch (NullPointerException e) {
        // Currently an NPE is thrown when the Camera2API is used but not supported on the
        // device this code runs.
        ErrorDialog.newInstance(getString(R.string.camera_error)).show(getChildFragmentManager(),
                FRAGMENT_DIALOG);
    }
}
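
The Camera2 excerpts on this page pass a CompareSizesByArea comparator that is defined elsewhere in those samples and is not shown here. As a rough sketch of what such an area-based comparator can look like (a hypothetical stand-in, assuming android.util.Size), it might be written as:

import android.util.Size;

import java.util.Comparator;

// Hypothetical stand-in for the CompareSizesByArea class referenced in the excerpts on this page.
class CompareSizesByArea implements Comparator<Size> {
    @Override
    public int compare(Size lhs, Size rhs) {
        // Compare by pixel area; multiply as long so that large sensor sizes cannot overflow int.
        return Long.signum((long) lhs.getWidth() * lhs.getHeight()
                - (long) rhs.getWidth() * rhs.getHeight());
    }
}

Passed to Collections.max over Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)), such a comparator selects the output size with the largest pixel count.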

From source file:net.sourceforge.fenixedu.domain.student.Registration.java

public StudentCurricularPlan getLastStudentCurricularPlan() {
    final Set<StudentCurricularPlan> studentCurricularPlans = getStudentCurricularPlansSet();

    if (studentCurricularPlans.isEmpty()) {
        return null;
    }
    return Collections.max(studentCurricularPlans,
            StudentCurricularPlan.STUDENT_CURRICULAR_PLAN_COMPARATOR_BY_START_DATE);
}
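
The isEmpty() check above is not just defensive style: Collections.max throws NoSuchElementException when the collection is empty. A minimal, self-contained illustration (the class name below is made up for the demo):

import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.NoSuchElementException;

public class EmptyCollectionDemo {
    public static void main(String[] args) {
        List<Integer> empty = Collections.emptyList();
        try {
            Collections.max(empty, Comparator.<Integer>naturalOrder());
        } catch (NoSuchElementException e) {
            // Collections.max has no result for an empty collection, so callers
            // either check isEmpty() first (as Registration does) or catch this exception.
            System.out.println("empty collection has no maximum");
        }
    }
}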

From source file:com.example.android.camera2raw.Camera2RawFragment.java

/**
 * Sets up state related to camera that is needed before opening a {@link CameraDevice}.
 */
private boolean setUpCameraOutputs() {
    Activity activity = getActivity();
    CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
    if (manager == null) {
        ErrorDialog.buildErrorDialog("This device doesn't support Camera2 API.").show(getFragmentManager(),
                "dialog");
        return false;
    }
    try {
        // Find a CameraDevice that supports RAW captures, and configure state.
        for (String cameraId : manager.getCameraIdList()) {
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);

            // We only use a camera that supports RAW in this sample.
            if (!contains(characteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES),
                    CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_RAW)) {
                continue;
            }

            StreamConfigurationMap map = characteristics
                    .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);

            // For still image captures, we use the largest available size.
            Size largestJpeg = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)),
                    new CompareSizesByArea());

            Size largestRaw = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.RAW_SENSOR)),
                    new CompareSizesByArea());

            synchronized (mCameraStateLock) {
                // Set up ImageReaders for JPEG and RAW outputs.  Place these in a reference
                // counted wrapper to ensure they are only closed when all background tasks
                // using them are finished.
                if (mJpegImageReader == null || mJpegImageReader.getAndRetain() == null) {
                    mJpegImageReader = new RefCountedAutoCloseable<>(ImageReader.newInstance(
                            largestJpeg.getWidth(), largestJpeg.getHeight(), ImageFormat.JPEG, /*maxImages*/5));
                }
                mJpegImageReader.get().setOnImageAvailableListener(mOnJpegImageAvailableListener,
                        mBackgroundHandler);

                if (mRawImageReader == null || mRawImageReader.getAndRetain() == null) {
                    mRawImageReader = new RefCountedAutoCloseable<>(
                            ImageReader.newInstance(largestRaw.getWidth(), largestRaw.getHeight(),
                                    ImageFormat.RAW_SENSOR, /*maxImages*/ 5));
                }
                mRawImageReader.get().setOnImageAvailableListener(mOnRawImageAvailableListener,
                        mBackgroundHandler);

                mCharacteristics = characteristics;
                mCameraId = cameraId;
            }
            return true;
        }
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }

    // If we found no suitable cameras for capturing RAW, warn the user.
    ErrorDialog.buildErrorDialog("This device doesn't support capturing RAW photos").show(getFragmentManager(),
            "dialog");
    return false;
}

From source file:com.ape.camera2raw.Camera2RawFragment.java

/**
 * Sets up state related to camera that is needed before opening a {@link CameraDevice}.
 */
private boolean setUpCameraOutputs() {
    Activity activity = getActivity();
    CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
    if (manager == null) {
        ErrorDialog.buildErrorDialog("This device doesn't support Camera2 API.").show(getFragmentManager(),
                "dialog");
        return false;
    }
    try {
        // Find a CameraDevice that supports RAW captures, and configure state.
        for (String cameraId : manager.getCameraIdList()) {
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);

            // We only use a camera that supports RAW in this sample.
            if (!contains(characteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES),
                    CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_RAW)) {
                continue;
            }

            StreamConfigurationMap map = characteristics
                    .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);

            // For still image captures, we use the largest available size.
            Size largestJpeg = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)),
                    new CompareSizesByArea());

            Size largestRaw = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.RAW_SENSOR)),
                    new CompareSizesByArea());
            Log.d("WAY", "largestRaw = " + largestRaw);
            //largestRaw = new Size(4208, 3120);//9051

            synchronized (mCameraStateLock) {
                // Set up ImageReaders for JPEG and RAW outputs.  Place these in a reference
                // counted wrapper to ensure they are only closed when all background tasks
                // using them are finished.
                if (mJpegImageReader == null || mJpegImageReader.getAndRetain() == null) {
                    mJpegImageReader = new RefCountedAutoCloseable<>(ImageReader.newInstance(
                            largestJpeg.getWidth(), largestJpeg.getHeight(), ImageFormat.JPEG, /*maxImages*/5));
                }
                mJpegImageReader.get().setOnImageAvailableListener(mOnJpegImageAvailableListener,
                        mBackgroundHandler);

                if (mRawImageReader == null || mRawImageReader.getAndRetain() == null) {
                    mRawImageReader = new RefCountedAutoCloseable<>(
                            ImageReader.newInstance(largestRaw.getWidth(), largestRaw.getHeight(),
                                    ImageFormat.RAW_SENSOR, /*maxImages*/ 5));
                }
                mRawImageReader.get().setOnImageAvailableListener(mOnRawImageAvailableListener,
                        mBackgroundHandler);

                mCharacteristics = characteristics;
                mCameraId = cameraId;
            }
            return true;
        }
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }

    // If we found no suitable cameras for capturing RAW, warn the user.
    ErrorDialog.buildErrorDialog("This device doesn't support capturing RAW photos").show(getFragmentManager(),
            "dialog");
    return false;
}

From source file:com.quectel.camera2test.Camera2RawFragment.java

/**
 * Sets up state related to camera that is needed before opening a {@link CameraDevice}.
 */
private boolean setUpCameraOutputs() {
    Activity activity = getActivity();
    CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
    if (manager == null) {
        ErrorDialog.buildErrorDialog("This device doesn't support Camera2 API.").show(getFragmentManager(),
                "dialog");
        return false;
    }
    try {
        // Find a CameraDevice that supports RAW captures, and configure state.
        for (String cameraId : manager.getCameraIdList()) {
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
            Log.d(TAG, "---characteristics = " + characteristics);
            // We only use a camera that supports RAW in this sample.
            if (!contains(characteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES),
                    CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_RAW)) {
                Log.d(TAG, "-1--characteristics continue");
                continue;
            }

            Log.d(TAG, "-1--characteristics = " + characteristics);
            StreamConfigurationMap map = characteristics
                    .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);

            // For still image captures, we use the largest available size.
            Size largestJpeg = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)),
                    new CompareSizesByArea());

            Size largestRaw = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.RAW_SENSOR)),
                    new CompareSizesByArea());

            synchronized (mCameraStateLock) {
                // Set up ImageReaders for JPEG and RAW outputs.  Place these in a reference
                // counted wrapper to ensure they are only closed when all background tasks
                // using them are finished.
                if (mJpegImageReader == null || mJpegImageReader.getAndRetain() == null) {
                    mJpegImageReader = new RefCountedAutoCloseable<>(ImageReader.newInstance(
                            largestJpeg.getWidth(), largestJpeg.getHeight(), ImageFormat.JPEG, /*maxImages*/5));
                }
                mJpegImageReader.get().setOnImageAvailableListener(mOnJpegImageAvailableListener,
                        mBackgroundHandler);

                if (mRawImageReader == null || mRawImageReader.getAndRetain() == null) {
                    mRawImageReader = new RefCountedAutoCloseable<>(
                            ImageReader.newInstance(largestRaw.getWidth(), largestRaw.getHeight(),
                                    ImageFormat.RAW_SENSOR, /*maxImages*/ 5));
                }
                mRawImageReader.get().setOnImageAvailableListener(mOnRawImageAvailableListener,
                        mBackgroundHandler);

                mCharacteristics = characteristics;
                mCameraId = cameraId;
            }
            return true;
        }
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }

    // If we found no suitable cameras for capturing RAW, warn the user.
    ErrorDialog.buildErrorDialog("This device doesn't support capturing RAW photos").show(getFragmentManager(),
            "dialog");
    return false;
}

From source file:net.ddns.mlsoftlaberge.trycorder.TryviscamFragment.java

/**
 * Sets up member variables related to camera.
 *
 * @param width  The width of available size for camera preview
 * @param height The height of available size for camera preview
 */
private void setUpCameraOutputs(int width, int height) {
    Activity activity = getActivity();
    CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
    try {
        for (String cameraId : manager.getCameraIdList()) {
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);

            // We don't use a front facing camera in this sample.
            Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
            if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
                continue;
            }

            StreamConfigurationMap map = characteristics
                    .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
            if (map == null) {
                continue;
            }

            // For still image captures, we use the largest available size.
            Size largest = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)),
                    new CompareSizesByArea());
            mImageReader = ImageReader.newInstance(largest.getWidth(), largest.getHeight(), ImageFormat.JPEG,
                    /*maxImages*/2);
            mImageReader.setOnImageAvailableListener(mOnImageAvailableListener, mBackgroundHandler);

            // Find out if we need to swap dimension to get the preview size relative to sensor
            // coordinate.
            int displayRotation = activity.getWindowManager().getDefaultDisplay().getRotation();
            //noinspection ConstantConditions
            mSensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
            boolean swappedDimensions = false;
            switch (displayRotation) {
            case Surface.ROTATION_0:
            case Surface.ROTATION_180:
                if (mSensorOrientation == 90 || mSensorOrientation == 270) {
                    swappedDimensions = true;
                }
                break;
            case Surface.ROTATION_90:
            case Surface.ROTATION_270:
                if (mSensorOrientation == 0 || mSensorOrientation == 180) {
                    swappedDimensions = true;
                }
                break;
            default:
                Log.e(TAG, "Display rotation is invalid: " + displayRotation);
            }

            Point displaySize = new Point();
            activity.getWindowManager().getDefaultDisplay().getSize(displaySize);
            int rotatedPreviewWidth = width;
            int rotatedPreviewHeight = height;
            int maxPreviewWidth = displaySize.x;
            int maxPreviewHeight = displaySize.y;

            if (swappedDimensions) {
                rotatedPreviewWidth = height;
                rotatedPreviewHeight = width;
                maxPreviewWidth = displaySize.y;
                maxPreviewHeight = displaySize.x;
            }

            if (maxPreviewWidth > MAX_PREVIEW_WIDTH) {
                maxPreviewWidth = MAX_PREVIEW_WIDTH;
            }

            if (maxPreviewHeight > MAX_PREVIEW_HEIGHT) {
                maxPreviewHeight = MAX_PREVIEW_HEIGHT;
            }

            // Danger, W.R.! Attempting to use too large a preview size could  exceed the camera
            // bus' bandwidth limitation, resulting in gorgeous previews but the storage of
            // garbage capture data.
            mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), rotatedPreviewWidth,
                    rotatedPreviewHeight, maxPreviewWidth, maxPreviewHeight, largest);

            // We fit the aspect ratio of TextureView to the size of preview we picked.
            int orientation = getResources().getConfiguration().orientation;
            if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
                mTextureView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight());
            } else {
                mTextureView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
            }

            // Check if the flash is supported.
            Boolean available = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE);
            mFlashSupported = available == null ? false : available;

            mCameraId = cameraId;
            return;
        }
    } catch (CameraAccessException e) {
        e.printStackTrace();
    } catch (NullPointerException e) {
        // Currently an NPE is thrown when the Camera2API is used but not supported on the
        // device this code runs.
        //ErrorDialog.newInstance(getString(R.string.camera_error))
        //        .show(getChildFragmentManager(), FRAGMENT_DIALOG);
        e.printStackTrace();
    }
}

From source file:org.kuali.kra.protocol.noteattachment.NotesAttachmentsHelperBase.java

/** gets the next entry number based on previously generated numbers. */
private Integer getNextEntryNumber() {
    final Collection<ProtocolNotepadBase> notepads = this.getProtocol().getNotepads();
    final Integer maxEntry = notepads.isEmpty() ? Integer.valueOf(0)
            : Collections.max(notepads, ProtocolNotepadBase.NotepadByEntryNumber.INSTANCE).getEntryNumber();
    return Integer.valueOf(maxEntry.intValue() + 1);
}

From source file:org.apache.hadoop.hbase.client.HBaseFsck.java

/**
 * Scan .META. and -ROOT-, adding all regions found to the regionInfo map.
 * @throws IOException if an error is encountered
 */
void getMetaEntries() throws IOException {
    MetaScannerVisitor visitor = new MetaScannerVisitor() {
        int countRecord = 1;

        // comparator to sort KeyValues with latest modtime
        final Comparator<KeyValue> comp = new Comparator<KeyValue>() {
            public int compare(KeyValue k1, KeyValue k2) {
                return (int) (k1.getTimestamp() - k2.getTimestamp());
            }
        };

        public boolean processRow(Result result) throws IOException {
            try {

                // record the latest modification of this META record
                long ts = Collections.max(result.list(), comp).getTimestamp();

                // record region details
                byte[] value = result.getValue(HConstants.CATALOG_FAMILY, HConstants.REGIONINFO_QUALIFIER);
                HRegionInfo info = null;
                HServerAddress server = null;
                byte[] startCode = null;
                if (value != null) {
                    info = Writables.getHRegionInfo(value);
                }

                // record assigned region server
                value = result.getValue(HConstants.CATALOG_FAMILY, HConstants.SERVER_QUALIFIER);
                if (value != null && value.length > 0) {
                    String address = Bytes.toString(value);
                    server = new HServerAddress(address);
                }

                // record region's start key
                value = result.getValue(HConstants.CATALOG_FAMILY, HConstants.STARTCODE_QUALIFIER);
                if (value != null) {
                    startCode = value;
                }
                MetaEntry m = new MetaEntry(info, server, startCode, ts);
                HbckInfo hbInfo = new HbckInfo(m);
                HbckInfo previous = regionInfo.put(info.getEncodedName(), hbInfo);
                if (previous != null) {
                    throw new IOException("Two entries in META are same " + previous);
                }

                // show proof of progress to the user, once for every 100 records.
                if (countRecord % 100 == 0) {
                    errors.progress();
                }
                countRecord++;
                return true;
            } catch (RuntimeException e) {
                LOG.error("Result=" + result);
                throw e;
            }
        }
    };

    // Scan -ROOT- to pick up META regions
    MetaScanner.metaScan(conf, visitor, HConstants.ROOT_TABLE_NAME, HConstants.EMPTY_START_ROW, null,
            Integer.MAX_VALUE);

    // Scan .META. to pick up user regions
    MetaScanner.metaScan(conf, visitor);
    errors.print("");
}
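
A side note on the inline comparator in this excerpt: subtracting two long timestamps and casting the result to int can overflow and flip the sign when the timestamps are far apart. A safer pattern, sketched below with a hypothetical Stamped class rather than HBase's KeyValue, is Comparator.comparingLong (or Long.compare):

import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;

public class LatestTimestampDemo {
    // Hypothetical stand-in for a timestamped cell (e.g. an HBase KeyValue); illustrative only.
    static class Stamped {
        final String name;
        final long timestamp;
        Stamped(String name, long timestamp) { this.name = name; this.timestamp = timestamp; }
    }

    public static void main(String[] args) {
        // A gap of about 3 billion would flip sign under an "(int) (a - b)" comparator.
        List<Stamped> cells = Arrays.asList(new Stamped("old", 0L), new Stamped("new", 3_000_000_000L));

        // comparingLong compares the long values directly, with no lossy cast.
        Stamped latest = Collections.max(cells, Comparator.comparingLong((Stamped s) -> s.timestamp));
        System.out.println(latest.name); // prints "new"
    }
}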

From source file:org.kuali.kra.irb.noteattachment.NotesAttachmentsHelper.java

/** gets the next entry number based on previously generated numbers. */
private Integer getNextEntryNumber() {
    final Collection<ProtocolNotepad> notepads = this.getProtocol().getNotepads();
    final Integer maxEntry = notepads.isEmpty() ? Integer.valueOf(0)
            : Collections.max(notepads, ProtocolNotepad.NotepadByEntryNumber.INSTANCE).getEntryNumber();
    return Integer.valueOf(maxEntry.intValue() + 1);
}

From source file:com.isentropy.accumulo.collections.AccumuloSortedMap.java

/**
 * is there no way in accumulo to efficiently scan to last key??
 * this method uses count iterator (which returns last key from each tablet server)
 * 
 * @return
 */
@Override
public K lastKey() {
    K k = Collections.max(deriveMap(new CountsDerivedMapper()).keySet(), comparator());
    return k;
}