List of usage examples for java.util.EnumSet.allOf
public static <E extends Enum<E>> EnumSet<E> allOf(Class<E> elementType)
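The method returns a mutable EnumSet containing every constant of the given enum type; the examples below show it in real code. As a minimal standalone sketch (the Day enum is hypothetical, used only to illustrate the call):

    import java.util.EnumSet;

    public class AllOfExample {
        // Hypothetical enum, used only to illustrate the call.
        enum Day { MON, TUE, WED, THU, FRI, SAT, SUN }

        public static void main(String[] args) {
            // allOf returns a mutable EnumSet containing every constant of Day.
            EnumSet<Day> all = EnumSet.allOf(Day.class);
            System.out.println(all.size()); // 7

            // The returned set is mutable, so members can be removed afterwards.
            all.remove(Day.SAT);
            all.remove(Day.SUN);
            System.out.println(all); // [MON, TUE, WED, THU, FRI]
        }
    }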
From source file:org.apache.hadoop.yarn.client.api.impl.YarnClientImpl.java
@Override
public List<NodeReport> getNodeReports(NodeState... states) throws YarnException, IOException {
    EnumSet<NodeState> statesSet = (states.length == 0) ? EnumSet.allOf(NodeState.class)
            : EnumSet.noneOf(NodeState.class);
    for (NodeState state : states) {
        statesSet.add(state);
    }
    GetClusterNodesRequest request = GetClusterNodesRequest.newInstance(statesSet);
    GetClusterNodesResponse response = rmClient.getClusterNodes(request);
    return response.getNodeReports();
}
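The example above defaults to the full universe of states when the caller passes none. A standalone sketch of the same default-to-all pattern, assuming only the JDK (the NodeState constants below are an illustrative subset, not the full YARN enum):

    import java.util.EnumSet;

    public class DefaultToAll {
        // Illustrative subset of YARN's NodeState constants.
        enum NodeState { NEW, RUNNING, UNHEALTHY, DECOMMISSIONED }

        static EnumSet<NodeState> toSet(NodeState... states) {
            if (states.length == 0) {
                return EnumSet.allOf(NodeState.class); // no filter means "all states"
            }
            EnumSet<NodeState> set = EnumSet.noneOf(NodeState.class);
            for (NodeState s : states) {
                set.add(s);
            }
            return set;
        }

        public static void main(String[] args) {
            System.out.println(toSet());                  // all four constants
            System.out.println(toSet(NodeState.RUNNING)); // [RUNNING]
        }
    }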
From source file:org.photovault.imginfo.PhotoInfo.java
/**
 * Creates new thumbnail and preview instances for this image on a specific volume.
 * @param volume The volume in which the instance is to be created
 * @deprecated Use {@link PhotoInstanceCreator} instead
 */
protected void createThumbnail(VolumeBase volume, boolean createPreview) {
    log.debug("Creating thumbnail for " + getUuid());

    // Maximum size of the thumbnail
    int maxThumbWidth = 100;
    int maxThumbHeight = 100;
    checkCropBounds();

    /*
     * Determine the minimum size for the instance used for thumbnail creation
     * to get decent image quality. The cropped portion of the image must be
     * roughly the same resolution as the intended thumbnail.
     */
    double cropWidth = cropMaxX - cropMinX;
    cropWidth = (cropWidth > 0.000001) ? cropWidth : 1.0;
    double cropHeight = cropMaxY - cropMinY;
    cropHeight = (cropHeight > 0.000001) ? cropHeight : 1.0;
    int minInstanceWidth = (int) (((double) maxThumbWidth) / cropWidth);
    int minInstanceHeight = (int) (((double) maxThumbHeight) / cropHeight);
    int minInstanceSide = Math.max(minInstanceWidth, minInstanceHeight);

    // Find the original image to use as a starting point
    EnumSet<ImageOperations> allowedOps = EnumSet.allOf(ImageOperations.class);
    if (createPreview) {
        // We also need to create the preview image, so we need the original.
        allowedOps = EnumSet.noneOf(ImageOperations.class);
        minInstanceWidth = 1024;
        minInstanceHeight = 1024;
    }
    ImageDescriptorBase srcImage = this.getPreferredImage(EnumSet.noneOf(ImageOperations.class),
            allowedOps, minInstanceWidth, minInstanceHeight, Integer.MAX_VALUE, Integer.MAX_VALUE);
    if (srcImage == null) {
        // If there are no uncorrupted instances, no thumbnail can be created
        log.warn("Error - no original image was found!!!");
        return;
    }
    log.debug("Found original, reading it...");

    /*
     * We try to ensure that the thumbnail is actually from the original image
     * by comparing its aspect ratio to the original. This is not a perfect
     * check but it will usually catch the most typical errors (like having
     * the original rotated by RAW conversion SW but still the original EXIF
     * thumbnail).
     */
    double origAspect = this.getAspect(original.getWidth(), original.getHeight(), 1.0);
    double aspectAccuracy = 0.01;

    // First, check if there is a thumbnail in the image header
    RenderedImage origImage = null;

    // Read the image
    RenderedImage thumbImage = null;
    RenderedImage previewImage = null;
    try {
        File imageFile = srcImage.getFile().findAvailableCopy();
        PhotovaultImageFactory imgFactory = new PhotovaultImageFactory();
        PhotovaultImage img = imgFactory.create(imageFile, false, false);
        ChannelMapOperation channelMap = getColorChannelMapping();
        if (channelMap != null) {
            img.setColorAdjustment(channelMap);
        }
        if (img instanceof RawImage) {
            RawImage ri = (RawImage) img;
            ri.setRawSettings(getProcessing().getRawConvSettings());
        }
        if (createPreview) {
            // Calculate preview image size
            int previewWidth = img.getWidth();
            int previewHeight = img.getHeight();
            while (previewWidth > 2048 || previewHeight > 2048) {
                previewWidth >>= 1;
                previewHeight >>= 1;
            }
            previewImage = img.getRenderedImage(previewWidth, previewHeight, false);
        }
        img.setCropBounds(this.getCropBounds());
        double srcRotation = 0.0;
        if (srcImage instanceof CopyImageDescriptor) {
            srcRotation = ((CopyImageDescriptor) srcImage).getRotation();
        }
        img.setRotation(getPrefRotation() - srcRotation);
        thumbImage = img.getRenderedImage(maxThumbWidth, maxThumbHeight, true);
    } catch (Exception e) {
        log.warn("Error reading image: " + e.getMessage());
        // TODO: If we aborted here due to an image writing problem we would have
        // problems later with a non-existing transaction. We should really
        // rethink the error handling logic in the whole function. Anyway, we
        // haven't changed anything yet so we can safely commit the tx.
        return;
    }
    log.debug("Done, finding name");

    // Find where to store the file in the target volume
    File thumbnailFile = volume.getInstanceName(this, "jpg");
    log.debug("name = " + thumbnailFile.getName());
    try {
        saveInstance(thumbnailFile, thumbImage);
        if (thumbImage instanceof PlanarImage) {
            ((PlanarImage) thumbImage).dispose();
            System.gc();
        }
    } catch (PhotovaultException ex) {
        log.error("error writing thumbnail for "
                + srcImage.getFile().findAvailableCopy().getAbsolutePath() + ": " + ex.getMessage());
        // TODO: If we abort here due to an image writing problem we will have
        // problems later with a non-existing transaction. We should really
        // rethink the error handling logic in the whole function. Anyway, we
        // haven't changed anything yet so we can safely commit the tx.
        return;
    }
    try {
        ImageFile thumbFile;
        thumbFile = new ImageFile(thumbnailFile);
        CopyImageDescriptor thumbImageDesc = new CopyImageDescriptor(thumbFile, "image#0", original);
        thumbImageDesc.setRotation(getPrefRotation());
        thumbImageDesc.setCropArea(getCropBounds());
        thumbImageDesc.setColorChannelMapping(getColorChannelMapping());
        thumbImageDesc.setRawSettings(getProcessing().getRawConvSettings());
        thumbFile.addLocation(new FileLocation(volume, volume.mapFileToVolumeRelativeName(thumbnailFile)));
    } catch (Exception ex) {
        log.error("Error creating thumb instance: " + ex.getMessage());
    }
    log.debug("Loading thumbnail...");
    thumbnail = Thumbnail.createThumbnail(this, thumbnailFile);
    oldThumbnail = null;
    log.debug("Thumbnail loaded");
    /*
    if ( createPreview ) {
        File previewFile = volume.getInstanceName( this, "jpg" );
        try {
            saveInstance( previewFile, previewImage );
            if ( previewImage instanceof PlanarImage ) {
                ((PlanarImage) previewImage).dispose();
                System.gc();
            }
        } catch (PhotovaultException ex) {
            log.error( "error writing preview for " + srcImage.getFile().findAvailableCopy()
                    + ": " + ex.getMessage() );
            return;
        }
        ImageInstance previewInstance = addInstance( volume, previewFile,
                ImageInstance.INSTANCE_TYPE_MODIFIED );
        previewInstance.setColorChannelMapping( channelMap );
        previewInstance.setRawSettings( rawSettings );
    }
    txw.commit();
    */
}
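In the method above, switching allowedOps between allOf and noneOf toggles getPreferredImage between accepting any already-processed copy and insisting on the untouched original. A minimal sketch of the two opposite selectors (the ImageOperations constants are illustrative, not Photovault's actual enum):

    import java.util.EnumSet;

    public class AllowedOps {
        // Illustrative stand-in for Photovault's ImageOperations.
        enum ImageOperations { CROP, ROTATE, COLOR_MAP }

        public static void main(String[] args) {
            // Any already-applied operation is acceptable in the source image...
            EnumSet<ImageOperations> anyCopy = EnumSet.allOf(ImageOperations.class);
            // ...versus requiring an untouched original with no operations applied.
            EnumSet<ImageOperations> originalOnly = EnumSet.noneOf(ImageOperations.class);
            System.out.println(anyCopy);      // [CROP, ROTATE, COLOR_MAP]
            System.out.println(originalOnly); // []
        }
    }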
From source file:gr.abiss.calipso.domain.Metadata.java
public Set<Field.Name> getUnusedFieldNames() {
    EnumSet<Field.Name> allFieldNames = EnumSet.allOf(Field.Name.class);
    for (Field f : getFields().values()) {
        allFieldNames.remove(f.getName());
    }
    return allFieldNames;
}
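The method computes the complement of the used names by starting from allOf and removing members. A sketch with a hypothetical enum; when the used names are already an EnumSet, EnumSet.complementOf expresses the same thing in one call:

    import java.util.EnumSet;

    public class UnusedNames {
        // Hypothetical field names, standing in for Calipso's Field.Name.
        enum Name { SUMMARY, DETAIL, STATUS, SEVERITY }

        public static void main(String[] args) {
            EnumSet<Name> used = EnumSet.of(Name.SUMMARY, Name.STATUS);

            // Pattern from the example: start with everything, remove what is used.
            EnumSet<Name> unused = EnumSet.allOf(Name.class);
            unused.removeAll(used);

            // Equivalent one-liner when the used names are already an EnumSet.
            EnumSet<Name> unused2 = EnumSet.complementOf(used);

            System.out.println(unused.equals(unused2)); // true: [DETAIL, SEVERITY]
        }
    }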
From source file:org.photovault.imginfo.Test_PhotoInfo.java
@Test
public void testPreferredImageSelection() throws CommandException {
    File f = new File(testImgDir, "test1.jpg");
    PhotoInfo photo = createPhoto(f);
    VolumeDAO volDAO = daoFactory.getVolumeDAO();
    File instanceFile = volDAO.getDefaultVolume().getFilingFname(f);
    try {
        FileUtils.copyFile(f, instanceFile);
    } catch (IOException e) {
        fail(e.getMessage());
    }
    ModifyImageFileCommand fileCmd = new ModifyImageFileCommand(photo.getOriginal().getFile());
    Volume vol = volDAO.getDefaultVolume();
    fileCmd.addLocation(vol.getFileLocation(instanceFile));
    cmdHandler.executeCommand(fileCmd);

    // Create a copy
    CreateCopyImageCommand copyCmd = new CreateCopyImageCommand(photo, vol, 200, 200);
    CreateCopyImageCommand copy2Cmd = new CreateCopyImageCommand(photo, vol, 100, 100);
    CreateCopyImageCommand copy3Cmd = new CreateCopyImageCommand(photo, vol, 300, 300);
    copy3Cmd.setOperationsToApply(EnumSet.of(ImageOperations.COLOR_MAP));
    cmdHandler.executeCommand(copyCmd);
    cmdHandler.executeCommand(copy2Cmd);
    cmdHandler.executeCommand(copy3Cmd);

    ImageDescriptorBase img = photo.getPreferredImage(EnumSet.allOf(ImageOperations.class),
            EnumSet.allOf(ImageOperations.class), 0, 0, 100, 100);
    assertEquals(100, img.getWidth());
    img = photo.getPreferredImage(EnumSet.allOf(ImageOperations.class),
            EnumSet.allOf(ImageOperations.class), 150, 150, 300, 300);
    assertEquals(200, img.getWidth());
    photo.setPrefRotation(90);
    img = photo.getPreferredImage(EnumSet.allOf(ImageOperations.class),
            EnumSet.allOf(ImageOperations.class), 150, 150, 300, 300);
    assertNull(img);
    img = photo.getPreferredImage(EnumSet.noneOf(ImageOperations.class),
            EnumSet.allOf(ImageOperations.class), 201, 201, 300, 300);
    assertEquals(300, img.getWidth());
}
From source file:org.apache.hadoop.tools.mapred.TestCopyMapper.java
@Test(timeout = 40000)
public void testFailCopyWithAccessControlException() {
    try {
        deleteState();
        createSourceData();

        UserGroupInformation tmpUser = UserGroupInformation.createRemoteUser("guest");
        final CopyMapper copyMapper = new CopyMapper();
        final StubContext stubContext = tmpUser.doAs(new PrivilegedAction<StubContext>() {
            @Override
            public StubContext run() {
                try {
                    return new StubContext(getConfiguration(), null, 0);
                } catch (Exception e) {
                    LOG.error("Exception encountered ", e);
                    throw new RuntimeException(e);
                }
            }
        });

        EnumSet<DistCpOptions.FileAttribute> preserveStatus = EnumSet.allOf(DistCpOptions.FileAttribute.class);
        preserveStatus.remove(DistCpOptions.FileAttribute.ACL);
        preserveStatus.remove(DistCpOptions.FileAttribute.XATTR);

        final Mapper<Text, CopyListingFileStatus, Text, Text>.Context context = stubContext.getContext();
        context.getConfiguration().set(DistCpConstants.CONF_LABEL_PRESERVE_STATUS,
                DistCpUtils.packAttributes(preserveStatus));

        touchFile(SOURCE_PATH + "/src/file");
        OutputStream out = cluster.getFileSystem().create(new Path(TARGET_PATH + "/src/file"));
        out.write("hello world".getBytes());
        out.close();
        cluster.getFileSystem().setPermission(new Path(SOURCE_PATH + "/src/file"),
                new FsPermission(FsAction.READ, FsAction.READ, FsAction.READ));
        cluster.getFileSystem().setPermission(new Path(TARGET_PATH + "/src/file"),
                new FsPermission(FsAction.READ, FsAction.READ, FsAction.READ));

        final FileSystem tmpFS = tmpUser.doAs(new PrivilegedAction<FileSystem>() {
            @Override
            public FileSystem run() {
                try {
                    return FileSystem.get(configuration);
                } catch (IOException e) {
                    LOG.error("Exception encountered ", e);
                    Assert.fail("Test failed: " + e.getMessage());
                    throw new RuntimeException("Test ought to fail here");
                }
            }
        });

        tmpUser.doAs(new PrivilegedAction<Integer>() {
            @Override
            public Integer run() {
                try {
                    copyMapper.setup(context);
                    copyMapper.map(new Text("/src/file"),
                            new CopyListingFileStatus(tmpFS.getFileStatus(new Path(SOURCE_PATH + "/src/file"))),
                            context);
                    Assert.fail("Didn't expect the file to be copied");
                } catch (AccessControlException ignore) {
                } catch (Exception e) {
                    // We want to make sure the underlying cause of the exception is
                    // due to a permissions error. The exception we're interested in is
                    // wrapped twice - once in RetriableCommand and again in CopyMapper
                    // itself.
                    if (e.getCause() == null || e.getCause().getCause() == null
                            || !(e.getCause().getCause() instanceof AccessControlException)) {
                        throw new RuntimeException(e);
                    }
                }
                return null;
            }
        });
    } catch (Exception e) {
        LOG.error("Exception encountered ", e);
        Assert.fail("Test failed: " + e.getMessage());
    }
}
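The test builds its preserve set as "all attributes except ACL and XATTR". A minimal sketch of this allOf-then-remove idiom (the FileAttribute constants are an illustrative subset); EnumSet.complementOf is the one-step equivalent:

    import java.util.EnumSet;

    public class AllExcept {
        // Illustrative subset of DistCp's FileAttribute constants.
        enum FileAttribute { REPLICATION, BLOCKSIZE, USER, GROUP, PERMISSION, ACL, XATTR }

        public static void main(String[] args) {
            // Start from the full universe, then carve out the unwanted members.
            EnumSet<FileAttribute> preserve = EnumSet.allOf(FileAttribute.class);
            preserve.remove(FileAttribute.ACL);
            preserve.remove(FileAttribute.XATTR);

            // complementOf expresses the same set in one step.
            EnumSet<FileAttribute> same =
                    EnumSet.complementOf(EnumSet.of(FileAttribute.ACL, FileAttribute.XATTR));
            System.out.println(preserve.equals(same)); // true
        }
    }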
From source file:com.microsoft.windowsazure.mobileservices.MobileServiceTableBase.java
/**
 * Gets the system properties header value from the MobileServiceSystemProperties.
 *
 * @param properties The system properties to set in the system properties header.
 * @return The system properties header value. Returns null if properties is null or empty.
 */
private static String getSystemPropertiesString(EnumSet<MobileServiceSystemProperty> properties) {
    if (properties == null || properties.isEmpty()) {
        return null;
    }
    if (properties.containsAll(EnumSet.allOf(MobileServiceSystemProperty.class))) {
        return "*";
    }
    StringBuilder sb = new StringBuilder();
    int i = 0;
    for (MobileServiceSystemProperty systemProperty : properties) {
        sb.append(getSystemPropertyString(systemProperty));
        i++;
        if (i < properties.size()) {
            sb.append(",");
        }
    }
    return sb.toString();
}
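Here allOf supplies the universe so that containsAll can detect the everything-requested case and collapse it to a wildcard. A standalone sketch of that check (the enum and the lower-casing of names are assumptions, not the Mobile Services implementation):

    import java.util.EnumSet;

    public class WildcardHeader {
        // Hypothetical stand-in for MobileServiceSystemProperty.
        enum SystemProperty { CREATED_AT, UPDATED_AT, VERSION }

        static String headerValue(EnumSet<SystemProperty> props) {
            if (props == null || props.isEmpty()) {
                return null;
            }
            // If every constant is requested, send the wildcard instead of a list.
            if (props.containsAll(EnumSet.allOf(SystemProperty.class))) {
                return "*";
            }
            StringBuilder sb = new StringBuilder();
            for (SystemProperty p : props) {
                if (sb.length() > 0) {
                    sb.append(',');
                }
                sb.append(p.name().toLowerCase());
            }
            return sb.toString();
        }

        public static void main(String[] args) {
            System.out.println(headerValue(EnumSet.allOf(SystemProperty.class))); // *
            System.out.println(headerValue(EnumSet.of(SystemProperty.VERSION)));  // version
        }
    }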
From source file:com.isentropy.accumulo.collections.AccumuloSortedMap.java
/**
 * getScanner() always sees 1 value per key.
 * getMultiScanner() sees multiple when setMultiMap(n) has been called with n > 1
 */
protected Scanner getScanner() throws TableNotFoundException {
    Scanner s = getConnector().createScanner(getTable(), getAuthorizations());
    // All iterator scopes; note that 'all' is not consumed in this excerpt.
    EnumSet<IteratorScope> all = EnumSet.allOf(IteratorScope.class);
    int prior = ITERATOR_PRIORITY_VERSIONING;
    IteratorSetting is = new IteratorSetting(prior, ITERATOR_NAME_VERSIONING + prior, VersioningIterator.class);
    VersioningIterator.setMaxVersions(is, 1);
    s.addScanIterator(is);
    return s;
}
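An all-scopes EnumSet like the one built above is typically passed to Accumulo's TableOperations.attachIterator so the iterator applies at scan, minor compaction, and major compaction alike. A hedged sketch of that usage (the connector and table name are placeholders, not part of the original class):

    import java.util.EnumSet;

    import org.apache.accumulo.core.client.Connector;
    import org.apache.accumulo.core.client.IteratorSetting;
    import org.apache.accumulo.core.iterators.IteratorUtil.IteratorScope;
    import org.apache.accumulo.core.iterators.user.VersioningIterator;

    public class AttachAtAllScopes {
        // Sketch only: attach a versioning iterator at every scope. The
        // connector and table name are placeholders supplied by the caller.
        static void attach(Connector conn, String table) throws Exception {
            IteratorSetting is = new IteratorSetting(20, "vers", VersioningIterator.class);
            VersioningIterator.setMaxVersions(is, 1);
            conn.tableOperations().attachIterator(table, is, EnumSet.allOf(IteratorScope.class));
        }
    }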
From source file:com.inmobi.conduit.distcp.tools.mapred.TestCopyMapper.java
@Test
public void testFailCopyWithAccessControlException() {
    try {
        deleteState();
        createSourceData();

        final InMemoryWriter writer = new InMemoryWriter();
        UserGroupInformation tmpUser = UserGroupInformation.createRemoteUser("guest");
        final CopyMapper copyMapper = new CopyMapper();
        final Mapper<Text, FileStatus, NullWritable, Text>.Context context = tmpUser
                .doAs(new PrivilegedAction<Mapper<Text, FileStatus, NullWritable, Text>.Context>() {
                    @Override
                    public Mapper<Text, FileStatus, NullWritable, Text>.Context run() {
                        try {
                            StatusReporter reporter = new StubStatusReporter();
                            return getMapperContext(copyMapper, reporter, writer);
                        } catch (Exception e) {
                            LOG.error("Exception encountered ", e);
                            throw new RuntimeException(e);
                        }
                    }
                });

        EnumSet<DistCpOptions.FileAttribute> preserveStatus = EnumSet.allOf(DistCpOptions.FileAttribute.class);
        context.getConfiguration().set(DistCpConstants.CONF_LABEL_PRESERVE_STATUS,
                DistCpUtils.packAttributes(preserveStatus));

        touchFile(SOURCE_PATH + "/src/file");
        OutputStream out = cluster.getFileSystem().create(new Path(TARGET_PATH + "/src/file"));
        out.write("hello world".getBytes());
        out.close();
        cluster.getFileSystem().setPermission(new Path(SOURCE_PATH + "/src/file"),
                new FsPermission(FsAction.READ, FsAction.READ, FsAction.READ));
        cluster.getFileSystem().setPermission(new Path(TARGET_PATH + "/src/file"),
                new FsPermission(FsAction.READ, FsAction.READ, FsAction.READ));

        final FileSystem tmpFS = tmpUser.doAs(new PrivilegedAction<FileSystem>() {
            @Override
            public FileSystem run() {
                try {
                    return FileSystem.get(configuration);
                } catch (IOException e) {
                    LOG.error("Exception encountered ", e);
                    Assert.fail("Test failed: " + e.getMessage());
                    throw new RuntimeException("Test ought to fail here");
                }
            }
        });

        tmpUser.doAs(new PrivilegedAction<Integer>() {
            @Override
            public Integer run() {
                try {
                    copyMapper.setup(context);
                    copyMapper.map(new Text("/src/file"),
                            tmpFS.getFileStatus(new Path(SOURCE_PATH + "/src/file")), context);
                    Assert.fail("Didn't expect the file to be copied");
                } catch (AccessControlException ignore) {
                } catch (Exception e) {
                    if (e.getCause() == null || !(e.getCause() instanceof AccessControlException)) {
                        throw new RuntimeException(e);
                    }
                }
                return null;
            }
        });
    } catch (Exception e) {
        LOG.error("Exception encountered ", e);
        Assert.fail("Test failed: " + e.getMessage());
    }
}
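DistCpUtils.packAttributes serializes the EnumSet into a configuration string. A pure-JDK sketch of a pack/unpack round trip over an enum universe (a hypothetical helper for illustration, not the DistCp implementation):

    import java.util.EnumSet;

    public class PackEnumSet {
        // Hypothetical attributes; not the DistCp enum.
        enum Attr { USER, GROUP, PERMISSION }

        // Pack each member's first letter, e.g. allOf -> "UGP".
        static String pack(EnumSet<Attr> set) {
            StringBuilder sb = new StringBuilder();
            for (Attr a : set) {
                sb.append(a.name().charAt(0));
            }
            return sb.toString();
        }

        // Unpack by scanning the full universe for a first-letter match.
        static EnumSet<Attr> unpack(String packed) {
            EnumSet<Attr> set = EnumSet.noneOf(Attr.class);
            for (char c : packed.toCharArray()) {
                for (Attr a : EnumSet.allOf(Attr.class)) {
                    if (a.name().charAt(0) == c) {
                        set.add(a);
                    }
                }
            }
            return set;
        }

        public static void main(String[] args) {
            System.out.println(unpack(pack(EnumSet.allOf(Attr.class)))); // [USER, GROUP, PERMISSION]
        }
    }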
From source file:org.rhq.enterprise.server.authz.RoleManagerBean.java
private void processDependentPermissions(Role role) {
    /*
     * if you can control user/roles, then you can give yourself permissions, too; so we might as well
     * automagically give all permissions to users that are explicitly given the MANAGE_SECURITY permission
     */
    if (role.getPermissions().contains(Permission.MANAGE_SECURITY)) {
        role.getPermissions().addAll(EnumSet.allOf(Permission.class));
    }

    /*
     * similarly, MANAGE_INVENTORY implies all Resource perms
     */
    if (role.getPermissions().contains(Permission.MANAGE_INVENTORY)) {
        role.getPermissions().addAll(Permission.RESOURCE_ALL);
    }

    /*
     * write-access implies read-access
     */
    if (role.getPermissions().contains(Permission.CONFIGURE_WRITE)) {
        role.getPermissions().add(Permission.CONFIGURE_READ);
    }

    /*
     * and lack of read-access implies lack of write-access
     */
    if (!role.getPermissions().contains(Permission.CONFIGURE_READ)) {
        role.getPermissions().remove(Permission.CONFIGURE_WRITE);
    }
}
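addAll(EnumSet.allOf(...)) acts here as a grant-everything operation. A minimal standalone sketch of the implication rules (the Permission constants are an illustrative subset of RHQ's enum):

    import java.util.EnumSet;

    public class ImpliedPermissions {
        // Illustrative subset of RHQ's Permission constants.
        enum Permission { MANAGE_SECURITY, MANAGE_INVENTORY, CONFIGURE_READ, CONFIGURE_WRITE }

        static void expand(EnumSet<Permission> perms) {
            // Controlling security implies every other permission.
            if (perms.contains(Permission.MANAGE_SECURITY)) {
                perms.addAll(EnumSet.allOf(Permission.class));
            }
            // Write access implies read access.
            if (perms.contains(Permission.CONFIGURE_WRITE)) {
                perms.add(Permission.CONFIGURE_READ);
            }
        }

        public static void main(String[] args) {
            EnumSet<Permission> perms = EnumSet.of(Permission.MANAGE_SECURITY);
            expand(perms);
            System.out.println(perms); // all four constants
        }
    }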
From source file:org.apache.hadoop.yarn.server.timeline.LeveldbTimelineStore.java
/**
 * Retrieves a list of entities satisfying given parameters.
 *
 * @param base A byte array prefix for the lookup
 * @param entityType The type of the entity
 * @param limit A limit on the number of entities to return
 * @param starttime The earliest entity start time to retrieve (exclusive)
 * @param endtime The latest entity start time to retrieve (inclusive)
 * @param fromId Retrieve entities starting with this entity
 * @param fromTs Ignore entities with insert timestamp later than this ts
 * @param secondaryFilters Filter pairs that the entities should match
 * @param fields The set of fields to retrieve
 * @return A list of entities
 * @throws IOException
 */
private TimelineEntities getEntityByTime(byte[] base, String entityType, Long limit, Long starttime,
        Long endtime, String fromId, Long fromTs, Collection<NameValuePair> secondaryFilters,
        EnumSet<Field> fields, CheckAcl checkAcl) throws IOException {
    // Even if other info and primary filter fields are not included, we
    // still need to load them to match secondary filters when they are
    // non-empty
    if (fields == null) {
        fields = EnumSet.allOf(Field.class);
    }
    boolean addPrimaryFilters = false;
    boolean addOtherInfo = false;
    if (secondaryFilters != null && secondaryFilters.size() > 0) {
        if (!fields.contains(Field.PRIMARY_FILTERS)) {
            fields.add(Field.PRIMARY_FILTERS);
            addPrimaryFilters = true;
        }
        if (!fields.contains(Field.OTHER_INFO)) {
            fields.add(Field.OTHER_INFO);
            addOtherInfo = true;
        }
    }

    LeveldbIterator iterator = null;
    try {
        KeyBuilder kb = KeyBuilder.newInstance().add(base).add(entityType);
        // only db keys matching the prefix (base + entity type) will be parsed
        byte[] prefix = kb.getBytesForLookup();
        if (endtime == null) {
            // if end time is null, place no restriction on end time
            endtime = Long.MAX_VALUE;
        }
        // construct a first key that will be seeked to using end time or fromId
        byte[] first = null;
        if (fromId != null) {
            Long fromIdStartTime = getStartTimeLong(fromId, entityType);
            if (fromIdStartTime == null) {
                // no start time for provided id, so return empty entities
                return new TimelineEntities();
            }
            if (fromIdStartTime <= endtime) {
                // if provided id's start time falls before the end of the window,
                // use it to construct the seek key
                first = kb.add(writeReverseOrderedLong(fromIdStartTime)).add(fromId).getBytesForLookup();
            }
        }
        // if seek key wasn't constructed using fromId, construct it using end ts
        if (first == null) {
            first = kb.add(writeReverseOrderedLong(endtime)).getBytesForLookup();
        }
        byte[] last = null;
        if (starttime != null) {
            // if start time is not null, set a last key that will not be
            // iterated past
            last = KeyBuilder.newInstance().add(base).add(entityType).add(writeReverseOrderedLong(starttime))
                    .getBytesForLookup();
        }
        if (limit == null) {
            // if limit is not specified, use the default
            limit = DEFAULT_LIMIT;
        }

        TimelineEntities entities = new TimelineEntities();
        iterator = new LeveldbIterator(db);
        iterator.seek(first);
        // iterate until one of the following conditions is met: limit is
        // reached, there are no more keys, the key prefix no longer matches,
        // or a start time has been specified and reached/exceeded
        while (entities.getEntities().size() < limit && iterator.hasNext()) {
            byte[] key = iterator.peekNext().getKey();
            if (!prefixMatches(prefix, prefix.length, key) || (last != null
                    && WritableComparator.compareBytes(key, 0, key.length, last, 0, last.length) > 0)) {
                break;
            }
            // read the start time and entity id from the current key
            KeyParser kp = new KeyParser(key, prefix.length);
            Long startTime = kp.getNextLong();
            String entityId = kp.getNextString();

            if (fromTs != null) {
                long insertTime = readReverseOrderedLong(iterator.peekNext().getValue(), 0);
                if (insertTime > fromTs) {
                    byte[] firstKey = key;
                    while (iterator.hasNext() && prefixMatches(firstKey, kp.getOffset(), key)) {
                        iterator.next();
                        key = iterator.peekNext().getKey();
                    }
                    continue;
                }
            }

            // parse the entity that owns this key, iterating over all keys for
            // the entity
            TimelineEntity entity = getEntity(entityId, entityType, startTime, fields, iterator, key,
                    kp.getOffset());
            // determine if the retrieved entity matches the provided secondary
            // filters, and if so add it to the list of entities to return
            boolean filterPassed = true;
            if (secondaryFilters != null) {
                for (NameValuePair filter : secondaryFilters) {
                    Object v = entity.getOtherInfo().get(filter.getName());
                    if (v == null) {
                        Set<Object> vs = entity.getPrimaryFilters().get(filter.getName());
                        if (vs == null || !vs.contains(filter.getValue())) {
                            filterPassed = false;
                            break;
                        }
                    } else if (!v.equals(filter.getValue())) {
                        filterPassed = false;
                        break;
                    }
                }
            }
            if (filterPassed) {
                if (entity.getDomainId() == null) {
                    entity.setDomainId(DEFAULT_DOMAIN_ID);
                }
                if (checkAcl == null || checkAcl.check(entity)) {
                    // Remove primary filter and other info if they are added for
                    // matching secondary filters
                    if (addPrimaryFilters) {
                        entity.setPrimaryFilters(null);
                    }
                    if (addOtherInfo) {
                        entity.setOtherInfo(null);
                    }
                    entities.addEntity(entity);
                }
            }
        }
        return entities;
    } catch (DBException e) {
        throw new IOException(e);
    } finally {
        IOUtils.cleanup(LOG, iterator);
    }
}
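A detail worth isolating from the method above: a null field selector is normalized to the full universe with allOf before any contains checks run. A standalone sketch of that defaulting step (the Field constants are an illustrative subset):

    import java.util.EnumSet;

    public class DefaultFields {
        // Illustrative subset of the timeline store's Field constants.
        enum Field { EVENTS, RELATED_ENTITIES, PRIMARY_FILTERS, OTHER_INFO }

        static EnumSet<Field> normalize(EnumSet<Field> fields) {
            // null means the caller did not restrict the fields: load everything.
            return (fields == null) ? EnumSet.allOf(Field.class) : fields;
        }

        public static void main(String[] args) {
            System.out.println(normalize(null));                     // all constants
            System.out.println(normalize(EnumSet.of(Field.EVENTS))); // [EVENTS]
        }
    }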