List of usage examples for java.util.EnumSet.allOf
public static <E extends Enum<E>> EnumSet<E> allOf(Class<E> elementType)
From source file: org.auraframework.integration.test.util.WebDriverTestCase.java
/** * Find all the browsers the current test case should be executed in. Test cases can be annotated with multiple * target browsers. If the testcase does not have an annotation, the class level annotation is used. * * @return/*from w w w. jav a 2s.c o m*/ * @throws NoSuchMethodException */ public Set<BrowserType> getTargetBrowsers() { TargetBrowsers targetBrowsers = null; try { Method method = getClass().getMethod(getName()); targetBrowsers = method.getAnnotation(TargetBrowsers.class); if (targetBrowsers == null) { // Inherit defaults from the test class targetBrowsers = getClass().getAnnotation(TargetBrowsers.class); } } catch (NoSuchMethodException e) { // Do nothing } if (targetBrowsers == null) { // If no target browsers are specified, default to ALL return EnumSet.allOf(BrowserType.class); } return Sets.newEnumSet(Arrays.asList(targetBrowsers.value()), BrowserType.class); }
From source file: org.squashtest.tm.service.internal.requirement.VerifiedRequirementsManagerServiceImpl.java
/** * Return a merged map. For each {@link ExecutionStatus}, the returned value is the value in map1 + value in map 2. * The state of the two arguments maps is preserved * @param mainStatusMap/*from w w w .jav a 2 s. c o m*/ * @param descendantStatusMap * @return */ private Map<ExecutionStatus, Long> mergeMapResult(Map<ExecutionStatus, Long> mainStatusMap, Map<ExecutionStatus, Long> descendantStatusMap) { Map<ExecutionStatus, Long> mergedStatusMap = new EnumMap<>(ExecutionStatus.class); EnumSet<ExecutionStatus> allStatus = EnumSet.allOf(ExecutionStatus.class); for (ExecutionStatus executionStatus : allStatus) { Long mainCount = mainStatusMap.get(executionStatus) == null ? 0 : mainStatusMap.get(executionStatus); Long descendantCount = descendantStatusMap.get(executionStatus) == null ? 0 : descendantStatusMap.get(executionStatus); Long totalCount = mainCount + descendantCount; mergedStatusMap.put(executionStatus, totalCount); } return mergedStatusMap; }
From source file: org.apache.hadoop.yarn.server.timeline.RollingLevelDBTimelineStore.java
/**
 * Retrieves a list of entities satisfying given parameters.
 *
 * @param base
 *          A byte array prefix for the lookup
 * @param entityType
 *          The type of the entity
 * @param limit
 *          A limit on the number of entities to return
 * @param starttime
 *          The earliest entity start time to retrieve (exclusive)
 * @param endtime
 *          The latest entity start time to retrieve (inclusive)
 * @param fromId
 *          Retrieve entities starting with this entity
 * @param fromTs
 *          Ignore entities with insert timestamp later than this ts
 * @param secondaryFilters
 *          Filter pairs that the entities should match
 * @param fields
 *          The set of fields to retrieve
 * @param checkAcl
 *          ACL callback consulted per entity; a null value disables the check
 * @param usingPrimaryFilter
 *          true if this query is using a primary filter
 * @return A list of entities
 * @throws IOException
 */
private TimelineEntities getEntityByTime(byte[] base, String entityType, Long limit, Long starttime,
        Long endtime, String fromId, Long fromTs, Collection<NameValuePair> secondaryFilters,
        EnumSet<Field> fields, CheckAcl checkAcl, boolean usingPrimaryFilter) throws IOException {
    KeyBuilder kb = KeyBuilder.newInstance().add(base).add(entityType);
    // only db keys matching the prefix (base + entity type) will be parsed
    byte[] prefix = kb.getBytesForLookup();
    if (endtime == null) {
        // if end time is null, place no restriction on end time
        endtime = Long.MAX_VALUE;
    }
    // Sanitize the fields parameter
    if (fields == null) {
        fields = EnumSet.allOf(Field.class);
    }
    // construct a first key that will be seeked to using end time or fromId
    long firstStartTime = Long.MAX_VALUE;
    byte[] first = null;
    if (fromId != null) {
        Long fromIdStartTime = getStartTimeLong(fromId, entityType);
        if (fromIdStartTime == null) {
            // no start time for provided id, so return empty entities
            return new TimelineEntities();
        }
        if (fromIdStartTime <= endtime) {
            // if provided id's start time falls before the end of the window,
            // use it to construct the seek key
            firstStartTime = fromIdStartTime;
            first = kb.add(writeReverseOrderedLong(fromIdStartTime)).add(fromId).getBytesForLookup();
        }
    }
    // if seek key wasn't constructed using fromId, construct it using end ts
    if (first == null) {
        firstStartTime = endtime;
        first = kb.add(writeReverseOrderedLong(endtime)).getBytesForLookup();
    }
    byte[] last = null;
    if (starttime != null) {
        // if start time is not null, set a last key that will not be
        // iterated past
        last = KeyBuilder.newInstance().add(base).add(entityType).add(writeReverseOrderedLong(starttime))
                .getBytesForLookup();
    }
    if (limit == null) {
        // if limit is not specified, use the default
        limit = DEFAULT_LIMIT;
    }
    TimelineEntities entities = new TimelineEntities();
    // select the index store or the entity store depending on the query shape
    RollingLevelDB rollingdb = null;
    if (usingPrimaryFilter) {
        rollingdb = indexdb;
    } else {
        rollingdb = entitydb;
    }
    // walk the rolling DBs starting at the one covering firstStartTime,
    // moving to earlier DBs (getPreviousDB) until the limit is satisfied
    DB db = rollingdb.getDBForStartTime(firstStartTime);
    while (entities.getEntities().size() < limit && db != null) {
        try (DBIterator iterator = db.iterator()) {
            iterator.seek(first);
            // iterate until one of the following conditions is met: limit is
            // reached, there are no more keys, the key prefix no longer matches,
            // or a start time has been specified and reached/exceeded
            while (entities.getEntities().size() < limit && iterator.hasNext()) {
                byte[] key = iterator.peekNext().getKey();
                if (!prefixMatches(prefix, prefix.length, key) || (last != null
                        && WritableComparator.compareBytes(key, 0, key.length, last, 0, last.length) > 0)) {
                    break;
                }
                // read the start time and entity id from the current key
                KeyParser kp = new KeyParser(key, prefix.length);
                Long startTime = kp.getNextLong();
                String entityId = kp.getNextString();
                if (fromTs != null) {
                    long insertTime = readReverseOrderedLong(iterator.peekNext().getValue(), 0);
                    if (insertTime > fromTs) {
                        // entity was inserted after the cutoff ts: advance past all
                        // remaining keys belonging to this entity, then skip it
                        byte[] firstKey = key;
                        while (iterator.hasNext()) {
                            key = iterator.peekNext().getKey();
                            iterator.next();
                            if (!prefixMatches(firstKey, kp.getOffset(), key)) {
                                break;
                            }
                        }
                        continue;
                    }
                }
                // Even if other info and primary filter fields are not included, we
                // still need to load them to match secondary filters when they are
                // non-empty
                EnumSet<Field> queryFields = EnumSet.copyOf(fields);
                boolean addPrimaryFilters = false;
                boolean addOtherInfo = false;
                if (secondaryFilters != null && secondaryFilters.size() > 0) {
                    if (!queryFields.contains(Field.PRIMARY_FILTERS)) {
                        queryFields.add(Field.PRIMARY_FILTERS);
                        addPrimaryFilters = true;
                    }
                    if (!queryFields.contains(Field.OTHER_INFO)) {
                        queryFields.add(Field.OTHER_INFO);
                        addOtherInfo = true;
                    }
                }
                // parse the entity that owns this key, iterating over all keys for
                // the entity
                TimelineEntity entity = null;
                if (usingPrimaryFilter) {
                    entity = getEntity(entityId, entityType, queryFields);
                    iterator.next();
                } else {
                    entity = getEntity(entityId, entityType, startTime, queryFields, iterator, key,
                            kp.getOffset());
                }
                // determine if the retrieved entity matches the provided secondary
                // filters, and if so add it to the list of entities to return
                boolean filterPassed = true;
                if (secondaryFilters != null) {
                    for (NameValuePair filter : secondaryFilters) {
                        Object v = entity.getOtherInfo().get(filter.getName());
                        if (v == null) {
                            Set<Object> vs = entity.getPrimaryFilters().get(filter.getName());
                            if (vs == null || !vs.contains(filter.getValue())) {
                                filterPassed = false;
                                break;
                            }
                        } else if (!v.equals(filter.getValue())) {
                            filterPassed = false;
                            break;
                        }
                    }
                }
                if (filterPassed) {
                    if (entity.getDomainId() == null) {
                        entity.setDomainId(DEFAULT_DOMAIN_ID);
                    }
                    if (checkAcl == null || checkAcl.check(entity)) {
                        // Remove primary filter and other info if they are added for
                        // matching secondary filters
                        if (addPrimaryFilters) {
                            entity.setPrimaryFilters(null);
                        }
                        if (addOtherInfo) {
                            entity.setOtherInfo(null);
                        }
                        entities.addEntity(entity);
                    }
                }
            }
            db = rollingdb.getPreviousDB(db);
        }
    }
    return entities;
}
From source file: org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryManagerOnTimelineStore.java
private ApplicationReportExt getApplication(ApplicationId appId, ApplicationReportField field) throws YarnException, IOException { TimelineEntity entity = timelineDataManager.getEntity(ApplicationMetricsConstants.ENTITY_TYPE, appId.toString(), EnumSet.allOf(Field.class), UserGroupInformation.getLoginUser()); if (entity == null) { throw new ApplicationNotFoundException( "The entity for application " + appId + " doesn't exist in the timeline store"); } else {//from www. j a va 2 s.c om return generateApplicationReport(entity, field); } }
From source file: org.kuali.kfs.module.tem.document.TravelDocumentBase.java
/**
 * Sums the expense totals of every {@link ExpenseType} for this document.
 *
 * @see org.kuali.kfs.module.tem.document.TravelDocument#getDocumentGrandTotal()
 */
@Override
@Transient
public KualiDecimal getDocumentGrandTotal() {
    KualiDecimal grandTotal = KualiDecimal.ZERO;
    for (ExpenseType expenseType : EnumSet.allOf(ExpenseType.class)) {
        KualiDecimal typeTotal =
                getTravelExpenseService().getExpenseServiceByType(expenseType).getAllExpenseTotal(this, true);
        grandTotal = typeTotal.add(grandTotal);
    }
    return grandTotal;
}
From source file: org.totschnig.myexpenses.util.Utils.java
/** * @param ctx/*from w w w . ja v a 2s .c o m*/ * for retrieving resources * @param other * if not null, all features except the one provided will be returned * @param type if not null, only features of this type will be listed * @return construct a list of all contrib features to be included into a * TextView */ public static CharSequence getContribFeatureLabelsAsFormattedList(Context ctx, ContribFeature other, LicenceHandler.LicenceStatus type) { CharSequence result = "", linefeed = Html.fromHtml("<br>"); Iterator<ContribFeature> iterator = EnumSet.allOf(ContribFeature.class).iterator(); while (iterator.hasNext()) { ContribFeature f = iterator.next(); if (!f.equals(other) && (!f.equals(ContribFeature.AD_FREE) || IS_FLAVOURED)) { if (type != null && ((f.isExtended() && !type.equals(LicenceHandler.LicenceStatus.EXTENDED)) || (!f.isExtended() && type.equals(LicenceHandler.LicenceStatus.EXTENDED)))) { continue; } String resName = "contrib_feature_" + f.toString() + "_label"; int resId = ctx.getResources().getIdentifier(resName, "string", ctx.getPackageName()); if (resId == 0) { AcraHelper.report(new Resources.NotFoundException(resName)); continue; } if (!result.equals("")) { result = TextUtils.concat(result, linefeed); } result = TextUtils.concat(result, "\u25b6 ", ctx.getText(resId)); } } return result; }
From source file: org.ambraproject.service.annotation.AnnotationServiceTest.java
/** Verifies that annotation counts honour the requested {@code AnnotationType} subset. */
@Test
public void testCountComments() {
    // Persist a user and an article to attach annotations to.
    UserProfile user = new UserProfile("email@testCountComments.org", "displayNameTestCountComments", "pass");
    dummyDataStore.store(user);
    Article article = new Article("id:doi-test-count-comments");
    dummyDataStore.store(article);

    // One top-level comment ...
    Long commentId = Long
            .valueOf(dummyDataStore.store(new Annotation(user, AnnotationType.COMMENT, article.getID())));
    // ... and one reply attached to it.
    Annotation reply = new Annotation(user, AnnotationType.REPLY, article.getID());
    reply.setParentID(commentId);
    dummyDataStore.store(reply);

    // Counting only COMMENT yields 1; counting all types yields 2.
    assertEquals(annotationService.countAnnotations(article.getID(), EnumSet.of(AnnotationType.COMMENT)), 1,
            "annotation service returned incorrect count of comments and notes");
    assertEquals(annotationService.countAnnotations(article.getID(), EnumSet.allOf(AnnotationType.class)), 2,
            "annotation service returned incorrect count of comments and notes");
}
From source file: org.libreplan.web.planner.order.OrderPlanningModel.java
private void refillLoadChartWhenNeeded(ChangeHooker changeHooker, final Planner planner, final Chart loadChart, final boolean updateEarnedValueChartLegend) { planner.getTimeTracker()// w w w. ja v a 2 s.co m .addZoomListener(fillOnZoomChange(loadChart, planner, updateEarnedValueChartLegend)); planner.addChartVisibilityListener(fillOnChartVisibilityChange(loadChart)); changeHooker.withReadOnlyTransactionWrapping().hookInto(EnumSet.allOf(ChangeTypes.class), () -> { if (isExecutingOutsideZKExecution()) { return; } if (planner.isVisibleChart()) { loadChart.fillChart(); if (updateEarnedValueChartLegend) { updateEarnedValueChartLegend(); } } }); }
From source file: org.apache.accumulo.test.proxy.SimpleProxyBase.java
/** Attaching a namespace iterator with bad credentials must throw AccumuloSecurityException. */
@Test(expected = AccumuloSecurityException.class, timeout = 5000)
public void attachNamespaceIteratorLoginFailure() throws Exception {
    IteratorSetting debugSetting = new IteratorSetting(100, "DebugTheThings", DebugIterator.class.getName(),
            Collections.<String, String>emptyMap());
    // Expected to fail: badLogin carries invalid credentials.
    client.attachNamespaceIterator(badLogin, namespaceName, debugSetting, EnumSet.allOf(IteratorScope.class));
}