Example usage for java.util.EnumSet.allOf

A list of usage examples for java.util.EnumSet.allOf

Introduction

On this page you can find example usages of java.util.EnumSet.allOf.

Prototype

public static <E extends Enum<E>> EnumSet<E> allOf(Class<E> elementType) 

Document

Creates an enum set containing all of the elements in the specified element type.
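
Before the real-world examples below, here is a minimal, self-contained sketch of the call; the Day enum is hypothetical and used only for illustration:

import java.util.EnumSet;

public class AllOfExample {

    // Hypothetical enum, used only to illustrate the call.
    enum Day { MONDAY, TUESDAY, WEDNESDAY, THURSDAY, FRIDAY, SATURDAY, SUNDAY }

    public static void main(String[] args) {
        // allOf(Class) returns an EnumSet containing every constant of the enum type.
        EnumSet<Day> allDays = EnumSet.allOf(Day.class);
        System.out.println(allDays);        // [MONDAY, TUESDAY, ..., SUNDAY]
        System.out.println(allDays.size()); // 7
    }
}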

Usage

From source file:sf.net.experimaestro.server.JsonRPCMethods.java
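
If no states are passed in, the helper below falls back to EnumSet.allOf(ResourceState.class), so an empty filter means "all resource states". (Note that EnumSet.of(statesArray[0], statesArray) passes the first element twice, which is harmless in a set.)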

private EnumSet<ResourceState> getStates(Object[] states) {
    final EnumSet<ResourceState> statesSet;

    if (states == null || states.length == 0)
        statesSet = EnumSet.allOf(ResourceState.class);
    else {
        ResourceState statesArray[] = new ResourceState[states.length];
        for (int i = 0; i < states.length; i++)
            statesArray[i] = ResourceState.valueOf(states[i].toString());
        statesSet = EnumSet.of(statesArray[0], statesArray);
    }
    return statesSet;
}

From source file:com.feedzai.commons.sql.abstraction.engine.configuration.PdbProperties.java
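
Here EnumSet.allOf(IsolationLevel.class) is used only for reporting: the exception message lists every valid isolation level when the configured value cannot be parsed.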

/**
 * Gets the isolation level.
 *
 * @return The isolation level.
 */
public int getIsolationLevel() {
    final Optional<IsolationLevel> e = Enums.getIfPresent(IsolationLevel.class,
            getProperty(ISOLATION_LEVEL).toUpperCase());

    if (!e.isPresent()) {
        throw new DatabaseEngineRuntimeException(ISOLATION_LEVEL + " must be set and be one of the following: "
                + EnumSet.allOf(IsolationLevel.class));
    }

    switch (e.get()) {
    case READ_UNCOMMITTED:
        return Connection.TRANSACTION_READ_UNCOMMITTED;
    case READ_COMMITTED:
        return Connection.TRANSACTION_READ_COMMITTED;
    case REPEATABLE_READ:
        return Connection.TRANSACTION_REPEATABLE_READ;
    case SERIALIZABLE:
        return Connection.TRANSACTION_SERIALIZABLE;
    default:
        // Never happens.
        throw new DatabaseEngineRuntimeException("New isolation level?!" + e.get());
    }
}

From source file:com.smartsheet.api.internal.SheetResourcesImplTest.java
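
This test passes EnumSet.allOf(SheetTemplateInclusion.class) so the sheet created from a template includes every optional inclusion.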

@Test
public void testCreateSheetInWorkspaceFromTemplate() throws SmartsheetException, IOException {
    server.setResponseBody(new File("src/test/resources/createSheetFromExisting.json"));

    Sheet sheet = new Sheet();
    sheet.setFromId(2906571706525572L);
    Sheet newSheet = sheetResource.createSheetInWorkspaceFromTemplate(1234L, sheet,
            EnumSet.allOf(SheetTemplateInclusion.class));

    assertEquals(7960873114331012L, newSheet.getId().longValue());
    assertEquals(AccessLevel.OWNER, newSheet.getAccessLevel());
    assertEquals("https://app.smartsheet.com/b/home?lx=lbKEF1UakfTNJTZ5XkpxWg", newSheet.getPermalink());

    newSheet = sheetResource.createSheetInWorkspaceFromTemplate(1234L, sheet, null);
}

From source file:com.thinkbiganalytics.ingest.TableRegisterSupportTest.java
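
The failure case in this test drops every TableType at once by passing EnumSet.allOf(TableType.class).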

/**
 * Verify dropping multiple tables.
 */
@Test
public void testDropTables() throws Exception {
    // Mock SQL objects
    final Statement statement = Mockito.mock(Statement.class);
    Mockito.when(statement.execute(Mockito.anyString())).then(invocation -> {
        final String sql = (String) invocation.getArguments()[0];
        if (sql.startsWith("DROP TABLE IF EXISTS `invalid`")) {
            throw new SQLException();
        }
        return true;
    });

    final Connection connection = Mockito.mock(Connection.class);
    Mockito.when(connection.createStatement()).thenReturn(statement);

    // Test dropping tables with success
    TableRegisterSupport support = new TableRegisterSupport(connection);
    Assert.assertTrue(support.dropTables("cat", "feed",
            EnumSet.of(TableType.MASTER, TableType.VALID, TableType.INVALID), ImmutableSet.of("backup.feed")));
    Mockito.verify(statement).execute("DROP TABLE IF EXISTS `cat`.`feed`");
    Mockito.verify(statement).execute("DROP TABLE IF EXISTS `cat`.`feed_valid`");
    Mockito.verify(statement).execute("DROP TABLE IF EXISTS `cat`.`feed_invalid`");
    Mockito.verify(statement).execute("DROP TABLE IF EXISTS backup.feed");

    // Test dropping tables with exception
    Assert.assertFalse(
            support.dropTables("invalid", "feed", EnumSet.allOf(TableType.class), ImmutableSet.of()));
    Assert.assertFalse(support.dropTables("cat", "feed", ImmutableSet.of(), ImmutableSet.of("`invalid`")));
}

From source file:io.insideout.stanbol.enhancer.nlp.freeling.TestFreelingAnalysis.java
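
Requesting at.getEnclosed(EnumSet.allOf(SpanTypeEnum.class)) returns spans of every type, which the loop below then validates per type.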

private void validateAnalysedText(String text, AnalysedText at) {
    Assert.assertNotNull(text);
    Assert.assertNotNull(at);
    //Assert the AnalysedText
    Assert.assertEquals(0, at.getStart());
    Assert.assertEquals(text.length(), at.getEnd());
    Iterator<Span> it = at.getEnclosed(EnumSet.allOf(SpanTypeEnum.class));
    while (it.hasNext()) {
        //validate that the span|start|end corresponds with the Text
        Span span = it.next();
        Assert.assertNotNull(span);
        Assert.assertEquals(text.substring(span.getStart(), span.getEnd()), span.getSpan());
        switch (span.getType()) {
        case Token:
            double prevProb = -1;
            List<Value<PosTag>> posTags = span.getAnnotations(POS_ANNOTATION);
            Assert.assertTrue("All Tokens need to have a PosTag (missing for " + span + ")",
                    posTags != null && !posTags.isEmpty());
            for (Value<PosTag> posTag : posTags) {
                //assert Mapped PosTags
                Assert.assertTrue("PosTag " + posTag + " used by " + span + " is not mapped",
                        posTag.value().isMapped());
                //assert declining probabilities
                Assert.assertTrue("Wrong order in " + posTags + " of " + span + "!",
                        prevProb < 0 || posTag.probability() <= prevProb);
                prevProb = posTag.probability();
            }
            Assert.assertNull("Tokens MUST NOT have Phrase annotations!",
                    span.getAnnotation(PHRASE_ANNOTATION));
            Assert.assertNull("Tokens MUST NOT have NER annotations!", span.getAnnotation(NER_ANNOTATION));
            break;
        case Chunk:
            Assert.assertNull("Chunks MUST NOT have POS annotations!", span.getAnnotation(POS_ANNOTATION));
            List<Token> tokens = AnalysedTextUtils.asList(((Chunk) span).getTokens());
            prevProb = -1;
            List<Value<PhraseTag>> phraseTags = span.getAnnotations(PHRASE_ANNOTATION);
            boolean hasPhraseTag = (phraseTags != null && !phraseTags.isEmpty());
            List<Value<NerTag>> nerTags = span.getAnnotations(NER_ANNOTATION);
            boolean hasNerTag = (nerTags != null && !nerTags.isEmpty());
            Assert.assertTrue(
                    "All Chunks with several words need to have a PhraseTag (missing for " + span + ")",
                    hasPhraseTag || tokens.size() < 2);
            Assert.assertTrue("All Chunks with a single word need to have a NerTag (missing for" + span + ")",
                    hasNerTag || tokens.size() > 1);
            for (Value<PhraseTag> phraseTag : phraseTags) {
                //assert Mapped PosTags
                Assert.assertNotNull("PhraseTag " + phraseTag + " is not mapped",
                        phraseTag.value().getCategory());
                //assert declining probabilities
                Assert.assertTrue(prevProb < 0 || phraseTag.probability() < prevProb);
                prevProb = phraseTag.probability();
            }
            for (Value<NerTag> nerTag : nerTags) {
                Assert.assertTrue("NER Tags need to have a probability", nerTag.probability() > 0);
            }
            break;
        default:
            Assert.assertNull(span.getType() + " type Spans MUST NOT have POS annotations!",
                    span.getAnnotation(POS_ANNOTATION));
            Assert.assertNull(span.getType() + " type Spans MUST NOT have Phrase annotations!",
                    span.getAnnotation(PHRASE_ANNOTATION));
            Assert.assertNull(span.getType() + " type Spans MUST NOT have NER annotations!",
                    span.getAnnotation(NER_ANNOTATION));
            break;
        }
    }
}

From source file:com.baasbox.configuration.PropertiesConfigurationHelper.java
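
Here the enum class is only known at runtime, so a raw EnumSet.allOf(en) is used to iterate over every constant and match the requested key reflectively.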

public static Object findByKey(String completeKey) throws ConfigurationException {
    String[] splittedKeys = completeKey.split("\\.");
    String section = splittedKeys[0];
    Class en = PropertiesConfigurationHelper.CONFIGURATION_SECTIONS.get(section);
    EnumSet values = EnumSet.allOf(en);
    for (Object v : values) {
        try {
            String key = StringUtils.join(Arrays.copyOfRange(splittedKeys, 1, splittedKeys.length), ".");
            if (((String) en.getMethod("getKey").invoke(v)).equalsIgnoreCase(key))
                return v;
        } catch (Exception e) {
            throw new ConfigurationException(
                    "Is it " + en.getCanonicalName() + " an Enum that implements the IProperties interface?",
                    e);
        }
    }
    return null;
}

From source file:org.photovault.common.Test_NewSchemaMigration.java
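
In this migration test, EnumSet.allOf(ImageOperations.class) is passed to getPreferredImage as the set of allowed operations, i.e. any operation may be applied when selecting a copy.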

/**
 Test that migration works OK.
 */
@Test
public void testMigrationToVersioned() {
    SchemaUpdateAction sua = new SchemaUpdateAction(PhotovaultSettings.getSettings().getCurrentDatabase());
    sua.upgradeDatabase();

    // Verify that the photos are persisted correctly
    Session s = HibernateUtil.getSessionFactory().openSession();
    HibernateDAOFactory df = (HibernateDAOFactory) DAOFactory.instance(HibernateDAOFactory.class);
    df.setSession(s);
    PhotoInfoDAO photoDao = df.getPhotoInfoDAO();

    PhotoInfo p1 = photoDao.findByUUID(UUID.fromString("639f492f-99b2-4d93-b18e-597324edc482"));
    OriginalImageDescriptor o1 = p1.getOriginal();
    assertEquals(1536, o1.getWidth());
    assertEquals(2048, o1.getHeight());
    assertEquals("London", p1.getShootingPlace());
    assertEquals("Harri", p1.getPhotographer());
    Set<FileLocation> locations = p1.getOriginal().getFile().getLocations();
    assertEquals(1, locations.size());
    FileLocation l = locations.iterator().next();
    assertEquals("/test4.jpg", l.getFname());

    FuzzyDate fd = p1.getFuzzyShootTime();
    assertEquals(182.5, fd.getAccuracy(), 0.001);

    ObjectHistory<PhotoInfo> h1 = p1.getHistory();
    Set<Change<PhotoInfo>> ch1 = h1.getChanges();
    assertEquals(3, ch1.size());
    assertNull(p1.getRawSettings());

    Set<PhotoFolder> p1folders = p1.getFolders();
    assertEquals(1, p1folders.size());
    PhotoFolderDAO folderDao = df.getPhotoFolderDAO();
    PhotoFolder f1 = folderDao.findById(UUID.fromString("433404fe-ed6b-43a4-872d-286b23a6dfad"), false);
    assertTrue(p1folders.contains(f1));

    /*
     Photo # 23 & #24 were actually the same image but with different original 
     hash due to changed EXIF data. As copies created from #24 are identical 
     to those created from #23, they should be associated with #23.
     */

    PhotoInfo p23 = photoDao.findByUUID(UUID.fromString("7115db43-12e8-43f2-a6ad-d66f8c039636"));
    PhotoInfo p24 = photoDao.findByUUID(UUID.fromString("e1a08867-1b6f-4d53-a22e-2744ab770914"));

    OriginalImageDescriptor p23orig = p23.getOriginal();
    boolean foundP23Thumb = false;
    for (CopyImageDescriptor c : p23orig.getCopies()) {
        if (c.getFile().getId().equals(UUID.fromString("fec3b45f-4acc-4978-9b00-8b2acb5268a1"))) {
            foundP23Thumb = true;
        }
    }
    assertTrue(foundP23Thumb);
    OriginalImageDescriptor p24orig = p24.getOriginal();
    assertEquals(0, p24orig.getCopies().size());

    // Photo with raw image
    PhotoInfo p2 = photoDao.findByUUID(UUID.fromString("e3f4b466-d1a3-48c1-ac86-01d9babf373f"));
    RawConversionSettings r2 = p2.getRawSettings();
    assertEquals(31347, r2.getWhite());
    assertEquals(0.5, r2.getHighlightCompression(), 0.001);

    OriginalImageDescriptor o2 = p2.getOriginal();
    CopyImageDescriptor t2 = (CopyImageDescriptor) p2.getPreferredImage(
            EnumSet.of(ImageOperations.RAW_CONVERSION), EnumSet.allOf(ImageOperations.class), 66, 66, 200, 200);

    assertEquals(r2, t2.getRawSettings());
    boolean f = false;
    for (CopyImageDescriptor c : o2.getCopies()) {
        if (c != t2) {
            assertEquals(17847, c.getRawSettings().getWhite());
            f = true;
        }
    }
    assertTrue(f);
    s.close();
}

From source file:com.hortonworks.streamline.streams.service.TopologyCatalogResource.java
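
The creator of a new topology is granted every permission by passing EnumSet.allOf(Permission.class) to the ACL helper.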

@POST
@Path("/topologies")
@Timed
public Response addTopology(Topology topology, @Context SecurityContext securityContext) {
    SecurityUtil.checkRole(authorizer, securityContext, Roles.ROLE_TOPOLOGY_ADMIN);
    if (StringUtils.isEmpty(topology.getName())) {
        throw BadRequestException.missingParameter(Topology.NAME);
    }
    if (StringUtils.isEmpty(topology.getConfig())) {
        throw BadRequestException.missingParameter(Topology.CONFIG);
    }
    Topology createdTopology = catalogService.addTopology(topology);
    SecurityUtil.addAcl(authorizer, securityContext, NAMESPACE, createdTopology.getId(),
            EnumSet.allOf(Permission.class));
    return WSUtils.respondEntity(createdTopology, CREATED);
}

From source file:io.soabase.core.SoaBundle.java
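
The CORS filter is mapped with EnumSet.allOf(DispatcherType.class), so it applies to every dispatcher type.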

private void checkCorsFilter(SoaConfiguration configuration, ServletEnvironment servlets) {
    if (configuration.isAddCorsFilter()) {
        // from http://jitterted.com/tidbits/2014/09/12/cors-for-dropwizard-0-7-x/

        FilterRegistration.Dynamic filter = servlets.addFilter("CORS", CrossOriginFilter.class);
        filter.addMappingForUrlPatterns(EnumSet.allOf(DispatcherType.class), true, "/*");
        filter.setInitParameter(CrossOriginFilter.ALLOWED_METHODS_PARAM, "GET,PUT,POST,DELETE,OPTIONS");
        filter.setInitParameter(CrossOriginFilter.ALLOWED_ORIGINS_PARAM, "*");
        filter.setInitParameter(CrossOriginFilter.ACCESS_CONTROL_ALLOW_ORIGIN_HEADER, "*");
        filter.setInitParameter("allowedHeaders",
                "Content-Type,Authorization,X-Requested-With,Content-Length,Accept,Origin");
        filter.setInitParameter("allowCredentials", "true");
    }
}

From source file:com.github.tomakehurst.wiremock.jetty9.JettyHttpServer.java
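
WireMock registers its TrailingSlashFilter for all dispatcher types via EnumSet.allOf(DispatcherType.class), while the other filters are limited to specific dispatcher types.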

@SuppressWarnings({ "rawtypes", "unchecked" })
private ServletContextHandler addMockServiceContext(StubRequestHandler stubRequestHandler,
        FileSource fileSource, Notifier notifier) {
    ServletContextHandler mockServiceContext = new ServletContextHandler(jettyServer, "/");

    mockServiceContext.setInitParameter("org.eclipse.jetty.servlet.Default.maxCacheSize", "0");
    mockServiceContext.setInitParameter("org.eclipse.jetty.servlet.Default.resourceBase", fileSource.getPath());
    mockServiceContext.setInitParameter("org.eclipse.jetty.servlet.Default.dirAllowed", "false");

    mockServiceContext.addServlet(DefaultServlet.class, FILES_URL_MATCH);

    mockServiceContext.setAttribute(JettyFaultInjectorFactory.class.getName(), new JettyFaultInjectorFactory());
    mockServiceContext.setAttribute(StubRequestHandler.class.getName(), stubRequestHandler);
    mockServiceContext.setAttribute(Notifier.KEY, notifier);
    ServletHolder servletHolder = mockServiceContext.addServlet(WireMockHandlerDispatchingServlet.class, "/");
    servletHolder.setInitParameter(RequestHandler.HANDLER_CLASS_KEY, StubRequestHandler.class.getName());
    servletHolder.setInitParameter(FaultInjectorFactory.INJECTOR_CLASS_KEY,
            JettyFaultInjectorFactory.class.getName());
    servletHolder.setInitParameter(WireMockHandlerDispatchingServlet.SHOULD_FORWARD_TO_FILES_CONTEXT, "true");

    MimeTypes mimeTypes = new MimeTypes();
    mimeTypes.addMimeMapping("json", "application/json");
    mimeTypes.addMimeMapping("html", "text/html");
    mimeTypes.addMimeMapping("xml", "application/xml");
    mimeTypes.addMimeMapping("txt", "text/plain");
    mockServiceContext.setMimeTypes(mimeTypes);

    mockServiceContext.setWelcomeFiles(new String[] { "index.json", "index.html", "index.xml", "index.txt" });

    mockServiceContext.addFilter(GzipFilter.class, "/*",
            EnumSet.of(DispatcherType.REQUEST, DispatcherType.FORWARD));
    mockServiceContext.addFilter(ContentTypeSettingFilter.class, FILES_URL_MATCH,
            EnumSet.of(DispatcherType.FORWARD));
    mockServiceContext.addFilter(TrailingSlashFilter.class, FILES_URL_MATCH,
            EnumSet.allOf(DispatcherType.class));

    return mockServiceContext;
}