Example usage for java.util EnumSet of

List of usage examples for java.util EnumSet of

Introduction

On this page you can find example usages of java.util.EnumSet.of.

Prototype

public static <E extends Enum<E>> EnumSet<E> of(E e) 

Source Link

Document

Creates an enum set initially containing the specified element.

Usage

From source file:com.linecorp.armeria.server.thrift.ThriftDocServiceTest.java

@Test
public void testOk() throws Exception {
    // Build the endpoint metadata we expect the DocService specification to report.
    final Map<Class<?>, Iterable<EndpointInfo>> expectedServices = new HashMap<>();
    expectedServices.put(HelloService.class, Collections.singletonList(
            new EndpointInfo("*", "/", "hello", THRIFT_BINARY, SerializationFormat.ofThrift())));
    expectedServices.put(SleepService.class, Collections.singletonList(
            new EndpointInfo("*", "/", "sleep", THRIFT_BINARY, SerializationFormat.ofThrift())));
    expectedServices.put(FooService.class, Collections.singletonList(
            new EndpointInfo("*", "/foo", "", THRIFT_COMPACT, EnumSet.of(THRIFT_COMPACT))));
    expectedServices.put(Cassandra.class, Arrays.asList(
            new EndpointInfo("*", "/cassandra", "", THRIFT_BINARY, EnumSet.of(THRIFT_BINARY)),
            new EndpointInfo("*", "/cassandra/debug", "", THRIFT_TEXT, EnumSet.of(THRIFT_TEXT))));
    expectedServices.put(Hbase.class, Collections.singletonList(
            new EndpointInfo("*", "/hbase", "", THRIFT_BINARY, SerializationFormat.ofThrift())));
    expectedServices.put(OnewayHelloService.class, Collections.singletonList(
            new EndpointInfo("*", "/oneway", "", THRIFT_BINARY, SerializationFormat.ofThrift())));

    final ObjectMapper mapper = new ObjectMapper();
    final String expectedJson =
            mapper.writeValueAsString(ThriftServiceSpecificationGenerator.generate(expectedServices));
    // FIXME(trustin): Bring this back.
    //ImmutableMap.of(hello_args.class, SAMPLE_HELLO),
    //SAMPLE_HTTP_HEADERS));

    try (CloseableHttpClient client = HttpClients.createMinimal();
            CloseableHttpResponse res = client.execute(new HttpGet(specificationUri()))) {
        assertThat(res.getStatusLine().toString()).isEqualTo("HTTP/1.1 200 OK");
        final String responseJson = EntityUtils.toString(res.getEntity());

        // Deserialize both documents so the comparison ignores property ordering.
        final Map<?, ?> actual = mapper.readValue(responseJson, Map.class);
        final Map<?, ?> expected = mapper.readValue(expectedJson, Map.class);
        assertThat(actual).isEqualTo(expected);
    }
}

From source file:org.lieuofs.commune.biz.dao.CommuneOFSDaoTest.java

@Test
public void fusionCommune() {
    // Mutation N 2162 : Albeuve + Montbovon + Neirivue + Lessoc --> Haut-Intyamon
    assertFusion(2162, 5, 4, 1);

    // Mutation N 1562 : Lohn + Ammannsegg --> Lohn-Ammannsegg
    assertFusion(1562, 3, 2, 1);
}

/**
 * Asserts the invariants of a commune-merger mutation: the total number of
 * communes involved, how many were struck off (radiation) and how many were
 * created, plus the corresponding registration/deregistration types.
 *
 * @param numMutation      OFS mutation number to look up
 * @param expectedTotal    total communes returned for the mutation
 * @param expectedRadiees  communes expected to be struck off
 * @param expectedCreees   communes expected to be newly created
 */
private void assertFusion(int numMutation, int expectedTotal, int expectedRadiees, int expectedCreees) {
    List<PersistCommune> communes = dao.getMutation(numMutation);
    assertEquals("Nbre commune", expectedTotal, communes.size());

    List<PersistCommune> communesRadiees = getCommunesRadiees(communes, numMutation);
    assertEquals("Nbre communes radies", expectedRadiees, communesRadiees.size());
    typeRadiationIn(EnumSet.of(RADIATION), communesRadiees);

    List<PersistCommune> communesInscrites = getCommunesCrees(communes, numMutation);
    assertEquals("Nbre communes cres", expectedCreees, communesInscrites.size());
    typeInscriptionIn(EnumSet.of(CREATION), communesInscrites);
}

From source file:com.mellanox.r4h.TestHFlush.java

/**
 * Test hsync (with updating block length in NameNode) while no data is
 * actually written yet/* ww w.j a  v a 2  s.  c  o m*/
 */
@Test
public void hSyncUpdateLength_00() throws IOException {
    Configuration conf = new HdfsConfiguration();
    MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
    DistributedFileSystem fileSystem = (DistributedFileSystem) cluster.getFileSystem();

    try {
        Path path = new Path(fName);
        // try-with-resources: the original called stm.close() after the assertion,
        // leaking the stream whenever the assertion failed.
        try (FSDataOutputStream stm = fileSystem.create(path, true, 4096, (short) 2,
                MiniDFSClusterBridge.getAppendTestUtil_BLOCK_SIZE())) {
            System.out.println("Created file " + path.toString());
            // hsync with UPDATE_LENGTH asks the NameNode to record the current length.
            ((DFSOutputStream) stm.getWrappedStream()).hsync(EnumSet.of(SyncFlag.UPDATE_LENGTH));
            long currentFileLength = fileSystem.getFileStatus(path).getLen();
            // Nothing has been written yet, so the recorded length must be zero.
            assertEquals(0L, currentFileLength);
        }
    } finally {
        fileSystem.close();
        cluster.shutdown();
    }
}

From source file:io.fabric8.apiman.Fabric8ManagerApiMicroService.java

@Override
protected void addModulesToJetty(HandlerCollection handlers) throws Exception {
    super.addModulesToJetty(handlers);

    // Replace the default apimanUiServer handler with our own servlet context.
    final ServletContextHandler uiContext = new ServletContextHandler(ServletContextHandler.SESSIONS);
    addSecurityHandler(uiContext);
    uiContext.setContextPath("/apimanui");
    uiContext.addFilter(ApimanCorsFilter.class, "/*", EnumSet.of(DispatcherType.REQUEST));

    // Dynamic servlets must be registered before the static-content default servlet.
    uiContext.addServlet(new ServletHolder(new LinkServlet()), "/link");
    uiContext.addServlet(new ServletHolder(new ConfigurationServlet()), "/apiman/config.js");
    uiContext.addServlet(new ServletHolder(new TranslationServlet()), "/apiman/translations.js");

    // Locate the static content in the apimanui war by stripping the trailing
    // "index.html" (10 characters) from the resource URL.
    final String indexFile = this.getClass().getClassLoader().getResource("apimanui/index.html")
            .toExternalForm();
    final String webDir = indexFile.substring(0, indexFile.length() - 10);
    uiContext.setInitParameter("org.eclipse.jetty.servlet.Default.resourceBase", webDir);
    uiContext.setInitParameter("org.eclipse.jetty.servlet.Default.dirAllowed", "false");
    uiContext.addServlet(new ServletHolder("default", new DefaultServlet()), "/*");

    // Rewrite the angularjs application paths back to index.html.
    final RewriteHandler rewriter = new RewriteHandler();
    rewriter.setRewriteRequestURI(true);
    rewriter.setRewritePathInfo(false);
    rewriter.setOriginalPathAttribute("requestedPath");

    // Deep links inside the api-manager app.
    final RewriteRegexRule deepLinkRule = new RewriteRegexRule();
    deepLinkRule.setRegex("/apimanui/api-manager/.*");
    deepLinkRule.setReplacement("/apimanui/index.html");
    rewriter.addRule(deepLinkRule);

    // The app root, with or without trailing slash.
    final RewriteRegexRule rootRule = new RewriteRegexRule();
    rootRule.setRegex("/apimanui/api-manager/|/apimanui/|/apimanui");
    rootRule.setReplacement("/apimanui/index.html");
    rewriter.addRule(rootRule);

    rewriter.setHandler(uiContext);
    handlers.setHandlers(new Handler[] { handlers.getHandlers()[0], rewriter });
}

From source file:ch.cyberduck.core.worker.SingleTransferWorkerTest.java

@Test
public void testTransferredSizeRepeat() throws Exception {
    // Create a 62768-byte local file of random data to upload.
    final Local local = new Local(System.getProperty("java.io.tmpdir"), UUID.randomUUID().toString());
    final byte[] content = new byte[62768];
    new Random().nextBytes(content);
    final OutputStream out = local.getOutputStream(false);
    IOUtils.write(content, out);
    out.close();
    final Host host = new Host(new DAVProtocol(), "test.cyberduck.ch",
            new Credentials(System.getProperties().getProperty("webdav.user"),
                    System.getProperties().getProperty("webdav.password")));
    host.setDefaultPath("/dav/basic");
    // Flipped to true once the injected failure below has fired.
    final AtomicBoolean failed = new AtomicBoolean();
    // Session whose upload feature fails the first attempt with a timeout after
    // 32768 bytes, forcing the transfer worker to repeat the upload.
    final DAVSession session = new DAVSession(host) {
        final DAVUploadFeature upload = new DAVUploadFeature(new DAVWriteFeature(this)) {
            @Override
            protected InputStream decorate(final InputStream in, final MessageDigest digest)
                    throws IOException {
                if (failed.get()) {
                    // Second attempt successful
                    return in;
                }
                // First attempt: throw a timeout once 32768 bytes have been read.
                return new CountingInputStream(in) {
                    @Override
                    protected void beforeRead(final int n) throws IOException {
                        super.beforeRead(n);
                        if (this.getByteCount() >= 32768L) {
                            failed.set(true);
                            throw new SocketTimeoutException();
                        }
                    }
                };
            }
        };

        @Override
        @SuppressWarnings("unchecked")
        public <T> T _getFeature(final Class<T> type) {
            if (type == Upload.class) {
                return (T) upload;
            }
            return super._getFeature(type);
        }
    };
    session.open(new DisabledHostKeyCallback());
    session.login(new DisabledPasswordStore(), new DisabledLoginCallback(), new DisabledCancelCallback());
    final Path test = new Path(new DefaultHomeFinderService(session).find(), UUID.randomUUID().toString(),
            EnumSet.of(Path.Type.file));
    final Transfer t = new UploadTransfer(new Host(new TestProtocol()), test, local);
    final BytecountStreamListener counter = new BytecountStreamListener(new DisabledStreamListener());
    assertTrue(new SingleTransferWorker(session, session, t, new TransferOptions(), new TransferSpeedometer(t),
            new DisabledTransferPrompt() {
                @Override
                public TransferAction prompt(final TransferItem file) {
                    return TransferAction.overwrite;
                }
            }, new DisabledTransferErrorCallback(), new DisabledProgressListener(), counter,
            new DisabledLoginCallback(), new DisabledPasswordCallback(), TransferItemCache.empty()) {

    }.run(session, session));
    local.delete();
    // The counter must report the full file size despite the failed first attempt,
    // and the remote file must have the full size as well.
    assertEquals(62768L, counter.getSent(), 0L);
    assertEquals(62768L, new DAVAttributesFinderFeature(session).find(test).getSize());
    // Sanity check: the injected failure must actually have been triggered.
    assertTrue(failed.get());
    new DAVDeleteFeature(session).delete(Collections.singletonList(test), new DisabledLoginCallback(),
            new Delete.DisabledCallback());
}

From source file:ch.cyberduck.core.dav.DAVUploadFeatureTest.java

@Test
public void testAppend() throws Exception {
    // Connect to the test WebDAV server.
    final Host host = new Host(new DAVProtocol(), "test.cyberduck.ch",
            new Credentials(System.getProperties().getProperty("webdav.user"),
                    System.getProperties().getProperty("webdav.password")));
    host.setDefaultPath("/dav/basic");
    final DAVSession session = new DAVSession(host);
    session.open(new DisabledHostKeyCallback());
    session.login(new DisabledPasswordStore(), new DisabledLoginCallback(), new DisabledCancelCallback());

    // Write random test data to a local temporary file.
    final Local local = new Local(System.getProperty("java.io.tmpdir"), UUID.randomUUID().toString());
    final int length = 32770;
    final byte[] content = RandomUtils.nextBytes(length);
    final OutputStream out = local.getOutputStream(false);
    IOUtils.write(content, out);
    out.close();

    final Path test = new Path(new DefaultHomeFinderService(session).find(), UUID.randomUUID().toString(),
            EnumSet.of(Path.Type.file));
    final int half = content.length / 2;
    // Upload the first half of the file...
    {
        final TransferStatus status = new TransferStatus().length(half);
        new DAVUploadFeature(new DAVWriteFeature(session)).upload(test, local,
                new BandwidthThrottle(BandwidthThrottle.UNLIMITED), new DisabledStreamListener(), status,
                new DisabledConnectionCallback());
    }
    // ...then append the second half.
    {
        final TransferStatus status = new TransferStatus().length(half).skip(half).append(true);
        new DAVUploadFeature(new DAVWriteFeature(session)).upload(test, local,
                new BandwidthThrottle(BandwidthThrottle.UNLIMITED), new DisabledStreamListener(), status,
                new DisabledConnectionCallback());
    }

    // Read the remote file back and verify it matches the original content.
    final byte[] buffer = new byte[content.length];
    final InputStream in = new DAVReadFeature(session).read(test, new TransferStatus().length(content.length),
            new DisabledConnectionCallback());
    IOUtils.readFully(in, buffer);
    in.close();
    assertArrayEquals(content, buffer);

    new DAVDeleteFeature(session).delete(Collections.singletonList(test), new DisabledLoginCallback(),
            new Delete.DisabledCallback());
    local.delete();
    session.close();
}

From source file:ch.cyberduck.core.irods.IRODSReadFeatureTest.java

@Test(expected = NotfoundException.class)
public void testReadNotFound() throws Exception {
    // Open an iRODS session using the bundled connection profile.
    final ProtocolFactory factory = new ProtocolFactory(
            new HashSet<>(Collections.singleton(new IRODSProtocol())));
    final Profile profile = new ProfilePlistReader(factory)
            .read(new Local("../profiles/iRODS (iPlant Collaborative).cyberduckprofile"));
    final Host host = new Host(profile, profile.getDefaultHostname(),
            new Credentials(System.getProperties().getProperty("irods.key"),
                    System.getProperties().getProperty("irods.secret")));
    final IRODSSession session = new IRODSSession(host);
    session.open(new DisabledHostKeyCallback());
    session.login(new DisabledPasswordStore(), new DisabledLoginCallback(), new DisabledCancelCallback());

    // A random name in the home collection that is confirmed not to exist...
    final Path missing = new Path(new IRODSHomeFinderService(session).find(), UUID.randomUUID().toString(),
            EnumSet.of(Path.Type.file));
    assertFalse(session.getFeature(Find.class).find(missing));

    // ...so reading it must raise NotfoundException (expected by the @Test annotation).
    new IRODSReadFeature(session).read(missing, new TransferStatus(), new DisabledConnectionCallback());
}

From source file:ch.cyberduck.core.onedrive.OneDriveWriteFeatureTest.java

@Test
public void testWriteUmlautZeroLength() throws Exception {
    final OneDriveWriteFeature writer = new OneDriveWriteFeature(session);
    final Path home = new OneDriveHomeFinderFeature(session).find();
    // Zero-byte payload: the case under test is an empty upload.
    final byte[] payload = RandomUtils.nextBytes(0);
    final TransferStatus status = new TransferStatus();
    status.setLength(payload.length);
    final Path file = new Path(home, String.format("%s", new AlphanumericRandomStringService().random()),
            EnumSet.of(Path.Type.file));

    // Upload the (empty) content.
    final HttpResponseOutputStream<Void> out = writer.write(file, status, new DisabledConnectionCallback());
    final ByteArrayInputStream in = new ByteArrayInputStream(payload);
    assertEquals(payload.length, IOUtils.copyLarge(in, out));
    in.close();
    out.close();
    assertNull(out.getStatus());

    // The empty file must exist remotely and read back as empty.
    assertTrue(new DefaultFindFeature(session).find(file));
    final byte[] readBack = new byte[payload.length];
    final InputStream stream = new OneDriveReadFeature(session).read(file,
            new TransferStatus().length(payload.length), new DisabledConnectionCallback());
    IOUtils.readFully(stream, readBack);
    stream.close();
    assertArrayEquals(payload, readBack);

    new OneDriveDeleteFeature(session).delete(Collections.singletonList(file), new DisabledLoginCallback(),
            new Delete.DisabledCallback());
}

From source file:be.nille.generator.parser.ParserService.java

/**
 * Generates the Java source of an entity class (package declaration, private
 * fields, and getter/setter pairs) from the given parser data and prints the
 * result to standard output.
 *
 * @param parserData entity name, package and field definitions to render
 * @throws IOException if the underlying JavaWriter fails to emit
 */
public void parseEntityData(EntityParserData parserData) throws IOException {
    final StringWriter sw = new StringWriter();
    // try-with-resources: the original leaked the JavaWriter when any emit call
    // threw before writer.close() was reached.
    try (JavaWriter writer = new JavaWriter(sw)) {
        final String entityName = ParserUtils.createEntityName(parserData.getEntityData().getName());

        writer.emitPackage(parserData.getEntityData().getPackageName())
                .beginType(entityName, "class", EnumSet.of(PUBLIC)).emitEmptyLine();

        // Emit one private field per parsed field definition.
        for (FieldParserData fieldData : parserData.getData()) {
            writer.emitField(fieldData.getFieldData().getType(),
                    ParserUtils.createFieldName(fieldData.getFieldData().getName()), EnumSet.of(PRIVATE));
        }
        writer.emitEmptyLine();

        // Emit a public getter/setter pair for every field.
        for (FieldParserData fieldData : parserData.getData()) {
            final String fieldType = fieldData.getFieldData().getType();
            final String fieldName = fieldData.getFieldData().getName();

            // the getter
            writer.beginMethod(fieldType, GET_PREFIX + upperCaseFirst(fieldName), EnumSet.of(PUBLIC))
                    .emitStatement("return " + fieldName).endMethod();

            // the setter
            writer.beginMethod("void", SET_PREFIX + upperCaseFirst(fieldName), EnumSet.of(PUBLIC), fieldType,
                    fieldName).emitStatement("this." + fieldName + " = " + fieldName).endMethod();
        }

        writer.endType();
    }
    System.out.print(sw.toString());
}

From source file:com.opengamma.integration.tool.marketdata.MarketDataSnapshotTool.java

@Override
protected void doRun() throws Exception {
    final String viewDefinitionName = getCommandLine().getOptionValue(VIEW_NAME_OPTION);

    // Resolve the valuation instant: the command-line time-of-day (applied to
    // the current date, truncated to seconds) when given, otherwise "now".
    final String valuationTimeArg = getCommandLine().getOptionValue(VALUATION_TIME_OPTION);
    Instant valuationInstant;
    if (!StringUtils.isBlank(valuationTimeArg)) {
        final LocalTime valuationTime = LocalTime.parse(valuationTimeArg, VALUATION_TIME_FORMATTER);
        valuationInstant = ZonedDateTime.now().with(valuationTime.truncatedTo(SECONDS)).toInstant();
    } else {
        valuationInstant = Instant.now();
    }
    final boolean historicalInput = getCommandLine().hasOption(HISTORICAL_OPTION);

    // Choose the market data source: latest historical data when the
    // historical flag is set, otherwise live market data.
    final MarketDataSpecification marketDataSpecification = historicalInput
            ? new LatestHistoricalMarketDataSpecification()
            : MarketData.live();
    final ViewExecutionOptions viewExecutionOptions = ExecutionOptions.singleCycle(valuationInstant,
            marketDataSpecification, EnumSet.of(ViewExecutionFlags.AWAIT_MARKET_DATA));

    // Locate the remote components needed for the snapshot; warn and bail out
    // early when any of them is missing.
    final List<RemoteViewProcessor> viewProcessors = getRemoteComponentFactory().getViewProcessors();
    if (viewProcessors.size() == 0) {
        s_logger.warn("No view processors found at {}", getRemoteComponentFactory().getBaseUri());
        return;
    }
    final MarketDataSnapshotMaster marketDataSnapshotMaster = getRemoteComponentFactory()
            .getMarketDataSnapshotMaster(DEFAULT_PREFERRED_CLASSIFIERS);
    if (marketDataSnapshotMaster == null) {
        s_logger.warn("No market data snapshot masters found at {}", getRemoteComponentFactory().getBaseUri());
        return;
    }
    final Collection<ConfigMaster> configMasters = getRemoteComponentFactory().getConfigMasters().values();
    if (configMasters.size() == 0) {
        s_logger.warn("No config masters found at {}", getRemoteComponentFactory().getBaseUri());
        return;
    }

    final RemoteViewProcessor viewProcessor = viewProcessors.get(0);
    final MarketDataSnapshotter marketDataSnapshotter = viewProcessor.getMarketDataSnapshotter();

    // Collect matching view definitions from every config master.
    Set<ConfigDocument> viewDefinitions = Sets.newHashSet();

    for (final ConfigMaster configMaster : configMasters) {
        final ConfigSearchRequest<ViewDefinition> request = new ConfigSearchRequest<ViewDefinition>(
                ViewDefinition.class);
        request.setName(viewDefinitionName);
        Iterables.addAll(viewDefinitions, ConfigSearchIterator.iterable(configMaster, request));
    }

    // The name must resolve to exactly one view definition.
    if (viewDefinitions.isEmpty()) {
        endWithError("Unable to resolve any view definitions with name '%s'", viewDefinitionName);
    }

    if (viewDefinitions.size() > 1) {
        endWithError("Multiple view definitions resolved when searching for string '%s': %s",
                viewDefinitionName, viewDefinitions);
    }
    ConfigItem<?> value = Iterables.get(viewDefinitions, 0).getValue();
    // Run a single view cycle and capture the resulting market data snapshot.
    StructuredMarketDataSnapshot snapshot = makeSnapshot(marketDataSnapshotter, viewProcessor,
            (ViewDefinition) value.getValue(), viewExecutionOptions);

    // Persist the snapshot under "<basis view name>/<valuation instant>".
    final ManageableMarketDataSnapshot manageableMarketDataSnapshot = new ManageableMarketDataSnapshot(
            snapshot);
    manageableMarketDataSnapshot.setName(snapshot.getBasisViewName() + "/" + valuationInstant);
    marketDataSnapshotMaster.add(new MarketDataSnapshotDocument(manageableMarketDataSnapshot));
}