Example usage for java.util.EnumSet.allOf

List of usage examples for java.util.EnumSet.allOf

Introduction

On this page you can find example usages of java.util.EnumSet.allOf, collected from open-source projects.

Prototype

public static <E extends Enum<E>> EnumSet<E> allOf(Class<E> elementType) 

Document

Creates an enum set containing all of the elements in the specified element type.
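
As a quick, self-contained sketch (using the standard java.time.DayOfWeek enum purely for illustration), allOf builds a set containing every constant of the given enum type, which you can then trim down as needed:

import java.time.DayOfWeek;
import java.util.EnumSet;

public class EnumSetAllOfExample {
    public static void main(String[] args) {
        // Every constant of DayOfWeek (MONDAY through SUNDAY).
        EnumSet<DayOfWeek> allDays = EnumSet.allOf(DayOfWeek.class);
        System.out.println(allDays.size());                     // 7
        System.out.println(allDays.contains(DayOfWeek.MONDAY)); // true

        // A common pattern: start from all constants and remove the unwanted ones.
        EnumSet<DayOfWeek> weekdays = EnumSet.allOf(DayOfWeek.class);
        weekdays.removeAll(EnumSet.of(DayOfWeek.SATURDAY, DayOfWeek.SUNDAY));
        System.out.println(weekdays.size());                    // 5
    }
}

The real-world examples below follow the same idea, typically passing EnumSet.allOf(SomeEnum.class) to an API that expects a set of enum values (for example, all DispatcherType values when registering a servlet filter, or all IteratorScope values when attaching an Accumulo iterator).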

Usage

From source file:org.apache.nifi.web.server.JettyServer.java

@Override
public void start() {
    try {
        // start the server
        server.start();

        // ensure everything started successfully
        for (Handler handler : server.getChildHandlers()) {
            // see if the handler is a web app
            if (handler instanceof WebAppContext) {
                WebAppContext context = (WebAppContext) handler;

                // see if this webapp had any exceptions that would
                // cause it to be unavailable
                if (context.getUnavailableException() != null) {
                    startUpFailure(context.getUnavailableException());
                }
            }
        }

        // ensure the appropriate wars deployed successfully before injecting the NiFi context and security filters
        // this must be done after starting the server (and ensuring there were no start up failures)
        if (webApiContext != null) {
            // give the web api the component ui extensions
            final ServletContext webApiServletContext = webApiContext.getServletHandler().getServletContext();
            webApiServletContext.setAttribute("nifi-ui-extensions", componentUiExtensions);

            // get the application context
            final WebApplicationContext webApplicationContext = WebApplicationContextUtils
                    .getRequiredWebApplicationContext(webApiServletContext);

            // component ui extensions
            if (CollectionUtils.isNotEmpty(componentUiExtensionWebContexts)) {
                final NiFiWebConfigurationContext configurationContext = webApplicationContext
                        .getBean("nifiWebConfigurationContext", NiFiWebConfigurationContext.class);

                for (final WebAppContext customUiContext : componentUiExtensionWebContexts) {
                    // set the NiFi context in each custom ui servlet context
                    final ServletContext customUiServletContext = customUiContext.getServletHandler()
                            .getServletContext();
                    customUiServletContext.setAttribute("nifi-web-configuration-context", configurationContext);

                    // add the security filter to any ui extensions wars
                    final FilterHolder securityFilter = webApiContext.getServletHandler()
                            .getFilter("springSecurityFilterChain");
                    if (securityFilter != null) {
                        customUiContext.addFilter(securityFilter, "/*", EnumSet.allOf(DispatcherType.class));
                    }
                }
            }

            // content viewer extensions
            if (CollectionUtils.isNotEmpty(contentViewerWebContexts)) {
                for (final WebAppContext contentViewerContext : contentViewerWebContexts) {
                    // add the security filter to any content viewer  wars
                    final FilterHolder securityFilter = webApiContext.getServletHandler()
                            .getFilter("springSecurityFilterChain");
                    if (securityFilter != null) {
                        contentViewerContext.addFilter(securityFilter, "/*",
                                EnumSet.allOf(DispatcherType.class));
                    }
                }
            }

            // content viewer controller
            if (webContentViewerContext != null) {
                final ContentAccess contentAccess = webApplicationContext.getBean("contentAccess",
                        ContentAccess.class);

                // add the content access
                final ServletContext webContentViewerServletContext = webContentViewerContext
                        .getServletHandler().getServletContext();
                webContentViewerServletContext.setAttribute("nifi-content-access", contentAccess);

                final FilterHolder securityFilter = webApiContext.getServletHandler()
                        .getFilter("springSecurityFilterChain");
                if (securityFilter != null) {
                    webContentViewerContext.addFilter(securityFilter, "/*",
                            EnumSet.allOf(DispatcherType.class));
                }
            }
        }

        // ensure the web document war was loaded and provide the extension mapping
        if (webDocsContext != null) {
            final ServletContext webDocsServletContext = webDocsContext.getServletHandler().getServletContext();
            webDocsServletContext.setAttribute("nifi-extension-mapping", extensionMapping);
        }

        // if this nifi is a node in a cluster, start the flow service and load the flow - the
        // flow service is loaded here for clustered nodes because the loading of the flow will
        // initialize the connection between the node and the NCM. if the node connects (starts
        // heartbeating, etc), the NCM may issue web requests before the application (wars) have
        // finished loading. this results in the node being disconnected since its unable to
        // successfully respond to the requests. to resolve this, flow loading was moved to here
        // (after the wars have been successfully deployed) when this nifi instance is a node
        // in a cluster
        if (props.isNode()) {

            FlowService flowService = null;
            try {

                logger.info("Loading Flow...");

                ApplicationContext ctx = WebApplicationContextUtils
                        .getWebApplicationContext(webApiContext.getServletContext());
                flowService = ctx.getBean("flowService", FlowService.class);

                // start and load the flow
                flowService.start();
                flowService.load(null);

                logger.info("Flow loaded successfully.");

            } catch (BeansException | LifeCycleStartException | IOException | FlowSerializationException
                    | FlowSynchronizationException | UninheritableFlowException e) {
                // ensure the flow service is terminated
                if (flowService != null && flowService.isRunning()) {
                    flowService.stop(false);
                }
                throw new Exception("Unable to load flow due to: " + e, e);
            }
        }

        // dump the application url after confirming everything started successfully
        dumpUrls();
    } catch (Exception ex) {
        startUpFailure(ex);
    }
}

From source file:com.isentropy.accumulo.collections.AccumuloSortedMap.java

/**
 * This method enables one-to-many mapping. It uses Accumulo's VersioningIterator. 
 * There is no way to delete a single value. You can only delete the key. 
 * See multiEntryIterator() and getAll(key), which read the multiple values.
 *
 * @param max_values_per_key the maximum number of values that will be persisted for each key, or -1 for unlimited
 * @return
 * @throws AccumuloSecurityException
 * @throws AccumuloException
 * @throws TableNotFoundException
 */
public AccumuloSortedMap<K, V> setMaxValuesPerKey(int max_values_per_key)
        throws AccumuloSecurityException, AccumuloException, TableNotFoundException {
    if (isReadOnly())
        throw new UnsupportedOperationException("must set multiMap on base map, not derived map");

    int existingMM = getMaxValuesPerKey();
    if (max_values_per_key == existingMM) {
        log.info("setMultiMap doing nothing because max_values_per_key is already set to " + existingMM);
        return this;
    }

    EnumSet<IteratorScope> all = EnumSet.allOf(IteratorScope.class);
    //this.max_values_per_key = max_values_per_key;
    if (getConnector().tableOperations().listIterators(getTable()).containsKey(ITERATOR_NAME_VERSIONING)) {
        getConnector().tableOperations().removeIterator(getTable(), ITERATOR_NAME_VERSIONING, all);
        log.info("Removed versioning iterator for table " + getTable());
    }

    if (max_values_per_key > 0) {
        IteratorSetting is = new IteratorSetting(ITERATOR_PRIORITY_VERSIONING, ITERATOR_NAME_VERSIONING,
                VersioningIterator.class);
        VersioningIterator.setMaxVersions(is, max_values_per_key);
        log.info("attaching versioning iterator for table " + getTable() + " with max version = "
                + max_values_per_key);
        getConnector().tableOperations().attachIterator(getTable(), is, all);
    }
    return this;
}

From source file:org.ambraproject.annotation.service.AnnotationServiceTest.java

@Test
public void testCountComments() {
    UserProfile user = new UserProfile("authIdForTestCountComments", "email@testCountComments.org",
            "displayNameTestCountComments");
    dummyDataStore.store(user);
    Article article = new Article("id:doi-test-count-comments");
    dummyDataStore.store(article);

    Long commentId = Long
            .valueOf(dummyDataStore.store(new Annotation(user, AnnotationType.COMMENT, article.getID())));
    dummyDataStore.store(new Annotation(user, AnnotationType.FORMAL_CORRECTION, article.getID()));
    dummyDataStore.store(new Annotation(user, AnnotationType.MINOR_CORRECTION, article.getID()));
    dummyDataStore.store(new Annotation(user, AnnotationType.RETRACTION, article.getID()));
    dummyDataStore.store(new Annotation(user, AnnotationType.NOTE, article.getID()));
    dummyDataStore.store(new Rating(user, article.getID()));

    Annotation reply = new Annotation(user, AnnotationType.REPLY, article.getID());
    reply.setParentID(commentId);
    dummyDataStore.store(reply);

    assertEquals(
            annotationService.countAnnotations(article.getID(),
                    EnumSet.of(AnnotationType.NOTE, AnnotationType.COMMENT)),
            2, "annotation service returned incorrect count of comments and notes");
    assertEquals(
            annotationService.countAnnotations(article.getID(),
                    EnumSet.of(AnnotationType.FORMAL_CORRECTION, AnnotationType.MINOR_CORRECTION,
                            AnnotationType.RETRACTION)),
            3, "annotation service returned incorrect count of corrections");
    assertEquals(annotationService.countAnnotations(article.getID(), EnumSet.allOf(AnnotationType.class)), 7,
            "annotation service returned incorrect count of comments and notes");
}

From source file:org.apache.hadoop.yarn.server.resourcemanager.ClientRMService.java

@Override
public GetClusterNodesResponse getClusterNodes(GetClusterNodesRequest request) throws YarnException {
    GetClusterNodesResponse response = recordFactory.newRecordInstance(GetClusterNodesResponse.class);
    EnumSet<NodeState> nodeStates = request.getNodeStates();
    if (nodeStates == null || nodeStates.isEmpty()) {
        nodeStates = EnumSet.allOf(NodeState.class);
    }
    Collection<RMNode> nodes = RMServerUtils.queryRMNodes(rmContext, nodeStates);

    List<NodeReport> nodeReports = new ArrayList<NodeReport>(nodes.size());
    for (RMNode nodeInfo : nodes) {
        nodeReports.add(createNodeReports(nodeInfo));
    }
    response.setNodeReports(nodeReports);
    return response;
}

From source file:com.isentropy.accumulo.collections.AccumuloSortedMap.java

/**
 *
 * @param timeout the entry timeout in ms. If timeout <= 0, the ageoff feature will be removed
 * @return
 * @throws AccumuloSecurityException
 * @throws AccumuloException
 * @throws TableNotFoundException
 */
public AccumuloSortedMap<K, V> setTimeOutMs(long timeout) {
    if (isReadOnly())
        throw new UnsupportedOperationException();
    try {
        EnumSet<IteratorScope> all = EnumSet.allOf(IteratorScope.class);
        getConnector().tableOperations().removeIterator(getTable(), ITERATOR_NAME_AGEOFF, all);
        log.info("Removed timeout for table " + getTable());
        if (timeout > 0) {
            IteratorSetting is = new IteratorSetting(ITERATOR_PRIORITY_AGEOFF, ITERATOR_NAME_AGEOFF,
                    AgeOffFilter.class);
            AgeOffFilter.setNegate(is, false);
            AgeOffFilter.setTTL(is, timeout);
            log.info("Set timeout for table " + getTable() + ": " + timeout + " ms");

            try {
                getConnector().tableOperations().attachIterator(getTable(), is, all);
            } catch (IllegalArgumentException e) {
                log.warn("Attach Iterator threw exception: " + e
                        + "\nThis probably means the iterator was already set.");
            }
        } else {
            log.warn("Disabling entry timeout");
            if (getConnector().tableOperations().listIterators(getTable()).containsKey(ITERATOR_NAME_AGEOFF))
                getConnector().tableOperations().removeIterator(getTable(), ITERATOR_NAME_AGEOFF, all);
        }
        return this;
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}

From source file:org.silverpeas.migration.jcr.service.repository.DocumentRepositoryTest.java

/**
 * Test of listAttachmentsByForeignIdAndDocumentType method, of class DocumentRepository.
 */
@Test
public void listAttachmentsByForeignIdAndDocumentType() throws Exception {
    new JcrDocumentRepositoryTest() {
        @Override
        public void run(final Session session) throws Exception {
            Set<String> createdIds = new HashSet<String>();
            // No WYSIWYG content exists
            List<String> wysiwygFIdLangFilenames = extractForeignIdLanguageFilenames(getDocumentRepository()
                    .listAttachmentsByForeignIdAndDocumentType(session, instanceId, "", wysiwyg));
            assertThat(wysiwygFIdLangFilenames, notNullValue());
            assertThat(wysiwygFIdLangFilenames, hasSize(0));

            // Creating an FR "attachment" content.
            String createdUuid = createAttachmentForTest(
                    defaultDocumentBuilder("fId_1").setDocumentType(attachment), defaultFRContentBuilder(),
                    "fId_1_fr").getId();
            createdIds.add(createdUuid);
            SimpleDocument enDocument = getDocumentById(createdUuid, "en");
            assertThat(enDocument, notNullValue());
            assertThat(enDocument.getAttachment(), nullValue());
            SimpleDocument frDocument = getDocumentById(createdUuid, "fr");
            assertThat(frDocument, notNullValue());
            assertThat(frDocument.getAttachment(), notNullValue());
            assertThat(frDocument.getDocumentType(), is(attachment));
            wysiwygFIdLangFilenames = extractForeignIdLanguageFilenames(getDocumentRepository()
                    .listAttachmentsByForeignIdAndDocumentType(session, instanceId, "fId_1", attachment));
            assertThat(wysiwygFIdLangFilenames, hasSize(1));
            assertThat(wysiwygFIdLangFilenames, contains("fId_1|fr|test.odp"));

            // Updating attachment with EN content.
            setEnData(frDocument);
            updateAttachmentForTest(frDocument, "en", "fId_1_en");
            createdIds.add(frDocument.getId());

            // Verifying the attachment exists in both of the tested languages.
            enDocument = getDocumentById(createdUuid, "en");
            assertThat(enDocument, notNullValue());
            assertThat(enDocument.getAttachment(), notNullValue());
            assertThat(enDocument.getDocumentType(), is(attachment));
            checkEnglishSimpleDocument(enDocument);
            frDocument = getDocumentById(createdUuid, "fr");
            assertThat(frDocument, notNullValue());
            assertThat(frDocument.getAttachment(), notNullValue());
            assertThat(frDocument.getDocumentType(), is(attachment));
            checkFrenchSimpleDocument(frDocument);

            // No WYSIWYG: that is what is expected
            wysiwygFIdLangFilenames = extractForeignIdLanguageFilenames(getDocumentRepository()
                    .listAttachmentsByForeignIdAndDocumentType(session, instanceId, "fId_1", wysiwyg));
            assertThat(wysiwygFIdLangFilenames, hasSize(0));
            wysiwygFIdLangFilenames = extractForeignIdLanguageFilenames(getDocumentRepository()
                    .listAttachmentsByForeignIdAndDocumentType(session, instanceId, "fId_1", attachment));
            assertThat(wysiwygFIdLangFilenames, hasSize(2));
            assertThat(wysiwygFIdLangFilenames, containsInAnyOrder("fId_1|fr|test.odp", "fId_1|en|test.odp"));

            // Adding several documents, but no WYSIWYG
            Set<DocumentType> documentTypes = EnumSet.allOf(DocumentType.class);
            documentTypes.remove(DocumentType.wysiwyg);
            int id = 2;
            for (DocumentType documentType : documentTypes) {
                createdIds.add(createAttachmentForTest(
                        defaultDocumentBuilder("fId_" + id).setDocumentType(documentType),
                        defaultFRContentBuilder().setFilename("fId_" + id + "_wysiwyg_en.txt"),
                        "fId_" + id + "_fr").getId());
                id++;
            }

            // No WYSIWYG: that is what is expected
            wysiwygFIdLangFilenames = extractForeignIdLanguageFilenames(getDocumentRepository()
                    .listAttachmentsByForeignIdAndDocumentType(session, instanceId, "fId_1", wysiwyg));
            assertThat(wysiwygFIdLangFilenames, hasSize(0));

            // Number of expected created documents
            int nbDocuments = 1 + (DocumentType.values().length - 1);
            assertThat(createdIds.size(), is(nbDocuments));

            // Adding the first WYSIWYG EN content
            SimpleDocument createdDocument = createAttachmentForTest(
                    defaultDocumentBuilder("fId_26").setDocumentType(wysiwyg), defaultENContentBuilder(),
                    "fId_26_en");
            createdIds.add(createdDocument.getId());

            // One wrong WYSIWYG base name
            wysiwygFIdLangFilenames = extractForeignIdLanguageFilenames(getDocumentRepository()
                    .listAttachmentsByForeignIdAndDocumentType(session, instanceId, "fId_26", wysiwyg));
            assertThat(wysiwygFIdLangFilenames, hasSize(1));
            assertThat(wysiwygFIdLangFilenames, contains("fId_26|en|test.pdf"));

            // Updating wysiwyg file name
            createdDocument.setFilename("fId_26_wysiwyg_en.txt");
            updateAttachmentForTest(createdDocument);

            // One WYSIWYG base name
            wysiwygFIdLangFilenames = extractForeignIdLanguageFilenames(getDocumentRepository()
                    .listAttachmentsByForeignIdAndDocumentType(session, instanceId, "fId_26", wysiwyg));
            assertThat(wysiwygFIdLangFilenames, hasSize(1));
            assertThat(wysiwygFIdLangFilenames, contains("fId_26|en|fId_26_wysiwyg_en.txt"));

            // Adding the FR content to the first WYSIWYG document
            enDocument = getDocumentById(createdDocument.getId(), "en");
            setFrData(enDocument);
            enDocument.setFilename("fId_26_wysiwyg_fr.txt");
            updateAttachmentForTest(enDocument, "fr", "fId_26_fr");
            createdIds.add(enDocument.getId());

            // One WYSIWYG on one Component
            wysiwygFIdLangFilenames = extractForeignIdLanguageFilenames(getDocumentRepository()
                    .listAttachmentsByForeignIdAndDocumentType(session, instanceId, "fId_26", wysiwyg));
            assertThat(wysiwygFIdLangFilenames, hasSize(2));
            assertThat(wysiwygFIdLangFilenames,
                    containsInAnyOrder("fId_26|fr|fId_26_wysiwyg_fr.txt", "fId_26|en|fId_26_wysiwyg_en.txt"));

            // Adding the second WYSIWYG document (on same component)
            SimpleDocument secondCreatedDocument = createAttachmentForTest(
                    defaultDocumentBuilder("fId_27").setDocumentType(wysiwyg),
                    defaultFRContentBuilder().setFilename("fId_27_wysiwyg_fr.txt"), "fId_27_fr");
            createdIds.add(secondCreatedDocument.getId());

            // Two WYSIWYG on one Component
            wysiwygFIdLangFilenames = extractForeignIdLanguageFilenames(getDocumentRepository()
                    .listAttachmentsByForeignIdAndDocumentType(session, instanceId, "fId_27", wysiwyg));
            assertThat(wysiwygFIdLangFilenames, hasSize(1));
            assertThat(wysiwygFIdLangFilenames, contains("fId_27|fr|fId_27_wysiwyg_fr.txt"));

            // Updating wysiwyg file name
            setEnData(secondCreatedDocument);
            secondCreatedDocument.setFilename(secondCreatedDocument.getFilename());
            updateAttachmentForTest(secondCreatedDocument, "en", "fId_27_en");

            // Two WYSIWYG (each one in two languages) on one Component
            wysiwygFIdLangFilenames = extractForeignIdLanguageFilenames(getDocumentRepository()
                    .listAttachmentsByForeignIdAndDocumentType(session, instanceId, "fId_27", wysiwyg));
            assertThat(wysiwygFIdLangFilenames, hasSize(2));
            assertThat(wysiwygFIdLangFilenames,
                    contains("fId_27|fr|fId_27_wysiwyg_fr.txt", "fId_27|en|fId_27_wysiwyg_fr.txt"));

            assertThat(createdIds, hasSize(nbDocuments + 2));
        }
    }.execute();
}

From source file:org.apache.accumulo.test.proxy.SimpleProxyBase.java

@Test
public void namespaceNotFound() throws Exception {
    final String doesNotExist = "doesNotExists";
    try {
        client.deleteNamespace(creds, doesNotExist);
        fail("exception not thrown");
    } catch (NamespaceNotFoundException ex) {
    }
    try {
        client.renameNamespace(creds, doesNotExist, "abcdefg");
        fail("exception not thrown");
    } catch (NamespaceNotFoundException ex) {
    }
    try {
        client.setNamespaceProperty(creds, doesNotExist, "table.compaction.major.ratio", "4");
        fail("exception not thrown");
    } catch (NamespaceNotFoundException ex) {
    }
    try {
        client.removeNamespaceProperty(creds, doesNotExist, "table.compaction.major.ratio");
        fail("exception not thrown");
    } catch (NamespaceNotFoundException ex) {
    }
    try {
        client.getNamespaceProperties(creds, doesNotExist);
        fail("exception not thrown");
    } catch (NamespaceNotFoundException ex) {
    }
    try {
        IteratorSetting setting = new IteratorSetting(100, "DebugTheThings", DebugIterator.class.getName(),
                Collections.<String, String>emptyMap());
        client.attachNamespaceIterator(creds, doesNotExist, setting, EnumSet.allOf(IteratorScope.class));
        fail("exception not thrown");
    } catch (NamespaceNotFoundException ex) {
    }
    try {
        client.removeNamespaceIterator(creds, doesNotExist, "DebugTheThings",
                EnumSet.allOf(IteratorScope.class));
        fail("exception not thrown");
    } catch (NamespaceNotFoundException ex) {
    }
    try {
        client.getNamespaceIteratorSetting(creds, doesNotExist, "DebugTheThings", IteratorScope.SCAN);
        fail("exception not thrown");
    } catch (NamespaceNotFoundException ex) {
    }
    try {
        client.listNamespaceIterators(creds, doesNotExist);
        fail("exception not thrown");
    } catch (NamespaceNotFoundException ex) {
    }
    try {
        IteratorSetting setting = new IteratorSetting(100, "DebugTheThings", DebugIterator.class.getName(),
                Collections.<String, String>emptyMap());
        client.checkNamespaceIteratorConflicts(creds, doesNotExist, setting,
                EnumSet.allOf(IteratorScope.class));
        fail("exception not thrown");
    } catch (NamespaceNotFoundException ex) {
    }
    try {
        client.addNamespaceConstraint(creds, doesNotExist, MaxMutationSize.class.getName());
        fail("exception not thrown");
    } catch (NamespaceNotFoundException ex) {
    }
    try {
        client.removeNamespaceConstraint(creds, doesNotExist, 1);
        fail("exception not thrown");
    } catch (NamespaceNotFoundException ex) {
    }
    try {
        client.listNamespaceConstraints(creds, doesNotExist);
        fail("exception not thrown");
    } catch (NamespaceNotFoundException ex) {
    }
    try {
        client.testNamespaceClassLoad(creds, doesNotExist, DebugIterator.class.getName(),
                SortedKeyValueIterator.class.getName());
        fail("exception not thrown");
    } catch (NamespaceNotFoundException ex) {
    }
}

From source file:org.apache.accumulo.proxy.SimpleTest.java

@Test
public void testTableOperations() throws Exception {
    final String TABLE_TEST = makeTableName();

    client.createTable(creds, TABLE_TEST, true, TimeType.MILLIS);
    // constraints
    client.addConstraint(creds, TABLE_TEST, NumericValueConstraint.class.getName());
    assertEquals(2, client.listConstraints(creds, TABLE_TEST).size());
    client.updateAndFlush(creds, TABLE_TEST, mutation("row1", "cf", "cq", "123"));

    try {
        client.updateAndFlush(creds, TABLE_TEST, mutation("row1", "cf", "cq", "x"));
        fail("constraint did not fire");
    } catch (MutationsRejectedException ex) {
    }

    client.removeConstraint(creds, TABLE_TEST, 2);
    assertEquals(1, client.listConstraints(creds, TABLE_TEST).size());
    client.updateAndFlush(creds, TABLE_TEST, mutation("row1", "cf", "cq", "x"));
    String scanner = client.createScanner(creds, TABLE_TEST, null);
    ScanResult more = client.nextK(scanner, 2);
    client.closeScanner(scanner);
    assertFalse(more.isMore());
    assertEquals(1, more.getResults().size());
    assertEquals(s2bb("x"), more.getResults().get(0).value);
    // splits, merge
    client.addSplits(creds, TABLE_TEST,
            new HashSet<ByteBuffer>(Arrays.asList(s2bb("a"), s2bb("m"), s2bb("z"))));
    List<ByteBuffer> splits = client.listSplits(creds, TABLE_TEST, 1);
    assertEquals(Arrays.asList(s2bb("m")), splits);
    client.mergeTablets(creds, TABLE_TEST, null, s2bb("m"));
    splits = client.listSplits(creds, TABLE_TEST, 10);
    assertEquals(Arrays.asList(s2bb("m"), s2bb("z")), splits);
    client.mergeTablets(creds, TABLE_TEST, null, null);
    splits = client.listSplits(creds, TABLE_TEST, 10);
    List<ByteBuffer> empty = Collections.emptyList();
    assertEquals(empty, splits);
    // iterators
    client.deleteTable(creds, TABLE_TEST);
    client.createTable(creds, TABLE_TEST, true, TimeType.MILLIS);
    HashMap<String, String> options = new HashMap<String, String>();
    options.put("type", "STRING");
    options.put("columns", "cf");
    IteratorSetting setting = new IteratorSetting(10, TABLE_TEST, SummingCombiner.class.getName(), options);
    client.attachIterator(creds, TABLE_TEST, setting, EnumSet.allOf(IteratorScope.class));
    for (int i = 0; i < 10; i++) {
        client.updateAndFlush(creds, TABLE_TEST, mutation("row1", "cf", "cq", "1"));
    }
    scanner = client.createScanner(creds, TABLE_TEST, null);
    more = client.nextK(scanner, 2);
    client.closeScanner(scanner);
    assertEquals("10", new String(more.getResults().get(0).getValue()));
    try {
        client.checkIteratorConflicts(creds, TABLE_TEST, setting, EnumSet.allOf(IteratorScope.class));
        fail("checkIteratorConflicts did not throw an exception");
    } catch (Exception ex) {
    }
    client.deleteRows(creds, TABLE_TEST, null, null);
    client.removeIterator(creds, TABLE_TEST, "test", EnumSet.allOf(IteratorScope.class));
    for (int i = 0; i < 10; i++) {
        client.updateAndFlush(creds, TABLE_TEST, mutation("row" + i, "cf", "cq", "" + i));
        client.flushTable(creds, TABLE_TEST, null, null, true);
    }
    scanner = client.createScanner(creds, TABLE_TEST, null);
    more = client.nextK(scanner, 100);
    client.closeScanner(scanner);
    assertEquals(10, more.getResults().size());
    // clone
    final String TABLE_TEST2 = makeTableName();
    client.cloneTable(creds, TABLE_TEST, TABLE_TEST2, true, null, null);
    scanner = client.createScanner(creds, TABLE_TEST2, null);
    more = client.nextK(scanner, 100);
    client.closeScanner(scanner);
    assertEquals(10, more.getResults().size());
    client.deleteTable(creds, TABLE_TEST2);

    // don't know how to test this, call it just for fun
    client.clearLocatorCache(creds, TABLE_TEST);

    // compact
    client.compactTable(creds, TABLE_TEST, null, null, null, true, true);
    assertEquals(1, countFiles(TABLE_TEST));

    // get disk usage
    client.cloneTable(creds, TABLE_TEST, TABLE_TEST2, true, null, null);
    Set<String> tablesToScan = new HashSet<String>();
    tablesToScan.add(TABLE_TEST);
    tablesToScan.add(TABLE_TEST2);
    tablesToScan.add("foo");
    client.createTable(creds, "foo", true, TimeType.MILLIS);
    List<DiskUsage> diskUsage = (client.getDiskUsage(creds, tablesToScan));
    assertEquals(2, diskUsage.size());
    assertEquals(1, diskUsage.get(0).getTables().size());
    assertEquals(2, diskUsage.get(1).getTables().size());
    client.compactTable(creds, TABLE_TEST2, null, null, null, true, true);
    diskUsage = (client.getDiskUsage(creds, tablesToScan));
    assertEquals(3, diskUsage.size());
    assertEquals(1, diskUsage.get(0).getTables().size());
    assertEquals(1, diskUsage.get(1).getTables().size());
    assertEquals(1, diskUsage.get(2).getTables().size());
    client.deleteTable(creds, "foo");
    client.deleteTable(creds, TABLE_TEST2);

    // export/import
    String dir = folder.getRoot() + "/test";
    String destDir = folder.getRoot() + "/test_dest";
    client.offlineTable(creds, TABLE_TEST);
    client.exportTable(creds, TABLE_TEST, dir);
    // copy files to a new location
    FileSystem fs = FileSystem.get(new Configuration());
    FSDataInputStream is = fs.open(new Path(dir + "/distcp.txt"));
    BufferedReader r = new BufferedReader(new InputStreamReader(is));
    while (true) {
        String line = r.readLine();
        if (line == null)
            break;
        Path srcPath = new Path(line);
        FileUtils.copyFile(new File(srcPath.toUri().getPath()), new File(destDir, srcPath.getName()));
    }
    client.deleteTable(creds, TABLE_TEST);
    client.importTable(creds, "testify", destDir);
    scanner = client.createScanner(creds, "testify", null);
    more = client.nextK(scanner, 100);
    client.closeScanner(scanner);
    assertEquals(10, more.results.size());

    try {
        // ACCUMULO-1558 a second import from the same dir should fail, the first import moved the files
        client.importTable(creds, "testify2", destDir);
        fail();
    } catch (Exception e) {
    }

    assertFalse(client.listTables(creds).contains("testify2"));

    // Locality groups
    client.createTable(creds, "test", true, TimeType.MILLIS);
    Map<String, Set<String>> groups = new HashMap<String, Set<String>>();
    groups.put("group1", Collections.singleton("cf1"));
    groups.put("group2", Collections.singleton("cf2"));
    client.setLocalityGroups(creds, "test", groups);
    assertEquals(groups, client.getLocalityGroups(creds, "test"));
    // table properties
    Map<String, String> orig = client.getTableProperties(creds, "test");
    client.setTableProperty(creds, "test", "table.split.threshold", "500M");
    Map<String, String> update = client.getTableProperties(creds, "test");
    for (int i = 0; i < 5; i++) {
        if (update.get("table.split.threshold").equals("500M"))
            break;
        UtilWaitThread.sleep(200);
    }
    assertEquals(update.get("table.split.threshold"), "500M");
    client.removeTableProperty(creds, "test", "table.split.threshold");
    update = client.getTableProperties(creds, "test");
    assertEquals(orig, update);
    // rename table
    Map<String, String> tables = client.tableIdMap(creds);
    client.renameTable(creds, "test", "bar");
    Map<String, String> tables2 = client.tableIdMap(creds);
    assertEquals(tables.get("test"), tables2.get("bar"));
    // table exists
    assertTrue(client.tableExists(creds, "bar"));
    assertFalse(client.tableExists(creds, "test"));
    // bulk import
    String filename = dir + "/bulk/import/rfile.rf";
    FileSKVWriter writer = FileOperations.getInstance().openWriter(filename, fs, fs.getConf(),
            DefaultConfiguration.getInstance());
    writer.startDefaultLocalityGroup();
    writer.append(new org.apache.accumulo.core.data.Key(new Text("a"), new Text("b"), new Text("c")),
            new Value("value".getBytes()));
    writer.close();
    fs.mkdirs(new Path(dir + "/bulk/fail"));
    client.importDirectory(creds, "bar", dir + "/bulk/import", dir + "/bulk/fail", true);
    scanner = client.createScanner(creds, "bar", null);
    more = client.nextK(scanner, 100);
    client.closeScanner(scanner);
    assertEquals(1, more.results.size());
    ByteBuffer maxRow = client.getMaxRow(creds, "bar", null, null, false, null, false);
    assertEquals(s2bb("a"), maxRow);

    assertFalse(client.testTableClassLoad(creds, "bar", "abc123", SortedKeyValueIterator.class.getName()));
    assertTrue(client.testTableClassLoad(creds, "bar", VersioningIterator.class.getName(),
            SortedKeyValueIterator.class.getName()));
}

From source file:org.apache.accumulo.proxy.SimpleProxyIT.java

@Test
public void testTableOperations() throws Exception {
    final String TABLE_TEST = makeTableName();

    client.createTable(creds, TABLE_TEST, true, TimeType.MILLIS);
    // constraints
    client.addConstraint(creds, TABLE_TEST, NumericValueConstraint.class.getName());
    assertEquals(2, client.listConstraints(creds, TABLE_TEST).size());

    UtilWaitThread.sleep(2000);

    client.updateAndFlush(creds, TABLE_TEST, mutation("row1", "cf", "cq", "123"));

    try {
        client.updateAndFlush(creds, TABLE_TEST, mutation("row1", "cf", "cq", "x"));
        fail("constraint did not fire");
    } catch (MutationsRejectedException ex) {
    }

    client.removeConstraint(creds, TABLE_TEST, 2);

    UtilWaitThread.sleep(2000);

    assertEquals(1, client.listConstraints(creds, TABLE_TEST).size());

    client.updateAndFlush(creds, TABLE_TEST, mutation("row1", "cf", "cq", "x"));
    assertScan(new String[][] { { "row1", "cf", "cq", "x" } }, TABLE_TEST);
    // splits, merge
    client.addSplits(creds, TABLE_TEST,
            new HashSet<ByteBuffer>(Arrays.asList(s2bb("a"), s2bb("m"), s2bb("z"))));
    List<ByteBuffer> splits = client.listSplits(creds, TABLE_TEST, 1);
    assertEquals(Arrays.asList(s2bb("m")), splits);
    client.mergeTablets(creds, TABLE_TEST, null, s2bb("m"));
    splits = client.listSplits(creds, TABLE_TEST, 10);
    assertEquals(Arrays.asList(s2bb("m"), s2bb("z")), splits);
    client.mergeTablets(creds, TABLE_TEST, null, null);
    splits = client.listSplits(creds, TABLE_TEST, 10);
    List<ByteBuffer> empty = Collections.emptyList();
    assertEquals(empty, splits);
    // iterators
    client.deleteTable(creds, TABLE_TEST);
    client.createTable(creds, TABLE_TEST, true, TimeType.MILLIS);
    HashMap<String, String> options = new HashMap<String, String>();
    options.put("type", "STRING");
    options.put("columns", "cf");
    IteratorSetting setting = new IteratorSetting(10, TABLE_TEST, SummingCombiner.class.getName(), options);
    client.attachIterator(creds, TABLE_TEST, setting, EnumSet.allOf(IteratorScope.class));
    for (int i = 0; i < 10; i++) {
        client.updateAndFlush(creds, TABLE_TEST, mutation("row1", "cf", "cq", "1"));
    }
    assertScan(new String[][] { { "row1", "cf", "cq", "10" } }, TABLE_TEST);
    try {
        client.checkIteratorConflicts(creds, TABLE_TEST, setting, EnumSet.allOf(IteratorScope.class));
        fail("checkIteratorConflicts did not throw an exception");
    } catch (Exception ex) {
    }
    client.deleteRows(creds, TABLE_TEST, null, null);
    client.removeIterator(creds, TABLE_TEST, "test", EnumSet.allOf(IteratorScope.class));
    String expected[][] = new String[10][];
    for (int i = 0; i < 10; i++) {
        client.updateAndFlush(creds, TABLE_TEST, mutation("row" + i, "cf", "cq", "" + i));
        expected[i] = new String[] { "row" + i, "cf", "cq", "" + i };
        client.flushTable(creds, TABLE_TEST, null, null, true);
    }
    assertScan(expected, TABLE_TEST);
    // clone
    final String TABLE_TEST2 = makeTableName();
    client.cloneTable(creds, TABLE_TEST, TABLE_TEST2, true, null, null);
    assertScan(expected, TABLE_TEST2);
    client.deleteTable(creds, TABLE_TEST2);

    // don't know how to test this, call it just for fun
    client.clearLocatorCache(creds, TABLE_TEST);

    // compact
    client.compactTable(creds, TABLE_TEST, null, null, null, true, true);
    assertEquals(1, countFiles(TABLE_TEST));
    assertScan(expected, TABLE_TEST);

    // get disk usage
    client.cloneTable(creds, TABLE_TEST, TABLE_TEST2, true, null, null);
    Set<String> tablesToScan = new HashSet<String>();
    tablesToScan.add(TABLE_TEST);
    tablesToScan.add(TABLE_TEST2);
    tablesToScan.add("foo");
    client.createTable(creds, "foo", true, TimeType.MILLIS);
    List<DiskUsage> diskUsage = (client.getDiskUsage(creds, tablesToScan));
    assertEquals(2, diskUsage.size());
    assertEquals(1, diskUsage.get(0).getTables().size());
    assertEquals(2, diskUsage.get(1).getTables().size());
    client.compactTable(creds, TABLE_TEST2, null, null, null, true, true);
    diskUsage = (client.getDiskUsage(creds, tablesToScan));
    assertEquals(3, diskUsage.size());
    assertEquals(1, diskUsage.get(0).getTables().size());
    assertEquals(1, diskUsage.get(1).getTables().size());
    assertEquals(1, diskUsage.get(2).getTables().size());
    client.deleteTable(creds, "foo");
    client.deleteTable(creds, TABLE_TEST2);

    // export/import
    File dir = tempFolder.newFolder("test");
    File destDir = tempFolder.newFolder("test_dest");
    client.offlineTable(creds, TABLE_TEST, false);
    client.exportTable(creds, TABLE_TEST, dir.getAbsolutePath());
    // copy files to a new location
    FileSystem fs = FileSystem.get(new Configuration());
    FSDataInputStream is = fs.open(new Path(dir + "/distcp.txt"));
    BufferedReader r = new BufferedReader(new InputStreamReader(is));
    while (true) {
        String line = r.readLine();
        if (line == null)
            break;
        Path srcPath = new Path(line);
        FileUtils.copyFile(new File(srcPath.toUri().getPath()), new File(destDir, srcPath.getName()));
    }
    client.deleteTable(creds, TABLE_TEST);
    client.importTable(creds, "testify", destDir.getAbsolutePath());
    assertScan(expected, "testify");
    client.deleteTable(creds, "testify");

    try {
        // ACCUMULO-1558 a second import from the same dir should fail, the first import moved the files
        client.importTable(creds, "testify2", destDir.getAbsolutePath());
        fail();
    } catch (Exception e) {
    }

    assertFalse(client.listTables(creds).contains("testify2"));

    // Locality groups
    client.createTable(creds, "test", true, TimeType.MILLIS);
    Map<String, Set<String>> groups = new HashMap<String, Set<String>>();
    groups.put("group1", Collections.singleton("cf1"));
    groups.put("group2", Collections.singleton("cf2"));
    client.setLocalityGroups(creds, "test", groups);
    assertEquals(groups, client.getLocalityGroups(creds, "test"));
    // table properties
    Map<String, String> orig = client.getTableProperties(creds, "test");
    client.setTableProperty(creds, "test", "table.split.threshold", "500M");
    Map<String, String> update = client.getTableProperties(creds, "test");
    assertEquals(update.get("table.split.threshold"), "500M");
    client.removeTableProperty(creds, "test", "table.split.threshold");
    update = client.getTableProperties(creds, "test");
    assertEquals(orig, update);
    // rename table
    Map<String, String> tables = client.tableIdMap(creds);
    client.renameTable(creds, "test", "bar");
    Map<String, String> tables2 = client.tableIdMap(creds);
    assertEquals(tables.get("test"), tables2.get("bar"));
    // table exists
    assertTrue(client.tableExists(creds, "bar"));
    assertFalse(client.tableExists(creds, "test"));
    // bulk import
    String filename = dir + "/bulk/import/rfile.rf";
    FileSKVWriter writer = FileOperations.getInstance().openWriter(filename, fs, fs.getConf(),
            DefaultConfiguration.getInstance());
    writer.startDefaultLocalityGroup();
    writer.append(new org.apache.accumulo.core.data.Key(new Text("a"), new Text("b"), new Text("c")),
            new Value("value".getBytes()));
    writer.close();
    fs.mkdirs(new Path(dir + "/bulk/fail"));
    client.importDirectory(creds, "bar", dir + "/bulk/import", dir + "/bulk/fail", true);
    String scanner = client.createScanner(creds, "bar", null);
    ScanResult more = client.nextK(scanner, 100);
    client.closeScanner(scanner);
    assertEquals(1, more.results.size());
    ByteBuffer maxRow = client.getMaxRow(creds, "bar", null, null, false, null, false);
    assertEquals(s2bb("a"), maxRow);

    assertFalse(client.testTableClassLoad(creds, "bar", "abc123", SortedKeyValueIterator.class.getName()));
    assertTrue(client.testTableClassLoad(creds, "bar", VersioningIterator.class.getName(),
            SortedKeyValueIterator.class.getName()));
}

From source file:com.wanikani.androidnotifier.StatsFragment.java

/**
 * Constructor
 */
public StatsFragment() {
    charts = new Vector<TYChart>();
    gcharts = new Vector<GenericChart>();
    fcharts = new Vector<IconizableChart>();
    hdbc = new HistoryDatabaseCache();

    semiPreservedState = new Hashtable<Integer, Boolean>();
    netwe = new NetworkEngine();

    timeline = new ReviewsTimelineChart(netwe, R.id.os_review_timeline_item, R.id.os_review_timeline_srs,
            MeterSpec.T.OTHER_STATS);
    netwe.add(timeline);

    netwe.add(new ItemDistributionChart(netwe, R.id.os_kanji_levels, MeterSpec.T.OTHER_STATS,
            EnumSet.of(Item.Type.KANJI)));
    netwe.add(new ItemDistributionChart(netwe, R.id.os_levels, MeterSpec.T.MORE_STATS,
            EnumSet.of(Item.Type.VOCABULARY)));

    netwe.add(new ItemAgeChart(netwe, R.id.ct_age_distribution, MeterSpec.T.OTHER_STATS,
            EnumSet.allOf(Item.Type.class)));

    netwe.add(
            new KanjiProgressChart(netwe, R.id.os_jlpt, MeterSpec.T.OTHER_STATS, R.string.jlpt5, KLIB_JLPT_5));
    netwe.add(
            new KanjiProgressChart(netwe, R.id.os_jlpt, MeterSpec.T.OTHER_STATS, R.string.jlpt4, KLIB_JLPT_4));
    netwe.add(
            new KanjiProgressChart(netwe, R.id.os_jlpt, MeterSpec.T.OTHER_STATS, R.string.jlpt3, KLIB_JLPT_3));
    netwe.add(
            new KanjiProgressChart(netwe, R.id.os_jlpt, MeterSpec.T.OTHER_STATS, R.string.jlpt2, KLIB_JLPT_2));
    netwe.add(
            new KanjiProgressChart(netwe, R.id.os_jlpt, MeterSpec.T.OTHER_STATS, R.string.jlpt1, KLIB_JLPT_1));

    netwe.add(
            new KanjiProgressChart(netwe, R.id.os_joyo, MeterSpec.T.OTHER_STATS, R.string.joyo1, KLIB_JOYO_1));
    netwe.add(
            new KanjiProgressChart(netwe, R.id.os_joyo, MeterSpec.T.OTHER_STATS, R.string.joyo2, KLIB_JOYO_2));
    netwe.add(
            new KanjiProgressChart(netwe, R.id.os_joyo, MeterSpec.T.OTHER_STATS, R.string.joyo3, KLIB_JOYO_3));
    netwe.add(
            new KanjiProgressChart(netwe, R.id.os_joyo, MeterSpec.T.OTHER_STATS, R.string.joyo4, KLIB_JOYO_4));
    netwe.add(
            new KanjiProgressChart(netwe, R.id.os_joyo, MeterSpec.T.OTHER_STATS, R.string.joyo5, KLIB_JOYO_5));
    netwe.add(
            new KanjiProgressChart(netwe, R.id.os_joyo, MeterSpec.T.OTHER_STATS, R.string.joyo6, KLIB_JOYO_6));
    netwe.add(
            new KanjiProgressChart(netwe, R.id.os_joyo, MeterSpec.T.OTHER_STATS, R.string.joyoS, KLIB_JOYO_S));
}