Example usage for java.util HashSet remove

List of usage examples for java.util HashSet remove

Introduction

On this page you can find example usages of java.util.HashSet.remove.

Prototype

public boolean remove(Object o) 

Document

Removes the specified element from this set if it is present. Returns true if the set contained the element (that is, if the set changed as a result of the call).
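
A minimal sketch of the call and its boolean result (plain JDK; the class name is just for illustration):

import java.util.HashSet;

public class HashSetRemoveDemo {
    public static void main(String[] args) {
        HashSet<String> colors = new HashSet<>();
        colors.add("red");
        colors.add("green");

        // remove returns true when the element was present and the set changed
        boolean removed = colors.remove("red"); // true
        boolean missing = colors.remove("blue"); // false, set unchanged

        System.out.println(removed); // true
        System.out.println(missing); // false
        System.out.println(colors); // [green]
    }
}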

Usage

From source file:net.toxbank.client.resource.InvestigationClient.java

public void addPolicyRules(MultipartEntity entity, List<PolicyRule> accessRights) throws Exception {
    HashSet<String> emptyFields = new HashSet<String>();

    for (Method method : Method.values()) {
        for (Boolean allows : Arrays.asList(Boolean.TRUE, Boolean.FALSE)) {
            String userField = AbstractClient.getPolicyRuleWebField(new User(), method, allows);
            String groupField = AbstractClient.getPolicyRuleWebField(new Group(), method, allows);
            emptyFields.add(userField);
            emptyFields.add(groupField);
        }
    }

    if (accessRights != null) {
        for (PolicyRule rule : accessRights) {
            for (Method method : Method.values()) {
                Boolean allows = rule.allows(method.name());
                if (allows == null)
                    continue;
                String field = null;
                if (rule instanceof UserPolicyRule)
                    field = AbstractClient.getPolicyRuleWebField((User) rule.getSubject(), method, allows);
                else if (rule instanceof GroupPolicyRule)
                    field = AbstractClient.getPolicyRuleWebField((Group) rule.getSubject(), method, allows);
                if (field == null)
                    continue;
                entity.addPart(field,
                        new StringBody(rule.getSubject().getResourceURL().toExternalForm(), utf8));
                // this field received a real value, so it no longer needs an empty placeholder
                emptyFields.remove(field);
            }
        }
    }

    // add placeholders for fields that were not defined to ensure the investigation service
    // deletes any old ones
    for (String field : emptyFields) {
        entity.addPart(field, new StringBody("", utf8));
    }
}
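
This example shows a common pattern built on HashSet.remove: seed a set with every field that might need a default, remove each field as a real value is supplied, and send a placeholder for whatever remains. A stripped-down sketch of the same idea (the field names below are made up for illustration):

import java.util.Arrays;
import java.util.HashSet;

public class RemainingFieldsDemo {
    public static void main(String[] args) {
        // start with every field that could need a blank placeholder
        HashSet<String> emptyFields = new HashSet<>(
                Arrays.asList("policy.user.GET", "policy.user.POST", "policy.group.GET"));

        // pretend only one field received an explicit value
        emptyFields.remove("policy.user.GET");

        // the remainder would be sent as empty placeholders
        for (String field : emptyFields) {
            System.out.println(field + " -> \"\"");
        }
    }
}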

From source file:com.microsoft.azure.keyvault.test.CertificateOperationsTest.java

/**
 * List versions of a certificate in a vault.
 */
@Test
public void listCertificateVersions() throws Exception {
    String certificateName = "listCertificateVersions";
    String certificateContent = "MIIJOwIBAzCCCPcGCSqGSIb3DQEHAaCCCOgEggjkMIII4DCCBgkGCSqGSIb3DQEHAaCCBfoEggX2MIIF8jCCBe4GCyqGSIb3DQEMCgECoIIE/jCCBPowHAYKKoZIhvcNAQwBAzAOBAj15YH9pOE58AICB9AEggTYLrI+SAru2dBZRQRlJY7XQ3LeLkah2FcRR3dATDshZ2h0IA2oBrkQIdsLyAAWZ32qYR1qkWxLHn9AqXgu27AEbOk35+pITZaiy63YYBkkpR+pDdngZt19Z0PWrGwHEq5z6BHS2GLyyN8SSOCbdzCz7blj3+7IZYoMj4WOPgOm/tQ6U44SFWek46QwN2zeA4i97v7ftNNns27ms52jqfhOvTA9c/wyfZKAY4aKJfYYUmycKjnnRl012ldS2lOkASFt+lu4QCa72IY6ePtRudPCvmzRv2pkLYS6z3cI7omT8nHP3DymNOqLbFqr5O2M1ZYaLC63Q3xt3eVvbcPh3N08D1hHkhz/KDTvkRAQpvrW8ISKmgDdmzN55Pe55xHfSWGB7gPw8sZea57IxFzWHTK2yvTslooWoosmGxanYY2IG/no3EbPOWDKjPZ4ilYJe5JJ2immlxPz+2e2EOCKpDI+7fzQcRz3PTd3BK+budZ8aXX8aW/lOgKS8WmxZoKnOJBNWeTNWQFugmktXfdPHAdxMhjUXqeGQd8wTvZ4EzQNNafovwkI7IV/ZYoa++RGofVR3ZbRSiBNF6TDj/qXFt0wN/CQnsGAmQAGNiN+D4mY7i25dtTu/Jc7OxLdhAUFpHyJpyrYWLfvOiS5WYBeEDHkiPUa/8eZSPA3MXWZR1RiuDvuNqMjct1SSwdXADTtF68l/US1ksU657+XSC+6ly1A/upz+X71+C4Ho6W0751j5ZMT6xKjGh5pee7MVuduxIzXjWIy3YSd0fIT3U0A5NLEvJ9rfkx6JiHjRLx6V1tqsrtT6BsGtmCQR1UCJPLqsKVDvAINx3cPA/CGqr5OX2BGZlAihGmN6n7gv8w4O0k0LPTAe5YefgXN3m9pE867N31GtHVZaJ/UVgDNYS2jused4rw76ZWN41akx2QN0JSeMJqHXqVz6AKfz8ICS/dFnEGyBNpXiMRxrY/QPKi/wONwqsbDxRW7vZRVKs78pBkE0ksaShlZk5GkeayDWC/7Hi/NqUFtIloK9XB3paLxo1DGu5qqaF34jZdktzkXp0uZqpp+FfKZaiovMjt8F7yHCPk+LYpRsU2Cyc9DVoDA6rIgf+uEP4jppgehsxyT0lJHax2t869R2jYdsXwYUXjgwHIV0voj7bJYPGFlFjXOp6ZW86scsHM5xfsGQoK2Fp838VT34SHE1ZXU/puM7rviREHYW72pfpgGZUILQMohuTPnd8tFtAkbrmjLDo+k9xx7HUvgoFTiNNWuq/cRjr70FKNguMMTIrid+HwfmbRoaxENWdLcOTNeascER2a+37UQolKD5ksrPJG6RdNA7O2pzp3micDYRs/+s28cCIxO//J/d4nsgHp6RTuCu4+Jm9k0YTw2Xg75b2cWKrxGnDUgyIlvNPaZTB5QbMid4x44/lE0LLi9kcPQhRgrK07OnnrMgZvVGjt1CLGhKUv7KFc3xV1r1rwKkosxnoG99oCoTQtregcX5rIMjHgkc1IdflGJkZzaWMkYVFOJ4Weynz008i4ddkske5vabZs37Lb8iggUYNBYZyGzalruBgnQyK4fz38Fae4nWYjyildVfgyo/fCePR2ovOfphx9OQJi+M9BoFmPrAg+8ARDZ+R+5yzYuEc9ZoVX7nkp7LTGB3DANBgkrBgEEAYI3EQIxADATBgkqhkiG9w0BCRUxBgQEAQAAADBXBgkqhkiG9w0BCRQxSh5IAGEAOAAwAGQAZgBmADgANgAtAGUAOQA2AGUALQA0ADIAMgA0AC0AYQBhADEAMQAtAGIAZAAxADkANABkADUAYQA2AGIANwA3MF0GCSsGAQQBgjcRATFQHk4ATQBpAGMAcgBvAHMAbwBmAHQAIABTAHQAcgBvAG4AZwAgAEMAcgB5AHAAdABvAGcAcgBhAHAAaABpAGMAIABQAHIAbwB2AGkAZABlAHIwggLPBgkqhkiG9w0BBwagggLAMIICvAIBADCCArUGCSqGSIb3DQEHATAcBgoqhkiG9w0BDAEGMA4ECNX+VL2MxzzWAgIH0ICCAojmRBO+CPfVNUO0s+BVuwhOzikAGNBmQHNChmJ/pyzPbMUbx7tO63eIVSc67iERda2WCEmVwPigaVQkPaumsfp8+L6iV/BMf5RKlyRXcwh0vUdu2Qa7qadD+gFQ2kngf4Dk6vYo2/2HxayuIf6jpwe8vql4ca3ZtWXfuRix2fwgltM0bMz1g59d7x/glTfNqxNlsty0A/rWrPJjNbOPRU2XykLuc3AtlTtYsQ32Zsmu67A7UNBw6tVtkEXlFDqhavEhUEO3dvYqMY+QLxzpZhA0q44ZZ9/ex0X6QAFNK5wuWxCbupHWsgxRwKftrxyszMHsAvNoNcTlqcctee+ecNwTJQa1/MDbnhO6/qHA7cfG1qYDq8Th635vGNMW1w3sVS7l0uEvdayAsBHWTcOC2tlMa5bfHrhY8OEIqj5bN5H9RdFy8G/W239tjDu1OYjBDydiBqzBn8HG1DSj1Pjc0kd/82d4ZU0308KFTC3yGcRad0GnEH0Oi3iEJ9HbriUbfVMbXNHOF+MktWiDVqzndGMKmuJSdfTBKvGFvejAWVO5E4mgLvoaMmbchc3BO7sLeraHnJN5hvMBaLcQI38N86mUfTR8AP6AJ9c2k514KaDLclm4z6J8dMz60nUeo5D3YD09G6BavFHxSvJ8MF0Lu5zOFzEePDRFm9mH8W0N/sFlIaYfD/GWU/w44mQucjaBk95YtqOGRIj58tGDWr8iUdHwaYKGqU24zGeRae9DhFXPzZshV1ZGsBQFRaoYkyLAwdJWIXTi+c37YaC8FRSEnnNmS79Dou1Kc3BvK4EYKAD2KxjtUebrV174gD0Q+9YuJ0GXOTspBvCFd5VT2Rw5zDNrA/J3F5fMCk4wOzAfMAcGBSsOAwIaBBSxgh2xyF+88V4vAffBmZXv8Txt4AQU4O/NX4MjxSodbE7ApNAMIvrtREwCAgfQ";
    String certificatePassword = "123";

    // Set content type to indicate the certificate is PKCS12 format.
    SecretProperties secretProperties = new SecretProperties();
    secretProperties.withContentType(MIME_PKCS12);
    CertificatePolicy certificatePolicy = new CertificatePolicy();
    certificatePolicy.withSecretProperties(secretProperties);

    HashSet<String> certificates = new HashSet<String>();
    for (int i = 0; i < MAX_CERTS; ++i) {
        int failureCount = 0;
        for (;;) {
            try {
                CertificateBundle certificateBundle = keyVaultClient.importCertificate(
                        new ImportCertificateRequest.Builder(getVaultUri(), certificateName, certificateContent)
                                .withPassword(certificatePassword).withPolicy(certificatePolicy).build());
                CertificateIdentifier id = certificateBundle.certificateIdentifier();
                certificates.add(id.identifier());
                break;
            } catch (KeyVaultErrorException e) {
                ++failureCount;
                if (e.body().error().code().equals("Throttled")) {
                    System.out.println("Waiting to avoid throttling");
                    Thread.sleep(failureCount * 1500);
                    continue;
                }
                throw e;
            }
        }
    }

    PagedList<CertificateItem> listResult = keyVaultClient.listCertificateVersions(getVaultUri(),
            certificateName, PAGELIST_MAX_CERTS);
    Assert.assertTrue(PAGELIST_MAX_CERTS >= listResult.currentPage().items().size());

    listResult = keyVaultClient.listCertificateVersions(getVaultUri(), certificateName);

    // every version we imported should appear in the listing; removing them all should leave the set empty
    for (CertificateItem item : listResult) {
        if (item != null) {
            certificates.remove(item.id());
        }
    }

    Assert.assertEquals(0, certificates.size());

    keyVaultClient.deleteCertificate(getVaultUri(), certificateName);
}

From source file:xc.mst.services.marcaggregation.MarcAggregationService.java

private List<OutputRecord> processBibDelete(InputRecord r) {
    LOG.info("MAS:  processBibDelete: " + r.getId());
    List<OutputRecord> results = new ArrayList<OutputRecord>();

    if (r.getSuccessors().size() == 0) {
        // NEW-DELETED
        // nothing to do?  should we still double-check datastructures and db?
    } else {
        TimingLogger.start("processRecord.updateDeleted");
        // UPDATE-DELETED
        //
        // ( mostly ) directly lifted from norm...
        //
        boolean isAbibWithSuccessors = false;
        results = new ArrayList<OutputRecord>();
        List<OutputRecord> successors = r.getSuccessors();

        // If there are successors then the record exists and needs to be deleted. Since we are
        // deleting the record, we need to decrement the count.
        if (successors != null && successors.size() > 0) {
            inputRecordCount--;

            // and if the record exists, check if it is a bib
            // if it is in mergedRecordsI2Omap, it is a bib, fastest way.  don't try to parse record, deleted could be incomplete
            // and unparseable,
            //
            if (allBibRecordsI2Omap.containsKey(r.getId())) {
                // is bib!  flag it for later...
                isAbibWithSuccessors = true;
            }

            // Handle reprocessing of successors
            for (OutputRecord successor : successors) {
                successor.setStatus(Record.DELETED);
                successor.setFormat(marc21);
                results.add(successor);
            }
        }
        if (isAbibWithSuccessors) {
            HashSet<Long> formerMatchSet = deleteAllMergeDetails(r);
            LOG.info("MAS:  processBibDelete formerMatchSet [" + formerMatchSet.size()
                    + "] = deleteAllMergeDetails: " + formerMatchSet);
            for (long formerId : formerMatchSet) {
                //                   currentMatchSets.remove(formerId);                   
                recordOfSourceMap.remove(formerId);
            }
            //                currentMatchSets.remove(r.getId());
            recordOfSourceMap.remove(r.getId());

            // the deleted record itself should not take part in the re-merge
            formerMatchSet.remove(r.getId());

            if (formerMatchSet.size() > 0) {
                List<HashSet<Long>> listOfMatchSets = findMatchSets(formerMatchSet);
                for (HashSet<Long> matchset : listOfMatchSets) {
                    LOG.info("MAS:  processBibDelete listOfMatchSets [" + matchset.size()
                            + "]  = findMatchSets: " + matchset);
                    results = remerge(results, matchset);
                }
            }

        }
        TimingLogger.stop("processRecord.updateDeleted");
    }
    return results;
}

From source file:ddf.catalog.impl.CatalogFrameworkImplTest.java

@Test
public void testUpdateWithStores() throws Exception {
    MockEventProcessor eventAdmin = new MockEventProcessor();
    MockMemoryProvider provider = new MockMemoryProvider("Provider", "Provider", "v1.0", "DDF", new HashSet<>(),
            true, new Date());

    Map<String, CatalogStore> storeMap = new HashMap<>();
    Map<String, FederatedSource> sourceMap = new HashMap<>();
    MockCatalogStore store = new MockCatalogStore("catalogStoreId-1", true);
    storeMap.put(store.getId(), store);
    sourceMap.put(store.getId(), store);

    CatalogFramework framework = createDummyCatalogFramework(provider, storeMap, sourceMap, eventAdmin);
    FilterFactory filterFactory = new FilterFactoryImpl();

    Filter filter = filterFactory.like(filterFactory.property(Metacard.METADATA), "*", "*", "?", "/", false);

    List<Metacard> metacards = new ArrayList<>();
    String id = UUID.randomUUID().toString();
    MetacardImpl newCard = new MetacardImpl();
    newCard.setId(id);
    newCard.setAttribute("myKey", "myValue1");
    metacards.add(newCard);
    Map<String, Serializable> reqProps = new HashMap<>();
    HashSet<String> destinations = new HashSet<>();
    destinations.add("mockMemoryProvider");
    destinations.add("catalogStoreId-1");
    framework.create(new CreateRequestImpl(metacards, reqProps, destinations));

    MetacardImpl updateCard = new MetacardImpl();
    updateCard.setId(id);
    updateCard.setAttribute("myKey", "myValue2");
    List<Entry<Serializable, Metacard>> updates = new ArrayList<>();
    updates.add(new SimpleEntry<>(id, updateCard));
    // drop the provider destination so the update is routed only to the catalog store
    destinations.remove("mockMemoryProvider");
    framework.update(new UpdateRequestImpl(updates, Metacard.ID, new HashMap<>(), destinations));
    assertThat(provider.hasReceivedUpdateByIdentifier(), is(false));
    assertThat(store.hasReceivedUpdateByIdentifier(), is(true));
    QueryResponse storeResponse = framework.query(new QueryRequestImpl(new QueryImpl(filter), destinations));
    assertThat(storeResponse.getResults().size(), is(1));
    assertThat(storeResponse.getResults().get(0).getMetacard().getAttribute("myKey").getValue(),
            equalTo("myValue2"));
    destinations.clear();
    QueryResponse providerResponse = framework.query(new QueryRequestImpl(new QueryImpl(filter), destinations));
    assertThat(providerResponse.getResults().size(), is(1));
    assertThat(providerResponse.getResults().get(0).getMetacard().getAttribute("myKey").getValue(),
            equalTo("myValue1"));

}

From source file:com.microsoft.azure.keyvault.test.CertificateOperationsTest.java

/**
 * List certificates in a vault.
 */
@Test
public void listCertificates() throws Exception {
    String certificateName = "listCertificate";
    String certificateContent = "MIIJOwIBAzCCCPcGCSqGSIb3DQEHAaCCCOgEggjkMIII4DCCBgkGCSqGSIb3DQEHAaCCBfoEggX2MIIF8jCCBe4GCyqGSIb3DQEMCgECoIIE/jCCBPowHAYKKoZIhvcNAQwBAzAOBAj15YH9pOE58AICB9AEggTYLrI+SAru2dBZRQRlJY7XQ3LeLkah2FcRR3dATDshZ2h0IA2oBrkQIdsLyAAWZ32qYR1qkWxLHn9AqXgu27AEbOk35+pITZaiy63YYBkkpR+pDdngZt19Z0PWrGwHEq5z6BHS2GLyyN8SSOCbdzCz7blj3+7IZYoMj4WOPgOm/tQ6U44SFWek46QwN2zeA4i97v7ftNNns27ms52jqfhOvTA9c/wyfZKAY4aKJfYYUmycKjnnRl012ldS2lOkASFt+lu4QCa72IY6ePtRudPCvmzRv2pkLYS6z3cI7omT8nHP3DymNOqLbFqr5O2M1ZYaLC63Q3xt3eVvbcPh3N08D1hHkhz/KDTvkRAQpvrW8ISKmgDdmzN55Pe55xHfSWGB7gPw8sZea57IxFzWHTK2yvTslooWoosmGxanYY2IG/no3EbPOWDKjPZ4ilYJe5JJ2immlxPz+2e2EOCKpDI+7fzQcRz3PTd3BK+budZ8aXX8aW/lOgKS8WmxZoKnOJBNWeTNWQFugmktXfdPHAdxMhjUXqeGQd8wTvZ4EzQNNafovwkI7IV/ZYoa++RGofVR3ZbRSiBNF6TDj/qXFt0wN/CQnsGAmQAGNiN+D4mY7i25dtTu/Jc7OxLdhAUFpHyJpyrYWLfvOiS5WYBeEDHkiPUa/8eZSPA3MXWZR1RiuDvuNqMjct1SSwdXADTtF68l/US1ksU657+XSC+6ly1A/upz+X71+C4Ho6W0751j5ZMT6xKjGh5pee7MVuduxIzXjWIy3YSd0fIT3U0A5NLEvJ9rfkx6JiHjRLx6V1tqsrtT6BsGtmCQR1UCJPLqsKVDvAINx3cPA/CGqr5OX2BGZlAihGmN6n7gv8w4O0k0LPTAe5YefgXN3m9pE867N31GtHVZaJ/UVgDNYS2jused4rw76ZWN41akx2QN0JSeMJqHXqVz6AKfz8ICS/dFnEGyBNpXiMRxrY/QPKi/wONwqsbDxRW7vZRVKs78pBkE0ksaShlZk5GkeayDWC/7Hi/NqUFtIloK9XB3paLxo1DGu5qqaF34jZdktzkXp0uZqpp+FfKZaiovMjt8F7yHCPk+LYpRsU2Cyc9DVoDA6rIgf+uEP4jppgehsxyT0lJHax2t869R2jYdsXwYUXjgwHIV0voj7bJYPGFlFjXOp6ZW86scsHM5xfsGQoK2Fp838VT34SHE1ZXU/puM7rviREHYW72pfpgGZUILQMohuTPnd8tFtAkbrmjLDo+k9xx7HUvgoFTiNNWuq/cRjr70FKNguMMTIrid+HwfmbRoaxENWdLcOTNeascER2a+37UQolKD5ksrPJG6RdNA7O2pzp3micDYRs/+s28cCIxO//J/d4nsgHp6RTuCu4+Jm9k0YTw2Xg75b2cWKrxGnDUgyIlvNPaZTB5QbMid4x44/lE0LLi9kcPQhRgrK07OnnrMgZvVGjt1CLGhKUv7KFc3xV1r1rwKkosxnoG99oCoTQtregcX5rIMjHgkc1IdflGJkZzaWMkYVFOJ4Weynz008i4ddkske5vabZs37Lb8iggUYNBYZyGzalruBgnQyK4fz38Fae4nWYjyildVfgyo/fCePR2ovOfphx9OQJi+M9BoFmPrAg+8ARDZ+R+5yzYuEc9ZoVX7nkp7LTGB3DANBgkrBgEEAYI3EQIxADATBgkqhkiG9w0BCRUxBgQEAQAAADBXBgkqhkiG9w0BCRQxSh5IAGEAOAAwAGQAZgBmADgANgAtAGUAOQA2AGUALQA0ADIAMgA0AC0AYQBhADEAMQAtAGIAZAAxADkANABkADUAYQA2AGIANwA3MF0GCSsGAQQBgjcRATFQHk4ATQBpAGMAcgBvAHMAbwBmAHQAIABTAHQAcgBvAG4AZwAgAEMAcgB5AHAAdABvAGcAcgBhAHAAaABpAGMAIABQAHIAbwB2AGkAZABlAHIwggLPBgkqhkiG9w0BBwagggLAMIICvAIBADCCArUGCSqGSIb3DQEHATAcBgoqhkiG9w0BDAEGMA4ECNX+VL2MxzzWAgIH0ICCAojmRBO+CPfVNUO0s+BVuwhOzikAGNBmQHNChmJ/pyzPbMUbx7tO63eIVSc67iERda2WCEmVwPigaVQkPaumsfp8+L6iV/BMf5RKlyRXcwh0vUdu2Qa7qadD+gFQ2kngf4Dk6vYo2/2HxayuIf6jpwe8vql4ca3ZtWXfuRix2fwgltM0bMz1g59d7x/glTfNqxNlsty0A/rWrPJjNbOPRU2XykLuc3AtlTtYsQ32Zsmu67A7UNBw6tVtkEXlFDqhavEhUEO3dvYqMY+QLxzpZhA0q44ZZ9/ex0X6QAFNK5wuWxCbupHWsgxRwKftrxyszMHsAvNoNcTlqcctee+ecNwTJQa1/MDbnhO6/qHA7cfG1qYDq8Th635vGNMW1w3sVS7l0uEvdayAsBHWTcOC2tlMa5bfHrhY8OEIqj5bN5H9RdFy8G/W239tjDu1OYjBDydiBqzBn8HG1DSj1Pjc0kd/82d4ZU0308KFTC3yGcRad0GnEH0Oi3iEJ9HbriUbfVMbXNHOF+MktWiDVqzndGMKmuJSdfTBKvGFvejAWVO5E4mgLvoaMmbchc3BO7sLeraHnJN5hvMBaLcQI38N86mUfTR8AP6AJ9c2k514KaDLclm4z6J8dMz60nUeo5D3YD09G6BavFHxSvJ8MF0Lu5zOFzEePDRFm9mH8W0N/sFlIaYfD/GWU/w44mQucjaBk95YtqOGRIj58tGDWr8iUdHwaYKGqU24zGeRae9DhFXPzZshV1ZGsBQFRaoYkyLAwdJWIXTi+c37YaC8FRSEnnNmS79Dou1Kc3BvK4EYKAD2KxjtUebrV174gD0Q+9YuJ0GXOTspBvCFd5VT2Rw5zDNrA/J3F5fMCk4wOzAfMAcGBSsOAwIaBBSxgh2xyF+88V4vAffBmZXv8Txt4AQU4O/NX4MjxSodbE7ApNAMIvrtREwCAgfQ";
    String certificatePassword = "123";

    // Set content type to indicate the certificate is PKCS12 format.
    SecretProperties secretProperties = new SecretProperties();
    secretProperties.withContentType(MIME_PKCS12);
    CertificatePolicy certificatePolicy = new CertificatePolicy();
    certificatePolicy.withSecretProperties(secretProperties);

    HashSet<String> certificates = new HashSet<String>();
    for (int i = 0; i < MAX_CERTS; ++i) {
        int failureCount = 0;
        for (;;) {
            try {
                CertificateBundle certificateBundle = keyVaultClient
                        .importCertificate(new ImportCertificateRequest.Builder(getVaultUri(),
                                certificateName + i, certificateContent).withPassword(certificatePassword)
                                        .withPolicy(certificatePolicy).build());
                CertificateIdentifier id = certificateBundle.certificateIdentifier();
                certificates.add(id.baseIdentifier());
                break;
            } catch (KeyVaultErrorException e) {
                ++failureCount;
                if (e.body().error().code().equals("Throttled")) {
                    System.out.println("Waiting to avoid throttling");
                    Thread.sleep(failureCount * 1500);
                    continue;
                }
                throw e;
            }
        }
    }

    PagedList<CertificateItem> listResult = keyVaultClient.listCertificates(getVaultUri(), PAGELIST_MAX_CERTS);
    Assert.assertTrue(PAGELIST_MAX_CERTS >= listResult.currentPage().items().size());

    HashSet<String> toDelete = new HashSet<String>();

    for (CertificateItem item : listResult) {
        if (item != null) {
            CertificateIdentifier id = new CertificateIdentifier(item.id());
            toDelete.add(id.name());
            certificates.remove(item.id());
        }
    }

    Assert.assertEquals(0, certificates.size());

    for (String toDeleteCertificateName : toDelete) {
        keyVaultClient.deleteCertificate(getVaultUri(), toDeleteCertificateName);
    }
}

From source file:org.codehaus.enunciate.modules.docs.DocumentationDeploymentModule.java

/**
 * Builds the base output directory.
 */
protected void buildBase() throws IOException {
    Enunciate enunciate = getEnunciate();
    File buildDir = getDocsBuildDir();
    buildDir.mkdirs();
    if (this.base == null) {
        InputStream discoveredBase = DocumentationDeploymentModule.class
                .getResourceAsStream("/META-INF/enunciate/docs-base.zip");
        if (discoveredBase == null) {
            debug("Default base to be used for documentation base.");
            enunciate.extractBase(loadDefaultBase(), buildDir);

            URL discoveredCss = DocumentationDeploymentModule.class
                    .getResource("/META-INF/enunciate/css/style.css");
            if (discoveredCss != null) {
                enunciate.copyResource(discoveredCss, new File(new File(buildDir, "css"), "style.css"));
            } else if (this.css != null) {
                enunciate.copyFile(enunciate.resolvePath(this.css),
                        new File(new File(buildDir, "css"), "style.css"));
            }
        } else {
            debug("Discovered documentation base at /META-INF/enunciate/docs-base.zip");
            enunciate.extractBase(discoveredBase, buildDir);
        }
    } else {
        File baseFile = enunciate.resolvePath(this.base);
        if (baseFile.isDirectory()) {
            debug("Directory %s to be used as the documentation base.", baseFile);
            enunciate.copyDir(baseFile, buildDir);
        } else {
            debug("Zip file %s to be extracted as the documentation base.", baseFile);
            enunciate.extractBase(new FileInputStream(baseFile), buildDir);
        }
    }

    for (SchemaInfo schemaInfo : getModel().getNamespacesToSchemas().values()) {
        if (schemaInfo.getProperty("file") != null) {
            File from = (File) schemaInfo.getProperty("file");
            String filename = schemaInfo.getProperty("filename") != null
                    ? (String) schemaInfo.getProperty("filename")
                    : from.getName();
            File to = new File(getDocsBuildDir(), filename);
            enunciate.copyFile(from, to);
        }
    }

    for (WsdlInfo wsdlInfo : getModel().getNamespacesToWSDLs().values()) {
        if (wsdlInfo.getProperty("file") != null) {
            File from = (File) wsdlInfo.getProperty("file");
            String filename = wsdlInfo.getProperty("filename") != null
                    ? (String) wsdlInfo.getProperty("filename")
                    : from.getName();
            File to = new File(getDocsBuildDir(), filename);
            enunciate.copyFile(from, to);
        }
    }

    File wadlFile = getModelInternal().getWadlFile();
    if (wadlFile != null) {
        enunciate.copyFile(wadlFile, new File(getDocsBuildDir(), wadlFile.getName()));
    }

    HashSet<String> explicitArtifacts = new HashSet<String>();
    TreeSet<Artifact> downloads = new TreeSet<Artifact>();
    for (DownloadConfig download : this.downloads) {
        if (download.getArtifact() != null) {
            explicitArtifacts.add(download.getArtifact());
        } else if (download.getFile() != null) {
            File downloadFile = enunciate.resolvePath(download.getFile());

            debug("File %s to be added as an extra download.", downloadFile.getAbsolutePath());
            DownloadBundle downloadArtifact = new DownloadBundle(getName(), downloadFile.getName(),
                    downloadFile);

            if (download.getName() != null) {
                downloadArtifact.setName(download.getName());
            }

            if (download.getDescription() != null) {
                downloadArtifact.setDescription(download.getDescription());
            }

            downloads.add(downloadArtifact);
        }
    }

    for (Artifact artifact : enunciate.getArtifacts()) {
        if (((artifact instanceof NamedArtifact) && (includeDefaultDownloads))
                || (explicitArtifacts.contains(artifact.getId()))) {
            if (artifact.isPublic()) {
                downloads.add(artifact);
            }

            debug("Artifact %s to be added as an extra download.", artifact.getId());
            // mark this explicit artifact as found; anything left over triggers a warning below
            explicitArtifacts.remove(artifact.getId());
        }
    }

    if (explicitArtifacts.size() > 0) {
        for (String artifactId : explicitArtifacts) {
            warn("WARNING: Unknown artifact '%s'.  Will not be available for download.", artifactId);
        }
    }

    for (Artifact download : downloads) {
        debug("Exporting %s to directory %s.", download.getId(), buildDir);
        download.exportTo(buildDir, enunciate);
    }

    Set<String> additionalCssFiles = new HashSet<String>();
    for (String additionalCss : getAdditionalCss()) {
        File additionalCssFile = enunciate.resolvePath(additionalCss);
        debug("File %s to be added as an additional css file.", additionalCss);
        enunciate.copyFile(additionalCssFile, new File(buildDir, additionalCssFile.getName()));
        additionalCssFiles.add(additionalCssFile.getName());
    }

    EnunciateFreemarkerModel model = getModel();
    model.put("downloads", downloads);
    model.put("additionalCssFiles", additionalCssFiles);
}

From source file:cn.iterlog.myapplication.widget.overscroll.StaggeredGridView.java

/**
 * Should be called with mPopulating set to true
 *
 * @param fromPosition Position to start filling from
 * @param overhang the number of extra pixels to fill beyond the current bottom edge
 * @return the max overhang beyond the end of the view of any added items at the bottom
 */
final int fillDown(int fromPosition, int overhang) {

    final int paddingLeft = getPaddingLeft();
    final int paddingRight = getPaddingRight();
    final int itemMargin = mItemMargin;
    final int colWidth = (getWidth() - paddingLeft - paddingRight - itemMargin * (mColCount - 1)) / mColCount;
    final int gridBottom = getHeight() - getPaddingBottom();
    final int fillTo = gridBottom + overhang;
    int nextCol = getNextColumnDown(fromPosition);
    int position = fromPosition;

    while (nextCol >= 0 && mItemBottoms[nextCol] < fillTo && position < mItemCount) {

        final View child = obtainView(position, null);

        if (child == null)
            continue;

        LayoutParams lp = (LayoutParams) child.getLayoutParams();
        if (lp == null) {
            lp = this.generateDefaultLayoutParams();
            child.setLayoutParams(lp);
        }
        if (child.getParent() != this) {
            if (mInLayout) {
                addViewInLayout(child, -1, lp);
            } else {
                addView(child);
            }
        }

        final int span = Math.min(mColCount, lp.span);
        final int widthSize = colWidth * span + itemMargin * (span - 1);
        final int widthSpec = MeasureSpec.makeMeasureSpec(widthSize, MeasureSpec.EXACTLY);

        LayoutRecord rec;
        if (span > 1) {
            rec = getNextRecordDown(position, span);
            //                nextCol = rec.column;
        } else {
            rec = mLayoutRecords.get(position);
        }

        boolean invalidateAfter = false;
        if (rec == null) {
            rec = new LayoutRecord();
            mLayoutRecords.put(position, rec);
            rec.column = nextCol;
            rec.span = span;
        } else if (span != rec.span) {
            rec.span = span;
            rec.column = nextCol;
            invalidateAfter = true;
        } else {
            //                nextCol = rec.column;
        }

        if (mHasStableIds) {
            final long id = mAdapter.getItemId(position);
            rec.id = id;
            lp.id = id;
        }

        lp.column = nextCol;

        final int heightSpec;
        if (lp.height == LayoutParams.WRAP_CONTENT) {
            heightSpec = MeasureSpec.makeMeasureSpec(0, MeasureSpec.UNSPECIFIED);
        } else {
            heightSpec = MeasureSpec.makeMeasureSpec(lp.height, MeasureSpec.EXACTLY);
        }
        child.measure(widthSpec, heightSpec);

        final int childHeight = child.getMeasuredHeight();
        if (invalidateAfter || (childHeight != rec.height && rec.height > 0)) {
            invalidateLayoutRecordsAfterPosition(position);
        }
        rec.height = childHeight;

        final int startFrom;
        if (span > 1) {
            int lowest = mItemBottoms[nextCol];
            for (int i = nextCol + 1; i < nextCol + span; i++) {
                final int bottom = mItemBottoms[i];
                if (bottom > lowest) {
                    lowest = bottom;
                }
            }
            startFrom = lowest;
        } else {
            startFrom = mItemBottoms[nextCol];
        }

        final int childTop = startFrom + itemMargin;
        final int childBottom = childTop + childHeight;
        final int childLeft = paddingLeft + nextCol * (colWidth + itemMargin);
        final int childRight = childLeft + child.getMeasuredWidth();
        child.layout(childLeft, childTop, childRight, childBottom);

        // add the position to the mapping
        Integer positionInt = Integer.valueOf(position);
        if (!mColMappings.get(nextCol).contains(positionInt)) {

            // check to see if the mapping exists in other columns
            // this would happen if list has been updated
            for (HashSet<Integer> cols : mColMappings) {
                cols.remove(positionInt);
            }

            mColMappings.get(nextCol).add(positionInt);
        }

        for (int i = nextCol; i < nextCol + span; i++) {
            mItemBottoms[i] = childBottom + rec.getMarginBelow(i - nextCol);
        }

        position++;
        nextCol = getNextColumnDown(position);
    }

    int lowestView = 0;
    for (int i = 0; i < mColCount; i++) {
        if (mItemBottoms[i] > lowestView) {
            lowestView = mItemBottoms[i];
        }
    }
    return lowestView - gridBottom;
}

From source file:com.ad.view.staggeredgridview.StaggeredGridView.java

/**
 * Should be called with mPopulating set to true
 *
 * @param fromPosition
 *            Position to start filling from
 * @param overhang
 *            the number of extra pixels to fill beyond the current bottom
 *            edge
 * @return the max overhang beyond the end of the view of any added items at
 *         the bottom
 */
final int fillDown(int fromPosition, int overhang) {

    final int paddingLeft = getPaddingLeft();
    final int paddingRight = getPaddingRight();
    final int itemMargin = mItemMargin;
    final int colWidth = (getWidth() - paddingLeft - paddingRight - itemMargin * (mColCount - 1)) / mColCount;
    final int gridBottom = getHeight() - getPaddingBottom();
    final int fillTo = gridBottom + overhang;
    int nextCol = getNextColumnDown(fromPosition);
    int position = fromPosition;

    while (nextCol >= 0 && mItemBottoms[nextCol] < fillTo && position < mItemCount) {

        final View child = obtainView(position, null);

        if (child == null)
            continue;

        LayoutParams lp = (LayoutParams) child.getLayoutParams();
        if (lp == null) {
            lp = this.generateDefaultLayoutParams();
            child.setLayoutParams(lp);
        }
        if (child.getParent() != this) {
            if (mInLayout) {
                addViewInLayout(child, -1, lp);
            } else {
                addView(child);
            }
        }

        final int span = Math.min(mColCount, lp.span);
        final int widthSize = colWidth * span + itemMargin * (span - 1);
        final int widthSpec = MeasureSpec.makeMeasureSpec(widthSize, MeasureSpec.EXACTLY);

        LayoutRecord rec;
        if (span > 1) {
            rec = getNextRecordDown(position, span);
            // nextCol = rec.column;
        } else {
            rec = mLayoutRecords.get(position);
        }

        boolean invalidateAfter = false;
        if (rec == null) {
            rec = new LayoutRecord();
            mLayoutRecords.put(position, rec);
            rec.column = nextCol;
            rec.span = span;
        } else if (span != rec.span) {
            rec.span = span;
            rec.column = nextCol;
            invalidateAfter = true;
        } else {
            // nextCol = rec.column;
        }

        if (mHasStableIds) {
            final long id = mAdapter.getItemId(position);
            rec.id = id;
            lp.id = id;
        }

        lp.column = nextCol;

        final int heightSpec;
        if (lp.height == LayoutParams.WRAP_CONTENT) {
            heightSpec = MeasureSpec.makeMeasureSpec(0, MeasureSpec.UNSPECIFIED);
        } else {
            heightSpec = MeasureSpec.makeMeasureSpec(lp.height, MeasureSpec.EXACTLY);
        }
        child.measure(widthSpec, heightSpec);

        final int childHeight = child.getMeasuredHeight();
        if (invalidateAfter || (childHeight != rec.height && rec.height > 0)) {
            invalidateLayoutRecordsAfterPosition(position);
        }
        rec.height = childHeight;

        final int startFrom;
        if (span > 1) {
            int lowest = mItemBottoms[nextCol];
            for (int i = nextCol + 1; i < nextCol + span; i++) {
                final int bottom = mItemBottoms[i];
                if (bottom > lowest) {
                    lowest = bottom;
                }
            }
            startFrom = lowest;
        } else {
            startFrom = mItemBottoms[nextCol];
        }

        final int childTop = startFrom + itemMargin;
        final int childBottom = childTop + childHeight;
        final int childLeft = paddingLeft + nextCol * (colWidth + itemMargin);
        final int childRight = childLeft + child.getMeasuredWidth();
        child.layout(childLeft, childTop, childRight, childBottom);

        // add the position to the mapping
        Integer positionInt = Integer.valueOf(position);
        if (!mColMappings.get(nextCol).contains(positionInt)) {

            // check to see if the mapping exists in other columns
            // this would happen if list has been updated
            for (HashSet<Integer> cols : mColMappings) {
                cols.remove(positionInt);
            }

            mColMappings.get(nextCol).add(positionInt);
        }

        for (int i = nextCol; i < nextCol + span; i++) {
            mItemBottoms[i] = childBottom + rec.getMarginBelow(i - nextCol);
        }

        position++;
        nextCol = getNextColumnDown(position);
    }

    int lowestView = 0;
    for (int i = 0; i < mColCount; i++) {
        if (mItemBottoms[i] > lowestView) {
            lowestView = mItemBottoms[i];
        }
    }
    return lowestView - gridBottom;
}

From source file:org.apache.jackrabbit.core.query.lucene.SearchIndex.java

/**
 * This implementation forwards the call to
 * {@link MultiIndex#update(Collection, Collection)} and
 * transforms the two iterators to the required types.
 *
 * @param remove uuids of nodes to remove.
 * @param add    NodeStates to add. Calls to <code>next()</code> on this
 *               iterator may return <code>null</code>, to indicate that a
 *               node could not be indexed successfully.
 * @throws RepositoryException if an error occurs while indexing a node.
 * @throws IOException         if an error occurs while updating the index.
 */
public void updateNodes(NodeIdIterator remove, NodeStateIterator add) throws RepositoryException, IOException {
    checkOpen();
    final Map aggregateRoots = new HashMap();
    final HashSet removedUUIDs = new HashSet();
    final Set addedUUIDs = new HashSet();

    index.update(IteratorUtils.toList(new TransformIterator(remove, new Transformer() {
        public Object transform(Object input) {
            UUID uuid = ((NodeId) input).getUUID();
            removedUUIDs.add(uuid);
            return uuid;
        }
    })), IteratorUtils.toList(new TransformIterator(add, new Transformer() {
        public Object transform(Object input) {
            NodeState state = (NodeState) input;
            if (state == null) {
                return null;
            }
            UUID uuid = state.getNodeId().getUUID();
            addedUUIDs.add(uuid);
            removedUUIDs.remove(uuid);
            Document doc = null;
            try {
                doc = createDocument(state, getNamespaceMappings(), index.getIndexFormatVersion());
                retrieveAggregateRoot(state, aggregateRoots);
            } catch (RepositoryException e) {
                log.warn("Exception while creating document for node: " + state.getNodeId() + ": "
                        + e.toString());
            }
            return doc;
        }
    })));

    // remove any aggregateRoot nodes that are new
    // and therefore already up-to-date
    aggregateRoots.keySet().removeAll(addedUUIDs);

    // based on removed UUIDs get affected aggregate root nodes
    retrieveAggregateRoot(removedUUIDs, aggregateRoots);

    // update aggregates if there are any affected
    if (aggregateRoots.size() > 0) {
        Collection modified = TransformedCollection.decorate(new ArrayList(), new Transformer() {
            public Object transform(Object input) {
                NodeState state = (NodeState) input;
                try {
                    return createDocument(state, getNamespaceMappings(), index.getIndexFormatVersion());
                } catch (RepositoryException e) {
                    log.warn("Exception while creating document for node: " + state.getNodeId() + ": "
                            + e.toString());
                }
                return null;
            }
        });
        modified.addAll(aggregateRoots.values());
        index.update(aggregateRoots.keySet(), modified);
    }
}

From source file:org.apache.hadoop.hbase.master.procedure.TestMasterProcedureQueue.java

/**
 * Verify that "write" operations for a single table are serialized,
 * but different tables can be executed in parallel.
 */
@Test(timeout = 90000)
public void testConcurrentWriteOps() throws Exception {
    final TestTableProcSet procSet = new TestTableProcSet(queue);

    final int NUM_ITEMS = 10;
    final int NUM_TABLES = 4;
    final AtomicInteger opsCount = new AtomicInteger(0);
    for (int i = 0; i < NUM_TABLES; ++i) {
        TableName tableName = TableName.valueOf(String.format("testtb-%04d", i));
        for (int j = 1; j < NUM_ITEMS; ++j) {
            procSet.addBack(new TestTableProcedure(i * 100 + j, tableName,
                    TableProcedureInterface.TableOperationType.EDIT));
            opsCount.incrementAndGet();
        }
    }
    assertEquals(opsCount.get(), queue.size());

    final Thread[] threads = new Thread[NUM_TABLES * 2];
    final HashSet<TableName> concurrentTables = new HashSet<TableName>();
    final ArrayList<String> failures = new ArrayList<String>();
    final AtomicInteger concurrentCount = new AtomicInteger(0);
    for (int i = 0; i < threads.length; ++i) {
        threads[i] = new Thread() {
            @Override
            public void run() {
                while (opsCount.get() > 0) {
                    try {
                        TableProcedureInterface proc = procSet.acquire();
                        if (proc == null) {
                            queue.signalAll();
                            if (opsCount.get() > 0) {
                                continue;
                            }
                            break;
                        }
                        synchronized (concurrentTables) {
                            assertTrue("unexpected concurrency on " + proc.getTableName(),
                                    concurrentTables.add(proc.getTableName()));
                        }
                        assertTrue(opsCount.decrementAndGet() >= 0);
                        try {
                            long procId = ((Procedure) proc).getProcId();
                            TableName tableId = proc.getTableName();
                            int concurrent = concurrentCount.incrementAndGet();
                            assertTrue("inc-concurrent=" + concurrent + " 1 <= concurrent <= " + NUM_TABLES,
                                    concurrent >= 1 && concurrent <= NUM_TABLES);
                            LOG.debug("[S] tableId=" + tableId + " procId=" + procId + " concurrent="
                                    + concurrent);
                            Thread.sleep(2000);
                            concurrent = concurrentCount.decrementAndGet();
                            LOG.debug("[E] tableId=" + tableId + " procId=" + procId + " concurrent="
                                    + concurrent);
                            assertTrue("dec-concurrent=" + concurrent, concurrent < NUM_TABLES);
                        } finally {
                            synchronized (concurrentTables) {
                                assertTrue(concurrentTables.remove(proc.getTableName()));
                            }
                            procSet.release(proc);
                        }
                    } catch (Throwable e) {
                        LOG.error("Failed " + e.getMessage(), e);
                        synchronized (failures) {
                            failures.add(e.getMessage());
                        }
                    } finally {
                        queue.signalAll();
                    }
                }
            }
        };
        threads[i].start();
    }
    for (int i = 0; i < threads.length; ++i) {
        threads[i].join();
    }
    assertTrue(failures.toString(), failures.isEmpty());
    assertEquals(0, opsCount.get());
    assertEquals(0, queue.size());

    for (int i = 1; i <= NUM_TABLES; ++i) {
        TableName table = TableName.valueOf(String.format("testtb-%04d", i));
        assertTrue("queue should be deleted, table=" + table, queue.markTableAsDeleted(table));
    }
}