List of usage examples for java.util.Set.containsAll
boolean containsAll(Collection<?> c);
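Before the examples from real projects, here is a minimal, self-contained sketch of the contract (the class name ContainsAllDemo and the sample values are illustrative): containsAll returns true exactly when every element of the argument collection is also an element of the set, and it returns true vacuously for an empty argument.

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class ContainsAllDemo {
    public static void main(String[] args) {
        Set<String> actual = new HashSet<>(Arrays.asList("foo", "bar", "baz"));

        // true: every element of the argument collection is present in the set
        System.out.println(actual.containsAll(Arrays.asList("foo", "baz")));

        // false: "bop" is missing from the set
        System.out.println(actual.containsAll(Arrays.asList("foo", "bop")));

        // true: the empty collection is vacuously contained
        System.out.println(actual.containsAll(Arrays.asList()));
    }
}

As the tests below show, a common idiom is to assert mutual containment (a.containsAll(b) && b.containsAll(a)) to check that two collections hold the same elements regardless of order.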
From source file: edu.unc.lib.dl.services.DigitalObjectManagerMoveTest.java

@Test
public void multiSourceTest() throws Exception {
    List<PID> moving = Arrays.asList(new PID("uuid:child1"), new PID("uuid:child32"));

    makeMatcherPair("/fedora/containerRELSEXT1.xml", source1PID);
    makeMatcherPair("/fedora/containerRELSEXT3.xml", source2PID);

    when(tripleStoreQueryService.fetchContainer(eq(new PID("uuid:child1")))).thenReturn(source1PID);
    when(tripleStoreQueryService.fetchContainer(eq(new PID("uuid:child32")))).thenReturn(source2PID);

    digitalMan.move(moving, destPID, "user", "");

    verify(accessClient, times(2)).getDatastreamDissemination(eq(source1PID), eq(RELS_EXT.getName()),
            anyString());
    verify(accessClient, times(2)).getDatastreamDissemination(eq(source2PID), eq(RELS_EXT.getName()),
            anyString());

    ArgumentCaptor<Document> source1RelsExtUpdateCaptor = ArgumentCaptor.forClass(Document.class);
    verify(managementClient, times(2)).modifyDatastream(eq(source1PID), eq(RELS_EXT.getName()), anyString(),
            anyString(), source1RelsExtUpdateCaptor.capture());

    // Check that the first source was updated
    Document clean1RelsExt = source1RelsExtUpdateCaptor.getValue();
    Set<PID> children = JDOMQueryUtil.getRelationSet(clean1RelsExt.getRootElement(), contains);
    assertEquals("Incorrect number of children in source 1 after cleanup", 11, children.size());

    // Check that the second source was updated
    ArgumentCaptor<Document> source2RelsExtUpdateCaptor = ArgumentCaptor.forClass(Document.class);
    verify(managementClient, times(2)).modifyDatastream(eq(source2PID), eq(RELS_EXT.getName()), anyString(),
            anyString(), source2RelsExtUpdateCaptor.capture());
    Document clean2RelsExt = source2RelsExtUpdateCaptor.getValue();
    children = JDOMQueryUtil.getRelationSet(clean2RelsExt.getRootElement(), contains);
    assertEquals("Incorrect number of children in source 2 after cleanup", 1, children.size());

    // Check that items from both source 1 and 2 ended up in the destination.
    ArgumentCaptor<Document> destRelsExtUpdateCaptor = ArgumentCaptor.forClass(Document.class);
    verify(managementClient).modifyDatastream(eq(destPID), eq(RELS_EXT.getName()), anyString(), anyString(),
            destRelsExtUpdateCaptor.capture());
    Document destRelsExt = destRelsExtUpdateCaptor.getValue();
    children = JDOMQueryUtil.getRelationSet(destRelsExt.getRootElement(), contains);
    assertEquals("Incorrect number of children in destination container after moved", 9, children.size());
    assertTrue("Moved children were not present in destination", children.containsAll(moving));
}
From source file: org.apache.flume.channel.file.TestFileChannelRestart.java

@Test
public void testDataFilesRequiredByBackupNotDeleted() throws Exception {
    Map<String, String> overrides = Maps.newHashMap();
    overrides.put(FileChannelConfiguration.USE_DUAL_CHECKPOINTS, "true");
    overrides.put(FileChannelConfiguration.MAX_FILE_SIZE, "1000");
    channel = createFileChannel(overrides);
    channel.start();
    String prefix = "abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz";
    Assert.assertTrue(channel.isOpen());
    putEvents(channel, prefix, 10, 100);

    Set<String> origFiles = Sets.newHashSet();
    for (File dir : dataDirs) {
        origFiles.addAll(Lists.newArrayList(dir.list()));
    }
    forceCheckpoint(channel);
    takeEvents(channel, 10, 50);
    long beforeSecondCheckpoint = System.currentTimeMillis();
    forceCheckpoint(channel);

    Set<String> newFiles = Sets.newHashSet();
    int olderThanCheckpoint = 0;
    int totalMetaFiles = 0;
    for (File dir : dataDirs) {
        File[] metadataFiles = dir.listFiles(new FilenameFilter() {
            @Override
            public boolean accept(File dir, String name) {
                return name.endsWith(".meta");
            }
        });
        totalMetaFiles = metadataFiles.length;
        for (File metadataFile : metadataFiles) {
            if (metadataFile.lastModified() < beforeSecondCheckpoint) {
                olderThanCheckpoint++;
            }
        }
        newFiles.addAll(Lists.newArrayList(dir.list()));
    }

    /*
     * Files which are not required by the new checkpoint should not have been
     * modified by the checkpoint.
     */
    Assert.assertTrue(olderThanCheckpoint > 0);
    Assert.assertTrue(totalMetaFiles != olderThanCheckpoint);

    /*
     * All files needed by the original checkpoint should still be there.
     */
    Assert.assertTrue(newFiles.containsAll(origFiles));

    takeEvents(channel, 10, 50);
    forceCheckpoint(channel);
    newFiles = Sets.newHashSet();
    for (File dir : dataDirs) {
        newFiles.addAll(Lists.newArrayList(dir.list()));
    }
    Assert.assertTrue(!newFiles.containsAll(origFiles));
}
From source file: net.sf.morph.context.support.ContextBaseTestCase.java

public void testKeySet() {
    Set keySet = null;
    Collection all = new ArrayList();

    // Unsupported operations
    // keySet = context.keySet();
    // try {
    //     keySet.add("bop");
    //     fail("Should have thrown UnsupportedOperationException");
    // } catch (UnsupportedOperationException e) {
    //     ; // Expected result
    // }
    // try {
    //     Collection adds = new ArrayList();
    //     adds.add("bop");
    //     keySet.addAll(adds);
    //     fail("Should have thrown UnsupportedOperationException");
    // } catch (UnsupportedOperationException e) {
    //     ; // Expected result
    // }

    // Before-modification checks
    keySet = context.keySet();
    assertEquals(context.size(), keySet.size());
    assertTrue(!keySet.contains("foo"));
    assertTrue(!keySet.contains("bar"));
    assertTrue(!keySet.contains("baz"));
    assertTrue(!keySet.contains("bop"));

    // Add the new elements
    context.put("foo", "foo value");
    context.put("bar", "bar value");
    context.put("baz", "baz value");
    all.add("foo");
    all.add("bar");
    all.add("baz");

    // After-modification checks
    keySet = context.keySet();
    assertEquals(expectedAttributeCount() + 3, keySet.size());
    assertTrue(keySet.contains("foo"));
    assertTrue(keySet.contains("bar"));
    assertTrue(keySet.contains("baz"));
    assertTrue(!keySet.contains("bop"));
    assertTrue(keySet.containsAll(all));

    // Remove a single element via remove()
    // context.remove("bar");
    all.remove("bar");
    keySet = context.keySet();
    assertEquals(expectedAttributeCount() + 3, keySet.size());
    assertTrue(keySet.contains("foo"));
    // assertTrue(!keySet.contains("bar"));
    assertTrue(keySet.contains("baz"));
    assertTrue(!keySet.contains("bop"));
    assertTrue(keySet.containsAll(all));

    // Remove a single element via keySet.remove()
    keySet.remove("baz");
    all.remove("baz");
    keySet = context.keySet();
    assertEquals(expectedAttributeCount() + 3, keySet.size());
    assertTrue(keySet.contains("foo"));
    // assertTrue(!keySet.contains("bar"));
    // assertTrue(!keySet.contains("baz"));
    assertTrue(!keySet.contains("bop"));
    assertTrue(keySet.containsAll(all));

    // Remove all elements via keySet.clear()
    all.clear();
    // assertTrue(!keySet.contains("foo"));
    // assertTrue(!keySet.contains("bar"));
    // assertTrue(!keySet.contains("baz"));
    assertTrue(!keySet.contains("bop"));
    assertTrue(keySet.containsAll(all));

    // Add the new elements #2
    context.put("foo", "foo value");
    context.put("bar", "bar value");
    context.put("baz", "baz value");
    all.add("foo");
    all.add("bar");
    all.add("baz");

    // After-modification checks #2
    keySet = context.keySet();
    assertEquals(expectedAttributeCount() + 3, keySet.size());
    assertTrue(keySet.contains("foo"));
    assertTrue(keySet.contains("bar"));
    assertTrue(keySet.contains("baz"));
    assertTrue(!keySet.contains("bop"));
    assertTrue(keySet.containsAll(all));
}
From source file: org.springsource.ide.eclipse.dashboard.internal.ui.editors.DashboardMainPage.java

private void displayFeeds(Set<SyndEntry> entries, Composite composite, ScrolledComposite scrolled,
        PageBook pagebook, Control disclaimer, Section section) {
    // make sure the entries are sorted correctly
    List<SyndEntry> sortedEntries = new ArrayList<SyndEntry>(entries);
    Collections.sort(sortedEntries, new Comparator<SyndEntry>() {
        public int compare(SyndEntry o1, SyndEntry o2) {
            return o2.getPublishedDate().compareTo(o1.getPublishedDate());
        }
    });
    if (sortedEntries.isEmpty()) {
        pagebook.showPage(disclaimer);
        return;
    }
    if (displayedEntries.containsAll(entries) && entries.containsAll(displayedEntries)) {
        return;
    } else {
        displayedEntries.clear();
        displayedEntries.addAll(entries);
    }
    Control[] children = composite.getChildren();
    int counter = 0;
    for (SyndEntry entry : sortedEntries) {
        displayFeed(entry, composite, section, counter, children);
        counter++;
    }
    for (int i = counter * 2; i < children.length; i++) {
        children[i].dispose();
    }
    pagebook.showPage(scrolled);
    composite.pack(true);
}
From source file: amie.keys.CombinationsExplorationNew.java

/**
 * Determines whether there exists a more general version of the given
 * conditional key, i.e., a version of the key with the exact same relations but
 * fewer instantiations. For instance, if the key states lastname | nationality=French,
 * field=Databases but the map contains a key lastname, nationality | field=Databases
 * (here nationality is not instantiated), the method reports this as a subsumption
 * case and returns true.
 *
 * @param conditionalKey
 * @param conditionRule
 * @param conditions2Keys2
 * @return
 */
private static boolean isSubsumedByKey(Rule conditionalKey, Rule conditionRule,
        MultiMap<Rule, Rule> conditions2Keys2) {
    if (conditionRule.getLength() < 2) {
        return false;
    }
    Set<ByteString> instantiations = new LinkedHashSet<>();
    Set<ByteString> instantiatedRelations = new LinkedHashSet<>();
    Set<ByteString> nonInstantiatedRelations = new LinkedHashSet<>();
    Utilities.parseConditionalKey(conditionalKey, nonInstantiatedRelations, instantiations,
            instantiatedRelations);

    /*
     * Now get all possible simpler versions of the condition. If the condition is
     * field=Databases, residence=Paris, gender=female the method returns:
     * field=Databases, residence=Paris
     * field=Databases, gender=female
     * residence=Paris, gender=female
     * residence=Paris
     * gender=female
     * field=Databases
     */
    List<Rule> properSubconditions = getAllProperSubconditions(conditionRule);
    for (Rule subCondition : properSubconditions) {
        List<Rule> potentialParents = conditions2Keys2.get(subCondition);
        if (potentialParents != null) {
            for (Rule potentialParent : potentialParents) {
                if (potentialParent.getLength() != conditionalKey.getLength()) {
                    // System.out.println("potentialParent:" + potentialParent);
                    continue;
                }
                Set<ByteString> instantiatedRelationsParent = new LinkedHashSet<>();
                Set<ByteString> nonInstantiatedRelationsParent = new LinkedHashSet<>();
                Set<ByteString> instantiationsParent = new LinkedHashSet<>();
                Utilities.parseConditionalKey(potentialParent, nonInstantiatedRelationsParent,
                        instantiationsParent, instantiatedRelationsParent);
                Set<ByteString> instansiatedNonInstantiatedRelations = new LinkedHashSet<>();
                instansiatedNonInstantiatedRelations.addAll(instantiatedRelations);
                instansiatedNonInstantiatedRelations.addAll(nonInstantiatedRelations);
                Set<ByteString> instansiatedNonInstantiatedRelationsParent = new LinkedHashSet<>();
                instansiatedNonInstantiatedRelationsParent.addAll(instantiatedRelationsParent);
                instansiatedNonInstantiatedRelationsParent.addAll(nonInstantiatedRelationsParent);
                if (instantiatedRelations.containsAll(instantiatedRelationsParent)
                        && nonInstantiatedRelationsParent.containsAll(nonInstantiatedRelations)
                        && instansiatedNonInstantiatedRelationsParent
                                .containsAll(instansiatedNonInstantiatedRelations)) {
                    return true;
                }
            }
        }
    }
    return false;
}
From source file: com.aurel.track.accessControl.AccessBeans.java

/**
 * Returns the field restrictions for a person for a certain project and
 * issueType. A field will be returned only if it is restricted (either no
 * read or no modify right).
 *
 * @param personID
 * @param projectID
 *            can be null, in this case get the field restrictions for all
 *            roles the user has in any project
 * @param issueTypeID
 *            can be null, same as for projectID
 * @param fieldIDs
 *            can be null; if specified, search for restrictions only for
 *            these fields
 * @param edit
 *            whether we are in an editing (edit/create issue) mode or only
 *            read only mode (print issue, email sending)
 * @return a map from fieldID to restriction
 */
public static Map<Integer, Integer> getFieldRestrictions(Integer personID, Integer projectID,
        Integer issueTypeID, List<Integer> fieldIDs, boolean edit) {
    Map<Integer, Integer> resultMap = new HashMap<Integer, Integer>();
    if (personID == null) {
        // person can be null, for example by POP3 email submission:
        // the submitter gets an email on creation of the issue,
        // but then no restrictions are needed
        return resultMap;
    }
    // get all hidden and read only fields for all roles
    Map<Integer, Set<Integer>> hiddenFields = new HashMap<Integer, Set<Integer>>();
    Map<Integer, Set<Integer>> readOnlyFields = new HashMap<Integer, Set<Integer>>();
    FieldsRestrictionsToRoleBL.getRestrictedFieldsToRoles(fieldIDs, hiddenFields, readOnlyFields);
    if (!hasFieldRestrictions(hiddenFields, readOnlyFields, fieldIDs, edit)) {
        return resultMap;
    }
    List<Integer> personIDs = getMeAndSubstitutedAndGroups(personID);
    Set<Integer> roles = null;
    if (projectID != null) {
        // roles for project and issueType
        roles = getRolesForPersonInProjectForIssueType(personIDs, projectID, issueTypeID);
    } else {
        // roles in any project for any issueType
        roles = getAllRolesForPerson(personIDs);
    }
    if (roles == null || roles.isEmpty()) {
        // all roles revoked from the project (only RACI role) -> no roleFlag restriction
        if (LOGGER.isDebugEnabled()) {
            LOGGER.debug(noRole + personID + " in project " + projectID + " issueType " + issueTypeID);
        }
        return resultMap;
    }
    if (edit) {
        Set<Integer> allRolesWithHiddenOrReadOnlyFields = new HashSet<Integer>();
        for (Set<Integer> rolesWithHiddenFields : hiddenFields.values()) {
            allRolesWithHiddenOrReadOnlyFields.addAll(rolesWithHiddenFields);
        }
        for (Set<Integer> rolesWithReadOnlyFields : readOnlyFields.values()) {
            allRolesWithHiddenOrReadOnlyFields.addAll(rolesWithReadOnlyFields);
        }
        if (allRolesWithHiddenOrReadOnlyFields.containsAll(roles)) {
            // each role the user has contains either hidden or read only field restrictions
            addRestrictions(resultMap, roles, hiddenFields, TRoleFieldBean.ACCESSFLAG.NOACCESS);
            addRestrictions(resultMap, roles, readOnlyFields, TRoleFieldBean.ACCESSFLAG.READ_ONLY);
        } else {
            if (LOGGER.isDebugEnabled()) {
                LOGGER.debug("Role without restriction found for person " + personID + " in project "
                        + projectID + " issueType " + issueTypeID);
            }
            return resultMap;
        }
    } else {
        Set<Integer> allRolesWithHiddenFields = new HashSet<Integer>();
        for (Set<Integer> rolesWithHiddenFields : hiddenFields.values()) {
            allRolesWithHiddenFields.addAll(rolesWithHiddenFields);
        }
        if (allRolesWithHiddenFields.containsAll(roles)) {
            // each role the user has contains hidden field restrictions
            addRestrictions(resultMap, roles, hiddenFields, TRoleFieldBean.ACCESSFLAG.NOACCESS);
        } else {
            if (LOGGER.isDebugEnabled()) {
                LOGGER.debug("Role without restriction found for person " + personID + " in project "
                        + projectID + " issueType " + issueTypeID);
            }
            return resultMap;
        }
    }
    return resultMap;
}
From source file: org.deri.iris.queryrewriting.FORewriter.java

/**
 * Checks whether the atoms share the same variables in all the existential positions.
 * @param q a conjunctive query
 * @param r a TGD
 * @return true if they share the same variables in all the existential positions
 */
protected boolean factorisable(IRule q, final IRule r, final Map<IVariable, ITerm> sbstMap) {
    rep.incrementValue(RewMetric.FACTOR_COUNT);
    if (q.getBody().size() > 1) {
        // Get the atoms in body(q) that unify with head(r).
        final IAtom rheadAtom = r.getHead().iterator().next().getAtom();
        final Set<IPosition> headExPos = r.getExistentialPositions();
        final Set<IAtom> potentialUnifiableAtoms = new LinkedHashSet<IAtom>();
        for (final ILiteral l : q.getBody()) {
            final IAtom qbodyAtom = l.getAtom();
            if (qbodyAtom.getPredicate().equals(rheadAtom.getPredicate())) {
                potentialUnifiableAtoms.add(qbodyAtom);
            }
        }
        if (potentialUnifiableAtoms.size() < 2)
            return false;
        else {
            // compute the powerset of atoms that are potentially unifiable in the body of the query.
            final Set<Set<IAtom>> atomsPowSet = Sets.powerSet(potentialUnifiableAtoms);
            // sort the subsets by size
            final List<Set<IAtom>> sortedPowSet = Lists.newArrayList(atomsPowSet);
            Collections.sort(sortedPowSet, new SetSizeComparator());
            for (final Set<IAtom> candidateSet : sortedPowSet) {
                // check that we have at least two atoms in the candidate set.
                if (candidateSet.size() > 1) {
                    final Map<IVariable, ITerm> unifier = new HashMap<IVariable, ITerm>();
                    if (TermMatchingAndSubstitution.unify(candidateSet, unifier)) {
                        // the atoms have a unifier; check that there is a well-behaved existential variable
                        // get variables in existential positions
                        final Set<IVariable> variables = getVariablesInPositions(candidateSet, headExPos);
                        for (final IVariable var : variables) {
                            // check that the variable does not occur in non-existential positions
                            if (headExPos.containsAll(q.getPositions(var))
                                    && containedInAllAtoms(var, candidateSet)) {
                                q = RewritingUtils.factoriseQuery(q, unifier);
                                return true;
                            }
                        }
                    }
                }
            }
            return false;
        }
    } else
        return false;
}
From source file: org.auraframework.def.BaseComponentDefTest.java

/**
 * Test method for {@link BaseComponentDef#getModelDefDescriptors()}.
 */
public void testGetModelDefDescriptors() throws QuickFixException {
    DefDescriptor<T> grandParentDesc = addSourceAutoCleanup(getDefClass(),
            String.format(baseTag, "extensible='true'", ""));
    DefDescriptor<ModelDef> grandParentModelDesc = DefDescriptorImpl.getAssociateDescriptor(grandParentDesc,
            ModelDef.class, DefDescriptor.JAVASCRIPT_PREFIX);
    addSourceAutoCleanup(grandParentModelDesc, "{obj:{}}");

    DefDescriptor<T> parentDesc = addSourceAutoCleanup(getDefClass(),
            String.format(baseTag,
                    String.format("extends='%s' extensible='true' model='js://test.jsModel'",
                            grandParentDesc.getDescriptorName()),
                    ""));

    DefDescriptor<T> compDesc = addSourceAutoCleanup(getDefClass(),
            String.format(baseTag,
                    String.format("extends='%s' model='java://org.auraframework.impl.java.model.TestModel'",
                            parentDesc.getDescriptorName()),
                    ""));

    List<DefDescriptor<ModelDef>> dds = compDesc.getDef().getModelDefDescriptors();
    assertNotNull(dds);
    assertEquals(3, dds.size());

    List<String> names = Lists.transform(dds, new Function<DefDescriptor<?>, String>() {
        @Override
        public String apply(DefDescriptor<?> input) {
            return input.getQualifiedName();
        }
    });
    Set<String> expected = ImmutableSet.of("java://org.auraframework.impl.java.model.TestModel",
            "js://test.jsModel", grandParentModelDesc.getQualifiedName());
    if (!names.containsAll(expected)) {
        fail("Missing expected models. Expected: " + expected + ", Actual: " + names);
    }
    if (!expected.containsAll(names)) {
        fail("Unexpected models. Expected: " + expected + ", Actual: " + names);
    }
}
From source file: org.auraframework.def.BaseComponentDefTest.java

public void testAppendDependenciesWithAllReferences() throws QuickFixException {
    DefDescriptor<T> parentDesc = addSourceAutoCleanup(getDefClass(),
            String.format(baseTag, "extensible='true'", ""));
    DefDescriptor<ComponentDef> childDesc = addSourceAutoCleanup(ComponentDef.class, "<aura:component/>");
    DefDescriptor<InterfaceDef> intfDesc = addSourceAutoCleanup(InterfaceDef.class, "<aura:interface/>");
    DefDescriptor<EventDef> eventDesc = addSourceAutoCleanup(EventDef.class,
            "<aura:event type='component' support='GA'/>");
    DefDescriptor<ProviderDef> providerDesc = DefDescriptorImpl.getInstance(
            "java://org.auraframework.impl.java.provider.ConcreteProvider", ProviderDef.class);

    DefDescriptor<T> cmpDesc = addSourceAutoCleanup(getDefClass(),
            String.format(baseTag,
                    String.format("extends='%s' implements='%s' provider='%s'",
                            parentDesc.getDescriptorName(), intfDesc.getDescriptorName(), providerDesc),
                    String.format("<%s/><aura:registerevent name='evt' type='%s'/>",
                            childDesc.getDescriptorName(), eventDesc.getDescriptorName())));

    DefDescriptor<ModelDef> modelDesc = DefDescriptorImpl.getAssociateDescriptor(cmpDesc, ModelDef.class,
            DefDescriptor.JAVASCRIPT_PREFIX);
    addSourceAutoCleanup(modelDesc, "{obj:{}}");
    DefDescriptor<ControllerDef> controllerDesc = DefDescriptorImpl.getAssociateDescriptor(cmpDesc,
            ControllerDef.class, DefDescriptor.JAVASCRIPT_PREFIX);
    addSourceAutoCleanup(controllerDesc, "{hi:function(){}}");
    DefDescriptor<RendererDef> renderDesc = DefDescriptorImpl.getAssociateDescriptor(cmpDesc,
            RendererDef.class, DefDescriptor.JAVASCRIPT_PREFIX);
    addSourceAutoCleanup(renderDesc, "({render:function(c){return this.superRender();}})");
    DefDescriptor<HelperDef> helperDesc = DefDescriptorImpl.getAssociateDescriptor(cmpDesc, HelperDef.class,
            DefDescriptor.JAVASCRIPT_PREFIX);
    addSourceAutoCleanup(helperDesc, "({help:function(){}})");

    DefDescriptor<StyleDef> styleDesc = Aura.getDefinitionService().getDefDescriptor(cmpDesc,
            DefDescriptor.CSS_PREFIX, StyleDef.class);
    String className = cmpDesc.getNamespace() + StringUtils.capitalize(cmpDesc.getName());
    addSourceAutoCleanup(styleDesc, String.format(".%s {font-style:italic;}", className));
    DefDescriptor<NamespaceDef> namespaceDesc = Aura.getDefinitionService().getDefDescriptor(
            String.format("%s://%s", DefDescriptor.MARKUP_PREFIX, styleDesc.getNamespace()),
            NamespaceDef.class);
    addSourceAutoCleanup(namespaceDesc, "<aura:namespace/>");

    Set<DefDescriptor<?>> dependencies = new HashSet<>();
    cmpDesc.getDef().appendDependencies(dependencies);

    @SuppressWarnings("unchecked")
    Set<DefDescriptor<?>> expected = Sets.newHashSet(parentDesc, childDesc, intfDesc, providerDesc,
            modelDesc, controllerDesc, eventDesc, styleDesc, renderDesc, helperDesc);
    if (!dependencies.containsAll(expected)) {
        fail(String.format("missing dependencies - EXPECTED: %s, ACTUAL: %s", expected, dependencies));
    }
    if (!expected.containsAll(dependencies)) {
        fail(String.format("extra dependencies - EXPECTED: %s, ACTUAL: %s", expected, dependencies));
    }
}
From source file: com.github.gekoh.yagen.ddl.CreateDDL.java

private void addAuditTrigger(Dialect dialect, StringBuffer buf, String nameLC, Set<String> columns) {
    TableConfig tableConfig = tblNameToConfig.get(nameLC);
    String templateName = "AuditTrigger";
    if (!columns.containsAll(AUDIT_COLUMNS)) {
        if (tableConfig != null && tableConfig.getTableAnnotationOfType(Auditable.class) != null
                && columns.contains(AuditInfo.LAST_MODIFIED_AT)) {
            templateName += "SingleTimestamp";
        } else {
            return;
        }
    }
    if (isPostgreSql(dialect)) {
        writePostgreSqlAuditTrigger(dialect, buf, nameLC);
        return;
    }
    StringWriter wr = new StringWriter();
    VelocityContext context = new VelocityContext();
    context.put("liveTableName", nameLC);
    context.put("created_at", AuditInfo.CREATED_AT);
    context.put("created_by", AuditInfo.CREATED_BY);
    context.put("last_modified_at", AuditInfo.LAST_MODIFIED_AT);
    context.put("last_modified_by", AuditInfo.LAST_MODIFIED_BY);
    if (isOracle(dialect)) {
        writeOracleAuditTrigger(dialect, buf, context, nameLC, templateName + ".vm.pl.sql");
    } else {
        try {
            templateName += "SingleOperation.vm.pl.sql";
            wr.append(STATEMENT_SEPARATOR);
            writeTriggerSingleOperation(dialect, wr, templateName, context, nameLC, "_at", "I");
            wr.write("\n/\n");
            wr.append(STATEMENT_SEPARATOR);
            writeTriggerSingleOperation(dialect, wr, templateName, context, nameLC, "_at", "U");
            wr.write("\n/\n");
            buf.append(wr.toString());
        } catch (IOException e) {
            LOG.error("error writing audit triggers", e);
        }
    }
}