List of usage examples for java.util.Set.clear()
void clear();
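Set.clear() removes all of the elements from the set, leaving it empty but still usable; sets that do not support removal (for example, views from Collections.unmodifiableSet) throw UnsupportedOperationException instead. A recurring pattern in the examples below is reusing a single Set instance across several phases by clearing it between uses. The following minimal sketch illustrates both points; the class and variable names are illustrative only:

import java.util.Collections;
import java.util.HashSet;
import java.util.Set;

public class SetClearDemo {
    public static void main(String[] args) {
        Set<String> names = new HashSet<String>();
        names.add("alpha");
        names.add("beta");

        // clear() empties the set in place; the instance remains usable
        names.clear();
        System.out.println(names.isEmpty()); // true

        // A cleared set can be repopulated, e.g. for a second processing phase
        names.add("gamma");
        System.out.println(names.size()); // 1

        // Unmodifiable views reject clear() with UnsupportedOperationException
        Set<String> readOnly = Collections.unmodifiableSet(names);
        try {
            readOnly.clear();
        } catch (UnsupportedOperationException expected) {
            System.out.println("clear() not supported on unmodifiable set");
        }
    }
}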
From source file:tools.xor.logic.DefaultUpdate4Set.java
public void testCase14() {
    // Initial association between A and C
    C = taskDao.findById(C.getId());
    Set<Task> children = new HashSet<Task>();
    children.add(C);

    // Setup the bi-directional link
    A = taskDao.findById(A.getId());
    A.setTaskChildren(children);
    C.setTaskParent(A);
    taskDao.saveOrUpdate(A);

    // Initial association between D and B
    B = taskDao.findById(B.getId());
    children = new HashSet<Task>();
    children.add(B);

    // Setup the bi-directional link
    D = taskDao.findById(D.getId());
    D.setTaskChildren(children);
    B.setTaskParent(D);
    taskDao.saveOrUpdate(D);

    A = (Task) aggregateService.read(A, getSettings());
    B = (Task) aggregateService.read(B, getSettings());
    D = (Task) aggregateService.read(D, getSettings());
    C = A.getTaskChildren().iterator().next();
    assert (C.getTaskParent() != null);

    // Remove child1 and add child2
    children = A.getTaskChildren();
    children.clear();
    children.add(B);
    D.getTaskChildren().clear();
    A.setAlternateTask(D);
    C.setTaskParent(null);
    B.setTaskParent(A);
    B.setAlternateTask(C);

    Settings settings = getSettings();
    settings.setInterceptor(new Interceptor() {
        // check the number of actions in each object
        private void checkNumber(BusinessObject dataObject, List<Executable> actions) {
            Task task = (Task) dataObject.getInstance();
            if (task.getName().equals(B_NAME)) {
                System.out.println("B_NAME expected 3 found: " + actions.size());
                for (Executable e : actions) {
                    System.out.println("Executable: " + e.toString());
                }
                assert (actions.size() == 3);
            } else if (task.getName().equals(C_NAME)) {
                System.out.println("C_NAME expected 2 found: " + actions.size());
                for (Executable e : actions) {
                    System.out.println("Executable: " + e.toString());
                }
                assert (actions.size() == 2);
            } else {
                System.out.println("else expected 1 found: " + actions.size());
                for (Executable e : actions) {
                    System.out.println("Executable: " + e.toString());
                }
                assert (actions.size() == 1);
            }
        }

        @Override
        public void preBiDirActionStage(Map<PropertyKey, List<Executable>> actions) {
            // check the action queue to see if the correct number of actions are present
            assert (actions.size() == 4);
            for (Map.Entry<PropertyKey, List<Executable>> entry : actions.entrySet())
                checkNumber(entry.getKey().getDataObject(), entry.getValue());
        }
    });

    A = (Task) aggregateService.update(A, settings);
    A = taskDao.findById(A.getId());
    B = taskDao.findById(B.getId());
    C = taskDao.findById(C.getId());
    D = taskDao.findById(D.getId());
    A = (Task) aggregateService.read(A, getSettings());
    B = (Task) aggregateService.read(B, getSettings());
    C = (Task) aggregateService.read(C, getSettings());
    D = (Task) aggregateService.read(D, getSettings());

    assert (A.getTaskChildren() != null && A.getTaskChildren().size() == 1);
    assert (B.getId() == A.getTaskChildren().iterator().next().getId());
    assert (C.getTaskParent() == null);
    assert (D.getTaskChildren() == null || D.getTaskChildren().size() == 0);
}
From source file:de.unihannover.l3s.mws.bean.CompareSearches.java
private void alignSiteDomain() {
    // siteAvailableDomainlist1 = new ArrayList<String>();
    Set<String> domains = new HashSet<String>();
    for (String s : siteAvailablelist1)
        domains.add(getTldString("http://" + s));
    siteAvailableDomainlist1.clear();
    siteAvailableDomainlist1.addAll(domains);

    // siteSelectedDomainlist1 = new ArrayList<String>();
    domains.clear();
    for (String s : siteSelectedlist1)
        domains.add(getTldString("http://" + s));
    siteSelectedDomainlist1.clear();
    siteSelectedDomainlist1.addAll(domains);
}
From source file:org.dkpro.lab.engine.impl.BatchTaskEngine.java
/**
 * Locate the latest task execution compatible with the given task configuration.
 *
 * @param aContext
 *            the context of the current batch task.
 * @param aConfig
 *            the current parameter configuration.
 * @param aExecutedSubtasks
 *            already executed subtasks.
 */
protected void executeConfiguration(BatchTask aConfiguration, TaskContext aContext, Map<String, Object> aConfig,
        Set<String> aExecutedSubtasks) throws ExecutionException, LifeCycleException {
    if (log.isTraceEnabled()) {
        // Show all subtasks executed so far
        for (String est : aExecutedSubtasks) {
            log.trace("-- Already executed: " + est);
        }
    }

    // Set up initial scope used by sub-batch-tasks using the inherited scope. The scope is
    // extended as the subtasks of this batch are executed with the present configuration.
    // FIXME: That means that sub-batch-tasks in two different configurations cannot see
    // each other. Is that intended? Mind that the "executedSubtasks" set is intentionally
    // maintained *across* configurations, so maybe the scope should also be maintained
    // *across* configurations? - REC 2014-06-15
    Set<String> scope = new HashSet<String>();
    if (aConfiguration.getScope() != null) {
        scope.addAll(aConfiguration.getScope());
    }

    // Configure subtasks
    for (Task task : aConfiguration.getTasks()) {
        aContext.getLifeCycleManager().configure(aContext, task, aConfig);
    }

    Queue<Task> queue = new LinkedList<Task>(aConfiguration.getTasks());
    Set<Task> loopDetection = new HashSet<Task>();
    List<UnresolvedImportException> deferralReasons = new ArrayList<UnresolvedImportException>();
    while (!queue.isEmpty()) {
        Task task = queue.poll();
        try {
            // Check if a subtask execution compatible with the present configuration
            // does already exist ...
            TaskContextMetadata execution = getExistingExecution(aConfiguration, aContext, task, aConfig,
                    aExecutedSubtasks);
            if (execution == null) {
                // ... otherwise execute it with the present configuration
                log.info("Executing task [" + task.getType() + "]");
                // set scope here so that the inherited scopes are considered
                // set scope here so that tasks added to scope in this loop are considered
                if (task instanceof BatchTask) {
                    ((BatchTask) task).setScope(scope);
                }
                execution = runNewExecution(aContext, task, aConfig, aExecutedSubtasks);
            } else {
                log.debug("Using existing execution [" + execution.getId() + "]");
            }

            // Record new/existing execution
            aExecutedSubtasks.add(execution.getId());
            scope.add(execution.getId());
            loopDetection.clear();
            deferralReasons.clear();
        } catch (UnresolvedImportException e) {
            // Add task back to queue
            log.debug("Deferring execution of task [" + task.getType() + "]: " + e.getMessage());
            queue.add(task);

            // Detect endless loop
            if (loopDetection.contains(task)) {
                StringBuilder details = new StringBuilder();
                for (UnresolvedImportException r : deferralReasons) {
                    details.append("\n -");
                    details.append(r.getMessage());
                }
                // throw an UnresolvedImportException in case there is an outer BatchTask
                // which needs to be executed first
                throw new UnresolvedImportException(e, details.toString());
            }

            // Record failed execution
            loopDetection.add(task);
            deferralReasons.add(e);
        }
    }
}
From source file:com.netflix.nicobar.core.module.ScriptModuleLoaderTest.java
@Test
public void testCompileErrorSendsNotification() throws Exception {
    // original graph: A->B->C->D
    long originalCreateTime = 1000;
    Set<ScriptArchive> updateArchives = new HashSet<ScriptArchive>();
    updateArchives.add(new TestDependecyScriptArchive(new ScriptModuleSpec.Builder("A")
            .addCompilerPluginId("mockPlugin").addModuleDependency("B").build(), originalCreateTime));
    updateArchives.add(new TestDependecyScriptArchive(new ScriptModuleSpec.Builder("B")
            .addCompilerPluginId("mockPlugin").addModuleDependency("C").build(), originalCreateTime));
    updateArchives.add(new TestDependecyScriptArchive(new ScriptModuleSpec.Builder("C")
            .addCompilerPluginId("mockPlugin").addModuleDependency("D").build(), originalCreateTime));
    updateArchives.add(new TestDependecyScriptArchive(
            new ScriptModuleSpec.Builder("D").addCompilerPluginId("mockPlugin").build(), originalCreateTime));

    ScriptModuleListener mockListener = createMockListener();
    ScriptModuleLoader moduleLoader = new ScriptModuleLoader.Builder().addListener(mockListener)
            .addPluginSpec(new ScriptCompilerPluginSpec.Builder("mockPlugin")
                    .withPluginClassName(MockScriptCompilerPlugin.class.getName()).build())
            .build();
    when(MOCK_COMPILER.shouldCompile(Mockito.any(ScriptArchive.class))).thenReturn(true);
    moduleLoader.updateScriptArchives(updateArchives);
    reset(mockListener);

    // update C, but set compilation to fail.
    updateArchives.clear();
    long updatedCreateTime = 2000;
    TestDependecyScriptArchive updatedArchiveC = new TestDependecyScriptArchive(
            new ScriptModuleSpec.Builder("C").addCompilerPluginId("mockPlugin").addModuleDependency("D")
                    .build(),
            updatedCreateTime);
    updateArchives.add(updatedArchiveC);
    reset(MOCK_COMPILER);
    when(MOCK_COMPILER.shouldCompile(Mockito.eq(updatedArchiveC))).thenReturn(true);
    ScriptCompilationException compilationException = new ScriptCompilationException("TestCompileException",
            null);
    when(MOCK_COMPILER.compile(Mockito.eq(updatedArchiveC), Mockito.any(JBossModuleClassLoader.class),
            Mockito.any(Path.class))).thenThrow(compilationException);
    moduleLoader.updateScriptArchives(updateArchives);

    // validate that they were compiled in the updated reverse dependency order
    verify(mockListener).archiveRejected(updatedArchiveC, ArchiveRejectedReason.COMPILE_FAILURE,
            compilationException);
    verifyNoMoreInteractions(mockListener);

    // validate the post-condition of the module database
    assertEquals(moduleLoader.getScriptModule("A").getCreateTime(), originalCreateTime);
    assertEquals(moduleLoader.getScriptModule("B").getCreateTime(), originalCreateTime);
    assertEquals(moduleLoader.getScriptModule("C").getCreateTime(), originalCreateTime);
    assertEquals(moduleLoader.getScriptModule("D").getCreateTime(), originalCreateTime);
    assertEquals(moduleLoader.getAllScriptModules().size(), 4);
}
From source file:org.apache.sentry.provider.db.service.persistent.TestSentryStore.java
@Test
public void testListSentryPrivilegesForProvider() throws Exception {
    String roleName1 = "list-privs-r1", roleName2 = "list-privs-r2";
    String groupName1 = "list-privs-g1", groupName2 = "list-privs-g2";
    String grantor = "g1";
    long seqId = sentryStore.createSentryRole(roleName1).getSequenceId();
    assertEquals(seqId + 1, sentryStore.createSentryRole(roleName2).getSequenceId());

    TSentryPrivilege privilege1 = new TSentryPrivilege();
    privilege1.setPrivilegeScope("TABLE");
    privilege1.setServerName("server1");
    privilege1.setDbName("db1");
    privilege1.setTableName("tbl1");
    privilege1.setAction("SELECT");
    privilege1.setCreateTime(System.currentTimeMillis());
    assertEquals(seqId + 2,
            sentryStore.alterSentryRoleGrantPrivilege(grantor, roleName1, privilege1).getSequenceId());
    assertEquals(seqId + 3,
            sentryStore.alterSentryRoleGrantPrivilege(grantor, roleName2, privilege1).getSequenceId());

    TSentryPrivilege privilege2 = new TSentryPrivilege();
    privilege2.setPrivilegeScope("SERVER");
    privilege2.setServerName("server1");
    privilege2.setCreateTime(System.currentTimeMillis());
    assertEquals(seqId + 4,
            sentryStore.alterSentryRoleGrantPrivilege(grantor, roleName2, privilege2).getSequenceId());

    Set<TSentryGroup> groups = Sets.newHashSet();
    TSentryGroup group = new TSentryGroup();
    group.setGroupName(groupName1);
    groups.add(group);
    assertEquals(seqId + 5, sentryStore.alterSentryRoleAddGroups(grantor, roleName1, groups).getSequenceId());
    groups.clear();
    group = new TSentryGroup();
    group.setGroupName(groupName2);
    groups.add(group);
    // group 2 has both roles 1 and 2
    assertEquals(seqId + 6, sentryStore.alterSentryRoleAddGroups(grantor, roleName1, groups).getSequenceId());
    assertEquals(seqId + 7, sentryStore.alterSentryRoleAddGroups(grantor, roleName2, groups).getSequenceId());

    // group1 all roles
    assertEquals(Sets.newHashSet("server=server1->db=db1->table=tbl1->action=select"),
            SentryStore.toTrimedLower(sentryStore.listAllSentryPrivilegesForProvider(
                    Sets.newHashSet(groupName1), new TSentryActiveRoleSet(true, new HashSet<String>()))));
    // one active role
    assertEquals(Sets.newHashSet("server=server1->db=db1->table=tbl1->action=select"),
            SentryStore.toTrimedLower(sentryStore.listAllSentryPrivilegesForProvider(
                    Sets.newHashSet(groupName1), new TSentryActiveRoleSet(false, Sets.newHashSet(roleName1)))));
    // unknown active role
    assertEquals(Sets.newHashSet(),
            SentryStore.toTrimedLower(sentryStore.listAllSentryPrivilegesForProvider(
                    Sets.newHashSet(groupName1), new TSentryActiveRoleSet(false, Sets.newHashSet("not a role")))));
    // no active roles
    assertEquals(Sets.newHashSet(),
            SentryStore.toTrimedLower(sentryStore.listAllSentryPrivilegesForProvider(
                    Sets.newHashSet(groupName1), new TSentryActiveRoleSet(false, new HashSet<String>()))));

    // group2 all roles
    assertEquals(Sets.newHashSet("server=server1->db=db1->table=tbl1->action=select", "server=server1"),
            SentryStore.toTrimedLower(sentryStore.listAllSentryPrivilegesForProvider(
                    Sets.newHashSet(groupName2), new TSentryActiveRoleSet(true, new HashSet<String>()))));
    // one active role
    assertEquals(Sets.newHashSet("server=server1->db=db1->table=tbl1->action=select"),
            SentryStore.toTrimedLower(sentryStore.listAllSentryPrivilegesForProvider(
                    Sets.newHashSet(groupName2), new TSentryActiveRoleSet(false, Sets.newHashSet(roleName1)))));
    assertEquals(Sets.newHashSet("server=server1->db=db1->table=tbl1->action=select", "server=server1"),
            SentryStore.toTrimedLower(sentryStore.listAllSentryPrivilegesForProvider(
                    Sets.newHashSet(groupName2), new TSentryActiveRoleSet(false, Sets.newHashSet(roleName2)))));
    // unknown active role
    assertEquals(Sets.newHashSet(),
            SentryStore.toTrimedLower(sentryStore.listAllSentryPrivilegesForProvider(
                    Sets.newHashSet(groupName2), new TSentryActiveRoleSet(false, Sets.newHashSet("not a role")))));
    // no active roles
    assertEquals(Sets.newHashSet(),
            SentryStore.toTrimedLower(sentryStore.listAllSentryPrivilegesForProvider(
                    Sets.newHashSet(groupName2), new TSentryActiveRoleSet(false, new HashSet<String>()))));

    // both groups, all active roles
    assertEquals(Sets.newHashSet("server=server1->db=db1->table=tbl1->action=select", "server=server1"),
            SentryStore.toTrimedLower(
                    sentryStore.listAllSentryPrivilegesForProvider(Sets.newHashSet(groupName1, groupName2),
                            new TSentryActiveRoleSet(true, new HashSet<String>()))));
    // one active role
    assertEquals(Sets.newHashSet("server=server1->db=db1->table=tbl1->action=select"),
            SentryStore.toTrimedLower(
                    sentryStore.listAllSentryPrivilegesForProvider(Sets.newHashSet(groupName1, groupName2),
                            new TSentryActiveRoleSet(false, Sets.newHashSet(roleName1)))));
    assertEquals(Sets.newHashSet("server=server1->db=db1->table=tbl1->action=select", "server=server1"),
            SentryStore.toTrimedLower(
                    sentryStore.listAllSentryPrivilegesForProvider(Sets.newHashSet(groupName1, groupName2),
                            new TSentryActiveRoleSet(false, Sets.newHashSet(roleName2)))));
    // unknown active role
    assertEquals(Sets.newHashSet(),
            SentryStore.toTrimedLower(
                    sentryStore.listAllSentryPrivilegesForProvider(Sets.newHashSet(groupName1, groupName2),
                            new TSentryActiveRoleSet(false, Sets.newHashSet("not a role")))));
    // no active roles
    assertEquals(Sets.newHashSet(),
            SentryStore.toTrimedLower(sentryStore.listAllSentryPrivilegesForProvider(
                    Sets.newHashSet(groupName1, groupName2),
                    new TSentryActiveRoleSet(false, new HashSet<String>()))));
}
From source file:com.kalessil.phpStorm.phpInspectionsEA.inspectors.languageConstructions.ClassConstantCanBeUsedInspector.java
@Override
@NotNull
public PsiElementVisitor buildVisitor(@NotNull final ProblemsHolder holder, boolean isOnTheFly) {
    return new BasePhpElementVisitor() {
        public void visitPhpFunctionCall(FunctionReference reference) {
            /* ensure selected language level supports the ::class feature */
            final Project project = holder.getProject();
            final PhpLanguageLevel phpVersion = PhpProjectConfigurationFacade.getInstance(project)
                    .getLanguageLevel();
            if (!phpVersion.hasFeature(PhpLanguageFeature.CLASS_NAME_CONST)) {
                return;
            }

            final String functionName = reference.getName();
            final PsiElement[] params = reference.getParameters();
            if (0 == params.length && null != functionName && functionName.equals("get_called_class")) {
                final String replacement = "static::class";
                holder.registerProblem(reference, messageUseStatic, ProblemHighlightType.WEAK_WARNING,
                        new UseStaticFix(replacement));
            }
        }

        public void visitPhpStringLiteralExpression(StringLiteralExpression expression) {
            /* ensure selected language level supports the ::class feature */
            final Project project = holder.getProject();
            final PhpLanguageLevel phpVersion = PhpProjectConfigurationFacade.getInstance(project)
                    .getLanguageLevel();
            if (!phpVersion.hasFeature(PhpLanguageFeature.CLASS_NAME_CONST)) {
                return;
            }

            /* Skip certain contexts processing and strings with inline injections */
            PsiElement parent = expression.getParent();
            if (parent instanceof BinaryExpression || parent instanceof SelfAssignmentExpression
                    || null != expression.getFirstPsiChild()) {
                return;
            }

            /* Process if has no inline statements and at least 3 chars long (foo, bar and etc. are not a case) */
            final String contents = expression.getContents();
            if (contents.length() > 3) {
                final Matcher regexMatcher = classNameRegex.matcher(contents);
                if (!regexMatcher.matches()
                        || ExpressionSemanticUtil.getBlockScope(expression) instanceof PhpDocComment) {
                    return;
                }

                /* do not process lowercase-only strings */
                if (-1 == contents.indexOf('\\') && contents.toLowerCase().equals(contents)) {
                    return;
                }

                String normalizedContents = contents.replaceAll("\\\\\\\\", "\\\\");

                /* TODO: handle __NAMESPACE__.'\Class' */
                final boolean isFull = normalizedContents.charAt(0) == '\\';
                final Set<String> namesToLookup = new HashSet<>();
                if (isFull) {
                    namesToLookup.add(normalizedContents);
                } else {
                    if (LOOK_ROOT_NS_UP || normalizedContents.contains("\\")) {
                        normalizedContents = '\\' + normalizedContents;
                        namesToLookup.add(normalizedContents);
                    }
                }

                /* if we could find an appropriate candidate and resolved the class => report (case must match) */
                if (1 == namesToLookup.size()) {
                    final String fqnToLookup = namesToLookup.iterator().next();
                    final PhpIndex index = PhpIndex.getInstance(project);

                    /* try searching interfaces and classes for the given FQN */
                    Collection<PhpClass> classes = index.getClassesByFQN(fqnToLookup);
                    if (0 == classes.size()) {
                        classes = index.getInterfacesByFQN(fqnToLookup);
                    }

                    /* check resolved items */
                    if (1 == classes.size() && classes.iterator().next().getFQN().equals(fqnToLookup)) {
                        final String message = messagePattern.replace("%c%", normalizedContents);
                        holder.registerProblem(expression, message, ProblemHighlightType.WEAK_WARNING,
                                new TheLocalFix(normalizedContents, IMPORT_CLASSES_ON_QF, USE_RELATIVE_QF));
                    }
                }
                namesToLookup.clear();
            }
        }
    };
}
From source file:com.redhat.rhn.frontend.xmlrpc.errata.ErrataHandler.java
/**
 * Creates an errata.
 * @param loggedInUser The current user
 * @param errataInfo map containing the following values:
 *          String "synopsis" short synopsis of the errata
 *          String "advisory_name" advisory name of the errata
 *          Integer "advisory_release" release number of the errata
 *          String "advisory_type" the type of advisory for the errata (must be one of the
 *                 following: "Security Advisory", "Product Enhancement Advisory", or
 *                 "Bug Fix Advisory")
 *          String "product" the product the errata affects
 *          String "errataFrom" the author of the errata
 *          String "topic" the topic of the errata
 *          String "description" the description of the errata
 *          String "solution" the solution of the errata
 *          String "references" references of the errata to be created
 *          String "notes" notes on the errata
 * @param bugs a List of maps consisting of 'id' Integers and 'summary' strings
 * @param keywords a List of keywords for the errata
 * @param packageIds a List of package Id packageId Integers
 * @param publish should the errata be published
 * @param channelLabels an array of channel labels to publish to if the errata is to
 *          be published
 * @throws InvalidChannelRoleException if the user perms are incorrect
 * @return The errata created (whether published or unpublished)
 *
 * @xmlrpc.doc Create a custom errata. If "publish" is set to true,
 * the errata will be published as well
 * @xmlrpc.param #session_key()
 * @xmlrpc.param
 *      #struct("errata info")
 *          #prop("string", "synopsis")
 *          #prop("string", "advisory_name")
 *          #prop("int", "advisory_release")
 *          #prop_desc("string", "advisory_type", "Type of advisory (one of the
 *                  following: 'Security Advisory', 'Product Enhancement Advisory',
 *                  or 'Bug Fix Advisory')")
 *          #prop("string", "product")
 *          #prop("string", "errataFrom")
 *          #prop("string", "topic")
 *          #prop("string", "description")
 *          #prop("string", "references")
 *          #prop("string", "notes")
 *          #prop("string", "solution")
 *      #struct_end()
 * @xmlrpc.param
 *      #array()
 *          #struct("bug")
 *              #prop_desc("int", "id", "Bug Id")
 *              #prop("string", "summary")
 *              #prop("string", "url")
 *          #struct_end()
 *      #array_end()
 * @xmlrpc.param #array_single("string", "keyword - List of keywords to associate
 *          with the errata.")
 * @xmlrpc.param #array_single("int", "packageId")
 * @xmlrpc.param #param_desc("boolean", "publish", "Should the errata be published.")
 * @xmlrpc.param
 *      #array_single("string", "channelLabel - list of channels the errata should be
 *          published to, ignored if publish is set to false")
 * @xmlrpc.returntype
 *      $ErrataSerializer
 */
public Errata create(User loggedInUser, Map<String, String> errataInfo, List<Map<String, Object>> bugs,
        List<String> keywords, List<Integer> packageIds, boolean publish, List<String> channelLabels)
        throws InvalidChannelRoleException {
    // confirm that the user only provided valid keys in the map
    Set<String> validKeys = new HashSet<String>();
    validKeys.add("synopsis");
    validKeys.add("advisory_name");
    validKeys.add("advisory_release");
    validKeys.add("advisory_type");
    validKeys.add("product");
    validKeys.add("errataFrom");
    validKeys.add("topic");
    validKeys.add("description");
    validKeys.add("references");
    validKeys.add("notes");
    validKeys.add("solution");
    validateMap(validKeys, errataInfo);

    validKeys.clear();
    validKeys.add("id");
    validKeys.add("summary");
    validKeys.add("url");
    for (Map<String, Object> bugMap : bugs) {
        validateMap(validKeys, bugMap);
    }

    // Don't want them to publish an errata without any channels,
    // so check first before creating anything
    List<Channel> channels = null;
    if (publish) {
        channels = verifyChannelList(channelLabels, loggedInUser);
    }

    String synopsis = (String) getRequiredAttribute(errataInfo, "synopsis");
    String advisoryName = (String) getRequiredAttribute(errataInfo, "advisory_name");
    Integer advisoryRelease = (Integer) getRequiredAttribute(errataInfo, "advisory_release");
    if (advisoryRelease.longValue() > ErrataManager.MAX_ADVISORY_RELEASE) {
        throw new InvalidAdvisoryReleaseException(advisoryRelease.longValue());
    }
    String advisoryType = (String) getRequiredAttribute(errataInfo, "advisory_type");
    String product = (String) getRequiredAttribute(errataInfo, "product");
    String errataFrom = errataInfo.get("errataFrom");
    String topic = (String) getRequiredAttribute(errataInfo, "topic");
    String description = (String) getRequiredAttribute(errataInfo, "description");
    String solution = (String) getRequiredAttribute(errataInfo, "solution");
    String references = errataInfo.get("references");
    String notes = errataInfo.get("notes");

    Errata newErrata = ErrataManager.lookupByAdvisory(advisoryName);
    if (newErrata != null) {
        throw new DuplicateErrataException(advisoryName);
    }
    newErrata = ErrataManager.createNewErrata();
    newErrata.setOrg(loggedInUser.getOrg());

    // all required
    newErrata.setSynopsis(synopsis);
    newErrata.setAdvisory(advisoryName + "-" + advisoryRelease.toString());
    newErrata.setAdvisoryName(advisoryName);
    newErrata.setAdvisoryRel(new Long(advisoryRelease.longValue()));
    if (advisoryType.equals("Security Advisory") || advisoryType.equals("Product Enhancement Advisory")
            || advisoryType.equals("Bug Fix Advisory")) {
        newErrata.setAdvisoryType(advisoryType);
    } else {
        throw new InvalidAdvisoryTypeException(advisoryType);
    }
    newErrata.setProduct(product);
    newErrata.setTopic(topic);
    newErrata.setDescription(description);
    newErrata.setSolution(solution);
    newErrata.setIssueDate(new Date());
    newErrata.setUpdateDate(new Date());

    // not required
    newErrata.setErrataFrom(errataFrom);
    newErrata.setRefersTo(references);
    newErrata.setNotes(notes);

    for (Iterator<Map<String, Object>> itr = bugs.iterator(); itr.hasNext();) {
        Map<String, Object> bugMap = itr.next();
        String url = null;
        if (bugMap.containsKey("url")) {
            url = (String) bugMap.get("url");
        }
        Bug bug = ErrataFactory.createPublishedBug(new Long(((Integer) bugMap.get("id")).longValue()),
                (String) bugMap.get("summary"), url);
        newErrata.addBug(bug);
    }
    for (Iterator<String> itr = keywords.iterator(); itr.hasNext();) {
        String keyword = itr.next();
        newErrata.addKeyword(keyword);
    }

    newErrata.setPackages(new HashSet());
    for (Iterator<Integer> itr = packageIds.iterator(); itr.hasNext();) {
        Integer pid = itr.next();
        Package pack = PackageFactory.lookupByIdAndOrg(new Long(pid.longValue()), loggedInUser.getOrg());
        if (pack != null) {
            newErrata.addPackage(pack);
        } else {
            throw new InvalidPackageException(pid.toString());
        }
    }
    ErrataFactory.save(newErrata);

    // if true, channels will not be null, but will be a List of channel objects
    if (publish) {
        return publish(newErrata, channels, loggedInUser, false);
    }
    return newErrata;
}
From source file:io.druid.indexing.jdbc.JDBCIndexTask.java
private Set<Integer> assignPartitionsAndSeekToNext(Handle handle) {
    final Set<Integer> assignment = Sets.newHashSet();
    // for (Map.Entry<Integer, Integer> entry : nextOffsets.entrySet()) {
    //     final long endOffset = endOffsets.get(entry.getKey());
    //     if (entry.getValue() < endOffset) {
    //         assignment.add(entry.getKey());
    //     } else if (entry.getValue() == endOffset) {
    //         log.info("Finished reading partition[%d].", entry.getKey());
    //     } else {
    //         throw new ISE(
    //                 "WTF?! Cannot start from offset[%,d] > endOffset[%,d]",
    //                 entry.getValue(),
    //                 endOffset
    //         );
    //     }
    // }

    // Seek to starting offsets.
    org.skife.jdbi.v2.Query<Map<String, Object>> maxItemQuery = handle
            .createQuery(makeMaxQuery(ioConfig.getJdbcOffsets()));
    long currOffset = maxItemQuery != null ? (long) maxItemQuery.list(1).get(0).get("MAX") : 0;
    if (currOffset > (int) endOffsets.values().toArray()[0]) {
        assignment.add((int) currOffset);
    } else {
        assignment.clear();
    }
    return assignment;
}
From source file:com.rapleaf.hank.storage.curly.TestAbstractCurlyPartitionUpdater.java
public void testGetCachedVersions() throws IOException {
    Set<DomainVersion> versions = new HashSet<DomainVersion>();
    updater.ensureCacheExists();

    // Empty cache
    assertEquals(versions, updater.detectCachedBases());
    assertEquals(versions, updater.detectCachedDeltas());

    // Do not consider cueball files only
    makeLocalCacheFile("00001.base.cueball");
    assertEquals(Collections.<DomainVersion>emptySet(), updater.detectCachedBases());
    assertEquals(Collections.<DomainVersion>emptySet(), updater.detectCachedDeltas());
    deleteLocalCacheFile("00001.base.cueball");

    // Delta only
    makeLocalCacheFile("00001.delta.cueball");
    makeLocalCacheFile("00001.delta.curly");
    assertEquals(Collections.<DomainVersion>emptySet(), updater.detectCachedBases());
    assertEquals(Collections.<DomainVersion>singleton(v1), updater.detectCachedDeltas());
    deleteLocalCacheFile("00001.delta.cueball");
    deleteLocalCacheFile("00001.delta.curly");

    // Use bases
    makeLocalCacheFile("00000.base.cueball");
    makeLocalCacheFile("00000.base.curly");
    assertEquals(Collections.<DomainVersion>singleton(v0), updater.detectCachedBases());
    assertEquals(Collections.<DomainVersion>emptySet(), updater.detectCachedDeltas());
    deleteLocalCacheFile("00000.base.cueball");
    deleteLocalCacheFile("00000.base.curly");

    // Use multiple bases
    makeLocalCacheFile("00000.base.cueball");
    makeLocalCacheFile("00000.base.curly");
    makeLocalCacheFile("00001.base.cueball");
    makeLocalCacheFile("00001.base.curly");
    versions.add(v0);
    versions.add(v1);
    assertEquals(versions, updater.detectCachedBases());
    assertEquals(Collections.<DomainVersion>emptySet(), updater.detectCachedDeltas());
    versions.clear();
    deleteLocalCacheFile("00000.base.cueball");
    deleteLocalCacheFile("00000.base.curly");
    deleteLocalCacheFile("00001.base.cueball");
    deleteLocalCacheFile("00001.base.curly");
}
From source file:gaffer.accumulostore.operation.spark.handler.GetRDDOfElementsHandlerTest.java
@Test
public void checkGetCorrectElementsInRDDForEntitySeed() throws OperationException, IOException {
    final Graph graph1 = new Graph.Builder()
            .addSchema(getClass().getResourceAsStream("/schema/dataSchema.json"))
            .addSchema(getClass().getResourceAsStream("/schema/dataTypes.json"))
            .addSchema(getClass().getResourceAsStream("/schema/storeTypes.json"))
            .storeProperties(getClass().getResourceAsStream("/store.properties")).build();

    final List<Element> elements = new ArrayList<>();
    for (int i = 0; i < 10; i++) {
        final Entity entity = new Entity(ENTITY_GROUP);
        entity.setVertex("" + i);

        final Edge edge1 = new Edge(EDGE_GROUP);
        edge1.setSource("" + i);
        edge1.setDestination("B");
        edge1.setDirected(false);
        edge1.putProperty("count", 2);

        final Edge edge2 = new Edge(EDGE_GROUP);
        edge2.setSource("" + i);
        edge2.setDestination("C");
        edge2.setDirected(false);
        edge2.putProperty("count", 4);

        elements.add(edge1);
        elements.add(edge2);
        elements.add(entity);
    }
    final User user = new User();
    graph1.execute(new AddElements(elements), user);

    final SparkConf sparkConf = new SparkConf().setMaster("local")
            .setAppName("testCheckGetCorrectElementsInRDDForEntitySeed")
            .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
            .set("spark.kryo.registrator", "gaffer.serialisation.kryo.Registrator")
            .set("spark.driver.allowMultipleContexts", "true");
    final SparkContext sparkContext = new SparkContext(sparkConf);

    // Create Hadoop configuration and serialise to a string
    final Configuration configuration = new Configuration();
    final ByteArrayOutputStream baos = new ByteArrayOutputStream();
    configuration.write(new DataOutputStream(baos));
    final String configurationString = new String(baos.toByteArray(), CommonConstants.UTF_8);

    // Check get correct edges for "1"
    GetRDDOfElements<EntitySeed> rddQuery = new GetRDDOfElements.Builder<EntitySeed>()
            .sparkContext(sparkContext).seeds(Collections.singleton(new EntitySeed("1"))).build();
    rddQuery.addOption(AbstractGetRDDOperationHandler.HADOOP_CONFIGURATION_KEY, configurationString);
    RDD<Element> rdd = graph1.execute(rddQuery, user);
    if (rdd == null) {
        fail("No RDD returned");
    }
    Set<Element> results = new HashSet<>();
    // NB: IDE suggests the cast in the following line is unnecessary but compilation fails without it
    Element[] returnedElements = (Element[]) rdd.collect();
    for (int i = 0; i < returnedElements.length; i++) {
        results.add(returnedElements[i]);
    }
    final Set<Element> expectedElements = new HashSet<>();
    final Entity entity1 = new Entity(ENTITY_GROUP);
    entity1.setVertex("1");
    final Edge edge1B = new Edge(EDGE_GROUP);
    edge1B.setSource("1");
    edge1B.setDestination("B");
    edge1B.setDirected(false);
    edge1B.putProperty("count", 2);
    final Edge edge1C = new Edge(EDGE_GROUP);
    edge1C.setSource("1");
    edge1C.setDestination("C");
    edge1C.setDirected(false);
    edge1C.putProperty("count", 4);
    expectedElements.add(entity1);
    expectedElements.add(edge1B);
    expectedElements.add(edge1C);
    assertEquals(expectedElements, results);

    // Check get correct edges for "1" when specify entities only
    rddQuery = new GetRDDOfElements.Builder<EntitySeed>().sparkContext(sparkContext)
            .seeds(Collections.singleton(new EntitySeed("1")))
            .view(new View.Builder().entity(ENTITY_GROUP).build()).build();
    rddQuery.addOption(AbstractGetRDDOperationHandler.HADOOP_CONFIGURATION_KEY, configurationString);
    rdd = graph1.execute(rddQuery, user);
    if (rdd == null) {
        fail("No RDD returned");
    }
    results.clear();
    returnedElements = (Element[]) rdd.collect();
    for (int i = 0; i < returnedElements.length; i++) {
        results.add(returnedElements[i]);
    }
    expectedElements.clear();
    expectedElements.add(entity1);
    assertEquals(expectedElements, results);

    // Check get correct edges for "1" when specify edges only
    rddQuery = new GetRDDOfElements.Builder<EntitySeed>().sparkContext(sparkContext)
            .seeds(Collections.singleton(new EntitySeed("1")))
            .view(new View.Builder().edge(EDGE_GROUP).build()).build();
    rddQuery.addOption(AbstractGetRDDOperationHandler.HADOOP_CONFIGURATION_KEY, configurationString);
    rdd = graph1.execute(rddQuery, user);
    if (rdd == null) {
        fail("No RDD returned");
    }
    results.clear();
    returnedElements = (Element[]) rdd.collect();
    for (int i = 0; i < returnedElements.length; i++) {
        results.add(returnedElements[i]);
    }
    expectedElements.clear();
    expectedElements.add(edge1B);
    expectedElements.add(edge1C);
    assertEquals(expectedElements, results);

    // Check get correct edges for "1" and "5"
    Set<EntitySeed> seeds = new HashSet<>();
    seeds.add(new EntitySeed("1"));
    seeds.add(new EntitySeed("5"));
    rddQuery = new GetRDDOfElements.Builder<EntitySeed>().sparkContext(sparkContext).seeds(seeds).build();
    rddQuery.addOption(AbstractGetRDDOperationHandler.HADOOP_CONFIGURATION_KEY, configurationString);
    rdd = graph1.execute(rddQuery, user);
    if (rdd == null) {
        fail("No RDD returned");
    }
    results.clear();
    returnedElements = (Element[]) rdd.collect();
    for (int i = 0; i < returnedElements.length; i++) {
        results.add(returnedElements[i]);
    }
    final Entity entity5 = new Entity(ENTITY_GROUP);
    entity5.setVertex("5");
    final Edge edge5B = new Edge(EDGE_GROUP);
    edge5B.setSource("5");
    edge5B.setDestination("B");
    edge5B.setDirected(false);
    edge5B.putProperty("count", 2);
    final Edge edge5C = new Edge(EDGE_GROUP);
    edge5C.setSource("5");
    edge5C.setDestination("C");
    edge5C.setDirected(false);
    edge5C.putProperty("count", 4);
    expectedElements.clear();
    expectedElements.add(entity1);
    expectedElements.add(edge1B);
    expectedElements.add(edge1C);
    expectedElements.add(entity5);
    expectedElements.add(edge5B);
    expectedElements.add(edge5C);
    assertEquals(expectedElements, results);
    sparkContext.stop();
}