List of usage examples for java.util.Queue.addAll
boolean addAll(Collection<? extends E> c);
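Queue inherits addAll from Collection: every element of the argument collection is offered to the queue in the order returned by its iterator, and the method returns true if the queue changed. A minimal runnable sketch (the class name and values are illustrative):

import java.util.ArrayDeque;
import java.util.Arrays;
import java.util.List;
import java.util.Queue;

public class QueueAddAllDemo {
    public static void main(String[] args) {
        Queue<String> queue = new ArrayDeque<>();
        queue.add("first");

        // addAll enqueues each element in the iteration order of the argument
        List<String> more = Arrays.asList("second", "third");
        queue.addAll(more);

        while (!queue.isEmpty()) {
            System.out.println(queue.poll()); // first, second, third
        }
    }
}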
From source file:org.commonjava.maven.ext.io.rest.DefaultTranslator.java
/**
 * Translate the versions.
 * <pre>{@code
 * [ {
 *     "groupId": "com.google.guava",
 *     "artifactId": "guava",
 *     "version": "13.0.1"
 * } ]
 * }</pre>
 * This equates to a List of ProjectVersionRef.
 *
 * <pre>{@code
 * {
 *     "productNames": [],
 *     "productVersionIds": [],
 *     "repositoryGroup": "",
 *     "gavs": [ {
 *         "groupId": "com.google.guava",
 *         "artifactId": "guava",
 *         "version": "13.0.1"
 *     } ]
 * }
 * }</pre>
 * There may be a lot of them, possibly causing timeouts or other issues.
 * This is mitigated by splitting them into smaller chunks when an error occurs and retrying.
 */
public Map<ProjectVersionRef, String> translateVersions(List<ProjectVersionRef> projects) {
    init(rgm);

    final Map<ProjectVersionRef, String> result = new HashMap<>();
    final Queue<Task> queue = new ArrayDeque<>();

    if (initialRestMaxSize != 0) {
        // Pre-split the projects into chunks of the configured size.
        final List<List<ProjectVersionRef>> partition = ListUtils.partition(projects, initialRestMaxSize);
        for (List<ProjectVersionRef> p : partition) {
            queue.add(new Task(rgm, p, endpointUrl + REPORTS_LOOKUP_GAVS));
        }
        logger.debug("For initial sizing of {} have split the queue into {} ", initialRestMaxSize,
                queue.size());
    } else {
        queue.add(new Task(rgm, projects, endpointUrl + REPORTS_LOOKUP_GAVS));
    }

    while (!queue.isEmpty()) {
        Task task = queue.remove();
        task.executeTranslate();
        if (task.isSuccess()) {
            result.putAll(task.getResult());
        } else {
            if (task.canSplit() && task.getStatus() == 504) {
                List<Task> tasks = task.split();
                logger.warn(
                        "Failed to translate versions for task @{} due to {}, splitting and retrying. Chunk size was: {} and new chunk size {} in {} segments.",
                        task.hashCode(), task.getStatus(), task.getChunkSize(),
                        tasks.get(0).getChunkSize(), tasks.size());
                queue.addAll(tasks);
            } else {
                if (task.getStatus() < 0) {
                    logger.debug("Caught exception calling server with message {}", task.getErrorMessage());
                } else {
                    logger.debug("Did not get status {} but received {}", SC_OK, task.getStatus());
                }
                if (task.getStatus() > 0) {
                    throw new RestException("Received response status " + task.getStatus()
                            + " with message: " + task.getErrorMessage());
                } else {
                    throw new RestException("Received response status " + task.getStatus()
                            + " with message " + task.getErrorMessage());
                }
            }
        }
    }
    return result;
}
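The pattern to note above: a work queue in which a chunk that fails with a timeout is split in half and both halves are re-enqueued with Queue.addAll, so oversized requests degrade into smaller retries instead of failing outright. A self-contained sketch of just that pattern; the Chunk class, its size threshold, and the failure condition are illustrative stand-ins, not part of the original API:

import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.List;
import java.util.Queue;

public class SplitRetryDemo {

    /** A hypothetical unit of work over a sublist of items. */
    static class Chunk {
        final List<Integer> items;
        Chunk(List<Integer> items) { this.items = items; }

        // Stand-in for a remote call that times out on large payloads.
        boolean process() { return items.size() <= 2; }

        boolean canSplit() { return items.size() > 1; }

        List<Chunk> split() {
            int mid = items.size() / 2;
            List<Chunk> halves = new ArrayList<>();
            halves.add(new Chunk(items.subList(0, mid)));
            halves.add(new Chunk(items.subList(mid, items.size())));
            return halves;
        }
    }

    public static void main(String[] args) {
        Queue<Chunk> queue = new ArrayDeque<>();
        queue.add(new Chunk(List.of(1, 2, 3, 4, 5, 6, 7, 8)));

        while (!queue.isEmpty()) {
            Chunk chunk = queue.remove();
            if (chunk.process()) {
                System.out.println("Processed " + chunk.items);
            } else if (chunk.canSplit()) {
                // Re-enqueue both halves for retry.
                queue.addAll(chunk.split());
            } else {
                throw new IllegalStateException("Unsplittable chunk failed");
            }
        }
    }
}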
From source file:edu.emory.cci.aiw.umls.UMLSDatabaseConnection.java
public List<TerminologyCode> getTermSubsumption(TerminologyCode code)
        throws UMLSQueryException, UMLSNoSuchTermException {
    validateCode(code);
    if (!codeExists(code)) {
        throw new UMLSNoSuchTermException("No such terminology code: " + code);
    }

    List<TerminologyCode> result = new ArrayList<TerminologyCode>();

    // stores the unexpanded children
    Queue<TerminologyCode> descendants = new LinkedList<TerminologyCode>();

    result.add(code);
    descendants.addAll(getChildrenByCode(code));

    // loop through all children until the queue is empty, as in BFS
    while (!descendants.isEmpty()) {
        // dequeue from the descendants and set as current term
        TerminologyCode current = descendants.remove();

        // add the current child under examination to the result set
        result.add(current);

        // get all of the current term's children and add them to the queue
        List<TerminologyCode> curChildren = getChildrenByCode(current);
        if (!curChildren.isEmpty()) {
            descendants.addAll(curChildren);
        }
    }
    return result;
}
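Several examples on this page share this shape: seed a FIFO queue, then repeatedly dequeue a node and addAll its children, which visits nodes level by level (breadth-first). A generic sketch with an illustrative Node type; for graphs with shared children or cycles, a visited set would also be needed:

import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.List;
import java.util.Queue;

public class BfsCollectDemo {

    /** Illustrative tree node; the real examples use domain types. */
    static class Node {
        final String id;
        final List<Node> children = new ArrayList<>();
        Node(String id) { this.id = id; }
    }

    /** Collects a node and all of its descendants in breadth-first order. */
    static List<String> collect(Node root) {
        List<String> result = new ArrayList<>();
        Queue<Node> queue = new ArrayDeque<>();
        queue.add(root);
        while (!queue.isEmpty()) {
            Node current = queue.remove();
            result.add(current.id);
            // addAll enqueues the whole next level in one call
            queue.addAll(current.children);
        }
        return result;
    }

    public static void main(String[] args) {
        Node root = new Node("root");
        Node a = new Node("a");
        Node b = new Node("b");
        root.children.add(a);
        root.children.add(b);
        a.children.add(new Node("a1"));
        System.out.println(collect(root)); // [root, a, b, a1]
    }
}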
From source file:org.unitime.timetable.solver.course.ui.ClassInfoModel.java
public void update() throws Exception {
    if (iChange == null)
        return;

    Vector<ClassAssignment> assignments = new Vector<ClassAssignment>(iChange.getAssignments());
    Hashtable<Long, ClassAssignment> table = iChange.getAssignmentTable();
    iUnassignConflictingAssignments = !iForm.getKeepConflictingAssignments();
    iChange.getAssignments().clear();

    for (ClassAssignment assignment : assignments) {
        iChange.getAssignments().add(new ClassAssignmentInfo(assignment.getClazz(), assignment.getTime(),
                assignment.getDate(), assignment.getRooms(), table));
    }

    if (assignments.isEmpty()) {
        for (Iterator<ClassAssignment> i = iChange.getConflicts().iterator(); i.hasNext();) {
            ClassAssignment assignment = i.next();
            if (!assignment.getClassId().equals(getClazz().getClassId()))
                i.remove();
        }
    } else {
        iChange.getConflicts().clear();
    }

    for (ClassAssignment assignment : iChange.getAssignments()) {
        // Skip incomplete assignments (that have no time assigned yet)
        if (!assignment.hasTime())
            continue;

        // Check for room conflicts
        if (iUnassignConflictingAssignments) {
            if (assignment.getRooms() != null)
                for (ClassRoomInfo room : assignment.getRooms()) {
                    if (!room.isIgnoreRoomChecks()) {
                        for (Assignment a : room.getLocation().getCommitedAssignments()) {
                            if (a.getClazz().isCancelled())
                                continue;
                            if (assignment.getTime().overlaps(new ClassTimeInfo(a))
                                    && !a.getClazz().canShareRoom(assignment.getClazz())) {
                                if (iChange.getCurrent(a.getClassId()) == null
                                        && iChange.getConflict(a.getClassId()) == null)
                                    iChange.getConflicts().add(new ClassAssignment(a));
                            }
                        }
                    }
                }

            // Check for instructor conflicts
            if (assignment.getInstructors() != null)
                for (ClassInstructorInfo instructor : assignment.getInstructors()) {
                    if (!instructor.isLead())
                        continue;
                    // check all departmental instructors with the same external id
                    for (DepartmentalInstructor di : DepartmentalInstructor
                            .getAllForInstructor(instructor.getInstructor().getInstructor())) {
                        for (ClassInstructor ci : di.getClasses()) {
                            if (ci.equals(instructor.getInstructor()))
                                continue;
                            Assignment a = ci.getClassInstructing().getCommittedAssignment();
                            if (a == null || a.getClazz().isCancelled())
                                continue;
                            if (assignment.getTime() != null
                                    && assignment.getTime().overlaps(new ClassTimeInfo(a))
                                    && !a.getClazz().canShareInstructor(assignment.getClazz())) {
                                if (iChange.getCurrent(a.getClassId()) == null
                                        && iChange.getConflict(a.getClassId()) == null)
                                    iChange.getConflicts().add(new ClassAssignment(a));
                            }
                        }
                    }
                    /*
                    // Potential speed-up #1) only check the current department instructors
                    for (ClassInstructor ci : instructor.getInstructor().getInstructor().getClasses()) {
                        if (ci.equals(instructor.getInstructor())) continue;
                        Assignment a = ci.getClassInstructing().getCommittedAssignment();
                        if (a == null) continue;
                        if (assignment.getTime().overlaps(new ClassTimeInfo(a))) {
                            if (iChange.getCurrent(a.getClassId()) == null && iChange.getConflict(a.getClassId()) == null)
                                iChange.getConflicts().add(new ClassAssignment(a));
                        }
                    }
                    */
                    /*
                    // Potential speed-up #2) use instructor assignments from the solution
                    for (Assignment a : instructor.getInstructor().getInstructor().getCommitedAssignments()) {
                        if (assignment.getTime().overlaps(new ClassTimeInfo(a))) {
                            if (iChange.getCurrent(a.getClassId()) == null && iChange.getConflict(a.getClassId()) == null)
                                iChange.getConflicts().add(new ClassAssignment(a));
                        }
                    }
                    */
                }
        }

        // Check the course structure for conflicts
        Class_ clazz = assignment.getClazz(Class_DAO.getInstance().getSession());

        // a) all parents
        Class_ parent = clazz.getParentClass();
        while (parent != null) {
            if (iChange.getCurrent(parent.getUniqueId()) == null
                    && iChange.getConflict(parent.getUniqueId()) == null) {
                Assignment a = parent.getCommittedAssignment();
                if (a != null && !a.getClazz().isCancelled()
                        && assignment.getTime().overlaps(new ClassTimeInfo(a))) {
                    iChange.getConflicts().add(new ClassAssignment(a));
                }
            }
            parent = parent.getParentClass();
        }

        // b) all children
        Queue<Class_> children = new LinkedList<Class_>();
        try {
            children.addAll(clazz.getChildClasses());
        } catch (LazyInitializationException e) {
            sLog.error("This should never happen.");
            Class_ c = Class_DAO.getInstance().get(assignment.getClassId());
            children.addAll(c.getChildClasses());
        }
        Class_ child = null;
        while ((child = children.poll()) != null) {
            if (iChange.getCurrent(child.getUniqueId()) == null
                    && iChange.getConflict(child.getUniqueId()) == null) {
                Assignment a = child.getCommittedAssignment();
                if (a != null && !a.getClazz().isCancelled()
                        && assignment.getTime().overlaps(new ClassTimeInfo(a))) {
                    iChange.getConflicts().add(new ClassAssignment(a));
                }
            }
            if (!child.getChildClasses().isEmpty())
                children.addAll(child.getChildClasses());
        }

        // c) all single-class subparts
        for (Iterator i = clazz.getSchedulingSubpart().getInstrOfferingConfig().getSchedulingSubparts()
                .iterator(); i.hasNext();) {
            SchedulingSubpart ss = (SchedulingSubpart) i.next();
            if (ss.getClasses().size() == 1) {
                child = (Class_) ss.getClasses().iterator().next();
                if (iChange.getCurrent(child.getUniqueId()) == null
                        && iChange.getConflict(child.getUniqueId()) == null) {
                    Assignment a = child.getCommittedAssignment();
                    if (a != null && !a.getClazz().isCancelled()
                            && assignment.getTime().overlaps(new ClassTimeInfo(a))) {
                        iChange.getConflicts().add(new ClassAssignment(a));
                    }
                }
                if (!child.getChildClasses().isEmpty())
                    children.addAll(child.getChildClasses());
            }
        }

        // TODO: Check for other HARD conflicts (e.g., distribution constraints)
    }
}
From source file:com.intel.ssg.dcst.panthera.parse.SkinDriver.java
private CommandProcessorResponse runInternal(String command) throws CommandNeedRetryException {
    errorMessage = null;
    SQLState = null;
    downstreamError = null;

    if (!validateConfVariables()) {
        return new CommandProcessorResponse(12, errorMessage, SQLState);
    }

    HiveDriverRunHookContext hookContext = new HiveDriverRunHookContextImpl(conf, command);

    // Get all the driver run hooks and pre-execute them.
    List<HiveDriverRunHook> driverRunHooks;
    try {
        driverRunHooks = getHooks(HiveConf.ConfVars.HIVE_DRIVER_RUN_HOOKS, HiveDriverRunHook.class);
        for (HiveDriverRunHook driverRunHook : driverRunHooks) {
            driverRunHook.preDriverRun(hookContext);
        }
    } catch (Exception e) {
        errorMessage = "FAILED: Hive Internal Error: " + Utilities.getNameMessage(e);
        SQLState = ErrorMsg.findSQLState(e.getMessage());
        downstreamError = e;
        console.printError(errorMessage + "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
        return new CommandProcessorResponse(12, errorMessage, SQLState);
    }

    // Reset the perf logger
    PerfLogger perfLogger = PerfLogger.getPerfLogger(true);
    perfLogger.PerfLogBegin(LOG, PerfLogger.DRIVER_RUN);
    perfLogger.PerfLogBegin(LOG, PerfLogger.TIME_TO_SUBMIT);

    int ret;
    synchronized (compileMonitor) {
        ret = compile(command);
    }
    if (ret != 0) {
        releaseLocks(ctx.getHiveLocks());
        return new CommandProcessorResponse(ret, errorMessage, SQLState);
    }

    boolean requireLock = false;
    boolean ckLock = checkLockManager();
    if (ckLock) {
        boolean lockOnlyMapred = HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_LOCK_MAPRED_ONLY);
        if (lockOnlyMapred) {
            Queue<Task<? extends Serializable>> taskQueue = new LinkedList<Task<? extends Serializable>>();
            taskQueue.addAll(plan.getRootTasks());
            while (taskQueue.peek() != null) {
                Task<? extends Serializable> tsk = taskQueue.remove();
                requireLock = requireLock || tsk.requireLock();
                if (requireLock) {
                    break;
                }
                if (tsk instanceof ConditionalTask) {
                    taskQueue.addAll(((ConditionalTask) tsk).getListTasks());
                }
                if (tsk.getChildTasks() != null) {
                    taskQueue.addAll(tsk.getChildTasks());
                }
                // does not add the backup task here, because the backup task should be the
                // same type as the original task
            }
        } else {
            requireLock = true;
        }
    }

    if (requireLock) {
        ret = acquireReadWriteLocks();
        if (ret != 0) {
            releaseLocks(ctx.getHiveLocks());
            return new CommandProcessorResponse(ret, errorMessage, SQLState);
        }
    }

    ret = execute();
    if (ret != 0) {
        // if requireLock is false, the release here will do nothing because there is no lock
        releaseLocks(ctx.getHiveLocks());
        return new CommandProcessorResponse(ret, errorMessage, SQLState);
    }

    // if requireLock is false, the release here will do nothing because there is no lock
    releaseLocks(ctx.getHiveLocks());

    perfLogger.PerfLogEnd(LOG, PerfLogger.DRIVER_RUN);
    perfLogger.close(LOG, plan);

    // Take all the driver run hooks and post-execute them.
    try {
        for (HiveDriverRunHook driverRunHook : driverRunHooks) {
            driverRunHook.postDriverRun(hookContext);
        }
    } catch (Exception e) {
        errorMessage = "FAILED: Hive Internal Error: " + Utilities.getNameMessage(e);
        SQLState = ErrorMsg.findSQLState(e.getMessage());
        downstreamError = e;
        console.printError(errorMessage + "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
        return new CommandProcessorResponse(12, errorMessage, SQLState);
    }

    return new CommandProcessorResponse(ret);
}
From source file:org.joox.test.JOOXTest.java
@Test
public void testEachCallback() {
    final Queue<Integer> queue = new LinkedList<Integer>();

    queue.addAll(Arrays.asList(0));
    $.each(new Each() {
        @Override
        public void each(Context context) {
            assertEquals(context.element(), context.match());
            assertEquals(context.elementIndex(), context.matchIndex());
            assertEquals(context.elementSize(), context.matchSize());
            assertEquals((int) queue.poll(), context.matchIndex());
            assertEquals(1, context.matchSize());
            assertEquals("document", context.element().getTagName());
        }
    });
    assertTrue(queue.isEmpty());

    queue.addAll(Arrays.asList(0, 1, 2));
    $.children().each(new Each() {
        @Override
        public void each(Context context) {
            assertEquals(context.element(), context.match());
            assertEquals(context.elementIndex(), context.matchIndex());
            assertEquals(context.elementSize(), context.matchSize());
            assertEquals((int) queue.poll(), context.matchIndex());
            assertEquals(3, context.matchSize());
            assertEquals("library", context.element().getTagName());
        }
    });
    assertTrue(queue.isEmpty());
}
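A queue also doubles as a compact way to assert callback order in a test: preload the expected values with addAll, poll one inside each callback, and assert the queue is empty at the end, which proves both the order and the number of invocations. A stripped-down sketch with a hypothetical forEachIndex helper standing in for the library under test:

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import java.util.ArrayDeque;
import java.util.Arrays;
import java.util.Queue;
import java.util.function.IntConsumer;

import org.junit.Test;

public class CallbackOrderTest {

    // Stand-in for a library that invokes a callback once per element.
    private static void forEachIndex(int size, IntConsumer callback) {
        for (int i = 0; i < size; i++) {
            callback.accept(i);
        }
    }

    @Test
    public void callbacksArriveInOrder() {
        Queue<Integer> expected = new ArrayDeque<>();
        expected.addAll(Arrays.asList(0, 1, 2));

        // each callback consumes one expected value; a mismatch fails the test
        forEachIndex(3, index -> assertEquals((int) expected.poll(), index));

        // an empty queue proves every expected callback actually happened
        assertTrue(expected.isEmpty());
    }
}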
From source file:org.unitime.timetable.model.Solution.java
@Override
public Set<Assignment> getConflicts(Long classId) throws Exception {
    if (classId == null)
        return null;
    Class_ clazz = Class_DAO.getInstance().get(classId);
    if (clazz == null || clazz.isCancelled())
        return null;
    Assignment assignment = getAssignment(clazz);
    if (assignment == null)
        return null;

    Set<Assignment> conflicts = new HashSet<Assignment>();

    if (assignment.getRooms() != null)
        for (Location room : assignment.getRooms()) {
            if (!room.isIgnoreRoomCheck()) {
                for (Assignment a : room.getAssignments(this))
                    if (!assignment.equals(a) && !a.getClazz().isCancelled() && assignment.overlaps(a)
                            && !clazz.canShareRoom(a.getClazz()))
                        conflicts.add(a);
            }
        }

    if (clazz.getClassInstructors() != null)
        for (ClassInstructor instructor : clazz.getClassInstructors()) {
            if (!instructor.isLead())
                continue;
            for (DepartmentalInstructor di : DepartmentalInstructor
                    .getAllForInstructor(instructor.getInstructor())) {
                for (ClassInstructor ci : di.getClasses()) {
                    if (ci.equals(instructor))
                        continue;
                    Assignment a = getAssignment(ci.getClassInstructing());
                    if (a != null && !a.getClazz().isCancelled() && assignment.overlaps(a)
                            && !clazz.canShareInstructor(a.getClazz()))
                        conflicts.add(a);
                }
            }
        }

    Class_ parent = clazz.getParentClass();
    while (parent != null) {
        Assignment a = getAssignment(parent);
        if (a != null && !a.getClazz().isCancelled() && assignment.overlaps(a))
            conflicts.add(a);
        parent = parent.getParentClass();
    }

    Queue<Class_> children = new LinkedList<Class_>(clazz.getChildClasses());
    Class_ child = null;
    while ((child = children.poll()) != null) {
        Assignment a = getAssignment(child);
        if (a != null && !a.getClazz().isCancelled() && assignment.overlaps(a))
            conflicts.add(a);
        if (!child.getChildClasses().isEmpty())
            children.addAll(child.getChildClasses());
    }

    for (Iterator<SchedulingSubpart> i = clazz.getSchedulingSubpart().getInstrOfferingConfig()
            .getSchedulingSubparts().iterator(); i.hasNext();) {
        SchedulingSubpart ss = i.next();
        if (ss.getClasses().size() == 1) {
            child = ss.getClasses().iterator().next();
            if (clazz.equals(child))
                continue;
            Assignment a = getAssignment(child);
            if (a != null && !a.getClazz().isCancelled() && assignment.overlaps(a))
                conflicts.add(a);
        }
    }

    return conflicts;
}
From source file:org.evosuite.setup.TestClusterGenerator.java
/**
 * All public methods defined directly in the SUT should be covered
 *
 * TODO: What if we use instrument_parent?
 */
@SuppressWarnings("unchecked")
private void initializeTargetMethods() throws RuntimeException, ClassNotFoundException {
    logger.info("Analyzing target class");
    Class<?> targetClass = Properties.getTargetClass();

    TestCluster cluster = TestCluster.getInstance();

    Set<Class<?>> targetClasses = new LinkedHashSet<Class<?>>();
    if (targetClass == null) {
        throw new RuntimeException("Failed to load " + Properties.TARGET_CLASS);
    }
    targetClasses.add(targetClass);
    addDeclaredClasses(targetClasses, targetClass);
    if (Modifier.isAbstract(targetClass.getModifiers())) {
        logger.info("SUT is an abstract class");
        Set<Class<?>> subclasses = getConcreteClasses(targetClass, inheritanceTree);
        logger.info("Found {} concrete subclasses", subclasses.size());
        targetClasses.addAll(subclasses);
    }

    // To make sure we also have anonymous inner classes, double-check inner classes using ASM
    ClassNode targetClassNode = DependencyAnalysis.getClassNode(Properties.TARGET_CLASS);
    Queue<InnerClassNode> innerClasses = new LinkedList<InnerClassNode>();
    innerClasses.addAll(targetClassNode.innerClasses);
    while (!innerClasses.isEmpty()) {
        InnerClassNode icn = innerClasses.poll();
        try {
            logger.debug("Loading inner class: {}, {},{}", icn.innerName, icn.name, icn.outerName);
            String innerClassName = ResourceList.getClassNameFromResourcePath(icn.name);
            Class<?> innerClass = TestGenerationContext.getInstance().getClassLoaderForSUT()
                    .loadClass(innerClassName);
            //if (!canUse(innerClass))
            //    continue;

            // Sometimes strange things appear such as Map$Entry
            if (!targetClasses.contains(innerClass)) {
                // && !innerClassName.matches(".*\\$\\d+(\\$.*)?$")) {
                logger.info("Adding inner class {}", innerClassName);
                targetClasses.add(innerClass);
                ClassNode innerClassNode = DependencyAnalysis.getClassNode(innerClassName);
                innerClasses.addAll(innerClassNode.innerClasses);
            }
        } catch (Throwable t) {
            logger.error("Problem for {}. Error loading inner class: {}, {},{}: {}",
                    Properties.TARGET_CLASS, icn.innerName, icn.name, icn.outerName, t);
        }
    }

    for (Class<?> clazz : targetClasses) {
        logger.info("Current SUT class: {}", clazz);

        if (!canUse(clazz)) {
            logger.info("Cannot access SUT class: {}", clazz);
            continue;
        }

        // Add all constructors
        for (Constructor<?> constructor : getConstructors(clazz)) {
            logger.info("Checking target constructor {}", constructor);
            String name = "<init>" + org.objectweb.asm.Type.getConstructorDescriptor(constructor);

            if (Properties.TT) {
                String orig = name;
                name = BooleanTestabilityTransformation.getOriginalNameDesc(clazz.getName(), "<init>",
                        org.objectweb.asm.Type.getConstructorDescriptor(constructor));
                if (!orig.equals(name))
                    logger.info("TT name: {} -> {}", orig, name);
            }

            if (canUse(constructor)) {
                GenericConstructor genericConstructor = new GenericConstructor(constructor, clazz);
                cluster.addTestCall(genericConstructor);
                // TODO: Add types!
                cluster.addGenerator(new GenericClass(clazz).getWithWildcardTypes(), genericConstructor);
                addDependencies(genericConstructor, 1);
                logger.debug("Keeping track of {}.{}{}", constructor.getDeclaringClass().getName(),
                        constructor.getName(), Type.getConstructorDescriptor(constructor));
            } else {
                logger.debug("Constructor cannot be used: {}", constructor);
            }
        }

        // Add all methods
        for (Method method : getMethods(clazz)) {
            logger.info("Checking target method {}", method);
            String name = method.getName() + org.objectweb.asm.Type.getMethodDescriptor(method);

            if (Properties.TT) {
                String orig = name;
                name = BooleanTestabilityTransformation.getOriginalNameDesc(clazz.getName(),
                        method.getName(), org.objectweb.asm.Type.getMethodDescriptor(method));
                if (!orig.equals(name))
                    logger.info("TT name: {} -> {}", orig, name);
            }

            if (canUse(method, clazz)) {
                logger.debug("Adding method {}.{}{}", clazz.getName(), method.getName(),
                        Type.getMethodDescriptor(method));

                GenericMethod genericMethod = new GenericMethod(method, clazz);
                cluster.addTestCall(genericMethod);
                cluster.addModifier(new GenericClass(clazz).getWithWildcardTypes(), genericMethod);
                addDependencies(genericMethod, 1);
                GenericClass retClass = new GenericClass(method.getReturnType());

                if (!retClass.isPrimitive() && !retClass.isVoid() && !retClass.isObject())
                    cluster.addGenerator(retClass.getWithWildcardTypes(), genericMethod);
            } else {
                logger.debug("Method cannot be used: {}", method);
            }
        }

        for (Field field : getFields(clazz)) {
            logger.info("Checking target field {}", field);

            if (canUse(field, clazz)) {
                GenericField genericField = new GenericField(field, clazz);
                addDependencies(genericField, 1);
                cluster.addGenerator(new GenericClass(field.getGenericType()).getWithWildcardTypes(),
                        genericField);
                logger.debug("Adding field {}", field);
                if (!Modifier.isFinal(field.getModifiers())) {
                    logger.debug("Is not final");
                    cluster.addTestCall(new GenericField(field, clazz));
                } else {
                    logger.debug("Is final");
                    if (Modifier.isStatic(field.getModifiers()) && !field.getType().isPrimitive()) {
                        logger.debug("Is static non-primitive");
                        /*
                         * With this we are trying to cover such cases:
                         *
                         * public static final DurationField INSTANCE = new MillisDurationField();
                         *
                         * private MillisDurationField() {
                         *     super();
                         * }
                         */
                        try {
                            Object o = field.get(null);
                            if (o == null) {
                                logger.info("Field is not yet initialized: {}", field);
                            } else {
                                Class<?> actualClass = o.getClass();
                                logger.debug("Actual class is {}", actualClass);
                                if (!actualClass.isAssignableFrom(genericField.getRawGeneratedType())
                                        && genericField.getRawGeneratedType().isAssignableFrom(actualClass)) {
                                    GenericField superClassField = new GenericField(field, clazz);
                                    cluster.addGenerator(new GenericClass(actualClass), superClassField);
                                }
                            }
                        } catch (IllegalAccessException e) {
                            // TODO Auto-generated catch block
                            e.printStackTrace();
                        }
                    }
                }
            } else {
                logger.debug("Can't use field {}", field);
            }
        }
        analyzedClasses.add(clazz);
        // TODO: Set to generic type rather than class?
        cluster.getAnalyzedClasses().add(clazz);
    }

    if (Properties.INSTRUMENT_PARENT) {
        for (String superClass : inheritanceTree.getSuperclasses(Properties.TARGET_CLASS)) {
            try {
                Class<?> superClazz = TestGenerationContext.getInstance().getClassLoaderForSUT()
                        .loadClass(superClass);
                dependencies.add(new Pair(0, superClazz));
            } catch (ClassNotFoundException e) {
                logger.error("Problem for {}. Class not found: {}", Properties.TARGET_CLASS, superClass, e);
            }
        }
    }

    if (Properties.HANDLE_STATIC_FIELDS) {
        GetStaticGraph getStaticGraph = GetStaticGraphGenerator.generate(Properties.TARGET_CLASS);

        Map<String, Set<String>> staticFields = getStaticGraph.getStaticFields();
        for (String className : staticFields.keySet()) {
            logger.info("Adding static fields to cluster for class {}", className);

            Class<?> clazz;
            try {
                clazz = getClass(className);
            } catch (ExceptionInInitializerError ex) {
                logger.debug("Class init caused exception {}", className);
                continue;
            }
            if (clazz == null) {
                logger.debug("Class not found {}", className);
                continue;
            }

            if (!canUse(clazz))
                continue;

            Set<String> fields = staticFields.get(className);
            for (Field field : getFields(clazz)) {
                if (!canUse(field, clazz))
                    continue;

                if (fields.contains(field.getName())) {
                    if (!Modifier.isFinal(field.getModifiers())) {
                        logger.debug("Is not final");
                        cluster.addTestCall(new GenericField(field, clazz));
                    }
                }
            }
        }

        PutStaticMethodCollector collector = new PutStaticMethodCollector(Properties.TARGET_CLASS,
                staticFields);
        Set<MethodIdentifier> methodIdentifiers = collector.collectMethods();
        for (MethodIdentifier methodId : methodIdentifiers) {
            Class<?> clazz = getClass(methodId.getClassName());
            if (clazz == null)
                continue;

            if (!canUse(clazz))
                continue;

            Method method = getMethod(clazz, methodId.getMethodName(), methodId.getDesc());
            if (method == null)
                continue;

            GenericMethod genericMethod = new GenericMethod(method, clazz);
            cluster.addTestCall(genericMethod);
        }
    }
    logger.info("Finished analyzing target class");
}
From source file:org.kuali.rice.krad.uif.service.impl.ViewHelperServiceImpl.java
/**
 * {@inheritDoc}
 */
@Override
public void applyDefaultValues(Component component) {
    if (component == null) {
        return;
    }

    View view = ViewLifecycle.getView();
    Object model = ViewLifecycle.getModel();

    @SuppressWarnings("unchecked")
    Queue<LifecycleElement> elementQueue = RecycleUtils.getInstance(LinkedList.class);
    elementQueue.offer(component);
    try {
        while (!elementQueue.isEmpty()) {
            LifecycleElement currentElement = elementQueue.poll();

            // if component is a data field apply default value
            if (currentElement instanceof DataField) {
                DataField dataField = ((DataField) currentElement);

                // need to make sure binding is initialized since this could be on a page we have not initialized yet
                dataField.getBindingInfo().setDefaults(view, dataField.getPropertyName());

                populateDefaultValueForField(model, dataField, dataField.getBindingInfo().getBindingPath());
            }

            elementQueue.addAll(ViewLifecycleUtils.getElementsForLifecycle(currentElement).values());
        }
    } finally {
        elementQueue.clear();
        RecycleUtils.recycle(elementQueue);
    }
}
From source file:de.tudarmstadt.ukp.dkpro.lexsemresource.graph.EntityGraphJGraphT.java
/**
 * Creates the hyponym map, which maps from each node to its (recursive) number of hyponyms.
 * "Recursive" means that the hyponyms of hyponyms are also taken into account.
 *
 * @throws UnsupportedOperationException
 * @throws LexicalSemanticResourceException
 */
private Map<String, Integer> getHyponymCountMap() throws LexicalSemanticResourceException {
    // only create the hyponym map if it was not already computed
    if (hyponymCountMap != null) {
        return hyponymCountMap;
    }

    // work on the lcc, otherwise this is not going to work
    // EntityGraphJGraphT lcc = this;
    EntityGraphJGraphT lcc = this.getLargestConnectedComponent();
    lcc.removeCycles();
    int nrOfNodes = lcc.getNumberOfNodes();

    File hyponymCountMapSerializedFile = new File(getGraphId() + "_" + hyponymCountMapFilename
            + (lexSemRes.getIsCaseSensitive() ? "-cs" : "-cis"));
    hyponymCountMap = new HashMap<String, Integer>();

    if (hyponymCountMapSerializedFile.exists()) {
        logger.info("Loading saved hyponymyCountMap ...");
        hyponymCountMap = EntityGraphUtils.deserializeMap(hyponymCountMapSerializedFile);
        if (hyponymCountMap.size() != nrOfNodes) {
            throw new LexicalSemanticResourceException(
                    "HyponymCountMap does not contain an entry for each node in the graph. "
                            + hyponymCountMap.size() + "/" + nrOfNodes);
        }
        logger.info("Done loading saved hyponymyCountMap");
        return hyponymCountMap;
    }

    hyponymCountMap = new HashMap<String, Integer>();

    // a queue holding the nodes to process
    Queue<String> queue = new LinkedList<String>();

    // In the entity graph a node may have more than one father.
    // Thus, we check whether a node was already visited.
    // Then, it is not expanded again.
    Set<String> visited = new HashSet<String>();

    // initialize the queue with all leaf nodes
    Set<String> leafNodes = new HashSet<String>();
    for (Entity leaf : lcc.getLeaves()) {
        leafNodes.add(leaf.getId());
    }
    queue.addAll(leafNodes);

    logger.info(leafNodes.size() + " leaf nodes.");

    ProgressMeter progress = new ProgressMeter(getNumberOfNodes());
    // while the queue is not empty
    while (!queue.isEmpty()) {
        // remove the first element from the queue
        String currNodeId = queue.poll();
        Entity currNode = lexSemRes.getEntityById(currNodeId);

        // in some rare cases, getEntityById might fail - so better check for nulls and fail
        // gracefully
        if (currNode == null) {
            visited.add(currNodeId);
            hyponymCountMap.put(currNodeId, 0);
        }

        logger.debug(queue.size());

        if (visited.contains(currNodeId)) {
            continue;
        }

        progress.next();

        if (logger.isDebugEnabled()) {
            logger.debug(progress + " - " + queue.size() + " left in queue");
        } else if (logger.isInfoEnabled() && (progress.getCount() % 100 == 0)) {
            logger.info(progress + " - " + queue.size() + " left in queue");
        }

        Set<Entity> children = lcc.getChildren(currNode);
        Set<String> invalidChildIds = new HashSet<String>();
        int validChildren = 0;
        int sumChildHyponyms = 0;
        boolean invalid = false;
        for (Entity child : children) {
            if (lcc.containsVertex(child)) {
                if (hyponymCountMap.containsKey(child.getId())) {
                    sumChildHyponyms += hyponymCountMap.get(child.getId());
                    validChildren++;
                } else {
                    invalid = true;
                    invalidChildIds.add(child.getId());
                }
            }
        }

        // we cannot use continue directly if invalid, as this would continue the inner loop,
        // not the outer loop
        if (invalid) {
            // One of the children is not in the hyponymCountMap yet.
            // Re-enter the node into the queue and continue with the next node.
            // Also enter all the children that are not in the queue yet.
            queue.add(currNodeId);
            for (String childId : invalidChildIds) {
                if (!visited.contains(childId) && !queue.contains(childId)) {
                    queue.add(childId);
                }
            }
            continue;
        }

        // mark as visited
        visited.add(currNodeId);

        // the hyponym count of the current node is the number of its own hyponyms plus the
        // sum of the hyponyms of its children
        int currNodeHyponymCount = validChildren + sumChildHyponyms;
        hyponymCountMap.put(currNodeId, currNodeHyponymCount);

        // add parents of the current node to the queue
        for (Entity parent : lcc.getParents(currNode)) {
            if (lcc.containsVertex(parent)) {
                queue.add(parent.getId());
            }
        }
    } // while queue not empty

    logger.info(visited.size() + " nodes visited");
    if (visited.size() != nrOfNodes) {
        List<Entity> missed = new ArrayList<Entity>();
        for (Entity e : lcc.getNodes()) {
            if (!visited.contains(e.getId())) {
                missed.add(e);
                System.out.println("Missed: [" + e + "]");
            }
        }
        throw new LexicalSemanticResourceException(
                "Visited only " + visited.size() + " out of " + nrOfNodes + " nodes.");
    }
    if (hyponymCountMap.size() != nrOfNodes) {
        throw new LexicalSemanticResourceException(
                "HyponymCountMap does not contain an entry for each node in the graph. "
                        + hyponymCountMap.size() + "/" + nrOfNodes);
    }

    /*
     * As an EntityGraph is a graph rather than a tree, the hyponym count for top nodes can be
     * greater than the number of nodes in the graph. This is due to the multiple counting of
     * nodes having more than one parent. Thus, we have to scale hyponym counts to fall in
     * [0, numberOfNodes].
     */
    for (String key : hyponymCountMap.keySet()) {
        if (hyponymCountMap.get(key) > hyponymCountMap.size()) {
            // TODO scaling function is not optimal (to say the least :)
            hyponymCountMap.put(key, (hyponymCountMap.size() - 1));
        }
    }

    logger.info("Computed hyponymCountMap");
    EntityGraphUtils.serializeMap(hyponymCountMap, hyponymCountMapSerializedFile);
    logger.info("Serialized hyponymCountMap");
    return hyponymCountMap;
}
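The twist in this example is the bottom-up direction: the queue is seeded with the leaves via addAll, and a node whose children have not all been counted yet is simply re-enqueued for a later pass. A minimal sketch of that re-enqueue trick over a hypothetical acyclic child map:

import java.util.ArrayDeque;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Queue;

public class BottomUpCountDemo {
    public static void main(String[] args) {
        // node -> children (acyclic); leaves map to empty lists
        Map<String, List<String>> children = new HashMap<>();
        children.put("root", List.of("a", "b"));
        children.put("a", List.of("a1"));
        children.put("b", List.of());
        children.put("a1", List.of());

        Map<String, Integer> descendantCount = new HashMap<>();
        Queue<String> queue = new ArrayDeque<>();

        // seed the queue with all leaves
        for (Map.Entry<String, List<String>> e : children.entrySet()) {
            if (e.getValue().isEmpty()) {
                queue.add(e.getKey());
            }
        }

        while (!queue.isEmpty()) {
            String node = queue.poll();
            if (descendantCount.containsKey(node)) {
                continue; // already counted (duplicates are harmless)
            }
            List<String> kids = children.get(node);
            // a node is ready only once every child has a count
            if (kids.stream().allMatch(descendantCount::containsKey)) {
                int sum = kids.stream().mapToInt(descendantCount::get).sum();
                descendantCount.put(node, kids.size() + sum);
                // walk upward: enqueue every parent of this node
                for (Map.Entry<String, List<String>> e : children.entrySet()) {
                    if (e.getValue().contains(node)) {
                        queue.add(e.getKey());
                    }
                }
            } else {
                queue.add(node); // children not ready yet; retry later
            }
        }

        // counts: root=3, a=1, b=0, a1=0 (map iteration order may vary)
        System.out.println(descendantCount);
    }
}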
From source file:org.gvnix.flex.entity.ActionScriptEntityMetadataProvider.java
private void createActionScriptMirrorClass(String asEntityId, ActionScriptType asType, JavaType javaType) {
    Queue<TypeMapping> relatedTypes = new LinkedList<TypeMapping>();

    List<MetaTagAttributeValue<?>> attributes = new ArrayList<MetaTagAttributeValue<?>>();
    attributes.add(new StringAttributeValue(new ActionScriptSymbolName(ALIAS_ATTR),
            javaType.getFullyQualifiedTypeName()));
    ASMetaTagMetadata remoteClassTag = new DefaultASMetaTagMetadata(REMOTE_CLASS_TAG, attributes);
    List<ASMetaTagMetadata> typeMetaTags = new ArrayList<ASMetaTagMetadata>();
    typeMetaTags.add(remoteClassTag);

    // TODO - for now we will only handle classes...interfaces could come
    // later but would add complexity (i.e., need to find all implementations
    // and mirror those as well)

    List<ASFieldMetadata> declaredFields = new ArrayList<ASFieldMetadata>();
    MemberDetails memberDetails = getMemberDetails(javaType);
    for (MethodMetadata method : MemberFindingUtils.getMethods(memberDetails)) {
        if (BeanInfoUtils.isAccessorMethod(method)) {
            JavaSymbolName propertyName = BeanInfoUtils.getPropertyNameForJavaBeanMethod(method);
            FieldMetadata javaField = BeanInfoUtils.getFieldForPropertyName(memberDetails, propertyName);
            // TODO - We don't add any meta-tags and we set the field to
            // public - any other choice?
            ASFieldMetadata asField = ActionScriptMappingUtils.toASFieldMetadata(asEntityId, javaField, true);
            relatedTypes.addAll(findRequiredMappings(javaField, asField));
            declaredFields.add(asField);
        }
    }

    ASClassOrInterfaceTypeDetails asDetails = new DefaultASClassOrInterfaceTypeDetails(asEntityId, asType,
            ASPhysicalTypeCategory.CLASS, declaredFields, null, null, null, null, null, typeMetaTags);
    // new DefaultASClassOrInterfaceTypeDetails(declaredByMetadataId, name,
    //     physicalTypeCategory, declaredFields, declaredConstructor,
    //     declaredMethods, superClass, extendsTypes, implementsTypes, typeMetaTags);
    ASPhysicalTypeMetadata asMetadata = new DefaultASPhysicalTypeMetadata(asEntityId,
            getPhysicalLocationCanonicalPath(asEntityId), asDetails);

    getAsPhysicalTypeProvider().createPhysicalType(asMetadata);

    // Now trigger the creation of any related types
    while (!relatedTypes.isEmpty()) {
        TypeMapping mapping = relatedTypes.poll();
        createActionScriptMirrorClass(mapping.getMetadataId(), mapping.getAsType(), mapping.getJavaType());
    }
}