List of usage examples for java.util Queue poll
E poll();
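As specified by java.util.Queue, poll() retrieves and removes the head of the queue, returning null if the queue is empty (unlike remove(), which throws NoSuchElementException). A minimal, self-contained sketch of these semantics:

import java.util.ArrayDeque;
import java.util.Queue;

public class PollDemo {
    public static void main(String[] args) {
        Queue<String> queue = new ArrayDeque<String>();
        queue.offer("a");
        queue.offer("b");

        System.out.println(queue.poll()); // "a" - the FIFO head is removed
        System.out.println(queue.poll()); // "b"
        System.out.println(queue.poll()); // null - empty queue, no exception
    }
}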
From source file:tachyon.master.file.FileSystemMaster.java
/**
 * Gets absolute paths of all in memory files. Called by the web ui.
 *
 * @return absolute paths of all in memory files
 */
public List<TachyonURI> getInMemoryFiles() {
    List<TachyonURI> ret = new ArrayList<TachyonURI>();
    Queue<Pair<InodeDirectory, TachyonURI>> nodesQueue =
            new LinkedList<Pair<InodeDirectory, TachyonURI>>();
    synchronized (mInodeTree) {
        // TODO(yupeng): Verify we want to use absolute path.
        nodesQueue.add(new Pair<InodeDirectory, TachyonURI>(mInodeTree.getRoot(),
                new TachyonURI(TachyonURI.SEPARATOR)));
        while (!nodesQueue.isEmpty()) {
            Pair<InodeDirectory, TachyonURI> pair = nodesQueue.poll();
            InodeDirectory directory = pair.getFirst();
            TachyonURI curUri = pair.getSecond();

            Set<Inode> children = directory.getChildren();
            for (Inode inode : children) {
                TachyonURI newUri = curUri.join(inode.getName());
                if (inode.isDirectory()) {
                    nodesQueue.add(new Pair<InodeDirectory, TachyonURI>((InodeDirectory) inode, newUri));
                } else if (isFullyInMemory((InodeFile) inode)) {
                    ret.add(newUri);
                }
            }
        }
    }
    return ret;
}
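The example above is the standard breadth-first traversal idiom: seed the queue with the root, then poll a node and enqueue its children until the queue drains. A minimal sketch of the same pattern, using a hypothetical TreeNode type for illustration:

import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.List;
import java.util.Queue;

class TreeNode {
    // Hypothetical node type, not part of the Tachyon example above.
    List<TreeNode> children = new ArrayList<TreeNode>();
}

class BfsSketch {
    static void traverse(TreeNode root) {
        Queue<TreeNode> queue = new ArrayDeque<TreeNode>();
        queue.add(root);
        while (!queue.isEmpty()) {
            TreeNode node = queue.poll(); // remove the next node in FIFO order
            for (TreeNode child : node.children) {
                queue.add(child); // visit children on a later iteration
            }
        }
    }
}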
From source file:net.floodlightcontroller.devicemanager.internal.DeviceManagerImpl.java
/**
 * Send update notifications to listeners
 * @param updates the updates to process.
 */
protected void processUpdates(Queue<DeviceUpdate> updates) {
    if (updates == null)
        return;
    DeviceUpdate update = null;
    while (null != (update = updates.poll())) {
        if (logger.isTraceEnabled()) {
            logger.trace("Dispatching device update: {}", update);
        }
        List<IDeviceListener> listeners = deviceListeners.getOrderedListeners();
        notifyListeners(listeners, update);
    }
}
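Because poll() returns null once the queue is empty, it supports the compact drain idiom used above, with no separate isEmpty() check. A minimal sketch:

import java.util.Queue;

class DrainSketch {
    static void drain(Queue<Runnable> tasks) {
        Runnable task;
        // poll() yields null when the queue is exhausted, which ends the loop.
        while ((task = tasks.poll()) != null) {
            task.run();
        }
    }
}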
From source file:org.apereo.portal.io.xml.JaxbPortalDataHandlerService.java
@Override
public void importDataDirectory(File directory, String pattern, final BatchImportOptions options) {
    if (!directory.exists()) {
        throw new IllegalArgumentException("The specified directory '" + directory + "' does not exist");
    }

    //Create the file filter to use when searching for files to import
    final FileFilter fileFilter;
    if (pattern != null) {
        fileFilter = new AntPatternFileFilter(true, false, pattern, this.dataFileExcludes);
    } else {
        fileFilter = new AntPatternFileFilter(true, false, this.dataFileIncludes, this.dataFileExcludes);
    }

    //Determine the parent directory to log to
    final File logDirectory = determineLogDirectory(options, "import");

    //Setup reporting file
    final File importReport = new File(logDirectory, "data-import.txt");
    final PrintWriter reportWriter;
    try {
        reportWriter = new PrintWriter(new PeriodicFlushingBufferedWriter(500, new FileWriter(importReport)));
    } catch (IOException e) {
        throw new RuntimeException("Failed to create FileWriter for: " + importReport, e);
    }

    //Convert directory to URI String to provide better logging output
    final URI directoryUri = directory.toURI();
    final String directoryUriStr = directoryUri.toString();
    IMPORT_BASE_DIR.set(directoryUriStr);
    try {
        //Scan the specified directory for files to import
        logger.info("Scanning for files to Import from: {}", directory);
        final PortalDataKeyFileProcessor fileProcessor = new PortalDataKeyFileProcessor(this.dataKeyTypes, options);
        this.directoryScanner.scanDirectoryNoResults(directory, fileFilter, fileProcessor);
        final long resourceCount = fileProcessor.getResourceCount();
        logger.info("Found {} files to Import from: {}", resourceCount, directory);

        //See if the import should fail on error
        final boolean failOnError = options != null ? options.isFailOnError() : true;

        //Map of files to import, grouped by type
        final ConcurrentMap<PortalDataKey, Queue<Resource>> dataToImport = fileProcessor.getDataToImport();

        //Import the data files
        for (final PortalDataKey portalDataKey : this.dataKeyImportOrder) {
            final Queue<Resource> files = dataToImport.remove(portalDataKey);
            if (files == null) {
                continue;
            }

            final Queue<ImportFuture<?>> importFutures = new LinkedList<ImportFuture<?>>();
            final List<FutureHolder<?>> failedFutures = new LinkedList<FutureHolder<?>>();

            final int fileCount = files.size();
            logger.info("Importing {} files of type {}", fileCount, portalDataKey);
            reportWriter.println(portalDataKey + "," + fileCount);

            while (!files.isEmpty()) {
                final Resource file = files.poll();

                //Check for completed futures on every iteration, needed to fail as fast as possible on an import exception
                final List<FutureHolder<?>> newFailed = waitForFutures(importFutures, reportWriter, logDirectory, false);
                failedFutures.addAll(newFailed);

                final AtomicLong importTime = new AtomicLong(-1);

                //Create import task
                final Callable<Object> task = new CallableWithoutResult() {
                    @Override
                    protected void callWithoutResult() {
                        IMPORT_BASE_DIR.set(directoryUriStr);
                        importTime.set(System.nanoTime());
                        try {
                            importData(file, portalDataKey);
                        } finally {
                            importTime.set(System.nanoTime() - importTime.get());
                            IMPORT_BASE_DIR.remove();
                        }
                    }
                };

                //Submit the import task
                final Future<?> importFuture = this.importExportThreadPool.submit(task);

                //Add the future for tracking
                importFutures.offer(new ImportFuture(importFuture, file, portalDataKey, importTime));
            }

            //Wait for all of the imports of this type to complete
            final List<FutureHolder<?>> newFailed = waitForFutures(importFutures, reportWriter, logDirectory, true);
            failedFutures.addAll(newFailed);

            if (failOnError && !failedFutures.isEmpty()) {
                throw new RuntimeException(failedFutures.size() + " " + portalDataKey + " entities failed to import.\n\n"
                        + "\tPer entity exception logs and a full report can be found in " + logDirectory + "\n");
            }

            reportWriter.flush();
        }

        if (!dataToImport.isEmpty()) {
            throw new IllegalStateException("The following PortalDataKeys are not listed in the dataTypeImportOrder List: "
                    + dataToImport.keySet());
        }

        logger.info("For a detailed report on the data import see " + importReport);
    } catch (InterruptedException e) {
        throw new RuntimeException("Interrupted while waiting for entities to import", e);
    } finally {
        IOUtils.closeQuietly(reportWriter);
        IMPORT_BASE_DIR.remove();
    }
}
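The import loop above pairs poll() with an ExecutorService: each polled resource becomes a submitted task, and the resulting Future is queued so failures can be collected afterwards. A stripped-down sketch of that shape; the names and the process() helper are illustrative, not uPortal's API:

import java.util.ArrayDeque;
import java.util.Queue;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

class SubmitAndTrackSketch {
    static void runAll(Queue<String> files) throws Exception {
        ExecutorService pool = Executors.newFixedThreadPool(4);
        Queue<Future<?>> futures = new ArrayDeque<Future<?>>();
        while (!files.isEmpty()) {
            final String file = files.poll(); // take the next work item
            futures.offer(pool.submit(() -> process(file))); // track its future
        }
        Future<?> f;
        while ((f = futures.poll()) != null) {
            f.get(); // rethrows if the corresponding task failed
        }
        pool.shutdown();
    }

    static void process(String file) {
        // Illustrative stand-in for the real per-file import.
    }
}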
From source file:HashMapComponentGraph.java
/**
 * Remove an edge. If the removal results in one or more isolated nodes,
 * these will be removed from the graph implicitly.
 *
 * For non-dense and relatively fragmented graphs, this operation will be
 * cheap. Otherwise, for dense and strongly connected graphs, the operation
 * could include a full traversal of the graph visiting all present edges,
 * resulting in an O((n-1)^2) operation, where n is the number of nodes in
 * the graph.
 *
 * @param pair
 *            edge to be removed
 * @return true if the edge was actually removed, false if the edge did not
 *         exist before the call.
 */
@Override
public final boolean removeEdge(Pair<T> pair) {
    // don't act if edge is not present
    if (!edgeData.containsKey(pair)) {
        // System.out.println("Edge NOT present");
        return false;
    } else {
        edgeData.remove(pair);
    }

    // get the node out of the node adjacency hash map (at this point we
    // know that the nodes must exist, because the edge exists)
    Node a = new Node(pair.getFirst());
    if (allnodes.containsKey(a)) {
        a = allnodes.get(a);
    } else {
        // not possible
        throw new IllegalStateException(
                "ComponentGraph.removeEdge(): Node did not have an adjacency entry. ComponentGraph corrupted.");
    }
    Node b = new Node(pair.getSecond());
    if (allnodes.containsKey(b)) {
        b = allnodes.get(b);
    } else {
        // this is not possible
        throw new IllegalStateException(
                "ComponentGraph.removeEdge(): Node did not have an adjacency entry. ComponentGraph corrupted.");
    }

    // if b is fixed, interchange a and b (now, if b is fixed, both a and b
    // are fixed)
    if (nodeClassifier.isDelimitor(b.element)) {
        Node t = a;
        a = b;
        b = t;
    }

    // remove references to each node, in each node's connected node sets
    edges.get(a).remove(b);
    edges.get(b).remove(a);

    // if no edges left in set, remove the set
    if (edges.get(a).isEmpty())
        edges.remove(a);
    // if no edges left in set, remove it
    if (edges.get(b).isEmpty())
        edges.remove(b);

    // Cases
    // i.   Both nodes are delimiters
    //        do nothing
    // ii.  One node is a delimiter:
    //        a). non-delimiter is in a component
    //            do nothing (node could now be alone in its component)
    //            if node contains no other edges, delete it from its component
    //        b). non-delimiter is not in a component (not possible)
    //            do nothing/report fatal error
    // iii. No node is a delimiter:
    //        a). no node is in a component (not possible)
    //            do nothing/error
    //        b). one node is in a component (not possible)
    //            do nothing
    //        c). both nodes are in a component
    //            1. the same component
    //               remove edge, traverse breadth-first from each node to
    //               determine if component should be split.
    //            2. different components (not possible)
    //               do nothing/error

    // both nodes are fixed
    if (nodeClassifier.isDelimitor(b.element)) {
        // do nothing
        // return;
        // one is fixed
    } else if (nodeClassifier.isDelimitor(a.element)) {
        if (component.containsKey(b)) { // only possible option
            // System.out.println("One fixed node");
            Component g = component.get(b);

            // check for another edge on this node
            if (!edges.containsKey(b)) {
                // System.out.println("b did not have any edges");
                // remove the node from component
                component.remove(b);

                // notify handler
                componenthandler.nodeRemovedFromComponent(g.element, b.element);

                // b is now free
                freenodes.add(b);

                Set<Node> s = componentNodes.get(g);
                if (!s.remove(b)) {
                    System.out.println("ALARM");
                    System.exit(0);
                }

                // remove group if empty
                if (s.isEmpty()) {
                    // System.out.println("groups entry removed");
                    componentNodes.remove(g);
                    // TODO notify handler
                } else {
                    System.out.println("Group isn't empty, why??");
                    // System.exit(0);
                }
            } else {
                // b has edges left, and is part of a group. We're done
            }

            // remove edge from component (even if b was not removed from
            // the group)
            Set<Pair<T>> sp = componentEdges.get(g);
            sp.remove(pair);

            // remove group if empty
            if (sp.isEmpty()) {
                // System.out.println("grouppair entry removed " + g);
                componentEdges.remove(g);
            }
        } else {
            throw new IllegalStateException(
                    "HashMapComponentGraph.removeEdge(): A connected non-delimiter node was not in a component. ComponentGraph corrupted.");
        }
        // return;
        // none is fixed
    } else {
        // if b has edges, interchange a and b
        // (now, if b has edges, both a and b have edges)
        if (edges.containsKey(b)) {
            Node t = a;
            a = b;
            b = t;
        }

        // both are in the same group (only possible option)
        Component oldgroup = component.get(a);

        if (oldgroup != component.get(b)) {
            System.out.println("Different groups??!");
            System.exit(0);
        }

        // both have edges
        if (edges.containsKey(b)) {
            final int NONE = 0;
            final int RED = 1;
            final int BLUE = 2;

            // clear node colors in entire group
            Iterator<Node> i = componentNodes.get(oldgroup).iterator();
            while (i.hasNext()) {
                i.next().color = NONE;
            }

            // perform breadth-first traversal,
            // to determine if group has become disjoint
            boolean disjoint = true;
            Queue<Node> queue = new LinkedList<Node>();
            Set<Pair<T>> blueEdges = new LinkedHashSet<Pair<T>>();
            a.color = RED;
            b.color = BLUE;
            queue.add(a);
            queue.add(b);

            // traverse
            while (!queue.isEmpty()) {
                Node node = queue.poll();

                // add node's neighbors to queue
                Iterator<Node> neighbors = edges.get(node).iterator();
                while (neighbors.hasNext()) {
                    Node neighbor = neighbors.next();

                    // remember visited edges
                    if (node.color == BLUE)
                        blueEdges.add(new Pair<T>(node.element, neighbor.element));

                    if (nodeClassifier.isDelimitor(neighbor.element)) {
                        // ignore fixed nodes
                        continue;
                    } else if (neighbor.color == NONE) {
                        neighbor.color = node.color;
                        queue.add(neighbor);
                        continue;
                    } else if (neighbor.color != node.color) {
                        // group is connected
                        disjoint = false;
                        break;
                    } else {
                        // already visited
                        continue;
                    }
                } // while neighbors
            } // while queue

            // handle result of traversal
            if (disjoint) {
                // System.out.println("Splitting group");

                // new group
                Component newgroup = new Component(componenthandler.newComponent());

                Set<Node> blues = new LinkedHashSet<Node>();

                // find all blue nodes
                Iterator<Node> iter = componentNodes.get(oldgroup).iterator();
                while (iter.hasNext()) {
                    Node node = iter.next();
                    if (node.color == BLUE) {
                        blues.add(node);
                        component.put(node, newgroup);
                    }
                }

                // impossible
                if (blues.isEmpty()) {
                    System.out.println("Why was no blue nodes found?");
                    System.exit(0);
                }

                // remove bodies from old components and add the new
                // component
                componentNodes.get(oldgroup).removeAll(blues);
                componentNodes.put(newgroup, blues);

                // remove blue edges from the red group and create a new
                // group with pairs (ng)
                componentEdges.get(oldgroup).removeAll(blueEdges);
                componentEdges.get(oldgroup).remove(pair); // the edge that was to be removed
                componentEdges.put(newgroup, blueEdges);
                // return;
            } else {
                // System.out.println("Group still connected");
                // we keep group as it is, but remove the pair (edge)
                Set<Pair<T>> sp = componentEdges.get(oldgroup);
                sp.remove(pair);

                // remove group if empty
                if (sp.isEmpty()) {
                    // System.out.println("grouppair entry removed " + oldgroup);
                    componentEdges.remove(oldgroup);
                }
                // return;
            }
            // a has an edge and b does not
        } else if (edges.containsKey(a)) {
            // keep group as it is, but wipe out b
            component.remove(b);
            componentNodes.get(oldgroup).remove(b);

            // b is now a free node
            freenodes.add(b);

            // notify handler that b is removed from oldgroup
            componenthandler.nodeRemovedFromComponent(oldgroup.element, b.element);

            if (componentNodes.get(oldgroup).isEmpty()) {
                // never happens
                System.out.println("How can group be empty?");
                componentNodes.remove(oldgroup);
            }

            // remove from pairs
            // System.out.println("removing " + pair + " from group pairs " + oldgroup);
            Set<Pair<T>> sp = componentEdges.get(oldgroup);
            sp.remove(pair);

            // remove group if empty
            if (sp.isEmpty()) {
                // System.out.println("grouppair entry removed " + oldgroup);
                componentEdges.remove(oldgroup);
            }
            // none have edges
        } else {
            // clear out group entirely
            component.remove(a);
            component.remove(b);

            // both a and b are free nodes now
            freenodes.add(a);
            freenodes.add(b);

            // notify handler that a and b are removed
            componenthandler.nodeRemovedFromComponent(oldgroup.element, a.element);
            componenthandler.nodeRemovedFromComponent(oldgroup.element, b.element);

            // assume that the group is only containing a and b?
            componentNodes.get(oldgroup).remove(b);
            componentNodes.get(oldgroup).remove(a);

            if (componentNodes.get(oldgroup).isEmpty()) {
                componentNodes.remove(oldgroup);
            } else {
                // impossible
                System.out.println("Hmm still stuff in group but no outgoing edges?"
                        + componentNodes.get(oldgroup) + " a and b is " + a + ", " + b);
                System.exit(0);
            }

            // remove from pairs
            Set<Pair<T>> sp = componentEdges.get(oldgroup);
            sp.remove(pair);

            // remove group if empty
            if (sp.isEmpty()) {
                // System.out.println("grouppair entry removed " + oldgroup);
                componentEdges.remove(oldgroup);
            }
        } // none have edges
    } // none is fixed

    // System.out.println("After remove: " + groups.keySet().size() + " groups with " + group.size() + " bodies");
    // Iterator<Component<V>> groupiter = componentNodes.keySet().iterator();
    //
    // Set<Pair<T>> allpairs = new HashSet<Pair<T>>();
    // Set<Node> allnodes = new HashSet<Node>();
    // while (groupiter.hasNext()) {
    //     Component<V> g = groupiter.next();
    //     //System.out.println("Group " + g + " : " + groupPairs.get(g).size() + " pairs");
    //
    //     Iterator<Pair<T>> pairiter = componentEdges.get(g).iterator();
    //     while (pairiter.hasNext()) {
    //         Pair<T> thispair = pairiter.next();
    //         //System.out.println(" pair:" + thispair.hashCode());
    //         if (allpairs.contains(thispair)) {
    //             System.out.println("Duplicates!!!!");
    //             System.exit(0);
    //         }
    //         allpairs.add(thispair);
    //     }
    //
    //     Iterator<Node> nodeiter = componentNodes.get(g).iterator();
    //     while (nodeiter.hasNext()) {
    //         Node node = nodeiter.next();
    //         //System.out.println(" Node:" + node);
    //         if (allnodes.contains(node)) {
    //             System.out.println("Duplicates!!!!");
    //             System.exit(0);
    //         }
    //         allnodes.add(node);
    //     }
    // }
    return true;
}
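The removeEdge logic above runs a two-colored breadth-first search from both endpoints of the removed edge to decide whether the component has split. A simpler single-source reachability check answers the same question; the sketch below assumes a plain adjacency map rather than the graph types used above:

import java.util.ArrayDeque;
import java.util.Collections;
import java.util.HashSet;
import java.util.Map;
import java.util.Queue;
import java.util.Set;

class SplitCheckSketch {
    // Returns true if a and b are still connected after an edge was removed.
    static <N> boolean stillConnected(Map<N, Set<N>> adjacency, N a, N b) {
        Set<N> visited = new HashSet<N>();
        Queue<N> queue = new ArrayDeque<N>();
        queue.add(a);
        visited.add(a);
        while (!queue.isEmpty()) {
            N node = queue.poll();
            if (node.equals(b)) {
                return true; // reached the other endpoint
            }
            for (N neighbor : adjacency.getOrDefault(node, Collections.<N>emptySet())) {
                if (visited.add(neighbor)) {
                    queue.add(neighbor);
                }
            }
        }
        return false; // b was never reached: the component has split
    }
}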
From source file:org.shaman.terrain.polygonal.PolygonalMapGenerator.java
private void findOceans() {
    for (Graph.Center c : graph.centers) {
        c.ocean = false;
        c.water = false;
    }
    for (Graph.Corner c : graph.corners) {
        c.ocean = false;
    }
    //set water parameter of centers
    float LAKE_THRESHOLD = 0.3f;
    Queue<Graph.Center> queue = new ArrayDeque<>();
    for (Graph.Center p : graph.centers) {
        int numWater = 0;
        for (Graph.Corner c : p.corners) {
            if (c.border || c.ocean) {
                p.border = true;
                p.water = true;
                p.ocean = true;
                queue.add(p);
                break;
            }
            if (c.water) {
                numWater++;
            }
        }
        p.water = (p.ocean || numWater >= p.corners.size() * LAKE_THRESHOLD);
    }
    LOG.info("border cells: " + queue.size());
    //flood fill from the borders to distinguish between oceans and lakes
    while (!queue.isEmpty()) {
        Graph.Center c = queue.poll();
        for (Graph.Center r : c.neighbors) {
            if (r.water && !r.ocean) {
                r.ocean = true;
                queue.add(r);
            }
        }
    }
    //assign coast tag
    for (Graph.Corner q : graph.corners) {
        q.coast = false;
    }
    for (Graph.Center c : graph.centers) {
        if (c.ocean) {
            for (Graph.Corner q : c.corners) {
                if (!q.water) {
                    q.coast = true;
                } else {
                    q.ocean = true;
                }
            }
        }
    }
    //assign basic biomes
    int oceanCount = 0;
    int lakeCount = 0;
    int landCount = 0;
    for (Graph.Center c : graph.centers) {
        if (c.ocean) {
            c.biome = Biome.OCEAN;
            oceanCount++;
        } else if (c.water) {
            c.biome = Biome.LAKE;
            lakeCount++;
        } else {
            c.biome = Biome.BEACH;
            landCount++;
        }
    }
    LOG.log(Level.INFO, "ocean cells: {0}, lake cells: {1}, land cells: {2}",
            new Object[] { oceanCount, lakeCount, landCount });
}
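The ocean detection above is a flood fill: border cells seed the queue, and poll() spreads the ocean flag across connected water cells. The same shape on a hypothetical Cell type:

import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.List;
import java.util.Queue;

class Cell {
    // Hypothetical cell type for illustration.
    boolean water, ocean, border;
    List<Cell> neighbors = new ArrayList<Cell>();
}

class FloodFillSketch {
    static void markOceans(List<Cell> cells) {
        Queue<Cell> queue = new ArrayDeque<Cell>();
        for (Cell c : cells) {
            if (c.border && c.water) {
                c.ocean = true;
                queue.add(c); // seed the fill from the map border
            }
        }
        while (!queue.isEmpty()) {
            Cell c = queue.poll();
            for (Cell n : c.neighbors) {
                if (n.water && !n.ocean) {
                    n.ocean = true; // spread ocean to connected water
                    queue.add(n);
                }
            }
        }
    }
}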
From source file:org.evosuite.setup.TestClusterGenerator.java
/**
 * All public methods defined directly in the SUT should be covered
 *
 * TODO: What if we use instrument_parent?
 *
 * @param targetClass
 */
@SuppressWarnings("unchecked")
private void initializeTargetMethods() throws RuntimeException, ClassNotFoundException {
    logger.info("Analyzing target class");
    Class<?> targetClass = Properties.getTargetClass();

    TestCluster cluster = TestCluster.getInstance();

    Set<Class<?>> targetClasses = new LinkedHashSet<Class<?>>();
    if (targetClass == null) {
        throw new RuntimeException("Failed to load " + Properties.TARGET_CLASS);
    }
    targetClasses.add(targetClass);
    addDeclaredClasses(targetClasses, targetClass);
    if (Modifier.isAbstract(targetClass.getModifiers())) {
        logger.info("SUT is an abstract class");
        Set<Class<?>> subclasses = getConcreteClasses(targetClass, inheritanceTree);
        logger.info("Found {} concrete subclasses", subclasses.size());
        targetClasses.addAll(subclasses);
    }

    // To make sure we also have anonymous inner classes, double check inner classes using ASM
    ClassNode targetClassNode = DependencyAnalysis.getClassNode(Properties.TARGET_CLASS);
    Queue<InnerClassNode> innerClasses = new LinkedList<InnerClassNode>();
    innerClasses.addAll(targetClassNode.innerClasses);
    while (!innerClasses.isEmpty()) {
        InnerClassNode icn = innerClasses.poll();
        try {
            logger.debug("Loading inner class: {}, {},{}", icn.innerName, icn.name, icn.outerName);
            String innerClassName = ResourceList.getClassNameFromResourcePath(icn.name);
            Class<?> innerClass = TestGenerationContext.getInstance().getClassLoaderForSUT()
                    .loadClass(innerClassName);
            //if (!canUse(innerClass))
            //    continue;

            // Sometimes strange things appear such as Map$Entry
            if (!targetClasses.contains(innerClass)) {
                // && !innerClassName.matches(".*\\$\\d+(\\$.*)?$")) {
                logger.info("Adding inner class {}", innerClassName);
                targetClasses.add(innerClass);
                ClassNode innerClassNode = DependencyAnalysis.getClassNode(innerClassName);
                innerClasses.addAll(innerClassNode.innerClasses);
            }
        } catch (Throwable t) {
            logger.error("Problem for {}. Error loading inner class: {}, {},{}: {}",
                    Properties.TARGET_CLASS, icn.innerName, icn.name, icn.outerName, t);
        }
    }

    for (Class<?> clazz : targetClasses) {
        logger.info("Current SUT class: {}", clazz);
        if (!canUse(clazz)) {
            logger.info("Cannot access SUT class: {}", clazz);
            continue;
        }

        // Add all constructors
        for (Constructor<?> constructor : getConstructors(clazz)) {
            logger.info("Checking target constructor {}", constructor);
            String name = "<init>" + org.objectweb.asm.Type.getConstructorDescriptor(constructor);

            if (Properties.TT) {
                String orig = name;
                name = BooleanTestabilityTransformation.getOriginalNameDesc(clazz.getName(), "<init>",
                        org.objectweb.asm.Type.getConstructorDescriptor(constructor));
                if (!orig.equals(name))
                    logger.info("TT name: {} -> {}", orig, name);
            }

            if (canUse(constructor)) {
                GenericConstructor genericConstructor = new GenericConstructor(constructor, clazz);
                cluster.addTestCall(genericConstructor);
                // TODO: Add types!
                cluster.addGenerator(new GenericClass(clazz).getWithWildcardTypes(), genericConstructor);
                addDependencies(genericConstructor, 1);
                logger.debug("Keeping track of {}.{}{}", constructor.getDeclaringClass().getName(),
                        constructor.getName(), Type.getConstructorDescriptor(constructor));
            } else {
                logger.debug("Constructor cannot be used: {}", constructor);
            }
        }

        // Add all methods
        for (Method method : getMethods(clazz)) {
            logger.info("Checking target method {}", method);
            String name = method.getName() + org.objectweb.asm.Type.getMethodDescriptor(method);

            if (Properties.TT) {
                String orig = name;
                name = BooleanTestabilityTransformation.getOriginalNameDesc(clazz.getName(), method.getName(),
                        org.objectweb.asm.Type.getMethodDescriptor(method));
                if (!orig.equals(name))
                    logger.info("TT name: {} -> {}", orig, name);
            }

            if (canUse(method, clazz)) {
                logger.debug("Adding method {}.{}{}", clazz.getName(), method.getName(),
                        Type.getMethodDescriptor(method));

                GenericMethod genericMethod = new GenericMethod(method, clazz);
                cluster.addTestCall(genericMethod);
                cluster.addModifier(new GenericClass(clazz).getWithWildcardTypes(), genericMethod);
                addDependencies(genericMethod, 1);
                GenericClass retClass = new GenericClass(method.getReturnType());

                if (!retClass.isPrimitive() && !retClass.isVoid() && !retClass.isObject())
                    cluster.addGenerator(retClass.getWithWildcardTypes(), genericMethod);
            } else {
                logger.debug("Method cannot be used: {}", method);
            }
        }

        for (Field field : getFields(clazz)) {
            logger.info("Checking target field {}", field);

            if (canUse(field, clazz)) {
                GenericField genericField = new GenericField(field, clazz);
                addDependencies(genericField, 1);
                cluster.addGenerator(new GenericClass(field.getGenericType()).getWithWildcardTypes(),
                        genericField);
                logger.debug("Adding field {}", field);
                if (!Modifier.isFinal(field.getModifiers())) {
                    logger.debug("Is not final");
                    cluster.addTestCall(new GenericField(field, clazz));
                } else {
                    logger.debug("Is final");
                    if (Modifier.isStatic(field.getModifiers()) && !field.getType().isPrimitive()) {
                        logger.debug("Is static non-primitive");
                        /*
                         * With this we are trying to cover such cases:
                         *
                         * public static final DurationField INSTANCE = new MillisDurationField();
                         *
                         * private MillisDurationField() {
                         *     super();
                         * }
                         */
                        try {
                            Object o = field.get(null);
                            if (o == null) {
                                logger.info("Field is not yet initialized: {}", field);
                            } else {
                                Class<?> actualClass = o.getClass();
                                logger.debug("Actual class is {}", actualClass);
                                if (!actualClass.isAssignableFrom(genericField.getRawGeneratedType())
                                        && genericField.getRawGeneratedType().isAssignableFrom(actualClass)) {
                                    GenericField superClassField = new GenericField(field, clazz);
                                    cluster.addGenerator(new GenericClass(actualClass), superClassField);
                                }
                            }
                        } catch (IllegalAccessException e) {
                            // TODO Auto-generated catch block
                            e.printStackTrace();
                        }
                    }
                }
            } else {
                logger.debug("Can't use field {}", field);
            }
        }
        analyzedClasses.add(clazz);
        // TODO: Set to generic type rather than class?
        cluster.getAnalyzedClasses().add(clazz);
    }
    if (Properties.INSTRUMENT_PARENT) {
        for (String superClass : inheritanceTree.getSuperclasses(Properties.TARGET_CLASS)) {
            try {
                Class<?> superClazz = TestGenerationContext.getInstance().getClassLoaderForSUT()
                        .loadClass(superClass);
                dependencies.add(new Pair(0, superClazz));
            } catch (ClassNotFoundException e) {
                logger.error("Problem for {}. Class not found: {}", Properties.TARGET_CLASS, superClass, e);
            }
        }
    }

    if (Properties.HANDLE_STATIC_FIELDS) {
        GetStaticGraph getStaticGraph = GetStaticGraphGenerator.generate(Properties.TARGET_CLASS);

        Map<String, Set<String>> staticFields = getStaticGraph.getStaticFields();
        for (String className : staticFields.keySet()) {
            logger.info("Adding static fields to cluster for class {}", className);

            Class<?> clazz;
            try {
                clazz = getClass(className);
            } catch (ExceptionInInitializerError ex) {
                logger.debug("Class init caused exception {}", className);
                continue;
            }
            if (clazz == null) {
                logger.debug("Class not found {}", className);
                continue;
            }

            if (!canUse(clazz))
                continue;

            Set<String> fields = staticFields.get(className);
            for (Field field : getFields(clazz)) {
                if (!canUse(field, clazz))
                    continue;

                if (fields.contains(field.getName())) {
                    if (!Modifier.isFinal(field.getModifiers())) {
                        logger.debug("Is not final");
                        cluster.addTestCall(new GenericField(field, clazz));
                    }
                }
            }
        }

        PutStaticMethodCollector collector = new PutStaticMethodCollector(Properties.TARGET_CLASS, staticFields);
        Set<MethodIdentifier> methodIdentifiers = collector.collectMethods();
        for (MethodIdentifier methodId : methodIdentifiers) {
            Class<?> clazz = getClass(methodId.getClassName());
            if (clazz == null)
                continue;

            if (!canUse(clazz))
                continue;

            Method method = getMethod(clazz, methodId.getMethodName(), methodId.getDesc());
            if (method == null)
                continue;

            GenericMethod genericMethod = new GenericMethod(method, clazz);
            cluster.addTestCall(genericMethod);
        }
    }
    logger.info("Finished analyzing target class");
}
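The inner-class scan above shows a worklist that grows while it is being drained: each polled InnerClassNode can contribute further entries via addAll, and the loop ends only when no new work appears. In sketch form, with a hypothetical Item type and a seen set to guarantee termination:

import java.util.ArrayDeque;
import java.util.HashSet;
import java.util.List;
import java.util.Queue;
import java.util.Set;

class WorklistSketch {
    // Hypothetical work item that may discover more work.
    interface Item {
        List<Item> discover();
    }

    static Set<Item> processAll(Item start) {
        Set<Item> seen = new HashSet<Item>();
        Queue<Item> work = new ArrayDeque<Item>();
        work.add(start);
        seen.add(start);
        while (!work.isEmpty()) {
            Item item = work.poll();
            for (Item next : item.discover()) {
                if (seen.add(next)) {
                    work.add(next); // adding while draining is fine for a Queue
                }
            }
        }
        return seen;
    }
}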
From source file:edu.uci.ics.hyracks.api.rewriter.runtime.SuperActivityOperatorNodePushable.java
public void init() throws HyracksDataException {
    Map<ActivityId, IOperatorNodePushable> startOperatorNodePushables =
            new HashMap<ActivityId, IOperatorNodePushable>();
    Queue<Pair<Pair<IActivity, Integer>, Pair<IActivity, Integer>>> childQueue =
            new LinkedList<Pair<Pair<IActivity, Integer>, Pair<IActivity, Integer>>>();
    List<IConnectorDescriptor> outputConnectors = null;

    /**
     * Set up the source operators
     */
    for (Entry<ActivityId, IActivity> entry : startActivities.entrySet()) {
        IOperatorNodePushable opPushable = entry.getValue().createPushRuntime(ctx, recordDescProvider,
                partition, nPartitions);
        startOperatorNodePushables.put(entry.getKey(), opPushable);
        operatprNodePushablesBFSOrder.add(opPushable);
        operatorNodePushables.put(entry.getKey(), opPushable);
        inputArity += opPushable.getInputArity();
        outputConnectors = parent.getActivityOutputMap().get(entry.getKey());
        if (outputConnectors != null) {
            for (IConnectorDescriptor conn : outputConnectors) {
                childQueue.add(parent.getConnectorActivityMap().get(conn.getConnectorId()));
            }
        }
    }

    /**
     * Using BFS (breadth-first search) to construct the runtime execution DAG
     */
    while (childQueue.size() > 0) {
        /**
         * expand the executing activities further to the downstream
         */
        if (outputConnectors != null && outputConnectors.size() > 0) {
            for (IConnectorDescriptor conn : outputConnectors) {
                if (conn != null) {
                    childQueue.add(parent.getConnectorActivityMap().get(conn.getConnectorId()));
                }
            }
        }

        /**
         * construct the source to destination information
         */
        Pair<Pair<IActivity, Integer>, Pair<IActivity, Integer>> channel = childQueue.poll();
        ActivityId sourceId = channel.getLeft().getLeft().getActivityId();
        int outputChannel = channel.getLeft().getRight();
        ActivityId destId = channel.getRight().getLeft().getActivityId();
        int inputChannel = channel.getRight().getRight();
        IOperatorNodePushable sourceOp = operatorNodePushables.get(sourceId);
        IOperatorNodePushable destOp = operatorNodePushables.get(destId);
        if (destOp == null) {
            destOp = channel.getRight().getLeft().createPushRuntime(ctx, recordDescProvider, partition,
                    nPartitions);
            operatprNodePushablesBFSOrder.add(destOp);
            operatorNodePushables.put(destId, destOp);
        }

        /**
         * construct the dataflow connection from a producer to a consumer
         */
        sourceOp.setOutputFrameWriter(outputChannel, destOp.getInputFrameWriter(inputChannel),
                recordDescProvider.getInputRecordDescriptor(destId, inputChannel));

        /**
         * traverse to the child of the current activity
         */
        outputConnectors = parent.getActivityOutputMap().get(destId);
    }
}
From source file:info.raack.appliancelabeler.machinelearning.appliancedetection.algorithms.BasePowerDrawDetectionAlgorithm.java
public AlgorithmPredictions algorithmCalculateApplianceEnergyUsePredictions(EnergyMonitor energyMonitor,
        Queue<EnergyTimestep> originTimesteps, ItemReader<SecondData> dataReader) {

    AlgorithmPredictions algorithmPredictions = new AlgorithmPredictions();

    Map<UserAppliance, List<EnergyTimestep>> applianceTimesteps = new HashMap<UserAppliance, List<EnergyTimestep>>();

    // get all of the possible user appliances and their last known on/off state
    List<UserAppliance> apps = database.getUserAppliancesForAlgorithmForEnergyMonitor(energyMonitor, getId());

    Map<UserAppliance, Double> currentTimestepEnergyConsumption = new HashMap<UserAppliance, Double>();

    for (UserAppliance appliance : apps) {
        currentTimestepEnergyConsumption.put(appliance, 0d);
        applianceTimesteps.put(appliance, new ArrayList<EnergyTimestep>());
    }

    Map<Long, List<ApplianceStateTransition>> stateTransitions = new HashMap<Long, List<ApplianceStateTransition>>();

    if (originTimesteps.size() > 0) {
        // ASSUMPTION - measurements are in chronological order

        if (apps.size() > 0) {
            // run whatever the energy delta state transition detector models predict for these new data points
            stateTransitions = detectStateTransitions(
                    database.getAlgorithmResultForMonitorAndAlgorithm(energyMonitor, this), apps.get(0),
                    dataReader);

            // reset the data reader
            dataReader.moveToBeginning();

            EnergyTimestep currentTimestep = originTimesteps.poll();

            Map<UserAppliance, ApplianceState> applianceStates = new HashMap<UserAppliance, ApplianceState>();

            // while we have timesteps remaining
            //logger.debug("Current timestep: " + currentTimestep.getStartTime() + " - " + currentTimestep.getEndTime());
            long currentTimestepEndTime = currentTimestep.getEndTime().getTime();

            // for each second in the measurement list
            try {
                for (SecondData measurement = dataReader.read(); measurement != null; measurement = dataReader.read()) {
                    long currentMeasurementTime = measurement.getCalLong();

                    while (currentMeasurementTime > currentTimestepEndTime) {
                        //logger.debug("End of timestep " + currentTimestep.getEndTime() + "; getting next timestamp");
                        // get new timestep
                        currentTimestep = originTimesteps.poll();

                        // need to check to see if the current timestep is not null - we won't process up to
                        // the very last second, as some will run over the last full 5 minute block
                        if (currentTimestep == null) {
                            // done!
                            break;
                        } else {
                            currentTimestepEndTime = currentTimestep.getEndTime().getTime();
                        }
                    }

                    // update the states of any of the appliances based on any state transitions at this second
                    if (stateTransitions.containsKey(currentMeasurementTime)) {
                        updateStateForAppliances(applianceStates, stateTransitions.get(currentMeasurementTime),
                                measurement);
                    } else {
                        updateStateForAppliances(applianceStates, new ArrayList<ApplianceStateTransition>(),
                                measurement);
                    }

                    for (UserAppliance userAppliance : currentTimestepEnergyConsumption.keySet()) {
                        // is appliance on?
                        if (applianceStates.get(userAppliance) != null
                                && applianceStates.get(userAppliance).isOn() == true) {
                            ApplianceState applianceState = applianceStates.get(userAppliance);
                            double previousConsumption = currentTimestepEnergyConsumption.get(userAppliance);

                            // BIG ASSUMPTION OF THIS ALGORITHM - appliances all take constant power during
                            // their operation = power delta (watts) * 1 second
                            double newConsumption = applianceState.getCurrentPower();

                            //logger.debug("Appliance " + userAppliance + " last transition was to on; adding " + newConsumption + " watt-seconds to energy consumption");
                            // add previous consumption plus new consumption
                            currentTimestepEnergyConsumption.put(userAppliance,
                                    previousConsumption + newConsumption);
                        }
                    }

                    if (currentMeasurementTime == currentTimestepEndTime) {
                        //logger.debug("Timestep start " + currentTimestep.getStartTime() + "; closing energy measurement");
                        // save current energy consumption in this timestep and reset counter
                        for (UserAppliance appliance : apps) {
                            if (currentTimestepEnergyConsumption.get(appliance) > 0) {
                                EnergyTimestep step = currentTimestep.copyWithoutEnergyOrAppliance();
                                step.setEnergyConsumed(currentTimestepEnergyConsumption.get(appliance));
                                step.setUserAppliance(appliance);
                                applianceTimesteps.get(appliance).add(step);
                            }
                            currentTimestepEnergyConsumption.put(appliance, 0d);
                        }

                        // get new timestep
                        currentTimestep = originTimesteps.poll();

                        // need to check to see if the current timestep is not null - we won't process up to
                        // the very last second, as some will run over the last full 5 minute block
                        if (currentTimestep == null) {
                            // done!
                            break;
                        } else {
                            currentTimestepEndTime = currentTimestep.getEndTime().getTime();
                        }
                    }
                }
            } catch (Exception e) {
                throw new RuntimeException("Cannot calculate energy consumption predictions", e);
            }
            logger.debug("Done with energy usage calculations");
        }
    }

    List<ApplianceStateTransition> onlyStateTransitions = new ArrayList<ApplianceStateTransition>();
    for (List<? extends ApplianceStateTransition> list : stateTransitions.values()) {
        onlyStateTransitions.addAll(list);
    }

    algorithmPredictions.setStateTransitions(onlyStateTransitions);
    algorithmPredictions.setEnergyTimesteps(applianceTimesteps);

    return algorithmPredictions;
}
From source file:org.unitime.timetable.solver.course.ui.ClassInfoModel.java
public void update() throws Exception {
    if (iChange == null)
        return;

    Vector<ClassAssignment> assignments = new Vector(iChange.getAssignments());
    Hashtable<Long, ClassAssignment> table = iChange.getAssignmentTable();
    iUnassignConflictingAssignments = !iForm.getKeepConflictingAssignments();
    iChange.getAssignments().clear();
    for (ClassAssignment assignment : assignments) {
        iChange.getAssignments().add(new ClassAssignmentInfo(assignment.getClazz(), assignment.getTime(),
                assignment.getDate(), assignment.getRooms(), table));
    }

    if (assignments.isEmpty()) {
        for (Iterator<ClassAssignment> i = iChange.getConflicts().iterator(); i.hasNext();) {
            ClassAssignment assignment = i.next();
            if (!assignment.getClassId().equals(getClazz().getClassId()))
                i.remove();
        }
    } else {
        iChange.getConflicts().clear();
    }

    for (ClassAssignment assignment : iChange.getAssignments()) {
        // Skip incomplete assignments (that have no time assigned yet)
        if (!assignment.hasTime())
            continue;

        // Check for room conflicts
        if (iUnassignConflictingAssignments) {
            if (assignment.getRooms() != null)
                for (ClassRoomInfo room : assignment.getRooms()) {
                    if (!room.isIgnoreRoomChecks()) {
                        for (Assignment a : room.getLocation().getCommitedAssignments()) {
                            if (a.getClazz().isCancelled())
                                continue;
                            if (assignment.getTime().overlaps(new ClassTimeInfo(a))
                                    && !a.getClazz().canShareRoom(assignment.getClazz())) {
                                if (iChange.getCurrent(a.getClassId()) == null
                                        && iChange.getConflict(a.getClassId()) == null)
                                    iChange.getConflicts().add(new ClassAssignment(a));
                            }
                        }
                    }
                }

            // Check for instructor conflicts
            if (assignment.getInstructors() != null)
                for (ClassInstructorInfo instructor : assignment.getInstructors()) {
                    if (!instructor.isLead())
                        continue;
                    // check all departmental instructors with the same external id
                    for (DepartmentalInstructor di : DepartmentalInstructor
                            .getAllForInstructor(instructor.getInstructor().getInstructor())) {
                        for (ClassInstructor ci : di.getClasses()) {
                            if (ci.equals(instructor.getInstructor()))
                                continue;
                            Assignment a = ci.getClassInstructing().getCommittedAssignment();
                            if (a == null || a.getClazz().isCancelled())
                                continue;
                            if (assignment.getTime() != null
                                    && assignment.getTime().overlaps(new ClassTimeInfo(a))
                                    && !a.getClazz().canShareInstructor(assignment.getClazz())) {
                                if (iChange.getCurrent(a.getClassId()) == null
                                        && iChange.getConflict(a.getClassId()) == null)
                                    iChange.getConflicts().add(new ClassAssignment(a));
                            }
                        }
                    }
                    /*
                    // Potential speed-up #1) only check the current department instructors
                    for (ClassInstructor ci : instructor.getInstructor().getInstructor().getClasses()) {
                        if (ci.equals(instructor.getInstructor())) continue;
                        Assignment a = ci.getClassInstructing().getCommittedAssignment();
                        if (a == null) continue;
                        if (assignment.getTime().overlaps(new ClassTimeInfo(a))) {
                            if (iChange.getCurrent(a.getClassId()) == null && iChange.getConflict(a.getClassId()) == null)
                                iChange.getConflicts().add(new ClassAssignment(a));
                        }
                    }
                    */
                    /*
                    // Potential speed-up #2) use instructor assignments from the solution
                    for (Assignment a : instructor.getInstructor().getInstructor().getCommitedAssignments()) {
                        if (assignment.getTime().overlaps(new ClassTimeInfo(a))) {
                            if (iChange.getCurrent(a.getClassId()) == null && iChange.getConflict(a.getClassId()) == null)
                                iChange.getConflicts().add(new ClassAssignment(a));
                        }
                    }
                    */
                }
        }

        // Check the course structure for conflicts
        Class_ clazz = assignment.getClazz(Class_DAO.getInstance().getSession());
        // a) all parents
        Class_ parent = clazz.getParentClass();
        while (parent != null) {
            if (iChange.getCurrent(parent.getUniqueId()) == null
                    && iChange.getConflict(parent.getUniqueId()) == null) {
                Assignment a = parent.getCommittedAssignment();
                if (a != null && !a.getClazz().isCancelled()
                        && assignment.getTime().overlaps(new ClassTimeInfo(a))) {
                    iChange.getConflicts().add(new ClassAssignment(a));
                }
            }
            parent = parent.getParentClass();
        }
        // b) all children
        Queue<Class_> children = new LinkedList();
        try {
            children.addAll(clazz.getChildClasses());
        } catch (LazyInitializationException e) {
            sLog.error("This should never happen.");
            Class_ c = Class_DAO.getInstance().get(assignment.getClassId());
            children.addAll(c.getChildClasses());
        }
        Class_ child = null;
        while ((child = children.poll()) != null) {
            if (iChange.getCurrent(child.getUniqueId()) == null
                    && iChange.getConflict(child.getUniqueId()) == null) {
                Assignment a = child.getCommittedAssignment();
                if (a != null && !a.getClazz().isCancelled()
                        && assignment.getTime().overlaps(new ClassTimeInfo(a))) {
                    iChange.getConflicts().add(new ClassAssignment(a));
                }
            }
            if (!child.getChildClasses().isEmpty())
                children.addAll(child.getChildClasses());
        }
        // c) all single-class subparts
        for (Iterator i = clazz.getSchedulingSubpart().getInstrOfferingConfig().getSchedulingSubparts()
                .iterator(); i.hasNext();) {
            SchedulingSubpart ss = (SchedulingSubpart) i.next();
            if (ss.getClasses().size() == 1) {
                child = (Class_) ss.getClasses().iterator().next();
                if (iChange.getCurrent(child.getUniqueId()) == null
                        && iChange.getConflict(child.getUniqueId()) == null) {
                    Assignment a = child.getCommittedAssignment();
                    if (a != null && !a.getClazz().isCancelled()
                            && assignment.getTime().overlaps(new ClassTimeInfo(a))) {
                        iChange.getConflicts().add(new ClassAssignment(a));
                    }
                }
                if (!child.getChildClasses().isEmpty())
                    children.addAll(child.getChildClasses());
            }
        }
        //TODO: Check for other HARD conflicts (e.g., distribution constraints)
    }
}