List of usage examples for java.util.Queue.isEmpty()
boolean isEmpty();
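Before the real-world examples, here is a minimal, self-contained sketch of the drain-loop idiom that nearly every example below relies on: poll from the queue until isEmpty() returns true. The class name QueueIsEmptyExample is illustrative, not taken from any of the source files below.

import java.util.ArrayDeque;
import java.util.Queue;

public class QueueIsEmptyExample {
    public static void main(String[] args) {
        Queue<String> queue = new ArrayDeque<>();
        queue.offer("first");
        queue.offer("second");

        // isEmpty() is the usual loop guard when draining a queue,
        // e.g. in the BFS-style traversals shown below
        while (!queue.isEmpty()) {
            System.out.println(queue.poll());
        }

        System.out.println(queue.isEmpty()); // true: the queue has been drained
    }
}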
From source file:org.jasig.portal.io.xml.JaxbPortalDataHandlerService.java
@Override
public void importData(File directory, String pattern, final BatchImportOptions options) {
    if (!directory.exists()) {
        throw new IllegalArgumentException("The specified directory '" + directory + "' does not exist");
    }

    //Create the file filter to use when searching for files to import
    final FileFilter fileFilter;
    if (pattern != null) {
        fileFilter = new AntPatternFileFilter(true, false, pattern, this.dataFileExcludes);
    } else {
        fileFilter = new AntPatternFileFilter(true, false, this.dataFileIncludes, this.dataFileExcludes);
    }

    //Determine the parent directory to log to
    final File logDirectory = determineLogDirectory(options, "import");

    //Setup reporting file
    final File importReport = new File(logDirectory, "data-import.txt");
    final PrintWriter reportWriter;
    try {
        reportWriter = new PrintWriter(new PeriodicFlushingBufferedWriter(500, new FileWriter(importReport)));
    } catch (IOException e) {
        throw new RuntimeException("Failed to create FileWriter for: " + importReport, e);
    }

    //Convert directory to URI String to provide better logging output
    final URI directoryUri = directory.toURI();
    final String directoryUriStr = directoryUri.toString();
    IMPORT_BASE_DIR.set(directoryUriStr);
    try {
        //Scan the specified directory for files to import
        logger.info("Scanning for files to Import from: {}", directory);
        final PortalDataKeyFileProcessor fileProcessor = new PortalDataKeyFileProcessor(this.dataKeyTypes, options);
        this.directoryScanner.scanDirectoryNoResults(directory, fileFilter, fileProcessor);
        final long resourceCount = fileProcessor.getResourceCount();
        logger.info("Found {} files to Import from: {}", resourceCount, directory);

        //See if the import should fail on error
        final boolean failOnError = options != null ? options.isFailOnError() : true;

        //Map of files to import, grouped by type
        final ConcurrentMap<PortalDataKey, Queue<Resource>> dataToImport = fileProcessor.getDataToImport();

        //Import the data files
        for (final PortalDataKey portalDataKey : this.dataKeyImportOrder) {
            final Queue<Resource> files = dataToImport.remove(portalDataKey);
            if (files == null) {
                continue;
            }

            final Queue<ImportFuture<?>> importFutures = new LinkedList<ImportFuture<?>>();
            final List<FutureHolder<?>> failedFutures = new LinkedList<FutureHolder<?>>();

            final int fileCount = files.size();
            logger.info("Importing {} files of type {}", fileCount, portalDataKey);
            reportWriter.println(portalDataKey + "," + fileCount);

            while (!files.isEmpty()) {
                final Resource file = files.poll();

                //Check for completed futures on every iteration, needed to fail as fast as possible on an import exception
                final List<FutureHolder<?>> newFailed = waitForFutures(importFutures, reportWriter, logDirectory, false);
                failedFutures.addAll(newFailed);

                final AtomicLong importTime = new AtomicLong(-1);

                //Create import task
                final Callable<Object> task = new CallableWithoutResult() {
                    @Override
                    protected void callWithoutResult() {
                        IMPORT_BASE_DIR.set(directoryUriStr);
                        importTime.set(System.nanoTime());
                        try {
                            importData(file, portalDataKey);
                        } finally {
                            importTime.set(System.nanoTime() - importTime.get());
                            IMPORT_BASE_DIR.remove();
                        }
                    }
                };

                //Submit the import task
                final Future<?> importFuture = this.importExportThreadPool.submit(task);

                //Add the future for tracking
                importFutures.offer(new ImportFuture(importFuture, file, portalDataKey, importTime));
            }

            //Wait for all of the imports of this type to complete
            final List<FutureHolder<?>> newFailed = waitForFutures(importFutures, reportWriter, logDirectory, true);
            failedFutures.addAll(newFailed);

            if (failOnError && !failedFutures.isEmpty()) {
                throw new RuntimeException(failedFutures.size() + " " + portalDataKey + " entities failed to import.\n\n"
                        + "\tPer entity exception logs and a full report can be found in " + logDirectory + "\n");
            }

            reportWriter.flush();
        }

        if (!dataToImport.isEmpty()) {
            throw new IllegalStateException(
                    "The following PortalDataKeys are not listed in the dataTypeImportOrder List: " + dataToImport.keySet());
        }

        logger.info("For a detailed report on the data import see " + importReport);
    } catch (InterruptedException e) {
        throw new RuntimeException("Interrupted while waiting for entities to import", e);
    } finally {
        IOUtils.closeQuietly(reportWriter);
        IMPORT_BASE_DIR.remove();
    }
}
From source file:org.opendaylight.netvirt.elan.internal.ElanInterfaceManager.java
List<ListenableFuture<Void>> handleunprocessedElanInterfaces(ElanInstance elanInstance) throws ElanException {
    List<ListenableFuture<Void>> futures = new ArrayList<>();
    Queue<ElanInterface> elanInterfaces = unProcessedElanInterfaces.get(elanInstance.getElanInstanceName());
    if (elanInterfaces == null || elanInterfaces.isEmpty()) {
        return futures;
    }
    for (ElanInterface elanInterface : elanInterfaces) {
        String interfaceName = elanInterface.getName();
        InterfaceInfo interfaceInfo = interfaceManager.getInterfaceInfo(interfaceName);
        addElanInterface(futures, elanInterface, interfaceInfo, elanInstance);
    }
    return futures;
}
From source file:edu.brown.hstore.dtxn.LocalTransaction.java
/**
 * Store a VoltTable result that this transaction is waiting for.
 * @param partition The partition id that generated the result
 * @param dependency_id The dependency id that this result corresponds to
 * @param key The hackish partition+dependency key
 * @param force If false, then we will check to make sure the result isn't a duplicate
 * @param result The actual data for the result
 */
private void addResult(final int partition, final int dependency_id, final int key, final boolean force,
        VoltTable result) {
    final int base_offset = hstore_site.getLocalPartitionOffset(this.base_partition);
    assert (result != null);
    assert (this.round_state[base_offset] == RoundState.INITIALIZED
            || this.round_state[base_offset] == RoundState.STARTED) : String.format(
                    "Invalid round state %s for %s at partition %d", this.round_state[base_offset], this,
                    this.base_partition);

    if (d)
        LOG.debug(String.format("%s - Attempting to add new result for %s [numRows=%d]", this,
                debugPartDep(partition, dependency_id), result.getRowCount()));

    // If the txn is still in the INITIALIZED state, then we just want to queue up the results
    // for now. They will get released when we switch to STARTED.
    // This is the only part that we need to synchronize on
    if (force == false) {
        if (this.predict_singlePartition == false)
            this.state.lock.lock();
        try {
            if (this.round_state[base_offset] == RoundState.INITIALIZED) {
                assert (this.state.queued_results.containsKey(key) == false) : String.format(
                        "%s - Duplicate result %s [key=%d]", this, debugPartDep(partition, dependency_id), key);
                this.state.queued_results.put(key, result);
                if (d)
                    LOG.debug(String.format("%s - Queued result %s until the round is started [key=%s]", this,
                            debugPartDep(partition, dependency_id), key));
                return;
            }
            if (d) {
                LOG.debug(String.format("%s - Storing new result for key %d", this, key));
                if (t)
                    LOG.trace("Result stmt_ctr(key=" + key + "): "
                            + this.state.results_dependency_stmt_ctr.get(key));
            }
        } finally {
            if (this.predict_singlePartition == false)
                this.state.lock.unlock();
        } // SYNCH
    }

    // Each partition+dependency_id should be unique within the Statement batch.
    // So as the results come back to us, we have to figure out which Statement it belongs to
    DependencyInfo dinfo = null;
    Queue<Integer> queue = this.state.results_dependency_stmt_ctr.get(key);
    assert (queue != null) : String.format("Unexpected %s in %s", debugPartDep(partition, dependency_id), this);
    assert (queue.isEmpty() == false) : String.format(
            "No more statements for %s in %s [key=%d]\nresults_dependency_stmt_ctr = %s",
            debugPartDep(partition, dependency_id), this, key, this.state.results_dependency_stmt_ctr);

    int stmt_index = queue.remove().intValue();
    dinfo = this.getDependencyInfo(dependency_id);
    assert (dinfo != null) : String.format("Unexpected %s for %s [stmt_index=%d]\n%s",
            debugPartDep(partition, dependency_id), this, stmt_index, result);
    dinfo.addResult(partition, result);

    if (this.predict_singlePartition == false)
        this.state.lock.lock();
    try {
        this.state.received_ctr++;

        // Check whether we need to start running stuff now
        // 2011-12-31: This needs to be synchronized because they might check
        //             whether there are no more blocked tasks before we
        //             can add to_unblock to the unblocked_tasks queue
        if (this.state.blocked_tasks.isEmpty() == false && dinfo.hasTasksReady()) {
            Collection<WorkFragment> to_unblock = dinfo.getAndReleaseBlockedWorkFragments();
            assert (to_unblock != null);
            assert (to_unblock.isEmpty() == false);
            if (d)
                LOG.debug(String.format(
                        "%s - Got %d WorkFragments to unblock that were waiting for DependencyId %d",
                        this, to_unblock.size(), dinfo.getDependencyId()));
            this.state.blocked_tasks.removeAll(to_unblock);
            this.state.unblocked_tasks.addLast(to_unblock);
        } else if (d) {
            LOG.debug(String.format(
                    "%s - No WorkFragments to unblock after storing %s [blockedTasks=%d, hasTasksReady=%s]", this,
                    debugPartDep(partition, dependency_id), this.state.blocked_tasks.size(), dinfo.hasTasksReady()));
        }

        if (this.state.dependency_latch != null) {
            this.state.dependency_latch.countDown();

            // HACK: If the latch is now zero, then push an EMPTY set into the unblocked queue.
            // This will cause the blocked PartitionExecutor thread to wake up and realize that it's done
            if (this.state.dependency_latch.getCount() == 0) {
                if (d)
                    LOG.debug(String.format(
                            "%s - Pushing EMPTY_SET to PartitionExecutor because all the dependencies have arrived!",
                            this));
                this.state.unblocked_tasks.addLast(EMPTY_FRAGMENT_SET);
            }
            if (d)
                LOG.debug(String.format("%s - Setting CountDownLatch to %d", this,
                        this.state.dependency_latch.getCount()));
        }

        this.state.still_has_tasks = this.state.blocked_tasks.isEmpty() == false
                || this.state.unblocked_tasks.isEmpty() == false;
    } finally {
        if (this.predict_singlePartition == false)
            this.state.lock.unlock();
    } // SYNCH

    if (d) {
        Map<String, Object> m = new ListOrderedMap<String, Object>();
        m.put("Blocked Tasks", (this.state != null ? this.state.blocked_tasks.size() : null));
        m.put("DependencyInfo", dinfo.toString());
        m.put("hasTasksReady", dinfo.hasTasksReady());
        LOG.debug(this + " - Status Information\n" + StringUtil.formatMaps(m));
        if (t)
            LOG.trace(this.debug());
    }
}
From source file:HashMapComponentGraph.java
/**
 * Remove an edge. If the removal results in one or more isolated nodes,
 * these will be removed from the graph implicitly.
 *
 * For non-dense and relatively fragmented graphs, this operation will be
 * cheap. Otherwise, for dense and strongly connected graphs, the operation
 * could include a full traversal of the graph visiting all present edges,
 * resulting in an O((n-1)^2) operation, where n is the number of nodes in
 * the graph.
 *
 * @param pair
 *            edge to be removed
 * @return true if the edge was actually removed, false if the edge did not
 *         exist before the call.
 */
@Override
public final boolean removeEdge(Pair<T> pair) {
    // don't act if edge is not present
    if (!edgeData.containsKey(pair)) {
        // System.out.println("Edge NOT present");
        return false;
    } else {
        edgeData.remove(pair);
    }

    // get the nodes out of the node adjacency hash map (at this point we
    // know that the nodes must exist, because the edge exists)
    Node a = new Node(pair.getFirst());
    if (allnodes.containsKey(a)) {
        a = allnodes.get(a);
    } else {
        // not possible
        throw new IllegalStateException(
                "ComponentGraph.removeEdge(): Node did not have an adjacency entry. ComponentGraph corrupted.");
    }
    Node b = new Node(pair.getSecond());
    if (allnodes.containsKey(b)) {
        b = allnodes.get(b);
    } else {
        // this is not possible
        throw new IllegalStateException(
                "ComponentGraph.removeEdge(): Node did not have an adjacency entry. ComponentGraph corrupted.");
    }

    // if b is fixed, interchange a and b (now, if b is fixed, both a and b
    // are fixed)
    if (nodeClassifier.isDelimitor(b.element)) {
        Node t = a;
        a = b;
        b = t;
    }

    // remove references to each node, in each node's connected node sets
    edges.get(a).remove(b);
    edges.get(b).remove(a);

    // if no edges left in set, remove the set
    if (edges.get(a).isEmpty())
        edges.remove(a);
    // if no edges left in set, remove it
    if (edges.get(b).isEmpty())
        edges.remove(b);

    // Cases
    // i. Both nodes are delimiters
    //    do nothing
    // ii. One node is delimiter:
    //    a). non-delimiter is in a component
    //        do nothing (node could now be alone in its component);
    //        if node contains no other edges, delete it from its component
    //    b). non-delimiter is not in a component (not possible)
    //        do nothing/ report fatal error
    // iii. No node is delimiter:
    //    a). no node is in a component (not possible)
    //        do nothing/ error
    //    b). one node is in a component (not possible)
    //        do nothing
    //    c). both nodes are in a component
    //        1. the same component
    //           remove edge, traverse breadth-first from each node to
    //           determine if component should be split.
    //        2. different components (not possible)
    //           do nothing/ error

    // both nodes are fixed
    if (nodeClassifier.isDelimitor(b.element)) {
        // do nothing
        // return;
        // one is fixed
    } else if (nodeClassifier.isDelimitor(a.element)) {
        if (component.containsKey(b)) { // only possible option
            // System.out.println("One fixed node");
            Component g = component.get(b);
            // check for another edge on this node
            if (!edges.containsKey(b)) {
                // System.out.println("b did not have any edges");
                // remove the node from component
                component.remove(b);
                // notify handler
                componenthandler.nodeRemovedFromComponent(g.element, b.element);
                // b is now free
                freenodes.add(b);
                Set<Node> s = componentNodes.get(g);
                if (!s.remove(b)) {
                    System.out.println("ALARM");
                    System.exit(0);
                }
                // remove group if empty
                if (s.isEmpty()) {
                    // System.out.println("groups entry removed");
                    componentNodes.remove(g);
                    // TODO notify handler
                } else {
                    System.out.println("Group isn't empty, why??");
                    // System.exit(0);
                }
            } else {
                // b has edges left, and is part of a group. We're done
            }
            // remove edge from component (even if b was not removed from
            // the group)
            Set<Pair<T>> sp = componentEdges.get(g);
            sp.remove(pair);
            // remove group if empty
            if (sp.isEmpty()) {
                // System.out.println("grouppair entry removed " + g);
                componentEdges.remove(g);
            }
        } else {
            throw new IllegalStateException(
                    "HashMapComponentGraph.removeEdge(): A connected non-delimiter node was not in a component. ComponentGraph corrupted.");
        }
        // return;
        // none is fixed
    } else {
        // if b has edges, interchange a and b
        // (now, if b has edges, both a and b have edges)
        if (edges.containsKey(b)) {
            Node t = a;
            a = b;
            b = t;
        }

        // both are in the same group (only possible option)
        Component oldgroup = component.get(a);
        if (oldgroup != component.get(b)) {
            System.out.println("Different groups??!");
            System.exit(0);
        }

        // both have edges
        if (edges.containsKey(b)) {
            final int NONE = 0;
            final int RED = 1;
            final int BLUE = 2;

            // clear node colors in entire group
            Iterator<Node> i = componentNodes.get(oldgroup).iterator();
            while (i.hasNext()) {
                i.next().color = NONE;
            }

            // perform breadth-first traversal,
            // to determine if group has become disjoint
            boolean disjoint = true;
            Queue<Node> queue = new LinkedList<Node>();
            Set<Pair<T>> blueEdges = new LinkedHashSet<Pair<T>>();
            a.color = RED;
            b.color = BLUE;
            queue.add(a);
            queue.add(b);

            // traverse
            while (!queue.isEmpty()) {
                Node node = queue.poll();

                // add node's neighbors to queue
                Iterator<Node> neighbors = edges.get(node).iterator();
                while (neighbors.hasNext()) {
                    Node neighbor = neighbors.next();

                    // remember visited edges
                    if (node.color == BLUE)
                        blueEdges.add(new Pair<T>(node.element, neighbor.element));

                    if (nodeClassifier.isDelimitor(neighbor.element)) {
                        // ignore fixed nodes
                        continue;
                    } else if (neighbor.color == NONE) {
                        neighbor.color = node.color;
                        queue.add(neighbor);
                        continue;
                    } else if (neighbor.color != node.color) {
                        // group is connected
                        disjoint = false;
                        break;
                    } else {
                        // already visited
                        continue;
                    }
                } // while neighbors
            } // while queue

            // handle result of traversal
            if (disjoint) {
                // System.out.println("Splitting group");
                // new group
                Component newgroup = new Component(componenthandler.newComponent());
                Set<Node> blues = new LinkedHashSet<Node>();

                // find all blue nodes
                Iterator<Node> iter = componentNodes.get(oldgroup).iterator();
                while (iter.hasNext()) {
                    Node node = iter.next();
                    if (node.color == BLUE) {
                        blues.add(node);
                        component.put(node, newgroup);
                    }
                }

                // impossible
                if (blues.isEmpty()) {
                    System.out.println("Why was no blue nodes found?");
                    System.exit(0);
                }

                // remove bodies from the old component and add the new component
                componentNodes.get(oldgroup).removeAll(blues);
                componentNodes.put(newgroup, blues);

                // remove blue edges from the red group and create a new
                // group with pairs (ng)
                componentEdges.get(oldgroup).removeAll(blueEdges);
                componentEdges.get(oldgroup).remove(pair); // the edge that was to be removed
                componentEdges.put(newgroup, blueEdges);
                // return;
            } else {
                // System.out.println("Group still connected");
                // we keep group as it is, but remove the pair (edge)
                Set<Pair<T>> sp = componentEdges.get(oldgroup);
                sp.remove(pair);
                // remove group if empty
                if (sp.isEmpty()) {
                    // System.out.println("grouppair entry removed " + oldgroup);
                    componentEdges.remove(oldgroup);
                }
                // return;
            }
            // a has an edge and b does not
        } else if (edges.containsKey(a)) {
            // keep group as it is, but wipe out b
            component.remove(b);
            componentNodes.get(oldgroup).remove(b);
            // b is now a free node
            freenodes.add(b);
            // notify handler that b is removed from oldgroup
            componenthandler.nodeRemovedFromComponent(oldgroup.element, b.element);
            if (componentNodes.get(oldgroup).isEmpty()) {
                // never happens
                System.out.println("How can group be empty?");
                componentNodes.remove(oldgroup);
            }
            // remove from pairs
            // System.out.println("removing " + pair + " from group pairs " + oldgroup);
            Set<Pair<T>> sp = componentEdges.get(oldgroup);
            sp.remove(pair);
            // remove group if empty
            if (sp.isEmpty()) {
                // System.out.println("grouppair entry removed " + oldgroup);
                componentEdges.remove(oldgroup);
            }
            // neither has edges
        } else {
            // clear out group entirely
            component.remove(a);
            component.remove(b);
            // both a and b are free nodes now
            freenodes.add(a);
            freenodes.add(b);
            // notify handler that a and b are removed
            componenthandler.nodeRemovedFromComponent(oldgroup.element, a.element);
            componenthandler.nodeRemovedFromComponent(oldgroup.element, b.element);
            // assume that the group only contains a and b?
            componentNodes.get(oldgroup).remove(b);
            componentNodes.get(oldgroup).remove(a);
            if (componentNodes.get(oldgroup).isEmpty()) {
                componentNodes.remove(oldgroup);
            } else {
                // impossible
                System.out.println("Hmm still stuff in group but no outgoing edges?"
                        + componentNodes.get(oldgroup) + " a and b is " + a + ", " + b);
                System.exit(0);
            }
            // remove from pairs
            Set<Pair<T>> sp = componentEdges.get(oldgroup);
            sp.remove(pair);
            // remove group if empty
            if (sp.isEmpty()) {
                // System.out.println("grouppair entry removed " + oldgroup);
                componentEdges.remove(oldgroup);
            }
        } // neither has edges
    } // none is fixed

    return true;
}
From source file:com.jiangge.apns4j.impl.ApnsConnectionImpl.java
private void startErrorWorker() {
    Thread thread = new Thread(new Runnable() {

        @Override
        public void run() {
            Socket curSocket = socket;
            try {
                if (!isSocketAlive(curSocket)) {
                    return;
                }
                InputStream socketIs = curSocket.getInputStream();
                byte[] res = new byte[ERROR_RESPONSE_BYTES_LENGTH];
                int size = 0;

                while (true) {
                    try {
                        size = socketIs.read(res);
                        if (size > 0 || size == -1) {
                            // break, when something was read or there is no data any more
                            break;
                        }
                    } catch (SocketTimeoutException e) {
                        // There is no data. Keep reading.
                    }
                }

                int command = res[0];
                // error response: close the socket and resend the notifications
                if (size == res.length && command == Command.ERROR) {
                    int status = res[1];
                    int errorId = ApnsTools.parse4ByteInt(res[2], res[3], res[4], res[5]);
                    if (logger.isInfoEnabled()) {
                        logger.info(String.format("%s Received error response. status: %s, id: %s, error-desc: %s",
                                connName, status, errorId, ErrorResponse.desc(status)));
                    }

                    Queue<PushNotification> resentQueue = new LinkedList<PushNotification>();
                    synchronized (lock) {
                        boolean found = false;
                        errorHappendedLastConn = true;
                        while (!notificationCachedQueue.isEmpty()) {
                            PushNotification pn = notificationCachedQueue.poll();
                            if (pn.getId() == errorId) {
                                found = true;
                            } else {
                                /*
                                 * https://developer.apple.com/library/ios/documentation/NetworkingInternet/Conceptual/RemoteNotificationsPG/Chapters/CommunicatingWIthAPS.html
                                 * As the document says, add the notifications which need to be
                                 * resent to the queue. Ignore the error one.
                                 */
                                if (found) {
                                    resentQueue.add(pn);
                                }
                            }
                        }
                        if (!found) {
                            logger.warn(connName
                                    + " Didn't find error-notification in the queue. Maybe it's time to adjust cache length. id: "
                                    + errorId);
                        }
                    }
                    // resend notifications
                    if (!resentQueue.isEmpty()) {
                        ApnsResender.getInstance().resend(name, resentQueue);
                    }
                } else {
                    // ignore and continue reading
                    logger.error(connName + " Unexpected command or size. command: " + command + " , size: " + size);
                }
            } catch (Exception e) {
                // logger.error(connName + " " + e.getMessage(), e);
                logger.error(connName + " " + e.getMessage());
            } finally {
                // close the old socket although it may have been closed once before
                closeSocket(curSocket);
            }
        }
    });
    thread.start();
}
From source file:fr.inria.oak.paxquery.common.xml.navigation.NavigationTreePatternNode.java
/**
 * Returns the descendants of this node in BFS order
 *
 * @return all the descendants of the node in BFS order
 */
public ArrayList<NavigationTreePatternNode> getBFSOrderedDescendants() {
    ArrayList<NavigationTreePatternNode> nodes = new ArrayList<NavigationTreePatternNode>();
    // BFS uses a Queue data structure
    Queue<NavigationTreePatternNode> q = new LinkedList<NavigationTreePatternNode>();
    q.add(this);
    nodes.add(this);
    while (!q.isEmpty()) {
        NavigationTreePatternNode n = q.remove();
        for (NavigationTreePatternNode child : n.getChildrenList()) {
            nodes.add(child);
            q.add(child);
        }
    }
    return nodes;
}
From source file:it.geosolutions.geobatch.actions.freemarker.FreeMarkerAction.java
/**
 * Removes TemplateModelEvents from the queue and puts them into the root data model.
 */
public Queue<EventObject> execute(Queue<EventObject> events) throws ActionException {

    listenerForwarder.started();
    listenerForwarder.setTask("initializing the FreeMarker engine");
    if (!initialized) {
        try {
            initialize();
        } catch (IllegalArgumentException e) {
            throw new ActionException(this, e.getLocalizedMessage(), e.getCause());
        } catch (IOException e) {
            throw new ActionException(this, e.getLocalizedMessage(), e.getCause());
        }
    }

    listenerForwarder.setTask("build the output absolute file name");

    // build the output absolute file name
    File outputDir = computeOutputDir(); // may throw ActionException

    // return queue
    final Queue<EventObject> ret = new LinkedList<EventObject>();

    listenerForwarder.setTask("Building/getting the root data structure");
    /*
     * Building/getting the root data structure
     */
    final Map<String, Object> root = conf.getRoot() != null ? conf.getRoot() : new HashMap<String, Object>();

    // list of incoming events to inject into the root data model
    final List<TemplateModel> list;
    if (conf.isNtoN()) {
        list = new ArrayList<TemplateModel>(events.size());
    } else {
        list = new ArrayList<TemplateModel>(1);
    }
    // append the list of adapted event objects
    root.put(TemplateModelEvent.EVENT_KEY, list);

    while (!events.isEmpty()) {
        // the adapted event
        final TemplateModelEvent ev;
        final TemplateModel dataModel;
        try {
            if ((ev = adapter(events.remove())) != null) {
                listenerForwarder.setTask("Try to get a Template DataModel from the Adapted event");
                // try to get a Template DataModel from the Adapted event
                dataModel = ev.getModel(processor);
            } else {
                final String message = "Unable to append the event: unrecognized format. SKIPPING...";
                if (LOGGER.isErrorEnabled()) {
                    LOGGER.error(message);
                }
                if (conf.isFailIgnored()) {
                    continue;
                } else {
                    final ActionException e = new ActionException(this, message);
                    listenerForwarder.failed(e);
                    throw e;
                }
            }
        } catch (TemplateModelException tme) {
            final String message = "Unable to wrap the passed object: " + tme.getLocalizedMessage();
            if (LOGGER.isErrorEnabled())
                LOGGER.error(message);
            if (conf.isFailIgnored()) {
                continue;
            } else {
                listenerForwarder.failed(tme);
                throw new ActionException(this, tme.getLocalizedMessage());
            }
        } catch (Exception ioe) {
            final String message = "Unable to produce the output: " + ioe.getLocalizedMessage();
            if (LOGGER.isErrorEnabled())
                LOGGER.error(message);
            if (conf.isFailIgnored()) {
                continue;
            } else {
                listenerForwarder.failed(ioe);
                throw new ActionException(this, ioe.getLocalizedMessage(), ioe);
            }
        }

        listenerForwarder.setTask("Generating the output");
        /*
         * If isNtoN(): build a file for each incoming event (Template DataModel).
         * Otherwise the entire queue of incoming objects is transformed into a
         * list of data models, and only one file is generated.
         */
        if (conf.isNtoN()) {
            if (list.size() > 0) {
                list.remove(0);
            }
            list.add(dataModel);

            final File outputFile;
            // append the incoming data structure
            try {
                outputFile = buildOutput(outputDir, root);
            } catch (ActionException e) {
                if (LOGGER.isErrorEnabled())
                    LOGGER.error(e.getLocalizedMessage(), e);
                if (conf.isFailIgnored()) {
                    continue;
                } else {
                    listenerForwarder.failed(e);
                    throw e;
                }
            }
            // add the file to the return queue
            ret.add(new FileSystemEvent(outputFile.getAbsoluteFile(), FileSystemEventType.FILE_ADDED));
        } else {
            list.add(dataModel);
        }
    }

    if (!conf.isNtoN()) {
        final File outputFile;
        // append the incoming data structure
        try {
            outputFile = buildOutput(outputDir, root);
        } catch (ActionException e) {
            if (LOGGER.isErrorEnabled())
                LOGGER.error(e.getLocalizedMessage(), e);
            listenerForwarder.failed(e);
            throw e;
        }
        // add the file to the return queue
        ret.add(new FileSystemEvent(outputFile.getAbsoluteFile(), FileSystemEventType.FILE_ADDED));
    }

    listenerForwarder.completed();
    return ret;
}
From source file:org.shaman.terrain.polygonal.PolygonalMapGenerator.java
private void findOceans() {
    for (Graph.Center c : graph.centers) {
        c.ocean = false;
        c.water = false;
    }
    for (Graph.Corner c : graph.corners) {
        c.ocean = false;
    }
    // set water parameter of centers
    float LAKE_THRESHOLD = 0.3f;
    Queue<Graph.Center> queue = new ArrayDeque<>();
    for (Graph.Center p : graph.centers) {
        int numWater = 0;
        for (Graph.Corner c : p.corners) {
            if (c.border || c.ocean) {
                p.border = true;
                p.water = true;
                p.ocean = true;
                queue.add(p);
                break;
            }
            if (c.water) {
                numWater++;
            }
        }
        p.water = (p.ocean || numWater >= p.corners.size() * LAKE_THRESHOLD);
    }
    LOG.info("border cells: " + queue.size());
    // flood fill from the border cells to distinguish between oceans and lakes
    while (!queue.isEmpty()) {
        Graph.Center c = queue.poll();
        for (Graph.Center r : c.neighbors) {
            if (r.water && !r.ocean) {
                r.ocean = true;
                queue.add(r);
            }
        }
    }
    // assign coast tag
    for (Graph.Corner q : graph.corners) {
        q.coast = false;
    }
    for (Graph.Center c : graph.centers) {
        if (c.ocean) {
            for (Graph.Corner q : c.corners) {
                if (!q.water) {
                    q.coast = true;
                } else {
                    q.ocean = true;
                }
            }
        }
    }
    // assign basic biomes
    int oceanCount = 0;
    int lakeCount = 0;
    int landCount = 0;
    for (Graph.Center c : graph.centers) {
        if (c.ocean) {
            c.biome = Biome.OCEAN;
            oceanCount++;
        } else if (c.water) {
            c.biome = Biome.LAKE;
            lakeCount++;
        } else {
            c.biome = Biome.BEACH;
            landCount++;
        }
    }
    LOG.log(Level.INFO, "ocean cells: {0}, lake cells: {1}, land cells: {2}",
            new Object[] { oceanCount, lakeCount, landCount });
}
From source file:org.evosuite.setup.TestClusterGenerator.java
/**
 * All public methods defined directly in the SUT should be covered
 *
 * TODO: What if we use instrument_parent?
 *
 * @param targetClass
 */
@SuppressWarnings("unchecked")
private void initializeTargetMethods() throws RuntimeException, ClassNotFoundException {
    logger.info("Analyzing target class");
    Class<?> targetClass = Properties.getTargetClass();

    TestCluster cluster = TestCluster.getInstance();

    Set<Class<?>> targetClasses = new LinkedHashSet<Class<?>>();
    if (targetClass == null) {
        throw new RuntimeException("Failed to load " + Properties.TARGET_CLASS);
    }
    targetClasses.add(targetClass);
    addDeclaredClasses(targetClasses, targetClass);
    if (Modifier.isAbstract(targetClass.getModifiers())) {
        logger.info("SUT is an abstract class");
        Set<Class<?>> subclasses = getConcreteClasses(targetClass, inheritanceTree);
        logger.info("Found {} concrete subclasses", subclasses.size());
        targetClasses.addAll(subclasses);
    }

    // To make sure we also have anonymous inner classes, double check inner classes using ASM
    ClassNode targetClassNode = DependencyAnalysis.getClassNode(Properties.TARGET_CLASS);
    Queue<InnerClassNode> innerClasses = new LinkedList<InnerClassNode>();
    innerClasses.addAll(targetClassNode.innerClasses);
    while (!innerClasses.isEmpty()) {
        InnerClassNode icn = innerClasses.poll();
        try {
            logger.debug("Loading inner class: {}, {},{}", icn.innerName, icn.name, icn.outerName);
            String innerClassName = ResourceList.getClassNameFromResourcePath(icn.name);
            Class<?> innerClass = TestGenerationContext.getInstance().getClassLoaderForSUT()
                    .loadClass(innerClassName);
            //if (!canUse(innerClass))
            //    continue;

            // Sometimes strange things appear such as Map$Entry
            if (!targetClasses.contains(innerClass)) {
                // && !innerClassName.matches(".*\\$\\d+(\\$.*)?$")) {
                logger.info("Adding inner class {}", innerClassName);
                targetClasses.add(innerClass);
                ClassNode innerClassNode = DependencyAnalysis.getClassNode(innerClassName);
                innerClasses.addAll(innerClassNode.innerClasses);
            }
        } catch (Throwable t) {
            logger.error("Problem for {}. Error loading inner class: {}, {},{}: {}", Properties.TARGET_CLASS,
                    icn.innerName, icn.name, icn.outerName, t);
        }
    }

    for (Class<?> clazz : targetClasses) {
        logger.info("Current SUT class: {}", clazz);
        if (!canUse(clazz)) {
            logger.info("Cannot access SUT class: {}", clazz);
            continue;
        }

        // Add all constructors
        for (Constructor<?> constructor : getConstructors(clazz)) {
            logger.info("Checking target constructor {}", constructor);
            String name = "<init>" + org.objectweb.asm.Type.getConstructorDescriptor(constructor);

            if (Properties.TT) {
                String orig = name;
                name = BooleanTestabilityTransformation.getOriginalNameDesc(clazz.getName(), "<init>",
                        org.objectweb.asm.Type.getConstructorDescriptor(constructor));
                if (!orig.equals(name))
                    logger.info("TT name: {} -> {}", orig, name);
            }

            if (canUse(constructor)) {
                GenericConstructor genericConstructor = new GenericConstructor(constructor, clazz);
                cluster.addTestCall(genericConstructor);
                // TODO: Add types!
                cluster.addGenerator(new GenericClass(clazz).getWithWildcardTypes(), genericConstructor);
                addDependencies(genericConstructor, 1);
                logger.debug("Keeping track of {}.{}{}", constructor.getDeclaringClass().getName(),
                        constructor.getName(), Type.getConstructorDescriptor(constructor));
            } else {
                logger.debug("Constructor cannot be used: {}", constructor);
            }
        }

        // Add all methods
        for (Method method : getMethods(clazz)) {
            logger.info("Checking target method {}", method);
            String name = method.getName() + org.objectweb.asm.Type.getMethodDescriptor(method);

            if (Properties.TT) {
                String orig = name;
                name = BooleanTestabilityTransformation.getOriginalNameDesc(clazz.getName(), method.getName(),
                        org.objectweb.asm.Type.getMethodDescriptor(method));
                if (!orig.equals(name))
                    logger.info("TT name: {} -> {}", orig, name);
            }

            if (canUse(method, clazz)) {
                logger.debug("Adding method {}.{}{}", clazz.getName(), method.getName(),
                        Type.getMethodDescriptor(method));

                GenericMethod genericMethod = new GenericMethod(method, clazz);
                cluster.addTestCall(genericMethod);
                cluster.addModifier(new GenericClass(clazz).getWithWildcardTypes(), genericMethod);
                addDependencies(genericMethod, 1);
                GenericClass retClass = new GenericClass(method.getReturnType());

                if (!retClass.isPrimitive() && !retClass.isVoid() && !retClass.isObject())
                    cluster.addGenerator(retClass.getWithWildcardTypes(), genericMethod);
            } else {
                logger.debug("Method cannot be used: {}", method);
            }
        }

        for (Field field : getFields(clazz)) {
            logger.info("Checking target field {}", field);

            if (canUse(field, clazz)) {
                GenericField genericField = new GenericField(field, clazz);
                addDependencies(genericField, 1);
                cluster.addGenerator(new GenericClass(field.getGenericType()).getWithWildcardTypes(),
                        genericField);
                logger.debug("Adding field {}", field);
                if (!Modifier.isFinal(field.getModifiers())) {
                    logger.debug("Is not final");
                    cluster.addTestCall(new GenericField(field, clazz));
                } else {
                    logger.debug("Is final");
                    if (Modifier.isStatic(field.getModifiers()) && !field.getType().isPrimitive()) {
                        logger.debug("Is static non-primitive");
                        /*
                         * With this we are trying to cover such cases:
                         *
                         * public static final DurationField INSTANCE = new MillisDurationField();
                         *
                         * private MillisDurationField() {
                         *     super();
                         * }
                         */
                        try {
                            Object o = field.get(null);
                            if (o == null) {
                                logger.info("Field is not yet initialized: {}", field);
                            } else {
                                Class<?> actualClass = o.getClass();
                                logger.debug("Actual class is {}", actualClass);
                                if (!actualClass.isAssignableFrom(genericField.getRawGeneratedType())
                                        && genericField.getRawGeneratedType().isAssignableFrom(actualClass)) {
                                    GenericField superClassField = new GenericField(field, clazz);
                                    cluster.addGenerator(new GenericClass(actualClass), superClassField);
                                }
                            }
                        } catch (IllegalAccessException e) {
                            // TODO Auto-generated catch block
                            e.printStackTrace();
                        }
                    }
                }
            } else {
                logger.debug("Can't use field {}", field);
            }
        }
        analyzedClasses.add(clazz);
        // TODO: Set to generic type rather than class?
        cluster.getAnalyzedClasses().add(clazz);
    }
    if (Properties.INSTRUMENT_PARENT) {
        for (String superClass : inheritanceTree.getSuperclasses(Properties.TARGET_CLASS)) {
            try {
                Class<?> superClazz = TestGenerationContext.getInstance().getClassLoaderForSUT()
                        .loadClass(superClass);
                dependencies.add(new Pair(0, superClazz));
            } catch (ClassNotFoundException e) {
                logger.error("Problem for {}. Class not found: {}", Properties.TARGET_CLASS, superClass, e);
            }
        }
    }

    if (Properties.HANDLE_STATIC_FIELDS) {
        GetStaticGraph getStaticGraph = GetStaticGraphGenerator.generate(Properties.TARGET_CLASS);

        Map<String, Set<String>> staticFields = getStaticGraph.getStaticFields();
        for (String className : staticFields.keySet()) {
            logger.info("Adding static fields to cluster for class {}", className);

            Class<?> clazz;
            try {
                clazz = getClass(className);
            } catch (ExceptionInInitializerError ex) {
                logger.debug("Class init caused exception {}", className);
                continue;
            }
            if (clazz == null) {
                logger.debug("Class not found {}", className);
                continue;
            }

            if (!canUse(clazz))
                continue;

            Set<String> fields = staticFields.get(className);
            for (Field field : getFields(clazz)) {
                if (!canUse(field, clazz))
                    continue;

                if (fields.contains(field.getName())) {
                    if (!Modifier.isFinal(field.getModifiers())) {
                        logger.debug("Is not final");
                        cluster.addTestCall(new GenericField(field, clazz));
                    }
                }
            }
        }

        PutStaticMethodCollector collector = new PutStaticMethodCollector(Properties.TARGET_CLASS, staticFields);

        Set<MethodIdentifier> methodIdentifiers = collector.collectMethods();
        for (MethodIdentifier methodId : methodIdentifiers) {
            Class<?> clazz = getClass(methodId.getClassName());
            if (clazz == null)
                continue;

            if (!canUse(clazz))
                continue;

            Method method = getMethod(clazz, methodId.getMethodName(), methodId.getDesc());
            if (method == null)
                continue;

            GenericMethod genericMethod = new GenericMethod(method, clazz);
            cluster.addTestCall(genericMethod);
        }
    }
    logger.info("Finished analyzing target class");
}
From source file:org.photovault.common.SchemaUpdateAction.java
/** Convert folder hierarchy from the old schema to the new one */
private void convertFolders() throws SQLException {
    SchemaUpdateOperation oper = new SchemaUpdateOperation("Converting folders");
    log.debug("Starting to convert folders to new schema");
    Session s = HibernateUtil.getSessionFactory().openSession();
    Session sqlSess = HibernateUtil.getSessionFactory().openSession();
    Connection conn = sqlSess.connection();

    Queue<Integer> waiting = new LinkedList<Integer>();
    Map<Integer, PhotoFolder> foldersById = new HashMap<Integer, PhotoFolder>();
    waiting.add(1);
    HibernateDAOFactory df = (HibernateDAOFactory) DAOFactory.instance(HibernateDAOFactory.class);
    df.setSession(s);
    PhotoFolderDAO folderDao = df.getPhotoFolderDAO();
    DTOResolverFactory rf = df.getDTOResolverFactory();
    PhotoFolder topFolder = folderDao.findByUUID(PhotoFolder.ROOT_UUID);
    if (topFolder == null) {
        topFolder = folderDao.create(PhotoFolder.ROOT_UUID, null);
        topFolder.setName("Top");
    }
    foldersById.put(1, topFolder);

    Statement countStmt = conn.createStatement();
    ResultSet countRs = countStmt.executeQuery("select count(*) from photo_collections");
    int folderCount = -1;
    if (countRs.next()) {
        folderCount = countRs.getInt(1);
    }
    countRs.close();
    countStmt.close();
    int convertedCount = 0;

    PreparedStatement stmt = conn.prepareStatement("select * from photo_collections where parent = ?");
    while (!waiting.isEmpty()) {
        int parentId = waiting.remove();
        PhotoFolder parent = foldersById.get(parentId);
        log.debug("Querying for folders with parent " + parentId);

        stmt.setInt(1, parentId);
        ResultSet rs = stmt.executeQuery();
        while (rs.next()) {
            // Create the folder
            /*
             * TODO: should the UUID be created algorithmically? Or better,
             * how to ensure that UUIDs for folders that are part of an
             * external volume will always be the same?
             */
            fireStatusChangeEvent(new SchemaUpdateEvent(oper, convertedCount * 100 / folderCount));
            String uuidStr = rs.getString("collection_uuid");
            int id = rs.getInt("collection_id");
            log.debug("Creating folder with old id " + id + ", uuid " + uuidStr);
            UUID uuid = (uuidStr != null) ? UUID.fromString(uuidStr) : UUID.randomUUID();
            if (id == 1) {
                uuid = PhotoFolder.ROOT_UUID;
            }
            PhotoFolder f = folderDao.create(uuid, parent);
            VersionedObjectEditor<PhotoFolder> e = f.editor(rf);
            FolderEditor fe = (FolderEditor) e.getProxy();
            fe.setName(rs.getString("collection_name"));
            fe.setDescription(rs.getString("collection_desc"));
            e.apply();
            /*
             * TODO: how to set the create time & last modified time
             * without exposing them to others?
             */
            log.debug("folder saved");

            foldersById.put(id, f);
            folderUuids.put(id, uuid);
            waiting.add(id);
            convertedCount++;
        }
        try {
            rs.close();
        } catch (SQLException e) {
            log.error("Error closing result set", e);
        }
    }
    s.flush();
    sqlSess.close();
    s.close();
    fireStatusChangeEvent(new SchemaUpdateEvent(oper, convertedCount * 100 / folderCount));
}