List of usage examples for java.util.Queue#isEmpty()
boolean isEmpty();
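Every example below follows the same pattern: fill a Queue, then drain it with a while (!queue.isEmpty()) loop. As a quick orientation before the real-world sources, here is a minimal self-contained sketch of that pattern; the class and element names are illustrative only, not taken from any of the projects below.

import java.util.ArrayDeque;
import java.util.Queue;

public class QueueIsEmptyDemo {
    public static void main(String[] args) {
        // Queue is an interface; ArrayDeque and LinkedList are common implementations.
        Queue<String> pending = new ArrayDeque<>();
        pending.add("first");
        pending.add("second");
        pending.add("third");

        // The canonical drain loop: keep processing until the queue is empty.
        // Elements added during processing are picked up too, which is what
        // makes this pattern suitable for BFS-style traversals.
        while (!pending.isEmpty()) {
            String item = pending.remove(); // poll() would return null instead of throwing
            System.out.println("processing: " + item);
        }

        System.out.println("empty now: " + pending.isEmpty()); // prints "empty now: true"
    }
}

Several of the snippets below (FileSystemMaster, UMLSDatabaseConnection, StorageNodeManagerBean) use exactly this drain loop to implement a breadth-first traversal.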
From source file:org.apache.synapse.transport.nhttp.HttpCoreNIOListener.java
/**
 * Start the specific endpoints given by the InetSocketAddress list.
 *
 * @param endpointsClosed InetSocketAddresses of endpoints to be started
 * @throws AxisFault
 */
private void startSpecificEndpoints(List<InetSocketAddress> endpointsClosed) throws AxisFault {
    Queue<ListenerEndpoint> endpoints = new LinkedList<ListenerEndpoint>();

    // Ensure simple but stable order
    List<InetSocketAddress> addressList = endpointsClosed;
    Collections.sort(addressList, new Comparator<InetSocketAddress>() {
        public int compare(InetSocketAddress a1, InetSocketAddress a2) {
            String s1 = a1.toString();
            String s2 = a2.toString();
            return s1.compareTo(s2);
        }
    });

    for (InetSocketAddress address : addressList) {
        endpoints.add(ioReactor.listen(address));
    }

    // Wait for each endpoint to become ready, i.e. for the listener to start
    // accepting requests.
    while (!endpoints.isEmpty()) {
        ListenerEndpoint endpoint = endpoints.remove();
        try {
            endpoint.waitFor();
            if (log.isInfoEnabled()) {
                InetSocketAddress address = (InetSocketAddress) endpoint.getAddress();
                if (!address.isUnresolved()) {
                    log.info(name + " started on " + address.getHostName() + ":" + address.getPort());
                } else {
                    log.info(name + " started on " + address);
                }
            }
        } catch (InterruptedException e) {
            log.warn("Listener startup was interrupted");
            break;
        }
    }
}
From source file:org.shaman.terrain.polygonal.PolygonalMapGenerator.java
private void createBiomes() {
    if (graph == null) {
        return;
    }
    // assign temperatures
    for (Graph.Corner c : graph.corners) {
        c.temperature = c.elevation;
        c.temperature *= c.temperature;
        c.temperature = 1 - c.temperature;
    }
    assignCenterTemperature();
    // create random rivers
    Random rand = new Random(seed * 3);
    for (Graph.Corner c : graph.corners) {
        c.river = 0;
    }
    float riverProb = 0.2f;
    float riverStartHeight = 0.7f;
    int riverCounter = 0;
    corner: for (Graph.Corner c : graph.corners) {
        if (c.water || c.elevation < riverStartHeight) {
            continue;
        }
        if (rand.nextFloat() > riverProb) {
            continue;
        }
        if (c.river > 0) {
            continue;
        }
        for (Graph.Corner c2 : c.adjacent) {
            if (c2.river > 0) {
                continue corner;
            }
            for (Graph.Corner c3 : c2.adjacent) {
                if (c3.river > 0) {
                    continue corner;
                }
            }
        }
        // start a new river from here
        Graph.Corner current = c;
        current.river = Math.max(current.river, 1);
        while (!current.ocean && !current.coast) {
            float minH = current.elevation;
            Graph.Corner minC = null;
            for (Graph.Corner c2 : current.adjacent) {
                if (c2.river > 0 && c2.elevation < current.elevation) {
                    minC = c2; // force closing of rivers
                    break;
                }
                if (c2.elevation < minH) {
                    minC = c2;
                    minH = c2.elevation;
                }
            }
            if (minC == null) {
                LOG.warning("river stuck in a local minima without reaching the ocean");
                break;
            }
            minC.river = Math.max(minC.river, current.river + 1);
            current = minC;
        }
        riverCounter++;
    }
    LOG.info("count of created rivers: " + riverCounter);
    showRivers = true;
    // assign moisture
    Queue<Graph.Corner> queue = new ArrayDeque<>();
    for (Graph.Corner q : graph.corners) {
        if ((q.water || q.river > 0) && !q.ocean) {
            q.moisture = q.river > 0 ? Math.min(3.0f, (0.4f * q.river)) : 1;
            queue.add(q);
        } else {
            q.moisture = 0;
        }
    }
    while (!queue.isEmpty()) {
        Graph.Corner q = queue.poll();
        for (Graph.Corner r : q.adjacent) {
            float newMoisture = q.moisture * 0.8f;
            if (newMoisture > r.moisture) {
                r.moisture = newMoisture;
                queue.add(r);
            }
        }
    }
    for (Graph.Corner q : graph.corners) {
        if (q.ocean || q.coast) {
            q.moisture = 1;
        }
    }
    // redistribute moisture
    ArrayList<Graph.Corner> corners = new ArrayList<>();
    for (Graph.Corner q : graph.corners) {
        if (!q.ocean && !q.coast) {
            corners.add(q);
        }
    }
    Collections.sort(corners, new Comparator<Graph.Corner>() {
        @Override
        public int compare(Graph.Corner o1, Graph.Corner o2) {
            return Float.compare(o1.moisture, o2.moisture);
        }
    });
    for (int i = 0; i < corners.size(); i++) {
        corners.get(i).moisture = i / (float) (corners.size() - 1);
    }
    assignCenterMoisture();
    assignBiomes();
    // update mesh
    updateTemperatureGeometry();
    updateMoistureGeometry();
    updateBiomesGeometry();
}
From source file:hudson.plugins.emailext.plugins.content.BuildLogRegexContent.java
String getContent(BufferedReader reader) throws IOException {
    final boolean asHtml = matchedLineHtmlStyle != null;
    escapeHtml = asHtml || escapeHtml;
    final Pattern pattern = Pattern.compile(regex);
    final StringBuffer buffer = new StringBuffer();
    int numLinesTruncated = 0;
    int numMatches = 0;
    int numLinesStillNeeded = 0;
    boolean insidePre = false;
    Queue<String> linesBeforeList = new LinkedList<String>();
    String line = null;
    while ((line = reader.readLine()) != null) {
        // Remove console notes (JENKINS-7402)
        line = ConsoleNote.removeNotes(line);
        // Remove any lines before that are no longer needed.
        while (linesBeforeList.size() > linesBefore) {
            linesBeforeList.remove();
            ++numLinesTruncated;
        }
        final Matcher matcher = pattern.matcher(line);
        final StringBuffer sb = new StringBuffer();
        boolean matched = false;
        while (matcher.find()) {
            matched = true;
            if (substText != null) {
                matcher.appendReplacement(sb, substText);
            } else {
                break;
            }
        }
        if (matched) {
            // The current line matches.
            if (showTruncatedLines && numLinesTruncated > 0) {
                // Append information about truncated lines.
                insidePre = stopPre(buffer, insidePre);
                appendLinesTruncated(buffer, numLinesTruncated, asHtml);
                numLinesTruncated = 0;
            }
            if (asHtml) {
                insidePre = startPre(buffer, insidePre);
            }
            while (!linesBeforeList.isEmpty()) {
                appendContextLine(buffer, linesBeforeList.remove(), escapeHtml);
            }
            // Append the (possibly transformed) current line.
            if (substText != null) {
                matcher.appendTail(sb);
                line = sb.toString();
            }
            appendMatchedLine(buffer, line, escapeHtml, matchedLineHtmlStyle, addNewline);
            ++numMatches;
            // Set up to add numLinesStillNeeded
            numLinesStillNeeded = linesAfter;
        } else {
            // The current line did not match.
            if (numLinesStillNeeded > 0) {
                // Append this line as a line after.
                appendContextLine(buffer, line, escapeHtml);
                --numLinesStillNeeded;
            } else {
                // Store this line as a possible line before.
                linesBeforeList.offer(line);
            }
        }
        if (maxMatches != 0 && numMatches >= maxMatches && numLinesStillNeeded == 0) {
            break;
        }
    }
    if (showTruncatedLines) {
        // Count the rest of the lines.
        // Include any lines in linesBefore.
        while (linesBeforeList.size() > 0) {
            linesBeforeList.remove();
            ++numLinesTruncated;
        }
        if (line != null) {
            // Include the rest of the lines that haven't been read in.
            while ((line = reader.readLine()) != null) {
                ++numLinesTruncated;
            }
        }
        if (numLinesTruncated > 0) {
            insidePre = stopPre(buffer, insidePre);
            appendLinesTruncated(buffer, numLinesTruncated, asHtml);
        }
    }
    insidePre = stopPre(buffer, insidePre);
    if (buffer.length() == 0) {
        return defaultValue;
    }
    return buffer.toString();
}
From source file:tachyon.master.file.FileSystemMaster.java
/**
 * Gets the absolute paths of all in-memory files. Called by the web UI.
 *
 * @return absolute paths of all in-memory files
 */
public List<TachyonURI> getInMemoryFiles() {
    List<TachyonURI> ret = new ArrayList<TachyonURI>();
    Queue<Pair<InodeDirectory, TachyonURI>> nodesQueue =
        new LinkedList<Pair<InodeDirectory, TachyonURI>>();
    synchronized (mInodeTree) {
        // TODO(yupeng): Verify we want to use absolute path.
        nodesQueue.add(new Pair<InodeDirectory, TachyonURI>(mInodeTree.getRoot(),
            new TachyonURI(TachyonURI.SEPARATOR)));
        while (!nodesQueue.isEmpty()) {
            Pair<InodeDirectory, TachyonURI> pair = nodesQueue.poll();
            InodeDirectory directory = pair.getFirst();
            TachyonURI curUri = pair.getSecond();

            Set<Inode> children = directory.getChildren();
            for (Inode inode : children) {
                TachyonURI newUri = curUri.join(inode.getName());
                if (inode.isDirectory()) {
                    nodesQueue.add(new Pair<InodeDirectory, TachyonURI>((InodeDirectory) inode, newUri));
                } else if (isFullyInMemory((InodeFile) inode)) {
                    ret.add(newUri);
                }
            }
        }
    }
    return ret;
}
From source file:org.rhq.enterprise.server.cloud.StorageNodeManagerBean.java
private Map<Integer, Integer> findResourcesWithAlertsToStorageNodeMap(StorageNode storageNode) {
    Stopwatch stopwatch = stopwatchStart();
    List<StorageNode> initialStorageNodes = getStorageNodes();
    try {
        if (storageNode == null) {
            initialStorageNodes = getStorageNodes();
        } else {
            initialStorageNodes = Arrays.asList(storageNode.getResource() == null
                ? entityManager.find(StorageNode.class, storageNode.getId())
                : storageNode);
        }
        Map<Integer, Integer> resourceIdsToStorageNodeMap = new HashMap<Integer, Integer>();
        Queue<Resource> unvisitedResources = new LinkedList<Resource>();

        // we are assuming here that the sets of resources are disjoint across different storage nodes
        for (StorageNode initialStorageNode : initialStorageNodes) {
            if (initialStorageNode.getResource() != null) {
                unvisitedResources.add(initialStorageNode.getResource());
                while (!unvisitedResources.isEmpty()) {
                    Resource resource = unvisitedResources.poll();
                    if (!resource.getAlertDefinitions().isEmpty()) {
                        resourceIdsToStorageNodeMap.put(resource.getId(), initialStorageNode.getId());
                    }
                    Set<Resource> childResources = resource.getChildResources();
                    if (childResources != null) {
                        for (Resource child : childResources) {
                            unvisitedResources.add(child);
                        }
                    }
                }
            }
        }
        return resourceIdsToStorageNodeMap;
    } finally {
        if (log.isDebugEnabled()) {
            stopwatchEnd(stopwatch, "Found storage node resources with alert defs in ");
        }
    }
}
From source file:it.geosolutions.geobatch.actions.commons.MoveAction.java
/**
 * Removes the incoming events from the queue, moves their source files to the
 * configured destination, and puts the resulting events into the output queue.
 */
public Queue<EventObject> execute(Queue<EventObject> events) throws ActionException {
    listenerForwarder.started();
    listenerForwarder.setTask("build the output absolute file name");

    // return queue
    final Queue<EventObject> ret = new LinkedList<EventObject>();
    listenerForwarder.setTask("Building/getting the root data structure");

    boolean moveMultipleFile;
    final int size = events.size();
    if (size == 0) {
        throw new ActionException(this, "Empty file list");
    } else if (size > 1) {
        moveMultipleFile = true;
    } else {
        moveMultipleFile = false;
    }

    if (conf.getDestination() == null) {
        throw new IllegalArgumentException("Unable to work with a null dest dir");
    }
    if (!conf.getDestination().isAbsolute()) {
        conf.setDestination(new File(this.getConfigDir(), conf.getDestination().getPath()));
        if (LOGGER.isWarnEnabled()) {
            LOGGER.warn("Destination is not an absolute path. Absolutizing destination using temp dir: "
                + conf.getDestination());
        }
    }

    boolean moveToDir;
    if (!conf.getDestination().isDirectory()) {
        // TODO LOG
        moveToDir = false;
        if (moveMultipleFile) {
            throw new ActionException(this,
                "Unable to run on multiple file with an output file, use directory instead");
        }
    } else {
        moveToDir = true;
    }

    while (!events.isEmpty()) {
        listenerForwarder.setTask("Generating the output");
        final EventObject event = events.remove();
        if (event == null) {
            // TODO LOG
            continue;
        }
        if (event instanceof FileSystemEvent) {
            File source = ((FileSystemEvent) event).getSource();
            File dest;
            listenerForwarder.setTask("moving to destination");
            if (moveToDir) {
                dest = conf.getDestination();
                try {
                    FileUtils.moveFileToDirectory(source, dest, true);
                } catch (IOException e) {
                    throw new ActionException(this, e.getLocalizedMessage());
                }
            } else if (moveMultipleFile) {
                dest = new File(conf.getDestination(), source.getPath());
                try {
                    FileUtils.moveFile(source, dest);
                } catch (IOException e) {
                    throw new ActionException(this, e.getLocalizedMessage());
                }
            } else {
                // LOG and continue
                continue;
            }
            // add the file to the return queue
            ret.add(new FileSystemEvent(dest, FileSystemEventType.FILE_ADDED));
        }
    }
    listenerForwarder.completed();
    return ret;
}
From source file:com.comphenix.xp.parser.text.PotionParser.java
@Override
public PotionQuery parse(String text) throws ParsingException {
    Queue<String> tokens = getParameterQueue(text);
    ParsingException reason = null;

    List<Integer> items = Utility.getElementList((Integer) null);
    List<PotionType> types = Utility.getElementList((PotionType) null);
    List<Integer> tiers = Utility.getElementList((Integer) null);

    try {
        items = Utility.flatten(itemNameParser.parse(tokens));
        types = potionTypeParser.parse(tokens);
        tiers = tierParser.parse(tokens);
    } catch (ParsingException ex) {
        // Wait, don't give up yet.
        reason = ex;
    }

    // Possibly a double check
    if (items.isEmpty() || !items.contains(Material.POTION.getId()))
        throw new ParsingException("Can only create potion queries from potion rules.");

    // Scan all unused parameters for these options first
    List<Boolean> extended = extendedParser.parseAny(tokens);
    List<Boolean> splash = splashParser.parseAny(tokens);

    Integer maxLevel = getMaxLevel(types);

    // Just assume level two is the highest
    if (maxLevel == null)
        maxLevel = 2;

    // Check tiers
    for (Integer tier : tiers) {
        if (tier > maxLevel) {
            throw ParsingException.fromFormat("Potion level %d is too high.", tier);
        } else if (tier < 1) {
            throw ParsingException.fromFormat("Potion level %d is too low.", tier);
        }
    }

    // If there are tokens left, a problem occurred
    if (!tokens.isEmpty()) {
        // Let the user know about the reason too
        if (reason != null)
            throw reason;
        else
            throw ParsingException.fromFormat("Unknown item tokens: %s", StringUtils.join(tokens, ", "));
    }

    // Create the query
    return new PotionQuery(types, tiers, extended, splash);
}
From source file:com.todoroo.astrid.actfm.sync.ActFmSyncService.java
private void initializeRetryRunnable() {
    pushRetryRunnable = new Runnable() {
        public void run() {
            while (true) {
                AndroidUtilities.sleepDeep(TIME_BETWEEN_TRIES);
                if (failedPushes.isEmpty()) {
                    synchronized (ActFmSyncService.this) {
                        pushRetryThread = null;
                        return;
                    }
                }
                if (failedPushes.size() > 0) {
                    // Copy into a second queue so we don't end up infinitely retrying in the same loop
                    Queue<FailedPush> toTry = new LinkedList<FailedPush>();
                    while (failedPushes.size() > 0) {
                        toTry.add(failedPushes.remove(0));
                    }
                    while (!toTry.isEmpty() && !actFmPreferenceService.isOngoing()) {
                        FailedPush pushOp = toTry.remove();
                        switch (pushOp.pushType) {
                        case PUSH_TYPE_TASK:
                            pushTask(pushOp.itemId);
                            break;
                        case PUSH_TYPE_TAG:
                            pushTag(pushOp.itemId);
                            break;
                        case PUSH_TYPE_UPDATE:
                            pushUpdate(pushOp.itemId);
                            break;
                        }
                    }
                }
            }
        }
    };
}
From source file:edu.emory.cci.aiw.umls.UMLSDatabaseConnection.java
public List<TerminologyCode> getTermSubsumption(TerminologyCode code)
        throws UMLSQueryException, UMLSNoSuchTermException {
    validateCode(code);
    if (!codeExists(code)) {
        throw new UMLSNoSuchTermException("No such terminology code: " + code);
    }
    List<TerminologyCode> result = new ArrayList<TerminologyCode>();

    // stores the unexpanded children
    Queue<TerminologyCode> descendants = new LinkedList<TerminologyCode>();

    result.add(code);
    descendants.addAll(getChildrenByCode(code));

    // loop through all children until the queue is empty, breadth-first
    // since the queue is FIFO
    while (!descendants.isEmpty()) {
        // dequeue from the descendants and set as the current term
        TerminologyCode current = descendants.remove();

        // add the current child under examination to the result set
        result.add(current);

        // get all of the current term's children and add them to the queue
        List<TerminologyCode> curChildren = getChildrenByCode(current);
        if (!curChildren.isEmpty()) {
            descendants.addAll(curChildren);
        }
    }
    return result;
}
From source file:org.apereo.portal.io.xml.JaxbPortalDataHandlerService.java
@Override
public void importDataDirectory(File directory, String pattern, final BatchImportOptions options) {
    if (!directory.exists()) {
        throw new IllegalArgumentException("The specified directory '" + directory + "' does not exist");
    }

    // Create the file filter to use when searching for files to import
    final FileFilter fileFilter;
    if (pattern != null) {
        fileFilter = new AntPatternFileFilter(true, false, pattern, this.dataFileExcludes);
    } else {
        fileFilter = new AntPatternFileFilter(true, false, this.dataFileIncludes, this.dataFileExcludes);
    }

    // Determine the parent directory to log to
    final File logDirectory = determineLogDirectory(options, "import");

    // Set up the reporting file
    final File importReport = new File(logDirectory, "data-import.txt");
    final PrintWriter reportWriter;
    try {
        reportWriter = new PrintWriter(new PeriodicFlushingBufferedWriter(500, new FileWriter(importReport)));
    } catch (IOException e) {
        throw new RuntimeException("Failed to create FileWriter for: " + importReport, e);
    }

    // Convert the directory to a URI String to provide better logging output
    final URI directoryUri = directory.toURI();
    final String directoryUriStr = directoryUri.toString();
    IMPORT_BASE_DIR.set(directoryUriStr);
    try {
        // Scan the specified directory for files to import
        logger.info("Scanning for files to Import from: {}", directory);
        final PortalDataKeyFileProcessor fileProcessor =
            new PortalDataKeyFileProcessor(this.dataKeyTypes, options);
        this.directoryScanner.scanDirectoryNoResults(directory, fileFilter, fileProcessor);
        final long resourceCount = fileProcessor.getResourceCount();
        logger.info("Found {} files to Import from: {}", resourceCount, directory);

        // See if the import should fail on error
        final boolean failOnError = options != null ? options.isFailOnError() : true;

        // Map of files to import, grouped by type
        final ConcurrentMap<PortalDataKey, Queue<Resource>> dataToImport = fileProcessor.getDataToImport();

        // Import the data files
        for (final PortalDataKey portalDataKey : this.dataKeyImportOrder) {
            final Queue<Resource> files = dataToImport.remove(portalDataKey);
            if (files == null) {
                continue;
            }

            final Queue<ImportFuture<?>> importFutures = new LinkedList<ImportFuture<?>>();
            final List<FutureHolder<?>> failedFutures = new LinkedList<FutureHolder<?>>();

            final int fileCount = files.size();
            logger.info("Importing {} files of type {}", fileCount, portalDataKey);
            reportWriter.println(portalDataKey + "," + fileCount);

            while (!files.isEmpty()) {
                final Resource file = files.poll();

                // Check for completed futures on every iteration, needed to fail as fast
                // as possible on an import exception
                final List<FutureHolder<?>> newFailed =
                    waitForFutures(importFutures, reportWriter, logDirectory, false);
                failedFutures.addAll(newFailed);

                final AtomicLong importTime = new AtomicLong(-1);

                // Create the import task
                final Callable<Object> task = new CallableWithoutResult() {
                    @Override
                    protected void callWithoutResult() {
                        IMPORT_BASE_DIR.set(directoryUriStr);
                        importTime.set(System.nanoTime());
                        try {
                            importData(file, portalDataKey);
                        } finally {
                            importTime.set(System.nanoTime() - importTime.get());
                            IMPORT_BASE_DIR.remove();
                        }
                    }
                };

                // Submit the import task
                final Future<?> importFuture = this.importExportThreadPool.submit(task);

                // Add the future for tracking
                importFutures.offer(new ImportFuture(importFuture, file, portalDataKey, importTime));
            }

            // Wait for all of the imports of this type to complete
            final List<FutureHolder<?>> newFailed =
                waitForFutures(importFutures, reportWriter, logDirectory, true);
            failedFutures.addAll(newFailed);

            if (failOnError && !failedFutures.isEmpty()) {
                throw new RuntimeException(failedFutures.size() + " " + portalDataKey
                    + " entities failed to import.\n\n"
                    + "\tPer entity exception logs and a full report can be found in " + logDirectory + "\n");
            }

            reportWriter.flush();
        }

        if (!dataToImport.isEmpty()) {
            throw new IllegalStateException(
                "The following PortalDataKeys are not listed in the dataTypeImportOrder List: "
                    + dataToImport.keySet());
        }

        logger.info("For a detailed report on the data import see " + importReport);
    } catch (InterruptedException e) {
        throw new RuntimeException("Interrupted while waiting for entities to import", e);
    } finally {
        IOUtils.closeQuietly(reportWriter);
        IMPORT_BASE_DIR.remove();
    }
}