List of usage examples for java.util.LinkedList.removeFirst()
public E removeFirst()
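Removes and returns the first element of this list, throwing a NoSuchElementException if the list is empty. Since LinkedList implements Deque, removeFirst() is the usual way to pop the head when the list serves as a FIFO queue or worklist, which is the pattern most of the examples below follow. A minimal sketch (class and variable names are illustrative only):

import java.util.LinkedList;

public class RemoveFirstDemo {
    public static void main(String[] args) {
        LinkedList<String> queue = new LinkedList<>();
        queue.addLast("first");
        queue.addLast("second");

        // Returns and removes "first"; the list now holds only "second"
        String head = queue.removeFirst();
        System.out.println(head);

        // removeFirst() on an empty list throws NoSuchElementException,
        // so guard with isEmpty() when the list may be empty
        while (!queue.isEmpty()) {
            System.out.println(queue.removeFirst());
        }
    }
}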
From source file: edu.ucla.cs.scai.canali.core.index.utils.BiomedicalOntologyUtils.java
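A breadth-first traversal over class-equivalence edges: each class id is enqueued with addLast() and dequeued with removeFirst() until every member of the equivalence group has been visited.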
private void computeEquivalentClassGroups() throws IOException {
    // Load all classes and assign an id to them; DBpedia classes are loaded first
    String regex = "(\\s|\\t)*<([^<>]*)>(\\s|\\t)*<([^<>]*)>(\\s|\\t)*(<|\")(.*)(>|\")";
    Pattern p = Pattern.compile(regex);
    for (String fileName : fileNames) {
        try (BufferedReader in = new BufferedReader(new FileReader(downloadedFilesPath + fileName))) {
            String l = in.readLine();
            while (l != null) {
                Matcher m = p.matcher(l);
                if (m.find()) {
                    String s = m.group(2);
                    String a = m.group(4);
                    String v = m.group(7);
                    if (a.equals("http://www.w3.org/1999/02/22-rdf-syntax-ns#type")
                            && v.equals("http://www.w3.org/2000/01/rdf-schema#Class")
                            && (s.startsWith("http://www.dbpedia.org") || s.startsWith("http://dbpedia.org"))
                            && !classIds.containsKey(s)) {
                        classIds.put(s, classIds.size() + 1);
                    } else if (a.equals("http://www.w3.org/2002/07/owl#equivalentClass")) {
                        if ((s.startsWith("http://www.dbpedia.org") || s.startsWith("http://dbpedia.org"))
                                && !classIds.containsKey(s)) {
                            classIds.put(s, classIds.size() + 1);
                        }
                        if ((v.startsWith("http://www.dbpedia.org") || v.startsWith("http://dbpedia.org"))
                                && !classIds.containsKey(v)) {
                            classIds.put(v, classIds.size() + 1);
                        }
                    }
                }
                l = in.readLine();
            }
        }
    }
    // Now non-DBpedia classes are loaded
    for (String fileName : fileNames) {
        try (BufferedReader in = new BufferedReader(new FileReader(downloadedFilesPath + fileName))) {
            String l = in.readLine();
            while (l != null) {
                Matcher m = p.matcher(l);
                if (m.find()) {
                    String s = m.group(2);
                    String a = m.group(4);
                    String v = m.group(7);
                    if (a.equals("http://www.w3.org/1999/02/22-rdf-syntax-ns#type")
                            && v.equals("http://www.w3.org/2000/01/rdf-schema#Class")
                            && !(s.startsWith("http://www.dbpedia.org") || s.startsWith("http://dbpedia.org"))
                            && !classIds.containsKey(s)) {
                        classIds.put(s, classIds.size() + 1);
                    } else if (a.equals("http://www.w3.org/2002/07/owl#equivalentClass")) {
                        if (!(s.startsWith("http://www.dbpedia.org") || s.startsWith("http://dbpedia.org"))
                                && !classIds.containsKey(s)) {
                            classIds.put(s, classIds.size() + 1);
                        }
                        if (!(v.startsWith("http://www.dbpedia.org") || v.startsWith("http://dbpedia.org"))
                                && !classIds.containsKey(v)) {
                            classIds.put(v, classIds.size() + 1);
                        }
                    }
                }
                l = in.readLine();
            }
        }
    }
    // Create the equivalentClassEdges sets
    equivalentClassEdges = new HashSet[classIds.size() + 1];
    classById = new String[classIds.size() + 1];
    for (Map.Entry<String, Integer> e : classIds.entrySet()) {
        classById[e.getValue()] = e.getKey();
    }
    for (String fileName : fileNames) {
        try (BufferedReader in = new BufferedReader(new FileReader(downloadedFilesPath + fileName))) {
            String l = in.readLine();
            while (l != null) {
                Matcher m = p.matcher(l);
                if (m.find()) {
                    String a = m.group(4);
                    if (a.equals("http://www.w3.org/2002/07/owl#equivalentClass")) {
                        String s = m.group(2);
                        int idS = classIds.get(s);
                        String v = m.group(7);
                        int idV = classIds.get(v);
                        if (equivalentClassEdges[idS] == null) {
                            equivalentClassEdges[idS] = new HashSet<>();
                        }
                        equivalentClassEdges[idS].add(idV);
                        if (equivalentClassEdges[idV] == null) {
                            equivalentClassEdges[idV] = new HashSet<>();
                        }
                        equivalentClassEdges[idV].add(idS);
                    }
                    /* else if (a.equals("http://www.w3.org/1999/02/22-rdf-syntax-ns#type")) {
                        String s = m.group(2);
                        String v = m.group(7);
                        if (v.equals("http://www.w3.org/1999/02/22-rdf-syntax-ns#Property")) {
                            properties.add(s);
                        } else if (v.equals("http://www.w3.org/2000/01/rdf-schema#Class")) {
                            classes.add(s);
                        }
                    } */
                }
                l = in.readLine();
            }
        }
    }
    // Manually add an equivalence:
    // http://www4.wiwiss.fu-berlin.de/sider/resource/sider/drugs equivalentClass http://dbpedia.org/ontology/Drug
    {
        String s = "http://www4.wiwiss.fu-berlin.de/sider/resource/sider/drugs";
        int idS = classIds.get(s);
        String v = "http://dbpedia.org/ontology/Drug";
        int idV = classIds.get(v);
        if (equivalentClassEdges[idS] == null) {
            equivalentClassEdges[idS] = new HashSet<>();
        }
        equivalentClassEdges[idS].add(idV);
        if (equivalentClassEdges[idV] == null) {
            equivalentClassEdges[idV] = new HashSet<>();
        }
        equivalentClassEdges[idV].add(idS);
    }
    // Assign each class to an equivalence group by breadth-first search
    equivalentClass = new int[classIds.size() + 1];
    int i = 1;
    while (i < equivalentClass.length) {
        LinkedList<Integer> q = new LinkedList<>();
        q.addLast(i);
        while (!q.isEmpty()) {
            int j = q.removeFirst(); // dequeue the next class id to visit
            if (equivalentClass[j] != 0) {
                if (equivalentClass[j] != i) {
                    System.out.println("Error");
                    System.exit(0);
                }
            } else {
                equivalentClass[j] = i;
                if (equivalentClassEdges[j] != null) {
                    for (int k : equivalentClassEdges[j]) {
                        q.addLast(k);
                    }
                }
            }
        }
        i++;
        while (i < equivalentClass.length && equivalentClass[i] != 0) {
            i++;
        }
    }
}
From source file: com.trsst.Command.java
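The argument list doubles as a queue of command-line tokens; removeFirst() consumes the leading argument, which names the destination server.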
public int doPush(Client client, CommandLine commands, LinkedList<String> arguments, PrintStream out) {
    // A second argument must have been specified
    if (arguments.size() < 2) {
        printPushUsage();
        return 127; // "command not found"
    }
    URL url;
    String host = arguments.removeFirst(); // consume the leading argument: the server url
    Client destinationClient;
    try {
        url = new URL(host); // this argument is a server url
        destinationClient = new Client(url);
        System.err.println("Using service: " + host);
    } catch (MalformedURLException e) {
        printPushUsage();
        return 127; // "command not found"
    }
    for (String id : arguments) {
        System.out.println(destinationClient.push(client.pull(id), url));
        // Feed feed = client.pull(id);
        // if (feed != null) {
        //     feed = client.push(feed, url);
        //     if (feed != null) {
        //         out.println(feed);
        //     } else {
        //         System.err.println("Failed to push feed for id: " + id);
        //     }
        // } else {
        //     System.err.println("Failed to pull feed for id: " + id);
        // }
    }
    return 0; // "OK"
}
From source file: com.heliosapm.script.AbstractDeployedScript.java
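Path segments are trimmed from a working copy of the path: removeFirst() drops leading segments for a negative trim, removeLast() drops trailing segments for a positive one.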
public String[] getPathSegments(final int trim) {
    if (trim == 0)
        return pathSegments.clone();
    final int pLength = pathSegments.length;
    final int absTrim = Math.abs(trim);
    if (absTrim > pLength)
        throw new IllegalArgumentException("The requested trim [" + trim
                + "] is larger than the path segment [" + pathSegments.length + "]");
    if (absTrim == pLength)
        return new String[0];
    LinkedList<String> psegs = new LinkedList<String>(Arrays.asList(pathSegments));
    for (int i = 0; i < absTrim; i++) {
        if (trim < 0)
            psegs.removeFirst(); // negative trim: drop segments from the front
        else
            psegs.removeLast(); // positive trim: drop segments from the end
    }
    return psegs.toArray(new String[pLength - absTrim]);
}
From source file: edu.cuny.cat.stat.HistoricalReport.java
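removeFirst() pops the n oldest shouts off the head of the list while the companion bookkeeping structures are kept in sync.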
protected void removeNShouts(final int n, final LinkedList<Shout> shouts) {
    for (int i = 0; i < n; i++) {
        final Shout shout = shouts.removeFirst(); // pop the oldest shout
        if (isDebugging() && (sortedShouts.getCount(shout) > 1)) {
            HistoricalReport.logger.info(sortedShouts.getCount(shout) + " " + shout.toString());
        }
        final int count = sortedShouts.getCount(shout);
        // TODO: which one is removed? or doesn't matter?
        sortedShouts.remove(shout, 1);
        if (count - sortedShouts.getCount(shout) != 1) {
            HistoricalReport.logger.error("failed in removing exactly the single shout: " + shout);
            HistoricalReport.logger.error(count + " -> " + sortedShouts.getCount(shout) + " " + shout);
        }
        matchedShouts.remove(shout);
        // if the shout is the last one with the id, remove its record in shoutMap
        if (shout == getMappedShout(shout.getId())) {
            shoutMap.remove(shout.getId());
            // } else {
            //     HistoricalReport.logger.info("Earlier shout " + prettyString(shout)
            //             + " removed from shout list, which has a later modifying shout "
            //             + prettyString(getMappedShout(shout.getId())) + " in shout map.");
        }
    }
}
From source file: solidbase.core.UpgradeFile.java
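Upgrade segments are processed as a FIFO queue: removeFirst() dequeues the next segment, and newly reachable segments are appended to the tail.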
/**
 * Retrieves all versions that are reachable from the given source version. The current version is also considered.
 *
 * @param source The source version.
 * @param targeting Already targeting a specific version.
 * @param downgradesAllowed Allow downgrades.
 * @param result This set gets filled with all versions that are reachable from the given source version.
 */
protected void collectReachableVersions(String source, String targeting, boolean downgradesAllowed,
        Set<String> result) {
    if (!this.versions.contains(source))
        throw new FatalException("The current database version "
                + StringUtils.defaultString(source, "<no version>")
                + " is not available in the upgrade file. Maybe this version is deprecated or the wrong upgrade file is used.");

    if (targeting == null)
        result.add(source); // The source is reachable

    Collection<UpgradeSegment> segments = this.segments.get(source); // Get all segments with the given source
    if (segments == null)
        return;

    // Queue contains segments that await processing
    LinkedList<UpgradeSegment> queue = new LinkedList<UpgradeSegment>();

    // Fill queue with segments
    if (targeting != null) {
        for (UpgradeSegment segment : segments)
            if (targeting.equals(segment.getTarget()))
                queue.add(segment); // Add segment to the end of the list
        if (queue.isEmpty())
            throw new FatalException("The database is incompletely upgraded to version " + targeting
                    + ", but that version is not reachable from version "
                    + StringUtils.defaultString(source, "<no version>"));
    } else
        queue.addAll(segments);

    // Process the queue
    while (!queue.isEmpty()) {
        UpgradeSegment segment = queue.removeFirst(); // pop() is not available in java 5
        if (!result.contains(segment.getTarget())) // Already there?
            if (downgradesAllowed || !segment.isDowngrade()) // Downgrades allowed?
            {
                result.add(segment.getTarget());
                if (!segment.isOpen()) // Stop when segment is open.
                {
                    segments = this.segments.get(segment.getTarget()); // Add the next to the queue
                    if (segments != null)
                        queue.addAll(segments); // Add segments to the end of the list
                }
            }
    }
}
From source file: com.datatorrent.stram.client.EventsAgent.java
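removeFirst() is used twice here: once to skip whole part files from the head until the remaining events fit within the limit, and again at the end to trim surplus entries from the result.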
public List<EventInfo> getLatestEvents(String appId, int limit) {
    LinkedList<EventInfo> result = new LinkedList<EventInfo>();
    String dir = getEventsDirectory(appId);
    if (dir == null) {
        return null;
    }
    long totalNumEvents = 0;
    IndexFileBufferedReader ifbr = null;
    LinkedList<Pair<String, Long>> partFiles = new LinkedList<Pair<String, Long>>();
    try {
        ifbr = new IndexFileBufferedReader(new InputStreamReader(
                stramAgent.getFileSystem().open(new Path(dir, FSPartFileCollection.INDEX_FILE))), dir);
        EventsIndexLine indexLine;
        while ((indexLine = (EventsIndexLine) ifbr.readIndexLine()) != null) {
            if (indexLine.isEndLine) {
                continue;
            }
            partFiles.add(new Pair<String, Long>(indexLine.partFile, indexLine.numEvents));
            totalNumEvents += indexLine.numEvents;
        }
    } catch (Exception ex) {
        LOG.warn("Got exception when reading events", ex);
        return result;
    } finally {
        IOUtils.closeQuietly(ifbr);
    }
    long offset = 0;
    // Skip whole part files from the head until the remaining events fit the limit
    while (totalNumEvents > limit && !partFiles.isEmpty()) {
        Pair<String, Long> head = partFiles.getFirst();
        if (totalNumEvents - head.second < limit) {
            offset = Math.max(0, totalNumEvents - limit);
            break;
        }
        totalNumEvents -= head.second;
        partFiles.removeFirst();
    }
    String lastProcessPartFile = null;
    for (Pair<String, Long> partFile : partFiles) {
        BufferedReader partBr = null;
        try {
            partBr = new BufferedReader(
                    new InputStreamReader(stramAgent.getFileSystem().open(new Path(dir, partFile.first))));
            processPartFile(partBr, null, null, offset, limit, result);
            offset = 0;
            lastProcessPartFile = partFile.first;
        } catch (Exception ex) {
            LOG.warn("Got exception when reading events", ex);
        } finally {
            IOUtils.closeQuietly(partBr);
        }
    }
    BufferedReader partBr = null;
    try {
        String extraPartFile = getNextPartFile(lastProcessPartFile);
        if (extraPartFile != null && limit > 0) {
            partBr = new BufferedReader(
                    new InputStreamReader(stramAgent.getFileSystem().open(new Path(dir, extraPartFile))));
            processPartFile(partBr, null, null, 0, Integer.MAX_VALUE, result);
        }
    } catch (Exception ex) {
        // ignore
    } finally {
        IOUtils.closeQuietly(partBr);
    }
    // Trim any excess from the head so only the latest 'limit' events remain
    while (result.size() > limit) {
        result.removeFirst();
    }
    return result;
}
From source file: net.timewalker.ffmq4.storage.data.impl.journal.BlockBasedDataStoreJournal.java
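Completed journal files are popped off the head of the shared list with removeFirst(), collected, and then drained from the private list the same way while being closed or recycled.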
private void recycleUnusedJournalFiles() throws JournalException {
    LinkedList<JournalFile> unusedJournalFiles = null;

    // Look for unused journal files
    synchronized (journalFiles) {
        while (journalFiles.size() > 0) {
            JournalFile journalFile = journalFiles.getFirst();
            if (journalFile.isComplete() && journalFile.getLastTransactionId() < lastStoreTransactionId) {
                if (unusedJournalFiles == null)
                    unusedJournalFiles = new LinkedList<>();
                unusedJournalFiles.addLast(journalFile);
                journalFiles.removeFirst(); // Remove from list
            } else
                break;
        }
    }

    // Recycle unused journal files
    if (unusedJournalFiles != null) {
        while (!unusedJournalFiles.isEmpty()) {
            JournalFile journalFile = unusedJournalFiles.removeFirst();
            if (keepJournalFiles)
                journalFile.close();
            else {
                log.debug("[" + baseName + "] Recycling unused journal file : " + journalFile);
                File recycledFile = journalFile.closeAndRecycle();
                synchronized (recycledJournalFiles) {
                    recycledJournalFiles.addLast(recycledFile);
                }
            }
        }
    }
}
From source file: org.apache.tapestry.util.AdaptorRegistry.java
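A breadth-first search over implemented interfaces: removeFirst() takes the next interface to check for a registered adaptor while superinterfaces are appended with addLast().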
/**
 * Searches the registration Map for a match, based on inheritance.
 *
 * <p>Searches class inheritance first, then interfaces (in a rather vague order).
 * Really should match the order from the JVM spec.
 *
 * <p>There's a degenerate case where we may check the same interface more than once:
 * <ul>
 * <li>Two interfaces, I1 and I2
 * <li>Two classes, C1 and C2
 * <li>I2 extends I1
 * <li>C2 extends C1
 * <li>C1 implements I1
 * <li>C2 implements I2
 * <li>The search will be: C2, C1, I2, I1, I1
 * <li>I1 is searched twice, because C1 implements it, and I2 extends it
 * <li>There are other such cases, but none of them cause infinite loops
 * and most are rare (we could guard against it, but it's relatively expensive).
 * <li>Multiple checks only occur if we don't find a registration
 * </ul>
 *
 * <p>This method is only called from a synchronized block, so it is
 * implicitly synchronized.
 **/
private Object searchForAdaptor(Class subjectClass) {
    LinkedList queue = null;
    Object result = null;

    if (LOG.isDebugEnabled())
        LOG.debug("Searching for adaptor for class " + Tapestry.getClassName(subjectClass));

    // Step one: work up through the class inheritance.
    Class searchClass = subjectClass;

    // Primitive types have null, not Object, as their parent class.
    while (searchClass != Object.class && searchClass != null) {
        result = registrations.get(searchClass);
        if (result != null)
            return result;

        // Not an exact match. If the search class implements any interfaces,
        // add them to the queue.
        Class[] interfaces = searchClass.getInterfaces();
        int length = interfaces.length;

        if (queue == null && length > 0)
            queue = new LinkedList();

        for (int i = 0; i < length; i++)
            queue.addLast(interfaces[i]);

        // Advance up to the next superclass
        searchClass = getSuperclass(searchClass);
    }

    // Ok, the easy part failed, let's start searching interfaces.
    if (queue != null) {
        while (!queue.isEmpty()) {
            searchClass = (Class) queue.removeFirst(); // dequeue the next interface to check

            result = registrations.get(searchClass);
            if (result != null)
                return result;

            // Interfaces can extend other interfaces; add them to the queue.
            Class[] interfaces = searchClass.getInterfaces();
            int length = interfaces.length;

            for (int i = 0; i < length; i++)
                queue.addLast(interfaces[i]);
        }
    }

    // Not a match on interface; our last gasp is to check
    // for a registration for java.lang.Object
    result = registrations.get(Object.class);
    if (result != null)
        return result;

    // No match? That's rare ... and an error.
    throw new IllegalArgumentException(
            Tapestry.format("AdaptorRegistry.adaptor-not-found", Tapestry.getClassName(subjectClass)));
}
From source file: de.interactive_instruments.ShapeChange.Model.EA.EADocument.java
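Packages are processed with a worklist: removeFirst() takes the next evaluation task, and child packages are appended with addLast() as they are discovered.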
public void executeCommonInitializationProcedure() throws ShapeChangeAbortException {
    // Determine if specific packages should not be loaded
    this.excludedPackageNames = options.getExcludedPackages();

    /** Cache classes and packages */
    // First set up initial evaluation tasks of packages consisting
    // of the models in the repository
    class EvalTask {
        PackageInfoEA fatherPI;
        org.sparx.Package eaPackage;

        EvalTask(PackageInfoEA fpi, org.sparx.Package p) {
            fatherPI = fpi;
            eaPackage = p;
        }
    }

    StatusBoard.getStatusBoard().statusChanged(STATUS_EADOCUMENT_READMODEL);

    LinkedList<EvalTask> evalp = new LinkedList<EvalTask>();
    Collection<org.sparx.Package> model = repository.GetModels();
    for (org.sparx.Package p : model) {
        // Check if this model and all its contents shall be excluded
        String name = p.GetName();
        if (excludedPackageNames != null && excludedPackageNames.contains(name)) {
            // stop processing this model and continue with the next
            continue;
        }
        evalp.addLast(new EvalTask(null, p));
    }

    // Now remove tasks from the list, adding further tasks as we proceed,
    // until we have no more tasks to evaluate
    while (evalp.size() > 0) {
        // Remove next evaluation task
        EvalTask et = evalp.removeFirst();
        org.sparx.Package pack = et.eaPackage;
        PackageInfoEA fpi = et.fatherPI;

        // Check if this package and all its contents shall be excluded from the model
        String name = pack.GetName();
        if (excludedPackageNames != null && excludedPackageNames.contains(name)) {
            // stop processing this package and continue with the next
            continue;
        }

        // Add to package cache. The PackageInfo Ctor does the necessary
        // parent/child linkage of packages
        Element packelmt = pack.GetElement();
        PackageInfoEA pi = new PackageInfoEA(this, fpi, pack, packelmt);
        fPackageById.put(pi.id(), pi);
        if (packelmt != null)
            this.fPackageByElmtId.put(new Integer(packelmt.GetElementID()).toString(), pi);

        // Now pick all classes and add these to their caches.
        for (org.sparx.Element elmt : pack.GetElements()) {
            String type = elmt.GetType();
            if (!type.equals("DataType") && !type.equals("Class") && !type.equals("Interface")
                    && !type.equals("Enumeration"))
                continue;
            ClassInfoEA ci = new ClassInfoEA(this, pi, elmt);
            fClassById.put(ci.id(), ci);
            // TODO What's happening to identical class names? How is this
            // supposed to be handled? Open issue. While classifier names have
            // to be unique per app schema only, it is a legacy from Rational
            // Rose that it is expected that classifier names are unique in the
            // whole model. The correct solution would be to add namespace
            // qualifiers.
            fClassByName.put(ci.name(), ci);
        }
        // Add next level packages for further evaluation
        for (org.sparx.Package pnxt : pack.GetPackages()) {
            evalp.addLast(new EvalTask(pi, pnxt));
        }
    }

    StatusBoard.getStatusBoard().statusChanged(STATUS_EADOCUMENT_ESTABLISHCLASSES);

    /**
     * Now that all classes are collected, in a second go establish class
     * derivation hierarchy and all other associations between classes.
     */
    for (ClassInfoEA ci : fClassById.values()) {
        // Generalization - class derivation hierarchy
        ci.establishClassDerivationHierarchy();
        // Other associations where the class is source or target
        ci.establishAssociations();
    }

    String checkingConstraints = options.parameter("checkingConstraints");
    if (checkingConstraints == null || !checkingConstraints.toLowerCase().trim().equals("disabled")) {
        StatusBoard.getStatusBoard().statusChanged(STATUS_EADOCUMENT_READCONSTARINTS);

        // TODO The following may be removed when constraints have been tested.
        /** In a third go collect all constraints */
        for (ClassInfoEA ci : fClassById.values()) {
            ci.constraints();
            SortedMap<StructuredNumber, PropertyInfo> props = ci.properties();
            for (PropertyInfo pi : props.values())
                pi.constraints();
        }
    }

    /**
     * Loop over all schemas (i.e. packages with a target namespace) and
     * store the schema location, so that it can be added in import statements
     */
    SortedSet<PackageInfo> schemas = schemas("");
    for (Iterator<PackageInfo> i = schemas.iterator(); i.hasNext();) {
        PackageInfo pi = i.next();
        options.addSchemaLocation(pi.targetNamespace(), pi.xsdDocument());
    }

    // ==============================
    // Load diagrams if so requested
    String loadDiagrams = options.parameter("loadDiagrams");
    if (loadDiagrams != null && loadDiagrams.equalsIgnoreCase("true")) {
        java.io.File tmpDir = options.imageTmpDir();
        if (tmpDir.exists()) {
            // probably content from previous run, delete the content of the directory
            try {
                FileUtils.deleteDirectory(tmpDir);
            } catch (IOException e) {
                result.addWarning(null, 34, tmpDir.getAbsolutePath());
            }
            if (!tmpDir.exists()) {
                try {
                    FileUtils.forceMkdir(tmpDir);
                } catch (IOException e) {
                    result.addWarning(null, 32, tmpDir.getAbsolutePath());
                }
            }
        }
        AtomicInteger imgIdCounter = new AtomicInteger(0);
        SortedSet<? extends PackageInfo> selectedSchema = this.selectedSchemas();
        for (PackageInfo pi : selectedSchema) {
            if (pi == null) {
                continue;
            }
            // Only process schemas in a namespace and name that matches a
            // user-selected pattern
            if (options.skipSchema(null, pi))
                continue;
            saveDiagrams(imgIdCounter, "img", tmpDir, escapeFileName(tmpDir.getName()), pi);
        }
    }
}
From source file: com.googlecode.psiprobe.controllers.logs.FollowController.java
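Log lines are read backwards and prepended with addFirst(); when the read overshoots the requested range, removeFirst() discards the surplus oldest line.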
protected ModelAndView handleLogFile(HttpServletRequest request, HttpServletResponse response,
        LogDestination logDest) throws Exception {

    ModelAndView mv = new ModelAndView(getViewName());
    File file = logDest.getFile();

    if (file.exists()) {
        LinkedList lines = new LinkedList();
        long actualLength = file.length();
        long lastKnownLength = ServletRequestUtils.getLongParameter(request, "lastKnownLength", 0);
        long currentLength = ServletRequestUtils.getLongParameter(request, "currentLength", actualLength);
        long maxReadLines = ServletRequestUtils.getLongParameter(request, "maxReadLines", 0);

        if (lastKnownLength > currentLength || lastKnownLength > actualLength
                || currentLength > actualLength) {
            // file length got reset
            lastKnownLength = 0;
            lines.add(" ------------- THE FILE HAS BEEN TRUNCATED --------------");
        }

        BackwardsFileStream bfs = new BackwardsFileStream(file, currentLength);
        try {
            BackwardsLineReader br = new BackwardsLineReader(bfs);
            long readSize = 0;
            long totalReadSize = currentLength - lastKnownLength;
            String s;
            while (readSize < totalReadSize && (s = br.readLine()) != null) {
                if (!s.equals("")) {
                    lines.addFirst(s);
                    readSize += s.length();
                } else {
                    readSize++;
                }
                if (maxReadLines != 0 && lines.size() >= maxReadLines) {
                    break;
                }
            }

            if (lastKnownLength != 0 && readSize > totalReadSize) {
                // the last read line went past the requested range; drop it
                lines.removeFirst();
            }
        } finally {
            bfs.close();
        }

        mv.addObject("lines", lines);
    }
    return mv;
}