List of usage examples for java.util LinkedList removeLast
public E removeLast()
From source file:com.multimedia.service.wallpaper.CmsWallpaperService.java
/**
 * Scans the upload directory tree and pre-scales every wallpaper image found into
 * per-dimension subfolders under {@code <uploadPath>/pre_upload/<n>/}, updating the
 * given {@link StatusBean} with progress as it goes.
 *
 * Directories are walked with an explicit LinkedList used as a stack
 * (addLast/removeLast = depth-first). A directory is only processed if it carries a
 * DESCRIPTION_FILE declaring a page id; processed images are deleted from the source.
 *
 * @param usb progress holder; done/total counters and current file name are written here.
 */
@Override
public void preUploadWallpapers(StatusBean usb) {
    File upload_dir = new File(wallpaper_service.getUploadPath());
    OnlyFilesFilter filenameFilter = new OnlyFilesFilter();
    usb.setDone(0);
    usb.setTotal(scanFolder(upload_dir));
    if (upload_dir.exists()) {
        // upload_made starts true so the first loop iteration allocates a target dir.
        boolean upload_made = true;
        int upload_count = 0;
        File pre_upload_directory = new File(wallpaper_service.getUploadPath(), "pre_upload");
        if (!pre_upload_directory.exists())
            pre_upload_directory.mkdir();
        File cur_dir = null;
        File description_file;
        boolean pre_uploaded;
        Long id_pages_cur;
        String page_name;
        LinkedList<File> files = new LinkedList<File>();
        files.addLast(upload_dir);
        while (!files.isEmpty()) {
            if (upload_made) {
                // Allocate the next free numbered output directory under pre_upload/.
                cur_dir = new File(pre_upload_directory, String.valueOf(upload_count));
                while (cur_dir.exists()) {
                    cur_dir = new File(pre_upload_directory, String.valueOf(upload_count));
                    upload_count++;
                }
                cur_dir.mkdir();
                // One subfolder per configured target dimension.
                Iterator<String> dimmensions = wallpaper_service.getDimmensions().keySet().iterator();
                while (dimmensions.hasNext()) {
                    String dimmension = dimmensions.next();
                    File pre_uploaded_dimm = new File(cur_dir, dimmension);
                    if (!pre_uploaded_dimm.exists())
                        pre_uploaded_dimm.mkdir();
                }
                upload_count++;
            }
            File f = files.removeLast();
            pre_uploaded = false;
            upload_made = false;
            if (f.isDirectory()) {
                id_pages_cur = null;
                page_name = null;
                // Search for DESCRIPTION_FILE to learn the target page id/name.
                description_file = new File(f, DESCRIPTION_FILE);
                if (description_file.exists()) {
                    id_pages_cur = null;
                    try {
                        // NOTE(review): reader is never closed — presumably tolerated
                        // because descriptions are tiny; confirm and consider closing.
                        BufferedReader reader = new BufferedReader(
                                new InputStreamReader(new FileInputStream(description_file), "UTF-8"));
                        String line;
                        while ((line = reader.readLine()) != null) {
                            if (line.startsWith("id=")) {
                                id_pages_cur = Long.parseLong(line.substring(3), 10);
                            } else if (line.startsWith("name=")) {
                                page_name = line.substring(5);
                            } else if (line.startsWith("pre_uploaded=true")) {
                                // Means that this folder contains subfolders with pre-uploaded
                                // images, i.e. wallpapers are already resized and stored in the
                                // appropriate folders — but they still must be checked.
                                pre_uploaded = true;
                            }
                        }
                    } catch (IOException ex) {
                        logger.error("", ex);
                    }
                }
                File[] files_temp = f.listFiles();
                for (File tmp : files_temp) {
                    if (tmp.isFile()) {
                        // Only scale images when the directory declared a page id.
                        if (!tmp.getName().equals(DESCRIPTION_FILE) && id_pages_cur != null) {
                            usb.setCur_name(tmp.getAbsolutePath());
                            logger.debug("preparing upload file: '" + tmp.getAbsolutePath() + "'");
                            if (Utils.saveScaledImageFileToDisk(tmp, wallpaper_service.getDimmensions(),
                                    cur_dir)) {
                                // Source image consumed; count it and mark this batch dirty.
                                tmp.delete();
                                usb.increaseDone(1);
                                upload_made = true;
                            }
                        } //else error
                    } else if (!pre_uploaded) {
                        // Descend into subdirectories unless they hold pre-uploaded content.
                        files.addLast(tmp);
                    }
                }
                // Create a description file for the batch just produced.
                if (upload_made) {
                    createDescriptionFile(cur_dir, id_pages_cur, page_name, true);
                    cur_dir = null;
                }
            }
        }
        // Remove the last allocated directory if it ended up unused (no description).
        if (cur_dir != null) {
            description_file = new File(cur_dir, DESCRIPTION_FILE);
            if (!description_file.exists())
                FileUtils.deleteFiles(cur_dir, true);
        }
    }
}
From source file:org.nuxeo.elasticsearch.query.NxqlQueryConverter.java
/**
 * Converts an NXQL query string into an Elasticsearch {@link QueryBuilder}.
 *
 * The NXQL AST is walked with a visitor; a LinkedList of ExpressionBuilders acts as
 * a stack so that AND/OR/NOT sub-expressions are assembled bottom-up (push on enter,
 * removeLast on exit, merge into the parent). The FROM clause is translated into an
 * additional ecm:primaryType IN (...) filter unless it is the generic Document type.
 *
 * @param nxql    the NXQL query to convert
 * @param session if non-null, security policies for this session are folded into
 *                the query before conversion
 * @return the equivalent Elasticsearch query
 */
public static QueryBuilder toESQueryBuilder(final String nxql, final CoreSession session) {
    // Stack of in-progress boolean expressions; the bottom element is the result.
    final LinkedList<ExpressionBuilder> builders = new LinkedList<>();
    SQLQuery nxqlQuery = getSqlQuery(nxql);
    if (session != null) {
        nxqlQuery = addSecurityPolicy(session, nxqlQuery);
    }
    final ExpressionBuilder ret = new ExpressionBuilder(null);
    builders.add(ret);
    // Document types collected from the FROM clause (empty means "all types").
    final ArrayList<String> fromList = new ArrayList<>();
    nxqlQuery.accept(new DefaultQueryVisitor() {

        private static final long serialVersionUID = 1L;

        @Override
        public void visitFromClause(FromClause node) {
            FromList elements = node.elements;
            SchemaManager schemaManager = Framework.getLocalService(SchemaManager.class);
            for (int i = 0; i < elements.size(); i++) {
                String type = elements.get(i);
                if (NXQLQueryMaker.TYPE_DOCUMENT.equalsIgnoreCase(type)) {
                    // From Document means all doc types
                    fromList.clear();
                    return;
                }
                // Expand each type to itself plus all of its subtypes.
                Set<String> types = schemaManager.getDocumentTypeNamesExtending(type);
                if (types != null) {
                    fromList.addAll(types);
                }
            }
        }

        @Override
        public void visitMultiExpression(MultiExpression node) {
            // Visit operands interleaved with the operator so visitExpression-style
            // handling sees the same sequence as a binary expression chain.
            for (Iterator<Operand> it = node.values.iterator(); it.hasNext();) {
                it.next().accept(this);
                if (it.hasNext()) {
                    node.operator.accept(this);
                }
            }
        }

        @Override
        public void visitSelectClause(SelectClause node) {
            // NOP: selected columns do not influence the ES query.
        }

        @Override
        public void visitExpression(Expression node) {
            Operator op = node.operator;
            if (op == Operator.AND || op == Operator.OR || op == Operator.NOT) {
                // Composite operator: push a child builder, recurse, pop and merge.
                builders.add(new ExpressionBuilder(op.toString()));
                super.visitExpression(node);
                ExpressionBuilder expr = builders.removeLast();
                if (!builders.isEmpty()) {
                    builders.getLast().merge(expr);
                }
            } else {
                // Leaf comparison: extract field name, scalar value and/or value list.
                Reference ref = node.lvalue instanceof Reference ? (Reference) node.lvalue : null;
                String name = ref != null ? ref.name : node.lvalue.toString();
                String value = null;
                if (node.rvalue instanceof Literal) {
                    value = ((Literal) node.rvalue).asString();
                } else if (node.rvalue != null) {
                    value = node.rvalue.toString();
                }
                Object[] values = null;
                if (node.rvalue instanceof LiteralList) {
                    LiteralList items = (LiteralList) node.rvalue;
                    values = new Object[items.size()];
                    int i = 0;
                    for (Literal item : items) {
                        values[i++] = item.asString();
                    }
                }
                // add expression to the last builder
                EsHint hint = (ref != null) ? ref.esHint : null;
                builders.getLast()
                        .add(makeQueryFromSimpleExpression(op.toString(), name, value, values, hint, session));
            }
        }
    });
    QueryBuilder queryBuilder = ret.get();
    if (!fromList.isEmpty()) {
        // Restrict by the document types gathered from the FROM clause.
        return QueryBuilders.boolQuery().must(queryBuilder).filter(makeQueryFromSimpleExpression("IN",
                NXQL.ECM_PRIMARYTYPE, null, fromList.toArray(), null, null).filter);
    }
    return queryBuilder;
}
From source file:org.nuxeo.ecm.core.storage.dbs.DBSSession.java
protected String copyRecurse(String sourceId, String parentId, LinkedList<String> ancestorIds, String name) { String copyId = copy(sourceId, parentId, ancestorIds, name); ancestorIds.addLast(copyId);/*from w ww .ja v a 2 s . co m*/ for (String childId : getChildrenIds(sourceId)) { copyRecurse(childId, copyId, ancestorIds, null); } ancestorIds.removeLast(); return copyId; }
From source file:gsn.http.datarequest.DownloadData.java
@Override public void outputResult(OutputStream os) { PrintWriter respond = new PrintWriter(os); Iterator<Entry<String, AbstractQuery>> iter = qbuilder.getSqlQueries().entrySet().iterator(); Entry<String, AbstractQuery> nextSqlQuery; DataEnumerator de = null;/*from www.j av a 2 s . c om*/ try { if (ot == AllowedOutputType.xml) { respond.println("<result>"); } while (iter.hasNext()) { nextSqlQuery = iter.next(); Connection connection = null; connection = Main.getStorage(nextSqlQuery.getKey()).getConnection(); de = Main.getStorage(nextSqlQuery.getKey()).streamedExecuteQuery(nextSqlQuery.getValue(), true, connection); //get units in hash map HashMap<String, String> fieldToUnitMap = new HashMap<String, String>(); VSensorConfig sensorConfig = Mappings.getVSensorConfig(nextSqlQuery.getKey()); DataField[] dataFieldArray = sensorConfig.getOutputStructure(); for (DataField df : dataFieldArray) { String unit = df.getUnit(); if (unit == null || unit.trim().length() == 0) unit = ""; fieldToUnitMap.put(df.getName().toLowerCase(), unit); } logger.debug("Data Enumerator: " + de); if (ot == AllowedOutputType.csv) { respond.println("# vsname:" + nextSqlQuery.getKey()); respond.println("# query:" + nextSqlQuery.getValue().getStandardQuery() + (nextSqlQuery.getValue().getLimitCriterion() == null ? 
"" : "(" + nextSqlQuery.getValue().getLimitCriterion() + ")")); for (KeyValue df : sensorConfig.getAddressing()) { respond.println( "# " + df.getKey().toString().toLowerCase() + ":" + df.getValue().toString()); } respond.println("# description:" + sensorConfig.getDescription()); } else if (ot == AllowedOutputType.xml) { respond.println("\t<!-- " + nextSqlQuery.getValue().getStandardQuery() + " -->"); for (KeyValue df : sensorConfig.getAddressing()) { respond.println( "\t<!-- " + StringEscapeUtils.escapeXml(df.getKey().toString().toLowerCase()) + ":" + StringEscapeUtils.escapeXml(df.getValue().toString()) + " -->"); } respond.println("\t<!-- description:" + StringEscapeUtils.escapeXml(sensorConfig.getDescription()) + " -->"); respond.println("\t<data vsname=\"" + nextSqlQuery.getKey() + "\">"); } FieldsCollection fc = qbuilder.getVsnamesAndStreams().get(nextSqlQuery.getKey()); boolean wantTimed = true; boolean firstLine = true; LinkedList<StreamElement> streamElements = new LinkedList<StreamElement>(); while (de.hasMoreElements()) { streamElements.add(de.nextElement()); } double valsPerVS = MAX_SAMPLE_VALUES / numberOfFieldsInRequest(); if (requestParameters.containsKey("sample") && "true".equalsIgnoreCase(requestParameters.get("sample")[0]) && streamElements.size() > valsPerVS) { //sampling int numOfVals = streamElements.size(); int left = (int) valsPerVS; int valsForAvg = (int) Math.ceil(numOfVals / valsPerVS); if (requestParameters.containsKey("sampling_percentage")) { try { String percentageString = requestParameters.get("sampling_percentage")[0]; int percentage = Integer.parseInt(percentageString); if (percentage > 0 && percentage <= 100 && numOfVals * percentage > 100) { left = numOfVals * percentage / 100; valsForAvg = (int) Math.ceil(numOfVals / left); } } catch (Exception e) { } } while (!streamElements.isEmpty()) { StreamElement se = null; if (numOfVals > left) { StreamElement[] seForSampling = new StreamElement[valsForAvg]; for (int i = 0; i < valsForAvg; 
i++) { seForSampling[i] = streamElements.removeLast(); } numOfVals -= valsForAvg; left--; se = sampleSkip(seForSampling); } else { se = streamElements.removeLast(); } if (ot == AllowedOutputType.csv) { formatCSVElement(respond, se, wantTimed, csvDelimiter, firstLine, fieldToUnitMap); } else if (ot == AllowedOutputType.xml) { formatXMLElement(respond, se, wantTimed, firstLine, fieldToUnitMap); } firstLine = false; } } else { while (!streamElements.isEmpty()) { if (ot == AllowedOutputType.csv) { formatCSVElement(respond, streamElements.removeLast(), wantTimed, csvDelimiter, firstLine, fieldToUnitMap); } else if (ot == AllowedOutputType.xml) { formatXMLElement(respond, streamElements.removeLast(), wantTimed, firstLine, fieldToUnitMap); } firstLine = false; } } if (ot == AllowedOutputType.xml) respond.println("\t</data>"); } if (ot == AllowedOutputType.xml) { respond.println("</result>"); } } catch (SQLException e) { logger.debug(e.getMessage()); } finally { respond.flush(); if (de != null) de.close(); } }
From source file:com.hipu.bdb.util.FileUtils.java
/** * Retrieve a number of lines from the file around the given * position, as when paging forward or backward through a file. * /*from www. j a v a 2 s.com*/ * @param file File to retrieve lines * @param position offset to anchor lines * @param signedDesiredLineCount lines requested; if negative, * want this number of lines ending with a line containing * the position; if positive, want this number of lines, * all starting at or after position. * @param lines List<String> to insert found lines * @param lineEstimate int estimate of line size, 0 means use default * of 128 * @return LongRange indicating the file offsets corresponding to * the beginning of the first line returned, and the point * after the end of the last line returned * @throws IOException */ @SuppressWarnings("unchecked") public static LongRange pagedLines(File file, long position, int signedDesiredLineCount, List<String> lines, int lineEstimate) throws IOException { // consider negative positions as from end of file; -1 = last byte if (position < 0) { position = file.length() + position; } // calculate a reasonably sized chunk likely to have all desired lines if (lineEstimate == 0) { lineEstimate = 128; } int desiredLineCount = Math.abs(signedDesiredLineCount); long startPosition; long fileEnd = file.length(); int bufferSize = (desiredLineCount + 5) * lineEstimate; if (signedDesiredLineCount > 0) { // reading forward; include previous char in case line-end startPosition = position - 1; } else { // reading backward startPosition = position - bufferSize + (2 * lineEstimate); } if (startPosition < 0) { startPosition = 0; } if (startPosition + bufferSize > fileEnd) { bufferSize = (int) (fileEnd - startPosition); } // read that reasonable chunk FileInputStream fis = new FileInputStream(file); fis.getChannel().position(startPosition); byte[] buf = new byte[bufferSize]; IOUtils.closeQuietly(fis); // find all line starts fully in buffer // (positions after a line-end, per line-end definition in // 
BufferedReader.readLine) LinkedList<Integer> lineStarts = new LinkedList<Integer>(); if (startPosition == 0) { lineStarts.add(0); } boolean atLineEnd = false; boolean eatLF = false; int i; for (i = 0; i < bufferSize; i++) { if ((char) buf[i] == '\n' && eatLF) { eatLF = false; continue; } if (atLineEnd) { atLineEnd = false; lineStarts.add(i); if (signedDesiredLineCount < 0 && startPosition + i > position) { // reached next line past position, read no more break; } } if ((char) buf[i] == '\r') { atLineEnd = true; eatLF = true; continue; } if ((char) buf[i] == '\n') { atLineEnd = true; } } if (startPosition + i == fileEnd) { // add phantom lineStart after end lineStarts.add(bufferSize); } int foundFullLines = lineStarts.size() - 1; // if found no lines if (foundFullLines < 1) { if (signedDesiredLineCount > 0) { if (startPosition + bufferSize == fileEnd) { // nothing more to read: return nothing return new LongRange(fileEnd, fileEnd); } else { // retry with larger lineEstimate return pagedLines(file, position, signedDesiredLineCount, lines, Math.max(bufferSize, lineEstimate)); } } else { // try again with much larger line estimate // TODO: fail gracefully before growing to multi-MB buffers return pagedLines(file, position, signedDesiredLineCount, lines, bufferSize); } } // trim unneeded lines while (signedDesiredLineCount > 0 && startPosition + lineStarts.getFirst() < position) { // discard lines starting before desired position lineStarts.removeFirst(); } while (lineStarts.size() > desiredLineCount + 1) { if (signedDesiredLineCount < 0 && (startPosition + lineStarts.get(1) <= position)) { // discard from front until reach line containing target position lineStarts.removeFirst(); } else { lineStarts.removeLast(); } } int firstLine = lineStarts.getFirst(); int partialLine = lineStarts.getLast(); LongRange range = new LongRange(startPosition + firstLine, startPosition + partialLine); List<String> foundLines = IOUtils .readLines(new ByteArrayInputStream(buf, firstLine, 
partialLine - firstLine)); if (foundFullLines < desiredLineCount && signedDesiredLineCount < 0 && startPosition > 0) { // if needed and reading backward, read more lines from earlier range = expandRange(range, pagedLines(file, range.getMinimumLong() - 1, signedDesiredLineCount + foundFullLines, lines, bufferSize / foundFullLines)); } lines.addAll(foundLines); if (signedDesiredLineCount < 0 && range.getMaximumLong() < position) { // did not get line containining start position range = expandRange(range, pagedLines(file, partialLine, 1, lines, bufferSize / foundFullLines)); } if (signedDesiredLineCount > 0 && foundFullLines < desiredLineCount && range.getMaximumLong() < fileEnd) { // need more forward lines range = expandRange(range, pagedLines(file, range.getMaximumLong(), desiredLineCount - foundFullLines, lines, bufferSize / foundFullLines)); } return range; }
From source file:org.archive.util.FileUtils.java
/**
 * Retrieve a number of lines from the file around the given
 * position, as when paging forward or backward through a file.
 *
 * @param file File to retrieve lines from
 * @param position offset to anchor lines
 * @param signedDesiredLineCount lines requested; if negative,
 * want this number of lines ending with a line containing
 * the position; if positive, want this number of lines,
 * all starting at or after position.
 * @param lines List&lt;String&gt; to insert found lines
 * @param lineEstimate int estimate of line size, 0 means use default
 * of 128
 * @return LongRange indicating the file offsets corresponding to
 * the beginning of the first line returned, and the point
 * after the end of the last line returned
 * @throws IOException
 */
@SuppressWarnings("unchecked")
public static LongRange pagedLines(File file, long position, int signedDesiredLineCount, List<String> lines,
        int lineEstimate) throws IOException {
    // consider negative positions as from end of file; -1 = last byte
    if (position < 0) {
        position = file.length() + position;
    }
    // calculate a reasonably sized chunk likely to have all desired lines
    if (lineEstimate == 0) {
        lineEstimate = 128;
    }
    int desiredLineCount = Math.abs(signedDesiredLineCount);
    long startPosition;
    long fileEnd = file.length();
    int bufferSize = (desiredLineCount + 5) * lineEstimate;
    if (signedDesiredLineCount > 0) {
        // reading forward; include previous char in case line-end
        startPosition = position - 1;
    } else {
        // reading backward
        startPosition = position - bufferSize + (2 * lineEstimate);
    }
    if (startPosition < 0) {
        startPosition = 0;
    }
    if (startPosition + bufferSize > fileEnd) {
        bufferSize = (int) (fileEnd - startPosition);
    }
    // read that reasonable chunk
    FileInputStream fis = new FileInputStream(file);
    fis.getChannel().position(startPosition);
    byte[] buf = new byte[bufferSize];
    ArchiveUtils.readFully(fis, buf);
    IOUtils.closeQuietly(fis);
    // find all line starts fully in buffer
    // (positions after a line-end, per line-end definition in
    // BufferedReader.readLine)
    LinkedList<Integer> lineStarts = new LinkedList<Integer>();
    if (startPosition == 0) {
        lineStarts.add(0);
    }
    // Scan for CR, LF and CRLF terminators; the index *after* each terminator
    // is recorded as a line start.
    boolean atLineEnd = false;
    boolean eatLF = false;
    int i;
    for (i = 0; i < bufferSize; i++) {
        if ((char) buf[i] == '\n' && eatLF) {
            eatLF = false;
            continue;
        }
        if (atLineEnd) {
            atLineEnd = false;
            lineStarts.add(i);
            if (signedDesiredLineCount < 0 && startPosition + i > position) {
                // reached next line past position, read no more
                break;
            }
        }
        if ((char) buf[i] == '\r') {
            atLineEnd = true;
            eatLF = true;
            continue;
        }
        if ((char) buf[i] == '\n') {
            atLineEnd = true;
        }
    }
    if (startPosition + i == fileEnd) {
        // add phantom lineStart after end
        lineStarts.add(bufferSize);
    }
    int foundFullLines = lineStarts.size() - 1;
    // if found no lines
    if (foundFullLines < 1) {
        if (signedDesiredLineCount > 0) {
            if (startPosition + bufferSize == fileEnd) {
                // nothing more to read: return nothing
                return new LongRange(fileEnd, fileEnd);
            } else {
                // retry with larger lineEstimate
                return pagedLines(file, position, signedDesiredLineCount, lines,
                        Math.max(bufferSize, lineEstimate));
            }
        } else {
            // try again with much larger line estimate
            // TODO: fail gracefully before growing to multi-MB buffers
            return pagedLines(file, position, signedDesiredLineCount, lines, bufferSize);
        }
    }
    // trim unneeded lines
    while (signedDesiredLineCount > 0 && startPosition + lineStarts.getFirst() < position) {
        // discard lines starting before desired position
        lineStarts.removeFirst();
    }
    while (lineStarts.size() > desiredLineCount + 1) {
        if (signedDesiredLineCount < 0 && (startPosition + lineStarts.get(1) <= position)) {
            // discard from front until reach line containing target position
            lineStarts.removeFirst();
        } else {
            lineStarts.removeLast();
        }
    }
    int firstLine = lineStarts.getFirst();
    int partialLine = lineStarts.getLast();
    LongRange range = new LongRange(startPosition + firstLine, startPosition + partialLine);
    List<String> foundLines = IOUtils
            .readLines(new ByteArrayInputStream(buf, firstLine, partialLine - firstLine));
    if (foundFullLines < desiredLineCount && signedDesiredLineCount < 0 && startPosition > 0) {
        // if needed and reading backward, read more lines from earlier
        range = expandRange(range, pagedLines(file, range.getMinimumLong() - 1,
                signedDesiredLineCount + foundFullLines, lines, bufferSize / foundFullLines));
    }
    lines.addAll(foundLines);
    if (signedDesiredLineCount < 0 && range.getMaximumLong() < position) {
        // did not get line containining start position
        range = expandRange(range, pagedLines(file, partialLine, 1, lines, bufferSize / foundFullLines));
    }
    if (signedDesiredLineCount > 0 && foundFullLines < desiredLineCount && range.getMaximumLong() < fileEnd) {
        // need more forward lines
        range = expandRange(range, pagedLines(file, range.getMaximumLong(),
                desiredLineCount - foundFullLines, lines, bufferSize / foundFullLines));
    }
    return range;
}
From source file:de.tudarmstadt.ukp.wikipedia.parser.mediawiki.ModularParser.java
private void getLineSpans(SpanManager sm, LinkedList<Span> lineSpans) { sm.manageList(lineSpans);//from w ww .j ava 2 s .com int start = 0; int end; while ((end = sm.indexOf(lineSeparator, start)) != -1) { lineSpans.add(new Span(start, end).trimTrail(sm)); start = end + lineSeparator.length(); } lineSpans.add(new Span(start, sm.length()).trimTrail(sm)); while (!lineSpans.isEmpty() && lineSpans.getFirst().length() == 0) { lineSpans.removeFirst(); } while (!lineSpans.isEmpty() && lineSpans.getLast().length() == 0) { lineSpans.removeLast(); } }
From source file:org.artifactory.repo.service.RepositoryServiceImpl.java
private ItemInfo collectLastModified(RepoPath pathToSearch) { TreeBrowsingCriteria criteria = new TreeBrowsingCriteriaBuilder().applySecurity().build(); ItemTree itemTree = new ItemTree(pathToSearch, criteria); LinkedList<ItemNode> fringe = Lists.newLinkedList(); fringe.add(itemTree.getRootNode());/*from ww w . j a v a 2 s. c o m*/ ItemInfo lastModified = null; while (!fringe.isEmpty()) { ItemNode last = fringe.removeLast(); if (last.hasChildren()) { fringe.addAll(last.getChildren()); } if (!last.isFolder()) { if (lastModified == null || last.getItemInfo().getLastModified() > lastModified.getLastModified()) { lastModified = last.getItemInfo(); } } } return lastModified; }
From source file:com.rapleaf.hank.storage.incremental.IncrementalPartitionUpdater.java
/**
 * Return the list of versions needed to update to the specific version given that
 * the specified current version and cached bases are available.
 *
 * Walks parent links backwards from {@code updatingToVersion}, skipping defunct
 * versions, until it reaches a base (no parent), the current version, or a cached
 * base. The last version collected becomes the plan's base; the rest (reversed
 * into forward order) become its deltas.
 *
 * @param currentVersion the version currently installed, or null if none
 * @param cachedBases base versions already available locally
 * @param updatingToVersion the target version
 * @return the update plan, or null when no work is needed (already at target)
 * @throws java.io.IOException if a version on the path is still open
 */
protected IncrementalUpdatePlan computeUpdatePlan(DomainVersion currentVersion, Set<DomainVersion> cachedBases,
        DomainVersion updatingToVersion) throws IOException {
    LinkedList<DomainVersion> updatePlanVersions = new LinkedList<DomainVersion>();
    // Backtrack versions (ignoring defunct versions) until we find:
    // - a base (no parent)
    // - or the current version (which is by definition a base or a rebased delta)
    // - or a version that is a base and that is cached
    DomainVersion parentVersion = updatingToVersion;
    while (parentVersion != null) {
        // Ignore completely defunct versions
        if (!parentVersion.isDefunct()) {
            // If a version along the path is still open, abort
            if (!DomainVersions.isClosed(parentVersion)) {
                throw new IOException("Detected a domain version that is still open"
                        + " along the path from current version to version to update to: " + " domain: " + domain
                        + " open version: " + parentVersion + " current version: " + currentVersion
                        + " updating to version: " + updatingToVersion);
            }
            // If backtrack to current version, use it and stop backtracking
            if (currentVersion != null && parentVersion.equals(currentVersion)) {
                // If we only need the current version, we don't need any plan
                if (updatePlanVersions.isEmpty()) {
                    return null;
                } else {
                    updatePlanVersions.add(parentVersion);
                    break;
                }
            }
            // If backtrack to cached base version, use it and stop backtracking
            if (cachedBases.contains(parentVersion)) {
                updatePlanVersions.add(parentVersion);
                break;
            }
            // Add backtracked version to versions needed
            updatePlanVersions.add(parentVersion);
        }
        // Move to parent version
        parentVersion = getParentDomainVersion(parentVersion);
    }
    if (updatePlanVersions.isEmpty()) {
        return null;
    }
    // The base is the last version that was added (a base, the current version or a cached base)
    DomainVersion base = updatePlanVersions.removeLast();
    // Reverse list of deltas as we have added versions going backwards
    Collections.reverse(updatePlanVersions);
    return new IncrementalUpdatePlan(base, updatePlanVersions);
}
From source file:com.redhat.persistence.oql.QFrame.java
/**
 * Renders this query frame into the SQL join structure: emits a leaf JFrame for
 * this frame's table (if any), recursively renders child frames, and then places
 * this frame's condition either in the WHERE list, in an existing frame's join
 * condition, or by explicitly joining frames off the front of {@code joins}.
 *
 * @param joins   LinkedList of JFrame used as a working deque of pending joins
 * @param where   accumulates conditions destined for the WHERE clause
 * @param oroot   outer-join root frame for conditions emitted in this subtree
 * @param root    overall root frame of the render
 * @param emitted set of Code conditions already placed (prevents duplicates)
 */
private void render(LinkedList joins, List where, QFrame oroot, QFrame root, Set emitted) {
    // If the first non empty frame is outer we treat it as inner.
    if (m_outer && !joins.isEmpty()) {
        oroot = this;
    }
    // Emit "table alias" (or "(expr) alias") for this frame, unless it duplicates
    // another frame's table.
    Code table = null;
    if (m_table != null && m_duplicate == null) {
        table = new Code(m_table).add(" ").add(alias());
    } else if (m_tableExpr != null && m_duplicate == null) {
        table = m_tableExpr.emit(m_generator).add(" ").add(alias());
    }
    if (table != null) {
        joins.addFirst(JFrame.leaf(table, this, oroot));
    }
    // Children render first so their frames sit at the front of `joins`.
    List children = getChildren();
    for (int i = 0; i < children.size(); i++) {
        QFrame child = (QFrame) children.get(i);
        child.render(joins, where, oroot, root, emitted);
    }
    if (m_condition != null) {
        Code c = m_condition.emit(m_generator);
        if (!c.isTrue() && !emitted.contains(c)) {
            // m_used collects the frames the condition references; frames defined by
            // already-seen JFrames are removed until none remain.
            m_used.clear();
            frames(m_condition, m_used);
            boolean join = false;
            for (Iterator it = joins.iterator(); it.hasNext();) {
                JFrame frame = (JFrame) it.next();
                boolean modified = m_used.removeAll(frame.defined);
                if (m_used.isEmpty()) {
                    // We default to putting things in the where
                    // clause here because oracle won't resolve
                    // external variable references correctly when
                    // they appear in join conditions.
                    // NOTE(review): there is no break after placing the condition, so
                    // later iterations with an already-empty m_used would place it
                    // again — verify against upstream whether a break was lost here.
                    if (oroot.equals(root)) {
                        where.add(c);
                    } else if (frame.froot != null && oroot.equals(frame.froot)) {
                        frame.join = frame.join.add(" and ").add(c);
                    } else {
                        throw new IllegalStateException(
                                "unable to place condition: " + m_condition + " " + c + trace(joins));
                    }
                } else if (modified) {
                    // Condition straddles this frame and earlier ones: must join.
                    join = true;
                    break;
                }
            }
            if (join) {
                // Fold frames off the front of `joins` (crossing or skipping as
                // needed) until all variables the condition uses are defined, then
                // push the combined join frame back.
                JFrame right = (JFrame) joins.removeFirst();
                if (joins.isEmpty()) {
                    throw new IllegalStateException(
                            "unresolved variable in condition: " + m_condition + " " + c + trace(joins));
                }
                LinkedList skipped = null;
                JFrame left = (JFrame) joins.removeFirst();
                while (true) {
                    m_used.clear();
                    frames(m_condition, m_used);
                    m_used.removeAll(right.defined);
                    boolean cross = m_used.removeAll(left.defined);
                    if (m_used.isEmpty()) {
                        joins.addFirst(JFrame.join(left, right, c));
                        break;
                    } else if (joins.isEmpty()) {
                        throw new IllegalStateException(
                                "unresolved variable in condition: " + m_condition + " " + c + trace(joins));
                    } else if (cross) {
                        // `left` contributes variables but isn't sufficient: widen it
                        // with a cross join against the next frame.
                        JFrame lefter = (JFrame) joins.removeFirst();
                        left = JFrame.cross(lefter, left);
                    } else {
                        // `left` contributes nothing: set it aside and try the next.
                        if (skipped == null) {
                            skipped = new LinkedList();
                        }
                        skipped.addLast(left);
                        left = (JFrame) joins.removeFirst();
                    }
                }
                // Restore skipped frames in their original order.
                if (skipped != null) {
                    while (!skipped.isEmpty()) {
                        joins.addFirst(skipped.removeLast());
                    }
                }
            }
            emitted.add(c);
        }
    }
}