List of usage examples for java.util.List listIterator()
ListIterator<E> listIterator();
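Before the real-world examples, here is a minimal standalone sketch of the ListIterator operations they rely on (forward and backward traversal, nextIndex(), and in-place set()); the list contents are made up for illustration:

import java.util.ArrayList;
import java.util.List;
import java.util.ListIterator;

public class ListIteratorBasics {
    public static void main(String[] args) {
        List<String> names = new ArrayList<>(List.of("alpha", "beta", "gamma"));
        ListIterator<String> it = names.listIterator();
        while (it.hasNext()) {
            int index = it.nextIndex();   // index of the element about to be returned
            String value = it.next();
            if (value.equals("beta")) {
                it.set("BETA");           // replace the last element returned by next()
            }
            System.out.println(index + ": " + value);
        }
        // walk backwards from the end using the same iterator
        while (it.hasPrevious()) {
            System.out.println("reverse: " + it.previous());
        }
    }
}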
From source file:com.oneops.ops.dao.PerfDataAccessor.java
public void purgeMetrics(long time, String bucket) {
    int rowCount = 0;
    int totalColCount = 0;
    int totalColDeleted = 0;
    Long start = 1L;
    Long end = time;
    // safeguard not to delete anything in past week
    long now = System.currentTimeMillis() / 1000;
    logger.info(" now: " + now);
    logger.info("startEpoch: " + time);
    if (time + (60 * 60 * 24 * 7) > now) {
        logger.error("input time too soon - cannot be within past week");
        return;
    }
    int maxColumns = (int) (end - start);
    int pageSize = 1000;
    String lastKey = null;
    Mutator<byte[]> mutator = HFactory.createMutator(keyspace, bytesSerializer);
    RangeSlicesQuery<String, Long, Double> query = HFactory
            .createRangeSlicesQuery(keyspace, stringSerializer, longSerializer, doubleSerializer)
            .setColumnFamily(DATA_CF).setReturnKeysOnly().setRowCount(pageSize);
    while (true) {
        query.setKeys(lastKey, null);
        QueryResult<OrderedRows<String, Long, Double>> result = query.execute();
        OrderedRows<String, Long, Double> orderedRows = result.get();
        Iterator<Row<String, Long, Double>> rowsIterator = orderedRows.iterator();
        // we'll skip this first one, since it is the same as the last one
        // from previous time we executed
        if (lastKey != null && rowsIterator != null)
            rowsIterator.next();
        while (rowsIterator.hasNext()) {
            Row<String, Long, Double> row = rowsIterator.next();
            if (!row.getKey().endsWith("-" + bucket)) {
                continue;
            }
            rowCount++;
            lastKey = row.getKey();
            List<byte[]> keys = new ArrayList<byte[]>();
            keys.add(row.getKey().getBytes());
            MultigetSliceQuery<byte[], Long, Double> multigetSliceQuery = HFactory
                    .createMultigetSliceQuery(keyspace, bytesSerializer, longSerializer, doubleSerializer)
                    .setColumnFamily(DATA_CF).setKeys(keys).setRange(start, end, false, maxColumns);
            QueryResult<Rows<byte[], Long, Double>> colResult = multigetSliceQuery.execute();
            Rows<byte[], Long, Double> rows = colResult.get();
            int sampleCount = 0;
            int deletedCount = 0;
            for (Row<byte[], Long, Double> rowResult : rows) {
                List<HColumn<Long, Double>> cols = rowResult.getColumnSlice().getColumns();
                Iterator<HColumn<Long, Double>> listIter = cols.listIterator();
                while (listIter.hasNext()) {
                    HColumn<Long, Double> c = (HColumn<Long, Double>) listIter.next();
                    if (c.getName() < time) {
                        mutator.addDeletion(row.getKey().getBytes(), DATA_CF, c.getName(), longSerializer);
                        deletedCount++;
                    }
                    sampleCount++;
                }
                totalColDeleted += deletedCount;
                totalColCount += sampleCount;
                mutator.execute();
            }
            logger.info(row.getKey() + ": " + sampleCount + " deleted: " + deletedCount);
            if (rows.getCount() < pageSize)
                break;
        }
        logger.info("rows: " + rowCount + " cols: " + totalColCount + " deleted: " + totalColDeleted);
        if (orderedRows.getCount() < pageSize)
            break;
    }
}
From source file:com.redhat.rhn.frontend.action.errata.ErrataSearchAction.java
protected List performSearch(HttpServletRequest request, Long sessionId, String searchString, String mode,
        DynaActionForm formIn) throws XmlRpcFault, MalformedURLException {
    LOG.debug("Performing errata search");
    RequestContext ctx = new RequestContext(request);
    Org org = ctx.getCurrentUser().getOrg();
    // call search server
    XmlRpcClient client = new XmlRpcClient(ConfigDefaults.get().getSearchServerUrl(), true);
    String path = null;
    List args = new ArrayList();
    args.add(sessionId);
    // do a package search instead of an errata one. This uses
    // a different lucene index to find pkgs then reconciles
    // them with the errata later.
    if (OPT_PKG_NAME.equals(mode)) {
        args.add("package");
    } else {
        args.add("errata");
    }
    List results = new ArrayList();
    //
    // Note: This is how "issue date" search works.
    // It functions in one of 2 ways, depending on the state of "searchString"
    // 1) It's a database lookup for all errata issued between the given range
    // - OR -
    // 2) It's a filter performed AFTER the regular search.
    //
    // The database lookup happens when no searchString was specified,
    // i.e. searchString is blank. This signifies to do a full lookup to the
    // database....through the search-server as "db.search".
    //
    // The second responsibility is to filter results from a returned search.
    // This will happen when searchString is not empty AND issue date search
    // has been activated. Search will proceed as normal, then the final step
    // will be to filter the results by issue date.
    //
    Boolean dateSearch = getOptionIssueDateSearch(request);
    LOG.debug("Datesearch is " + dateSearch);
    Date startDate = getPickerDate(request, "start");
    Date endDate = getPickerDate(request, "end");
    if (dateSearch && StringUtils.isBlank(searchString)) {
        // this is a full issue date search, not just a filter
        args.add("listErrataByIssueDateRange:(" + getDateString(startDate) + ", "
                + getDateString(endDate) + ")");
    } else {
        args.add(preprocessSearchString(searchString, mode));
    }
    if ((dateSearch && StringUtils.isBlank(searchString)) || OPT_CVE.equals(mode)) {
        // Tells search server to search the database
        path = "db.search";
    } else {
        Boolean fineGrained = (Boolean) formIn.get(FINE_GRAINED);
        args.add(fineGrained);
        // Tells search server to use the lucene index
        path = "index.search";
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug("Calling to search server (XMLRPC): \"index.search\", args=" + args);
    }
    results = (List) client.invoke(path, args);
    if (LOG.isDebugEnabled()) {
        LOG.debug("results = [" + results + "]");
    }
    if (results.isEmpty()) {
        return Collections.emptyList();
    }
    // need to make the search server results usable by database
    // so we can get the actual results we are to display to the user.
    // also save the items into a Map for lookup later.
    List<Long> ids = new ArrayList<Long>();
    Map<Long, Integer> lookupmap = new HashMap<Long, Integer>();
    // do it in reverse because the search server can return more than one
    // record for a given package name, but that means if we don't go
    // in reverse we risk getting the wrong rank in the lookupmap.
    // for example, [{id:125,name:gtk},{id:127,name:gtk}{id:200,name:kernel}]
    // if we go forward we end up with gtk:1 and kernel:2 but we wanted
    // kernel:2, gtk:0.
    for (int x = results.size() - 1; x >= 0; x--) {
        Map item = (Map) results.get(x);
        lookupmap.put(new Long((String) item.get("id")), x);
        Long id = new Long((String) item.get("id"));
        ids.add(id);
    }
    // The database does not maintain the order of the where clause.
    // In order to maintain the ranking from the search server, we
    // need to reorder the database results to match. This will lead
    // to a better user experience.
    List<ErrataOverview> unsorted = new ArrayList<ErrataOverview>();
    if (OPT_PKG_NAME.equals(mode)) {
        unsorted = ErrataManager.searchByPackageIdsWithOrg(ids, ctx.getCurrentUser().getOrg());
    } else {
        unsorted = fleshOutErrataOverview(ids, org);
    }
    if (OPT_CVE.equals(mode)) {
        // Flesh out all CVEs for each errata returned; generally this is a
        // small number of Errata to operate on.
        for (ErrataOverview eo : unsorted) {
            DataResult dr = ErrataManager.errataCVEs(eo.getId());
            eo.setCves(dr);
        }
    }
    List<ErrataOverview> filtered = new ArrayList<ErrataOverview>();
    // Filter based on errata type selected
    List<ErrataOverview> filteredByType = new ArrayList<ErrataOverview>();
    filteredByType = filterByAdvisoryType(unsorted, formIn);
    List<ErrataOverview> filteredByIssueDate = new ArrayList<ErrataOverview>();
    if (dateSearch && !StringUtils.isBlank(searchString)) {
        // search string is not blank, therefore a search was run so filter the results
        LOG.debug("Performing filter on issue date, we only want records between "
                + startDate + " - " + endDate);
        filteredByIssueDate = filterByIssueDate(filteredByType, startDate, endDate);
        filtered.addAll(filteredByIssueDate);
    } else {
        // skip issue date filter
        filtered.addAll(filteredByType);
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug(filtered.size() + " records have passed being filtered " + "and will be displayed.");
    }
    // TODO: need to figure out a way to properly sort the
    // errata from a package search. What we get back from the
    // search server is pid, pkg-name in relevant order.
    // What we get back from searchByPackageIds, is an unsorted
    // list of ErrataOverviews where each one contains more than one
    // package-name, but no package ids.
    if (OPT_PKG_NAME.equals(mode)) {
        return filtered;
    }
    // Using a lookup map created from the results returned by search server.
    // The issue is that the search server returns us a list in an order which is
    // relevant to score the object received from the search.
    // When we "flesh" out the ErrataOverview by calling into the database we
    // lose this order; that's what we are trying to reclaim, this way when the
    // results are returned to the webpage they will be in a meaningful order.
    List<ErrataOverview> ordered = new LinkedList<ErrataOverview>();
    for (ErrataOverview eo : filtered) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("Processing eo: " + eo.getAdvisory() + " id: " + eo.getId());
        }
        int idx = lookupmap.get(eo.getId());
        if (ordered.isEmpty()) {
            ordered.add(eo);
            continue;
        }
        boolean added = false;
        for (ListIterator itr = ordered.listIterator(); itr.hasNext();) {
            ErrataOverview curpo = (ErrataOverview) itr.next();
            int curidx = lookupmap.get(curpo.getId());
            if (idx <= curidx) {
                itr.previous();
                itr.add(eo);
                added = true;
                break;
            }
        }
        if (!added) {
            ordered.add(eo);
        }
    }
    return ordered;
}
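The ordered-insertion idiom at the end of that method (step back with previous() so add() places the new element before the current one) can be shown in isolation. This sketch uses a plain list of integers rather than the ErrataOverview/rank types from the example:

import java.util.ArrayList;
import java.util.List;
import java.util.ListIterator;

public class OrderedInsert {
    // insert value so that an already-sorted list stays sorted
    static void insertSorted(List<Integer> sorted, int value) {
        boolean added = false;
        for (ListIterator<Integer> itr = sorted.listIterator(); itr.hasNext();) {
            int current = itr.next();
            if (value <= current) {
                itr.previous();   // step back so add() inserts before "current"
                itr.add(value);
                added = true;
                break;
            }
        }
        if (!added) {
            sorted.add(value);    // larger than everything seen so far
        }
    }

    public static void main(String[] args) {
        List<Integer> ranks = new ArrayList<>(List.of(1, 3, 7));
        insertSorted(ranks, 5);
        System.out.println(ranks); // [1, 3, 5, 7]
    }
}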
From source file:playground.christoph.evacuation.analysis.EvacuationTimePictureWriter.java
private void histogramToKMZ(String transportMode, BasicLocation location, List<Double> listWithoutNaN)
        throws IOException {
    String filename = createFilenameFromLocation(location, "_" + transportMode + HISTOGRAM);
    if (filename == null)
        return;

    double[] array = new double[listWithoutNaN.size()];
    int i = 0;
    ListIterator<Double> iter = listWithoutNaN.listIterator();
    while (iter.hasNext()) {
        array[i] = iter.next();
        i++;
    }

    // if no valid travel times exist -> create an empty histogram
    if (array.length == 0) {
        array = new double[1];
        array[0] = 0.0;
    }

    writeChartToKmz(filename, createHistogramChart(transportMode, array), HISTOGRAMWIDTH, HISTOGRAMHEIGHT);
}
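The manual iterator loop above is one way to unbox a List<Double> into a double[]; an equivalent sketch using a stream (assuming the list contains no null entries, and with illustrative values) is shorter:

import java.util.List;

public class UnboxDoubles {
    public static void main(String[] args) {
        List<Double> travelTimes = List.of(12.5, 30.0, 7.25); // illustrative values
        double[] array = travelTimes.stream().mapToDouble(Double::doubleValue).toArray();
        System.out.println(array.length); // 3
    }
}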
From source file:com.vizury.videocache.product.ProductDetail.java
private ProductDetail[] getProductDataFromList(CacheConnect cache, String productList,
        HashMap<String, ProductDetail> recommendedProductDetail, int numberOfRecommendedProducts) {
    String[] productIdArray = productList.replace("\"", "").split(",");
    List<ProductDetail> productDetailList = new ArrayList<>();
    List<ProductDetail> requestProductDetailList = new ArrayList<>();
    for (String pid : productIdArray) {
        if (!pid.equals(productId)) {
            if (!recommendedProductDetail.containsKey(namespace + "_1_" + pid)) {
                requestProductDetailList.add(new ProductDetail(pid, namespace));
            }
            productDetailList.add(new ProductDetail(pid, namespace));
        }
    }
    Map<String, Object> productDetailMap = cache.getBulk(requestProductDetailList, "_1_");
    if (productDetailMap != null) {
        ListIterator iterator = productDetailList.listIterator();
        while (iterator.hasNext()) {
            ProductDetail productDetail = (ProductDetail) iterator.next();
            if (productDetailMap.containsKey(namespace + "_1_" + productDetail.getProductId())) {
                productDetail.jsonToProductDetail(
                        (String) productDetailMap.get(namespace + "_1_" + productDetail.getProductId()));
                recommendedProductDetail.put(namespace + "_1_" + productDetail.getProductId(), productDetail);
            } else {
                iterator.set(recommendedProductDetail.get(namespace + "_1_" + productDetail.getProductId()));
            }
        }
    } else {
        return null;
    }
    if (productDetailList.size() <= numberOfRecommendedProducts) {
        return productDetailList.toArray(new ProductDetail[productDetailList.size()]);
    } else {
        Random rand = new Random();
        int randomIndex;
        int index;
        ProductDetail[] productDetail = new ProductDetail[numberOfRecommendedProducts];
        for (index = 0; index < numberOfRecommendedProducts; index++) {
            randomIndex = rand.nextInt(productDetailList.size());
            productDetail[index] = productDetailList.get(randomIndex);
            productDetailList.remove(randomIndex);
        }
        return productDetail;
    }
}
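The iterator.set(...) call above swaps an element in place without disturbing the iteration. A minimal sketch of that pattern, with a hypothetical cache-lookup map standing in for the product cache:

import java.util.ArrayList;
import java.util.List;
import java.util.ListIterator;
import java.util.Map;

public class ReplaceInPlace {
    public static void main(String[] args) {
        List<String> items = new ArrayList<>(List.of("p1", "p2", "p3"));
        // hypothetical "already cached" replacements, keyed by the same ids
        Map<String, String> cached = Map.of("p2", "p2-from-cache");

        ListIterator<String> it = items.listIterator();
        while (it.hasNext()) {
            String id = it.next();
            if (cached.containsKey(id)) {
                it.set(cached.get(id)); // replace the element just returned by next()
            }
        }
        System.out.println(items); // [p1, p2-from-cache, p3]
    }
}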
From source file:org.apache.fop.layoutmgr.inline.FootnoteLayoutManager.java
/** {@inheritDoc} */
@Override
public List getNextKnuthElements(LayoutContext context, int alignment) {
    // for the moment, this LM is set as the citationLM's parent
    // later on, when this LM will have nothing more to do, the citationLM's parent
    // will be set to the footnoteLM's parent
    citationLM.setParent(this);
    citationLM.initialize();
    bodyLM.setParent(this);
    bodyLM.initialize();

    // get Knuth elements representing the footnote citation
    List returnedList = new LinkedList();
    while (!citationLM.isFinished()) {
        List partialList = citationLM.getNextKnuthElements(context, alignment);
        if (partialList != null) {
            returnedList.addAll(partialList);
        }
    }
    if (returnedList.size() == 0) {
        //Inline part of the footnote is empty. Need to send back an auxiliary
        //zero-width, zero-height inline box so the footnote gets painted.
        KnuthSequence seq = new InlineKnuthSequence();
        //Need to use an aux. box, otherwise, the line height can't be forced to zero height.
        forcedAnchor = new KnuthInlineBox(0, null, null, true);
        seq.add(forcedAnchor);
        returnedList.add(seq);
    }
    setFinished(true);

    addAnchor(returnedList);

    // "wrap" the Position stored in each list inside returnedList
    ListIterator listIterator = returnedList.listIterator();
    ListIterator elementIterator = null;
    KnuthSequence list = null;
    ListElement element = null;
    while (listIterator.hasNext()) {
        list = (KnuthSequence) listIterator.next();
        elementIterator = list.listIterator();
        while (elementIterator.hasNext()) {
            element = (KnuthElement) elementIterator.next();
            element.setPosition(notifyPos(new NonLeafPosition(this, element.getPosition())));
        }
    }

    return returnedList;
}
From source file:com.oneops.ops.dao.PerfDataAccessor.java
/**
 * Gets the perf data series. Contains tmp code for conversion of data cf to sharded data cfs
 *
 * @param req the request object
 * @return the perf data series
 */
public String getPerfDataSeries(PerfDataRequest req) {
    Long start = Long.valueOf(req.getStart());
    Long end = Long.valueOf(req.getEnd());
    int maxColumns = (int) (end - start);
    StringBuilder jsonOut = new StringBuilder("");
    try {
        long startTime = System.currentTimeMillis();
        String stat = "rra-average";
        if (req.getStat_function() != null) {
            stat = "rra-" + req.getStat_function();
        }
        String rra = getRraByStat(stat, req.getStep());
        int step = alignRraStep(req.getStep());
        List<byte[]> keys = new ArrayList<byte[]>();
        StringBuilder sb = new StringBuilder("");
        long adjustedStart = start - start % step;
        String dataCF = DATA_CF + "_" + getShard(step);
        if (isTestMode)
            dataCF += "_test";
        MultigetSliceQuery<byte[], Long, Double> multigetSliceQuery = HFactory
                .createMultigetSliceQuery(keyspace, bytesSerializer, longSerializer, doubleSerializer);
        multigetSliceQuery.setColumnFamily(dataCF);
        keys = new ArrayList<>();
        sb = new StringBuilder("");
        for (int i = 0; i < req.getMetrics().length; i++) {
            String metricDs = req.getMetrics()[i];
            String key = Long.valueOf(req.getCi_id()).toString() + ":" + metricDs + ":" + rra;
            keys.add(key.getBytes());
            sb.append(" " + key);
        }
        multigetSliceQuery.setKeys(keys);
        multigetSliceQuery.setRange(adjustedStart, end, false, maxColumns);
        logger.info("start:" + start + " end:" + end + " for: " + sb);
        long cassStart = System.currentTimeMillis();
        QueryResult<Rows<byte[], Long, Double>> result = multigetSliceQuery.execute();
        Rows<byte[], Long, Double> rows = result.get();
        long cassEnd = System.currentTimeMillis();
        long cassDuration = cassEnd - cassStart;

        // put the by-metric results into 1 csv-like table
        // (time,metric1,metric2,etc)
        // ... should find faster way to do this, but still 10x faster than
        // gwt DataTable serialization
        int rowCount = 0;
        int totalSampleCount = 0;
        HashMap<String, HashMap<Long, Double>> resultMap = new HashMap<String, HashMap<Long, Double>>();
        for (Row<byte[], Long, Double> row : rows) {
            String rowKey = new String(row.getKey());
            HashMap<Long, Double> results = null;
            if (resultMap.containsKey(rowKey)) {
                results = resultMap.get(rowKey);
            } else {
                results = new HashMap<>();
                resultMap.put(rowKey, results);
            }
            List<HColumn<Long, Double>> cols = row.getColumnSlice().getColumns();
            Iterator<HColumn<Long, Double>> listIter = cols.listIterator();
            while (listIter.hasNext()) {
                HColumn<Long, Double> c = (HColumn<Long, Double>) listIter.next();
                results.put(c.getName(), c.getValue());
            }
        }
        for (String rowKey : resultMap.keySet()) {
            HashMap<Long, Double> results = resultMap.get(rowKey);
            if (rowCount > 0) {
                jsonOut.append(",\n");
            }
            String[] keyParts = rowKey.split(":");
            String ciId = keyParts[0];
            String metric = keyParts[1] + ":" + keyParts[2];
            jsonOut.append("{ \"header\":{\"ci_id\":" + ciId + ", \"metric\":\"" + metric + "\", \"step\":"
                    + step + ", \"start\":" + adjustedStart + "},\n ");
            jsonOut.append("\"data\":[");
            long currentBucket = adjustedStart;
            int sampleCount = 0;
            int emptyCount = 0;
            SortedSet<Long> sortedKeys = new TreeSet<>(results.keySet());
            for (long sampleBucket : sortedKeys) {
                double value = results.get(sampleBucket);
                if (sampleBucket != currentBucket) {
                    while (sampleBucket > currentBucket) {
                        if (sampleCount > 0) {
                            jsonOut.append(",");
                        }
                        jsonOut.append("null");
                        currentBucket += step;
                        emptyCount++;
                        sampleCount++;
                    }
                }
                if (sampleCount > 0) {
                    jsonOut.append(",");
                }
                jsonOut.append((Math.round(value * 1000.0) / 1000.0));
                currentBucket += step;
                totalSampleCount++;
                sampleCount++;
            }
            jsonOut.append("]}");
            rowCount++;
            logger.debug("got samples:" + sampleCount + " gaps:" + emptyCount);
        }
        long endTime = System.currentTimeMillis();
        long duration = endTime - startTime;
        logger.debug("getPerfData took: " + duration + " ms (cass query: " + cassDuration
                + " ms) returning: " + totalSampleCount + " rows of " + rowCount + " metrics");
    } catch (HectorException he) {
        he.printStackTrace();
    }
    return jsonOut.toString();
}
From source file:edu.cornell.mannlib.vitro.webapp.controller.json.GetEntitiesByVClass.java
@Override
protected JSONArray process() throws ServletException {
    log.debug("in getEntitiesByVClass()");
    String vclassURI = vreq.getParameter("vclassURI");
    WebappDaoFactory daos = vreq.getUnfilteredWebappDaoFactory();

    if (vclassURI == null) {
        throw new ServletException("getEntitiesByVClass(): no value for 'vclassURI' found in the HTTP request");
    }

    VClass vclass = daos.getVClassDao().getVClassByURI(vclassURI);
    if (vclass == null) {
        throw new ServletException("getEntitiesByVClass(): could not find vclass for uri '" + vclassURI + "'");
    }

    List<Individual> entsInVClass = daos.getIndividualDao().getIndividualsByVClass(vclass);
    if (entsInVClass == null) {
        throw new ServletException(
                "getEntitiesByVClass(): null List<Individual> returned by getIndividualsByVClass() for "
                        + vclassURI);
    }
    int numberOfEntsInVClass = entsInVClass.size();

    List<Individual> entsToReturn = new ArrayList<Individual>(REPLY_SIZE);
    String requestHash = null;
    int count = 0;
    boolean more = false;
    /* we have a large number of items to send back so we need to stash the list in the session scope */
    if (entsInVClass.size() > REPLY_SIZE) {
        more = true;
        HttpSession session = vreq.getSession(true);
        requestHash = Integer.toString((vclassURI + System.currentTimeMillis()).hashCode());
        session.setAttribute(requestHash, entsInVClass);

        ListIterator<Individual> entsFromVclass = entsInVClass.listIterator();
        while (entsFromVclass.hasNext() && count < REPLY_SIZE) {
            entsToReturn.add(entsFromVclass.next());
            entsFromVclass.remove();
            count++;
        }
        if (log.isDebugEnabled()) {
            log.debug("getEntitiesByVClass(): Creating reply with continue token, found "
                    + numberOfEntsInVClass + " Individuals");
        }
    } else {
        if (log.isDebugEnabled())
            log.debug("getEntitiesByVClass(): sending " + numberOfEntsInVClass
                    + " Individuals without continue token");
        entsToReturn = entsInVClass;
        count = entsToReturn.size();
    }

    //put all the entities on the JSON array
    JSONArray ja = individualsToJson(entsToReturn);

    //put the responseGroup number on the end of the JSON array
    if (more) {
        try {
            JSONObject obj = new JSONObject();
            obj.put("resultGroup", "true");
            obj.put("size", count);
            obj.put("total", numberOfEntsInVClass);

            StringBuffer nextUrlStr = vreq.getRequestURL();
            nextUrlStr.append("?").append("getEntitiesByVClass").append("=1&").append("resultKey=")
                    .append(requestHash);
            obj.put("nextUrl", nextUrlStr.toString());

            ja.put(obj);
        } catch (JSONException je) {
            throw new ServletException("unable to create continuation as JSON: " + je.getMessage());
        }
    }
    log.debug("done with getEntitiesByVClass()");
    return ja;
}
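The paging trick above (take the first REPLY_SIZE elements and remove them so the remainder can be stashed for a later request) reduces to this ListIterator.remove() pattern; REPLY_SIZE and the element type here are stand-ins, not the servlet's actual values:

import java.util.ArrayList;
import java.util.List;
import java.util.ListIterator;

public class DrainFirstN {
    public static void main(String[] args) {
        final int REPLY_SIZE = 3; // stand-in for the servlet's page size
        List<Integer> pending = new ArrayList<>(List.of(10, 20, 30, 40, 50));
        List<Integer> page = new ArrayList<>(REPLY_SIZE);

        ListIterator<Integer> it = pending.listIterator();
        int count = 0;
        while (it.hasNext() && count < REPLY_SIZE) {
            page.add(it.next());
            it.remove(); // remove the element just returned, leaving the rest "stashed"
            count++;
        }
        System.out.println(page);    // [10, 20, 30]
        System.out.println(pending); // [40, 50]
    }
}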
From source file:atfrogs.ox.api.OXWebDAVApi.java
/**
 * Returns a list of all OXUser objects of group members of group with
 * given gid. You should not pass a pattern as gid. If done so you'll get
 * the list of OXUser objects of the group being the first search result.
 *
 * @param gid the gid of group requested.
 * @return the list of OXUser objects.
 * @throws OXWebDAVApiException if an error occurs during operation.
 */
public List getMembersOfGroup(String gid) throws OXWebDAVApiException {
    List memberIdList; // of String
    List userList; // of OXUser
    ListIterator iterator;

    if (gid == null)
        return null;

    try {
        memberIdList = this.getMemberIdsOfGroup(gid);
        if (memberIdList == null)
            return null;

        userList = new Vector();
        iterator = memberIdList.listIterator();
        while (iterator.hasNext()) {
            String uid;
            OXUser user;

            uid = (String) iterator.next();
            user = this.getOXUser(uid);
            userList.add(user);
        }
    } catch (Exception exc) {
        exc.printStackTrace();
        throw new OXWebDAVApiException(exc.getMessage(), exc);
    }

    return userList; // note: may be empty
}
From source file:com.ephesoft.dcma.tablefinder.share.TableRowFinderUtility.java
/**
 * Get lines of the table area in a page.
 *
 * @param searchTableConfigDataCarrier {@link SearchTableConfigDataCarrier}
 * @param lineDataCarrierList {@link List}<{@link LineDataCarrier}>
 * @param hocrPage {@link HocrPage}
 * @param lineDataCarrier {@link LineDataCarrier} Line data carrier that will have new selected spans.
 * @param tableBoundaryDataCarrier
 * @throws DCMAApplicationException
 */
private static void getTableLines(final SearchTableConfigDataCarrier searchTableConfigDataCarrier,
        List<LineDataCarrier> lineDataCarrierList, final HocrPage hocrPage, LineDataCarrier lineDataCarrier,
        TableBoundaryDataCarrier tableBoundaryDataCarrier) throws DCMAApplicationException {
    final String pageID = hocrPage.getPageID();
    LOGGER.debug("HocrPage page ID : ", pageID);
    final Spans spans = hocrPage.getSpans();
    final List<Span> linkedList = getSortedSpanList(spans);
    if (null == linkedList || linkedList.isEmpty()) {
        LOGGER.debug("Return linked list is null for the page id = ", pageID);
    } else {
        final ListIterator<Span> listItr = linkedList.listIterator();
        if (null == listItr) {
            LOGGER.debug("Return list iterator is null for the page id = ", pageID);
        } else {
            final String startPattern = searchTableConfigDataCarrier.getStartPattern();
            final float fuzzyMatchThresholdValue = searchTableConfigDataCarrier.getFuzzyMatchThresholdValue();
            List<DataCarrier> startDataCarrier = tableBoundaryDataCarrier.getStartDataCarrier();
            if (null == startDataCarrier) {
                while (listItr.hasNext()) {
                    final Span span = listItr.next();
                    try {
                        List<Span> spanList = lineDataCarrier.getSpanList();
                        if (spanList.isEmpty()) {
                            spanList.add(span);
                        } else {
                            if (null != span) {
                                if (isSameLineSpan(spanList, span)) {
                                    spanList.add(span);
                                } else {
                                    lineDataCarrier = new LineDataCarrier(pageID);
                                    spanList = lineDataCarrier.getSpanList();
                                    spanList.add(span);
                                }
                            }
                        }
                        final DataCarrier finalDataCarrier = PatternMatcherUtil.findFuzzyPattern(
                                lineDataCarrier.getLineRowData(), startPattern, fuzzyMatchThresholdValue,
                                spanList);
                        if (null == finalDataCarrier) {
                            startDataCarrier = findPattern(lineDataCarrier.getLineRowData(), startPattern,
                                    spanList);
                        } else {
                            startDataCarrier = new ArrayList<DataCarrier>(0);
                        }
                        if (null != startDataCarrier) {
                            LOGGER.debug("Start pattern found for table where start pattern : ", startPattern);
                            addLine(lineDataCarrierList, lineDataCarrier);
                            break;
                        }
                    } catch (final Exception exception) {
                        LOGGER.error(exception.getMessage(), exception);
                    }
                }
            }
            if (startDataCarrier != null) {
                tableBoundaryDataCarrier.setStartDataCarrier(startDataCarrier);
                LOGGER.info("Finding end pattern.");
                while (listItr.hasNext()) {
                    final Span span = listItr.next();
                    List<Span> spanList = lineDataCarrier.getSpanList();
                    if (spanList.isEmpty()) {
                        spanList.add(span);
                    } else {
                        if (isSameLineSpan(spanList, span)) {
                            spanList.add(span);
                        } else {
                            lineDataCarrier = new LineDataCarrier(pageID);
                            addLine(lineDataCarrierList, lineDataCarrier);
                            spanList = lineDataCarrier.getSpanList();
                            spanList.add(span);
                        }
                    }
                    List<DataCarrier> endDataCarrier = null;
                    final String endPattern = searchTableConfigDataCarrier.getEndPattern();
                    final DataCarrier finalDataCarrier = PatternMatcherUtil.findFuzzyPattern(
                            lineDataCarrier.getLineRowData(), endPattern, fuzzyMatchThresholdValue, spanList);
                    if (null == finalDataCarrier) {
                        endDataCarrier = findPattern(lineDataCarrier.getLineRowData(), endPattern,
                                lineDataCarrier.getSpanList());
                    } else {
                        endDataCarrier = new ArrayList<DataCarrier>(0);
                    }
                    if (null != endDataCarrier) {
                        LOGGER.debug("End pattern found for table where end pattern : ", endPattern);
                        tableBoundaryDataCarrier.setEndDataCarrier(endDataCarrier);
                        break;
                    }
                }
            } else {
                LOGGER.debug("No start pattern found for table where start pattern : ", startPattern);
            }
        }
    }
}
From source file:org.apache.fop.layoutmgr.BlockStackingLayoutManager.java
/**
 * "wrap" the Position inside each element moving the elements from
 * sourceList to targetList
 * @param sourceList source list
 * @param targetList target list receiving the wrapped position elements
 * @param force if true, every Position is wrapped regardless of its LM of origin
 */
protected void wrapPositionElements(List sourceList, List targetList, boolean force) {
    ListIterator listIter = sourceList.listIterator();
    Object tempElement;
    while (listIter.hasNext()) {
        tempElement = listIter.next();
        if (tempElement instanceof ListElement) {
            wrapPositionElement((ListElement) tempElement, targetList, force);
        } else if (tempElement instanceof List) {
            wrapPositionElements((List) tempElement, targetList, force);
        }
    }
}