List of usage examples for java.util LinkedList addAll
public boolean addAll(Collection<? extends E> c)
From source file:io.hops.transaction.lock.INodeLock.java
private List<INode> findChildrenRecursively(INode lastINode) throws StorageException, TransactionContextException { LinkedList<INode> children = new LinkedList<>(); LinkedList<INode> unCheckedDirs = new LinkedList<>(); if (lastINode != null) { if (lastINode instanceof INodeDirectory) { unCheckedDirs.add(lastINode); }//w ww . jav a 2 s.c om } // Find all the children in the sub-directories. while (!unCheckedDirs.isEmpty()) { INode next = unCheckedDirs.poll(); if (next instanceof INodeDirectory) { setINodeLockType(TransactionLockTypes.INodeLockType.READ_COMMITTED); //locking the parent is sufficient List<INode> clist = ((INodeDirectory) next).getChildrenList(); unCheckedDirs.addAll(clist); children.addAll(clist); } } LOG.debug("Added " + children.size() + " children."); return children; }
From source file:org.eclipse.che.api.vfs.server.VirtualFileSystemImpl.java
@Path("replace/{path:.*}") @Override//from w w w.j a va 2s . com public void replace(@PathParam("path") String path, List<ReplacementSet> replacements, @QueryParam("lockToken") String lockToken) throws NotFoundException, ForbiddenException, ConflictException, ServerException { VirtualFile projectRoot = mountPoint.getVirtualFile(path); if (!projectRoot.isFolder()) { throw new ConflictException("Given path must be an project root folder. "); } final Map<String, ReplacementContainer> changesPerFile = new HashMap<>(); // fill changes matrix first for (final ReplacementSet replacement : replacements) { for (final String regex : replacement.getFiles()) { Pattern pattern = Pattern.compile(regex); ItemNode rootNode = getTree(projectRoot.getId(), -1, false, PropertyFilter.ALL_FILTER); LinkedList<ItemNode> q = new LinkedList<>(); q.add(rootNode); while (!q.isEmpty()) { ItemNode node = q.pop(); Item item = node.getItem(); if (item.getItemType().equals(ItemType.FOLDER)) { q.addAll(node.getChildren()); } else if (item.getItemType().equals(ItemType.FILE)) { // for cases like: src/main/java/(.*) String itemInternalPath = item.getPath().substring(projectRoot.getPath().length() + 1); if (pattern.matcher(item.getName()).matches() || pattern.matcher(itemInternalPath).matches()) { ReplacementContainer container = (changesPerFile.get(item.getPath()) != null) ? 
changesPerFile.get(item.getPath()) : new ReplacementContainer(); for (Variable variable : replacement.getEntries()) { String replaceMode = variable.getReplacemode(); if (replaceMode == null || "variable_singlepass".equals(replaceMode)) { container.getVariableProps().put(variable.getFind(), variable.getReplace()); } else if ("text_multipass".equals(replaceMode)) { container.getTextProps().put(variable.getFind(), variable.getReplace()); } } changesPerFile.put(item.getPath(), container); } } } } } //now apply changes matrix for (Map.Entry<String, ReplacementContainer> entry : changesPerFile.entrySet()) { try { if (entry.getValue().hasReplacements()) { ContentStream cs = mountPoint.getVirtualFile(entry.getKey()).getContent(); String content = IoUtil.readAndCloseQuietly(cs.getStream()); String modified = Deserializer.resolveVariables(content, entry.getValue().getVariableProps(), false); for (Map.Entry<String, String> replacement : entry.getValue().getTextProps().entrySet()) { if (modified.contains(replacement.getKey())) { modified = modified.replace(replacement.getKey(), replacement.getValue()); } } //better to compare big strings by hash codes first if (!(content.hashCode() == modified.hashCode()) || !content.equals(modified)) { mountPoint.getVirtualFile(entry.getKey()).updateContent( new ByteArrayInputStream(modified.getBytes(StandardCharsets.UTF_8)), lockToken); } } } catch (IOException e) { LOG.warn(e.getMessage(), e); } } }
From source file:org.trnltk.experiment.morphology.ambiguity.DataDiffUtil.java
/** * Given the location of the 'middle snake', split the diff in two parts * and recurse./*from w ww . jav a2 s . c o m*/ * * @param text1 Old string to be diffed. * @param text2 New string to be diffed. * @param x Index of split point in text1. * @param y Index of split point in text2. * @param deadline Time at which to bail if not yet complete. * @return LinkedList of Diff objects. */ private LinkedList<Diff<T>> diff_bisectSplit(List<T> text1, List<T> text2, int x, int y, long deadline) { List<T> text1a = text1.subList(0, x); List<T> text2a = text2.subList(0, y); List<T> text1b = text1.subList(x, text1.size()); List<T> text2b = text2.subList(y, text2.size()); // Compute both diffs serially. LinkedList<Diff<T>> diffs = diff_main(text1a, text2a, false, deadline); LinkedList<Diff<T>> diffsb = diff_main(text1b, text2b, false, deadline); diffs.addAll(diffsb); return diffs; }
From source file:org.dataconservancy.ui.services.MockDcsConnector.java
/**
 * Finds all entities that have the Deliverable Unit referenced by {@code archive_id} as an ancestor.
 * In the example below, the manifestation and file (identified by man_id and file_id, respectively) are
 * ancestors of the DU identified by archive_id.
 * <pre>
 * DU (archive_id) &lt;- Manifestation (man_id) -&gt; File (file_id)
 * </pre>
 * This method searches through the {@code archiveUtil} instance, looking for ancestors of the supplied
 * {@code archive_id} and adds them to the {@code result}.
 *
 * @param result the list to add ancestors to
 * @param archive_id the identifier of the Deliverable Unit to find ancestors of
 */
private void performAncestrySearch(LinkedList<DcsEntity> result, String archive_id) {
    Set<DcsEntity> descendants = new HashSet<DcsEntity>();
    // Flatten every entity known to the archive into a single collection.
    java.util.Collection<DcsEntity> allKnown = new HashSet<DcsEntity>();
    for (Set<DcsEntity> bucket : archiveUtil.getEntities().values()) {
        allKnown.addAll(bucket);
    }
    DcsDeliverableUnitRef rootRef = new DcsDeliverableUnitRef(archive_id);
    // Match the root DU itself plus any DU that lists it as a parent, and
    // gather the descendants of each match.
    for (DcsEntity candidate : allKnown) {
        if (!(candidate instanceof DcsDeliverableUnit)) {
            continue;
        }
        DcsDeliverableUnit du = (DcsDeliverableUnit) candidate;
        if (du.getId().equals(archive_id) || du.getParents().contains(rootRef)) {
            result.add(candidate);
            add_descendants(candidate, allKnown, descendants);
        }
    }
    result.addAll(descendants);
}
From source file:org.apache.fop.layoutmgr.table.TableContentLayoutManager.java
/**
 * Creates Knuth elements by iterating over a TableRowIterator.
 * @param iter TableRowIterator instance to fetch rows from
 * @param context Active LayoutContext
 * @param alignment alignment indicator
 * @param bodyType Indicates what kind of body is being processed
 * (BODY, HEADER or FOOTER)
 * @return An element list
 */
private LinkedList getKnuthElementsForRowIterator(TableRowIterator iter, LayoutContext context,
        int alignment, int bodyType) {
    LinkedList returnList = new LinkedList();
    EffRow[] rowGroup = iter.getNextRowGroup();
    // TODO homogenize the handling of keeps and breaks
    // Reset the (reused) layout context before processing this table part.
    context.clearKeepsPending();
    context.setBreakBefore(Constants.EN_AUTO);
    context.setBreakAfter(Constants.EN_AUTO);
    Keep keepWithPrevious = Keep.KEEP_AUTO;
    int breakBefore = Constants.EN_AUTO;
    if (rowGroup != null) {
        // First row group: record the keep/break state it produces so it can be
        // restored onto the context at the end of this method.
        RowGroupLayoutManager rowGroupLM = new RowGroupLayoutManager(getTableLM(), rowGroup, stepper);
        List nextRowGroupElems = rowGroupLM.getNextKnuthElements(context, alignment, bodyType);
        keepWithPrevious = keepWithPrevious.compare(context.getKeepWithPreviousPending());
        breakBefore = context.getBreakBefore();
        int breakBetween = context.getBreakAfter();
        returnList.addAll(nextRowGroupElems);
        while ((rowGroup = iter.getNextRowGroup()) != null) {
            rowGroupLM = new RowGroupLayoutManager(getTableLM(), rowGroup, stepper);
            //Note previous pending keep-with-next and clear the strength
            //(as the layout context is reused)
            Keep keepWithNextPending = context.getKeepWithNextPending();
            context.clearKeepWithNextPending();
            //Get elements for next row group
            nextRowGroupElems = rowGroupLM.getNextKnuthElements(context, alignment, bodyType);
            /*
             * The last break element produced by TableStepper (for the previous row
             * group) may be used to represent the break between the two row groups.
             * Its penalty value and break class must just be overridden by the
             * characteristics of the keep or break between the two.
             *
             * However, we mustn't forget that if the after border of the last row of
             * the row group is thicker in the normal case than in the trailing case,
             * an additional glue will be appended to the element list. So we may have
             * to go two steps backwards in the list.
             */
            //Determine keep constraints
            Keep keep = keepWithNextPending.compare(context.getKeepWithPreviousPending());
            context.clearKeepWithPreviousPending();
            keep = keep.compare(getTableLM().getKeepTogether());
            int penaltyValue = keep.getPenalty();
            int breakClass = keep.getContext();
            breakBetween = BreakUtil.compareBreakClasses(breakBetween, context.getBreakBefore());
            if (breakBetween != Constants.EN_AUTO) {
                // A forced break overrides any keep: make the break mandatory.
                penaltyValue = -KnuthElement.INFINITE;
                breakClass = breakBetween;
            }
            BreakElement breakElement;
            ListIterator elemIter = returnList.listIterator(returnList.size());
            ListElement elem = (ListElement) elemIter.previous();
            if (elem instanceof KnuthGlue) {
                // Trailing glue present: the break element is one step further back.
                breakElement = (BreakElement) elemIter.previous();
            } else {
                breakElement = (BreakElement) elem;
            }
            breakElement.setPenaltyValue(penaltyValue);
            breakElement.setBreakClass(breakClass);
            returnList.addAll(nextRowGroupElems);
            breakBetween = context.getBreakAfter();
        }
    }
    /*
     * The last break produced for the last row-group of this table part must be
     * removed, because the breaking after the table will be handled by TableLM.
     * Unless the element list ends with a glue, which must be kept to accurately
     * represent the content. In such a case the break is simply disabled by setting
     * its penalty to infinite.
     */
    // NOTE(review): this assumes returnList is non-empty here (elemIter.previous()
    // would throw otherwise) — presumably callers guarantee at least one row group
    // producing elements; confirm.
    ListIterator elemIter = returnList.listIterator(returnList.size());
    ListElement elem = (ListElement) elemIter.previous();
    if (elem instanceof KnuthGlue) {
        BreakElement breakElement = (BreakElement) elemIter.previous();
        breakElement.setPenaltyValue(KnuthElement.INFINITE);
    } else {
        elemIter.remove();
    }
    // Restore the state recorded from the first row group for the caller.
    context.updateKeepWithPreviousPending(keepWithPrevious);
    context.setBreakBefore(breakBefore);
    //fox:widow-content-limit
    int widowContentLimit = getTableLM().getTable().getWidowContentLimit().getValue();
    if (widowContentLimit != 0 && bodyType == TableRowIterator.BODY) {
        ElementListUtils.removeLegalBreaks(returnList, widowContentLimit);
    }
    //fox:orphan-content-limit
    int orphanContentLimit = getTableLM().getTable().getOrphanContentLimit().getValue();
    if (orphanContentLimit != 0 && bodyType == TableRowIterator.BODY) {
        ElementListUtils.removeLegalBreaksFromEnd(returnList, orphanContentLimit);
    }
    return returnList;
}
From source file:org.j2free.admin.ReflectionMarshaller.java
private ReflectionMarshaller(Class klass) throws Exception { instructions = new HashMap<Field, Converter>(); LinkedList<Field> fieldsToMarshall = new LinkedList<Field>(); // Only marshallOut entities if (!klass.isAnnotationPresent(Entity.class)) throw new Exception("Provided class is not an @Entity"); // Add the declared fields fieldsToMarshall.addAll(Arrays.asList(klass.getDeclaredFields())); /* Inheritence support * Continue up the inheritance ladder until: * - There are no more super classes (zuper == null), or * - The super class is not an @Entity *///from w ww. ja v a 2s. c om Class zuper = klass; while ((zuper = zuper.getSuperclass()) != null) { // get out if we find a super class that isn't an @Entity if (!klass.isAnnotationPresent(Entity.class)) break; // Add the declared fields // @todo, improve the inheritance support, the current way will overwrite // overridden fields in subclasses with the super class's field fieldsToMarshall.addAll(Arrays.asList(zuper.getDeclaredFields())); } /* By now, fieldsToMarshall should contain all the fields * so it's time to figure out how to access them. 
*/ Method getter, setter; Converter converter; for (Field field : fieldsToMarshall) { int mod = field.getModifiers(); if (Modifier.isStatic(mod) || Modifier.isFinal(mod)) { log.debug("Skipping final or static field " + field.getName()); continue; } getter = setter = null; // if direct access doesn't work, look for JavaBean // getters and setters String fieldName = field.getName(); Class fieldType = field.getType(); try { getter = getGetter(field); } catch (NoSuchMethodException nsme) { log.debug("Failed to find getter for " + fieldName); } try { setter = getSetter(field); } catch (NoSuchMethodException nsme) { log.debug("Failed to find setter for " + fieldName); } if (getter == null && setter == null) { // Shit, we didn't figure out how to access it log.debug("Could not access field: " + field.getName()); } else { converter = new Converter(getter, setter); if (field.isAnnotationPresent(Id.class)) { log.debug("Found entityIdFied for " + klass.getName() + ": " + field.getName()); entityIdField = field; embeddedId = false; } if (field.isAnnotationPresent(EmbeddedId.class)) { log.debug("Found embedded entityIdFied for " + klass.getName() + ": " + field.getName()); entityIdField = field; embeddedId = true; } if (field.isAnnotationPresent(GeneratedValue.class) || setter == null) { converter.setReadOnly(true); } if (field.getType().isAnnotationPresent(Entity.class)) { converter.setEntity(fieldType); } Class superClass = field.getType(); if (superClass != null) { do { if (superClass == Collection.class) { try { Type type = field.getGenericType(); String typeString = type.toString(); while (typeString.matches("[^<]+?<[^>]+?>")) typeString = typeString.substring(typeString.indexOf("<") + 1, typeString.indexOf(">")); Class collectionType = Class.forName(typeString); converter.setCollection(collectionType); if (collectionType.getAnnotation(Entity.class) != null) converter.setEntity(collectionType); log.debug(field.getName() + " is entity = " + converter.isEntity()); 
log.debug(field.getName() + " collectionType = " + converter.getType().getSimpleName()); } catch (Exception e) { log.debug("error getting collection type", e); } finally { break; } } superClass = superClass.getSuperclass(); } while (superClass != null); } instructions.put(field, converter); } } }
From source file:org.vaadin.addons.sitekit.module.content.ContentModule.java
@Override public void injectDynamicContent(final SiteDescriptor dynamicSiteDescriptor) { final Company company = Site.getCurrent().getSiteContext().getObject(Company.class); final EntityManager entityManager = Site.getCurrent().getSiteContext().getObject(EntityManager.class); final User user = ((SecurityProviderSessionImpl) Site.getCurrent().getSecurityProvider()) .getUserFromSession();// w w w . j av a 2 s. com final List<Group> groups; if (user == null) { groups = new ArrayList<Group>(); groups.add(UserDao.getGroup(entityManager, company, "anonymous")); } else { groups = UserDao.getUserGroups(entityManager, company, user); } final List<Content> contents = ContentDao.getContens(entityManager, company); final LinkedList<Content> queue = new LinkedList<Content>(); final Map<String, List<Content>> dependencies = new HashMap<String, List<Content>>(); for (final Content content : contents) { final String dependency; if (!StringUtils.isEmpty(content.getAfterPage())) { dependency = content.getAfterPage(); } else if (!StringUtils.isEmpty(content.getParentPage())) { dependency = content.getParentPage(); } else { dependency = null; } if (dependency != null) { if (!dependencies.containsKey(dependency)) { dependencies.put(dependency, new ArrayList<Content>()); } dependencies.get(dependency).add(content); } else { queue.add(content); } } final List<Content> ordered = new ArrayList<Content>(); while (queue.size() > 0) { final Content content = queue.removeFirst(); ordered.add(content); if (dependencies.containsKey(content.getPage())) { queue.addAll(dependencies.get(content.getPage())); } } final NavigationVersion navigationVersion = dynamicSiteDescriptor.getNavigation().getProductionVersion(); for (final Content content : ordered) { boolean viewPrivilege = PrivilegeCache.hasPrivilege(company, user, "view", content.getContentId()); if (!viewPrivilege) { for (final Group group : groups) { if (PrivilegeCache.hasPrivilege(company, group, "view", content.getContentId())) { 
viewPrivilege = true; break; } } } if (!viewPrivilege) { continue; } boolean editPrivilege = UserDao.hasUserPrivilege(entityManager, user, "edit", content.getContentId()); if (!editPrivilege) { for (final Group group : groups) { if (UserDao.hasGroupPrivilege(entityManager, group, "edit", content.getContentId())) { editPrivilege = true; break; } } } final String page = content.getPage(); if (page == null) { continue; } final String parentPage = content.getParentPage(); final String afterPage = content.getAfterPage(); final String title = content.getTitle(); final MarkupType markupType = content.getMarkupType(); final String markup = content.getMarkup(); if (StringUtils.isEmpty(parentPage)) { if (StringUtils.isEmpty(afterPage)) { navigationVersion.addRootPage(0, page); navigationVersion.setDefaultPageName(page); } else { navigationVersion.addRootPage(afterPage, page); } } else { if (StringUtils.isEmpty(afterPage)) { navigationVersion.addChildPage(parentPage, page); } else { navigationVersion.addChildPage(parentPage, afterPage, page); } } // Describe content view. final ViewDescriptor viewDescriptor = new ViewDescriptor(page, title, DefaultView.class); viewDescriptor.getProductionVersion().setDynamic(true); if (editPrivilege) { viewDescriptor.setViewletClass("content", RenderFlow.class, content); } else { viewDescriptor.setViewletClass("content", RenderViewlet.class, markup); } dynamicSiteDescriptor.getViewDescriptors().add(viewDescriptor); } }
From source file:cellularAutomata.analysis.PricingDistributionAnalysis.java
/** * Plots the pricing data as a time series. *//* www. ja va 2 s .c om*/ private void plotTimeSeriesData() { for (int i = 0; i < numberOfStates; i++) { // create a list of all data points (exclude 0 if necessary) LinkedList<Point2D.Double> allPoints = new LinkedList<Point2D.Double>(); int startPosition = 0; if (!plotZeroState) { startPosition = 1; } allPoints.addAll(timeSeriesOfDistributionList[i]); // set the min and max values on the plot Point2D firstPoint = (Point2D) timeSeriesOfDistributionList[i].getFirst(); timeSeriesPlot[i].setMaximumXValue(firstPoint.getX() + MAX_NUMBER_TO_PLOT - 1); timeSeriesPlot[i].setMinimumXValue(0); timeSeriesPlot[i].setMinimumYValue(0.0); timeSeriesPlot[i].setXAxisLabel("Histogram"); timeSeriesPlot[i].setYAxisLabel("% bin"); timeSeriesPlot[i].showPlotLines(false); // set the max y-value double maxYValue = 0.0; Iterator<Point2D.Double> iterator = allPoints.iterator(); while (iterator.hasNext()) { Point2D.Double point = iterator.next(); if (point.y > maxYValue) { maxYValue = point.y; } } // now round up to the nearest tenth and add 0.1. This gives some // wiggle room before the plot will have to redraw the y-axis. If // redraw the y-axis too often, it will look bad. This crazy case // statement ensures that we don't get something like 0.7999999 // instead // of 0.8 (which is what was happening when I would divide by 10.0 // using // a formula to calculate the maxYValue). 
switch ((int) Math.ceil(maxYValue * 10.0)) { case 0: maxYValue = 0.1; break; case 1: maxYValue = 0.2; break; case 2: maxYValue = 0.3; break; case 3: maxYValue = 0.4; break; case 4: maxYValue = 0.5; break; case 5: maxYValue = 0.6; break; case 6: maxYValue = 0.7; break; case 7: maxYValue = 0.8; break; case 8: maxYValue = 0.9; break; case 9: maxYValue = 1.0; break; case 10: maxYValue = 1.0; break; case 11: maxYValue = 1.0; break; case 12: maxYValue = 1.0; break; } if (maxYValue > 1.0) { maxYValue = 1.0; } maxYValue = maxYValue * 1; timeSeriesPlot[i].setMaximumYValue(maxYValue); // draw some extra points on the y axes (looks good) int numberOfInteriorYValueLabels = (int) ((maxYValue / 20)); double[] yValues = new double[numberOfInteriorYValueLabels]; for (int j = 0; j < yValues.length; j++) { double answer = (j * 20); yValues[j] = answer; } timeSeriesPlot[i].setExtraYAxisValues(yValues); // specify colors for the points Color[] colorArray = new Color[allPoints.size()]; CellStateView view = Cell.getView(); Color stateColor = view.getDisplayColor(new IntegerCellState(i), null, new Coordinate(0, 0)); for (int j = 0; j < timeSeriesOfDistributionList[i].size(); j++) { colorArray[j] = stateColor; } timeSeriesPlot[i].setPointDisplayColors(colorArray); // draw the points! timeSeriesPlot[i].drawPoints(allPoints); } }
From source file:org.bubblecloud.ilves.module.content.ContentModule.java
@Override public void injectDynamicContent(final SiteDescriptor dynamicSiteDescriptor) { final Company company = Site.getCurrent().getSiteContext().getObject(Company.class); final EntityManager entityManager = Site.getCurrent().getSiteContext().getObject(EntityManager.class); final User user = ((SecurityProviderSessionImpl) Site.getCurrent().getSecurityProvider()) .getUserFromSession();/*from w w w . jav a 2 s . co m*/ final List<Group> groups; if (user == null) { groups = new ArrayList<Group>(); groups.add(UserDao.getGroup(entityManager, company, "anonymous")); } else { groups = UserDao.getUserGroups(entityManager, company, user); } final List<Content> contents = ContentDao.getContens(entityManager, company); final LinkedList<Content> queue = new LinkedList<Content>(); final Map<String, List<Content>> dependencies = new HashMap<String, List<Content>>(); for (final Content content : contents) { final String dependency; if (!StringUtils.isEmpty(content.getAfterPage())) { dependency = content.getAfterPage(); } else if (!StringUtils.isEmpty(content.getParentPage())) { dependency = content.getParentPage(); } else { dependency = null; } if (dependency != null) { if (!dependencies.containsKey(dependency)) { dependencies.put(dependency, new ArrayList<Content>()); } dependencies.get(dependency).add(content); } else { queue.add(content); } } final List<Content> ordered = new ArrayList<Content>(); while (queue.size() > 0) { final Content content = queue.removeFirst(); ordered.add(content); if (dependencies.containsKey(content.getPage())) { queue.addAll(dependencies.get(content.getPage())); } } final NavigationVersion navigationVersion = dynamicSiteDescriptor.getNavigation().getProductionVersion(); for (final Content content : ordered) { boolean viewPrivilege = PrivilegeCache.hasPrivilege(entityManager, company, user, "view", content.getContentId()); if (!viewPrivilege) { for (final Group group : groups) { if (PrivilegeCache.hasPrivilege(entityManager, company, group, "view", 
content.getContentId())) { viewPrivilege = true; break; } } } if (!viewPrivilege) { continue; } boolean editPrivilege = UserDao.hasUserPrivilege(entityManager, user, "edit", content.getContentId()); if (!editPrivilege) { for (final Group group : groups) { if (UserDao.hasGroupPrivilege(entityManager, group, "edit", content.getContentId())) { editPrivilege = true; break; } } } final String page = content.getPage(); if (page == null) { continue; } final String parentPage = content.getParentPage(); final String afterPage = content.getAfterPage(); final String title = content.getTitle(); final MarkupType markupType = content.getMarkupType(); final String markup = content.getMarkup(); if (StringUtils.isEmpty(parentPage)) { if (StringUtils.isEmpty(afterPage)) { navigationVersion.addRootPage(0, page); navigationVersion.setDefaultPageName(page); } else { navigationVersion.addRootPage(afterPage, page); } } else { if (StringUtils.isEmpty(afterPage)) { navigationVersion.addChildPage(parentPage, page); } else { navigationVersion.addChildPage(parentPage, afterPage, page); } } // Describe content view. final ViewDescriptor viewDescriptor = new ViewDescriptor(page, title, DefaultValoView.class); viewDescriptor.getProductionVersion().setDynamic(true); if (editPrivilege) { viewDescriptor.setViewletClass("content", RenderFlow.class, content); } else { viewDescriptor.setViewletClass("content", RenderViewlet.class, markup); } dynamicSiteDescriptor.getViewDescriptors().add(viewDescriptor); } }
From source file:com.opengamma.util.db.management.AbstractDbManagement.java
@Override public void clearTables(String catalog, String schema, Collection<String> ignoredTables) { LinkedList<String> script = new LinkedList<String>(); Connection conn = null;/*from w w w . ja v a 2 s.co m*/ try { if (!getCatalogCreationStrategy().catalogExists(catalog)) { return; // nothing to clear } conn = connect(catalog); setActiveSchema(conn, schema); Statement statement = conn.createStatement(); // Clear tables SQL List<String> tablesToClear = new ArrayList<String>(); for (String name : getAllTables(catalog, schema, statement)) { if (!ignoredTables.contains(name.toLowerCase())) { tablesToClear.add(name); } } List<String> clearTablesCommands = getClearTablesCommand(schema, tablesToClear); script.addAll(clearTablesCommands); for (String name : tablesToClear) { Table table = new Table(name); if (matches(table.getName().toLowerCase(), Pattern.compile(".*?hibernate_sequence"))) { // if it's a sequence table, reset it script.add("INSERT INTO " + table.getQualifiedName(getHibernateDialect(), null, schema) + " values ( 1 )"); } } // Now execute it all. Constraints are taken into account by retrying the failed statement after all // dependent tables have been cleared first. int i = 0; int maxAttempts = script.size() * 3; // make sure the loop eventually terminates. 
Important if there's a cycle in the table dependency graph SQLException latestException = null; while (i < maxAttempts && !script.isEmpty()) { String sql = script.remove(); try { statement.executeUpdate(sql); } catch (SQLException e) { // assume it failed because of a constraint violation // try deleting other tables first - make this the new last statement latestException = e; script.add(sql); } i++; } statement.close(); if (i == maxAttempts && !script.isEmpty()) { throw new OpenGammaRuntimeException( "Failed to clear tables - is there a cycle in the table dependency graph?", latestException); } } catch (SQLException e) { throw new OpenGammaRuntimeException("Failed to clear tables", e); } finally { try { if (conn != null) { conn.close(); } } catch (SQLException e) { } } }