List of usage examples for java.util.Collection.remove(Object)
boolean remove(Object o);
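Before the real-world examples below, here is a minimal, self-contained sketch of the basic contract of remove: it deletes a single element equal to the argument (matched via equals) and returns whether the collection changed. The class name and values are illustrative only and not taken from any of the projects listed below.

import java.util.ArrayList;
import java.util.Collection;

public class CollectionRemoveDemo {
    public static void main(String[] args) {
        Collection<String> names = new ArrayList<>();
        names.add("alice");
        names.add("bob");
        names.add("alice");

        // remove(Object) deletes one element equal to the argument and
        // returns true if the collection changed as a result of the call
        boolean removed = names.remove("alice");
        System.out.println(removed);      // true
        System.out.println(names.size()); // 2 -- only the first "alice" was removed

        // removing an element that is not present returns false and leaves the collection unchanged
        System.out.println(names.remove("carol")); // false
    }
}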
From source file:ubic.gemma.core.analysis.preprocess.VectorMergingServiceTest.java
@Test
final public void test() throws Exception {
    /*
     * Need a persistent experiment that uses multiple array designs. Then merge the designs, switch the vectors,
     * and merge the vectors. GSE3443
     */
    /*
     * The experiment uses the following GPLs
     *
     * GPL2868, GPL2933, GPL2934, GPL2935, GPL2936, GPL2937, GPL2938
     *
     * Example of a sequence appearing on more than one platform: N57553
     */
    geoService.setGeoDomainObjectGenerator(
            new GeoDomainObjectGeneratorLocal(this.getTestFileBasePath("gse3443merge")));

    Collection<?> results = geoService.fetchAndLoad("GSE3443", false, false, false);
    ee = (ExpressionExperiment) results.iterator().next();
    ee = this.eeService.thawLite(ee);

    // fix for unknown log scale
    for (QuantitationType qt : ee.getQuantitationTypes()) {
        if (qt.getIsPreferred()) {
            qt.setScale(ScaleType.LOG2);
            quantitationTypeService.update(qt);
        }
    }

    Collection<ArrayDesign> aas = eeService.getArrayDesignsUsed(ee);
    assertEquals(7, aas.size());

    /*
     * Check number of sequences across all platforms. This is how many elements we need on the new platform, plus
     * extras for duplicated sequences (e.g. elements that don't have a sequence...)
     */
    Collection<ArrayDesign> taas = new HashSet<>();
    Set<BioSequence> oldbs = new HashSet<>();
    for (ArrayDesign arrayDesign : aas) {
        arrayDesign = arrayDesignService.thaw(arrayDesign);
        taas.add(arrayDesign);
        for (CompositeSequence cs : arrayDesign.getCompositeSequences()) {
            log.info(cs + " " + cs.getBiologicalCharacteristic());
            oldbs.add(cs.getBiologicalCharacteristic());
        }
    }
    assertEquals(63, oldbs.size());

    /*
     * Check total size of elements across all 7 platforms.
     */
    int totalElements = 0;
    for (ArrayDesign arrayDesign : taas) {
        totalElements += arrayDesign.getCompositeSequences().size();
    }
    assertEquals(140, totalElements);

    ArrayDesign firstaa = taas.iterator().next();
    aas.remove(firstaa);

    assertNull(firstaa.getMergedInto());

    mergedAA = arrayDesignMergeService.merge(firstaa, taas, "testMerge" + RandomStringUtils.randomAlphabetic(5),
            "merged" + RandomStringUtils.randomAlphabetic(5), false);

    assertEquals(72, mergedAA.getCompositeSequences().size());

    Set<BioSequence> seenBs = new HashSet<>();
    for (CompositeSequence cs : mergedAA.getCompositeSequences()) {
        seenBs.add(cs.getBiologicalCharacteristic());
    }
    assertEquals(63, seenBs.size());

    // just to make this explicit. The new array design has to contain all the old sequences.
    assertEquals(oldbs.size(), seenBs.size());

    ee = eeService.thaw(ee);
    ee = eePlatformSwitchService.switchExperimentToArrayDesign(ee, mergedAA);
    ee = eeService.thaw(ee);

    // check we actually got switched over.
    for (BioAssay ba : ee.getBioAssays()) {
        assertEquals(mergedAA, ba.getArrayDesignUsed());
    }
    for (RawExpressionDataVector v : ee.getRawExpressionDataVectors()) {
        assertEquals(mergedAA, v.getDesignElement().getArrayDesign());
    }

    assertEquals(16, ee.getQuantitationTypes().size());
    assertEquals(1828, ee.getRawExpressionDataVectors().size());

    ee = vectorMergingService.mergeVectors(ee);

    // check we got the right processed data
    Collection<ProcessedExpressionDataVector> pvs = processedExpressionDataVectorService
            .getProcessedDataVectors(ee);
    assertEquals(72, pvs.size());

    assertEquals(978, ee.getRawExpressionDataVectors().size());

    ee = eeService.thaw(ee);

    Collection<DoubleVectorValueObject> processedDataArrays = processedExpressionDataVectorService
            .getProcessedDataArrays(ee, 50);
    assertEquals(50, processedDataArrays.size());
}
From source file:org.batoo.jpa.core.impl.collections.ManagedCollection.java
/**
 * Merges the collection with the entity
 *
 * @param entityManager
 *            the entity manager
 * @param instance
 *            the new entity
 * @param requiresFlush
 *            if an implicit flush is required
 * @param processed
 *            registry of processed entities
 * @param instances
 *            the persisted instances
 *
 * @since 2.0.0
 */
@SuppressWarnings({ "unchecked", "rawtypes" })
public void mergeWith(EntityManagerImpl entityManager, Object instance, MutableBoolean requiresFlush,
        IdentityHashMap<Object, Object> processed, LinkedList<ManagedInstance<?>> instances) {
    final ArrayList<E> mergedChildren = Lists.newArrayList();

    final Object children = this.mapping.get(instance);

    // if it is a managed collection and not initialized then skip the merge
    if ((children instanceof ManagedCollection) && !((ManagedCollection<E>) children).isInitialized()) {
        return;
    }

    final Collection<E> collection;
    if (children instanceof Collection) {
        collection = (Collection<E>) children;
    }
    else {
        collection = ((Map<?, E>) children).values();
    }

    // merge all the new children
    for (final E child : collection) {
        mergedChildren.add(entityManager.mergeImpl(child, requiresFlush, processed, instances,
                this.mapping.cascadesMerge()));
    }

    // make a snapshot
    this.snapshot();
    final Collection<E> delegate = this.getDelegate();

    boolean changed = false;

    final SessionImpl session = entityManager.getSession();

    final PluralAssociationMappingImpl<?, ?, ?> inversePluralMapping = (this.inverse != null)
            && (this.inverse.getAttribute() instanceof PluralAttributeImpl) ? //
                    (PluralAssociationMappingImpl<?, ?, ?>) this.inverse : null;

    // TODO needs to be overridden by ManagedMap

    // add the new children
    for (int i = 0; i < mergedChildren.size(); i++) {
        final E child = mergedChildren.get(i);

        if (!delegate.contains(child)) {
            this.getDelegate().add(child);

            if (this.inverse != null) {
                if (inversePluralMapping != null) {
                    final Collection inverseCollection = (Collection<?>) inversePluralMapping.get(child);
                    if (!inverseCollection.contains(this.managedInstance.getInstance())) {
                        inverseCollection.add(this.managedInstance.getInstance());
                    }
                }
                else {
                    this.inverse.set(session.get(child).getInstance(), this.managedInstance.getInstance());
                }
            }

            changed = true;
        }
    }

    // remove the non existent children
    final ArrayList<E> delegateList = Lists.newArrayList(delegate);
    for (int i = 0; i < delegateList.size(); i++) {
        final E child = delegateList.get(i);

        if (!mergedChildren.contains(child)) {
            this.removeChild(child);

            if (this.inverse != null) {
                if (inversePluralMapping != null) {
                    final Collection inverseCollection = (Collection<?>) inversePluralMapping.get(child);
                    inverseCollection.remove(this.managedInstance.getInstance());
                }
                else {
                    this.inverse.set(session.get(child).getInstance(), null);
                }
            }

            changed = true;
        }
    }

    if (changed) {
        this.changed();
    }
}
From source file:org.usergrid.tools.DupAdminRepair.java
@Override
public void runTool(CommandLine line) throws Exception {
    String outputDir = line.getOptionValue("output");

    String emailsDir = String.format("%s/emails", outputDir);
    String usernamesDir = String.format("%s/usernames", outputDir);
    createDir(emailsDir);
    createDir(usernamesDir);

    startSpring();

    logger.info("Starting crawl of all admins");

    EntityManager em = emf.getEntityManager(CassandraService.MANAGEMENT_APPLICATION_ID);
    Application app = em.getApplication();

    // search for all orgs
    Query query = new Query();
    query.setLimit(PAGE_SIZE);
    Results r = null;

    Multimap<String, UUID> emails = HashMultimap.create();
    Multimap<String, UUID> usernames = HashMultimap.create();

    do {
        r = em.searchCollection(app, "users", query);

        for (Entity entity : r.getEntities()) {
            emails.put(entity.getProperty("email").toString().toLowerCase(), entity.getUuid());
            usernames.put(entity.getProperty("username").toString().toLowerCase(), entity.getUuid());
        }

        query.setCursor(r.getCursor());

        logger.info("Searching next page");

    } while (r != null && r.size() == PAGE_SIZE);

    // now go through and print out duplicate emails
    for (String username : usernames.keySet()) {
        Collection<UUID> ids = usernames.get(username);

        if (ids.size() > 1) {
            logger.info("Found multiple users with the username {}", username);

            // force the username to be reset to the user's email
            resolveUsernameConflicts(usernamesDir, username, ids);
        }
    }

    for (String email : emails.keySet()) {
        Collection<UUID> ids = emails.get(email);

        if (ids.size() > 1) {
            // get the admin the same way as the rest tier, this way the OTHER
            // admins will be removed
            UserInfo targetUser = managementService.getAdminUserByEmail(email);

            if (targetUser == null) {
                List<UUID> tempIds = new ArrayList<UUID>(ids);
                Collections.sort(tempIds);

                UUID toLoad = tempIds.get(0);

                logger.warn("Could not load target user by email {}, loading by UUID {} instead", email, toLoad);
                targetUser = managementService.getAdminUserByUuid(toLoad);
                ids.remove(toLoad);
            }

            UUID targetId = targetUser.getUuid();
            ids.remove(targetId);

            logger.warn("Found multiple admins with the email {}. Retaining uuid {}", email, targetId);

            FileWriter file = new FileWriter(String.format("%s/%s.all", emailsDir, email));

            Map<String, Object> userOrganizationData = managementService.getAdminUserOrganizationData(targetId);

            file.write(JsonUtils.mapToFormattedJsonString(userOrganizationData));

            for (UUID id : ids) {
                userOrganizationData = managementService.getAdminUserOrganizationData(id);

                file.write(JsonUtils.mapToFormattedJsonString(userOrganizationData));
                file.write("\n\n");

                mergeAdmins(emailsDir, id, targetId);
            }

            file.flush();
            file.close();

            // force the index update after all other admins have been merged
            logger.info("Forcing re-index of admin with email {} and id {}", email, targetId);

            User targetUserEntity = em.get(targetUser.getUuid(), User.class);
            em.update(targetUserEntity);

            FileWriter merged = new FileWriter(String.format("%s/%s.merged", emailsDir, email));

            userOrganizationData = managementService.getAdminUserOrganizationData(targetUser.getUuid());

            merged.write(JsonUtils.mapToFormattedJsonString(userOrganizationData));
            merged.flush();
            merged.close();
        }
    }

    logger.info("Repair complete");
}
From source file:org.apache.hadoop.hdfs.server.namenode.NNStorage.java
/**
 * Set the storage directories which will be used. This should only ever be
 * called from inside NNStorage. However, it needs to remain package private
 * for testing, as StorageDirectories need to be reinitialised after using
 * Mockito.spy() on this class, as Mockito doesn't work well with inner
 * classes, such as StorageDirectory in this case.
 *
 * Synchronized due to initialization of storageDirs and removedStorageDirs.
 *
 * @param fsNameDirs Locations to store images.
 * @param fsEditsDirs Locations to store edit logs.
 * @param locationMap location descriptors
 * @throws IOException
 */
public synchronized void setStorageDirectories(Collection<URI> fsNameDirs, Collection<URI> fsEditsDirs,
        Map<URI, NNStorageLocation> locationMap) throws IOException {
    this.storageDirs.clear();
    this.removedStorageDirs.clear();

    for (URI dirName : fsNameDirs) {
        boolean isAlsoEdits = false;
        for (URI editsDirName : fsEditsDirs) {
            if (editsDirName.compareTo(dirName) == 0) {
                isAlsoEdits = true;
                fsEditsDirs.remove(editsDirName);
                break;
            }
        }
        NameNodeDirType dirType = (isAlsoEdits) ? NameNodeDirType.IMAGE_AND_EDITS : NameNodeDirType.IMAGE;

        // Add to the list of storage directories, only if the
        // URI is of type file://
        if (dirName.getScheme().compareTo(JournalType.FILE.name().toLowerCase()) == 0) {
            this.addStorageDir(new NNStorageDirectory(new File(dirName.getPath()), dirType,
                    locationMap == null ? null : locationMap.get(dirName)));
        }
    }

    // Add edits dirs if they are different from name dirs
    for (URI dirName : fsEditsDirs) {
        checkSchemeConsistency(dirName);

        // Add to the list of storage directories, only if the
        // URI is of type file://
        if (dirName.getScheme().compareTo(JournalType.FILE.name().toLowerCase()) == 0)
            this.addStorageDir(new NNStorageDirectory(new File(dirName.getPath()), NameNodeDirType.EDITS,
                    locationMap == null ? null : locationMap.get(dirName)));
    }
}
From source file:ch.algotrader.service.TransactionPersistenceServiceImpl.java
/**
 * {@inheritDoc}
 */
@Override
@Transactional(propagation = Propagation.REQUIRED)
public String resetCashBalances() {

    // get all existing cashBalances
    Collection<CashBalance> existingCashBalances = this.cashBalanceDao.loadAll();

    // sum all transactions
    Collection<Transaction> transactions = this.transactionDao.loadAll();
    BigDecimalMap<Pair<Strategy, Currency>> map = new BigDecimalMap<>();
    for (Transaction transaction : transactions) {

        transaction.initializeSecurity(HibernateInitializer.INSTANCE);

        // process all currenyAmounts
        for (CurrencyAmountVO currencyAmount : transaction.getAttributions()) {
            map.increment(new Pair<>(transaction.getStrategy(), currencyAmount.getCurrency()),
                    currencyAmount.getAmount());
        }
    }

    // create cash balances
    StringBuilder buffer = new StringBuilder();
    for (Map.Entry<Pair<Strategy, Currency>, BigDecimal> entry : map.entrySet()) {

        Strategy strategy = entry.getKey().getFirst();
        Currency currency = entry.getKey().getSecond();
        BigDecimal amount = entry.getValue().setScale(this.commonConfig.getPortfolioDigits(),
                BigDecimal.ROUND_HALF_UP);

        CashBalance cashBalance = this.cashBalanceDao.findByStrategyAndCurrency(strategy, currency);
        if (cashBalance != null) {

            existingCashBalances.remove(cashBalance);

            BigDecimal oldAmount = cashBalance.getAmount();
            if (oldAmount.doubleValue() != amount.doubleValue()) {

                cashBalance.setAmount(amount);

                String info = "adjusted cashBalance " + cashBalance + " from " + oldAmount;
                LOGGER.info(info);
                buffer.append(info + "\n");
            }

        } else {

            cashBalance = CashBalance.Factory.newInstance();
            cashBalance.setCurrency(currency);
            cashBalance.setAmount(amount);
            cashBalance.setStrategy(strategy);
            this.cashBalanceDao.save(cashBalance);

            String info = "created cashBalance " + cashBalance;
            LOGGER.info(info);
            buffer.append(info + "\n");
        }
    }

    // remove all obsolete cashBalances
    for (CashBalance cashBalance : existingCashBalances) {
        Strategy strategy = cashBalance.getStrategy();

        String info = "removed cashBalance " + cashBalance;
        LOGGER.info(info);
        buffer.append(info + "\n");
    }
    this.cashBalanceDao.deleteAll(existingCashBalances);

    return buffer.toString();
}
From source file:org.artifactory.build.BuildServiceImpl.java
/**
 * Tries to match an artifact to a path based on its name. If a match is found it is added to the result set
 * and removed from the list
 */
private void tryExactArtifactToFileInfoMatch(Set<ArtifactoryBuildArtifact> results,
        final AqlBaseFullRowImpl result, Collection<Artifact> artifacts) {
    Artifact idMatch = Iterables.find(artifacts, new Predicate<Artifact>() {
        @Override
        public boolean apply(Artifact input) {
            return input.getName() != null && input.getName().equals(result.getName());
        }
    });
    results.add(new ArtifactoryBuildArtifact(idMatch, (FileInfo) AqlConverts.toFileInfo.apply(result)));
    log.debug("Matched artifact {} to path {}", idMatch.getName(), AqlUtils.fromAql(result));
    artifacts.remove(idMatch);
}
From source file:test.edu.uci.ics.jung.graph.predicates.EdgePredicateTest.java
public void testEnforcesEdgePredicate() {
    Predicate p = new UserDatumEdgePredicate("key", "a");
    Collection predicates = g.getEdgeConstraints();
    assertFalse(PredicateUtils.enforcesEdgeConstraint(g, p));
    v1 = g.addVertex(new SparseVertex());
    try {
        predicates.add(p);
        fail("should not allow new predicates in a non-empty graph " + p);
    } catch (IllegalArgumentException iae) {
    }
    g.removeAllVertices();
    predicates.add(p);
    v1 = g.addVertex(new SparseVertex());
    v2 = g.addVertex(new SparseVertex());
    v3 = g.addVertex(new SparseVertex());
    Edge e2 = new DirectedSparseEdge(v1, v2);
    Edge e3 = new DirectedSparseEdge(v3, v3);
    Edge e4 = new DirectedSparseEdge(v2, v1);
    e2.addUserDatum("key", "a", UserData.SHARED);
    e3.addUserDatum("key", "a", UserData.SHARED);
    e4.addUserDatum("key", "a", UserData.SHARED);
    g.addEdge(e2);
    g.addEdge(e3);
    g.addEdge(e4);
    assertTrue(PredicateUtils.enforcesEdgeConstraint(g, p));
    try {
        Edge e5 = new DirectedSparseEdge(v2, v3);
        g.addEdge(e5);
        fail(p.toString());
    } catch (IllegalArgumentException iae) {
    }
    Edge e6 = new DirectedSparseEdge(v3, v2);
    e6.addUserDatum("key", "a", UserData.SHARED);
    g.addEdge(e6);
    assertTrue(predicates.remove(p));
    assertFalse(predicates.remove(p));
    assertTrue(PredicateUtils.satisfiesEdgeConstraint(g, p));
}
From source file:org.apache.kylin.measure.topn.TopNMeasureType.java
@Override
public CapabilityInfluence influenceCapabilityCheck(Collection<TblColRef> unmatchedDimensions,
        Collection<FunctionDesc> unmatchedAggregations, SQLDigest digest, MeasureDesc topN) {
    // TopN measure can (and only can) provide one numeric measure and one literal dimension
    // e.g. select seller, sum(gmv) from ... group by seller order by 2 desc limit 100

    List<TblColRef> literalCol = getTopNLiteralColumn(topN.getFunction());
    for (TblColRef colRef : literalCol) {
        if (digest.filterColumns.contains(colRef) == true) {
            // doesn't allow filtering by topn literal column
            return null;
        }
    }

    if (digest.groupbyColumns.containsAll(literalCol) == false)
        return null;

    // check digest requires only one measure
    if (digest.aggregations.size() == 1) {

        // the measure function must be SUM
        FunctionDesc onlyFunction = digest.aggregations.iterator().next();
        if (isTopNCompatibleSum(topN.getFunction(), onlyFunction) == false)
            return null;

        unmatchedDimensions.removeAll(literalCol);
        unmatchedAggregations.remove(onlyFunction);
        return new CapabilityInfluence() {
            @Override
            public double suggestCostMultiplier() {
                return 0.3; // make sure TopN get ahead of other matched realizations
            }
        };
    }

    if (digest.aggregations.size() == 0) {
        // directly query the UHC column without sorting
        unmatchedDimensions.removeAll(literalCol);
        return new CapabilityInfluence() {
            @Override
            public double suggestCostMultiplier() {
                return 2.0; // topn can answer but with a higher cost
            }
        };
    }

    return null;
}
From source file:com.nextep.designer.sqlgen.mysql.impl.MySqlCapturer.java
@Override
public Collection<IIndex> getIndexes(ICaptureContext context, IProgressMonitor m) {
    final IProgressMonitor monitor = new CustomProgressMonitor(m, 100, true);
    monitor.subTask(MySQLMessages.getString("capturer.mysql.retrievingIndexes")); //$NON-NLS-1$
    final Connection conn = (Connection) context.getConnectionObject();
    Collection<IIndex> indexes = Collections.emptyList();
    Statement stmt = null;
    ResultSet rset = null;
    try {
        stmt = conn.createStatement();
        indexes = jdbcCapturer.getIndexes(context, monitor);
        final Collection<IBasicTable> indexedTables = new HashSet<IBasicTable>();
        final Map<String, IIndex> indexMap = new HashMap<String, IIndex>();
        for (IIndex index : new ArrayList<IIndex>(indexes)) {
            final IBasicTable t = index.getIndexedTable();
            // Eliminating PRIMARY named index (name based, it seems that is the way MySql
            // makes the difference, a bit crappy)
            if ("PRIMARY".equals(index.getIndexName())) { //$NON-NLS-1$
                indexes.remove(index);
            } else {
                indexedTables.add(t);
                final String indexName = CaptureHelper.getUniqueIndexName(index);
                indexMap.put(indexName, index);
            }
        }
        for (IBasicTable table : indexedTables) {
            final String tabName = table.getName();
            // Getting Mysql specific information that we could not get
            // elsewhere
            try {
                rset = stmt.executeQuery("show index from `" + tabName + "`"); //$NON-NLS-1$ //$NON-NLS-2$
                while (rset.next()) {
                    final String prefixLength = rset.getString("Sub_part"); //$NON-NLS-1$
                    final String indexType = rset.getString("Index_type"); //$NON-NLS-1$
                    final String columnName = rset.getString("Column_name"); //$NON-NLS-1$
                    final String indexName = rset.getString("Key_name"); //$NON-NLS-1$
                    final String indexUniqueName = CaptureHelper.getUniqueObjectName(tabName, indexName);
                    final IIndex i = indexMap.get(indexUniqueName);
                    if (i instanceof IMySQLIndex) {
                        final IMySQLIndex index = (IMySQLIndex) i;
                        if (prefixLength != null) {
                            final String indexedColName = CaptureHelper.getUniqueObjectName(tabName, columnName);
                            final IBasicColumn c = (IBasicColumn) context.getCapturedObject(
                                    IElementType.getInstance(IBasicColumn.TYPE_ID), indexedColName);
                            if (c != null && index != null) {
                                index.setColumnPrefixLength(c.getReference(), Integer.valueOf(prefixLength));
                            }
                        }
                        if (!"BTREE".equals(indexType)) { //$NON-NLS-1$
                            if (index != null) {
                                index.setIndexType(IndexType.valueOf(indexType));
                            }
                        }
                    }
                }
            } finally {
                CaptureHelper.safeClose(rset, null);
            }
        }
    } catch (SQLException e) {
        LOGGER.warn(
                MessageFormat.format(MySQLMessages.getString("capturer.mysql.fetchIndexesError"), //$NON-NLS-1$
                        e.getMessage()), e);
    } finally {
        CaptureHelper.safeClose(null, stmt);
    }
    monitor.worked(1);
    return indexes;
}
From source file:de.vandermeer.skb.interfaces.transformers.textformat.Text_To_FormattedText.java
@Override
default Collection<StrBuilder> transform(String s) {
    //check all settings for being valid, throw exceptions if not
    IsTransformer.super.transform(s);

    Validate.validState(
            ArrayUtils.contains(new int[] { ALIGN_LEFT, ALIGN_RIGHT, ALIGN_CENTER, ALIGN_JUSTIFIED,
                    ALIGN_JUSTIFIED_LEFT, ALIGN_JUSTIFIED_RIGHT }, this.getAlignment()),
            "unknown alignment <" + this.getAlignment() + ">");
    Validate.validState(ArrayUtils.contains(
            new int[] { FORMAT_NONE, FORMAT_HANGING_PARAGRAPH, FORMAT_FIRST_LINE,
                    FORMAT_FIRSTLINE_AND_HANGINGPARAGRAPH, FORMAT_DROPCAP, FORMAT_DROPCAP_WITH_PADDING },
            this.getFormat()), "unknown format <" + this.getFormat() + ">");
    Validate.validState(this.getTextWidth() > 0,
            "text width is less than 1, was <" + this.getTextWidth() + ">");
    Validate.notNull(this.getInnerWsChar());
    Validate.notNull(this.getLeftPaddingChar());
    Validate.notNull(this.getRightPaddingChar());
    Validate.notNull(this.getCollectionStrategy());
    Validate.validState(this.getHangingIndentation() > 0,
            "hanging paragraph indentation was less than null, setting was <" + this.getHangingIndentation() + ">");
    Validate.validState(this.getFirstlineIndentation() > 0,
            "first line indentation was less than null, setting was <" + this.getFirstlineIndentation() + ">");
    Validate.validState(this.getCharsBetweenDroppcapAndText() > 0,
            "characters between dropped capital letter and text lines was less than 1, setting was <"
                    + this.getCharsBetweenDroppcapAndText() + ">");
    Validate.validState(this.getLinesAfterDropcap() > 0,
            "lines added after a dropped capital letter was less than 1, setting was <"
                    + this.getLinesAfterDropcap() + ">");

    Collection<StrBuilder> ret = this.getCollectionStrategy().get();

    //if nothing is to be done return string with blanks
    if (StringUtils.isBlank(s)) {
        ret.add(new StrBuilder().appendPadding(this.getTextWidth(), ' '));
        return ret;
    }

    if (this.getFormat() == FORMAT_DROPCAP || this.getFormat() == FORMAT_DROPCAP_WITH_PADDING) {
        Validate.notNull(this.getDropCap());
        Validate.noNullElements(this.getDropCap());
        int l = 0;
        for (String ds : this.getDropCap()) {
            if (l != 0) {
                Validate.validState(l == ds.length(),
                        "dropped capital letter has some variations in length in the array, not alowed");
            }
            l = ds.length();
        }
    }

    //all validated, start the transformation
    //first remove all excessive whitespaces from the string
    String text = String_To_NoWs.convert(s);

    //create a string array from the input text according to the format settings
    Pair<ArrayList<String>, ArrayList<String>> pair = null;
    int topWidth = 0;
    int bottomWidth = 0;
    switch (this.getFormat()) {
    case FORMAT_NONE:
        topWidth = bottomWidth = this.getTextWidth();
        pair = Text_To_WrappedFormat.convert(text, topWidth);
        Validate.isTrue(pair.getLeft().size() == 0);
        break;
    case FORMAT_HANGING_PARAGRAPH:
        topWidth = this.getTextWidth();
        bottomWidth = this.getTextWidth() - this.getHangingIndentation();
        pair = Text_To_WrappedFormat.convert(text, bottomWidth, Pair.of(1, topWidth));
        Validate.isTrue(pair.getLeft().size() == 1);
        break;
    case FORMAT_FIRST_LINE:
        topWidth = this.getTextWidth() - this.getFirstlineIndentation();
        bottomWidth = this.getTextWidth();
        pair = Text_To_WrappedFormat.convert(text, bottomWidth, Pair.of(1, topWidth));
        Validate.isTrue(pair.getLeft().size() == 1);
        break;
    case FORMAT_FIRSTLINE_AND_HANGINGPARAGRAPH:
        topWidth = this.getTextWidth() - this.getFirstlineIndentation();
        bottomWidth = this.getTextWidth() - this.getHangingIndentation();
        pair = Text_To_WrappedFormat.convert(text, bottomWidth, Pair.of(1, topWidth));
        Validate.isTrue(pair.getLeft().size() == 1);
        break;
    case FORMAT_DROPCAP:
        topWidth = this.getTextWidth() - this.getDropCap()[0].length() - 1;
        bottomWidth = this.getTextWidth();
        pair = Text_To_WrappedFormat.convert(text.substring(1), bottomWidth,
                Pair.of(this.getDropCap().length + this.getLinesAfterDropcap(), topWidth));
        Validate.isTrue(pair.getLeft().size() == this.getDropCap().length + 1);
        break;
    case FORMAT_DROPCAP_WITH_PADDING:
        topWidth = this.getTextWidth() - this.getDropCap()[0].length() - this.getCharsBetweenDroppcapAndText();
        bottomWidth = this.getTextWidth();
        pair = Text_To_WrappedFormat.convert(text.substring(1), bottomWidth,
                Pair.of(this.getDropCap().length + this.getLinesAfterDropcap(), topWidth));
        Validate.isTrue(pair.getLeft().size() == this.getDropCap().length + 1);
    }

    //we have a pair of wrapped lines (top and bottom), apply alignment
    Transformer<String, StrBuilder> topTr = null;
    Transformer<String, StrBuilder> bottomTr = null;
    switch (this.getAlignment()) {
    case ALIGN_LEFT:
        topTr = String_To_LeftPadded.create(topWidth, this.getRightPaddingChar(), this.getInnerWsChar(), null);
        bottomTr = String_To_LeftPadded.create(bottomWidth, this.getRightPaddingChar(), this.getInnerWsChar(), null);
        break;
    case ALIGN_RIGHT:
        topTr = String_To_RightPadded.create(topWidth, this.getLeftPaddingChar(), this.getInnerWsChar(), null);
        bottomTr = String_To_RightPadded.create(bottomWidth, this.getLeftPaddingChar(), this.getInnerWsChar(), null);
        break;
    case ALIGN_CENTER:
        topTr = String_To_Centered.create(topWidth, this.getLeftPaddingChar(), this.getRightPaddingChar(),
                this.getInnerWsChar(), null);
        bottomTr = String_To_Centered.create(bottomWidth, this.getLeftPaddingChar(), this.getRightPaddingChar(),
                this.getInnerWsChar(), null);
        break;
    case ALIGN_JUSTIFIED:
    case ALIGN_JUSTIFIED_LEFT:
    case ALIGN_JUSTIFIED_RIGHT:
        topTr = String_To_Justified.create(topWidth, this.getInnerWsChar(), null);
        bottomTr = String_To_Justified.create(bottomWidth, this.getInnerWsChar(), null);
        break;
    }

    Collection<StrBuilder> top = ClusterElementTransformer.create().transform(pair.getLeft(), topTr,
            this.getCollectionStrategy());
    Collection<StrBuilder> bottom = ClusterElementTransformer.create().transform(pair.getRight(), bottomTr,
            this.getCollectionStrategy());

    //adjust the last line if we had the special justified alignments
    if (bottom.size() > 0
            && (this.getAlignment() == ALIGN_JUSTIFIED_LEFT || this.getAlignment() == ALIGN_JUSTIFIED_RIGHT)) {
        // remove last entry in the collection, we want to replace that one
        Object[] objAr = bottom.toArray();
        Object line = objAr[objAr.length - 1];
        bottom.remove(objAr[objAr.length - 1]);

        //get the string back to a normal string
        String lineString = line.toString().replaceAll(this.getInnerWsChar().toString(), " ")
                .replaceAll("\\h+", " ");

        // now add a new one with the requested alignment
        if (this.getAlignment() == ALIGN_JUSTIFIED_LEFT) {
            bottom.add(String_To_LeftPadded.convert(lineString, bottomWidth, this.getRightPaddingChar()));
        }
        if (this.getAlignment() == ALIGN_JUSTIFIED_RIGHT) {
            bottom.add(String_To_RightPadded.convert(lineString, bottomWidth, this.getLeftPaddingChar()));
        }
    }

    //do the format post processing
    switch (this.getFormat()) {
    case FORMAT_NONE:
        ret.addAll(top);
        ret.addAll(bottom);
        break;
    case FORMAT_HANGING_PARAGRAPH:
        ret.addAll(top);
        for (StrBuilder b : bottom) {
            ret.add(new StrBuilder().appendPadding(this.getHangingIndentation(), this.getLeftPaddingChar())
                    .append(b));
        }
        break;
    case FORMAT_FIRST_LINE:
        for (StrBuilder t : top) {
            ret.add(new StrBuilder().appendPadding(this.getFirstlineIndentation(), this.getLeftPaddingChar())
                    .append(t));
        }
        ret.addAll(bottom);
        break;
    case FORMAT_FIRSTLINE_AND_HANGINGPARAGRAPH:
        for (StrBuilder t : top) {
            ret.add(new StrBuilder().appendPadding(this.getFirstlineIndentation(), this.getLeftPaddingChar())
                    .append(t));
        }
        for (StrBuilder b : bottom) {
            ret.add(new StrBuilder().appendPadding(this.getHangingIndentation(), this.getLeftPaddingChar())
                    .append(b));
        }
        break;
    case FORMAT_DROPCAP:
        int count = 0;
        for (StrBuilder t : top) {
            if (count < this.getDropCap().length) {
                ret.add(new StrBuilder().append(this.getDropCap()[count]).append(' ').append(t));
            } else {
                ret.add(new StrBuilder().appendPadding(this.getDropCap()[0].length(), ' ').append(' ')
                        .append(t));
            }
            count++;
        }
        ret.addAll(bottom);
        break;
    case FORMAT_DROPCAP_WITH_PADDING:
        count = 0;
        for (StrBuilder t : top) {
            if (count < this.getDropCap().length) {
                ret.add(new StrBuilder().append(this.getDropCap()[count])
                        .appendPadding(this.getCharsBetweenDroppcapAndText(), ' ').append(t));
            } else {
                ret.add(new StrBuilder().appendPadding(this.getDropCap()[0].length(), ' ')
                        .appendPadding(this.getCharsBetweenDroppcapAndText(), ' ').append(t));
            }
            count++;
        }
        ret.addAll(bottom);
    }
    return ret;
}