List of usage examples for the java.lang.StringBuffer.lastIndexOf(String) method
@Override public int lastIndexOf(String str)
From source file:fedora.server.access.dissemination.DisseminationService.java
/** * <p>// w w w . j a v a2 s .c o m * Removes any optional userInputParms which remain in the dissemination * URL. This occurs when a method has optional parameters and the user does * not supply a value for one or more of the optional parameters. The result * is a syntax similar to "parm=(PARM_BIND_KEY)". This method removes these * non-supplied optional parameters from the string. * </p> * * @param dissURL * String to be processed. * @return An edited string with parameters removed where no value was * specified for any optional parameters. */ private String stripParms(String dissURL) { // if no parameters, simply return passed in string. if (dissURL.indexOf("?") == -1) { return dissURL; } String requestURI = dissURL.substring(0, dissURL.indexOf("?") + 1); String parmString = dissURL.substring(dissURL.indexOf("?") + 1, dissURL.length()); String[] parms = parmString.split("&"); StringBuffer sb = new StringBuffer(); for (String element : parms) { int len = element.length() - 1; if (element.lastIndexOf(")") != len) { sb.append(element + "&"); } } int index = sb.lastIndexOf("&"); if (index != -1 && index + 1 == sb.length()) { sb.replace(index, sb.length(), ""); } return requestURI + sb.toString(); }
From source file:dk.netarkivet.harvester.datamodel.RunningJobsInfoDBDAO.java
/**
 * Stores a {@link StartedJobInfo} record to the persistent storage.
 * The record is stored in the monitor table, and if the elapsed time since
 * the last history sample is equal or superior to the history sample rate,
 * also to the history table.
 *
 * @param startedJobInfo the record to store.
 */
@Override
public synchronized void store(StartedJobInfo startedJobInfo) {
    ArgumentNotValid.checkNotNull(startedJobInfo, "StartedJobInfo startedJobInfo");
    Connection c = HarvestDBConnection.get();
    try {
        PreparedStatement stm = null;
        // First, is there already a record in the monitor table for this
        // (jobId, harvestName) pair? Decides between UPDATE and INSERT below.
        boolean update = false;
        try {
            stm = c.prepareStatement(
                    "SELECT jobId FROM runningJobsMonitor" + " WHERE jobId=? AND harvestName=?");
            stm.setLong(1, startedJobInfo.getJobId());
            stm.setString(2, startedJobInfo.getHarvestName());
            // One row expected, as per PK definition
            update = stm.executeQuery().next();
        } catch (SQLException e) {
            String message = "SQL error checking running jobs monitor table" + "\n"
                    + ExceptionUtils.getSQLExceptionCause(e);
            log.warn(message, e);
            throw new IOFailure(message, e);
        }
        try {
            // Update or insert latest progress information for this job.
            // Auto-commit is disabled so the statement is committed explicitly
            // (and rolled back in the finally-block on failure).
            c.setAutoCommit(false);
            StringBuffer sql = new StringBuffer();
            if (update) {
                sql.append("UPDATE runningJobsMonitor SET ");
                StringBuffer columns = new StringBuffer();
                for (HM_COLUMN setCol : HM_COLUMN.values()) {
                    columns.append(setCol.name() + "=?, ");
                }
                // Chop the trailing ", " off the generated column list.
                sql.append(columns.substring(0, columns.lastIndexOf(",")));
                sql.append(" WHERE jobId=? AND harvestName=?");
            } else {
                sql.append("INSERT INTO runningJobsMonitor (");
                sql.append(HM_COLUMN.getColumnsInOrder());
                sql.append(") VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)");
            }
            stm = c.prepareStatement(sql.toString());
            // Bind every column by its declared rank; the placeholder order in
            // both the UPDATE and the INSERT follows HM_COLUMN declaration order.
            stm.setLong(HM_COLUMN.jobId.rank(), startedJobInfo.getJobId());
            stm.setString(HM_COLUMN.harvestName.rank(), startedJobInfo.getHarvestName());
            stm.setLong(HM_COLUMN.elapsedSeconds.rank(), startedJobInfo.getElapsedSeconds());
            stm.setString(HM_COLUMN.hostUrl.rank(), startedJobInfo.getHostUrl());
            stm.setDouble(HM_COLUMN.progress.rank(), startedJobInfo.getProgress());
            stm.setLong(HM_COLUMN.queuedFilesCount.rank(), startedJobInfo.getQueuedFilesCount());
            stm.setLong(HM_COLUMN.totalQueuesCount.rank(), startedJobInfo.getTotalQueuesCount());
            stm.setLong(HM_COLUMN.activeQueuesCount.rank(), startedJobInfo.getActiveQueuesCount());
            stm.setLong(HM_COLUMN.retiredQueuesCount.rank(), startedJobInfo.getRetiredQueuesCount());
            stm.setLong(HM_COLUMN.exhaustedQueuesCount.rank(), startedJobInfo.getExhaustedQueuesCount());
            stm.setLong(HM_COLUMN.alertsCount.rank(), startedJobInfo.getAlertsCount());
            stm.setLong(HM_COLUMN.downloadedFilesCount.rank(), startedJobInfo.getDownloadedFilesCount());
            stm.setLong(HM_COLUMN.currentProcessedKBPerSec.rank(),
                    startedJobInfo.getCurrentProcessedKBPerSec());
            stm.setLong(HM_COLUMN.processedKBPerSec.rank(), startedJobInfo.getProcessedKBPerSec());
            stm.setDouble(HM_COLUMN.currentProcessedDocsPerSec.rank(),
                    startedJobInfo.getCurrentProcessedDocsPerSec());
            stm.setDouble(HM_COLUMN.processedDocsPerSec.rank(), startedJobInfo.getProcessedDocsPerSec());
            stm.setInt(HM_COLUMN.activeToeCount.rank(), startedJobInfo.getActiveToeCount());
            stm.setInt(HM_COLUMN.status.rank(), startedJobInfo.getStatus().ordinal());
            stm.setTimestamp(HM_COLUMN.tstamp.rank(),
                    new Timestamp(startedJobInfo.getTimestamp().getTime()));
            if (update) {
                // UPDATE carries two extra WHERE placeholders after the SET list.
                stm.setLong(HM_COLUMN.values().length + 1, startedJobInfo.getJobId());
                stm.setString(HM_COLUMN.values().length + 2, startedJobInfo.getHarvestName());
            }
            stm.executeUpdate();
            c.commit();
        } catch (SQLException e) {
            String message = "SQL error storing started job info " + startedJobInfo + " in monitor table"
                    + "\n" + ExceptionUtils.getSQLExceptionCause(e);
            log.warn(message, e);
            throw new IOFailure(message, e);
        } finally {
            DBUtils.closeStatementIfOpen(stm);
            DBUtils.rollbackIfNeeded(c, "store started job info", startedJobInfo);
        }
        // Should we also store a history record? Only if the last sample for
        // this job is older than HISTORY_SAMPLE_RATE (or there is none yet).
        Long lastHistoryStore = lastSampleDateByJobId.get(startedJobInfo.getJobId());
        long time = System.currentTimeMillis();
        boolean shouldSample = lastHistoryStore == null || time >= lastHistoryStore + HISTORY_SAMPLE_RATE;
        if (!shouldSample) {
            return; // we're done
        }
        try {
            c.setAutoCommit(false);
            // History rows are insert-only; same column order as the monitor table.
            stm = c.prepareStatement("INSERT INTO runningJobsHistory (" + HM_COLUMN.getColumnsInOrder()
                    + ") VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)");
            stm.setLong(HM_COLUMN.jobId.rank(), startedJobInfo.getJobId());
            stm.setString(HM_COLUMN.harvestName.rank(), startedJobInfo.getHarvestName());
            stm.setLong(HM_COLUMN.elapsedSeconds.rank(), startedJobInfo.getElapsedSeconds());
            stm.setString(HM_COLUMN.hostUrl.rank(), startedJobInfo.getHostUrl());
            stm.setDouble(HM_COLUMN.progress.rank(), startedJobInfo.getProgress());
            stm.setLong(HM_COLUMN.queuedFilesCount.rank(), startedJobInfo.getQueuedFilesCount());
            stm.setLong(HM_COLUMN.totalQueuesCount.rank(), startedJobInfo.getTotalQueuesCount());
            stm.setLong(HM_COLUMN.activeQueuesCount.rank(), startedJobInfo.getActiveQueuesCount());
            stm.setLong(HM_COLUMN.retiredQueuesCount.rank(), startedJobInfo.getRetiredQueuesCount());
            stm.setLong(HM_COLUMN.exhaustedQueuesCount.rank(), startedJobInfo.getExhaustedQueuesCount());
            stm.setLong(HM_COLUMN.alertsCount.rank(), startedJobInfo.getAlertsCount());
            stm.setLong(HM_COLUMN.downloadedFilesCount.rank(), startedJobInfo.getDownloadedFilesCount());
            stm.setLong(HM_COLUMN.currentProcessedKBPerSec.rank(),
                    startedJobInfo.getCurrentProcessedKBPerSec());
            stm.setLong(HM_COLUMN.processedKBPerSec.rank(), startedJobInfo.getProcessedKBPerSec());
            stm.setDouble(HM_COLUMN.currentProcessedDocsPerSec.rank(),
                    startedJobInfo.getCurrentProcessedDocsPerSec());
            stm.setDouble(HM_COLUMN.processedDocsPerSec.rank(), startedJobInfo.getProcessedDocsPerSec());
            stm.setInt(HM_COLUMN.activeToeCount.rank(), startedJobInfo.getActiveToeCount());
            stm.setInt(HM_COLUMN.status.rank(), startedJobInfo.getStatus().ordinal());
            stm.setTimestamp(HM_COLUMN.tstamp.rank(),
                    new Timestamp(startedJobInfo.getTimestamp().getTime()));
            stm.executeUpdate();
            c.commit();
        } catch (SQLException e) {
            String message = "SQL error storing started job info " + startedJobInfo + " in history table"
                    + "\n" + ExceptionUtils.getSQLExceptionCause(e);
            log.warn(message, e);
            throw new IOFailure(message, e);
        } finally {
            DBUtils.closeStatementIfOpen(stm);
            DBUtils.rollbackIfNeeded(c, "store started job info", startedJobInfo);
        }
        // Remember last sampling date so the next call can apply the rate limit.
        lastSampleDateByJobId.put(startedJobInfo.getJobId(), time);
    } finally {
        HarvestDBConnection.release(c);
    }
}
From source file:org.versly.rest.wsdoc.AnnotationProcessor.java
String jsonSchemaFromTypeMirror(TypeMirror type) { String serializedSchema = null; if (type.getKind().isPrimitive() || type.getKind() == TypeKind.VOID) { return null; }// ww w. j a v a 2 s. co m // we need the dto class to generate schema using jackson json-schema module // note: Types.erasure() provides canonical names whereas Class.forName() wants a "regular" name, // so forName will fail for nested and inner classes as "regular" names use $ between parent and child. Class dtoClass = null; StringBuffer erasure = new StringBuffer(_typeUtils.erasure(type).toString()); for (boolean done = false; !done;) { try { dtoClass = Class.forName(erasure.toString()); done = true; } catch (ClassNotFoundException e) { if (erasure.lastIndexOf(".") != -1) { erasure.setCharAt(erasure.lastIndexOf("."), '$'); } else { done = true; } } } // if we were able to figure out the dto class, use jackson json-schema module to serialize it Exception e = null; if (dtoClass != null) { try { ObjectMapper m = new ObjectMapper(); m.enable(SerializationFeature.WRITE_ENUMS_USING_TO_STRING); m.registerModule(new JodaModule()); SchemaFactoryWrapper visitor = new SchemaFactoryWrapper(); m.acceptJsonFormatVisitor(m.constructType(dtoClass), visitor); serializedSchema = m.writeValueAsString(visitor.finalSchema()); } catch (Exception ex) { e = ex; } } // report warning if we were not able to generate schema for non-primitive type if (serializedSchema == null) { this.processingEnv.getMessager().printMessage(Diagnostic.Kind.WARNING, "cannot generate json-schema for class " + type.toString() + " (erasure " + erasure + "), " + ((e != null) ? ("exception: " + e.getMessage()) : "class not found")); } return serializedSchema; }
From source file:org.squale.squalix.tools.compiling.java.parser.wsad.JWSADParser.java
/**
 * Appends a path to a classpath-formatted string (entries separated by the
 * configured separator, e.g. ";") and guarantees the result ends with the
 * separator.
 *
 * @param pClasspathFormat a string in "classpath" format (separator-delimited)
 * @param pPathToAdd the path to append
 * @return pClasspathFormat with pPathToAdd appended, ending with the separator
 */
private String getClasspathFormatAfterAdd(String pClasspathFormat, String pPathToAdd) {
    StringBuilder s = new StringBuilder(pClasspathFormat);
    s.append(pPathToAdd);
    String separator = mConfiguration.getClasspathSeparator();
    // Append a trailing separator unless the string already ends with one.
    // BUG FIX: the previous check, lastIndexOf(sep) != length()-1, only works
    // for single-character separators — for any longer separator it could
    // never match, so a duplicate separator was always appended.
    if (!s.toString().endsWith(separator)) {
        s.append(separator);
    }
    return s.toString();
}
From source file:com.sap.research.connectivity.gw.GWOperationsUtils.java
/**
 * Injects handling for a newly added remote field into the generated
 * persistence methods of the given entity source file: persist, merge,
 * findAll&lt;Entities&gt;, find&lt;Entity&gt;Entries and find&lt;Entity&gt;.
 *
 * @param entityClassFile editor over the generated entity source file
 * @param fieldObj the field definition (metadata array mapped to its name)
 */
public void addRemoteFieldInPersistenceMethods(JavaSourceFileEditor entityClassFile,
        Map.Entry<String[], String> fieldObj) {
    ArrayList<JavaSourceMethod> methods = entityClassFile.getGlobalMethodList();
    String entityName = entityClassFile.CLASS_NAME;
    String entityPlural = GwUtils.getInflectorPlural(entityName, Locale.ENGLISH);
    String entityLower = StringUtils.uncapitalize(entityName);

    for (JavaSourceMethod candidate : methods) {
        String name = candidate.getMethodName();

        if (name.endsWith("persist") || name.endsWith("merge")) {
            // Persist/merge: add the field just before the OData request executes.
            StringBuffer body = new StringBuffer(candidate.getMethodBody());
            body.insert(body.lastIndexOf(".execute()"), makeGWPersistFieldCode("", fieldObj));
            candidate.setMethodBody(body.toString());
        } else if (name.endsWith("findAll" + entityPlural)
                || name.endsWith("find" + entityName + "Entries")) {
            // List queries: surface the field on each listed instance, right
            // before it is added to the virtual entity list.
            StringBuffer body = new StringBuffer(candidate.getMethodBody());
            body.insert(body.indexOf("virtual" + entityName + "List.add"),
                    makeGWShowFieldCode("", entityLower + "Instance", entityLower + "Item", fieldObj));
            candidate.setMethodBody(body.toString());
        } else if (name.endsWith("find" + entityName)) {
            // Single-entity lookup: surface the field right before the return.
            StringBuffer body = new StringBuffer(candidate.getMethodBody());
            body.insert(body.indexOf("return "),
                    makeGWShowFieldCode("", "virtual" + entityName, entityLower, fieldObj));
            candidate.setMethodBody(body.toString());
        }
    }
}
From source file:org.jboss.tusk.smartdata.ejb.SearcherEJB.java
private String pageAndMakeJSON(List<String> results, int from, int to) { if (from < 1 || to < 1 || from > to) { //return all results if from or to is < 1, //or if from > to (ie it's an invalid range), //or if from is greater than the number of results LOG.info("Returning all results because range is either not given or is invalid."); return ispnService.makeJSONFromResultList(results, false); } else {//from ww w . j a v a2 s. c om long start = System.currentTimeMillis(); String completeJSON = ispnService.makeJSONFromResultList(results, false); //this is a hack to do paging, I know, but it's the best I can do on short notice StringBuffer buf = new StringBuffer(); String[] pieces = completeJSON.split("\\{\"key\""); int numItems = pieces.length - 1; //subtract 1 because the first item is empty //the range must not start too high; if so, just return everything if (from > numItems) { return completeJSON; } int effectiveFrom = Math.min(from, numItems); int effectiveTo = Math.min(to, numItems); LOG.info("Paging on " + numItems + " matches with effective from and to of " + effectiveFrom + "-" + effectiveTo + "."); //'to' and 'from' start at 1, so we need to subtract 1 from them to get the corresponding indexes... //but there is also an empty first element of pieces[], so we add 1 back to 'from' and 'to' //and 'from' is inclusive... for (int i = effectiveFrom; i <= effectiveTo; i++) { buf.append("{\"key\"").append(pieces[i]); } //remove trailing ", " if necessary if (buf.lastIndexOf(", ") == (buf.length() - 2)) { buf.deleteCharAt(buf.length() - 2); } LOG.info("Paging took " + (System.currentTimeMillis() - start) + " ms."); return buf.toString(); } }
From source file:com.sap.research.connectivity.gw.GWOperationsUtils.java
private void addRelationshipInPersistenceMethods(JavaSourceFileEditor entityClassFile, String nav, String javaType, String associationType) { // TODO Auto-generated method stub ArrayList<JavaSourceMethod> globalMethodList = entityClassFile.getGlobalMethodList(); String pluralRemoteEntity = GwUtils.getInflectorPlural(entityClassFile.CLASS_NAME, Locale.ENGLISH); String smallRemoteEntity = StringUtils.uncapitalize(entityClassFile.CLASS_NAME); for (JavaSourceMethod method : globalMethodList) { String methodName = method.getMethodName(); /*// w w w. j a v a 2s.c o m * We insert the relation in the persist and merge methods */ if (methodName.endsWith("persist")) { StringBuffer methodBody = new StringBuffer(method.getMethodBody()); methodBody.insert(methodBody.lastIndexOf("newEntity = newEntityRequest"), makeGWPersistRelationshipCode(nav, javaType, associationType, "\t\t")); method.setMethodBody(methodBody.toString()); } else if (methodName.endsWith("merge")) { StringBuffer methodBody = new StringBuffer(method.getMethodBody()); methodBody.insert(methodBody.lastIndexOf("boolean modifyRequest = modifyEntityRequest"), makeGWMergeRelationshipCode(nav, javaType, associationType, "\t\t")); method.setMethodBody(methodBody.toString()); } /* * We insert the relation in the findAll and find<Entity>Entries methods */ else if (methodName.endsWith("findAll" + pluralRemoteEntity) || methodName.endsWith("find" + entityClassFile.CLASS_NAME + "Entries")) { StringBuffer methodBody = new StringBuffer(method.getMethodBody()); int insertPosition = methodBody.indexOf("} catch (Exception relationshipsException)"); boolean isFirstManyToMany = true; if ("OneToMany ManyToMany".contains(associationType)) { String manyToManyInsertReferenceString = StringUtils.uncapitalize(entityClassFile.CLASS_NAME) + "Link.isCollection()) {"; if (methodBody.indexOf(manyToManyInsertReferenceString) > -1) { insertPosition = methodBody.indexOf(manyToManyInsertReferenceString); isFirstManyToMany = false; } } 
methodBody.insert(insertPosition, makeGWShowRelationshipCode(entityClassFile.CLASS_NAME, smallRemoteEntity + "Instance", smallRemoteEntity + "Item", ODATA_KEY, nav, javaType, associationType, "\t\t", isFirstManyToMany)); method.setMethodBody(methodBody.toString()); } /* * We insert the relation in the find<Entity> method */ else if (methodName.endsWith("find" + entityClassFile.CLASS_NAME)) { StringBuffer methodBody = new StringBuffer(method.getMethodBody()); int insertPosition = methodBody.indexOf("} catch (Exception relationshipsException)"); boolean isFirstManyToMany = true; if ("OneToMany ManyToMany".contains(associationType)) { String manyToManyInsertReferenceString = StringUtils.uncapitalize(entityClassFile.CLASS_NAME) + "Link.isCollection()) {"; if (methodBody.indexOf(manyToManyInsertReferenceString) > -1) { insertPosition = methodBody.indexOf(manyToManyInsertReferenceString); isFirstManyToMany = false; } } methodBody.insert(insertPosition, makeGWShowRelationshipCode(entityClassFile.CLASS_NAME, "virtual" + entityClassFile.CLASS_NAME, smallRemoteEntity, "OEntityKey.parse(" + GwUtils.GW_CONNECTION_FIELD_NAME + ".getDecodedRemoteKey(Id))", nav, javaType, associationType, "\t\t\t", isFirstManyToMany)); method.setMethodBody(methodBody.toString()); } } }
From source file:org.ebayopensource.turmeric.tools.library.utils.TypeLibraryUtilities.java
public static String getContentFromSunEpisodeForMasterEpisode(InputStream inputStream, boolean isFirstFile) throws IOException { Charset defaultCharset = Charset.defaultCharset(); InputStreamReader isr = null; BufferedReader reader = null; StringBuffer strBuff = new StringBuffer(); String lineStr = ""; boolean startOfContentReached = false; try {//from w ww.j av a 2s .c o m isr = new InputStreamReader(inputStream, defaultCharset); reader = new BufferedReader(isr); while ((lineStr = reader.readLine()) != null) { if (lineStr.trim().contains(TypeLibraryConstants.MASTER_EPISODE_TURMERIC_START_COMMNENT)) { startOfContentReached = true; if (!isFirstFile) { // if this sun-jaxb.episode file is not the first episode file then we will have to skip two more lines reader.readLine(); reader.readLine(); } break; } } if (startOfContentReached) { while ((lineStr = reader.readLine()) != null) { if (lineStr.trim().contains(TypeLibraryConstants.MASTER_EPISODE_TURMERIC_END_COMMNENT)) { int index = strBuff.lastIndexOf("</bindings>"); if (index > 0) strBuff = new StringBuffer(strBuff.substring(0, index)); break; } strBuff.append(lineStr + "\n"); } } } finally { CodeGenUtil.closeQuietly(reader); CodeGenUtil.closeQuietly(isr); } return strBuff.toString(); }
From source file:org.kuali.ole.sys.document.workflow.OLESearchableAttribute.java
/**
 * Extracts searchable attributes from a workflow document: the superclass
 * attributes plus, depending on the concrete document type, total amount,
 * accounting/GL/purchasing attributes, receiving attributes, and (for
 * invoices) PO numbers and formatted invoice dates.
 *
 * @param extensionDefinition the searchable-attribute extension definition
 * @param documentWithContent the routed document and its content
 * @return the collected document attributes
 */
@Override
public List<DocumentAttribute> extractDocumentAttributes(ExtensionDefinition extensionDefinition,
        DocumentWithContent documentWithContent) {
    if (LOG.isDebugEnabled()) {
        LOG.debug("extractDocumentAttributes( " + extensionDefinition + ", " + documentWithContent + " )");
    }
    List<DocumentAttribute> searchAttrValues = super.extractDocumentAttributes(extensionDefinition,
            documentWithContent);
    String docId = documentWithContent.getDocument().getDocumentId();
    DocumentService docService = SpringContext.getBean(DocumentService.class);
    Document doc = null;
    try {
        doc = docService.getByDocumentHeaderIdSessionless(docId);
    } catch (WorkflowException we) {
        // Deliberately best-effort: if the document cannot be loaded, doc
        // stays null and only the superclass attributes are returned.
    }
    if (doc != null) {
        if (doc instanceof AmountTotaling) {
            DocumentAttributeDecimal.Builder searchableAttributeValue = DocumentAttributeDecimal.Builder
                    .create(OLEPropertyConstants.FINANCIAL_DOCUMENT_TOTAL_AMOUNT);
            searchableAttributeValue.setValue(((AmountTotaling) doc).getTotalDollarAmount().bigDecimalValue());
            searchAttrValues.add(searchableAttributeValue.build());
        }
        if (doc instanceof AccountingDocument) {
            AccountingDocument accountingDoc = (AccountingDocument) doc;
            searchAttrValues.addAll(harvestAccountingDocumentSearchableAttributes(accountingDoc));
        }
        boolean indexedLedgerDoc = false;
        if (doc instanceof GeneralLedgerPostingDocument && !indexedLedgerDoc) {
            GeneralLedgerPostingDocument GLPostingDoc = (GeneralLedgerPostingDocument) doc;
            searchAttrValues.addAll(harvestGLPDocumentSearchableAttributes(GLPostingDoc));
        }
        if (doc instanceof PurchasingAccountsPayableDocument) {
            PurchasingAccountsPayableDocument purchasingAccountsPayableDocument = (PurchasingAccountsPayableDocument) doc;
            searchAttrValues
                    .addAll(harvestPurchasingAccountsPayableDocument(purchasingAccountsPayableDocument));
        }
        // BUG FIX: was a non-short-circuit bitwise '|' between the two
        // instanceof checks; '||' is the intended logical OR.
        if (doc instanceof OleLineItemReceivingDocument || doc instanceof OleCorrectionReceivingDocument) {
            ReceivingDocument receivingDocument = (ReceivingDocument) doc;
            searchAttrValues.addAll(harvestReceivingDocument(receivingDocument));
        }
        if (doc instanceof OleInvoiceDocument) {
            // Collect the PO identifiers of all invoice items into a
            // comma-separated list.
            StringBuffer purchaseOrderDocumentNums = new StringBuffer();
            OleInvoiceDocument invoiceDocument = (OleInvoiceDocument) doc;
            for (Object purApItem : invoiceDocument.getItems()) {
                OleInvoiceItem invoiceItem = (OleInvoiceItem) purApItem;
                if (invoiceItem.getPurchaseOrderIdentifier() != null) {
                    purchaseOrderDocumentNums.append(invoiceItem.getPurchaseOrderIdentifier().toString() + ",");
                }
            }
            // Replace the trailing comma with a space (legacy format kept as-is).
            int len = purchaseOrderDocumentNums.lastIndexOf(",");
            if (len > 0) {
                purchaseOrderDocumentNums.replace(len, len + 1, " ");
            }
            DocumentAttributeString.Builder poDocNumSearchableAttributeValue = DocumentAttributeString.Builder
                    .create(OleSelectConstant.InvoiceSearch.PO_DOC_NUMS);
            poDocNumSearchableAttributeValue.setValue(purchaseOrderDocumentNums.toString());
            searchAttrValues.add(poDocNumSearchableAttributeValue.build());
            DateFormat sourceFormat = new SimpleDateFormat("dd-MM-yyyy");
            String invoiceDate = sourceFormat.format(invoiceDocument.getInvoiceDate());
            String invoicePayDate = sourceFormat.format(invoiceDocument.getInvoicePayDate());
            DocumentAttributeString.Builder invDateSearchableAttributeValue = DocumentAttributeString.Builder
                    .create(OleSelectConstant.InvoiceSearch.PO_DIS_INV_DT);
            invDateSearchableAttributeValue.setValue(invoiceDate);
            searchAttrValues.add(invDateSearchableAttributeValue.build());
            DocumentAttributeString.Builder invPayDateSearchableAttributeValue = DocumentAttributeString.Builder
                    .create(OleSelectConstant.InvoiceSearch.PO_DIS_INV_PAY_DT);
            invPayDateSearchableAttributeValue.setValue(invoicePayDate);
            searchAttrValues.add(invPayDateSearchableAttributeValue.build());
        }
    }
    return searchAttrValues;
}
From source file:com.wizecommerce.hecuba.HecubaClientManager.java
/**
 * Takes a separator-delimited list of urls and a separator-delimited list of
 * port numbers, and pairs them up. If there are fewer ports than hosts, the
 * last seen port is reused; an empty port token falls back to 9160.
 *
 * @param locationURLs address of cassandra nodes
 * @param ports ports that cassandra nodes are listening on
 * @return locationURLs combined with ports (location1:port1,location2:port2,...)
 */
protected String getListOfNodesAndPorts(String locationURLs, String ports) {
    final String paramSeparator = ConfigUtils.getInstance().getConfiguration()
            .getString(HecubaConstants.GLOBAL_PROP_NAME_PREFIX + ".hecuba.path.separator", ":");
    final String[] portTokens = ports.split(paramSeparator);
    final String[] hostTokens = locationURLs.split(paramSeparator);
    final StringBuilder joined = new StringBuilder();
    String currentPort = "";
    for (int i = 0; i < hostTokens.length; i++) {
        // Take the matching port while one exists; afterwards keep reusing
        // the last one seen (backfill behaviour).
        if (i < portTokens.length) {
            currentPort = portTokens[i];
        }
        if (StringUtils.isEmpty(currentPort)) {
            currentPort = "9160"; // default Cassandra Thrift port
        }
        joined.append(hostTokens[i]).append(":").append(currentPort).append(",");
    }
    // Strip the trailing comma added by the loop.
    return joined.substring(0, joined.lastIndexOf(","));
}