List of usage examples for java.io PrintWriter append
public PrintWriter append(char c)
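A minimal, self-contained sketch (not taken from any of the projects below) of the basic contract: append(char) returns the PrintWriter itself, so calls chain, and append never throws IOException; write failures are reported through checkError() instead.

import java.io.PrintWriter;
import java.io.StringWriter;

public class AppendDemo {
    public static void main(String[] args) {
        StringWriter buffer = new StringWriter();
        PrintWriter writer = new PrintWriter(buffer);

        // append(char) returns this writer, so single characters chain fluently
        writer.append('H').append('i').append('!').append('\n');

        // the CharSequence overloads chain the same way
        writer.append("appended via ").append("CharSequence");

        writer.flush();
        System.out.println(buffer);
        // PrintWriter never throws IOException; poll the error flag instead
        System.out.println("error state: " + writer.checkError());
        writer.close();
    }
}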
From source file: org.kalypso.model.hydrology.internal.preprocessing.writer.NetFileWriter.java

private void appendNodeList(final Node[] nodes, final PrintWriter netBuffer) throws IOException, SensorException {
    // FIXME: these nodes do not contain the branching nodes
    final Map<Node, ZuflussBean> nodeInfos = new LinkedHashMap<>();

    /* First collect info and potentially add branching nodes */
    for (final Node node : nodes) {
        final ZuflussBean zuflussBean = createZuflussBean(node);

        // FIXME: strange, we map the source node here...
        nodeInfos.put(node, zuflussBean);

        final Node branchingNode = zuflussBean.getBranchingNode();
        if (branchingNode != null) {
            // FIXME: ... but check for the target node here; this can't be right, can it?
            // FIXME: comment is not correct
            /* Do not overwrite existing info */
            // FIXME: in reality, we make sure a node that is the target of a branching gets a default bean;
            // but this happens anyways, so what? -> if the target itself has a branching, it might get
            // removed, if the nodes are in the correct order...
            if (!nodeInfos.containsKey(branchingNode))
                nodeInfos.put(branchingNode, new ZuflussBean(0, 0, 0, 0, 0, null, StringUtils.EMPTY));
        }
    }

    /* Write these infos to file */
    final Set<Entry<Node, ZuflussBean>> entrySet = nodeInfos.entrySet();
    for (final Entry<Node, ZuflussBean> entry : entrySet) {
        final Node node = entry.getKey();
        final ZuflussBean zuflussBean = entry.getValue();

        final int nodeID = m_idManager.getAsciiID(node);

        netBuffer.format("%5d", nodeID); //$NON-NLS-1$
        netBuffer.format("%5d", zuflussBean.getIzug()); //$NON-NLS-1$
        netBuffer.format("%5d", zuflussBean.getIabg()); //$NON-NLS-1$
        netBuffer.format("%5d", zuflussBean.getIueb()); //$NON-NLS-1$
        netBuffer.format("%5d", zuflussBean.getIzuf()); //$NON-NLS-1$
        netBuffer.format("%5d\n", zuflussBean.getIvzwg()); //$NON-NLS-1$ //$NON-NLS-2$

        netBuffer.append(zuflussBean.getArgument());

        // FIXME: what additional nodes?
        /* TODO: we should also consider the additional nodes by QQ relations; but as QQ relations do not work..... */
    }

    // ENDKNOTEN
    netBuffer.append(" 9001 0 0 0 0 0\n"); //$NON-NLS-1$
    netBuffer.append("10000 0 0 0 0 0\n"); //$NON-NLS-1$
}
From source file: org.jbpm.designer.server.ResourceList.java

protected void doPost(HttpServletRequest req, HttpServletResponse res) throws ServletException, IOException {
    // get transmitted parameter
    String value = req.getParameter("allocation");
    PrintWriter pw = res.getWriter();
    res.setContentType("text/json");
    res.setCharacterEncoding("UTF-8");

    try {
        JSONObject jsonObject = new JSONObject();
        JSONArray dataElements = new JSONArray();

        if (value.equals("direct")) {
            // create array with distinct names for direct allocation
            String[] name = new String[7];
            name[0] = "Peter Fischer";
            name[1] = "Axel Koch";
            name[2] = "Nadja Richter";
            name[3] = "Petra Larsen";
            name[4] = "Lars Herrmann";
            name[5] = "Vanessa Schmidt";
            name[6] = "Petra Weber";

            for (int i = 0; i < name.length; i++) {
                JSONObject directAllocation = new JSONObject();
                directAllocation.put("resource", name[i]);
                dataElements.put(directAllocation);
            }
        } else if (value.equals("functional")) {
            // create array with distinct functional roles for role-based allocation
            String[] functionalRole = new String[4];
            functionalRole[0] = "sales manager";
            functionalRole[1] = "sales representative";
            functionalRole[2] = "consultant";
            functionalRole[3] = "legal expert";

            for (int i = 0; i < functionalRole.length; i++) {
                JSONObject functionalRoleAllocation = new JSONObject();
                functionalRoleAllocation.put("resource", functionalRole[i]);
                dataElements.put(functionalRoleAllocation);
            }
        } else if (value.equals("organisational")) {
            // create array with distinct organisational roles for organisational allocation
            String[] organisationalRole = new String[3];
            organisationalRole[0] = "Business Contract Signing Group";
            organisationalRole[1] = "Internal Credit Unit";
            organisationalRole[2] = "Researcher";

            for (int i = 0; i < organisationalRole.length; i++) {
                JSONObject organisationalRoleAllocation = new JSONObject();
                organisationalRoleAllocation.put("resource", organisationalRole[i]);
                dataElements.put(organisationalRoleAllocation);
            }
        }

        // final return value - JSON object with the appropriate content
        jsonObject.put("resource", dataElements);
        pw.append(jsonObject.toString());
    } catch (JSONException exception) {
        exception.printStackTrace();
    }
}
From source file: org.apache.jackrabbit.oak.plugins.segment.Segment.java

@Override
public String toString() {
    StringWriter string = new StringWriter();
    PrintWriter writer = new PrintWriter(string);

    int length = data.remaining();
    writer.format("Segment %s (%d bytes)%n", id, length);
    String segmentInfo = getSegmentInfo();
    if (segmentInfo != null) {
        writer.format("Info: %s%n", segmentInfo);
    }
    if (id.isDataSegmentId()) {
        writer.println("--------------------------------------------------------------------------");
        int refcount = getRefCount();
        for (int refid = 0; refid < refcount; refid++) {
            writer.format("reference %02x: %s%n", refid, getRefId(refid));
        }
        int rootcount = data.getShort(ROOT_COUNT_OFFSET) & 0xffff;
        int pos = data.position() + refcount * 16;
        for (int rootid = 0; rootid < rootcount; rootid++) {
            writer.format("root %d: %s at %04x%n", rootid,
                    RecordType.values()[data.get(pos + rootid * 3) & 0xff],
                    data.getShort(pos + rootid * 3 + 1) & 0xffff);
        }
        int blobrefcount = data.getShort(BLOBREF_COUNT_OFFSET) & 0xffff;
        pos += rootcount * 3;
        for (int blobrefid = 0; blobrefid < blobrefcount; blobrefid++) {
            int offset = data.getShort(pos + blobrefid * 2) & 0xffff;
            SegmentBlob blob = new SegmentBlob(new RecordId(id, offset << RECORD_ALIGN_BITS));
            writer.format("blobref %d: %s at %04x%n", blobrefid, blob.getBlobId(), offset);
        }
    }
    writer.println("--------------------------------------------------------------------------");
    int pos = data.limit() - ((length + 15) & ~15);
    while (pos < data.limit()) {
        writer.format("%04x: ", (MAX_SEGMENT_SIZE - data.limit() + pos) >> RECORD_ALIGN_BITS);
        for (int i = 0; i < 16; i++) {
            if (i > 0 && i % 4 == 0) {
                writer.append(' ');
            }
            if (pos + i >= data.position()) {
                byte b = data.get(pos + i);
                writer.format("%02x ", b & 0xff);
            } else {
                writer.append("   ");
            }
        }
        writer.append(' ');
        for (int i = 0; i < 16; i++) {
            if (pos + i >= data.position()) {
                byte b = data.get(pos + i);
                if (b >= ' ' && b < 127) {
                    writer.append((char) b);
                } else {
                    writer.append('.');
                }
            } else {
                writer.append(' ');
            }
        }
        writer.println();
        pos += 16;
    }
    writer.println("--------------------------------------------------------------------------");
    writer.close();
    return string.toString();
}
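The hex-dump loop above interleaves format() for the aligned hex columns with single-char append() calls for group separators and the ASCII gutter. A stripped-down sketch of that same pattern over a plain byte array (hypothetical HexDumpSketch class, not part of Jackrabbit Oak):

import java.io.PrintWriter;
import java.io.StringWriter;

public class HexDumpSketch {
    public static void main(String[] args) {
        byte[] data = "Hello, PrintWriter.append!".getBytes();
        StringWriter out = new StringWriter();
        PrintWriter writer = new PrintWriter(out);

        for (int pos = 0; pos < data.length; pos += 16) {
            writer.format("%04x: ", pos);
            // hex column: real bytes as "%02x ", missing bytes padded with three spaces
            for (int i = 0; i < 16; i++) {
                if (pos + i < data.length) {
                    writer.format("%02x ", data[pos + i] & 0xff);
                } else {
                    writer.append("   ");
                }
            }
            writer.append(' ');
            // ASCII gutter: printable bytes verbatim, everything else as '.'
            for (int i = 0; i < 16 && pos + i < data.length; i++) {
                byte b = data[pos + i];
                writer.append(b >= ' ' && b < 127 ? (char) b : '.');
            }
            writer.println();
        }
        writer.flush();
        System.out.print(out);
    }
}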
From source file: com.github.jknack.handlebars.maven.PrecompilePlugin.java

@Override
protected void doExecute() throws Exception {
    notNull(prefix, "The prefix parameter is required.");
    notNull(output, "The output parameter is required.");

    File basedir = new File(prefix);
    File output = new File(this.output);
    boolean error = true;
    PrintWriter writer = null;
    InputStream runtimeIS = null;
    try {
        String realPrefix = basedir.getPath();

        Handlebars handlebars = new Handlebars(new FileTemplateLoader(basedir, suffix));

        final List<CharSequence> extras = new ArrayList<CharSequence>();

        @SuppressWarnings("unchecked")
        List<String> classpathElements = project.getRuntimeClasspathElements();
        final URL[] classpath = new URL[classpathElements.size()];
        for (int i = 0; i < classpath.length; i++) {
            classpath[i] = new File(classpathElements.get(i)).toURI().toURL();
        }

        i18nJs(handlebars, extras, classpath);

        i18n(handlebars);

        /**
         * Silent any missing helper.
         */
        silentHelpers(handlebars);

        File parent = output.getParentFile();
        if (parent != null && !parent.exists()) {
            parent.mkdirs();
        }

        writer = new PrintWriter(output, encoding);
        if (includeRuntime) {
            runtimeIS = getClass().getResourceAsStream("/handlebars.runtime.js");
            IOUtil.copy(runtimeIS, writer);
        }

        List<File> files;
        if (templates != null && templates.size() > 0) {
            files = new ArrayList<File>();
            for (String templateName : templates) {
                File file = FileUtils.getFile(basedir + File.separator + templateName + suffix);
                if (file.exists()) {
                    files.add(file);
                }
            }
        } else {
            files = FileUtils.getFiles(basedir, "**/*" + suffix, null);
        }
        Collections.sort(files);

        getLog().info("Compiling templates...");
        getLog().debug("Options:");
        getLog().debug("  output: " + output);
        getLog().debug("  prefix: " + realPrefix);
        getLog().debug("  suffix: " + suffix);
        getLog().debug("  minimize: " + minimize);
        getLog().debug("  includeRuntime: " + includeRuntime);

        if (!amd) {
            writer.append("(function () {\n");
        }
        Context nullContext = Context.newContext(null);
        for (File file : files) {
            String templateName = file.getPath().replace(realPrefix, "").replace(suffix, "");
            if (templateName.startsWith(File.separator)) {
                templateName = templateName.substring(File.separator.length());
            }
            templateName = templateName.replace(File.separator, "/");
            getLog().debug("compiling: " + templateName);

            handlebars.compile(templateName).apply(nullContext);

            Template template = handlebars.compileInline("{{precompile \"" + templateName + "\"}}");
            Map<String, Object> hash = new HashMap<String, Object>();
            hash.put("wrapper", amd ? "amd" : "none");
            Options opts = new Options.Builder(handlebars, PrecompileHelper.NAME, TagType.VAR, nullContext, template)
                    .setHash(hash).build();

            writer.append("// Source: ").append(file.getPath()).append("\n");
            writer.append(PrecompileHelper.INSTANCE.apply(templateName, opts)).append("\n\n");
        }
        // extras
        for (CharSequence extra : extras) {
            writer.append(extra).append("\n");
        }
        if (!amd) {
            writer.append("\n})();");
        }
        writer.flush();
        IOUtil.close(writer);
        if (minimize) {
            minimize(output);
        }
        if (files.size() > 0) {
            getLog().info("  templates were saved in: " + output);
            error = false;
        } else {
            getLog().warn("  no templates were found");
        }
    } finally {
        IOUtil.close(runtimeIS);
        IOUtil.close(writer);
        if (error) {
            output.delete();
        }
    }
}
From source file: org.kalypso.model.wspm.pdb.ui.internal.gaf.GafWriter.java

private void writePoint(final GafKind kind, final Point point, final PrintWriter writer) {
    /* Get the data to write. */
    final CrossSection crossSection = point.getCrossSectionPart().getCrossSection();
    final double station = crossSection.getStation().doubleValue();
    final String id = point.getName();

    String y = "-1"; //$NON-NLS-1$
    final BigDecimal width = point.getWidth();
    if (width != null)
        y = String.format("%.4f", width.doubleValue()); //$NON-NLS-1$

    String z = "-1"; //$NON-NLS-1$
    final BigDecimal height = point.getHeight();
    if (height != null)
        z = String.format("%.4f", height.doubleValue()); //$NON-NLS-1$

    final Roughness roughness = point.getRoughness();
    final Vegetation vegetation = point.getVegetation();
    final int rk = Integer.parseInt(roughness.getId().getName());
    final int bk = Integer.parseInt(vegetation.getId().getName());

    /* geo location */
    final com.vividsolutions.jts.geom.Point location = point.getLocation();
    final Double hw = location == null ? null : location.getY();
    final Double rw = location == null ? null : location.getX();
    final String hwStr = hw == null ? StringUtils.EMPTY : String.format("%.4f", hw); //$NON-NLS-1$
    final String rwStr = rw == null ? StringUtils.EMPTY : String.format("%.4f", rw); //$NON-NLS-1$

    final String code = point.getCode();
    final String hyk = StringUtils.isBlank(point.getHyk()) ? EMPTY_HYK_CODE : point.getHyk();

    /* the gaf writer might produce multiple hyk codes, we need to write one line per single code */
    final String[] hyks = StringUtils.split(hyk, IGafConstants.HYK_CODE_SEPARATOR);

    /* depending on the export mode, the code/hyk gets tweaked; first tweak, then count lines, because the tweak may reduce codes to normal lines */
    final Pair<String, String>[] tweakedCodes = tweakHyks(kind, code, hyks);

    int count = 0;
    for (final Pair<String, String> tweakedPair : tweakedCodes) {
        final String tweakedCode = tweakedPair.getLeft();
        final String tweakedHyk = tweakedPair.getRight();

        final String line = String.format(Locale.PRC, GAF_LINE, station, id, y, z, tweakedCode, rk, bk, hwStr, rwStr, tweakedHyk);

        // REMARK: store first line in file, additional hyk lines are stored in separate file
        // IMPORTANT: order of lines is given by order of hyk codes made by the PPPartBuilder (PA,PE, LU,RU, LBOK,RBOK)
        if (count++ == 0)
            writer.append(line).println();
        else
            m_additionalLines.add(line);
    }
}
From source file: com.ephesoft.gxt.foldermanager.server.UploadDownloadFilesServlet.java

private void uploadFile(HttpServletRequest req, HttpServletResponse resp, String currentBatchUploadFolderName)
        throws IOException {
    PrintWriter printWriter = resp.getWriter();
    File tempFile = null;
    InputStream instream = null;
    OutputStream out = null;
    String uploadFileName = "";
    try {
        if (ServletFileUpload.isMultipartContent(req)) {
            FileItemFactory factory = new DiskFileItemFactory();
            ServletFileUpload upload = new ServletFileUpload(factory);
            uploadFileName = "";
            String uploadFilePath = "";
            List<FileItem> items;
            try {
                items = upload.parseRequest(req);
                for (FileItem item : items) {
                    if (!item.isFormField()) {
                        uploadFileName = item.getName();
                        if (uploadFileName != null) {
                            uploadFileName = uploadFileName.substring(uploadFileName.lastIndexOf(File.separator) + 1);
                        }
                        uploadFilePath = currentBatchUploadFolderName + File.separator + uploadFileName;
                        try {
                            instream = item.getInputStream();
                            tempFile = new File(uploadFilePath);
                            out = new FileOutputStream(tempFile);
                            byte buf[] = new byte[1024];
                            int len;
                            while ((len = instream.read(buf)) > 0) {
                                out.write(buf, 0, len);
                            }
                        } catch (FileNotFoundException e) {
                            printWriter.write("Unable to create the upload folder.Please try again.");
                        } catch (IOException e) {
                            printWriter.write("Unable to read the file.Please try again.");
                        } finally {
                            if (out != null) {
                                out.close();
                            }
                            if (instream != null) {
                                instream.close();
                            }
                        }
                    }
                }
            } catch (FileUploadException e) {
                printWriter.write("Unable to read the form contents.Please try again.");
            }
        } else {
            printWriter.write("Request contents type is not supported.");
        }
        printWriter.write("currentBatchUploadFolderName:" + currentBatchUploadFolderName);
        printWriter.append("|");
        printWriter.append("fileName:").append(uploadFileName);
        printWriter.append("|");
    } finally {
        printWriter.flush();
        printWriter.close();
    }
}
From source file: com.doculibre.constellio.feedprotocol.FeedServlet.java

@Override
public void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
    LOG.fine("FeedServlet: doPost(...)");

    // Check that we have a file upload request
    boolean isMultipart = ServletFileUpload.isMultipartContent(request);
    PrintWriter out = null;
    try {
        out = response.getWriter();
        if (isMultipart) {
            ServletFileUpload upload = new ServletFileUpload();

            String datasource = null;
            String feedtype = null;

            FileItemIterator iter = upload.getItemIterator(request);
            while (iter.hasNext()) {
                FileItemStream item = iter.next();
                // Disabled to allow easier update from HTML forms
                //if (item.isFormField()) {
                if (item.getFieldName().equals(FeedParser.XML_DATASOURCE)) {
                    InputStream itemStream = null;
                    try {
                        itemStream = item.openStream();
                        datasource = IOUtils.toString(itemStream);
                    } finally {
                        IOUtils.closeQuietly(itemStream);
                    }
                } else if (item.getFieldName().equals(FeedParser.XML_FEEDTYPE)) {
                    InputStream itemStream = null;
                    try {
                        itemStream = item.openStream();
                        feedtype = IOUtils.toString(itemStream);
                    } finally {
                        IOUtils.closeQuietly(itemStream);
                    }
                } else if (item.getFieldName().equals(FeedParser.XML_DATA)) {
                    try {
                        if (StringUtils.isBlank(datasource)) {
                            throw new IllegalArgumentException("Datasource is blank");
                        }
                        if (StringUtils.isBlank(feedtype)) {
                            throw new IllegalArgumentException("Feedtype is blank");
                        }
                        InputStream contentStream = null;
                        try {
                            contentStream = item.openStream();
                            final Feed feed = new FeedStaxParser().parse(datasource, feedtype, contentStream);
                            Callable<Object> processFeedTask = new Callable<Object>() {
                                @Override
                                public Object call() throws Exception {
                                    FeedProcessor feedProcessor = new FeedProcessor(feed);
                                    feedProcessor.processFeed();
                                    return null;
                                }
                            };
                            threadPoolExecutor.submit(processFeedTask);
                            out.append(GsaFeedConnection.SUCCESS_RESPONSE);
                            return;
                        } catch (Exception e) {
                            LOG.log(Level.SEVERE, "Exception while processing contentStream", e);
                        } finally {
                            IOUtils.closeQuietly(contentStream);
                        }
                    } finally {
                        IOUtils.closeQuietly(out);
                    }
                }
                //}
            }
        }
    } catch (Throwable e) {
        LOG.log(Level.SEVERE, "Exception while uploading", e);
    } finally {
        IOUtils.closeQuietly(out);
    }
    out.append(GsaFeedConnection.INTERNAL_ERROR_RESPONSE);
}
From source file: org.openmrs.web.servlet.QuickReportServlet.java

private void doAttendedClinic(VelocityContext velocityContext, PrintWriter report, HttpServletRequest request)
        throws ServletException {
    EncounterService es = Context.getEncounterService();
    LocationService ls = Context.getLocationService();
    DateFormat dateFormat = Context.getDateFormat();
    velocityContext.put("date", dateFormat);

    Calendar cal = Calendar.getInstance();
    Date start;
    Date end;

    String startDate = request.getParameter("startDate");
    String endDate = request.getParameter("endDate");
    String location = request.getParameter("location");

    if (startDate != null && startDate.length() != 0) {
        try {
            cal.setTime(dateFormat.parse(startDate));
        } catch (ParseException e) {
            throw new ServletException("Error parsing 'Start Date'", e);
        }
    } else {
        cal.setTime(new Date());
    }

    // if they don't input an end date, assume they meant "this week"
    if (endDate == null || "".equals(endDate)) {
        while (cal.get(Calendar.DAY_OF_WEEK) != Calendar.SUNDAY) {
            cal.add(Calendar.DAY_OF_MONTH, -1);
        }
        start = cal.getTime();
        cal.add(Calendar.DAY_OF_MONTH, 7);
        end = cal.getTime();
    } else {
        // they put in an end date, assume literal start and end
        start = cal.getTime();
        try {
            cal.setTime(dateFormat.parse(endDate));
        } catch (ParseException e) {
            throw new ServletException("Error parsing 'End Date'", e);
        }
        end = cal.getTime();
    }

    Collection<Encounter> encounters = null;
    if (location == null || "".equals(location)) {
        EncounterSearchCriteria encounterSearchCriteria = new EncounterSearchCriteriaBuilder()
                .setIncludeVoided(true).setFromDate(start).setToDate(end).createEncounterSearchCriteria();
        encounters = es.getEncounters(encounterSearchCriteria);
    } else {
        Location locationObj = ls.getLocation(Integer.valueOf(location));
        EncounterSearchCriteria encounterSearchCriteria = new EncounterSearchCriteriaBuilder()
                .setIncludeVoided(true).setLocation(locationObj).setFromDate(start).setToDate(end)
                .createEncounterSearchCriteria();
        encounters = es.getEncounters(encounterSearchCriteria);
    }

    if (encounters != null) {
        velocityContext.put("encounters", encounters);
    } else {
        report.append("No Encounters found");
    }
}
From source file: org.ramadda.repository.database.DatabaseManager.java

/**
 * Actually write the tables
 *
 * @param pw What to write to
 * @param packageName Tables.java package name
 * @param what _more_
 *
 * @throws Exception on badness
 */
private void writeTables(PrintWriter pw, String packageName, String[] what) throws Exception {
    String sp1 = " ";
    String sp2 = sp1 + sp1;
    String sp3 = sp1 + sp1 + sp1;
    pw.append("/**Generated by running: java org.unavco.projects.gsac.repository.UnavcoGsacDatabaseManager**/\n\n");
    pw.append("package " + packageName + ";\n\n");
    pw.append("import org.ramadda.sql.SqlUtil;\n\n");
    pw.append("//J-\n");
    pw.append("public abstract class Tables {\n");
    pw.append(sp1 + "public abstract String getName();\n");
    pw.append(sp1 + "public abstract String getColumns();\n");

    Connection connection = getConnection();
    DatabaseMetaData dbmd = connection.getMetaData();
    ResultSet catalogs = dbmd.getCatalogs();
    ResultSet tables = dbmd.getTables(null, null, null, what);
    HashSet seenTables = new HashSet();
    while (tables.next()) {
        String tableName = tables.getString("TABLE_NAME");
        // System.err.println("NAME:" + tableName);
        String TABLENAME = tableName.toUpperCase();
        if (seenTables.contains(TABLENAME)) {
            continue;
        }
        seenTables.add(TABLENAME);
        String tableType = tables.getString("TABLE_TYPE");
        if (Misc.equals(tableType, "INDEX")) {
            continue;
        }
        if (tableName.indexOf("$") >= 0) {
            continue;
        }
        if (tableType == null) {
            continue;
        }
        if ((tableType != null) && tableType.startsWith("SYSTEM")) {
            continue;
        }

        ResultSet columns = dbmd.getColumns(null, null, tableName, null);
        List colNames = new ArrayList();
        pw.append("\n\n");
        pw.append(sp1 + "public static class " + TABLENAME + " extends Tables {\n");
        pw.append(sp2 + "public static final String NAME = \"" + tableName.toLowerCase() + "\";\n");
        pw.append("\n");
        pw.append(sp2 + "public String getName() {return NAME;}\n");
        pw.append(sp2 + "public String getColumns() {return COLUMNS;}\n");
        System.out.println("processing table:" + TABLENAME);
        String tableVar = null;
        List colVars = new ArrayList();
        HashSet seen = new HashSet();
        while (columns.next()) {
            String colName = columns.getString("COLUMN_NAME").toLowerCase();
            String colSize = columns.getString("COLUMN_SIZE");
            String COLNAME = colName.toUpperCase();
            if (seen.contains(COLNAME)) {
                continue;
            }
            seen.add(COLNAME);
            COLNAME = COLNAME.replace("#", "");
            colNames.add("COL_" + COLNAME);
            pw.append(sp2 + "public static final String COL_" + COLNAME + " = NAME + \"." + colName + "\";\n");
            pw.append(sp2 + "public static final String COL_NODOT_" + COLNAME + " = \"" + colName + "\";\n");
            /*
            pw.append(sp2 + "public static final String ORA_" + COLNAME + " = \"" + colName + "\";\n");
            */
        }

        pw.append("\n");
        pw.append(sp2 + "public static final String[] ARRAY = new String[] {\n");
        pw.append(sp3 + StringUtil.join(",", colNames));
        pw.append("\n");
        pw.append(sp2 + "};\n");
        pw.append(sp2 + "public static final String COLUMNS = SqlUtil.comma(ARRAY);\n");
        pw.append(sp2 + "public static final String NODOT_COLUMNS = SqlUtil.commaNoDot(ARRAY);\n");
        pw.append(sp2 + "public static final String INSERT =" + "SqlUtil.makeInsert(NAME, NODOT_COLUMNS,"
                + "SqlUtil.getQuestionMarks(ARRAY.length));\n");
        pw.append(sp1 + "public static final " + TABLENAME + " table = new " + TABLENAME + "();\n");
        pw.append(sp1 + "}\n\n");
    }
    pw.append("\n\n}\n");
}
From source file: com.francelabs.datafari.servlets.GetFieldsInfo.java

/**
 * @see HttpServlet#doGet(HttpServletRequest request, HttpServletResponse response)
 */
@Override
protected void doGet(final HttpServletRequest request, final HttpServletResponse response)
        throws ServletException, IOException {
    request.setCharacterEncoding("utf8");
    response.setContentType("application/json");
    final PrintWriter out = response.getWriter();
    try {
        // Define the Solr hostname, port and protocol
        final String solrserver = "localhost";
        final String solrport = "8983";
        final String protocol = "http";

        // Use Solr Schema REST API to get the list of fields
        final HttpClient httpClient = HttpClientBuilder.create().build();
        final HttpHost httpHost = new HttpHost(solrserver, Integer.parseInt(solrport), protocol);
        final HttpGet httpGet = new HttpGet("/solr/FileShare/schema/fields");
        final HttpResponse httpResponse = httpClient.execute(httpHost, httpGet);

        // Construct the jsonResponse
        final JSONObject jsonResponse = new JSONObject();
        if (httpResponse.getStatusLine().getStatusCode() == 200) {
            // Status of the API response is OK
            final JSONObject json = new JSONObject(EntityUtils.toString(httpResponse.getEntity()));
            final JSONArray fieldsJson = json.getJSONArray("fields");

            // Load the list of denied fields
            final String strDeniedFieldsList = AdvancedSearchConfiguration.getInstance()
                    .getProperty(AdvancedSearchConfiguration.DENIED_FIELD_LIST);
            final Set<String> deniedFieldsSet = new HashSet<>(Arrays.asList(strDeniedFieldsList.split(",")));

            for (int i = 0; i < fieldsJson.length(); i++) {
                final JSONObject field = (JSONObject) fieldsJson.get(i);
                // If a fieldname has been provided, it means that this servlet only
                // needs to return infos on this specific field
                if (request.getParameter("fieldName") != null) {
                    final String fieldName = request.getParameter("fieldName");
                    if (field.getString("name").equals(fieldName)) {
                        jsonResponse.append("field", field);
                        break;
                    }
                } else {
                    if (!deniedFieldsSet.contains(field.getString("name"))
                            && (!field.has("indexed") || field.getBoolean("indexed"))
                            && !field.getString("name").startsWith("allow_")
                            && !field.getString("name").startsWith("deny_")
                            && !field.getString("name").startsWith("_")) {
                        jsonResponse.append("field", field);
                    }
                }
            }
            out.print(jsonResponse);
        } else {
            // Status of the API response is an error
            logger.error("Error while retrieving the fields from the Schema API of Solr: "
                    + httpResponse.getStatusLine().toString());
            out.append("Error while retrieving the fields from the Schema API of Solr, please retry, if the problem persists contact your system administrator. Error Code : 69026");
        }
        out.close();
    } catch (final IOException e) {
        logger.error("Error while retrieving the fields from the Schema API of Solr", e);
        out.append("Error while retrieving the fields from the Schema API of Solr, please retry, if the problem persists contact your system administrator. Error Code : 69026");
        out.close();
    }
}