List of usage examples for java.io.PrintWriter.write(String)
public void write(String s)
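Before the project-level examples below, here is a minimal, self-contained sketch of the basic call (the output file name is a placeholder chosen for illustration). write(String) emits the characters exactly as given, with no line separator appended and no automatic flush, so the writer should be flushed or closed before the program exits.

import java.io.IOException;
import java.io.PrintWriter;

public class PrintWriterWriteExample {
    public static void main(String[] args) throws IOException {
        // try-with-resources closes the writer, which also flushes buffered output
        try (PrintWriter pw = new PrintWriter("example-output.txt")) {
            pw.write("first line");              // write(String) does not append a newline
            pw.write(System.lineSeparator());    // add the separator explicitly
            pw.write("second line");
        }
    }
}

The examples that follow show the same method used in larger, real-world code paths.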
From source file:csg.files.CSGFiles.java
public void saveCourseData(AppDataComponent courseData, String filePath) throws IOException {
    CourseData courseDataManager = (CourseData) courseData;
    JsonArrayBuilder courseArrayBuilder = Json.createArrayBuilder();
    JsonArray courseArray = courseArrayBuilder.build();
    CSGWorkspace workspace = (CSGWorkspace) app.getWorkspaceComponent();
    JsonObject courseJson = Json.createObjectBuilder().add(JSON_SUBJECT, courseDataManager.getSubject())
            .add(JSON_NUMBER, courseDataManager.getNumber()).add(JSON_SEMESTER, courseDataManager.getSemester())
            .add(JSON_YEAR, courseDataManager.getYear()).add(JSON_TITLE, courseDataManager.getTitle())
            .add(JSON_INSTRUCTORNAME, courseDataManager.getInsName())
            .add(JSON_INSTRUCTORHOME, courseDataManager.getInsHome())
            .add(JSON_BANNER, courseDataManager.getBannerLink())
            .add(JSON_LEFTFOOTER, courseDataManager.getLeftFooterLink())
            .add(JSON_RIGHTFOOTER, courseDataManager.getRightFooterLink())
            .add(JSON_STYLESHEET, courseDataManager.getStyleSheet()).build();
    ObservableList<CourseTemplate> templates = courseDataManager.getTemplates();
    for (CourseTemplate template : templates) {
        JsonObject cJson = Json.createObjectBuilder().add(JSON_USE, template.isUse().getValue())
                .add(JSON_NAVBAR, template.getNavbarTitle()).add(JSON_FILENAME, template.getFileName())
                .add(JSON_SCRIPT, template.getScript()).build();
        courseArrayBuilder.add(cJson);
    }
    courseArray = courseArrayBuilder.build();
    JsonObject dataManagerJSO = Json.createObjectBuilder().add(JSON_COURSE, courseJson)
            .add(JSON_COURSETEMPLATE, courseArray).build();

    // AND NOW OUTPUT IT TO A JSON FILE WITH PRETTY PRINTING
    Map<String, Object> properties = new HashMap<>(1);
    properties.put(JsonGenerator.PRETTY_PRINTING, true);
    JsonWriterFactory writerFactory = Json.createWriterFactory(properties);
    StringWriter sw = new StringWriter();
    JsonWriter jsonWriter = writerFactory.createWriter(sw);
    jsonWriter.writeObject(dataManagerJSO);
    jsonWriter.close();

    // INIT THE WRITER
    OutputStream os = new FileOutputStream(filePath);
    JsonWriter jsonFileWriter = Json.createWriter(os);
    jsonFileWriter.writeObject(dataManagerJSO);

    String prettyPrinted = sw.toString();
    PrintWriter pw = new PrintWriter(filePath);
    pw.write(prettyPrinted);
    pw.close();
}
From source file:csg.files.CSGFiles.java
public void saveProjectsData(AppDataComponent courseData, AppDataComponent projectData, String filePath)
        throws IOException {
    CourseData courseDataManager = (CourseData) courseData;
    ProjectData projectDataManager = (ProjectData) projectData;
    ObservableList<Student> students = projectDataManager.getStudents();
    JsonArrayBuilder studentArrayBuilder = Json.createArrayBuilder();
    JsonArrayBuilder teamArrayBuilder = Json.createArrayBuilder();
    ObservableList<Team> teams = projectDataManager.getTeams();
    for (Team team : teams) {
        for (Student student : students) {
            if (student.getTeam().equals(team.getName())) {
                studentArrayBuilder.add(student.getFirstName() + " " + student.getLastName());
            }
        }
        JsonArray studentArray = studentArrayBuilder.build();
        JsonObject teamsJson = Json.createObjectBuilder().add(JSON_NAME, team.getName())
                .add(JSON_STUDENTS, studentArray).add(JSON_LINK, team.getLink()).build();
        teamArrayBuilder.add(teamsJson);
    }
    JsonArray teamArray = teamArrayBuilder.build();
    CSGWorkspace workspace = (CSGWorkspace) app.getWorkspaceComponent();
    JsonArrayBuilder courseJsonBuilder = Json.createArrayBuilder();
    JsonObject coursesJson = Json.createObjectBuilder()
            .add(JSON_SEMESTER, courseDataManager.getSemester() + " " + courseDataManager.getYear())
            .add(JSON_PROJECTS, teamArray).build();
    courseJsonBuilder.add(coursesJson);
    JsonArray courseJsonArr = courseJsonBuilder.build();
    JsonObject dataManagerJSO = Json.createObjectBuilder().add(JSON_WORK, courseJsonArr).build();

    // AND NOW OUTPUT IT TO A JSON FILE WITH PRETTY PRINTING
    Map<String, Object> properties = new HashMap<>(1);
    properties.put(JsonGenerator.PRETTY_PRINTING, true);
    JsonWriterFactory writerFactory = Json.createWriterFactory(properties);
    StringWriter sw = new StringWriter();
    JsonWriter jsonWriter = writerFactory.createWriter(sw);
    jsonWriter.writeObject(dataManagerJSO);
    jsonWriter.close();

    // INIT THE WRITER
    OutputStream os = new FileOutputStream(filePath);
    JsonWriter jsonFileWriter = Json.createWriter(os);
    jsonFileWriter.writeObject(dataManagerJSO);

    String prettyPrinted = sw.toString();
    PrintWriter pw = new PrintWriter(filePath);
    pw.write(prettyPrinted);
    pw.close();
}
From source file:com.globalsight.dispatcher.controller.MTProfilesController.java
@RequestMapping(value = "/testHost", method = RequestMethod.POST) public void testHost(HttpServletRequest p_request, HttpServletResponse p_response) { MachineTranslationProfile mtProfile = getMTProfile(p_request); try {// w ww . ja v a 2 s. co m PrintWriter writer = p_response.getWriter(); if (MTP_DAO.isMtProfileNameExisted(mtProfile) || StringUtils.isEmpty(mtProfile.getMtProfileName())) { JSONObject jso = new JSONObject(); jso.put("ExceptionInfo", "Well:The Name has been in used!"); writer.write(jso.toString()); } // if promt and ao will test return false in case not save // just for session else if (testMTCommonOptions(mtProfile, writer)) { MTP_DAO.saveMTProfile(mtProfile); JSONObject jso = new JSONObject(); jso.put("Info", "saved"); writer.write(jso.toString()); writer.close(); } writer.close(); } catch (JSONException e) { } catch (IOException e) { // p_response.getWriter() Error } catch (JAXBException e) { // MTP_DAO.saveMTProfile Error } }
From source file:com.vcredit.lrh.microservice.gateway.api.redis.SecurityHandlerRedis.java
public void process(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException {
    HttpServletResponse httpServletResponse = (HttpServletResponse) response;
    HttpServletRequest servletRequest = (HttpServletRequest) request;
    String token = "";
    String clientVersion = "";
    String deviceType = "";
    String base64Str = servletRequest.getHeader("clientHeader");
    if (!StringUtils.isEmpty(base64Str)) {
        String headerJsonStr = Base64Utils.getFromBase64(base64Str);
        JSONObject headerJson = JSONObject.parseObject(headerJsonStr);
        token = headerJson.getString("accessToken");
        try {
            clientVersion = headerJson.getString("apiClientVersion");
            deviceType = headerJson.getString("deviceType");
            // add by xuhui 20170406
            if (!StringUtils.isEmpty(clientVersion)) {
                if ("iOS".equals(deviceType) && iosAppVertion.compareTo(clientVersion) > 0) {
                    JSONObject jSONObject = new JSONObject();
                    PrintWriter pw = httpServletResponse.getWriter();
                    jSONObject.put("type", LrhConstants.ErrorCodeTypeEnum.FORCEUPDATE.getCode());
                    jSONObject.put("success", true);
                    jSONObject.put("code", 201);
                    Map<String, Object> map = new HashMap();
                    map.put("updateUrl", "");
                    map.put("updateInfo", "");
                    map.put("updateTargetVersion", iosAppVertion);
                    map.put("forceUpdate", true);
                    map.put("appType", "iOS");
                    jSONObject.put("data", map);
                    pw.write(jSONObject.toJSONString());
                    pw.flush();
                }
                if ("android".equals(deviceType) && andriodAppVersion.compareTo(clientVersion) > 0) {
                    JSONObject jSONObject = new JSONObject();
                    PrintWriter pw = httpServletResponse.getWriter();
                    jSONObject.put("type", LrhConstants.ErrorCodeTypeEnum.FORCEUPDATE.getCode());
                    jSONObject.put("success", true);
                    jSONObject.put("code", 201);
                    Map<String, Object> map = new HashMap();
                    map.put("updateUrl", "http://10.154.40.42:7777/lrh_apk_android_20_v0.0.1/vcredit_lrh_debug_v0.0.2_2017_0421_1041_Vcredit_TecentQQ.apk");
                    map.put("updateInfo", "");
                    map.put("updateTargetVersion", andriodAppVersion);
                    map.put("forceUpdate", true);
                    map.put("appType", "android");
                    jSONObject.put("data", map);
                    pw.write(jSONObject.toJSONString());
                    pw.flush();
                }
            }
        } catch (Exception e) {
        }
    }
    // String token = request.getParameter("accessToken") == null ? accessTokenFromHeader : request.getParameter("accessToken");
    if (StringUtils.isEmpty(token)) {
        token = servletRequest.getSession().getId().toUpperCase();
    }
    String openId = request.getParameter("open_id");
    httpServletResponse.setHeader("Content-Type", "application/json");
    try {
        if (servletRequest.getServletPath().equals(securityProperties.getLoginSecurityUrl())) {
            chain.doFilter(request, response);
        } else if (servletRequest.getServletPath().equals("/favicon.ico")) {
            chain.doFilter(request, response);
            // PrintWriter pw = httpServletResponse.getWriter();
            // pw.write("favicon.ico");
            // pw.flush();
        } else if (openId == null && null == token) {
            unauthorizedRequest(httpServletResponse);
        } else if (needAuthentication(servletRequest.getServletPath())) {
            // JSONObject currentUser = securityService.getUserByAccessToken(token);
            JSONObject currentUser = redisTemplate.get(RedisCacheKeys.ACCOUNT_CACHE_TOKEN + token,
                    JSONObject.class);
            if (null == currentUser) {
                unauthorizedRequest(httpServletResponse);
            } else {
                chain.doFilter(request, response);
            }
        } else {
            chain.doFilter(request, response);
        }
    } catch (ServletException ex) {
        logger.error(ex.getMessage(), ex);
        serverErrorRequest(httpServletResponse);
    }
}
From source file:com.ankang.report.register.impl.MonitorRegister.java
private void loadFile() {
    File file = new File(getFilePath());
    if (!file.exists()) {
        BufferedReader br = null;
        PrintWriter pw = null;
        try {
            file.getParentFile().mkdir();
            file.createNewFile();
            URL url = this.getClass().getClassLoader().getResource("/");
            String path = url.toString().split("file:/")[1];
            br = new BufferedReader(new InputStreamReader(
                    new FileInputStream(new File(path + this.FILE_PATH_SOURCE)), "UTF-8"));
            pw = new PrintWriter(new FileWriter(file, true));
            String line = null;
            while ((line = br.readLine()) != null) {
                pw.write(line);
                pw.println();
                pw.flush();
            }
        } catch (IOException e) {
            logger.error("report.cc create fail", e);
            throw new ReportException("report.cc create fail [%s]", file.getPath(), e);
        } finally {
            if (pw != null) {
                pw.close();
            }
            if (br != null) {
                try {
                    br.close();
                } catch (IOException e) {
                    logger.error("IO flow off exception", e);
                }
            }
        }
    }
    this.monitorFile = file;
}
From source file:com.ephesoft.gxt.admin.server.ImportIndexFieldUploadServlet.java
/**
 * This API is used to process uploaded file and unzip file in export-batch-folder.
 *
 * @param upload {@link ServletFileUpload} uploaded file.
 * @param req {@link HttpServletRequest}.
 * @param printWriter {@link PrintWriter}.
 * @param parentDirPath {@link String} to create absolute unzip directory path.
 * @return {@link File} temporary file after unzip.
 */
private String processUploadedFile(final ServletFileUpload upload, final HttpServletRequest req,
        final PrintWriter printWriter, final String parentDirPath) {
    String tempUnZipDir = "";
    String zipFileName = "";
    String zipPathname = "";
    File tempZipFile = null;
    List<FileItem> items;
    try {
        items = upload.parseRequest(req);
        for (final FileItem item : items) {
            if (!item.isFormField()) { // && "importFile".equals(item.getFieldName())) {
                zipPathname = getZipPath(item, parentDirPath);
                zipFileName = getZipFileName(item);
                tempZipFile = copyItemContentInFile(item, zipPathname, printWriter);
            }
        }
    } catch (final FileUploadException e) {
        printWriter.write("Unable to read the form contents.Please try again.");
    }
    tempUnZipDir = parentDirPath + File.separator + zipFileName.substring(0, zipFileName.lastIndexOf('.'))
            + System.nanoTime();
    try {
        FileUtils.unzip(tempZipFile, tempUnZipDir);
        if (!FileUtils.isDirectoryHasAllValidExtensionFiles(tempUnZipDir, SERIALIZATION_EXT)) {
            FileUtils.deleteQuietly(tempZipFile);
            FileUtils.deleteDirectoryAndContentsRecursive(new File(tempUnZipDir), true);
            tempUnZipDir = "";
            log.error("Invalid zip file.");
            // throw new Exception();
        }
    } catch (final Exception e) {
        printWriter.write("Unable to unzip the file.Please try again.");
        tempZipFile.delete();
    }
    return tempUnZipDir;
}
From source file:com.ctb.prism.report.api.RequirejsConfigServlet.java
/**
 *
 */
public void service(HttpServletRequest request, HttpServletResponse response)
        throws IOException, ServletException {
    response.setContentType("text/javascript; charset=UTF-8");
    setNoExpire(response);

    PrintWriter out = response.getWriter();
    WebUtil webUtil = WebUtil.getInstance(getJasperReportsContext());
    List<RequirejsModuleMapping> requirejsMappings = getJasperReportsContext()
            .getExtensions(RequirejsModuleMapping.class);
    Map<String, String> modulePaths = new LinkedHashMap<String, String>();
    for (RequirejsModuleMapping requirejsMapping : requirejsMappings) {
        String modulePath = requirejsMapping.getPath();
        if (requirejsMapping.isClasspathResource()) {
            modulePath = request.getContextPath() + webUtil.getResourcesBasePath() + modulePath;
        }
        modulePaths.put(requirejsMapping.getName(), modulePath);
    }

    Map<String, Object> contextMap = new HashMap<String, Object>();
    contextMap.put("contextPath", request.getContextPath());
    contextMap.put("modulePaths", modulePaths.entrySet());

    out.write(VelocityUtil.processTemplate(REQUIREJS_CONFIG_TEMPLATE, contextMap));
}
From source file:org.ala.spatial.web.services.GDMWSController.java
private void generateMetadata(String[] layers, String area, String pid, String outputdir) {
    try {
        LayerDAO layerDao = Client.getLayerDao();
        int i = 0;

        System.out.println("Generating metadata...");

        StringBuilder sbMetadata = new StringBuilder();
        sbMetadata.append(
                "<!doctype html><head><meta charset='utf-8'><title>Genralized Dissimilarity Model</title><meta name='description' content='ALA GDM Metadata'>");
        sbMetadata.append(
                "<style type='text/css'>body{font-family:Verdana,'Lucida Sans';font-size:small;}div#core{display:block;clear:both;margin-bottom:20px;}section{width:95%;margin:0 15px;border-bottom:1px solid #000;}.clearfix:after{content:'.';display:block;clear:both;visibility:hidden;line-height:0;height:0;}.clearfix{display:inline-block;}html[xmlns] .clearfix{display:block;}* html .clearfix{height:1%;}</style>");
        sbMetadata.append(
                "</head><body><div id=wrapper><header><h1>Genralized Dissimilarity Model</h1></header><div id=core class=clearfix><section><p>");
        sbMetadata.append(
                "This GDM model was created Wed Feb 29 20:50:37 EST 2012. Data available in this folder is available for further analysis. </p>");
        sbMetadata.append("<h3>Your options:</h3><ul>");
        sbMetadata.append("<li>Model reference number:").append(pid).append("</li>");
        sbMetadata.append("<li>Assemblage:").append("").append("</li>");
        sbMetadata.append("<li>Area:").append(area).append("</li>");
        sbMetadata.append("<li>Layers: <ul>");
        for (i = 0; i < layers.length; i++) {
            sbMetadata.append("<li>").append(layerDao.getLayerByName(layers[i]).getDisplayname())
                    .append("</li>");
        }
        sbMetadata.append("</li></ul></li></ul></section>");
        sbMetadata.append(
                "<section><h3>Response Histogram (observed dissimilarity class):</h3><p> The Response Histogram plots the distribution of site pairs within each observed dissimilarity class. The final column in the dissimilarity class > 1 represents the number of site pairs that are totally dissimilar from each other. This chart provides an overview of potential bias in the distribution of the response data. </p><p><img src='plots/resphist.png'/></p></section><section><h3>Observed versus predicted compositional dissimilarity (raw data plot):</h3><p> The 'Raw data' scatter plot presents the Observed vs Predicted degree of compositional dissimilarity for a given model run. Each dot on the chart represents a site-pair. The line represents the perfect 1:1 fit. (Note that the scale and range of values on the x and y axes differ). </p><p> This chart provides a snapshot overview of the degree of scatter in the data. That is, how well the predicted compositional dissimilarity between site pairs matches the actual compositional dissimilarity present in each site pair. </p><p><img src='plots/obspredissim.png'/></p></section><section><h3>Observed compositional dissimilarity vs predicted ecological distance (link function applied to the raw data plot):</h3><p> The 'link function applied' scatter plot presents the Observed compositional dissimilarity vs Predicted ecological distance. Here, the link function has been applied to the predicted compositional dissimilarity to generate the predicted ecological distance. Each dot represents a site-pair. The line represents the perfect 1:1 fit. The scatter of points signifies noise in the relationship between the response and predictor variables. </p><p><img src='plots/dissimdist.png'/></p></section><section><h3>Predictor Histogram:</h3><p> The Predictor Histogram plots the relative contribution (sum of coefficient values) of each environmental gradient layer that is relevant to the model. The sum of coefficient values is a measure of the amount of predicted compositional dissimilarity between site pairs. </p><p> Predictor variables that contribute little to explaining variance in compositional dissimilarity between site pairs have low relative contribution values. Predictor variables that do not make any contribution to explaining variance in compositional dissimilarity between site pairs (i.e., all coefficient values are zero) are not shown. </p><p><img src='plots/predhist.png'/></p></section><section><h3>Fitted Functions:</h3><p> The model output presents the response (compositional turnover) predicted by variation in each predictor. The shape of the predictor is represented by three I-splines, the values of which are defined by the environmental data distribution: min, max and median (i.e., 0, 50 and 100th percentiles). The GDM model estimates the coefficients of the I-splines for each predictor. The coefficient provides an indication of the total amount of compositional turnover correlated with each value at the 0, 50 and 100th percentiles. The sum of these coefficient values is an indicator of the relative importance of each predictor to compositional turnover. </p><p> The coefficients are applied to the ecological distance from the minimum percentile for a predictor. These plots of fitted functions show the sort of monotonic transformations that will take place to a predictor to render it in GDM space. The relative maximum y values (sum of coefficient values) indicate the amount of influence that each predictor makes to the total GDM prediction. </p><p><a href='plots/maxtx.png'><img src='plots/maxtx_thumb.png'/></a><a href='plots/minti.png'><img src='plots/minti_thumb.png'/></a><a href='plots/radnx.png'><img src='plots/radnx_thumb.png'/></a><a href='plots/rainx.png'><img src='plots/rainx_thumb.png'/></a></p></section></div><footer><p>© <a href='http://www.ala.org.au/'>Atlas of Living Australia 2012</a></p></footer></div></body></html>");
        sbMetadata.append("");

        File spFile = new File(outputdir + "gdm.html");
        System.out.println("Writing metadata to: " + spFile.getAbsolutePath());
        PrintWriter spWriter;
        spWriter = new PrintWriter(new BufferedWriter(new FileWriter(spFile)));
        spWriter.write(sbMetadata.toString());
        spWriter.close();
    } catch (IOException ex) {
        Logger.getLogger(GDMWSController.class.getName()).log(Level.SEVERE, null, ex);
    }
}
From source file:edu.cornell.med.icb.goby.modes.CompactAlignmentToTranscriptCountsMode.java
private void processTranscriptAlignment(final String basename) throws IOException {
    final AlignmentReaderImpl reader = new AlignmentReaderImpl(basename);
    PrintWriter outputWriter = null;
    try {
        WeightsInfo weights = null;
        if (useWeights) {
            weights = CompactAlignmentToAnnotationCountsMode.loadWeights(basename, useWeights, weightId);
            if (weights != null) {
                System.err.println(
                        "Weights have been provided and loaded and will be used to reweight transcript counts.");
            }
        }

        outputWriter = new PrintWriter(new FileWriter(outputFile));
        // outputWriter.write("# One line per reference id. Count indicates the number of times a query \n" +
        //         "# partially overlaps a target, given the various quality filters used to create the alignment.\n");
        outputWriter.write("sampleId\treferenceId\tcount\tlog10(count+1)\tcumulativeBasesAligned\n");
        reader.readHeader();
        final int numberOfReferences = reader.getNumberOfTargets();
        // The following is the raw count per transcript, or reweighted count per transcript when use-weights==true
        final double[] numberOfReadsPerReference = new double[numberOfReferences];
        final int[] cumulativeBasesPerReference = new int[numberOfReferences];
        System.out.printf("Scanning alignment %s%n", basename);
        for (final Alignments.AlignmentEntry alignmentEntry : reader) {
            final int referenceIndex = alignmentEntry.getTargetIndex();
            numberOfReadsPerReference[referenceIndex] += (weights != null
                    ? weights.getWeight(alignmentEntry.getQueryIndex())
                    : 1);
            cumulativeBasesPerReference[referenceIndex] += Math.min(alignmentEntry.getQueryAlignedLength(),
                    alignmentEntry.getTargetAlignedLength());
        }
        final IndexedIdentifier targetIds = reader.getTargetIdentifiers();
        final DoubleIndexedIdentifier targetIdBackward = new DoubleIndexedIdentifier(targetIds);
        final String sampleId = FilenameUtils.getBaseName(basename);
        deCalculator.reserve(numberOfReferences, inputFiles.length);
        int numAlignedReadsInSample = 0;
        // define elements that will be tested for differential expression:
        for (int referenceIndex = 0; referenceIndex < numberOfReferences; ++referenceIndex) {
            final String transcriptId = targetIdBackward.getId(referenceIndex).toString();
            final int index = deCalculator.defineElement(transcriptId,
                    DifferentialExpressionCalculator.ElementType.TRANSCRIPT);
            deCalculator.defineElementLength(index, reader.getTargetLength(referenceIndex));
        }
        // observe elements:
        for (int referenceIndex = 0; referenceIndex < numberOfReferences; ++referenceIndex) {
            outputWriter.printf("%s\t%s\t%g\t%g\t%d%n", basename, targetIdBackward.getId(referenceIndex),
                    numberOfReadsPerReference[referenceIndex],
                    Math.log10(numberOfReadsPerReference[referenceIndex] + 1),
                    cumulativeBasesPerReference[referenceIndex]);
            final String transcriptId = targetIdBackward.getId(referenceIndex).toString();
            deCalculator.observe(sampleId, transcriptId, numberOfReadsPerReference[referenceIndex]);
            numAlignedReadsInSample += numberOfReadsPerReference[referenceIndex];
        }
        deCalculator.setNumAlignedInSample(sampleId, numAlignedReadsInSample);
        outputWriter.flush();
    } finally {
        IOUtils.closeQuietly(outputWriter);
        reader.close();
    }
}
From source file:org.ala.spatial.web.services.GDMWSController.java
private String generateStep1Paramfile(String[] layers, String layersPath, String speciesfile,
        String outputdir) {
    try {
        LayerDAO layerDao = Client.getLayerDao();

        StringBuilder envLayers = new StringBuilder();
        StringBuilder useEnvLayers = new StringBuilder();
        StringBuilder predSpline = new StringBuilder();
        for (int i = 0; i < layers.length; i++) {
            envLayers.append("EnvGrid").append(i + 1).append("=").append(layersPath).append(layers[i])
                    .append("\n");
            envLayers.append("EnvGridName").append(i + 1).append("=")
                    .append(layerDao.getLayerByName(layers[i]).getDisplayname()).append("\n");
            useEnvLayers.append("UseEnv").append(i + 1).append("=1").append("\n");
            predSpline.append("PredSpl").append(i + 1).append("=3").append("\n");
        }

        StringBuilder sbOut = new StringBuilder();
        sbOut.append("[GDMODEL]").append("\n").append("WorkspacePath=" + outputdir).append("\n")
                .append("RespDataType=RD_SitePlusSpecies").append("\n").append("PredDataType=ED_GridData")
                .append("\n").append("Quantiles=QUANTS_FromData").append("\n").append("UseEuclidean=0")
                .append("\n").append("UseSubSample=1").append("\n").append("NumSamples=10000").append("\n")
                .append("[RESPONSE]").append("\n").append("InputData=" + speciesfile).append("\n")
                .append("UseWeights=0").append("\n").append("[PREDICTORS]").append("\n").append("EuclSpl=3")
                .append("\n").append("NumPredictors=" + layers.length).append("\n").append(envLayers)
                .append("\n").append(useEnvLayers).append("\n").append(predSpline).append("\n");

        PrintWriter spWriter = new PrintWriter(
                new BufferedWriter(new FileWriter(outputdir + "gdm_params.txt")));
        spWriter.write(sbOut.toString());
        spWriter.close();

        return outputdir + "gdm_params.txt";
    } catch (Exception e) {
        System.out.println("Unable to write the initial params file");
        e.printStackTrace(System.out);
    }
    return "";
}