List of usage examples for java.io.StringReader.close()
public void close()
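Before the full examples, here is a minimal sketch of the pattern they all share: create the reader, use it, and close it in a finally block. Unlike most Reader implementations, StringReader overrides close() without a throws clause, so the cleanup needs no nested try/catch; after close(), further read() or ready() calls throw an IOException ("Stream closed"). The class name and sample text below are illustrative only.

import java.io.BufferedReader;
import java.io.IOException;
import java.io.StringReader;

public class StringReaderCloseSketch {
    public static void main(String[] args) throws IOException {
        StringReader reader = new StringReader("first line\nsecond line");
        try {
            BufferedReader buffered = new BufferedReader(reader);
            String line;
            while ((line = buffered.readLine()) != null) {
                System.out.println(line);
            }
        } finally {
            // StringReader.close() does not declare IOException, so no nested try/catch is needed.
            reader.close();
        }
        // Reading after close() fails: reader.read() would now throw IOException("Stream closed").
    }
}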
From source file:edu.jhu.pha.vospace.rest.TransfersController.java
/**
 * Submit the job to the database.
 *
 * @param xmlNode the job XML document
 * @param username the username of the job owner
 * @return the job ID
 */
public static UUID submitJob(String xmlNode, String username) {
    StringReader strRead = new StringReader(xmlNode);
    UUID jobUID = UUID.randomUUID();
    try {
        JobDescription job = new JobDescription();
        job.setId(jobUID.toString());
        job.setUsername(username);
        job.setStartTime(Calendar.getInstance(TimeZone.getTimeZone("UTC")).getTime());
        job.setState(JobDescription.STATE.PENDING);

        SAXBuilder xmlBuilder = new SAXBuilder();
        Element nodeElm = xmlBuilder.build(strRead).getRootElement();
        List<Element> paramNodes = nodeElm.getChildren();
        for (Iterator<Element> it = paramNodes.iterator(); it.hasNext();) {
            Element param = it.next();
            if (param.getName().equals("target")) {
                try {
                    job.setTarget(param.getValue());
                } catch (URISyntaxException e) {
                    logger.error("Error in job parse: " + e.getMessage());
                    throw new BadRequestException("InvalidURI");
                }
            } else if (param.getName().equals("direction")) {
                JobDescription.DIRECTION direct = JobDescription.DIRECTION.LOCAL;
                if (param.getValue().toUpperCase().endsWith("SPACE"))
                    direct = JobDescription.DIRECTION.valueOf(param.getValue().toUpperCase());
                job.setDirection(direct);
                if (direct == JobDescription.DIRECTION.PULLFROMVOSPACE) {
                    job.addProtocol(conf.getString("transfers.protocol.httpget"),
                            conf.getString("application.url") + "/data/" + job.getId());
                } else if (direct == JobDescription.DIRECTION.PUSHTOVOSPACE) {
                    job.addProtocol(conf.getString("transfers.protocol.httpput"),
                            conf.getString("application.url") + "/data/" + job.getId());
                } else if (direct == JobDescription.DIRECTION.LOCAL) {
                    try {
                        job.setDirectionTarget(param.getValue());
                    } catch (URISyntaxException e) {
                        logger.error("Error in job parse: " + e.getMessage());
                        throw new BadRequestException("InvalidURI");
                    }
                }
            } else if (param.getName().equals("view")) {
                job.addView(param.getValue());
            } else if (param.getName().equals("keepBytes")) {
                job.setKeepBytes(Boolean.parseBoolean(param.getValue()));
            } else if (param.getName().equals("protocol")) {
                String protocol = param.getAttributeValue("uri");
                String protocolEndpoint = param.getChildText("protocolEndpoint",
                        Namespace.getNamespace(VOS_NAMESPACE));
                if (job.getDirection().equals(DIRECTION.PULLFROMVOSPACE)
                        || job.getDirection().equals(DIRECTION.PUSHTOVOSPACE)) {
                    protocolEndpoint = conf.getString("application.url") + "/data/" + job.getId();
                }
                if (null != protocol && null != protocolEndpoint)
                    job.addProtocol(protocol, protocolEndpoint);
                else
                    throw new BadRequestException("InvalidArgument");
            }
        }
        JobsProcessor.getDefaultImpl().submitJob(username, job);
    } catch (JDOMException e) {
        e.printStackTrace();
        throw new InternalServerErrorException(e);
    } catch (IOException e) {
        logger.error(e);
        throw new InternalServerErrorException(e);
    } catch (IllegalArgumentException e) {
        logger.error("Error calling the job task: " + e.getMessage());
        throw new InternalServerErrorException("InternalFault");
    } finally {
        strRead.close();
    }
    return jobUID;
}
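Because StringReader implements Closeable (and therefore AutoCloseable), the explicit finally block in the example above can be replaced by try-with-resources on Java 7+. A hedged sketch of that variant around the same SAXBuilder call; the import assumes JDOM 2 (the original project may use the older org.jdom package), and the sample XML string is illustrative:

import java.io.StringReader;
import org.jdom2.Document;
import org.jdom2.Element;
import org.jdom2.input.SAXBuilder;

public class ParseJobXmlSketch {
    public static Element parseRoot(String xmlNode) throws Exception {
        // try-with-resources closes the StringReader even if build() throws
        try (StringReader strRead = new StringReader(xmlNode)) {
            Document doc = new SAXBuilder().build(strRead);
            // the DOM is fully built in memory, so it remains usable after the reader closes
            return doc.getRootElement();
        }
    }

    public static void main(String[] args) throws Exception {
        Element root = parseRoot("<transfer><direction>pullFromVoSpace</direction></transfer>");
        System.out.println(root.getName());
    }
}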
From source file:org.openmrs.module.UpdateFileParser.java
/**
 * Parse the contents of the update.rdf file.
 *
 * @throws ModuleException
 */
public void parse() throws ModuleException {
    StringReader stringReader = null;
    try {
        Document updateDoc = null;
        try {
            stringReader = new StringReader(content);
            InputSource inputSource = new InputSource(stringReader);
            inputSource.setSystemId("./");
            DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
            DocumentBuilder db = dbf.newDocumentBuilder();
            // Disable resolution of external entities. See TRUNK-3942
            db.setEntityResolver(new EntityResolver() {
                public InputSource resolveEntity(String publicId, String systemId) {
                    return new InputSource(new StringReader(""));
                }
            });
            updateDoc = db.parse(inputSource);
        } catch (Exception e) {
            log.warn("Unable to parse content");
            throw new ModuleException("Error parsing update.rdf file: " + content, e);
        }

        Element rootNode = updateDoc.getDocumentElement();
        String configVersion = rootNode.getAttribute("configVersion");
        if (!validConfigVersions().contains(configVersion)) {
            throw new ModuleException(
                    "Invalid configVersion: '" + configVersion + "' found In content: " + content);
        }

        if ("1.0".equals(configVersion)) {
            // the only update in the xml file is the 'best fit'
            this.moduleId = getElement(rootNode, configVersion, "moduleId");
            this.currentVersion = getElement(rootNode, configVersion, "currentVersion");
            this.downloadURL = getElement(rootNode, configVersion, "downloadURL");
        } else if ("1.1".equals(configVersion)) {
            this.moduleId = rootNode.getAttribute("moduleId");
            NodeList nodes = rootNode.getElementsByTagName("update");
            this.currentVersion = ""; // default to the lowest version possible

            // loop over all 'update' tags
            for (Integer i = 0; i < nodes.getLength(); i++) {
                Element currentNode = (Element) nodes.item(i);
                String currentVersion = getElement(currentNode, configVersion, "currentVersion");
                // if the currently saved version is less than the current tag
                if (ModuleUtil.compareVersion(this.currentVersion, currentVersion) < 0) {
                    String requireOpenMRSVersion = getElement(currentNode, configVersion,
                            "requireOpenMRSVersion");
                    // if the openmrs code version is compatible, this node is a winner
                    if (requireOpenMRSVersion == null || ModuleUtil.matchRequiredVersions(
                            OpenmrsConstants.OPENMRS_VERSION_SHORT, requireOpenMRSVersion)) {
                        this.currentVersion = currentVersion;
                        this.downloadURL = getElement(currentNode, configVersion, "downloadURL");
                    }
                }
            }
        }
    } catch (ModuleException e) {
        // rethrow the moduleException
        throw e;
    } finally {
        if (stringReader != null) {
            stringReader.close();
        }
    }
}
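Two details in this example are worth calling out: the outer stringReader is declared before the try so the finally block can null-check it, and the throwaway new StringReader("") inside the EntityResolver is never closed at all. That is safe because StringReader only wraps an in-memory String; close() releases no OS-level resource and simply marks the reader as closed. A condensed, hedged sketch of the same shape (class and parameter names are illustrative):

import java.io.StringReader;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import org.w3c.dom.Document;
import org.xml.sax.InputSource;

public class NullGuardedCloseSketch {
    public static Document parse(String content) throws Exception {
        StringReader stringReader = null;
        try {
            stringReader = new StringReader(content);
            DocumentBuilder db = DocumentBuilderFactory.newInstance().newDocumentBuilder();
            // Block external entities by resolving them to an empty, never-closed StringReader.
            db.setEntityResolver((publicId, systemId) -> new InputSource(new StringReader("")));
            return db.parse(new InputSource(stringReader));
        } finally {
            if (stringReader != null) {
                stringReader.close(); // StringReader.close() cannot throw, so no nested try/catch
            }
        }
    }
}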
From source file:com.globalsight.everest.edit.offline.upload.UploadApi.java
/**
 * Loads the wrapped text file into an OfflinePageData object. Loads RTF
 * list view files.
 *
 * @param p_rtfDoc
 *            the RTF DOM.
 * @return if there are no errors, null is returned. If there are errors, a
 *         fully formed HTML error report page is returned.
 */
private String load_GS_WRAPPED_UNICODE_TEXT_File(RtfDocument p_rtfDoc, String p_fileName) {
    // ---------------------------------------------------------------
    // NOTE:
    // ---------------------------------------------------------------
    // We are half way to direct RTF reading for list-view. For
    // now we still get segments the old way (by getting a plain
    // text dump from the Rtf reader and passing that to the
    // original plain text parser). However, we do now read/load
    // segment annotations with the new RTF parser.
    // ---------------------------------------------------------------

    // -----------------------------------
    // Load comments (eventually loadListViewOneWorkFile should
    // load the entire file)
    // -----------------------------------
    try {
        m_errWriter.setFileName(p_fileName);
        m_uploadPageData.setLoadConversionLineBreak(m_normalizedLB);
        m_uploadPageData.loadListViewOneWorkFile(p_rtfDoc);

        // set err writer's page, task and job ids
        m_errWriter.processOfflinePageData(m_uploadPageData);
    } catch (Throwable ex) {
        String msg = "";
        if (ex instanceof GeneralException) {
            msg = ((GeneralException) ex).getMessage(m_uiLocale.getLocale());
        } else {
            StringWriter sw = new StringWriter();
            PrintWriter pw = new PrintWriter(sw);
            ex.printStackTrace(pw);
            msg = sw.toString();
        }

        String args[] = { EditUtil.encodeHtmlEntities(msg) };
        String errMsg = MessageFormat.format(m_messages.getString("FormatTwoLoadError"), (Object[]) args);
        CATEGORY.error(errMsg);
        m_errWriter.addFileErrorMsg(errMsg);
        return m_errWriter.buildPage().toString();
    }

    // -----------------------------------
    // Now load segments the old way.
    // Eventually loadListViewOneWorkFile (above)
    // should load the entire file.
    // -----------------------------------
    StringReader p_reader = null;
    String c = RtfAPI.getText(p_rtfDoc);
    try {
        StringBuffer content = new StringBuffer();
        StringReader sreader = new StringReader(c);
        BufferedReader br = new BufferedReader(sreader);
        String line = br.readLine();
        while (line != null) {
            boolean ignoreThisLine = line.startsWith(SEGMENT_PAGE_NAME_KEY)
                    || line.startsWith(SEGMENT_FILE_PATH_KEY) || line.startsWith(HEADER_JOB_NAME)
                    || line.startsWith(HEADER_JOB_ID);
            if (!ignoreThisLine) {
                content.append(line).append("\r\n");
            }
            line = br.readLine();
        }
        sreader.close();
        br.close();
        p_reader = new StringReader(content.toString());
    } catch (Exception e) {
        p_reader = new StringReader(c);
    }
    return loadListViewTextFile(p_reader, p_fileName, true);
}
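In the block above, both sreader.close() and br.close() are called. Closing the outermost BufferedReader is enough, because Reader wrappers propagate close() to the reader they wrap; the extra StringReader.close() is harmless since the method is idempotent. A hedged sketch of the same filtering loop with a single close via try-with-resources (the prefix constant stands in for the header keys used above):

import java.io.BufferedReader;
import java.io.IOException;
import java.io.StringReader;

public class FilterLinesSketch {
    private static final String IGNORED_PREFIX = "#"; // illustrative stand-in for the header keys

    public static String filter(String text) throws IOException {
        StringBuilder content = new StringBuilder();
        try (BufferedReader br = new BufferedReader(new StringReader(text))) {
            String line;
            while ((line = br.readLine()) != null) {
                if (!line.startsWith(IGNORED_PREFIX)) {
                    content.append(line).append("\r\n");
                }
            }
        } // closing the BufferedReader also closes the underlying StringReader
        return content.toString();
    }
}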
From source file:jp.terasoluna.fw.web.taglib.WriteTag.java
/**
 * <p>Processes the start tag.</p>
 *
 * @return int always SKIP_BODY
 * @exception JspException if a JSP processing error occurs
 */
@Override
public int doStartTag() throws JspException {
    // if the requested bean is missing and ignore is set, skip the body
    if (ignore && TagUtil.lookup(pageContext, name, scope) == null) {
        return SKIP_BODY;
    }

    // look up the requested property value
    Object value = TagUtil.lookup(pageContext, name, property, scope);
    if (value == null) {
        if (replaceNullToNbsp) {
            TagUtil.write(pageContext, "&nbsp;");
        }
        return SKIP_BODY;
    }

    String output = value.toString();
    if (output.length() == 0) {
        if (replaceNullToNbsp) {
            TagUtil.write(pageContext, "&nbsp;");
            return SKIP_BODY;
        }
        // when the value is empty and replaceNullToNbsp is false, fall
        // through so that addBR can still append a trailing <br> before
        // returning.
    }

    // render the property value
    StringReader sr = null;
    BufferedReader br = null;
    try {
        sr = new StringReader(output);
        br = new BufferedReader(sr);
        StringBuilder sbuilder = new StringBuilder();
        StringBuilder strBuilder = new StringBuilder();
        String tmpLine = null;
        int sizeMngCount = 1;
        int index = 0;

        // collect the lines in a List
        List<String> lines = new ArrayList<String>();
        while ((tmpLine = br.readLine()) != null) {
            lines.add(tmpLine);
        }

        for (String line : lines) {
            if (index > 0 && replaceLFtoBR) {
                // replace the line break with <br>
                sbuilder.append("<br>");
                // reset the column counter
                sizeMngCount = 1;
            }
            if (!"".equals(line)) {
                strBuilder.setLength(0);
                char ch = line.charAt(0);
                for (int i = 0; i < line.length(); i++, sizeMngCount++) {
                    ch = line.charAt(i);
                    strBuilder.append(ch);
                    // append '\n' every fillColumn characters
                    if (fillColumn > 0 && sizeMngCount > 1 && sizeMngCount % fillColumn == 0) {
                        // do not append '\n' at the end of a line that will be
                        // followed by <br> (replaceLFtoBR == true), nor at the
                        // very end of the last line
                        if (i == line.length() - 1 && replaceLFtoBR) {
                            // no-op
                        } else if (i == line.length() - 1 && index == lines.size() - 1) {
                            // no-op
                        } else {
                            strBuilder.append("\n");
                        }
                    }
                }
                line = strBuilder.toString();
            }
            if (filter) {
                // sanitize the line
                line = TagUtil.filter(line);
            }

            // replace half-width spaces with &nbsp; and '\n' with <br>
            char[] content = line.toCharArray();
            StringBuilder result = new StringBuilder();
            for (int i = 0; i < content.length; i++) {
                switch (content[i]) {
                case ' ':
                    if (replaceSpToNbsp) {
                        result.append("&nbsp;");
                    } else {
                        result.append(content[i]);
                    }
                    break;
                case '\n':
                    result.append("<br>");
                    break;
                default:
                    result.append(content[i]);
                    break;
                }
            }
            sbuilder.append(result);
            ++index;
        }

        // append a trailing <br> to the property value
        if (addBR) {
            sbuilder.append("<br>");
        }
        output = sbuilder.toString();
    } catch (IOException e) {
        log.error("StringReader IO error.");
        throw new JspTagException(e.getMessage());
    } finally {
        if (sr != null) {
            sr.close();
        }
        try {
            if (br != null) {
                br.close();
            }
        } catch (IOException e1) {
            if (log.isWarnEnabled()) {
                log.warn("StringReader close error : " + e1);
            }
        }
    }
    TagUtil.write(pageContext, output);
    return SKIP_BODY;
}
From source file:com.beligum.core.utils.AssetPacker.java
private static String minify(File file) {
    String retVal = "";
    StringReader in = null;
    StringWriter out = null;
    try {
        retVal = FileUtils.readFileToString(file);

        /*
         * Pre-processing
         */
        // remove 'special comments' so they get removed anyway (for libraries à la bootstrap, jquery,...)
        retVal = retVal.replace("/*!", "/*");

        in = new StringReader(retVal);
        out = new StringWriter();
        if (file != null && file.exists()) {
            if (file.getAbsolutePath().endsWith(".js")) {
                JavaScriptCompressor compressor = new JavaScriptCompressor(in, new ErrorReporter() {
                    public void warning(String message, String sourceName, int line, String lineSource,
                            int lineOffset) {
                        Logger.warn(message);
                    }

                    public void error(String message, String sourceName, int line, String lineSource,
                            int lineOffset) {
                        Logger.error(message);
                    }

                    public EvaluatorException runtimeError(String message, String sourceName, int line,
                            String lineSource, int lineOffset) {
                        error(message, sourceName, line, lineSource, lineOffset);
                        return new EvaluatorException(message);
                    }
                });

                /*
                 * Display informational messages and warnings.
                 */
                boolean verbose = false;

                /*
                 * This minifies AND obfuscates local symbols, disable to minify only.
                 */
                boolean munge = true;

                /* Preserve unnecessary semicolons (such as right before a '}') This option
                 * is useful when compressed code has to be run through JSLint (which is the
                 * case of YUI for example) */
                boolean preserveAllSemiColons = false;

                /*
                 * Disable all the built-in micro optimizations.
                 */
                boolean disableOptimizations = false;

                compressor.compress(out, -1, munge, verbose, preserveAllSemiColons, disableOptimizations);
                retVal = out.toString();

                /*
                 * For Google Closure, switched to YuiCompressor cause it also provided css support
                 *
                 * com.google.javascript.jscomp.Compiler compiler = new com.google.javascript.jscomp.Compiler();
                 * CompilerOptions options = new CompilerOptions();
                 * CompilationLevel.SIMPLE_OPTIMIZATIONS.setOptionsForCompilationLevel(options);
                 * //WarningLevel.QUIET.setOptionsForWarningLevel(options);
                 * //compiler.setLoggingLevel(Level.ALL);
                 * //options.setLanguageIn(CompilerOptions.LanguageMode.ECMASCRIPT5);
                 * //Processes goog.provide() and goog.require() calls
                 * //options.closurePass = true;
                 *
                 * List<SourceFile> externs = new ArrayList<SourceFile>();
                 * List<SourceFile> inputs = new ArrayList<SourceFile>();
                 * inputs.add(SourceFile.fromFile(file));
                 * com.google.javascript.jscomp.Result compileResult = compiler.compile(externs, inputs, options);
                 * if (compileResult.success) {
                 *     retVal = compiler.toSource();
                 * } else {
                 *     throw new Exception(compileResult.debugLog);
                 * }
                 */
            } else if (file.getAbsolutePath().endsWith(".css")) {
                File minFile = new File(
                        file.getAbsolutePath().substring(0, file.getAbsolutePath().length() - ".css".length())
                                + ".min.css");
                // we always re-minify, just to be sure...
                CssCompressor cssCompressor = new CssCompressor(in);
                cssCompressor.compress(out, -1);
                retVal = out.toString();
            } else {
                throw new Exception("Can't minify this file; unknown source type.");
            }
        } else {
            Logger.error("Trying to minify a file that doesn't exist: " + file.getAbsolutePath());
        }
    } catch (Exception e) {
        Logger.warn("Error while minifying file " + file.getAbsolutePath(), e);
    } finally {
        if (in != null) {
            in.close();
        }
        if (out != null) {
            try {
                out.close();
            } catch (IOException e) {
            }
        }
    }
    return retVal;
}
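The finally block above closes the StringReader directly but wraps the StringWriter close in a try/catch. That asymmetry comes from the signatures: StringReader overrides close() without a throws clause, while Writer.close(), inherited by StringWriter, declares IOException even though closing an in-memory writer has no effect. A minimal hedged sketch of that cleanup shape (method and class names are illustrative):

import java.io.IOException;
import java.io.StringReader;
import java.io.StringWriter;

public class ReaderWriterCleanupSketch {
    public static String copy(String source) {
        StringReader in = null;
        StringWriter out = null;
        try {
            in = new StringReader(source);
            out = new StringWriter();
            int ch;
            while ((ch = in.read()) != -1) {
                out.write(ch);
            }
            return out.toString();
        } catch (IOException e) {
            return "";
        } finally {
            if (in != null) {
                in.close(); // StringReader.close() declares no checked exception
            }
            if (out != null) {
                try {
                    out.close(); // Writer.close() declares IOException, even for StringWriter
                } catch (IOException ignored) {
                }
            }
        }
    }
}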
From source file:de.intranda.goobi.plugins.CSICMixedImport.java
/**
 * Specialized convertData to convert only the specified String inString from marc to mods.
 *
 * @param inString
 * @return
 */
private Fileformat convertData(String inString) {
    Fileformat ff = null;
    Document doc;
    StringReader sr = null;
    try {
        sr = new StringReader(inString);
        doc = new SAXBuilder().build(sr);

        // remove namespaces
        Element docRoot = doc.getRootElement();
        docRoot = setNamespaceRecursive(docRoot, null);
        Element newRecord = new Element("record");
        List<Element> eleList = new ArrayList<Element>();
        for (Object obj : docRoot.getChildren()) {
            Element child = (Element) obj;
            eleList.add(child);
        }
        for (Element element : eleList) {
            element.detach();
        }
        newRecord.setContent(eleList);
        for (Object obj : newRecord.getChildren()) {
            Element child = (Element) obj;
            child.setNamespace(null);
        }
        // newRecord = removeDuplicateChildren(newRecord);
        docRoot.detach();
        doc.setRootElement(newRecord);

        // logger.debug(new XMLOutputter().outputString(doc));
        if (doc != null && doc.hasRootElement()) {
            XSLTransformer transformer = new XSLTransformer(XSLT_PATH);
            Document docMods = transformer.transform(doc);
            // logger.debug(new XMLOutputter().outputString(docMods));

            ff = new MetsMods(prefs);
            DigitalDocument dd = new DigitalDocument();
            ff.setDigitalDocument(dd);

            Element eleMods = docMods.getRootElement();
            if (eleMods.getName().equals("modsCollection")) {
                eleMods = eleMods.getChild("mods", null);
            }

            // Determine the root docstruct type
            dsType = null;
            dsAnchorType = null;
            String typeOfResource = null;
            boolean belongsToPeriodical = false;
            boolean belongsToSeries = false;
            boolean isManuscript = false;
            boolean belongsToMultiVolume = false;

            // handle TypeOfResource
            List<Element> eleTypeOfResourceList = eleMods.getChildren("typeOfResource", null);
            if (eleTypeOfResourceList != null) {
                for (Element eleTypeOfResource : eleTypeOfResourceList) {
                    String resourceLabel = eleTypeOfResource.getAttributeValue("displayLabel");
                    if (resourceLabel != null && resourceLabel.contains("SE")) {
                        belongsToPeriodical = true;
                    }
                    if ("yes".equals(eleTypeOfResource.getAttributeValue("manuscript"))) {
                        isManuscript = true;
                    }
                    typeOfResource = eleTypeOfResource.getTextTrim();
                }
            }

            // handle physicalDescription
            List<Element> physicalDescriptionList = eleMods.getChildren("physicalDescription", null);
            if (physicalDescriptionList != null) {
                for (Element physDescr : physicalDescriptionList) {
                    List<Element> eleFormList = physDescr.getChildren("form", null);
                    if (eleFormList != null) {
                        for (Element eleForm : eleFormList) {
                            if (eleForm.getAttribute("authority") != null
                                    && eleForm.getValue().contentEquals("Manuscrito")) {
                                isManuscript = true;
                            }
                        }
                    }
                }
            }

            // handle archive
            List<Element> recordInfoList = eleMods.getChildren("recordInfo", null);
            if (physicalDescriptionList != null) {
                for (Element recordInfo : recordInfoList) {
                    List<Element> eleIdList = recordInfo.getChildren("recordIdentifier", null);
                    if (eleIdList != null) {
                        for (Element eleId : eleIdList) {
                            String id = eleId.getTextTrim();
                            if (id != null && id.startsWith("CSICAR")) {
                                archiveImport = true;
                            }
                        }
                    }
                }
            }

            // handle relatedSeries
            List<Element> eleRelatedSeriesList = eleMods.getChildren("relatedItem", null);
            if (relatedSeriesIsAnchor && eleRelatedSeriesList != null) {
                for (Element eleRelatedSeries : eleRelatedSeriesList) {
                    if (eleRelatedSeries != null && eleRelatedSeries.getAttribute("type") != null
                            && eleRelatedSeries.getAttribute("type").getValue().contentEquals("series")) {
                        belongsToSeries = true;
                    }
                }
            }

            if (idMap.get(currentIdentifier.replaceAll("\\D", "")) != null) {
                if (idMap.get(currentIdentifier.replaceAll("\\D", "")) == true) {
                    belongsToMultiVolume = true;
                } else if ((identifierSuffix != null && identifierSuffix.startsWith("V"))
                        && ((!belongsToPeriodical && !belongsToSeries))) {
                    belongsToMultiVolume = true;
                }
                // This volume is part of a Series/Multivolume work
                // if (!belongsToPeriodical && !belongsToSeries) {
                // }
            }

            boolean multipart = belongsToMultiVolume || belongsToPeriodical || belongsToSeries;
            dsType = docTypeConfig.getDocType(typeOfResource, multipart, archiveImport);
            dsAnchorType = docTypeConfig.getAnchorType(typeOfResource, multipart, archiveImport);

            // remove unnecessary suffixes for everything but multivolumes
            if (!belongsToMultiVolume) {
                if (idMap.get(currentIdentifier.replaceAll("\\D", "")) != null
                        && idMap.get(currentIdentifier.replaceAll("\\D", "")) == true) {
                    // need suffix
                } else {
                    identifierSuffix = null;
                }
            }

            logger.debug("Docstruct type: " + dsType);
            DocStruct dsVolume = dd.createDocStruct(prefs.getDocStrctTypeByName(dsType));
            if (dsVolume == null) {
                logger.error("Could not create DocStructType " + dsVolume);
                return null;
            }
            DocStruct dsAnchor = null;
            if (dsAnchorType != null) {
                logger.debug("Anchor type: " + dsAnchorType);
                dsAnchor = dd.createDocStruct(prefs.getDocStrctTypeByName(dsAnchorType));
                if (dsAnchor == null) {
                    logger.error("Could not create DocStructType " + dsAnchorType);
                }
                try {
                    dsAnchor.addChild(dsVolume);
                } catch (TypeNotAllowedAsChildException e) {
                    logger.error("Could not attach " + dsAnchorType + " to anchor " + dsType);
                }
                dd.setLogicalDocStruct(dsAnchor);
            } else {
                dd.setLogicalDocStruct(dsVolume);
            }

            DocStruct dsBoundBook = dd.createDocStruct(prefs.getDocStrctTypeByName("BoundBook"));
            dd.setPhysicalDocStruct(dsBoundBook);

            // get volume number of this item
            Integer volumeNo = ModsUtils.getNumberFromString(this.getCurrentSuffix());
            if (volumeNo == null) {
                volumeNo = currentVolume;
            }

            // Collect MODS metadata
            ModsUtils.parseModsSection(this, dsVolume, dsAnchor, dsBoundBook, eleMods, volumeNo,
                    currentPieceDesignation);
            currentIdentifier = ModsUtils.getIdentifier(prefs, dsVolume);
            currentTitle = ModsUtils.getTitle(prefs, dsVolume);
            currentAuthor = ModsUtils.getAuthor(prefs, dsVolume);
            logger.debug("Author:" + currentAuthor + ", Title: " + currentTitle);

            // create source ("import") Folder
            if (importFolder != null) {
                File tempDir = new File(importFolder, getProcessTitle().replace(".xml", ""));
                sourceFolder = new File(tempDir, "import");
                sourceFolder.mkdirs();
            }

            if (logConversionLoss) {
                try {
                    File marcLossFile = new File(logFolder, currentIdentifier + "_MarcLoss.xml");
                    Document lossDoc = getMarcModsLoss(doc, docMods);
                    CommonUtils.getFileFromDocument(marcLossFile, lossDoc);
                    File modsFile = new File(logFolder, currentIdentifier + "_Mods.xml");
                    CommonUtils.getFileFromDocument(modsFile, docMods);
                } catch (IOException e) {
                    logger.error(e);
                }
            }

            if (!deleteTempFiles) {
                try {
                    File modsFile = new File(sourceFolder, "modsTemp.xml");
                    CommonUtils.getFileFromDocument(modsFile, docMods);
                } catch (IOException e) {
                    logger.error(e);
                }
            }

            // Add 'pathimagefiles'
            try {
                Metadata mdForPath = new Metadata(prefs.getMetadataTypeByName("pathimagefiles"));
                mdForPath.setValue("./" + currentIdentifier);
                dsBoundBook.addMetadata(mdForPath);
            } catch (MetadataTypeNotAllowedException e1) {
                logger.error("MetadataTypeNotAllowedException while reading images", e1);
            } catch (DocStructHasNoTypeException e1) {
                logger.error("DocStructHasNoTypeException while reading images", e1);
            }

            // Add collection names attached to the current record
            if (currentCollectionList != null) {
                MetadataType mdTypeCollection = prefs.getMetadataTypeByName("singleDigCollection");
                for (String collection : currentCollectionList) {
                    Metadata mdCollection = new Metadata(mdTypeCollection);
                    mdCollection.setValue(collection);
                    dsVolume.addMetadata(mdCollection);
                    // if (dsAnchor != null) {
                    //     dsAnchor.addMetadata(mdCollection);
                    // }
                }
            }
        }
    } catch (JDOMException e) {
        logger.error(e.getMessage(), e);
        return null;
    } catch (IOException e) {
        logger.error(e.getMessage(), e);
    } catch (PreferencesException e) {
        logger.error(e.getMessage(), e);
    } catch (TypeNotAllowedForParentException e) {
        logger.error(e.getMessage(), e);
    } catch (MetadataTypeNotAllowedException e) {
        logger.error(e.getMessage(), e);
    } finally {
        // try {
        if (sr != null) {
            sr.close();
        }
        // } catch (IOException e) {
        //     logger.error("Error closing String reader");
        // }
    }
    return ff;
}
From source file:org.opendatakit.aggregate.servlet.ResetUsersAndPermissionsServlet.java
/**
 * Processes the multipart form that contains the csv file which holds the
 * list of users and their permissions. Returns success if the changes have
 * been applied; false otherwise.
 */
@Override
protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
    if (req.getScheme().equals("http")) {
        logger.warn("Resetting users and capabilities over http");
    }
    CallingContext cc = ContextFactory.getCallingContext(this, req);

    Double openRosaVersion = getOpenRosaVersion(req);

    // verify request is multipart
    if (!ServletFileUpload.isMultipartContent(req)) {
        resp.sendError(HttpServletResponse.SC_BAD_REQUEST, ErrorConsts.NO_MULTI_PART_CONTENT);
        return;
    }

    StringBuilder warnings = new StringBuilder();
    // TODO Add in form title process so it will update the changes in the XML
    // of form
    try {
        // process form
        MultiPartFormData resetUsersAndPermissions = new MultiPartFormData(req);

        MultiPartFormItem usersAndPermissionsCsv = resetUsersAndPermissions
                .getFormDataByFieldName(ACCESS_DEF_PRAM);

        String inputCsv = null;
        if (usersAndPermissionsCsv != null) {
            // TODO: changed added output stream writer. probably something better
            // exists
            inputCsv = usersAndPermissionsCsv.getStream().toString(HtmlConsts.UTF8_ENCODE);
        }

        StringReader csvContentReader = null;
        RFC4180CsvReader csvReader = null;
        try {
            // we need to build up the UserSecurityInfo records for all the users
            ArrayList<UserSecurityInfo> users = new ArrayList<UserSecurityInfo>();

            // build reader for the csv content
            csvContentReader = new StringReader(inputCsv);
            csvReader = new RFC4180CsvReader(csvContentReader);

            // get the column headings -- these mimic those in Site Admin / Permissions table.
            // Order is irrelevant; no change-password column.
            //
            String[] columns;
            int row = 0;
            for (;;) {
                ++row;
                columns = csvReader.readNext();
                if (columns == null) {
                    logger.error("users and capabilities .csv upload - empty csv file");
                    resp.sendError(HttpServletResponse.SC_BAD_REQUEST,
                            ErrorConsts.MISSING_PARAMS + "\nusers and capabilities .csv is empty");
                    return;
                }
                // count non-blank columns
                int nonBlankColCount = 0;
                for (String col : columns) {
                    if (col != null && col.trim().length() != 0) {
                        ++nonBlankColCount;
                    }
                }
                // if there are fewer than 4 columns, it must be a comment field.
                // if there are 4 or more columns, then we expect it to be the column headers
                // for the users and capabilities table. We could require just 3, but that
                // would not be very useful or realistic.
                if (nonBlankColCount < 4)
                    continue;
                break;
            }

            if (row != 1) {
                logger.warn("users and capabilities .csv upload -- interpreting row " + row
                        + " as the column header row");
                warnings.append("<tr><td>Interpreting row " + row + " as the column header row.</td></tr>");
            }

            // TODO: validate column headings....
            int idxUsername = -1;
            int idxFullName = -1;
            int idxUserType = -1;
            int idxDataCollector = -1;
            int idxDataViewer = -1;
            int idxFormManager = -1;
            int idxSyncTables = -1;
            int idxTablesSU = -1;
            int idxTablesAdmin = -1;
            int idxSiteAdmin = -1;

            for (int i = 0; i < columns.length; ++i) {
                String heading = columns[i];
                if (heading == null || heading.trim().length() == 0) {
                    continue;
                }
                heading = heading.trim();
                // 'Username' is required
                if ("Username".compareToIgnoreCase(heading) == 0) {
                    if (idxUsername != -1) {
                        logger.error(
                                "users and capabilities .csv upload - invalid csv file -- column header 'Username' is repeated");
                        resp.sendError(HttpServletResponse.SC_BAD_REQUEST, ErrorConsts.MISSING_PARAMS
                                + "\nusers and capabilities invalid .csv -- column header 'Username' is repeated");
                        return;
                    }
                    idxUsername = i;
                }
                // 'Full Name' is optional. The value in 'Username' will be used to construct this if unspecified.
                else if ("Full Name".compareToIgnoreCase(heading) == 0) {
                    if (idxFullName != -1) {
                        logger.error(
                                "users and capabilities .csv upload - invalid csv file -- column header 'Full Name' is repeated");
                        resp.sendError(HttpServletResponse.SC_BAD_REQUEST, ErrorConsts.MISSING_PARAMS
                                + "\nusers and capabilities invalid .csv -- column header 'Full Name' is repeated");
                        return;
                    }
                    idxFullName = i;
                }
                // 'Account Type' is required
                else if ("Account Type".compareToIgnoreCase(heading) == 0) {
                    if (idxUserType != -1) {
                        logger.error(
                                "users and capabilities .csv upload - invalid csv file -- column header 'Account Type' is repeated");
                        resp.sendError(HttpServletResponse.SC_BAD_REQUEST, ErrorConsts.MISSING_PARAMS
                                + "\nusers and capabilities invalid .csv -- column header 'Account Type' is repeated");
                        return;
                    }
                    idxUserType = i;
                }
                // Permissions columns begin here. All are optional
                else if ("Data Collector".compareToIgnoreCase(heading) == 0) {
                    if (idxDataCollector != -1) {
                        logger.error(
                                "users and capabilities .csv upload - invalid csv file -- column header 'Data Collector' is repeated");
                        resp.sendError(HttpServletResponse.SC_BAD_REQUEST, ErrorConsts.MISSING_PARAMS
                                + "\nusers and capabilities invalid .csv -- column header 'Data Collector' is repeated");
                        return;
                    }
                    idxDataCollector = i;
                } else if ("Data Viewer".compareToIgnoreCase(heading) == 0) {
                    if (idxDataViewer != -1) {
                        logger.error(
                                "users and capabilities .csv upload - invalid csv file -- column header 'Data Viewer' is repeated");
                        resp.sendError(HttpServletResponse.SC_BAD_REQUEST, ErrorConsts.MISSING_PARAMS
                                + "\nusers and capabilities invalid .csv -- column header 'Data Viewer' is repeated");
                        return;
                    }
                    idxDataViewer = i;
                } else if ("Form Manager".compareToIgnoreCase(heading) == 0) {
                    if (idxFormManager != -1) {
                        logger.error(
                                "users and capabilities .csv upload - invalid csv file -- column header 'Form Manager' is repeated");
                        resp.sendError(HttpServletResponse.SC_BAD_REQUEST, ErrorConsts.MISSING_PARAMS
                                + "\nusers and capabilities invalid .csv -- column header 'Form Manager' is repeated");
                        return;
                    }
                    idxFormManager = i;
                } else if ("Synchronize Tables".compareToIgnoreCase(heading) == 0) {
                    if (idxSyncTables != -1) {
                        logger.error(
                                "users and capabilities .csv upload - invalid csv file -- column header 'Synchronize Tables' is repeated");
                        resp.sendError(HttpServletResponse.SC_BAD_REQUEST, ErrorConsts.MISSING_PARAMS
                                + "\nusers and capabilities invalid .csv -- column header 'Synchronize Tables' is repeated");
                        return;
                    }
                    idxSyncTables = i;
                } else if ("Tables Super-user".compareToIgnoreCase(heading) == 0) {
                    if (idxTablesSU != -1) {
                        logger.error(
                                "users and capabilities .csv upload - invalid csv file -- column header 'Tables Super-user' is repeated");
                        resp.sendError(HttpServletResponse.SC_BAD_REQUEST, ErrorConsts.MISSING_PARAMS
                                + "\nusers and capabilities invalid .csv -- column header 'Tables Super-user' is repeated");
                        return;
                    }
                    idxTablesSU = i;
                } else if ("Administer Tables".compareToIgnoreCase(heading) == 0) {
                    if (idxTablesAdmin != -1) {
                        logger.error(
                                "users and capabilities .csv upload - invalid csv file -- column header 'Administer Tables' is repeated");
                        resp.sendError(HttpServletResponse.SC_BAD_REQUEST, ErrorConsts.MISSING_PARAMS
                                + "\nusers and capabilities invalid .csv -- column header 'Administer Tables' is repeated");
                        return;
                    }
                    idxTablesAdmin = i;
                } else if ("Site Administrator".compareToIgnoreCase(heading) == 0) {
                    if (idxSiteAdmin != -1) {
                        logger.error(
                                "users and capabilities .csv upload - invalid csv file -- column header 'Site Administrator' is repeated");
                        resp.sendError(HttpServletResponse.SC_BAD_REQUEST, ErrorConsts.MISSING_PARAMS
                                + "\nusers and capabilities invalid .csv -- column header 'Site Administrator' is repeated");
                        return;
                    }
                    idxSiteAdmin = i;
                } else {
                    logger.warn("users and capabilities .csv upload - invalid csv file -- column header '"
                            + heading + "' is not recognized");
                    warnings.append("<tr><td>Column header '" + heading
                            + "' is not recognized and will be ignored.</tr></td>");
                }
            }

            if (idxUsername == -1) {
                logger.error("users and capabilities .csv upload - invalid csv file");
                resp.sendError(HttpServletResponse.SC_BAD_REQUEST, ErrorConsts.MISSING_PARAMS
                        + "\nusers and capabilities invalid .csv -- column header 'Username' is missing");
                return;
            }
            if (idxUserType == -1) {
                logger.error("users and capabilities .csv upload - invalid csv file");
                resp.sendError(HttpServletResponse.SC_BAD_REQUEST, ErrorConsts.MISSING_PARAMS
                        + "\nusers and capabilities invalid .csv -- column header 'Account Type' is missing");
                return;
            }

            while ((columns = csvReader.readNext()) != null) {
                ++row;
                // empty -- silently skip
                if (columns.length == 0)
                    continue;
                // count non-blank columns
                int nonBlankColCount = 0;
                for (String col : columns) {
                    if (col != null && col.trim().length() != 0) {
                        ++nonBlankColCount;
                    }
                }
                // all blank-- silently skip
                if (nonBlankColCount == 0)
                    continue;

                // ignore rows where...
                // the row is not long enough to include the Username and Account Type columns
                if (columns.length <= idxUsername || columns.length <= idxUserType) {
                    warnings.append("<tr><td>Ignoring row " + row
                            + " -- does not specify a Username and/or Account Type.</tr></td>");
                    continue;
                }

                // ignore rows where...
                // Username is not specified or it is not the anonymousUser and Account Type is blank
                if ((columns[idxUsername] == null || columns[idxUsername].trim().length() == 0)
                        || (!columns[idxUsername].equals(User.ANONYMOUS_USER)
                                && (columns[idxUserType] == null || columns[idxUserType].trim().length() == 0))) {
                    warnings.append("<tr><td>Ignoring row " + row + " -- Username is not the "
                            + User.ANONYMOUS_USER + " and no Account Type specified.</tr></td>");
                    continue;
                }

                String accType = (idxUserType == -1) ? "ODK" : columns[idxUserType];
                UserType type = (accType == null) ? UserType.ANONYMOUS : UserType.REGISTERED;

                if ((type != UserType.ANONYMOUS) && (columns[idxUsername] == null)) {
                    logger.error("users and capabilities .csv upload - invalid csv file");
                    resp.sendError(HttpServletResponse.SC_BAD_REQUEST, ErrorConsts.MISSING_PARAMS
                            + "\nusers and capabilities invalid .csv -- username not specified");
                    return;
                }

                String username;
                String email;
                String fullname = (idxFullName == -1 || columns.length < idxFullName) ? null
                        : columns[idxFullName];
                if (accType == null) {
                    username = User.ANONYMOUS_USER;
                    email = null;
                    fullname = User.ANONYMOUS_USER_NICKNAME;
                } else if ("ODK".equals(accType)) {
                    Collection<Email> emails = EmailParser.parseEmails(columns[idxUsername]);
                    if (emails.size() != 1) {
                        logger.error("users and capabilities .csv upload - invalid csv file");
                        resp.sendError(HttpServletResponse.SC_BAD_REQUEST, ErrorConsts.MISSING_PARAMS
                                + "\nusers and capabilities invalid .csv -- username \'" + columns[idxUsername]
                                + "\' contains illegal characters (e.g., spaces)");
                        return;
                    }
                    email = null;
                    Email parsedValue = emails.iterator().next();
                    if (parsedValue.getType() == Form.EMAIL) {
                        username = parsedValue.getEmail().substring(EmailParser.K_MAILTO.length());
                    } else {
                        username = parsedValue.getUsername();
                    }
                    if (fullname == null) {
                        fullname = parsedValue.getFullName();
                    }
                } else if ("Google".equals(accType)) {
                    username = null;
                    Collection<Email> emails = EmailParser.parseEmails(columns[idxUsername]);
                    if (emails == null || emails.size() == 0) {
                        logger.error("users and capabilities .csv upload - invalid csv file");
                        resp.sendError(HttpServletResponse.SC_BAD_REQUEST, ErrorConsts.MISSING_PARAMS
                                + "\nusers and capabilities invalid .csv -- username \'" + columns[idxUsername]
                                + "\' could not be parsed into valid e-mail");
                        return;
                    }
                    if (emails.size() != 1) {
                        logger.error("users and capabilities .csv upload - invalid csv file");
                        resp.sendError(HttpServletResponse.SC_BAD_REQUEST, ErrorConsts.MISSING_PARAMS
                                + "\nusers and capabilities invalid .csv -- username \'" + columns[idxUsername]
                                + "\' could not be parsed into a valid e-mail");
                        return;
                    }
                    // will execute loop once
                    email = null;
                    for (Email e : emails) {
                        if (e.getType() != Email.Form.EMAIL) {
                            logger.error("users and capabilities .csv upload - invalid csv file");
                            resp.sendError(HttpServletResponse.SC_BAD_REQUEST, ErrorConsts.MISSING_PARAMS
                                    + "\nusers and capabilities invalid .csv -- username \'"
                                    + columns[idxUsername] + "\' could not be parsed into a valid e-mail");
                            return;
                        }
                        email = e.getEmail();
                        if (fullname == null) {
                            fullname = e.getFullName();
                        }
                    }
                } else {
                    logger.error("users and capabilities .csv upload - invalid csv file");
                    resp.sendError(HttpServletResponse.SC_BAD_REQUEST, ErrorConsts.MISSING_PARAMS
                            + "\nusers and capabilities invalid .csv -- Account Type \'" + accType
                            + "\' is neither 'ODK' nor 'Google' nor blank (anonymous)");
                    return;
                }
                UserSecurityInfo info = new UserSecurityInfo(username, fullname, email, type);

                // now add permissions
                TreeSet<GrantedAuthorityName> authorities = new TreeSet<GrantedAuthorityName>();
                if (idxDataCollector != -1 && columns.length > idxDataCollector
                        && columns[idxDataCollector] != null
                        && columns[idxDataCollector].trim().length() != 0) {
                    authorities.add(GrantedAuthorityName.GROUP_DATA_COLLECTORS);
                }
                if (idxDataViewer != -1 && columns.length > idxDataViewer && columns[idxDataViewer] != null
                        && columns[idxDataViewer].trim().length() != 0) {
                    authorities.add(GrantedAuthorityName.GROUP_DATA_VIEWERS);
                }
                if (idxFormManager != -1 && columns.length > idxFormManager && columns[idxFormManager] != null
                        && columns[idxFormManager].trim().length() != 0) {
                    authorities.add(GrantedAuthorityName.GROUP_FORM_MANAGERS);
                }
                if (idxSyncTables != -1 && columns.length > idxSyncTables && columns[idxSyncTables] != null
                        && columns[idxSyncTables].trim().length() != 0) {
                    authorities.add(GrantedAuthorityName.GROUP_SYNCHRONIZE_TABLES);
                }
                if (idxTablesSU != -1 && columns.length > idxTablesSU && columns[idxTablesSU] != null
                        && columns[idxTablesSU].trim().length() != 0) {
                    authorities.add(GrantedAuthorityName.GROUP_SUPER_USER_TABLES);
                }
                if (idxTablesAdmin != -1 && columns.length > idxTablesAdmin && columns[idxTablesAdmin] != null
                        && columns[idxTablesAdmin].trim().length() != 0) {
                    authorities.add(GrantedAuthorityName.GROUP_ADMINISTER_TABLES);
                }
                if (idxSiteAdmin != -1 && columns.length > idxSiteAdmin && columns[idxSiteAdmin] != null
                        && columns[idxSiteAdmin].trim().length() != 0) {
                    authorities.add(GrantedAuthorityName.GROUP_SITE_ADMINS);
                }
                info.setAssignedUserGroups(authorities);
                users.add(info);
            }

            // allGroups is empty. This is currently not used.
            ArrayList<GrantedAuthorityName> allGroups = new ArrayList<GrantedAuthorityName>();

            // now scan for duplicate entries for the same username
            {
                HashMap<String, HashSet<UserSecurityInfo>> multipleRows = new HashMap<String, HashSet<UserSecurityInfo>>();
                for (UserSecurityInfo i : users) {
                    if (i.getType() != UserType.REGISTERED) {
                        continue;
                    }
                    if (i.getUsername() != null) {
                        HashSet<UserSecurityInfo> existing;
                        existing = multipleRows.get(i.getUsername());
                        if (existing == null) {
                            existing = new HashSet<UserSecurityInfo>();
                            multipleRows.put(i.getUsername(), existing);
                        }
                        existing.add(i);
                    }
                }
                for (Entry<String, HashSet<UserSecurityInfo>> entry : multipleRows.entrySet()) {
                    if (entry.getValue().size() != 1) {
                        logger.error("users and capabilities .csv upload - invalid csv file");
                        resp.sendError(HttpServletResponse.SC_BAD_REQUEST, ErrorConsts.MISSING_PARAMS
                                + "\nusers and capabilities invalid .csv -- "
                                + "multiple rows define the capabilities for the same username: "
                                + entry.getKey());
                        return;
                    }
                }
            }

            // and scan for duplicate entries for the same e-mail address
            {
                HashMap<String, HashSet<UserSecurityInfo>> multipleRows = new HashMap<String, HashSet<UserSecurityInfo>>();
                for (UserSecurityInfo i : users) {
                    if (i.getType() != UserType.REGISTERED) {
                        continue;
                    }
                    if (i.getEmail() != null) {
                        HashSet<UserSecurityInfo> existing;
                        existing = multipleRows.get(i.getEmail());
                        if (existing == null) {
                            existing = new HashSet<UserSecurityInfo>();
                            multipleRows.put(i.getEmail(), existing);
                        }
                        existing.add(i);
                    }
                }
                for (Entry<String, HashSet<UserSecurityInfo>> entry : multipleRows.entrySet()) {
                    if (entry.getValue().size() != 1) {
                        logger.error("users and capabilities .csv upload - invalid csv file");
                        resp.sendError(HttpServletResponse.SC_BAD_REQUEST, ErrorConsts.MISSING_PARAMS
                                + "\nusers and capabilities invalid .csv -- "
                                + "multiple rows define the capabilities for the same e-mail: "
                                + entry.getKey().substring(EmailParser.K_MAILTO.length()));
                        return;
                    }
                }
            }

            // now scan for the anonymousUser
            UserSecurityInfo anonUser = null;
            for (UserSecurityInfo i : users) {
                if (i.getType() == UserType.ANONYMOUS) {
                    if (anonUser != null) {
                        logger.error("users and capabilities .csv upload - invalid csv file");
                        resp.sendError(HttpServletResponse.SC_BAD_REQUEST, ErrorConsts.MISSING_PARAMS
                                + "\nusers and capabilities invalid .csv -- "
                                + "multiple rows define the capabilities for the anonymousUser - did you forget to specify Account Type?");
                        return;
                    }
                    anonUser = i;
                }
            }

            // and figure out whether the anonymousUser currently has ROLE_ATTACHMENT_VIEWER capabilities
            // (these allow Google Earth to access the server).
            //
            // If it does, preserve that capability.
            // To do this, fetch the existing info for anonymous...
            UserSecurityInfo anonExisting = new UserSecurityInfo(User.ANONYMOUS_USER,
                    User.ANONYMOUS_USER_NICKNAME, null, UserSecurityInfo.UserType.ANONYMOUS);
            SecurityServiceUtil.setAuthenticationListsForSpecialUser(anonExisting,
                    GrantedAuthorityName.USER_IS_ANONYMOUS, cc);
            // test if the existing anonymous had the capability
            if (anonExisting.getAssignedUserGroups().contains(GrantedAuthorityName.ROLE_ATTACHMENT_VIEWER)) {
                if (anonUser == null) {
                    // no anonUser specified in the incoming .csv -- add it with just that capability.
                    TreeSet<GrantedAuthorityName> auths = new TreeSet<GrantedAuthorityName>();
                    auths.add(GrantedAuthorityName.ROLE_ATTACHMENT_VIEWER);
                    anonExisting.setAssignedUserGroups(auths);
                    users.add(anonExisting);
                } else {
                    // add this capability to the existing set of capabilities
                    anonUser.getAssignedUserGroups().add(GrantedAuthorityName.ROLE_ATTACHMENT_VIEWER);
                }
            }

            SecurityServiceUtil.setStandardSiteAccessConfiguration(users, allGroups, cc);

            // GAE requires some settle time before these entries will be
            // accurately retrieved. Do not re-fetch the form after it has been
            // uploaded.
            resp.setStatus(HttpServletResponse.SC_OK);
            if (openRosaVersion == null) {
                // web page -- show HTML response
                resp.setContentType(HtmlConsts.RESP_TYPE_HTML);
                resp.setCharacterEncoding(HtmlConsts.UTF8_ENCODE);
                PrintWriter out = resp.getWriter();

                StringBuilder headerString = new StringBuilder();
                headerString.append("<link rel=\"stylesheet\" type=\"text/css\" href=\"");
                headerString.append(cc.getWebApplicationURL(ServletConsts.AGGREGATE_STYLE));
                headerString.append("\" />");
                headerString.append("<link rel=\"stylesheet\" type=\"text/css\" href=\"");
                headerString.append(cc.getWebApplicationURL(ServletConsts.UPLOAD_BUTTON_STYLE_RESOURCE));
                headerString.append("\" />");
                headerString.append("<link rel=\"stylesheet\" type=\"text/css\" href=\"");
                headerString.append(cc.getWebApplicationURL(ServletConsts.UPLOAD_TABLE_STYLE_RESOURCE));
                headerString.append("\" />");
                headerString.append("<link rel=\"stylesheet\" type=\"text/css\" href=\"");
                headerString.append(cc.getWebApplicationURL(ServletConsts.UPLOAD_NAVIGATION_STYLE_RESOURCE));
                headerString.append("\" />");

                // header info
                beginBasicHtmlResponse(TITLE_INFO, headerString.toString(), resp, cc);
                if (warnings.length() != 0) {
                    out.write("<p>users and capabilities .csv uploaded with warnings.</p>" + "<table>");
                    out.write(warnings.toString());
                    out.write("</table>");
                } else {
                    out.write("<p>Successful users and capabilities .csv upload.</p>");
                }
                out.write("<p>Click ");
                out.write(HtmlUtil.createHref(cc.getWebApplicationURL(ADDR), "here", false));
                out.write(" to return to Upload users and capabilities .csv page.</p>");
                finishBasicHtmlResponse(resp);
            } else {
                addOpenRosaHeaders(resp);
                resp.setContentType(HtmlConsts.RESP_TYPE_XML);
                resp.setCharacterEncoding(HtmlConsts.UTF8_ENCODE);
                PrintWriter out = resp.getWriter();
                out.write("<OpenRosaResponse xmlns=\"http://openrosa.org/http/response\">");
                if (warnings.length() != 0) {
                    StringBuilder b = new StringBuilder();
                    b.append("<p>users and capabilities .csv uploaded with warnings.</p>" + "<table>");
                    b.append(warnings.toString());
                    b.append("</table>");
                    out.write("<message>");
                    out.write(StringEscapeUtils.escapeXml10(b.toString()));
                    out.write("</message>");
                } else {
                    out.write("<message>Successful users and capabilities .csv upload.</message>");
                }
                out.write("</OpenRosaResponse>");
            }
        } catch (DatastoreFailureException e) {
            logger.error("users and capabilities .csv upload persistence error: " + e.toString());
            e.printStackTrace();
            resp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR,
                    ErrorConsts.PERSISTENCE_LAYER_PROBLEM + "\n" + e.toString());
        } catch (AccessDeniedException e) {
            logger.error("users and capabilities .csv upload access denied error: " + e.toString());
            e.printStackTrace();
            resp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, e.toString());
        } finally {
            if (csvReader != null) {
                csvReader.close();
            }
            if (csvContentReader != null) {
                csvContentReader.close();
            }
        }
    } catch (FileUploadException e) {
        logger.error("users and capabilities .csv upload persistence error: " + e.toString());
        e.printStackTrace(resp.getWriter());
        resp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, ErrorConsts.UPLOAD_PROBLEM);
    }
}