List of usage examples for org.jdom2 Element setAttribute
public Element setAttribute(final String name, final String value)
Sets an attribute value on this element. Any existing attribute with the same name (and no namespace) is replaced, and the method returns the element itself, so calls can be chained.
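A minimal, self-contained sketch of the call before the real-world examples below (the element name, attribute names, and values are made up for illustration):

import org.jdom2.Document;
import org.jdom2.Element;
import org.jdom2.output.Format;
import org.jdom2.output.XMLOutputter;

public class SetAttributeDemo {
    public static void main(String[] args) {
        // setAttribute returns the element itself, so calls can be chained
        Element book = new Element("book")
                .setAttribute("isbn", "978-0-00-000000-0")
                .setAttribute("lang", "en");

        // Setting an attribute that already exists replaces its value
        book.setAttribute("lang", "eu");

        Document doc = new Document(book);
        XMLOutputter out = new XMLOutputter(Format.getPrettyFormat());
        // Prints roughly: <book isbn="978-0-00-000000-0" lang="eu" />
        System.out.println(out.outputString(doc));
    }
}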
From source file:edu.ucla.loni.server.ServerUtils.java
License:Open Source License
/**
 * Writes the access file for the root directory
 * Side Effect: Updates accessFileModified in the database
 */
public static void writeAccessFile(Directory root) throws Exception {
    File accessFile = getAccessFile(root);
    if (!accessFile.exists()) {
        boolean success = accessFile.createNewFile();
        if (!success) {
            throw new Exception("Could not create access file");
        }
    }

    // <files>
    Element filesRoot = new Element("files");

    // For each pipe, add a <file> child to <files>
    Pipefile[] pipes = Database.selectPipefiles(root.dirId);
    if (pipes != null) {
        for (Pipefile p : pipes) {
            // Only add the pipefile if access is not empty
            if (!p.access.equals("")) {
                Element file = new Element("file");
                file.setAttribute("type", p.type);
                file.setAttribute("name", p.name);
                file.setAttribute("package", p.packageName);

                // Add agents
                String[] agents = p.access.split(",");
                for (String agent : agents) {
                    if (!agent.equals("")) {
                        file.addContent(agentElement(agent));
                    }
                }

                filesRoot.addContent(file);
            }
        }
    }

    // <groups>
    Element groupsRoot = new Element("groups");

    // For each group, add a <group> child to <groups>
    Group[] groups = Database.selectGroups(root.dirId);
    if (groups != null) {
        for (Group g : groups) {
            Element group = new Element("group");
            group.setAttribute("name", g.name);

            // Add agents
            String[] agents = g.users.split(",");
            for (String agent : agents) {
                if (!agent.equals("")) {
                    group.addContent(agentElement(agent));
                }
            }

            groupsRoot.addContent(group);
        }
    }

    // Root Element
    Element access = new Element("access");
    access.addContent(filesRoot);
    access.addContent(groupsRoot);

    // Document
    Document doc = new Document();
    doc.addContent(access);

    // Write document
    writeXML(accessFile, doc);

    // Update when the access file was written
    root.accessModified = new Timestamp(accessFile.lastModified());
    Database.updateDirectory(root);
}
From source file:edu.utep.cs.jasg.apiGenerator.APIGenerator.java
License:Open Source License
/**
 * Creates JDom nodes of the terminals contained in the parser specification file.
 * Maps the name of the terminals contained in the Beaver specification file to the value
 * of the terminals extracted from a JFlex specification file
 */
private void createTerminalElements() {
    Element terminals = new Element("terminals");
    for (String terminal : parserModel.getTerminals()) {
        Element terminalElement = new Element("terminal");
        terminalElement.setAttribute("name", terminal);
        if (scannerModel.getTokens().containsKey(terminal)) {
            Element symbolSet = new Element("symbol_set");
            Iterator<String> symbolList = scannerModel.getTokens().get(terminal).iterator();
            while (symbolList.hasNext()) {
                String symbol = symbolList.next();
                symbolSet.addContent(new Element("symbol").setAttribute(new Attribute("value", symbol)));
            }
            terminalElement.addContent(symbolSet);
        }
        terminals.addContent(terminalElement);
    }
    doc.getRootElement().addContent(terminals);
}
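The loop above relies on setAttribute returning the element, so each new <symbol> element can be created, configured, and added in a single expression. A hypothetical standalone helper (not part of APIGenerator) showing the same idiom with the setAttribute(Attribute) overload:

import org.jdom2.Attribute;
import org.jdom2.Element;

// Hypothetical helper: builds <symbol value="..."/> the same way the loop above does.
static Element symbolElement(String symbol) {
    // setAttribute(Attribute) is useful when the Attribute carries a type or namespace;
    // setAttribute("value", symbol) would produce the same result here.
    return new Element("symbol").setAttribute(new Attribute("value", symbol));
}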
From source file:edu.wisc.ssec.mcidasv.data.hydra.SuomiNPPDataSource.java
License:Open Source License
public void setup() throws VisADException { // which format, NASA or NOAA? isNOAA = false;/*from w w w.ja v a 2s. c o m*/ // store filenames for possible bundle unpersistence for (Object o : sources) { oldSources.add((String) o); } // time zone for product labels SimpleTimeZone stz = new SimpleTimeZone(0, "GMT"); sdf.setTimeZone(stz); sdfOut.setTimeZone(stz); // looking to populate 3 things - path to lat, path to lon, path to relevant products String pathToLat = null; String pathToLon = null; Set<String> pathToProducts = new LinkedHashSet<>(); Map<String, String> prodToDesc = new HashMap<>(); // flag to differentiate VIIRS from one of the other Suomi sensors boolean isVIIRS = true; // check source filenames to see if this is a combined product. everything // from last file separator to first underscore should be product info int lastSeparator = filename.lastIndexOf(File.separatorChar); int firstUnderscore = filename.indexOf("_", lastSeparator + 1); String prodStr = filename.substring(lastSeparator + 1, firstUnderscore); // only do this check for NOAA data if (filename.endsWith(".h5")) { isNOAA = true; StringTokenizer st = new StringTokenizer(prodStr, "-"); logger.debug("SNPPDS check for embedded GEO, tokenizing: " + prodStr); while (st.hasMoreTokens()) { String singleProd = st.nextToken(); for (int i = 0; i < JPSSUtilities.geoProductIDs.length; i++) { if (singleProd.equals(JPSSUtilities.geoProductIDs[i])) { logger.debug("Setting isCombinedProduct true, Found embedded GEO: " + singleProd); isCombinedProduct = true; break; } } } } // various metatdata we'll need to gather on a per-product basis Map<String, String> unsignedFlags = new LinkedHashMap<>(); Map<String, String> unpackFlags = new LinkedHashMap<>(); // geo product IDs for each granule Set<String> geoProductIDs = new LinkedHashSet<>(); // aggregations will use sets of NetCDFFile readers List<NetCDFFile> ncdfal = new ArrayList<>(); // we should be able to find an XML Product Profile for each data/product type SuomiNPPProductProfile nppPP = null; // and also Profile metadata for geolocation variables boolean haveGeoMetaData = false; // number of source granules which make up the data source int granuleCount = 1; try { nppPP = new SuomiNPPProductProfile(); // for each source file provided, find the appropriate geolocation, // get the nominal time and various other granule-level metadata Iterator keyIterator = filenameMap.keySet().iterator(); while (keyIterator.hasNext()) { String keyStr = (String) keyIterator.next(); List fileNames = (List) filenameMap.get(keyStr); granuleCount = fileNames.size(); setProperty(Constants.PROP_GRANULE_COUNT, granuleCount + " Granule"); for (int fileCount = 0; fileCount < granuleCount; fileCount++) { // need to open the main NetCDF file to determine the geolocation product NetcdfFile ncfile = null; String fileAbsPath = null; try { fileAbsPath = (String) fileNames.get(fileCount); logger.debug("Trying to open file: " + fileAbsPath); ncfile = NetcdfFile.open(fileAbsPath); if (!isCombinedProduct) { if (isNOAA) { Attribute a = ncfile.findGlobalAttribute("N_GEO_Ref"); logger.debug("Value of GEO global attribute: " + a.getStringValue()); String tmpGeoProductID = a.getStringValue(); geoProductIDs.add(tmpGeoProductID); } else { geoProductIDs.add(keyStr.replace("L1B", "GEO")); } } Group rg = ncfile.getRootGroup(); List<Group> gl = rg.getGroups(); if (gl != null) { for (Group g : gl) { logger.trace("Group name: " + g.getFullName()); if (isNOAA) { // when we find the Data_Products group, go down another group level and 
pull out // what we will use for nominal day and time (for now anyway). // XXX TJJ fileCount check is so we don't count the GEO file in time array! if (g.getFullName().contains("Data_Products") && (fileCount != fileNames.size())) { List<Group> dpg = g.getGroups(); // cycle through once looking for XML Product Profiles for (Group subG : dpg) { String subName = subG.getFullName(); // use actual product, not geolocation, to id XML Product Profile if (!subName.contains("-GEO")) { // determine the instrument name (VIIRS, ATMS, CrIS, OMPS) instrumentName = subG.findAttribute("Instrument_Short_Name"); // note any EDR products, will need to check for and remove // fill scans later Attribute adtt = subG.findAttribute("N_Dataset_Type_Tag"); if (adtt != null) { String baseName = adtt.getStringValue(); if ((baseName != null) && (baseName.equals("EDR"))) { // have to loop through sub groups variables to determine band List<Variable> tmpVar = subG.getVariables(); for (Variable v : tmpVar) { // if Imagery EDR attribute for band is specified, save it Attribute mBand = v.findAttribute("Band_ID"); if (mBand != null) { whichEDR = mBand.getStringValue(); } } } } // This is also where we find the attribute which tells us which // XML Product Profile to use! Attribute axpp = subG.findAttribute("N_Collection_Short_Name"); if (axpp != null) { String baseName = axpp.getStringValue(); productName = baseName; // TJJ Apr 2018 // Hack so we can look at CrIS Full Spectrum, until we can // track down existence of an official Product Profile for it. // http://mcidas.ssec.wisc.edu/inquiry-v/?inquiry=2634 // The regular SDR profile lets us visualize it. if (productName.equals("CrIS-FS-SDR")) productName = "CrIS-SDR"; String productProfileFileName = nppPP .getProfileFileName(productName); logger.trace("Found profile: " + productProfileFileName); if (productProfileFileName == null) { throw new Exception( "XML Product Profile not found in catalog"); } try { nppPP.addMetaDataFromFile(productProfileFileName); } catch (Exception nppppe) { logger.error("Error parsing XML Product Profile: " + productProfileFileName); throw new Exception("XML Product Profile Error", nppppe); } } } } // 2nd pass through sub-group to extract date/time for aggregation for (Group subG : dpg) { List<Variable> vl = subG.getVariables(); for (Variable v : vl) { Attribute aDate = v.findAttribute("AggregateBeginningDate"); Attribute aTime = v.findAttribute("AggregateBeginningTime"); // did we find the attributes we are looking for? 
if ((aDate != null) && (aTime != null)) { // set time for display to day/time of 1st granule examined if (!nameHasBeenSet) { String sDate = aDate.getStringValue(); String sTime = aTime.getStringValue(); logger.debug("For day/time, using: " + sDate + sTime.substring(0, sTime.indexOf('Z') - 3)); Date d = sdf.parse( sDate + sTime.substring(0, sTime.indexOf('Z') - 3)); theDate = d; setName(instrumentName.getStringValue() + " " + sdfOut.format(d)); nameHasBeenSet = true; } break; } } } if (!nameHasBeenSet) { throw new VisADException("No date time found in Suomi NPP granule"); } } } else { // NASA data - date/time from global attribute // set time for display to day/time of 1st granule examined Attribute timeStartNASA = ncfile.findGlobalAttribute("time_coverage_start"); Date d = sdfNASA.parse(timeStartNASA.getStringValue()); theDate = d; if (!nameHasBeenSet) { instrumentName = ncfile.findGlobalAttribute("instrument"); setName(instrumentName.getStringValue() + " " + sdfOut.format(d)); nameHasBeenSet = true; } } } } } catch (Exception e) { logger.warn("Exception during processing of file: " + fileAbsPath); throw (e); } finally { ncfile.close(); } } } // build each union aggregation element Iterator<String> iterator = geoProductIDs.iterator(); for (int elementNum = 0; elementNum < granuleCount; elementNum++) { String s = null; // build an XML (NCML actually) representation of the union aggregation of these two files Namespace ns = Namespace.getNamespace("http://www.unidata.ucar.edu/namespaces/netcdf/ncml-2.2"); Element root = new Element("netcdf", ns); Document document = new Document(root); Element agg = new Element("aggregation", ns); agg.setAttribute("type", "union"); // TJJ - Loop over filename map, could be several products that need to be aggregated Set set = filenameMap.keySet(); Iterator mapIter = set.iterator(); while (mapIter.hasNext()) { String key = (String) mapIter.next(); List l = (List) filenameMap.get(key); Element fData = new Element("netcdf", ns); fData.setAttribute("location", (String) l.get(elementNum)); agg.addContent(fData); s = (String) l.get(elementNum); } String geoFilename = null; Element fGeo = new Element("netcdf", ns); ; if (!isCombinedProduct) { if (isNOAA) { geoFilename = s.substring(0, s.lastIndexOf(File.separatorChar) + 1); // check if we have the whole file name or just the prefix String geoProductID = iterator.next(); if (geoProductID.endsWith("h5")) { geoFilename += geoProductID; } else { geoFilename += geoProductID; geoFilename += s.substring(s.lastIndexOf(File.separatorChar) + 6); } // Be sure file as specified by N_GEO_Ref global attribute really is there. File tmpGeo = new File(geoFilename); if (!tmpGeo.exists()) { // Ok, the expected file defined (supposedly) exactly by a global att is not there... 
// We need to check for similar geo files with different creation dates String geoFileRelative = geoFilename .substring(geoFilename.lastIndexOf(File.separatorChar) + 1); // also check for Terrain Corrected version of geo String geoTerrainCorrected = geoFileRelative; geoTerrainCorrected = geoTerrainCorrected.replace("OD", "TC"); geoTerrainCorrected = geoTerrainCorrected.replace("MG", "TC"); // now we make a file filter, and see if a matching geo file is present File fList = new File( geoFilename.substring(0, geoFilename.lastIndexOf(File.separatorChar) + 1)); // current directory FilenameFilter geoFilter = new FilenameFilter() { public boolean accept(File dir, String name) { if (name.matches(JPSSUtilities.SUOMI_GEO_REGEX_NOAA)) { return true; } else { return false; } } }; File[] files = fList.listFiles(geoFilter); for (File file : files) { if (file.isDirectory()) { continue; } // get the file name for convenience String fName = file.getName(); // is it one of the standard Ellipsoid geo types we are looking for? if (fName.substring(0, 5).equals(geoFileRelative.substring(0, 5))) { int geoStartIdx = geoFileRelative.indexOf("_d"); int prdStartIdx = fName.indexOf("_d"); String s1 = geoFileRelative.substring(geoStartIdx, geoStartIdx + JPSSUtilities.NOAA_CREATION_DATE_INDEX); String s2 = fName.substring(prdStartIdx, prdStartIdx + JPSSUtilities.NOAA_CREATION_DATE_INDEX); if (s1.equals(s2)) { geoFilename = s.substring(0, s.lastIndexOf(File.separatorChar) + 1) + fName; break; } } // same check, but for Terrain Corrected version if (fName.substring(0, 5).equals(geoTerrainCorrected.substring(0, 5))) { int geoStartIdx = geoTerrainCorrected.indexOf("_d"); int prdStartIdx = fName.indexOf("_d"); String s1 = geoTerrainCorrected.substring(geoStartIdx, geoStartIdx + JPSSUtilities.NOAA_CREATION_DATE_INDEX); String s2 = fName.substring(prdStartIdx, prdStartIdx + JPSSUtilities.NOAA_CREATION_DATE_INDEX); if (s1.equals(s2)) { geoFilename = s.substring(0, s.lastIndexOf(File.separatorChar) + 1) + fName; break; } } } } } else { // NASA format geoFilename = JPSSUtilities.replaceLast(s, "L1B", "GEO"); // get list of files in current directory File fList = new File( geoFilename.substring(0, geoFilename.lastIndexOf(File.separatorChar) + 1)); // make a NASA style file filter, and see if a matching geo file is present FilenameFilter geoFilter = new FilenameFilter() { public boolean accept(File dir, String name) { if (name.matches(JPSSUtilities.SUOMI_GEO_REGEX_NASA)) { return true; } else { return false; } } }; File[] files = fList.listFiles(geoFilter); for (File file : files) { if (file.isDirectory()) { continue; } // get the file name for convenience String fName = file.getName(); String tmpStr = geoFilename.substring(s.lastIndexOf(File.separatorChar) + 1, s.lastIndexOf(File.separatorChar) + (JPSSUtilities.NASA_CREATION_DATE_INDEX + 1)); if (fName.substring(0, JPSSUtilities.NASA_CREATION_DATE_INDEX) .equals(tmpStr.substring(0, JPSSUtilities.NASA_CREATION_DATE_INDEX))) { geoFilename = s.substring(0, s.lastIndexOf(File.separatorChar) + 1) + fName; break; } } } logger.debug("Determined GEO file name should be: " + geoFilename); fGeo.setAttribute("location", geoFilename); // add this to list used if we create a zipped bundle geoSources.add(geoFilename); agg.addContent(fGeo); } root.addContent(agg); XMLOutputter xmlOut = new XMLOutputter(); String ncmlStr = xmlOut.outputString(document); ByteArrayInputStream is = new ByteArrayInputStream(ncmlStr.getBytes()); MultiDimensionReader netCDFReader = new NetCDFFile(is); // let's try 
and look through the NetCDF reader and see what we can learn... NetcdfFile ncdff = ((NetCDFFile) netCDFReader).getNetCDFFile(); Group rg = ncdff.getRootGroup(); // this is a list filled with unpacked qflag products, if any ArrayList<VariableDS> qfProds = new ArrayList<VariableDS>(); // this is a list filled with pseudo Brightness Temp variables converted from Radiance ArrayList<VariableDS> btProds = new ArrayList<VariableDS>(); List<Group> gl = rg.getGroups(); if (gl != null) { int xDimNASA = -1; int yDimNASA = -1; // Make a first pass to determine the shape of the geolocation data for (Group g : gl) { if (g.getFullName().contains("geolocation_data")) { List<Variable> vl = g.getVariables(); for (Variable v : vl) { if (v.getShortName().equals("latitude")) { // XXX TJJ Nov 2015 // Hack because fill value in attribute does not match // what I am seeing in the data. Attribute fillAtt = new Attribute("_FillValue", -999.0); v.addAttribute(fillAtt); pathToLat = v.getFullName(); pathToProducts.add(v.getFullName()); prodToDesc.put(v.getFullName(), v.getDescription()); xDimNASA = v.getDimension(0).getLength(); yDimNASA = v.getDimension(1).getLength(); } if (v.getShortName().equals("longitude")) { // XXX TJJ Nov 2015 // Hack because fill value in attribute does not match // what I am seeing in the data. Attribute fillAtt = new Attribute("_FillValue", -999.0); v.addAttribute(fillAtt); pathToLon = v.getFullName(); pathToProducts.add(v.getFullName()); prodToDesc.put(v.getFullName(), v.getDescription()); } } } } for (Group g : gl) { logger.debug("Group name: " + g.getFullName()); // NASA only - looking through observation_data and geolocation_data if (g.getFullName().contains("observation_data")) { List<Variable> vl = g.getVariables(); for (Variable v : vl) { // keep any data which matches geolocation dimensions if (v.getDimension(0).getLength() == xDimNASA && v.getDimension(1).getLength() == yDimNASA) { logger.debug("Adding product: " + v.getFullName()); pathToProducts.add(v.getFullName()); prodToDesc.put(v.getFullName(), v.getDescription()); Attribute aUnsigned = v.findAttribute("_Unsigned"); if (aUnsigned != null) { unsignedFlags.put(v.getFullName(), aUnsigned.getStringValue()); } else { unsignedFlags.put(v.getFullName(), "false"); } // store units in a map for later Attribute unitAtt = v.findAttribute("units"); if (unitAtt != null) { unitsNASA.put(v.getShortName(), unitAtt.getStringValue()); } else { unitsNASA.put(v.getShortName(), "Unknown"); } // TJJ Nov 2018 - SIPS V2+ mods // Regridding with bow-tie interpolation wasn't working since there are // now multiple fill value categories and we need to look specifically // for the bowtie deletion flag Attribute longNameAtt = v.findAttribute("long_name"); String longName = "empty"; if (longNameAtt != null) longName = longNameAtt.getStringValue(); if (longName.contains("reflectance") || longName.contains("radiance")) { Attribute flagMeanings = v .findAttribute(JPSSUtilities.SIPS_FLAG_MEANINGS_ATTRIBUTE); // If this is not null, we must be v2.0.0 or higher if (flagMeanings != null) { String meanings = flagMeanings.getStringValue(); // Tokenize meanings string, multiple flags defined there StringTokenizer st = new StringTokenizer(meanings); int bowtieIdx = -1; boolean foundBowTieAttribute = false; String tokStr = null; while (st.hasMoreTokens()) { tokStr = st.nextToken(); bowtieIdx++; if (tokStr.equals(JPSSUtilities.SIPS_BOWTIE_DELETED_FLAG)) { foundBowTieAttribute = true; break; } } if (foundBowTieAttribute) { Attribute flagValues = v 
.findAttribute(JPSSUtilities.SIPS_FLAG_VALUES_ATTRIBUTE); Array flagValsArr = flagValues.getValues(); int bowTieVal = (int) flagValsArr.getInt(bowtieIdx); Attribute a1 = new Attribute("_FillValue", bowTieVal); v.addAttribute(a1); } } } // TJJ Feb 2016 - Create BT variables where applicable if ((v.getShortName().matches("M12|M13|M14|M15|M16")) || (v.getShortName().matches("I04|I05"))) { // Get the LUT variable, load into primitive array Variable lut = g .findVariable(v.getShortName() + "_brightness_temperature_lut"); int[] lutShape = lut.getShape(); logger.debug("Handling NASA LUT Variable, LUT size: " + lutShape[0]); // pull out valid min, max - these will be used for our new VariableDS Attribute aVMin = lut.findAttribute("valid_min"); Attribute aVMax = lut.findAttribute("valid_max"); Attribute fillAtt = lut.findAttribute("_FillValue"); logger.debug("valid_min from LUT: " + aVMin.getNumericValue()); logger.debug("valid_max from LUT: " + aVMax.getNumericValue()); // A little hacky, but at this point the class is such a mess // that what's a little more, right? Load M12-M16, I4-I5 LUTS if (v.getShortName().matches("M12")) { m12LUT = new float[lutShape[0]]; ArrayFloat.D1 lutArray = (ArrayFloat.D1) lut.read(); for (int lutIdx = 0; lutIdx < lutShape[0]; lutIdx++) { m12LUT[lutIdx] = lutArray.get(lutIdx); } } if (v.getShortName().matches("M13")) { m13LUT = new float[lutShape[0]]; ArrayFloat.D1 lutArray = (ArrayFloat.D1) lut.read(); for (int lutIdx = 0; lutIdx < lutShape[0]; lutIdx++) { m13LUT[lutIdx] = lutArray.get(lutIdx); } } if (v.getShortName().matches("M14")) { m14LUT = new float[lutShape[0]]; ArrayFloat.D1 lutArray = (ArrayFloat.D1) lut.read(); for (int lutIdx = 0; lutIdx < lutShape[0]; lutIdx++) { m14LUT[lutIdx] = lutArray.get(lutIdx); } } if (v.getShortName().matches("M15")) { m15LUT = new float[lutShape[0]]; ArrayFloat.D1 lutArray = (ArrayFloat.D1) lut.read(); for (int lutIdx = 0; lutIdx < lutShape[0]; lutIdx++) { m15LUT[lutIdx] = lutArray.get(lutIdx); } } if (v.getShortName().matches("M16")) { m16LUT = new float[lutShape[0]]; ArrayFloat.D1 lutArray = (ArrayFloat.D1) lut.read(); for (int lutIdx = 0; lutIdx < lutShape[0]; lutIdx++) { m16LUT[lutIdx] = lutArray.get(lutIdx); } } if (v.getShortName().matches("I04")) { i04LUT = new float[lutShape[0]]; ArrayFloat.D1 lutArray = (ArrayFloat.D1) lut.read(); for (int lutIdx = 0; lutIdx < lutShape[0]; lutIdx++) { i04LUT[lutIdx] = lutArray.get(lutIdx); } } if (v.getShortName().matches("I05")) { i05LUT = new float[lutShape[0]]; ArrayFloat.D1 lutArray = (ArrayFloat.D1) lut.read(); for (int lutIdx = 0; lutIdx < lutShape[0]; lutIdx++) { i05LUT[lutIdx] = lutArray.get(lutIdx); } } // Create a pseudo-variable, fill using LUT // make a copy of the source variable // NOTE: by using a VariableDS here, the original // variable is used for the I/O, this matters! 
VariableDS vBT = new VariableDS(g, v, false); // Name is orig name plus suffix vBT.setShortName(v.getShortName() + "_BT"); vBT.addAttribute(fillAtt); vBT.addAttribute(aVMin); vBT.addAttribute(aVMax); if (v.getShortName().matches("M12")) { lutMap.put(vBT.getFullName(), m12LUT); } if (v.getShortName().matches("M13")) { lutMap.put(vBT.getFullName(), m13LUT); } if (v.getShortName().matches("M14")) { lutMap.put(vBT.getFullName(), m14LUT); } if (v.getShortName().matches("M15")) { lutMap.put(vBT.getFullName(), m15LUT); } if (v.getShortName().matches("M16")) { lutMap.put(vBT.getFullName(), m16LUT); } if (v.getShortName().matches("I04")) { lutMap.put(vBT.getFullName(), i04LUT); } if (v.getShortName().matches("I05")) { lutMap.put(vBT.getFullName(), i05LUT); } pathToProducts.add(vBT.getFullName()); String newName = vBT.getDescription().replace("radiance", "brightness temperature"); prodToDesc.put(vBT.getFullName(), newName); btProds.add(vBT); } } } } if (g.getFullName().contains("geolocation_data")) { List<Variable> vl = g.getVariables(); for (Variable v : vl) { // keep any data which matches geolocation dimensions if (v.getDimension(0).getLength() == xDimNASA && v.getDimension(1).getLength() == yDimNASA) { // except we already found Lat and Lon, skip those if ((v.getShortName().equals("latitude")) || (v.getShortName().equals("latitude"))) continue; logger.debug("Adding product: " + v.getFullName()); pathToProducts.add(v.getFullName()); prodToDesc.put(v.getFullName(), v.getDescription()); } } } // NOAA only - we are looking through All_Data, finding displayable data if (g.getFullName().contains("All_Data")) { List<Group> adg = g.getGroups(); int xDim = -1; int yDim = -1; // two sub-iterations, first one to find geolocation and product dimensions for (Group subG : adg) { logger.debug("Sub group name: " + subG.getFullName()); String subName = subG.getFullName(); if (subName.contains("-GEO")) { // this is the geolocation data String geoBaseName = subG.getShortName(); geoBaseName = geoBaseName.substring(0, geoBaseName.indexOf('_')); if (!haveGeoMetaData) { String geoProfileFileName = nppPP.getProfileFileName(geoBaseName); // also add meta data from geolocation profile nppPP.addMetaDataFromFile(geoProfileFileName); haveGeoMetaData = true; } List<Variable> vl = subG.getVariables(); for (Variable v : vl) { if (v.getFullName().endsWith(SEPARATOR_CHAR + "Latitude")) { pathToLat = v.getFullName(); logger.debug("Ellipsoid Lat/Lon Variable: " + v.getFullName()); // get the dimensions of the lat variable Dimension dAlongTrack = v.getDimension(0); yDim = dAlongTrack.getLength(); Dimension dAcrossTrack = v.getDimension(1); xDim = dAcrossTrack.getLength(); logger.debug("Lat across track dim: " + dAcrossTrack.getLength()); } if (v.getFullName().endsWith(SEPARATOR_CHAR + "Longitude")) { // we got dimensions from lat, don't need 'em twice, but need path pathToLon = v.getFullName(); } } // one more pass in case there is terrain-corrected Lat/Lon for (Variable v : vl) { if (v.getFullName().endsWith(SEPARATOR_CHAR + "Latitude_TC")) { pathToLat = v.getFullName(); logger.debug("Switched Lat/Lon Variable to TC: " + v.getFullName()); // get the dimensions of the lat variable Dimension dAlongTrack = v.getDimension(0); yDim = dAlongTrack.getLength(); Dimension dAcrossTrack = v.getDimension(1); xDim = dAcrossTrack.getLength(); logger.debug("Lat across track dim: " + dAcrossTrack.getLength()); } if (v.getFullName().endsWith(SEPARATOR_CHAR + "Longitude_TC")) { // we got dimensions from lat, don't need 'em twice, but need path 
pathToLon = v.getFullName(); } } } } // second to identify displayable products for (Group subG : adg) { // this is the product data List<Variable> vl = subG.getVariables(); for (Variable v : vl) { boolean useThis = false; String vName = v.getFullName(); logger.trace("Variable: " + vName); String varShortName = vName.substring(vName.lastIndexOf(SEPARATOR_CHAR) + 1); // Special code to handle quality flags. We throw out anything // that does not match bounds of the geolocation data if (varShortName.startsWith("QF")) { logger.trace("Handling Quality Flag: " + varShortName); // this check is done later for ALL variables, but we need // it early here to weed out those quality flags that are // simply a small set of data w/no granule geo nbounds boolean xScanOk = false; boolean yScanOk = false; List<Dimension> dl = v.getDimensions(); // toss out > 2D Quality Flags if (dl.size() > 2) { logger.trace("SKIPPING QF, > 2D: " + varShortName); continue; } for (Dimension d : dl) { // in order to consider this a displayable product, make sure // both scan direction dimensions are present and look like a granule if (d.getLength() == xDim) { xScanOk = true; } if (d.getLength() == yDim) { yScanOk = true; } } if (!(xScanOk && yScanOk)) { logger.trace("SKIPPING QF, does not match geo bounds: " + varShortName); continue; } ArrayList<QualityFlag> qfal = nppPP.getQualityFlags(varShortName); if (qfal != null) { for (QualityFlag qf : qfal) { qf.setPackedName(vName); // make a copy of the qflag variable // NOTE: by using a VariableDS here, the original // variable is used for the I/O, this matters! VariableDS vqf = new VariableDS(subG, v, false); // prefix with QF num to help guarantee uniqueness across groups // this will cover most cases, but could still be dupe names // within a single QF. 
This is handled when fetching XMLPP metadata vqf.setShortName(varShortName.substring(0, 3) + "_" + qf.getName()); logger.debug("New QF var full name: " + vqf.getFullName()); qfProds.add(vqf); qfMap.put(vqf.getFullName(), qf); } } } // for CrIS instrument, first find dimensions of var matching // CrIS filter, then throw out all variables which don't match // those dimensions if (instrumentName.getStringValue().equals("CrIS")) { if (!vName.contains("GEO")) { if (!varShortName.startsWith(crisFilter)) { logger.trace("Skipping variable: " + varShortName); continue; } } else { // these variables are all GEO-related // if they match lat/lon bounds, keep them List<Dimension> dl = v.getDimensions(); if (dl.size() == 3) { boolean isDisplayableCrIS = true; for (Dimension d : dl) { if ((d.getLength() != xDim) && (d.getLength() != yDim) && (d.getLength() != 9)) { isDisplayableCrIS = false; } } if (!isDisplayableCrIS) { continue; } } } } DataType dt = v.getDataType(); if ((dt.getSize() != 4) && (dt.getSize() != 2) && (dt.getSize() != 1)) { continue; } List<Dimension> dl = v.getDimensions(); if (dl.size() > 4) { continue; } // for now, skip any 3D VIIRS data if (instrumentName.getStringValue().equals("VIIRS")) { if (dl.size() == 3) { continue; } } boolean xScanOk = false; boolean yScanOk = false; for (Dimension d : dl) { // in order to consider this a displayable product, make sure // both scan direction dimensions are present and look like a granule if (d.getLength() == xDim) { xScanOk = true; } if (d.getLength() == yDim) { yScanOk = true; } } if (xScanOk && yScanOk) { useThis = true; } // For ATMS, only 3-D variable we pass through is BrightnessTemperature // Dimensions for BT are (lon, lat, channel) if (instrumentName.getStringValue().equals("ATMS")) { if (dl.size() == 3) { boolean isDisplayableATMS = false; for (Dimension d : dl) { if (d.getLength() == JPSSUtilities.ATMSChannelCenterFrequencies.length) { isDisplayableATMS = true; logger.trace( "This variable has a dimension matching num ATMS channels"); break; } } if (!isDisplayableATMS) useThis = false; } } // sensor data with a channel dimension if (useThis) { if ((instrumentName.getStringValue().equals("CrIS")) || (instrumentName.getStringValue().equals("ATMS")) || (instrumentName.getStringValue().contains("OMPS"))) { isVIIRS = false; logger.debug("Handling non-VIIRS data source..."); } } if (useThis) { // loop through the variable list again, looking for a corresponding "Factors" float scaleVal = 1f; float offsetVal = 0f; boolean unpackFlag = false; // if the granule has an entry for this variable name // get the data, data1 = scale, data2 = offset // create and poke attributes with this data // endif String factorsVarName = nppPP.getScaleFactorName(varShortName); if (factorsVarName != null) { logger.debug("Mapping: " + varShortName + " to: " + factorsVarName); for (Variable fV : vl) { if (fV.getShortName().equals(factorsVarName)) { logger.trace("Pulling scale and offset values from variable: " + fV.getShortName()); ucar.ma2.Array a = fV.read(); float[] so = (float[]) a.copyTo1DJavaArray(); scaleVal = so[0]; offsetVal = so[1]; logger.trace("Scale value: " + scaleVal + ", Offset value: " + offsetVal); unpackFlag = true; break; } } } // poke in scale/offset attributes for now Attribute a1 = new Attribute("scale_factor", scaleVal); v.addAttribute(a1); Attribute a2 = new Attribute("add_offset", offsetVal); v.addAttribute(a2); // add valid range and fill value attributes here // try to fill in valid range if (nppPP.hasNameAndMetaData(varShortName)) { 
String rangeMin = nppPP.getRangeMin(varShortName); String rangeMax = nppPP.getRangeMax(varShortName); logger.trace("range min: " + rangeMin + ", range max: " + rangeMax); // only store range attribute if VALID range found if ((rangeMin != null) && (rangeMax != null)) { int[] shapeArr = new int[] { 2 }; ArrayFloat af = new ArrayFloat(shapeArr); try { af.setFloat(0, Float.parseFloat(rangeMin)); } catch (NumberFormatException nfe) { af.setFloat(0, new Float(Integer.MIN_VALUE)); } try { af.setFloat(1, Float.parseFloat(rangeMax)); } catch (NumberFormatException nfe) { af.setFloat(1, new Float(Integer.MAX_VALUE)); } Attribute rangeAtt = new Attribute("valid_range", af); v.addAttribute(rangeAtt); } // check for and load fill values too... // we need to check two places, first, the XML product profile ArrayList<Float> fval = nppPP.getFillValues(varShortName); // 2nd, does the variable already have one defined? // if there was already a fill value associated with this variable, make // sure we bring that along for the ride too... Attribute aFill = v.findAttribute("_FillValue"); // determine size of our fill value array int fvArraySize = 0; if (aFill != null) fvArraySize++; if (!fval.isEmpty()) fvArraySize += fval.size(); int[] fillShape = new int[] { fvArraySize }; // allocate the array ArrayFloat afFill = new ArrayFloat(fillShape); // and FINALLY, fill it! if (!fval.isEmpty()) { for (int fillIdx = 0; fillIdx < fval.size(); fillIdx++) { afFill.setFloat(fillIdx, fval.get(fillIdx)); logger.trace( "Adding fill value (from XML): " + fval.get(fillIdx)); } } if (aFill != null) { Number n = aFill.getNumericValue(); // is the data unsigned? Attribute aUnsigned = v.findAttribute("_Unsigned"); float fillValAsFloat = Float.NaN; if (aUnsigned != null) { if (aUnsigned.getStringValue().equals("true")) { DataType fvdt = aFill.getDataType(); logger.trace("Data String: " + aFill.toString()); logger.trace("DataType primitive type: " + fvdt.getPrimitiveClassType()); // signed byte that needs conversion? 
if (fvdt.getPrimitiveClassType() == byte.class) { fillValAsFloat = (float) Util .unsignedByteToInt(n.byteValue()); } else if (fvdt.getPrimitiveClassType() == short.class) { fillValAsFloat = (float) Util .unsignedShortToInt(n.shortValue()); } else { fillValAsFloat = n.floatValue(); } } } afFill.setFloat(fvArraySize - 1, fillValAsFloat); logger.trace( "Adding fill value (from variable): " + fillValAsFloat); } Attribute fillAtt = new Attribute("_FillValue", afFill); v.addAttribute(fillAtt); } Attribute aUnsigned = v.findAttribute("_Unsigned"); if (aUnsigned != null) { unsignedFlags.put(v.getFullName(), aUnsigned.getStringValue()); } else { unsignedFlags.put(v.getFullName(), "false"); } if (unpackFlag) { unpackFlags.put(v.getFullName(), "true"); } else { unpackFlags.put(v.getFullName(), "false"); } logger.debug("Adding product: " + v.getFullName()); pathToProducts.add(v.getFullName()); prodToDesc.put(v.getFullName(), v.getDescription()); } } } } } } // add in any unpacked qflag products for (VariableDS qfV : qfProds) { // skip the spares - they are reserved for future use if (qfV.getFullName().endsWith("Spare")) { continue; } // String.endsWith is case sensitive so gotta check both cases if (qfV.getFullName().endsWith("spare")) { continue; } ncdff.addVariable(qfV.getGroup(), qfV); logger.trace("Adding QF product: " + qfV.getFullName()); pathToProducts.add(qfV.getFullName()); prodToDesc.put(qfV.getFullName(), qfV.getDescription()); unsignedFlags.put(qfV.getFullName(), "true"); unpackFlags.put(qfV.getFullName(), "false"); } // add in any pseudo BT products from NASA data for (Variable vBT : btProds) { logger.trace("Adding BT product: " + vBT.getFullName()); ncdff.addVariable(vBT.getGroup(), vBT); unsignedFlags.put(vBT.getFullName(), "true"); unpackFlags.put(vBT.getFullName(), "false"); } ncdfal.add((NetCDFFile) netCDFReader); } } catch (Exception e) { logger.error("cannot create NetCDF reader for files selected", e); if (e.getMessage() != null && e.getMessage().equals("XML Product Profile Error")) { throw new VisADException("Unable to extract metadata from required XML Product Profile", e); } } // TJJ Feb 2018 // Doing a reorder of variable names here, as per HP's request from // http://mcidas.ssec.wisc.edu/inquiry-v/?inquiry=2613 if (isVIIRS) { // Copy the variable Set to a sortable List List<String> sortedList = new ArrayList(pathToProducts); Collections.sort(sortedList, new VIIRSSort()); // Clear the original data structure which retains insert order // (it's a LinkedHashSet) pathToProducts.clear(); // Re-add the variables in corrected order for (String s : sortedList) { pathToProducts.add(s); } } // initialize the aggregation reader object try { if (isNOAA) { nppAggReader = new GranuleAggregation(ncdfal, pathToProducts, "Track", "XTrack", isVIIRS); ((GranuleAggregation) nppAggReader).setQfMap(qfMap); } else { nppAggReader = new GranuleAggregation(ncdfal, pathToProducts, "number_of_lines", "number_of_pixels", isVIIRS); ((GranuleAggregation) nppAggReader).setLUTMap(lutMap); } } catch (Exception e) { throw new VisADException("Unable to initialize aggregation reader", e); } // make sure we found valid data if (pathToProducts.size() == 0) { throw new VisADException("No data found in files selected"); } logger.debug("Number of adapters needed: " + pathToProducts.size()); adapters = new MultiDimensionAdapter[pathToProducts.size()]; Hashtable<String, String[]> properties = new Hashtable<>(); Iterator<String> iterator = pathToProducts.iterator(); int pIdx = 0; boolean adapterCreated = false; while 
(iterator.hasNext()) { String pStr = iterator.next(); logger.debug("Working on adapter number " + (pIdx + 1) + ": " + pStr); Map<String, Object> swathTable = SwathAdapter.getEmptyMetadataTable(); Map<String, Object> spectTable = SpectrumAdapter.getEmptyMetadataTable(); swathTable.put("array_name", pStr); swathTable.put("lon_array_name", pathToLon); swathTable.put("lat_array_name", pathToLat); swathTable.put("XTrack", "XTrack"); swathTable.put("Track", "Track"); swathTable.put("geo_Track", "Track"); swathTable.put("geo_XTrack", "XTrack"); // TJJ is this even needed? Is product_name used anywhere? if (productName == null) productName = pStr.substring(pStr.indexOf(SEPARATOR_CHAR) + 1); swathTable.put("product_name", productName); swathTable.put("_mapping", prodToDesc); // array_name common to spectrum table spectTable.put("array_name", pStr); spectTable.put("product_name", productName); spectTable.put("_mapping", prodToDesc); if (!isVIIRS) { // 3D data is either ATMS, OMPS, or CrIS if ((instrumentName.getShortName() != null) && (instrumentName.getStringValue().equals("ATMS"))) { spectTable.put(SpectrumAdapter.channelIndex_name, "Channel"); swathTable.put(SpectrumAdapter.channelIndex_name, "Channel"); swathTable.put("array_dimension_names", new String[] { "Track", "XTrack", "Channel" }); swathTable.put("lon_array_dimension_names", new String[] { "Track", "XTrack" }); swathTable.put("lat_array_dimension_names", new String[] { "Track", "XTrack" }); spectTable.put("array_dimension_names", new String[] { "Track", "XTrack", "Channel" }); spectTable.put("lon_array_dimension_names", new String[] { "Track", "XTrack" }); spectTable.put("lat_array_dimension_names", new String[] { "Track", "XTrack" }); spectTable.put(SpectrumAdapter.channelType, "wavelength"); spectTable.put(SpectrumAdapter.channels_name, "Channel"); spectTable.put(SpectrumAdapter.x_dim_name, "XTrack"); spectTable.put(SpectrumAdapter.y_dim_name, "Track"); int numChannels = JPSSUtilities.ATMSChannelCenterFrequencies.length; float[] bandArray = new float[numChannels]; String[] bandNames = new String[numChannels]; for (int bIdx = 0; bIdx < numChannels; bIdx++) { bandArray[bIdx] = JPSSUtilities.ATMSChannelCenterFrequencies[bIdx]; bandNames[bIdx] = "Channel " + (bIdx + 1); } spectTable.put(SpectrumAdapter.channelValues, bandArray); spectTable.put(SpectrumAdapter.bandNames, bandNames); } else { if (instrumentName.getStringValue().equals("CrIS")) { swathTable.put("XTrack", "dim1"); swathTable.put("Track", "dim0"); swathTable.put("geo_XTrack", "dim1"); swathTable.put("geo_Track", "dim0"); swathTable.put("product_name", "CrIS_SDR"); swathTable.put(SpectrumAdapter.channelIndex_name, "dim3"); swathTable.put(SpectrumAdapter.FOVindex_name, "dim2"); spectTable.put(SpectrumAdapter.channelIndex_name, "dim3"); spectTable.put(SpectrumAdapter.FOVindex_name, "dim2"); spectTable.put(SpectrumAdapter.x_dim_name, "dim1"); spectTable.put(SpectrumAdapter.y_dim_name, "dim0"); } else if (instrumentName.getStringValue().contains("OMPS")) { spectTable.put(SpectrumAdapter.channelIndex_name, "Channel"); swathTable.put(SpectrumAdapter.channelIndex_name, "Channel"); swathTable.put("array_dimension_names", new String[] { "Track", "XTrack", "Channel" }); swathTable.put("lon_array_dimension_names", new String[] { "Track", "XTrack" }); swathTable.put("lat_array_dimension_names", new String[] { "Track", "XTrack" }); spectTable.put("array_dimension_names", new String[] { "Track", "XTrack", "Channel" }); spectTable.put("lon_array_dimension_names", new String[] { "Track", 
"XTrack" }); spectTable.put("lat_array_dimension_names", new String[] { "Track", "XTrack" }); spectTable.put(SpectrumAdapter.channelType, "wavelength"); spectTable.put(SpectrumAdapter.channels_name, "Channel"); spectTable.put(SpectrumAdapter.x_dim_name, "XTrack"); spectTable.put(SpectrumAdapter.y_dim_name, "Track"); int numChannels = 200; if (instrumentName.getStringValue().equals("OMPS-TC")) { numChannels = 260; } logger.debug("Setting up OMPS adapter, num channels: " + numChannels); float[] bandArray = new float[numChannels]; String[] bandNames = new String[numChannels]; for (int bIdx = 0; bIdx < numChannels; bIdx++) { bandArray[bIdx] = bIdx; bandNames[bIdx] = "Channel " + (bIdx + 1); } spectTable.put(SpectrumAdapter.channelValues, bandArray); spectTable.put(SpectrumAdapter.bandNames, bandNames); } else { // sorry, if we can't id the instrument, we can't display the data! throw new VisADException("Unable to determine instrument name"); } } } else { swathTable.put("array_dimension_names", new String[] { "Track", "XTrack" }); swathTable.put("lon_array_dimension_names", new String[] { "Track", "XTrack" }); swathTable.put("lat_array_dimension_names", new String[] { "Track", "XTrack" }); } swathTable.put("scale_name", "scale_factor"); swathTable.put("offset_name", "add_offset"); swathTable.put("fill_value_name", "_FillValue"); swathTable.put("range_name", pStr.substring(pStr.indexOf(SEPARATOR_CHAR) + 1)); spectTable.put("range_name", pStr.substring(pStr.indexOf(SEPARATOR_CHAR) + 1)); // set the valid range hash if data is available if (nppPP != null) { if (nppPP.getRangeMin(pStr.substring(pStr.lastIndexOf(SEPARATOR_CHAR) + 1)) != null) { swathTable.put("valid_range", "valid_range"); } } String unsignedAttributeStr = unsignedFlags.get(pStr); if ((unsignedAttributeStr != null) && (unsignedAttributeStr.equals("true"))) { swathTable.put("unsigned", unsignedAttributeStr); } String unpackFlagStr = unpackFlags.get(pStr); if ((unpackFlagStr != null) && (unpackFlagStr.equals("true"))) { swathTable.put("unpack", "true"); } // For Suomi NPP data, do valid range check AFTER applying scale/offset swathTable.put("range_check_after_scaling", "true"); // pass in a GranuleAggregation reader... 
if (!isVIIRS) { if (instrumentName.getStringValue().equals("ATMS")) { adapters[pIdx] = new SwathAdapter(nppAggReader, swathTable); adapterCreated = true; SpectrumAdapter sa = new SpectrumAdapter(nppAggReader, spectTable); DataCategory.createCategory("MultiSpectral"); categories = DataCategory.parseCategories("MultiSpectral;MultiSpectral;IMAGE"); MultiSpectralData msd = new MultiSpectralData((SwathAdapter) adapters[pIdx], sa, "BrightnessTemperature", "BrightnessTemperature", "SuomiNPP", "ATMS"); msd.setInitialWavenumber(JPSSUtilities.ATMSChannelCenterFrequencies[0]); multiSpectralData.add(msd); } if (instrumentName.getStringValue().equals("CrIS")) { if (pStr.contains(crisFilter)) { adapters[pIdx] = new CrIS_SDR_SwathAdapter(nppAggReader, swathTable); adapterCreated = true; CrIS_SDR_Spectrum csa = new CrIS_SDR_Spectrum(nppAggReader, spectTable); DataCategory.createCategory("MultiSpectral"); categories = DataCategory.parseCategories("MultiSpectral;MultiSpectral;IMAGE"); MultiSpectralData msd = new CrIS_SDR_MultiSpectralData( (CrIS_SDR_SwathAdapter) adapters[pIdx], csa); msd.setInitialWavenumber(csa.getInitialWavenumber()); msd_CrIS.add(msd); } } if (instrumentName.getStringValue().contains("OMPS")) { adapters[pIdx] = new SwathAdapter(nppAggReader, swathTable); adapterCreated = true; SpectrumAdapter sa = new SpectrumAdapter(nppAggReader, spectTable); DataCategory.createCategory("MultiSpectral"); categories = DataCategory.parseCategories("MultiSpectral;MultiSpectral;IMAGE"); MultiSpectralData msd = new MultiSpectralData((SwathAdapter) adapters[pIdx], sa, "RadianceEarth", "RadianceEarth", "SuomiNPP", "OMPS"); msd.setInitialWavenumber(0); multiSpectralData.add(msd); } if (pIdx == 0) { // generate default subset for ATMS and OMPS if (!instrumentName.getStringValue().equals("CrIS")) { defaultSubset = multiSpectralData.get(pIdx).getDefaultSubset(); } } } else { // setting NOAA-format units String varName = pStr.substring(pStr.indexOf(SEPARATOR_CHAR) + 1); String varShortName = pStr.substring(pStr.lastIndexOf(SEPARATOR_CHAR) + 1); String units = nppPP.getUnits(varShortName); // setting NASA-format units if (!isNOAA) { units = unitsNASA.get(varShortName); // Need to set _BT variables manually, since they are created on the fly if (varShortName.endsWith("_BT")) units = "Kelvin"; } if (units == null) units = "Unknown"; Unit u = null; try { u = Parser.parse(units); } catch (NoSuchUnitException e) { u = new DerivedUnit(units); logger.debug("Unknown units: " + units); } catch (ParseException e) { u = new DerivedUnit(units); logger.debug("Unparseable units: " + units); } // associate this variable with these units, if not done already RealType.getRealType(varName, u); adapters[pIdx] = new SwathAdapter(nppAggReader, swathTable); adapterCreated = true; if (pIdx == 0) { defaultSubset = adapters[pIdx].getDefaultSubset(); } categories = DataCategory.parseCategories("IMAGE"); } // only increment count if we created an adapter, some products are skipped if (adapterCreated) pIdx++; adapterCreated = false; } if (msd_CrIS.size() > 0) { try { MultiSpectralAggr aggr = new MultiSpectralAggr( msd_CrIS.toArray(new MultiSpectralData[msd_CrIS.size()])); aggr.setInitialWavenumber(902.25f); multiSpectralData.add(aggr); defaultSubset = ((MultiSpectralData) msd_CrIS.get(0)).getDefaultSubset(); } catch (Exception e) { logger.error("Exception: ", e); } } // Merge with pre-set properties Hashtable tmpHt = getProperties(); tmpHt.putAll(properties); setProperties(tmpHt); }
From source file:elh.eus.absa.CorpusReader.java
License:Open Source License
/**
 * print annotations in Semeval-absa 2015 format
 *
 * @param savePath string : path for the file to save the data
 * @throws ParserConfigurationException
 */
public void print2Semeval2015format(String savePath) throws ParserConfigurationException {
    DocumentBuilderFactory docFactory = DocumentBuilderFactory.newInstance();
    DocumentBuilder docBuilder = docFactory.newDocumentBuilder();

    // root elements
    org.w3c.dom.Document doc = docBuilder.newDocument();
    org.w3c.dom.Element rootElement = doc.createElement("Reviews");
    doc.appendChild(rootElement);

    for (String rev : getReviews().keySet()) {
        // review elements
        org.w3c.dom.Element review = doc.createElement("Review");
        rootElement.appendChild(review);

        // set id attribute to sentence element
        review.setAttribute("rid", rev);

        // Sentences element
        org.w3c.dom.Element sentences = doc.createElement("sentences");
        review.appendChild(sentences);

        List<String> processed = new ArrayList<String>();
        for (String sent : this.revSents.get(rev)) {
            if (processed.contains(sent)) {
                continue;
            } else {
                processed.add(sent);
            }
            //System.err.println("creating elements for sentence "+sent);

            // sentence elements
            org.w3c.dom.Element sentence = doc.createElement("sentence");
            sentences.appendChild(sentence);

            // set attribute to sentence element
            sentence.setAttribute("id", sent);

            // text element of the current sentence
            org.w3c.dom.Element text = doc.createElement("text");
            sentence.appendChild(text);
            text.setTextContent(getSentences().get(sent));

            // Opinions element
            org.w3c.dom.Element opinions = doc.createElement("Opinions");
            sentence.appendChild(opinions);

            for (Opinion op : getSentenceOpinions(sent)) {
                if (op.getCategory().equalsIgnoreCase("NULL")) {
                    continue;
                }
                // opinion elements
                org.w3c.dom.Element opinion = doc.createElement("Opinion");
                opinions.appendChild(opinion);

                // set attributes to the opinion element
                opinion.setAttribute("target", op.getTarget());
                opinion.setAttribute("category", op.getCategory());
                opinion.setAttribute("polarity", op.getPolarity());
                opinion.setAttribute("from", op.getFrom().toString());
                opinion.setAttribute("to", op.getTo().toString());
            }
        }
    }

    // write the content into xml file
    try {
        TransformerFactory transformerFactory = TransformerFactory.newInstance();
        Transformer transformer = transformerFactory.newTransformer();
        transformer.setOutputProperty(OutputKeys.OMIT_XML_DECLARATION, "no");
        transformer.setOutputProperty(OutputKeys.METHOD, "xml");
        transformer.setOutputProperty(OutputKeys.INDENT, "yes");
        transformer.setOutputProperty(OutputKeys.ENCODING, "UTF-8");
        transformer.setOutputProperty("{http://xml.apache.org/xslt}indent-amount", "4");
        DOMSource source = new DOMSource(doc);
        StreamResult result = new StreamResult(new File(savePath));
        // Output to console for testing
        //StreamResult result = new StreamResult(System.out);
        transformer.transform(source, result);
        System.err.println("File saved to run.xml");
    } catch (TransformerException e) {
        System.err.println("CorpusReader: error when trying to print generated xml result file.");
        e.printStackTrace();
    }
}
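Note that this example builds its document with org.w3c.dom, so the setAttribute calls above are the DOM method rather than JDOM2's. For comparison, the same <Opinion> element could be assembled with JDOM2 roughly as follows (the Opinion accessors are taken from the code above; the helper itself is a sketch, not part of CorpusReader):

import org.jdom2.Element;

// Sketch: a JDOM2 version of the <Opinion> element built in the DOM code above.
static Element opinionElement(Opinion op) {
    return new Element("Opinion")
            .setAttribute("target", op.getTarget())
            .setAttribute("category", op.getCategory())
            .setAttribute("polarity", op.getPolarity())
            .setAttribute("from", op.getFrom().toString())
            .setAttribute("to", op.getTo().toString());
}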
From source file:es.ucm.fdi.ac.Analysis.java
License:Open Source License
/**
 * Saves the analysis
 * @return an element that contains the whole document
 * @throws IOException
 */
public Element saveToXML() throws IOException {
    Element root = new Element("analysis");
    root.setAttribute("version", VERSION_STRING);
    root.setAttribute("created", new SimpleDateFormat("dd/MM/yyyy HH:mm").format(new Date()));
    root.addContent(sourceSet.saveToXML());

    Element annotations = new Element("annotations");
    for (Submission s : subs) {
        if (!s.getAnnotations().isEmpty()) {
            annotations.addContent(s.saveToXML());
        }
    }
    root.addContent(annotations);

    Element tests = new Element("tests");
    for (Test t : appliedTests) {
        if (!t.isCancelled()) {
            tests.addContent(t.saveToXML());
        }
    }
    root.addContent(tests);

    return root;
}
From source file:es.ucm.fdi.ac.Annotation.java
License:Open Source License
public Element saveToXML() throws IOException {
    Element element = new Element("annotation");

    // Add labels attribute
    StringBuilder labelsString = new StringBuilder();
    for (Label l : labels) {
        labelsString.append(l.toString());
        labelsString.append(" ");
    }
    element.setAttribute("labels", labelsString.toString().trim());

    // Add comment
    if (commentary != null) {
        element.setText(commentary.trim());
    }

    // Add optional attributes
    if (author != null) {
        element.setAttribute("author", author.trim());
    }
    if (date != null) {
        element.setAttribute("date", dateFormat.format(date));
    }
    if (target != null) {
        element.setAttribute("target", target);
    }
    if (localFile != null) {
        element.setAttribute("localFile", localFile);
    }
    if (targetFile != null) {
        element.setAttribute("targetFile", targetFile);
    }

    return element;
}
From source file:es.ucm.fdi.ac.extract.CompositeFilter.java
License:Open Source License
public void saveInner(Element e) throws IOException {
    e.setAttribute("operation", op.toString().toLowerCase());
    // Add child filters
    for (FileTreeFilter filter : filters) {
        e.addContent(filter.saveToXML());
    }
}
From source file:es.ucm.fdi.ac.extract.FileTreeFilter.java
License:Open Source License
public Element saveToXML() throws IOException {
    Element filterElement = new Element("filter");
    filterElement.setAttribute("class", this.getClass().getName());
    saveInner(filterElement);
    return filterElement;
}
From source file:es.ucm.fdi.ac.extract.FileTypeFilter.java
License:Open Source License
@Override
public void saveInner(Element e) throws IOException {
    e.setAttribute("type", type.name());
}
From source file:es.ucm.fdi.ac.extract.PatternFilter.java
License:Open Source License
@Override
public void saveInner(Element e) throws IOException {
    e.setAttribute("class", this.getClass().getName());
    e.setText(getPattern().trim());
}
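The es.ucm.fdi.ac.extract examples above all share one persistence pattern: saveToXML() on the base filter class records the concrete class name as an attribute and then delegates to a saveInner() hook that each subclass overrides to add its own attributes. A condensed, hypothetical version of that pattern (not the project's actual classes) looks like this:

import java.io.IOException;
import org.jdom2.Element;

abstract class Saveable {
    // The base class records which concrete subclass produced the element...
    public Element saveToXML() throws IOException {
        Element e = new Element("filter");
        e.setAttribute("class", getClass().getName());
        saveInner(e);
        return e;
    }

    // ...and each subclass adds its own attributes.
    protected abstract void saveInner(Element e) throws IOException;
}

class ExtensionFilter extends Saveable {
    private final String extension;

    ExtensionFilter(String extension) {
        this.extension = extension;
    }

    @Override
    protected void saveInner(Element e) throws IOException {
        e.setAttribute("extension", extension);
    }
}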