List of usage examples for java.io.StreamTokenizer.TT_EOL

public static final int TT_EOL

A constant indicating that the end of a line has been read. The ttype field can only take this value if eolIsSignificant(true) has been called on the tokenizer.
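Before the full examples below, a minimal, self-contained sketch (written for this page, not taken from the sources that follow) showing when nextToken() actually returns TT_EOL: only after eolIsSignificant(true) has been enabled.

import java.io.IOException;
import java.io.StreamTokenizer;
import java.io.StringReader;

public class CountLines {
    public static void main(String[] args) throws IOException {
        StreamTokenizer st = new StreamTokenizer(new StringReader("a b\nc d\ne"));
        st.eolIsSignificant(true); // otherwise line terminators are treated as ordinary whitespace
        int lines = 1;
        int token;
        while ((token = st.nextToken()) != StreamTokenizer.TT_EOF) {
            if (token == StreamTokenizer.TT_EOL) {
                lines++; // nextToken() returns TT_EOL once per line terminator
            }
        }
        System.out.println("lines: " + lines); // prints "lines: 3"
    }
}

With the default eolIsSignificant(false), the same input would produce no TT_EOL tokens at all and the count would stay at 1.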
From source file:org.wso2.andes.server.security.access.config.PlainConfiguration.java
@Override
public RuleSet load() throws ConfigurationException {
    RuleSet ruleSet = super.load();

    try {
        _st = new StreamTokenizer(new BufferedReader(new FileReader(_file)));
        _st.resetSyntax(); // setup the tokenizer
        _st.commentChar(COMMENT); // single line comments
        _st.eolIsSignificant(true); // return EOL as a token
        _st.lowerCaseMode(true); // case insensitive tokens
        _st.ordinaryChar('='); // equals is a token
        _st.ordinaryChar(CONTINUATION); // continuation character (when followed by EOL)
        _st.quoteChar('"'); // double quote
        _st.quoteChar('\''); // single quote
        _st.whitespaceChars('\u0000', '\u0020'); // whitespace (to be ignored) TODO properly
        _st.wordChars('a', 'z'); // unquoted token characters [a-z]
        _st.wordChars('A', 'Z'); // [A-Z]
        _st.wordChars('0', '9'); // [0-9]
        _st.wordChars('_', '_'); // underscore
        _st.wordChars('-', '-'); // dash
        _st.wordChars('.', '.'); // dot
        _st.wordChars('*', '*'); // star
        _st.wordChars('@', '@'); // at
        _st.wordChars(':', ':'); // colon

        // parse the acl file lines
        Stack<String> stack = new Stack<String>();
        int current;
        do {
            current = _st.nextToken();
            switch (current) {
                case StreamTokenizer.TT_EOF:
                case StreamTokenizer.TT_EOL:
                    if (stack.isEmpty()) {
                        break; // blank line
                    }

                    // pull out the first token from the bottom of the stack and check arguments exist
                    String first = stack.firstElement();
                    stack.removeElementAt(0);
                    if (stack.isEmpty()) {
                        throw new ConfigurationException(String.format(NOT_ENOUGH_TOKENS_MSG, getLine()));
                    }

                    // check for and parse optional initial number for ACL lines
                    Integer number = null;
                    if (StringUtils.isNumeric(first)) {
                        // set the acl number and get the next element
                        number = Integer.valueOf(first);
                        first = stack.firstElement();
                        stack.removeElementAt(0);
                    }

                    if (StringUtils.equalsIgnoreCase(ACL, first)) {
                        parseAcl(number, stack);
                    } else if (number == null) {
                        if (StringUtils.equalsIgnoreCase(GROUP, first)) {
                            parseGroup(stack);
                        } else if (StringUtils.equalsIgnoreCase(CONFIG, first)) {
                            parseConfig(stack);
                        } else {
                            throw new ConfigurationException(
                                    String.format(UNRECOGNISED_INITIAL_MSG, first, getLine()));
                        }
                    } else {
                        throw new ConfigurationException(String.format(NUMBER_NOT_ALLOWED_MSG, first, getLine()));
                    }

                    // reset stack, start next line
                    stack.clear();
                    break;
                case StreamTokenizer.TT_NUMBER:
                    stack.push(Integer.toString(Double.valueOf(_st.nval).intValue()));
                    break;
                case StreamTokenizer.TT_WORD:
                    stack.push(_st.sval); // token
                    break;
                default:
                    if (_st.ttype == CONTINUATION) {
                        int next = _st.nextToken();
                        if (next == StreamTokenizer.TT_EOL) {
                            break; // continue reading next line
                        }

                        // invalid location for continuation character (add one to line because we ate the EOL)
                        throw new ConfigurationException(String.format(PREMATURE_CONTINUATION_MSG, getLine() + 1));
                    } else if (_st.ttype == '\'' || _st.ttype == '"') {
                        stack.push(_st.sval); // quoted token
                    } else {
                        stack.push(Character.toString((char) _st.ttype)); // single character
                    }
            }
        } while (current != StreamTokenizer.TT_EOF);

        if (!stack.isEmpty()) {
            throw new ConfigurationException(String.format(PREMATURE_EOF_MSG, getLine()));
        }
    } catch (IllegalArgumentException iae) {
        throw new ConfigurationException(String.format(PARSE_TOKEN_FAILED_MSG, getLine()), iae);
    } catch (FileNotFoundException fnfe) {
        throw new ConfigurationException(String.format(CONFIG_NOT_FOUND_MSG, getFile().getName()), fnfe);
    } catch (IOException ioe) {
        throw new ConfigurationException(String.format(CANNOT_LOAD_MSG, getFile().getName()), ioe);
    }

    return ruleSet;
}
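The least obvious part of the example above is the continuation handling in the default branch: the continuation character is only legal when the very next token is TT_EOL. A stripped-down, hypothetical sketch of just that check (the CONT constant and the sample input are illustrative, not from the WSO2 source):

import java.io.IOException;
import java.io.StreamTokenizer;
import java.io.StringReader;

public class ContinuationDemo {

    private static final char CONT = '\\'; // hypothetical continuation character

    public static void main(String[] args) throws IOException {
        StreamTokenizer st = new StreamTokenizer(new StringReader("acl allow \\\nadmin all"));
        st.eolIsSignificant(true); // EOL must come back as a token for the check below
        st.ordinaryChar(CONT);     // the continuation character becomes a one-character token
        int token;
        while ((token = st.nextToken()) != StreamTokenizer.TT_EOF) {
            if (st.ttype == CONT) {
                // a continuation character is only valid immediately before the end of a line
                if (st.nextToken() != StreamTokenizer.TT_EOL) {
                    throw new IOException("continuation character not at end of line");
                }
                continue; // swallow the EOL so the two physical lines read as one logical line
            }
            if (token == StreamTokenizer.TT_WORD) {
                System.out.println(st.sval);
            }
        }
    }
}

Swallowing the TT_EOL after the continuation character is what joins the two physical lines into one logical statement; any other token in that position is reported as an error, as PlainConfiguration does with PREMATURE_CONTINUATION_MSG.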
From source file:uk.ac.leeds.ccg.andyt.projects.fluvialglacial.SlopeAreaAnalysis.java
protected TreeMap<Integer, Object[]> readSwissData(File fileIn) {
    TreeMap<Integer, Object[]> result;
    result = new TreeMap<Integer, Object[]>();
    BufferedReader br;
    br = Generic_StaticIO.getBufferedReader(fileIn);
    StreamTokenizer st;
    st = new StreamTokenizer(br);
    Generic_StaticIO.setStreamTokenizerSyntax5(st);
    st.wordChars('(', '(');
    st.wordChars(')', ')');
    st.wordChars('%', '%');
    Generic_StaticIO.skipline(st);
    int token;
    String line = "";
    String[] fields;
    try {
        token = st.nextToken();
        int ID;
        //int pointID;
        while (token != StreamTokenizer.TT_EOF) {
            switch (token) {
                case StreamTokenizer.TT_EOL:
                    //flowacc,area (km2),slope_25_(%),proglac_ID,COUNT
                    //12.11111069,0.00756944,32.33880000000,0,250631
                    fields = line.split(sComma);
                    ID = Integer.valueOf(fields[3]);
                    if (ID > 0) {
                        //BigDecimal flowacc;
                        BigDecimal area;
                        BigDecimal slope;
                        Object[] data;
                        BigDecimal maxx;
                        BigDecimal maxy;
                        BigDecimal minx;
                        BigDecimal miny;
                        data = result.get(ID);
                        ArrayList<Generic_XYNumericalData> theGeneric_XYNumericalData;
                        if (data == null) {
                            data = new Object[5];
                            theGeneric_XYNumericalData = new ArrayList<Generic_XYNumericalData>();
                            maxx = BigDecimal.ZERO;
                            maxy = BigDecimal.ZERO;
                            minx = BigDecimal.valueOf(Double.MAX_VALUE);
                            miny = BigDecimal.valueOf(Double.MAX_VALUE);
                            data[0] = theGeneric_XYNumericalData;
                            data[1] = maxx;
                            data[2] = minx;
                            data[3] = maxy;
                            data[4] = miny;
                            result.put(ID, data);
                        } else {
                            theGeneric_XYNumericalData = (ArrayList<Generic_XYNumericalData>) data[0];
                            maxx = (BigDecimal) data[1];
                            minx = (BigDecimal) data[2];
                            maxy = (BigDecimal) data[3];
                            miny = (BigDecimal) data[4];
                        }
                        //pointID = Integer.valueOf(fields[4]);
                        //flowacc = new BigDecimal(fields[0]);
                        area = new BigDecimal(fields[1]);
                        if (area.compareTo(BigDecimal.ZERO) == 1) {
                            area = Generic_BigDecimal.log(10, area, 10, RoundingMode.HALF_UP);
                        } else {
                            area = BigDecimal.ZERO;
                        }
                        slope = new BigDecimal(fields[2]);
                        if (slope.compareTo(BigDecimal.ZERO) == 1) {
                            slope = Generic_BigDecimal.log(10, slope, 10, RoundingMode.HALF_UP);
                        } else {
                            slope = BigDecimal.ZERO;
                        }
                        Generic_XYNumericalData point;
                        point = new Generic_XYNumericalData(slope, area);
                        theGeneric_XYNumericalData.add(point);
                        data[0] = theGeneric_XYNumericalData;
                        data[1] = maxx.max(slope);
                        data[2] = minx.min(slope);
                        data[3] = maxy.max(area);
                        data[4] = miny.min(area);
                    }
                    break;
                case StreamTokenizer.TT_WORD:
                    line = st.sval;
                    break;
            }
            token = st.nextToken();
        }
    } catch (IOException ex) {
        Logger.getLogger(SlopeAreaAnalysis.class.getName()).log(Level.SEVERE, null, ex);
    }
    return result;
}
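readSwissData and the readAustriaData method that follows use the same pattern: the tokenizer syntax (configured by Generic_StaticIO.setStreamTokenizerSyntax5, whose details are not shown here) apparently makes each comma-separated data row arrive as a single TT_WORD token, which is held in line and only split into fields once TT_EOL is returned. A self-contained, hypothetical sketch of that pattern, with the syntax set up inline instead of by the helper:

import java.io.IOException;
import java.io.StreamTokenizer;
import java.io.StringReader;

public class RowPerTokenDemo {
    public static void main(String[] args) throws IOException {
        String csv = "12.1,0.007,32.3,0,250631\n9.5,0.004,28.1,2,250632\n";
        StreamTokenizer st = new StreamTokenizer(new StringReader(csv));
        st.resetSyntax();
        st.wordChars(33, 255);     // every printable character, including ',', is part of a word
        st.whitespaceChars(0, 32); // spaces and control characters separate tokens...
        st.eolIsSignificant(true); // ...but line terminators are reported as TT_EOL rather than skipped
        String line = "";
        int token;
        while ((token = st.nextToken()) != StreamTokenizer.TT_EOF) {
            if (token == StreamTokenizer.TT_WORD) {
                line = st.sval; // the whole row is one word because ',' is a word character
            } else if (token == StreamTokenizer.TT_EOL) {
                String[] fields = line.split(",");
                System.out.println("id=" + fields[3] + " slope=" + fields[2]);
            }
        }
    }
}

Note that line is simply overwritten on each TT_WORD, so the approach assumes exactly one word token per data row.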
From source file:uk.ac.leeds.ccg.andyt.projects.fluvialglacial.SlopeAreaAnalysis.java
protected TreeMap<Integer, Object[]> readAustriaData(File fileIn) {
    TreeMap<Integer, Object[]> result;
    result = new TreeMap<Integer, Object[]>();
    BufferedReader br;
    br = Generic_StaticIO.getBufferedReader(fileIn);
    StreamTokenizer st;
    st = new StreamTokenizer(br);
    Generic_StaticIO.setStreamTokenizerSyntax5(st);
    st.wordChars('(', '(');
    st.wordChars(')', ')');
    st.wordChars('%', '%');
    Generic_StaticIO.skipline(st);
    int token;
    String line = "";
    String[] fields;
    try {
        token = st.nextToken();
        int ID;
        //int pointID;
        while (token != StreamTokenizer.TT_EOF) {
            switch (token) {
                case StreamTokenizer.TT_EOL:
                    //flowacc,area (km2),slope_25_(%),proglac_ID,COUNT
                    //12.11111069,0.00756944,32.33880000000,0,250631
                    fields = line.split(sComma);
                    ID = Double.valueOf(fields[1]).intValue();
                    if (ID > 0) {
                        //BigDecimal flowacc;
                        BigDecimal area;
                        BigDecimal slope;
                        Object[] data;
                        BigDecimal maxx;
                        BigDecimal maxy;
                        BigDecimal minx;
                        BigDecimal miny;
                        data = result.get(ID);
                        ArrayList<Generic_XYNumericalData> theGeneric_XYNumericalData;
                        if (data == null) {
                            data = new Object[5];
                            theGeneric_XYNumericalData = new ArrayList<Generic_XYNumericalData>();
                            maxx = BigDecimal.ZERO;
                            maxy = BigDecimal.ZERO;
                            minx = BigDecimal.valueOf(Double.MAX_VALUE);
                            miny = BigDecimal.valueOf(Double.MAX_VALUE);
                            data[0] = theGeneric_XYNumericalData;
                            data[1] = maxx;
                            data[2] = minx;
                            data[3] = maxy;
                            data[4] = miny;
                            result.put(ID, data);
                        } else {
                            theGeneric_XYNumericalData = (ArrayList<Generic_XYNumericalData>) data[0];
                            maxx = (BigDecimal) data[1];
                            minx = (BigDecimal) data[2];
                            maxy = (BigDecimal) data[3];
                            miny = (BigDecimal) data[4];
                        }
                        //pointID = Integer.valueOf(fields[4]);
                        //flowacc = new BigDecimal(fields[0]);
                        area = new BigDecimal(fields[3]);
                        if (area.compareTo(BigDecimal.ZERO) == 1) {
                            area = Generic_BigDecimal.log(10, area, 10, RoundingMode.HALF_UP);
                        } else {
                            area = BigDecimal.ZERO;
                        }
                        slope = new BigDecimal(fields[2]);
                        if (slope.compareTo(BigDecimal.ZERO) == 1) {
                            slope = Generic_BigDecimal.log(10, slope, 10, RoundingMode.HALF_UP);
                        } else {
                            slope = BigDecimal.ZERO;
                        }
                        Generic_XYNumericalData point;
                        point = new Generic_XYNumericalData(slope, area);
                        theGeneric_XYNumericalData.add(point);
                        data[0] = theGeneric_XYNumericalData;
                        data[1] = maxx.max(slope);
                        data[2] = minx.min(slope);
                        data[3] = maxy.max(area);
                        data[4] = miny.min(area);
                    }
                    break;
                case StreamTokenizer.TT_WORD:
                    line = st.sval;
                    break;
            }
            token = st.nextToken();
        }
    } catch (IOException ex) {
        Logger.getLogger(SlopeAreaAnalysis.class.getName()).log(Level.SEVERE, null, ex);
    }
    return result;
}