List of usage examples for java.io StreamTokenizer TT_WORD
int TT_WORD
To view the source code for java.io StreamTokenizer TT_WORD, click the Source Link.
From source file:com.denimgroup.threadfix.framework.impl.rails.RailsModelParser.java
/**
 * Consumes one token from an attr_accessible declaration.
 * Ruby symbols (":name") are recorded as model attributes, comma
 * separators are skipped, and any other token ends the declaration.
 */
private void processAttrAccessible(int type, String stringValue, String charValue) {
    boolean isSymbol = type == StreamTokenizer.TT_WORD
            && stringValue.startsWith(":")
            && stringValue.length() > 1;
    if (isSymbol) {
        // Drop the leading ':' and keep the attribute name.
        modelAttributes.add(stringValue.substring(1));
    } else if (!",".equals(charValue)) {
        // Anything other than a separator terminates the attr_accessible list.
        currentModelState = ModelState.INIT;
    }
}
From source file:net.duckling.ddl.service.render.dml.ParseHtmlImg.java
public Map parseArgs(String argstring) throws IOException { HashMap<String, String> arglist = new HashMap<String, String>(); ////w w w. ja v a 2s . c om // Protection against funny users. // if (argstring == null) { return arglist; } StringReader in = new StringReader(argstring); StreamTokenizer tok = new StreamTokenizer(in); int type; String param = null; String value = null; tok.eolIsSignificant(true); boolean potentialEmptyLine = false; boolean quit = false; while (!quit) { String s; type = tok.nextToken(); switch (type) { case StreamTokenizer.TT_EOF: quit = true; s = null; break; case StreamTokenizer.TT_WORD: s = tok.sval; potentialEmptyLine = false; break; case StreamTokenizer.TT_EOL: quit = potentialEmptyLine; potentialEmptyLine = true; s = null; break; case StreamTokenizer.TT_NUMBER: s = Integer.toString(new Double(tok.nval).intValue()); potentialEmptyLine = false; break; case '\'': s = tok.sval; break; default: s = null; } // // Assume that alternate words on the line are // parameter and value, respectively. // if (s != null) { if (param == null) { param = s; } else { value = s; arglist.put(param, value); param = null; } } } // // Now, we'll check the body. // if (potentialEmptyLine) { StringWriter out = new StringWriter(); FileUtil.copyContents(in, out); String bodyContent = out.toString(); if (bodyContent != null) { arglist.put(PARAM_BODY, bodyContent); } } return arglist; }
From source file:eu.scape_project.pt.util.PipedArgsParser.java
/**
 * Matches (stdin >)? (command (| command)*)? (> stdout)?
 *
 * Grammar:
 *   S = STDIN > COMMANDS > STDOUT
 *   S = STDIN > > STDOUT
 *   S = COMMANDS > STDOUT
 *   S = STDIN > COMMANDS
 *   S = COMMANDS
 *   COMMANDS = COMMAND (| COMMAND)*
 *   COMMAND = TOOL ACTION PAIR*
 *   PAIR = --literal="literal"
 *   TOOL = literal
 *   ACTION = literal
 *   STDIN = literal
 *   STDOUT = literal
 *
 * Transformed with semantic attributes:
 *
 *   S = literal R
 *     synthesized attributes:
 *       : S.stdin = R.stdin
 *       : S.stdout = R.stdout
 *       : S.commands = R.commands
 *     inherited attributes:
 *       : R.x = literal
 *   R = '>' literal R2
 *     synthesized attributes:
 *       : R.stdin = R.x
 *       : R.commands = R2.commands
 *       : R.stdout = R2.stdout
 *     inherited attributes:
 *       : R2.tool = literal
 *   R = R2
 *     synthesized attributes:
 *       : R.stdin = R2.stdin
 *       : R.commands = R2.commands
 *       : R.stdout = R2.stdout
 *     inherited attributes:
 *       : R2.tool = R.x
 *   R2 = literal_1 ('-' PAIR)* ('|' COMMAND)* ('>' literal_2)?
 *     synthesized attributes:
 *       : foreach PAIR: _pairs += PAIR.pair
 *       : foreach COMMAND: _commands += COMMAND.command
 *       : R2.commands = [new Command(tool=R.tool, action=literal_1, pairs=_pairs)] + _commands
 *       : R2.stdout = literal_2
 *       : R2.stdin = ""
 *   PAIR = '-' literal '=' quoted_literal
 *     synthesized attributes:
 *       : PAIR.pair = (literal, quoted_literal)
 *   COMMAND = literal_1 literal_2 ('-' PAIR)*
 *     synthesized attributes:
 *       : COMMAND.command = new Command(tool=literal_1, action=literal_2, pairs=PAIRS.pairs)
 */
private Varbox S() throws IOException {
    // The input must open with a literal: either a quoted string or a bare word.
    int first = nextToken();
    boolean isLiteral = first == '"' || currentToken() == StreamTokenizer.TT_WORD;
    if (!isLiteral) {
        throw new IOException("input must start with a literal");
    }
    return R(tokenizer.sval);
}
From source file:com.github.lindenb.jvarkit.tools.biostar.Biostar103303.java
/**
 * Reads exon records from a GTF file at {@code uri} and populates
 * {@code exonMap} with one entry per exon interval.
 *
 * Only lines whose feature column (tokens[2]) equals "exon" are kept, and
 * only when their chromosome (tokens[0]) is present in {@code dict}.
 * Attributes in column 9 are tokenized to extract transcript_id, gene_id,
 * gene_name and exon_id. Exons are grouped per (chromosome, transcript_id),
 * sorted by start position, assigned their index, and checked for overlaps.
 *
 * @param uri  location of the GTF file (any URI openable by IOUtils)
 * @param dict sequence dictionary used to validate chromosome names
 * @throws IOException if two exons of the same transcript overlap
 */
private void readGTF(String uri, SAMSequenceDictionary dict) throws IOException {
    int count_exons = 0;
    // Chromosomes already warned about, to avoid repeating the log message.
    final Set<String> unknown = new HashSet<String>();
    LOG.info("Reading " + uri);
    final Pattern tab = Pattern.compile("[\t]");
    // Key is "chrom transcript_id" so identical transcript ids on different
    // chromosomes stay separate.
    final Map<String, GTFGene> transcript2gene = new HashMap<String, GTFGene>();
    LineIterator iter = IOUtils.openURIForLineIterator(uri);
    while (iter.hasNext()) {
        String line = iter.next();
        if (line.startsWith("#"))
            continue;
        String tokens[] = tab.split(line);
        if (tokens.length < 9)
            continue;
        if (!tokens[2].equals("exon"))
            continue;
        if (dict.getSequence(tokens[0]) == null) {
            // Unknown chromosome: warn once per name, then skip the line.
            if (!unknown.contains(tokens[0])) {
                LOG.warn("chromosome in " + line + " not in SAMSequenceDictionary ");
                unknown.add(tokens[0]);
            }
            continue;
        }
        String transcript_id = null, gene_id = null, gene_name = null, exon_id = null;
        // Tokenize the GTF attributes column (key "value"; key "value"; ...).
        StreamTokenizer st = new StreamTokenizer(new StringReader(tokens[8]));
        // Treat '_' as part of words so keys like "transcript_id" stay whole.
        st.wordChars('_', '_');
        String key = null;
        while (st.nextToken() != StreamTokenizer.TT_EOF) {
            String s = null;
            switch (st.ttype) {
            case StreamTokenizer.TT_NUMBER:
                s = String.valueOf(st.nval);
                break;
            case '"':
            case '\'':
            case StreamTokenizer.TT_WORD:
                s = st.sval;
                break;
            case ';':
                break;
            default:
                break;
            }
            if (s == null)
                continue;
            // Tokens alternate: first of a pair is the key, second the value.
            if (key == null) {
                key = s;
            } else {
                if (key.equals("transcript_id")) {
                    transcript_id = s;
                } else if (key.equals("gene_id")) {
                    gene_id = s;
                } else if (key.equals("gene_name")) {
                    gene_name = s;
                } else if (key.equals("exon_id")) {
                    exon_id = s;
                }
                key = null;
            }
        }
        if (transcript_id == null || transcript_id.isEmpty())
            continue;
        GTFGene gene = transcript2gene.get(tokens[0] + " " + transcript_id);
        if (gene == null) {
            gene = new GTFGene();
            gene.transcript_id = transcript_id;
            gene.gene_id = gene_id;
            gene.gene_name = gene_name;
            gene.chrom = tokens[0];
            transcript2gene.put(tokens[0] + " " + transcript_id, gene);
        }
        // GTF start/end columns are tokens[3]/tokens[4].
        GTFGene.Exon exon = gene.createExon(Integer.parseInt(tokens[3]), Integer.parseInt(tokens[4]));
        exon.exon_id = exon_id;
    }
    CloserUtil.close(iter);
    for (GTFGene g : transcript2gene.values()) {
        // Order exons by genomic start before indexing them.
        Collections.sort(g.exons, new Comparator<GTFGene.Exon>() {
            @Override
            public int compare(GTFGene.Exon o1, GTFGene.Exon o2) {
                return o1.start - o2.start;
            }
        });
        for (int i = 0; i < g.exons.size(); ++i) {
            GTFGene.Exon exon = g.exons.get(i);
            exon.index = i;
            // Reject overlapping exons within the same transcript.
            if (i > 0) {
                GTFGene.Exon prev = g.exons.get(i - 1);
                if (prev.end >= exon.start) {
                    throw new IOException("exons " + (i) + " and " + (i + 1) + " overlap in " + g);
                }
            }
            Interval interval = new Interval(g.chrom, exon.start, exon.end);
            List<GTFGene.Exon> L = exonMap.get(interval);
            if (L == null) {
                L = new ArrayList<GTFGene.Exon>(1);
                exonMap.put(interval, L);
            }
            L.add(exon);
            ++count_exons;
        }
    }
    LOG.info("End Reading " + uri + " N=" + count_exons);
}
From source file:com.denimgroup.threadfix.framework.impl.rails.RailsControllerParser.java
/**
 * Handles the token naming a controller class. Strips the trailing
 * "Controller" suffix, stores the remaining name on the current Rails
 * controller, and returns the parser to the INIT state.
 */
private void processClass(int type, String stringValue, String charValue) {
    if (type != StreamTokenizer.TT_WORD || stringValue == null) {
        return;
    }
    String ctrlName = stringValue;
    if (ctrlName.endsWith("Controller")) {
        // Keep only the part before the "Controller" suffix.
        ctrlName = ctrlName.substring(0, ctrlName.lastIndexOf("Controller"));
    }
    currentRailsController.setControllerName(ctrlName);
    currentCtrlState = ControllerState.INIT;
}
From source file:eu.scape_project.pt.util.PipedArgsParser.java
/**
 * Parses rule R. With a '>' token, x is the stdin literal and the next
 * literal names the tool; otherwise x itself is the tool name and parsing
 * delegates straight to R2.
 */
private Varbox R(String x) throws IOException {
    if (nextToken() != '>') {
        // No redirection: hand the token back and treat x as the tool name.
        tokenizer.pushBack();
        return R2(x);
    }
    boolean isLiteral = currentToken() == StreamTokenizer.TT_WORD;
    if (nextToken() != '"' && !isLiteral) {
        throw new IOException("a tool name must follow after token >");
    }
    Varbox result = R2(tokenizer.sval);
    result.stdin = x;
    return result;
}
From source file:com.denimgroup.threadfix.framework.impl.rails.RailsControllerParser.java
private void processMethod(int type, String stringValue, String charValue) { if (type == StreamTokenizer.TT_WORD && stringValue != null) { currentCtrlMethod.setMethodName(stringValue); currentCtrlState = ControllerState.INIT; }// w w w . ja v a 2s .com }
From source file:eu.scape_project.pt.util.PipedArgsParser.java
private Varbox R2(String tool) throws IOException { if (nextToken() != '"' && currentToken() != StreamTokenizer.TT_WORD) throw new IOException("an action name must follow after tool"); Command c = new Command(); c.action = tokenizer.sval;// www .j a va2 s .co m c.tool = tool; Varbox r = new Varbox(); nextToken(); while (currentToken() == '-') { Entry<String, String> pair = PAIR(); c.pairs.put(pair.getKey(), pair.getValue()); nextToken(); } r.commands.add(c); while (currentToken() == '|') { r.commands.add(COMMAND()); } if (currentToken() == '>') { if (nextToken() != '"' && currentToken() != StreamTokenizer.TT_WORD) throw new IOException("a stdout literal must follow after token '>'"); r.stdout = tokenizer.sval; } return r; }
From source file:com.denimgroup.threadfix.framework.impl.rails.RailsControllerParser.java
private void processParams(int type, String stringValue, String charValue) { if (type == StreamTokenizer.TT_WORD && stringValue.startsWith(":") && stringValue.length() > 1) { stringValue = stringValue.substring(1); // addMethodParam(stringValue); if (currentParamName == null) currentParamName = stringValue; else/*from w w w . jav a 2 s .co m*/ currentParamName = currentParamName.concat(".").concat(stringValue); return; } else if ("[".equals(charValue) || "]".equals(charValue)) { return; } else { addMethodParam(currentParamName); currentParamName = null; currentCtrlState = ControllerState.INIT; return; } }
From source file:cross.io.xml.FragmentXMLSerializer.java
/** * @param name/*from w w w . j a v a2 s.co m*/ * @param dims * @param data * @return */ private Array handleData(final String name, final Dimension[] dims, final Element data, final Range[] ranges) { EvalTools.notNull(dims, this); final String dec = new String(Base64.decode(data.getText(), Base64.GZIP)); final StreamTokenizer st = new StreamTokenizer(new StringReader(dec)); final NumberFormat nf = NumberFormat.getNumberInstance(Locale.US); final int[] shape = new int[dims.length]; int d = 0; for (final Dimension dim : dims) { shape[d++] = dim.getLength(); } Array a = null; IndexIterator idx = null; int tok = -1; // log.info("DataType of array: {}", // dt.getPrimitiveClassType().getName()); Mode m = Mode.UNDEF; Object o = null; try { while ((tok = st.nextToken()) != StreamTokenizer.TT_EOL) { if (tok == StreamTokenizer.TT_WORD) { if (m == Mode.UNDEF) { try { o = nf.parse(st.sval); if (o instanceof Double) { m = Mode.DOUBLE; a = Array.factory(DataType.DOUBLE, shape); } else if (o instanceof Float) { m = Mode.FLOAT; a = Array.factory(DataType.FLOAT, shape); } else if (o instanceof Long) { m = Mode.LONG; a = Array.factory(DataType.LONG, shape); } else if (o instanceof Integer) { m = Mode.INTEGER; a = Array.factory(DataType.INT, shape); } else if (o instanceof Byte) { m = Mode.BYTE; a = Array.factory(DataType.BYTE, shape); } else if (o instanceof Short) { m = Mode.SHORT; a = Array.factory(DataType.SHORT, shape); } } catch (final ParseException pe) { if (st.sval.equalsIgnoreCase("true") || st.sval.equalsIgnoreCase("false")) { m = Mode.BOOLEAN; a = Array.factory(DataType.BOOLEAN, shape); } else { m = Mode.STRING; a = Array.factory(DataType.STRING, shape); } } } else { if (idx == null) { idx = a.getIndexIterator(); } switch (m) { case DOUBLE: { idx.setDoubleNext((Double) o); break; } case FLOAT: { idx.setFloatNext((Float) o); break; } case INTEGER: { idx.setIntNext((Integer) o); break; } case LONG: { idx.setLongNext((Long) o); break; } case BYTE: { idx.setByteNext((Byte) 
o); break; } case SHORT: { idx.setShortNext((Short) o); break; } case BOOLEAN: { idx.setBooleanNext(Boolean.parseBoolean(st.sval)); break; } case STRING: { idx.setObjectNext(st.sval); break; } case OBJECT: { throw new IllegalArgumentException("Could not handle type"); } } } } } } catch (final IOException e) { log.warn("Could not read data for {}", name); } if (a != null && ranges != null && ranges.length != 0) { try { return a.section(Arrays.asList(ranges)); } catch (InvalidRangeException ex) { log.warn("Invalid range while trying to subset array: ", ex); } } return a; }