List of usage examples for java.util.Stack.peek()

public synchronized E peek()
Returns the object at the top of this stack without removing it from the stack. Throws EmptyStackException if the stack is empty.
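Before the source-file examples, here is a minimal standalone sketch (not taken from any of the projects below) illustrating the basic contract of peek(): it looks at the top element without removing it, so repeated calls see the same value until pop() is called, and it should be guarded with empty()/isEmpty() to avoid EmptyStackException.

import java.util.Stack;

public class StackPeekExample {
    public static void main(String[] args) {
        Stack<String> stack = new Stack<>();
        stack.push("first");
        stack.push("second");

        // peek() only looks at the top element; the stack is unchanged
        System.out.println(stack.peek()); // prints "second"
        System.out.println(stack.size()); // still 2

        // pop() removes the element that peek() was looking at
        System.out.println(stack.pop());  // prints "second"

        // guard peek() on a possibly-empty stack to avoid EmptyStackException
        if (!stack.empty()) {
            System.out.println(stack.peek()); // prints "first"
        }
    }
}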
From source file:org.apache.fop.fonts.type1.AFMParser.java
private String parseCharMetrics(String line, Stack<Object> stack, String afmFileName) throws IOException {
    String trimmedLine = line.trim();
    if (END_CHAR_METRICS.equals(trimmedLine)) {
        return trimmedLine;
    }
    AFMFile afm = (AFMFile) stack.peek();
    String encoding = afm.getEncodingScheme();
    CharMetricsHandler charMetricsHandler = CharMetricsHandler.getHandler(VALUE_PARSERS, encoding);
    AFMCharMetrics chm = charMetricsHandler.parse(trimmedLine, stack, afmFileName);
    afm.addCharMetrics(chm);
    return null;
}
From source file:org.wso2.carbon.mediator.datamapper.DataMapperMediator.java
/**
 * Retrieve property values and insert into a map
 *
 * @param propertiesNamesList Required properties
 * @param synCtx              Message context
 * @return Map filled with property names and their values
 */
private Map<String, Map<String, Object>> getPropertiesMap(List<String> propertiesNamesList,
        MessageContext synCtx) {
    Map<String, Map<String, Object>> propertiesMap = new HashMap<>();
    String[] contextAndName;
    Object value;
    org.apache.axis2.context.MessageContext axis2MsgCtx = ((Axis2MessageContext) synCtx)
            .getAxis2MessageContext();

    HashMap functionProperties = new HashMap();
    Stack<TemplateContext> templateContextStack = ((Stack) synCtx
            .getProperty(SynapseConstants.SYNAPSE__FUNCTION__STACK));
    if (templateContextStack != null && !templateContextStack.isEmpty()) {
        TemplateContext templateContext = templateContextStack.peek();
        functionProperties.putAll(templateContext.getMappedValues());
    }

    for (String propertyName : propertiesNamesList) {
        contextAndName = propertyName.split("\\['|'\\]");
        switch (contextAndName[0].toUpperCase()) {
        case DEFAULT_CONTEXT:
        case SYNAPSE_CONTEXT:
            value = synCtx.getProperty(contextAndName[1]);
            break;
        case TRANSPORT_CONTEXT:
            value = ((Map) axis2MsgCtx.getProperty(TRANSPORT_HEADERS)).get(contextAndName[1]);
            break;
        case AXIS2_CONTEXT:
            value = axis2MsgCtx.getProperty(contextAndName[1]);
            break;
        case AXIS2_CLIENT_CONTEXT:
            value = axis2MsgCtx.getOptions().getProperty(contextAndName[1]);
            break;
        case OPERATIONS_CONTEXT:
            value = axis2MsgCtx.getOperationContext().getProperty(contextAndName[1]);
            break;
        case FUNCTION_CONTEXT:
            value = functionProperties.get(contextAndName[1]);
            break;
        default:
            log.warn(contextAndName[0] + " scope is not found. Setting it to an empty value.");
            value = EMPTY_STRING;
        }
        if (value == null) {
            log.warn(propertyName + " not found. Setting it to an empty value.");
            value = EMPTY_STRING;
        }
        insertToMap(propertiesMap, contextAndName, value);
    }
    return propertiesMap;
}
From source file:net.dv8tion.jda.core.entities.impl.MessageImpl.java
@Override
public synchronized String getStrippedContent() {
    if (strippedContent == null) {
        String tmp = getContent();
        //all the formatting keys to keep track of
        String[] keys = new String[] { "*", "_", "`", "~~" };

        //find all tokens (formatting strings described above)
        TreeSet<FormatToken> tokens = new TreeSet<>((t1, t2) -> Integer.compare(t1.start, t2.start));
        for (String key : keys) {
            Matcher matcher = Pattern.compile(Pattern.quote(key)).matcher(tmp);
            while (matcher.find()) {
                tokens.add(new FormatToken(key, matcher.start()));
            }
        }

        //iterate over all tokens, find all matching pairs, and add them to the list toRemove
        Stack<FormatToken> stack = new Stack<>();
        List<FormatToken> toRemove = new ArrayList<>();
        boolean inBlock = false;
        for (FormatToken token : tokens) {
            if (stack.empty() || !stack.peek().format.equals(token.format)
                    || stack.peek().start + token.format.length() == token.start) {
                //we are at an opening tag
                if (!inBlock) {
                    //we are outside of a block -> handle normally
                    if (token.format.equals("`")) {
                        //block start... invalidate all previous tags
                        stack.clear();
                        inBlock = true;
                    }
                    stack.push(token);
                } else if (token.format.equals("`")) {
                    //we are inside of a block -> handle only the block tag
                    stack.push(token);
                }
            } else if (!stack.empty()) {
                //we found a matching close-tag
                toRemove.add(stack.pop());
                toRemove.add(token);
                if (token.format.equals("`") && stack.empty()) {
                    //close tag closed the block
                    inBlock = false;
                }
            }
        }

        //sort tags to remove by their start-index and iteratively build the remaining string
        Collections.sort(toRemove, (t1, t2) -> Integer.compare(t1.start, t2.start));
        StringBuilder out = new StringBuilder();
        int currIndex = 0;
        for (FormatToken formatToken : toRemove) {
            if (currIndex < formatToken.start) {
                out.append(tmp.substring(currIndex, formatToken.start));
            }
            currIndex = formatToken.start + formatToken.format.length();
        }
        if (currIndex < tmp.length()) {
            out.append(tmp.substring(currIndex));
        }
        //return the stripped text, escaping all remaining formatting characters
        //(tokens without a matching open/close tag, or left/right of a block)
        strippedContent = out.toString().replace("*", "\\*").replace("_", "\\_").replace("~", "\\~");
    }
    return strippedContent;
}
From source file:com._4dconcept.springframework.data.marklogic.core.query.QueryBuilder.java
@Nullable
private Criteria buildCriteria(Object bean, MarklogicPersistentEntity<?> entity) {
    Stack<Criteria> stack = new Stack<>();

    PersistentPropertyAccessor propertyAccessor = entity.getPropertyAccessor(bean);

    entity.doWithProperties((PropertyHandler<MarklogicPersistentProperty>) property -> {
        Object value = propertyAccessor.getProperty(property);
        if (hasContent(value)) {
            if (stack.empty()) {
                stack.push(buildCriteria(property, value));
            } else {
                Criteria criteria = stack.peek();
                if (criteria.getOperator() == null) {
                    Criteria andCriteria = new Criteria(Criteria.Operator.and,
                            new ArrayList<>(Arrays.asList(criteria, buildCriteria(property, value))));
                    stack.pop();
                    stack.push(andCriteria);
                } else {
                    Criteria subCriteria = buildCriteria(property, value);
                    if (subCriteria != null) {
                        criteria.add(subCriteria);
                    }
                }
            }
        }
    });

    return stack.empty() ? null : stack.peek();
}
From source file:org.jumpmind.metl.core.runtime.component.XmlFormatter.java
private boolean addModelEntityXml(Stack<DocElement> parentStack, Document generatedXml, String entityId) {
    DocElement entityDocElement = entityAttributeDtls.get(entityId);
    boolean entityUsed = false;
    if (entityDocElement != null) {
        entityUsed = true;
        while (!parentStack.isEmpty() && parentStack.peek().level >= entityDocElement.level) {
            parentStack.pop();
        }
        // TODO: this guy's parent entity might be the last entity, not static data.
        // What is below works assuming the entities line up parent to child;
        // still needs work.
        if (parentStack.isEmpty() || parentStack.peek().level < entityDocElement.level - 1) {
            fillStackWithStaticParentElements(parentStack, entityDocElement, generatedXml);
        }
        Element entityElementToAdd = entityDocElement.xmlElement.clone();
        DocElement parentToAttach = parentStack.peek();
        parentToAttach.xmlElement.addContent(0, entityElementToAdd);
        parentStack.push(
                new DocElement(entityDocElement.level, entityElementToAdd, null, entityDocElement.xpath));
    }
    return entityUsed;
}
From source file:org.apache.cocoon.components.treeprocessor.variables.PreparedVariableResolver.java
public final String resolve(InvokeContext context, Map objectModel) throws PatternException {
    List mapStack = null; // get the stack only when necessary - lazy inside the loop
    int stackSize = 0;
    if (needsMapStack) {
        if (context == null) {
            throw new PatternException("Need an invoke context to resolve " + this);
        }
        mapStack = context.getMapStack();
        stackSize = mapStack.size();
    }

    Stack stack = new Stack();

    for (Iterator i = tokens.iterator(); i.hasNext();) {
        Token token = (Token) i.next();
        Token last;

        switch (token.getType()) {
        case TEXT:
            if (stack.empty()) {
                stack.push(new Token(EXPR, token.getStringValue()));
            } else {
                last = (Token) stack.peek();
                if (last.hasType(EXPR)) {
                    last.merge(token);
                } else {
                    stack.push(new Token(EXPR, token.getStringValue()));
                }
            }
            break;

        case CLOSE:
            Token expr = (Token) stack.pop();
            Token lastButOne = (Token) stack.pop();
            Token result;
            if (expr.hasType(COLON)) {
                // i.e. nothing was specified after the colon
                stack.pop(); // Pop the OPEN
                result = processModule(lastButOne, EMPTY_TOKEN, objectModel, context, mapStack, stackSize);
            } else if (lastButOne.hasType(COLON)) {
                Token module = (Token) stack.pop();
                stack.pop(); // Pop the OPEN
                result = processModule(module, expr, objectModel, context, mapStack, stackSize);
            } else {
                result = processVariable(expr, mapStack, stackSize);
            }
            if (stack.empty()) {
                stack.push(result);
            } else {
                last = (Token) stack.peek();
                if (last.hasType(EXPR)) {
                    last.merge(result);
                } else {
                    stack.push(result);
                }
            }
            break;

        case OPEN:
        case COLON:
        case ANCHOR_VAR:
        case THREADSAFE_MODULE:
        case STATEFUL_MODULE:
        case ROOT_SITEMAP_VARIABLE:
        default: {
            stack.push(token);
            break;
        }
        }
    }

    if (stack.size() != 1) {
        throw new PatternException("Evaluation error in expression: " + originalExpr);
    }

    return ((Token) stack.pop()).getStringValue();
}
From source file:io.warp10.worf.WorfInteractive.java
public String runTemplate(Properties config, String warp10Configuration) throws WorfException {
    try {
        out.println("The configuration file is a template");

        WorfTemplate template = new WorfTemplate(config, warp10Configuration);

        out.println("Generating crypto keys...");
        for (String cryptoKey : template.getCryptoKeys()) {
            String keySize = template.generateCryptoKey(cryptoKey);
            if (keySize != null) {
                out.println(keySize + " bits secured key for " + cryptoKey + " generated");
            } else {
                out.println("Unable to generate " + cryptoKey + ", template error");
            }
        }

        out.println("Crypto keys generated");

        Stack<Pair<String, String[]>> fieldsStack = template.getFieldsStack();
        if (fieldsStack.size() > 0) {
            out.println("Update configuration...");
        }

        while (!fieldsStack.isEmpty()) {
            Pair<String, String[]> templateValues = fieldsStack.peek();

            String replaceValue = null;

            // get user input
            switch (templateValues.getValue()[0]) {
            case "path":
                replaceValue = readInputPath(reader, out, templateValues.getValue()[2]);
                break;
            case "host":
                replaceValue = readHost(reader, out, templateValues.getValue()[2]);
                break;
            case "int":
                replaceValue = readInteger(reader, out, templateValues.getValue()[2]);
                break;
            }

            if (replaceValue == null) {
                out.println("Unable to update " + templateValues.getValue()[1] + " key, enter a valid "
                        + templateValues.getValue()[0]);
                continue;
            }

            // replace template value
            template.updateField(templateValues.getKey(), replaceValue);

            // field updated, pop it
            fieldsStack.pop();
        }

        out.println("Configuration updated.");

        // save file
        Path warp10ConfigurationPath = Paths.get(warp10Configuration);
        String outputFileName = warp10ConfigurationPath.getFileName().toString();
        outputFileName = outputFileName.replace("template", "conf");

        String outputPath = readInputPath(reader, out, "save config:output path",
                warp10ConfigurationPath.getParent().toString());
        String outputFilename = readInputString(reader, out, "save config:output filename", outputFileName);

        if (Strings.isNullOrEmpty(outputPath) || Strings.isNullOrEmpty(outputFilename)) {
            throw new Exception("Path or filename empty, unable to save configuration file!");
        }

        StringBuilder sb = new StringBuilder();
        sb.append(outputPath);
        if (!outputPath.endsWith(File.separator)) {
            sb.append(File.separator);
        }
        sb.append(outputFilename);
        warp10Configuration = sb.toString();

        template.saveConfig(warp10Configuration);

        out.println("Configuration saved. filepath=" + warp10Configuration);
        out.println("Reading warp10 configuration " + warp10Configuration);
        return warp10Configuration;
    } catch (Exception exp) {
        throw new WorfException("Unexpected Worf error:" + exp.getMessage());
    }
}
From source file:net.riezebos.thoth.commands.CommentCommand.java
protected Section parseSections(String body, String contextName, String fileName) throws ContentManagerException {
    CommentManager commentManager = getThothEnvironment().getCommentManager();
    Pattern sectionStartPattern = Pattern.compile(DETAILSTART + "(.*?)" + MARKER);
    Pattern sectionEndPattern = Pattern.compile(DETAILEND);

    Stack<Section> sections = new Stack<>();
    Section main = new Section(fileName);
    main.setComments(commentManager.getComments(contextName, fileName, null));
    sections.push(main);

    for (String line : body.split("\n")) {
        Matcher matcher = sectionStartPattern.matcher(line);
        if (matcher.find()) {
            String path = matcher.group(1);
            Section subSection = new Section(path);
            List<Comment> comments = commentManager.getComments(contextName, path, null);
            subSection.setComments(comments);
            sections.peek().addSection(subSection);
            sections.push(subSection);
        } else if (sectionEndPattern.matcher(line).find()) {
            sections.pop();
        } else {
            sections.peek().addSection(line);
        }
    }
    return main;
}
From source file:com.webcohesion.ofx4j.io.tagsoup.TestTagSoupOFXReader.java
/**
 * tests using sax to parse an OFX doc.
 */
public void testVersion1() throws Exception {
    TagSoupOFXReader reader = new TagSoupOFXReader();
    final Map<String, String> headers = new HashMap<String, String>();
    final Stack<Map<String, Object>> aggregateStack = new Stack<Map<String, Object>>();
    TreeMap<String, Object> root = new TreeMap<String, Object>();
    aggregateStack.push(root);

    reader.setContentHandler(new DefaultHandler() {

        @Override
        public void onHeader(String name, String value) {
            LOG.debug(name + ":" + value);
            headers.put(name, value);
        }

        @Override
        public void onElement(String name, String value) {
            char[] tabs = new char[aggregateStack.size() * 2];
            Arrays.fill(tabs, ' ');
            LOG.debug(new String(tabs) + name + "=" + value);
            aggregateStack.peek().put(name, value);
        }

        @Override
        public void startAggregate(String aggregateName) {
            char[] tabs = new char[aggregateStack.size() * 2];
            Arrays.fill(tabs, ' ');
            LOG.debug(new String(tabs) + aggregateName + " {");

            TreeMap<String, Object> aggregate = new TreeMap<String, Object>();
            aggregateStack.peek().put(aggregateName, aggregate);
            aggregateStack.push(aggregate);
        }

        @Override
        public void endAggregate(String aggregateName) {
            aggregateStack.pop();

            char[] tabs = new char[aggregateStack.size() * 2];
            Arrays.fill(tabs, ' ');
            LOG.debug(new String(tabs) + "}");
        }
    });

    reader.parse(TestNanoXMLOFXReader.class.getResourceAsStream("example-response.ofx"));

    assertEquals(9, headers.size());
    assertEquals(1, aggregateStack.size());
    assertSame(root, aggregateStack.pop());
}
From source file:com.webcohesion.ofx4j.io.tagsoup.TestTagSoupOFXReader.java
/**
 * tests using sax to parse an OFX doc.
 */
public void testSimpleVersion1() throws Exception {
    TagSoupOFXReader reader = new TagSoupOFXReader();
    final Map<String, String> headers = new HashMap<String, String>();
    final Stack<Map<String, Object>> aggregateStack = new Stack<Map<String, Object>>();
    TreeMap<String, Object> root = new TreeMap<String, Object>();
    aggregateStack.push(root);

    reader.setContentHandler(new DefaultHandler() {

        @Override
        public void onHeader(String name, String value) {
            LOG.debug(name + ":" + value);
            headers.put(name, value);
        }

        @Override
        public void onElement(String name, String value) {
            char[] tabs = new char[aggregateStack.size() * 2];
            Arrays.fill(tabs, ' ');
            LOG.debug(new String(tabs) + name + "=" + value);
            aggregateStack.peek().put(name, value);
        }

        @Override
        public void startAggregate(String aggregateName) {
            char[] tabs = new char[aggregateStack.size() * 2];
            Arrays.fill(tabs, ' ');
            LOG.debug(new String(tabs) + aggregateName + " {");

            TreeMap<String, Object> aggregate = new TreeMap<String, Object>();
            aggregateStack.peek().put(aggregateName, aggregate);
            aggregateStack.push(aggregate);
        }

        @Override
        public void endAggregate(String aggregateName) {
            aggregateStack.pop();

            char[] tabs = new char[aggregateStack.size() * 2];
            Arrays.fill(tabs, ' ');
            LOG.debug(new String(tabs) + "}");
        }
    });

    reader.parse(TestNanoXMLOFXReader.class.getResourceAsStream("simple.ofx"));

    assertEquals(9, headers.size());
    assertEquals(1, aggregateStack.size());
    assertSame(root, aggregateStack.pop());
}