List of usage examples for the java.util.Stack no-argument constructor:
public Stack()
From source file:com.aurel.track.admin.customize.category.filter.execute.SavedFilterExecuteAction.java
/**
 * Checks whether the unwrapped (tree) filter still contains unspecified parameters
 * after the user's submit.
 *
 * Side effects: rebuilds the filter expression lists from the submitted maps,
 * transforms them into a query tree, and either stashes the parameterized filter
 * parts in the session (for a later parameter-entry screen) or stores the fully
 * specified query as the last executed one. The boolean outcome is written to the
 * servlet response as JSON.
 *
 * @return always {@code null} — the JSON answer is streamed directly to
 *         {@code servletResponse} (presumably a Struts2 action convention; confirm
 *         against the action mapping)
 */
public String unwrappedContainsParameter() {
    boolean containsParameter = false;
    // Only tree filters can carry parameters worth unwrapping.
    if (filterSubType != null && filterSubType.intValue() == QUERY_PURPOSE.TREE_FILTER) {
        if (filterUpperTO == null) {
            // nothing was submitted to selects
            filterUpperTO = new FilterUpperTO();
        }
        // Rebuild the "simple" field expressions from the submitted request maps.
        filterUpperTO.setFieldExpressionSimpleList(
                FilterSubmitHandler.createFieldExpressionSimpleListAfterSubmit(simpleMatcherRelationMap,
                        simpleDisplayValueMap, locale));
        // Rebuild the "in tree" expressions (field/moment/operation/parentheses per expression).
        List<FieldExpressionInTreeTO> fieldExpressionsInTreeList = FilterSubmitHandler
                .createFieldExpressionInTreeListAfterSubmit(inTreeDisplayValueMap, inTreeMatcherRelationMap,
                        fieldMap, fieldMomentMap, operationMap, parenthesisOpenedMap, parenthesisClosedMap,
                        fieldExpressionOrderMap, locale);
        QNode rootNode = null;
        try {
            // Transform the flat expression list into a query tree.
            rootNode = TreeFilterSaverBL.transformExpressionListToTree(fieldExpressionsInTreeList,
                    new Stack<QNode>());
            if (!instant) {
                // not instant but possibly instantly modified saved filters with parameters
                // (instant filters never have parameters)
                if (FilterSelectsParametersUtil.containsParameter(filterUpperTO)
                        || QNodeParametersUtil.containsParameter(rootNode)) {
                    // even after being instantly edited it still contains unspecified parameters:
                    // park the filter parts in the session for the parameter-entry step
                    containsParameter = true;
                    session.put(SavedFilterExecuteAction.FILTER_UPPER_WITH_PARAM, filterUpperTO);
                    session.put(SavedFilterExecuteAction.TREE_WITH_PARAM, rootNode);
                } else {
                    // no parameters: redirect to item navigator after saving
                    String filterExpression = null;
                    try {
                        filterExpression = TreeFilterFacade.getInstance().getFilterExpression(null, filterUpperTO,
                                fieldExpressionsInTreeList);
                    } catch (Exception e) {
                        // Best effort: log and continue with a null expression.
                        String errorKey = e.getMessage();
                        String errorMessage = LocalizeUtil.getLocalizedTextFromApplicationResources(errorKey,
                                locale);
                        LOGGER.warn("Transforming the instant query to expression failed with " + errorMessage);
                    }
                    // Remember this query as the last executed one for this person.
                    QueryContext queryContext = new QueryContext();
                    queryContext.setQueryType(ItemNavigatorBL.QUERY_TYPE.INSTANT);
                    queryContext.setQueryID(filterSubType);
                    queryContext.setFilterExpression(filterExpression);
                    LastExecutedBL.storeLastExecutedQuery(personBean.getObjectID(), queryContext);
                }
            }
        } catch (Exception e) {
            // Tree transformation failed: report a localized failure via JSON and bail out.
            String errorMessage = LocalizeUtil.getLocalizedTextFromApplicationResources(e.getMessage(), locale);
            JSONUtility.encodeJSON(servletResponse, JSONUtility.encodeJSONFailure(errorMessage));
            return null;
        }
    }
    // Answer the client: does the filter (still) contain parameters?
    JSONUtility.encodeJSON(servletResponse, JSONUtility.encodeJSONBoolean(containsParameter));
    return null;
}
From source file:gov.medicaid.screening.dao.impl.NurseAnesthetistsLicenseDAOBean.java
/** * Parses the full name into a User object. * * @param fullName the full name displayed on the site * @return the parsed name// w ww. j av a2 s .c om */ private User parseName(String fullName) { fullName = fullName.substring(0, fullName.indexOf(",")); // remove certificate title User user = new User(); Stack<String> nameParts = new Stack<String>(); for (String string : fullName.split(" ")) { nameParts.push(string); } user.setLastName(nameParts.pop()); if (nameParts.size() > 1) { user.setMiddleName(nameParts.pop()); } StringBuffer sb = new StringBuffer(); while (!nameParts.isEmpty()) { sb.insert(0, nameParts.pop() + " "); } user.setFirstName(sb.toString().trim()); return user; }
From source file:geva.Mapper.GEGrammar.java
String generateNameFromTree(DerivationTree tree) { StringBuilder builder = new StringBuilder(); Stack<DerivationNode> nodeStack = new Stack<DerivationNode>(); nodeStack.push((DerivationNode) tree.getRoot()); while (nodeStack.empty() == false) { DerivationNode nodes = nodeStack.pop(); if (nodes != null) { if (nodes.getCodonIndex() != -1) { builder.append(nodes.getCodonPick()); }//from w w w .ja v a 2s . com if (nodes.size() != 0) { builder.append('['); nodeStack.push(null); for (int i = nodes.size(); i > 0; i--) { nodeStack.push((DerivationNode) nodes.get(i - 1)); } } } else { builder.append(']'); } } return builder.toString(); }
From source file:net.leegorous.jsc.JavaScriptCombiner.java
public static String translate2RelatePath(File standard, File file) throws IOException { if (standard.isFile()) standard = standard.getParentFile(); String standardPath = standard.getCanonicalPath().replaceAll("\\\\", "/"); File target = file;//from ww w . j a v a 2s . c om Stack stack = new Stack(); if (file.isFile()) { stack.push(target.getName()); target = file.getParentFile(); } String targetPath = file.getCanonicalPath().replaceAll("\\\\", "/"); StringBuffer result = new StringBuffer(); while (!targetPath.startsWith(standardPath)) { standardPath = standardPath.substring(0, standardPath.lastIndexOf("/")); result.append("../"); } while (!standardPath.equals(target.getCanonicalPath().replaceAll("\\\\", "/"))) { stack.push(target.getName()); target = target.getParentFile(); } while (stack.size() > 1) { result.append((String) stack.pop()).append("/"); } if (stack.size() > 0) result.append((String) stack.pop()); // System.out.println(standardPath+"\n"+targetPath); return result.toString(); }
From source file:FibonacciHeap.java
/** * Creates a String representation of this Fibonacci heap. * * @return String of this.//from w w w . j av a 2 s.co m */ public String toString() { if (minNode == null) { return "FibonacciHeap=[]"; } // create a new stack and put root on it Stack<FibonacciHeapNode<T>> stack = new Stack<FibonacciHeapNode<T>>(); stack.push(minNode); StringBuffer buf = new StringBuffer(512); buf.append("FibonacciHeap=["); // do a simple breadth-first traversal on the tree while (!stack.empty()) { FibonacciHeapNode<T> curr = stack.pop(); buf.append(curr); buf.append(", "); if (curr.child != null) { stack.push(curr.child); } FibonacciHeapNode<T> start = curr; curr = curr.right; while (curr != start) { buf.append(curr); buf.append(", "); if (curr.child != null) { stack.push(curr.child); } curr = curr.right; } } buf.append(']'); return buf.toString(); }
From source file:gdt.data.grain.Support.java
/**
 * Intersect two string arrays.
 *
 * Elements of {@code list2} are examined in reverse order; each one that also
 * occurs in {@code list1} is handed to {@link Support#addItem} for collection.
 *
 * @param list1 first array
 * @param list2 second array
 * @return the result string array, or null when either input is null
 */
public static String[] intersect(String[] list1, String[] list2) {
    if (list2 == null || list1 == null) {
        return null;
    }
    Stack<String> result = new Stack<String>();
    Stack<String> remaining = new Stack<String>();
    for (String item : list2) {
        remaining.push(item);
    }
    while (!remaining.isEmpty()) {
        try {
            String candidate = remaining.pop();
            if (candidate == null) {
                continue;
            }
            // Linear scan of list1 for a matching member.
            String match = null;
            for (String member : list1) {
                if (candidate.equals(member)) {
                    match = member;
                    break;
                }
            }
            if (match != null) {
                Support.addItem(match, result);
            }
        } catch (Exception e) {
            // Best effort: log and keep processing the remaining elements.
            Logger.getLogger(Support.class.getName()).info("intersect:" + e.toString());
        }
    }
    return result.toArray(new String[0]);
}
From source file:com.ikanow.infinit.e.application.utils.LogstashConfigUtils.java
/**
 * Validates a logstash config file and parses it into a nested DB-object tree.
 *
 * Processing happens in stages: (0) escaped quotes are neutralized, (1) quoted
 * strings and #-comments are blanked out, (2) a regex scan over the remainder
 * tracks block depth and builds the tree. "if" statements are tracked on a
 * separate stack purely so they can be ignored. Only "input" and "filter"
 * top-level blocks are accepted; "output" blocks and certain input sub-elements
 * (sincedb_path, anything named "filter") are rejected.
 *
 * @param configFile the raw logstash configuration text
 * @param error buffer to which a human-readable message is appended on failure
 * @return the parsed tree, or null on any validation failure (error populated)
 */
public static BasicDBObject parseLogstashConfig(String configFile, StringBuffer error) {

    BasicDBObject tree = new BasicDBObject();

    // Stage 0: remove escaped "s and 's (for the purpose of the validation):
    // (prevents tricksies with escaped "s and then #s)
    // (http://stackoverflow.com/questions/5082398/regex-to-replace-single-backslashes-excluding-those-followed-by-certain-chars)
    configFile = configFile.replaceAll("(?<!\\\\)(?:((\\\\\\\\)*)\\\\)[\"']", "X");
    //TESTED (by hand - using last 2 fields of success_2_1)

    // Stage 1: remove #s, and anything in quotes (for the purpose of the validation)
    configFile = configFile.replaceAll("(?m)(?:([\"'])(?:(?!\\1).)*\\1)", "VALUE").replaceAll("(?m)(?:#.*$)",
            "");
    //TESTED (2_1 - including with a # inside the ""s - Event_Date -> Event_#Date)
    //TESTED (2_2 - various combinations of "s nested inside 's) ... yes that is a negative lookahead up there - yikes!

    // Stage 2: get a nested list of objects
    int depth = 0;
    int ifdepth = -1; // depth at which the innermost open "if" started; -1 when none
    Stack<Integer> ifStack = new Stack<Integer>();
    BasicDBObject inputOrFilter = null;
    // NOTE(review): _navigateLogstash is a precompiled Pattern declared elsewhere in
    // this class; group(1)/group(3)/group(4) are assumed to be block name, sub-type,
    // and simple-field name respectively - confirm against the pattern definition.
    Matcher m = _navigateLogstash.matcher(configFile);
    // State:
    String currTopLevelBlockName = null;
    String currSecondLevelBlockName = null;
    BasicDBObject currSecondLevelBlock = null;
    while (m.find()) {
        boolean simpleField = false;

        //DEBUG
        //System.out.println("--DEPTH="+depth + " GROUP=" + m.group() + " IFS" + Arrays.toString(ifStack.toArray()));
        //System.out.println("STATES: " + currTopLevelBlockName + " AND " + currSecondLevelBlockName);

        if (m.group().equals("}")) {
            if (ifdepth == depth) { // closing an if statement
                ifStack.pop();
                if (ifStack.isEmpty()) {
                    ifdepth = -1;
                } else {
                    ifdepth = ifStack.peek();
                }
            } //TESTED (1_1bc, 2_1)
            else { // closing a processing block
                depth--;
                if (depth < 0) { // {} Mismatch
                    error.append("{} Mismatch (})");
                    return null;
                } //TESTED (1_1abc)
            }
        } else { // new attribute!
            String typeName = m.group(1);
            if (null == typeName) {
                // it's an if statement or a string value
                typeName = m.group(4);
                if (null != typeName) {
                    simpleField = true;
                }
            } else if (typeName.equalsIgnoreCase("else")) {
                // It's an if statement..
                typeName = null;
            }
            if (null == typeName) { // if statement after all
                // Just keep track of ifs so we can ignore them
                ifStack.push(depth);
                ifdepth = depth;
                // (don't increment depth)
            } //TESTED (1_1bc, 2_1)
            else { // processing block
                String subTypeName = m.group(3);
                if (null != subTypeName) { // eg codec.multiline
                    typeName = typeName + "." + subTypeName;
                } //TESTED (2_1, 2_3)

                if (depth == 0) { // has to be one of input/output/filter)
                    String topLevelType = typeName.toLowerCase();
                    if (topLevelType.equalsIgnoreCase("input") || topLevelType.equalsIgnoreCase("filter")) {
                        if (tree.containsField(topLevelType)) {
                            error.append("Multiple input or filter blocks: " + topLevelType);
                            return null;
                        } //TESTED (1_3ab)
                        else {
                            inputOrFilter = new BasicDBObject();
                            tree.put(topLevelType, inputOrFilter);

                            // Store state:
                            currTopLevelBlockName = topLevelType;
                        } //TESTED (*)
                    } else {
                        if (topLevelType.equalsIgnoreCase("output")) {
                            error.append(
                                    "Not allowed output blocks - these are appended automatically by the logstash harvester");
                        } else {
                            error.append("Unrecognized processing block: " + topLevelType);
                        }
                        return null;
                    } //TESTED (1_4a)
                } else if (depth == 1) { // processing blocks
                    String subElType = typeName.toLowerCase();

                    // Some validation: can't include a type called "filter" anywhere
                    if ((null != currTopLevelBlockName) && currTopLevelBlockName.equals("input")) {
                        if (subElType.equals("filter") || subElType.endsWith(".filter")) {
                            error.append("Not allowed sub-elements of input called 'filter' (1)");
                            return null;
                        }
                    } //TESTED (1_5b)

                    BasicDBList subElements = (BasicDBList) inputOrFilter.get(subElType);
                    if (null == subElements) {
                        subElements = new BasicDBList();
                        inputOrFilter.put(subElType, subElements);
                    }
                    BasicDBObject newEl = new BasicDBObject();
                    subElements.add(newEl);

                    // Store state:
                    currSecondLevelBlockName = subElType;
                    currSecondLevelBlock = newEl;
                } //TESTED (*)
                else if (depth == 2) { // attributes of processing blocks
                    // we'll just store the field names for these and do any simple validation that was too complicated for the regexes
                    String subSubElType = typeName.toLowerCase();

                    // Validation:
                    if (null != currTopLevelBlockName) {
                        // 1] sincedb path
                        if (currTopLevelBlockName.equals("input") && (null != currSecondLevelBlockName)) {
                            // (don't care what the second level block name is - no sincedb allowed)
                            if (subSubElType.equalsIgnoreCase("sincedb_path")) {
                                error.append("Not allowed sincedb_path in input.* block");
                                return null;
                            } //TESTED (1_5a)
                            // 2] no sub-(-sub etc)-elements of input called filter
                            if (subSubElType.equals("filter") || subSubElType.endsWith(".filter")) {
                                error.append("Not allowed sub-elements of input called 'filter' (2)");
                                return null;
                            } //TESTED (1_5c)
                        }
                    }

                    // Store in map:
                    if (null != currSecondLevelBlock) {
                        currSecondLevelBlock.put(subSubElType, new BasicDBObject());
                    }
                }
                // (won't go any deeper than this)

                if (!simpleField) {
                    depth++;
                }
            }
        }
    }
    if (0 != depth) {
        error.append("{} Mismatch ({)");
        return null;
    } //TESTED (1_2a)

    return tree;
}
From source file:eu.annocultor.converters.geonames.GeonamesCsvToRdf.java
/**
 * Returns the transitive closure of "broader" parents for the given URI,
 * plus the geonames URI for the given country code (when known).
 *
 * Bug fixed: the original pushed {@code directParents} (the URI's immediate
 * parents) back onto the work stack instead of the newly fetched {@code parents},
 * so grandparents and higher ancestors were never explored.
 *
 * @param uri the geonames URI whose ancestors are collected
 * @param country a country code looked up in countryCodeToGeonamesCode
 * @return the set of all ancestor URIs (never contains the URI itself)
 */
Set<String> allParents(String uri, String country) {
    Set<String> all = new HashSet<String>();
    Collection<String> directParents = broader.getCollection(uri);
    Stack<String> toCheckForParents = new Stack<String>();
    if (directParents != null) {
        toCheckForParents.addAll(directParents);
    }
    while (!toCheckForParents.isEmpty()) {
        String parent = toCheckForParents.pop();
        // all.add(parent) doubles as a visited-set check, guarding against cycles.
        if (parent != null && all.add(parent)) {
            Collection<String> parents = broader.getCollection(parent);
            if (parents != null) {
                // FIX: enqueue the parent's own parents (was directParents),
                // so the walk actually reaches grandparents and beyond.
                toCheckForParents.addAll(parents);
            }
        }
    }
    String countryUri = countryCodeToGeonamesCode.get(country);
    if (countryUri != null) {
        all.add(countryUri);
    }
    return all;
}
From source file:com.sqewd.open.dal.core.persistence.db.AbstractDbPersister.java
/**
 * Executes the given query as a SELECT against the supplied JDBC connection and
 * materializes the rows into entity instances of the given type.
 *
 * Two modes, chosen by the entity metadata: when the type has a joined list,
 * rows are accumulated into an index map (one entity per key) and the map's
 * values are returned; otherwise each row becomes a fresh instance populated
 * via the type's join graph.
 *
 * @param query the source query to parse into SQL
 * @param type the entity class to instantiate; must extend AbstractEntity
 * @param limit maximum number of rows (passed to the SQL parser)
 * @param conn open JDBC connection to run the SELECT on; not closed here
 * @return the list of loaded entities
 * @throws Exception if the type is not an AbstractEntity, or on SQL/reflection errors
 */
private List<AbstractEntity> read(final String query, final Class<?> type, final int limit,
        final Connection conn) throws Exception {
    // Make sure the type for the class is available.
    StructEntityReflect enref = ReflectionUtils.get().getEntityMetadata(type);
    // joinedList decides which of the two materialization strategies below is used.
    boolean joinedList = AbstractJoinGraph.hasJoinedList(enref);

    SQLQuery parser = new SQLQuery(type);
    String selectsql = parser.parse(query, limit);
    Statement stmnt = conn.createStatement();
    List<AbstractEntity> entities = new ArrayList<AbstractEntity>();
    HashMap<String, AbstractEntity> refindx = null;
    try {
        log.debug("SELECT SQL [" + selectsql + "]");
        ResultSet rs = stmnt.executeQuery(selectsql);
        try {
            if (joinedList) {
                refindx = new HashMap<String, AbstractEntity>();
            }
            while (rs.next()) {
                if (!joinedList) {
                    // One fresh entity per row, populated along the type's join graph.
                    AbstractJoinGraph gr = AbstractJoinGraph.lookup(type);
                    Object obj = type.newInstance();
                    if (!(obj instanceof AbstractEntity))
                        throw new Exception("Unsupported Entity type [" + type.getCanonicalName() + "]");
                    AbstractEntity entity = (AbstractEntity) obj;
                    // Seed the traversal path with the entity's own class.
                    Stack<KeyValuePair<Class<?>>> path = new Stack<KeyValuePair<Class<?>>>();
                    KeyValuePair<Class<?>> cls = new KeyValuePair<Class<?>>();
                    cls.setValue(entity.getClass());
                    path.push(cls);
                    EntityHelper.setEntity(entity, rs, gr, path);
                    entities.add(entity);
                } else {
                    // Joined list: rows merge into refindx keyed by entity identity.
                    EntityHelper.setEntity(enref, refindx, rs);
                }
            }
        } finally {
            // Always release the result set, even when row mapping throws.
            if (rs != null && !rs.isClosed()) {
                rs.close();
            }
        }
        if (joinedList) {
            // Flush the accumulated (merged) entities into the result list.
            for (String key : refindx.keySet()) {
                entities.add(refindx.get(key));
            }
        }
        return entities;
    } finally {
        // The statement is owned by this method; the connection is the caller's.
        if (stmnt != null && !stmnt.isClosed()) {
            stmnt.close();
        }
    }
}
From source file:alluxio.job.move.MoveDefinition.java
/** * Returns {@link URIStatus} for all paths under the specified path, including the path itself. * * The statuses will be listed in the order they are visited by depth-first search. * * @param path the target path/*from ww w . jav a2 s.com*/ * @return a list of the {@link URIStatus} for all paths under the given path * @throws Exception if an exception occurs */ private List<URIStatus> getPathStatuses(AlluxioURI path) throws Exception { // Depth-first search to to find all files under path. Stack<AlluxioURI> pathsToConsider = new Stack<>(); pathsToConsider.add(path); List<URIStatus> allStatuses = Lists.newArrayList(); while (!pathsToConsider.isEmpty()) { AlluxioURI nextPath = pathsToConsider.pop(); URIStatus status = mFileSystem.getStatus(nextPath); allStatuses.add(status); if (status.isFolder()) { List<URIStatus> childStatuses = mFileSystem.listStatus(nextPath); for (URIStatus childStatus : childStatuses) { if (childStatus.isFolder()) { pathsToConsider.push(new AlluxioURI(childStatus.getPath())); } else { allStatuses.add(childStatus); } } } } return ImmutableList.copyOf(allStatuses); }