List of usage examples for java.util.Stack
public Stack()
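The no-argument constructor creates an empty, growable LIFO stack (java.util.Stack extends java.util.Vector, so its methods are synchronized). Before the project-specific examples below, here is a minimal, self-contained sketch of the constructor in isolation; the class name StackConstructorDemo is made up for illustration and does not come from any of the listed projects.

    import java.util.Stack;

    public class StackConstructorDemo {
        public static void main(String[] args) {
            // public Stack() creates an empty stack; elements are added with push()
            Stack<String> stack = new Stack<String>();

            stack.push("first");
            stack.push("second");

            System.out.println(stack.peek());           // "second" - top element, not removed
            System.out.println(stack.pop());            // "second" - removes and returns the top
            System.out.println(stack.search("first"));  // 1 - 1-based distance from the top
            System.out.println(stack.pop());            // "first"
            System.out.println(stack.isEmpty());        // true
        }
    }

In new code ArrayDeque is usually preferred for stack behaviour, but all of the examples below use the java.util.Stack constructor shown here.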
From source file:com.pvo.activity.MainFragmentActivity.java
private void selectItem(int position) {
    selectedPosition = position;
    // update the main content by replacing fragments
    pvoFragment = null;
    fragmentStack = new Stack<Fragment>();
    switch (position) {
    //Notification
    case 0:
        //pvoFragment = new AllNotificationActivity();
        pvoFragment = new ZNotificationMainFragment();
        /*Bundle myPropBundel = new Bundle();
        myPropBundel.putString("Type", "Search");
        pvoFragment = new ZMyPropertyListActivity();
        pvoFragment.setArguments(myPropBundel);*/
        break;
    //Notification
    case 1:
        //pvoFragment = new AllNotificationActivity();
        pvoFragment = new ZShortListMainFragment();
        /*Bundle myPropBundel = new Bundle();
        myPropBundel.putString("Type", "Search");
        pvoFragment = new ZMyPropertyListActivity();
        pvoFragment.setArguments(myPropBundel);*/
        break;
    //Search Property
    case 2:
        pvoFragment = new SearchPropertyMainActivity();
        break;
    //Search Requirement
    case 3:
        pvoFragment = new SearchRequirementMainActivity();
        break;
    //Find Agent
    case 4:
        pvoFragment = new FindAgentActivity();
        break;
    //Find Property by Id
    case 5:
        pvoFragment = new FindPropertyByIdActivity();
        break;
    // My Account Info
    case 6:
        pvoFragment = new MyAccountActivity();
        break;
    //Nominee
    case 7:
        pvoFragment = new NomineeListActivity();
        break;
    //Change Password
    case 8:
        pvoFragment = new ChangePasswordActivity();
        break;
    //My Property
    case 9:
        Bundle myPropertyBundel = new Bundle();
        myPropertyBundel.putString("Type", "Search");
        pvoFragment = new ZMyPropertyListActivity();
        pvoFragment.setArguments(myPropertyBundel);
        break;
    //My Requirement
    case 10:
        Bundle myRequirementBundel = new Bundle();
        myRequirementBundel.putString("Type", "Search");
        pvoFragment = new MyRequirementListActivity();
        pvoFragment.setArguments(myRequirementBundel);
        break;
    //prefered Broker
    case 11:
        pvoFragment = new PreferreBrokerListActivity();
        break;
    //Public Property
    case 12:
        Bundle publicPropertyBundel = new Bundle();
        publicPropertyBundel.putString("Type", "Search");
        pvoFragment = new PublicPropertyListActivity();
        pvoFragment.setArguments(publicPropertyBundel);
        break;
    //Public Requirement
    case 13:
        Bundle publicRequirementBundel = new Bundle();
        publicRequirementBundel.putString("Type", "Search");
        pvoFragment = new PublicRequirementListActivity();
        pvoFragment.setArguments(publicRequirementBundel);
        break;
    //Utilities
    case 14:
        startActivity(new Intent(getApplicationContext(), DashboardNewActivity.class));
        break;
    //Send GCM id
    /*case 14:
        SharedPreferences prefs = this.getSharedPreferences(SplashScreenActivity.PVOREGID, Context.MODE_PRIVATE);
        // prefs.getString(SplashScreenActivity.PROPERTY_REG_ID,"");
        Intent i = new Intent(Intent.ACTION_SEND);
        i.setType("message/rfc822");
        i.putExtra(Intent.EXTRA_EMAIL, new String[] { "hirenk@websoptimization.com", "nikunj@websoptimization.com", "niravj@websoptimization.com" });
        i.putExtra(Intent.EXTRA_SUBJECT, "Device Id and GCM Id");
        i.putExtra(Intent.EXTRA_TEXT, "GCM Id: " + prefs.getString(SplashScreenActivity.PROPERTY_REG_ID, ""));
        startActivity(Intent.createChooser(i, "Choose an Email client :"));
        break;*/
    //Logout
    case 15:
        //final = new UserSessionManager(getApplicationContext());
        // Clearing all user data from Shared Preferences
        deleteGsmIdService = new GsmIdDeleteService();
        WebserviceClient deleteGCMIdWebserviceClient = new WebserviceClient(MainFragmentActivity.this, deleteGsmIdService);
        deleteGCMIdWebserviceClient.setResponseListner(new ResponseListner() {
            @Override
            public void handleResponse(Object response) {
                JSONObject jsonObject = (JSONObject) response;
                if (jsonObject != null) {
                    try {
                        if (String.valueOf(jsonObject.get(Constant.DeleteRegisterGSM.API_STATUS)).equals("1")) {
                            Toast.makeText(getApplicationContext(),
                                    String.valueOf(jsonObject.get(Constant.DeleteRegisterGSM.API_MESSAGE)),
                                    Toast.LENGTH_LONG).show();
                            userSessionManager.logoutUser();
                        } else {
                            Toast.makeText(getApplicationContext(),
                                    String.valueOf(jsonObject.get(Constant.DeleteRegisterGSM.API_MESSAGE)),
                                    Toast.LENGTH_LONG).show();
                        }
                    } catch (JSONException e) {
                        e.printStackTrace();
                    }
                }
            }
        });
        deleteGCMIdWebserviceClient.execute(userSessionManager.getSessionValue(Constant.Login.PHONE_NUMBER));
        break;
    //Property detail
    case 16:
        Bundle propDetailBundel = new Bundle();
        propDetailBundel.putString("propertyid", intent.getStringExtra("PropertyId"));
        pvoFragment = new ZPropertyDetail();
        pvoFragment.setArguments(propDetailBundel);
        break;
    }

    //redirect the screen
    if (pvoFragment != null) {
        fragmentManager = getSupportFragmentManager();
        redirectScreen(pvoFragment);

        // update selected item and title, then close the drawer
        mDrawerList.setItemChecked(position, true);
        mDrawerList.setSelection(position);
        if (position != 16)
            setTitle(title[position]);
        else {
            setTitle("Property Detail");
        }
        mDrawerLayout.closeDrawer(mDrawerList);
    }
}
From source file:com.espertech.esper.epl.parse.EPLTreeWalker.java
/**
 * Ctor.
 * @param engineImportService is required to resolve lib-calls into static methods or configured aggregation functions
 * @param variableService for variable access
 * @param input is the tree nodes to walk
 * @param defaultStreamSelector - the configuration for which insert or remove streams (or both) to produce
 * @param engineURI engine URI
 * @param configurationInformation configuration info
 */
public EPLTreeWalker(TreeNodeStream input, CommonTokenStream tokenStream, EngineImportService engineImportService,
        VariableService variableService, SchedulingService schedulingService,
        SelectClauseStreamSelectorEnum defaultStreamSelector, String engineURI,
        ConfigurationInformation configurationInformation, PatternNodeFactory patternNodeFactory,
        ContextManagementService contextManagementService, List<String> scriptBodies,
        ExprDeclaredService exprDeclaredService) {
    super(input);
    this.tokenStream = tokenStream;
    this.engineImportService = engineImportService;
    this.variableService = variableService;
    this.defaultStreamSelector = defaultStreamSelector;
    this.timeProvider = schedulingService;
    this.patternNodeFactory = patternNodeFactory;
    this.exprEvaluatorContext = new ExprEvaluatorContextTimeOnly(timeProvider);
    this.engineURI = engineURI;
    this.configurationInformation = configurationInformation;
    this.schedulingService = schedulingService;
    this.contextManagementService = contextManagementService;
    this.scriptBodies = scriptBodies;
    this.exprDeclaredService = exprDeclaredService;

    if (defaultStreamSelector == null) {
        throw new IllegalArgumentException("Default stream selector is null");
    }

    statementSpec = new StatementSpecRaw(defaultStreamSelector);
    statementSpecStack = new Stack<StatementSpecRaw>();
    astExprNodeMapStack = new Stack<Map<Tree, ExprNode>>();

    // statement-global items
    expressionDeclarations = new ExpressionDeclDesc();
    statementSpec.setExpressionDeclDesc(expressionDeclarations);
    scriptExpressions = new ArrayList<ExpressionScriptProvided>(1);
    statementSpec.setScriptExpressions(scriptExpressions);
}
From source file:com.webcohesion.ofx4j.io.nanoxml.TestNanoXMLOFXReader.java
/**
 * tests for closing tags in v1
 */
public void testClosingTagsVersion1() throws Exception {
    NanoXMLOFXReader reader = new NanoXMLOFXReader();
    final Map<String, List<String>> headers = new HashMap<String, List<String>>();
    final Stack<Map<String, List<Object>>> aggregateStack = new Stack<Map<String, List<Object>>>();
    TreeMap<String, List<Object>> root = new TreeMap<String, List<Object>>();
    aggregateStack.push(root);
    reader.setContentHandler(getNewDefaultHandler(headers, aggregateStack));
    reader.parse(TestNanoXMLOFXReader.class.getResourceAsStream("closing-tags.ofx"));

    assertEquals(9, headers.size());
    assertEquals(1, aggregateStack.size());
    assertSame(root, aggregateStack.pop());

    TreeMap<String, List<Object>> OFX = (TreeMap<String, List<Object>>) root.remove("OFX").get(0);
    assertNotNull(OFX);
    TreeMap<String, List<Object>> SIGNONMSGSRSV1 = (TreeMap<String, List<Object>>) OFX.remove("SIGNONMSGSRSV1").get(0);
    assertNotNull(SIGNONMSGSRSV1);
    TreeMap<String, List<Object>> SONRS = (TreeMap<String, List<Object>>) SIGNONMSGSRSV1.remove("SONRS").get(0);
    assertNotNull(SONRS);
    TreeMap<String, List<Object>> STATUS = (TreeMap<String, List<Object>>) SONRS.remove("STATUS").get(0);
    assertNotNull(STATUS);
    TreeMap<String, List<Object>> FI = (TreeMap<String, List<Object>>) SONRS.remove("FI").get(0);
    assertNotNull(FI);

    assertEquals("0", STATUS.remove("CODE").get(0).toString().trim());
    assertEquals("INFO", STATUS.remove("SEVERITY").get(0).toString().trim());
    assertTrue(STATUS.isEmpty());
    assertEquals("20100717152132", SONRS.remove("DTSERVER").get(0).toString().trim());
    assertEquals("ENG", SONRS.remove("LANGUAGE").get(0).toString().trim());
    assertEquals("ameritrade.com", FI.remove("ORG").get(0).toString().trim());
    assertTrue(SONRS.isEmpty());
    assertTrue(SIGNONMSGSRSV1.isEmpty());
    assertTrue(OFX.isEmpty());
    assertTrue(root.isEmpty());
}
From source file:com.mirth.connect.connectors.file.FileReceiver.java
@Override
protected void poll() {
    eventController.dispatchEvent(new ConnectionStatusEvent(getChannelId(), getMetaDataId(), getSourceName(),
            ConnectionStatusEventType.POLLING));

    try {
        String channelId = getChannelId();
        String channelName = getChannel().getName();
        URI uri = fileConnector
                .getEndpointURI(replacer.replaceValues(connectorProperties.getHost(), channelId, channelName));
        String readDir = fileConnector.getPathPart(uri);
        String username = replacer.replaceValues(connectorProperties.getUsername(), channelId, channelName);
        String password = replacer.replaceValues(connectorProperties.getPassword(), channelId, channelName);
        filenamePattern = replacer.replaceValues(connectorProperties.getFileFilter(), channelId, channelName);

        SftpSchemeProperties sftpProperties = null;
        SchemeProperties schemeProperties = connectorProperties.getSchemeProperties();
        if (schemeProperties instanceof SftpSchemeProperties) {
            sftpProperties = (SftpSchemeProperties) schemeProperties.clone();

            sftpProperties.setKeyFile(replacer.replaceValues(sftpProperties.getKeyFile(), channelId, channelName));
            sftpProperties.setPassPhrase(replacer.replaceValues(sftpProperties.getPassPhrase(), channelId, channelName));
            sftpProperties.setKnownHostsFile(replacer.replaceValues(sftpProperties.getKnownHostsFile(), channelId, channelName));
        }

        fileSystemOptions = new FileSystemConnectionOptions(uri, username, password, sftpProperties);

        if (connectorProperties.isDirectoryRecursion()) {
            Set<String> visitedDirectories = new HashSet<String>();
            Stack<String> directoryStack = new Stack<String>();
            directoryStack.push(readDir);

            FileInfo[] files;
            while ((files = listFilesRecursively(visitedDirectories, directoryStack)) != null) {
                processFiles(files);
            }
        } else {
            processFiles(listFiles(readDir));
        }
    } catch (Throwable t) {
        eventController.dispatchEvent(new ErrorEvent(getChannelId(), getMetaDataId(), null,
                ErrorEventType.SOURCE_CONNECTOR, getSourceName(), connectorProperties.getName(), null, t));
        logger.error("Error polling in channel: " + getChannelId(), t);
    } finally {
        eventController.dispatchEvent(new ConnectionStatusEvent(getChannelId(), getMetaDataId(), getSourceName(),
                ConnectionStatusEventType.IDLE));
    }
}
From source file:org.martus.client.swingui.UiMainWindow.java
public UiMainWindow() throws Exception {
    try {
        warnIfThisJarNotSigned();
    } catch (Exception e) {
        e.printStackTrace();
        showMessageDialog("Error attempting to verify jar");
        throw new RuntimeException(e);
    }

    try {
        warnIfCryptoJarsNotLoaded();
    } catch (Exception e) {
        e.printStackTrace();
        showMessageDialog("Unknown error attempting to locate crypto jars");
        throw new RuntimeException(e);
    }

    cursorStack = new Stack();

    ModelessBusyDlg splashScreen = createSplashScreen();
    try {
        session = new UiSession();
        getSession().initalizeUiState();

        // Pop up a nag screen if this is an unofficial private release
        // NOTE NAG screen now could be localized
        // new UiNotifyDlg(this, "Martus - Test Version",
        //     new String[] {"THIS IS A PRE-RELEASE TEST VERSION OF MARTUS.",
        //         "Please contact martus@bentech.org with any feedback or questions."},
        //     new String[] {"OK"});

        // Uncomment the call to restrictToOnlyTestServers for test builds which might
        // generate bad data that we don't want cluttering up production servers
        // restrictToOnlyTestServers();
    } catch (MartusApp.MartusAppInitializationException e) {
        MartusLogger.logException(e);
        initializationErrorExitMartusDlg(e.getMessage());
    } finally {
        splashScreen.endDialog();
    }
}
From source file:com.mirth.connect.model.converters.NCPDPReader.java
private void parseSegment(String segment, ContentHandler contentHandler) throws SAXException {
    if (StringUtils.isBlank(segment)) {
        return;
    }

    boolean inCounter = false;
    boolean inCount = false;
    boolean hasMoreFields = true;
    String segmentId = StringUtils.EMPTY;
    String subSegment = StringUtils.EMPTY;
    Stack<String> fieldStack = new Stack<String>();

    int fieldDelimeterIndex = segment.indexOf(fieldDelimeter);

    if (fieldDelimeterIndex == 0) {
        segment = segment.substring(fieldDelimeterIndex + fieldDelimeter.length());
        fieldDelimeterIndex = segment.indexOf(fieldDelimeter);
    }

    if (fieldDelimeterIndex == -1) {
        logger.warn("Empty segment with no field seperators. Make sure batch file processing is disabled.");
        hasMoreFields = false;
        segmentId = segment;
    } else {
        segmentId = segment.substring(0, fieldDelimeterIndex);
        subSegment = segment.substring(fieldDelimeterIndex + fieldDelimeter.length(), segment.length());
    }

    contentHandler.startElement("", NCPDPReference.getInstance().getSegment(segmentId, version), "", null);

    while (hasMoreFields) {
        fieldDelimeterIndex = subSegment.indexOf(fieldDelimeter);

        // not last field
        String field;
        if (fieldDelimeterIndex != -1) {
            field = subSegment.substring(0, subSegment.indexOf(fieldDelimeter));
            subSegment = subSegment.substring(fieldDelimeterIndex + fieldDelimeter.length());
        } else {
            field = subSegment;
            hasMoreFields = false;
        }

        String fieldId = field.substring(0, 2);
        String fieldDescription = NCPDPReference.getInstance().getDescription(fieldId, version);
        String fieldMessage = field.substring(2);

        if (inCount && !isRepeatingField(fieldDescription) && !fieldDescription.endsWith("Count")) {
            // if we were in a count field, end the element
            contentHandler.endElement("", fieldStack.pop(), "");

            if (fieldStack.size() == 0) {
                inCount = false;
            }
        }

        if (fieldDescription.endsWith("Counter")) {
            if (inCounter) {
                contentHandler.endElement("", fieldStack.pop(), "");
            }

            inCounter = true;
            AttributesImpl attr = new AttributesImpl();
            attr.addAttribute("", "counter", "counter", "", fieldMessage);
            contentHandler.startElement("", fieldDescription, "", attr);
            fieldStack.push(fieldDescription);
        } else if (fieldDescription.endsWith("Count")) {
            // count field, add complex element
            inCount = true;
            AttributesImpl attr = new AttributesImpl();
            attr.addAttribute("", fieldDescription, fieldDescription, "", fieldMessage);
            // start the repeating field element
            contentHandler.startElement("", fieldDescription, "", attr);
            fieldStack.push(fieldDescription);
        } else {
            contentHandler.startElement("", fieldDescription, "", null);
            contentHandler.characters(fieldMessage.toCharArray(), 0, fieldMessage.length());
            contentHandler.endElement("", fieldDescription, "");
        }
    }

    while (fieldStack.size() > 0) {
        // close remaining count and counters
        contentHandler.endElement("", fieldStack.pop(), "");
    }

    contentHandler.endElement("", NCPDPReference.getInstance().getSegment(segmentId, version), "");
}
From source file:ru.emdev.ldap.util.EmDevSchemaLdifExtractor.java
/**
 * Calculates the destination file.
 *
 * @param resource the source file
 * @return the destination file's parent directory
 */
private File getDestinationFile(File resource) {
    File parent = resource.getParentFile();
    Stack<String> fileComponentStack = new Stack<String>();
    fileComponentStack.push(resource.getName());

    while (parent != null) {
        if (parent.getName().equals("schema")) {
            // All LDIF files besides the schema.ldif are under the
            // schema/schema base path. So we need to add one more
            // schema component to all LDIF files minus this schema.ldif
            fileComponentStack.push("schema");
            return assembleDestinationFile(fileComponentStack);
        }

        fileComponentStack.push(parent.getName());

        if (parent.equals(parent.getParentFile()) || parent.getParentFile() == null) {
            throw new IllegalStateException(I18n.err(I18n.ERR_08005));
        }

        parent = parent.getParentFile();
    }

    /* this seems retarded so I replaced it for now with what is below
       it will not break from loop above unless parent == null
       so the if is never executed - just the else is executed every time
    if ( parent != null )
    {
        return assembleDestinationFile( fileComponentStack );
    }
    else
    {
        throw new IllegalStateException( "parent cannot be null" );
    }
    */

    throw new IllegalStateException(I18n.err(I18n.ERR_08006));
}
From source file:com.ikanow.aleph2.harvest.logstash.utils.LogstashConfigUtils.java
@SuppressWarnings("deprecation") public static ObjectNode parseLogstashConfig(String configFile, StringBuffer error) { ObjectNode tree = _mapper.createObjectNode(); // Stage 0: remove escaped "s and 's (for the purpose of the validation): // (prevents tricksies with escaped "s and then #s) // (http://stackoverflow.com/questions/5082398/regex-to-replace-single-backslashes-excluding-those-followed-by-certain-chars) configFile = configFile.replaceAll("(?<!\\\\)(?:((\\\\\\\\)*)\\\\)[\"']", "X"); //TESTED (by hand - using last 2 fields of success_2_1) // Stage 1: remove #s, and anything in quotes (for the purpose of the validation) configFile = configFile.replaceAll("(?m)(?:([\"'])(?:(?!\\1).)*\\1)", "VALUE").replaceAll("(?m)(?:#.*$)", "");//from ww w.ja v a2 s .c o m //TESTED (2_1 - including with a # inside the ""s - Event_Date -> Event_#Date) //TESTED (2_2 - various combinations of "s nested inside 's) ... yes that is a negative lookahead up there - yikes! // Stage 2: get a nested list of objects int depth = 0; int ifdepth = -1; Stack<Integer> ifStack = new Stack<Integer>(); ObjectNode inputOrFilter = null; Matcher m = _navigateLogstash.matcher(configFile); // State: String currTopLevelBlockName = null; String currSecondLevelBlockName = null; ObjectNode currSecondLevelBlock = null; while (m.find()) { boolean simpleField = false; //DEBUG //System.out.println("--DEPTH="+depth + " GROUP=" + m.group() + " IFS" + Arrays.toString(ifStack.toArray())); //System.out.println("STATES: " + currTopLevelBlockName + " AND " + currSecondLevelBlockName); if (m.group().equals("}")) { if (ifdepth == depth) { // closing an if statement ifStack.pop(); if (ifStack.isEmpty()) { ifdepth = -1; } else { ifdepth = ifStack.peek(); } } //TESTED (1_1bc, 2_1) else { // closing a processing block depth--; if (depth < 0) { // {} Mismatch error.append("{} Mismatch (})"); return null; } //TESTED (1_1abc) } } else { // new attribute! String typeName = m.group(1); if (null == typeName) { // it's an if statement or a string value typeName = m.group(4); if (null != typeName) { simpleField = true; } } else if (typeName.equalsIgnoreCase("else")) { // It's an if statement.. typeName = null; } if (null == typeName) { // if statement after all // Just keep track of ifs so we can ignore them ifStack.push(depth); ifdepth = depth; // (don't increment depth) } //TESTED (1_1bc, 2_1) else { // processing block String subTypeName = m.group(3); if (null != subTypeName) { // eg codec.multiline typeName = typeName + "." 
+ subTypeName; } //TESTED (2_1, 2_3) if (depth == 0) { // has to be one of input/output/filter) String topLevelType = typeName.toLowerCase(); if (topLevelType.equalsIgnoreCase("input") || topLevelType.equalsIgnoreCase("filter")) { if (tree.has(topLevelType)) { error.append("Multiple input or filter blocks: " + topLevelType); return null; } //TESTED (1_3ab) else { inputOrFilter = _mapper.createObjectNode(); tree.put(topLevelType, inputOrFilter); // Store state: currTopLevelBlockName = topLevelType; } //TESTED (*) } else { if (topLevelType.equalsIgnoreCase("output")) { error.append( "Not allowed output blocks - these are appended automatically by the logstash harvester"); } else { error.append("Unrecognized processing block: " + topLevelType); } return null; } //TESTED (1_4a) } else if ((depth == 1) && (null != inputOrFilter)) { // processing blocks String subElType = typeName.toLowerCase(); // Some validation: can't include a type called "filter" anywhere if ((null != currTopLevelBlockName) && currTopLevelBlockName.equals("input")) { if (subElType.equals("filter") || subElType.endsWith(".filter")) { error.append("Not allowed sub-elements of input called 'filter' (1)"); return null; } } //TESTED (1_5b) ArrayNode subElements = (ArrayNode) inputOrFilter.get(subElType); if (null == subElements) { subElements = _mapper.createArrayNode(); inputOrFilter.put(subElType, subElements); } ObjectNode newEl = _mapper.createObjectNode(); subElements.add(newEl); // Store state: currSecondLevelBlockName = subElType; currSecondLevelBlock = newEl; } //TESTED (*) else if (depth == 2) { // attributes of processing blocks // we'll just store the field names for these and do any simple validation that was too complicated for the regexes String subSubElType = typeName.toLowerCase(); // Validation: if (null != currTopLevelBlockName) { // 1] sincedb path if (currTopLevelBlockName.equals("input") && (null != currSecondLevelBlockName)) { // (don't care what the second level block name is - no sincedb allowed) if (subSubElType.equalsIgnoreCase("sincedb_path")) { error.append("Not allowed sincedb_path in input.* block"); return null; } //TESTED (1_5a) // 2] no sub-(-sub etc)-elements of input called filter if (subSubElType.equals("filter") || subSubElType.endsWith(".filter")) { error.append("Not allowed sub-elements of input called 'filter' (2)"); return null; } //TESTED (1_5c) } } // Store in map: if (null != currSecondLevelBlock) { currSecondLevelBlock.put(subSubElType, _mapper.createObjectNode()); } } // (won't go any deeper than this) if (!simpleField) { depth++; } } } } if (0 != depth) { error.append("{} Mismatch ({)"); return null; } //TESTED (1_2a) return tree; }
From source file:net.riezebos.thoth.commands.CommentCommand.java
protected Section parseSections(String body, String contextName, String fileName) throws ContentManagerException {
    CommentManager commentManager = getThothEnvironment().getCommentManager();
    Pattern sectionStartPattern = Pattern.compile(DETAILSTART + "(.*?)" + MARKER);
    Pattern sectionEndPattern = Pattern.compile(DETAILEND);

    Stack<Section> sections = new Stack<>();
    Section main = new Section(fileName);
    main.setComments(commentManager.getComments(contextName, fileName, null));
    sections.push(main);

    for (String line : body.split("\n")) {
        Matcher matcher = sectionStartPattern.matcher(line);
        if (matcher.find()) {
            String path = matcher.group(1);
            Section subSection = new Section(path);
            List<Comment> comments = commentManager.getComments(contextName, path, null);
            subSection.setComments(comments);
            sections.peek().addSection(subSection);
            sections.push(subSection);
        } else if (sectionEndPattern.matcher(line).find()) {
            sections.pop();
        } else
            sections.peek().addSection(line);
    }
    return main;
}
From source file:com.taobao.tdhs.jdbc.sqlparser.ParseSQL.java
private boolean checkSpecialStr(String sqlstring, String searchStr) {
    // Check whether searchStr occurs in sqlstring outside of single-quoted values
    Stack<String> stack = new Stack<String>();
    boolean exist_danyinhao = false;
    for (int i = 0; i < sqlstring.length(); i++) {
        // push every character that is not a single quote
        if (sqlstring.substring(i, i + 1).equals("'") == false) {
            stack.push(sqlstring.substring(i, i + 1));
        }
        // found a single quote
        if (sqlstring.substring(i, i + 1).equals("'")) {
            // count the backslashes immediately before it to decide whether the quote is escaped
            int count = 0;
            int k = i;
            boolean real_danyinhao;
            while (k - 1 >= 0 && sqlstring.substring(k - 1, k).equals("\\") == true) {
                k--;
                count++;
            }
            //System.out.println("\\:"+count);
            if (count % 2 == 0) {
                // even number of backslashes: this is a real (unescaped) quote
                real_danyinhao = true;
            } else {
                // odd number of backslashes: the quote is escaped, it belongs to a value
                real_danyinhao = false;
                stack.push(sqlstring.substring(i, i + 1));
            }
            if (real_danyinhao == true) {
                if (exist_danyinhao == false) {
                    // opening quote of a value
                    exist_danyinhao = true;
                    stack.push(sqlstring.substring(i, i + 1));
                } else {
                    // closing quote: pop the quoted value off the stack, back to the matching opening quote
                    boolean find_real_danyinhao = false;
                    while (find_real_danyinhao == false) {
                        while (!stack.pop().equals("'")) {
                            ;
                        }
                        // if the popped quote is preceded by backslashes, check whether it was escaped
                        if (stack.isEmpty() == false && stack.peek().equals("\\")) {
                            count = 0;
                            while (stack.peek().equals("\\")) {
                                stack.pop();
                                count++;
                            }
                            if (count % 2 == 0) {
                                // even number of backslashes: the popped quote was not escaped, so it is the opening quote
                                find_real_danyinhao = true;
                            } else {
                                // odd number of backslashes: the popped quote was escaped, keep popping
                                find_real_danyinhao = false;
                            }
                        } else {
                            // no preceding backslash: found the real opening quote
                            find_real_danyinhao = true;
                        }
                    }
                    exist_danyinhao = false;
                }
            }
        }
    } // end for

    logger.debug(stack.toString());
    if (stack.isEmpty() == false && stack.search(searchStr) > -1) {
        stack.clear();
        return true;
    } else {
        return false;
    }
}