package gudusoft.gsqlparser.parser;

import gudusoft.gsqlparser.EDbVendor;
import gudusoft.gsqlparser.EErrorType;
import gudusoft.gsqlparser.EFindSqlStateType;
import gudusoft.gsqlparser.ESqlStatementType;
import gudusoft.gsqlparser.ETokenStatus;
import gudusoft.gsqlparser.ETokenType;
import gudusoft.gsqlparser.TBaseType;
import gudusoft.gsqlparser.TCustomLexer;
import gudusoft.gsqlparser.TCustomParser;
import gudusoft.gsqlparser.TCustomSqlStatement;
// Note: the generated lexer class has a lowercase 'a' in its class name
import gudusoft.gsqlparser.TLexerathena;
import gudusoft.gsqlparser.TParserAthena;
import gudusoft.gsqlparser.TSourceToken;
import gudusoft.gsqlparser.TSourceTokenList;
import gudusoft.gsqlparser.TStatementList;
import gudusoft.gsqlparser.TSyntaxError;
import gudusoft.gsqlparser.compiler.TASTEvaluator;
import gudusoft.gsqlparser.compiler.TContext;
import gudusoft.gsqlparser.compiler.TFrame;
import gudusoft.gsqlparser.compiler.TGlobalScope;
import gudusoft.gsqlparser.resolver.TSQLResolver;
import gudusoft.gsqlparser.sqlcmds.ISqlCmds;
import gudusoft.gsqlparser.sqlcmds.SqlCmdsFactory;
import gudusoft.gsqlparser.sqlenv.TSQLEnv;
import gudusoft.gsqlparser.stmt.TCreateTableSqlStatement;
import gudusoft.gsqlparser.stmt.TUnknownSqlStatement;
import gudusoft.gsqlparser.stmt.oracle.TSqlplusCmdStatement;

import java.util.Stack;

/**
 * AWS Athena SQL parser implementation.
 *
 * <p>This parser handles Athena-specific SQL syntax, including:
 * <ul>
 * <li>Presto/Trino-based SQL syntax</li>
 * <li>MySQL-style comment handling</li>
 * <li>PL/SQL-like procedural blocks (BEGIN/END)</li>
 * <li>Dynamic delimiter support</li>
 * </ul>
 *
 * <p><b>Implementation Status:</b> MIGRATED
 * <ul>
 * <li><b>Completed:</b> Migrated from TGSqlParser to AbstractSqlParser</li>
 * <li><b>Current:</b> Fully self-contained Athena parser</li>
 * </ul>
 *
 * <p><b>Design Notes:</b>
 * <ul>
 * <li>Extends {@link AbstractSqlParser} using the template method pattern</li>
 * <li>Uses a single parser: {@link TParserAthena}</li>
 * <li>Primary delimiter: semicolon (;)</li>
 * <li>Supports PL/SQL-like blocks with BEGIN/END</li>
 * </ul>
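 *
 * <p><b>Usage sketch</b> (illustrative only, not taken from the original source; the public parse
 * entry point is assumed to be inherited from {@link AbstractSqlParser} / {@link SqlParser} and is
 * not defined in this file):
 * <pre>{@code
 * // Construction is fully shown in this class; the default delimiter is ';'.
 * AthenaSqlParser parser = new AthenaSqlParser();
 * // Hypothetical call - the concrete entry point lives in the parent API, not here.
 * // SqlParseResult result = parser.parse("SELECT id FROM logs WHERE dt = '2024-01-01';");
 * }</pre>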
 *
 * @see SqlParser
 * @see AbstractSqlParser
 * @see TLexerathena
 * @see TParserAthena
 * @since 3.2.0.0
 */
public class AthenaSqlParser extends AbstractSqlParser {

    // ========== Lexer and Parser ==========

    /** The Athena lexer used for tokenization (note: lowercase 'a' in the generated class name) */
    public TLexerathena flexer;

    /** The Athena parser used for parsing */
    private TParserAthena fparser;

    // ========== Statement Extraction State ==========

    /** Current statement being built during raw extraction */
    private TCustomSqlStatement gcurrentsqlstatement;

    /**
     * Constructs an Athena SQL parser.
     * <p>
     * Configures the parser for the Athena database with semicolon (;) as the default delimiter.
     */
    public AthenaSqlParser() {
        super(EDbVendor.dbvathena);
        this.delimiterChar = ';';
        this.defaultDelimiterStr = ";";

        // Create the lexer once - it is reused for all parsing operations
        this.flexer = new TLexerathena();
        this.flexer.delimiterchar = this.delimiterChar;
        this.flexer.defaultDelimiterStr = this.defaultDelimiterStr;

        // Set the parent's lexer reference for shared tokenization logic
        this.lexer = this.flexer;

        // Create the parser once - it is reused for all parsing operations
        this.fparser = new TParserAthena(null);
        this.fparser.lexer = this.flexer;
    }

    // ========== Abstract Method Implementations ==========

    @Override
    protected TCustomLexer getLexer(ParserContext context) {
        return this.flexer;
    }

    @Override
    protected TCustomParser getParser(ParserContext context, TSourceTokenList tokens) {
        this.fparser.sourcetokenlist = tokens;
        return this.fparser;
    }

    @Override
    protected TCustomParser getSecondaryParser(ParserContext context, TSourceTokenList tokens) {
        // Athena does not have a secondary parser
        return null;
    }

    // ========== Phase 2: Tokenization (Hook Pattern) ==========

    @Override
    protected void tokenizeVendorSql() {
        doathenatexttotokenlist();
    }

    /**
     * Tokenizes Athena SQL text into source tokens.
     * <p>
     * Migrated from TGSqlParser.doathenatexttotokenlist() (lines 4664-4694).
     * <p>
     * Handles:
     * <ul>
     * <li>MySQL-style comment validation</li>
     * <li>Dynamic delimiter changes (like the MySQL DELIMITER command)</li>
     * </ul>
     */
    private void doathenatexttotokenlist() {
        TSourceToken asourcetoken;
        int yychar;
        boolean startDelimiter = false;

        flexer.tmpDelimiter = "";

        asourcetoken = getanewsourcetoken();
        if (asourcetoken == null) return;
        yychar = asourcetoken.tokencode;

        while (yychar > 0) {
            sourcetokenlist.add(asourcetoken);
            asourcetoken = getanewsourcetoken();
            if (asourcetoken == null) break;
            checkMySQLCommentToken(asourcetoken);

            if ((asourcetoken.tokencode == TBaseType.lexnewline) && (startDelimiter)) {
                startDelimiter = false;
                flexer.tmpDelimiter = sourcetokenlist.get(sourcetokenlist.size() - 1).getAstext();
            }

            yychar = asourcetoken.tokencode;
        }
    }
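
    /*
     * Dynamic delimiter sketch (illustrative only, not from the original source): the loop above
     * mirrors the MySQL-style DELIMITER handling mentioned in the Javadoc. When startDelimiter is
     * true and a newline token arrives, the text of the most recently added token becomes the
     * temporary delimiter - e.g. for an input line such as "DELIMITER //", flexer.tmpDelimiter
     * would become "//" once the newline is reached. Note that startDelimiter is a local flag that
     * is never set to true within this method, so as written the branch only documents the
     * behavior carried over from TGSqlParser.
     */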

    /**
     * Checks MySQL-style comment tokens.
     * <p>
     * Migrated from TGSqlParser.checkMySQLCommentToken() (lines 4604-4619).
     * <p>
     * Note: this method is effectively a no-op because the validation logic is commented out
     * in the original implementation.
     */
    private void checkMySQLCommentToken(TSourceToken cmtToken) {
        // Original implementation is commented out - keeping as no-op
        // This matches the original TGSqlParser behavior
    }

    // ========== Phase 3: Raw Statement Extraction (Hook Pattern) ==========

    @Override
    protected void setupVendorParsersForExtraction() {
        this.fparser.sqlcmds = this.sqlcmds;
        this.fparser.sourcetokenlist = this.sourcetokenlist;
    }

    @Override
    protected void extractVendorRawStatements(SqlParseResult.Builder builder) {
        doathenagetrawsqlstatements(builder);
    }

    /**
     * Extracts raw SQL statements from the token list.
     * <p>
     * Migrated from TGSqlParser.doathenagetrawsqlstatements() (lines 6724+).
     * <p>
     * This method implements a state machine to identify statement boundaries:
     * <ul>
     * <li>Regular SQL statements terminated by a semicolon</li>
     * <li>PL/SQL-like blocks with BEGIN/END pairs</li>
     * <li>Slash (/) and period (.) terminators for procedural blocks</li>
     * </ul>
     */
    private void doathenagetrawsqlstatements(SqlParseResult.Builder builder) {
        int waitingEnd = 0;
        boolean foundEnd = false;

        if (TBaseType.assigned(sqlstatements)) sqlstatements.clear();
        if (!TBaseType.assigned(sourcetokenlist)) {
            builder.errorCode(-1);
            return;
        }

        gcurrentsqlstatement = null;
        EFindSqlStateType gst = EFindSqlStateType.stnormal;
        TSourceToken lcprevsolidtoken = null;
        TSourceToken ast = null;

        for (int i = 0; i < sourcetokenlist.size(); i++) {

            if ((ast != null) && (ast.issolidtoken()))
                lcprevsolidtoken = ast;

            ast = sourcetokenlist.get(i);
            sourcetokenlist.curpos = i;

            switch (gst) {
                case sterror: {
                    if (ast.tokentype == ETokenType.ttsemicolon) {
                        appendToken(gcurrentsqlstatement, ast);
                        onRawStatementComplete(this.parserContext, gcurrentsqlstatement, this.fparser, null, this.sqlstatements, false, builder);
                        gst = EFindSqlStateType.stnormal;
                    } else {
                        appendToken(gcurrentsqlstatement, ast);
                    }
                    break;
                } // sterror

                case stnormal: {
                    if ((ast.tokencode == TBaseType.cmtdoublehyphen)
                            || (ast.tokencode == TBaseType.cmtslashstar)
                            || (ast.tokencode == TBaseType.lexspace)
                            || (ast.tokencode == TBaseType.lexnewline)
                            || (ast.tokentype == ETokenType.ttsemicolon)) {
                        if (gcurrentsqlstatement != null) {
                            appendToken(gcurrentsqlstatement, ast);
                        }

                        if ((lcprevsolidtoken != null) && (ast.tokentype == ETokenType.ttsemicolon)) {
                            if (lcprevsolidtoken.tokentype == ETokenType.ttsemicolon) {
                                // ;;;; consecutive semicolons, treat them as a comment
                                ast.tokentype = ETokenType.ttsimplecomment;
                                ast.tokencode = TBaseType.cmtdoublehyphen;
                            }
                        }

                        continue;
                    }

                    // found a token text that starts sql or plsql mode
                    gcurrentsqlstatement = sqlcmds.issql(ast, gst, gcurrentsqlstatement);

                    if (gcurrentsqlstatement != null) {
                        if (gcurrentsqlstatement.isathenaplsql()) {
                            gst = EFindSqlStateType.ststoredprocedure;
                            appendToken(gcurrentsqlstatement, ast);
                            foundEnd = false;
                            if ((ast.tokencode == TBaseType.rrw_begin)
                                    || (ast.tokencode == TBaseType.rrw_package)
                                    || (ast.searchToken(TBaseType.rrw_package, 4) != null)) {
                                waitingEnd = 1;
                            }
                        } else {
                            gst = EFindSqlStateType.stsql;
                            appendToken(gcurrentsqlstatement, ast);
                        }
                    } else {
                        // error token text found
                        this.syntaxErrors.add(new TSyntaxError(ast.getAstext(), ast.lineNo, (ast.columnNo < 0 ? 0 : ast.columnNo),
                                "Error when tokenlize", EErrorType.spwarning, TBaseType.MSG_WARNING_ERROR_WHEN_TOKENIZE, null, ast.posinlist));

                        ast.tokentype = ETokenType.tttokenlizererrortoken;
                        gst = EFindSqlStateType.sterror;

                        gcurrentsqlstatement = new TUnknownSqlStatement(vendor);
                        gcurrentsqlstatement.sqlstatementtype = ESqlStatementType.sstinvalid;
                        appendToken(gcurrentsqlstatement, ast);
                    }

                    break;
                } // stnormal

                case stsql: {
                    if (ast.tokentype == ETokenType.ttsemicolon) {
                        gst = EFindSqlStateType.stnormal;
                        appendToken(gcurrentsqlstatement, ast);
                        gcurrentsqlstatement.semicolonended = ast;
                        if (TBaseType.DUMP_RESOLVER_LOG_TO_CONSOLE) {
                            System.out.println(" [RAW] Found semicolon, completing statement. Token count=" + gcurrentsqlstatement.sourcetokenlist.size());
                        }
                        onRawStatementComplete(this.parserContext, gcurrentsqlstatement, this.fparser, null, this.sqlstatements, false, builder);
                        continue;
                    }

                    if (sourcetokenlist.sqlplusaftercurtoken()) { // most probably a / cmd
                        gst = EFindSqlStateType.stnormal;
                        appendToken(gcurrentsqlstatement, ast);
                        onRawStatementComplete(this.parserContext, gcurrentsqlstatement, this.fparser, null, this.sqlstatements, false, builder);
                        continue;
                    }
                    appendToken(gcurrentsqlstatement, ast);
                    break;
                } // case stsql
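
                /*
                 * Illustrative walk-through of the BEGIN/END counter below (not from the original
                 * source; it assumes sqlcmds.issql() classified the opening BEGIN as Athena PL/SQL,
                 * which set waitingEnd = 1 in the stnormal case above):
                 *
                 *   BEGIN                         waitingEnd = 1
                 *     IF x > 0 THEN ... END IF;   IF -> 2, END -> 1 (the IF that follows END is
                 *                                 skipped by the "not if after END" check)
                 *   END;                          END -> 0, foundEnd = true; the trailing ';'
                 *                                 completes the raw statement
                 */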

                case ststoredprocedure: {
                    if (ast.tokencode == TBaseType.rrw_begin) {
                        waitingEnd++;
                    } else if (ast.tokencode == TBaseType.rrw_if) {
                        if (ast.searchToken(TBaseType.rrw_end, -1) == null) {
                            // this is not an IF after END
                            waitingEnd++;
                        }
                    } else if (ast.tokencode == TBaseType.rrw_case) {
                        if (ast.searchToken(TBaseType.rrw_end, -1) == null) {
                            // this is not a CASE after END
                            waitingEnd++;
                        }
                    } else if (ast.tokencode == TBaseType.rrw_loop) {
                        if (ast.searchToken(TBaseType.rrw_end, -1) == null) {
                            // this is not a LOOP after END
                            waitingEnd++;
                        }
                    } else if (ast.tokencode == TBaseType.rrw_end) {
                        foundEnd = true;
                        waitingEnd--;
                        if (waitingEnd < 0) {
                            waitingEnd = 0;
                        }
                    }

                    if ((ast.tokentype == ETokenType.ttslash) && (ast.tokencode == TBaseType.sqlpluscmd)) {
                        // Slash terminator
                        ast.tokenstatus = ETokenStatus.tsignorebyyacc;
                        gst = EFindSqlStateType.stnormal;
                        onRawStatementComplete(this.parserContext, gcurrentsqlstatement, this.fparser, null, this.sqlstatements, false, builder);

                        // make / a sqlplus cmd
                        gcurrentsqlstatement = new TSqlplusCmdStatement(vendor);
                        appendToken(gcurrentsqlstatement, ast);
                        onRawStatementComplete(this.parserContext, gcurrentsqlstatement, this.fparser, null, this.sqlstatements, false, builder);
                    } else if ((ast.tokentype == ETokenType.ttperiod) && (sourcetokenlist.returnaftercurtoken(false)) && (sourcetokenlist.returnbeforecurtoken(false))) {
                        // single dot on a separate line
                        ast.tokenstatus = ETokenStatus.tsignorebyyacc;
                        gst = EFindSqlStateType.stnormal;
                        onRawStatementComplete(this.parserContext, gcurrentsqlstatement, this.fparser, null, this.sqlstatements, false, builder);

                        // make the ttperiod token a sqlplus cmd
                        gcurrentsqlstatement = new TSqlplusCmdStatement(vendor);
                        appendToken(gcurrentsqlstatement, ast);
                        onRawStatementComplete(this.parserContext, gcurrentsqlstatement, this.fparser, null, this.sqlstatements, false, builder);
                    } else {
                        appendToken(gcurrentsqlstatement, ast);
                        if ((ast.tokentype == ETokenType.ttsemicolon) && (waitingEnd == 0) && (foundEnd)) {
                            gst = EFindSqlStateType.stnormal;
                            onRawStatementComplete(this.parserContext, gcurrentsqlstatement, this.fparser, null, this.sqlstatements, false, builder);
                        }
                    }

                    if (ast.tokencode == TBaseType.sqlpluscmd) {
                        // change the tokencode back to a keyword or identifier: a sqlplus cmd inside
                        // a sql statement (almost always a plsql block) is not really a sqlplus cmd
                        int m = flexer.getkeywordvalue(ast.getAstext());
                        if (m != 0) {
                            ast.tokencode = m;
                        } else {
                            ast.tokencode = TBaseType.ident;
                        }
                    }

                    break;
                } // case ststoredprocedure
            }
        }

        // Handle incomplete statement at end of file
        if ((gcurrentsqlstatement != null) &&
                ((gst == EFindSqlStateType.stsql) || (gst == EFindSqlStateType.ststoredprocedure) || (gst == EFindSqlStateType.sterror))) {
            if (TBaseType.DUMP_RESOLVER_LOG_TO_CONSOLE) {
                System.out.println(" [RAW] Incomplete statement at EOF. Token count=" + gcurrentsqlstatement.sourcetokenlist.size());
            }
            onRawStatementComplete(this.parserContext, gcurrentsqlstatement, this.fparser, null, this.sqlstatements, false, builder);
        }

        // Populate builder with extracted statements (CRITICAL: this was missing!)
        builder.sqlStatements(this.sqlstatements);
        builder.errorCode(0);
        builder.errorMessage("");
    }
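
    /*
     * Example of the extraction result (illustrative only, not from the original source; it
     * assumes sqlcmds.issql() classifies the BEGIN block as Athena PL/SQL). Given the input
     *
     *   SELECT * FROM t1;
     *   BEGIN UPDATE t2 SET c = 1; END;
     *
     * the state machine above emits two raw statements: the SELECT (stsql, closed by its
     * semicolon) and the BEGIN...END block (ststoredprocedure, closed by the semicolon after the
     * final END; the semicolon inside the block does not close it because waitingEnd is still 1).
     * Each completed statement is passed to onRawStatementComplete(), and the whole list is handed
     * to the result builder via builder.sqlStatements(...).
     */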

    /**
     * Helper method to append a token to a statement.
     * <p>
     * Sets the token's statement reference and adds it to the statement's token list.
     */
    private void appendToken(TCustomSqlStatement statement, TSourceToken token) {
        if (statement == null || token == null) {
            return;
        }
        token.stmt = statement;
        statement.sourcetokenlist.add(token);
    }

    // ========== Phase 4: Statement Parsing ==========

    @Override
    protected TStatementList performParsing(ParserContext context, TCustomParser parser,
                                             TCustomParser secondaryParser, TSourceTokenList tokens,
                                             TStatementList rawStatements) {
        if (TBaseType.DUMP_RESOLVER_LOG_TO_CONSOLE) {
            System.out.println("AthenaSqlParser.performParsing() CALLED with " +
                    (rawStatements != null ? rawStatements.size() : 0) + " statements");
        }

        // Store references
        this.fparser = (TParserAthena) parser;
        this.sourcetokenlist = tokens;
        this.parserContext = context;

        // Initialize sqlcmds
        this.sqlcmds = SqlCmdsFactory.get(vendor);
        this.fparser.sqlcmds = this.sqlcmds;

        // Initialize the global context using the inherited method
        initializeGlobalContext();

        // Parse each statement
        for (int i = 0; i < sqlstatements.size(); i++) {
            TCustomSqlStatement stmt = sqlstatements.getRawSql(i);
            try {
                stmt.setFrameStack(frameStack);
                int parseResult = stmt.parsestatement(null, false, context.isOnlyNeedRawParseTree());

                // Error recovery
                boolean doRecover = TBaseType.ENABLE_ERROR_RECOVER_IN_CREATE_TABLE;
                if (doRecover && ((parseResult != 0) || (stmt.getErrorCount() > 0))) {
                    if (TBaseType.DUMP_RESOLVER_LOG_TO_CONSOLE) {
                        System.out.println("AthenaSqlParser: Triggering error recovery for " + stmt.sqlstatementtype);
                        System.out.println(" parseResult=" + parseResult + ", errorCount=" + stmt.getErrorCount());
                        if (stmt.sqlstatementtype == ESqlStatementType.sstcreatetable) {
                            TCreateTableSqlStatement ct = (TCreateTableSqlStatement) stmt;
                            System.out.println(" columns before recovery: " + ct.getColumnList().size());
                        }
                    }
                    handleCreateTableErrorRecovery(stmt);
                    if (TBaseType.DUMP_RESOLVER_LOG_TO_CONSOLE && stmt.sqlstatementtype == ESqlStatementType.sstcreatetable) {
                        TCreateTableSqlStatement ct = (TCreateTableSqlStatement) stmt;
                        System.out.println(" columns after recovery: " + ct.getColumnList().size());
                    }
                }

                // Collect errors
                if ((parseResult != 0) || (stmt.getErrorCount() > 0)) {
                    copyErrorsFromStatement(stmt);
                }
            } catch (Exception ex) {
                // Use the inherited exception handler
                handleStatementParsingException(stmt, i, ex);
                continue;
            }
        }

        // Clean up the frame stack
        if (globalFrame != null) {
            globalFrame.popMeFromStack(frameStack);
        }

        // Final debug check before returning
        if (TBaseType.DUMP_RESOLVER_LOG_TO_CONSOLE) {
            System.out.println("AthenaSqlParser: total statements = " + sqlstatements.size());
            for (int i = 0; i < sqlstatements.size(); i++) {
                TCustomSqlStatement stmt = sqlstatements.get(i);
                System.out.println(" Statement[" + i + "]: " + stmt.sqlstatementtype + " @ " + System.identityHashCode(stmt));
                if (stmt.sqlstatementtype == ESqlStatementType.sstcreatetable) {
                    TCreateTableSqlStatement ct = (TCreateTableSqlStatement) stmt;
                    System.out.println(" columns = " + ct.getColumnList().size());
                }
            }
        }

        return sqlstatements;
    }

    /**
     * Handles CREATE TABLE error recovery.
     * <p>
     * Migrated from TGSqlParser.doparse() (lines 16914-16971).
     * <p>
     * Attempts to recover from parsing errors in CREATE TABLE statements by
     * marking unparseable table properties as sqlpluscmd and retrying.
     * This allows parsing CREATE TABLE statements with vendor-specific extensions
     * that may not be in the grammar.
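     * <p>
     * Illustrative example (not taken from the original source): suppose the grammar rejects a
     * trailing clause of
     * {@code CREATE EXTERNAL TABLE t (id int) PARTITIONED BY (dt string) STORED AS PARQUET LOCATION 's3://bucket/t/'}.
     * Because the statement is not a CTAS (the closing parenthesis of the column list is not
     * followed by {@code AS ( SELECT ...}), every token after that parenthesis except the
     * semicolon is re-tagged as sqlpluscmd, the errors are cleared, and the statement is parsed
     * again, so the column definitions are still recovered.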
     */
    private void handleCreateTableErrorRecovery(TCustomSqlStatement stmt) {
        // Check if this is a CREATE TABLE or CREATE INDEX statement
        if (!(stmt.sqlstatementtype == ESqlStatementType.sstcreatetable ||
                stmt.sqlstatementtype == ESqlStatementType.sstcreateindex)) {
            return;
        }

        // Skip recovery if strict parsing is enabled
        if (TBaseType.c_createTableStrictParsing) {
            return;
        }

        TCustomSqlStatement errorSqlStatement = stmt;

        int nested = 0;
        boolean isIgnore = false;
        boolean isFoundIgnoreToken = false;
        TSourceToken firstIgnoreToken = null;

        // Iterate through the tokens to find the closing parenthesis of the table definition
        for (int k = 0; k < errorSqlStatement.sourcetokenlist.size(); k++) {
            TSourceToken st = errorSqlStatement.sourcetokenlist.get(k);

            if (isIgnore) {
                // We're past the table definition, mark tokens as ignorable
                if (st.issolidtoken() && (st.tokencode != ';')) {
                    isFoundIgnoreToken = true;
                    if (firstIgnoreToken == null) {
                        firstIgnoreToken = st;
                    }
                }
                // Mark all tokens (except the semicolon) as sqlpluscmd to ignore them
                if (st.tokencode != ';') {
                    st.tokencode = TBaseType.sqlpluscmd;
                }
                continue;
            }

            // Track closing parentheses
            if (st.tokencode == (int) ')') {
                nested--;
                if (nested == 0) {
                    // Found the closing parenthesis of the table definition
                    // Check whether the next tokens are "AS ( SELECT" (CTAS pattern)
                    boolean isSelect = false;
                    TSourceToken st1 = st.searchToken(TBaseType.rrw_as, 1);
                    if (st1 != null) {
                        TSourceToken st2 = st.searchToken((int) '(', 2);
                        if (st2 != null) {
                            TSourceToken st3 = st.searchToken(TBaseType.rrw_select, 3);
                            isSelect = (st3 != null);
                        }
                    }
                    // If not a CTAS, start ignoring subsequent tokens
                    if (!isSelect) {
                        isIgnore = true;
                    }
                }
            }

            // Track opening parentheses
            if ((st.tokencode == (int) '(') || (st.tokencode == TBaseType.left_parenthesis_2)) {
                nested++;
            }
        }

        // For Oracle, the first ignored token would be validated as a valid table property;
        // Athena does not have this validation, so the check is skipped here
        // (Athena doesn't use searchOracleTablePros)

        // If we found ignorable tokens, clear the errors and retry parsing
        if (isFoundIgnoreToken) {
            if (TBaseType.DUMP_RESOLVER_LOG_TO_CONSOLE) {
                System.out.println(" Found ignoreable tokens, clearing errors and re-parsing...");
            }
            errorSqlStatement.clearError();
            int retryResult = errorSqlStatement.parsestatement(null, false, parserContext.isOnlyNeedRawParseTree());
            if (TBaseType.DUMP_RESOLVER_LOG_TO_CONSOLE) {
                System.out.println(" Retry parse result: " + retryResult);
                if (stmt.sqlstatementtype == ESqlStatementType.sstcreatetable) {
                    TCreateTableSqlStatement ct = (TCreateTableSqlStatement) stmt;
                    System.out.println(" Final column count after retry: " + ct.getColumnList().size());
                }
            }
        }
    }

    // ========== Phase 5: Semantic Analysis & Interpretation ==========

    @Override
    protected void performSemanticAnalysis(ParserContext context, TStatementList statements) {
        if (!TBaseType.isEnableResolver()) {
            return;
        }

        if (getSyntaxErrors().isEmpty()) {
            TSQLResolver resolver = new TSQLResolver(this.globalContext, statements);
            resolver.resolve();
        }
    }

    @Override
    protected void performInterpreter(ParserContext context, TStatementList statements) {
        if (!TBaseType.ENABLE_INTERPRETER) {
            return;
        }

        if (getSyntaxErrors().isEmpty()) {
            TGlobalScope interpreterScope = new TGlobalScope(sqlEnv);
            TASTEvaluator astEvaluator = new TASTEvaluator(statements, interpreterScope);
            astEvaluator.eval();
        }
    }

    @Override
    public EDbVendor getVendor() {
        return vendor;
    }

    @Override
    public String toString() {
        return "AthenaSqlParser{vendor=" + vendor + "}";
    }
}