package gudusoft.gsqlparser.parser;

import gudusoft.gsqlparser.EDbVendor;
import gudusoft.gsqlparser.TBaseType;
import gudusoft.gsqlparser.TCustomLexer;
import gudusoft.gsqlparser.TCustomParser;
import gudusoft.gsqlparser.TCustomSqlStatement;
import gudusoft.gsqlparser.TLexerNetezza;
import gudusoft.gsqlparser.TParserNetezza;
import gudusoft.gsqlparser.TSourceToken;
import gudusoft.gsqlparser.TSourceTokenList;
import gudusoft.gsqlparser.TStatementList;
import gudusoft.gsqlparser.TSyntaxError;
import gudusoft.gsqlparser.EFindSqlStateType;
import gudusoft.gsqlparser.ETokenType;
import gudusoft.gsqlparser.ETokenStatus;
import gudusoft.gsqlparser.ESqlStatementType;
import gudusoft.gsqlparser.EErrorType;
import gudusoft.gsqlparser.stmt.TUnknownSqlStatement;
import gudusoft.gsqlparser.stmt.oracle.TSqlplusCmdStatement;
import gudusoft.gsqlparser.sqlcmds.ISqlCmds;
import gudusoft.gsqlparser.sqlcmds.SqlCmdsFactory;
import gudusoft.gsqlparser.compiler.TContext;
import gudusoft.gsqlparser.sqlenv.TSQLEnv;
import gudusoft.gsqlparser.compiler.TGlobalScope;
import gudusoft.gsqlparser.compiler.TFrame;
import gudusoft.gsqlparser.resolver.TSQLResolver;
import gudusoft.gsqlparser.TLog;
import gudusoft.gsqlparser.compiler.TASTEvaluator;

import java.io.BufferedReader;
import java.util.ArrayList;
import java.util.List;
import java.util.Stack;

/**
 * Netezza database SQL parser implementation.
 *
 * <p>This parser handles Netezza-specific SQL syntax including:
 * <ul>
 * <li>Netezza PL/SQL blocks (procedures, functions)</li>
 * <li>SQL*Plus-like commands</li>
 * <li>Netezza-specific DML/DDL</li>
 * <li>Type casting expressions</li>
 * <li>Special token handling (INNER, NOT DEFERRABLE, etc.)</li>
 * </ul>
 *
 * <p><b>Design Notes:</b>
 * <ul>
 * <li>Extends {@link AbstractSqlParser} using the template method pattern</li>
 * <li>Uses {@link TLexerNetezza} for tokenization</li>
 * <li>Uses {@link TParserNetezza} for parsing</li>
 * <li>Delimiter character: ';' for SQL statements</li>
 * </ul>
 *
 * <p><b>Usage Example:</b>
 * <pre>
 * // Get Netezza parser from factory
 * SqlParser parser = SqlParserFactory.get(EDbVendor.dbvnetezza);
 *
 * // Build context
 * ParserContext context = new ParserContext.Builder(EDbVendor.dbvnetezza)
 *         .sqlText("SELECT * FROM Employees WHERE DepartmentID = 10")
 *         .build();
 *
 * // Parse
 * SqlParseResult result = parser.parse(context);
 *
 * // Access statements
 * TStatementList statements = result.getSqlStatements();
 * </pre>
 *
 * @see SqlParser
 * @see AbstractSqlParser
 * @see TLexerNetezza
 * @see TParserNetezza
 */
public class NetezzaSqlParser extends AbstractSqlParser {

    /**
     * Constructs a Netezza SQL parser.
     * <p>
     * Configures the parser for the Netezza database with the default delimiter (;).
     * <p>
     * Following the original TGSqlParser pattern, the lexer and parser are
     * created once in the constructor and reused for all parsing operations.
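     * <p>
     * A minimal sketch of that reuse pattern, assuming the factory/builder API shown in the
     * class-level example (illustrative only, not part of the original source):
     * <pre>
     * SqlParser parser = SqlParserFactory.get(EDbVendor.dbvnetezza);
     * for (String sql : new String[]{"SELECT 1;", "SELECT 2;"}) {
     *     ParserContext ctx = new ParserContext.Builder(EDbVendor.dbvnetezza)
     *             .sqlText(sql)
     *             .build();
     *     SqlParseResult result = parser.parse(ctx);
     * }
     * </pre>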
     */
    public NetezzaSqlParser() {
        super(EDbVendor.dbvnetezza);
        this.delimiterChar = ';';
        this.defaultDelimiterStr = ";";

        // Create lexer once - will be reused for all parsing operations
        this.flexer = new TLexerNetezza();
        this.flexer.delimiterchar = this.delimiterChar;
        this.flexer.defaultDelimiterStr = this.defaultDelimiterStr;

        // Set parent's lexer reference for shared tokenization logic
        this.lexer = this.flexer;

        // Create parser once - will be reused for all parsing operations
        this.fparser = new TParserNetezza(null);
        this.fparser.lexer = this.flexer;
    }

    // ========== Parser Components ==========

    /** The Netezza lexer used for tokenization */
    public TLexerNetezza flexer;

    /** SQL parser (for Netezza statements) */
    private TParserNetezza fparser;

    /** Current statement being built during extraction */
    private TCustomSqlStatement gcurrentsqlstatement;

    // Note: Global context and frame stack fields inherited from AbstractSqlParser:
    // - protected TContext globalContext
    // - protected TSQLEnv sqlEnv
    // - protected Stack<TFrame> frameStack
    // - protected TFrame globalFrame

    // ========== AbstractSqlParser Abstract Methods Implementation ==========

    /**
     * Return the Netezza lexer instance.
     */
    @Override
    protected TCustomLexer getLexer(ParserContext context) {
        return this.flexer;
    }

    /**
     * Return the Netezza SQL parser instance with an updated token list.
     */
    @Override
    protected TCustomParser getParser(ParserContext context, TSourceTokenList tokens) {
        this.fparser.sourcetokenlist = tokens;
        return this.fparser;
    }

    /**
     * Return null for the secondary parser (Netezza only uses one parser).
     */
    @Override
    protected TCustomParser getSecondaryParser(ParserContext context, TSourceTokenList tokens) {
        // Netezza does not use a secondary parser like Oracle (which has a PL/SQL parser)
        return null;
    }

    /**
     * Call Netezza-specific tokenization logic.
     * <p>
     * Delegates to donetezzatexttotokenlist which handles Netezza's
     * specific keyword recognition and token generation.
     */
    @Override
    protected void tokenizeVendorSql() {
        donetezzatexttotokenlist();
    }

    /**
     * Set up the Netezza parser for raw statement extraction.
     * <p>
     * Netezza uses a single parser, so we inject sqlcmds and update
     * the token list for the main parser only.
     */
    @Override
    protected void setupVendorParsersForExtraction() {
        // Inject sqlcmds into parser (required for make_stmt)
        this.fparser.sqlcmds = this.sqlcmds;

        // Update token list for parser
        this.fparser.sourcetokenlist = this.sourcetokenlist;
    }

    /**
     * Call Netezza-specific raw statement extraction logic.
     * <p>
     * Delegates to donetezzagetrawsqlstatements which handles Netezza's
     * statement delimiters.
     */
    @Override
    protected void extractVendorRawStatements(SqlParseResult.Builder builder) {
        donetezzagetrawsqlstatements(builder);
    }

    /**
     * Perform full parsing of statements with syntax checking.
     * <p>
     * This method orchestrates the parsing of all statements.
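     * In order, it:
     * <ol>
     * <li>stores the parser, token list, and context references</li>
     * <li>reuses the raw statements produced by AbstractSqlParser.parse()</li>
     * <li>obtains the vendor sqlcmds from {@link SqlCmdsFactory} and injects them into the parser</li>
     * <li>initializes the global context for semantic analysis</li>
     * <li>parses each statement, applying CREATE TABLE/INDEX error recovery and collecting syntax errors</li>
     * <li>pops the global frame from the frame stack</li>
     * </ol>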
     */
    @Override
    protected TStatementList performParsing(ParserContext context,
                                            TCustomParser parser,
                                            TCustomParser secondaryParser,
                                            TSourceTokenList tokens,
                                            TStatementList rawStatements) {
        // Store references
        this.fparser = (TParserNetezza) parser;
        this.sourcetokenlist = tokens;
        this.parserContext = context;

        // Use the raw statements passed from AbstractSqlParser.parse()
        this.sqlstatements = rawStatements;

        // Initialize statement parsing infrastructure
        this.sqlcmds = SqlCmdsFactory.get(vendor);

        // Inject sqlcmds into parser (required for make_stmt and other methods)
        this.fparser.sqlcmds = this.sqlcmds;

        // Initialize global context for semantic analysis
        initializeGlobalContext();

        // Parse each statement with exception handling for robustness
        for (int i = 0; i < sqlstatements.size(); i++) {
            TCustomSqlStatement stmt = sqlstatements.getRawSql(i);

            try {
                stmt.setFrameStack(frameStack);

                // Parse the statement
                int parseResult = stmt.parsestatement(null, false, context.isOnlyNeedRawParseTree());

                // Handle error recovery for CREATE TABLE/INDEX
                boolean doRecover = TBaseType.ENABLE_ERROR_RECOVER_IN_CREATE_TABLE;
                if (doRecover && ((parseResult != 0) || (stmt.getErrorCount() > 0))) {
                    handleCreateTableErrorRecovery(stmt);
                }

                // Collect syntax errors
                if ((parseResult != 0) || (stmt.getErrorCount() > 0)) {
                    copyErrorsFromStatement(stmt);
                }

            } catch (Exception ex) {
                // Use inherited exception handler from AbstractSqlParser
                // This provides consistent error handling across all database parsers
                handleStatementParsingException(stmt, i, ex);
                continue;
            }
        }

        // Clean up frame stack
        if (globalFrame != null) {
            globalFrame.popMeFromStack(frameStack);
        }

        return this.sqlstatements;
    }

    /**
     * Handle error recovery for CREATE TABLE/INDEX statements.
     */
    private void handleCreateTableErrorRecovery(TCustomSqlStatement stmt) {
        if (((stmt.sqlstatementtype == ESqlStatementType.sstcreatetable)
                || (stmt.sqlstatementtype == ESqlStatementType.sstcreateindex))
                && (!TBaseType.c_createTableStrictParsing)) {

            int nested = 0;
            boolean isIgnore = false, isFoundIgnoreToken = false;
            TSourceToken firstIgnoreToken = null;

            for (int k = 0; k < stmt.sourcetokenlist.size(); k++) {
                TSourceToken st = stmt.sourcetokenlist.get(k);
                if (isIgnore) {
                    if (st.issolidtoken() && (st.tokencode != ';')) {
                        isFoundIgnoreToken = true;
                        if (firstIgnoreToken == null) {
                            firstIgnoreToken = st;
                        }
                    }
                    if (st.tokencode != ';') {
                        st.tokencode = TBaseType.sqlpluscmd;
                    }
                    continue;
                }
                if (st.tokencode == (int) ')') {
                    nested--;
                    if (nested == 0) {
                        boolean isSelect = false;
                        TSourceToken st1 = st.searchToken(TBaseType.rrw_as, 1);
                        if (st1 != null) {
                            TSourceToken st2 = st.searchToken((int) '(', 2);
                            if (st2 != null) {
                                TSourceToken st3 = st.searchToken(TBaseType.rrw_select, 3);
                                isSelect = (st3 != null);
                            }
                        }
                        if (!isSelect) isIgnore = true;
                    }
                } else if (st.tokencode == (int) '(') {
                    nested++;
                }
            }

            if (isFoundIgnoreToken) {
                stmt.clearError();
                stmt.parsestatement(null, false);
            }
        }
    }

    /**
     * Perform Netezza-specific semantic analysis using TSQLResolver.
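     * <p>
     * Resolution runs only when {@link TBaseType#isEnableResolver()} is true and no
     * syntax errors were collected during parsing.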
     */
    @Override
    protected void performSemanticAnalysis(ParserContext context, TStatementList statements) {
        if (TBaseType.isEnableResolver() && getSyntaxErrors().isEmpty()) {
            TSQLResolver resolver = new TSQLResolver(globalContext, statements);
            resolver.resolve();
        }
    }

    /**
     * Perform interpretation/evaluation on parsed statements.
     */
    @Override
    protected void performInterpreter(ParserContext context, TStatementList statements) {
        if (TBaseType.ENABLE_INTERPRETER && getSyntaxErrors().isEmpty()) {
            TLog.clearLogs();
            TGlobalScope interpreterScope = new TGlobalScope(sqlEnv);
            TLog.enableInterpreterLogOnly();
            TASTEvaluator astEvaluator = new TASTEvaluator(statements, interpreterScope);
            astEvaluator.eval();
        }
    }

    // ========== Netezza-Specific Tokenization ==========

    /**
     * Netezza-specific tokenization logic.
     * <p>
     * Extracted from: TGSqlParser.donetezzatexttotokenlist() (lines 3723-3894)
     */
    private void donetezzatexttotokenlist() {
        boolean insqlpluscmd = false;
        boolean isvalidplace = true;
        boolean waitingreturnforfloatdiv = false;
        boolean waitingreturnforsemicolon = false;
        boolean continuesqlplusatnewline = false;

        TSourceToken lct = null, prevst = null;

        TSourceToken asourcetoken, lcprevst;
        int yychar;

        asourcetoken = getanewsourcetoken();
        if (asourcetoken == null) return;
        yychar = asourcetoken.tokencode;

        while (yychar > 0) {
            sourcetokenlist.add(asourcetoken);
            switch (yychar) {
                case TBaseType.cmtdoublehyphen:
                case TBaseType.cmtslashstar:
                case TBaseType.lexspace: {
                    if (insqlpluscmd) {
                        asourcetoken.insqlpluscmd = true;
                    }
                    break;
                }
                case TBaseType.lexnewline: {
                    if (insqlpluscmd) {
                        insqlpluscmd = false;
                        isvalidplace = true;

                        if (continuesqlplusatnewline) {
                            insqlpluscmd = true;
                            isvalidplace = false;
                            asourcetoken.insqlpluscmd = true;
                        }
                    }

                    if (waitingreturnforsemicolon) {
                        isvalidplace = true;
                    }
                    if (waitingreturnforfloatdiv) {
                        isvalidplace = true;
                        lct.tokencode = TBaseType.sqlpluscmd;
                        if (lct.tokentype != ETokenType.ttslash) {
                            lct.tokentype = ETokenType.ttsqlpluscmd;
                        }
                    }
                    flexer.insqlpluscmd = insqlpluscmd;
                    break;
                } // case newline
                default: {
                    // solid token
                    continuesqlplusatnewline = false;
                    waitingreturnforsemicolon = false;
                    waitingreturnforfloatdiv = false;
                    if (insqlpluscmd) {
                        asourcetoken.insqlpluscmd = true;
                        if (asourcetoken.getAstext().equalsIgnoreCase("-")) {
                            continuesqlplusatnewline = true;
                        }
                    } else {
                        if (asourcetoken.tokentype == ETokenType.ttsemicolon) {
                            waitingreturnforsemicolon = true;
                        }
                        if ((asourcetoken.tokentype == ETokenType.ttslash)
                                && (isvalidplace || (IsValidPlaceForDivToSqlplusCmd(sourcetokenlist, asourcetoken.posinlist)))) {
                            lct = asourcetoken;
                            waitingreturnforfloatdiv = true;
                        }
                        if ((isvalidplace) && isvalidsqlpluscmdInPostgresql(asourcetoken.toString())) {
                            asourcetoken.tokencode = TBaseType.sqlpluscmd;
                            if (asourcetoken.tokentype != ETokenType.ttslash) {
                                asourcetoken.tokentype = ETokenType.ttsqlpluscmd;
                            }
                            insqlpluscmd = true;
                            flexer.insqlpluscmd = insqlpluscmd;
                        }
                    }
                    isvalidplace = false;

                    // the INNER keyword should be converted to TBaseType.ident when the
                    // next solid token is not JOIN
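                    // Illustrative examples (not from the original source):
                    //   SELECT inner FROM t          -- INNER is re-tagged as an identifier (column name)
                    //   ... FROM a INNER JOIN b ...  -- INNER stays a keyword because JOIN follows
                    //   NOT DEFERRABLE               -- NOT is re-coded as the combined NOT_DEFERRABLE keyword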
                    if (prevst != null) {
                        if (prevst.tokencode == TBaseType.rrw_inner) {
                            if (asourcetoken.tokencode != flexer.getkeywordvalue("JOIN")) {
                                prevst.tokencode = TBaseType.ident;
                            }
                        }

                        if ((prevst.tokencode == TBaseType.rrw_not)
                                && (asourcetoken.tokencode == flexer.getkeywordvalue("DEFERRABLE"))) {
                            prevst.tokencode = flexer.getkeywordvalue("NOT_DEFERRABLE");
                        }
                    }

                    if (asourcetoken.tokencode == TBaseType.rrw_inner) {
                        prevst = asourcetoken;
                    } else if (asourcetoken.tokencode == TBaseType.rrw_not) {
                        prevst = asourcetoken;
                    } else {
                        prevst = null;
                    }

                    if ((asourcetoken.tokencode == flexer.getkeywordvalue("DIRECT_LOAD"))
                            || (asourcetoken.tokencode == flexer.getkeywordvalue("ALL"))) {
                        // RW_COMPRESS RW_FOR RW_ALL RW_OPERATIONS
                        // RW_COMPRESS RW_FOR RW_DIRECT_LOAD RW_OPERATIONS
                        // change rw_for to TBaseType.rw_for1; it conflicts with COMPRESS FOR UPDATE in CREATE MATERIALIZED VIEW
                        lcprevst = getprevsolidtoken(asourcetoken);
                        if (lcprevst != null) {
                            if (lcprevst.tokencode == TBaseType.rrw_for)
                                lcprevst.tokencode = TBaseType.rw_for1;
                        }
                    }

                    if (asourcetoken.tokencode == TBaseType.rrw_dense_rank) {
                        // the KEEP keyword can be a column alias, so give KEEP in the keep_denserankclause a different token code
                        TSourceToken stKeep = asourcetoken.searchToken(TBaseType.rrw_keep, -2);
                        if (stKeep != null) {
                            stKeep.tokencode = TBaseType.rrw_keep_before_dense_rank;
                        }
                    }
                }
            }

            asourcetoken = getanewsourcetoken();
            if (asourcetoken != null) {
                yychar = asourcetoken.tokencode;
            } else {
                yychar = 0;

                if (waitingreturnforfloatdiv) {
                    // a '/' at the end of a line is treated as a SQL*Plus command
                    lct.tokencode = TBaseType.sqlpluscmd;
                    if (lct.tokentype != ETokenType.ttslash) {
                        lct.tokentype = ETokenType.ttsqlpluscmd;
                    }
                }
            }

            if ((yychar == 0) && (prevst != null)) {
                if (prevst.tokencode == TBaseType.rrw_inner) {
                    prevst.tokencode = TBaseType.ident;
                }
            }
        }
    }

    /**
     * Get the previous solid token in the token list.
     */
    private TSourceToken getprevsolidtoken(TSourceToken currentToken) {
        if (currentToken == null || currentToken.posinlist == 0) {
            return null;
        }

        for (int i = currentToken.posinlist - 1; i >= 0; i--) {
            TSourceToken token = this.sourcetokenlist.get(i);
            if (token.issolidtoken()) {
                return token;
            }
        }
        return null;
    }

    /**
     * Check whether this is a valid place for the division operator to be treated as a SQL*Plus command.
     * <p>
     * Migrated from TGSqlParser.IsValidPlaceForDivToSqlplusCmd() (lines 2641-2655).
     */
    private boolean IsValidPlaceForDivToSqlplusCmd(TSourceTokenList pstlist, int pPos) {
        boolean ret = false;

        if ((pPos <= 0) || (pPos > pstlist.size() - 1)) return ret;
        // the token directly before the '/' must be ttreturn with no trailing space
        TSourceToken lcst = pstlist.get(pPos - 1);
        if (lcst.tokentype != ETokenType.ttreturn) {
            return ret;
        }

        if (!(lcst.getAstext().charAt(lcst.getAstext().length() - 1) == ' ')) {
            ret = true;
        }

        return ret;
    }

    /**
     * Check if a string is a valid SQL*Plus command in a PostgreSQL/Netezza context.
     * <p>
     * Migrated from TGSqlParser.isvalidsqlpluscmdInPostgresql() (lines 2658-2660).
     * <p>
     * Note: This is a placeholder function that always returns false.
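     * <p>
     * Because it always returns false, the Netezza tokenizer never promotes a token to
     * {@code TBaseType.sqlpluscmd} through this check.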
     */
    private boolean isvalidsqlpluscmdInPostgresql(String astr) {
        return false;
    }

    // ========== Netezza-Specific Raw Statement Extraction ==========

    /**
     * Netezza-specific raw statement extraction logic.
     * <p>
     * Extracted from: TGSqlParser.donetezzagetrawsqlstatements() (lines 9802-10069)
     */
    private void donetezzagetrawsqlstatements(SqlParseResult.Builder builder) {
        int waitingEnd = 0;
        boolean foundEnd = false;

        if (TBaseType.assigned(sqlstatements)) sqlstatements.clear();
        if (!TBaseType.assigned(sourcetokenlist)) {
            builder.errorCode(-1);
            builder.errorMessage("Source token list is null");
            return;
        }

        gcurrentsqlstatement = null;
        EFindSqlStateType gst = EFindSqlStateType.stnormal;
        TSourceToken lcprevsolidtoken = null, ast = null;

        for (int i = 0; i < sourcetokenlist.size(); i++) {

            if ((ast != null) && (ast.issolidtoken()))
                lcprevsolidtoken = ast;

            ast = sourcetokenlist.get(i);
            sourcetokenlist.curpos = i;

            // Change token code if necessary for type cast tokens
            if (ast.tokencode == TBaseType.rrw_int) {
                if (isTypeCastToken(ast)) ast.tokencode = TBaseType.rrw_int_cast;
            } else if (ast.tokencode == TBaseType.rrw_integer) {
                if (isTypeCastToken(ast)) ast.tokencode = TBaseType.rrw_integer_cast;
            } else if (ast.tokencode == TBaseType.rrw_smallint) {
                if (isTypeCastToken(ast)) ast.tokencode = TBaseType.rrw_smallint_cast;
            } else if (ast.tokencode == TBaseType.rrw_bigint) {
                if (isTypeCastToken(ast)) ast.tokencode = TBaseType.rrw_bigint_cast;
            } else if (ast.tokencode == TBaseType.rrw_real) {
                if (isTypeCastToken(ast)) ast.tokencode = TBaseType.rrw_real_cast;
            } else if (ast.tokencode == TBaseType.rrw_float) {
                if (isTypeCastToken(ast)) ast.tokencode = TBaseType.rrw_float_cast;
            } else if (ast.tokencode == TBaseType.rrw_numeric) {
                if (isTypeCastToken(ast)) ast.tokencode = TBaseType.rrw_numeric_cast;
            } else if (ast.tokencode == TBaseType.rrw_bit) {
                if (isTypeCastToken(ast)) ast.tokencode = TBaseType.rrw_bit_cast;
            } else if (ast.tokencode == TBaseType.rrw_char) {
                if (isTypeCastToken(ast)) ast.tokencode = TBaseType.rrw_char_cast;
            } else if (ast.tokencode == TBaseType.rrw_nchar) {
                if (isTypeCastToken(ast)) ast.tokencode = TBaseType.rrw_nchar_cast;
            } else if (ast.tokencode == TBaseType.rrw_varchar) {
                if (isTypeCastToken(ast)) ast.tokencode = TBaseType.rrw_varchar_cast;
            } else if (ast.tokencode == TBaseType.rrw_character) {
                if (isTypeCastToken(ast)) ast.tokencode = TBaseType.rrw_character_cast;
            } else if (ast.tokencode == TBaseType.rrw_date) {
                if (isTypeCastToken(ast)) ast.tokencode = TBaseType.rrw_date_cast;
            } else if (ast.tokencode == TBaseType.rrw_time) {
                if (isTypeCastToken(ast)) ast.tokencode = TBaseType.rrw_time_cast;
            } else if (ast.tokencode == TBaseType.rrw_timestamp) {
                if (isTypeCastToken(ast)) ast.tokencode = TBaseType.rrw_timestamp_cast;
            } else if (ast.tokencode == TBaseType.rrw_interval) {
                if (isTypeCastToken(ast)) ast.tokencode = TBaseType.rrw_interval_cast;
            } else if (ast.tokencode == TBaseType.rrw_decimal) {
                if (isTypeCastToken(ast)) ast.tokencode = TBaseType.rrw_decimal_cast;
            }

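            // State machine for grouping tokens into raw statements (summary of the cases below):
            //   stnormal          - between statements; skip whitespace/comments and decide what the next token starts
            //   stsqlplus         - accumulating a SQL*Plus-style command until its trailing newline
            //   stsql             - accumulating an ordinary SQL statement until ';'
            //   ststoredprocedure - accumulating an NZPLSQL block while tracking BEGIN/END nesting
            //   sterror           - after a tokenizer error, swallowing tokens up to the next ';'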
            switch (gst) {
                case sterror: {
                    if (ast.tokentype == ETokenType.ttsemicolon) {
                        gcurrentsqlstatement.sourcetokenlist.add(ast);
                        onRawStatementComplete(parserContext, gcurrentsqlstatement, fparser, null, sqlstatements, false, builder);
                        gst = EFindSqlStateType.stnormal;
                    } else {
                        gcurrentsqlstatement.sourcetokenlist.add(ast);
                    }
                    break;
                } // sterror

                case stnormal: {
                    if ((ast.tokencode == TBaseType.cmtdoublehyphen)
                            || (ast.tokencode == TBaseType.cmtslashstar)
                            || (ast.tokencode == TBaseType.lexspace)
                            || (ast.tokencode == TBaseType.lexnewline)
                            || (ast.tokentype == ETokenType.ttsemicolon)) {
                        if (gcurrentsqlstatement != null) {
                            gcurrentsqlstatement.sourcetokenlist.add(ast);
                        }

                        if ((lcprevsolidtoken != null) && (ast.tokentype == ETokenType.ttsemicolon)) {
                            if (lcprevsolidtoken.tokentype == ETokenType.ttsemicolon) {
                                // ;;;; consecutive semicolons, treat them as a comment
                                ast.tokentype = ETokenType.ttsimplecomment;
                                ast.tokencode = TBaseType.cmtdoublehyphen;
                            }
                        }

                        continue;
                    }

                    if (ast.tokencode == TBaseType.sqlpluscmd) {
                        gst = EFindSqlStateType.stsqlplus;
                        gcurrentsqlstatement = new TSqlplusCmdStatement(vendor);
                        gcurrentsqlstatement.sourcetokenlist.add(ast);
                        continue;
                    }

                    // find a token that starts sql or plsql mode
                    gcurrentsqlstatement = sqlcmds.issql(ast, gst, gcurrentsqlstatement);

                    if (gcurrentsqlstatement != null) {
                        if (gcurrentsqlstatement.isnzplsql()) {
                            gst = EFindSqlStateType.ststoredprocedure;
                            gcurrentsqlstatement.sourcetokenlist.add(ast);
                            foundEnd = false;
                            if ((ast.tokencode == TBaseType.rrw_begin)
                                    || (ast.tokencode == TBaseType.rrw_package)
                                    || (ast.searchToken(TBaseType.rrw_package, 4) != null)) {
                                waitingEnd = 1;
                            }
                        } else {
                            gst = EFindSqlStateType.stsql;
                            gcurrentsqlstatement.sourcetokenlist.add(ast);
                        }
                    } else {
                        // error token found
                        this.syntaxErrors.add(new TSyntaxError(ast.getAstext(), ast.lineNo, (ast.columnNo < 0 ? 0 : ast.columnNo),
                                "Error when tokenlize", EErrorType.spwarning, TBaseType.MSG_WARNING_ERROR_WHEN_TOKENIZE, null, ast.posinlist));

                        ast.tokentype = ETokenType.tttokenlizererrortoken;
                        gst = EFindSqlStateType.sterror;

                        gcurrentsqlstatement = new TUnknownSqlStatement(vendor);
                        gcurrentsqlstatement.sqlstatementtype = ESqlStatementType.sstinvalid;
                        gcurrentsqlstatement.sourcetokenlist.add(ast);
                    }

                    break;
                } // stnormal

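                // Tokens flagged insqlpluscmd by the tokenizer are collected here; the first token
                // without the flag (the trailing newline) closes the SQL*Plus command statement.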
                case stsqlplus: {
                    if (ast.insqlpluscmd) {
                        gcurrentsqlstatement.sourcetokenlist.add(ast);
                    } else {
                        gst = EFindSqlStateType.stnormal; // this token must be a newline,
                        gcurrentsqlstatement.sourcetokenlist.add(ast); // so add it here
                        onRawStatementComplete(parserContext, gcurrentsqlstatement, fparser, null, sqlstatements, false, builder);
                    }

                    break;
                } // case stsqlplus

                case stsql: {
                    if (ast.tokentype == ETokenType.ttsemicolon) {
                        gst = EFindSqlStateType.stnormal;
                        gcurrentsqlstatement.sourcetokenlist.add(ast);
                        gcurrentsqlstatement.semicolonended = ast;
                        onRawStatementComplete(parserContext, gcurrentsqlstatement, fparser, null, sqlstatements, false, builder);
                        continue;
                    }

                    if (sourcetokenlist.sqlplusaftercurtoken()) { // most probably a '/' command
                        gst = EFindSqlStateType.stnormal;
                        gcurrentsqlstatement.sourcetokenlist.add(ast);
                        onRawStatementComplete(parserContext, gcurrentsqlstatement, fparser, null, sqlstatements, false, builder);
                        continue;
                    }
                    gcurrentsqlstatement.sourcetokenlist.add(ast);
                    break;
                } // case stsql

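                // NZPLSQL block tracking: waitingEnd counts open BEGIN/IF/CASE/LOOP constructs (an
                // IF/CASE/LOOP directly after END does not count) and is decremented on END; a ';'
                // seen at depth 0 after at least one END (foundEnd) completes the statement.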
                case ststoredprocedure: {
                    if ((ast.tokencode == TBaseType.rrw_begin)) {
                        waitingEnd++;
                    } else if ((ast.tokencode == TBaseType.rrw_if)) {
                        if (ast.searchToken(TBaseType.rrw_end, -1) == null) {
                            // this is not the IF after END
                            waitingEnd++;
                        }
                    } else if ((ast.tokencode == TBaseType.rrw_case)) {
                        if (ast.searchToken(TBaseType.rrw_end, -1) == null) {
                            // this is not the CASE after END
                            waitingEnd++;
                        }
                    } else if ((ast.tokencode == TBaseType.rrw_loop)) {
                        if (ast.searchToken(TBaseType.rrw_end, -1) == null) {
                            // this is not the LOOP after END
                            waitingEnd++;
                        }
                    } else if (ast.tokencode == TBaseType.rrw_end) {
                        foundEnd = true;
                        waitingEnd--;
                        if (waitingEnd < 0) {
                            waitingEnd = 0;
                        }
                    }

                    if ((ast.tokentype == ETokenType.ttslash) && (ast.tokencode == TBaseType.sqlpluscmd)) {
                        // TPlsqlStatementParse(asqlstatement).TerminatorToken := ast;
                        ast.tokenstatus = ETokenStatus.tsignorebyyacc;
                        gst = EFindSqlStateType.stnormal;
                        onRawStatementComplete(parserContext, gcurrentsqlstatement, fparser, null, sqlstatements, false, builder);

                        // make '/' a sqlplus cmd
                        gcurrentsqlstatement = new TSqlplusCmdStatement(vendor);
                        gcurrentsqlstatement.sourcetokenlist.add(ast);
                        onRawStatementComplete(parserContext, gcurrentsqlstatement, fparser, null, sqlstatements, false, builder);
                    } else if ((ast.tokentype == ETokenType.ttperiod) && (sourcetokenlist.returnaftercurtoken(false)) && (sourcetokenlist.returnbeforecurtoken(false))) {
                        // a single dot on a separate line
                        ast.tokenstatus = ETokenStatus.tsignorebyyacc;
                        gst = EFindSqlStateType.stnormal;
                        onRawStatementComplete(parserContext, gcurrentsqlstatement, fparser, null, sqlstatements, false, builder);

                        // make ttperiod a sqlplus cmd
                        gcurrentsqlstatement = new TSqlplusCmdStatement(vendor);
                        gcurrentsqlstatement.sourcetokenlist.add(ast);
                        onRawStatementComplete(parserContext, gcurrentsqlstatement, fparser, null, sqlstatements, false, builder);
                    } else {
                        gcurrentsqlstatement.sourcetokenlist.add(ast);
                        if ((ast.tokentype == ETokenType.ttsemicolon) && (waitingEnd == 0) && (foundEnd) && (gcurrentsqlstatement.OracleStatementCanBeSeparatedByBeginEndPair())) {
                            gst = EFindSqlStateType.stnormal;
                            onRawStatementComplete(parserContext, gcurrentsqlstatement, fparser, null, sqlstatements, false, builder);
                        } else if ((ast.searchToken(TBaseType.rrw_create, 1) != null) && (ast.searchToken(TBaseType.rrw_procedure, 4) != null) && (waitingEnd == 0)) {
                            gst = EFindSqlStateType.stnormal;
                            onRawStatementComplete(parserContext, gcurrentsqlstatement, fparser, null, sqlstatements, false, builder);
                        }
                    }

                    break;
                } // ststoredprocedure
            } // switch
        } // for

        // last statement
        if ((gcurrentsqlstatement != null) &&
                ((gst == EFindSqlStateType.stsqlplus) || (gst == EFindSqlStateType.stsql)
                        || (gst == EFindSqlStateType.ststoredprocedure) || (gst == EFindSqlStateType.sterror))) {
            onRawStatementComplete(parserContext, gcurrentsqlstatement, fparser, null, sqlstatements, true, builder);
        }

        builder.errorCode(syntaxErrors.size());
        builder.sqlStatements(this.sqlstatements);
    }

    /**
     * Check if a token is a type cast token.
     * <p>
     * Migrated from TGSqlParser.isTypeCastToken() (lines 9791-9800).
     * <p>
     * Distinguishes function-style type casts from other parenthesized forms:
     * <ul>
     * <li>INT(column_name) - type cast (returns true)</li>
     * <li>INT(5) - the argument is an integer literal, so it is not treated as a cast (returns false)</li>
     * </ul>
     */
    private boolean isTypeCastToken(TSourceToken ast) {
        boolean istypecasetoken = false;
        TSourceToken st = ast.searchToken('(', 1);
        if (st != null) {
            TSourceToken nst = st.searchToken(TBaseType.iconst, 1);
            istypecasetoken = (nst == null);
        }
        return istypecasetoken;
    }
}