package gudusoft.gsqlparser.parser;

import gudusoft.gsqlparser.EDbVendor;
import gudusoft.gsqlparser.TBaseType;
import gudusoft.gsqlparser.TCustomLexer;
import gudusoft.gsqlparser.TCustomParser;
import gudusoft.gsqlparser.TCustomSqlStatement;
import gudusoft.gsqlparser.TLexerVertica;
import gudusoft.gsqlparser.TParserVertica;
import gudusoft.gsqlparser.TSourceToken;
import gudusoft.gsqlparser.TSourceTokenList;
import gudusoft.gsqlparser.TStatementList;
import gudusoft.gsqlparser.TSyntaxError;
import gudusoft.gsqlparser.EFindSqlStateType;
import gudusoft.gsqlparser.ETokenType;
import gudusoft.gsqlparser.ETokenStatus;
import gudusoft.gsqlparser.ESqlStatementType;
import gudusoft.gsqlparser.EErrorType;
import gudusoft.gsqlparser.stmt.TUnknownSqlStatement;
import gudusoft.gsqlparser.stmt.oracle.TSqlplusCmdStatement;
import gudusoft.gsqlparser.sqlcmds.ISqlCmds;
import gudusoft.gsqlparser.sqlcmds.SqlCmdsFactory;
import gudusoft.gsqlparser.compiler.TContext;
import gudusoft.gsqlparser.sqlenv.TSQLEnv;
import gudusoft.gsqlparser.compiler.TGlobalScope;
import gudusoft.gsqlparser.compiler.TFrame;
import gudusoft.gsqlparser.resolver.TSQLResolver;
import gudusoft.gsqlparser.TLog;
import gudusoft.gsqlparser.compiler.TASTEvaluator;

import java.io.BufferedReader;
import java.util.ArrayList;
import java.util.List;
import java.util.Stack;

/**
 * Vertica SQL parser implementation.
 *
 * <p>This parser handles Vertica-specific SQL syntax including:
 * <ul>
 *   <li>Vertica PL/SQL (stored procedures, functions)</li>
 *   <li>BEGIN/END blocks</li>
 *   <li>Vertica-specific functions (GREATEST, LEAST, DATE)</li>
 *   <li>Statement delimiters (semicolon, slash, period)</li>
 *   <li>IF/CASE/LOOP/FOR/WHILE constructs</li>
 * </ul>
 *
 * <p><b>Design Notes:</b>
 * <ul>
 *   <li>Extends {@link AbstractSqlParser} using the template method pattern</li>
 *   <li>Uses {@link TLexerVertica} for tokenization</li>
 *   <li>Uses {@link TParserVertica} for parsing</li>
 *   <li>Delimiter character: ';' for SQL statements</li>
 *   <li>Tokenization delegates to PostgreSQL (Vertica is PostgreSQL-based)</li>
 * </ul>
 *
 * <p><b>Usage Example:</b>
 * <pre>
 * // Get Vertica parser from factory
 * SqlParser parser = SqlParserFactory.get(EDbVendor.dbvvertica);
 *
 * // Build context
 * ParserContext context = new ParserContext.Builder(EDbVendor.dbvvertica)
 *     .sqlText("SELECT * FROM employees WHERE dept_id = 10")
 *     .build();
 *
 * // Parse
 * SqlParseResult result = parser.parse(context);
 *
 * // Access statements
 * TStatementList statements = result.getSqlStatements();
 * </pre>
 *
 * @see SqlParser
 * @see AbstractSqlParser
 * @see TLexerVertica
 * @see TParserVertica
 * @since 3.2.0.0
 */
public class VerticaSqlParser extends AbstractSqlParser {

    /**
     * Construct Vertica SQL parser.
     * <p>
     * Configures the parser for Vertica database with default delimiter (;).
     * <p>
     * Following the original TGSqlParser pattern, the lexer and parser are
     * created once in the constructor and reused for all parsing operations.
     */
    public VerticaSqlParser() {
        super(EDbVendor.dbvvertica);
        this.delimiterChar = ';';
        this.defaultDelimiterStr = ";";

        // Create lexer once - will be reused for all parsing operations
        this.flexer = new TLexerVertica();
        this.flexer.delimiterchar = this.delimiterChar;
        this.flexer.defaultDelimiterStr = this.defaultDelimiterStr;

        // Set parent's lexer reference for shared tokenization logic
        this.lexer = this.flexer;

        // Create parser once - will be reused for all parsing operations
        this.fparser = new TParserVertica(null);
        this.fparser.lexer = this.flexer;
    }

    // ========== Parser Components ==========

    /** The Vertica lexer used for tokenization */
    public TLexerVertica flexer;

    /** Vertica SQL parser */
    private TParserVertica fparser;

    /** Current statement being built during extraction */
    private TCustomSqlStatement gcurrentsqlstatement;

    // Note: Global context and frame stack fields inherited from AbstractSqlParser:
    // - protected TContext globalContext
    // - protected TSQLEnv sqlEnv
    // - protected Stack<TFrame> frameStack
    // - protected TFrame globalFrame

    // ========== AbstractSqlParser Abstract Methods Implementation ==========

    /**
     * Return the Vertica lexer instance.
     */
    @Override
    protected TCustomLexer getLexer(ParserContext context) {
        return this.flexer;
    }

    /**
     * Return the Vertica SQL parser instance with updated token list.
     */
    @Override
    protected TCustomParser getParser(ParserContext context, TSourceTokenList tokens) {
        this.fparser.sourcetokenlist = tokens;
        return this.fparser;
    }
    /**
     * Vertica does not use a secondary parser (unlike Oracle, which uses a separate PL/SQL parser).
     */
    @Override
    protected TCustomParser getSecondaryParser(ParserContext context, TSourceTokenList tokens) {
        return null;
    }

    /**
     * Call Vertica-specific tokenization logic.
     * <p>
     * Vertica tokenization delegates to PostgreSQL tokenization since
     * Vertica is based on PostgreSQL.
     */
    @Override
    protected void tokenizeVendorSql() {
        doverticatexttotokenlist();
    }

    /**
     * Set up the Vertica parser for raw statement extraction.
     * <p>
     * Vertica uses a single parser, so we inject sqlcmds and update
     * the token list for the main parser only.
     */
    @Override
    protected void setupVendorParsersForExtraction() {
        // Inject sqlcmds into parser (required for make_stmt)
        this.fparser.sqlcmds = this.sqlcmds;

        // Update token list for parser
        this.fparser.sourcetokenlist = this.sourcetokenlist;
    }

    /**
     * Call Vertica-specific raw statement extraction logic.
     * <p>
     * Delegates to doverticagetrawsqlstatements which handles Vertica's
     * statement delimiters and PL/SQL block detection.
     */
    @Override
    protected void extractVendorRawStatements(SqlParseResult.Builder builder) {
        doverticagetrawsqlstatements(builder);
    }

    /**
     * Perform full parsing of statements with syntax checking.
     * <p>
     * This method orchestrates the parsing of all statements.
     */
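     * <p>The sequence, as implemented below, is: store the parser, token list and
     * context references, obtain the vendor {@code sqlcmds}, initialize the global
     * context, parse each raw statement (with CREATE TABLE/INDEX error recovery and
     * syntax-error collection), and finally pop the global frame from the frame stack.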
     */
    @Override
    protected TStatementList performParsing(ParserContext context,
                                           TCustomParser parser,
                                           TCustomParser secondaryParser,
                                           TSourceTokenList tokens,
                                           TStatementList rawStatements) {
        // Store references
        this.fparser = (TParserVertica) parser;
        this.sourcetokenlist = tokens;
        this.parserContext = context;

        // Use the raw statements passed from AbstractSqlParser.parse()
        this.sqlstatements = rawStatements;

        // Initialize statement parsing infrastructure
        this.sqlcmds = SqlCmdsFactory.get(vendor);

        // Inject sqlcmds into parser (required for make_stmt and other methods)
        this.fparser.sqlcmds = this.sqlcmds;

        // Initialize global context for semantic analysis
        initializeGlobalContext();

        // Parse each statement with exception handling for robustness
        for (int i = 0; i < sqlstatements.size(); i++) {
            TCustomSqlStatement stmt = sqlstatements.getRawSql(i);

            try {
                stmt.setFrameStack(frameStack);

                // Parse the statement
                int parseResult = stmt.parsestatement(null, false, context.isOnlyNeedRawParseTree());

                // Handle error recovery for CREATE TABLE/INDEX
                boolean doRecover = TBaseType.ENABLE_ERROR_RECOVER_IN_CREATE_TABLE;
                if (doRecover && ((parseResult != 0) || (stmt.getErrorCount() > 0))) {
                    handleCreateTableErrorRecovery(stmt);
                }

                // Collect syntax errors
                if ((parseResult != 0) || (stmt.getErrorCount() > 0)) {
                    copyErrorsFromStatement(stmt);
                }

            } catch (Exception ex) {
                // Use inherited exception handler from AbstractSqlParser
                // This provides consistent error handling across all database parsers
                handleStatementParsingException(stmt, i, ex);
                continue;
            }
        }

        // Clean up frame stack
        if (globalFrame != null) {
            globalFrame.popMeFromStack(frameStack);
        }

        return this.sqlstatements;
    }

    // Note: initializeGlobalContext() inherited from AbstractSqlParser

    /**
     * Handle CREATE TABLE/INDEX error recovery.
     * <p>
     * Migrated from original TGSqlParser logic.
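     * <p>Illustrative behavior: if the first parse left tokens marked
     * {@code tsignorebyyacc} (typically an unrecognized clause in a CREATE TABLE or
     * CREATE INDEX), the statement's token position is reset to 0 and the statement
     * is parsed a second time so the recoverable portion can still produce a parse tree.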
     */
    private void handleCreateTableErrorRecovery(TCustomSqlStatement stmt) {
        if ((stmt.sqlstatementtype != ESqlStatementType.sstcreatetable) &&
            (stmt.sqlstatementtype != ESqlStatementType.sstcreateindex)) {
            return;
        }

        TSourceTokenList tokens = stmt.sourcetokenlist;
        if (tokens == null) return;

        boolean hasIgnoreable = false;
        for (int j = 0; j < tokens.size(); j++) {
            TSourceToken token = tokens.get(j);
            if (token.tokenstatus == ETokenStatus.tsignorebyyacc) {
                hasIgnoreable = true;
                break;
            }
        }

        if (hasIgnoreable) {
            int oldPos = tokens.curpos;
            tokens.curpos = 0;

            try {
                stmt.parsestatement(null, false, parserContext.isOnlyNeedRawParseTree());
            } catch (Exception ex) {
                // Ignore retry exceptions
            } finally {
                tokens.curpos = oldPos;
            }
        }
    }

    /**
     * Perform semantic analysis on parsed statements.
     */
    @Override
    protected void performSemanticAnalysis(ParserContext context, TStatementList statements) {
        if (!TBaseType.isEnableResolver()) {
            return;
        }

        if (getSyntaxErrors().isEmpty()) {
            TSQLResolver resolver = new TSQLResolver(this.globalContext, statements);
            resolver.resolve();
        }
    }

    /**
     * Perform interpretation/evaluation on parsed statements.
     */
    @Override
    protected void performInterpreter(ParserContext context, TStatementList statements) {
        if (!TBaseType.ENABLE_INTERPRETER) {
            return;
        }

        if (getSyntaxErrors().isEmpty()) {
            TLog.clearLogs();
            TGlobalScope interpreterScope = new TGlobalScope(sqlEnv);
            TLog.enableInterpreterLogOnly();
            TASTEvaluator astEvaluator = new TASTEvaluator(statements, interpreterScope);
            astEvaluator.eval();
        }
    }

    // ========== Tokenization Methods ==========

    /**
     * Vertica tokenization - delegates to PostgreSQL tokenization.
     * <p>
     * Since Vertica is based on PostgreSQL, we use the PostgreSQL tokenization logic.
     */
    private void doverticatexttotokenlist() {
        dopostgresqltexttotokenlist();
    }

    /**
     * PostgreSQL-style tokenization logic.
     * <p>
     * Migrated from original TGSqlParser.dopostgresqltexttotokenlist().
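     * <p>The loop mainly tags tokens that belong to a SQL*Plus-style command line:
     * whitespace and comments inside such a command are marked {@code insqlpluscmd},
     * a trailing {@code -} continues the command onto the next line, and a newline
     * otherwise ends it.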
     */
    private void dopostgresqltexttotokenlist() {

        boolean insqlpluscmd = false;
        boolean isvalidplace = true;
        boolean waitingreturnforfloatdiv = false;
        boolean waitingreturnforsemicolon = false;
        boolean continuesqlplusatnewline = false;

        TSourceToken lct = null, prevst = null;

        TSourceToken asourcetoken, lcprevst;
        int yychar;

        asourcetoken = getanewsourcetoken();
        if (asourcetoken == null) return;
        yychar = asourcetoken.tokencode;

        while (yychar > 0) {
            sourcetokenlist.add(asourcetoken);
            switch (yychar) {
                case TBaseType.cmtdoublehyphen:
                case TBaseType.cmtslashstar:
                case TBaseType.lexspace: {
                    if (insqlpluscmd) {
                        asourcetoken.insqlpluscmd = true;
                    }
                    break;
                }
                case TBaseType.lexnewline: {
                    if (insqlpluscmd) {
                        asourcetoken.insqlpluscmd = true;
                        if (!continuesqlplusatnewline) {
                            insqlpluscmd = false;
                        }
                        continuesqlplusatnewline = false;
                    }
                    break;
                }
                case TBaseType.sqlpluscmd: {
                    insqlpluscmd = true;
                    isvalidplace = true;
                    break;
                }
                default: {
                    if (insqlpluscmd) {
                        asourcetoken.insqlpluscmd = true;
                        if (asourcetoken.toString().equalsIgnoreCase("-")) {
                            continuesqlplusatnewline = true;
                        }
                    }
                    break;
                }
            }

            lcprevst = asourcetoken;
            asourcetoken = getanewsourcetoken();
            if (asourcetoken == null) break;
            yychar = asourcetoken.tokencode;
        }
    }

    // ========== Raw Statement Extraction ==========

    /**
     * Extract raw SQL statements from token list.
     * <p>
     * Migrated from original TGSqlParser.doverticagetrawsqlstatements().
     * <p>
     * This method handles Vertica's specific statement delimiters and
     * PL/SQL block detection with BEGIN/END pairs.
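     * <p>Illustrative behavior: a plain statement such as {@code SELECT 1;} is
     * collected in {@code stsql} state and completed at its semicolon, while a
     * statement recognized as Vertica PL/SQL ({@code isverticaplsql()}) is collected
     * in {@code ststoredprocedure} state and is only completed at a semicolon once
     * every BEGIN/IF/CASE/LOOP opened inside it has been closed by a matching END.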
     */
    private void doverticagetrawsqlstatements(SqlParseResult.Builder builder) {
        int waitingEnd = 0;
        boolean foundEnd = false;

        if (TBaseType.assigned(sqlstatements)) sqlstatements.clear();
        if (!TBaseType.assigned(sourcetokenlist)) {
            builder.errorCode(-1);
            return;
        }

        gcurrentsqlstatement = null;
        EFindSqlStateType gst = EFindSqlStateType.stnormal;
        TSourceToken lcprevsolidtoken = null, ast = null;

        for (int i = 0; i < sourcetokenlist.size(); i++) {

            if ((ast != null) && (ast.issolidtoken()))
                lcprevsolidtoken = ast;

            ast = sourcetokenlist.get(i);
            sourcetokenlist.curpos = i;

            // Vertica-specific token adjustments
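            // Illustrative: in "SELECT DATE('2021-01-01')" the DATE token is followed by
            // '(' and is re-tagged as the Vertica DATE() function, while in
            // "SELECT greatest FROM t" GREATEST/LEAST is not followed by '(' and is
            // demoted to a plain identifier.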
            if (ast.tokencode == TBaseType.rrw_date) {
                TSourceToken st1 = ast.nextSolidToken();
                if (st1 != null) {
                    if (st1.tokencode == '(') {
                        ast.tokencode = TBaseType.rrw_vertica_date_function;
                    }
                }
            } else if ((ast.tokencode == TBaseType.rrw_vertica_greatest) ||
                       (ast.tokencode == TBaseType.rrw_vertica_least)) {
                TSourceToken st1 = ast.nextSolidToken();
                if (st1 != null) {
                    if (st1.tokencode != '(') {
                        ast.tokencode = TBaseType.ident;
                    }
                }
            }

            switch (gst) {
                case sterror: {
                    if (ast.tokentype == ETokenType.ttsemicolon) {
                        gcurrentsqlstatement.sourcetokenlist.add(ast);
                        onRawStatementComplete(parserContext, gcurrentsqlstatement, fparser, null, sqlstatements, false, builder);
                        gst = EFindSqlStateType.stnormal;
                    } else {
                        gcurrentsqlstatement.sourcetokenlist.add(ast);
                    }
                    break;
                } //sterror

                case stnormal: {
                    if ((ast.tokencode == TBaseType.cmtdoublehyphen)
                            || (ast.tokencode == TBaseType.cmtslashstar)
                            || (ast.tokencode == TBaseType.lexspace)
                            || (ast.tokencode == TBaseType.lexnewline)
                            || (ast.tokentype == ETokenType.ttsemicolon)) {
                        if (gcurrentsqlstatement != null) {
                            gcurrentsqlstatement.sourcetokenlist.add(ast);
                        }

                        if ((lcprevsolidtoken != null) && (ast.tokentype == ETokenType.ttsemicolon)) {
                            if (lcprevsolidtoken.tokentype == ETokenType.ttsemicolon) {
                                // consecutive semicolons (e.g. ";;"), treat the extra semicolon as a comment
                                ast.tokentype = ETokenType.ttsimplecomment;
                                ast.tokencode = TBaseType.cmtdoublehyphen;
                            }
                        }

                        continue;
                    }

                    if (ast.tokencode == TBaseType.sqlpluscmd) {
                        gst = EFindSqlStateType.stsqlplus;
                        gcurrentsqlstatement = new TSqlplusCmdStatement(vendor);
                        gcurrentsqlstatement.sourcetokenlist.add(ast);
                        continue;
                    }

                    // Find a token to start SQL or PL/SQL mode
                    gcurrentsqlstatement = sqlcmds.issql(ast, gst, gcurrentsqlstatement);

                    if (gcurrentsqlstatement != null) {
                        if (gcurrentsqlstatement.isverticaplsql()) {
                            gst = EFindSqlStateType.ststoredprocedure;
                            gcurrentsqlstatement.sourcetokenlist.add(ast);
                            foundEnd = true;
                            if ((ast.tokencode == TBaseType.rrw_begin)
                                    || (ast.tokencode == TBaseType.rrw_package)
                                    || (ast.searchToken(TBaseType.rrw_package, 4) != null)) {
                                waitingEnd = 1;
                            }
                        } else {
                            gst = EFindSqlStateType.stsql;
                            gcurrentsqlstatement.sourcetokenlist.add(ast);
                        }
                    } else {
                        // Error token found
                        this.syntaxErrors.add(new TSyntaxError(ast.getAstext(), ast.lineNo,
                                (ast.columnNo < 0 ? 0 : ast.columnNo), "Error when tokenlize",
                                EErrorType.spwarning, TBaseType.MSG_WARNING_ERROR_WHEN_TOKENIZE,
                                null, ast.posinlist));

                        ast.tokentype = ETokenType.tttokenlizererrortoken;
                        gst = EFindSqlStateType.sterror;

                        gcurrentsqlstatement = new TUnknownSqlStatement(vendor);
                        gcurrentsqlstatement.sqlstatementtype = ESqlStatementType.sstinvalid;
                        gcurrentsqlstatement.sourcetokenlist.add(ast);
                    }

                    break;
                } // stnormal

                case stsqlplus: {
                    if (ast.insqlpluscmd) {
                        gcurrentsqlstatement.sourcetokenlist.add(ast);
                    } else {
                        gst = EFindSqlStateType.stnormal; // this token must be a newline
                        gcurrentsqlstatement.sourcetokenlist.add(ast); // so add it here
                        onRawStatementComplete(parserContext, gcurrentsqlstatement, fparser, null, sqlstatements, false, builder);
                    }

                    break;
                }//case stsqlplus

                case stsql: {
                    if (ast.tokentype == ETokenType.ttsemicolon) {
                        gst = EFindSqlStateType.stnormal;
                        gcurrentsqlstatement.sourcetokenlist.add(ast);
                        gcurrentsqlstatement.semicolonended = ast;
                        onRawStatementComplete(parserContext, gcurrentsqlstatement, fparser, null, sqlstatements, false, builder);
                        continue;
                    }

                    if (sourcetokenlist.sqlplusaftercurtoken()) { // most probably a / (slash) command
                        gst = EFindSqlStateType.stnormal;
                        gcurrentsqlstatement.sourcetokenlist.add(ast);
                        onRawStatementComplete(parserContext, gcurrentsqlstatement, fparser, null, sqlstatements, false, builder);
                        continue;
                    }
                    gcurrentsqlstatement.sourcetokenlist.add(ast);
                    break;
                }//case stsql

                case ststoredprocedure: {
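                    // waitingEnd counts currently open BEGIN/IF/CASE/LOOP blocks and each END
                    // closes one. Illustrative: in "BEGIN IF x THEN y := 1; END IF; END;" the
                    // count goes 1 -> 2 -> 1 -> 0, and the statement is only completed at a
                    // semicolon once the count is zero and an END has been seen (foundEnd).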
                    if (ast.tokencode == TBaseType.rrw_begin) {
                        waitingEnd++;
                        foundEnd = false;
                    } else if (ast.tokencode == TBaseType.rrw_if) {
                        if (ast.searchToken(TBaseType.rrw_end, -1) == null) {
                            // this IF does not follow an END, so it is not part of END IF
                            waitingEnd++;
                        }
                    } else if (ast.tokencode == TBaseType.rrw_case) {
                        if (ast.searchToken(TBaseType.rrw_end, -1) == null) {
                            // this CASE does not follow an END, so it is not part of END CASE
                            waitingEnd++;
                        }
                    } else if (ast.tokencode == TBaseType.rrw_loop) {
                        if (ast.searchToken(TBaseType.rrw_end, -1) == null) {
                            // this LOOP does not follow an END, so it is not part of END LOOP
                            waitingEnd++;
                        }
                    } else if (ast.tokencode == TBaseType.rrw_end) {
                        foundEnd = true;
                        waitingEnd--;
                        if (waitingEnd < 0) {
                            waitingEnd = 0;
                        }
                    }

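                    // SQL*Plus-style terminators: a "/" recognized as a sqlpluscmd token, or a
                    // single "." on a line of its own, ends the current block here; the
                    // terminator itself is then emitted as a separate TSqlplusCmdStatement.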
                    if ((ast.tokentype == ETokenType.ttslash) && (ast.tokencode == TBaseType.sqlpluscmd)) {
                        // Terminator token
                        ast.tokenstatus = ETokenStatus.tsignorebyyacc;
                        gst = EFindSqlStateType.stnormal;
                        onRawStatementComplete(parserContext, gcurrentsqlstatement, fparser, null, sqlstatements, false, builder);

                        // emit the / terminator as its own sqlplus cmd statement
                        gcurrentsqlstatement = new TSqlplusCmdStatement(vendor);
                        gcurrentsqlstatement.sourcetokenlist.add(ast);
                        onRawStatementComplete(parserContext, gcurrentsqlstatement, fparser, null, sqlstatements, false, builder);
                    } else if ((ast.tokentype == ETokenType.ttperiod) &&
                               (sourcetokenlist.returnaftercurtoken(false)) &&
                               (sourcetokenlist.returnbeforecurtoken(false))) {
                        // a single dot on its own line
                        ast.tokenstatus = ETokenStatus.tsignorebyyacc;
                        gst = EFindSqlStateType.stnormal;
                        onRawStatementComplete(parserContext, gcurrentsqlstatement, fparser, null, sqlstatements, false, builder);

                        // emit the period terminator as its own sqlplus cmd statement
                        gcurrentsqlstatement = new TSqlplusCmdStatement(vendor);
                        gcurrentsqlstatement.sourcetokenlist.add(ast);
                        onRawStatementComplete(parserContext, gcurrentsqlstatement, fparser, null, sqlstatements, false, builder);
                    } else {
                        gcurrentsqlstatement.sourcetokenlist.add(ast);
                        if ((ast.tokentype == ETokenType.ttsemicolon) && (waitingEnd == 0) &&
                            (foundEnd) && (gcurrentsqlstatement.VerticaStatementCanBeSeparatedByBeginEndPair())) {
                            gst = EFindSqlStateType.stnormal;
                            onRawStatementComplete(parserContext, gcurrentsqlstatement, fparser, null, sqlstatements, false, builder);
                        }
                    }

                    if (ast.tokencode == TBaseType.sqlpluscmd) {
                        // Change the tokencode back to a keyword or identifier: a sqlplus cmd
                        // inside a SQL statement (almost always a PL/SQL block) is not really a sqlplus cmd
                        int m = flexer.getkeywordvalue(ast.getAstext());
                        if (m != 0) {
                            ast.tokencode = m;
                        } else {
                            ast.tokencode = TBaseType.ident;
                        }
                    }

                    break;
                } //ststoredprocedure
            } //switch
        }//for

        // Complete the last statement if the input did not end with a delimiter
        if ((gcurrentsqlstatement != null) &&
                ((gst == EFindSqlStateType.stsqlplus) || (gst == EFindSqlStateType.stsql) ||
                 (gst == EFindSqlStateType.ststoredprocedure) ||
                 (gst == EFindSqlStateType.sterror))) {
            onRawStatementComplete(parserContext, gcurrentsqlstatement, fparser, null, sqlstatements, true, builder);
        }

        // Set results in builder
        builder.sqlStatements(this.sqlstatements);
        builder.errorCode(syntaxErrors.size());
    }

    @Override
    public String toString() {
        return "VerticaSqlParser{vendor=" + vendor + "}";
    }
}