From 58a65feadf048c6164f6c1cf05690bee412a4630 Mon Sep 17 00:00:00 2001 From: JackWang032 <2522134117@qq.com> Date: Thu, 2 Jan 2025 19:26:22 +0800 Subject: [PATCH] feat: add sqlSplitStrategy options --- src/parser/common/basicSQL.ts | 21 ++++++++-- src/parser/common/semanticContextCollector.ts | 38 +++++++++++++++++-- src/parser/common/types.ts | 24 ++++++++++++ src/parser/flink/index.ts | 13 +++++-- src/parser/hive/index.ts | 13 +++++-- src/parser/impala/index.ts | 13 +++++-- src/parser/mysql/index.ts | 13 +++++-- src/parser/postgresql/index.ts | 13 +++++-- src/parser/spark/index.ts | 13 +++++-- src/parser/trino/index.ts | 13 +++++-- .../contextCollect/fixtures/semantic.sql | 3 +- .../semanticContextCollector.test.ts | 30 ++++++++++++++- .../hive/contextCollect/fixtures/semantic.sql | 5 ++- .../semanticContextCollector.test.ts | 28 ++++++++++++++ .../contextCollect/fixtures/semantic.sql | 5 ++- .../semanticContextCollector.test.ts | 28 ++++++++++++++ .../contextCollect/fixtures/semantic.sql | 5 ++- .../semanticContextCollector.test.ts | 28 ++++++++++++++ .../contextCollect/fixtures/semantic.sql | 5 ++- .../semanticContextCollector.test.ts | 28 ++++++++++++++ .../contextCollect/fixtures/semantic.sql | 5 ++- .../semanticContextCollector.test.ts | 28 ++++++++++++++ .../contextCollect/fixtures/semantic.sql | 5 ++- .../semanticContextCollector.test.ts | 28 ++++++++++++++ 24 files changed, 367 insertions(+), 38 deletions(-) diff --git a/src/parser/common/basicSQL.ts b/src/parser/common/basicSQL.ts index 785a9aae..9c633fc1 100644 --- a/src/parser/common/basicSQL.ts +++ b/src/parser/common/basicSQL.ts @@ -14,7 +14,13 @@ import { CandidatesCollection, CodeCompletionCore } from 'antlr4-c3'; import { SQLParserBase } from '../../lib/SQLParserBase'; import { findCaretTokenIndex } from './findCaretTokenIndex'; import { ctxToText, tokenToWord, WordRange, TextSlice } from './textAndWord'; -import { CaretPosition, LOCALE_TYPE, Suggestions, SyntaxSuggestion } from './types'; +import { + CaretPosition, + LOCALE_TYPE, + SemanticCollectOptions, + Suggestions, + SyntaxSuggestion, +} from './types'; import { ParseError, ErrorListener } from './parseErrorListener'; import { ErrorStrategy } from './errorStrategy'; import type { SplitListener } from './splitListener'; @@ -101,7 +107,8 @@ export abstract class BasicSQL< protected abstract createSemanticContextCollector( input: string, caretPosition: CaretPosition, - allTokens: Token[] + allTokens: Token[], + options?: SemanticCollectOptions ): SemanticContextCollector; /** @@ -466,15 +473,21 @@ export abstract class BasicSQL< * Get semantic context infos * @param input source string * @param caretPosition caret position, such as cursor position + * @param options semantic context options * @returns analyzed semantic context */ - public getSemanticContextAtCaretPosition(input: string, caretPosition: CaretPosition) { + public getSemanticContextAtCaretPosition( + input: string, + caretPosition: CaretPosition, + options?: SemanticCollectOptions + ) { const allTokens = this.getAllTokens(input); const parseTree = this.parseWithCache(input); const statementContextListener = this.createSemanticContextCollector( input, caretPosition, - allTokens + allTokens, + options ); this.listen(statementContextListener, parseTree); diff --git a/src/parser/common/semanticContextCollector.ts b/src/parser/common/semanticContextCollector.ts index e9c45531..5f9b49e7 100644 --- a/src/parser/common/semanticContextCollector.ts +++ b/src/parser/common/semanticContextCollector.ts @@ -1,11 +1,21 
@@ import { ErrorNode, ParserRuleContext, TerminalNode, Token } from 'antlr4ng'; import { findCaretTokenIndex } from '../common/findCaretTokenIndex'; -import { CaretPosition, SemanticContext } from '../common/types'; +import { + CaretPosition, + SemanticCollectOptions, + SemanticContext, + SqlSplitStrategy, +} from '../common/types'; export const SQL_SPLIT_SYMBOL_TEXT = ';'; abstract class SemanticContextCollector { - constructor(_input: string, caretPosition: CaretPosition, allTokens: Token[]) { + constructor( + _input: string, + caretPosition: CaretPosition, + allTokens: Token[], + options?: SemanticCollectOptions + ) { // If caretPosition token is whiteSpace, tokenIndex may be undefined. const tokenIndex = findCaretTokenIndex(caretPosition, allTokens); @@ -13,6 +23,10 @@ abstract class SemanticContextCollector { this._tokenIndex = tokenIndex; } this._allTokens = allTokens; + this.options = { + ...this.options, + ...options, + }; if (allTokens?.length) { let i = tokenIndex ? tokenIndex - 1 : allTokens.length - 1; @@ -50,6 +64,10 @@ abstract class SemanticContextCollector { } } + public readonly options: SemanticCollectOptions = { + sqlSplitStrategy: SqlSplitStrategy.LOOSE, + }; + private _tokenIndex: number; private _allTokens: Token[] = []; @@ -117,6 +135,8 @@ abstract class SemanticContextCollector { * It should be called in each language's own `enterStatement`. */ protected visitStatement(ctx: ParserRuleContext) { + if (this.options.sqlSplitStrategy === SqlSplitStrategy.STRICT) return; + const isWhiteSpaceToken = this._tokenIndex === undefined || this._allTokens[this._tokenIndex]?.type === this.getWhiteSpaceRuleType() || @@ -135,7 +155,12 @@ abstract class SemanticContextCollector { * Uncomplete keyword will be error node */ visitErrorNode(node: ErrorNode): void { - if (node.symbol.tokenIndex !== this._tokenIndex || this._isNewStatement) return; + if ( + node.symbol.tokenIndex !== this._tokenIndex || + this._isNewStatement || + this.options.sqlSplitStrategy === SqlSplitStrategy.STRICT + ) + return; let parent: ParserRuleContext | null = node.parent as ParserRuleContext; let currentNode: TerminalNode | ParserRuleContext = node; @@ -188,7 +213,12 @@ abstract class SemanticContextCollector { } visitTerminal(node: TerminalNode): void { - if (node.symbol.tokenIndex !== this._tokenIndex || this._isNewStatement) return; + if ( + node.symbol.tokenIndex !== this._tokenIndex || + this._isNewStatement || + this.options.sqlSplitStrategy === SqlSplitStrategy.STRICT + ) + return; let currentNode: TerminalNode | ParserRuleContext = node; let parent = node.parent as ParserRuleContext | null; diff --git a/src/parser/common/types.ts index ba80214c..75ce221b 100644 --- a/src/parser/common/types.ts +++ b/src/parser/common/types.ts @@ -73,3 +73,27 @@ export type LOCALE_TYPE = 'zh_CN' | 'en_US'; export interface SemanticContext { isNewStatement: boolean; } + +export enum SqlSplitStrategy { + /** A statement ends only at a semicolon (`;`) */ + STRICT, + /** Statements are split according to the parse tree */ + LOOSE, +} + +export interface SemanticCollectOptions { + /** + * `sqlSplitStrategy` affects the value of `isNewStatement`. + * + * For example: + * + * Given the SQL "select id from t1 create" + * + * - `SqlSplitStrategy.STRICT`: the split symbol `;` is missing after the select statement, so the whole input is treated as a single statement and `isNewStatement` is false + * + * - `SqlSplitStrategy.LOOSE`: the parse tree splits the input into the two statements "select id from t1" and "create", so 
`isNewStatement` is true + * + * @default SqlSplitStrategy.LOOSE + */ + sqlSplitStrategy?: SqlSplitStrategy; +} diff --git a/src/parser/flink/index.ts index ee5ee646..165982b8 100644 --- a/src/parser/flink/index.ts +++ b/src/parser/flink/index.ts @@ -2,7 +2,13 @@ import { CharStream, CommonTokenStream, Token } from 'antlr4ng'; import { CandidatesCollection } from 'antlr4-c3'; import { FlinkSqlLexer } from '../../lib/flink/FlinkSqlLexer'; import { FlinkSqlParser, ProgramContext } from '../../lib/flink/FlinkSqlParser'; -import { CaretPosition, EntityContextType, Suggestions, SyntaxSuggestion } from '../common/types'; +import { + CaretPosition, + EntityContextType, + SemanticCollectOptions, + Suggestions, + SyntaxSuggestion, +} from '../common/types'; import { BasicSQL } from '../common/basicSQL'; import { StmtContextType } from '../common/entityCollector'; import { FlinkSqlSplitListener } from './flinkSplitListener'; @@ -52,9 +58,10 @@ export class FlinkSQL extends BasicSQL { protected createSemanticContextCollector( input: string, caretPosition: CaretPosition, - allTokens: Token[] + allTokens: Token[], + options?: SemanticCollectOptions ) { - return new MySqlSemanticContextCollector(input, caretPosition, allTokens); + return new MySqlSemanticContextCollector(input, caretPosition, allTokens, options); } protected processCandidates( diff --git a/src/parser/postgresql/index.ts index 1bca00c0..189d2ef2 100644 --- a/src/parser/postgresql/index.ts +++ b/src/parser/postgresql/index.ts @@ -3,7 +3,13 @@ import { CharStream, CommonTokenStream, Token } from 'antlr4ng'; import { PostgreSqlLexer } from '../../lib/postgresql/PostgreSqlLexer'; import { PostgreSqlParser, ProgramContext } from '../../lib/postgresql/PostgreSqlParser'; -import { CaretPosition, EntityContextType, Suggestions, SyntaxSuggestion } from '../common/types'; +import { + CaretPosition, + EntityContextType, + SemanticCollectOptions, + Suggestions, + SyntaxSuggestion, +} from '../common/types'; import { BasicSQL } from '../common/basicSQL'; import { StmtContextType } from '../common/entityCollector'; import { PostgreSqlEntityCollector } from './postgreEntityCollector'; @@ -56,9 +62,10 @@ export class PostgreSQL extends BasicSQL = new Set([ diff --git a/test/parser/flink/contextCollect/fixtures/semantic.sql b/test/parser/flink/contextCollect/fixtures/semantic.sql index 6af23f59..f11cdb2d 100644 --- a/test/parser/flink/contextCollect/fixtures/semantic.sql +++ b/test/parser/flink/contextCollect/fixtures/semantic.sql @@ -28,5 +28,4 @@ SEL INSERT INTO t1 VALUES(1); CREATE TABLE a1(id INT) WITH ('connector' = 'kafka') -CREATE VIEW -INSERT INTO t1 VALUES(1); \ No newline at end of file +CREATE \ No newline at end of file diff --git a/test/parser/flink/contextCollect/semanticContextCollector.test.ts b/test/parser/flink/contextCollect/semanticContextCollector.test.ts index 5f694b96..024eca23 100644 --- a/test/parser/flink/contextCollect/semanticContextCollector.test.ts +++ b/test/parser/flink/contextCollect/semanticContextCollector.test.ts @@ -1,5 +1,6 @@ import fs from 'fs'; import path from 'path'; +import { SqlSplitStrategy } from 'src/parser/common/types'; import { FlinkSQL } from 'src/parser/flink'; import { readSQLByRange } from 'test/helper'; @@ -26,7 +27,7 @@ describe('Flink semantic context collector tests', () => { }); test('not new statement with uncomplete keyword', () => { - const sql = readSQLByRange({ sql: text }, { startLine: 5, endLine: 19 }); + const sql = 
readSQLByRange({ sql: text }, { startLine: 5, endLine: 5 }); const { isNewStatement } = flinkSql.getSemanticContextAtCaretPosition(sql, { lineNumber: 1, column: 22, @@ -122,4 +123,31 @@ describe('Flink semantic context collector tests', () => { }); expect(isNewStatement).toBeFalsy(); }); + + test('test sqlSplitStrategy', () => { + const sql = readSQLByRange({ sql: text }, { startLine: 30, endLine: 31 }); + const { isNewStatement: isNewStatement1 } = flinkSql.getSemanticContextAtCaretPosition( + sql, + { + lineNumber: 2, + column: 7, + }, + { + sqlSplitStrategy: SqlSplitStrategy.LOOSE, + } + ); + expect(isNewStatement1).toBeTruthy(); + + const { isNewStatement: isNewStatement2 } = flinkSql.getSemanticContextAtCaretPosition( + sql, + { + lineNumber: 2, + column: 7, + }, + { + sqlSplitStrategy: SqlSplitStrategy.STRICT, + } + ); + expect(isNewStatement2).toBeFalsy(); + }); }); diff --git a/test/parser/hive/contextCollect/fixtures/semantic.sql b/test/parser/hive/contextCollect/fixtures/semantic.sql index 49e442d2..ab601ff8 100644 --- a/test/parser/hive/contextCollect/fixtures/semantic.sql +++ b/test/parser/hive/contextCollect/fixtures/semantic.sql @@ -25,4 +25,7 @@ INSERT INTO t1 VALUES(1); CREATE TABLE a1(id INT) CREATE VIEW -INSERT INTO t1 VALUES(1); \ No newline at end of file +INSERT INTO t1 VALUES(1); + +CREATE TABLE a1(id INT) +CREATE VIEW diff --git a/test/parser/hive/contextCollect/semanticContextCollector.test.ts b/test/parser/hive/contextCollect/semanticContextCollector.test.ts index 767428cf..d56fd1bb 100644 --- a/test/parser/hive/contextCollect/semanticContextCollector.test.ts +++ b/test/parser/hive/contextCollect/semanticContextCollector.test.ts @@ -1,5 +1,6 @@ import fs from 'fs'; import path from 'path'; +import { SqlSplitStrategy } from 'src/parser/common/types'; import { HiveSQL } from 'src/parser/hive'; import { readSQLByRange } from 'test/helper'; @@ -122,4 +123,31 @@ describe('Hive semantic context collector tests', () => { }); expect(isNewStatement).toBeFalsy(); }); + + test('test sqlSplitStrategy', () => { + const sql = readSQLByRange({ sql: text }, { startLine: 30, endLine: 31 }); + const { isNewStatement: isNewStatement1 } = hiveSql.getSemanticContextAtCaretPosition( + sql, + { + lineNumber: 2, + column: 7, + }, + { + sqlSplitStrategy: SqlSplitStrategy.LOOSE, + } + ); + expect(isNewStatement1).toBeTruthy(); + + const { isNewStatement: isNewStatement2 } = hiveSql.getSemanticContextAtCaretPosition( + sql, + { + lineNumber: 2, + column: 7, + }, + { + sqlSplitStrategy: SqlSplitStrategy.STRICT, + } + ); + expect(isNewStatement2).toBeFalsy(); + }); }); diff --git a/test/parser/impala/contextCollect/fixtures/semantic.sql b/test/parser/impala/contextCollect/fixtures/semantic.sql index 52335b20..2ecb93a4 100644 --- a/test/parser/impala/contextCollect/fixtures/semantic.sql +++ b/test/parser/impala/contextCollect/fixtures/semantic.sql @@ -25,4 +25,7 @@ INSERT INTO t1 VALUES(1); CREATE TABLE a1(id INT) CREATE VIEW -INSERT INTO t1 VALUES(1); \ No newline at end of file +INSERT INTO t1 VALUES(1); + +CREATE TABLE a1(id INT) +CREATE VIEW \ No newline at end of file diff --git a/test/parser/impala/contextCollect/semanticContextCollector.test.ts b/test/parser/impala/contextCollect/semanticContextCollector.test.ts index 59e67498..d6f7d724 100644 --- a/test/parser/impala/contextCollect/semanticContextCollector.test.ts +++ b/test/parser/impala/contextCollect/semanticContextCollector.test.ts @@ -1,5 +1,6 @@ import fs from 'fs'; import path from 'path'; +import { SqlSplitStrategy } from 
'src/parser/common/types'; import { ImpalaSQL } from 'src/parser/impala'; import { readSQLByRange } from 'test/helper'; @@ -122,4 +123,31 @@ describe('Impala semantic context collector tests', () => { }); expect(isNewStatement).toBeFalsy(); }); + + test('test sqlSplitStrategy', () => { + const sql = readSQLByRange({ sql: text }, { startLine: 30, endLine: 31 }); + const { isNewStatement: isNewStatement1 } = impalaSql.getSemanticContextAtCaretPosition( + sql, + { + lineNumber: 2, + column: 7, + }, + { + sqlSplitStrategy: SqlSplitStrategy.LOOSE, + } + ); + expect(isNewStatement1).toBeTruthy(); + + const { isNewStatement: isNewStatement2 } = impalaSql.getSemanticContextAtCaretPosition( + sql, + { + lineNumber: 2, + column: 7, + }, + { + sqlSplitStrategy: SqlSplitStrategy.STRICT, + } + ); + expect(isNewStatement2).toBeFalsy(); + }); }); diff --git a/test/parser/mysql/contextCollect/fixtures/semantic.sql b/test/parser/mysql/contextCollect/fixtures/semantic.sql index 093c39a9..d7753857 100644 --- a/test/parser/mysql/contextCollect/fixtures/semantic.sql +++ b/test/parser/mysql/contextCollect/fixtures/semantic.sql @@ -25,4 +25,7 @@ INSERT INTO t1 VALUES(1); CREATE TABLE a1(id INT) CREATE VIEW -INSERT INTO t1 VALUES(1); \ No newline at end of file +INSERT INTO t1 VALUES(1); + +CREATE TABLE a1(id INT) +CREATE VIEW \ No newline at end of file diff --git a/test/parser/mysql/contextCollect/semanticContextCollector.test.ts b/test/parser/mysql/contextCollect/semanticContextCollector.test.ts index 367491d5..e84e8aee 100644 --- a/test/parser/mysql/contextCollect/semanticContextCollector.test.ts +++ b/test/parser/mysql/contextCollect/semanticContextCollector.test.ts @@ -1,5 +1,6 @@ import fs from 'fs'; import path from 'path'; +import { SqlSplitStrategy } from 'src/parser/common/types'; import { MySQL } from 'src/parser/mysql'; import { readSQLByRange } from 'test/helper'; @@ -122,4 +123,31 @@ describe('MySQL semantic context collector tests', () => { }); expect(isNewStatement).toBeFalsy(); }); + + test('test sqlSplitStrategy', () => { + const sql = readSQLByRange({ sql: text }, { startLine: 30, endLine: 31 }); + const { isNewStatement: isNewStatement1 } = mySQL.getSemanticContextAtCaretPosition( + sql, + { + lineNumber: 2, + column: 7, + }, + { + sqlSplitStrategy: SqlSplitStrategy.LOOSE, + } + ); + expect(isNewStatement1).toBeTruthy(); + + const { isNewStatement: isNewStatement2 } = mySQL.getSemanticContextAtCaretPosition( + sql, + { + lineNumber: 2, + column: 7, + }, + { + sqlSplitStrategy: SqlSplitStrategy.STRICT, + } + ); + expect(isNewStatement2).toBeFalsy(); + }); }); diff --git a/test/parser/postgresql/contextCollect/fixtures/semantic.sql b/test/parser/postgresql/contextCollect/fixtures/semantic.sql index 514c3ee2..d1b4de02 100644 --- a/test/parser/postgresql/contextCollect/fixtures/semantic.sql +++ b/test/parser/postgresql/contextCollect/fixtures/semantic.sql @@ -25,4 +25,7 @@ INSERT INTO t1 VALUES(1); CREATE TABLE a1(id INT) CREATE VIEW -INSERT INTO t1 VALUES(1); \ No newline at end of file +INSERT INTO t1 VALUES(1); + +CREATE TABLE a1(id INT) +CREATE VIEW \ No newline at end of file diff --git a/test/parser/postgresql/contextCollect/semanticContextCollector.test.ts b/test/parser/postgresql/contextCollect/semanticContextCollector.test.ts index 3ecdea50..8e18bbd8 100644 --- a/test/parser/postgresql/contextCollect/semanticContextCollector.test.ts +++ b/test/parser/postgresql/contextCollect/semanticContextCollector.test.ts @@ -1,5 +1,6 @@ import fs from 'fs'; import path from 'path'; +import { 
SqlSplitStrategy } from 'src/parser/common/types'; import { PostgreSQL } from 'src/parser/postgresql'; import { readSQLByRange } from 'test/helper'; @@ -122,4 +123,31 @@ describe('PostgreSQL semantic context collector tests', () => { }); expect(isNewStatement).toBeFalsy(); }); + + test('test sqlSplitStrategy', () => { + const sql = readSQLByRange({ sql: text }, { startLine: 30, endLine: 31 }); + const { isNewStatement: isNewStatement1 } = postgreSql.getSemanticContextAtCaretPosition( + sql, + { + lineNumber: 2, + column: 7, + }, + { + sqlSplitStrategy: SqlSplitStrategy.LOOSE, + } + ); + expect(isNewStatement1).toBeTruthy(); + + const { isNewStatement: isNewStatement2 } = postgreSql.getSemanticContextAtCaretPosition( + sql, + { + lineNumber: 2, + column: 7, + }, + { + sqlSplitStrategy: SqlSplitStrategy.STRICT, + } + ); + expect(isNewStatement2).toBeFalsy(); + }); }); diff --git a/test/parser/spark/contextCollect/fixtures/semantic.sql b/test/parser/spark/contextCollect/fixtures/semantic.sql index 49e442d2..0b72ff00 100644 --- a/test/parser/spark/contextCollect/fixtures/semantic.sql +++ b/test/parser/spark/contextCollect/fixtures/semantic.sql @@ -25,4 +25,7 @@ INSERT INTO t1 VALUES(1); CREATE TABLE a1(id INT) CREATE VIEW -INSERT INTO t1 VALUES(1); \ No newline at end of file +INSERT INTO t1 VALUES(1); + +CREATE TABLE a1(id INT) +CREATE VIEW \ No newline at end of file diff --git a/test/parser/spark/contextCollect/semanticContextCollector.test.ts b/test/parser/spark/contextCollect/semanticContextCollector.test.ts index 946773a5..086aeff9 100644 --- a/test/parser/spark/contextCollect/semanticContextCollector.test.ts +++ b/test/parser/spark/contextCollect/semanticContextCollector.test.ts @@ -1,5 +1,6 @@ import fs from 'fs'; import path from 'path'; +import { SqlSplitStrategy } from 'src/parser/common/types'; import { SparkSQL } from 'src/parser/spark'; import { readSQLByRange } from 'test/helper'; @@ -122,4 +123,31 @@ describe('Spark semantic context collector tests', () => { }); expect(isNewStatement).toBeFalsy(); }); + + test('test sqlSplitStrategy', () => { + const sql = readSQLByRange({ sql: text }, { startLine: 30, endLine: 31 }); + const { isNewStatement: isNewStatement1 } = sparkSql.getSemanticContextAtCaretPosition( + sql, + { + lineNumber: 2, + column: 7, + }, + { + sqlSplitStrategy: SqlSplitStrategy.LOOSE, + } + ); + expect(isNewStatement1).toBeTruthy(); + + const { isNewStatement: isNewStatement2 } = sparkSql.getSemanticContextAtCaretPosition( + sql, + { + lineNumber: 2, + column: 7, + }, + { + sqlSplitStrategy: SqlSplitStrategy.STRICT, + } + ); + expect(isNewStatement2).toBeFalsy(); + }); }); diff --git a/test/parser/trino/contextCollect/fixtures/semantic.sql b/test/parser/trino/contextCollect/fixtures/semantic.sql index 49e442d2..0b72ff00 100644 --- a/test/parser/trino/contextCollect/fixtures/semantic.sql +++ b/test/parser/trino/contextCollect/fixtures/semantic.sql @@ -25,4 +25,7 @@ INSERT INTO t1 VALUES(1); CREATE TABLE a1(id INT) CREATE VIEW -INSERT INTO t1 VALUES(1); \ No newline at end of file +INSERT INTO t1 VALUES(1); + +CREATE TABLE a1(id INT) +CREATE VIEW \ No newline at end of file diff --git a/test/parser/trino/contextCollect/semanticContextCollector.test.ts b/test/parser/trino/contextCollect/semanticContextCollector.test.ts index a2351b84..d2f3f18b 100644 --- a/test/parser/trino/contextCollect/semanticContextCollector.test.ts +++ b/test/parser/trino/contextCollect/semanticContextCollector.test.ts @@ -1,5 +1,6 @@ import fs from 'fs'; import path from 'path'; +import { 
SqlSplitStrategy } from 'src/parser/common/types'; import { TrinoSQL } from 'src/parser/trino'; import { readSQLByRange } from 'test/helper'; @@ -122,4 +123,31 @@ describe('Trino semantic context collector tests', () => { }); expect(isNewStatement).toBeFalsy(); }); + + test('test sqlSplitStrategy', () => { + const sql = readSQLByRange({ sql: text }, { startLine: 30, endLine: 31 }); + const { isNewStatement: isNewStatement1 } = trinoSql.getSemanticContextAtCaretPosition( + sql, + { + lineNumber: 2, + column: 7, + }, + { + sqlSplitStrategy: SqlSplitStrategy.LOOSE, + } + ); + expect(isNewStatement1).toBeTruthy(); + + const { isNewStatement: isNewStatement2 } = trinoSql.getSemanticContextAtCaretPosition( + sql, + { + lineNumber: 2, + column: 7, + }, + { + sqlSplitStrategy: SqlSplitStrategy.STRICT, + } + ); + expect(isNewStatement2).toBeFalsy(); + }); });
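Usage sketch (illustrative, not part of the patch): the snippet below mirrors the MySQL test case added above; the `src/...` import paths follow the test setup's path aliases and may differ for package consumers, so treat them as an assumption.

```ts
import { MySQL } from 'src/parser/mysql';
import { SqlSplitStrategy } from 'src/parser/common/types';

const mysql = new MySQL();

// No ';' separates the two CREATE clauses; the caret sits right after the
// second CREATE keyword (line 2, column 7).
const sql = 'CREATE TABLE a1(id INT)\nCREATE VIEW';
const caret = { lineNumber: 2, column: 7 };

// LOOSE (the collector's default): the parse tree splits the input into two
// statements, so the caret is treated as starting a new statement.
const loose = mysql.getSemanticContextAtCaretPosition(sql, caret, {
    sqlSplitStrategy: SqlSplitStrategy.LOOSE,
});
console.log(loose.isNewStatement); // true

// STRICT: no ';' ends the first statement, so the whole input counts as one
// statement and the caret does not start a new one.
const strict = mysql.getSemanticContextAtCaretPosition(sql, caret, {
    sqlSplitStrategy: SqlSplitStrategy.STRICT,
});
console.log(strict.isNewStatement); // false
```

Omitting the third argument keeps the previous behavior, since the collector defaults to `SqlSplitStrategy.LOOSE`.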