Skip to content

Commit

Permalink
feat: use dialect when tokenizing (apache#30614)
Browse files · Browse the repository at this point in the history
  • Loading branch information
betodealmeida authored Oct 15, 2024
1 parent d6b2e86 commit 4cac7fe
Show file tree
Hide file tree
Showing 2 changed files with 3 additions and 3 deletions.
4 changes: 2 additions & 2 deletions superset/sql/parse.py
Original file line number Diff line number Diff line change
Expand Up @@ -276,7 +276,7 @@ def split_script(
script: str,
engine: str,
) -> list[SQLStatement]:
if engine in SQLGLOT_DIALECTS:
if dialect := SQLGLOT_DIALECTS.get(engine):
try:
return [
cls(ast.sql(), engine, ast)
Expand All @@ -297,7 +297,7 @@ def split_script(
remainder = script

try:
tokens = sqlglot.tokenize(script)
tokens = sqlglot.tokenize(script, dialect)
except sqlglot.errors.TokenError as ex:
raise SupersetParseError(
script,
Expand Down
2 changes: 1 addition & 1 deletion tests/unit_tests/sql_parse_tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -281,7 +281,7 @@ def test_extract_tables_illdefined() -> None:
extract_tables('SELECT * FROM "tbname')
assert (
str(excinfo.value)
== "You may have an error in your SQL statement. Unable to parse script"
== "You may have an error in your SQL statement. Unable to tokenize script"
)

# odd edge case that works
Expand Down

0 comments on commit 4cac7fe

Please sign in to comment.