Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

support advanced MSSQL Dialect table notation (default schema, delimiters, linked server) #529

Closed
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
simplify code
  • Loading branch information
Santos, Tyler (Boston) committed Oct 23, 2024
commit 760e9d48427faa2361996926dc9b5a9cf1786e15
15 changes: 5 additions & 10 deletions sql_metadata/parser.py
Original file line number Diff line number Diff line change
@@ -144,8 +144,7 @@ def tokens(self) -> List[SQLToken]: # noqa: C901
combine_flag = False
for index, tok in enumerate(self.non_empty_tokens):
# combine dot separated identifiers
#import ipdb; ipdb.set_trace()
if not tok.is_keyword and self._is_token_part_of_complex_identifier(token=tok, index=index):
if self._is_token_part_of_complex_identifier(token=tok, index=index):
combine_flag = True
continue
token = SQLToken(
@@ -351,7 +350,6 @@ def tables(self) -> List[str]:
with_names = self.with_names

for token in self._not_parsed_tokens:
#import ipdb; ipdb.set_trace()
if token.is_potential_table_name:
if (
token.is_alias_of_table_or_alias_of_subquery
@@ -1013,6 +1011,8 @@ def _is_token_part_of_complex_identifier(
Checks if token is a part of complex identifier like
<schema>.<table>.<column> or <table/sub_query>.<column>
"""
if token.is_keyword:
return False
return str(token) == "." or (
index + 1 < self.tokens_length
and str(self.non_empty_tokens[index + 1]) == "."
@@ -1033,18 +1033,13 @@ def _combine_tokens(self, index: int, value: str) -> Tuple[str, bool]:
"""
Checks if complex identifier is longer and follows back until it's finished
"""
keep_going = False
if index > 1:
prev_value = self.non_empty_tokens[index - 1]
if prev_value.is_keyword:
if not self._is_token_part_of_complex_identifier(prev_value, index-1):
return value, False
if str(prev_value) == ".":
keep_going = True
if str(self.non_empty_tokens[index - 2]) == ".":
keep_going = True
prev_value = str(prev_value).strip('`')
value = f"{prev_value}{value}"
return value, keep_going
return value, True
return value, False

def _get_sqlparse_tokens(self, parsed) -> None: