Skip to content

Commit

Permalink
Correctly handle #directives when +/: are not operators
Browse files Browse the repository at this point in the history
  • Loading branch information
TeamSpen210 committed Jun 27, 2024
1 parent 245a0a0 commit ece8055
Show file tree
Hide file tree
Showing 3 changed files with 18 additions and 4 deletions.
5 changes: 3 additions & 2 deletions src/srctools/_tokenizer.pyx
Original file line number Diff line number Diff line change
Expand Up @@ -872,8 +872,9 @@ cdef class Tokenizer(BaseTokenizer):
return DIRECTIVE, self.buf_get_text().casefold()

elif (
next_char in BARE_DISALLOWED or
(next_char == b':' and self.flags.colon_operator)
next_char in BARE_DISALLOWED
or (next_char == b':' and self.flags.colon_operator)
or (next_char == b'+' and self.flags.plus_operator)
):
# We need to repeat this, so we return the ending
# char next. If it's not allowed, that'll error on
Expand Down
6 changes: 5 additions & 1 deletion src/srctools/tokenizer.py
Original file line number Diff line number Diff line change
Expand Up @@ -586,7 +586,11 @@ def _get_token(self) -> Tuple[Token, str]:
value_chars = []
while True:
next_char = self._next_char()
if next_char in BARE_DISALLOWED:
if (
next_char in BARE_DISALLOWED
or (next_char == ':' and self.colon_operator)
or (next_char == '+' and self.plus_operator)
):
# We need to repeat this, so we return the ending char next.
# If it's not allowed, that'll error on next call.
self._char_index -= 1
Expand Down
11 changes: 10 additions & 1 deletion tests/test_tokenizer.py
Original file line number Diff line number Diff line change
Expand Up @@ -172,7 +172,7 @@ def check_tokens(
assert isinstance(token, tuple)
if isinstance(comp_token, tuple):
comp_type, comp_value = comp_token
assert comp_type is token[0] and comp_value == token[1], (
assert token[0] is comp_type and token[1] == comp_value, (
f"got {token[0]}({token[1]!r}), "
f"expected {comp_type}({comp_value!r}) @ pos {i}={tokens[i - 2: i + 1]}"
)
Expand Down Expand Up @@ -623,6 +623,15 @@ def test_conditional_op(py_c_token: Type[Tokenizer], op: str, option: str, tok:
(Token.STRING, 'call'), Token.BRACE_CLOSE,
])

# Test directives
check_tokens(py_c_token(f'\n#word{op}Two', **disabled), [
Token.NEWLINE, (Token.DIRECTIVE, f'word{op}two'),
])
check_tokens(py_c_token(f'\n#word{op}Two', **enabled), [
Token.NEWLINE, (Token.DIRECTIVE, 'word'),
tok, (Token.STRING, 'Two'),
])


def test_invalid_bracket(py_c_token: Type[Tokenizer]) -> None:
"""Test detecting various invalid combinations of [] brackets."""
Expand Down

0 comments on commit ece8055

Please sign in to comment.