diff --git a/src/srctools/_tokenizer.pyx b/src/srctools/_tokenizer.pyx
index b1a6c3de..2c31bf66 100644
--- a/src/srctools/_tokenizer.pyx
+++ b/src/srctools/_tokenizer.pyx
@@ -872,8 +872,9 @@ cdef class Tokenizer(BaseTokenizer):
                 return DIRECTIVE, self.buf_get_text().casefold()
             elif (
-                next_char in BARE_DISALLOWED or
-                (next_char == b':' and self.flags.colon_operator)
+                next_char in BARE_DISALLOWED
+                or (next_char == b':' and self.flags.colon_operator)
+                or (next_char == b'+' and self.flags.plus_operator)
             ):
                 # We need to repeat this, so we return the ending
                 # char next. If it's not allowed, that'll error on
diff --git a/src/srctools/tokenizer.py b/src/srctools/tokenizer.py
index 4b2f300a..71f1714b 100644
--- a/src/srctools/tokenizer.py
+++ b/src/srctools/tokenizer.py
@@ -586,7 +586,11 @@ def _get_token(self) -> Tuple[Token, str]:
         value_chars = []
         while True:
             next_char = self._next_char()
-            if next_char in BARE_DISALLOWED:
+            if (
+                next_char in BARE_DISALLOWED
+                or (next_char == ':' and self.colon_operator)
+                or (next_char == '+' and self.plus_operator)
+            ):
                 # We need to repeat this, so we return the ending char next.
                 # If it's not allowed, that'll error on next call.
                 self._char_index -= 1
diff --git a/tests/test_tokenizer.py b/tests/test_tokenizer.py
index 8771361b..31bcbea3 100644
--- a/tests/test_tokenizer.py
+++ b/tests/test_tokenizer.py
@@ -172,7 +172,7 @@ def check_tokens(
         assert isinstance(token, tuple)
         if isinstance(comp_token, tuple):
             comp_type, comp_value = comp_token
-            assert comp_type is token[0] and comp_value == token[1], (
+            assert token[0] is comp_type and token[1] == comp_value, (
                 f"got {token[0]}({token[1]!r}), "
                 f"expected {comp_type}({comp_value!r}) @ pos {i}={tokens[i - 2: i + 1]}"
             )
@@ -623,6 +623,15 @@ def test_conditional_op(py_c_token: Type[Tokenizer], op: str, option: str, tok:
         (Token.STRING, 'call'),
         Token.BRACE_CLOSE,
     ])
+    # Test directives
+    check_tokens(py_c_token(f'\n#word{op}Two', **disabled), [
+        Token.NEWLINE, (Token.DIRECTIVE, f'word{op}two'),
+    ])
+    check_tokens(py_c_token(f'\n#word{op}Two', **enabled), [
+        Token.NEWLINE, (Token.DIRECTIVE, 'word'),
+        tok, (Token.STRING, 'Two'),
+    ])
+
 
 def test_invalid_bracket(py_c_token: Type[Tokenizer]) -> None:
     """Test detecting various invalid combinations of [] brackets."""