diff --git a/README.md b/README.md
index 88ca759..d5e946b 100644
--- a/README.md
+++ b/README.md
@@ -41,7 +41,7 @@ The following tools run on the local system:
* Gramformer needs to be installed manually:
```pip install git+https://github.com/PrithivirajDamodaran/Gramformer.git```
-* hf_checker: Huggingface `text2text-generation` pipline based analyser. See the [flan-t5-large-grammar-synthesis](https://huggingface.co/pszemraj/flan-t5-large-grammar-synthesis) model for an example.
+* hf_checker: Huggingface `text2text-generation` pipeline-based analyser. See the [flan-t5-large-grammar-synthesis](https://huggingface.co/pszemraj/flan-t5-large-grammar-synthesis) model for an example.
Models
- pszemraj/grammar-synthesis-small
@@ -51,7 +51,7 @@ The following tools run on the local system:
- pszemraj/bart-base-grammar-synthesis
-* hf_instruction_checker: Huggingface `text2text-generation` pipline based
+* hf_instruction_checker: Huggingface `text2text-generation` pipeline-based
analyser using instruction-tuned models. See Grammarly's
[CoEdIT](https://github.com/vipulraheja/coedit) model for an example. Supports
error checking and text generation, such as paraphrasing, through the `%HF%`
@@ -67,7 +67,7 @@ magic command (see the OpenAI analyser below).
jbochi/candle-coedit-quantized
-* [hf_completion](https://huggingface.co/docs/transformers/task_summary#language-modeling): Huggingface `fill-mask` pipline based text completion.
+* [hf_completion](https://huggingface.co/docs/transformers/task_summary#language-modeling): Huggingface `fill-mask` pipeline-based text completion.
### Tools using remote services
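As a rough illustration of what the local `hf_checker` and `hf_completion` analysers described above build on, the sketch below runs the two Huggingface pipelines directly. This is not textLSP's own wiring; the fill-mask model id and the example sentences are placeholders chosen for the demo.

```python
from transformers import pipeline

# text2text-generation: the pipeline family used by hf_checker / hf_instruction_checker.
corrector = pipeline(
    "text2text-generation",
    model="pszemraj/flan-t5-large-grammar-synthesis",
)
print(corrector("He are going to school tomorrows.")[0]["generated_text"])

# fill-mask: the pipeline family used by hf_completion for text completion.
# The mask token depends on the model; BERT-style models use [MASK].
completer = pipeline("fill-mask", model="bert-base-uncased")
for candidate in completer("The report is due on [MASK]."):
    print(candidate["token_str"], round(candidate["score"], 3))
```

Both pipelines download their model on first use; inside textLSP they are configured through the analyser settings rather than ad-hoc scripts like this.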
diff --git a/tests/lsp_test_client/session.py b/tests/lsp_test_client/session.py
index 2d09421..a8b8b75 100644
--- a/tests/lsp_test_client/session.py
+++ b/tests/lsp_test_client/session.py
@@ -159,7 +159,7 @@ def text_document_signature_help(self, signature_help_params):
return fut.result()
def text_document_definition(self, definition_params):
- """Sends text document defintion request to LSP server."""
+ """Sends text document definition request to LSP server."""
fut = self._send_request(
"textDocument/definition", params=definition_params
)
diff --git a/textLSP/analysers/grammarbot/grammarbot.py b/textLSP/analysers/grammarbot/grammarbot.py
index 91a9389..47910e1 100644
--- a/textLSP/analysers/grammarbot/grammarbot.py
+++ b/textLSP/analysers/grammarbot/grammarbot.py
@@ -40,7 +40,7 @@ def __init__(self, language_server: LanguageServer, config: dict, name: str):
# TODO save this somewhere
self._remaining_requests = None
if GrammarBotAnalyser.CONFIGURATION_API_KEY not in self.config:
- raise ConfigurationError('Reqired parameter: grammarbot.api_key')
+ raise ConfigurationError('Required parameter: grammarbot.api_key')
self._headers = {
'content-type': 'application/x-www-form-urlencoded',
'X-RapidAPI-Key': self.config[GrammarBotAnalyser.CONFIGURATION_API_KEY],
diff --git a/textLSP/analysers/hf_completion/hf_completion.py b/textLSP/analysers/hf_completion/hf_completion.py
index 183ba6b..1e74bf3 100644
--- a/textLSP/analysers/hf_completion/hf_completion.py
+++ b/textLSP/analysers/hf_completion/hf_completion.py
@@ -82,7 +82,7 @@ def get_completions(self, params: Optional[CompletionParams] = None) -> Optional
# in the file might not be mapped to the cleaned_source which leads to
# unexpected behaviour
uncleaned_offset = max(0, doc.offset_at_position(params.position)-1)
- # XXX: this still get's activated in e.g. commented lines
+ # XXX: this still gets activated in e.g. commented lines
if doc.source[uncleaned_offset] in {' ', '\n'}:
return self._get_text_completions(paragraph, in_paragraph_offset)
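The guard above only offers completions when the character just before the cursor is whitespace. A stand-alone sketch of that check, using a hypothetical `should_offer_completion` helper that is not part of textLSP:

```python
def should_offer_completion(source: str, cursor_offset: int) -> bool:
    """Offer completions only when the cursor follows a space or newline."""
    if not source:
        return False
    # Mirror of `uncleaned_offset = max(0, offset - 1)` above: inspect the
    # character immediately before the cursor, clamped to the start of file.
    uncleaned_offset = max(0, cursor_offset - 1)
    if uncleaned_offset >= len(source):
        return False
    return source[uncleaned_offset] in {" ", "\n"}
```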
diff --git a/textLSP/analysers/hf_instruction_checker/hf_instruction_checker.py b/textLSP/analysers/hf_instruction_checker/hf_instruction_checker.py
index 281f3b6..4d4aaba 100644
--- a/textLSP/analysers/hf_instruction_checker/hf_instruction_checker.py
+++ b/textLSP/analysers/hf_instruction_checker/hf_instruction_checker.py
@@ -27,7 +27,7 @@
class HFInstructionCheckerAnalyser(HFCheckerAnalyser):
- CONFIGURATION_INSTRUCITON = 'instruction'
+ CONFIGURATION_INSTRUCTION = 'instruction'
CONFIGURATION_PROMPT_MAGIC = 'prompt_magic'
SETTINGS_DEFAULT_INSTRUCTION = 'Fix the grammar:'
@@ -36,12 +36,12 @@ class HFInstructionCheckerAnalyser(HFCheckerAnalyser):
def __init__(self, language_server: LanguageServer, config: dict, name: str):
super().__init__(language_server, config, name)
- instruction = self.config.get(self.CONFIGURATION_INSTRUCITON, self.SETTINGS_DEFAULT_INSTRUCTION)
+ instruction = self.config.get(self.CONFIGURATION_INSTRUCTION, self.SETTINGS_DEFAULT_INSTRUCTION)
if instruction is None:
- self.config[self.CONFIGURATION_INSTRUCITON] = ''
+ self.config[self.CONFIGURATION_INSTRUCTION] = ''
def corrector(self, text):
- instruction = self.config.get(self.CONFIGURATION_INSTRUCITON, self.SETTINGS_DEFAULT_INSTRUCTION)
+ instruction = self.config.get(self.CONFIGURATION_INSTRUCTION, self.SETTINGS_DEFAULT_INSTRUCTION)
inp = f'{instruction} {text}' if len(instruction) > 0 else text
return self._corrector(inp)
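For context, `corrector` above prepends the configured instruction to the text before handing it to the model. A minimal stand-alone sketch of that prompt format, assuming a CoEdIT-style instruction-tuned model (the model id below is only an example):

```python
from transformers import pipeline

corrector = pipeline("text2text-generation", model="grammarly/coedit-large")

instruction = "Fix the grammar:"  # the analyser's default instruction
text = "When I grow up, I start to understand what he said is quite right."

# Same shape as the analyser's corrector(): "<instruction> <text>", unless the
# instruction has been explicitly configured as empty.
inp = f"{instruction} {text}" if instruction else text
print(corrector(inp)[0]["generated_text"])
```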
diff --git a/textLSP/analysers/openai/openai.py b/textLSP/analysers/openai/openai.py
index 93fbf10..2ab9eb3 100644
--- a/textLSP/analysers/openai/openai.py
+++ b/textLSP/analysers/openai/openai.py
@@ -50,7 +50,7 @@ class OpenAIAnalyser(Analyser):
def __init__(self, language_server: LanguageServer, config: dict, name: str):
super().__init__(language_server, config, name)
if self.CONFIGURATION_API_KEY not in self.config:
- raise ConfigurationError(f'Reqired parameter: {name}.{self.CONFIGURATION_API_KEY}')
+ raise ConfigurationError(f'Required parameter: {name}.{self.CONFIGURATION_API_KEY}')
self._client = OpenAI(api_key=self.config[self.CONFIGURATION_API_KEY])
def _chat_endpoint(
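The client created above presumably backs `_chat_endpoint` with the chat completions call from the `openai` Python package. The sketch below shows that call in isolation, with an illustrative model name and prompt rather than textLSP's actual defaults:

```python
from openai import OpenAI

client = OpenAI(api_key="<your-api-key>")  # textLSP reads this from the analyser config

response = client.chat.completions.create(
    model="gpt-4o-mini",  # illustrative; use whichever model the analyser is configured with
    messages=[
        {"role": "system", "content": "Correct the grammar of the user's text."},
        {"role": "user", "content": "He go to school every days."},
    ],
)
print(response.choices[0].message.content)
```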
diff --git a/textLSP/documents/document.py b/textLSP/documents/document.py
index bcdf097..532303d 100644
--- a/textLSP/documents/document.py
+++ b/textLSP/documents/document.py
@@ -49,7 +49,7 @@ def __init__(self, *args, config: Dict = None, **kwargs):
@property
def language(self) -> str:
if self._language is None:
- self._update_langauge(
+ self._update_language(
self.config.get(
BaseDocument.CONFIGURATION_LANGUAGE, BaseDocument.DEFAULT_LANGUAGE
)
@@ -58,7 +58,7 @@ def language(self) -> str:
return self._language
- def _update_langauge(self, lang: str):
+ def _update_language(self, lang: str):
"""
Parameters:
lang: str -- Language code or `auto` for automatic language detection.
@@ -596,7 +596,7 @@ def _get_node_and_iterator_for_edit(
old_first_interval_end_point = (0, 0)
if start_point < old_first_interval_end_point:
# there's new content at the beginning, we need to parse the next
- # subtree as well, since there are no necesary whitespace tokens in
+ # subtree as well, since there are no necessary whitespace tokens in
# the current text_intervals
tmp_point = old_tree_first_node_new_end_point
else:
@@ -888,7 +888,7 @@ def _build_updated_text_intervals(
last_changed_point
)
if last_idx+1 >= len(self._text_intervals):
- # we are actully at the end of the file so add the final newline
+ # we are actually at the end of the file so add the final newline
text_intervals.add_interval(new_intervals[-1])
else:
self._handle_intervals_after_edit_shifted(
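`_update_language` resolves the configured language, and the `auto` value triggers automatic detection. A toy sketch of that resolution step, assuming a detector such as `langdetect`; textLSP's actual implementation may use a different library and fallback:

```python
from langdetect import detect

DEFAULT_LANGUAGE = "en"

def resolve_language(configured: str, text: str) -> str:
    """Return the configured language code, detecting it when set to 'auto'."""
    if configured != "auto":
        return configured
    try:
        return detect(text)        # e.g. "en", "de"
    except Exception:              # detection fails on empty or ambiguous text
        return DEFAULT_LANGUAGE
```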
diff --git a/textLSP/types.py b/textLSP/types.py
index 5e5e98a..67b0280 100644
--- a/textLSP/types.py
+++ b/textLSP/types.py
@@ -171,7 +171,7 @@ def get_interval_at_offset(self, offset: int) -> OffsetPositionInterval:
def get_idx_at_position(self, position: Position, strict=True) -> int:
"""
- :param strict: If Flase, return the idx of the next (or last) interval if does not exist
+    :param strict: If False, return the idx of the next (or last) interval when the position does not fall inside any interval
"""
idx = bisect.bisect_left(self._position_end_line, position.line)
length = len(self)
@@ -209,7 +209,7 @@ def get_idx_at_position(self, position: Position, strict=True) -> int:
def get_interval_at_position(self, position: Position, strict=True) -> OffsetPositionInterval:
"""
- :param strict: If Flase, return the object of the next (or last) interval if does not exist
+    :param strict: If False, return the object of the next (or last) interval when the position does not fall inside any interval
"""
idx = self.get_idx_at_position(position, strict)
if idx is None: