Fix typos #31

Merged: 1 commit, Jun 3, 2024
README.md: 6 changes (3 additions & 3 deletions)

@@ -41,7 +41,7 @@ The following tools run on the local system:
* Gramformer needs to be installed manually:

```pip install git+https://github.com/PrithivirajDamodaran/Gramformer.git```
-* hf_checker: Huggingface `text2text-generation` pipline based analyser. See the [flan-t5-large-grammar-synthesis](https://huggingface.co/pszemraj/flan-t5-large-grammar-synthesis) model for an example.
+* hf_checker: Huggingface `text2text-generation` pipeline based analyser. See the [flan-t5-large-grammar-synthesis](https://huggingface.co/pszemraj/flan-t5-large-grammar-synthesis) model for an example.
<details><summary>Models</summary>
<ul>
<li>pszemraj/grammar-synthesis-small</li>
@@ -51,7 +51,7 @@ The following tools run on the local system:
<li>pszemraj/bart-base-grammar-synthesis</li>
</ul>
</details>
-* hf_instruction_checker: Huggingface `text2text-generation` pipline based
+* hf_instruction_checker: Huggingface `text2text-generation` pipeline based
analyser using instruction tuned models. See the Grammarly's
[CoEdIT](https://github.com/vipulraheja/coedit) model for an example. Supports
error checking and text generation, such as paraphrasing, through the `%HF%`
@@ -67,7 +67,7 @@ magic command (see the OpenAI analyser below).
<li>jbochi/candle-coedit-quantized</li>
</ul>
</details>
-* [hf_completion](https://huggingface.co/docs/transformers/task_summary#language-modeling): Huggingface `fill-mask` pipline based text completion.
+* [hf_completion](https://huggingface.co/docs/transformers/task_summary#language-modeling): Huggingface `fill-mask` pipeline based text completion.

### Tools using remote services

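For context on the analysers named in the README lines corrected above, here is a minimal, hypothetical sketch of driving a Huggingface `text2text-generation` pipeline with the model linked in the README; textLSP's own configuration and invocation are not part of this diff.

```python
from transformers import pipeline

# Model name taken from the README above; any text2text-generation
# grammar model could be substituted here.
corrector = pipeline(
    "text2text-generation",
    model="pszemraj/flan-t5-large-grammar-synthesis",
)

result = corrector("this sentense have a typo and a grammar error")
print(result[0]["generated_text"])  # the corrected text
```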
tests/lsp_test_client/session.py: 2 changes (1 addition & 1 deletion)

@@ -159,7 +159,7 @@ def text_document_signature_help(self, signature_help_params):
return fut.result()

def text_document_definition(self, definition_params):
"""Sends text document defintion request to LSP server."""
"""Sends text document definition request to LSP server."""
fut = self._send_request(
"textDocument/definition", params=definition_params
)
textLSP/analysers/grammarbot/grammarbot.py: 2 changes (1 addition & 1 deletion)

@@ -40,7 +40,7 @@ def __init__(self, language_server: LanguageServer, config: dict, name: str):
# TODO save this somewhere
self._remaining_requests = None
if GrammarBotAnalyser.CONFIGURATION_API_KEY not in self.config:
-raise ConfigurationError('Reqired parameter: grammarbot.api_key')
+raise ConfigurationError('Required parameter: grammarbot.api_key')
self._headers = {
'content-type': 'application/x-www-form-urlencoded',
'X-RapidAPI-Key': self.config[GrammarBotAnalyser.CONFIGURATION_API_KEY],
textLSP/analysers/hf_completion/hf_completion.py: 2 changes (1 addition & 1 deletion)

@@ -82,7 +82,7 @@ def get_completions(self, params: Optional[CompletionParams] = None) -> Optional
# in the file might not be mapped to the cleaned_source which leads to
# unexpected behaviour
uncleaned_offset = max(0, doc.offset_at_position(params.position)-1)
-# XXX: this still get's activated in e.g. commented lines
+# XXX: this still gets activated in e.g. commented lines
if doc.source[uncleaned_offset] in {' ', '\n'}:
return self._get_text_completions(paragraph, in_paragraph_offset)

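The `get_completions` change above belongs to the hf_completion analyser, which the README describes as a `fill-mask` pipeline based completer. A rough, self-contained sketch of that pipeline, with an illustrative model choice rather than textLSP's configured default:

```python
from transformers import pipeline

# Illustrative model; textLSP's configured fill-mask model may differ.
completer = pipeline("fill-mask", model="bert-base-uncased")

# Each candidate is a dict with the predicted token and its score.
for candidate in completer("The quick brown fox jumps over the lazy [MASK]."):
    print(candidate["token_str"], round(candidate["score"], 3))
```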
@@ -27,7 +27,7 @@


class HFInstructionCheckerAnalyser(HFCheckerAnalyser):
-CONFIGURATION_INSTRUCITON = 'instruction'
+CONFIGURATION_INSTRUCTION = 'instruction'
CONFIGURATION_PROMPT_MAGIC = 'prompt_magic'

SETTINGS_DEFAULT_INSTRUCTION = 'Fix the grammar:'
@@ -36,12 +36,12 @@ class HFInstructionCheckerAnalyser(HFCheckerAnalyser):
def __init__(self, language_server: LanguageServer, config: dict, name: str):
super().__init__(language_server, config, name)

-instruction = self.config.get(self.CONFIGURATION_INSTRUCITON, self.SETTINGS_DEFAULT_INSTRUCTION)
+instruction = self.config.get(self.CONFIGURATION_INSTRUCTION, self.SETTINGS_DEFAULT_INSTRUCTION)
if instruction is None:
-self.config[self.CONFIGURATION_INSTRUCITON] = ''
+self.config[self.CONFIGURATION_INSTRUCTION] = ''

def corrector(self, text):
-instruction = self.config.get(self.CONFIGURATION_INSTRUCITON, self.SETTINGS_DEFAULT_INSTRUCTION)
+instruction = self.config.get(self.CONFIGURATION_INSTRUCTION, self.SETTINGS_DEFAULT_INSTRUCTION)
inp = f'{instruction} {text}' if len(instruction) > 0 else text

return self._corrector(inp)
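The corrected `CONFIGURATION_INSTRUCTION` handling above prepends an instruction such as `Fix the grammar:` to the text before it reaches the model. A hedged sketch of that pattern with an instruction-tuned checkpoint; the CoEdIT model name here is an assumption, not something read from this diff:

```python
from transformers import pipeline

# Assumed CoEdIT checkpoint; substitute whichever instruction-tuned
# text2text-generation model is actually configured.
checker = pipeline("text2text-generation", model="grammarly/coedit-large")

instruction = "Fix the grammar:"  # matches SETTINGS_DEFAULT_INSTRUCTION above
text = "she dont likes going their"
# Same pattern as corrector(): prepend the instruction only if one is set.
prompt = f"{instruction} {text}" if instruction else text

print(checker(prompt)[0]["generated_text"])
```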
textLSP/analysers/openai/openai.py: 2 changes (1 addition & 1 deletion)

@@ -50,7 +50,7 @@ class OpenAIAnalyser(Analyser):
def __init__(self, language_server: LanguageServer, config: dict, name: str):
super().__init__(language_server, config, name)
if self.CONFIGURATION_API_KEY not in self.config:
-raise ConfigurationError(f'Reqired parameter: {name}.{self.CONFIGURATION_API_KEY}')
+raise ConfigurationError(f'Required parameter: {name}.{self.CONFIGURATION_API_KEY}')
self._client = OpenAI(api_key=self.config[self.CONFIGURATION_API_KEY])

def _chat_endpoint(
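For reference, a minimal sketch of what a chat-based correction request through the OpenAI client initialised above might look like; the model name, prompt, and inline key are assumptions for illustration rather than textLSP's actual `_chat_endpoint` implementation, which reads everything from its configuration.

```python
from openai import OpenAI

# The analyser takes the key from self.config; shown inline here only for illustration.
client = OpenAI(api_key="sk-...")

response = client.chat.completions.create(
    model="gpt-4o-mini",  # assumed model; use whichever model is configured
    messages=[
        {"role": "system", "content": "Fix the grammar of the user's text."},
        {"role": "user", "content": "this sentense have a typo"},
    ],
)
print(response.choices[0].message.content)
```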
textLSP/documents/document.py: 8 changes (4 additions & 4 deletions)

@@ -49,7 +49,7 @@ def __init__(self, *args, config: Dict = None, **kwargs):
@property
def language(self) -> str:
if self._language is None:
-self._update_langauge(
+self._update_language(
self.config.get(
BaseDocument.CONFIGURATION_LANGUAGE, BaseDocument.DEFAULT_LANGUAGE
)
@@ -58,7 +58,7 @@ def language(self) -> str:

return self._language

-def _update_langauge(self, lang: str):
+def _update_language(self, lang: str):
"""
Parameters:
lang: str -- Language code or `auto` for automatic language detection.
@@ -596,7 +596,7 @@ def _get_node_and_iterator_for_edit(
old_first_interval_end_point = (0, 0)
if start_point < old_first_interval_end_point:
# there's new content at the beginning, we need to parse the next
-# subtree as well, since there are no necesary whitespace tokens in
+# subtree as well, since there are no necessary whitespace tokens in
# the current text_intervals
tmp_point = old_tree_first_node_new_end_point
else:
@@ -888,7 +888,7 @@ def _build_updated_text_intervals(
last_changed_point
)
if last_idx+1 >= len(self._text_intervals):
-# we are actully at the end of the file so add the final newline
+# we are actually at the end of the file so add the final newline
text_intervals.add_interval(new_intervals[-1])
else:
self._handle_intervals_after_edit_shifted(
textLSP/types.py: 4 changes (2 additions & 2 deletions)

@@ -171,7 +171,7 @@ def get_interval_at_offset(self, offset: int) -> OffsetPositionInterval:

def get_idx_at_position(self, position: Position, strict=True) -> int:
"""
-:param strict: If Flase, return the idx of the next (or last) interval if does not exist
+:param strict: If False, return the idx of the next (or last) interval if does not exist
"""
idx = bisect.bisect_left(self._position_end_line, position.line)
length = len(self)
@@ -209,7 +209,7 @@ def get_idx_at_position(self, position: Position, strict=True) -> int:

def get_interval_at_position(self, position: Position, strict=True) -> OffsetPositionInterval:
"""
-:param strict: If Flase, return the object of the next (or last) interval if does not exist
+:param strict: If False, return the object of the next (or last) interval if does not exist
"""
idx = self.get_idx_at_position(position, strict)
if idx is None:
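The docstring fixes above touch `get_idx_at_position`, which, per the surrounding context lines, uses `bisect.bisect_left` over interval end lines. A small, self-contained sketch of that idea with made-up data, independent of textLSP's own classes:

```python
import bisect

# End line of each consecutive interval, kept in ascending order (hypothetical data).
position_end_lines = [3, 7, 12, 20]

def idx_at_line(line: int, strict: bool = True):
    """Return the index of the first interval whose end line is not before `line`.

    If `strict` is False, fall back to the last interval when `line` is
    past the end of every interval.
    """
    idx = bisect.bisect_left(position_end_lines, line)
    if idx >= len(position_end_lines):
        return None if strict else len(position_end_lines) - 1
    return idx

print(idx_at_line(9))          # 2 -> the interval ending at line 12
print(idx_at_line(99))         # None (strict)
print(idx_at_line(99, False))  # 3 -> last interval
```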