Skip to content

Commit

Permalink
Add conversation starters; input history now comes from file
Browse files Browse the repository at this point in the history
  • Loading branch information
yorevs committed Oct 31, 2024
1 parent 5570856 commit 4ee5f26
Show file tree
Hide file tree
Showing 14 changed files with 152 additions and 135 deletions.
19 changes: 0 additions & 19 deletions docs/devel/misc/askai-questions.txt

This file was deleted.

8 changes: 4 additions & 4 deletions src/main/askai/core/askai_messages.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,8 +33,8 @@ class AskAiMessages(metaclass=Singleton):

@staticmethod
@lru_cache
def get_translator(from_lang: Language, to_lang: Language) -> AITranslator:
return AskAiMessages.TRANSLATOR(from_lang, to_lang)
def get_translator(from_lang: Language) -> AITranslator:
return AskAiMessages.TRANSLATOR(from_lang, configs.language)

def __init__(self):
# fmt: off
Expand All @@ -49,7 +49,7 @@ def accurate_responses(self) -> list[str]:

@property
def translator(self) -> AITranslator:
return AskAiMessages.get_translator(Language.EN_US, configs.language)
return AskAiMessages.get_translator(Language.EN_US)

@lru_cache(maxsize=256)
def translate(self, text: AnyStr) -> str:
Expand All @@ -58,7 +58,7 @@ def translate(self, text: AnyStr) -> str:
:return: The translated text.
"""
# Avoid translating debug messages.
if re.match(r"^~~\[DEBUG]~~.*", str(text), flags=re.IGNORECASE | re.MULTILINE):
if re.match(r"^~~\[DEBUG]~~", str(text), flags=re.IGNORECASE | re.DOTALL | re.MULTILINE):
return text
return self.translator.translate(str(text))

Expand Down
3 changes: 3 additions & 0 deletions src/main/askai/core/askai_settings.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,9 @@
# Make sure the AskAI directory is exported.
os.environ["ASKAI_DIR"] = str(ASKAI_DIR)

# AskAi conversation starters
CONVERSATION_STARTERS: Path = Path(classpath.resource_path / "conversation-starters.txt")


class AskAiSettings(metaclass=Singleton):
"""The AskAI 'SetMan' Settings."""
Expand Down
59 changes: 33 additions & 26 deletions src/main/askai/core/component/cache_service.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,18 +12,22 @@
Copyright (c) 2024, HomeSetup
"""
from askai.core.askai_configs import configs
from askai.core.askai_settings import ASKAI_DIR
from clitt.core.tui.line_input.keyboard_input import KeyboardInput
import os
import re
from collections import namedtuple
from pathlib import Path
from shutil import copyfile
from typing import Optional

from clitt.core.tui.line_input.keyboard_input import KeyboardInput
from hspylib.core.enums.charset import Charset
from hspylib.core.metaclass.singleton import Singleton
from hspylib.core.tools.commons import file_is_not_empty
from hspylib.core.tools.text_tools import hash_text
from hspylib.core.tools.text_tools import hash_text, ensure_endswith
from hspylib.modules.cache.ttl_cache import TTLCache
from pathlib import Path
from typing import Optional

import re
from askai.core.askai_configs import configs
from askai.core.askai_settings import ASKAI_DIR, CONVERSATION_STARTERS

# AskAI cache root directory.
CACHE_DIR: Path = Path(f"{ASKAI_DIR}/cache")
Expand Down Expand Up @@ -73,6 +77,9 @@
if not PERSIST_DIR.exists():
PERSIST_DIR.mkdir(parents=True, exist_ok=True)

ASKAI_INPUT_HISTORY_FILE: Path = Path(CACHE_DIR / "askai-input-history.txt")
if not file_is_not_empty(str(ASKAI_INPUT_HISTORY_FILE)):
copyfile(str(CONVERSATION_STARTERS), str(ASKAI_INPUT_HISTORY_FILE))

CacheEntry = namedtuple("CacheEntry", ["key", "expires"])

Expand All @@ -87,21 +94,12 @@ class is designed to store and retrieve cached data efficiently, reducing the ne

ASKAI_CACHE_KEYS: str = "askai-cache-keys"

ASKAI_INPUT_CACHE_KEY: str = "askai-input-history"

ASKAI_CONTEXT_KEY: str = "askai-context-key"

_TTL_CACHE: TTLCache[str] = TTLCache(ttl_minutes=configs.ttl)

def __init__(self):
keys: str | None = self._TTL_CACHE.read(self.ASKAI_CACHE_KEYS)
self._cache_keys: set[str] = set(map(str.strip, keys.split(",") if keys else {}))

@property
def keys(self) -> set[str]:
return self._cache_keys

def audio_file_path(self, text: str, voice: str = "onyx", audio_format: str = "mp3") -> tuple[str, bool]:
@staticmethod
def audio_file_path(text: str, voice: str = "onyx", audio_format: str = "mp3") -> tuple[str, bool]:
"""Retrieve the hashed audio file path and determine whether the file already exists.
:param text: The text that the audio represents.
:param voice: The AI voice used for speech synthesis (default is "onyx").
Expand All @@ -112,6 +110,14 @@ def audio_file_path(self, text: str, voice: str = "onyx", audio_format: str = "m
audio_file_path = f"{str(AUDIO_DIR)}/askai-{hash_text(key)}.{audio_format}"
return audio_file_path, file_is_not_empty(audio_file_path)

def __init__(self):
keys: str | None = self._TTL_CACHE.read(self.ASKAI_CACHE_KEYS)
self._cache_keys: set[str] = set(map(str.strip, keys.split(",") if keys else {}))

@property
def keys(self) -> set[str]:
return self._cache_keys

def save_reply(self, text: str, reply: str) -> Optional[str]:
"""Save an AI reply into the TTL (Time-To-Live) cache.
:param text: The text to be cached.
Expand Down Expand Up @@ -156,25 +162,26 @@ def clear_replies(self) -> list[str]:
return list(map(self.del_reply, sorted(self.keys)))

def read_input_history(self) -> list[str]:
"""Retrieve line input queries from the TTL (Time-To-Live) cache.
"""Retrieve line input queries from the history file.
:return: A list of input queries stored in the cache.
"""
hist_str: str = self._TTL_CACHE.read(self.ASKAI_INPUT_CACHE_KEY)
return hist_str.split(",") if hist_str else []
return ASKAI_INPUT_HISTORY_FILE.read_text().split(os.linesep)

def save_input_history(self, history: list[str] = None) -> str:
"""Save input queries into the TTL (Time-To-Live) cache.
def save_input_history(self, history: list[str] = None) -> None:
"""Save input queries into the history file.
:param history: A list of input queries to be saved. If None, the current input history will be saved.
:return: The temporary file name of the saved entry.
"""
return self._TTL_CACHE.save(self.ASKAI_INPUT_CACHE_KEY, ",".join(history or KeyboardInput.history()))
if history := (history or KeyboardInput.history()):
with open(str(ASKAI_INPUT_HISTORY_FILE), 'w', encoding=Charset.UTF_8.val) as f_hist:
list(map(lambda h: f_hist.write(ensure_endswith(os.linesep, h)),
filter(lambda h: not h.startswith('/'), history)))

def load_input_history(self, predefined: list[str] = None) -> list[str]:
"""Load input queries from the TTL (Time-To-Live) cache extending it with a predefined input history.
:param predefined: A list of predefined input queries to be appended to the final list.
:return: A list of input queries loaded from the cache.
"""
history = self.read_input_history()
history = self.read_input_history() or list()
if predefined:
history.extend(list(filter(lambda c: c not in history, predefined)))
return history
Expand Down
8 changes: 6 additions & 2 deletions src/main/askai/core/processors/splitter/splitter_actions.py
Original file line number Diff line number Diff line change
Expand Up @@ -39,6 +39,8 @@

import logging as log

from askai.core.support.text_formatter import text_formatter


class SplitterActions(metaclass=Singleton):
"""Class that provides the splitter some actionable items."""
Expand All @@ -54,7 +56,8 @@ def wrap_answer(question: str, answer: str, model_result: ModelResult = ModelRes
:return: An optional formatted string containing the wrapped answer.
"""
output: str = answer
args = {"user": prompt.user.title(), "idiom": shared.idiom, "context": answer, "question": question}
ctx: str = text_formatter.strip_format(answer)
args = {"user": prompt.user.title(), "idiom": shared.idiom, "context": ctx, "question": question}
prompt_args: list[str] = [k for k in args.keys()]
model: ResponseModel = ResponseModel.of_model(model_result.mid)
events.reply.emit(reply=AIReply.full(msg.model_select(model)))
Expand Down Expand Up @@ -83,9 +86,10 @@ def refine_answer(question: str, answer: str, acc_response: AccResponse | None =
:param acc_response: The final accuracy response, if available.
"""
if acc_response and acc_response.reasoning:
ctx: str = str(shared.context.flat("HISTORY"))
ctx: str = text_formatter.strip_format(str(shared.context.flat("HISTORY")))
args = {
"locale": configs.language.locale,
"user": prompt.user.title(),
"improvements": acc_response.details,
"context": ctx,
"response": answer,
Expand Down
118 changes: 61 additions & 57 deletions src/main/askai/core/processors/splitter/splitter_executor.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,7 @@
from askai.core.processors.splitter.splitter_pipeline import SplitterPipeline
from askai.core.processors.splitter.splitter_states import States
from askai.core.support.text_formatter import text_formatter as tf, text_formatter
from askai.exception.exceptions import InaccurateResponse


class SplitterExecutor(Thread):
Expand Down Expand Up @@ -64,67 +65,70 @@ def interrupt(self, ev: Event) -> None:
def run(self) -> None:
"""Execute the splitter pipeline."""

with Live(Spinner("dots", f"[green]{self.pipeline.state}…[/green]", style="green"), console=tf.console) as live:
while not (self._interrupted or self.pipeline.state == States.COMPLETE):
self.pipeline.track_previous()
if 1 < configs.max_router_retries < 1 + self.pipeline.failures[self.pipeline.state.value]:
self.display(
f"\n[red] Max retries exceeded: {configs.max_agent_retries}[/red]\n", True)
break
if 1 < configs.max_iteractions < 1 + self.pipeline.iteractions:
self.display(
f"\n[red] Max iteractions exceeded: {configs.max_iteractions}[/red]\n", True)
break
match self.pipeline.state:
case States.STARTUP:
if self.pipeline.st_startup():
self.pipeline.ev_pipeline_started()
case States.MODEL_SELECT:
if self.pipeline.st_model_select():
self.pipeline.ev_model_selected()
case States.TASK_SPLIT:
if self.pipeline.st_task_split():
if self.pipeline.is_direct():
self.display("[yellow]√ Direct answer provided[/yellow]")
self.pipeline.ev_direct_answer()
else:
self.display(f"[green]√ Action plan created[/green]")
self.pipeline.ev_plan_created()
case States.EXECUTE_TASK:
if self.pipeline.st_execute_task():
self.pipeline.ev_task_executed()
case States.ACC_CHECK:
acc_color: AccColor = self.pipeline.st_accuracy_check()
c_name: str = acc_color.color.casefold()
try:
with Live(Spinner("dots", f"[green]{self.pipeline.state}…[/green]", style="green"), console=tf.console) as live:
while not (self._interrupted or self.pipeline.state == States.COMPLETE):
self.pipeline.track_previous()
if 1 < configs.max_router_retries < 1 + self.pipeline.failures[self.pipeline.state.value]:
self.display(
f"[green]√ Accuracy check: [{c_name}]{c_name.upper()}[/{c_name}][/green]")
if acc_color.passed(AccColor.GOOD):
self.pipeline.ev_accuracy_passed()
elif acc_color.passed(AccColor.MODERATE):
self.pipeline.ev_refine_required()
else:
self.pipeline.ev_accuracy_failed()
case States.REFINE_ANSWER:
if self.pipeline.st_refine_answer():
self.pipeline.ev_answer_refined()
case States.WRAP_ANSWER:
if self.pipeline.st_final_answer():
self.pipeline.ev_final_answer()
case _:
f"\n[red] Max retries exceeded: {configs.max_agent_retries}[/red]\n", True)
break
if 1 < configs.max_iteractions < 1 + self.pipeline.iteractions:
self.display(
f"[red] Error: Machine halted before complete!({self.pipeline.state})[/red]", True)
f"\n[red] Max iteractions exceeded: {configs.max_iteractions}[/red]\n", True)
break
match self.pipeline.state:
case States.STARTUP:
if self.pipeline.st_startup():
self.pipeline.ev_pipeline_started()
case States.MODEL_SELECT:
if self.pipeline.st_model_select():
self.pipeline.ev_model_selected()
case States.TASK_SPLIT:
if self.pipeline.st_task_split():
if self.pipeline.is_direct():
self.display("[yellow]√ Direct answer provided[/yellow]")
self.pipeline.ev_direct_answer()
else:
self.display(f"[green]√ Action plan created[/green]")
self.pipeline.ev_plan_created()
case States.EXECUTE_TASK:
if self.pipeline.st_execute_task():
self.pipeline.ev_task_executed()
case States.ACC_CHECK:
acc_color: AccColor = self.pipeline.st_accuracy_check()
c_name: str = acc_color.color.casefold()
self.display(
f"[green]√ Accuracy check: [{c_name}]{c_name.upper()}[/{c_name}][/green]")
if acc_color.passed(AccColor.GOOD):
self.pipeline.ev_accuracy_passed()
elif acc_color.passed(AccColor.MODERATE):
self.pipeline.ev_refine_required()
else:
self.pipeline.ev_accuracy_failed()
case States.REFINE_ANSWER:
if self.pipeline.st_refine_answer():
self.pipeline.ev_answer_refined()
case States.WRAP_ANSWER:
if self.pipeline.st_final_answer():
self.pipeline.ev_final_answer()
case _:
self.display(
f"[red] Error: Machine halted before complete!({self.pipeline.state})[/red]", True)
break

execution_status: bool = self.pipeline.previous != self.pipeline.state
execution_status_str: str = (
f"{'[green]√[/green]' if execution_status else '[red]X[/red]'}"
f" {str(self.pipeline.previous)}"
)
self.pipeline.failures[self.pipeline.state.value] += 1 if not execution_status else 0
self.display(f"[green]{execution_status_str}[/green]")
live.update(Spinner("dots", f"[green]{self.pipeline.state}…[/green]", style="green"))
self.pipeline.iteractions += 1
cursor.erase_line()
execution_status: bool = self.pipeline.previous != self.pipeline.state
execution_status_str: str = (
f"{'[green]√[/green]' if execution_status else '[red]X[/red]'}"
f" {str(self.pipeline.previous)}"
)
self.pipeline.failures[self.pipeline.state.value] += 1 if not execution_status else 0
self.display(f"[green]{execution_status_str}[/green]")
live.update(Spinner("dots", f"[green]{self.pipeline.state}…[/green]", style="green"))
self.pipeline.iteractions += 1
cursor.erase_line()
except InaccurateResponse:
live.update(Spinner("dots", f"[red]AI failed to respond. Retrying…[/red]", style="green"))

if configs.is_debug:
final_state: States = self.pipeline.state
Expand Down
2 changes: 1 addition & 1 deletion src/main/askai/core/router/tools/terminal.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,7 @@ def _build_filters_() -> str:
if path_obj.exists and path_obj.is_dir:
cmd_line: str = (
f'find {folder} -maxdepth 1 -type f {_build_filters_() if filters else ""} '
f"-exec ls -lLht {{}} + 2>/dev/null | sort -k9,9"
f"! -name '.*' -exec ls -oLhtu {{}} + 2>/dev/null | sort -k9,9"
)
status, output = execute_bash(cmd_line)
if status:
Expand Down
4 changes: 2 additions & 2 deletions src/main/askai/language/ai_translator.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,7 @@ def translate(self, text: AnyStr) -> str:

try:
# Perform batch translation
translated_texts: list[str] = list(map(self.translate_text, texts_to_translate))
translated_texts: list[str] = list(map(self._translate_text, texts_to_translate))
except Exception as err:
events.reply.emit(reply=AIReply.debug(f"Error during batch translation: {err}"))
return text
Expand All @@ -63,7 +63,7 @@ def translate(self, text: AnyStr) -> str:

return translated_text

def translate_text(self, text: AnyStr, **kwargs) -> str:
def _translate_text(self, text: AnyStr, **kwargs) -> str:
"""Translate text from the source language to the target language.
:param text: Text to translate.
:return: The translated text.
Expand Down
2 changes: 1 addition & 1 deletion src/main/askai/language/translators/argos_translator.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,7 +66,7 @@ def __init__(self, from_idiom: Language, to_idiom: Language):
self._argos_model = argos_model

@lru_cache
def translate_text(self, text: AnyStr, **kwargs) -> str:
def _translate_text(self, text: AnyStr, **kwargs) -> str:
"""Translate text from the source language to the target language.
:param text: Text to translate.
:return: The translated text.
Expand Down
2 changes: 1 addition & 1 deletion src/main/askai/language/translators/deepl_translator.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ def __init__(self, source_lang: Language, target_lang: Language):
self._translator: Translator | None = None

@lru_cache
def translate_text(self, text: AnyStr, **kwargs) -> str:
def _translate_text(self, text: AnyStr, **kwargs) -> str:
"""Translate text from the source language to the target language.
:param text: Text to translate.
:return: The translated text.
Expand Down
2 changes: 1 addition & 1 deletion src/main/askai/language/translators/marian_translator.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ def __init__(self, from_idiom: Language, to_idiom: Language):
self._tokenizer = MarianTokenizer.from_pretrained(self.MODEL_NAME)

@lru_cache
def translate_text(self, text: AnyStr, **kwargs) -> str:
def _translate_text(self, text: AnyStr, **kwargs) -> str:
"""Translate text from the source language to the target language.
:param text: Text to translate.
:return: The translated text.
Expand Down
Loading

0 comments on commit 4ee5f26

Please sign in to comment.