Skip to content

Commit

Permalink
Improving comments
Browse files Browse the repository at this point in the history
  • Loading branch information
yorevs committed Oct 31, 2024
1 parent f8503ed commit 96ea494
Show file tree
Hide file tree
Showing 11 changed files with 179 additions and 46 deletions.
47 changes: 32 additions & 15 deletions src/main/askai/core/askai.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,22 @@
Copyright (c) 2024, HomeSetup
"""
import logging as log
import os
import re
import sys
import threading
from pathlib import Path
from typing import List, Optional, TypeAlias, Any

from click import UsageError
from clitt.core.term.terminal import terminal
from hspylib.core.enums.charset import Charset
from hspylib.core.tools.commons import file_is_not_empty, is_debugging
from hspylib.core.zoned_datetime import DATE_FORMAT, now, TIME_FORMAT
from hspylib.modules.application.exit_status import ExitStatus
from openai import RateLimitError

from askai.__classpath__ import classpath
from askai.core.askai_configs import configs
from askai.core.askai_events import events
Expand All @@ -26,22 +42,8 @@
from askai.core.support.chat_context import ChatContext
from askai.core.support.shared_instances import shared
from askai.core.support.utilities import read_stdin
from askai.exception.exceptions import (ImpossibleQuery, InaccurateResponse, IntelligibleAudioError,
MaxInteractionsReached, TerminatingQuery)
from askai.exception.exceptions import *
from askai.tui.app_icons import AppIcons
from click import UsageError
from hspylib.core.enums.charset import Charset
from hspylib.core.tools.commons import file_is_not_empty, is_debugging
from hspylib.core.zoned_datetime import DATE_FORMAT, now, TIME_FORMAT
from hspylib.modules.application.exit_status import ExitStatus
from openai import RateLimitError
from pathlib import Path
from typing import List, Optional, TypeAlias

import logging as log
import os
import re
import sys

QueryString: TypeAlias = str | List[str] | None

Expand Down Expand Up @@ -76,6 +78,7 @@ def __init__(
self._mode: RouterMode = shared.mode
self._console_path = Path(f"{CACHE_DIR}/askai-{self.session_id}.md")
self._query_prompt: str | None = None
self._abort_count: int = 0

if not self._console_path.exists():
self._console_path.touch()
Expand Down Expand Up @@ -120,6 +123,20 @@ def app_settings(self) -> list[tuple[str, ...]]:
all_settings.append(r)
return all_settings

def abort(self, signals: Any, frame: Any) -> None:
"""Hook the SIGINT signal for cleanup or execution interruption. If two signals arrive within 1 second, exit the application.
:param signals: Signal number from the operating system.
:param frame: Current stack frame at the time of signal interruption.
"""
log.warning(f"User interrupted: signals: {signals} frame: {frame}")
self._abort_count += 1
if self._abort_count > 1:
log.warning(f"User aborted. Exiting!")
sys.exit(ExitStatus.ABORTED)
events.abort.emit(message="User interrupted")
threading.Timer(1, lambda: setattr(self, '_abort_count', 0)).start()
terminal.restore()

    def run(self) -> None:
        """Run the application. No-op placeholder here; presumably overridden by
        concrete front-ends (e.g. the CLI runner) — confirm against subclasses."""
        ...
Expand Down
3 changes: 3 additions & 0 deletions src/main/askai/core/askai_cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,8 @@
Copyright (c) 2024, HomeSetup
"""
import signal

from askai.core.askai import AskAi
from askai.core.askai_configs import configs
from askai.core.askai_events import *
Expand Down Expand Up @@ -71,6 +73,7 @@ def __init__(

def run(self) -> None:
"""Run the application."""
signal.signal(signal.SIGINT, self.abort)
while question := (self._query_string or self._input()):
status, output = self.ask_and_reply(question)
if not status:
Expand Down
8 changes: 8 additions & 0 deletions src/main/askai/core/askai_configs.py
Original file line number Diff line number Diff line change
Expand Up @@ -121,6 +121,14 @@ def is_rag(self) -> bool:
def is_rag(self, value: bool) -> None:
settings.put("askai.rag.enabled", value)

    @property
    def is_assistive(self) -> bool:
        """Whether the assistive-router feature is enabled (setting 'askai.router.assistive.enabled')."""
        return settings.get_bool("askai.router.assistive.enabled")

    @is_assistive.setter
    def is_assistive(self, value: bool) -> None:
        """Enable or disable the assistive-router feature by persisting the setting."""
        settings.put("askai.router.assistive.enabled", value)

@property
def language(self) -> Language:
# Lookup order: Settings -> Locale -> Environment.
Expand Down
3 changes: 3 additions & 0 deletions src/main/askai/core/askai_events.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,8 @@

ASKAI_BUS_NAME: str = "askai-reply-bus"

ABORT_EVENT: str = "askai-abort-event"

REPLY_EVENT: str = "askai-reply-event"

MIC_LISTENING_EVENT: str = "askai-mic-listening-event"
Expand All @@ -35,6 +37,7 @@ class AskAiEvents(Enumeration):
# fmt: off
ASKAI_BUS = FluidEventBus(
ASKAI_BUS_NAME,
abort=FluidEvent(ABORT_EVENT, message=None),
reply=FluidEvent(REPLY_EVENT, erase_last=False),
listening=FluidEvent(MIC_LISTENING_EVENT, listening=True),
device_changed=FluidEvent(DEVICE_CHANGED_EVENT, device=None),
Expand Down
1 change: 1 addition & 0 deletions src/main/askai/core/askai_settings.py
Original file line number Diff line number Diff line change
Expand Up @@ -100,6 +100,7 @@ def defaults(self) -> None:
self._settings.put("askai.preferred.language", "askai", "")
self._settings.put("askai.router.mode.default", "askai", "splitter")
self._settings.put("askai.router.pass.threshold", "askai", "moderate")
self._settings.put("askai.router.assistive.enabled", "askai", False)
self._settings.put("askai.default.engine", "askai", "openai")
self._settings.put("askai.default.engine.model", "askai", "gpt-3.5-turbo")
self._settings.put("askai.verbosity.level", "askai", 3)
Expand Down
29 changes: 21 additions & 8 deletions src/main/askai/core/processors/splitter/splitter_actions.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,17 +41,17 @@


class SplitterActions(metaclass=Singleton):
"""TODO"""
"""Class that provides the splitter some actionable items."""

INSTANCE: 'SplitterActions'

@staticmethod
def wrap_answer(question: str, answer: str, model_result: ModelResult = ModelResult.default()) -> str:
def wrap_answer(question: str, answer: str, model_result: ModelResult = ModelResult.default()) -> Optional[str]:
"""Provide a final answer to the user by wrapping the AI response with additional context.
:param question: The user's question.
:param answer: The AI's response to the question.
:param model_result: The result from the selected routing model (default is ModelResult.default()).
:return: A formatted string containing the final answer.
:return: An optional formatted string containing the wrapped answer.
"""
output: str = answer
args = {"user": prompt.user.title(), "idiom": shared.idiom, "context": answer, "question": question}
Expand All @@ -70,13 +70,14 @@ def wrap_answer(question: str, answer: str, model_result: ModelResult = ModelRes
pass # Default is to leave the last AI response as is

# Save the conversation to use with the task agent executor.
shared.memory.save_context({"input": question}, {"output": output})
if output:
shared.memory.save_context({"input": question}, {"output": output})

return output

@staticmethod
def refine_answer(question: str, answer: str, acc_response: AccResponse | None = None) -> str:
"""TODO
"""Refine the AI response when required.
:param question: The user's question.
:param answer: The AI's response to the question.
:param acc_response: The final accuracy response, if available.
Expand All @@ -98,7 +99,10 @@ def refine_answer(question: str, answer: str, acc_response: AccResponse | None =

@staticmethod
def process_action(action: SimpleNamespace) -> Optional[str]:
"""TODO"""
"""Execute an action requested by the AI.
:param action: Action to be executed, encapsulated in a SimpleNamespace.
:return: Output resulted from the action execution as a string, or None if no output.
"""
path_str: str | None = (
"Path: " + action.path
if hasattr(action, "path") and action.path.upper() not in ["N/A", "NONE", ""]
Expand All @@ -110,13 +114,17 @@ def __init__(self):
self._rag: RAGProvider = RAGProvider("task-splitter.csv")

def splitter_template(self, query: str) -> ChatPromptTemplate:
"""Retrieve the processor Template."""
"""Retrieve the processor template based on the given query.
:param query: The input query to process and retrieve the template for.
:return: A ChatPromptTemplate object that matches the query.
"""

evaluation: str = str(shared.context.flat("EVALUATION"))
template = PromptTemplate(
input_variables=["os_type", "shell", "datetime", "home", "agent_tools", "rag"],
template=prompt.read_prompt("task-splitter.txt"),
)

return ChatPromptTemplate.from_messages(
[
(
Expand All @@ -137,7 +145,12 @@ def splitter_template(self, query: str) -> ChatPromptTemplate:
)

def split(self, question: str, model: ModelResult = ModelResult.default()) -> Optional[ActionPlan]:
"""Invoke the LLM to split the tasks and create an action plan."""
"""Invoke the LLM to split the tasks and create an action plan.
:param question: The input question to be processed.
:param model: The model used to generate the action plan, defaulting to ModelResult.default().
:return: An optional ActionPlan generated from the provided question.
"""

runnable = self.splitter_template(question) | lc_llm.create_chat_model(Temperature.COLDEST.temp)
runnable = RunnableWithMessageHistory(
runnable, shared.context.flat, input_messages_key="input", history_messages_key="chat_history"
Expand Down
19 changes: 17 additions & 2 deletions src/main/askai/core/processors/splitter/splitter_executor.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,11 +18,13 @@

from clitt.core.term.cursor import cursor
from hspylib.core.tools.commons import is_debugging
from hspylib.modules.eventbus.event import Event
from rich.live import Live
from rich.spinner import Spinner
from rich.text import Text

from askai.core.askai_configs import configs
from askai.core.askai_events import AskAiEvents, ASKAI_BUS_NAME, ABORT_EVENT
from askai.core.enums.acc_color import AccColor
from askai.core.processors.splitter.splitter_pipeline import SplitterPipeline
from askai.core.processors.splitter.splitter_states import States
Expand All @@ -35,19 +37,32 @@ class SplitterExecutor(Thread):
def __init__(self, query: str):
super().__init__()
self._pipeline = SplitterPipeline(query)
self._interrupted = False
AskAiEvents.bus(ASKAI_BUS_NAME).subscribe(ABORT_EVENT, self.interrupt)

    @property
    def pipeline(self) -> SplitterPipeline:
        """The underlying splitter pipeline driven by this executor thread."""
        return self._pipeline

def display(self, text: str) -> None:
"""TODO"""
"""Display a debug message if debugging mode is active.
:param text: The debug message to display
"""
if is_debugging():
text_formatter.console.print(Text.from_markup(text))

def interrupt(self, ev: Event) -> None:
"""Interrupt the active execution pipeline.
:param ev: The interruption event,
"""
self._interrupted = True
self.display(f"[red] Execution interrupted => {ev.args.message} ![/red]")

def run(self) -> None:
"""Execute the splitter pipeline."""

with Live(Spinner("dots", f"[green]{self.pipeline.state}…[/green]", style="green"), console=tf.console) as live:
while not self.pipeline.state == States.COMPLETE:
while not (self._interrupted or self.pipeline.state == States.COMPLETE):
self.pipeline.track_previous()
if 1 < configs.max_router_retries < 1 + self.pipeline.failures[self.pipeline.state.value]:
self.display(
Expand Down
Loading

0 comments on commit 96ea494

Please sign in to comment.