Skip to content

Commit

Permalink
refactor: ai_utils functions moved into providers/__init__.py
Browse files Browse the repository at this point in the history
  • Loading branch information
srtaalej committed Aug 8, 2024
1 parent 649cf7d commit 8adc78c
Show file tree
Hide file tree
Showing 12 changed files with 83 additions and 70 deletions.
13 changes: 7 additions & 6 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -73,25 +73,26 @@ black .

`manifest.json` is a configuration for Slack apps. With a manifest, you can create an app with a pre-defined configuration, or adjust the configuration of an existing app.


### `app.py`

`app.py` is the entry point for the application and is the file you'll run to start the server. This project aims to keep this file as thin as possible, primarily using it as a way to route inbound requests.


### `/listeners`

Every incoming request is routed to a "listener". Inside this directory, we group each listener based on the Slack Platform feature used, so `/listeners/commands` handles incoming [Slash Commands](https://api.slack.com/interactivity/slash-commands) requests, `/listeners/events` handles [Events](https://api.slack.com/apis/events-api) and so on.

### `/ai`

#### `ai/ai_utils`
This module is responsible for handling interactions with the APIs and processing their responses. It is composed of several files:

* `ai_constants.py`: Defines constants used throughout the AI module.
* `get_available_apis.py`: Retrieves a list of available API models. When displaying the app home, this function is called to determine which APIs are eligible for selection based on whether their respective API keys have been set.
* `handle_response.py`: Processes responses from API providers.

<a name="byo-llm"></a>
#### `ai/providers`
This module contains classes for communicating with different API providers, such as [Anthropic](https://www.anthropic.com/) and [OpenAI](https://openai.com/). To add your own LLM, create a new class for it using the `base_provider.py` as an example, then update `get_available_apis.py` and `handle_response.py` to include and utilize your new class for API communication.
This module contains classes for communicating with different API providers, such as [Anthropic](https://www.anthropic.com/) and [OpenAI](https://openai.com/). To add your own LLM, create a new class for it using the `base_api.py` as an example, then update `get_available_apis.py` and `handle_response.py` to include and utilize your new class for API communication.

* `__init__.py`:
This file contains utility functions for handling responses from the provider APIs and retrieving available providers.

### `/state_store`

Expand Down
File renamed without changes.
13 changes: 0 additions & 13 deletions ai/ai_utils/get_available_providers.py

This file was deleted.

24 changes: 0 additions & 24 deletions ai/ai_utils/handle_response.py

This file was deleted.

38 changes: 36 additions & 2 deletions ai/providers/__init__.py
Original file line number Diff line number Diff line change
@@ -1,16 +1,50 @@
import logging
from logging import Logger
from typing import Optional, List

from state_store.get_user_state import get_user_state

from .anthropic import AnthropicAPI
from .openai import OpenAI_API
from ..ai_constants import DEFAULT_SYSTEM_CONTENT

"""
The `_get_provider()` function returns an instance of the appropriate API provider based on the given provider name.
New providers must be added below.
New AI providers must be added below
`get_available_providers()`
This function retrieves available API models from different AI providers.
It combines the available models into a single dictionary.
`_get_provider()`
This function returns an instance of the appropriate API provider based on the given provider name.
`get_provider_response()`
This function retrieves the user's selected API provider and model,
sets the model, and generates a response.
Note that context is an optional parameter because some functionalities,
such as commands, do not allow access to conversation history if the bot
isn't in the channel where the command is run.
"""


def get_available_providers():
    """Collect the model listings of every supported provider into one dict.

    Later providers overwrite earlier ones on duplicate model keys, matching
    standard dict-merge semantics.
    """
    available_models = {}
    available_models.update(AnthropicAPI().get_models())
    available_models.update(OpenAI_API().get_models())
    return available_models


def _get_provider(provider_name: str):
if provider_name.lower() == "openai":
return OpenAI_API()
elif provider_name.lower() == "anthropic":
return AnthropicAPI()
else:
raise ValueError(f"Unknown provider: {provider_name}")


def get_provider_response(user_id: str, prompt: str, context: Optional[List] = None, system_content=DEFAULT_SYSTEM_CONTENT):
    """Generate an AI response for *prompt* using the user's selected provider/model.

    Args:
        user_id: Slack user ID, used to look up the selected provider and model.
        prompt: The user's prompt text.
        context: Optional conversation history as a list of dicts with "user"
            and "text" keys. May be None — e.g. slash commands run in channels
            the bot is not a member of have no accessible history.
        system_content: System prompt passed to the provider.

    Returns:
        The provider's generated response, or None if an error occurred.
    """
    try:
        # context may be None; fall back to an empty history instead of crashing.
        formatted_context = "\n".join(f"{msg['user']}: {msg['text']}" for msg in context or [])
        full_prompt = f"Prompt: {prompt}\nContext: {formatted_context}"
        provider_name, model_name = get_user_state(user_id)
        provider = _get_provider(provider_name)
        provider.set_model(model_name)
        return provider.generate_response(full_prompt, system_content)
    except Exception as e:
        # Fix: the original called Logger.error(e) on the *class* imported from
        # logging, which itself raises and hides the real error. Use a module
        # logger instead.
        logging.getLogger(__name__).error(e)
8 changes: 5 additions & 3 deletions listeners/commands/ask_command.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,11 @@
from slack_bolt import Ack, Say, BoltContext
from logging import Logger
from ai.ai_utils.handle_response import get_provider_response
from ai.providers import get_provider_response

# Callback for handling the 'ask-bolty' command. It acknowledges the command, retrieves the user's ID and prompt,
# checks if the prompt is empty, and responds with either an error message or the provider's response.
"""
Callback for handling the 'ask-bolty' command. It acknowledges the command, retrieves the user's ID and prompt,
checks if the prompt is empty, and responds with either an error message or the provider's response.
"""


def ask_callback(ack: Ack, command, say: Say, logger: Logger, context: BoltContext):
Expand Down
4 changes: 2 additions & 2 deletions listeners/events/__init__.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,10 @@
from slack_bolt import App
from .app_home_opened import app_home_opened_callback
from .app_mentioned import app_mentioned_callback
from .dm_sent import dm_sent_callback
from .app_messaged import app_messaged_callback


def register(app: App):
app.event("app_home_opened")(app_home_opened_callback)
app.event("app_mention")(app_mentioned_callback)
app.event("message")(dm_sent_callback)
app.event("message")(app_messaged_callback)
10 changes: 6 additions & 4 deletions listeners/events/app_home_opened.py
Original file line number Diff line number Diff line change
@@ -1,11 +1,13 @@
from logging import Logger
from ai.ai_utils.get_available_providers import get_available_providers
from ai.providers import get_available_providers
from slack_sdk import WebClient
from state_store.get_user_state import get_user_state

# Callback for handling the 'app_home_opened' event. It checks if the event is for the 'home' tab,
# generates a list of model options for a dropdown menu, retrieves the user's state to set the initial option,
# and publishes a view to the user's home tab in Slack.
"""
Callback for handling the 'app_home_opened' event. It checks if the event is for the 'home' tab,
generates a list of model options for a dropdown menu, retrieves the user's state to set the initial option,
and publishes a view to the user's home tab in Slack.
"""


def app_home_opened_callback(event: dict, logger: Logger, client: WebClient):
Expand Down
9 changes: 6 additions & 3 deletions listeners/events/app_mentioned.py
Original file line number Diff line number Diff line change
@@ -1,13 +1,16 @@
from ai.ai_utils.handle_response import get_provider_response
from ai.providers import get_provider_response
from logging import Logger
from slack_sdk import WebClient
from slack_bolt import Say
from ..listener_utils.listener_constants import DEFAULT_LOADING_TEXT, MENTION_WITHOUT_TEXT
from ..listener_utils.parse_conversation import parse_conversation

"""
Handles the event when the app is mentioned in a Slack channel, retrieves the conversation context,
and generates an AI response if text is provided, otherwise sends a default response
"""


# Handles the event when the app is mentioned in a Slack channel, retrieves the conversation context,
# and generates an AI response if text is provided, otherwise sends a default response
def app_mentioned_callback(client: WebClient, event: dict, logger: Logger, say: Say):
try:
channel_id = event.get("channel")
Expand Down
13 changes: 8 additions & 5 deletions listeners/events/dm_sent.py → listeners/events/app_messaged.py
Original file line number Diff line number Diff line change
@@ -1,15 +1,18 @@
from ai.ai_utils.ai_constants import DM_SYSTEM_CONTENT
from ai.ai_utils.handle_response import get_provider_response
from ai.ai_constants import DM_SYSTEM_CONTENT
from ai.providers import get_provider_response
from logging import Logger
from slack_bolt import Say
from slack_sdk import WebClient
from ..listener_utils.listener_constants import DEFAULT_LOADING_TEXT
from ..listener_utils.parse_conversation import parse_conversation

"""
Handles the event when a direct message is sent to the bot, retrieves the conversation context,
and generates an AI response.
"""

# Handles the event when a direct message is sent to the bot, retrieves the conversation context,
# and generates an AI response.
def dm_sent_callback(client: WebClient, event: dict, logger: Logger, say: Say):

def app_messaged_callback(client: WebClient, event: dict, logger: Logger, say: Say):
channel_id = event.get("channel")
thread_ts = event.get("thread_ts")
user_id = event.get("user")
Expand Down
11 changes: 7 additions & 4 deletions listeners/functions/summary_function.py
Original file line number Diff line number Diff line change
@@ -1,14 +1,17 @@
from ai.ai_utils.handle_response import get_provider_response
from ai.providers import get_provider_response
from logging import Logger
from slack_bolt import Complete, Fail, Ack
from slack_sdk import WebClient
from ..listener_utils.listener_constants import SUMMARIZE_CHANNEL_WORKFLOW
from ..listener_utils.parse_conversation import parse_conversation

"""
Handles the event to summarize a Slack channel's conversation history.
It retrieves the conversation history, parses it, generates a summary using an AI response,
and completes the workflow with the summary or fails if an error occurs.
"""


# Handles the event to summarize a Slack channel's conversation history.
# It retrieves the conversation history, parses it, generates a summary using an AI response,
# and completes the workflow with the summary or fails if an error occurs.
def handle_summary_function_callback(
ack: Ack, inputs: dict, fail: Fail, logger: Logger, client: WebClient, complete: Complete
):
Expand Down
10 changes: 6 additions & 4 deletions listeners/listener_utils/parse_conversation.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,11 +2,13 @@
from typing import Optional, List
from slack_sdk.web.slack_response import SlackResponse

"""
Parses a conversation history, excluding messages from the bot,
and formats it as a string with user IDs and their messages.
Used in `app_mentioned_callback`, `dm_sent_callback`,
and `handle_summary_function_callback`."""


# Parses a conversation history, excluding messages from the bot,
# and formats it as a string with user IDs and their messages.
# Used in `app_mentioned_callback`, `dm_sent_callback`,
# and `handle_summary_function_callback`.
def parse_conversation(conversation: SlackResponse) -> Optional[List[dict]]:
parsed = []
try:
Expand Down

0 comments on commit 8adc78c

Please sign in to comment.