Skip to content

Commit

Permalink
chore: implementing feedback on readme, ai, and listeners
Browse files Browse the repository at this point in the history
  • Loading branch information
srtaalej committed Aug 8, 2024
1 parent 130c46e commit af85791
Show file tree
Hide file tree
Showing 8 changed files with 20 additions and 7 deletions.
5 changes: 4 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,9 @@ Inspired by [ChatGPT-in-Slack](https://github.com/seratch/ChatGPT-in-Slack/tree/
Before getting started, make sure you have a development workspace where you have permissions to install apps. If you don’t have one set up, go ahead and [create one](https://slack.com/create).
## Installation

#### Prerequisites
* To use the OpenAI and Anthropic models, you must have an account with sufficient credits.

#### Create a Slack App
1. Open [https://api.slack.com/apps/new](https://api.slack.com/apps/new) and choose "From an app manifest"
2. Choose the workspace you want to install the application to
Expand All @@ -29,7 +32,7 @@ Before you can run the app, you'll need to store some environment variables.
2. Click **Basic Information** from the left hand menu and follow the steps in the App-Level Tokens section to create an app-level token with the `connections:write` scope. Copy this token. You will store this in your environment as `SLACK_APP_TOKEN`.

```zsh
# Replace with your app token, bot token, and the token for whichever API(s) you plan on using
# Run these commands in the terminal. Replace with your app token, bot token, and the token for whichever API(s) you plan on using
export SLACK_BOT_TOKEN=<your-bot-token>
export SLACK_APP_TOKEN=<your-app-token>
export OPENAI_API_KEY=<your-api-key>
Expand Down
Empty file removed ai/__init__.py
Empty file.
2 changes: 2 additions & 0 deletions ai/ai_utils/handle_response.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,11 +5,13 @@
from state_store.get_user_state import get_user_state
from typing import Optional, List
from ..providers import _get_provider

"""
This file defines the `get_provider_response` function which retrieves the user's selected API provider and model,
sets the model, and generates a response.
"""


def get_provider_response(user_id: str, prompt: str, context: Optional[List] = None, system_content=DEFAULT_SYSTEM_CONTENT):
try:
formatted_context = "\n".join([f"{msg['user']}: {msg['text']}" for msg in context])
Expand Down
2 changes: 2 additions & 0 deletions ai/providers/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,8 @@
The `_get_provider()` function returns an instance of the appropriate API provider based on the given provider name.
New providers must be added below.
"""


def _get_provider(provider_name: str):
if provider_name.lower() == "openai":
return OpenAI_API()
Expand Down
4 changes: 2 additions & 2 deletions ai/providers/anthropic.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@ class AnthropicAPI(BaseAPIProvider):

def __init__(self):
self.api_key = os.environ.get("ANTHROPIC_API_KEY")
self.client = anthropic.Anthropic(api_key=self.api_key)

def set_model(self, model_name: str):
if model_name not in self.MODELS.keys():
Expand All @@ -32,8 +33,7 @@ def get_models(self) -> dict:

def generate_response(self, prompt: str, system_content: str) -> str:
try:
client = anthropic.Anthropic(api_key=self.api_key)
response = client.messages.create(
response = self.client.messages.create(
model=self.current_model,
system=system_content,
messages=[{"role": "user", "content": [{"type": "text", "text": prompt}]}],
Expand Down
4 changes: 2 additions & 2 deletions ai/providers/openai.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@ class OpenAI_API(BaseAPIProvider):

def __init__(self):
self.api_key = os.environ.get("OPENAI_API_KEY")
self.client = openai.OpenAI(api_key=self.api_key)

def set_model(self, model_name: str):
if model_name not in self.MODELS.keys():
Expand All @@ -29,8 +30,7 @@ def get_models(self) -> dict:

def generate_response(self, prompt: str, system_content: str) -> str:
try:
client = openai.OpenAI(api_key=self.api_key)
response = client.chat.completions.create(
response = self.client.chat.completions.create(
model=self.current_model,
n=1,
messages=[{"role": "system", "content": system_content}, {"role": "user", "content": prompt}],
Expand Down
3 changes: 3 additions & 0 deletions listeners/commands/ask_command.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,9 @@
from logging import Logger
from ai.ai_utils.handle_response import get_provider_response

# Callback for handling the 'ask-bolty' command. It acknowledges the command, retrieves the user's ID and prompt,
# checks if the prompt is empty, and responds with either an error message or the provider's response.


def ask_callback(ack: Ack, command, say: Say, logger: Logger, context: BoltContext):
try:
Expand Down
7 changes: 5 additions & 2 deletions listeners/events/app_home_opened.py
Original file line number Diff line number Diff line change
@@ -1,11 +1,14 @@
from logging import Logger
from ai.ai_utils.get_available_providers import get_available_providers
from slack_bolt import BoltContext
from slack_sdk import WebClient
from state_store.get_user_state import get_user_state

# Callback for handling the 'app_home_opened' event. It checks if the event is for the 'home' tab,
# generates a list of model options for a dropdown menu, retrieves the user's state to set the initial option,
# and publishes a view to the user's home tab in Slack.

def app_home_opened_callback(event: dict, logger: Logger, client: WebClient, context: BoltContext):

def app_home_opened_callback(event: dict, logger: Logger, client: WebClient):
if event["tab"] != "home":
return

Expand Down

0 comments on commit af85791

Please sign in to comment.