From 1b999668d2e0633ea953dad5a4789143e1e3f5f6 Mon Sep 17 00:00:00 2001
From: Simonas <20096648+simjak@users.noreply.github.com>
Date: Fri, 5 Jan 2024 12:25:07 +0200
Subject: [PATCH 01/24] feat: updated function calling example
---
.gitignore | 3 +-
coverage.xml | 536 +++++++++++++----------
docs/examples/function_calling.ipynb | 379 +++++++++++-----
docs/examples/test.ann | Bin 64764 -> 0 bytes
poetry.lock | 578 ++++++++++++-------------
semantic_router/encoders/__init__.py | 2 +-
semantic_router/encoders/fastembed.py | 1 +
semantic_router/layer.py | 19 +-
semantic_router/route.py | 1 +
semantic_router/schema.py | 1 -
semantic_router/utils/function_call.py | 26 +-
semantic_router/utils/splitters.py | 12 +-
tests/unit/test_layer.py | 43 +-
tests/unit/test_splitters.py | 6 +-
14 files changed, 932 insertions(+), 675 deletions(-)
delete mode 100644 docs/examples/test.ann
diff --git a/.gitignore b/.gitignore
index 219091bd..8335baaf 100644
--- a/.gitignore
+++ b/.gitignore
@@ -17,4 +17,5 @@ mac.env
.coverage
.coverage.*
.pytest_cache
-test.py
\ No newline at end of file
+test.py
+output
diff --git a/coverage.xml b/coverage.xml
index f4b8af22..6726da20 100644
--- a/coverage.xml
+++ b/coverage.xml
@@ -1,12 +1,12 @@
-
-
+
+
/Users/jakit/customers/aurelio/semantic-router/semantic_router
-
+
@@ -17,7 +17,7 @@
-
+
@@ -33,94 +33,94 @@
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
-
-
-
-
-
-
+
+
+
+
+
+
-
+
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
-
-
+
+
+
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
-
-
-
+
+
+
-
+
@@ -156,13 +156,13 @@
-
+
-
+
-
+
-
+
@@ -187,127 +187,131 @@
-
-
-
-
+
-
+
-
-
-
-
-
+
+
+
+
+
-
-
-
-
-
+
+
+
+
+
+
-
+
-
+
-
-
+
+
+
-
-
-
+
+
+
-
+
+
+
-
-
-
-
-
-
+
+
-
+
-
+
+
+
-
+
-
-
-
+
-
+
+
-
+
+
+
-
+
-
+
+
-
-
-
-
-
-
-
+
+
+
+
-
+
-
-
-
+
+
+
+
+
-
-
-
+
+
-
-
+
+
+
+
-
-
-
-
+
+
+
+
+
+
+
@@ -328,7 +332,7 @@
-
+
@@ -347,17 +351,17 @@
-
-
+
+
-
-
-
+
+
+
@@ -379,64 +383,74 @@
-
+
-
+
-
+
-
-
+
+
-
-
+
+
-
-
-
-
+
+
+
+
-
+
+
-
+
-
+
-
+
-
+
-
+
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
-
+
@@ -445,7 +459,8 @@
-
+
+
@@ -461,7 +476,7 @@
-
+
@@ -481,29 +496,29 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
-
-
+
+
+
-
+
@@ -521,8 +536,8 @@
-
-
+
+
@@ -533,6 +548,40 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
@@ -582,13 +631,13 @@
-
+
-
+
@@ -597,39 +646,39 @@
-
+
-
+
-
+
-
-
+
+
-
-
-
-
-
-
+
+
+
+
+
+
-
-
+
+
-
-
-
+
+
+
-
+
@@ -637,22 +686,15 @@
-
-
-
+
+
-
-
-
+
+
+
+
-
-
-
-
-
-
-
@@ -698,6 +740,42 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/docs/examples/function_calling.ipynb b/docs/examples/function_calling.ipynb
index d082468b..7b0347bc 100644
--- a/docs/examples/function_calling.ipynb
+++ b/docs/examples/function_calling.ipynb
@@ -9,23 +9,61 @@
},
{
"cell_type": "code",
- "execution_count": 4,
+ "execution_count": 29,
"metadata": {},
"outputs": [],
"source": [
- "def get_time(location: str) -> str:\n",
- " \"\"\"Useful to get the time in a specific location\"\"\"\n",
- " print(f\"Result from: `get_time` function with location: `{location}`\")\n",
- " return \"get_time\"\n",
+ "from datetime import datetime\n",
+ "from zoneinfo import ZoneInfo\n",
+ "\n",
+ "\n",
+ "def get_time(timezone: str) -> str:\n",
+ " \"\"\"Finds the current time in a specific timezone.\n",
+ "\n",
+ " :param timezone: The timezone to find the current time in, should\n",
+ " be a valid timezone from the IANA Time Zone Database like\n",
+ " \"America/New_York\" or \"Europe/London\".\n",
+ " :type timezone: str\n",
+ " :return: The current time in the specified timezone.\"\"\"\n",
+ " now = datetime.now(ZoneInfo(timezone))\n",
+ " print(f\"Invoked `get_time` function with timezone: `{timezone}`\")\n",
+ " return now.strftime(\"%H:%M\")\n",
"\n",
"\n",
"def get_news(category: str, country: str) -> str:\n",
" \"\"\"Useful to get the news in a specific country\"\"\"\n",
" print(\n",
- " f\"Result from: `get_news` function with category: `{category}` \"\n",
+ " f\"Invoked: `get_news` function with category: `{category}` \"\n",
" f\"and country: `{country}`\"\n",
" )\n",
- " return \"get_news\""
+ " return \"Results from dummy news API\""
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 30,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Invoked `get_time` function with timezone: `America/New_York`\n"
+ ]
+ },
+ {
+ "data": {
+ "text/plain": [
+ "'05:02'"
+ ]
+ },
+ "execution_count": 30,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "get_time(\"America/New_York\")"
]
},
{
@@ -37,27 +75,27 @@
},
{
"cell_type": "code",
- "execution_count": 6,
+ "execution_count": 31,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
- "\u001b[32m2023-12-20 12:21:30 INFO semantic_router.utils.logger Generating dynamic route...\u001b[0m\n",
- "\u001b[32m2023-12-20 12:21:33 INFO semantic_router.utils.logger Generated route config:\n",
+ "\u001b[32m2024-01-05 12:02:35 INFO semantic_router.utils.logger Generating dynamic route...\u001b[0m\n",
+ "\u001b[32m2024-01-05 12:02:38 INFO semantic_router.utils.logger Generated route config:\n",
"{\n",
" \"name\": \"get_time\",\n",
" \"utterances\": [\n",
- " \"What's the time in New York?\",\n",
- " \"Can you tell me the time in Tokyo?\",\n",
- " \"What's the current time in London?\",\n",
+ " \"What's the current time in New York?\",\n",
+ " \"Can you tell me the time in London?\",\n",
+ " \"What's the current time in Tokyo?\",\n",
" \"Can you give me the time in Sydney?\",\n",
- " \"What's the time in Paris?\"\n",
+ " \"What's the current time in Berlin?\"\n",
" ]\n",
"}\u001b[0m\n",
- "\u001b[32m2023-12-20 12:21:33 INFO semantic_router.utils.logger Generating dynamic route...\u001b[0m\n",
- "\u001b[32m2023-12-20 12:21:38 INFO semantic_router.utils.logger Generated route config:\n",
+ "\u001b[32m2024-01-05 12:02:38 INFO semantic_router.utils.logger Generating dynamic route...\u001b[0m\n",
+ "\u001b[32m2024-01-05 12:02:41 INFO semantic_router.utils.logger Generated route config:\n",
"{\n",
" \"name\": \"get_news\",\n",
" \"utterances\": [\n",
@@ -67,100 +105,172 @@
" \"Get me the breaking news from the UK\",\n",
" \"What's the latest in Germany?\"\n",
" ]\n",
- "}\u001b[0m\n",
- "/var/folders/gf/cvm58m_x6pvghy227n5cmx5w0000gn/T/ipykernel_65737/1850296463.py:10: RuntimeWarning: coroutine 'Route.from_dynamic_route' was never awaited\n",
- " route_config = RouteConfig(routes=routes)\n",
- "RuntimeWarning: Enable tracemalloc to get the object allocation traceback\n"
+ "}\u001b[0m\n"
]
}
],
"source": [
- "from semantic_router.route import Route, RouteConfig\n",
+ "from semantic_router import Route\n",
"\n",
"functions = [get_time, get_news]\n",
+ "\n",
"routes = []\n",
"\n",
"for function in functions:\n",
- " route = await Route.from_dynamic_route(entity=function)\n",
- " routes.append(route)\n",
+ " route = Route.from_dynamic_route(entity=function)\n",
+ " routes.append(route)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 32,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# You can manually add or remove routes\n",
"\n",
- "route_config = RouteConfig(routes=routes)"
+ "get_weather_route = Route(\n",
+ " name=\"get_weather\",\n",
+ " utterances=[\n",
+ " \"what is the weather in SF\",\n",
+ " \"what is the current temperature in London?\",\n",
+ " \"tomorrow's weather in Paris?\",\n",
+ " ],\n",
+ " function_schema=None,\n",
+ ")\n",
+ "routes.append(get_weather_route)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Add routes to the layer config"
]
},
{
"cell_type": "code",
- "execution_count": 7,
+ "execution_count": 33,
"metadata": {},
"outputs": [
- {
- "name": "stderr",
- "output_type": "stream",
- "text": [
- "\u001b[32m2023-12-19 17:46:43 INFO semantic_router.utils.logger Added route `get_weather`\u001b[0m\n",
- "\u001b[32m2023-12-19 17:46:43 INFO semantic_router.utils.logger Removed route `get_weather`\u001b[0m\n"
- ]
- },
{
"data": {
"text/plain": [
- "[{'name': 'get_time',\n",
- " 'utterances': [\"What's the time in New York?\",\n",
- " 'Can you tell me the time in Tokyo?',\n",
- " \"What's the current time in London?\",\n",
- " 'Can you give me the time in Sydney?',\n",
- " \"What's the time in Paris?\"],\n",
- " 'description': None},\n",
- " {'name': 'get_news',\n",
- " 'utterances': ['Tell me the latest news from the United States',\n",
- " \"What's happening in India today?\",\n",
- " 'Can you give me the top stories from Japan',\n",
- " 'Get me the breaking news from the UK',\n",
- " \"What's the latest in Germany?\"],\n",
- " 'description': None}]"
+ "{'encoder_type': 'openai',\n",
+ " 'encoder_name': 'text-embedding-ada-002',\n",
+ " 'routes': [{'name': 'get_time',\n",
+ " 'utterances': [\"What's the current time in New York?\",\n",
+ " 'Can you tell me the time in London?',\n",
+ " \"What's the current time in Tokyo?\",\n",
+ " 'Can you give me the time in Sydney?',\n",
+ " \"What's the current time in Berlin?\"],\n",
+ " 'description': None,\n",
+ " 'function_schema': {'name': 'get_time',\n",
+ " 'description': 'Finds the current time in a specific timezone.\\n\\n:param timezone: The timezone to find the current time in, should\\n be a valid timezone from the IANA Time Zone Database like\\n \"America/New_York\" or \"Europe/London\".\\n:type timezone: str\\n:return: The current time in the specified timezone.',\n",
+ " 'signature': '(timezone: str) -> str',\n",
+ " 'output': \"\"}},\n",
+ " {'name': 'get_news',\n",
+ " 'utterances': ['Tell me the latest news from the United States',\n",
+ " \"What's happening in India today?\",\n",
+ " 'Can you give me the top stories from Japan',\n",
+ " 'Get me the breaking news from the UK',\n",
+ " \"What's the latest in Germany?\"],\n",
+ " 'description': None,\n",
+ " 'function_schema': {'name': 'get_news',\n",
+ " 'description': 'Useful to get the news in a specific country',\n",
+ " 'signature': '(category: str, country: str) -> str',\n",
+ " 'output': \"\"}},\n",
+ " {'name': 'get_weather',\n",
+ " 'utterances': ['what is the weather in SF',\n",
+ " 'what is the current temperature in London?',\n",
+ " \"tomorrow's weather in Paris?\"],\n",
+ " 'description': None,\n",
+ " 'function_schema': None}]}"
]
},
- "execution_count": 7,
+ "execution_count": 33,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
- "# You can manually add or remove routes\n",
- "\n",
- "get_weather_route = Route(\n",
- " name=\"get_weather\",\n",
- " utterances=[\n",
- " \"what is the weather in SF\",\n",
- " \"what is the current temperature in London?\",\n",
- " \"tomorrow's weather in Paris?\",\n",
- " ],\n",
- ")\n",
- "route_config.add(get_weather_route)\n",
- "\n",
- "route_config.remove(\"get_weather\")\n",
+ "from semantic_router.layer import LayerConfig\n",
"\n",
- "route_config.to_dict()"
+ "layer_config = LayerConfig(routes=routes)\n",
+ "layer_config.to_dict()"
]
},
{
"cell_type": "code",
- "execution_count": 8,
+ "execution_count": 34,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
- "Route(name='get_time', utterances=[\"What's the time in New York?\", 'Can you tell me the time in Tokyo?', \"What's the current time in London?\", 'Can you give me the time in Sydney?', \"What's the time in Paris?\"], description=None)"
+ "Route(name='get_time', utterances=[\"What's the current time in New York?\", 'Can you tell me the time in London?', \"What's the current time in Tokyo?\", 'Can you give me the time in Sydney?', \"What's the current time in Berlin?\"], description=None, function_schema={'name': 'get_time', 'description': 'Finds the current time in a specific timezone.\\n\\n:param timezone: The timezone to find the current time in, should\\n be a valid timezone from the IANA Time Zone Database like\\n \"America/New_York\" or \"Europe/London\".\\n:type timezone: str\\n:return: The current time in the specified timezone.', 'signature': '(timezone: str) -> str', 'output': \"\"})"
]
},
- "execution_count": 8,
+ "execution_count": 34,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# Get a route by name\n",
- "route_config.get(\"get_time\")"
+ "layer_config.get(\"get_time\")"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 35,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "\u001b[32m2024-01-05 12:02:41 INFO semantic_router.utils.logger Removed route `get_weather`\u001b[0m\n"
+ ]
+ },
+ {
+ "data": {
+ "text/plain": [
+ "{'encoder_type': 'openai',\n",
+ " 'encoder_name': 'text-embedding-ada-002',\n",
+ " 'routes': [{'name': 'get_time',\n",
+ " 'utterances': [\"What's the current time in New York?\",\n",
+ " 'Can you tell me the time in London?',\n",
+ " \"What's the current time in Tokyo?\",\n",
+ " 'Can you give me the time in Sydney?',\n",
+ " \"What's the current time in Berlin?\"],\n",
+ " 'description': None,\n",
+ " 'function_schema': {'name': 'get_time',\n",
+ " 'description': 'Finds the current time in a specific timezone.\\n\\n:param timezone: The timezone to find the current time in, should\\n be a valid timezone from the IANA Time Zone Database like\\n \"America/New_York\" or \"Europe/London\".\\n:type timezone: str\\n:return: The current time in the specified timezone.',\n",
+ " 'signature': '(timezone: str) -> str',\n",
+ " 'output': \"\"}},\n",
+ " {'name': 'get_news',\n",
+ " 'utterances': ['Tell me the latest news from the United States',\n",
+ " \"What's happening in India today?\",\n",
+ " 'Can you give me the top stories from Japan',\n",
+ " 'Get me the breaking news from the UK',\n",
+ " \"What's the latest in Germany?\"],\n",
+ " 'description': None,\n",
+ " 'function_schema': {'name': 'get_news',\n",
+ " 'description': 'Useful to get the news in a specific country',\n",
+ " 'signature': '(category: str, country: str) -> str',\n",
+ " 'output': \"\"}}]}"
+ ]
+ },
+ "execution_count": 35,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "# Remove a route by name\n",
+ "layer_config.remove(\"get_weather\")\n",
+ "layer_config.to_dict()"
]
},
{
@@ -172,19 +282,19 @@
},
{
"cell_type": "code",
- "execution_count": 9,
+ "execution_count": 36,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
- "\u001b[32m2023-12-19 17:46:43 INFO semantic_router.utils.logger Saving route config to route_config.json\u001b[0m\n"
+ "\u001b[32m2024-01-05 12:02:41 INFO semantic_router.utils.logger Saving route config to output/layer_config.json\u001b[0m\n"
]
}
],
"source": [
- "route_config.to_file(\"route_config.json\")"
+ "layer_config.to_file(\"output/layer_config.json\")"
]
},
{
@@ -198,37 +308,59 @@
"cell_type": "markdown",
"metadata": {},
"source": [
- "Load from local file"
+ "Load config from local file"
]
},
{
"cell_type": "code",
- "execution_count": 10,
+ "execution_count": 37,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
- "\u001b[32m2023-12-19 17:46:43 INFO semantic_router.utils.logger Loading route config from route_config.json\u001b[0m\n"
+ "\u001b[32m2024-01-05 12:02:41 INFO semantic_router.utils.logger Loading route config from output/layer_config.json\u001b[0m\n"
]
}
],
"source": [
- "from semantic_router.route import RouteConfig\n",
+ "from semantic_router.layer import LayerConfig\n",
"\n",
- "route_config = RouteConfig.from_file(\"route_config.json\")"
+ "layer_config = LayerConfig.from_file(\"output/layer_config.json\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Initialize routing layer"
]
},
{
"cell_type": "code",
- "execution_count": 11,
+ "execution_count": 38,
"metadata": {},
- "outputs": [],
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "\u001b[32m2024-01-05 12:02:41 INFO semantic_router.utils.logger Initializing RouteLayer\u001b[0m\n"
+ ]
+ }
+ ],
"source": [
+ "import os\n",
+ "from getpass import getpass\n",
"from semantic_router import RouteLayer\n",
"\n",
- "route_layer = RouteLayer(routes=route_config.routes)"
+ "# https://dashboard.cohere.com/\n",
+ "os.environ[\"COHERE_API_KEY\"] = os.getenv(\"COHERE_API_KEY\") or getpass(\n",
+ " \"Enter Cohere API Key: \"\n",
+ ")\n",
+ "\n",
+ "layer = RouteLayer.from_config(config=layer_config)"
]
},
{
@@ -240,71 +372,110 @@
},
{
"cell_type": "code",
- "execution_count": 12,
+ "execution_count": 39,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
- "\u001b[32m2023-12-19 17:46:43 INFO semantic_router.utils.logger Extracting function input...\u001b[0m\n"
+ "\u001b[32m2024-01-05 12:02:42 INFO semantic_router.utils.logger Extracting function input...\u001b[0m\n"
]
},
+ {
+ "data": {
+ "text/plain": [
+ "RouteChoice(name='get_time', function_call={'timezone': 'Europe/Stockholm'})"
+ ]
+ },
+ "execution_count": 39,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "layer(\"What is the time in Stockholm?\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Define function execution method"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 41,
+ "metadata": {},
+ "outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
- "Calling function: get_time\n",
- "Result from: `get_time` function with location: `Stockholm`\n"
+ "Query: What is the time in Stockholm?\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
- "\u001b[32m2023-12-19 17:46:49 INFO semantic_router.utils.logger Extracting function input...\u001b[0m\n"
+ "\u001b[32m2024-01-05 12:03:45 INFO semantic_router.utils.logger Extracting function input...\u001b[0m\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
- "Calling function: get_news\n",
- "Result from: `get_news` function with category: `tech` and country: `Lithuania`\n"
+ "Invoked `get_time` function with timezone: `Europe/Stockholm`\n",
+ "11:03\n",
+ "Query: What are the tech news in the US?\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
- "\u001b[33m2023-12-19 17:46:52 WARNING semantic_router.utils.logger No function found, calling LLM...\u001b[0m\n"
+ "\u001b[32m2024-01-05 12:03:47 INFO semantic_router.utils.logger Extracting function input...\u001b[0m\n"
]
},
{
- "data": {
- "text/plain": [
- "'Hello! How can I assist you today?'"
- ]
- },
- "execution_count": 12,
- "metadata": {},
- "output_type": "execute_result"
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Invoked: `get_news` function with category: `tech` and country: `US`\n",
+ "Results from dummy news API\n",
+ "Query: The capital of France?\n",
+ "The capital of France is Paris. It's a beautiful city known for its art, culture, and cuisine. Have you ever been there?\n"
+ ]
}
],
"source": [
- "from semantic_router.utils.function_call import route_and_execute\n",
+ "from semantic_router.schema import RouteChoice\n",
+ "from semantic_router.utils import llm\n",
"\n",
- "tools = [get_time, get_news]\n",
"\n",
- "await route_and_execute(\n",
- " query=\"What is the time in Stockholm?\", functions=tools, route_layer=route_layer\n",
- ")\n",
- "await route_and_execute(\n",
- " query=\"What is the tech news in the Lithuania?\",\n",
- " functions=tools,\n",
- " route_layer=route_layer,\n",
- ")\n",
- "await route_and_execute(query=\"Hi!\", functions=tools, route_layer=route_layer)"
+ "def route_and_execute(query, functions, layer):\n",
+ " route_choice: RouteChoice = layer(query)\n",
+ "\n",
+ " for function in functions:\n",
+ " if function.__name__ == route_choice.name:\n",
+ " if route_choice.function_call:\n",
+ " return function(**route_choice.function_call)\n",
+ "\n",
+ " # If no function is found, use the LLM for general queries\n",
+ " return llm.llm(query)\n",
+ "\n",
+ "\n",
+ "queries = [\n",
+ " \"What is the time in Stockholm?\",\n",
+ " \"What are the tech news in the US?\",\n",
+ " \"The capital of France?\",\n",
+ "]\n",
+ "\n",
+ "for query in queries:\n",
+ " print(f\"Query: {query}\")\n",
+ " print(route_and_execute(query, functions, layer))"
]
}
],
diff --git a/docs/examples/test.ann b/docs/examples/test.ann
deleted file mode 100644
index 85db20d54995bcd7f1199d1816a069e14558aa40..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001
literal 64764
zcmeH~|C3d98ONUmgaO-F1q8R$gRp{#@jPw5C>^bLip3n2V
zpV#wzK6gJ=X74oVVceycMT%^-HX{D{t|M#N03P7QVstW@7dV
z?^ow6Z037DvEW3`ifzopHr&j8Mc!-y-=5&!mY9Eu=N9hA6N|c5%zqD?5`Hf%SU8e7
zmnIfIoHO5AF!wJ0?a
zVexF}u>x713*e7L+(eJL{d1-^WNBQ<5`Mu728$RI^G!pL%-h1Nr9=@%?
zXMMrqK?RtS&nD)ymo0dY@zL0hz#n<|FUb4n#Cwc6yUFkA#QbEw^DR1^hgZeoBHz~#
z_iWj0J~_|g`_8h3+lUME_+ZKWR`|d)wRh8)w+YQBsl`AT{t@2;iPu!Iu$tVvp=)_!
z?hs?|qq#rxsEw_{|5Ws@$M0X!LhXZXFdV}C``|MHyLspz&NT|{f54^{A9Xpi7UnO(
z=R)ke!rJk?9)IJ>V<|Shh}nbZ)g=qAq#iSfJEvf_`pbF#nZ)dQa$kY(E%^PA9RA1s
zKfL#?m>-O;`S^Sb|LM1j&{j(?Jwr!kW4AKajXUEpHIJU7
z4gq?|KiHdCG!$L<^5k8dji%lm7B9qZ42&0}=}E4~uuXgABcA^Pe|p2UbXd?G-S0G5
zxT?XdF0tS`G)*Sn@kk4eQB2!1F(d^C{O2Ws9bxufAfDW4t#uPt!y7Fskn`cRsnH
z%hP}ENBCwfMN_%c4{;6g)A2`$^8vNkhTQ<_&YB3OV}r)%AmeMXnbTm!L(H9z)>AO+
z3rG4Yo=MDWiT!V$>A|oM{qig9$1}c|7@NqQH5{VH&p`WfG*#0>X!X;m>u21b;`>DW
z4&dGluZt`62R7)AtNGp&pKvSQ&UfOu5or4
z+utM>?jxr+ut~Y^K|?#^Z{;k4NAZ{F>Ib)3^;V=O{T}L>*0sdE;pm}0;b43ZM&qma
zX1#=uP~VaG`ZfIjnKyu&KfoG!8#X&(cADHi$H$bCh4-Q9TWI+@>z}z1IR~TBPcH`3
z__ly}$MOC5#2h^xl8{o7P
z#SG1U81qBy(B@k4u?M@g=uF4bz3vk9+zsEq!G+ot8_D%+#5l~_>&->(whyh{sKZvi
z^(OYsj7_3vzJkUUwBH7YBiP`_zlxt!O9Q#3dqF?mSyOQd?d-#TH}`Z+_n@{zV8cG|
z*wdnQ*skRJAoTtWcAfEk47>m0w<)yf06r!V^F?BH=1T1kqPIUDwvC=8m>N!oyS<(SDrYRQRg=1A8jx{9j){5`4Hnp;__SK*#{%|TTRZ~8T>3l
z*DJ)l8V#(&Xg~bF&%D=DPV~e|*!E@5y_Nms8zr-o)ILe|@E#GL-jAre;WK(WvG5>z
z(Ca@zLlrjdj8{{WA21#eYcsy+0s9o2%h_MBv6Cf>+3!3)T^e%(npvyCv(&$WwtZZ1
z^Kc1ROYWMo1y^8ejM3}y4QTx>vCH%adstjYoJOv*_*qS^^tUbH>W)8Z5NxOB_mgi=
z=G^TpxSrZQ1fO1MPxJgZnlD2iePR1L%>NiabI4~GKI@r-X4i<#dbEtHn13IZl5zL1?Wv}Tj6{lv1mBj$D}OCm)|5e08aO!t0%r*(w~*Ijc-}$p9w+7yn3l-v
zP4tXoKKoU8Eitc3H4~Hmi|N^7GrjWweurVVgPP4I=dH|7d!-4z&9JD&?{nB+1oIE6
zCG*ee8iegP3+7oz#iqn8{a=mW4Sy3p$=&**?HY8_fAI|dryGmUN%)?C?(XAkxutvcS7G@v`rsQhpsx)UJsQ}1@x^<*pLs5|``)KNb5U#O=a;85*WA
z)=s>Y*e~M!V`8$8y7d25?1Ck5*vPdIZZFWwdF&3u2~8pEC+vi;&$#~tJrn8u{^XeU
zF20HvV*gQc?th}FnZ4~!IPw2O%svu-8y|n-nZ3hy(J!a)QIlAFGjmxNQ8WICpYHF$
z&|KBILxBE>fCz|y2#A0Ph=2%)fCyxbfX)J0tGMzN0TB=Z5fA|p5CIVofiDT@ETB#h
z0TB=Z5fA|p5CIVofou}cSsYKmYKmYKm@W$Kxct$)?HbPfCz|y2#A0Ph=2%)fX)Ky0TB=Z5fA|p5CIVo0TIY10i6Z1
zS$AbE0wN#+A|L`HAOa#F0y+z*2Sh*wL_h>YKm=5.3)"]
-dev = ["attrs[docs,tests]", "pre-commit"]
+dev = ["attrs[tests]", "pre-commit"]
docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"]
tests = ["attrs[tests-no-zope]", "zope-interface"]
-tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
+tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"]
+tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"]
[[package]]
name = "backoff"
@@ -203,33 +204,33 @@ files = [
[[package]]
name = "black"
-version = "23.12.0"
+version = "23.12.1"
description = "The uncompromising code formatter."
optional = false
python-versions = ">=3.8"
files = [
- {file = "black-23.12.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:67f19562d367468ab59bd6c36a72b2c84bc2f16b59788690e02bbcb140a77175"},
- {file = "black-23.12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:bbd75d9f28a7283b7426160ca21c5bd640ca7cd8ef6630b4754b6df9e2da8462"},
- {file = "black-23.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:593596f699ca2dcbbbdfa59fcda7d8ad6604370c10228223cd6cf6ce1ce7ed7e"},
- {file = "black-23.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:12d5f10cce8dc27202e9a252acd1c9a426c83f95496c959406c96b785a92bb7d"},
- {file = "black-23.12.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e73c5e3d37e5a3513d16b33305713237a234396ae56769b839d7c40759b8a41c"},
- {file = "black-23.12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ba09cae1657c4f8a8c9ff6cfd4a6baaf915bb4ef7d03acffe6a2f6585fa1bd01"},
- {file = "black-23.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ace64c1a349c162d6da3cef91e3b0e78c4fc596ffde9413efa0525456148873d"},
- {file = "black-23.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:72db37a2266b16d256b3ea88b9affcdd5c41a74db551ec3dd4609a59c17d25bf"},
- {file = "black-23.12.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fdf6f23c83078a6c8da2442f4d4eeb19c28ac2a6416da7671b72f0295c4a697b"},
- {file = "black-23.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:39dda060b9b395a6b7bf9c5db28ac87b3c3f48d4fdff470fa8a94ab8271da47e"},
- {file = "black-23.12.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7231670266ca5191a76cb838185d9be59cfa4f5dd401b7c1c70b993c58f6b1b5"},
- {file = "black-23.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:193946e634e80bfb3aec41830f5d7431f8dd5b20d11d89be14b84a97c6b8bc75"},
- {file = "black-23.12.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bcf91b01ddd91a2fed9a8006d7baa94ccefe7e518556470cf40213bd3d44bbbc"},
- {file = "black-23.12.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:996650a89fe5892714ea4ea87bc45e41a59a1e01675c42c433a35b490e5aa3f0"},
- {file = "black-23.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bdbff34c487239a63d86db0c9385b27cdd68b1bfa4e706aa74bb94a435403672"},
- {file = "black-23.12.0-cp38-cp38-win_amd64.whl", hash = "sha256:97af22278043a6a1272daca10a6f4d36c04dfa77e61cbaaf4482e08f3640e9f0"},
- {file = "black-23.12.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ead25c273adfad1095a8ad32afdb8304933efba56e3c1d31b0fee4143a1e424a"},
- {file = "black-23.12.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c71048345bdbced456cddf1622832276d98a710196b842407840ae8055ade6ee"},
- {file = "black-23.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81a832b6e00eef2c13b3239d514ea3b7d5cc3eaa03d0474eedcbbda59441ba5d"},
- {file = "black-23.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:6a82a711d13e61840fb11a6dfecc7287f2424f1ca34765e70c909a35ffa7fb95"},
- {file = "black-23.12.0-py3-none-any.whl", hash = "sha256:a7c07db8200b5315dc07e331dda4d889a56f6bf4db6a9c2a526fa3166a81614f"},
- {file = "black-23.12.0.tar.gz", hash = "sha256:330a327b422aca0634ecd115985c1c7fd7bdb5b5a2ef8aa9888a82e2ebe9437a"},
+ {file = "black-23.12.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0aaf6041986767a5e0ce663c7a2f0e9eaf21e6ff87a5f95cbf3675bfd4c41d2"},
+ {file = "black-23.12.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c88b3711d12905b74206227109272673edce0cb29f27e1385f33b0163c414bba"},
+ {file = "black-23.12.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a920b569dc6b3472513ba6ddea21f440d4b4c699494d2e972a1753cdc25df7b0"},
+ {file = "black-23.12.1-cp310-cp310-win_amd64.whl", hash = "sha256:3fa4be75ef2a6b96ea8d92b1587dd8cb3a35c7e3d51f0738ced0781c3aa3a5a3"},
+ {file = "black-23.12.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8d4df77958a622f9b5a4c96edb4b8c0034f8434032ab11077ec6c56ae9f384ba"},
+ {file = "black-23.12.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:602cfb1196dc692424c70b6507593a2b29aac0547c1be9a1d1365f0d964c353b"},
+ {file = "black-23.12.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c4352800f14be5b4864016882cdba10755bd50805c95f728011bcb47a4afd59"},
+ {file = "black-23.12.1-cp311-cp311-win_amd64.whl", hash = "sha256:0808494f2b2df923ffc5723ed3c7b096bd76341f6213989759287611e9837d50"},
+ {file = "black-23.12.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:25e57fd232a6d6ff3f4478a6fd0580838e47c93c83eaf1ccc92d4faf27112c4e"},
+ {file = "black-23.12.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2d9e13db441c509a3763a7a3d9a49ccc1b4e974a47be4e08ade2a228876500ec"},
+ {file = "black-23.12.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1bd9c210f8b109b1762ec9fd36592fdd528485aadb3f5849b2740ef17e674e"},
+ {file = "black-23.12.1-cp312-cp312-win_amd64.whl", hash = "sha256:ae76c22bde5cbb6bfd211ec343ded2163bba7883c7bc77f6b756a1049436fbb9"},
+ {file = "black-23.12.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1fa88a0f74e50e4487477bc0bb900c6781dbddfdfa32691e780bf854c3b4a47f"},
+ {file = "black-23.12.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a4d6a9668e45ad99d2f8ec70d5c8c04ef4f32f648ef39048d010b0689832ec6d"},
+ {file = "black-23.12.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b18fb2ae6c4bb63eebe5be6bd869ba2f14fd0259bda7d18a46b764d8fb86298a"},
+ {file = "black-23.12.1-cp38-cp38-win_amd64.whl", hash = "sha256:c04b6d9d20e9c13f43eee8ea87d44156b8505ca8a3c878773f68b4e4812a421e"},
+ {file = "black-23.12.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3e1b38b3135fd4c025c28c55ddfc236b05af657828a8a6abe5deec419a0b7055"},
+ {file = "black-23.12.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4f0031eaa7b921db76decd73636ef3a12c942ed367d8c3841a0739412b260a54"},
+ {file = "black-23.12.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97e56155c6b737854e60a9ab1c598ff2533d57e7506d97af5481141671abf3ea"},
+ {file = "black-23.12.1-cp39-cp39-win_amd64.whl", hash = "sha256:dd15245c8b68fe2b6bd0f32c1556509d11bb33aec9b5d0866dd8e2ed3dba09c2"},
+ {file = "black-23.12.1-py3-none-any.whl", hash = "sha256:78baad24af0f033958cad29731e27363183e140962595def56423e626f4bee3e"},
+ {file = "black-23.12.1.tar.gz", hash = "sha256:4ce3ef14ebe8d9509188014d96af1c456a910d5b5cbf434a09fef7e024b3d0d5"},
]
[package.dependencies]
@@ -439,13 +440,13 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""}
[[package]]
name = "cohere"
-version = "4.39"
+version = "4.40"
description = "Python SDK for the Cohere API"
optional = false
python-versions = ">=3.8,<4.0"
files = [
- {file = "cohere-4.39-py3-none-any.whl", hash = "sha256:7f157b7ac0a70b1dda77dc56c4fc063e8d21efcd2bb13759cd5b6839080405e7"},
- {file = "cohere-4.39.tar.gz", hash = "sha256:9e94bb1e5b2e2d464738e0ab3c99ed2879c043cccc90ecbeffd124e81867745d"},
+ {file = "cohere-4.40-py3-none-any.whl", hash = "sha256:75dac8369d97fadc05901352d9db64a0ca6cd40c08423f3c4691f57eb7b131e7"},
+ {file = "cohere-4.40.tar.gz", hash = "sha256:d9e5c1fa7f80a193c03330a634954b927bf188ead7dcfdb51865480f73aebda8"},
]
[package.dependencies]
@@ -503,13 +504,13 @@ development = ["black", "flake8", "mypy", "pytest", "types-colorama"]
[[package]]
name = "comm"
-version = "0.2.0"
+version = "0.2.1"
description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc."
optional = false
python-versions = ">=3.8"
files = [
- {file = "comm-0.2.0-py3-none-any.whl", hash = "sha256:2da8d9ebb8dd7bfc247adaff99f24dce705638a8042b85cb995066793e391001"},
- {file = "comm-0.2.0.tar.gz", hash = "sha256:a517ea2ca28931c7007a7a99c562a0fa5883cfb48963140cf642c41c948498be"},
+ {file = "comm-0.2.1-py3-none-any.whl", hash = "sha256:87928485c0dfc0e7976fd89fc1e187023cf587e7c353e4a9b417555b44adf021"},
+ {file = "comm-0.2.1.tar.gz", hash = "sha256:0bc91edae1344d39d3661dcbc36937181fdaddb304790458f8b044dbc064b89a"},
]
[package.dependencies]
@@ -520,63 +521,63 @@ test = ["pytest"]
[[package]]
name = "coverage"
-version = "7.3.3"
+version = "7.4.0"
description = "Code coverage measurement for Python"
optional = false
python-versions = ">=3.8"
files = [
- {file = "coverage-7.3.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d874434e0cb7b90f7af2b6e3309b0733cde8ec1476eb47db148ed7deeb2a9494"},
- {file = "coverage-7.3.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ee6621dccce8af666b8c4651f9f43467bfbf409607c604b840b78f4ff3619aeb"},
- {file = "coverage-7.3.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1367aa411afb4431ab58fd7ee102adb2665894d047c490649e86219327183134"},
- {file = "coverage-7.3.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f0f8f0c497eb9c9f18f21de0750c8d8b4b9c7000b43996a094290b59d0e7523"},
- {file = "coverage-7.3.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db0338c4b0951d93d547e0ff8d8ea340fecf5885f5b00b23be5aa99549e14cfd"},
- {file = "coverage-7.3.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d31650d313bd90d027f4be7663dfa2241079edd780b56ac416b56eebe0a21aab"},
- {file = "coverage-7.3.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9437a4074b43c177c92c96d051957592afd85ba00d3e92002c8ef45ee75df438"},
- {file = "coverage-7.3.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9e17d9cb06c13b4f2ef570355fa45797d10f19ca71395910b249e3f77942a837"},
- {file = "coverage-7.3.3-cp310-cp310-win32.whl", hash = "sha256:eee5e741b43ea1b49d98ab6e40f7e299e97715af2488d1c77a90de4a663a86e2"},
- {file = "coverage-7.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:593efa42160c15c59ee9b66c5f27a453ed3968718e6e58431cdfb2d50d5ad284"},
- {file = "coverage-7.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8c944cf1775235c0857829c275c777a2c3e33032e544bcef614036f337ac37bb"},
- {file = "coverage-7.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:eda7f6e92358ac9e1717ce1f0377ed2b9320cea070906ece4e5c11d172a45a39"},
- {file = "coverage-7.3.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c854c1d2c7d3e47f7120b560d1a30c1ca221e207439608d27bc4d08fd4aeae8"},
- {file = "coverage-7.3.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:222b038f08a7ebed1e4e78ccf3c09a1ca4ac3da16de983e66520973443b546bc"},
- {file = "coverage-7.3.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff4800783d85bff132f2cc7d007426ec698cdce08c3062c8d501ad3f4ea3d16c"},
- {file = "coverage-7.3.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fc200cec654311ca2c3f5ab3ce2220521b3d4732f68e1b1e79bef8fcfc1f2b97"},
- {file = "coverage-7.3.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:307aecb65bb77cbfebf2eb6e12009e9034d050c6c69d8a5f3f737b329f4f15fb"},
- {file = "coverage-7.3.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ffb0eacbadb705c0a6969b0adf468f126b064f3362411df95f6d4f31c40d31c1"},
- {file = "coverage-7.3.3-cp311-cp311-win32.whl", hash = "sha256:79c32f875fd7c0ed8d642b221cf81feba98183d2ff14d1f37a1bbce6b0347d9f"},
- {file = "coverage-7.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:243576944f7c1a1205e5cd658533a50eba662c74f9be4c050d51c69bd4532936"},
- {file = "coverage-7.3.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a2ac4245f18057dfec3b0074c4eb366953bca6787f1ec397c004c78176a23d56"},
- {file = "coverage-7.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f9191be7af41f0b54324ded600e8ddbcabea23e1e8ba419d9a53b241dece821d"},
- {file = "coverage-7.3.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:31c0b1b8b5a4aebf8fcd227237fc4263aa7fa0ddcd4d288d42f50eff18b0bac4"},
- {file = "coverage-7.3.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee453085279df1bac0996bc97004771a4a052b1f1e23f6101213e3796ff3cb85"},
- {file = "coverage-7.3.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1191270b06ecd68b1d00897b2daddb98e1719f63750969614ceb3438228c088e"},
- {file = "coverage-7.3.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:007a7e49831cfe387473e92e9ff07377f6121120669ddc39674e7244350a6a29"},
- {file = "coverage-7.3.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:af75cf83c2d57717a8493ed2246d34b1f3398cb8a92b10fd7a1858cad8e78f59"},
- {file = "coverage-7.3.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:811ca7373da32f1ccee2927dc27dc523462fd30674a80102f86c6753d6681bc6"},
- {file = "coverage-7.3.3-cp312-cp312-win32.whl", hash = "sha256:733537a182b5d62184f2a72796eb6901299898231a8e4f84c858c68684b25a70"},
- {file = "coverage-7.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:e995efb191f04b01ced307dbd7407ebf6e6dc209b528d75583277b10fd1800ee"},
- {file = "coverage-7.3.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fbd8a5fe6c893de21a3c6835071ec116d79334fbdf641743332e442a3466f7ea"},
- {file = "coverage-7.3.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:50c472c1916540f8b2deef10cdc736cd2b3d1464d3945e4da0333862270dcb15"},
- {file = "coverage-7.3.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e9223a18f51d00d3ce239c39fc41410489ec7a248a84fab443fbb39c943616c"},
- {file = "coverage-7.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f501e36ac428c1b334c41e196ff6bd550c0353c7314716e80055b1f0a32ba394"},
- {file = "coverage-7.3.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:475de8213ed95a6b6283056d180b2442eee38d5948d735cd3d3b52b86dd65b92"},
- {file = "coverage-7.3.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:afdcc10c01d0db217fc0a64f58c7edd635b8f27787fea0a3054b856a6dff8717"},
- {file = "coverage-7.3.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:fff0b2f249ac642fd735f009b8363c2b46cf406d3caec00e4deeb79b5ff39b40"},
- {file = "coverage-7.3.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a1f76cfc122c9e0f62dbe0460ec9cc7696fc9a0293931a33b8870f78cf83a327"},
- {file = "coverage-7.3.3-cp38-cp38-win32.whl", hash = "sha256:757453848c18d7ab5d5b5f1827293d580f156f1c2c8cef45bfc21f37d8681069"},
- {file = "coverage-7.3.3-cp38-cp38-win_amd64.whl", hash = "sha256:ad2453b852a1316c8a103c9c970db8fbc262f4f6b930aa6c606df9b2766eee06"},
- {file = "coverage-7.3.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3b15e03b8ee6a908db48eccf4e4e42397f146ab1e91c6324da44197a45cb9132"},
- {file = "coverage-7.3.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:89400aa1752e09f666cc48708eaa171eef0ebe3d5f74044b614729231763ae69"},
- {file = "coverage-7.3.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c59a3e59fb95e6d72e71dc915e6d7fa568863fad0a80b33bc7b82d6e9f844973"},
- {file = "coverage-7.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9ede881c7618f9cf93e2df0421ee127afdfd267d1b5d0c59bcea771cf160ea4a"},
- {file = "coverage-7.3.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3bfd2c2f0e5384276e12b14882bf2c7621f97c35320c3e7132c156ce18436a1"},
- {file = "coverage-7.3.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7f3bad1a9313401ff2964e411ab7d57fb700a2d5478b727e13f156c8f89774a0"},
- {file = "coverage-7.3.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:65d716b736f16e250435473c5ca01285d73c29f20097decdbb12571d5dfb2c94"},
- {file = "coverage-7.3.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a702e66483b1fe602717020a0e90506e759c84a71dbc1616dd55d29d86a9b91f"},
- {file = "coverage-7.3.3-cp39-cp39-win32.whl", hash = "sha256:7fbf3f5756e7955174a31fb579307d69ffca91ad163467ed123858ce0f3fd4aa"},
- {file = "coverage-7.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:cad9afc1644b979211989ec3ff7d82110b2ed52995c2f7263e7841c846a75348"},
- {file = "coverage-7.3.3-pp38.pp39.pp310-none-any.whl", hash = "sha256:d299d379b676812e142fb57662a8d0d810b859421412b4d7af996154c00c31bb"},
- {file = "coverage-7.3.3.tar.gz", hash = "sha256:df04c64e58df96b4427db8d0559e95e2df3138c9916c96f9f6a4dd220db2fdb7"},
+ {file = "coverage-7.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36b0ea8ab20d6a7564e89cb6135920bc9188fb5f1f7152e94e8300b7b189441a"},
+ {file = "coverage-7.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0676cd0ba581e514b7f726495ea75aba3eb20899d824636c6f59b0ed2f88c471"},
+ {file = "coverage-7.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0ca5c71a5a1765a0f8f88022c52b6b8be740e512980362f7fdbb03725a0d6b9"},
+ {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7c97726520f784239f6c62506bc70e48d01ae71e9da128259d61ca5e9788516"},
+ {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:815ac2d0f3398a14286dc2cea223a6f338109f9ecf39a71160cd1628786bc6f5"},
+ {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:80b5ee39b7f0131ebec7968baa9b2309eddb35b8403d1869e08f024efd883566"},
+ {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5b2ccb7548a0b65974860a78c9ffe1173cfb5877460e5a229238d985565574ae"},
+ {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:995ea5c48c4ebfd898eacb098164b3cc826ba273b3049e4a889658548e321b43"},
+ {file = "coverage-7.4.0-cp310-cp310-win32.whl", hash = "sha256:79287fd95585ed36e83182794a57a46aeae0b64ca53929d1176db56aacc83451"},
+ {file = "coverage-7.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:5b14b4f8760006bfdb6e08667af7bc2d8d9bfdb648351915315ea17645347137"},
+ {file = "coverage-7.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:04387a4a6ecb330c1878907ce0dc04078ea72a869263e53c72a1ba5bbdf380ca"},
+ {file = "coverage-7.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea81d8f9691bb53f4fb4db603203029643caffc82bf998ab5b59ca05560f4c06"},
+ {file = "coverage-7.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74775198b702868ec2d058cb92720a3c5a9177296f75bd97317c787daf711505"},
+ {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76f03940f9973bfaee8cfba70ac991825611b9aac047e5c80d499a44079ec0bc"},
+ {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:485e9f897cf4856a65a57c7f6ea3dc0d4e6c076c87311d4bc003f82cfe199d25"},
+ {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6ae8c9d301207e6856865867d762a4b6fd379c714fcc0607a84b92ee63feff70"},
+ {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bf477c355274a72435ceb140dc42de0dc1e1e0bf6e97195be30487d8eaaf1a09"},
+ {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:83c2dda2666fe32332f8e87481eed056c8b4d163fe18ecc690b02802d36a4d26"},
+ {file = "coverage-7.4.0-cp311-cp311-win32.whl", hash = "sha256:697d1317e5290a313ef0d369650cfee1a114abb6021fa239ca12b4849ebbd614"},
+ {file = "coverage-7.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:26776ff6c711d9d835557ee453082025d871e30b3fd6c27fcef14733f67f0590"},
+ {file = "coverage-7.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:13eaf476ec3e883fe3e5fe3707caeb88268a06284484a3daf8250259ef1ba143"},
+ {file = "coverage-7.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846f52f46e212affb5bcf131c952fb4075b55aae6b61adc9856222df89cbe3e2"},
+ {file = "coverage-7.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26f66da8695719ccf90e794ed567a1549bb2644a706b41e9f6eae6816b398c4a"},
+ {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:164fdcc3246c69a6526a59b744b62e303039a81e42cfbbdc171c91a8cc2f9446"},
+ {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:316543f71025a6565677d84bc4df2114e9b6a615aa39fb165d697dba06a54af9"},
+ {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bb1de682da0b824411e00a0d4da5a784ec6496b6850fdf8c865c1d68c0e318dd"},
+ {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:0e8d06778e8fbffccfe96331a3946237f87b1e1d359d7fbe8b06b96c95a5407a"},
+ {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a56de34db7b7ff77056a37aedded01b2b98b508227d2d0979d373a9b5d353daa"},
+ {file = "coverage-7.4.0-cp312-cp312-win32.whl", hash = "sha256:51456e6fa099a8d9d91497202d9563a320513fcf59f33991b0661a4a6f2ad450"},
+ {file = "coverage-7.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:cd3c1e4cb2ff0083758f09be0f77402e1bdf704adb7f89108007300a6da587d0"},
+ {file = "coverage-7.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e9d1bf53c4c8de58d22e0e956a79a5b37f754ed1ffdbf1a260d9dcfa2d8a325e"},
+ {file = "coverage-7.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:109f5985182b6b81fe33323ab4707011875198c41964f014579cf82cebf2bb85"},
+ {file = "coverage-7.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cc9d4bc55de8003663ec94c2f215d12d42ceea128da8f0f4036235a119c88ac"},
+ {file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc6d65b21c219ec2072c1293c505cf36e4e913a3f936d80028993dd73c7906b1"},
+ {file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a10a4920def78bbfff4eff8a05c51be03e42f1c3735be42d851f199144897ba"},
+ {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b8e99f06160602bc64da35158bb76c73522a4010f0649be44a4e167ff8555952"},
+ {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7d360587e64d006402b7116623cebf9d48893329ef035278969fa3bbf75b697e"},
+ {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:29f3abe810930311c0b5d1a7140f6395369c3db1be68345638c33eec07535105"},
+ {file = "coverage-7.4.0-cp38-cp38-win32.whl", hash = "sha256:5040148f4ec43644702e7b16ca864c5314ccb8ee0751ef617d49aa0e2d6bf4f2"},
+ {file = "coverage-7.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:9864463c1c2f9cb3b5db2cf1ff475eed2f0b4285c2aaf4d357b69959941aa555"},
+ {file = "coverage-7.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:936d38794044b26c99d3dd004d8af0035ac535b92090f7f2bb5aa9c8e2f5cd42"},
+ {file = "coverage-7.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:799c8f873794a08cdf216aa5d0531c6a3747793b70c53f70e98259720a6fe2d7"},
+ {file = "coverage-7.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7defbb9737274023e2d7af02cac77043c86ce88a907c58f42b580a97d5bcca9"},
+ {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a1526d265743fb49363974b7aa8d5899ff64ee07df47dd8d3e37dcc0818f09ed"},
+ {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf635a52fc1ea401baf88843ae8708591aa4adff875e5c23220de43b1ccf575c"},
+ {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:756ded44f47f330666843b5781be126ab57bb57c22adbb07d83f6b519783b870"},
+ {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0eb3c2f32dabe3a4aaf6441dde94f35687224dfd7eb2a7f47f3fd9428e421058"},
+ {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bfd5db349d15c08311702611f3dccbef4b4e2ec148fcc636cf8739519b4a5c0f"},
+ {file = "coverage-7.4.0-cp39-cp39-win32.whl", hash = "sha256:53d7d9158ee03956e0eadac38dfa1ec8068431ef8058fe6447043db1fb40d932"},
+ {file = "coverage-7.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:cfd2a8b6b0d8e66e944d47cdec2f47c48fef2ba2f2dff5a9a75757f64172857e"},
+ {file = "coverage-7.4.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:c530833afc4707fe48524a44844493f36d8727f04dcce91fb978c414a8556cc6"},
+ {file = "coverage-7.4.0.tar.gz", hash = "sha256:707c0f58cb1712b8809ece32b68996ee1e609f71bd14615bd8f87a1293cb610e"},
]
[package.dependencies]
@@ -625,13 +626,13 @@ files = [
[[package]]
name = "distro"
-version = "1.8.0"
+version = "1.9.0"
description = "Distro - an OS platform information API"
optional = false
python-versions = ">=3.6"
files = [
- {file = "distro-1.8.0-py3-none-any.whl", hash = "sha256:99522ca3e365cac527b44bde033f64c6945d90eb9f769703caaec52b09bbd3ff"},
- {file = "distro-1.8.0.tar.gz", hash = "sha256:02e111d1dc6a50abb8eed6bf31c3e48ed8b0830d1ea2a1b78c61765c2513fdd8"},
+ {file = "distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2"},
+ {file = "distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed"},
]
[[package]]
@@ -678,42 +679,42 @@ tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipyth
[[package]]
name = "fastavro"
-version = "1.9.1"
+version = "1.9.2"
description = "Fast read/write of AVRO files"
optional = false
python-versions = ">=3.8"
files = [
- {file = "fastavro-1.9.1-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:de181b2e67f1f42f0c15f87ff530dda88cfe2efc91653b6d38d0aaf4c8800bbf"},
- {file = "fastavro-1.9.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:84616dae53581733aac1161685d35c269922cee79170d8a1f7dbc56c8e2c6a95"},
- {file = "fastavro-1.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eee78b13e118468e6796a97857a02dd2a8076f2946c6ab992a25597ee60a8963"},
- {file = "fastavro-1.9.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:70df5a6428e5c60b08d92a3cf955d2c658e0460059654b0490c908d429bcf332"},
- {file = "fastavro-1.9.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:47f66f8282f7b2b70d4edc1c1853c154a9db14693a20fc1fa78859eb091c6beb"},
- {file = "fastavro-1.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:b558a789b1a24be3b471a2d430a1583e4e18b09896a27ce80211d40c91d3895a"},
- {file = "fastavro-1.9.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:cd00aa7a7e463538b3930b27ea98270af11de3a6436b658086802964ae53cfc7"},
- {file = "fastavro-1.9.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d8037a800914acfd2d17894accfdd9ba96e316bce173e3ac2bc36c9d6f91adb"},
- {file = "fastavro-1.9.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5a6d15fd3783165113b8292058f06c555fecb7b0bbc0dfd391dc6f320675157"},
- {file = "fastavro-1.9.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:46e69d9fe30ccba8a1a22c2ed2e88deb4ae1ce42f47495f59bd1cac60c3f3e75"},
- {file = "fastavro-1.9.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a06ae6b12b4dfe8fa6c84019a949b44067bf5d7fb051f7101a9093dc2c8c7631"},
- {file = "fastavro-1.9.1-cp311-cp311-win_amd64.whl", hash = "sha256:47f18c4f3f5a945c32d386402cf007f700433fd1b9b6af78eb35ee09a29ba8ad"},
- {file = "fastavro-1.9.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:7ce88e5bc88d3d210dca99b69cffc6a7a0538815e86e806730cd79914ac9c17f"},
- {file = "fastavro-1.9.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16ebff73e08bc6437746e36a131f3a025d49b5867f5975bcc4a3e57cafcb3338"},
- {file = "fastavro-1.9.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2b5ebcf4ea3b50cfb80c7cd363e57daab8c2662b85de9ced838e32b5a46a106f"},
- {file = "fastavro-1.9.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:2d52c69089f6ce7110665149ced29cb68f2f1cd6812b28ebb53b158b53e069f7"},
- {file = "fastavro-1.9.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e21c23b7d19df260244ae8fb4470ce27399dc1c0129fa523285e39d8ff7b5ef8"},
- {file = "fastavro-1.9.1-cp312-cp312-win_amd64.whl", hash = "sha256:28886022b9c5e5175e44aa04ed10d733b7503028092e38e61ecafe006f839362"},
- {file = "fastavro-1.9.1-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:dfdfb3706646397f1c71e6652c9ca23ed29698c5f1bd20f32903589d3ae62219"},
- {file = "fastavro-1.9.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9b1edaebef41500028b6bfbef1a46dc2e5b23f8a5dbde8d8c087b290572e5d2"},
- {file = "fastavro-1.9.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ff2184d82788ff6d986372e72add561700ccdedea13b649593604d078dbf674"},
- {file = "fastavro-1.9.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:13ae31bb1b9ee69109e4032946d94ab92c1f1c49194917e64bb7f5923ba4f8fd"},
- {file = "fastavro-1.9.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a9f549ee83ae4df5bc952552caad2011272d20a9fb0cddd50ff3fa1edd8d11a9"},
- {file = "fastavro-1.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:b0265fbec0a268baadf3482eb92d0a4f644f68f8dc266a19a0440b7a28987564"},
- {file = "fastavro-1.9.1-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:0533a430aafe75bc02fe66391361a5f374f08375a89ec93365cb15c016e7f911"},
- {file = "fastavro-1.9.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8dc30d9fa7592b0a652911466a7898547277e7f054e23f95fc5d0e8b88788174"},
- {file = "fastavro-1.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b786f872d5caa34b8c18f2ed73efd99b8b8e1c404342a4242cf3ad7344bdd46c"},
- {file = "fastavro-1.9.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9a9a7213d80eb5e47ffb471c089cfbc19ec5b2390b75f6ef2e09e8678c0f7aeb"},
- {file = "fastavro-1.9.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0b9e9cb05500ed8578ce614a5df4b2b525ded2674320725d405435925addd446"},
- {file = "fastavro-1.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:abfef36fdd2cdbf3b7e7551f6506b908f24e241eebc2ab14e7ff6862679fd1ef"},
- {file = "fastavro-1.9.1.tar.gz", hash = "sha256:f37011d66de8ba81b26760db0478009a14c08ebfd34269b3390abfd4616b308f"},
+ {file = "fastavro-1.9.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:223cecf135fd29b83ca6a30035b15b8db169aeaf8dc4f9a5d34afadc4b31638a"},
+ {file = "fastavro-1.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e08c9be8c6f7eed2cf30f8b64d50094cba38a81b751c7db9f9c4be2656715259"},
+ {file = "fastavro-1.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:394f06cc865c6fbae3bbca323633a28a5d914c55dc2c1cdefb75432456ef8f6f"},
+ {file = "fastavro-1.9.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7a7caadd47bdd04bda534ff70b4b98d2823800c488fd911918115aec4c4dc09b"},
+ {file = "fastavro-1.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:68478a1b8a583d83ad6550e9dceac6cbb148a99a52c3559a0413bf4c0b9c8786"},
+ {file = "fastavro-1.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:b59a1123f1d534743af33fdbda80dd7b9146685bdd7931eae12bee6203065222"},
+ {file = "fastavro-1.9.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:887c20dc527a549764c91f9e48ece071f2f26d217af66ebcaeb87bf29578fee5"},
+ {file = "fastavro-1.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46458f78b481c12db62d3d8a81bae09cb0b5b521c0d066c6856fc2746908d00d"},
+ {file = "fastavro-1.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f4a2a4bed0e829f79fa1e4f172d484b2179426e827bcc80c0069cc81328a5af"},
+ {file = "fastavro-1.9.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6167f9bbe1c5a28fbc2db767f97dbbb4981065e6eeafd4e613f6fe76c576ffd4"},
+ {file = "fastavro-1.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d574bc385f820da0404528157238de4e5fdd775d2cb3d05b3b0f1b475d493837"},
+ {file = "fastavro-1.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:ec600eb15b3ec931904c5bf8da62b3b725cb0f369add83ba47d7b5e9322f92a0"},
+ {file = "fastavro-1.9.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:c82b0761503420cd45f7f50bc31975ac1c75b5118e15434c1d724b751abcc249"},
+ {file = "fastavro-1.9.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:db62d9b8c944b8d9c481e5f980d5becfd034bdd58c72e27c9333bd504b06bda0"},
+ {file = "fastavro-1.9.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65e61f040bc9494646f42a466e9cd428783b82d7161173f3296710723ba5a453"},
+ {file = "fastavro-1.9.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:6278b93cdd5bef1778c0232ce1f265137f90bc6be97a5c1dd7e0d99a406c0488"},
+ {file = "fastavro-1.9.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:cd003ddea5d89720194b6e57011c37221d9fc4ddc750e6f4723516eb659be686"},
+ {file = "fastavro-1.9.2-cp312-cp312-win_amd64.whl", hash = "sha256:43f09d100a26e8b59f30dde664d93e423b648e008abfc43132608a18fe8ddcc2"},
+ {file = "fastavro-1.9.2-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:3ddffeff5394f285c69f9cd481f47b6cf62379840cdbe6e0dc74683bd589b56e"},
+ {file = "fastavro-1.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e75a2b2ec697d2058a7d96522e921f03f174cf9049ace007c24be7ab58c5370"},
+ {file = "fastavro-1.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fd2e8fd0567483eb0fdada1b979ad4d493305dfdd3f351c82a87df301f0ae1f"},
+ {file = "fastavro-1.9.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c652dbe3f087c943a5b89f9a50a574e64f23790bfbec335ce2b91a2ae354a443"},
+ {file = "fastavro-1.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bba73e9a1822162f1b3a43de0362f29880014c5c4d49d63ad7fcce339ef73ea2"},
+ {file = "fastavro-1.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:beeef2964bbfd09c539424808539b956d7425afbb7055b89e2aa311374748b56"},
+ {file = "fastavro-1.9.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:d5fa48266d75e057b27d8586b823d6d7d7c94593fd989d75033eb4c8078009fb"},
+ {file = "fastavro-1.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b69aeb0d063f5955a0e412f9779444fc452568a49db75a90a8d372f9cb4a01c8"},
+ {file = "fastavro-1.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ce336c59fb40fdb8751bda8cc6076cfcdf9767c3c107f6049e049166b26c61f"},
+ {file = "fastavro-1.9.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:581036e18661f045415a51ad528865e1d7ba5a9690a3dede9e6ea50f94ed6c4c"},
+ {file = "fastavro-1.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:39b6b5c3cda569c0a130fd2d08d4c53a326ede7e05174a24eda08f7698f70eda"},
+ {file = "fastavro-1.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:d33e40f246bf07f106f9d2da68d0234efcc62276b6e35bde00ff920ea7f871fd"},
+ {file = "fastavro-1.9.2.tar.gz", hash = "sha256:5c1ffad986200496bd69b5c4748ae90b5d934d3b1456f33147bee3a0bb17f89b"},
]
[package.extras]
@@ -923,13 +924,13 @@ trio = ["trio (>=0.22.0,<0.23.0)"]
[[package]]
name = "httpx"
-version = "0.25.2"
+version = "0.26.0"
description = "The next generation HTTP client."
optional = false
python-versions = ">=3.8"
files = [
- {file = "httpx-0.25.2-py3-none-any.whl", hash = "sha256:a05d3d052d9b2dfce0e3896636467f8a5342fb2b902c819428e1ac65413ca118"},
- {file = "httpx-0.25.2.tar.gz", hash = "sha256:8b8fcaa0c8ea7b05edd69a094e63a2094c4efcb48129fb757361bc423c0ad9e8"},
+ {file = "httpx-0.26.0-py3-none-any.whl", hash = "sha256:8915f5a3627c4d47b73e8202457cb28f1266982d1159bd5779d86a80c0eab1cd"},
+ {file = "httpx-0.26.0.tar.gz", hash = "sha256:451b55c30d5185ea6b23c2c793abf9bb237d2a7dfb901ced6ff69ad37ec1dfaf"},
]
[package.dependencies]
@@ -1035,13 +1036,13 @@ files = [
[[package]]
name = "ipykernel"
-version = "6.27.1"
+version = "6.28.0"
description = "IPython Kernel for Jupyter"
optional = false
python-versions = ">=3.8"
files = [
- {file = "ipykernel-6.27.1-py3-none-any.whl", hash = "sha256:dab88b47f112f9f7df62236511023c9bdeef67abc73af7c652e4ce4441601686"},
- {file = "ipykernel-6.27.1.tar.gz", hash = "sha256:7d5d594b6690654b4d299edba5e872dc17bb7396a8d0609c97cb7b8a1c605de6"},
+ {file = "ipykernel-6.28.0-py3-none-any.whl", hash = "sha256:c6e9a9c63a7f4095c0a22a79f765f079f9ec7be4f2430a898ddea889e8665661"},
+ {file = "ipykernel-6.28.0.tar.gz", hash = "sha256:69c11403d26de69df02225916f916b37ea4b9af417da0a8c827f84328d88e5f3"},
]
[package.dependencies]
@@ -1055,7 +1056,7 @@ matplotlib-inline = ">=0.1"
nest-asyncio = "*"
packaging = "*"
psutil = "*"
-pyzmq = ">=20"
+pyzmq = ">=24"
tornado = ">=6.1"
traitlets = ">=5.4.0"
@@ -1158,13 +1159,13 @@ test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pyt
[[package]]
name = "jupyter-core"
-version = "5.5.0"
+version = "5.7.0"
description = "Jupyter core package. A base package on which Jupyter projects rely."
optional = false
python-versions = ">=3.8"
files = [
- {file = "jupyter_core-5.5.0-py3-none-any.whl", hash = "sha256:e11e02cd8ae0a9de5c6c44abf5727df9f2581055afe00b22183f621ba3585805"},
- {file = "jupyter_core-5.5.0.tar.gz", hash = "sha256:880b86053bf298a8724994f95e99b99130659022a4f7f45f563084b6223861d3"},
+ {file = "jupyter_core-5.7.0-py3-none-any.whl", hash = "sha256:16eea462f7dad23ba9f86542bdf17f830804e2028eb48d609b6134d91681e983"},
+ {file = "jupyter_core-5.7.0.tar.gz", hash = "sha256:cb8d3ed92144d2463a3c5664fdd686a3f0c1442ea45df8babb1c1a9e6333fe03"},
]
[package.dependencies]
@@ -1336,38 +1337,38 @@ files = [
[[package]]
name = "mypy"
-version = "1.7.1"
+version = "1.8.0"
description = "Optional static typing for Python"
optional = false
python-versions = ">=3.8"
files = [
- {file = "mypy-1.7.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:12cce78e329838d70a204293e7b29af9faa3ab14899aec397798a4b41be7f340"},
- {file = "mypy-1.7.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1484b8fa2c10adf4474f016e09d7a159602f3239075c7bf9f1627f5acf40ad49"},
- {file = "mypy-1.7.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31902408f4bf54108bbfb2e35369877c01c95adc6192958684473658c322c8a5"},
- {file = "mypy-1.7.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f2c2521a8e4d6d769e3234350ba7b65ff5d527137cdcde13ff4d99114b0c8e7d"},
- {file = "mypy-1.7.1-cp310-cp310-win_amd64.whl", hash = "sha256:fcd2572dd4519e8a6642b733cd3a8cfc1ef94bafd0c1ceed9c94fe736cb65b6a"},
- {file = "mypy-1.7.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4b901927f16224d0d143b925ce9a4e6b3a758010673eeded9b748f250cf4e8f7"},
- {file = "mypy-1.7.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2f7f6985d05a4e3ce8255396df363046c28bea790e40617654e91ed580ca7c51"},
- {file = "mypy-1.7.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:944bdc21ebd620eafefc090cdf83158393ec2b1391578359776c00de00e8907a"},
- {file = "mypy-1.7.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9c7ac372232c928fff0645d85f273a726970c014749b924ce5710d7d89763a28"},
- {file = "mypy-1.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:f6efc9bd72258f89a3816e3a98c09d36f079c223aa345c659622f056b760ab42"},
- {file = "mypy-1.7.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6dbdec441c60699288adf051f51a5d512b0d818526d1dcfff5a41f8cd8b4aaf1"},
- {file = "mypy-1.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4fc3d14ee80cd22367caaaf6e014494415bf440980a3045bf5045b525680ac33"},
- {file = "mypy-1.7.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c6e4464ed5f01dc44dc9821caf67b60a4e5c3b04278286a85c067010653a0eb"},
- {file = "mypy-1.7.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:d9b338c19fa2412f76e17525c1b4f2c687a55b156320acb588df79f2e6fa9fea"},
- {file = "mypy-1.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:204e0d6de5fd2317394a4eff62065614c4892d5a4d1a7ee55b765d7a3d9e3f82"},
- {file = "mypy-1.7.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:84860e06ba363d9c0eeabd45ac0fde4b903ad7aa4f93cd8b648385a888e23200"},
- {file = "mypy-1.7.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8c5091ebd294f7628eb25ea554852a52058ac81472c921150e3a61cdd68f75a7"},
- {file = "mypy-1.7.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40716d1f821b89838589e5b3106ebbc23636ffdef5abc31f7cd0266db936067e"},
- {file = "mypy-1.7.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5cf3f0c5ac72139797953bd50bc6c95ac13075e62dbfcc923571180bebb662e9"},
- {file = "mypy-1.7.1-cp38-cp38-win_amd64.whl", hash = "sha256:78e25b2fd6cbb55ddfb8058417df193f0129cad5f4ee75d1502248e588d9e0d7"},
- {file = "mypy-1.7.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:75c4d2a6effd015786c87774e04331b6da863fc3fc4e8adfc3b40aa55ab516fe"},
- {file = "mypy-1.7.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2643d145af5292ee956aa0a83c2ce1038a3bdb26e033dadeb2f7066fb0c9abce"},
- {file = "mypy-1.7.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75aa828610b67462ffe3057d4d8a4112105ed211596b750b53cbfe182f44777a"},
- {file = "mypy-1.7.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ee5d62d28b854eb61889cde4e1dbc10fbaa5560cb39780c3995f6737f7e82120"},
- {file = "mypy-1.7.1-cp39-cp39-win_amd64.whl", hash = "sha256:72cf32ce7dd3562373f78bd751f73c96cfb441de147cc2448a92c1a308bd0ca6"},
- {file = "mypy-1.7.1-py3-none-any.whl", hash = "sha256:f7c5d642db47376a0cc130f0de6d055056e010debdaf0707cd2b0fc7e7ef30ea"},
- {file = "mypy-1.7.1.tar.gz", hash = "sha256:fcb6d9afb1b6208b4c712af0dafdc650f518836065df0d4fb1d800f5d6773db2"},
+ {file = "mypy-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:485a8942f671120f76afffff70f259e1cd0f0cfe08f81c05d8816d958d4577d3"},
+ {file = "mypy-1.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:df9824ac11deaf007443e7ed2a4a26bebff98d2bc43c6da21b2b64185da011c4"},
+ {file = "mypy-1.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2afecd6354bbfb6e0160f4e4ad9ba6e4e003b767dd80d85516e71f2e955ab50d"},
+ {file = "mypy-1.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8963b83d53ee733a6e4196954502b33567ad07dfd74851f32be18eb932fb1cb9"},
+ {file = "mypy-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:e46f44b54ebddbeedbd3d5b289a893219065ef805d95094d16a0af6630f5d410"},
+ {file = "mypy-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:855fe27b80375e5c5878492f0729540db47b186509c98dae341254c8f45f42ae"},
+ {file = "mypy-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c886c6cce2d070bd7df4ec4a05a13ee20c0aa60cb587e8d1265b6c03cf91da3"},
+ {file = "mypy-1.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d19c413b3c07cbecf1f991e2221746b0d2a9410b59cb3f4fb9557f0365a1a817"},
+ {file = "mypy-1.8.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9261ed810972061388918c83c3f5cd46079d875026ba97380f3e3978a72f503d"},
+ {file = "mypy-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:51720c776d148bad2372ca21ca29256ed483aa9a4cdefefcef49006dff2a6835"},
+ {file = "mypy-1.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:52825b01f5c4c1c4eb0db253ec09c7aa17e1a7304d247c48b6f3599ef40db8bd"},
+ {file = "mypy-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f5ac9a4eeb1ec0f1ccdc6f326bcdb464de5f80eb07fb38b5ddd7b0de6bc61e55"},
+ {file = "mypy-1.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afe3fe972c645b4632c563d3f3eff1cdca2fa058f730df2b93a35e3b0c538218"},
+ {file = "mypy-1.8.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:42c6680d256ab35637ef88891c6bd02514ccb7e1122133ac96055ff458f93fc3"},
+ {file = "mypy-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:720a5ca70e136b675af3af63db533c1c8c9181314d207568bbe79051f122669e"},
+ {file = "mypy-1.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:028cf9f2cae89e202d7b6593cd98db6759379f17a319b5faf4f9978d7084cdc6"},
+ {file = "mypy-1.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4e6d97288757e1ddba10dd9549ac27982e3e74a49d8d0179fc14d4365c7add66"},
+ {file = "mypy-1.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f1478736fcebb90f97e40aff11a5f253af890c845ee0c850fe80aa060a267c6"},
+ {file = "mypy-1.8.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42419861b43e6962a649068a61f4a4839205a3ef525b858377a960b9e2de6e0d"},
+ {file = "mypy-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:2b5b6c721bd4aabaadead3a5e6fa85c11c6c795e0c81a7215776ef8afc66de02"},
+ {file = "mypy-1.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5c1538c38584029352878a0466f03a8ee7547d7bd9f641f57a0f3017a7c905b8"},
+ {file = "mypy-1.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ef4be7baf08a203170f29e89d79064463b7fc7a0908b9d0d5114e8009c3a259"},
+ {file = "mypy-1.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7178def594014aa6c35a8ff411cf37d682f428b3b5617ca79029d8ae72f5402b"},
+ {file = "mypy-1.8.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ab3c84fa13c04aeeeabb2a7f67a25ef5d77ac9d6486ff33ded762ef353aa5592"},
+ {file = "mypy-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:99b00bc72855812a60d253420d8a2eae839b0afa4938f09f4d2aa9bb4654263a"},
+ {file = "mypy-1.8.0-py3-none-any.whl", hash = "sha256:538fd81bb5e430cc1381a443971c0475582ff9f434c16cd46d2c66763ce85d9d"},
+ {file = "mypy-1.8.0.tar.gz", hash = "sha256:6ff8b244d7085a0b425b56d327b480c3b29cafbd2eff27316a004f9a7391ae07"},
]
[package.dependencies]
@@ -1546,13 +1547,13 @@ sympy = "*"
[[package]]
name = "openai"
-version = "1.5.0"
+version = "1.6.1"
description = "The official Python library for the openai API"
optional = false
python-versions = ">=3.7.1"
files = [
- {file = "openai-1.5.0-py3-none-any.whl", hash = "sha256:42d8c84b0714c990e18afe81d37f8a64423e8196bf7157b8ea665b8d8f393253"},
- {file = "openai-1.5.0.tar.gz", hash = "sha256:4cd91e97988ccd6c44f815107def9495cbc718aeb8b28be33a87b6fa2c432508"},
+ {file = "openai-1.6.1-py3-none-any.whl", hash = "sha256:bc9f774838d67ac29fb24cdeb2d58faf57de8b311085dcd1348f7aa02a96c7ee"},
+ {file = "openai-1.6.1.tar.gz", hash = "sha256:d553ca9dbf9486b08e75b09e8671e4f638462aaadccfced632bf490fc3d75fa2"},
]
[package.dependencies]
@@ -1562,7 +1563,7 @@ httpx = ">=0.23.0,<1"
pydantic = ">=1.9.0,<3"
sniffio = "*"
tqdm = ">4"
-typing-extensions = ">=4.5,<5"
+typing-extensions = ">=4.7,<5"
[package.extras]
datalib = ["numpy (>=1)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"]
@@ -1849,13 +1850,13 @@ files = [
[[package]]
name = "pytest"
-version = "7.4.3"
+version = "7.4.4"
description = "pytest: simple powerful testing with Python"
optional = false
python-versions = ">=3.7"
files = [
- {file = "pytest-7.4.3-py3-none-any.whl", hash = "sha256:0d009c083ea859a71b76adf7c1d502e4bc170b80a8ef002da5806527b9591fac"},
- {file = "pytest-7.4.3.tar.gz", hash = "sha256:d989d136982de4e3b29dabcc838ad581c64e8ed52c11fbe86ddebd9da0818cd5"},
+ {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"},
+ {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"},
]
[package.dependencies]
@@ -1871,13 +1872,13 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no
[[package]]
name = "pytest-asyncio"
-version = "0.23.2"
+version = "0.23.3"
description = "Pytest support for asyncio"
optional = false
python-versions = ">=3.8"
files = [
- {file = "pytest-asyncio-0.23.2.tar.gz", hash = "sha256:c16052382554c7b22d48782ab3438d5b10f8cf7a4bdcae7f0f67f097d95beecc"},
- {file = "pytest_asyncio-0.23.2-py3-none-any.whl", hash = "sha256:ea9021364e32d58f0be43b91c6233fb8d2224ccef2398d6837559e587682808f"},
+ {file = "pytest-asyncio-0.23.3.tar.gz", hash = "sha256:af313ce900a62fbe2b1aed18e37ad757f1ef9940c6b6a88e2954de38d6b1fb9f"},
+ {file = "pytest_asyncio-0.23.3-py3-none-any.whl", hash = "sha256:37a9d912e8338ee7b4a3e917381d1c95bfc8682048cb0fbc35baba316ec1faba"},
]
[package.dependencies]
@@ -1991,7 +1992,6 @@ files = [
{file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"},
{file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"},
{file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"},
- {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"},
{file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"},
{file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"},
{file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"},
@@ -1999,15 +1999,8 @@ files = [
{file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"},
{file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"},
{file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"},
- {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"},
{file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"},
{file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"},
- {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"},
- {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"},
- {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"},
- {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"},
- {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"},
- {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"},
{file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"},
{file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"},
{file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"},
@@ -2024,7 +2017,6 @@ files = [
{file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"},
{file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"},
{file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"},
- {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"},
{file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"},
{file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"},
{file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"},
@@ -2032,7 +2024,6 @@ files = [
{file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"},
{file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"},
{file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"},
- {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"},
{file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"},
{file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"},
{file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"},
@@ -2145,99 +2136,104 @@ cffi = {version = "*", markers = "implementation_name == \"pypy\""}
[[package]]
name = "regex"
-version = "2023.10.3"
+version = "2023.12.25"
description = "Alternative regular expression module, to replace re."
optional = false
python-versions = ">=3.7"
files = [
- {file = "regex-2023.10.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4c34d4f73ea738223a094d8e0ffd6d2c1a1b4c175da34d6b0de3d8d69bee6bcc"},
- {file = "regex-2023.10.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a8f4e49fc3ce020f65411432183e6775f24e02dff617281094ba6ab079ef0915"},
- {file = "regex-2023.10.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4cd1bccf99d3ef1ab6ba835308ad85be040e6a11b0977ef7ea8c8005f01a3c29"},
- {file = "regex-2023.10.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:81dce2ddc9f6e8f543d94b05d56e70d03a0774d32f6cca53e978dc01e4fc75b8"},
- {file = "regex-2023.10.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c6b4d23c04831e3ab61717a707a5d763b300213db49ca680edf8bf13ab5d91b"},
- {file = "regex-2023.10.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c15ad0aee158a15e17e0495e1e18741573d04eb6da06d8b84af726cfc1ed02ee"},
- {file = "regex-2023.10.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6239d4e2e0b52c8bd38c51b760cd870069f0bdf99700a62cd509d7a031749a55"},
- {file = "regex-2023.10.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4a8bf76e3182797c6b1afa5b822d1d5802ff30284abe4599e1247be4fd6b03be"},
- {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d9c727bbcf0065cbb20f39d2b4f932f8fa1631c3e01fcedc979bd4f51fe051c5"},
- {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:3ccf2716add72f80714b9a63899b67fa711b654be3fcdd34fa391d2d274ce767"},
- {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:107ac60d1bfdc3edb53be75e2a52aff7481b92817cfdddd9b4519ccf0e54a6ff"},
- {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:00ba3c9818e33f1fa974693fb55d24cdc8ebafcb2e4207680669d8f8d7cca79a"},
- {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f0a47efb1dbef13af9c9a54a94a0b814902e547b7f21acb29434504d18f36e3a"},
- {file = "regex-2023.10.3-cp310-cp310-win32.whl", hash = "sha256:36362386b813fa6c9146da6149a001b7bd063dabc4d49522a1f7aa65b725c7ec"},
- {file = "regex-2023.10.3-cp310-cp310-win_amd64.whl", hash = "sha256:c65a3b5330b54103e7d21cac3f6bf3900d46f6d50138d73343d9e5b2900b2353"},
- {file = "regex-2023.10.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:90a79bce019c442604662d17bf69df99090e24cdc6ad95b18b6725c2988a490e"},
- {file = "regex-2023.10.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c7964c2183c3e6cce3f497e3a9f49d182e969f2dc3aeeadfa18945ff7bdd7051"},
- {file = "regex-2023.10.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ef80829117a8061f974b2fda8ec799717242353bff55f8a29411794d635d964"},
- {file = "regex-2023.10.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5addc9d0209a9afca5fc070f93b726bf7003bd63a427f65ef797a931782e7edc"},
- {file = "regex-2023.10.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c148bec483cc4b421562b4bcedb8e28a3b84fcc8f0aa4418e10898f3c2c0eb9b"},
- {file = "regex-2023.10.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d1f21af4c1539051049796a0f50aa342f9a27cde57318f2fc41ed50b0dbc4ac"},
- {file = "regex-2023.10.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0b9ac09853b2a3e0d0082104036579809679e7715671cfbf89d83c1cb2a30f58"},
- {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ebedc192abbc7fd13c5ee800e83a6df252bec691eb2c4bedc9f8b2e2903f5e2a"},
- {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d8a993c0a0ffd5f2d3bda23d0cd75e7086736f8f8268de8a82fbc4bd0ac6791e"},
- {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:be6b7b8d42d3090b6c80793524fa66c57ad7ee3fe9722b258aec6d0672543fd0"},
- {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4023e2efc35a30e66e938de5aef42b520c20e7eda7bb5fb12c35e5d09a4c43f6"},
- {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0d47840dc05e0ba04fe2e26f15126de7c755496d5a8aae4a08bda4dd8d646c54"},
- {file = "regex-2023.10.3-cp311-cp311-win32.whl", hash = "sha256:9145f092b5d1977ec8c0ab46e7b3381b2fd069957b9862a43bd383e5c01d18c2"},
- {file = "regex-2023.10.3-cp311-cp311-win_amd64.whl", hash = "sha256:b6104f9a46bd8743e4f738afef69b153c4b8b592d35ae46db07fc28ae3d5fb7c"},
- {file = "regex-2023.10.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:bff507ae210371d4b1fe316d03433ac099f184d570a1a611e541923f78f05037"},
- {file = "regex-2023.10.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:be5e22bbb67924dea15039c3282fa4cc6cdfbe0cbbd1c0515f9223186fc2ec5f"},
- {file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a992f702c9be9c72fa46f01ca6e18d131906a7180950958f766c2aa294d4b41"},
- {file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7434a61b158be563c1362d9071358f8ab91b8d928728cd2882af060481244c9e"},
- {file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c2169b2dcabf4e608416f7f9468737583ce5f0a6e8677c4efbf795ce81109d7c"},
- {file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9e908ef5889cda4de038892b9accc36d33d72fb3e12c747e2799a0e806ec841"},
- {file = "regex-2023.10.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12bd4bc2c632742c7ce20db48e0d99afdc05e03f0b4c1af90542e05b809a03d9"},
- {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bc72c231f5449d86d6c7d9cc7cd819b6eb30134bb770b8cfdc0765e48ef9c420"},
- {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bce8814b076f0ce5766dc87d5a056b0e9437b8e0cd351b9a6c4e1134a7dfbda9"},
- {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:ba7cd6dc4d585ea544c1412019921570ebd8a597fabf475acc4528210d7c4a6f"},
- {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b0c7d2f698e83f15228ba41c135501cfe7d5740181d5903e250e47f617eb4292"},
- {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5a8f91c64f390ecee09ff793319f30a0f32492e99f5dc1c72bc361f23ccd0a9a"},
- {file = "regex-2023.10.3-cp312-cp312-win32.whl", hash = "sha256:ad08a69728ff3c79866d729b095872afe1e0557251da4abb2c5faff15a91d19a"},
- {file = "regex-2023.10.3-cp312-cp312-win_amd64.whl", hash = "sha256:39cdf8d141d6d44e8d5a12a8569d5a227f645c87df4f92179bd06e2e2705e76b"},
- {file = "regex-2023.10.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4a3ee019a9befe84fa3e917a2dd378807e423d013377a884c1970a3c2792d293"},
- {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76066d7ff61ba6bf3cb5efe2428fc82aac91802844c022d849a1f0f53820502d"},
- {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bfe50b61bab1b1ec260fa7cd91106fa9fece57e6beba05630afe27c71259c59b"},
- {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fd88f373cb71e6b59b7fa597e47e518282455c2734fd4306a05ca219a1991b0"},
- {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3ab05a182c7937fb374f7e946f04fb23a0c0699c0450e9fb02ef567412d2fa3"},
- {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dac37cf08fcf2094159922edc7a2784cfcc5c70f8354469f79ed085f0328ebdf"},
- {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e54ddd0bb8fb626aa1f9ba7b36629564544954fff9669b15da3610c22b9a0991"},
- {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3367007ad1951fde612bf65b0dffc8fd681a4ab98ac86957d16491400d661302"},
- {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:16f8740eb6dbacc7113e3097b0a36065a02e37b47c936b551805d40340fb9971"},
- {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:f4f2ca6df64cbdd27f27b34f35adb640b5d2d77264228554e68deda54456eb11"},
- {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:39807cbcbe406efca2a233884e169d056c35aa7e9f343d4e78665246a332f597"},
- {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:7eece6fbd3eae4a92d7c748ae825cbc1ee41a89bb1c3db05b5578ed3cfcfd7cb"},
- {file = "regex-2023.10.3-cp37-cp37m-win32.whl", hash = "sha256:ce615c92d90df8373d9e13acddd154152645c0dc060871abf6bd43809673d20a"},
- {file = "regex-2023.10.3-cp37-cp37m-win_amd64.whl", hash = "sha256:0f649fa32fe734c4abdfd4edbb8381c74abf5f34bc0b3271ce687b23729299ed"},
- {file = "regex-2023.10.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9b98b7681a9437262947f41c7fac567c7e1f6eddd94b0483596d320092004533"},
- {file = "regex-2023.10.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:91dc1d531f80c862441d7b66c4505cd6ea9d312f01fb2f4654f40c6fdf5cc37a"},
- {file = "regex-2023.10.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82fcc1f1cc3ff1ab8a57ba619b149b907072e750815c5ba63e7aa2e1163384a4"},
- {file = "regex-2023.10.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7979b834ec7a33aafae34a90aad9f914c41fd6eaa8474e66953f3f6f7cbd4368"},
- {file = "regex-2023.10.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ef71561f82a89af6cfcbee47f0fabfdb6e63788a9258e913955d89fdd96902ab"},
- {file = "regex-2023.10.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd829712de97753367153ed84f2de752b86cd1f7a88b55a3a775eb52eafe8a94"},
- {file = "regex-2023.10.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:00e871d83a45eee2f8688d7e6849609c2ca2a04a6d48fba3dff4deef35d14f07"},
- {file = "regex-2023.10.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:706e7b739fdd17cb89e1fbf712d9dc21311fc2333f6d435eac2d4ee81985098c"},
- {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:cc3f1c053b73f20c7ad88b0d1d23be7e7b3901229ce89f5000a8399746a6e039"},
- {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6f85739e80d13644b981a88f529d79c5bdf646b460ba190bffcaf6d57b2a9863"},
- {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:741ba2f511cc9626b7561a440f87d658aabb3d6b744a86a3c025f866b4d19e7f"},
- {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:e77c90ab5997e85901da85131fd36acd0ed2221368199b65f0d11bca44549711"},
- {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:979c24cbefaf2420c4e377ecd1f165ea08cc3d1fbb44bdc51bccbbf7c66a2cb4"},
- {file = "regex-2023.10.3-cp38-cp38-win32.whl", hash = "sha256:58837f9d221744d4c92d2cf7201c6acd19623b50c643b56992cbd2b745485d3d"},
- {file = "regex-2023.10.3-cp38-cp38-win_amd64.whl", hash = "sha256:c55853684fe08d4897c37dfc5faeff70607a5f1806c8be148f1695be4a63414b"},
- {file = "regex-2023.10.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2c54e23836650bdf2c18222c87f6f840d4943944146ca479858404fedeb9f9af"},
- {file = "regex-2023.10.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:69c0771ca5653c7d4b65203cbfc5e66db9375f1078689459fe196fe08b7b4930"},
- {file = "regex-2023.10.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ac965a998e1388e6ff2e9781f499ad1eaa41e962a40d11c7823c9952c77123e"},
- {file = "regex-2023.10.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1c0e8fae5b27caa34177bdfa5a960c46ff2f78ee2d45c6db15ae3f64ecadde14"},
- {file = "regex-2023.10.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6c56c3d47da04f921b73ff9415fbaa939f684d47293f071aa9cbb13c94afc17d"},
- {file = "regex-2023.10.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ef1e014eed78ab650bef9a6a9cbe50b052c0aebe553fb2881e0453717573f52"},
- {file = "regex-2023.10.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d29338556a59423d9ff7b6eb0cb89ead2b0875e08fe522f3e068b955c3e7b59b"},
- {file = "regex-2023.10.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9c6d0ced3c06d0f183b73d3c5920727268d2201aa0fe6d55c60d68c792ff3588"},
- {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:994645a46c6a740ee8ce8df7911d4aee458d9b1bc5639bc968226763d07f00fa"},
- {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:66e2fe786ef28da2b28e222c89502b2af984858091675044d93cb50e6f46d7af"},
- {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:11175910f62b2b8c055f2b089e0fedd694fe2be3941b3e2633653bc51064c528"},
- {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:06e9abc0e4c9ab4779c74ad99c3fc10d3967d03114449acc2c2762ad4472b8ca"},
- {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:fb02e4257376ae25c6dd95a5aec377f9b18c09be6ebdefa7ad209b9137b73d48"},
- {file = "regex-2023.10.3-cp39-cp39-win32.whl", hash = "sha256:3b2c3502603fab52d7619b882c25a6850b766ebd1b18de3df23b2f939360e1bd"},
- {file = "regex-2023.10.3-cp39-cp39-win_amd64.whl", hash = "sha256:adbccd17dcaff65704c856bd29951c58a1bd4b2b0f8ad6b826dbd543fe740988"},
- {file = "regex-2023.10.3.tar.gz", hash = "sha256:3fef4f844d2290ee0ba57addcec17eec9e3df73f10a2748485dfd6a3a188cc0f"},
+ {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0694219a1d54336fd0445ea382d49d36882415c0134ee1e8332afd1529f0baa5"},
+ {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b014333bd0217ad3d54c143de9d4b9a3ca1c5a29a6d0d554952ea071cff0f1f8"},
+ {file = "regex-2023.12.25-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d865984b3f71f6d0af64d0d88f5733521698f6c16f445bb09ce746c92c97c586"},
+ {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e0eabac536b4cc7f57a5f3d095bfa557860ab912f25965e08fe1545e2ed8b4c"},
+ {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c25a8ad70e716f96e13a637802813f65d8a6760ef48672aa3502f4c24ea8b400"},
+ {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9b6d73353f777630626f403b0652055ebfe8ff142a44ec2cf18ae470395766e"},
+ {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9cc99d6946d750eb75827cb53c4371b8b0fe89c733a94b1573c9dd16ea6c9e4"},
+ {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88d1f7bef20c721359d8675f7d9f8e414ec5003d8f642fdfd8087777ff7f94b5"},
+ {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cb3fe77aec8f1995611f966d0c656fdce398317f850d0e6e7aebdfe61f40e1cd"},
+ {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7aa47c2e9ea33a4a2a05f40fcd3ea36d73853a2aae7b4feab6fc85f8bf2c9704"},
+ {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:df26481f0c7a3f8739fecb3e81bc9da3fcfae34d6c094563b9d4670b047312e1"},
+ {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c40281f7d70baf6e0db0c2f7472b31609f5bc2748fe7275ea65a0b4601d9b392"},
+ {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:d94a1db462d5690ebf6ae86d11c5e420042b9898af5dcf278bd97d6bda065423"},
+ {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ba1b30765a55acf15dce3f364e4928b80858fa8f979ad41f862358939bdd1f2f"},
+ {file = "regex-2023.12.25-cp310-cp310-win32.whl", hash = "sha256:150c39f5b964e4d7dba46a7962a088fbc91f06e606f023ce57bb347a3b2d4630"},
+ {file = "regex-2023.12.25-cp310-cp310-win_amd64.whl", hash = "sha256:09da66917262d9481c719599116c7dc0c321ffcec4b1f510c4f8a066f8768105"},
+ {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1b9d811f72210fa9306aeb88385b8f8bcef0dfbf3873410413c00aa94c56c2b6"},
+ {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d902a43085a308cef32c0d3aea962524b725403fd9373dea18110904003bac97"},
+ {file = "regex-2023.12.25-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d166eafc19f4718df38887b2bbe1467a4f74a9830e8605089ea7a30dd4da8887"},
+ {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7ad32824b7f02bb3c9f80306d405a1d9b7bb89362d68b3c5a9be53836caebdb"},
+ {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:636ba0a77de609d6510235b7f0e77ec494d2657108f777e8765efc060094c98c"},
+ {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fda75704357805eb953a3ee15a2b240694a9a514548cd49b3c5124b4e2ad01b"},
+ {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f72cbae7f6b01591f90814250e636065850c5926751af02bb48da94dfced7baa"},
+ {file = "regex-2023.12.25-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db2a0b1857f18b11e3b0e54ddfefc96af46b0896fb678c85f63fb8c37518b3e7"},
+ {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7502534e55c7c36c0978c91ba6f61703faf7ce733715ca48f499d3dbbd7657e0"},
+ {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e8c7e08bb566de4faaf11984af13f6bcf6a08f327b13631d41d62592681d24fe"},
+ {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:283fc8eed679758de38fe493b7d7d84a198b558942b03f017b1f94dda8efae80"},
+ {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:f44dd4d68697559d007462b0a3a1d9acd61d97072b71f6d1968daef26bc744bd"},
+ {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:67d3ccfc590e5e7197750fcb3a2915b416a53e2de847a728cfa60141054123d4"},
+ {file = "regex-2023.12.25-cp311-cp311-win32.whl", hash = "sha256:68191f80a9bad283432385961d9efe09d783bcd36ed35a60fb1ff3f1ec2efe87"},
+ {file = "regex-2023.12.25-cp311-cp311-win_amd64.whl", hash = "sha256:7d2af3f6b8419661a0c421584cfe8aaec1c0e435ce7e47ee2a97e344b98f794f"},
+ {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8a0ccf52bb37d1a700375a6b395bff5dd15c50acb745f7db30415bae3c2b0715"},
+ {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c3c4a78615b7762740531c27cf46e2f388d8d727d0c0c739e72048beb26c8a9d"},
+ {file = "regex-2023.12.25-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ad83e7545b4ab69216cef4cc47e344d19622e28aabec61574b20257c65466d6a"},
+ {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7a635871143661feccce3979e1727c4e094f2bdfd3ec4b90dfd4f16f571a87a"},
+ {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d498eea3f581fbe1b34b59c697512a8baef88212f92e4c7830fcc1499f5b45a5"},
+ {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:43f7cd5754d02a56ae4ebb91b33461dc67be8e3e0153f593c509e21d219c5060"},
+ {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51f4b32f793812714fd5307222a7f77e739b9bc566dc94a18126aba3b92b98a3"},
+ {file = "regex-2023.12.25-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba99d8077424501b9616b43a2d208095746fb1284fc5ba490139651f971d39d9"},
+ {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4bfc2b16e3ba8850e0e262467275dd4d62f0d045e0e9eda2bc65078c0110a11f"},
+ {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8c2c19dae8a3eb0ea45a8448356ed561be843b13cbc34b840922ddf565498c1c"},
+ {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:60080bb3d8617d96f0fb7e19796384cc2467447ef1c491694850ebd3670bc457"},
+ {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b77e27b79448e34c2c51c09836033056a0547aa360c45eeeb67803da7b0eedaf"},
+ {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:518440c991f514331f4850a63560321f833979d145d7d81186dbe2f19e27ae3d"},
+ {file = "regex-2023.12.25-cp312-cp312-win32.whl", hash = "sha256:e2610e9406d3b0073636a3a2e80db05a02f0c3169b5632022b4e81c0364bcda5"},
+ {file = "regex-2023.12.25-cp312-cp312-win_amd64.whl", hash = "sha256:cc37b9aeebab425f11f27e5e9e6cf580be7206c6582a64467a14dda211abc232"},
+ {file = "regex-2023.12.25-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:da695d75ac97cb1cd725adac136d25ca687da4536154cdc2815f576e4da11c69"},
+ {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d126361607b33c4eb7b36debc173bf25d7805847346dd4d99b5499e1fef52bc7"},
+ {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4719bb05094d7d8563a450cf8738d2e1061420f79cfcc1fa7f0a44744c4d8f73"},
+ {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5dd58946bce44b53b06d94aa95560d0b243eb2fe64227cba50017a8d8b3cd3e2"},
+ {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22a86d9fff2009302c440b9d799ef2fe322416d2d58fc124b926aa89365ec482"},
+ {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2aae8101919e8aa05ecfe6322b278f41ce2994c4a430303c4cd163fef746e04f"},
+ {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e692296c4cc2873967771345a876bcfc1c547e8dd695c6b89342488b0ea55cd8"},
+ {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:263ef5cc10979837f243950637fffb06e8daed7f1ac1e39d5910fd29929e489a"},
+ {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:d6f7e255e5fa94642a0724e35406e6cb7001c09d476ab5fce002f652b36d0c39"},
+ {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:88ad44e220e22b63b0f8f81f007e8abbb92874d8ced66f32571ef8beb0643b2b"},
+ {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:3a17d3ede18f9cedcbe23d2daa8a2cd6f59fe2bf082c567e43083bba3fb00347"},
+ {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d15b274f9e15b1a0b7a45d2ac86d1f634d983ca40d6b886721626c47a400bf39"},
+ {file = "regex-2023.12.25-cp37-cp37m-win32.whl", hash = "sha256:ed19b3a05ae0c97dd8f75a5d8f21f7723a8c33bbc555da6bbe1f96c470139d3c"},
+ {file = "regex-2023.12.25-cp37-cp37m-win_amd64.whl", hash = "sha256:a6d1047952c0b8104a1d371f88f4ab62e6275567d4458c1e26e9627ad489b445"},
+ {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b43523d7bc2abd757119dbfb38af91b5735eea45537ec6ec3a5ec3f9562a1c53"},
+ {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:efb2d82f33b2212898f1659fb1c2e9ac30493ac41e4d53123da374c3b5541e64"},
+ {file = "regex-2023.12.25-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b7fca9205b59c1a3d5031f7e64ed627a1074730a51c2a80e97653e3e9fa0d415"},
+ {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086dd15e9435b393ae06f96ab69ab2d333f5d65cbe65ca5a3ef0ec9564dfe770"},
+ {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e81469f7d01efed9b53740aedd26085f20d49da65f9c1f41e822a33992cb1590"},
+ {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:34e4af5b27232f68042aa40a91c3b9bb4da0eeb31b7632e0091afc4310afe6cb"},
+ {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9852b76ab558e45b20bf1893b59af64a28bd3820b0c2efc80e0a70a4a3ea51c1"},
+ {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff100b203092af77d1a5a7abe085b3506b7eaaf9abf65b73b7d6905b6cb76988"},
+ {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cc038b2d8b1470364b1888a98fd22d616fba2b6309c5b5f181ad4483e0017861"},
+ {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:094ba386bb5c01e54e14434d4caabf6583334090865b23ef58e0424a6286d3dc"},
+ {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5cd05d0f57846d8ba4b71d9c00f6f37d6b97d5e5ef8b3c3840426a475c8f70f4"},
+ {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:9aa1a67bbf0f957bbe096375887b2505f5d8ae16bf04488e8b0f334c36e31360"},
+ {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:98a2636994f943b871786c9e82bfe7883ecdaba2ef5df54e1450fa9869d1f756"},
+ {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:37f8e93a81fc5e5bd8db7e10e62dc64261bcd88f8d7e6640aaebe9bc180d9ce2"},
+ {file = "regex-2023.12.25-cp38-cp38-win32.whl", hash = "sha256:d78bd484930c1da2b9679290a41cdb25cc127d783768a0369d6b449e72f88beb"},
+ {file = "regex-2023.12.25-cp38-cp38-win_amd64.whl", hash = "sha256:b521dcecebc5b978b447f0f69b5b7f3840eac454862270406a39837ffae4e697"},
+ {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f7bc09bc9c29ebead055bcba136a67378f03d66bf359e87d0f7c759d6d4ffa31"},
+ {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e14b73607d6231f3cc4622809c196b540a6a44e903bcfad940779c80dffa7be7"},
+ {file = "regex-2023.12.25-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9eda5f7a50141291beda3edd00abc2d4a5b16c29c92daf8d5bd76934150f3edc"},
+ {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc6bb9aa69aacf0f6032c307da718f61a40cf970849e471254e0e91c56ffca95"},
+ {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:298dc6354d414bc921581be85695d18912bea163a8b23cac9a2562bbcd5088b1"},
+ {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f4e475a80ecbd15896a976aa0b386c5525d0ed34d5c600b6d3ebac0a67c7ddf"},
+ {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:531ac6cf22b53e0696f8e1d56ce2396311254eb806111ddd3922c9d937151dae"},
+ {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22f3470f7524b6da61e2020672df2f3063676aff444db1daa283c2ea4ed259d6"},
+ {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:89723d2112697feaa320c9d351e5f5e7b841e83f8b143dba8e2d2b5f04e10923"},
+ {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0ecf44ddf9171cd7566ef1768047f6e66975788258b1c6c6ca78098b95cf9a3d"},
+ {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:905466ad1702ed4acfd67a902af50b8db1feeb9781436372261808df7a2a7bca"},
+ {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:4558410b7a5607a645e9804a3e9dd509af12fb72b9825b13791a37cd417d73a5"},
+ {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:7e316026cc1095f2a3e8cc012822c99f413b702eaa2ca5408a513609488cb62f"},
+ {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3b1de218d5375cd6ac4b5493e0b9f3df2be331e86520f23382f216c137913d20"},
+ {file = "regex-2023.12.25-cp39-cp39-win32.whl", hash = "sha256:11a963f8e25ab5c61348d090bf1b07f1953929c13bd2309a0662e9ff680763c9"},
+ {file = "regex-2023.12.25-cp39-cp39-win_amd64.whl", hash = "sha256:e693e233ac92ba83a87024e1d32b5f9ab15ca55ddd916d878146f4e3406b5c91"},
+ {file = "regex-2023.12.25.tar.gz", hash = "sha256:29171aa128da69afdf4bde412d5bedc335f2ca8fcfe4489038577d05f16181e5"},
]
[[package]]
@@ -2263,28 +2259,28 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
[[package]]
name = "ruff"
-version = "0.1.8"
+version = "0.1.11"
description = "An extremely fast Python linter and code formatter, written in Rust."
optional = false
python-versions = ">=3.7"
files = [
- {file = "ruff-0.1.8-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:7de792582f6e490ae6aef36a58d85df9f7a0cfd1b0d4fe6b4fb51803a3ac96fa"},
- {file = "ruff-0.1.8-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:c8e3255afd186c142eef4ec400d7826134f028a85da2146102a1172ecc7c3696"},
- {file = "ruff-0.1.8-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ff78a7583020da124dd0deb835ece1d87bb91762d40c514ee9b67a087940528b"},
- {file = "ruff-0.1.8-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bd8ee69b02e7bdefe1e5da2d5b6eaaddcf4f90859f00281b2333c0e3a0cc9cd6"},
- {file = "ruff-0.1.8-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a05b0ddd7ea25495e4115a43125e8a7ebed0aa043c3d432de7e7d6e8e8cd6448"},
- {file = "ruff-0.1.8-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:e6f08ca730f4dc1b76b473bdf30b1b37d42da379202a059eae54ec7fc1fbcfed"},
- {file = "ruff-0.1.8-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f35960b02df6b827c1b903091bb14f4b003f6cf102705efc4ce78132a0aa5af3"},
- {file = "ruff-0.1.8-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7d076717c67b34c162da7c1a5bda16ffc205e0e0072c03745275e7eab888719f"},
- {file = "ruff-0.1.8-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6a21ab023124eafb7cef6d038f835cb1155cd5ea798edd8d9eb2f8b84be07d9"},
- {file = "ruff-0.1.8-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:ce697c463458555027dfb194cb96d26608abab920fa85213deb5edf26e026664"},
- {file = "ruff-0.1.8-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:db6cedd9ffed55548ab313ad718bc34582d394e27a7875b4b952c2d29c001b26"},
- {file = "ruff-0.1.8-py3-none-musllinux_1_2_i686.whl", hash = "sha256:05ffe9dbd278965271252704eddb97b4384bf58b971054d517decfbf8c523f05"},
- {file = "ruff-0.1.8-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:5daaeaf00ae3c1efec9742ff294b06c3a2a9db8d3db51ee4851c12ad385cda30"},
- {file = "ruff-0.1.8-py3-none-win32.whl", hash = "sha256:e49fbdfe257fa41e5c9e13c79b9e79a23a79bd0e40b9314bc53840f520c2c0b3"},
- {file = "ruff-0.1.8-py3-none-win_amd64.whl", hash = "sha256:f41f692f1691ad87f51708b823af4bb2c5c87c9248ddd3191c8f088e66ce590a"},
- {file = "ruff-0.1.8-py3-none-win_arm64.whl", hash = "sha256:aa8ee4f8440023b0a6c3707f76cadce8657553655dcbb5fc9b2f9bb9bee389f6"},
- {file = "ruff-0.1.8.tar.gz", hash = "sha256:f7ee467677467526cfe135eab86a40a0e8db43117936ac4f9b469ce9cdb3fb62"},
+ {file = "ruff-0.1.11-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:a7f772696b4cdc0a3b2e527fc3c7ccc41cdcb98f5c80fdd4f2b8c50eb1458196"},
+ {file = "ruff-0.1.11-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:934832f6ed9b34a7d5feea58972635c2039c7a3b434fe5ba2ce015064cb6e955"},
+ {file = "ruff-0.1.11-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea0d3e950e394c4b332bcdd112aa566010a9f9c95814844a7468325290aabfd9"},
+ {file = "ruff-0.1.11-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9bd4025b9c5b429a48280785a2b71d479798a69f5c2919e7d274c5f4b32c3607"},
+ {file = "ruff-0.1.11-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1ad00662305dcb1e987f5ec214d31f7d6a062cae3e74c1cbccef15afd96611d"},
+ {file = "ruff-0.1.11-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:4b077ce83f47dd6bea1991af08b140e8b8339f0ba8cb9b7a484c30ebab18a23f"},
+ {file = "ruff-0.1.11-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4a88efecec23c37b11076fe676e15c6cdb1271a38f2b415e381e87fe4517f18"},
+ {file = "ruff-0.1.11-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5b25093dad3b055667730a9b491129c42d45e11cdb7043b702e97125bcec48a1"},
+ {file = "ruff-0.1.11-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:231d8fb11b2cc7c0366a326a66dafc6ad449d7fcdbc268497ee47e1334f66f77"},
+ {file = "ruff-0.1.11-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:09c415716884950080921dd6237767e52e227e397e2008e2bed410117679975b"},
+ {file = "ruff-0.1.11-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:0f58948c6d212a6b8d41cd59e349751018797ce1727f961c2fa755ad6208ba45"},
+ {file = "ruff-0.1.11-py3-none-musllinux_1_2_i686.whl", hash = "sha256:190a566c8f766c37074d99640cd9ca3da11d8deae2deae7c9505e68a4a30f740"},
+ {file = "ruff-0.1.11-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:6464289bd67b2344d2a5d9158d5eb81025258f169e69a46b741b396ffb0cda95"},
+ {file = "ruff-0.1.11-py3-none-win32.whl", hash = "sha256:9b8f397902f92bc2e70fb6bebfa2139008dc72ae5177e66c383fa5426cb0bf2c"},
+ {file = "ruff-0.1.11-py3-none-win_amd64.whl", hash = "sha256:eb85ee287b11f901037a6683b2374bb0ec82928c5cbc984f575d0437979c521a"},
+ {file = "ruff-0.1.11-py3-none-win_arm64.whl", hash = "sha256:97ce4d752f964ba559c7023a86e5f8e97f026d511e48013987623915431c7ea9"},
+ {file = "ruff-0.1.11.tar.gz", hash = "sha256:f9d4d88cb6eeb4dfe20f9f0519bd2eaba8119bde87c3d5065c541dbae2b5a2cb"},
]
[[package]]
@@ -2521,13 +2517,13 @@ telegram = ["requests"]
[[package]]
name = "traitlets"
-version = "5.14.0"
+version = "5.14.1"
description = "Traitlets Python configuration system"
optional = false
python-versions = ">=3.8"
files = [
- {file = "traitlets-5.14.0-py3-none-any.whl", hash = "sha256:f14949d23829023013c47df20b4a76ccd1a85effb786dc060f34de7948361b33"},
- {file = "traitlets-5.14.0.tar.gz", hash = "sha256:fcdaa8ac49c04dfa0ed3ee3384ef6dfdb5d6f3741502be247279407679296772"},
+ {file = "traitlets-5.14.1-py3-none-any.whl", hash = "sha256:2e5a030e6eff91737c643231bfcf04a65b0132078dad75e4936700b213652e74"},
+ {file = "traitlets-5.14.1.tar.gz", hash = "sha256:8585105b371a04b8316a43d5ce29c098575c2e477850b62b848b964f1444527e"},
]
[package.extras]
diff --git a/semantic_router/encoders/__init__.py b/semantic_router/encoders/__init__.py
index 4bb1eb37..9d3a027e 100644
--- a/semantic_router/encoders/__init__.py
+++ b/semantic_router/encoders/__init__.py
@@ -1,8 +1,8 @@
from semantic_router.encoders.base import BaseEncoder
from semantic_router.encoders.bm25 import BM25Encoder
from semantic_router.encoders.cohere import CohereEncoder
-from semantic_router.encoders.openai import OpenAIEncoder
from semantic_router.encoders.fastembed import FastEmbedEncoder
+from semantic_router.encoders.openai import OpenAIEncoder
__all__ = [
"BaseEncoder",
diff --git a/semantic_router/encoders/fastembed.py b/semantic_router/encoders/fastembed.py
index d324058d..4bb46b85 100644
--- a/semantic_router/encoders/fastembed.py
+++ b/semantic_router/encoders/fastembed.py
@@ -1,4 +1,5 @@
from typing import Any, List, Optional
+
import numpy as np
from pydantic import BaseModel, PrivateAttr
diff --git a/semantic_router/layer.py b/semantic_router/layer.py
index 5b2aad84..0315a281 100644
--- a/semantic_router/layer.py
+++ b/semantic_router/layer.py
@@ -55,7 +55,7 @@ def __init__(
self,
routes: list[Route] = [],
encoder_type: str = "openai",
- encoder_name: str | None = None,
+ encoder_name: str | None = "text-embedding-ada-002",
):
self.encoder_type = encoder_type
if encoder_name is None:
@@ -107,15 +107,24 @@ def to_file(self, path: str):
"""Save the routes to a file in JSON or YAML format"""
logger.info(f"Saving route config to {path}")
_, ext = os.path.splitext(path)
+
+ # Check file extension before creating directories or files
+ if ext not in [".json", ".yaml", ".yml"]:
+ raise ValueError(
+ "Unsupported file type. Only .json and .yaml are supported"
+ )
+
+ dir_name = os.path.dirname(path)
+
+ # Create the directory if it doesn't exist and dir_name is not an empty string
+ if dir_name and not os.path.exists(dir_name):
+ os.makedirs(dir_name)
+
with open(path, "w") as f:
if ext == ".json":
json.dump(self.to_dict(), f, indent=4)
elif ext in [".yaml", ".yml"]:
yaml.safe_dump(self.to_dict(), f)
- else:
- raise ValueError(
- "Unsupported file type. Only .json and .yaml are supported"
- )
def add(self, route: Route):
self.routes.append(route)
diff --git a/semantic_router/route.py b/semantic_router/route.py
index 12afa7fe..3fc717ef 100644
--- a/semantic_router/route.py
+++ b/semantic_router/route.py
@@ -70,6 +70,7 @@ def from_dynamic_route(cls, entity: Union[BaseModel, Callable]):
"""
schema = function_call.get_schema(item=entity)
dynamic_route = cls._generate_dynamic_route(function_schema=schema)
+ dynamic_route.function_schema = schema
return dynamic_route
@classmethod
diff --git a/semantic_router/schema.py b/semantic_router/schema.py
index 465cfaac..360442f6 100644
--- a/semantic_router/schema.py
+++ b/semantic_router/schema.py
@@ -8,7 +8,6 @@
CohereEncoder,
OpenAIEncoder,
)
-
from semantic_router.utils.splitters import semantic_splitter
diff --git a/semantic_router/utils/function_call.py b/semantic_router/utils/function_call.py
index 2ead3ab5..4319a8ec 100644
--- a/semantic_router/utils/function_call.py
+++ b/semantic_router/utils/function_call.py
@@ -4,6 +4,7 @@
from pydantic import BaseModel
+from semantic_router.schema import RouteChoice
from semantic_router.utils.llm import llm
from semantic_router.utils.logger import logger
@@ -105,23 +106,14 @@ def is_valid_inputs(inputs: dict[str, Any], function_schema: dict[str, Any]) ->
return False
-def call_function(function: Callable, inputs: dict[str, str]):
- try:
- return function(**inputs)
- except TypeError as e:
- logger.error(f"Error calling function: {e}")
-
-
# TODO: Add route layer object to the input, solve circular import issue
-async def route_and_execute(query: str, functions: list[Callable], route_layer):
- function_name = route_layer(query)
- if not function_name:
- logger.warning("No function found, calling LLM...")
- return llm(query)
+async def route_and_execute(query: str, functions: list[Callable], layer) -> Any:
+ route_choice: RouteChoice = layer(query)
for function in functions:
- if function.__name__ == function_name:
- print(f"Calling function: {function.__name__}")
- schema = get_schema(function)
- inputs = extract_function_inputs(query, schema)
- call_function(function, inputs)
+ if function.__name__ == route_choice.name:
+ if route_choice.function_call:
+ return function(**route_choice.function_call)
+
+ logger.warning("No function found, calling LLM.")
+ return llm(query)
diff --git a/semantic_router/utils/splitters.py b/semantic_router/utils/splitters.py
index 514ae821..74601520 100644
--- a/semantic_router/utils/splitters.py
+++ b/semantic_router/utils/splitters.py
@@ -1,4 +1,5 @@
import numpy as np
+
from semantic_router.encoders import BaseEncoder
@@ -13,13 +14,15 @@ def semantic_splitter(
Method 1: "consecutive_similarity_drop" - This method splits documents based on
the changes in similarity scores between consecutive documents.
- Method 2: "cumulative_similarity_drop" - This method segments the documents based on the
- changes in cumulative similarity score of the documents within the same split.
+ Method 2: "cumulative_similarity_drop" - This method segments the documents based
+ on the changes in cumulative similarity score of the documents within the same
+ split.
Args:
encoder (BaseEncoder): Encoder for document embeddings.
docs (list[str]): Documents to split.
- threshold (float): The similarity drop value that will trigger a new document split.
+ threshold (float): The similarity drop value that will trigger a new document
+ split.
split_method (str): The method to use for splitting.
Returns:
@@ -64,7 +67,8 @@ def semantic_splitter(
else:
raise ValueError(
- "Invalid 'split_method'. Choose either 'consecutive_similarity_drop' or 'cumulative_similarity_drop'."
+ "Invalid 'split_method'. Choose either 'consecutive_similarity_drop' or"
+ " 'cumulative_similarity_drop'."
)
splits[f"split {curr_split_num}"] = docs[curr_split_start_idx:]
diff --git a/tests/unit/test_layer.py b/tests/unit/test_layer.py
index 45b57472..32754997 100644
--- a/tests/unit/test_layer.py
+++ b/tests/unit/test_layer.py
@@ -1,4 +1,5 @@
import os
+import tempfile
from unittest.mock import mock_open, patch
import pytest
@@ -175,28 +176,30 @@ def test_failover_score_threshold(self, base_encoder):
assert route_layer.score_threshold == 0.82
def test_json(self, openai_encoder, routes):
- os.environ["OPENAI_API_KEY"] = "test_api_key"
- route_layer = RouteLayer(encoder=openai_encoder, routes=routes)
- route_layer.to_json("test_output.json")
- assert os.path.exists("test_output.json")
- route_layer_from_file = RouteLayer.from_json("test_output.json")
- assert (
- route_layer_from_file.index is not None
- and route_layer_from_file.categories is not None
- )
- os.remove("test_output.json")
+ with tempfile.NamedTemporaryFile(suffix=".yaml", delete=False) as temp:
+ os.environ["OPENAI_API_KEY"] = "test_api_key"
+ route_layer = RouteLayer(encoder=openai_encoder, routes=routes)
+ route_layer.to_json(temp.name)
+ assert os.path.exists(temp.name)
+ route_layer_from_file = RouteLayer.from_json(temp.name)
+ assert (
+ route_layer_from_file.index is not None
+ and route_layer_from_file.categories is not None
+ )
+ os.remove(temp.name)
def test_yaml(self, openai_encoder, routes):
- os.environ["OPENAI_API_KEY"] = "test_api_key"
- route_layer = RouteLayer(encoder=openai_encoder, routes=routes)
- route_layer.to_yaml("test_output.yaml")
- assert os.path.exists("test_output.yaml")
- route_layer_from_file = RouteLayer.from_yaml("test_output.yaml")
- assert (
- route_layer_from_file.index is not None
- and route_layer_from_file.categories is not None
- )
- os.remove("test_output.yaml")
+ with tempfile.NamedTemporaryFile(suffix=".yaml", delete=False) as temp:
+ os.environ["OPENAI_API_KEY"] = "test_api_key"
+ route_layer = RouteLayer(encoder=openai_encoder, routes=routes)
+ route_layer.to_yaml(temp.name)
+ assert os.path.exists(temp.name)
+ route_layer_from_file = RouteLayer.from_yaml(temp.name)
+ assert (
+ route_layer_from_file.index is not None
+ and route_layer_from_file.categories is not None
+ )
+ os.remove(temp.name)
def test_config(self, openai_encoder, routes):
os.environ["OPENAI_API_KEY"] = "test_api_key"
diff --git a/tests/unit/test_splitters.py b/tests/unit/test_splitters.py
index bcd8f62b..ac9c037c 100644
--- a/tests/unit/test_splitters.py
+++ b/tests/unit/test_splitters.py
@@ -1,7 +1,9 @@
-import pytest
from unittest.mock import Mock
-from semantic_router.utils.splitters import semantic_splitter
+
+import pytest
+
from semantic_router.schema import Conversation, Message
+from semantic_router.utils.splitters import semantic_splitter
def test_semantic_splitter_consecutive_similarity_drop():
From 6572f3fa83c2d281463821fba29216da06f172cf Mon Sep 17 00:00:00 2001
From: Simonas <20096648+simjak@users.noreply.github.com>
Date: Fri, 5 Jan 2024 12:46:33 +0200
Subject: [PATCH 02/24] fix: set default layer encoder name to None
---
docs/examples/function_calling.ipynb | 81 +++++++++++++++++-----------
semantic_router/layer.py | 2 +-
2 files changed, 50 insertions(+), 33 deletions(-)
diff --git a/docs/examples/function_calling.ipynb b/docs/examples/function_calling.ipynb
index 7b0347bc..401ab04a 100644
--- a/docs/examples/function_calling.ipynb
+++ b/docs/examples/function_calling.ipynb
@@ -9,7 +9,7 @@
},
{
"cell_type": "code",
- "execution_count": 29,
+ "execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
@@ -41,7 +41,7 @@
},
{
"cell_type": "code",
- "execution_count": 30,
+ "execution_count": 2,
"metadata": {},
"outputs": [
{
@@ -54,10 +54,10 @@
{
"data": {
"text/plain": [
- "'05:02'"
+ "'05:44'"
]
},
- "execution_count": 30,
+ "execution_count": 2,
"metadata": {},
"output_type": "execute_result"
}
@@ -75,15 +75,17 @@
},
{
"cell_type": "code",
- "execution_count": 31,
+ "execution_count": 3,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
- "\u001b[32m2024-01-05 12:02:35 INFO semantic_router.utils.logger Generating dynamic route...\u001b[0m\n",
- "\u001b[32m2024-01-05 12:02:38 INFO semantic_router.utils.logger Generated route config:\n",
+ "/Users/jakit/customers/aurelio/semantic-router/.venv/lib/python3.11/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n",
+ " from .autonotebook import tqdm as notebook_tqdm\n",
+ "\u001b[32m2024-01-05 12:44:13 INFO semantic_router.utils.logger Generating dynamic route...\u001b[0m\n",
+ "\u001b[32m2024-01-05 12:44:16 INFO semantic_router.utils.logger Generated route config:\n",
"{\n",
" \"name\": \"get_time\",\n",
" \"utterances\": [\n",
@@ -94,8 +96,8 @@
" \"What's the current time in Berlin?\"\n",
" ]\n",
"}\u001b[0m\n",
- "\u001b[32m2024-01-05 12:02:38 INFO semantic_router.utils.logger Generating dynamic route...\u001b[0m\n",
- "\u001b[32m2024-01-05 12:02:41 INFO semantic_router.utils.logger Generated route config:\n",
+ "\u001b[32m2024-01-05 12:44:16 INFO semantic_router.utils.logger Generating dynamic route...\u001b[0m\n",
+ "\u001b[32m2024-01-05 12:44:19 INFO semantic_router.utils.logger Generated route config:\n",
"{\n",
" \"name\": \"get_news\",\n",
" \"utterances\": [\n",
@@ -123,7 +125,7 @@
},
{
"cell_type": "code",
- "execution_count": 32,
+ "execution_count": 4,
"metadata": {},
"outputs": [],
"source": [
@@ -150,14 +152,21 @@
},
{
"cell_type": "code",
- "execution_count": 33,
+ "execution_count": 5,
"metadata": {},
"outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "\u001b[32m2024-01-05 12:44:23 INFO semantic_router.utils.logger Using default openai encoder: None\u001b[0m\n"
+ ]
+ },
{
"data": {
"text/plain": [
"{'encoder_type': 'openai',\n",
- " 'encoder_name': 'text-embedding-ada-002',\n",
+ " 'encoder_name': None,\n",
" 'routes': [{'name': 'get_time',\n",
" 'utterances': [\"What's the current time in New York?\",\n",
" 'Can you tell me the time in London?',\n",
@@ -188,7 +197,7 @@
" 'function_schema': None}]}"
]
},
- "execution_count": 33,
+ "execution_count": 5,
"metadata": {},
"output_type": "execute_result"
}
@@ -202,7 +211,7 @@
},
{
"cell_type": "code",
- "execution_count": 34,
+ "execution_count": 6,
"metadata": {},
"outputs": [
{
@@ -211,7 +220,7 @@
"Route(name='get_time', utterances=[\"What's the current time in New York?\", 'Can you tell me the time in London?', \"What's the current time in Tokyo?\", 'Can you give me the time in Sydney?', \"What's the current time in Berlin?\"], description=None, function_schema={'name': 'get_time', 'description': 'Finds the current time in a specific timezone.\\n\\n:param timezone: The timezone to find the current time in, should\\n be a valid timezone from the IANA Time Zone Database like\\n \"America/New_York\" or \"Europe/London\".\\n:type timezone: str\\n:return: The current time in the specified timezone.', 'signature': '(timezone: str) -> str', 'output': \"\"})"
]
},
- "execution_count": 34,
+ "execution_count": 6,
"metadata": {},
"output_type": "execute_result"
}
@@ -223,21 +232,21 @@
},
{
"cell_type": "code",
- "execution_count": 35,
+ "execution_count": 7,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
- "\u001b[32m2024-01-05 12:02:41 INFO semantic_router.utils.logger Removed route `get_weather`\u001b[0m\n"
+ "\u001b[32m2024-01-05 12:44:32 INFO semantic_router.utils.logger Removed route `get_weather`\u001b[0m\n"
]
},
{
"data": {
"text/plain": [
"{'encoder_type': 'openai',\n",
- " 'encoder_name': 'text-embedding-ada-002',\n",
+ " 'encoder_name': None,\n",
" 'routes': [{'name': 'get_time',\n",
" 'utterances': [\"What's the current time in New York?\",\n",
" 'Can you tell me the time in London?',\n",
@@ -262,7 +271,7 @@
" 'output': \"\"}}]}"
]
},
- "execution_count": 35,
+ "execution_count": 7,
"metadata": {},
"output_type": "execute_result"
}
@@ -282,14 +291,14 @@
},
{
"cell_type": "code",
- "execution_count": 36,
+ "execution_count": 8,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
- "\u001b[32m2024-01-05 12:02:41 INFO semantic_router.utils.logger Saving route config to output/layer_config.json\u001b[0m\n"
+ "\u001b[32m2024-01-05 12:45:12 INFO semantic_router.utils.logger Saving route config to output/layer_config.json\u001b[0m\n"
]
}
],
@@ -313,14 +322,15 @@
},
{
"cell_type": "code",
- "execution_count": 37,
+ "execution_count": 9,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
- "\u001b[32m2024-01-05 12:02:41 INFO semantic_router.utils.logger Loading route config from output/layer_config.json\u001b[0m\n"
+ "\u001b[32m2024-01-05 12:45:15 INFO semantic_router.utils.logger Loading route config from output/layer_config.json\u001b[0m\n",
+ "\u001b[32m2024-01-05 12:45:15 INFO semantic_router.utils.logger Using default openai encoder: None\u001b[0m\n"
]
}
],
@@ -339,14 +349,14 @@
},
{
"cell_type": "code",
- "execution_count": 38,
+ "execution_count": 11,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
- "\u001b[32m2024-01-05 12:02:41 INFO semantic_router.utils.logger Initializing RouteLayer\u001b[0m\n"
+ "\u001b[32m2024-01-05 12:45:50 INFO semantic_router.utils.logger Initializing RouteLayer\u001b[0m\n"
]
}
],
@@ -372,14 +382,14 @@
},
{
"cell_type": "code",
- "execution_count": 39,
+ "execution_count": 12,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
- "\u001b[32m2024-01-05 12:02:42 INFO semantic_router.utils.logger Extracting function input...\u001b[0m\n"
+ "\u001b[32m2024-01-05 12:45:53 INFO semantic_router.utils.logger Extracting function input...\u001b[0m\n"
]
},
{
@@ -388,7 +398,7 @@
"RouteChoice(name='get_time', function_call={'timezone': 'Europe/Stockholm'})"
]
},
- "execution_count": 39,
+ "execution_count": 12,
"metadata": {},
"output_type": "execute_result"
}
@@ -406,7 +416,7 @@
},
{
"cell_type": "code",
- "execution_count": 41,
+ "execution_count": 13,
"metadata": {},
"outputs": [
{
@@ -420,7 +430,7 @@
"name": "stderr",
"output_type": "stream",
"text": [
- "\u001b[32m2024-01-05 12:03:45 INFO semantic_router.utils.logger Extracting function input...\u001b[0m\n"
+ "\u001b[32m2024-01-05 12:45:58 INFO semantic_router.utils.logger Extracting function input...\u001b[0m\n"
]
},
{
@@ -428,7 +438,7 @@
"output_type": "stream",
"text": [
"Invoked `get_time` function with timezone: `Europe/Stockholm`\n",
- "11:03\n",
+ "11:46\n",
"Query: What are the tech news in the US?\n"
]
},
@@ -436,7 +446,7 @@
"name": "stderr",
"output_type": "stream",
"text": [
- "\u001b[32m2024-01-05 12:03:47 INFO semantic_router.utils.logger Extracting function input...\u001b[0m\n"
+ "\u001b[32m2024-01-05 12:46:00 INFO semantic_router.utils.logger Extracting function input...\u001b[0m\n"
]
},
{
@@ -477,6 +487,13 @@
" print(f\"Query: {query}\")\n",
" print(route_and_execute(query, functions, layer))"
]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": []
}
],
"metadata": {
diff --git a/semantic_router/layer.py b/semantic_router/layer.py
index 0315a281..e2c8286b 100644
--- a/semantic_router/layer.py
+++ b/semantic_router/layer.py
@@ -55,7 +55,7 @@ def __init__(
self,
routes: list[Route] = [],
encoder_type: str = "openai",
- encoder_name: str | None = "text-embedding-ada-002",
+ encoder_name: str | None = None,
):
self.encoder_type = encoder_type
if encoder_name is None:
From 6815425b584782f90d7971c413e1c531939119f7 Mon Sep 17 00:00:00 2001
From: Ismail Ashraq
Date: Sat, 6 Jan 2024 19:21:04 +0500
Subject: [PATCH 03/24] Add llms
---
semantic_router/layer.py | 14 ++++++-
semantic_router/llms/__init__.py | 7 ++++
semantic_router/llms/base.py | 11 +++++
semantic_router/llms/cohere.py | 43 ++++++++++++++++++++
semantic_router/llms/openai.py | 51 +++++++++++++++++++++++
semantic_router/llms/openrouter.py | 56 ++++++++++++++++++++++++++
semantic_router/route.py | 26 ++++++++----
semantic_router/schema.py | 8 ++++
semantic_router/utils/function_call.py | 20 +++++----
9 files changed, 220 insertions(+), 16 deletions(-)
create mode 100644 semantic_router/llms/__init__.py
create mode 100644 semantic_router/llms/base.py
create mode 100644 semantic_router/llms/cohere.py
create mode 100644 semantic_router/llms/openai.py
create mode 100644 semantic_router/llms/openrouter.py
diff --git a/semantic_router/layer.py b/semantic_router/layer.py
index 5b2aad84..46eb2eb7 100644
--- a/semantic_router/layer.py
+++ b/semantic_router/layer.py
@@ -9,6 +9,7 @@
CohereEncoder,
OpenAIEncoder,
)
+from semantic_router.llms import BaseLLM
from semantic_router.linear import similarity_matrix, top_scores
from semantic_router.route import Route
from semantic_router.schema import Encoder, EncoderType, RouteChoice
@@ -142,12 +143,16 @@ class RouteLayer:
score_threshold: float = 0.82
def __init__(
- self, encoder: BaseEncoder | None = None, routes: list[Route] | None = None
+ self,
+ encoder: BaseEncoder | None = None,
+ llm: BaseLLM | None = None,
+ routes: list[Route] | None = None,
):
logger.info("Initializing RouteLayer")
self.index = None
self.categories = None
self.encoder = encoder if encoder is not None else CohereEncoder()
+ self.llm = llm
self.routes: list[Route] = routes if routes is not None else []
# decide on default threshold based on encoder
if isinstance(encoder, OpenAIEncoder):
@@ -168,6 +173,13 @@ def __call__(self, text: str) -> RouteChoice:
if passed:
# get chosen route object
route = [route for route in self.routes if route.name == top_class][0]
+ if route.function_schema and not isinstance(route.llm, BaseLLM):
+ if not self.llm:
+ raise ValueError(
+ "LLM is required for dynamic routes. Please ensure the 'llm' is set."
+ )
+ else:
+ route.llm = self.llm
return route(text)
else:
# if no route passes threshold, return empty route choice
diff --git a/semantic_router/llms/__init__.py b/semantic_router/llms/__init__.py
new file mode 100644
index 00000000..446f7c42
--- /dev/null
+++ b/semantic_router/llms/__init__.py
@@ -0,0 +1,7 @@
+from semantic_router.llms.base import BaseLLM
+from semantic_router.llms.openai import OpenAI
+from semantic_router.llms.openrouter import OpenRouter
+from semantic_router.llms.cohere import Cohere
+
+
+__all__ = ["BaseLLM", "OpenAI", "OpenRouter", "Cohere"]
diff --git a/semantic_router/llms/base.py b/semantic_router/llms/base.py
new file mode 100644
index 00000000..2a1a038e
--- /dev/null
+++ b/semantic_router/llms/base.py
@@ -0,0 +1,11 @@
+from pydantic import BaseModel
+
+
class BaseLLM(BaseModel):
    """Abstract base class for the chat-LLM wrappers used by semantic-router.

    Concrete subclasses (OpenAI, OpenRouter, Cohere) wrap a provider SDK and
    implement ``__call__`` to turn an input prompt into a completion string.
    """

    # Model identifier of the underlying LLM (e.g. "gpt-3.5-turbo", "command").
    name: str

    class Config:
        # Subclasses store raw SDK client objects as fields; pydantic must be
        # told to accept such non-pydantic types.
        arbitrary_types_allowed = True

    def __call__(self, prompt) -> str | None:
        """Generate a completion for ``prompt``.

        :param prompt: the LLM input; concrete subclasses accept a list of
            ``Message`` objects — TODO confirm the intended base contract.
        :return: the generated text, or ``None``.
        :raises NotImplementedError: always, on the base class.
        """
        raise NotImplementedError("Subclasses must implement this method")
diff --git a/semantic_router/llms/cohere.py b/semantic_router/llms/cohere.py
new file mode 100644
index 00000000..80512d5c
--- /dev/null
+++ b/semantic_router/llms/cohere.py
@@ -0,0 +1,43 @@
+import os
+import cohere
+from semantic_router.llms import BaseLLM
+from semantic_router.schema import Message
+
+
class Cohere(BaseLLM):
    """LLM wrapper around the Cohere chat API."""

    # Cohere SDK client; None until __init__ assigns a validated instance.
    client: cohere.Client | None = None

    def __init__(
        self,
        name: str | None = None,
        cohere_api_key: str | None = None,
    ):
        """Initialize the Cohere chat client.

        :param name: chat model name; falls back to the COHERE_CHAT_MODEL_NAME
            environment variable, then to "command".
        :param cohere_api_key: API key; falls back to the COHERE_API_KEY
            environment variable.
        :raises ValueError: if no API key is available or the SDK client
            fails to initialize.
        """
        if name is None:
            name = os.getenv("COHERE_CHAT_MODEL_NAME", "command")
        super().__init__(name=name)
        cohere_api_key = cohere_api_key or os.getenv("COHERE_API_KEY")
        if cohere_api_key is None:
            raise ValueError("Cohere API key cannot be 'None'.")
        try:
            self.client = cohere.Client(cohere_api_key)
        except Exception as e:
            # Chain the original exception so the root cause is not lost.
            raise ValueError(
                f"Cohere API client failed to initialize. Error: {e}"
            ) from e

    def __call__(self, messages: list[Message]) -> str:
        """Send ``messages`` to the Cohere chat endpoint and return the reply.

        The last message is the live prompt; earlier messages are passed as
        chat history.

        :raises ValueError: if the client is uninitialized, or if the API
            call fails or returns an empty completion.
        """
        if self.client is None:
            raise ValueError("Cohere client is not initialized.")
        try:
            completion = self.client.chat(
                model=self.name,
                chat_history=[m.to_cohere() for m in messages[:-1]],
                message=messages[-1].content,
            )

            output = completion.text

            if not output:
                raise Exception("No output generated")
            return output

        except Exception as e:
            # Chain the original exception so the root cause is not lost.
            raise ValueError(f"Cohere API call failed. Error: {e}") from e
diff --git a/semantic_router/llms/openai.py b/semantic_router/llms/openai.py
new file mode 100644
index 00000000..18b6e706
--- /dev/null
+++ b/semantic_router/llms/openai.py
@@ -0,0 +1,51 @@
+import os
+import openai
+from semantic_router.utils.logger import logger
+from semantic_router.llms import BaseLLM
+from semantic_router.schema import Message
+
+
class OpenAI(BaseLLM):
    """LLM wrapper around the OpenAI chat-completions API."""

    # Explicit None defaults keep these fields optional for pydantic v2
    # (where `X | None` alone is still a *required* field) and match the
    # Cohere wrapper's declaration style; real values are set in __init__.
    client: openai.OpenAI | None = None
    temperature: float | None = None
    max_tokens: int | None = None

    def __init__(
        self,
        name: str | None = None,
        openai_api_key: str | None = None,
        temperature: float = 0.01,
        max_tokens: int = 200,
    ):
        """Initialize the OpenAI chat client.

        :param name: chat model name; falls back to the OPENAI_CHAT_MODEL_NAME
            environment variable, then to "gpt-3.5-turbo".
        :param openai_api_key: API key; falls back to the OPENAI_API_KEY
            environment variable.
        :param temperature: sampling temperature passed to the API.
        :param max_tokens: completion-length cap passed to the API.
        :raises ValueError: if no API key is available or the SDK client
            fails to initialize.
        """
        if name is None:
            name = os.getenv("OPENAI_CHAT_MODEL_NAME", "gpt-3.5-turbo")
        super().__init__(name=name)
        api_key = openai_api_key or os.getenv("OPENAI_API_KEY")
        if api_key is None:
            raise ValueError("OpenAI API key cannot be 'None'.")
        try:
            self.client = openai.OpenAI(api_key=api_key)
        except Exception as e:
            # Chain the original exception so the root cause is not lost.
            raise ValueError(
                f"OpenAI API client failed to initialize. Error: {e}"
            ) from e
        self.temperature = temperature
        self.max_tokens = max_tokens

    def __call__(self, messages: list[Message]) -> str:
        """Send ``messages`` to the chat-completions endpoint and return the
        generated text.

        :raises ValueError: if the client is uninitialized.
        :raises Exception: if the API call fails or returns an empty
            completion.
        """
        if self.client is None:
            raise ValueError("OpenAI client is not initialized.")
        try:
            completion = self.client.chat.completions.create(
                model=self.name,
                messages=[m.to_openai() for m in messages],
                temperature=self.temperature,
                max_tokens=self.max_tokens,
            )

            output = completion.choices[0].message.content

            if not output:
                raise Exception("No output generated")
            return output
        except Exception as e:
            logger.error(f"LLM error: {e}")
            # Chain the original exception so the root cause is not lost.
            raise Exception(f"LLM error: {e}") from e
diff --git a/semantic_router/llms/openrouter.py b/semantic_router/llms/openrouter.py
new file mode 100644
index 00000000..3b7a9b49
--- /dev/null
+++ b/semantic_router/llms/openrouter.py
@@ -0,0 +1,56 @@
+import os
+import openai
+from semantic_router.utils.logger import logger
+from semantic_router.llms import BaseLLM
+from semantic_router.schema import Message
+
+
class OpenRouter(BaseLLM):
    """LLM wrapper around the OpenRouter API (OpenAI-compatible endpoint)."""

    # Explicit None defaults keep these fields optional for pydantic v2
    # (where `X | None` alone is still a *required* field) and match the
    # Cohere wrapper's declaration style; real values are set in __init__.
    client: openai.OpenAI | None = None
    base_url: str | None = None
    temperature: float | None = None
    max_tokens: int | None = None

    def __init__(
        self,
        name: str | None = None,
        openrouter_api_key: str | None = None,
        base_url: str = "https://openrouter.ai/api/v1",
        temperature: float = 0.01,
        max_tokens: int = 200,
    ):
        """Initialize the OpenRouter chat client.

        :param name: chat model name; falls back to the
            OPENROUTER_CHAT_MODEL_NAME environment variable, then to
            "mistralai/mistral-7b-instruct".
        :param openrouter_api_key: API key; falls back to the
            OPENROUTER_API_KEY environment variable.
        :param base_url: OpenRouter endpoint used by the OpenAI SDK client.
        :param temperature: sampling temperature passed to the API.
        :param max_tokens: completion-length cap passed to the API.
        :raises ValueError: if no API key is available or the SDK client
            fails to initialize.
        """
        if name is None:
            name = os.getenv(
                "OPENROUTER_CHAT_MODEL_NAME", "mistralai/mistral-7b-instruct"
            )
        super().__init__(name=name)
        self.base_url = base_url
        api_key = openrouter_api_key or os.getenv("OPENROUTER_API_KEY")
        if api_key is None:
            raise ValueError("OpenRouter API key cannot be 'None'.")
        try:
            self.client = openai.OpenAI(api_key=api_key, base_url=self.base_url)
        except Exception as e:
            # Chain the original exception so the root cause is not lost.
            raise ValueError(
                f"OpenRouter API client failed to initialize. Error: {e}"
            ) from e
        self.temperature = temperature
        self.max_tokens = max_tokens

    def __call__(self, messages: list[Message]) -> str:
        """Send ``messages`` to the OpenRouter chat endpoint and return the
        generated text.

        :raises ValueError: if the client is uninitialized.
        :raises Exception: if the API call fails or returns an empty
            completion.
        """
        if self.client is None:
            raise ValueError("OpenRouter client is not initialized.")
        try:
            completion = self.client.chat.completions.create(
                model=self.name,
                messages=[m.to_openai() for m in messages],
                temperature=self.temperature,
                max_tokens=self.max_tokens,
            )

            output = completion.choices[0].message.content

            if not output:
                raise Exception("No output generated")
            return output
        except Exception as e:
            logger.error(f"LLM error: {e}")
            # Chain the original exception so the root cause is not lost.
            raise Exception(f"LLM error: {e}") from e
diff --git a/semantic_router/route.py b/semantic_router/route.py
index 12afa7fe..7a8803d7 100644
--- a/semantic_router/route.py
+++ b/semantic_router/route.py
@@ -4,11 +4,13 @@
from pydantic import BaseModel
+from semantic_router.llms import BaseLLM
from semantic_router.schema import RouteChoice
from semantic_router.utils import function_call
-from semantic_router.utils.llm import llm
from semantic_router.utils.logger import logger
+from semantic_router.schema import Message
+
def is_valid(route_config: str) -> bool:
try:
@@ -43,12 +45,17 @@ class Route(BaseModel):
utterances: list[str]
description: str | None = None
function_schema: dict[str, Any] | None = None
+ llm: BaseLLM | None = None
def __call__(self, query: str) -> RouteChoice:
if self.function_schema:
+ if not self.llm:
+ raise ValueError(
+ "LLM is required for dynamic routes. Please ensure the 'llm' is set."
+ )
# if a function schema is provided we generate the inputs
extracted_inputs = function_call.extract_function_inputs(
- query=query, function_schema=self.function_schema
+ query=query, llm=self.llm, function_schema=self.function_schema
)
func_call = extracted_inputs
else:
@@ -60,16 +67,16 @@ def to_dict(self):
return self.dict()
@classmethod
- def from_dict(cls, data: dict):
+ def from_dict(cls, data: dict[str, Any]):
return cls(**data)
@classmethod
- def from_dynamic_route(cls, entity: Union[BaseModel, Callable]):
+ def from_dynamic_route(cls, llm: BaseLLM, entity: Union[BaseModel, Callable]):
"""
Generate a dynamic Route object from a function or Pydantic model using LLM
"""
schema = function_call.get_schema(item=entity)
- dynamic_route = cls._generate_dynamic_route(function_schema=schema)
+ dynamic_route = cls._generate_dynamic_route(llm=llm, function_schema=schema)
return dynamic_route
@classmethod
@@ -85,7 +92,7 @@ def _parse_route_config(cls, config: str) -> str:
raise ValueError("No tags found in the output.")
@classmethod
- def _generate_dynamic_route(cls, function_schema: dict[str, Any]):
+ def _generate_dynamic_route(cls, llm: BaseLLM, function_schema: dict[str, Any]):
logger.info("Generating dynamic route...")
prompt = f"""
@@ -113,7 +120,8 @@ def _generate_dynamic_route(cls, function_schema: dict[str, Any]):
{function_schema}
"""
- output = llm(prompt)
+ llm_input = [Message(role="user", content=prompt)]
+ output = llm(llm_input)
if not output:
raise Exception("No output generated for dynamic route")
@@ -122,5 +130,7 @@ def _generate_dynamic_route(cls, function_schema: dict[str, Any]):
logger.info(f"Generated route config:\n{route_config}")
if is_valid(route_config):
- return Route.from_dict(json.loads(route_config))
+ route_config_dict = json.loads(route_config)
+ route_config_dict["llm"] = llm
+ return Route.from_dict(route_config_dict)
raise Exception("No config generated")
diff --git a/semantic_router/schema.py b/semantic_router/schema.py
index 465cfaac..62eecc7d 100644
--- a/semantic_router/schema.py
+++ b/semantic_router/schema.py
@@ -49,6 +49,14 @@ class Message(BaseModel):
role: str
content: str
    def to_openai(self):
        """Convert this message to the OpenAI chat-message dict format.

        :return: ``{"role": ..., "content": ...}`` as expected by the
            OpenAI chat-completions API.
        :raises ValueError: if the role (case-insensitively) is not one of
            'user', 'assistant' or 'system'.
        """
        if self.role.lower() not in ["user", "assistant", "system"]:
            raise ValueError("Role must be either 'user', 'assistant' or 'system'")
        return {"role": self.role, "content": self.content}
+
    def to_cohere(self):
        """Convert this message to the Cohere chat-history dict format.

        Note the Cohere API uses the key "message" (not "content") for the
        message body; no role validation is performed here — TODO confirm
        whether Cohere-specific role names should be enforced.
        """
        return {"role": self.role, "message": self.content}
+
class Conversation(BaseModel):
messages: list[Message]
diff --git a/semantic_router/utils/function_call.py b/semantic_router/utils/function_call.py
index 2ead3ab5..d93d027c 100644
--- a/semantic_router/utils/function_call.py
+++ b/semantic_router/utils/function_call.py
@@ -4,7 +4,8 @@
from pydantic import BaseModel
-from semantic_router.utils.llm import llm
+from semantic_router.llms import BaseLLM
+from semantic_router.schema import Message
from semantic_router.utils.logger import logger
@@ -40,7 +41,9 @@ def get_schema(item: Union[BaseModel, Callable]) -> dict[str, Any]:
return schema
-def extract_function_inputs(query: str, function_schema: dict[str, Any]) -> dict:
+def extract_function_inputs(
+ query: str, llm: BaseLLM, function_schema: dict[str, Any]
+) -> dict:
logger.info("Extracting function input...")
prompt = f"""
@@ -71,8 +74,8 @@ def extract_function_inputs(query: str, function_schema: dict[str, Any]) -> dict
schema: {function_schema}
Result:
"""
-
- output = llm(prompt)
+ llm_input = [Message(role="user", content=prompt)]
+ output = llm(llm_input)
if not output:
raise Exception("No output generated for extract function input")
@@ -113,15 +116,18 @@ def call_function(function: Callable, inputs: dict[str, str]):
# TODO: Add route layer object to the input, solve circular import issue
-async def route_and_execute(query: str, functions: list[Callable], route_layer):
+async def route_and_execute(
+ query: str, llm: BaseLLM, functions: list[Callable], route_layer
+):
function_name = route_layer(query)
if not function_name:
logger.warning("No function found, calling LLM...")
- return llm(query)
+ llm_input = [Message(role="user", content=query)]
+ return llm(llm_input)
for function in functions:
if function.__name__ == function_name:
print(f"Calling function: {function.__name__}")
schema = get_schema(function)
- inputs = extract_function_inputs(query, schema)
+ inputs = extract_function_inputs(query, llm, schema)
call_function(function, inputs)
From 7a8eab0138cbcd44f4eec9b03183dba82816d554 Mon Sep 17 00:00:00 2001
From: James Briggs <35938317+jamescalam@users.noreply.github.com>
Date: Sat, 6 Jan 2024 16:26:02 +0100
Subject: [PATCH 04/24] created fastembed and hybrid dependency groups
---
docs/00-introduction.ipynb | 8 +--
docs/01-save-load-from-file.ipynb | 16 +++---
docs/02-dynamic-routes.ipynb | 8 +--
docs/03-basic-langchain-agent.ipynb | 10 ++--
poetry.lock | 82 +++++++++++++--------------
pyproject.toml | 8 ++-
semantic_router/encoders/bm25.py | 9 ++-
semantic_router/encoders/fastembed.py | 5 +-
semantic_router/layer.py | 9 +++
semantic_router/schema.py | 4 ++
10 files changed, 88 insertions(+), 71 deletions(-)
diff --git a/docs/00-introduction.ipynb b/docs/00-introduction.ipynb
index 95222c2a..ae6b768d 100644
--- a/docs/00-introduction.ipynb
+++ b/docs/00-introduction.ipynb
@@ -157,7 +157,7 @@
"source": [
"from semantic_router.layer import RouteLayer\n",
"\n",
- "dl = RouteLayer(encoder=encoder, routes=routes)"
+ "rl = RouteLayer(encoder=encoder, routes=routes)"
]
},
{
@@ -184,7 +184,7 @@
}
],
"source": [
- "dl(\"don't you love politics?\")"
+ "rl(\"don't you love politics?\")"
]
},
{
@@ -204,7 +204,7 @@
}
],
"source": [
- "dl(\"how's the weather today?\")"
+ "rl(\"how's the weather today?\")"
]
},
{
@@ -231,7 +231,7 @@
}
],
"source": [
- "dl(\"I'm interested in learning about llama 2\")"
+ "rl(\"I'm interested in learning about llama 2\")"
]
},
{
diff --git a/docs/01-save-load-from-file.ipynb b/docs/01-save-load-from-file.ipynb
index 6f084a9a..715679ce 100644
--- a/docs/01-save-load-from-file.ipynb
+++ b/docs/01-save-load-from-file.ipynb
@@ -132,7 +132,7 @@
" \"Enter Cohere API Key: \"\n",
")\n",
"\n",
- "layer = RouteLayer(routes=routes)"
+ "rl = RouteLayer(routes=routes)"
]
},
{
@@ -156,7 +156,7 @@
}
],
"source": [
- "layer.to_json(\"layer.json\")"
+ "rl.to_json(\"layer.json\")"
]
},
{
@@ -190,9 +190,9 @@
"import json\n",
"\n",
"with open(\"layer.json\", \"r\") as f:\n",
- " router_json = json.load(f)\n",
+ " layer_json = json.load(f)\n",
"\n",
- "print(router_json)"
+ "print(layer_json)"
]
},
{
@@ -217,7 +217,7 @@
}
],
"source": [
- "layer = RouteLayer.from_json(\"layer.json\")"
+ "rl = RouteLayer.from_json(\"layer.json\")"
]
},
{
@@ -244,9 +244,9 @@
],
"source": [
"print(\n",
- " f\"\"\"{layer.encoder.type=}\n",
- "{layer.encoder.name=}\n",
- "{layer.routes=}\"\"\"\n",
+ " f\"\"\"{rl.encoder.type=}\n",
+ "{rl.encoder.name=}\n",
+ "{rl.routes=}\"\"\"\n",
")"
]
},
diff --git a/docs/02-dynamic-routes.ipynb b/docs/02-dynamic-routes.ipynb
index 2b17da17..d8078cb2 100644
--- a/docs/02-dynamic-routes.ipynb
+++ b/docs/02-dynamic-routes.ipynb
@@ -125,7 +125,7 @@
" \"Enter Cohere API Key: \"\n",
")\n",
"\n",
- "layer = RouteLayer(routes=routes)"
+ "rl = RouteLayer(routes=routes)"
]
},
{
@@ -152,7 +152,7 @@
}
],
"source": [
- "layer(\"how's the weather today?\")"
+ "rl(\"how's the weather today?\")"
]
},
{
@@ -291,7 +291,7 @@
}
],
"source": [
- "layer.add(time_route)"
+ "rl.add(time_route)"
]
},
{
@@ -330,7 +330,7 @@
" \"Enter OpenRouter API Key: \"\n",
")\n",
"\n",
- "layer(\"what is the time in new york city?\")"
+ "rl(\"what is the time in new york city?\")"
]
},
{
diff --git a/docs/03-basic-langchain-agent.ipynb b/docs/03-basic-langchain-agent.ipynb
index 09294c78..3bfd3ba5 100644
--- a/docs/03-basic-langchain-agent.ipynb
+++ b/docs/03-basic-langchain-agent.ipynb
@@ -223,7 +223,7 @@
"from semantic_router import RouteLayer\n",
"from semantic_router.encoders import OpenAIEncoder\n",
"\n",
- "layer = RouteLayer(encoder=OpenAIEncoder(), routes=routes)"
+ "rl = RouteLayer(encoder=OpenAIEncoder(), routes=routes)"
]
},
{
@@ -258,7 +258,7 @@
}
],
"source": [
- "layer(\"should I buy ON whey or MP?\")"
+ "rl(\"should I buy ON whey or MP?\")"
]
},
{
@@ -284,7 +284,7 @@
}
],
"source": [
- "layer(\"how's the weather today?\")"
+ "rl(\"how's the weather today?\")"
]
},
{
@@ -310,7 +310,7 @@
}
],
"source": [
- "layer(\"how do I get big arms?\")"
+ "rl(\"how do I get big arms?\")"
]
},
{
@@ -382,7 +382,7 @@
"outputs": [],
"source": [
"def semantic_layer(query: str):\n",
- " route = layer(query)\n",
+ " route = rl(query)\n",
" if route.name == \"get_time\":\n",
" query += f\" (SYSTEM NOTE: {get_time()})\"\n",
" elif route.name == \"supplement_brand\":\n",
diff --git a/poetry.lock b/poetry.lock
index effd3033..815226ea 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand.
[[package]]
name = "aiohttp"
@@ -472,7 +472,7 @@ files = [
name = "coloredlogs"
version = "15.0.1"
description = "Colored terminal output for Python's logging module"
-optional = false
+optional = true
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
files = [
{file = "coloredlogs-15.0.1-py2.py3-none-any.whl", hash = "sha256:612ee75c546f53e92e70049c9dbfcc18c935a2b9a53b66085ce9ef6a6e5c0934"},
@@ -727,7 +727,7 @@ zstandard = ["zstandard"]
name = "fastembed"
version = "0.1.3"
description = "Fast, light, accurate library built for retrieval embedding generation"
-optional = false
+optional = true
python-versions = ">=3.8.0,<3.12"
files = [
{file = "fastembed-0.1.3-py3-none-any.whl", hash = "sha256:98b6c6d9effec8c96d97048e59cdd53627b16a70fcdbfa7c663772de66e11b3a"},
@@ -746,7 +746,7 @@ tqdm = ">=4.65,<5.0"
name = "filelock"
version = "3.13.1"
description = "A platform independent file lock."
-optional = false
+optional = true
python-versions = ">=3.8"
files = [
{file = "filelock-3.13.1-py3-none-any.whl", hash = "sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c"},
@@ -762,7 +762,7 @@ typing = ["typing-extensions (>=4.8)"]
name = "flatbuffers"
version = "23.5.26"
description = "The FlatBuffers serialization format for Python"
-optional = false
+optional = true
python-versions = "*"
files = [
{file = "flatbuffers-23.5.26-py2.py3-none-any.whl", hash = "sha256:c0ff356da363087b915fde4b8b45bdda73432fc17cddb3c8157472eab1422ad1"},
@@ -859,7 +859,7 @@ files = [
name = "fsspec"
version = "2023.12.2"
description = "File-system specification"
-optional = false
+optional = true
python-versions = ">=3.8"
files = [
{file = "fsspec-2023.12.2-py3-none-any.whl", hash = "sha256:d800d87f72189a745fa3d6b033b9dc4a34ad069f60ca60b943a63599f5501960"},
@@ -950,7 +950,7 @@ socks = ["socksio (==1.*)"]
name = "huggingface-hub"
version = "0.19.4"
description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub"
-optional = false
+optional = true
python-versions = ">=3.8.0"
files = [
{file = "huggingface_hub-0.19.4-py3-none-any.whl", hash = "sha256:dba013f779da16f14b606492828f3760600a1e1801432d09fe1c33e50b825bb5"},
@@ -983,7 +983,7 @@ typing = ["types-PyYAML", "types-requests", "types-simplejson", "types-toml", "t
name = "humanfriendly"
version = "10.0"
description = "Human friendly output for text interfaces using Python"
-optional = false
+optional = true
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
files = [
{file = "humanfriendly-10.0-py2.py3-none-any.whl", hash = "sha256:1697e1a8a8f550fd43c2865cd84542fc175a61dcb779b6fee18cf6b6ccba1477"},
@@ -1127,7 +1127,7 @@ testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"]
name = "joblib"
version = "1.3.2"
description = "Lightweight pipelining with Python functions"
-optional = false
+optional = true
python-versions = ">=3.7"
files = [
{file = "joblib-1.3.2-py3-none-any.whl", hash = "sha256:ef4331c65f239985f3f2220ecc87db222f08fd22097a3dd5698f693875f8cbb9"},
@@ -1195,7 +1195,7 @@ traitlets = "*"
name = "mmh3"
version = "3.1.0"
description = "Python wrapper for MurmurHash (MurmurHash3), a set of fast and robust hash functions."
-optional = false
+optional = true
python-versions = "*"
files = [
{file = "mmh3-3.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:16ee043b1bac040b4324b8baee39df9fdca480a560a6d74f2eef66a5009a234e"},
@@ -1239,7 +1239,7 @@ files = [
name = "mpmath"
version = "1.3.0"
description = "Python library for arbitrary-precision floating-point arithmetic"
-optional = false
+optional = true
python-versions = "*"
files = [
{file = "mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c"},
@@ -1408,7 +1408,7 @@ files = [
name = "nltk"
version = "3.8.1"
description = "Natural Language Toolkit"
-optional = false
+optional = true
python-versions = ">=3.7"
files = [
{file = "nltk-3.8.1-py3-none-any.whl", hash = "sha256:fd5c9109f976fa86bcadba8f91e47f5e9293bd034474752e92a520f81c93dda5"},
@@ -1467,7 +1467,7 @@ files = [
name = "onnx"
version = "1.15.0"
description = "Open Neural Network Exchange"
-optional = false
+optional = true
python-versions = ">=3.8"
files = [
{file = "onnx-1.15.0-cp310-cp310-macosx_10_12_universal2.whl", hash = "sha256:51cacb6aafba308aaf462252ced562111f6991cdc7bc57a6c554c3519453a8ff"},
@@ -1508,7 +1508,7 @@ reference = ["Pillow", "google-re2"]
name = "onnxruntime"
version = "1.16.3"
description = "ONNX Runtime is a runtime accelerator for Machine Learning models"
-optional = false
+optional = true
python-versions = "*"
files = [
{file = "onnxruntime-1.16.3-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:3bc41f323ac77acfed190be8ffdc47a6a75e4beeb3473fbf55eeb075ccca8df2"},
@@ -1623,7 +1623,7 @@ ptyprocess = ">=0.5"
name = "pinecone-text"
version = "0.7.1"
description = "Text utilities library by Pinecone.io"
-optional = false
+optional = true
python-versions = ">=3.8,<4.0"
files = [
{file = "pinecone_text-0.7.1-py3-none-any.whl", hash = "sha256:b806b5d66190d09888ed2d3bcdef49534aa9200b9da521371a062e6ccc79bb2c"},
@@ -1690,7 +1690,7 @@ wcwidth = "*"
name = "protobuf"
version = "4.25.1"
description = ""
-optional = false
+optional = true
python-versions = ">=3.8"
files = [
{file = "protobuf-4.25.1-cp310-abi3-win32.whl", hash = "sha256:193f50a6ab78a970c9b4f148e7c750cfde64f59815e86f686c22e26b4fe01ce7"},
@@ -1841,7 +1841,7 @@ windows-terminal = ["colorama (>=0.4.6)"]
name = "pyreadline3"
version = "3.4.1"
description = "A python implementation of GNU readline."
-optional = false
+optional = true
python-versions = "*"
files = [
{file = "pyreadline3-3.4.1-py3-none-any.whl", hash = "sha256:b0efb6516fd4fb07b45949053826a62fa4cb353db5be2bbb4a7aa1fdd1e345fb"},
@@ -1870,24 +1870,6 @@ tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""}
[package.extras]
testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
-[[package]]
-name = "pytest-asyncio"
-version = "0.23.3"
-description = "Pytest support for asyncio"
-optional = false
-python-versions = ">=3.8"
-files = [
- {file = "pytest-asyncio-0.23.3.tar.gz", hash = "sha256:af313ce900a62fbe2b1aed18e37ad757f1ef9940c6b6a88e2954de38d6b1fb9f"},
- {file = "pytest_asyncio-0.23.3-py3-none-any.whl", hash = "sha256:37a9d912e8338ee7b4a3e917381d1c95bfc8682048cb0fbc35baba316ec1faba"},
-]
-
-[package.dependencies]
-pytest = ">=7.0.0"
-
-[package.extras]
-docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"]
-testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"]
-
[[package]]
name = "pytest-cov"
version = "4.1.0"
@@ -1992,6 +1974,7 @@ files = [
{file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"},
{file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"},
{file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"},
+ {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"},
{file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"},
{file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"},
{file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"},
@@ -1999,8 +1982,15 @@ files = [
{file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"},
{file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"},
{file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"},
+ {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"},
{file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"},
{file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"},
+ {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"},
+ {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"},
+ {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"},
+ {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"},
+ {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"},
+ {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"},
{file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"},
{file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"},
{file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"},
@@ -2017,6 +2007,7 @@ files = [
{file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"},
{file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"},
{file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"},
+ {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"},
{file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"},
{file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"},
{file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"},
@@ -2024,6 +2015,7 @@ files = [
{file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"},
{file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"},
{file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"},
+ {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"},
{file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"},
{file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"},
{file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"},
@@ -2138,7 +2130,7 @@ cffi = {version = "*", markers = "implementation_name == \"pypy\""}
name = "regex"
version = "2023.12.25"
description = "Alternative regular expression module, to replace re."
-optional = false
+optional = true
python-versions = ">=3.7"
files = [
{file = "regex-2023.12.25-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0694219a1d54336fd0445ea382d49d36882415c0134ee1e8332afd1529f0baa5"},
@@ -2328,7 +2320,7 @@ tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"]
name = "sympy"
version = "1.12"
description = "Computer algebra system (CAS) in Python"
-optional = false
+optional = true
python-versions = ">=3.8"
files = [
{file = "sympy-1.12-py3-none-any.whl", hash = "sha256:c3588cd4295d0c0f603d0f2ae780587e64e2efeedb3521e46b9bb1d08d184fa5"},
@@ -2353,7 +2345,7 @@ files = [
name = "tokenizers"
version = "0.15.0"
description = ""
-optional = false
+optional = true
python-versions = ">=3.7"
files = [
{file = "tokenizers-0.15.0-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:cd3cd0299aaa312cd2988957598f80becd04d5a07338741eca076057a2b37d6e"},
@@ -2570,20 +2562,20 @@ zstd = ["zstandard (>=0.18.0)"]
[[package]]
name = "wcwidth"
-version = "0.2.12"
+version = "0.2.13"
description = "Measures the displayed width of unicode strings in a terminal"
optional = false
python-versions = "*"
files = [
- {file = "wcwidth-0.2.12-py2.py3-none-any.whl", hash = "sha256:f26ec43d96c8cbfed76a5075dac87680124fa84e0855195a6184da9c187f133c"},
- {file = "wcwidth-0.2.12.tar.gz", hash = "sha256:f01c104efdf57971bcb756f054dd58ddec5204dd15fa31d6503ea57947d97c02"},
+ {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"},
+ {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"},
]
[[package]]
name = "wget"
version = "3.2"
description = "pure python download utility"
-optional = false
+optional = true
python-versions = "*"
files = [
{file = "wget-3.2.zip", hash = "sha256:35e630eca2aa50ce998b9b1a127bb26b30dfee573702782aa982f875e3f16061"},
@@ -2707,7 +2699,11 @@ files = [
docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"]
testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"]
+[extras]
+fastembed = ["fastembed"]
+hybrid = ["pinecone-text"]
+
[metadata]
lock-version = "2.0"
python-versions = ">=3.9,<3.12"
-content-hash = "887cf3e564b33d43b6bdcf5d089d9eb12931312e8eeadd3f8488cb5fbe384fab"
+content-hash = "64f0fef330108fe47110c203bf96403e8d986f8b751f6eed1abfec3ce57539a6"
diff --git a/pyproject.toml b/pyproject.toml
index 9ce7f26d..d3561c64 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -18,12 +18,14 @@ pydantic = "^1.8.2"
openai = "^1.3.9"
cohere = "^4.32"
numpy = "^1.25.2"
-pinecone-text = "^0.7.0"
+pinecone-text = {version = "^0.7.0", optional = true}
colorlog = "^6.8.0"
pyyaml = "^6.0.1"
-pytest-asyncio = "^0.23.2"
-fastembed = "^0.1.3"
+fastembed = {version = "^0.1.3", optional = true}
+[tool.poetry.extras]
+hybrid = ["pinecone-text"]
+fastembed = ["fastembed"]
[tool.poetry.group.dev.dependencies]
ipykernel = "^6.26.0"
diff --git a/semantic_router/encoders/bm25.py b/semantic_router/encoders/bm25.py
index f43e1780..e597b4fe 100644
--- a/semantic_router/encoders/bm25.py
+++ b/semantic_router/encoders/bm25.py
@@ -1,7 +1,5 @@
from typing import Any
-from pinecone_text.sparse import BM25Encoder as encoder
-
from semantic_router.encoders import BaseEncoder
@@ -12,6 +10,13 @@ class BM25Encoder(BaseEncoder):
def __init__(self, name: str = "bm25"):
super().__init__(name=name)
+ try:
+ from pinecone_text.sparse import BM25Encoder as encoder
+ except ImportError:
+ raise ImportError(
+ "Please install pinecone-text to use BM25Encoder. "
+ "You can install it with: `pip install semantic-router[hybrid]`"
+ )
self.model = encoder.default()
params = self.model.get_params()
diff --git a/semantic_router/encoders/fastembed.py b/semantic_router/encoders/fastembed.py
index 4bb46b85..c730d4c6 100644
--- a/semantic_router/encoders/fastembed.py
+++ b/semantic_router/encoders/fastembed.py
@@ -21,8 +21,9 @@ def _initialize_client(self):
from fastembed.embedding import FlagEmbedding as Embedding
except ImportError:
raise ImportError(
- "Please install fastembed to use FastEmbedEncoder"
- "You can install it with: `pip install fastembed`"
+ "Please install fastembed to use FastEmbedEncoder. "
+ "You can install it with: "
+ "`pip install semantic-router[fastembed]`"
)
embedding_args = {
diff --git a/semantic_router/layer.py b/semantic_router/layer.py
index e2c8286b..6d85508c 100644
--- a/semantic_router/layer.py
+++ b/semantic_router/layer.py
@@ -8,6 +8,7 @@
BaseEncoder,
CohereEncoder,
OpenAIEncoder,
+ FastEmbedEncoder,
)
from semantic_router.linear import similarity_matrix, top_scores
from semantic_router.route import Route
@@ -60,10 +61,14 @@ def __init__(
self.encoder_type = encoder_type
if encoder_name is None:
# if encoder_name is not provided, use the default encoder for type
+ # TODO base these values on default values in encoders themselves..
+ # TODO without initializing them (as this is just config)
if encoder_type == EncoderType.OPENAI:
encoder_name = "text-embedding-ada-002"
elif encoder_type == EncoderType.COHERE:
encoder_name = "embed-english-v3.0"
+ elif encoder_type == EncoderType.FASTEMBED:
+ encoder_name = "BAAI/bge-small-en-v1.5"
elif encoder_type == EncoderType.HUGGINGFACE:
raise NotImplementedError
logger.info(f"Using default {encoder_type} encoder: {encoder_name}")
@@ -159,10 +164,14 @@ def __init__(
self.encoder = encoder if encoder is not None else CohereEncoder()
self.routes: list[Route] = routes if routes is not None else []
# decide on default threshold based on encoder
+ # TODO move defaults to the encoder objects and extract from there
if isinstance(encoder, OpenAIEncoder):
self.score_threshold = 0.82
elif isinstance(encoder, CohereEncoder):
self.score_threshold = 0.3
+ elif isinstance(encoder, FastEmbedEncoder):
+ # TODO default not thoroughly tested, should optimize
+ self.score_threshold = 0.5
else:
self.score_threshold = 0.82
# if routes list has been passed, we initialize index now
diff --git a/semantic_router/schema.py b/semantic_router/schema.py
index 360442f6..826cb686 100644
--- a/semantic_router/schema.py
+++ b/semantic_router/schema.py
@@ -7,12 +7,14 @@
BaseEncoder,
CohereEncoder,
OpenAIEncoder,
+ FastEmbedEncoder,
)
from semantic_router.utils.splitters import semantic_splitter
class EncoderType(Enum):
HUGGINGFACE = "huggingface"
+ FASTEMBED = "fastembed"
OPENAI = "openai"
COHERE = "cohere"
@@ -33,6 +35,8 @@ def __init__(self, type: str, name: str | None):
self.name = name
if self.type == EncoderType.HUGGINGFACE:
raise NotImplementedError
+ elif self.type == EncoderType.FASTEMBED:
+ self.model = FastEmbedEncoder(name)
elif self.type == EncoderType.OPENAI:
self.model = OpenAIEncoder(name)
elif self.type == EncoderType.COHERE:
From c9715a343e1ea1d30233121e209e9615b68bdb14 Mon Sep 17 00:00:00 2001
From: James Briggs <35938317+jamescalam@users.noreply.github.com>
Date: Sat, 6 Jan 2024 16:37:04 +0100
Subject: [PATCH 05/24] add all-extras flag to poetry install in actions
---
.github/workflows/test.yml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 5ab1b013..cf22ea9b 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -35,7 +35,7 @@ jobs:
cache: poetry
- name: Install dependencies
run: |
- poetry install
+ poetry install --all-extras
- name: Install nltk
run: |
pip install nltk
From 5006b49ba8952ca44e1e9698f3926c507f48aae8 Mon Sep 17 00:00:00 2001
From: James Briggs <35938317+jamescalam@users.noreply.github.com>
Date: Sat, 6 Jan 2024 16:53:58 +0100
Subject: [PATCH 06/24] lint fixes
---
semantic_router/encoders/fastembed.py | 10 ++++++----
semantic_router/schema.py | 6 +++---
2 files changed, 9 insertions(+), 7 deletions(-)
diff --git a/semantic_router/encoders/fastembed.py b/semantic_router/encoders/fastembed.py
index c730d4c6..fb845ce7 100644
--- a/semantic_router/encoders/fastembed.py
+++ b/semantic_router/encoders/fastembed.py
@@ -1,12 +1,14 @@
from typing import Any, List, Optional
import numpy as np
-from pydantic import BaseModel, PrivateAttr
+from pydantic import PrivateAttr
+from semantic_router.encoders import BaseEncoder
-class FastEmbedEncoder(BaseModel):
+
+class FastEmbedEncoder(BaseEncoder):
type: str = "fastembed"
- model_name: str = "BAAI/bge-small-en-v1.5"
+ name: str = "BAAI/bge-small-en-v1.5"
max_length: int = 512
cache_dir: Optional[str] = None
threads: Optional[int] = None
@@ -27,7 +29,7 @@ def _initialize_client(self):
)
embedding_args = {
- "model_name": self.model_name,
+ "model_name": self.name,
"max_length": self.max_length,
"cache_dir": self.cache_dir,
"threads": self.threads,
diff --git a/semantic_router/schema.py b/semantic_router/schema.py
index 826cb686..64480355 100644
--- a/semantic_router/schema.py
+++ b/semantic_router/schema.py
@@ -36,11 +36,11 @@ def __init__(self, type: str, name: str | None):
if self.type == EncoderType.HUGGINGFACE:
raise NotImplementedError
elif self.type == EncoderType.FASTEMBED:
- self.model = FastEmbedEncoder(name)
+ self.model = FastEmbedEncoder(name=name)
elif self.type == EncoderType.OPENAI:
- self.model = OpenAIEncoder(name)
+ self.model = OpenAIEncoder(name=name)
elif self.type == EncoderType.COHERE:
- self.model = CohereEncoder(name)
+ self.model = CohereEncoder(name=name)
else:
raise ValueError
From 5141474a1670f09e020fa8defa7d04502fb5fe58 Mon Sep 17 00:00:00 2001
From: Ismail Ashraq
Date: Sun, 7 Jan 2024 01:21:39 +0500
Subject: [PATCH 07/24] fix test issues
---
tests/unit/test_route.py | 41 ++++++++++++++++++----------------------
1 file changed, 18 insertions(+), 23 deletions(-)
diff --git a/tests/unit/test_route.py b/tests/unit/test_route.py
index 09a5d235..2eb784d4 100644
--- a/tests/unit/test_route.py
+++ b/tests/unit/test_route.py
@@ -1,6 +1,7 @@
-from unittest.mock import Mock, patch # , AsyncMock
+from unittest.mock import patch # , AsyncMock
# import pytest
+from semantic_router.llms import BaseLLM
from semantic_router.route import Route, is_valid
@@ -41,11 +42,9 @@ def test_is_valid_with_invalid_json():
mock_logger.error.assert_called_once()
-class TestRoute:
- @patch("semantic_router.route.llm", new_callable=Mock)
- def test_generate_dynamic_route(self, mock_llm):
- print(f"mock_llm: {mock_llm}")
- mock_llm.return_value = """
+class MockLLM(BaseLLM):
+ def __call__(self, prompt):
+ llm_output = """
{
"name": "test_function",
@@ -58,8 +57,16 @@ def test_generate_dynamic_route(self, mock_llm):
}
"""
+ return llm_output
+
+
+class TestRoute:
+ def test_generate_dynamic_route(self):
+ mock_llm = MockLLM(name="test")
function_schema = {"name": "test_function", "type": "function"}
- route = Route._generate_dynamic_route(function_schema)
+ route = Route._generate_dynamic_route(
+ llm=mock_llm, function_schema=function_schema
+ )
assert route.name == "test_function"
assert route.utterances == [
"example_utterance_1",
@@ -105,6 +112,7 @@ def test_to_dict(self):
"utterances": ["utterance"],
"description": None,
"function_schema": None,
+ "llm": None,
}
assert route.to_dict() == expected_dict
@@ -114,28 +122,15 @@ def test_from_dict(self):
assert route.name == "test"
assert route.utterances == ["utterance"]
- @patch("semantic_router.route.llm", new_callable=Mock)
- def test_from_dynamic_route(self, mock_llm):
+ def test_from_dynamic_route(self):
# Mock the llm function
- mock_llm.return_value = """
-
- {
- "name": "test_function",
- "utterances": [
- "example_utterance_1",
- "example_utterance_2",
- "example_utterance_3",
- "example_utterance_4",
- "example_utterance_5"]
- }
-
- """
+ mock_llm = MockLLM(name="test")
def test_function(input: str):
"""Test function docstring"""
pass
- dynamic_route = Route.from_dynamic_route(test_function)
+ dynamic_route = Route.from_dynamic_route(llm=mock_llm, entity=test_function)
assert dynamic_route.name == "test_function"
assert dynamic_route.utterances == [
From 548bd40393b8e4483d9092359051c699558c709f Mon Sep 17 00:00:00 2001
From: Ismail Ashraq
Date: Sun, 7 Jan 2024 03:51:54 +0500
Subject: [PATCH 08/24] add tests for llms
---
semantic_router/llms/base.py | 3 +-
tests/unit/llms/test_llm_base.py | 16 +++++++
tests/unit/llms/test_llm_cohere.py | 52 +++++++++++++++++++++++
tests/unit/llms/test_llm_openai.py | 55 ++++++++++++++++++++++++
tests/unit/llms/test_llm_openrouter.py | 59 ++++++++++++++++++++++++++
tests/unit/test_route.py | 17 +++++++-
tests/unit/test_schema.py | 27 +++++++++++-
7 files changed, 226 insertions(+), 3 deletions(-)
create mode 100644 tests/unit/llms/test_llm_base.py
create mode 100644 tests/unit/llms/test_llm_cohere.py
create mode 100644 tests/unit/llms/test_llm_openai.py
create mode 100644 tests/unit/llms/test_llm_openrouter.py
diff --git a/semantic_router/llms/base.py b/semantic_router/llms/base.py
index 2a1a038e..dd8a0afa 100644
--- a/semantic_router/llms/base.py
+++ b/semantic_router/llms/base.py
@@ -1,4 +1,5 @@
from pydantic import BaseModel
+from semantic_router.schema import Message
class BaseLLM(BaseModel):
@@ -7,5 +8,5 @@ class BaseLLM(BaseModel):
class Config:
arbitrary_types_allowed = True
- def __call__(self, prompt) -> str | None:
+ def __call__(self, messages: list[Message]) -> str | None:
raise NotImplementedError("Subclasses must implement this method")
diff --git a/tests/unit/llms/test_llm_base.py b/tests/unit/llms/test_llm_base.py
new file mode 100644
index 00000000..df78d8f5
--- /dev/null
+++ b/tests/unit/llms/test_llm_base.py
@@ -0,0 +1,16 @@
+import pytest
+
+from semantic_router.llms import BaseLLM
+
+
+class TestBaseLLM:
+ @pytest.fixture
+ def base_llm(self):
+ return BaseLLM(name="TestLLM")
+
+ def test_base_llm_initialization(self, base_llm):
+ assert base_llm.name == "TestLLM", "Initialization of name failed"
+
+ def test_base_llm_call_method_not_implemented(self, base_llm):
+ with pytest.raises(NotImplementedError):
+ base_llm("test")
diff --git a/tests/unit/llms/test_llm_cohere.py b/tests/unit/llms/test_llm_cohere.py
new file mode 100644
index 00000000..32443f04
--- /dev/null
+++ b/tests/unit/llms/test_llm_cohere.py
@@ -0,0 +1,52 @@
+import pytest
+
+from semantic_router.llms import Cohere
+from semantic_router.schema import Message
+
+
+@pytest.fixture
+def cohere_llm(mocker):
+ mocker.patch("cohere.Client")
+ return Cohere(cohere_api_key="test_api_key")
+
+
+class TestCohereLLM:
+ def test_initialization_with_api_key(self, cohere_llm):
+ assert cohere_llm.client is not None, "Client should be initialized"
+ assert cohere_llm.name == "command", "Default name not set correctly"
+
+ def test_initialization_without_api_key(self, mocker, monkeypatch):
+ monkeypatch.delenv("COHERE_API_KEY", raising=False)
+ mocker.patch("cohere.Client")
+ with pytest.raises(ValueError):
+ Cohere()
+
+ def test_call_method(self, cohere_llm, mocker):
+ mock_llm = mocker.MagicMock()
+ mock_llm.text = "test"
+ cohere_llm.client.chat.return_value = mock_llm
+
+ llm_input = [Message(role="user", content="test")]
+ result = cohere_llm(llm_input)
+ assert isinstance(result, str), "Result should be a str"
+ cohere_llm.client.chat.assert_called_once()
+
+ def test_raises_value_error_if_cohere_client_fails_to_initialize(self, mocker):
+ mocker.patch(
+ "cohere.Client", side_effect=Exception("Failed to initialize client")
+ )
+ with pytest.raises(ValueError):
+ Cohere(cohere_api_key="test_api_key")
+
+ def test_raises_value_error_if_cohere_client_is_not_initialized(self, mocker):
+ mocker.patch("cohere.Client", return_value=None)
+ llm = Cohere(cohere_api_key="test_api_key")
+ with pytest.raises(ValueError):
+ llm("test")
+
+ def test_call_method_raises_error_on_api_failure(self, cohere_llm, mocker):
+ mocker.patch.object(
+ cohere_llm.client, "__call__", side_effect=Exception("API call failed")
+ )
+ with pytest.raises(ValueError):
+ cohere_llm("test")
diff --git a/tests/unit/llms/test_llm_openai.py b/tests/unit/llms/test_llm_openai.py
new file mode 100644
index 00000000..4b2b2f54
--- /dev/null
+++ b/tests/unit/llms/test_llm_openai.py
@@ -0,0 +1,55 @@
+import pytest
+from semantic_router.llms import OpenAI
+from semantic_router.schema import Message
+
+
+@pytest.fixture
+def openai_llm(mocker):
+ mocker.patch("openai.Client")
+ return OpenAI(openai_api_key="test_api_key")
+
+
+class TestOpenAILLM:
+ def test_openai_llm_init_with_api_key(self, openai_llm):
+ assert openai_llm.client is not None, "Client should be initialized"
+ assert openai_llm.name == "gpt-3.5-turbo", "Default name not set correctly"
+
+ def test_openai_llm_init_success(self, mocker):
+ mocker.patch("os.getenv", return_value="fake-api-key")
+ llm = OpenAI()
+ assert llm.client is not None
+
+ def test_openai_llm_init_without_api_key(self, mocker):
+ mocker.patch("os.getenv", return_value=None)
+ with pytest.raises(ValueError) as _:
+ OpenAI()
+
+ def test_openai_llm_call_uninitialized_client(self, openai_llm):
+ # Set the client to None to simulate an uninitialized client
+ openai_llm.client = None
+ with pytest.raises(ValueError) as e:
+ llm_input = [Message(role="user", content="test")]
+ openai_llm(llm_input)
+ assert "OpenAI client is not initialized." in str(e.value)
+
+ def test_openai_llm_init_exception(self, mocker):
+ mocker.patch("os.getenv", return_value="fake-api-key")
+ mocker.patch("openai.OpenAI", side_effect=Exception("Initialization error"))
+ with pytest.raises(ValueError) as e:
+ OpenAI()
+ assert (
+ "OpenAI API client failed to initialize. Error: Initialization error"
+ in str(e.value)
+ )
+
+ def test_openai_llm_call_success(self, openai_llm, mocker):
+ mock_completion = mocker.MagicMock()
+ mock_completion.choices[0].message.content = "test"
+
+ mocker.patch("os.getenv", return_value="fake-api-key")
+ mocker.patch.object(
+ openai_llm.client.chat.completions, "create", return_value=mock_completion
+ )
+ llm_input = [Message(role="user", content="test")]
+ output = openai_llm(llm_input)
+ assert output == "test"
diff --git a/tests/unit/llms/test_llm_openrouter.py b/tests/unit/llms/test_llm_openrouter.py
new file mode 100644
index 00000000..3009e293
--- /dev/null
+++ b/tests/unit/llms/test_llm_openrouter.py
@@ -0,0 +1,59 @@
+import pytest
+from semantic_router.llms import OpenRouter
+from semantic_router.schema import Message
+
+
+@pytest.fixture
+def openrouter_llm(mocker):
+ mocker.patch("openai.Client")
+ return OpenRouter(openrouter_api_key="test_api_key")
+
+
+class TestOpenRouterLLM:
+ def test_openrouter_llm_init_with_api_key(self, openrouter_llm):
+ assert openrouter_llm.client is not None, "Client should be initialized"
+ assert (
+ openrouter_llm.name == "mistralai/mistral-7b-instruct"
+ ), "Default name not set correctly"
+
+ def test_openrouter_llm_init_success(self, mocker):
+ mocker.patch("os.getenv", return_value="fake-api-key")
+ llm = OpenRouter()
+ assert llm.client is not None
+
+ def test_openrouter_llm_init_without_api_key(self, mocker):
+ mocker.patch("os.getenv", return_value=None)
+ with pytest.raises(ValueError) as _:
+ OpenRouter()
+
+ def test_openrouter_llm_call_uninitialized_client(self, openrouter_llm):
+ # Set the client to None to simulate an uninitialized client
+ openrouter_llm.client = None
+ with pytest.raises(ValueError) as e:
+ llm_input = [Message(role="user", content="test")]
+ openrouter_llm(llm_input)
+ assert "OpenRouter client is not initialized." in str(e.value)
+
+ def test_openrouter_llm_init_exception(self, mocker):
+ mocker.patch("os.getenv", return_value="fake-api-key")
+ mocker.patch("openai.OpenAI", side_effect=Exception("Initialization error"))
+ with pytest.raises(ValueError) as e:
+ OpenRouter()
+ assert (
+ "OpenRouter API client failed to initialize. Error: Initialization error"
+ in str(e.value)
+ )
+
+ def test_openrouter_llm_call_success(self, openrouter_llm, mocker):
+ mock_completion = mocker.MagicMock()
+ mock_completion.choices[0].message.content = "test"
+
+ mocker.patch("os.getenv", return_value="fake-api-key")
+ mocker.patch.object(
+ openrouter_llm.client.chat.completions,
+ "create",
+ return_value=mock_completion,
+ )
+ llm_input = [Message(role="user", content="test")]
+ output = openrouter_llm(llm_input)
+ assert output == "test"
diff --git a/tests/unit/test_route.py b/tests/unit/test_route.py
index 2eb784d4..e7842d39 100644
--- a/tests/unit/test_route.py
+++ b/tests/unit/test_route.py
@@ -1,6 +1,6 @@
from unittest.mock import patch # , AsyncMock
-# import pytest
+import pytest
from semantic_router.llms import BaseLLM
from semantic_router.route import Route, is_valid
@@ -61,6 +61,21 @@ def __call__(self, prompt):
class TestRoute:
+ def test_value_error_in_route_call(self):
+ function_schema = {"name": "test_function", "type": "function"}
+
+ route = Route(
+ name="test_function",
+ utterances=["utterance1", "utterance2"],
+ function_schema=function_schema,
+ )
+
+ with pytest.raises(
+ ValueError,
+ match="LLM is required for dynamic routes. Please ensure the 'llm' is set.",
+ ):
+ route("test_query")
+
def test_generate_dynamic_route(self):
mock_llm = MockLLM(name="test")
function_schema = {"name": "test_function", "type": "function"}
diff --git a/tests/unit/test_schema.py b/tests/unit/test_schema.py
index 97b5028e..a9e794cb 100644
--- a/tests/unit/test_schema.py
+++ b/tests/unit/test_schema.py
@@ -1,9 +1,10 @@
import pytest
-
+from pydantic import ValidationError
from semantic_router.schema import (
CohereEncoder,
Encoder,
EncoderType,
+ Message,
OpenAIEncoder,
)
@@ -38,3 +39,27 @@ def test_encoder_call_method(self, mocker):
encoder = Encoder(type="openai", name="test-engine")
result = encoder(["test"])
assert result == [0.1, 0.2, 0.3]
+
+
+class TestMessageDataclass:
+ def test_message_creation(self):
+ message = Message(role="user", content="Hello!")
+ assert message.role == "user"
+ assert message.content == "Hello!"
+
+ with pytest.raises(ValidationError):
+ Message(user_role="invalid_role", message="Hello!")
+
+ def test_message_to_openai(self):
+ message = Message(role="user", content="Hello!")
+ openai_format = message.to_openai()
+ assert openai_format == {"role": "user", "content": "Hello!"}
+
+ message = Message(role="invalid_role", content="Hello!")
+ with pytest.raises(ValueError):
+ message.to_openai()
+
+ def test_message_to_cohere(self):
+ message = Message(role="user", content="Hello!")
+ cohere_format = message.to_cohere()
+ assert cohere_format == {"role": "user", "message": "Hello!"}
From 7862efdbb09da27aa05f76e231a9a23a70abf583 Mon Sep 17 00:00:00 2001
From: Ismail Ashraq
Date: Sun, 7 Jan 2024 18:35:39 +0500
Subject: [PATCH 09/24] Move score_threshold to encoders
---
semantic_router/encoders/base.py | 1 +
semantic_router/encoders/bm25.py | 6 ++++--
semantic_router/encoders/cohere.py | 3 ++-
semantic_router/encoders/fastembed.py | 6 ++++--
semantic_router/encoders/openai.py | 3 ++-
semantic_router/layer.py | 20 +++++---------------
tests/unit/encoders/test_base.py | 3 ++-
tests/unit/test_layer.py | 7 +++++--
8 files changed, 25 insertions(+), 24 deletions(-)
diff --git a/semantic_router/encoders/base.py b/semantic_router/encoders/base.py
index bd952403..f5968578 100644
--- a/semantic_router/encoders/base.py
+++ b/semantic_router/encoders/base.py
@@ -3,6 +3,7 @@
class BaseEncoder(BaseModel):
name: str
+ score_threshold: float
type: str = Field(default="base")
class Config:
diff --git a/semantic_router/encoders/bm25.py b/semantic_router/encoders/bm25.py
index e597b4fe..68150cb7 100644
--- a/semantic_router/encoders/bm25.py
+++ b/semantic_router/encoders/bm25.py
@@ -1,6 +1,7 @@
from typing import Any
from semantic_router.encoders import BaseEncoder
+from semantic_router.utils.logger import logger
class BM25Encoder(BaseEncoder):
@@ -8,8 +9,8 @@ class BM25Encoder(BaseEncoder):
idx_mapping: dict[int, int] | None = None
type: str = "sparse"
- def __init__(self, name: str = "bm25"):
- super().__init__(name=name)
+ def __init__(self, name: str = "bm25", score_threshold: float = 0.82):
+ super().__init__(name=name, score_threshold=score_threshold)
try:
from pinecone_text.sparse import BM25Encoder as encoder
except ImportError:
@@ -17,6 +18,7 @@ def __init__(self, name: str = "bm25"):
"Please install pinecone-text to use BM25Encoder. "
"You can install it with: `pip install semantic-router[hybrid]`"
)
+ logger.info("Downloading and initializing BM25 model parameters.")
self.model = encoder.default()
params = self.model.get_params()
diff --git a/semantic_router/encoders/cohere.py b/semantic_router/encoders/cohere.py
index f7aef0e6..2f80aaaf 100644
--- a/semantic_router/encoders/cohere.py
+++ b/semantic_router/encoders/cohere.py
@@ -13,10 +13,11 @@ def __init__(
self,
name: str | None = None,
cohere_api_key: str | None = None,
+ score_threshold: float = 0.3,
):
if name is None:
name = os.getenv("COHERE_MODEL_NAME", "embed-english-v3.0")
- super().__init__(name=name)
+ super().__init__(name=name, score_threshold=score_threshold)
cohere_api_key = cohere_api_key or os.getenv("COHERE_API_KEY")
if cohere_api_key is None:
raise ValueError("Cohere API key cannot be 'None'.")
diff --git a/semantic_router/encoders/fastembed.py b/semantic_router/encoders/fastembed.py
index fb845ce7..413e3a6a 100644
--- a/semantic_router/encoders/fastembed.py
+++ b/semantic_router/encoders/fastembed.py
@@ -14,8 +14,10 @@ class FastEmbedEncoder(BaseEncoder):
threads: Optional[int] = None
_client: Any = PrivateAttr()
- def __init__(self, **data):
- super().__init__(**data)
+ def __init__(
+ self, score_threshold: float = 0.5, **data
+ ): # TODO default score_threshold not thoroughly tested, should optimize
+ super().__init__(score_threshold=score_threshold, **data)
self._client = self._initialize_client()
def _initialize_client(self):
diff --git a/semantic_router/encoders/openai.py b/semantic_router/encoders/openai.py
index f9348a12..4ec87638 100644
--- a/semantic_router/encoders/openai.py
+++ b/semantic_router/encoders/openai.py
@@ -17,10 +17,11 @@ def __init__(
self,
name: str | None = None,
openai_api_key: str | None = None,
+ score_threshold: float = 0.82,
):
if name is None:
name = os.getenv("OPENAI_MODEL_NAME", "text-embedding-ada-002")
- super().__init__(name=name)
+ super().__init__(name=name, score_threshold=score_threshold)
api_key = openai_api_key or os.getenv("OPENAI_API_KEY")
if api_key is None:
raise ValueError("OpenAI API key cannot be 'None'.")
diff --git a/semantic_router/layer.py b/semantic_router/layer.py
index 6d85508c..a71411db 100644
--- a/semantic_router/layer.py
+++ b/semantic_router/layer.py
@@ -7,8 +7,6 @@
from semantic_router.encoders import (
BaseEncoder,
CohereEncoder,
- OpenAIEncoder,
- FastEmbedEncoder,
)
from semantic_router.linear import similarity_matrix, top_scores
from semantic_router.route import Route
@@ -153,27 +151,19 @@ def remove(self, name: str):
class RouteLayer:
index: np.ndarray | None = None
categories: np.ndarray | None = None
- score_threshold: float = 0.82
+ score_threshold: float
def __init__(
- self, encoder: BaseEncoder | None = None, routes: list[Route] | None = None
+ self,
+ encoder: BaseEncoder | None = None,
+ routes: list[Route] | None = None,
):
logger.info("Initializing RouteLayer")
self.index = None
self.categories = None
self.encoder = encoder if encoder is not None else CohereEncoder()
self.routes: list[Route] = routes if routes is not None else []
- # decide on default threshold based on encoder
- # TODO move defaults to the encoder objects and extract from there
- if isinstance(encoder, OpenAIEncoder):
- self.score_threshold = 0.82
- elif isinstance(encoder, CohereEncoder):
- self.score_threshold = 0.3
- elif isinstance(encoder, FastEmbedEncoder):
- # TODO default not thoroughly tested, should optimize
- self.score_threshold = 0.5
- else:
- self.score_threshold = 0.82
+ self.score_threshold = self.encoder.score_threshold
# if routes list has been passed, we initialize index now
if len(self.routes) > 0:
# initialize index now
diff --git a/tests/unit/encoders/test_base.py b/tests/unit/encoders/test_base.py
index d2c39645..4d4b87ae 100644
--- a/tests/unit/encoders/test_base.py
+++ b/tests/unit/encoders/test_base.py
@@ -6,10 +6,11 @@
class TestBaseEncoder:
@pytest.fixture
def base_encoder(self):
- return BaseEncoder(name="TestEncoder")
+ return BaseEncoder(name="TestEncoder", score_threshold=0.5)
def test_base_encoder_initialization(self, base_encoder):
assert base_encoder.name == "TestEncoder", "Initialization of name failed"
+ assert base_encoder.score_threshold == 0.5
def test_base_encoder_call_method_not_implemented(self, base_encoder):
with pytest.raises(NotImplementedError):
diff --git a/tests/unit/test_layer.py b/tests/unit/test_layer.py
index 32754997..6652579f 100644
--- a/tests/unit/test_layer.py
+++ b/tests/unit/test_layer.py
@@ -69,7 +69,7 @@ def layer_yaml():
@pytest.fixture
def base_encoder():
- return BaseEncoder(name="test-encoder")
+ return BaseEncoder(name="test-encoder", score_threshold=0.5)
@pytest.fixture
@@ -95,6 +95,7 @@ def routes():
class TestRouteLayer:
def test_initialization(self, openai_encoder, routes):
route_layer = RouteLayer(encoder=openai_encoder, routes=routes)
+ assert openai_encoder.score_threshold == 0.82
assert route_layer.score_threshold == 0.82
assert len(route_layer.index) if route_layer.index is not None else 0 == 5
assert (
@@ -105,9 +106,11 @@ def test_initialization(self, openai_encoder, routes):
def test_initialization_different_encoders(self, cohere_encoder, openai_encoder):
route_layer_cohere = RouteLayer(encoder=cohere_encoder)
+ assert cohere_encoder.score_threshold == 0.3
assert route_layer_cohere.score_threshold == 0.3
route_layer_openai = RouteLayer(encoder=openai_encoder)
+ assert openai_encoder.score_threshold == 0.82
assert route_layer_openai.score_threshold == 0.82
def test_add_route(self, openai_encoder):
@@ -173,7 +176,7 @@ def test_pass_threshold(self, openai_encoder):
def test_failover_score_threshold(self, base_encoder):
route_layer = RouteLayer(encoder=base_encoder)
- assert route_layer.score_threshold == 0.82
+ assert route_layer.score_threshold == 0.5
def test_json(self, openai_encoder, routes):
with tempfile.NamedTemporaryFile(suffix=".yaml", delete=False) as temp:
From d551201404a2ed2c67e989bb55487d2f5a2f976e Mon Sep 17 00:00:00 2001
From: Ismail Ashraq
Date: Sun, 7 Jan 2024 19:15:03 +0500
Subject: [PATCH 10/24] Update hybrid layer to use score_threshold from
encoders
---
semantic_router/encoders/bm25.py | 6 +++++-
semantic_router/hybrid_layer.py | 12 ++----------
tests/unit/test_hybrid_layer.py | 6 ++++--
3 files changed, 11 insertions(+), 13 deletions(-)
diff --git a/semantic_router/encoders/bm25.py b/semantic_router/encoders/bm25.py
index 68150cb7..69ca58ec 100644
--- a/semantic_router/encoders/bm25.py
+++ b/semantic_router/encoders/bm25.py
@@ -19,7 +19,11 @@ def __init__(self, name: str = "bm25", score_threshold: float = 0.82):
"You can install it with: `pip install semantic-router[hybrid]`"
)
logger.info("Downloading and initializing BM25 model parameters.")
- self.model = encoder.default()
+ # self.model = encoder.default()
+ self.model = encoder()
+ self.model.fit(
+ corpus=["test test", "this is another message", "hello how are you"]
+ )
params = self.model.get_params()
doc_freq = params["doc_freq"]
diff --git a/semantic_router/hybrid_layer.py b/semantic_router/hybrid_layer.py
index fc63cfa6..cd9f7ccb 100644
--- a/semantic_router/hybrid_layer.py
+++ b/semantic_router/hybrid_layer.py
@@ -4,8 +4,6 @@
from semantic_router.encoders import (
BaseEncoder,
BM25Encoder,
- CohereEncoder,
- OpenAIEncoder,
)
from semantic_router.route import Route
from semantic_router.utils.logger import logger
@@ -15,21 +13,15 @@ class HybridRouteLayer:
index = None
sparse_index = None
categories = None
- score_threshold = 0.82
+ score_threshold: float
def __init__(
self, encoder: BaseEncoder, routes: list[Route] = [], alpha: float = 0.3
):
self.encoder = encoder
+ self.score_threshold = self.encoder.score_threshold
self.sparse_encoder = BM25Encoder()
self.alpha = alpha
- # decide on default threshold based on encoder
- if isinstance(encoder, OpenAIEncoder):
- self.score_threshold = 0.82
- elif isinstance(encoder, CohereEncoder):
- self.score_threshold = 0.3
- else:
- self.score_threshold = 0.82
# if routes list has been passed, we initialize index now
if routes:
# initialize index now
diff --git a/tests/unit/test_hybrid_layer.py b/tests/unit/test_hybrid_layer.py
index f87cb1d2..6896c4de 100644
--- a/tests/unit/test_hybrid_layer.py
+++ b/tests/unit/test_hybrid_layer.py
@@ -19,7 +19,7 @@ def mock_encoder_call(utterances):
@pytest.fixture
def base_encoder():
- return BaseEncoder(name="test-encoder")
+ return BaseEncoder(name="test-encoder", score_threshold=0.5)
@pytest.fixture
@@ -46,6 +46,7 @@ class TestHybridRouteLayer:
def test_initialization(self, openai_encoder, routes):
route_layer = HybridRouteLayer(encoder=openai_encoder, routes=routes)
assert route_layer.index is not None and route_layer.categories is not None
+ assert openai_encoder.score_threshold == 0.82
assert route_layer.score_threshold == 0.82
assert len(route_layer.index) == 5
assert len(set(route_layer.categories)) == 2
@@ -112,7 +113,8 @@ def test_pass_threshold(self, openai_encoder):
def test_failover_score_threshold(self, base_encoder):
route_layer = HybridRouteLayer(encoder=base_encoder)
- assert route_layer.score_threshold == 0.82
+ assert base_encoder.score_threshold == 0.50
+ assert route_layer.score_threshold == 0.50
# Add more tests for edge cases and error handling as needed.
From 20d16a8282825be530e032bc7cf40de9bf3287d0 Mon Sep 17 00:00:00 2001
From: Ismail Ashraq
Date: Sun, 7 Jan 2024 19:16:19 +0500
Subject: [PATCH 11/24] remove test code
---
semantic_router/encoders/bm25.py | 6 +-----
1 file changed, 1 insertion(+), 5 deletions(-)
diff --git a/semantic_router/encoders/bm25.py b/semantic_router/encoders/bm25.py
index 69ca58ec..68150cb7 100644
--- a/semantic_router/encoders/bm25.py
+++ b/semantic_router/encoders/bm25.py
@@ -19,11 +19,7 @@ def __init__(self, name: str = "bm25", score_threshold: float = 0.82):
"You can install it with: `pip install semantic-router[hybrid]`"
)
logger.info("Downloading and initializing BM25 model parameters.")
- # self.model = encoder.default()
- self.model = encoder()
- self.model.fit(
- corpus=["test test", "this is another message", "hello how are you"]
- )
+ self.model = encoder.default()
params = self.model.get_params()
doc_freq = params["doc_freq"]
From 45b2079d5772434e9f453f41db291cff02b6e75e Mon Sep 17 00:00:00 2001
From: James Briggs <35938317+jamescalam@users.noreply.github.com>
Date: Sun, 7 Jan 2024 15:24:33 +0100
Subject: [PATCH 12/24] added LLM to llm classes, update version and docs
---
README.md | 22 ++++----
docs/02-dynamic-routes.ipynb | 80 ++++++++++++++++--------------
pyproject.toml | 2 +-
semantic_router/layer.py | 16 +++---
semantic_router/llms/__init__.py | 8 +--
semantic_router/llms/cohere.py | 2 +-
semantic_router/llms/openai.py | 2 +-
semantic_router/llms/openrouter.py | 2 +-
semantic_router/route.py | 7 ++-
9 files changed, 73 insertions(+), 68 deletions(-)
diff --git a/README.md b/README.md
index da3fe685..fe5db343 100644
--- a/README.md
+++ b/README.md
@@ -12,7 +12,7 @@
-Semantic Router is a superfast decision layer for your LLMs and agents. Rather than waiting for slow LLM generations to make tool-use decisions, we use the magic of semantic vector space to make those decisions — _routing_ our requests using _semantic_ meaning.
+Semantic Router is a superfast decision-making layer for your LLMs and agents. Rather than waiting for slow LLM generations to make tool-use decisions, we use the magic of semantic vector space to make those decisions — _routing_ our requests using _semantic_ meaning.
## Quickstart
@@ -22,7 +22,9 @@ To get started with _semantic-router_ we install it like so:
pip install -qU semantic-router
```
-We begin by defining a set of `Decision` objects. These are the decision paths that the semantic router can decide to use, let's try two simple decisions for now — one for talk on _politics_ and another for _chitchat_:
+❗️ _If wanting to use local embeddings you can use `FastEmbedEncoder` (`pip install -qU semantic-router[fastembed]`). To use the `HybridRouteLayer` you must `pip install -qU semantic-router[hybrid]`._
+
+We begin by defining a set of `Route` objects. These are the decision paths that the semantic router can decide to use, let's try two simple routes for now — one for talk on _politics_ and another for _chitchat_:
```python
from semantic_router import Route
@@ -56,7 +58,7 @@ chitchat = Route(
routes = [politics, chitchat]
```
-We have our decisions ready, now we initialize an embedding / encoder model. We currently support a `CohereEncoder` and `OpenAIEncoder` — more encoders will be added soon. To initialize them we do:
+We have our routes ready, now we initialize an embedding / encoder model. We currently support a `CohereEncoder` and `OpenAIEncoder` — more encoders will be added soon. To initialize them we do:
```python
import os
@@ -71,18 +73,18 @@ os.environ["OPENAI_API_KEY"] = ""
encoder = OpenAIEncoder()
```
-With our `decisions` and `encoder` defined we now create a `DecisionLayer`. The decision layer handles our semantic decision making.
+With our `routes` and `encoder` defined we now create a `RouteLayer`. The route layer handles our semantic decision making.
```python
from semantic_router.layer import RouteLayer
-dl = RouteLayer(encoder=encoder, routes=routes)
+rl = RouteLayer(encoder=encoder, routes=routes)
```
-We can now use our decision layer to make super fast decisions based on user queries. Let's try with two queries that should trigger our decisions:
+We can now use our route layer to make super fast decisions based on user queries. Let's try with two queries that should trigger our route decisions:
```python
-dl("don't you love politics?").name
+rl("don't you love politics?").name
```
```
@@ -92,7 +94,7 @@ dl("don't you love politics?").name
Correct decision, let's try another:
```python
-dl("how's the weather today?").name
+rl("how's the weather today?").name
```
```
@@ -102,14 +104,14 @@ dl("how's the weather today?").name
We get both decisions correct! Now lets try sending an unrelated query:
```python
-dl("I'm interested in learning about llama 2").name
+rl("I'm interested in learning about llama 2").name
```
```
[Out]:
```
-In this case, no decision could be made as we had no matches — so our decision layer returned `None`!
+In this case, no decision could be made as we had no matches — so our route layer returned `None`!
## 📚 [Resources](https://github.com/aurelio-labs/semantic-router/tree/main/docs)
diff --git a/docs/02-dynamic-routes.ipynb b/docs/02-dynamic-routes.ipynb
index d8078cb2..c695838e 100644
--- a/docs/02-dynamic-routes.ipynb
+++ b/docs/02-dynamic-routes.ipynb
@@ -36,7 +36,7 @@
"metadata": {},
"outputs": [],
"source": [
- "!pip install -qU semantic-router==0.0.14"
+ "!pip install -qU semantic-router==0.0.15"
]
},
{
@@ -64,17 +64,7 @@
"cell_type": "code",
"execution_count": 1,
"metadata": {},
- "outputs": [
- {
- "name": "stderr",
- "output_type": "stream",
- "text": [
- "/Users/jamesbriggs/opt/anaconda3/envs/decision-layer/lib/python3.11/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n",
- " from .autonotebook import tqdm as notebook_tqdm\n",
- "None of PyTorch, TensorFlow >= 2.0, or Flax have been found. Models won't be available and only tokenizers, configuration and file/data utilities can be used.\n"
- ]
- }
- ],
+ "outputs": [],
"source": [
"from semantic_router import Route\n",
"\n",
@@ -102,16 +92,23 @@
"routes = [politics, chitchat]"
]
},
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "We initialize our `RouteLayer` with our `encoder` and `routes`. We can use popular encoder APIs like `CohereEncoder` and `OpenAIEncoder`, or local alternatives like `FastEmbedEncoder`."
+ ]
+ },
{
"cell_type": "code",
- "execution_count": 4,
+ "execution_count": 2,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
- "\u001b[32m2023-12-28 19:19:39 INFO semantic_router.utils.logger Initializing RouteLayer\u001b[0m\n"
+ "\u001b[32m2024-01-07 15:23:12 INFO semantic_router.utils.logger Initializing RouteLayer\u001b[0m\n"
]
}
],
@@ -119,13 +116,21 @@
"import os\n",
"from getpass import getpass\n",
"from semantic_router import RouteLayer\n",
+ "from semantic_router.encoders import CohereEncoder, OpenAIEncoder\n",
"\n",
"# dashboard.cohere.ai\n",
- "os.environ[\"COHERE_API_KEY\"] = os.getenv(\"COHERE_API_KEY\") or getpass(\n",
- " \"Enter Cohere API Key: \"\n",
+ "# os.environ[\"COHERE_API_KEY\"] = os.getenv(\"COHERE_API_KEY\") or getpass(\n",
+ "# \"Enter Cohere API Key: \"\n",
+ "# )\n",
+ "# platform.openai.com\n",
+ "os.environ[\"OPENAI_API_KEY\"] = os.getenv(\"OPENAI_API_KEY\") or getpass(\n",
+ " \"Enter OpenAI API Key: \"\n",
")\n",
"\n",
- "rl = RouteLayer(routes=routes)"
+ "# encoder = CohereEncoder()\n",
+ "encoder = OpenAIEncoder()\n",
+ "\n",
+ "rl = RouteLayer(encoder=encoder, routes=routes)"
]
},
{
@@ -137,7 +142,7 @@
},
{
"cell_type": "code",
- "execution_count": 5,
+ "execution_count": 3,
"metadata": {},
"outputs": [
{
@@ -146,7 +151,7 @@
"RouteChoice(name='chitchat', function_call=None)"
]
},
- "execution_count": 5,
+ "execution_count": 3,
"metadata": {},
"output_type": "execute_result"
}
@@ -171,7 +176,7 @@
},
{
"cell_type": "code",
- "execution_count": 6,
+ "execution_count": 4,
"metadata": {},
"outputs": [],
"source": [
@@ -193,16 +198,16 @@
},
{
"cell_type": "code",
- "execution_count": 7,
+ "execution_count": 5,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
- "'13:19'"
+ "'09:23'"
]
},
- "execution_count": 7,
+ "execution_count": 5,
"metadata": {},
"output_type": "execute_result"
}
@@ -220,7 +225,7 @@
},
{
"cell_type": "code",
- "execution_count": 8,
+ "execution_count": 6,
"metadata": {},
"outputs": [
{
@@ -232,7 +237,7 @@
" 'output': \"\"}"
]
},
- "execution_count": 8,
+ "execution_count": 6,
"metadata": {},
"output_type": "execute_result"
}
@@ -253,7 +258,7 @@
},
{
"cell_type": "code",
- "execution_count": 9,
+ "execution_count": 7,
"metadata": {},
"outputs": [],
"source": [
@@ -277,16 +282,14 @@
},
{
"cell_type": "code",
- "execution_count": 10,
+ "execution_count": 8,
"metadata": {},
"outputs": [
{
- "name": "stdout",
+ "name": "stderr",
"output_type": "stream",
"text": [
- "Adding route `get_time`\n",
- "Adding route to categories\n",
- "Adding route to index\n"
+ "\u001b[32m2024-01-07 15:23:16 INFO semantic_router.utils.logger Adding `get_time` route\u001b[0m\n"
]
}
],
@@ -303,31 +306,32 @@
},
{
"cell_type": "code",
- "execution_count": 11,
+ "execution_count": 9,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
- "\u001b[32m2023-12-28 19:21:58 INFO semantic_router.utils.logger Extracting function input...\u001b[0m\n"
+ "\u001b[33m2024-01-07 15:23:17 WARNING semantic_router.utils.logger No LLM provided for dynamic route, will use OpenAI LLM default. Ensure API key is set in OPENAI_API_KEY environment variable.\u001b[0m\n",
+ "\u001b[32m2024-01-07 15:23:17 INFO semantic_router.utils.logger Extracting function input...\u001b[0m\n"
]
},
{
"data": {
"text/plain": [
- "RouteChoice(name='get_time', function_call={'timezone': 'America/New_York'})"
+ "RouteChoice(name='get_time', function_call={'timezone': 'new york city'})"
]
},
- "execution_count": 11,
+ "execution_count": 9,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
- "# https://openrouter.ai/keys\n",
- "os.environ[\"OPENROUTER_API_KEY\"] = os.getenv(\"OPENROUTER_API_KEY\") or getpass(\n",
- " \"Enter OpenRouter API Key: \"\n",
+ "# https://platform.openai.com/\n",
+ "os.environ[\"OPENAI_API_KEY\"] = os.getenv(\"OPENAI_API_KEY\") or getpass(\n",
+ " \"Enter OpenAI API Key: \"\n",
")\n",
"\n",
"rl(\"what is the time in new york city?\")"
diff --git a/pyproject.toml b/pyproject.toml
index d3561c64..b24ed4f3 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "semantic-router"
-version = "0.0.14"
+version = "0.0.15"
description = "Super fast semantic router for AI decision making"
authors = [
"James Briggs ",
diff --git a/semantic_router/layer.py b/semantic_router/layer.py
index eed04e36..b3173728 100644
--- a/semantic_router/layer.py
+++ b/semantic_router/layer.py
@@ -10,7 +10,7 @@
OpenAIEncoder,
FastEmbedEncoder,
)
-from semantic_router.llms import BaseLLM
+from semantic_router.llms import BaseLLM, OpenAILLM
from semantic_router.linear import similarity_matrix, top_scores
from semantic_router.route import Route
from semantic_router.schema import Encoder, EncoderType, RouteChoice
@@ -193,9 +193,13 @@ def __call__(self, text: str) -> RouteChoice:
route = [route for route in self.routes if route.name == top_class][0]
if route.function_schema and not isinstance(route.llm, BaseLLM):
if not self.llm:
- raise ValueError(
- "LLM is required for dynamic routes. Please ensure the 'llm' is set."
+ logger.warning(
+ "No LLM provided for dynamic route, will use OpenAI LLM "
+ "default. Ensure API key is set in OPENAI_API_KEY environment "
+ "variable."
)
+ self.llm = OpenAILLM()
+ route.llm = self.llm
else:
route.llm = self.llm
return route(text)
@@ -228,24 +232,20 @@ def from_config(cls, config: LayerConfig):
return cls(encoder=encoder, routes=config.routes)
def add(self, route: Route):
- print(f"Adding route `{route.name}`")
+ logger.info(f"Adding `{route.name}` route")
# create embeddings
embeds = self.encoder(route.utterances)
# create route array
if self.categories is None:
- print("Initializing categories array")
self.categories = np.array([route.name] * len(embeds))
else:
- print("Adding route to categories")
str_arr = np.array([route.name] * len(embeds))
self.categories = np.concatenate([self.categories, str_arr])
# create utterance array (the index)
if self.index is None:
- print("Initializing index array")
self.index = np.array(embeds)
else:
- print("Adding route to index")
embed_arr = np.array(embeds)
self.index = np.concatenate([self.index, embed_arr])
# add route to routes list
diff --git a/semantic_router/llms/__init__.py b/semantic_router/llms/__init__.py
index 446f7c42..c7d6962b 100644
--- a/semantic_router/llms/__init__.py
+++ b/semantic_router/llms/__init__.py
@@ -1,7 +1,7 @@
from semantic_router.llms.base import BaseLLM
-from semantic_router.llms.openai import OpenAI
-from semantic_router.llms.openrouter import OpenRouter
-from semantic_router.llms.cohere import Cohere
+from semantic_router.llms.openai import OpenAILLM
+from semantic_router.llms.openrouter import OpenRouterLLM
+from semantic_router.llms.cohere import CohereLLM
-__all__ = ["BaseLLM", "OpenAI", "OpenRouter", "Cohere"]
+__all__ = ["BaseLLM", "OpenAILLM", "OpenRouterLLM", "CohereLLM"]
diff --git a/semantic_router/llms/cohere.py b/semantic_router/llms/cohere.py
index 80512d5c..be99bbc4 100644
--- a/semantic_router/llms/cohere.py
+++ b/semantic_router/llms/cohere.py
@@ -4,7 +4,7 @@
from semantic_router.schema import Message
-class Cohere(BaseLLM):
+class CohereLLM(BaseLLM):
client: cohere.Client | None = None
def __init__(
diff --git a/semantic_router/llms/openai.py b/semantic_router/llms/openai.py
index 18b6e706..5ee56398 100644
--- a/semantic_router/llms/openai.py
+++ b/semantic_router/llms/openai.py
@@ -5,7 +5,7 @@
from semantic_router.schema import Message
-class OpenAI(BaseLLM):
+class OpenAILLM(BaseLLM):
client: openai.OpenAI | None
temperature: float | None
max_tokens: int | None
diff --git a/semantic_router/llms/openrouter.py b/semantic_router/llms/openrouter.py
index 3b7a9b49..5c3b317f 100644
--- a/semantic_router/llms/openrouter.py
+++ b/semantic_router/llms/openrouter.py
@@ -5,7 +5,7 @@
from semantic_router.schema import Message
-class OpenRouter(BaseLLM):
+class OpenRouterLLM(BaseLLM):
client: openai.OpenAI | None
base_url: str | None
temperature: float | None
diff --git a/semantic_router/route.py b/semantic_router/route.py
index 454cfe79..0d8269f0 100644
--- a/semantic_router/route.py
+++ b/semantic_router/route.py
@@ -5,12 +5,10 @@
from pydantic import BaseModel
from semantic_router.llms import BaseLLM
-from semantic_router.schema import RouteChoice
+from semantic_router.schema import Message, RouteChoice
from semantic_router.utils import function_call
from semantic_router.utils.logger import logger
-from semantic_router.schema import Message
-
def is_valid(route_config: str) -> bool:
try:
@@ -51,7 +49,8 @@ def __call__(self, query: str) -> RouteChoice:
if self.function_schema:
if not self.llm:
raise ValueError(
- "LLM is required for dynamic routes. Please ensure the 'llm' is set."
+ "LLM is required for dynamic routes. Please ensure the `llm` "
+ "attribute is set."
)
# if a function schema is provided we generate the inputs
extracted_inputs = function_call.extract_function_inputs(
From 0696edcfc1073dd25315f6ee4f4c20f37b48a4b9 Mon Sep 17 00:00:00 2001
From: James Briggs <35938317+jamescalam@users.noreply.github.com>
Date: Sun, 7 Jan 2024 15:32:04 +0100
Subject: [PATCH 13/24] lint and test fix
---
semantic_router/layer.py | 4 ++--
semantic_router/llms/__init__.py | 3 +--
semantic_router/llms/base.py | 1 +
semantic_router/llms/cohere.py | 2 ++
semantic_router/llms/openai.py | 4 +++-
semantic_router/llms/openrouter.py | 4 +++-
semantic_router/schema.py | 2 +-
semantic_router/utils/function_call.py | 3 +--
tests/unit/llms/test_llm_cohere.py | 10 +++++-----
tests/unit/llms/test_llm_openai.py | 11 ++++++-----
tests/unit/llms/test_llm_openrouter.py | 11 ++++++-----
tests/unit/test_route.py | 1 +
tests/unit/test_schema.py | 1 +
13 files changed, 33 insertions(+), 24 deletions(-)
diff --git a/semantic_router/layer.py b/semantic_router/layer.py
index b3173728..72f8a8f0 100644
--- a/semantic_router/layer.py
+++ b/semantic_router/layer.py
@@ -7,11 +7,11 @@
from semantic_router.encoders import (
BaseEncoder,
CohereEncoder,
- OpenAIEncoder,
FastEmbedEncoder,
+ OpenAIEncoder,
)
-from semantic_router.llms import BaseLLM, OpenAILLM
from semantic_router.linear import similarity_matrix, top_scores
+from semantic_router.llms import BaseLLM, OpenAILLM
from semantic_router.route import Route
from semantic_router.schema import Encoder, EncoderType, RouteChoice
from semantic_router.utils.logger import logger
diff --git a/semantic_router/llms/__init__.py b/semantic_router/llms/__init__.py
index c7d6962b..e5aedc85 100644
--- a/semantic_router/llms/__init__.py
+++ b/semantic_router/llms/__init__.py
@@ -1,7 +1,6 @@
from semantic_router.llms.base import BaseLLM
+from semantic_router.llms.cohere import CohereLLM
from semantic_router.llms.openai import OpenAILLM
from semantic_router.llms.openrouter import OpenRouterLLM
-from semantic_router.llms.cohere import CohereLLM
-
__all__ = ["BaseLLM", "OpenAILLM", "OpenRouterLLM", "CohereLLM"]
diff --git a/semantic_router/llms/base.py b/semantic_router/llms/base.py
index dd8a0afa..51db1fd0 100644
--- a/semantic_router/llms/base.py
+++ b/semantic_router/llms/base.py
@@ -1,4 +1,5 @@
from pydantic import BaseModel
+
from semantic_router.schema import Message
diff --git a/semantic_router/llms/cohere.py b/semantic_router/llms/cohere.py
index be99bbc4..77581700 100644
--- a/semantic_router/llms/cohere.py
+++ b/semantic_router/llms/cohere.py
@@ -1,5 +1,7 @@
import os
+
import cohere
+
from semantic_router.llms import BaseLLM
from semantic_router.schema import Message
diff --git a/semantic_router/llms/openai.py b/semantic_router/llms/openai.py
index 5ee56398..43ddd642 100644
--- a/semantic_router/llms/openai.py
+++ b/semantic_router/llms/openai.py
@@ -1,8 +1,10 @@
import os
+
import openai
-from semantic_router.utils.logger import logger
+
from semantic_router.llms import BaseLLM
from semantic_router.schema import Message
+from semantic_router.utils.logger import logger
class OpenAILLM(BaseLLM):
diff --git a/semantic_router/llms/openrouter.py b/semantic_router/llms/openrouter.py
index 5c3b317f..587eeb12 100644
--- a/semantic_router/llms/openrouter.py
+++ b/semantic_router/llms/openrouter.py
@@ -1,8 +1,10 @@
import os
+
import openai
-from semantic_router.utils.logger import logger
+
from semantic_router.llms import BaseLLM
from semantic_router.schema import Message
+from semantic_router.utils.logger import logger
class OpenRouterLLM(BaseLLM):
diff --git a/semantic_router/schema.py b/semantic_router/schema.py
index f4e4e8b3..5e94c23b 100644
--- a/semantic_router/schema.py
+++ b/semantic_router/schema.py
@@ -6,8 +6,8 @@
from semantic_router.encoders import (
BaseEncoder,
CohereEncoder,
- OpenAIEncoder,
FastEmbedEncoder,
+ OpenAIEncoder,
)
from semantic_router.utils.splitters import semantic_splitter
diff --git a/semantic_router/utils/function_call.py b/semantic_router/utils/function_call.py
index 19afcc47..cedd9b6e 100644
--- a/semantic_router/utils/function_call.py
+++ b/semantic_router/utils/function_call.py
@@ -5,8 +5,7 @@
from pydantic import BaseModel
from semantic_router.llms import BaseLLM
-from semantic_router.schema import Message
-from semantic_router.schema import RouteChoice
+from semantic_router.schema import Message, RouteChoice
from semantic_router.utils.logger import logger
diff --git a/tests/unit/llms/test_llm_cohere.py b/tests/unit/llms/test_llm_cohere.py
index 32443f04..aaf8a7e5 100644
--- a/tests/unit/llms/test_llm_cohere.py
+++ b/tests/unit/llms/test_llm_cohere.py
@@ -1,13 +1,13 @@
import pytest
-from semantic_router.llms import Cohere
+from semantic_router.llms import CohereLLM
from semantic_router.schema import Message
@pytest.fixture
def cohere_llm(mocker):
mocker.patch("cohere.Client")
- return Cohere(cohere_api_key="test_api_key")
+ return CohereLLM(cohere_api_key="test_api_key")
class TestCohereLLM:
@@ -19,7 +19,7 @@ def test_initialization_without_api_key(self, mocker, monkeypatch):
monkeypatch.delenv("COHERE_API_KEY", raising=False)
mocker.patch("cohere.Client")
with pytest.raises(ValueError):
- Cohere()
+ CohereLLM()
def test_call_method(self, cohere_llm, mocker):
mock_llm = mocker.MagicMock()
@@ -36,11 +36,11 @@ def test_raises_value_error_if_cohere_client_fails_to_initialize(self, mocker):
"cohere.Client", side_effect=Exception("Failed to initialize client")
)
with pytest.raises(ValueError):
- Cohere(cohere_api_key="test_api_key")
+ CohereLLM(cohere_api_key="test_api_key")
def test_raises_value_error_if_cohere_client_is_not_initialized(self, mocker):
mocker.patch("cohere.Client", return_value=None)
- llm = Cohere(cohere_api_key="test_api_key")
+ llm = CohereLLM(cohere_api_key="test_api_key")
with pytest.raises(ValueError):
llm("test")
diff --git a/tests/unit/llms/test_llm_openai.py b/tests/unit/llms/test_llm_openai.py
index 4b2b2f54..2f1171db 100644
--- a/tests/unit/llms/test_llm_openai.py
+++ b/tests/unit/llms/test_llm_openai.py
@@ -1,12 +1,13 @@
import pytest
-from semantic_router.llms import OpenAI
+
+from semantic_router.llms import OpenAILLM
from semantic_router.schema import Message
@pytest.fixture
def openai_llm(mocker):
mocker.patch("openai.Client")
- return OpenAI(openai_api_key="test_api_key")
+ return OpenAILLM(openai_api_key="test_api_key")
class TestOpenAILLM:
@@ -16,13 +17,13 @@ def test_openai_llm_init_with_api_key(self, openai_llm):
def test_openai_llm_init_success(self, mocker):
mocker.patch("os.getenv", return_value="fake-api-key")
- llm = OpenAI()
+ llm = OpenAILLM()
assert llm.client is not None
def test_openai_llm_init_without_api_key(self, mocker):
mocker.patch("os.getenv", return_value=None)
with pytest.raises(ValueError) as _:
- OpenAI()
+ OpenAILLM()
def test_openai_llm_call_uninitialized_client(self, openai_llm):
# Set the client to None to simulate an uninitialized client
@@ -36,7 +37,7 @@ def test_openai_llm_init_exception(self, mocker):
mocker.patch("os.getenv", return_value="fake-api-key")
mocker.patch("openai.OpenAI", side_effect=Exception("Initialization error"))
with pytest.raises(ValueError) as e:
- OpenAI()
+ OpenAILLM()
assert (
"OpenAI API client failed to initialize. Error: Initialization error"
in str(e.value)
diff --git a/tests/unit/llms/test_llm_openrouter.py b/tests/unit/llms/test_llm_openrouter.py
index 3009e293..9b1ee150 100644
--- a/tests/unit/llms/test_llm_openrouter.py
+++ b/tests/unit/llms/test_llm_openrouter.py
@@ -1,12 +1,13 @@
import pytest
-from semantic_router.llms import OpenRouter
+
+from semantic_router.llms import OpenRouterLLM
from semantic_router.schema import Message
@pytest.fixture
def openrouter_llm(mocker):
mocker.patch("openai.Client")
- return OpenRouter(openrouter_api_key="test_api_key")
+ return OpenRouterLLM(openrouter_api_key="test_api_key")
class TestOpenRouterLLM:
@@ -18,13 +19,13 @@ def test_openrouter_llm_init_with_api_key(self, openrouter_llm):
def test_openrouter_llm_init_success(self, mocker):
mocker.patch("os.getenv", return_value="fake-api-key")
- llm = OpenRouter()
+ llm = OpenRouterLLM()
assert llm.client is not None
def test_openrouter_llm_init_without_api_key(self, mocker):
mocker.patch("os.getenv", return_value=None)
with pytest.raises(ValueError) as _:
- OpenRouter()
+ OpenRouterLLM()
def test_openrouter_llm_call_uninitialized_client(self, openrouter_llm):
# Set the client to None to simulate an uninitialized client
@@ -38,7 +39,7 @@ def test_openrouter_llm_init_exception(self, mocker):
mocker.patch("os.getenv", return_value="fake-api-key")
mocker.patch("openai.OpenAI", side_effect=Exception("Initialization error"))
with pytest.raises(ValueError) as e:
- OpenRouter()
+ OpenRouterLLM()
assert (
"OpenRouter API client failed to initialize. Error: Initialization error"
in str(e.value)
diff --git a/tests/unit/test_route.py b/tests/unit/test_route.py
index e7842d39..0f7c4d8d 100644
--- a/tests/unit/test_route.py
+++ b/tests/unit/test_route.py
@@ -1,6 +1,7 @@
from unittest.mock import patch # , AsyncMock
import pytest
+
from semantic_router.llms import BaseLLM
from semantic_router.route import Route, is_valid
diff --git a/tests/unit/test_schema.py b/tests/unit/test_schema.py
index a9e794cb..a41d5fa7 100644
--- a/tests/unit/test_schema.py
+++ b/tests/unit/test_schema.py
@@ -1,5 +1,6 @@
import pytest
from pydantic import ValidationError
+
from semantic_router.schema import (
CohereEncoder,
Encoder,
From 3c3dce4cb16d1ad7e85a52ef7f641a3672b8d75b Mon Sep 17 00:00:00 2001
From: James Briggs <35938317+jamescalam@users.noreply.github.com>
Date: Sun, 7 Jan 2024 15:37:25 +0100
Subject: [PATCH 14/24] update test for ValueError in missing LLM
---
tests/unit/test_route.py | 5 +----
1 file changed, 1 insertion(+), 4 deletions(-)
diff --git a/tests/unit/test_route.py b/tests/unit/test_route.py
index 0f7c4d8d..33a9ac13 100644
--- a/tests/unit/test_route.py
+++ b/tests/unit/test_route.py
@@ -71,10 +71,7 @@ def test_value_error_in_route_call(self):
function_schema=function_schema,
)
- with pytest.raises(
- ValueError,
- match="LLM is required for dynamic routes. Please ensure the 'llm' is set.",
- ):
+ with pytest.raises(ValueError):
route("test_query")
def test_generate_dynamic_route(self):
From e9660ee0388f0aa2798786c41dfb0c54d0c6f659 Mon Sep 17 00:00:00 2001
From: James Briggs <35938317+jamescalam@users.noreply.github.com>
Date: Sun, 7 Jan 2024 15:48:50 +0100
Subject: [PATCH 15/24] added dynamic route init test
---
tests/unit/test_layer.py | 12 ++++++++++++
1 file changed, 12 insertions(+)
diff --git a/tests/unit/test_layer.py b/tests/unit/test_layer.py
index 32754997..1511a983 100644
--- a/tests/unit/test_layer.py
+++ b/tests/unit/test_layer.py
@@ -91,6 +91,13 @@ def routes():
Route(name="Route 2", utterances=["Goodbye", "Bye", "Au revoir"]),
]
+@pytest.fixture
+def dynamic_routes():
+ return [
+ Route(name="Route 1", utterances=["Hello", "Hi"], function_schema="test"),
+ Route(name="Route 2", utterances=["Goodbye", "Bye", "Au revoir"]),
+ ]
+
class TestRouteLayer:
def test_initialization(self, openai_encoder, routes):
@@ -106,7 +113,12 @@ def test_initialization(self, openai_encoder, routes):
def test_initialization_different_encoders(self, cohere_encoder, openai_encoder):
route_layer_cohere = RouteLayer(encoder=cohere_encoder)
assert route_layer_cohere.score_threshold == 0.3
+ route_layer_openai = RouteLayer(encoder=openai_encoder)
+ assert route_layer_openai.score_threshold == 0.82
+ def test_initialization_dynamic_route(self, cohere_encoder, openai_encoder):
+ route_layer_cohere = RouteLayer(encoder=cohere_encoder)
+ assert route_layer_cohere.score_threshold == 0.3
route_layer_openai = RouteLayer(encoder=openai_encoder)
assert route_layer_openai.score_threshold == 0.82
From 774b8eda4500ea3b32cc0c49c09bcd8ce2e3b55d Mon Sep 17 00:00:00 2001
From: James Briggs <35938317+jamescalam@users.noreply.github.com>
Date: Sun, 7 Jan 2024 15:49:51 +0100
Subject: [PATCH 16/24] lint
---
tests/unit/test_layer.py | 1 +
1 file changed, 1 insertion(+)
diff --git a/tests/unit/test_layer.py b/tests/unit/test_layer.py
index 1511a983..495d1bdc 100644
--- a/tests/unit/test_layer.py
+++ b/tests/unit/test_layer.py
@@ -91,6 +91,7 @@ def routes():
Route(name="Route 2", utterances=["Goodbye", "Bye", "Au revoir"]),
]
+
@pytest.fixture
def dynamic_routes():
return [
From f67b051c84b8ca343ba1c749815867f106a0dc7a Mon Sep 17 00:00:00 2001
From: James Briggs <35938317+jamescalam@users.noreply.github.com>
Date: Sun, 7 Jan 2024 16:19:17 +0100
Subject: [PATCH 17/24] add type definition for self.encoder
---
semantic_router/layer.py | 6 ++----
1 file changed, 2 insertions(+), 4 deletions(-)
diff --git a/semantic_router/layer.py b/semantic_router/layer.py
index dd9006ca..08261756 100644
--- a/semantic_router/layer.py
+++ b/semantic_router/layer.py
@@ -4,10 +4,7 @@
import numpy as np
import yaml
-from semantic_router.encoders import (
- BaseEncoder,
- OpenAIEncoder
-)
+from semantic_router.encoders import BaseEncoder, OpenAIEncoder
from semantic_router.linear import similarity_matrix, top_scores
from semantic_router.llms import BaseLLM, OpenAILLM
from semantic_router.route import Route
@@ -153,6 +150,7 @@ class RouteLayer:
index: np.ndarray | None = None
categories: np.ndarray | None = None
score_threshold: float
+ encoder: BaseEncoder
def __init__(
self,
From 34135011c8770ac82b3a7c635f00e00f615e9934 Mon Sep 17 00:00:00 2001
From: James Briggs <35938317+jamescalam@users.noreply.github.com>
Date: Sun, 7 Jan 2024 16:30:14 +0100
Subject: [PATCH 18/24] add test for encoder is None
---
tests/unit/test_layer.py | 4 ++++
1 file changed, 4 insertions(+)
diff --git a/tests/unit/test_layer.py b/tests/unit/test_layer.py
index 286e7ace..92577f34 100644
--- a/tests/unit/test_layer.py
+++ b/tests/unit/test_layer.py
@@ -119,6 +119,10 @@ def test_initialization_different_encoders(self, cohere_encoder, openai_encoder)
route_layer_openai = RouteLayer(encoder=openai_encoder)
assert route_layer_openai.score_threshold == 0.82
+ def test_initialization_no_encoder(self, openai_encoder):
+ route_layer_none = RouteLayer(encoder=None)
+ assert route_layer_none.score_threshold == openai_encoder.score_threshold
+
def test_initialization_dynamic_route(self, cohere_encoder, openai_encoder):
route_layer_cohere = RouteLayer(encoder=cohere_encoder)
assert route_layer_cohere.score_threshold == 0.3
From 65f194ef37cce3498c8a05bb95f0a6f7c4b9d454 Mon Sep 17 00:00:00 2001
From: hananel
Date: Sun, 7 Jan 2024 17:32:14 +0200
Subject: [PATCH 19/24] exclude idea
---
.gitignore | 1 +
1 file changed, 1 insertion(+)
diff --git a/.gitignore b/.gitignore
index 8335baaf..cb4c0022 100644
--- a/.gitignore
+++ b/.gitignore
@@ -5,6 +5,7 @@ __pycache__
venv/
/.vscode
.vscode
+.idea
**/__pycache__
**/*.py[cod]
From 89029f0177d10899da9c27909bd0fd05a5447f4f Mon Sep 17 00:00:00 2001
From: James Briggs <35938317+jamescalam@users.noreply.github.com>
Date: Sun, 7 Jan 2024 16:48:30 +0100
Subject: [PATCH 20/24] add patch for encoder in None test
---
tests/unit/test_layer.py | 5 +++--
1 file changed, 3 insertions(+), 2 deletions(-)
diff --git a/tests/unit/test_layer.py b/tests/unit/test_layer.py
index 92577f34..cdf86bbc 100644
--- a/tests/unit/test_layer.py
+++ b/tests/unit/test_layer.py
@@ -120,8 +120,9 @@ def test_initialization_different_encoders(self, cohere_encoder, openai_encoder)
assert route_layer_openai.score_threshold == 0.82
def test_initialization_no_encoder(self, openai_encoder):
- route_layer_none = RouteLayer(encoder=None)
- assert route_layer_none.score_threshold == openai_encoder.score_threshold
+ with patch("semantic_router.encoders.OpenAIEncoder") as _:
+ route_layer_none = RouteLayer(encoder=None)
+ assert route_layer_none.score_threshold == openai_encoder.score_threshold
def test_initialization_dynamic_route(self, cohere_encoder, openai_encoder):
route_layer_cohere = RouteLayer(encoder=cohere_encoder)
From 534d9eef02acdc085b693c38fc32683c77ea22ed Mon Sep 17 00:00:00 2001
From: James Briggs <35938317+jamescalam@users.noreply.github.com>
Date: Sun, 7 Jan 2024 16:59:27 +0100
Subject: [PATCH 21/24] add env var
---
tests/unit/test_layer.py | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/tests/unit/test_layer.py b/tests/unit/test_layer.py
index cdf86bbc..6e2ac317 100644
--- a/tests/unit/test_layer.py
+++ b/tests/unit/test_layer.py
@@ -120,9 +120,9 @@ def test_initialization_different_encoders(self, cohere_encoder, openai_encoder)
assert route_layer_openai.score_threshold == 0.82
def test_initialization_no_encoder(self, openai_encoder):
- with patch("semantic_router.encoders.OpenAIEncoder") as _:
- route_layer_none = RouteLayer(encoder=None)
- assert route_layer_none.score_threshold == openai_encoder.score_threshold
+ os.environ["OPENAI_API_KEY"] = "test_api_key"
+ route_layer_none = RouteLayer(encoder=None)
+ assert route_layer_none.score_threshold == openai_encoder.score_threshold
def test_initialization_dynamic_route(self, cohere_encoder, openai_encoder):
route_layer_cohere = RouteLayer(encoder=cohere_encoder)
From 7a363791b448bf082d63b462937099b79ed6d28d Mon Sep 17 00:00:00 2001
From: James Briggs <35938317+jamescalam@users.noreply.github.com>
Date: Sun, 7 Jan 2024 18:19:22 +0100
Subject: [PATCH 22/24] updated version in notebooks
---
docs/00-introduction.ipynb | 24 +++++++--------
docs/01-save-load-from-file.ipynb | 47 ++++++++++++-----------------
docs/02-dynamic-routes.ipynb | 17 ++++++++---
docs/03-basic-langchain-agent.ipynb | 4 +--
4 files changed, 46 insertions(+), 46 deletions(-)
diff --git a/docs/00-introduction.ipynb b/docs/00-introduction.ipynb
index ae6b768d..4d672f37 100644
--- a/docs/00-introduction.ipynb
+++ b/docs/00-introduction.ipynb
@@ -41,7 +41,7 @@
"metadata": {},
"outputs": [],
"source": [
- "!pip install -qU semantic-router==0.0.14"
+ "!pip install -qU semantic-router==0.0.15"
]
},
{
@@ -60,7 +60,7 @@
},
{
"cell_type": "code",
- "execution_count": 2,
+ "execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
@@ -88,7 +88,7 @@
},
{
"cell_type": "code",
- "execution_count": 3,
+ "execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
@@ -115,7 +115,7 @@
},
{
"cell_type": "code",
- "execution_count": 5,
+ "execution_count": 3,
"metadata": {},
"outputs": [],
"source": [
@@ -143,14 +143,14 @@
},
{
"cell_type": "code",
- "execution_count": 6,
+ "execution_count": 4,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
- "\u001b[32m2023-12-28 19:14:34 INFO semantic_router.utils.logger Initializing RouteLayer\u001b[0m\n"
+ "\u001b[32m2024-01-07 18:08:29 INFO semantic_router.utils.logger Initializing RouteLayer\u001b[0m\n"
]
}
],
@@ -169,7 +169,7 @@
},
{
"cell_type": "code",
- "execution_count": 7,
+ "execution_count": 5,
"metadata": {},
"outputs": [
{
@@ -178,7 +178,7 @@
"RouteChoice(name='politics', function_call=None)"
]
},
- "execution_count": 7,
+ "execution_count": 5,
"metadata": {},
"output_type": "execute_result"
}
@@ -189,7 +189,7 @@
},
{
"cell_type": "code",
- "execution_count": 8,
+ "execution_count": 6,
"metadata": {},
"outputs": [
{
@@ -198,7 +198,7 @@
"RouteChoice(name='chitchat', function_call=None)"
]
},
- "execution_count": 8,
+ "execution_count": 6,
"metadata": {},
"output_type": "execute_result"
}
@@ -216,7 +216,7 @@
},
{
"cell_type": "code",
- "execution_count": 9,
+ "execution_count": 7,
"metadata": {},
"outputs": [
{
@@ -225,7 +225,7 @@
"RouteChoice(name=None, function_call=None)"
]
},
- "execution_count": 9,
+ "execution_count": 7,
"metadata": {},
"output_type": "execute_result"
}
diff --git a/docs/01-save-load-from-file.ipynb b/docs/01-save-load-from-file.ipynb
index 715679ce..43937d2b 100644
--- a/docs/01-save-load-from-file.ipynb
+++ b/docs/01-save-load-from-file.ipynb
@@ -36,7 +36,7 @@
"metadata": {},
"outputs": [],
"source": [
- "!pip install -qU semantic-router==0.0.14"
+ "!pip install -qU semantic-router==0.0.15"
]
},
{
@@ -64,17 +64,7 @@
"cell_type": "code",
"execution_count": 1,
"metadata": {},
- "outputs": [
- {
- "name": "stderr",
- "output_type": "stream",
- "text": [
- "/Users/jamesbriggs/opt/anaconda3/envs/decision-layer/lib/python3.11/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n",
- " from .autonotebook import tqdm as notebook_tqdm\n",
- "None of PyTorch, TensorFlow >= 2.0, or Flax have been found. Models won't be available and only tokenizers, configuration and file/data utilities can be used.\n"
- ]
- }
- ],
+ "outputs": [],
"source": [
"from semantic_router import Route\n",
"\n",
@@ -106,19 +96,19 @@
"cell_type": "markdown",
"metadata": {},
"source": [
- "We define a route layer using these routes and using the default Cohere encoder."
+ "We define a route layer using these routes and using the Cohere encoder."
]
},
{
"cell_type": "code",
- "execution_count": 4,
+ "execution_count": 2,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
- "\u001b[32m2023-12-28 19:16:54 INFO semantic_router.utils.logger Initializing RouteLayer\u001b[0m\n"
+ "\u001b[32m2024-01-07 18:10:03 INFO semantic_router.utils.logger Initializing RouteLayer\u001b[0m\n"
]
}
],
@@ -126,13 +116,16 @@
"import os\n",
"from getpass import getpass\n",
"from semantic_router import RouteLayer\n",
+ "from semantic_router.encoders import CohereEncoder\n",
"\n",
"# dashboard.cohere.ai\n",
"os.environ[\"COHERE_API_KEY\"] = os.getenv(\"COHERE_API_KEY\") or getpass(\n",
" \"Enter Cohere API Key: \"\n",
")\n",
"\n",
- "rl = RouteLayer(routes=routes)"
+ "encoder = CohereEncoder()\n",
+ "\n",
+ "rl = RouteLayer(encoder=encoder, routes=routes)"
]
},
{
@@ -144,14 +137,14 @@
},
{
"cell_type": "code",
- "execution_count": 5,
+ "execution_count": 3,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
- "\u001b[32m2023-12-28 19:17:03 INFO semantic_router.utils.logger Saving route config to layer.json\u001b[0m\n"
+ "\u001b[32m2024-01-07 18:10:05 INFO semantic_router.utils.logger Saving route config to layer.json\u001b[0m\n"
]
}
],
@@ -175,14 +168,14 @@
},
{
"cell_type": "code",
- "execution_count": 6,
+ "execution_count": 4,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
- "{'encoder_type': 'cohere', 'encoder_name': 'embed-english-v3.0', 'routes': [{'name': 'politics', 'utterances': [\"isn't politics the best thing ever\", \"why don't you tell me about your political opinions\", \"don't you just love the presidentdon't you just hate the president\", \"they're going to destroy this country!\", 'they will save the country!'], 'description': None, 'function_schema': None}, {'name': 'chitchat', 'utterances': [\"how's the weather today?\", 'how are things going?', 'lovely weather today', 'the weather is horrendous', \"let's go to the chippy\"], 'description': None, 'function_schema': None}]}\n"
+ "{'encoder_type': 'cohere', 'encoder_name': 'embed-english-v3.0', 'routes': [{'name': 'politics', 'utterances': [\"isn't politics the best thing ever\", \"why don't you tell me about your political opinions\", \"don't you just love the presidentdon't you just hate the president\", \"they're going to destroy this country!\", 'they will save the country!'], 'description': None, 'function_schema': None, 'llm': None}, {'name': 'chitchat', 'utterances': [\"how's the weather today?\", 'how are things going?', 'lovely weather today', 'the weather is horrendous', \"let's go to the chippy\"], 'description': None, 'function_schema': None, 'llm': None}]}\n"
]
}
],
@@ -204,15 +197,15 @@
},
{
"cell_type": "code",
- "execution_count": 7,
+ "execution_count": 5,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
- "\u001b[32m2023-12-28 19:17:08 INFO semantic_router.utils.logger Loading route config from layer.json\u001b[0m\n",
- "\u001b[32m2023-12-28 19:17:08 INFO semantic_router.utils.logger Initializing RouteLayer\u001b[0m\n"
+ "\u001b[32m2024-01-07 18:10:14 INFO semantic_router.utils.logger Loading route config from layer.json\u001b[0m\n",
+ "\u001b[32m2024-01-07 18:10:14 INFO semantic_router.utils.logger Initializing RouteLayer\u001b[0m\n"
]
}
],
@@ -229,16 +222,16 @@
},
{
"cell_type": "code",
- "execution_count": 10,
+ "execution_count": 6,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
- "layer.encoder.type='cohere'\n",
- "layer.encoder.name='embed-english-v3.0'\n",
- "layer.routes=[Route(name='politics', utterances=[\"isn't politics the best thing ever\", \"why don't you tell me about your political opinions\", \"don't you just love the presidentdon't you just hate the president\", \"they're going to destroy this country!\", 'they will save the country!'], description=None, function_schema=None), Route(name='chitchat', utterances=[\"how's the weather today?\", 'how are things going?', 'lovely weather today', 'the weather is horrendous', \"let's go to the chippy\"], description=None, function_schema=None)]\n"
+ "rl.encoder.type='cohere'\n",
+ "rl.encoder.name='embed-english-v3.0'\n",
+ "rl.routes=[Route(name='politics', utterances=[\"isn't politics the best thing ever\", \"why don't you tell me about your political opinions\", \"don't you just love the presidentdon't you just hate the president\", \"they're going to destroy this country!\", 'they will save the country!'], description=None, function_schema=None, llm=None), Route(name='chitchat', utterances=[\"how's the weather today?\", 'how are things going?', 'lovely weather today', 'the weather is horrendous', \"let's go to the chippy\"], description=None, function_schema=None, llm=None)]\n"
]
}
],
diff --git a/docs/02-dynamic-routes.ipynb b/docs/02-dynamic-routes.ipynb
index c695838e..70c5eced 100644
--- a/docs/02-dynamic-routes.ipynb
+++ b/docs/02-dynamic-routes.ipynb
@@ -108,7 +108,7 @@
"name": "stderr",
"output_type": "stream",
"text": [
- "\u001b[32m2024-01-07 15:23:12 INFO semantic_router.utils.logger Initializing RouteLayer\u001b[0m\n"
+ "\u001b[32m2024-01-07 18:11:05 INFO semantic_router.utils.logger Initializing RouteLayer\u001b[0m\n"
]
}
],
@@ -204,7 +204,7 @@
{
"data": {
"text/plain": [
- "'09:23'"
+ "'12:11'"
]
},
"execution_count": 5,
@@ -289,7 +289,7 @@
"name": "stderr",
"output_type": "stream",
"text": [
- "\u001b[32m2024-01-07 15:23:16 INFO semantic_router.utils.logger Adding `get_time` route\u001b[0m\n"
+ "\u001b[32m2024-01-07 18:11:20 INFO semantic_router.utils.logger Adding `get_time` route\u001b[0m\n"
]
}
],
@@ -313,8 +313,8 @@
"name": "stderr",
"output_type": "stream",
"text": [
- "\u001b[33m2024-01-07 15:23:17 WARNING semantic_router.utils.logger No LLM provided for dynamic route, will use OpenAI LLM default. Ensure API key is set in OPENAI_API_KEY environment variable.\u001b[0m\n",
- "\u001b[32m2024-01-07 15:23:17 INFO semantic_router.utils.logger Extracting function input...\u001b[0m\n"
+ "\u001b[33m2024-01-07 18:11:23 WARNING semantic_router.utils.logger No LLM provided for dynamic route, will use OpenAI LLM default. Ensure API key is set in OPENAI_API_KEY environment variable.\u001b[0m\n",
+ "\u001b[32m2024-01-07 18:11:23 INFO semantic_router.utils.logger Extracting function input...\u001b[0m\n"
]
},
{
@@ -337,6 +337,13 @@
"rl(\"what is the time in new york city?\")"
]
},
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Our dynamic route provides both the route itself _and_ the input parameters required to use the route."
+ ]
+ },
{
"cell_type": "markdown",
"metadata": {},
diff --git a/docs/03-basic-langchain-agent.ipynb b/docs/03-basic-langchain-agent.ipynb
index 3bfd3ba5..c8c48b3d 100644
--- a/docs/03-basic-langchain-agent.ipynb
+++ b/docs/03-basic-langchain-agent.ipynb
@@ -78,7 +78,7 @@
],
"source": [
"!pip install -qU \\\n",
- " semantic-router==0.0.14 \\\n",
+ " semantic-router==0.0.15 \\\n",
" langchain==0.0.352 \\\n",
" openai==1.6.1"
]
@@ -458,7 +458,7 @@
"from langchain.chat_models import ChatOpenAI\n",
"from langchain.memory import ConversationBufferWindowMemory\n",
"\n",
- "llm = ChatOpenAI(openai_api_key=\"\", model=\"gpt-3.5-turbo-1106\")\n",
+ "llm = ChatOpenAI(model=\"gpt-3.5-turbo-1106\")\n",
"\n",
"memory1 = ConversationBufferWindowMemory(\n",
" memory_key=\"chat_history\", k=5, return_messages=True, output_key=\"output\"\n",
From cb0b406808c449e9bdfce574e67e6ea3b64b77e6 Mon Sep 17 00:00:00 2001
From: James Briggs <35938317+jamescalam@users.noreply.github.com>
Date: Sun, 7 Jan 2024 18:20:39 +0100
Subject: [PATCH 23/24] updated to use py3.10 or higher
---
poetry.lock | 22 ++++++++++------------
pyproject.toml | 2 +-
2 files changed, 11 insertions(+), 13 deletions(-)
diff --git a/poetry.lock b/poetry.lock
index 815226ea..d0f80d03 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1069,13 +1069,13 @@ test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio"
[[package]]
name = "ipython"
-version = "8.18.1"
+version = "8.19.0"
description = "IPython: Productive Interactive Computing"
optional = false
-python-versions = ">=3.9"
+python-versions = ">=3.10"
files = [
- {file = "ipython-8.18.1-py3-none-any.whl", hash = "sha256:e8267419d72d81955ec1177f8a29aaa90ac80ad647499201119e2f05e99aa397"},
- {file = "ipython-8.18.1.tar.gz", hash = "sha256:ca6f079bb33457c66e233e4580ebfc4128855b4cf6370dddd73842a9563e8a27"},
+ {file = "ipython-8.19.0-py3-none-any.whl", hash = "sha256:2f55d59370f59d0d2b2212109fe0e6035cfea436b1c0e6150ad2244746272ec5"},
+ {file = "ipython-8.19.0.tar.gz", hash = "sha256:ac4da4ecf0042fb4e0ce57c60430c2db3c719fa8bdf92f8631d6bd8a5785d1f0"},
]
[package.dependencies]
@@ -1089,20 +1089,19 @@ prompt-toolkit = ">=3.0.41,<3.1.0"
pygments = ">=2.4.0"
stack-data = "*"
traitlets = ">=5"
-typing-extensions = {version = "*", markers = "python_version < \"3.10\""}
[package.extras]
-all = ["black", "curio", "docrepr", "exceptiongroup", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.22)", "pandas", "pickleshare", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio (<0.22)", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "trio", "typing-extensions"]
+all = ["black", "curio", "docrepr", "exceptiongroup", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.23)", "pandas", "pickleshare", "pytest", "pytest-asyncio (<0.22)", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "trio", "typing-extensions"]
black = ["black"]
-doc = ["docrepr", "exceptiongroup", "ipykernel", "matplotlib", "pickleshare", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio (<0.22)", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "typing-extensions"]
+doc = ["docrepr", "exceptiongroup", "ipykernel", "matplotlib", "pickleshare", "pytest", "pytest-asyncio (<0.22)", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "typing-extensions"]
kernel = ["ipykernel"]
nbconvert = ["nbconvert"]
nbformat = ["nbformat"]
notebook = ["ipywidgets", "notebook"]
parallel = ["ipyparallel"]
qtconsole = ["qtconsole"]
-test = ["pickleshare", "pytest (<7.1)", "pytest-asyncio (<0.22)", "testpath"]
-test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.22)", "pandas", "pickleshare", "pytest (<7.1)", "pytest-asyncio (<0.22)", "testpath", "trio"]
+test = ["pickleshare", "pytest", "pytest-asyncio (<0.22)", "testpath"]
+test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.23)", "pandas", "pickleshare", "pytest", "pytest-asyncio (<0.22)", "testpath", "trio"]
[[package]]
name = "jedi"
@@ -1146,7 +1145,6 @@ files = [
]
[package.dependencies]
-importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""}
jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0"
python-dateutil = ">=2.8.2"
pyzmq = ">=23.0"
@@ -2705,5 +2703,5 @@ hybrid = ["pinecone-text"]
[metadata]
lock-version = "2.0"
-python-versions = ">=3.9,<3.12"
-content-hash = "64f0fef330108fe47110c203bf96403e8d986f8b751f6eed1abfec3ce57539a6"
+python-versions = ">=3.10,<3.12"
+content-hash = "42a58d13a0f9d9a1bca34b4c29cafee6a5c884b80d47848cb7c552ad91e54743"
diff --git a/pyproject.toml b/pyproject.toml
index b24ed4f3..3a37674d 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -13,7 +13,7 @@ readme = "README.md"
packages = [{include = "semantic_router"}]
[tool.poetry.dependencies]
-python = ">=3.9,<3.12"
+python = ">=3.10,<3.12"
pydantic = "^1.8.2"
openai = "^1.3.9"
cohere = "^4.32"
From 1595d64540279f7439c724ae1e9c4b7fb7c9658e Mon Sep 17 00:00:00 2001
From: James Briggs <35938317+jamescalam@users.noreply.github.com>
Date: Sun, 7 Jan 2024 23:46:25 +0100
Subject: [PATCH 24/24] update pyproject
---
pyproject.toml | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/pyproject.toml b/pyproject.toml
index 3a37674d..5b0e1f30 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -7,7 +7,8 @@ authors = [
"Siraj Aizlewood ",
"Simonas Jakubonis ",
"Luca Mannini ",
- "Bogdan Buduroiu "
+ "Bogdan Buduroiu ",
+ "Ismail Ashraq "
]
readme = "README.md"
packages = [{include = "semantic_router"}]