From 7a36cc9ebf9ad320b98c003236deaeff6237bdeb Mon Sep 17 00:00:00 2001
From: William Espegren <131612909+WilliamEspegren@users.noreply.github.com>
Date: Thu, 8 Aug 2024 14:53:05 +0200
Subject: [PATCH 01/38] feat: add Spider Web Scraper & Crawler (#2439)
* spider files
* rebuild required
* add spider-client here
* Feat: Spider Web Crawler & Scraper
* Feat: spider integration
* new input not working
* [autofix.ci] apply automated fixes
* [autofix.ci] apply automated fixes (attempt 2/3)
* fix: add outputs and configure build method
* style: run ruff
* Refactor SpiderTool to use 'crawl' instead of 'build' for generating Markdown content
* chore: add type ignore
* chore: new lock
* chore: Update mem0ai dependency to version 0.0.5
---------
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: Gabriel Luiz Freitas Almeida
---
poetry.lock | 143 +-
pyproject.toml | 1 +
.../langchain_utilities/spider_constants.py | 1 +
.../components/embeddings/AstraVectorize.py | 15 +-
.../langchain_utilities/SpiderTool.py | 121 +
src/backend/base/langflow/main.py | 2 +-
src/backend/base/poetry.lock | 2333 ++++++++++++++++-
src/backend/base/pyproject.toml | 4 +
src/frontend/src/icons/Spider/SpiderIcon.jsx | 18 +
src/frontend/src/icons/Spider/index.tsx | 9 +
src/frontend/src/icons/Spider/spider_logo.svg | 1 +
src/frontend/src/utils/styleUtils.ts | 2 +
12 files changed, 2581 insertions(+), 69 deletions(-)
create mode 100644 src/backend/base/langflow/base/langchain_utilities/spider_constants.py
create mode 100644 src/backend/base/langflow/components/langchain_utilities/SpiderTool.py
create mode 100644 src/frontend/src/icons/Spider/SpiderIcon.jsx
create mode 100644 src/frontend/src/icons/Spider/index.tsx
create mode 100644 src/frontend/src/icons/Spider/spider_logo.svg
diff --git a/poetry.lock b/poetry.lock
index 60ce75152319..8ed44e90b426 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -2124,13 +2124,13 @@ idna = ">=2.0.0"
[[package]]
name = "embedchain"
-version = "0.1.119"
+version = "0.1.118"
description = "Simplest open source retrieval (RAG) framework"
optional = false
python-versions = "<=3.13,>=3.9"
files = [
- {file = "embedchain-0.1.119-py3-none-any.whl", hash = "sha256:8ec3e7f139939fa1dc8fda898f8d8d9d31a5abfe08e184b607e38733d863d606"},
- {file = "embedchain-0.1.119.tar.gz", hash = "sha256:0f4f45e092b7f3192ea6fe82575726532573b1231d7af6c22edc695b701b4223"},
+ {file = "embedchain-0.1.118-py3-none-any.whl", hash = "sha256:38ead471df9d9234bf42e6f7a32cab26431d50d6f2f894f18a6cabc0b02bf31a"},
+ {file = "embedchain-0.1.118.tar.gz", hash = "sha256:1fa1e799882a1dc4e63af344595b043f1c1f30fbd59461b6660b1934b85a1e4b"},
]
[package.dependencies]
@@ -2144,7 +2144,7 @@ langchain = ">0.2,<=0.3"
langchain-cohere = ">=0.1.4,<0.2.0"
langchain-community = ">=0.2.6,<0.3.0"
langchain-openai = ">=0.1.7,<0.2.0"
-mem0ai = ">=0.0.9,<0.0.10"
+mem0ai = ">=0.0.5,<0.0.6"
openai = ">=1.1.1"
posthog = ">=3.0.2,<4.0.0"
pypdf = ">=4.0.1,<5.0.0"
@@ -2211,6 +2211,20 @@ django = ["dj-database-url", "dj-email-url", "django-cache-url"]
lint = ["flake8 (==4.0.1)", "flake8-bugbear (==21.9.2)", "mypy (==0.910)", "pre-commit (>=2.4,<3.0)"]
tests = ["dj-database-url", "dj-email-url", "django-cache-url", "pytest"]
+[[package]]
+name = "eval-type-backport"
+version = "0.2.0"
+description = "Like `typing._eval_type`, but lets older Python versions use newer typing features."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "eval_type_backport-0.2.0-py3-none-any.whl", hash = "sha256:ac2f73d30d40c5a30a80b8739a789d6bb5e49fdffa66d7912667e2015d9c9933"},
+ {file = "eval_type_backport-0.2.0.tar.gz", hash = "sha256:68796cfbc7371ebf923f03bdf7bef415f3ec098aeced24e054b253a0e78f7b37"},
+]
+
+[package.extras]
+tests = ["pytest"]
+
[[package]]
name = "exceptiongroup"
version = "1.2.2"
@@ -4735,19 +4749,19 @@ tests = ["aiohttp", "duckdb", "pandas (>=1.4)", "polars (>=0.19)", "pytest", "py
[[package]]
name = "langchain"
-version = "0.2.11"
+version = "0.2.10"
description = "Building applications with LLMs through composability"
optional = false
python-versions = "<4.0,>=3.8.1"
files = [
- {file = "langchain-0.2.11-py3-none-any.whl", hash = "sha256:5a7a8b4918f3d3bebce9b4f23b92d050699e6f7fb97591e8941177cf07a260a2"},
- {file = "langchain-0.2.11.tar.gz", hash = "sha256:d7a9e4165f02dca0bd78addbc2319d5b9286b5d37c51d784124102b57e9fd297"},
+ {file = "langchain-0.2.10-py3-none-any.whl", hash = "sha256:b4fb58c7faf4f4999cfe3325474979a7121a1737dd101655a723a1d957ef0617"},
+ {file = "langchain-0.2.10.tar.gz", hash = "sha256:1f861c1b59ac9c91b02bb0fa58d3adad1c1d0686636872b5b357bbce3ce41d06"},
]
[package.dependencies]
aiohttp = ">=3.8.3,<4.0.0"
async-timeout = {version = ">=4.0.0,<5.0.0", markers = "python_version < \"3.11\""}
-langchain-core = ">=0.2.23,<0.3.0"
+langchain-core = ">=0.2.22,<0.3.0"
langchain-text-splitters = ">=0.2.0,<0.3.0"
langsmith = ">=0.1.17,<0.2.0"
numpy = [
@@ -4851,20 +4865,20 @@ langchain-community = ["langchain-community (>=0.2.4)"]
[[package]]
name = "langchain-community"
-version = "0.2.10"
+version = "0.2.9"
description = "Community contributed LangChain integrations."
optional = false
python-versions = "<4.0,>=3.8.1"
files = [
- {file = "langchain_community-0.2.10-py3-none-any.whl", hash = "sha256:9f4d1b5ab7f0b0a704f538e26e50fce45a461da6d2bf6b7b636d24f22fbc088a"},
- {file = "langchain_community-0.2.10.tar.gz", hash = "sha256:3a0404bad4bd07d6f86affdb62fb3d080a456c66191754d586a409d9d6024d62"},
+ {file = "langchain_community-0.2.9-py3-none-any.whl", hash = "sha256:b51d3adf9346a1161c1098917585b9e303cf24e2f5c71f5d232a0504edada5f2"},
+ {file = "langchain_community-0.2.9.tar.gz", hash = "sha256:1e7c180232916cbe35fe00509680dd1f805e32d7c87b5e80b3a9ec8754ecae37"},
]
[package.dependencies]
aiohttp = ">=3.8.3,<4.0.0"
dataclasses-json = ">=0.5.7,<0.7"
langchain = ">=0.2.9,<0.3.0"
-langchain-core = ">=0.2.23,<0.3.0"
+langchain-core = ">=0.2.22,<0.3.0"
langsmith = ">=0.1.0,<0.2.0"
numpy = [
{version = ">=1,<2", markers = "python_version < \"3.12\""},
@@ -4877,13 +4891,13 @@ tenacity = ">=8.1.0,<8.4.0 || >8.4.0,<9.0.0"
[[package]]
name = "langchain-core"
-version = "0.2.24"
+version = "0.2.29"
description = "Building applications with LLMs through composability"
optional = false
python-versions = "<4.0,>=3.8.1"
files = [
- {file = "langchain_core-0.2.24-py3-none-any.whl", hash = "sha256:9444fc082d21ef075d925590a684a73fe1f9688a3d90087580ec929751be55e7"},
- {file = "langchain_core-0.2.24.tar.gz", hash = "sha256:f2e3fa200b124e8c45d270da9bf836bed9c09532612c96ff3225e59b9a232f5a"},
+ {file = "langchain_core-0.2.29-py3-none-any.whl", hash = "sha256:846c04a3bb72e409a9b928e0eb3ea1762e1473f2c4fb6df2596fbd7b3ab75973"},
+ {file = "langchain_core-0.2.29.tar.gz", hash = "sha256:491324745a7afee5a7b285c3904edd9dd0c6efa7daf26b92fec6e84a2d2f5d10"},
]
[package.dependencies]
@@ -4896,6 +4910,7 @@ pydantic = [
]
PyYAML = ">=5.3"
tenacity = ">=8.1.0,<8.4.0 || >8.4.0,<9.0.0"
+typing-extensions = ">=4.7"
[[package]]
name = "langchain-experimental"
@@ -5181,6 +5196,7 @@ asyncer = "^0.0.5"
bcrypt = "4.0.1"
cachetools = "^5.3.1"
chardet = "^5.2.0"
+crewai = "^0.36.0"
cryptography = "^42.0.5"
docstring-parser = "^0.16"
duckdb = "^1.0.0"
@@ -5220,6 +5236,7 @@ python-multipart = "^0.0.7"
rich = "^13.7.0"
sentry-sdk = {version = "^2.5.1", extras = ["fastapi", "loguru"]}
setuptools = ">=70"
+spider-client = "^0.0.27"
sqlmodel = "^0.0.18"
typer = "^0.12.0"
uncurl = "^0.0.11"
@@ -5281,13 +5298,13 @@ requests = ">=2,<3"
[[package]]
name = "langwatch"
-version = "0.1.16"
+version = "0.1.18"
description = "Python SDK for LangWatch for monitoring your LLMs"
optional = false
-python-versions = "<4.0,>=3.9"
+python-versions = "<3.13,>=3.9"
files = [
- {file = "langwatch-0.1.16-py3-none-any.whl", hash = "sha256:61ccb1f1efbffc1b2e8bbd3b9c7ed53440d3a66b9fd741f3d1a30d31d0b936f7"},
- {file = "langwatch-0.1.16.tar.gz", hash = "sha256:d8c453a4dcdb500bb55df19ef5fa2c43d450236d84e47fd72348fb3184cc3f6a"},
+ {file = "langwatch-0.1.18-py3-none-any.whl", hash = "sha256:73e469fee96d1bebfc54f27b8413d2f108124139ca2df7510063e3a1ea2dc3c1"},
+ {file = "langwatch-0.1.18.tar.gz", hash = "sha256:a0e6dbfedca02502bf5abafd7cf9ef8d1bffd8c0d6bac0d932d1209c97752c0e"},
]
[package.dependencies]
@@ -5309,13 +5326,13 @@ openai = ["openai (>=1.3.7,<2.0.0)"]
[[package]]
name = "litellm"
-version = "1.42.5"
+version = "1.41.25"
description = "Library to easily interface with LLM API providers"
optional = false
python-versions = "!=2.7.*,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,!=3.7.*,>=3.8"
files = [
- {file = "litellm-1.42.5-py3-none-any.whl", hash = "sha256:c8c2f9e40b5aa1c2dcfcac9adb854b8ac22ce2112825d742d8fce516d26e9a65"},
- {file = "litellm-1.42.5.tar.gz", hash = "sha256:64ea24040751009e70e816e9340c5c82717d9a309f4480e5ece9f3f67328e04e"},
+ {file = "litellm-1.41.25-py3-none-any.whl", hash = "sha256:80ef35f141402be4ef106a9c720169f6f613ff47df717ab3d1b8ba845c2a5b38"},
+ {file = "litellm-1.41.25.tar.gz", hash = "sha256:f6f000b8e666b51914436c26659d4a91d67b350bcc44e47b3837d3b8f0e1640d"},
]
[package.dependencies]
@@ -5736,20 +5753,23 @@ files = [
[[package]]
name = "mem0ai"
-version = "0.0.9"
+version = "0.0.5"
description = "Long-term memory for AI Agents"
optional = false
python-versions = "<4.0,>=3.8"
files = [
- {file = "mem0ai-0.0.9-py3-none-any.whl", hash = "sha256:d4de435729af4fd3d597d022ffb2af89a0630d6c3b4769792bbe27d2ce816858"},
- {file = "mem0ai-0.0.9.tar.gz", hash = "sha256:e4374d5d04aa3f543cd3325f700e4b62f5358ae1c6fa5c44b2ff790c10c4e5f1"},
+ {file = "mem0ai-0.0.5-py3-none-any.whl", hash = "sha256:6f6e5356fd522adf0510322cd581476ea456fd7ccefca11b5ac050e9a6f00f36"},
+ {file = "mem0ai-0.0.5.tar.gz", hash = "sha256:f2ac35d15e4e620becb8d06b8ebeb1ffa85fac0b7cb2d3138056babec48dd5dd"},
]
[package.dependencies]
+boto3 = ">=1.34.144,<2.0.0"
+groq = ">=0.9.0,<0.10.0"
openai = ">=1.33.0,<2.0.0"
posthog = ">=3.5.0,<4.0.0"
pydantic = ">=2.7.3,<3.0.0"
qdrant-client = ">=1.9.1,<2.0.0"
+together = ">=1.2.1,<2.0.0"
[[package]]
name = "metal-sdk"
@@ -6566,13 +6586,13 @@ sympy = "*"
[[package]]
name = "openai"
-version = "1.37.1"
+version = "1.37.0"
description = "The official Python library for the openai API"
optional = false
python-versions = ">=3.7.1"
files = [
- {file = "openai-1.37.1-py3-none-any.whl", hash = "sha256:9a6adda0d6ae8fce02d235c5671c399cfa40d6a281b3628914c7ebf244888ee3"},
- {file = "openai-1.37.1.tar.gz", hash = "sha256:faf87206785a6b5d9e34555d6a3242482a6852bc802e453e2a891f68ee04ce55"},
+ {file = "openai-1.37.0-py3-none-any.whl", hash = "sha256:a903245c0ecf622f2830024acdaa78683c70abb8e9d37a497b851670864c9f73"},
+ {file = "openai-1.37.0.tar.gz", hash = "sha256:dc8197fc40ab9d431777b6620d962cc49f4544ffc3011f03ce0a805e6eb54adb"},
]
[package.dependencies]
@@ -8316,13 +8336,13 @@ files = [
[[package]]
name = "pytest"
-version = "8.3.2"
+version = "8.3.1"
description = "pytest: simple powerful testing with Python"
optional = false
python-versions = ">=3.8"
files = [
- {file = "pytest-8.3.2-py3-none-any.whl", hash = "sha256:4ba08f9ae7dcf84ded419494d229b48d0903ea6407b030eaec46df5e6a73bba5"},
- {file = "pytest-8.3.2.tar.gz", hash = "sha256:c132345d12ce551242c87269de812483f5bcc87cdbb4722e48487ba194f9fdce"},
+ {file = "pytest-8.3.1-py3-none-any.whl", hash = "sha256:e9600ccf4f563976e2c99fa02c7624ab938296551f280835ee6516df8bc4ae8c"},
+ {file = "pytest-8.3.1.tar.gz", hash = "sha256:7e8e5c5abd6e93cb1cc151f23e57adc31fcf8cfd2a3ff2da63e23f732de35db6"},
]
[package.dependencies]
@@ -9729,13 +9749,13 @@ tornado = ["tornado (>=6)"]
[[package]]
name = "setuptools"
-version = "72.1.0"
+version = "71.1.0"
description = "Easily download, build, install, upgrade, and uninstall Python packages"
optional = false
python-versions = ">=3.8"
files = [
- {file = "setuptools-72.1.0-py3-none-any.whl", hash = "sha256:5a03e1860cf56bb6ef48ce186b0e557fdba433237481a9a625176c2831be15d1"},
- {file = "setuptools-72.1.0.tar.gz", hash = "sha256:8d243eff56d095e5817f796ede6ae32941278f542e0f941867cc05ae52b162ec"},
+ {file = "setuptools-71.1.0-py3-none-any.whl", hash = "sha256:33874fdc59b3188304b2e7c80d9029097ea31627180896fb549c578ceb8a0855"},
+ {file = "setuptools-71.1.0.tar.gz", hash = "sha256:032d42ee9fb536e33087fb66cac5f840eb9391ed05637b3f2a76a7c8fb477936"},
]
[package.extras]
@@ -9861,6 +9881,19 @@ files = [
{file = "soupsieve-2.5.tar.gz", hash = "sha256:5663d5a7b3bfaeee0bc4372e7fc48f9cff4940b3eec54a6451cc5299f1097690"},
]
+[[package]]
+name = "spider-client"
+version = "0.0.27"
+description = "Python SDK for Spider Cloud API"
+optional = false
+python-versions = "*"
+files = [
+ {file = "spider-client-0.0.27.tar.gz", hash = "sha256:c3feaf5c491bd9a6c509efa0c8789452497073d9f68e70fc90e7626a6a8365aa"},
+]
+
+[package.dependencies]
+requests = "*"
+
[[package]]
name = "sqlalchemy"
version = "2.0.31"
@@ -10204,6 +10237,34 @@ requests = ">=2.26.0"
[package.extras]
blobfile = ["blobfile (>=2)"]
+[[package]]
+name = "together"
+version = "1.2.6"
+description = "Python client for Together's Cloud Platform!"
+optional = false
+python-versions = "<4.0,>=3.8"
+files = [
+ {file = "together-1.2.6-py3-none-any.whl", hash = "sha256:b3ccf467919edcf3a3927dcf7aad6dee95c4a276ced7bff523a2b361fc766d56"},
+ {file = "together-1.2.6.tar.gz", hash = "sha256:f79f383d258fc964809ebe60870c94f2104c15b34451c5b4808bd11d956a1702"},
+]
+
+[package.dependencies]
+aiohttp = ">=3.9.3,<4.0.0"
+click = ">=8.1.7,<9.0.0"
+eval-type-backport = ">=0.1.3,<0.3.0"
+filelock = ">=3.13.1,<4.0.0"
+numpy = [
+ {version = ">=1.23.5", markers = "python_version < \"3.12\""},
+ {version = ">=1.26.0", markers = "python_version >= \"3.12\""},
+]
+pillow = ">=10.3.0,<11.0.0"
+pyarrow = ">=10.0.1"
+pydantic = ">=2.6.3,<3.0.0"
+requests = ">=2.31.0,<3.0.0"
+tabulate = ">=0.9.0,<0.10.0"
+tqdm = ">=4.66.2,<5.0.0"
+typer = ">=0.9,<0.13"
+
[[package]]
name = "tokenizers"
version = "0.19.1"
@@ -10761,13 +10822,13 @@ urllib3 = ">=2"
[[package]]
name = "types-setuptools"
-version = "71.1.0.20240726"
+version = "71.0.0.20240722"
description = "Typing stubs for setuptools"
optional = false
python-versions = ">=3.8"
files = [
- {file = "types-setuptools-71.1.0.20240726.tar.gz", hash = "sha256:85ba28e9461bb1be86ebba4db0f1c2408f2b11115b1966334ea9dc464e29303e"},
- {file = "types_setuptools-71.1.0.20240726-py3-none-any.whl", hash = "sha256:a7775376f36e0ff09bcad236bf265777590a66b11623e48c20bfc30f1444ea36"},
+ {file = "types-setuptools-71.0.0.20240722.tar.gz", hash = "sha256:8f1fd5281945ed8f5a896f05dd50bc31917d6e2487ff9508f4bac522d13ad395"},
+ {file = "types_setuptools-71.0.0.20240722-py3-none-any.whl", hash = "sha256:04a383bd1a2dcdb6a85397516ce2d7b46617d89f1d758f686d0d9069943d9811"},
]
[[package]]
@@ -11331,13 +11392,13 @@ files = [
[[package]]
name = "weaviate-client"
-version = "4.7.1"
+version = "4.6.7"
description = "A python native Weaviate client"
optional = false
python-versions = ">=3.8"
files = [
- {file = "weaviate_client-4.7.1-py3-none-any.whl", hash = "sha256:342f5c67b126cee4dc3a60467ad1ae74971cd5614e27af6fb13d687a345352c4"},
- {file = "weaviate_client-4.7.1.tar.gz", hash = "sha256:af99ac4e53613d2ff5b797372e95d004d0c8a1dd10a7f592068bcb423a30af30"},
+ {file = "weaviate_client-4.6.7-py3-none-any.whl", hash = "sha256:8793de35264cab33a84fe8cb8c422a257fe4d8334657aaddd8ead853da3fb34a"},
+ {file = "weaviate_client-4.6.7.tar.gz", hash = "sha256:202b32e160536f5f44e4a635d30c3d3a0790b1a7ff997f5e243919d1ac5b68a1"},
]
[package.dependencies]
@@ -12012,4 +12073,4 @@ local = ["ctransformers", "llama-cpp-python", "sentence-transformers"]
[metadata]
lock-version = "2.0"
python-versions = ">=3.10,<3.13"
-content-hash = "b0b58a9883d3eacc262701a7938ff839365386ba5e155dd2582986501d2b5d7c"
+content-hash = "6554abddd0322e28c531774986c897c560099a974f985aa48b21b36b12e2be52"
diff --git a/pyproject.toml b/pyproject.toml
index f29810c7a88c..337431790852 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -106,6 +106,7 @@ langchain-google-community = "1.0.7"
wolframalpha = "^5.1.3"
astra-assistants = "^2.0.15"
composio-langchain = "^0.3.28"
+spider-client = "^0.0.27"
[tool.poetry.group.dev.dependencies]
diff --git a/src/backend/base/langflow/base/langchain_utilities/spider_constants.py b/src/backend/base/langflow/base/langchain_utilities/spider_constants.py
new file mode 100644
index 000000000000..8630e942e020
--- /dev/null
+++ b/src/backend/base/langflow/base/langchain_utilities/spider_constants.py
@@ -0,0 +1 @@
+MODES = ["scrape", "crawl"]
diff --git a/src/backend/base/langflow/components/embeddings/AstraVectorize.py b/src/backend/base/langflow/components/embeddings/AstraVectorize.py
index d90bc051c8c3..4de49eb758c9 100644
--- a/src/backend/base/langflow/components/embeddings/AstraVectorize.py
+++ b/src/backend/base/langflow/components/embeddings/AstraVectorize.py
@@ -1,6 +1,7 @@
from typing import Any
+
from langflow.custom import Component
-from langflow.inputs.inputs import DictInput, SecretStrInput, MessageTextInput, DropdownInput
+from langflow.inputs.inputs import DictInput, DropdownInput, MessageTextInput, SecretStrInput
from langflow.template.field.base import Output
@@ -60,13 +61,19 @@ class AstraVectorizeComponent(Component):
name="model_name",
display_name="Model Name",
info=f"The embedding model to use for the selected provider. Each provider has a different set of models "
- f"available (https://docs.datastax.com/en/astra-db-serverless/databases/embedding-generation.html):\n\n{VECTORIZE_MODELS_STR}",
+ f"available (full list at https://docs.datastax.com/en/astra-db-serverless/databases/embedding-generation.html):\n\n{VECTORIZE_MODELS_STR}",
required=True,
),
MessageTextInput(
name="api_key_name",
- display_name="Provider API Key Name",
- info="The name of the embeddings provider API key stored on Astra.",
+ display_name="API Key name",
+ info="The name of the embeddings provider API key stored on Astra. If set, it will override the 'ProviderKey' in the authentication parameters.",
+ ),
+ DictInput(
+ name="authentication",
+ display_name="Authentication parameters",
+ is_list=True,
+ advanced=True,
),
SecretStrInput(
name="provider_api_key",
diff --git a/src/backend/base/langflow/components/langchain_utilities/SpiderTool.py b/src/backend/base/langflow/components/langchain_utilities/SpiderTool.py
new file mode 100644
index 000000000000..ee4b732735c5
--- /dev/null
+++ b/src/backend/base/langflow/components/langchain_utilities/SpiderTool.py
@@ -0,0 +1,121 @@
+from spider.spider import Spider # type: ignore
+
+from langflow.base.langchain_utilities.spider_constants import MODES
+from langflow.custom import Component
+from langflow.io import BoolInput, DictInput, DropdownInput, IntInput, Output, SecretStrInput, StrInput
+from langflow.schema import Data
+
+
+class SpiderTool(Component):
+ display_name: str = "Spider Web Crawler & Scraper"
+ description: str = "Spider API for web crawling and scraping."
+ output_types: list[str] = ["Document"]
+ documentation: str = "https://spider.cloud/docs/api"
+
+ inputs = [
+ SecretStrInput(
+ name="spider_api_key",
+ display_name="Spider API Key",
+ required=True,
+ password=True,
+ info="The Spider API Key, get it from https://spider.cloud",
+ ),
+ StrInput(
+ name="url",
+ display_name="URL",
+ required=True,
+ info="The URL to scrape or crawl",
+ ),
+ DropdownInput(
+ name="mode",
+ display_name="Mode",
+ required=True,
+ options=MODES,
+ value=MODES[0],
+ info="The mode of operation: scrape or crawl",
+ ),
+ IntInput(
+ name="limit",
+ display_name="Limit",
+ info="The maximum amount of pages allowed to crawl per website. Set to 0 to crawl all pages.",
+ advanced=True,
+ ),
+ IntInput(
+ name="depth",
+ display_name="Depth",
+ info="The crawl limit for maximum depth. If 0, no limit will be applied.",
+ advanced=True,
+ ),
+ StrInput(
+ name="blacklist",
+ display_name="Blacklist",
+ info="Blacklist paths that you do not want to crawl. Use Regex patterns.",
+ advanced=True,
+ ),
+ StrInput(
+ name="whitelist",
+ display_name="Whitelist",
+ info="Whitelist paths that you want to crawl, ignoring all other routes. Use Regex patterns.",
+ advanced=True,
+ ),
+ BoolInput(
+ name="use_readability",
+ display_name="Use Readability",
+ info="Use readability to pre-process the content for reading.",
+ advanced=True,
+ ),
+ IntInput(
+ name="request_timeout",
+ display_name="Request Timeout",
+ info="Timeout for the request in seconds.",
+ advanced=True,
+ ),
+ BoolInput(
+ name="metadata",
+ display_name="Metadata",
+ info="Include metadata in the response.",
+ advanced=True,
+ ),
+ DictInput(
+ name="params",
+ display_name="Additional Parameters",
+ info="Additional parameters to pass to the API. If provided, other inputs will be ignored.",
+ ),
+ ]
+
+ outputs = [
+ Output(display_name="Markdown", name="content", method="crawl"),
+ ]
+
+ def crawl(self) -> list[Data]:
+ if self.params:
+ parameters = self.params.data
+ else:
+ parameters = {
+ "limit": self.limit,
+ "depth": self.depth,
+ "blacklist": self.blacklist,
+ "whitelist": self.whitelist,
+ "use_readability": self.use_readability,
+ "request_timeout": self.request_timeout,
+ "metadata": self.metadata,
+ "return_format": "markdown",
+ }
+
+ app = Spider(api_key=self.spider_api_key)
+ try:
+ if self.mode == "scrape":
+ parameters["limit"] = 1
+ result = app.scrape_url(self.url, parameters)
+ elif self.mode == "crawl":
+ result = app.crawl_url(self.url, parameters)
+ else:
+ raise ValueError(f"Invalid mode: {self.mode}. Must be 'scrape' or 'crawl'.")
+ except Exception as e:
+ raise Exception(f"Error: {str(e)}")
+
+ records = []
+
+ for record in result:
+ records.append(Data(data={"content": record["content"], "url": record["url"]}))
+ return records
diff --git a/src/backend/base/langflow/main.py b/src/backend/base/langflow/main.py
index ca723ea299da..e4a2611f8eec 100644
--- a/src/backend/base/langflow/main.py
+++ b/src/backend/base/langflow/main.py
@@ -157,7 +157,7 @@ async def flatten_query_string_lists(request: Request, call_next):
raise ValueError(f"Invalid port number {prome_port_str}")
if settings.prometheus_enabled:
- from prometheus_client import start_http_server
+ from prometheus_client import start_http_server # type: ignore
start_http_server(settings.prometheus_port)
diff --git a/src/backend/base/poetry.lock b/src/backend/base/poetry.lock
index 0ac1f4607ff3..e52b0c5b7754 100644
--- a/src/backend/base/poetry.lock
+++ b/src/backend/base/poetry.lock
@@ -185,6 +185,17 @@ doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphin
test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"]
trio = ["trio (>=0.23)"]
+[[package]]
+name = "appdirs"
+version = "1.4.4"
+description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
+optional = false
+python-versions = "*"
+files = [
+ {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"},
+ {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"},
+]
+
[[package]]
name = "appnope"
version = "0.1.4"
@@ -275,6 +286,17 @@ docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphi
tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"]
+[[package]]
+name = "backoff"
+version = "2.2.1"
+description = "Function decoration for backoff and retry"
+optional = false
+python-versions = ">=3.7,<4.0"
+files = [
+ {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"},
+ {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"},
+]
+
[[package]]
name = "bcrypt"
version = "4.0.1"
@@ -309,6 +331,27 @@ files = [
tests = ["pytest (>=3.2.1,!=3.3.0)"]
typecheck = ["mypy"]
+[[package]]
+name = "beautifulsoup4"
+version = "4.12.3"
+description = "Screen-scraping library"
+optional = false
+python-versions = ">=3.6.0"
+files = [
+ {file = "beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed"},
+ {file = "beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051"},
+]
+
+[package.dependencies]
+soupsieve = ">1.2"
+
+[package.extras]
+cchardet = ["cchardet"]
+chardet = ["chardet"]
+charset-normalizer = ["charset-normalizer"]
+html5lib = ["html5lib"]
+lxml = ["lxml"]
+
[[package]]
name = "blinker"
version = "1.8.2"
@@ -320,6 +363,44 @@ files = [
{file = "blinker-1.8.2.tar.gz", hash = "sha256:8f77b09d3bf7c795e969e9486f39c2c5e9c39d4ee07424be2bc594ece9642d83"},
]
+[[package]]
+name = "boto3"
+version = "1.34.145"
+description = "The AWS SDK for Python"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "boto3-1.34.145-py3-none-any.whl", hash = "sha256:69d5afb7a017d07dd6bdfb680d2912d5d369b3fafa0a45161207d9f393b14d7e"},
+ {file = "boto3-1.34.145.tar.gz", hash = "sha256:ac770fb53dde1743aec56bd8e56b7ee2e2f5ad42a37825968ec4ff8428822640"},
+]
+
+[package.dependencies]
+botocore = ">=1.34.145,<1.35.0"
+jmespath = ">=0.7.1,<2.0.0"
+s3transfer = ">=0.10.0,<0.11.0"
+
+[package.extras]
+crt = ["botocore[crt] (>=1.21.0,<2.0a0)"]
+
+[[package]]
+name = "botocore"
+version = "1.34.145"
+description = "Low-level, data-driven core of boto 3."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "botocore-1.34.145-py3-none-any.whl", hash = "sha256:2e72e262de02adcb0264ac2bac159a28f55dbba8d9e52aa0308773a42950dff5"},
+ {file = "botocore-1.34.145.tar.gz", hash = "sha256:edf0fb4c02186ae29b76263ac5fda18b0a085d334a310551c9984407cf1079e6"},
+]
+
+[package.dependencies]
+jmespath = ">=0.7.1,<2.0.0"
+python-dateutil = ">=2.1,<3.0.0"
+urllib3 = {version = ">=1.25.4,<2.2.0 || >2.2.0,<3", markers = "python_version >= \"3.10\""}
+
+[package.extras]
+crt = ["awscrt (==0.20.11)"]
+
[[package]]
name = "brotli"
version = "1.1.0"
@@ -412,6 +493,31 @@ files = [
{file = "Brotli-1.1.0.tar.gz", hash = "sha256:81de08ac11bcb85841e440c13611c00b67d3bf82698314928d0b676362546724"},
]
+[[package]]
+name = "build"
+version = "1.2.1"
+description = "A simple, correct Python build frontend"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "build-1.2.1-py3-none-any.whl", hash = "sha256:75e10f767a433d9a86e50d83f418e83efc18ede923ee5ff7df93b6cb0306c5d4"},
+ {file = "build-1.2.1.tar.gz", hash = "sha256:526263f4870c26f26c433545579475377b2b7588b6f1eac76a001e873ae3e19d"},
+]
+
+[package.dependencies]
+colorama = {version = "*", markers = "os_name == \"nt\""}
+importlib-metadata = {version = ">=4.6", markers = "python_full_version < \"3.10.2\""}
+packaging = ">=19.1"
+pyproject_hooks = "*"
+tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
+
+[package.extras]
+docs = ["furo (>=2023.08.17)", "sphinx (>=7.0,<8.0)", "sphinx-argparse-cli (>=1.5)", "sphinx-autodoc-typehints (>=1.10)", "sphinx-issues (>=3.0.0)"]
+test = ["build[uv,virtualenv]", "filelock (>=3)", "pytest (>=6.2.4)", "pytest-cov (>=2.12)", "pytest-mock (>=2)", "pytest-rerunfailures (>=9.1)", "pytest-xdist (>=1.34)", "setuptools (>=42.0.0)", "setuptools (>=56.0.0)", "setuptools (>=56.0.0)", "setuptools (>=67.8.0)", "wheel (>=0.36.0)"]
+typing = ["build[uv]", "importlib-metadata (>=5.1)", "mypy (>=1.9.0,<1.10.0)", "tomli", "typing-extensions (>=3.7.4.3)"]
+uv = ["uv (>=0.1.18)"]
+virtualenv = ["virtualenv (>=20.0.35)"]
+
[[package]]
name = "cachetools"
version = "5.4.0"
@@ -634,6 +740,84 @@ files = [
{file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"},
]
+[[package]]
+name = "chroma-hnswlib"
+version = "0.7.3"
+description = "Chromas fork of hnswlib"
+optional = false
+python-versions = "*"
+files = [
+ {file = "chroma-hnswlib-0.7.3.tar.gz", hash = "sha256:b6137bedde49fffda6af93b0297fe00429fc61e5a072b1ed9377f909ed95a932"},
+ {file = "chroma_hnswlib-0.7.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:59d6a7c6f863c67aeb23e79a64001d537060b6995c3eca9a06e349ff7b0998ca"},
+ {file = "chroma_hnswlib-0.7.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d71a3f4f232f537b6152947006bd32bc1629a8686df22fd97777b70f416c127a"},
+ {file = "chroma_hnswlib-0.7.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c92dc1ebe062188e53970ba13f6b07e0ae32e64c9770eb7f7ffa83f149d4210"},
+ {file = "chroma_hnswlib-0.7.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49da700a6656fed8753f68d44b8cc8ae46efc99fc8a22a6d970dc1697f49b403"},
+ {file = "chroma_hnswlib-0.7.3-cp310-cp310-win_amd64.whl", hash = "sha256:108bc4c293d819b56476d8f7865803cb03afd6ca128a2a04d678fffc139af029"},
+ {file = "chroma_hnswlib-0.7.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:11e7ca93fb8192214ac2b9c0943641ac0daf8f9d4591bb7b73be808a83835667"},
+ {file = "chroma_hnswlib-0.7.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6f552e4d23edc06cdeb553cdc757d2fe190cdeb10d43093d6a3319f8d4bf1c6b"},
+ {file = "chroma_hnswlib-0.7.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f96f4d5699e486eb1fb95849fe35ab79ab0901265805be7e60f4eaa83ce263ec"},
+ {file = "chroma_hnswlib-0.7.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:368e57fe9ebae05ee5844840fa588028a023d1182b0cfdb1d13f607c9ea05756"},
+ {file = "chroma_hnswlib-0.7.3-cp311-cp311-win_amd64.whl", hash = "sha256:b7dca27b8896b494456db0fd705b689ac6b73af78e186eb6a42fea2de4f71c6f"},
+ {file = "chroma_hnswlib-0.7.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:70f897dc6218afa1d99f43a9ad5eb82f392df31f57ff514ccf4eeadecd62f544"},
+ {file = "chroma_hnswlib-0.7.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5aef10b4952708f5a1381c124a29aead0c356f8d7d6e0b520b778aaa62a356f4"},
+ {file = "chroma_hnswlib-0.7.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ee2d8d1529fca3898d512079144ec3e28a81d9c17e15e0ea4665697a7923253"},
+ {file = "chroma_hnswlib-0.7.3-cp37-cp37m-win_amd64.whl", hash = "sha256:a4021a70e898783cd6f26e00008b494c6249a7babe8774e90ce4766dd288c8ba"},
+ {file = "chroma_hnswlib-0.7.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a8f61fa1d417fda848e3ba06c07671f14806a2585272b175ba47501b066fe6b1"},
+ {file = "chroma_hnswlib-0.7.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d7563be58bc98e8f0866907368e22ae218d6060601b79c42f59af4eccbbd2e0a"},
+ {file = "chroma_hnswlib-0.7.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:51b8d411486ee70d7b66ec08cc8b9b6620116b650df9c19076d2d8b6ce2ae914"},
+ {file = "chroma_hnswlib-0.7.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d706782b628e4f43f1b8a81e9120ac486837fbd9bcb8ced70fe0d9b95c72d77"},
+ {file = "chroma_hnswlib-0.7.3-cp38-cp38-win_amd64.whl", hash = "sha256:54f053dedc0e3ba657f05fec6e73dd541bc5db5b09aa8bc146466ffb734bdc86"},
+ {file = "chroma_hnswlib-0.7.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e607c5a71c610a73167a517062d302c0827ccdd6e259af6e4869a5c1306ffb5d"},
+ {file = "chroma_hnswlib-0.7.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c2358a795870156af6761890f9eb5ca8cade57eb10c5f046fe94dae1faa04b9e"},
+ {file = "chroma_hnswlib-0.7.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7cea425df2e6b8a5e201fff0d922a1cc1d165b3cfe762b1408075723c8892218"},
+ {file = "chroma_hnswlib-0.7.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:454df3dd3e97aa784fba7cf888ad191e0087eef0fd8c70daf28b753b3b591170"},
+ {file = "chroma_hnswlib-0.7.3-cp39-cp39-win_amd64.whl", hash = "sha256:df587d15007ca701c6de0ee7d5585dd5e976b7edd2b30ac72bc376b3c3f85882"},
+]
+
+[package.dependencies]
+numpy = "*"
+
+[[package]]
+name = "chromadb"
+version = "0.4.24"
+description = "Chroma."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "chromadb-0.4.24-py3-none-any.whl", hash = "sha256:3a08e237a4ad28b5d176685bd22429a03717fe09d35022fb230d516108da01da"},
+ {file = "chromadb-0.4.24.tar.gz", hash = "sha256:a5c80b4e4ad9b236ed2d4899a5b9e8002b489293f2881cb2cadab5b199ee1c72"},
+]
+
+[package.dependencies]
+bcrypt = ">=4.0.1"
+build = ">=1.0.3"
+chroma-hnswlib = "0.7.3"
+fastapi = ">=0.95.2"
+grpcio = ">=1.58.0"
+importlib-resources = "*"
+kubernetes = ">=28.1.0"
+mmh3 = ">=4.0.1"
+numpy = ">=1.22.5"
+onnxruntime = ">=1.14.1"
+opentelemetry-api = ">=1.2.0"
+opentelemetry-exporter-otlp-proto-grpc = ">=1.2.0"
+opentelemetry-instrumentation-fastapi = ">=0.41b0"
+opentelemetry-sdk = ">=1.2.0"
+orjson = ">=3.9.12"
+overrides = ">=7.3.1"
+posthog = ">=2.4.0"
+pulsar-client = ">=3.1.0"
+pydantic = ">=1.9"
+pypika = ">=0.48.9"
+PyYAML = ">=6.0.0"
+requests = ">=2.28"
+tenacity = ">=8.2.3"
+tokenizers = ">=0.13.2"
+tqdm = ">=4.65.0"
+typer = ">=0.9.0"
+typing-extensions = ">=4.5.0"
+uvicorn = {version = ">=0.18.3", extras = ["standard"]}
+
[[package]]
name = "click"
version = "8.1.7"
@@ -648,6 +832,29 @@ files = [
[package.dependencies]
colorama = {version = "*", markers = "platform_system == \"Windows\""}
+[[package]]
+name = "cohere"
+version = "5.6.2"
+description = ""
+optional = false
+python-versions = "<4.0,>=3.8"
+files = [
+ {file = "cohere-5.6.2-py3-none-any.whl", hash = "sha256:cfecf1343bcaa4091266c5a231fbcb3ccbd80cad05ea093ef80024a117aa3a2f"},
+ {file = "cohere-5.6.2.tar.gz", hash = "sha256:6bb901afdfb02f62ad8ed2d82f12d8ea87a6869710f5f880cb89190c4e994805"},
+]
+
+[package.dependencies]
+boto3 = ">=1.34.0,<2.0.0"
+fastavro = ">=1.9.4,<2.0.0"
+httpx = ">=0.21.2"
+httpx-sse = ">=0.4.0,<0.5.0"
+parameterized = ">=0.9.0,<0.10.0"
+pydantic = ">=1.9.2"
+requests = ">=2.0.0,<3.0.0"
+tokenizers = ">=0.15,<1"
+types-requests = ">=2.0.0,<3.0.0"
+typing_extensions = ">=4.0.0"
+
[[package]]
name = "colorama"
version = "0.4.6"
@@ -659,6 +866,23 @@ files = [
{file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
]
+[[package]]
+name = "coloredlogs"
+version = "15.0.1"
+description = "Colored terminal output for Python's logging module"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+files = [
+ {file = "coloredlogs-15.0.1-py2.py3-none-any.whl", hash = "sha256:612ee75c546f53e92e70049c9dbfcc18c935a2b9a53b66085ce9ef6a6e5c0934"},
+ {file = "coloredlogs-15.0.1.tar.gz", hash = "sha256:7c991aa71a4577af2f82600d8f8f3a89f936baeaf9b50a9c197da014e5bf16b0"},
+]
+
+[package.dependencies]
+humanfriendly = ">=9.1"
+
+[package.extras]
+cron = ["capturer (>=2.4)"]
+
[[package]]
name = "comm"
version = "0.2.2"
@@ -778,6 +1002,36 @@ tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.1
[package.extras]
toml = ["tomli"]
+[[package]]
+name = "crewai"
+version = "0.36.1"
+description = "Cutting-edge framework for orchestrating role-playing, autonomous AI agents. By fostering collaborative intelligence, CrewAI empowers agents to work together seamlessly, tackling complex tasks."
+optional = false
+python-versions = "<=3.13,>=3.10"
+files = [
+ {file = "crewai-0.36.1-py3-none-any.whl", hash = "sha256:dbaa50d102542ea0c790bd62511b35234b2f5fa8d2333a6598beb84f407f0e00"},
+ {file = "crewai-0.36.1.tar.gz", hash = "sha256:ea50ec5d3ef2df85e1b520efd9331bebb49ed7143e6cd1feec645da49217d2b0"},
+]
+
+[package.dependencies]
+appdirs = ">=1.4.4,<2.0.0"
+click = ">=8.1.7,<9.0.0"
+embedchain = ">=0.1.114,<0.2.0"
+instructor = "1.3.3"
+jsonref = ">=1.1.0,<2.0.0"
+langchain = ">0.2,<=0.3"
+openai = ">=1.13.3,<2.0.0"
+opentelemetry-api = ">=1.22.0,<2.0.0"
+opentelemetry-exporter-otlp-proto-http = ">=1.22.0,<2.0.0"
+opentelemetry-sdk = ">=1.22.0,<2.0.0"
+pydantic = ">=2.4.2,<3.0.0"
+python-dotenv = ">=1.0.0,<2.0.0"
+regex = ">=2023.12.25,<2024.0.0"
+
+[package.extras]
+agentops = ["agentops (>=0.1.9,<0.2.0)"]
+tools = ["crewai-tools (>=0.4.7,<0.5.0)"]
+
[[package]]
name = "cryptography"
version = "42.0.8"
@@ -965,6 +1219,17 @@ files = [
{file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"},
]
+[[package]]
+name = "distro"
+version = "1.9.0"
+description = "Distro - an OS platform information API"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2"},
+ {file = "distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed"},
+]
+
[[package]]
name = "dnspython"
version = "2.6.1"
@@ -1084,6 +1349,57 @@ files = [
dnspython = ">=2.0.0"
idna = ">=2.0.0"
+[[package]]
+name = "embedchain"
+version = "0.1.118"
+description = "Simplest open source retrieval (RAG) framework"
+optional = false
+python-versions = "<=3.13,>=3.9"
+files = [
+ {file = "embedchain-0.1.118-py3-none-any.whl", hash = "sha256:38ead471df9d9234bf42e6f7a32cab26431d50d6f2f894f18a6cabc0b02bf31a"},
+ {file = "embedchain-0.1.118.tar.gz", hash = "sha256:1fa1e799882a1dc4e63af344595b043f1c1f30fbd59461b6660b1934b85a1e4b"},
+]
+
+[package.dependencies]
+alembic = ">=1.13.1,<2.0.0"
+beautifulsoup4 = ">=4.12.2,<5.0.0"
+chromadb = ">=0.4.24,<0.5.0"
+cohere = ">=5.3,<6.0"
+google-cloud-aiplatform = ">=1.26.1,<2.0.0"
+gptcache = ">=0.1.43,<0.2.0"
+langchain = ">0.2,<=0.3"
+langchain-cohere = ">=0.1.4,<0.2.0"
+langchain-community = ">=0.2.6,<0.3.0"
+langchain-openai = ">=0.1.7,<0.2.0"
+mem0ai = ">=0.0.5,<0.0.6"
+openai = ">=1.1.1"
+posthog = ">=3.0.2,<4.0.0"
+pypdf = ">=4.0.1,<5.0.0"
+pysbd = ">=0.3.4,<0.4.0"
+python-dotenv = ">=1.0.0,<2.0.0"
+rich = ">=13.7.0,<14.0.0"
+schema = ">=0.7.5,<0.8.0"
+sqlalchemy = ">=2.0.27,<3.0.0"
+tiktoken = ">=0.7.0,<0.8.0"
+
+[package.extras]
+elasticsearch = ["elasticsearch (>=8.9.0,<9.0.0)"]
+gmail = ["google-api-core (>=2.15.0,<3.0.0)", "google-api-python-client (>=2.111.0,<3.0.0)", "google-auth (>=2.25.2,<3.0.0)", "google-auth-httplib2 (>=0.2.0,<0.3.0)", "google-auth-oauthlib (>=1.2.0,<2.0.0)", "requests (>=2.31.0,<3.0.0)"]
+google = ["google-generativeai (>=0.3.0,<0.4.0)"]
+googledrive = ["google-api-python-client (>=2.111.0,<3.0.0)", "google-auth-httplib2 (>=0.2.0,<0.3.0)", "google-auth-oauthlib (>=1.2.0,<2.0.0)"]
+lancedb = ["lancedb (>=0.6.2,<0.7.0)"]
+llama2 = ["replicate (>=0.15.4,<0.16.0)"]
+milvus = ["pymilvus (==2.4.3)"]
+mistralai = ["langchain-mistralai (>=0.1.9,<0.2.0)"]
+mysql = ["mysql-connector-python (>=8.1.0,<9.0.0)"]
+opensearch = ["opensearch-py (==2.3.1)"]
+opensource = ["gpt4all (==2.0.2)", "sentence-transformers (>=2.2.2,<3.0.0)", "torch (==2.3.0)"]
+postgres = ["psycopg (>=3.1.12,<4.0.0)", "psycopg-binary (>=3.1.12,<4.0.0)", "psycopg-pool (>=3.1.8,<4.0.0)"]
+qdrant = ["qdrant-client (>=1.6.3,<2.0.0)"]
+together = ["together (>=1.2.1,<2.0.0)"]
+vertexai = ["langchain-google-vertexai (>=1.0.6,<2.0.0)"]
+weaviate = ["weaviate-client (>=3.24.1,<4.0.0)"]
+
[[package]]
name = "emoji"
version = "2.12.1"
@@ -1101,6 +1417,20 @@ typing-extensions = ">=4.7.0"
[package.extras]
dev = ["coverage", "pytest (>=7.4.4)"]
+[[package]]
+name = "eval-type-backport"
+version = "0.2.0"
+description = "Like `typing._eval_type`, but lets older Python versions use newer typing features."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "eval_type_backport-0.2.0-py3-none-any.whl", hash = "sha256:ac2f73d30d40c5a30a80b8739a789d6bb5e49fdffa66d7912667e2015d9c9933"},
+ {file = "eval_type_backport-0.2.0.tar.gz", hash = "sha256:68796cfbc7371ebf923f03bdf7bef415f3ec098aeced24e054b253a0e78f7b37"},
+]
+
+[package.extras]
+tests = ["pytest"]
+
[[package]]
name = "exceptiongroup"
version = "1.2.2"
@@ -1186,6 +1516,52 @@ uvicorn = {version = ">=0.15.0", extras = ["standard"]}
[package.extras]
standard = ["uvicorn[standard] (>=0.15.0)"]
+[[package]]
+name = "fastavro"
+version = "1.9.5"
+description = "Fast read/write of AVRO files"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "fastavro-1.9.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:61253148e95dd2b6457247b441b7555074a55de17aef85f5165bfd5facf600fc"},
+ {file = "fastavro-1.9.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b604935d671ad47d888efc92a106f98e9440874108b444ac10e28d643109c937"},
+ {file = "fastavro-1.9.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0adbf4956fd53bd74c41e7855bb45ccce953e0eb0e44f5836d8d54ad843f9944"},
+ {file = "fastavro-1.9.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:53d838e31457db8bf44460c244543f75ed307935d5fc1d93bc631cc7caef2082"},
+ {file = "fastavro-1.9.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:07b6288e8681eede16ff077632c47395d4925c2f51545cd7a60f194454db2211"},
+ {file = "fastavro-1.9.5-cp310-cp310-win_amd64.whl", hash = "sha256:ef08cf247fdfd61286ac0c41854f7194f2ad05088066a756423d7299b688d975"},
+ {file = "fastavro-1.9.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c52d7bb69f617c90935a3e56feb2c34d4276819a5c477c466c6c08c224a10409"},
+ {file = "fastavro-1.9.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85e05969956003df8fa4491614bc62fe40cec59e94d06e8aaa8d8256ee3aab82"},
+ {file = "fastavro-1.9.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06e6df8527493a9f0d9a8778df82bab8b1aa6d80d1b004e5aec0a31dc4dc501c"},
+ {file = "fastavro-1.9.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:27820da3b17bc01cebb6d1687c9d7254b16d149ef458871aaa207ed8950f3ae6"},
+ {file = "fastavro-1.9.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:195a5b8e33eb89a1a9b63fa9dce7a77d41b3b0cd785bac6044df619f120361a2"},
+ {file = "fastavro-1.9.5-cp311-cp311-win_amd64.whl", hash = "sha256:be612c109efb727bfd36d4d7ed28eb8e0506617b7dbe746463ebbf81e85eaa6b"},
+ {file = "fastavro-1.9.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b133456c8975ec7d2a99e16a7e68e896e45c821b852675eac4ee25364b999c14"},
+ {file = "fastavro-1.9.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf586373c3d1748cac849395aad70c198ee39295f92e7c22c75757b5c0300fbe"},
+ {file = "fastavro-1.9.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:724ef192bc9c55d5b4c7df007f56a46a21809463499856349d4580a55e2b914c"},
+ {file = "fastavro-1.9.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:bfd11fe355a8f9c0416803afac298960eb4c603a23b1c74ff9c1d3e673ea7185"},
+ {file = "fastavro-1.9.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9827d1654d7bcb118ef5efd3e5b2c9ab2a48d44dac5e8c6a2327bc3ac3caa828"},
+ {file = "fastavro-1.9.5-cp312-cp312-win_amd64.whl", hash = "sha256:d84b69dca296667e6137ae7c9a96d060123adbc0c00532cc47012b64d38b47e9"},
+ {file = "fastavro-1.9.5-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:fb744e9de40fb1dc75354098c8db7da7636cba50a40f7bef3b3fb20f8d189d88"},
+ {file = "fastavro-1.9.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:240df8bacd13ff5487f2465604c007d686a566df5cbc01d0550684eaf8ff014a"},
+ {file = "fastavro-1.9.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3bb35c25bbc3904e1c02333bc1ae0173e0a44aa37a8e95d07e681601246e1f1"},
+ {file = "fastavro-1.9.5-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:b47a54a9700de3eabefd36dabfb237808acae47bc873cada6be6990ef6b165aa"},
+ {file = "fastavro-1.9.5-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:48c7b5e6d2f3bf7917af301c275b05c5be3dd40bb04e80979c9e7a2ab31a00d1"},
+ {file = "fastavro-1.9.5-cp38-cp38-win_amd64.whl", hash = "sha256:05d13f98d4e325be40387e27da9bd60239968862fe12769258225c62ec906f04"},
+ {file = "fastavro-1.9.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5b47948eb196263f6111bf34e1cd08d55529d4ed46eb50c1bc8c7c30a8d18868"},
+ {file = "fastavro-1.9.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85b7a66ad521298ad9373dfe1897a6ccfc38feab54a47b97922e213ae5ad8870"},
+ {file = "fastavro-1.9.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44cb154f863ad80e41aea72a709b12e1533b8728c89b9b1348af91a6154ab2f5"},
+ {file = "fastavro-1.9.5-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b5f7f2b1fe21231fd01f1a2a90e714ae267fe633cd7ce930c0aea33d1c9f4901"},
+ {file = "fastavro-1.9.5-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:88fbbe16c61d90a89d78baeb5a34dc1c63a27b115adccdbd6b1fb6f787deacf2"},
+ {file = "fastavro-1.9.5-cp39-cp39-win_amd64.whl", hash = "sha256:753f5eedeb5ca86004e23a9ce9b41c5f25eb64a876f95edcc33558090a7f3e4b"},
+ {file = "fastavro-1.9.5.tar.gz", hash = "sha256:6419ebf45f88132a9945c51fe555d4f10bb97c236288ed01894f957c6f914553"},
+]
+
+[package.extras]
+codecs = ["cramjam", "lz4", "zstandard"]
+lz4 = ["lz4"]
+snappy = ["cramjam"]
+zstandard = ["zstandard"]
+
[[package]]
name = "filelock"
version = "3.15.4"
@@ -1267,6 +1643,17 @@ files = [
Flask = ">=1.0.4"
Werkzeug = ">=1.0.1"
+[[package]]
+name = "flatbuffers"
+version = "24.3.25"
+description = "The FlatBuffers serialization format for Python"
+optional = false
+python-versions = "*"
+files = [
+ {file = "flatbuffers-24.3.25-py2.py3-none-any.whl", hash = "sha256:8dbdec58f935f3765e4f7f3cf635ac3a77f83568138d6a2311f524ec96364812"},
+ {file = "flatbuffers-24.3.25.tar.gz", hash = "sha256:de2ec5b203f21441716617f38443e0a8ebf3d25bf0d9c0bb0ce68fa00ad546a4"},
+]
+
[[package]]
name = "frozenlist"
version = "1.4.1"
@@ -1353,6 +1740,45 @@ files = [
{file = "frozenlist-1.4.1.tar.gz", hash = "sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b"},
]
+[[package]]
+name = "fsspec"
+version = "2024.6.1"
+description = "File-system specification"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "fsspec-2024.6.1-py3-none-any.whl", hash = "sha256:3cb443f8bcd2efb31295a5b9fdb02aee81d8452c80d28f97a6d0959e6cee101e"},
+ {file = "fsspec-2024.6.1.tar.gz", hash = "sha256:fad7d7e209dd4c1208e3bbfda706620e0da5142bebbd9c384afb95b07e798e49"},
+]
+
+[package.extras]
+abfs = ["adlfs"]
+adl = ["adlfs"]
+arrow = ["pyarrow (>=1)"]
+dask = ["dask", "distributed"]
+dev = ["pre-commit", "ruff"]
+doc = ["numpydoc", "sphinx", "sphinx-design", "sphinx-rtd-theme", "yarl"]
+dropbox = ["dropbox", "dropboxdrivefs", "requests"]
+full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "dask", "distributed", "dropbox", "dropboxdrivefs", "fusepy", "gcsfs", "libarchive-c", "ocifs", "panel", "paramiko", "pyarrow (>=1)", "pygit2", "requests", "s3fs", "smbprotocol", "tqdm"]
+fuse = ["fusepy"]
+gcs = ["gcsfs"]
+git = ["pygit2"]
+github = ["requests"]
+gs = ["gcsfs"]
+gui = ["panel"]
+hdfs = ["pyarrow (>=1)"]
+http = ["aiohttp (!=4.0.0a0,!=4.0.0a1)"]
+libarchive = ["libarchive-c"]
+oci = ["ocifs"]
+s3 = ["s3fs"]
+sftp = ["paramiko"]
+smb = ["smbprotocol"]
+ssh = ["paramiko"]
+test = ["aiohttp (!=4.0.0a0,!=4.0.0a1)", "numpy", "pytest", "pytest-asyncio (!=0.22.0)", "pytest-benchmark", "pytest-cov", "pytest-mock", "pytest-recording", "pytest-rerunfailures", "requests"]
+test-downstream = ["aiobotocore (>=2.5.4,<3.0.0)", "dask-expr", "dask[dataframe,test]", "moto[server] (>4,<5)", "pytest-timeout", "xarray"]
+test-full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "cloudpickle", "dask", "distributed", "dropbox", "dropboxdrivefs", "fastparquet", "fusepy", "gcsfs", "jinja2", "kerchunk", "libarchive-c", "lz4", "notebook", "numpy", "ocifs", "pandas", "panel", "paramiko", "pyarrow", "pyarrow (>=1)", "pyftpdlib", "pygit2", "pytest", "pytest-asyncio (!=0.22.0)", "pytest-benchmark", "pytest-cov", "pytest-mock", "pytest-recording", "pytest-rerunfailures", "python-snappy", "requests", "smbprotocol", "tqdm", "urllib3", "zarr", "zstandard"]
+tqdm = ["tqdm"]
+
[[package]]
name = "gevent"
version = "24.2.1"
@@ -1502,6 +1928,313 @@ benchmarks = ["httplib2", "httpx", "requests", "urllib3"]
dev = ["dpkt", "pytest", "requests"]
examples = ["oauth2"]
+[[package]]
+name = "google-api-core"
+version = "2.19.1"
+description = "Google API client core library"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "google-api-core-2.19.1.tar.gz", hash = "sha256:f4695f1e3650b316a795108a76a1c416e6afb036199d1c1f1f110916df479ffd"},
+ {file = "google_api_core-2.19.1-py3-none-any.whl", hash = "sha256:f12a9b8309b5e21d92483bbd47ce2c445861ec7d269ef6784ecc0ea8c1fa6125"},
+]
+
+[package.dependencies]
+google-auth = ">=2.14.1,<3.0.dev0"
+googleapis-common-protos = ">=1.56.2,<2.0.dev0"
+grpcio = [
+ {version = ">=1.33.2,<2.0dev", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""},
+ {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""},
+]
+grpcio-status = [
+ {version = ">=1.33.2,<2.0.dev0", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""},
+ {version = ">=1.49.1,<2.0.dev0", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""},
+]
+proto-plus = ">=1.22.3,<2.0.0dev"
+protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0.dev0"
+requests = ">=2.18.0,<3.0.0.dev0"
+
+[package.extras]
+grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "grpcio-status (>=1.33.2,<2.0.dev0)", "grpcio-status (>=1.49.1,<2.0.dev0)"]
+grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"]
+grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"]
+
+[[package]]
+name = "google-auth"
+version = "2.32.0"
+description = "Google Authentication Library"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "google_auth-2.32.0-py2.py3-none-any.whl", hash = "sha256:53326ea2ebec768070a94bee4e1b9194c9646ea0c2bd72422785bd0f9abfad7b"},
+ {file = "google_auth-2.32.0.tar.gz", hash = "sha256:49315be72c55a6a37d62819e3573f6b416aca00721f7e3e31a008d928bf64022"},
+]
+
+[package.dependencies]
+cachetools = ">=2.0.0,<6.0"
+pyasn1-modules = ">=0.2.1"
+rsa = ">=3.1.4,<5"
+
+[package.extras]
+aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"]
+enterprise-cert = ["cryptography (==36.0.2)", "pyopenssl (==22.0.0)"]
+pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"]
+reauth = ["pyu2f (>=0.1.5)"]
+requests = ["requests (>=2.20.0,<3.0.0.dev0)"]
+
+[[package]]
+name = "google-cloud-aiplatform"
+version = "1.59.0"
+description = "Vertex AI API client library"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "google-cloud-aiplatform-1.59.0.tar.gz", hash = "sha256:2bebb59c0ba3e3b4b568305418ca1b021977988adbee8691a5bed09b037e7e63"},
+ {file = "google_cloud_aiplatform-1.59.0-py2.py3-none-any.whl", hash = "sha256:549e6eb1844b0f853043309138ebe2db00de4bbd8197b3bde26804ac163ef52a"},
+]
+
+[package.dependencies]
+docstring-parser = "<1"
+google-api-core = {version = ">=1.34.1,<2.0.dev0 || >=2.8.dev0,<3.0.0dev", extras = ["grpc"]}
+google-auth = ">=2.14.1,<3.0.0dev"
+google-cloud-bigquery = ">=1.15.0,<3.20.0 || >3.20.0,<4.0.0dev"
+google-cloud-resource-manager = ">=1.3.3,<3.0.0dev"
+google-cloud-storage = ">=1.32.0,<3.0.0dev"
+packaging = ">=14.3"
+proto-plus = ">=1.22.3,<2.0.0dev"
+protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev"
+pydantic = "<3"
+shapely = "<3.0.0dev"
+
+[package.extras]
+autologging = ["mlflow (>=1.27.0,<=2.1.1)"]
+cloud-profiler = ["tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "werkzeug (>=2.0.0,<2.1.0dev)"]
+datasets = ["pyarrow (>=10.0.1)", "pyarrow (>=14.0.0)", "pyarrow (>=3.0.0,<8.0dev)"]
+endpoint = ["requests (>=2.28.1)"]
+full = ["cloudpickle (<3.0)", "docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<=0.109.1)", "google-cloud-bigquery", "google-cloud-bigquery-storage", "google-cloud-logging (<4.0)", "google-vizier (>=0.1.6)", "httpx (>=0.23.0,<0.25.0)", "immutabledict", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.1.1)", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=10.0.1)", "pyarrow (>=14.0.0)", "pyarrow (>=3.0.0,<8.0dev)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pyyaml (>=5.3.1,<7)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)", "requests (>=2.28.1)", "setuptools (<70.0.0)", "starlette (>=0.17.1)", "tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "tqdm (>=4.23.0)", "urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)", "werkzeug (>=2.0.0,<2.1.0dev)"]
+langchain = ["langchain (>=0.1.16,<0.3)", "langchain-core (<0.2)", "langchain-google-vertexai (<2)", "openinference-instrumentation-langchain (>=0.1.19,<0.2)", "tenacity (<=8.3)"]
+langchain-testing = ["absl-py", "cloudpickle (>=3.0,<4.0)", "langchain (>=0.1.16,<0.3)", "langchain-core (<0.2)", "langchain-google-vertexai (<2)", "openinference-instrumentation-langchain (>=0.1.19,<0.2)", "opentelemetry-exporter-gcp-trace (<2)", "opentelemetry-sdk (<2)", "pydantic (>=2.6.3,<3)", "pytest-xdist", "tenacity (<=8.3)"]
+lit = ["explainable-ai-sdk (>=1.0.0)", "lit-nlp (==0.4.0)", "pandas (>=1.0.0)", "tensorflow (>=2.3.0,<3.0.0dev)"]
+metadata = ["numpy (>=1.15.0)", "pandas (>=1.0.0)"]
+pipelines = ["pyyaml (>=5.3.1,<7)"]
+prediction = ["docker (>=5.0.3)", "fastapi (>=0.71.0,<=0.109.1)", "httpx (>=0.23.0,<0.25.0)", "starlette (>=0.17.1)", "uvicorn[standard] (>=0.16.0)"]
+preview = ["cloudpickle (<3.0)", "google-cloud-logging (<4.0)"]
+private-endpoints = ["requests (>=2.28.1)", "urllib3 (>=1.21.1,<1.27)"]
+rapid-evaluation = ["pandas (>=1.0.0,<2.2.0)", "tqdm (>=4.23.0)"]
+ray = ["google-cloud-bigquery", "google-cloud-bigquery-storage", "immutabledict", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)", "setuptools (<70.0.0)"]
+ray-testing = ["google-cloud-bigquery", "google-cloud-bigquery-storage", "immutabledict", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pytest-xdist", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)", "ray[train] (==2.9.3)", "scikit-learn", "setuptools (<70.0.0)", "tensorflow", "torch (>=2.0.0,<2.1.0)", "xgboost", "xgboost-ray"]
+reasoningengine = ["cloudpickle (>=3.0,<4.0)", "opentelemetry-exporter-gcp-trace (<2)", "opentelemetry-sdk (<2)", "pydantic (>=2.6.3,<3)"]
+tensorboard = ["tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "werkzeug (>=2.0.0,<2.1.0dev)"]
+testing = ["bigframes", "cloudpickle (<3.0)", "docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<=0.109.1)", "google-api-core (>=2.11,<3.0.0)", "google-cloud-bigquery", "google-cloud-bigquery-storage", "google-cloud-logging (<4.0)", "google-vizier (>=0.1.6)", "grpcio-testing", "httpx (>=0.23.0,<0.25.0)", "immutabledict", "ipython", "kfp (>=2.6.0,<3.0.0)", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.1.1)", "nltk", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=10.0.1)", "pyarrow (>=14.0.0)", "pyarrow (>=3.0.0,<8.0dev)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pyfakefs", "pytest-asyncio", "pytest-xdist", "pyyaml (>=5.3.1,<7)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)", "requests (>=2.28.1)", "requests-toolbelt (<1.0.0)", "scikit-learn", "sentencepiece (>=0.2.0)", "setuptools (<70.0.0)", "starlette (>=0.17.1)", "tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (==2.13.0)", "tensorflow (==2.16.1)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "torch (>=2.0.0,<2.1.0)", "torch (>=2.2.0)", "tqdm (>=4.23.0)", "urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)", "werkzeug (>=2.0.0,<2.1.0dev)", "xgboost"]
+tokenization = ["sentencepiece (>=0.2.0)"]
+vizier = ["google-vizier (>=0.1.6)"]
+xai = ["tensorflow (>=2.3.0,<3.0.0dev)"]
+
+[[package]]
+name = "google-cloud-bigquery"
+version = "3.25.0"
+description = "Google BigQuery API client library"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "google-cloud-bigquery-3.25.0.tar.gz", hash = "sha256:5b2aff3205a854481117436836ae1403f11f2594e6810a98886afd57eda28509"},
+ {file = "google_cloud_bigquery-3.25.0-py2.py3-none-any.whl", hash = "sha256:7f0c371bc74d2a7fb74dacbc00ac0f90c8c2bec2289b51dd6685a275873b1ce9"},
+]
+
+[package.dependencies]
+google-api-core = {version = ">=1.34.1,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]}
+google-auth = ">=2.14.1,<3.0.0dev"
+google-cloud-core = ">=1.6.0,<3.0.0dev"
+google-resumable-media = ">=0.6.0,<3.0dev"
+packaging = ">=20.0.0"
+python-dateutil = ">=2.7.2,<3.0dev"
+requests = ">=2.21.0,<3.0.0dev"
+
+[package.extras]
+all = ["Shapely (>=1.8.4,<3.0.0dev)", "db-dtypes (>=0.3.0,<2.0.0dev)", "geopandas (>=0.9.0,<1.0dev)", "google-cloud-bigquery-storage (>=2.6.0,<3.0.0dev)", "grpcio (>=1.47.0,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "importlib-metadata (>=1.0.0)", "ipykernel (>=6.0.0)", "ipython (>=7.23.1,!=8.1.0)", "ipywidgets (>=7.7.0)", "opentelemetry-api (>=1.1.0)", "opentelemetry-instrumentation (>=0.20b0)", "opentelemetry-sdk (>=1.1.0)", "pandas (>=1.1.0)", "proto-plus (>=1.15.0,<2.0.0dev)", "protobuf (>=3.19.5,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<5.0.0dev)", "pyarrow (>=3.0.0)", "tqdm (>=4.7.4,<5.0.0dev)"]
+bigquery-v2 = ["proto-plus (>=1.15.0,<2.0.0dev)", "protobuf (>=3.19.5,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<5.0.0dev)"]
+bqstorage = ["google-cloud-bigquery-storage (>=2.6.0,<3.0.0dev)", "grpcio (>=1.47.0,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "pyarrow (>=3.0.0)"]
+geopandas = ["Shapely (>=1.8.4,<3.0.0dev)", "geopandas (>=0.9.0,<1.0dev)"]
+ipython = ["ipykernel (>=6.0.0)", "ipython (>=7.23.1,!=8.1.0)"]
+ipywidgets = ["ipykernel (>=6.0.0)", "ipywidgets (>=7.7.0)"]
+opentelemetry = ["opentelemetry-api (>=1.1.0)", "opentelemetry-instrumentation (>=0.20b0)", "opentelemetry-sdk (>=1.1.0)"]
+pandas = ["db-dtypes (>=0.3.0,<2.0.0dev)", "importlib-metadata (>=1.0.0)", "pandas (>=1.1.0)", "pyarrow (>=3.0.0)"]
+tqdm = ["tqdm (>=4.7.4,<5.0.0dev)"]
+
+[[package]]
+name = "google-cloud-core"
+version = "2.4.1"
+description = "Google Cloud API client core library"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "google-cloud-core-2.4.1.tar.gz", hash = "sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073"},
+ {file = "google_cloud_core-2.4.1-py2.py3-none-any.whl", hash = "sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61"},
+]
+
+[package.dependencies]
+google-api-core = ">=1.31.6,<2.0.dev0 || >2.3.0,<3.0.0dev"
+google-auth = ">=1.25.0,<3.0dev"
+
+[package.extras]
+grpc = ["grpcio (>=1.38.0,<2.0dev)", "grpcio-status (>=1.38.0,<2.0.dev0)"]
+
+[[package]]
+name = "google-cloud-resource-manager"
+version = "1.12.4"
+description = "Google Cloud Resource Manager API client library"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "google-cloud-resource-manager-1.12.4.tar.gz", hash = "sha256:3eda914a925e92465ef80faaab7e0f7a9312d486dd4e123d2c76e04bac688ff0"},
+ {file = "google_cloud_resource_manager-1.12.4-py2.py3-none-any.whl", hash = "sha256:0b6663585f7f862166c0fb4c55fdda721fce4dc2dc1d5b52d03ee4bf2653a85f"},
+]
+
+[package.dependencies]
+google-api-core = {version = ">=1.34.1,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]}
+google-auth = ">=2.14.1,<2.24.0 || >2.24.0,<2.25.0 || >2.25.0,<3.0.0dev"
+grpc-google-iam-v1 = ">=0.12.4,<1.0.0dev"
+proto-plus = ">=1.22.3,<2.0.0dev"
+protobuf = ">=3.20.2,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0dev"
+
+[[package]]
+name = "google-cloud-storage"
+version = "2.17.0"
+description = "Google Cloud Storage API client library"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "google-cloud-storage-2.17.0.tar.gz", hash = "sha256:49378abff54ef656b52dca5ef0f2eba9aa83dc2b2c72c78714b03a1a95fe9388"},
+ {file = "google_cloud_storage-2.17.0-py2.py3-none-any.whl", hash = "sha256:5b393bc766b7a3bc6f5407b9e665b2450d36282614b7945e570b3480a456d1e1"},
+]
+
+[package.dependencies]
+google-api-core = ">=2.15.0,<3.0.0dev"
+google-auth = ">=2.26.1,<3.0dev"
+google-cloud-core = ">=2.3.0,<3.0dev"
+google-crc32c = ">=1.0,<2.0dev"
+google-resumable-media = ">=2.6.0"
+requests = ">=2.18.0,<3.0.0dev"
+
+[package.extras]
+protobuf = ["protobuf (<5.0.0dev)"]
+
+[[package]]
+name = "google-crc32c"
+version = "1.5.0"
+description = "A python wrapper of the C library 'Google CRC32C'"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "google-crc32c-1.5.0.tar.gz", hash = "sha256:89284716bc6a5a415d4eaa11b1726d2d60a0cd12aadf5439828353662ede9dd7"},
+ {file = "google_crc32c-1.5.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:596d1f98fc70232fcb6590c439f43b350cb762fb5d61ce7b0e9db4539654cc13"},
+ {file = "google_crc32c-1.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:be82c3c8cfb15b30f36768797a640e800513793d6ae1724aaaafe5bf86f8f346"},
+ {file = "google_crc32c-1.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:461665ff58895f508e2866824a47bdee72497b091c730071f2b7575d5762ab65"},
+ {file = "google_crc32c-1.5.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2096eddb4e7c7bdae4bd69ad364e55e07b8316653234a56552d9c988bd2d61b"},
+ {file = "google_crc32c-1.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:116a7c3c616dd14a3de8c64a965828b197e5f2d121fedd2f8c5585c547e87b02"},
+ {file = "google_crc32c-1.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5829b792bf5822fd0a6f6eb34c5f81dd074f01d570ed7f36aa101d6fc7a0a6e4"},
+ {file = "google_crc32c-1.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:64e52e2b3970bd891309c113b54cf0e4384762c934d5ae56e283f9a0afcd953e"},
+ {file = "google_crc32c-1.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:02ebb8bf46c13e36998aeaad1de9b48f4caf545e91d14041270d9dca767b780c"},
+ {file = "google_crc32c-1.5.0-cp310-cp310-win32.whl", hash = "sha256:2e920d506ec85eb4ba50cd4228c2bec05642894d4c73c59b3a2fe20346bd00ee"},
+ {file = "google_crc32c-1.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:07eb3c611ce363c51a933bf6bd7f8e3878a51d124acfc89452a75120bc436289"},
+ {file = "google_crc32c-1.5.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:cae0274952c079886567f3f4f685bcaf5708f0a23a5f5216fdab71f81a6c0273"},
+ {file = "google_crc32c-1.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1034d91442ead5a95b5aaef90dbfaca8633b0247d1e41621d1e9f9db88c36298"},
+ {file = "google_crc32c-1.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c42c70cd1d362284289c6273adda4c6af8039a8ae12dc451dcd61cdabb8ab57"},
+ {file = "google_crc32c-1.5.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8485b340a6a9e76c62a7dce3c98e5f102c9219f4cfbf896a00cf48caf078d438"},
+ {file = "google_crc32c-1.5.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77e2fd3057c9d78e225fa0a2160f96b64a824de17840351b26825b0848022906"},
+ {file = "google_crc32c-1.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f583edb943cf2e09c60441b910d6a20b4d9d626c75a36c8fcac01a6c96c01183"},
+ {file = "google_crc32c-1.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:a1fd716e7a01f8e717490fbe2e431d2905ab8aa598b9b12f8d10abebb36b04dd"},
+ {file = "google_crc32c-1.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:72218785ce41b9cfd2fc1d6a017dc1ff7acfc4c17d01053265c41a2c0cc39b8c"},
+ {file = "google_crc32c-1.5.0-cp311-cp311-win32.whl", hash = "sha256:66741ef4ee08ea0b2cc3c86916ab66b6aef03768525627fd6a1b34968b4e3709"},
+ {file = "google_crc32c-1.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:ba1eb1843304b1e5537e1fca632fa894d6f6deca8d6389636ee5b4797affb968"},
+ {file = "google_crc32c-1.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:98cb4d057f285bd80d8778ebc4fde6b4d509ac3f331758fb1528b733215443ae"},
+ {file = "google_crc32c-1.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd8536e902db7e365f49e7d9029283403974ccf29b13fc7028b97e2295b33556"},
+ {file = "google_crc32c-1.5.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19e0a019d2c4dcc5e598cd4a4bc7b008546b0358bd322537c74ad47a5386884f"},
+ {file = "google_crc32c-1.5.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02c65b9817512edc6a4ae7c7e987fea799d2e0ee40c53ec573a692bee24de876"},
+ {file = "google_crc32c-1.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6ac08d24c1f16bd2bf5eca8eaf8304812f44af5cfe5062006ec676e7e1d50afc"},
+ {file = "google_crc32c-1.5.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3359fc442a743e870f4588fcf5dcbc1bf929df1fad8fb9905cd94e5edb02e84c"},
+ {file = "google_crc32c-1.5.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e986b206dae4476f41bcec1faa057851f3889503a70e1bdb2378d406223994a"},
+ {file = "google_crc32c-1.5.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:de06adc872bcd8c2a4e0dc51250e9e65ef2ca91be023b9d13ebd67c2ba552e1e"},
+ {file = "google_crc32c-1.5.0-cp37-cp37m-win32.whl", hash = "sha256:d3515f198eaa2f0ed49f8819d5732d70698c3fa37384146079b3799b97667a94"},
+ {file = "google_crc32c-1.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:67b741654b851abafb7bc625b6d1cdd520a379074e64b6a128e3b688c3c04740"},
+ {file = "google_crc32c-1.5.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c02ec1c5856179f171e032a31d6f8bf84e5a75c45c33b2e20a3de353b266ebd8"},
+ {file = "google_crc32c-1.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:edfedb64740750e1a3b16152620220f51d58ff1b4abceb339ca92e934775c27a"},
+ {file = "google_crc32c-1.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:84e6e8cd997930fc66d5bb4fde61e2b62ba19d62b7abd7a69920406f9ecca946"},
+ {file = "google_crc32c-1.5.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a"},
+ {file = "google_crc32c-1.5.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:998679bf62b7fb599d2878aa3ed06b9ce688b8974893e7223c60db155f26bd8d"},
+ {file = "google_crc32c-1.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:83c681c526a3439b5cf94f7420471705bbf96262f49a6fe546a6db5f687a3d4a"},
+ {file = "google_crc32c-1.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4c6fdd4fccbec90cc8a01fc00773fcd5fa28db683c116ee3cb35cd5da9ef6c37"},
+ {file = "google_crc32c-1.5.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5ae44e10a8e3407dbe138984f21e536583f2bba1be9491239f942c2464ac0894"},
+ {file = "google_crc32c-1.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:37933ec6e693e51a5b07505bd05de57eee12f3e8c32b07da7e73669398e6630a"},
+ {file = "google_crc32c-1.5.0-cp38-cp38-win32.whl", hash = "sha256:fe70e325aa68fa4b5edf7d1a4b6f691eb04bbccac0ace68e34820d283b5f80d4"},
+ {file = "google_crc32c-1.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:74dea7751d98034887dbd821b7aae3e1d36eda111d6ca36c206c44478035709c"},
+ {file = "google_crc32c-1.5.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c6c777a480337ac14f38564ac88ae82d4cd238bf293f0a22295b66eb89ffced7"},
+ {file = "google_crc32c-1.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:759ce4851a4bb15ecabae28f4d2e18983c244eddd767f560165563bf9aefbc8d"},
+ {file = "google_crc32c-1.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f13cae8cc389a440def0c8c52057f37359014ccbc9dc1f0827936bcd367c6100"},
+ {file = "google_crc32c-1.5.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e560628513ed34759456a416bf86b54b2476c59144a9138165c9a1575801d0d9"},
+ {file = "google_crc32c-1.5.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1674e4307fa3024fc897ca774e9c7562c957af85df55efe2988ed9056dc4e57"},
+ {file = "google_crc32c-1.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:278d2ed7c16cfc075c91378c4f47924c0625f5fc84b2d50d921b18b7975bd210"},
+ {file = "google_crc32c-1.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d5280312b9af0976231f9e317c20e4a61cd2f9629b7bfea6a693d1878a264ebd"},
+ {file = "google_crc32c-1.5.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8b87e1a59c38f275c0e3676fc2ab6d59eccecfd460be267ac360cc31f7bcde96"},
+ {file = "google_crc32c-1.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7c074fece789b5034b9b1404a1f8208fc2d4c6ce9decdd16e8220c5a793e6f61"},
+ {file = "google_crc32c-1.5.0-cp39-cp39-win32.whl", hash = "sha256:7f57f14606cd1dd0f0de396e1e53824c371e9544a822648cd76c034d209b559c"},
+ {file = "google_crc32c-1.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:a2355cba1f4ad8b6988a4ca3feed5bff33f6af2d7f134852cf279c2aebfde541"},
+ {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f314013e7dcd5cf45ab1945d92e713eec788166262ae8deb2cfacd53def27325"},
+ {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b747a674c20a67343cb61d43fdd9207ce5da6a99f629c6e2541aa0e89215bcd"},
+ {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8f24ed114432de109aa9fd317278518a5af2d31ac2ea6b952b2f7782b43da091"},
+ {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8667b48e7a7ef66afba2c81e1094ef526388d35b873966d8a9a447974ed9178"},
+ {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:1c7abdac90433b09bad6c43a43af253e688c9cfc1c86d332aed13f9a7c7f65e2"},
+ {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6f998db4e71b645350b9ac28a2167e6632c239963ca9da411523bb439c5c514d"},
+ {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c99616c853bb585301df6de07ca2cadad344fd1ada6d62bb30aec05219c45d2"},
+ {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ad40e31093a4af319dadf503b2467ccdc8f67c72e4bcba97f8c10cb078207b5"},
+ {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd67cf24a553339d5062eff51013780a00d6f97a39ca062781d06b3a73b15462"},
+ {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:398af5e3ba9cf768787eef45c803ff9614cc3e22a5b2f7d7ae116df8b11e3314"},
+ {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b1f8133c9a275df5613a451e73f36c2aea4fe13c5c8997e22cf355ebd7bd0728"},
+ {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ba053c5f50430a3fcfd36f75aff9caeba0440b2d076afdb79a318d6ca245f88"},
+ {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:272d3892a1e1a2dbc39cc5cde96834c236d5327e2122d3aaa19f6614531bb6eb"},
+ {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:635f5d4dd18758a1fbd1049a8e8d2fee4ffed124462d837d1a02a0e009c3ab31"},
+ {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c672d99a345849301784604bfeaeba4db0c7aae50b95be04dd651fd2a7310b93"},
+]
+
+[package.extras]
+testing = ["pytest"]
+
+[[package]]
+name = "google-resumable-media"
+version = "2.7.1"
+description = "Utilities for Google Media Downloads and Resumable Uploads"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "google-resumable-media-2.7.1.tar.gz", hash = "sha256:eae451a7b2e2cdbaaa0fd2eb00cc8a1ee5e95e16b55597359cbc3d27d7d90e33"},
+ {file = "google_resumable_media-2.7.1-py2.py3-none-any.whl", hash = "sha256:103ebc4ba331ab1bfdac0250f8033627a2cd7cde09e7ccff9181e31ba4315b2c"},
+]
+
+[package.dependencies]
+google-crc32c = ">=1.0,<2.0dev"
+
+[package.extras]
+aiohttp = ["aiohttp (>=3.6.2,<4.0.0dev)", "google-auth (>=1.22.0,<2.0dev)"]
+requests = ["requests (>=2.18.0,<3.0.0dev)"]
+
+[[package]]
+name = "googleapis-common-protos"
+version = "1.63.2"
+description = "Common protobufs used in Google APIs"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "googleapis-common-protos-1.63.2.tar.gz", hash = "sha256:27c5abdffc4911f28101e635de1533fb4cfd2c37fbaa9174587c799fac90aa87"},
+ {file = "googleapis_common_protos-1.63.2-py2.py3-none-any.whl", hash = "sha256:27a2499c7e8aff199665b22741997e485eccc8645aa9176c7c988e6fae507945"},
+]
+
+[package.dependencies]
+grpcio = {version = ">=1.44.0,<2.0.0.dev0", optional = true, markers = "extra == \"grpc\""}
+protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0.dev0"
+
+[package.extras]
+grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"]
+
[[package]]
name = "gprof2dot"
version = "2024.6.6"
@@ -1513,6 +2246,22 @@ files = [
{file = "gprof2dot-2024.6.6.tar.gz", hash = "sha256:fa1420c60025a9eb7734f65225b4da02a10fc6dd741b37fa129bc6b41951e5ab"},
]
+[[package]]
+name = "gptcache"
+version = "0.1.43"
+description = "GPTCache, a powerful caching library that can be used to speed up and lower the cost of chat applications that rely on the LLM service. GPTCache works as a memcache for AIGC applications, similar to how Redis works for traditional applications."
+optional = false
+python-versions = ">=3.8.1"
+files = [
+ {file = "gptcache-0.1.43-py3-none-any.whl", hash = "sha256:9c557ec9cc14428942a0ebf1c838520dc6d2be801d67bb6964807043fc2feaf5"},
+ {file = "gptcache-0.1.43.tar.gz", hash = "sha256:cebe7ec5e32a3347bf839e933a34e67c7fcae620deaa7cb8c6d7d276c8686f1a"},
+]
+
+[package.dependencies]
+cachetools = "*"
+numpy = "*"
+requests = "*"
+
[[package]]
name = "grandalf"
version = "0.8"
@@ -1601,6 +2350,183 @@ files = [
docs = ["Sphinx", "furo"]
test = ["objgraph", "psutil"]
+[[package]]
+name = "groq"
+version = "0.9.0"
+description = "The official Python library for the groq API"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "groq-0.9.0-py3-none-any.whl", hash = "sha256:d0e46f4ad645504672bb09c8100af3ced3a7db0d5119dc13e4aca535fc455874"},
+ {file = "groq-0.9.0.tar.gz", hash = "sha256:130ed5e35d3acfaab46b9e7a078eeaebf91052f4a9d71f86f87fb319b5fec332"},
+]
+
+[package.dependencies]
+anyio = ">=3.5.0,<5"
+distro = ">=1.7.0,<2"
+httpx = ">=0.23.0,<1"
+pydantic = ">=1.9.0,<3"
+sniffio = "*"
+typing-extensions = ">=4.7,<5"
+
+[[package]]
+name = "grpc-google-iam-v1"
+version = "0.13.1"
+description = "IAM API client library"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "grpc-google-iam-v1-0.13.1.tar.gz", hash = "sha256:3ff4b2fd9d990965e410965253c0da6f66205d5a8291c4c31c6ebecca18a9001"},
+ {file = "grpc_google_iam_v1-0.13.1-py2.py3-none-any.whl", hash = "sha256:c3e86151a981811f30d5e7330f271cee53e73bb87755e88cc3b6f0c7b5fe374e"},
+]
+
+[package.dependencies]
+googleapis-common-protos = {version = ">=1.56.0,<2.0.0dev", extras = ["grpc"]}
+grpcio = ">=1.44.0,<2.0.0dev"
+protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0dev"
+
+[[package]]
+name = "grpcio"
+version = "1.65.1"
+description = "HTTP/2-based RPC framework"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "grpcio-1.65.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:3dc5f928815b8972fb83b78d8db5039559f39e004ec93ebac316403fe031a062"},
+ {file = "grpcio-1.65.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:8333ca46053c35484c9f2f7e8d8ec98c1383a8675a449163cea31a2076d93de8"},
+ {file = "grpcio-1.65.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:7af64838b6e615fff0ec711960ed9b6ee83086edfa8c32670eafb736f169d719"},
+ {file = "grpcio-1.65.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbb64b4166362d9326f7efbf75b1c72106c1aa87f13a8c8b56a1224fac152f5c"},
+ {file = "grpcio-1.65.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8422dc13ad93ec8caa2612b5032a2b9cd6421c13ed87f54db4a3a2c93afaf77"},
+ {file = "grpcio-1.65.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:4effc0562b6c65d4add6a873ca132e46ba5e5a46f07c93502c37a9ae7f043857"},
+ {file = "grpcio-1.65.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a6c71575a2fedf259724981fd73a18906513d2f306169c46262a5bae956e6364"},
+ {file = "grpcio-1.65.1-cp310-cp310-win32.whl", hash = "sha256:34966cf526ef0ea616e008d40d989463e3db157abb213b2f20c6ce0ae7928875"},
+ {file = "grpcio-1.65.1-cp310-cp310-win_amd64.whl", hash = "sha256:ca931de5dd6d9eb94ff19a2c9434b23923bce6f767179fef04dfa991f282eaad"},
+ {file = "grpcio-1.65.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:bbb46330cc643ecf10bd9bd4ca8e7419a14b6b9dedd05f671c90fb2c813c6037"},
+ {file = "grpcio-1.65.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d827a6fb9215b961eb73459ad7977edb9e748b23e3407d21c845d1d8ef6597e5"},
+ {file = "grpcio-1.65.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:6e71aed8835f8d9fbcb84babc93a9da95955d1685021cceb7089f4f1e717d719"},
+ {file = "grpcio-1.65.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a1c84560b3b2d34695c9ba53ab0264e2802721c530678a8f0a227951f453462"},
+ {file = "grpcio-1.65.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27adee2338d697e71143ed147fe286c05810965d5d30ec14dd09c22479bfe48a"},
+ {file = "grpcio-1.65.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:f62652ddcadc75d0e7aa629e96bb61658f85a993e748333715b4ab667192e4e8"},
+ {file = "grpcio-1.65.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:71a05fd814700dd9cb7d9a507f2f6a1ef85866733ccaf557eedacec32d65e4c2"},
+ {file = "grpcio-1.65.1-cp311-cp311-win32.whl", hash = "sha256:b590f1ad056294dfaeac0b7e1b71d3d5ace638d8dd1f1147ce4bd13458783ba8"},
+ {file = "grpcio-1.65.1-cp311-cp311-win_amd64.whl", hash = "sha256:12e9bdf3b5fd48e5fbe5b3da382ad8f97c08b47969f3cca81dd9b36b86ed39e2"},
+ {file = "grpcio-1.65.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:54cb822e177374b318b233e54b6856c692c24cdbd5a3ba5335f18a47396bac8f"},
+ {file = "grpcio-1.65.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:aaf3c54419a28d45bd1681372029f40e5bfb58e5265e3882eaf21e4a5f81a119"},
+ {file = "grpcio-1.65.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:557de35bdfbe8bafea0a003dbd0f4da6d89223ac6c4c7549d78e20f92ead95d9"},
+ {file = "grpcio-1.65.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8bfd95ef3b097f0cc86ade54eafefa1c8ed623aa01a26fbbdcd1a3650494dd11"},
+ {file = "grpcio-1.65.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e6a8f3d6c41e6b642870afe6cafbaf7b61c57317f9ec66d0efdaf19db992b90"},
+ {file = "grpcio-1.65.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:1faaf7355ceed07ceaef0b9dcefa4c98daf1dd8840ed75c2de128c3f4a4d859d"},
+ {file = "grpcio-1.65.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:60f1f38eed830488ad2a1b11579ef0f345ff16fffdad1d24d9fbc97ba31804ff"},
+ {file = "grpcio-1.65.1-cp312-cp312-win32.whl", hash = "sha256:e75acfa52daf5ea0712e8aa82f0003bba964de7ae22c26d208cbd7bc08500177"},
+ {file = "grpcio-1.65.1-cp312-cp312-win_amd64.whl", hash = "sha256:ff5a84907e51924973aa05ed8759210d8cdae7ffcf9e44fd17646cf4a902df59"},
+ {file = "grpcio-1.65.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:1fbd6331f18c3acd7e09d17fd840c096f56eaf0ef830fbd50af45ae9dc8dfd83"},
+ {file = "grpcio-1.65.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:de5b6be29116e094c5ef9d9e4252e7eb143e3d5f6bd6d50a78075553ab4930b0"},
+ {file = "grpcio-1.65.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:e4a3cdba62b2d6aeae6027ae65f350de6dc082b72e6215eccf82628e79efe9ba"},
+ {file = "grpcio-1.65.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:941c4869aa229d88706b78187d60d66aca77fe5c32518b79e3c3e03fc26109a2"},
+ {file = "grpcio-1.65.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f40cebe5edb518d78b8131e87cb83b3ee688984de38a232024b9b44e74ee53d3"},
+ {file = "grpcio-1.65.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:2ca684ba331fb249d8a1ce88db5394e70dbcd96e58d8c4b7e0d7b141a453dce9"},
+ {file = "grpcio-1.65.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8558f0083ddaf5de64a59c790bffd7568e353914c0c551eae2955f54ee4b857f"},
+ {file = "grpcio-1.65.1-cp38-cp38-win32.whl", hash = "sha256:8d8143a3e3966f85dce6c5cc45387ec36552174ba5712c5dc6fcc0898fb324c0"},
+ {file = "grpcio-1.65.1-cp38-cp38-win_amd64.whl", hash = "sha256:76e81a86424d6ca1ce7c16b15bdd6a964a42b40544bf796a48da241fdaf61153"},
+ {file = "grpcio-1.65.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:cb5175f45c980ff418998723ea1b3869cce3766d2ab4e4916fbd3cedbc9d0ed3"},
+ {file = "grpcio-1.65.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b12c1aa7b95abe73b3e04e052c8b362655b41c7798da69f1eaf8d186c7d204df"},
+ {file = "grpcio-1.65.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:3019fb50128b21a5e018d89569ffaaaa361680e1346c2f261bb84a91082eb3d3"},
+ {file = "grpcio-1.65.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ae15275ed98ea267f64ee9ddedf8ecd5306a5b5bb87972a48bfe24af24153e8"},
+ {file = "grpcio-1.65.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f096ffb881f37e8d4f958b63c74bfc400c7cebd7a944b027357cd2fb8d91a57"},
+ {file = "grpcio-1.65.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2f56b5a68fdcf17a0a1d524bf177218c3c69b3947cb239ea222c6f1867c3ab68"},
+ {file = "grpcio-1.65.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:941596d419b9736ab548aa0feb5bbba922f98872668847bf0720b42d1d227b9e"},
+ {file = "grpcio-1.65.1-cp39-cp39-win32.whl", hash = "sha256:5fd7337a823b890215f07d429f4f193d24b80d62a5485cf88ee06648591a0c57"},
+ {file = "grpcio-1.65.1-cp39-cp39-win_amd64.whl", hash = "sha256:1bceeec568372cbebf554eae1b436b06c2ff24cfaf04afade729fb9035408c6c"},
+ {file = "grpcio-1.65.1.tar.gz", hash = "sha256:3c492301988cd720cd145d84e17318d45af342e29ef93141228f9cd73222368b"},
+]
+
+[package.extras]
+protobuf = ["grpcio-tools (>=1.65.1)"]
+
+[[package]]
+name = "grpcio-status"
+version = "1.62.2"
+description = "Status proto mapping for gRPC"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "grpcio-status-1.62.2.tar.gz", hash = "sha256:62e1bfcb02025a1cd73732a2d33672d3e9d0df4d21c12c51e0bbcaf09bab742a"},
+ {file = "grpcio_status-1.62.2-py3-none-any.whl", hash = "sha256:206ddf0eb36bc99b033f03b2c8e95d319f0044defae9b41ae21408e7e0cda48f"},
+]
+
+[package.dependencies]
+googleapis-common-protos = ">=1.5.5"
+grpcio = ">=1.62.2"
+protobuf = ">=4.21.6"
+
+[[package]]
+name = "grpcio-tools"
+version = "1.62.2"
+description = "Protobuf code generator for gRPC"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "grpcio-tools-1.62.2.tar.gz", hash = "sha256:5fd5e1582b678e6b941ee5f5809340be5e0724691df5299aae8226640f94e18f"},
+ {file = "grpcio_tools-1.62.2-cp310-cp310-linux_armv7l.whl", hash = "sha256:1679b4903aed2dc5bd8cb22a452225b05dc8470a076f14fd703581efc0740cdb"},
+ {file = "grpcio_tools-1.62.2-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:9d41e0e47dd075c075bb8f103422968a65dd0d8dc8613288f573ae91eb1053ba"},
+ {file = "grpcio_tools-1.62.2-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:987e774f74296842bbffd55ea8826370f70c499e5b5f71a8cf3103838b6ee9c3"},
+ {file = "grpcio_tools-1.62.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40cd4eeea4b25bcb6903b82930d579027d034ba944393c4751cdefd9c49e6989"},
+ {file = "grpcio_tools-1.62.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6746bc823958499a3cf8963cc1de00072962fb5e629f26d658882d3f4c35095"},
+ {file = "grpcio_tools-1.62.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:2ed775e844566ce9ce089be9a81a8b928623b8ee5820f5e4d58c1a9d33dfc5ae"},
+ {file = "grpcio_tools-1.62.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bdc5dd3f57b5368d5d661d5d3703bcaa38bceca59d25955dff66244dbc987271"},
+ {file = "grpcio_tools-1.62.2-cp310-cp310-win32.whl", hash = "sha256:3a8d6f07e64c0c7756f4e0c4781d9d5a2b9cc9cbd28f7032a6fb8d4f847d0445"},
+ {file = "grpcio_tools-1.62.2-cp310-cp310-win_amd64.whl", hash = "sha256:e33b59fb3efdddeb97ded988a871710033e8638534c826567738d3edce528752"},
+ {file = "grpcio_tools-1.62.2-cp311-cp311-linux_armv7l.whl", hash = "sha256:472505d030135d73afe4143b0873efe0dcb385bd6d847553b4f3afe07679af00"},
+ {file = "grpcio_tools-1.62.2-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:ec674b4440ef4311ac1245a709e87b36aca493ddc6850eebe0b278d1f2b6e7d1"},
+ {file = "grpcio_tools-1.62.2-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:184b4174d4bd82089d706e8223e46c42390a6ebac191073b9772abc77308f9fa"},
+ {file = "grpcio_tools-1.62.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c195d74fe98541178ece7a50dad2197d43991e0f77372b9a88da438be2486f12"},
+ {file = "grpcio_tools-1.62.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a34d97c62e61bfe9e6cff0410fe144ac8cca2fc979ad0be46b7edf026339d161"},
+ {file = "grpcio_tools-1.62.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cbb8453ae83a1db2452b7fe0f4b78e4a8dd32be0f2b2b73591ae620d4d784d3d"},
+ {file = "grpcio_tools-1.62.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4f989e5cebead3ae92c6abf6bf7b19949e1563a776aea896ac5933f143f0c45d"},
+ {file = "grpcio_tools-1.62.2-cp311-cp311-win32.whl", hash = "sha256:c48fabe40b9170f4e3d7dd2c252e4f1ff395dc24e49ac15fc724b1b6f11724da"},
+ {file = "grpcio_tools-1.62.2-cp311-cp311-win_amd64.whl", hash = "sha256:8c616d0ad872e3780693fce6a3ac8ef00fc0963e6d7815ce9dcfae68ba0fc287"},
+ {file = "grpcio_tools-1.62.2-cp312-cp312-linux_armv7l.whl", hash = "sha256:10cc3321704ecd17c93cf68c99c35467a8a97ffaaed53207e9b2da6ae0308ee1"},
+ {file = "grpcio_tools-1.62.2-cp312-cp312-macosx_10_10_universal2.whl", hash = "sha256:9be84ff6d47fd61462be7523b49d7ba01adf67ce4e1447eae37721ab32464dd8"},
+ {file = "grpcio_tools-1.62.2-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:d82f681c9a9d933a9d8068e8e382977768e7779ddb8870fa0cf918d8250d1532"},
+ {file = "grpcio_tools-1.62.2-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:04c607029ae3660fb1624ed273811ffe09d57d84287d37e63b5b802a35897329"},
+ {file = "grpcio_tools-1.62.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72b61332f1b439c14cbd3815174a8f1d35067a02047c32decd406b3a09bb9890"},
+ {file = "grpcio_tools-1.62.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8214820990d01b52845f9fbcb92d2b7384a0c321b303e3ac614c219dc7d1d3af"},
+ {file = "grpcio_tools-1.62.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:462e0ab8dd7c7b70bfd6e3195eebc177549ede5cf3189814850c76f9a340d7ce"},
+ {file = "grpcio_tools-1.62.2-cp312-cp312-win32.whl", hash = "sha256:fa107460c842e4c1a6266150881694fefd4f33baa544ea9489601810c2210ef8"},
+ {file = "grpcio_tools-1.62.2-cp312-cp312-win_amd64.whl", hash = "sha256:759c60f24c33a181bbbc1232a6752f9b49fbb1583312a4917e2b389fea0fb0f2"},
+ {file = "grpcio_tools-1.62.2-cp37-cp37m-linux_armv7l.whl", hash = "sha256:45db5da2bcfa88f2b86b57ef35daaae85c60bd6754a051d35d9449c959925b57"},
+ {file = "grpcio_tools-1.62.2-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:ab84bae88597133f6ea7a2bdc57b2fda98a266fe8d8d4763652cbefd20e73ad7"},
+ {file = "grpcio_tools-1.62.2-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:7a49bccae1c7d154b78e991885c3111c9ad8c8fa98e91233de425718f47c6139"},
+ {file = "grpcio_tools-1.62.2-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7e439476b29d6dac363b321781a113794397afceeb97dad85349db5f1cb5e9a"},
+ {file = "grpcio_tools-1.62.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ea369c4d1567d1acdf69c8ea74144f4ccad9e545df7f9a4fc64c94fa7684ba3"},
+ {file = "grpcio_tools-1.62.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4f955702dc4b530696375251319d05223b729ed24e8673c2129f7a75d2caefbb"},
+ {file = "grpcio_tools-1.62.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:3708a747aa4b6b505727282ca887041174e146ae030ebcadaf4c1d346858df62"},
+ {file = "grpcio_tools-1.62.2-cp37-cp37m-win_amd64.whl", hash = "sha256:2ce149ea55eadb486a7fb75a20f63ef3ac065ee6a0240ed25f3549ce7954c653"},
+ {file = "grpcio_tools-1.62.2-cp38-cp38-linux_armv7l.whl", hash = "sha256:58cbb24b3fa6ae35aa9c210fcea3a51aa5fef0cd25618eb4fd94f746d5a9b703"},
+ {file = "grpcio_tools-1.62.2-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:6413581e14a80e0b4532577766cf0586de4dd33766a31b3eb5374a746771c07d"},
+ {file = "grpcio_tools-1.62.2-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:47117c8a7e861382470d0e22d336e5a91fdc5f851d1db44fa784b9acea190d87"},
+ {file = "grpcio_tools-1.62.2-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9f1ba79a253df9e553d20319c615fa2b429684580fa042dba618d7f6649ac7e4"},
+ {file = "grpcio_tools-1.62.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:04a394cf5e51ba9be412eb9f6c482b6270bd81016e033e8eb7d21b8cc28fe8b5"},
+ {file = "grpcio_tools-1.62.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3c53b221378b035ae2f1881cbc3aca42a6075a8e90e1a342c2f205eb1d1aa6a1"},
+ {file = "grpcio_tools-1.62.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c384c838b34d1b67068e51b5bbe49caa6aa3633acd158f1ab16b5da8d226bc53"},
+ {file = "grpcio_tools-1.62.2-cp38-cp38-win32.whl", hash = "sha256:19ea69e41c3565932aa28a202d1875ec56786aea46a2eab54a3b28e8a27f9517"},
+ {file = "grpcio_tools-1.62.2-cp38-cp38-win_amd64.whl", hash = "sha256:1d768a5c07279a4c461ebf52d0cec1c6ca85c6291c71ec2703fe3c3e7e28e8c4"},
+ {file = "grpcio_tools-1.62.2-cp39-cp39-linux_armv7l.whl", hash = "sha256:5b07b5874187e170edfbd7aa2ca3a54ebf3b2952487653e8c0b0d83601c33035"},
+ {file = "grpcio_tools-1.62.2-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:d58389fe8be206ddfb4fa703db1e24c956856fcb9a81da62b13577b3a8f7fda7"},
+ {file = "grpcio_tools-1.62.2-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:7d8b4e00c3d7237b92260fc18a561cd81f1da82e8be100db1b7d816250defc66"},
+ {file = "grpcio_tools-1.62.2-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fe08d2038f2b7c53259b5c49e0ad08c8e0ce2b548d8185993e7ef67e8592cca"},
+ {file = "grpcio_tools-1.62.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19216e1fb26dbe23d12a810517e1b3fbb8d4f98b1a3fbebeec9d93a79f092de4"},
+ {file = "grpcio_tools-1.62.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:b8574469ecc4ff41d6bb95f44e0297cdb0d95bade388552a9a444db9cd7485cd"},
+ {file = "grpcio_tools-1.62.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4f6f32d39283ea834a493fccf0ebe9cfddee7577bdcc27736ad4be1732a36399"},
+ {file = "grpcio_tools-1.62.2-cp39-cp39-win32.whl", hash = "sha256:76eb459bdf3fb666e01883270beee18f3f11ed44488486b61cd210b4e0e17cc1"},
+ {file = "grpcio_tools-1.62.2-cp39-cp39-win_amd64.whl", hash = "sha256:217c2ee6a7ce519a55958b8622e21804f6fdb774db08c322f4c9536c35fdce7c"},
+]
+
+[package.dependencies]
+grpcio = ">=1.62.2"
+protobuf = ">=4.21.6,<5.0dev"
+setuptools = "*"
+
[[package]]
name = "gunicorn"
version = "22.0.0"
@@ -1633,6 +2559,32 @@ files = [
{file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"},
]
+[[package]]
+name = "h2"
+version = "4.1.0"
+description = "HTTP/2 State-Machine based protocol implementation"
+optional = false
+python-versions = ">=3.6.1"
+files = [
+ {file = "h2-4.1.0-py3-none-any.whl", hash = "sha256:03a46bcf682256c95b5fd9e9a99c1323584c3eec6440d379b9903d709476bc6d"},
+ {file = "h2-4.1.0.tar.gz", hash = "sha256:a83aca08fbe7aacb79fec788c9c0bac936343560ed9ec18b82a13a12c28d2abb"},
+]
+
+[package.dependencies]
+hpack = ">=4.0,<5"
+hyperframe = ">=6.0,<7"
+
+[[package]]
+name = "hpack"
+version = "4.0.0"
+description = "Pure-Python HPACK header compression"
+optional = false
+python-versions = ">=3.6.1"
+files = [
+ {file = "hpack-4.0.0-py3-none-any.whl", hash = "sha256:84a076fad3dc9a9f8063ccb8041ef100867b1878b25ef0ee63847a5d53818a6c"},
+ {file = "hpack-4.0.0.tar.gz", hash = "sha256:fc41de0c63e687ebffde81187a948221294896f6bdc0ae2312708df339430095"},
+]
+
[[package]]
name = "httpcore"
version = "1.0.5"
@@ -1716,6 +2668,7 @@ files = [
[package.dependencies]
anyio = "*"
certifi = "*"
+h2 = {version = ">=3,<5", optional = true, markers = "extra == \"http2\""}
httpcore = "==1.*"
idna = "*"
sniffio = "*"
@@ -1726,6 +2679,76 @@ cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"]
http2 = ["h2 (>=3,<5)"]
socks = ["socksio (==1.*)"]
+[[package]]
+name = "httpx-sse"
+version = "0.4.0"
+description = "Consume Server-Sent Event (SSE) messages with HTTPX."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "httpx-sse-0.4.0.tar.gz", hash = "sha256:1e81a3a3070ce322add1d3529ed42eb5f70817f45ed6ec915ab753f961139721"},
+ {file = "httpx_sse-0.4.0-py3-none-any.whl", hash = "sha256:f329af6eae57eaa2bdfd962b42524764af68075ea87370a2de920af5341e318f"},
+]
+
+[[package]]
+name = "huggingface-hub"
+version = "0.24.0"
+description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub"
+optional = false
+python-versions = ">=3.8.0"
+files = [
+ {file = "huggingface_hub-0.24.0-py3-none-any.whl", hash = "sha256:7ad92edefb93d8145c061f6df8d99df2ff85f8379ba5fac8a95aca0642afa5d7"},
+ {file = "huggingface_hub-0.24.0.tar.gz", hash = "sha256:6c7092736b577d89d57b3cdfea026f1b0dc2234ae783fa0d59caf1bf7d52dfa7"},
+]
+
+[package.dependencies]
+filelock = "*"
+fsspec = ">=2023.5.0"
+packaging = ">=20.9"
+pyyaml = ">=5.1"
+requests = "*"
+tqdm = ">=4.42.1"
+typing-extensions = ">=3.7.4.3"
+
+[package.extras]
+all = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "fastapi", "gradio", "jedi", "minijinja (>=1.0)", "mypy (==1.5.1)", "numpy", "pytest (>=8.1.1,<8.2.2)", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-mock", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.5.0)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"]
+cli = ["InquirerPy (==0.3.4)"]
+dev = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "fastapi", "gradio", "jedi", "minijinja (>=1.0)", "mypy (==1.5.1)", "numpy", "pytest (>=8.1.1,<8.2.2)", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-mock", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.5.0)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"]
+fastai = ["fastai (>=2.4)", "fastcore (>=1.3.27)", "toml"]
+hf-transfer = ["hf-transfer (>=0.1.4)"]
+inference = ["aiohttp", "minijinja (>=1.0)"]
+quality = ["mypy (==1.5.1)", "ruff (>=0.5.0)"]
+tensorflow = ["graphviz", "pydot", "tensorflow"]
+tensorflow-testing = ["keras (<3.0)", "tensorflow"]
+testing = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "fastapi", "gradio", "jedi", "minijinja (>=1.0)", "numpy", "pytest (>=8.1.1,<8.2.2)", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-mock", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "soundfile", "urllib3 (<2.0)"]
+torch = ["safetensors[torch]", "torch"]
+typing = ["types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)"]
+
+[[package]]
+name = "humanfriendly"
+version = "10.0"
+description = "Human friendly output for text interfaces using Python"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+files = [
+ {file = "humanfriendly-10.0-py2.py3-none-any.whl", hash = "sha256:1697e1a8a8f550fd43c2865cd84542fc175a61dcb779b6fee18cf6b6ccba1477"},
+ {file = "humanfriendly-10.0.tar.gz", hash = "sha256:6b0b831ce8f15f7300721aa49829fc4e83921a9a301cc7f606be6686a2288ddc"},
+]
+
+[package.dependencies]
+pyreadline3 = {version = "*", markers = "sys_platform == \"win32\" and python_version >= \"3.8\""}
+
+[[package]]
+name = "hyperframe"
+version = "6.0.1"
+description = "HTTP/2 framing layer for Python"
+optional = false
+python-versions = ">=3.6.1"
+files = [
+ {file = "hyperframe-6.0.1-py3-none-any.whl", hash = "sha256:0ec6bafd80d8ad2195c4f03aacba3a8265e57bc4cff261e802bf39970ed02a15"},
+ {file = "hyperframe-6.0.1.tar.gz", hash = "sha256:ae510046231dc8e9ecb1a6586f63d2347bf4c8905914aa84ba585ae85f28a914"},
+]
+
[[package]]
name = "identify"
version = "2.6.0"
@@ -1770,6 +2793,21 @@ docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.link
perf = ["ipython"]
testing = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"]
+[[package]]
+name = "importlib-resources"
+version = "6.4.0"
+description = "Read resources from Python packages"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "importlib_resources-6.4.0-py3-none-any.whl", hash = "sha256:50d10f043df931902d4194ea07ec57960f66a80449ff867bfe782b4c486ba78c"},
+ {file = "importlib_resources-6.4.0.tar.gz", hash = "sha256:cdb2b453b8046ca4e3798eb1d84f3cce1446a0e8e7b5ef4efb600f19fc398145"},
+]
+
+[package.extras]
+docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"]
+testing = ["jaraco.test (>=5.4)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)", "zipp (>=3.17)"]
+
[[package]]
name = "iniconfig"
version = "2.0.0"
@@ -1781,6 +2819,38 @@ files = [
{file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"},
]
+[[package]]
+name = "instructor"
+version = "1.3.3"
+description = "structured outputs for llm"
+optional = false
+python-versions = "<4.0,>=3.9"
+files = [
+ {file = "instructor-1.3.3-py3-none-any.whl", hash = "sha256:94b114b39a1181fa348d162e6e4ff5c4d985324736020c0233fed5d4db444dbd"},
+ {file = "instructor-1.3.3.tar.gz", hash = "sha256:e27bf3c1187b0b2130ea38ecde7c2b4f571d6a5ce1397fb15c27490988b45441"},
+]
+
+[package.dependencies]
+aiohttp = ">=3.9.1,<4.0.0"
+docstring-parser = ">=0.16,<0.17"
+jiter = ">=0.4.1,<0.5.0"
+openai = ">=1.1.0,<2.0.0"
+pydantic = ">=2.7.0,<3.0.0"
+pydantic-core = ">=2.18.0,<3.0.0"
+rich = ">=13.7.0,<14.0.0"
+tenacity = ">=8.2.3,<9.0.0"
+typer = ">=0.9.0,<1.0.0"
+
+[package.extras]
+anthropic = ["anthropic (>=0.27.0,<0.28.0)", "xmltodict (>=0.13.0,<0.14.0)"]
+cohere = ["cohere (>=5.1.8,<6.0.0)"]
+google-generativeai = ["google-generativeai (>=0.5.4,<0.6.0)"]
+groq = ["groq (>=0.4.2,<0.5.0)"]
+litellm = ["litellm (>=1.35.31,<2.0.0)"]
+mistralai = ["mistralai (>=0.1.8,<0.2.0)"]
+test-docs = ["anthropic (>=0.27.0,<0.28.0)", "cohere (>=5.1.8,<6.0.0)", "diskcache (>=5.6.3,<6.0.0)", "fastapi (>=0.109.2,<0.110.0)", "groq (>=0.4.2,<0.5.0)", "litellm (>=1.35.31,<2.0.0)", "mistralai (>=0.1.8,<0.2.0)", "pandas (>=2.2.0,<3.0.0)", "pydantic_extra_types (>=2.6.0,<3.0.0)", "redis (>=5.0.1,<6.0.0)", "tabulate (>=0.9.0,<0.10.0)"]
+vertexai = ["google-cloud-aiplatform (>=1.52.0,<2.0.0)", "jsonref (>=1.1.0,<2.0.0)"]
+
[[package]]
name = "ipykernel"
version = "6.29.5"
@@ -1899,6 +2969,87 @@ MarkupSafe = ">=2.0"
[package.extras]
i18n = ["Babel (>=2.7)"]
+[[package]]
+name = "jiter"
+version = "0.4.2"
+description = "Fast iterable JSON parser."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "jiter-0.4.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:c2b003ff58d14f5e182b875acd5177b2367245c19a03be9a2230535d296f7550"},
+ {file = "jiter-0.4.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b48c77c25f094707731cd5bad6b776046846b60a27ee20efc8fadfb10a89415f"},
+ {file = "jiter-0.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f50ad6b172bde4d45f4d4ea10c49282a337b8bb735afc99763dfa55ea84a743"},
+ {file = "jiter-0.4.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:95f6001e86f525fbbc9706db2078dc22be078b0950de55b92d37041930f5f940"},
+ {file = "jiter-0.4.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:16646ef23b62b007de80460d303ebb2d81e355dac9389c787cec87cdd7ffef2f"},
+ {file = "jiter-0.4.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b4e847c13b0bf1255c711a92330e7a8cb8b5cdd1e37d7db309627bcdd3367ff"},
+ {file = "jiter-0.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c536589be60e4c5f2b20fadc4db7e9f55d4c9df3551f29ddf1c4a18dcc9dd54"},
+ {file = "jiter-0.4.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b3b2763996167830889a854b4ded30bb90897f9b76be78069c50c3ec4540950e"},
+ {file = "jiter-0.4.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:675e8ab98c99495091af6b6e9bf2b6353bcf81f25ab6ce27d36127e315b4505d"},
+ {file = "jiter-0.4.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e48e43d9d999aaf55f53406b8846ff8cbe3e47ee4b9dc37e5a10a65ce760809f"},
+ {file = "jiter-0.4.2-cp310-none-win32.whl", hash = "sha256:881b6e67c50bc36acb3570eda693763c8cd77d590940e06fa6d325d0da52ec1b"},
+ {file = "jiter-0.4.2-cp310-none-win_amd64.whl", hash = "sha256:bb8f7b43259efc6add0d721ade2953e064b24e2026d26d979bc09ec080844cef"},
+ {file = "jiter-0.4.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:24ad336ac47f274fa83f6fbedcabff9d3387c80f67c66b992688e6a8ba2c47e9"},
+ {file = "jiter-0.4.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fc392a220095730afe365ce1516f2f88bb085a2fd29ea191be9c6e3c71713d9a"},
+ {file = "jiter-0.4.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1fdc408de36c81460896de0176f2f7b9f3574dcd35693a0b2c00f4ca34c98e4"},
+ {file = "jiter-0.4.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c10ad76722ee6a8c820b0db06a793c08b7d679e5201b9563015bd1e06c959a09"},
+ {file = "jiter-0.4.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dbb46d1e9c82bba87f0cbda38413e49448a7df35b1e55917124bff9f38974a23"},
+ {file = "jiter-0.4.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:194e28ef4b5f3b61408cb2ee6b6dcbcdb0c9063d01b92b01345b7605692849f5"},
+ {file = "jiter-0.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f0a447533eccd62748a727e058efa10a8d7cf1de8ffe1a4d705ecb41dad9090"},
+ {file = "jiter-0.4.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5f7704d7260bbb88cca3453951af739589132b26e896a3144fa2dae2263716d7"},
+ {file = "jiter-0.4.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:01427458bc9550f2eda09d425755330e7d0eb09adce099577433bebf05d28d59"},
+ {file = "jiter-0.4.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:159b8416879c0053b17c352f70b67b749ef5b2924c6154318ecf71918aab0905"},
+ {file = "jiter-0.4.2-cp311-none-win32.whl", hash = "sha256:f2445234acfb79048ce1a0d5d0e181abb9afd9e4a29d8d9988fe26cc5773a81a"},
+ {file = "jiter-0.4.2-cp311-none-win_amd64.whl", hash = "sha256:e15a65f233b6b0e5ac10ddf3b97ceb18aa9ffba096259961641d78b4ee321bd5"},
+ {file = "jiter-0.4.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:d61d59521aea9745447ce50f74d39a16ef74ec9d6477d9350d77e75a3d774ad2"},
+ {file = "jiter-0.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eef607dc0acc251923427808dbd017f1998ae3c1a0430a261527aa5cbb3a942"},
+ {file = "jiter-0.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:af6bf39954646e374fc47429c656372ac731a6a26b644158a5a84bcdbed33a47"},
+ {file = "jiter-0.4.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8f509d23606e476852ee46a2b65b5c4ad3905f17424d9cc19c1dffa1c94ba3c6"},
+ {file = "jiter-0.4.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59672774daa44ee140aada0c781c82bee4d9ac5e522966186cfb6b3c217d8a51"},
+ {file = "jiter-0.4.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:24a0458efac5afeca254cf557b8a654e17013075a69905c78f88d557f129d871"},
+ {file = "jiter-0.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8860766d1c293e75c1bb4e25b74fa987e3adf199cac3f5f9e6e49c2bebf092f"},
+ {file = "jiter-0.4.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a109f3281b72bbf4921fe43db1005c004a38559ca0b6c4985add81777dfe0a44"},
+ {file = "jiter-0.4.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:faa7e667454b77ad2f0ef87db39f4944de759617aadf210ea2b73f26bb24755f"},
+ {file = "jiter-0.4.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3512f8b00cafb6780b427cb6282800d2bf8277161d9c917830661bd4ed1d3528"},
+ {file = "jiter-0.4.2-cp312-none-win32.whl", hash = "sha256:853b35d508ee5b66d06630473c1c0b7bb5e29bf4785c9d2202437116c94f7e21"},
+ {file = "jiter-0.4.2-cp312-none-win_amd64.whl", hash = "sha256:4a3a8197784278eb8b24cb02c45e1cad67c2ce5b5b758adfb19b87f74bbdff9c"},
+ {file = "jiter-0.4.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:ca2a4d750aed3154b89f2efb148609fc985fad8db739460797aaf9b478acedda"},
+ {file = "jiter-0.4.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0e6c304b3cc6896256727e1fb8991c7179a345eca8224e201795e9cacf4683b0"},
+ {file = "jiter-0.4.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7cc34ac708ae1750d077e490321761ec4b9a055b994cbdd1d6fbd37099e4aa7b"},
+ {file = "jiter-0.4.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8c93383875ab8d2e4f760aaff335b4a12ff32d4f9cf49c4498d657734f611466"},
+ {file = "jiter-0.4.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce197ee044add576afca0955b42142dd0312639adb6ebadbdbe4277f2855614f"},
+ {file = "jiter-0.4.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5a427716813ff65480ca5b5117cfa099f49b49cd38051f8609bd0d5493013ca0"},
+ {file = "jiter-0.4.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:479990218353356234669e70fac53e5eb6f739a10db25316171aede2c97d9364"},
+ {file = "jiter-0.4.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d35a91ec5ac74cf33234c431505299fa91c0a197c2dbafd47400aca7c69489d4"},
+ {file = "jiter-0.4.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b27189847193708c94ad10ca0d891309342ae882725d2187cf5d2db02bde8d1b"},
+ {file = "jiter-0.4.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:76c255308cd1093fb411a03756b7bb220e48d4a98c30cbc79ed448bf3978e27d"},
+ {file = "jiter-0.4.2-cp38-none-win32.whl", hash = "sha256:bb77438060bad49cc251941e6701b31138365c8a0ddaf10cdded2fcc6dd30701"},
+ {file = "jiter-0.4.2-cp38-none-win_amd64.whl", hash = "sha256:ce858af19f7ce0d4b51c9f6c0c9d08f1e9dcef1986c5875efd0674a7054292ca"},
+ {file = "jiter-0.4.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:6128838a2f357b3921b2a3242d5dc002ae4255ecc8f9f05c20d56d7d2d79c5ad"},
+ {file = "jiter-0.4.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f2420cebb9ba856cb57dcab1d2d8def949b464b0db09c22a4e4dbd52fff7b200"},
+ {file = "jiter-0.4.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5d13d8128e853b320e00bb18bd4bb8b136cc0936091dc87633648fc688eb705"},
+ {file = "jiter-0.4.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eba5d6e54f149c508ba88677f97d3dc7dd75e9980d234bbac8027ac6db0763a3"},
+ {file = "jiter-0.4.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0fad5d64af0bc0545237419bf4150d8de56f0bd217434bdd1a59730327252bef"},
+ {file = "jiter-0.4.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d179e7bca89cf5719bd761dd37a341ff0f98199ecaa9c14af09792e47e977cc"},
+ {file = "jiter-0.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36353caee9f103d8ee7bda077f6400505b0f370e27eabcab33a33d21de12a2a6"},
+ {file = "jiter-0.4.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dd146c25bce576ca5db64fc7eccb8862af00f1f0e30108796953f12a53660e4c"},
+ {file = "jiter-0.4.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:14b7c08cadbcd703041c66dc30e24e17de2f340281cac0e69374223ecf153aa4"},
+ {file = "jiter-0.4.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a90f1a8b3d29aea198f8ea2b01148276ced8056e5103f32525266b3d880e65c9"},
+ {file = "jiter-0.4.2-cp39-none-win32.whl", hash = "sha256:25b174997c780337b61ae57b1723455eecae9a17a9659044fd3c3b369190063f"},
+ {file = "jiter-0.4.2-cp39-none-win_amd64.whl", hash = "sha256:bef62cea18521c5b99368147040c7e560c55098a35c93456f110678a2d34189a"},
+ {file = "jiter-0.4.2.tar.gz", hash = "sha256:29b9d44f23f0c05f46d482f4ebf03213ee290d77999525d0975a17f875bf1eea"},
+]
+
+[[package]]
+name = "jmespath"
+version = "1.0.1"
+description = "JSON Matching Expressions"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"},
+ {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"},
+]
+
[[package]]
name = "jq"
version = "1.7.0"
@@ -2006,6 +3157,17 @@ files = [
{file = "jsonpointer-3.0.0.tar.gz", hash = "sha256:2b2d729f2091522d61c3b31f82e11870f60b68f43fbc705cb76bf4b832af59ef"},
]
+[[package]]
+name = "jsonref"
+version = "1.1.0"
+description = "jsonref is a library for automatic dereferencing of JSON Reference objects for Python."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "jsonref-1.1.0-py3-none-any.whl", hash = "sha256:590dc7773df6c21cbf948b5dac07a72a251db28b0238ceecce0a2abfa8ec30a9"},
+ {file = "jsonref-1.1.0.tar.gz", hash = "sha256:32fe8e1d85af0fdefbebce950af85590b22b60f9e95443176adbde4e1ecea552"},
+]
+
[[package]]
name = "jupyter-client"
version = "8.6.2"
@@ -2048,21 +3210,47 @@ traitlets = ">=5.3"
docs = ["myst-parser", "pydata-sphinx-theme", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "traitlets"]
test = ["ipykernel", "pre-commit", "pytest (<8)", "pytest-cov", "pytest-timeout"]
+[[package]]
+name = "kubernetes"
+version = "30.1.0"
+description = "Kubernetes python client"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "kubernetes-30.1.0-py2.py3-none-any.whl", hash = "sha256:e212e8b7579031dd2e512168b617373bc1e03888d41ac4e04039240a292d478d"},
+ {file = "kubernetes-30.1.0.tar.gz", hash = "sha256:41e4c77af9f28e7a6c314e3bd06a8c6229ddd787cad684e0ab9f69b498e98ebc"},
+]
+
+[package.dependencies]
+certifi = ">=14.05.14"
+google-auth = ">=1.0.1"
+oauthlib = ">=3.2.2"
+python-dateutil = ">=2.5.3"
+pyyaml = ">=5.4.1"
+requests = "*"
+requests-oauthlib = "*"
+six = ">=1.9.0"
+urllib3 = ">=1.24.2"
+websocket-client = ">=0.32.0,<0.40.0 || >0.40.0,<0.41.dev0 || >=0.43.dev0"
+
+[package.extras]
+adal = ["adal (>=1.0.2)"]
+
[[package]]
name = "langchain"
-version = "0.2.12"
+version = "0.2.10"
description = "Building applications with LLMs through composability"
optional = false
python-versions = "<4.0,>=3.8.1"
files = [
- {file = "langchain-0.2.12-py3-none-any.whl", hash = "sha256:565d2f5df1c06815d1c684400218ec4ae5e1027887aad343226fad846c54e726"},
- {file = "langchain-0.2.12.tar.gz", hash = "sha256:fe7bd409c133017446fec54c38a5e7cb14f74e020090d7b5065374badf71e6d1"},
+ {file = "langchain-0.2.10-py3-none-any.whl", hash = "sha256:b4fb58c7faf4f4999cfe3325474979a7121a1737dd101655a723a1d957ef0617"},
+ {file = "langchain-0.2.10.tar.gz", hash = "sha256:1f861c1b59ac9c91b02bb0fa58d3adad1c1d0686636872b5b357bbce3ce41d06"},
]
[package.dependencies]
aiohttp = ">=3.8.3,<4.0.0"
async-timeout = {version = ">=4.0.0,<5.0.0", markers = "python_version < \"3.11\""}
-langchain-core = ">=0.2.27,<0.3.0"
+langchain-core = ">=0.2.22,<0.3.0"
langchain-text-splitters = ">=0.2.0,<0.3.0"
langsmith = ">=0.1.17,<0.2.0"
numpy = [
@@ -2075,22 +3263,43 @@ requests = ">=2,<3"
SQLAlchemy = ">=1.4,<3"
tenacity = ">=8.1.0,<8.4.0 || >8.4.0,<9.0.0"
+[[package]]
+name = "langchain-cohere"
+version = "0.1.9"
+description = "An integration package connecting Cohere and LangChain"
+optional = false
+python-versions = "<4.0,>=3.8.1"
+files = [
+ {file = "langchain_cohere-0.1.9-py3-none-any.whl", hash = "sha256:96d6a15125797319474ac84b54024e5024f3f5fc45032ebf228d95d6998c9b13"},
+ {file = "langchain_cohere-0.1.9.tar.gz", hash = "sha256:549620d23bc3d77f62d1045787095fe2c1cfa233dba69455139f9a2f65f952fa"},
+]
+
+[package.dependencies]
+cohere = ">=5.5.6,<6.0"
+langchain-core = ">=0.2.2,<0.3"
+langchain-experimental = ">=0.0.6"
+pandas = ">=1.4.3"
+tabulate = ">=0.9.0,<0.10.0"
+
+[package.extras]
+langchain-community = ["langchain-community (>=0.2.4)"]
+
[[package]]
name = "langchain-community"
-version = "0.2.11"
+version = "0.2.9"
description = "Community contributed LangChain integrations."
optional = false
python-versions = "<4.0,>=3.8.1"
files = [
- {file = "langchain_community-0.2.11-py3-none-any.whl", hash = "sha256:465c03ba1603975d141533424185e09546ecf09e379c93aee2671bdc9b325cda"},
- {file = "langchain_community-0.2.11.tar.gz", hash = "sha256:ede261ff8202f1433f004ee90baf89f371cee37cb1abfc16dd0f8392db10b23e"},
+ {file = "langchain_community-0.2.9-py3-none-any.whl", hash = "sha256:b51d3adf9346a1161c1098917585b9e303cf24e2f5c71f5d232a0504edada5f2"},
+ {file = "langchain_community-0.2.9.tar.gz", hash = "sha256:1e7c180232916cbe35fe00509680dd1f805e32d7c87b5e80b3a9ec8754ecae37"},
]
[package.dependencies]
aiohttp = ">=3.8.3,<4.0.0"
dataclasses-json = ">=0.5.7,<0.7"
-langchain = ">=0.2.12,<0.3.0"
-langchain-core = ">=0.2.27,<0.3.0"
+langchain = ">=0.2.9,<0.3.0"
+langchain-core = ">=0.2.22,<0.3.0"
langsmith = ">=0.1.0,<0.2.0"
numpy = [
{version = ">=1,<2", markers = "python_version < \"3.12\""},
@@ -2139,6 +3348,22 @@ files = [
langchain-community = ">=0.2.5,<0.3.0"
langchain-core = ">=0.2.7,<0.3.0"
+[[package]]
+name = "langchain-openai"
+version = "0.1.17"
+description = "An integration package connecting OpenAI and LangChain"
+optional = false
+python-versions = "<4.0,>=3.8.1"
+files = [
+ {file = "langchain_openai-0.1.17-py3-none-any.whl", hash = "sha256:30bef5574ecbbbb91b8025b2dc5a1bd81fd62157d3ad1a35d820141f31c5b443"},
+ {file = "langchain_openai-0.1.17.tar.gz", hash = "sha256:c5d70ddecdcb93e146f376bdbadbb6ec69de9ac0f402cd5b83de50b655ba85ee"},
+]
+
+[package.dependencies]
+langchain-core = ">=0.2.20,<0.3.0"
+openai = ">=1.32.0,<2.0.0"
+tiktoken = ">=0.7,<1"
+
[[package]]
name = "langchain-text-splitters"
version = "0.2.2"
@@ -2171,13 +3396,13 @@ types-requests = ">=2.31.0.2,<3.0.0.0"
[[package]]
name = "langsmith"
-version = "0.1.98"
+version = "0.1.93"
description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform."
optional = false
python-versions = "<4.0,>=3.8.1"
files = [
- {file = "langsmith-0.1.98-py3-none-any.whl", hash = "sha256:f79e8a128652bbcee4606d10acb6236973b5cd7dde76e3741186d3b97b5698e9"},
- {file = "langsmith-0.1.98.tar.gz", hash = "sha256:e07678219a0502e8f26d35294e72127a39d25e32fafd091af5a7bb661e9a6bd1"},
+ {file = "langsmith-0.1.93-py3-none-any.whl", hash = "sha256:811210b9d5f108f36431bd7b997eb9476a9ecf5a2abd7ddbb606c1cdcf0f43ce"},
+ {file = "langsmith-0.1.93.tar.gz", hash = "sha256:285b6ad3a54f50fa8eb97b5f600acc57d0e37e139dd8cf2111a117d0435ba9b4"},
]
[package.dependencies]
@@ -2550,6 +3775,146 @@ files = [
{file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"},
]
+[[package]]
+name = "mem0ai"
+version = "0.0.5"
+description = "Long-term memory for AI Agents"
+optional = false
+python-versions = "<4.0,>=3.8"
+files = [
+ {file = "mem0ai-0.0.5-py3-none-any.whl", hash = "sha256:6f6e5356fd522adf0510322cd581476ea456fd7ccefca11b5ac050e9a6f00f36"},
+ {file = "mem0ai-0.0.5.tar.gz", hash = "sha256:f2ac35d15e4e620becb8d06b8ebeb1ffa85fac0b7cb2d3138056babec48dd5dd"},
+]
+
+[package.dependencies]
+boto3 = ">=1.34.144,<2.0.0"
+groq = ">=0.9.0,<0.10.0"
+openai = ">=1.33.0,<2.0.0"
+posthog = ">=3.5.0,<4.0.0"
+pydantic = ">=2.7.3,<3.0.0"
+qdrant-client = ">=1.9.1,<2.0.0"
+together = ">=1.2.1,<2.0.0"
+
+[[package]]
+name = "mmh3"
+version = "4.1.0"
+description = "Python extension for MurmurHash (MurmurHash3), a set of fast and robust hash functions."
+optional = false
+python-versions = "*"
+files = [
+ {file = "mmh3-4.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:be5ac76a8b0cd8095784e51e4c1c9c318c19edcd1709a06eb14979c8d850c31a"},
+ {file = "mmh3-4.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:98a49121afdfab67cd80e912b36404139d7deceb6773a83620137aaa0da5714c"},
+ {file = "mmh3-4.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5259ac0535874366e7d1a5423ef746e0d36a9e3c14509ce6511614bdc5a7ef5b"},
+ {file = "mmh3-4.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5950827ca0453a2be357696da509ab39646044e3fa15cad364eb65d78797437"},
+ {file = "mmh3-4.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1dd0f652ae99585b9dd26de458e5f08571522f0402155809fd1dc8852a613a39"},
+ {file = "mmh3-4.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:99d25548070942fab1e4a6f04d1626d67e66d0b81ed6571ecfca511f3edf07e6"},
+ {file = "mmh3-4.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53db8d9bad3cb66c8f35cbc894f336273f63489ce4ac416634932e3cbe79eb5b"},
+ {file = "mmh3-4.1.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75da0f615eb55295a437264cc0b736753f830b09d102aa4c2a7d719bc445ec05"},
+ {file = "mmh3-4.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b926b07fd678ea84b3a2afc1fa22ce50aeb627839c44382f3d0291e945621e1a"},
+ {file = "mmh3-4.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c5b053334f9b0af8559d6da9dc72cef0a65b325ebb3e630c680012323c950bb6"},
+ {file = "mmh3-4.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:5bf33dc43cd6de2cb86e0aa73a1cc6530f557854bbbe5d59f41ef6de2e353d7b"},
+ {file = "mmh3-4.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fa7eacd2b830727ba3dd65a365bed8a5c992ecd0c8348cf39a05cc77d22f4970"},
+ {file = "mmh3-4.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:42dfd6742b9e3eec599f85270617debfa0bbb913c545bb980c8a4fa7b2d047da"},
+ {file = "mmh3-4.1.0-cp310-cp310-win32.whl", hash = "sha256:2974ad343f0d39dcc88e93ee6afa96cedc35a9883bc067febd7ff736e207fa47"},
+ {file = "mmh3-4.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:74699a8984ded645c1a24d6078351a056f5a5f1fe5838870412a68ac5e28d865"},
+ {file = "mmh3-4.1.0-cp310-cp310-win_arm64.whl", hash = "sha256:f0dc874cedc23d46fc488a987faa6ad08ffa79e44fb08e3cd4d4cf2877c00a00"},
+ {file = "mmh3-4.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3280a463855b0eae64b681cd5b9ddd9464b73f81151e87bb7c91a811d25619e6"},
+ {file = "mmh3-4.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:97ac57c6c3301769e757d444fa7c973ceb002cb66534b39cbab5e38de61cd896"},
+ {file = "mmh3-4.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a7b6502cdb4dbd880244818ab363c8770a48cdccecf6d729ade0241b736b5ec0"},
+ {file = "mmh3-4.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52ba2da04671a9621580ddabf72f06f0e72c1c9c3b7b608849b58b11080d8f14"},
+ {file = "mmh3-4.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5a5fef4c4ecc782e6e43fbeab09cff1bac82c998a1773d3a5ee6a3605cde343e"},
+ {file = "mmh3-4.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5135358a7e00991f73b88cdc8eda5203bf9de22120d10a834c5761dbeb07dd13"},
+ {file = "mmh3-4.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cff9ae76a54f7c6fe0167c9c4028c12c1f6de52d68a31d11b6790bb2ae685560"},
+ {file = "mmh3-4.1.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6f02576a4d106d7830ca90278868bf0983554dd69183b7bbe09f2fcd51cf54f"},
+ {file = "mmh3-4.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:073d57425a23721730d3ff5485e2da489dd3c90b04e86243dd7211f889898106"},
+ {file = "mmh3-4.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:71e32ddec7f573a1a0feb8d2cf2af474c50ec21e7a8263026e8d3b4b629805db"},
+ {file = "mmh3-4.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7cbb20b29d57e76a58b40fd8b13a9130db495a12d678d651b459bf61c0714cea"},
+ {file = "mmh3-4.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:a42ad267e131d7847076bb7e31050f6c4378cd38e8f1bf7a0edd32f30224d5c9"},
+ {file = "mmh3-4.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4a013979fc9390abadc445ea2527426a0e7a4495c19b74589204f9b71bcaafeb"},
+ {file = "mmh3-4.1.0-cp311-cp311-win32.whl", hash = "sha256:1d3b1cdad7c71b7b88966301789a478af142bddcb3a2bee563f7a7d40519a00f"},
+ {file = "mmh3-4.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:0dc6dc32eb03727467da8e17deffe004fbb65e8b5ee2b502d36250d7a3f4e2ec"},
+ {file = "mmh3-4.1.0-cp311-cp311-win_arm64.whl", hash = "sha256:9ae3a5c1b32dda121c7dc26f9597ef7b01b4c56a98319a7fe86c35b8bc459ae6"},
+ {file = "mmh3-4.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0033d60c7939168ef65ddc396611077a7268bde024f2c23bdc283a19123f9e9c"},
+ {file = "mmh3-4.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d6af3e2287644b2b08b5924ed3a88c97b87b44ad08e79ca9f93d3470a54a41c5"},
+ {file = "mmh3-4.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d82eb4defa245e02bb0b0dc4f1e7ee284f8d212633389c91f7fba99ba993f0a2"},
+ {file = "mmh3-4.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba245e94b8d54765e14c2d7b6214e832557e7856d5183bc522e17884cab2f45d"},
+ {file = "mmh3-4.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb04e2feeabaad6231e89cd43b3d01a4403579aa792c9ab6fdeef45cc58d4ec0"},
+ {file = "mmh3-4.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1e3b1a27def545ce11e36158ba5d5390cdbc300cfe456a942cc89d649cf7e3b2"},
+ {file = "mmh3-4.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce0ab79ff736d7044e5e9b3bfe73958a55f79a4ae672e6213e92492ad5e734d5"},
+ {file = "mmh3-4.1.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b02268be6e0a8eeb8a924d7db85f28e47344f35c438c1e149878bb1c47b1cd3"},
+ {file = "mmh3-4.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:deb887f5fcdaf57cf646b1e062d56b06ef2f23421c80885fce18b37143cba828"},
+ {file = "mmh3-4.1.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:99dd564e9e2b512eb117bd0cbf0f79a50c45d961c2a02402787d581cec5448d5"},
+ {file = "mmh3-4.1.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:08373082dfaa38fe97aa78753d1efd21a1969e51079056ff552e687764eafdfe"},
+ {file = "mmh3-4.1.0-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:54b9c6a2ea571b714e4fe28d3e4e2db37abfd03c787a58074ea21ee9a8fd1740"},
+ {file = "mmh3-4.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a7b1edf24c69e3513f879722b97ca85e52f9032f24a52284746877f6a7304086"},
+ {file = "mmh3-4.1.0-cp312-cp312-win32.whl", hash = "sha256:411da64b951f635e1e2284b71d81a5a83580cea24994b328f8910d40bed67276"},
+ {file = "mmh3-4.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:bebc3ecb6ba18292e3d40c8712482b4477abd6981c2ebf0e60869bd90f8ac3a9"},
+ {file = "mmh3-4.1.0-cp312-cp312-win_arm64.whl", hash = "sha256:168473dd608ade6a8d2ba069600b35199a9af837d96177d3088ca91f2b3798e3"},
+ {file = "mmh3-4.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:372f4b7e1dcde175507640679a2a8790185bb71f3640fc28a4690f73da986a3b"},
+ {file = "mmh3-4.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:438584b97f6fe13e944faf590c90fc127682b57ae969f73334040d9fa1c7ffa5"},
+ {file = "mmh3-4.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6e27931b232fc676675fac8641c6ec6b596daa64d82170e8597f5a5b8bdcd3b6"},
+ {file = "mmh3-4.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:571a92bad859d7b0330e47cfd1850b76c39b615a8d8e7aa5853c1f971fd0c4b1"},
+ {file = "mmh3-4.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a69d6afe3190fa08f9e3a58e5145549f71f1f3fff27bd0800313426929c7068"},
+ {file = "mmh3-4.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:afb127be0be946b7630220908dbea0cee0d9d3c583fa9114a07156f98566dc28"},
+ {file = "mmh3-4.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:940d86522f36348ef1a494cbf7248ab3f4a1638b84b59e6c9e90408bd11ad729"},
+ {file = "mmh3-4.1.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3dcccc4935686619a8e3d1f7b6e97e3bd89a4a796247930ee97d35ea1a39341"},
+ {file = "mmh3-4.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:01bb9b90d61854dfc2407c5e5192bfb47222d74f29d140cb2dd2a69f2353f7cc"},
+ {file = "mmh3-4.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:bcb1b8b951a2c0b0fb8a5426c62a22557e2ffc52539e0a7cc46eb667b5d606a9"},
+ {file = "mmh3-4.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6477a05d5e5ab3168e82e8b106e316210ac954134f46ec529356607900aea82a"},
+ {file = "mmh3-4.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:da5892287e5bea6977364b15712a2573c16d134bc5fdcdd4cf460006cf849278"},
+ {file = "mmh3-4.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:99180d7fd2327a6fffbaff270f760576839dc6ee66d045fa3a450f3490fda7f5"},
+ {file = "mmh3-4.1.0-cp38-cp38-win32.whl", hash = "sha256:9b0d4f3949913a9f9a8fb1bb4cc6ecd52879730aab5ff8c5a3d8f5b593594b73"},
+ {file = "mmh3-4.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:598c352da1d945108aee0c3c3cfdd0e9b3edef74108f53b49d481d3990402169"},
+ {file = "mmh3-4.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:475d6d1445dd080f18f0f766277e1237fa2914e5fe3307a3b2a3044f30892103"},
+ {file = "mmh3-4.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5ca07c41e6a2880991431ac717c2a049056fff497651a76e26fc22224e8b5732"},
+ {file = "mmh3-4.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0ebe052fef4bbe30c0548d12ee46d09f1b69035ca5208a7075e55adfe091be44"},
+ {file = "mmh3-4.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eaefd42e85afb70f2b855a011f7b4d8a3c7e19c3f2681fa13118e4d8627378c5"},
+ {file = "mmh3-4.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac0ae43caae5a47afe1b63a1ae3f0986dde54b5fb2d6c29786adbfb8edc9edfb"},
+ {file = "mmh3-4.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6218666f74c8c013c221e7f5f8a693ac9cf68e5ac9a03f2373b32d77c48904de"},
+ {file = "mmh3-4.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ac59294a536ba447b5037f62d8367d7d93b696f80671c2c45645fa9f1109413c"},
+ {file = "mmh3-4.1.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:086844830fcd1e5c84fec7017ea1ee8491487cfc877847d96f86f68881569d2e"},
+ {file = "mmh3-4.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e42b38fad664f56f77f6fbca22d08450f2464baa68acdbf24841bf900eb98e87"},
+ {file = "mmh3-4.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d08b790a63a9a1cde3b5d7d733ed97d4eb884bfbc92f075a091652d6bfd7709a"},
+ {file = "mmh3-4.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:73ea4cc55e8aea28c86799ecacebca09e5f86500414870a8abaedfcbaf74d288"},
+ {file = "mmh3-4.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:f90938ff137130e47bcec8dc1f4ceb02f10178c766e2ef58a9f657ff1f62d124"},
+ {file = "mmh3-4.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:aa1f13e94b8631c8cd53259250556edcf1de71738936b60febba95750d9632bd"},
+ {file = "mmh3-4.1.0-cp39-cp39-win32.whl", hash = "sha256:a3b680b471c181490cf82da2142029edb4298e1bdfcb67c76922dedef789868d"},
+ {file = "mmh3-4.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:fefef92e9c544a8dbc08f77a8d1b6d48006a750c4375bbcd5ff8199d761e263b"},
+ {file = "mmh3-4.1.0-cp39-cp39-win_arm64.whl", hash = "sha256:8e2c1f6a2b41723a4f82bd5a762a777836d29d664fc0095f17910bea0adfd4a6"},
+ {file = "mmh3-4.1.0.tar.gz", hash = "sha256:a1cf25348b9acd229dda464a094d6170f47d2850a1fcb762a3b6172d2ce6ca4a"},
+]
+
+[package.extras]
+test = ["mypy (>=1.0)", "pytest (>=7.0.0)"]
+
+[[package]]
+name = "monotonic"
+version = "1.6"
+description = "An implementation of time.monotonic() for Python 2 & < 3.3"
+optional = false
+python-versions = "*"
+files = [
+ {file = "monotonic-1.6-py2.py3-none-any.whl", hash = "sha256:68687e19a14f11f26d140dd5c86f3dba4bf5df58003000ed467e0e2a69bca96c"},
+ {file = "monotonic-1.6.tar.gz", hash = "sha256:3a55207bcfed53ddd5c5bae174524062935efed17792e9de2ad0205ce9ad63f7"},
+]
+
+[[package]]
+name = "mpmath"
+version = "1.3.0"
+description = "Python library for arbitrary-precision floating-point arithmetic"
+optional = false
+python-versions = "*"
+files = [
+ {file = "mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c"},
+ {file = "mpmath-1.3.0.tar.gz", hash = "sha256:7a28eb2a9774d00c7bc92411c19a89209d5da7c4c9a9e227be8330a23a25b91f"},
+]
+
+[package.extras]
+develop = ["codecov", "pycodestyle", "pytest (>=4.6)", "pytest-cov", "wheel"]
+docs = ["sphinx"]
+gmpy = ["gmpy2 (>=2.1.0a4)"]
+tests = ["pytest (>=4.6)"]
+
[[package]]
name = "msgpack"
version = "1.0.8"
@@ -2874,6 +4239,87 @@ files = [
{file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"},
]
+[[package]]
+name = "oauthlib"
+version = "3.2.2"
+description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "oauthlib-3.2.2-py3-none-any.whl", hash = "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca"},
+ {file = "oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918"},
+]
+
+[package.extras]
+rsa = ["cryptography (>=3.0.0)"]
+signals = ["blinker (>=1.4.0)"]
+signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"]
+
+[[package]]
+name = "onnxruntime"
+version = "1.18.1"
+description = "ONNX Runtime is a runtime accelerator for Machine Learning models"
+optional = false
+python-versions = "*"
+files = [
+ {file = "onnxruntime-1.18.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:29ef7683312393d4ba04252f1b287d964bd67d5e6048b94d2da3643986c74d80"},
+ {file = "onnxruntime-1.18.1-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fc706eb1df06ddf55776e15a30519fb15dda7697f987a2bbda4962845e3cec05"},
+ {file = "onnxruntime-1.18.1-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7de69f5ced2a263531923fa68bbec52a56e793b802fcd81a03487b5e292bc3a"},
+ {file = "onnxruntime-1.18.1-cp310-cp310-win32.whl", hash = "sha256:221e5b16173926e6c7de2cd437764492aa12b6811f45abd37024e7cf2ae5d7e3"},
+ {file = "onnxruntime-1.18.1-cp310-cp310-win_amd64.whl", hash = "sha256:75211b619275199c861ee94d317243b8a0fcde6032e5a80e1aa9ded8ab4c6060"},
+ {file = "onnxruntime-1.18.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:f26582882f2dc581b809cfa41a125ba71ad9e715738ec6402418df356969774a"},
+ {file = "onnxruntime-1.18.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ef36f3a8b768506d02be349ac303fd95d92813ba3ba70304d40c3cd5c25d6a4c"},
+ {file = "onnxruntime-1.18.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:170e711393e0618efa8ed27b59b9de0ee2383bd2a1f93622a97006a5ad48e434"},
+ {file = "onnxruntime-1.18.1-cp311-cp311-win32.whl", hash = "sha256:9b6a33419b6949ea34e0dc009bc4470e550155b6da644571ecace4b198b0d88f"},
+ {file = "onnxruntime-1.18.1-cp311-cp311-win_amd64.whl", hash = "sha256:5c1380a9f1b7788da742c759b6a02ba771fe1ce620519b2b07309decbd1a2fe1"},
+ {file = "onnxruntime-1.18.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:31bd57a55e3f983b598675dfc7e5d6f0877b70ec9864b3cc3c3e1923d0a01919"},
+ {file = "onnxruntime-1.18.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b9e03c4ba9f734500691a4d7d5b381cd71ee2f3ce80a1154ac8f7aed99d1ecaa"},
+ {file = "onnxruntime-1.18.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:781aa9873640f5df24524f96f6070b8c550c66cb6af35710fd9f92a20b4bfbf6"},
+ {file = "onnxruntime-1.18.1-cp312-cp312-win32.whl", hash = "sha256:3a2d9ab6254ca62adbb448222e630dc6883210f718065063518c8f93a32432be"},
+ {file = "onnxruntime-1.18.1-cp312-cp312-win_amd64.whl", hash = "sha256:ad93c560b1c38c27c0275ffd15cd7f45b3ad3fc96653c09ce2931179982ff204"},
+ {file = "onnxruntime-1.18.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:3b55dc9d3c67626388958a3eb7ad87eb7c70f75cb0f7ff4908d27b8b42f2475c"},
+ {file = "onnxruntime-1.18.1-cp38-cp38-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f80dbcfb6763cc0177a31168b29b4bd7662545b99a19e211de8c734b657e0669"},
+ {file = "onnxruntime-1.18.1-cp38-cp38-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f1ff2c61a16d6c8631796c54139bafea41ee7736077a0fc64ee8ae59432f5c58"},
+ {file = "onnxruntime-1.18.1-cp38-cp38-win32.whl", hash = "sha256:219855bd272fe0c667b850bf1a1a5a02499269a70d59c48e6f27f9c8bcb25d02"},
+ {file = "onnxruntime-1.18.1-cp38-cp38-win_amd64.whl", hash = "sha256:afdf16aa607eb9a2c60d5ca2d5abf9f448e90c345b6b94c3ed14f4fb7e6a2d07"},
+ {file = "onnxruntime-1.18.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:128df253ade673e60cea0955ec9d0e89617443a6d9ce47c2d79eb3f72a3be3de"},
+ {file = "onnxruntime-1.18.1-cp39-cp39-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9839491e77e5c5a175cab3621e184d5a88925ee297ff4c311b68897197f4cde9"},
+ {file = "onnxruntime-1.18.1-cp39-cp39-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ad3187c1faff3ac15f7f0e7373ef4788c582cafa655a80fdbb33eaec88976c66"},
+ {file = "onnxruntime-1.18.1-cp39-cp39-win32.whl", hash = "sha256:34657c78aa4e0b5145f9188b550ded3af626651b15017bf43d280d7e23dbf195"},
+ {file = "onnxruntime-1.18.1-cp39-cp39-win_amd64.whl", hash = "sha256:9c14fd97c3ddfa97da5feef595e2c73f14c2d0ec1d4ecbea99c8d96603c89589"},
+]
+
+[package.dependencies]
+coloredlogs = "*"
+flatbuffers = "*"
+numpy = ">=1.21.6,<2.0"
+packaging = "*"
+protobuf = "*"
+sympy = "*"
+
+[[package]]
+name = "openai"
+version = "1.37.0"
+description = "The official Python library for the openai API"
+optional = false
+python-versions = ">=3.7.1"
+files = [
+ {file = "openai-1.37.0-py3-none-any.whl", hash = "sha256:a903245c0ecf622f2830024acdaa78683c70abb8e9d37a497b851670864c9f73"},
+ {file = "openai-1.37.0.tar.gz", hash = "sha256:dc8197fc40ab9d431777b6620d962cc49f4544ffc3011f03ce0a805e6eb54adb"},
+]
+
+[package.dependencies]
+anyio = ">=3.5.0,<5"
+distro = ">=1.7.0,<2"
+httpx = ">=0.23.0,<1"
+pydantic = ">=1.9.0,<3"
+sniffio = "*"
+tqdm = ">4"
+typing-extensions = ">=4.7,<5"
+
+[package.extras]
+datalib = ["numpy (>=1)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"]
+
[[package]]
name = "opentelemetry-api"
version = "1.25.0"
@@ -2887,7 +4333,61 @@ files = [
[package.dependencies]
deprecated = ">=1.2.6"
-importlib-metadata = ">=6.0,<=7.1"
+importlib-metadata = ">=6.0,<=7.1"
+
+[[package]]
+name = "opentelemetry-exporter-otlp-proto-common"
+version = "1.25.0"
+description = "OpenTelemetry Protobuf encoding"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "opentelemetry_exporter_otlp_proto_common-1.25.0-py3-none-any.whl", hash = "sha256:15637b7d580c2675f70246563363775b4e6de947871e01d0f4e3881d1848d693"},
+ {file = "opentelemetry_exporter_otlp_proto_common-1.25.0.tar.gz", hash = "sha256:c93f4e30da4eee02bacd1e004eb82ce4da143a2f8e15b987a9f603e0a85407d3"},
+]
+
+[package.dependencies]
+opentelemetry-proto = "1.25.0"
+
+[[package]]
+name = "opentelemetry-exporter-otlp-proto-grpc"
+version = "1.25.0"
+description = "OpenTelemetry Collector Protobuf over gRPC Exporter"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "opentelemetry_exporter_otlp_proto_grpc-1.25.0-py3-none-any.whl", hash = "sha256:3131028f0c0a155a64c430ca600fd658e8e37043cb13209f0109db5c1a3e4eb4"},
+ {file = "opentelemetry_exporter_otlp_proto_grpc-1.25.0.tar.gz", hash = "sha256:c0b1661415acec5af87625587efa1ccab68b873745ca0ee96b69bb1042087eac"},
+]
+
+[package.dependencies]
+deprecated = ">=1.2.6"
+googleapis-common-protos = ">=1.52,<2.0"
+grpcio = ">=1.0.0,<2.0.0"
+opentelemetry-api = ">=1.15,<2.0"
+opentelemetry-exporter-otlp-proto-common = "1.25.0"
+opentelemetry-proto = "1.25.0"
+opentelemetry-sdk = ">=1.25.0,<1.26.0"
+
+[[package]]
+name = "opentelemetry-exporter-otlp-proto-http"
+version = "1.25.0"
+description = "OpenTelemetry Collector Protobuf over HTTP Exporter"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "opentelemetry_exporter_otlp_proto_http-1.25.0-py3-none-any.whl", hash = "sha256:2eca686ee11b27acd28198b3ea5e5863a53d1266b91cda47c839d95d5e0541a6"},
+ {file = "opentelemetry_exporter_otlp_proto_http-1.25.0.tar.gz", hash = "sha256:9f8723859e37c75183ea7afa73a3542f01d0fd274a5b97487ea24cb683d7d684"},
+]
+
+[package.dependencies]
+deprecated = ">=1.2.6"
+googleapis-common-protos = ">=1.52,<2.0"
+opentelemetry-api = ">=1.15,<2.0"
+opentelemetry-exporter-otlp-proto-common = "1.25.0"
+opentelemetry-proto = "1.25.0"
+opentelemetry-sdk = ">=1.25.0,<1.26.0"
+requests = ">=2.7,<3.0"
[[package]]
name = "opentelemetry-exporter-prometheus"
@@ -2963,6 +4463,20 @@ opentelemetry-util-http = "0.46b0"
[package.extras]
instruments = ["fastapi (>=0.58,<1.0)"]
+[[package]]
+name = "opentelemetry-proto"
+version = "1.25.0"
+description = "OpenTelemetry Python Proto"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "opentelemetry_proto-1.25.0-py3-none-any.whl", hash = "sha256:f07e3341c78d835d9b86665903b199893befa5e98866f63d22b00d0b7ca4972f"},
+ {file = "opentelemetry_proto-1.25.0.tar.gz", hash = "sha256:35b6ef9dc4a9f7853ecc5006738ad40443701e52c26099e197895cbda8b815a3"},
+]
+
+[package.dependencies]
+protobuf = ">=3.19,<5.0"
+
[[package]]
name = "opentelemetry-sdk"
version = "1.25.0"
@@ -3064,6 +4578,17 @@ files = [
{file = "orjson-3.10.0.tar.gz", hash = "sha256:ba4d8cac5f2e2cff36bea6b6481cdb92b38c202bcec603d6f5ff91960595a1ed"},
]
+[[package]]
+name = "overrides"
+version = "7.7.0"
+description = "A decorator to automatically detect mismatch when overriding a method."
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "overrides-7.7.0-py3-none-any.whl", hash = "sha256:c7ed9d062f78b8e4c1a7b70bd8796b35ead4d9f510227ef9c5dc7626c60d7e49"},
+ {file = "overrides-7.7.0.tar.gz", hash = "sha256:55158fa3d93b98cc75299b1e67078ad9003ca27945c76162c1c0766d6f91820a"},
+]
+
[[package]]
name = "packaging"
version = "24.1"
@@ -3163,6 +4688,20 @@ files = [
numpy = ">=1.23.5"
types-pytz = ">=2022.1.1"
+[[package]]
+name = "parameterized"
+version = "0.9.0"
+description = "Parameterized testing with any Python test framework"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "parameterized-0.9.0-py2.py3-none-any.whl", hash = "sha256:4e0758e3d41bea3bbd05ec14fc2c24736723f243b28d702081aef438c9372b1b"},
+ {file = "parameterized-0.9.0.tar.gz", hash = "sha256:7fc905272cefa4f364c1a3429cbbe9c0f98b793988efb5bf90aac80f08db09b1"},
+]
+
+[package.extras]
+dev = ["jinja2"]
+
[[package]]
name = "parso"
version = "0.8.4"
@@ -3337,6 +4876,48 @@ files = [
dev = ["pre-commit", "tox"]
testing = ["pytest", "pytest-benchmark"]
+[[package]]
+name = "portalocker"
+version = "2.10.1"
+description = "Wraps the portalocker recipe for easy usage"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "portalocker-2.10.1-py3-none-any.whl", hash = "sha256:53a5984ebc86a025552264b459b46a2086e269b21823cb572f8f28ee759e45bf"},
+ {file = "portalocker-2.10.1.tar.gz", hash = "sha256:ef1bf844e878ab08aee7e40184156e1151f228f103aa5c6bd0724cc330960f8f"},
+]
+
+[package.dependencies]
+pywin32 = {version = ">=226", markers = "platform_system == \"Windows\""}
+
+[package.extras]
+docs = ["sphinx (>=1.7.1)"]
+redis = ["redis"]
+tests = ["pytest (>=5.4.1)", "pytest-cov (>=2.8.1)", "pytest-mypy (>=0.8.0)", "pytest-timeout (>=2.1.0)", "redis", "sphinx (>=6.0.0)", "types-redis"]
+
+[[package]]
+name = "posthog"
+version = "3.5.0"
+description = "Integrate PostHog into any python application."
+optional = false
+python-versions = "*"
+files = [
+ {file = "posthog-3.5.0-py2.py3-none-any.whl", hash = "sha256:3c672be7ba6f95d555ea207d4486c171d06657eb34b3ce25eb043bfe7b6b5b76"},
+ {file = "posthog-3.5.0.tar.gz", hash = "sha256:8f7e3b2c6e8714d0c0c542a2109b83a7549f63b7113a133ab2763a89245ef2ef"},
+]
+
+[package.dependencies]
+backoff = ">=1.10.0"
+monotonic = ">=1.5"
+python-dateutil = ">2.1"
+requests = ">=2.7,<3.0"
+six = ">=1.5"
+
+[package.extras]
+dev = ["black", "flake8", "flake8-print", "isort", "pre-commit"]
+sentry = ["django", "sentry-sdk"]
+test = ["coverage", "flake8", "freezegun (==0.3.15)", "mock (>=2.0.0)", "pylint", "pytest", "pytest-timeout"]
+
[[package]]
name = "pre-commit"
version = "3.8.0"
@@ -3383,6 +4964,43 @@ files = [
[package.dependencies]
wcwidth = "*"
+[[package]]
+name = "proto-plus"
+version = "1.24.0"
+description = "Beautiful, Pythonic protocol buffers."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "proto-plus-1.24.0.tar.gz", hash = "sha256:30b72a5ecafe4406b0d339db35b56c4059064e69227b8c3bda7462397f966445"},
+ {file = "proto_plus-1.24.0-py3-none-any.whl", hash = "sha256:402576830425e5f6ce4c2a6702400ac79897dab0b4343821aa5188b0fab81a12"},
+]
+
+[package.dependencies]
+protobuf = ">=3.19.0,<6.0.0dev"
+
+[package.extras]
+testing = ["google-api-core (>=1.31.5)"]
+
+[[package]]
+name = "protobuf"
+version = "4.25.3"
+description = ""
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "protobuf-4.25.3-cp310-abi3-win32.whl", hash = "sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa"},
+ {file = "protobuf-4.25.3-cp310-abi3-win_amd64.whl", hash = "sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8"},
+ {file = "protobuf-4.25.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c"},
+ {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:e7cb0ae90dd83727f0c0718634ed56837bfeeee29a5f82a7514c03ee1364c019"},
+ {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:7c8daa26095f82482307bc717364e7c13f4f1c99659be82890dcfc215194554d"},
+ {file = "protobuf-4.25.3-cp38-cp38-win32.whl", hash = "sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2"},
+ {file = "protobuf-4.25.3-cp38-cp38-win_amd64.whl", hash = "sha256:c053062984e61144385022e53678fbded7aea14ebb3e0305ae3592fb219ccfa4"},
+ {file = "protobuf-4.25.3-cp39-cp39-win32.whl", hash = "sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4"},
+ {file = "protobuf-4.25.3-cp39-cp39-win_amd64.whl", hash = "sha256:e3c97a1555fd6388f857770ff8b9703083de6bf1f9274a002a332d65fbb56c8c"},
+ {file = "protobuf-4.25.3-py3-none-any.whl", hash = "sha256:f0700d54bcf45424477e46a9f0944155b46fb0639d69728739c0e47bab83f2b9"},
+ {file = "protobuf-4.25.3.tar.gz", hash = "sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c"},
+]
+
[[package]]
name = "psutil"
version = "6.0.0"
@@ -3423,6 +5041,53 @@ files = [
{file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"},
]
+[[package]]
+name = "pulsar-client"
+version = "3.5.0"
+description = "Apache Pulsar Python client library"
+optional = false
+python-versions = "*"
+files = [
+ {file = "pulsar_client-3.5.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:c18552edb2f785de85280fe624bc507467152bff810fc81d7660fa2dfa861f38"},
+ {file = "pulsar_client-3.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18d438e456c146f01be41ef146f649dedc8f7bc714d9eaef94cff2e34099812b"},
+ {file = "pulsar_client-3.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18a26a0719841103c7a89eb1492c4a8fedf89adaa386375baecbb4fa2707e88f"},
+ {file = "pulsar_client-3.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ab0e1605dc5f44a126163fd06cd0a768494ad05123f6e0de89a2c71d6e2d2319"},
+ {file = "pulsar_client-3.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cdef720891b97656fdce3bf5913ea7729b2156b84ba64314f432c1e72c6117fa"},
+ {file = "pulsar_client-3.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:a42544e38773191fe550644a90e8050579476bb2dcf17ac69a4aed62a6cb70e7"},
+ {file = "pulsar_client-3.5.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:fd94432ea5d398ea78f8f2e09a217ec5058d26330c137a22690478c031e116da"},
+ {file = "pulsar_client-3.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6252ae462e07ece4071213fdd9c76eab82ca522a749f2dc678037d4cbacd40b"},
+ {file = "pulsar_client-3.5.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03b4d440b2d74323784328b082872ee2f206c440b5d224d7941eb3c083ec06c6"},
+ {file = "pulsar_client-3.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f60af840b8d64a2fac5a0c1ce6ae0ddffec5f42267c6ded2c5e74bad8345f2a1"},
+ {file = "pulsar_client-3.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2277a447c3b7f6571cb1eb9fc5c25da3fdd43d0b2fb91cf52054adfadc7d6842"},
+ {file = "pulsar_client-3.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:f20f3e9dd50db2a37059abccad42078b7a4754b8bc1d3ae6502e71c1ad2209f0"},
+ {file = "pulsar_client-3.5.0-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:d61f663d85308e12f44033ba95af88730f581a7e8da44f7a5c080a3aaea4878d"},
+ {file = "pulsar_client-3.5.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a1ba0be25b6f747bcb28102b7d906ec1de48dc9f1a2d9eacdcc6f44ab2c9e17"},
+ {file = "pulsar_client-3.5.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a181e3e60ac39df72ccb3c415d7aeac61ad0286497a6e02739a560d5af28393a"},
+ {file = "pulsar_client-3.5.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3c72895ff7f51347e4f78b0375b2213fa70dd4790bbb78177b4002846f1fd290"},
+ {file = "pulsar_client-3.5.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:547dba1b185a17eba915e51d0a3aca27c80747b6187e5cd7a71a3ca33921decc"},
+ {file = "pulsar_client-3.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:443b786eed96bc86d2297a6a42e79f39d1abf217ec603e0bd303f3488c0234af"},
+ {file = "pulsar_client-3.5.0-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:15b58f5d759dd6166db8a2d90ed05a38063b05cda76c36d190d86ef5c9249397"},
+ {file = "pulsar_client-3.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:af34bfe813dddf772a8a298117fa0a036ee963595d8bc8f00d969a0329ae6ed9"},
+ {file = "pulsar_client-3.5.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27a0fec1dd74e1367d3742ce16679c1807994df60f5e666f440cf39323938fad"},
+ {file = "pulsar_client-3.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dbcd26ef9c03f96fb9cd91baec3bbd3c4b997834eb3556670d31f41cc25b5f64"},
+ {file = "pulsar_client-3.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:afea1d0b6e793fd56e56463145751ff3aa79fdcd5b26e90d0da802a1bbabe07e"},
+ {file = "pulsar_client-3.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:da1ab2fb1bef64b966e9403a0a186ebc90368d99e054ce2cae5b1128478f4ef4"},
+ {file = "pulsar_client-3.5.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:9ad5dcc0eb8d2a7c0fb8e1fa146a0c6d4bdaf934f1169080b2c64b2f0573e086"},
+ {file = "pulsar_client-3.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5870c6805b1a57962ed908d1173e97e13470415998393925c86a43694420389"},
+ {file = "pulsar_client-3.5.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:29cb5fedb969895b78301dc00a979133e69940812b8332e4de948bb0ad3db7cb"},
+ {file = "pulsar_client-3.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e53c74bfa59b20c66adea95023169060f5048dd8d843e6ef9cd3b8ee2d23e93b"},
+ {file = "pulsar_client-3.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:99dbadb13967f1add57010971ed36b5a77d24afcdaea01960d0e55e56cf4ba6f"},
+ {file = "pulsar_client-3.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:058887661d438796f42307dcc8054c84dea88a37683dae36498b95d7e1c39b37"},
+]
+
+[package.dependencies]
+certifi = "*"
+
+[package.extras]
+all = ["apache-bookkeeper-client (>=4.16.1)", "fastavro (>=1.9.2)", "grpcio (>=1.60.0)", "prometheus-client", "protobuf (>=3.6.1,<=3.20.3)", "ratelimit"]
+avro = ["fastavro (>=1.9.2)"]
+functions = ["apache-bookkeeper-client (>=4.16.1)", "grpcio (>=1.60.0)", "prometheus-client", "protobuf (>=3.6.1,<=3.20.3)", "ratelimit"]
+
[[package]]
name = "pure-eval"
version = "0.2.3"
@@ -3437,6 +5102,57 @@ files = [
[package.extras]
tests = ["pytest"]
+[[package]]
+name = "pyarrow"
+version = "17.0.0"
+description = "Python library for Apache Arrow"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "pyarrow-17.0.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:a5c8b238d47e48812ee577ee20c9a2779e6a5904f1708ae240f53ecbee7c9f07"},
+ {file = "pyarrow-17.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:db023dc4c6cae1015de9e198d41250688383c3f9af8f565370ab2b4cb5f62655"},
+ {file = "pyarrow-17.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da1e060b3876faa11cee287839f9cc7cdc00649f475714b8680a05fd9071d545"},
+ {file = "pyarrow-17.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75c06d4624c0ad6674364bb46ef38c3132768139ddec1c56582dbac54f2663e2"},
+ {file = "pyarrow-17.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:fa3c246cc58cb5a4a5cb407a18f193354ea47dd0648194e6265bd24177982fe8"},
+ {file = "pyarrow-17.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:f7ae2de664e0b158d1607699a16a488de3d008ba99b3a7aa5de1cbc13574d047"},
+ {file = "pyarrow-17.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:5984f416552eea15fd9cee03da53542bf4cddaef5afecefb9aa8d1010c335087"},
+ {file = "pyarrow-17.0.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:1c8856e2ef09eb87ecf937104aacfa0708f22dfeb039c363ec99735190ffb977"},
+ {file = "pyarrow-17.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2e19f569567efcbbd42084e87f948778eb371d308e137a0f97afe19bb860ccb3"},
+ {file = "pyarrow-17.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b244dc8e08a23b3e352899a006a26ae7b4d0da7bb636872fa8f5884e70acf15"},
+ {file = "pyarrow-17.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b72e87fe3e1db343995562f7fff8aee354b55ee83d13afba65400c178ab2597"},
+ {file = "pyarrow-17.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:dc5c31c37409dfbc5d014047817cb4ccd8c1ea25d19576acf1a001fe07f5b420"},
+ {file = "pyarrow-17.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:e3343cb1e88bc2ea605986d4b94948716edc7a8d14afd4e2c097232f729758b4"},
+ {file = "pyarrow-17.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:a27532c38f3de9eb3e90ecab63dfda948a8ca859a66e3a47f5f42d1e403c4d03"},
+ {file = "pyarrow-17.0.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:9b8a823cea605221e61f34859dcc03207e52e409ccf6354634143e23af7c8d22"},
+ {file = "pyarrow-17.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f1e70de6cb5790a50b01d2b686d54aaf73da01266850b05e3af2a1bc89e16053"},
+ {file = "pyarrow-17.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0071ce35788c6f9077ff9ecba4858108eebe2ea5a3f7cf2cf55ebc1dbc6ee24a"},
+ {file = "pyarrow-17.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:757074882f844411fcca735e39aae74248a1531367a7c80799b4266390ae51cc"},
+ {file = "pyarrow-17.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:9ba11c4f16976e89146781a83833df7f82077cdab7dc6232c897789343f7891a"},
+ {file = "pyarrow-17.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:b0c6ac301093b42d34410b187bba560b17c0330f64907bfa4f7f7f2444b0cf9b"},
+ {file = "pyarrow-17.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:392bc9feabc647338e6c89267635e111d71edad5fcffba204425a7c8d13610d7"},
+ {file = "pyarrow-17.0.0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:af5ff82a04b2171415f1410cff7ebb79861afc5dae50be73ce06d6e870615204"},
+ {file = "pyarrow-17.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:edca18eaca89cd6382dfbcff3dd2d87633433043650c07375d095cd3517561d8"},
+ {file = "pyarrow-17.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c7916bff914ac5d4a8fe25b7a25e432ff921e72f6f2b7547d1e325c1ad9d155"},
+ {file = "pyarrow-17.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f553ca691b9e94b202ff741bdd40f6ccb70cdd5fbf65c187af132f1317de6145"},
+ {file = "pyarrow-17.0.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:0cdb0e627c86c373205a2f94a510ac4376fdc523f8bb36beab2e7f204416163c"},
+ {file = "pyarrow-17.0.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:d7d192305d9d8bc9082d10f361fc70a73590a4c65cf31c3e6926cd72b76bc35c"},
+ {file = "pyarrow-17.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:02dae06ce212d8b3244dd3e7d12d9c4d3046945a5933d28026598e9dbbda1fca"},
+ {file = "pyarrow-17.0.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:13d7a460b412f31e4c0efa1148e1d29bdf18ad1411eb6757d38f8fbdcc8645fb"},
+ {file = "pyarrow-17.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9b564a51fbccfab5a04a80453e5ac6c9954a9c5ef2890d1bcf63741909c3f8df"},
+ {file = "pyarrow-17.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32503827abbc5aadedfa235f5ece8c4f8f8b0a3cf01066bc8d29de7539532687"},
+ {file = "pyarrow-17.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a155acc7f154b9ffcc85497509bcd0d43efb80d6f733b0dc3bb14e281f131c8b"},
+ {file = "pyarrow-17.0.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:dec8d129254d0188a49f8a1fc99e0560dc1b85f60af729f47de4046015f9b0a5"},
+ {file = "pyarrow-17.0.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:a48ddf5c3c6a6c505904545c25a4ae13646ae1f8ba703c4df4a1bfe4f4006bda"},
+ {file = "pyarrow-17.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:42bf93249a083aca230ba7e2786c5f673507fa97bbd9725a1e2754715151a204"},
+ {file = "pyarrow-17.0.0.tar.gz", hash = "sha256:4beca9521ed2c0921c1023e68d097d0299b62c362639ea315572a58f3f50fd28"},
+]
+
+[package.dependencies]
+numpy = ">=1.16.6"
+
+[package.extras]
+test = ["cffi", "hypothesis", "pandas", "pytest", "pytz"]
+
[[package]]
name = "pyasn1"
version = "0.6.0"
@@ -3448,6 +5164,20 @@ files = [
{file = "pyasn1-0.6.0.tar.gz", hash = "sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c"},
]
+[[package]]
+name = "pyasn1-modules"
+version = "0.4.0"
+description = "A collection of ASN.1-based protocols modules"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "pyasn1_modules-0.4.0-py3-none-any.whl", hash = "sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b"},
+ {file = "pyasn1_modules-0.4.0.tar.gz", hash = "sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6"},
+]
+
+[package.dependencies]
+pyasn1 = ">=0.4.6,<0.7.0"
+
[[package]]
name = "pycparser"
version = "2.22"
@@ -3658,15 +5388,57 @@ files = [
{file = "pyperclip-1.9.0.tar.gz", hash = "sha256:b7de0142ddc81bfc5c7507eea19da920b92252b548b96186caf94a5e2527d310"},
]
+[[package]]
+name = "pypika"
+version = "0.48.9"
+description = "A SQL query builder API for Python"
+optional = false
+python-versions = "*"
+files = [
+ {file = "PyPika-0.48.9.tar.gz", hash = "sha256:838836a61747e7c8380cd1b7ff638694b7a7335345d0f559b04b2cd832ad5378"},
+]
+
+[[package]]
+name = "pyproject-hooks"
+version = "1.1.0"
+description = "Wrappers to call pyproject.toml-based build backend hooks."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "pyproject_hooks-1.1.0-py3-none-any.whl", hash = "sha256:7ceeefe9aec63a1064c18d939bdc3adf2d8aa1988a510afec15151578b232aa2"},
+ {file = "pyproject_hooks-1.1.0.tar.gz", hash = "sha256:4b37730834edbd6bd37f26ece6b44802fb1c1ee2ece0e54ddff8bfc06db86965"},
+]
+
+[[package]]
+name = "pyreadline3"
+version = "3.4.1"
+description = "A python implementation of GNU readline."
+optional = false
+python-versions = "*"
+files = [
+ {file = "pyreadline3-3.4.1-py3-none-any.whl", hash = "sha256:b0efb6516fd4fb07b45949053826a62fa4cb353db5be2bbb4a7aa1fdd1e345fb"},
+ {file = "pyreadline3-3.4.1.tar.gz", hash = "sha256:6f3d1f7b8a31ba32b73917cefc1f28cc660562f39aea8646d30bd6eff21f7bae"},
+]
+
+[[package]]
+name = "pysbd"
+version = "0.3.4"
+description = "pysbd (Python Sentence Boundary Disambiguation) is a rule-based sentence boundary detection that works out-of-the-box across many languages."
+optional = false
+python-versions = ">=3"
+files = [
+ {file = "pysbd-0.3.4-py3-none-any.whl", hash = "sha256:cd838939b7b0b185fcf86b0baf6636667dfb6e474743beeff878e9f42e022953"},
+]
+
[[package]]
name = "pytest"
-version = "8.3.2"
+version = "8.3.1"
description = "pytest: simple powerful testing with Python"
optional = false
python-versions = ">=3.8"
files = [
- {file = "pytest-8.3.2-py3-none-any.whl", hash = "sha256:4ba08f9ae7dcf84ded419494d229b48d0903ea6407b030eaec46df5e6a73bba5"},
- {file = "pytest-8.3.2.tar.gz", hash = "sha256:c132345d12ce551242c87269de812483f5bcc87cdbb4722e48487ba194f9fdce"},
+ {file = "pytest-8.3.1-py3-none-any.whl", hash = "sha256:e9600ccf4f563976e2c99fa02c7624ab938296551f280835ee6516df8bc4ae8c"},
+ {file = "pytest-8.3.1.tar.gz", hash = "sha256:7e8e5c5abd6e93cb1cc151f23e57adc31fcf8cfd2a3ff2da63e23f732de35db6"},
]
[package.dependencies]
@@ -4114,6 +5886,135 @@ files = [
[package.dependencies]
cffi = {version = "*", markers = "implementation_name == \"pypy\""}
+[[package]]
+name = "qdrant-client"
+version = "1.10.1"
+description = "Client library for the Qdrant vector search engine"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "qdrant_client-1.10.1-py3-none-any.whl", hash = "sha256:b9fb8fe50dd168d92b2998be7c6135d5a229b3a3258ad158cc69c8adf9ff1810"},
+ {file = "qdrant_client-1.10.1.tar.gz", hash = "sha256:2284c8c5bb1defb0d9dbacb07d16f344972f395f4f2ed062318476a7951fd84c"},
+]
+
+[package.dependencies]
+grpcio = ">=1.41.0"
+grpcio-tools = ">=1.41.0"
+httpx = {version = ">=0.20.0", extras = ["http2"]}
+numpy = [
+ {version = ">=1.21", markers = "python_version >= \"3.8\" and python_version < \"3.12\""},
+ {version = ">=1.26", markers = "python_version >= \"3.12\""},
+]
+portalocker = ">=2.7.0,<3.0.0"
+pydantic = ">=1.10.8"
+urllib3 = ">=1.26.14,<3"
+
+[package.extras]
+fastembed = ["fastembed (==0.2.7)"]
+fastembed-gpu = ["fastembed-gpu (==0.2.7)"]
+
+[[package]]
+name = "regex"
+version = "2023.12.25"
+description = "Alternative regular expression module, to replace re."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0694219a1d54336fd0445ea382d49d36882415c0134ee1e8332afd1529f0baa5"},
+ {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b014333bd0217ad3d54c143de9d4b9a3ca1c5a29a6d0d554952ea071cff0f1f8"},
+ {file = "regex-2023.12.25-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d865984b3f71f6d0af64d0d88f5733521698f6c16f445bb09ce746c92c97c586"},
+ {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e0eabac536b4cc7f57a5f3d095bfa557860ab912f25965e08fe1545e2ed8b4c"},
+ {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c25a8ad70e716f96e13a637802813f65d8a6760ef48672aa3502f4c24ea8b400"},
+ {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9b6d73353f777630626f403b0652055ebfe8ff142a44ec2cf18ae470395766e"},
+ {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9cc99d6946d750eb75827cb53c4371b8b0fe89c733a94b1573c9dd16ea6c9e4"},
+ {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88d1f7bef20c721359d8675f7d9f8e414ec5003d8f642fdfd8087777ff7f94b5"},
+ {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cb3fe77aec8f1995611f966d0c656fdce398317f850d0e6e7aebdfe61f40e1cd"},
+ {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7aa47c2e9ea33a4a2a05f40fcd3ea36d73853a2aae7b4feab6fc85f8bf2c9704"},
+ {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:df26481f0c7a3f8739fecb3e81bc9da3fcfae34d6c094563b9d4670b047312e1"},
+ {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c40281f7d70baf6e0db0c2f7472b31609f5bc2748fe7275ea65a0b4601d9b392"},
+ {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:d94a1db462d5690ebf6ae86d11c5e420042b9898af5dcf278bd97d6bda065423"},
+ {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ba1b30765a55acf15dce3f364e4928b80858fa8f979ad41f862358939bdd1f2f"},
+ {file = "regex-2023.12.25-cp310-cp310-win32.whl", hash = "sha256:150c39f5b964e4d7dba46a7962a088fbc91f06e606f023ce57bb347a3b2d4630"},
+ {file = "regex-2023.12.25-cp310-cp310-win_amd64.whl", hash = "sha256:09da66917262d9481c719599116c7dc0c321ffcec4b1f510c4f8a066f8768105"},
+ {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1b9d811f72210fa9306aeb88385b8f8bcef0dfbf3873410413c00aa94c56c2b6"},
+ {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d902a43085a308cef32c0d3aea962524b725403fd9373dea18110904003bac97"},
+ {file = "regex-2023.12.25-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d166eafc19f4718df38887b2bbe1467a4f74a9830e8605089ea7a30dd4da8887"},
+ {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7ad32824b7f02bb3c9f80306d405a1d9b7bb89362d68b3c5a9be53836caebdb"},
+ {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:636ba0a77de609d6510235b7f0e77ec494d2657108f777e8765efc060094c98c"},
+ {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fda75704357805eb953a3ee15a2b240694a9a514548cd49b3c5124b4e2ad01b"},
+ {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f72cbae7f6b01591f90814250e636065850c5926751af02bb48da94dfced7baa"},
+ {file = "regex-2023.12.25-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db2a0b1857f18b11e3b0e54ddfefc96af46b0896fb678c85f63fb8c37518b3e7"},
+ {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7502534e55c7c36c0978c91ba6f61703faf7ce733715ca48f499d3dbbd7657e0"},
+ {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e8c7e08bb566de4faaf11984af13f6bcf6a08f327b13631d41d62592681d24fe"},
+ {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:283fc8eed679758de38fe493b7d7d84a198b558942b03f017b1f94dda8efae80"},
+ {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:f44dd4d68697559d007462b0a3a1d9acd61d97072b71f6d1968daef26bc744bd"},
+ {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:67d3ccfc590e5e7197750fcb3a2915b416a53e2de847a728cfa60141054123d4"},
+ {file = "regex-2023.12.25-cp311-cp311-win32.whl", hash = "sha256:68191f80a9bad283432385961d9efe09d783bcd36ed35a60fb1ff3f1ec2efe87"},
+ {file = "regex-2023.12.25-cp311-cp311-win_amd64.whl", hash = "sha256:7d2af3f6b8419661a0c421584cfe8aaec1c0e435ce7e47ee2a97e344b98f794f"},
+ {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8a0ccf52bb37d1a700375a6b395bff5dd15c50acb745f7db30415bae3c2b0715"},
+ {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c3c4a78615b7762740531c27cf46e2f388d8d727d0c0c739e72048beb26c8a9d"},
+ {file = "regex-2023.12.25-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ad83e7545b4ab69216cef4cc47e344d19622e28aabec61574b20257c65466d6a"},
+ {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7a635871143661feccce3979e1727c4e094f2bdfd3ec4b90dfd4f16f571a87a"},
+ {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d498eea3f581fbe1b34b59c697512a8baef88212f92e4c7830fcc1499f5b45a5"},
+ {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:43f7cd5754d02a56ae4ebb91b33461dc67be8e3e0153f593c509e21d219c5060"},
+ {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51f4b32f793812714fd5307222a7f77e739b9bc566dc94a18126aba3b92b98a3"},
+ {file = "regex-2023.12.25-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba99d8077424501b9616b43a2d208095746fb1284fc5ba490139651f971d39d9"},
+ {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4bfc2b16e3ba8850e0e262467275dd4d62f0d045e0e9eda2bc65078c0110a11f"},
+ {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8c2c19dae8a3eb0ea45a8448356ed561be843b13cbc34b840922ddf565498c1c"},
+ {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:60080bb3d8617d96f0fb7e19796384cc2467447ef1c491694850ebd3670bc457"},
+ {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b77e27b79448e34c2c51c09836033056a0547aa360c45eeeb67803da7b0eedaf"},
+ {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:518440c991f514331f4850a63560321f833979d145d7d81186dbe2f19e27ae3d"},
+ {file = "regex-2023.12.25-cp312-cp312-win32.whl", hash = "sha256:e2610e9406d3b0073636a3a2e80db05a02f0c3169b5632022b4e81c0364bcda5"},
+ {file = "regex-2023.12.25-cp312-cp312-win_amd64.whl", hash = "sha256:cc37b9aeebab425f11f27e5e9e6cf580be7206c6582a64467a14dda211abc232"},
+ {file = "regex-2023.12.25-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:da695d75ac97cb1cd725adac136d25ca687da4536154cdc2815f576e4da11c69"},
+ {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d126361607b33c4eb7b36debc173bf25d7805847346dd4d99b5499e1fef52bc7"},
+ {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4719bb05094d7d8563a450cf8738d2e1061420f79cfcc1fa7f0a44744c4d8f73"},
+ {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5dd58946bce44b53b06d94aa95560d0b243eb2fe64227cba50017a8d8b3cd3e2"},
+ {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22a86d9fff2009302c440b9d799ef2fe322416d2d58fc124b926aa89365ec482"},
+ {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2aae8101919e8aa05ecfe6322b278f41ce2994c4a430303c4cd163fef746e04f"},
+ {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e692296c4cc2873967771345a876bcfc1c547e8dd695c6b89342488b0ea55cd8"},
+ {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:263ef5cc10979837f243950637fffb06e8daed7f1ac1e39d5910fd29929e489a"},
+ {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:d6f7e255e5fa94642a0724e35406e6cb7001c09d476ab5fce002f652b36d0c39"},
+ {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:88ad44e220e22b63b0f8f81f007e8abbb92874d8ced66f32571ef8beb0643b2b"},
+ {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:3a17d3ede18f9cedcbe23d2daa8a2cd6f59fe2bf082c567e43083bba3fb00347"},
+ {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d15b274f9e15b1a0b7a45d2ac86d1f634d983ca40d6b886721626c47a400bf39"},
+ {file = "regex-2023.12.25-cp37-cp37m-win32.whl", hash = "sha256:ed19b3a05ae0c97dd8f75a5d8f21f7723a8c33bbc555da6bbe1f96c470139d3c"},
+ {file = "regex-2023.12.25-cp37-cp37m-win_amd64.whl", hash = "sha256:a6d1047952c0b8104a1d371f88f4ab62e6275567d4458c1e26e9627ad489b445"},
+ {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b43523d7bc2abd757119dbfb38af91b5735eea45537ec6ec3a5ec3f9562a1c53"},
+ {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:efb2d82f33b2212898f1659fb1c2e9ac30493ac41e4d53123da374c3b5541e64"},
+ {file = "regex-2023.12.25-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b7fca9205b59c1a3d5031f7e64ed627a1074730a51c2a80e97653e3e9fa0d415"},
+ {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086dd15e9435b393ae06f96ab69ab2d333f5d65cbe65ca5a3ef0ec9564dfe770"},
+ {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e81469f7d01efed9b53740aedd26085f20d49da65f9c1f41e822a33992cb1590"},
+ {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:34e4af5b27232f68042aa40a91c3b9bb4da0eeb31b7632e0091afc4310afe6cb"},
+ {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9852b76ab558e45b20bf1893b59af64a28bd3820b0c2efc80e0a70a4a3ea51c1"},
+ {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff100b203092af77d1a5a7abe085b3506b7eaaf9abf65b73b7d6905b6cb76988"},
+ {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cc038b2d8b1470364b1888a98fd22d616fba2b6309c5b5f181ad4483e0017861"},
+ {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:094ba386bb5c01e54e14434d4caabf6583334090865b23ef58e0424a6286d3dc"},
+ {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5cd05d0f57846d8ba4b71d9c00f6f37d6b97d5e5ef8b3c3840426a475c8f70f4"},
+ {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:9aa1a67bbf0f957bbe096375887b2505f5d8ae16bf04488e8b0f334c36e31360"},
+ {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:98a2636994f943b871786c9e82bfe7883ecdaba2ef5df54e1450fa9869d1f756"},
+ {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:37f8e93a81fc5e5bd8db7e10e62dc64261bcd88f8d7e6640aaebe9bc180d9ce2"},
+ {file = "regex-2023.12.25-cp38-cp38-win32.whl", hash = "sha256:d78bd484930c1da2b9679290a41cdb25cc127d783768a0369d6b449e72f88beb"},
+ {file = "regex-2023.12.25-cp38-cp38-win_amd64.whl", hash = "sha256:b521dcecebc5b978b447f0f69b5b7f3840eac454862270406a39837ffae4e697"},
+ {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f7bc09bc9c29ebead055bcba136a67378f03d66bf359e87d0f7c759d6d4ffa31"},
+ {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e14b73607d6231f3cc4622809c196b540a6a44e903bcfad940779c80dffa7be7"},
+ {file = "regex-2023.12.25-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9eda5f7a50141291beda3edd00abc2d4a5b16c29c92daf8d5bd76934150f3edc"},
+ {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc6bb9aa69aacf0f6032c307da718f61a40cf970849e471254e0e91c56ffca95"},
+ {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:298dc6354d414bc921581be85695d18912bea163a8b23cac9a2562bbcd5088b1"},
+ {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f4e475a80ecbd15896a976aa0b386c5525d0ed34d5c600b6d3ebac0a67c7ddf"},
+ {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:531ac6cf22b53e0696f8e1d56ce2396311254eb806111ddd3922c9d937151dae"},
+ {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22f3470f7524b6da61e2020672df2f3063676aff444db1daa283c2ea4ed259d6"},
+ {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:89723d2112697feaa320c9d351e5f5e7b841e83f8b143dba8e2d2b5f04e10923"},
+ {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0ecf44ddf9171cd7566ef1768047f6e66975788258b1c6c6ca78098b95cf9a3d"},
+ {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:905466ad1702ed4acfd67a902af50b8db1feeb9781436372261808df7a2a7bca"},
+ {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:4558410b7a5607a645e9804a3e9dd509af12fb72b9825b13791a37cd417d73a5"},
+ {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:7e316026cc1095f2a3e8cc012822c99f413b702eaa2ca5408a513609488cb62f"},
+ {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3b1de218d5375cd6ac4b5493e0b9f3df2be331e86520f23382f216c137913d20"},
+ {file = "regex-2023.12.25-cp39-cp39-win32.whl", hash = "sha256:11a963f8e25ab5c61348d090bf1b07f1953929c13bd2309a0662e9ff680763c9"},
+ {file = "regex-2023.12.25-cp39-cp39-win_amd64.whl", hash = "sha256:e693e233ac92ba83a87024e1d32b5f9ab15ca55ddd916d878146f4e3406b5c91"},
+ {file = "regex-2023.12.25.tar.gz", hash = "sha256:29171aa128da69afdf4bde412d5bedc335f2ca8fcfe4489038577d05f16181e5"},
+]
+
[[package]]
name = "requests"
version = "2.32.3"
@@ -4135,6 +6036,24 @@ urllib3 = ">=1.21.1,<3"
socks = ["PySocks (>=1.5.6,!=1.5.7)"]
use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
+[[package]]
+name = "requests-oauthlib"
+version = "2.0.0"
+description = "OAuthlib authentication support for Requests."
+optional = false
+python-versions = ">=3.4"
+files = [
+ {file = "requests-oauthlib-2.0.0.tar.gz", hash = "sha256:b3dffaebd884d8cd778494369603a9e7b58d29111bf6b41bdc2dcd87203af4e9"},
+ {file = "requests_oauthlib-2.0.0-py2.py3-none-any.whl", hash = "sha256:7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36"},
+]
+
+[package.dependencies]
+oauthlib = ">=3.0.0"
+requests = ">=2.0.0"
+
+[package.extras]
+rsa = ["oauthlib[signedtoken] (>=3.0.0)"]
+
[[package]]
name = "respx"
version = "0.21.1"
@@ -4207,6 +6126,34 @@ files = [
{file = "ruff-0.4.10.tar.gz", hash = "sha256:3aa4f2bc388a30d346c56524f7cacca85945ba124945fe489952aadb6b5cd804"},
]
+[[package]]
+name = "s3transfer"
+version = "0.10.2"
+description = "An Amazon S3 Transfer Manager"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "s3transfer-0.10.2-py3-none-any.whl", hash = "sha256:eca1c20de70a39daee580aef4986996620f365c4e0fda6a86100231d62f1bf69"},
+ {file = "s3transfer-0.10.2.tar.gz", hash = "sha256:0711534e9356d3cc692fdde846b4a1e4b0cb6519971860796e6bc4c7aea00ef6"},
+]
+
+[package.dependencies]
+botocore = ">=1.33.2,<2.0a.0"
+
+[package.extras]
+crt = ["botocore[crt] (>=1.33.2,<2.0a.0)"]
+
+[[package]]
+name = "schema"
+version = "0.7.7"
+description = "Simple data validation library"
+optional = false
+python-versions = "*"
+files = [
+ {file = "schema-0.7.7-py2.py3-none-any.whl", hash = "sha256:5d976a5b50f36e74e2157b47097b60002bd4d42e65425fcc9c9befadb4255dde"},
+ {file = "schema-0.7.7.tar.gz", hash = "sha256:7da553abd2958a19dc2547c388cde53398b39196175a9be59ea1caf5ab0a1807"},
+]
+
[[package]]
name = "sentry-sdk"
version = "2.12.0"
@@ -4261,13 +6208,13 @@ tornado = ["tornado (>=6)"]
[[package]]
name = "setuptools"
-version = "72.1.0"
+version = "71.1.0"
description = "Easily download, build, install, upgrade, and uninstall Python packages"
optional = false
python-versions = ">=3.8"
files = [
- {file = "setuptools-72.1.0-py3-none-any.whl", hash = "sha256:5a03e1860cf56bb6ef48ce186b0e557fdba433237481a9a625176c2831be15d1"},
- {file = "setuptools-72.1.0.tar.gz", hash = "sha256:8d243eff56d095e5817f796ede6ae32941278f542e0f941867cc05ae52b162ec"},
+ {file = "setuptools-71.1.0-py3-none-any.whl", hash = "sha256:33874fdc59b3188304b2e7c80d9029097ea31627180896fb549c578ceb8a0855"},
+ {file = "setuptools-71.1.0.tar.gz", hash = "sha256:032d42ee9fb536e33087fb66cac5f840eb9391ed05637b3f2a76a7c8fb477936"},
]
[package.extras]
@@ -4275,6 +6222,58 @@ core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.te
doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.11.*)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (<0.4)", "pytest-ruff (>=0.2.1)", "pytest-ruff (>=0.3.2)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
+[[package]]
+name = "shapely"
+version = "2.0.5"
+description = "Manipulation and analysis of geometric objects"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "shapely-2.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:89d34787c44f77a7d37d55ae821f3a784fa33592b9d217a45053a93ade899375"},
+ {file = "shapely-2.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:798090b426142df2c5258779c1d8d5734ec6942f778dab6c6c30cfe7f3bf64ff"},
+ {file = "shapely-2.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45211276900c4790d6bfc6105cbf1030742da67594ea4161a9ce6812a6721e68"},
+ {file = "shapely-2.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e119444bc27ca33e786772b81760f2028d930ac55dafe9bc50ef538b794a8e1"},
+ {file = "shapely-2.0.5-cp310-cp310-win32.whl", hash = "sha256:9a4492a2b2ccbeaebf181e7310d2dfff4fdd505aef59d6cb0f217607cb042fb3"},
+ {file = "shapely-2.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:1e5cb5ee72f1bc7ace737c9ecd30dc174a5295fae412972d3879bac2e82c8fae"},
+ {file = "shapely-2.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5bbfb048a74cf273db9091ff3155d373020852805a37dfc846ab71dde4be93ec"},
+ {file = "shapely-2.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93be600cbe2fbaa86c8eb70656369f2f7104cd231f0d6585c7d0aa555d6878b8"},
+ {file = "shapely-2.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f8e71bb9a46814019f6644c4e2560a09d44b80100e46e371578f35eaaa9da1c"},
+ {file = "shapely-2.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5251c28a29012e92de01d2e84f11637eb1d48184ee8f22e2df6c8c578d26760"},
+ {file = "shapely-2.0.5-cp311-cp311-win32.whl", hash = "sha256:35110e80070d664781ec7955c7de557456b25727a0257b354830abb759bf8311"},
+ {file = "shapely-2.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:6c6b78c0007a34ce7144f98b7418800e0a6a5d9a762f2244b00ea560525290c9"},
+ {file = "shapely-2.0.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:03bd7b5fa5deb44795cc0a503999d10ae9d8a22df54ae8d4a4cd2e8a93466195"},
+ {file = "shapely-2.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ff9521991ed9e201c2e923da014e766c1aa04771bc93e6fe97c27dcf0d40ace"},
+ {file = "shapely-2.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b65365cfbf657604e50d15161ffcc68de5cdb22a601bbf7823540ab4918a98d"},
+ {file = "shapely-2.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21f64e647a025b61b19585d2247137b3a38a35314ea68c66aaf507a1c03ef6fe"},
+ {file = "shapely-2.0.5-cp312-cp312-win32.whl", hash = "sha256:3ac7dc1350700c139c956b03d9c3df49a5b34aaf91d024d1510a09717ea39199"},
+ {file = "shapely-2.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:30e8737983c9d954cd17feb49eb169f02f1da49e24e5171122cf2c2b62d65c95"},
+ {file = "shapely-2.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:ff7731fea5face9ec08a861ed351734a79475631b7540ceb0b66fb9732a5f529"},
+ {file = "shapely-2.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ff9e520af0c5a578e174bca3c18713cd47a6c6a15b6cf1f50ac17dc8bb8db6a2"},
+ {file = "shapely-2.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49b299b91557b04acb75e9732645428470825061f871a2edc36b9417d66c1fc5"},
+ {file = "shapely-2.0.5-cp37-cp37m-win32.whl", hash = "sha256:b5870633f8e684bf6d1ae4df527ddcb6f3895f7b12bced5c13266ac04f47d231"},
+ {file = "shapely-2.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:401cb794c5067598f50518e5a997e270cd7642c4992645479b915c503866abed"},
+ {file = "shapely-2.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e91ee179af539100eb520281ba5394919067c6b51824e6ab132ad4b3b3e76dd0"},
+ {file = "shapely-2.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8af6f7260f809c0862741ad08b1b89cb60c130ae30efab62320bbf4ee9cc71fa"},
+ {file = "shapely-2.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f5456dd522800306ba3faef77c5ba847ec30a0bd73ab087a25e0acdd4db2514f"},
+ {file = "shapely-2.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b714a840402cde66fd7b663bb08cacb7211fa4412ea2a209688f671e0d0631fd"},
+ {file = "shapely-2.0.5-cp38-cp38-win32.whl", hash = "sha256:7e8cf5c252fac1ea51b3162be2ec3faddedc82c256a1160fc0e8ddbec81b06d2"},
+ {file = "shapely-2.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:4461509afdb15051e73ab178fae79974387f39c47ab635a7330d7fee02c68a3f"},
+ {file = "shapely-2.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7545a39c55cad1562be302d74c74586f79e07b592df8ada56b79a209731c0219"},
+ {file = "shapely-2.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4c83a36f12ec8dee2066946d98d4d841ab6512a6ed7eb742e026a64854019b5f"},
+ {file = "shapely-2.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89e640c2cd37378480caf2eeda9a51be64201f01f786d127e78eaeff091ec897"},
+ {file = "shapely-2.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06efe39beafde3a18a21dde169d32f315c57da962826a6d7d22630025200c5e6"},
+ {file = "shapely-2.0.5-cp39-cp39-win32.whl", hash = "sha256:8203a8b2d44dcb366becbc8c3d553670320e4acf0616c39e218c9561dd738d92"},
+ {file = "shapely-2.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:7fed9dbfbcfec2682d9a047b9699db8dcc890dfca857ecba872c42185fc9e64e"},
+ {file = "shapely-2.0.5.tar.gz", hash = "sha256:bff2366bc786bfa6cb353d6b47d0443c570c32776612e527ee47b6df63fcfe32"},
+]
+
+[package.dependencies]
+numpy = ">=1.14,<3"
+
+[package.extras]
+docs = ["matplotlib", "numpydoc (==1.1.*)", "sphinx", "sphinx-book-theme", "sphinx-remove-toctrees"]
+test = ["pytest", "pytest-cov"]
+
[[package]]
name = "shellingham"
version = "1.5.4"
@@ -4308,6 +6307,30 @@ files = [
{file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"},
]
+[[package]]
+name = "soupsieve"
+version = "2.5"
+description = "A modern CSS selector implementation for Beautiful Soup."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "soupsieve-2.5-py3-none-any.whl", hash = "sha256:eaa337ff55a1579b6549dc679565eac1e3d000563bcb1c8ab0d0fefbc0c2cdc7"},
+ {file = "soupsieve-2.5.tar.gz", hash = "sha256:5663d5a7b3bfaeee0bc4372e7fc48f9cff4940b3eec54a6451cc5299f1097690"},
+]
+
+[[package]]
+name = "spider-client"
+version = "0.0.27"
+description = "Python SDK for Spider Cloud API"
+optional = false
+python-versions = "*"
+files = [
+ {file = "spider-client-0.0.27.tar.gz", hash = "sha256:c3feaf5c491bd9a6c509efa0c8789452497073d9f68e70fc90e7626a6a8365aa"},
+]
+
+[package.dependencies]
+requests = "*"
+
[[package]]
name = "sqlalchemy"
version = "2.0.32"
@@ -4446,6 +6469,37 @@ anyio = ">=3.4.0,<5"
[package.extras]
full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.7)", "pyyaml"]
+[[package]]
+name = "sympy"
+version = "1.13.1"
+description = "Computer algebra system (CAS) in Python"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "sympy-1.13.1-py3-none-any.whl", hash = "sha256:db36cdc64bf61b9b24578b6f7bab1ecdd2452cf008f34faa33776680c26d66f8"},
+ {file = "sympy-1.13.1.tar.gz", hash = "sha256:9cebf7e04ff162015ce31c9c6c9144daa34a93bd082f54fd8f12deca4f47515f"},
+]
+
+[package.dependencies]
+mpmath = ">=1.1.0,<1.4"
+
+[package.extras]
+dev = ["hypothesis (>=6.70.0)", "pytest (>=7.1.0)"]
+
+[[package]]
+name = "tabulate"
+version = "0.9.0"
+description = "Pretty-print tabular data"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"},
+ {file = "tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"},
+]
+
+[package.extras]
+widechars = ["wcwidth"]
+
[[package]]
name = "tenacity"
version = "8.5.0"
@@ -4475,6 +6529,203 @@ files = [
[package.extras]
tests = ["pytest", "pytest-cov"]
+[[package]]
+name = "tiktoken"
+version = "0.7.0"
+description = "tiktoken is a fast BPE tokeniser for use with OpenAI's models"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "tiktoken-0.7.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:485f3cc6aba7c6b6ce388ba634fbba656d9ee27f766216f45146beb4ac18b25f"},
+ {file = "tiktoken-0.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e54be9a2cd2f6d6ffa3517b064983fb695c9a9d8aa7d574d1ef3c3f931a99225"},
+ {file = "tiktoken-0.7.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79383a6e2c654c6040e5f8506f3750db9ddd71b550c724e673203b4f6b4b4590"},
+ {file = "tiktoken-0.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d4511c52caacf3c4981d1ae2df85908bd31853f33d30b345c8b6830763f769c"},
+ {file = "tiktoken-0.7.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:13c94efacdd3de9aff824a788353aa5749c0faee1fbe3816df365ea450b82311"},
+ {file = "tiktoken-0.7.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8e58c7eb29d2ab35a7a8929cbeea60216a4ccdf42efa8974d8e176d50c9a3df5"},
+ {file = "tiktoken-0.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:21a20c3bd1dd3e55b91c1331bf25f4af522c525e771691adbc9a69336fa7f702"},
+ {file = "tiktoken-0.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:10c7674f81e6e350fcbed7c09a65bca9356eaab27fb2dac65a1e440f2bcfe30f"},
+ {file = "tiktoken-0.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:084cec29713bc9d4189a937f8a35dbdfa785bd1235a34c1124fe2323821ee93f"},
+ {file = "tiktoken-0.7.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:811229fde1652fedcca7c6dfe76724d0908775b353556d8a71ed74d866f73f7b"},
+ {file = "tiktoken-0.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86b6e7dc2e7ad1b3757e8a24597415bafcfb454cebf9a33a01f2e6ba2e663992"},
+ {file = "tiktoken-0.7.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1063c5748be36344c7e18c7913c53e2cca116764c2080177e57d62c7ad4576d1"},
+ {file = "tiktoken-0.7.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:20295d21419bfcca092644f7e2f2138ff947a6eb8cfc732c09cc7d76988d4a89"},
+ {file = "tiktoken-0.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:959d993749b083acc57a317cbc643fb85c014d055b2119b739487288f4e5d1cb"},
+ {file = "tiktoken-0.7.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:71c55d066388c55a9c00f61d2c456a6086673ab7dec22dd739c23f77195b1908"},
+ {file = "tiktoken-0.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:09ed925bccaa8043e34c519fbb2f99110bd07c6fd67714793c21ac298e449410"},
+ {file = "tiktoken-0.7.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03c6c40ff1db0f48a7b4d2dafeae73a5607aacb472fa11f125e7baf9dce73704"},
+ {file = "tiktoken-0.7.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d20b5c6af30e621b4aca094ee61777a44118f52d886dbe4f02b70dfe05c15350"},
+ {file = "tiktoken-0.7.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d427614c3e074004efa2f2411e16c826f9df427d3c70a54725cae860f09e4bf4"},
+ {file = "tiktoken-0.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8c46d7af7b8c6987fac9b9f61041b452afe92eb087d29c9ce54951280f899a97"},
+ {file = "tiktoken-0.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:0bc603c30b9e371e7c4c7935aba02af5994a909fc3c0fe66e7004070858d3f8f"},
+ {file = "tiktoken-0.7.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2398fecd38c921bcd68418675a6d155fad5f5e14c2e92fcf5fe566fa5485a858"},
+ {file = "tiktoken-0.7.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8f5f6afb52fb8a7ea1c811e435e4188f2bef81b5e0f7a8635cc79b0eef0193d6"},
+ {file = "tiktoken-0.7.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:861f9ee616766d736be4147abac500732b505bf7013cfaf019b85892637f235e"},
+ {file = "tiktoken-0.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:54031f95c6939f6b78122c0aa03a93273a96365103793a22e1793ee86da31685"},
+ {file = "tiktoken-0.7.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:fffdcb319b614cf14f04d02a52e26b1d1ae14a570f90e9b55461a72672f7b13d"},
+ {file = "tiktoken-0.7.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c72baaeaefa03ff9ba9688624143c858d1f6b755bb85d456d59e529e17234769"},
+ {file = "tiktoken-0.7.0-cp38-cp38-win_amd64.whl", hash = "sha256:131b8aeb043a8f112aad9f46011dced25d62629091e51d9dc1adbf4a1cc6aa98"},
+ {file = "tiktoken-0.7.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cabc6dc77460df44ec5b879e68692c63551ae4fae7460dd4ff17181df75f1db7"},
+ {file = "tiktoken-0.7.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8d57f29171255f74c0aeacd0651e29aa47dff6f070cb9f35ebc14c82278f3b25"},
+ {file = "tiktoken-0.7.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ee92776fdbb3efa02a83f968c19d4997a55c8e9ce7be821ceee04a1d1ee149c"},
+ {file = "tiktoken-0.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e215292e99cb41fbc96988ef62ea63bb0ce1e15f2c147a61acc319f8b4cbe5bf"},
+ {file = "tiktoken-0.7.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8a81bac94769cab437dd3ab0b8a4bc4e0f9cf6835bcaa88de71f39af1791727a"},
+ {file = "tiktoken-0.7.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:d6d73ea93e91d5ca771256dfc9d1d29f5a554b83821a1dc0891987636e0ae226"},
+ {file = "tiktoken-0.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:2bcb28ddf79ffa424f171dfeef9a4daff61a94c631ca6813f43967cb263b83b9"},
+ {file = "tiktoken-0.7.0.tar.gz", hash = "sha256:1077266e949c24e0291f6c350433c6f0971365ece2b173a23bc3b9f9defef6b6"},
+]
+
+[package.dependencies]
+regex = ">=2022.1.18"
+requests = ">=2.26.0"
+
+[package.extras]
+blobfile = ["blobfile (>=2)"]
+
+[[package]]
+name = "together"
+version = "1.2.2"
+description = "Python client for Together's Cloud Platform!"
+optional = false
+python-versions = "<4.0,>=3.8"
+files = [
+ {file = "together-1.2.2-py3-none-any.whl", hash = "sha256:7ce89f902dbaca67e46e693d90182514494f510f3bc16cb89d816a5031ab0433"},
+ {file = "together-1.2.2.tar.gz", hash = "sha256:fd026f4a604e1fb3ee2fa5803f31e5e36ad31b3d182ef47f611326de66907d13"},
+]
+
+[package.dependencies]
+aiohttp = ">=3.9.3,<4.0.0"
+click = ">=8.1.7,<9.0.0"
+eval-type-backport = ">=0.1.3,<0.3.0"
+filelock = ">=3.13.1,<4.0.0"
+numpy = [
+ {version = ">=1.23.5", markers = "python_version < \"3.12\""},
+ {version = ">=1.26.0", markers = "python_version >= \"3.12\""},
+]
+pillow = ">=10.3.0,<11.0.0"
+pyarrow = ">=10.0.1"
+pydantic = ">=2.6.3,<3.0.0"
+requests = ">=2.31.0,<3.0.0"
+tabulate = ">=0.9.0,<0.10.0"
+tqdm = ">=4.66.2,<5.0.0"
+typer = ">=0.9,<0.13"
+
+[[package]]
+name = "tokenizers"
+version = "0.19.1"
+description = ""
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "tokenizers-0.19.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:952078130b3d101e05ecfc7fc3640282d74ed26bcf691400f872563fca15ac97"},
+ {file = "tokenizers-0.19.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:82c8b8063de6c0468f08e82c4e198763e7b97aabfe573fd4cf7b33930ca4df77"},
+ {file = "tokenizers-0.19.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f03727225feaf340ceeb7e00604825addef622d551cbd46b7b775ac834c1e1c4"},
+ {file = "tokenizers-0.19.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:453e4422efdfc9c6b6bf2eae00d5e323f263fff62b29a8c9cd526c5003f3f642"},
+ {file = "tokenizers-0.19.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:02e81bf089ebf0e7f4df34fa0207519f07e66d8491d963618252f2e0729e0b46"},
+ {file = "tokenizers-0.19.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b07c538ba956843833fee1190cf769c60dc62e1cf934ed50d77d5502194d63b1"},
+ {file = "tokenizers-0.19.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e28cab1582e0eec38b1f38c1c1fb2e56bce5dc180acb1724574fc5f47da2a4fe"},
+ {file = "tokenizers-0.19.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b01afb7193d47439f091cd8f070a1ced347ad0f9144952a30a41836902fe09e"},
+ {file = "tokenizers-0.19.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7fb297edec6c6841ab2e4e8f357209519188e4a59b557ea4fafcf4691d1b4c98"},
+ {file = "tokenizers-0.19.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2e8a3dd055e515df7054378dc9d6fa8c8c34e1f32777fb9a01fea81496b3f9d3"},
+ {file = "tokenizers-0.19.1-cp310-none-win32.whl", hash = "sha256:7ff898780a155ea053f5d934925f3902be2ed1f4d916461e1a93019cc7250837"},
+ {file = "tokenizers-0.19.1-cp310-none-win_amd64.whl", hash = "sha256:bea6f9947e9419c2fda21ae6c32871e3d398cba549b93f4a65a2d369662d9403"},
+ {file = "tokenizers-0.19.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:5c88d1481f1882c2e53e6bb06491e474e420d9ac7bdff172610c4f9ad3898059"},
+ {file = "tokenizers-0.19.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ddf672ed719b4ed82b51499100f5417d7d9f6fb05a65e232249268f35de5ed14"},
+ {file = "tokenizers-0.19.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:dadc509cc8a9fe460bd274c0e16ac4184d0958117cf026e0ea8b32b438171594"},
+ {file = "tokenizers-0.19.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfedf31824ca4915b511b03441784ff640378191918264268e6923da48104acc"},
+ {file = "tokenizers-0.19.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac11016d0a04aa6487b1513a3a36e7bee7eec0e5d30057c9c0408067345c48d2"},
+ {file = "tokenizers-0.19.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:76951121890fea8330d3a0df9a954b3f2a37e3ec20e5b0530e9a0044ca2e11fe"},
+ {file = "tokenizers-0.19.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b342d2ce8fc8d00f376af068e3274e2e8649562e3bc6ae4a67784ded6b99428d"},
+ {file = "tokenizers-0.19.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d16ff18907f4909dca9b076b9c2d899114dd6abceeb074eca0c93e2353f943aa"},
+ {file = "tokenizers-0.19.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:706a37cc5332f85f26efbe2bdc9ef8a9b372b77e4645331a405073e4b3a8c1c6"},
+ {file = "tokenizers-0.19.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:16baac68651701364b0289979ecec728546133e8e8fe38f66fe48ad07996b88b"},
+ {file = "tokenizers-0.19.1-cp311-none-win32.whl", hash = "sha256:9ed240c56b4403e22b9584ee37d87b8bfa14865134e3e1c3fb4b2c42fafd3256"},
+ {file = "tokenizers-0.19.1-cp311-none-win_amd64.whl", hash = "sha256:ad57d59341710b94a7d9dbea13f5c1e7d76fd8d9bcd944a7a6ab0b0da6e0cc66"},
+ {file = "tokenizers-0.19.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:621d670e1b1c281a1c9698ed89451395d318802ff88d1fc1accff0867a06f153"},
+ {file = "tokenizers-0.19.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d924204a3dbe50b75630bd16f821ebda6a5f729928df30f582fb5aade90c818a"},
+ {file = "tokenizers-0.19.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:4f3fefdc0446b1a1e6d81cd4c07088ac015665d2e812f6dbba4a06267d1a2c95"},
+ {file = "tokenizers-0.19.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9620b78e0b2d52ef07b0d428323fb34e8ea1219c5eac98c2596311f20f1f9266"},
+ {file = "tokenizers-0.19.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:04ce49e82d100594715ac1b2ce87d1a36e61891a91de774755f743babcd0dd52"},
+ {file = "tokenizers-0.19.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5c2ff13d157afe413bf7e25789879dd463e5a4abfb529a2d8f8473d8042e28f"},
+ {file = "tokenizers-0.19.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3174c76efd9d08f836bfccaca7cfec3f4d1c0a4cf3acbc7236ad577cc423c840"},
+ {file = "tokenizers-0.19.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c9d5b6c0e7a1e979bec10ff960fae925e947aab95619a6fdb4c1d8ff3708ce3"},
+ {file = "tokenizers-0.19.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a179856d1caee06577220ebcfa332af046d576fb73454b8f4d4b0ba8324423ea"},
+ {file = "tokenizers-0.19.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:952b80dac1a6492170f8c2429bd11fcaa14377e097d12a1dbe0ef2fb2241e16c"},
+ {file = "tokenizers-0.19.1-cp312-none-win32.whl", hash = "sha256:01d62812454c188306755c94755465505836fd616f75067abcae529c35edeb57"},
+ {file = "tokenizers-0.19.1-cp312-none-win_amd64.whl", hash = "sha256:b70bfbe3a82d3e3fb2a5e9b22a39f8d1740c96c68b6ace0086b39074f08ab89a"},
+ {file = "tokenizers-0.19.1-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:bb9dfe7dae85bc6119d705a76dc068c062b8b575abe3595e3c6276480e67e3f1"},
+ {file = "tokenizers-0.19.1-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:1f0360cbea28ea99944ac089c00de7b2e3e1c58f479fb8613b6d8d511ce98267"},
+ {file = "tokenizers-0.19.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:71e3ec71f0e78780851fef28c2a9babe20270404c921b756d7c532d280349214"},
+ {file = "tokenizers-0.19.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b82931fa619dbad979c0ee8e54dd5278acc418209cc897e42fac041f5366d626"},
+ {file = "tokenizers-0.19.1-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e8ff5b90eabdcdaa19af697885f70fe0b714ce16709cf43d4952f1f85299e73a"},
+ {file = "tokenizers-0.19.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e742d76ad84acbdb1a8e4694f915fe59ff6edc381c97d6dfdd054954e3478ad4"},
+ {file = "tokenizers-0.19.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d8c5d59d7b59885eab559d5bc082b2985555a54cda04dda4c65528d90ad252ad"},
+ {file = "tokenizers-0.19.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b2da5c32ed869bebd990c9420df49813709e953674c0722ff471a116d97b22d"},
+ {file = "tokenizers-0.19.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:638e43936cc8b2cbb9f9d8dde0fe5e7e30766a3318d2342999ae27f68fdc9bd6"},
+ {file = "tokenizers-0.19.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:78e769eb3b2c79687d9cb0f89ef77223e8e279b75c0a968e637ca7043a84463f"},
+ {file = "tokenizers-0.19.1-cp37-none-win32.whl", hash = "sha256:72791f9bb1ca78e3ae525d4782e85272c63faaef9940d92142aa3eb79f3407a3"},
+ {file = "tokenizers-0.19.1-cp37-none-win_amd64.whl", hash = "sha256:f3bbb7a0c5fcb692950b041ae11067ac54826204318922da754f908d95619fbc"},
+ {file = "tokenizers-0.19.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:07f9295349bbbcedae8cefdbcfa7f686aa420be8aca5d4f7d1ae6016c128c0c5"},
+ {file = "tokenizers-0.19.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:10a707cc6c4b6b183ec5dbfc5c34f3064e18cf62b4a938cb41699e33a99e03c1"},
+ {file = "tokenizers-0.19.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6309271f57b397aa0aff0cbbe632ca9d70430839ca3178bf0f06f825924eca22"},
+ {file = "tokenizers-0.19.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ad23d37d68cf00d54af184586d79b84075ada495e7c5c0f601f051b162112dc"},
+ {file = "tokenizers-0.19.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:427c4f0f3df9109314d4f75b8d1f65d9477033e67ffaec4bca53293d3aca286d"},
+ {file = "tokenizers-0.19.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e83a31c9cf181a0a3ef0abad2b5f6b43399faf5da7e696196ddd110d332519ee"},
+ {file = "tokenizers-0.19.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c27b99889bd58b7e301468c0838c5ed75e60c66df0d4db80c08f43462f82e0d3"},
+ {file = "tokenizers-0.19.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bac0b0eb952412b0b196ca7a40e7dce4ed6f6926489313414010f2e6b9ec2adf"},
+ {file = "tokenizers-0.19.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8a6298bde623725ca31c9035a04bf2ef63208d266acd2bed8c2cb7d2b7d53ce6"},
+ {file = "tokenizers-0.19.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:08a44864e42fa6d7d76d7be4bec62c9982f6f6248b4aa42f7302aa01e0abfd26"},
+ {file = "tokenizers-0.19.1-cp38-none-win32.whl", hash = "sha256:1de5bc8652252d9357a666e609cb1453d4f8e160eb1fb2830ee369dd658e8975"},
+ {file = "tokenizers-0.19.1-cp38-none-win_amd64.whl", hash = "sha256:0bcce02bf1ad9882345b34d5bd25ed4949a480cf0e656bbd468f4d8986f7a3f1"},
+ {file = "tokenizers-0.19.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:0b9394bd204842a2a1fd37fe29935353742be4a3460b6ccbaefa93f58a8df43d"},
+ {file = "tokenizers-0.19.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4692ab92f91b87769d950ca14dbb61f8a9ef36a62f94bad6c82cc84a51f76f6a"},
+ {file = "tokenizers-0.19.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6258c2ef6f06259f70a682491c78561d492e885adeaf9f64f5389f78aa49a051"},
+ {file = "tokenizers-0.19.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c85cf76561fbd01e0d9ea2d1cbe711a65400092bc52b5242b16cfd22e51f0c58"},
+ {file = "tokenizers-0.19.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:670b802d4d82bbbb832ddb0d41df7015b3e549714c0e77f9bed3e74d42400fbe"},
+ {file = "tokenizers-0.19.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:85aa3ab4b03d5e99fdd31660872249df5e855334b6c333e0bc13032ff4469c4a"},
+ {file = "tokenizers-0.19.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cbf001afbbed111a79ca47d75941e9e5361297a87d186cbfc11ed45e30b5daba"},
+ {file = "tokenizers-0.19.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4c89aa46c269e4e70c4d4f9d6bc644fcc39bb409cb2a81227923404dd6f5227"},
+ {file = "tokenizers-0.19.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:39c1ec76ea1027438fafe16ecb0fb84795e62e9d643444c1090179e63808c69d"},
+ {file = "tokenizers-0.19.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c2a0d47a89b48d7daa241e004e71fb5a50533718897a4cd6235cb846d511a478"},
+ {file = "tokenizers-0.19.1-cp39-none-win32.whl", hash = "sha256:61b7fe8886f2e104d4caf9218b157b106207e0f2a4905c9c7ac98890688aabeb"},
+ {file = "tokenizers-0.19.1-cp39-none-win_amd64.whl", hash = "sha256:f97660f6c43efd3e0bfd3f2e3e5615bf215680bad6ee3d469df6454b8c6e8256"},
+ {file = "tokenizers-0.19.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3b11853f17b54c2fe47742c56d8a33bf49ce31caf531e87ac0d7d13d327c9334"},
+ {file = "tokenizers-0.19.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d26194ef6c13302f446d39972aaa36a1dda6450bc8949f5eb4c27f51191375bd"},
+ {file = "tokenizers-0.19.1-pp310-pypy310_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:e8d1ed93beda54bbd6131a2cb363a576eac746d5c26ba5b7556bc6f964425594"},
+ {file = "tokenizers-0.19.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca407133536f19bdec44b3da117ef0d12e43f6d4b56ac4c765f37eca501c7bda"},
+ {file = "tokenizers-0.19.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce05fde79d2bc2e46ac08aacbc142bead21614d937aac950be88dc79f9db9022"},
+ {file = "tokenizers-0.19.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:35583cd46d16f07c054efd18b5d46af4a2f070a2dd0a47914e66f3ff5efb2b1e"},
+ {file = "tokenizers-0.19.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:43350270bfc16b06ad3f6f07eab21f089adb835544417afda0f83256a8bf8b75"},
+ {file = "tokenizers-0.19.1-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b4399b59d1af5645bcee2072a463318114c39b8547437a7c2d6a186a1b5a0e2d"},
+ {file = "tokenizers-0.19.1-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6852c5b2a853b8b0ddc5993cd4f33bfffdca4fcc5d52f89dd4b8eada99379285"},
+ {file = "tokenizers-0.19.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bcd266ae85c3d39df2f7e7d0e07f6c41a55e9a3123bb11f854412952deacd828"},
+ {file = "tokenizers-0.19.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ecb2651956eea2aa0a2d099434134b1b68f1c31f9a5084d6d53f08ed43d45ff2"},
+ {file = "tokenizers-0.19.1-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:b279ab506ec4445166ac476fb4d3cc383accde1ea152998509a94d82547c8e2a"},
+ {file = "tokenizers-0.19.1-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:89183e55fb86e61d848ff83753f64cded119f5d6e1f553d14ffee3700d0a4a49"},
+ {file = "tokenizers-0.19.1-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b2edbc75744235eea94d595a8b70fe279dd42f3296f76d5a86dde1d46e35f574"},
+ {file = "tokenizers-0.19.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:0e64bfde9a723274e9a71630c3e9494ed7b4c0f76a1faacf7fe294cd26f7ae7c"},
+ {file = "tokenizers-0.19.1-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0b5ca92bfa717759c052e345770792d02d1f43b06f9e790ca0a1db62838816f3"},
+ {file = "tokenizers-0.19.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f8a20266e695ec9d7a946a019c1d5ca4eddb6613d4f466888eee04f16eedb85"},
+ {file = "tokenizers-0.19.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63c38f45d8f2a2ec0f3a20073cccb335b9f99f73b3c69483cd52ebc75369d8a1"},
+ {file = "tokenizers-0.19.1-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:dd26e3afe8a7b61422df3176e06664503d3f5973b94f45d5c45987e1cb711876"},
+ {file = "tokenizers-0.19.1-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:eddd5783a4a6309ce23432353cdb36220e25cbb779bfa9122320666508b44b88"},
+ {file = "tokenizers-0.19.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:56ae39d4036b753994476a1b935584071093b55c7a72e3b8288e68c313ca26e7"},
+ {file = "tokenizers-0.19.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:f9939ca7e58c2758c01b40324a59c034ce0cebad18e0d4563a9b1beab3018243"},
+ {file = "tokenizers-0.19.1-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6c330c0eb815d212893c67a032e9dc1b38a803eccb32f3e8172c19cc69fbb439"},
+ {file = "tokenizers-0.19.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec11802450a2487cdf0e634b750a04cbdc1c4d066b97d94ce7dd2cb51ebb325b"},
+ {file = "tokenizers-0.19.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2b718f316b596f36e1dae097a7d5b91fc5b85e90bf08b01ff139bd8953b25af"},
+ {file = "tokenizers-0.19.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:ed69af290c2b65169f0ba9034d1dc39a5db9459b32f1dd8b5f3f32a3fcf06eab"},
+ {file = "tokenizers-0.19.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f8a9c828277133af13f3859d1b6bf1c3cb6e9e1637df0e45312e6b7c2e622b1f"},
+ {file = "tokenizers-0.19.1.tar.gz", hash = "sha256:ee59e6680ed0fdbe6b724cf38bd70400a0c1dd623b07ac729087270caeac88e3"},
+]
+
+[package.dependencies]
+huggingface-hub = ">=0.16.4,<1.0"
+
+[package.extras]
+dev = ["tokenizers[testing]"]
+docs = ["setuptools-rust", "sphinx", "sphinx-rtd-theme"]
+testing = ["black (==22.3)", "datasets", "numpy", "pytest", "requests", "ruff"]
+
[[package]]
name = "tomli"
version = "2.0.1"
@@ -4506,6 +6757,26 @@ files = [
{file = "tornado-6.4.1.tar.gz", hash = "sha256:92d3ab53183d8c50f8204a51e6f91d18a15d5ef261e84d452800d4ff6fc504e9"},
]
+[[package]]
+name = "tqdm"
+version = "4.66.4"
+description = "Fast, Extensible Progress Meter"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "tqdm-4.66.4-py3-none-any.whl", hash = "sha256:b75ca56b413b030bc3f00af51fd2c1a1a5eac6a0c1cca83cbb37a5c52abce644"},
+ {file = "tqdm-4.66.4.tar.gz", hash = "sha256:e4d936c9de8727928f3be6079590e97d9abfe8d39a590be678eb5919ffc186bb"},
+]
+
+[package.dependencies]
+colorama = {version = "*", markers = "platform_system == \"Windows\""}
+
+[package.extras]
+dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"]
+notebook = ["ipywidgets (>=6)"]
+slack = ["slack-sdk"]
+telegram = ["requests"]
+
[[package]]
name = "traitlets"
version = "5.14.3"
@@ -4769,13 +7040,13 @@ zstd = ["zstandard (>=0.18.0)"]
[[package]]
name = "uvicorn"
-version = "0.30.5"
+version = "0.30.3"
description = "The lightning-fast ASGI server."
optional = false
python-versions = ">=3.8"
files = [
- {file = "uvicorn-0.30.5-py3-none-any.whl", hash = "sha256:b2d86de274726e9878188fa07576c9ceeff90a839e2b6e25c917fe05f5a6c835"},
- {file = "uvicorn-0.30.5.tar.gz", hash = "sha256:ac6fdbd4425c5fd17a9fe39daf4d4d075da6fdc80f653e5894cdc2fd98752bee"},
+ {file = "uvicorn-0.30.3-py3-none-any.whl", hash = "sha256:94a3608da0e530cea8f69683aa4126364ac18e3826b6630d1a65f4638aade503"},
+ {file = "uvicorn-0.30.3.tar.gz", hash = "sha256:0d114d0831ff1adbf231d358cbf42f17333413042552a624ea6a9b4c33dcfd81"},
]
[package.dependencies]
@@ -4981,6 +7252,22 @@ files = [
{file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"},
]
+[[package]]
+name = "websocket-client"
+version = "1.8.0"
+description = "WebSocket client for Python with low level API options"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "websocket_client-1.8.0-py3-none-any.whl", hash = "sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526"},
+ {file = "websocket_client-1.8.0.tar.gz", hash = "sha256:3239df9f44da632f96012472805d40a23281a991027ce11d2f45a6f24ac4c3da"},
+]
+
+[package.extras]
+docs = ["Sphinx (>=6.0)", "myst-parser (>=2.0.0)", "sphinx-rtd-theme (>=1.1.0)"]
+optional = ["python-socks", "wsaccel"]
+test = ["websockets"]
+
[[package]]
name = "websockets"
version = "12.0"
@@ -5367,4 +7654,4 @@ local = []
[metadata]
lock-version = "2.0"
python-versions = ">=3.10,<3.13"
-content-hash = "877648288f4f9d5d3304c36853d1b2c5f833d673665bf9b1c707c37bbf02e6ad"
+content-hash = "747dad35b9e5b1338a989ea6bfd4ac3465ba34f792639aeabda3c1ca9b40c689"
diff --git a/src/backend/base/pyproject.toml b/src/backend/base/pyproject.toml
index 6182d84b4aae..d98c673e0767 100644
--- a/src/backend/base/pyproject.toml
+++ b/src/backend/base/pyproject.toml
@@ -77,6 +77,10 @@ setuptools = ">=70"
nanoid = "^2.0.0"
filelock = "^3.15.4"
grandalf = "^0.8.0"
+crewai = "^0.36.0"
+spider-client = "^0.0.27"
+
+
[tool.poetry.extras]
deploy = ["celery", "redis", "flower"]
local = ["llama-cpp-python", "sentence-transformers", "ctransformers"]
diff --git a/src/frontend/src/icons/Spider/SpiderIcon.jsx b/src/frontend/src/icons/Spider/SpiderIcon.jsx
new file mode 100644
index 000000000000..1d10d9fee820
--- /dev/null
+++ b/src/frontend/src/icons/Spider/SpiderIcon.jsx
@@ -0,0 +1,18 @@
+const SvgSpiderIcon = (props) => (
+
+ Spider v0 Logo
+
+
+);
+export default SvgSpiderIcon;
diff --git a/src/frontend/src/icons/Spider/index.tsx b/src/frontend/src/icons/Spider/index.tsx
new file mode 100644
index 000000000000..65e41c36412c
--- /dev/null
+++ b/src/frontend/src/icons/Spider/index.tsx
@@ -0,0 +1,9 @@
+import React, { forwardRef } from "react";
+import SvgSpiderIcon from "./SpiderIcon";
+
+export const SpiderIcon = forwardRef<
+ SVGSVGElement,
+ React.PropsWithChildren<{}>
+>((props, ref) => {
+  return <SvgSpiderIcon ref={ref} {...props} />;
+});
diff --git a/src/frontend/src/icons/Spider/spider_logo.svg b/src/frontend/src/icons/Spider/spider_logo.svg
new file mode 100644
index 000000000000..604a09d01d74
--- /dev/null
+++ b/src/frontend/src/icons/Spider/spider_logo.svg
@@ -0,0 +1 @@
+Spider v1 Logo
diff --git a/src/frontend/src/utils/styleUtils.ts b/src/frontend/src/utils/styleUtils.ts
index eb37edf578eb..553439403f28 100644
--- a/src/frontend/src/utils/styleUtils.ts
+++ b/src/frontend/src/utils/styleUtils.ts
@@ -211,6 +211,7 @@ import { SearxIcon } from "../icons/Searx";
import { ShareIcon } from "../icons/Share";
import { Share2Icon } from "../icons/Share2";
import SvgSlackIcon from "../icons/Slack/SlackIcon";
+import { SpiderIcon } from "../icons/Spider";
import { Streamlit } from "../icons/Streamlit";
import { UpstashSvgIcon } from "../icons/Upstash";
import { VectaraIcon } from "../icons/VectaraIcon";
@@ -412,6 +413,7 @@ export const nodeIconsLucide: iconsType = {
Weaviate: WeaviateIcon,
Searx: SearxIcon,
SlackDirectoryLoader: SvgSlackIcon,
+ SpiderTool: SpiderIcon,
SupabaseVectorStore: SupabaseIcon,
Supabase: SupabaseIcon,
VertexAI: VertexAIIcon,
From 4d1c8ad92a51bc0c89afc8aa73d1cb4216ea9493 Mon Sep 17 00:00:00 2001
From: Gabriel Luiz Freitas Almeida
Date: Thu, 8 Aug 2024 12:04:36 -0300
Subject: [PATCH 02/38] ci: add version check in workflow to skip jobs for
unreleased versions of langflow-base (#3244)
feat: add version check in workflow to skip jobs for unreleased versions of langflow-base
---
.github/workflows/python_test.yml | 16 ++++++++++++++++
1 file changed, 16 insertions(+)
diff --git a/.github/workflows/python_test.yml b/.github/workflows/python_test.yml
index ba4b2b892a64..5f6671a024ad 100644
--- a/.github/workflows/python_test.yml
+++ b/.github/workflows/python_test.yml
@@ -69,14 +69,30 @@ jobs:
python-version: ${{ matrix.python-version }}
poetry-version: ${{ env.POETRY_VERSION }}
cache-key: ${{ runner.os }}-poetry-${{ env.POETRY_VERSION }}-${{ hashFiles('**/poetry.lock') }}
+ - name: Check Version
+ id: check-version
+ run: |
+ version=$(cd src/backend/base && poetry version --short)
+ last_released_version=$(curl -s "https://pypi.org/pypi/langflow-base/json" | jq -r '.releases | keys | .[]' | sort -V | tail -n 1)
+ if [ "$version" != "$last_released_version" ]; then
+ echo "Version $version has not been released yet. Skipping the rest of the job."
+ echo skipped=true >> $GITHUB_OUTPUT
+ exit 0
+ else
+ echo version=$version >> $GITHUB_OUTPUT
+ echo skipped=false >> $GITHUB_OUTPUT
+ fi
- name: Build wheel
+ if: steps.check-version.outputs.skipped == 'false'
run: |
poetry env use ${{ matrix.python-version }}
make build main=true
- name: Install wheel
+ if: steps.check-version.outputs.skipped == 'false'
run: |
python -m pip install dist/*.whl
- name: Test CLI
+ if: steps.check-version.outputs.skipped == 'false'
run: |
python -m langflow run --host 127.0.0.1 --port 7860 --backend-only &
SERVER_PID=$!
From 881828c4a40295d44a4674fae7c6429bae29c07a Mon Sep 17 00:00:00 2001
From: Gabriel Luiz Freitas Almeida
Date: Thu, 8 Aug 2024 12:04:50 -0300
Subject: [PATCH 03/38] ci: refactor release workflow and Docker build process
(#3245)
* feat: update docker-build.yml to conditionally retrieve version and adjust tagging logic for Docker images in workflows
* Refactor release workflow to separate base and main package handling
- Split `release_package` input into `release_package_base` and `release_package_main`
- Add new inputs for building Docker images: `build_docker_base` and `build_docker_main`
- Update conditional checks and job dependencies to reflect new inputs
- Separate Docker build workflows for base and main packages
* Refactor release.yml to introduce separate inputs for base and main packages, enhancing workflow flexibility and clarity
* chore: update release.yml to set default pre-release option to false, reflecting new workflow strategy
* chore: add pre-release check to release.yml to validate version format before proceeding with the workflow
* chore: remove deprecated pre-release workflows, consolidating configuration for cleaner CI/CD process
* chore: modify pre-release check in release.yml to use poetry version for validation, enhancing version format accuracy
* chore: refine pre-release version check in release.yml for improved regex validation, ensuring accurate version detection
---
.github/workflows/docker-build.yml | 41 +++++--
.github/workflows/pre-release-base.yml | 80 -------------
.github/workflows/pre-release-langflow.yml | 133 ---------------------
.github/workflows/pre-release.yml | 129 --------------------
.github/workflows/release.yml | 62 ++++++++--
5 files changed, 85 insertions(+), 360 deletions(-)
delete mode 100644 .github/workflows/pre-release-base.yml
delete mode 100644 .github/workflows/pre-release-langflow.yml
delete mode 100644 .github/workflows/pre-release.yml
diff --git a/.github/workflows/docker-build.yml b/.github/workflows/docker-build.yml
index ae4b4a0395c4..7b425c7e2575 100644
--- a/.github/workflows/docker-build.yml
+++ b/.github/workflows/docker-build.yml
@@ -3,7 +3,7 @@ on:
workflow_call:
inputs:
version:
- required: true
+ required: false
type: string
release_type:
required: true
@@ -33,8 +33,33 @@ env:
TEST_TAG: "langflowai/langflow:test"
jobs:
+ get-version:
+ name: Get Version
+ runs-on: ubuntu-latest
+ outputs:
+ version: ${{ steps.get-version-input.outputs.version || steps.get-version-base.outputs.version || steps.get-version-main.outputs.version }}
+ steps:
+ - name: Get Version from Input
+ if : ${{ inputs.version != '' }}
+ id: get-version-input
+ run: |
+ version=${{ inputs.version }}
+ echo version=$version >> $GITHUB_OUTPUT
+ - name: Get Version Main
+ if : ${{ inputs.version == '' && inputs.release_type == 'base' }}
+ id: get-version-base
+ run: |
+ version=$(poetry version --short)
+ echo version=$version >> $GITHUB_OUTPUT
+ - name: Get Version Base
+ if : ${{ inputs.version == '' && inputs.release_type == 'main' }}
+ id: get-version-main
+ run: |
+ version=$(cd src/backend/base && poetry version --short)
+ echo version=$version >> $GITHUB_OUTPUT
setup:
runs-on: ubuntu-latest
+ needs: get-version
outputs:
tags: ${{ steps.set-vars.outputs.tags }}
file: ${{ steps.set-vars.outputs.file }}
@@ -44,13 +69,13 @@ jobs:
id: set-vars
run: |
if [[ "${{ inputs.release_type }}" == "base" ]]; then
- echo "tags=langflowai/langflow:base-${{ inputs.version }},langflowai/langflow:base-latest" >> $GITHUB_OUTPUT
+ echo "tags=langflowai/langflow:base-${{ needs.get-version.outputs.version }},langflowai/langflow:base-latest" >> $GITHUB_OUTPUT
echo "file=./docker/build_and_push_base.Dockerfile" >> $GITHUB_OUTPUT
else
if [[ "${{ inputs.pre_release }}" == "true" ]]; then
- echo "tags=langflowai/langflow:${{ inputs.version }}" >> $GITHUB_OUTPUT
+ echo "tags=langflowai/langflow:${{ needs.get-version.outputs.version }}" >> $GITHUB_OUTPUT
else
- echo "tags=langflowai/langflow:${{ inputs.version }},langflowai/langflow:latest" >> $GITHUB_OUTPUT
+ echo "tags=langflowai/langflow:${{ needs.get-version.outputs.version }},langflowai/langflow:latest" >> $GITHUB_OUTPUT
fi
echo "file=./docker/build_and_push.Dockerfile" >> $GITHUB_OUTPUT
fi
@@ -79,17 +104,17 @@ jobs:
build_components:
if: ${{ inputs.release_type == 'main' }}
runs-on: ubuntu-latest
- needs: build
+ needs: [build, get-version]
strategy:
matrix:
component: [backend, frontend]
include:
- component: backend
dockerfile: ./docker/build_and_push_backend.Dockerfile
- tags: ${{ inputs.pre_release == 'true' && format('langflowai/langflow-backend:{0}', inputs.version) || format('langflowai/langflow-backend:{0},langflowai/langflow-backend:latest', inputs.version) }}
+ tags: ${{ inputs.pre_release == 'true' && format('langflowai/langflow-backend:{0}', needs.get-version.outputs.version) || format('langflowai/langflow-backend:{0},langflowai/langflow-backend:latest', needs.get-version.outputs.version) }}
- component: frontend
dockerfile: ./docker/frontend/build_and_push_frontend.Dockerfile
- tags: ${{ inputs.pre_release == 'true' && format('langflowai/langflow-frontend:{0}', inputs.version) || format('langflowai/langflow-frontend:{0},langflowai/langflow-frontend:latest', inputs.version) }}
+ tags: ${{ inputs.pre_release == 'true' && format('langflowai/langflow-frontend:{0}', needs.get-version.outputs.version) || format('langflowai/langflow-frontend:{0},langflowai/langflow-frontend:latest', needs.get-version.outputs.version) }}
steps:
- uses: actions/checkout@v4
- name: Set up Docker Buildx
@@ -107,7 +132,7 @@ jobs:
context: .
push: true
build-args: |
- LANGFLOW_IMAGE=langflowai/langflow:${{ inputs.version }}
+ LANGFLOW_IMAGE=langflowai/langflow:${{ needs.get-version.outputs.version }}
file: ${{ matrix.dockerfile }}
tags: ${{ matrix.tags }}
# provenance: false will result in a single manifest for all platforms which makes the image pullable from arm64 machines via the emulation (e.g. Apple Silicon machines)
diff --git a/.github/workflows/pre-release-base.yml b/.github/workflows/pre-release-base.yml
deleted file mode 100644
index 6d9e2f0bdc38..000000000000
--- a/.github/workflows/pre-release-base.yml
+++ /dev/null
@@ -1,80 +0,0 @@
-name: Langflow Base Pre-release
-run-name: Langflow Base Pre-release by @${{ github.actor }}
-on:
- workflow_dispatch:
- inputs:
- release_package:
- description: "Release package"
- required: true
- type: boolean
- default: false
-
-env:
- POETRY_VERSION: "1.8.2"
-
-jobs:
- release:
- name: Release Langflow Base
- if: inputs.release_package == true
- runs-on: ubuntu-latest
- outputs:
- version: ${{ steps.check-version.outputs.version }}
- steps:
- - uses: actions/checkout@v4
- - name: Install poetry
- run: pipx install poetry==${{ env.POETRY_VERSION }}
- - name: Set up Python 3.10
- uses: actions/setup-python@v5
- with:
- python-version: "3.10"
- cache: "poetry"
- - name: Check Version
- id: check-version
- # In this step, we should check the version of the package
- # and see if it is a version that is already released
- # echo version=$(cd src/backend/base && poetry version --short) >> $GITHUB_OUTPUT
- # cd src/backend/base && poetry version --short should
- # be different than the last release version in pypi
- # which we can get from curl -s "https://pypi.org/pypi/langflow/json" | jq -r '.releases | keys | .[]' | sort -V | tail -n 1
- run: |
- version=$(cd src/backend/base && poetry version --short)
- last_released_version=$(curl -s "https://pypi.org/pypi/langflow-base/json" | jq -r '.releases | keys | .[]' | sort -V | tail -n 1)
- if [ "$version" = "$last_released_version" ]; then
- echo "Version $version is already released. Skipping release."
- exit 1
- else
- echo version=$version >> $GITHUB_OUTPUT
- fi
- - name: Build project for distribution
- run: make build base=true
- - name: Publish to PyPI
- env:
- POETRY_PYPI_TOKEN_PYPI: ${{ secrets.PYPI_API_TOKEN }}
- run: |
- make publish base=true
- docker_build:
- name: Build Docker Image
- runs-on: ubuntu-latest
- needs: release
- steps:
- - uses: actions/checkout@v4
- - name: Set up QEMU
- uses: docker/setup-qemu-action@v3
- id: qemu
- - name: Set up Docker Buildx
- uses: docker/setup-buildx-action@v3
- - name: Login to Docker Hub
- uses: docker/login-action@v3
- with:
- username: ${{ secrets.DOCKERHUB_USERNAME }}
- password: ${{ secrets.DOCKERHUB_TOKEN }}
- - name: Build and push
- uses: docker/build-push-action@v6
- with:
- context: .
- push: true
- file: ./docker/build_and_push_base.Dockerfile
- tags: |
- langflowai/langflow:base-${{ needs.release.outputs.version }}
- # provenance: false will result in a single manifest for all platforms which makes the image pullable from arm64 machines via the emulation (e.g. Apple Silicon machines)
- provenance: false
diff --git a/.github/workflows/pre-release-langflow.yml b/.github/workflows/pre-release-langflow.yml
deleted file mode 100644
index 5a052dcc84d2..000000000000
--- a/.github/workflows/pre-release-langflow.yml
+++ /dev/null
@@ -1,133 +0,0 @@
-name: Langflow Pre-release
-run-name: Langflow Pre-release by @${{ github.actor }}
-on:
- workflow_dispatch:
- inputs:
- release_package:
- description: "Release package"
- required: true
- type: boolean
- default: false
- workflow_run:
- workflows: ["pre-release-base"]
- types: [completed]
- branches: [dev]
-
-env:
- POETRY_VERSION: "1.8.2"
-
-jobs:
- release:
- name: Release Langflow
- if: inputs.release_package == true
- runs-on: ubuntu-latest
- outputs:
- version: ${{ steps.check-version.outputs.version }}
- steps:
- - uses: actions/checkout@v4
- - name: Install poetry
- run: pipx install poetry==${{ env.POETRY_VERSION }}
- - name: Set up Python 3.10
- uses: actions/setup-python@v5
- with:
- python-version: "3.10"
- cache: "poetry"
- - name: Check Version
- id: check-version
- run: |
- version=$(poetry version --short)
- last_released_version=$(curl -s "https://pypi.org/pypi/langflow/json" | jq -r '.releases | keys | .[]' | sort -V | tail -n 1)
- if [ "$version" = "$last_released_version" ]; then
- echo "Version $version is already released. Skipping release."
- exit 1
- else
- echo version=$version >> $GITHUB_OUTPUT
- fi
- - name: Build project for distribution
- run: make build main=true
- - name: Display pyproject.toml langflow-base Version
- run: cat pyproject.toml | grep langflow-base
- - name: Publish to PyPI
- env:
- POETRY_PYPI_TOKEN_PYPI: ${{ secrets.PYPI_API_TOKEN }}
- run: |
- make publish main=true
- - name: Upload Artifact
- uses: actions/upload-artifact@v4
- with:
- name: dist
- path: dist
-
- docker_build:
- name: Build Docker Image
- runs-on: ubuntu-latest
- needs: release
- steps:
- - uses: actions/checkout@v4
- - name: Set up QEMU
- uses: docker/setup-qemu-action@v3
- id: qemu
- - name: Set up Docker Buildx
- uses: docker/setup-buildx-action@v3
- - name: Login to Docker Hub
- uses: docker/login-action@v3
- with:
- username: ${{ secrets.DOCKERHUB_USERNAME }}
- password: ${{ secrets.DOCKERHUB_TOKEN }}
- - name: Build and push
- uses: docker/build-push-action@v6
- with:
- context: .
- push: true
- file: ./docker/build_and_push.Dockerfile
- # provenance: false will result in a single manifest for all platforms which makes the image pullable from arm64 machines via the emulation (e.g. Apple Silicon machines)
- provenance: false
- tags: |
- langflowai/langflow:${{ needs.release.outputs.version }}
- langflowai/langflow:1.0-alpha
- - name: Build and push (frontend)
- uses: docker/build-push-action@v6
- with:
- context: .
- push: true
- file: ./docker/frontend/build_and_push_frontend.Dockerfile
- # provenance: false will result in a single manifest for all platforms which makes the image pullable from arm64 machines via the emulation (e.g. Apple Silicon machines)
- provenance: false
- tags: |
- langflowai/langflow-frontend:${{ needs.release.outputs.version }}
- langflowai/langflow-frontend:1.0-alpha
- - name: Wait for Docker Hub to propagate
- run: sleep 120
- - name: Build and push (backend)
- uses: docker/build-push-action@v6
- with:
- context: .
- push: true
- file: ./docker/build_and_push_backend.Dockerfile
- # provenance: false will result in a single manifest for all platforms which makes the image pullable from arm64 machines via the emulation (e.g. Apple Silicon machines)
- provenance: false
- build-args: |
- LANGFLOW_IMAGE=langflowai/langflow:${{ needs.release.outputs.version }}
- tags: |
- langflowai/langflow-backend:${{ needs.release.outputs.version }}
- langflowai/langflow-backend:1.0-alpha
-
- create_release:
- name: Create Release
- runs-on: ubuntu-latest
- needs: [docker_build, release]
- steps:
- - uses: actions/download-artifact@v4
- with:
- name: dist
- path: dist
- - name: Create Release
- uses: ncipollo/release-action@v1
- with:
- artifacts: "dist/*"
- token: ${{ secrets.GITHUB_TOKEN }}
- draft: false
- generateReleaseNotes: true
- prerelease: true
- tag: v${{ needs.release.outputs.version }}
- commit: dev
diff --git a/.github/workflows/pre-release.yml b/.github/workflows/pre-release.yml
deleted file mode 100644
index 814edd7beb61..000000000000
--- a/.github/workflows/pre-release.yml
+++ /dev/null
@@ -1,129 +0,0 @@
-name: Langflow Pre-release (Unified)
-run-name: Langflow (${{inputs.release_type}}) Pre-release by @${{ github.actor }}
-on:
- workflow_dispatch:
- inputs:
- release_package:
- description: "Release package"
- required: true
- type: boolean
- default: false
- release_type:
- description: "Type of release (base or main)"
- required: true
- type: choice
- options:
- - base
- - main
-
-env:
- POETRY_VERSION: "1.8.2"
-
-jobs:
- release:
- name: Release Langflow
- if: inputs.release_package == true
- runs-on: ubuntu-latest
- outputs:
- version: ${{ steps.check-version.outputs.version }}
- steps:
- - uses: actions/checkout@v4
- - name: Install poetry
- run: pipx install poetry==${{ env.POETRY_VERSION }}
- - name: Set up Python 3.10
- uses: actions/setup-python@v5
- with:
- python-version: "3.10"
- cache: "poetry"
- - name: Set up Nodejs 20
- uses: actions/setup-node@v4
- with:
- node-version: "20"
- - name: Check Version
- id: check-version
- run: |
- if [ "${{ inputs.release_type }}" == "base" ]; then
- version=$(cd src/backend/base && poetry version --short)
- last_released_version=$(curl -s "https://pypi.org/pypi/langflow-base/json" | jq -r '.releases | keys | .[]' | sort -V | tail -n 1)
- else
- version=$(poetry version --short)
- last_released_version=$(curl -s "https://pypi.org/pypi/langflow/json" | jq -r '.releases | keys | .[]' | sort -V | tail -n 1)
- fi
- if [ "$version" = "$last_released_version" ]; then
- echo "Version $version is already released. Skipping release."
- exit 1
- else
- echo version=$version >> $GITHUB_OUTPUT
- fi
- - name: Build project for distribution
- run: |
- if [ "${{ inputs.release_type }}" == "base" ]; then
- make build base=true
- else
- make build main=true
- fi
- - name: Test CLI
- run: |
- if [ "${{ inputs.release_type }}" == "base" ]; then
- python -m pip install src/backend/base/dist/*.whl
- else
- python -m pip install dist/*.whl
- fi
- python -m langflow run --host 127.0.0.1 --port 7860 &
- SERVER_PID=$!
- # Wait for the server to start
- timeout 120 bash -c 'until curl -f http://127.0.0.1:7860/health; do sleep 2; done' || (echo "Server did not start in time" && kill $SERVER_PID && exit 1)
- # Terminate the server
- kill $SERVER_PID || (echo "Failed to terminate the server" && exit 1)
- sleep 10 # give the server some time to terminate
- # Check if the server is still running
- if kill -0 $SERVER_PID 2>/dev/null; then
- echo "Failed to terminate the server"
- exit 1
- else
- echo "Server terminated successfully"
- fi
- - name: Publish to PyPI
- env:
- POETRY_PYPI_TOKEN_PYPI: ${{ secrets.PYPI_API_TOKEN }}
- run: |
- if [ "${{ inputs.release_type }}" == "base" ]; then
- make publish base=true
- else
- make publish main=true
- fi
- - name: Upload Artifact
- uses: actions/upload-artifact@v4
- with:
- name: dist${{ inputs.release_type }}
- path: ${{ inputs.release_type == 'base' && 'src/backend/base/dist' || 'dist' }}
-
- call_docker_build:
- name: Call Docker Build Workflow
- needs: release
- uses: langflow-ai/langflow/.github/workflows/docker-build.yml@dev
- with:
- version: ${{ needs.release.outputs.version }}
- release_type: ${{ inputs.release_type }}
- secrets: inherit
-
- create_release:
- name: Create Release
- runs-on: ubuntu-latest
- needs: [release]
- if: ${{ inputs.release_type == 'main' }}
- steps:
- - uses: actions/download-artifact@v4
- with:
- name: dist${{ inputs.release_type }}
- path: dist
- - name: Create Release
- uses: ncipollo/release-action@v1
- with:
- artifacts: "dist/*"
- token: ${{ secrets.GITHUB_TOKEN }}
- draft: false
- generateReleaseNotes: true
- prerelease: true
- tag: v${{ needs.release.outputs.version }}
- commit: dev
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index bb049907e419..57b5e3ed9c22 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -4,8 +4,23 @@ run-name: Langflow Release by @${{ github.actor }}
on:
workflow_dispatch:
inputs:
- release_package:
- description: "Release package"
+ release_package_base:
+ description: "Release Langflow Base"
+ required: true
+ type: boolean
+ default: false
+ release_package_main:
+ description: "Release Langflow"
+ required: true
+ type: boolean
+ default: false
+ build_docker_base:
+ description: "Build Docker Image for Langflow Base"
+ required: true
+ type: boolean
+ default: false
+ build_docker_main:
+ description: "Build Docker Image for Langflow"
required: true
type: boolean
default: false
@@ -13,7 +28,7 @@ on:
description: "Pre-release"
required: false
type: boolean
- default: true
+ default: false
env:
@@ -27,7 +42,7 @@ jobs:
release-base:
name: Release Langflow Base
needs: [ci]
- if: inputs.release_package == true
+ if: inputs.release_package_base == true
runs-on: ubuntu-latest
outputs:
version: ${{ steps.check-version.outputs.version }}
@@ -93,7 +108,7 @@ jobs:
release-main:
name: Release Langflow Main
- if: inputs.release_package == true
+ if: inputs.release_package_main == true
needs: [release-base]
runs-on: ubuntu-latest
outputs:
@@ -111,6 +126,18 @@ jobs:
uses: actions/setup-node@v4
with:
node-version: "20"
+ # If pre_release is true, validate that the version string contains a
+ # pre-release marker ("a", "b", "rc", "dev" or "post"); otherwise fail the workflow
+ - name: Check if pre-release
+ if: inputs.pre_release == true
+ run: |
+ version=$(poetry version --short)
+ if [[ "${version}" =~ ^([0-9]+\.)?([0-9]+\.)?[0-9]+((a|b|rc|dev|post)([0-9]+))$ ]]; then
+ echo "Pre-release version detected. Continuing with the release."
+ else
+ echo "Invalid pre-release version detected. Exiting the workflow."
+ exit 1
+ fi
- name: Check Version
id: check-version
run: |
@@ -155,19 +182,34 @@ jobs:
name: dist-main
path: dist
- call_docker_build:
- name: Call Docker Build Workflow
- needs: [release-base, release-main]
- uses: langflow-ai/langflow/.github/workflows/docker-build.yml@main
+ call_docker_build_base:
+ name: Call Docker Build Workflow for Langflow Base
+ if : inputs.build_docker_base == true
+ uses: ./.github/workflows/docker-build.yml
strategy:
matrix:
release_type:
- base
+ with:
+ # version is intentionally empty: the docker-build workflow's get-version
+ # job derives it from pyproject.toml based on the release_type input
+ version: ''
+ release_type: ${{ matrix.release_type }}
+ pre_release: ${{ inputs.pre_release }}
+ secrets: inherit
+
+ call_docker_build_main:
+ name: Call Docker Build Workflow for Langflow
+ if : inputs.build_docker_main == true
+ uses: ./.github/workflows/docker-build.yml
+ strategy:
+ matrix:
+ release_type:
- main
with:
 # version is intentionally empty: the docker-build workflow's get-version
 # job derives it from pyproject.toml based on the release_type input
- version: ${{ matrix.release_type == 'base' && needs.release-base.outputs.version || matrix.release_type == 'main' && needs.release-main.outputs.version }}
+ version: ''
release_type: ${{ matrix.release_type }}
pre_release: ${{ inputs.pre_release }}
secrets: inherit
From ca008a13468e55d8dba0a8050a3ff14e17cc655f Mon Sep 17 00:00:00 2001
From: Daniel Gines
Date: Thu, 8 Aug 2024 12:15:41 -0300
Subject: [PATCH 04/38] fix: update PYTHON_REQUIRED extraction to correctly
capture Python version (#3199)
* Updates and changes to the Makefile:
1. Added removal of `frontend` directory inside `src/backend/base/langflow/` and `build` directory inside `src/frontend/` to the `clean_npm_cache` target.
2. Added descriptive comments for the `build_and_install`, `build_and_run`, `fix_codespell`, `setup_poetry`, `unit_tests`, `integration_tests`, and `tests_frontend` targets.
Looking forward to your feedback.
* Improvements: Structure and Functionality Improvements
- Reorganized commands to facilitate understanding of the structure
- Enhanced check_tools to detect the Python version
- Added a multi-environment script to support check_tools
- make init now builds the frontend and runs the application
- Aesthetic improvements in output messages
TO-DO:
- Reorganize container-related commands
- Reorganize other miscellaneous utilities
- Document usage in the application docs
- Prepare the dev environment following the maintainers' recommended practices
* Removed pre-commit as it is no longer used.
* Restored 'patch' command in Makefile, it updates the 'pyproject.toml' with the new project version.
* fix: update PYTHON_REQUIRED extraction to correctly capture Python version
Changed the method of extracting the required Python version from `pyproject.toml`:
- Old method:
`PYTHON_REQUIRED=$(shell grep "^python" pyproject.toml | sed -n 's/.*"\(.*\)"$$/\1/p')`
- New method:
`PYTHON_REQUIRED=$(shell grep '^python[[:space:]]*=' pyproject.toml | sed -n 's/.*"\([^"]*\)".*/\1/p')`
The old method of capturing the Python version was too broad and could inadvertently match other dependencies starting with "python" (e.g., `python-socketio`, `python-dotenv`). This could lead to incorrect extraction of the required Python version, potentially causing version mismatches and failures in environment setup.
* fix: update PYTHON_REQUIRED extraction to correctly capture Python version
Changed the method of extracting the required Python version from `pyproject.toml`:
- Old method:
`PYTHON_REQUIRED=$(shell grep "^python" pyproject.toml | sed -n 's/.*"\(.*\)"$$/\1/p')`
- New method:
`PYTHON_REQUIRED=$(shell grep '^python[[:space:]]*=' pyproject.toml | sed -n 's/.*"\([^"]*\)".*/\1/p')`
The old method of capturing the Python version was too broad and could inadvertently match other dependencies starting with "python" (e.g., `python-socketio`, `python-dotenv`). This could lead to incorrect extraction of the required Python version, potentially causing version mismatches and failures in environment setup.
---------
Co-authored-by: Gabriel Luiz Freitas Almeida
---
Makefile | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/Makefile b/Makefile
index 484fc27bdaa4..6e84e64c926a 100644
--- a/Makefile
+++ b/Makefile
@@ -6,7 +6,7 @@ DOCKERFILE=docker/build_and_push.Dockerfile
DOCKERFILE_BACKEND=docker/build_and_push_backend.Dockerfile
DOCKERFILE_FRONTEND=docker/frontend/build_and_push_frontend.Dockerfile
DOCKER_COMPOSE=docker_example/docker-compose.yml
-PYTHON_REQUIRED=$(shell grep "^python" pyproject.toml | sed -n 's/.*"\(.*\)"$$/\1/p')
+PYTHON_REQUIRED=$(shell grep '^python[[:space:]]*=' pyproject.toml | sed -n 's/.*"\([^"]*\)".*/\1/p')
RED=\033[0;31m
NC=\033[0m # No Color
GREEN=\033[0;32m
From 3e6c863a8b596eb812335bec993861a9fb4065b5 Mon Sep 17 00:00:00 2001
From: Edwin Jose
Date: Thu, 8 Aug 2024 14:53:21 -0400
Subject: [PATCH 05/38] feat: create google drive ingest component (#3129)
* feat: create google drive ingest component
It adds the basic implementation of the Google Drive Loader.
* feat: create google drive ingest component
Created a basic Google Drive loader component
* Updated the Icon
* updated the formatting
ran make format to update the formatting
* Addressed comments
---
.../langflow/components/data/GoogleDrive.py | 87 +++++++++++++++++++
1 file changed, 87 insertions(+)
create mode 100644 src/backend/base/langflow/components/data/GoogleDrive.py
diff --git a/src/backend/base/langflow/components/data/GoogleDrive.py b/src/backend/base/langflow/components/data/GoogleDrive.py
new file mode 100644
index 000000000000..4cc7450ebd20
--- /dev/null
+++ b/src/backend/base/langflow/components/data/GoogleDrive.py
@@ -0,0 +1,87 @@
+import json
+from typing import Optional
+from google.oauth2.credentials import Credentials
+from google.auth.exceptions import RefreshError
+from langflow.custom import Component
+from langflow.inputs import MessageTextInput
+from langflow.io import SecretStrInput
+from langflow.template import Output
+from langflow.schema import Data
+from langchain_google_community import GoogleDriveLoader
+from langflow.helpers.data import docs_to_data
+
+from json.decoder import JSONDecodeError
+
+
+class GoogleDriveComponent(Component):
+ display_name = "Google Drive Loader"
+ description = "Loads documents from Google Drive using provided credentials."
+ icon = "Google"
+
+ inputs = [
+ SecretStrInput(
+ name="json_string",
+ display_name="JSON String of the Service Account Token",
+ info="JSON string containing OAuth 2.0 access token information for service account access",
+ required=True,
+ ),
+ MessageTextInput(
+ name="document_id", display_name="Document ID", info="Single Google Drive document ID", required=True
+ ),
+ ]
+
+ outputs = [
+ Output(display_name="Loaded Documents", name="docs", method="load_documents"),
+ ]
+
+ def load_documents(self) -> Data:
+ class CustomGoogleDriveLoader(GoogleDriveLoader):
+ creds: Optional[Credentials] = None
+ """Credentials object to be passed directly."""
+
+ def _load_credentials(self):
+ """Load credentials from the provided creds attribute or fallback to the original method."""
+ if self.creds:
+ return self.creds
+ else:
+ raise ValueError("No credentials provided.")
+
+ class Config:
+ arbitrary_types_allowed = True
+
+ json_string = self.json_string
+
+ document_ids = [self.document_id]
+ if len(document_ids) != 1:
+ raise ValueError("Expected a single document ID")
+
+ # TODO: Add validation to check if the document ID is valid
+
+ # Load the token information from the JSON string
+ try:
+ token_info = json.loads(json_string)
+ except JSONDecodeError as e:
+ raise ValueError("Invalid JSON string") from e
+
+ # Initialize the custom loader with the provided credentials and document IDs
+ loader = CustomGoogleDriveLoader(
+ creds=Credentials.from_authorized_user_info(token_info), document_ids=document_ids
+ )
+
+ # Load the documents
+ try:
+ docs = loader.load()
+ # catch google.auth.exceptions.RefreshError
+ except RefreshError as e:
+ raise ValueError(
+ "Authentication error: Unable to refresh authentication token. Please try to reauthenticate."
+ ) from e
+ except Exception as e:
+ raise ValueError(f"Error loading documents: {e}") from e
+
+ assert len(docs) == 1, "Expected a single document to be loaded."
+
+ data = docs_to_data(docs)
+ # Return the loaded documents
+ self.status = data
+ return Data(data={"text": data})
From e42b6bdb94ca84a7c91bb9b9cbea432ccf034eb0 Mon Sep 17 00:00:00 2001
From: Gabriel Luiz Freitas Almeida
Date: Thu, 8 Aug 2024 17:21:21 -0300
Subject: [PATCH 06/38] fix: change ValueError into Warning to allow
disconnected flows to run and other small fixes (#3249)
* fix: add task to end all traces on asyncio.CancelledError in build_flow function for better cleanup handling
* fix: replace ValueError with warnings in Graph class when vertices exist without edges for better logging and handling
* chore: add type annotations to test_vector_store_rag_add function
* feat: Fix assertion in test_create_flows to check for substring in name field
The assertion in the test_create_flows function was modified to check if the name field contains the substring "Flow 1" instead of an exact match. This change allows for more flexibility in the test and ensures that the test passes even if there are additional characters in the name field.
---
src/backend/base/langflow/api/v1/chat.py | 1 +
src/backend/base/langflow/graph/graph/base.py | 3 ++-
src/backend/tests/unit/graph/graph/test_base.py | 2 +-
.../initial_setup/starter_projects/test_vector_store_rag.py | 2 +-
src/backend/tests/unit/test_database.py | 4 ++--
5 files changed, 7 insertions(+), 5 deletions(-)
diff --git a/src/backend/base/langflow/api/v1/chat.py b/src/backend/base/langflow/api/v1/chat.py
index a58b667fb02f..143937d1d146 100644
--- a/src/backend/base/langflow/api/v1/chat.py
+++ b/src/backend/base/langflow/api/v1/chat.py
@@ -370,6 +370,7 @@ async def event_generator(queue: asyncio.Queue, client_consumed_queue: asyncio.Q
try:
await asyncio.gather(*tasks)
except asyncio.CancelledError:
+ background_tasks.add_task(graph.end_all_traces)
for task in tasks:
task.cancel()
return
diff --git a/src/backend/base/langflow/graph/graph/base.py b/src/backend/base/langflow/graph/graph/base.py
index 42de21ac918e..d1dbe1c71bd4 100644
--- a/src/backend/base/langflow/graph/graph/base.py
+++ b/src/backend/base/langflow/graph/graph/base.py
@@ -7,6 +7,7 @@
from functools import partial
from itertools import chain
from typing import TYPE_CHECKING, Any, Dict, Generator, List, Optional, Tuple, Type, Union
+import warnings
import nest_asyncio
from loguru import logger
@@ -1425,7 +1426,7 @@ def _build_edges(self) -> List[ContractEdge]:
new_edge = self.build_edge(edge)
edges.add(new_edge)
if self.vertices and not edges:
- raise ValueError("Graph has vertices but no edges")
+ warnings.warn("Graph has vertices but no edges")
return list(edges)
def build_edge(self, edge: EdgeData) -> ContractEdge:
diff --git a/src/backend/tests/unit/graph/graph/test_base.py b/src/backend/tests/unit/graph/graph/test_base.py
index 45be6c609ac7..59908d9dca43 100644
--- a/src/backend/tests/unit/graph/graph/test_base.py
+++ b/src/backend/tests/unit/graph/graph/test_base.py
@@ -32,7 +32,7 @@ async def test_graph():
graph = Graph()
graph.add_component("chat_input", chat_input)
graph.add_component("chat_output", chat_output)
- with pytest.raises(ValueError, match="Graph has vertices but no edges"):
+ with pytest.warns(UserWarning, match="Graph has vertices but no edges"):
graph.prepare()
diff --git a/src/backend/tests/unit/initial_setup/starter_projects/test_vector_store_rag.py b/src/backend/tests/unit/initial_setup/starter_projects/test_vector_store_rag.py
index 587c65779a2d..e216c0c567cc 100644
--- a/src/backend/tests/unit/initial_setup/starter_projects/test_vector_store_rag.py
+++ b/src/backend/tests/unit/initial_setup/starter_projects/test_vector_store_rag.py
@@ -212,7 +212,7 @@ def test_vector_store_rag_dump_components_and_edges(ingestion_graph, rag_graph):
assert (source, target) in expected_rag_edges, f"Edge {source} -> {target} not found"
-def test_vector_store_rag_add(ingestion_graph, rag_graph):
+def test_vector_store_rag_add(ingestion_graph: Graph, rag_graph: Graph):
ingestion_graph_copy = copy.deepcopy(ingestion_graph)
rag_graph_copy = copy.deepcopy(rag_graph)
ingestion_graph_copy += rag_graph_copy
diff --git a/src/backend/tests/unit/test_database.py b/src/backend/tests/unit/test_database.py
index aebdef327129..44e347f1e118 100644
--- a/src/backend/tests/unit/test_database.py
+++ b/src/backend/tests/unit/test_database.py
@@ -212,7 +212,7 @@ def test_create_flows(client: TestClient, session: Session, json_flow: str, logg
# Check response data
response_data = response.json()
assert len(response_data) == 2
- assert response_data[0]["name"] == "Flow 1"
+ assert "Flow 1" in response_data[0]["name"]
assert response_data[0]["description"] == "description"
assert response_data[0]["data"] == data
assert response_data[1]["name"] == "Flow 2"
@@ -241,7 +241,7 @@ def test_upload_file(client: TestClient, session: Session, json_flow: str, logge
# Check response data
response_data = response.json()
assert len(response_data) == 2
- assert response_data[0]["name"] == "Flow 1"
+ assert "Flow 1" in response_data[0]["name"]
assert response_data[0]["description"] == "description"
assert response_data[0]["data"] == data
assert response_data[1]["name"] == "Flow 2"
From d606a4dac3b1b250b2e4333c758f8f7edbeaf892 Mon Sep 17 00:00:00 2001
From: vinicius Mello <45274355+vmellos@users.noreply.github.com>
Date: Thu, 8 Aug 2024 16:41:42 -0400
Subject: [PATCH 07/38] feat: add huggingface endpoint retry (#3236)
* feat: added retry when calling huggingface endpoint
* chore: added default value to retry input
* [autofix.ci] apply automated fixes
---------
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
---
.../components/models/HuggingFaceModel.py | 27 ++++++++++++++-----
1 file changed, 20 insertions(+), 7 deletions(-)
diff --git a/src/backend/base/langflow/components/models/HuggingFaceModel.py b/src/backend/base/langflow/components/models/HuggingFaceModel.py
index 313d440019df..069d63d18301 100644
--- a/src/backend/base/langflow/components/models/HuggingFaceModel.py
+++ b/src/backend/base/langflow/components/models/HuggingFaceModel.py
@@ -1,9 +1,11 @@
+from tenacity import retry, stop_after_attempt, wait_fixed
+
from langchain_community.chat_models.huggingface import ChatHuggingFace
from langchain_community.llms.huggingface_endpoint import HuggingFaceEndpoint
from langflow.base.models.model import LCModelComponent
from langflow.field_typing import LanguageModel
-from langflow.io import DictInput, DropdownInput, SecretStrInput, StrInput
+from langflow.io import DictInput, DropdownInput, SecretStrInput, StrInput, IntInput
class HuggingFaceEndpointsComponent(LCModelComponent):
@@ -26,8 +28,24 @@ class HuggingFaceEndpointsComponent(LCModelComponent):
),
SecretStrInput(name="huggingfacehub_api_token", display_name="API token", password=True),
DictInput(name="model_kwargs", display_name="Model Keyword Arguments", advanced=True),
+ IntInput(name="retry_attempts", display_name="Retry Attempts", value=1),
]
+ def create_huggingface_endpoint(self, endpoint_url, task, huggingfacehub_api_token, model_kwargs):
+ @retry(stop=stop_after_attempt(self.retry_attempts), wait=wait_fixed(2))
+ def _attempt_create():
+ try:
+ return HuggingFaceEndpoint( # type: ignore
+ endpoint_url=endpoint_url,
+ task=task,
+ huggingfacehub_api_token=huggingfacehub_api_token,
+ model_kwargs=model_kwargs,
+ )
+ except Exception as e:
+ raise ValueError("Could not connect to HuggingFace Endpoints API.") from e
+
+ return _attempt_create()
+
def build_model(self) -> LanguageModel: # type: ignore[type-var]
endpoint_url = self.endpoint_url
task = self.task
@@ -35,12 +53,7 @@ def build_model(self) -> LanguageModel: # type: ignore[type-var]
model_kwargs = self.model_kwargs or {}
try:
- llm = HuggingFaceEndpoint( # type: ignore
- endpoint_url=endpoint_url,
- task=task,
- huggingfacehub_api_token=huggingfacehub_api_token,
- model_kwargs=model_kwargs,
- )
+ llm = self.create_huggingface_endpoint(endpoint_url, task, huggingfacehub_api_token, model_kwargs)
except Exception as e:
raise ValueError("Could not connect to HuggingFace Endpoints API.") from e
From edb3c9e4d2a2acda5cd82ce719ec984b4f0a601b Mon Sep 17 00:00:00 2001
From: Gabriel Luiz Freitas Almeida
Date: Thu, 8 Aug 2024 19:23:02 -0300
Subject: [PATCH 08/38] fix: change sync_get_file_content_dicts to use
get_file_content_dicts (#3250)
feat: refactor sync_get_file_content_dicts to use get_file_content_dicts
Refactor the `sync_get_file_content_dicts` method in the `Message` class to use the `get_file_content_dicts` method instead. This change improves the code by using a more descriptive and accurate method name.
---
src/backend/base/langflow/schema/message.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/src/backend/base/langflow/schema/message.py b/src/backend/base/langflow/schema/message.py
index bd1c3d848f39..53d7f014138b 100644
--- a/src/backend/base/langflow/schema/message.py
+++ b/src/backend/base/langflow/schema/message.py
@@ -164,7 +164,7 @@ def serialize_text(self, value):
return value
def sync_get_file_content_dicts(self):
- coro = self.aget_file_content_dicts()
+ coro = self.get_file_content_dicts()
loop = asyncio.get_event_loop()
return loop.run_until_complete(coro)
From 56ecb18ef5feea651e85894c9803c090441f9131 Mon Sep 17 00:00:00 2001
From: Gabriel Luiz Freitas Almeida
Date: Fri, 9 Aug 2024 09:42:21 -0300
Subject: [PATCH 09/38] feat: Improve caching logic and add disk caching option
(#3246)
* feat: add diskcache package version 5.6.3 to poetry.lock and pyproject.toml for improved caching functionality
* refactor: simplify CacheMiss import in cache service files for better clarity and maintainability
* feat: Add AsyncDiskCache class for disk-based caching
* feat: Add disk caching option in CacheServiceFactory with AsyncDiskCache
* feat: Restrict cache_type to specific literals: async, redis, memory, disk for enhanced type safety and clarity
* feat: Change get_requester_result to async await for proper async handling in Vertex class
* fix: Update outputs dictionary in ResultData class to use key-value pairs for better readability and maintainability
* fix: Improve caching logic in Graph class by ensuring vertex builds properly handle exceptions and cache updates more reliably
* feat: Add teardown method to AsyncDiskCache for clearing cache directory during cleanup process
* fix: Correct variable name in Graph class to ensure proper handling of vertex results in caching logic
* feat: Clear AsyncDiskCache on initialization to align behavior with in-memory cache until frontend handling is implemented
---
src/backend/base/langflow/graph/graph/base.py | 58 ++++++-----
src/backend/base/langflow/graph/schema.py | 2 +-
.../base/langflow/graph/vertex/base.py | 2 +-
.../base/langflow/services/cache/disk.py | 96 +++++++++++++++++++
.../base/langflow/services/cache/factory.py | 6 ++
.../base/langflow/services/cache/service.py | 4 +-
.../base/langflow/services/cache/utils.py | 3 +
.../base/langflow/services/settings/base.py | 4 +-
src/backend/base/poetry.lock | 13 ++-
src/backend/base/pyproject.toml | 1 +
10 files changed, 157 insertions(+), 32 deletions(-)
create mode 100644 src/backend/base/langflow/services/cache/disk.py
diff --git a/src/backend/base/langflow/graph/graph/base.py b/src/backend/base/langflow/graph/graph/base.py
index d1dbe1c71bd4..918ce23de023 100644
--- a/src/backend/base/langflow/graph/graph/base.py
+++ b/src/backend/base/langflow/graph/graph/base.py
@@ -1126,41 +1126,51 @@ async def build_vertex(
self.run_manager.add_to_vertices_being_run(vertex_id)
try:
params = ""
- if vertex.frozen:
+ should_build = False
+ if not vertex.frozen:
+ should_build = True
+ else:
# Check the cache for the vertex
if get_cache is not None:
cached_result = await get_cache(key=vertex.id)
else:
cached_result = None
if isinstance(cached_result, CacheMiss):
- await vertex.build(
- user_id=user_id, inputs=inputs_dict, fallback_to_env_vars=fallback_to_env_vars, files=files
- )
- if set_cache is not None:
- await set_cache(key=vertex.id, data=vertex)
- if cached_result and not isinstance(cached_result, CacheMiss):
- cached_vertex = cached_result["result"]
- # Now set update the vertex with the cached vertex
- vertex._built = cached_vertex._built
- vertex.result = cached_vertex.result
- vertex.results = cached_vertex.results
- vertex.artifacts = cached_vertex.artifacts
- vertex._built_object = cached_vertex._built_object
- vertex._custom_component = cached_vertex._custom_component
- if vertex.result is not None:
- vertex.result.used_frozen_result = True
+ should_build = True
else:
- await vertex.build(
- user_id=user_id, inputs=inputs_dict, fallback_to_env_vars=fallback_to_env_vars, files=files
- )
- if set_cache is not None:
- await set_cache(key=vertex.id, data=vertex)
- else:
+ try:
+ cached_vertex_dict = cached_result["result"]
+ # Now set update the vertex with the cached vertex
+ vertex._built = cached_vertex_dict["_built"]
+ vertex.artifacts = cached_vertex_dict["artifacts"]
+ vertex._built_object = cached_vertex_dict["_built_object"]
+ vertex._built_result = cached_vertex_dict["_built_result"]
+ vertex._data = cached_vertex_dict["_data"]
+ vertex.results = cached_vertex_dict["results"]
+ try:
+ vertex._finalize_build()
+ if vertex.result is not None:
+ vertex.result.used_frozen_result = True
+ except Exception:
+ should_build = True
+ except KeyError:
+ should_build = True
+
+ if should_build:
await vertex.build(
user_id=user_id, inputs=inputs_dict, fallback_to_env_vars=fallback_to_env_vars, files=files
)
if set_cache is not None:
- await set_cache(key=vertex.id, data=vertex)
+ vertex_dict = {
+ "_built": vertex._built,
+ "results": vertex.results,
+ "artifacts": vertex.artifacts,
+ "_built_object": vertex._built_object,
+ "_built_result": vertex._built_result,
+ "_data": vertex._data,
+ }
+
+ await set_cache(key=vertex.id, data=vertex_dict)
if vertex.result is not None:
params = f"{vertex._built_object_repr()}{params}"
diff --git a/src/backend/base/langflow/graph/schema.py b/src/backend/base/langflow/graph/schema.py
index eab0040c6d3b..fdabcdaaa64a 100644
--- a/src/backend/base/langflow/graph/schema.py
+++ b/src/backend/base/langflow/graph/schema.py
@@ -43,7 +43,7 @@ def validate_model(cls, values):
stream_url = StreamURL(location=message["stream_url"])
values["outputs"].update({key: OutputValue(message=stream_url, type=message["type"])})
elif "type" in message:
- values["outputs"].update({OutputValue(message=message, type=message["type"])})
+ values["outputs"].update({key: OutputValue(message=message, type=message["type"])})
return values
diff --git a/src/backend/base/langflow/graph/vertex/base.py b/src/backend/base/langflow/graph/vertex/base.py
index 8284736acf3d..0b7941734b48 100644
--- a/src/backend/base/langflow/graph/vertex/base.py
+++ b/src/backend/base/langflow/graph/vertex/base.py
@@ -753,7 +753,7 @@ async def build(
return
if self.frozen and self._built:
- return self.get_requester_result(requester)
+ return await self.get_requester_result(requester)
elif self._built and requester is not None:
# This means that the vertex has already been built
# and we are just getting the result for the requester
diff --git a/src/backend/base/langflow/services/cache/disk.py b/src/backend/base/langflow/services/cache/disk.py
new file mode 100644
index 000000000000..dbbd85f1335c
--- /dev/null
+++ b/src/backend/base/langflow/services/cache/disk.py
@@ -0,0 +1,96 @@
+import asyncio
+import pickle
+import time
+from typing import Generic, Optional
+
+from diskcache import Cache
+from loguru import logger
+
+from langflow.services.cache.base import AsyncBaseCacheService, AsyncLockType
+from langflow.services.cache.utils import CACHE_MISS
+
+
+class AsyncDiskCache(AsyncBaseCacheService, Generic[AsyncLockType]): # type: ignore
+ def __init__(self, cache_dir, max_size=None, expiration_time=3600):
+ self.cache = Cache(cache_dir)
+ # Let's clear the cache for now to maintain a similar
+ # behavior as the in-memory cache
+ # Later we should implement endpoints for the frontend to grab
+ # output logs from the cache
+ if len(self.cache) > 0:
+ self.cache.clear()
+ self.lock = asyncio.Lock()
+ self.max_size = max_size
+ self.expiration_time = expiration_time
+
+ async def get(self, key, lock: Optional[asyncio.Lock] = None):
+ if not lock:
+ async with self.lock:
+ return await self._get(key)
+ else:
+ return await self._get(key)
+
+ async def _get(self, key):
+ item = await asyncio.to_thread(self.cache.get, key, default=None)
+ if item:
+ if time.time() - item["time"] < self.expiration_time:
+ await asyncio.to_thread(self.cache.touch, key) # Refresh the expiry time
+ return pickle.loads(item["value"]) if isinstance(item["value"], bytes) else item["value"]
+ else:
+ logger.info(f"Cache item for key '{key}' has expired and will be deleted.")
+ await self._delete(key) # Log before deleting the expired item
+ return CACHE_MISS
+
+ async def set(self, key, value, lock: Optional[asyncio.Lock] = None):
+ if not lock:
+ async with self.lock:
+ await self._set(key, value)
+ else:
+ await self._set(key, value)
+
+ async def _set(self, key, value):
+ if self.max_size and len(self.cache) >= self.max_size:
+ await asyncio.to_thread(self.cache.cull)
+ item = {"value": pickle.dumps(value) if not isinstance(value, (str, bytes)) else value, "time": time.time()}
+ await asyncio.to_thread(self.cache.set, key, item)
+
+ async def delete(self, key, lock: Optional[asyncio.Lock] = None):
+ if not lock:
+ async with self.lock:
+ await self._delete(key)
+ else:
+ await self._delete(key)
+
+ async def _delete(self, key):
+ await asyncio.to_thread(self.cache.delete, key)
+
+ async def clear(self, lock: Optional[asyncio.Lock] = None):
+ if not lock:
+ async with self.lock:
+ await self._clear()
+ else:
+ await self._clear()
+
+ async def _clear(self):
+ await asyncio.to_thread(self.cache.clear)
+
+ async def upsert(self, key, value, lock: Optional[asyncio.Lock] = None):
+ if not lock:
+ async with self.lock:
+ await self._upsert(key, value)
+ else:
+ await self._upsert(key, value)
+
+ async def _upsert(self, key, value):
+ existing_value = await self.get(key)
+ if existing_value is not CACHE_MISS and isinstance(existing_value, dict) and isinstance(value, dict):
+ existing_value.update(value)
+ value = existing_value
+ await self.set(key, value)
+
+ def __contains__(self, key):
+ return asyncio.run(asyncio.to_thread(self.cache.__contains__, key))
+
+ async def teardown(self):
+ # Clean up the cache directory
+ self.cache.clear(retry=True)
diff --git a/src/backend/base/langflow/services/cache/factory.py b/src/backend/base/langflow/services/cache/factory.py
index 5cc6b12afe0e..74364dbfc0e5 100644
--- a/src/backend/base/langflow/services/cache/factory.py
+++ b/src/backend/base/langflow/services/cache/factory.py
@@ -1,5 +1,6 @@
from typing import TYPE_CHECKING
+from langflow.services.cache.disk import AsyncDiskCache
from langflow.services.cache.service import AsyncInMemoryCache, CacheService, RedisCache, ThreadingInMemoryCache
from langflow.services.factory import ServiceFactory
from langflow.utils.logger import logger
@@ -36,3 +37,8 @@ def create(self, settings_service: "SettingsService"):
return ThreadingInMemoryCache(expiration_time=settings_service.settings.cache_expire)
elif settings_service.settings.cache_type == "async":
return AsyncInMemoryCache(expiration_time=settings_service.settings.cache_expire)
+ elif settings_service.settings.cache_type == "disk":
+ return AsyncDiskCache(
+ cache_dir=settings_service.settings.config_dir,
+ expiration_time=settings_service.settings.cache_expire,
+ )
diff --git a/src/backend/base/langflow/services/cache/service.py b/src/backend/base/langflow/services/cache/service.py
index 3d4131c239c1..021c33f90281 100644
--- a/src/backend/base/langflow/services/cache/service.py
+++ b/src/backend/base/langflow/services/cache/service.py
@@ -8,9 +8,7 @@
from loguru import logger
from langflow.services.cache.base import AsyncBaseCacheService, AsyncLockType, CacheService, LockType
-from langflow.services.cache.utils import CacheMiss
-
-CACHE_MISS = CacheMiss()
+from langflow.services.cache.utils import CACHE_MISS
class ThreadingInMemoryCache(CacheService, Generic[LockType]): # type: ignore
diff --git a/src/backend/base/langflow/services/cache/utils.py b/src/backend/base/langflow/services/cache/utils.py
index a89963f5681c..c2f3c961124c 100644
--- a/src/backend/base/langflow/services/cache/utils.py
+++ b/src/backend/base/langflow/services/cache/utils.py
@@ -166,3 +166,6 @@ def update_build_status(cache_service, flow_id: str, status: "BuildStatus"):
cache_service[flow_id] = cached_flow
cached_flow["status"] = status
cache_service[flow_id] = cached_flow
+
+
+CACHE_MISS = CacheMiss()
diff --git a/src/backend/base/langflow/services/settings/base.py b/src/backend/base/langflow/services/settings/base.py
index 658edd57e1c7..88c2a64b49fe 100644
--- a/src/backend/base/langflow/services/settings/base.py
+++ b/src/backend/base/langflow/services/settings/base.py
@@ -3,7 +3,7 @@
import os
from pathlib import Path
from shutil import copy2
-from typing import Any, List, Optional, Tuple, Type
+from typing import Any, List, Literal, Optional, Tuple, Type
import orjson
import yaml
@@ -79,7 +79,7 @@ class Settings(BaseSettings):
"""SQLite pragmas to use when connecting to the database."""
# cache configuration
- cache_type: str = "async"
+ cache_type: Literal["async", "redis", "memory", "disk"] = "async"
"""The cache type can be 'async' or 'redis'."""
cache_expire: int = 3600
"""The cache expire in seconds."""
diff --git a/src/backend/base/poetry.lock b/src/backend/base/poetry.lock
index e52b0c5b7754..8deb46e0bfab 100644
--- a/src/backend/base/poetry.lock
+++ b/src/backend/base/poetry.lock
@@ -1208,6 +1208,17 @@ files = [
graph = ["objgraph (>=1.7.2)"]
profile = ["gprof2dot (>=2022.7.29)"]
+[[package]]
+name = "diskcache"
+version = "5.6.3"
+description = "Disk Cache -- Disk and file backed persistent cache."
+optional = false
+python-versions = ">=3"
+files = [
+ {file = "diskcache-5.6.3-py3-none-any.whl", hash = "sha256:5e31b2d5fbad117cc363ebaf6b689474db18a1f6438bc82358b024abd4c2ca19"},
+ {file = "diskcache-5.6.3.tar.gz", hash = "sha256:2c3a3fa2743d8535d832ec61c2054a1641f41775aa7c556758a109941e33e4fc"},
+]
+
[[package]]
name = "distlib"
version = "0.3.8"
@@ -7654,4 +7665,4 @@ local = []
[metadata]
lock-version = "2.0"
python-versions = ">=3.10,<3.13"
-content-hash = "747dad35b9e5b1338a989ea6bfd4ac3465ba34f792639aeabda3c1ca9b40c689"
+content-hash = "fe6710d7325bc2cceeaa298d94d6f1157cfe1533c2acbabe3ecdca5594d9e007"
diff --git a/src/backend/base/pyproject.toml b/src/backend/base/pyproject.toml
index d98c673e0767..a8de78d9e01f 100644
--- a/src/backend/base/pyproject.toml
+++ b/src/backend/base/pyproject.toml
@@ -79,6 +79,7 @@ filelock = "^3.15.4"
grandalf = "^0.8.0"
crewai = "^0.36.0"
spider-client = "^0.0.27"
+diskcache = "^5.6.3"
[tool.poetry.extras]
From 645c723d21aec1011bfe8d48c3a76537940ae385 Mon Sep 17 00:00:00 2001
From: Gabriel Luiz Freitas Almeida
Date: Fri, 9 Aug 2024 10:59:49 -0300
Subject: [PATCH 10/38] feat: start using dev mode flag and add exception
filter in logger (#3260)
* refactor: reorganize logger module and add setup.py for logging configuration
* refactor: update logger import path to align with recent module restructuring
* refactor: add logging configuration parameter to Graph initialization for improved logging setup flexibility
* feat: create logging init module for improved logger configuration and management
* refactor: update Settings class to include development mode flag and associated validator for enhanced configuration management
* refactor: enhance logger.py with DEV mode handling and TypedDict for log configuration settings
* feat: add settings module with DEV mode flag and helper functions for setting development state
* refactor: update flow assertions in tests to check for Data object type instead of Flow object type
* feat: add deepcopy method to Graph class to copy start and end components, ensuring proper graph cloning functionality
* feat: implement deepcopy method in Component class for proper cloning of inputs and configuration attributes
* feat: enhance attribute access in Component class to support backwards-compatible private attributes retrieval
* feat: improve test assertion for list_flows in custom component to display types of returned objects for better debugging
* feat: refactor imports in constants.py and remove redundant Data class definition for cleaner code structure
* feat: refactor imports in logger.py to include NotRequired from typing_extensions for better type hinting support
---
src/backend/base/langflow/__main__.py | 2 +-
src/backend/base/langflow/api/log_router.py | 2 +-
.../custom/custom_component/component.py | 24 ++++++++++
.../base/langflow/field_typing/constants.py | 7 +--
src/backend/base/langflow/graph/graph/base.py | 44 ++++++++++++++++++-
src/backend/base/langflow/load/load.py | 2 +-
src/backend/base/langflow/logging/__init__.py | 4 ++
.../langflow/{utils => logging}/logger.py | 16 ++++++-
src/backend/base/langflow/logging/setup.py | 16 +++++++
src/backend/base/langflow/main.py | 2 +-
src/backend/base/langflow/server.py | 2 +-
.../base/langflow/services/cache/factory.py | 2 +-
.../base/langflow/services/settings/base.py | 9 ++++
src/backend/base/langflow/settings.py | 10 +++++
src/backend/base/langflow/utils/util.py | 2 +-
.../unit/test_custom_component_with_client.py | 6 ++-
src/backend/tests/unit/test_logger.py | 2 +-
17 files changed, 133 insertions(+), 19 deletions(-)
create mode 100644 src/backend/base/langflow/logging/__init__.py
rename src/backend/base/langflow/{utils => logging}/logger.py (95%)
create mode 100644 src/backend/base/langflow/logging/setup.py
create mode 100644 src/backend/base/langflow/settings.py
diff --git a/src/backend/base/langflow/__main__.py b/src/backend/base/langflow/__main__.py
index 25e1ca4bfd23..c342bf218922 100644
--- a/src/backend/base/langflow/__main__.py
+++ b/src/backend/base/langflow/__main__.py
@@ -26,7 +26,7 @@
from langflow.services.deps import get_db_service, get_settings_service, session_scope
from langflow.services.settings.constants import DEFAULT_SUPERUSER
from langflow.services.utils import initialize_services
-from langflow.utils.logger import configure, logger
+from langflow.logging.logger import configure, logger
from langflow.utils.util import update_settings
console = Console()
diff --git a/src/backend/base/langflow/api/log_router.py b/src/backend/base/langflow/api/log_router.py
index 45f7b3e510c6..4e4aa9a7dbad 100644
--- a/src/backend/base/langflow/api/log_router.py
+++ b/src/backend/base/langflow/api/log_router.py
@@ -5,7 +5,7 @@
from fastapi import APIRouter, Query, HTTPException, Request
from fastapi.responses import JSONResponse, StreamingResponse
from http import HTTPStatus
-from langflow.utils.logger import log_buffer
+from langflow.logging.logger import log_buffer
log_router = APIRouter(tags=["Log"])
diff --git a/src/backend/base/langflow/custom/custom_component/component.py b/src/backend/base/langflow/custom/custom_component/component.py
index 5a222e8a9610..997736bf8f53 100644
--- a/src/backend/base/langflow/custom/custom_component/component.py
+++ b/src/backend/base/langflow/custom/custom_component/component.py
@@ -1,4 +1,5 @@
import inspect
+from copy import deepcopy
from typing import TYPE_CHECKING, Any, Callable, ClassVar, List, Optional, Union, get_type_hints
from uuid import UUID
@@ -34,6 +35,7 @@ class Component(CustomComponent):
def __init__(self, **kwargs):
# if key starts with _ it is a config
# else it is an input
+
inputs = {}
config = {}
for key, value in kwargs.items():
@@ -53,6 +55,8 @@ def __init__(self, **kwargs):
config = config or {}
if "_id" not in config:
config |= {"_id": f"{self.__class__.__name__}-{nanoid.generate(size=5)}"}
+ self.__inputs = inputs
+ self.__config = config
super().__init__(**config)
if hasattr(self, "_trace_type"):
self.trace_type = self._trace_type
@@ -66,6 +70,24 @@ def __init__(self, **kwargs):
self._set_output_types()
self.set_class_code()
+ def __deepcopy__(self, memo):
+ if id(self) in memo:
+ return memo[id(self)]
+ kwargs = deepcopy(self.__config)
+ kwargs["inputs"] = deepcopy(self.__inputs)
+ new_component = type(self)(**kwargs)
+ new_component._code = self._code
+ new_component._outputs = self._outputs
+ new_component._inputs = self._inputs
+ new_component._edges = self._edges
+ new_component._components = self._components
+ new_component._parameters = self._parameters
+ new_component._attributes = self._attributes
+ new_component._output_logs = self._output_logs
+ new_component._logs = self._logs
+ memo[id(self)] = new_component
+ return new_component
+
def set_class_code(self):
# Get the source code of the calling class
if self._code:
@@ -331,6 +353,8 @@ def __getattr__(self, name: str) -> Any:
return self.__dict__["_inputs"][name].value
if name in BACKWARDS_COMPATIBLE_ATTRIBUTES:
return self.__dict__[f"_{name}"]
+ if name.startswith("_") and name[1:] in BACKWARDS_COMPATIBLE_ATTRIBUTES:
+ return self.__dict__[name]
raise AttributeError(f"{name} not found in {self.__class__.__name__}")
def _set_input_value(self, name: str, value: Any):
diff --git a/src/backend/base/langflow/field_typing/constants.py b/src/backend/base/langflow/field_typing/constants.py
index a5857ee12d41..dfa8309e71cd 100644
--- a/src/backend/base/langflow/field_typing/constants.py
+++ b/src/backend/base/langflow/field_typing/constants.py
@@ -13,10 +13,11 @@
from langchain_core.output_parsers import BaseOutputParser
from langchain_core.prompts import BasePromptTemplate, ChatPromptTemplate, PromptTemplate
from langchain_core.retrievers import BaseRetriever
-from langchain_core.tools import Tool, BaseTool
+from langchain_core.tools import BaseTool, Tool
from langchain_core.vectorstores import VectorStore, VectorStoreRetriever
from langchain_text_splitters import TextSplitter
+from langflow.schema.data import Data
from langflow.schema.message import Message
NestedDict: TypeAlias = Dict[str, Union[str, Dict]]
@@ -33,10 +34,6 @@ class Object:
pass
-class Data:
- pass
-
-
class Code:
pass
diff --git a/src/backend/base/langflow/graph/graph/base.py b/src/backend/base/langflow/graph/graph/base.py
index 918ce23de023..a47b11b998f5 100644
--- a/src/backend/base/langflow/graph/graph/base.py
+++ b/src/backend/base/langflow/graph/graph/base.py
@@ -2,12 +2,12 @@
import copy
import json
import uuid
+import warnings
from collections import defaultdict, deque
from datetime import datetime, timezone
from functools import partial
from itertools import chain
from typing import TYPE_CHECKING, Any, Dict, Generator, List, Optional, Tuple, Type, Union
-import warnings
import nest_asyncio
from loguru import logger
@@ -24,6 +24,7 @@
from langflow.graph.vertex.base import Vertex, VertexStates
from langflow.graph.vertex.schema import NodeData
from langflow.graph.vertex.types import ComponentVertex, InterfaceVertex, StateVertex
+from langflow.logging.logger import LogConfig, configure
from langflow.schema import Data
from langflow.schema.schema import INPUT_FIELD_NAME, InputType
from langflow.services.cache.utils import CacheMiss
@@ -47,6 +48,7 @@ def __init__(
flow_id: Optional[str] = None,
flow_name: Optional[str] = None,
user_id: Optional[str] = None,
+ log_config: Optional[LogConfig] = None,
) -> None:
"""
Initializes a new instance of the Graph class.
@@ -56,6 +58,11 @@ def __init__(
edges (List[Dict[str, str]]): A list of dictionaries representing the edges of the graph.
flow_id (Optional[str], optional): The ID of the flow. Defaults to None.
"""
+ if not log_config:
+ log_config = {"disable": False}
+ configure(**log_config)
+ self._start = start
+ self._end = end
self._prepared = False
self._runs = 0
self._updates = 0
@@ -803,7 +810,6 @@ def __getstate__(self):
"vertices_layers": self.vertices_layers,
"vertices_to_run": self.vertices_to_run,
"stop_vertex": self.stop_vertex,
- "vertex_map": self.vertex_map,
"_run_queue": self._run_queue,
"_first_layer": self._first_layer,
"_vertices": self._vertices,
@@ -814,6 +820,39 @@ def __getstate__(self):
"_sorted_vertices_layers": self._sorted_vertices_layers,
}
+ def __deepcopy__(self, memo):
+ # Check if we've already copied this instance
+ if id(self) in memo:
+ return memo[id(self)]
+
+ if self._start is not None and self._end is not None:
+ # Deep copy start and end components
+ start_copy = copy.deepcopy(self._start, memo)
+ end_copy = copy.deepcopy(self._end, memo)
+ new_graph = type(self)(
+ start_copy,
+ end_copy,
+ copy.deepcopy(self.flow_id, memo),
+ copy.deepcopy(self.flow_name, memo),
+ copy.deepcopy(self.user_id, memo),
+ )
+ else:
+ # Create a new graph without start and end, but copy flow_id, flow_name, and user_id
+ new_graph = type(self)(
+ None,
+ None,
+ copy.deepcopy(self.flow_id, memo),
+ copy.deepcopy(self.flow_name, memo),
+ copy.deepcopy(self.user_id, memo),
+ )
+ # Deep copy vertices and edges
+ new_graph.add_nodes_and_edges(copy.deepcopy(self._vertices, memo), copy.deepcopy(self._edges, memo))
+
+ # Store the newly created object in memo
+ memo[id(self)] = new_graph
+
+ return new_graph
+
def __setstate__(self, state):
run_manager = state["run_manager"]
if isinstance(run_manager, RunnableVerticesManager):
@@ -821,6 +860,7 @@ def __setstate__(self, state):
else:
state["run_manager"] = RunnableVerticesManager.from_dict(run_manager)
self.__dict__.update(state)
+ self.vertex_map = {vertex.id: vertex for vertex in self.vertices}
self.state_manager = GraphStateManager()
self.tracing_service = get_tracing_service()
self.set_run_id(self._run_id)
diff --git a/src/backend/base/langflow/load/load.py b/src/backend/base/langflow/load/load.py
index b56f22c81b8e..972c2c7e7d5f 100644
--- a/src/backend/base/langflow/load/load.py
+++ b/src/backend/base/langflow/load/load.py
@@ -8,7 +8,7 @@
from langflow.graph import Graph
from langflow.graph.schema import RunOutputs
from langflow.processing.process import process_tweaks, run_graph
-from langflow.utils.logger import configure
+from langflow.logging.logger import configure
from langflow.utils.util import update_settings
diff --git a/src/backend/base/langflow/logging/__init__.py b/src/backend/base/langflow/logging/__init__.py
new file mode 100644
index 000000000000..b7d7bb5a1cf5
--- /dev/null
+++ b/src/backend/base/langflow/logging/__init__.py
@@ -0,0 +1,4 @@
+from .logger import configure, logger
+from .setup import disable_logging, enable_logging
+
+__all__ = ["configure", "logger", "disable_logging", "enable_logging"]
diff --git a/src/backend/base/langflow/utils/logger.py b/src/backend/base/langflow/logging/logger.py
similarity index 95%
rename from src/backend/base/langflow/utils/logger.py
rename to src/backend/base/langflow/logging/logger.py
index 55c33ce63c5c..01b146b68afe 100644
--- a/src/backend/base/langflow/utils/logger.py
+++ b/src/backend/base/langflow/logging/logger.py
@@ -2,15 +2,18 @@
import logging
import os
import sys
-from pathlib import Path
from collections import deque
+from pathlib import Path
from threading import Lock, Semaphore
-from typing import Optional
+from typing import Optional, TypedDict
import orjson
from loguru import logger
from platformdirs import user_cache_dir
from rich.logging import RichHandler
+from typing_extensions import NotRequired
+
+from langflow.settings import DEV
VALID_LOG_LEVELS = ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]
@@ -129,6 +132,15 @@ def serialize_log(record):
def patching(record):
record["extra"]["serialized"] = serialize_log(record)
+ if DEV is False:
+ record.pop("exception", None)
+
+
+class LogConfig(TypedDict):
+ log_level: NotRequired[str]
+ log_file: NotRequired[Path]
+ disable: NotRequired[bool]
+ log_env: NotRequired[str]
def configure(
diff --git a/src/backend/base/langflow/logging/setup.py b/src/backend/base/langflow/logging/setup.py
new file mode 100644
index 000000000000..fdf1e22b6945
--- /dev/null
+++ b/src/backend/base/langflow/logging/setup.py
@@ -0,0 +1,16 @@
+from loguru import logger
+
+LOGGING_CONFIGURED = False
+
+
+def disable_logging():
+ global LOGGING_CONFIGURED
+ if not LOGGING_CONFIGURED:
+ logger.disable("langflow")
+ LOGGING_CONFIGURED = True
+
+
+def enable_logging():
+ global LOGGING_CONFIGURED
+ logger.enable("langflow")
+ LOGGING_CONFIGURED = True
diff --git a/src/backend/base/langflow/main.py b/src/backend/base/langflow/main.py
index e4a2611f8eec..b79f092bcf76 100644
--- a/src/backend/base/langflow/main.py
+++ b/src/backend/base/langflow/main.py
@@ -31,7 +31,7 @@
from langflow.services.deps import get_cache_service, get_settings_service, get_telemetry_service
from langflow.services.plugins.langfuse_plugin import LangfuseInstance
from langflow.services.utils import initialize_services, teardown_services
-from langflow.utils.logger import configure
+from langflow.logging.logger import configure
# Ignore Pydantic deprecation warnings from Langchain
warnings.filterwarnings("ignore", category=PydanticDeprecatedSince20)
diff --git a/src/backend/base/langflow/server.py b/src/backend/base/langflow/server.py
index 67061fbdd7da..0c3a21a2e25f 100644
--- a/src/backend/base/langflow/server.py
+++ b/src/backend/base/langflow/server.py
@@ -6,7 +6,7 @@
from gunicorn.app.base import BaseApplication # type: ignore
from uvicorn.workers import UvicornWorker
-from langflow.utils.logger import InterceptHandler # type: ignore
+from langflow.logging.logger import InterceptHandler # type: ignore
class LangflowUvicornWorker(UvicornWorker):
diff --git a/src/backend/base/langflow/services/cache/factory.py b/src/backend/base/langflow/services/cache/factory.py
index 74364dbfc0e5..32bb94f872ff 100644
--- a/src/backend/base/langflow/services/cache/factory.py
+++ b/src/backend/base/langflow/services/cache/factory.py
@@ -3,7 +3,7 @@
from langflow.services.cache.disk import AsyncDiskCache
from langflow.services.cache.service import AsyncInMemoryCache, CacheService, RedisCache, ThreadingInMemoryCache
from langflow.services.factory import ServiceFactory
-from langflow.utils.logger import logger
+from langflow.logging.logger import logger
if TYPE_CHECKING:
from langflow.services.settings.service import SettingsService
diff --git a/src/backend/base/langflow/services/settings/base.py b/src/backend/base/langflow/services/settings/base.py
index 88c2a64b49fe..592279af0766 100644
--- a/src/backend/base/langflow/services/settings/base.py
+++ b/src/backend/base/langflow/services/settings/base.py
@@ -66,6 +66,7 @@ class Settings(BaseSettings):
"""Define if langflow database should be saved in LANGFLOW_CONFIG_DIR or in the langflow directory (i.e. in the package directory)."""
dev: bool = False
+ """If True, Langflow will run in development mode."""
database_url: Optional[str] = None
"""Database URL for Langflow. If not provided, Langflow will use a SQLite database."""
pool_size: int = 10
@@ -151,6 +152,14 @@ class Settings(BaseSettings):
vertex_builds_storage_enabled: bool = True
"""If set to True, Langflow will keep track of each vertex builds (outputs) in the UI for any flow."""
+ @field_validator("dev")
+ @classmethod
+ def set_dev(cls, value):
+ from langflow.settings import set_dev
+
+ set_dev(value)
+ return value
+
@field_validator("user_agent", mode="after")
@classmethod
def set_user_agent(cls, value):
diff --git a/src/backend/base/langflow/settings.py b/src/backend/base/langflow/settings.py
new file mode 100644
index 000000000000..9a1d985c359b
--- /dev/null
+++ b/src/backend/base/langflow/settings.py
@@ -0,0 +1,10 @@
+DEV = False
+
+
+def _set_dev(value):
+ global DEV
+ DEV = value
+
+
+def set_dev(value):
+ _set_dev(value)
diff --git a/src/backend/base/langflow/utils/util.py b/src/backend/base/langflow/utils/util.py
index 1c91869041f3..cb3eec752d03 100644
--- a/src/backend/base/langflow/utils/util.py
+++ b/src/backend/base/langflow/utils/util.py
@@ -12,7 +12,7 @@
from langflow.services.deps import get_settings_service
from langflow.template.frontend_node.constants import FORCE_SHOW_FIELDS
from langflow.utils import constants
-from langflow.utils.logger import logger
+from langflow.logging.logger import logger
def unescape_string(s: str):
diff --git a/src/backend/tests/unit/test_custom_component_with_client.py b/src/backend/tests/unit/test_custom_component_with_client.py
index 3be2ce657f20..ab8e7859e80f 100644
--- a/src/backend/tests/unit/test_custom_component_with_client.py
+++ b/src/backend/tests/unit/test_custom_component_with_client.py
@@ -1,7 +1,7 @@
import pytest
from langflow.custom.custom_component.custom_component import CustomComponent
-from langflow.services.database.models.flow import Flow
+from langflow.field_typing.constants import Data
@pytest.fixture
@@ -20,4 +20,6 @@ def component(client, active_user):
def test_list_flows_flow_objects(component):
flows = component.list_flows()
- assert all(isinstance(flow, Flow) for flow in flows)
+ are_flows = [isinstance(flow, Data) for flow in flows]
+ flow_types = [type(flow) for flow in flows]
+ assert all(are_flows), f"Expected all flows to be Data objects, got {flow_types}"
diff --git a/src/backend/tests/unit/test_logger.py b/src/backend/tests/unit/test_logger.py
index 34414fc617f8..e86c703d23cd 100644
--- a/src/backend/tests/unit/test_logger.py
+++ b/src/backend/tests/unit/test_logger.py
@@ -2,7 +2,7 @@
import os
import json
from unittest.mock import patch
-from langflow.utils.logger import SizedLogBuffer
+from langflow.logging.logger import SizedLogBuffer
@pytest.fixture
From d0484ba41212f259b40a01e9fc9abafc75695c5b Mon Sep 17 00:00:00 2001
From: Gabriel Luiz Freitas Almeida
Date: Fri, 9 Aug 2024 11:15:35 -0300
Subject: [PATCH 11/38] fix: enhance error handling in build_flow and add error
handling for Flow build (#3259)
* feat: add error handling for Flow build
The code changes in `buildUtils.ts` add error handling for the Flow build process. It includes a new case for handling errors in the switch statement, which displays the error message and triggers the `onBuildError` function. This change ensures that errors during the Flow build are properly handled and the build process is stopped.
* feat: enhance error handling in build_flow to capture and report HTTP exceptions in the flow building process
---
src/backend/base/langflow/api/v1/chat.py | 15 ++++++++++++++-
src/frontend/src/utils/buildUtils.ts | 8 ++++++++
2 files changed, 22 insertions(+), 1 deletion(-)
diff --git a/src/backend/base/langflow/api/v1/chat.py b/src/backend/base/langflow/api/v1/chat.py
index 143937d1d146..49bf23410dfc 100644
--- a/src/backend/base/langflow/api/v1/chat.py
+++ b/src/backend/base/langflow/api/v1/chat.py
@@ -356,10 +356,23 @@ async def event_generator(queue: asyncio.Queue, client_consumed_queue: asyncio.Q
except asyncio.CancelledError:
vertices_task.cancel()
return
+ except Exception as e:
+ if isinstance(e, HTTPException):
+ send_event("error", {"error": str(e.detail), "statusCode": e.status_code}, queue)
+ raise e
+ send_event("error", {"error": str(e)}, queue)
+ raise e
ids, vertices_to_run, graph = vertices_task.result()
else:
- ids, vertices_to_run, graph = await build_graph_and_get_order()
+ try:
+ ids, vertices_to_run, graph = await build_graph_and_get_order()
+ except Exception as e:
+ if isinstance(e, HTTPException):
+ send_event("error", {"error": str(e.detail), "statusCode": e.status_code}, queue)
+ raise e
+ send_event("error", {"error": str(e)}, queue)
+ raise e
send_event("vertices_sorted", {"ids": ids, "to_run": vertices_to_run}, queue)
await client_consumed_queue.get()
diff --git a/src/frontend/src/utils/buildUtils.ts b/src/frontend/src/utils/buildUtils.ts
index 969138598ee4..77dbb8054e75 100644
--- a/src/frontend/src/utils/buildUtils.ts
+++ b/src/frontend/src/utils/buildUtils.ts
@@ -264,6 +264,14 @@ export async function buildFlowVertices({
useFlowStore.getState().setIsBuilding(false);
return true;
}
+ case "error": {
+ const errorMessage = data.error;
+ console.log(data);
+ onBuildError!("Error Running Flow", [errorMessage], []);
+ buildResults.push(false);
+ useFlowStore.getState().setIsBuilding(false);
+ return true;
+ }
default:
return true;
}
From b0f80e463834d716ad221a8229c871e88bb3baf8 Mon Sep 17 00:00:00 2001
From: Gabriel Luiz Freitas Almeida
Date: Fri, 9 Aug 2024 11:46:15 -0300
Subject: [PATCH 12/38] feat: add log_builds parameter to build_flow for
optional vertex build logging (#3262)
---
src/backend/base/langflow/api/v1/chat.py | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/src/backend/base/langflow/api/v1/chat.py b/src/backend/base/langflow/api/v1/chat.py
index 49bf23410dfc..2156b62b9ba7 100644
--- a/src/backend/base/langflow/api/v1/chat.py
+++ b/src/backend/base/langflow/api/v1/chat.py
@@ -146,6 +146,7 @@ async def build_flow(
files: Optional[list[str]] = None,
stop_component_id: Optional[str] = None,
start_component_id: Optional[str] = None,
+ log_builds: Optional[bool] = True,
chat_service: "ChatService" = Depends(get_chat_service),
current_user=Depends(get_current_active_user),
telemetry_service: "TelemetryService" = Depends(get_telemetry_service),
@@ -250,7 +251,7 @@ async def _build_vertex(vertex_id: str, graph: "Graph") -> VertexBuildResponse:
result_data_response.message = artifacts
# Log the vertex build
- if not vertex.will_stream:
+ if not vertex.will_stream and log_builds:
background_tasks.add_task(
log_vertex_build,
flow_id=flow_id_str,
From 5ce8cbda9b65cdc37380da4cc5b05f0da56b4004 Mon Sep 17 00:00:00 2001
From: goliath-yamon <141193714+goliath-yamon@users.noreply.github.com>
Date: Fri, 9 Aug 2024 16:30:23 +0100
Subject: [PATCH 13/38] Refactored and improved PythonCodeStructuredTool,
SearXNGTool, and RunnableExecutor (#3239)
* enhancement: Update PythonCodeStructuredTool to create inputs automatically and accept global variables
* [autofix.ci] apply automated fixes
* feat: Create a tool to search using SearXNG
* [autofix.ci] apply automated fixes
* refactor: reorganize imports and type annotations in PythonCodeStructuredTool.py for clarity and consistency
* refactor: clean up imports and enhance type annotations in SearXNGTool.py for improved readability and type safety
* refactor: Improved PythonCodeStructuredTool to allow arguments to have any types
* refactor: Formatted and refactored SearXNGTool
* refactor: Allowed RunnableExecutor to stream output and changed its build method to be asynchronous.
---------
Co-authored-by: Haseong Kim
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: Gabriel Luiz Freitas Almeida
---
.../components/prototypes/RunnableExecutor.py | 25 +-
.../tools/PythonCodeStructuredTool.py | 358 ++++++++++++++----
.../langflow/components/tools/SearXNGTool.py | 141 +++++++
.../langflow/components/tools/__init__.py | 2 +
4 files changed, 450 insertions(+), 76 deletions(-)
create mode 100644 src/backend/base/langflow/components/tools/SearXNGTool.py
diff --git a/src/backend/base/langflow/components/prototypes/RunnableExecutor.py b/src/backend/base/langflow/components/prototypes/RunnableExecutor.py
index 8aec1886f95c..0e872080afbf 100644
--- a/src/backend/base/langflow/components/prototypes/RunnableExecutor.py
+++ b/src/backend/base/langflow/components/prototypes/RunnableExecutor.py
@@ -1,7 +1,8 @@
from langflow.custom import Component
-from langflow.inputs import HandleInput, MessageTextInput
+from langflow.inputs import HandleInput, MessageTextInput, BoolInput
from langflow.schema.message import Message
from langflow.template import Output
+from langchain.agents import AgentExecutor
class RunnableExecComponent(Component):
@@ -30,6 +31,11 @@ class RunnableExecComponent(Component):
value="output",
advanced=True,
),
+ BoolInput(
+ name="use_stream",
+ display_name="Stream",
+ value=False,
+ ),
]
outputs = [
@@ -108,11 +114,24 @@ def get_input_dict(self, runnable, input_key, input_value):
status = f"Warning: The input key is not '{input_key}'. The input key is '{runnable.input_keys}'."
return input_dict, status
- def build_executor(self) -> Message:
+ async def build_executor(self) -> Message:
input_dict, status = self.get_input_dict(self.runnable, self.input_key, self.input_value)
- result = self.runnable.invoke(input_dict)
+ if not isinstance(self.runnable, AgentExecutor):
+ raise ValueError("The runnable must be an AgentExecutor")
+
+ if self.use_stream:
+ return self.astream_events(input_dict)
+ else:
+ result = await self.runnable.ainvoke(input_dict)
result_value, _status = self.get_output(result, self.input_key, self.output_key)
status += _status
status += f"\n\nOutput: {result_value}\n\nRaw Output: {result}"
self.status = status
return result_value
+
+ async def astream_events(self, input):
+ async for event in self.runnable.astream_events(input, version="v1"):
+ if event.get("event") != "on_chat_model_stream":
+ continue
+
+ yield event.get("data").get("chunk")
diff --git a/src/backend/base/langflow/components/tools/PythonCodeStructuredTool.py b/src/backend/base/langflow/components/tools/PythonCodeStructuredTool.py
index 3d70785458ce..0a0e35ae3289 100644
--- a/src/backend/base/langflow/components/tools/PythonCodeStructuredTool.py
+++ b/src/backend/base/langflow/components/tools/PythonCodeStructuredTool.py
@@ -1,95 +1,307 @@
import ast
-from typing import Any, Dict, List, Optional
+import json
+from typing import Any
from langchain.agents import Tool
+from langflow.base.langchain_utilities.model import LCToolComponent
+from langflow.inputs.inputs import MultilineInput, MessageTextInput, BoolInput, DropdownInput, HandleInput, FieldTypes
from langchain_core.tools import StructuredTool
+from langflow.io import Output
-from langflow.custom import CustomComponent
from langflow.schema.dotdict import dotdict
+from langflow.schema import Data
+from pydantic.v1 import Field, create_model
+from pydantic.v1.fields import Undefined
-class PythonCodeStructuredTool(CustomComponent):
- display_name = "PythonCodeTool"
+
+class PythonCodeStructuredTool(LCToolComponent):
+ DEFAULT_KEYS = [
+ "code",
+ "_type",
+ "text_key",
+ "tool_code",
+ "tool_name",
+ "tool_description",
+ "return_direct",
+ "tool_function",
+ "global_variables",
+ "_classes",
+ "_functions",
+ ]
+ display_name = "Python Code Structured Tool"
description = "structuredtool dataclass code to tool"
documentation = "https://python.langchain.com/docs/modules/tools/custom_tools/#structuredtool-dataclass"
name = "PythonCodeStructuredTool"
icon = "🐍"
- field_order = ["name", "description", "tool_code", "return_direct", "tool_function", "tool_class"]
-
- def build_config(self) -> Dict[str, Any]:
- return {
- "tool_code": {
- "display_name": "Tool Code",
- "info": "Enter the dataclass code.",
- "placeholder": "def my_function(args):\n pass",
- "multiline": True,
- "refresh_button": True,
- "field_type": "code",
- },
- "name": {
- "display_name": "Tool Name",
- "info": "Enter the name of the tool.",
- },
- "description": {
- "display_name": "Description",
- "info": "Provide a brief description of what the tool does.",
- },
- "return_direct": {
- "display_name": "Return Directly",
- "info": "Should the tool return the function output directly?",
- },
- "tool_function": {
- "display_name": "Tool Function",
- "info": "Select the function for additional expressions.",
- "options": [],
- "refresh_button": True,
- },
- "tool_class": {
- "display_name": "Tool Class",
- "info": "Select the class for additional expressions.",
- "options": [],
- "refresh_button": True,
- "required": False,
- },
- }
-
- def parse_source_name(self, code: str) -> Dict:
- parsed_code = ast.parse(code)
- class_names = [node.name for node in parsed_code.body if isinstance(node, ast.ClassDef)]
- function_names = [node.name for node in parsed_code.body if isinstance(node, ast.FunctionDef)]
- return {"class": class_names, "function": function_names}
+ field_order = ["name", "description", "tool_code", "return_direct", "tool_function"]
+
+ inputs = [
+ MultilineInput(
+ name="tool_code",
+ display_name="Tool Code",
+ info="Enter the dataclass code.",
+ placeholder="def my_function(args):\n pass",
+ required=True,
+ real_time_refresh=True,
+ refresh_button=True,
+ ),
+ MessageTextInput(name="tool_name", display_name="Tool Name", info="Enter the name of the tool.", required=True),
+ MessageTextInput(
+ name="tool_description",
+ display_name="Description",
+ info="Enter the description of the tool.",
+ required=True,
+ ),
+ BoolInput(
+ name="return_direct",
+ display_name="Return Directly",
+ info="Should the tool return the function output directly?",
+ ),
+ DropdownInput(
+ name="tool_function",
+ display_name="Tool Function",
+ info="Select the function for additional expressions.",
+ options=[],
+ required=True,
+ real_time_refresh=True,
+ refresh_button=True,
+ ),
+ HandleInput(
+ name="global_variables",
+ display_name="Global Variables",
+ info="Enter the global variables or Create Data Component.",
+ input_types=["Data"],
+ field_type=FieldTypes.DICT,
+ is_list=True,
+ ),
+ MessageTextInput(name="_classes", display_name="Classes", advanced=True),
+ MessageTextInput(name="_functions", display_name="Functions", advanced=True),
+ ]
+
+ outputs = [
+ Output(display_name="Tool", name="result_tool", method="build_tool"),
+ ]
def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None) -> dotdict:
- if field_name == "tool_code" or field_name == "tool_function" or field_name == "tool_class":
- try:
- names = self.parse_source_name(build_config.tool_code.value)
- build_config.tool_class.options = names["class"]
- build_config.tool_function.options = names["function"]
- except Exception as e:
- self.status = f"Failed to extract class names: {str(e)}"
- build_config.tool_class.options = ["Failed to parse", str(e)]
- build_config.tool_function.options = []
+ if field_name is None:
+ return build_config
+
+ if field_name != "tool_code" and field_name != "tool_function":
+ return build_config
+
+ try:
+ named_functions = {}
+ [classes, functions] = self._parse_code(build_config["tool_code"]["value"])
+ existing_fields = {}
+ if len(build_config) > len(self.DEFAULT_KEYS):
+ for key in build_config.copy():
+ if key not in self.DEFAULT_KEYS:
+ existing_fields[key] = build_config.pop(key)
+
+ names = []
+ for func in functions:
+ named_functions[func["name"]] = func
+ names.append(func["name"])
+
+ for arg in func["args"]:
+ field_name = f"{func['name']}|{arg['name']}"
+ if field_name in existing_fields:
+ build_config[field_name] = existing_fields[field_name]
+ continue
+
+ field = MessageTextInput(
+ display_name=f"{arg['name']}: Description",
+ name=field_name,
+ info=f"Enter the description for {arg['name']}",
+ required=True,
+ )
+ build_config[field_name] = field.to_dict()
+ build_config["_functions"]["value"] = json.dumps(named_functions)
+ build_config["_classes"]["value"] = json.dumps(classes)
+ build_config["tool_function"]["options"] = names
+ except Exception as e:
+ self.status = f"Failed to extract names: {str(e)}"
+ build_config["tool_function"]["options"] = ["Failed to parse", str(e)]
return build_config
- async def build(
- self,
- tool_code: str,
- name: str,
- description: str,
- tool_function: List[str],
- return_direct: bool,
- tool_class: Optional[List[str]] = None,
- ) -> Tool:
- local_namespace = {} # type: ignore
- exec(tool_code, globals(), local_namespace)
+ async def build_tool(self) -> Tool:
+ _local_namespace = {} # type: ignore
+ modules = self._find_imports(self.tool_code)
+ import_code = ""
+ for module in modules["imports"]:
+ import_code += f"global {module}\nimport {module}\n"
+ for from_module in modules["from_imports"]:
+ for alias in from_module.names:
+ import_code += f"global {alias.name}\n"
+ import_code += (
+ f"from {from_module.module} import {', '.join([alias.name for alias in from_module.names])}\n"
+ )
+ exec(import_code, globals())
+ exec(self.tool_code, globals(), _local_namespace)
+
+ class PythonCodeToolFunc:
+ params: dict = {}
+
+ def run(**kwargs):
+ for key in kwargs:
+ if key not in PythonCodeToolFunc.params:
+ PythonCodeToolFunc.params[key] = kwargs[key]
+ return _local_namespace[self.tool_function](**PythonCodeToolFunc.params)
+
+ _globals = globals()
+ _local = {} # type: ignore
+ _local[self.tool_function] = PythonCodeToolFunc
+ _globals.update(_local)
+
+ if isinstance(self.global_variables, list):
+ for data in self.global_variables:
+ if isinstance(data, Data):
+ _globals.update(data.data)
+ elif isinstance(self.global_variables, dict):
+ _globals.update(self.global_variables)
+
+ classes = json.loads(self._attributes["_classes"])
+ for class_dict in classes:
+ exec("\n".join(class_dict["code"]), _globals)
+
+ named_functions = json.loads(self._attributes["_functions"])
+ schema_fields = {}
+
+ for attr in self._attributes:
+ if attr in self.DEFAULT_KEYS:
+ continue
+
+ func_name = attr.split("|")[0]
+ field_name = attr.split("|")[1]
+ func_arg = self._find_arg(named_functions, func_name, field_name)
+ if func_arg is None:
+ raise Exception(f"Failed to find arg: {field_name}")
+
+ field_annotation = func_arg["annotation"]
+ field_description = self._get_value(self._attributes[attr], str)
- func = local_namespace[tool_function]
- _class = None
+ if field_annotation:
+ exec(f"temp_annotation_type = {field_annotation}", _globals)
+ schema_annotation = _globals["temp_annotation_type"]
+ else:
+ schema_annotation = Any
+ schema_fields[field_name] = (
+ schema_annotation,
+ Field(
+ default=func_arg["default"] if "default" in func_arg else Undefined, description=field_description
+ ),
+ )
- if tool_class:
- _class = local_namespace[tool_class]
+ if "temp_annotation_type" in _globals:
+ _globals.pop("temp_annotation_type")
+
+ PythonCodeToolSchema = None
+ if schema_fields:
+ PythonCodeToolSchema = create_model("PythonCodeToolSchema", **schema_fields) # type: ignore
tool = StructuredTool.from_function(
- func=func, args_schema=_class, name=name, description=description, return_direct=return_direct
+ func=_local[self.tool_function].run,
+ args_schema=PythonCodeToolSchema,
+ name=self.tool_name,
+ description=self.tool_description,
+ return_direct=self.return_direct,
)
return tool # type: ignore
+
+ def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict):
+ """
+ This function is called after the code validation is done.
+ """
+ frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node)
+ frontend_node["template"] = self.update_build_config(
+ frontend_node["template"], frontend_node["template"]["tool_code"]["value"], "tool_code"
+ )
+ frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node)
+ for key in frontend_node["template"]:
+ if key in self.DEFAULT_KEYS:
+ continue
+ frontend_node["template"] = self.update_build_config(
+ frontend_node["template"], frontend_node["template"][key]["value"], key
+ )
+ frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node)
+ return frontend_node
+
+ def _parse_code(self, code: str) -> tuple[list[dict], list[dict]]:
+ parsed_code = ast.parse(code)
+ lines = code.split("\n")
+ classes = []
+ functions = []
+ for node in parsed_code.body:
+ if isinstance(node, ast.ClassDef):
+ class_lines = lines[node.lineno - 1 : node.end_lineno]
+ class_lines[-1] = class_lines[-1][: node.end_col_offset]
+ class_lines[0] = class_lines[0][node.col_offset :]
+ classes.append(
+ {
+ "name": node.name,
+ "code": class_lines,
+ }
+ )
+ continue
+
+ if not isinstance(node, ast.FunctionDef):
+ continue
+
+ func = {"name": node.name, "args": []}
+ for arg in node.args.args:
+ if arg.lineno != arg.end_lineno:
+ raise Exception("Multiline arguments are not supported")
+
+ func_arg = {
+ "name": arg.arg,
+ "annotation": None,
+ }
+
+ for default in node.args.defaults:
+ if (
+ arg.lineno > default.lineno
+ or arg.col_offset > default.col_offset
+ or arg.end_lineno < default.end_lineno
+ or arg.end_col_offset < default.end_col_offset
+ ):
+ continue
+
+ if isinstance(default, ast.Name):
+ func_arg["default"] = default.id
+ elif isinstance(default, ast.Constant):
+ func_arg["default"] = default.value
+
+ if arg.annotation:
+ annotation_line = lines[arg.annotation.lineno - 1]
+ annotation_line = annotation_line[: arg.annotation.end_col_offset]
+ annotation_line = annotation_line[arg.annotation.col_offset :]
+ func_arg["annotation"] = annotation_line
+ if func_arg["annotation"].count("=") > 0:
+ func_arg["annotation"] = "=".join(func_arg["annotation"].split("=")[:-1]).strip()
+
+ func["args"].append(func_arg)
+ functions.append(func)
+
+ return classes, functions
+
+ def _find_imports(self, code: str) -> dotdict:
+ imports = []
+ from_imports = []
+ parsed_code = ast.parse(code)
+ for node in parsed_code.body:
+ if isinstance(node, ast.Import):
+ for alias in node.names:
+ imports.append(alias.name)
+ elif isinstance(node, ast.ImportFrom):
+ from_imports.append(node)
+ return {"imports": imports, "from_imports": from_imports}
+
+ def _get_value(self, value: Any, annotation: Any) -> Any:
+ return value if isinstance(value, annotation) else value["value"]
+
+ def _find_arg(self, named_functions: dict, func_name: str, arg_name: str) -> dict | None:
+ for arg in named_functions[func_name]["args"]:
+ if arg["name"] == arg_name:
+ return arg
+ return None
diff --git a/src/backend/base/langflow/components/tools/SearXNGTool.py b/src/backend/base/langflow/components/tools/SearXNGTool.py
new file mode 100644
index 000000000000..3749a70da529
--- /dev/null
+++ b/src/backend/base/langflow/components/tools/SearXNGTool.py
@@ -0,0 +1,141 @@
+from typing import Any
+import requests
+import json
+
+from pydantic.v1 import Field, create_model
+
+from langchain.agents import Tool
+from langflow.base.langchain_utilities.model import LCToolComponent
+from langflow.inputs import MessageTextInput, MultiselectInput, DropdownInput, IntInput
+from langflow.schema.dotdict import dotdict
+from langflow.io import Output
+
+
+class SearXNGToolComponent(LCToolComponent):
+ search_headers: dict = {}
+ display_name = "SearXNG Search Tool"
+ description = "A component that searches for tools using SearXNG."
+ name = "SearXNGTool"
+
+ inputs = [
+ MessageTextInput(
+ name="url",
+ display_name="URL",
+ value="http://localhost",
+ required=True,
+ refresh_button=True,
+ ),
+ IntInput(
+ name="max_results",
+ display_name="Max Results",
+ value=10,
+ required=True,
+ ),
+ MultiselectInput(
+ name="categories",
+ display_name="Categories",
+ options=[],
+ value=[],
+ ),
+ DropdownInput(
+ name="language",
+ display_name="Language",
+ options=[],
+ ),
+ ]
+
+ outputs = [
+ Output(display_name="Tool", name="result_tool", method="build_tool"),
+ ]
+
+ def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None) -> dotdict:
+ if field_name is None:
+ return build_config
+
+ if field_name != "url":
+ return build_config
+
+ try:
+ url = f"{field_value}/config"
+
+ response = requests.get(url=url, headers=self.search_headers.copy())
+ data = None
+ if response.headers.get("Content-Encoding") == "zstd":
+ data = json.loads(response.content)
+ else:
+ data = response.json()
+ build_config["categories"]["options"] = data["categories"].copy()
+ for selected_category in build_config["categories"]["value"]:
+ if selected_category not in build_config["categories"]["options"]:
+ build_config["categories"]["value"].remove(selected_category)
+ languages = []
+ for language in data["locales"]:
+ languages.append(language)
+ build_config["language"]["options"] = languages.copy()
+ except Exception as e:
+ self.status = f"Failed to extract names: {str(e)}"
+ build_config["categories"]["options"] = ["Failed to parse", str(e)]
+ return build_config
+
+ def build_tool(self) -> Tool:
+ class SearxSearch:
+ _url: str = ""
+ _categories: list[str] = []
+ _language: str = ""
+ _headers: dict = {}
+ _max_results: int = 10
+
+ @staticmethod
+ def search(query: str, categories: list[str] = []) -> list:
+ if not SearxSearch._categories and not categories:
+ raise ValueError("No categories provided.")
+ all_categories = SearxSearch._categories + list(set(categories) - set(SearxSearch._categories))
+ try:
+ url = f"{SearxSearch._url}/"
+ headers = SearxSearch._headers.copy()
+ response = requests.get(
+ url=url,
+ headers=headers,
+ params={
+ "q": query,
+ "categories": ",".join(all_categories),
+ "language": SearxSearch._language,
+ "format": "json",
+ },
+ ).json()
+
+ results = []
+ num_results = min(SearxSearch._max_results, len(response["results"]))
+ for i in range(num_results):
+ results.append(response["results"][i])
+ return results
+ except Exception as e:
+ return [f"Failed to search: {str(e)}"]
+
+ SearxSearch._url = self.url
+ SearxSearch._categories = self.categories.copy()
+ SearxSearch._language = self.language
+ SearxSearch._headers = self.search_headers.copy()
+ SearxSearch._max_results = self.max_results
+
+ _globals = globals()
+ _local = {}
+ _local["SearxSearch"] = SearxSearch
+ _globals.update(_local)
+
+ schema_fields = {
+ "query": (str, Field(..., description="The query to search for.")),
+ "categories": (list[str], Field(default=[], description="The categories to search in.")),
+ }
+
+ SearxSearchSchema = create_model("SearxSearchSchema", **schema_fields) # type: ignore
+
+ tool = Tool.from_function(
+ func=_local["SearxSearch"].search,
+ args_schema=SearxSearchSchema,
+ name="searxng_search_tool",
+ description="A tool that searches for tools using SearXNG.\nThe available categories are: "
+ + ", ".join(self.categories),
+ )
+ self.status = tool
+ return tool
diff --git a/src/backend/base/langflow/components/tools/__init__.py b/src/backend/base/langflow/components/tools/__init__.py
index 3ebec76ee83b..6c411d6301ac 100644
--- a/src/backend/base/langflow/components/tools/__init__.py
+++ b/src/backend/base/langflow/components/tools/__init__.py
@@ -5,6 +5,7 @@
from .GoogleSerperAPI import GoogleSerperAPIComponent
from .PythonCodeStructuredTool import PythonCodeStructuredTool
from .SearchAPI import SearchAPIComponent
+from .SearXNGTool import SearXNGToolComponent
from .SerpAPI import SerpAPIComponent
from .WikipediaAPI import WikipediaAPIComponent
from .WolframAlphaAPI import WolframAlphaAPIComponent
@@ -18,6 +19,7 @@
"PythonCodeStructuredTool",
"PythonREPLToolComponent",
"SearchAPIComponent",
+ "SearXNGToolComponent",
"SerpAPIComponent",
"WikipediaAPIComponent",
"WolframAlphaAPIComponent",
From 98f00d75f0a503f5ae379156e27daac62a2b2898 Mon Sep 17 00:00:00 2001
From: Lucas Oliveira <62335616+lucaseduoli@users.noreply.github.com>
Date: Fri, 9 Aug 2024 12:46:17 -0300
Subject: [PATCH 14/38] Update README.md (#3204)
* Update README.md
Updated Readme with suggestions
* Changed core features
* Changed thumbnail of video
* Fixed hero image
* Fixed readme texts
* Update README.md
---
README.md | 194 +++++----------------------------------
docs/static/img/hero.png | Bin 124069 -> 156938 bytes
2 files changed, 25 insertions(+), 169 deletions(-)
diff --git a/README.md b/README.md
index a8676da6b3b9..7aa3e8c90c11 100644
--- a/README.md
+++ b/README.md
@@ -1,33 +1,16 @@
-
-
Langflow 1.0 is OUT! 🎉
-
Read all about it here !
-
-
# [![Langflow](./docs/static/img/hero.png)](https://www.langflow.org)
-
- A visual framework for building multi-agent and RAG applications
-
- Open-source, Python-powered, fully customizable, LLM and vector store agnostic
+ Langflow is a low-code app builder for RAG (retrieval augmented generation) and multi-agent AI applications. It’s Python-based and agnostic to any model, API, data source or database.
Docs -
+ Free Cloud Service -
Join our Discord -
- Follow us on X -
- Live demo
-
-
-
-
-
-
-
-
-
+ Follow us on X
@@ -39,168 +22,41 @@
-
-
-
-# 📝 Content
-
-- [📝 Content](#-content)
-- [📦 Get Started](#-get-started)
-- [Running Langflow from a Cloned Repository](#running-langflow-from-a-cloned-repository)
-- [🎨 Create Flows](#-create-flows)
-- [Deploy](#deploy)
- - [DataStax Langflow](#datastax-langflow)
- - [Deploy Langflow on Hugging Face Spaces](#deploy-langflow-on-hugging-face-spaces)
- - [Deploy Langflow on Google Cloud Platform](#deploy-langflow-on-google-cloud-platform)
- - [Deploy on Railway](#deploy-on-railway)
- - [Deploy on Render](#deploy-on-render)
- - [Deploy on Kubernetes](#deploy-on-kubernetes)
-- [🖥️ Command Line Interface (CLI)](#️-command-line-interface-cli)
- - [Usage](#usage)
- - [Environment Variables](#environment-variables)
-- [👋 Contribute](#-contribute)
-- [🌟 Contributors](#-contributors)
-- [📄 License](#-license)
-
-# 📦 Get Started
-
-You can install Langflow with pip:
-
-```shell
-# Make sure you have >=Python 3.10 installed on your system.
-python -m pip install langflow -U
-```
-
-Then, run Langflow with:
-
-```shell
-python -m langflow run
-```
-# Running Langflow from a Cloned Repository
+https://github.com/user-attachments/assets/a1a36011-6169-4804-87ad-cfd4c5a79872
-If you prefer to run Langflow from a cloned repository rather than installing it via pip, follow these steps:
-
-1. **Clone the Repository**
-
-First, clone the Langflow repository from GitHub:
-
-```shell
-git clone https://github.com/langflow-ai/langflow.git
-```
-
-Navigate into the cloned directory:
-
-```shell
-cd langflow
-```
-
-2. **Build and Install Dependencies**
-
-To build and install Langflow’s frontend and backend, use the following commands:
-
-```shell
-make install_frontend && make build_frontend && make install_backend
-```
-
-3. **Run Langflow**
-
-Once the installation is complete, you can run Langflow with:
-
-```shell
-poetry run python -m langflow run
-```
-
-# 🎨 Create Flows
-
-Creating flows with Langflow is easy. Simply drag components from the sidebar onto the workspace and connect them to start building your application.
-
-Explore by editing prompt parameters, grouping components into a single high-level component, and building your own Custom Components.
-
-Once you’re done, you can export your flow as a JSON file.
-
-Load the flow with:
-
-```python
-from langflow.load import run_flow_from_json
-
-results = run_flow_from_json("path/to/flow.json", input_value="Hello, World!")
-```
-
-# Deploy
-
-## DataStax Langflow
-
-DataStax Langflow is a hosted version of Langflow integrated with [AstraDB](https://www.datastax.com/products/datastax-astra). Be up and running in minutes with no installation or setup required. [Sign up for free](https://langflow.datastax.com).
-
-## Deploy Langflow on Hugging Face Spaces
-
-You can also preview Langflow in [HuggingFace Spaces](https://huggingface.co/spaces/Langflow/Langflow). [Clone the space using this link](https://huggingface.co/spaces/Langflow/Langflow?duplicate=true) to create your own Langflow workspace in minutes.
-
-## Deploy Langflow on Google Cloud Platform
-
-Follow our step-by-step guide to deploy Langflow on Google Cloud Platform (GCP) using Google Cloud Shell. The guide is available in the [**Langflow in Google Cloud Platform**](./docs/docs/Deployment/deployment-gcp.md) document.
-
-Alternatively, click the **"Open in Cloud Shell"** button below to launch Google Cloud Shell, clone the Langflow repository, and start an **interactive tutorial** that will guide you through the process of setting up the necessary resources and deploying Langflow on your GCP project.
-
-[![Open in Cloud Shell](https://gstatic.com/cloudssh/images/open-btn.svg)](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/langflow-ai/langflow&working_dir=scripts/gcp&shellonly=true&tutorial=walkthroughtutorial_spot.md)
-
-## Deploy on Railway
-
-Use this template to deploy Langflow 1.0 on Railway:
-
-[![Deploy on Railway](https://railway.app/button.svg)](https://railway.app/template/JMXEWp?referralCode=MnPSdg)
-
-## Deploy on Render
+
-
-
-
+# Core features
+1. Python-based and agnostic to models, APIs, data sources, or databases.
+2. Visual IDE for drag-and-drop building and testing of workflows.
+3. Playground to immediately test and iterate workflows with step-by-step control.
+4. Multi-agent orchestration and conversation management and retrieval.
+5. Free cloud service - get started in minutes with no setup.
+6. Publish workflows as back-end APIs or export as a Python application.
+7. Real time observability with LangSmith, LangFuse, or LangWatch integration.
+8. Support enterprise security and scale with free DataStax Langflow cloud service.
+9. Prebuilt ecosystem integrations to any model, API or database.
-## Deploy on Kubernetes
-Follow our step-by-step guide to deploy [Langflow on Kubernetes](./docs/docs/Deployment/deployment-kubernetes.md).
+![Integrations](https://github.com/user-attachments/assets/df4a6714-60de-4a8b-aff0-982c5aa467e3)
-# 🖥️ Command Line Interface (CLI)
+# Stay up-to-date
-Langflow provides a command-line interface (CLI) for easy management and configuration.
+Star Langflow on GitHub to be instantly notified of new releases.
-## Usage
+![Star Langflow](https://github.com/user-attachments/assets/03168b17-a11d-4b2a-b0f7-c1cce69e5a2c)
-You can run the Langflow using the following command:
+# 📦 Quick Start
+- **Install Langflow with pip** (Python version 3.10 or greater):
```shell
-langflow run [OPTIONS]
+python -m pip install langflow -U
```
-Each option is detailed below:
-
-- `--help`: Displays all available options.
-- `--host`: Defines the host to bind the server to. Can be set using the `LANGFLOW_HOST` environment variable. The default is `127.0.0.1`.
-- `--workers`: Sets the number of worker processes. Can be set using the `LANGFLOW_WORKERS` environment variable. The default is `1`.
-- `--timeout`: Sets the worker timeout in seconds. The default is `60`.
-- `--port`: Sets the port to listen on. Can be set using the `LANGFLOW_PORT` environment variable. The default is `7860`.
-- `--env-file`: Specifies the path to the .env file containing environment variables. The default is `.env`.
-- `--log-level`: Defines the logging level. Can be set using the `LANGFLOW_LOG_LEVEL` environment variable. The default is `critical`.
-- `--components-path`: Specifies the path to the directory containing custom components. Can be set using the `LANGFLOW_COMPONENTS_PATH` environment variable. The default is `langflow/components`.
-- `--log-file`: Specifies the path to the log file. Can be set using the `LANGFLOW_LOG_FILE` environment variable. The default is `logs/langflow.log`.
-- `--cache`: Selects the type of cache to use. Options are `InMemoryCache` and `SQLiteCache`. Can be set using the `LANGFLOW_LANGCHAIN_CACHE` environment variable. The default is `SQLiteCache`.
-- `--dev/--no-dev`: Toggles the development mode. The default is `no-dev`.
-- `--path`: Specifies the path to the frontend directory containing build files. This option is for development purposes only. Can be set using the `LANGFLOW_FRONTEND_PATH` environment variable.
-- `--open-browser/--no-open-browser`: Toggles the option to open the browser after starting the server. Can be set using the `LANGFLOW_OPEN_BROWSER` environment variable. The default is `open-browser`.
-- `--remove-api-keys/--no-remove-api-keys`: Toggles the option to remove API keys from the projects saved in the database. Can be set using the `LANGFLOW_REMOVE_API_KEYS` environment variable. The default is `no-remove-api-keys`.
-- `--install-completion [bash|zsh|fish|powershell|pwsh]`: Installs completion for the specified shell.
-- `--show-completion [bash|zsh|fish|powershell|pwsh]`: Shows completion for the specified shell, allowing you to copy it or customize the installation.
-- `--backend-only`: This parameter, with a default value of `False`, allows running only the backend server without the frontend. It can also be set using the `LANGFLOW_BACKEND_ONLY` environment variable.
-- `--store`: This parameter, with a default value of `True`, enables the store features, use `--no-store` to deactivate it. It can be configured using the `LANGFLOW_STORE` environment variable.
-
-These parameters are important for users who need to customize the behavior of Langflow, especially in development or specialized deployment scenarios.
-
-### Environment Variables
-
-You can configure many of the CLI options using environment variables. These can be exported in your operating system or added to a `.env` file and loaded using the `--env-file` option.
-
-A sample `.env` file named `.env.example` is included with the project. Copy this file to a new file named `.env` and replace the example values with your actual settings. If you're setting values in both your OS and the `.env` file, the `.env` settings will take precedence.
+- **Cloud:** DataStax Langflow is a hosted environment with zero setup. [Sign up for a free account.](http://langflow.datastax.com)
+- **Self-managed:** Run Langflow in your environment. [Install Langflow](https://docs.langflow.org/getting-started-installation) to run a local Langflow server, and then use the [Quickstart](https://docs.langflow.org/getting-started-quickstart) guide to create and execute a flow.
+- **Hugging Face:** [Clone the space using this link](https://huggingface.co/spaces/Langflow/Langflow?duplicate=true) to create a Langflow workspace.
# 👋 Contribute
diff --git a/docs/static/img/hero.png b/docs/static/img/hero.png
index 85bc2731dfb6c26354e8a53b1f0b87291a5d9706..5eff7db0dd54d0c23dce6403c2273efe69b03fdb 100644
GIT binary patch
literal 156938
zcmaI7byS=0wk-@4iWRpOmjcBdinqn3P+SwBxVu{m6nA$h?he5M6fIsfxNCyDbJP8u
zbH4rC=Z^bV#><;A!dlOgx#pTr*cT-kEDTZ%1Ox;uIax_n1O${N1O&tjw3qO|F#b4Z
zg?}JAtIB*rs2U^RM?j!PkdyqV?g2c=LQPedx&rx&a=vMCklGcEM^k5b4egMtHckG@Gqm1TMY7m7GWI~V5%+*1Hh
zx1KA<+3da=Q{w4N?mXqFv;3(Rf+#}6?2}e!q{gJF$nYJKR5%C)v6*!_l2q$Gxp{B<$4$Xc)4p^27QPCQR
zy{-SrkUUnYo(gx58^+G@mW3<$t@szygJ`m2Ym&2T9rMXwSKZLqL+=aqoCe1lg?U_U
z|Gs9ERl0k6ahK#q)G7(iU6tv)l$blwX7&K*Bjh7(qm8E?PT%o^tU)
zneHb_3^5v_rgWB$uil3l0#(m?!N#vv&N0jwFlU3UmvP(Oue_q{LPW#3y<6_cF1tAI
zH@(DOF2qO}1J?4^UQbyeI8SzEs$)1$7QQw}546`}PvsM@KaH4tqhh_({&{yY-iici
zj_Sj49j`3pAKv-vix%`)hw7zsg?u`sq^u8aKfF~xLL+V}85RnOzWNEDgeM$)x8^N0
z&0sKpq?y#iaAdbkYMk6ohn4_}k8UomHmh8dW{iSb#xOe2CF_ec2s8K&3i*xkt$N@3
z&iIy$vbq>c>GqZQ2D}r@Dlk~gQ=HRXAoe|7;1#eP*{?fvo*rFm6+F!70$vzJ^sLC@
zTPG~H+{HVresarF5lHt=m`sgM1v5jEwGLc{A|@>(M-ioe>JC~f?2c!XdS|i5$~Hqd
zET+o5%DkuwrhE~m6?FxZ=>DGkzZsef#iL#aUhZT{DL3>JZZCl}-y&rO555L^Y-S?%x@DM=(Wag}g~8^RieipgidIU>
zNyoeraJB6eil--b@Fq@E*f8!o{XtsMP<3?dMq`d5dL=Ew5&G3741=ECOkyQd8oIya
zayk`3Jo?0Am-iLokYChhkfl^?_oBvH-}FKd?_cr#Z&rQ_60ZCk`fesjrg%rc)VVt3
zmE&oD()PCH;7OzL&_JZN8t&$^@TJGH)$kSaiHLHopFXDw4T0d`{fknX_92aiHH9TbYvT$pY}G#rxx3eK#WvrP
zu=Fbj()n@QyBtEguq;XaC(^A$2H3WZ!L8t36Phrv80k7bweNG?k4|wS4OZ1=0h_u3
zVcGRa8%3g-kN=aa|DP=%W~e`OgRMq*qT=?HTLSi3U?-au&Z{#cdA}ty`^bpT)DTz3a5`C*HwqblkT*^*mQG
z)?~VHB`LkLxs&&Q>*j0Z>%u`;DNd>l*$Ho5k^RF?5(tq=af;;nDYx^sOtD~lntcgq
z(rGQGf$YVvU4zwBgH_dg8RF~uG39IrpHR*;F%d-4IzzMaaTS!I@cyoU=EZqdkE1Bn
zr+x}Ot&>*w6t+xPv)BE93g>DJV_Fb&-|i@*g#A}l`u|T2LAH5{8-9|8G7PaT4mhMh
zKr4(%KS)d73WcZmxAoPhA-rg8U
zOK-dV2`a5mmB2?W2;4-_#N?`$%f2T~74-x|A~@p?&oUfo0XZ>m$q>s#?s>{Yy_FJJ
z0eaoexOYJ<#QC3x$%DY1xO+~&Z#&%F0omY+Bhk`eerO5>vvo$AI39OEV>bW-R}5KDK$2%Gp4*6oxBRJ+S9zk6YwGQL)%Zq44KaA3NX4%7(tHybT~vL!g?
zf3+_W4^AjKT8ts9uK-i5dr^n=e3_}A6epYmu0e0#dXG?XsW=lj|F)X0pU`a?EaKpE
zo$pQq9MmCpL}4tfWJ}+zm?Wt({VykdlcUbSJM#4Ow5VUSd&k7|>p=YJgQKJ4k2R(3
ztz6)8>s*b|i}3L9x=HC>c0i5Iyc4T#Gd0U)5-8qkrtIgn-~CUyk7_=GPH$%`bO0;N
z1P0^CX5hQaTF0K3gv7)Q9ZDS9`O;WG86_obfA7PE#<>PZy5}_CWP@_z*(x16Z1=^F
z2Di7jCD|DfuGWyFT)GnSWhM5dz;LM>j@p-iQqwy8Wji?yiG$alCgthrgy?$|G$>z(
z*tpB;xjN+0TyMX7QD240MnA*G^}7l^f}!!TZrJtwr>tdlL5m}9!9k+4GW9-pW4(KP>C1?C&a{AkZvtg^2*xgppO^;Ei
z_n2tgSs>_#jy!Txjr0X0o+=C@hV;X}BTJQCN~)hf=?0
zquO)eGwov3WA(RCq$_#3Ub>`Kv>SkwRb*)Er(+9WHm!sBnIBU!-szbEje->
z{gU_P3dt_Og{H}dT~bAk>+JmOXne6ZgPDU(mh9mNoh}Wuwrj?-
z2cHT(-8^3RLR^=;?b*Nirwj2pjE;hq$L{aepM7JSI|@+dZ!kcFU!jSEavUF1ssN5h
zKS3z
z^!+rbSoN9`BNX|^b9^4~R=js9T%eOE1LW=^FL8VtMm}{aW(}Q5g;04CE*!ToW_zB6
zu|gDQOg~XOuU92zb1dk1d-1mUIap~Hw2{ZYe^6(pJ<8$zX*$<^)4(OY1xm?Sj$y{y
zA_Jvka3<%Fsq7#rx>$J64K(%C2U=*L#%sH!I7Hyj8J(qrO7mPOu)o}oEa5sxPDBvc
z<{KFKDo(m<)LTo?e|-^TU}jI-n+ZuX4CiT~uZ*H)^N-v*t{R!x@8XXE6C{2B1q0&e
zL%S8ML7CtJ07!XGRnxS7hOI68L=f9=L;ShHM*%p{)92|z>2dgg=1H<}8#k>|ov7>MF_^OFd2)UX;IAypU(iNq)0{T>0{$l?fFnacq
w_GZr4gJIfW
zHoc}#mO`U+vom~*Sxy+FymKhWo!9KdY@BrSdQDcNg;&s-@@8^ihmW8x$ZoV6(BMEL
zub|-B#&>?MqN^Iv|2f@oaHCJV(TUz3WRx%cj-7c2QV$u!B0Z7y=Rc|+6UAwnDc{V0
zexTYJH65@=Y~yG80QPd_@?R0LNsL>yLa3l%aK1j6!?W)=6*{)=3C5~58znthC_vY&
z*8MS4u9fe0xL~o+-oo_+|4;INv9}p6x4K%av@~MRrTG?
za_`4_yx&YBF_2VhB$sH0RO*qm5fh#`j^v0_^+`!tEK~@>Jr6~{&2n1^t2Z|`Iv*A@
zpEy+@=0JiC2i%_DKGfLjY;Eu6H`kykq_89EiL>c8N!ry2O>Dd)jDe`z{t(9bOI!d)btnc+Og4e26@)-tt^h>jNggTLWIfvx!e^A#y9Sf^
zqq^_$n>FoNDglO?QYWN8v3}tDcvp}-hBCIui_X1Eb;`k3a8p0DY7=Lgb$)!KSS=(!
z4clCn1{Lb&AQnWAQhJL^D=O$?FzR7j-$qZTzA)OCV|P;V{zwnE&2s;w%b
zH+Bs%*E|P9vRCi+XC%kQ+>7(!yBOi??&z$R*vI5K7;+B%%oIJCgf0EC*b&BSvv8$S
zFeUKTM7i%j`Y;|flG5JA@00HJH1XAb~ahp@uHHS9z+)ao=}~YhYu~IC{HO
zCH2Ple$;<+T+sO>hNVjK*!Q}p0G)tyt8cf~V}iCiN3kON=332R?IY^fnKF$M9Ug;3
zon~BtB`22fhzO5fp@hBX`y;(qkH<3lp}-js)!DiA&xVGe=lYYl`(ud^@O8f@iGI+6
zD|D@`y0g3Uc^ifQ^r~vlna?kqAB%u7QfAe~|&3zz@
z@sR@llQMF0T&c)U*RbdAtuO+|{`uY9g67v$?bqzp#}&@w+s5-XelOUYxA3RSG{}sl
zAHR`g`@SRrIxz{k?}sO`Xy!=+*`F?FWIyRUd9T<>rA>tp868_rn%BseXSPAEDB0iuqjhOUuA#?1wPHwD9o~0-yEvIpN3Z
zjtkn9BbnM3k)!t&fO_J67p4_Y7~8|$@Ob~-*Y3x)9_$`t9P!>lLD>DJ!1Bm$T9^aT
z`bwFrwf@XCX$1g8Jmchaa$lk2L%wU^!sVB7%F=Q7;0!!2EzbGw3Y*<1=jiy5;|Dpx
zihw7@*Ixm*bH*`l&D+5+hd;kU)_&*5d{j}PDN@RgT+ey6={(3U-ej{78AT?r)~E&C
z!GEk{c?zpFmNp*Rl52kwK5jwf5IKKk1(`=r(f1SNw_OUdnsNH$0RfysV^cVv2B>a}
zh8hKyezO7PEScc1zg@y+*Gqo|?(NrMp}!?L>hR?|$Hf#SU|8m%Oy{AUulqKJW|Ic(
zHHx^IaVx0GRuS#x?7P*0%prBCP7bC@1d}u`VwLyu+a@pJw6S*m!67oA%r0)w$_8?8
zcP^e2y__XD;TDaZVjd=@HSx{8*Y)-yrj;-iLU?JM!jqY(7;eu0J#CsCILnt)%g6Yq
zcKdZit@;+nRm*SF77pW(?RUB3{sAfxr1;^Uz!4A5Qg695&rXRWW|XZ8+3VQa0WJkP
zK8M@xv;z)TuEhS)MHhCq^@&5iP4=Sk|BdZckn3ocsylXcAKshs_D8?z$@m&K&UfA`
zuBBO_|DCj@rTKEK?PRV8=j;BY>E22{Dz?ZTb^F<5gvDG5*V8
zT$z2n40-kqp&E$(#Wj%AMOT^D)nou!aUdxAoE0#U@7E=YBNDVO@f=azaf83y>b{wQ
z!U`TbYCHZe2GNhutLjXS$u~bf2q=va3L_?*$DUEa6;$kG$Zs42uZG$Y60+wApBe@t^#dT%8Ew=8L-F
z(*rf-v)?X{h5gTczsK7XxmW!jiGp#+g5lp=zstBc{?jGn0lfA;@<+H{brx9tvIky$
zdVlbn?)0Vy=QQ&U+DEq(^B!qj>=rpZJYu5xv}(`&5zFK3bl^+58=G=7XvJ1wx>}2h
zpg(4FVqaP88tJI@QM17sLJ7WF#(6>IiRQ^?wYCyM+23=$JTQ*-9y(`yda?FVyVFlJ
z`aRTw+PbqOKiuz2X~E
zU1?p_iI^c^+r2jMa#F&K3}2Rg=}`&l1OR7@PICdXnDCZIfbD+8mI*>b^ibE+;t~6=
z&SuK;%~Gr(1{b7ZPJiZsl#U&X*WZ{8+9ZCLgY-pge;skeJJ6I4YKXGMaQ)D|^H3?Y
zc9#O;UD=}EQ>)fCH^pcL_$6xaFAh*7`p@0jg+5`u@&?9VXn*#L|DoDZTIY4>S70(H
zc|g}E7Sk1HY=GurOUGI;xwhEkp~VABQJfU_{#^?R#pEw`lK0}s68C04mg)%*ox8f_
zdvlh*@t@!f_dCLO8#H%f#}V2d8Q*8o>gV|3>b0+lgNG0UU!osB^gMdGg&z{Y+6&PV
z*y2cml)kAl5h3W;YUA$-T;0k7A;GV>?hc36pU%jh`$r+bo~bHc`w4vS2HRn}_%uQ)
zF_(9#vP#TL_E3MaY+opPHg~X;jDhoGls>tD{RC17g(o4U=ZDP3zW&p_Tt*LUd$%qo
zZ+g5V_vLGZm|3%lnFL0~HQ%PG_U52_J^S`w^g;@6dh%`ze6Dm@b?Q<<-V15wKiv*>
zU*GPd>HEx^dhCQT)>zHRv;WjdTQ7X>sz~c}T>m}UpqM7|=GpUE^kAkm
zfpN*%diq1TW@Q)cGHkXyCP7II1+?r#N3z2@Iyx2j@)RqHXH1L?wBka?{xo2kdcRBF
z5%&vd!?PaUcJzx#wYj{u&R(p>o-LA0n0T{4JStaI*rf|4ML)dFt`jQ-=q6R-0DRL~X=6G;liKYWFEtug`}T>9;X
z+(7&=66QryHaOWAs)L4e|9MSnMw!F#IT;H2(}`X%>c_?sWyho^TV#
zz@Ht<>*+3!cK9sn%kdBF=TLdtEgo1^QDq+mMLnwVbQKrNaoSexq^@}2C$S0<`FjK6
zJMiKZLo$4moEEW}vA5iCc=jeDDiw_{BQuB4tF~W4=)pX#BB`HxU_TXJFW%!WEU3gm
zAGldWTXw=nL*0xqY&6R2F;a%Y><=9Vjdtd9=RU=Mgwj2O%
zk;6SOoyGv&=W6#x8lAzZu-3wK-uZf0$FezbtasU>NpOnqAC&Yrv>^Q7!4Bql+n`ln
zf$TRTXj4T>+qSOIr8gu7ZYY%gHy>@7SbMPqRs+0`Nx#8an6vgZZ}+1hNAagn_-wge
z=J)8Z$@kd)a9mao_BxK}%<|zrYB|UDONAO>3x)2!dwyy=_7K8h0HvjxauBWi&8f&Z
ztDPR{*?ZwF8%sx>1qloLBo-3Z8hFX4m7ra-++X!vYF)3NHE#mFPhY5@V)+o>Q#l|g
zo1UNeUZ1(pU5j3zNk_jPqpQ_nCLjhpEeuQC6koz2Cekdiz@K+`1zkq3~(N7#&E(wX{Pu`tXNnP*;H4n&t6QX`rW?gMWZ5T53^*|
zyQtCpmXpYb!Y;jgR2|pY?@eFSIyb`tdGh-0_hZ~wy&@EB#`Z=*6VKAKDXSAk)nJEh
z4XyLfD9KGf3W=Z-cAVN*`iFDJeVX|EtOk@3%zekSe(y`|l3d&P
zu{Vm!MRHO+M+PThy&B`_mIlmRnQi>+&N#G+&%(1Zim35y1I;SE1;*|p%KDN%d2OQ+
zUMYp5(X#~I3kVL|QsGc#3xiK$p-}I}NvN%ARC)uLGfN)z8CD~LnN~&rA3>Nq^sg8^
z5jp}3u(upI`B^Y|+b%S`fn$0?v!^RE?S@mM5UTG4%1^?{>^cEOqv@MHI4ip$&fJ4T
zsxdX8#{*QJa|C-8*lre3bbsM6s-PB!b)WErn%`{u*!vn#%-s*8H
z%#%us)!>&}o?`ath&A
zHuD&dc7}qz0{NP@j?^s13pGah-00UYuU-zfgqr0jXiNqkGX<^{H;+JYirJSZ!x5a4wr3dxom{^
zX!{sU^n&svt;6SRK^1SM>A8V+=dy#zloYb+Kve9mm>3Rmt4J&@j|UK}C_LVAetsj+
zzc)@$`Y|6v88tWuU|m<2qB)lt;>z3WdG41;;vjwUfU>ttqWM<5Z6K&ddA70SbIPCF
zvHsLyJsHnxcgJh!h|gz+Z0VNdCAMZ%B1p#DGf|&KX+1(T1aJax}=(Dl)L;I;8V^J&;_B`Zp4R^68up=6H6$QBfTgmPWKH1T~{3eBMX
zFg{UNiL#;T+G48E%oE@WJqYS=(U05P
z)8AR>cHx|v#2ws400yutO=xo{CI=Gl0g
zx#qc$%k4O`$zh#ig?4Q>839bfQ6+8yc%IBwV>-pas^1(NQ}1QR4xO$Vy&oQ`YtjbT
zu)iK&XoTg~l;G;|+w;HQ-P!qCoCT%r)D$Bg$r)dqT{QL)uv-d(58o$V+I~J)o!i67
z#<$%5>>7oheAZ_@N^fND`_7tnAr*t-#-RI^7Xf@eh(?w@)`Vy$+EH
zsAqMxHKS2jFo9gJODl^%H8iVxd*0hbJ~lRE)B5$TsSW0yD{9Z;#rGROFrxwiu;X@~
zakg<)oSCwu<4P)DwtZjI%UZEUimpbjI+Hcysu&-cHz0PZoqlEMTGXey&;jrHP{2+I6sG+K>flG>bl
zP1g&>!UI>EL17Q$H*txdYpxI27v6}gG$ib^bQt9IXD_#%szkDw3s>zKg2zQYgl$Av
z_3FJWwf39dGf!({M~+uzhe>PhH}Y7AD@8JGzH_Hi
z%ye-?lLOgg3>_%pGI2JO(8|61XwgFWl4pG^o-#Xe0_hLNlz73AbaXSGfb_Rn#L5@C
zsHS=^dZU_|kS@sC7ImrZdMopC^5+SUOfxDecFTOdF1EU}R72GtGHYe8jg{ZWMLZY3
z#OdyjBJ<2=#m}Pf5ptV;*KzA~Q9JN;VEpkS+*Zd!*`8gYN^V~B*nG%pko%rI_z?fudB@Tn#sF1s`eG=Ljd<;YtrSN*O4*0yNvPJRpkrK
ztQCWf!G2pbcmA3NYQigwhBrvRF5O6PLebrYI2-kqaavRVT?p_Of@L9~UvI&_Z?mkH
z+5xf|LV!dHhM;BlU8Zk-=|WM(-&8n=OIM7>MJJkkQK!qY#+_%Cvw7oWuBXba@X-j4
z{1#r*lyfH^(7ccGviUh6r9n+TRUwDc%4E$Zm~M04vL!VpW>
zFjVpJ_nbx4>y7YX7d1&+tQ(>tFf|}XN&6|H4rgW0KXsbu$3p?RDQQExi8yY{$V@8D
zqf?r723A>T99E+HuO#ycGq5e<3^4zSSR?lv*6;TXGY_$=6nM_X$!6`XTK7Su$nKo2
z`mVa-w36Tn(fKz9tuT|Du=0iQS(w&wo0gJEK^wn=Ov|8RbM@ENSk{5EVuk)g?)1M+P1;maw!1Hy*r3FJ8hF@N;xp
zdJvH#^lrdXf>PSXrCbPkAGUZA9DhrY34$FaHOBnBj(}iwidU8FRe)fwvN`s(8;{L~
z*1;DBEM*eWfY{s<*J>4Ed!IehKj)(<=HzMmWWK{e;z~T0bZ0*gc+UOngiW2IR;uEa
zE4#42ol5dhYF2^i^RW&|9(^yJhsr-3^djfo7Dfqx;ompvltS+bpeMNU2J=BKptoxPL-Et04DPFbw
zopZ4$i4BMGIx<}PvKG15>Ccw|%;nfxG=VJQ46!r%@brb*`!7@u_+&c7AFjyv1>IbX
zj@v)G=*kJbV~M+D(3QACwtPIXGiu8*B4V`T6wzK*Kdi#AwI0K6=QCe?%n+|Zkdw$p
zU2L$|Nbvx{w#E@xG}sJc9C@qniemi@t8bI2lNVT^RZc#{>`NcJMa%u!)c%(;D%PQx
z?*npZSF|EEb2p%**QOvrF0B5us}Vu$_cMv|Om+b_(0&&s{iMceL7k}l`2FV);VuX7
z^zQ<^e4U3#Sa^HMABDNDZ5e(27cMS>r?F!%Sj-OH2)J9x+4NN=!596Z;Z;z5Ne
zuD@kZlP%{qTv|AY0^1NAtHvHZZ`yu7sap$GjjoX=y3Wjl9qY#%#Leszl`itFdq7k}
zLfFr(h6X(XC_}5dbwLlb?7CWaQ0!3LQ`7I+gf^2-pAhw~6BHHW3;gAV5?#4q#_xuU
zf@~nl^?4c_d!SS%h{)ZG6EhygWN%v`&1}ZW(SBEuy*iBWxGd%=xaee0zWH8xrK{OE
zdKe8*sw=KO&eSc+qD)*C^q+-I>aPF@1uiCl3xHIZOfMWwGlBgHTdGZD0f6c>dNWYo
z>h^%NOPhqnkFaOX%PQTO1j*3rjVg!2a7dNo#cGMlB_R2{zi&0=ld^NYpRTR=+1#@i
z!s0i>B343+wQ^);JHhvMpGW%GzxpFBEnR*r8=~FfF0L|9o!xfH)M*ru%k#s*9+_T~
zXGawP_9j{R${*1hHRb
zjb{zf9Q9c%0FF5~e01r@e~+Ts+L9SWF-44$fI!)2B2J|+kY=u+xLT`0h$mGma(hIb
zUco(9u)O=_ZwGO1aN{71M9^GPZj)LAJpNTQq!M}e{)*eU@=K8S0YM_hzc%^*wgZbr
zgjc&aU3q!%x!lGbdBdq0vr}!hfxF|wnIHFqdT6e?X9*oRSFb}3Of@v0GIkmdC{YAF
z>CQ0cly)8pe$nYXi>RX92Tt6JNC)UDa;wEoAc+)3Fe6GMFP^d6Tbt*-nwek3<);dH
z*+knqYRuqC!YZy4Z1=+ee=ZF@L&f@Ng8_KIq%qAf8uG4Fk@n~|_B?b(wq1S{g(;h`
z&W)q98H>v#=Cd%4V3VXw>oDgoCcSNjATPm~LXUL!~n+cg;?eqf5`0eBe3MB2!L
z>_Yne6Fb{XI<9IB@`1n{pM1&;?OUhT1cz~`+IQyvRK@+9UQ8~?TW4SZdWEFhGo5TJ
zo3s_pXHhhAjRHswg%IVhrqxUuiSgv_yBZgvD`@>L0mXE^6^T&=<(3RJUV}jwhT9`x
z&j6+imd_h06jMcgtP!p8(*!GAfUVXB&n5Dg*0ZMzl-ytSq@-JfM@*5*zSE8cOb`Ro5%a4Qw=XwbM8nu|pb38msHN>t9X_@Jkx^zp}1do=2D|$BQwi4pEocyZECKkW7k$3*r=1f
z`OayvqIhW+z&}-@O)pA!Q1Xj=sN>kB;ka(Ry}UzxSF7~mHDcg4{tz|~%j%f|qrkBn
z$?y)x@*#b-f>yapV#$OxTW$^20*=?sv@@O161@Tw}6^{6~v
zxkTUc&Q*RmHR{HKlX>8)473Bn&;}u^i)?TT+1uIG&s9C@5;?qYmq1gU^QC(Kq@fH^
z89#LUqUL;nis6-PAa7uQ9z}~`-)lC2Gr4vVHa{!7J3l*bS**|uP~Zb^M0qx*ehz`}
zz~c>mo{#QxMTUUvF??6(=9Jo2331eTqml0H=nMzgub%Y&s|#iG(v=bSXyScR6ybR;$LZWXvm9zdVjNc3&zsw*Pa$SoiZOeplT8FE
zm>DdU#ky}jSv!$Y;_bE4l62f#AHLYNzOK?67c49+bi8qRn#1%o@YxBxc<9I)$inpU
zZjP>`z6F@`I@GTg@m5z)$x@c9v@8(K=941=|3fHZfiIhOSyJU^P^cSiD
z^W(qltG~s%@n#zZ=xe3dUP~IT&u2c6KJ++^_Ca;Cf1zaMGLmDG++!u10O@n9`d)>$Akf@VaRIQdFf9qw7ymf7PX;CzySqq=0TOYSM1K
zjRU=|xt^ICCR(<;prc-@yuBJ%N*eyKOns>^H0Y}Qq#dRf~Q;R
z2OK)hjiH&)cQ406<#BWd3jnB?URBEV9*E_e{5}&INWUb2qq^i;)^2b~b<*Q$<{PSP
z`(96-@(3DvHI}^$evaI^=m2-mFS`p92kpdC6nmN#1H>rYr=?X#$kj2EX%hm01}Q+9
zX>17(-y=sI->GsX1ll@TX;-PN3fOBk*`B+_{s-aN;#19BvBo`97Z)UAAV>a?(AjV~
z%)`G@Cs!kLps`yI(ghN_SbdOM_!x
z0c2d+bV{5CDsM%*?*b;$r|!+hr5)^5NKW#MgKe~YT{H75pcqX*p%*L>E6A~xLp9G0
zj@d!lEl-zc?sScj0O=Qsct_}3WJLejD5$yMtujnSBhA??<;`LdekHGP#J~=
zA&9-Kn*BP<2!2Sbn8KQ*1k82#;>7N)b@_DM?mz{Pj;Il)!o>nLGfQNZfS+0ytWZP6
z0{jqo(vlRxf?OgdrV|2*1s3xIci$gc8&j&Q#C8)0EoU{G|IEj#4*x9|J|9_gH=rPV
z;Hk~_^VpiqZGA(}btz@B%qEf5ecO%XarVeZ=X)FFAwXlytVFWC@!RO=i^o!<<(R}C
zQ9G%r-G5XG_=Cx(Z$aJ+4JEAb3bIJ{-3i^@ma`wRbcn`@$t288!Om0V2rVPPBERB`tlf*z;9bA9JEPR~dt${-i+m3Y;oAi1jN!O2(E1A#JVUUzz9F-X)&5
zxg*#q#e?@m({XY;wC}SGMQeR)uE-w}oG*#gqREonyNu2N883B}oc$YvS^t!<)5p#{
zZ7)1X=?`#OCLFVpx=tbAmGi`%6F2>#(7z$l^+zH@id*FNv!qGOa)0U1V`bfSnZ~ww
zi4mz`IB4^H`kHR#5Jp=deB8O|JX>tI+LuwJKthHjUFG@YjhhOfLcewr;n`D#hyT9;
z$PdT?@WL=v25%~G@pdkzJKZX1sJq}{vO-zgXg1rTE8Eea
zpgqF+q<%GEk$`g~Q=z!c%e2GX>fk5z!-3s=)WOe)jWJWU3*DAQ{rLYKgEW;oo^Vz0u!ZHbnEOn!aA
zCMj(I@k0Xd^Y<7alHx)qJ+{TQ-;lc+1nJq
z^z@@lDc|mpgdR3Jjv@ojk=DYBvpdp1;XB%P(IP|G8IsjgIeNRanf-sce6UU1736Ix
z#}ytCKBR#IQ2gttIb!5g?Q#_!@sn4WTShpr%w(M9SmR#_FQO6&K8c~wCoJ1(iyvej
zHYo}C{(P7o^1@>n0pMk^1CH@885jT+WiI%itdj?H%J>6ZORjw2pDNwej>;8O=*O>O
zT!CBHjEd$jIh>tCJH+DjF4Hl%9e>4KP3`oDjZG$Z#9fx)6uP)#9L8c?fIRJo>c4H6
zw6hx?GGVq@x`-Y>=tx(hYJRI1vmNcb;+uc@qa`l@Hta29JjXRvDM)ydQia~d*O6%G`6?Bv*VNhQ^mYrwB5h>LA@4}IS5;gE
z#Wov*2JJ|W{-7Mqxx|_tD!jt3dI|>b0xS|5cEs&A$aC)b_FTgPj?xIWFD9!r$ZnRP
zWsCzv*51$Z#3$?t8=03(4wbHNU;0;|EY#EPNT^OWy9tA-uNrc8q+RqXq=bh6aCJ(v
z*243r-0%O?Re!gY_y+gcx>6+^eZ|rvB|9j;UC@})-zuHryL{3nmLqitIggyqKI%vf
zi47NeOb6#Q5DZa~^_Sp7Lit
z3E!Ayl3c=O%;`L)Ex(wAVBUPlxFf@+v-7K%)kOMLO&sSOoBjufi6nONXA^KLT}j2Z
z%FOtv@$9E_-2T_|p$uoUrAPK(IhMi;Jkk$)sR;#`ibYa<jRk7ktCy@VIc{ALfK~X
zr|CkLf7B89Ln!>tOI9-KUuuFEk!dLcb2yn;p@4NlRe!I)AGB(=RZU~tU8%zfR{SFm
zby~XD(R{*Cf?eUG?w=Ti-Z}Uafl1rQ{{s%!eRtES+y=%cuDB%kF*p3BOM@
z1}O!rqFQu%-zC^1l+aPUV38O|QXdfl`G0yzrc%5+=lKR~ZW`OKDGXc=H?9Lv3byGGG&SC3yqVM(}CdzD4PNQFgND4KV01tRPNFa?{fn>IkxWo+#C4gzlR#(8c^Bm
zH4)7hTpL9ey(*Wq?#aPpsZ6*AVDBQQDks*EF&wne<rzJpZ><)_&F%296Hc>v
zK|;6;D^i%Bz|0qff(w+zfZ`JQwm?(UJKJJuh9<=A@1@@bzu>_IKjHzv%djBt
z2cB&C|0mz$|5uqgU*ht!be@rYJL`$1#l}6%M$N~X;R`JMP04%8o}3XHG|Zywe&gnDnK)BXw@X;i%zb?E+R)VU}O+JNjyF0A3{BbXy)G2exn{%*H*
zV*HqD5om!(qo8JlH|Lm8Zm+s$>)|=^`6Golby}WTD95h*Q?h*PRwYxCnWN*Jucgff{GJIrEY69|=@#@y6Gf&~0?EI}1K@%D%
zFKSs`>@iW`O|8{VykAxPjq67{$7K|1L;M-Bc|O*C#^-rirl~e_oRyf`&3>&tgZOEs
z`4fDBjefMh3)v5_TpR0e@GX?ox9ogVJHQ(Gi
zJ|M?Qyk%3%-$T-fu}kT3#=LfzU$M?PYR_^cU|_y9*|*d4A6_*TS09BcuVQ9gIptIV
zn%`$~dWOfPGQ6HSia#`aX5$#TuUt5sZ6T=kpPbIA$kz=#KStRXN)C=l&9-Y%C{}8J
zVrr+jR4r3_sA^j}nK1_%r@$6G?VplYql;J$Uy7WT&PM1G8lhI-af2}J8GkUT6^C^@
zjnyO%x3h9oW6MT*9oawm27Ir#?0muf65
zvxsYrn?hZ@f7+jtUOWcR?kIFJpq#j=MrORmISzn5n7C#+BWA#+pXDQ0UvKV2gm}zY5h1J%m1f_n
zeV@%9+x;#?Js>0?AI*`@E)wDIim(BDsOcqqovwzwC#c)h2w$b^Nr}tIHK*7{O=!UJ
zxeX&S{*Hq{kT>;?UKj@vB@F4NbU5mXR5pOt^AnB9a=yl{3TLG*IHx
zp$$g8nEKT;2lMK%gklfw(S*b33*ND&N+!axN&BeBX$|zAQnjqYEK&6hU1J95?0IK~
zW=zBhFQ?~%GTs}pcq(ZtsFXps+u>VeK5H4UVu$W-b
zUB2yj?_wL~I>aSz^K@atY_M9Xg8p%sbgWI_)0z8W0}0jImZkWx0);c4n{Ppd+01Rm
z+kTG&n0{x-VYi2AV8U;)a2uHr0tl+(i=d8Hw-uI}l~2P14)}EFc4am0BJ1n|lKOfa
zEAf0=Z3dIiOY!Rq#56jYhEd|#>?gLUaN?uIL4%NKtbdkQ_}ohXXGA~=VYznj!t#Y537u}~a45@jHt8fD
zf1HJ&MvEQAt_kya;t@|B1bT4581
z%<*C;`X?YZsSF_+o*3b62aZL9P8T9LocW)z@#1Cq_J3;u{@#M6`8zyK!oOe`!jKCm
z=!CG@m0sXaH>k>~4Zan%m`?i+I1lK~FMXjpV}FoE&O2fmJ=@|<$DzS^<}*;RsjSOh
zJN;RA-A$K0Ep@d+$e?tWVKH#n%{NZr7j}!gEyvqM&a&S`DV8|gEC%6u(_SNTQ9Y!r
z_;mn+{Z{>yr)33_9w{($`LO%tAz8>A2^k_3`GwSH48L3b%i$aK%ZHPLndNULb(GO*9lv@n6ew@IDL$%9cu%4+PCur6U9Yq?&D+8d6fkj6b>miVM^^_ZGF>r@RPB
zGQ_Gk4~M$JtzQQexPacalDv8oX&j@lPOeCoPl_LHkoA?Rj=KC=D
z3N9n~&isMeQvMD5($reZd{o1bG+Nt8e$}g(vGHk`oD(Qx;;;^&Q)7>LA&D=h4tc
zztnf1UIM}SvMmn7@i5qmuu81ZYE%vKHq~?HrTjm1y>(cW?-uu~2m&JAt)PH(cc=&`
zDUC>XGjt3g-6bGMN|$sE42^^|2n@&&L+8*8&3V}GdH3GGeXjGTcwKWn&mHSt>wACK
z=K+4=zaF?sp8#->O6|a1#%jzJfW{^qd-p|JDV6lzK0%)u=2Ss98`%}RnPNiklidI=
zS-;rj<>ly#f-Yc1YrH}GzzCP%PkXFgVm4{Xz7`$J&Fc^S+*Gy`)HFBWWsv{%5dQ;d
z&e`%Ecq23XP$t_{f2)*LxSHP#cm{V@H0Ub7OwqMN>Nmb?APBIReMa4-@74x1BW?bvq_1dS
zr*af$PpkW3t-cET8DTz{V!W4DU#wM594Fs+s_egK|kMYGXbkX8BGrY+YZf61*NF&F^JBq?C0iAB@3r??)cGp
z>?yqOeOT(91Wg#!w>P;|-SyL@T!@U0M@cawlyUyoYZCC2sQ5i%z`9^2e}}d6AD#E3qC%Q&M)vpPCk;5kSuir&0Rfr5E7s(yL)Clk7su!
zN2flLL9A93p+QMlY==%H2eDOM1zI;E<5uadv*lzPu1w&v*6$j@_S^wne}4BXV!r)p
zThmw8zyFBs58FO^B&gDEC%IP|y_DeLOSu0CB~zB?H&+x!vp)f=3S(Zux5G-}rCmK_
zjV_}Uly4x@a8C;C=B3ir`h-KCiuJ$u2apm{p|h$%YQs@{qY{{vcwUrj-?-$$T1A&9
zftH2L#)Z)Pwt)}K+0mo2K9Pwk5~M-&;EWV&ppE_drYPG7O(sO<^P`9L0P_Lx9dr;O
z7e3|dg7W%jIrAGgrZ2#}>}stgxpRZh;HR7JT)CO%JReJ*YCLPrhIJ%eYoSYGqiDTU
z|7GYfLU%<}{DP#U=*v_n9NVv&r%cSJ>==A(VV>?;TYp%(kT>_9n$cafDHujK>1XDf
zrZzC<{J;L!XVCk%p?6;;6kdm{aC>#VtNas2=OoN2x$i@F=rvj0w*(eW8#>!A_=SNYaxbys%Vf68d
zi9vaB5@gI-CqhrDMUxaf&f>U;kaH%aAJg9%VKNbXQevCyd-w
zXEge1SZ-*H<0F?CKYZX~%T3~~)A?V+O+RZuBncAiz8ii#B1Mlcb?{)iLO&|g
zV}VkR&X<3|_sZUNbB54gYK`RSof(D7^W){(QDa0`h#n-{`O+F_&w9hc&OWrQQ^I>6
zM4tx~(;#(S#c<+QyXKPHGmE|^cunKJv&f}5O}MN0sSfRxI@fSiAo+RXX8~`fY5rzH_@6J=G0pHXp(`^cK&HC(j^lGP2`&{f5P!j!
ze5Lm`ocapY7ks$Fee{R$e5Vt0g$t859240@E?_(TOR4b;z%K{@t^9mCEmq^xZ{O?@
z0QZoLLK7DI1+MDE_*4q;fN+t`DKcEG(_r{^XFK
zd{`hR1NjsA|Je^^=ryn4-J)9Mx&pg@%>dtZgSgYA_nv5dwe<>9(zhf3*ztcIaSa%vSPBHrCdy%vY-I(cHV43!-jupQE7M@%AiH^;s-2m
zhnj5H_%5a@D6a?!{J=ktKn`p>Ttpj3pPW7LUmt&CzxePf4;`}6qHiNJ;K21$B2~FJ
z+JZdUE5f)giQoA9qKvGudif6FNwb~hQLCkr{44d}+FZDCSQ<8H4Iph5CEz@Gg(!cd
zA@Qnm!#QQt5r!zaX58!0+*jrLGl|u&Ht=}^d>{wJA^BE3YtTP)L!&a{XNb0`1oa@`
z?`DawlR(;EWg0A>xa_~sBs;Py$g7@ezG*6QY)Q*ZK&mZ3Z9QmHSo1&14=@99MUO>;
z90;|a)4wpo%kYygCN)C@fy*RL)ZrgS`BW0@W;-p
zwQh#L1(GQkH_->{P9@&23>#``ABghT*Kiq7(GwQK(UU-T_08{Jb+0vDJH$IL7dQv0
zUUNQy;OQhk+@?uo8_?Htpc~qT6**a$pFc8`oSF^zOM)z+{pU$~PT(As!`DxPLwcL!S?XP>_%B-ENV|H`ktUixWdR$7EhN7t7sd9~Nb4
z^XfEKBu0{%YxPf>E8{b@!38FMNB9KROBM{Mk183p!zCkB?2bm+>wYa6&tJ0X2$uVhRROGm!(>QAR$)5EU6UCF=
zJ-1eABuaT!;pcEbg
zqT~ucE}`KwW1Lwt)$XajVlZ@Pcqt=!m#j9RvCI17?>|G`Ytk3Ml6s;n
ziu)Z)g9$Cd88UY?hq)Bt#-$lRIf@kJ6xe;QsZx~^dD*i+zGW6Y<2j3o*!lp`E;N@n
zeU7G?@VX8}J}Po^pn>??6Ev1`xcut(;HR)6ZxKZ6Q=Y%ZWwNxr&uMcd2M`9NDKBEa
z|IY%VybOayVNgy{j0!I^T3>YnxXnc*d|e
zdn&;K3l{ablyAY|;lgNT
zEPoo6j%xQ{H6P*+HY>TxG7+~WU2
z$aJb7OtpF8i2P}LC~qzqTw|PbuK)r8DekpiFf`h7cixnq)5b5u=!$?ZOf1TOQoZ*D
zG4d7AU6Z&v-|YOI&v+tf%$4^iO&Ki8Nt@|IVW4(4&?LFBvB6lRTi<^%Q$>mapZ-iS
zNd)|8WXGNFwIK;Px#{&x_4#IZ+&!>>W&X)sc~S6HwBFsAMslBisUw{W@`DTI+Cr-W
zx!=I)c|_tl!EU!Y?r(=Re;6ZtxZlHGqH5D?FC3Q$IorB4{+
zxT5-TRkXXixxZIa&FFxDPwav7&XdyPEtY*JHae~;Uw1mZKLwmmTC`1$c&}}G
zLwMRD95z5GWvR7-pk@h*Jcn#SkBG;$Pp5
z%cYS13#A@2!nY2lUPap!8gp>~i;!#|x7C=(Y>{z1l$+O%jFY0f(>=_hZ>4RuxWP@|
zbTg@#b9Myv01gUk!rj;;-a-(*7~Yf*P1-CBgKOVMqo`15s+DYR1aDStniyQ
z1XiI>je`42r8U|GB(rt*P_d!rVTLP)m2IK9xeSw)hV*xka_Kj(YybO6O*q9|4krZN!c_HIAulT(2Xa#fvtkpGFQbRFEcr
z|9dbAefu3A)A8qen;6}C+m)%5Fr&XoCD#{Dy`L
zKIL0(em=w}D2^D}{8R-aTd5h;_~-WwA+j(wMf+$TqdH+TyN`+!Y5$3y#Bz
z^5!zv;O5e3{6YnfYACPgRQq&$s`_|D%yjl;vJyg+(SWoA%A0T;pb#CfMdv&$9Uz8CTgfD^q
z$N@^rruB2FtJS>ho9Fd-fOvD(lcUA(VBIa269{ZuWY}t=*eVHdrgWruAmNZCw@ss{|Gk=x9=ite}$@(^}
z;2{gVrs?{KUA0Uj95|eLnCr=8Vt=!*Brmu|2fDobJa$_q$Mw
zH~87Um)5T#wY<#P&QHz&auz;-bMyK*R{!o<8G2VR)W&RUqQG;d9>#4kx%o5STw;vp3W#hSw+Me>$MvyT3q*db{jY&p0lmtZyU&3hm9!&1eC2>D6h5s-HN
zk(Lp-&9|%`F~1?htZ(ezVB@$W(>aQxtS`+V6Ygo?jRz|10zZpZhaIC#UD~k%N>a
zX~irW1gU?>NiF9PJT=`O-i(X!t;w7-y3
zqMLFKt{L6kvwsQqnDh03gN~JSpziwbS=0E}M=po~d=i~hIM3(Go7)
z4ENqG0QUG20Lw*J;xIw8{k^ar!@a4sDs)2xq4k{$V2>_;$@2$x-KaCM{a+i5awaFm
zs^UjCxe8o82H^dWqHIs1dn}jQ%~0)VS7~w?xBIS6+xVaT%N>Z(^+}>i^z&d~{5M;0
zxR!jqE_zZRAWQpeRCgTY8QeVO@#f!hv9`-21w{(-+u<*Ehq>P2uhr0;x-zF~jk_QF
zNmYSu4s&Hbl*O`d$Urvyp1RAI5N$1F8!WhK;VRS@F~DdEk>cbTNjuAO3Wc^J%jWtm
zE}iNe{I0>wX9NzL&o
z5M*p^50`s94`(;H&3VO6ny$H|A#6EFVPQ@2OyB4G^cAd$BuBRSAeh>~jOuZl9%@aZ
za_)OyrrJP~e`v|$e@30x=)wUL%9uH;!ENtrrB#`uAFSV^?f!kd>5HZYZIG?Md-`r<
zm5ca~$@m`oWjwZr_rE+wye~VPt!r6OK;ERtMY@)V<&kVCk(b2l{@8=>c0O@a@#|;R
z?X%Z`UHYsZR~?gk?;*83lRrNtZisob{;hY6vvq8F$R%-yCi_(7TOhklV0xofEGC^7
zy8p=cAOK&**eJ_+W;I8)-+F@wpoe<7_9n`y$>BgxY}T2{ucB@q^9i%tS_Me0t&TV4
zspwM2Bd3F$u-xKef`S5Te!EEmK7d(N*|M5qY~CF#mUydux)+tqroA)_Tg1vwG8Q?;
z2PjZ9s77L^VNKtb;V(5DcFmDkW^u1GK!yNjs5h1_2GE!A2fuL)>OKhJgnPYcB3&~m
zb#6Eg8WJcO88M76F`j(BA03*)DqD~|e$-Odf_&SBgBYoF$}dti-naI8Eda!L$8uFA
zV1WW<=Kn1}f7lT9|9)c)HDHn063yQccqb}^U$XqxXjn~y`hv=t$@g*6Hzo_lVDPKR
zCyg)KLhpU^uhB^WO<5roq{8!oNPLF>tCpfX<+t7rv9U=4q*@SItydYt^v(g>!%K%1
zcC-*svQ5Pm7V~XOKl*L{SJf{4wj%nLQraKQDho~&+WbU2^7CQf-tycUy0n39^RQsq
z*0aB0$EOe!U5ntV`O{>~sNPfN?PGOiG15$^TQ}ITeUEf!vWahW*l$ir=y+0K_t_MX
zCGFM>a&FF4GscSCdW!Q{5s-1}5U|0i1stLQVi`48`vHSrI?tnYLHn*XZG(ue+^%2u
zr>Yj@V*1u65lE;$jL;x|W_GGbh4|~a@(4Q2e58Bf%0bvq6~K)c$!a`aAL(&Xk?P)$
zvTGMd7z7n05q4#?qItm!K&XiUw8a^uW7$BfWk76$Ll_np#UKM}^z$yP`9Xr*jY9
zj{~NYERRw^2l>y?12;1IXkr5IntwaI7qEJ2r-yTWffLUi6O9_BCyV)>DBab%UBN&l
zLdYWYP@*d{6FY$vQGpGbe#5mE#RTUYUUZnuOA;AuT%MAv`IUDpD6O018AT+to>@!l=}!j9^ri)M0+&0i192z~@in
zf)ge{v56(j_8V|ftxe+HEdY*j{wvx?kC&P(vw8q(<9L-Sqn^h~Oe(+A+hWk2@3f-N
z#Ra+1PK`_ynec8nW0moZXv_IEj<|PbL6Y%}C!%TNe4pKXI6;m+B&*dMktV5ad3x~M
zdC^MaO)`@aj>lO!fqS;k?W`rmLo^c9N`v7+o`l<(rOx7Z7vBvxJ9V>>3=xX+vF=YIdexdcTATh?dyu_vw>s5uDXCzY
zSwsKh;Z#1bV4TnXpGq!KkG+9GZp0`a8?A^5`o3vgmc%%!+4k_b&YI$SX5}99c76qL
zV)y{M(ftKUdse2DnhqocwkQ9pn_>17iBwdvWr^86LDaGR;&HbmU8EAV&{6Yukkmz;
zdbN4fc_{e`FKV>O^KRaQ+buQ>y7zy*07oRvvk@Kc(j8<-fce338ghKf{aE0_DTAS}M
z^5kNA?^=(^EQZc$-FBv=^R&PWmG1UG^_z&w*6%`bsRftE81Z^K!jPT)35E#R@4SQY
za8;dF3;SFiSpT4Ox;yQ7?$*fWgt1UQIXgxvsmbM$z`6Sgza6|vQdQ!FCX&?M%CHf(4R{zcU&{b7AU8dN+>0q|hzu5b*!n>qpuTWWN
zFtt8aem)MQ-8HXu4X{j;efP%Bff;)*yL1yLiGIa2_}TF~eZy{1mx~kbu^sxUNm`Y4
zuaU5z;;0E6_M=ezVP$?xip5Zrk!8u#5Ee@=m8#E#NCQbvpc>0)QPUgE+Is;XY<*4HvRVFWPmlZ
zYv^#%czkz}rAYgtFsb4n=rI=NfFo=a$z7?hb(gfd?PPJlgez5#uEz5ySu;$8eUjlg
zKDDx+UmJOS|F*FqLM3hId$Q$TmGlworDmBHN1el;$2+jf4X?miA!{MW1-n*z1O^EGFXcNjfOLl0?+$nf=GC^s`~(-Rob6Ax_Nf#uY<_PcVoqo
zx*DR572dHPj@cUPPL{A^M`r9T2OJMSk#^aB@aMYpyS@H_v7C}MgY&5}&29r)59vie
zZBvDEi+g$F;A)k}YSEnjdT&*|+IYp{TG*P^BIb&UO{=_@_K}0!U?7^$$^P^6wYa9#
zYZsr}vtEp0&{;vsN;ENEw~Bh6E&iN7R*oJ_1JtC0E)TROkE$Z@WYETCI<+02@fTmKQo8mEKRi~?fHcEFd<)G4%wT5H3&_NPd*#N=<~Ouy+XZnzmq|a!iK3KgcC9h-WE4Z*
z5OTbzq0!=A&bi$k0+r-ZYy_I*wZ7RT_Tr8?jLsj#`5Odn9cv*@EgzdK2I2J-r09KV
z!qGB17c%8OJ#HM&L-}pGOp#JtR{9%Xj}mhq7K!N8I!K$*Z%>v-@o_bwX&bsy-QIL5
zocqD8*!3HpdU;(Kjh{BL^cKcYkNLpJgjatKavwH^oZqg%cr4c)KT%R^zWrx-<$d{4
z5|MR1ra@*rl7DetEwwzzC5dWo+V{Ic1h;1f&p9-G!Yb8x0@UbltgRymPbJT{EJ0Ts
zLV4z8)56CYF87?o^9zMiA5r}Q#+fNEMbWU@5s?)CbhrK}uEz(#`uFusgWW>3ZoN%j
zF(mx%XS>b|G3dAY@1OG?%ST3CSae7~qL*&>H~7s62k$|-&h7>uQk)-1U#p$jKhVmS
z6r5JJ{t!XlI=XG{R)fW^<>+v_T|IvHeMBUg8EuX@ATk<+QX6>xi4o{_J}CsjrkW2+
z-WAZ6l&)FBTsoj%$7aU&42IPpSL1uSeP-md_4CKEJ&@c49qncta2oG6&bPsCO;}Y(m(q_Qf@FK0IhTKGz
z`E+|d=Z^s`={@$Y4ko5l2*s^_}veS*pu|2QYZUsGZw$w6w@ztW|py#t7*Y`{ifKl=+MPW8;<
z@fDT4-Ivp8bfDGi%KUu_B=fjn_f0JKgT-h{z5@j2ejWre^Nf%Q{t3*S?ak^AC=-*#
z31Jt<>&u3ft&hJ2t^@SgdyUL>e>}0M<>DmKn1auSBxRS~q)l!#%joMJ%+P<$_vX4b
zLHIQK&mpXxE;@pk>ELL05ltSDaV)cZ@pS9#B9_0JC}#3v0K|NrIAo<^yX1mDiq>Eh
zAmaA7M_dh#O6AB^8d$+9Xn~xheUQ7>6`OE|2P;(sep7kJjP!jkPnlCC0Y!#=6b*s0
z?6toICfu{0IapE}+9a36%XZ1WI1d
ziT`sNxEmVP8{anw7Hika4;(jdCVaUsEG)nR$KaQLmDtmx&R6-^y;S`XRM6&&gMM=k
zDkp4ZawV>}c<~z|?Cu8tPdjB~OqrI6^r6MiPwfwDZ}rI1ZCka8?BFysN4qjBRpOZZTGUoDG74urSBCEdx}`jn)<{C%ddgaOcdbAs
z_g?dVI(E#^8yY}{fXc?Cl)B_rA>G$HR$!}5hgbl&-+(3-ReFB}#F+rRE67o)WXo><
zsH|`ZxDh$;G?O5Qi0Gr2j)CjxIcd@>Qb^%3%0<6_0jJ(?W>c0rW^X}_%!x;~>(?{&vewF1Jw98j*eHG=6=FCUdv_I_v1{E|&(-{FxR
z*U^t}&|NtExe9bB&u&;Rsnt+hAz%Z?@_^6M@BQ72*r0EvHf2)jt4ZEb72}G@+@9{C
zvqyCV&hk~8_)@)mX}!9UygdF!#boO*!}UQA$me97dtfbVO{8%9A6s=9`+lRcs4xxc
zyDd;
zh})%B)Y1f7#nK{0rm(R%*J7(2eg+7jATQ2)9EN+GMgkTKs4{Jx!#tHzxy%x~k#v-%A2MUplw=r0%Vj<)9-Y~uiEZMq{7pa;6o
zOI0GH8W%iiUDk&XFj~LW1gmyS%$Cz$>hp~$?UhrQLC-z7jdW1^$BF^%;gJFtqO|O4
z8MxE@;;PeuH+wp7daE6Rd;3=Kr@z6WJ^ilml{r&1by#J6
zxINjFAN-IRgz!5qZ_LThr-&lw_}Z#sJp8ww;&MC;n9$82{3QUk+_AwZ*)uA2^J{Rw
z9>^iAUB+W8nSPC)>l^lAxEU<%j=ZiTv>_oP_j^M)L_nCi+|4%(1shUr4aFextya9%smftWco?
zWP2)Hj8E)%*GzgO3%me)4#33Gs+b9qxRbGL_k^4?dV&6|XSCKL*Z%4Z;(kgcXs^Q+
zTB%l0+Mkr2+q_#Y0{_6z-09nk`U7IrjVSdwAD0^kNRx_L{MZuSXI(MHED0Bq*30NV
zGwtn0@0|Gg+V1Fn|L(o^Wai=!GDW)xd~JY@tg$L33?zYTevqIJgwkk}iD^cE)nc%6
z{o2~}XP!VhZ`x3RJ_rNlQ|;^ZRq+e<%yT0Gcmaj-o_Q!VdH3{bPEi@2XL+0&x8{PQ
zlGpygZ>1g`%?+rsM2ZNqkj0ZqA#zXNpA3F%L=6UmjnDg^d#+3&XWUlBIcevEhIe2l
zH(G!C^fE>4Gn)l?e7ChB-h2<|v~MGO${H9iwOWQN%G0KL>$dnn=hwJ2i>-K4L)WC(
z*byTck#pP+nL5ychEpRn;JLU1^cdrbJ2Qz$;c$tXRZycgfBoEG<&W2$Xy{eZmx&R7-b2(FDe$5-gkU$za4GNl7|W+1QRKeQ1uw-d|z2YhkjxY8F_9
z0RAs@y@ZZdVz~JOXuLK_f=tSBnaU@j`j0&nxEZdEpUHQVk9@v8=WD?5DEJ86)f((%
zZiV)?>G4mIqS!|xu(=;@V5Q<9U87#S(RV5?!%)Iib}tZr+ys}QzN8nZ)W66wNU7Xn
zFt6irTZW3t%!Q~=T?vp@TZ93hY~W`r_wi}rF`Ib++@B5#Q#s*Ub_tr^G<>xC3mGn9
zq4cV-P}*6Ub8PE~nbFu?sG^X%X&`De`H6$Py}h-%0lD~GJNduUQ~8zi2(!w3$|OV7
zO^{7}qv3v9{xe6<`p})3AeItpy;)1j-?v*AyvP$0$P+S>g1t|vfvfnn8EPUEO%T`T
ziVT)C`L6nu#zrfZJQV)i4J|95&T+s%HhIWjO;-6CU|7sSn
z-F<@m{ri>Y@t(l}2=!V`jc9Z6%g^!uGkJe;(n;j6t7K*jI^r22~F5;
zzSU&L@ZkBz{Vq+G-vh#C$%~EbWhicbPf6sFzr^)eU_>4Kl9(L=J?dR+;(5rl6#s<;
z&d8!By8&vXWRuQq9-BN><+gFs;4tVt$s$0I+AomsF}7=2TDn5mf>`i8${*q&@hZ&(
zsp6Mz4}ahv%EMAX*F!Ji!X5-r!1I#EapkmP5⪚!EPMO(2s|1gLCm$o4@uFs$FPF
zOEF{WRwjz%ZDUH-o47`A;|=5u1bjm>?Km4@|9n8sxYlJToA1%-_9ThEcIA9{LY~y<
z2sga%almxB?vKx~5zq6SbwSGE3<-KZ|FkgFf*0|A>wNcfh1msTX@~#bYvM2N_gdka
zUH;Pu_2e>*&hm62LSoZP4?$7_+^0{7az$umB1vRU(*$0B+TR$-l3DGE>cUd5*A+OF
zz+UO>iCVdyt$oYQrP&-^oRakITWFqK>@&`hqs65ctAFmc;#>#`yCe)-WtEa%X_KWe
zGWMWWQ<(`{^e&E;eOBv=E>#NTle>URgoLy-w7V}4NI29qo^E{ZAkQ@P!B#KwVf(99
zp-0&lOGmm-ii;=P8HmwCWpz^^pF-wv7ChHT=(id!j8B`DmBnJ1gXXZ%5&@af84t@i
z;qO~Di_IMB-W!#MJ_rzZf|OZ5?A3Eq_k%LkSCYI&d_Ijz)RU!3*
zGB~|cv60h9b3HR%hy_BhI&8%~^!RTuj-FZ;+I?(#ec=%gw&s6gg{}k=GW%K9)s)Qc
z;muxxePXj4{q+?iPI(&q@DnjS8OJnhWdsi;K2!Aqg4(;n6O}^IDAR~UfA!m4)
z>yQ6yxhJ>6D<~x62X*1`PNYo)x+(zvj);_kR#hguW@%fvJ@wfHFvXU4r<
zO79hi{CHlF!84oWKRNzs{cNdtb*4}xTC;>CVRHEO?;{985s-n69I=EzXKHFCXz}s!
z??1tyB7X70$H#}jX&q3lbwi4GV6_rV9%g9xya6pla9kPnf~ATlJ7=^=3srs_#b^^7
z?H#k
z08f@osXja@ArR`?Q--0#f#JcZMFw~AO24?c%2Q0%wbH7ju}R_zbIG|vXm{a1Cr2)O
zPb%q&cnnyl$cG#i@OTd~AFydW0lPc&04A9LHtoER&2P;;ACt<->azN@wt}`+pYvEW
zYC}y|#~#kKSm1V{9J_zYr2mrpSPQON%F=f}yV`0ph;9k?dlZT=Sv4=q1ln(8a@a0)
zTt9yNF(uf3Ee>d_(?t6p0wf(RM)G+7b24;@potK}Ge>Q}5bev?Y|b$u_p7IqZFY3Z5G4xjo`dB%
z#FXMSk>YShlN)-|%{R9PjvZiK9}L4Fej-HXrPJ>&YRnO94}ZOU!6?d$rm8ybc^4$P
zIeqbo{mxC$eRnR*--BL88DcI28vX56Q{#jTJAP*u+>-V4dNTDduh6{Haw;mFT?519^r5I}?i
z!A!dR)~qetImX6J5p%MkV%4z0B^xtng;l9n$J2W4$F_fq6RgPH4}P34w0M^OMpU%1
zFaH;tTK6c^J37R!U
z5uPx#@aF4UX=;m45pbA3xvrslB|YH>Dk=S28MLd1ciXL>qg8F=c0$LEH7YSpK|Sr*
z5hz8*An}qEk{;aE6G))*lZL9%n?{K5iP3(nKZ4L-lSHKk^`i?0!#v%Ab;;46$;uji
zx)wi{qxoBlYtE+KSj_wgZ9M4l9>Vq{ar6Ni;`lim;82
z!AqLu#@EUi9w}{4P*%hUv&^#eIk^5r!hUDO^WA>Q*
zW0RhY`1~F&T=BT4BIeErwtm{*-(Mv*SW^&{V{jw7c_xn!S9I@&TPG6e)sQ^Kr43kJ
zb^W;rCc4rdl^)dM7|)T$ycR#?(~FJh>LgA{$jJE0i1rCM4_p{n`*j!#hC`-9t$b_Rh(enmlY>
zRJ+GnKe!qA{OW|kZ$}vOABsGqO|h#4-kMg)ZhfA{baek$(Aw_#t+G*EV5NYJ(RA~p
zN9AkZQ+#?cr8A~MyzDl+(=}c}Dk(7NtPQK!XQtT?J*RN#oL>!wMVizuWgY8lR63as
zWeMPl!R$MtDgO}Af~gG*=vavK&3%gh$yqI{ye5N@S{c``l^@X3_)vO
zPj?+e*c=@lNw~0QcinTfupbTJE%eq$T@JO2~+?(G3-0BaiHz_xT)12q)3scGvmJtxQ^5NDI!08
z#P?7|MdkUof}$ex`OIza_nxvqG6H8rTRk{CH>O`}xsy;ufwc3!wrZfi?-ZMQ_&T<9%<=gxfN;q9E`
zQo2f?(I4alvPrN%?eQjMOi#6BlRQK}CmbSz6aPiaNSBEDl(x&MA^x7g5)
zczXThGWR3U+JJGH=bp@Zz^X@L7)>8mD@0W7H|Lizf5vQ5q4&nbf>${U8d{v*NE^GQ
z7}P=l|L}}i6+YW-K={$6GxJveIfm!wl|;VGcOKqL#yeHa4TF;WBmLvqrhgj?>w&^g
zqd)1F>9I7|L>!F-9YRiJCIbf>9!p2C8rq!+-{4mkIVFA+TQNG6y&iG8gTD=xS~WcU
zD$ohfuw5aue?8*mMZiV`-_mJye*~m=D^^t3lK30AV4Q*|dA`9%d=
zq@3jX0xD6!S$&uGK<2lnr)LxerMFJXUU$Nh@*|uW*=q4f3UV)|4`RXehzL1Pm`FUy3Q;!58
z)5vPVcA^s*brChD4Y8iZMSMfQQ;|}wioon5pAd<#*rnxUigVI~cb#YkEZS}2xs`ga
z-`0pNBVXiM<)BgUyK?&k$*(NB4OV%d%D#9hGE#VQcJ?mkwG=wWMUKgo&G|*&76inG
zEhdd;vcK&m>~_h{sTU*d<1>=E_f_5j2l-+;Luw{7QYX{pW66*gK`IX{k-h6=xt>pqx<*I=t`*(-!vo``PsRo3~8{c188&
zpBf1dx81ByKjPGR7dHj0gpaRaP~SYK-TqZ>Gt3lz3k|J7h&QCJq+
zWB2W6E#<4c$epx9ILSrs2c#CT!y=I0qtG7M+bfRcey;2E=V#ydx6P&)mK>xrg<_YMlrfEnvL2#Jz6izbOUUD=+S
z*lBp@bCb3=?9|`k=eeIr7rL2z%?e4EGE3$6qH0SMpW1f2Slgg{wjoLTB16b}K^Cm@
zw$lDn`&)1}y2r&cnY)GD9{)T*uiz%%ebw%Jo3UkL=fblvD>1nDZ`JA{>{+~er~Y+L
zuBa?W6zw)&OnJFBSJn4HJ++s$f}yt(vnuo(KcrGFcGg$E$C=yPeTEH8&jiW~a#qzJ
z);+KRzV)J8D_$>R=nMXA_}SEQhsw+&vth(I>U*oN*^c+tm2h)m?6hS}a%-!t8X|JWSVEaL|i;ZpE`
zmV`uqQI&C*?$lVxx5GgeiUqNVax~BOgDk_qg2)_r2q5!lGc1iE9LP|)aghm4dnf6?
zRxPysxUk3d+?t{m-!|u#kU6;52k^#jq0-oYu+oJCVW8Gty~5{{kI)*BSXjAwCq{mbX!^cJ;cDrhfL-$vd
z0A{`9#INrn+ZP|fE}g*CP+~pNc=BZ%zm+X3*Zw58&TCG_pOY{ft;%DSUoCX~A}I~5
zf=`)NXJvZqdO1U38+`)HA5P4CIxdljoAi}dDKN*2-d_CT&`_Etcw0>^O!bm=Ie9`Q
zql3aJ&+q4qu-sal(Dx5qmCq+(*@L5v3ZJc%R1b>!8JH=VVUsYSH0spCeLJ61o^%U~Kz!V4J
zZb=^5EEcey0+x`OUwSa6h|6y+llZH5cif7+6NZoeWWl6P#-6dj6eJ@26uZ-sv&FmJ
zxc9(=*)p~+gVXxsX-gGoe4e9I?A|@zo36dvt*T$hm=r4f4qi?
z;7V?vL;lQz%1=KX?t(_AwU(5DQ~*NuFmI`XOiI&Vh2@T&cfI;E7IGq?qNgwQ*4H{b
zuX+VU-e@n-Ec*mmxEL|u2^0aK#mS$lq={{pPa`6}3%ePK`CXV&DrZA&+%)Lv9qX+B
zJjs4a8(0i=n5psOzzeqPMoM62WXS;S*at&6qd!K$`^($IVpw9HPZOBWmPdOA6H=2_
zPlQMKhwMA&zqHNYkY9h5PS;X^X^|_EE40Pp^%j4<*$|C(Zx~)eXnr#5Vx>E=w%0d6_gSP
zJ$>qNDp*Es*m^qf(=&5$eigLw{yWWnh;DtU@%4Gam(Oo9gsW(4H*Nx45@6-AJ3A{u
zMPibXlD?81w^blLD2N0TkG^wVAE7*&2SPj(zZxR;ECcSyB%K~&H&!eyhnS`|8A%`)
z`U0W#SQM>GNW}!DV}`PuOMC&45ILMSqV1DCm&&?LAXsa|G!80BCVTe^?Lt()$nPdB
zV!3!-WM8e{V;vFkM~WLB|8ITyTm`FY={DPFBM}(E_|NDxq)Mc@%_F)Mz;pF5KBdWFMS1EzkAk^$5Q$@
zx$jf9Cc&q@2fVI$9?n@&Vs+)YCEH+EoY8pPVZB4$Q0TR5=BEG+rO6c2SC%9jNtj}A
z7f2baCTkQ=UKr!cd1BZoeP5uA0_ge#qbyIY0$guHs#@~)Q~qL0#8X3+cY2UOmjqSi
z(Oj$Xw=4$iSb%sNYWR3*>w83SlG*Nz|6^L57Q+8EVo70&iSW|@|<`l^jfv%4v
zwzkOYhkKta^8F3vwFk-tVIE)KlE`@n?hww3h(++xqy9WSjCbofa3X^r#(Qizf=wr`
zrcOA)>^zICjQtd~=@4i~kGktGVQJLq+Fc@Pp}}|DzNh+EkTQvJj(B3;D>05^K#@+P
zx`nZVj6KXcO8P11)!!gf^GQ`hv!B0KuzzvX(E`PrT`D!FVJ%fSA!Cp$xg&BjN+C@M
zD$|*eL)wK!&UdE@9rJA_1iN9$s>e}_W{MR3@@p0|401*F1B|X&R6{+>%(;Jm#q%;O@xEM9wvNDg#nG5t+>g7e0=sm8MT$f8~MA
zesvcghKy=;WWC@Db8FQ%>+R{cDs>&%Yx=XlKWyz#`}t+#O;_-*U?E5_j3Lmdq4p&3
z39!t_wCnkL=hUvCdb2@E%6`5uit&(`pr-q!}4
zhIH$xl`H~>5A~vxJ8SkEAjlKu!s22Hx}h1YMqXGdzY&Fg+a>y0e%{DP80PT>E6n+6
z8#(~)1QmPxTiM)p`x5BU27OcGq5e_{WGh*7wD9=9cOgU?9gIHaMUd2B^zjxG{YVh5
zWcbX3CrR3j<9$X9%9F)bF;Ty$)f=mYCOVhj_r8t;1O|{LF0W5R!J^wAU%)`eg?Ap5
z0X8wh6(csddC!|AIy^7GNSj=1BD$yy)5uVxdC|$pz_UlBYW|N$a4Y0%xuURW3kA2r
zZ@UiZsfU_q78X)_5YhvmV`LiGK_E5Hbi$!
zQz>^wGOCt-JF_DD@B;LxX=8l9oW4vi1_l$7c9T|
z+R9bPl*+L|#f&1(_V^*6+pevP;rfdyyS8V|lZ{8Whi-#_{d>T29C`~6b&MgD6AJce
z3yUSco^tA#`J)lL#AS?+1iiRX0X{{vl3!mbEiYU9fF&`IhK&WEs)+@v}J~Eu)a7@GI+_qcCMN@
z?~M)zS?k{+f{5I#vR~xLQ}79}(tUyfJ)6x8CD-~s2|Vihl_Q$=qWx5|XIbo(Erdl%
zf}6$e4R=cEDG&P@DRFwj>|Sy}tZyo_*q}y)_77H_CN2r()6WTq;GoX*oEWIYMY$&!
zQE;D`L?sIVhT<1>sW`#gD}gh}?DE+nNS+)S*PjG)>4i5ckb6AYm-G(8ucFPJkJ?X%
z8meC`y$G#*8mXPtjF{n=eLk{003&j{iyd#K=;|6h*O<&{MstZxsF*UvG{r91c;z2snd{`u;CyqDPrgV{vEaH|Se
z3Le-3^gM~O8-u|T-KWEhxRo3U7w?#3OV%&J*Wxr1aa)fcl`o7XTTkJn0fPz;$8rAN
zclV-m?GtZF?Hh0;f7GSi-L3Je*Wd`vj_zC?e&%v$gTvr>$@!(iIJg`NwCz3irftO5
zPzLaKVlm6H)0G^RCh36{@T}$505yZ0^8IKm>;m_X?^LxTJBrFhiD?-VuInAbj^=z8
zt(Nccq@R=F)=Jhj>YRN#id$wW($cD
ze{qxam2g3-TkWSM&v&1$4Q@FF_OmUy$40Y$F7549>%
z&oghu5~B{dC2B>Qn3x!KE07VN?f|#t0`K|hBBpV3Kunk0wBy@n!OcR8ji*uvO2X@{
zzKKZf-(yuD@}1XO0F=r5;4bwvWeE0>Pz&k~_`e*62NtnBuIg2iKP;|aVmcDuJ0Rqv
z;4=?CfqCgNO|(#zN%8lL5JCU^{&wp2`D3Hd-zPTYruqxed9ber8tZ=68(8W2D7>!i~`m~Kefh$eG<(mUKi~e
zLq=DmiOALx&@o%SmpC)F9G*GiNK%GaH;Y?)$M0
zVx2@}(cr{;cRdiS^ryCc%-Se7lJ51~2>gTwUu{42&R@?N3amGN+&Nch$Tgj*HPM#c
zWqEFPy(2&3+r74X;ErY43jYW|xA+yyQTG!bkq?rfo$70Xh@D|SrGJd>I4(#5bH2R&aBhK%S|!9?L+ki9vrj|`
zo&7`dtt#%6gNW0pbvr08JpYTk8D~aTH^5p$*m-{$OGm_J<<+2vpojX-WhU?>s}thd
z-*p!w6Q18R_%^;pC$fP;_aeQ7<5YSRoUcl9c9zxG5+&$0rZO@pf8+$F;LSRJ{V>
zXiag2*TX2YR4Z{;A7Vo5#ArSsomL?wV?{A@rlFKC_gHS5uXo?j5*X*r&8c==oLlxi
z`4vs$luqL70b+yw@bx^`ooor;pH)Wmq_T`58&iwb)|Xa13!e!^tv|(-qEION`9ccz
zz;oVsb6D6CbA|x)ilw{vgQBzDDEI6KyJWi}S*56h4OxQ?t6_0%maEFSyhx+4Q;B*<
zq>g5C{A6T7UtTkI^f3~4ztK-6^1e^k+~CwbX@*_f+6gCvza*JdG;>T!
zs^J_n(sp!SaO}v^)*Ny6dHlG+lNSREGn!{lWaA-?m7b{*q@;1&6pZ0Ec6?@|4_ta@
zbV7!(2_|c|RtAQ9+*NuEKjw(&&Lgd0Xn&_%#Jig&X<(=`$qHSw{VDn+2Gl>i`C6HdmQ+?Q(c*Yj9D$&hi1??z0h+P8Kp`+ymOjj%rb)O*oRrh#(v&&F8!W;%+Vt6@$QPpxGNU({k
z;Mz$ZN*~vL^xb+SdnBLU_EkebsrQ$&D)R3|1C+&H0H(7c@D~Yy1vQN1C9X)Ullx2=$7OL1p1jz<#+wD4DQ^V#UGy9
z)z|bwcB7O%Y8CYE{;FQvV2lR&vO(zWA8_$9`{*Oav){R5xfZJzxvj6aGKH@ITwniC
zC7wYgA=}@z)(-{B_b_4o{^^znvmLj
z>JCd{QPdS!4Q{!Ta*|iHits&pc>DO~(WKx|3Bgk|`HJkzOiqOH2%sjZEM>Dq@RWwn
znqH^%<2u_ABb|!D9k~#!dfiq%?{3p33@9sUdbQCQGtDvr#e#x+24`-mGunf
zip=Z}4_uu&I;lPetRJg;mFCi6N{&Y?;^Oy4r_OV;5g!S*9t4iKzdjw2fHNAHw>bTO
zo2QBBU~UgS4{@vTF49DNWdrXXg6enExul^_YYnu5NwmgjGyB4jAGHEooVTxjbTdnR
z1|G^vIP1q8+ul$qO?+}A|M7kPXj~~vj93Dw?QV3}+^3z7q`}Rqxw?k#%(;$<0un~e
z-~5bp*qnFSJIwsW)PGNz;d#P-FZLen?mn708~(l;)#t+JhYc)ZqiFlIJL@9(D}CN~
zR7i5cTu@YeK=Ea;`;(+6I#(mr?s2m=YbYaL-@CR{)`t%n?r}ePB!|pb)ld)8>A0ns
zBg;kC&;J#Ex7GJ+{6q}s5obtG9ETZ7Mv`K17)tXF!qjb17q>BkDF3svAE42K9P+!O
z10zlDE4yWteOZ2zV`G~-y0xAxNpN|=c6Ly&zJU1VB55P#jRu1ny
zkhF2oSE*44G~yF#VwC7U;jXe@)*-3+O)~d;2k@be1kiggl;ge--~A~+BOaIha_rMb
zW7Yn7e{Y|4vr8$=I~3h@a|?1g=2e{>ti(C-^mtX@@C(fwu}1T+IG;A(Q=f2n!y1u*
z`165vN4qiPAQrzF9*lmeTjHeUzO~0v*$wa6ZW~cLm~iBLR&?Lwvps?AH50rgDO3pC
zbLp&}6QAZxpe&g?rtPs
ze52GEXw{TrQf)lM{rqb+HP<0kM`$@5zLb%T0-{&q!~Z6$z!*^lJTSXJSvQ@V9yTbb
zK$Y4f8jw(k1vTuaWs=rViiNRi+0fh6t0`TcV?T@U(ne7{$0l^x%7ntoS?dreblR8_
z05!;I;~GO@8_~sj#m(M@_(95tw2|i?(;Z#4+GSiBbGqTpk1|t7pQYeKtt%z*P!Fq|
zJ+IpH5?)JBB7uLGlh`7kS$lq143XynPK%Tj;{7|Uwp$i#Y{j|ebK#|jZFZ2|Jr7gK
z8p2fm%m!|lKA-QvV%YaQ|7QlyCl4k+nZHr+Rd`&XZRYoV+xp}j8<~?80dburfy0R7
zyxm?r-ybKRu5qI$$ov}asM}zi_H9*XkKfHM*V=cevvM;0S)C{A{MwAZ%aKQ{Drdq2C<&5>!hN9b97@m_9`_
zZO;JbvAwS1gnGDd0oGyS`f7aj?TAcD_yl&TPeY68Cjg*(%Z@JRe{8Jb9!%y98Ercw
zvAuJO%mA~Ai@ke=c7-9P+gAYrJI9`V>yd7`7EN>JoAExcbNb@r9Cv>^?B0AE8>grUKH3z>7Dpp*ugxx(S{yIFkAgzu-{jdG~luzsB23P+K7r0
z^E?ym8(2mxYG>@-Yl3dQf`-CQyuO{ilwsG(P7e_RMs@#T@xlYa?rXcQ6uPEc{x61Tzkb>*m%k!EkYcI
zM=3w>uIFuba*)vPN+#(a^oI9Sday5LAN|}?ytv$W6&i|yiiIIq0&w_9LVsubqZ$4Y
z>COH=M9w6!ws
z@|iWo22Q?G^z)Olw)uM;2hPw3tnI;)&bS~Fdm?C{KmBT1#*@HtKyBvQbl)jsvWjwn
z*P@<+s{lIY=-r;K=**L@{)0%#VB!sr5Zp~xuXkaAaejW@N`NfIS5js^4>>wCgeT%K
z*}bk`qR3eG1}e(@h})QR;QI#P7ptnO_W$5j+|P5HYjoo<8d=)dz=IMJ))f1C!y21e
z;|#3(Z{1?LrKb4E-~rPuTSo8P$Mc|@0m
z=D`qq{{r^a)z?y!ujm&skYM->G{q8{0dS}DMvUhngL?xlBcQEi_tUUsY>&D
z@fQ$?06wD-mQb-adGHyj&X=xF>pv3Ty|Y@|@4seHsHd3_u$%n!YIt-;d)VZ&;w9GI
zo&8TAMm%d~QM(+!@?9q!(xly#D`S6eG8Jj0cAW5YJYS7vB&u~x?Q*)Oo~$^A$QScG
zDDC?4h34e+e5oJ-JIZD4Y^*EQ&2H3c-69aVSEM>taozr?)TBbfX`%5hzmATgk`l7P
z+s$QVki4t<63Z|4?OZc)tzE;BPh!5(E>h)E!2g#pJTiW8T&lID^~PL;PTY@5Eh_5i
zAIEvv1ZzU>kcL^07+V2{MV^SNz&8t2}O1`#>pg-TG;L{Vs$%$i(VrpaJ$rdF&
zPsr&g+Hk?SvlCL{dncNlJG@(R7v>>J-#|*DoA*k<@B43mGJKrg`7GLMtF4FK-4egw
z9dWP4bUq3wHLs%q9$VCu*6*sJ+X1hx+$fG1quT4&j3XBP<0)^17XOu-9-v|0lm@(=
zf0YK0OU2+ZDGFHP;iOhb&yD+U6N^u#wLYLl|EdV6vaOK>T=7I}16}3wUeEC6%Aygi
zD$2DKw8I(HySDd7e9^^Cd3ib`n8(arDu}g2*6FVv+J7
zvQp%h+5Oa&Kk2;5CyqwU?d&rVx1`J^ezD(W3ehXI&-~{37<&48-%Hi>k)hYm|3Iy9
zg&ATLJ)b~ABtsR`rZ;3%4yltH+Xtg|yu_Pk0B9fcjo(7{+mxkb^9
zfPg^R#HWc$VHsy6|Y%w*YN$*}&W{m8=D)~M3XH2ELr8j}Pq9Wml
zwTJEp)xm(DiKO`WZ2flRpNVp7FX&<4kWF*ZLDhH9gCa&
zNG#}EObEk+AmQF#=O?*JL+|Y``wLE-_Kf5W7{y=k$ScTKLq{3Be7qNTzieT2mLn#y?6RVq}IPIfMBCzZB(W!w8q-y
z@=PR|&y2>b$)oR`EG4VNeH!Tne@c!7MwPW}>3l}qFRoSU^TLc)=GD23f`URAOGBxA
zQQqU(2i(R)4et8`Q^((Y#k}|Nq0F_-k0Q6gWU2xeCFm1g%s
zvn4nfM8f*cb8OH#AT?P8$NYchHwzM=$<|*Ng(s~^+llO*b0ld7!y94)Q~C1
z(}kZje-QJwL~C8E9ox2x0!|{FuJ(g=Sw^MwHl>K>jBe0+bn^3L!xO7R~S!~PkAb5eWpjxkJW-92Z>p$QJ
zEXa1=hcqY-1!SWu4Sd`{^b65M(RgL1j@$cZii4cn=~r#9oZVp=#|`e?v}`_RiH7>j
zMomv9=}_*w+b+2F3qHL3rjs;x%B!i8J-v(hi@&f3@p{fUon
z*U%L9{)_8SYa}upPxHLmZK~3KvgMwjkPzf*^XY{kln*tpB?q`_2u4K0os|35>x(_p
zjn>VRM*OfO#Mz$>=Un+7^_|_rjvGN4BK5R$0A6e8DD`7qoVl2t{hn3F#i>}pW~s6D
z0vcn1!N+m=_Ev9JlMAW02nN#@v9rT#s_bkA(3Eyt#86l*%s=jGQM2FPeWH`hV`vgN
z$e}~m$M5zV2kok!!THW%)vLz6+>`f>vcvKxiCs?uu_EH=!WIHAF%?u|JA=M5+9y7A
z*D?<{6e&!iSV4tv19=g~0$NB!=iHo@;<;w}j<`?jc{V}|{rH{ib!QLW@7+Og1BxH@
z`ZY(}M_xaxaqV&K8f_$l2cEfLjVNal@8|pW`1;EbX&;MRel|LK*Jymgs7rRA+Z?xr*3(H{bsJG}Qu`ut15J9)n2jE+=`_DyS3|730=i
zoPX5sR(P`rL{&wp7Rp8U4HL7|!?R`^p0DiLQe6x{zL!vrC@ETwbz+Kr4wTLIM~O>a
ztXT&OMJ>r*?X*&D)QkG$KF;ebHEtBVnrp!=f5J1h`lR3mFFutN#-;3Lf%wX8boB17
z6pmTwjc^f2`{_65NP{hYk1Eai9PU~*IPDzrGBMoYG1)1H%!oRSD{eB9T)nIc5OG2O
z1zn5mo0aB*Ei}44+dQNl8xx1QNE@5+Mi-Ysm+;pgj8?xxR@yd|qo9YrE*w*o%7!r&
z(Nh&TsA5&>A(u61@RfgjvWu%llfq@`o2Tc|FV{(qR|U&oK!JU{@Yolp1HG!f>+UbX(d%swMIl17mUdrWz2tun1Hs|VDtj7uUb
zPp>$sA`jxXnOkRm^hMCP!yXkR!8$}mcrz-EfNE*nS89_LvpX=d#4wzRf9c=fX+@@s%u?pAs_x<}XB{u7ywdz6cAfh?2M5h(ezY!3
z!TcJc$`5M;dqiM$&~^y1wG?;k6Bsm{@z83tG7I2DQ#GPCgd@s|h;=&&KqhE9+1uY;
z`*lflf9WpUd~1xY#+7p8-imI8^4(X)&67K#6BD!+b;$>{9N9I8*JWo5-8Lodpp-=PAosL-W%d{UwK1|3tt`%6ap;@GFB^>=IsQ@W~=tO8bRM9F;mST~|yu=+{Sy?>`R
zSrwqN1mOdG4e*+xff{E$@o)@A<%8p}qb`9)rf{$+`r>#4B4X7R7
z$8vr&A(Q60xs1jBlGFnO16ao`IXR5;o2L(>Q#hGd{UFx?nUb{3sF1>~v5;ui2!beo
ztwq@iTQ~4np)|
z+@Q(F2NuIo{DIm4hXqq@fJ;?_luPaj;aLwU85w7NG
z#CMsI%}}@ZS}7^}ZqNRCz*!~$N<)imTKnar+~2dUp^*<=r#U(s?{lN%
zpruBQ;Gj%0cyLj2PCyOs&PJS`IiGUhsI!t@vc~gJ_9+QmO^jaHhVx;iMU}0W>^|JB
z)Zk++5kgXC{}W{7y3R^Go?g-ciSFs<))#SkzVvf<$+*EYZaU)bfbPXB4nGkW7MZmv
zLT2A1oa4IFJW9CTa7IE6tBSn~Cibct&OaHSYg|Yk&_OR+3ij^RDc_c;#IEp2M_$5b!ICcJMr~gs*O@!A
z4(=~1phGK6>cdXjyqB-rpqD6|+3b6Lu_5++vTV0jhs(zGU%?E7zcN|xkZ%67-
z^y@enHz$g`oWwu;BPzH5o)EeD;YZqT*Ky}=B_*6Iy|hbz)Up#M@JoUI7h~Hg`A+qiyJZ8Hiu?>ORf+AqZ2Lf6i0A_(kf%2N8XuN-%+=wy8~KRf^W
zL=ECW2Kun-Knp5RDcwhW_TN*J`uT?$*6eFJv^qRpeCP3&q-VE6#j~jW(D2YZQ{0W}
zTt16MPG-6_diUtW-KkIvD65tNP$M1lWwr&~3%tCy_p#2k@c4pcy<_F=>D;Vb=F=7h
z%Lj0o^t;Dt8#~$3s-mB`p7ctQD94e1q2W-t-Jk!l@H^F=(z4*z3RUVPs;VJ~5!U1A
z(sXLfIBHZQ2O!bds;6*;$S@vtfmR%EaM^PbDYG@}9E#^Zv1cn*9oU9Av*{~&T;bz!
zu`@@8WMi*M3Pts5W#E>GE5-HGIs06L)|E!_@R!HNVvp0DlKVjO_AxP^hbwaiqff`>
zlv0cmT}I7@Jr~F)+YRNW%~L1%Z^q6XHXsYu&Kw4_3}RXBo%e>ZzlbG8?~0%-?w#T5pklv5@hg|C~4`|
zZiU#L9oFe4@1#eNKw!dgH(%l~FD)}}V=^j|Wl;@(ohdJyC56*TnPGW;q17z{sjv97
z5h`u}8nhtP4w*4TV?q^*gT}=0VWH_1AmpUf5?Se`UGa!d_Wth4UX}XybmT9F;<5l3
z2<=lnARNU&5uYeY`Kg)Ui)-_|gLajOHl-n*o+N`K#r(>|+{vol2Nyni*M16`Lh3Em#aXU-G)
zi}37_JXqS6;dp(PvhDgTqyHy^uzP8w`*f?LD9uBMQe~}+;5kX?Vv;T^alpv~f=$_=
z2$On%@ZA$|>%~`43}v->*+s#^%6{j=kNJC;Kk^%AgBo!@J-&N*#So@|O&G2DvckSB
zgYO}G1~tm3uc_#j6LZ~$AY2RW;iM4!C6^P+lkgWGo)O##pyLmw7JTr80#6>{On7We
z^@%2ZB|EoXPIH;AeOWN9N7sFfinJ)CTaHu%sL-=o%AR4ukx~
zVIRxSPiD^)$G?lK?G)kUj4CI{|9CO1+xA>R*C3f7+Hgz~uHq*_Gu!Z*bWGad=izS)
zO2iMH@L%L`F1xQ!_bIGoWQQ}~0F#TSI4HKvW7-`7Wq(1(;N8{E<9@^D6altcPt(c{Lnk1IB=Bk?ci3>&c%Xe#H(8A4^rj6aq#WPWU=o
z=8us6t=TWpI&v-7z@O2oB#rjhaZ*V@0x|?UItLRxP(q(ypI>vpcibPpBG?l5QI*f}
z$_-%&IH}h9H+A-OBUxQuzWFhnUU85rI;%`Tcu{1~(x}@9FQWM=ew;natVv4=&NdHh
zpYO##p+rq4BU~UyDi`y9z`m11%+w(*ao=S5agK}|4eexQmrz!=(v0g0glmr^8jF%V
z_#~(!qT@pm|aiOWdJ5-RefU
zI+`69+8iC^07(I3TVDDN2c50EN!D}O2&Z+3QA}~!uJD)W;`_nPv@~a*M^o}YcJxFq
z4X$V`GOGa%Lyh&ulZ!fsqW&LOdl}A3yLu!fT%`W{%Co_ehUp(F`El)^hXh>qbW!PwRymnnJn*=$og>8zA0H8e
zJN_)Ym#@#4Da+sO3?NKDV0Wvkrqz))DnnVrNw)N?Mo-Dh_Z+7qD}Wt~kTzZc`>2bb
z=@!}*TK?mWWM}r6{B)hqjvNeRhE;}mcB7fZ1b{_wfye)qjc)fqE>CY|P+qebht>`g
z>ht)~ei0R;w?V~|yHzb?w`H|6qEX--AwlhO%uS+BlbWVEix@T7z1k<}QMkA#j=PL3eObn1mthP+Kg*36-2fiag-sagZ_TY2ZuQ#d
zc#GrCw`^G`fybXog;%d)`vXdJku+
zgI?7GnA%;@=QAzoRFrT_qk
zJBNo#KFU}D+2VL=YHD7ev}k`;SP+s;5RuY`$WmMo48{l4+3D!Q-@*^rzHKishWq;-
z=UH4QJn-v(d>2!E9*UY~hL1A({nQ~7DGX8cg*84ueOJWtzH^545=ogjkEFNkF|a|I
zqL)@ahc$OqC~@S@a;$B-P*)l8aVko?dj06aA{oZFY9jOd6gps>tamTbbSQgFsoVG?
zqaDfkiqYB8pC)?c!DUlO3H)Htuq-#%;*nC?V1M_PBvf%s!xjCqP2Az!Vfr8P`d=X6$>q0x_PNX-9u-?GI#>2PtO
zij;$oIUZ%_zXL!~SVKeVds=P6Z2JKBujQ%ycs0okcfMaZPE>~E_XHEWaVtk&uL|5h
zuvELiC%;P(?DXOVR(?T24~LAsDK%2wd;MKXz+LkDPvqSVl8H`N9<|&z{K(S%k|dW)
zKihRwty|Xn0!DPP0vc&<&b^f=e@)HU_q*DJpX|*ag{_^Lk~!uzt}#-4_qLB076r2?
zJz{bXFh^N=By~F^_^0%{H`CK%yG%;5@d47^AxGF-xR;0dZv7sn@v&rK=~3hr1+Qjm
zOCL9qyi0o5T|#W5-P)g9^+ZeRTP}=(rogGyCpR#|R0^ssgL5Q6?Qj^xvj=dmmeTP7gH)NTBCoIXtVP2l6Q3U$R&y=Qu5oE
ziWSh7>K!L7
z5pO$K&I;Vu$AQO`qPySOOJ>-$Ae2Fd=A;Iaq2G;8zf8jZCIv%_#7K>-1Qu$v;H6+w
zsUI7DBN?uH8W{aEKT{Fv}|zWWILhcXkPQwwP^b`SnWG*NsI`-3arj3iHb~rk5A`;T5*j
z*lkoI$79#ShPAU-DpE&Yv;u9d%y}os=52_mz!+PiU6K8L?~%H7h^TEZplj~tc9bD1
zeHPug^w>uq(h`2Vp&vnlgTlJ9GPj(Z*dv-*JIu(J;)0u5ChhpU=GbaAB3fV@u44uq
zmF%L=|L5p!e+fuDl5x?rf@rDiAX+KhPjU|n5^|5R87