From d0410d39b285d463780aeee49c1db775a7b55a21 Mon Sep 17 00:00:00 2001
From: "devin-ai-integration[bot]" <158243242+devin-ai-integration[bot]@users.noreply.github.com>
Date: Sat, 23 Nov 2024 03:35:28 +0000
Subject: [PATCH] refactor: replace poetry with uv

- Updated installation docs with pip and uv methods
- Updated contributing guidelines for UV usage
- Updated GitHub workflows to use UV
- Added tabbed installation instructions
- Removed poetry dependencies and configuration
---
 .github/workflows/evals.yml                   |  21 +-
 .github/workflows/pyright.yml                 |  21 +-
 .github/workflows/ruff.yml                    |   8 +-
 .github/workflows/test.yml                    |  26 +-
 .github/workflows/test_docs.yml               |  24 +-
 docs/contributing.md                          |  24 ++
 ..._graphs.md => building_knowledge_graph.md} |   0
 docs/index.md                                 |  53 ++-
 docs/installation.md                          |  23 +-
 pyproject.toml                                | 170 +++----
 requirements-dev.txt                          |   5 +
 requirements.txt                              | 401 +++++++++++++++++-
 12 files changed, 591 insertions(+), 185 deletions(-)
 rename docs/examples/{building_knowledge_graphs.md => building_knowledge_graph.md} (100%)
 create mode 100644 requirements-dev.txt

diff --git a/.github/workflows/evals.yml b/.github/workflows/evals.yml
index bc83ff0f8..6b2a57463 100644
--- a/.github/workflows/evals.yml
+++ b/.github/workflows/evals.yml
@@ -20,15 +20,26 @@ jobs:
         uses: actions/setup-python@v4
         with:
           python-version: 3.11
-          cache: "poetry"
 
-      - name: Install Poetry
-        uses: snok/install-poetry@v1.3.1
+      - name: Cache UV virtualenv
+        uses: actions/cache@v2
+        with:
+          path: |
+            ~/.cache/uv
+            ~/.uv
+          key: ${{ runner.os }}-uv-${{ hashFiles('**/requirements*.txt') }}
+          restore-keys: |
+            ${{ runner.os }}-uv-
+
+      - name: Install UV
+        run: curl -LsSf https://astral.sh/uv/install.sh | sh
 
       - name: Install dependencies
-        run: poetry install --with dev,anthropic
+        run: |
+          uv pip install -e ".[test-docs,anthropic]"
+          uv pip install -r requirements-dev.txt
 
       - name: Run all tests
-        run: poetry run pytest tests/
+        run: pytest tests/
         env:
           OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
diff --git a/.github/workflows/pyright.yml b/.github/workflows/pyright.yml
index d8e60b0a5..5635f3f14 100644
--- a/.github/workflows/pyright.yml
+++ b/.github/workflows/pyright.yml
@@ -27,22 +27,23 @@ jobs:
         with:
           python-version: ${{ matrix.python-version }}
 
-      - name: Cache Poetry virtualenv
+      - name: Cache UV virtualenv
         uses: actions/cache@v2
         with:
-          path: ~/.cache/pypoetry/virtualenvs
-          key: ${{ runner.os }}-poetry-${{ hashFiles('**/poetry.lock') }}
+          path: |
+            ~/.cache/uv
+            ~/.uv
+          key: ${{ runner.os }}-uv-${{ hashFiles('**/requirements*.txt') }}
           restore-keys: |
-            ${{ runner.os }}-poetry-
+            ${{ runner.os }}-uv-
 
-      - name: Install Poetry
-        uses: snok/install-poetry@v1.3.1
+      - name: Install UV
+        run: curl -LsSf https://astral.sh/uv/install.sh | sh
 
       - name: Install dependencies
-        run: poetry install --with dev,anthropic
-
-      - name: Add poetry to PATH
-        run: echo "$(poetry env info --path)/bin" >> $GITHUB_PATH
+        run: |
+          uv pip install -e ".[test-docs,anthropic]"
+          uv pip install -r requirements-dev.txt
 
       - uses: jakebailey/pyright-action@v2
         with:
diff --git a/.github/workflows/ruff.yml b/.github/workflows/ruff.yml
index 2c81358ca..df38d6e40 100644
--- a/.github/workflows/ruff.yml
+++ b/.github/workflows/ruff.yml
@@ -24,12 +24,12 @@ jobs:
         uses: actions/setup-python@v4
         with:
           python-version: 3.9
-          cache: "pip"
+      - name: Install UV
+        run: curl -LsSf https://astral.sh/uv/install.sh | sh
       - name: Install dev dependencies
         run: |
-          python3 -m pip install --upgrade pip setuptools wheel
-          python3 -m pip install -r requirements.txt
-          python3 -m pip install -r requirements-doc.txt
+          uv pip install -r requirements.txt
+          uv pip install -r requirements-doc.txt
       - name: Run Continuous Integration Action
         uses: astral-sh/ruff-action@v1
       - name: Upload Artifacts
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 8dcdab16f..1f3cd816c 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -21,23 +21,27 @@ jobs:
         with:
           python-version: ${{ matrix.python-version }}
 
-      - name: Cache Poetry virtualenv
+      - name: Cache UV virtualenv
         uses: actions/cache@v2
         with:
-          path: ~/.cache/pypoetry/virtualenvs
-          key: ${{ runner.os }}-poetry-${{ hashFiles('**/poetry.lock') }}
+          path: |
+            ~/.cache/uv
+            ~/.uv
+          key: ${{ runner.os }}-uv-${{ hashFiles('**/requirements*.txt') }}
           restore-keys: |
-            ${{ runner.os }}-poetry-
+            ${{ runner.os }}-uv-
 
-      - name: Install Poetry
-        uses: snok/install-poetry@v1.3.1
+      - name: Install UV
+        run: curl -LsSf https://astral.sh/uv/install.sh | sh
 
       - name: Install dependencies
-        run: poetry install --with dev,anthropic
+        run: |
+          uv pip install -e ".[test-docs,anthropic]"
+          uv pip install -r requirements-dev.txt
 
       - name: Run tests
         if: matrix.python-version != '3.11'
-        run: poetry run pytest tests/ -k 'not llm and not openai and not gemini and not anthropic and not cohere and not vertexai' && poetry run pytest tests/llm/test_cohere
+        run: pytest tests/ -k 'not llm and not openai and not gemini and not anthropic and not cohere and not vertexai' && pytest tests/llm/test_cohere
         env:
           OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
           ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
@@ -46,9 +50,9 @@ jobs:
       - name: Generate coverage report
         if: matrix.python-version == '3.11'
         run: |
-          poetry run coverage run -m pytest tests/ -k "not docs and not anthropic and not gemini and not cohere and not vertexai and not fireworks"
-          poetry run coverage report
-          poetry run coverage html
+          coverage run -m pytest tests/ -k "not docs and not anthropic and not gemini and not cohere and not vertexai and not fireworks"
+          coverage report
+          coverage html
         env:
           OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
           ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
diff --git a/.github/workflows/test_docs.yml b/.github/workflows/test_docs.yml
index ab264130d..99fca6305 100644
--- a/.github/workflows/test_docs.yml
+++ b/.github/workflows/test_docs.yml
@@ -20,27 +20,31 @@ jobs:
           sudo apt-get update
           sudo apt-get install -y graphviz libcairo2-dev xdg-utils
 
-      - name: Install Poetry
-        uses: snok/install-poetry@v1.3.1
-
       - name: Set up Python ${{ matrix.python-version }}
         uses: actions/setup-python@v4
         with:
           python-version: ${{ matrix.python-version }}
-          cache: "poetry"
 
-      - name: Cache Poetry virtualenv
+      - name: Cache UV virtualenv
         uses: actions/cache@v2
         with:
-          path: ~/.cache/pypoetry/virtualenvs
-          key: ${{ runner.os }}-poetry-${{ hashFiles('**/poetry.lock') }}
+          path: |
+            ~/.cache/uv
+            ~/.uv
+          key: ${{ runner.os }}-uv-${{ hashFiles('**/requirements*.txt') }}
           restore-keys: |
-            ${{ runner.os }}-poetry-
+            ${{ runner.os }}-uv-
+
+      - name: Install UV
+        run: curl -LsSf https://astral.sh/uv/install.sh | sh
 
       - name: Install dependencies
-        run: poetry install --with dev,docs,test-docs,anthropic,google-generativeai
+        run: |
+          uv pip install -e ".[dev,docs,test-docs,anthropic,google-generativeai]"
+          uv pip install -r requirements-dev.txt
+          uv pip install -r requirements-doc.txt
 
       - name: Run tests
-        run: poetry run pytest tests/llm/test_openai/docs
+        run: pytest tests/llm/test_openai/docs
         env:
           OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
diff --git a/docs/contributing.md b/docs/contributing.md
index 0413072fb..9a8b31703 100644
--- a/docs/contributing.md
+++ b/docs/contributing.md
@@ -27,6 +27,30 @@ If it is not a small change, please start by [filing an issue](https://github.co
 
 If you need ideas, you can check out the [help wanted](https://github.com/jxnl/instructor/labels/help%20wanted) or [good first issue](https://github.com/jxnl/instructor/labels/good%20first%20issue) labels.
 
+## Development Setup
+
+1. Install UV if you haven't already:
+   ```bash
+   curl -LsSf https://astral.sh/uv/install.sh | sh
+   ```
+
+2. Clone the repository and install dependencies:
+   ```bash
+   git clone https://github.com/jxnl/instructor.git
+   cd instructor
+   uv pip install -e ".[test-docs]"
+   ```
+
+3. Install development dependencies:
+   ```bash
+   uv pip install -r requirements-dev.txt
+   ```
+
+4. Install documentation dependencies (if working on docs):
+   ```bash
+   uv pip install -r requirements-doc.txt
+   ```
+
 [Grit](https://docs.grit.io/) is used to enforce best practices. You can run `grit check` to check your code before submitting a pull request.
 
 # Contributors
diff --git a/docs/examples/building_knowledge_graphs.md b/docs/examples/building_knowledge_graph.md
similarity index 100%
rename from docs/examples/building_knowledge_graphs.md
rename to docs/examples/building_knowledge_graph.md
diff --git a/docs/index.md b/docs/index.md
index 72f870f2b..e3febc801 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -19,6 +19,22 @@ It stands out for its simplicity, transparency, and user-centric design, built o
 
 [:material-star: Star the Repo](https://github.com/jxnl/instructor){: .md-button .md-button--primary } [:material-book-open-variant: Cookbooks](./examples/index.md){: .md-button } [:material-lightbulb: Prompting Guide](./prompting/index.md){: .md-button }
 
+=== "pip"
+    ```bash
+    pip install instructor
+    ```
+
+=== "uv"
+    ```bash
+    uv pip install instructor
+    ```
+
+If you ever get stuck, you can always run `instructor docs` to open the documentation in your browser. It even supports searching for specific topics.
+
+```bash
+instructor docs [QUERY]
+```
+
 ## Newsletter
 
 If you want to be notified of tips, new blog posts, and research, subscribe to our newsletter. Here's what you can expect:
@@ -77,9 +93,15 @@ Subscribe to our newsletter for updates on AI development. We provide content to
 
 ## Getting Started
 
-```
-pip install -U instructor
-```
+=== "pip"
+    ```bash
+    pip install -U instructor
+    ```
+
+=== "uv"
+    ```bash
+    uv pip install instructor
+    ```
 
 If you ever get stuck, you can always run `instructor docs` to open the documentation in your browser. It even supports searching for specific topics.
 
@@ -93,12 +115,25 @@ You can also check out our [cookbooks](./examples/index.md) and [concepts](./con
 
     To keep the bundle size small, `instructor` only ships with the OpenAI client. Before using the other clients and their respective `from_xx` method, make sure you've installed the dependencies following the instructions below.
 
-    1. Anthropic : `pip install "instructor[anthropic]"`
-    2. Google Generative AI: `pip install "instructor[google-generativeai]"`
-    3. Vertex AI: `pip install "instructor[vertexai]"`
-    4. Cohere: `pip install "instructor[cohere]"`
-    5. Litellm: `pip install "instructor[litellm]"`
-    6. Mistral: `pip install "instructor[mistralai]"`
+    === "pip"
+        ```bash
+        pip install "instructor[anthropic]"            # For Anthropic
+        pip install "instructor[google-generativeai]"  # For Google Generative AI
+        pip install "instructor[vertexai]"             # For Vertex AI
+        pip install "instructor[cohere]"               # For Cohere
+        pip install "instructor[litellm]"              # For Litellm
+        pip install "instructor[mistralai]"            # For Mistral
+        ```
+
+    === "uv"
+        ```bash
+        uv pip install "instructor[anthropic]"            # For Anthropic
+        uv pip install "instructor[google-generativeai]"  # For Google Generative AI
+        uv pip install "instructor[vertexai]"             # For Vertex AI
+        uv pip install "instructor[cohere]"               # For Cohere
+        uv pip install "instructor[litellm]"              # For Litellm
+        uv pip install "instructor[mistralai]"            # For Mistral
+        ```
 
 Now, let's see Instructor in action with a simple example:
 
diff --git a/docs/installation.md b/docs/installation.md
index 139bd2d89..00bd1f736 100644
--- a/docs/installation.md
+++ b/docs/installation.md
@@ -1,13 +1,19 @@
 ---
-title: Installing Instructor with Pip
-description: Learn how to install Instructor and its dependencies using pip for Python 3.9+. Simple setup guide included.
+title: Installing Instructor
+description: Learn how to install Instructor and its dependencies using pip or uv for Python 3.9+. Simple setup guide included.
 ---
 
-Installation is as simple as:
+# Installation
 
-```bash
-pip install instructor
-```
+=== "pip"
+    ```bash
+    pip install instructor
+    ```
+
+=== "uv"
+    ```bash
+    uv pip install instructor
+    ```
 
 Instructor has a few dependencies:
 
@@ -16,4 +22,7 @@ Instructor has a few dependencies:
 - [`docstring-parser`](https://pypi.org/project/docstring-parser/): A parser for Python docstrings, to improve the experience of working with docstrings in jsonschema.
 - [`pydantic`](https://pypi.org/project/pydantic/): Data validation and settings management using python type annotations.
 
-If you've got Python 3.9+ and `pip` installed, you're good to go.
+If you've got Python 3.9+ and either `pip` or `uv` installed, you're good to go.
+
+!!! tip "Using UV"
+    [UV](https://github.com/astral-sh/uv) is an extremely fast Python package installer and resolver written in Rust. Its `uv pip` interface is a drop-in alternative to pip and typically installs dependencies significantly faster.
diff --git a/pyproject.toml b/pyproject.toml
index de057805b..b342aecec 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,134 +1,66 @@
-[tool.poetry]
+[project]
 name = "instructor"
 version = "1.7.0"
 description = "structured outputs for llm"
-authors = ["Jason Liu <jason@jxnl.co>"]
+authors = [{name = "Jason Liu", email = "jason@jxnl.co"}]
 license = "MIT"
 readme = "README.md"
-packages = [{ include = "instructor" }]
-repository = "https://github.com/jxnl/instructor"
-
-[tool.poetry.dependencies]
-python = "^3.9"
-openai = "^1.52.0"
-pydantic = "^2.8.0"
-docstring-parser = "^0.16"
-typer = ">=0.9.0,<1.0.0"
-rich = "^13.7.0"
-aiohttp = "^3.9.1"
-tenacity = ">=9.0.0,<10.0.0"
-pydantic-core = "^2.18.0"
-jiter = ">=0.6.1,<0.7"
-jinja2 = "^3.1.4"
-requests = "^2.32.3"
-
-# dependency versions for extras
-fastapi = { version = ">=0.109.2,<0.116.0", optional = true }
-redis = { version = "^5.0.1", optional = true }
-diskcache = { version = "^5.6.3", optional = true }
-pandas = { version = "^2.2.0", optional = true }
-tabulate = { version = "^0.9.0", optional = true }
-pydantic_extra_types = { version = "^2.6.0", optional = true }
-litellm = { version = "^1.35.31", optional = true }
-anthropic = { version = ">=0.36.2,<0.38.0", optional = true }
-xmltodict = { version = ">=0.13,<0.15", optional = true }
-groq = { version = ">=0.4.2,<0.12.0", optional = true }
-cohere = { version = "^5.1.8", optional = true }
-mistralai = { version = "^1.0.3", optional = true }
-google-generativeai = { version = "^0.8.2", optional = true }
-google-cloud-aiplatform = { version = "^1.53.0", optional = true }
-jsonref = { version = "^1.1.0", optional = true }
-cerebras_cloud_sdk = { version = "^1.5.0", optional = true }
-fireworks-ai = { version = "^0.15.4", optional = true }
-writer-sdk = { version = "^1.2.0", optional = true }
+requires-python = ">=3.9"
+dependencies = [
+    "openai>=1.52.0",
+    "pydantic>=2.8.0",
+    "docstring-parser>=0.16",
+    "typer>=0.9.0,<1.0.0",
+    "rich>=13.7.0",
+    "aiohttp>=3.9.1",
+    "tenacity>=9.0.0,<10.0.0",
+    "pydantic-core>=2.18.0",
+    "jiter>=0.6.1,<0.7",
+    "jinja2>=3.1.4",
+    "requests>=2.32.3",
+]
 
-[tool.poetry.extras]
-anthropic = ["anthropic", "xmltodict"]
-groq = ["groq"]
-cohere = ["cohere"]
+[project.optional-dependencies]
+anthropic = ["anthropic>=0.36.2,<0.38.0", "xmltodict>=0.13,<0.15"]
+groq = ["groq>=0.4.2,<0.12.0"]
+cohere = ["cohere>=5.1.8"]
 test-docs = [
-    "fastapi",
-    "redis",
-    "diskcache",
-    "pandas",
-    "tabulate",
-    "pydantic_extra_types",
-    "litellm",
-    "anthropic",
-    "groq",
-    "cohere",
-    "mistralai",
+    "fastapi>=0.109.2,<0.116.0",
+    "redis>=5.0.1",
+    "diskcache>=5.6.3",
+    "pandas>=2.2.0",
+    "tabulate>=0.9.0",
+    "pydantic_extra_types>=2.6.0",
+    "litellm>=1.35.31",
+    "anthropic>=0.36.2,<0.38.0",
+    "groq>=0.4.2,<0.12.0",
+    "cohere>=5.1.8",
+    "mistralai>=1.0.3",
+    "phonenumbers>=8.13.33",
+    "graphviz>=0.20.3",
+    "sqlmodel>=0.0.22",
+    "trafilatura>=1.12.2",
+    "pydub>=0.25.1",
+    "datasets>=3.0.1",
+    "writer-sdk>=1.2.0",
 ]
-mistralai = ["mistralai"]
-litellm = ["litellm"]
-google-generativeai = ["google-generativeai"]
-vertexai = ["google-cloud-aiplatform", "jsonref"]
-cerebras_cloud_sdk = ["cerebras_cloud_sdk"]
-fireworks-ai = ["fireworks-ai"]
-writer = ["writer-sdk"]
+mistralai = ["mistralai>=1.0.3"]
+litellm = ["litellm>=1.35.31"]
+google-generativeai = ["google-generativeai>=0.8.2"]
+vertexai = ["google-cloud-aiplatform>=1.53.0", "jsonref>=1.1.0"]
+cerebras_cloud_sdk = ["cerebras_cloud_sdk>=1.5.0"]
+fireworks-ai = ["fireworks-ai>=0.15.4"]
+writer = ["writer-sdk>=1.2.0"]
 
-[tool.poetry.scripts]
+[project.scripts]
 instructor = "instructor.cli.cli:app"
 
-[tool.poetry.group.dev.dependencies]
-pytest = "^8.3.3"
-pytest-asyncio = "^0.24.0"
-coverage = "^7.3.2"
-pyright = "^1.1.360"
-jsonref = "^1.1.0"
-black = "^24.10.0"
-
-[tool.poetry.group.docs.dependencies]
-mkdocs = "^1.4.3"
-mkdocs-material = { extras = ["imaging"], version = "^9.5.9" }
-mkdocstrings = "^0.26.1"
-mkdocstrings-python = "^1.11.1"
-pytest-examples = "^0.0.13"
-mkdocs-jupyter = ">=0.24.6,<0.26.0"
-mkdocs-rss-plugin = "^1.12.0"
-mkdocs-minify-plugin = "^0.8.0"
-mkdocs-redirects = "^1.2.1"
-
-[tool.poetry.group.anthropic.dependencies]
-anthropic = ">=0.36.2,<0.38.0"
-
-[tool.poetry.group.test-docs.dependencies]
-fastapi = ">=0.109.2,<0.116.0"
-redis = "^5.0.1"
-diskcache = "^5.6.3"
-pandas = "^2.2.0"
-tabulate = "^0.9.0"
-pydantic_extra_types = "^2.6.0"
-litellm = "^1.35.31"
-anthropic = ">=0.36.2,<0.38.0"
-xmltodict = ">=0.13,<0.15"
-groq = ">=0.4.2,<0.12.0"
-phonenumbers = "^8.13.33"
-cohere = "^5.1.8"
-mistralai = "^1.0.3"
-cerebras_cloud_sdk = "^1.5.0"
-fireworks-ai = "^0.15.4"
-graphviz = "^0.20.3"
-sqlmodel = "^0.0.22"
-trafilatura = "^1.12.2"
-pydub = "^0.25.1"
-datasets = "^3.0.1"
-writer-sdk = "^1.2.0"
-
-[tool.poetry.group.litellm.dependencies]
-litellm = "^1.35.31"
-
-[tool.poetry.group.google-generativeai.dependencies]
-google-generativeai = "^0.8.2"
-jsonref = "^1.1.0"
-
-[tool.poetry.group.vertexai.dependencies]
-google-cloud-aiplatform = "^1.53.0"
-jsonref = "^1.1.0"
-
 [build-system]
-requires = ["poetry-core"]
-build-backend = "poetry.core.masonry.api"
+requires = ["hatchling"]
+build-backend = "hatchling.build"
+
+[tool.hatch.build.targets.wheel]
+packages = ["instructor"]
 
 [tool.pyright]
 include = ["instructor"]
diff --git a/requirements-dev.txt b/requirements-dev.txt
new file mode 100644
index 000000000..e97be41e5
--- /dev/null
+++ b/requirements-dev.txt
@@ -0,0 +1,5 @@
+pytest>=8.3.3
+pytest-asyncio>=0.24.0
+coverage>=7.3.2
+pyright>=1.1.360
+jsonref>=1.1.0
diff --git a/requirements.txt b/requirements.txt
index fd1cc9899..b2b531f78 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,10 +1,391 @@
-openai>=1.1.0
-pydantic
-docstring-parser
-rich
-aiohttp
-ruff==0.7.1
-pre-commit==4.0.1
-pyright==1.1.386
-typer
-cohere
\ No newline at end of file
+# This file was autogenerated by uv via the following command:
+#    uv pip compile pyproject.toml --extra test-docs --extra anthropic --extra groq --extra cohere --extra mistralai --extra litellm --extra google-generativeai --extra vertexai --extra cerebras_cloud_sdk --extra fireworks-ai -o requirements.txt
+aiohappyeyeballs==2.4.3
+    # via aiohttp
+aiohttp==3.11.7
+    # via
+    #   instructor (pyproject.toml)
+    #   litellm
+aiosignal==1.3.1
+    # via aiohttp
+annotated-types==0.7.0
+    # via pydantic
+anthropic==0.37.1
+    # via instructor (pyproject.toml)
+anyio==4.6.2.post1
+    # via
+    #   anthropic
+    #   cerebras-cloud-sdk
+    #   groq
+    #   httpx
+    #   httpx-ws
+    #   openai
+    #   starlette
+attrs==24.2.0
+    # via
+    #   aiohttp
+    #   jsonschema
+    #   referencing
+cachetools==5.5.0
+    # via google-auth
+cerebras-cloud-sdk==1.12.1
+    # via instructor (pyproject.toml)
+certifi==2024.8.30
+    # via
+    #   httpcore
+    #   httpx
+    #   requests
+charset-normalizer==3.4.0
+    # via requests
+click==8.1.7
+    # via
+    #   litellm
+    #   typer
+cohere==5.11.4
+    # via instructor (pyproject.toml)
+diskcache==5.6.3
+    # via instructor (pyproject.toml)
+distro==1.9.0
+    # via
+    #   anthropic
+    #   cerebras-cloud-sdk
+    #   groq
+    #   openai
+docstring-parser==0.16
+    # via
+    #   instructor (pyproject.toml)
+    #   google-cloud-aiplatform
+eval-type-backport==0.2.0
+    # via mistralai
+fastapi==0.115.5
+    # via instructor (pyproject.toml)
+fastavro==1.9.7
+    # via cohere
+filelock==3.16.1
+    # via huggingface-hub
+fireworks-ai==0.15.8
+    # via instructor (pyproject.toml)
+frozenlist==1.5.0
+    # via
+    #   aiohttp
+    #   aiosignal
+fsspec==2024.10.0
+    # via huggingface-hub
+google-ai-generativelanguage==0.6.10
+    # via google-generativeai
+google-api-core==2.23.0
+    # via
+    #   google-ai-generativelanguage
+    #   google-api-python-client
+    #   google-cloud-aiplatform
+    #   google-cloud-bigquery
+    #   google-cloud-core
+    #   google-cloud-resource-manager
+    #   google-cloud-storage
+    #   google-generativeai
+google-api-python-client==2.154.0
+    # via google-generativeai
+google-auth==2.36.0
+    # via
+    #   google-ai-generativelanguage
+    #   google-api-core
+    #   google-api-python-client
+    #   google-auth-httplib2
+    #   google-cloud-aiplatform
+    #   google-cloud-bigquery
+    #   google-cloud-core
+    #   google-cloud-resource-manager
+    #   google-cloud-storage
+    #   google-generativeai
+google-auth-httplib2==0.2.0
+    # via google-api-python-client
+google-cloud-aiplatform==1.73.0
+    # via instructor (pyproject.toml)
+google-cloud-bigquery==3.27.0
+    # via google-cloud-aiplatform
+google-cloud-core==2.4.1
+    # via
+    #   google-cloud-bigquery
+    #   google-cloud-storage
+google-cloud-resource-manager==1.13.1
+    # via google-cloud-aiplatform
+google-cloud-storage==2.18.2
+    # via google-cloud-aiplatform
+google-crc32c==1.6.0
+    # via
+    #   google-cloud-storage
+    #   google-resumable-media
+google-generativeai==0.8.3
+    # via instructor (pyproject.toml)
+google-resumable-media==2.7.2
+    # via
+    #   google-cloud-bigquery
+    #   google-cloud-storage
+googleapis-common-protos==1.66.0
+    # via
+    #   google-api-core
+    #   grpc-google-iam-v1
+    #   grpcio-status
+groq==0.11.0
+    # via instructor (pyproject.toml)
+grpc-google-iam-v1==0.13.1
+    # via google-cloud-resource-manager
+grpcio==1.68.0
+    # via
+    #   google-api-core
+    #   googleapis-common-protos
+    #   grpc-google-iam-v1
+    #   grpcio-status
+grpcio-status==1.68.0
+    # via google-api-core
+h11==0.14.0
+    # via
+    #   httpcore
+    #   wsproto
+httpcore==1.0.7
+    # via
+    #   httpx
+    #   httpx-ws
+httplib2==0.22.0
+    # via
+    #   google-api-python-client
+    #   google-auth-httplib2
+httpx==0.27.2
+    # via
+    #   anthropic
+    #   cerebras-cloud-sdk
+    #   cohere
+    #   fireworks-ai
+    #   groq
+    #   httpx-ws
+    #   mistralai
+    #   openai
+httpx-sse==0.4.0
+    # via
+    #   cohere
+    #   fireworks-ai
+httpx-ws==0.6.2
+    # via fireworks-ai
+huggingface-hub==0.26.2
+    # via tokenizers
+idna==3.10
+    # via
+    #   anyio
+    #   httpx
+    #   requests
+    #   yarl
+importlib-metadata==8.5.0
+    # via litellm
+jinja2==3.1.4
+    # via
+    #   instructor (pyproject.toml)
+    #   litellm
+jiter==0.6.1
+    # via
+    #   instructor (pyproject.toml)
+    #   anthropic
+    #   openai
+jsonpath-python==1.0.6
+    # via mistralai
+jsonref==1.1.0
+    # via instructor (pyproject.toml)
+jsonschema==4.23.0
+    # via litellm
+jsonschema-specifications==2024.10.1
+    # via jsonschema
+litellm==1.52.14
+    # via instructor (pyproject.toml)
+markdown-it-py==3.0.0
+    # via rich
+markupsafe==3.0.2
+    # via jinja2
+mdurl==0.1.2
+    # via markdown-it-py
+mistralai==1.2.3
+    # via instructor (pyproject.toml)
+multidict==6.1.0
+    # via
+    #   aiohttp
+    #   yarl
+mypy-extensions==1.0.0
+    # via typing-inspect
+numpy==2.1.3
+    # via
+    #   pandas
+    #   shapely
+openai==1.55.0
+    # via
+    #   instructor (pyproject.toml)
+    #   litellm
+packaging==24.2
+    # via
+    #   google-cloud-aiplatform
+    #   google-cloud-bigquery
+    #   huggingface-hub
+pandas==2.2.3
+    # via instructor (pyproject.toml)
+parameterized==0.9.0
+    # via cohere
+pillow==11.0.0
+    # via fireworks-ai
+propcache==0.2.0
+    # via
+    #   aiohttp
+    #   yarl
+proto-plus==1.25.0
+    # via
+    #   google-ai-generativelanguage
+    #   google-api-core
+    #   google-cloud-aiplatform
+    #   google-cloud-resource-manager
+protobuf==5.28.3
+    # via
+    #   google-ai-generativelanguage
+    #   google-api-core
+    #   google-cloud-aiplatform
+    #   google-cloud-resource-manager
+    #   google-generativeai
+    #   googleapis-common-protos
+    #   grpc-google-iam-v1
+    #   grpcio-status
+    #   proto-plus
+pyasn1==0.6.1
+    # via
+    #   pyasn1-modules
+    #   rsa
+pyasn1-modules==0.4.1
+    # via google-auth
+pydantic==2.10.1
+    # via
+    #   instructor (pyproject.toml)
+    #   anthropic
+    #   cerebras-cloud-sdk
+    #   cohere
+    #   fastapi
+    #   fireworks-ai
+    #   google-cloud-aiplatform
+    #   google-generativeai
+    #   groq
+    #   litellm
+    #   mistralai
+    #   openai
+    #   pydantic-extra-types
+pydantic-core==2.27.1
+    # via
+    #   instructor (pyproject.toml)
+    #   cohere
+    #   pydantic
+pydantic-extra-types==2.10.0
+    # via instructor (pyproject.toml)
+pygments==2.18.0
+    # via rich
+pyparsing==3.2.0
+    # via httplib2
+python-dateutil==2.8.2
+    # via
+    #   google-cloud-bigquery
+    #   mistralai
+    #   pandas
+python-dotenv==1.0.1
+    # via litellm
+pytz==2024.2
+    # via pandas
+pyyaml==6.0.2
+    # via huggingface-hub
+redis==5.2.0
+    # via instructor (pyproject.toml)
+referencing==0.35.1
+    # via
+    #   jsonschema
+    #   jsonschema-specifications
+regex==2024.11.6
+    # via tiktoken
+requests==2.32.3
+    # via
+    #   instructor (pyproject.toml)
+    #   cohere
+    #   google-api-core
+    #   google-cloud-bigquery
+    #   google-cloud-storage
+    #   huggingface-hub
+    #   litellm
+    #   tiktoken
+rich==13.9.4
+    # via
+    #   instructor (pyproject.toml)
+    #   typer
+rpds-py==0.21.0
+    # via
+    #   jsonschema
+    #   referencing
+rsa==4.9
+    # via google-auth
+shapely==2.0.6
+    # via google-cloud-aiplatform
+shellingham==1.5.4
+    # via typer
+six==1.16.0
+    # via python-dateutil
+sniffio==1.3.1
+    # via
+    #   anthropic
+    #   anyio
+    #   cerebras-cloud-sdk
+    #   groq
+    #   httpx
+    #   openai
+starlette==0.41.3
+    # via fastapi
+tabulate==0.9.0
+    # via instructor (pyproject.toml)
+tenacity==9.0.0
+    # via instructor (pyproject.toml)
+tiktoken==0.8.0
+    # via litellm
+tokenizers==0.20.3
+    # via
+    #   anthropic
+    #   cohere
+    #   litellm
+tqdm==4.67.0
+    # via
+    #   google-generativeai
+    #   huggingface-hub
+    #   openai
+typer==0.13.1
+    # via instructor (pyproject.toml)
+types-requests==2.32.0.20241016
+    # via cohere
+typing-extensions==4.12.2
+    # via
+    #   anthropic
+    #   cerebras-cloud-sdk
+    #   cohere
+    #   fastapi
+    #   google-generativeai
+    #   groq
+    #   huggingface-hub
+    #   openai
+    #   pydantic
+    #   pydantic-core
+    #   pydantic-extra-types
+    #   typer
+    #   typing-inspect
+typing-inspect==0.9.0
+    # via mistralai
+tzdata==2024.2
+    # via pandas
+uritemplate==4.1.1
+    # via google-api-python-client
+urllib3==2.2.3
+    # via
+    #   requests
+    #   types-requests
+wsproto==1.2.0
+    # via httpx-ws
+xmltodict==0.14.2
+    # via instructor (pyproject.toml)
+yarl==1.18.0
+    # via aiohttp
+zipp==3.21.0
+    # via importlib-metadata
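
For quick reference, a minimal sketch of the contributor setup this patch documents, pieced together from the updated docs/contributing.md and workflow files. It assumes a POSIX shell with curl available; the virtual environment step and the pytest filter are illustrative additions, not part of the patch itself.

```bash
# Install UV (one-time).
curl -LsSf https://astral.sh/uv/install.sh | sh

# Clone and set up the project. uv pip expects an active virtual environment,
# so one is created here first (assumption; the docs omit this step).
git clone https://github.com/jxnl/instructor.git
cd instructor
uv venv && source .venv/bin/activate
uv pip install -e ".[test-docs]"
uv pip install -r requirements-dev.txt
uv pip install -r requirements-doc.txt   # only needed when working on docs

# Quick local check; the exact -k filters used in CI live in the workflow files.
pytest tests/ -k "not llm and not openai"
```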