diff --git a/examples/README.md b/examples/README.md index 81aadd3..4165510 100644 --- a/examples/README.md +++ b/examples/README.md @@ -41,6 +41,16 @@ See [First time set up](../README.sdk.md#First-time-set-up) on how to generate i The Nyx SDK currently supports a variety of [LangChain](https://python.langchain.com)-based LLM specific plugins, including [Cohere](https://cohere.com/) and [OpenAI](https://openai.com/). To use these, you will need to expose the specific API key to your application through environment variables or instantiate the relevant configuration object with the key. +```shell +export OPENAI_API_KEY= +``` +or +```python +from nyx_client.configuration import ConfigProvider, ConfigType + +ConfigProvider.create_config(ConfigType.OPENAI, api_key="your_api_key_here") +``` +or if using Cohere ```shell export COHERE_API_KEY= ``` @@ -59,7 +69,7 @@ These use additional dependencies for an out-of-the-box experience with minimal Example | Summary | Notes --|--|-- -[`highlevel.py::main()`](./high_level/highlevel.py) | Minimal CLI chat prompt, considering all subscribed-to Nyx data. | Defaults to [Cohere](https://cohere.com/) LLM but can easily be changed to use [OpenAI](https://openai.com/) +[`highlevel.py::main()`](./high_level/highlevel.py) | Minimal CLI chat prompt, considering all subscribed-to Nyx data. | Defaults to [OpenAI](https://openai.com/) LLM but can easily be changed to use [Cohere](https://cohere.com/) [`highlevel.py::custom_data()`](./high_level/highlevel.py) | Use a filtered set of data rather than all subscribed-to | [`highlevel.py::include_own_data()`](./high_level/highlevel.py) | Include own data in addition to subscribed-to | [`highlevel.py::custom_openai_llm()`](./high_level/highlevel.py) | Use a custom model instead of the nyx default one for an LLM. 
| This also demonstrates how do specify your own [`BaseChatModel`](https://api.python.langchain.com/en/latest/language_models/langchain_core.language_models.chat_models.BaseChatModel.html), i.e. any LLM provider supporting said LangChain interface. diff --git a/examples/advanced/advanced.ipynb b/examples/advanced/advanced.ipynb index 17c41d2..09173b5 100644 --- a/examples/advanced/advanced.ipynb +++ b/examples/advanced/advanced.ipynb @@ -71,7 +71,8 @@ }, "outputs": [], "source": [ - "# Uninstall any previous version first to ensure up-to-date client. (pip -U would instead update ALL packages). Note: keep the argument quoted for zsh support.\n", + "# Uninstall any previous version first to ensure up-to-date client. (pip -U would instead update ALL packages).\n", + "# Note: keep the argument quoted for zsh support.\n", "!pip uninstall -y nyx-client\n", "!pip install 'nyx-client[langchain-openai]'" ] @@ -126,8 +127,10 @@ } ], "source": [ - "# NOTE: From a CLI you can also just run: nyx-client init (but Jupyter Labs/Notebook don't support stdin via CLI - hence call via Python).\n", + "# NOTE: From a CLI you can also just run: nyx-client init\n", + "# (but Jupyter Labs/Notebook don't support stdin via CLI - hence call via Python).\n", "from nyx_client.cli.init_env import init_env\n", + "\n", "init_env()" ] }, @@ -153,6 +156,7 @@ "source": [ "# Set LLM provider API key (for either Cohere or OpenAI)\n", "from getpass import getpass\n", + "\n", "llm_api_key = getpass(\"Enter your backend-specific API key: \")" ] }, @@ -195,8 +199,8 @@ "from langchain_community.utilities import SQLDatabase\n", "from langchain_openai import ChatOpenAI\n", "\n", - "from nyx_client.configuration import ConfigProvider, ConfigType\n", - "from nyx_client import NyxClient, Parser, Utils" + "from nyx_client import NyxClient, Parser, Utils\n", + "from nyx_client.configuration import ConfigProvider, ConfigType" ] }, { diff --git a/examples/high_level/highlevel.ipynb 
b/examples/high_level/highlevel.ipynb index 593ce1a..73eccdc 100644 --- a/examples/high_level/highlevel.ipynb +++ b/examples/high_level/highlevel.ipynb @@ -50,7 +50,7 @@ "id": "IACaFGOA1ziU" }, "source": [ - "Install the Iotics NYX SDK client (with [Cohere](https://cohere.com/) extension). **Note**: You can can also install the `[langchain-openai]` extra to use OpenAI instead." + "Install the Iotics NYX SDK client (with [OpenAI](https://openai.com/) extension). **Note**: You can also install the `[langchain-cohere]` extra to use Cohere instead." ] }, { @@ -66,9 +66,10 @@ }, "outputs": [], "source": [ - "# Uninstall any previous version first to ensure up-to-date client. (pip -U would instead update ALL packages). Note: keep the argument quoted for zsh support.\n", + "# Uninstall any previous version first to ensure up-to-date client. (pip -U would instead update ALL packages).\n", + "# Note: keep the argument quoted for zsh support.\n", "!pip uninstall -y nyx-client\n", - "!pip install 'nyx-client[langchain-cohere]'" + "!pip install 'nyx-client[langchain-openai]'" ] }, { @@ -115,8 +116,10 @@ } ], "source": [ - "# NOTE: From a CLI you can also just run: nyx-client init (but Jupyter Labs/Notebook don't support stdin via CLI - hence call via Python).\n", + "# NOTE: From a CLI you can also just run: nyx-client init\n", + "# (but Jupyter Labs/Notebook don't support stdin via CLI - hence call via Python).\n", "from nyx_client.cli.init_env import init_env\n", + "\n", "init_env()" ] }, @@ -156,7 +159,7 @@ "id": "4L9D9sjo1dDS" }, "source": [ - "Set up the langchain client, backed by [Cohere](https://cohere.com/). (You can also use e.g. `ConfigType.OPENAI` for an [OpenAI](https://openai.com/)-backed agent.)" + "Set up the langchain client, backed by [OpenAI](https://openai.com/). (You can also use e.g. 
`ConfigType.COHERE` for a [Cohere](https://cohere.com/)-backed agent.)" ] }, { @@ -181,6 +184,7 @@ "source": [ "# Set LLM provider API key (for either Cohere or OpenAI)\n", "from getpass import getpass\n", + "\n", "llm_api_key = getpass(\"Enter your backend-specific API key: \")" ] }, @@ -192,7 +196,7 @@ }, "outputs": [], "source": [ - "config = ConfigProvider.create_config(ConfigType.COHERE, api_key=llm_api_key)\n", + "config = ConfigProvider.create_config(ConfigType.OPENAI, api_key=llm_api_key)\n", "client = NyxLangChain(config=config)" ] }, diff --git a/examples/high_level/highlevel.py b/examples/high_level/highlevel.py index 3f77da7..ab21932 100644 --- a/examples/high_level/highlevel.py +++ b/examples/high_level/highlevel.py @@ -29,8 +29,8 @@ def main(): When instantiating a language model specific config, the relevant API key must be available as an environment variable, or it must be passed in explicitly. """ - # Supply ConfigType.OPENAI to use OpenAI LLM instead - config = ConfigProvider.create_config(ConfigType.COHERE, api_key="your_api_key_here") + # Supply ConfigType.COHERE to use Cohere LLM instead + config = ConfigProvider.create_config(ConfigType.OPENAI, api_key="your_api_key_here") client = NyxLangChain(config=config, log_level=logging.DEBUG) while True: prompt = input("What is your question? ") @@ -46,7 +46,7 @@ def custom_data(): speed up the prompt, by reducing the data, and also prevents the data being downloaded and processed automatically, giving you more control. """ - config = ConfigProvider.create_config(ConfigType.COHERE, api_key="your_api_key_here") + config = ConfigProvider.create_config(ConfigType.OPENAI, api_key="your_api_key_here") client = NyxLangChain(config=config) # Get data with the climate category only @@ -64,7 +64,7 @@ def include_own_data(): """ This displays how to include your own data, created in Nyx, in the query. 
""" - config = ConfigProvider.create_config(ConfigType.COHERE, api_key="your_api_key_here") + config = ConfigProvider.create_config(ConfigType.OPENAI, api_key="your_api_key_here") client = NyxLangChain(config=config) while True: