From ffba6ad832d37241491110f336b40f32a829ecf4 Mon Sep 17 00:00:00 2001
From: hazeone <709547807@qq.com>
Date: Fri, 31 Oct 2025 14:54:16 +0800
Subject: [PATCH 1/2] add openai compatible provider

---
 .env.example | 7 +-
 python/configs/config.yaml | 4 +
 .../configs/providers/openai-compatible.yaml | 72 ++++++++++++++++++
 python/valuecell/adapters/models/factory.py | 73 +++++++++++++++++++
 python/valuecell/config/manager.py | 1 +
 5 files changed, 156 insertions(+), 1 deletion(-)
 create mode 100644 python/configs/providers/openai-compatible.yaml

diff --git a/.env.example b/.env.example
index 88419e42e..bdffa3ee8 100644
--- a/.env.example
+++ b/.env.example
@@ -33,8 +33,9 @@ AGENT_DEBUG_MODE=false
 # Model Provider Settings
 # ============================================
 # For more details, see the documentation in the /doc directory.
-# To access full functionality, make sure to include an embedding service.
+# To access full functionality, make sure to include an embedding-capable provider.
 # You can configure multiple providers below.
+# The recommended setup pairs OpenRouter with another provider that supports embeddings.
 
 # Get your API key from: https://openrouter.ai/
 # Note: OpenRouter does not currently support embedding or reranker models.
@@ -53,6 +54,10 @@ SILICONFLOW_API_KEY=
 # Get your API key from: https://platform.openai.com/api-keys
 OPENAI_API_KEY=
 
+# You can use any OpenAI-compatible API key here, but you need to configure python/configs/providers/openai-compatible.yaml manually.
+OPENAI_COMPATIBLE_API_KEY=
+OPENAI_COMPATIBLE_BASE_URL=
+
 
 # ============================================
 # Research Agent Configurations
diff --git a/python/configs/config.yaml b/python/configs/config.yaml
index 454b06283..e652dc147 100644
--- a/python/configs/config.yaml
+++ b/python/configs/config.yaml
@@ -34,6 +34,10 @@ models:
   openai:
     config_file: "providers/openai.yaml"
     api_key_env: "OPENAI_API_KEY"
+
+  openai-compatible:
+    config_file: "providers/openai-compatible.yaml"
+    api_key_env: "OPENAI_COMPATIBLE_API_KEY"
 
 # Agent Configuration
 agents:
diff --git a/python/configs/providers/openai-compatible.yaml b/python/configs/providers/openai-compatible.yaml
new file mode 100644
index 000000000..62ff2ac94
--- /dev/null
+++ b/python/configs/providers/openai-compatible.yaml
@@ -0,0 +1,72 @@
+# ============================================
+# OpenAI Compatible API Provider Configuration
+# ============================================
+# This configuration supports any OpenAI-compatible API, including vLLM, Ollama, Together.ai, and DashScope.
+
+#
+# Usage:
+# 1. Set the OPENAI_COMPATIBLE_BASE_URL environment variable to your API endpoint
+# 2. Set OPENAI_COMPATIBLE_API_KEY if your service requires authentication
+# 3. Set PRIMARY_PROVIDER=openai-compatible
+#
+# Examples:
+#
+# # vLLM on remote server
+# export OPENAI_COMPATIBLE_BASE_URL=http://your-vllm-server:8000/v1
+# export PRIMARY_PROVIDER=openai-compatible
+#
+# # Together.ai
+# export OPENAI_COMPATIBLE_BASE_URL=https://api.together.xyz/v1
+# export OPENAI_COMPATIBLE_API_KEY=your-together-api-key
+# export PRIMARY_PROVIDER=openai-compatible
+
+name: "OpenAI-Compatible"
+provider_type: "openai-compatible"
+
+enabled: true
+
+# Connection Configuration
+connection:
+  # Use environment variable with fallback to local Ollama
+  base_url: "${OPENAI_COMPATIBLE_BASE_URL:http://localhost:11434/v1}"
+  api_key_env: "OPENAI_COMPATIBLE_API_KEY"
+
+# Default model used when agents.yaml does not specify one
+# Example: qwen3-max
+default_model: "qwen3-max"
+
+# Model Parameters Defaults
+defaults:
+  temperature: 0.7
+  max_tokens: 4096
+
+# Available Models (examples - adjust to the model IDs your provider exposes)
+models:
+  - id: "qwen3-max"
+    name: "Qwen3 Max"
+    context_length: 256000
+    description: "Qwen3 Max model"
+
+
+# ============================================
+# Embedding Models Configuration
+# ============================================
+# Note: Embedding support depends on your provider
+
+# Example: text-embedding-v4
+embedding:
+  # Default embedding model
+  default_model: "text-embedding-v4"
+
+  # Default parameters
+  defaults:
+    dimensions: 2048
+    encoding_format: "float"
+
+  # Available embedding models
+  models:
+    - id: "text-embedding-v4"
+      name: "Text Embedding V4"
+      dimensions: 2048
+      max_input: 8192
+      description: "Text Embedding V4 model"
diff --git a/python/valuecell/adapters/models/factory.py b/python/valuecell/adapters/models/factory.py
index 15a838ae9..917259d04 100644
--- a/python/valuecell/adapters/models/factory.py
+++ b/python/valuecell/adapters/models/factory.py
@@ -272,6 +272,7 @@ def create_model(self, model_id: Optional[str] = None, **kwargs):
         return OpenAIChat(
             id=model_id,
             api_key=self.config.api_key,
+            base_url=self.config.base_url,
             temperature=params.get("temperature"),
             max_tokens=params.get("max_tokens"),
             top_p=params.get("top_p"),
@@ -302,6 +303,7 @@ def create_embedder(self, model_id: Optional[str] = None, **kwargs):
         return OpenAIEmbedder(
             id=model_id,
             api_key=self.config.api_key,
+            base_url=self.config.base_url,
             dimensions=int(params.get("dimensions", 1536))
             if params.get("dimensions")
             else None,
@@ -309,6 +311,76 @@ def create_embedder(self, model_id: Optional[str] = None, **kwargs):
     )
 
 
+class OpenAICompatibleProvider(ModelProvider):
+    """OpenAI-compatible model provider
+
+    This provider targets OpenAI-compatible APIs (like DashScope, vLLM, etc.) that may not
+    support newer OpenAI features such as the 'developer' role. It builds on agno's
+    OpenAILike model, which speaks the OpenAI wire format against a configurable base_url.
+    """
+
+    def create_model(self, model_id: Optional[str] = None, **kwargs):
+        """Create an OpenAI-compatible chat model via agno's OpenAILike"""
+        try:
+            from agno.models.openai import OpenAILike
+        except ImportError:
+            raise ImportError(
+                "agno package not installed. 
Install with: pip install agno" + ) + + model_id = model_id or self.config.default_model + params = {**self.config.parameters, **kwargs} + + logger.info( + f"Creating OpenAI-compatible model: {model_id} (base_url: {self.config.base_url})" + ) + + # Create the base OpenAILike model + return OpenAILike( + id=model_id, + api_key=self.config.api_key, + base_url=self.config.base_url, + temperature=params.get("temperature"), + max_tokens=params.get("max_tokens"), + top_p=params.get("top_p"), + frequency_penalty=params.get("frequency_penalty"), + presence_penalty=params.get("presence_penalty"), + ) + + + def create_embedder(self, model_id: Optional[str] = None, **kwargs): + """Create embedder via OpenAI-compatible API""" + try: + from agno.knowledge.embedder.openai import OpenAIEmbedder + except ImportError: + raise ImportError("agno package not installed") + + # Use provided model_id or default embedding model + model_id = model_id or self.config.default_embedding_model + + if not model_id: + raise ValueError( + f"No embedding model specified for provider '{self.config.name}'" + ) + + # Merge parameters: provider embedding defaults < kwargs + params = {**self.config.embedding_parameters, **kwargs} + + logger.info(f"Creating OpenAI-compatible embedder: {model_id}") + + return OpenAIEmbedder( + id=model_id, + api_key=self.config.api_key, + base_url=self.config.base_url, + dimensions=int(params.get("dimensions", 1024)), + encoding_format=params.get("encoding_format"), + ) + + def is_available(self) -> bool: + """Check if provider is available (needs both API key and base URL)""" + return bool(self.config.api_key and self.config.base_url) + + class ModelFactory: """ Factory for creating model instances with provider abstraction @@ -327,6 +399,7 @@ class ModelFactory: "azure": AzureProvider, "siliconflow": SiliconFlowProvider, "openai": OpenAIProvider, + "openai-compatible": OpenAICompatibleProvider, } def __init__(self, config_manager: Optional[ConfigManager] = None): diff --git a/python/valuecell/config/manager.py b/python/valuecell/config/manager.py index aece7154e..42333162d 100644 --- a/python/valuecell/config/manager.py +++ b/python/valuecell/config/manager.py @@ -133,6 +133,7 @@ def primary_provider(self) -> str: "siliconflow", "google", "openai", + "openai-compatible", ] for preferred in preferred_order: From 6a5642e52c8bdac21ad6289989ad07f2ad0b0eb7 Mon Sep 17 00:00:00 2001 From: hazeone <709547807@qq.com> Date: Fri, 31 Oct 2025 14:54:30 +0800 Subject: [PATCH 2/2] format --- python/valuecell/adapters/models/factory.py | 1 - 1 file changed, 1 deletion(-) diff --git a/python/valuecell/adapters/models/factory.py b/python/valuecell/adapters/models/factory.py index 917259d04..69f499338 100644 --- a/python/valuecell/adapters/models/factory.py +++ b/python/valuecell/adapters/models/factory.py @@ -347,7 +347,6 @@ def create_model(self, model_id: Optional[str] = None, **kwargs): presence_penalty=params.get("presence_penalty"), ) - def create_embedder(self, model_id: Optional[str] = None, **kwargs): """Create embedder via OpenAI-compatible API""" try:
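
Note on trying out the new provider: the sketch below drives OpenAICompatibleProvider directly, outside the ConfigManager wiring. The ModelProvider base class and the real provider-config schema are not part of this series, so a SimpleNamespace carrying only the attributes the new code reads stands in for the config, and the constructor call OpenAICompatibleProvider(config) is an assumption rather than confirmed API.

    import os
    from types import SimpleNamespace

    from valuecell.adapters.models.factory import OpenAICompatibleProvider

    # Stand-in for the real provider config; only the attributes that the
    # new provider actually reads in this patch are populated.
    config = SimpleNamespace(
        name="OpenAI-Compatible",
        api_key=os.getenv("OPENAI_COMPATIBLE_API_KEY", ""),
        base_url=os.getenv("OPENAI_COMPATIBLE_BASE_URL", "http://localhost:11434/v1"),
        default_model="qwen3-max",
        parameters={"temperature": 0.7, "max_tokens": 4096},
        default_embedding_model="text-embedding-v4",
        embedding_parameters={"dimensions": 2048, "encoding_format": "float"},
    )

    # Assumption: ModelProvider.__init__ accepts the config object directly.
    provider = OpenAICompatibleProvider(config)

    if provider.is_available():  # requires both an API key and a base URL
        chat_model = provider.create_model()   # agno OpenAILike pointed at base_url
        embedder = provider.create_embedder()  # agno OpenAIEmbedder with dimensions=2048

If the real provider-config class differs, only the config construction above changes; the create_model/create_embedder calls match the methods added in this patch.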