3 changes: 3 additions & 0 deletions .env.example
@@ -60,5 +60,8 @@ OPENAI_API_KEY=
OPENAI_COMPATIBLE_API_KEY=
OPENAI_COMPATIBLE_BASE_URL=

# Get your API key from: https://bailian.console.aliyun.com/#/home
DASHSCOPE_API_KEY=

SEC_EMAIL=your.name@example.com

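Note: a minimal sketch of how the new `DASHSCOPE_API_KEY` entry would be picked up at runtime — this assumes `python-dotenv` (or an equivalent loader) reads `.env`; the snippet is illustrative and not part of this PR.

```python
import os

from dotenv import load_dotenv  # assumes python-dotenv is installed

load_dotenv()  # load variables from .env in the project root

api_key = os.getenv("DASHSCOPE_API_KEY")
if not api_key:
    raise RuntimeError(
        "DASHSCOPE_API_KEY is not set; add it to .env or export it in your shell"
    )
```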
14 changes: 13 additions & 1 deletion docs/CONFIGURATION_GUIDE.md
@@ -27,6 +27,7 @@ ValueCell supports multiple LLM providers. Choose at least one:
| **SiliconFlow** | [siliconflow.cn](https://www.siliconflow.cn/) |
| **Google** | [ai.google.dev](https://ai.google.dev/) |
| **OpenAI** | [platform.openai.com](https://platform.openai.com/) |
| **DashScope** | [bailian.console.aliyun.com](https://bailian.console.aliyun.com/#/home) |

### Step 2: Configure .env File

@@ -86,6 +87,7 @@ python/
│ ├── providers/
│ │ ├── openrouter.yaml # OpenRouter provider config
│ │ ├── siliconflow.yaml # SiliconFlow provider config
│ │ ├── dashscope.yaml # DashScope (Alibaba Cloud) provider config
│ │ └── other_provider.yaml
│ ├── agents/
│ │ ├── super_agent.yaml # Super Agent configuration
@@ -195,6 +197,9 @@ models:
google:
config_file: "providers/google.yaml"
api_key_env: "GOOGLE_API_KEY"
dashscope:
config_file: "providers/dashscope.yaml"
api_key_env: "DASHSCOPE_API_KEY"

# Agent registry
agents:
@@ -324,7 +329,10 @@ The selection logic is implemented in `python/valuecell/config/manager.py`:
1. OpenRouter
2. SiliconFlow
3. Google
4. Other configured providers
4. OpenAI
5. OpenAI-Compatible
6. Azure
7. Other configured providers (including DashScope, DeepSeek, etc.)

Override this with an environment variable:

@@ -417,6 +425,9 @@ GOOGLE_API_KEY=AIzaSyDxxxxxxxxxxxxx
AZURE_OPENAI_API_KEY=xxxxxxxxxxxxx
AZURE_OPENAI_ENDPOINT=https://xxxxx.openai.azure.com/
OPENAI_API_VERSION=2024-10-21

# DashScope (Alibaba Cloud Qwen3 models)
DASHSCOPE_API_KEY=sk-xxxxxxxxxxxxx
```

### Model Configuration
@@ -462,6 +473,7 @@ AGENT_DEBUG_MODE=true
OPENROUTER_API_KEY=sk-or-v1-xxxxx # Primary: access to many models
SILICONFLOW_API_KEY=sk-xxxxx # Fallback: cost-effective
GOOGLE_API_KEY=AIzaSyD-xxxxx # Second fallback: specialized
DASHSCOPE_API_KEY=sk-xxxxx # DashScope: Qwen3 models (Chinese optimized)

# config.yaml
models:
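Note: the updated priority list above (OpenRouter → SiliconFlow → Google → OpenAI → OpenAI-Compatible → Azure → other configured providers such as DashScope and DeepSeek) can be pictured with a small sketch. This is a hypothetical helper, not the actual logic in `python/valuecell/config/manager.py`:

```python
import os

# Priority order from the guide; DashScope sits in the trailing "other providers" group.
PROVIDER_PRIORITY = [
    ("openrouter", "OPENROUTER_API_KEY"),
    ("siliconflow", "SILICONFLOW_API_KEY"),
    ("google", "GOOGLE_API_KEY"),
    ("openai", "OPENAI_API_KEY"),
    ("openai-compatible", "OPENAI_COMPATIBLE_API_KEY"),
    ("azure", "AZURE_OPENAI_API_KEY"),
    ("dashscope", "DASHSCOPE_API_KEY"),
    ("deepseek", "DEEPSEEK_API_KEY"),
]


def pick_default_provider() -> str | None:
    """Return the first provider whose API key is present in the environment."""
    for name, env_var in PROVIDER_PRIORITY:
        if os.getenv(env_var):
            return name
    return None
```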
1 change: 1 addition & 0 deletions frontend/src/assets/png/index.ts
@@ -32,6 +32,7 @@ export { default as OkxPng } from "./exchanges/okx.png";
export { default as IconGroupPng } from "./icon-group.png";
export { default as MessageGroupPng } from "./message-group.png";
export { default as AzurePng } from "./model-providers/azure.png";
export { default as DashScopePng } from "./model-providers/dashscope.png";
export { default as DeepSeekPng } from "./model-providers/deepseek.png";
export { default as GooglePng } from "./model-providers/google.png";
export { default as OpenAiPng } from "./model-providers/openai.png";
(Binary file not shown — likely the new DashScope provider icon referenced by `frontend/src/assets/png/index.ts`.)
2 changes: 2 additions & 0 deletions frontend/src/constants/icons.ts
@@ -3,6 +3,7 @@ import {
BinancePng,
BlockchainPng,
CoinbasePng,
DashScopePng,
DeepSeekPng,
GatePng,
GooglePng,
@@ -23,6 +24,7 @@ export const MODEL_PROVIDER_ICONS = {
deepseek: DeepSeekPng,
google: GooglePng,
azure: AzurePng,
dashscope: DashScopePng,
};

export const EXCHANGE_ICONS = {
4 changes: 4 additions & 0 deletions python/configs/config.yaml
@@ -46,6 +46,10 @@ models:
deepseek:
config_file: "providers/deepseek.yaml"
api_key_env: "DEEPSEEK_API_KEY"

dashscope:
config_file: "providers/dashscope.yaml"
api_key_env: "DASHSCOPE_API_KEY"

# Agent Configuration
agents:
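Note: an illustrative sketch of how a registry entry like the new `dashscope` block could be resolved into a provider configuration. The config shape and working directory are assumptions; the project's actual loader lives under `python/valuecell/config/`.

```python
import os
from pathlib import Path

import yaml  # assumes PyYAML is available

CONFIG_DIR = Path("python/configs")  # assumed to be resolved relative to the repo root


def load_provider(entry: dict) -> dict:
    """Resolve a `models:` registry entry into its YAML config plus API key."""
    config = yaml.safe_load((CONFIG_DIR / entry["config_file"]).read_text())
    config["api_key"] = os.getenv(entry["api_key_env"])  # e.g. DASHSCOPE_API_KEY
    return config


dashscope_cfg = load_provider(
    {"config_file": "providers/dashscope.yaml", "api_key_env": "DASHSCOPE_API_KEY"}
)
```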
85 changes: 85 additions & 0 deletions python/configs/providers/dashscope.yaml
@@ -0,0 +1,85 @@
# ============================================
# DashScope Provider Configuration
# ============================================
# DashScope (Alibaba Cloud Bailian) exposes an OpenAI-compatible API for Qwen3 models.
# Configure the API key via DASHSCOPE_API_KEY or override using env vars at runtime.

name: DashScope
provider_type: dashscope
enabled: true

# Connection parameters for DashScope compatible-mode endpoint.
connection:
base_url: https://dashscope.aliyuncs.com/compatible-mode/v1
api_key_env: DASHSCOPE_API_KEY

# Default chat model used when no model_id is specified.
default_model: qwen3-max

# Global default inference parameters.
defaults:
temperature: 0.7
max_tokens: 16384

# Commonly used Qwen3 models available via DashScope.
models:
- id: qwen3-max
name: Qwen3 Max
context_length: 256000
max_output_tokens: 16384
description: Qwen3 Max model with strongest performance
supported_inputs:
- text
supported_outputs:
- text

- id: qwen3-max-preview
name: Qwen3 Max Preview
context_length: 256000
max_output_tokens: 16384
description: Qwen3 Max preview model
supported_inputs:
- text
supported_outputs:
- text

- id: qwen-plus
name: Qwen Plus
context_length: 256000
max_output_tokens: 16384
description: Qwen Plus model with balanced performance
supported_inputs:
- text
supported_outputs:
- text

- id: qwen-flash
name: Qwen Flash
context_length: 256000
max_output_tokens: 16384
description: Qwen Flash model optimized for fast response
supported_inputs:
- text
supported_outputs:
- text

# Embedding configuration for DashScope text embedding models.
embedding:
default_model: text-embedding-v4

defaults:
dimensions: 2048
encoding_format: "float"

models:
- id: text-embedding-v4
name: Text Embedding V4
dimensions: 2048
max_input: 8192
description: DashScope text embedding v4 model (latest)

- id: text-embedding-v3
name: Text Embedding V3
dimensions: 1024
max_input: 8192
description: DashScope text embedding v3 model
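Note: because the `connection` block points at DashScope's OpenAI-compatible endpoint, any OpenAI-style client can exercise the new provider directly. A hedged smoke-test sketch (the official `openai` Python SDK is assumed; model id and prompt are placeholders):

```python
import os

from openai import OpenAI  # assumes the official openai SDK is installed

client = OpenAI(
    api_key=os.environ["DASHSCOPE_API_KEY"],
    base_url="https://dashscope.aliyuncs.com/compatible-mode/v1",
)

response = client.chat.completions.create(
    model="qwen3-max",  # default chat model from dashscope.yaml
    messages=[{"role": "user", "content": "Reply with 'ok' if you can read this."}],
)
print(response.choices[0].message.content)
```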
2 changes: 2 additions & 0 deletions python/valuecell/adapters/models/__init__.py
@@ -21,6 +21,7 @@

from valuecell.adapters.models.factory import (
AzureProvider,
DashScopeProvider,
DeepSeekProvider,
GoogleProvider,
ModelFactory,
@@ -47,6 +48,7 @@
"AzureProvider",
"SiliconFlowProvider",
"DeepSeekProvider",
"DashScopeProvider",
# Convenience functions
"create_model",
"create_model_for_agent",
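Note: with the added export, `DashScopeProvider` is importable from the package root alongside the existing convenience helpers. The call below is hypothetical — `create_model`'s exact signature is not shown in this diff:

```python
from valuecell.adapters.models import DashScopeProvider, create_model

model = create_model("qwen3-max")  # hypothetical usage of the convenience helper
```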
63 changes: 63 additions & 0 deletions python/valuecell/adapters/models/factory.py
@@ -503,6 +503,68 @@ def create_model(self, model_id: Optional[str] = None, **kwargs):
)


class DashScopeProvider(ModelProvider):
"""DashScope model provider (native)"""

def create_model(self, model_id: Optional[str] = None, **kwargs):
"""Create DashScope model via agno (native)"""
try:
from agno.models.dashscope import DashScope
except ImportError:
raise ImportError(
"agno package not installed. Install with: pip install agno"
)

model_id = model_id or self.config.default_model
params = {**self.config.parameters, **kwargs}

# Prefer native endpoint; ignore compatible-mode base_url if present
base_url = self.config.base_url
if base_url and "compatible-mode" in base_url:
base_url = None

logger.info(f"Creating DashScope (native) model: {model_id}")

return DashScope(
id=model_id,
api_key=self.config.api_key,
base_url=base_url,
temperature=params.get("temperature"),
max_tokens=params.get("max_tokens"),
top_p=params.get("top_p"),
)

def create_embedder(self, model_id: Optional[str] = None, **kwargs):
"""Create embedder via DashScope (OpenAI-compatible)"""
try:
from agno.knowledge.embedder.openai import OpenAIEmbedder
except ImportError:
raise ImportError("agno package not installed")

# Use provided model_id or default embedding model
model_id = model_id or self.config.default_embedding_model

if not model_id:
raise ValueError(
f"No embedding model specified for provider '{self.config.name}'"
)

# Merge parameters: provider embedding defaults < kwargs
params = {**self.config.embedding_parameters, **kwargs}

logger.info(f"Creating DashScope embedder: {model_id}")

return OpenAIEmbedder(
id=model_id,
api_key=self.config.api_key,
base_url=self.config.base_url,
dimensions=int(params.get("dimensions", 2048))
if params.get("dimensions")
else None,
encoding_format=params.get("encoding_format", "float"),
)


class ModelFactory:
"""
Factory for creating model instances with provider abstraction
@@ -523,6 +585,7 @@ class ModelFactory:
"openai": OpenAIProvider,
"openai-compatible": OpenAICompatibleProvider,
"deepseek": DeepSeekProvider,
"dashscope": DashScopeProvider,
}

def __init__(self, config_manager: Optional[ConfigManager] = None):
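Note: for chat, the new provider deliberately drops a `compatible-mode` `base_url` so agno's native `DashScope` client talks to its own default endpoint, while embeddings are created with `OpenAIEmbedder` against the compatible-mode URL. A hedged usage sketch — the `ModelProvider` constructor and the real config type are not shown in this diff, so a stand-in config object is used purely for illustration (requires `agno` to be installed):

```python
from types import SimpleNamespace

from valuecell.adapters.models import DashScopeProvider

# Stand-in config exposing only the attributes the provider reads; the real
# provider config type and constructor signature are assumptions here.
fake_config = SimpleNamespace(
    name="DashScope",
    api_key="sk-...",
    base_url="https://dashscope.aliyuncs.com/compatible-mode/v1",
    default_model="qwen3-max",
    parameters={"temperature": 0.7, "max_tokens": 16384},
    default_embedding_model="text-embedding-v4",
    embedding_parameters={"dimensions": 2048, "encoding_format": "float"},
)

provider = DashScopeProvider(fake_config)  # assumed constructor signature
chat_model = provider.create_model()       # agno DashScope, native endpoint
embedder = provider.create_embedder()      # OpenAIEmbedder, compatible-mode URL
```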
1 change: 1 addition & 0 deletions python/valuecell/server/api/routers/models.py
@@ -131,6 +131,7 @@ def _api_key_url_for(provider: str) -> str | None:
"azure": "https://azure.microsoft.com/en-us/products/ai-foundry/models/openai/",
"siliconflow": "https://cloud.siliconflow.cn/account/ak",
"deepseek": "https://platform.deepseek.com/api_keys",
"dashscope": "https://bailian.console.aliyun.com/#/home",
}
return mapping.get(provider)

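Note: a quick sanity check of the new mapping entry, assuming `_api_key_url_for` is importable from the router module in a test context:

```python
from valuecell.server.api.routers.models import _api_key_url_for


def test_dashscope_api_key_url() -> None:
    assert _api_key_url_for("dashscope") == "https://bailian.console.aliyun.com/#/home"
    assert _api_key_url_for("unknown-provider") is None
```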