Skip to content

Commit f76e513

Browse files
maang-holgamurraft
authored and committed
docs: Standardize Tongyi (langchain-ai#25103)
- **Description:** Standardize Tongyi LLM, including: docs (issue langchain-ai#24803) and model init arg names (issue langchain-ai#20085)
1 parent e76a13d commit f76e513

File tree

2 files changed

+86
-10
lines changed

2 files changed

+86
-10
lines changed

libs/community/langchain_community/llms/tongyi.py

Lines changed: 79 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -158,33 +158,102 @@ async def agenerate_with_last_element_mark(
158158

159159

160160
class Tongyi(BaseLLM):
161-
"""Tongyi Qwen large language models.
161+
"""Tongyi completion model integration.
162162
163-
To use, you should have the ``dashscope`` python package installed, and the
164-
environment variable ``DASHSCOPE_API_KEY`` set with your API key, or pass
165-
it as a named parameter to the constructor.
163+
Setup:
164+
Install ``dashscope`` and set the environment variable ``DASHSCOPE_API_KEY``.
166165
167-
Example:
166+
.. code-block:: bash
167+
168+
pip install dashscope
169+
export DASHSCOPE_API_KEY="your-api-key"
170+
171+
Key init args — completion params:
172+
model: str
173+
Name of Tongyi model to use.
174+
top_p: float
175+
Total probability mass of tokens to consider at each step.
176+
streaming: bool
177+
Whether to stream the results or not.
178+
179+
Key init args — client params:
180+
api_key: Optional[str]
181+
Dashscope API KEY. If not passed in will be read from env var DASHSCOPE_API_KEY.
182+
max_retries: int
183+
Maximum number of retries to make when generating.
184+
185+
See full list of supported init args and their descriptions in the params section.
186+
187+
Instantiate:
168188
.. code-block:: python
169189
170190
from langchain_community.llms import Tongyi
171-
tongyi = tongyi()
172-
"""
191+
192+
llm = Tongyi(
193+
model="qwen-max",
194+
# top_p="...",
195+
# api_key="...",
196+
# other params...
197+
)
198+
199+
Invoke:
200+
.. code-block:: python
201+
202+
messages = [
203+
("system", "你是一名专业的翻译家,可以将用户的中文翻译为英文。"),
204+
("human", "我喜欢编程。"),
205+
]
206+
llm.invoke(messages)
207+
208+
.. code-block:: python
209+
210+
'I enjoy programming.'
211+
212+
Stream:
213+
.. code-block:: python
214+
215+
for chunk in llm.stream(messages):
216+
print(chunk)
217+
218+
.. code-block:: python
219+
220+
I
221+
enjoy
222+
programming
223+
.
224+
225+
Async:
226+
.. code-block:: python
227+
228+
await llm.ainvoke(messages)
229+
230+
# stream:
231+
# async for chunk in llm.astream(messages):
232+
# print(chunk)
233+
234+
# batch:
235+
# await llm.abatch([messages])
236+
237+
.. code-block:: python
238+
239+
'I enjoy programming.'
240+
241+
""" # noqa: E501
173242

174243
@property
175244
def lc_secrets(self) -> Dict[str, str]:
176245
return {"dashscope_api_key": "DASHSCOPE_API_KEY"}
177246

178247
client: Any #: :meta private:
179-
model_name: str = "qwen-plus"
248+
model_name: str = Field(default="qwen-plus", alias="model")
180249

181250
"""Model name to use."""
182251
model_kwargs: Dict[str, Any] = Field(default_factory=dict)
183252

184253
top_p: float = 0.8
185254
"""Total probability mass of tokens to consider at each step."""
186255

187-
dashscope_api_key: Optional[str] = None
256+
dashscope_api_key: Optional[str] = Field(default=None, alias="api_key")
188257
"""Dashscope api key provide by Alibaba Cloud."""
189258

190259
streaming: bool = False
@@ -202,7 +271,7 @@ def _llm_type(self) -> str:
202271
def validate_environment(cls, values: Dict) -> Dict:
203272
"""Validate that api key and python package exists in environment."""
204273
values["dashscope_api_key"] = get_from_dict_or_env(
205-
values, "dashscope_api_key", "DASHSCOPE_API_KEY"
274+
values, ["dashscope_api_key", "api_key"], "DASHSCOPE_API_KEY"
206275
)
207276
try:
208277
import dashscope

libs/community/tests/integration_tests/llms/test_tongyi.py

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -27,3 +27,10 @@ def test_tongyi_generate_stream() -> None:
2727
print(output) # noqa: T201
2828
assert isinstance(output, LLMResult)
2929
assert isinstance(output.generations, list)
30+
31+
32+
def test_tongyi_with_param_alias() -> None:
33+
"""Test tongyi parameters alias"""
34+
llm = Tongyi(model="qwen-max", api_key="your-api_key") # type: ignore[call-arg]
35+
assert llm.model_name == "qwen-max"
36+
assert llm.dashscope_api_key == "your-api_key"

0 commit comments

Comments
 (0)