
Commit 0ba125c

docs: Standardize QianfanLLMEndpoint LLM (#25139)
- **Description:** Standardize QianfanLLMEndpoint LLM, including:
  - docs, for issue #24803
  - model init arg names, for issue #20085
1 parent 28e0958 commit 0ba125c
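As orientation for the diff below: the alias changes mean the constructor can now be called with the standardized argument names used by other providers. A minimal sketch based on the new integration test and docstring example (credentials are placeholders; the qianfan package must be installed):

    from langchain_community.llms import QianfanLLMEndpoint

    # Standardized names (api_key, secret_key, timeout) map onto the original
    # fields (qianfan_ak, qianfan_sk, request_timeout) via the new aliases.
    llm = QianfanLLMEndpoint(
        model="ERNIE-3.5-8K",
        api_key="your-api-key",
        secret_key="your-secret-key",
        timeout=60,
    )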

File tree

2 files changed: +110 −17 lines

libs/community/langchain_community/llms/baidu_qianfan_endpoint.py

Lines changed: 96 additions & 16 deletions
@@ -23,22 +23,102 @@


 class QianfanLLMEndpoint(LLM):
-    """Baidu Qianfan hosted open source or customized models.
+    """Baidu Qianfan completion model integration.
+
+    Setup:
+        Install ``qianfan`` and set environment variables ``QIANFAN_AK``, ``QIANFAN_SK``.
+
+        .. code-block:: bash
+
+            pip install qianfan
+            export QIANFAN_AK="your-api-key"
+            export QIANFAN_SK="your-secret_key"
+
+    Key init args — completion params:
+        model: str
+            Name of Qianfan model to use.
+        temperature: Optional[float]
+            Sampling temperature.
+        endpoint: Optional[str]
+            Endpoint of the Qianfan LLM
+        top_p: Optional[float]
+            What probability mass to use.
+
+    Key init args — client params:
+        timeout: Optional[int]
+            Timeout for requests.
+        api_key: Optional[str]
+            Qianfan API KEY. If not passed in will be read from env var QIANFAN_AK.
+        secret_key: Optional[str]
+            Qianfan SECRET KEY. If not passed in will be read from env var QIANFAN_SK.
+
+    See full list of supported init args and their descriptions in the params section.
+
+    Instantiate:
+        .. code-block:: python

-    To use, you should have the ``qianfan`` python package installed, and
-    the environment variable ``qianfan_ak`` and ``qianfan_sk`` set with
-    your API key and Secret Key.
+            from langchain_community.llms import QianfanLLMEndpoint

-    ak, sk are required parameters which you could get from
-    https://cloud.baidu.com/product/wenxinworkshop
+            llm = QianfanLLMEndpoint(
+                model="ERNIE-3.5-8K",
+                # api_key="...",
+                # secret_key="...",
+                # other params...
+            )

-    Example:
+    Invoke:
         .. code-block:: python

-            from langchain_community.llms import QianfanLLMEndpoint
-            qianfan_model = QianfanLLMEndpoint(model="ERNIE-Bot",
-                endpoint="your_endpoint", qianfan_ak="your_ak", qianfan_sk="your_sk")
-    """
+            messages = [
+                ("system", "你是一名专业的翻译家,可以将用户的中文翻译为英文。"),
+                ("human", "我喜欢编程。"),
+            ]
+            llm.invoke(messages)
+
+        .. code-block:: python
+
+            'I like programming.'
+
+    Stream:
+        .. code-block:: python
+
+            for chunk in llm.stream(messages):
+                print(chunk)
+
+        .. code-block:: python
+
+            I like
+            programming.
+
+        .. code-block:: python
+
+            stream = llm.stream(messages)
+            full = next(stream)
+            for chunk in stream:
+                full += chunk
+            full
+
+        .. code-block::
+
+            'I like programming.'
+
+    Async:
+        .. code-block:: python
+
+            await llm.ainvoke(messages)
+
+            # stream:
+            # async for chunk in llm.astream(messages):
+            #    print(chunk)
+
+            # batch:
+            # await llm.abatch([messages])
+
+        .. code-block:: python
+
+            'I like programming.'
+
+    """  # noqa: E501

     init_kwargs: Dict[str, Any] = Field(default_factory=dict)
     """init kwargs for qianfan client init, such as `query_per_second` which is
@@ -49,8 +129,8 @@ class QianfanLLMEndpoint(LLM):

     client: Any

-    qianfan_ak: Optional[SecretStr] = None
-    qianfan_sk: Optional[SecretStr] = None
+    qianfan_ak: Optional[SecretStr] = Field(default=None, alias="api_key")
+    qianfan_sk: Optional[SecretStr] = Field(default=None, alias="secret_key")

     streaming: Optional[bool] = False
     """Whether to stream the results or not."""
@@ -68,7 +148,7 @@ class QianfanLLMEndpoint(LLM):
     endpoint: Optional[str] = None
     """Endpoint of the Qianfan LLM, required if custom model used."""

-    request_timeout: Optional[int] = 60
+    request_timeout: Optional[int] = Field(default=60, alias="timeout")
     """request timeout for chat http requests"""

     top_p: Optional[float] = 0.8
@@ -83,15 +163,15 @@ def validate_environment(cls, values: Dict) -> Dict:
         values["qianfan_ak"] = convert_to_secret_str(
             get_from_dict_or_env(
                 values,
-                "qianfan_ak",
+                ["qianfan_ak", "api_key"],
                 "QIANFAN_AK",
                 default="",
             )
         )
         values["qianfan_sk"] = convert_to_secret_str(
             get_from_dict_or_env(
                 values,
-                "qianfan_sk",
+                ["qianfan_sk", "secret_key"],
                 "QIANFAN_SK",
                 default="",
             )
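Passing a list of keys to get_from_dict_or_env means each constructor kwarg is tried in order before the environment variable and then the default, so both the legacy and standardized names resolve to the same secret. A simplified sketch of that lookup order (an illustration, not the library implementation):

    import os
    from typing import Dict, List, Optional

    def lookup_first_match(
        values: Dict, keys: List[str], env_key: str, default: Optional[str] = None
    ) -> str:
        # Try each dict key in order, e.g. "qianfan_ak" then its alias "api_key".
        for key in keys:
            if values.get(key):
                return values[key]
        # Fall back to the environment variable, then to the default.
        if os.environ.get(env_key):
            return os.environ[env_key]
        if default is not None:
            return default
        raise ValueError(f"Did not find any of {keys}; set {env_key} instead.")

    # lookup_first_match({"api_key": "ak"}, ["qianfan_ak", "api_key"], "QIANFAN_AK", default="")
    # returns "ak"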

libs/community/tests/integration_tests/llms/test_qianfan_endpoint.py

Lines changed: 14 additions & 1 deletion
@@ -1,8 +1,9 @@
 """Test Baidu Qianfan LLM Endpoint."""

-from typing import Generator
+from typing import Generator, cast

 from langchain_core.outputs import LLMResult
+from langchain_core.pydantic_v1 import SecretStr

 from langchain_community.llms.baidu_qianfan_endpoint import QianfanLLMEndpoint

@@ -42,3 +43,15 @@ def test_rate_limit() -> None:
     output = llm.generate(["write a joke"])
     assert isinstance(output, LLMResult)
     assert isinstance(output.generations, list)
+
+
+def test_qianfan_with_param_alias() -> None:
+    """Test with qianfan llm parameter alias."""
+    llm = QianfanLLMEndpoint(  # type: ignore[call-arg]
+        api_key="your-api-key",  # type: ignore[arg-type]
+        secret_key="your-secret-key",  # type: ignore[arg-type]
+        timeout=50,
+    )  # type: ignore[call-arg]
+    assert cast(SecretStr, llm.qianfan_ak).get_secret_value() == "your-api-key"
+    assert cast(SecretStr, llm.qianfan_sk).get_secret_value() == "your-secret-key"
+    assert llm.request_timeout == 50
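One way to run just the new alias test, assuming pytest and the qianfan package are installed and working from the repository root (the rest of the module consists of integration tests that expect real credentials):

    import pytest

    # Select only the alias test from the integration test module.
    pytest.main([
        "-k", "param_alias",
        "libs/community/tests/integration_tests/llms/test_qianfan_endpoint.py",
    ])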
