
Commit 70321b9

maang-holgamurraft authored and committed
Standardize SparkLLM (langchain-ai#25239)
- **Description:** Standardize SparkLLM, including:
  - docs (issue langchain-ai#24803)
  - streaming support
  - updated API URL
  - model init arg names (issue langchain-ai#20085)
1 parent df5170d commit 70321b9
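For orientation, here is a minimal usage sketch of the standardized interface this commit documents (the `app_id`/`api_key`/`api_secret`/`model`/`api_url`/`timeout` aliases plus streaming). The credential values are placeholders, so the calls only succeed with real iFlyTek Spark credentials:

```python
from langchain_community.llms import SparkLLM

# Placeholder credentials; real values come from the iFlyTek console, or can be
# supplied via the IFLYTEK_SPARK_* environment variables described in the new docstring.
llm = SparkLLM(
    app_id="your-app-id",
    api_key="your-api-key",
    api_secret="your-api-secret",
    model="Spark4.0 Ultra",  # mapped to spark_llm_domain via the new alias
    timeout=30,              # mapped to request_timeout via the new alias
)

print(llm.invoke("Explain the meaning of life in about 50 words."))

# Streaming, enabled by this change:
for chunk in llm.stream("Explain the meaning of life in about 50 words."):
    print(chunk, end="", flush=True)
```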

File tree

2 files changed: +138 -28 lines changed

libs/community/langchain_community/llms/sparkllm.py
libs/community/tests/integration_tests/llms/test_sparkllm.py


libs/community/langchain_community/llms/sparkllm.py

Lines changed: 113 additions & 28 deletions
@@ -24,64 +24,149 @@
 
 
 class SparkLLM(LLM):
-    """iFlyTek Spark large language model.
+    """iFlyTek Spark completion model integration.
+
+    Setup:
+        To use, you should set environment variables ``IFLYTEK_SPARK_APP_ID``,
+        ``IFLYTEK_SPARK_API_KEY`` and ``IFLYTEK_SPARK_API_SECRET``.
+
+        .. code-block:: bash
+
+            export IFLYTEK_SPARK_APP_ID="your-app-id"
+            export IFLYTEK_SPARK_API_KEY="your-api-key"
+            export IFLYTEK_SPARK_API_SECRET="your-api-secret"
+
+    Key init args — completion params:
+        model: Optional[str]
+            Name of IFLYTEK SPARK model to use.
+        temperature: Optional[float]
+            Sampling temperature.
+        top_k: Optional[int]
+            What search sampling control to use.
+        streaming: Optional[bool]
+            Whether to stream the results or not.
+
+    Key init args — client params:
+        app_id: Optional[str]
+            IFLYTEK SPARK APP ID. Automatically inferred from env var `IFLYTEK_SPARK_APP_ID` if not provided.
+        api_key: Optional[str]
+            IFLYTEK SPARK API KEY. If not passed in will be read from env var IFLYTEK_SPARK_API_KEY.
+        api_secret: Optional[str]
+            IFLYTEK SPARK API SECRET. If not passed in will be read from env var IFLYTEK_SPARK_API_SECRET.
+        api_url: Optional[str]
+            Base URL for API requests.
+        timeout: Optional[int]
+            Timeout for requests.
+
+    See full list of supported init args and their descriptions in the params section.
+
+    Instantiate:
+        .. code-block:: python
+
+            from langchain_community.llms import SparkLLM
 
-    To use, you should pass `app_id`, `api_key`, `api_secret`
-    as a named parameter to the constructor OR set environment
-    variables ``IFLYTEK_SPARK_APP_ID``, ``IFLYTEK_SPARK_API_KEY`` and
-    ``IFLYTEK_SPARK_API_SECRET``
+            llm = SparkLLM(
+                app_id="your-app-id",
+                api_key="your-api-key",
+                api_secret="your-api-secret",
+                # model='Spark4.0 Ultra',
+                # temperature=...,
+                # other params...
+            )
 
-    Example:
+    Invoke:
         .. code-block:: python
 
-            client = SparkLLM(
-                spark_app_id="<app_id>",
-                spark_api_key="<api_key>",
-                spark_api_secret="<api_secret>"
-            )
-    """
+            input_text = "用50个字左右阐述,生命的意义在于"
+            llm.invoke(input_text)
+
+        .. code-block:: python
+
+            '生命的意义在于实现自我价值,追求内心的平静与快乐,同时为他人和社会带来正面影响。'
+
+    Stream:
+        .. code-block:: python
+
+            for chunk in llm.stream(input_text):
+                print(chunk)
+
+        .. code-block:: python
+
+            生命 | 的意义在于 | 不断探索和 | 实现个人潜能,通过 | 学习 | 、成长和对社会 | 的贡献,追求内心的满足和幸福。
+
+    Async:
+        .. code-block:: python
+
+            await llm.ainvoke(input_text)
+
+            # stream:
+            # async for chunk in llm.astream(input_text):
+            #     print(chunk)
+
+            # batch:
+            # await llm.abatch([input_text])
+
+        .. code-block:: python
+
+            '生命的意义在于实现自我价值,追求内心的平静与快乐,同时为他人和社会带来正面影响。'
+
+    """  # noqa: E501
 
     client: Any = None  #: :meta private:
-    spark_app_id: Optional[str] = None
-    spark_api_key: Optional[str] = None
-    spark_api_secret: Optional[str] = None
-    spark_api_url: Optional[str] = None
-    spark_llm_domain: Optional[str] = None
+    spark_app_id: Optional[str] = Field(default=None, alias="app_id")
+    """Automatically inferred from env var `IFLYTEK_SPARK_APP_ID`
+    if not provided."""
+    spark_api_key: Optional[str] = Field(default=None, alias="api_key")
+    """IFLYTEK SPARK API KEY. If not passed in will be read from
+    env var IFLYTEK_SPARK_API_KEY."""
+    spark_api_secret: Optional[str] = Field(default=None, alias="api_secret")
+    """IFLYTEK SPARK API SECRET. If not passed in will be read from
+    env var IFLYTEK_SPARK_API_SECRET."""
+    spark_api_url: Optional[str] = Field(default=None, alias="api_url")
+    """Base URL path for API requests, leave blank if not using a proxy or service
+    emulator."""
+    spark_llm_domain: Optional[str] = Field(default=None, alias="model")
+    """Model name to use."""
     spark_user_id: str = "lc_user"
     streaming: bool = False
-    request_timeout: int = 30
+    """Whether to stream the results or not."""
+    request_timeout: int = Field(default=30, alias="timeout")
+    """request timeout for chat http requests"""
     temperature: float = 0.5
+    """What sampling temperature to use."""
     top_k: int = 4
+    """What search sampling control to use."""
     model_kwargs: Dict[str, Any] = Field(default_factory=dict)
+    """Holds any model parameters valid for API call not explicitly specified."""
 
     @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         values["spark_app_id"] = get_from_dict_or_env(
             values,
-            "spark_app_id",
+            ["spark_app_id", "app_id"],
             "IFLYTEK_SPARK_APP_ID",
         )
         values["spark_api_key"] = get_from_dict_or_env(
             values,
-            "spark_api_key",
+            ["spark_api_key", "api_key"],
             "IFLYTEK_SPARK_API_KEY",
         )
         values["spark_api_secret"] = get_from_dict_or_env(
             values,
-            "spark_api_secret",
+            ["spark_api_secret", "api_secret"],
             "IFLYTEK_SPARK_API_SECRET",
         )
         values["spark_api_url"] = get_from_dict_or_env(
             values,
-            "spark_api_url",
+            ["spark_api_url", "api_url"],
             "IFLYTEK_SPARK_API_URL",
-            "wss://spark-api.xf-yun.com/v3.1/chat",
+            "wss://spark-api.xf-yun.com/v3.5/chat",
         )
         values["spark_llm_domain"] = get_from_dict_or_env(
             values,
-            "spark_llm_domain",
+            ["spark_llm_domain", "model"],
             "IFLYTEK_SPARK_LLM_DOMAIN",
-            "generalv3",
+            "generalv3.5",
         )
         # put extra params into model_kwargs
         values["model_kwargs"]["temperature"] = values["temperature"] or cls.temperature
@@ -163,7 +248,7 @@ def _stream(
             [{"role": "user", "content": prompt}],
             self.spark_user_id,
             self.model_kwargs,
-            self.streaming,
+            True,
         )
         for content in self.client.subscribe(timeout=self.request_timeout):
             if "data" not in content:
@@ -200,11 +285,11 @@ def __init__(
         )
 
         self.api_url = (
-            "wss://spark-api.xf-yun.com/v3.1/chat" if not api_url else api_url
+            "wss://spark-api.xf-yun.com/v3.5/chat" if not api_url else api_url
         )
         self.app_id = app_id
         self.model_kwargs = model_kwargs
-        self.spark_domain = spark_domain or "generalv3"
+        self.spark_domain = spark_domain or "generalv3.5"
         self.queue: Queue[Dict] = Queue()
         self.blocking_message = {"content": "", "role": "assistant"}
         self.api_key = api_key
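The alias handling in `validate_environment` above relies on `get_from_dict_or_env` accepting a list of candidate keys. As a rough illustration of that fallback behavior, here is a simplified approximation (the helper name below is hypothetical; the real implementation lives in `langchain_core.utils` and may differ in details):

```python
import os
from typing import Any, Dict, List, Optional, Union


def get_from_dict_or_env_sketch(
    values: Dict[str, Any],
    keys: Union[str, List[str]],
    env_key: str,
    default: Optional[str] = None,
) -> Any:
    """Return the first key found in ``values``, else the env var, else the default."""
    for key in [keys] if isinstance(keys, str) else keys:
        if values.get(key) is not None:
            return values[key]
    if os.environ.get(env_key):
        return os.environ[env_key]
    if default is not None:
        return default
    raise ValueError(f"Did not find any of {keys}; set the {env_key} environment variable.")


# The "app_id" alias is picked up when "spark_app_id" is absent:
print(get_from_dict_or_env_sketch({"app_id": "my-app"}, ["spark_app_id", "app_id"], "IFLYTEK_SPARK_APP_ID"))
# Falls back to the default when neither key nor env var is set:
print(get_from_dict_or_env_sketch({}, ["spark_api_url", "api_url"], "IFLYTEK_SPARK_API_URL", "wss://spark-api.xf-yun.com/v3.5/chat"))
```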

libs/community/tests/integration_tests/llms/test_sparkllm.py

Lines changed: 25 additions & 0 deletions
@@ -18,3 +18,28 @@ def test_generate() -> None:
     output = llm.generate(["Say foo:"])
     assert isinstance(output, LLMResult)
     assert isinstance(output.generations, list)
+
+
+def test_spark_llm_with_param_alias() -> None:
+    """Test SparkLLM with parameters alias."""
+    llm = SparkLLM(  # type: ignore[call-arg]
+        app_id="your-app-id",
+        api_key="your-api-key",
+        api_secret="your-api-secret",
+        model="Spark4.0 Ultra",
+        api_url="your-api-url",
+        timeout=20,
+    )
+    assert llm.spark_app_id == "your-app-id"
+    assert llm.spark_api_key == "your-api-key"
+    assert llm.spark_api_secret == "your-api-secret"
+    assert llm.spark_llm_domain == "Spark4.0 Ultra"
+    assert llm.spark_api_url == "your-api-url"
+    assert llm.request_timeout == 20
+
+
+def test_spark_llm_with_stream() -> None:
+    """Test SparkLLM with stream."""
+    llm = SparkLLM()  # type: ignore[call-arg]
+    for chunk in llm.stream("你好呀"):
+        assert isinstance(chunk, str)
