Skip to content

Commit f0a646b

Browse files
author
Erick Friis
committed
Merge branch 'erick/core-openai-rfc-structured-output-tracing' of github.com:langchain-ai/langchain into erick/core-openai-rfc-structured-output-tracing
2 parents 3b59ef2 + 2666914 commit f0a646b

File tree

1 file changed

+16
-85
lines changed

1 file changed

+16
-85
lines changed

libs/standard-tests/langchain_tests/integration_tests/chat_models.py

Lines changed: 16 additions & 85 deletions
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,10 @@
1919
from langchain_core.output_parsers import StrOutputParser
2020
from langchain_core.prompts import ChatPromptTemplate
2121
from langchain_core.tools import BaseTool, tool
22-
from langchain_core.utils.function_calling import tool_example_to_messages
22+
from langchain_core.utils.function_calling import (
23+
convert_to_openai_tool,
24+
tool_example_to_messages,
25+
)
2326
from pydantic import BaseModel, Field
2427
from pydantic.v1 import BaseModel as BaseModelV1
2528
from pydantic.v1 import Field as FieldV1
@@ -1244,27 +1247,9 @@ def has_tool_calling(self) -> bool:
12441247
assert isinstance(
12451248
invoke_callback.metadatas[0]["structured_output_format"]["schema"], dict
12461249
)
1247-
assert invoke_callback.metadatas[0]["structured_output_format"]["schema"] == {
1248-
"type": "function",
1249-
"function": {
1250-
"name": "Joke",
1251-
"description": "Joke to tell user.",
1252-
"parameters": {
1253-
"properties": {
1254-
"setup": {
1255-
"description": "question to set up a joke",
1256-
"type": "string",
1257-
},
1258-
"punchline": {
1259-
"description": "answer to resolve the joke",
1260-
"type": "string",
1261-
},
1262-
},
1263-
"required": ["setup", "punchline"],
1264-
"type": "object",
1265-
},
1266-
},
1267-
}
1250+
assert invoke_callback.metadatas[0]["structured_output_format"][
1251+
"schema"
1252+
] == convert_to_openai_tool(schema)
12681253

12691254
stream_callback = _TestCallbackHandler()
12701255

@@ -1281,27 +1266,9 @@ def has_tool_calling(self) -> bool:
12811266
assert isinstance(
12821267
stream_callback.metadatas[0]["structured_output_format"]["schema"], dict
12831268
)
1284-
assert stream_callback.metadatas[0]["structured_output_format"]["schema"] == {
1285-
"type": "function",
1286-
"function": {
1287-
"name": "Joke",
1288-
"description": "Joke to tell user.",
1289-
"parameters": {
1290-
"properties": {
1291-
"setup": {
1292-
"description": "question to set up a joke",
1293-
"type": "string",
1294-
},
1295-
"punchline": {
1296-
"description": "answer to resolve the joke",
1297-
"type": "string",
1298-
},
1299-
},
1300-
"required": ["setup", "punchline"],
1301-
"type": "object",
1302-
},
1303-
},
1304-
}
1269+
assert stream_callback.metadatas[0]["structured_output_format"][
1270+
"schema"
1271+
] == convert_to_openai_tool(schema)
13051272

13061273
@pytest.mark.parametrize("schema_type", ["pydantic", "typeddict", "json_schema"])
13071274
async def test_structured_output_async(
@@ -1353,27 +1320,9 @@ def has_tool_calling(self) -> bool:
13531320
assert isinstance(
13541321
ainvoke_callback.metadatas[0]["structured_output_format"]["schema"], dict
13551322
)
1356-
assert ainvoke_callback.metadatas[0]["structured_output_format"]["schema"] == {
1357-
"type": "function",
1358-
"function": {
1359-
"name": "Joke",
1360-
"description": "Joke to tell user.",
1361-
"parameters": {
1362-
"properties": {
1363-
"setup": {
1364-
"description": "question to set up a joke",
1365-
"type": "string",
1366-
},
1367-
"punchline": {
1368-
"description": "answer to resolve the joke",
1369-
"type": "string",
1370-
},
1371-
},
1372-
"required": ["setup", "punchline"],
1373-
"type": "object",
1374-
},
1375-
},
1376-
}
1323+
assert ainvoke_callback.metadatas[0]["structured_output_format"][
1324+
"schema"
1325+
] == convert_to_openai_tool(schema)
13771326

13781327
astream_callback = _TestCallbackHandler()
13791328

@@ -1391,27 +1340,9 @@ def has_tool_calling(self) -> bool:
13911340
assert isinstance(
13921341
astream_callback.metadatas[0]["structured_output_format"]["schema"], dict
13931342
)
1394-
assert astream_callback.metadatas[0]["structured_output_format"]["schema"] == {
1395-
"type": "function",
1396-
"function": {
1397-
"name": "Joke",
1398-
"description": "Joke to tell user.",
1399-
"parameters": {
1400-
"properties": {
1401-
"setup": {
1402-
"description": "question to set up a joke",
1403-
"type": "string",
1404-
},
1405-
"punchline": {
1406-
"description": "answer to resolve the joke",
1407-
"type": "string",
1408-
},
1409-
},
1410-
"required": ["setup", "punchline"],
1411-
"type": "object",
1412-
},
1413-
},
1414-
}
1343+
assert astream_callback.metadatas[0]["structured_output_format"][
1344+
"schema"
1345+
] == convert_to_openai_tool(schema)
14151346

14161347
@pytest.mark.skipif(PYDANTIC_MAJOR_VERSION != 2, reason="Test requires pydantic 2.")
14171348
def test_structured_output_pydantic_2_v1(self, model: BaseChatModel) -> None:

0 commit comments

Comments (0)