2 files changed: +34 −0 — a new LangSmith tracing test script (shown in full below) and a one-line debug print added inside create_run in the langsmith Python client.
import os

import openai
from langsmith import traceable
from langsmith.wrappers import wrap_openai

# Send traces to a dedicated project and opt into the PyO3 (Rust) client.
os.environ["LANGCHAIN_PROJECT"] = "llm_messages_test_py"
os.environ["LANGSMITH_USE_PYO3_CLIENT"] = "true"

# Wrap the OpenAI client so chat completion calls are traced as LLM runs.
client = wrap_openai(openai.Client())


@traceable(run_type="tool", name="Retrieve Context")
def my_tool(question: str) -> str:
    return "During this morning's meeting, we solved all world conflict."


@traceable(name="Chat Pipeline")
def chat_pipeline(question: str):
    context = my_tool(question)
    messages = [
        {"role": "system", "content": "You are a helpful assistant. Please respond to the user's request only based on the given context."},
        {"role": "user", "content": f"Question: {question}\nContext: {context}"},
    ]
    chat_completion = client.chat.completions.create(
        model="gpt-4o-mini", messages=messages
    )
    return chat_completion.choices[0].message.content


if __name__ == "__main__":
    chat_pipeline("Can you summarize this morning's meetings?")
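For local verification without an OpenAI key, a stripped-down variant of the script above can drive the same @traceable code path. This is a minimal sketch, assuming LANGSMITH_API_KEY and LANGSMITH_TRACING are already set in the environment; the stub return strings are illustrative only.

# Minimal sketch: exercise the PyO3 create_run path without calling OpenAI.
# Assumes LANGSMITH_API_KEY and LANGSMITH_TRACING are configured externally.
import os

os.environ["LANGCHAIN_PROJECT"] = "llm_messages_test_py"
os.environ["LANGSMITH_USE_PYO3_CLIENT"] = "true"

from langsmith import traceable


@traceable(run_type="tool", name="Retrieve Context")
def my_tool(question: str) -> str:
    return "stub context"  # illustrative stand-in for a real retrieval step


@traceable(name="Chat Pipeline")
def chat_pipeline(question: str) -> str:
    # The nested call appears as a child run beneath the pipeline run.
    return f"Answered '{question}' using: {my_tool(question)}"


if __name__ == "__main__":
    chat_pipeline("Does the run reach the PyO3 client?")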
@@ -1288,6 +1288,7 @@ def create_run(
             and run_create.get("dotted_order") is not None
         ):
             if self._pyo3_client is not None:
+                print("RUN_CREATE", run_create)
                 self._pyo3_client.create_run(run_create)
             elif self.tracing_queue is not None:
                 serialized_op = serialize_run_dict("post", run_create)
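The added line is an unconditional print of the run payload just before it is handed to the PyO3 client, i.e. temporary debug instrumentation for this test. A possible follow-up, sketched here under the assumption that the output should be easy to silence, is to gate it behind an environment variable; LANGSMITH_DEBUG_RUN_CREATE is an illustrative name, not an existing langsmith setting.

import json
import os


def debug_run_create(run_create: dict) -> None:
    # Only emit the payload when explicitly requested; the env var name is
    # hypothetical and would need to be agreed on before merging.
    if os.getenv("LANGSMITH_DEBUG_RUN_CREATE") == "true":
        print("RUN_CREATE", json.dumps(run_create, default=str, indent=2))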