Skip to content

Commit 33dc4e1

Browse files
committed
add repro script
1 parent c1dad12 commit 33dc4e1

File tree

2 files changed

+34
-0
lines changed

2 files changed

+34
-0
lines changed

python/bench/tracing_script.py

Lines changed: 33 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,33 @@
1+
"""Repro script: exercise the LangSmith PyO3 client by tracing an OpenAI chat pipeline."""

import os

# Configure the LangSmith project and opt in to the PyO3 client before any
# tracing client is constructed, so both settings are picked up.
os.environ["LANGCHAIN_PROJECT"] = "llm_messages_test_py"
os.environ["LANGSMITH_USE_PYO3_CLIENT"] = "true"

import openai
from langsmith import traceable
from langsmith.wrappers import wrap_openai

# Wrap the OpenAI client so its chat-completion calls are traced by LangSmith.
# NOTE: the original script wrapped a client twice; the first wrapped client was
# bound to the name `openai` and immediately shadowed by `import openai`,
# constructing a dead, unused client. Only one wrapped client is needed.
client = wrap_openai(openai.Client())
15+
16+
@traceable(run_type="tool", name="Retrieve Context")
def my_tool(question: str) -> str:
    """Stub retrieval tool: returns a canned context string for any question."""
    canned_context = "During this morning's meeting, we solved all world conflict."
    return canned_context
19+
20+
@traceable(name="Chat Pipeline")
def chat_pipeline(question: str):
    """Answer *question* with the chat model, grounded in context from my_tool.

    Returns the text content of the model's first completion choice.
    """
    context = my_tool(question)
    system_message = {
        "role": "system",
        "content": (
            "You are a helpful assistant. Please respond to the user's"
            " request only based on the given context."
        ),
    }
    user_message = {
        "role": "user",
        "content": f"Question: {question}\nContext: {context}",
    }
    response = client.chat.completions.create(
        model="gpt-4o-mini",
        messages=[system_message, user_message],
    )
    return response.choices[0].message.content
31+
32+
if __name__ == "__main__":
    # Run one traced pipeline invocation when executed as a script.
    demo_question = "Can you summarize this morning's meetings?"
    chat_pipeline(demo_question)

python/langsmith/client.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1288,6 +1288,7 @@ def create_run(
12881288
and run_create.get("dotted_order") is not None
12891289
):
12901290
if self._pyo3_client is not None:
1291+
print("RUN_CREATE", run_create)
12911292
self._pyo3_client.create_run(run_create)
12921293
elif self.tracing_queue is not None:
12931294
serialized_op = serialize_run_dict("post", run_create)

0 commit comments

Comments
 (0)