Skip to content

Commit 482054c

Browse files
update thread how-to (#651)
Co-authored-by: Tanushree <87711021+tanushree-sharma@users.noreply.github.com>
1 parent fddad4e commit 482054c

File tree

3 files changed

+181
-4
lines changed

3 files changed

+181
-4
lines changed
Loading
Loading

docs/observability/how_to_guides/monitoring/threads.mdx

Lines changed: 181 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,16 @@
1+
import {
2+
CodeTabs,
3+
PythonBlock,
4+
TypeScriptBlock,
5+
} from "@site/src/components/InstructionsWithCode";
6+
17
# Set up threads
28

39
:::tip Recommended Reading
410
Before diving into this content, it might be helpful to read the following:
511

612
- [Add metadata and tags to traces](../tracing/add_metadata_tags)
7-
8-
:::
13+
:::
914

1015
Many LLM applications have a chatbot-like interface in which the user and the LLM application engage in a multi-turn conversation. In order to track these conversations, you can use the `Threads` feature in LangSmith.
1116

@@ -22,11 +27,183 @@ The key name should be one of:
2227
- `thread_id`
2328
- `conversation_id`.
2429

25-
The value should be a UUID, such as `f47ac10b-58cc-4372-a567-0e02b2c3d479`.
30+
The value can be any string you want, but we recommend using UUIDs, such as `f47ac10b-58cc-4372-a567-0e02b2c3d479`.
31+
32+
### Code example
33+
34+
This example demonstrates how to log and retrieve conversation history from LangSmith to maintain long-running chats.
35+
36+
You can [add metadata to your traces](../tracing/add_metadata_tags) in LangSmith in a variety of ways. This code shows how to do so dynamically, but read the
37+
previously linked guide to learn about all the ways you can add thread identifier metadata to your traces.
38+
39+
<CodeTabs
40+
tabs={[
41+
PythonBlock(`import openai
42+
from langsmith import traceable
43+
from langsmith import Client
44+
import langsmith as ls
45+
from langsmith.wrappers import wrap_openai
46+
47+
client = wrap_openai(openai.Client())
48+
langsmith_client = Client()
49+
50+
# Config used for this example
51+
52+
langsmith_project = "project-with-threads"
53+
54+
# highlight-next-line
55+
56+
session_id = "thread-id-1"
57+
58+
# highlight-next-line
59+
60+
langsmith_extra={"project_name": langsmith_project, "metadata":{"session_id": session_id}}
61+
62+
# gets a history of all LLM calls in the thread to construct conversation history
63+
64+
def get_thread_history(thread_id: str, project_name: str): # Filter runs by the specific thread and project
65+
filter_string = f'and(in(metadata_key, ["session_id","conversation_id","thread_id"]), eq(metadata_value, "{thread_id}"))' # Only grab the LLM runs
66+
runs = [r for r in langsmith_client.list_runs(project_name=project_name, filter=filter_string, run_type="llm")]
67+
68+
# Sort by start time to get the most recent interaction
69+
runs = sorted(runs, key=lambda run: run.start_time, reverse=True)
70+
# The current state of the conversation
71+
return runs[0].inputs['messages'] + [runs[0].outputs['choices'][0]['message']]
72+
73+
# if an existing conversation is continued, this function looks up the current run’s metadata to get the session_id, calls get_thread_history, and appends the new user question before making a call to the chat model
74+
75+
@traceable(name="Chat Bot")
76+
def chat_pipeline(question: str, get_chat_history: bool = False): # Whether to continue an existing thread or start a new one
77+
if get_chat_history:
78+
run_tree = ls.get_current_run_tree()
79+
messages = get_thread_history(run_tree.extra["metadata"]["session_id"],run_tree.session_name) + [{"role": "user", "content": question}]
80+
else:
81+
messages = [{"role": "user", "content": question}]
82+
83+
# Invoke the model
84+
chat_completion = client.chat.completions.create(
85+
model="gpt-4o-mini", messages=messages
86+
)
87+
return chat_completion.choices[0].message.content
88+
89+
# Start the conversation
90+
91+
chat_pipeline("Hi, my name is Bob", langsmith_extra=langsmith_extra)`),
92+
TypeScriptBlock(`import OpenAI from "openai";
93+
import { traceable, getCurrentRunTree } from "langsmith/traceable";
94+
import { Client } from "langsmith";
95+
import { wrapOpenAI } from "langsmith/wrappers";
96+
97+
// Config used for this example
98+
const langsmithProject = "project-with-threads";
99+
// highlight-next-line
100+
const threadId = "thread-id-1";
101+
102+
const client = wrapOpenAI(new OpenAI(), {
103+
project_name: langsmithProject,
104+
metadata: { session_id: threadId }
105+
});
106+
const langsmithClient = new Client();
107+
108+
async function getThreadHistory(threadId: string, projectName: string) {
109+
// Filter runs by the specific thread and project
110+
const filterString = \`and(in(metadata_key, ["session_id","conversation_id","thread_id"]), eq(metadata_value, "\${threadId}"))\`;
111+
112+
// Only grab the LLM runs
113+
const runs = langsmithClient.listRuns({
114+
projectName: projectName,
115+
filter: filterString,
116+
runType: "llm"
117+
});
118+
119+
// Sort by start time to get the most recent interaction
120+
const runsArray = [];
121+
for await (const run of runs) {
122+
runsArray.push(run);
123+
}
124+
const sortedRuns = runsArray.sort((a, b) =>
125+
new Date(b.start_time).getTime() - new Date(a.start_time).getTime()
126+
);
127+
128+
// The current state of the conversation
129+
return [
130+
...sortedRuns[0].inputs.messages,
131+
sortedRuns[0].outputs.choices[0].message
132+
];
133+
}
134+
135+
const chatPipeline = traceable(
136+
async (
137+
question: string,
138+
options: {
139+
getChatHistory?: boolean;
140+
} = {}
141+
) => {
142+
const {
143+
getChatHistory = false,
144+
} = options;
145+
146+
let messages = [];
147+
// Whether to continue an existing thread or start a new one
148+
if (getChatHistory) {
149+
const runTree = await getCurrentRunTree();
150+
const historicalMessages = await getThreadHistory(
151+
runTree.extra.metadata.session_id,
152+
runTree.project_name
153+
);
154+
messages = [
155+
...historicalMessages,
156+
{ role:"user", content: question }
157+
];
158+
} else {
159+
messages = [{ role:"user", content: question }];
160+
}
161+
162+
// Invoke the model
163+
const chatCompletion = await client.chat.completions.create({
164+
model: "gpt-4o-mini",
165+
messages: messages
166+
});
167+
return chatCompletion.choices[0].message.content;
168+
169+
},
170+
{
171+
name: "Chat Bot",
172+
project_name: langsmithProject,
173+
metadata: { session_id: threadId }
174+
}
175+
);
176+
177+
// Start the conversation
178+
await chatPipeline("Hi, my name is Bob");`),
179+
]}
180+
groupId="client-language"
181+
/>
182+
183+
After waiting a few seconds, you can make the following calls to continue the conversation. By passing `getChatHistory: true`,
184+
you can continue the conversation from where it left off. This means that the LLM will receive the entire message history and respond to it,
185+
instead of just responding to the latest message.
186+
187+
<CodeTabs
188+
tabs={[
189+
PythonBlock(`# Continue the conversation (WAIT A FEW SECONDS BEFORE RUNNING THIS SO THE FIRST TRACE CAN BE INGESTED)
190+
chat_pipeline("What is my name?", get_chat_history=True, langsmith_extra=langsmith_extra)
191+
192+
# Keep the conversation going (WAIT A FEW SECONDS BEFORE RUNNING THIS SO THE PREVIOUS TRACE CAN BE INGESTED)
193+
194+
chat_pipeline("What was the first message I sent you", get_chat_history=True, langsmith_extra=langsmith_extra)`),
195+
TypeScriptBlock(`// Continue the conversation (WAIT A FEW SECONDS BEFORE RUNNING THIS SO THE FIRST TRACE CAN BE INGESTED)
196+
await chatPipeline("What is my name?", { getChatHistory: true });
197+
198+
// Keep the conversation going (WAIT A FEW SECONDS BEFORE RUNNING THIS SO THE PREVIOUS TRACE CAN BE INGESTED)
199+
await chatPipeline("What was the first message I sent you", { getChatHistory: true });`),
200+
]}
201+
groupId="client-language"
202+
/>
26203

27204
## View threads
28205

29-
You can view threads by clicking on the `Threads` tad in any project details page. You will then see a list of all threads, sorted by the most recent activity.
206+
You can view threads by clicking on the `Threads` tab in any project details page. You will then see a list of all threads, sorted by the most recent activity.
30207

31208
![Thread Tab](./static/convo_tab.png)
32209

0 commit comments

Comments
 (0)