Commit b9dc8f2

fix(js): Limit queue batch concurrency, bump timeout, add maximum wait period for serverless envs (#1124)
Fixes #1101. Bumps timeouts related to large uploads, limits the total amount of bandwidth used, and avoids blocking for too long in the default case. Adds `LANGSMITH_TRACING_BACKGROUND` to set blocking behavior. The bump to 0.2 will come along with this.
1 parent 1fbcc55 commit b9dc8f2
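
For readers landing here from the release notes, the sketch below shows how a serverless handler might combine the pieces this commit adds. It is illustrative only: the handler shape, project setup, and the `langsmith/traceable` import path are assumptions on my part, while `traceBatchConcurrency`, `timeout_ms`, `LANGSMITH_TRACING_BACKGROUND`, and `awaitPendingTraceBatches()` all come from the diff below.

import { Client } from "langsmith";
import { traceable } from "langsmith/traceable";

// New default: tracing happens in the background and root run finalization
// does not block. Set LANGSMITH_TRACING_BACKGROUND=false to restore the old
// blocking behavior instead of flushing manually.
const client = new Client({
  traceBatchConcurrency: 2, // cap on parallel batch uploads (library default is 5)
  timeout_ms: 120_000, // per-request timeout; the library default is now 90_000
});

const pipeline = traceable(
  async (text: string) => {
    return text.toUpperCase();
  },
  { name: "pipeline", client }
);

// Hypothetical serverless entry point.
export async function handler(event: { text: string }) {
  const result = await pipeline(event.text);
  // Flush queued trace batches before the runtime is frozen or torn down.
  await client.awaitPendingTraceBatches();
  return result;
}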

File tree

9 files changed, +173 -69 lines

js/package.json

Lines changed: 5 additions & 5 deletions

@@ -1,6 +1,6 @@
 {
   "name": "langsmith",
-  "version": "0.1.67",
+  "version": "0.2.0",
   "description": "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform.",
   "packageManager": "yarn@1.22.19",
   "files": [
@@ -109,9 +109,9 @@
   "@babel/preset-env": "^7.22.4",
   "@faker-js/faker": "^8.4.1",
   "@jest/globals": "^29.5.0",
-  "@langchain/core": "^0.3.1",
-  "@langchain/langgraph": "^0.2.3",
-  "@langchain/openai": "^0.3.0",
+  "@langchain/core": "^0.3.14",
+  "@langchain/langgraph": "^0.2.18",
+  "@langchain/openai": "^0.3.11",
   "@tsconfig/recommended": "^1.0.2",
   "@types/jest": "^29.5.1",
   "@typescript-eslint/eslint-plugin": "^5.59.8",
@@ -126,7 +126,7 @@
   "eslint-plugin-no-instanceof": "^1.0.1",
   "eslint-plugin-prettier": "^4.2.1",
   "jest": "^29.5.0",
-  "langchain": "^0.3.2",
+  "langchain": "^0.3.3",
   "openai": "^4.67.3",
   "prettier": "^2.8.8",
   "ts-jest": "^29.1.0",

js/src/client.ts

Lines changed: 51 additions & 29 deletions

@@ -40,6 +40,7 @@ import {
   isLangChainMessage,
 } from "./utils/messages.js";
 import {
+  getEnvironmentVariable,
   getLangChainEnvVarsMetadata,
   getLangSmithEnvironmentVariable,
   getRuntimeEnvironment,
@@ -74,6 +75,7 @@ export interface ClientConfig {
   autoBatchTracing?: boolean;
   batchSizeBytesLimit?: number;
   blockOnRootRunFinalization?: boolean;
+  traceBatchConcurrency?: number;
   fetchOptions?: RequestInit;
 }

@@ -473,7 +475,10 @@ export class Client {

   private settings: Promise<LangSmithSettings> | null;

-  private blockOnRootRunFinalization = true;
+  private blockOnRootRunFinalization =
+    getEnvironmentVariable("LANGSMITH_TRACING_BACKGROUND") === "false";
+
+  private traceBatchConcurrency = 5;

   private _serverInfo: RecordStringAny | undefined;

@@ -493,9 +498,16 @@
     if (this.webUrl?.endsWith("/")) {
       this.webUrl = this.webUrl.slice(0, -1);
     }
-    this.timeout_ms = config.timeout_ms ?? 12_000;
+    this.timeout_ms = config.timeout_ms ?? 90_000;
     this.caller = new AsyncCaller(config.callerOptions ?? {});
+    this.traceBatchConcurrency =
+      config.traceBatchConcurrency ?? this.traceBatchConcurrency;
+    if (this.traceBatchConcurrency < 1) {
+      throw new Error("Trace batch concurrency must be positive.");
+    }
     this.batchIngestCaller = new AsyncCaller({
+      maxRetries: 2,
+      maxConcurrency: this.traceBatchConcurrency,
       ...(config.callerOptions ?? {}),
       onFailedResponseHook: handle429,
     });
@@ -753,35 +765,44 @@
   }

   private async drainAutoBatchQueue() {
-    while (this.autoBatchQueue.items.length >= 0) {
-      const [batch, done] = this.autoBatchQueue.pop(
-        await this._getBatchSizeLimitBytes()
-      );
-      if (!batch.length) {
-        done();
-        return;
-      }
-      try {
-        const ingestParams = {
-          runCreates: batch
-            .filter((item) => item.action === "create")
-            .map((item) => item.item) as RunCreate[],
-          runUpdates: batch
-            .filter((item) => item.action === "update")
-            .map((item) => item.item) as RunUpdate[],
-        };
-        const serverInfo = await this._ensureServerInfo();
-        if (serverInfo?.batch_ingest_config?.use_multipart_endpoint) {
-          await this.multipartIngestRuns(ingestParams);
-        } else {
-          await this.batchIngestRuns(ingestParams);
+    const batchSizeLimit = await this._getBatchSizeLimitBytes();
+    while (this.autoBatchQueue.items.length > 0) {
+      for (let i = 0; i < this.traceBatchConcurrency; i++) {
+        const [batch, done] = this.autoBatchQueue.pop(batchSizeLimit);
+        if (!batch.length) {
+          done();
+          break;
         }
-      } finally {
-        done();
+        await this.processBatch(batch, done);
       }
     }
   }

+  private async processBatch(batch: AutoBatchQueueItem[], done: () => void) {
+    if (!batch.length) {
+      done();
+      return;
+    }
+    try {
+      const ingestParams = {
+        runCreates: batch
+          .filter((item) => item.action === "create")
+          .map((item) => item.item) as RunCreate[],
+        runUpdates: batch
+          .filter((item) => item.action === "update")
+          .map((item) => item.item) as RunUpdate[],
+      };
+      const serverInfo = await this._ensureServerInfo();
+      if (serverInfo?.batch_ingest_config?.use_multipart_endpoint) {
+        await this.multipartIngestRuns(ingestParams);
+      } else {
+        await this.batchIngestRuns(ingestParams);
+      }
+    } finally {
+      done();
+    }
+  }
+
   private async processRunOperation(
     item: AutoBatchQueueItem,
     immediatelyTriggerBatch?: boolean
@@ -4152,8 +4173,9 @@ export class Client {
    * @returns A promise that resolves once all currently pending traces have sent.
    */
   public awaitPendingTraceBatches() {
-    return Promise.all(
-      this.autoBatchQueue.items.map(({ itemPromise }) => itemPromise)
-    );
+    return Promise.all([
+      ...this.autoBatchQueue.items.map(({ itemPromise }) => itemPromise),
+      this.batchIngestCaller.queue.onIdle(),
+    ]);
   }
 }
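
The bounded concurrency above ultimately rests on the p-queue instance inside AsyncCaller (its type appears in js/src/utils/async_caller.ts below). As a rough mental model only, and assuming `maxConcurrency` is forwarded to p-queue's `concurrency` option, which this diff does not itself show, the drain-and-wait pattern looks roughly like this:

import PQueue from "p-queue";

// Mirrors the new default traceBatchConcurrency of 5.
const uploadQueue = new PQueue({ concurrency: 5 });

// Stand-in for multipartIngestRuns / batchIngestRuns; the endpoint is made up.
async function uploadBatch(batch: string[]): Promise<void> {
  await fetch("https://example.com/runs/batch", {
    method: "POST",
    body: JSON.stringify(batch),
  });
}

export async function drainExample(batches: string[][]) {
  // However many batches get enqueued, at most 5 uploads run at once.
  for (const batch of batches) {
    void uploadQueue.add(() => uploadBatch(batch));
  }
  // onIdle() resolves once the queue is empty and in-flight work has settled,
  // which is the extra condition awaitPendingTraceBatches() now waits on.
  await uploadQueue.onIdle();
}

This is also why `queue` loses its `private` modifier in async_caller.ts: `awaitPendingTraceBatches` needs to reach `batchIngestCaller.queue.onIdle()`.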

js/src/index.ts

Lines changed: 1 addition & 1 deletion

@@ -14,4 +14,4 @@ export { RunTree, type RunTreeConfig } from "./run_trees.js";
 export { overrideFetchImplementation } from "./singletons/fetch.js";

 // Update using yarn bump-version
-export const __version__ = "0.1.67";
+export const __version__ = "0.2.0";

js/src/tests/batch_client.int.test.ts

Lines changed: 41 additions & 0 deletions

@@ -10,6 +10,7 @@ import {
   waitUntilProjectFound,
   waitUntilRunFound,
 } from "./utils.js";
+import { traceable } from "../traceable.js";

 test.concurrent(
   "Test persist update run",
@@ -241,3 +242,43 @@
   },
   180_000
 );
+
+test.skip("very large runs", async () => {
+  const langchainClient = new Client({
+    autoBatchTracing: true,
+    timeout_ms: 120_000,
+  });
+
+  const projectName = "__test_large_runs" + uuidv4().substring(0, 4);
+  await deleteProject(langchainClient, projectName);
+
+  console.time("largeRunTimer");
+
+  const promises = [];
+  for (let i = 0; i < 10; i++) {
+    promises.push(
+      traceable(
+        async () => {
+          return "x".repeat(9000000);
+        },
+        {
+          project_name: projectName,
+          client: langchainClient,
+          tracingEnabled: true,
+        }
+      )()
+    );
+  }
+
+  await Promise.all(promises);
+
+  console.timeLog("largeRunTimer");
+
+  await langchainClient.awaitPendingTraceBatches();
+
+  console.timeLog("largeRunTimer");
+
+  await Promise.all([waitUntilProjectFound(langchainClient, projectName)]);
+
+  await langchainClient.deleteProject({ projectName });
+}, 180_000);

js/src/tests/batch_client.test.ts

Lines changed: 4 additions & 3 deletions

@@ -176,7 +176,7 @@ describe.each(ENDPOINT_TYPES)(
     await new Promise((resolve) => setTimeout(resolve, 300));
   });

-  it("Create + update batching should merge into a single call", async () => {
+  it.only("Create + update batching should merge into a single call", async () => {
     const client = new Client({
       apiKey: "test-api-key",
       autoBatchTracing: true,
@@ -219,7 +219,7 @@
       end_time: endTime,
     });

-    await new Promise((resolve) => setTimeout(resolve, 100));
+    await client.awaitPendingTraceBatches();

     const calledRequestParam: any = callSpy.mock.calls[0][2];
     expect(await parseMockRequestBody(calledRequestParam?.body)).toEqual({
@@ -331,10 +331,11 @@
     );
   });

-  it("should immediately trigger a batch on root run end", async () => {
+  it("should immediately trigger a batch on root run end if blockOnRootRunFinalization is set", async () => {
     const client = new Client({
       apiKey: "test-api-key",
       autoBatchTracing: true,
+      blockOnRootRunFinalization: true,
     });
     const callSpy = jest
       .spyOn((client as any).batchIngestCaller, "call")

js/src/tests/fetch.test.ts

Lines changed: 14 additions & 2 deletions

@@ -1,3 +1,4 @@
+/* eslint-disable no-process-env */
 /* eslint-disable @typescript-eslint/no-explicit-any */
 import { jest } from "@jest/globals";
 import { Client } from "../client.js";
@@ -14,14 +15,24 @@ describe.each([[""], ["mocked"]])("Client uses %s fetch", (description) => {
     globalFetchMock = jest.fn(() =>
       Promise.resolve({
         ok: true,
-        json: () => Promise.resolve({}),
+        json: () =>
+          Promise.resolve({
+            batch_ingest_config: {
+              use_multipart_endpoint: true,
+            },
+          }),
         text: () => Promise.resolve(""),
       })
     );
     overriddenFetch = jest.fn(() =>
       Promise.resolve({
         ok: true,
-        json: () => Promise.resolve({}),
+        json: () =>
+          Promise.resolve({
+            batch_ingest_config: {
+              use_multipart_endpoint: true,
+            },
+          }),
         text: () => Promise.resolve(""),
       })
     );
@@ -78,6 +89,7 @@ describe.each([[""], ["mocked"]])("Client uses %s fetch", (description) => {
   });

   test("basic traceable implementation", async () => {
+    process.env.LANGSMITH_TRACING_BACKGROUND = "false";
     const llm = traceable(
       async function* llm(input: string) {
         const response = input.repeat(2).split("");

js/src/tests/traceable.test.ts

Lines changed: 9 additions & 4 deletions

@@ -44,6 +44,7 @@ test("404s should only log, not throw an error", async () => {
   overrideFetchImplementation(overriddenFetch);
   const client = new Client({
     apiUrl: "https://foobar.notreal",
+    autoBatchTracing: false,
   });
   const llm = traceable(
     async function* llm(input: string) {
@@ -1111,8 +1112,12 @@

 test("traceable continues execution when client throws error", async () => {
   const errorClient = {
-    createRun: jest.fn().mockRejectedValue(new Error("Client error") as never),
-    updateRun: jest.fn().mockRejectedValue(new Error("Client error") as never),
+    createRun: jest
+      .fn()
+      .mockRejectedValue(new Error("Expected test client error") as never),
+    updateRun: jest
+      .fn()
+      .mockRejectedValue(new Error("Expected test client error") as never),
   };

   const tracedFunction = traceable(
@@ -1214,7 +1219,7 @@ test("traceable with processInputs throwing error does not affect invocation", a
   const { client, callSpy } = mockClient();

   const processInputs = jest.fn((_inputs: Readonly<KVMap>) => {
-    throw new Error("processInputs error");
+    throw new Error("totally expected test processInputs error");
   });

   const func = traceable(
@@ -1250,7 +1255,7 @@ test("traceable with processOutputs throwing error does not affect invocation",
   const { client, callSpy } = mockClient();

   const processOutputs = jest.fn((_outputs: Readonly<KVMap>) => {
-    throw new Error("processOutputs error");
+    throw new Error("totally expected test processInputs error");
   });

   const func = traceable(

js/src/utils/async_caller.ts

Lines changed: 1 addition & 1 deletion

@@ -55,7 +55,7 @@ export class AsyncCaller {

   protected maxRetries: AsyncCallerParams["maxRetries"];

-  private queue: typeof import("p-queue")["default"]["prototype"];
+  queue: typeof import("p-queue")["default"]["prototype"];

   private onFailedResponseHook?: ResponseCallback;
