Merge pull request #3372 from quantified-uncertainty/llm-runner-styles-2

Updated AI workflow action styles

OAGr authored Sep 23, 2024
2 parents f135798 + 1c9aa83 commit 790f109
Showing 27 changed files with 11,537 additions and 14,762 deletions.
22 changes: 14 additions & 8 deletions packages/ai/src/LLMClient.ts
@@ -1,7 +1,7 @@
 import Anthropic from "@anthropic-ai/sdk";
 import OpenAI from "openai";
 
-import { LLMName, MODEL_CONFIGS } from "./modelConfigs.js";
+import { LlmId, MODEL_CONFIGS } from "./modelConfigs.js";
 import { squiggleSystemContent } from "./prompts.js";
 
 export type Message = {
@@ -87,21 +87,21 @@ export interface LlmMetrics {
   apiCalls: number;
   inputTokens: number;
   outputTokens: number;
-  llmName: LLMName;
+  LlmId: LlmId;
 }
 
 export function calculatePriceMultipleCalls(
-  metrics: Partial<Record<LLMName, LlmMetrics>>
+  metrics: Partial<Record<LlmId, LlmMetrics>>
 ): number {
   let totalCost = 0;
 
-  for (const [llmName, { inputTokens, outputTokens }] of Object.entries(
+  for (const [LlmId, { inputTokens, outputTokens }] of Object.entries(
     metrics
   )) {
-    const modelConfig = MODEL_CONFIGS[llmName as LLMName];
+    const modelConfig = MODEL_CONFIGS.find((model) => model.id === LlmId);
 
     if (!modelConfig) {
-      console.warn(`No pricing information found for LLM: ${llmName}`);
+      console.warn(`No pricing information found for LLM: ${LlmId}`);
       continue;
     }
 
@@ -132,7 +132,7 @@ export class LLMClient {
   private anthropicClient?: Anthropic;
 
   constructor(
-    public llmName: LLMName,
+    public LlmId: LlmId,
     openaiApiKey?: string,
     anthropicApiKey?: string
   ) {
@@ -167,7 +167,13 @@
   async run(
     conversationHistory: Message[]
   ): Promise<StandardizedChatCompletion> {
-    const selectedModelConfig = MODEL_CONFIGS[this.llmName];
+    const selectedModelConfig = MODEL_CONFIGS.find(
+      (model) => model.id === this.LlmId
+    );
+
+    if (!selectedModelConfig) {
+      throw new Error(`No model config found for LLM: ${this.LlmId}`);
+    }
 
     try {
       if (selectedModelConfig.provider === "anthropic") {
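Note: the switch from a keyed record lookup (`MODEL_CONFIGS[llmName]`) to `MODEL_CONFIGS.find(...)` means lookups can now return `undefined`, which is why `run()` gains an explicit guard. A minimal sketch of how a helper could centralize that pattern (the `getModelConfig` name is hypothetical, not part of this PR):

import { MODEL_CONFIGS, type LlmId } from "./modelConfigs.js";

// Hypothetical helper: wraps the find-plus-guard pattern so each call site
// doesn't repeat the undefined check.
function getModelConfig(id: LlmId) {
  const config = MODEL_CONFIGS.find((model) => model.id === id);
  if (!config) {
    throw new Error(`No model config found for LLM: ${id}`);
  }
  return config; // narrowed: TypeScript knows config is defined here
}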
8 changes: 4 additions & 4 deletions packages/ai/src/LLMStep.ts
@@ -10,7 +10,7 @@ import {
   Message,
 } from "./LLMClient.js";
 import { LogEntry, Logger, TimestampedLogEntry } from "./Logger.js";
-import { LLMName } from "./modelConfigs.js";
+import { LlmId } from "./modelConfigs.js";
 import { PromptPair } from "./prompts.js";
 import { Workflow } from "./workflows/Workflow.js";
 
@@ -150,10 +150,10 @@ export class LLMStepInstance<const Shape extends StepShape = StepShape> {
    const totalCost = calculatePriceMultipleCalls(
      this.llmMetricsList.reduce(
        (acc, metrics) => {
-          acc[metrics.llmName] = metrics;
+          acc[metrics.LlmId] = metrics;
          return acc;
        },
-        {} as Record<LLMName, LlmMetrics>
+        {} as Record<LlmId, LlmMetrics>
      )
    );
 
@@ -236,7 +236,7 @@ export class LLMStepInstance<const Shape extends StepShape = StepShape> {
      apiCalls: 1,
      inputTokens: completion?.usage?.prompt_tokens ?? 0,
      outputTokens: completion?.usage?.completion_tokens ?? 0,
-      llmName: workflow.llmConfig.llmName,
+      LlmId: workflow.llmConfig.llmId,
    });
 
    if (!completion?.content) {
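Note: for reference, a minimal usage sketch of the pricing path touched above, with illustrative token counts (not from this PR):

import { calculatePriceMultipleCalls, type LlmMetrics } from "./LLMClient.js";
import { type LlmId } from "./modelConfigs.js";

// Illustrative numbers only: one Claude-Sonnet call.
const metrics: Partial<Record<LlmId, LlmMetrics>> = {
  "Claude-Sonnet": {
    apiCalls: 1,
    inputTokens: 1200,
    outputTokens: 400,
    LlmId: "Claude-Sonnet",
  },
};

// 1200 * 0.000003 + 400 * 0.000015 = $0.0096 at the configured rates
const totalCost = calculatePriceMultipleCalls(metrics);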
8 changes: 4 additions & 4 deletions packages/ai/src/generateSummary.ts
@@ -35,8 +35,8 @@ function generateOverview(workflow: Workflow): string {
  let overview = `- Total Steps: ${steps.length}\n`;
  overview += `- Total Time: ${(totalTime / 1000).toFixed(2)} seconds\n`;
 
-  for (const [llmName, metrics] of Object.entries(metricsByLLM)) {
-    overview += `- ${llmName}:\n`;
+  for (const [LlmId, metrics] of Object.entries(metricsByLLM)) {
+    overview += `- ${LlmId}:\n`;
    overview += `  - API Calls: ${metrics.apiCalls}\n`;
    overview += `  - Input Tokens: ${metrics.inputTokens}\n`;
    overview += `  - Output Tokens: ${metrics.outputTokens}\n`;
@@ -82,8 +82,8 @@ function generateDetailedStepLogs(workflow: Workflow): string {
    detailedLogs += `- ⏱️ Duration: ${step.getDuration() / 1000} seconds\n`;
 
    step.llmMetricsList.forEach((metrics) => {
-      const cost = calculatePriceMultipleCalls({ [metrics.llmName]: metrics });
-      detailedLogs += `- ${metrics.llmName}:\n`;
+      const cost = calculatePriceMultipleCalls({ [metrics.LlmId]: metrics });
+      detailedLogs += `- ${metrics.LlmId}:\n`;
      detailedLogs += `  - API Calls: ${metrics.apiCalls}\n`;
      detailedLogs += `  - Input Tokens: ${metrics.inputTokens}\n`;
      detailedLogs += `  - Output Tokens: ${metrics.outputTokens}\n`;
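Note: with these changes the overview block keys metrics by model id; for example, the generated Markdown would read (illustrative numbers):

- Total Steps: 3
- Total Time: 12.34 seconds
- Claude-Sonnet:
  - API Calls: 3
  - Input Tokens: 4200
  - Output Tokens: 900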
2 changes: 1 addition & 1 deletion packages/ai/src/index.ts
@@ -10,7 +10,7 @@ export {
 
 export { llmLinker } from "./Code.js";
 
-export { type LLMName, MODEL_CONFIGS } from "./modelConfigs.js";
+export { type LlmId, type LlmName, MODEL_CONFIGS } from "./modelConfigs.js";
 
 // Export type only! We can't import SquiggleWorkflow.js because it depends on Node.js modules such as "fs".
 export { type SquiggleWorkflowInput } from "./workflows/SquiggleWorkflow.js";
34 changes: 25 additions & 9 deletions packages/ai/src/modelConfigs.ts
@@ -6,53 +6,69 @@ type ModelConfig = {
  outputRate: number;
  contextWindow: number;
  maxTokens?: number;
+  name: string;
+  id: string;
 };
 
-export const MODEL_CONFIGS = {
-  GPT4: {
+// After some discussion, we decided to store this as an array of objects instead of a record. This is because it can be a pain to iterate and filter over records, and there aren't any significant performance benefits to using a record for this data.
+export const MODEL_CONFIGS = [
+  {
+    id: "GPT4",
    provider: "openrouter",
    model: "openai/gpt-4o-2024-08-06",
    inputRate: 0.0000025,
    outputRate: 0.00001,
    contextWindow: 128000,
+    name: "GPT-4o",
  },
-  "GPT4-mini": {
+  {
+    id: "GPT4-mini",
    provider: "openrouter",
    model: "openai/gpt-4o-mini-2024-07-18",
    inputRate: 0.00000015,
    outputRate: 0.0000006,
    contextWindow: 128000,
+    name: "GPT-4o mini",
  },
-  "Claude-Sonnet": {
+  {
+    id: "Claude-Sonnet",
    provider: "anthropic",
    model: "claude-3-5-sonnet-20240620",
    inputRate: 0.000003,
    outputRate: 0.000015,
    contextWindow: 200000,
    maxTokens: 8192,
+    name: "Claude Sonnet 3.5",
  },
-  "Claude-Haiku": {
+  {
+    id: "Claude-Haiku",
    provider: "anthropic",
    model: "claude-3-haiku-20240307",
    inputRate: 0.00000025,
    outputRate: 0.00000125,
    contextWindow: 200000,
    maxTokens: 4096,
+    name: "Claude Haiku",
  },
-  "DeepSeek-Coder-V2": {
+  {
+    id: "DeepSeek-Coder-V2",
    provider: "openrouter",
    model: "deepseek/deepseek-coder",
    inputRate: 0.00000014,
    outputRate: 0.00000028,
    contextWindow: 128000,
+    name: "DeepSeek Coder V2",
  },
-  "Llama-3.1": {
+  {
+    id: "Llama-3.1",
    provider: "openrouter",
    model: "meta-llama/llama-3.1-405b-instruct",
    inputRate: 0.0000027,
    outputRate: 0.0000027,
    contextWindow: 131072,
+    name: "Llama 3.1",
  },
-} as const satisfies Record<string, ModelConfig>;
+] as const;
 
-export type LLMName = keyof typeof MODEL_CONFIGS;
+export type LlmId = (typeof MODEL_CONFIGS)[number]["id"];
+export type LlmName = (typeof MODEL_CONFIGS)[number]["name"];
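Note: because the array is declared `as const`, each `id` and `name` is narrowed to its string literal, so `LlmId` stays a closed union ("GPT4" | "GPT4-mini" | ...) just as `keyof` did for the old record, while iteration gets simpler. A small sketch (the variable name is illustrative):

import { MODEL_CONFIGS } from "./modelConfigs.js";

// With a record this needed Object.entries plus key casts; with the array
// it's a plain filter, and `provider` is still narrowed by `as const`.
const anthropicModels = MODEL_CONFIGS.filter(
  (model) => model.provider === "anthropic"
);
// => the Claude-Sonnet and Claude-Haiku entries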
22 changes: 11 additions & 11 deletions packages/ai/src/workflows/Workflow.ts
@@ -14,18 +14,18 @@ import {
  StepShape,
 } from "../LLMStep.js";
 import { TimestampedLogEntry } from "../Logger.js";
-import { LLMName } from "../modelConfigs.js";
+import { LlmId } from "../modelConfigs.js";
 import { WorkflowResult } from "../types.js";
 
 export interface LlmConfig {
-  llmName: LLMName;
+  llmId: LlmId;
  priceLimit: number;
  durationLimitMinutes: number;
  messagesInHistoryToKeep: number;
 }
 
 export const llmConfigDefault: LlmConfig = {
-  llmName: "Claude-Sonnet",
+  llmId: "Claude-Sonnet",
  priceLimit: 0.3,
  durationLimitMinutes: 1,
  messagesInHistoryToKeep: 4,
@@ -98,7 +98,7 @@ export class Workflow {
    this.startTime = Date.now();
 
    this.llmClient = new LLMClient(
-      llmConfig.llmName,
+      llmConfig.llmId,
      openaiApiKey,
      anthropicApiKey
    );
@@ -212,21 +212,21 @@ export class Workflow {
    };
  }
 
-  llmMetricSummary(): Record<LLMName, LlmMetrics> {
+  llmMetricSummary(): Record<LlmId, LlmMetrics> {
    return this.getSteps().reduce(
      (acc, step) => {
        step.llmMetricsList.forEach((metrics) => {
-          if (!acc[metrics.llmName]) {
-            acc[metrics.llmName] = { ...metrics };
+          if (!acc[metrics.LlmId]) {
+            acc[metrics.LlmId] = { ...metrics };
          } else {
-            acc[metrics.llmName].apiCalls += metrics.apiCalls;
-            acc[metrics.llmName].inputTokens += metrics.inputTokens;
-            acc[metrics.llmName].outputTokens += metrics.outputTokens;
+            acc[metrics.LlmId].apiCalls += metrics.apiCalls;
+            acc[metrics.LlmId].inputTokens += metrics.inputTokens;
+            acc[metrics.LlmId].outputTokens += metrics.outputTokens;
          }
        });
        return acc;
      },
-      {} as Record<LLMName, LlmMetrics>
+      {} as Record<LlmId, LlmMetrics>
    );
  }
 
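Note: callers now configure the model by id rather than name. A minimal sketch of overriding the default config (field values are illustrative):

import { llmConfigDefault, type LlmConfig } from "./workflows/Workflow.js";

// "GPT4-mini" is a valid LlmId from MODEL_CONFIGS; the compiler rejects
// ids that aren't in the array.
const cheapConfig: LlmConfig = {
  ...llmConfigDefault,
  llmId: "GPT4-mini",
  priceLimit: 0.1,
};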
2 changes: 0 additions & 2 deletions packages/hub/package.json
@@ -25,7 +25,6 @@
    "build-last-revision": "tsx src/scripts/buildRecentModelRevision/main.ts"
  },
  "dependencies": {
-    "@dagrejs/dagre": "^1.1.3",
    "@next-auth/prisma-adapter": "^1.0.7",
    "@pothos/core": "^3.41.1",
    "@pothos/plugin-errors": "^3.11.1",
@@ -63,7 +62,6 @@
    "react-markdown": "^9.0.1",
    "react-relay": "^16.2.0",
    "react-select": "^5.8.0",
-    "reactflow": "^11.11.4",
    "relay-runtime": "^16.2.0",
    "remark-breaks": "^4.0.0",
    "remark-gfm": "^4.0.0",
4 changes: 2 additions & 2 deletions packages/hub/src/app/RootLayout.tsx
@@ -6,7 +6,7 @@ import { FC, PropsWithChildren } from "react";
 import { useLazyLoadQuery } from "react-relay";
 import { graphql } from "relay-runtime";
 
-import { isModelRoute } from "@/routes";
+import { isAiRoute, isModelRoute } from "@/routes";
 
 import { PageFooter } from "../components/layout/RootLayout/PageFooter";
 import { PageMenu } from "../components/layout/RootLayout/PageMenu";
@@ -27,7 +27,7 @@ const InnerRootLayout: FC<PropsWithChildren> = ({ children }) => {
 
  const pathname = usePathname();
 
-  const showFooter = !isModelRoute(pathname);
+  const showFooter = !isModelRoute(pathname) && !isAiRoute(pathname);
 
  return (
    <div className={"flex min-h-screen flex-col bg-white"}>
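Note: the diff doesn't show `isAiRoute` itself. Assuming the AI pages live under `/ai`, a plausible shape for the helper in `@/routes` would be:

// Hypothetical implementation, not shown in this PR.
export function isAiRoute(pathname: string): boolean {
  return pathname === "/ai" || pathname.startsWith("/ai/");
}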
2 changes: 1 addition & 1 deletion packages/hub/src/app/ai/Badge.tsx
@@ -9,7 +9,7 @@ export const Badge: FC<
      className={clsx(
        "rounded-full px-2 py-1 text-xs",
        theme === "blue" && "bg-blue-100 text-blue-800",
-        theme === "green" && "bg-green-100 text-green-800",
+        theme === "green" && "bg-emerald-100 text-emerald-800",
        theme === "purple" && "bg-purple-100 text-purple-800"
      )}
    >
8 changes: 6 additions & 2 deletions packages/hub/src/app/ai/LogsView.tsx
@@ -9,8 +9,12 @@ export const LogsView: FC<{
 }> = ({ logSummary }) => {
  return (
    <div className="h-full w-full bg-white p-4">
-      <h2 className="text-xl font-bold">Logs</h2>
-      <MarkdownViewer md={logSummary} textSize="sm" linker={llmLinker} />
+      <MarkdownViewer
+        md={logSummary}
+        textSize="sm"
+        linker={llmLinker}
+        className="max-w-none"
+      />
    </div>
  );
 };