Skip to content

Commit

Permalink
Improve test generation for long reviewer content
Browse files Browse the repository at this point in the history
  • Loading branch information
HansGabriel committed Dec 16, 2023
1 parent c21112a commit 53cd52a
Show file tree
Hide file tree
Showing 6 changed files with 39 additions and 15 deletions.
2 changes: 1 addition & 1 deletion apps/expo/app.config.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ import { ExpoConfig, ConfigContext } from "@expo/config";
const CLERK_PUBLISHABLE_KEY =
"pk_test_Z3Jvd2luZy1kb2Jlcm1hbi04OC5jbGVyay5hY2NvdW50cy5kZXYk";
const SERVER_URL = "https://test-trek-prod.vercel.app";
const SERVER_ENV: "development" | "production" = "production";
const SERVER_ENV: "development" | "production" = "development";

const defineConfig = (_ctx: ConfigContext): ExpoConfig => ({

Check warning on line 8 in apps/expo/app.config.ts

View workflow job for this annotation

GitHub Actions / build-lint

'_ctx' is defined but never used
name: "TestTrek",
Expand Down
16 changes: 12 additions & 4 deletions apps/expo/src/forms/CreateTestForm/index.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -313,7 +313,10 @@ const CreateTestForm: FC<Props> = ({
setBottomSheetOpen(false);
};

const createMultipleQuestions = (inputMessage: string) => {
const createMultipleQuestions = (
inputMessage: string,
messageType: "batch-messages" | "generate-topics",
) => {
const numOfQuestions =
numberOfQuestionOptions.find((option) => option.isSelected)?.value ?? 5;

Expand All @@ -331,7 +334,7 @@ const CreateTestForm: FC<Props> = ({
{
message: inputMessage,
numOfQuestions: numOfQuestions,
messageType: "batch-messages",
messageType,
},
{
onSuccess: (data) => {
Expand Down Expand Up @@ -772,7 +775,10 @@ const CreateTestForm: FC<Props> = ({
setIsSidebarOpen(true);
} else {
const reviewerContent = selectedReviewer.content;
createMultipleQuestions(extractHighlightedText(reviewerContent));
createMultipleQuestions(
extractHighlightedText(reviewerContent),
"batch-messages",
);
}
}}
/>
Expand All @@ -782,7 +788,9 @@ const CreateTestForm: FC<Props> = ({
aiQuestion={aiQuestion}
setAiQuestion={setAiQuestion}
isGenerating={isGenerating}
handleQuestionGeneration={() => createMultipleQuestions(aiQuestion)}
handleQuestionGeneration={() =>
createMultipleQuestions(aiQuestion, "generate-topics")
}
handleClose={() => {
setAiQuestion("");
setErrorInAIQuestion(false);
Expand Down
2 changes: 1 addition & 1 deletion apps/expo/src/screens/create-reviewer/index.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -369,7 +369,7 @@ export const CreateReviewerScreen = ({
{
message: inputMessage,
numOfQuestions: numOfQuestions,
messageType: "generate-topics",
messageType: "batch-messages",
},
{
onSuccess: (data) => {
Expand Down
3 changes: 1 addition & 2 deletions packages/api/src/functions/gptHandlers.ts
Original file line number Diff line number Diff line change
Expand Up @@ -104,8 +104,7 @@ export const generateTopicsPrompt = (
message: string,
numTopics?: number,
): string => {
return `Create a list of topics (should be of length ${numTopics} topics) based on: "${message}". Format as:
[Topic 1] | [Topic 2] | [Topic 3] | ... | [Topic ${numTopics}]`;
return `Create a list of topics (should be of length ${numTopics} topics) based on: "${message}". Format as: [Topic 1] | [Topic 2] | [Topic 3] | ... | [Topic ${numTopics}]`;
};

export const generatePromptForType = (
Expand Down
27 changes: 22 additions & 5 deletions packages/api/src/functions/randomQuestionsHandlers.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
import { generateChoicesPrompt, timeAndPointsPrompt } from "./gptHandlers";
import { fetchGPT } from "../services/gptApiHandlers";
import { chunk } from "lodash";
import { questionsSchema } from "@acme/schema/src/question";
import {
parseMultipleChoiceResponse,
Expand Down Expand Up @@ -77,11 +76,29 @@ export const parseTopicsList = (topicsList: string): string[] => {

/**
 * Splits a long text into sentence-aligned batches of at most `chunkSize`
 * characters each. A single sentence longer than `chunkSize` becomes its own
 * oversized batch rather than being cut mid-sentence.
 *
 * Sentences are delimited by a period NOT followed by a digit, so decimal
 * numbers such as "3.14" are not treated as sentence boundaries.
 *
 * @param string - The text to divide.
 * @param chunkSize - Soft maximum length of each batch (default 1000).
 * @returns Trimmed batches; an empty or whitespace-only input yields [].
 */
export const divideStringIntoChunks = (
  string: string,
  chunkSize = 1000,
): string[] => {
  // Split on periods not followed by a digit (keeps "3.14" intact).
  const sentences = string.split(/\.(?!\d)/);

  const batches: string[] = [];
  let currentBatch = "";

  for (const sentence of sentences) {
    // Skip the empty trailing piece produced when the text ends with a
    // period — otherwise a stray "." would be appended to the last batch.
    if (sentence.trim() === "") {
      continue;
    }

    const potentialBatch = currentBatch + sentence + ".";

    if (potentialBatch.length <= chunkSize) {
      currentBatch = potentialBatch;
    } else {
      // Only flush a non-empty batch: without this guard, a first sentence
      // longer than chunkSize pushed an empty string into the result.
      if (currentBatch.trim() !== "") {
        batches.push(currentBatch.trim());
      }
      currentBatch = sentence + ".";
    }
  }

  if (currentBatch.trim() !== "") {
    batches.push(currentBatch.trim());
  }

  return batches;
};

export const generateCombinedQuestionPrompts = (
Expand Down Expand Up @@ -189,7 +206,7 @@ export const generateCombinedQuestions = async (
}
if (Array.isArray(messages)) {
const processedQuestions = await Promise.all(
messages.map(async (_, index) => {
Array.from({ length: arrayLength }).map(async (_, index) => {
let retryCount = 0;
let finalProcessedQuestions: ParsedQuestion[] = [];
let hasOneAnswer = false;
Expand Down
4 changes: 2 additions & 2 deletions packages/api/src/router/gptApi.ts
Original file line number Diff line number Diff line change
Expand Up @@ -129,7 +129,7 @@ export const gptApiRouter = router({
let topics: string[] = [];

if (messageType === "generate-topics") {
const topicsPrompt = generateTopicsPrompt(message);
const topicsPrompt = generateTopicsPrompt(message, numOfQuestions);

const topicsResponse = await fetchGPT(topicsPrompt);

Expand All @@ -147,7 +147,7 @@ export const gptApiRouter = router({

while (remainingQuestionsLength > 0) {
const remainingQuestions = await generateCombinedQuestions(
message,
!messageType ? message : topics,
remainingQuestionsLength,
1,
);
Expand Down

0 comments on commit 53cd52a

Please sign in to comment.