chore: small improvements (#125)
* feat: small improvements

* feat: improve config
comoser authored Aug 7, 2023
1 parent 05d7ce8 commit 7262a50
Showing 8 changed files with 41 additions and 17 deletions.
6 changes: 5 additions & 1 deletion packages/api/config/default.json
@@ -2,7 +2,11 @@
   "ai": {
     "defaultTemperature": 0.2,
     "defaultChatContextTTL": 604800,
-    "defaultTokenLimitForSummarization": 15000
+    "defaultTokenLimitForSummarization": 15000,
+    "defaultAiModel": "gpt-3.5-turbo-16k"
+  },
+  "chat": {
+    "maxNumberOfDocumentsPerChat": 2
   },
   "authorization": {
     "roles": [
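
Note: the API reads these values through the node 'config' package, which picks up packages/api/config/default.json automatically. A minimal sketch of looking up the two new keys (the variable names are illustrative, not part of the commit):

// Sketch only: dot-path lookups against the defaults shown above.
import * as appConfig from 'config';

const defaultAiModel = appConfig.get<string>('ai.defaultAiModel'); // "gpt-3.5-turbo-16k"
const maxDocs = appConfig.get<number>('chat.maxNumberOfDocumentsPerChat'); // 2

console.log({ defaultAiModel, maxDocs });
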
5 changes: 3 additions & 2 deletions packages/api/src/ai/services/ai.service.ts
@@ -21,7 +21,7 @@ export class AiService {
   ) {
     this.llmModel = new ChatOpenAI({
       temperature: this.appConfigService.getAiAppConfig().defaultTemperature,
-      modelName: 'gpt-3.5-turbo-16k',
+      modelName: this.appConfigService.getAiAppConfig().defaultAiModel,
     });
   }

@@ -74,7 +74,8 @@
     const chain = RetrievalQAChain.fromLLM(this.llmModel, vectorStoreRetriever);

     const title = await chain.call({
-      query: 'Give me a single word that can reflect this document content',
+      query:
+        'Give me three words, joined together with "-", that can reflect this document content',
     });
     const description = await chain.call({
       query: 'Summarize this document content in a single sentence. Be concise',
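
Note: the title prompt now asks for three dash-joined words instead of a single word, and the model name comes from the app config. For reference, a standalone sketch of the RetrievalQAChain pattern used in AiService; the in-memory vector store, embeddings setup, and function name are illustrative assumptions, not part of this commit:

// Sketch only. Requires OPENAI_API_KEY in the environment.
import { ChatOpenAI } from 'langchain/chat_models/openai';
import { RetrievalQAChain } from 'langchain/chains';
import { MemoryVectorStore } from 'langchain/vectorstores/memory';
import { OpenAIEmbeddings } from 'langchain/embeddings/openai';

async function describeDocument(text: string, modelName: string) {
  const llmModel = new ChatOpenAI({ temperature: 0.2, modelName });
  const vectorStore = await MemoryVectorStore.fromTexts(
    [text],
    [{}],
    new OpenAIEmbeddings()
  );
  const chain = RetrievalQAChain.fromLLM(llmModel, vectorStore.asRetriever());

  const title = await chain.call({
    query:
      'Give me three words, joined together with "-", that can reflect this document content',
  });
  const description = await chain.call({
    query: 'Summarize this document content in a single sentence. Be concise',
  });

  return { title: title.text, description: description.text };
}
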
4 changes: 2 additions & 2 deletions packages/api/src/ai/services/memory.service.ts
@@ -2,7 +2,7 @@ import { AppConfigService } from '@/app-config/app-config.service';
 import { CACHE_CLIENT } from '@/common/constants/cache';
 import { Inject, Injectable } from '@nestjs/common';
 import { BufferMemory, ChatMessageHistory } from 'langchain/memory';
-import { AIChatMessage } from 'langchain/schema';
+import { AIMessage } from 'langchain/schema';
 import { RedisChatMessageHistory } from 'langchain/stores/message/redis';
 import { RedisClientType } from 'redis';

@@ -56,7 +56,7 @@ export class MemoryService {
       new BufferMemory({
         returnMessages: true,
         memoryKey: 'history',
-        chatHistory: new ChatMessageHistory([new AIChatMessage(summary)]),
+        chatHistory: new ChatMessageHistory([new AIMessage(summary)]),
       })
     );
   }
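
Note: the import change follows langchain's rename of its chat message classes (AIChatMessage became AIMessage); the behaviour stays the same. A standalone sketch of seeding a BufferMemory with a previously generated summary, using an illustrative summary string:

// Sketch only: start the history from a condensed summary so follow-up
// prompts stay within the token limit.
import { BufferMemory, ChatMessageHistory } from 'langchain/memory';
import { AIMessage } from 'langchain/schema';

const summary = 'The user uploaded a document about Kubernetes networking.';

const memory = new BufferMemory({
  returnMessages: true,
  memoryKey: 'history',
  chatHistory: new ChatMessageHistory([new AIMessage(summary)]),
});
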
21 changes: 16 additions & 5 deletions packages/api/src/app-config/app-config.service.ts
@@ -2,11 +2,14 @@ import { AppConfigNotFoundException } from '@/app-config/exceptions/app-config-n
 import { Injectable } from '@nestjs/common';
 import * as appConfig from 'config';

-export type AiAppConfig = {
-  defaultTemperature: number;
-  defaultChatContextTTL: number;
-  defaultTokenLimitForSummarization: number;
-};
+import {
+  ai as aiAppConfig,
+  chat as chatAppConfig,
+} from '../../config/default.json';
+
+type ChatAppConfig = typeof chatAppConfig;
+
+type AiAppConfig = typeof aiAppConfig;

 @Injectable()
 export class AppConfigService {
@@ -18,6 +21,14 @@ export class AppConfigService {
     return appConfig.get<AiAppConfig>('ai');
   }

+  getChatConfig(): ChatAppConfig {
+    if (!appConfig.has('chat')) {
+      throw new AppConfigNotFoundException();
+    }
+
+    return appConfig.get<ChatAppConfig>('chat');
+  }
+
   async getAppRoles(): Promise<string[]> {
     if (!appConfig.has('authorization.roles')) {
       throw new AppConfigNotFoundException();
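
Note: typing AiAppConfig and ChatAppConfig as typeof the imported JSON keeps them in sync with default.json automatically; it depends on the "resolveJsonModule" flag added to tsconfig.json at the end of this commit. A minimal sketch of the pattern in isolation (the inferred shape in the comment follows from the config file above):

// Sketch only: TypeScript infers the type from the JSON import.
import * as appConfig from 'config';
import { chat as chatAppConfig } from '../../config/default.json';

type ChatAppConfig = typeof chatAppConfig; // { maxNumberOfDocumentsPerChat: number }

function getChatConfig(): ChatAppConfig {
  return appConfig.get<ChatAppConfig>('chat');
}

const { maxNumberOfDocumentsPerChat } = getChatConfig(); // 2 with the defaults above
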
6 changes: 4 additions & 2 deletions packages/api/src/chats/chat-socket.gateway.ts
@@ -1,4 +1,5 @@
 import { AiService } from '@/ai/services/ai.service';
+import { AppConfigService } from '@/app-config/app-config.service';
 import { ChatsRepository } from '@/chats/chats.repository';
 import { createSocketMessageResponseFactory } from '@/chats/factory/create-socket-message.factory';
 import { AddMessageToChatUsecase } from '@/chats/usecases/add-message-to-chat.usecase';
@@ -25,7 +26,8 @@ export class ChatSocketGateway {
     private readonly joinChatUsecase: JoinChatUsecase,
     private readonly addMessageToChatUsecase: AddMessageToChatUsecase,
     private readonly chatsRepository: ChatsRepository,
-    private readonly aiService: AiService
+    private readonly aiService: AiService,
+    private readonly appConfigService: AppConfigService
   ) {}

   @SubscribeMessage('joinRoom')
@@ -107,7 +109,7 @@
           tokens: encode(aiResponse).length,
           replyTo: addedMessage.id,
           ai: {
-            llmModel: 'gpt-3.5-turbo-16k',
+            llmModel: this.appConfigService.getAiAppConfig().defaultAiModel,
           },
         },
       })
@@ -70,8 +70,8 @@ export class TransformDocToVectorJobConsumer {
   ): Promise<Document[]> {
     let loader: BaseDocumentLoader;
     const textSplitter = new RecursiveCharacterTextSplitter({
-      chunkSize: 500, // default is 1000
-      chunkOverlap: 100, // default is 200
+      chunkSize: 1000, // default is 1000
+      chunkOverlap: 200, // default is 200
     });
     const documentBlob = new Blob([document.src]);

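
Note: the splitter settings go back to the library defaults (chunkSize 1000, chunkOverlap 200), so each embedded chunk carries roughly twice as much text as before. A small standalone sketch of the splitter, with an illustrative input string:

// Sketch only: chunks of at most ~1000 characters, with up to 200
// characters shared between neighbouring chunks.
import { RecursiveCharacterTextSplitter } from 'langchain/text_splitter';

async function splitForEmbedding(text: string) {
  const textSplitter = new RecursiveCharacterTextSplitter({
    chunkSize: 1000,
    chunkOverlap: 200,
  });

  return textSplitter.createDocuments([text]);
}
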
@@ -1,3 +1,4 @@
+import { AppConfigService } from '@/app-config/app-config.service';
 import { ClerkAuthUserProvider } from '@/auth/providers/clerk/clerk-auth-user.provider';
 import { ChatsRepository } from '@/chats/chats.repository';
 import { ChatNotFoundException } from '@/chats/exceptions/chat-not-found.exception';
@@ -19,7 +20,8 @@ export class UploadDocumentsToChatUsecase implements Usecase {
     private readonly chatsRepository: ChatsRepository,
     private readonly clerkAuthUserProvider: ClerkAuthUserProvider,
     @InjectQueue(CHAT_DOCUMENT_UPLOAD_QUEUE)
-    private readonly chatDocUploadQueue: Queue
+    private readonly chatDocUploadQueue: Queue,
+    private readonly appConfigService: AppConfigService
   ) {}

   async execute(
@@ -59,9 +61,12 @@
     chat: Chat,
     files: Express.Multer.File[]
   ) {
+    const maxNumberOfDocumentsPerChat =
+      this.appConfigService.getChatConfig().maxNumberOfDocumentsPerChat;
+
     if (
-      chat.documents.length >= 2 ||
-      files.length + chat.documents.length >= 2
+      chat.documents.length >= maxNumberOfDocumentsPerChat ||
+      files.length + chat.documents.length > maxNumberOfDocumentsPerChat
     ) {
       throw new NoMoreDocumentsCanBeUploadedToChatException();
     }
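
Note: with maxNumberOfDocumentsPerChat set to 2, changing the second comparison from ">= 2" to "> maxNumberOfDocumentsPerChat" lets a chat that already holds one document accept exactly one more file (1 + 1 > 2 is false), which the old check rejected. A sketch of the guard as a standalone function, with illustrative names:

// Sketch only: the negation of the exception condition above.
function canUploadDocuments(
  existingDocuments: number,
  newFiles: number,
  maxNumberOfDocumentsPerChat: number
): boolean {
  return (
    existingDocuments < maxNumberOfDocumentsPerChat &&
    existingDocuments + newFiles <= maxNumberOfDocumentsPerChat
  );
}

canUploadDocuments(0, 2, 2); // true: two files into an empty chat
canUploadDocuments(1, 1, 2); // true: rejected by the old ">= 2" comparison
canUploadDocuments(1, 2, 2); // false: would exceed the limit
canUploadDocuments(2, 1, 2); // false: the chat is already full
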
1 change: 1 addition & 0 deletions packages/api/tsconfig.json
@@ -6,6 +6,7 @@
     "emitDecoratorMetadata": true,
     "experimentalDecorators": true,
     "allowSyntheticDefaultImports": true,
+    "resolveJsonModule": true,
     "target": "es2017",
     "sourceMap": true,
     "outDir": "./dist",
