From 409c811a4286b2ca17be38acda326faef7e04004 Mon Sep 17 00:00:00 2001
From: ItzCrazyKns <95534749+ItzCrazyKns@users.noreply.github.com>
Date: Thu, 26 Dec 2024 19:02:20 +0530
Subject: [PATCH] feat(ollama): use axios instead of fetch

---
 src/lib/providers/ollama.ts | 9 +++++----
 1 file changed, 5 insertions(+), 4 deletions(-)

diff --git a/src/lib/providers/ollama.ts b/src/lib/providers/ollama.ts
index e23fb766..7277b278 100644
--- a/src/lib/providers/ollama.ts
+++ b/src/lib/providers/ollama.ts
@@ -2,6 +2,7 @@ import { OllamaEmbeddings } from '@langchain/community/embeddings/ollama';
 import { getKeepAlive, getOllamaApiEndpoint } from '../../config';
 import logger from '../../utils/logger';
 import { ChatOllama } from '@langchain/community/chat_models/ollama';
+import axios from 'axios';
 
 export const loadOllamaChatModels = async () => {
   const ollamaEndpoint = getOllamaApiEndpoint();
@@ -10,13 +11,13 @@ export const loadOllamaChatModels = async () => {
   if (!ollamaEndpoint) return {};
 
   try {
-    const response = await fetch(`${ollamaEndpoint}/api/tags`, {
+    const response = await axios.get(`${ollamaEndpoint}/api/tags`, {
       headers: {
         'Content-Type': 'application/json',
       },
     });
 
-    const { models: ollamaModels } = (await response.json()) as any;
+    const { models: ollamaModels } = response.data;
 
     const chatModels = ollamaModels.reduce((acc, model) => {
       acc[model.model] = {
@@ -45,13 +46,13 @@ export const loadOllamaEmbeddingsModels = async () => {
   if (!ollamaEndpoint) return {};
 
   try {
-    const response = await fetch(`${ollamaEndpoint}/api/tags`, {
+    const response = await axios.get(`${ollamaEndpoint}/api/tags`, {
       headers: {
         'Content-Type': 'application/json',
       },
     });
 
-    const { models: ollamaModels } = (await response.json()) as any;
+    const { models: ollamaModels } = response.data;
 
     const embeddingsModels = ollamaModels.reduce((acc, model) => {
       acc[model.model] = {
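
Reviewer note (supplementary, not part of the patch): the sketch below isolates the fetch-to-axios swap this commit makes when listing models from the Ollama /api/tags endpoint. The listOllamaModels helper and the example endpoint value are illustrative assumptions; the real code reads the endpoint via getOllamaApiEndpoint() and builds LangChain chat/embeddings model maps from the result.

import axios from 'axios';

// Hypothetical helper illustrating the change made in this patch.
// Before: fetch(`${endpoint}/api/tags`) followed by `(await response.json()) as any`.
// After:  axios.get(...), where the parsed JSON body is available on `response.data`.
const listOllamaModels = async (ollamaEndpoint: string) => {
  const response = await axios.get(`${ollamaEndpoint}/api/tags`, {
    headers: {
      'Content-Type': 'application/json',
    },
  });

  // axios parses the JSON response automatically, so no manual .json() call or cast is needed.
  const { models } = response.data;
  return models;
};

// Example usage (the endpoint value here is an assumption for illustration only):
listOllamaModels('http://localhost:11434')
  .then((models) => console.log(models))
  .catch((err) => console.error('Failed to list Ollama models', err));

One behavioral difference worth noting: axios rejects the promise on non-2xx responses, whereas fetch only rejects on network failures, so HTTP errors from Ollama now surface through the surrounding try/catch blocks as well.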