feat(BE-190): As a user, I want to be able to use ollama-client
 - update Getting Started doc
 - add OpenAIITest
 - code refactor
hanrw committed Apr 20, 2024
1 parent 0cea381 commit b7f73df
Showing 3 changed files with 14 additions and 64 deletions.
README.md: 2 changes (1 addition & 1 deletion)
@@ -24,7 +24,7 @@ import com.tddworks.openai.api.chat.api.ChatMessage
import com.tddworks.openai.api.chat.api.Model
import com.tddworks.openai.di.initOpenAI

val openAI = initOpenAI(OpenAIConfig(
val openAI = initOpenAI(OpenAIConfig(
baseUrl = { "YOUR_BASE_URL" },
apiKey = { "YOUR_API_KEY" }
))
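
The snippet above covers the plain OpenAI client. Since this commit is about using ollama-client through the gateway, a minimal end-to-end sketch is worth spelling out. The sketch below is assembled from the OpenAIGatewayITest diff later in this commit; the import path for initOpenAIGateway and the assumption that streamCompletions returns a kotlinx.coroutines Flow are inferred from the test, not shown explicitly in the diff.

import com.tddworks.anthropic.api.AnthropicConfig
import com.tddworks.ollama.api.OllamaConfig
import com.tddworks.ollama.api.OllamaModel
import com.tddworks.openai.api.OpenAIConfig
import com.tddworks.openai.api.chat.api.ChatCompletionRequest
import com.tddworks.openai.api.chat.api.ChatMessage
import com.tddworks.openai.api.chat.api.Model
import com.tddworks.openai.gateway.api.initOpenAIGateway // assumed import path
import kotlinx.coroutines.flow.collect
import kotlinx.coroutines.runBlocking

fun main() = runBlocking {
    // Configure the gateway the same way OpenAIGatewayITest does in this commit.
    val gateway = initOpenAIGateway(
        openAIConfig = OpenAIConfig(
            baseUrl = { "api.openai.com" },
            apiKey = { System.getenv("OPENAI_API_KEY") ?: "CONFIGURE_ME" }
        ),
        anthropicConfig = AnthropicConfig(
            baseUrl = { "api.anthropic.com" },
            apiKey = { System.getenv("ANTHROPIC_API_KEY") ?: "CONFIGURE_ME" },
            anthropicVersion = { "2023-06-01" }
        ),
        ollamaConfig = OllamaConfig() // defaults; assumes a local Ollama server is running
    )

    // Route an OpenAI-style streaming request to a local Ollama model.
    gateway.streamCompletions(
        ChatCompletionRequest(
            messages = listOf(ChatMessage.UserMessage("hello")),
            maxTokens = 1024,
            model = Model(OllamaModel.LLAMA3.value)
        )
    ).collect { chunk -> println(chunk) } // assumes a Flow of streamed completion chunks
}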
@@ -96,56 +96,4 @@ class DefaultChatApiTest {
awaitComplete()
}
}

// @Disabled
// @Test
// fun chatCompletion() = runBlocking {
// val url = System.getenv("OPEN_API_URL") ?: ""
// val token = System.getenv("OPEN_API_TOKEN") ?: ""
//
// val ktorCompletionsApi = KtorChatApi(
// openAIHttpClient = OpenAIHttpClient.default(
// httpClient = createHttpClient(
// apiUrl = { url },
// apiToken = { token },
// engine = CIO
// )
// )
// )
//
//
// val r = ktorCompletionsApi.chatCompletion(
// ChatCompletionRequest.chatCompletionRequest(
// listOf(ChatMessage("hello")),
// )
// )
//
// assertEquals(1, r.choices.size)
// }
//
// @Disabled
// @Test
// fun chatCompletions() = runBlocking {
// val url = System.getenv("OPEN_API_URL") ?: ""
// val token = System.getenv("OPEN_API_TOKEN") ?: ""
//
// val ktorCompletionsApi = KtorChatApi(
// openAIHttpClient = OpenAIHttpClient.default(
// httpClient = createHttpClient(
// apiUrl = { url },
// apiToken = { token },
// engine = CIO
// )
// )
// )
//
//
// val r = ktorCompletionsApi.chatCompletions(
// ChatCompletionRequest.chatCompletionsRequest(
// listOf(ChatMessage("hello")),
// )
// ).toList()
//
// assertEquals(11, r.size)
// }
}
@@ -3,7 +3,6 @@ package com.tddworks.openai.gateway.api
import app.cash.turbine.test
import com.tddworks.anthropic.api.AnthropicConfig
import com.tddworks.anthropic.api.Model
import com.tddworks.di.getInstance
import com.tddworks.ollama.api.OllamaConfig
import com.tddworks.ollama.api.OllamaModel
import com.tddworks.openai.api.OpenAIConfig
@@ -16,6 +15,7 @@ import org.junit.jupiter.api.Test
import org.junit.jupiter.api.condition.EnabledIfEnvironmentVariable
import org.koin.test.junit5.AutoCloseKoinTest
import kotlin.test.assertNotNull
import kotlin.time.Duration.Companion.seconds
import com.tddworks.openai.api.chat.api.ChatCompletionRequest as OpenAIChatCompletionRequest
import com.tddworks.openai.api.chat.api.Model as OpenAIModel

@@ -28,16 +28,18 @@ import com.tddworks.openai.api.chat.api.Model as OpenAIModel
@ExperimentalSerializationApi
class OpenAIGatewayITest : AutoCloseKoinTest() {

private lateinit var gateway: OpenAIGateway

@BeforeEach
fun setUp() {
initOpenAIGateway(
gateway = initOpenAIGateway(
openAIConfig = OpenAIConfig(
baseUrl = { "api.openai.com" },
apiKey = { System.getenv("OPENAI_API_KEY") ?: "" }
apiKey = { System.getenv("OPENAI_API_KEY") ?: "CONFIGURE_ME" }
),
anthropicConfig = AnthropicConfig(
baseUrl = { "api.anthropic.com" },
apiKey = { System.getenv("ANTHROPIC_API_KEY") ?: "" },
apiKey = { System.getenv("ANTHROPIC_API_KEY") ?: "CONFIGURE_ME" },
anthropicVersion = { "2023-06-01" }
),
ollamaConfig = OllamaConfig()
@@ -47,14 +49,14 @@ class OpenAIGatewayITest : AutoCloseKoinTest() {
@Test
@EnabledIfEnvironmentVariable(named = "OLLAMA_STARTED", matches = "true")
fun `should use ollama client to get chat completions`() = runTest {
val gateway = getInstance<OpenAIGateway>()
gateway.streamCompletions(
OpenAIChatCompletionRequest(
messages = listOf(ChatMessage.UserMessage("hello")),
maxTokens = 1024,
model = OpenAIModel(OllamaModel.LLAMA2.value)
model = OpenAIModel(OllamaModel.LLAMA3.value)
)
).test {
).test(timeout = 10.seconds) {
assertNotNull(awaitItem())
assertNotNull(awaitItem())
cancelAndIgnoreRemainingEvents()
}
@@ -63,14 +65,14 @@ class OpenAIGatewayITest : AutoCloseKoinTest() {
@Test
@EnabledIfEnvironmentVariable(named = "OPENAI_API_KEY", matches = ".+")
fun `should use openai client to get chat completions`() = runTest {
val gateway = getInstance<OpenAIGateway>()
gateway.streamCompletions(
OpenAIChatCompletionRequest(
messages = listOf(ChatMessage.UserMessage("hello")),
maxTokens = 1024,
model = OpenAIModel.GPT_3_5_TURBO
)
).test {
).test(timeout = 10.seconds) {
assertNotNull(awaitItem())
assertNotNull(awaitItem())
cancelAndIgnoreRemainingEvents()
}
@@ -79,14 +81,14 @@ class OpenAIGatewayITest : AutoCloseKoinTest() {
@Test
@EnabledIfEnvironmentVariable(named = "ANTHROPIC_API_KEY", matches = ".+")
fun `should use anthropic client to get chat completions`() = runTest {
val gateway = getInstance<OpenAIGateway>()
gateway.streamCompletions(
OpenAIChatCompletionRequest(
messages = listOf(ChatMessage.UserMessage("hello")),
maxTokens = 1024,
model = OpenAIModel(Model.CLAUDE_3_HAIKU.value)
)
).test {
).test(timeout = 10.seconds) {
assertNotNull(awaitItem())
assertNotNull(awaitItem())
cancelAndIgnoreRemainingEvents()
}
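
Note that the three integration tests above are environment-gated: the Ollama path only runs when OLLAMA_STARTED=true, and the OpenAI and Anthropic paths only run when OPENAI_API_KEY or ANTHROPIC_API_KEY are set, so the suite stays green by default. Running the Ollama test locally presumably requires an Ollama server on its default port with the llama3 model pulled; that prerequisite is not stated in the diff itself.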
